elan-1.3.1/.github/workflows/ci.yml

name: CI
on:
  push:
    branches:
      - '*'
    tags:
      - 'v*'
  pull_request:
    branches:
      - '*'

jobs:
  Build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        include:
          - name: Linux
            os: ubuntu-latest
            target: x86_64-unknown-linux-musl
            # back compat
            release-target-name: x86_64-unknown-linux-gnu
          - name: Linux aarch64
            os: ubuntu-latest
            target: aarch64-unknown-linux-musl
            # back compat
            release-target-name: aarch64-unknown-linux-gnu
          - name: macOS
            os: macos-latest
            target: x86_64-apple-darwin
          - name: Windows
            os: windows-latest
            target: x86_64-pc-windows-msvc
      # complete all jobs
      fail-fast: false
    env:
      RELEASE_TARGET_NAME: ${{ matrix.release-target-name || matrix.target }}
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          target: ${{ matrix.target }}
          profile: minimal
      - uses: actions/cache@v2
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            target
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: Setup macOS
        if: matrix.name == 'macOS'
        shell: bash
        run: |
          brew install coreutils
      - name: Build
        uses: actions-rs/cargo@v1
        with:
          command: build
          args: --release --target ${{ matrix.target }}
          use-cross: ${{ matrix.name == 'Linux' || matrix.target == 'aarch64-unknown-linux-musl' }}
      - name: Package
        shell: bash
        run: |
          cd target/${{ matrix.target }}/release
          tar czf ../../../elan-${{ matrix.release-target-name || matrix.target }}.tar.gz elan-init
        if: matrix.os != 'windows-latest'
      - name: Package
        run: |
          cd target/${{ matrix.target }}/release
          Compress-Archive elan-init.exe ../../../elan-${{ matrix.release-target-name || matrix.target }}.zip
        if: matrix.os == 'windows-latest'
      - uses: actions/upload-artifact@v2
        with:
          name: build-${{ matrix.name }}
          path: elan-${{ matrix.release-target-name || matrix.target }}*
          if-no-files-found: error
      - name: Test
        if: matrix.target != 'aarch64-unknown-linux-musl'
        uses: actions-rs/cargo@v1
        with:
          command: test
          args: --release --target ${{ matrix.target }}
          use-cross: ${{ matrix.name == 'Linux' }}
      - name: Install Test
        if: matrix.target != 'aarch64-unknown-linux-musl'
        shell: bash
        run: |
          RUST_BACKTRACE=1 target/${{ matrix.target }}/release/elan-init -y
          # not created on Windows
          [ -f ~/.elan/env ] && source ~/.elan/env || export PATH=$PATH:~/.elan/bin
          elan which leanpkg
          leanpkg new foo
          (cd foo; leanpkg build)
          elan default leanprover/lean4:nightly
          mkdir foo4; cd foo4
          leanpkg init foo4
          leanpkg build
      - uses: softprops/action-gh-release@v1
        if: startsWith(github.ref, 'refs/tags/v')
        with:
          files: elan-${{ matrix.release-target-name || matrix.target }}*
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

elan-1.3.1/.gitignore

# Generated by Cargo
# will have compiled files and executables
/target/

# These are backup files generated by rustfmt
**/*.rs.bk

/.idea/

elan-1.3.1/CHANGELOG.md

# 1.3.1 - 2021-11-01

## Changed
- Update dependencies

# 1.3.0 - 2021-11-01

## Added
- Support for zstd-compressed tarballs

# 1.2.0 - 2021-10-19

## Added
- Support for ARM64 Linux

# 1.1.2 - 2021-10-15

## Changed
- Remove another "press any key to exit"

# 1.1.1 - 2021-10-15

## Changed
- Remove "press any key to exit" step from Windows installation not needed for VS Code or PowerShell method

# 1.1.0 - 2021-10-08

## Added
- Add `lake` Lean 4 executable

# 1.0.8 - 2021-09-10

## Changed
- Fix `elan self update` on not-Linux, again

# 1.0.7 - 2021-08-16

## Changed
- Default to respective toolchain inside of `~/.elan` (#36)

# 1.0.6 - 2021-05-25

## Changed
- Fix `elan self update` on not-Linux and build from cmdline

# 1.0.5 - 2021-05-25

## Changed
- Run extension-less tools such as `leanc` using `sh` on Windows (and hope for the best...)

# 1.0.4 - 2021-05-24

## Changed
- Update suggestion when no default toolchain is configured (#31)
- Fix `elan show` when no default toolchain is configured (#33)

# 1.0.3 - 2021-04-30

## Changed
- Fix `elan self update` download URL on Linux

# 1.0.2 - 2021-04-28

## Changed
- Fix installation from non-default repos

# 1.0.1 - 2021-04-28

## Changed
- Fix updating channels from non-default repos (e.g. `leanprover/lean4:nightly`)

This change affects the store location of such toolchains, so you will have to re-install them first.
```sh
$ elan toolchain uninstall leanprover-lean4-nightly
$ elan toolchain install leanprover/lean4:nightly
```

# 1.0.0 - 2021-04-17

- Move to `leanprover/elan`

# 0.11.0 - 2021-03-09

## Changed
- Make `elan` a static executable on Linux
- Improve `leanpkg.toml` error handling (#26)
- Make downloaded files read-only (on Linux/macOS) (#27)

# 0.10.3 - 2021-01-15

## Changed
- Hopefully fix Lean 4 leanpkg on Windows

# 0.10.2 - 2020-05-11

## Changed
- Hopefully actually restore `elan toolchain link` functionality

# 0.10.1 - 2020-05-11

## Changed
- Hopefully restore `elan toolchain link` functionality

# 0.10.0 - 2020-05-08

## Changed
- Accept (almost) arbitrary release tag names in addition to version numbers

# 0.9.0 - 2020-05-07

## Added
- Add `leanc`, `leanmake` Lean 4 executables

# 0.8.0 - 2020-03-06

## Changed
- stable/nightly now refer to leanprover-community, Lean's community fork. This includes the toolchain installed by default (stable).

# 0.7.5 - 2019-03-21

## Changed
- Fix release lookup once more with feeling

# 0.7.4 - 2019-03-20

## Changed
- Fix self-update always triggering

# 0.7.3 - 2019-03-20

## Changed
- Fix lookup of latest Github release of both Lean and elan

# 0.7.2 - 2019-01-15

## Changed
- Fix name check in `elan toolchain link` (#17)

# 0.7.0 - 2018-09-16

## Added
- elan will now warn if there are other Lean installations in the PATH before installing

## Changed
- Fix mtimes not being restored from installation archives
- Fix invoking leanpkg on Windows

# 0.6.0 - 2018-08-01

## Added
- Version specifiers can now point to custom forks of Lean, such as `khoek/klean:3.4.1` (#8)

# 0.5.0 - 2018-04-20

## Changed
- An explicit version passed to a proxy command like in `leanpkg +nightly build` will now be installed automatically when necessary
- Full toolchain names and their directories do not mention the operating system (the "target triple", to be exact) any more. You may want to delete your old toolchains from `~/.elan/toolchains` to save space.
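For example (the toolchain directory name below is only illustrative; check `~/.elan/toolchains` for the names actually present on your machine):
```sh
# fetches and installs the nightly toolchain automatically if it is missing
$ leanpkg +nightly build
# reclaim space taken by an old, target-triple-suffixed toolchain directory
$ rm -rf ~/.elan/toolchains/nightly-x86_64-unknown-linux-gnu
```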
# [0.4.0 - 2018-04-17] ## Changed - `leanpkg.toml` and `lean-toolchain` files can now reference custom toolchains (those added by `elan toolchain link`) # [0.3.0] - 2018-04-11 ## Added - `leanchecker` proxy # [0.2.0] - 2018-04-11 ## Added - `curl | sh` installation and instructions ## Changed - Fix `elan toolchain link` (#1) - Fix self-update - De-rustify docs # [0.1.0] - 2018-04-10 Minimum viable product release ## Added - Building on Rustup's code, implement installing and managing Lean toolchains - Have leanpkg.toml files override the Lean version elan-1.3.1/Cargo.lock000066400000000000000000001326751414005346400143570ustar00rootroot00000000000000# This file is automatically @generated by Cargo. # It is not intended for manual editing. version = 3 [[package]] name = "addr2line" version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e61f2b7f93d2c7d2b08263acaa4a363b3e276806c68af6134c44f523bf1aacd" dependencies = [ "gimli", ] [[package]] name = "adler" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "aho-corasick" version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" dependencies = [ "memchr", ] [[package]] name = "ansi_term" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" dependencies = [ "winapi 0.3.9", ] [[package]] name = "atty" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ "hermit-abi", "libc", "winapi 0.3.9", ] [[package]] name = "autocfg" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" [[package]] name = "backtrace" version = "0.3.62" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "091bcdf2da9950f96aa522681ce805e6857f6ca8df73833d35736ab2dc78e152" dependencies = [ "addr2line", "cc", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", ] [[package]] name = "base64" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" [[package]] name = "bitflags" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "block-buffer" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ "generic-array", ] [[package]] name = "bumpalo" version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f1e260c3a9040a7c19a12468758f4c16f31a81a1fe087482be9570ec864bb6c" [[package]] name = "byteorder" version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" [[package]] name = 
"bzip2" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6afcd980b5f3a45017c57e57a2fcccbb351cc43a356ce117ef760ef8052b89b0" dependencies = [ "bzip2-sys", "libc", ] [[package]] name = "bzip2-sys" version = "0.1.11+1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" dependencies = [ "cc", "libc", "pkg-config", ] [[package]] name = "cc" version = "1.0.71" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79c2681d6594606957bbb8631c4b90a7fcaaa72cdb714743a437b156d6a7eedd" dependencies = [ "jobserver", ] [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "clap" version = "2.33.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002" dependencies = [ "ansi_term", "atty", "bitflags", "strsim", "textwrap", "unicode-width", "vec_map", ] [[package]] name = "core-foundation" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6888e10551bb93e424d8df1d07f1a8b4fceb0001a3a4b048bfc47554946f47b3" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "core-foundation-sys" version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "cpufeatures" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469" dependencies = [ "libc", ] [[package]] name = "crc32fast" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-channel" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4" dependencies = [ "cfg-if", "crossbeam-utils", ] [[package]] name = "crossbeam-deque" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" dependencies = [ "cfg-if", "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd" dependencies = [ "cfg-if", "crossbeam-utils", "lazy_static", "memoffset", "scopeguard", ] [[package]] name = "crossbeam-utils" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" dependencies = [ "cfg-if", "lazy_static", ] [[package]] name = "curl" version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aaa3b8db7f3341ddef15786d250106334d4a6c4b0ae4a46cd77082777d9849b9" dependencies = [ "curl-sys", "libc", "openssl-probe", "openssl-sys", "schannel", "socket2", "winapi 0.3.9", ] [[package]] name = "curl-sys" version = "0.4.49+curl-7.79.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e0f44960aea24a786a46907b8824ebc0e66ca06bf4e4978408c7499620343483" dependencies = [ "cc", "libc", "libz-sys", "openssl-sys", "pkg-config", "vcpkg", "winapi 0.3.9", ] [[package]] name = "digest" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" dependencies = [ "generic-array", ] [[package]] name = "dirs" version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30baa043103c9d0c2a57cf537cc2f35623889dc0d405e6c3cccfadbc81c71309" dependencies = [ "dirs-sys", ] [[package]] name = "dirs-next" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" dependencies = [ "cfg-if", "dirs-sys-next", ] [[package]] name = "dirs-sys" version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03d86534ed367a67548dc68113a0f5db55432fdfbb6e6f9d77704397d95d5780" dependencies = [ "libc", "redox_users", "winapi 0.3.9", ] [[package]] name = "dirs-sys-next" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" dependencies = [ "libc", "redox_users", "winapi 0.3.9", ] [[package]] name = "download" version = "0.4.0" dependencies = [ "curl", "env_proxy", "error-chain", "futures", "hyper", "lazy_static", "openssl", "reqwest", "url", ] [[package]] name = "either" version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" [[package]] name = "elan" version = "1.3.1" dependencies = [ "clap", "download", "elan-dist", "elan-utils", "error-chain", "flate2", "gcc", "itertools", "json", "libc", "markdown", "rand", "regex", "remove_dir_all 0.6.1", "same-file", "scopeguard", "serde", "serde_derive", "serde_json", "sha2", "tar", "tempfile", "term", "time 0.3.4", "toml", "url", "wait-timeout", "winapi 0.3.9", "winreg 0.8.0", "zip", ] [[package]] name = "elan-dist" version = "1.11.0" dependencies = [ "elan-utils", "error-chain", "filetime", "flate2", "itertools", "json", "libc", "ole32-sys", "regex", "remove_dir_all 0.7.0", "sha2", "tar", "toml", "url", "walkdir", "winapi 0.3.9", "winreg 0.8.0", "zip", "zstd", ] [[package]] name = "elan-utils" version = "1.11.0" dependencies = [ "curl", "dirs", "download", "error-chain", "libc", "openssl", "rand", "regex", "remove_dir_all 0.7.0", "scopeguard", "semver", "sha2", "toml", "url", "winapi 0.3.9", "winreg 0.8.0", ] [[package]] name = "encoding_rs" version = "0.8.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a74ea89a0a1b98f6332de42c95baff457ada66d1cb4030f9ff151b2041a1c746" dependencies = [ "cfg-if", ] [[package]] name = "env_proxy" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a5019be18538406a43b5419a5501461f0c8b49ea7dfda0cfc32f4e51fc44be1" dependencies = [ "log", "url", ] [[package]] name = "error-chain" version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d2f06b9cac1506ece98fe3231e3cc9c4410ec3d5b1f24ae1c8946f0742cdefc" dependencies = [ "backtrace", "version_check", ] [[package]] name = "filetime" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "975ccf83d8d9d0d84682850a38c8169027be83368805971cc4f238c2b245bc98" dependencies = [ 
"cfg-if", "libc", "redox_syscall", "winapi 0.3.9", ] [[package]] name = "flate2" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" dependencies = [ "cfg-if", "crc32fast", "libc", "miniz_oxide", ] [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foreign-types" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" dependencies = [ "foreign-types-shared", ] [[package]] name = "foreign-types-shared" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" dependencies = [ "matches", "percent-encoding", ] [[package]] name = "futures" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a12aa0eb539080d55c3f2d45a67c3b58b6b0773c1a3ca2dfec66d58c97fd66ca" dependencies = [ "futures-channel", "futures-core", "futures-executor", "futures-io", "futures-sink", "futures-task", "futures-util", ] [[package]] name = "futures-channel" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5da6ba8c3bb3c165d3c7319fc1cc8304facf1fb8db99c5de877183c08a273888" dependencies = [ "futures-core", "futures-sink", ] [[package]] name = "futures-core" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88d1c26957f23603395cd326b0ffe64124b818f4449552f960d815cfba83a53d" [[package]] name = "futures-executor" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45025be030969d763025784f7f355043dc6bc74093e4ecc5000ca4dc50d8745c" dependencies = [ "futures-core", "futures-task", "futures-util", ] [[package]] name = "futures-io" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "522de2a0fe3e380f1bc577ba0474108faf3f6b18321dbf60b3b9c39a75073377" [[package]] name = "futures-macro" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "18e4a4b95cea4b4ccbcf1c5675ca7c4ee4e9e75eb79944d07defde18068f79bb" dependencies = [ "autocfg", "proc-macro-hack", "proc-macro2", "quote", "syn", ] [[package]] name = "futures-sink" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36ea153c13024fe480590b3e3d4cad89a0cfacecc24577b68f86c6ced9c2bc11" [[package]] name = "futures-task" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d3d00f4eddb73e498a54394f228cd55853bdf059259e8e7bc6e69d408892e99" [[package]] name = "futures-util" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36568465210a3a6ee45e1f165136d68671471a501e632e9a98d96872222b5481" dependencies = [ "autocfg", "futures-channel", "futures-core", "futures-io", "futures-macro", "futures-sink", "futures-task", "memchr", "pin-project-lite", "pin-utils", "proc-macro-hack", "proc-macro-nested", "slab", ] [[package]] name = "gcc" version = "0.3.55" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" [[package]] name = "generic-array" version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817" dependencies = [ "typenum", "version_check", ] [[package]] name = "getrandom" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" dependencies = [ "cfg-if", "libc", "wasi", ] [[package]] name = "gimli" version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0a01e0497841a3b2db4f8afa483cce65f7e96a3498bd6c541734792aeac8fe7" [[package]] name = "h2" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fd819562fcebdac5afc5c113c3ec36f902840b70fd4fc458799c8ce4607ae55" dependencies = [ "bytes", "fnv", "futures-core", "futures-sink", "futures-util", "http", "indexmap", "slab", "tokio", "tokio-util", "tracing", ] [[package]] name = "hashbrown" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" [[package]] name = "hermit-abi" version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" dependencies = [ "libc", ] [[package]] name = "http" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1323096b05d41827dadeaee54c9981958c0f94e670bc94ed80037d1a7b8b186b" dependencies = [ "bytes", "fnv", "itoa", ] [[package]] name = "http-body" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ff4f84919677303da5f147645dbea6b1881f368d03ac84e1dc09031ebd7b2c6" dependencies = [ "bytes", "http", "pin-project-lite", ] [[package]] name = "httparse" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acd94fdbe1d4ff688b67b04eee2e17bd50995534a61539e45adfefb45e5e5503" [[package]] name = "httpdate" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6456b8a6c8f33fee7d958fcd1b60d55b11940a79e63ae87013e6d22e26034440" [[package]] name = "hyper" version = "0.14.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b91bb1f221b6ea1f1e4371216b70f40748774c2fb5971b450c07773fb92d26b" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", "h2", "http", "http-body", "httparse", "httpdate", "itoa", "pin-project-lite", "socket2", "tokio", "tower-service", "tracing", "want", ] [[package]] name = "hyper-tls" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", "hyper", "native-tls", "tokio", "tokio-native-tls", ] [[package]] name = "idna" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" dependencies = [ "matches", "unicode-bidi", "unicode-normalization", ] [[package]] name = "indexmap" version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5" dependencies = [ 
"autocfg", "hashbrown", ] [[package]] name = "ipnet" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68f2d64f2edebec4ce84ad108148e67e1064789bee435edc5b60ad398714a3a9" [[package]] name = "itertools" version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf" dependencies = [ "either", ] [[package]] name = "itoa" version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" [[package]] name = "jobserver" version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af25a77299a7f711a01975c35a6a424eb6862092cc2d6c72c4ed6cbc56dfc1fa" dependencies = [ "libc", ] [[package]] name = "js-sys" version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7cc9ffccd38c451a86bf13657df244e9c3f37493cce8e5e21e940963777acc84" dependencies = [ "wasm-bindgen", ] [[package]] name = "json" version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "078e285eafdfb6c4b434e0d31e8cfcb5115b651496faca5749b88fafd4f23bfd" [[package]] name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a60553f9a9e039a333b4e9b20573b9e9b9c0bb3a11e201ccc48ef4283456d673" [[package]] name = "libz-sys" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de5435b8549c16d423ed0c03dbaafe57cf6c3344744f1242520d59c9d8ecec66" dependencies = [ "cc", "libc", "pkg-config", "vcpkg", ] [[package]] name = "log" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" dependencies = [ "cfg-if", ] [[package]] name = "markdown" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef3aab6a1d529b112695f72beec5ee80e729cb45af58663ec902c8fac764ecdd" dependencies = [ "lazy_static", "pipeline", "regex", ] [[package]] name = "matches" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" [[package]] name = "memchr" version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" [[package]] name = "memoffset" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9" dependencies = [ "autocfg", ] [[package]] name = "mime" version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" [[package]] name = "miniz_oxide" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" dependencies = [ "adler", "autocfg", ] [[package]] name = "mio" version = "0.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"8067b404fe97c70829f082dec8bcf4f71225d7eaea1d8645349cb76fa06205cc" dependencies = [ "libc", "log", "miow", "ntapi", "winapi 0.3.9", ] [[package]] name = "miow" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" dependencies = [ "winapi 0.3.9", ] [[package]] name = "native-tls" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48ba9f7719b5a0f42f338907614285fb5fd70e53858141f69898a1fb7203b24d" dependencies = [ "lazy_static", "libc", "log", "openssl", "openssl-probe", "openssl-sys", "schannel", "security-framework", "security-framework-sys", "tempfile", ] [[package]] name = "ntapi" version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44" dependencies = [ "winapi 0.3.9", ] [[package]] name = "num_cpus" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" dependencies = [ "hermit-abi", "libc", ] [[package]] name = "object" version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9" dependencies = [ "memchr", ] [[package]] name = "ole32-sys" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d2c49021782e5233cd243168edfa8037574afed4eba4bbaf538b3d8d1789d8c" dependencies = [ "winapi 0.2.8", "winapi-build", ] [[package]] name = "once_cell" version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" [[package]] name = "opaque-debug" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" version = "0.10.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bc6b9e4403633698352880b22cbe2f0e45dd0177f6fabe4585536e56a3e4f75" dependencies = [ "bitflags", "cfg-if", "foreign-types", "libc", "once_cell", "openssl-sys", ] [[package]] name = "openssl-probe" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28988d872ab76095a6e6ac88d99b54fd267702734fd7ffe610ca27f533ddb95a" [[package]] name = "openssl-src" version = "111.16.0+1.1.1l" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ab2173f69416cf3ec12debb5823d244127d23a9b127d5a5189aa97c5fa2859f" dependencies = [ "cc", ] [[package]] name = "openssl-sys" version = "0.9.68" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c571f25d3f66dd427e417cebf73dbe2361d6125cf6e3a70d143fdf97c9f5150" dependencies = [ "autocfg", "cc", "libc", "openssl-src", "pkg-config", "vcpkg", ] [[package]] name = "percent-encoding" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "pest" version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" dependencies = [ "ucd-trie", ] [[package]] name = "pin-project-lite" version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443" [[package]] name = "pin-utils" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pipeline" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d15b6607fa632996eb8a17c9041cb6071cb75ac057abd45dece578723ea8c7c0" [[package]] name = "pkg-config" version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "12295df4f294471248581bc09bef3c38a5e46f1e36d6a37353621a0c6c357e1f" [[package]] name = "ppv-lite86" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba" [[package]] name = "proc-macro-hack" version = "0.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" [[package]] name = "proc-macro-nested" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086" [[package]] name = "proc-macro2" version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba508cc11742c0dc5c1659771673afbab7a0efab23aa17e854cbab0837ed0b43" dependencies = [ "unicode-xid", ] [[package]] name = "quote" version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05" dependencies = [ "proc-macro2", ] [[package]] name = "rand" version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" dependencies = [ "libc", "rand_chacha", "rand_core", "rand_hc", ] [[package]] name = "rand_chacha" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", "rand_core", ] [[package]] name = "rand_core" version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" dependencies = [ "getrandom", ] [[package]] name = "rand_hc" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7" dependencies = [ "rand_core", ] [[package]] name = "rayon" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" dependencies = [ "autocfg", "crossbeam-deque", "either", "rayon-core", ] [[package]] name = "rayon-core" version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" dependencies = [ "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", "lazy_static", "num_cpus", ] [[package]] name = "redox_syscall" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" dependencies = [ "bitflags", ] [[package]] name = "redox_users" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"528532f3d801c87aec9def2add9ca802fe569e44a544afe633765267840abe64" dependencies = [ "getrandom", "redox_syscall", ] [[package]] name = "regex" version = "1.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.6.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" [[package]] name = "remove_dir_all" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" dependencies = [ "winapi 0.3.9", ] [[package]] name = "remove_dir_all" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7b19f5c2df95a07275e7224924cc62f76f04525f4fda801473f85e325e81977" dependencies = [ "log", "num_cpus", "rayon", "winapi 0.3.9", ] [[package]] name = "remove_dir_all" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "882f368737489ea543bc5c340e6f3d34a28c39980bd9a979e47322b26f60ac40" dependencies = [ "libc", "log", "num_cpus", "rayon", "winapi 0.3.9", ] [[package]] name = "reqwest" version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "66d2927ca2f685faf0fc620ac4834690d29e7abb153add10f5812eef20b5e280" dependencies = [ "base64", "bytes", "encoding_rs", "futures-core", "futures-util", "http", "http-body", "hyper", "hyper-tls", "ipnet", "js-sys", "lazy_static", "log", "mime", "native-tls", "percent-encoding", "pin-project-lite", "serde", "serde_json", "serde_urlencoded", "tokio", "tokio-native-tls", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", "winreg 0.7.0", ] [[package]] name = "rustc-demangle" version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" [[package]] name = "rustversion" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61b3909d758bb75c79f23d4736fac9433868679d3ad2ea7a61e3c25cfda9a088" [[package]] name = "ryu" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "schannel" version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f05ba609c234e60bee0d547fe94a4c7e9da733d1c962cf6e59efa4cd9c8bc75" dependencies = [ "lazy_static", "winapi 0.3.9", ] [[package]] name = "scopeguard" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "security-framework" version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "525bc1abfda2e1998d152c45cf13e696f76d0a4972310b22fac1658b05df7c87" dependencies = [ "bitflags", "core-foundation", "core-foundation-sys", "libc", "security-framework-sys", ] [[package]] name = "security-framework-sys" version = "2.4.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "a9dd14d83160b528b7bfd66439110573efcfbe281b17fc2ca9f39f550d619c7e" dependencies = [ "core-foundation-sys", "libc", ] [[package]] name = "semver" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" dependencies = [ "semver-parser", ] [[package]] name = "semver-parser" version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" dependencies = [ "pest", ] [[package]] name = "serde" version = "1.0.130" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f12d06de37cf59146fbdecab66aa99f9fe4f78722e3607577a5375d66bd0c913" [[package]] name = "serde_derive" version = "1.0.130" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7bc1a1ab1961464eae040d96713baa5a724a8152c1222492465b54322ec508b" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "serde_json" version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f690853975602e1bfe1ccbf50504d67174e3bcf340f23b5ea9992e0587a52d8" dependencies = [ "itoa", "ryu", "serde", ] [[package]] name = "serde_urlencoded" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edfa57a7f8d9c1d260a549e7224100f6c43d43f9103e06dd8b4095a9b2b43ce9" dependencies = [ "form_urlencoded", "itoa", "ryu", "serde", ] [[package]] name = "sha2" version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b69f9a4c9740d74c5baa3fd2e547f9525fa8088a8a958e0ca2409a514e33f5fa" dependencies = [ "block-buffer", "cfg-if", "cpufeatures", "digest", "opaque-debug", ] [[package]] name = "slab" version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9def91fd1e018fe007022791f865d0ccc9b3a0d5001e01aabb8b40e46000afb5" [[package]] name = "socket2" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5dc90fe6c7be1a323296982db1836d1ea9e47b6839496dde9a541bc496df3516" dependencies = [ "libc", "winapi 0.3.9", ] [[package]] name = "strsim" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" [[package]] name = "syn" version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2afee18b8beb5a596ecb4a2dce128c719b4ba399d34126b9e4396e3f9860966" dependencies = [ "proc-macro2", "quote", "unicode-xid", ] [[package]] name = "tar" version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6f5515d3add52e0bbdcad7b83c388bb36ba7b754dda3b5f5bc2d38640cdba5c" dependencies = [ "filetime", "libc", "xattr", ] [[package]] name = "tempfile" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22" dependencies = [ "cfg-if", "libc", "rand", "redox_syscall", "remove_dir_all 0.5.3", "winapi 0.3.9", ] [[package]] name = "term" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" dependencies = [ "dirs-next", "rustversion", "winapi 0.3.9", ] [[package]] name = "textwrap" version = "0.11.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" dependencies = [ "unicode-width", ] [[package]] name = "thiserror" version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "time" version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" dependencies = [ "libc", "winapi 0.3.9", ] [[package]] name = "time" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99beeb0daeac2bd1e86ac2c21caddecb244b39a093594da1a661ec2060c7aedd" dependencies = [ "libc", ] [[package]] name = "tinyvec" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f83b2a3d4d9091d0abd7eba4dc2710b1718583bd4d8992e2190720ea38f391f7" dependencies = [ "tinyvec_macros", ] [[package]] name = "tinyvec_macros" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "588b2d10a336da58d877567cd8fb8a14b463e2104910f8132cd054b4b96e29ee" dependencies = [ "autocfg", "bytes", "libc", "memchr", "mio", "pin-project-lite", "winapi 0.3.9", ] [[package]] name = "tokio-native-tls" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7d995660bd2b7f8c1568414c1126076c13fbb725c40112dc0120b78eb9b717b" dependencies = [ "native-tls", "tokio", ] [[package]] name = "tokio-util" version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e99e1983e5d376cd8eb4b66604d2e99e79f5bd988c3055891dcd8c9e2604cc0" dependencies = [ "bytes", "futures-core", "futures-sink", "log", "pin-project-lite", "tokio", ] [[package]] name = "toml" version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" dependencies = [ "serde", ] [[package]] name = "tower-service" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" [[package]] name = "tracing" version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "375a639232caf30edfc78e8d89b2d4c375515393e7af7e16f01cd96917fb2105" dependencies = [ "cfg-if", "pin-project-lite", "tracing-core", ] [[package]] name = "tracing-core" version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f4ed65637b8390770814083d20756f87bfa2c21bf2f110babdc5438351746e4" dependencies = [ "lazy_static", ] [[package]] name = "try-lock" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" [[package]] name = "typenum" version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b63708a265f51345575b27fe43f9500ad611579e764c79edbc2037b1121959ec" [[package]] name = "ucd-trie" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" [[package]] name = "unicode-bidi" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" [[package]] name = "unicode-normalization" version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" dependencies = [ "tinyvec", ] [[package]] name = "unicode-width" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" [[package]] name = "unicode-xid" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" [[package]] name = "url" version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" dependencies = [ "form_urlencoded", "idna", "matches", "percent-encoding", ] [[package]] name = "vcpkg" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vec_map" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "version_check" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" [[package]] name = "wait-timeout" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" dependencies = [ "libc", ] [[package]] name = "walkdir" version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" dependencies = [ "same-file", "winapi 0.3.9", "winapi-util", ] [[package]] name = "want" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" dependencies = [ "log", "try-lock", ] [[package]] name = "wasi" version = "0.10.2+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" [[package]] name = "wasm-bindgen" version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "632f73e236b219150ea279196e54e610f5dbafa5d61786303d4da54f84e47fce" dependencies = [ "cfg-if", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a317bf8f9fba2476b4b2c85ef4c4af8ff39c3c7f0cdfeed4f82c34a880aa837b" dependencies = [ "bumpalo", "lazy_static", "log", "proc-macro2", "quote", "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"8e8d7523cb1f2a4c96c1317ca690031b714a51cc14e05f712446691f413f5d39" dependencies = [ "cfg-if", "js-sys", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d56146e7c495528bf6587663bea13a8eb588d39b36b679d83972e1a2dbbdacf9" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7803e0eea25835f8abdc585cd3021b3deb11543c6fe226dcd30b228857c5c5ab" dependencies = [ "proc-macro2", "quote", "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0237232789cf037d5480773fe568aac745bfe2afbc11a863e97901780a6b47cc" [[package]] name = "web-sys" version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38eb105f1c59d9eaa6b5cdc92b859d85b926e82cb2e0945cd0c9259faa6fe9fb" dependencies = [ "js-sys", "wasm-bindgen", ] [[package]] name = "winapi" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-build" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" dependencies = [ "winapi 0.3.9", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "winreg" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0120db82e8a1e0b9fb3345a539c478767c0048d842860994d96113d5b667bd69" dependencies = [ "winapi 0.3.9", ] [[package]] name = "winreg" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d107f8c6e916235c4c01cabb3e8acf7bea8ef6a63ca2e7fa0527c049badfc48c" dependencies = [ "winapi 0.3.9", ] [[package]] name = "xattr" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "244c3741f4240ef46274860397c7c74e50eb23624996930e484c16679633a54c" dependencies = [ "libc", ] [[package]] name = "zip" version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93ab48844d61251bb3835145c521d88aa4031d7139e8485990f60ca911fa0815" dependencies = [ "byteorder", "bzip2", "crc32fast", "flate2", "thiserror", "time 0.1.43", ] [[package]] name = "zstd" version = "0.9.0+zstd.1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"07749a5dc2cb6b36661290245e350f15ec3bbb304e493db54a1d354480522ccd" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" version = "4.1.1+zstd.1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c91c90f2c593b003603e5e0493c837088df4469da25aafff8bce42ba48caf079" dependencies = [ "libc", "zstd-sys", ] [[package]] name = "zstd-sys" version = "1.6.1+zstd.1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "615120c7a2431d16cf1cf979e7fc31ba7a5b5e5707b29c8a99e5dbf8a8392a33" dependencies = [ "cc", "libc", ] elan-1.3.1/Cargo.toml000066400000000000000000000027761414005346400144000ustar00rootroot00000000000000[package] name = "elan" version = "1.3.1" authors = [ "Sebastian Ullrich " ] description = "Manage multiple Lean installations with ease" publish = false license = "MIT OR Apache-2.0" build = "build.rs" [features] default = ["curl-backend"] curl-backend = ["download/curl-backend"] reqwest-backend = ["download/reqwest-backend"] # Include in the default set to disable self-update and uninstall. no-self-update = [] # Used to change behavior of self-update and uninstall if installed via MSI msi-installed = [] [dependencies] elan-dist = { path = "src/elan-dist" } elan-utils = { path = "src/elan-utils" } download = { path = "src/download" } clap = "2.33.3" error-chain = "0.12.4" itertools = "0.10.0" libc = "0.2.82" markdown = "0.3.0" rand = "0.8.2" regex = "1.4.3" remove_dir_all = "0.6.1" same-file = "1.0.6" scopeguard = "1.1.0" serde = "1.0.119" serde_derive = "1.0.119" serde_json = "1.0.61" sha2 = "0.9.2" tempfile = "3.2.0" term = "0.7.0" time = "0.3.4" toml = "0.5.8" url = "2.2.0" wait-timeout = "0.2.0" zip = "0.5.9" tar = ">=0.4.36" flate2 = "1.0.14" json = "0.12.4" [target."cfg(windows)".dependencies] winapi = { version = "0.3.9", features = ["jobapi", "jobapi2", "processthreadsapi", "psapi", "synchapi", "winuser"] } winreg = "0.8.0" gcc = "0.3.55" [workspace] members = ["src/download", "src/elan-dist", "src/elan-utils"] [lib] name = "elan" path = "src/elan/lib.rs" test = false # no unit tests [[bin]] name = "elan-init" path = "src/elan-cli/main.rs" test = false # no unit tests elan-1.3.1/Cross.toml000066400000000000000000000000621414005346400144200ustar00rootroot00000000000000[build.env] passthrough = ["RELEASE_TARGET_NAME"] elan-1.3.1/LICENSE000066400000000000000000000261351414005346400134500ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. elan-1.3.1/LICENSE-APACHE000066400000000000000000000251421414005346400143640ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. elan-1.3.1/LICENSE-MIT000066400000000000000000000021421414005346400140670ustar00rootroot00000000000000Copyright (c) 2016 The Rust Project Developers Modifications copyright (c) 2018 Sebastian Ullrich Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. elan-1.3.1/README.md000066400000000000000000000067731414005346400137300ustar00rootroot00000000000000# elan: Lean version manager *elan* is a small tool for managing your installations of the [Lean theorem prover](https://leanprover.github.io). It places `lean` and `leanpkg` binaries in your `PATH` that automatically select and, if necessary, download the Lean version described in the `lean_version` field of your project's `leanpkg.toml`. You can also install, select, run, and uninstall Lean versions manually using the commands of the `elan` executable. ```bash ~/my/package $ cat leanpkg.toml | grep lean_version lean_version = "nightly-2018-04-10" ~/my/package $ leanpkg -v info: downloading component 'lean' 14.6 MiB / 14.6 MiB (100 %) 2.2 MiB/s ETA: 0 s info: installing component 'lean' Lean package manager, version nightly-2018-04-10 [...] ~/my/package $ elan show installed toolchains -------------------- stable nightly-2018-04-06 nightly-2018-04-10 master active toolchain ---------------- nightly-2018-04-10 (overridden by '/home/me/my/package/leanpkg.toml') Lean (version 3.3.1, nightly-2018-04-10, commit d36b859c6579, Release) ``` # Installation ## Manual Installation **Linux/macOS/Cygwin/MSYS2/git bash/...**: run the following command in a terminal: ```bash curl https://raw.githubusercontent.com/leanprover/elan/master/elan-init.sh -sSf | sh ``` **Windows**: run the following commands in a terminal: ```bash curl -O --location https://raw.githubusercontent.com/leanprover/elan/master/elan-init.ps1 powershell -f elan-init.ps1 del elan-init.ps1 ``` Alternatively, on **any supported platform**: Grab the [latest release](https://github.com/leanprover/elan/releases/latest) for your platform, unpack it, and run the contained installation program. The installation will tell you where it will install elan to (`~/.elan` by default), and also ask you about editing your shell config to extend `PATH`. elan can be uninstalled via `elan self uninstall`, which should revert these changes. ## Homebrew ```bash $ brew install elan ``` ## Nix ```bash $ nix-env -iA nixpkgs.elan ``` # Prerequisites On some systems, `lean`/`leanpkg` will not work out of the box even if installed through elan: * You'll need [git](https://git-scm.com/download) to download dependencies through `leanpkg`. * macOS: Install [Homebrew](https://brew.sh/), then run `brew install gmp coreutils`. (`gmp` is required by `lean`, `coreutils` is required by `leanpkg`) # Implementation *elan* is basically a fork of [rustup](https://github.com/rust-lang-nursery/rustup.rs). Apart from new features and adaptions to the Lean infrastructure, these are the basic changes to the original code: * Replaced every mention of `rustup` with `elan`, `cargo` with `leanpkg`, and `rust(c)` with `lean` * Merged `CARGO_HOME` and `RUSTUP_HOME` * Removed options to configure host triple # Build If you want to build elan from source, you will need to install [Rust](https://www.rust-lang.org/tools/install) and Cargo and run the following: ``` cargo build ``` The built binaries will show up in `target/debug` folder. 
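By default the build uses the `curl-backend` download feature declared in `Cargo.toml`. As an untested sketch (using the feature names declared there), you can instead build against the reqwest backend with:

```
cargo build --no-default-features --features reqwest-backend
```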
You can test that it works by running the following: ``` ./target/debug/elan --help ``` # Build on Windows The windows build requires a 64bit developer command prompt and a windows version of `perl.exe` which you can download from [https://strawberryperl.com/](https://strawberryperl.com/). Make sure this downloaded perl.exe is the first thing in your PATH so that the build does not try and use `c:\Program Files\Git\usr\bin\perl.exe`. The git provided version of perl doesn't work for some reason. Then you can run `cargo build` as shown above. elan-1.3.1/build.rs000066400000000000000000000024371414005346400141070ustar00rootroot00000000000000use std::env; use std::error::Error; use std::fs::File; use std::io::Write; use std::path::PathBuf; use std::process::Command; struct Ignore; impl From for Ignore where E: Error, { fn from(_: E) -> Ignore { Ignore } } fn main() { let out_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap()); File::create(out_dir.join("commit-info.txt")) .unwrap() .write_all(commit_info().as_bytes()) .unwrap(); println!("cargo:rerun-if-changed=build.rs"); } // Try to get hash and date of the last commit on a best effort basis. If anything goes wrong // (git not installed or if this is not a git repository) just return an empty string. fn commit_info() -> String { match (commit_hash(), commit_date()) { (Ok(hash), Ok(date)) => format!(" ({} {})", hash.trim_end(), date), _ => String::new(), } } fn commit_hash() -> Result { Ok(String::from_utf8( Command::new("git") .args(&["rev-parse", "--short=9", "HEAD"]) .output()? .stdout, )?) } fn commit_date() -> Result { Ok(String::from_utf8( Command::new("git") .args(&["log", "-1", "--date=short", "--pretty=format:%cd"]) .output()? .stdout, )?) } elan-1.3.1/elan-init.ps1000066400000000000000000000053151414005346400147450ustar00rootroot00000000000000 # This is just a little script that can be downloaded from the internet to # install elan. It just does platform detection, downloads the installer # and runs it. $ELAN_UPDATE_ROOT="https://github.com/leanprover/elan/releases" #XXX: If you change anything here, please make the same changes in setup_mode.rs function usage() { Write-Host " elan-init 1.0.0 (408ed84 2017-02-11) The installer for elan USAGE: elan-init [FLAGS] [OPTIONS] FLAGS: -v, --verbose Enable verbose output -y Disable confirmation prompt. 
--no-modify-path Don't configure the PATH environment variable -h, --help Prints help information -V, --version Prints version information OPTIONS: --default-toolchain Choose a default toolchain to install --default-toolchain none Do not install any toolchains " } Function Get-RedirectedUrl { Param ( [Parameter(Mandatory=$true)] [String]$url ) $request = [System.Net.WebRequest]::Create($url) $request.AllowAutoRedirect=$true $request.UserAgent = 'Mozilla/5.0 (Windows NT; Windows NT 10.0; en-US) AppleWebKit/534.6 (KHTML, like Gecko) Chrome/7.0.500.0 Safari/534.6' try { $response = $request.GetResponse() $response.ResponseUri.AbsoluteUri $response.Close() } catch { "Error: $_" } } function main($cmdline) { $cputype=[System.Environment]::GetEnvironmentVariable("PROCESSOR_ARCHITECTURE"); if ($cputype -ne "AMD64") { Write-Host "### Elan install only supports 64 bit windows with AMD64 architecture" return 1 } $_arch="x86_64-pc-windows-msvc" $_ext = ".exe" $temp = [System.IO.Path]::GetTempPath() $_dir = Join-Path $temp "elan" if (-not (Test-Path -Path $_dir)) { New-Item -ItemType Directory -Path $_dir } $_file = "$_dir/elan-init$_ext" Write-Host "info: downloading installer to ${temp}" $x = Get-RedirectedUrl "https://github.com/leanprover/elan/releases/latest" $xs = -split $x -split '/' $_latest = $xs[-1] Invoke-WebRequest -Uri "$ELAN_UPDATE_ROOT/download/$_latest/elan-$_arch.zip" -OutFile "$_dir/elan-init.zip" Expand-Archive -Path "$_dir/elan-init.zip" -DestinationPath "$_dir" -Force if ($cmdline.Count -eq 0) { Start-Process -FilePath "$_dir/elan-init.exe" -Wait -NoNewWindow } else { Start-Process -FilePath "$_dir/elan-init.exe" -ArgumentList $cmdline -Wait -NoNewWindow } if( -not $? ) { Write-Host "Elan failed with error code $?" return 1 } Remove-Item -Recurse -Force "$_dir" return 0 } $rc = main -cmdline $args Exit $rc elan-1.3.1/elan-init.sh000077500000000000000000000231531414005346400146570ustar00rootroot00000000000000#!/bin/sh # Copyright 2016 The Rust Project Developers. See the COPYRIGHT # file at the top-level directory of this distribution and at # http://rust-lang.org/COPYRIGHT. # # Licensed under the Apache License, Version 2.0 or the MIT license # , at your # option. This file may not be copied, modified, or distributed # except according to those terms. # This is just a little script that can be downloaded from the internet to # install elan. It just does platform detection, downloads the installer # and runs it. 
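# Example invocation (illustrative only): the flags documented in usage() below
# can be forwarded through the pipe shown in the README, e.g.
#   curl https://raw.githubusercontent.com/leanprover/elan/master/elan-init.sh -sSf | sh -s -- -y --default-toolchain stable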
set -u ELAN_UPDATE_ROOT="https://github.com/leanprover/elan/releases" #XXX: If you change anything here, please make the same changes in setup_mode.rs usage() { cat 1>&2 < Choose a default toolchain to install --default-toolchain none Do not install any toolchains EOF } main() { need_cmd curl need_cmd awk need_cmd uname need_cmd mktemp need_cmd chmod need_cmd mkdir need_cmd rm need_cmd rmdir get_architecture || return 1 local _arch="$RETVAL" assert_nz "$_arch" "arch" local _ext="" case "$_arch" in *windows*) _ext=".exe" ;; esac local _dir="$(mktemp -d 2>/dev/null || ensure mktemp -d -t elan)" local _file="$_dir/elan-init$_ext" local _ansi_escapes_are_valid=false if [ -t 2 ]; then if [ "${TERM+set}" = 'set' ]; then case "$TERM" in xterm*|rxvt*|urxvt*|linux*|vt*) _ansi_escapes_are_valid=true ;; esac fi fi # check if we have to use /dev/tty to prompt the user local need_tty=yes for arg in "$@"; do case "$arg" in -h|--help) usage exit 0 ;; -y) # user wants to skip the prompt -- we don't need /dev/tty need_tty=no ;; *) ;; esac done if $_ansi_escapes_are_valid; then printf "\33[1minfo:\33[0m downloading installer\n" 1>&2 else printf '%s\n' 'info: downloading installer' 1>&2 fi ensure mkdir -p "$_dir" local _latest=$(ensure curl -sSf "$ELAN_UPDATE_ROOT/latest" | cut -d'"' -f2 | awk -F/ '{print $NF}') case "$_arch" in *windows*) ensure curl -sSfL "$ELAN_UPDATE_ROOT/download/$_latest/elan-$_arch.zip" -o "$_dir/elan-init.zip" (cd "$_dir"; ensure unzip elan-init.zip; ignore rm elan-init.zip) ;; *) ensure curl -sSfL "$ELAN_UPDATE_ROOT/download/$_latest/elan-$_arch.tar.gz" -o "$_dir/elan-init.tar.gz" (cd "$_dir"; ensure tar xf elan-init.tar.gz; ignore rm elan-init.tar.gz) ;; esac ensure chmod u+x "$_file" if [ ! -x "$_file" ]; then printf '%s\n' "Cannot execute $_file (likely because of mounting /tmp as noexec)." 1>&2 printf '%s\n' "Please copy the file to a location where you can execute binaries and run ./elan-init$_ext." 1>&2 exit 1 fi if [ "$need_tty" = "yes" ]; then # The installer is going to want to ask for confirmation by # reading stdin. This script was piped into `sh` though and # doesn't have stdin to pass to its children. Instead we're going # to explicitly connect /dev/tty to the installer's stdin. if [ ! -t 1 ]; then err "Unable to run interactively. Run with -y to accept defaults, --help for additional options" fi ignore "$_file" "$@" < /dev/tty else ignore "$_file" "$@" fi local _retval=$? ignore rm "$_file" ignore rmdir "$_dir" return "$_retval" } get_bitness() { need_cmd head # Architecture detection without dependencies beyond coreutils. # ELF files start out "\x7fELF", and the following byte is # 0x01 for 32-bit and # 0x02 for 64-bit. # The printf builtin on some shells like dash only supports octal # escape sequences, so we use those. local _current_exe_head=$(head -c 5 /proc/self/exe ) if [ "$_current_exe_head" = "$(printf '\177ELF\001')" ]; then echo 32 elif [ "$_current_exe_head" = "$(printf '\177ELF\002')" ]; then echo 64 else err "unknown platform bitness" fi } get_endianness() { local cputype=$1 local suffix_eb=$2 local suffix_el=$3 # detect endianness without od/hexdump, like get_bitness() does. 
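    # The 6th byte of the ELF header (e_ident[EI_DATA]) encodes endianness:
    # \001 means little-endian, \002 means big-endian.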
need_cmd head need_cmd tail local _current_exe_endianness="$(head -c 6 /proc/self/exe | tail -c 1)" if [ "$_current_exe_endianness" = "$(printf '\001')" ]; then echo "${cputype}${suffix_el}" elif [ "$_current_exe_endianness" = "$(printf '\002')" ]; then echo "${cputype}${suffix_eb}" else err "unknown platform endianness" fi } get_architecture() { local _ostype="$(uname -s)" local _cputype="$(uname -m)" if [ "$_ostype" = Linux ]; then if [ "$(uname -o)" = Android ]; then local _ostype=Android fi fi if [ "$_ostype" = Darwin -a "$_cputype" = i386 ]; then # Darwin `uname -s` lies if sysctl hw.optional.x86_64 | grep -q ': 1'; then local _cputype=x86_64 fi fi case "$_ostype" in Android) local _ostype=linux-android ;; Linux) local _ostype=unknown-linux-gnu ;; FreeBSD) local _ostype=unknown-freebsd ;; NetBSD) local _ostype=unknown-netbsd ;; DragonFly) local _ostype=unknown-dragonfly ;; Darwin) local _ostype=apple-darwin ;; MINGW* | MSYS* | CYGWIN*) local _ostype=pc-windows-msvc ;; *) err "unrecognized OS type: $_ostype" ;; esac case "$_cputype" in i386 | i486 | i686 | i786 | x86) local _cputype=i686 ;; xscale | arm) local _cputype=arm if [ "$_ostype" = "linux-android" ]; then local _ostype=linux-androideabi fi ;; armv6l) local _cputype=arm if [ "$_ostype" = "linux-android" ]; then local _ostype=linux-androideabi else local _ostype="${_ostype}eabihf" fi ;; armv7l | armv8l) local _cputype=armv7 if [ "$_ostype" = "linux-android" ]; then local _ostype=linux-androideabi else local _ostype="${_ostype}eabihf" fi ;; aarch64) local _cputype=aarch64 ;; x86_64 | x86-64 | x64 | amd64) local _cputype=x86_64 ;; mips) local _cputype="$(get_endianness $_cputype "" 'el')" ;; mips64) local _bitness="$(get_bitness)" if [ $_bitness = "32" ]; then if [ $_ostype = "unknown-linux-gnu" ]; then # 64-bit kernel with 32-bit userland # endianness suffix is appended later local _cputype=mips fi else # only n64 ABI is supported for now local _ostype="${_ostype}abi64" fi local _cputype="$(get_endianness $_cputype "" 'el')" ;; ppc) local _cputype=powerpc ;; ppc64) local _cputype=powerpc64 ;; ppc64le) local _cputype=powerpc64le ;; *) err "unknown CPU type: $_cputype" esac # Detect 64-bit linux with 32-bit userland if [ $_ostype = unknown-linux-gnu -a $_cputype = x86_64 ]; then if [ "$(get_bitness)" = "32" ]; then local _cputype=i686 fi fi # Detect armv7 but without the CPU features Lean needs in that build, # and fall back to arm. # See https://github.com/rust-lang-nursery/rustup.rs/issues/587. if [ $_ostype = "unknown-linux-gnueabihf" -a $_cputype = armv7 ]; then if ensure grep '^Features' /proc/cpuinfo | grep -q -v neon; then # At least one processor does not have NEON. local _cputype=arm fi fi local _arch="$_cputype-$_ostype" RETVAL="$_arch" } say() { echo "elan: $1" } err() { say "$1" >&2 exit 1 } need_cmd() { if ! check_cmd "$1" then err "need '$1' (command not found)" fi } check_cmd() { command -v "$1" > /dev/null 2>&1 return $? } need_ok() { if [ $? != 0 ]; then err "$1"; fi } assert_nz() { if [ -z "$1" ]; then err "assert_nz $2"; fi } # Run a command that should never fail. If the command fails execution # will immediately terminate with an error showing the failing # command. ensure() { "$@" need_ok "command failed: $*" } # This is just for indicating that commands' results are being # intentionally ignored. Usually, because it's being executed # as part of error handling. 
ignore() { "$@" } main "$@" || exit 1 elan-1.3.1/release.toml000066400000000000000000000002031414005346400147440ustar00rootroot00000000000000dev-version-ext = "pre" pre-release-replacements = [ {file="CHANGELOG.md", search="Unreleased", replace="{{version}} - {{date}}"} ]elan-1.3.1/src/000077500000000000000000000000001414005346400132235ustar00rootroot00000000000000elan-1.3.1/src/download/000077500000000000000000000000001414005346400150325ustar00rootroot00000000000000elan-1.3.1/src/download/Cargo.toml000066400000000000000000000011741414005346400167650ustar00rootroot00000000000000[package] name = "download" version = "0.4.0" authors = [ "Brian Anderson " ] license = "MIT/Apache-2.0" [features] default = ["curl-backend"] curl-backend = ["curl"] reqwest-backend = ["reqwest", "env_proxy", "lazy_static"] [dependencies] error-chain = "0.12.4" url = "2.2.1" curl = { version = "0.4.34", optional = true } openssl = { version = "0.10", features = ["vendored"], optional = true } env_proxy = { version = "0.4.1", optional = true } lazy_static = { version = "1.4.0", optional = true } reqwest = { version = "0.11.1", optional = true } [dev-dependencies] futures = "0.3.13" hyper = "0.14.4" elan-1.3.1/src/download/src/000077500000000000000000000000001414005346400156215ustar00rootroot00000000000000elan-1.3.1/src/download/src/errors.rs000066400000000000000000000011601414005346400175010ustar00rootroot00000000000000error_chain! { links { } foreign_links { Io(::std::io::Error); Reqwest(::reqwest::Error) #[cfg(feature = "reqwest-backend")]; } errors { HttpStatus(e: u32) { description("http request returned an unsuccessful status code") display("http request returned an unsuccessful status code: {}", e) } FileNotFound { description("file not found") } BackendUnavailable(be: &'static str) { description("download backend unavailable") display("download backend '{}' unavailable", be) } } } elan-1.3.1/src/download/src/lib.rs000066400000000000000000000314261414005346400167430ustar00rootroot00000000000000//! Easy file downloading #[macro_use] extern crate error_chain; extern crate url; #[cfg(feature = "reqwest-backend")] extern crate reqwest; #[cfg(feature = "reqwest-backend")] #[macro_use] extern crate lazy_static; use std::path::Path; use url::Url; mod errors; pub use errors::*; #[derive(Debug, Copy, Clone)] pub enum Backend { Curl, Reqwest, } #[derive(Debug, Copy, Clone)] pub enum Event<'a> { ResumingPartialDownload, /// Received the Content-Length of the to-be downloaded data. DownloadContentLengthReceived(u64), /// Received some data. 
DownloadDataReceived(&'a [u8]), } fn download_with_backend( backend: Backend, url: &Url, resume_from: u64, callback: &dyn Fn(Event) -> Result<()>, ) -> Result<()> { match backend { Backend::Curl => curl::download(url, resume_from, callback), Backend::Reqwest => reqwest_be::download(url, resume_from, callback), } } pub fn download_to_path_with_backend( backend: Backend, url: &Url, path: &Path, resume_from_partial: bool, callback: Option<&dyn Fn(Event) -> Result<()>>, ) -> Result<()> { use std::cell::RefCell; use std::fs::OpenOptions; use std::io::{Read, Seek, SeekFrom, Write}; || -> Result<()> { let (file, resume_from) = if resume_from_partial { let possible_partial = OpenOptions::new().read(true).open(&path); let downloaded_so_far = if let Ok(mut partial) = possible_partial { if let Some(cb) = callback { cb(Event::ResumingPartialDownload)?; let mut buf = vec![0; 32768]; let mut downloaded_so_far = 0; loop { let n = partial.read(&mut buf)?; downloaded_so_far += n as u64; if n == 0 { break; } cb(Event::DownloadDataReceived(&buf[..n]))?; } downloaded_so_far } else { let file_info = partial.metadata()?; file_info.len() } } else { 0 }; let mut possible_partial = OpenOptions::new() .write(true) .create(true) .open(&path) .chain_err(|| "error opening file for download")?; possible_partial.seek(SeekFrom::End(0))?; (possible_partial, downloaded_so_far) } else { ( OpenOptions::new() .write(true) .create(true) .open(&path) .chain_err(|| "error creating file for download")?, 0, ) }; let file = RefCell::new(file); download_with_backend(backend, url, resume_from, &|event| { if let Event::DownloadDataReceived(data) = event { file.borrow_mut() .write_all(data) .chain_err(|| "unable to write download to disk")?; } match callback { Some(cb) => cb(event), None => Ok(()), } })?; file.borrow_mut() .sync_data() .chain_err(|| "unable to sync download to disk")?; Ok(()) }() .map_err(|e| { // TODO is there any point clearing up here? What kind of errors will leave us with an unusable partial? e }) } /// Download via libcurl; encrypt with the native (or OpenSSl) TLS /// stack via libcurl #[cfg(feature = "curl-backend")] pub mod curl { extern crate curl; use self::curl::easy::Easy; use super::Event; use errors::*; use std::cell::RefCell; use std::str; use std::time::Duration; use url::Url; thread_local!(pub static EASY: RefCell = RefCell::new(Easy::new())); pub fn download( url: &Url, resume_from: u64, callback: &dyn Fn(Event) -> Result<()>, ) -> Result<()> { // Fetch either a cached libcurl handle (which will preserve open // connections) or create a new one if it isn't listed. // // Once we've acquired it, reset the lifetime from 'static to our local // scope. EASY.with(|handle| { let mut handle = handle.borrow_mut(); handle .url(&url.to_string()) .chain_err(|| "failed to set url")?; handle .follow_location(true) .chain_err(|| "failed to set follow redirects")?; if resume_from > 0 { handle .resume_from(resume_from) .chain_err(|| "setting the range header for download resumption")?; } else { // an error here indicates that the range header isn't supported by underlying curl, // so there's nothing to "clear" - safe to ignore this error. let _ = handle.resume_from(0); } // Take at most 30s to connect handle .connect_timeout(Duration::new(30, 0)) .chain_err(|| "failed to set connect timeout")?; { let cberr = RefCell::new(None); let mut transfer = handle.transfer(); // Data callback for libcurl which is called with data that's // downloaded. We just feed it into our hasher and also write it out // to disk. 
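            // Returning a byte count different from `data.len()` (here 0) makes
            // libcurl abort the transfer with a write error; the original callback
            // error is stashed in `cberr` and re-raised after `perform()` below.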
transfer .write_function(|data| match callback(Event::DownloadDataReceived(data)) { Ok(()) => Ok(data.len()), Err(e) => { *cberr.borrow_mut() = Some(e); Ok(0) } }) .chain_err(|| "failed to set write")?; // Listen for headers and parse out a `Content-Length` if it comes // so we know how much we're downloading. transfer .header_function(|header| { if let Ok(data) = str::from_utf8(header) { let prefix = "Content-Length: "; if data.starts_with(prefix) { if let Ok(s) = data[prefix.len()..].trim().parse::() { let msg = Event::DownloadContentLengthReceived(s + resume_from); match callback(msg) { Ok(()) => (), Err(e) => { *cberr.borrow_mut() = Some(e); return false; } } } } } true }) .chain_err(|| "failed to set header")?; // If an error happens check to see if we had a filesystem error up // in `cberr`, but we always want to punt it up. transfer.perform().or_else(|e| { // If the original error was generated by one of our // callbacks, return it. match cberr.borrow_mut().take() { Some(cberr) => Err(cberr), None => { // Otherwise, return the error from curl if e.is_file_couldnt_read_file() { Err(e).chain_err(|| ErrorKind::FileNotFound) } else { Err(e).chain_err(|| "error during download") } } } })?; } // If we didn't get a 20x or 0 ("OK" for files) then return an error let code = handle .response_code() .chain_err(|| "failed to get response code")?; match code { 0 | 200..=299 => {} _ => { return Err(ErrorKind::HttpStatus(code).into()); } }; Ok(()) }) } } #[cfg(feature = "reqwest-backend")] pub mod reqwest_be { extern crate env_proxy; use super::Event; use errors::*; use reqwest::{header, Client, Proxy, Response}; use std::io; use std::time::Duration; use url::Url; pub fn download(url: &Url, resume_from: u64, callback: &Fn(Event) -> Result<()>) -> Result<()> { // Short-circuit reqwest for the "file:" URL scheme if download_from_file_url(url, resume_from, callback)? { return Ok(()); } let mut res = request(url, resume_from).chain_err(|| "failed to make network request")?; if !res.status().is_success() { let code: u16 = res.status().into(); return Err(ErrorKind::HttpStatus(code as u32).into()); } let buffer_size = 0x10000; let mut buffer = vec![0u8; buffer_size]; if let Some(len) = res.headers().get::() { callback(Event::DownloadContentLengthReceived(len.0 + resume_from))?; } loop { let bytes_read = io::Read::read(&mut res, &mut buffer).chain_err(|| "error reading from socket")?; if bytes_read != 0 { callback(Event::DownloadDataReceived(&buffer[0..bytes_read]))?; } else { return Ok(()); } } } lazy_static! { static ref CLIENT: Client = { let catcher = || { Client::builder() .gzip(false) .proxy(Proxy::custom(env_proxy)) .timeout(Duration::from_secs(30)) .build() }; // woah, an unwrap?! // It's OK. This is the same as what is happening in curl. // // The curl::Easy::new() internally assert!s that the initialized // Easy is not null. Inside reqwest, the errors here would be from // the TLS library returning a null pointer as well. 
catcher().unwrap() }; } fn env_proxy(url: &Url) -> Option { env_proxy::for_url(url).to_url() } fn request(url: &Url, resume_from: u64) -> ::reqwest::Result { let mut req = CLIENT.get(url.clone()); if resume_from != 0 { req.header(header::Range::Bytes(vec![header::ByteRangeSpec::AllFrom( resume_from, )])); } req.send() } fn download_from_file_url( url: &Url, resume_from: u64, callback: &Fn(Event) -> Result<()>, ) -> Result { use std::fs; use std::io; // The file scheme is mostly for use by tests to mock the dist server if url.scheme() == "file" { let src = url .to_file_path() .map_err(|_| Error::from(format!("bogus file url: '{}'", url)))?; if !src.is_file() { // Because some of elan's logic depends on checking // the error when a downloaded file doesn't exist, make // the file case return the same error value as the // network case. return Err(ErrorKind::FileNotFound.into()); } let ref mut f = fs::File::open(src).chain_err(|| "unable to open downloaded file")?; io::Seek::seek(f, io::SeekFrom::Start(resume_from))?; let ref mut buffer = vec![0u8; 0x10000]; loop { let bytes_read = io::Read::read(f, buffer).chain_err(|| "unable to read downloaded file")?; if bytes_read == 0 { break; } callback(Event::DownloadDataReceived(&buffer[0..bytes_read]))?; } Ok(true) } else { Ok(false) } } } #[cfg(not(feature = "curl-backend"))] pub mod curl { use super::Event; use errors::*; use url::Url; pub fn download( _url: &Url, _resume_from: u64, _callback: &dyn Fn(Event) -> Result<()>, ) -> Result<()> { Err(ErrorKind::BackendUnavailable("curl").into()) } } #[cfg(not(feature = "reqwest-backend"))] pub mod reqwest_be { use super::Event; use errors::*; use url::Url; pub fn download( _url: &Url, _resume_from: u64, _callback: &dyn Fn(Event) -> Result<()>, ) -> Result<()> { Err(ErrorKind::BackendUnavailable("reqwest").into()) } } elan-1.3.1/src/elan-cli/000077500000000000000000000000001414005346400147075ustar00rootroot00000000000000elan-1.3.1/src/elan-cli/common.rs000066400000000000000000000242701414005346400165520ustar00rootroot00000000000000//! 
Just a dumping ground for cli stuff use elan::telemetry_analysis::TelemetryAnalysis; use elan::{self, Cfg, Notification, Toolchain, UpdateStatus}; use elan_utils::notify::NotificationLevel; use elan_utils::utils; use errors::*; use self_update; use std; use std::io::{BufRead, BufReader, Write}; use std::path::Path; use std::process::{Command, Stdio}; use std::sync::Arc; use std::time::Duration; use std::{cmp, iter}; use term2; use wait_timeout::ChildExt; pub fn confirm(question: &str, default: bool) -> Result { print!("{} ", question); let _ = std::io::stdout().flush(); let input = read_line()?; let r = match &*input { "y" | "Y" => true, "n" | "N" => false, "" => default, _ => false, }; println!(""); Ok(r) } pub enum Confirm { Yes, No, Advanced, } pub fn confirm_advanced() -> Result { println!(""); println!("1) Proceed with installation (default)"); println!("2) Customize installation"); println!("3) Cancel installation"); let _ = std::io::stdout().flush(); let input = read_line()?; let r = match &*input { "1" | "" => Confirm::Yes, "2" => Confirm::Advanced, _ => Confirm::No, }; println!(""); Ok(r) } pub fn question_str(question: &str, default: &str) -> Result { println!("{}", question); let _ = std::io::stdout().flush(); let input = read_line()?; println!(""); if input.is_empty() { Ok(default.to_string()) } else { Ok(input) } } pub fn question_bool(question: &str, default: bool) -> Result { println!("{}", question); let _ = std::io::stdout().flush(); let input = read_line()?; println!(""); if input.is_empty() { Ok(default) } else { match &*input { "y" | "Y" | "yes" => Ok(true), "n" | "N" | "no" => Ok(false), _ => Ok(default), } } } pub fn read_line() -> Result { let stdin = std::io::stdin(); let stdin = stdin.lock(); let mut lines = stdin.lines(); lines .next() .and_then(|l| l.ok()) .ok_or("unable to read from stdin for confirmation".into()) } pub fn set_globals(verbose: bool) -> Result { use download_tracker::DownloadTracker; use std::cell::RefCell; let download_tracker = RefCell::new(DownloadTracker::new()); Ok(Cfg::from_env(Arc::new(move |n: Notification| { if download_tracker.borrow_mut().handle_notification(&n) { return; } match n.level() { NotificationLevel::Verbose => { if verbose { verbose!("{}", n); } } NotificationLevel::Info => { info!("{}", n); } NotificationLevel::Warn => { warn!("{}", n); } NotificationLevel::Error => { err!("{}", n); } } }))?) 
} pub fn show_channel_update( cfg: &Cfg, name: &str, updated: elan::Result, ) -> Result<()> { show_channel_updates(cfg, vec![(name.to_string(), updated)]) } fn show_channel_updates( cfg: &Cfg, toolchains: Vec<(String, elan::Result)>, ) -> Result<()> { let data = toolchains.into_iter().map(|(name, result)| { let ref toolchain = cfg.get_toolchain(&name, false).expect(""); let version = lean_version(toolchain); let banner; let color; match result { Ok(UpdateStatus::Installed) => { banner = "installed"; color = Some(term2::color::BRIGHT_GREEN); } Ok(UpdateStatus::Updated) => { banner = "updated"; color = Some(term2::color::BRIGHT_GREEN); } Ok(UpdateStatus::Unchanged) => { banner = "unchanged"; color = None; } Err(_) => { banner = "update failed"; color = Some(term2::color::BRIGHT_RED); } } let width = name.len() + 1 + banner.len(); (name, banner, width, color, version) }); let mut t = term2::stdout(); let data: Vec<_> = data.collect(); let max_width = data .iter() .fold(0, |a, &(_, _, width, _, _)| cmp::max(a, width)); for (name, banner, width, color, version) in data { let padding = max_width - width; let padding: String = iter::repeat(' ').take(padding).collect(); let _ = write!(t, " {}", padding); let _ = t.attr(term2::Attr::Bold); if let Some(color) = color { let _ = t.fg(color); } let _ = write!(t, "{} ", name); let _ = write!(t, "{}", banner); let _ = t.reset(); let _ = writeln!(t, " - {}", version); } let _ = writeln!(t, ""); Ok(()) } pub fn update_all_channels(cfg: &Cfg, self_update: bool, force_update: bool) -> Result<()> { let toolchains = cfg.update_all_channels(force_update)?; if toolchains.is_empty() { info!("no updatable toolchains installed"); } let setup_path = if self_update { self_update::prepare_update()? } else { None }; if !toolchains.is_empty() { println!(""); show_channel_updates(cfg, toolchains)?; } if let Some(ref setup_path) = setup_path { self_update::run_update(setup_path)?; unreachable!(); // update exits on success } else if self_update { // Try again in case we emitted "tool `{}` is already installed" last time. self_update::install_proxies()?; } Ok(()) } pub fn lean_version(toolchain: &Toolchain) -> String { if toolchain.exists() { let lean_path = toolchain.binary_file("lean"); if utils::is_file(&lean_path) { let mut cmd = Command::new(&lean_path); cmd.arg("--version"); cmd.stdin(Stdio::null()); cmd.stdout(Stdio::piped()); cmd.stderr(Stdio::piped()); toolchain.set_ldpath(&mut cmd); // some toolchains are faulty with some combinations of platforms and // may fail to launch but also to timely terminate. // (known cases include Lean 1.3.0 through 1.10.0 in recent macOS Sierra.) // we guard against such cases by enforcing a reasonable timeout to read. 
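            // Spawn `lean --version`, wait at most 10 seconds for it to finish,
            // and keep only the first line of its stdout; on timeout the child is
            // killed and a placeholder string is returned instead.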
let mut line1 = None; if let Ok(mut child) = cmd.spawn() { let timeout = Duration::new(10, 0); match child.wait_timeout(timeout) { Ok(Some(status)) if status.success() => { let out = child .stdout .expect("Child::stdout requested but not present"); let mut line = String::new(); if BufReader::new(out).read_line(&mut line).is_ok() { let lineend = line.trim_end_matches(&['\r', '\n'][..]).len(); line.truncate(lineend); line1 = Some(line); } } Ok(None) => { let _ = child.kill(); return String::from("(timeout reading lean version)"); } Ok(Some(_)) | Err(_) => {} } } if let Some(line1) = line1 { line1.to_owned() } else { String::from("(error reading lean version)") } } else { String::from("(lean does not exist)") } } else { String::from("(toolchain not installed)") } } pub fn list_toolchains(cfg: &Cfg) -> Result<()> { let toolchains = cfg.list_toolchains()?; if toolchains.is_empty() { println!("no installed toolchains"); } else { if let Ok(Some(def_toolchain)) = cfg.find_default() { for toolchain in toolchains { let if_default = if def_toolchain.name() == &*toolchain { " (default)" } else { "" }; println!("{}{}", &toolchain, if_default); } } else { for toolchain in toolchains { println!("{}", &toolchain); } } } Ok(()) } pub fn list_overrides(cfg: &Cfg) -> Result<()> { let overrides = cfg.settings_file.with(|s| Ok(s.overrides.clone()))?; if overrides.is_empty() { println!("no overrides"); } else { let mut any_not_exist = false; for (k, v) in overrides { let dir_exists = Path::new(&k).is_dir(); if !dir_exists { any_not_exist = true; } println!( "{:<40}\t{:<20}", utils::format_path_for_display(&k) + if dir_exists { "" } else { " (not a directory)" }, v ) } if any_not_exist { println!(""); info!( "you may remove overrides for non-existent directories with `elan override unset --nonexistent`" ); } } Ok(()) } pub fn version() -> &'static str { concat!( env!("CARGO_PKG_VERSION"), include_str!(concat!(env!("OUT_DIR"), "/commit-info.txt")) ) } pub fn report_error(e: &Error) { err!("{}", e); for e in e.iter().skip(1) { info!("caused by: {}", e); } if show_backtrace() { if let Some(backtrace) = e.backtrace() { info!("backtrace:"); println!(""); println!("{:?}", backtrace); } } else { } fn show_backtrace() -> bool { use std::env; use std::ops::Deref; if env::var("RUST_BACKTRACE").as_ref().map(Deref::deref) == Ok("1") { return true; } for arg in env::args() { if arg == "-v" || arg == "--verbose" { return true; } } false } } pub fn show_telemetry(analysis: TelemetryAnalysis) -> Result<()> { println!("Telemetry Analysis"); println!("{}", analysis); Ok(()) } elan-1.3.1/src/elan-cli/download_tracker.rs000066400000000000000000000161651414005346400206100ustar00rootroot00000000000000use elan::Notification; use elan_dist::Notification as In; use elan_utils::tty; use elan_utils::Notification as Un; use std::collections::VecDeque; use std::fmt; use term; use time::OffsetDateTime; /// Keep track of this many past download amounts const DOWNLOAD_TRACK_COUNT: usize = 5; /// Tracks download progress and displays information about it to a terminal. pub struct DownloadTracker { /// Content-Length of the to-be downloaded object. content_len: Option, /// Total data downloaded in bytes. total_downloaded: usize, /// Data downloaded this second. downloaded_this_sec: usize, /// Keeps track of amount of data downloaded every last few secs. /// Used for averaging the download speed. 
downloaded_last_few_secs: VecDeque, /// Time stamp of the last second last_sec: Option, /// How many seconds have elapsed since the download started seconds_elapsed: u32, /// The terminal we write the information to. term: Option>, /// Whether we displayed progress for the download or not. /// /// If the download is quick enough, we don't have time to /// display the progress info. /// In that case, we do not want to do some cleanup stuff we normally do. displayed_progress: bool, } impl DownloadTracker { /// Creates a new DownloadTracker. pub fn new() -> Self { DownloadTracker { content_len: None, total_downloaded: 0, downloaded_this_sec: 0, downloaded_last_few_secs: VecDeque::with_capacity(DOWNLOAD_TRACK_COUNT), seconds_elapsed: 0, last_sec: None, term: term::stdout(), displayed_progress: false, } } pub fn handle_notification(&mut self, n: &Notification) -> bool { match *n { Notification::Install(In::Utils(Un::DownloadContentLengthReceived(content_len))) => { self.content_length_received(content_len); true } Notification::Install(In::Utils(Un::DownloadDataReceived(data))) => { if tty::stdout_isatty() && self.term.is_some() { self.data_received(data.len()); } true } Notification::Install(In::Utils(Un::DownloadFinished)) => { self.download_finished(); true } _ => false, } } /// Notifies self that Content-Length information has been received. pub fn content_length_received(&mut self, content_len: u64) { self.content_len = Some(content_len); } /// Notifies self that data of size `len` has been received. pub fn data_received(&mut self, len: usize) { self.total_downloaded += len; self.downloaded_this_sec += len; let current_time: f64 = (OffsetDateTime::now_utc() - OffsetDateTime::UNIX_EPOCH).as_seconds_f64(); match self.last_sec { None => self.last_sec = Some(current_time), Some(start) => { let elapsed = current_time - start; if elapsed >= 1.0 { self.seconds_elapsed += 1; self.display(); self.last_sec = Some(current_time); if self.downloaded_last_few_secs.len() == DOWNLOAD_TRACK_COUNT { self.downloaded_last_few_secs.pop_back(); } self.downloaded_last_few_secs .push_front(self.downloaded_this_sec); self.downloaded_this_sec = 0; } } } } /// Notifies self that the download has finished. pub fn download_finished(&mut self) { if self.displayed_progress { // Display the finished state self.display(); let _ = writeln!(self.term.as_mut().unwrap(), ""); } self.prepare_for_new_download(); } /// Resets the state to be ready for a new download. fn prepare_for_new_download(&mut self) { self.content_len = None; self.total_downloaded = 0; self.downloaded_this_sec = 0; self.downloaded_last_few_secs.clear(); self.seconds_elapsed = 0; self.last_sec = None; self.displayed_progress = false; } /// Display the tracked download information to the terminal. fn display(&mut self) { let total_h = HumanReadable(self.total_downloaded as f64); let sum = self .downloaded_last_few_secs .iter() .fold(0., |a, &v| a + v as f64); let len = self.downloaded_last_few_secs.len(); let speed = if len > 0 { sum / len as f64 } else { 0. 
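            // (no full second has elapsed yet, so there is no sample to average over)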
}; let speed_h = HumanReadable(speed); match self.content_len { Some(content_len) => { let content_len = content_len as f64; let percent = (self.total_downloaded as f64 / content_len) * 100.; let content_len_h = HumanReadable(content_len); let remaining = content_len - self.total_downloaded as f64; let eta_h = HumanReadable(remaining / speed); let _ = write!( self.term.as_mut().unwrap(), "{} / {} ({:3.0} %) {}/s ETA: {:#}", total_h, content_len_h, percent, speed_h, eta_h ); } None => { let _ = write!( self.term.as_mut().unwrap(), "Total: {} Speed: {}/s", total_h, speed_h ); } } // delete_line() doesn't seem to clear the line properly. // Instead, let's just print some whitespace to clear it. let _ = write!(self.term.as_mut().unwrap(), " "); let _ = self.term.as_mut().unwrap().flush(); let _ = self.term.as_mut().unwrap().carriage_return(); self.displayed_progress = true; } } /// Human readable representation of data size in bytes struct HumanReadable(f64); impl fmt::Display for HumanReadable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if f.alternate() { // repurposing the alternate mode for ETA let sec = self.0; if sec.is_infinite() { write!(f, "Unknown") } else if sec > 1e3 { let sec = self.0 as u64; let min = sec / 60; let sec = sec % 60; write!(f, "{:3} min {:2} s", min, sec) // XYZ min PQ s } else { write!(f, "{:3.0} s", self.0) // XYZ s } } else { const KIB: f64 = 1024.0; const MIB: f64 = KIB * KIB; let size = self.0; if size >= MIB { write!(f, "{:5.1} MiB", size / MIB) // XYZ.P MiB } else if size >= KIB { write!(f, "{:5.1} KiB", size / KIB) } else { write!(f, "{:3.0} B", size) } } } } elan-1.3.1/src/elan-cli/elan_mode.rs000066400000000000000000000461051414005346400172060ustar00rootroot00000000000000use clap::{App, AppSettings, Arg, ArgMatches, Shell, SubCommand}; use common; use elan::settings::TelemetryMode; use elan::{command, Cfg, Toolchain}; use elan_utils::utils; use errors::*; use help::*; use self_update; use std::error::Error; use std::io::{self, Write}; use std::iter; use std::path::Path; use std::process::Command; use term2; pub fn main() -> Result<()> { ::self_update::cleanup_self_updater()?; let ref matches = cli().get_matches(); let verbose = matches.is_present("verbose"); let ref cfg = common::set_globals(verbose)?; match matches.subcommand() { ("show", Some(_)) => show(cfg)?, ("install", Some(m)) => update(cfg, m)?, ("update", Some(m)) => update(cfg, m)?, ("uninstall", Some(m)) => toolchain_remove(cfg, m)?, ("default", Some(m)) => default_(cfg, m)?, ("toolchain", Some(c)) => match c.subcommand() { ("install", Some(m)) => update(cfg, m)?, ("list", Some(_)) => common::list_toolchains(cfg)?, ("link", Some(m)) => toolchain_link(cfg, m)?, ("uninstall", Some(m)) => toolchain_remove(cfg, m)?, (_, _) => unreachable!(), }, ("override", Some(c)) => match c.subcommand() { ("list", Some(_)) => common::list_overrides(cfg)?, ("set", Some(m)) => override_add(cfg, m)?, ("unset", Some(m)) => override_remove(cfg, m)?, (_, _) => unreachable!(), }, ("run", Some(m)) => run(cfg, m)?, ("which", Some(m)) => which(cfg, m)?, ("doc", Some(m)) => doc(cfg, m)?, ("man", Some(m)) => man(cfg, m)?, ("self", Some(c)) => match c.subcommand() { ("update", Some(_)) => self_update::update()?, ("uninstall", Some(m)) => self_uninstall(m)?, (_, _) => unreachable!(), }, ("telemetry", Some(c)) => match c.subcommand() { ("enable", Some(_)) => set_telemetry(&cfg, TelemetryMode::On)?, ("disable", Some(_)) => set_telemetry(&cfg, TelemetryMode::Off)?, ("analyze", Some(_)) => analyze_telemetry(&cfg)?, (_, 
_) => unreachable!(), }, ("completions", Some(c)) => { if let Some(shell) = c.value_of("shell") { cli().gen_completions_to( "elan", shell.parse::().unwrap(), &mut io::stdout(), ); } } (_, _) => unreachable!(), } Ok(()) } pub fn cli() -> App<'static, 'static> { let app = App::new("elan") .version(common::version()) .about("The Lean toolchain installer") .after_help(ELAN_HELP) .setting(AppSettings::VersionlessSubcommands) .setting(AppSettings::DeriveDisplayOrder) .setting(AppSettings::SubcommandRequiredElseHelp) .arg(Arg::with_name("verbose") .help("Enable verbose output") .short("v") .long("verbose")) .subcommand(SubCommand::with_name("show") .about("Show the active and installed toolchains") .after_help(SHOW_HELP)) .subcommand(SubCommand::with_name("install") .about("Update Lean toolchains") .after_help(INSTALL_HELP) .setting(AppSettings::Hidden) // synonym for 'toolchain install' .arg(Arg::with_name("toolchain") .help(TOOLCHAIN_ARG_HELP) .required(true) .multiple(true))) .subcommand(SubCommand::with_name("uninstall") .about("Uninstall Lean toolchains") .setting(AppSettings::Hidden) // synonym for 'toolchain uninstall' .arg(Arg::with_name("toolchain") .help(TOOLCHAIN_ARG_HELP) .required(true) .multiple(true))) .subcommand(SubCommand::with_name("update") .about("Update Lean toolchains and elan") .after_help(UPDATE_HELP) .arg(Arg::with_name("toolchain") .help(TOOLCHAIN_ARG_HELP) .required(false) .multiple(true)) .arg(Arg::with_name("no-self-update") .help("Don't perform self update when running the `elan` command") .long("no-self-update") .takes_value(false) .hidden(true)) .arg(Arg::with_name("force") .help("Force an update, even if some components are missing") .long("force") .takes_value(false))) .subcommand(SubCommand::with_name("default") .about("Set the default toolchain") .after_help(DEFAULT_HELP) .arg(Arg::with_name("toolchain") .help(TOOLCHAIN_ARG_HELP) .required(true))) .subcommand(SubCommand::with_name("toolchain") .about("Modify or query the installed toolchains") .after_help(TOOLCHAIN_HELP) .setting(AppSettings::VersionlessSubcommands) .setting(AppSettings::DeriveDisplayOrder) .setting(AppSettings::SubcommandRequiredElseHelp) .subcommand(SubCommand::with_name("list") .about("List installed toolchains")) .subcommand(SubCommand::with_name("install") .about("Install or update a given toolchain") .aliases(&["update", "add"]) .arg(Arg::with_name("toolchain") .help(TOOLCHAIN_ARG_HELP) .required(true) .multiple(true))) .subcommand(SubCommand::with_name("uninstall") .about("Uninstall a toolchain") .alias("remove") .arg(Arg::with_name("toolchain") .help(TOOLCHAIN_ARG_HELP) .required(true) .multiple(true))) .subcommand(SubCommand::with_name("link") .about("Create a custom toolchain by symlinking to a directory") .after_help(TOOLCHAIN_LINK_HELP) .arg(Arg::with_name("toolchain") .help(TOOLCHAIN_ARG_HELP) .required(true)) .arg(Arg::with_name("path") .required(true)))) .subcommand(SubCommand::with_name("override") .about("Modify directory toolchain overrides") .after_help(OVERRIDE_HELP) .setting(AppSettings::VersionlessSubcommands) .setting(AppSettings::DeriveDisplayOrder) .setting(AppSettings::SubcommandRequiredElseHelp) .subcommand(SubCommand::with_name("list") .about("List directory toolchain overrides")) .subcommand(SubCommand::with_name("set") .about("Set the override toolchain for a directory") .alias("add") .arg(Arg::with_name("toolchain") .help(TOOLCHAIN_ARG_HELP) .required(true))) .subcommand(SubCommand::with_name("unset") .about("Remove the override toolchain for a directory") 
.after_help(OVERRIDE_UNSET_HELP) .alias("remove") .arg(Arg::with_name("path") .long("path") .takes_value(true) .help("Path to the directory")) .arg(Arg::with_name("nonexistent") .long("nonexistent") .takes_value(false) .help("Remove override toolchain for all nonexistent directories")))) .subcommand(SubCommand::with_name("run") .about("Run a command with an environment configured for a given toolchain") .after_help(RUN_HELP) .setting(AppSettings::TrailingVarArg) .arg(Arg::with_name("install") .help("Install the requested toolchain if needed") .long("install")) .arg(Arg::with_name("toolchain") .help(TOOLCHAIN_ARG_HELP) .required(true)) .arg(Arg::with_name("command") .required(true).multiple(true).use_delimiter(false))) .subcommand(SubCommand::with_name("which") .about("Display which binary will be run for a given command") .arg(Arg::with_name("command") .required(true))) /*.subcommand(SubCommand::with_name("doc") .alias("docs") .about("Open the documentation for the current toolchain") .after_help(DOC_HELP) .arg(Arg::with_name("book") .long("book") .help("The Rust Programming Language book")) .arg(Arg::with_name("std") .long("std") .help("Standard library API documentation")) .group(ArgGroup::with_name("page") .args(&["book", "std"])))*/; /*if cfg!(not(target_os = "windows")) { app = app .subcommand(SubCommand::with_name("man") .about("View the man page for a given command") .arg(Arg::with_name("command") .required(true)) .arg(Arg::with_name("toolchain") .help(TOOLCHAIN_ARG_HELP) .long("toolchain") .takes_value(true))); }*/ app.subcommand( SubCommand::with_name("self") .about("Modify the elan installation") .setting(AppSettings::VersionlessSubcommands) .setting(AppSettings::DeriveDisplayOrder) .setting(AppSettings::SubcommandRequiredElseHelp) .subcommand( SubCommand::with_name("update").about("Download and install updates to elan"), ) .subcommand( SubCommand::with_name("uninstall") .about("Uninstall elan.") .arg(Arg::with_name("no-prompt").short("y")), ), ) /*.subcommand(SubCommand::with_name("telemetry") .about("elan telemetry commands") .setting(AppSettings::Hidden) .setting(AppSettings::VersionlessSubcommands) .setting(AppSettings::DeriveDisplayOrder) .setting(AppSettings::SubcommandRequiredElseHelp) .subcommand(SubCommand::with_name("enable") .about("Enable elan telemetry")) .subcommand(SubCommand::with_name("disable") .about("Disable elan telemetry")) .subcommand(SubCommand::with_name("analyze") .about("Analyze stored telemetry")))*/ .subcommand( SubCommand::with_name("completions") .about("Generate completion scripts for your shell") .after_help(COMPLETIONS_HELP) .setting(AppSettings::ArgRequiredElseHelp) .arg(Arg::with_name("shell").possible_values(&Shell::variants())), ) } fn default_(cfg: &Cfg, m: &ArgMatches) -> Result<()> { let ref toolchain = m.value_of("toolchain").expect(""); let ref toolchain = cfg.get_toolchain(toolchain, false)?; let status = if !toolchain.exists() || !toolchain.is_custom() { Some(toolchain.install_from_dist_if_not_installed()?) } else { None }; toolchain.make_default()?; if let Some(status) = status { println!(""); common::show_channel_update(cfg, toolchain.name(), Ok(status))?; } Ok(()) } fn update(cfg: &Cfg, m: &ArgMatches) -> Result<()> { if let Some(names) = m.values_of("toolchain") { for name in names { let toolchain = cfg.get_toolchain(name, false)?; let status = if !toolchain.exists() || !toolchain.is_custom() { Some(toolchain.install_from_dist(m.is_present("force"))?) 
} else { None }; if let Some(status) = status { println!(""); common::show_channel_update(cfg, toolchain.name(), Ok(status))?; } } } else { common::update_all_channels( cfg, !m.is_present("no-self-update") && !self_update::NEVER_SELF_UPDATE, m.is_present("force"), )?; } Ok(()) } fn run(cfg: &Cfg, m: &ArgMatches) -> Result<()> { let ref toolchain = m.value_of("toolchain").expect(""); let args = m.values_of("command").unwrap(); let args: Vec<_> = args.collect(); let cmd = cfg.create_command_for_toolchain(toolchain, m.is_present("install"), args[0])?; Ok(command::run_command_for_dir( cmd, args[0], &args[1..], &cfg, )?) } fn which(cfg: &Cfg, m: &ArgMatches) -> Result<()> { let binary = m.value_of("command").expect(""); let binary_path = cfg .which_binary(&utils::current_dir()?, binary)? .expect("binary not found"); utils::assert_is_file(&binary_path)?; println!("{}", binary_path.display()); Ok(()) } fn show(cfg: &Cfg) -> Result<()> { let ref cwd = utils::current_dir()?; let installed_toolchains = cfg.list_toolchains()?; let active_toolchain = cfg.find_override_toolchain_or_default(cwd); let show_installed_toolchains = installed_toolchains.len() > 1; let show_active_toolchain = true; // Only need to display headers if we have multiple sections let show_headers = [show_installed_toolchains, show_active_toolchain] .iter() .filter(|x| **x) .count() > 1; if show_installed_toolchains { if show_headers { print_header("installed toolchains") } let default_name = cfg.get_default()?; for t in installed_toolchains { if default_name.as_ref() == Some(&t) { println!("{} (default)", t); } else { println!("{}", t); } } if show_headers { println!("") }; } if show_active_toolchain { if show_headers { print_header("active toolchain") } match active_toolchain { Ok(atc) => match atc { Some((ref toolchain, Some(ref reason))) => { println!("{} ({})", toolchain.name(), reason); println!("{}", common::lean_version(toolchain)); } Some((ref toolchain, None)) => { println!("{} (default)", toolchain.name()); println!("{}", common::lean_version(toolchain)); } None => { println!("no active toolchain"); } }, Err(err) => { if let Some(cause) = err.source() { println!("(error: {}, {})", err, cause); } else { println!("(error: {})", err); } } } if show_headers { println!("") }; } fn print_header(s: &str) { let mut t = term2::stdout(); let _ = t.attr(term2::Attr::Bold); let _ = writeln!(t, "{}", s); let _ = writeln!(t, "{}", iter::repeat("-").take(s.len()).collect::()); let _ = writeln!(t, ""); let _ = t.reset(); } Ok(()) } fn explicit_or_dir_toolchain<'a>(cfg: &'a Cfg, m: &ArgMatches) -> Result> { let toolchain = m.value_of("toolchain"); if let Some(toolchain) = toolchain { let toolchain = cfg.get_toolchain(toolchain, false)?; return Ok(toolchain); } let ref cwd = utils::current_dir()?; let (toolchain, _) = cfg.toolchain_for_dir(cwd)?; Ok(toolchain) } fn toolchain_link(cfg: &Cfg, m: &ArgMatches) -> Result<()> { let ref toolchain = m.value_of("toolchain").expect(""); let ref path = m.value_of("path").expect(""); let toolchain = cfg.get_toolchain(toolchain, true)?; Ok(toolchain.install_from_dir(Path::new(path), true)?) 
} fn toolchain_remove(cfg: &Cfg, m: &ArgMatches) -> Result<()> { for toolchain in m.values_of("toolchain").expect("") { let toolchain = cfg.get_toolchain(toolchain, false)?; toolchain.remove()?; } Ok(()) } fn override_add(cfg: &Cfg, m: &ArgMatches) -> Result<()> { let ref toolchain = m.value_of("toolchain").expect(""); let toolchain = cfg.get_toolchain(toolchain, false)?; let status = if !toolchain.exists() || !toolchain.is_custom() { Some(toolchain.install_from_dist_if_not_installed()?) } else { None }; toolchain.make_override(&utils::current_dir()?)?; if let Some(status) = status { println!(""); common::show_channel_update(cfg, toolchain.name(), Ok(status))?; } Ok(()) } fn override_remove(cfg: &Cfg, m: &ArgMatches) -> Result<()> { let paths = if m.is_present("nonexistent") { let list: Vec<_> = cfg.settings_file.with(|s| { Ok(s.overrides .iter() .filter_map(|(k, _)| { if Path::new(k).is_dir() { None } else { Some(k.clone()) } }) .collect()) })?; if list.is_empty() { info!("no nonexistent paths detected"); } list } else { if m.is_present("path") { vec![m.value_of("path").unwrap().to_string()] } else { vec![utils::current_dir()?.to_str().unwrap().to_string()] } }; for path in paths { if cfg .settings_file .with_mut(|s| Ok(s.remove_override(&Path::new(&path), cfg.notify_handler.as_ref())))? { info!("override toolchain for '{}' removed", path); } else { info!("no override toolchain for '{}'", path); if !m.is_present("path") && !m.is_present("nonexistent") { info!( "you may use `--path ` option to remove override toolchain \ for a specific path" ); } } } Ok(()) } fn doc(cfg: &Cfg, m: &ArgMatches) -> Result<()> { let doc_url = if m.is_present("book") { "book/index.html" } else if m.is_present("std") { "std/index.html" } else { "index.html" }; Ok(cfg.open_docs_for_dir(&utils::current_dir()?, doc_url)?) } fn man(cfg: &Cfg, m: &ArgMatches) -> Result<()> { let manpage = m.value_of("command").expect(""); let toolchain = explicit_or_dir_toolchain(cfg, m)?; let mut man_path = toolchain.path().to_path_buf(); man_path.push("share"); man_path.push("man"); man_path.push("man1"); man_path.push(manpage.to_owned() + ".1"); utils::assert_is_file(&man_path)?; Command::new("man") .arg(man_path) .status() .expect("failed to open man page"); Ok(()) } fn self_uninstall(m: &ArgMatches) -> Result<()> { let no_prompt = m.is_present("no-prompt"); self_update::uninstall(no_prompt) } fn set_telemetry(cfg: &Cfg, t: TelemetryMode) -> Result<()> { match t { TelemetryMode::On => Ok(cfg.set_telemetry(true)?), TelemetryMode::Off => Ok(cfg.set_telemetry(false)?), } } fn analyze_telemetry(cfg: &Cfg) -> Result<()> { let analysis = cfg.analyze_telemetry()?; common::show_telemetry(analysis) } elan-1.3.1/src/elan-cli/errors.rs000066400000000000000000000021241414005346400165700ustar00rootroot00000000000000#![allow(dead_code)] use std::io; use std::path::PathBuf; use elan; use elan_dist::{self, temp}; use elan_utils; error_chain! 
{ links { Elan(elan::Error, elan::ErrorKind); Dist(elan_dist::Error, elan_dist::ErrorKind); Utils(elan_utils::Error, elan_utils::ErrorKind); } foreign_links { Temp(temp::Error); Io(io::Error); } errors { PermissionDenied { description("permission denied") } ToolchainNotInstalled(t: String) { description("toolchain is not installed") display("toolchain '{}' is not installed", t) } InfiniteRecursion { description("infinite recursion detected") } NoExeName { description("couldn't determine self executable name") } NotSelfInstalled(p: PathBuf) { description("elan is not installed") display("elan is not installed at '{}'", p.display()) } WindowsUninstallMadness { description("failure during windows uninstall") } } } elan-1.3.1/src/elan-cli/help.rs000066400000000000000000000215211414005346400162060ustar00rootroot00000000000000pub static ELAN_HELP: &'static str = r"DISCUSSION: elan manages your installations of the Lean theorem prover. It places `lean` and `leanpkg` binaries in your `PATH` that automatically select and, if necessary, download the Lean version described in the `lean_version` field of your project's `leanpkg.toml`. You can also install, select, run, and uninstall Lean versions manually using the commands of the `elan` executable."; pub static SHOW_HELP: &'static str = r"DISCUSSION: Shows the name of the active toolchain and the version of `lean`. If there are multiple toolchains installed then all installed toolchains are listed as well."; pub static UPDATE_HELP: &'static str = r"DISCUSSION: With no toolchain specified, the `update` command updates each of the installed toolchains from the official release channels, then updates elan itself. If given a toolchain argument then `update` updates that toolchain, the same as `elan toolchain install`."; pub static INSTALL_HELP: &'static str = r"DISCUSSION: Installs a specific lean toolchain. The 'install' command is an alias for 'elan update '."; pub static DEFAULT_HELP: &'static str = r"DISCUSSION: Sets the default toolchain to the one specified. If the toolchain is not already installed then it is installed first."; pub static TOOLCHAIN_HELP: &'static str = r"DISCUSSION: Many `elan` commands deal with *toolchains*, a single installation of the Lean theorem prover. `elan` supports multiple types of toolchains. The most basic track the official release channels: 'stable' and 'nightly'; but `elan` can also install toolchains from the official archives and from local builds. Standard release channel toolchain names have the following form: [:][-] = stable|nightly| = YYYY-MM-DD 'channel' is either a named release channel or an explicit version number, such as '3.3.0'. Channel names can be optionally appended with an archive date, as in 'nightly-2018-04-10', in which case the toolchain is downloaded from the archive for that date. 'origin' can be used to refer to custom forks of Lean on Github; the default is 'leanprover/lean'. For nightly versions, '-nightly' is appended to the value of 'origin'. elan can also manage symlinked local toolchain builds, which are often used to for developing Lean itself. For more information see `elan toolchain help link`."; pub static TOOLCHAIN_LINK_HELP: &'static str = r"DISCUSSION: 'toolchain' is the custom name to be assigned to the new toolchain. 'path' specifies the directory where the binaries and libraries for the custom toolchain can be found. For example, when used for development of Lean itself, toolchains can be linked directly out of the Lean root directory. 
After building, you can test out different compiler versions as follows: $ elan toolchain link master $ elan override set master If you now compile a crate in the current directory, the custom toolchain 'master' will be used."; pub static OVERRIDE_HELP: &'static str = r"DISCUSSION: Overrides configure elan to use a specific toolchain when running in a specific directory. elan will automatically select the Lean toolchain specified in the `lean_version` field of the `leanpkg.toml` file when inside a Lean package, but directories can also be assigned their own Lean toolchain manually with `elan override`. When a directory has an override then any time `lean` or `leanpkg` is run inside that directory, or one of its child directories, the override toolchain will be invoked. To pin to a specific nightly: $ elan override set nightly-2018-04-10 Or a specific stable release: $ elan override set 3.3.0 To see the active toolchain use `elan show`. To remove the override and use the default toolchain again, `elan override unset`."; pub static OVERRIDE_UNSET_HELP: &'static str = r"DISCUSSION: If `--path` argument is present, removes the override toolchain for the specified directory. If `--nonexistent` argument is present, removes the override toolchain for all nonexistent directories. Otherwise, removes the override toolchain for the current directory."; pub static RUN_HELP: &'static str = r"DISCUSSION: Configures an environment to use the given toolchain and then runs the specified program. The command may be any program, not just lean or leanpkg. This can be used for testing arbitrary toolchains without setting an override. Commands explicitly proxied by `elan` (such as `lean` and `leanpkg`) also have a shorthand for this available. The toolchain can be set by using `+toolchain` as the first argument. These are equivalent: $ leanpkg +nightly build $ elan run nightly leanpkg build"; pub static _DOC_HELP: &'static str = r"DISCUSSION: Opens the documentation for the currently active toolchain with the default browser. By default, it opens the documentation index. Use the various flags to open specific pieces of documentation."; pub static COMPLETIONS_HELP: &'static str = r"DISCUSSION: One can generate a completion script for `elan` that is compatible with a given shell. The script is output on `stdout` allowing one to re-direct the output to the file of their choosing. Where you place the file will depend on which shell, and which operating system you are using. Your particular configuration may also determine where these scripts need to be placed. Here are some common set ups for the three supported shells under Unix and similar operating systems (such as GNU/Linux). BASH: Completion files are commonly stored in `/etc/bash_completion.d/`. Run the command: $ elan completions bash > /etc/bash_completion.d/elan.bash-completion This installs the completion script. You may have to log out and log back in to your shell session for the changes to take affect. BASH (macOS/Homebrew): Homebrew stores bash completion files within the Homebrew directory. With the `bash-completion` brew formula installed, run the command: $ elan completions bash > $(brew --prefix)/etc/bash_completion.d/elan.bash-completion FISH: Fish completion files are commonly stored in `$HOME/.config/fish/completions`. Run the command: $ elan completions fish > ~/.config/fish/completions/elan.fish This installs the completion script. You may have to log out and log back in to your shell session for the changes to take affect. 
ZSH: ZSH completions are commonly stored in any directory listed in your `$fpath` variable. To use these completions, you must either add the generated script to one of those directories, or add your own to this list. Adding a custom directory is often the safest bet if you are unsure of which directory to use. First create the directory; for this example we'll create a hidden directory inside our `$HOME` directory: $ mkdir ~/.zfunc Then add the following lines to your `.zshrc` just before `compinit`: fpath+=~/.zfunc Now you can install the completions script using the following command: $ elan completions zsh > ~/.zfunc/_elan You must then either log out and log back in, or simply run $ exec zsh for the new completions to take affect. CUSTOM LOCATIONS: Alternatively, you could save these files to the place of your choosing, such as a custom directory inside your $HOME. Doing so will require you to add the proper directives, such as `source`ing inside your login script. Consult your shells documentation for how to add such directives. POWERSHELL: The powershell completion scripts require PowerShell v5.0+ (which comes Windows 10, but can be downloaded separately for windows 7 or 8.1). First, check if a profile has already been set PS C:\> Test-Path $profile If the above command returns `False` run the following PS C:\> New-Item -path $profile -type file -force Now open the file provided by `$profile` (if you used the `New-Item` command it will be `%USERPROFILE%\Documents\WindowsPowerShell\Microsoft.PowerShell_profile.ps1` Next, we either save the completions file into our profile, or into a separate file and source it inside our profile. To save the completions into our profile simply use PS C:\> elan completions powershell >> %USERPROFILE%\Documents\WindowsPowerShell\Microsoft.PowerShell_profile.ps1"; pub static TOOLCHAIN_ARG_HELP: &'static str = "Toolchain name, such as 'stable', 'nightly', \ or '3.3.0'. For more information see `elan \ help toolchain`"; elan-1.3.1/src/elan-cli/job.rs000066400000000000000000000241611414005346400160330ustar00rootroot00000000000000// FIXME: stolen from cargo. Should be extracted into a common crate. //! Job management (mostly for windows) //! //! Most of the time when you're running leanpkg you expect Ctrl-C to actually //! terminate the entire tree of processes in play, not just the one at the top //! (cago). This currently works "by default" on Unix platforms because Ctrl-C //! actually sends a signal to the *process group* rather than the parent //! process, so everything will get torn down. On Windows, however, this does //! not happen and Ctrl-C just kills leanpkg. //! //! To achieve the same semantics on Windows we use Job Objects to ensure that //! all processes die at the same time. Job objects have a mode of operation //! where when all handles to the object are closed it causes all child //! processes associated with the object to be terminated immediately. //! Conveniently whenever a process in the job object spawns a new process the //! child will be associated with the job object as well. This means if we add //! ourselves to the job object we create then everything will get torn down! 
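//!
//! A condensed sketch of the sequence used by `imp::setup` further down in
//! this file (the `Handle` wrapper, all error handling, and the teardown
//! logic in `Drop` are elided here):
//!
//! ```ignore
//! // Create a fresh job object and mark it "kill everything on close".
//! let job = jobapi2::CreateJobObjectW(0 as *mut _, 0 as *const _);
//! let mut info: winnt::JOBOBJECT_EXTENDED_LIMIT_INFORMATION = mem::zeroed();
//! info.BasicLimitInformation.LimitFlags = winnt::JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
//! jobapi2::SetInformationJobObject(
//!     job,
//!     winnt::JobObjectExtendedLimitInformation,
//!     &mut info as *mut _ as minwindef::LPVOID,
//!     mem::size_of_val(&info) as minwindef::DWORD,
//! );
//! // Add ourselves to the job; children spawned later are added automatically.
//! jobapi2::AssignProcessToJobObject(job, processthreadsapi::GetCurrentProcess());
//! ```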
pub use self::imp::Setup; pub fn setup() -> Option { unsafe { imp::setup() } } #[cfg(unix)] mod imp { pub type Setup = (); pub unsafe fn setup() -> Option<()> { Some(()) } } #[cfg(windows)] mod imp { extern crate winapi; use std::ffi::OsString; use std::io; use std::mem; use std::os::windows::prelude::*; use winapi::shared::*; use winapi::um::*; pub struct Setup { job: Handle, } pub struct Handle { inner: ntdef::HANDLE, } fn last_err() -> io::Error { io::Error::last_os_error() } pub unsafe fn setup() -> Option { // Creates a new job object for us to use and then adds ourselves to it. // Note that all errors are basically ignored in this function, // intentionally. Job objects are "relatively new" in Windows, // particularly the ability to support nested job objects. Older // Windows installs don't support this ability. We probably don't want // to force Leanpkg to abort in this situation or force others to *not* // use job objects, so we instead just ignore errors and assume that // we're otherwise part of someone else's job object in this case. let job = jobapi2::CreateJobObjectW(0 as *mut _, 0 as *const _); if job.is_null() { return None; } let job = Handle { inner: job }; // Indicate that when all handles to the job object are gone that all // process in the object should be killed. Note that this includes our // entire process tree by default because we've added ourselves and and // our children will reside in the job once we spawn a process. let mut info: winnt::JOBOBJECT_EXTENDED_LIMIT_INFORMATION; info = mem::zeroed(); info.BasicLimitInformation.LimitFlags = winnt::JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE; let r = jobapi2::SetInformationJobObject( job.inner, winnt::JobObjectExtendedLimitInformation, &mut info as *mut _ as minwindef::LPVOID, mem::size_of_val(&info) as minwindef::DWORD, ); if r == 0 { return None; } // Assign our process to this job object, meaning that our children will // now live or die based on our existence. let me = processthreadsapi::GetCurrentProcess(); let r = jobapi2::AssignProcessToJobObject(job.inner, me); if r == 0 { return None; } Some(Setup { job: job }) } impl Drop for Setup { fn drop(&mut self) { // This is a litte subtle. By default if we are terminated then all // processes in our job object are terminated as well, but we // intentionally want to whitelist some processes to outlive our job // object (see below). // // To allow for this, we manually kill processes instead of letting // the job object kill them for us. We do this in a loop to handle // processes spawning other processes. // // Finally once this is all done we know that the only remaining // ones are ourselves and the whitelisted processes. The destructor // here then configures our job object to *not* kill everything on // close, then closes the job object. 
unsafe { while self.kill_remaining() { info!("killed some, going for more"); } let mut info: winnt::JOBOBJECT_EXTENDED_LIMIT_INFORMATION; info = mem::zeroed(); let r = jobapi2::SetInformationJobObject( self.job.inner, winnt::JobObjectExtendedLimitInformation, &mut info as *mut _ as minwindef::LPVOID, mem::size_of_val(&info) as minwindef::DWORD, ); if r == 0 { info!("failed to configure job object to defaults: {}", last_err()); } } } } impl Setup { unsafe fn kill_remaining(&mut self) -> bool { #[repr(C)] struct Jobs { header: winnt::JOBOBJECT_BASIC_PROCESS_ID_LIST, list: [basetsd::ULONG_PTR; 1024], } let mut jobs: Jobs = mem::zeroed(); let r = jobapi2::QueryInformationJobObject( self.job.inner, winnt::JobObjectBasicProcessIdList, &mut jobs as *mut _ as minwindef::LPVOID, mem::size_of_val(&jobs) as minwindef::DWORD, 0 as *mut _, ); if r == 0 { info!("failed to query job object: {}", last_err()); return false; } let mut killed = false; let list = &jobs.list[..jobs.header.NumberOfProcessIdsInList as usize]; assert!(list.len() > 0); let list = list .iter() .filter(|&&id| { // let's not kill ourselves id as minwindef::DWORD != processthreadsapi::GetCurrentProcessId() }) .filter_map(|&id| { // Open the process with the necessary rights, and if this // fails then we probably raced with the process exiting so we // ignore the problem. let flags = winnt::PROCESS_QUERY_INFORMATION | winnt::PROCESS_TERMINATE | winnt::SYNCHRONIZE; let p = processthreadsapi::OpenProcess( flags, minwindef::FALSE, id as minwindef::DWORD, ); if p.is_null() { None } else { Some(Handle { inner: p }) } }) .filter(|p| { // Test if this process was actually in the job object or not. // If it's not then we likely raced with something else // recycling this PID, so we just skip this step. let mut res = 0; let r = jobapi::IsProcessInJob(p.inner, self.job.inner, &mut res); if r == 0 { info!("failed to test is process in job: {}", last_err()); return false; } res == minwindef::TRUE }); for p in list { // Load the file which this process was spawned from. We then // later use this for identification purposes. let mut buf = [0; 1024]; let r = psapi::GetProcessImageFileNameW( p.inner, buf.as_mut_ptr(), buf.len() as minwindef::DWORD, ); if r == 0 { info!("failed to get image name: {}", last_err()); continue; } let s = OsString::from_wide(&buf[..r as usize]); info!("found remaining: {:?}", s); // And here's where we find the whole purpose for this // function! Currently, our only whitelisted process is // `mspdbsrv.exe`, and more details about that can be found // here: // // https://github.com/rust-lang/rust/issues/33145 // // The gist of it is that all builds on one machine use the // same `mspdbsrv.exe` instance. If we were to kill this // instance then we could erroneously cause other builds to // fail. if let Some(s) = s.to_str() { if s.contains("mspdbsrv") { info!("\toops, this is mspdbsrv"); continue; } } // Ok, this isn't mspdbsrv, let's kill the process. After we // kill it we wait on it to ensure that the next time around in // this function we're not going to see it again. 
let r = processthreadsapi::TerminateProcess(p.inner, 1); if r == 0 { info!("\tfailed to kill subprocess: {}", last_err()); info!("\tassuming subprocess is dead..."); } else { info!("\tterminated subprocess"); } let r = synchapi::WaitForSingleObject(p.inner, winbase::INFINITE); if r != 0 { info!("failed to wait for process to die: {}", last_err()); return false; } killed = true; } return killed; } } impl Drop for Handle { fn drop(&mut self) { unsafe { handleapi::CloseHandle(self.inner); } } } } elan-1.3.1/src/elan-cli/log.rs000066400000000000000000000030531414005346400160370ustar00rootroot00000000000000use std::fmt; use std::io::Write; use term2; macro_rules! warn { ( $ ( $ arg : tt ) * ) => ( $crate::log::warn_fmt ( format_args ! ( $ ( $ arg ) * ) ) ) } macro_rules! err { ( $ ( $ arg : tt ) * ) => ( $crate::log::err_fmt ( format_args ! ( $ ( $ arg ) * ) ) ) } macro_rules! info { ( $ ( $ arg : tt ) * ) => ( $crate::log::info_fmt ( format_args ! ( $ ( $ arg ) * ) ) ) } macro_rules! verbose { ( $ ( $ arg : tt ) * ) => ( $crate::log::verbose_fmt ( format_args ! ( $ ( $ arg ) * ) ) ) } pub fn warn_fmt(args: fmt::Arguments) { let mut t = term2::stderr(); let _ = t.fg(term2::color::BRIGHT_YELLOW); let _ = t.attr(term2::Attr::Bold); let _ = write!(t, "warning: "); let _ = t.reset(); let _ = t.write_fmt(args); let _ = write!(t, "\n"); } pub fn err_fmt(args: fmt::Arguments) { let mut t = term2::stderr(); let _ = t.fg(term2::color::BRIGHT_RED); let _ = t.attr(term2::Attr::Bold); let _ = write!(t, "error: "); let _ = t.reset(); let _ = t.write_fmt(args); let _ = write!(t, "\n"); } pub fn info_fmt(args: fmt::Arguments) { let mut t = term2::stderr(); let _ = t.attr(term2::Attr::Bold); let _ = write!(t, "info: "); let _ = t.reset(); let _ = t.write_fmt(args); let _ = write!(t, "\n"); } pub fn verbose_fmt(args: fmt::Arguments) { let mut t = term2::stderr(); let _ = t.fg(term2::color::BRIGHT_MAGENTA); let _ = t.attr(term2::Attr::Bold); let _ = write!(t, "verbose: "); let _ = t.reset(); let _ = t.write_fmt(args); let _ = write!(t, "\n"); } elan-1.3.1/src/elan-cli/main.rs000066400000000000000000000060271414005346400162060ustar00rootroot00000000000000//! The main elan commandline application //! //! The elan binary is a chimera, changing its behavior based on the //! name of the binary. This is used most prominently to enable //! elan's tool 'proxies' - that is, elan itself and the elan //! proxies are the same binary; when the binary is called 'elan' or //! 'elan.exe' elan behaves like the elan commandline //! application; when it is called 'lean' it behaves as a proxy to //! 'lean'. //! //! This scheme is further used to distingush the elan installer, //! called 'elan-init' which is again just the elan binary under a //! different name. 
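//!
//! Concretely, the dispatch on the executable name happens in `run_elan`
//! below; condensed (error handling elided), it looks roughly like this:
//!
//! ```ignore
//! let arg0 = env::args().next().map(PathBuf::from);
//! let name = arg0.as_ref().and_then(|a| a.file_stem()).and_then(|a| a.to_str());
//! match name {
//!     Some("elan") => elan_mode::main(),
//!     Some(n) if n.starts_with("elan-setup") || n.starts_with("elan-init") => setup_mode::main(),
//!     Some(n) if n.starts_with("elan-gc-") => self_update::complete_windows_uninstall(),
//!     Some(_) => proxy_mode::main(), // `lean`, `leanpkg`, `lake`, ... run as proxies
//!     None => Err(ErrorKind::NoExeName.into()),
//! }
//! ```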
#![recursion_limit = "1024"] extern crate elan_dist; extern crate elan_utils; #[macro_use] extern crate error_chain; extern crate clap; extern crate elan; extern crate flate2; extern crate itertools; extern crate json; extern crate markdown; extern crate rand; extern crate regex; extern crate same_file; extern crate scopeguard; extern crate sha2; extern crate tar; extern crate tempfile; extern crate term; extern crate time; extern crate toml; extern crate wait_timeout; extern crate zip; #[cfg(windows)] extern crate gcc; extern crate libc; #[cfg(windows)] extern crate winapi; #[cfg(windows)] extern crate winreg; #[macro_use] mod log; mod common; mod download_tracker; mod elan_mode; mod errors; mod help; mod job; mod proxy_mode; mod self_update; mod setup_mode; mod term2; use elan::env_var::LEAN_RECURSION_COUNT_MAX; use errors::*; use std::env; use std::path::PathBuf; fn main() { if let Err(ref e) = run_elan() { common::report_error(e); std::process::exit(1); } } fn run_elan() -> Result<()> { // Guard against infinite proxy recursion. This mostly happens due to // bugs in elan. do_recursion_guard()?; // The name of arg0 determines how the program is going to behave let arg0 = env::args().next().map(PathBuf::from); let name = arg0 .as_ref() .and_then(|a| a.file_stem()) .and_then(|a| a.to_str()); match name { Some("elan") => elan_mode::main(), Some(n) if n.starts_with("elan-setup") || n.starts_with("elan-init") => { // NB: The above check is only for the prefix of the file // name. Browsers rename duplicates to // e.g. elan-setup(2), and this allows all variations // to work. setup_mode::main() } Some(n) if n.starts_with("elan-gc-") => { // This is the final uninstallation stage on windows where // elan deletes its own exe self_update::complete_windows_uninstall() } Some(_) => proxy_mode::main(), None => { // Weird case. No arg0, or it's unparsable. Err(ErrorKind::NoExeName.into()) } } } fn do_recursion_guard() -> Result<()> { let recursion_count = env::var("LEAN_RECURSION_COUNT") .ok() .and_then(|s| s.parse().ok()) .unwrap_or(0); if recursion_count > LEAN_RECURSION_COUNT_MAX { return Err(ErrorKind::InfiniteRecursion.into()); } Ok(()) } elan-1.3.1/src/elan-cli/proxy_mode.rs000066400000000000000000000026511414005346400174460ustar00rootroot00000000000000use common::set_globals; use elan::command::run_command_for_dir; use elan::Cfg; use elan_utils::utils; use errors::*; use job; use std::env; use std::ffi::OsString; use std::path::PathBuf; pub fn main() -> Result<()> { ::self_update::cleanup_self_updater()?; let _setup = job::setup(); let mut args = env::args(); let arg0 = args.next().map(PathBuf::from); let arg0 = arg0 .as_ref() .and_then(|a| a.file_name()) .and_then(|a| a.to_str()); let ref arg0 = arg0.ok_or(ErrorKind::NoExeName)?; // Check for a toolchain specifier. let arg1 = args.next(); let toolchain = arg1.as_ref().and_then(|arg1| { if arg1.starts_with('+') { Some(&arg1[1..]) } else { None } }); // Build command args now while we know whether or not to skip arg 1. let cmd_args: Vec<_> = if toolchain.is_none() { env::args_os().skip(1).collect() } else { env::args_os().skip(2).collect() }; let cfg = set_globals(false)?; direct_proxy(&cfg, arg0, toolchain, &cmd_args)?; Ok(()) } fn direct_proxy(cfg: &Cfg, arg0: &str, toolchain: Option<&str>, args: &[OsString]) -> Result<()> { let cmd = match toolchain { None => cfg.create_command_for_dir(&utils::current_dir()?, arg0)?, Some(tc) => cfg.create_command_for_toolchain(tc, true, arg0)?, }; Ok(run_command_for_dir(cmd, arg0, args, &cfg)?) 
} elan-1.3.1/src/elan-cli/self_update.rs000066400000000000000000001257641414005346400175670ustar00rootroot00000000000000//! Self-installation and updating //! //! This is the installer at the heart of Lean. If it breaks //! everything breaks. It is conceptually very simple, as elan is //! distributed as a single binary, and installation mostly requires //! copying it into place. There are some tricky bits though, mostly //! because of workarounds to self-delete an exe on Windows. //! //! During install (as `elan-init`): //! //! * copy the self exe to $ELAN_HOME/bin //! * hardlink lean, etc to *that* //! * update the PATH in a system-specific way //! * run the equivalent of `elan default stable` //! //! During upgrade (`elan self upgrade`): //! //! * download elan-init to $ELAN_HOME/bin/elan-init //! * run elan-init with appropriate flags to indicate //! this is a self-upgrade //! * elan-init copies bins and hardlinks into place. On windows //! this happens *after* the upgrade command exits successfully. //! //! During uninstall (`elan self uninstall`): //! //! * Delete `$ELAN_HOME`. //! * Delete everything in `$ELAN_HOME`, including //! the elan binary and its hardlinks //! //! Deleting the running binary during uninstall is tricky //! and racy on Windows. use common::{self, Confirm}; use elan_dist::dist; use elan_utils::utils; use errors::*; use flate2; use regex::Regex; use same_file::Handle; use std::env; use std::env::consts::EXE_SUFFIX; use std::fs; use std::io; use std::path::{Component, Path, PathBuf}; use std::process::{self, Command}; use tar; use tempfile::tempdir; use term2; use zip; pub struct InstallOpts { pub default_toolchain: String, pub no_modify_path: bool, } #[cfg(feature = "no-self-update")] pub const NEVER_SELF_UPDATE: bool = true; #[cfg(not(feature = "no-self-update"))] pub const NEVER_SELF_UPDATE: bool = false; // The big installation messages. These are macros because the first // argument of format! needs to be a literal. macro_rules! pre_install_msg_template { ($platform_msg: expr) => { concat!( r" # Welcome to Lean! This will download and install Elan, a tool for managing different Lean versions used in packages you create or download. It will also install a default version of Lean and its package manager, leanpkg, for editing files not belonging to any package. It will add the `leanpkg`, `lean`, and `elan` commands to Elan's bin directory, located at: {elan_home_bin} ", $platform_msg, r#" You can uninstall at any time with `elan self uninstall` and these changes will be reverted. "# ) }; } macro_rules! pre_install_msg_unix { () => { pre_install_msg_template!( "This path will then be added to your `PATH` environment variable by modifying the profile file{plural} located at: {rcfiles}" ) }; } macro_rules! pre_install_msg_win { () => { pre_install_msg_template!( "This path will then be added to your `PATH` environment variable by modifying the `HKEY_CURRENT_USER/Environment/PATH` registry key." ) }; } macro_rules! pre_install_msg_no_modify_path { () => { pre_install_msg_template!( "This path needs to be in your `PATH` environment variable, but will not be added automatically." ) }; } macro_rules! post_install_msg_unix { () => { r"# Elan is installed now. Great! To get started you need Elan's bin directory ({elan_home}/bin) in your `PATH` environment variable. Next time you log in this will be done automatically. To configure your current shell run `source {elan_home}/env` " }; } macro_rules! post_install_msg_win { () => { r"# Elan is installed now. Great! 
To get started you need Elan's bin directory ({elan_home}\bin) in your `PATH` environment variable. Future applications will automatically have the correct environment, but you may need to restart your current shell. " }; } macro_rules! post_install_msg_unix_no_modify_path { () => { r"# Elan is installed now. Great! To get started you need Elan's bin directory ({elan_home}/bin) in your `PATH` environment variable. To configure your current shell run `source {elan_home}/env` " }; } macro_rules! post_install_msg_win_no_modify_path { () => { r"# Elan is installed now. Great! To get started you need Elan's bin directory ({elan_home}\bin) in your `PATH` environment variable. This has not been done automatically. " }; } macro_rules! pre_uninstall_msg { () => { r"This will uninstall all Lean toolchains and data, and remove `{elan_home}/bin` from your `PATH` environment variable. " }; } static TOOLS: &'static [&'static str] = &[ "lean", "leanpkg", "leanchecker", "leanc", "leanmake", "lake", ]; static UPDATE_ROOT: &'static str = "https://github.com/leanprover/elan/releases/download"; /// `ELAN_HOME` suitable for display, possibly with $HOME /// substituted for the directory prefix fn canonical_elan_home() -> Result { let path = utils::elan_home()?; let mut path_str = path.to_string_lossy().to_string(); let default_elan_home = utils::home_dir() .unwrap_or(PathBuf::from(".")) .join(".elan"); if default_elan_home == path { if cfg!(unix) { path_str = String::from("$HOME/.elan"); } else { path_str = String::from(r"%USERPROFILE%\.elan"); } } Ok(path_str) } /// Installing is a simple matter of coping the running binary to /// `ELAN_HOME`/bin, hardlinking the various Lean tools to it, /// and adding `ELAN_HOME`/bin to PATH. pub fn install(no_prompt: bool, verbose: bool, mut opts: InstallOpts) -> Result<()> { check_existence_of_lean_in_path(no_prompt)?; do_anti_sudo_check(no_prompt)?; if !no_prompt { let ref msg = pre_install_msg(opts.no_modify_path)?; term2::stdout().md(msg); loop { term2::stdout().md(current_install_opts(&opts)); match common::confirm_advanced()? 
{ Confirm::No => { info!("aborting installation"); return Ok(()); } Confirm::Yes => { break; } Confirm::Advanced => { opts = customize_install(opts)?; } } } } let install_res: Result<()> = (|| { install_bins()?; if !opts.no_modify_path { do_add_to_path(&get_add_path_methods())?; } maybe_install_lean(&opts.default_toolchain, verbose)?; if cfg!(unix) { let ref env_file = utils::elan_home()?.join("env"); let ref env_str = format!("{}\n", shell_export_string()?); utils::write_file("env", env_file, env_str)?; } Ok(()) })(); if let Err(ref e) = install_res { common::report_error(e); process::exit(1); } // More helpful advice, skip if -y if !no_prompt { let elan_home = canonical_elan_home()?; let msg = if !opts.no_modify_path { if cfg!(unix) { format!(post_install_msg_unix!(), elan_home = elan_home) } else { format!(post_install_msg_win!(), elan_home = elan_home) } } else { if cfg!(unix) { format!( post_install_msg_unix_no_modify_path!(), elan_home = elan_home ) } else { format!( post_install_msg_win_no_modify_path!(), elan_home = elan_home ) } }; term2::stdout().md(msg); } Ok(()) } fn lean_exists_in_path() -> Result<()> { // Ignore lean if present in $HOME/.elan/bin fn ignore_paths(path: &PathBuf) -> bool { !path .components() .any(|c| c == Component::Normal(".elan".as_ref())) } if let Some(paths) = env::var_os("PATH") { let paths = env::split_paths(&paths).filter(ignore_paths); for path in paths { let lean = path.join(format!("lean{}", EXE_SUFFIX)); if lean.exists() { return Err(path.to_str().unwrap().into()); } } } Ok(()) } fn check_existence_of_lean_in_path(no_prompt: bool) -> Result<()> { // Only the test runner should set this let skip_check = env::var_os("ELAN_INIT_SKIP_PATH_CHECK"); // Ignore this check if called with no prompt (-y) or if the environment variable is set if no_prompt || skip_check == Some("yes".into()) { return Ok(()); } if let Err(path) = lean_exists_in_path() { err!("it looks like you have an existing installation of Lean at:"); err!("{}", path); err!("elan cannot be installed alongside Lean. Please uninstall first"); err!("if this is what you want, restart the installation with `-y'"); Err("cannot install while Lean is installed".into()) } else { Ok(()) } } // If the user is trying to install with sudo, on some systems this will // result in writing root-owned files to the user's home directory, because // sudo is configured not to change $HOME. Don't let that bogosity happen. 
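//
// The check below (see `home_mismatch` inside `do_anti_sudo_check`) boils
// down to comparing two views of "home"; condensed, with error handling
// elided:
//
//     let env_home = env::var_os("HOME");            // what the environment says
//     let pw_dir = /* getpwuid_r(geteuid(), ...) */; // what the euid's passwd entry says
//     let mismatch = match (env_home, pw_dir) {
//         (Some(eh), Some(pd)) => eh != pd,
//         _ => false,
//     };
//
// On a mismatch, an interactive install aborts; with `-y` it only warns.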
#[allow(dead_code)] fn do_anti_sudo_check(no_prompt: bool) -> Result<()> { #[cfg(unix)] pub fn home_mismatch() -> bool { extern crate libc as c; use std::ffi::CStr; use std::mem::MaybeUninit; use std::ops::Deref; use std::ptr; // test runner should set this, nothing else if env::var("ELAN_INIT_SKIP_SUDO_CHECK") .as_ref() .map(Deref::deref) .ok() == Some("yes") { return false; } let mut buf = [0 as c::c_char; 1024]; let mut pwd = MaybeUninit::::uninit(); let mut pwdp: *mut c::passwd = ptr::null_mut(); let rv = unsafe { c::getpwuid_r( c::geteuid(), pwd.as_mut_ptr(), buf.as_mut_ptr(), buf.len(), &mut pwdp, ) }; if rv != 0 { warn!("getpwuid_r: couldn't get user data ({})", rv); return false; } if pwdp.is_null() { warn!("getpwuid_r: couldn't get user data"); return false; } let pw_dir = unsafe { CStr::from_ptr(pwd.assume_init().pw_dir) } .to_str() .ok(); let env_home = env::var_os("HOME"); let env_home = env_home.as_ref().map(Deref::deref); match (env_home, pw_dir) { (None, _) | (_, None) => false, (Some(eh), Some(pd)) => eh != pd, } } #[cfg(not(unix))] pub fn home_mismatch() -> bool { false } match (home_mismatch(), no_prompt) { (false, _) => (), (true, false) => { err!("$HOME differs from euid-obtained home directory: you may be using sudo"); err!("if this is what you want, restart the installation with `-y'"); process::exit(1); } (true, true) => { warn!("$HOME differs from euid-obtained home directory: you may be using sudo"); } } Ok(()) } fn pre_install_msg(no_modify_path: bool) -> Result { let elan_home = utils::elan_home()?; let elan_home_bin = elan_home.join("bin"); if !no_modify_path { if cfg!(unix) { let add_path_methods = get_add_path_methods(); let rcfiles = add_path_methods .into_iter() .filter_map(|m| { if let PathUpdateMethod::RcFile(path) = m { Some(format!("{}", path.display())) } else { None } }) .collect::>(); let plural = if rcfiles.len() > 1 { "s" } else { "" }; let rcfiles = rcfiles .into_iter() .map(|f| format!(" {}", f)) .collect::>(); let rcfiles = rcfiles.join("\n"); Ok(format!( pre_install_msg_unix!(), elan_home_bin = elan_home_bin.display(), plural = plural, rcfiles = rcfiles )) } else { Ok(format!( pre_install_msg_win!(), elan_home_bin = elan_home_bin.display() )) } } else { Ok(format!( pre_install_msg_no_modify_path!(), elan_home_bin = elan_home_bin.display() )) } } fn current_install_opts(opts: &InstallOpts) -> String { format!( r"Current installation options: - ` `default toolchain: `{}` - modify PATH variable: `{}` ", opts.default_toolchain, if !opts.no_modify_path { "yes" } else { "no" } ) } // Interactive editing of the install options fn customize_install(mut opts: InstallOpts) -> Result { println!( "I'm going to ask you the value of each these installation options.\n\ You may simply press the Enter key to leave unchanged." ); println!(""); opts.default_toolchain = common::question_str( "Default toolchain? (stable/nightly/none)", &opts.default_toolchain, )?; opts.no_modify_path = !common::question_bool("Modify PATH variable? (y/n)", !opts.no_modify_path)?; Ok(opts) } fn install_bins() -> Result<()> { let ref bin_path = utils::elan_home()?.join("bin"); let ref this_exe_path = utils::current_exe()?; let ref elan_path = bin_path.join(&format!("elan{}", EXE_SUFFIX)); utils::ensure_dir_exists("bin", bin_path, &|_| {})?; // NB: Even on Linux we can't just copy the new binary over the (running) // old binary; we must unlink it first. 
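//
// Condensed, the self-copy below is: remove any existing `elan` binary,
// copy the currently running executable into its place, mark it executable,
// and then recreate the tool proxies via `install_proxies()`.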
if elan_path.exists() { utils::remove_file("elan-bin", elan_path)?; } utils::copy_file(this_exe_path, elan_path)?; utils::make_executable(elan_path)?; install_proxies() } pub fn install_proxies() -> Result<()> { let ref bin_path = utils::elan_home()?.join("bin"); let ref elan_path = bin_path.join(&format!("elan{}", EXE_SUFFIX)); let elan = Handle::from_path(elan_path)?; let mut tool_handles = Vec::new(); let mut link_afterwards = Vec::new(); // Try to hardlink all the Lean exes to the elan exe. Some systems, // like Android, does not support hardlinks, so we fallback to symlinks. // // Note that this function may not be running in the context of a fresh // self update but rather as part of a normal update to fill in missing // proxies. In that case our process may actually have the `elan.exe` // file open, and on systems like Windows that means that you can't // even remove other hard links to the same file. Basically if we have // `elan.exe` open and running and `leanpkg.exe` is a hard link to that // file, we can't remove `leanpkg.exe`. // // To avoid unnecessary errors from being returned here we use the // `same-file` crate and its `Handle` type to avoid clobbering hard links // that are already valid. If a hard link already points to the // `elan.exe` file then we leave it alone and move to the next one. // // As yet one final caveat, when we're looking at handles for files we can't // actually delete files (they'll say they're deleted but they won't // actually be on Windows). As a result we manually drop all the // `tool_handles` later on. This'll allow us, afterwards, to actually // overwrite all the previous hard links with new ones. for tool in TOOLS { let tool_path = bin_path.join(&format!("{}{}", tool, EXE_SUFFIX)); if let Ok(handle) = Handle::from_path(&tool_path) { tool_handles.push(handle); if elan == *tool_handles.last().unwrap() { continue; } } link_afterwards.push(tool_path); } drop(tool_handles); for path in link_afterwards { utils::hard_or_symlink_file(elan_path, &path)?; } Ok(()) } fn maybe_install_lean(toolchain_str: &str, verbose: bool) -> Result<()> { let ref cfg = common::set_globals(verbose)?; // If there is already an install, then `toolchain_str` may not be // a toolchain the user actually wants. Don't do anything. FIXME: // This logic should be part of InstallOpts so that it isn't // possible to select a toolchain then have it not be installed. 
if toolchain_str == "none" { info!("skipping toolchain installation"); println!(""); } else if cfg.find_default()?.is_none() { let toolchain = cfg.get_toolchain(toolchain_str, false)?; let status = toolchain.install_from_dist(false)?; cfg.set_default(toolchain_str)?; println!(""); common::show_channel_update(cfg, toolchain_str, Ok(status))?; } else { info!("updating existing elan installation"); println!(""); } Ok(()) } pub fn uninstall(no_prompt: bool) -> Result<()> { if NEVER_SELF_UPDATE { err!("self-uninstall is disabled for this build of elan"); err!("you should probably use your system package manager to uninstall elan"); process::exit(1); } if cfg!(feature = "msi-installed") { // Get the product code of the MSI installer from the registry // and spawn `msiexec /x`, then exit immediately let product_code = get_msi_product_code()?; Command::new("msiexec") .arg("/x") .arg(product_code) .spawn() .chain_err(|| ErrorKind::WindowsUninstallMadness)?; process::exit(0); } let ref elan_home = utils::elan_home()?; if !elan_home.join(&format!("bin/elan{}", EXE_SUFFIX)).exists() { return Err(ErrorKind::NotSelfInstalled(elan_home.clone()).into()); } if !no_prompt { println!(""); let ref msg = format!(pre_uninstall_msg!(), elan_home = canonical_elan_home()?); term2::stdout().md(msg); if !common::confirm("\nContinue? (y/N)", false)? { info!("aborting uninstallation"); return Ok(()); } } let read_dir_err = "failure reading directory"; info!("removing leanpkg home"); // Remove ELAN_HOME/bin from PATH let ref remove_path_methods = get_remove_path_methods()?; do_remove_from_path(remove_path_methods)?; // Delete everything in ELAN_HOME *except* the elan bin // First everything except the bin directory for dirent in fs::read_dir(elan_home).chain_err(|| read_dir_err)? { let dirent = dirent.chain_err(|| read_dir_err)?; if dirent.file_name().to_str() != Some("bin") { if dirent.path().is_dir() { utils::remove_dir("elan_home", &dirent.path(), &|_| {})?; } else { utils::remove_file("elan_home", &dirent.path())?; } } } // Then everything in bin except elan and tools. These can't be unlinked // until this process exits (on windows). let tools = TOOLS.iter().map(|t| format!("{}{}", t, EXE_SUFFIX)); let tools: Vec<_> = tools.chain(vec![format!("elan{}", EXE_SUFFIX)]).collect(); for dirent in fs::read_dir(&elan_home.join("bin")).chain_err(|| read_dir_err)? { let dirent = dirent.chain_err(|| read_dir_err)?; let name = dirent.file_name(); let file_is_tool = name.to_str().map(|n| tools.iter().any(|t| *t == n)); if file_is_tool == Some(false) { if dirent.path().is_dir() { utils::remove_dir("elan_home", &dirent.path(), &|_| {})?; } else { utils::remove_file("elan_home", &dirent.path())?; } } } info!("removing elan binaries"); // Delete elan. This is tricky because this is *probably* // the running executable and on Windows can't be unlinked until // the process exits. 
delete_elan_and_elan_home()?; info!("elan is uninstalled"); process::exit(0); } #[cfg(not(feature = "msi-installed"))] fn get_msi_product_code() -> Result { unreachable!() } #[cfg(feature = "msi-installed")] fn get_msi_product_code() -> Result { use winreg::enums::{HKEY_CURRENT_USER, KEY_READ}; use winreg::RegKey; let root = RegKey::predef(HKEY_CURRENT_USER); let environment = root.open_subkey_with_flags("SOFTWARE\\elan", KEY_READ); match environment { Ok(env) => match env.get_value("InstalledProductCode") { Ok(val) => Ok(val), Err(e) => Err(e).chain_err(|| ErrorKind::WindowsUninstallMadness), }, Err(e) => Err(e).chain_err(|| ErrorKind::WindowsUninstallMadness), } } #[cfg(unix)] fn delete_elan_and_elan_home() -> Result<()> { let ref elan_home = utils::elan_home()?; utils::remove_dir("elan_home", elan_home, &|_| ())?; Ok(()) } // The last step of uninstallation is to delete *this binary*, // elan.exe and the ELAN_HOME that contains it. On Unix, this // works fine. On Windows you can't delete files while they are open, // like when they are running. // // Here's what we're going to do: // - Copy elan to a temporary file in // ELAN_HOME/../elan-gc-$random.exe. // - Open the gc exe with the FILE_FLAG_DELETE_ON_CLOSE and // FILE_SHARE_DELETE flags. This is going to be the last // file to remove, and the OS is going to do it for us. // This file is opened as inheritable so that subsequent // processes created with the option to inherit handles // will also keep them open. // - Run the gc exe, which waits for the original elan // process to close, then deletes ELAN_HOME. This process // has inherited a FILE_FLAG_DELETE_ON_CLOSE handle to itself. // - Finally, spawn yet another system binary with the inherit handles // flag, so *it* inherits the FILE_FLAG_DELETE_ON_CLOSE handle to // the gc exe. If the gc exe exits before the system exe then at // last it will be deleted when the handle closes. // // This is the DELETE_ON_CLOSE method from // http://www.catch22.net/tuts/self-deleting-executables // // ... which doesn't actually work because Windows won't really // delete a FILE_FLAG_DELETE_ON_CLOSE process when it exits. // // .. augmented with this SO answer // http://stackoverflow.com/questions/10319526/understanding-a-self-deleting-program-in-c #[cfg(windows)] fn delete_elan_and_elan_home() -> Result<()> { use std::thread; use std::time::Duration; // ELAN_HOME, hopefully empty except for bin/elan.exe let ref elan_home = utils::elan_home()?; // The elan.exe bin let ref elan_path = elan_home.join(&format!("bin/elan{}", EXE_SUFFIX)); // The directory containing ELAN_HOME let work_path = elan_home .parent() .expect("ELAN_HOME doesn't have a parent?"); // Generate a unique name for the files we're about to move out // of ELAN_HOME. let numbah: u32 = rand::random(); let gc_exe = work_path.join(&format!("elan-gc-{:x}.exe", numbah)); use std::mem; use std::os::windows::ffi::OsStrExt; use std::ptr; use winapi::shared::minwindef::DWORD; use winapi::um::fileapi::{CreateFileW, OPEN_EXISTING}; use winapi::um::handleapi::{CloseHandle, INVALID_HANDLE_VALUE}; use winapi::um::minwinbase::SECURITY_ATTRIBUTES; use winapi::um::winbase::FILE_FLAG_DELETE_ON_CLOSE; use winapi::um::winnt::{FILE_SHARE_DELETE, FILE_SHARE_READ, GENERIC_READ}; unsafe { // Copy elan (probably this process's exe) to the gc exe utils::copy_file(elan_path, &gc_exe)?; let mut gc_exe_win: Vec<_> = gc_exe.as_os_str().encode_wide().collect(); gc_exe_win.push(0); // Open an inheritable handle to the gc exe marked // FILE_FLAG_DELETE_ON_CLOSE. 
This will be inherited // by subsequent processes. let mut sa = mem::zeroed::(); sa.nLength = mem::size_of::() as DWORD; sa.bInheritHandle = 1; let gc_handle = CreateFileW( gc_exe_win.as_ptr(), GENERIC_READ, FILE_SHARE_READ | FILE_SHARE_DELETE, &mut sa, OPEN_EXISTING, FILE_FLAG_DELETE_ON_CLOSE, ptr::null_mut(), ); if gc_handle == INVALID_HANDLE_VALUE { let err = io::Error::last_os_error(); return Err(err).chain_err(|| ErrorKind::WindowsUninstallMadness); } let _g = scopeguard::guard(gc_handle, |h| { let _ = CloseHandle(h); }); Command::new(gc_exe) .spawn() .chain_err(|| ErrorKind::WindowsUninstallMadness)?; // The catch 22 article says we must sleep here to give // Windows a chance to bump the processes file reference // count. acrichto though is in disbelief and *demanded* that // we not insert a sleep. If Windows failed to uninstall // correctly it is because of him. // (.. and months later acrichto owes me a beer). thread::sleep(Duration::from_millis(100)); } Ok(()) } /// Run by elan-gc-$num.exe to delete ELAN_HOME #[cfg(windows)] pub fn complete_windows_uninstall() -> Result<()> { use std::ffi::OsStr; use std::process::Stdio; wait_for_parent()?; // Now that the parent has exited there are hopefully no more files open in ELAN_HOME let ref elan_home = utils::elan_home()?; utils::remove_dir("elan_home", elan_home, &|_| ())?; // Now, run a *system* binary to inherit the DELETE_ON_CLOSE // handle to *this* process, then exit. The OS will delete the gc // exe when it exits. let rm_gc_exe = OsStr::new("net"); Command::new(rm_gc_exe) .stdin(Stdio::null()) .stdout(Stdio::null()) .stderr(Stdio::null()) .spawn() .chain_err(|| ErrorKind::WindowsUninstallMadness)?; process::exit(0); } #[cfg(windows)] fn wait_for_parent() -> Result<()> { use std::mem; use std::ptr; use winapi::shared::minwindef::DWORD; use winapi::um::handleapi::{CloseHandle, INVALID_HANDLE_VALUE}; use winapi::um::processthreadsapi::{GetCurrentProcessId, OpenProcess}; use winapi::um::synchapi::WaitForSingleObject; use winapi::um::tlhelp32::{ CreateToolhelp32Snapshot, Process32First, Process32Next, PROCESSENTRY32, TH32CS_SNAPPROCESS, }; use winapi::um::winbase::{INFINITE, WAIT_OBJECT_0}; use winapi::um::winnt::SYNCHRONIZE; unsafe { // Take a snapshot of system processes, one of which is ours // and contains our parent's pid let snapshot = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0); if snapshot == INVALID_HANDLE_VALUE { let err = io::Error::last_os_error(); return Err(err).chain_err(|| ErrorKind::WindowsUninstallMadness); } let _g = scopeguard::guard(snapshot, |h| { let _ = CloseHandle(h); }); let mut entry: PROCESSENTRY32 = mem::zeroed(); entry.dwSize = mem::size_of::() as DWORD; // Iterate over system processes looking for ours let success = Process32First(snapshot, &mut entry); if success == 0 { let err = io::Error::last_os_error(); return Err(err).chain_err(|| ErrorKind::WindowsUninstallMadness); } let this_pid = GetCurrentProcessId(); while entry.th32ProcessID != this_pid { let success = Process32Next(snapshot, &mut entry); if success == 0 { let err = io::Error::last_os_error(); return Err(err).chain_err(|| ErrorKind::WindowsUninstallMadness); } } // FIXME: Using the process ID exposes a race condition // wherein the parent process already exited and the OS // reassigned its ID. let parent_id = entry.th32ParentProcessID; // Get a handle to the parent process let parent = OpenProcess(SYNCHRONIZE, 0, parent_id); if parent == ptr::null_mut() { // This just means the parent has already exited. 
return Ok(()); } let _g = scopeguard::guard(parent, |h| { let _ = CloseHandle(h); }); // Wait for our parent to exit let res = WaitForSingleObject(parent, INFINITE); if res != WAIT_OBJECT_0 { let err = io::Error::last_os_error(); return Err(err).chain_err(|| ErrorKind::WindowsUninstallMadness); } } Ok(()) } #[cfg(unix)] pub fn complete_windows_uninstall() -> Result<()> { panic!("stop doing that") } #[derive(PartialEq)] enum PathUpdateMethod { RcFile(PathBuf), Windows, } /// Decide which rcfiles we're going to update, so we /// can tell the user before they confirm. fn get_add_path_methods() -> Vec { if cfg!(windows) { return vec![PathUpdateMethod::Windows]; } let profile = utils::home_dir().map(|p| p.join(".profile")); let mut profiles = vec![profile]; if let Ok(shell) = env::var("SHELL") { if shell.contains("zsh") { let zdotdir = env::var("ZDOTDIR") .ok() .map(PathBuf::from) .or_else(utils::home_dir); let zprofile = zdotdir.map(|p| p.join(".zprofile")); profiles.push(zprofile); } } if let Some(bash_profile) = utils::home_dir().map(|p| p.join(".bash_profile")) { // Only update .bash_profile if it exists because creating .bash_profile // will cause .profile to not be read if bash_profile.exists() { profiles.push(Some(bash_profile)); } } let rcfiles = profiles.into_iter().filter_map(|f| f); rcfiles.map(PathUpdateMethod::RcFile).collect() } fn shell_export_string() -> Result { let path = format!("{}/bin", canonical_elan_home()?); // The path is *prepended* in case there are system-installed // lean's that need to be overridden. Ok(format!(r#"export PATH="{}:$PATH""#, path)) } #[cfg(unix)] fn do_add_to_path(methods: &[PathUpdateMethod]) -> Result<()> { for method in methods { if let PathUpdateMethod::RcFile(ref rcpath) = *method { let file = if rcpath.exists() { utils::read_file("rcfile", rcpath)? } else { String::new() }; let ref addition = format!("\n{}", shell_export_string()?); if !file.contains(addition) { utils::append_file("rcfile", rcpath, addition)?; } } else { unreachable!() } } Ok(()) } #[cfg(windows)] fn do_add_to_path(methods: &[PathUpdateMethod]) -> Result<()> { assert!(methods.len() == 1 && methods[0] == PathUpdateMethod::Windows); use std::ptr; use winapi::shared::minwindef::*; use winapi::um::winuser::{ SendMessageTimeoutA, HWND_BROADCAST, SMTO_ABORTIFHUNG, WM_SETTINGCHANGE, }; use winreg::enums::{RegType, HKEY_CURRENT_USER, KEY_READ, KEY_WRITE}; use winreg::{RegKey, RegValue}; let old_path = if let Some(s) = get_windows_path_var()? { s } else { // Non-unicode path return Ok(()); }; let mut new_path = utils::elan_home()? .join("bin") .to_string_lossy() .to_string(); if old_path.contains(&new_path) { return Ok(()); } if !old_path.is_empty() { new_path.push_str(";"); new_path.push_str(&old_path); } let root = RegKey::predef(HKEY_CURRENT_USER); let environment = root .open_subkey_with_flags("Environment", KEY_READ | KEY_WRITE) .chain_err(|| ErrorKind::PermissionDenied)?; let reg_value = RegValue { bytes: utils::string_to_winreg_bytes(&new_path), vtype: RegType::REG_EXPAND_SZ, }; environment .set_raw_value("PATH", ®_value) .chain_err(|| ErrorKind::PermissionDenied)?; // Tell other processes to update their environment unsafe { SendMessageTimeoutA( HWND_BROADCAST, WM_SETTINGCHANGE, 0 as WPARAM, "Environment\0".as_ptr() as LPARAM, SMTO_ABORTIFHUNG, 5000, ptr::null_mut(), ); } Ok(()) } // Get the windows PATH variable out of the registry as a String. If // this returns None then the PATH varible is not unicode and we // should not mess with it. 
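// Net effect, sketched for illustration: on Unix the rcfiles selected above
// gain a single line that prepends the elan bin directory (shown here for the
// default ELAN_HOME location; the exact path comes from canonical_elan_home()):
//
//     export PATH="$HOME/.elan/bin:$PATH"
//
// On Windows the same prepend is instead applied to the user PATH value under
// HKEY_CURRENT_USER\Environment via the registry helpers below.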
#[cfg(windows)] fn get_windows_path_var() -> Result> { use winreg::enums::{HKEY_CURRENT_USER, KEY_READ, KEY_WRITE}; use winreg::RegKey; let root = RegKey::predef(HKEY_CURRENT_USER); let environment = root .open_subkey_with_flags("Environment", KEY_READ | KEY_WRITE) .chain_err(|| ErrorKind::PermissionDenied)?; let reg_value = environment.get_raw_value("PATH"); match reg_value { Ok(val) => { if let Some(s) = utils::string_from_winreg_value(&val) { Ok(Some(s)) } else { warn!("the registry key HKEY_CURRENT_USER\\Environment\\PATH does not contain valid Unicode. \ Not modifying the PATH variable"); return Ok(None); } } Err(ref e) if e.kind() == io::ErrorKind::NotFound => Ok(Some(String::new())), Err(e) => Err(e).chain_err(|| ErrorKind::WindowsUninstallMadness), } } /// Decide which rcfiles we're going to update, so we /// can tell the user before they confirm. fn get_remove_path_methods() -> Result> { if cfg!(windows) { return Ok(vec![PathUpdateMethod::Windows]); } let profile = utils::home_dir().map(|p| p.join(".profile")); let bash_profile = utils::home_dir().map(|p| p.join(".bash_profile")); let rcfiles = vec![profile, bash_profile]; let existing_rcfiles = rcfiles.into_iter().filter_map(|f| f).filter(|f| f.exists()); let export_str = shell_export_string()?; let matching_rcfiles = existing_rcfiles.filter(|f| { let file = utils::read_file("rcfile", f).unwrap_or(String::new()); let ref addition = format!("\n{}", export_str); file.contains(addition) }); Ok(matching_rcfiles.map(PathUpdateMethod::RcFile).collect()) } #[cfg(windows)] fn do_remove_from_path(methods: &[PathUpdateMethod]) -> Result<()> { assert!(methods.len() == 1 && methods[0] == PathUpdateMethod::Windows); use std::ptr; use winapi::shared::minwindef::*; use winapi::um::winuser::{ SendMessageTimeoutA, HWND_BROADCAST, SMTO_ABORTIFHUNG, WM_SETTINGCHANGE, }; use winreg::enums::{RegType, HKEY_CURRENT_USER, KEY_READ, KEY_WRITE}; use winreg::{RegKey, RegValue}; let old_path = if let Some(s) = get_windows_path_var()? { s } else { // Non-unicode path return Ok(()); }; let ref path_str = utils::elan_home()? .join("bin") .to_string_lossy() .to_string(); let idx = if let Some(i) = old_path.find(path_str) { i } else { return Ok(()); }; // If there's a trailing semicolon (likely, since we added one during install), // include that in the substring to remove. 
let mut len = path_str.len(); if old_path.as_bytes().get(idx + path_str.len()) == Some(&b';') { len += 1; } let mut new_path = old_path[..idx].to_string(); new_path.push_str(&old_path[idx + len..]); let root = RegKey::predef(HKEY_CURRENT_USER); let environment = root .open_subkey_with_flags("Environment", KEY_READ | KEY_WRITE) .chain_err(|| ErrorKind::PermissionDenied)?; if new_path.is_empty() { environment .delete_value("PATH") .chain_err(|| ErrorKind::PermissionDenied)?; } else { let reg_value = RegValue { bytes: utils::string_to_winreg_bytes(&new_path), vtype: RegType::REG_EXPAND_SZ, }; environment .set_raw_value("PATH", ®_value) .chain_err(|| ErrorKind::PermissionDenied)?; } // Tell other processes to update their environment unsafe { SendMessageTimeoutA( HWND_BROADCAST, WM_SETTINGCHANGE, 0 as WPARAM, "Environment\0".as_ptr() as LPARAM, SMTO_ABORTIFHUNG, 5000, ptr::null_mut(), ); } Ok(()) } #[cfg(unix)] fn do_remove_from_path(methods: &[PathUpdateMethod]) -> Result<()> { for method in methods { if let PathUpdateMethod::RcFile(ref rcpath) = *method { let file = utils::read_file("rcfile", rcpath)?; let addition = format!("\n{}\n", shell_export_string()?); let file_bytes = file.into_bytes(); let addition_bytes = addition.into_bytes(); let idx = file_bytes .windows(addition_bytes.len()) .position(|w| w == &*addition_bytes); if let Some(i) = idx { let mut new_file_bytes = file_bytes[..i].to_vec(); new_file_bytes.extend(&file_bytes[i + addition_bytes.len()..]); let ref new_file = String::from_utf8(new_file_bytes).unwrap(); utils::write_file("rcfile", rcpath, new_file)?; } else { // Weird case. rcfile no longer needs to be modified? } } else { unreachable!() } } Ok(()) } /// Self update downloads elan-init to `ELAN_HOME`/bin/elan-init /// and runs it. /// /// It does a few things to accomodate self-delete problems on windows: /// /// elan-init is run in two stages, first with `--self-upgrade`, /// which displays update messages and asks for confirmations, etc; /// then with `--self-replace`, which replaces the elan binary and /// hardlinks. The last step is done without waiting for confirmation /// on windows so that the running exe can be deleted. /// /// Because it's again difficult for elan-init to delete itself /// (and on windows this process will not be running to do it), /// elan-init is stored in `ELAN_HOME`/bin, and then deleted next /// time elan runs. pub fn update() -> Result<()> { if NEVER_SELF_UPDATE { err!("self-update is disabled for this build of elan"); err!("you should probably use your system package manager to update elan"); process::exit(1); } let setup_path = prepare_update()?; if let Some(ref p) = setup_path { let version = match get_new_elan_version(p) { Some(new_version) => parse_new_elan_version(new_version), None => { err!("failed to get elan version"); process::exit(1); } }; info!("elan updated successfully to {}", version); run_update(p)?; } else { // Try again in case we emitted "tool `{}` is already installed" last time. install_proxies()? 
} Ok(()) } fn get_new_elan_version(path: &Path) -> Option { match Command::new(path).arg("--version").output() { Err(_) => None, Ok(output) => match String::from_utf8(output.stdout) { Ok(version) => Some(version), Err(_) => None, }, } } fn parse_new_elan_version(version: String) -> String { let re = Regex::new(r"\d+.\d+.\d+[0-9a-zA-Z-]*").unwrap(); let capture = re.captures(&version); let matched_version = match capture { Some(cap) => cap.get(0).unwrap().as_str(), None => "(unknown)", }; String::from(matched_version) } pub fn prepare_update() -> Result> { let ref elan_home = utils::elan_home()?; let ref elan_path = elan_home.join(&format!("bin/elan{}", EXE_SUFFIX)); let ref setup_path = elan_home.join(&format!("bin/elan-init{}", EXE_SUFFIX)); if !elan_path.exists() { return Err(ErrorKind::NotSelfInstalled(elan_home.clone()).into()); } if setup_path.exists() { utils::remove_file("setup", setup_path)?; } let update_root = env::var("ELAN_UPDATE_ROOT").unwrap_or(String::from(UPDATE_ROOT)); let tempdir = tempdir().chain_err(|| "error creating temp directory")?; // Get current version let current_version = env!("CARGO_PKG_VERSION"); // Download available version info!("checking for self-updates"); let tag = utils::fetch_latest_release_tag("leanprover/elan")?; let available_version = &tag[1..]; // If up-to-date if available_version == current_version { return Ok(None); } let archive_suffix = if cfg!(target_os = "windows") { ".zip" } else { ".tar.gz" }; let archive_name = format!("elan-{}{}", dist::host_triple(), archive_suffix); let archive_path = tempdir.path().join(&archive_name); // Get download URL let url = format!("{}/v{}/{}", update_root, available_version, archive_name); // Get download path let download_url = utils::parse_url(&url)?; // Download new version info!("downloading self-update"); utils::download_file(&download_url, &archive_path, None, &|_| ())?; let file = fs::File::open(archive_path)?; if cfg!(target_os = "windows") { let mut archive = zip::read::ZipArchive::new(file).chain_err(|| "failed to open zip archive")?; let mut src = archive .by_name("elan-init.exe") .chain_err(|| "failed to extract update")?; let mut dst = fs::File::create(setup_path)?; io::copy(&mut src, &mut dst)?; } else { let mut archive = tar::Archive::new(flate2::read::GzDecoder::new(file)); archive.unpack(elan_home.join("bin"))?; } // Mark as executable utils::make_executable(setup_path)?; Ok(Some(setup_path.to_owned())) } /// Tell the upgrader to replace the elan bins, then delete /// itself. Like with uninstallation, on Windows we're going to /// have to jump through hoops to make everything work right. /// /// On windows we're not going to wait for it to finish before exiting /// successfully, so it should not do much, and it should try /// really hard to succeed, because at this point the upgrade is /// considered successful. #[cfg(unix)] pub fn run_update(setup_path: &Path) -> Result<()> { let status = Command::new(setup_path) .arg("--self-replace") .status() .chain_err(|| "unable to run updater")?; if !status.success() { return Err("self-updated failed to replace elan executable".into()); } process::exit(0); } #[cfg(windows)] pub fn run_update(setup_path: &Path) -> Result<()> { Command::new(setup_path) .arg("--self-replace") .spawn() .chain_err(|| "unable to run updater")?; process::exit(0); } /// This function is as the final step of a self-upgrade. It replaces /// `ELAN_HOME`/bin/elan with the running exe, and updates the the /// links to it. 
On windows this will run *after* the original /// elan process exits. #[cfg(unix)] pub fn self_replace() -> Result<()> { install_bins()?; Ok(()) } #[cfg(windows)] pub fn self_replace() -> Result<()> { wait_for_parent()?; install_bins()?; Ok(()) } pub fn cleanup_self_updater() -> Result<()> { let elan_home = utils::elan_home()?; let ref setup = elan_home.join(&format!("bin/elan-init{}", EXE_SUFFIX)); if setup.exists() { utils::remove_file("setup", setup)?; } // Transitional let ref old_setup = elan_home.join(&format!("bin/multilean-setup{}", EXE_SUFFIX)); if old_setup.exists() { utils::remove_file("setup", old_setup)?; } Ok(()) } elan-1.3.1/src/elan-cli/setup_mode.rs000066400000000000000000000034611414005346400174250ustar00rootroot00000000000000use clap::{App, AppSettings, Arg}; use common; use errors::*; use self_update::{self, InstallOpts}; use std::env; pub fn main() -> Result<()> { let args: Vec<_> = env::args().collect(); let arg1 = args.get(1).map(|a| &**a); // Secret command used during self-update. Not for users. if arg1 == Some("--self-replace") { return self_update::self_replace(); } // XXX: If you change anything here, please make the same changes in elan-init.sh let cli = App::new("elan-init") .version(common::version()) .about("The installer for elan") .setting(AppSettings::DeriveDisplayOrder) .arg( Arg::with_name("verbose") .short("v") .long("verbose") .help("Enable verbose output"), ) .arg( Arg::with_name("no-prompt") .short("y") .help("Disable confirmation prompt."), ) .arg( Arg::with_name("default-toolchain") .long("default-toolchain") .takes_value(true) .help("Choose a default toolchain to install"), ) .arg( Arg::with_name("no-modify-path") .long("no-modify-path") .help("Don't configure the PATH environment variable"), ); let matches = cli.get_matches(); let no_prompt = matches.is_present("no-prompt"); let verbose = matches.is_present("verbose"); let default_toolchain = matches.value_of("default-toolchain").unwrap_or("stable"); let no_modify_path = matches.is_present("no-modify-path"); let opts = InstallOpts { default_toolchain: default_toolchain.to_owned(), no_modify_path: no_modify_path, }; self_update::install(no_prompt, verbose, opts)?; Ok(()) } elan-1.3.1/src/elan-cli/term2.rs000066400000000000000000000201501414005346400163040ustar00rootroot00000000000000//! This provides wrappers around the `StdoutTerminal` and `StderrTerminal` types //! that does not fail if `StdoutTerminal` etc can't be constructed, which happens //! if TERM isn't defined. 
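// Usage sketch (hypothetical caller, not taken from the original sources):
//
//     let mut t = term2::stdout();               // never fails, even when TERM is unset
//     let _ = t.fg(term2::color::BRIGHT_GREEN);
//     let _ = write!(t, "info: ");
//     let _ = t.reset();
//     t.md("**markdown** text is rendered with simple terminal attributes");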
use elan_utils::tty; use markdown::tokenize; use markdown::{Block, ListItem, Span}; use std::io; use term; pub use term::color; pub use term::Attr; pub trait Instantiable { fn instance() -> Self; } impl Instantiable for io::Stdout { fn instance() -> Self { io::stdout() } } impl Instantiable for io::Stderr { fn instance() -> Self { io::stderr() } } pub trait Isatty { fn isatty() -> bool; } impl Isatty for io::Stdout { fn isatty() -> bool { tty::stdout_isatty() } } impl Isatty for io::Stderr { fn isatty() -> bool { tty::stderr_isatty() } } pub struct Terminal(Option + Send>>) where T: Instantiable + Isatty + io::Write; pub type StdoutTerminal = Terminal; pub type StderrTerminal = Terminal; pub fn stdout() -> StdoutTerminal { Terminal(term::stdout()) } pub fn stderr() -> StderrTerminal { Terminal(term::stderr()) } // Handles the wrapping of text written to the console struct LineWrapper<'a, T: io::Write + 'a> { indent: u32, margin: u32, pos: u32, pub w: &'a mut T, } impl<'a, T: io::Write + 'a> LineWrapper<'a, T> { // Just write a newline fn write_line(&mut self) { let _ = writeln!(self.w, ""); // Reset column position to start of line self.pos = 0; } // Called before writing text to ensure indent is applied fn write_indent(&mut self) { if self.pos == 0 { // Write a space for each level of indent for _ in 0..self.indent { let _ = write!(self.w, " "); } self.pos = self.indent; } } // Write a non-breaking word fn write_word(&mut self, word: &str) { // Ensure correct indentation self.write_indent(); let word_len = word.len() as u32; // If this word goes past the margin if self.pos + word_len > self.margin { // And adding a newline would give us more space if self.pos > self.indent { // Then add a newline! self.write_line(); self.write_indent(); } } // Write the word let _ = write!(self.w, "{}", word); self.pos += word_len; } fn write_space(&mut self) { if self.pos > self.indent { if self.pos < self.margin { self.write_word(" "); } else { self.write_line(); } } } // Writes a span of text which wraps at the margin fn write_span(&mut self, text: &str) { // Allow words to wrap on whitespace let mut is_first = true; for word in text.split(char::is_whitespace) { if is_first { is_first = false; } else { self.write_space(); } self.write_word(word); } } // Constructor fn new(w: &'a mut T, indent: u32, margin: u32) -> Self { LineWrapper { indent: indent, margin: margin, pos: indent, w: w, } } } // Handles the formatting of text struct LineFormatter<'a, T: Instantiable + Isatty + io::Write + 'a> { wrapper: LineWrapper<'a, Terminal>, attrs: Vec, } impl<'a, T: Instantiable + Isatty + io::Write + 'a> LineFormatter<'a, T> { fn new(w: &'a mut Terminal, indent: u32, margin: u32) -> Self { LineFormatter { wrapper: LineWrapper::new(w, indent, margin), attrs: Vec::new(), } } fn push_attr(&mut self, attr: Attr) { self.attrs.push(attr); let _ = self.wrapper.w.attr(attr); } fn pop_attr(&mut self) { self.attrs.pop(); let _ = self.wrapper.w.reset(); for attr in &self.attrs { let _ = self.wrapper.w.attr(*attr); } } fn do_spans(&mut self, spans: Vec) { for span in spans { match span { Span::Break => {} Span::Text(text) => { self.wrapper.write_span(&text); } Span::Code(code) => { self.push_attr(Attr::Bold); self.wrapper.write_word(&code); self.pop_attr(); } Span::Emphasis(spans) => { self.push_attr(Attr::ForegroundColor(color::BRIGHT_RED)); self.do_spans(spans); self.pop_attr(); } _ => {} } } } fn do_block(&mut self, b: Block) { match b { Block::Header(spans, _) => { self.push_attr(Attr::Bold); self.wrapper.write_line(); 
self.do_spans(spans); self.wrapper.write_line(); self.pop_attr(); } Block::CodeBlock(_, code) => { self.wrapper.write_line(); self.wrapper.indent += 2; for line in code.lines() { // Don't word-wrap code lines self.wrapper.write_word(line); self.wrapper.write_line(); } self.wrapper.indent -= 2; } Block::Paragraph(spans) => { self.wrapper.write_line(); self.do_spans(spans); self.wrapper.write_line(); } Block::UnorderedList(items) => { self.wrapper.write_line(); for item in items { self.wrapper.indent += 2; match item { ListItem::Simple(spans) => { self.do_spans(spans); } ListItem::Paragraph(blocks) => { for block in blocks { self.do_block(block); } } } self.wrapper.write_line(); self.wrapper.indent -= 2; } } _ => {} } } } impl io::Write for Terminal { fn write(&mut self, buf: &[u8]) -> Result { if let Some(ref mut t) = self.0 { t.write(buf) } else { let mut t = T::instance(); t.write(buf) } } fn flush(&mut self) -> Result<(), io::Error> { if let Some(ref mut t) = self.0 { t.flush() } else { let mut t = T::instance(); t.flush() } } } impl Terminal { pub fn fg(&mut self, color: color::Color) -> Result<(), term::Error> { if !T::isatty() { return Ok(()); } if let Some(ref mut t) = self.0 { t.fg(color) } else { Ok(()) } } pub fn attr(&mut self, attr: Attr) -> Result<(), term::Error> { if !T::isatty() { return Ok(()); } if let Some(ref mut t) = self.0 { if let Err(e) = t.attr(attr) { // If `attr` is not supported, try to emulate it match attr { Attr::Bold => t.fg(color::BRIGHT_WHITE), _ => Err(e), } } else { Ok(()) } } else { Ok(()) } } pub fn reset(&mut self) -> Result<(), term::Error> { if !T::isatty() { return Ok(()); } if let Some(ref mut t) = self.0 { t.reset() } else { Ok(()) } } pub fn md>(&mut self, content: S) { let mut f = LineFormatter::new(self, 0, 79); let blocks = tokenize(content.as_ref()); for b in blocks { f.do_block(b); } } } elan-1.3.1/src/elan-dist/000077500000000000000000000000001414005346400151035ustar00rootroot00000000000000elan-1.3.1/src/elan-dist/Cargo.toml000066400000000000000000000013771414005346400170430ustar00rootroot00000000000000[package] name = "elan-dist" version = "1.11.0" authors = [ "Sebastian Ullrich " ] description = "Installation from a Lean distribution server" build = "build.rs" license = "MIT OR Apache-2.0" [dependencies] regex = "1.4.3" itertools = "0.10.0" ole32-sys = "0.2.0" url = "2.2.1" tar = "0.4.33" flate2 = "1.0.14" zstd = "0.9" walkdir = "2.3.1" toml = "0.5.8" sha2 = "0.9.3" remove_dir_all = "0.7.0" elan-utils = { path = "../elan-utils" } error-chain = "0.12.4" json = "0.12.4" zip = "0.5.13" filetime = "0.2.14" [target."cfg(windows)".dependencies] winapi = { version = "0.3.9", features = ["handleapi", "sysinfoapi", "tlhelp32", "winnt"] } winreg = "0.8.0" [target."cfg(not(windows))".dependencies] libc = "0.2.88" [lib] name = "elan_dist" elan-1.3.1/src/elan-dist/build.rs000066400000000000000000000006561414005346400165570ustar00rootroot00000000000000use std::env; use std::fs::File; use std::io::Write; use std::path::PathBuf; fn main() { let out_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap()); let target = env::var("RELEASE_TARGET_NAME") .or(env::var("TARGET")) .unwrap(); File::create(out_dir.join("target.txt")) .unwrap() .write_all(target.as_bytes()) .unwrap(); println!("leanpkg:rerun-if-changed=build.rs"); } 
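// Cross-reference sketch: the target.txt written by the build script above is
// baked into the library at compile time; dist.rs (below) exposes it as
//
//     pub fn host_triple() -> &'static str {
//         include_str!(concat!(env!("OUT_DIR"), "/target.txt"))
//     }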
elan-1.3.1/src/elan-dist/src/000077500000000000000000000000001414005346400156725ustar00rootroot00000000000000elan-1.3.1/src/elan-dist/src/component/000077500000000000000000000000001414005346400176745ustar00rootroot00000000000000elan-1.3.1/src/elan-dist/src/component/mod.rs000066400000000000000000000003441414005346400210220ustar00rootroot00000000000000/// An interpreter for the lean-installer [1] installation format. /// /// https://github.com/rust-lang/rust-installer pub use self::package::*; // The representation of a package, its components, and installation mod package; elan-1.3.1/src/elan-dist/src/component/package.rs000066400000000000000000000130151414005346400216350ustar00rootroot00000000000000//! An interpreter for the lean-installer package format. Responsible //! for installing from a directory or tarball to an installation //! prefix, represented by a `Components` instance. extern crate filetime; extern crate flate2; extern crate zstd; extern crate tar; use errors::*; use temp; use std::fs::{self, File}; use std::io::{self, Read, Seek}; use std::path::{Path, PathBuf}; use zip::ZipArchive; #[derive(Debug)] pub struct TarPackage<'a>(temp::Dir<'a>); impl<'a> TarPackage<'a> { pub fn unpack(stream: R, path: &Path) -> Result<()> { let mut archive = tar::Archive::new(stream); // The lean-installer packages unpack to a directory called // $pkgname-$version-$target. Skip that directory when // unpacking. unpack_without_first_dir(&mut archive, path) } } fn unpack_without_first_dir(archive: &mut tar::Archive, path: &Path) -> Result<()> { let entries = archive .entries() .chain_err(|| ErrorKind::ExtractingPackage)?; for entry in entries { let mut entry = entry.chain_err(|| ErrorKind::ExtractingPackage)?; let relpath = { let path = entry.path(); let path = path.chain_err(|| ErrorKind::ExtractingPackage)?; path.into_owned() }; let mut components = relpath.components(); // Throw away the first path component components.next(); let full_path = path.join(&components.as_path()); // Create the full path to the entry if it does not exist already match full_path.parent() { Some(parent) if !parent.exists() => { ::std::fs::create_dir_all(&parent).chain_err(|| ErrorKind::ExtractingPackage)? } _ => (), }; entry .unpack(&full_path) .chain_err(|| ErrorKind::ExtractingPackage)?; } Ok(()) } #[derive(Debug)] pub struct ZipPackage<'a>(temp::Dir<'a>); impl<'a> ZipPackage<'a> { pub fn unpack(stream: R, path: &Path) -> Result<()> { let mut archive = ZipArchive::new(stream).chain_err(|| ErrorKind::ExtractingPackage)?; /* let mut src = archive.by_name("elan-init.exe").chain_err(|| "failed to extract update")?; let mut dst = fs::File::create(setup_path)?; io::copy(&mut src, &mut dst)?; */ // The lean-installer packages unpack to a directory called // $pkgname-$version-$target. Skip that directory when // unpacking. 
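// Illustrative sketch of the rewrite performed by unpack_without_first_dir
// (archive and toolchain names here are hypothetical):
//
//     lean-4.0.0-linux/bin/lean      ->  <toolchain dir>/bin/lean
//     lean-4.0.0-linux/lib/lean/...  ->  <toolchain dir>/lib/lean/...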
Self::unpack_without_first_dir(&mut archive, &path) } pub fn unpack_file(path: &Path, into: &Path) -> Result<()> { let file = File::open(path).chain_err(|| ErrorKind::ExtractingPackage)?; Self::unpack(file, into) } fn unpack_without_first_dir( archive: &mut ZipArchive, path: &Path, ) -> Result<()> { for i in 0..archive.len() { let mut entry = archive .by_index(i) .chain_err(|| ErrorKind::ExtractingPackage)?; if entry.name().ends_with('/') { continue; // skip directories } let relpath = PathBuf::from(entry.name()); let mut components = relpath.components(); // Throw away the first path component components.next(); let full_path = path.join(&components.as_path()); // Create the full path to the entry if it does not exist already match full_path.parent() { Some(parent) if !parent.exists() => { fs::create_dir_all(&parent).chain_err(|| ErrorKind::ExtractingPackage)? } _ => (), }; { let mut dst = File::create(&full_path).chain_err(|| ErrorKind::ExtractingPackage)?; io::copy(&mut entry, &mut dst).chain_err(|| ErrorKind::ExtractingPackage)?; #[cfg(unix)] { use std::os::unix::fs::PermissionsExt; if let Some(mode) = entry.unix_mode() { let mut ro_mode = fs::Permissions::from_mode(mode); ro_mode.set_readonly(true); fs::set_permissions(&full_path, ro_mode).unwrap(); } } } // make sure to close `dst` before setting mtime let mtime = entry.last_modified().to_time().to_timespec(); let mtime = filetime::FileTime::from_unix_time(mtime.sec, mtime.nsec as u32); filetime::set_file_times(&full_path, mtime, mtime).unwrap(); } Ok(()) } } #[derive(Debug)] pub struct TarGzPackage<'a>(TarPackage<'a>); impl<'a> TarGzPackage<'a> { pub fn unpack(stream: R, path: &Path) -> Result<()> { let stream = flate2::read::GzDecoder::new(stream); TarPackage::unpack(stream, path) } pub fn unpack_file(path: &Path, into: &Path) -> Result<()> { let file = File::open(path).chain_err(|| ErrorKind::ExtractingPackage)?; Self::unpack(file, into) } } #[derive(Debug)] pub struct TarZstdPackage<'a>(TarPackage<'a>); impl<'a> TarZstdPackage<'a> { pub fn unpack(stream: R, path: &Path) -> Result<()> { let stream = zstd::stream::read::Decoder::new(stream)?; TarPackage::unpack(stream, path) } pub fn unpack_file(path: &Path, into: &Path) -> Result<()> { let file = File::open(path).chain_err(|| ErrorKind::ExtractingPackage)?; Self::unpack(file, into) } } elan-1.3.1/src/elan-dist/src/config.rs000066400000000000000000000046171414005346400175150ustar00rootroot00000000000000use toml; use super::manifest::Component; use elan_utils::toml_utils::*; use errors::*; pub const SUPPORTED_CONFIG_VERSIONS: [&'static str; 1] = ["1"]; pub const DEFAULT_CONFIG_VERSION: &'static str = "1"; #[derive(Clone, Debug)] pub struct Config { pub config_version: String, pub components: Vec, } impl Config { pub fn from_toml(mut table: toml::value::Table, path: &str) -> Result { let version = get_string(&mut table, "config_version", path)?; if !SUPPORTED_CONFIG_VERSIONS.contains(&&*version) { return Err(ErrorKind::UnsupportedVersion(version).into()); } let components = get_array(&mut table, "components", path)?; let components = Self::toml_to_components(components, &format!("{}{}.", path, "components"))?; Ok(Config { config_version: version, components: components, }) } pub fn to_toml(self) -> toml::value::Table { let mut result = toml::value::Table::new(); result.insert( "config_version".to_owned(), toml::Value::String(self.config_version), ); let components = Self::components_to_toml(self.components); if !components.is_empty() { result.insert("components".to_owned(), 
toml::Value::Array(components)); } result } pub fn parse(data: &str) -> Result { let value = toml::from_str(data).map_err(ErrorKind::Parsing)?; Self::from_toml(value, "") } pub fn stringify(self) -> String { toml::Value::Table(self.to_toml()).to_string() } fn toml_to_components(arr: toml::value::Array, path: &str) -> Result> { let mut result = Vec::new(); for (i, v) in arr.into_iter().enumerate() { if let toml::Value::Table(t) = v { let path = format!("{}[{}]", path, i); result.push(Component::from_toml(t, &path)?); } } Ok(result) } fn components_to_toml(components: Vec) -> toml::value::Array { let mut result = toml::value::Array::new(); for v in components { result.push(toml::Value::Table(v.to_toml())); } result } pub fn new() -> Self { Config { config_version: DEFAULT_CONFIG_VERSION.to_owned(), components: Vec::new(), } } } elan-1.3.1/src/elan-dist/src/dist.rs000066400000000000000000000200361414005346400172040ustar00rootroot00000000000000use download::DownloadCfg; use elan_utils::{self, utils}; use errors::*; use manifest::Component; use manifestation::Manifestation; use notifications::Notification; use prefix::InstallPrefix; use temp; use std::fmt; use std::path::Path; use regex::Regex; // fetch latest versions from leanprover-community, but don't break pinned origin-less versions const DEFAULT_ORIGIN: &str = "leanprover/lean"; const DEFAULT_CHANNEL_ORIGIN: &str = "leanprover-community/lean"; // Fully-resolved toolchain descriptors. These always have full target // triples attached to them and are used for canonical identification, // such as naming their installation directory. #[derive(Debug, Clone)] pub struct ToolchainDesc { // The GitHub source repository to use (if "nightly" is specified, we append "-nightly" to this) // If None, we default to "leanprover/lean" pub origin: Option, // Either "nightly", "stable", an explicit version number, or a tag name pub channel: String, pub date: Option, } impl ToolchainDesc { pub fn from_str(name: &str) -> Result { let pattern = r"^(?:([a-zA-Z0-9-]+[/][a-zA-Z0-9-]+)[:])?(?:(nightly|stable)(?:-(\d{4}-\d{2}-\d{2}))?|([a-zA-Z0-9-.]+))$"; let re = Regex::new(&pattern).unwrap(); re.captures(name) .map(|c| { fn fn_map(s: &str) -> Option { if s == "" { None } else { Some(s.to_owned()) } } ToolchainDesc { origin: c.get(1).map(|s| s.as_str()).and_then(fn_map), channel: c.get(2).or(c.get(4)).unwrap().as_str().to_owned(), date: c.get(3).map(|s| s.as_str()).and_then(fn_map), } }) .ok_or(ErrorKind::InvalidToolchainName(name.to_string()).into()) } /// Either "$channel" or "channel-$date" pub fn manifest_name(&self) -> String { match self.date { None => self.channel.clone(), Some(ref date) => format!("{}-{}", self.channel, date), } } pub fn package_dir(&self, dist_root: &str) -> String { match self.date { None => format!("{}", dist_root), Some(ref date) => format!("{}/{}", dist_root, date), } } pub fn full_spec(&self) -> String { if self.date.is_some() { format!("{}", self) } else { format!("{} (tracking)", self) } } pub fn is_tracking(&self) -> bool { let channels = ["nightly", "stable"]; channels.iter().any(|x| *x == self.channel) && self.date.is_none() } } #[derive(Debug)] pub struct Manifest<'a>(temp::File<'a>, String); impl fmt::Display for ToolchainDesc { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Some(ref origin) = self.origin { write!(f, "{}-", str::replace(origin, "/", "-"))?; } write!(f, "{}", &self.channel)?; if let Some(ref date) = self.date { write!(f, "-{}", date)?; } Ok(()) } } // Installs or updates a toolchain from a dist 
server. If an initial // install then it will be installed with the default components. If // an upgrade then all the existing components will be upgraded. // // Returns the manifest's hash if anything changed. pub fn update_from_dist<'a>( download: DownloadCfg<'a>, update_hash: Option<&Path>, toolchain: &ToolchainDesc, prefix: &InstallPrefix, add: &[Component], remove: &[Component], force_update: bool, ) -> Result> { let fresh_install = !prefix.path().exists(); let res = update_from_dist_( download, update_hash, toolchain, prefix, add, remove, force_update, ); // Don't leave behind an empty / broken installation directory if res.is_err() && fresh_install { // FIXME Ignoring cascading errors let _ = utils::remove_dir("toolchain", prefix.path(), &|n| { (download.notify_handler)(n.into()) }); } res } //Append "-nightly" to the origin if version == "nightly" was specified. //If origin is None use DEFAULT_ORIGIN. fn build_origin_name(origin: Option<&String>, version: &str) -> String { let repo = match origin { None => { if version == "stable" || version == "nightly" { DEFAULT_CHANNEL_ORIGIN } else { DEFAULT_ORIGIN } } Some(repo) => repo, }; format!( "{}{}", repo, if version == "nightly" { "-nightly" } else { "" } ) } pub fn update_from_dist_<'a>( download: DownloadCfg<'a>, update_hash: Option<&Path>, toolchain: &ToolchainDesc, prefix: &InstallPrefix, _add: &[Component], _remove: &[Component], _force_update: bool, ) -> Result> { let toolchain_str = toolchain.to_string(); let manifestation = Manifestation::open(prefix.clone())?; let url = match toolchain_url(download, toolchain) { Ok(url) => url, Err(Error(ErrorKind::Utils(elan_utils::ErrorKind::DownloadNotExists { .. }), _)) => { return Err(format!("no release found for '{}'", toolchain.manifest_name()).into()); } Err(e @ Error(ErrorKind::ChecksumFailed { .. }, _)) => { return Err(e); } Err(e) => { return Err(e).chain_err(|| { format!( "failed to resolve latest version of '{}'", toolchain.manifest_name() ) }); } }; if let Some(hash_file) = update_hash { if utils::is_file(hash_file) { if let Ok(contents) = utils::read_file("update hash", hash_file) { if contents == url { // Skip download, url matches return Ok(None); } } /*else { (self.notify_handler)(Notification::CantReadUpdateHash(hash_file)); }*/ } /*else { (self.notify_handler)(Notification::NoUpdateHash(hash_file)); }*/ } match manifestation.update( &build_origin_name(toolchain.origin.as_ref(), &toolchain.channel), &url, &download.temp_cfg, download.notify_handler.clone(), ) { Ok(()) => Ok(()), e @ Err(Error(ErrorKind::Utils(elan_utils::ErrorKind::DownloadNotExists { .. 
}), _)) => e .chain_err(|| { format!( "could not download nonexistent lean version `{}`", toolchain_str ) }), Err(e) => Err(e), } .map(|()| Some(url)) } fn toolchain_url<'a>(download: DownloadCfg<'a>, toolchain: &ToolchainDesc) -> Result { let origin = build_origin_name(toolchain.origin.as_ref(), toolchain.channel.as_ref()); Ok( match (toolchain.date.as_ref(), toolchain.channel.as_str()) { (None, version) if version == "stable" || version == "nightly" => { (download.notify_handler)(Notification::DownloadingManifest(version)); let release = utils::fetch_latest_release_tag(&origin)?; (download.notify_handler)(Notification::DownloadedManifest( version, Some(&release), )); format!("https://github.com/{}/releases/tag/{}", origin, release) } (Some(date), "nightly") => format!( "https://github.com/{}/releases/tag/nightly-{}", origin, date ), (None, version) if version.starts_with(|c: char| c.is_numeric()) => { format!("https://github.com/{}/releases/tag/v{}", origin, version) } (None, tag) => format!("https://github.com/{}/releases/tag/{}", origin, tag), _ => panic!("wat"), }, ) } pub fn host_triple() -> &'static str { include_str!(concat!(env!("OUT_DIR"), "/target.txt")) } elan-1.3.1/src/elan-dist/src/download.rs000066400000000000000000000075211414005346400200540ustar00rootroot00000000000000use elan_utils::utils; use errors::*; use notifications::*; use sha2::{Digest, Sha256}; use temp; use url::Url; use std::fs; use std::ops; use std::path::{Path, PathBuf}; const _UPDATE_HASH_LEN: usize = 20; #[derive(Copy, Clone)] pub struct DownloadCfg<'a> { pub temp_cfg: &'a temp::Cfg, pub download_dir: &'a PathBuf, pub notify_handler: &'a dyn Fn(Notification), } pub struct File { path: PathBuf, } impl ops::Deref for File { type Target = Path; fn deref(&self) -> &Path { ops::Deref::deref(&self.path) } } impl<'a> DownloadCfg<'a> { /// Downloads a file, validating its hash, and resuming interrupted downloads /// Partial downloads are stored in `self.download_dir`, keyed by hash. If the /// target file already exists, then the hash is checked and it is returned /// immediately without re-downloading. 
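// Caller sketch (hypothetical; variable names are illustrative, not from the
// sources):
//
//     let file = dlcfg.download(&url, expected_sha256)?; // reuses the cached copy if its hash matches
//     let data = std::fs::read(&*file)?;                 // the returned `File` derefs to `Path`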
pub fn download(&self, url: &Url, hash: &str) -> Result { utils::ensure_dir_exists("Download Directory", &self.download_dir, &|n| { (self.notify_handler)(n.into()) })?; let target_file = self.download_dir.join(Path::new(hash)); if target_file.exists() { let cached_result = file_hash(&target_file)?; if hash == cached_result { (self.notify_handler)(Notification::FileAlreadyDownloaded); (self.notify_handler)(Notification::ChecksumValid(&url.to_string())); return Ok(File { path: target_file }); } else { (self.notify_handler)(Notification::CachedFileChecksumFailed); fs::remove_file(&target_file).chain_err(|| "cleaning up previous download")?; } } let partial_file_path = target_file.with_file_name( target_file .file_name() .map(|s| s.to_str().unwrap_or("_")) .unwrap_or("_") .to_owned() + ".partial", ); let mut hasher = Sha256::new(); utils::download_file_with_resume( &url, &partial_file_path, Some(&mut hasher), true, &|n| (self.notify_handler)(n.into()), )?; let actual_hash = format!("{:x}", hasher.finalize()); if hash != actual_hash { // Incorrect hash return Err(ErrorKind::ChecksumFailed { url: url.to_string(), expected: hash.to_string(), calculated: actual_hash, } .into()); } else { (self.notify_handler)(Notification::ChecksumValid(&url.to_string())); fs::rename(&partial_file_path, &target_file)?; return Ok(File { path: target_file }); } } pub fn clean(&self, hashes: &Vec) -> Result<()> { for hash in hashes.iter() { let used_file = self.download_dir.join(hash); if self.download_dir.join(&used_file).exists() { fs::remove_file(used_file).chain_err(|| "cleaning up cached downloads")?; } } Ok(()) } pub fn download_and_check(&self, url_str: &str) -> Result> { let url = utils::parse_url(url_str)?; let file = self.temp_cfg.new_file()?; utils::download_file(&url, &file, None, &|n| (self.notify_handler)(n.into()))?; Ok(file) } } fn file_hash(path: &Path) -> Result { let mut hasher = Sha256::new(); use std::io::Read; let mut downloaded = fs::File::open(&path).chain_err(|| "opening already downloaded file")?; let mut buf = vec![0; 32768]; loop { if let Ok(n) = downloaded.read(&mut buf) { if n == 0 { break; } hasher.update(&buf[..n]); } else { break; } } Ok(format!("{:x}", hasher.finalize())) } elan-1.3.1/src/elan-dist/src/errors.rs000066400000000000000000000103421414005346400175540ustar00rootroot00000000000000use elan_utils; use manifest::Component; use std::io::{self, Write}; use std::path::PathBuf; use temp; use toml; error_chain! 
{ links { Utils(elan_utils::Error, elan_utils::ErrorKind); } foreign_links { Temp(temp::Error); Io(io::Error); } errors { InvalidToolchainName(t: String) { description("invalid toolchain name") display("invalid toolchain name: '{}'", t) } ChecksumFailed { url: String, expected: String, calculated: String, } { description("checksum failed") display("checksum failed, expected: '{}', calculated: '{}'", expected, calculated) } ComponentConflict { name: String, path: PathBuf, } { description("conflicting component") display("failed to install component: '{}', detected conflict: '{:?}'", name, path) } ComponentMissingFile { name: String, path: PathBuf, } { description("missing file in component") display("failure removing component '{}', file does not exist: '{:?}'", name, path) } ComponentMissingDir { name: String, path: PathBuf, } { description("missing directory in component") display("failure removing component '{}', directory does not exist: '{:?}'", name, path) } CorruptComponent(name: String) { description("corrupt component manifest") display("component manifest for '{}' is corrupt", name) } ExtractingPackage { description("failed to extract package") } BadInstallerVersion(v: String) { description("unsupported installer version") display("unsupported installer version: {}", v) } BadInstalledMetadataVersion(v: String) { description("unsupported metadata version in existing installation") display("unsupported metadata version in existing installation: {}", v) } ComponentDirPermissionsFailed { description("I/O error walking directory during install") } ComponentFilePermissionsFailed { description("error setting file permissions during install") } ComponentDownloadFailed(c: Component) { description("component download failed") display("component download failed for {}", c.pkg) } Parsing(e: toml::de::Error) { description("error parsing manifest") } UnsupportedVersion(v: String) { description("unsupported manifest version") display("manifest version '{}' is not supported", v) } MissingPackageForComponent(c: Component) { description("missing package for component") display("server sent a broken manifest: missing package for component {}", c.name()) } MissingPackageForRename(name: String) { description("missing package for the target of a rename") display("server sent a broken manifest: missing package for the target of a rename {}", name) } RequestedComponentsUnavailable(c: Vec) { description("some requested components are unavailable to download") display("{}", component_unavailable_msg(&c)) } } } fn component_unavailable_msg(cs: &[Component]) -> String { assert!(!cs.is_empty()); let mut buf = vec![]; fn format_component(c: &Component) -> String { format!("'{}'", c.pkg) } if cs.len() == 1 { let _ = write!( buf, "component {} is unavailable for download", format_component(&cs[0]) ); } else { use itertools::Itertools; let mut cs_strs = cs.iter().map(|c| format!("'{}'", c.pkg)); let cs_str = cs_strs.join(", "); let _ = write!(buf, "some components unavailable for download: {}", cs_str); } String::from_utf8(buf).expect("") } elan-1.3.1/src/elan-dist/src/lib.rs000066400000000000000000000011731414005346400170100ustar00rootroot00000000000000#![recursion_limit = "1024"] extern crate elan_utils; extern crate flate2; extern crate itertools; extern crate regex; extern crate tar; extern crate toml; extern crate url; extern crate walkdir; #[macro_use] extern crate error_chain; extern crate json; extern crate sha2; extern crate zip; #[cfg(not(windows))] extern crate libc; #[cfg(windows)] extern crate 
winapi; #[cfg(windows)] extern crate winreg; pub use errors::*; pub use notifications::Notification; pub mod temp; mod component; pub mod config; pub mod dist; pub mod download; pub mod errors; pub mod manifest; mod manifestation; pub mod notifications; pub mod prefix; elan-1.3.1/src/elan-dist/src/manifest.rs000066400000000000000000000023011414005346400200420ustar00rootroot00000000000000//! Lean distribution v2 manifests. //! //! This manifest describes the distributable artifacts for a single //! release of Lean. They are toml files, typically downloaded from //! e.g. static.lean-lang.org/dist/channel-lean-nightly.toml. They //! describe where to download, for all platforms, each component of //! the a release, and their relationships to each other. //! //! Installers use this info to customize Lean installations. //! //! See tests/channel-lean-nightly-example.toml for an example. use elan_utils::toml_utils::*; use errors::*; use toml; #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct Component { pub pkg: String, } impl Component { pub fn from_toml(mut table: toml::value::Table, path: &str) -> Result { Ok(Component { pkg: get_string(&mut table, "pkg", path)?, }) } pub fn to_toml(self) -> toml::value::Table { let mut result = toml::value::Table::new(); result.insert("pkg".to_owned(), toml::Value::String(self.pkg)); result } pub fn name(&self) -> String { format!("{}", self.pkg) } pub fn description(&self) -> String { format!("'{}'", self.pkg) } } elan-1.3.1/src/elan-dist/src/manifestation.rs000066400000000000000000000065411414005346400211070ustar00rootroot00000000000000//! Manifest a particular Lean version by installing it from a distribution server. use component::{TarGzPackage, TarZstdPackage, ZipPackage}; use download::DownloadCfg; use elan_utils::utils; use errors::*; use notifications::*; use prefix::InstallPrefix; use temp; #[derive(Debug)] pub struct Manifestation { prefix: InstallPrefix, } impl Manifestation { pub fn open(prefix: InstallPrefix) -> Result { Ok(Manifestation { prefix }) } /// Installation using the legacy v1 manifest format pub fn update( &self, origin: &String, url: &String, temp_cfg: &temp::Cfg, notify_handler: &dyn Fn(Notification), ) -> Result<()> { notify_handler(Notification::DownloadingComponent("lean")); use std::path::PathBuf; let dld_dir = PathBuf::from("bogus"); let dlcfg = DownloadCfg { download_dir: &dld_dir, temp_cfg: temp_cfg, notify_handler: notify_handler, }; // find correct download on HTML page (AAAAH) use regex::Regex; use std::fs; use std::io::Read; let informal_target = if cfg!(target_os = "windows") { "windows" } else if cfg!(target_os = "linux") { "linux" } else if cfg!(target_os = "macos") { "darwin" } else { unreachable!() }; let informal_target = informal_target.to_owned(); let informal_target = if cfg!(target_arch = "x86_64") { informal_target } else if cfg!(target_arch = "aarch64") { informal_target + "_aarch64" } else { unreachable!(); }; let url_substring = informal_target.clone() + "."; let re = Regex::new(format!(r#"/{}/releases/download/[^"]+"#, origin).as_str()).unwrap(); let download_page_file = dlcfg.download_and_check(&url)?; let mut html = String::new(); fs::File::open(&download_page_file as &::std::path::Path)?.read_to_string(&mut html)?; let url = re .find_iter(&html) .map(|m| m.as_str().to_string()) .find(|m| m.contains(&url_substring)); if url.is_none() { return Err( format!("binary package was not provided for '{}'", informal_target).into(), ); } let url = format!("https://github.com/{}", 
url.unwrap()); let installer_file = dlcfg.download_and_check(&url)?; let prefix = self.prefix.path(); notify_handler(Notification::InstallingComponent("lean")); // Remove old files if utils::is_directory(prefix) { utils::remove_dir("toolchain directory", prefix, &|n| { (notify_handler)(n.into()) })?; } utils::ensure_dir_exists("toolchain directory", prefix, &|n| { (notify_handler)(n.into()) })?; // Extract new files if url.ends_with(".tar.gz") { TarGzPackage::unpack_file(&installer_file, prefix)? } else if url.ends_with(".tar.zst") { TarZstdPackage::unpack_file(&installer_file, prefix)? } else if url.ends_with(".zip") { ZipPackage::unpack_file(&installer_file, prefix)? } else { return Err(format!("unsupported archive format: {}", url).into()) } Ok(()) } } elan-1.3.1/src/elan-dist/src/notifications.rs000066400000000000000000000107641414005346400211210ustar00rootroot00000000000000use elan_utils; use elan_utils::notify::NotificationLevel; use errors::*; use manifest::Component; use std::fmt::{self, Display}; use std::path::Path; use temp; #[derive(Debug)] pub enum Notification<'a> { Utils(elan_utils::Notification<'a>), Temp(temp::Notification<'a>), Extracting(&'a Path, &'a Path), ComponentAlreadyInstalled(&'a Component), CantReadUpdateHash(&'a Path), NoUpdateHash(&'a Path), ChecksumValid(&'a str), SignatureValid(&'a str), FileAlreadyDownloaded, CachedFileChecksumFailed, RollingBack, ExtensionNotInstalled(&'a Component), NonFatalError(&'a Error), MissingInstalledComponent(&'a str), DownloadingComponent(&'a str), InstallingComponent(&'a str), RemovingComponent(&'a str), DownloadingManifest(&'a str), DownloadedManifest(&'a str, Option<&'a str>), DownloadingLegacyManifest, ManifestChecksumFailedHack, } impl<'a> From> for Notification<'a> { fn from(n: elan_utils::Notification<'a>) -> Notification<'a> { Notification::Utils(n) } } impl<'a> From> for Notification<'a> { fn from(n: temp::Notification<'a>) -> Notification<'a> { Notification::Temp(n) } } impl<'a> Notification<'a> { pub fn level(&self) -> NotificationLevel { use self::Notification::*; match *self { Temp(ref n) => n.level(), Utils(ref n) => n.level(), ChecksumValid(_) | NoUpdateHash(_) | FileAlreadyDownloaded | DownloadingLegacyManifest => NotificationLevel::Verbose, Extracting(_, _) | SignatureValid(_) | DownloadingComponent(_) | InstallingComponent(_) | RemovingComponent(_) | ComponentAlreadyInstalled(_) | ManifestChecksumFailedHack | RollingBack | DownloadingManifest(_) | DownloadedManifest(_, _) => NotificationLevel::Info, CantReadUpdateHash(_) | ExtensionNotInstalled(_) | MissingInstalledComponent(_) | CachedFileChecksumFailed => NotificationLevel::Warn, NonFatalError(_) => NotificationLevel::Error, } } } impl<'a> Display for Notification<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> ::std::result::Result<(), fmt::Error> { use self::Notification::*; match *self { Temp(ref n) => n.fmt(f), Utils(ref n) => n.fmt(f), Extracting(_, _) => write!(f, "extracting..."), ComponentAlreadyInstalled(ref c) => { write!(f, "component {} is up to date", c.description()) } CantReadUpdateHash(path) => { write!( f, "can't read update hash file: '{}', can't skip update...", path.display() ) } NoUpdateHash(path) => write!(f, "no update hash at: '{}'", path.display()), ChecksumValid(_) => write!(f, "checksum passed"), SignatureValid(_) => write!(f, "signature valid"), FileAlreadyDownloaded => write!(f, "reusing previously downloaded file"), CachedFileChecksumFailed => write!(f, "bad checksum for cached download"), RollingBack => write!(f, "rolling back 
changes"), ExtensionNotInstalled(c) => { write!(f, "extension '{}' was not installed", c.name()) } NonFatalError(e) => write!(f, "{}", e), MissingInstalledComponent(c) => { write!(f, "during uninstall component {} was not found", c) } DownloadingComponent(c) => write!(f, "downloading component '{}'", c), InstallingComponent(c) => write!(f, "installing component '{}'", c), RemovingComponent(c) => write!(f, "removing component '{}'", c), DownloadingManifest(t) => write!(f, "syncing channel updates for '{}'", t), DownloadedManifest(date, Some(version)) => { write!(f, "latest update on {}, lean version {}", date, version) } DownloadedManifest(date, None) => { write!(f, "latest update on {}, no lean version", date) } DownloadingLegacyManifest => write!(f, "manifest not found. trying legacy manifest"), ManifestChecksumFailedHack => { write!(f, "update not yet available, sorry! try again later") } } } } elan-1.3.1/src/elan-dist/src/prefix.rs000066400000000000000000000015401414005346400175350ustar00rootroot00000000000000use std::path::{Path, PathBuf}; const REL_MANIFEST_DIR: &'static str = "."; #[derive(Clone, Debug)] pub struct InstallPrefix { path: PathBuf, } impl InstallPrefix { pub fn from(path: PathBuf) -> Self { InstallPrefix { path: path } } pub fn path(&self) -> &Path { &self.path } pub fn abs_path>(&self, path: P) -> PathBuf { self.path.join(path) } pub fn manifest_dir(&self) -> PathBuf { let mut path = self.path.clone(); path.push(REL_MANIFEST_DIR); path } pub fn manifest_file(&self, name: &str) -> PathBuf { let mut path = self.manifest_dir(); path.push(name); path } pub fn rel_manifest_file(&self, name: &str) -> PathBuf { let mut path = PathBuf::from(REL_MANIFEST_DIR); path.push(name); path } } elan-1.3.1/src/elan-dist/src/temp.rs000066400000000000000000000160471414005346400172150ustar00rootroot00000000000000extern crate remove_dir_all; use elan_utils::raw; use std::error; use std::fmt::{self, Display}; use std::fs; use std::io; use std::ops; use std::path::{Path, PathBuf}; use elan_utils::notify::NotificationLevel; #[derive(Debug)] pub enum Error { CreatingRoot { path: PathBuf, error: io::Error }, CreatingFile { path: PathBuf, error: io::Error }, CreatingDirectory { path: PathBuf, error: io::Error }, } pub type Result = ::std::result::Result; #[derive(Debug)] pub enum Notification<'a> { CreatingRoot(&'a Path), CreatingFile(&'a Path), CreatingDirectory(&'a Path), FileDeletion(&'a Path, io::Result<()>), DirectoryDeletion(&'a Path, io::Result<()>), } pub struct Cfg { root_directory: PathBuf, notify_handler: Box, } #[derive(Debug)] pub struct Dir<'a> { cfg: &'a Cfg, path: PathBuf, } #[derive(Debug)] pub struct File<'a> { cfg: &'a Cfg, path: PathBuf, } impl<'a> Notification<'a> { pub fn level(&self) -> NotificationLevel { use self::Notification::*; match *self { CreatingRoot(_) | CreatingFile(_) | CreatingDirectory(_) => NotificationLevel::Verbose, FileDeletion(_, ref result) | DirectoryDeletion(_, ref result) => { if result.is_ok() { NotificationLevel::Verbose } else { NotificationLevel::Warn } } } } } impl<'a> Display for Notification<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> ::std::result::Result<(), fmt::Error> { use self::Notification::*; match *self { CreatingRoot(path) => write!(f, "creating temp root: {}", path.display()), CreatingFile(path) => write!(f, "creating temp file: {}", path.display()), CreatingDirectory(path) => write!(f, "creating temp directory: {}", path.display()), FileDeletion(path, ref result) => { if result.is_ok() { write!(f, "deleted temp file: {}", 
path.display()) } else { write!(f, "could not delete temp file: {}", path.display()) } } DirectoryDeletion(path, ref result) => { if result.is_ok() { write!(f, "deleted temp directory: {}", path.display()) } else { write!(f, "could not delete temp directory: {}", path.display()) } } } } } impl error::Error for Error { fn description(&self) -> &str { use self::Error::*; match *self { CreatingRoot { .. } => "could not create temp root", CreatingFile { .. } => "could not create temp file", CreatingDirectory { .. } => "could not create temp directory", } } fn cause(&self) -> Option<&dyn error::Error> { use self::Error::*; match *self { CreatingRoot { ref error, .. } | CreatingFile { ref error, .. } | CreatingDirectory { ref error, .. } => Some(error), } } } impl Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> ::std::result::Result<(), fmt::Error> { use self::Error::*; match *self { CreatingRoot { ref path, error: _ } => { write!(f, "could not create temp root: {}", path.display()) } CreatingFile { ref path, error: _ } => { write!(f, "could not create temp file: {}", path.display()) } CreatingDirectory { ref path, error: _ } => { write!(f, "could not create temp directory: {}", path.display()) } } } } impl Cfg { pub fn new(root_directory: PathBuf, notify_handler: Box) -> Self { Cfg { root_directory: root_directory, notify_handler: notify_handler, } } pub fn create_root(&self) -> Result { raw::ensure_dir_exists(&self.root_directory, |p| { (self.notify_handler)(Notification::CreatingRoot(p)); }) .map_err(|e| Error::CreatingRoot { path: PathBuf::from(&self.root_directory), error: e, }) } pub fn new_directory(&self) -> Result { self.create_root()?; loop { let temp_name = raw::random_string(16) + "_dir"; let temp_dir = self.root_directory.join(temp_name); // This is technically racey, but the probability of getting the same // random names at exactly the same time is... low. if !raw::path_exists(&temp_dir) { (self.notify_handler)(Notification::CreatingDirectory(&temp_dir)); fs::create_dir(&temp_dir).map_err(|e| Error::CreatingDirectory { path: PathBuf::from(&temp_dir), error: e, })?; return Ok(Dir { cfg: self, path: temp_dir, }); } } } pub fn new_file(&self) -> Result { self.new_file_with_ext("", "") } pub fn new_file_with_ext(&self, prefix: &str, ext: &str) -> Result { self.create_root()?; loop { let temp_name = prefix.to_owned() + &raw::random_string(16) + "_file" + ext; let temp_file = self.root_directory.join(temp_name); // This is technically racey, but the probability of getting the same // random names at exactly the same time is... low. 
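// Example of a resulting name (the random component is illustrative only):
//
//     <root_directory>/Ab3dEf9hK2mN4pQ7_file.tar.gz    // prefix "", ext ".tar.gz"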
if !raw::path_exists(&temp_file) { (self.notify_handler)(Notification::CreatingFile(&temp_file)); fs::File::create(&temp_file).map_err(|e| Error::CreatingFile { path: PathBuf::from(&temp_file), error: e, })?; return Ok(File { cfg: self, path: temp_file, }); } } } } impl fmt::Debug for Cfg { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("Cfg") .field("root_directory", &self.root_directory) .field("notify_handler", &"...") .finish() } } impl<'a> ops::Deref for Dir<'a> { type Target = Path; fn deref(&self) -> &Path { ops::Deref::deref(&self.path) } } impl<'a> ops::Deref for File<'a> { type Target = Path; fn deref(&self) -> &Path { ops::Deref::deref(&self.path) } } impl<'a> Drop for Dir<'a> { fn drop(&mut self) { if raw::is_directory(&self.path) { let n = Notification::DirectoryDeletion( &self.path, remove_dir_all::remove_dir_all(&self.path), ); (self.cfg.notify_handler)(n); } } } impl<'a> Drop for File<'a> { fn drop(&mut self) { if raw::is_file(&self.path) { let n = Notification::FileDeletion(&self.path, fs::remove_file(&self.path)); (self.cfg.notify_handler)(n); } } } elan-1.3.1/src/elan-utils/000077500000000000000000000000001414005346400153005ustar00rootroot00000000000000elan-1.3.1/src/elan-utils/Cargo.toml000066400000000000000000000014111414005346400172250ustar00rootroot00000000000000[package] name = "elan-utils" version = "1.11.0" authors = [ "Sebastian Ullrich " ] description = "Utility functions for elan" license = "MIT OR Apache-2.0" [dependencies] download = { path = "../download" } error-chain = "0.12.4" libc = "0.2.88" rand = "0.8.3" remove_dir_all = "0.7.0" scopeguard = "1.1.0" semver = "0.11.0" sha2 = "0.9.3" toml = "0.5.8" url = "2.2.1" curl = "0.4.34" openssl = { version = "0.10", features = ["vendored"] } regex = "1.4.3" dirs = "3.0.1" [target."cfg(windows)".dependencies] winapi = { version = "0.3.9", features = ["combaseapi", "errhandlingapi", "fileapi", "handleapi", "ioapiset", "minwindef", "processthreadsapi", "shlobj", "shtypes", "userenv", "winbase", "winerror", "winnt", "winioctl"] } winreg = "0.8.0" elan-1.3.1/src/elan-utils/src/000077500000000000000000000000001414005346400160675ustar00rootroot00000000000000elan-1.3.1/src/elan-utils/src/errors.rs000066400000000000000000000120521414005346400177510ustar00rootroot00000000000000use download; use std::ffi::OsString; use std::path::PathBuf; use url::Url; error_chain! 
{ links { Download(download::Error, download::ErrorKind); } foreign_links { } errors { LocatingWorkingDir { description("could not locate working directory") } ReadingFile { name: &'static str, path: PathBuf, } { description("could not read file") display("could not read {} file: '{}'", name, path.display()) } ReadingDirectory { name: &'static str, path: PathBuf, } { description("could not read directory") display("could not read {} directory: '{}'", name, path.display()) } WritingFile { name: &'static str, path: PathBuf, } { description("could not write file") display("could not write {} file: '{}'", name, path.display()) } CreatingDirectory { name: &'static str, path: PathBuf, } { description("could not create directory") display("could not create {} directory: '{}'", name, path.display()) } ExpectedType(t: &'static str, n: String) { description("expected type") display("expected type: '{}' for '{}'", t, n) } FilteringFile { name: &'static str, src: PathBuf, dest: PathBuf, } { description("could not copy file") display("could not copy {} file from '{}' to '{}'", name, src.display(), dest.display()) } RenamingFile { name: &'static str, src: PathBuf, dest: PathBuf, } { description("could not rename file") display("could not rename {} file from '{}' to '{}'", name, src.display(), dest.display()) } RenamingDirectory { name: &'static str, src: PathBuf, dest: PathBuf, } { description("could not rename directory") display("could not rename {} directory from '{}' to '{}'", name, src.display(), dest.display()) } DownloadingFile { url: Url, path: PathBuf, } { description("could not download file") display("could not download file from '{}' to '{}'", url, path.display()) } DownloadNotExists { url: Url, path: PathBuf, } { description("could not download file") display("could not download file from '{}' to '{}'", url, path.display()) } InvalidUrl { url: String, } { description("invalid url") display("invalid url: {}", url) } RunningCommand { name: OsString, } { description("command failed") display("command failed: '{}'", PathBuf::from(name).display()) } NotAFile { path: PathBuf, } { description("not a file") display("not a file: '{}'", path.display()) } NotADirectory { path: PathBuf, } { description("not a directory") display("not a directory: '{}'", path.display()) } LinkingFile { src: PathBuf, dest: PathBuf, } { description("could not link file") display("could not create link from '{}' to '{}'", src.display(), dest.display()) } LinkingDirectory { src: PathBuf, dest: PathBuf, } { description("could not symlink directory") display("could not create link from '{}' to '{}'", src.display(), dest.display()) } CopyingDirectory { src: PathBuf, dest: PathBuf, } { description("could not copy directory") display("could not copy directory from '{}' to '{}'", src.display(), dest.display()) } CopyingFile { src: PathBuf, dest: PathBuf, } { description("could not copy file") display("could not copy file from '{}' to '{}'", src.display(), dest.display()) } RemovingFile { name: &'static str, path: PathBuf, } { description("could not remove file") display("could not remove '{}' file: '{}'", name, path.display()) } RemovingDirectory { name: &'static str, path: PathBuf, } { description("could not remove directory") display("could not remove '{}' directory: '{}'", name, path.display()) } SettingPermissions { path: PathBuf, } { description("failed to set permissions") display("failed to set permissions for '{}'", path.display()) } ElanHome { description("couldn't find value of ELAN_HOME") } } } 
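// Illustrative sketch (not part of the original sources): a minimal check of
// how the `error_chain!` variants declared above render. `InvalidUrl` and its
// display format are taken from the declarations in this file.
#[cfg(test)]
mod error_display_sketch {
    use super::*;

    #[test]
    fn invalid_url_mentions_the_offending_string() {
        let err: Error = ErrorKind::InvalidUrl {
            url: "not a url".to_string(),
        }
        .into();
        assert_eq!(err.to_string(), "invalid url: not a url");
    }
}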
elan-1.3.1/src/elan-utils/src/lib.rs000066400000000000000000000010751414005346400172060ustar00rootroot00000000000000#![recursion_limit = "1024"] // for error_chain!

extern crate rand;
extern crate scopeguard;
#[macro_use]
extern crate error_chain;
extern crate curl;
extern crate dirs;
extern crate download;
extern crate regex;
extern crate semver;
extern crate sha2;
extern crate toml;
extern crate url;
#[cfg(windows)]
extern crate winapi;
#[cfg(windows)]
extern crate winreg;

#[cfg(unix)]
extern crate libc;

pub mod errors;
pub mod notifications;
pub mod raw;
pub mod toml_utils;
pub mod tty;
pub mod utils;

pub use errors::*;
pub use notifications::Notification;

pub mod notify;
elan-1.3.1/src/elan-utils/src/notifications.rs000066400000000000000000000054501414005346400213120ustar00rootroot00000000000000use std::fmt::{self, Display};
use std::path::Path;
use url::Url;

use notify::NotificationLevel;

#[derive(Debug)]
pub enum Notification<'a> {
    CreatingDirectory(&'a str, &'a Path),
    LinkingDirectory(&'a Path, &'a Path),
    CopyingDirectory(&'a Path, &'a Path),
    RemovingDirectory(&'a str, &'a Path),
    DownloadingFile(&'a Url, &'a Path),
    /// Received the Content-Length of the to-be downloaded data.
    DownloadContentLengthReceived(u64),
    /// Received some data.
    DownloadDataReceived(&'a [u8]),
    /// Download has finished.
    DownloadFinished,
    NoCanonicalPath(&'a Path),
    ResumingPartialDownload,
    UsingCurl,
    UsingReqwest,
    UsingHyperDeprecated,
}

impl<'a> Notification<'a> {
    pub fn level(&self) -> NotificationLevel {
        use self::Notification::*;
        match *self {
            CreatingDirectory(_, _) | RemovingDirectory(_, _) => NotificationLevel::Verbose,
            LinkingDirectory(_, _)
            | CopyingDirectory(_, _)
            | DownloadingFile(_, _)
            | DownloadContentLengthReceived(_)
            | DownloadDataReceived(_)
            | DownloadFinished
            | ResumingPartialDownload
            | UsingCurl
            | UsingReqwest => NotificationLevel::Verbose,
            UsingHyperDeprecated | NoCanonicalPath(_) => NotificationLevel::Warn,
        }
    }
}

impl<'a> Display for Notification<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> ::std::result::Result<(), fmt::Error> {
        use self::Notification::*;
        match *self {
            CreatingDirectory(name, path) => {
                write!(f, "creating {} directory: '{}'", name, path.display())
            }
            LinkingDirectory(_, dest) => write!(f, "linking directory from: '{}'", dest.display()),
            CopyingDirectory(src, _) => write!(f, "copying directory from: '{}'", src.display()),
            RemovingDirectory(name, path) => {
                write!(f, "removing {} directory: '{}'", name, path.display())
            }
            DownloadingFile(url, _) => write!(f, "downloading file from: '{}'", url),
            DownloadContentLengthReceived(len) => write!(f, "download size is: '{}'", len),
            DownloadDataReceived(data) => write!(f, "received some data of size {}", data.len()),
            DownloadFinished => write!(f, "download finished"),
            NoCanonicalPath(path) => write!(f, "could not canonicalize path: '{}'", path.display()),
            ResumingPartialDownload => write!(f, "resuming partial download"),
            UsingCurl => write!(f, "downloading with curl"),
            UsingReqwest => write!(f, "downloading with reqwest"),
            UsingHyperDeprecated => f.write_str(
                "ELAN_USE_HYPER environment variable is deprecated, use ELAN_USE_REQWEST instead",
            ),
        }
    }
}
elan-1.3.1/src/elan-utils/src/notify.rs000066400000000000000000000001341414005346400177430ustar00rootroot00000000000000#[derive(Debug)]
pub enum NotificationLevel {
    Verbose,
    Info,
    Warn,
    Error,
}
elan-1.3.1/src/elan-utils/src/raw.rs000066400000000000000000000324001414005346400172250ustar00rootroot00000000000000extern crate remove_dir_all;

use std::char::from_u32;
use std::env;
use
std::error; use std::ffi::{OsStr, OsString}; use std::fmt; use std::fs; use std::io; use std::io::Write; use std::path::Path; use std::process::{Command, ExitStatus}; use std::str; use rand::random; pub fn ensure_dir_exists, F: FnOnce(&Path)>( path: P, callback: F, ) -> io::Result { if !is_directory(path.as_ref()) { callback(path.as_ref()); fs::create_dir_all(path.as_ref()).map(|()| true) } else { Ok(false) } } pub fn is_directory>(path: P) -> bool { fs::metadata(path).ok().as_ref().map(fs::Metadata::is_dir) == Some(true) } pub fn is_file>(path: P) -> bool { fs::metadata(path).ok().as_ref().map(fs::Metadata::is_file) == Some(true) } pub fn path_exists>(path: P) -> bool { fs::metadata(path).is_ok() } pub fn random_string(length: usize) -> String { let chars = b"abcdefghijklmnopqrstuvwxyz0123456789_"; (0..length) .map(|_| from_u32(chars[random::() % chars.len()] as u32).unwrap()) .collect() } pub fn if_not_empty>(s: S) -> Option { if s == *"" { None } else { Some(s) } } pub fn write_file(path: &Path, contents: &str) -> io::Result<()> { let mut file = fs::OpenOptions::new() .write(true) .truncate(true) .create(true) .open(path)?; io::Write::write_all(&mut file, contents.as_bytes())?; file.sync_data()?; Ok(()) } pub fn read_file(path: &Path) -> io::Result { let mut file = fs::OpenOptions::new().read(true).open(path)?; let mut contents = String::new(); io::Read::read_to_string(&mut file, &mut contents)?; Ok(contents) } pub fn filter_file bool>( src: &Path, dest: &Path, mut filter: F, ) -> io::Result { let src_file = fs::File::open(src)?; let dest_file = fs::File::create(dest)?; let mut reader = io::BufReader::new(src_file); let mut writer = io::BufWriter::new(dest_file); let mut removed = 0; for result in io::BufRead::lines(&mut reader) { let line = result?; if filter(&line) { writeln!(&mut writer, "{}", &line)?; } else { removed += 1; } } writer.flush()?; Ok(removed) } pub fn match_file Option>(src: &Path, mut f: F) -> io::Result> { let src_file = fs::File::open(src)?; let mut reader = io::BufReader::new(src_file); for result in io::BufRead::lines(&mut reader) { let line = result?; if let Some(r) = f(&line) { return Ok(Some(r)); } } Ok(None) } pub fn append_file(dest: &Path, line: &str) -> io::Result<()> { let mut dest_file = fs::OpenOptions::new() .write(true) .append(true) .create(true) .open(dest)?; writeln!(&mut dest_file, "{}", line)?; dest_file.sync_data()?; Ok(()) } pub fn tee_file(path: &Path, w: &mut W) -> io::Result<()> { let mut file = fs::OpenOptions::new().read(true).open(path)?; let buffer_size = 0x10000; let mut buffer = vec![0u8; buffer_size]; loop { let bytes_read = io::Read::read(&mut file, &mut buffer)?; if bytes_read != 0 { io::Write::write_all(w, &mut buffer[0..bytes_read])?; } else { return Ok(()); } } } pub fn symlink_dir(src: &Path, dest: &Path) -> io::Result<()> { #[cfg(windows)] fn symlink_dir_inner(src: &Path, dest: &Path) -> io::Result<()> { // std's symlink uses Windows's symlink function, which requires // admin. We can create directory junctions the hard way without // though. 
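// (The helper called below, `symlink_junction_inner`, creates the junction
// directly through `DeviceIoControl` with `FSCTL_SET_REPARSE_POINT`.)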
symlink_junction_inner(src, dest) } #[cfg(not(windows))] fn symlink_dir_inner(src: &Path, dest: &Path) -> io::Result<()> { ::std::os::unix::fs::symlink(src, dest) } let _ = remove_dir(dest); symlink_dir_inner(src, dest) } // Creating a directory junction on windows involves dealing with reparse // points and the DeviceIoControl function, and this code is a skeleton of // what can be found here: // // http://www.flexhex.com/docs/articles/hard-links.phtml // // Copied from std #[cfg(windows)] #[allow(non_snake_case)] fn symlink_junction_inner(target: &Path, junction: &Path) -> io::Result<()> { use std::os::windows::ffi::OsStrExt; use std::ptr; use winapi::shared::minwindef::*; use winapi::um::fileapi::*; use winapi::um::ioapiset::*; use winapi::um::winbase::*; use winapi::um::winioctl::FSCTL_SET_REPARSE_POINT; use winapi::um::winnt::*; const MAXIMUM_REPARSE_DATA_BUFFER_SIZE: usize = 16 * 1024; #[repr(C)] pub struct REPARSE_MOUNTPOINT_DATA_BUFFER { pub ReparseTag: DWORD, pub ReparseDataLength: DWORD, pub Reserved: WORD, pub ReparseTargetLength: WORD, pub ReparseTargetMaximumLength: WORD, pub Reserved1: WORD, pub ReparseTarget: WCHAR, } // We're using low-level APIs to create the junction, and these are more picky about paths. // For example, forward slashes cannot be used as a path separator, so we should try to // canonicalize the path first. let target = fs::canonicalize(target)?; fs::create_dir(junction)?; let path = windows::to_u16s(junction)?; unsafe { let h = CreateFileW( path.as_ptr(), GENERIC_WRITE, FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE, 0 as *mut _, OPEN_EXISTING, FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, ptr::null_mut(), ); let mut data = [0u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE]; let db = data.as_mut_ptr() as *mut REPARSE_MOUNTPOINT_DATA_BUFFER; let buf = &mut (*db).ReparseTarget as *mut WCHAR; let mut i = 0; // FIXME: this conversion is very hacky let v = br"\??\"; let v = v.iter().map(|x| *x as u16); for c in v.chain(target.as_os_str().encode_wide().skip(4)) { *buf.offset(i) = c; i += 1; } *buf.offset(i) = 0; i += 1; (*db).ReparseTag = IO_REPARSE_TAG_MOUNT_POINT; (*db).ReparseTargetMaximumLength = (i * 2) as WORD; (*db).ReparseTargetLength = ((i - 1) * 2) as WORD; (*db).ReparseDataLength = (*db).ReparseTargetLength as DWORD + 12; let mut ret = 0; let res = DeviceIoControl( h as *mut _, FSCTL_SET_REPARSE_POINT, data.as_ptr() as *mut _, (*db).ReparseDataLength + 8, ptr::null_mut(), 0, &mut ret, ptr::null_mut(), ); if res == 0 { Err(io::Error::last_os_error()) } else { Ok(()) } } } pub fn hardlink(src: &Path, dest: &Path) -> io::Result<()> { let _ = fs::remove_file(dest); fs::hard_link(src, dest) } #[derive(Debug)] pub enum CommandError { Io(io::Error), Status(ExitStatus), } pub type CommandResult = ::std::result::Result; impl error::Error for CommandError { fn description(&self) -> &str { use self::CommandError::*; match *self { Io(_) => "could not execute command", Status(_) => "command exited with unsuccessful status", } } fn cause(&self) -> Option<&dyn error::Error> { use self::CommandError::*; match *self { Io(ref e) => Some(e), Status(_) => None, } } } impl fmt::Display for CommandError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { CommandError::Io(ref e) => write!(f, "Io: {}", e), CommandError::Status(ref s) => write!(f, "Status: {}", s), } } } pub fn cmd_status(cmd: &mut Command) -> CommandResult<()> { cmd.status().map_err(CommandError::Io).and_then(|s| { if s.success() { Ok(()) } else { Err(CommandError::Status(s)) } 
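// A failure to launch the process at all surfaces as `CommandError::Io`
// (mapped above); a run that exits unsuccessfully is reported as
// `CommandError::Status`.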
}) } pub fn remove_dir(path: &Path) -> io::Result<()> { if fs::symlink_metadata(path)?.file_type().is_symlink() { if cfg!(windows) { fs::remove_dir(path) } else { fs::remove_file(path) } } else { // Again because remove_dir all doesn't delete write-only files on windows, // this is a custom implementation, more-or-less copied from cargo. // cc rust-lang/rust#31944 // cc https://github.com/rust-lang/cargo/blob/master/tests/support/paths.rs#L52 remove_dir_all::remove_dir_all(path) } } pub fn copy_dir(src: &Path, dest: &Path) -> io::Result<()> { fs::create_dir(dest)?; for entry in src.read_dir()? { let entry = entry?; let kind = entry.file_type()?; let src = entry.path(); let dest = dest.join(entry.file_name()); if kind.is_dir() { copy_dir(&src, &dest)?; } else { fs::copy(&src, &dest)?; } } Ok(()) } pub fn prefix_arg>(name: &str, s: S) -> OsString { let mut arg = OsString::from(name); arg.push(s); arg } pub fn has_cmd(cmd: &str) -> bool { let cmd = format!("{}{}", cmd, env::consts::EXE_SUFFIX); let path = env::var_os("PATH").unwrap_or(OsString::new()); env::split_paths(&path) .map(|p| p.join(&cmd)) .any(|p| p.exists()) } pub fn find_cmd<'a>(cmds: &[&'a str]) -> Option<&'a str> { cmds.into_iter().map(|&s| s).filter(|&s| has_cmd(s)).next() } pub fn open_browser(path: &Path) -> io::Result { #[cfg(not(windows))] fn inner(path: &Path) -> io::Result { use std::process::Stdio; let commands = [ "xdg-open", "open", "firefox", "chromium", "sensible-browser", ]; if let Some(cmd) = find_cmd(&commands) { Command::new(cmd) .arg(path) .stdin(Stdio::null()) .stdout(Stdio::null()) .stderr(Stdio::null()) .spawn() .map(|_| true) } else { Ok(false) } } #[cfg(windows)] fn inner(path: &Path) -> io::Result { use std::ptr; use winapi::ctypes; use winapi::shared::minwindef::HINSTANCE; use winapi::shared::ntdef::LPCWSTR; use winapi::shared::windef::HWND; // FIXME: When winapi has this function, use their version extern "system" { pub fn ShellExecuteW( hwnd: HWND, lpOperation: LPCWSTR, lpFile: LPCWSTR, lpParameters: LPCWSTR, lpDirectory: LPCWSTR, nShowCmd: ctypes::c_int, ) -> HINSTANCE; } const SW_SHOW: ctypes::c_int = 5; let path = windows::to_u16s(path)?; let operation = windows::to_u16s("open")?; let result = unsafe { ShellExecuteW( ptr::null_mut(), operation.as_ptr(), path.as_ptr(), ptr::null(), ptr::null(), SW_SHOW, ) }; Ok(result as usize > 32) } inner(path) } #[cfg(windows)] pub mod windows { use std::ffi::{OsStr, OsString}; use std::io; use std::os::windows::ffi::{OsStrExt, OsStringExt}; use std::path::PathBuf; use std::ptr; use std::slice; use winapi::shared::guiddef::GUID; use winapi::um::{combaseapi, shlobj, shtypes}; #[allow(non_upper_case_globals)] pub const FOLDERID_LocalAppData: GUID = GUID { Data1: 0xF1B32785, Data2: 0x6FBA, Data3: 0x4FCF, Data4: [0x9D, 0x55, 0x7B, 0x8E, 0x7F, 0x15, 0x70, 0x91], }; #[allow(non_upper_case_globals)] pub const FOLDERID_Profile: GUID = GUID { Data1: 0x5E6C858F, Data2: 0x0E22, Data3: 0x4760, Data4: [0x9A, 0xFE, 0xEA, 0x33, 0x17, 0xB6, 0x71, 0x73], }; pub fn get_special_folder(id: &shtypes::KNOWNFOLDERID) -> io::Result { let mut path = ptr::null_mut(); let result; unsafe { let code = shlobj::SHGetKnownFolderPath(id, 0, ptr::null_mut(), &mut path); if code == 0 { let mut length = 0usize; while *path.offset(length as isize) != 0 { length += 1; } let slice = slice::from_raw_parts(path, length); result = Ok(OsString::from_wide(slice).into()); } else { result = Err(io::Error::from_raw_os_error(code)); } combaseapi::CoTaskMemFree(path as *mut _); } result } pub fn to_u16s>(s: S) -> 
io::Result> { fn inner(s: &OsStr) -> io::Result> { let mut maybe_result: Vec = s.encode_wide().collect(); if maybe_result.iter().any(|&u| u == 0) { return Err(io::Error::new( io::ErrorKind::InvalidInput, "strings passed to WinAPI cannot contain NULs", )); } maybe_result.push(0); Ok(maybe_result) } inner(s.as_ref()) } } elan-1.3.1/src/elan-utils/src/toml_utils.rs000066400000000000000000000045361414005346400206400ustar00rootroot00000000000000use errors::*; use toml; pub fn get_value(table: &mut toml::value::Table, key: &str, path: &str) -> Result { table .remove(key) .ok_or_else(|| format!("missing key: '{}'", path.to_owned() + key).into()) } pub fn get_string(table: &mut toml::value::Table, key: &str, path: &str) -> Result { get_value(table, key, path).and_then(|v| { if let toml::Value::String(s) = v { Ok(s) } else { Err(ErrorKind::ExpectedType("string", path.to_owned() + key).into()) } }) } pub fn get_opt_string( table: &mut toml::value::Table, key: &str, path: &str, ) -> Result> { if let Ok(v) = get_value(table, key, path) { if let toml::Value::String(s) = v { Ok(Some(s)) } else { Err(ErrorKind::ExpectedType("string", path.to_owned() + key).into()) } } else { Ok(None) } } pub fn get_bool(table: &mut toml::value::Table, key: &str, path: &str) -> Result { get_value(table, key, path).and_then(|v| { if let toml::Value::Boolean(b) = v { Ok(b) } else { Err(ErrorKind::ExpectedType("bool", path.to_owned() + key).into()) } }) } pub fn get_opt_bool(table: &mut toml::value::Table, key: &str, path: &str) -> Result> { if let Ok(v) = get_value(table, key, path) { if let toml::Value::Boolean(b) = v { Ok(Some(b)) } else { Err(ErrorKind::ExpectedType("bool", path.to_owned() + key).into()) } } else { Ok(None) } } pub fn get_table( table: &mut toml::value::Table, key: &str, path: &str, ) -> Result { if let Some(v) = table.remove(key) { if let toml::Value::Table(t) = v { Ok(t) } else { Err(ErrorKind::ExpectedType("table", path.to_owned() + key).into()) } } else { Ok(toml::value::Table::new()) } } pub fn get_array( table: &mut toml::value::Table, key: &str, path: &str, ) -> Result { if let Some(v) = table.remove(key) { if let toml::Value::Array(s) = v { Ok(s) } else { Err(ErrorKind::ExpectedType("table", path.to_owned() + key).into()) } } else { Ok(toml::value::Array::new()) } } elan-1.3.1/src/elan-utils/src/tty.rs000066400000000000000000000024731414005346400172630ustar00rootroot00000000000000// Copied from rustc. atty crate did not work as expected #[cfg(unix)] pub fn stderr_isatty() -> bool { unsafe { libc::isatty(libc::STDERR_FILENO) != 0 } } // FIXME: Unfortunately this doesn't detect msys terminals so elan // is always colorless there (just like lean and leanpkg). 
#[cfg(windows)] pub fn stderr_isatty() -> bool { type DWORD = u32; type BOOL = i32; type HANDLE = *mut u8; const STD_ERROR_HANDLE: DWORD = -12i32 as DWORD; extern "system" { fn GetStdHandle(which: DWORD) -> HANDLE; fn GetConsoleMode(hConsoleHandle: HANDLE, lpMode: *mut DWORD) -> BOOL; } unsafe { let handle = GetStdHandle(STD_ERROR_HANDLE); let mut out = 0; GetConsoleMode(handle, &mut out) != 0 } } #[cfg(unix)] pub fn stdout_isatty() -> bool { unsafe { libc::isatty(libc::STDOUT_FILENO) != 0 } } #[cfg(windows)] pub fn stdout_isatty() -> bool { type DWORD = u32; type BOOL = i32; type HANDLE = *mut u8; const STD_OUTPUT_HANDLE: DWORD = -11i32 as DWORD; extern "system" { fn GetStdHandle(which: DWORD) -> HANDLE; fn GetConsoleMode(hConsoleHandle: HANDLE, lpMode: *mut DWORD) -> BOOL; } unsafe { let handle = GetStdHandle(STD_OUTPUT_HANDLE); let mut out = 0; GetConsoleMode(handle, &mut out) != 0 } } elan-1.3.1/src/elan-utils/src/utils.rs000066400000000000000000000403421414005346400176000ustar00rootroot00000000000000use dirs; use errors::*; use notifications::Notification; use raw; use sha2::{Digest, Sha256}; use std::cmp::Ord; use std::env; use std::ffi::OsString; use std::fs::{self, File}; use std::io::{self, Write}; use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::atomic::{AtomicBool, Ordering}; use url::Url; #[cfg(windows)] use winreg; pub use raw::{ find_cmd, has_cmd, if_not_empty, is_directory, is_file, path_exists, prefix_arg, random_string, }; pub fn ensure_dir_exists( name: &'static str, path: &Path, notify_handler: &dyn Fn(Notification), ) -> Result { raw::ensure_dir_exists(path, |p| { notify_handler(Notification::CreatingDirectory(name, p)) }) .chain_err(|| ErrorKind::CreatingDirectory { name: name, path: PathBuf::from(path), }) } pub fn read_file(name: &'static str, path: &Path) -> Result { raw::read_file(path).chain_err(|| ErrorKind::ReadingFile { name: name, path: PathBuf::from(path), }) } pub fn write_file(name: &'static str, path: &Path, contents: &str) -> Result<()> { raw::write_file(path, contents).chain_err(|| ErrorKind::WritingFile { name: name, path: PathBuf::from(path), }) } pub fn append_file(name: &'static str, path: &Path, line: &str) -> Result<()> { raw::append_file(path, line).chain_err(|| ErrorKind::WritingFile { name: name, path: PathBuf::from(path), }) } pub fn write_line(name: &'static str, file: &mut File, path: &Path, line: &str) -> Result<()> { writeln!(file, "{}", line).chain_err(|| ErrorKind::WritingFile { name: name, path: path.to_path_buf(), }) } pub fn write_str(name: &'static str, file: &mut File, path: &Path, s: &str) -> Result<()> { write!(file, "{}", s).chain_err(|| ErrorKind::WritingFile { name: name, path: path.to_path_buf(), }) } pub fn rename_file(name: &'static str, src: &Path, dest: &Path) -> Result<()> { fs::rename(src, dest).chain_err(|| ErrorKind::RenamingFile { name: name, src: PathBuf::from(src), dest: PathBuf::from(dest), }) } pub fn rename_dir(name: &'static str, src: &Path, dest: &Path) -> Result<()> { fs::rename(src, dest).chain_err(|| ErrorKind::RenamingDirectory { name: name, src: PathBuf::from(src), dest: PathBuf::from(dest), }) } pub fn filter_file bool>( name: &'static str, src: &Path, dest: &Path, filter: F, ) -> Result { raw::filter_file(src, dest, filter).chain_err(|| ErrorKind::FilteringFile { name: name, src: PathBuf::from(src), dest: PathBuf::from(dest), }) } pub fn match_file Option>( name: &'static str, src: &Path, f: F, ) -> Result> { raw::match_file(src, f).chain_err(|| ErrorKind::ReadingFile { name: 
name, path: PathBuf::from(src), }) } pub fn canonicalize_path(path: &Path, notify_handler: &dyn Fn(Notification)) -> PathBuf { fs::canonicalize(path).unwrap_or_else(|_| { notify_handler(Notification::NoCanonicalPath(path)); PathBuf::from(path) }) } pub fn tee_file(name: &'static str, path: &Path, w: &mut W) -> Result<()> { raw::tee_file(path, w).chain_err(|| ErrorKind::ReadingFile { name: name, path: PathBuf::from(path), }) } pub fn download_file( url: &Url, path: &Path, hasher: Option<&mut Sha256>, notify_handler: &dyn Fn(Notification), ) -> Result<()> { download_file_with_resume(&url, &path, hasher, false, ¬ify_handler) } pub fn download_file_with_resume( url: &Url, path: &Path, hasher: Option<&mut Sha256>, resume_from_partial: bool, notify_handler: &dyn Fn(Notification), ) -> Result<()> { use download::ErrorKind as DEK; match download_file_(url, path, hasher, resume_from_partial, notify_handler) { Ok(_) => Ok(()), Err(e) => { println!("{:?}", e); let is_client_error = match e.kind() { &ErrorKind::Download(DEK::HttpStatus(400..=499)) => true, &ErrorKind::Download(DEK::FileNotFound) => true, _ => false, }; Err(e).chain_err(|| { if is_client_error { ErrorKind::DownloadNotExists { url: url.clone(), path: path.to_path_buf(), } } else { ErrorKind::DownloadingFile { url: url.clone(), path: path.to_path_buf(), } } }) } } } static DEPRECATED_HYPER_WARNED: AtomicBool = AtomicBool::new(false); fn download_file_( url: &Url, path: &Path, hasher: Option<&mut Sha256>, resume_from_partial: bool, notify_handler: &dyn Fn(Notification), ) -> Result<()> { use download::download_to_path_with_backend; use download::{Backend, Event}; use std::cell::RefCell; notify_handler(Notification::DownloadingFile(url, path)); let hasher = RefCell::new(hasher); // This callback will write the download to disk and optionally // hash the contents, then forward the notification up the stack let callback: &dyn Fn(Event) -> download::Result<()> = &|msg| { match msg { Event::DownloadDataReceived(data) => { if let Some(ref mut h) = *hasher.borrow_mut() { h.update(data); } } _ => (), } match msg { Event::DownloadContentLengthReceived(len) => { notify_handler(Notification::DownloadContentLengthReceived(len)); } Event::DownloadDataReceived(data) => { notify_handler(Notification::DownloadDataReceived(data)); } Event::ResumingPartialDownload => { notify_handler(Notification::ResumingPartialDownload); } } Ok(()) }; // Download the file // Keep the hyper env var around for a bit let use_hyper_backend = env::var_os("ELAN_USE_HYPER").is_some(); if use_hyper_backend && DEPRECATED_HYPER_WARNED.swap(true, Ordering::Relaxed) { notify_handler(Notification::UsingHyperDeprecated); } let use_reqwest_backend = use_hyper_backend || env::var_os("ELAN_USE_REQWEST").is_some(); let (backend, notification) = if use_reqwest_backend { (Backend::Reqwest, Notification::UsingReqwest) } else { (Backend::Curl, Notification::UsingCurl) }; notify_handler(notification); download_to_path_with_backend(backend, url, path, resume_from_partial, Some(callback))?; notify_handler(Notification::DownloadFinished); Ok(()) } pub fn parse_url(url: &str) -> Result { Url::parse(url).chain_err(|| format!("failed to parse url: {}", url)) } pub fn cmd_status(name: &'static str, cmd: &mut Command) -> Result<()> { raw::cmd_status(cmd).chain_err(|| ErrorKind::RunningCommand { name: OsString::from(name), }) } pub fn assert_is_file(path: &Path) -> Result<()> { if !is_file(path) { Err(ErrorKind::NotAFile { path: PathBuf::from(path), } .into()) } else { Ok(()) } } pub fn 
assert_is_directory(path: &Path) -> Result<()> { if !is_directory(path) { Err(ErrorKind::NotADirectory { path: PathBuf::from(path), } .into()) } else { Ok(()) } } pub fn symlink_dir(src: &Path, dest: &Path, notify_handler: &dyn Fn(Notification)) -> Result<()> { notify_handler(Notification::LinkingDirectory(src, dest)); raw::symlink_dir(src, dest).chain_err(|| ErrorKind::LinkingDirectory { src: PathBuf::from(src), dest: PathBuf::from(dest), }) } pub fn hard_or_symlink_file(src: &Path, dest: &Path) -> Result<()> { if hardlink_file(src, dest).is_err() { symlink_file(src, dest)?; } Ok(()) } pub fn hardlink_file(src: &Path, dest: &Path) -> Result<()> { raw::hardlink(src, dest).chain_err(|| ErrorKind::LinkingFile { src: PathBuf::from(src), dest: PathBuf::from(dest), }) } #[cfg(unix)] pub fn symlink_file(src: &Path, dest: &Path) -> Result<()> { ::std::os::unix::fs::symlink(src, dest).chain_err(|| ErrorKind::LinkingFile { src: PathBuf::from(src), dest: PathBuf::from(dest), }) } #[cfg(windows)] pub fn symlink_file(src: &Path, dest: &Path) -> Result<()> { // we are supposed to not use symlink on windows Err(ErrorKind::LinkingFile { src: PathBuf::from(src), dest: PathBuf::from(dest), } .into()) } pub fn copy_dir(src: &Path, dest: &Path, notify_handler: &dyn Fn(Notification)) -> Result<()> { notify_handler(Notification::CopyingDirectory(src, dest)); raw::copy_dir(src, dest).chain_err(|| ErrorKind::CopyingDirectory { src: PathBuf::from(src), dest: PathBuf::from(dest), }) } pub fn copy_file(src: &Path, dest: &Path) -> Result<()> { fs::copy(src, dest) .chain_err(|| ErrorKind::CopyingFile { src: PathBuf::from(src), dest: PathBuf::from(dest), }) .map(|_| ()) } pub fn remove_dir( name: &'static str, path: &Path, notify_handler: &dyn Fn(Notification), ) -> Result<()> { notify_handler(Notification::RemovingDirectory(name, path)); raw::remove_dir(path).chain_err(|| ErrorKind::RemovingDirectory { name: name, path: PathBuf::from(path), }) } pub fn remove_file(name: &'static str, path: &Path) -> Result<()> { fs::remove_file(path).chain_err(|| ErrorKind::RemovingFile { name: name, path: PathBuf::from(path), }) } pub fn read_dir(name: &'static str, path: &Path) -> Result { fs::read_dir(path).chain_err(|| ErrorKind::ReadingDirectory { name: name, path: PathBuf::from(path), }) } pub fn open_browser(path: &Path) -> Result<()> { match raw::open_browser(path) { Ok(true) => Ok(()), Ok(false) => Err("no browser installed".into()), Err(e) => Err(e).chain_err(|| "could not open browser"), } } pub fn set_permissions(path: &Path, perms: fs::Permissions) -> Result<()> { fs::set_permissions(path, perms).chain_err(|| ErrorKind::SettingPermissions { path: PathBuf::from(path), }) } pub fn file_size(path: &Path) -> Result { let metadata = fs::metadata(path).chain_err(|| ErrorKind::ReadingFile { name: "metadata for", path: PathBuf::from(path), })?; Ok(metadata.len()) } pub fn make_executable(path: &Path) -> Result<()> { #[cfg(windows)] fn inner(_: &Path) -> Result<()> { Ok(()) } #[cfg(not(windows))] fn inner(path: &Path) -> Result<()> { use std::os::unix::fs::PermissionsExt; let metadata = fs::metadata(path).chain_err(|| ErrorKind::SettingPermissions { path: PathBuf::from(path), })?; let mut perms = metadata.permissions(); let new_mode = (perms.mode() & !0o777) | 0o755; perms.set_mode(new_mode); set_permissions(path, perms) } inner(path) } pub fn current_dir() -> Result { env::current_dir().chain_err(|| ErrorKind::LocatingWorkingDir) } pub fn current_exe() -> Result { env::current_exe().chain_err(|| ErrorKind::LocatingWorkingDir) } 
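// Illustrative sketch (not part of the original sources): a minimal round trip
// through the `write_file`/`read_file`/`remove_file` wrappers above, using a
// throwaway path under the system temp directory.
#[cfg(test)]
mod file_roundtrip_sketch {
    use super::*;

    #[test]
    fn write_then_read_back() {
        let path = ::std::env::temp_dir().join(format!("elan-utils-{}", random_string(12)));
        write_file("scratch", &path, "hello").unwrap();
        assert_eq!(read_file("scratch", &path).unwrap(), "hello");
        remove_file("scratch", &path).unwrap();
    }
}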
pub fn to_absolute>(path: P) -> Result { current_dir().map(|mut v| { v.push(path); v }) } pub fn home_dir() -> Option { dirs::home_dir() } pub fn elan_home() -> Result { let env_var = env::var_os("ELAN_HOME"); let cwd = env::current_dir().chain_err(|| ErrorKind::ElanHome)?; let elan_home = env_var.clone().map(|home| cwd.join(home)); let user_home = home_dir().map(|p| p.join(".elan")); elan_home.or(user_home).ok_or(ErrorKind::ElanHome.into()) } pub fn format_path_for_display(path: &str) -> String { let unc_present = path.find(r"\\?\"); match unc_present { None => path.to_owned(), Some(_) => path[4..].to_owned(), } } /// Encodes a utf-8 string as a null-terminated UCS-2 string in bytes #[cfg(windows)] pub fn string_to_winreg_bytes(s: &str) -> Vec { use std::os::windows::ffi::OsStrExt; let v: Vec<_> = OsString::from(format!("{}\x00", s)).encode_wide().collect(); unsafe { ::std::slice::from_raw_parts(v.as_ptr() as *const u8, v.len() * 2).to_vec() } } // This is used to decode the value of HKCU\Environment\PATH. If that // key is not unicode (or not REG_SZ | REG_EXPAND_SZ) then this // returns null. The winreg library itself does a lossy unicode // conversion. #[cfg(windows)] pub fn string_from_winreg_value(val: &winreg::RegValue) -> Option { use std::slice; use winreg::enums::RegType; match val.vtype { RegType::REG_SZ | RegType::REG_EXPAND_SZ => { // Copied from winreg let words = unsafe { slice::from_raw_parts(val.bytes.as_ptr() as *const u16, val.bytes.len() / 2) }; let mut s = if let Ok(s) = String::from_utf16(words) { s } else { return None; }; while s.ends_with('\u{0}') { s.pop(); } Some(s) } _ => None, } } pub fn toolchain_sort>(v: &mut Vec) { use semver::{Identifier, Version}; fn special_version(ord: u64, s: &str) -> Version { Version { major: 0, minor: 0, patch: 0, pre: vec![Identifier::Numeric(ord), Identifier::AlphaNumeric(s.into())], build: vec![], } } fn toolchain_sort_key(s: &str) -> Version { if s.starts_with("stable") { special_version(0, s) } else if s.starts_with("beta") { special_version(1, s) } else if s.starts_with("nightly") { special_version(2, s) } else { Version::parse(&s.replace("_", "-")).unwrap_or_else(|_| special_version(3, s)) } } v.sort_by(|a, b| { let a_str: &str = a.as_ref(); let b_str: &str = b.as_ref(); let a_key = toolchain_sort_key(a_str); let b_key = toolchain_sort_key(b_str); a_key.cmp(&b_key) }); } // fetch from HTML page instead of Github API to avoid rate limit pub fn fetch_latest_release_tag(repo_slug: &str) -> Result { use regex::Regex; let latest_url = format!("https://github.com/{}/releases/latest", repo_slug); let mut data = Vec::new(); ::download::curl::EASY.with(|handle| { let mut handle = handle.borrow_mut(); handle.url(&latest_url).unwrap(); handle.follow_location(true).unwrap(); { let mut transfer = handle.transfer(); transfer .write_function(|new_data| { data.extend_from_slice(new_data); Ok(new_data.len()) }) .unwrap(); transfer.perform().unwrap(); } }); let redirect = ::std::str::from_utf8(&data).chain_err(|| "failed to decode release tag response")?; let re = Regex::new(r#"/tag/([-a-z0-9.]+)"#).unwrap(); let capture = re.captures(&redirect); match capture { Some(cap) => Ok(cap.get(1).unwrap().as_str().to_string()), None => Err("failed to parse latest release tag".into()), } } #[cfg(test)] mod tests { use super::*; #[test] fn test_toochain_sort() { let expected = vec![ "stable-x86_64-unknown-linux-gnu", "beta-x86_64-unknown-linux-gnu", "nightly-x86_64-unknown-linux-gnu", "1.0.0-x86_64-unknown-linux-gnu", "1.2.0-x86_64-unknown-linux-gnu", 
"1.8.0-x86_64-unknown-linux-gnu", "1.10.0-x86_64-unknown-linux-gnu", ]; let mut v = vec![ "1.8.0-x86_64-unknown-linux-gnu", "1.0.0-x86_64-unknown-linux-gnu", "nightly-x86_64-unknown-linux-gnu", "stable-x86_64-unknown-linux-gnu", "1.10.0-x86_64-unknown-linux-gnu", "beta-x86_64-unknown-linux-gnu", "1.2.0-x86_64-unknown-linux-gnu", ]; toolchain_sort(&mut v); assert_eq!(expected, v); } } elan-1.3.1/src/elan/000077500000000000000000000000001414005346400141425ustar00rootroot00000000000000elan-1.3.1/src/elan/command.rs000066400000000000000000000127461414005346400161400ustar00rootroot00000000000000use regex::Regex; use std::ffi::OsStr; use std::fs::File; use std::io::{self, BufRead, BufReader, Seek, SeekFrom, Write}; use std::process::{self, Command, Stdio}; use std::time::Instant; use tempfile::tempfile; use elan_utils; use errors::*; use notifications::*; use telemetry::{Telemetry, TelemetryEvent}; use Cfg; pub fn run_command_for_dir>( cmd: Command, arg0: &str, args: &[S], cfg: &Cfg, ) -> Result<()> { if (arg0 == "lean" || arg0 == "lean.exe") && cfg.telemetry_enabled()? { return telemetry_lean(cmd, arg0, args, cfg); } exec_command_for_dir_without_telemetry(cmd, arg0, args) } fn telemetry_lean>( mut cmd: Command, arg0: &str, args: &[S], cfg: &Cfg, ) -> Result<()> { #[cfg(unix)] fn file_as_stdio(file: &File) -> Stdio { use std::os::unix::io::{AsRawFd, FromRawFd}; unsafe { Stdio::from_raw_fd(file.as_raw_fd()) } } #[cfg(windows)] fn file_as_stdio(file: &File) -> Stdio { use std::os::windows::io::{AsRawHandle, FromRawHandle}; unsafe { Stdio::from_raw_handle(file.as_raw_handle()) } } let now = Instant::now(); cmd.args(args); let has_color_args = args.iter().any(|e| { let e = e.as_ref().to_str().unwrap_or(""); e.starts_with("--color") }); if stderr_isatty() && !has_color_args { cmd.arg("--color"); cmd.arg("always"); } let mut cmd_err_file = tempfile().unwrap(); let cmd_err_stdio = file_as_stdio(&cmd_err_file); // FIXME rust-lang/rust#32254. It's not clear to me // when and why this is needed. let mut cmd = cmd .stdin(Stdio::inherit()) .stdout(Stdio::inherit()) .stderr(cmd_err_stdio) .spawn() .unwrap(); let status = cmd.wait(); let duration = now.elapsed(); let ms = (duration.as_secs() as u64 * 1000) + (duration.subsec_nanos() as u64 / 1000 / 1000); let t = Telemetry::new(cfg.elan_dir.join("telemetry")); match status { Ok(status) => { let exit_code = status.code().unwrap_or(1); let re = Regex::new(r"\[(?PE.{4})\]").unwrap(); let mut buffer = String::new(); // Chose a HashSet instead of a Vec to avoid calls to sort() and dedup(). // The HashSet should be faster if there are a lot of errors, too. 
let mut errors: Vec = Vec::new(); let stderr = io::stderr(); let mut handle = stderr.lock(); cmd_err_file.seek(SeekFrom::Start(0)).unwrap(); let mut buffered_stderr = BufReader::new(cmd_err_file); while buffered_stderr.read_line(&mut buffer).unwrap() > 0 { let b = buffer.to_owned(); buffer.clear(); let _ = handle.write(b.as_bytes()); if let Some(caps) = re.captures(&b) { if caps.len() > 0 { errors.push( caps.name("error") .map(|m| m.as_str()) .unwrap_or("") .to_owned(), ); } }; } let e = if errors.is_empty() { None } else { Some(errors) }; let te = TelemetryEvent::LeanRun { duration_ms: ms, exit_code: exit_code, errors: e, }; let _ = t.log_telemetry(te).map_err(|xe| { (cfg.notify_handler)(Notification::TelemetryCleanupError(&xe)); }); process::exit(exit_code); } Err(e) => { let exit_code = e.raw_os_error().unwrap_or(1); let te = TelemetryEvent::LeanRun { duration_ms: ms, exit_code: exit_code, errors: None, }; let _ = t.log_telemetry(te).map_err(|xe| { (cfg.notify_handler)(Notification::TelemetryCleanupError(&xe)); }); Err(e).chain_err(|| elan_utils::ErrorKind::RunningCommand { name: OsStr::new(arg0).to_owned(), }) } } } fn exec_command_for_dir_without_telemetry>( mut cmd: Command, arg0: &str, args: &[S], ) -> Result<()> { cmd.args(args); // FIXME rust-lang/rust#32254. It's not clear to me // when and why this is needed. cmd.stdin(process::Stdio::inherit()); return exec(&mut cmd).chain_err(|| elan_utils::ErrorKind::RunningCommand { name: OsStr::new(arg0).to_owned(), }); #[cfg(unix)] fn exec(cmd: &mut Command) -> io::Result<()> { use std::os::unix::prelude::*; Err(cmd.exec()) } #[cfg(windows)] fn exec(cmd: &mut Command) -> io::Result<()> { let status = cmd.status()?; process::exit(status.code().unwrap()); } } #[cfg(unix)] fn stderr_isatty() -> bool { unsafe { libc::isatty(libc::STDERR_FILENO) != 0 } } #[cfg(windows)] fn stderr_isatty() -> bool { type DWORD = u32; type BOOL = i32; type HANDLE = *mut u8; const STD_ERROR_HANDLE: DWORD = -12i32 as DWORD; extern "system" { fn GetStdHandle(which: DWORD) -> HANDLE; fn GetConsoleMode(hConsoleHandle: HANDLE, lpMode: *mut DWORD) -> BOOL; } unsafe { let handle = GetStdHandle(STD_ERROR_HANDLE); let mut out = 0; GetConsoleMode(handle, &mut out) != 0 } } elan-1.3.1/src/elan/config.rs000066400000000000000000000361101414005346400157560ustar00rootroot00000000000000use std::env; use std::fmt::{self, Display}; use std::io; use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::Arc; use elan_dist::temp; use elan_utils::utils; use errors::*; use notifications::*; use settings::{Settings, SettingsFile, TelemetryMode}; use telemetry_analysis::*; use toolchain::{Toolchain, UpdateStatus}; use toml; #[derive(Debug)] pub enum OverrideReason { Environment, OverrideDB(PathBuf), ToolchainFile(PathBuf), LeanpkgFile(PathBuf), InToolchainDirectory(PathBuf), } impl Display for OverrideReason { fn fmt(&self, f: &mut fmt::Formatter) -> ::std::result::Result<(), fmt::Error> { match *self { OverrideReason::Environment => write!(f, "environment override by ELAN_TOOLCHAIN"), OverrideReason::OverrideDB(ref path) => { write!(f, "directory override for '{}'", path.display()) } OverrideReason::ToolchainFile(ref path) => { write!(f, "overridden by '{}'", path.display()) } OverrideReason::InToolchainDirectory(ref path) => { write!( f, "override because inside toolchain directory '{}'", path.display() ) } OverrideReason::LeanpkgFile(ref path) => { write!(f, "overridden by '{}'", path.display()) } } } } pub struct Cfg { pub elan_dir: PathBuf, pub settings_file: 
SettingsFile, pub toolchains_dir: PathBuf, pub update_hash_dir: PathBuf, pub download_dir: PathBuf, pub temp_cfg: temp::Cfg, //pub gpg_key: Cow<'static, str>, pub env_override: Option, pub notify_handler: Arc, } impl Cfg { pub fn from_env(notify_handler: Arc) -> Result { // Set up the elan home directory let elan_dir = utils::elan_home()?; utils::ensure_dir_exists("home", &elan_dir, &|n| notify_handler(n.into()))?; let settings_file = SettingsFile::new(elan_dir.join("settings.toml")); let toolchains_dir = elan_dir.join("toolchains"); let update_hash_dir = elan_dir.join("update-hashes"); let download_dir = elan_dir.join("downloads"); // GPG key /*let gpg_key = ""; if let Some(path) = env::var_os("ELAN_GPG_KEY") .and_then(utils::if_not_empty) { Cow::Owned(try!(utils::read_file("public key", Path::new(&path)))) } else { Cow::Borrowed(include_str!("lean-key.gpg.ascii")) };*/ // Environment override let env_override = env::var("ELAN_TOOLCHAIN") .ok() .and_then(utils::if_not_empty); let notify_clone = notify_handler.clone(); let temp_cfg = temp::Cfg::new( elan_dir.join("tmp"), Box::new(move |n| (notify_clone)(n.into())), ); Ok(Cfg { elan_dir: elan_dir, settings_file: settings_file, toolchains_dir: toolchains_dir, update_hash_dir: update_hash_dir, download_dir: download_dir, temp_cfg: temp_cfg, //gpg_key: gpg_key, notify_handler: notify_handler, env_override: env_override, }) } pub fn set_default(&self, toolchain: &str) -> Result<()> { self.settings_file.with_mut(|s| { s.default_toolchain = Some(toolchain.to_owned()); Ok(()) })?; (self.notify_handler)(Notification::SetDefaultToolchain(toolchain)); Ok(()) } pub fn get_toolchain(&self, name: &str, create_parent: bool) -> Result { if create_parent { utils::ensure_dir_exists("toolchains", &self.toolchains_dir, &|n| { (self.notify_handler)(n.into()) })?; } Toolchain::from(self, name) } pub fn verify_toolchain(&self, name: &str) -> Result { let toolchain = self.get_toolchain(name, false)?; toolchain.verify()?; Ok(toolchain) } pub fn get_hash_file(&self, toolchain: &str, create_parent: bool) -> Result { if create_parent { utils::ensure_dir_exists("update-hash", &self.update_hash_dir, &|n| { (self.notify_handler)(n.into()) })?; } Ok(self.update_hash_dir.join(toolchain)) } pub fn which_binary(&self, path: &Path, binary: &str) -> Result> { if let Some((toolchain, _)) = self.find_override_toolchain_or_default(path)? { Ok(Some(toolchain.binary_file(binary))) } else { Ok(None) } } pub fn find_default(&self) -> Result> { let opt_name = self .settings_file .with(|s| Ok(s.default_toolchain.clone()))?; if let Some(name) = opt_name { let toolchain = self .verify_toolchain(&name) .chain_err(|| ErrorKind::ToolchainNotInstalled(name.to_string()))?; Ok(Some(toolchain)) } else { Ok(None) } } pub fn find_override(&self, path: &Path) -> Result> { let mut override_ = None; // First check ELAN_TOOLCHAIN if let Some(ref name) = self.env_override { override_ = Some((name.to_string(), OverrideReason::Environment)); } // Then walk up the directory tree from 'path' looking for either the // directory in override database, a `lean-toolchain` file, or a // `leanpkg.toml` file. if override_.is_none() { self.settings_file.with(|s| { override_ = self.find_override_from_dir_walk(path, s)?; Ok(()) })?; } if let Some((name, reason)) = override_ { // This is hackishly using the error chain to provide a bit of // extra context about what went wrong. The CLI will display it // on a line after the proximate error. 
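// Which message applies depends on which override source won out below:
// the ELAN_TOOLCHAIN environment variable, the directory override database,
// a `lean-toolchain` file, a `leanpkg.toml` file, or being inside a
// toolchain directory.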
let reason_err = match reason { OverrideReason::Environment => { format!("the ELAN_TOOLCHAIN environment variable specifies an uninstalled toolchain") } OverrideReason::OverrideDB(ref path) => { format!( "the directory override for '{}' specifies an uninstalled toolchain", path.display() ) } OverrideReason::ToolchainFile(ref path) => { format!( "the toolchain file at '{}' specifies an uninstalled toolchain", path.display() ) } OverrideReason::LeanpkgFile(ref path) => { format!( "the leanpkg.toml file at '{}' specifies an uninstalled toolchain", path.display() ) } OverrideReason::InToolchainDirectory(ref path) => { format!( "could not parse toolchain directory at '{}'", path.display() ) } }; match self.get_toolchain(&name, false) { Ok(toolchain) => { if toolchain.exists() { Ok(Some((toolchain, reason))) } else { toolchain.install_from_dist(false)?; Ok(Some((toolchain, reason))) } } Err(e) => Err(e) .chain_err(|| Error::from(reason_err)) .chain_err(|| ErrorKind::OverrideToolchainNotInstalled(name.to_string())), } } else { Ok(None) } } fn find_override_from_dir_walk( &self, dir: &Path, settings: &Settings, ) -> Result> { let notify = self.notify_handler.as_ref(); let dir = utils::canonicalize_path(dir, &|n| notify(n.into())); let mut dir = Some(&*dir); while let Some(d) = dir { // First check the override database if let Some(name) = settings.dir_override(d, notify) { let reason = OverrideReason::OverrideDB(d.to_owned()); return Ok(Some((name, reason))); } // Then look for 'lean-toolchain' let toolchain_file = d.join("lean-toolchain"); if let Ok(s) = utils::read_file("toolchain file", &toolchain_file) { if let Some(s) = s.lines().next() { let toolchain_name = s.trim(); let reason = OverrideReason::ToolchainFile(toolchain_file); return Ok(Some((toolchain_name.to_string(), reason))); } } // Then look for 'leanpkg.toml' let leanpkg_file = d.join("leanpkg.toml"); if let Ok(content) = utils::read_file("leanpkg.toml", &leanpkg_file) { let value = content .parse::() .map_err(|error| ErrorKind::InvalidLeanpkgFile(leanpkg_file.clone(), error))?; match value .get("package") .and_then(|package| package.get("lean_version")) { None => {} Some(toml::Value::String(s)) => { return Ok(Some(( s.to_string(), OverrideReason::LeanpkgFile(leanpkg_file), ))) } Some(a) => { return Err(ErrorKind::InvalidLeanVersion(leanpkg_file, a.type_str()).into()) } } } dir = d.parent(); if dir == Some(&self.toolchains_dir) { if let Some(last) = d.file_name() { if let Some(last) = last.to_str() { return Ok(Some(( last.to_string(), OverrideReason::InToolchainDirectory(d.into()), ))); } } } } Ok(None) } pub fn find_override_toolchain_or_default( &self, path: &Path, ) -> Result)>> { Ok( if let Some((toolchain, reason)) = self.find_override(path)? { Some((toolchain, Some(reason))) } else { self.find_default()?.map(|toolchain| (toolchain, None)) }, ) } pub fn get_default(&self) -> Result> { self.settings_file.with(|s| Ok(s.default_toolchain.clone())) } pub fn list_toolchains(&self) -> Result> { // de-sanitize toolchain file names (best effort...) fn insane(s: String) -> String { s.replace("---", ":").replace("--", "/") } if utils::is_directory(&self.toolchains_dir) { let mut toolchains: Vec<_> = utils::read_dir("toolchains", &self.toolchains_dir)? 
.filter_map(io::Result::ok) .filter(|e| e.file_type().map(|f| !f.is_file()).unwrap_or(false)) .filter_map(|e| e.file_name().into_string().ok()) .map(insane) .collect(); utils::toolchain_sort(&mut toolchains); Ok(toolchains) } else { Ok(Vec::new()) } } pub fn update_all_channels( &self, force_update: bool, ) -> Result)>> { let toolchains = self.list_toolchains()?; // Convert the toolchain strings to Toolchain values let toolchains = toolchains.into_iter(); let toolchains = toolchains.map(|n| (n.clone(), self.get_toolchain(&n, true))); // Filter out toolchains that don't track a release channel let toolchains = toolchains.filter(|&(_, ref t)| t.as_ref().map(|t| t.is_tracking()).unwrap_or(false)); // Update toolchains and collect the results let toolchains = toolchains.map(|(n, t)| { let t = t.and_then(|t| { let t = t.install_from_dist(force_update); if let Err(ref e) = t { (self.notify_handler)(Notification::NonFatalError(e)); } t }); (n, t) }); Ok(toolchains.collect()) } pub fn toolchain_for_dir(&self, path: &Path) -> Result<(Toolchain, Option)> { self.find_override_toolchain_or_default(path) .and_then(|r| r.ok_or(ErrorKind::NoDefaultToolchain.into())) } pub fn create_command_for_dir(&self, path: &Path, binary: &str) -> Result { let (ref toolchain, _) = self.toolchain_for_dir(path)?; toolchain.create_command(binary) } pub fn create_command_for_toolchain( &self, toolchain: &str, install_if_missing: bool, binary: &str, ) -> Result { let ref toolchain = self.get_toolchain(toolchain, false)?; if install_if_missing && !toolchain.exists() { toolchain.install_from_dist(false)?; } toolchain.create_command(binary) } pub fn doc_path_for_dir(&self, path: &Path, relative: &str) -> Result { let (toolchain, _) = self.toolchain_for_dir(path)?; toolchain.doc_path(relative) } pub fn open_docs_for_dir(&self, path: &Path, relative: &str) -> Result<()> { let (toolchain, _) = self.toolchain_for_dir(path)?; toolchain.open_docs(relative) } pub fn set_telemetry(&self, telemetry_enabled: bool) -> Result<()> { if telemetry_enabled { self.enable_telemetry() } else { self.disable_telemetry() } } fn enable_telemetry(&self) -> Result<()> { self.settings_file.with_mut(|s| { s.telemetry = TelemetryMode::On; Ok(()) })?; let _ = utils::ensure_dir_exists("telemetry", &self.elan_dir.join("telemetry"), &|_| ()); (self.notify_handler)(Notification::SetTelemetry("on")); Ok(()) } fn disable_telemetry(&self) -> Result<()> { self.settings_file.with_mut(|s| { s.telemetry = TelemetryMode::Off; Ok(()) })?; (self.notify_handler)(Notification::SetTelemetry("off")); Ok(()) } pub fn telemetry_enabled(&self) -> Result { Ok(match self.settings_file.with(|s| Ok(s.telemetry))? 
{ TelemetryMode::On => true, TelemetryMode::Off => false, }) } pub fn analyze_telemetry(&self) -> Result { let mut t = TelemetryAnalysis::new(self.elan_dir.join("telemetry")); let events = t.import_telemery()?; t.analyze_telemetry_events(&events)?; Ok(t) } } elan-1.3.1/src/elan/env_var.rs000066400000000000000000000021601414005346400161470ustar00rootroot00000000000000use std::env; use std::path::PathBuf; use std::process::Command; pub const LEAN_RECURSION_COUNT_MAX: u32 = 5; #[allow(unused)] pub fn append_path(name: &str, value: Vec, cmd: &mut Command) { let old_value = env::var_os(name); let mut parts: Vec; if let Some(ref v) = old_value { parts = env::split_paths(v).collect(); parts.extend(value); } else { parts = value; } if let Ok(new_value) = env::join_paths(parts) { cmd.env(name, new_value); } } pub fn prepend_path(name: &str, value: Vec, cmd: &mut Command) { let old_value = env::var_os(name); let mut parts: Vec; if let Some(ref v) = old_value { parts = value; parts.extend(env::split_paths(v).collect::>()); } else { parts = value; } if let Ok(new_value) = env::join_paths(parts) { cmd.env(name, new_value); } } pub fn inc(name: &str, cmd: &mut Command) { let old_value = env::var(name) .ok() .and_then(|v| v.parse().ok()) .unwrap_or(0); cmd.env(name, (old_value + 1).to_string()); } elan-1.3.1/src/elan/errors.rs000066400000000000000000000065471414005346400160400ustar00rootroot00000000000000use elan_dist::manifest::Component; use elan_dist::{self, temp}; use elan_utils; use std::path::PathBuf; use toml; error_chain! { links { Dist(elan_dist::Error, elan_dist::ErrorKind); Utils(elan_utils::Error, elan_utils::ErrorKind); } foreign_links { Temp(temp::Error); } errors { UnknownMetadataVersion(v: String) { description("unknown metadata version") display("unknown metadata version: '{}'", v) } ToolchainNotInstalled(t: String) { description("toolchain is not installed") display("toolchain '{}' is not installed", t) } NoDefaultToolchain { description("no default toolchain configured. run `elan default stable` to install & configure the latest Lean 3 community release.") } OverrideToolchainNotInstalled(t: String) { description("override toolchain is not installed") display("override toolchain '{}' is not installed", t) } BinaryNotFound(t: String, bin: String) { description("toolchain does not contain binary") display("toolchain '{}' does not have the binary `{}`", t, bin) } NeedMetadataUpgrade { description("elan's metadata is out of date. 
run `elan self upgrade-data`") } UpgradeIoError { description("I/O error during upgrade") } BadInstallerType(s: String) { description("invalid extension for installer") display("invalid extension for installer: '{}'", s) } ComponentsUnsupported(t: String) { description("toolchain does not support components") display("toolchain '{}' does not support components", t) } UnknownComponent(t: String, c: Component) { description("toolchain does not contain component") display("toolchain '{}' does not contain component {}", t, c.description()) } AddingRequiredComponent(t: String, c: Component) { description("required component cannot be added") display("component {} was automatically added because it is required for toolchain '{}'", c.description(), t) } ParsingSettings(e: toml::de::Error) { description("error parsing settings") } RemovingRequiredComponent(t: String, c: Component) { description("required component cannot be removed") display("component {} is required for toolchain '{}' and cannot be removed", c.description(), t) } NoExeName { description("couldn't determine self executable name") } TelemetryCleanupError { description("unable to remove old telemetry files") } TelemetryAnalysisError { description("error analyzing telemetry files") } InvalidLeanpkgFile(path: PathBuf, error: toml::de::Error) { description("couldn't parse 'leanpkg.toml'") display("couldn't parse '{}': '{}'", path.display(), error) } InvalidLeanVersion(path: PathBuf, t: &'static str) { description("invalid 'package.lean_version' value") display("invalid 'package.lean_version' value in '{}': expected string instead of {}", path.display(), t) } } } elan-1.3.1/src/elan/install.rs000066400000000000000000000042531414005346400161620ustar00rootroot00000000000000//! Installation and upgrade of both distribution-managed and local //! toolchains use elan_dist::dist; use elan_dist::download::DownloadCfg; use elan_dist::prefix::InstallPrefix; use elan_dist::Notification; use elan_utils::utils; use errors::Result; use std::path::Path; #[derive(Copy, Clone)] pub enum InstallMethod<'a> { Copy(&'a Path), Link(&'a Path), // bool is whether to force an update Dist( &'a dist::ToolchainDesc, Option<&'a Path>, DownloadCfg<'a>, bool, ), } impl<'a> InstallMethod<'a> { pub fn run(self, path: &Path, notify_handler: &dyn Fn(Notification)) -> Result { if path.exists() { // Don't uninstall first for Dist method match self { InstallMethod::Dist(..) => {} _ => { uninstall(path, notify_handler)?; } } } match self { InstallMethod::Copy(src) => { utils::copy_dir(src, path, &|n| notify_handler(n.into()))?; Ok(true) } InstallMethod::Link(src) => { utils::symlink_dir(src, &path, &|n| notify_handler(n.into()))?; Ok(true) } InstallMethod::Dist(toolchain, update_hash, dl_cfg, force_update) => { let prefix = &InstallPrefix::from(path.to_owned()); let maybe_new_hash = dist::update_from_dist( dl_cfg, update_hash, toolchain, prefix, &[], &[], force_update, )?; if let Some(hash) = maybe_new_hash { if let Some(hash_file) = update_hash { utils::write_file("update hash", hash_file, &hash)?; } Ok(true) } else { Ok(false) } } } } } pub fn uninstall(path: &Path, notify_handler: &dyn Fn(Notification)) -> Result<()> { Ok(utils::remove_dir("install", path, &|n| { notify_handler(n.into()) })?) 
} elan-1.3.1/src/elan/lib.rs000066400000000000000000000012051414005346400152540ustar00rootroot00000000000000#![recursion_limit = "1024"] extern crate elan_dist; extern crate elan_utils; #[macro_use] extern crate error_chain; extern crate itertools; extern crate regex; extern crate url; #[macro_use] extern crate serde_derive; #[cfg(unix)] extern crate libc; extern crate serde_json; extern crate tempfile; extern crate time; extern crate toml; pub use config::*; pub use elan_utils::{notify, toml_utils, utils}; pub use errors::*; pub use notifications::*; pub use toolchain::*; pub mod command; mod config; pub mod env_var; mod errors; mod install; mod notifications; pub mod settings; pub mod telemetry; pub mod telemetry_analysis; mod toolchain; elan-1.3.1/src/elan/notifications.rs000066400000000000000000000130341414005346400173620ustar00rootroot00000000000000use std::fmt::{self, Display}; use std::path::{Path, PathBuf}; use errors::*; use elan_dist::{self, temp}; use elan_utils; use elan_utils::notify::NotificationLevel; #[derive(Debug)] pub enum Notification<'a> { Install(elan_dist::Notification<'a>), Utils(elan_utils::Notification<'a>), Temp(temp::Notification<'a>), SetDefaultToolchain(&'a str), SetOverrideToolchain(&'a Path, &'a str), LookingForToolchain(&'a str), ToolchainDirectory(&'a Path, &'a str), UpdatingToolchain(&'a str), InstallingToolchain(&'a str), InstalledToolchain(&'a str), UsingExistingToolchain(&'a str), UninstallingToolchain(&'a str), UninstalledToolchain(&'a str), ToolchainNotInstalled(&'a str), UpdateHashMatches, UpgradingMetadata(&'a str, &'a str), MetadataUpgradeNotNeeded(&'a str), WritingMetadataVersion(&'a str), ReadMetadataVersion(&'a str), NonFatalError(&'a Error), UpgradeRemovesToolchains, MissingFileDuringSelfUninstall(PathBuf), SetTelemetry(&'a str), TelemetryCleanupError(&'a Error), } impl<'a> From> for Notification<'a> { fn from(n: elan_dist::Notification<'a>) -> Notification<'a> { Notification::Install(n) } } impl<'a> From> for Notification<'a> { fn from(n: elan_utils::Notification<'a>) -> Notification<'a> { Notification::Utils(n) } } impl<'a> From> for Notification<'a> { fn from(n: temp::Notification<'a>) -> Notification<'a> { Notification::Temp(n) } } impl<'a> Notification<'a> { pub fn level(&self) -> NotificationLevel { use self::Notification::*; match *self { Install(ref n) => n.level(), Utils(ref n) => n.level(), Temp(ref n) => n.level(), ToolchainDirectory(_, _) | LookingForToolchain(_) | WritingMetadataVersion(_) | InstallingToolchain(_) | UpdatingToolchain(_) | ReadMetadataVersion(_) | InstalledToolchain(_) | UpdateHashMatches | TelemetryCleanupError(_) => NotificationLevel::Verbose, SetDefaultToolchain(_) | SetOverrideToolchain(_, _) | UsingExistingToolchain(_) | UninstallingToolchain(_) | UninstalledToolchain(_) | ToolchainNotInstalled(_) | UpgradingMetadata(_, _) | MetadataUpgradeNotNeeded(_) | SetTelemetry(_) => NotificationLevel::Info, NonFatalError(_) => NotificationLevel::Error, UpgradeRemovesToolchains | MissingFileDuringSelfUninstall(_) => NotificationLevel::Warn, } } } impl<'a> Display for Notification<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> ::std::result::Result<(), fmt::Error> { use self::Notification::*; match *self { Install(ref n) => n.fmt(f), Utils(ref n) => n.fmt(f), Temp(ref n) => n.fmt(f), SetDefaultToolchain(name) => write!(f, "default toolchain set to '{}'", name), SetOverrideToolchain(path, name) => { write!( f, "override toolchain for '{}' set to '{}'", path.display(), name ) } LookingForToolchain(name) => write!(f, 
"looking for installed toolchain '{}'", name), ToolchainDirectory(path, _) => write!(f, "toolchain directory: '{}'", path.display()), UpdatingToolchain(name) => write!(f, "updating existing install for '{}'", name), InstallingToolchain(name) => write!(f, "installing toolchain '{}'", name), InstalledToolchain(name) => write!(f, "toolchain '{}' installed", name), UsingExistingToolchain(name) => write!(f, "using existing install for '{}'", name), UninstallingToolchain(name) => write!(f, "uninstalling toolchain '{}'", name), UninstalledToolchain(name) => write!(f, "toolchain '{}' uninstalled", name), ToolchainNotInstalled(name) => write!(f, "no toolchain installed for '{}'", name), UpdateHashMatches => { write!(f, "toolchain is already up to date") } UpgradingMetadata(from_ver, to_ver) => { write!( f, "upgrading metadata version from '{}' to '{}'", from_ver, to_ver ) } MetadataUpgradeNotNeeded(ver) => { write!( f, "nothing to upgrade: metadata version is already '{}'", ver ) } WritingMetadataVersion(ver) => write!(f, "writing metadata version: '{}'", ver), ReadMetadataVersion(ver) => write!(f, "read metadata version: '{}'", ver), NonFatalError(e) => write!(f, "{}", e), UpgradeRemovesToolchains => write!( f, "this upgrade will remove all existing toolchains. you will need to reinstall them" ), MissingFileDuringSelfUninstall(ref p) => { write!( f, "expected file does not exist to uninstall: {}", p.display() ) } SetTelemetry(telemetry_status) => write!(f, "telemetry set to '{}'", telemetry_status), TelemetryCleanupError(e) => write!(f, "unable to remove old telemetry files: '{}'", e), } } } elan-1.3.1/src/elan/settings.rs000066400000000000000000000134011414005346400163470ustar00rootroot00000000000000use errors::*; use notifications::*; use std::cell::RefCell; use std::collections::BTreeMap; use std::path::{Path, PathBuf}; use toml; use toml_utils::*; use utils; pub const SUPPORTED_METADATA_VERSIONS: [&'static str; 2] = ["2", "12"]; pub const DEFAULT_METADATA_VERSION: &'static str = "12"; #[derive(Clone, Debug, PartialEq)] pub struct SettingsFile { path: PathBuf, cache: RefCell>, } impl SettingsFile { pub fn new(path: PathBuf) -> Self { SettingsFile { path: path, cache: RefCell::new(None), } } fn write_settings(&self) -> Result<()> { let s = self.cache.borrow().as_ref().unwrap().clone(); utils::write_file("settings", &self.path, &s.stringify())?; Ok(()) } fn read_settings(&self) -> Result<()> { let mut needs_save = false; { let mut b = self.cache.borrow_mut(); if b.is_none() { *b = Some(if utils::is_file(&self.path) { let content = utils::read_file("settings", &self.path)?; Settings::parse(&content)? } else { needs_save = true; Default::default() }); } } if needs_save { self.write_settings()?; } Ok(()) } pub fn with Result>(&self, f: F) -> Result { self.read_settings()?; // Settings can no longer be None so it's OK to unwrap f(self.cache.borrow().as_ref().unwrap()) } pub fn with_mut Result>(&self, f: F) -> Result { self.read_settings()?; // Settings can no longer be None so it's OK to unwrap let result = { f(self.cache.borrow_mut().as_mut().unwrap())? 
}; self.write_settings()?; Ok(result) } } #[derive(Copy, Clone, Debug, PartialEq)] pub enum TelemetryMode { On, Off, } #[derive(Clone, Debug, PartialEq)] pub struct Settings { pub version: String, pub default_toolchain: Option, pub overrides: BTreeMap, pub telemetry: TelemetryMode, } impl Default for Settings { fn default() -> Self { Settings { version: DEFAULT_METADATA_VERSION.to_owned(), default_toolchain: None, overrides: BTreeMap::new(), telemetry: TelemetryMode::Off, } } } impl Settings { fn path_to_key(path: &Path, notify_handler: &dyn Fn(Notification)) -> String { if path.exists() { utils::canonicalize_path(path, &|n| notify_handler(n.into())) .display() .to_string() } else { path.display().to_string() } } pub fn remove_override(&mut self, path: &Path, notify_handler: &dyn Fn(Notification)) -> bool { let key = Self::path_to_key(path, notify_handler); self.overrides.remove(&key).is_some() } pub fn add_override( &mut self, path: &Path, toolchain: String, notify_handler: &dyn Fn(Notification), ) { let key = Self::path_to_key(path, notify_handler); notify_handler(Notification::SetOverrideToolchain(path, &toolchain)); self.overrides.insert(key, toolchain); } pub fn dir_override( &self, dir: &Path, notify_handler: &dyn Fn(Notification), ) -> Option { let key = Self::path_to_key(dir, notify_handler); self.overrides.get(&key).map(|s| s.clone()) } pub fn parse(data: &str) -> Result { let value = toml::from_str(data).map_err(ErrorKind::ParsingSettings)?; Self::from_toml(value, "") } pub fn stringify(self) -> String { toml::Value::Table(self.to_toml()).to_string() } pub fn from_toml(mut table: toml::value::Table, path: &str) -> Result { let version = get_string(&mut table, "version", path)?; if !SUPPORTED_METADATA_VERSIONS.contains(&&*version) { return Err(ErrorKind::UnknownMetadataVersion(version).into()); } Ok(Settings { version: version, default_toolchain: get_opt_string(&mut table, "default_toolchain", path)?, overrides: Self::table_to_overrides(&mut table, path)?, telemetry: if get_opt_bool(&mut table, "telemetry", path)?.unwrap_or(false) { TelemetryMode::On } else { TelemetryMode::Off }, }) } pub fn to_toml(self) -> toml::value::Table { let mut result = toml::value::Table::new(); result.insert("version".to_owned(), toml::Value::String(self.version)); if let Some(v) = self.default_toolchain { result.insert("default_toolchain".to_owned(), toml::Value::String(v)); } let overrides = Self::overrides_to_table(self.overrides); result.insert("overrides".to_owned(), toml::Value::Table(overrides)); let telemetry = self.telemetry == TelemetryMode::On; result.insert("telemetry".to_owned(), toml::Value::Boolean(telemetry)); result } fn table_to_overrides( table: &mut toml::value::Table, path: &str, ) -> Result> { let mut result = BTreeMap::new(); let pkg_table = get_table(table, "overrides", path)?; for (k, v) in pkg_table { if let toml::Value::String(t) = v { result.insert(k, t); } } Ok(result) } fn overrides_to_table(overrides: BTreeMap) -> toml::value::Table { let mut result = toml::value::Table::new(); for (k, v) in overrides { result.insert(k, toml::Value::String(v)); } result } } elan-1.3.1/src/elan/telemetry.rs000066400000000000000000000040511414005346400165220ustar00rootroot00000000000000use errors::*; use std::fs; use std::path::PathBuf; #[derive(Deserialize, Serialize, Debug, Clone)] pub enum TelemetryEvent { LeanRun { duration_ms: u64, exit_code: i32, errors: Option>, }, ToolchainUpdate { toolchain: String, success: bool, }, TargetAdd { toolchain: String, target: String, success: bool, }, 
} #[derive(Deserialize, Serialize, Debug)] pub struct LogMessage { log_time_s: i64, event: TelemetryEvent, version: i32, } impl LogMessage { pub fn get_event(&self) -> TelemetryEvent { self.event.clone() } } #[derive(Debug)] pub struct Telemetry { telemetry_dir: PathBuf, } #[allow(dead_code)] const LOG_FILE_VERSION: i32 = 1; const MAX_TELEMETRY_FILES: usize = 100; impl Telemetry { pub fn new(telemetry_dir: PathBuf) -> Telemetry { Telemetry { telemetry_dir: telemetry_dir, } } pub fn log_telemetry(&self, _event: TelemetryEvent) -> Result<()> { Ok(()) } pub fn clean_telemetry_dir(&self) -> Result<()> { let telemetry_dir_contents = self.telemetry_dir.read_dir(); let contents = telemetry_dir_contents.chain_err(|| ErrorKind::TelemetryCleanupError)?; let mut telemetry_files: Vec = Vec::new(); for c in contents { let x = c.unwrap(); let filename = x.path().file_name().unwrap().to_str().unwrap().to_owned(); if filename.starts_with("log") && filename.ends_with("json") { telemetry_files.push(x.path()); } } if telemetry_files.len() < MAX_TELEMETRY_FILES { return Ok(()); } let dl: usize = telemetry_files.len() - MAX_TELEMETRY_FILES; let dl = dl + 1 as usize; telemetry_files.sort(); telemetry_files.dedup(); for i in 0..dl { let i = i as usize; fs::remove_file(&telemetry_files[i]).chain_err(|| ErrorKind::TelemetryCleanupError)?; } Ok(()) } } elan-1.3.1/src/elan/telemetry_analysis.rs000066400000000000000000000226341414005346400204340ustar00rootroot00000000000000use std::collections::HashMap; use std::fmt; use std::fs::File; use std::io::BufRead; use std::io::BufReader; use std::path::PathBuf; use serde_json; use errors::*; use telemetry::{LogMessage, TelemetryEvent}; pub struct TelemetryAnalysis { telemetry_dir: PathBuf, rustc_statistics: RustcStatistics, rustc_success_statistics: RustcStatistics, rustc_error_statistics: RustcStatistics, } #[derive(Default)] pub struct RustcStatistics { rustc_execution_count: u32, compile_time_ms_total: u64, compile_time_ms_mean: u64, compile_time_ms_ntile_75: u64, compile_time_ms_ntile_90: u64, compile_time_ms_ntile_95: u64, compile_time_ms_ntile_99: u64, compile_time_ms_stdev: f64, exit_codes_with_count: HashMap, error_codes_with_counts: HashMap, } impl RustcStatistics { pub fn new() -> RustcStatistics { Default::default() } } impl fmt::Display for RustcStatistics { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut errors: String = String::new(); if !self.error_codes_with_counts.is_empty() { errors = " rustc errors\n".to_owned(); for (error, count) in &self.error_codes_with_counts { errors = errors + &format!(" '{}': {}\n", error, count); } } let mut exits: String = String::new(); if !self.exit_codes_with_count.is_empty() { exits = " rustc exit codes\n".to_owned(); for (exit, count) in &self.exit_codes_with_count { exits = exits + &format!(" {}: {}\n", exit, count); } } write!( f, r" Total compiles: {} Compile Time (ms) Total : {} Mean : {} STDEV : {} 75th : {} 90th : {} 95th : {} 99th : {} {} {}", self.rustc_execution_count, self.compile_time_ms_total, self.compile_time_ms_mean, self.compile_time_ms_stdev, self.compile_time_ms_ntile_75, self.compile_time_ms_ntile_90, self.compile_time_ms_ntile_95, self.compile_time_ms_ntile_99, errors, exits ) } } impl fmt::Display for TelemetryAnalysis { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, r" Overall rustc statistics: {} rustc successful execution statistics {} rustc error statistics {}", self.rustc_statistics, self.rustc_success_statistics, self.rustc_error_statistics ) } } impl 
TelemetryAnalysis { pub fn new(telemetry_dir: PathBuf) -> TelemetryAnalysis { TelemetryAnalysis { telemetry_dir: telemetry_dir, rustc_statistics: RustcStatistics::new(), rustc_success_statistics: RustcStatistics::new(), rustc_error_statistics: RustcStatistics::new(), } } pub fn import_telemery(&mut self) -> Result> { let mut events: Vec = Vec::new(); let contents = self .telemetry_dir .read_dir() .chain_err(|| ErrorKind::TelemetryAnalysisError)?; let mut telemetry_files: Vec = Vec::new(); for c in contents { let x = c.unwrap(); let filename = x.path().file_name().unwrap().to_str().unwrap().to_owned(); if filename.starts_with("log") && filename.ends_with("json") { telemetry_files.push(x.path()); match self.read_telemetry_file(x.path()) { Ok(y) => events.extend(y), Err(e) => return Err(e), }; } } Ok(events) } fn read_telemetry_file(&self, path: PathBuf) -> Result> { let mut events: Vec = Vec::new(); let f = File::open(&path).chain_err(|| ErrorKind::TelemetryAnalysisError)?; let file = BufReader::new(&f); for line in file.lines() { let l = line.unwrap(); let log_message_result = serde_json::from_str(&l); if log_message_result.is_ok() { let log_message: LogMessage = log_message_result.unwrap(); let event: TelemetryEvent = log_message.get_event(); events.push(event); } } Ok(events) } pub fn analyze_telemetry_events(&mut self, events: &[TelemetryEvent]) -> Result<()> { let mut rustc_durations = Vec::new(); let mut rustc_exit_codes = Vec::new(); let mut rustc_successful_durations = Vec::new(); let mut rustc_error_durations = Vec::new(); let mut error_list: Vec> = Vec::new(); let mut error_codes_with_counts: HashMap = HashMap::new(); let mut toolchains = Vec::new(); let mut toolchains_with_errors = Vec::new(); let mut targets = Vec::new(); let mut updated_toolchains = Vec::new(); let mut updated_toolchains_with_errors = Vec::new(); for event in events { match *event { TelemetryEvent::LeanRun { duration_ms, ref exit_code, ref errors, } => { self.rustc_statistics.rustc_execution_count += 1; rustc_durations.push(duration_ms); let exit_count = self .rustc_statistics .exit_codes_with_count .entry(*exit_code) .or_insert(0); *exit_count += 1; rustc_exit_codes.push(exit_code); if errors.is_some() { let errors = errors.clone().unwrap(); for e in &errors { let error_count = error_codes_with_counts.entry(e.to_owned()).or_insert(0); *error_count += 1; } error_list.push(errors); rustc_error_durations.push(duration_ms); } else { rustc_successful_durations.push(duration_ms); } } TelemetryEvent::TargetAdd { ref toolchain, ref target, success, } => { toolchains.push(toolchain.to_owned()); targets.push(target.to_owned()); if !success { toolchains_with_errors.push(toolchain.to_owned()); } } TelemetryEvent::ToolchainUpdate { ref toolchain, success, } => { updated_toolchains.push(toolchain.to_owned()); if !success { updated_toolchains_with_errors.push(toolchain.to_owned()); } } } } self.rustc_statistics = compute_rustc_percentiles(&rustc_durations); self.rustc_error_statistics = compute_rustc_percentiles(&rustc_error_durations); self.rustc_error_statistics.error_codes_with_counts = error_codes_with_counts; self.rustc_success_statistics = compute_rustc_percentiles(&rustc_successful_durations); let error_list = error_list.into_iter().flatten(); for e in error_list { let error_count = self .rustc_statistics .error_codes_with_counts .entry(e) .or_insert(0); *error_count += 1; } Ok(()) } } pub fn compute_rustc_percentiles(values: &[u64]) -> RustcStatistics { RustcStatistics { rustc_execution_count: (values.len() as u32), 
compile_time_ms_total: values.iter().fold(0, |sum, val| sum + val), compile_time_ms_mean: mean(values), compile_time_ms_ntile_75: ntile(75, values), compile_time_ms_ntile_90: ntile(90, values), compile_time_ms_ntile_95: ntile(95, values), compile_time_ms_ntile_99: ntile(99, values), compile_time_ms_stdev: stdev(values), exit_codes_with_count: HashMap::new(), error_codes_with_counts: HashMap::new(), } } pub fn ntile(percentile: i32, values: &[u64]) -> u64 { if values.is_empty() { return 0u64; } let mut values = values.to_owned(); values.sort(); let count = values.len() as f32; let percentile = (percentile as f32) / 100f32; let n = (count * percentile).ceil() - 1f32; let n = n as usize; values[n] } pub fn mean(values: &[u64]) -> u64 { if values.is_empty() { return 0; } let count = values.len() as f64; let sum = values.iter().fold(0, |sum, val| sum + val) as f64; (sum / count) as u64 } pub fn variance(values: &[u64]) -> f64 { if values.is_empty() { return 0f64; } let mean = mean(values); let mut deviations: Vec = Vec::new(); for v in values.iter() { let x = (*v as i64) - (mean as i64); deviations.push(x * x); } let sum = deviations.iter().fold(0, |sum, val| sum + val) as f64; sum / (values.len() as f64) } pub fn stdev(values: &[u64]) -> f64 { if values.is_empty() { return 0f64; } let variance = variance(values); variance.sqrt() } elan-1.3.1/src/elan/toolchain.rs000066400000000000000000000301621414005346400164720ustar00rootroot00000000000000use config::Cfg; use elan_dist; use elan_dist::dist::ToolchainDesc; use elan_dist::download::DownloadCfg; use elan_dist::manifest::Component; use elan_utils::utils; use env_var; use errors::*; use install::{self, InstallMethod}; use notifications::*; use telemetry; use telemetry::{Telemetry, TelemetryEvent}; use std::env; use std::env::consts::EXE_SUFFIX; use std::ffi::OsStr; use std::ffi::OsString; use std::path::{Path, PathBuf}; use std::process::Command; /// A fully resolved reference to a toolchain which may or may not exist pub struct Toolchain<'a> { cfg: &'a Cfg, name: String, dir_name: String, path: PathBuf, telemetry: telemetry::Telemetry, dist_handler: Box, } /// Used by the `list_component` function pub struct ComponentStatus { pub component: Component, pub required: bool, pub installed: bool, pub available: bool, } pub enum UpdateStatus { Installed, Updated, Unchanged, } impl<'a> Toolchain<'a> { pub fn from(cfg: &'a Cfg, name: &str) -> Result { //We need to replace ":" and "/" with "-" in the toolchain name in order to make a name which is a valid //name for a directory. let dir_name = name.replace("/", "--").replace(":", "---"); let path = cfg.toolchains_dir.join(&dir_name[..]); Ok(Toolchain { cfg: cfg, name: name.to_owned(), dir_name: dir_name, path: path.clone(), telemetry: Telemetry::new(cfg.elan_dir.join("telemetry")), dist_handler: Box::new(move |n| (cfg.notify_handler)(n.into())), }) } pub fn name(&self) -> &str { &self.name } pub fn desc(&self) -> Result { Ok(ToolchainDesc::from_str(&self.name)?) } pub fn path(&self) -> &Path { &self.path } fn is_symlink(&self) -> bool { use std::fs; fs::symlink_metadata(&self.path) .map(|m| m.file_type().is_symlink()) .unwrap_or(false) } pub fn exists(&self) -> bool { // HACK: linked toolchains are symlinks, and, contrary to what std docs // lead me to believe `fs::metadata`, used by `is_directory` does not // seem to follow symlinks on windows. 
utils::is_directory(&self.path) || self.is_symlink() } pub fn is_custom(&self) -> bool { assert!(self.exists()); self.is_symlink() } pub fn verify(&self) -> Result<()> { Ok(utils::assert_is_directory(&self.path)?) } pub fn remove(&self) -> Result<()> { if self.exists() || self.is_symlink() { (self.cfg.notify_handler)(Notification::UninstallingToolchain(&self.name)); } else { (self.cfg.notify_handler)(Notification::ToolchainNotInstalled(&self.name)); return Ok(()); } if let Some(update_hash) = self.update_hash()? { utils::remove_file("update hash", &update_hash)?; } let result = install::uninstall(&self.path, &|n| (self.cfg.notify_handler)(n.into())); if !self.exists() { (self.cfg.notify_handler)(Notification::UninstalledToolchain(&self.name)); } Ok(result?) } fn install(&self, install_method: InstallMethod) -> Result { let exists = self.exists(); if exists { (self.cfg.notify_handler)(Notification::UpdatingToolchain(&self.name)); } else { (self.cfg.notify_handler)(Notification::InstallingToolchain(&self.name)); } (self.cfg.notify_handler)(Notification::ToolchainDirectory(&self.path, &self.name)); let updated = install_method.run(&self.path, &|n| (self.cfg.notify_handler)(n.into()))?; if !updated { (self.cfg.notify_handler)(Notification::UpdateHashMatches); } else { (self.cfg.notify_handler)(Notification::InstalledToolchain(&self.name)); } let status = match (updated, exists) { (true, false) => UpdateStatus::Installed, (true, true) => UpdateStatus::Updated, (false, true) => UpdateStatus::Unchanged, (false, false) => UpdateStatus::Unchanged, }; Ok(status) } fn install_if_not_installed(&self, install_method: InstallMethod) -> Result { (self.cfg.notify_handler)(Notification::LookingForToolchain(&self.name)); if !self.exists() { Ok(self.install(install_method)?) } else { (self.cfg.notify_handler)(Notification::UsingExistingToolchain(&self.name)); Ok(UpdateStatus::Unchanged) } } fn update_hash(&self) -> Result> { if self.is_symlink() { Ok(None) } else { Ok(Some(self.cfg.get_hash_file(&self.dir_name, true)?)) } } fn download_cfg(&self) -> DownloadCfg { DownloadCfg { temp_cfg: &self.cfg.temp_cfg, download_dir: &self.cfg.download_dir, notify_handler: &*self.dist_handler, } } pub fn install_from_dist(&self, force_update: bool) -> Result { if self.cfg.telemetry_enabled()? 
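        // Telemetry is opt-in; when the user has enabled it, route the update through
        // the wrapper below so a ToolchainUpdate event is also logged.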
        {
            return self.install_from_dist_with_telemetry(force_update);
        }
        self.install_from_dist_inner(force_update)
    }

    pub fn install_from_dist_inner(&self, force_update: bool) -> Result<UpdateStatus> {
        let update_hash = self.update_hash()?;
        self.install(InstallMethod::Dist(
            &self.desc()?,
            update_hash.as_ref().map(|p| &**p),
            self.download_cfg(),
            force_update,
        ))
    }

    pub fn install_from_dist_with_telemetry(&self, force_update: bool) -> Result<UpdateStatus> {
        let result = self.install_from_dist_inner(force_update);

        match result {
            Ok(us) => {
                let te = TelemetryEvent::ToolchainUpdate {
                    toolchain: self.name().to_string(),
                    success: true,
                };
                match self.telemetry.log_telemetry(te) {
                    Ok(_) => Ok(us),
                    Err(e) => {
                        (self.cfg.notify_handler)(Notification::TelemetryCleanupError(&e));
                        Ok(us)
                    }
                }
            }
            Err(e) => {
                let te = TelemetryEvent::ToolchainUpdate {
                    toolchain: self.name().to_string(),
                    success: false,
                };
                let _ = self.telemetry.log_telemetry(te).map_err(|xe| {
                    (self.cfg.notify_handler)(Notification::TelemetryCleanupError(&xe));
                });
                Err(e)
            }
        }
    }

    pub fn install_from_dist_if_not_installed(&self) -> Result<UpdateStatus> {
        let update_hash = self.update_hash()?;
        self.install_if_not_installed(InstallMethod::Dist(
            &self.desc()?,
            update_hash.as_ref().map(|p| &**p),
            self.download_cfg(),
            false,
        ))
    }

    pub fn is_tracking(&self) -> bool {
        ToolchainDesc::from_str(&self.name)
            .ok()
            .map(|d| d.is_tracking())
            == Some(true)
    }

    pub fn install_from_dir(&self, src: &Path, link: bool) -> Result<()> {
        let mut pathbuf = PathBuf::from(src);

        pathbuf.push("bin");
        utils::assert_is_directory(&pathbuf)?;
        pathbuf.push(format!("lean{}", EXE_SUFFIX));
        utils::assert_is_file(&pathbuf)?;

        if link {
            self.install(InstallMethod::Link(&utils::to_absolute(src)?))?;
        } else {
            self.install(InstallMethod::Copy(src))?;
        }

        Ok(())
    }

    pub fn create_command<T: AsRef<OsStr>>(&self, binary: T) -> Result<Command> {
        if !self.exists() {
            return Err(ErrorKind::ToolchainNotInstalled(self.name.to_owned()).into());
        }

        let bin_path = self.binary_file(&binary);
        let path = if utils::is_file(&bin_path) {
            &bin_path
        } else {
            let recursion_count = env::var("LEAN_RECURSION_COUNT")
                .ok()
                .and_then(|s| s.parse().ok())
                .unwrap_or(0);
            if recursion_count > env_var::LEAN_RECURSION_COUNT_MAX - 1 {
                return Err(ErrorKind::BinaryNotFound(
                    self.name.clone(),
                    bin_path.to_str().unwrap().into(),
                )
                .into());
            }
            Path::new(&binary)
        };
        let mut cmd: Command;
        if cfg!(windows) && path.extension() == None {
            cmd = Command::new("sh");
            cmd.arg(format!("'{}'", path.to_str().unwrap()));
        } else {
            cmd = Command::new(&path);
        };
        self.set_env(&mut cmd);
        Ok(cmd)
    }

    fn set_env(&self, cmd: &mut Command) {
        self.set_ldpath(cmd);

        // Because elan and leanpkg use slightly different
        // definitions of leanpkg home (elan doesn't read HOME on
        // windows), we must set it here to ensure leanpkg and
        // elan agree.
        if let Ok(elan_home) = utils::elan_home() {
            cmd.env("ELAN_HOME", &elan_home);
        }

        env_var::inc("LEAN_RECURSION_COUNT", cmd);

        cmd.env("ELAN_TOOLCHAIN", &self.name);
        cmd.env("ELAN_HOME", &self.cfg.elan_dir);
    }

    pub fn set_ldpath(&self, cmd: &mut Command) {
        let new_path = self.path.join("lib");

        #[cfg(not(target_os = "macos"))]
        mod sysenv {
            pub const LOADER_PATH: &'static str = "LD_LIBRARY_PATH";
        }
        #[cfg(target_os = "macos")]
        mod sysenv {
            pub const LOADER_PATH: &'static str = "DYLD_LIBRARY_PATH";
        }
        env_var::prepend_path(sysenv::LOADER_PATH, vec![new_path.clone()], cmd);

        // Prepend ELAN_HOME/bin to the PATH variable so that we're sure to run
        // leanpkg/lean via the proxy bins. There is no fallback case for if the
        // proxy bins don't exist. We'll just be running whatever happens to
        // be on the PATH.
        let mut path_entries = vec![];
        if let Ok(elan_home) = utils::elan_home() {
            path_entries.push(elan_home.join("bin").to_path_buf());
        }

        if cfg!(target_os = "windows") {
            path_entries.push(self.path.join("bin"));
        }

        env_var::prepend_path("PATH", path_entries, cmd);
    }

    pub fn doc_path(&self, relative: &str) -> Result<PathBuf> {
        self.verify()?;

        let parts = vec!["share", "doc", "lean", "html"];
        let mut doc_dir = self.path.clone();
        for part in parts {
            doc_dir.push(part);
        }
        doc_dir.push(relative);

        Ok(doc_dir)
    }

    pub fn open_docs(&self, relative: &str) -> Result<()> {
        self.verify()?;

        Ok(utils::open_browser(&self.doc_path(relative)?)?)
    }

    pub fn make_default(&self) -> Result<()> {
        self.cfg.set_default(&self.name)
    }

    pub fn make_override(&self, path: &Path) -> Result<()> {
        Ok(self.cfg.settings_file.with_mut(|s| {
            s.add_override(path, self.name.clone(), self.cfg.notify_handler.as_ref());
            Ok(())
        })?)
    }

    pub fn binary_file<T: AsRef<OsStr>>(&self, binary: T) -> PathBuf {
        let binary = if let Some(binary_str) = binary.as_ref().to_str() {
            let binary_str = binary_str.to_lowercase();
            let path = Path::new(&binary_str);
            if path.extension().is_some() {
                binary.as_ref().to_owned()
            } else {
                let ext = EXE_SUFFIX;
                OsString::from(format!("{}{}", binary_str, ext))
            }
        } else {
            // Very weird case. Non-unicode command.
            binary.as_ref().to_owned()
        };

        let path = self.path.join("bin").join(&binary);
        if cfg!(windows) && !path.exists() && path.with_extension("bat").exists() {
            // leanpkg.bat
            path.with_extension("bat")
        } else if cfg!(windows) && !path.exists() && path.with_extension("").exists() {
            // leanc (sh script)
            path.with_extension("")
        } else {
            path
        }
    }
}
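The directory-name mangling performed in `Toolchain::from` earlier in this file (replacing `/` with `--` and `:` with `---`) determines how toolchains are laid out on disk under the toolchains directory. The following standalone sketch only illustrates that replacement rule; `toolchain_dir_name` is a hypothetical helper, not part of elan, and the sample toolchain names are examples.

```rust
/// Illustration of the directory-name mangling used by `Toolchain::from`:
/// "/" becomes "--" and ":" becomes "---" so the result is a valid directory name.
fn toolchain_dir_name(name: &str) -> String {
    name.replace("/", "--").replace(":", "---")
}

fn main() {
    // Example names; any name containing "/" or ":" follows the same rule.
    for name in &["stable", "leanprover/lean4:nightly"] {
        println!("{} -> {}", name, toolchain_dir_name(name));
    }
    // Prints:
    //   stable -> stable
    //   leanprover/lean4:nightly -> leanprover--lean4---nightly
}
```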