cabal-install-3.8.1.0/0000755000000000000000000000000007346545000012560 5ustar0000000000000000cabal-install-3.8.1.0/LICENSE0000644000000000000000000000317507346545000013573 0ustar0000000000000000Copyright (c) 2003-2022, Cabal Development Team. See the AUTHORS file for the full list of copyright holders. See */LICENSE for the copyright holders of the subcomponents. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Isaac Jones nor the names of other contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
cabal-install-3.8.1.0/README.md0000644000000000000000000000065407346545000014044 0ustar0000000000000000The cabal-install package ========================= See the [Cabal web site] for more information. The `cabal-install` package provides a command line tool named `cabal`. It uses the Cabal library and provides a user interface to the Cabal/Hackage build automation and package management system. It can build and install both local and remote packages, including dependencies. [Cabal web site]: http://www.haskell.org/cabal/ cabal-install-3.8.1.0/Setup.hs0000644000000000000000000000007407346545000014215 0ustar0000000000000000import Distribution.Simple main :: IO () main = defaultMain cabal-install-3.8.1.0/bash-completion/0000755000000000000000000000000007346545000015644 5ustar0000000000000000cabal-install-3.8.1.0/bash-completion/cabal0000644000000000000000000001573607346545000016645 0ustar0000000000000000#!/bin/bash ################################################## # cabal command line completion # # Copyright 2007-2008 "Lennart Kolmodin" # "Duncan Coutts" # Copyright 2019- "Sam Boosalis" # # Compatibility — Bash 3. # # OSX won't update Bash 3 (last updated circa 2009) to Bash 4, # and we'd like this completion script to work on both Linux and Mac. # # For example, OSX Yosemite (released circa 2014) ships with Bash 3: # # $ echo $BASH_VERSION # 3.2 # # While Ubuntu LTS 14.04 (a.k.a. 
Trusty, also released circa 2016) # ships with the latest version, Bash 4 (updated circa 2016): # # $ echo $BASH_VERSION # 4.3 # # Testing # # (1) Invoke « shellcheck » # # * source: « https://github.com/koalaman/shellcheck » # * run: « shellcheck ./cabal-install/bash-completion/cabal » # # (2) Interpret via Bash 3 # # * source: « https://ftp.gnu.org/gnu/bash/bash-3.2.tar.gz » # * run: « bash --noprofile --norc --posix ./cabal-install/bash-completion/cabal » # # ################################################## # Dependencies: command -v cabal >/dev/null command -v grep >/dev/null command -v sed >/dev/null ################################################## # List project-specific (/ internal) packages: # # function _cabal_list_packages () ( shopt -s nullglob local CabalFiles CabalFiles=( ./*.cabal ./*/*.cabal ./*/*/*.cabal ) for FILE in "${CabalFiles[@]}" do BASENAME=$(basename "$FILE") PACKAGE="${BASENAME%.cabal}" echo "$PACKAGE" done | sort | uniq ) # NOTES # # [1] « "${string%suffix}" » strips « suffix » from « string », # in pure Bash. # # [2] « done | sort | uniq » removes duplicates from the output of the for-loop. # ################################################## # List cabal targets by type, pass: # # - ‹test-suite› for test suites # - ‹benchmark› for benchmarks # - ‹executable› for executables # - ‹library› for internal libraries # - ‹foreign-library› for foreign libraries # - nothing for all components. # function _cabal_list_targets () ( shopt -s nullglob # ^ NOTE « _cabal_list_targets » must be a subshell to temporarily enable « nullglob ». # hence, « function _ () ( ... ) » over « function _ () { ... } ». # without « nullglob », if a glob-pattern fails, it becomes a literal # (i.e. the string with an asterix, rather than an empty string). 
CabalComponent=${1:-library|executable|test-suite|benchmark|foreign-library} local CabalFiles CabalFiles=( ./*.cabal ./*/*.cabal ./*/*/*.cabal ) for FILE in "${CabalFiles[@]}" do grep -E -i "^[[:space:]]*($CabalComponent)[[:space:]]" "$FILE" 2>/dev/null | sed -e "s/.* \([^ ]*\).*/\1/" | sed -e '/^$/d' done | sort | uniq ) # NOTES # # [1] in « sed '/^$/d' »: # # * « d » is the sed command to delete a line. # * « ^$ » is a regular expression matching only a blank line # (i.e. a line start followed by a line end). # # dropping blank lines is necessary to ignore public « library » stanzas, # while still matching private « library _ » stanzas. # # [2] # #TODO# rm duplicate components and qualify with « PACKAGE: » (from basename): # # $ .. | sort | uniq ################################################## # List possible targets depending on the command supplied as parameter. The # ideal option would be to implement this via --list-options on cabal directly. # This is a temporary workaround. function _cabal_targets () { local Completion for Completion in "$@"; do [ "$Completion" == new-build ] && _cabal_list_targets && break [ "$Completion" == new-repl ] && _cabal_list_targets && break [ "$Completion" == new-run ] && _cabal_list_targets "executable" && break [ "$Completion" == new-test ] && _cabal_list_targets "test-suite" && break [ "$Completion" == new-bench ] && _cabal_list_targets "benchmark" && break [ "$Completion" == new-haddock ] && _cabal_list_targets && break [ "$Completion" == new-install ] && _cabal_list_targets "executable" && break # ^ Only complete for local packages (not all 1000s of remote packages). 
[ "$Completion" == build ] && _cabal_list_targets "executable|test-suite|benchmark" && break [ "$Completion" == repl ] && _cabal_list_targets "executable|test-suite|benchmark" && break [ "$Completion" == run ] && _cabal_list_targets "executable" && break [ "$Completion" == test ] && _cabal_list_targets "test-suite" && break [ "$Completion" == bench ] && _cabal_list_targets "benchmark" && break done } # NOTES # # [1] « $@ » will be the full command-line (so far). # # [2] # ################################################## # List possible subcommands of a cabal subcommand. # # In example "sandbox" is a cabal subcommand that itself has subcommands. Since # "cabal --list-options" doesn't work in such cases we have to get the list # using other means. function _cabal_subcommands () { local word for word in "$@"; do case "$word" in sandbox) # Get list of "cabal sandbox" subcommands from its help message. "$1" help sandbox | sed -n '1,/^Subcommands:$/d;/^Flags for sandbox:$/,$d;/^ /d;s/^\(.*\):/\1/p' break # Terminate for loop. ;; esac done } ################################################## function __cabal_has_doubledash () { local c=1 # Ignore the last word, because it is replaced anyways. # This allows expansion for flags on "cabal foo --", # but does not try to complete after "cabal foo -- ". local n=$((${#COMP_WORDS[@]} - 1)) while [ $c -lt $n ]; do if [ "--" = "${COMP_WORDS[c]}" ]; then return 0 fi ((c++)) done return 1 } ################################################## function _cabal () { # no completion past cabal arguments. 
__cabal_has_doubledash && return # get the word currently being completed local CurrentWord CurrentWord=${COMP_WORDS[$COMP_CWORD]} # create a command line to run local CommandLine # copy all words the user has entered CommandLine=( "${COMP_WORDS[@]}" ) # replace the current word with --list-options CommandLine[${COMP_CWORD}]="--list-options" # the resulting completions should be put into this array COMPREPLY=( $( compgen -W "$( eval "${CommandLine[@]}" 2>/dev/null ) $( _cabal_targets "${CommandLine[@]}" ) $( _cabal_subcommands "${COMP_WORDS[@]}" )" -- "$CurrentWord" ) ) } # abc="a b c" # { IFS=" " read -a ExampleArray <<< "$abc"; echo ${ExampleArray[@]}; echo ${!ExampleArray[@]}; } ################################################## complete -F _cabal -o default cabal cabal-install-3.8.1.0/cabal-install.cabal0000644000000000000000000003454607346545000016266 0ustar0000000000000000Cabal-Version: 2.2 Name: cabal-install Version: 3.8.1.0 Synopsis: The command-line interface for Cabal and Hackage. Description: The \'cabal\' command-line program simplifies the process of managing Haskell software by automating the fetching, configuration, compilation and installation of Haskell libraries and programs. 
homepage: http://www.haskell.org/cabal/ bug-reports: https://github.com/haskell/cabal/issues License: BSD-3-Clause License-File: LICENSE Author: Cabal Development Team (see AUTHORS file) Maintainer: Cabal Development Team Copyright: 2003-2022, Cabal Development Team Category: Distribution Build-type: Simple Extra-Source-Files: README.md bash-completion/cabal changelog source-repository head type: git location: https://github.com/haskell/cabal/ subdir: cabal-install Flag native-dns description: Enable use of the [resolv](https://hackage.haskell.org/package/resolv) & [windns](https://hackage.haskell.org/package/windns) packages for performing DNS lookups default: True manual: True Flag lukko description: Use @lukko@ for file-locking default: True manual: True common warnings ghc-options: -Wall -Wcompat -Wnoncanonical-monad-instances -Wincomplete-uni-patterns -Wincomplete-record-updates if impl(ghc < 8.8) ghc-options: -Wnoncanonical-monadfail-instances if impl(ghc >=8.10) ghc-options: -Wunused-packages common base-dep build-depends: base >=4.10 && <4.17 common cabal-dep build-depends: Cabal ^>=3.8 common cabal-syntax-dep build-depends: Cabal-syntax ^>=3.8 common cabal-install-solver-dep build-depends: cabal-install-solver ^>=3.8 library import: warnings, base-dep, cabal-dep, cabal-syntax-dep, cabal-install-solver-dep default-language: Haskell2010 hs-source-dirs: src exposed-modules: -- this modules are moved from Cabal -- they are needed for as long until cabal-install moves to parsec parser Distribution.Deprecated.ParseUtils Distribution.Deprecated.ReadP Distribution.Deprecated.ViewAsFieldDescr Distribution.Client.BuildReports.Anonymous Distribution.Client.BuildReports.Lens Distribution.Client.BuildReports.Storage Distribution.Client.BuildReports.Types Distribution.Client.BuildReports.Upload Distribution.Client.Check Distribution.Client.CmdBench Distribution.Client.CmdBuild Distribution.Client.CmdClean Distribution.Client.CmdConfigure 
Distribution.Client.CmdErrorMessages Distribution.Client.CmdExec Distribution.Client.CmdFreeze Distribution.Client.CmdHaddock Distribution.Client.CmdInstall Distribution.Client.CmdInstall.ClientInstallFlags Distribution.Client.CmdInstall.ClientInstallTargetSelector Distribution.Client.CmdLegacy Distribution.Client.CmdListBin Distribution.Client.CmdOutdated Distribution.Client.CmdRepl Distribution.Client.CmdRun Distribution.Client.CmdSdist Distribution.Client.CmdTest Distribution.Client.CmdUpdate Distribution.Client.Compat.Directory Distribution.Client.Compat.ExecutablePath Distribution.Client.Compat.Orphans Distribution.Client.Compat.Prelude Distribution.Client.Compat.Process Distribution.Client.Compat.Semaphore Distribution.Client.Config Distribution.Client.Configure Distribution.Client.Dependency Distribution.Client.Dependency.Types Distribution.Client.DistDirLayout Distribution.Client.Fetch Distribution.Client.FetchUtils Distribution.Client.FileMonitor Distribution.Client.Freeze Distribution.Client.GZipUtils Distribution.Client.GenBounds Distribution.Client.Get Distribution.Client.Glob Distribution.Client.GlobalFlags Distribution.Client.Haddock Distribution.Client.HashValue Distribution.Client.HttpUtils Distribution.Client.IndexUtils Distribution.Client.IndexUtils.ActiveRepos Distribution.Client.IndexUtils.IndexState Distribution.Client.IndexUtils.Timestamp Distribution.Client.Init Distribution.Client.Init.Defaults Distribution.Client.Init.FileCreators Distribution.Client.Init.FlagExtractors Distribution.Client.Init.Format Distribution.Client.Init.Interactive.Command Distribution.Client.Init.NonInteractive.Command Distribution.Client.Init.NonInteractive.Heuristics Distribution.Client.Init.Licenses Distribution.Client.Init.Prompt Distribution.Client.Init.Simple Distribution.Client.Init.Types Distribution.Client.Init.Utils Distribution.Client.Install Distribution.Client.InstallPlan Distribution.Client.InstallSymlink Distribution.Client.JobControl 
Distribution.Client.List Distribution.Client.Manpage Distribution.Client.ManpageFlags Distribution.Client.Nix Distribution.Client.NixStyleOptions Distribution.Client.PackageHash Distribution.Client.ParseUtils Distribution.Client.ProjectBuilding Distribution.Client.ProjectBuilding.Types Distribution.Client.ProjectConfig Distribution.Client.ProjectConfig.Legacy Distribution.Client.ProjectConfig.Types Distribution.Client.ProjectFlags Distribution.Client.ProjectOrchestration Distribution.Client.ProjectPlanOutput Distribution.Client.ProjectPlanning Distribution.Client.ProjectPlanning.Types Distribution.Client.RebuildMonad Distribution.Client.Reconfigure Distribution.Client.Run Distribution.Client.Sandbox Distribution.Client.Sandbox.PackageEnvironment Distribution.Client.SavedFlags Distribution.Client.ScriptUtils Distribution.Client.Security.DNS Distribution.Client.Security.HTTP Distribution.Client.Setup Distribution.Client.SetupWrapper Distribution.Client.SolverInstallPlan Distribution.Client.SourceFiles Distribution.Client.SrcDist Distribution.Client.Store Distribution.Client.Tar Distribution.Client.TargetProblem Distribution.Client.TargetSelector Distribution.Client.Targets Distribution.Client.Types Distribution.Client.Types.AllowNewer Distribution.Client.Types.BuildResults Distribution.Client.Types.ConfiguredId Distribution.Client.Types.ConfiguredPackage Distribution.Client.Types.Credentials Distribution.Client.Types.InstallMethod Distribution.Client.Types.OverwritePolicy Distribution.Client.Types.PackageLocation Distribution.Client.Types.PackageSpecifier Distribution.Client.Types.ReadyPackage Distribution.Client.Types.Repo Distribution.Client.Types.RepoName Distribution.Client.Types.SourcePackageDb Distribution.Client.Types.SourceRepo Distribution.Client.Types.WriteGhcEnvironmentFilesPolicy Distribution.Client.Upload Distribution.Client.Utils Distribution.Client.Utils.Json Distribution.Client.Utils.Parsec Distribution.Client.VCS Distribution.Client.Version 
Distribution.Client.Win32SelfUpgrade build-depends: async >= 2.0 && < 2.3, array >= 0.4 && < 0.6, base16-bytestring >= 0.1.1 && < 1.1.0.0, binary >= 0.7.3 && < 0.9, bytestring >= 0.10.6.0 && < 0.12, containers >= 0.5.6.2 && < 0.7, cryptohash-sha256 >= 0.11 && < 0.12, directory >= 1.2.2.0 && < 1.4, echo >= 0.1.3 && < 0.2, edit-distance >= 0.2.2 && < 0.3, exceptions >= 0.10.4 && < 0.11, filepath >= 1.4.0.0 && < 1.5, hashable >= 1.0 && < 1.5, HTTP >= 4000.1.5 && < 4000.5, mtl >= 2.0 && < 2.3, network-uri >= 2.6.0.2 && < 2.7, pretty >= 1.1 && < 1.2, process >= 1.2.3.0 && < 1.7, random >= 1.2 && < 1.3, stm >= 2.0 && < 2.6, tar >= 0.5.0.3 && < 0.6, time >= 1.5.0.1 && < 1.12, zlib >= 0.5.3 && < 0.7, hackage-security >= 0.6.2.0 && < 0.7, text >= 1.2.3 && < 1.3, parsec >= 3.1.13.0 && < 3.2, regex-base >= 0.94.0.0 && <0.95, regex-posix >= 0.96.0.0 && <0.97, safe-exceptions >= 0.1.7.0 && < 0.2 if flag(native-dns) if os(windows) build-depends: windns >= 0.1.0 && < 0.2 else build-depends: resolv >= 0.1.1 && < 0.2 if os(windows) -- newer directory for symlinks build-depends: Win32 >= 2.8 && < 3, directory >=1.3.1.0 else build-depends: unix >= 2.5 && < 2.9 if flag(lukko) build-depends: lukko >= 0.1 && <0.2 executable cabal import: warnings, base-dep, cabal-dep, cabal-syntax-dep main-is: Main.hs hs-source-dirs: main default-language: Haskell2010 ghc-options: -rtsopts -threaded -- On AIX, some legacy BSD operations such as flock(2) are provided by libbsd.a if os(aix) extra-libraries: bsd build-depends: cabal-install, directory, filepath -- Small, fast running tests. 
-- test-suite unit-tests import: warnings, base-dep, cabal-dep, cabal-syntax-dep, cabal-install-solver-dep default-language: Haskell2010 ghc-options: -rtsopts -threaded type: exitcode-stdio-1.0 main-is: UnitTests.hs hs-source-dirs: tests other-modules: UnitTests.Distribution.Client.ArbitraryInstances UnitTests.Distribution.Client.BuildReport UnitTests.Distribution.Client.Configure UnitTests.Distribution.Client.FetchUtils UnitTests.Distribution.Client.Get UnitTests.Distribution.Client.Glob UnitTests.Distribution.Client.GZipUtils UnitTests.Distribution.Client.IndexUtils UnitTests.Distribution.Client.IndexUtils.Timestamp UnitTests.Distribution.Client.Init UnitTests.Distribution.Client.Init.Golden UnitTests.Distribution.Client.Init.Interactive UnitTests.Distribution.Client.Init.NonInteractive UnitTests.Distribution.Client.Init.Simple UnitTests.Distribution.Client.Init.Utils UnitTests.Distribution.Client.Init.FileCreators UnitTests.Distribution.Client.InstallPlan UnitTests.Distribution.Client.JobControl UnitTests.Distribution.Client.ProjectConfig UnitTests.Distribution.Client.ProjectPlanning UnitTests.Distribution.Client.Store UnitTests.Distribution.Client.Tar UnitTests.Distribution.Client.Targets UnitTests.Distribution.Client.TreeDiffInstances UnitTests.Distribution.Client.UserConfig UnitTests.Distribution.Solver.Modular.Builder UnitTests.Distribution.Solver.Modular.RetryLog UnitTests.Distribution.Solver.Modular.Solver UnitTests.Distribution.Solver.Modular.DSL UnitTests.Distribution.Solver.Modular.DSL.TestCaseUtils UnitTests.Distribution.Solver.Modular.WeightedPSQ UnitTests.Distribution.Solver.Types.OptionalStanza UnitTests.Options UnitTests.TempTestDir build-depends: array, bytestring, cabal-install, Cabal-tree-diff, Cabal-QuickCheck, containers, directory, filepath, mtl, network-uri >= 2.6.2.0 && <2.7, random, tar, time, zlib, tasty >= 1.2.3 && <1.5, tasty-golden >=2.3.1.1 && <2.4, tasty-quickcheck, tasty-hunit >= 0.10, tree-diff, QuickCheck >= 2.14 && <2.15 -- Tests 
to run with a limited stack and heap size -- The test suite name must be keep short cause a longer one -- could make the build generating paths which exceeds the windows -- max path limit (still a problem for some ghc versions) test-suite mem-use-tests import: warnings, base-dep, cabal-dep, cabal-syntax-dep, cabal-install-solver-dep type: exitcode-stdio-1.0 main-is: MemoryUsageTests.hs hs-source-dirs: tests default-language: Haskell2010 ghc-options: -threaded -rtsopts "-with-rtsopts=-M16M -K1K" other-modules: UnitTests.Distribution.Solver.Modular.DSL UnitTests.Distribution.Solver.Modular.DSL.TestCaseUtils UnitTests.Distribution.Solver.Modular.MemoryUsage UnitTests.Options build-depends: cabal-install, containers, tasty >= 1.2.3 && <1.5, tasty-hunit >= 0.10 -- Integration tests that use the cabal-install code directly -- but still build whole projects test-suite integration-tests2 import: warnings, base-dep, cabal-dep, cabal-syntax-dep, cabal-install-solver-dep ghc-options: -rtsopts -threaded type: exitcode-stdio-1.0 main-is: IntegrationTests2.hs hs-source-dirs: tests default-language: Haskell2010 build-depends: bytestring, cabal-install, containers, directory, filepath, tasty >= 1.2.3 && <1.5, tasty-hunit >= 0.10, tagged test-suite long-tests import: warnings, base-dep, cabal-dep, cabal-syntax-dep, cabal-install-solver-dep ghc-options: -rtsopts -threaded type: exitcode-stdio-1.0 hs-source-dirs: tests main-is: LongTests.hs default-language: Haskell2010 other-modules: UnitTests.Distribution.Client.ArbitraryInstances UnitTests.Distribution.Client.Described UnitTests.Distribution.Client.DescribedInstances UnitTests.Distribution.Client.FileMonitor UnitTests.Distribution.Client.VCS UnitTests.Distribution.Solver.Modular.DSL UnitTests.Distribution.Solver.Modular.QuickCheck UnitTests.Distribution.Solver.Modular.QuickCheck.Utils UnitTests.Options UnitTests.TempTestDir build-depends: Cabal-QuickCheck, Cabal-described, cabal-install, containers, directory, filepath, hashable, 
mtl, network-uri >= 2.6.2.0 && <2.7, random, tagged, tasty >= 1.2.3 && <1.5, tasty-expected-failure, tasty-hunit >= 0.10, tasty-quickcheck, QuickCheck >= 2.14 && <2.15, pretty-show >= 1.6.15 cabal-install-3.8.1.0/changelog0000644000000000000000000007534707346545000014452 0ustar0000000000000000-*-change-log-*- 3.8.1.0 Mikolaj Konarski August 2022 * See https://github.com/haskell/cabal/blob/master/release-notes/cabal-install-3.8.1.0.md 3.6.2.0 Emily Pillmore October 2021 * See https://github.com/haskell/cabal/blob/master/release-notes/cabal-install-3.6.2.0.md 3.6.0.0 Emily Pillmore August 2021 * See https://github.com/haskell/cabal/blob/master/release-notes/cabal-install-3.6.0.0.md 3.4.1.0 Emily Pillmore October 2021 * See https://github.com/haskell/cabal/blob/master/release-notes/cabal-install-3.4.1.0.md 3.4.0.0 Oleg Grenrus February 2021 * See https://github.com/haskell/cabal/blob/master/release-notes/cabal-install-3.4.0.0.md 3.2.0.0 Herbert Valerio Riedel April 2020 * `v2-build` (and other `v2-`prefixed commands) now accept the `--benchmark-option(s)` flags, which pass options to benchmark executables (analogous to how `--test-option(s)` works). (#6209) * Add solver optimization to skip a version of a package if it does not resolve any conflicts encountered in the last version, controlled by flag '--fine-grained-conflicts'. 
(#5918) * `cabal v2-exec` doesn't fail in clean package (#6479) * Show full ABI hash for installed packages in solver log (#5892) * Create incoming directory even for empty packages (#4130) * Start GHCi with `main-is` module in scope (#6311) * Implement `--benchmark-options` for `v2-bench` (#6224) * Fix store-dir in ghc env files generated by `cabal install --lib --package-env` (#6298) * `cabal v2-run` works with `.lhs` files (#6134) * `subdir` in source-repository-package accepts multiple entries (#5472) 3.0.1.0 Herbert Valerio Riedel April 2020 * Create store incoming directory ([#4130](https://github.com/haskell/cabal/issues/4130)) * `fetchRepoTarball` output is not marked ([#6385](https://github.com/haskell/cabal/pull/6385)) * Update `setupMinCabalVersionConstraint` for GHC-8.8 ([#6217](https://github.com/haskell/cabal/pull/6217)) * Implement `cabal install --ignore-project` ([#5919](https://github.com/haskell/cabal/issues/5919)) * `cabal install executable` solver isn't affected by default environment contents ([#6410](https://github.com/haskell/cabal/issues/6410)) * Use `lukko` for file locking ([#6345](https://github.com/haskell/cabal/pull/6345)) * Use `hackage-security-0.6` ([#6388](https://github.com/haskell/cabal/pull/6388)) * Other dependency upgrades 3.0.0.0 Mikhail Glushenkov August 2019 * `v2-haddock` fails on `haddock` failures (#5977) * `v2-run` works when given `File.lhs` literate file. (#6134) * Parse comma-separated lists for extra-prog-path, extra-lib-dirs, extra-framework-dirs, and extra-include-dirs as actual lists. (#5420) * `v2-repl` no longer changes directory to a randomized temporary folder when used outside of a project. (#5544) * `install-method` and `overwrite-policy` in `.cabal/config` now actually work. (#5942) * `v2-install` now reports the error when a package fails to build. 
(#5641) * `v2-install` now has a default when called in a project (#5978, #6014, #6092) * '--write-ghc-environment-files' now defaults to 'never' (#4242) * Fix `sdist`'s output when sent to stdout. (#5874) * Allow a list of dependencies to be provided for `repl --build-depends`. (#5845) * Legacy commands are now only accessible with the `v1-` prefixes, and the `v2-` commands are the new default. Accordingly, the next version of Cabal will be the start of the 3.x version series. (#5800) * New solver flag: '--reject-unconstrained-dependencies'. (#2568) * Ported old-style test options to the new-style commands (#5455). * Improved error messages for cabal file parse errors. (#5710) * Removed support for `.zip` format source distributions (#5755) * Add "simple project" initialization option. (#5707) * Add '--minimize-conflict-set' flag to try to improve the solver's error message, but with an increase in run time. (#5647) * v2-test now succeeds when there are no test suites. (#5435) * Add '--lib', '--exe', and '--libandexe' shorthands to init. (#5759) * init now generates valid `Main.lhs` files. (#5577) * Init improvements: add flag '--application-dir', and when creating a library also create a MyLib.hs module. (#5740) * Add support for generating test-suite via cabal init. (#5761) * Increase `max-backjumps` default from 2000 to 4000. * Make v2-install/new-install-specific flags configurable in ~/.cabal/config * Add --installdir and --install-method=copy flags to v2-install that make it possible to copy the executable instead of symlinking it * --symlink-bindir no longer controls the symlinking directory of v2-install (installdir controls both symlinking and copying now) * Default to non-interactive init. * Add --test-wrapper that allows a prebuild script to set the test environment. * Add filterTestFlags: filter test-wrapper for Cabal < 3.0.0. 
* Cabal now only builds the minimum of a package for `v2-install` (#5754, #6091) 2.4.1.0 Mikhail Glushenkov November 2018 * Add message to alert user to potential package casing errors. (#5635) * new-clean no longer deletes dist-newstyle/src with `-s`. (#5699) * 'new-install' now warns when failing to symlink an exe (#5602) * Extend 'cabal init' support for 'cabal-version' selection (#5567) * 'new-sdist' now generates tarballs with file modification times from a date in 2001. Using the Unix epoch caused problems on Windows. (#5596) * Register monolithic packages installed into the store due to a build-tool dependency if they also happen to contain a buildable public lib. (#5379,#5604) * Fixed a Windows bug where cabal-install tried to copy files after moving them (#5631). * 'cabal v2-repl' now works for indefinite (in the Backpack sense) components. (#5619) * Set data dir environment variable for tarballs and remote repos (#5469) * Fix monolithic inplace build tool PATH (#5633) * 'cabal init' now supports '-w'/'--with-compiler' flag (#4936, #5654) * Fix ambiguous --builddir on new-install (#5652) * Allow relative --storedir (#5662) * Respect --dry on new-install (#5671) * Warn when new-installing zero exes (#5666) * Add 'pkg-cabal-sha256' field to plan.json (#5695) * New v2-build flag: '--only-configure'. (#5578) * Fixed a 'new-install' failure that manifested when it encountered remote source dependencies in a project. (#5643) * New 'v2-[build,configure' flag: '--write-ghc-environment-files' to control the generation of .ghc.environment files. (#5711) 2.4.0.0 Mikhail Glushenkov September 2018 * Bugfix: "cabal new-build --ghc-option '--bogus' --ghc-option '-O1'" no longer ignores all arguments except the last one (#5512). * Add the following option aliases for '-dir'-suffixed options: 'storedir', 'logsdir', 'packagedir', 'sourcedir', 'outputdir' (#5484). 
* 'new-run' now allows the user to run scripts that use a special block to define their requirements (as in the executable stanza) in place of a target. This also allows the use of 'cabal' as an interpreter in a shebang line. * Add aliases for the "new-" commands that won't change when they lose their prefix or are eventually replaced by a third UI paradigm in the future. (#5429) * 'outdated' now accepts '--project-file FILE', which will look for bounds from the new-style freeze file named FILE.freeze. This is only available when `--new-freeze-file` has been passed. * 'new-repl' now accepts a '--build-depends' flag which accepts the same syntax as is used in .cabal files to add additional dependencies to the environment when developing in the REPL. It is now usable outside of projects. (#5425, #5454) * 'new-build' now treats Haddock errors non-fatally. In addition, it attempts to avoid trying to generate Haddocks when there is nothing to generate them from. (#5232, #5459) * 'new-run', 'new-test', and 'new-bench' now will attempt to resolve ambiguous selectors by filtering out selectors that would be invalid. (#4679, #5461) * 'new-install' now supports installing libraries and local components. (#5399) * Drop support for GHC 7.4, since it is out of our support window (and has been for over a year!). * 'new-update' now works outside of projects. (#5096) * Extend `plan.json` with `pkg-src` provenance information. (#5487) * Add 'new-sdist' command (#5389). Creates stable archives based on cabal projects in '.zip' and '.tar.gz' formats. * Add '--repl-options' flag to 'cabal repl' and 'cabal new-repl' commands. Passes its arguments to the invoked repl, bypassing the new-build's cached configurations. This assures they don't trigger useless rebuilds and are always applied within the repl. (#4247, #5287) * Add 'v1-' prefixes for the commands that will be replaced in the new-build universe, in preparation for it becoming the default. 
(#5358) * 'outdated' accepts '--v1-freeze-file' and '--v2-freeze-file' in the same spirit. * Completed the 'new-clean' command (#5357). The functionality is equivalent to old-style clean, but for nix-style builds. * Ensure that each package selected for a build-depends dependency contains a library (#5304). * Support packages from local tarballs in the cabal.project file. * Default changelog generated by 'cabal init' is now named 'CHANGELOG.md' (#5441). * Align output of 'new-build' command phases (#4040). * Add suport for specifying remote VCS dependencies via new 'source-repository-package' stanzas in 'cabal.project' files (#5351). 2.2.0.0 Mikhail Glushenkov March 2018 * '--with-PROG' and '--PROG-options' are applied to all packages and not local packages only (#5019). * Completed the 'new-update' command (#4809), which respects nix-style cabal.project(.local) files and allows to update from multiple repositories when using overlays. * Completed the 'new-run' command (#4477). The functionality is the same of the old 'run' command but using nix-style builds. Additionally, it can run executables across packages in a project. Tests and benchmarks are also treated as executables, providing a quick way to pass them arguments. * Completed the 'new-bench' command (#3638). Same as above. * Completed the 'new-exec' command (#3638). Same as above. * Added a preliminary 'new-install' command (#4558, nonlocal exes part) which allows to quickly install executables from Hackage. * Set symlink-bindir (used by new-install) to .cabal/bin by default on .cabal/config initialization (#5188). * 'cabal update' now supports '--index-state' which can be used to roll back the index to an earlier state. * '--allow-{newer,older}' syntax has been enhanced. Dependency relaxation can be now limited to a specific release of a package, plus there's a new syntax for relaxing only caret-style (i.e. '^>=') dependencies (#4575, #4669). 
* New config file field: 'cxx-options' to specify which options to be passed to the compiler when compiling C++ sources specified by the 'cxx-sources' field. (#3700) * New config file field: 'cxx-sources' to specify C++ files to be compiled separately from C source files. Useful in conjunction with the 'cxx-options' flag to pass different compiler options to C and C++ source files. (#3700) * Use [lfxtb] letters to differentiate component kind instead of opaque "c" in dist-dir layout. * 'cabal configure' now supports '--enable-static', which can be used to build static libaries with GHC via GHC's `-staticlib` flag. * 'cabal user-config now supports '--augment' which can append additional lines to a new or updated cabal config file. * Added support for '--enable-tests' and '--enable-benchmarks' to 'cabal fetch' (#4948). * Misspelled package-names on CLI will no longer be silently case-corrected (#4778). * 'cabal new-configure' now backs up the old 'cabal.project.local' file if it exists (#4460). * On macOS, `new-build` will now place dynamic libraries into `store/lib` and aggressively shorten their names in an effort to stay within the load command size limits of macOSs mach-o linker. * 'new-build' now checks for the existence of executables for build-tools and build-tool-depends dependencies in the solver (#4884). * Fixed a spurious warning telling the user to run 'cabal update' when it wasn't necessary (#4444). * Packages installed in sandboxes via 'add-source' now have their timestamps updated correctly and so will not be reinstalled unncecessarily if the main install command fails (#1375). * Add Windows device path support for copyFile, renameFile. Allows cabal new-build to use temporary store path of up to 32k length (#3972, #4914, #4515). * When a flag value is specified multiple times on the command line, the last one is now preferred, so e.g. '-f+dev -f-dev' is now equivalent to '-f-dev' (#4452). 
* Removed support for building cabal-install with GHC < 7.10 (#4870). * New 'package *' section in 'cabal.project' files that applies options to all packages, not just those local to the project. * Paths_ autogen modules now compile when `RebindableSyntax` or `OverloadedStrings` is used in `default-extensions`. [stack#3789](https://github.com/commercialhaskell/stack/issues/3789) * getDataDir` and other `Paths_autogen` functions now work correctly when compiling a custom `Setup.hs` script using `new-build` (#5164). 2.0.0.1 Mikhail Glushenkov December 2017 * Support for GHC's numeric -g debug levels (#4673). * Demoted 'scope' field version check to a warning (#4714). * Fixed verbosity flags getting removed before being passed to 'printPlan' (#4724). * Added a '--store-dir' option that can be used to configure the location of the build global build store (#4623). * Turned `allow-{newer,older}` in `cabal.project` files into an accumulating field to match CLI flag semantics (#4679). * Improve success message when `cabal upload`ing documentation (#4777). * Documentation fixes. 2.0.0.0 Mikhail Glushenkov August 2017 * See http://coldwa.st/e/blog/2017-09-09-Cabal-2-0.html for more detailed release notes. * Removed the '--root-cmd' parameter of the 'install' command (#3356). * Deprecated 'cabal install --global' (#3356). * Changed 'cabal upload' to upload a package candidate by default (#3419). Same applies to uploading documentation. * Added a new 'cabal upload' flag '--publish' for publishing a package on Hackage instead of uploading a candidate (#3419). * Added optional solver output visualisation support via the tracetree package. Mainly intended for debugging (#3410). * Removed the '--check' option from 'cabal upload' (#1823). It was replaced by package candidates. * Fixed various behaviour differences between network transports (#3429). * The bootstrap script now works correctly when run from a Git clone (#3439). * Removed the top-down solver (#3598). 
* The '-v/--verbosity' option no longer affects GHC verbosity (except in the case of '-v0'). Use '--ghc-options=-v' to enable verbose GHC output (#3540, #3671). * Changed the default logfile template from '.../$pkgid.log' to '.../$compiler/$libname.log' (#3807). * Added a new command, 'cabal reconfigure', which re-runs 'configure' with the most recently used flags (#2214). * Added the '--index-state' flag for requesting a specific version of the package index (#3893, #4115). * Support for building Backpack packages. See https://github.com/ezyang/ghc-proposals/blob/backpack/proposals/0000-backpack.rst for more details. * Support the Nix package manager (#3651). * Made the 'template-haskell' package non-upgradable again (#4185). * Fixed password echoing on MinTTY (#4128). * Added a new solver flag, '--allow-boot-library-installs', that allows any package to be installed or upgraded (#4209). * New 'cabal-install' command: 'outdated', for listing outdated version bounds in a .cabal file or a freeze file (#4207). * Added qualified constraints for setup dependencies. For example, --constraint="setup.bar == 1.0" constrains all setup dependencies on bar, and --constraint="foo:setup.bar == 1.0" constrains foo's setup dependency on bar (part of #3502). * Non-qualified constraints, such as --constraint="bar == 1.0", now only apply to top-level dependencies. They don't constrain setup or build-tool dependencies. The new syntax --constraint="any.bar == 1.0" constrains all uses of bar. * Added a technical preview version of the 'cabal doctest' command (#4480). 1.24.0.2 Mikhail Glushenkov December 2016 * Adapted to the revert of a PVP-noncompliant API change in Cabal 1.24.2.0 (#4123). * Bumped the directory upper bound to < 1.4 (#4158). 1.24.0.1 Ryan Thomas October 2016 * Fixed issue with passing '--enable-profiling' when invoking Setup scripts built with older versions of Cabal (#3873). * Fixed various behaviour differences between network transports (#3429). 
* Updated to depend on the latest hackage-security that fixes various issues on Windows. * Fixed 'new-build' to exit with a non-zero exit code on failure (#3506). * Store secure repo index data as 01-index.* (#3862). * Added new hackage-security root keys for distribution with cabal-install. * Fix an issue where 'cabal install' sometimes had to be run twice for packages with build-type: Custom and a custom-setup stanza (#3723). * 'cabal sdist' no longer ignores '--builddir' when the package's build-type is Custom (#3794). 1.24.0.0 Ryan Thomas March 2016 * If there are multiple remote repos, 'cabal update' now updates them in parallel (#2503). * New 'cabal upload' option '-P'/'--password-command' for reading Hackage password from arbitrary program output (#2506). * Better warning for 'cabal run' (#2510). * 'cabal init' now warns if the chosen package name is already registered in the source package index (#2436). * New 'cabal install' option: '--offline' (#2578). * Accept 'builddir' field in cabal.config (#2484) * Read 'builddir' option from 'CABAL_BUILDDIR' environment variable. * Remote repos may now be configured to use https URLs. This uses either curl or wget or, on Windows, PowerShell, under the hood (#2687). * Install target URLs can now use https e.g. 'cabal install https://example.com/foo-1.0.tar.gz'. * Automatically use https for cabal upload for the main hackage.haskell.org (other repos will use whatever they are configured to use). * Support for dependencies of custom Setup.hs scripts (see http://www.well-typed.com/blog/2015/07/cabal-setup-deps/). * 'cabal' program itself now can be used as an external setup method. This fixes an issue when Cabal version mismatch caused unnecessary reconfigures (#2633). * Improved error message for unsatisfiable package constraints (#2727). * Fixed a space leak in 'cabal update' (#2826). * 'cabal exec' and 'sandbox hc-pkg' now use the configured compiler (#2859). * New 'cabal haddock' option: '--for-hackage' (#2852). 
* Added a warning when the solver cannot find a dependency (#2853). * New 'cabal upload' option: '--doc': upload documentation to hackage (#2890). * Improved error handling for 'sandbox delete-source' (#2943). * Solver support for extension and language flavours (#2873). * Support for secure repos using hackage-security (#2983). * Added a log file message similar to one printed by 'make' when building in another directory (#2642). * Added new subcommand 'init' to 'cabal user-config'. This subcommand creates a cabal configuration file in either the default location or as specified by --config-file (#2553). * The man page for 'cabal-install' is now automatically generated (#2877). * The '--allow-newer' option now works as expected when specified multiple times (#2588). * New config file field: 'extra-framework-dirs' (extra locations to find OS X frameworks in). Can be also specified as an argument for 'install' and 'configure' commands (#3158). * It's now possible to limit the scope of '--allow-newer' to single packages in the install plan (#2756). * Full '--allow-newer' syntax is now supported in the config file (that is, 'allow-newer: base, ghc-prim, some-package:vector') (#3171). * Improved performance of '--reorder-goals' (#3208). * Fixed space leaks in modular solver (#2916, #2914). * Made the solver aware of pkg-config constraints (#3023). * Added a new command: 'gen-bounds' (#3223). See http://softwaresimply.blogspot.se/2015/08/cabal-gen-bounds-easy-generation-of.html. * Tech preview of new nix-style isolated project-based builds. Currently provides the commands (new-)build/repl/configure. 1.22.9.0 Ryan Thomas March 2016 * Include Cabal-1.22.8.0 1.22.8.0 Ryan Thomas February 2016 * Only Custom setup scripts should be compiled with '-i -i.'. * installedCabalVersion: Don't special-case Cabal anymore. * Bump the HTTP upper bound. See #3069. 
1.22.7.0 Ryan Thomas December 2015 * Remove GZipUtils tests * maybeDecompress: bail on all errors at the beginning of the stream with zlib < 0.6 * Correct maybeDecompress 1.22.6.0 Ryan Thomas June 2015 * A fix for @ezyang's fix for #2502. (Mikhail Glushenkov) 1.22.5.0 Ryan Thomas June 2015 * Reduce temporary directory name length, fixes #2502. (Edward Z. Yang) 1.22.4.0 Ryan Thomas May 2015 * Force cabal upload to always use digest auth and never basic auth. * Add dependency-graph information to `printPlan` output * bootstrap.sh: fixes linker matching to avoid cases where tested linker names appear unexpectedly in compiler output (fixes #2542) 1.22.3.0 Ryan Thomas April 2015 * Fix bash completion for sandbox subcommands - Fixes #2513 (Mikhail Glushenkov) * filterConfigureFlags: filter more flags (Mikhail Glushenkov) 1.22.2.0 Ryan Thomas March 2015 * Don't pass '--{en,dis}able-profiling' to old setup exes. * -Wall police * Allow filepath 1.4 1.22.0.0 Johan Tibell January 2015 * New command: user-config (#2159). * Implement 'cabal repl --only' (#2016). * Fix an issue when 'cabal repl' was doing unnecessary compilation (#1715). * Prompt the user to specify source directory in 'cabal init' (#1989). * Remove the self-upgrade check (#2090). * Don't redownload already downloaded packages when bootstrapping (#2133). * Support sandboxes in 'bootstrap.sh' (#2137). * Install profiling and shared libs by default in 'bootstrap.sh' (#2009). 1.20.2.0 Ryan Thomas February 2016 * Only Custom setup scripts should be compiled with '-i -i.'. * installedCabalVersion: Don't special-case Cabal anymore. 1.20.1.0 Ryan Thomas May 2015 * Force cabal upload to always use digest auth and never basic auth. * bootstrap.sh: install network-uri before HTTP 1.20.0.5 Johan Tibell December 2014 * Support random 1.1. * Fix bootstrap script after network package split. * Support network-2.6 in test suite. 
1.20.0.3 Johan Tibell June 2014 * Don't attempt to rename dist if it is already named correctly * Treat all flags of a package as interdependent. * Allow template-haskell to be upgradable again 1.20.0.2 Johan Tibell May 2014 * Increase max-backjumps to 2000. * Fix solver bug which led to missed install plans. * Fix streaming test output. * Tweak solver heuristics to avoid reinstalls. 1.20.0.1 Johan Tibell May 2014 * Fix cabal repl search path bug on Windows * Include OS and arch in cabal-install user agent * Revert --constraint flag behavior in configure to 1.18 behavior 1.20.0.0 Johan Tibell April 2014 * Build only selected executables * Add -j flag to build/test/bench/run * Improve install log file * Don't symlink executables when in a sandbox * Add --package-db flag to 'list' and 'info' * Make upload more efficient * Add --require-sandbox option * Add experimental Cabal file format command * Add haddock section to config file * Add --main-is flag to init 1.18.2.0 Ryan Thomas February 2016 * Only Custom setup scripts should be compiled with '-i -i.'. * installedCabalVersion: Don't special-case Cabal anymore. 1.18.1.0 Ryan Thomas May 2015 * Force cabal upload to always use digest auth and never basic auth. * Merge pull request #2367 from juhp/patch-2 * Fix bootstrap.sh by bumping HTTP to 4000.2.16.1 1.18.0.7 Johan Tibell December 2014 * Support random 1.1. * Fix bootstrap script after network package split. * Support network-2.6 in test suite. 1.18.0.5 Johan Tibell July 2014 * Make solver flag resolution more conservative. 1.18.0.4 Johan Tibell May 2014 * Increase max-backjumps to 2000. * Fix solver bug which led to missed install plans. * Tweak solver heuristics to avoid reinstalls. 
0.14.0 Andres Loeh April 2012 * Works with ghc-7.4 * Completely new modular dependency solver (default in most cases) * Some tweaks to old topdown dependency solver * Install plans are now checked for reinstalls that break packages * Flags --constraint and --preference work for nonexisting packages * New constraint forms for source and installed packages * New constraint form for package-specific use flags * New constraint form for package-specific stanza flags * Test suite dependencies are pulled in on demand * No longer install packages on --enable-tests when tests fail * New "cabal bench" command * Various "cabal init" tweaks 0.10.0 Duncan Coutts February 2011 * New package targets: local dirs, local and remote tarballs * Initial support for a "world" package target * Partial fix for situation where user packages mask global ones * Removed cabal upgrade, new --upgrade-dependencies flag * New cabal install --only-dependencies flag * New cabal fetch --no-dependencies and --dry-run flags * Improved output for cabal info * Simpler and faster bash command line completion * Fix for broken proxies that decompress wrongly * Fix for cabal unpack to preserve executable permissions * Adjusted the output for the -v verbosity level in a few places 0.8.2 Duncan Coutts March 2010 * Fix for cabal update on Windows * On windows switch to per-user installs (rather than global) * Handle intra-package dependencies in dependency planning * Minor tweaks to cabal init feature * Fix various -Wall warnings * Fix for cabal sdist --snapshot 0.8.0 Duncan Coutts Dec 2009 * Works with ghc-6.12 * New "cabal init" command for making initial project .cabal file * New feature to maintain an index of haddock documentation 0.6.4 Duncan Coutts Nov 2009 * Improve the algorithm for selecting the base package version * Hackage errors now reported by "cabal upload [--check]" * Improved format of messages from "cabal check" * Config file can now be selected by an env var * Updated tar reading/writing 
code * Improve instructions in the README and bootstrap output * Fix bootstrap.sh on Solaris 9 * Fix bootstrap for systems where network uses parsec 3 * Fix building with ghc-6.6 0.6.2 Duncan Coutts Feb 2009 * The upgrade command has been disabled in this release * The configure and install commands now have consistent behaviour * Reduce the tendency to re-install already existing packages * The --constraint= flag now works for the install command * New --preference= flag for soft constraints / version preferences * Improved bootstrap.sh script, smarter and better error checking * New cabal info command to display detailed info on packages * New cabal unpack command to download and untar a package * HTTP-4000 package required, should fix bugs with http proxies * Now works with authenticated proxies. * On Windows can now override the proxy setting using an env var * Fix compatibility with config files generated by older versions * Warn if the hackage package list is very old * More helpful --help output, mention config file and examples * Better documentation in ~/.cabal/config file * Improved command line interface for logging and build reporting * Minor improvements to some messages 0.6.0 Duncan Coutts Oct 2008 * Constraint solver can now cope with base 3 and base 4 * Allow use of package version preferences from hackage index * More detailed output from cabal install --dry-run -v * Improved bootstrap.sh 0.5.2 Duncan Coutts Aug 2008 * Suport building haddock documentaion * Self-reinstall now works on Windows * Allow adding symlinks to excutables into a separate bindir * New self-documenting config file * New install --reinstall flag * More helpful status messages in a couple places * Upload failures now report full text error message from the server * Support for local package repositories * New build logging and reporting * New command to upload build reports to (a compatible) server * Allow tilde in hackage server URIs * Internal code improvements * Many other 
minor improvements and bug fixes 0.5.1 Duncan Coutts June 2008 * Restore minimal hugs support in dependency resolver * Fix for disabled http proxies on Windows * Revert to global installs on Windows by default 0.5.0 Duncan Coutts June 2008 * New package dependency resolver, solving diamond dep problem * Integrate cabal-setup functionality * Integrate cabal-upload functionality * New cabal update and check commands * Improved behavior for install and upgrade commands * Full Windows support * New command line handling * Bash command line completion * Allow case insensitive package names on command line * New --dry-run flag for install, upgrade and fetch commands * New --root-cmd flag to allow installing as root * New --cabal-lib-version flag to select different Cabal lib versions * Support for HTTP proxies * Improved cabal list output * Build other non-dependent packages even when some fail * Report a summary of all build failures at the end * Partial support for hugs * Partial implementation of build reporting and logging * More consistent logging and verbosity * Significant internal code restructuring 0.4 Duncan Coutts Oct 2007 * Renamed executable from 'cabal-install' to 'cabal' * Partial Windows compatibility * Do per-user installs by default * cabal install now installs the package in the current directory * Allow multiple remote servers * Use zlib lib and internal tar code and rather than external tar * Reorganised configuration files * Significant code restructuring * Cope with packages with conditional dependencies 0.3 and older versions by Lemmih, Paolo Martini and others 2006-2007 * Switch from smart-server, dumb-client model to the reverse * New .tar.gz based index format * New remote and local package archive format cabal-install-3.8.1.0/main/0000755000000000000000000000000007346545000013504 5ustar0000000000000000cabal-install-3.8.1.0/main/Main.hs0000644000000000000000000012576107346545000014740 0ustar0000000000000000{-# LANGUAGE CPP #-} 
----------------------------------------------------------------------------- -- | -- Module : Main -- Copyright : (c) David Himmelstrup 2005 -- License : BSD-like -- -- Maintainer : lemmih@gmail.com -- Stability : provisional -- Portability : portable -- -- Entry point to the default cabal-install front-end. ----------------------------------------------------------------------------- module Main (main) where import Distribution.Client.Setup ( GlobalFlags(..), globalCommand, withRepoContext , ConfigFlags(..) , ConfigExFlags(..), defaultConfigExFlags, configureExCommand , reconfigureCommand , configCompilerAux', configPackageDB' , BuildFlags(..) , buildCommand, replCommand, testCommand, benchmarkCommand , InstallFlags(..), defaultInstallFlags , installCommand , FetchFlags(..), fetchCommand , FreezeFlags(..), freezeCommand , genBoundsCommand , GetFlags(..), getCommand, unpackCommand , checkCommand , formatCommand , ListFlags(..), listCommand, listNeedsCompiler , InfoFlags(..), infoCommand , UploadFlags(..), uploadCommand , ReportFlags(..), reportCommand , runCommand , InitFlags(initVerbosity, initHcPath), initCommand , ActAsSetupFlags(..), actAsSetupCommand , UserConfigFlags(..), userConfigCommand , reportCommand , manpageCommand , haddockCommand , cleanCommand , copyCommand , registerCommand ) import Distribution.Simple.Setup ( HaddockTarget(..) , HaddockFlags(..), defaultHaddockFlags , HscolourFlags(..), hscolourCommand , ReplFlags(..) , CopyFlags(..) , RegisterFlags(..) , CleanFlags(..) , TestFlags(..), BenchmarkFlags(..) 
, Flag(..), fromFlag, fromFlagOrDefault, flagToMaybe, toFlag , configAbsolutePaths ) import Prelude () import Distribution.Client.Compat.Prelude hiding (get) import Distribution.Client.SetupWrapper ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) import Distribution.Client.Config ( SavedConfig(..), loadConfig, defaultConfigFile, userConfigDiff , userConfigUpdate, createDefaultConfigFile, getConfigFilePath ) import Distribution.Client.Targets ( readUserTargets ) import qualified Distribution.Client.List as List ( list, info ) import qualified Distribution.Client.CmdConfigure as CmdConfigure import qualified Distribution.Client.CmdUpdate as CmdUpdate import qualified Distribution.Client.CmdBuild as CmdBuild import qualified Distribution.Client.CmdRepl as CmdRepl import qualified Distribution.Client.CmdFreeze as CmdFreeze import qualified Distribution.Client.CmdHaddock as CmdHaddock import qualified Distribution.Client.CmdInstall as CmdInstall import qualified Distribution.Client.CmdRun as CmdRun import qualified Distribution.Client.CmdTest as CmdTest import qualified Distribution.Client.CmdBench as CmdBench import qualified Distribution.Client.CmdExec as CmdExec import qualified Distribution.Client.CmdClean as CmdClean import qualified Distribution.Client.CmdSdist as CmdSdist import qualified Distribution.Client.CmdListBin as CmdListBin import qualified Distribution.Client.CmdOutdated as CmdOutdated import Distribution.Client.CmdLegacy import Distribution.Client.Install (install) import Distribution.Client.Configure (configure, writeConfigFlags) import Distribution.Client.Fetch (fetch) import Distribution.Client.Freeze (freeze) import Distribution.Client.GenBounds (genBounds) import Distribution.Client.Check as Check (check) --import Distribution.Client.Clean (clean) import qualified Distribution.Client.Upload as Upload import Distribution.Client.Run (run, splitRunArgs) import Distribution.Client.Get (get) import Distribution.Client.Reconfigure 
(Check(..), reconfigure) import Distribution.Client.Nix (nixInstantiate ,nixShell ) import Distribution.Client.Sandbox (loadConfigOrSandboxConfig ,findSavedDistPref ,updateInstallDirs) import Distribution.Client.Tar (createTarGzFile) import Distribution.Client.Types.Credentials (Password (..)) import Distribution.Client.Init (initCmd) import Distribution.Client.Manpage (manpageCmd) import Distribution.Client.ManpageFlags (ManpageFlags (..)) import Distribution.Client.Utils ( determineNumJobs, relaxEncodingErrors ) import Distribution.Client.Version ( cabalInstallVersion ) import Distribution.Package (packageId) import Distribution.PackageDescription ( BuildType(..), Executable(..), buildable ) import Distribution.PackageDescription.PrettyPrint ( writeGenericPackageDescription ) import qualified Distribution.Simple as Simple import qualified Distribution.Make as Make import qualified Distribution.Types.UnqualComponentName as Make import Distribution.Simple.Build ( startInterpreter ) import Distribution.Simple.Command ( CommandParse(..), CommandUI(..), Command, CommandSpec(..) , CommandType(..), commandsRun, commandAddAction, hiddenCommand , commandFromSpec, commandShowOptions ) import Distribution.Simple.Compiler (PackageDBStack) import Distribution.Simple.Configure ( configCompilerAuxEx, ConfigStateFileError(..) 
, getPersistBuildConfig, interpretPackageDbFlags , tryGetPersistBuildConfig ) import qualified Distribution.Simple.LocalBuildInfo as LBI import Distribution.Simple.PackageDescription ( readGenericPackageDescription ) import Distribution.Simple.Program (defaultProgramDb ,configureAllKnownPrograms ,simpleProgramInvocation ,getProgramInvocationOutput) import Distribution.Simple.Program.Db (reconfigurePrograms) import qualified Distribution.Simple.Setup as Cabal import Distribution.Simple.Utils ( cabalVersion, die', dieNoVerbosity, info, notice, topHandler , findPackageDesc, tryFindPackageDesc, createDirectoryIfMissingVerbose ) import Distribution.Text ( display ) import Distribution.Verbosity as Verbosity ( normal ) import Distribution.Version ( Version, mkVersion, orLaterVersion ) import Distribution.Compat.ResponseFile import System.Environment (getArgs, getProgName) import System.FilePath ( dropExtension, splitExtension , takeExtension, (), (<.>) ) import System.IO ( BufferMode(LineBuffering), hSetBuffering , stderr, stdout ) import System.Directory ( doesFileExist, getCurrentDirectory , withCurrentDirectory) import Data.Monoid (Any(..)) import Control.Exception (try) -- | Entry point -- main :: IO () main = do -- Enable line buffering so that we can get fast feedback even when piped. -- This is especially important for CI and build systems. hSetBuffering stdout LineBuffering -- If the locale encoding for CLI doesn't support all Unicode characters, -- printing to it may fail unless we relax the handling of encoding errors -- when writing to stderr and stdout. 
relaxEncodingErrors stdout relaxEncodingErrors stderr (args0, args1) <- break (== "--") <$> getArgs mainWorker =<< (++ args1) <$> expandResponse args0 mainWorker :: [String] -> IO () mainWorker args = do maybeScriptAndArgs <- case args of [] -> return Nothing (h:tl) -> (\b -> if b then Just (h:|tl) else Nothing) <$> CmdRun.validScript h topHandler $ case commandsRun (globalCommand commands) commands args of CommandHelp help -> printGlobalHelp help CommandList opts -> printOptionsList opts CommandErrors errs -> printErrors errs CommandReadyToGo (globalFlags, commandParse) -> case commandParse of _ | fromFlagOrDefault False (globalVersion globalFlags) -> printVersion | fromFlagOrDefault False (globalNumericVersion globalFlags) -> printNumericVersion CommandHelp help -> printCommandHelp help CommandList opts -> printOptionsList opts CommandErrors errs -> maybe (printErrors errs) go maybeScriptAndArgs where go (script:|scriptArgs) = CmdRun.handleShebang script scriptArgs CommandReadyToGo action -> action globalFlags where printCommandHelp help = do pname <- getProgName putStr (help pname) printGlobalHelp help = do pname <- getProgName configFile <- defaultConfigFile putStr (help pname) putStr $ "\nYou can edit the cabal configuration file to set defaults:\n" ++ " " ++ configFile ++ "\n" exists <- doesFileExist configFile unless exists $ putStrLn $ "This file will be generated with sensible " ++ "defaults if you run 'cabal update'." printOptionsList = putStr . 
unlines printErrors errs = dieNoVerbosity $ intercalate "\n" errs printNumericVersion = putStrLn $ display cabalInstallVersion printVersion = putStrLn $ "cabal-install version " ++ display cabalInstallVersion ++ "\ncompiled using version " ++ display cabalVersion ++ " of the Cabal library " commands = map commandFromSpec commandSpecs commandSpecs = [ regularCmd listCommand listAction , regularCmd infoCommand infoAction , regularCmd fetchCommand fetchAction , regularCmd getCommand getAction , regularCmd unpackCommand unpackAction , regularCmd checkCommand checkAction , regularCmd uploadCommand uploadAction , regularCmd reportCommand reportAction , regularCmd initCommand initAction , regularCmd userConfigCommand userConfigAction , regularCmd genBoundsCommand genBoundsAction , regularCmd CmdOutdated.outdatedCommand CmdOutdated.outdatedAction , wrapperCmd hscolourCommand hscolourVerbosity hscolourDistPref , hiddenCmd formatCommand formatAction , hiddenCmd actAsSetupCommand actAsSetupAction , hiddenCmd manpageCommand (manpageAction commandSpecs) , regularCmd CmdListBin.listbinCommand CmdListBin.listbinAction ] ++ concat [ newCmd CmdConfigure.configureCommand CmdConfigure.configureAction , newCmd CmdUpdate.updateCommand CmdUpdate.updateAction , newCmd CmdBuild.buildCommand CmdBuild.buildAction , newCmd CmdRepl.replCommand CmdRepl.replAction , newCmd CmdFreeze.freezeCommand CmdFreeze.freezeAction , newCmd CmdHaddock.haddockCommand CmdHaddock.haddockAction , newCmd CmdInstall.installCommand CmdInstall.installAction , newCmd CmdRun.runCommand CmdRun.runAction , newCmd CmdTest.testCommand CmdTest.testAction , newCmd CmdBench.benchCommand CmdBench.benchAction , newCmd CmdExec.execCommand CmdExec.execAction , newCmd CmdClean.cleanCommand CmdClean.cleanAction , newCmd CmdSdist.sdistCommand CmdSdist.sdistAction , legacyCmd configureExCommand configureAction , legacyCmd buildCommand buildAction , legacyCmd replCommand replAction , legacyCmd freezeCommand freezeAction , legacyCmd 
haddockCommand haddockAction , legacyCmd installCommand installAction , legacyCmd runCommand runAction , legacyCmd testCommand testAction , legacyCmd benchmarkCommand benchmarkAction , legacyCmd cleanCommand cleanAction , legacyWrapperCmd copyCommand copyVerbosity copyDistPref , legacyWrapperCmd registerCommand regVerbosity regDistPref , legacyCmd reconfigureCommand reconfigureAction ] type Action = GlobalFlags -> IO () -- Duplicated in Distribution.Client.CmdLegacy. Any changes must be -- reflected there, as well. regularCmd :: CommandUI flags -> (flags -> [String] -> action) -> CommandSpec action regularCmd ui action = CommandSpec ui ((flip commandAddAction) action) NormalCommand hiddenCmd :: CommandUI flags -> (flags -> [String] -> action) -> CommandSpec action hiddenCmd ui action = CommandSpec ui (\ui' -> hiddenCommand (commandAddAction ui' action)) HiddenCommand wrapperCmd :: Monoid flags => CommandUI flags -> (flags -> Flag Verbosity) -> (flags -> Flag String) -> CommandSpec Action wrapperCmd ui verbosity distPref = CommandSpec ui (\ui' -> wrapperAction ui' verbosity distPref) NormalCommand wrapperAction :: Monoid flags => CommandUI flags -> (flags -> Flag Verbosity) -> (flags -> Flag String) -> Command Action wrapperAction command verbosityFlag distPrefFlag = commandAddAction command { commandDefaultFlags = mempty } $ \flags extraArgs globalFlags -> do let verbosity = fromFlagOrDefault normal (verbosityFlag flags) load <- try (loadConfigOrSandboxConfig verbosity globalFlags) let config = either (\(SomeException _) -> mempty) id load distPref <- findSavedDistPref config (distPrefFlag flags) let setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref } setupWrapper verbosity setupScriptOptions Nothing command (const flags) (const extraArgs) configureAction :: (ConfigFlags, ConfigExFlags) -> [String] -> Action configureAction (configFlags, configExFlags) extraArgs globalFlags = do let verbosity = fromFlagOrDefault normal (configVerbosity 
configFlags) config <- updateInstallDirs (configUserInstall configFlags) <$> loadConfigOrSandboxConfig verbosity globalFlags distPref <- findSavedDistPref config (configDistPref configFlags) nixInstantiate verbosity distPref True globalFlags config nixShell verbosity distPref globalFlags config $ do let configFlags' = savedConfigureFlags config `mappend` configFlags configExFlags' = savedConfigureExFlags config `mappend` configExFlags globalFlags' = savedGlobalFlags config `mappend` globalFlags (comp, platform, progdb) <- configCompilerAuxEx configFlags' writeConfigFlags verbosity distPref (configFlags', configExFlags') -- What package database(s) to use let packageDBs :: PackageDBStack packageDBs = interpretPackageDbFlags (fromFlag (configUserInstall configFlags')) (configPackageDBs configFlags') withRepoContext verbosity globalFlags' $ \repoContext -> configure verbosity packageDBs repoContext comp platform progdb configFlags' configExFlags' extraArgs reconfigureAction :: (ConfigFlags, ConfigExFlags) -> [String] -> Action reconfigureAction flags@(configFlags, _) _ globalFlags = do let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) config <- updateInstallDirs (configUserInstall configFlags) <$> loadConfigOrSandboxConfig verbosity globalFlags distPref <- findSavedDistPref config (configDistPref configFlags) let checkFlags = Check $ \_ saved -> do let flags' = saved <> flags unless (saved == flags') $ info verbosity message pure (Any True, flags') where -- This message is correct, but not very specific: it will list all -- of the new flags, even if some have not actually changed. The -- *minimal* set of changes is more difficult to determine. 
message = "flags changed: " ++ unwords (commandShowOptions configureExCommand flags) nixInstantiate verbosity distPref True globalFlags config _ <- reconfigure configureAction verbosity distPref NoFlag checkFlags [] globalFlags config pure () buildAction :: BuildFlags -> [String] -> Action buildAction buildFlags extraArgs globalFlags = do let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) config <- loadConfigOrSandboxConfig verbosity globalFlags distPref <- findSavedDistPref config (buildDistPref buildFlags) -- Calls 'configureAction' to do the real work, so nothing special has to be -- done to support sandboxes. config' <- reconfigure configureAction verbosity distPref (buildNumJobs buildFlags) mempty [] globalFlags config nixShell verbosity distPref globalFlags config $ do build verbosity config' distPref buildFlags extraArgs -- | Actually do the work of building the package. This is separate from -- 'buildAction' so that 'testAction' and 'benchmarkAction' do not invoke -- 'reconfigure' twice. build :: Verbosity -> SavedConfig -> FilePath -> BuildFlags -> [String] -> IO () build verbosity config distPref buildFlags extraArgs = setupWrapper verbosity setupOptions Nothing (Cabal.buildCommand progDb) mkBuildFlags (const extraArgs) where progDb = defaultProgramDb setupOptions = defaultSetupScriptOptions { useDistPref = distPref } mkBuildFlags version = filterBuildFlags version config buildFlags' buildFlags' = buildFlags { buildVerbosity = toFlag verbosity , buildDistPref = toFlag distPref } -- | Make sure that we don't pass new flags to setup scripts compiled against -- old versions of Cabal. filterBuildFlags :: Version -> SavedConfig -> BuildFlags -> BuildFlags filterBuildFlags version config buildFlags | version >= mkVersion [1,19,1] = buildFlags_latest -- Cabal < 1.19.1 doesn't support 'build -j'. 
| otherwise = buildFlags_pre_1_19_1 where buildFlags_pre_1_19_1 = buildFlags { buildNumJobs = NoFlag } buildFlags_latest = buildFlags { -- Take the 'jobs' setting '~/.cabal/config' into account. buildNumJobs = Flag . Just . determineNumJobs $ (numJobsConfigFlag `mappend` numJobsCmdLineFlag) } numJobsConfigFlag = installNumJobs . savedInstallFlags $ config numJobsCmdLineFlag = buildNumJobs buildFlags replAction :: ReplFlags -> [String] -> Action replAction replFlags extraArgs globalFlags = do let verbosity = fromFlagOrDefault normal (replVerbosity replFlags) config <- loadConfigOrSandboxConfig verbosity globalFlags distPref <- findSavedDistPref config (replDistPref replFlags) cwd <- getCurrentDirectory pkgDesc <- findPackageDesc cwd let -- There is a .cabal file in the current directory: start a REPL and load -- the project's modules. onPkgDesc = do -- Calls 'configureAction' to do the real work, so nothing special has to -- be done to support sandboxes. _ <- reconfigure configureAction verbosity distPref NoFlag mempty [] globalFlags config let progDb = defaultProgramDb setupOptions = defaultSetupScriptOptions { useCabalVersion = orLaterVersion $ mkVersion [1,18,0] , useDistPref = distPref } replFlags' = replFlags { replVerbosity = toFlag verbosity , replDistPref = toFlag distPref } nixShell verbosity distPref globalFlags config $ setupWrapper verbosity setupOptions Nothing (Cabal.replCommand progDb) (const replFlags') (const extraArgs) -- No .cabal file in the current directory: just start the REPL (possibly -- using the sandbox package DB). 
onNoPkgDesc = do let configFlags = savedConfigureFlags config (comp, platform, programDb) <- configCompilerAux' configFlags programDb' <- reconfigurePrograms verbosity (replProgramPaths replFlags) (replProgramArgs replFlags) programDb nixShell verbosity distPref globalFlags config $ do startInterpreter verbosity programDb' comp platform (configPackageDB' configFlags) either (const onNoPkgDesc) (const onPkgDesc) pkgDesc installAction :: ( ConfigFlags, ConfigExFlags, InstallFlags , HaddockFlags, TestFlags, BenchmarkFlags ) -> [String] -> Action installAction (configFlags, _, installFlags, _, _, _) _ globalFlags | fromFlagOrDefault False (installOnly installFlags) = do let verb = fromFlagOrDefault normal (configVerbosity configFlags) config <- loadConfigOrSandboxConfig verb globalFlags dist <- findSavedDistPref config (configDistPref configFlags) let setupOpts = defaultSetupScriptOptions { useDistPref = dist } setupWrapper verb setupOpts Nothing installCommand (const (mempty, mempty, mempty, mempty, mempty, mempty)) (const []) installAction ( configFlags, configExFlags, installFlags , haddockFlags, testFlags, benchmarkFlags ) extraArgs globalFlags = do let verb = fromFlagOrDefault normal (configVerbosity configFlags) config <- updateInstallDirs (configUserInstall configFlags) <$> loadConfigOrSandboxConfig verb globalFlags dist <- findSavedDistPref config (configDistPref configFlags) do targets <- readUserTargets verb extraArgs let configFlags' = maybeForceTests installFlags' $ savedConfigureFlags config `mappend` configFlags { configDistPref = toFlag dist } configExFlags' = defaultConfigExFlags `mappend` savedConfigureExFlags config `mappend` configExFlags installFlags' = defaultInstallFlags `mappend` savedInstallFlags config `mappend` installFlags haddockFlags' = defaultHaddockFlags `mappend` savedHaddockFlags config `mappend` haddockFlags { haddockDistPref = toFlag dist } testFlags' = Cabal.defaultTestFlags `mappend` savedTestFlags config `mappend` testFlags { 
testDistPref = toFlag dist } benchmarkFlags' = Cabal.defaultBenchmarkFlags `mappend` savedBenchmarkFlags config `mappend` benchmarkFlags { benchmarkDistPref = toFlag dist } globalFlags' = savedGlobalFlags config `mappend` globalFlags (comp, platform, progdb) <- configCompilerAux' configFlags' -- TODO: Redesign ProgramDB API to prevent such problems as #2241 in the -- future. progdb' <- configureAllKnownPrograms verb progdb configFlags'' <- configAbsolutePaths configFlags' withRepoContext verb globalFlags' $ \repoContext -> install verb (configPackageDB' configFlags'') repoContext comp platform progdb' globalFlags' configFlags'' configExFlags' installFlags' haddockFlags' testFlags' benchmarkFlags' targets where -- '--run-tests' implies '--enable-tests'. maybeForceTests installFlags' configFlags' = if fromFlagOrDefault False (installRunTests installFlags') then configFlags' { configTests = toFlag True } else configFlags' testAction :: (BuildFlags, TestFlags) -> [String] -> GlobalFlags -> IO () testAction (buildFlags, testFlags) extraArgs globalFlags = do let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) config <- loadConfigOrSandboxConfig verbosity globalFlags distPref <- findSavedDistPref config (testDistPref testFlags) let buildFlags' = buildFlags { buildVerbosity = testVerbosity testFlags } checkFlags = Check $ \_ flags@(configFlags, configExFlags) -> if fromFlagOrDefault False (configTests configFlags) then pure (mempty, flags) else do info verbosity "reconfiguring to enable tests" let flags' = ( configFlags { configTests = toFlag True } , configExFlags ) pure (Any True, flags') _ <- reconfigure configureAction verbosity distPref (buildNumJobs buildFlags') checkFlags [] globalFlags config nixShell verbosity distPref globalFlags config $ do let setupOptions = defaultSetupScriptOptions { useDistPref = distPref } testFlags' = testFlags { testDistPref = toFlag distPref } -- The package was just configured, so the LBI must be available. 
names <- componentNamesFromLBI verbosity distPref "test suites" (\c -> case c of { LBI.CTest{} -> True; _ -> False }) let extraArgs' | null extraArgs = case names of ComponentNamesUnknown -> [] ComponentNames names' -> [ Make.unUnqualComponentName name | LBI.CTestName name <- names' ] | otherwise = extraArgs build verbosity config distPref buildFlags' extraArgs' setupWrapper verbosity setupOptions Nothing Cabal.testCommand (const testFlags') (const extraArgs') data ComponentNames = ComponentNamesUnknown | ComponentNames [LBI.ComponentName] -- | Return the names of all buildable components matching a given predicate. componentNamesFromLBI :: Verbosity -> FilePath -> String -> (LBI.Component -> Bool) -> IO ComponentNames componentNamesFromLBI verbosity distPref targetsDescr compPred = do eLBI <- tryGetPersistBuildConfig distPref case eLBI of Left err -> case err of -- Note: the build config could have been generated by a custom setup -- script built against a different Cabal version, so it's crucial that -- we ignore the bad version error here. ConfigStateFileBadVersion _ _ _ -> return ComponentNamesUnknown _ -> die' verbosity (show err) Right lbi -> do let pkgDescr = LBI.localPkgDescr lbi names = map LBI.componentName . filter (buildable . LBI.componentBuildInfo) . filter compPred $ LBI.pkgComponents pkgDescr if null names then do notice verbosity $ "Package has no buildable " ++ targetsDescr ++ "." exitSuccess -- See #3215. else return $! 
(ComponentNames names) benchmarkAction :: (BuildFlags, BenchmarkFlags) -> [String] -> GlobalFlags -> IO () benchmarkAction (buildFlags, benchmarkFlags) extraArgs globalFlags = do let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) config <- loadConfigOrSandboxConfig verbosity globalFlags distPref <- findSavedDistPref config (benchmarkDistPref benchmarkFlags) let buildFlags' = buildFlags { buildVerbosity = benchmarkVerbosity benchmarkFlags } let checkFlags = Check $ \_ flags@(configFlags, configExFlags) -> if fromFlagOrDefault False (configBenchmarks configFlags) then pure (mempty, flags) else do info verbosity "reconfiguring to enable benchmarks" let flags' = ( configFlags { configBenchmarks = toFlag True } , configExFlags ) pure (Any True, flags') config' <- reconfigure configureAction verbosity distPref (buildNumJobs buildFlags') checkFlags [] globalFlags config nixShell verbosity distPref globalFlags config $ do let setupOptions = defaultSetupScriptOptions { useDistPref = distPref } benchmarkFlags'= benchmarkFlags { benchmarkDistPref = toFlag distPref } -- The package was just configured, so the LBI must be available. 
names <- componentNamesFromLBI verbosity distPref "benchmarks" (\c -> case c of { LBI.CBench{} -> True; _ -> False; }) let extraArgs' | null extraArgs = case names of ComponentNamesUnknown -> [] ComponentNames names' -> [ Make.unUnqualComponentName name | LBI.CBenchName name <- names'] | otherwise = extraArgs build verbosity config' distPref buildFlags' extraArgs' setupWrapper verbosity setupOptions Nothing Cabal.benchmarkCommand (const benchmarkFlags') (const extraArgs') haddockAction :: HaddockFlags -> [String] -> Action haddockAction haddockFlags extraArgs globalFlags = do let verbosity = fromFlag (haddockVerbosity haddockFlags) config <- loadConfigOrSandboxConfig verbosity globalFlags distPref <- findSavedDistPref config (haddockDistPref haddockFlags) config' <- reconfigure configureAction verbosity distPref NoFlag mempty [] globalFlags config nixShell verbosity distPref globalFlags config $ do let haddockFlags' = defaultHaddockFlags `mappend` savedHaddockFlags config' `mappend` haddockFlags { haddockDistPref = toFlag distPref } setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref } setupWrapper verbosity setupScriptOptions Nothing haddockCommand (const haddockFlags') (const extraArgs) when (haddockForHackage haddockFlags == Flag ForHackage) $ do pkg <- fmap LBI.localPkgDescr (getPersistBuildConfig distPref) let dest = distPref name <.> "tar.gz" name = display (packageId pkg) ++ "-docs" docDir = distPref "doc" "html" createTarGzFile dest docDir name notice verbosity $ "Documentation tarball created: " ++ dest cleanAction :: CleanFlags -> [String] -> Action cleanAction cleanFlags extraArgs globalFlags = do load <- try (loadConfigOrSandboxConfig verbosity globalFlags) let config = either (\(SomeException _) -> mempty) id load distPref <- findSavedDistPref config (cleanDistPref cleanFlags) let setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref , useWin32CleanHack = True } cleanFlags' = cleanFlags { cleanDistPref = toFlag 
distPref } setupWrapper verbosity setupScriptOptions Nothing cleanCommand (const cleanFlags') (const extraArgs) where verbosity = fromFlagOrDefault normal (cleanVerbosity cleanFlags) listAction :: ListFlags -> [String] -> Action listAction listFlags extraArgs globalFlags = do let verbosity = fromFlag (listVerbosity listFlags) config <- loadConfigOrSandboxConfig verbosity globalFlags let configFlags' = savedConfigureFlags config configFlags = configFlags' { configPackageDBs = configPackageDBs configFlags' `mappend` listPackageDBs listFlags , configHcPath = listHcPath listFlags } globalFlags' = savedGlobalFlags config `mappend` globalFlags compProgdb <- if listNeedsCompiler listFlags then do (comp, _, progdb) <- configCompilerAux' configFlags return (Just (comp, progdb)) else return Nothing withRepoContext verbosity globalFlags' $ \repoContext -> List.list verbosity (configPackageDB' configFlags) repoContext compProgdb listFlags extraArgs infoAction :: InfoFlags -> [String] -> Action infoAction infoFlags extraArgs globalFlags = do let verbosity = fromFlag (infoVerbosity infoFlags) targets <- readUserTargets verbosity extraArgs config <- loadConfigOrSandboxConfig verbosity globalFlags let configFlags' = savedConfigureFlags config configFlags = configFlags' { configPackageDBs = configPackageDBs configFlags' `mappend` infoPackageDBs infoFlags } globalFlags' = savedGlobalFlags config `mappend` globalFlags (comp, _, progdb) <- configCompilerAuxEx configFlags withRepoContext verbosity globalFlags' $ \repoContext -> List.info verbosity (configPackageDB' configFlags) repoContext comp progdb globalFlags' infoFlags targets fetchAction :: FetchFlags -> [String] -> Action fetchAction fetchFlags extraArgs globalFlags = do let verbosity = fromFlag (fetchVerbosity fetchFlags) targets <- readUserTargets verbosity extraArgs config <- loadConfig verbosity (globalConfigFile globalFlags) let configFlags = savedConfigureFlags config globalFlags' = savedGlobalFlags config `mappend` 
globalFlags (comp, platform, progdb) <- configCompilerAux' configFlags withRepoContext verbosity globalFlags' $ \repoContext -> fetch verbosity (configPackageDB' configFlags) repoContext comp platform progdb globalFlags' fetchFlags targets freezeAction :: FreezeFlags -> [String] -> Action freezeAction freezeFlags _extraArgs globalFlags = do let verbosity = fromFlag (freezeVerbosity freezeFlags) config <- loadConfigOrSandboxConfig verbosity globalFlags distPref <- findSavedDistPref config NoFlag nixShell verbosity distPref globalFlags config $ do let configFlags = savedConfigureFlags config globalFlags' = savedGlobalFlags config `mappend` globalFlags (comp, platform, progdb) <- configCompilerAux' configFlags withRepoContext verbosity globalFlags' $ \repoContext -> freeze verbosity (configPackageDB' configFlags) repoContext comp platform progdb globalFlags' freezeFlags genBoundsAction :: FreezeFlags -> [String] -> GlobalFlags -> IO () genBoundsAction freezeFlags _extraArgs globalFlags = do let verbosity = fromFlag (freezeVerbosity freezeFlags) config <- loadConfigOrSandboxConfig verbosity globalFlags distPref <- findSavedDistPref config NoFlag nixShell verbosity distPref globalFlags config $ do let configFlags = savedConfigureFlags config globalFlags' = savedGlobalFlags config `mappend` globalFlags (comp, platform, progdb) <- configCompilerAux' configFlags withRepoContext verbosity globalFlags' $ \repoContext -> genBounds verbosity (configPackageDB' configFlags) repoContext comp platform progdb globalFlags' freezeFlags uploadAction :: UploadFlags -> [String] -> Action uploadAction uploadFlags extraArgs globalFlags = do config <- loadConfig verbosity (globalConfigFile globalFlags) let uploadFlags' = savedUploadFlags config `mappend` uploadFlags globalFlags' = savedGlobalFlags config `mappend` globalFlags tarfiles = extraArgs when (null tarfiles && not (fromFlag (uploadDoc uploadFlags'))) $ die' verbosity "the 'upload' command expects at least one .tar.gz archive." 
checkTarFiles extraArgs maybe_password <- case uploadPasswordCmd uploadFlags' of Flag (xs:xss) -> Just . Password <$> getProgramInvocationOutput verbosity (simpleProgramInvocation xs xss) _ -> pure $ flagToMaybe $ uploadPassword uploadFlags' withRepoContext verbosity globalFlags' $ \repoContext -> do if fromFlag (uploadDoc uploadFlags') then do when (length tarfiles > 1) $ die' verbosity $ "the 'upload' command can only upload documentation " ++ "for one package at a time." tarfile <- maybe (generateDocTarball config) return $ listToMaybe tarfiles Upload.uploadDoc verbosity repoContext (flagToMaybe $ uploadUsername uploadFlags') maybe_password (fromFlag (uploadCandidate uploadFlags')) tarfile else do Upload.upload verbosity repoContext (flagToMaybe $ uploadUsername uploadFlags') maybe_password (fromFlag (uploadCandidate uploadFlags')) tarfiles where verbosity = fromFlag (uploadVerbosity uploadFlags) checkTarFiles tarfiles | not (null otherFiles) = die' verbosity $ "the 'upload' command expects only .tar.gz archives: " ++ intercalate ", " otherFiles | otherwise = sequence_ [ do exists <- doesFileExist tarfile unless exists $ die' verbosity $ "file not found: " ++ tarfile | tarfile <- tarfiles ] where otherFiles = filter (not . isTarGzFile) tarfiles isTarGzFile file = case splitExtension file of (file', ".gz") -> takeExtension file' == ".tar" _ -> False generateDocTarball config = do notice verbosity $ "No documentation tarball specified. " ++ "Building a documentation tarball with default settings...\n" ++ "If you need to customise Haddock options, " ++ "run 'haddock --for-hackage' first " ++ "to generate a documentation tarball." 
haddockAction (defaultHaddockFlags { haddockForHackage = Flag ForHackage }) [] globalFlags distPref <- findSavedDistPref config NoFlag pkg <- fmap LBI.localPkgDescr (getPersistBuildConfig distPref) return $ distPref display (packageId pkg) ++ "-docs" <.> "tar.gz" checkAction :: Flag Verbosity -> [String] -> Action checkAction verbosityFlag extraArgs _globalFlags = do let verbosity = fromFlag verbosityFlag unless (null extraArgs) $ die' verbosity $ "'check' doesn't take any extra arguments: " ++ unwords extraArgs allOk <- Check.check (fromFlag verbosityFlag) unless allOk exitFailure formatAction :: Flag Verbosity -> [String] -> Action formatAction verbosityFlag extraArgs _globalFlags = do let verbosity = fromFlag verbosityFlag path <- case extraArgs of [] -> do cwd <- getCurrentDirectory tryFindPackageDesc verbosity cwd (p:_) -> return p pkgDesc <- readGenericPackageDescription verbosity path -- Uses 'writeFileAtomic' under the hood. writeGenericPackageDescription path pkgDesc reportAction :: ReportFlags -> [String] -> Action reportAction reportFlags extraArgs globalFlags = do let verbosity = fromFlag (reportVerbosity reportFlags) unless (null extraArgs) $ die' verbosity $ "'report' doesn't take any extra arguments: " ++ unwords extraArgs config <- loadConfig verbosity (globalConfigFile globalFlags) let globalFlags' = savedGlobalFlags config `mappend` globalFlags reportFlags' = savedReportFlags config `mappend` reportFlags withRepoContext verbosity globalFlags' $ \repoContext -> Upload.report verbosity repoContext (flagToMaybe $ reportUsername reportFlags') (flagToMaybe $ reportPassword reportFlags') runAction :: BuildFlags -> [String] -> Action runAction buildFlags extraArgs globalFlags = do let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) config <- loadConfigOrSandboxConfig verbosity globalFlags distPref <- findSavedDistPref config (buildDistPref buildFlags) config' <- reconfigure configureAction verbosity distPref (buildNumJobs buildFlags) 
mempty [] globalFlags config nixShell verbosity distPref globalFlags config $ do lbi <- getPersistBuildConfig distPref (exe, exeArgs) <- splitRunArgs verbosity lbi extraArgs build verbosity config' distPref buildFlags ["exe:" ++ display (exeName exe)] run verbosity lbi exe exeArgs getAction :: GetFlags -> [String] -> Action getAction getFlags extraArgs globalFlags = do let verbosity = fromFlag (getVerbosity getFlags) targets <- readUserTargets verbosity extraArgs config <- loadConfigOrSandboxConfig verbosity globalFlags let globalFlags' = savedGlobalFlags config `mappend` globalFlags withRepoContext verbosity (savedGlobalFlags config) $ \repoContext -> get verbosity repoContext globalFlags' getFlags targets unpackAction :: GetFlags -> [String] -> Action unpackAction getFlags extraArgs globalFlags = do getAction getFlags extraArgs globalFlags initAction :: InitFlags -> [String] -> Action initAction initFlags extraArgs globalFlags = do -- it takes the first value within extraArgs (if there's one) -- and uses it as the root directory for the new project case extraArgs of [] -> initAction' [projectDir] -> do createDirectoryIfMissingVerbose verbosity True projectDir withCurrentDirectory projectDir initAction' _ -> die' verbosity $ "'init' only takes a single, optional, extra " ++ "argument for the project root directory" where initAction' = do confFlags <- loadConfigOrSandboxConfig verbosity globalFlags -- override with `--with-compiler` from CLI if available let confFlags' = savedConfigureFlags confFlags `mappend` compFlags initFlags' = savedInitFlags confFlags `mappend` initFlags globalFlags' = savedGlobalFlags confFlags `mappend` globalFlags (comp, _, progdb) <- configCompilerAux' confFlags' withRepoContext verbosity globalFlags' $ \repoContext -> initCmd verbosity (configPackageDB' confFlags') repoContext comp progdb initFlags' verbosity = fromFlag (initVerbosity initFlags) compFlags = mempty { configHcPath = initHcPath initFlags } userConfigAction :: 
UserConfigFlags -> [String] -> Action userConfigAction ucflags extraArgs globalFlags = do let verbosity = fromFlag (userConfigVerbosity ucflags) frc = fromFlag (userConfigForce ucflags) extraLines = fromFlag (userConfigAppendLines ucflags) case extraArgs of ("init":_) -> do path <- configFile fileExists <- doesFileExist path if (not fileExists || (fileExists && frc)) then void $ createDefaultConfigFile verbosity extraLines path else die' verbosity $ path ++ " already exists." ("diff":_) -> traverse_ putStrLn =<< userConfigDiff verbosity globalFlags extraLines ("update":_) -> userConfigUpdate verbosity globalFlags extraLines -- Error handling. [] -> die' verbosity $ "Please specify a subcommand (see 'help user-config')" _ -> die' verbosity $ "Unknown 'user-config' subcommand: " ++ unwords extraArgs where configFile = getConfigFilePath (globalConfigFile globalFlags) -- | Used as an entry point when cabal-install needs to invoke itself -- as a setup script. This can happen e.g. when doing parallel builds. 
-- actAsSetupAction :: ActAsSetupFlags -> [String] -> Action actAsSetupAction actAsSetupFlags args _globalFlags = let bt = fromFlag (actAsSetupBuildType actAsSetupFlags) in case bt of Simple -> Simple.defaultMainArgs args Configure -> Simple.defaultMainWithHooksArgs Simple.autoconfUserHooks args Make -> Make.defaultMainArgs args Custom -> error "actAsSetupAction Custom" manpageAction :: [CommandSpec action] -> ManpageFlags -> [String] -> Action manpageAction commands flags extraArgs _ = do let verbosity = fromFlag (manpageVerbosity flags) unless (null extraArgs) $ die' verbosity $ "'man' doesn't take any extra arguments: " ++ unwords extraArgs pname <- getProgName let cabalCmd = if takeExtension pname == ".exe" then dropExtension pname else pname manpageCmd cabalCmd commands flags cabal-install-3.8.1.0/src/Distribution/Client/BuildReports/0000755000000000000000000000000007346545000021662 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/BuildReports/Anonymous.hs0000644000000000000000000001443607346545000024216 0ustar0000000000000000{-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE OverloadedStrings #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Reporting -- Copyright : (c) David Waern 2008 -- License : BSD-like -- -- Maintainer : david.waern@gmail.com -- Stability : experimental -- Portability : portable -- -- Anonymous build report data structure, printing and parsing -- ----------------------------------------------------------------------------- module Distribution.Client.BuildReports.Anonymous ( BuildReport(..), InstallOutcome(..), Outcome(..), -- * Constructing and writing reports newBuildReport, -- * parsing and pretty printing parseBuildReport, parseBuildReportList, showBuildReport, cabalInstallID -- showList, ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.CabalSpecVersion import Distribution.Client.BuildReports.Types import 
Distribution.Client.Version (cabalInstallVersion) import Distribution.Compiler (CompilerId (..)) import Distribution.FieldGrammar import Distribution.Fields import Distribution.Package (PackageIdentifier (..), mkPackageName) import Distribution.PackageDescription (FlagAssignment) import Distribution.Parsec import Distribution.System (Arch, OS) import qualified Distribution.Client.BuildReports.Lens as L import qualified Distribution.Client.Types as BR (BuildFailure (..), BuildOutcome, BuildResult (..), DocsResult (..), TestsResult (..)) import qualified Data.ByteString as BS import qualified Data.ByteString.Char8 as BS8 ------------------------------------------------------------------------------- -- New ------------------------------------------------------------------------------- newBuildReport :: OS -> Arch -> CompilerId -> PackageIdentifier -> FlagAssignment -> [PackageIdentifier] -> BR.BuildOutcome -> BuildReport newBuildReport os' arch' comp pkgid flags deps result = BuildReport { package = pkgid, os = os', arch = arch', compiler = comp, client = cabalInstallID, flagAssignment = flags, dependencies = deps, installOutcome = convertInstallOutcome, -- cabalVersion = undefined docsOutcome = convertDocsOutcome, testsOutcome = convertTestsOutcome } where convertInstallOutcome = case result of Left BR.PlanningFailed -> PlanningFailed Left (BR.DependentFailed p) -> DependencyFailed p Left (BR.DownloadFailed _) -> DownloadFailed Left (BR.UnpackFailed _) -> UnpackFailed Left (BR.ConfigureFailed _) -> ConfigureFailed Left (BR.BuildFailed _) -> BuildFailed Left (BR.TestsFailed _) -> TestsFailed Left (BR.InstallFailed _) -> InstallFailed Right (BR.BuildResult _ _ _) -> InstallOk convertDocsOutcome = case result of Left _ -> NotTried Right (BR.BuildResult BR.DocsNotTried _ _) -> NotTried Right (BR.BuildResult BR.DocsFailed _ _) -> Failed Right (BR.BuildResult BR.DocsOk _ _) -> Ok convertTestsOutcome = case result of Left (BR.TestsFailed _) -> Failed Left _ -> NotTried 
Right (BR.BuildResult _ BR.TestsNotTried _) -> NotTried Right (BR.BuildResult _ BR.TestsOk _) -> Ok cabalInstallID :: PackageIdentifier cabalInstallID = PackageIdentifier (mkPackageName "cabal-install") cabalInstallVersion ------------------------------------------------------------------------------- -- FieldGrammar ------------------------------------------------------------------------------- fieldDescrs :: ( Applicative (g BuildReport), FieldGrammar c g , c (Identity Arch) , c (Identity CompilerId) , c (Identity FlagAssignment) , c (Identity InstallOutcome) , c (Identity OS) , c (Identity Outcome) , c (Identity PackageIdentifier) , c (List VCat (Identity PackageIdentifier) PackageIdentifier) ) => g BuildReport BuildReport fieldDescrs = BuildReport <$> uniqueField "package" L.package <*> uniqueField "os" L.os <*> uniqueField "arch" L.arch <*> uniqueField "compiler" L.compiler <*> uniqueField "client" L.client <*> monoidalField "flags" L.flagAssignment <*> monoidalFieldAla "dependencies" (alaList VCat) L.dependencies <*> uniqueField "install-outcome" L.installOutcome <*> uniqueField "docs-outcome" L.docsOutcome <*> uniqueField "tests-outcome" L.testsOutcome -- ----------------------------------------------------------------------------- -- Parsing parseBuildReport :: BS.ByteString -> Either String BuildReport parseBuildReport s = case snd $ runParseResult $ parseFields s of Left (_, perrors) -> Left $ unlines [ err | PError _ err <- toList perrors ] Right report -> Right report parseFields :: BS.ByteString -> ParseResult BuildReport parseFields input = do fields <- either (parseFatalFailure zeroPos . 
show) pure $ readFields input case partitionFields fields of (fields', []) -> parseFieldGrammar CabalSpecV2_4 fields' fieldDescrs _otherwise -> parseFatalFailure zeroPos "found sections in BuildReport" parseBuildReportList :: BS.ByteString -> [BuildReport] parseBuildReportList str = [ report | Right report <- map parseBuildReport (split str) ] where split :: BS.ByteString -> [BS.ByteString] split = filter (not . BS.null) . unfoldr chunk . BS8.lines chunk [] = Nothing chunk ls = case break BS.null ls of (r, rs) -> Just (BS8.unlines r, dropWhile BS.null rs) -- ----------------------------------------------------------------------------- -- Pretty-printing showBuildReport :: BuildReport -> String showBuildReport = showFields (const NoComment) . prettyFieldGrammar CabalSpecV2_4 fieldDescrs cabal-install-3.8.1.0/src/Distribution/Client/BuildReports/Lens.hs0000644000000000000000000000333707346545000023125 0ustar0000000000000000module Distribution.Client.BuildReports.Lens ( BuildReport, module Distribution.Client.BuildReports.Lens, ) where import Distribution.Client.Compat.Prelude import Distribution.Compat.Lens import Prelude () import Distribution.Client.BuildReports.Types (BuildReport, InstallOutcome, Outcome) import Distribution.Compiler (CompilerId) import Distribution.System (Arch, OS) import Distribution.Types.Flag (FlagAssignment) import Distribution.Types.PackageId (PackageIdentifier) import qualified Distribution.Client.BuildReports.Types as T package :: Lens' BuildReport PackageIdentifier package f s = fmap (\x -> s { T.package = x }) (f (T.package s)) os :: Lens' BuildReport OS os f s = fmap (\x -> s { T.os = x }) (f (T.os s)) arch :: Lens' BuildReport Arch arch f s = fmap (\x -> s { T.arch = x }) (f (T.arch s)) compiler :: Lens' BuildReport CompilerId compiler f s = fmap (\x -> s { T.compiler = x }) (f (T.compiler s)) client :: Lens' BuildReport PackageIdentifier client f s = fmap (\x -> s { T.client = x }) (f (T.client s)) flagAssignment :: Lens' BuildReport 
FlagAssignment flagAssignment f s = fmap (\x -> s { T.flagAssignment = x }) (f (T.flagAssignment s)) dependencies :: Lens' BuildReport [PackageIdentifier] dependencies f s = fmap (\x -> s { T.dependencies = x }) (f (T.dependencies s)) installOutcome :: Lens' BuildReport InstallOutcome installOutcome f s = fmap (\x -> s { T.installOutcome = x }) (f (T.installOutcome s)) docsOutcome :: Lens' BuildReport Outcome docsOutcome f s = fmap (\x -> s { T.docsOutcome = x }) (f (T.docsOutcome s)) testsOutcome :: Lens' BuildReport Outcome testsOutcome f s = fmap (\x -> s { T.testsOutcome = x }) (f (T.testsOutcome s)) cabal-install-3.8.1.0/src/Distribution/Client/BuildReports/Storage.hs0000644000000000000000000001372207346545000023627 0ustar0000000000000000{-# OPTIONS_GHC -fno-warn-incomplete-uni-patterns #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Reporting -- Copyright : (c) David Waern 2008 -- License : BSD-like -- -- Maintainer : david.waern@gmail.com -- Stability : experimental -- Portability : portable -- -- Anonymous build report data structure, printing and parsing -- ----------------------------------------------------------------------------- module Distribution.Client.BuildReports.Storage ( -- * Storing and retrieving build reports storeAnonymous, storeLocal, -- retrieve, -- * 'InstallPlan' support fromInstallPlan, fromPlanningFailure, ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.BuildReports.Anonymous (BuildReport, showBuildReport, newBuildReport) import qualified Distribution.Client.BuildReports.Anonymous as BuildReport import Distribution.Client.Types import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.InstallPlan ( InstallPlan ) import qualified Distribution.Solver.Types.ComponentDeps as CD import Distribution.Solver.Types.SourcePackage import Distribution.Package ( PackageId, packageId ) import 
Distribution.PackageDescription ( FlagAssignment ) import Distribution.Simple.InstallDirs ( PathTemplate, fromPathTemplate , initialPathTemplateEnv, substPathTemplate ) import Distribution.System ( Platform(Platform) ) import Distribution.Compiler ( CompilerId(..), CompilerInfo(..) ) import Distribution.Simple.Utils ( equating ) import Data.List.NonEmpty ( groupBy ) import qualified Data.List as L import System.FilePath ( (), takeDirectory ) import System.Directory ( createDirectoryIfMissing ) storeAnonymous :: [(BuildReport, Maybe Repo)] -> IO () storeAnonymous reports = sequence_ [ appendFile file (concatMap format reports') | (repo, reports') <- separate reports , let file = repoLocalDir repo "build-reports.log" ] --TODO: make this concurrency safe, either lock the report file or make sure -- the writes for each report are atomic (under 4k and flush at boundaries) where format r = '\n' : showBuildReport r ++ "\n" separate :: [(BuildReport, Maybe Repo)] -> [(Repo, [BuildReport])] separate = map (\rs@((_,repo,_):_) -> (repo, [ r | (r,_,_) <- rs ])) . map (concatMap toList) . L.groupBy (equating (repoName' . head)) . sortBy (comparing (repoName' . head)) . groupBy (equating repoName') . 
onlyRemote repoName' (_,_,rrepo) = remoteRepoName rrepo onlyRemote :: [(BuildReport, Maybe Repo)] -> [(BuildReport, Repo, RemoteRepo)] onlyRemote rs = [ (report, repo, remoteRepo) | (report, Just repo) <- rs , Just remoteRepo <- [maybeRepoRemote repo] ] storeLocal :: CompilerInfo -> [PathTemplate] -> [(BuildReport, Maybe Repo)] -> Platform -> IO () storeLocal cinfo templates reports platform = sequence_ [ do createDirectoryIfMissing True (takeDirectory file) appendFile file output --TODO: make this concurrency safe, either lock the report file or make -- sure the writes for each report are atomic | (file, reports') <- groupByFileName [ (reportFileName template report, report) | template <- templates , (report, _repo) <- reports ] , let output = concatMap format reports' ] where format r = '\n' : showBuildReport r ++ "\n" reportFileName template report = fromPathTemplate (substPathTemplate env template) where env = initialPathTemplateEnv (BuildReport.package report) -- TODO: In principle, we can support $pkgkey, but only -- if the configure step succeeds. So add a Maybe field -- to the build report, and either use that or make up -- a fake identifier if it's not available. (error "storeLocal: package key not available") cinfo platform groupByFileName = map (\grp@((filename,_):_) -> (filename, map snd grp)) . L.groupBy (equating fst) . sortBy (comparing fst) -- ------------------------------------------------------------ -- * InstallPlan support -- ------------------------------------------------------------ fromInstallPlan :: Platform -> CompilerId -> InstallPlan -> BuildOutcomes -> [(BuildReport, Maybe Repo)] fromInstallPlan platform comp plan buildOutcomes = mapMaybe (\pkg -> fromPlanPackage platform comp pkg (InstallPlan.lookupBuildOutcome pkg buildOutcomes)) . 
InstallPlan.toList $ plan fromPlanPackage :: Platform -> CompilerId -> InstallPlan.PlanPackage -> Maybe BuildOutcome -> Maybe (BuildReport, Maybe Repo) fromPlanPackage (Platform arch os) comp (InstallPlan.Configured (ConfiguredPackage _ srcPkg flags _ deps)) (Just buildResult) = Just ( newBuildReport os arch comp (packageId srcPkg) flags (map packageId (CD.nonSetupDeps deps)) buildResult , extractRepo srcPkg) where extractRepo (SourcePackage { srcpkgSource = RepoTarballPackage repo _ _ }) = Just repo extractRepo _ = Nothing fromPlanPackage _ _ _ _ = Nothing fromPlanningFailure :: Platform -> CompilerId -> [PackageId] -> FlagAssignment -> [(BuildReport, Maybe Repo)] fromPlanningFailure (Platform arch os) comp pkgids flags = [ (newBuildReport os arch comp pkgid flags [] (Left PlanningFailed), Nothing) | pkgid <- pkgids ] cabal-install-3.8.1.0/src/Distribution/Client/BuildReports/Types.hs0000644000000000000000000001253407346545000023327 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.BuildReports.Types -- Copyright : (c) Duncan Coutts 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Types related to build reporting -- ----------------------------------------------------------------------------- module Distribution.Client.BuildReports.Types ( ReportLevel(..), BuildReport (..), InstallOutcome(..), Outcome(..), ) where import Distribution.Client.Compat.Prelude import Prelude () import qualified Distribution.Compat.CharParsing as P import qualified Text.PrettyPrint as Disp import Distribution.Compiler (CompilerId (..)) import Distribution.PackageDescription (FlagAssignment) import Distribution.System (Arch, OS) import Distribution.Types.PackageId (PackageIdentifier) ------------------------------------------------------------------------------- -- ReportLevel 
------------------------------------------------------------------------------- data ReportLevel = NoReports | AnonymousReports | DetailedReports deriving (Eq, Ord, Enum, Bounded, Show, Generic) instance Binary ReportLevel instance Structured ReportLevel instance Pretty ReportLevel where pretty NoReports = Disp.text "none" pretty AnonymousReports = Disp.text "anonymous" pretty DetailedReports = Disp.text "detailed" instance Parsec ReportLevel where parsec = do name <- P.munch1 isAlpha case lowercase name of "none" -> return NoReports "anonymous" -> return AnonymousReports "detailed" -> return DetailedReports _ -> P.unexpected $ "ReportLevel: " ++ name lowercase :: String -> String lowercase = map toLower ------------------------------------------------------------------------------- -- BuildReport ------------------------------------------------------------------------------- data BuildReport = BuildReport { -- | The package this build report is about package :: PackageIdentifier, -- | The OS and Arch the package was built on os :: OS, arch :: Arch, -- | The Haskell compiler (and hopefully version) used compiler :: CompilerId, -- | The uploading client, ie cabal-install-x.y.z client :: PackageIdentifier, -- | Which configurations flags we used flagAssignment :: FlagAssignment, -- | Which dependent packages we were using exactly dependencies :: [PackageIdentifier], -- | Did installing work ok? installOutcome :: InstallOutcome, -- Which version of the Cabal library was used to compile the Setup.hs -- cabalVersion :: Version, -- Which build tools we were using (with versions) -- tools :: [PackageIdentifier], -- | Configure outcome, did configure work ok? docsOutcome :: Outcome, -- | Configure outcome, did configure work ok? 
testsOutcome :: Outcome } deriving (Eq, Show, Generic) ------------------------------------------------------------------------------- -- InstallOutcome ------------------------------------------------------------------------------- data InstallOutcome = PlanningFailed | DependencyFailed PackageIdentifier | DownloadFailed | UnpackFailed | SetupFailed | ConfigureFailed | BuildFailed | TestsFailed | InstallFailed | InstallOk deriving (Eq, Show, Generic) instance Pretty InstallOutcome where pretty PlanningFailed = Disp.text "PlanningFailed" pretty (DependencyFailed pkgid) = Disp.text "DependencyFailed" <+> pretty pkgid pretty DownloadFailed = Disp.text "DownloadFailed" pretty UnpackFailed = Disp.text "UnpackFailed" pretty SetupFailed = Disp.text "SetupFailed" pretty ConfigureFailed = Disp.text "ConfigureFailed" pretty BuildFailed = Disp.text "BuildFailed" pretty TestsFailed = Disp.text "TestsFailed" pretty InstallFailed = Disp.text "InstallFailed" pretty InstallOk = Disp.text "InstallOk" instance Parsec InstallOutcome where parsec = do name <- P.munch1 isAlpha case name of "PlanningFailed" -> return PlanningFailed "DependencyFailed" -> DependencyFailed <$ P.spaces <*> parsec "DownloadFailed" -> return DownloadFailed "UnpackFailed" -> return UnpackFailed "SetupFailed" -> return SetupFailed "ConfigureFailed" -> return ConfigureFailed "BuildFailed" -> return BuildFailed "TestsFailed" -> return TestsFailed "InstallFailed" -> return InstallFailed "InstallOk" -> return InstallOk _ -> P.unexpected $ "InstallOutcome: " ++ name ------------------------------------------------------------------------------- -- Outcome ------------------------------------------------------------------------------- data Outcome = NotTried | Failed | Ok deriving (Eq, Show, Enum, Bounded, Generic) instance Pretty Outcome where pretty NotTried = Disp.text "NotTried" pretty Failed = Disp.text "Failed" pretty Ok = Disp.text "Ok" instance Parsec Outcome where parsec = do name <- P.munch1 isAlpha case 
name of "NotTried" -> return NotTried "Failed" -> return Failed "Ok" -> return Ok _ -> P.unexpected $ "Outcome: " ++ name cabal-install-3.8.1.0/src/Distribution/Client/BuildReports/Upload.hs0000644000000000000000000000663307346545000023452 0ustar0000000000000000{-# LANGUAGE CPP, PatternGuards #-} -- This is a quick hack for uploading build reports to Hackage. module Distribution.Client.BuildReports.Upload ( BuildLog , BuildReportId , uploadReports ) where import Distribution.Client.Compat.Prelude import Prelude () {- import Network.Browser ( BrowserAction, request, setAllowRedirects ) import Network.HTTP ( Header(..), HeaderName(..) , Request(..), RequestMethod(..), Response(..) ) import Network.TCP (HandleStream) -} import Network.URI (URI, uriPath) --parseRelativeReference, relativeTo) import System.FilePath.Posix ( () ) import qualified Distribution.Client.BuildReports.Anonymous as BuildReport import Distribution.Client.BuildReports.Anonymous (BuildReport, showBuildReport) import Distribution.Simple.Utils (die') import Distribution.Client.HttpUtils import Distribution.Client.Setup ( RepoContext(..) 
) type BuildReportId = URI type BuildLog = String uploadReports :: Verbosity -> RepoContext -> (String, String) -> URI -> [(BuildReport, Maybe BuildLog)] -> IO () uploadReports verbosity repoCtxt auth uri reports = do for_ reports $ \(report, mbBuildLog) -> do buildId <- postBuildReport verbosity repoCtxt auth uri report case mbBuildLog of Just buildLog -> putBuildLog verbosity repoCtxt auth buildId buildLog Nothing -> return () postBuildReport :: Verbosity -> RepoContext -> (String, String) -> URI -> BuildReport -> IO BuildReportId postBuildReport verbosity repoCtxt auth uri buildReport = do let fullURI = uri { uriPath = "/package" prettyShow (BuildReport.package buildReport) "reports" } transport <- repoContextGetTransport repoCtxt res <- postHttp transport verbosity fullURI (showBuildReport buildReport) (Just auth) case res of (303, redir) -> return $ undefined redir --TODO parse redir _ -> die' verbosity "unrecognized response" -- give response {- setAllowRedirects False (_, response) <- request Request { rqURI = uri { uriPath = "/package" prettyShow (BuildReport.package buildReport) "reports" }, rqMethod = POST, rqHeaders = [Header HdrContentType ("text/plain"), Header HdrContentLength (show (length body)), Header HdrAccept ("text/plain")], rqBody = body } case rspCode response of (3,0,3) | [Just buildId] <- [ do rel <- parseRelativeReference location #if defined(VERSION_network_uri) return $ relativeTo rel uri #elif defined(VERSION_network) #if MIN_VERSION_network(2,4,0) return $ relativeTo rel uri #else relativeTo rel uri #endif #endif | Header HdrLocation location <- rspHeaders response ] -> return $ buildId _ -> error "Unrecognised response from server." where body = BuildReport.show buildReport -} -- TODO force this to be a PUT? 
putBuildLog :: Verbosity -> RepoContext -> (String, String) -> BuildReportId -> BuildLog -> IO () putBuildLog verbosity repoCtxt auth reportId buildLog = do let fullURI = reportId {uriPath = uriPath reportId "log"} transport <- repoContextGetTransport repoCtxt res <- postHttp transport verbosity fullURI buildLog (Just auth) case res of (200, _) -> return () _ -> die' verbosity "unrecognized response" -- give response cabal-install-3.8.1.0/src/Distribution/Client/0000755000000000000000000000000007346545000017244 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/Check.hs0000644000000000000000000001160707346545000020622 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Check -- Copyright : (c) Lennart Kolmodin 2008 -- License : BSD-like -- -- Maintainer : kolmodin@haskell.org -- Stability : provisional -- Portability : portable -- -- Check a package for common mistakes -- ----------------------------------------------------------------------------- module Distribution.Client.Check ( check ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.Utils.Parsec (renderParseError) import Distribution.PackageDescription (GenericPackageDescription) import Distribution.PackageDescription.Check import Distribution.PackageDescription.Configuration (flattenPackageDescription) import Distribution.PackageDescription.Parsec (parseGenericPackageDescription, runParseResult) import Distribution.Parsec (PWarning (..), showPError, showPWarning) import Distribution.Simple.Utils (defaultPackageDesc, die', notice, warn) import System.IO (hPutStr, stderr) import qualified Data.ByteString as BS import qualified System.Directory as Dir readGenericPackageDescriptionCheck :: Verbosity -> FilePath -> IO ([PWarning], GenericPackageDescription) readGenericPackageDescriptionCheck verbosity fpath = do exists <- Dir.doesFileExist fpath 
unless exists $ die' verbosity $ "Error Parsing: file \"" ++ fpath ++ "\" doesn't exist. Cannot continue." bs <- BS.readFile fpath let (warnings, result) = runParseResult (parseGenericPackageDescription bs) case result of Left (_, errors) -> do traverse_ (warn verbosity . showPError fpath) errors hPutStr stderr $ renderParseError fpath bs errors warnings die' verbosity "parse error" Right x -> return (warnings, x) -- | Note: must be called with the CWD set to the directory containing -- the '.cabal' file. check :: Verbosity -> IO Bool check verbosity = do pdfile <- defaultPackageDesc verbosity (ws, ppd) <- readGenericPackageDescriptionCheck verbosity pdfile -- convert parse warnings into PackageChecks -- Note: we /could/ pick different levels, based on warning type. let ws' = [ PackageDistSuspicious (showPWarning pdfile w) | w <- ws ] -- flatten the generic package description into a regular package -- description -- TODO: this may give more warnings than it should give; -- consider two branches of a condition, one saying -- ghc-options: -Wall -- and the other -- ghc-options: -Werror -- joined into -- ghc-options: -Wall -Werror -- checkPackages will yield a warning on the last line, but it -- would not on each individual branch. -- However, this is the same way hackage does it, so we will yield -- the exact same errors as it will. let pkg_desc = flattenPackageDescription ppd ioChecks <- checkPackageFiles verbosity pkg_desc "." 
let packageChecks = ioChecks ++ checkPackage ppd (Just pkg_desc) ++ ws' buildImpossible = [ x | x@PackageBuildImpossible {} <- packageChecks ] buildWarning = [ x | x@PackageBuildWarning {} <- packageChecks ] distSuspicious = [ x | x@PackageDistSuspicious {} <- packageChecks ] ++ [ x | x@PackageDistSuspiciousWarn {} <- packageChecks ] distInexusable = [ x | x@PackageDistInexcusable {} <- packageChecks ] unless (null buildImpossible) $ do warn verbosity "The package will not build sanely due to these errors:" printCheckMessages buildImpossible unless (null buildWarning) $ do warn verbosity "The following warnings are likely to affect your build negatively:" printCheckMessages buildWarning unless (null distSuspicious) $ do warn verbosity "These warnings may cause trouble when distributing the package:" printCheckMessages distSuspicious unless (null distInexusable) $ do warn verbosity "The following errors will cause portability problems on other environments:" printCheckMessages distInexusable let isDistError (PackageDistSuspicious {}) = False isDistError (PackageDistSuspiciousWarn {}) = False isDistError _ = True isCheckError (PackageDistSuspiciousWarn {}) = False isCheckError _ = True errors = filter isDistError packageChecks unless (null errors) $ warn verbosity "Hackage would reject this package." when (null packageChecks) $ notice verbosity "No errors or warnings could be found in the package." return (not . any isCheckError $ packageChecks) where printCheckMessages = traverse_ (warn verbosity . 
explanation) cabal-install-3.8.1.0/src/Distribution/Client/CmdBench.hs0000644000000000000000000002362507346545000021253 0ustar0000000000000000{-# LANGUAGE RecordWildCards #-} -- | cabal-install CLI command: bench -- module Distribution.Client.CmdBench ( -- * The @bench@ CLI and action benchCommand, benchAction, -- * Internals exposed for testing componentNotBenchmarkProblem, isSubComponentProblem, noBenchmarksProblem, selectPackageTargets, selectComponentTarget ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.ProjectOrchestration import Distribution.Client.CmdErrorMessages ( renderTargetSelector, showTargetSelector, renderTargetProblem, renderTargetProblemNoTargets, plural, targetSelectorPluralPkgs, targetSelectorFilter ) import Distribution.Client.TargetProblem ( TargetProblem (..) ) import Distribution.Client.NixStyleOptions ( NixStyleFlags (..), nixStyleOptions, defaultNixStyleFlags ) import Distribution.Client.Setup ( GlobalFlags, ConfigFlags(..) ) import Distribution.Simple.Flag ( fromFlagOrDefault ) import Distribution.Simple.Command ( CommandUI(..), usageAlternatives ) import Distribution.Verbosity ( normal ) import Distribution.Simple.Utils ( wrapText, die' ) benchCommand :: CommandUI (NixStyleFlags ()) benchCommand = CommandUI { commandName = "v2-bench", commandSynopsis = "Run benchmarks.", commandUsage = usageAlternatives "v2-bench" [ "[TARGETS] [FLAGS]" ], commandDescription = Just $ \_ -> wrapText $ "Runs the specified benchmarks, first ensuring they are up to " ++ "date.\n\n" ++ "Any benchmark in any package in the project can be specified. " ++ "A package can be specified in which case all the benchmarks in the " ++ "package are run. The default is to run all the benchmarks in the " ++ "package in the current directory.\n\n" ++ "Dependencies are built or rebuilt as necessary. 
Additional " ++ "configuration flags can be specified on the command line and these " ++ "extend the project configuration from the 'cabal.project', " ++ "'cabal.project.local' and other files.", commandNotes = Just $ \pname -> "Examples:\n" ++ " " ++ pname ++ " v2-bench\n" ++ " Run all the benchmarks in the package in the current directory\n" ++ " " ++ pname ++ " v2-bench pkgname\n" ++ " Run all the benchmarks in the package named pkgname\n" ++ " " ++ pname ++ " v2-bench cname\n" ++ " Run the benchmark named cname\n" ++ " " ++ pname ++ " v2-bench cname -O2\n" ++ " Run the benchmark built with '-O2' (including local libs used)\n" , commandDefaultFlags = defaultNixStyleFlags () , commandOptions = nixStyleOptions (const []) } -- | The @build@ command does a lot. It brings the install plan up to date, -- selects that part of the plan needed by the given or implicit targets and -- then executes the plan. -- -- For more details on how this works, see the module -- "Distribution.Client.ProjectOrchestration" -- benchAction :: NixStyleFlags () -> [String] -> GlobalFlags -> IO () benchAction flags@NixStyleFlags {..} targetStrings globalFlags = do baseCtx <- establishProjectBaseContext verbosity cliConfig OtherCommand targetSelectors <- either (reportTargetSelectorProblems verbosity) return =<< readTargetSelectors (localPackages baseCtx) (Just BenchKind) targetStrings buildCtx <- runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do when (buildSettingOnlyDeps (buildSettings baseCtx)) $ die' verbosity $ "The bench command does not support '--only-dependencies'. " ++ "You may wish to use 'build --only-dependencies' and then " ++ "use 'bench'." -- Interpret the targets on the command line as bench targets -- (as opposed to say build or haddock targets). 
targets <- either (reportTargetProblems verbosity) return $ resolveTargets selectPackageTargets selectComponentTarget elaboratedPlan Nothing targetSelectors let elaboratedPlan' = pruneInstallPlanToTargets TargetActionBench targets elaboratedPlan return (elaboratedPlan', targets) printPlan verbosity baseCtx buildCtx buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes where verbosity = fromFlagOrDefault normal (configVerbosity configFlags) cliConfig = commandLineFlagsToProjectConfig globalFlags flags mempty -- ClientInstallFlags, not needed here -- | This defines what a 'TargetSelector' means for the @bench@ command. -- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, -- or otherwise classifies the problem. -- -- For the @bench@ command we select all buildable benchmarks, -- or fail if there are no benchmarks or no buildable benchmarks. -- selectPackageTargets :: TargetSelector -> [AvailableTarget k] -> Either BenchTargetProblem [k] selectPackageTargets targetSelector targets -- If there are any buildable benchmark targets then we select those | not (null targetsBenchBuildable) = Right targetsBenchBuildable -- If there are benchmarks but none are buildable then we report those | not (null targetsBench) = Left (TargetProblemNoneEnabled targetSelector targetsBench) -- If there are no benchmarks but some other targets then we report that | not (null targets) = Left (noBenchmarksProblem targetSelector) -- If there are no targets at all then we report that | otherwise = Left (TargetProblemNoTargets targetSelector) where targetsBenchBuildable = selectBuildableTargets . filterTargetsKind BenchKind $ targets targetsBench = forgetTargetsDetail . filterTargetsKind BenchKind $ targets -- | For a 'TargetComponent' 'TargetSelector', check if the component can be -- selected. 
-- -- For the @bench@ command we just need to check it is a benchmark, in addition -- to the basic checks on being buildable etc. -- selectComponentTarget :: SubComponentTarget -> AvailableTarget k -> Either BenchTargetProblem k selectComponentTarget subtarget@WholeComponent t | CBenchName _ <- availableTargetComponentName t = selectComponentTargetBasic subtarget t | otherwise = Left (componentNotBenchmarkProblem (availableTargetPackageId t) (availableTargetComponentName t)) selectComponentTarget subtarget t = Left (isSubComponentProblem (availableTargetPackageId t) (availableTargetComponentName t) subtarget) -- | The various error conditions that can occur when matching a -- 'TargetSelector' against 'AvailableTarget's for the @bench@ command. -- data BenchProblem = -- | The 'TargetSelector' matches targets but no benchmarks TargetProblemNoBenchmarks TargetSelector -- | The 'TargetSelector' refers to a component that is not a benchmark | TargetProblemComponentNotBenchmark PackageId ComponentName -- | Asking to benchmark an individual file or module is not supported | TargetProblemIsSubComponent PackageId ComponentName SubComponentTarget deriving (Eq, Show) type BenchTargetProblem = TargetProblem BenchProblem noBenchmarksProblem :: TargetSelector -> TargetProblem BenchProblem noBenchmarksProblem = CustomTargetProblem . TargetProblemNoBenchmarks componentNotBenchmarkProblem :: PackageId -> ComponentName -> TargetProblem BenchProblem componentNotBenchmarkProblem pkgid name = CustomTargetProblem $ TargetProblemComponentNotBenchmark pkgid name isSubComponentProblem :: PackageId -> ComponentName -> SubComponentTarget -> TargetProblem BenchProblem isSubComponentProblem pkgid name subcomponent = CustomTargetProblem $ TargetProblemIsSubComponent pkgid name subcomponent reportTargetProblems :: Verbosity -> [BenchTargetProblem] -> IO a reportTargetProblems verbosity = die' verbosity . unlines . 
map renderBenchTargetProblem renderBenchTargetProblem :: BenchTargetProblem -> String renderBenchTargetProblem (TargetProblemNoTargets targetSelector) = case targetSelectorFilter targetSelector of Just kind | kind /= BenchKind -> "The bench command is for running benchmarks, but the target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ "." _ -> renderTargetProblemNoTargets "benchmark" targetSelector renderBenchTargetProblem problem = renderTargetProblem "benchmark" renderBenchProblem problem renderBenchProblem :: BenchProblem -> String renderBenchProblem (TargetProblemNoBenchmarks targetSelector) = "Cannot run benchmarks for the target '" ++ showTargetSelector targetSelector ++ "' which refers to " ++ renderTargetSelector targetSelector ++ " because " ++ plural (targetSelectorPluralPkgs targetSelector) "it does" "they do" ++ " not contain any benchmarks." renderBenchProblem (TargetProblemComponentNotBenchmark pkgid cname) = "The bench command is for running benchmarks, but the target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ " from the package " ++ prettyShow pkgid ++ "." where targetSelector = TargetComponent pkgid cname WholeComponent renderBenchProblem (TargetProblemIsSubComponent pkgid cname subtarget) = "The bench command can only run benchmarks as a whole, " ++ "not files or modules within them, but the target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ "." 
where targetSelector = TargetComponent pkgid cname subtarget cabal-install-3.8.1.0/src/Distribution/Client/CmdBuild.hs0000644000000000000000000002007607346545000021270 0ustar0000000000000000{-# LANGUAGE RecordWildCards #-} -- | cabal-install CLI command: build -- module Distribution.Client.CmdBuild ( -- * The @build@ CLI and action buildCommand, buildAction, -- * Internals exposed for testing selectPackageTargets, selectComponentTarget ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Client.ProjectFlags ( removeIgnoreProjectOption ) import Distribution.Client.ProjectOrchestration import Distribution.Client.TargetProblem ( TargetProblem (..), TargetProblem' ) import Distribution.Client.CmdErrorMessages import Distribution.Client.NixStyleOptions ( NixStyleFlags (..), nixStyleOptions, defaultNixStyleFlags ) import Distribution.Client.Setup ( GlobalFlags, ConfigFlags(..), yesNoOpt ) import Distribution.Simple.Flag ( Flag(..), toFlag, fromFlag, fromFlagOrDefault ) import Distribution.Simple.Command ( CommandUI(..), usageAlternatives, option ) import Distribution.Verbosity ( normal ) import Distribution.Simple.Utils ( wrapText, die' ) import Distribution.Client.ScriptUtils ( AcceptNoTargets(..), withContextAndSelectors, updateContextAndWriteProjectFile, TargetContext(..) ) import qualified Data.Map as Map buildCommand :: CommandUI (NixStyleFlags BuildFlags) buildCommand = CommandUI { commandName = "v2-build", commandSynopsis = "Compile targets within the project.", commandUsage = usageAlternatives "v2-build" [ "[TARGETS] [FLAGS]" ], commandDescription = Just $ \_ -> wrapText $ "Build one or more targets from within the project. The available " ++ "targets are the packages in the project as well as individual " ++ "components within those packages, including libraries, executables, " ++ "test-suites or benchmarks. Targets can be specified by name or " ++ "location. 
If no target is specified then the default is to build " ++ "the package in the current directory.\n\n" ++ "Dependencies are built or rebuilt as necessary. Additional " ++ "configuration flags can be specified on the command line and these " ++ "extend the project configuration from the 'cabal.project', " ++ "'cabal.project.local' and other files.", commandNotes = Just $ \pname -> "Examples:\n" ++ " " ++ pname ++ " v2-build\n" ++ " Build the package in the current directory " ++ "or all packages in the project\n" ++ " " ++ pname ++ " v2-build pkgname\n" ++ " Build the package named pkgname in the project\n" ++ " " ++ pname ++ " v2-build ./pkgfoo\n" ++ " Build the package in the ./pkgfoo directory\n" ++ " " ++ pname ++ " v2-build cname\n" ++ " Build the component named cname in the project\n" ++ " " ++ pname ++ " v2-build cname --enable-profiling\n" ++ " Build the component in profiling mode " ++ "(including dependencies as needed)\n" , commandDefaultFlags = defaultNixStyleFlags defaultBuildFlags , commandOptions = removeIgnoreProjectOption . nixStyleOptions (\showOrParseArgs -> [ option [] ["only-configure"] "Instead of performing a full build just run the configure step" buildOnlyConfigure (\v flags -> flags { buildOnlyConfigure = v }) (yesNoOpt showOrParseArgs) ]) } data BuildFlags = BuildFlags { buildOnlyConfigure :: Flag Bool } defaultBuildFlags :: BuildFlags defaultBuildFlags = BuildFlags { buildOnlyConfigure = toFlag False } -- | The @build@ command does a lot. It brings the install plan up to date, -- selects that part of the plan needed by the given or implicit targets and -- then executes the plan. 
-- -- For more details on how this works, see the module -- "Distribution.Client.ProjectOrchestration" -- buildAction :: NixStyleFlags BuildFlags -> [String] -> GlobalFlags -> IO () buildAction flags@NixStyleFlags { extraFlags = buildFlags, ..} targetStrings globalFlags = withContextAndSelectors RejectNoTargets Nothing flags targetStrings globalFlags $ \targetCtx ctx targetSelectors -> do -- TODO: This flags defaults business is ugly let onlyConfigure = fromFlag (buildOnlyConfigure defaultBuildFlags <> buildOnlyConfigure buildFlags) targetAction | onlyConfigure = TargetActionConfigure | otherwise = TargetActionBuild baseCtx <- case targetCtx of ProjectContext -> return ctx GlobalContext -> return ctx ScriptContext path exemeta -> updateContextAndWriteProjectFile ctx path exemeta buildCtx <- runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do -- Interpret the targets on the command line as build targets -- (as opposed to say repl or haddock targets). targets <- either (reportBuildTargetProblems verbosity) return $ resolveTargets selectPackageTargets selectComponentTarget elaboratedPlan Nothing targetSelectors let elaboratedPlan' = pruneInstallPlanToTargets targetAction targets elaboratedPlan elaboratedPlan'' <- if buildSettingOnlyDeps (buildSettings baseCtx) then either (reportCannotPruneDependencies verbosity) return $ pruneInstallPlanToDependencies (Map.keysSet targets) elaboratedPlan' else return elaboratedPlan' return (elaboratedPlan'', targets) printPlan verbosity baseCtx buildCtx buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes where verbosity = fromFlagOrDefault normal (configVerbosity configFlags) -- | This defines what a 'TargetSelector' means for the @bench@ command. -- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, -- or otherwise classifies the problem. 
-- -- For the @build@ command select all components except non-buildable -- and disabled tests\/benchmarks, fail if there are no such -- components -- selectPackageTargets :: TargetSelector -> [AvailableTarget k] -> Either TargetProblem' [k] selectPackageTargets targetSelector targets -- If there are any buildable targets then we select those | not (null targetsBuildable) = Right targetsBuildable -- If there are targets but none are buildable then we report those | not (null targets) = Left (TargetProblemNoneEnabled targetSelector targets') -- If there are no targets at all then we report that | otherwise = Left (TargetProblemNoTargets targetSelector) where targets' = forgetTargetsDetail targets targetsBuildable = selectBuildableTargetsWith (buildable targetSelector) targets -- When there's a target filter like "pkg:tests" then we do select tests, -- but if it's just a target like "pkg" then we don't build tests unless -- they are requested by default (i.e. by using --enable-tests) buildable (TargetPackage _ _ Nothing) TargetNotRequestedByDefault = False buildable (TargetAllPackages Nothing) TargetNotRequestedByDefault = False buildable _ _ = True -- | For a 'TargetComponent' 'TargetSelector', check if the component can be -- selected. -- -- For the @build@ command we just need the basic checks on being buildable etc. -- selectComponentTarget :: SubComponentTarget -> AvailableTarget k -> Either TargetProblem' k selectComponentTarget = selectComponentTargetBasic reportBuildTargetProblems :: Verbosity -> [TargetProblem'] -> IO a reportBuildTargetProblems verbosity problems = reportTargetProblems verbosity "build" problems reportCannotPruneDependencies :: Verbosity -> CannotPruneDependencies -> IO a reportCannotPruneDependencies verbosity = die' verbosity . 
renderCannotPruneDependencies cabal-install-3.8.1.0/src/Distribution/Client/CmdClean.hs0000644000000000000000000001305207346545000021247 0ustar0000000000000000{-# LANGUAGE RecordWildCards #-} module Distribution.Client.CmdClean (cleanCommand, cleanAction) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Client.DistDirLayout ( DistDirLayout(..), defaultDistDirLayout ) import Distribution.Client.ProjectConfig ( findProjectRoot ) import Distribution.Client.ScriptUtils ( getScriptCacheDirectoryRoot ) import Distribution.Client.Setup ( GlobalFlags ) import Distribution.ReadE ( succeedReadE ) import Distribution.Simple.Setup ( Flag(..), toFlag, fromFlagOrDefault, flagToList, flagToMaybe , optionDistPref, optionVerbosity, falseArg ) import Distribution.Simple.Command ( CommandUI(..), option, reqArg ) import Distribution.Simple.Utils ( info, die', wrapText, handleDoesNotExist ) import Distribution.Verbosity ( normal ) import Control.Monad ( forM, forM_, mapM ) import qualified Data.Set as Set import System.Directory ( removeDirectoryRecursive, removeFile , doesDirectoryExist, doesFileExist , getDirectoryContents, listDirectory , canonicalizePath ) import System.FilePath ( () ) data CleanFlags = CleanFlags { cleanSaveConfig :: Flag Bool , cleanVerbosity :: Flag Verbosity , cleanDistDir :: Flag FilePath , cleanProjectFile :: Flag FilePath } deriving (Eq) defaultCleanFlags :: CleanFlags defaultCleanFlags = CleanFlags { cleanSaveConfig = toFlag False , cleanVerbosity = toFlag normal , cleanDistDir = NoFlag , cleanProjectFile = mempty } cleanCommand :: CommandUI CleanFlags cleanCommand = CommandUI { commandName = "v2-clean" , commandSynopsis = "Clean the package store and remove temporary files." , commandUsage = \pname -> "Usage: " ++ pname ++ " new-clean [FLAGS]\n" , commandDescription = Just $ \_ -> wrapText $ "Removes all temporary files created during the building process " ++ "(.hi, .o, preprocessed sources, etc.) 
and also empties out the " ++ "local caches (by default).\n\n" , commandNotes = Nothing , commandDefaultFlags = defaultCleanFlags , commandOptions = \showOrParseArgs -> [ optionVerbosity cleanVerbosity (\v flags -> flags { cleanVerbosity = v }) , optionDistPref cleanDistDir (\dd flags -> flags { cleanDistDir = dd }) showOrParseArgs , option [] ["project-file"] ("Set the name of the cabal.project file" ++ " to search for in parent directories") cleanProjectFile (\pf flags -> flags {cleanProjectFile = pf}) (reqArg "FILE" (succeedReadE Flag) flagToList) , option ['s'] ["save-config"] "Save configuration, only remove build artifacts" cleanSaveConfig (\sc flags -> flags { cleanSaveConfig = sc }) falseArg ] } cleanAction :: CleanFlags -> [String] -> GlobalFlags -> IO () cleanAction CleanFlags{..} extraArgs _ = do let verbosity = fromFlagOrDefault normal cleanVerbosity saveConfig = fromFlagOrDefault False cleanSaveConfig mdistDirectory = flagToMaybe cleanDistDir mprojectFile = flagToMaybe cleanProjectFile -- TODO interpret extraArgs as targets and clean those targets only (issue #7506) -- -- For now assume all files passed are the names of scripts notScripts <- filterM (fmap not . 
doesFileExist) extraArgs unless (null notScripts) $ die' verbosity $ "'clean' extra arguments should be script files: " ++ unwords notScripts projectRoot <- either throwIO return =<< findProjectRoot Nothing mprojectFile let distLayout = defaultDistDirLayout projectRoot mdistDirectory -- Do not clean a project if just running a script in it's directory when (null extraArgs || isJust mdistDirectory) $ do if saveConfig then do let buildRoot = distBuildRootDirectory distLayout buildRootExists <- doesDirectoryExist buildRoot when buildRootExists $ do info verbosity ("Deleting build root (" ++ buildRoot ++ ")") handleDoesNotExist () $ removeDirectoryRecursive buildRoot else do let distRoot = distDirectory distLayout info verbosity ("Deleting dist-newstyle (" ++ distRoot ++ ")") handleDoesNotExist () $ removeDirectoryRecursive distRoot removeEnvFiles (distProjectRootDirectory distLayout) -- Clean specified script build caches and orphaned caches. -- There is currently no good way to specify to only clean orphaned caches. -- It would be better as part of an explicit gc step (see issue #3333) toClean <- Set.fromList <$> mapM canonicalizePath extraArgs cacheDir <- getScriptCacheDirectoryRoot existsCD <- doesDirectoryExist cacheDir caches <- if existsCD then listDirectory cacheDir else return [] paths <- fmap concat . forM caches $ \cache -> do let locFile = cacheDir cache "scriptlocation" exists <- doesFileExist locFile if exists then pure . (,) (cacheDir cache) <$> readFile locFile else return [] forM_ paths $ \(cache, script) -> do exists <- doesFileExist script when (not exists || script `Set.member` toClean) $ do info verbosity ("Deleting cache (" ++ cache ++ ") for script (" ++ script ++ ")") removeDirectoryRecursive cache removeEnvFiles :: FilePath -> IO () removeEnvFiles dir = (traverse_ (removeFile . (dir )) . filter ((".ghc.environment" ==) . 
take 16)) =<< getDirectoryContents dir cabal-install-3.8.1.0/src/Distribution/Client/CmdConfigure.hs0000644000000000000000000001527307346545000022155 0ustar0000000000000000{-# LANGUAGE RecordWildCards #-} -- | cabal-install CLI command: configure -- module Distribution.Client.CmdConfigure ( configureCommand, configureAction, configureAction', ) where import Distribution.Client.Compat.Prelude import Prelude () import System.Directory import System.FilePath import Distribution.Simple.Flag import Distribution.Client.ProjectOrchestration import Distribution.Client.ProjectConfig ( writeProjectLocalExtraConfig, readProjectLocalExtraConfig ) import Distribution.Client.ProjectFlags ( removeIgnoreProjectOption ) import Distribution.Client.NixStyleOptions ( NixStyleFlags (..), nixStyleOptions, defaultNixStyleFlags ) import Distribution.Client.Setup ( GlobalFlags, ConfigFlags(..), ConfigExFlags(..) ) import Distribution.Verbosity ( normal ) import Distribution.Simple.Command ( CommandUI(..), usageAlternatives ) import Distribution.Simple.Utils ( wrapText, notice, die' ) import Distribution.Client.DistDirLayout ( DistDirLayout(..) ) import Distribution.Client.RebuildMonad (runRebuild) import Distribution.Client.ProjectConfig.Types import Distribution.Client.HttpUtils import Distribution.Utils.NubList ( fromNubList ) import Distribution.Types.CondTree ( CondTree (..) ) configureCommand :: CommandUI (NixStyleFlags ()) configureCommand = CommandUI { commandName = "v2-configure", commandSynopsis = "Add extra project configuration.", commandUsage = usageAlternatives "v2-configure" [ "[FLAGS]" ], commandDescription = Just $ \_ -> wrapText $ "Adjust how the project is built by setting additional package flags " ++ "and other flags.\n\n" ++ "The configuration options are written to the 'cabal.project.local' " ++ "file (or '$project_file.local', if '--project-file' is specified) " ++ "which extends the configuration from the 'cabal.project' file " ++ "(if any). 
This combination is used as the project configuration for " ++ "all other commands (such as 'v2-build', 'v2-repl' etc) though it " ++ "can be extended/overridden on a per-command basis.\n\n" ++ "The v2-configure command also checks that the project configuration " ++ "will work. In particular it checks that there is a consistent set of " ++ "dependencies for the project as a whole.\n\n" ++ "The 'cabal.project.local' file persists across 'v2-clean' but is " ++ "overwritten on the next use of the 'v2-configure' command. The " ++ "intention is that the 'cabal.project' file should be kept in source " ++ "control but the 'cabal.project.local' should not.\n\n" ++ "It is never necessary to use the 'v2-configure' command. It is " ++ "merely a convenience in cases where you do not want to specify flags " ++ "to 'v2-build' (and other commands) every time and yet do not want " ++ "to alter the 'cabal.project' persistently.", commandNotes = Just $ \pname -> "Examples:\n" ++ " " ++ pname ++ " v2-configure --with-compiler ghc-7.10.3\n" ++ " Adjust the project configuration to use the given compiler\n" ++ " program and check the resulting configuration works.\n" ++ " " ++ pname ++ " v2-configure\n" ++ " Reset the local configuration to empty. To check that the\n" ++ " project configuration works, use 'cabal build'.\n" , commandDefaultFlags = defaultNixStyleFlags () , commandOptions = removeIgnoreProjectOption . nixStyleOptions (const []) } -- | To a first approximation, the @configure@ just runs the first phase of -- the @build@ command where we bring the install plan up to date (thus -- checking that it's possible). -- -- The only difference is that @configure@ also allows the user to specify -- some extra config flags which we save in the file @cabal.project.local@. 
-- -- For more details on how this works, see the module -- "Distribution.Client.ProjectOrchestration" -- configureAction :: NixStyleFlags () -> [String] -> GlobalFlags -> IO () configureAction flags@NixStyleFlags {..} extraArgs globalFlags = do (baseCtx, projConfig) <- configureAction' flags extraArgs globalFlags if shouldNotWriteFile baseCtx then notice v "Config file not written due to flag(s)." else writeProjectLocalExtraConfig (distDirLayout baseCtx) projConfig where v = fromFlagOrDefault normal (configVerbosity configFlags) configureAction' :: NixStyleFlags () -> [String] -> GlobalFlags -> IO (ProjectBaseContext, ProjectConfig) configureAction' flags@NixStyleFlags {..} _extraArgs globalFlags = do --TODO: deal with _extraArgs, since flags with wrong syntax end up there baseCtx <- establishProjectBaseContext v cliConfig OtherCommand let localFile = distProjectFile (distDirLayout baseCtx) "local" -- If cabal.project.local already exists, and the flags allow, back up to cabal.project.local~ let backups = fromFlagOrDefault True $ configBackup configExFlags appends = fromFlagOrDefault False $ configAppend configExFlags backupFile = localFile <> "~" if shouldNotWriteFile baseCtx then return (baseCtx, cliConfig) else do exists <- doesFileExist localFile when (exists && backups) $ do notice v $ quote (takeFileName localFile) <> " already exists, backing it up to " <> quote (takeFileName backupFile) <> "." copyFile localFile backupFile -- If the flag @configAppend@ is set to true, append and do not overwrite if exists && appends then do httpTransport <- configureTransport v (fromNubList . projectConfigProgPathExtra $ projectConfigShared cliConfig) (flagToMaybe . projectConfigHttpTransport $ projectConfigBuildOnly cliConfig) (CondNode conf imps bs) <- runRebuild (distProjectRootDirectory . 
distDirLayout $ baseCtx) $ readProjectLocalExtraConfig v httpTransport (distDirLayout baseCtx) when (not (null imps && null bs)) $ die' v "local project file has conditional and/or import logic, unable to perform and automatic in-place update" return (baseCtx, conf <> cliConfig) else return (baseCtx, cliConfig) where v = fromFlagOrDefault normal (configVerbosity configFlags) cliConfig = commandLineFlagsToProjectConfig globalFlags flags mempty -- ClientInstallFlags, not needed here quote s = "'" <> s <> "'" -- Config file should not be written when certain flags are present shouldNotWriteFile :: ProjectBaseContext -> Bool shouldNotWriteFile baseCtx = buildSettingDryRun (buildSettings baseCtx) || buildSettingOnlyDownload (buildSettings baseCtx) cabal-install-3.8.1.0/src/Distribution/Client/CmdErrorMessages.hs0000644000000000000000000004713307346545000023015 0ustar0000000000000000{-# LANGUAGE LambdaCase #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE RecordWildCards #-} -- | Utilities to help format error messages for the various CLI commands. -- module Distribution.Client.CmdErrorMessages ( module Distribution.Client.CmdErrorMessages, module Distribution.Client.TargetSelector, ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.ProjectPlanning ( AvailableTarget(..), AvailableTargetStatus(..), CannotPruneDependencies(..), TargetRequested(..) ) import Distribution.Client.TargetSelector ( SubComponentTarget(..) ) import Distribution.Client.TargetProblem ( TargetProblem(..), TargetProblem' ) import Distribution.Client.TargetSelector ( ComponentKind(..), ComponentKindFilter, TargetSelector(..), componentKind, showTargetSelector ) import Distribution.Package ( PackageId, packageId, PackageName, packageName ) import Distribution.Simple.Utils ( die' ) import Distribution.Types.ComponentName ( ComponentName(..), showComponentName ) import Distribution.Types.LibraryName ( LibraryName(..) 
) import Distribution.Solver.Types.OptionalStanza ( OptionalStanza(..) ) import qualified Data.List.NonEmpty as NE ----------------------- -- Singular or plural -- -- | A tag used in rendering messages to distinguish singular or plural. -- data Plural = Singular | Plural -- | Used to render a singular or plural version of something -- -- > plural (listPlural theThings) "it is" "they are" -- plural :: Plural -> a -> a -> a plural Singular si _pl = si plural Plural _si pl = pl -- | Singular for singleton lists and plural otherwise. -- listPlural :: [a] -> Plural listPlural [_] = Singular listPlural _ = Plural -------------------- -- Rendering lists -- -- | Render a list of things in the style @foo, bar and baz@ renderListCommaAnd :: [String] -> String renderListCommaAnd [] = "" renderListCommaAnd [x] = x renderListCommaAnd [x,x'] = x ++ " and " ++ x' renderListCommaAnd (x:xs) = x ++ ", " ++ renderListCommaAnd xs -- | Render a list of things in the style @blah blah; this that; and the other@ renderListSemiAnd :: [String] -> String renderListSemiAnd [] = "" renderListSemiAnd [x] = x renderListSemiAnd [x,x'] = x ++ "; and " ++ x' renderListSemiAnd (x:xs) = x ++ "; " ++ renderListSemiAnd xs -- | When rendering lists of things it often reads better to group related -- things, e.g. grouping components by package name -- -- > renderListSemiAnd -- > [ "the package " ++ prettyShow pkgname ++ " components " -- > ++ renderListCommaAnd showComponentName components -- > | (pkgname, components) <- sortGroupOn packageName allcomponents ] -- sortGroupOn :: Ord b => (a -> b) -> [a] -> [(b, [a])] sortGroupOn key = map (\(x:|xs) -> (key x, x:xs)) . NE.groupBy ((==) `on` key) . 
sortBy (compare `on` key) ---------------------------------------------------- -- Rendering for a few project and package types -- renderTargetSelector :: TargetSelector -> String renderTargetSelector (TargetPackage _ pkgids Nothing) = "the " ++ plural (listPlural pkgids) "package" "packages" ++ " " ++ renderListCommaAnd (map prettyShow pkgids) renderTargetSelector (TargetPackage _ pkgids (Just kfilter)) = "the " ++ renderComponentKind Plural kfilter ++ " in the " ++ plural (listPlural pkgids) "package" "packages" ++ " " ++ renderListCommaAnd (map prettyShow pkgids) renderTargetSelector (TargetPackageNamed pkgname Nothing) = "the package " ++ prettyShow pkgname renderTargetSelector (TargetPackageNamed pkgname (Just kfilter)) = "the " ++ renderComponentKind Plural kfilter ++ " in the package " ++ prettyShow pkgname renderTargetSelector (TargetAllPackages Nothing) = "all the packages in the project" renderTargetSelector (TargetAllPackages (Just kfilter)) = "all the " ++ renderComponentKind Plural kfilter ++ " in the project" renderTargetSelector (TargetComponent pkgid cname subtarget) = renderSubComponentTarget subtarget ++ "the " ++ renderComponentName (packageName pkgid) cname renderTargetSelector (TargetComponentUnknown pkgname (Left ucname) subtarget) = renderSubComponentTarget subtarget ++ "the component " ++ prettyShow ucname ++ " in the package " ++ prettyShow pkgname renderTargetSelector (TargetComponentUnknown pkgname (Right cname) subtarget) = renderSubComponentTarget subtarget ++ "the " ++ renderComponentName pkgname cname renderSubComponentTarget :: SubComponentTarget -> String renderSubComponentTarget WholeComponent = "" renderSubComponentTarget (FileTarget filename) = "the file " ++ filename ++ " in " renderSubComponentTarget (ModuleTarget modname) = "the module " ++ prettyShow modname ++ " in " renderOptionalStanza :: Plural -> OptionalStanza -> String renderOptionalStanza Singular TestStanzas = "test suite" renderOptionalStanza Plural TestStanzas = 
"test suites" renderOptionalStanza Singular BenchStanzas = "benchmark" renderOptionalStanza Plural BenchStanzas = "benchmarks" -- | The optional stanza type (test suite or benchmark), if it is one. optionalStanza :: ComponentName -> Maybe OptionalStanza optionalStanza (CTestName _) = Just TestStanzas optionalStanza (CBenchName _) = Just BenchStanzas optionalStanza _ = Nothing -- | Does the 'TargetSelector' potentially refer to one package or many? -- targetSelectorPluralPkgs :: TargetSelector -> Plural targetSelectorPluralPkgs (TargetAllPackages _) = Plural targetSelectorPluralPkgs (TargetPackage _ pids _) = listPlural pids targetSelectorPluralPkgs (TargetPackageNamed _ _) = Singular targetSelectorPluralPkgs TargetComponent{} = Singular targetSelectorPluralPkgs TargetComponentUnknown{} = Singular -- | Does the 'TargetSelector' refer to packages or to components? targetSelectorRefersToPkgs :: TargetSelector -> Bool targetSelectorRefersToPkgs (TargetAllPackages mkfilter) = isNothing mkfilter targetSelectorRefersToPkgs (TargetPackage _ _ mkfilter) = isNothing mkfilter targetSelectorRefersToPkgs (TargetPackageNamed _ mkfilter) = isNothing mkfilter targetSelectorRefersToPkgs TargetComponent{} = False targetSelectorRefersToPkgs TargetComponentUnknown{} = False targetSelectorFilter :: TargetSelector -> Maybe ComponentKindFilter targetSelectorFilter (TargetPackage _ _ mkfilter) = mkfilter targetSelectorFilter (TargetPackageNamed _ mkfilter) = mkfilter targetSelectorFilter (TargetAllPackages mkfilter) = mkfilter targetSelectorFilter TargetComponent{} = Nothing targetSelectorFilter TargetComponentUnknown{} = Nothing renderComponentName :: PackageName -> ComponentName -> String renderComponentName pkgname (CLibName LMainLibName) = "library " ++ prettyShow pkgname renderComponentName _ (CLibName (LSubLibName name)) = "library " ++ prettyShow name renderComponentName _ (CFLibName name) = "foreign library " ++ prettyShow name renderComponentName _ (CExeName name) = "executable " 
++ prettyShow name renderComponentName _ (CTestName name) = "test suite " ++ prettyShow name renderComponentName _ (CBenchName name) = "benchmark " ++ prettyShow name renderComponentKind :: Plural -> ComponentKind -> String renderComponentKind Singular ckind = case ckind of LibKind -> "library" -- internal/sub libs? FLibKind -> "foreign library" ExeKind -> "executable" TestKind -> "test suite" BenchKind -> "benchmark" renderComponentKind Plural ckind = case ckind of LibKind -> "libraries" -- internal/sub libs? FLibKind -> "foreign libraries" ExeKind -> "executables" TestKind -> "test suites" BenchKind -> "benchmarks" ------------------------------------------------------- -- Rendering error messages for TargetProblem -- -- | Default implementation of 'reportTargetProblems' simply renders one problem per line. reportTargetProblems :: Verbosity -> String -> [TargetProblem'] -> IO a reportTargetProblems verbosity verb = die' verbosity . unlines . map (renderTargetProblem verb absurd) -- | Default implementation of 'renderTargetProblem'. renderTargetProblem :: String -- ^ verb -> (a -> String) -- ^ how to render custom problems -> TargetProblem a -> String renderTargetProblem _verb f (CustomTargetProblem x) = f x renderTargetProblem verb _ (TargetProblemNoneEnabled targetSelector targets) = renderTargetProblemNoneEnabled verb targetSelector targets renderTargetProblem verb _ (TargetProblemNoTargets targetSelector) = renderTargetProblemNoTargets verb targetSelector renderTargetProblem verb _ (TargetNotInProject pkgname) = "Cannot " ++ verb ++ " the package " ++ prettyShow pkgname ++ ", it is not " ++ "in this project (either directly or indirectly). If you want to add it " ++ "to the project then edit the cabal.project file." renderTargetProblem verb _ (TargetAvailableInIndex pkgname) = "Cannot " ++ verb ++ " the package " ++ prettyShow pkgname ++ ", it is not " ++ "in this project (either directly or indirectly), but it is in the current " ++ "package index. 
If you want to add it to the project then edit the " ++ "cabal.project file." renderTargetProblem verb _ (TargetComponentNotProjectLocal pkgid cname _) = "Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because the " ++ "package " ++ prettyShow pkgid ++ " is not local to the project, and cabal " ++ "does not currently support building test suites or benchmarks of " ++ "non-local dependencies. To run test suites or benchmarks from " ++ "dependencies you can unpack the package locally and adjust the " ++ "cabal.project file to include that package directory." renderTargetProblem verb _ (TargetComponentNotBuildable pkgid cname _) = "Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because it is " ++ "marked as 'buildable: False' within the '" ++ prettyShow (packageName pkgid) ++ ".cabal' file (at least for the current configuration). If you believe it " ++ "should be buildable then check the .cabal file to see if the buildable " ++ "property is conditional on flags. Alternatively you may simply have to " ++ "edit the .cabal file to declare it as buildable and fix any resulting " ++ "build problems." renderTargetProblem verb _ (TargetOptionalStanzaDisabledByUser _ cname _) = "Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because " ++ "building " ++ compkinds ++ " has been explicitly disabled in the " ++ "configuration. You can adjust this configuration in the " ++ "cabal.project{.local} file either for all packages in the project or on " ++ "a per-package basis. Note that if you do not explicitly disable " ++ compkinds ++ " then the solver will merely try to make a plan with " ++ "them available, so you may wish to explicitly enable them which will " ++ "require the solver to find a plan with them available or to fail with an " ++ "explanation." 
where compkinds = renderComponentKind Plural (componentKind cname) renderTargetProblem verb _ (TargetOptionalStanzaDisabledBySolver pkgid cname _) = "Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because the " ++ "solver did not find a plan that included the " ++ compkinds ++ " for " ++ prettyShow pkgid ++ ". It is probably worth trying again with " ++ compkinds ++ " explicitly enabled in the configuration in the " ++ "cabal.project{.local} file. This will ask the solver to find a plan with " ++ "the " ++ compkinds ++ " available. It will either fail with an " ++ "explanation or find a different plan that uses different versions of some " ++ "other packages. Use the '--dry-run' flag to see package versions and " ++ "check that you are happy with the choices." where compkinds = renderComponentKind Plural (componentKind cname) renderTargetProblem verb _ (TargetProblemUnknownComponent pkgname ecname) = "Cannot " ++ verb ++ " the " ++ (case ecname of Left ucname -> "component " ++ prettyShow ucname Right cname -> renderComponentName pkgname cname) ++ " from the package " ++ prettyShow pkgname ++ ", because the package does not contain a " ++ (case ecname of Left _ -> "component" Right cname -> renderComponentKind Singular (componentKind cname)) ++ " with that name." renderTargetProblem verb _ (TargetProblemNoSuchPackage pkgid) = "Internal error when trying to " ++ verb ++ " the package " ++ prettyShow pkgid ++ ". The package is not in the set of available targets " ++ "for the project plan, which would suggest an inconsistency " ++ "between readTargetSelectors and resolveTargets." renderTargetProblem verb _ (TargetProblemNoSuchComponent pkgid cname) = "Internal error when trying to " ++ verb ++ " the " ++ showComponentName cname ++ " from the package " ++ prettyShow pkgid ++ ". 
The package,component pair is not in the set of available targets " ++ "for the project plan, which would suggest an inconsistency " ++ "between readTargetSelectors and resolveTargets." ------------------------------------------------------------ -- Rendering error messages for TargetProblemNoneEnabled -- -- | Several commands have a @TargetProblemNoneEnabled@ problem constructor. -- This renders an error message for those cases. -- renderTargetProblemNoneEnabled :: String -> TargetSelector -> [AvailableTarget ()] -> String renderTargetProblemNoneEnabled verb targetSelector targets = "Cannot " ++ verb ++ " " ++ renderTargetSelector targetSelector ++ " because none of the components are available to build: " ++ renderListSemiAnd [ case (status, mstanza) of (TargetDisabledByUser, Just stanza) -> renderListCommaAnd [ "the " ++ showComponentName availableTargetComponentName | AvailableTarget {availableTargetComponentName} <- targets' ] ++ plural (listPlural targets') " is " " are " ++ " not available because building " ++ renderOptionalStanza Plural stanza ++ " has been disabled in the configuration" (TargetDisabledBySolver, Just stanza) -> renderListCommaAnd [ "the " ++ showComponentName availableTargetComponentName | AvailableTarget {availableTargetComponentName} <- targets' ] ++ plural (listPlural targets') " is " " are " ++ "not available because the solver picked a plan that does not " ++ "include the " ++ renderOptionalStanza Plural stanza ++ ", perhaps because no such plan exists. To see the error message " ++ "explaining the problems with such plans, force the solver to " ++ "include the " ++ renderOptionalStanza Plural stanza ++ " for all " ++ "packages, by adding the line 'tests: True' to the " ++ "'cabal.project.local' file." 
(TargetNotBuildable, _) -> renderListCommaAnd [ "the " ++ showComponentName availableTargetComponentName | AvailableTarget {availableTargetComponentName} <- targets' ] ++ plural (listPlural targets') " is " " are all " ++ "marked as 'buildable: False'" (TargetNotLocal, _) -> renderListCommaAnd [ "the " ++ showComponentName availableTargetComponentName | AvailableTarget {availableTargetComponentName} <- targets' ] ++ " cannot be built because cabal does not currently support " ++ "building test suites or benchmarks of non-local dependencies" (TargetBuildable () TargetNotRequestedByDefault, Just stanza) -> renderListCommaAnd [ "the " ++ showComponentName availableTargetComponentName | AvailableTarget {availableTargetComponentName} <- targets' ] ++ " will not be built because " ++ renderOptionalStanza Plural stanza ++ " are not built by default in the current configuration (but you " ++ "can still build them specifically)" --TODO: say how _ -> error $ "renderBuildTargetProblem: unexpected status " ++ show (status, mstanza) | ((status, mstanza), targets') <- sortGroupOn groupingKey targets ] where groupingKey t = ( availableTargetStatus t , case availableTargetStatus t of TargetNotBuildable -> Nothing TargetNotLocal -> Nothing _ -> optionalStanza (availableTargetComponentName t) ) ------------------------------------------------------------ -- Rendering error messages for TargetProblemNoneEnabled -- -- | Several commands have a @TargetProblemNoTargets@ problem constructor. -- This renders an error message for those cases. -- renderTargetProblemNoTargets :: String -> TargetSelector -> String renderTargetProblemNoTargets verb targetSelector = "Cannot " ++ verb ++ " " ++ renderTargetSelector targetSelector ++ " because " ++ reason targetSelector ++ ". 
" ++ "Check the .cabal " ++ plural (targetSelectorPluralPkgs targetSelector) "file for the package and make sure that it properly declares " "files for the packages and make sure that they properly declare " ++ "the components that you expect." where reason (TargetPackage _ _ Nothing) = "it does not contain any components at all" reason (TargetPackage _ _ (Just kfilter)) = "it does not contain any " ++ renderComponentKind Plural kfilter reason (TargetPackageNamed _ Nothing) = "it does not contain any components at all" reason (TargetPackageNamed _ (Just kfilter)) = "it does not contain any " ++ renderComponentKind Plural kfilter reason (TargetAllPackages Nothing) = "none of them contain any components at all" reason (TargetAllPackages (Just kfilter)) = "none of the packages contain any " ++ renderComponentKind Plural kfilter reason ts@TargetComponent{} = error $ "renderTargetProblemNoTargets: " ++ show ts reason ts@TargetComponentUnknown{} = error $ "renderTargetProblemNoTargets: " ++ show ts ----------------------------------------------------------- -- Rendering error messages for CannotPruneDependencies -- renderCannotPruneDependencies :: CannotPruneDependencies -> String renderCannotPruneDependencies (CannotPruneDependencies brokenPackages) = "Cannot select only the dependencies (as requested by the " ++ "'--only-dependencies' flag), " ++ (case pkgids of [pkgid] -> "the package " ++ prettyShow pkgid ++ " is " _ -> "the packages " ++ renderListCommaAnd (map prettyShow pkgids) ++ " are ") ++ "required by a dependency of one of the other targets." where -- throw away the details and just list the deps that are needed pkgids :: [PackageId] pkgids = nub . map packageId . 
concatMap snd $ brokenPackages {- ++ "Syntax:\n" ++ " - build [package]\n" ++ " - build [package:]component\n" ++ " - build [package:][component:]module\n" ++ " - build [package:][component:]file\n" ++ " where\n" ++ " package is a package name, package dir or .cabal file\n\n" ++ "Examples:\n" ++ " - build foo -- package name\n" ++ " - build tests -- component name\n" ++ " (name of library, executable, test-suite or benchmark)\n" ++ " - build Data.Foo -- module name\n" ++ " - build Data/Foo.hsc -- file name\n\n" ++ "An ambiguous target can be qualified by package, component\n" ++ "and/or component kind (lib|exe|test|bench|flib)\n" ++ " - build foo:tests -- component qualified by package\n" ++ " - build tests:Data.Foo -- module qualified by component\n" ++ " - build lib:foo -- component qualified by kind" -} cabal-install-3.8.1.0/src/Distribution/Client/CmdExec.hs0000644000000000000000000002227607346545000021121 0ustar0000000000000000------------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Exec -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Implementation of the 'v2-exec' command for running an arbitrary executable -- in an environment suited to the part of the store built for a project. ------------------------------------------------------------------------------- {-# LANGUAGE RecordWildCards #-} module Distribution.Client.CmdExec ( execAction , execCommand ) where import Distribution.Client.DistDirLayout ( DistDirLayout(..) ) import Distribution.Client.InstallPlan ( GenericPlanPackage(..) , toGraph ) import Distribution.Client.NixStyleOptions ( NixStyleFlags (..), nixStyleOptions, defaultNixStyleFlags ) import Distribution.Client.Setup ( ConfigFlags(configVerbosity) , GlobalFlags ) import Distribution.Client.ProjectFlags ( removeIgnoreProjectOption ) import Distribution.Client.ProjectOrchestration ( ProjectBuildContext(..) , runProjectPreBuildPhase , CurrentCommand(..) 
, establishProjectBaseContext , distDirLayout , commandLineFlagsToProjectConfig , ProjectBaseContext(..) , BuildTimeSettings(..) ) import Distribution.Client.ProjectPlanOutput ( updatePostBuildProjectStatus , createPackageEnvironment , argsEquivalentOfGhcEnvironmentFile , PostBuildProjectStatus ) import qualified Distribution.Client.ProjectPlanning as Planning import Distribution.Client.ProjectPlanning ( ElaboratedInstallPlan , ElaboratedSharedConfig(..) ) import Distribution.Simple.Command ( CommandUI(..) ) import Distribution.Simple.Program.Db ( modifyProgramSearchPath , requireProgram , configuredPrograms ) import Distribution.Simple.Program.Find ( ProgramSearchPathEntry(..) ) import Distribution.Simple.Program.Run ( programInvocation , runProgramInvocation ) import Distribution.Simple.Program.Types ( programOverrideEnv , programDefaultArgs , programPath , simpleProgram , ConfiguredProgram ) import Distribution.Simple.GHC ( getImplInfo , GhcImplInfo(supportsPkgEnvFiles) ) import Distribution.Simple.Flag ( fromFlagOrDefault ) import Distribution.Simple.Utils ( die' , info , createDirectoryIfMissingVerbose , withTempDirectory , wrapText , notice ) import Distribution.Verbosity ( normal ) import Prelude () import Distribution.Client.Compat.Prelude import qualified Data.Set as S import qualified Data.Map as M execCommand :: CommandUI (NixStyleFlags ()) execCommand = CommandUI { commandName = "v2-exec" , commandSynopsis = "Give a command access to the store." , commandUsage = \pname -> "Usage: " ++ pname ++ " v2-exec [FLAGS] [--] COMMAND [--] [ARGS]\n" , commandDescription = Just $ \pname -> wrapText $ "During development it is often useful to run build tasks and perform" ++ " one-off program executions to experiment with the behavior of build" ++ " tools. It is convenient to run these tools in the same way " ++ pname ++ " itself would. 
The `" ++ pname ++ " v2-exec` command provides a way to"
     ++ " do so.\n"
     ++ "\n"
     ++ "Compiler tools will be configured to see the same subset of the store"
     ++ " that builds would see. The PATH is modified to make all executables in"
     ++ " the dependency tree available (provided they have been built already)."
     ++ " Commands are also rewritten in the way cabal itself would. For"
     ++ " example, `" ++ pname ++ " v2-exec ghc` will consult the configuration"
     ++ " to choose an appropriate version of ghc and to include any"
     ++ " ghc-specific flags requested."
  , commandNotes = Nothing
  , commandOptions = removeIgnoreProjectOption . nixStyleOptions (const [])
  , commandDefaultFlags = defaultNixStyleFlags ()
  }

-- | Implementation of the @v2-exec@ action: compute the project's install
-- plan (without building), extend PATH with the bin directories of the
-- dependency tree, and run the requested program with a GHC package
-- environment (or equivalent command-line arguments) reflecting the project.
execAction :: NixStyleFlags () -> [String] -> GlobalFlags -> IO ()
execAction flags@NixStyleFlags {..} extraArgs globalFlags = do
    baseCtx <- establishProjectBaseContext verbosity cliConfig OtherCommand

    -- To set up the environment, we'd like to select the libraries in our
    -- dependency tree that we've already built. So first we set up an install
    -- plan, but we walk the dependency tree without first executing the plan.
    buildCtx <-
      runProjectPreBuildPhase
        verbosity
        baseCtx
        (\plan -> return (plan, M.empty))

    -- We use the build status below to decide what libraries to include in the
    -- compiler environment, but we don't want to actually build anything. So we
    -- pass mempty to indicate that nothing happened and we just want the current
    -- status.
    buildStatus <-
      updatePostBuildProjectStatus
        verbosity
        (distDirLayout baseCtx)
        (elaboratedPlanOriginal buildCtx)
        (pkgsBuildStatus buildCtx)
        mempty

    -- Some dependencies may have executables. Let's put those on the PATH.
    extraPaths <- pathAdditions verbosity baseCtx buildCtx

    let programDb = modifyProgramSearchPath
                      (map ProgramSearchPathDir extraPaths ++)
                  . pkgConfigCompilerProgs
                  . elaboratedShared
                  $ buildCtx

    -- Now that we have the packages, set up the environment. We accomplish this
    -- by creating an environment file that selects the databases and packages we
    -- computed in the previous step, and setting an environment variable to
    -- point at the file.
    -- In case ghc is too old to support environment files,
    -- we pass the same info as arguments
    let compiler = pkgConfigCompiler $ elaboratedShared buildCtx
        envFilesSupported = supportsPkgEnvFiles (getImplInfo compiler)
    case extraArgs of
      [] -> die' verbosity "Please specify an executable to run"
      exe:args -> do
        (program, _) <- requireProgram verbosity (simpleProgram exe) programDb
        let argOverrides =
              argsEquivalentOfGhcEnvironmentFile
                compiler
                (distDirLayout baseCtx)
                (elaboratedPlanOriginal buildCtx)
                buildStatus
            programIsConfiguredCompiler =
              matchCompilerPath (elaboratedShared buildCtx) program
            -- Only pass the env-file-equivalent arguments when the program
            -- being run is the configured compiler itself and env files are
            -- not supported; otherwise they would be meaningless/redundant.
            argOverrides' =
              if envFilesSupported || not programIsConfiguredCompiler
                then []
                else argOverrides

        (if envFilesSupported
           then withTempEnvFile verbosity baseCtx buildCtx buildStatus
           else \f -> f []) $ \envOverrides -> do
          let program' = withOverrides envOverrides argOverrides' program
              invocation = programInvocation program' args
              dryRun = buildSettingDryRun (buildSettings baseCtx)
                    || buildSettingOnlyDownload (buildSettings baseCtx)

          if dryRun
             then notice verbosity "Running of executable suppressed by flag(s)"
             else runProgramInvocation verbosity invocation
  where
    verbosity = fromFlagOrDefault normal (configVerbosity configFlags)
    cliConfig = commandLineFlagsToProjectConfig globalFlags flags
                  mempty -- ClientInstallFlags, not needed here

    -- Extend a program with extra environment entries and default arguments.
    withOverrides env args program = program
      { programOverrideEnv = programOverrideEnv program ++ env
      , programDefaultArgs = programDefaultArgs program ++ args }

-- | True iff the given program's executable path is one of the compilers
-- configured in the elaborated shared config.
matchCompilerPath :: ElaboratedSharedConfig -> ConfiguredProgram -> Bool
matchCompilerPath elaboratedShared program =
    programPath program `elem` (programPath <$> configuredCompilers)
  where
    configuredCompilers = configuredPrograms $ pkgConfigCompilerProgs elaboratedShared

-- | Execute an action with a temporary .ghc.environment file reflecting the
-- current environment. The action takes an environment containing the env
-- variable which points ghc to the file.
withTempEnvFile
  :: Verbosity
  -> ProjectBaseContext
  -> ProjectBuildContext
  -> PostBuildProjectStatus
  -> ([(String, Maybe String)] -> IO a)
  -> IO a
withTempEnvFile verbosity baseCtx buildCtx buildStatus action = do
  createDirectoryIfMissingVerbose verbosity True
    (distTempDirectory (distDirLayout baseCtx))
  withTempDirectory
    verbosity
    (distTempDirectory (distDirLayout baseCtx))
    "environment."
    (\tmpDir -> do
        envOverrides <-
          createPackageEnvironment
            verbosity
            tmpDir
            (elaboratedPlanToExecute buildCtx)
            (elaboratedShared buildCtx)
            buildStatus
        action envOverrides)

-- | Compute (and log) the directories to add to PATH: the bin directories of
-- the packages in the plan to execute.
pathAdditions :: Verbosity -> ProjectBaseContext -> ProjectBuildContext -> IO [FilePath]
pathAdditions verbosity ProjectBaseContext{..} ProjectBuildContext{..} = do
  info verbosity . unlines $
    "Including the following directories in PATH:" : paths
  return paths
  where
    paths = S.toList
          $ binDirectories distDirLayout elaboratedShared elaboratedPlanToExecute

-- | Collect the bin directories of every configured or installed source
-- package in the install plan; pre-existing packages contribute nothing.
binDirectories
  :: DistDirLayout
  -> ElaboratedSharedConfig
  -> ElaboratedInstallPlan
  -> Set FilePath
binDirectories layout config = fromElaboratedInstallPlan
  where
    fromElaboratedInstallPlan = fromGraph . toGraph
    fromGraph = foldMap fromPlan
    fromSrcPkg = S.fromList . Planning.binDirectories layout config

    fromPlan (PreExisting _) = mempty
    fromPlan (Configured pkg) = fromSrcPkg pkg
    fromPlan (Installed pkg) = fromSrcPkg pkg

cabal-install-3.8.1.0/src/Distribution/Client/CmdFreeze.hs0000644000000000000000000002256707346545000021450 0ustar0000000000000000{-# LANGUAGE NamedFieldPuns, RecordWildCards #-}

-- | cabal-install CLI command: freeze
--
module Distribution.Client.CmdFreeze (
    freezeCommand,
    freezeAction,
  ) where

import Distribution.Client.Compat.Prelude
import Prelude ()

import Distribution.Client.NixStyleOptions
         ( NixStyleFlags (..), nixStyleOptions, defaultNixStyleFlags )
import Distribution.Client.ProjectOrchestration
import Distribution.Client.ProjectPlanning
import Distribution.Client.ProjectConfig
         ( ProjectConfig(..), ProjectConfigShared(..)
         , writeProjectLocalFreezeConfig )
import Distribution.Client.IndexUtils
         (TotalIndexState, ActiveRepos, filterSkippedActiveRepos)
import Distribution.Client.Targets
         ( UserQualifier(..), UserConstraintScope(..), UserConstraint(..) )
import Distribution.Solver.Types.PackageConstraint
         ( PackageProperty(..) )
import Distribution.Solver.Types.ConstraintSource
         ( ConstraintSource(..) )
import Distribution.Client.DistDirLayout
         ( DistDirLayout(distProjectFile) )
import qualified Distribution.Client.InstallPlan as InstallPlan

import Distribution.Package
         ( PackageName, packageName, packageVersion )
import Distribution.Version
         ( VersionRange, thisVersion
         , unionVersionRanges, simplifyVersionRange )
import Distribution.PackageDescription
         ( FlagAssignment, nullFlagAssignment )
import Distribution.Client.Setup
         ( GlobalFlags, ConfigFlags(..)
  )
import Distribution.Simple.Flag
         ( fromFlagOrDefault )
import Distribution.Simple.Flag (Flag (..))
import Distribution.Simple.Utils
         ( die', notice, wrapText )
import Distribution.Verbosity
         ( normal )

import qualified Data.Map as Map

import Distribution.Simple.Command
         ( CommandUI(..), usageAlternatives )

freezeCommand :: CommandUI (NixStyleFlags ())
freezeCommand = CommandUI {
  commandName         = "v2-freeze",
  commandSynopsis     = "Freeze dependencies.",
  commandUsage        = usageAlternatives "v2-freeze" [ "[FLAGS]" ],
  commandDescription  = Just $ \_ -> wrapText $
        "The project configuration is frozen so that it will be reproducible "
     ++ "in future.\n\n"
     ++ "The precise dependency configuration for the project is written to "
     ++ "the 'cabal.project.freeze' file (or '$project_file.freeze' if "
     ++ "'--project-file' is specified). This file extends the configuration "
     ++ "from the 'cabal.project' file and thus is used as the project "
     ++ "configuration for all other commands (such as 'v2-build', "
     ++ "'v2-repl' etc).\n\n"
     ++ "The freeze file can be kept in source control. To make small "
     ++ "adjustments it may be edited manually, or to make bigger changes "
     ++ "you may wish to delete the file and re-freeze. For more control, "
     ++ "one approach is to try variations using 'v2-build --dry-run' with "
     ++ "solver flags such as '--constraint=\"pkg < 1.2\"' and once you have "
     ++ "a satisfactory solution to freeze it using the 'v2-freeze' command "
     ++ "with the same set of flags.",
  commandNotes        = Just $ \pname ->
        "Examples:\n"
     ++ " " ++ pname ++ " v2-freeze\n"
     ++ " Freeze the configuration of the current project\n\n"
     ++ " " ++ pname ++ " v2-build --dry-run --constraint=\"aeson < 1\"\n"
     ++ " Check what a solution with the given constraints would look like\n"
     ++ " " ++ pname ++ " v2-freeze --constraint=\"aeson < 1\"\n"
     ++ " Freeze a solution using the given constraints\n"
  , commandDefaultFlags = defaultNixStyleFlags ()
  , commandOptions      = nixStyleOptions (const [])
  }

-- | To a first approximation, the @freeze@ command runs the first phase of
-- the @build@ command where we bring the install plan up to date, and then
-- based on the install plan we write out a @cabal.project.freeze@ config file.
--
-- For more details on how this works, see the module
-- "Distribution.Client.ProjectOrchestration"
--
freezeAction :: NixStyleFlags () -> [String] -> GlobalFlags -> IO ()
freezeAction flags@NixStyleFlags {..} extraArgs globalFlags = do
    -- freeze operates on the whole project, never on named targets
    unless (null extraArgs) $
      die' verbosity $ "'freeze' doesn't take any extra arguments: "
                    ++ unwords extraArgs

    ProjectBaseContext {
      distDirLayout,
      cabalDirLayout,
      projectConfig,
      localPackages,
      buildSettings
    } <- establishProjectBaseContext verbosity cliConfig OtherCommand

    -- Bring the install plan up to date; only the plan and index state are
    -- needed to produce the freeze configuration.
    (_, elaboratedPlan, _, totalIndexState, activeRepos) <-
      rebuildInstallPlan verbosity
                         distDirLayout cabalDirLayout
                         projectConfig
                         localPackages

    let freezeConfig = projectFreezeConfig elaboratedPlan totalIndexState activeRepos
        dryRun = buildSettingDryRun buildSettings
              || buildSettingOnlyDownload buildSettings

    if dryRun
       then notice verbosity "Freeze file not written due to flag(s)"
       else do
         writeProjectLocalFreezeConfig distDirLayout freezeConfig
         notice verbosity $
           "Wrote freeze file: " ++ distProjectFile distDirLayout "freeze"
  where
    verbosity = fromFlagOrDefault normal (configVerbosity configFlags)
    cliConfig = commandLineFlagsToProjectConfig globalFlags flags
                  mempty -- ClientInstallFlags, not needed here

-- | Given the install plan, produce a config value with constraints that
-- freezes the versions of packages used in the plan.
--
projectFreezeConfig
  :: ElaboratedInstallPlan
  -> TotalIndexState
  -> ActiveRepos
  -> ProjectConfig
projectFreezeConfig elaboratedPlan totalIndexState activeRepos0 = mempty
  { projectConfigShared = mempty
    { projectConfigConstraints =
        concat (Map.elems (projectFreezeConstraints elaboratedPlan))
    , projectConfigIndexState = Flag totalIndexState
    , projectConfigActiveRepos = Flag activeRepos
    }
  }
  where
    -- Skipped repositories are not replayable, so they are dropped here.
    activeRepos :: ActiveRepos
    activeRepos = filterSkippedActiveRepos activeRepos0

-- | Given the install plan, produce solver constraints that will ensure the
-- solver picks the same solution again in future in different environments.
--
projectFreezeConstraints
  :: ElaboratedInstallPlan
  -> Map PackageName [(UserConstraint, ConstraintSource)]
projectFreezeConstraints plan =
    --
    -- TODO: [required eventually] this is currently an underapproximation
    -- since the constraints language is not expressive enough to specify the
    -- precise solution. See https://github.com/haskell/cabal/issues/3502.
    --
    -- For the moment we deal with multiple versions in the solution by using
    -- constraints that allow either version. Also, we do not include any
    -- /version/ constraints for packages that are local to the project (e.g.
    -- if the solution has two instances of Cabal, one from the local project
    -- and one pulled in as a setup deps then we exclude all constraints on
    -- Cabal, not just the constraint for the local instance since any
    -- constraint would apply to both instances). We do however keep flag
    -- constraints of local packages.
    --
    deleteLocalPackagesVersionConstraints
      (Map.unionWith (++) versionConstraints flagConstraints)
  where
    versionConstraints :: Map PackageName [(UserConstraint, ConstraintSource)]
    versionConstraints =
      Map.mapWithKey
        (\p v -> [(UserConstraint (UserAnyQualifier p) (PackagePropertyVersion v),
                   ConstraintSourceFreeze)])
        versionRanges

    -- One range per package name; multiple versions in the plan are unioned.
    versionRanges :: Map PackageName VersionRange
    versionRanges =
      Map.map simplifyVersionRange $
      Map.fromListWith unionVersionRanges $
           [ (packageName pkg, thisVersion (packageVersion pkg))
           | InstallPlan.PreExisting pkg <- InstallPlan.toList plan
           ]
        ++ [ (packageName pkg, thisVersion (packageVersion pkg))
           | InstallPlan.Configured pkg <- InstallPlan.toList plan
           ]

    flagConstraints :: Map PackageName [(UserConstraint, ConstraintSource)]
    flagConstraints =
      Map.mapWithKey
        (\p f -> [(UserConstraint (UserQualified UserQualToplevel p) (PackagePropertyFlags f),
                   ConstraintSourceFreeze)])
        flagAssignments

    -- Only non-empty flag assignments of configured packages are recorded.
    flagAssignments :: Map PackageName FlagAssignment
    flagAssignments =
      Map.fromList
        [ (pkgname, flags)
        | InstallPlan.Configured elab <- InstallPlan.toList plan
        , let flags = elabFlagAssignment elab
              pkgname = packageName elab
        , not (nullFlagAssignment flags)
        ]

    -- As described above, remove the version constraints on local packages,
    -- but leave any flag constraints.
    deleteLocalPackagesVersionConstraints
      :: Map PackageName [(UserConstraint, ConstraintSource)]
      -> Map PackageName [(UserConstraint, ConstraintSource)]
    deleteLocalPackagesVersionConstraints =
      Map.mergeWithKey
        (\_pkgname () constraints ->
            case filter (not . isVersionConstraint . fst) constraints of
              []           -> Nothing
              constraints' -> Just constraints')
        (const Map.empty) id
        localPackages

    isVersionConstraint (UserConstraint _ (PackagePropertyVersion _)) = True
    isVersionConstraint _ = False

    localPackages :: Map PackageName ()
    localPackages =
      Map.fromList
        [ (packageName elab, ())
        | InstallPlan.Configured elab <- InstallPlan.toList plan
        , elabLocalToProject elab
        ]

cabal-install-3.8.1.0/src/Distribution/Client/CmdHaddock.hs0000644000000000000000000002074707346545000021573 0ustar0000000000000000{-# LANGUAGE RecordWildCards #-}

-- | cabal-install CLI command: haddock
--
module Distribution.Client.CmdHaddock (
    -- * The @haddock@ CLI and action
    haddockCommand,
    haddockAction,

    -- * Internals exposed for testing
    selectPackageTargets,
    selectComponentTarget
  ) where

import Distribution.Client.Compat.Prelude
import Prelude ()

import Distribution.Client.ProjectOrchestration
import Distribution.Client.CmdErrorMessages
import Distribution.Client.TargetProblem
         ( TargetProblem (..), TargetProblem' )
import Distribution.Client.NixStyleOptions
         ( NixStyleFlags (..), nixStyleOptions, defaultNixStyleFlags )
import Distribution.Client.Setup
         ( GlobalFlags, ConfigFlags(..)
  )
import Distribution.Simple.Setup
         ( HaddockFlags(..), fromFlagOrDefault, trueArg )
import Distribution.Simple.Command
         ( CommandUI(..), usageAlternatives, ShowOrParseArgs, OptionField, option )
import Distribution.Verbosity
         ( normal )
import Distribution.Simple.Utils
         ( wrapText, die', notice )
import Distribution.Simple.Flag (Flag(..))

import qualified System.Exit (exitSuccess)

-- | Extra flags specific to @v2-haddock@ (not part of 'HaddockFlags').
newtype ClientHaddockFlags = ClientHaddockFlags
  { openInBrowser :: Flag Bool -- ^ open the generated docs in a browser
  }

haddockCommand :: CommandUI (NixStyleFlags ClientHaddockFlags)
haddockCommand = CommandUI {
  commandName         = "v2-haddock",
  commandSynopsis     = "Build Haddock documentation.",
  commandUsage        = usageAlternatives "v2-haddock" [ "[FLAGS] TARGET" ],
  commandDescription  = Just $ \_ -> wrapText $
        "Build Haddock documentation for the specified packages within the "
     ++ "project.\n\n"
     ++ "Any package in the project can be specified. If no package is "
     ++ "specified, the default is to build the documentation for the package "
     ++ "in the current directory. The default behaviour is to build "
     ++ "documentation for the exposed modules of the library component (if "
     ++ "any). This can be changed with the '--internal', '--executables', "
     ++ "'--tests', '--benchmarks' or '--all' flags.\n\n"
     ++ "Currently, documentation for dependencies is NOT built. This "
     ++ "behavior may change in future.\n\n"
     ++ "Additional configuration flags can be specified on the command line "
     ++ "and these extend the project configuration from the 'cabal.project', "
     ++ "'cabal.project.local' and other files.",
  commandNotes        = Just $ \pname ->
        "Examples:\n"
     ++ " " ++ pname ++ " v2-haddock pkgname"
     ++ " Build documentation for the package named pkgname\n"
  , commandOptions      = nixStyleOptions haddockOptions
  , commandDefaultFlags = defaultNixStyleFlags (ClientHaddockFlags (Flag False))
  }
  --TODO: [nice to have] support haddock on specific components, not just
  -- whole packages and the silly --executables etc modifiers.

-- | The command-line options specific to @v2-haddock@ (currently only
-- @--open@).
haddockOptions :: ShowOrParseArgs -> [OptionField ClientHaddockFlags]
haddockOptions _ =
  [ option [] ["open"] "Open generated documentation in the browser"
      openInBrowser (\v f -> f { openInBrowser = v}) trueArg
  ]

-- | The @haddock@ command is TODO.
--
-- For more details on how this works, see the module
-- "Distribution.Client.ProjectOrchestration"
--
haddockAction :: NixStyleFlags ClientHaddockFlags -> [String] -> GlobalFlags -> IO ()
haddockAction flags@NixStyleFlags {..} targetStrings globalFlags = do
    projCtx <- establishProjectBaseContext verbosity cliConfig HaddockCommand

    -- Honour --open by enabling the corresponding build setting.
    let baseCtx
          | fromFlagOrDefault False (openInBrowser extraFlags)
          = projCtx { buildSettings =
                        (buildSettings projCtx) { buildSettingHaddockOpen = True } }
          | otherwise
          = projCtx

    targetSelectors <-
      either (reportTargetSelectorProblems verbosity) return
        =<< readTargetSelectors (localPackages baseCtx) Nothing targetStrings

    buildCtx <-
      runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do
        when (buildSettingOnlyDeps (buildSettings baseCtx)) $
          die' verbosity
            "The haddock command does not support '--only-dependencies'."

        -- When we interpret the targets on the command line, interpret them as
        -- haddock targets
        targets <-
          either (reportBuildDocumentationTargetProblems verbosity) return $
            resolveTargets
              (selectPackageTargets haddockFlags)
              selectComponentTarget
              elaboratedPlan
              Nothing
              targetSelectors

        let elaboratedPlan' =
              pruneInstallPlanToTargets
                TargetActionHaddock
                targets
                elaboratedPlan
        return (elaboratedPlan', targets)

    printPlan verbosity baseCtx buildCtx

    buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx
    runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes
  where
    verbosity = fromFlagOrDefault normal (configVerbosity configFlags)
    cliConfig = commandLineFlagsToProjectConfig globalFlags flags
                  mempty -- ClientInstallFlags, not needed here

-- | This defines what a 'TargetSelector' means for the @haddock@ command.
-- It selects the 'AvailableTarget's that the 'TargetSelector' refers to,
-- or otherwise classifies the problem.
--
-- For the @haddock@ command we select all buildable libraries. Additionally,
-- depending on the @--executables@ flag we also select all the buildable exes.
-- We do similarly for test-suites, benchmarks and foreign libs.
--
selectPackageTargets
  :: HaddockFlags -> TargetSelector
  -> [AvailableTarget k] -> Either TargetProblem' [k]
selectPackageTargets haddockFlags targetSelector targets

    -- If there are any buildable targets then we select those
  | not (null targetsBuildable)
  = Right targetsBuildable

    -- If there are targets but none are buildable then we report those
  | not (null targets)
  = Left (TargetProblemNoneEnabled targetSelector targets')

    -- If there are no targets at all then we report that
  | otherwise
  = Left (TargetProblemNoTargets targetSelector)
  where
    targets' = forgetTargetsDetail (map disableNotRequested targets)
    targetsBuildable = selectBuildableTargets (map disableNotRequested targets)

    -- When there's a target filter like "pkg:exes" then we do select exes,
    -- but if it's just a target like "pkg" then we don't build docs for exes
    -- unless they are requested by default (i.e. by using --executables)
    disableNotRequested t@(AvailableTarget _ cname (TargetBuildable _ _) _)
      | not (isRequested targetSelector (componentKind cname))
      = t { availableTargetStatus = TargetDisabledByUser }
    disableNotRequested t = t

    isRequested (TargetPackage _ _ (Just _)) _ = True
    isRequested (TargetAllPackages (Just _)) _ = True
    isRequested _ LibKind = True
--  isRequested _ SubLibKind = True --TODO: what about sublibs?

    -- TODO/HACK, we encode some defaults here as v2-haddock's logic;
    -- make sure this matches the defaults applied in
    -- "Distribution.Client.ProjectPlanning"; this may need more work
    -- to be done properly
    --
    -- See also https://github.com/haskell/cabal/pull/4886
    isRequested _ FLibKind = fromFlagOrDefault False (haddockForeignLibs haddockFlags)
    isRequested _ ExeKind = fromFlagOrDefault False (haddockExecutables haddockFlags)
    isRequested _ TestKind = fromFlagOrDefault False (haddockTestSuites haddockFlags)
    isRequested _ BenchKind = fromFlagOrDefault False (haddockBenchmarks haddockFlags)

-- | For a 'TargetComponent' 'TargetSelector', check if the component can be
-- selected.
--
-- For the @haddock@ command we just need the basic checks on being buildable
-- etc.
--
selectComponentTarget
  :: SubComponentTarget
  -> AvailableTarget k -> Either TargetProblem' k
selectComponentTarget = selectComponentTargetBasic

-- | Report documentation-target problems; a package with no library (and no
-- other requested components) exits successfully with a notice rather than
-- failing.
reportBuildDocumentationTargetProblems :: Verbosity -> [TargetProblem'] -> IO a
reportBuildDocumentationTargetProblems verbosity problems =
  case problems of
    [TargetProblemNoneEnabled _ _] -> do
      notice verbosity $ unwords
        [ "No documentation was generated as this package does not contain a library."
        , "Perhaps you want to use the --haddock-all flag, or one or more of the"
        , "--haddock-executables, --haddock-tests, --haddock-benchmarks or"
        , "--haddock-internal flags."
        ]
      System.Exit.exitSuccess
    _ ->
      reportTargetProblems verbosity "build documentation for" problems

cabal-install-3.8.1.0/src/Distribution/Client/CmdInstall.hs0000644000000000000000000011564307346545000021644 0ustar0000000000000000{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}

-- | cabal-install CLI command: build
--
module Distribution.Client.CmdInstall (
    -- * The @build@ CLI and action
    installCommand,
    installAction,

    -- * Internals exposed for testing
    selectPackageTargets,
    selectComponentTarget,

    -- * Internals exposed for CmdRepl + CmdRun
    establishDummyDistDirLayout,
    establishDummyProjectBaseContext
  ) where

import Prelude ()
import Distribution.Client.Compat.Prelude
import Distribution.Compat.Directory
         ( doesPathExist )

import Distribution.Client.ProjectOrchestration
import Distribution.Client.CmdErrorMessages
import Distribution.Client.CmdSdist
import Distribution.Client.TargetProblem
         ( TargetProblem', TargetProblem (..) )

import Distribution.Client.CmdInstall.ClientInstallFlags
import Distribution.Client.CmdInstall.ClientInstallTargetSelector

import Distribution.Client.Setup
         ( GlobalFlags(..), ConfigFlags(..) )
import Distribution.Client.Types
         ( PackageSpecifier(..), PackageLocation(..), UnresolvedSourcePackage
         , SourcePackageDb(..) )
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Package
         ( Package(..), PackageName, mkPackageName, unPackageName )
import Distribution.Types.PackageId
         ( PackageIdentifier(..) )
import Distribution.Client.ProjectConfig
         ( ProjectPackageLocation(..)
         , fetchAndReadSourcePackages
         , projectConfigWithBuilderRepoContext
         , resolveBuildTimeSettings, withProjectOrGlobalConfig )
import Distribution.Client.NixStyleOptions
         ( NixStyleFlags (..), nixStyleOptions, defaultNixStyleFlags )
import Distribution.Client.ProjectFlags (ProjectFlags (..))
import Distribution.Client.ProjectConfig.Types
         ( ProjectConfig(..), ProjectConfigShared(..)
         , ProjectConfigBuildOnly(..), PackageConfig(..)
         , getMapLast, getMapMappend, projectConfigLogsDir
         , projectConfigStoreDir, projectConfigBuildOnly
         , projectConfigConfigFile )
import Distribution.Simple.Program.Db
         ( userSpecifyPaths, userSpecifyArgss, defaultProgramDb
         , modifyProgramSearchPath, ProgramDb )
import Distribution.Simple.BuildPaths
         ( exeExtension )
import Distribution.Simple.Program.Find
         ( ProgramSearchPathEntry(..) )
import Distribution.Client.Config
         ( defaultInstallPath, getCabalDir, loadConfig, SavedConfig(..) )
import qualified Distribution.Simple.PackageIndex as PI
import Distribution.Solver.Types.PackageIndex
         ( lookupPackageName, searchByName )
import Distribution.Types.InstalledPackageInfo
         ( InstalledPackageInfo(..) )
import Distribution.Types.Version
         ( Version, nullVersion )
import Distribution.Types.VersionRange
         ( thisVersion )
import Distribution.Solver.Types.PackageConstraint
         ( PackageProperty(..) )
import Distribution.Client.IndexUtils
         ( getSourcePackages, getInstalledPackages )
import Distribution.Client.ProjectPlanning
         ( storePackageInstallDirs' )
import Distribution.Client.ProjectPlanning.Types
         ( ElaboratedInstallPlan )
import qualified Distribution.Simple.InstallDirs as InstallDirs
import Distribution.Client.DistDirLayout
         ( DistDirLayout(..), mkCabalDirLayout
         , cabalStoreDirLayout
         , CabalDirLayout(..), StoreDirLayout(..) )
import Distribution.Client.RebuildMonad
         ( runRebuild )
import Distribution.Client.InstallSymlink
         ( symlinkBinary, trySymlink, promptRun )
import Distribution.Client.Types.OverwritePolicy
         ( OverwritePolicy (..) )
import Distribution.Simple.Flag
         ( fromFlagOrDefault, flagToMaybe, flagElim )
import Distribution.Simple.Setup
         ( Flag(..) )
import Distribution.Solver.Types.SourcePackage
         ( SourcePackage(..) )
import Distribution.Simple.Command
         ( CommandUI(..), usageAlternatives )
import Distribution.Simple.Configure
         ( configCompilerEx )
import Distribution.Simple.Compiler
         ( Compiler(..), CompilerId(..), CompilerFlavor(..)
         , PackageDBStack )
import Distribution.Simple.GHC
         ( ghcPlatformAndVersionString, getGhcAppDir
         , GhcImplInfo(..), getImplInfo
         , GhcEnvironmentFileEntry(..)
         , renderGhcEnvironmentFile, readGhcEnvironmentFile, ParseErrorExc )
import Distribution.System
         ( Platform , buildOS, OS (Windows) )
import Distribution.Types.UnitId
         ( UnitId )
import Distribution.Types.UnqualComponentName
         ( UnqualComponentName, unUnqualComponentName )
import Distribution.Verbosity
         ( normal, lessVerbose )
import Distribution.Simple.Utils
         ( wrapText, die', notice, warn
         , withTempDirectory, createDirectoryIfMissingVerbose
         , ordNub )
import Distribution.Utils.Generic
         ( safeHead, writeFileAtomic )

import qualified Data.ByteString.Lazy.Char8 as BS
import Data.Ord
         ( Down(..) )
import qualified Data.Map as Map
import qualified Data.List.NonEmpty as NE
import Distribution.Utils.NubList
         ( fromNubList )
import Network.URI (URI)
import System.Directory
         ( doesFileExist, createDirectoryIfMissing
         , getTemporaryDirectory, makeAbsolute, doesDirectoryExist
         , removeFile, removeDirectory, copyFile )
import System.FilePath
         ( (), (<.>), takeDirectory, takeBaseName )

installCommand :: CommandUI (NixStyleFlags ClientInstallFlags)
installCommand = CommandUI
  { commandName         = "v2-install"
  , commandSynopsis     = "Install packages."
  , commandUsage        = usageAlternatives "v2-install" [ "[TARGETS] [FLAGS]" ]
  , commandDescription  = Just $ \_ -> wrapText $
    "Installs one or more packages. This is done by installing them "
    ++ "in the store and symlinking/copying the executables in the directory "
    ++ "specified by the --installdir flag (`~/.cabal/bin/` by default). "
    ++ "If you want the installed executables to be available globally, "
    ++ "make sure that the PATH environment variable contains that directory. "
    ++ "\n\n"
    ++ "If TARGET is a library and --lib (provisional) is used, "
    ++ "it will be added to the global environment. "
    ++ "When doing this, cabal will try to build a plan that includes all "
    ++ "the previously installed libraries. This is currently not implemented."
  , commandNotes        = Just $ \pname ->
    "Examples:\n"
    ++ " " ++ pname ++ " v2-install\n"
    ++ " Install the package in the current directory\n"
    ++ " " ++ pname ++ " v2-install pkgname\n"
    ++ " Install the package named pkgname"
    ++ " (fetching it from hackage if necessary)\n"
    ++ " " ++ pname ++ " v2-install ./pkgfoo\n"
    ++ " Install the package in the ./pkgfoo directory\n"
  , commandOptions      = nixStyleOptions clientInstallOptions
  , commandDefaultFlags = defaultNixStyleFlags defaultClientInstallFlags
  }

-- | The @install@ command actually serves four different needs. It installs:
-- * exes:
--   For example a program from hackage. The behavior is similar to the old
--   install command, except that now conflicts between separate runs of the
--   command are impossible thanks to the store.
--   Exes are installed in the store like a normal dependency, then they are
--   symlinked/copied in the directory specified by --installdir.
--   To do this we need a dummy projectBaseContext containing the targets as
--   extra packages and using a temporary dist directory.
-- * libraries
--   Libraries install through a similar process, but using GHC environment
--   files instead of symlinks. This means that 'v2-install'ing libraries
--   only works on GHC >= 8.0.
--
-- For more details on how this works, see the module
-- "Distribution.Client.ProjectOrchestration"
--
installAction :: NixStyleFlags ClientInstallFlags -> [String] -> GlobalFlags -> IO ()
installAction flags@NixStyleFlags { extraFlags = clientInstallFlags', .. } targetStrings globalFlags = do
  -- Ensure there were no invalid configuration options specified.
  verifyPreconditionsOrDie verbosity configFlags'

  -- We cannot use establishDummyProjectBaseContext to get these flags, since
  -- it requires one of them as an argument. Normal establishProjectBaseContext
  -- does not, and this is why this is done only for the install command
  clientInstallFlags <- getClientInstallFlags verbosity globalFlags clientInstallFlags'

  let
    installLibs = fromFlagOrDefault False (cinstInstallLibs clientInstallFlags)
    -- --lib selects library targets; otherwise only executables are targeted.
    targetFilter = if installLibs then Just LibKind else Just ExeKind
    -- With no explicit targets, install the package in the current directory.
    targetStrings' = if null targetStrings then ["."] else targetStrings

    -- Resolve targets/specifiers when running inside a project.
    withProject :: IO ([PackageSpecifier UnresolvedSourcePackage], [URI], [TargetSelector], ProjectConfig)
    withProject = do
      let reducedVerbosity = lessVerbose verbosity

      -- First, we need to learn about what's available to be installed.
      localBaseCtx <-
        establishProjectBaseContext reducedVerbosity cliConfig InstallCommand
      let localDistDirLayout = distDirLayout localBaseCtx
      pkgDb <- projectConfigWithBuilderRepoContext reducedVerbosity
                 (buildSettings localBaseCtx) (getSourcePackages verbosity)

      let
        -- Versioned package ids (e.g. "foo-1.2") are treated as named
        -- packages; everything else is parsed as a target selector string.
        (targetStrings'', packageIds) =
          partitionEithers .
          flip fmap targetStrings' $
          \str -> case simpleParsec str of
            Just (pkgId :: PackageId)
              | pkgVersion pkgId /= nullVersion -> Right pkgId
            _ -> Left str
        packageSpecifiers =
          flip fmap packageIds $ \case
            PackageIdentifier{..}
              | pkgVersion == nullVersion -> NamedPackage pkgName []
              | otherwise -> NamedPackage pkgName
                               [PackagePropertyVersion (thisVersion pkgVersion)]
        packageTargets =
          flip TargetPackageNamed targetFilter . pkgName <$> packageIds

      if null targetStrings'
        then return (packageSpecifiers, [], packageTargets, projectConfig localBaseCtx)
        else do
          targetSelectors <-
            either (reportTargetSelectorProblems verbosity) return
              =<< readTargetSelectors (localPackages localBaseCtx)
                                      Nothing targetStrings''

          (specs, selectors) <-
            getSpecsAndTargetSelectors
              verbosity reducedVerbosity pkgDb targetSelectors
              localDistDirLayout localBaseCtx targetFilter

          return ( specs ++ packageSpecifiers
                 , []
                 , selectors ++ packageTargets
                 , projectConfig localBaseCtx
                 )

    -- Resolve targets/specifiers with no project: everything is a named
    -- package, a local path, or a remote tarball URI.
    withoutProject :: ProjectConfig -> IO ([PackageSpecifier pkg], [URI], [TargetSelector], ProjectConfig)
    withoutProject globalConfig = do
      tss <- traverse (parseWithoutProjectTargetSelector verbosity) targetStrings'
      cabalDir <- getCabalDir
      let
        projectConfig = globalConfig <> cliConfig

        ProjectConfigBuildOnly {
          projectConfigLogsDir
        } = projectConfigBuildOnly projectConfig

        ProjectConfigShared {
          projectConfigStoreDir
        } = projectConfigShared projectConfig

        mlogsDir = flagToMaybe projectConfigLogsDir
        mstoreDir = flagToMaybe projectConfigStoreDir
        cabalDirLayout = mkCabalDirLayout cabalDir mstoreDir mlogsDir

        buildSettings = resolveBuildTimeSettings
                          verbosity cabalDirLayout
                          projectConfig

      SourcePackageDb { packageIndex } <-
        projectConfigWithBuilderRepoContext
          verbosity buildSettings
          (getSourcePackages verbosity)

      -- Fail early (with suggestions) for names not present in the index.
      for_ (concatMap woPackageNames tss) $ \name -> do
        when (null (lookupPackageName packageIndex name)) $ do
          let xs = searchByName packageIndex (unPackageName name)
          let emptyIf True _ = []
              emptyIf False zs = zs
          die' verbosity $ concat $
            [ "Unknown package \"", unPackageName name, "\". "
            ] ++ emptyIf (null xs)
            [ "Did you mean any of the following?\n"
            , unlines (("- " ++) . unPackageName . fst <$> xs)
            ]

      let
        (uris, packageSpecifiers) = partitionEithers $ map woPackageSpecifiers tss
        packageTargets = map woPackageTargets tss

      return (packageSpecifiers, uris, packageTargets, projectConfig)

  (specs, uris, targetSelectors, config) <-
     withProjectOrGlobalConfig verbosity ignoreProject globalConfigFlag
                               withProject withoutProject

  let
    ProjectConfig {
      projectConfigBuildOnly = ProjectConfigBuildOnly {
        projectConfigLogsDir
      },
      projectConfigShared = ProjectConfigShared {
        projectConfigHcFlavor,
        projectConfigHcPath,
        projectConfigHcPkg,
        projectConfigStoreDir
      },
      projectConfigLocalPackages = PackageConfig {
        packageConfigProgramPaths,
        packageConfigProgramArgs,
        packageConfigProgramPathExtra
      }
    } = config

    hcFlavor = flagToMaybe projectConfigHcFlavor
    hcPath = flagToMaybe projectConfigHcPath
    hcPkg = flagToMaybe projectConfigHcPkg

    -- ProgramDb with directly user specified paths
    preProgDb =
        userSpecifyPaths (Map.toList (getMapLast packageConfigProgramPaths))
      . userSpecifyArgss (Map.toList (getMapMappend packageConfigProgramArgs))
      . modifyProgramSearchPath
          (++ [ ProgramSearchPathDir dir
              | dir <- fromNubList packageConfigProgramPathExtra ])
      $ defaultProgramDb

  -- progDb is a program database with compiler tools configured properly
  (compiler@Compiler { compilerId =
     compilerId@(CompilerId compilerFlavor compilerVersion) }, platform, progDb) <-
       configCompilerEx hcFlavor hcPath hcPkg preProgDb verbosity

  let
    GhcImplInfo{ supportsPkgEnvFiles } = getImplInfo compiler

  envFile <- getEnvFile clientInstallFlags platform compilerVersion
  existingEnvEntries <-
    getExistingEnvEntries verbosity compilerFlavor supportsPkgEnvFiles envFile
  packageDbs <- getPackageDbStack compilerId projectConfigStoreDir projectConfigLogsDir
  installedIndex <- getInstalledPackages verbosity compiler packageDbs progDb

  let
    (envSpecs, nonGlobalEnvEntries) =
      getEnvSpecsAndNonGlobalEntries installedIndex existingEnvEntries installLibs

  -- Second, we need to use a fake project to let Cabal build the
  -- installables correctly. For that, we need a place to put a
  -- temporary dist directory.
  globalTmp <- getTemporaryDirectory

  withTempDirectory verbosity globalTmp "cabal-install." $ \tmpDir -> do
    distDirLayout <- establishDummyDistDirLayout verbosity config tmpDir

    uriSpecs <- runRebuild tmpDir $ fetchAndReadSourcePackages
      verbosity
      distDirLayout
      (projectConfigShared config)
      (projectConfigBuildOnly config)
      [ ProjectPackageRemoteTarball uri | uri <- uris ]

    baseCtx <- establishDummyProjectBaseContext
                 verbosity
                 config
                 distDirLayout
                 (envSpecs ++ specs ++ uriSpecs)
                 InstallCommand

    buildCtx <- constructProjectBuildContext verbosity baseCtx targetSelectors

    printPlan verbosity baseCtx buildCtx
    buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx
    runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes

    -- Now that we built everything we can do the installation part.
    -- First, figure out if / what parts we want to install:
    let
      dryRun = buildSettingDryRun (buildSettings baseCtx)
            || buildSettingOnlyDownload (buildSettings baseCtx)

    -- Then, install!
    unless dryRun $
      if installLibs
        then installLibraries verbosity
               buildCtx compiler packageDbs progDb envFile nonGlobalEnvEntries
        else installExes verbosity
               baseCtx buildCtx platform compiler configFlags clientInstallFlags
  where
    configFlags' = disableTestsBenchsByDefault configFlags
    verbosity = fromFlagOrDefault normal (configVerbosity configFlags')
    ignoreProject = flagIgnoreProject projectFlags
    cliConfig = commandLineFlagsToProjectConfig
                  globalFlags
                  flags { configFlags = configFlags' }
                  clientInstallFlags'
    globalConfigFlag = projectConfigConfigFile (projectConfigShared cliConfig)

-- | Verify that invalid config options were not passed to the install command.
--
-- If an invalid configuration is found the command will @die'@.
verifyPreconditionsOrDie :: Verbosity -> ConfigFlags -> IO ()
verifyPreconditionsOrDie verbosity configFlags = do
  -- We never try to build tests/benchmarks for remote packages.
  -- So we set them as disabled by default and error if they are explicitly
  -- enabled.
  when (configTests configFlags == Flag True) $
    die' verbosity $ "--enable-tests was specified, but tests can't "
                  ++ "be enabled in a remote package"
  when (configBenchmarks configFlags == Flag True) $
    die' verbosity $ "--enable-benchmarks was specified, but benchmarks can't "
                  ++ "be enabled in a remote package"

-- | Combine the saved (config-file) client install flags with the ones given
-- on the command line; the command-line ones take precedence via 'mappend'.
getClientInstallFlags :: Verbosity -> GlobalFlags -> ClientInstallFlags -> IO ClientInstallFlags
getClientInstallFlags verbosity globalFlags existingClientInstallFlags = do
  let configFileFlag = globalConfigFile globalFlags
  savedConfig <- loadConfig verbosity configFileFlag
  pure $ savedClientInstallFlags savedConfig `mappend` existingClientInstallFlags

-- | Using the install plan, split the requested target selectors into
-- sdist-able local package specifiers and hackage package names/targets.
getSpecsAndTargetSelectors
  :: Verbosity
  -> Verbosity
  -> SourcePackageDb
  -> [TargetSelector]
  -> DistDirLayout
  -> ProjectBaseContext
  -> Maybe ComponentKindFilter
  -> IO ([PackageSpecifier UnresolvedSourcePackage], [TargetSelector])
getSpecsAndTargetSelectors verbosity reducedVerbosity pkgDb targetSelectors localDistDirLayout localBaseCtx targetFilter =
  withInstallPlan reducedVerbosity localBaseCtx $ \elaboratedPlan _ -> do
  -- Split into known targets and hackage packages.
(targets, hackageNames) <- partitionToKnownTargetsAndHackagePackages verbosity pkgDb elaboratedPlan targetSelectors let planMap = InstallPlan.toMap elaboratedPlan targetIds = Map.keys targets sdistize (SpecificSourcePackage spkg) = SpecificSourcePackage spkg' where sdistPath = distSdistFile localDistDirLayout (packageId spkg) spkg' = spkg { srcpkgSource = LocalTarballPackage sdistPath } sdistize named = named local = sdistize <$> localPackages localBaseCtx gatherTargets :: UnitId -> TargetSelector gatherTargets targetId = TargetPackageNamed pkgName targetFilter where targetUnit = Map.findWithDefault (error "cannot find target unit") targetId planMap PackageIdentifier{..} = packageId targetUnit targets' = fmap gatherTargets targetIds hackagePkgs :: [PackageSpecifier UnresolvedSourcePackage] hackagePkgs = flip NamedPackage [] <$> hackageNames hackageTargets :: [TargetSelector] hackageTargets = flip TargetPackageNamed targetFilter <$> hackageNames createDirectoryIfMissing True (distSdistDirectory localDistDirLayout) unless (Map.null targets) $ for_ (localPackages localBaseCtx) $ \lpkg -> case lpkg of SpecificSourcePackage pkg -> packageToSdist verbosity (distProjectRootDirectory localDistDirLayout) TarGzArchive (distSdistFile localDistDirLayout (packageId pkg)) pkg NamedPackage pkgName _ -> error $ "Got NamedPackage " ++ prettyShow pkgName if null targets then return (hackagePkgs, hackageTargets) else return (local ++ hackagePkgs, targets' ++ hackageTargets) -- | Partitions the target selectors into known local targets and hackage packages. partitionToKnownTargetsAndHackagePackages :: Verbosity -> SourcePackageDb -> ElaboratedInstallPlan -> [TargetSelector] -> IO (TargetsMap, [PackageName]) partitionToKnownTargetsAndHackagePackages verbosity pkgDb elaboratedPlan targetSelectors = do let mTargets = resolveTargets selectPackageTargets selectComponentTarget elaboratedPlan (Just pkgDb) targetSelectors case mTargets of Right targets -> -- Everything is a local dependency. 
return (targets, []) Left errs -> do -- Not everything is local. let (errs', hackageNames) = partitionEithers . flip fmap errs $ \case TargetAvailableInIndex name -> Right name err -> Left err -- report incorrect case for known package. for_ errs' $ \case TargetNotInProject hn -> case searchByName (packageIndex pkgDb) (unPackageName hn) of [] -> return () xs -> die' verbosity . concat $ [ "Unknown package \"", unPackageName hn, "\". " , "Did you mean any of the following?\n" , unlines (("- " ++) . unPackageName . fst <$> xs) ] _ -> return () when (not . null $ errs') $ reportBuildTargetProblems verbosity errs' let targetSelectors' = flip filter targetSelectors $ \case TargetComponentUnknown name _ _ | name `elem` hackageNames -> False TargetPackageNamed name _ | name `elem` hackageNames -> False _ -> True -- This can't fail, because all of the errors are -- removed (or we've given up). targets <- either (reportBuildTargetProblems verbosity) return $ resolveTargets selectPackageTargets selectComponentTarget elaboratedPlan Nothing targetSelectors' return (targets, hackageNames) constructProjectBuildContext :: Verbosity -> ProjectBaseContext -- ^ The synthetic base context to use to produce the full build context. 
-> [TargetSelector] -> IO ProjectBuildContext constructProjectBuildContext verbosity baseCtx targetSelectors = do runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do -- Interpret the targets on the command line as build targets targets <- either (reportBuildTargetProblems verbosity) return $ resolveTargets selectPackageTargets selectComponentTarget elaboratedPlan Nothing targetSelectors let prunedToTargetsElaboratedPlan = pruneInstallPlanToTargets TargetActionBuild targets elaboratedPlan prunedElaboratedPlan <- if buildSettingOnlyDeps (buildSettings baseCtx) then either (reportCannotPruneDependencies verbosity) return $ pruneInstallPlanToDependencies (Map.keysSet targets) prunedToTargetsElaboratedPlan else return prunedToTargetsElaboratedPlan return (prunedElaboratedPlan, targets) -- | Install any built exe by symlinking/copying it -- we don't use BuildOutcomes because we also need the component names installExes :: Verbosity -> ProjectBaseContext -> ProjectBuildContext -> Platform -> Compiler -> ConfigFlags -> ClientInstallFlags -> IO () installExes verbosity baseCtx buildCtx platform compiler configFlags clientInstallFlags = do installPath <- defaultInstallPath let storeDirLayout = cabalStoreDirLayout $ cabalDirLayout baseCtx prefix = fromFlagOrDefault "" (fmap InstallDirs.fromPathTemplate (configProgPrefix configFlags)) suffix = fromFlagOrDefault "" (fmap InstallDirs.fromPathTemplate (configProgSuffix configFlags)) mkUnitBinDir :: UnitId -> FilePath mkUnitBinDir = InstallDirs.bindir . storePackageInstallDirs' storeDirLayout (compilerId compiler) mkExeName :: UnqualComponentName -> FilePath mkExeName exe = unUnqualComponentName exe <.> exeExtension platform mkFinalExeName :: UnqualComponentName -> FilePath mkFinalExeName exe = prefix <> unUnqualComponentName exe <> suffix <.> exeExtension platform installdirUnknown = "installdir is not defined. Set it in your cabal config file " ++ "or use --installdir=. 
Using default installdir: " ++ show installPath installdir <- fromFlagOrDefault (warn verbosity installdirUnknown >> pure installPath) $ pure <$> cinstInstalldir clientInstallFlags createDirectoryIfMissingVerbose verbosity False installdir warnIfNoExes verbosity buildCtx installMethod <- flagElim defaultMethod return $ cinstInstallMethod clientInstallFlags let doInstall = installUnitExes verbosity overwritePolicy mkUnitBinDir mkExeName mkFinalExeName installdir installMethod in traverse_ doInstall $ Map.toList $ targetsMap buildCtx where overwritePolicy = fromFlagOrDefault NeverOverwrite $ cinstOverwritePolicy clientInstallFlags isWindows = buildOS == Windows -- This is in IO as we will make environment checks, -- to decide which method is best defaultMethod :: IO InstallMethod defaultMethod -- Try symlinking in temporary directory, if it works default to -- symlinking even on windows | isWindows = do symlinks <- trySymlink verbosity return $ if symlinks then InstallMethodSymlink else InstallMethodCopy | otherwise = return InstallMethodSymlink -- | Install any built library by adding it to the default ghc environment installLibraries :: Verbosity -> ProjectBuildContext -> Compiler -> PackageDBStack -> ProgramDb -> FilePath -- ^ Environment file -> [GhcEnvironmentFileEntry] -> IO () installLibraries verbosity buildCtx compiler packageDbs programDb envFile envEntries = do -- Why do we get it again? If we updated a globalPackage then we need -- the new version. installedIndex <- getInstalledPackages verbosity compiler packageDbs programDb if supportsPkgEnvFiles $ getImplInfo compiler then do let getLatest :: PackageName -> [InstalledPackageInfo] getLatest = (=<<) (maybeToList . safeHead . snd) . take 1 . sortBy (comparing (Down . fst)) . PI.lookupPackageName installedIndex globalLatest = concat (getLatest <$> globalPackages) baseEntries = GhcEnvFileClearPackageDbStack : fmap GhcEnvFilePackageDb packageDbs globalEntries = GhcEnvFilePackageId . 
installedUnitId <$> globalLatest pkgEntries = ordNub $ globalEntries ++ envEntries ++ entriesForLibraryComponents (targetsMap buildCtx) contents' = renderGhcEnvironmentFile (baseEntries ++ pkgEntries) createDirectoryIfMissing True (takeDirectory envFile) writeFileAtomic envFile (BS.pack contents') else warn verbosity $ "The current compiler doesn't support safely installing libraries, " ++ "so only executables will be available. (Library installation is " ++ "supported on GHC 8.0+ only)" warnIfNoExes :: Verbosity -> ProjectBuildContext -> IO () warnIfNoExes verbosity buildCtx = when noExes $ warn verbosity $ "\n" <> "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n" <> "@ WARNING: Installation might not be completed as desired! @\n" <> "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n" <> "The command \"cabal install [TARGETS]\" doesn't expose libraries.\n" <> "* You might have wanted to add them as dependencies to your package." <> " In this case add \"" <> intercalate ", " (showTargetSelector <$> selectors) <> "\" to the build-depends field(s) of your package's .cabal file.\n" <> "* You might have wanted to add them to a GHC environment. In this case" <> " use \"cabal install --lib " <> unwords (showTargetSelector <$> selectors) <> "\". " <> " The \"--lib\" flag is provisional: see" <> " https://github.com/haskell/cabal/issues/6481 for more information." where targets = concat $ Map.elems $ targetsMap buildCtx components = fst <$> targets selectors = concatMap (NE.toList . 
snd) targets noExes = null $ catMaybes $ exeMaybe <$> components exeMaybe (ComponentTarget (CExeName exe) _) = Just exe exeMaybe _ = Nothing globalPackages :: [PackageName] globalPackages = mkPackageName <$> [ "ghc", "hoopl", "bytestring", "unix", "base", "time", "hpc", "filepath" , "process", "array", "integer-gmp", "containers", "ghc-boot", "binary" , "ghc-prim", "ghci", "rts", "terminfo", "transformers", "deepseq" , "ghc-boot-th", "pretty", "template-haskell", "directory", "text" , "bin-package-db" ] -- | Return the package specifiers and non-global environment file entries. getEnvSpecsAndNonGlobalEntries :: PI.InstalledPackageIndex -> [GhcEnvironmentFileEntry] -> Bool -> ([PackageSpecifier a], [GhcEnvironmentFileEntry]) getEnvSpecsAndNonGlobalEntries installedIndex entries installLibs = if installLibs then (envSpecs, envEntries') else ([], envEntries') where (envSpecs, envEntries') = environmentFileToSpecifiers installedIndex entries environmentFileToSpecifiers :: PI.InstalledPackageIndex -> [GhcEnvironmentFileEntry] -> ([PackageSpecifier a], [GhcEnvironmentFileEntry]) environmentFileToSpecifiers ipi = foldMap $ \case (GhcEnvFilePackageId unitId) | Just InstalledPackageInfo { sourcePackageId = PackageIdentifier{..}, installedUnitId } <- PI.lookupUnitId ipi unitId , let pkgSpec = NamedPackage pkgName [PackagePropertyVersion (thisVersion pkgVersion)] -> if pkgName `elem` globalPackages then ([pkgSpec], []) else ([pkgSpec], [GhcEnvFilePackageId installedUnitId]) _ -> ([], []) -- | Disables tests and benchmarks if they weren't explicitly enabled. 
disableTestsBenchsByDefault :: ConfigFlags -> ConfigFlags
disableTestsBenchsByDefault configFlags =
  configFlags
    { configTests      = Flag False <> configTests configFlags
    , configBenchmarks = Flag False <> configBenchmarks configFlags
    }

-- | Symlink/copy every exe from a package from the store to a given location
installUnitExes
  :: Verbosity
  -> OverwritePolicy
     -- ^ Whether to overwrite existing files
  -> (UnitId -> FilePath)
     -- ^ A function to get an UnitId's store directory
  -> (UnqualComponentName -> FilePath)
     -- ^ A function to get an exe's filename
  -> (UnqualComponentName -> FilePath)
     -- ^ A function to get an exe's final name, possibly different to the
     -- name in the store.
  -> FilePath
     -- ^ The directory the executables are installed into
  -> InstallMethod
  -> ( UnitId
     , [(ComponentTarget, NonEmpty TargetSelector)] )
  -> IO ()
installUnitExes verbosity overwritePolicy
                mkSourceBinDir mkExeName mkFinalExeName
                installdir installMethod
                (unit, components) =
  traverse_ installAndWarn exes
  where
    -- Only executable components are installable; other components are
    -- silently skipped here.
    exes = catMaybes $ (exeMaybe . fst) <$> components
    exeMaybe (ComponentTarget (CExeName exe) _) = Just exe
    exeMaybe _                                  = Nothing

    -- Install one exe and die with a descriptive message on failure.
    installAndWarn exe = do
      success <- installBuiltExe
                   verbosity overwritePolicy
                   (mkSourceBinDir unit) (mkExeName exe)
                   (mkFinalExeName exe)
                   installdir installMethod
      let errorMessage = case overwritePolicy of
            NeverOverwrite ->
              "Path '" <> (installdir </> prettyShow exe) <> "' already exists. "
                <> "Use --overwrite-policy=always to overwrite."
            -- This shouldn't even be possible, but we keep it in case
            -- symlinking/copying logic changes
            _ ->
              (case installMethod of
                 InstallMethodSymlink -> "Symlinking"
                 InstallMethodCopy    -> "Copying")
                <> " '" <> prettyShow exe <> "' failed."
      unless success $ die' verbosity errorMessage

-- | Install a specific exe.
-- | Install a single built executable from the store into @installdir@,
-- either by symlinking or by copying, depending on the 'InstallMethod'.
installBuiltExe
  :: Verbosity -> OverwritePolicy
  -> FilePath -- ^ The directory where the built exe is located
  -> FilePath -- ^ The exe's filename
  -> FilePath -- ^ The exe's filename in the public install directory
  -> FilePath -- ^ the directory where it should be installed
  -> InstallMethod
  -> IO Bool -- ^ Whether the installation was successful
installBuiltExe verbosity overwritePolicy
                sourceDir exeName finalExeName installdir InstallMethodSymlink = do
  notice verbosity $ "Symlinking '" <> exeName <> "' to '" <> destination <> "'"
  -- symlinkBinary itself honours the overwrite policy.
  symlinkBinary
    overwritePolicy
    installdir sourceDir
    finalExeName exeName
  where
    destination = installdir </> finalExeName
installBuiltExe verbosity overwritePolicy
                sourceDir exeName finalExeName installdir InstallMethodCopy = do
  notice verbosity $ "Copying '" <> exeName <> "' to '" <> destination <> "'"
  exists <- doesPathExist destination
  case (exists, overwritePolicy) of
    (True , NeverOverwrite ) -> pure False
    (True , AlwaysOverwrite) -> overwrite
    (True , PromptOverwrite) -> maybeOverwrite
    (False, _              ) -> copy
  where
    source      = sourceDir </> exeName
    destination = installdir </> finalExeName
    -- The destination may be a stale directory as well as a file.
    remove = do
      isDir <- doesDirectoryExist destination
      if isDir
        then removeDirectory destination
        else removeFile destination
    copy = copyFile source destination >> pure True
    overwrite :: IO Bool
    overwrite = remove >> copy
    maybeOverwrite :: IO Bool
    maybeOverwrite = promptRun
      "Existing file found while installing executable. Do you want to overwrite that file? (y/n)"
      overwrite

-- | Create 'GhcEnvironmentFileEntry's for packages with exposed libraries.
entriesForLibraryComponents :: TargetsMap -> [GhcEnvironmentFileEntry]
entriesForLibraryComponents = Map.foldrWithKey' (\k v -> mappend (go k v)) []
  where
    hasLib :: (ComponentTarget, NonEmpty TargetSelector) -> Bool
    hasLib (ComponentTarget (CLibName _) _, _) = True
    hasLib _                                   = False

    -- A unit contributes an environment-file entry only if at least one
    -- of its targets is a library component.
    go :: UnitId
       -> [(ComponentTarget, NonEmpty TargetSelector)]
       -> [GhcEnvironmentFileEntry]
    go unitId targets
      | any hasLib targets = [GhcEnvFilePackageId unitId]
      | otherwise          = []

-- | Gets the file path to the requested environment file.
getEnvFile :: ClientInstallFlags -> Platform -> Version -> IO FilePath
getEnvFile clientInstallFlags platform compilerVersion = do
  appDir <- getGhcAppDir
  case flagToMaybe (cinstEnvironmentPath clientInstallFlags) of
    Just spec
      -- Is spec a bare word without any "pathy" content, then it refers to
      -- a named global environment.
      | takeBaseName spec == spec ->
          return (getGlobalEnv appDir platform compilerVersion spec)
      | otherwise -> do
          spec' <- makeAbsolute spec
          isDir <- doesDirectoryExist spec'
          if isDir
            -- If spec is a directory, then make an ambient environment inside
            -- that directory.
            then return (getLocalEnv spec' platform compilerVersion)
            -- Otherwise, treat it like a literal file path.
            else return spec'
    Nothing ->
      return (getGlobalEnv appDir platform compilerVersion "default")

-- | Returns the list of @GhcEnvFilePackageId@ values already existing in the
-- environment being operated on.
getExistingEnvEntries
  :: Verbosity -> CompilerFlavor -> Bool -> FilePath
  -> IO [GhcEnvironmentFileEntry]
getExistingEnvEntries verbosity compilerFlavor supportsPkgEnvFiles envFile = do
  envFileExists <- doesFileExist envFile
  filterEnvEntries <$>
    if (compilerFlavor == GHC || compilerFlavor == GHCJS)
         && supportsPkgEnvFiles && envFileExists
      -- An unparsable existing env file is treated as empty (with a
      -- warning) rather than aborting the install.
      then catch (readGhcEnvironmentFile envFile) $ \(_ :: ParseErrorExc) ->
             warn verbosity ("The environment file " ++ envFile ++
               " is unparsable. Libraries cannot be installed.") >> return []
      else return []
  where
    -- Why? We know what the first part will be, we only care about the
    -- packages.
    filterEnvEntries = filter $ \case
      GhcEnvFilePackageId _ -> True
      _                     -> False

-- | Constructs the path to the global GHC environment file.
--
-- TODO(m-renaud): Create PkgEnvName newtype wrapper.
getGlobalEnv :: FilePath -> Platform -> Version -> String -> FilePath
getGlobalEnv appDir platform compilerVersion name =
  appDir
    </> ghcPlatformAndVersionString platform compilerVersion
    </> "environments"
    </> name

-- | Constructs the path to a local GHC environment file.
getLocalEnv :: FilePath -> Platform -> Version -> FilePath
getLocalEnv dir platform compilerVersion =
  dir
    </> ".ghc.environment."
          <> ghcPlatformAndVersionString platform compilerVersion

getPackageDbStack :: CompilerId -> Flag FilePath -> Flag FilePath -> IO PackageDBStack
getPackageDbStack compilerId storeDirFlag logsDirFlag = do
  cabalDir  <- getCabalDir
  mstoreDir <- traverse makeAbsolute $ flagToMaybe storeDirFlag
  let mlogsDir    = flagToMaybe logsDirFlag
      cabalLayout = mkCabalDirLayout cabalDir mstoreDir mlogsDir
  pure $ storePackageDBStack (cabalStoreDirLayout cabalLayout) compilerId

-- | This defines what a 'TargetSelector' means for the @install@ command.
-- It selects the 'AvailableTarget's that the 'TargetSelector' refers to,
-- or otherwise classifies the problem.
--
-- For the @build@ command select all components except non-buildable
-- and disabled tests\/benchmarks, fail if there are no such
-- components
--
selectPackageTargets :: TargetSelector
                     -> [AvailableTarget k] -> Either TargetProblem' [k]
selectPackageTargets targetSelector targets
    -- If there are any buildable targets then we select those
  | not (null targetsBuildable)
  = Right targetsBuildable

    -- If there are targets but none are buildable then we report those
  | not (null targets)
  = Left (TargetProblemNoneEnabled targetSelector targets')

    -- If there are no targets at all then we report that
  | otherwise
  = Left (TargetProblemNoTargets targetSelector)
  where
    targets'         = forgetTargetsDetail targets
    targetsBuildable = selectBuildableTargetsWith
                         (buildable targetSelector)
                         targets

    -- When there's a target filter like "pkg:tests" then we do select tests,
    -- but if it's just a target like "pkg" then we don't build tests unless
    -- they are requested by default (i.e. by using --enable-tests)
    buildable (TargetPackage _ _ Nothing) TargetNotRequestedByDefault = False
    buildable (TargetAllPackages Nothing) TargetNotRequestedByDefault = False
    buildable _ _ = True

-- | For a 'TargetComponent' 'TargetSelector', check if the component can be
-- selected.
--
-- For the @build@ command we just need the basic checks on being buildable etc.
--
selectComponentTarget :: SubComponentTarget
                      -> AvailableTarget k -> Either TargetProblem' k
selectComponentTarget = selectComponentTargetBasic

-- Problems are reported under the "build" command name, since targets are
-- resolved here exactly as the build command resolves them.
reportBuildTargetProblems :: Verbosity -> [TargetProblem'] -> IO a
reportBuildTargetProblems verbosity problems =
  reportTargetProblems verbosity "build" problems

reportCannotPruneDependencies :: Verbosity -> CannotPruneDependencies -> IO a
reportCannotPruneDependencies verbosity = die' verbosity .
renderCannotPruneDependencies cabal-install-3.8.1.0/src/Distribution/Client/CmdInstall/0000755000000000000000000000000007346545000021276 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/CmdInstall/ClientInstallFlags.hs0000644000000000000000000000633507346545000025363 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE LambdaCase #-} module Distribution.Client.CmdInstall.ClientInstallFlags ( InstallMethod(..) , ClientInstallFlags(..) , defaultClientInstallFlags , clientInstallOptions ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.ReadE ( succeedReadE, parsecToReadE ) import Distribution.Simple.Command ( ShowOrParseArgs(..), OptionField(..), option, reqArg ) import Distribution.Simple.Setup ( Flag(..), trueArg, flagToList, toFlag ) import Distribution.Client.Types.InstallMethod ( InstallMethod (..) ) import Distribution.Client.Types.OverwritePolicy ( OverwritePolicy(..) ) import qualified Distribution.Compat.CharParsing as P data ClientInstallFlags = ClientInstallFlags { cinstInstallLibs :: Flag Bool , cinstEnvironmentPath :: Flag FilePath , cinstOverwritePolicy :: Flag OverwritePolicy , cinstInstallMethod :: Flag InstallMethod , cinstInstalldir :: Flag FilePath } deriving (Eq, Show, Generic) instance Monoid ClientInstallFlags where mempty = gmempty mappend = (<>) instance Semigroup ClientInstallFlags where (<>) = gmappend instance Binary ClientInstallFlags instance Structured ClientInstallFlags defaultClientInstallFlags :: ClientInstallFlags defaultClientInstallFlags = ClientInstallFlags { cinstInstallLibs = toFlag False , cinstEnvironmentPath = mempty , cinstOverwritePolicy = mempty , cinstInstallMethod = mempty , cinstInstalldir = mempty } clientInstallOptions :: ShowOrParseArgs -> [OptionField ClientInstallFlags] clientInstallOptions _ = [ option [] ["lib"] ( "Install libraries rather than executables from the target package " <> "(provisional, see 
https://github.com/haskell/cabal/issues/6481 for more information)." ) cinstInstallLibs (\v flags -> flags { cinstInstallLibs = v }) trueArg , option [] ["package-env", "env"] "Set the environment file that may be modified." cinstEnvironmentPath (\pf flags -> flags { cinstEnvironmentPath = pf }) (reqArg "ENV" (succeedReadE Flag) flagToList) , option [] ["overwrite-policy"] "How to handle already existing symlinks." cinstOverwritePolicy (\v flags -> flags { cinstOverwritePolicy = v }) $ reqArg "always|never|prompt" (parsecToReadE (\err -> "Error parsing overwrite-policy: " ++ err) (toFlag `fmap` parsec)) (map prettyShow . flagToList) , option [] ["install-method"] "How to install the executables." cinstInstallMethod (\v flags -> flags { cinstInstallMethod = v }) $ reqArg "default|copy|symlink" (parsecToReadE (\err -> "Error parsing install-method: " ++ err) (toFlag `fmap` parsecInstallMethod)) (map prettyShow . flagToList) , option [] ["installdir"] "Where to install (by symlinking or copying) the executables in." 
cinstInstalldir (\v flags -> flags { cinstInstalldir = v }) $ reqArg "DIR" (succeedReadE Flag) flagToList ] parsecInstallMethod :: CabalParsing m => m InstallMethod parsecInstallMethod = do name <- P.munch1 isAlpha case name of "copy" -> pure InstallMethodCopy "symlink" -> pure InstallMethodSymlink _ -> P.unexpected $ "InstallMethod: " ++ name cabal-install-3.8.1.0/src/Distribution/Client/CmdInstall/ClientInstallTargetSelector.hs0000644000000000000000000000524007346545000027250 0ustar0000000000000000module Distribution.Client.CmdInstall.ClientInstallTargetSelector ( WithoutProjectTargetSelector (..), parseWithoutProjectTargetSelector, woPackageNames, woPackageTargets, woPackageSpecifiers, ) where import Distribution.Client.Compat.Prelude import Prelude () import Network.URI (URI, parseURI) import Distribution.Client.TargetSelector import Distribution.Client.Types import Distribution.Compat.CharParsing (char, optional) import Distribution.Package import Distribution.Simple.LocalBuildInfo (ComponentName (CExeName)) import Distribution.Simple.Utils (die') import Distribution.Solver.Types.PackageConstraint (PackageProperty (..)) import Distribution.Version data WithoutProjectTargetSelector = WoPackageId PackageId | WoPackageComponent PackageId ComponentName | WoURI URI deriving (Show) parseWithoutProjectTargetSelector :: Verbosity -> String -> IO WithoutProjectTargetSelector parseWithoutProjectTargetSelector verbosity input = case explicitEitherParsec parser input of Right ts -> return ts Left err -> case parseURI input of Just uri -> return (WoURI uri) Nothing -> die' verbosity $ "Invalid package ID: " ++ input ++ "\n" ++ err where parser :: CabalParsing m => m WithoutProjectTargetSelector parser = do pid <- parsec cn <- optional (char ':' *> parsec) return $ case cn of Nothing -> WoPackageId pid Just cn' -> WoPackageComponent pid (CExeName cn') woPackageNames :: WithoutProjectTargetSelector -> [PackageName] woPackageNames (WoPackageId pid) = [pkgName pid] 
woPackageNames (WoPackageComponent pid _) = [pkgName pid] woPackageNames (WoURI _) = [] woPackageTargets :: WithoutProjectTargetSelector -> TargetSelector woPackageTargets (WoPackageId pid) = TargetPackageNamed (pkgName pid) Nothing woPackageTargets (WoPackageComponent pid cn) = TargetComponentUnknown (pkgName pid) (Right cn) WholeComponent woPackageTargets (WoURI _) = TargetAllPackages (Just ExeKind) woPackageSpecifiers :: WithoutProjectTargetSelector -> Either URI (PackageSpecifier pkg) woPackageSpecifiers (WoPackageId pid) = Right (pidPackageSpecifiers pid) woPackageSpecifiers (WoPackageComponent pid _) = Right (pidPackageSpecifiers pid) woPackageSpecifiers (WoURI uri) = Left uri pidPackageSpecifiers :: PackageId -> PackageSpecifier pkg pidPackageSpecifiers pid | pkgVersion pid == nullVersion = NamedPackage (pkgName pid) [] | otherwise = NamedPackage (pkgName pid) [ PackagePropertyVersion (thisVersion (pkgVersion pid)) ] cabal-install-3.8.1.0/src/Distribution/Client/CmdLegacy.hs0000644000000000000000000001355707346545000021443 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ViewPatterns #-} module Distribution.Client.CmdLegacy ( legacyCmd, legacyWrapperCmd, newCmd ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Client.Sandbox ( loadConfigOrSandboxConfig, findSavedDistPref ) import qualified Distribution.Client.Setup as Client import Distribution.Client.SetupWrapper ( SetupScriptOptions(..), setupWrapper, defaultSetupScriptOptions ) import qualified Distribution.Simple.Setup as Setup import Distribution.Simple.Command import Distribution.Simple.Utils ( wrapText ) import Distribution.Verbosity ( normal ) import Control.Exception ( try ) import qualified Data.Text as T -- Tweaked versions of code from Main. 
regularCmd :: (HasVerbosity flags) => CommandUI flags -> (flags -> [String] -> globals -> IO action) -> CommandSpec (globals -> IO action) regularCmd ui action = CommandSpec ui ((flip commandAddAction) (\flags extra globals -> action flags extra globals)) NormalCommand wrapperCmd :: Monoid flags => CommandUI flags -> (flags -> Setup.Flag Verbosity) -> (flags -> Setup.Flag String) -> CommandSpec (Client.GlobalFlags -> IO ()) wrapperCmd ui verbosity' distPref = CommandSpec ui (\ui' -> wrapperAction ui' verbosity' distPref) NormalCommand wrapperAction :: Monoid flags => CommandUI flags -> (flags -> Setup.Flag Verbosity) -> (flags -> Setup.Flag String) -> Command (Client.GlobalFlags -> IO ()) wrapperAction command verbosityFlag distPrefFlag = commandAddAction command { commandDefaultFlags = mempty } $ \flags extraArgs globalFlags -> do let verbosity' = Setup.fromFlagOrDefault normal (verbosityFlag flags) load <- try (loadConfigOrSandboxConfig verbosity' globalFlags) let config = either (\(SomeException _) -> mempty) id load distPref <- findSavedDistPref config (distPrefFlag flags) let setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref } let command' = command { commandName = T.unpack . T.replace "v1-" "" . T.pack . 
commandName $ command } setupWrapper verbosity' setupScriptOptions Nothing command' (const flags) (const extraArgs) -- class HasVerbosity a where verbosity :: a -> Verbosity instance HasVerbosity (Setup.Flag Verbosity) where verbosity = Setup.fromFlagOrDefault normal instance (HasVerbosity a) => HasVerbosity (a, b) where verbosity (a, _) = verbosity a instance (HasVerbosity a) => HasVerbosity (a, b, c) where verbosity (a , _, _) = verbosity a instance (HasVerbosity a) => HasVerbosity (a, b, c, d) where verbosity (a, _, _, _) = verbosity a instance (HasVerbosity a) => HasVerbosity (a, b, c, d, e) where verbosity (a, _, _, _, _) = verbosity a instance (HasVerbosity a) => HasVerbosity (a, b, c, d, e, f) where verbosity (a, _, _, _, _, _) = verbosity a instance HasVerbosity Setup.BuildFlags where verbosity = verbosity . Setup.buildVerbosity instance HasVerbosity Setup.ConfigFlags where verbosity = verbosity . Setup.configVerbosity instance HasVerbosity Setup.ReplFlags where verbosity = verbosity . Setup.replVerbosity instance HasVerbosity Client.FreezeFlags where verbosity = verbosity . Client.freezeVerbosity instance HasVerbosity Setup.HaddockFlags where verbosity = verbosity . Setup.haddockVerbosity instance HasVerbosity Client.UpdateFlags where verbosity = verbosity . Client.updateVerbosity instance HasVerbosity Setup.CleanFlags where verbosity = verbosity . Setup.cleanVerbosity -- legacyNote :: String -> String legacyNote cmd = wrapText $ "The v1-" ++ cmd ++ " command is a part of the legacy v1 style of cabal usage.\n\n" ++ "It is a legacy feature and will be removed in a future release of cabal-install." 
++ " Please file a bug if you cannot replicate a working v1- use case with the nix-style" ++ " commands.\n\n" ++ "For more information, see: https://cabal.readthedocs.io/en/latest/nix-local-build-overview.html" toLegacyCmd :: CommandSpec (globals -> IO action) -> [CommandSpec (globals -> IO action)] toLegacyCmd mkSpec = [toLegacy mkSpec] where toLegacy (CommandSpec origUi@CommandUI{..} action type') = CommandSpec legUi action type' where legUi = origUi { commandName = "v1-" ++ commandName , commandNotes = Just $ \pname -> case commandNotes of Just notes -> notes pname ++ "\n" ++ legacyNote commandName Nothing -> legacyNote commandName } legacyCmd :: (HasVerbosity flags) => CommandUI flags -> (flags -> [String] -> globals -> IO action) -> [CommandSpec (globals -> IO action)] legacyCmd ui action = toLegacyCmd (regularCmd ui action) legacyWrapperCmd :: Monoid flags => CommandUI flags -> (flags -> Setup.Flag Verbosity) -> (flags -> Setup.Flag String) -> [CommandSpec (Client.GlobalFlags -> IO ())] legacyWrapperCmd ui verbosity' distPref = toLegacyCmd (wrapperCmd ui verbosity' distPref) newCmd :: CommandUI flags -> (flags -> [String] -> globals -> IO action) -> [CommandSpec (globals -> IO action)] newCmd origUi@CommandUI{..} action = [cmd defaultUi, cmd newUi, cmd origUi] where cmd ui = CommandSpec ui (flip commandAddAction action) NormalCommand newMsg = T.unpack . T.replace "v2-" "new-" . T.pack newUi = origUi { commandName = newMsg commandName , commandUsage = newMsg . commandUsage , commandDescription = (newMsg .) <$> commandDescription , commandNotes = (newMsg .) <$> commandNotes } defaultMsg = T.unpack . T.replace "v2-" "" . T.pack defaultUi = origUi { commandName = defaultMsg commandName , commandUsage = defaultMsg . commandUsage , commandDescription = (defaultMsg .) <$> commandDescription , commandNotes = (defaultMsg .) 
<$> commandNotes } cabal-install-3.8.1.0/src/Distribution/Client/CmdListBin.hs0000644000000000000000000004013307346545000021571 0ustar0000000000000000{-# LANGUAGE MultiWayIf #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TupleSections #-} module Distribution.Client.CmdListBin ( listbinCommand, listbinAction, -- * Internals exposed for testing selectPackageTargets, selectComponentTarget, noComponentsProblem, matchesMultipleProblem, multipleTargetsProblem, componentNotRightKindProblem ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.CmdErrorMessages (plural, renderListCommaAnd, renderTargetProblem, renderTargetProblemNoTargets, renderTargetSelector, showTargetSelector, targetSelectorFilter, targetSelectorPluralPkgs) import Distribution.Client.DistDirLayout (DistDirLayout (..)) import Distribution.Client.NixStyleOptions (NixStyleFlags (..), defaultNixStyleFlags, nixStyleOptions) import Distribution.Client.ProjectOrchestration import Distribution.Client.ProjectPlanning.Types import Distribution.Client.ScriptUtils (AcceptNoTargets(..), TargetContext(..), updateContextAndWriteProjectFile, withContextAndSelectors) import Distribution.Client.Setup (GlobalFlags (..)) import Distribution.Client.TargetProblem (TargetProblem (..)) import Distribution.Simple.BuildPaths (dllExtension, exeExtension) import Distribution.Simple.Command (CommandUI (..)) import Distribution.Simple.Setup (configVerbosity, fromFlagOrDefault) import Distribution.Simple.Utils (die', wrapText) import Distribution.System (Platform) import Distribution.Types.ComponentName (showComponentName) import Distribution.Types.UnitId (UnitId) import Distribution.Types.UnqualComponentName (UnqualComponentName) import Distribution.Verbosity (silent, verboseStderr) import System.FilePath ((<.>), ()) import qualified Data.Map as Map import qualified Data.Set as Set import qualified 
Distribution.Client.InstallPlan as IP import qualified Distribution.Simple.InstallDirs as InstallDirs import qualified Distribution.Solver.Types.ComponentDeps as CD ------------------------------------------------------------------------------- -- Command ------------------------------------------------------------------------------- listbinCommand :: CommandUI (NixStyleFlags ()) listbinCommand = CommandUI { commandName = "list-bin" , commandSynopsis = "List the path to a single executable." , commandUsage = \pname -> "Usage: " ++ pname ++ " list-bin [FLAGS] TARGET\n" , commandDescription = Just $ \_ -> wrapText "List the path to a build product." , commandNotes = Nothing , commandDefaultFlags = defaultNixStyleFlags () , commandOptions = nixStyleOptions (const []) } ------------------------------------------------------------------------------- -- Action ------------------------------------------------------------------------------- listbinAction :: NixStyleFlags () -> [String] -> GlobalFlags -> IO () listbinAction flags@NixStyleFlags{..} args globalFlags = do -- fail early if multiple target selectors specified target <- case args of [] -> die' verbosity "One target is required, none provided" [x] -> return x _ -> die' verbosity "One target is required, given multiple" -- configure and elaborate target selectors withContextAndSelectors RejectNoTargets (Just ExeKind) flags [target] globalFlags $ \targetCtx ctx targetSelectors -> do baseCtx <- case targetCtx of ProjectContext -> return ctx GlobalContext -> return ctx ScriptContext path exemeta -> updateContextAndWriteProjectFile ctx path exemeta buildCtx <- runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do -- Interpret the targets on the command line as build targets -- (as opposed to say repl or haddock targets). 
targets <- either (reportTargetProblems verbosity) return $ resolveTargets selectPackageTargets selectComponentTarget elaboratedPlan Nothing targetSelectors -- Reject multiple targets, or at least targets in different -- components. It is ok to have two module/file targets in the -- same component, but not two that live in different components. -- -- Note that we discard the target and return the whole 'TargetsMap', -- so this check will be repeated (and must succeed) after -- the 'runProjectPreBuildPhase'. Keep it in mind when modifying this. _ <- singleComponentOrElse (reportTargetProblems verbosity [multipleTargetsProblem targets]) targets let elaboratedPlan' = pruneInstallPlanToTargets TargetActionBuild targets elaboratedPlan return (elaboratedPlan', targets) (selectedUnitId, selectedComponent) <- -- Slight duplication with 'runProjectPreBuildPhase'. singleComponentOrElse (die' verbosity $ "No or multiple targets given, but the run " ++ "phase has been reached. This is a bug.") $ targetsMap buildCtx printPlan verbosity baseCtx buildCtx binfiles <- case Map.lookup selectedUnitId $ IP.toMap (elaboratedPlanOriginal buildCtx) of Nothing -> die' verbosity "No or multiple targets given..." 
Just gpp -> return $ IP.foldPlanPackage (const []) -- IPI don't have executables (elaboratedPackage (distDirLayout baseCtx) (elaboratedShared buildCtx) selectedComponent) gpp case binfiles of [] -> die' verbosity "No target found" [exe] -> putStrLn exe _ -> die' verbosity "Multiple targets found" where defaultVerbosity = verboseStderr silent verbosity = fromFlagOrDefault defaultVerbosity (configVerbosity configFlags) -- this is copied from elaboratedPackage :: DistDirLayout -> ElaboratedSharedConfig -> UnqualComponentName -> ElaboratedConfiguredPackage -> [FilePath] elaboratedPackage distDirLayout elaboratedSharedConfig selectedComponent elab = case elabPkgOrComp elab of ElabPackage pkg -> [ bin | (c, _) <- CD.toList $ CD.zip (pkgLibDependencies pkg) (pkgExeDependencies pkg) , bin <- bin_file c ] ElabComponent comp -> bin_file (compSolverName comp) where dist_dir = distBuildDirectory distDirLayout (elabDistDirParams elaboratedSharedConfig elab) bin_file c = case c of CD.ComponentExe s | s == selectedComponent -> [bin_file' s] CD.ComponentTest s | s == selectedComponent -> [bin_file' s] CD.ComponentBench s | s == selectedComponent -> [bin_file' s] CD.ComponentFLib s | s == selectedComponent -> [flib_file' s] _ -> [] plat :: Platform plat = pkgConfigPlatform elaboratedSharedConfig -- here and in PlanOutput, -- use binDirectoryFor? 
bin_file' s = if elabBuildStyle elab == BuildInplaceOnly then dist_dir "build" prettyShow s prettyShow s <.> exeExtension plat else InstallDirs.bindir (elabInstallDirs elab) prettyShow s <.> exeExtension plat flib_file' s = if elabBuildStyle elab == BuildInplaceOnly then dist_dir "build" prettyShow s ("lib" ++ prettyShow s) <.> dllExtension plat else InstallDirs.bindir (elabInstallDirs elab) ("lib" ++ prettyShow s) <.> dllExtension plat ------------------------------------------------------------------------------- -- Target Problem: the very similar to CmdRun ------------------------------------------------------------------------------- singleComponentOrElse :: IO (UnitId, UnqualComponentName) -> TargetsMap -> IO (UnitId, UnqualComponentName) singleComponentOrElse action targetsMap = case Set.toList . distinctTargetComponents $ targetsMap of [(unitId, CExeName component)] -> return (unitId, component) [(unitId, CTestName component)] -> return (unitId, component) [(unitId, CBenchName component)] -> return (unitId, component) [(unitId, CFLibName component)] -> return (unitId, component) _ -> action -- | This defines what a 'TargetSelector' means for the @list-bin@ command. -- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, -- or otherwise classifies the problem. -- -- For the @list-bin@ command we select the exe or flib if there is only one -- and it's buildable. Fail if there are no or multiple buildable exe components. -- selectPackageTargets :: TargetSelector -> [AvailableTarget k] -> Either ListBinTargetProblem [k] selectPackageTargets targetSelector targets -- If there is a single executable component, select that. See #7403 | [target] <- targetsExesBuildable = Right [target] -- Otherwise, if there is a single executable-like component left, select that. | [target] <- targetsExeLikesBuildable = Right [target] -- but fail if there are multiple buildable executables. 
| not (null targetsExeLikesBuildable) = Left (matchesMultipleProblem targetSelector targetsExeLikesBuildable') -- If there are executables but none are buildable then we report those | not (null targetsExeLikes') = Left (TargetProblemNoneEnabled targetSelector targetsExeLikes') -- If there are no executables but some other targets then we report that | not (null targets) = Left (noComponentsProblem targetSelector) -- If there are no targets at all then we report that | otherwise = Left (TargetProblemNoTargets targetSelector) where -- Targets that are precisely executables targetsExes = filterTargetsKind ExeKind targets targetsExesBuildable = selectBuildableTargets targetsExes -- Any target that could be executed targetsExeLikes = targetsExes ++ filterTargetsKind TestKind targets ++ filterTargetsKind BenchKind targets (targetsExeLikesBuildable, targetsExeLikesBuildable') = selectBuildableTargets' targetsExeLikes targetsExeLikes' = forgetTargetsDetail targetsExeLikes -- | For a 'TargetComponent' 'TargetSelector', check if the component can be -- selected. -- -- For the @run@ command we just need to check it is a executable-like -- (an executable, a test, or a benchmark), in addition -- to the basic checks on being buildable etc. -- selectComponentTarget :: SubComponentTarget -> AvailableTarget k -> Either ListBinTargetProblem k selectComponentTarget subtarget@WholeComponent t = case availableTargetComponentName t of CExeName _ -> component CTestName _ -> component CBenchName _ -> component CFLibName _ -> component _ -> Left (componentNotRightKindProblem pkgid cname) where pkgid = availableTargetPackageId t cname = availableTargetComponentName t component = selectComponentTargetBasic subtarget t selectComponentTarget subtarget t = Left (isSubComponentProblem (availableTargetPackageId t) (availableTargetComponentName t) subtarget) -- | The various error conditions that can occur when matching a -- 'TargetSelector' against 'AvailableTarget's for the @run@ command. 
-- data ListBinProblem = -- | The 'TargetSelector' matches targets but no executables TargetProblemNoRightComps TargetSelector -- | A single 'TargetSelector' matches multiple targets | TargetProblemMatchesMultiple TargetSelector [AvailableTarget ()] -- | Multiple 'TargetSelector's match multiple targets | TargetProblemMultipleTargets TargetsMap -- | The 'TargetSelector' refers to a component that is not an executable | TargetProblemComponentNotRightKind PackageId ComponentName -- | Asking to run an individual file or module is not supported | TargetProblemIsSubComponent PackageId ComponentName SubComponentTarget deriving (Eq, Show) type ListBinTargetProblem = TargetProblem ListBinProblem noComponentsProblem :: TargetSelector -> ListBinTargetProblem noComponentsProblem = CustomTargetProblem . TargetProblemNoRightComps matchesMultipleProblem :: TargetSelector -> [AvailableTarget ()] -> ListBinTargetProblem matchesMultipleProblem selector targets = CustomTargetProblem $ TargetProblemMatchesMultiple selector targets multipleTargetsProblem :: TargetsMap -> TargetProblem ListBinProblem multipleTargetsProblem = CustomTargetProblem . TargetProblemMultipleTargets componentNotRightKindProblem :: PackageId -> ComponentName -> TargetProblem ListBinProblem componentNotRightKindProblem pkgid name = CustomTargetProblem $ TargetProblemComponentNotRightKind pkgid name isSubComponentProblem :: PackageId -> ComponentName -> SubComponentTarget -> TargetProblem ListBinProblem isSubComponentProblem pkgid name subcomponent = CustomTargetProblem $ TargetProblemIsSubComponent pkgid name subcomponent reportTargetProblems :: Verbosity -> [ListBinTargetProblem] -> IO a reportTargetProblems verbosity = die' verbosity . unlines . 
map renderListBinTargetProblem renderListBinTargetProblem :: ListBinTargetProblem -> String renderListBinTargetProblem (TargetProblemNoTargets targetSelector) = case targetSelectorFilter targetSelector of Just kind | kind /= ExeKind -> "The list-bin command is for finding binaries, but the target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ "." _ -> renderTargetProblemNoTargets "list-bin" targetSelector renderListBinTargetProblem problem = renderTargetProblem "list-bin" renderListBinProblem problem renderListBinProblem :: ListBinProblem -> String renderListBinProblem (TargetProblemMatchesMultiple targetSelector targets) = "The list-bin command is for finding a single binary at once. The target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ " which includes " ++ renderListCommaAnd ( ("the "++) <$> showComponentName <$> availableTargetComponentName <$> foldMap (\kind -> filterTargetsKind kind targets) [ExeKind, TestKind, BenchKind] ) ++ "." renderListBinProblem (TargetProblemMultipleTargets selectorMap) = "The list-bin command is for finding a single binary at once. The targets " ++ renderListCommaAnd [ "'" ++ showTargetSelector ts ++ "'" | ts <- uniqueTargetSelectors selectorMap ] ++ " refer to different executables." renderListBinProblem (TargetProblemComponentNotRightKind pkgid cname) = "The list-bin command is for finding binaries, but the target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ " from the package " ++ prettyShow pkgid ++ "." where targetSelector = TargetComponent pkgid cname WholeComponent renderListBinProblem (TargetProblemIsSubComponent pkgid cname subtarget) = "The list-bin command can only find a binary as a whole, " ++ "not files or modules within them, but the target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ "." 
where targetSelector = TargetComponent pkgid cname subtarget renderListBinProblem (TargetProblemNoRightComps targetSelector) = "Cannot list-bin the target '" ++ showTargetSelector targetSelector ++ "' which refers to " ++ renderTargetSelector targetSelector ++ " because " ++ plural (targetSelectorPluralPkgs targetSelector) "it does" "they do" ++ " not contain any executables or foreign libraries." cabal-install-3.8.1.0/src/Distribution/Client/CmdOutdated.hs0000644000000000000000000004113707346545000022003 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE NamedFieldPuns #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.CmdOutdated -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Implementation of the 'outdated' command. Checks for outdated -- dependencies in the package description file or freeze file. ----------------------------------------------------------------------------- module Distribution.Client.CmdOutdated ( outdatedCommand, outdatedAction , ListOutdatedSettings(..), listOutdated ) where import Distribution.Client.Compat.Prelude import Distribution.Compat.Lens ( _1, _2 ) import Prelude () import Distribution.Client.Config ( SavedConfig(savedGlobalFlags, savedConfigureFlags , savedConfigureExFlags) ) import Distribution.Client.IndexUtils as IndexUtils import Distribution.Client.DistDirLayout ( defaultDistDirLayout , DistDirLayout(distProjectRootDirectory, distProjectFile) ) import Distribution.Client.ProjectConfig import Distribution.Client.ProjectConfig.Legacy ( instantiateProjectConfigSkeleton ) import Distribution.Client.ProjectFlags ( projectFlagsOptions, ProjectFlags(..), defaultProjectFlags , removeIgnoreProjectOption ) import Distribution.Client.RebuildMonad ( runRebuild ) import Distribution.Client.Sandbox ( loadConfigOrSandboxConfig ) import Distribution.Client.Setup import Distribution.Client.Targets ( 
userToPackageConstraint, UserConstraint ) import Distribution.Client.Types.SourcePackageDb as SourcePackageDb import Distribution.Solver.Types.PackageConstraint ( packageConstraintToDependency ) import Distribution.Client.Sandbox.PackageEnvironment ( loadUserConfig ) import Distribution.Utils.Generic ( safeLast, wrapText ) import Distribution.Package ( PackageName, packageVersion ) import Distribution.PackageDescription ( allBuildDepends ) import Distribution.PackageDescription.Configuration ( finalizePD ) import Distribution.Simple.Compiler ( Compiler, compilerInfo ) import Distribution.Simple.Setup ( optionVerbosity, trueArg ) import Distribution.Simple.Utils ( die', notice, debug, tryFindPackageDesc ) import Distribution.System ( Platform (..) ) import Distribution.Types.ComponentRequestedSpec ( ComponentRequestedSpec(..) ) import Distribution.Types.Dependency ( Dependency(..) ) import Distribution.Verbosity ( silent, normal ) import Distribution.Version ( Version, VersionInterval (..), VersionRange, LowerBound(..) , UpperBound(..) 
, asVersionIntervals, majorBoundVersion ) import Distribution.Types.PackageVersionConstraint ( PackageVersionConstraint (..), simplifyPackageVersionConstraint ) import Distribution.Simple.Flag ( Flag(..), flagToMaybe, fromFlagOrDefault, toFlag ) import Distribution.Simple.Command ( ShowOrParseArgs, OptionField, CommandUI(..), optArg, option, reqArg, liftOptionL ) import Distribution.Simple.PackageDescription ( readGenericPackageDescription ) import qualified Distribution.Compat.CharParsing as P import Distribution.ReadE ( parsecToReadE ) import Distribution.Client.HttpUtils import Distribution.Utils.NubList ( fromNubList ) import qualified Data.Set as S import System.Directory ( getCurrentDirectory, doesFileExist ) ------------------------------------------------------------------------------- -- Command ------------------------------------------------------------------------------- outdatedCommand :: CommandUI (ProjectFlags, OutdatedFlags) outdatedCommand = CommandUI { commandName = "outdated" , commandSynopsis = "Check for outdated dependencies." 
, commandDescription = Just $ \_ -> wrapText $ "Checks for outdated dependencies in the package description file " ++ "or freeze file" , commandNotes = Nothing , commandUsage = \pname -> "Usage: " ++ pname ++ " outdated [FLAGS] [PACKAGES]\n" , commandDefaultFlags = (defaultProjectFlags, defaultOutdatedFlags) , commandOptions = \showOrParseArgs -> map (liftOptionL _1) (removeIgnoreProjectOption (projectFlagsOptions showOrParseArgs)) ++ map (liftOptionL _2) (outdatedOptions showOrParseArgs) } ------------------------------------------------------------------------------- -- Flags ------------------------------------------------------------------------------- data IgnoreMajorVersionBumps = IgnoreMajorVersionBumpsNone | IgnoreMajorVersionBumpsAll | IgnoreMajorVersionBumpsSome [PackageName] instance Monoid IgnoreMajorVersionBumps where mempty = IgnoreMajorVersionBumpsNone mappend = (<>) instance Semigroup IgnoreMajorVersionBumps where IgnoreMajorVersionBumpsNone <> r = r l@IgnoreMajorVersionBumpsAll <> _ = l l@(IgnoreMajorVersionBumpsSome _) <> IgnoreMajorVersionBumpsNone = l (IgnoreMajorVersionBumpsSome _) <> r@IgnoreMajorVersionBumpsAll = r (IgnoreMajorVersionBumpsSome a) <> (IgnoreMajorVersionBumpsSome b) = IgnoreMajorVersionBumpsSome (a ++ b) data OutdatedFlags = OutdatedFlags { outdatedVerbosity :: Flag Verbosity , outdatedFreezeFile :: Flag Bool , outdatedNewFreezeFile :: Flag Bool , outdatedSimpleOutput :: Flag Bool , outdatedExitCode :: Flag Bool , outdatedQuiet :: Flag Bool , outdatedIgnore :: [PackageName] , outdatedMinor :: Maybe IgnoreMajorVersionBumps } defaultOutdatedFlags :: OutdatedFlags defaultOutdatedFlags = OutdatedFlags { outdatedVerbosity = toFlag normal , outdatedFreezeFile = mempty , outdatedNewFreezeFile = mempty , outdatedSimpleOutput = mempty , outdatedExitCode = mempty , outdatedQuiet = mempty , outdatedIgnore = mempty , outdatedMinor = mempty } outdatedOptions :: ShowOrParseArgs -> [OptionField OutdatedFlags] outdatedOptions _showOrParseArgs 
= [ optionVerbosity outdatedVerbosity (\v flags -> flags {outdatedVerbosity = v}) , option [] ["freeze-file", "v1-freeze-file"] "Act on the freeze file" outdatedFreezeFile (\v flags -> flags {outdatedFreezeFile = v}) trueArg , option [] ["v2-freeze-file", "new-freeze-file"] "Act on the new-style freeze file (default: cabal.project.freeze)" outdatedNewFreezeFile (\v flags -> flags {outdatedNewFreezeFile = v}) trueArg , option [] ["simple-output"] "Only print names of outdated dependencies, one per line" outdatedSimpleOutput (\v flags -> flags {outdatedSimpleOutput = v}) trueArg , option [] ["exit-code"] "Exit with non-zero when there are outdated dependencies" outdatedExitCode (\v flags -> flags {outdatedExitCode = v}) trueArg , option ['q'] ["quiet"] "Don't print any output. Implies '--exit-code' and '-v0'" outdatedQuiet (\v flags -> flags {outdatedQuiet = v}) trueArg , option [] ["ignore"] "Packages to ignore" outdatedIgnore (\v flags -> flags {outdatedIgnore = v}) (reqArg "PKGS" pkgNameListParser (map prettyShow)) , option [] ["minor"] "Ignore major version bumps for these packages" outdatedMinor (\v flags -> flags {outdatedMinor = v}) ( optArg "PKGS" ignoreMajorVersionBumpsParser (Just IgnoreMajorVersionBumpsAll) ignoreMajorVersionBumpsPrinter ) ] where ignoreMajorVersionBumpsPrinter :: Maybe IgnoreMajorVersionBumps -> [Maybe String] ignoreMajorVersionBumpsPrinter Nothing = [] ignoreMajorVersionBumpsPrinter (Just IgnoreMajorVersionBumpsNone)= [] ignoreMajorVersionBumpsPrinter (Just IgnoreMajorVersionBumpsAll) = [Nothing] ignoreMajorVersionBumpsPrinter (Just (IgnoreMajorVersionBumpsSome pkgs)) = map (Just . prettyShow) pkgs ignoreMajorVersionBumpsParser = (Just . 
IgnoreMajorVersionBumpsSome) `fmap` pkgNameListParser pkgNameListParser = parsecToReadE ("Couldn't parse the list of package names: " ++) (fmap toList (P.sepByNonEmpty parsec (P.char ','))) ------------------------------------------------------------------------------- -- Action ------------------------------------------------------------------------------- -- | Entry point for the 'outdated' command. outdatedAction :: (ProjectFlags, OutdatedFlags) -> [String] -> GlobalFlags -> IO () outdatedAction (ProjectFlags{flagProjectFileName}, OutdatedFlags{..}) _targetStrings globalFlags = do config <- loadConfigOrSandboxConfig verbosity globalFlags let globalFlags' = savedGlobalFlags config `mappend` globalFlags configFlags = savedConfigureFlags config withRepoContext verbosity globalFlags' $ \repoContext -> do when (not newFreezeFile && isJust mprojectFile) $ die' verbosity $ "--project-file must only be used with --v2-freeze-file." sourcePkgDb <- IndexUtils.getSourcePackages verbosity repoContext (comp, platform, _progdb) <- configCompilerAux' configFlags deps <- if freezeFile then depsFromFreezeFile verbosity else if newFreezeFile then do httpTransport <- configureTransport verbosity (fromNubList . globalProgPathExtra $ globalFlags) (flagToMaybe . globalHttpTransport $ globalFlags) depsFromNewFreezeFile verbosity httpTransport comp platform mprojectFile else do depsFromPkgDesc verbosity comp platform debug verbosity $ "Dependencies loaded: " ++ intercalate ", " (map prettyShow deps) let outdatedDeps = listOutdated deps sourcePkgDb (ListOutdatedSettings ignorePred minorPred) when (not quiet) $ showResult verbosity outdatedDeps simpleOutput if exitCode && (not . 
null $ outdatedDeps) then exitFailure else return () where verbosity = if quiet then silent else fromFlagOrDefault normal outdatedVerbosity freezeFile = fromFlagOrDefault False outdatedFreezeFile newFreezeFile = fromFlagOrDefault False outdatedNewFreezeFile mprojectFile = flagToMaybe flagProjectFileName simpleOutput = fromFlagOrDefault False outdatedSimpleOutput quiet = fromFlagOrDefault False outdatedQuiet exitCode = fromFlagOrDefault quiet outdatedExitCode ignorePred = let ignoreSet = S.fromList outdatedIgnore in \pkgname -> pkgname `S.member` ignoreSet minorPred = case outdatedMinor of Nothing -> const False Just IgnoreMajorVersionBumpsNone -> const False Just IgnoreMajorVersionBumpsAll -> const True Just (IgnoreMajorVersionBumpsSome pkgs) -> let minorSet = S.fromList pkgs in \pkgname -> pkgname `S.member` minorSet -- | Print either the list of all outdated dependencies, or a message -- that there are none. showResult :: Verbosity -> [(PackageVersionConstraint,Version)] -> Bool -> IO () showResult verbosity outdatedDeps simpleOutput = if not . null $ outdatedDeps then do when (not simpleOutput) $ notice verbosity "Outdated dependencies:" for_ outdatedDeps $ \(d@(PackageVersionConstraint pn _), v) -> let outdatedDep = if simpleOutput then prettyShow pn else prettyShow d ++ " (latest: " ++ prettyShow v ++ ")" in notice verbosity outdatedDep else notice verbosity "All dependencies are up to date." -- | Convert a list of 'UserConstraint's to a 'Dependency' list. userConstraintsToDependencies :: [UserConstraint] -> [PackageVersionConstraint] userConstraintsToDependencies ucnstrs = mapMaybe (packageConstraintToDependency . userToPackageConstraint) ucnstrs -- | Read the list of dependencies from the freeze file. depsFromFreezeFile :: Verbosity -> IO [PackageVersionConstraint] depsFromFreezeFile verbosity = do cwd <- getCurrentDirectory userConfig <- loadUserConfig verbosity cwd Nothing let ucnstrs = map fst . configExConstraints . 
savedConfigureExFlags $ userConfig deps = userConstraintsToDependencies ucnstrs debug verbosity "Reading the list of dependencies from the freeze file" return deps -- | Read the list of dependencies from the new-style freeze file. depsFromNewFreezeFile :: Verbosity -> HttpTransport -> Compiler -> Platform -> Maybe FilePath -> IO [PackageVersionConstraint] depsFromNewFreezeFile verbosity httpTransport compiler (Platform arch os) mprojectFile = do projectRoot <- either throwIO return =<< findProjectRoot Nothing mprojectFile let distDirLayout = defaultDistDirLayout projectRoot {- TODO: Support dist dir override -} Nothing projectConfig <- runRebuild (distProjectRootDirectory distDirLayout) $ do pcs <- readProjectLocalFreezeConfig verbosity httpTransport distDirLayout pure $ instantiateProjectConfigSkeleton os arch (compilerInfo compiler) mempty pcs let ucnstrs = map fst . projectConfigConstraints . projectConfigShared $ projectConfig deps = userConstraintsToDependencies ucnstrs freezeFile = distProjectFile distDirLayout "freeze" freezeFileExists <- doesFileExist freezeFile unless freezeFileExists $ die' verbosity $ "Couldn't find a freeze file expected at: " ++ freezeFile ++ "\n\n" ++ "We are looking for this file because you supplied '--project-file' or '--v2-freeze-file'. " ++ "When one of these flags is given, we try to read the dependencies from a freeze file. " ++ "If it is undesired behaviour, you should not use these flags, otherwise please generate " ++ "a freeze file via 'cabal freeze'." debug verbosity $ "Reading the list of dependencies from the new-style freeze file " ++ freezeFile return deps -- | Read the list of dependencies from the package description. 
depsFromPkgDesc :: Verbosity -> Compiler -> Platform -> IO [PackageVersionConstraint] depsFromPkgDesc verbosity comp platform = do cwd <- getCurrentDirectory path <- tryFindPackageDesc verbosity cwd gpd <- readGenericPackageDescription verbosity path let cinfo = compilerInfo comp epd = finalizePD mempty (ComponentRequestedSpec True True) (const True) platform cinfo [] gpd case epd of Left _ -> die' verbosity "finalizePD failed" Right (pd, _) -> do let bd = allBuildDepends pd debug verbosity "Reading the list of dependencies from the package description" return $ map toPVC bd where toPVC (Dependency pn vr _) = PackageVersionConstraint pn vr -- | Various knobs for customising the behaviour of 'listOutdated'. data ListOutdatedSettings = ListOutdatedSettings { -- | Should this package be ignored? listOutdatedIgnorePred :: PackageName -> Bool , -- | Should major version bumps be ignored for this package? listOutdatedMinorPred :: PackageName -> Bool } -- | Find all outdated dependencies. listOutdated :: [PackageVersionConstraint] -> SourcePackageDb -> ListOutdatedSettings -> [(PackageVersionConstraint, Version)] listOutdated deps sourceDb (ListOutdatedSettings ignorePred minorPred) = mapMaybe isOutdated $ map simplifyPackageVersionConstraint deps where isOutdated :: PackageVersionConstraint -> Maybe (PackageVersionConstraint, Version) isOutdated dep@(PackageVersionConstraint pname vr) | ignorePred pname = Nothing | otherwise = let this = map packageVersion $ SourcePackageDb.lookupDependency sourceDb pname vr latest = lookupLatest dep in (\v -> (dep, v)) `fmap` isOutdated' this latest isOutdated' :: [Version] -> [Version] -> Maybe Version isOutdated' [] _ = Nothing isOutdated' _ [] = Nothing isOutdated' this latest = let this' = maximum this latest' = maximum latest in if this' < latest' then Just latest' else Nothing lookupLatest :: PackageVersionConstraint -> [Version] lookupLatest (PackageVersionConstraint pname vr) | minorPred pname = map packageVersion $ 
SourcePackageDb.lookupDependency sourceDb pname (relaxMinor vr) | otherwise = map packageVersion $ SourcePackageDb.lookupPackageName sourceDb pname relaxMinor :: VersionRange -> VersionRange relaxMinor vr = let vis = asVersionIntervals vr in maybe vr relax (safeLast vis) where relax (VersionInterval (LowerBound v0 _) upper) = case upper of NoUpperBound -> vr UpperBound _v1 _ -> majorBoundVersion v0 cabal-install-3.8.1.0/src/Distribution/Client/CmdRepl.hs0000644000000000000000000005513107346545000021133 0ustar0000000000000000{-# LANGUAGE MultiWayIf #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TupleSections #-} -- | cabal-install CLI command: repl -- module Distribution.Client.CmdRepl ( -- * The @repl@ CLI and action replCommand, replAction, -- * Internals exposed for testing matchesMultipleProblem, selectPackageTargets, selectComponentTarget ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Compat.Lens import qualified Distribution.Types.Lens as L import Distribution.Client.DistDirLayout ( DistDirLayout(..) ) import Distribution.Client.NixStyleOptions ( NixStyleFlags (..), nixStyleOptions, defaultNixStyleFlags ) import Distribution.Client.CmdErrorMessages ( renderTargetSelector, showTargetSelector, renderTargetProblem, targetSelectorRefersToPkgs, renderComponentKind, renderListCommaAnd, renderListSemiAnd, componentKind, sortGroupOn, Plural(..) ) import Distribution.Client.TargetProblem ( TargetProblem(..) 
) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.ProjectBuilding ( rebuildTargetsDryRun, improveInstallPlanWithUpToDatePackages ) import Distribution.Client.ProjectOrchestration import Distribution.Client.ProjectPlanning ( ElaboratedSharedConfig(..), ElaboratedInstallPlan ) import Distribution.Client.ProjectPlanning.Types ( elabOrderExeDependencies ) import Distribution.Client.ScriptUtils ( AcceptNoTargets(..), withContextAndSelectors, TargetContext(..) , updateContextAndWriteProjectFile, updateContextAndWriteProjectFile' , fakeProjectSourcePackage, lSrcpkgDescription ) import Distribution.Client.Setup ( GlobalFlags, ConfigFlags(..) ) import qualified Distribution.Client.Setup as Client import Distribution.Client.Types ( PackageSpecifier(..), UnresolvedSourcePackage ) import Distribution.Simple.Setup ( fromFlagOrDefault, ReplOptions(..), replOptions , Flag(..), toFlag, falseArg ) import Distribution.Simple.Command ( CommandUI(..), liftOptionL, usageAlternatives, option , ShowOrParseArgs, OptionField, reqArg ) import Distribution.Compiler ( CompilerFlavor(GHC) ) import Distribution.Simple.Compiler ( Compiler, compilerCompatVersion ) import Distribution.Package ( Package(..), packageName, UnitId, installedUnitId ) import Distribution.Parsec ( parsecCommaList ) import Distribution.ReadE ( ReadE, parsecToReadE ) import Distribution.Solver.Types.SourcePackage ( SourcePackage(..) 
) import Distribution.Types.BuildInfo ( BuildInfo(..), emptyBuildInfo ) import Distribution.Types.ComponentName ( componentNameString ) import Distribution.Types.CondTree ( CondTree(..), traverseCondTreeC ) import Distribution.Types.Dependency ( Dependency(..), mainLibSet ) import Distribution.Types.Library ( Library(..), emptyLibrary ) import Distribution.Types.Version ( Version, mkVersion ) import Distribution.Types.VersionRange ( anyVersion ) import Distribution.Utils.Generic ( safeHead ) import Distribution.Verbosity ( normal, lessVerbose ) import Distribution.Simple.Utils ( wrapText, die', debugNoWrap ) import Language.Haskell.Extension ( Language(..) ) import Data.List ( (\\) ) import qualified Data.Map as Map import qualified Data.Set as Set import System.Directory ( doesFileExist, getCurrentDirectory ) import System.FilePath ( () ) data EnvFlags = EnvFlags { envPackages :: [Dependency] , envIncludeTransitive :: Flag Bool } defaultEnvFlags :: EnvFlags defaultEnvFlags = EnvFlags { envPackages = [] , envIncludeTransitive = toFlag True } envOptions :: ShowOrParseArgs -> [OptionField EnvFlags] envOptions _ = [ option ['b'] ["build-depends"] "Include additional packages in the environment presented to GHCi." envPackages (\p flags -> flags { envPackages = p ++ envPackages flags }) (reqArg "DEPENDENCIES" dependenciesReadE (fmap prettyShow :: [Dependency] -> [String])) , option [] ["no-transitive-deps"] "Don't automatically include transitive dependencies of requested packages." 
envIncludeTransitive (\p flags -> flags { envIncludeTransitive = p }) falseArg ] where dependenciesReadE :: ReadE [Dependency] dependenciesReadE = parsecToReadE ("couldn't parse dependencies: " ++) (parsecCommaList parsec) replCommand :: CommandUI (NixStyleFlags (ReplOptions, EnvFlags)) replCommand = Client.installCommand { commandName = "v2-repl", commandSynopsis = "Open an interactive session for the given component.", commandUsage = usageAlternatives "v2-repl" [ "[TARGET] [FLAGS]" ], commandDescription = Just $ \_ -> wrapText $ "Open an interactive session for a component within the project. The " ++ "available targets are the same as for the 'v2-build' command: " ++ "individual components within packages in the project, including " ++ "libraries, executables, test-suites or benchmarks. Packages can " ++ "also be specified in which case the library component in the " ++ "package will be used, or the (first listed) executable in the " ++ "package if there is no library.\n\n" ++ "Dependencies are built or rebuilt as necessary. 
Additional " ++ "configuration flags can be specified on the command line and these " ++ "extend the project configuration from the 'cabal.project', " ++ "'cabal.project.local' and other files.", commandNotes = Just $ \pname -> "Examples, open an interactive session:\n" ++ " " ++ pname ++ " v2-repl\n" ++ " for the default component in the package in the current directory\n" ++ " " ++ pname ++ " v2-repl pkgname\n" ++ " for the default component in the package named 'pkgname'\n" ++ " " ++ pname ++ " v2-repl ./pkgfoo\n" ++ " for the default component in the package in the ./pkgfoo directory\n" ++ " " ++ pname ++ " v2-repl cname\n" ++ " for the component named 'cname'\n" ++ " " ++ pname ++ " v2-repl pkgname:cname\n" ++ " for the component 'cname' in the package 'pkgname'\n\n" ++ " " ++ pname ++ " v2-repl --build-depends lens\n" ++ " add the latest version of the library 'lens' to the default component " ++ "(or no componentif there is no project present)\n" ++ " " ++ pname ++ " v2-repl --build-depends \"lens >= 4.15 && < 4.18\"\n" ++ " add a version (constrained between 4.15 and 4.18) of the library 'lens' " ++ "to the default component (or no component if there is no project present)\n", commandDefaultFlags = defaultNixStyleFlags (mempty, defaultEnvFlags), commandOptions = nixStyleOptions $ \showOrParseArgs -> map (liftOptionL _1) (replOptions showOrParseArgs) ++ map (liftOptionL _2) (envOptions showOrParseArgs) } -- | The @repl@ command is very much like @build@. It brings the install plan -- up to date, selects that part of the plan needed by the given or implicit -- repl target and then executes the plan. -- -- Compared to @build@ the difference is that only one target is allowed -- (given or implicit) and the target type is repl rather than build. The -- general plan execution infrastructure handles both build and repl targets. 
-- -- For more details on how this works, see the module -- "Distribution.Client.ProjectOrchestration" -- replAction :: NixStyleFlags (ReplOptions, EnvFlags) -> [String] -> GlobalFlags -> IO () replAction flags@NixStyleFlags { extraFlags = (replOpts, envFlags), ..} targetStrings globalFlags = withContextAndSelectors AcceptNoTargets (Just LibKind) flags targetStrings globalFlags $ \targetCtx ctx targetSelectors -> do when (buildSettingOnlyDeps (buildSettings ctx)) $ die' verbosity $ "The repl command does not support '--only-dependencies'. " ++ "You may wish to use 'build --only-dependencies' and then " ++ "use 'repl'." let projectRoot = distProjectRootDirectory $ distDirLayout ctx baseCtx <- case targetCtx of ProjectContext -> return ctx GlobalContext -> do unless (null targetStrings) $ die' verbosity $ "'repl' takes no arguments or a script argument outside a project: " ++ unwords targetStrings let sourcePackage = fakeProjectSourcePackage projectRoot & lSrcpkgDescription . L.condLibrary .~ Just (CondNode library [baseDep] []) library = emptyLibrary { libBuildInfo = lBuildInfo } lBuildInfo = emptyBuildInfo { targetBuildDepends = [baseDep] , defaultLanguage = Just Haskell2010 } baseDep = Dependency "base" anyVersion mainLibSet updateContextAndWriteProjectFile' ctx sourcePackage ScriptContext scriptPath scriptExecutable -> do unless (length targetStrings == 1) $ die' verbosity $ "'repl' takes a single argument which should be a script: " ++ unwords targetStrings existsScriptPath <- doesFileExist scriptPath unless existsScriptPath $ die' verbosity $ "'repl' takes a single argument which should be a script: " ++ unwords targetStrings updateContextAndWriteProjectFile ctx scriptPath scriptExecutable (originalComponent, baseCtx') <- if null (envPackages envFlags) then return (Nothing, baseCtx) else -- Unfortunately, the best way to do this is to let the normal solver -- help us resolve the targets, but that isn't ideal for performance, -- especially in the no-project 
case. withInstallPlan (lessVerbose verbosity) baseCtx $ \elaboratedPlan _ -> do -- targets should be non-empty map, but there's no NonEmptyMap yet. targets <- validatedTargets elaboratedPlan targetSelectors let (unitId, _) = fromMaybe (error "panic: targets should be non-empty") $ safeHead $ Map.toList targets originalDeps = installedUnitId <$> InstallPlan.directDeps elaboratedPlan unitId oci = OriginalComponentInfo unitId originalDeps pkgId = fromMaybe (error $ "cannot find " ++ prettyShow unitId) $ packageId <$> InstallPlan.lookup elaboratedPlan unitId baseCtx' = addDepsToProjectTarget (envPackages envFlags) pkgId baseCtx return (Just oci, baseCtx') -- Now, we run the solver again with the added packages. While the graph -- won't actually reflect the addition of transitive dependencies, -- they're going to be available already and will be offered to the REPL -- and that's good enough. -- -- In addition, to avoid a *third* trip through the solver, we are -- replicating the second half of 'runProjectPreBuildPhase' by hand -- here. (buildCtx, compiler, replOpts') <- withInstallPlan verbosity baseCtx' $ \elaboratedPlan elaboratedShared' -> do let ProjectBaseContext{..} = baseCtx' -- Recalculate with updated project. 
targets <- validatedTargets elaboratedPlan targetSelectors let elaboratedPlan' = pruneInstallPlanToTargets TargetActionRepl targets elaboratedPlan includeTransitive = fromFlagOrDefault True (envIncludeTransitive envFlags) pkgsBuildStatus <- rebuildTargetsDryRun distDirLayout elaboratedShared' elaboratedPlan' let elaboratedPlan'' = improveInstallPlanWithUpToDatePackages pkgsBuildStatus elaboratedPlan' debugNoWrap verbosity (InstallPlan.showInstallPlan elaboratedPlan'') let buildCtx = ProjectBuildContext { elaboratedPlanOriginal = elaboratedPlan , elaboratedPlanToExecute = elaboratedPlan'' , elaboratedShared = elaboratedShared' , pkgsBuildStatus , targetsMap = targets } ElaboratedSharedConfig { pkgConfigCompiler = compiler } = elaboratedShared' replFlags = case originalComponent of Just oci -> generateReplFlags includeTransitive elaboratedPlan' oci Nothing -> [] return (buildCtx, compiler, replOpts & lReplOptionsFlags %~ (++ replFlags)) replOpts'' <- case targetCtx of ProjectContext -> return replOpts' _ -> usingGhciScript compiler projectRoot replOpts' let buildCtx' = buildCtx & lElaboratedShared . lPkgConfigReplOptions .~ replOpts'' printPlan verbosity baseCtx' buildCtx' buildOutcomes <- runProjectBuildPhase verbosity baseCtx' buildCtx' runProjectPostBuildPhase verbosity baseCtx' buildCtx' buildOutcomes where verbosity = fromFlagOrDefault normal (configVerbosity configFlags) validatedTargets elaboratedPlan targetSelectors = do -- Interpret the targets on the command line as repl targets -- (as opposed to say build or haddock targets). targets <- either (reportTargetProblems verbosity) return $ resolveTargets selectPackageTargets selectComponentTarget elaboratedPlan Nothing targetSelectors -- Reject multiple targets, or at least targets in different -- components. It is ok to have two module/file targets in the -- same component, but not two that live in different components. 
when (Set.size (distinctTargetComponents targets) > 1) $ reportTargetProblems verbosity [multipleTargetsProblem targets] return targets data OriginalComponentInfo = OriginalComponentInfo { ociUnitId :: UnitId , ociOriginalDeps :: [UnitId] } deriving (Show) addDepsToProjectTarget :: [Dependency] -> PackageId -> ProjectBaseContext -> ProjectBaseContext addDepsToProjectTarget deps pkgId ctx = (\p -> ctx { localPackages = p }) . fmap addDeps . localPackages $ ctx where addDeps :: PackageSpecifier UnresolvedSourcePackage -> PackageSpecifier UnresolvedSourcePackage addDeps (SpecificSourcePackage pkg) | packageId pkg /= pkgId = SpecificSourcePackage pkg | SourcePackage{..} <- pkg = SpecificSourcePackage $ pkg { srcpkgDescription = srcpkgDescription & (\f -> L.allCondTrees $ traverseCondTreeC f) %~ (deps ++) } addDeps spec = spec generateReplFlags :: Bool -> ElaboratedInstallPlan -> OriginalComponentInfo -> [String] generateReplFlags includeTransitive elaboratedPlan OriginalComponentInfo{..} = flags where exeDeps :: [UnitId] exeDeps = foldMap (InstallPlan.foldPlanPackage (const []) elabOrderExeDependencies) (InstallPlan.dependencyClosure elaboratedPlan [ociUnitId]) deps, deps', trans, trans' :: [UnitId] flags :: [String] deps = installedUnitId <$> InstallPlan.directDeps elaboratedPlan ociUnitId deps' = deps \\ ociOriginalDeps trans = installedUnitId <$> InstallPlan.dependencyClosure elaboratedPlan deps' trans' = trans \\ ociOriginalDeps flags = fmap (("-package-id " ++) . prettyShow) . (\\ exeDeps) $ if includeTransitive then trans' else deps' -- | Add repl options to ensure the repl actually starts in the current working directory. -- -- In a global or script context, when we are using a fake package, @cabal repl@ -- starts in the fake package directory instead of the directory it was called from, -- so we need to tell ghci to change back to the correct directory. 
--
-- The @-ghci-script@ flag is path to the ghci script responsible for changing to the
-- correct directory. Only works on GHC >= 7.6, though. 🙁
usingGhciScript :: Compiler -> FilePath -> ReplOptions -> IO ReplOptions
usingGhciScript compiler projectRoot replOpts
  | compilerCompatVersion GHC compiler >= Just minGhciScriptVersion = do
      -- NOTE: restored the (</>) path-join operator, which had been stripped
      -- from this expression (cf. the mangled "import System.FilePath ( () )"
      -- import list); the bare juxtaposition would not type-check.
      let ghciScriptPath = projectRoot </> "setcwd.ghci"
      cwd <- getCurrentDirectory
      -- The script just restores the caller's working directory; GHC runs it
      -- on startup via -ghci-script (argument is concatenated to the flag).
      writeFile ghciScriptPath (":cd " ++ cwd)
      return $ replOpts & lReplOptionsFlags %~ (("-ghci-script" ++ ghciScriptPath) :)
  | otherwise = return replOpts

-- | First version of GHC where GHCi supported the flag we need.
-- https://downloads.haskell.org/~ghc/7.6.1/docs/html/users_guide/release-7-6-1.html
minGhciScriptVersion :: Version
minGhciScriptVersion = mkVersion [7, 6]

-- | This defines what a 'TargetSelector' means for the @repl@ command.
-- It selects the 'AvailableTarget's that the 'TargetSelector' refers to,
-- or otherwise classifies the problem.
--
-- For repl we select:
--
-- * the library if there is only one and it's buildable; or
--
-- * the exe if there is only one and it's buildable; or
--
-- * any other buildable component.
--
-- Fail if there are no buildable lib\/exe components, or if there are
-- multiple libs or exes.
--
selectPackageTargets  :: TargetSelector
                      -> [AvailableTarget k] -> Either ReplTargetProblem [k]
selectPackageTargets targetSelector targets

    -- If there is exactly one buildable library then we select that
  | [target] <- targetsLibsBuildable
  = Right [target]

    -- but fail if there are multiple buildable libraries.
  | not (null targetsLibsBuildable)
  = Left (matchesMultipleProblem targetSelector targetsLibsBuildable')

    -- If there is exactly one buildable executable then we select that
  | [target] <- targetsExesBuildable
  = Right [target]

    -- but fail if there are multiple buildable executables.
  | not (null targetsExesBuildable)
  = Left (matchesMultipleProblem targetSelector targetsExesBuildable')

    -- If there is exactly one other target then we select that
  | [target] <- targetsBuildable
  = Right [target]

    -- but fail if there are multiple such targets
  | not (null targetsBuildable)
  = Left (matchesMultipleProblem targetSelector targetsBuildable')

    -- If there are targets but none are buildable then we report those
  | not (null targets)
  = Left (TargetProblemNoneEnabled targetSelector targets')

    -- If there are no targets at all then we report that
  | otherwise
  = Left (TargetProblemNoTargets targetSelector)
  where
    targets'                = forgetTargetsDetail targets
    (targetsLibsBuildable,
     targetsLibsBuildable') = selectBuildableTargets'
                            . filterTargetsKind LibKind
                            $ targets
    (targetsExesBuildable,
     targetsExesBuildable') = selectBuildableTargets'
                            . filterTargetsKind ExeKind
                            $ targets
    (targetsBuildable,
     targetsBuildable')     = selectBuildableTargetsWith'
                                (isRequested targetSelector) targets

    -- When there's a target filter like "pkg:tests" then we do select tests,
    -- but if it's just a target like "pkg" then we don't build tests unless
    -- they are requested by default (i.e. by using --enable-tests)
    isRequested (TargetAllPackages  Nothing) TargetNotRequestedByDefault = False
    isRequested (TargetPackage _ _  Nothing) TargetNotRequestedByDefault = False
    isRequested _ _ = True

-- | For a 'TargetComponent' 'TargetSelector', check if the component can be
-- selected.
--
-- For the @repl@ command we just need the basic checks on being buildable etc.
--
selectComponentTarget :: SubComponentTarget
                      -> AvailableTarget k -> Either ReplTargetProblem k
selectComponentTarget = selectComponentTargetBasic

-- | Repl-specific target problems, wrapped into the generic 'TargetProblem'
-- via 'CustomTargetProblem'.
data ReplProblem
  = -- | A single 'TargetSelector' matches multiple targets
    TargetProblemMatchesMultiple TargetSelector [AvailableTarget ()]

    -- | Multiple 'TargetSelector's match multiple targets
  | TargetProblemMultipleTargets TargetsMap
  deriving (Eq, Show)

-- | The various error conditions that can occur when matching a
-- 'TargetSelector' against 'AvailableTarget's for the @repl@ command.
--
type ReplTargetProblem = TargetProblem ReplProblem

matchesMultipleProblem
  :: TargetSelector
  -> [AvailableTarget ()]
  -> ReplTargetProblem
matchesMultipleProblem targetSelector targetsExesBuildable =
  CustomTargetProblem $ TargetProblemMatchesMultiple targetSelector targetsExesBuildable

multipleTargetsProblem
  :: TargetsMap
  -> ReplTargetProblem
multipleTargetsProblem = CustomTargetProblem . TargetProblemMultipleTargets

-- | Abort with a rendered message, one line per problem.
reportTargetProblems :: Verbosity -> [TargetProblem ReplProblem] -> IO a
reportTargetProblems verbosity =
    die' verbosity . unlines . map renderReplTargetProblem

renderReplTargetProblem :: TargetProblem ReplProblem -> String
renderReplTargetProblem = renderTargetProblem "open a repl for" renderReplProblem

-- | Render the repl-specific problems as user-facing error text.
renderReplProblem :: ReplProblem -> String
renderReplProblem (TargetProblemMatchesMultiple targetSelector targets) =
    "Cannot open a repl for multiple components at once. The target '"
 ++ showTargetSelector targetSelector ++ "' refers to "
 ++ renderTargetSelector targetSelector ++ " which "
 ++ (if targetSelectorRefersToPkgs targetSelector then "includes " else "are ")
 ++ renderListSemiAnd
      [ "the " ++ renderComponentKind Plural ckind ++ " " ++
        renderListCommaAnd
          [ maybe (prettyShow pkgname) prettyShow (componentNameString cname)
          | t <- ts
          , let cname   = availableTargetComponentName t
                pkgname = packageName (availableTargetPackageId t)
          ]
      | (ckind, ts) <- sortGroupOn availableTargetComponentKind targets
      ]
 ++ ".\n\n" ++ explanationSingleComponentLimitation
  where
    availableTargetComponentKind = componentKind
                                 . availableTargetComponentName

renderReplProblem (TargetProblemMultipleTargets selectorMap) =
    "Cannot open a repl for multiple components at once. The targets "
 ++ renderListCommaAnd
      [ "'" ++ showTargetSelector ts ++ "'"
      | ts <- uniqueTargetSelectors selectorMap ]
    -- Fixed: previously appended both "components." and ".\n\n", printing a
    -- doubled period ("components..").
 ++ " refer to different components."
 ++ "\n\n" ++ explanationSingleComponentLimitation

explanationSingleComponentLimitation :: String
explanationSingleComponentLimitation =
    "The reason for this limitation is that current versions of ghci do not "
 ++ "support loading multiple components as source. Load just one component "
 ++ "and when you make changes to a dependent component then quit and reload."
-- Lenses lElaboratedShared :: Lens' ProjectBuildContext ElaboratedSharedConfig lElaboratedShared f s = fmap (\x -> s { elaboratedShared = x }) (f (elaboratedShared s)) {-# inline lElaboratedShared #-} lPkgConfigReplOptions :: Lens' ElaboratedSharedConfig ReplOptions lPkgConfigReplOptions f s = fmap (\x -> s { pkgConfigReplOptions = x }) (f (pkgConfigReplOptions s)) {-# inline lPkgConfigReplOptions #-} lReplOptionsFlags :: Lens' ReplOptions [String] lReplOptionsFlags f s = fmap (\x -> s { replOptionsFlags = x }) (f (replOptionsFlags s)) {-# inline lReplOptionsFlags #-} cabal-install-3.8.1.0/src/Distribution/Client/CmdRun.hs0000644000000000000000000004546207346545000021003 0ustar0000000000000000{-# LANGUAGE LambdaCase #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TupleSections #-} -- | cabal-install CLI command: run -- module Distribution.Client.CmdRun ( -- * The @run@ CLI and action runCommand, runAction, handleShebang, validScript, -- * Internals exposed for testing matchesMultipleProblem, noExesProblem, selectPackageTargets, selectComponentTarget ) where import Prelude () import Distribution.Client.Compat.Prelude hiding (toList) import Distribution.Client.ProjectOrchestration import Distribution.Client.CmdErrorMessages ( renderTargetSelector, showTargetSelector, renderTargetProblem, renderTargetProblemNoTargets, plural, targetSelectorPluralPkgs, targetSelectorFilter, renderListCommaAnd ) import Distribution.Client.TargetProblem ( TargetProblem (..) ) import Distribution.Client.NixStyleOptions ( NixStyleFlags (..), nixStyleOptions, defaultNixStyleFlags ) import Distribution.Client.Setup ( GlobalFlags(..), ConfigFlags(..) 
) import Distribution.Client.GlobalFlags ( defaultGlobalFlags ) import Distribution.Simple.Flag ( fromFlagOrDefault ) import Distribution.Simple.Command ( CommandUI(..), usageAlternatives ) import Distribution.Types.ComponentName ( showComponentName ) import Distribution.Verbosity ( normal, silent ) import Distribution.Simple.Utils ( wrapText, die', info, notice ) import Distribution.Client.ProjectPlanning ( ElaboratedConfiguredPackage(..) , ElaboratedInstallPlan, binDirectoryFor ) import Distribution.Client.ProjectPlanning.Types ( dataDirsEnvironmentForPlan ) import Distribution.Client.InstallPlan ( toList, foldPlanPackage ) import Distribution.Types.UnqualComponentName ( UnqualComponentName, unUnqualComponentName ) import Distribution.Simple.Program.Run ( runProgramInvocation, ProgramInvocation(..), emptyProgramInvocation ) import Distribution.Types.UnitId ( UnitId ) import Distribution.Client.ScriptUtils ( AcceptNoTargets(..), withContextAndSelectors, updateContextAndWriteProjectFile, TargetContext(..) ) import qualified Data.Set as Set import System.Directory ( doesFileExist ) import System.FilePath ( (), isValid, isPathSeparator ) runCommand :: CommandUI (NixStyleFlags ()) runCommand = CommandUI { commandName = "v2-run" , commandSynopsis = "Run an executable." , commandUsage = usageAlternatives "v2-run" [ "[TARGET] [FLAGS] [-- EXECUTABLE_FLAGS]" ] , commandDescription = Just $ \pname -> wrapText $ "Runs the specified executable-like component (an executable, a test, " ++ "or a benchmark), first ensuring it is up to date.\n\n" ++ "Any executable-like component in any package in the project can be " ++ "specified. A package can be specified if contains just one " ++ "executable-like, preferring a single executable. 
The default is to " ++ "use the package in the current directory if it contains just one " ++ "executable-like.\n\n" ++ "Extra arguments can be passed to the program, but use '--' to " ++ "separate arguments for the program from arguments for " ++ pname ++ ". The executable is run in an environment where it can find its " ++ "data files inplace in the build tree.\n\n" ++ "Dependencies are built or rebuilt as necessary. Additional " ++ "configuration flags can be specified on the command line and these " ++ "extend the project configuration from the 'cabal.project', " ++ "'cabal.project.local' and other files." , commandNotes = Just $ \pname -> "Examples:\n" ++ " " ++ pname ++ " v2-run\n" ++ " Run the executable-like in the package in the current directory\n" ++ " " ++ pname ++ " v2-run foo-tool\n" ++ " Run the named executable-like (in any package in the project)\n" ++ " " ++ pname ++ " v2-run pkgfoo:foo-tool\n" ++ " Run the executable-like 'foo-tool' in the package 'pkgfoo'\n" ++ " " ++ pname ++ " v2-run foo -O2 -- dothing --fooflag\n" ++ " Build with '-O2' and run the program, passing it extra arguments.\n" , commandDefaultFlags = defaultNixStyleFlags () , commandOptions = nixStyleOptions (const []) } -- | The @run@ command runs a specified executable-like component, building it -- first if necessary. The component can be either an executable, a test, -- or a benchmark. This is particularly useful for passing arguments to -- exes/tests/benchs by simply appending them after a @--@. 
-- -- For more details on how this works, see the module -- "Distribution.Client.ProjectOrchestration" -- runAction :: NixStyleFlags () -> [String] -> GlobalFlags -> IO () runAction flags@NixStyleFlags {..} targetAndArgs globalFlags = withContextAndSelectors RejectNoTargets (Just ExeKind) flags targetStr globalFlags $ \targetCtx ctx targetSelectors -> do (baseCtx, defaultVerbosity) <- case targetCtx of ProjectContext -> return (ctx, normal) GlobalContext -> return (ctx, normal) ScriptContext path exemeta -> (, silent) <$> updateContextAndWriteProjectFile ctx path exemeta let verbosity = fromFlagOrDefault defaultVerbosity (configVerbosity configFlags) buildCtx <- runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do when (buildSettingOnlyDeps (buildSettings baseCtx)) $ die' verbosity $ "The run command does not support '--only-dependencies'. " ++ "You may wish to use 'build --only-dependencies' and then " ++ "use 'run'." -- Interpret the targets on the command line as build targets -- (as opposed to say repl or haddock targets). targets <- either (reportTargetProblems verbosity) return $ resolveTargets selectPackageTargets selectComponentTarget elaboratedPlan Nothing targetSelectors -- Reject multiple targets, or at least targets in different -- components. It is ok to have two module/file targets in the -- same component, but not two that live in different components. -- -- Note that we discard the target and return the whole 'TargetsMap', -- so this check will be repeated (and must succeed) after -- the 'runProjectPreBuildPhase'. Keep it in mind when modifying this. _ <- singleExeOrElse (reportTargetProblems verbosity [multipleTargetsProblem targets]) targets let elaboratedPlan' = pruneInstallPlanToTargets TargetActionBuild targets elaboratedPlan return (elaboratedPlan', targets) (selectedUnitId, selectedComponent) <- -- Slight duplication with 'runProjectPreBuildPhase'. 
singleExeOrElse (die' verbosity $ "No or multiple targets given, but the run " ++ "phase has been reached. This is a bug.") $ targetsMap buildCtx printPlan verbosity baseCtx buildCtx buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes let elaboratedPlan = elaboratedPlanToExecute buildCtx matchingElaboratedConfiguredPackages = matchingPackagesByUnitId selectedUnitId elaboratedPlan let exeName = unUnqualComponentName selectedComponent -- In the common case, we expect @matchingElaboratedConfiguredPackages@ -- to consist of a single element that provides a single way of building -- an appropriately-named executable. In that case we take that -- package and continue. -- -- However, multiple packages/components could provide that -- executable, or it's possible we don't find the executable anywhere -- in the build plan. I suppose in principle it's also possible that -- a single package provides an executable in two different ways, -- though that's probably a bug if. Anyway it's a good lint to report -- an error in all of these cases, even if some seem like they -- shouldn't happen. 
pkg <- case matchingElaboratedConfiguredPackages of [] -> die' verbosity $ "Unknown executable " ++ exeName ++ " in package " ++ prettyShow selectedUnitId [elabPkg] -> do info verbosity $ "Selecting " ++ prettyShow selectedUnitId ++ " to supply " ++ exeName return elabPkg elabPkgs -> die' verbosity $ "Multiple matching executables found matching " ++ exeName ++ ":\n" ++ unlines (fmap (\p -> " - in package " ++ prettyShow (elabUnitId p)) elabPkgs) let exePath = binDirectoryFor (distDirLayout baseCtx) (elaboratedShared buildCtx) pkg exeName exeName let dryRun = buildSettingDryRun (buildSettings baseCtx) || buildSettingOnlyDownload (buildSettings baseCtx) if dryRun then notice verbosity "Running of executable suppressed by flag(s)" else runProgramInvocation verbosity emptyProgramInvocation { progInvokePath = exePath, progInvokeArgs = args, progInvokeEnv = dataDirsEnvironmentForPlan (distDirLayout baseCtx) elaboratedPlan } where (targetStr, args) = splitAt 1 targetAndArgs -- | Used by the main CLI parser as heuristic to decide whether @cabal@ was -- invoked as a script interpreter, i.e. via -- -- > #! /usr/bin/env cabal -- -- or -- -- > #! /usr/bin/cabal -- -- As the first argument passed to `cabal` will be a filepath to the -- script to be interpreted. -- -- See also 'handleShebang' validScript :: String -> IO Bool validScript script | isValid script && any isPathSeparator script = doesFileExist script | otherwise = return False -- | Handle @cabal@ invoked as script interpreter, see also 'validScript' -- -- First argument is the 'FilePath' to the script to be executed; second -- argument is a list of arguments to be passed to the script. handleShebang :: FilePath -> [String] -> IO () handleShebang script args = runAction (commandDefaultFlags runCommand) (script:args) defaultGlobalFlags singleExeOrElse :: IO (UnitId, UnqualComponentName) -> TargetsMap -> IO (UnitId, UnqualComponentName) singleExeOrElse action targetsMap = case Set.toList . 
distinctTargetComponents $ targetsMap of [(unitId, CExeName component)] -> return (unitId, component) [(unitId, CTestName component)] -> return (unitId, component) [(unitId, CBenchName component)] -> return (unitId, component) _ -> action -- | Filter the 'ElaboratedInstallPlan' keeping only the -- 'ElaboratedConfiguredPackage's that match the specified -- 'UnitId'. matchingPackagesByUnitId :: UnitId -> ElaboratedInstallPlan -> [ElaboratedConfiguredPackage] matchingPackagesByUnitId uid = catMaybes . fmap (foldPlanPackage (const Nothing) (\x -> if elabUnitId x == uid then Just x else Nothing)) . toList -- | This defines what a 'TargetSelector' means for the @run@ command. -- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, -- or otherwise classifies the problem. -- -- For the @run@ command we select the exe if there is only one and it's -- buildable. Fail if there are no or multiple buildable exe components. -- selectPackageTargets :: TargetSelector -> [AvailableTarget k] -> Either RunTargetProblem [k] selectPackageTargets targetSelector targets -- If there is a single executable component, select that. See #7403 | [target] <- targetsExesBuildable = Right [target] -- Otherwise, if there is a single executable-like component left, select that. | [target] <- targetsExeLikesBuildable = Right [target] -- but fail if there are multiple buildable executables. 
| not (null targetsExeLikesBuildable) = Left (matchesMultipleProblem targetSelector targetsExeLikesBuildable') -- If there are executables but none are buildable then we report those | not (null targetsExeLikes') = Left (TargetProblemNoneEnabled targetSelector targetsExeLikes') -- If there are no executables but some other targets then we report that | not (null targets) = Left (noExesProblem targetSelector) -- If there are no targets at all then we report that | otherwise = Left (TargetProblemNoTargets targetSelector) where -- Targets that are precisely executables targetsExes = filterTargetsKind ExeKind targets targetsExesBuildable = selectBuildableTargets targetsExes -- Any target that could be executed targetsExeLikes = targetsExes ++ filterTargetsKind TestKind targets ++ filterTargetsKind BenchKind targets (targetsExeLikesBuildable, targetsExeLikesBuildable') = selectBuildableTargets' targetsExeLikes targetsExeLikes' = forgetTargetsDetail targetsExeLikes -- | For a 'TargetComponent' 'TargetSelector', check if the component can be -- selected. -- -- For the @run@ command we just need to check it is a executable-like -- (an executable, a test, or a benchmark), in addition -- to the basic checks on being buildable etc. -- selectComponentTarget :: SubComponentTarget -> AvailableTarget k -> Either RunTargetProblem k selectComponentTarget subtarget@WholeComponent t = case availableTargetComponentName t of CExeName _ -> component CTestName _ -> component CBenchName _ -> component _ -> Left (componentNotExeProblem pkgid cname) where pkgid = availableTargetPackageId t cname = availableTargetComponentName t component = selectComponentTargetBasic subtarget t selectComponentTarget subtarget t = Left (isSubComponentProblem (availableTargetPackageId t) (availableTargetComponentName t) subtarget) -- | The various error conditions that can occur when matching a -- 'TargetSelector' against 'AvailableTarget's for the @run@ command. 
-- data RunProblem = -- | The 'TargetSelector' matches targets but no executables TargetProblemNoExes TargetSelector -- | A single 'TargetSelector' matches multiple targets | TargetProblemMatchesMultiple TargetSelector [AvailableTarget ()] -- | Multiple 'TargetSelector's match multiple targets | TargetProblemMultipleTargets TargetsMap -- | The 'TargetSelector' refers to a component that is not an executable | TargetProblemComponentNotExe PackageId ComponentName -- | Asking to run an individual file or module is not supported | TargetProblemIsSubComponent PackageId ComponentName SubComponentTarget deriving (Eq, Show) type RunTargetProblem = TargetProblem RunProblem noExesProblem :: TargetSelector -> RunTargetProblem noExesProblem = CustomTargetProblem . TargetProblemNoExes matchesMultipleProblem :: TargetSelector -> [AvailableTarget ()] -> RunTargetProblem matchesMultipleProblem selector targets = CustomTargetProblem $ TargetProblemMatchesMultiple selector targets multipleTargetsProblem :: TargetsMap -> TargetProblem RunProblem multipleTargetsProblem = CustomTargetProblem . TargetProblemMultipleTargets componentNotExeProblem :: PackageId -> ComponentName -> TargetProblem RunProblem componentNotExeProblem pkgid name = CustomTargetProblem $ TargetProblemComponentNotExe pkgid name isSubComponentProblem :: PackageId -> ComponentName -> SubComponentTarget -> TargetProblem RunProblem isSubComponentProblem pkgid name subcomponent = CustomTargetProblem $ TargetProblemIsSubComponent pkgid name subcomponent reportTargetProblems :: Verbosity -> [RunTargetProblem] -> IO a reportTargetProblems verbosity = die' verbosity . unlines . 
map renderRunTargetProblem renderRunTargetProblem :: RunTargetProblem -> String renderRunTargetProblem (TargetProblemNoTargets targetSelector) = case targetSelectorFilter targetSelector of Just kind | kind /= ExeKind -> "The run command is for running executables, but the target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ "." _ -> renderTargetProblemNoTargets "run" targetSelector renderRunTargetProblem problem = renderTargetProblem "run" renderRunProblem problem renderRunProblem :: RunProblem -> String renderRunProblem (TargetProblemMatchesMultiple targetSelector targets) = "The run command is for running a single executable at once. The target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ " which includes " ++ renderListCommaAnd ( ("the "++) <$> showComponentName <$> availableTargetComponentName <$> foldMap (\kind -> filterTargetsKind kind targets) [ExeKind, TestKind, BenchKind] ) ++ "." renderRunProblem (TargetProblemMultipleTargets selectorMap) = "The run command is for running a single executable at once. The targets " ++ renderListCommaAnd [ "'" ++ showTargetSelector ts ++ "'" | ts <- uniqueTargetSelectors selectorMap ] ++ " refer to different executables." renderRunProblem (TargetProblemComponentNotExe pkgid cname) = "The run command is for running executables, but the target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ " from the package " ++ prettyShow pkgid ++ "." where targetSelector = TargetComponent pkgid cname WholeComponent renderRunProblem (TargetProblemIsSubComponent pkgid cname subtarget) = "The run command can only run an executable as a whole, " ++ "not files or modules within them, but the target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ "." 
where targetSelector = TargetComponent pkgid cname subtarget renderRunProblem (TargetProblemNoExes targetSelector) = "Cannot run the target '" ++ showTargetSelector targetSelector ++ "' which refers to " ++ renderTargetSelector targetSelector ++ " because " ++ plural (targetSelectorPluralPkgs targetSelector) "it does" "they do" ++ " not contain any executables." cabal-install-3.8.1.0/src/Distribution/Client/CmdSdist.hs0000644000000000000000000003215007346545000021313 0ustar0000000000000000{-# LANGUAGE MultiWayIf #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE ViewPatterns #-} module Distribution.Client.CmdSdist ( sdistCommand, sdistAction, packageToSdist , OutputFormat(..)) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Client.CmdErrorMessages ( Plural(..), renderComponentKind ) import Distribution.Client.ProjectOrchestration ( ProjectBaseContext(..), CurrentCommand(..), establishProjectBaseContext, establishProjectBaseContextWithRoot) import Distribution.Client.NixStyleOptions ( NixStyleFlags (..), defaultNixStyleFlags ) import Distribution.Client.TargetSelector ( TargetSelector(..), ComponentKind , readTargetSelectors, reportTargetSelectorProblems ) import Distribution.Client.Setup ( GlobalFlags(..) ) import Distribution.Solver.Types.SourcePackage ( SourcePackage(..) ) import Distribution.Client.Types ( PackageSpecifier(..), PackageLocation(..), UnresolvedSourcePackage ) import Distribution.Client.DistDirLayout ( DistDirLayout(..), ProjectRoot (..) 
) import Distribution.Client.ProjectConfig ( ProjectConfig, withProjectOrGlobalConfig, commandLineFlagsToProjectConfig, projectConfigConfigFile, projectConfigShared ) import Distribution.Client.ProjectFlags ( ProjectFlags (..), defaultProjectFlags, projectFlagsOptions ) import Distribution.Compat.Lens ( _1, _2 ) import Distribution.Package ( Package(packageId) ) import Distribution.PackageDescription.Configuration ( flattenPackageDescription ) import Distribution.ReadE ( succeedReadE ) import Distribution.Simple.Command ( CommandUI(..), OptionField, option, reqArg, liftOptionL, ShowOrParseArgs ) import Distribution.Simple.PreProcess ( knownSuffixHandlers ) import Distribution.Simple.Setup ( Flag(..), toFlag, fromFlagOrDefault, flagToList, flagToMaybe , optionVerbosity, optionDistPref, trueArg, configVerbosity, configDistPref ) import Distribution.Simple.SrcDist ( listPackageSourcesWithDie ) import Distribution.Client.SrcDist ( packageDirToSdist ) import Distribution.Simple.Utils ( die', notice, withOutputMarker, wrapText ) import Distribution.Types.ComponentName ( ComponentName, showComponentName ) import Distribution.Types.PackageName ( PackageName, unPackageName ) import Distribution.Verbosity ( normal ) import Distribution.Types.GenericPackageDescription (GenericPackageDescription) import qualified Data.ByteString.Lazy.Char8 as BSL import System.Directory ( getCurrentDirectory , createDirectoryIfMissing, makeAbsolute ) import System.FilePath ( (), (<.>), makeRelative, normalise ) ------------------------------------------------------------------------------- -- Command ------------------------------------------------------------------------------- sdistCommand :: CommandUI (ProjectFlags, SdistFlags) sdistCommand = CommandUI { commandName = "v2-sdist" , commandSynopsis = "Generate a source distribution file (.tar.gz)." 
, commandUsage = \pname -> "Usage: " ++ pname ++ " v2-sdist [FLAGS] [PACKAGES]\n" , commandDescription = Just $ \_ -> wrapText "Generates tarballs of project packages suitable for upload to Hackage." , commandNotes = Nothing , commandDefaultFlags = (defaultProjectFlags, defaultSdistFlags) , commandOptions = \showOrParseArgs -> map (liftOptionL _1) (projectFlagsOptions showOrParseArgs) ++ map (liftOptionL _2) (sdistOptions showOrParseArgs) } ------------------------------------------------------------------------------- -- Flags ------------------------------------------------------------------------------- data SdistFlags = SdistFlags { sdistVerbosity :: Flag Verbosity , sdistDistDir :: Flag FilePath , sdistListSources :: Flag Bool , sdistNulSeparated :: Flag Bool , sdistOutputPath :: Flag FilePath } defaultSdistFlags :: SdistFlags defaultSdistFlags = SdistFlags { sdistVerbosity = toFlag normal , sdistDistDir = mempty , sdistListSources = toFlag False , sdistNulSeparated = toFlag False , sdistOutputPath = mempty } sdistOptions :: ShowOrParseArgs -> [OptionField SdistFlags] sdistOptions showOrParseArgs = [ optionVerbosity sdistVerbosity (\v flags -> flags { sdistVerbosity = v }) , optionDistPref sdistDistDir (\dd flags -> flags { sdistDistDir = dd }) showOrParseArgs , option ['l'] ["list-only"] "Just list the sources, do not make a tarball" sdistListSources (\v flags -> flags { sdistListSources = v }) trueArg , option [] ["null-sep"] "Separate the source files with NUL bytes rather than newlines." sdistNulSeparated (\v flags -> flags { sdistNulSeparated = v }) trueArg , option ['o'] ["output-directory", "outputdir"] "Choose the output directory of this command. 
'-' sends all output to stdout" sdistOutputPath (\o flags -> flags { sdistOutputPath = o }) (reqArg "PATH" (succeedReadE Flag) flagToList) ] ------------------------------------------------------------------------------- -- Action ------------------------------------------------------------------------------- sdistAction :: (ProjectFlags, SdistFlags) -> [String] -> GlobalFlags -> IO () sdistAction (ProjectFlags{..}, SdistFlags{..}) targetStrings globalFlags = do (baseCtx, distDirLayout) <- withProjectOrGlobalConfig verbosity flagIgnoreProject globalConfigFlag withProject withoutProject let localPkgs = localPackages baseCtx targetSelectors <- either (reportTargetSelectorProblems verbosity) return =<< readTargetSelectors localPkgs Nothing targetStrings -- elaborate path, create target directory mOutputPath' <- case mOutputPath of Just "-" -> return (Just "-") Just path -> do abspath <- makeAbsolute path createDirectoryIfMissing True abspath return (Just abspath) Nothing -> do createDirectoryIfMissing True (distSdistDirectory distDirLayout) return Nothing let format :: OutputFormat format = if | listSources, nulSeparated -> SourceList '\0' | listSources -> SourceList '\n' | otherwise -> TarGzArchive ext = case format of SourceList _ -> "list" TarGzArchive -> "tar.gz" outputPath pkg = case mOutputPath' of Just path | path == "-" -> "-" | otherwise -> path prettyShow (packageId pkg) <.> ext Nothing | listSources -> "-" | otherwise -> distSdistFile distDirLayout (packageId pkg) case reifyTargetSelectors localPkgs targetSelectors of Left errs -> die' verbosity . unlines . fmap renderTargetProblem $ errs Right pkgs | length pkgs > 1, not listSources, Just "-" <- mOutputPath' -> die' verbosity "Can't write multiple tarballs to standard output!" 
| otherwise -> traverse_ (\pkg -> packageToSdist verbosity (distProjectRootDirectory distDirLayout) format (outputPath pkg) pkg) pkgs where verbosity = fromFlagOrDefault normal sdistVerbosity listSources = fromFlagOrDefault False sdistListSources nulSeparated = fromFlagOrDefault False sdistNulSeparated mOutputPath = flagToMaybe sdistOutputPath prjConfig :: ProjectConfig prjConfig = commandLineFlagsToProjectConfig globalFlags (defaultNixStyleFlags ()) { configFlags = (configFlags $ defaultNixStyleFlags ()) { configVerbosity = sdistVerbosity , configDistPref = sdistDistDir } } mempty globalConfigFlag = projectConfigConfigFile (projectConfigShared prjConfig) withProject :: IO (ProjectBaseContext, DistDirLayout) withProject = do baseCtx <- establishProjectBaseContext verbosity prjConfig OtherCommand return (baseCtx, distDirLayout baseCtx) withoutProject :: ProjectConfig -> IO (ProjectBaseContext, DistDirLayout) withoutProject config = do cwd <- getCurrentDirectory baseCtx <- establishProjectBaseContextWithRoot verbosity (config <> prjConfig) (ProjectRootImplicit cwd) OtherCommand return (baseCtx, distDirLayout baseCtx) data OutputFormat = SourceList Char | TarGzArchive deriving (Show, Eq) packageToSdist :: Verbosity -> FilePath -> OutputFormat -> FilePath -> UnresolvedSourcePackage -> IO () packageToSdist verbosity projectRootDir format outputFile pkg = do let death = die' verbosity ("The impossible happened: a local package isn't local" <> (show pkg)) dir0 <- case srcpkgSource pkg of LocalUnpackedPackage path -> pure (Right path) RemoteSourceRepoPackage _ (Just tgz) -> pure (Left tgz) RemoteSourceRepoPackage {} -> death LocalTarballPackage tgz -> pure (Left tgz) RemoteTarballPackage _ (Just tgz) -> pure (Left tgz) RemoteTarballPackage {} -> death RepoTarballPackage {} -> death let -- Write String to stdout or file, using the default TextEncoding. 
write str | outputFile == "-" = putStr (withOutputMarker verbosity str) | otherwise = do writeFile outputFile str notice verbosity $ "Wrote source list to " ++ outputFile ++ "\n" -- Write raw ByteString to stdout or file as it is, without encoding. writeLBS lbs | outputFile == "-" = BSL.putStr lbs | otherwise = do BSL.writeFile outputFile lbs notice verbosity $ "Wrote tarball sdist to " ++ outputFile ++ "\n" case dir0 of Left tgz -> do case format of TarGzArchive -> do writeLBS =<< BSL.readFile tgz _ -> die' verbosity ("cannot convert tarball package to " ++ show format) Right dir -> case format of SourceList nulSep -> do let gpd :: GenericPackageDescription gpd = srcpkgDescription pkg let thisDie :: Verbosity -> String -> IO a thisDie v s = die' v $ "sdist of " <> prettyShow (packageId gpd) ++ ": " ++ s files' <- listPackageSourcesWithDie verbosity thisDie dir (flattenPackageDescription gpd) knownSuffixHandlers let files = nub $ sort $ map normalise files' let prefix = makeRelative projectRootDir dir write $ concat [prefix i ++ [nulSep] | i <- files] TarGzArchive -> do packageDirToSdist verbosity (srcpkgDescription pkg) dir >>= writeLBS -- reifyTargetSelectors :: [PackageSpecifier UnresolvedSourcePackage] -> [TargetSelector] -> Either [TargetProblem] [UnresolvedSourcePackage] reifyTargetSelectors pkgs sels = case partitionEithers (foldMap go sels) of ([], sels') -> Right sels' (errs, _) -> Left errs where -- there can be pkgs which are in extra-packages: -- these are not SpecificSourcePackage -- -- Why these packages are in localPkgs, it's confusing. -- Anyhow, better to be lenient here. -- flatten (SpecificSourcePackage pkg@SourcePackage{}) = Just pkg flatten _ = Nothing pkgs' = mapMaybe flatten pkgs getPkg pid = case find ((== pid) . packageId) pkgs' of Just pkg -> Right pkg Nothing -> error "The impossible happened: we have a reference to a local package that isn't in localPackages." 
go :: TargetSelector -> [Either TargetProblem UnresolvedSourcePackage] go (TargetPackage _ pids Nothing) = fmap getPkg pids go (TargetAllPackages Nothing) = Right <$> pkgs' go (TargetPackage _ _ (Just kind)) = [Left (AllComponentsOnly kind)] go (TargetAllPackages (Just kind)) = [Left (AllComponentsOnly kind)] go (TargetPackageNamed pname _) = [Left (NonlocalPackageNotAllowed pname)] go (TargetComponentUnknown pname _ _) = [Left (NonlocalPackageNotAllowed pname)] go (TargetComponent _ cname _) = [Left (ComponentsNotAllowed cname)] data TargetProblem = AllComponentsOnly ComponentKind | NonlocalPackageNotAllowed PackageName | ComponentsNotAllowed ComponentName renderTargetProblem :: TargetProblem -> String renderTargetProblem (AllComponentsOnly kind) = "It is not possible to package only the " ++ renderComponentKind Plural kind ++ " from a package " ++ "for distribution. Only entire packages may be packaged for distribution." renderTargetProblem (ComponentsNotAllowed cname) = "The component " ++ showComponentName cname ++ " cannot be packaged for distribution on its own. " ++ "Only entire packages may be packaged for distribution." renderTargetProblem (NonlocalPackageNotAllowed pname) = "The package " ++ unPackageName pname ++ " cannot be packaged for distribution, because it is not " ++ "local to this project." 
cabal-install-3.8.1.0/src/Distribution/Client/CmdTest.hs0000644000000000000000000002603607346545000021152 0ustar0000000000000000{-# LANGUAGE RecordWildCards #-} -- | cabal-install CLI command: test -- module Distribution.Client.CmdTest ( -- * The @test@ CLI and action testCommand, testAction, -- * Internals exposed for testing isSubComponentProblem, notTestProblem, noTestsProblem, selectPackageTargets, selectComponentTarget ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.ProjectOrchestration import Distribution.Client.CmdErrorMessages ( renderTargetSelector, showTargetSelector, targetSelectorFilter, plural, renderTargetProblem, renderTargetProblemNoTargets, targetSelectorPluralPkgs ) import Distribution.Client.TargetProblem ( TargetProblem (..) ) import Distribution.Client.NixStyleOptions ( NixStyleFlags (..), nixStyleOptions, defaultNixStyleFlags ) import Distribution.Client.Setup ( GlobalFlags(..), ConfigFlags(..) ) import Distribution.Simple.Setup ( TestFlags(..), fromFlagOrDefault ) import Distribution.Simple.Command ( CommandUI(..), usageAlternatives ) import Distribution.Simple.Flag ( Flag(..) ) import Distribution.Verbosity ( normal ) import Distribution.Simple.Utils ( notice, wrapText, die' ) import qualified System.Exit (exitSuccess) testCommand :: CommandUI (NixStyleFlags ()) testCommand = CommandUI { commandName = "v2-test" , commandSynopsis = "Run test-suites." , commandUsage = usageAlternatives "v2-test" [ "[TARGETS] [FLAGS]" ] , commandDescription = Just $ \_ -> wrapText $ "Runs the specified test-suites, first ensuring they are up to " ++ "date.\n\n" ++ "Any test-suite in any package in the project can be specified. " ++ "A package can be specified in which case all the test-suites in the " ++ "package are run. The default is to run all the test-suites in the " ++ "package in the current directory.\n\n" ++ "Dependencies are built or rebuilt as necessary. 
Additional " ++ "configuration flags can be specified on the command line and these " ++ "extend the project configuration from the 'cabal.project', " ++ "'cabal.project.local' and other files.\n\n" ++ "To pass command-line arguments to a test suite, see the " ++ "v2-run command." , commandNotes = Just $ \pname -> "Examples:\n" ++ " " ++ pname ++ " v2-test\n" ++ " Run all the test-suites in the package in the current directory\n" ++ " " ++ pname ++ " v2-test pkgname\n" ++ " Run all the test-suites in the package named pkgname\n" ++ " " ++ pname ++ " v2-test cname\n" ++ " Run the test-suite named cname\n" ++ " " ++ pname ++ " v2-test cname --enable-coverage\n" ++ " Run the test-suite built with code coverage (including local libs used)\n" , commandDefaultFlags = defaultNixStyleFlags () , commandOptions = nixStyleOptions (const []) } -- | The @test@ command is very much like @build@. It brings the install plan -- up to date, selects that part of the plan needed by the given or implicit -- test target(s) and then executes the plan. -- -- Compared to @build@ the difference is that there's also test targets -- which are ephemeral. -- -- For more details on how this works, see the module -- "Distribution.Client.ProjectOrchestration" -- testAction :: NixStyleFlags () -> [String] -> GlobalFlags -> IO () testAction flags@NixStyleFlags {..} targetStrings globalFlags = do baseCtx <- establishProjectBaseContext verbosity cliConfig OtherCommand targetSelectors <- either (reportTargetSelectorProblems verbosity) return =<< readTargetSelectors (localPackages baseCtx) (Just TestKind) targetStrings buildCtx <- runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do when (buildSettingOnlyDeps (buildSettings baseCtx)) $ die' verbosity $ "The test command does not support '--only-dependencies'. " ++ "You may wish to use 'build --only-dependencies' and then " ++ "use 'test'." 
-- Interpret the targets on the command line as test targets -- (as opposed to say build or haddock targets). targets <- either (reportTargetProblems verbosity failWhenNoTestSuites) return $ resolveTargets selectPackageTargets selectComponentTarget elaboratedPlan Nothing targetSelectors let elaboratedPlan' = pruneInstallPlanToTargets TargetActionTest targets elaboratedPlan return (elaboratedPlan', targets) printPlan verbosity baseCtx buildCtx buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes where failWhenNoTestSuites = testFailWhenNoTestSuites testFlags verbosity = fromFlagOrDefault normal (configVerbosity configFlags) cliConfig = commandLineFlagsToProjectConfig globalFlags flags mempty -- ClientInstallFlags -- | This defines what a 'TargetSelector' means for the @test@ command. -- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, -- or otherwise classifies the problem. -- -- For the @test@ command we select all buildable test-suites, -- or fail if there are no test-suites or no buildable test-suites. -- selectPackageTargets :: TargetSelector -> [AvailableTarget k] -> Either TestTargetProblem [k] selectPackageTargets targetSelector targets -- If there are any buildable test-suite targets then we select those | not (null targetsTestsBuildable) = Right targetsTestsBuildable -- If there are test-suites but none are buildable then we report those | not (null targetsTests) = Left (TargetProblemNoneEnabled targetSelector targetsTests) -- If there are no test-suite but some other targets then we report that | not (null targets) = Left (noTestsProblem targetSelector) -- If there are no targets at all then we report that | otherwise = Left (TargetProblemNoTargets targetSelector) where targetsTestsBuildable = selectBuildableTargets . filterTargetsKind TestKind $ targets targetsTests = forgetTargetsDetail . 
filterTargetsKind TestKind $ targets -- | For a 'TargetComponent' 'TargetSelector', check if the component can be -- selected. -- -- For the @test@ command we just need to check it is a test-suite, in addition -- to the basic checks on being buildable etc. -- selectComponentTarget :: SubComponentTarget -> AvailableTarget k -> Either TestTargetProblem k selectComponentTarget subtarget@WholeComponent t | CTestName _ <- availableTargetComponentName t = either Left return $ selectComponentTargetBasic subtarget t | otherwise = Left (notTestProblem (availableTargetPackageId t) (availableTargetComponentName t)) selectComponentTarget subtarget t = Left (isSubComponentProblem (availableTargetPackageId t) (availableTargetComponentName t) subtarget) -- | The various error conditions that can occur when matching a -- 'TargetSelector' against 'AvailableTarget's for the @test@ command. -- data TestProblem = -- | The 'TargetSelector' matches targets but no test-suites TargetProblemNoTests TargetSelector -- | The 'TargetSelector' refers to a component that is not a test-suite | TargetProblemComponentNotTest PackageId ComponentName -- | Asking to test an individual file or module is not supported | TargetProblemIsSubComponent PackageId ComponentName SubComponentTarget deriving (Eq, Show) type TestTargetProblem = TargetProblem TestProblem noTestsProblem :: TargetSelector -> TargetProblem TestProblem noTestsProblem = CustomTargetProblem . 
TargetProblemNoTests notTestProblem :: PackageId -> ComponentName -> TargetProblem TestProblem notTestProblem pkgid name = CustomTargetProblem $ TargetProblemComponentNotTest pkgid name isSubComponentProblem :: PackageId -> ComponentName -> SubComponentTarget -> TargetProblem TestProblem isSubComponentProblem pkgid name subcomponent = CustomTargetProblem $ TargetProblemIsSubComponent pkgid name subcomponent reportTargetProblems :: Verbosity -> Flag Bool -> [TestTargetProblem] -> IO a reportTargetProblems verbosity failWhenNoTestSuites problems = case (failWhenNoTestSuites, problems) of (Flag True, [CustomTargetProblem (TargetProblemNoTests _)]) -> die' verbosity problemsMessage (_, [CustomTargetProblem (TargetProblemNoTests selector)]) -> do notice verbosity (renderAllowedNoTestsProblem selector) System.Exit.exitSuccess (_, _) -> die' verbosity problemsMessage where problemsMessage = unlines . map renderTestTargetProblem $ problems -- | Unless @--test-fail-when-no-test-suites@ flag is passed, we don't -- @die@ when the target problem is 'TargetProblemNoTests'. -- Instead, we display a notice saying that no tests have run and -- indicate how this behaviour was enabled. renderAllowedNoTestsProblem :: TargetSelector -> String renderAllowedNoTestsProblem selector = "No tests to run for " ++ renderTargetSelector selector renderTestTargetProblem :: TestTargetProblem -> String renderTestTargetProblem (TargetProblemNoTargets targetSelector) = case targetSelectorFilter targetSelector of Just kind | kind /= TestKind -> "The test command is for running test suites, but the target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ "." 
++ "\n" ++ show targetSelector _ -> renderTargetProblemNoTargets "test" targetSelector renderTestTargetProblem problem = renderTargetProblem "test" renderTestProblem problem renderTestProblem :: TestProblem -> String renderTestProblem (TargetProblemNoTests targetSelector) = "Cannot run tests for the target '" ++ showTargetSelector targetSelector ++ "' which refers to " ++ renderTargetSelector targetSelector ++ " because " ++ plural (targetSelectorPluralPkgs targetSelector) "it does" "they do" ++ " not contain any test suites." renderTestProblem (TargetProblemComponentNotTest pkgid cname) = "The test command is for running test suites, but the target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ " from the package " ++ prettyShow pkgid ++ "." where targetSelector = TargetComponent pkgid cname WholeComponent renderTestProblem (TargetProblemIsSubComponent pkgid cname subtarget) = "The test command can only run test suites as a whole, " ++ "not files or modules within them, but the target '" ++ showTargetSelector targetSelector ++ "' refers to " ++ renderTargetSelector targetSelector ++ "." 
where targetSelector = TargetComponent pkgid cname subtarget cabal-install-3.8.1.0/src/Distribution/Client/CmdUpdate.hs0000644000000000000000000002426207346545000021454 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE ViewPatterns #-} -- | cabal-install CLI command: update -- module Distribution.Client.CmdUpdate ( updateCommand, updateAction, ) where import Prelude () import Control.Exception import Distribution.Client.Compat.Prelude import Distribution.Client.NixStyleOptions ( NixStyleFlags (..), nixStyleOptions, defaultNixStyleFlags ) import Distribution.Client.Compat.Directory ( setModificationTime ) import Distribution.Client.ProjectOrchestration import Distribution.Client.ProjectConfig ( ProjectConfig(..) , ProjectConfigShared(projectConfigConfigFile) , projectConfigWithSolverRepoContext , withProjectOrGlobalConfig ) import Distribution.Client.ProjectFlags ( ProjectFlags (..) ) import Distribution.Client.Types ( Repo(..), RepoName (..), unRepoName, RemoteRepo(..), repoName ) import Distribution.Client.HttpUtils ( DownloadResult(..) ) import Distribution.Client.FetchUtils ( downloadIndex ) import Distribution.Client.JobControl ( newParallelJobControl, spawnJob, collectJob ) import Distribution.Client.Setup ( GlobalFlags, ConfigFlags(..) , UpdateFlags, defaultUpdateFlags , RepoContext(..) 
) import Distribution.Simple.Flag ( fromFlagOrDefault ) import Distribution.Simple.Utils ( die', notice, wrapText, writeFileAtomic, noticeNoWrap, warn ) import Distribution.Verbosity ( normal, lessVerbose ) import Distribution.Client.IndexUtils.IndexState import Distribution.Client.IndexUtils ( updateRepoIndexCache, Index(..), writeIndexTimestamp , currentIndexTimestamp, indexBaseName, updatePackageIndexCacheFile ) import qualified Data.Maybe as Unsafe (fromJust) import qualified Distribution.Compat.CharParsing as P import qualified Text.PrettyPrint as Disp import qualified Data.ByteString.Lazy as BS import Distribution.Client.GZipUtils (maybeDecompress) import System.FilePath ((<.>), dropExtension) import Data.Time (getCurrentTime) import Distribution.Simple.Command ( CommandUI(..), usageAlternatives ) import qualified Hackage.Security.Client as Sec import Distribution.Client.IndexUtils.Timestamp (nullTimestamp) updateCommand :: CommandUI (NixStyleFlags ()) updateCommand = CommandUI { commandName = "v2-update" , commandSynopsis = "Updates list of known packages." , commandUsage = usageAlternatives "v2-update" [ "[FLAGS] [REPOS]" ] , commandDescription = Just $ \_ -> wrapText $ "For all known remote repositories, download the package list." 
, commandNotes = Just $ \pname -> "REPO has the format [,] where index-state follows\n" ++ "the same format and syntax that is supported by the --index-state flag.\n\n" ++ "Examples:\n" ++ " " ++ pname ++ " v2-update\n" ++ " Download the package list for all known remote repositories.\n\n" ++ " " ++ pname ++ " v2-update hackage.haskell.org,@1474732068\n" ++ " " ++ pname ++ " v2-update hackage.haskell.org,2016-09-24T17:47:48Z\n" ++ " " ++ pname ++ " v2-update hackage.haskell.org,HEAD\n" ++ " " ++ pname ++ " v2-update hackage.haskell.org\n" ++ " Download hackage.haskell.org at a specific index state.\n\n" ++ " " ++ pname ++ " new update hackage.haskell.org head.hackage\n" ++ " Download hackage.haskell.org and head.hackage\n" ++ " head.hackage must be a known repo-id. E.g. from\n" ++ " your cabal.project(.local) file.\n" , commandOptions = nixStyleOptions $ const [] , commandDefaultFlags = defaultNixStyleFlags () } data UpdateRequest = UpdateRequest { _updateRequestRepoName :: RepoName , _updateRequestRepoState :: RepoIndexState } deriving (Show) instance Pretty UpdateRequest where pretty (UpdateRequest n s) = pretty n <<>> Disp.comma <<>> pretty s instance Parsec UpdateRequest where parsec = do name <- parsec state <- P.char ',' *> parsec <|> pure IndexStateHead return (UpdateRequest name state) updateAction :: NixStyleFlags () -> [String] -> GlobalFlags -> IO () updateAction flags@NixStyleFlags {..} extraArgs globalFlags = do let ignoreProject = flagIgnoreProject projectFlags projectConfig <- withProjectOrGlobalConfig verbosity ignoreProject globalConfigFlag (projectConfig <$> establishProjectBaseContext verbosity cliConfig OtherCommand) (\globalConfig -> return $ globalConfig <> cliConfig) projectConfigWithSolverRepoContext verbosity (projectConfigShared projectConfig) (projectConfigBuildOnly projectConfig) $ \repoCtxt -> do let repos :: [Repo] repos = repoContextRepos repoCtxt parseArg :: String -> IO UpdateRequest parseArg s = case simpleParsec s of Just r -> 
return r Nothing -> die' verbosity $ "'v2-update' unable to parse repo: \"" ++ s ++ "\"" updateRepoRequests <- traverse parseArg extraArgs unless (null updateRepoRequests) $ do let remoteRepoNames = map repoName repos unknownRepos = [r | (UpdateRequest r _) <- updateRepoRequests , not (r `elem` remoteRepoNames)] unless (null unknownRepos) $ die' verbosity $ "'v2-update' repo(s): \"" ++ intercalate "\", \"" (map unRepoName unknownRepos) ++ "\" can not be found in known remote repo(s): " ++ intercalate ", " (map unRepoName remoteRepoNames) let reposToUpdate :: [(Repo, RepoIndexState)] reposToUpdate = case updateRepoRequests of -- If we are not given any specific repository, update all -- repositories to HEAD. [] -> map (,IndexStateHead) repos updateRequests -> let repoMap = [(repoName r, r) | r <- repos] lookup' k = Unsafe.fromJust (lookup k repoMap) in [ (lookup' name, state) | (UpdateRequest name state) <- updateRequests ] case reposToUpdate of [] -> notice verbosity "No remote repositories configured" [(remoteRepo, _)] -> notice verbosity $ "Downloading the latest package list from " ++ unRepoName (repoName remoteRepo) _ -> notice verbosity . unlines $ "Downloading the latest package lists from: " : map (("- " ++) . unRepoName . repoName . fst) reposToUpdate unless (null reposToUpdate) $ do jobCtrl <- newParallelJobControl (length reposToUpdate) traverse_ (spawnJob jobCtrl . 
updateRepo verbosity defaultUpdateFlags repoCtxt) reposToUpdate traverse_ (\_ -> collectJob jobCtrl) reposToUpdate where verbosity = fromFlagOrDefault normal (configVerbosity configFlags) cliConfig = commandLineFlagsToProjectConfig globalFlags flags mempty -- ClientInstallFlags, not needed here globalConfigFlag = projectConfigConfigFile (projectConfigShared cliConfig) updateRepo :: Verbosity -> UpdateFlags -> RepoContext -> (Repo, RepoIndexState) -> IO () updateRepo verbosity _updateFlags repoCtxt (repo, indexState) = do transport <- repoContextGetTransport repoCtxt case repo of RepoLocalNoIndex{} -> do let index = RepoIndex repoCtxt repo updatePackageIndexCacheFile verbosity index RepoRemote{..} -> do downloadResult <- downloadIndex transport verbosity repoRemote repoLocalDir case downloadResult of FileAlreadyInCache -> setModificationTime (indexBaseName repo <.> "tar") =<< getCurrentTime FileDownloaded indexPath -> do writeFileAtomic (dropExtension indexPath) . maybeDecompress =<< BS.readFile indexPath updateRepoIndexCache verbosity (RepoIndex repoCtxt repo) RepoSecure{} -> repoContextWithSecureRepo repoCtxt repo $ \repoSecure -> do let index = RepoIndex repoCtxt repo -- NB: This may be a nullTimestamp if we've never updated before current_ts <- currentIndexTimestamp (lessVerbose verbosity) repoCtxt repo -- NB: always update the timestamp, even if we didn't actually -- download anything writeIndexTimestamp index indexState ce <- if repoContextIgnoreExpiry repoCtxt then Just `fmap` getCurrentTime else return Nothing updated <- Sec.uncheckClientErrors $ Sec.checkForUpdates repoSecure ce -- this resolves indexState (which could be HEAD) into a timestamp new_ts <- currentIndexTimestamp (lessVerbose verbosity) repoCtxt repo let rname = remoteRepoName (repoRemote repo) -- Update cabal's internal index as well so that it's not out of sync -- (If all access to the cache goes through hackage-security this can go) case updated of Sec.NoUpdates -> do now <- getCurrentTime 
setModificationTime (indexBaseName repo <.> "tar") now `catchIO` (\e -> warn verbosity $ "Could not set modification time of index tarball -- " ++ displayException e) noticeNoWrap verbosity $ "Package list of " ++ prettyShow rname ++ " is up to date." Sec.HasUpdates -> do updateRepoIndexCache verbosity index noticeNoWrap verbosity $ "Package list of " ++ prettyShow rname ++ " has been updated." noticeNoWrap verbosity $ "The index-state is set to " ++ prettyShow (IndexStateTime new_ts) ++ "." -- TODO: This will print multiple times if there are multiple -- repositories: main problem is we don't have a way of updating -- a specific repo. Once we implement that, update this. -- In case current_ts is a valid timestamp different from new_ts, let -- the user know how to go back to current_ts when (current_ts /= nullTimestamp && new_ts /= current_ts) $ noticeNoWrap verbosity $ "To revert to previous state run:\n" ++ " cabal v2-update '" ++ prettyShow (UpdateRequest rname (IndexStateTime current_ts)) ++ "'\n" cabal-install-3.8.1.0/src/Distribution/Client/Compat/0000755000000000000000000000000007346545000020467 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/Compat/Directory.hs0000644000000000000000000000336707346545000023000 0ustar0000000000000000{-# LANGUAGE CPP #-} module Distribution.Client.Compat.Directory ( setModificationTime, createFileLink, pathIsSymbolicLink, getSymbolicLinkTarget, ) where #if MIN_VERSION_directory(1,2,3) import System.Directory (setModificationTime) #else import Data.Time.Clock (UTCTime) #endif #if MIN_VERSION_directory(1,3,1) import System.Directory (createFileLink, getSymbolicLinkTarget, pathIsSymbolicLink) #elif defined(MIN_VERSION_unix) import System.Posix.Files (createSymbolicLink, getSymbolicLinkStatus, isSymbolicLink, readSymbolicLink) #endif ------------------------------------------------------------------------------- -- setModificationTime 
------------------------------------------------------------------------------- #if !MIN_VERSION_directory(1,2,3) setModificationTime :: FilePath -> UTCTime -> IO () setModificationTime _fp _t = return () #endif ------------------------------------------------------------------------------- -- Symlink ------------------------------------------------------------------------------- #if MIN_VERSION_directory(1,3,1) #elif defined(MIN_VERSION_unix) createFileLink :: FilePath -> FilePath -> IO () createFileLink = createSymbolicLink pathIsSymbolicLink :: FilePath -> IO Bool pathIsSymbolicLink fp = do status <- getSymbolicLinkStatus fp return (isSymbolicLink status) getSymbolicLinkTarget :: FilePath -> IO FilePath getSymbolicLinkTarget = readSymbolicLink #else createFileLink :: FilePath -> FilePath -> IO () createFileLink _ _ = fail "Symlinking feature not available" pathIsSymbolicLink :: FilePath -> IO Bool pathIsSymbolicLink _ = fail "Symlinking feature not available" getSymbolicLinkTarget :: FilePath -> IO FilePath getSymbolicLinkTarget _ = fail "Symlinking feature not available" #endif cabal-install-3.8.1.0/src/Distribution/Client/Compat/ExecutablePath.hs0000644000000000000000000001216107346545000023722 0ustar0000000000000000{-# LANGUAGE ForeignFunctionInterface #-} {-# LANGUAGE CPP #-} -- Copied verbatim from base-4.6.0.0. We can't simply import -- System.Environment.getExecutablePath because we need compatibility with older -- GHCs. module Distribution.Client.Compat.ExecutablePath ( getExecutablePath ) where import Prelude -- The imports are purposely kept completely disjoint to prevent edits -- to one OS implementation from breaking another. 
#if defined(darwin_HOST_OS) import Data.Word import Foreign.C import Foreign.Marshal.Alloc import Foreign.Ptr import Foreign.Storable import System.Posix.Internals #elif defined(linux_HOST_OS) import Foreign.C import Foreign.Marshal.Array import System.Posix.Internals #elif defined(mingw32_HOST_OS) import Data.Word import Foreign.C import Foreign.Marshal.Array import Foreign.Ptr import System.Posix.Internals #else import Foreign.C import Foreign.Marshal.Alloc import Foreign.Ptr import Foreign.Storable import System.Posix.Internals #endif -- The exported function is defined outside any if-guard to make sure -- every OS implements it with the same type. -- | Returns the absolute pathname of the current executable. -- -- Note that for scripts and interactive sessions, this is the path to -- the interpreter (e.g. ghci.) -- -- /Since: 4.6.0.0/ getExecutablePath :: IO FilePath -------------------------------------------------------------------------------- -- Mac OS X #if defined(darwin_HOST_OS) type UInt32 = Word32 foreign import ccall unsafe "mach-o/dyld.h _NSGetExecutablePath" c__NSGetExecutablePath :: CString -> Ptr UInt32 -> IO CInt -- | Returns the path of the main executable. The path may be a -- symbolic link and not the real file. -- -- See dyld(3) _NSGetExecutablePath :: IO FilePath _NSGetExecutablePath = allocaBytes 1024 $ \ buf -> -- PATH_MAX is 1024 on OS X alloca $ \ bufsize -> do poke bufsize 1024 status <- c__NSGetExecutablePath buf bufsize if status == 0 then peekFilePath buf else do reqBufsize <- fromIntegral `fmap` peek bufsize allocaBytes reqBufsize $ \ newBuf -> do status2 <- c__NSGetExecutablePath newBuf bufsize if status2 == 0 then peekFilePath newBuf else error "_NSGetExecutablePath: buffer too small" foreign import ccall unsafe "stdlib.h realpath" c_realpath :: CString -> CString -> IO CString -- | Resolves all symbolic links, extra \/ characters, and references -- to \/.\/ and \/..\/. Returns an absolute pathname. 
-- -- See realpath(3) realpath :: FilePath -> IO FilePath realpath path = withFilePath path $ \ fileName -> allocaBytes 1024 $ \ resolvedName -> do _ <- throwErrnoIfNull "realpath" $ c_realpath fileName resolvedName peekFilePath resolvedName getExecutablePath = _NSGetExecutablePath >>= realpath -------------------------------------------------------------------------------- -- Linux #elif defined(linux_HOST_OS) foreign import ccall unsafe "readlink" c_readlink :: CString -> CString -> CSize -> IO CInt -- | Reads the @FilePath@ pointed to by the symbolic link and returns -- it. -- -- See readlink(2) readSymbolicLink :: FilePath -> IO FilePath readSymbolicLink file = allocaArray0 4096 $ \buf -> do withFilePath file $ \s -> do len <- throwErrnoPathIfMinus1 "readSymbolicLink" file $ c_readlink s buf 4096 peekFilePathLen (buf,fromIntegral len) getExecutablePath = readSymbolicLink $ "/proc/self/exe" -------------------------------------------------------------------------------- -- Windows #elif defined(mingw32_HOST_OS) # if defined(i386_HOST_ARCH) # define WINDOWS_CCONV stdcall # elif defined(x86_64_HOST_ARCH) # define WINDOWS_CCONV ccall # else # error Unknown mingw32 arch # endif foreign import WINDOWS_CCONV unsafe "windows.h GetModuleFileNameW" c_GetModuleFileName :: Ptr () -> CWString -> Word32 -> IO Word32 getExecutablePath = go 2048 -- plenty, PATH_MAX is 512 under Win32 where go size = allocaArray (fromIntegral size) $ \ buf -> do ret <- c_GetModuleFileName nullPtr buf size case ret of 0 -> error "getExecutablePath: GetModuleFileNameW returned an error" _ | ret < size -> peekFilePath buf | otherwise -> go (size * 2) -------------------------------------------------------------------------------- -- Fallback to argv[0] #else foreign import ccall unsafe "getFullProgArgv" c_getFullProgArgv :: Ptr CInt -> Ptr (Ptr CString) -> IO () getExecutablePath = alloca $ \ p_argc -> alloca $ \ p_argv -> do c_getFullProgArgv p_argc p_argv argc <- peek p_argc if argc > 0 -- If 
argc > 0 then argv[0] is guaranteed by the standard -- to be a pointer to a null-terminated string. then peek p_argv >>= peek >>= peekFilePath else error $ "getExecutablePath: " ++ msg where msg = "no OS specific implementation and program name couldn't be " ++ "found in argv" -------------------------------------------------------------------------------- #endif cabal-install-3.8.1.0/src/Distribution/Client/Compat/Orphans.hs0000644000000000000000000000316607346545000022443 0ustar0000000000000000{-# LANGUAGE BangPatterns #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module Distribution.Client.Compat.Orphans () where import Control.Exception (SomeException) import Distribution.Compat.Binary (Binary (..)) import Distribution.Compat.Typeable (typeRep) import Distribution.Utils.Structured (Structure (Nominal), Structured (..)) import Network.URI (URI (..), URIAuth (..)) import Prelude (error, return) ------------------------------------------------------------------------------- -- network-uri ------------------------------------------------------------------------------- -- note, network-uri-2.6.0.3+ provide a Generic instance but earlier -- versions do not, so we use manual Binary instances here instance Binary URI where put (URI a b c d e) = do put a; put b; put c; put d; put e get = do !a <- get; !b <- get; !c <- get; !d <- get; !e <- get return (URI a b c d e) instance Structured URI where structure p = Nominal (typeRep p) 0 "URI" [] instance Binary URIAuth where put (URIAuth a b c) = do put a; put b; put c get = do !a <- get; !b <- get; !c <- get; return (URIAuth a b c) ------------------------------------------------------------------------------- -- base ------------------------------------------------------------------------------- --FIXME: Duncan Coutts: this is a total cheat --Added in 46aa019ec85e313e257d122a3549cce01996c566 instance Binary SomeException where put _ = return () get = error "cannot serialise exceptions" instance Structured SomeException where 
structure p = Nominal (typeRep p) 0 "SomeException" [] cabal-install-3.8.1.0/src/Distribution/Client/Compat/Prelude.hs0000644000000000000000000000142307346545000022423 0ustar0000000000000000-- to suppress WARNING in "Distribution.Compat.Prelude.Internal" {-# OPTIONS_GHC -fno-warn-deprecations #-} -- | This module does two things: -- -- * Acts as a compatibility layer, like @base-compat@. -- -- * Provides commonly used imports. -- -- This module is a superset of "Distribution.Compat.Prelude" (which -- this module re-exports) -- module Distribution.Client.Compat.Prelude ( module Distribution.Compat.Prelude.Internal , module X ) where import Distribution.Client.Compat.Orphans () import Distribution.Compat.Prelude.Internal import Prelude () import Distribution.Parsec as X (CabalParsing, Parsec (..), eitherParsec, explicitEitherParsec, simpleParsec) import Distribution.Pretty as X (Pretty (..), prettyShow) import Distribution.Verbosity as X (Verbosity) cabal-install-3.8.1.0/src/Distribution/Client/Compat/Process.hs0000644000000000000000000000361507346545000022446 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Compat.Process -- Copyright : (c) 2013 Liu Hao, Brent Yorgey -- License : BSD-style (see the file LICENSE) -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Cross-platform utilities for invoking processes. 
-- ----------------------------------------------------------------------------- module Distribution.Client.Compat.Process ( readProcessWithExitCode ) where import Prelude (FilePath, IO, String, return, (||)) import Control.Exception (catch, throw) import System.Exit (ExitCode (ExitFailure)) import System.IO.Error (isDoesNotExistError, isPermissionError) import qualified System.Process as P -- | @readProcessWithExitCode@ creates an external process, reads its -- standard output and standard error strictly, waits until the -- process terminates, and then returns the @ExitCode@ of the -- process, the standard output, and the standard error. -- -- See the documentation of the version from @System.Process@ for -- more information. -- -- The version from @System.Process@ behaves inconsistently across -- platforms when an executable with the given name is not found: in -- some cases it returns an @ExitFailure@, in others it throws an -- exception. This variant catches \"does not exist\" and -- \"permission denied\" exceptions and turns them into -- @ExitFailure@s. -- -- TODO: this doesn't use 'Distrubution.Compat.Process'. 
-- readProcessWithExitCode :: FilePath -> [String] -> String -> IO (ExitCode, String, String) readProcessWithExitCode cmd args input = P.readProcessWithExitCode cmd args input `catch` \e -> if isDoesNotExistError e || isPermissionError e then return (ExitFailure 127, "", "") else throw e cabal-install-3.8.1.0/src/Distribution/Client/Compat/Semaphore.hs0000644000000000000000000000610507346545000022750 0ustar0000000000000000{-# LANGUAGE DeriveDataTypeable #-} {-# OPTIONS_GHC -funbox-strict-fields #-} module Distribution.Client.Compat.Semaphore ( QSem , newQSem , waitQSem , signalQSem ) where import Prelude (IO, return, Eq (..), Int, Bool (..), ($), ($!), Num (..), flip) import Control.Concurrent.STM (TVar, atomically, newTVar, readTVar, retry, writeTVar) import Control.Exception (mask_, onException) import Control.Monad (join, unless) import Data.Typeable (Typeable) import Data.List.NonEmpty (NonEmpty (..)) import qualified Data.List.NonEmpty as NE -- | 'QSem' is a quantity semaphore in which the resource is acquired -- and released in units of one. It provides guaranteed FIFO ordering -- for satisfying blocked `waitQSem` calls. -- data QSem = QSem !(TVar Int) !(TVar [TVar Bool]) !(TVar [TVar Bool]) deriving (Eq, Typeable) newQSem :: Int -> IO QSem newQSem i = atomically $ do q <- newTVar i b1 <- newTVar [] b2 <- newTVar [] return (QSem q b1 b2) waitQSem :: QSem -> IO () waitQSem s@(QSem q _b1 b2) = mask_ $ join $ atomically $ do -- join, because if we need to block, we have to add a TVar to -- the block queue. -- mask_, because we need a chance to set up an exception handler -- after the join returns. v <- readTVar q if v == 0 then do b <- newTVar False ys <- readTVar b2 writeTVar b2 (b:ys) return (wait b) else do writeTVar q $! 
v - 1 return (return ()) where -- -- very careful here: if we receive an exception, then we need to -- (a) write True into the TVar, so that another signalQSem doesn't -- try to wake up this thread, and -- (b) if the TVar is *already* True, then we need to do another -- signalQSem to avoid losing a unit of the resource. -- -- The 'wake' function does both (a) and (b), so we can just call -- it here. -- wait t = flip onException (wake s t) $ atomically $ do b <- readTVar t unless b retry wake :: QSem -> TVar Bool -> IO () wake s x = join $ atomically $ do b <- readTVar x if b then return (signalQSem s) else do writeTVar x True return (return ()) {- property we want: bracket waitQSem (\_ -> signalQSem) (\_ -> ...) never loses a unit of the resource. -} signalQSem :: QSem -> IO () signalQSem s@(QSem q b1 b2) = mask_ $ join $ atomically $ do -- join, so we don't force the reverse inside the txn -- mask_ is needed so we don't lose a wakeup v <- readTVar q if v /= 0 then do writeTVar q $! v + 1 return (return ()) else do xs <- readTVar b1 checkwake1 xs where checkwake1 [] = do ys <- readTVar b2 checkwake2 ys checkwake1 (x:xs) = do writeTVar b1 xs return (wake s x) checkwake2 [] = do writeTVar q 1 return (return ()) checkwake2 (y:ys) = do let (z:|zs) = NE.reverse (y:|ys) writeTVar b1 zs writeTVar b2 [] return (wake s z) cabal-install-3.8.1.0/src/Distribution/Client/Config.hs0000644000000000000000000017515507346545000021023 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Config -- Copyright : (c) David Himmelstrup 2005 -- License : BSD-like -- -- Maintainer : lemmih@gmail.com -- Stability : provisional -- Portability : portable -- -- Utilities for handling saved state such as known packages, known servers and -- downloaded packages. 
----------------------------------------------------------------------------- module Distribution.Client.Config ( SavedConfig(..), loadConfig, getConfigFilePath, showConfig, showConfigWithComments, parseConfig, getCabalDir, defaultConfigFile, defaultCacheDir, defaultCompiler, defaultInstallPath, defaultLogsDir, defaultUserInstall, baseSavedConfig, commentSavedConfig, initialSavedConfig, configFieldDescriptions, haddockFlagsFields, installDirsFields, withProgramsFields, withProgramOptionsFields, userConfigDiff, userConfigUpdate, createDefaultConfigFile, remoteRepoFields, postProcessRepo, ) where import Distribution.Client.Compat.Prelude import Prelude () import Language.Haskell.Extension ( Language(Haskell2010) ) import Distribution.Deprecated.ViewAsFieldDescr ( viewAsFieldDescr ) import Distribution.Client.Types ( RemoteRepo(..), LocalRepo (..), emptyRemoteRepo , AllowOlder(..), AllowNewer(..), RelaxDeps(..), isRelaxDeps , RepoName (..), unRepoName ) import Distribution.Client.Types.Credentials (Username (..), Password (..)) import Distribution.Client.BuildReports.Types ( ReportLevel(..) ) import qualified Distribution.Client.Init.Types as IT ( InitFlags(..) ) import qualified Distribution.Client.Init.Defaults as IT import Distribution.Client.Setup ( GlobalFlags(..), globalCommand, defaultGlobalFlags , ConfigExFlags(..), configureExOptions, defaultConfigExFlags , initOptions , InstallFlags(..), installOptions, defaultInstallFlags , UploadFlags(..), uploadCommand , ReportFlags(..), reportCommand ) import Distribution.Client.CmdInstall.ClientInstallFlags ( ClientInstallFlags(..), defaultClientInstallFlags , clientInstallOptions ) import Distribution.Utils.NubList ( NubList, fromNubList, toNubList, overNubList ) import Distribution.Simple.Compiler ( DebugInfoLevel(..), OptimisationLevel(..) 
) import Distribution.Simple.Setup ( ConfigFlags(..), configureOptions, defaultConfigFlags , HaddockFlags(..), haddockOptions, defaultHaddockFlags , TestFlags(..), defaultTestFlags , BenchmarkFlags(..), defaultBenchmarkFlags , installDirsOptions, optionDistPref , programDbPaths', programDbOptions , Flag(..), toFlag, flagToMaybe, fromFlagOrDefault ) import Distribution.Simple.InstallDirs ( InstallDirs(..), defaultInstallDirs , PathTemplate, toPathTemplate ) import Distribution.Deprecated.ParseUtils ( FieldDescr(..), liftField, runP , ParseResult(..), PError(..), PWarning(..) , locatedErrorMsg, showPWarning , readFields, warning, lineNo , simpleField, listField, spaceListField , parseOptCommaList, parseTokenQ, syntaxError , simpleFieldParsec, listFieldParsec ) import Distribution.Client.ParseUtils ( parseFields, ppFields, ppSection ) import Distribution.Client.HttpUtils ( isOldHackageURI ) import qualified Distribution.Deprecated.ParseUtils as ParseUtils ( Field(..) ) import Distribution.Simple.Command ( CommandUI(commandOptions), commandDefaultFlags, ShowOrParseArgs(..) 
) import Distribution.Simple.Program ( defaultProgramDb ) import Distribution.Simple.Utils ( die', notice, warn, lowercase, cabalVersion, toUTF8BS ) import Distribution.Client.Version ( cabalInstallVersion ) import Distribution.Compiler ( CompilerFlavor(..), defaultCompilerFlavor ) import Distribution.Verbosity ( normal ) import qualified Distribution.Compat.CharParsing as P import Distribution.Client.ProjectFlags (ProjectFlags (..)) import Distribution.Solver.Types.ConstraintSource import qualified Text.PrettyPrint as Disp ( render, text, empty ) import Distribution.Parsec (parsecOptCommaList, ParsecParser, parsecToken, parsecFilePath) import Text.PrettyPrint ( ($+$) ) import Text.PrettyPrint.HughesPJ ( text, Doc ) import System.Directory ( createDirectoryIfMissing, getAppUserDataDirectory, renameFile ) import Network.URI ( URI(..), URIAuth(..), parseURI ) import System.FilePath ( (<.>), (), takeDirectory ) import System.IO.Error ( isDoesNotExistError ) import Distribution.Compat.Environment ( getEnvironment, lookupEnv ) import qualified Data.Map as M import qualified Data.ByteString as BS -- -- * Configuration saved in the config file -- data SavedConfig = SavedConfig { savedGlobalFlags :: GlobalFlags , savedInitFlags :: IT.InitFlags , savedInstallFlags :: InstallFlags , savedClientInstallFlags :: ClientInstallFlags , savedConfigureFlags :: ConfigFlags , savedConfigureExFlags :: ConfigExFlags , savedUserInstallDirs :: InstallDirs (Flag PathTemplate) , savedGlobalInstallDirs :: InstallDirs (Flag PathTemplate) , savedUploadFlags :: UploadFlags , savedReportFlags :: ReportFlags , savedHaddockFlags :: HaddockFlags , savedTestFlags :: TestFlags , savedBenchmarkFlags :: BenchmarkFlags , savedProjectFlags :: ProjectFlags } deriving Generic instance Monoid SavedConfig where mempty = gmempty mappend = (<>) instance Semigroup SavedConfig where a <> b = SavedConfig { savedGlobalFlags = combinedSavedGlobalFlags, savedInitFlags = combinedSavedInitFlags, savedInstallFlags = 
combinedSavedInstallFlags, savedClientInstallFlags = combinedSavedClientInstallFlags, savedConfigureFlags = combinedSavedConfigureFlags, savedConfigureExFlags = combinedSavedConfigureExFlags, savedUserInstallDirs = combinedSavedUserInstallDirs, savedGlobalInstallDirs = combinedSavedGlobalInstallDirs, savedUploadFlags = combinedSavedUploadFlags, savedReportFlags = combinedSavedReportFlags, savedHaddockFlags = combinedSavedHaddockFlags, savedTestFlags = combinedSavedTestFlags, savedBenchmarkFlags = combinedSavedBenchmarkFlags, savedProjectFlags = combinedSavedProjectFlags } where -- This is ugly, but necessary. If we're mappending two config files, we -- want the values of the *non-empty* list fields from the second one to -- *override* the corresponding values from the first one. Default -- behaviour (concatenation) is confusing and makes some use cases (see -- #1884) impossible. -- -- However, we also want to allow specifying multiple values for a list -- field in a *single* config file. For example, we want the following to -- continue to work: -- -- remote-repo: hackage.haskell.org:http://hackage.haskell.org/ -- remote-repo: private-collection:http://hackage.local/ -- -- So we can't just wrap the list fields inside Flags; we have to do some -- special-casing just for SavedConfig. -- NB: the signature prevents us from using 'combine' on lists. combine' :: (SavedConfig -> flags) -> (flags -> Flag a) -> Flag a combine' field subfield = (subfield . field $ a) `mappend` (subfield . field $ b) combineMonoid :: Monoid mon => (SavedConfig -> flags) -> (flags -> mon) -> mon combineMonoid field subfield = (subfield . field $ a) `mappend` (subfield . field $ b) lastNonEmpty' :: (SavedConfig -> flags) -> (flags -> [a]) -> [a] lastNonEmpty' field subfield = let a' = subfield . field $ a b' = subfield . field $ b in case b' of [] -> a' _ -> b' lastNonMempty' :: (Eq a, Monoid a) => (SavedConfig -> flags) -> (flags -> a) -> a lastNonMempty' field subfield = let a' = subfield . 
field $ a b' = subfield . field $ b in if b' == mempty then a' else b' lastNonEmptyNL' :: (SavedConfig -> flags) -> (flags -> NubList a) -> NubList a lastNonEmptyNL' field subfield = let a' = subfield . field $ a b' = subfield . field $ b in case fromNubList b' of [] -> a' _ -> b' combinedSavedGlobalFlags = GlobalFlags { globalVersion = combine globalVersion, globalNumericVersion = combine globalNumericVersion, globalConfigFile = combine globalConfigFile, globalConstraintsFile = combine globalConstraintsFile, globalRemoteRepos = lastNonEmptyNL globalRemoteRepos, globalCacheDir = combine globalCacheDir, globalLocalNoIndexRepos = lastNonEmptyNL globalLocalNoIndexRepos, globalActiveRepos = combine globalActiveRepos, globalLogsDir = combine globalLogsDir, globalIgnoreExpiry = combine globalIgnoreExpiry, globalHttpTransport = combine globalHttpTransport, globalNix = combine globalNix, globalStoreDir = combine globalStoreDir, globalProgPathExtra = lastNonEmptyNL globalProgPathExtra } where combine = combine' savedGlobalFlags lastNonEmptyNL = lastNonEmptyNL' savedGlobalFlags combinedSavedInitFlags = IT.InitFlags { IT.applicationDirs = combineMonoid savedInitFlags IT.applicationDirs, IT.author = combine IT.author, IT.buildTools = combineMonoid savedInitFlags IT.buildTools, IT.cabalVersion = combine IT.cabalVersion, IT.category = combine IT.category, IT.dependencies = combineMonoid savedInitFlags IT.dependencies, IT.email = combine IT.email, IT.exposedModules = combineMonoid savedInitFlags IT.exposedModules, IT.extraSrc = combineMonoid savedInitFlags IT.extraSrc, IT.extraDoc = combineMonoid savedInitFlags IT.extraDoc, IT.homepage = combine IT.homepage, IT.initHcPath = combine IT.initHcPath, IT.initVerbosity = combine IT.initVerbosity, IT.initializeTestSuite = combine IT.initializeTestSuite, IT.interactive = combine IT.interactive, IT.language = combine IT.language, IT.license = combine IT.license, IT.mainIs = combine IT.mainIs, IT.minimal = combine IT.minimal, IT.noComments 
= combine IT.noComments, IT.otherExts = combineMonoid savedInitFlags IT.otherExts, IT.otherModules = combineMonoid savedInitFlags IT.otherModules, IT.overwrite = combine IT.overwrite, IT.packageDir = combine IT.packageDir, IT.packageName = combine IT.packageName, IT.packageType = combine IT.packageType, IT.quiet = combine IT.quiet, IT.simpleProject = combine IT.simpleProject, IT.sourceDirs = combineMonoid savedInitFlags IT.sourceDirs, IT.synopsis = combine IT.synopsis, IT.testDirs = combineMonoid savedInitFlags IT.testDirs, IT.version = combine IT.version } where combine = combine' savedInitFlags combinedSavedInstallFlags = InstallFlags { installDocumentation = combine installDocumentation, installHaddockIndex = combine installHaddockIndex, installDryRun = combine installDryRun, installOnlyDownload = combine installOnlyDownload, installDest = combine installDest, installMaxBackjumps = combine installMaxBackjumps, installReorderGoals = combine installReorderGoals, installCountConflicts = combine installCountConflicts, installFineGrainedConflicts = combine installFineGrainedConflicts, installMinimizeConflictSet = combine installMinimizeConflictSet, installIndependentGoals = combine installIndependentGoals, installShadowPkgs = combine installShadowPkgs, installStrongFlags = combine installStrongFlags, installAllowBootLibInstalls = combine installAllowBootLibInstalls, installOnlyConstrained = combine installOnlyConstrained, installReinstall = combine installReinstall, installAvoidReinstalls = combine installAvoidReinstalls, installOverrideReinstall = combine installOverrideReinstall, installUpgradeDeps = combine installUpgradeDeps, installOnly = combine installOnly, installOnlyDeps = combine installOnlyDeps, installIndexState = combine installIndexState, installRootCmd = combine installRootCmd, installSummaryFile = lastNonEmptyNL installSummaryFile, installLogFile = combine installLogFile, installBuildReports = combine installBuildReports, installReportPlanningFailure 
= combine installReportPlanningFailure, installSymlinkBinDir = combine installSymlinkBinDir, installPerComponent = combine installPerComponent, installNumJobs = combine installNumJobs, installKeepGoing = combine installKeepGoing, installRunTests = combine installRunTests, installOfflineMode = combine installOfflineMode } where combine = combine' savedInstallFlags lastNonEmptyNL = lastNonEmptyNL' savedInstallFlags combinedSavedClientInstallFlags = ClientInstallFlags { cinstInstallLibs = combine cinstInstallLibs , cinstEnvironmentPath = combine cinstEnvironmentPath , cinstOverwritePolicy = combine cinstOverwritePolicy , cinstInstallMethod = combine cinstInstallMethod , cinstInstalldir = combine cinstInstalldir } where combine = combine' savedClientInstallFlags combinedSavedConfigureFlags = ConfigFlags { configArgs = lastNonEmpty configArgs, configPrograms_ = configPrograms_ . savedConfigureFlags $ b, -- TODO: NubListify configProgramPaths = lastNonEmpty configProgramPaths, -- TODO: NubListify configProgramArgs = lastNonEmpty configProgramArgs, configProgramPathExtra = lastNonEmptyNL configProgramPathExtra, configInstantiateWith = lastNonEmpty configInstantiateWith, configHcFlavor = combine configHcFlavor, configHcPath = combine configHcPath, configHcPkg = combine configHcPkg, configVanillaLib = combine configVanillaLib, configProfLib = combine configProfLib, configProf = combine configProf, configSharedLib = combine configSharedLib, configStaticLib = combine configStaticLib, configDynExe = combine configDynExe, configFullyStaticExe = combine configFullyStaticExe, configProfExe = combine configProfExe, configProfDetail = combine configProfDetail, configProfLibDetail = combine configProfLibDetail, -- TODO: NubListify configConfigureArgs = lastNonEmpty configConfigureArgs, configOptimization = combine configOptimization, configDebugInfo = combine configDebugInfo, configProgPrefix = combine configProgPrefix, configProgSuffix = combine configProgSuffix, -- Parametrised by 
(Flag PathTemplate), so safe to use 'mappend'. configInstallDirs = (configInstallDirs . savedConfigureFlags $ a) `mappend` (configInstallDirs . savedConfigureFlags $ b), configScratchDir = combine configScratchDir, -- TODO: NubListify configExtraLibDirs = lastNonEmpty configExtraLibDirs, configExtraLibDirsStatic = lastNonEmpty configExtraLibDirsStatic, -- TODO: NubListify configExtraFrameworkDirs = lastNonEmpty configExtraFrameworkDirs, -- TODO: NubListify configExtraIncludeDirs = lastNonEmpty configExtraIncludeDirs, configDeterministic = combine configDeterministic, configIPID = combine configIPID, configCID = combine configCID, configDistPref = combine configDistPref, configCabalFilePath = combine configCabalFilePath, configVerbosity = combine configVerbosity, configUserInstall = combine configUserInstall, -- TODO: NubListify configPackageDBs = lastNonEmpty configPackageDBs, configGHCiLib = combine configGHCiLib, configSplitSections = combine configSplitSections, configSplitObjs = combine configSplitObjs, configStripExes = combine configStripExes, configStripLibs = combine configStripLibs, -- TODO: NubListify configConstraints = lastNonEmpty configConstraints, -- TODO: NubListify configDependencies = lastNonEmpty configDependencies, -- TODO: NubListify configConfigurationsFlags = lastNonMempty configConfigurationsFlags, configTests = combine configTests, configBenchmarks = combine configBenchmarks, configCoverage = combine configCoverage, configLibCoverage = combine configLibCoverage, configExactConfiguration = combine configExactConfiguration, configFlagError = combine configFlagError, configRelocatable = combine configRelocatable, configUseResponseFiles = combine configUseResponseFiles, configDumpBuildInfo = combine configDumpBuildInfo, configAllowDependingOnPrivateLibs = combine configAllowDependingOnPrivateLibs } where combine = combine' savedConfigureFlags lastNonEmpty = lastNonEmpty' savedConfigureFlags lastNonEmptyNL = lastNonEmptyNL' savedConfigureFlags 
lastNonMempty = lastNonMempty' savedConfigureFlags combinedSavedConfigureExFlags = ConfigExFlags { configCabalVersion = combine configCabalVersion, configAppend = combine configAppend, configBackup = combine configBackup, -- TODO: NubListify configExConstraints = lastNonEmpty configExConstraints, -- TODO: NubListify configPreferences = lastNonEmpty configPreferences, configSolver = combine configSolver, configAllowNewer = combineMonoid savedConfigureExFlags configAllowNewer, configAllowOlder = combineMonoid savedConfigureExFlags configAllowOlder, configWriteGhcEnvironmentFilesPolicy = combine configWriteGhcEnvironmentFilesPolicy } where combine = combine' savedConfigureExFlags lastNonEmpty = lastNonEmpty' savedConfigureExFlags -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. combinedSavedUserInstallDirs = savedUserInstallDirs a `mappend` savedUserInstallDirs b -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. combinedSavedGlobalInstallDirs = savedGlobalInstallDirs a `mappend` savedGlobalInstallDirs b combinedSavedUploadFlags = UploadFlags { uploadCandidate = combine uploadCandidate, uploadDoc = combine uploadDoc, uploadUsername = combine uploadUsername, uploadPassword = combine uploadPassword, uploadPasswordCmd = combine uploadPasswordCmd, uploadVerbosity = combine uploadVerbosity } where combine = combine' savedUploadFlags combinedSavedReportFlags = ReportFlags { reportUsername = combine reportUsername, reportPassword = combine reportPassword, reportVerbosity = combine reportVerbosity } where combine = combine' savedReportFlags combinedSavedHaddockFlags = HaddockFlags { -- TODO: NubListify haddockProgramPaths = lastNonEmpty haddockProgramPaths, -- TODO: NubListify haddockProgramArgs = lastNonEmpty haddockProgramArgs, haddockHoogle = combine haddockHoogle, haddockHtml = combine haddockHtml, haddockHtmlLocation = combine haddockHtmlLocation, haddockForHackage = combine haddockForHackage, haddockExecutables = combine 
haddockExecutables, haddockTestSuites = combine haddockTestSuites, haddockBenchmarks = combine haddockBenchmarks, haddockForeignLibs = combine haddockForeignLibs, haddockInternal = combine haddockInternal, haddockCss = combine haddockCss, haddockLinkedSource = combine haddockLinkedSource, haddockQuickJump = combine haddockQuickJump, haddockHscolourCss = combine haddockHscolourCss, haddockContents = combine haddockContents, haddockDistPref = combine haddockDistPref, haddockKeepTempFiles = combine haddockKeepTempFiles, haddockVerbosity = combine haddockVerbosity, haddockCabalFilePath = combine haddockCabalFilePath, haddockArgs = lastNonEmpty haddockArgs } where combine = combine' savedHaddockFlags lastNonEmpty = lastNonEmpty' savedHaddockFlags combinedSavedTestFlags = TestFlags { testDistPref = combine testDistPref, testVerbosity = combine testVerbosity, testHumanLog = combine testHumanLog, testMachineLog = combine testMachineLog, testShowDetails = combine testShowDetails, testKeepTix = combine testKeepTix, testWrapper = combine testWrapper, testFailWhenNoTestSuites = combine testFailWhenNoTestSuites, testOptions = lastNonEmpty testOptions } where combine = combine' savedTestFlags lastNonEmpty = lastNonEmpty' savedTestFlags combinedSavedBenchmarkFlags = BenchmarkFlags { benchmarkDistPref = combine benchmarkDistPref, benchmarkVerbosity = combine benchmarkVerbosity, benchmarkOptions = lastNonEmpty benchmarkOptions } where combine = combine' savedBenchmarkFlags lastNonEmpty = lastNonEmpty' savedBenchmarkFlags combinedSavedProjectFlags = ProjectFlags { flagProjectFileName = combine flagProjectFileName , flagIgnoreProject = combine flagIgnoreProject } where combine = combine' savedProjectFlags -- -- * Default config -- -- | These are the absolute basic defaults. The fields that must be -- initialised. When we load the config from the file we layer the loaded -- values over these ones, so any missing fields in the file take their values -- from here. 
-- baseSavedConfig :: IO SavedConfig baseSavedConfig = do userPrefix <- getCabalDir cacheDir <- defaultCacheDir logsDir <- defaultLogsDir return mempty { savedConfigureFlags = mempty { configHcFlavor = toFlag defaultCompiler, configUserInstall = toFlag defaultUserInstall, configVerbosity = toFlag normal }, savedUserInstallDirs = mempty { prefix = toFlag (toPathTemplate userPrefix) }, savedGlobalFlags = mempty { globalCacheDir = toFlag cacheDir, globalLogsDir = toFlag logsDir } } -- | This is the initial configuration that we write out to the config file -- if the file does not exist (or the config we use if the file cannot be read -- for some other reason). When the config gets loaded it gets layered on top -- of 'baseSavedConfig' so we do not need to include it into the initial -- values we save into the config file. -- initialSavedConfig :: IO SavedConfig initialSavedConfig = do cacheDir <- defaultCacheDir logsDir <- defaultLogsDir extraPath <- defaultExtraPath installPath <- defaultInstallPath return mempty { savedGlobalFlags = mempty { globalCacheDir = toFlag cacheDir, globalRemoteRepos = toNubList [defaultRemoteRepo] }, savedConfigureFlags = mempty { configProgramPathExtra = toNubList extraPath }, savedInstallFlags = mempty { installSummaryFile = toNubList [toPathTemplate (logsDir "build.log")], installBuildReports= toFlag NoReports, installNumJobs = toFlag Nothing }, savedClientInstallFlags = mempty { cinstInstalldir = toFlag installPath } } defaultCabalDir :: IO FilePath defaultCabalDir = getAppUserDataDirectory "cabal" getCabalDir :: IO FilePath getCabalDir = do mDir <- lookupEnv "CABAL_DIR" case mDir of Nothing -> defaultCabalDir Just dir -> return dir defaultConfigFile :: IO FilePath defaultConfigFile = do dir <- getCabalDir return $ dir "config" defaultCacheDir :: IO FilePath defaultCacheDir = do dir <- getCabalDir return $ dir "packages" defaultLogsDir :: IO FilePath defaultLogsDir = do dir <- getCabalDir return $ dir "logs" defaultExtraPath :: IO 
[FilePath] defaultExtraPath = do dir <- getCabalDir return [dir "bin"] defaultInstallPath :: IO FilePath defaultInstallPath = do dir <- getCabalDir return (dir "bin") defaultCompiler :: CompilerFlavor defaultCompiler = fromMaybe GHC defaultCompilerFlavor defaultUserInstall :: Bool defaultUserInstall = True -- We do per-user installs by default on all platforms. We used to default to -- global installs on Windows but that no longer works on Windows Vista or 7. defaultRemoteRepo :: RemoteRepo defaultRemoteRepo = RemoteRepo name uri Nothing [] 0 False where str = "hackage.haskell.org" name = RepoName str uri = URI "http:" (Just (URIAuth "" str "")) "/" "" "" -- Note that lots of old ~/.cabal/config files will have the old url -- http://hackage.haskell.org/packages/archive -- but new config files can use the new url (without the /packages/archive) -- and avoid having to do a http redirect -- For the default repo we know extra information, fill this in. -- -- We need this because the 'defaultRemoteRepo' above is only used for the -- first time when a config file is made. So for users with older config files -- we might have only have older info. This lets us fill that in even for old -- config files. -- addInfoForKnownRepos :: RemoteRepo -> RemoteRepo addInfoForKnownRepos repo | remoteRepoName repo == remoteRepoName defaultRemoteRepo = useSecure . tryHttps . 
fixOldURI $ repo where fixOldURI r | isOldHackageURI (remoteRepoURI r) = r { remoteRepoURI = remoteRepoURI defaultRemoteRepo } | otherwise = r tryHttps r = r { remoteRepoShouldTryHttps = True } useSecure r@RemoteRepo{ remoteRepoSecure = secure, remoteRepoRootKeys = [], remoteRepoKeyThreshold = 0 } | secure /= Just False = r { -- Use hackage-security by default unless you opt-out with -- secure: False remoteRepoSecure = Just True, remoteRepoRootKeys = defaultHackageRemoteRepoKeys, remoteRepoKeyThreshold = defaultHackageRemoteRepoKeyThreshold } useSecure r = r addInfoForKnownRepos other = other -- | The current hackage.haskell.org repo root keys that we ship with cabal. --- -- This lets us bootstrap trust in this repo without user intervention. -- These keys need to be periodically updated when new root keys are added. -- See the root key procedures for details. -- defaultHackageRemoteRepoKeys :: [String] defaultHackageRemoteRepoKeys = [ "fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0", "1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42", "2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3", "0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d", "51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921" ] -- | The required threshold of root key signatures for hackage.haskell.org -- defaultHackageRemoteRepoKeyThreshold :: Int defaultHackageRemoteRepoKeyThreshold = 3 -- -- * Config file reading -- -- | Loads the main configuration, and applies additional defaults to give the -- effective configuration. To loads just what is actually in the config file, -- use 'loadRawConfig'. 
-- loadConfig :: Verbosity -> Flag FilePath -> IO SavedConfig loadConfig verbosity configFileFlag = do config <- loadRawConfig verbosity configFileFlag extendToEffectiveConfig config extendToEffectiveConfig :: SavedConfig -> IO SavedConfig extendToEffectiveConfig config = do base <- baseSavedConfig let effective0 = base `mappend` config globalFlags0 = savedGlobalFlags effective0 effective = effective0 { savedGlobalFlags = globalFlags0 { globalRemoteRepos = overNubList (map addInfoForKnownRepos) (globalRemoteRepos globalFlags0) } } return effective -- | Like 'loadConfig' but does not apply any additional defaults, it just -- loads what is actually in the config file. This is thus suitable for -- comparing or editing a config file, but not suitable for using as the -- effective configuration. -- loadRawConfig :: Verbosity -> Flag FilePath -> IO SavedConfig loadRawConfig verbosity configFileFlag = do (source, configFile) <- getConfigFilePathAndSource configFileFlag minp <- readConfigFile mempty configFile case minp of Nothing -> do notice verbosity $ "Config file path source is " ++ sourceMsg source ++ "." -- 2021-10-07, issue #7705 -- Only create default config file if name was not given explicitly -- via option --config-file or environment variable. 
case source of Default -> do notice verbosity msgNotFound createDefaultConfigFile verbosity [] configFile CommandlineOption -> failNoConfigFile EnvironmentVariable -> failNoConfigFile where msgNotFound = unwords [ "Config file not found:", configFile ] failNoConfigFile = die' verbosity $ unlines [ msgNotFound , "(Config files can be created via the cabal-command 'user-config init'.)" ] Just (ParseOk ws conf) -> do unless (null ws) $ warn verbosity $ unlines (map (showPWarning configFile) ws) return conf Just (ParseFailed err) -> do let (line, msg) = locatedErrorMsg err die' verbosity $ "Error parsing config file " ++ configFile ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg where sourceMsg CommandlineOption = "commandline option" sourceMsg EnvironmentVariable = "environment variable CABAL_CONFIG" sourceMsg Default = "default config file" -- | Provenance of the config file. data ConfigFileSource = CommandlineOption | EnvironmentVariable | Default -- | Returns the config file path, without checking that the file exists. -- The order of precedence is: input flag, CABAL_CONFIG, default location. getConfigFilePath :: Flag FilePath -> IO FilePath getConfigFilePath = fmap snd . getConfigFilePathAndSource getConfigFilePathAndSource :: Flag FilePath -> IO (ConfigFileSource, FilePath) getConfigFilePathAndSource configFileFlag = getSource sources where sources = [ (CommandlineOption, return . flagToMaybe $ configFileFlag) , (EnvironmentVariable, lookup "CABAL_CONFIG" `liftM` getEnvironment) , (Default, Just `liftM` defaultConfigFile) ] getSource [] = error "no config file path candidate found." getSource ((source,action): xs) = action >>= maybe (getSource xs) (return . (,) source) readConfigFile :: SavedConfig -> FilePath -> IO (Maybe (ParseResult SavedConfig)) readConfigFile initial file = handleNotExists $ fmap (Just . 
parseConfig (ConstraintSourceMainConfig file) initial) (BS.readFile file) where handleNotExists action = catchIO action $ \ioe -> if isDoesNotExistError ioe then return Nothing else ioError ioe createDefaultConfigFile :: Verbosity -> [String] -> FilePath -> IO SavedConfig createDefaultConfigFile verbosity extraLines filePath = do commentConf <- commentSavedConfig initialConf <- initialSavedConfig extraConf <- parseExtraLines verbosity extraLines notice verbosity $ "Writing default configuration to " ++ filePath writeConfigFile filePath commentConf (initialConf `mappend` extraConf) return initialConf writeConfigFile :: FilePath -> SavedConfig -> SavedConfig -> IO () writeConfigFile file comments vals = do let tmpFile = file <.> "tmp" createDirectoryIfMissing True (takeDirectory file) writeFile tmpFile $ explanation ++ showConfigWithComments comments vals ++ "\n" renameFile tmpFile file where explanation = unlines ["-- This is the configuration file for the 'cabal' command line tool." ,"--" ,"-- The available configuration options are listed below." ,"-- Some of them have default values listed." ,"--" ,"-- Lines (like this one) beginning with '--' are comments." ,"-- Be careful with spaces and indentation because they are" ,"-- used to indicate layout for nested sections." ,"--" ,"-- This config file was generated using the following versions" ,"-- of Cabal and cabal-install:" ,"-- Cabal library version: " ++ prettyShow cabalVersion ,"-- cabal-install version: " ++ prettyShow cabalInstallVersion ,"","" ] -- | These are the default values that get used in Cabal if a no value is -- given. We use these here to include in comments when we write out the -- initial config file so that the user can see what default value they are -- overriding. 
-- commentSavedConfig :: IO SavedConfig commentSavedConfig = do userInstallDirs <- defaultInstallDirs defaultCompiler True True globalInstallDirs <- defaultInstallDirs defaultCompiler False True let conf0 = mempty { savedGlobalFlags = defaultGlobalFlags { globalRemoteRepos = toNubList [defaultRemoteRepo] }, savedInitFlags = mempty { IT.interactive = toFlag False, IT.cabalVersion = toFlag IT.defaultCabalVersion, IT.language = toFlag Haskell2010, IT.license = NoFlag, IT.sourceDirs = Flag [IT.defaultSourceDir], IT.applicationDirs = Flag [IT.defaultApplicationDir] }, savedInstallFlags = defaultInstallFlags, savedClientInstallFlags= defaultClientInstallFlags, savedConfigureExFlags = defaultConfigExFlags { configAllowNewer = Just (AllowNewer mempty), configAllowOlder = Just (AllowOlder mempty) }, savedConfigureFlags = (defaultConfigFlags defaultProgramDb) { configUserInstall = toFlag defaultUserInstall }, savedUserInstallDirs = fmap toFlag userInstallDirs, savedGlobalInstallDirs = fmap toFlag globalInstallDirs, savedUploadFlags = commandDefaultFlags uploadCommand, savedReportFlags = commandDefaultFlags reportCommand, savedHaddockFlags = defaultHaddockFlags, savedTestFlags = defaultTestFlags, savedBenchmarkFlags = defaultBenchmarkFlags } conf1 <- extendToEffectiveConfig conf0 let globalFlagsConf1 = savedGlobalFlags conf1 conf2 = conf1 { savedGlobalFlags = globalFlagsConf1 { globalRemoteRepos = overNubList (map removeRootKeys) (globalRemoteRepos globalFlagsConf1) } } return conf2 where -- Most people don't want to see default root keys, so don't print them. removeRootKeys :: RemoteRepo -> RemoteRepo removeRootKeys r = r { remoteRepoRootKeys = [] } -- | All config file fields. 
-- configFieldDescriptions :: ConstraintSource -> [FieldDescr SavedConfig] configFieldDescriptions src = toSavedConfig liftGlobalFlag (commandOptions (globalCommand []) ParseArgs) ["version", "numeric-version", "config-file"] [] ++ toSavedConfig liftConfigFlag (configureOptions ParseArgs) (["builddir", "constraint", "dependency", "ipid"] ++ map fieldName installDirsFields) -- This is only here because viewAsFieldDescr gives us a parser -- that only recognises 'ghc' etc, the case-sensitive flag names, not -- what the normal case-insensitive parser gives us. [simpleFieldParsec "compiler" (fromFlagOrDefault Disp.empty . fmap pretty) (Flag <$> parsec <|> pure NoFlag) configHcFlavor (\v flags -> flags { configHcFlavor = v }) -- TODO: The following is a temporary fix. The "optimization" -- and "debug-info" fields are OptArg, and viewAsFieldDescr -- fails on that. Instead of a hand-written hackaged parser -- and printer, we should handle this case properly in the -- library. ,liftField configOptimization (\v flags -> flags { configOptimization = v }) $ let name = "optimization" in FieldDescr name (\f -> case f of Flag NoOptimisation -> Disp.text "False" Flag NormalOptimisation -> Disp.text "True" Flag MaximumOptimisation -> Disp.text "2" _ -> Disp.empty) (\line str _ -> case () of _ | str == "False" -> ParseOk [] (Flag NoOptimisation) | str == "True" -> ParseOk [] (Flag NormalOptimisation) | str == "0" -> ParseOk [] (Flag NoOptimisation) | str == "1" -> ParseOk [] (Flag NormalOptimisation) | str == "2" -> ParseOk [] (Flag MaximumOptimisation) | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation) | lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation) | otherwise -> ParseFailed (NoParse name line) where lstr = lowercase str caseWarning = PWarning $ "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") ,liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $ let name = "debug-info" in FieldDescr name (\f -> case f of 
Flag NoDebugInfo -> Disp.text "False" Flag MinimalDebugInfo -> Disp.text "1" Flag NormalDebugInfo -> Disp.text "True" Flag MaximalDebugInfo -> Disp.text "3" _ -> Disp.empty) (\line str _ -> case () of _ | str == "False" -> ParseOk [] (Flag NoDebugInfo) | str == "True" -> ParseOk [] (Flag NormalDebugInfo) | str == "0" -> ParseOk [] (Flag NoDebugInfo) | str == "1" -> ParseOk [] (Flag MinimalDebugInfo) | str == "2" -> ParseOk [] (Flag NormalDebugInfo) | str == "3" -> ParseOk [] (Flag MaximalDebugInfo) | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo) | lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo) | otherwise -> ParseFailed (NoParse name line) where lstr = lowercase str caseWarning = PWarning $ "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") ] ++ toSavedConfig liftConfigExFlag (configureExOptions ParseArgs src) [] [let pkgs = (Just . AllowOlder . RelaxDepsSome) `fmap` parsecOptCommaList parsec parseAllowOlder = ((Just . AllowOlder . toRelaxDeps) `fmap` parsec) <|> pkgs in simpleFieldParsec "allow-older" (showRelaxDeps . fmap unAllowOlder) parseAllowOlder configAllowOlder (\v flags -> flags { configAllowOlder = v }) ,let pkgs = (Just . AllowNewer . RelaxDepsSome) `fmap` parsecOptCommaList parsec parseAllowNewer = ((Just . AllowNewer . toRelaxDeps) `fmap` parsec) <|> pkgs in simpleFieldParsec "allow-newer" (showRelaxDeps . fmap unAllowNewer) parseAllowNewer configAllowNewer (\v flags -> flags { configAllowNewer = v }) ] ++ toSavedConfig liftInstallFlag (installOptions ParseArgs) ["dry-run", "only", "only-dependencies", "dependencies-only"] [] ++ toSavedConfig liftClientInstallFlag (clientInstallOptions ParseArgs) [] [] ++ toSavedConfig liftUploadFlag (commandOptions uploadCommand ParseArgs) ["verbose", "check", "documentation", "publish"] [] ++ toSavedConfig liftReportFlag (commandOptions reportCommand ParseArgs) ["verbose", "username", "password"] [] --FIXME: this is a hack, hiding the user name and password. 
-- But otherwise it masks the upload ones. Either need to -- share the options or make then distinct. In any case -- they should probably be per-server. ++ [ viewAsFieldDescr $ optionDistPref (configDistPref . savedConfigureFlags) (\distPref config -> config { savedConfigureFlags = (savedConfigureFlags config) { configDistPref = distPref } , savedHaddockFlags = (savedHaddockFlags config) { haddockDistPref = distPref } } ) ParseArgs ] where toSavedConfig lift options exclusions replacements = [ lift (fromMaybe field replacement) | opt <- options , let field = viewAsFieldDescr opt name = fieldName field replacement = find ((== name) . fieldName) replacements , name `notElem` exclusions ] showRelaxDeps Nothing = mempty showRelaxDeps (Just rd) | isRelaxDeps rd = Disp.text "True" | otherwise = Disp.text "False" toRelaxDeps True = RelaxDepsAll toRelaxDeps False = mempty -- TODO: next step, make the deprecated fields elicit a warning. -- deprecatedFieldDescriptions :: [FieldDescr SavedConfig] deprecatedFieldDescriptions = [ liftGlobalFlag $ listFieldParsec "repos" pretty parsec (fromNubList . globalRemoteRepos) (\rs cfg -> cfg { globalRemoteRepos = toNubList rs }) , liftGlobalFlag $ simpleFieldParsec "cachedir" (Disp.text . fromFlagOrDefault "") (optionalFlag parsecFilePath) globalCacheDir (\d cfg -> cfg { globalCacheDir = d }) , liftUploadFlag $ simpleFieldParsec "hackage-username" (Disp.text . fromFlagOrDefault "" . fmap unUsername) (optionalFlag (fmap Username parsecToken)) uploadUsername (\d cfg -> cfg { uploadUsername = d }) , liftUploadFlag $ simpleFieldParsec "hackage-password" (Disp.text . fromFlagOrDefault "" . fmap unPassword) (optionalFlag (fmap Password parsecToken)) uploadPassword (\d cfg -> cfg { uploadPassword = d }) , liftUploadFlag $ spaceListField "hackage-password-command" Disp.text parseTokenQ (fromFlagOrDefault [] . uploadPasswordCmd) (\d cfg -> cfg { uploadPasswordCmd = Flag d }) ] ++ map (modifyFieldName ("user-"++) . 
liftUserInstallDirs) installDirsFields ++ map (modifyFieldName ("global-"++) . liftGlobalInstallDirs) installDirsFields where optionalFlag :: ParsecParser a -> ParsecParser (Flag a) optionalFlag p = toFlag <$> p <|> pure mempty modifyFieldName :: (String -> String) -> FieldDescr a -> FieldDescr a modifyFieldName f d = d { fieldName = f (fieldName d) } liftUserInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) -> FieldDescr SavedConfig liftUserInstallDirs = liftField savedUserInstallDirs (\flags conf -> conf { savedUserInstallDirs = flags }) liftGlobalInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) -> FieldDescr SavedConfig liftGlobalInstallDirs = liftField savedGlobalInstallDirs (\flags conf -> conf { savedGlobalInstallDirs = flags }) liftGlobalFlag :: FieldDescr GlobalFlags -> FieldDescr SavedConfig liftGlobalFlag = liftField savedGlobalFlags (\flags conf -> conf { savedGlobalFlags = flags }) liftConfigFlag :: FieldDescr ConfigFlags -> FieldDescr SavedConfig liftConfigFlag = liftField savedConfigureFlags (\flags conf -> conf { savedConfigureFlags = flags }) liftConfigExFlag :: FieldDescr ConfigExFlags -> FieldDescr SavedConfig liftConfigExFlag = liftField savedConfigureExFlags (\flags conf -> conf { savedConfigureExFlags = flags }) liftInstallFlag :: FieldDescr InstallFlags -> FieldDescr SavedConfig liftInstallFlag = liftField savedInstallFlags (\flags conf -> conf { savedInstallFlags = flags }) liftClientInstallFlag :: FieldDescr ClientInstallFlags -> FieldDescr SavedConfig liftClientInstallFlag = liftField savedClientInstallFlags (\flags conf -> conf { savedClientInstallFlags = flags }) liftUploadFlag :: FieldDescr UploadFlags -> FieldDescr SavedConfig liftUploadFlag = liftField savedUploadFlags (\flags conf -> conf { savedUploadFlags = flags }) liftReportFlag :: FieldDescr ReportFlags -> FieldDescr SavedConfig liftReportFlag = liftField savedReportFlags (\flags conf -> conf { savedReportFlags = flags }) parseConfig :: ConstraintSource -> 
SavedConfig -> BS.ByteString -> ParseResult SavedConfig parseConfig src initial = \str -> do fields <- readFields str let (knownSections, others) = partition isKnownSection fields config <- parse others let init0 = savedInitFlags config user0 = savedUserInstallDirs config global0 = savedGlobalInstallDirs config (remoteRepoSections0, localRepoSections0, haddockFlags, initFlags, user, global, paths, args) <- foldM parseSections ([], [], savedHaddockFlags config, init0, user0, global0, [], []) knownSections let remoteRepoSections = reverse . nubBy ((==) `on` remoteRepoName) $ remoteRepoSections0 let localRepoSections = reverse . nubBy ((==) `on` localRepoName) $ localRepoSections0 return . fixConfigMultilines $ config { savedGlobalFlags = (savedGlobalFlags config) { globalRemoteRepos = toNubList remoteRepoSections, globalLocalNoIndexRepos = toNubList localRepoSections, -- the global extra prog path comes from the configure flag prog path globalProgPathExtra = configProgramPathExtra (savedConfigureFlags config) }, savedConfigureFlags = (savedConfigureFlags config) { configProgramPaths = paths, configProgramArgs = args }, savedHaddockFlags = haddockFlags, savedInitFlags = initFlags, savedUserInstallDirs = user, savedGlobalInstallDirs = global } where isKnownSection (ParseUtils.Section _ "repository" _ _) = True isKnownSection (ParseUtils.F _ "remote-repo" _) = True isKnownSection (ParseUtils.Section _ "haddock" _ _) = True isKnownSection (ParseUtils.Section _ "init" _ _) = True isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True isKnownSection _ = False -- Attempt to split fields that can represent lists of paths into -- actual lists on failure, leave the field untouched. 
splitMultiPath :: [String] -> [String] splitMultiPath [s] = case runP 0 "" (parseOptCommaList parseTokenQ) s of ParseOk _ res -> res _ -> [s] splitMultiPath xs = xs -- This is a fixup, pending a full config parser rewrite, to -- ensure that config fields which can be comma-separated lists -- actually parse as comma-separated lists. fixConfigMultilines conf = conf { savedConfigureFlags = let scf = savedConfigureFlags conf in scf { configProgramPathExtra = toNubList $ splitMultiPath (fromNubList $ configProgramPathExtra scf) , configExtraLibDirs = splitMultiPath (configExtraLibDirs scf) , configExtraLibDirsStatic = splitMultiPath (configExtraLibDirsStatic scf) , configExtraFrameworkDirs = splitMultiPath (configExtraFrameworkDirs scf) , configExtraIncludeDirs = splitMultiPath (configExtraIncludeDirs scf) , configConfigureArgs = splitMultiPath (configConfigureArgs scf) } } parse = parseFields (configFieldDescriptions src ++ deprecatedFieldDescriptions) initial parseSections (rs, ls, h, i, u, g, p, a) (ParseUtils.Section lineno "repository" name fs) = do name' <- maybe (ParseFailed $ NoParse "repository name" lineno) return $ simpleParsec name r' <- parseFields remoteRepoFields (emptyRemoteRepo name') fs r'' <- postProcessRepo lineno name r' case r'' of Left local -> return (rs, local:ls, h, i, u, g, p, a) Right remote -> return (remote:rs, ls, h, i, u, g, p, a) parseSections (rs, ls, h, i, u, g, p, a) (ParseUtils.F lno "remote-repo" raw) = do let mr' = simpleParsec raw r' <- maybe (ParseFailed $ NoParse "remote-repo" lno) return mr' return (r':rs, ls, h, i, u, g, p, a) parseSections accum@(rs, ls, h, i, u, g, p, a) (ParseUtils.Section _ "haddock" name fs) | name == "" = do h' <- parseFields haddockFlagsFields h fs return (rs, ls, h', i, u, g, p, a) | otherwise = do warning "The 'haddock' section should be unnamed" return accum parseSections accum@(rs, ls, h, i, u, g, p, a) (ParseUtils.Section _ "init" name fs) | name == "" = do i' <- parseFields initFlagsFields i fs 
return (rs, ls, h, i', u, g, p, a) | otherwise = do warning "The 'init' section should be unnamed" return accum parseSections accum@(rs, ls, h, i, u, g, p, a) (ParseUtils.Section _ "install-dirs" name fs) | name' == "user" = do u' <- parseFields installDirsFields u fs return (rs, ls, h, i, u', g, p, a) | name' == "global" = do g' <- parseFields installDirsFields g fs return (rs, ls, h, i, u, g', p, a) | otherwise = do warning "The 'install-paths' section should be for 'user' or 'global'" return accum where name' = lowercase name parseSections accum@(rs, ls, h, i, u, g, p, a) (ParseUtils.Section _ "program-locations" name fs) | name == "" = do p' <- parseFields withProgramsFields p fs return (rs, ls, h, i, u, g, p', a) | otherwise = do warning "The 'program-locations' section should be unnamed" return accum parseSections accum@(rs, ls, h, i, u, g, p, a) (ParseUtils.Section _ "program-default-options" name fs) | name == "" = do a' <- parseFields withProgramOptionsFields a fs return (rs, ls, h, i, u, g, p, a') | otherwise = do warning "The 'program-default-options' section should be unnamed" return accum parseSections accum f = do warning $ "Unrecognized stanza on line " ++ show (lineNo f) return accum postProcessRepo :: Int -> String -> RemoteRepo -> ParseResult (Either LocalRepo RemoteRepo) postProcessRepo lineno reponameStr repo0 = do when (null reponameStr) $ syntaxError lineno $ "a 'repository' section requires the " ++ "repository name as an argument" reponame <- maybe (fail $ "Invalid repository name " ++ reponameStr) return $ simpleParsec reponameStr case uriScheme (remoteRepoURI repo0) of -- TODO: check that there are no authority, query or fragment -- Note: the trailing colon is important "file+noindex:" -> do let uri = remoteRepoURI repo0 return $ Left $ LocalRepo reponame (uriPath uri) (uriFragment uri == "#shared-cache") _ -> do let repo = repo0 { remoteRepoName = reponame } when (remoteRepoKeyThreshold repo > length (remoteRepoRootKeys repo)) $ warning $ 
"'key-threshold' for repository " ++ show (remoteRepoName repo) ++ " higher than number of keys" when (not (null (remoteRepoRootKeys repo)) && remoteRepoSecure repo /= Just True) $ warning $ "'root-keys' for repository " ++ show (remoteRepoName repo) ++ " non-empty, but 'secure' not set to True." return $ Right repo showConfig :: SavedConfig -> String showConfig = showConfigWithComments mempty showConfigWithComments :: SavedConfig -> SavedConfig -> String showConfigWithComments comment vals = Disp.render $ case fmap (uncurry ppRemoteRepoSection) (zip (getRemoteRepos comment) (getRemoteRepos vals)) of [] -> Disp.text "" (x:xs) -> foldl' (\ r r' -> r $+$ Disp.text "" $+$ r') x xs $+$ Disp.text "" $+$ ppFields (skipSomeFields (configFieldDescriptions ConstraintSourceUnknown)) mcomment vals $+$ Disp.text "" $+$ ppSection "haddock" "" haddockFlagsFields (fmap savedHaddockFlags mcomment) (savedHaddockFlags vals) $+$ Disp.text "" $+$ ppSection "init" "" initFlagsFields (fmap savedInitFlags mcomment) (savedInitFlags vals) $+$ Disp.text "" $+$ installDirsSection "user" savedUserInstallDirs $+$ Disp.text "" $+$ installDirsSection "global" savedGlobalInstallDirs $+$ Disp.text "" $+$ configFlagsSection "program-locations" withProgramsFields configProgramPaths $+$ Disp.text "" $+$ configFlagsSection "program-default-options" withProgramOptionsFields configProgramArgs where getRemoteRepos = fromNubList . globalRemoteRepos . savedGlobalFlags mcomment = Just comment installDirsSection name field = ppSection "install-dirs" name installDirsFields (fmap field mcomment) (field vals) configFlagsSection name fields field = ppSection name "" fields (fmap (field . savedConfigureFlags) mcomment) ((field . savedConfigureFlags) vals) -- skip fields based on field name. currently only skips "remote-repo", -- because that is rendered as a section. (see 'ppRemoteRepoSection'.) skipSomeFields = filter ((/= "remote-repo") . fieldName) -- | Fields for the 'install-dirs' sections. 
installDirsFields :: [FieldDescr (InstallDirs (Flag PathTemplate))] installDirsFields = map viewAsFieldDescr installDirsOptions ppRemoteRepoSection :: RemoteRepo -> RemoteRepo -> Doc ppRemoteRepoSection def vals = ppSection "repository" (unRepoName (remoteRepoName vals)) remoteRepoFields (Just def) vals remoteRepoFields :: [FieldDescr RemoteRepo] remoteRepoFields = [ simpleField "url" (text . show) (parseTokenQ >>= parseURI') remoteRepoURI (\x repo -> repo { remoteRepoURI = x }) , simpleFieldParsec "secure" showSecure (Just `fmap` parsec) remoteRepoSecure (\x repo -> repo { remoteRepoSecure = x }) , listField "root-keys" text parseTokenQ remoteRepoRootKeys (\x repo -> repo { remoteRepoRootKeys = x }) , simpleFieldParsec "key-threshold" showThreshold P.integral remoteRepoKeyThreshold (\x repo -> repo { remoteRepoKeyThreshold = x }) ] where parseURI' uriString = case parseURI uriString of Nothing -> fail $ "remote-repo: no parse on " ++ show uriString Just uri -> return uri showSecure Nothing = mempty -- default 'secure' setting showSecure (Just True) = text "True" -- user explicitly enabled it showSecure (Just False) = text "False" -- user explicitly disabled it -- If the key-threshold is set to 0, we omit it as this is the default -- and it looks odd to have a value for key-threshold but not for 'secure' -- (note that an empty list of keys is already omitted by default, since -- that is what we do for all list fields) showThreshold 0 = mempty showThreshold t = text (show t) -- | Fields for the 'haddock' section. haddockFlagsFields :: [FieldDescr HaddockFlags] haddockFlagsFields = [ field | opt <- haddockOptions ParseArgs , let field = viewAsFieldDescr opt name = fieldName field , name `notElem` exclusions ] where exclusions = ["verbose", "builddir", "for-hackage"] -- | Fields for the 'init' section. 
initFlagsFields :: [FieldDescr IT.InitFlags] initFlagsFields = [ field | opt <- initOptions ParseArgs , let field = viewAsFieldDescr opt name = fieldName field , name `notElem` exclusions ] where exclusions = [ "author", "email", "quiet", "no-comments", "minimal", "overwrite" , "package-dir", "packagedir", "package-name", "version", "homepage" , "synopsis", "category", "extra-source-file", "lib", "exe", "libandexe" , "simple", "main-is", "expose-module", "exposed-modules", "extension" , "dependency", "build-tool", "with-compiler" , "verbose" ] -- | Fields for the 'program-locations' section. withProgramsFields :: [FieldDescr [(String, FilePath)]] withProgramsFields = map viewAsFieldDescr $ programDbPaths' (++ "-location") defaultProgramDb ParseArgs id (++) -- | Fields for the 'program-default-options' section. withProgramOptionsFields :: [FieldDescr [(String, [String])]] withProgramOptionsFields = map viewAsFieldDescr $ programDbOptions defaultProgramDb ParseArgs id (++) parseExtraLines :: Verbosity -> [String] -> IO SavedConfig parseExtraLines verbosity extraLines = case parseConfig (ConstraintSourceMainConfig "additional lines") mempty (toUTF8BS (unlines extraLines)) of ParseFailed err -> let (line, msg) = locatedErrorMsg err in die' verbosity $ "Error parsing additional config lines\n" ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg ParseOk [] r -> return r ParseOk ws _ -> die' verbosity $ unlines (map (showPWarning "Error parsing additional config lines") ws) -- | Get the differences (as a pseudo code diff) between the user's -- '~/.cabal/config' and the one that cabal would generate if it didn't exist. userConfigDiff :: Verbosity -> GlobalFlags -> [String] -> IO [String] userConfigDiff verbosity globalFlags extraLines = do userConfig <- loadRawConfig normal (globalConfigFile globalFlags) extraConfig <- parseExtraLines verbosity extraLines testConfig <- initialSavedConfig return $ reverse . foldl' createDiff [] . 
M.toList $ M.unionWith combine (M.fromList . map justFst $ filterShow testConfig) (M.fromList . map justSnd $ filterShow (userConfig `mappend` extraConfig)) where justFst (a, b) = (a, (Just b, Nothing)) justSnd (a, b) = (a, (Nothing, Just b)) combine (Nothing, Just b) (Just a, Nothing) = (Just a, Just b) combine (Just a, Nothing) (Nothing, Just b) = (Just a, Just b) combine x y = error $ "Can't happen : userConfigDiff " ++ show x ++ " " ++ show y createDiff :: [String] -> (String, (Maybe String, Maybe String)) -> [String] createDiff acc (key, (Just a, Just b)) | a == b = acc | otherwise = ("+ " ++ key ++ ": " ++ b) : ("- " ++ key ++ ": " ++ a) : acc createDiff acc (key, (Nothing, Just b)) = ("+ " ++ key ++ ": " ++ b) : acc createDiff acc (key, (Just a, Nothing)) = ("- " ++ key ++ ": " ++ a) : acc createDiff acc (_, (Nothing, Nothing)) = acc filterShow :: SavedConfig -> [(String, String)] filterShow cfg = map keyValueSplit . filter (\s -> not (null s) && ':' `elem` s) . map nonComment . lines $ showConfig cfg nonComment [] = [] nonComment ('-':'-':_) = [] nonComment (x:xs) = x : nonComment xs topAndTail = reverse . dropWhile isSpace . reverse . dropWhile isSpace keyValueSplit s = let (left, right) = break (== ':') s in (topAndTail left, topAndTail (drop 1 right)) -- | Update the user's ~/.cabal/config' keeping the user's customizations. userConfigUpdate :: Verbosity -> GlobalFlags -> [String] -> IO () userConfigUpdate verbosity globalFlags extraLines = do userConfig <- loadRawConfig normal (globalConfigFile globalFlags) extraConfig <- parseExtraLines verbosity extraLines newConfig <- initialSavedConfig commentConf <- commentSavedConfig cabalFile <- getConfigFilePath $ globalConfigFile globalFlags let backup = cabalFile ++ ".backup" notice verbosity $ "Renaming " ++ cabalFile ++ " to " ++ backup ++ "." renameFile cabalFile backup notice verbosity $ "Writing merged config to " ++ cabalFile ++ "." 
writeConfigFile cabalFile commentConf (newConfig `mappend` userConfig `mappend` extraConfig) cabal-install-3.8.1.0/src/Distribution/Client/Configure.hs0000644000000000000000000004725107346545000021532 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Configure -- Copyright : (c) David Himmelstrup 2005, -- Duncan Coutts 2005 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- High level interface to configuring a package. ----------------------------------------------------------------------------- module Distribution.Client.Configure ( configure, configureSetupScript, chooseCabalVersion, checkConfigExFlags, -- * Saved configure flags readConfigFlagsFrom, readConfigFlags, cabalConfigFlagsFile, writeConfigFlagsTo, writeConfigFlags, ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Utils.Generic (safeHead) import Distribution.Client.Dependency import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.SolverInstallPlan (SolverInstallPlan) import Distribution.Client.IndexUtils as IndexUtils ( getSourcePackages, getInstalledPackages ) import Distribution.Client.Setup ( ConfigExFlags(..), RepoContext(..) 
, configureCommand, configureExCommand, filterConfigureFlags ) import Distribution.Client.Types as Source import Distribution.Client.SetupWrapper ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) import Distribution.Client.Targets ( userToPackageConstraint, userConstraintPackageName ) import Distribution.Client.JobControl (Lock) import qualified Distribution.Solver.Types.ComponentDeps as CD import Distribution.Solver.Types.Settings import Distribution.Solver.Types.ConstraintSource import Distribution.Solver.Types.LabeledPackageConstraint import Distribution.Solver.Types.OptionalStanza import Distribution.Solver.Types.PackageIndex ( PackageIndex, elemByPackageName ) import Distribution.Solver.Types.PkgConfigDb (PkgConfigDb, readPkgConfigDb) import Distribution.Solver.Types.SourcePackage import Distribution.Simple.Compiler ( Compiler, CompilerInfo, compilerInfo, PackageDB(..), PackageDBStack ) import Distribution.Simple.Program (ProgramDb) import Distribution.Client.SavedFlags ( readCommandFlags, writeCommandFlags ) import Distribution.Simple.Setup ( ConfigFlags(..) , fromFlag, toFlag, flagToMaybe, fromFlagOrDefault ) import Distribution.Simple.PackageDescription ( readGenericPackageDescription ) import Distribution.Simple.PackageIndex as PackageIndex ( InstalledPackageIndex, lookupPackageName ) import Distribution.Package ( Package(..), packageName, PackageId ) import Distribution.Types.GivenComponent ( GivenComponent(..) 
) import Distribution.Types.PackageVersionConstraint ( PackageVersionConstraint(..), thisPackageVersionConstraint ) import qualified Distribution.PackageDescription as PkgDesc import Distribution.PackageDescription.Configuration ( finalizePD ) import Distribution.Version ( Version, mkVersion, anyVersion, thisVersion , VersionRange, orLaterVersion ) import Distribution.Simple.Utils as Utils ( warn, notice, debug, die' , defaultPackageDesc ) import Distribution.System ( Platform ) import System.FilePath ( () ) -- | Choose the Cabal version such that the setup scripts compiled against this -- version will support the given command-line flags. chooseCabalVersion :: ConfigExFlags -> Maybe Version -> VersionRange chooseCabalVersion configExFlags maybeVersion = maybe defaultVersionRange thisVersion maybeVersion where -- Cabal < 1.19.2 doesn't support '--exact-configuration' which is needed -- for '--allow-newer' to work. allowNewer = isRelaxDeps (maybe mempty unAllowNewer $ configAllowNewer configExFlags) allowOlder = isRelaxDeps (maybe mempty unAllowOlder $ configAllowOlder configExFlags) defaultVersionRange = if allowOlder || allowNewer then orLaterVersion (mkVersion [1,19,2]) else anyVersion -- | Configure the package found in the local directory configure :: Verbosity -> PackageDBStack -> RepoContext -> Compiler -> Platform -> ProgramDb -> ConfigFlags -> ConfigExFlags -> [String] -> IO () configure verbosity packageDBs repoCtxt comp platform progdb configFlags configExFlags extraArgs = do installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb sourcePkgDb <- getSourcePackages verbosity repoCtxt pkgConfigDb <- readPkgConfigDb verbosity progdb checkConfigExFlags verbosity installedPkgIndex (packageIndex sourcePkgDb) configExFlags progress <- planLocalPackage verbosity comp platform configFlags configExFlags installedPkgIndex sourcePkgDb pkgConfigDb notice verbosity "Resolving dependencies..." maybePlan <- foldProgress logMsg (return . Left) (return . 
Right) progress case maybePlan of Left message -> do warn verbosity $ "solver failed to find a solution:\n" ++ message ++ "\nTrying configure anyway." setupWrapper verbosity (setupScriptOptions installedPkgIndex Nothing) Nothing configureCommand (const configFlags) (const extraArgs) Right installPlan0 -> let installPlan = InstallPlan.configureInstallPlan configFlags installPlan0 in case fst (InstallPlan.ready installPlan) of [pkg@(ReadyPackage (ConfiguredPackage _ (SourcePackage _ _ (LocalUnpackedPackage _) _) _ _ _))] -> do configurePackage verbosity platform (compilerInfo comp) (setupScriptOptions installedPkgIndex (Just pkg)) configFlags pkg extraArgs _ -> die' verbosity $ "internal error: configure install plan should have exactly " ++ "one local ready package." where setupScriptOptions :: InstalledPackageIndex -> Maybe ReadyPackage -> SetupScriptOptions setupScriptOptions = configureSetupScript packageDBs comp platform progdb (fromFlagOrDefault (useDistPref defaultSetupScriptOptions) (configDistPref configFlags)) (chooseCabalVersion configExFlags (flagToMaybe (configCabalVersion configExFlags))) Nothing False logMsg message rest = debug verbosity message >> rest configureSetupScript :: PackageDBStack -> Compiler -> Platform -> ProgramDb -> FilePath -> VersionRange -> Maybe Lock -> Bool -> InstalledPackageIndex -> Maybe ReadyPackage -> SetupScriptOptions configureSetupScript packageDBs comp platform progdb distPref cabalVersion lock forceExternal index mpkg = SetupScriptOptions { useCabalVersion = cabalVersion , useCabalSpecVersion = Nothing , useCompiler = Just comp , usePlatform = Just platform , usePackageDB = packageDBs' , usePackageIndex = index' , useProgramDb = progdb , useDistPref = distPref , useLoggingHandle = Nothing , useWorkingDir = Nothing , useExtraPathEnv = [] , useExtraEnvOverrides = [] , setupCacheLock = lock , useWin32CleanHack = False , forceExternalSetupMethod = forceExternal -- If we have explicit setup dependencies, list them; otherwise, 
we give -- the empty list of dependencies; ideally, we would fix the version of -- Cabal here, so that we no longer need the special case for that in -- `compileSetupExecutable` in `externalSetupMethod`, but we don't yet -- know the version of Cabal at this point, but only find this there. -- Therefore, for now, we just leave this blank. , useDependencies = fromMaybe [] explicitSetupDeps , useDependenciesExclusive = not defaultSetupDeps && isJust explicitSetupDeps , useVersionMacros = not defaultSetupDeps && isJust explicitSetupDeps , isInteractive = False } where -- When we are compiling a legacy setup script without an explicit -- setup stanza, we typically want to allow the UserPackageDB for -- finding the Cabal lib when compiling any Setup.hs even if we're doing -- a global install. However we also allow looking in a specific package -- db. packageDBs' :: PackageDBStack index' :: Maybe InstalledPackageIndex (packageDBs', index') = case packageDBs of (GlobalPackageDB:dbs) | UserPackageDB `notElem` dbs , Nothing <- explicitSetupDeps -> (GlobalPackageDB:UserPackageDB:dbs, Nothing) -- but if the user is using an odd db stack, don't touch it _otherwise -> (packageDBs, Just index) maybeSetupBuildInfo :: Maybe PkgDesc.SetupBuildInfo maybeSetupBuildInfo = do ReadyPackage cpkg <- mpkg let gpkg = srcpkgDescription (confPkgSource cpkg) PkgDesc.setupBuildInfo (PkgDesc.packageDescription gpkg) -- Was a default 'custom-setup' stanza added by 'cabal-install' itself? If -- so, 'setup-depends' must not be exclusive. See #3199. defaultSetupDeps :: Bool defaultSetupDeps = maybe False PkgDesc.defaultSetupDepends maybeSetupBuildInfo explicitSetupDeps :: Maybe [(InstalledPackageId, PackageId)] explicitSetupDeps = do -- Check if there is an explicit setup stanza. 
_buildInfo <- maybeSetupBuildInfo -- Return the setup dependencies computed by the solver ReadyPackage cpkg <- mpkg return [ ( cid, srcid ) | ConfiguredId srcid (Just (PkgDesc.CLibName PkgDesc.LMainLibName)) cid <- CD.setupDeps (confPkgDeps cpkg) ] -- | Warn if any constraints or preferences name packages that are not in the -- source package index or installed package index. checkConfigExFlags :: Package pkg => Verbosity -> InstalledPackageIndex -> PackageIndex pkg -> ConfigExFlags -> IO () checkConfigExFlags verbosity installedPkgIndex sourcePkgIndex flags = do for_ (safeHead unknownConstraints) $ \h -> warn verbosity $ "Constraint refers to an unknown package: " ++ showConstraint h for_ (safeHead unknownPreferences) $ \h -> warn verbosity $ "Preference refers to an unknown package: " ++ prettyShow h where unknownConstraints = filter (unknown . userConstraintPackageName . fst) $ configExConstraints flags unknownPreferences = filter (unknown . \(PackageVersionConstraint name _) -> name) $ configPreferences flags unknown pkg = null (PackageIndex.lookupPackageName installedPkgIndex pkg) && not (elemByPackageName sourcePkgIndex pkg) showConstraint (uc, src) = prettyShow uc ++ " (" ++ showConstraintSource src ++ ")" -- | Make an 'InstallPlan' for the unpacked package in the current directory, -- and all its dependencies. 
-- planLocalPackage :: Verbosity -> Compiler -> Platform -> ConfigFlags -> ConfigExFlags -> InstalledPackageIndex -> SourcePackageDb -> PkgConfigDb -> IO (Progress String String SolverInstallPlan) planLocalPackage verbosity comp platform configFlags configExFlags installedPkgIndex (SourcePackageDb _ packagePrefs) pkgConfigDb = do pkg <- readGenericPackageDescription verbosity =<< case flagToMaybe (configCabalFilePath configFlags) of Nothing -> defaultPackageDesc verbosity Just fp -> return fp solver <- chooseSolver verbosity (fromFlag $ configSolver configExFlags) (compilerInfo comp) let -- We create a local package and ask to resolve a dependency on it localPkg = SourcePackage { srcpkgPackageId = packageId pkg, srcpkgDescription = pkg, srcpkgSource = LocalUnpackedPackage ".", srcpkgDescrOverride = Nothing } testsEnabled :: Bool testsEnabled = fromFlagOrDefault False $ configTests configFlags benchmarksEnabled :: Bool benchmarksEnabled = fromFlagOrDefault False $ configBenchmarks configFlags resolverParams :: DepResolverParams resolverParams = removeLowerBounds (fromMaybe (AllowOlder mempty) $ configAllowOlder configExFlags) . removeUpperBounds (fromMaybe (AllowNewer mempty) $ configAllowNewer configExFlags) . addPreferences -- preferences from the config file or command line [ PackageVersionPreference name ver | PackageVersionConstraint name ver <- configPreferences configExFlags ] . addConstraints -- version constraints from the config file or command line -- TODO: should warn or error on constraints that are not on direct -- deps or flag constraints not on the package in question. [ LabeledPackageConstraint (userToPackageConstraint uc) src | (uc, src) <- configExConstraints configExFlags ] . addConstraints -- package flags from the config file or command line [ let pc = PackageConstraint (scopeToplevel $ packageName pkg) (PackagePropertyFlags $ configConfigurationsFlags configFlags) in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget ] . 
addConstraints -- '--enable-tests' and '--enable-benchmarks' constraints from -- the config file or command line [ let pc = PackageConstraint (scopeToplevel $ packageName pkg) . PackagePropertyStanzas $ [ TestStanzas | testsEnabled ] ++ [ BenchStanzas | benchmarksEnabled ] in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget ] -- Don't solve for executables, since we use an empty source -- package database and executables never show up in the -- installed package index . setSolveExecutables (SolveExecutables False) . setSolverVerbosity verbosity $ standardInstallPolicy installedPkgIndex -- NB: We pass in an *empty* source package database, -- because cabal configure assumes that all dependencies -- have already been installed (SourcePackageDb mempty packagePrefs) [SpecificSourcePackage localPkg] return (resolveDependencies platform (compilerInfo comp) pkgConfigDb solver resolverParams) -- | Call an installer for an 'SourcePackage' but override the configure -- flags with the ones given by the 'ReadyPackage'. In particular the -- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly -- versioned package dependencies. So we ignore any previous partial flag -- assignment or dependency constraints and use the new ones. -- -- NB: when updating this function, don't forget to also update -- 'installReadyPackage' in D.C.Install. 
configurePackage :: Verbosity -> Platform -> CompilerInfo -> SetupScriptOptions -> ConfigFlags -> ReadyPackage -> [String] -> IO () configurePackage verbosity platform comp scriptOptions configFlags (ReadyPackage (ConfiguredPackage ipid spkg flags stanzas deps)) extraArgs = setupWrapper verbosity scriptOptions (Just pkg) configureCommand configureFlags (const extraArgs) where gpkg :: PkgDesc.GenericPackageDescription gpkg = srcpkgDescription spkg configureFlags :: Version -> ConfigFlags configureFlags = filterConfigureFlags configFlags { configIPID = if isJust (flagToMaybe (configIPID configFlags)) -- Make sure cabal configure --ipid works. then configIPID configFlags else toFlag (prettyShow ipid), configConfigurationsFlags = flags, -- We generate the legacy constraints as well as the new style precise -- deps. In the end only one set gets passed to Setup.hs configure, -- depending on the Cabal version we are talking to. configConstraints = [ thisPackageVersionConstraint srcid | ConfiguredId srcid (Just (PkgDesc.CLibName PkgDesc.LMainLibName)) _uid <- CD.nonSetupDeps deps ], configDependencies = [ GivenComponent (packageName srcid) cname uid | ConfiguredId srcid (Just (PkgDesc.CLibName cname)) uid <- CD.nonSetupDeps deps ], -- Use '--exact-configuration' if supported. configExactConfiguration = toFlag True, configVerbosity = toFlag verbosity, -- NB: if the user explicitly specified -- --enable-tests/--enable-benchmarks, always respect it. -- (But if they didn't, let solver decide.) 
configBenchmarks = toFlag (BenchStanzas `optStanzaSetMember` stanzas) `mappend` configBenchmarks configFlags, configTests = toFlag (TestStanzas `optStanzaSetMember` stanzas) `mappend` configTests configFlags } pkg :: PkgDesc.PackageDescription pkg = case finalizePD flags (enableStanzas stanzas) (const True) platform comp [] gpkg of Left _ -> error "finalizePD ReadyPackage failed" Right (desc, _) -> desc -- ----------------------------------------------------------------------------- -- * Saved configure environments and flags -- ----------------------------------------------------------------------------- -- | Read saved configure flags and restore the saved environment from the -- specified files. readConfigFlagsFrom :: FilePath -- ^ path to saved flags file -> IO (ConfigFlags, ConfigExFlags) readConfigFlagsFrom flags = do readCommandFlags flags configureExCommand -- | The path (relative to @--build-dir@) where the arguments to @configure@ -- should be saved. cabalConfigFlagsFile :: FilePath -> FilePath cabalConfigFlagsFile dist = dist "cabal-config-flags" -- | Read saved configure flags and restore the saved environment from the -- usual location. readConfigFlags :: FilePath -- ^ @--build-dir@ -> IO (ConfigFlags, ConfigExFlags) readConfigFlags dist = readConfigFlagsFrom (cabalConfigFlagsFile dist) -- | Save the configure flags and environment to the specified files. writeConfigFlagsTo :: FilePath -- ^ path to saved flags file -> Verbosity -> (ConfigFlags, ConfigExFlags) -> IO () writeConfigFlagsTo file verb flags = do writeCommandFlags verb file configureExCommand flags -- | Save the build flags to the usual location. 
writeConfigFlags :: Verbosity -> FilePath -- ^ @--build-dir@ -> (ConfigFlags, ConfigExFlags) -> IO () writeConfigFlags verb dist = writeConfigFlagsTo (cabalConfigFlagsFile dist) verb cabal-install-3.8.1.0/src/Distribution/Client/Dependency.hs0000644000000000000000000012445107346545000021665 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Dependency -- Copyright : (c) David Himmelstrup 2005, -- Bjorn Bringert 2007 -- Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : cabal-devel@gmail.com -- Stability : provisional -- Portability : portable -- -- Top level interface to dependency resolution. ----------------------------------------------------------------------------- module Distribution.Client.Dependency ( -- * The main package dependency resolver DepResolverParams, chooseSolver, resolveDependencies, Progress(..), foldProgress, -- * Alternate, simple resolver that does not do dependencies recursively resolveWithoutDependencies, -- * Constructing resolver policies PackageProperty(..), PackageConstraint(..), scopeToplevel, PackagesPreferenceDefault(..), PackagePreference(..), -- ** Standard policy basicInstallPolicy, standardInstallPolicy, PackageSpecifier(..), -- ** Extra policy options upgradeDependencies, reinstallTargets, -- ** Policy utils addConstraints, addPreferences, setPreferenceDefault, setReorderGoals, setCountConflicts, setFineGrainedConflicts, setMinimizeConflictSet, setIndependentGoals, setAvoidReinstalls, setShadowPkgs, setStrongFlags, setAllowBootLibInstalls, setOnlyConstrained, setMaxBackjumps, setEnableBackjumping, setSolveExecutables, setGoalOrder, setSolverVerbosity, removeLowerBounds, removeUpperBounds, addDefaultSetupDependencies, addSetupCabalMinVersionConstraint, addSetupCabalMaxVersionConstraint, ) where import Distribution.Client.Compat.Prelude import qualified Prelude as Unsafe (head) import Distribution.Solver.Modular ( modularResolver, 
SolverConfig(..), PruneAfterFirstSuccess(..) ) import Distribution.Simple.PackageIndex (InstalledPackageIndex) import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex import Distribution.Client.SolverInstallPlan (SolverInstallPlan) import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan import Distribution.Client.Types ( SourcePackageDb(SourcePackageDb) , PackageSpecifier(..), pkgSpecifierTarget, pkgSpecifierConstraints , UnresolvedPkgLoc, UnresolvedSourcePackage , AllowNewer(..), AllowOlder(..), RelaxDeps(..), RelaxedDep(..) , RelaxDepScope(..), RelaxDepMod(..), RelaxDepSubject(..), isRelaxDeps ) import Distribution.Client.Dependency.Types ( PreSolver(..), Solver(..) , PackagesPreferenceDefault(..) ) import Distribution.Package ( PackageName, mkPackageName, PackageIdentifier(PackageIdentifier), PackageId , Package(..), packageName, packageVersion ) import Distribution.Types.Dependency import qualified Distribution.PackageDescription as PD import qualified Distribution.PackageDescription.Configuration as PD import Distribution.PackageDescription.Configuration ( finalizePD ) import Distribution.Compiler ( CompilerInfo(..) ) import Distribution.System ( Platform ) import Distribution.Client.Utils ( duplicatesBy, mergeBy, MergeResult(..) 
) import Distribution.Simple.Setup ( asBool ) import Distribution.Verbosity ( normal ) import Distribution.Version import qualified Distribution.Compat.Graph as Graph import Distribution.Solver.Types.ComponentDeps (ComponentDeps) import qualified Distribution.Solver.Types.ComponentDeps as CD import Distribution.Solver.Types.ConstraintSource import Distribution.Solver.Types.DependencyResolver import Distribution.Solver.Types.InstalledPreference import Distribution.Solver.Types.LabeledPackageConstraint import Distribution.Solver.Types.OptionalStanza import Distribution.Solver.Types.PackageConstraint import Distribution.Solver.Types.PackagePath import Distribution.Solver.Types.PackagePreferences import qualified Distribution.Solver.Types.PackageIndex as PackageIndex import Distribution.Solver.Types.PkgConfigDb (PkgConfigDb) import Distribution.Solver.Types.Progress import Distribution.Solver.Types.ResolverPackage import Distribution.Solver.Types.Settings import Distribution.Solver.Types.SolverId import Distribution.Solver.Types.SolverPackage import Distribution.Solver.Types.SourcePackage import Distribution.Solver.Types.Variable import Data.List ( maximumBy ) import qualified Data.Map as Map import qualified Data.Set as Set import Control.Exception ( assert ) -- ------------------------------------------------------------ -- * High level planner policy -- ------------------------------------------------------------ -- | The set of parameters to the dependency resolver. These parameters are -- relatively low level but many kinds of high level policies can be -- implemented in terms of adjustments to the parameters. 
-- data DepResolverParams = DepResolverParams { depResolverTargets :: Set PackageName, depResolverConstraints :: [LabeledPackageConstraint], depResolverPreferences :: [PackagePreference], depResolverPreferenceDefault :: PackagesPreferenceDefault, depResolverInstalledPkgIndex :: InstalledPackageIndex, depResolverSourcePkgIndex :: PackageIndex.PackageIndex UnresolvedSourcePackage, depResolverReorderGoals :: ReorderGoals, depResolverCountConflicts :: CountConflicts, depResolverFineGrainedConflicts :: FineGrainedConflicts, depResolverMinimizeConflictSet :: MinimizeConflictSet, depResolverIndependentGoals :: IndependentGoals, depResolverAvoidReinstalls :: AvoidReinstalls, depResolverShadowPkgs :: ShadowPkgs, depResolverStrongFlags :: StrongFlags, -- | Whether to allow base and its dependencies to be installed. depResolverAllowBootLibInstalls :: AllowBootLibInstalls, -- | Whether to only allow explicitly constrained packages plus -- goals or to allow any package. depResolverOnlyConstrained :: OnlyConstrained, depResolverMaxBackjumps :: Maybe Int, depResolverEnableBackjumping :: EnableBackjumping, -- | Whether or not to solve for dependencies on executables. -- This should be true, except in the legacy code path where -- we can't tell if an executable has been installed or not, -- so we shouldn't solve for them. See #3875. depResolverSolveExecutables :: SolveExecutables, -- | Function to override the solver's goal-ordering heuristics. depResolverGoalOrder :: Maybe (Variable QPN -> Variable QPN -> Ordering), depResolverVerbosity :: Verbosity } showDepResolverParams :: DepResolverParams -> String showDepResolverParams p = "targets: " ++ intercalate ", " (map prettyShow $ Set.toList (depResolverTargets p)) ++ "\nconstraints: " ++ concatMap (("\n " ++) . showLabeledConstraint) (depResolverConstraints p) ++ "\npreferences: " ++ concatMap (("\n " ++) . 
showPackagePreference) (depResolverPreferences p) ++ "\nstrategy: " ++ show (depResolverPreferenceDefault p) ++ "\nreorder goals: " ++ show (asBool (depResolverReorderGoals p)) ++ "\ncount conflicts: " ++ show (asBool (depResolverCountConflicts p)) ++ "\nfine grained conflicts: " ++ show (asBool (depResolverFineGrainedConflicts p)) ++ "\nminimize conflict set: " ++ show (asBool (depResolverMinimizeConflictSet p)) ++ "\nindependent goals: " ++ show (asBool (depResolverIndependentGoals p)) ++ "\navoid reinstalls: " ++ show (asBool (depResolverAvoidReinstalls p)) ++ "\nshadow packages: " ++ show (asBool (depResolverShadowPkgs p)) ++ "\nstrong flags: " ++ show (asBool (depResolverStrongFlags p)) ++ "\nallow boot library installs: " ++ show (asBool (depResolverAllowBootLibInstalls p)) ++ "\nonly constrained packages: " ++ show (depResolverOnlyConstrained p) ++ "\nmax backjumps: " ++ maybe "infinite" show (depResolverMaxBackjumps p) where showLabeledConstraint :: LabeledPackageConstraint -> String showLabeledConstraint (LabeledPackageConstraint pc src) = showPackageConstraint pc ++ " (" ++ showConstraintSource src ++ ")" -- | A package selection preference for a particular package. -- -- Preferences are soft constraints that the dependency resolver should try to -- respect where possible. It is not specified if preferences on some packages -- are more important than others. -- data PackagePreference = -- | A suggested constraint on the version number. PackageVersionPreference PackageName VersionRange -- | If we prefer versions of packages that are already installed. | PackageInstalledPreference PackageName InstalledPreference -- | If we would prefer to enable these optional stanzas -- (i.e. test suites and/or benchmarks) | PackageStanzasPreference PackageName [OptionalStanza] -- | Provide a textual representation of a package preference -- for debugging purposes. 
-- showPackagePreference :: PackagePreference -> String showPackagePreference (PackageVersionPreference pn vr) = prettyShow pn ++ " " ++ prettyShow (simplifyVersionRange vr) showPackagePreference (PackageInstalledPreference pn ip) = prettyShow pn ++ " " ++ show ip showPackagePreference (PackageStanzasPreference pn st) = prettyShow pn ++ " " ++ show st basicDepResolverParams :: InstalledPackageIndex -> PackageIndex.PackageIndex UnresolvedSourcePackage -> DepResolverParams basicDepResolverParams installedPkgIndex sourcePkgIndex = DepResolverParams { depResolverTargets = Set.empty, depResolverConstraints = [], depResolverPreferences = [], depResolverPreferenceDefault = PreferLatestForSelected, depResolverInstalledPkgIndex = installedPkgIndex, depResolverSourcePkgIndex = sourcePkgIndex, depResolverReorderGoals = ReorderGoals False, depResolverCountConflicts = CountConflicts True, depResolverFineGrainedConflicts = FineGrainedConflicts True, depResolverMinimizeConflictSet = MinimizeConflictSet False, depResolverIndependentGoals = IndependentGoals False, depResolverAvoidReinstalls = AvoidReinstalls False, depResolverShadowPkgs = ShadowPkgs False, depResolverStrongFlags = StrongFlags False, depResolverAllowBootLibInstalls = AllowBootLibInstalls False, depResolverOnlyConstrained = OnlyConstrainedNone, depResolverMaxBackjumps = Nothing, depResolverEnableBackjumping = EnableBackjumping True, depResolverSolveExecutables = SolveExecutables True, depResolverGoalOrder = Nothing, depResolverVerbosity = normal } addTargets :: [PackageName] -> DepResolverParams -> DepResolverParams addTargets extraTargets params = params { depResolverTargets = Set.fromList extraTargets `Set.union` depResolverTargets params } addConstraints :: [LabeledPackageConstraint] -> DepResolverParams -> DepResolverParams addConstraints extraConstraints params = params { depResolverConstraints = extraConstraints ++ depResolverConstraints params } addPreferences :: [PackagePreference] -> DepResolverParams -> 
DepResolverParams addPreferences extraPreferences params = params { depResolverPreferences = extraPreferences ++ depResolverPreferences params } setPreferenceDefault :: PackagesPreferenceDefault -> DepResolverParams -> DepResolverParams setPreferenceDefault preferenceDefault params = params { depResolverPreferenceDefault = preferenceDefault } setReorderGoals :: ReorderGoals -> DepResolverParams -> DepResolverParams setReorderGoals reorder params = params { depResolverReorderGoals = reorder } setCountConflicts :: CountConflicts -> DepResolverParams -> DepResolverParams setCountConflicts count params = params { depResolverCountConflicts = count } setFineGrainedConflicts :: FineGrainedConflicts -> DepResolverParams -> DepResolverParams setFineGrainedConflicts fineGrained params = params { depResolverFineGrainedConflicts = fineGrained } setMinimizeConflictSet :: MinimizeConflictSet -> DepResolverParams -> DepResolverParams setMinimizeConflictSet minimize params = params { depResolverMinimizeConflictSet = minimize } setIndependentGoals :: IndependentGoals -> DepResolverParams -> DepResolverParams setIndependentGoals indep params = params { depResolverIndependentGoals = indep } setAvoidReinstalls :: AvoidReinstalls -> DepResolverParams -> DepResolverParams setAvoidReinstalls avoid params = params { depResolverAvoidReinstalls = avoid } setShadowPkgs :: ShadowPkgs -> DepResolverParams -> DepResolverParams setShadowPkgs shadow params = params { depResolverShadowPkgs = shadow } setStrongFlags :: StrongFlags -> DepResolverParams -> DepResolverParams setStrongFlags sf params = params { depResolverStrongFlags = sf } setAllowBootLibInstalls :: AllowBootLibInstalls -> DepResolverParams -> DepResolverParams setAllowBootLibInstalls i params = params { depResolverAllowBootLibInstalls = i } setOnlyConstrained :: OnlyConstrained -> DepResolverParams -> DepResolverParams setOnlyConstrained i params = params { depResolverOnlyConstrained = i } setMaxBackjumps :: Maybe Int -> 
DepResolverParams -> DepResolverParams setMaxBackjumps n params = params { depResolverMaxBackjumps = n } setEnableBackjumping :: EnableBackjumping -> DepResolverParams -> DepResolverParams setEnableBackjumping b params = params { depResolverEnableBackjumping = b } setSolveExecutables :: SolveExecutables -> DepResolverParams -> DepResolverParams setSolveExecutables b params = params { depResolverSolveExecutables = b } setGoalOrder :: Maybe (Variable QPN -> Variable QPN -> Ordering) -> DepResolverParams -> DepResolverParams setGoalOrder order params = params { depResolverGoalOrder = order } setSolverVerbosity :: Verbosity -> DepResolverParams -> DepResolverParams setSolverVerbosity verbosity params = params { depResolverVerbosity = verbosity } -- | Some packages are specific to a given compiler version and should never be -- upgraded. dontUpgradeNonUpgradeablePackages :: DepResolverParams -> DepResolverParams dontUpgradeNonUpgradeablePackages params = addConstraints extraConstraints params where extraConstraints = [ LabeledPackageConstraint (PackageConstraint (ScopeAnyQualifier pkgname) PackagePropertyInstalled) ConstraintSourceNonUpgradeablePackage | Set.notMember (mkPackageName "base") (depResolverTargets params) -- If you change this enumeration, make sure to update the list in -- "Distribution.Solver.Modular.Solver" as well , pkgname <- [ mkPackageName "base" , mkPackageName "ghc-bignum" , mkPackageName "ghc-prim" , mkPackageName "integer-gmp" , mkPackageName "integer-simple" , mkPackageName "template-haskell" ] , isInstalled pkgname ] isInstalled = not . null . 
InstalledPackageIndex.lookupPackageName (depResolverInstalledPkgIndex params) addSourcePackages :: [UnresolvedSourcePackage] -> DepResolverParams -> DepResolverParams addSourcePackages pkgs params = params { depResolverSourcePkgIndex = foldl (flip PackageIndex.insert) (depResolverSourcePkgIndex params) pkgs } hideInstalledPackagesSpecificBySourcePackageId :: [PackageId] -> DepResolverParams -> DepResolverParams hideInstalledPackagesSpecificBySourcePackageId pkgids params = --TODO: this should work using exclude constraints instead params { depResolverInstalledPkgIndex = foldl' (flip InstalledPackageIndex.deleteSourcePackageId) (depResolverInstalledPkgIndex params) pkgids } hideInstalledPackagesAllVersions :: [PackageName] -> DepResolverParams -> DepResolverParams hideInstalledPackagesAllVersions pkgnames params = --TODO: this should work using exclude constraints instead params { depResolverInstalledPkgIndex = foldl' (flip InstalledPackageIndex.deletePackageName) (depResolverInstalledPkgIndex params) pkgnames } -- | Remove upper bounds in dependencies using the policy specified by the -- 'AllowNewer' argument (all/some/none). -- -- Note: It's important to apply 'removeUpperBounds' after -- 'addSourcePackages'. Otherwise, the packages inserted by -- 'addSourcePackages' won't have upper bounds in dependencies relaxed. 
-- removeUpperBounds :: AllowNewer -> DepResolverParams -> DepResolverParams removeUpperBounds (AllowNewer relDeps) = removeBounds RelaxUpper relDeps -- | Dual of 'removeUpperBounds' removeLowerBounds :: AllowOlder -> DepResolverParams -> DepResolverParams removeLowerBounds (AllowOlder relDeps) = removeBounds RelaxLower relDeps data RelaxKind = RelaxLower | RelaxUpper -- | Common internal implementation of 'removeLowerBounds'/'removeUpperBounds' removeBounds :: RelaxKind -> RelaxDeps -> DepResolverParams -> DepResolverParams removeBounds _ rd params | not (isRelaxDeps rd) = params -- no-op optimisation removeBounds relKind relDeps params = params { depResolverSourcePkgIndex = sourcePkgIndex' } where sourcePkgIndex' :: PackageIndex.PackageIndex UnresolvedSourcePackage sourcePkgIndex' = fmap relaxDeps $ depResolverSourcePkgIndex params relaxDeps :: UnresolvedSourcePackage -> UnresolvedSourcePackage relaxDeps srcPkg = srcPkg { srcpkgDescription = relaxPackageDeps relKind relDeps (srcpkgDescription srcPkg) } -- | Relax the dependencies of this package if needed. 
-- -- Helper function used by 'removeBounds' relaxPackageDeps :: RelaxKind -> RelaxDeps -> PD.GenericPackageDescription -> PD.GenericPackageDescription relaxPackageDeps _ rd gpd | not (isRelaxDeps rd) = gpd -- subsumed by no-op case in 'removeBounds' relaxPackageDeps relKind RelaxDepsAll gpd = PD.transformAllBuildDepends relaxAll gpd where relaxAll :: Dependency -> Dependency relaxAll (Dependency pkgName verRange cs) = Dependency pkgName (removeBound relKind RelaxDepModNone verRange) cs relaxPackageDeps relKind (RelaxDepsSome depsToRelax0) gpd = PD.transformAllBuildDepends relaxSome gpd where thisPkgName = packageName gpd thisPkgId = packageId gpd depsToRelax = Map.fromList $ mapMaybe f depsToRelax0 f :: RelaxedDep -> Maybe (RelaxDepSubject,RelaxDepMod) f (RelaxedDep scope rdm p) = case scope of RelaxDepScopeAll -> Just (p,rdm) RelaxDepScopePackage p0 | p0 == thisPkgName -> Just (p,rdm) | otherwise -> Nothing RelaxDepScopePackageId p0 | p0 == thisPkgId -> Just (p,rdm) | otherwise -> Nothing relaxSome :: Dependency -> Dependency relaxSome d@(Dependency depName verRange cs) | Just relMod <- Map.lookup RelaxDepSubjectAll depsToRelax = -- a '*'-subject acts absorbing, for consistency with -- the 'Semigroup RelaxDeps' instance Dependency depName (removeBound relKind relMod verRange) cs | Just relMod <- Map.lookup (RelaxDepSubjectPkg depName) depsToRelax = Dependency depName (removeBound relKind relMod verRange) cs | otherwise = d -- no-op -- | Internal helper for 'relaxPackageDeps' removeBound :: RelaxKind -> RelaxDepMod -> VersionRange -> VersionRange removeBound RelaxLower RelaxDepModNone = removeLowerBound removeBound RelaxUpper RelaxDepModNone = removeUpperBound removeBound RelaxLower RelaxDepModCaret = transformCaretLower removeBound RelaxUpper RelaxDepModCaret = transformCaretUpper -- | Supply defaults for packages without explicit Setup dependencies -- -- Note: It's important to apply 'addDefaultSetupDepends' after -- 'addSourcePackages'. 
-- Otherwise, the packages inserted by 'addSourcePackages' won't have
-- upper bounds in dependencies relaxed.
--
addDefaultSetupDependencies :: (UnresolvedSourcePackage -> Maybe [Dependency])
                            -> DepResolverParams -> DepResolverParams
addDefaultSetupDependencies defaultSetupDeps params =
    params
      { depResolverSourcePkgIndex =
          fmap applyDefaultSetupDeps (depResolverSourcePkgIndex params)
      }
  where
    -- Fill in a default setup-depends stanza for Custom-build packages
    -- that did not declare one themselves.
    applyDefaultSetupDeps :: UnresolvedSourcePackage -> UnresolvedSourcePackage
    applyDefaultSetupDeps srcpkg =
        srcpkg { srcpkgDescription =
                   gpkgdesc { PD.packageDescription = pkgdesc' } }
      where
        pkgdesc' = pkgdesc { PD.setupBuildInfo = sbi' }

        sbi' = case PD.setupBuildInfo pkgdesc of
          -- An explicit setup-depends always wins.
          Just sbi -> Just sbi
          Nothing  -> case defaultSetupDeps srcpkg of
            Nothing   -> Nothing
            Just deps
              | isCustom  -> Just PD.SetupBuildInfo
                               { PD.defaultSetupDepends = True
                               , PD.setupDepends        = deps
                               }
              | otherwise -> Nothing

        isCustom = PD.buildType pkgdesc == PD.Custom
        gpkgdesc = srcpkgDescription srcpkg
        pkgdesc  = PD.packageDescription gpkgdesc

-- | If a package has a custom setup then we need to add a setup-depends
-- on Cabal.
--
addSetupCabalMinVersionConstraint :: Version
                                  -> DepResolverParams -> DepResolverParams
addSetupCabalMinVersionConstraint minVersion =
    addConstraints
      [ LabeledPackageConstraint
          (PackageConstraint
             (ScopeAnySetupQualifier cabalPkgname)
             (PackagePropertyVersion (orLaterVersion minVersion)))
          ConstraintSetupCabalMinVersion
      ]
  where
    cabalPkgname = mkPackageName "Cabal"

-- | Variant of 'addSetupCabalMinVersionConstraint' which sets an
-- upper bound on @setup.Cabal@ labeled with 'ConstraintSetupCabalMaxVersion'.
--
addSetupCabalMaxVersionConstraint :: Version
                                  -> DepResolverParams -> DepResolverParams
addSetupCabalMaxVersionConstraint maxVersion =
    addConstraints
      [ LabeledPackageConstraint
          (PackageConstraint
             (ScopeAnySetupQualifier cabalPkgname)
             (PackagePropertyVersion (earlierVersion maxVersion)))
          ConstraintSetupCabalMaxVersion
      ]
  where
    cabalPkgname = mkPackageName "Cabal"

-- | Prefer the latest available version of every package.
upgradeDependencies :: DepResolverParams -> DepResolverParams
upgradeDependencies = setPreferenceDefault PreferAllLatest

-- | Hide all installed instances of the targets so the solver is forced
-- to pick them from source again.
reinstallTargets :: DepResolverParams -> DepResolverParams
reinstallTargets params =
    hideInstalledPackagesAllVersions
      (Set.toList $ depResolverTargets params) params

-- | A basic solver policy on which all others are built.
--
basicInstallPolicy :: InstalledPackageIndex
                   -> SourcePackageDb
                   -> [PackageSpecifier UnresolvedSourcePackage]
                   -> DepResolverParams
basicInstallPolicy
    installedPkgIndex
    (SourcePackageDb sourcePkgIndex sourcePkgPrefs)
    pkgSpecifiers =
      addPreferences
        [ PackageVersionPreference name ver
        | (name, ver) <- Map.toList sourcePkgPrefs ]
    . addConstraints
        (concatMap pkgSpecifierConstraints pkgSpecifiers)
    . addTargets
        (map pkgSpecifierTarget pkgSpecifiers)
    . hideInstalledPackagesSpecificBySourcePackageId
        [ packageId pkg | SpecificSourcePackage pkg <- pkgSpecifiers ]
    . addSourcePackages
        [ pkg | SpecificSourcePackage pkg <- pkgSpecifiers ]
    $ basicDepResolverParams installedPkgIndex sourcePkgIndex

-- | The policy used by all the standard commands, install, fetch, freeze etc
-- (but not the v2-build and related commands).
--
-- It extends the 'basicInstallPolicy' with a policy on setup deps.
--
standardInstallPolicy :: InstalledPackageIndex
                      -> SourcePackageDb
                      -> [PackageSpecifier UnresolvedSourcePackage]
                      -> DepResolverParams
standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers =
      addDefaultSetupDependencies mkDefaultSetupDeps
    $ basicInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers
  where
    -- Force Cabal >= 1.24 dep when the package is affected by #3199.
    mkDefaultSetupDeps :: UnresolvedSourcePackage -> Maybe [Dependency]
    mkDefaultSetupDeps srcpkg
      | affected  = Just [ Dependency (mkPackageName "Cabal")
                                      (orLaterVersion $ mkVersion [1,24])
                                      mainLibSet ]
      | otherwise = Nothing
      where
        gpkgdesc = srcpkgDescription srcpkg
        pkgdesc  = PD.packageDescription gpkgdesc
        bt       = PD.buildType pkgdesc
        affected = bt == PD.Custom && hasBuildableFalse gpkgdesc

    -- Does this package contain any components with non-empty 'build-depends'
    -- and a 'buildable' field that could potentially be set to 'False'? False
    -- positives are possible.
    hasBuildableFalse :: PD.GenericPackageDescription -> Bool
    hasBuildableFalse gpkg =
        not (all alwaysTrue (zipWith PD.cOr buildableConditions noDepConditions))
      where
        buildableConditions = PD.extractConditions PD.buildable gpkg
        noDepConditions     =
          PD.extractConditions (null . PD.targetBuildDepends) gpkg
        alwaysTrue (PD.Lit True) = True
        alwaysTrue _             = False

-- ------------------------------------------------------------
-- * Interface to the standard resolver
-- ------------------------------------------------------------

chooseSolver :: Verbosity -> PreSolver -> CompilerInfo -> IO Solver
chooseSolver _verbosity preSolver _cinfo =
    case preSolver of
      AlwaysModular -> return Modular

runSolver :: Solver -> SolverConfig -> DependencyResolver UnresolvedPkgLoc
runSolver Modular = modularResolver

-- | Run the dependency solver.
--
-- Since this is potentially an expensive operation, the result is wrapped in a
-- a 'Progress' structure that can be unfolded to provide progress information,
-- logging messages and the final result or an error.
--
resolveDependencies :: Platform
                    -> CompilerInfo
                    -> PkgConfigDb
                    -> Solver
                    -> DepResolverParams
                    -> Progress String String SolverInstallPlan
--TODO: is this needed here?
-- see dontUpgradeNonUpgradeablePackages
resolveDependencies platform comp _pkgConfigDB _solver params
  | Set.null (depResolverTargets params)
  -- Nothing to solve for: an empty (but valid) plan.
  = return (validateSolverResult platform comp indGoals [])
  where
    indGoals = depResolverIndependentGoals params

resolveDependencies platform comp pkgConfigDB solver params =
      Step (showDepResolverParams finalparams)
    $ fmap (validateSolverResult platform comp indGoals)
    $ runSolver solver
        (SolverConfig reordGoals cntConflicts fineGrained minimize
                      indGoals noReinstalls shadowing strFlags allowBootLibs
                      onlyConstrained_ maxBkjumps enableBj solveExes order
                      verbosity (PruneAfterFirstSuccess False))
        platform comp installedPkgIndex sourcePkgIndex
        pkgConfigDB preferences constraints targets
  where
    -- Unless boot-library installs are explicitly allowed, first remove
    -- the non-upgradeable packages from the solvable set.
    finalparams@(DepResolverParams
        targets constraints prefs defpref
        installedPkgIndex sourcePkgIndex
        reordGoals cntConflicts fineGrained minimize
        indGoals noReinstalls shadowing strFlags allowBootLibs
        onlyConstrained_ maxBkjumps enableBj solveExes order verbosity) =
      if asBool (depResolverAllowBootLibInstalls params)
        then params
        else dontUpgradeNonUpgradeablePackages params

    preferences :: PackageName -> PackagePreferences
    preferences = interpretPackagesPreference targets defpref prefs

-- | Give an interpretation to the global 'PackagesPreference' as
-- specific per-package 'PackageVersionPreference'.
--
interpretPackagesPreference :: Set PackageName
                            -> PackagesPreferenceDefault
                            -> [PackagePreference]
                            -> (PackageName -> PackagePreferences)
interpretPackagesPreference selected defaultPref prefs = \pkgname ->
    PackagePreferences (versionPref pkgname)
                       (installPref pkgname)
                       (stanzasPref pkgname)
  where
    -- Per-package version preferences, defaulting to "any version".
    versionPref :: PackageName -> [VersionRange]
    versionPref pkgname =
      fromMaybe [anyVersion] (Map.lookup pkgname versionPrefs)
    versionPrefs =
      Map.fromListWith (++)
        [ (pkgname, [pref])
        | PackageVersionPreference pkgname pref <- prefs ]

    -- Per-package installed-vs-latest preference, falling back to the
    -- interpretation of the global default.
    installPref :: PackageName -> InstalledPreference
    installPref pkgname =
      fromMaybe (installPrefDefault pkgname) (Map.lookup pkgname installPrefs)
    installPrefs =
      Map.fromList
        [ (pkgname, pref)
        | PackageInstalledPreference pkgname pref <- prefs ]
    installPrefDefault = case defaultPref of
      PreferAllLatest         -> const PreferLatest
      PreferAllInstalled      -> const PreferInstalled
      PreferLatestForSelected -> \pkgname ->
        -- When you say cabal install foo, what you really mean is, prefer the
        -- latest version of foo, but the installed version of everything else
        if pkgname `Set.member` selected
          then PreferLatest
          else PreferInstalled

    -- Per-package optional-stanza preferences.
    stanzasPref :: PackageName -> [OptionalStanza]
    stanzasPref pkgname =
      fromMaybe [] (Map.lookup pkgname stanzasPrefs)
    stanzasPrefs =
      Map.fromListWith (\a b -> nub (a ++ b))
        [ (pkgname, pref)
        | PackageStanzasPreference pkgname pref <- prefs ]

-- ------------------------------------------------------------
-- * Checking the result of the solver
-- ------------------------------------------------------------

-- | Make an install plan from the output of the dep resolver.
-- It checks that the plan is valid, or it's an error in the dep resolver.
--
validateSolverResult :: Platform
                     -> CompilerInfo
                     -> IndependentGoals
                     -> [ResolverPackage UnresolvedPkgLoc]
                     -> SolverInstallPlan
validateSolverResult platform comp indepGoals pkgs =
    case planPackagesProblems platform comp pkgs of
      [] -> case SolverInstallPlan.new indepGoals graph of
              Right plan     -> plan
              Left  problems -> error (formatPlanProblems problems)
      problems -> error (formatPkgProblems problems)
  where
    graph :: Graph.Graph (ResolverPackage UnresolvedPkgLoc)
    graph = Graph.fromDistinctList pkgs

    formatPkgProblems :: [PlanPackageProblem] -> String
    formatPkgProblems = formatProblemMessage . map showPlanPackageProblem

    formatPlanProblems :: [SolverInstallPlan.SolverPlanProblem] -> String
    formatPlanProblems =
      formatProblemMessage . map SolverInstallPlan.showPlanProblem

    -- Both failure cases share the same wrapping message.
    formatProblemMessage problems =
      unlines $
          "internal error: could not construct a valid install plan."
        : "The proposed (invalid) plan contained the following problems:"
        : problems
       ++ "Proposed plan:"
        : [SolverInstallPlan.showPlanIndex pkgs]

-- | Ways in which an individual package in the proposed plan can be broken.
data PlanPackageProblem
  = InvalidConfiguredPackage (SolverPackage UnresolvedPkgLoc) [PackageProblem]
  | DuplicatePackageSolverId SolverId [ResolverPackage UnresolvedPkgLoc]

showPlanPackageProblem :: PlanPackageProblem -> String
showPlanPackageProblem (InvalidConfiguredPackage pkg packageProblems) =
     "Package " ++ prettyShow (packageId pkg)
  ++ " has an invalid configuration, in particular:\n"
  ++ unlines [ " " ++ showPackageProblem problem
             | problem <- packageProblems ]
showPlanPackageProblem (DuplicatePackageSolverId pid dups) =
     "Package " ++ prettyShow (packageId pid) ++ " has "
  ++ show (length dups) ++ " duplicate instances."
-- | Collect all per-package problems in the proposed plan.
planPackagesProblems :: Platform -> CompilerInfo
                     -> [ResolverPackage UnresolvedPkgLoc]
                     -> [PlanPackageProblem]
planPackagesProblems platform cinfo pkgs =
     [ InvalidConfiguredPackage pkg packageProblems
     | Configured pkg <- pkgs
     , let packageProblems = configuredPackageProblems platform cinfo pkg
     , not (null packageProblems) ]
  ++ [ DuplicatePackageSolverId (Graph.nodeKey (Unsafe.head dups)) dups
       -- 'duplicatesBy' only yields non-empty groups, so the head is safe
     | dups <- duplicatesBy (comparing Graph.nodeKey) pkgs ]

-- | Ways in which a single configured package can be inconsistent.
data PackageProblem
  = DuplicateFlag PD.FlagName
  | MissingFlag   PD.FlagName
  | ExtraFlag     PD.FlagName
  | DuplicateDeps [PackageId]
  | MissingDep    Dependency
  | ExtraDep      PackageId
  | InvalidDep    Dependency PackageId

showPackageProblem :: PackageProblem -> String
showPackageProblem (DuplicateFlag flag) =
  "duplicate flag in the flag assignment: " ++ PD.unFlagName flag
showPackageProblem (MissingFlag flag) =
  "missing an assignment for the flag: " ++ PD.unFlagName flag
showPackageProblem (ExtraFlag flag) =
  "extra flag given that is not used by the package: " ++ PD.unFlagName flag
showPackageProblem (DuplicateDeps pkgids) =
     "duplicate packages specified as selected dependencies: "
  ++ intercalate ", " (map prettyShow pkgids)
showPackageProblem (MissingDep dep) =
     "the package has a dependency " ++ prettyShow dep
  ++ " but no package has been selected to satisfy it."
showPackageProblem (ExtraDep pkgid) =
     "the package configuration specifies " ++ prettyShow pkgid
  ++ " but (with the given flag assignment) the package does not actually"
  ++ " depend on any version of that package."
showPackageProblem (InvalidDep dep pkgid) =
     "the package depends on " ++ prettyShow dep
  ++ " but the configuration specifies " ++ prettyShow pkgid
  ++ " which does not satisfy the dependency."

-- | A 'ConfiguredPackage' is valid if the flag assignment is total and if
-- in the configuration given by the flag assignment, all the package
-- dependencies are satisfied by the specified packages.
--
configuredPackageProblems :: Platform -> CompilerInfo
                          -> SolverPackage UnresolvedPkgLoc -> [PackageProblem]
configuredPackageProblems platform cinfo
  (SolverPackage pkg specifiedFlags stanzas specifiedDeps0 _specifiedExeDeps') =
     [ DuplicateFlag flag
     | flag <- PD.findDuplicateFlagAssignments specifiedFlags ]
  ++ [ MissingFlag flag | OnlyInLeft  flag <- mergedFlags ]
  ++ [ ExtraFlag   flag | OnlyInRight flag <- mergedFlags ]
  ++ [ DuplicateDeps pkgs
     | pkgs <- CD.nonSetupDeps
                 (fmap (duplicatesBy (comparing packageName)) specifiedDeps1) ]
  ++ [ MissingDep dep   | OnlyInLeft  dep   <- mergedDeps ]
  ++ [ ExtraDep   pkgid | OnlyInRight pkgid <- mergedDeps ]
  ++ [ InvalidDep dep pkgid
     | InBoth dep pkgid <- mergedDeps
     , not (packageSatisfiesDependency pkgid dep) ]
  -- TODO: sanity tests on executable deps
  where
    thisPkgName :: PackageName
    thisPkgName = packageName (srcpkgDescription pkg)

    specifiedDeps1 :: ComponentDeps [PackageId]
    specifiedDeps1 = fmap (map solverSrcId) specifiedDeps0

    specifiedDeps :: [PackageId]
    specifiedDeps = CD.flatDeps specifiedDeps1

    -- Match the package's declared flags against the assignment we were given.
    mergedFlags :: [MergeResult PD.FlagName PD.FlagName]
    mergedFlags =
      mergeBy compare
        (sort $ map PD.flagName (PD.genPackageFlags (srcpkgDescription pkg)))
        (sort $ map fst (PD.unFlagAssignment specifiedFlags))  -- TODO

    packageSatisfiesDependency :: PackageIdentifier -> Dependency -> Bool
    packageSatisfiesDependency
      (PackageIdentifier name  version)
      (Dependency        name' versionRange _) =
        assert (name == name') $
          version `withinRange` versionRange

    dependencyName (Dependency name _ _) = name

    mergedDeps :: [MergeResult Dependency PackageId]
    mergedDeps = mergeDeps requiredDeps specifiedDeps

    mergeDeps :: [Dependency] -> [PackageId]
              -> [MergeResult Dependency PackageId]
    mergeDeps required specified =
      let sortNubOn f = nubBy ((==) `on` f) . sortBy (compare `on` f)
      in  mergeBy (\dep pkgid -> dependencyName dep `compare` packageName pkgid)
                  (sortNubOn dependencyName required)
                  (sortNubOn packageName   specified)

    compSpec = enableStanzas stanzas
    -- TODO: It would be nicer to use ComponentDeps here so we can be more
    -- precise in our checks. In fact, this no longer relies on buildDepends and
    -- thus should be easier to fix. As long as we _do_ use a flat list here, we
    -- have to allow for duplicates when we fold specifiedDeps; once we have
    -- proper ComponentDeps here we should get rid of the `nubOn` in
    -- `mergeDeps`.
    requiredDeps :: [Dependency]
    requiredDeps =
      --TODO: use something lower level than finalizePD
      case finalizePD specifiedFlags compSpec (const True) platform cinfo
                      [] (srcpkgDescription pkg) of
        Right (resolvedPkg, _) ->
             -- we filter self/internal dependencies. They are still there.
             -- This is INCORRECT.
             --
             -- If we had per-component solver, it would make this unnecessary,
             -- but no finalizePDs picks components we are not building, eg. exes.
             -- See #3775
             filter ((/= thisPkgName) . dependencyName)
                    (PD.enabledBuildDepends resolvedPkg compSpec)
          ++ maybe [] PD.setupDepends (PD.setupBuildInfo resolvedPkg)
        Left _ ->
          error "configuredPackageInvalidDeps internal error"

-- ------------------------------------------------------------
-- * Simple resolver that ignores dependencies
-- ------------------------------------------------------------

-- | A simplistic method of resolving a list of target package names to
-- available packages.
--
-- Specifically, it does not consider package dependencies at all. Unlike
-- 'resolveDependencies', no attempt is made to ensure that the selected
-- packages have dependencies that are satisfiable or consistent with
-- each other.
--
-- It is suitable for tasks such as selecting packages to download for user
-- inspection. It is not suitable for selecting packages to install.
--
-- Note: if no installed package index is available, it is OK to pass 'mempty'.
-- It simply means preferences for installed packages will be ignored.
--
resolveWithoutDependencies :: DepResolverParams
                           -> Either [ResolveNoDepsError]
                                     [UnresolvedSourcePackage]
resolveWithoutDependencies
  (DepResolverParams targets constraints prefs defpref
                     installedPkgIndex sourcePkgIndex
                     _reorderGoals _countConflicts _fineGrained
                     _minimizeConflictSet _indGoals _avoidReinstalls
                     _shadowing _strFlags _maxBjumps _enableBj
                     _solveExes _allowBootLibInstalls _onlyConstrained
                     _order _verbosity) =
    collectEithers (map selectPackage (Set.toList targets))
  where
    selectPackage :: PackageName
                  -> Either ResolveNoDepsError UnresolvedSourcePackage
    selectPackage pkgname
      | null choices = Left  $! ResolveUnsatisfiable pkgname requiredVersions
      | otherwise    = Right $! maximumBy bestByPrefs choices
      where
        -- Constraints
        requiredVersions :: VersionRange
        requiredVersions = packageConstraints pkgname

        choices :: [UnresolvedSourcePackage]
        choices = PackageIndex.lookupDependency sourcePkgIndex pkgname
                                                requiredVersions

        -- Preferences
        PackagePreferences preferredVersions preferInstalled _ =
          packagePreferences pkgname

        -- Order candidates by install preference, then by how many
        -- preferred ranges they fall into, then by version.
        bestByPrefs :: UnresolvedSourcePackage -> UnresolvedSourcePackage
                    -> Ordering
        bestByPrefs = comparing $ \pkg ->
          (installPref pkg, versionPref pkg, packageVersion pkg)

        installPref :: UnresolvedSourcePackage -> Bool
        installPref = case preferInstalled of
          PreferLatest    -> const False
          PreferInstalled ->
              not . null
            . InstalledPackageIndex.lookupSourcePackageId installedPkgIndex
            . packageId

        versionPref :: Package a => a -> Int
        versionPref pkg =
          length . filter (packageVersion pkg `withinRange`) $
            preferredVersions

    packageConstraints :: PackageName -> VersionRange
    packageConstraints pkgname =
      Map.findWithDefault anyVersion pkgname packageVersionConstraintMap

    packageVersionConstraintMap :: Map PackageName VersionRange
    packageVersionConstraintMap =
      let pcs = map unlabelPackageConstraint constraints
      in  Map.fromList
            [ (scopeToPackageName scope, range)
            | PackageConstraint scope (PackagePropertyVersion range) <- pcs ]

    packagePreferences :: PackageName -> PackagePreferences
    packagePreferences = interpretPackagesPreference targets defpref prefs

-- | All 'Right's if there are no 'Left's at all, otherwise all the 'Left's.
collectEithers :: [Either a b] -> Either [a] [b]
collectEithers = collect . partitionEithers
  where
    collect ([],   xs) = Right xs
    collect (errs, _ ) = Left errs

-- | Errors for 'resolveWithoutDependencies'.
--
data ResolveNoDepsError =

     -- | A package name which cannot be resolved to a specific package.
     -- Also gives the constraint on the version and whether there was
     -- a constraint on the package being installed.
     ResolveUnsatisfiable PackageName VersionRange

instance Show ResolveNoDepsError where
  show (ResolveUnsatisfiable name ver) =
       "There is no available version of " ++ prettyShow name
    ++ " that satisfies " ++ prettyShow (simplifyVersionRange ver)

-- ===========================================================================
-- file: src/Distribution/Client/Dependency/Types.hs
-- ===========================================================================

{-# LANGUAGE DeriveGeneric #-}
module Distribution.Client.Dependency.Types
  ( PreSolver(..)
  , Solver(..)
  , PackagesPreferenceDefault(..)
  ) where

import Distribution.Client.Compat.Prelude
import Prelude ()

import Text.PrettyPrint (text)

import qualified Distribution.Compat.CharParsing as P

-- | All the solvers that can be selected.
data PreSolver = AlwaysModular
  deriving (Eq, Ord, Show, Bounded, Enum, Generic)

-- | All the solvers that can be used.
data Solver = Modular
  deriving (Eq, Ord, Show, Bounded, Enum, Generic)

instance Binary PreSolver
instance Binary Solver
instance Structured PreSolver
instance Structured Solver

instance Pretty PreSolver where
  pretty AlwaysModular = text "modular"

instance Parsec PreSolver where
  parsec = do
    name <- P.munch1 isAlpha
    -- accept the solver name case-insensitively
    case map toLower name of
      "modular" -> return AlwaysModular
      _         -> P.unexpected $ "PreSolver: " ++ name

-- | Global policy for all packages to say if we prefer package versions that
-- are already installed locally or if we just prefer the latest available.
--
data PackagesPreferenceDefault =

     -- | Always prefer the latest version irrespective of any existing
     -- installed version.
     --
     -- * This is the standard policy for upgrade.
     --
     PreferAllLatest

     -- | Always prefer the installed versions over ones that would need to be
     -- installed. Secondarily, prefer latest versions (eg the latest installed
     -- version or if there are none then the latest source version).
   | PreferAllInstalled

     -- | Prefer the latest version for packages that are explicitly requested
     -- but prefers the installed version for any other packages.
     --
     -- * This is the standard policy for install.
     --
   | PreferLatestForSelected
  deriving Show

-- ===========================================================================
-- file: src/Distribution/Client/DistDirLayout.hs
-- ===========================================================================

{-# LANGUAGE RecordWildCards #-}

-- |
--
-- The layout of the .\/dist\/ directory where cabal keeps all of its state
-- and build artifacts.
--
module Distribution.Client.DistDirLayout
  ( -- * 'DistDirLayout'
    DistDirLayout(..)
  , DistDirParams(..)
  , defaultDistDirLayout
  , ProjectRoot(..)
    -- * 'StoreDirLayout'
  , StoreDirLayout(..)
  , defaultStoreDirLayout
    -- * 'CabalDirLayout'
  , CabalDirLayout(..)
  , mkCabalDirLayout
  , defaultCabalDirLayout
  ) where

import Distribution.Client.Compat.Prelude
import Prelude ()

import System.FilePath

import Distribution.Package
  ( PackageId, PackageIdentifier, ComponentId, UnitId )
import Distribution.Compiler
import Distribution.Simple.Compiler
  ( PackageDB(..), PackageDBStack, OptimisationLevel(..) )
import Distribution.Types.ComponentName
import Distribution.Types.LibraryName
import Distribution.System

-- | Information which can be used to construct the path to
-- the build directory of a build. This is LESS fine-grained
-- than what goes into the hashed 'InstalledPackageId',
-- and for good reason: we don't want this path to change if
-- the user, say, adds a dependency to their project.
data DistDirParams = DistDirParams
  { distParamUnitId        :: UnitId
  , distParamPackageId     :: PackageId
  , distParamComponentId   :: ComponentId
  , distParamComponentName :: Maybe ComponentName
  , distParamCompilerId    :: CompilerId
  , distParamPlatform      :: Platform
  , distParamOptimization  :: OptimisationLevel
    -- TODO (see #3343):
    --  Flag assignments
    --  Optimization
  }

-- | The layout of the project state directory. Traditionally this has been
-- called the @dist@ directory.
--
data DistDirLayout = DistDirLayout
  { -- | The root directory of the project. Many other files are relative to
    -- this location. In particular, the @cabal.project@ lives here.
    distProjectRootDirectory :: FilePath

    -- | The @cabal.project@ file and related like @cabal.project.freeze@.
    -- The parameter is for the extension, like \"freeze\", or \"\" for the
    -- main file.
  , distProjectFile :: String -> FilePath

    -- | The \"dist\" directory, which is the root of where cabal keeps all
    -- its state including the build artifacts from each package we build.
  , distDirectory :: FilePath

    -- | The directory under dist where we keep the build artifacts for a
    -- package we're building from a local directory.
    --
    -- This uses a 'UnitId' not just a 'PackageName' because technically
    -- we can have multiple instances of the same package in a solution
    -- (e.g. setup deps).
  , distBuildDirectory     :: DistDirParams -> FilePath
  , distBuildRootDirectory :: FilePath

    -- | The directory under dist where we download tarballs and source
    -- control repos to.
  , distDownloadSrcDirectory :: FilePath

    -- | The directory under dist where we put the unpacked sources of
    -- packages, in those cases where it makes sense to keep the build
    -- artifacts to reduce rebuild times.
  , distUnpackedSrcDirectory     :: PackageId -> FilePath
  , distUnpackedSrcRootDirectory :: FilePath

    -- | The location for project-wide cache files (e.g. state used in
    -- incremental rebuilds).
  , distProjectCacheFile      :: String -> FilePath
  , distProjectCacheDirectory :: FilePath

    -- | The location for package-specific cache files (e.g. state used in
    -- incremental rebuilds).
  , distPackageCacheFile      :: DistDirParams -> String -> FilePath
  , distPackageCacheDirectory :: DistDirParams -> FilePath

    -- | The location that sdists are placed by default.
  , distSdistFile      :: PackageId -> FilePath
  , distSdistDirectory :: FilePath

  , distTempDirectory :: FilePath
  , distBinDirectory  :: FilePath

  , distPackageDB :: CompilerId -> PackageDB
  }

-- | The layout of a cabal nix-style store.
--
data StoreDirLayout = StoreDirLayout
  { storeDirectory         :: CompilerId -> FilePath
  , storePackageDirectory  :: CompilerId -> UnitId -> FilePath
  , storePackageDBPath     :: CompilerId -> FilePath
  , storePackageDB         :: CompilerId -> PackageDB
  , storePackageDBStack    :: CompilerId -> PackageDBStack
  , storeIncomingDirectory :: CompilerId -> FilePath
  , storeIncomingLock      :: CompilerId -> UnitId -> FilePath
  }

--TODO: move to another module, e.g. CabalDirLayout?
-- or perhaps rename this module to DirLayouts.

-- | The layout of the user-wide cabal directory, that is the @~/.cabal@ dir
-- on unix, and equivalents on other systems.
--
-- At the moment this is just a partial specification, but the idea is
-- eventually to cover it all.
--
data CabalDirLayout = CabalDirLayout
  { cabalStoreDirLayout :: StoreDirLayout
  , cabalLogsDirectory  :: FilePath
  }

-- | Information about the root directory of the project.
--
-- It can either be an implicit project root in the current dir if no
-- @cabal.project@ file is found, or an explicit root if the file is found.
--
data ProjectRoot =
     -- | An implicit project root. It contains the absolute project
     -- root dir.
     ProjectRootImplicit FilePath

     -- | An explicit project root. It contains the absolute project
     -- root dir and the relative @cabal.project@ file (or explicit override)
   | ProjectRootExplicit FilePath FilePath
  deriving (Eq, Show)

-- | Make the default 'DistDirLayout' based on the project root dir and
-- optional overrides for the location of the @dist@ directory and the
-- @cabal.project@ file.
--
defaultDistDirLayout :: ProjectRoot    -- ^ the project root
                     -> Maybe FilePath -- ^ the @dist@ directory or default
                                       --   (absolute or relative to the root)
                     -> DistDirLayout
defaultDistDirLayout projectRoot mdistDirectory =
    DistDirLayout {..}
  where
    -- NOTE(review): the extraction of this file dropped the 'System.FilePath'
    -- path-join operator @</>@ throughout (e.g. @distDirectory "build"@),
    -- which does not type-check; the joins are restored below.
    (projectRootDir, projectFile) = case projectRoot of
      ProjectRootImplicit dir      -> (dir, dir </> "cabal.project")
      ProjectRootExplicit dir file -> (dir, dir </> file)

    distProjectRootDirectory :: FilePath
    distProjectRootDirectory = projectRootDir

    distProjectFile :: String -> FilePath
    distProjectFile ext = projectFile <.> ext

    distDirectory :: FilePath
    distDirectory = distProjectRootDirectory
                </> fromMaybe "dist-newstyle" mdistDirectory
    --TODO: switch to just dist at some point, or some other new name

    distBuildRootDirectory :: FilePath
    distBuildRootDirectory = distDirectory </> "build"

    distBuildDirectory :: DistDirParams -> FilePath
    distBuildDirectory params =
        distBuildRootDirectory
          </> prettyShow (distParamPlatform params)
          </> prettyShow (distParamCompilerId params)
          </> prettyShow (distParamPackageId params)
          </> (case distParamComponentName params of
                 Nothing                            -> ""
                 Just (CLibName LMainLibName)       -> ""
                 Just (CLibName (LSubLibName name)) -> "l" </> prettyShow name
                 Just (CFLibName name)              -> "f" </> prettyShow name
                 Just (CExeName name)               -> "x" </> prettyShow name
                 Just (CTestName name)              -> "t" </> prettyShow name
                 Just (CBenchName name)             -> "b" </> prettyShow name)
          </> (case distParamOptimization params of
                 NoOptimisation      -> "noopt"
                 NormalOptimisation  -> ""
                 MaximumOptimisation -> "opt")
          </> (let uid_str = prettyShow (distParamUnitId params)
               in if uid_str == prettyShow (distParamComponentId params)
                    then ""
                    else uid_str)

    distUnpackedSrcRootDirectory :: FilePath
    distUnpackedSrcRootDirectory = distDirectory </> "src"

    distUnpackedSrcDirectory :: PackageId -> FilePath
    distUnpackedSrcDirectory pkgid =
      distUnpackedSrcRootDirectory </> prettyShow pkgid

    -- we shouldn't get name clashes so this should be fine:
    distDownloadSrcDirectory :: FilePath
    distDownloadSrcDirectory = distUnpackedSrcRootDirectory

    distProjectCacheDirectory :: FilePath
    distProjectCacheDirectory = distDirectory </> "cache"

    distProjectCacheFile :: FilePath -> FilePath
    distProjectCacheFile name = distProjectCacheDirectory </> name

    distPackageCacheDirectory :: DistDirParams -> FilePath
    distPackageCacheDirectory params = distBuildDirectory params </> "cache"

    distPackageCacheFile :: DistDirParams -> String -> FilePath
    distPackageCacheFile params name =
      distPackageCacheDirectory params </> name

    distSdistFile :: PackageIdentifier -> FilePath
    distSdistFile pid = distSdistDirectory </> prettyShow pid <.> "tar.gz"

    distSdistDirectory :: FilePath
    distSdistDirectory = distDirectory </> "sdist"

    distTempDirectory :: FilePath
    distTempDirectory = distDirectory </> "tmp"

    distBinDirectory :: FilePath
    distBinDirectory = distDirectory </> "bin"

    distPackageDBPath :: CompilerId -> FilePath
    distPackageDBPath compid =
      distDirectory </> "packagedb" </> prettyShow compid

    distPackageDB :: CompilerId -> PackageDB
    distPackageDB = SpecificPackageDB . distPackageDBPath

-- | The default layout of the nix-style store, rooted at the given directory.
defaultStoreDirLayout :: FilePath -> StoreDirLayout
defaultStoreDirLayout storeRoot =
    StoreDirLayout {..}
  where
    storeDirectory :: CompilerId -> FilePath
    storeDirectory compid = storeRoot </> prettyShow compid

    storePackageDirectory :: CompilerId -> UnitId -> FilePath
    storePackageDirectory compid ipkgid =
      storeDirectory compid </> prettyShow ipkgid

    storePackageDBPath :: CompilerId -> FilePath
    storePackageDBPath compid = storeDirectory compid </> "package.db"

    storePackageDB :: CompilerId -> PackageDB
    storePackageDB compid = SpecificPackageDB (storePackageDBPath compid)

    storePackageDBStack :: CompilerId -> PackageDBStack
    storePackageDBStack compid = [GlobalPackageDB, storePackageDB compid]

    storeIncomingDirectory :: CompilerId -> FilePath
    storeIncomingDirectory compid = storeDirectory compid </> "incoming"

    storeIncomingLock :: CompilerId -> UnitId -> FilePath
    storeIncomingLock compid unitid =
      storeIncomingDirectory compid </> prettyShow unitid <.> "lock"

defaultCabalDirLayout :: FilePath -> CabalDirLayout
-- Default cabal dir layout: derive everything from the cabal directory.
defaultCabalDirLayout cabalDir =
    mkCabalDirLayout cabalDir Nothing Nothing

mkCabalDirLayout :: FilePath       -- ^ Cabal directory
                 -> Maybe FilePath -- ^ Store directory. Must be absolute
                 -> Maybe FilePath -- ^ Log directory
                 -> CabalDirLayout
mkCabalDirLayout cabalDir mstoreDir mlogDir =
    CabalDirLayout {..}
  where
    -- NOTE(review): the @</>@ path-join operators were stripped by the
    -- extraction (e.g. @cabalDir "store"@); restored below.
    cabalStoreDirLayout :: StoreDirLayout
    cabalStoreDirLayout =
      defaultStoreDirLayout (fromMaybe (cabalDir </> "store") mstoreDir)

    cabalLogsDirectory :: FilePath
    cabalLogsDirectory = fromMaybe (cabalDir </> "logs") mlogDir

-- ===========================================================================
-- file: src/Distribution/Client/Fetch.hs
-- ===========================================================================

-----------------------------------------------------------------------------
-- |
-- Module      :  Distribution.Client.Fetch
-- Copyright   :  (c) David Himmelstrup 2005
--                    Duncan Coutts 2011
-- License     :  BSD-like
--
-- Maintainer  :  cabal-devel@gmail.com
-- Stability   :  provisional
-- Portability :  portable
--
-- The cabal fetch command
-----------------------------------------------------------------------------
module Distribution.Client.Fetch
  ( fetch
  ) where

import Distribution.Client.Compat.Prelude
import Prelude ()

import Distribution.Client.Types
import Distribution.Client.Targets
import Distribution.Client.FetchUtils hiding (fetchPackage)
import Distribution.Client.Dependency
import Distribution.Client.IndexUtils as IndexUtils
  ( getSourcePackages, getInstalledPackages )
import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan
import Distribution.Client.Setup
  ( GlobalFlags(..), FetchFlags(..), RepoContext(..)
) import Distribution.Solver.Types.ConstraintSource import Distribution.Solver.Types.LabeledPackageConstraint import Distribution.Solver.Types.OptionalStanza import Distribution.Solver.Types.PkgConfigDb ( PkgConfigDb, readPkgConfigDb ) import Distribution.Solver.Types.SolverPackage import Distribution.Solver.Types.SourcePackage import Distribution.Package ( packageId ) import Distribution.Simple.Compiler ( Compiler, compilerInfo, PackageDBStack ) import Distribution.Simple.PackageIndex (InstalledPackageIndex) import Distribution.Simple.Program ( ProgramDb ) import Distribution.Simple.Setup ( fromFlag, fromFlagOrDefault ) import Distribution.Simple.Utils ( die', notice, debug ) import Distribution.System ( Platform ) -- ------------------------------------------------------------ -- * The fetch command -- ------------------------------------------------------------ --TODO: -- * add fetch -o support -- * support tarball URLs via ad-hoc download cache (or in -o mode?) -- * suggest using --no-deps, unpack or fetch -o if deps cannot be satisfied -- * Port various flags from install: -- * --upgrade-dependencies -- * --constraint and --preference -- * --only-dependencies, but note it conflicts with --no-deps -- | Fetch a list of packages and their dependencies. -- fetch :: Verbosity -> PackageDBStack -> RepoContext -> Compiler -> Platform -> ProgramDb -> GlobalFlags -> FetchFlags -> [UserTarget] -> IO () fetch verbosity _ _ _ _ _ _ _ [] = notice verbosity "No packages requested. Nothing to do." 
fetch verbosity packageDBs repoCtxt comp platform progdb _ fetchFlags userTargets = do traverse_ (checkTarget verbosity) userTargets installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb sourcePkgDb <- getSourcePackages verbosity repoCtxt pkgConfigDb <- readPkgConfigDb verbosity progdb pkgSpecifiers <- resolveUserTargets verbosity repoCtxt (packageIndex sourcePkgDb) userTargets pkgs <- planPackages verbosity comp platform fetchFlags installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers pkgs' <- filterM (fmap not . isFetched . srcpkgSource) pkgs if null pkgs' --TODO: when we add support for remote tarballs then this message -- will need to be changed because for remote tarballs we fetch them -- at the earlier phase. then notice verbosity $ "No packages need to be fetched. " ++ "All the requested packages are already local " ++ "or cached locally." else if dryRun then notice verbosity $ unlines $ "The following packages would be fetched:" : map (prettyShow . packageId) pkgs' else traverse_ (fetchPackage verbosity repoCtxt . srcpkgSource) pkgs' where dryRun = fromFlag (fetchDryRun fetchFlags) planPackages :: Verbosity -> Compiler -> Platform -> FetchFlags -> InstalledPackageIndex -> SourcePackageDb -> PkgConfigDb -> [PackageSpecifier UnresolvedSourcePackage] -> IO [UnresolvedSourcePackage] planPackages verbosity comp platform fetchFlags installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers | includeDependencies = do solver <- chooseSolver verbosity (fromFlag (fetchSolver fetchFlags)) (compilerInfo comp) notice verbosity "Resolving dependencies..." installPlan <- foldProgress logMsg (die' verbosity) return $ resolveDependencies platform (compilerInfo comp) pkgConfigDb solver resolverParams -- The packages we want to fetch are those packages the 'InstallPlan' -- that are in the 'InstallPlan.Configured' state. 
return [ solverPkgSource cpkg | (SolverInstallPlan.Configured cpkg) <- SolverInstallPlan.toList installPlan ] | otherwise = either (die' verbosity . unlines . map show) return $ resolveWithoutDependencies resolverParams where resolverParams :: DepResolverParams resolverParams = setMaxBackjumps (if maxBackjumps < 0 then Nothing else Just maxBackjumps) . setIndependentGoals independentGoals . setReorderGoals reorderGoals . setCountConflicts countConflicts . setFineGrainedConflicts fineGrainedConflicts . setMinimizeConflictSet minimizeConflictSet . setShadowPkgs shadowPkgs . setStrongFlags strongFlags . setAllowBootLibInstalls allowBootLibInstalls . setOnlyConstrained onlyConstrained . setSolverVerbosity verbosity . addConstraints [ let pc = PackageConstraint (scopeToplevel $ pkgSpecifierTarget pkgSpecifier) (PackagePropertyStanzas stanzas) in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget | pkgSpecifier <- pkgSpecifiers ] -- Reinstall the targets given on the command line so that the dep -- resolver will decide that they need fetching, even if they're -- already installed. Since we want to get the source packages of -- things we might have installed (but not have the sources for). . 
reinstallTargets $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers includeDependencies = fromFlag (fetchDeps fetchFlags) logMsg message rest = debug verbosity message >> rest stanzas = [ TestStanzas | testsEnabled ] ++ [ BenchStanzas | benchmarksEnabled ] testsEnabled = fromFlagOrDefault False $ fetchTests fetchFlags benchmarksEnabled = fromFlagOrDefault False $ fetchBenchmarks fetchFlags reorderGoals = fromFlag (fetchReorderGoals fetchFlags) countConflicts = fromFlag (fetchCountConflicts fetchFlags) fineGrainedConflicts = fromFlag (fetchFineGrainedConflicts fetchFlags) minimizeConflictSet = fromFlag (fetchMinimizeConflictSet fetchFlags) independentGoals = fromFlag (fetchIndependentGoals fetchFlags) shadowPkgs = fromFlag (fetchShadowPkgs fetchFlags) strongFlags = fromFlag (fetchStrongFlags fetchFlags) maxBackjumps = fromFlag (fetchMaxBackjumps fetchFlags) allowBootLibInstalls = fromFlag (fetchAllowBootLibInstalls fetchFlags) onlyConstrained = fromFlag (fetchOnlyConstrained fetchFlags) checkTarget :: Verbosity -> UserTarget -> IO () checkTarget verbosity target = case target of UserTargetRemoteTarball _uri -> die' verbosity $ "The 'fetch' command does not yet support remote tarballs. " ++ "In the meantime you can use the 'unpack' commands." _ -> return () fetchPackage :: Verbosity -> RepoContext -> PackageLocation a -> IO () fetchPackage verbosity repoCtxt pkgsrc = case pkgsrc of LocalUnpackedPackage _dir -> return () LocalTarballPackage _file -> return () RemoteTarballPackage _uri _ -> die' verbosity $ "The 'fetch' command does not yet support remote tarballs. " ++ "In the meantime you can use the 'unpack' commands." RemoteSourceRepoPackage _repo _ -> die' verbosity $ "The 'fetch' command does not yet support remote " ++ "source repositories." 
RepoTarballPackage repo pkgid _ -> do _ <- fetchRepoTarball verbosity repoCtxt repo pkgid return () cabal-install-3.8.1.0/src/Distribution/Client/FetchUtils.hs0000644000000000000000000002742407346545000021663 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.FetchUtils -- Copyright : (c) David Himmelstrup 2005 -- Duncan Coutts 2011 -- License : BSD-like -- -- Maintainer : cabal-devel@gmail.com -- Stability : provisional -- Portability : portable -- -- Functions for fetching packages ----------------------------------------------------------------------------- {-# LANGUAGE RecordWildCards #-} module Distribution.Client.FetchUtils ( -- * fetching packages fetchPackage, isFetched, checkFetched, -- ** specifically for repo packages checkRepoTarballFetched, fetchRepoTarball, -- ** fetching packages asynchronously asyncFetchPackages, waitAsyncFetchPackage, AsyncFetchMap, -- * fetching other things downloadIndex, ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.Types import Distribution.Client.HttpUtils ( downloadURI, isOldHackageURI, DownloadResult(..) , HttpTransport(..), transportCheckHttps, remoteRepoCheckHttps ) import Distribution.Package ( PackageId, packageName, packageVersion ) import Distribution.Simple.Utils ( notice, info, debug, die' ) import Distribution.Verbosity ( verboseUnmarkOutput ) import Distribution.Client.GlobalFlags ( RepoContext(..) 
) import Distribution.Client.Utils ( ProgressPhase(..), progressMessage ) import qualified Data.Map as Map import qualified Control.Exception.Safe as Safe import Control.Concurrent.Async import Control.Concurrent.MVar import System.Directory ( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory ) import System.IO ( openTempFile, hClose ) import System.FilePath ( (), (<.>) ) import qualified System.FilePath.Posix as FilePath.Posix ( combine, joinPath ) import Network.URI ( URI(uriPath) ) import qualified Hackage.Security.Client as Sec -- ------------------------------------------------------------ -- * Actually fetch things -- ------------------------------------------------------------ -- | Returns @True@ if the package has already been fetched -- or does not need fetching. -- isFetched :: UnresolvedPkgLoc -> IO Bool isFetched loc = case loc of LocalUnpackedPackage _dir -> return True LocalTarballPackage _file -> return True RemoteTarballPackage _uri local -> return (isJust local) RepoTarballPackage repo pkgid _ -> doesFileExist (packageFile repo pkgid) RemoteSourceRepoPackage _ local -> return (isJust local) -- | Checks if the package has already been fetched (or does not need -- fetching) and if so returns evidence in the form of a 'PackageLocation' -- with a resolved local file location. 
-- checkFetched :: UnresolvedPkgLoc -> IO (Maybe ResolvedPkgLoc) checkFetched loc = case loc of LocalUnpackedPackage dir -> return (Just $ LocalUnpackedPackage dir) LocalTarballPackage file -> return (Just $ LocalTarballPackage file) RemoteTarballPackage uri (Just file) -> return (Just $ RemoteTarballPackage uri file) RepoTarballPackage repo pkgid (Just file) -> return (Just $ RepoTarballPackage repo pkgid file) RemoteSourceRepoPackage repo (Just file) -> return (Just $ RemoteSourceRepoPackage repo file) RemoteTarballPackage _uri Nothing -> return Nothing RemoteSourceRepoPackage _repo Nothing -> return Nothing RepoTarballPackage repo pkgid Nothing -> fmap (fmap (RepoTarballPackage repo pkgid)) (checkRepoTarballFetched repo pkgid) -- | Like 'checkFetched' but for the specific case of a 'RepoTarballPackage'. -- checkRepoTarballFetched :: Repo -> PackageId -> IO (Maybe FilePath) checkRepoTarballFetched repo pkgid = do let file = packageFile repo pkgid exists <- doesFileExist file if exists then return (Just file) else return Nothing -- | Fetch a package if we don't have it already. 
-- fetchPackage :: Verbosity -> RepoContext -> UnresolvedPkgLoc -> IO ResolvedPkgLoc fetchPackage verbosity repoCtxt loc = case loc of LocalUnpackedPackage dir -> return (LocalUnpackedPackage dir) LocalTarballPackage file -> return (LocalTarballPackage file) RemoteTarballPackage uri (Just file) -> return (RemoteTarballPackage uri file) RepoTarballPackage repo pkgid (Just file) -> return (RepoTarballPackage repo pkgid file) RemoteSourceRepoPackage repo (Just dir) -> return (RemoteSourceRepoPackage repo dir) RemoteTarballPackage uri Nothing -> do path <- downloadTarballPackage uri return (RemoteTarballPackage uri path) RepoTarballPackage repo pkgid Nothing -> do local <- fetchRepoTarball verbosity repoCtxt repo pkgid return (RepoTarballPackage repo pkgid local) RemoteSourceRepoPackage _repo Nothing -> die' verbosity "fetchPackage: source repos not supported" where downloadTarballPackage :: URI -> IO FilePath downloadTarballPackage uri = do transport <- repoContextGetTransport repoCtxt transportCheckHttps verbosity transport uri notice verbosity ("Downloading " ++ show uri) tmpdir <- getTemporaryDirectory (path, hnd) <- openTempFile tmpdir "cabal-.tar.gz" hClose hnd _ <- downloadURI transport verbosity uri path return path -- | Fetch a repo package if we don't have it already. -- fetchRepoTarball :: Verbosity -> RepoContext -> Repo -> PackageId -> IO FilePath fetchRepoTarball verbosity' repoCtxt repo pkgid = do fetched <- doesFileExist (packageFile repo pkgid) if fetched then do info verbosity $ prettyShow pkgid ++ " has already been downloaded." 
return (packageFile repo pkgid) else do progressMessage verbosity ProgressDownloading (prettyShow pkgid) res <- downloadRepoPackage progressMessage verbosity ProgressDownloaded (prettyShow pkgid) return res where -- whether we download or not is non-deterministic verbosity = verboseUnmarkOutput verbosity' downloadRepoPackage :: IO FilePath downloadRepoPackage = case repo of RepoLocalNoIndex{} -> return (packageFile repo pkgid) RepoRemote{..} -> do transport <- repoContextGetTransport repoCtxt remoteRepoCheckHttps verbosity transport repoRemote let uri = packageURI repoRemote pkgid dir = packageDir repo pkgid path = packageFile repo pkgid createDirectoryIfMissing True dir _ <- downloadURI transport verbosity uri path return path RepoSecure{} -> repoContextWithSecureRepo repoCtxt repo $ \rep -> do let dir = packageDir repo pkgid path = packageFile repo pkgid createDirectoryIfMissing True dir Sec.uncheckClientErrors $ do info verbosity ("Writing " ++ path) Sec.downloadPackage' rep pkgid path return path -- | Downloads an index file to [config-dir/packages/serv-id] without -- hackage-security. You probably don't want to call this directly; -- use 'updateRepo' instead. -- downloadIndex :: HttpTransport -> Verbosity -> RemoteRepo -> FilePath -> IO DownloadResult downloadIndex transport verbosity remoteRepo cacheDir = do remoteRepoCheckHttps verbosity transport remoteRepo let uri = (remoteRepoURI remoteRepo) { uriPath = uriPath (remoteRepoURI remoteRepo) `FilePath.Posix.combine` "00-index.tar.gz" } path = cacheDir "00-index" <.> "tar.gz" createDirectoryIfMissing True cacheDir downloadURI transport verbosity uri path -- ------------------------------------------------------------ -- * Async fetch wrapper utilities -- ------------------------------------------------------------ type AsyncFetchMap = Map UnresolvedPkgLoc (MVar (Either SomeException ResolvedPkgLoc)) -- | Fork off an async action to download the given packages (by location). 
-- -- The downloads are initiated in order, so you can arrange for packages that -- will likely be needed sooner to be earlier in the list. -- -- The body action is passed a map from those packages (identified by their -- location) to a completion var for that package. So the body action should -- lookup the location and use 'waitAsyncFetchPackage' to get the result. -- -- Synchronous exceptions raised by the download actions are delivered -- via 'waitAsyncFetchPackage'. -- asyncFetchPackages :: Verbosity -> RepoContext -> [UnresolvedPkgLoc] -> (AsyncFetchMap -> IO a) -> IO a asyncFetchPackages verbosity repoCtxt pkglocs body = do --TODO: [nice to have] use parallel downloads? asyncDownloadVars <- sequenceA [ do v <- newEmptyMVar return (pkgloc, v) | pkgloc <- pkglocs ] let fetchPackages :: IO () fetchPackages = for_ asyncDownloadVars $ \(pkgloc, var) -> do -- Suppress marking here, because 'withAsync' means -- that we get nondeterministic interleaving. -- It is essential that we don't catch async exceptions here, -- specifically 'AsyncCancelled' thrown at us from 'concurrently'. result <- Safe.try $ fetchPackage (verboseUnmarkOutput verbosity) repoCtxt pkgloc putMVar var result (_, res) <- concurrently fetchPackages (body $ Map.fromList asyncDownloadVars) pure res -- | Expect to find a download in progress in the given 'AsyncFetchMap' -- and wait on it to finish. -- -- If the download failed with an exception then this will be thrown. -- -- Note: This function is supposed to be idempotent, as our install plans -- can now use the same tarball for many builds, e.g. different -- components and/or qualified goals, and these all go through the -- download phase so we end up using 'waitAsyncFetchPackage' twice on -- the same package. C.f. #4461. 
waitAsyncFetchPackage :: Verbosity -> AsyncFetchMap -> UnresolvedPkgLoc -> IO ResolvedPkgLoc waitAsyncFetchPackage verbosity downloadMap srcloc = case Map.lookup srcloc downloadMap of Just hnd -> do debug verbosity $ "Waiting for download of " ++ show srcloc either throwIO return =<< readMVar hnd Nothing -> fail "waitAsyncFetchPackage: package not being downloaded" -- ------------------------------------------------------------ -- * Path utilities -- ------------------------------------------------------------ -- | Generate the full path to the locally cached copy of -- the tarball for a given @PackageIdentifer@. -- packageFile :: Repo -> PackageId -> FilePath packageFile repo pkgid = packageDir repo pkgid prettyShow pkgid <.> "tar.gz" -- | Generate the full path to the directory where the local cached copy of -- the tarball for a given @PackageIdentifer@ is stored. -- packageDir :: Repo -> PackageId -> FilePath packageDir (RepoLocalNoIndex (LocalRepo _ dir _) _) _pkgid = dir packageDir repo pkgid = repoLocalDir repo prettyShow (packageName pkgid) prettyShow (packageVersion pkgid) -- | Generate the URI of the tarball for a given package. 
-- packageURI :: RemoteRepo -> PackageId -> URI packageURI repo pkgid | isOldHackageURI (remoteRepoURI repo) = (remoteRepoURI repo) { uriPath = FilePath.Posix.joinPath [uriPath (remoteRepoURI repo) ,prettyShow (packageName pkgid) ,prettyShow (packageVersion pkgid) ,prettyShow pkgid <.> "tar.gz"] } packageURI repo pkgid = (remoteRepoURI repo) { uriPath = FilePath.Posix.joinPath [uriPath (remoteRepoURI repo) ,"package" ,prettyShow pkgid <.> "tar.gz"] } cabal-install-3.8.1.0/src/Distribution/Client/FileMonitor.hs0000644000000000000000000014037207346545000022036 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE DeriveGeneric, DeriveFunctor, GeneralizedNewtypeDeriving, NamedFieldPuns, BangPatterns, ScopedTypeVariables #-} {-# OPTIONS_GHC -fno-warn-orphans #-} {-# LANGUAGE ScopedTypeVariables #-} -- | An abstraction to help with re-running actions when files or other -- input values they depend on have changed. -- module Distribution.Client.FileMonitor ( -- * Declaring files to monitor MonitorFilePath(..), MonitorKindFile(..), MonitorKindDir(..), FilePathGlob(..), monitorFile, monitorFileHashed, monitorNonExistentFile, monitorFileExistence, monitorDirectory, monitorNonExistentDirectory, monitorDirectoryExistence, monitorFileOrDirectory, monitorFileGlob, monitorFileGlobExistence, monitorFileSearchPath, monitorFileHashedSearchPath, -- * Creating and checking sets of monitored files FileMonitor(..), newFileMonitor, MonitorChanged(..), MonitorChangedReason(..), checkFileMonitorChanged, updateFileMonitor, MonitorTimestamp, beginUpdateFileMonitor, -- * Internal MonitorStateFileSet, MonitorStateFile, MonitorStateGlob, ) where import Prelude () import Distribution.Client.Compat.Prelude import qualified Distribution.Compat.Binary as Binary import qualified Data.Map.Strict as Map import Data.Binary.Get (runGetOrFail) import qualified Data.ByteString.Lazy as BS import qualified Data.Hashable as Hashable import Control.Monad import Control.Monad.Trans (MonadIO, liftIO) import 
Control.Monad.State (StateT, mapStateT) import qualified Control.Monad.State as State import Control.Monad.Except (ExceptT, runExceptT, withExceptT, throwError) import Control.Exception import Distribution.Compat.Time import Distribution.Client.Glob import Distribution.Simple.Utils (handleDoesNotExist, writeFileAtomic) import Distribution.Client.Utils (mergeBy, MergeResult(..)) import Distribution.Utils.Structured (structuredEncode, Tag (..)) import System.FilePath import System.Directory import System.IO ------------------------------------------------------------------------------ -- Types for specifying files to monitor -- -- | A description of a file (or set of files) to monitor for changes. -- -- Where file paths are relative they are relative to a common directory -- (e.g. project root), not necessarily the process current directory. -- data MonitorFilePath = MonitorFile { monitorKindFile :: !MonitorKindFile, monitorKindDir :: !MonitorKindDir, monitorPath :: !FilePath } | MonitorFileGlob { monitorKindFile :: !MonitorKindFile, monitorKindDir :: !MonitorKindDir, monitorPathGlob :: !FilePathGlob } deriving (Eq, Show, Generic) data MonitorKindFile = FileExists | FileModTime | FileHashed | FileNotExists deriving (Eq, Show, Generic) data MonitorKindDir = DirExists | DirModTime | DirNotExists deriving (Eq, Show, Generic) instance Binary MonitorFilePath instance Binary MonitorKindFile instance Binary MonitorKindDir instance Structured MonitorFilePath instance Structured MonitorKindFile instance Structured MonitorKindDir -- | Monitor a single file for changes, based on its modification time. -- The monitored file is considered to have changed if it no longer -- exists or if its modification time has changed. -- monitorFile :: FilePath -> MonitorFilePath monitorFile = MonitorFile FileModTime DirNotExists -- | Monitor a single file for changes, based on its modification time -- and content hash. 
The monitored file is considered to have changed if -- it no longer exists or if its modification time and content hash have -- changed. -- monitorFileHashed :: FilePath -> MonitorFilePath monitorFileHashed = MonitorFile FileHashed DirNotExists -- | Monitor a single non-existent file for changes. The monitored file -- is considered to have changed if it exists. -- monitorNonExistentFile :: FilePath -> MonitorFilePath monitorNonExistentFile = MonitorFile FileNotExists DirNotExists -- | Monitor a single file for existence only. The monitored file is -- considered to have changed if it no longer exists. -- monitorFileExistence :: FilePath -> MonitorFilePath monitorFileExistence = MonitorFile FileExists DirNotExists -- | Monitor a single directory for changes, based on its modification -- time. The monitored directory is considered to have changed if it no -- longer exists or if its modification time has changed. -- monitorDirectory :: FilePath -> MonitorFilePath monitorDirectory = MonitorFile FileNotExists DirModTime -- | Monitor a single non-existent directory for changes. The monitored -- directory is considered to have changed if it exists. -- monitorNonExistentDirectory :: FilePath -> MonitorFilePath -- Just an alias for monitorNonExistentFile, since you can't -- tell the difference between a non-existent directory and -- a non-existent file :) monitorNonExistentDirectory = monitorNonExistentFile -- | Monitor a single directory for existence. The monitored directory is -- considered to have changed only if it no longer exists. -- monitorDirectoryExistence :: FilePath -> MonitorFilePath monitorDirectoryExistence = MonitorFile FileNotExists DirExists -- | Monitor a single file or directory for changes, based on its modification -- time. The monitored file is considered to have changed if it no longer -- exists or if its modification time has changed. 
-- monitorFileOrDirectory :: FilePath -> MonitorFilePath monitorFileOrDirectory = MonitorFile FileModTime DirModTime -- | Monitor a set of files (or directories) identified by a file glob. -- The monitored glob is considered to have changed if the set of files -- matching the glob changes (i.e. creations or deletions), or for files if the -- modification time and content hash of any matching file has changed. -- monitorFileGlob :: FilePathGlob -> MonitorFilePath monitorFileGlob = MonitorFileGlob FileHashed DirExists -- | Monitor a set of files (or directories) identified by a file glob for -- existence only. The monitored glob is considered to have changed if the set -- of files matching the glob changes (i.e. creations or deletions). -- monitorFileGlobExistence :: FilePathGlob -> MonitorFilePath monitorFileGlobExistence = MonitorFileGlob FileExists DirExists -- | Creates a list of files to monitor when you search for a file which -- unsuccessfully looked in @notFoundAtPaths@ before finding it at -- @foundAtPath@. monitorFileSearchPath :: [FilePath] -> FilePath -> [MonitorFilePath] monitorFileSearchPath notFoundAtPaths foundAtPath = monitorFile foundAtPath : map monitorNonExistentFile notFoundAtPaths -- | Similar to 'monitorFileSearchPath', but also instructs us to -- monitor the hash of the found file. monitorFileHashedSearchPath :: [FilePath] -> FilePath -> [MonitorFilePath] monitorFileHashedSearchPath notFoundAtPaths foundAtPath = monitorFileHashed foundAtPath : map monitorNonExistentFile notFoundAtPaths ------------------------------------------------------------------------------ -- Implementation types, files status -- -- | The state necessary to determine whether a set of monitored -- files has changed. It consists of two parts: a set of specific -- files to be monitored (index by their path), and a list of -- globs, which monitor may files at once. 
data MonitorStateFileSet = MonitorStateFileSet ![MonitorStateFile] ![MonitorStateGlob] -- Morally this is not actually a set but a bag (represented by lists). -- There is no principled reason to use a bag here rather than a set, but -- there is also no particular gain either. That said, we do preserve the -- order of the lists just to reduce confusion (and have predictable I/O -- patterns). deriving (Show, Generic) instance Binary MonitorStateFileSet instance Structured MonitorStateFileSet type Hash = Int -- | The state necessary to determine whether a monitored file has changed. -- -- This covers all the cases of 'MonitorFilePath' except for globs which is -- covered separately by 'MonitorStateGlob'. -- -- The @Maybe ModTime@ is to cover the case where we already consider the -- file to have changed, either because it had already changed by the time we -- did the snapshot (i.e. too new, changed since start of update process) or it -- no longer exists at all. -- data MonitorStateFile = MonitorStateFile !MonitorKindFile !MonitorKindDir !FilePath !MonitorStateFileStatus deriving (Show, Generic) data MonitorStateFileStatus = MonitorStateFileExists | MonitorStateFileModTime !ModTime -- ^ cached file mtime | MonitorStateFileHashed !ModTime !Hash -- ^ cached mtime and content hash | MonitorStateDirExists | MonitorStateDirModTime !ModTime -- ^ cached dir mtime | MonitorStateNonExistent | MonitorStateAlreadyChanged deriving (Show, Generic) instance Binary MonitorStateFile instance Binary MonitorStateFileStatus instance Structured MonitorStateFile instance Structured MonitorStateFileStatus -- | The state necessary to determine whether the files matched by a globbing -- match have changed. 
-- data MonitorStateGlob = MonitorStateGlob !MonitorKindFile !MonitorKindDir !FilePathRoot !MonitorStateGlobRel deriving (Show, Generic) data MonitorStateGlobRel = MonitorStateGlobDirs !Glob !FilePathGlobRel !ModTime ![(FilePath, MonitorStateGlobRel)] -- invariant: sorted | MonitorStateGlobFiles !Glob !ModTime ![(FilePath, MonitorStateFileStatus)] -- invariant: sorted | MonitorStateGlobDirTrailing deriving (Show, Generic) instance Binary MonitorStateGlob instance Binary MonitorStateGlobRel instance Structured MonitorStateGlob instance Structured MonitorStateGlobRel -- | We can build a 'MonitorStateFileSet' from a set of 'MonitorFilePath' by -- inspecting the state of the file system, and we can go in the reverse -- direction by just forgetting the extra info. -- reconstructMonitorFilePaths :: MonitorStateFileSet -> [MonitorFilePath] reconstructMonitorFilePaths (MonitorStateFileSet singlePaths globPaths) = map getSinglePath singlePaths ++ map getGlobPath globPaths where getSinglePath :: MonitorStateFile -> MonitorFilePath getSinglePath (MonitorStateFile kindfile kinddir filepath _) = MonitorFile kindfile kinddir filepath getGlobPath :: MonitorStateGlob -> MonitorFilePath getGlobPath (MonitorStateGlob kindfile kinddir root gstate) = MonitorFileGlob kindfile kinddir $ FilePathGlob root $ case gstate of MonitorStateGlobDirs glob globs _ _ -> GlobDir glob globs MonitorStateGlobFiles glob _ _ -> GlobFile glob MonitorStateGlobDirTrailing -> GlobDirTrailing ------------------------------------------------------------------------------ -- Checking the status of monitored files -- -- | A monitor for detecting changes to a set of files. It can be used to -- efficiently test if any of a set of files (specified individually or by -- glob patterns) has changed since some snapshot. In addition, it also checks -- for changes in a value (of type @a@), and when there are no changes in -- either it returns a saved value (of type @b@). 
-- -- The main use case looks like this: suppose we have some expensive action -- that depends on certain pure inputs and reads some set of files, and -- produces some pure result. We want to avoid re-running this action when it -- would produce the same result. So we need to monitor the files the action -- looked at, the other pure input values, and we need to cache the result. -- Then at some later point, if the input value didn't change, and none of the -- files changed, then we can re-use the cached result rather than re-running -- the action. -- -- This can be achieved using a 'FileMonitor'. Each 'FileMonitor' instance -- saves state in a disk file, so the file for that has to be specified, -- making sure it is unique. The pattern is to use 'checkFileMonitorChanged' -- to see if there's been any change. If there is, re-run the action, keeping -- track of the files, then use 'updateFileMonitor' to record the current -- set of files to monitor, the current input value for the action, and the -- result of the action. -- -- The typical occurrence of this pattern is captured by 'rerunIfChanged' -- and the 'Rebuild' monad. More complicated cases may need to use -- 'checkFileMonitorChanged' and 'updateFileMonitor' directly. -- data FileMonitor a b = FileMonitor { -- | The file where this 'FileMonitor' should store its state. -- fileMonitorCacheFile :: FilePath, -- | Compares a new cache key with old one to determine if a -- corresponding cached value is still valid. -- -- Typically this is just an equality test, but in some -- circumstances it can make sense to do things like subset -- comparisons. -- -- The first arg is the new value, the second is the old cached value. -- fileMonitorKeyValid :: a -> a -> Bool, -- | When this mode is enabled, if 'checkFileMonitorChanged' returns -- 'MonitoredValueChanged' then we have the guarantee that no files -- changed, that the value change was the only change. 
In the default -- mode no such guarantee is provided which is slightly faster. -- fileMonitorCheckIfOnlyValueChanged :: Bool } -- | Define a new file monitor. -- -- It's best practice to define file monitor values once, and then use the -- same value for 'checkFileMonitorChanged' and 'updateFileMonitor' as this -- ensures you get the same types @a@ and @b@ for reading and writing. -- -- The path of the file monitor itself must be unique because it keeps state -- on disk and these would clash. -- newFileMonitor :: Eq a => FilePath -- ^ The file to cache the state of the -- file monitor. Must be unique. -> FileMonitor a b newFileMonitor path = FileMonitor path (==) False -- | The result of 'checkFileMonitorChanged': either the monitored files or -- value changed (and it tells us which it was) or nothing changed and we get -- the cached result. -- data MonitorChanged a b = -- | The monitored files and value did not change. The cached result is -- @b@. -- -- The set of monitored files is also returned. This is useful -- for composing or nesting 'FileMonitor's. MonitorUnchanged b [MonitorFilePath] -- | The monitor found that something changed. The reason is given. -- | MonitorChanged (MonitorChangedReason a) deriving Show -- | What kind of change 'checkFileMonitorChanged' detected. -- data MonitorChangedReason a = -- | One of the files changed (existence, file type, mtime or file -- content, depending on the 'MonitorFilePath' in question) MonitoredFileChanged FilePath -- | The pure input value changed. -- -- The previous cached key value is also returned. This is sometimes -- useful when using a 'fileMonitorKeyValid' function that is not simply -- '(==)', when invalidation can be partial. In such cases it can make -- sense to 'updateFileMonitor' with a key value that's a combination of -- the new and old (e.g. set union). | MonitoredValueChanged a -- | There was no saved monitor state, cached value etc. Ie the file -- for the 'FileMonitor' does not exist. 
| MonitorFirstRun -- | There was existing state, but we could not read it. This typically -- happens when the code has changed compared to an existing 'FileMonitor' -- cache file and type of the input value or cached value has changed such -- that we cannot decode the values. This is completely benign as we can -- treat is just as if there were no cache file and re-run. | MonitorCorruptCache deriving (Eq, Show, Functor) -- | Test if the input value or files monitored by the 'FileMonitor' have -- changed. If not, return the cached value. -- -- See 'FileMonitor' for a full explanation. -- checkFileMonitorChanged :: forall a b. (Binary a, Structured a, Binary b, Structured b) => FileMonitor a b -- ^ cache file path -> FilePath -- ^ root directory -> a -- ^ guard or key value -> IO (MonitorChanged a b) -- ^ did the key or any paths change? checkFileMonitorChanged monitor@FileMonitor { fileMonitorKeyValid, fileMonitorCheckIfOnlyValueChanged } root currentKey = -- Consider it a change if the cache file does not exist, -- or we cannot decode it. Sadly ErrorCall can still happen, despite -- using decodeFileOrFail, e.g. Data.Char.chr errors handleDoesNotExist (MonitorChanged MonitorFirstRun) $ handleErrorCall (MonitorChanged MonitorCorruptCache) $ withCacheFile monitor $ either (\_ -> return (MonitorChanged MonitorCorruptCache)) checkStatusCache where checkStatusCache :: (MonitorStateFileSet, a, Either String b) -> IO (MonitorChanged a b) checkStatusCache (cachedFileStatus, cachedKey, cachedResult) = do change <- checkForChanges case change of Just reason -> return (MonitorChanged reason) Nothing -> case cachedResult of Left _ -> pure (MonitorChanged MonitorCorruptCache) Right cr -> return (MonitorUnchanged cr monitorFiles) where monitorFiles = reconstructMonitorFilePaths cachedFileStatus where -- In fileMonitorCheckIfOnlyValueChanged mode we want to guarantee that -- if we return MonitoredValueChanged that only the value changed. 
-- We do that by checking for file changes first. Otherwise it makes -- more sense to do the cheaper test first. checkForChanges :: IO (Maybe (MonitorChangedReason a)) checkForChanges | fileMonitorCheckIfOnlyValueChanged = checkFileChange cachedFileStatus cachedKey cachedResult `mplusMaybeT` checkValueChange cachedKey | otherwise = checkValueChange cachedKey `mplusMaybeT` checkFileChange cachedFileStatus cachedKey cachedResult mplusMaybeT :: Monad m => m (Maybe a1) -> m (Maybe a1) -> m (Maybe a1) mplusMaybeT ma mb = do mx <- ma case mx of Nothing -> mb Just x -> return (Just x) -- Check if the guard value has changed checkValueChange :: a -> IO (Maybe (MonitorChangedReason a)) checkValueChange cachedKey | not (fileMonitorKeyValid currentKey cachedKey) = return (Just (MonitoredValueChanged cachedKey)) | otherwise = return Nothing -- Check if any file has changed checkFileChange :: MonitorStateFileSet -> a -> Either String b -> IO (Maybe (MonitorChangedReason a)) checkFileChange cachedFileStatus cachedKey cachedResult = do res <- probeFileSystem root cachedFileStatus case res of -- Some monitored file has changed Left changedPath -> return (Just (MonitoredFileChanged (normalise changedPath))) -- No monitored file has changed Right (cachedFileStatus', cacheStatus) -> do -- But we might still want to update the cache whenCacheChanged cacheStatus $ case cachedResult of Left _ -> pure () Right cr -> rewriteCacheFile monitor cachedFileStatus' cachedKey cr return Nothing -- | Lazily decode a triple, parsing the first two fields strictly and -- returning a lazy value containing either the last one or an error. -- This is helpful for cabal cache files where the first two components -- contain header data that lets one test if the cache is still valid, -- and the last (potentially large) component is the cached value itself. -- This way we can test for cache validity without needing to pay the -- cost of the decode of stale cache data. 
This lives here rather than -- Distribution.Utils.Structured because it depends on a newer version of -- binary than supported in the Cabal library proper. structuredDecodeTriple :: forall a b c. (Structured a, Structured b, Structured c, Binary.Binary a, Binary.Binary b, Binary.Binary c) => BS.ByteString -> Either String (a, b, Either String c) structuredDecodeTriple lbs = let partialDecode = (`runGetOrFail` lbs) $ do (_ :: Tag (a,b,c)) <- Binary.get (a :: a) <- Binary.get (b :: b) <- Binary.get pure (a, b) cleanEither (Left (_, pos, msg)) = Left ("Data.Binary.Get.runGet at position " ++ show pos ++ ": " ++ msg) cleanEither (Right (_,_,v)) = Right v in case partialDecode of Left (_, pos, msg) -> Left ("Data.Binary.Get.runGet at position " ++ show pos ++ ": " ++ msg) Right (lbs', _, (x,y)) -> Right (x, y, cleanEither $ runGetOrFail (Binary.get :: Binary.Get c) lbs') -- | Helper for reading the cache file. -- -- This determines the type and format of the binary cache file. -- withCacheFile :: (Binary a, Structured a, Binary b, Structured b) => FileMonitor a b -> (Either String (MonitorStateFileSet, a, Either String b) -> IO r) -> IO r withCacheFile (FileMonitor {fileMonitorCacheFile}) k = withBinaryFile fileMonitorCacheFile ReadMode $ \hnd -> do contents <- structuredDecodeTriple <$> BS.hGetContents hnd k contents -- | Helper for writing the cache file. -- -- This determines the type and format of the binary cache file. -- rewriteCacheFile :: (Binary a, Structured a, Binary b, Structured b) => FileMonitor a b -> MonitorStateFileSet -> a -> b -> IO () rewriteCacheFile FileMonitor {fileMonitorCacheFile} fileset key result = writeFileAtomic fileMonitorCacheFile $ structuredEncode (fileset, key, result) -- | Probe the file system to see if any of the monitored files have changed. -- -- It returns Nothing if any file changed, or returns a possibly updated -- file 'MonitorStateFileSet' plus an indicator of whether it actually changed. 
-- -- We may need to update the cache since there may be changes in the filesystem -- state which don't change any of our affected files. -- -- Consider the glob @{proj1,proj2}\/\*.cabal@. Say we first run and find a -- @proj1@ directory containing @proj1.cabal@ yet no @proj2@. If we later run -- and find @proj2@ was created, yet contains no files matching @*.cabal@ then -- we want to update the cache despite no changes in our relevant file set. -- Specifically, we should add an mtime for this directory so we can avoid -- re-traversing the directory in future runs. -- probeFileSystem :: FilePath -> MonitorStateFileSet -> IO (Either FilePath (MonitorStateFileSet, CacheChanged)) probeFileSystem root (MonitorStateFileSet singlePaths globPaths) = runChangedM $ do sequence_ [ probeMonitorStateFileStatus root file status | MonitorStateFile _ _ file status <- singlePaths ] -- The glob monitors can require state changes globPaths' <- sequence [ probeMonitorStateGlob root globPath | globPath <- globPaths ] return (MonitorStateFileSet singlePaths globPaths') ----------------------------------------------- -- Monad for checking for file system changes -- -- We need to be able to bail out if we detect a change (using ExceptT), -- but if there's no change we need to be able to rebuild the monitor -- state. And we want to optimise that rebuilding by keeping track if -- anything actually changed (using StateT), so that in the typical case -- we can avoid rewriting the state file. 
newtype ChangedM a = ChangedM (StateT CacheChanged (ExceptT FilePath IO) a) deriving (Functor, Applicative, Monad, MonadIO) runChangedM :: ChangedM a -> IO (Either FilePath (a, CacheChanged)) runChangedM (ChangedM action) = runExceptT $ State.runStateT action CacheUnchanged somethingChanged :: FilePath -> ChangedM a somethingChanged path = ChangedM $ throwError path cacheChanged :: ChangedM () cacheChanged = ChangedM $ State.put CacheChanged mapChangedFile :: (FilePath -> FilePath) -> ChangedM a -> ChangedM a mapChangedFile adjust (ChangedM a) = ChangedM (mapStateT (withExceptT adjust) a) data CacheChanged = CacheChanged | CacheUnchanged whenCacheChanged :: Monad m => CacheChanged -> m () -> m () whenCacheChanged CacheChanged action = action whenCacheChanged CacheUnchanged _ = return () ---------------------- -- | Probe the file system to see if a single monitored file has changed. -- probeMonitorStateFileStatus :: FilePath -> FilePath -> MonitorStateFileStatus -> ChangedM () probeMonitorStateFileStatus root file status = case status of MonitorStateFileExists -> probeFileExistence root file MonitorStateFileModTime mtime -> probeFileModificationTime root file mtime MonitorStateFileHashed mtime hash -> probeFileModificationTimeAndHash root file mtime hash MonitorStateDirExists -> probeDirExistence root file MonitorStateDirModTime mtime -> probeFileModificationTime root file mtime MonitorStateNonExistent -> probeFileNonExistence root file MonitorStateAlreadyChanged -> somethingChanged file -- | Probe the file system to see if a monitored file glob has changed. -- probeMonitorStateGlob :: FilePath -- ^ root path -> MonitorStateGlob -> ChangedM MonitorStateGlob probeMonitorStateGlob relroot (MonitorStateGlob kindfile kinddir globroot glob) = do root <- liftIO $ getFilePathRootDirectory globroot relroot case globroot of FilePathRelative -> MonitorStateGlob kindfile kinddir globroot <$> probeMonitorStateGlobRel kindfile kinddir root "." 
glob -- for absolute cases, make the changed file we report absolute too _ -> mapChangedFile (root ) $ MonitorStateGlob kindfile kinddir globroot <$> probeMonitorStateGlobRel kindfile kinddir root "" glob probeMonitorStateGlobRel :: MonitorKindFile -> MonitorKindDir -> FilePath -- ^ root path -> FilePath -- ^ path of the directory we are -- looking in relative to @root@ -> MonitorStateGlobRel -> ChangedM MonitorStateGlobRel probeMonitorStateGlobRel kindfile kinddir root dirName (MonitorStateGlobDirs glob globPath mtime children) = do change <- liftIO $ checkDirectoryModificationTime (root dirName) mtime case change of Nothing -> do children' <- sequence [ do fstate' <- probeMonitorStateGlobRel kindfile kinddir root (dirName fname) fstate return (fname, fstate') | (fname, fstate) <- children ] return $! MonitorStateGlobDirs glob globPath mtime children' Just mtime' -> do -- directory modification time changed: -- a matching subdir may have been added or deleted matches <- filterM (\entry -> let subdir = root dirName entry in liftIO $ doesDirectoryExist subdir) . filter (matchGlob glob) =<< liftIO (getDirectoryContents (root dirName)) children' <- traverse probeMergeResult $ mergeBy (\(path1,_) path2 -> compare path1 path2) children (sort matches) return $! MonitorStateGlobDirs glob globPath mtime' children' -- Note that just because the directory has changed, we don't force -- a cache rewrite with 'cacheChanged' since that has some cost, and -- all we're saving is scanning the directory. But we do rebuild the -- cache with the new mtime', so that if the cache is rewritten for -- some other reason, we'll take advantage of that. 
where probeMergeResult :: MergeResult (FilePath, MonitorStateGlobRel) FilePath -> ChangedM (FilePath, MonitorStateGlobRel) -- Only in cached (directory deleted) probeMergeResult (OnlyInLeft (path, fstate)) = do case allMatchingFiles (dirName path) fstate of [] -> return (path, fstate) -- Strictly speaking we should be returning 'CacheChanged' above -- as we should prune the now-missing 'MonitorStateGlobRel'. However -- we currently just leave these now-redundant entries in the -- cache as they cost no IO and keeping them allows us to avoid -- rewriting the cache. (file:_) -> somethingChanged file -- Only in current filesystem state (directory added) probeMergeResult (OnlyInRight path) = do fstate <- liftIO $ buildMonitorStateGlobRel Nothing Map.empty kindfile kinddir root (dirName path) globPath case allMatchingFiles (dirName path) fstate of (file:_) -> somethingChanged file -- This is the only case where we use 'cacheChanged' because we can -- have a whole new dir subtree (of unbounded size and cost), so we -- need to save the state of that new subtree in the cache. [] -> cacheChanged >> return (path, fstate) -- Found in path probeMergeResult (InBoth (path, fstate) _) = do fstate' <- probeMonitorStateGlobRel kindfile kinddir root (dirName path) fstate return (path, fstate') -- | Does a 'MonitorStateGlob' have any relevant files within it? 
allMatchingFiles :: FilePath -> MonitorStateGlobRel -> [FilePath] allMatchingFiles dir (MonitorStateGlobFiles _ _ entries) = [ dir fname | (fname, _) <- entries ] allMatchingFiles dir (MonitorStateGlobDirs _ _ _ entries) = [ res | (subdir, fstate) <- entries , res <- allMatchingFiles (dir subdir) fstate ] allMatchingFiles dir MonitorStateGlobDirTrailing = [dir] probeMonitorStateGlobRel _ _ root dirName (MonitorStateGlobFiles glob mtime children) = do change <- liftIO $ checkDirectoryModificationTime (root dirName) mtime mtime' <- case change of Nothing -> return mtime Just mtime' -> do -- directory modification time changed: -- a matching file may have been added or deleted matches <- return . filter (matchGlob glob) =<< liftIO (getDirectoryContents (root dirName)) traverse_ probeMergeResult $ mergeBy (\(path1,_) path2 -> compare path1 path2) children (sort matches) return mtime' -- Check that none of the children have changed for_ children $ \(file, status) -> probeMonitorStateFileStatus root (dirName file) status return (MonitorStateGlobFiles glob mtime' children) -- Again, we don't force a cache rewrite with 'cacheChanged', but we do use -- the new mtime' if any. where probeMergeResult :: MergeResult (FilePath, MonitorStateFileStatus) FilePath -> ChangedM () probeMergeResult mr = case mr of InBoth _ _ -> return () -- this is just to be able to accurately report which file changed: OnlyInLeft (path, _) -> somethingChanged (dirName path) OnlyInRight path -> somethingChanged (dirName path) probeMonitorStateGlobRel _ _ _ _ MonitorStateGlobDirTrailing = return MonitorStateGlobDirTrailing ------------------------------------------------------------------------------ -- | Update the input value and the set of files monitored by the -- 'FileMonitor', plus the cached value that may be returned in future. 
-- -- This takes a snapshot of the state of the monitored files right now, so -- 'checkFileMonitorChanged' will look for file system changes relative to -- this snapshot. -- -- This is typically done once the action has been completed successfully and -- we have the action's result and we know what files it looked at. See -- 'FileMonitor' for a full explanation. -- -- If we do take the snapshot after the action has completed then we have a -- problem. The problem is that files might have changed /while/ the action was -- running but /after/ the action read them. If we take the snapshot after the -- action completes then we will miss these changes. The solution is to record -- a timestamp before beginning execution of the action and then we make the -- conservative assumption that any file that has changed since then has -- already changed, ie the file monitor state for these files will be such that -- 'checkFileMonitorChanged' will report that they have changed. -- -- So if you do use 'updateFileMonitor' after the action (so you can discover -- the files used rather than predicting them in advance) then use -- 'beginUpdateFileMonitor' to get a timestamp and pass that. Alternatively, -- if you take the snapshot in advance of the action, or you're not monitoring -- any files then you can use @Nothing@ for the timestamp parameter. 
-- updateFileMonitor :: (Binary a, Structured a, Binary b, Structured b) => FileMonitor a b -- ^ cache file path -> FilePath -- ^ root directory -> Maybe MonitorTimestamp -- ^ timestamp when the update action started -> [MonitorFilePath] -- ^ files of interest relative to root -> a -- ^ the current key value -> b -- ^ the current result value -> IO () updateFileMonitor monitor root startTime monitorFiles cachedKey cachedResult = do hashcache <- readCacheFileHashes monitor msfs <- buildMonitorStateFileSet startTime hashcache root monitorFiles rewriteCacheFile monitor msfs cachedKey cachedResult -- | A timestamp to help with the problem of file changes during actions. -- See 'updateFileMonitor' for details. -- newtype MonitorTimestamp = MonitorTimestamp ModTime -- | Record a timestamp at the beginning of an action, and when the action -- completes call 'updateFileMonitor' passing it the timestamp. -- See 'updateFileMonitor' for details. -- beginUpdateFileMonitor :: IO MonitorTimestamp beginUpdateFileMonitor = MonitorTimestamp <$> getCurTime -- | Take the snapshot of the monitored files. That is, given the -- specification of the set of files we need to monitor, inspect the state -- of the file system now and collect the information we'll need later to -- determine if anything has changed. 
-- buildMonitorStateFileSet :: Maybe MonitorTimestamp -- ^ optional: timestamp -- of the start of the action -> FileHashCache -- ^ existing file hashes -> FilePath -- ^ root directory -> [MonitorFilePath] -- ^ patterns of interest -- relative to root -> IO MonitorStateFileSet buildMonitorStateFileSet mstartTime hashcache root = go [] [] where go :: [MonitorStateFile] -> [MonitorStateGlob] -> [MonitorFilePath] -> IO MonitorStateFileSet go !singlePaths !globPaths [] = return (MonitorStateFileSet (reverse singlePaths) (reverse globPaths)) go !singlePaths !globPaths (MonitorFile kindfile kinddir path : monitors) = do monitorState <- MonitorStateFile kindfile kinddir path <$> buildMonitorStateFile mstartTime hashcache kindfile kinddir root path go (monitorState : singlePaths) globPaths monitors go !singlePaths !globPaths (MonitorFileGlob kindfile kinddir globPath : monitors) = do monitorState <- buildMonitorStateGlob mstartTime hashcache kindfile kinddir root globPath go singlePaths (monitorState : globPaths) monitors buildMonitorStateFile :: Maybe MonitorTimestamp -- ^ start time of update -> FileHashCache -- ^ existing file hashes -> MonitorKindFile -> MonitorKindDir -> FilePath -- ^ the root directory -> FilePath -> IO MonitorStateFileStatus buildMonitorStateFile mstartTime hashcache kindfile kinddir root path = do let abspath = root path isFile <- doesFileExist abspath isDir <- doesDirectoryExist abspath case (isFile, kindfile, isDir, kinddir) of (_, FileNotExists, _, DirNotExists) -> -- we don't need to care if it exists now, since we check at probe time return MonitorStateNonExistent (False, _, False, _) -> return MonitorStateAlreadyChanged (True, FileExists, _, _) -> return MonitorStateFileExists (True, FileModTime, _, _) -> handleIOException MonitorStateAlreadyChanged $ do mtime <- getModTime abspath if changedDuringUpdate mstartTime mtime then return MonitorStateAlreadyChanged else return (MonitorStateFileModTime mtime) (True, FileHashed, _, _) -> 
handleIOException MonitorStateAlreadyChanged $ do mtime <- getModTime abspath if changedDuringUpdate mstartTime mtime then return MonitorStateAlreadyChanged else do hash <- getFileHash hashcache abspath abspath mtime return (MonitorStateFileHashed mtime hash) (_, _, True, DirExists) -> return MonitorStateDirExists (_, _, True, DirModTime) -> handleIOException MonitorStateAlreadyChanged $ do mtime <- getModTime abspath if changedDuringUpdate mstartTime mtime then return MonitorStateAlreadyChanged else return (MonitorStateDirModTime mtime) (False, _, True, DirNotExists) -> return MonitorStateAlreadyChanged (True, FileNotExists, False, _) -> return MonitorStateAlreadyChanged -- | If we have a timestamp for the beginning of the update, then any file -- mtime later than this means that it changed during the update and we ought -- to consider the file as already changed. -- changedDuringUpdate :: Maybe MonitorTimestamp -> ModTime -> Bool changedDuringUpdate (Just (MonitorTimestamp startTime)) mtime = mtime > startTime changedDuringUpdate _ _ = False -- | Much like 'buildMonitorStateFileSet' but for the somewhat complicated case -- of a file glob. -- -- This gets used both by 'buildMonitorStateFileSet' when we're taking the -- file system snapshot, but also by 'probeGlobStatus' as part of checking -- the monitored (globed) files for changes when we find a whole new subtree. -- buildMonitorStateGlob :: Maybe MonitorTimestamp -- ^ start time of update -> FileHashCache -- ^ existing file hashes -> MonitorKindFile -> MonitorKindDir -> FilePath -- ^ the root directory -> FilePathGlob -- ^ the matching glob -> IO MonitorStateGlob buildMonitorStateGlob mstartTime hashcache kindfile kinddir relroot (FilePathGlob globroot globPath) = do root <- liftIO $ getFilePathRootDirectory globroot relroot MonitorStateGlob kindfile kinddir globroot <$> buildMonitorStateGlobRel mstartTime hashcache kindfile kinddir root "." 
globPath buildMonitorStateGlobRel :: Maybe MonitorTimestamp -- ^ start time of update -> FileHashCache -- ^ existing file hashes -> MonitorKindFile -> MonitorKindDir -> FilePath -- ^ the root directory -> FilePath -- ^ directory we are examining -- relative to the root -> FilePathGlobRel -- ^ the matching glob -> IO MonitorStateGlobRel buildMonitorStateGlobRel mstartTime hashcache kindfile kinddir root dir globPath = do let absdir = root dir dirEntries <- getDirectoryContents absdir dirMTime <- getModTime absdir case globPath of GlobDir glob globPath' -> do subdirs <- filterM (\subdir -> doesDirectoryExist (absdir subdir)) $ filter (matchGlob glob) dirEntries subdirStates <- for (sort subdirs) $ \subdir -> do fstate <- buildMonitorStateGlobRel mstartTime hashcache kindfile kinddir root (dir subdir) globPath' return (subdir, fstate) return $! MonitorStateGlobDirs glob globPath' dirMTime subdirStates GlobFile glob -> do let files = filter (matchGlob glob) dirEntries filesStates <- for (sort files) $ \file -> do fstate <- buildMonitorStateFile mstartTime hashcache kindfile kinddir root (dir file) return (file, fstate) return $! MonitorStateGlobFiles glob dirMTime filesStates GlobDirTrailing -> return MonitorStateGlobDirTrailing -- | We really want to avoid re-hashing files all the time. We already make -- the assumption that if a file mtime has not changed then we don't need to -- bother checking if the content hash has changed. We can apply the same -- assumption when updating the file monitor state. In the typical case of -- updating a file monitor the set of files is the same or largely the same so -- we can grab the previously known content hashes with their corresponding -- mtimes. -- type FileHashCache = Map FilePath (ModTime, Hash) -- | We declare it a cache hit if the mtime of a file is the same as before. 
-- lookupFileHashCache :: FileHashCache -> FilePath -> ModTime -> Maybe Hash lookupFileHashCache hashcache file mtime = do (mtime', hash) <- Map.lookup file hashcache guard (mtime' == mtime) return hash -- | Either get it from the cache or go read the file getFileHash :: FileHashCache -> FilePath -> FilePath -> ModTime -> IO Hash getFileHash hashcache relfile absfile mtime = case lookupFileHashCache hashcache relfile mtime of Just hash -> return hash Nothing -> readFileHash absfile -- | Build a 'FileHashCache' from the previous 'MonitorStateFileSet'. While -- in principle we could preserve the structure of the previous state, given -- that the set of files to monitor can change then it's simpler just to throw -- away the structure and use a finite map. -- readCacheFileHashes :: (Binary a, Structured a, Binary b, Structured b) => FileMonitor a b -> IO FileHashCache readCacheFileHashes monitor = handleDoesNotExist Map.empty $ handleErrorCall Map.empty $ withCacheFile monitor $ \res -> case res of Left _ -> return Map.empty Right (msfs, _, _) -> return (mkFileHashCache msfs) where mkFileHashCache :: MonitorStateFileSet -> FileHashCache mkFileHashCache (MonitorStateFileSet singlePaths globPaths) = collectAllFileHashes singlePaths `Map.union` collectAllGlobHashes globPaths collectAllFileHashes :: [MonitorStateFile] -> Map FilePath (ModTime, Hash) collectAllFileHashes singlePaths = Map.fromList [ (fpath, (mtime, hash)) | MonitorStateFile _ _ fpath (MonitorStateFileHashed mtime hash) <- singlePaths ] collectAllGlobHashes :: [MonitorStateGlob] -> Map FilePath (ModTime, Hash) collectAllGlobHashes globPaths = Map.fromList [ (fpath, (mtime, hash)) | MonitorStateGlob _ _ _ gstate <- globPaths , (fpath, (mtime, hash)) <- collectGlobHashes "" gstate ] collectGlobHashes :: FilePath -> MonitorStateGlobRel -> [(FilePath, (ModTime, Hash))] collectGlobHashes dir (MonitorStateGlobDirs _ _ _ entries) = [ res | (subdir, fstate) <- entries , res <- collectGlobHashes (dir subdir) fstate ] 
collectGlobHashes dir (MonitorStateGlobFiles _ _ entries) = [ (dir fname, (mtime, hash)) | (fname, MonitorStateFileHashed mtime hash) <- entries ] collectGlobHashes _dir MonitorStateGlobDirTrailing = [] ------------------------------------------------------------------------------ -- Utils -- -- | Within the @root@ directory, check if @file@ has its 'ModTime' is -- the same as @mtime@, short-circuiting if it is different. probeFileModificationTime :: FilePath -> FilePath -> ModTime -> ChangedM () probeFileModificationTime root file mtime = do unchanged <- liftIO $ checkModificationTimeUnchanged root file mtime unless unchanged (somethingChanged file) -- | Within the @root@ directory, check if @file@ has its 'ModTime' and -- 'Hash' is the same as @mtime@ and @hash@, short-circuiting if it is -- different. probeFileModificationTimeAndHash :: FilePath -> FilePath -> ModTime -> Hash -> ChangedM () probeFileModificationTimeAndHash root file mtime hash = do unchanged <- liftIO $ checkFileModificationTimeAndHashUnchanged root file mtime hash unless unchanged (somethingChanged file) -- | Within the @root@ directory, check if @file@ still exists as a file. -- If it *does not* exist, short-circuit. probeFileExistence :: FilePath -> FilePath -> ChangedM () probeFileExistence root file = do existsFile <- liftIO $ doesFileExist (root file) unless existsFile (somethingChanged file) -- | Within the @root@ directory, check if @dir@ still exists. -- If it *does not* exist, short-circuit. probeDirExistence :: FilePath -> FilePath -> ChangedM () probeDirExistence root dir = do existsDir <- liftIO $ doesDirectoryExist (root dir) unless existsDir (somethingChanged dir) -- | Within the @root@ directory, check if @file@ still does not exist. -- If it *does* exist, short-circuit. 
probeFileNonExistence :: FilePath -> FilePath -> ChangedM () probeFileNonExistence root file = do existsFile <- liftIO $ doesFileExist (root file) existsDir <- liftIO $ doesDirectoryExist (root file) when (existsFile || existsDir) (somethingChanged file) -- | Returns @True@ if, inside the @root@ directory, @file@ has the same -- 'ModTime' as @mtime@. checkModificationTimeUnchanged :: FilePath -> FilePath -> ModTime -> IO Bool checkModificationTimeUnchanged root file mtime = handleIOException False $ do mtime' <- getModTime (root file) return (mtime == mtime') -- | Returns @True@ if, inside the @root@ directory, @file@ has the -- same 'ModTime' and 'Hash' as @mtime and @chash@. checkFileModificationTimeAndHashUnchanged :: FilePath -> FilePath -> ModTime -> Hash -> IO Bool checkFileModificationTimeAndHashUnchanged root file mtime chash = handleIOException False $ do mtime' <- getModTime (root file) if mtime == mtime' then return True else do chash' <- readFileHash (root file) return (chash == chash') -- | Read a non-cryptographic hash of a @file@. readFileHash :: FilePath -> IO Hash readFileHash file = withBinaryFile file ReadMode $ \hnd -> evaluate . Hashable.hash =<< BS.hGetContents hnd -- | Given a directory @dir@, return @Nothing@ if its 'ModTime' -- is the same as @mtime@, and the new 'ModTime' if it is not. checkDirectoryModificationTime :: FilePath -> ModTime -> IO (Maybe ModTime) checkDirectoryModificationTime dir mtime = handleIOException Nothing $ do mtime' <- getModTime dir if mtime == mtime' then return Nothing else return (Just mtime') -- | Run an IO computation, returning the first argument @e@ if there is an 'error' -- call. ('ErrorCall') handleErrorCall :: a -> IO a -> IO a handleErrorCall e = handle handler where #if MIN_VERSION_base(4,9,0) handler (ErrorCallWithLocation _ _) = return e #else handler (ErrorCall _) = return e #endif -- | Run an IO computation, returning @e@ if there is any 'IOException'. 
-- -- This policy is OK in the file monitor code because it just causes the -- monitor to report that something changed, and then code reacting to that -- will normally encounter the same IO exception when it re-runs the action -- that uses the file. -- handleIOException :: a -> IO a -> IO a handleIOException e = handle (anyIOException e) where anyIOException :: a -> IOException -> IO a anyIOException x _ = return x ------------------------------------------------------------------------------ -- Instances -- cabal-install-3.8.1.0/src/Distribution/Client/Freeze.hs0000644000000000000000000002334707346545000021031 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Freeze -- Copyright : (c) David Himmelstrup 2005 -- Duncan Coutts 2011 -- License : BSD-like -- -- Maintainer : cabal-devel@gmail.com -- Stability : provisional -- Portability : portable -- -- The cabal freeze command ----------------------------------------------------------------------------- module Distribution.Client.Freeze ( freeze, getFreezePkgs ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Client.Config ( SavedConfig(..) ) import Distribution.Client.Types import Distribution.Client.Targets import Distribution.Client.Dependency import Distribution.Client.IndexUtils as IndexUtils ( getSourcePackages, getInstalledPackages ) import Distribution.Client.SolverInstallPlan ( SolverInstallPlan, SolverPlanPackage ) import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan import Distribution.Client.Setup ( GlobalFlags(..), FreezeFlags(..), ConfigExFlags(..) , RepoContext(..) 
) import Distribution.Client.Sandbox.PackageEnvironment ( loadUserConfig, pkgEnvSavedConfig, showPackageEnvironment, userPackageEnvironmentFile ) import Distribution.Solver.Types.ConstraintSource import Distribution.Solver.Types.LabeledPackageConstraint import Distribution.Solver.Types.OptionalStanza import Distribution.Solver.Types.PkgConfigDb import Distribution.Solver.Types.SolverId import Distribution.Package ( Package, packageId, packageName, packageVersion ) import Distribution.Simple.Compiler ( Compiler, compilerInfo, PackageDBStack ) import Distribution.Simple.PackageIndex (InstalledPackageIndex) import Distribution.Simple.Program ( ProgramDb ) import Distribution.Simple.Setup ( fromFlag, fromFlagOrDefault, flagToMaybe ) import Distribution.Simple.Utils ( die', notice, debug, writeFileAtomic, toUTF8LBS) import Distribution.System ( Platform ) import Distribution.Version ( thisVersion ) -- ------------------------------------------------------------ -- * The freeze command -- ------------------------------------------------------------ -- | Freeze all of the dependencies by writing a constraints section -- constraining each dependency to an exact version. -- freeze :: Verbosity -> PackageDBStack -> RepoContext -> Compiler -> Platform -> ProgramDb -> GlobalFlags -> FreezeFlags -> IO () freeze verbosity packageDBs repoCtxt comp platform progdb globalFlags freezeFlags = do pkgs <- getFreezePkgs verbosity packageDBs repoCtxt comp platform progdb globalFlags freezeFlags if null pkgs then notice verbosity $ "No packages to be frozen. " ++ "As this package has no dependencies." else if dryRun then notice verbosity $ unlines $ "The following packages would be frozen:" : formatPkgs pkgs else freezePackages verbosity globalFlags pkgs where dryRun = fromFlag (freezeDryRun freezeFlags) -- | Get the list of packages whose versions would be frozen by the @freeze@ -- command. 
getFreezePkgs :: Verbosity -> PackageDBStack -> RepoContext -> Compiler -> Platform -> ProgramDb -> GlobalFlags -> FreezeFlags -> IO [SolverPlanPackage] getFreezePkgs verbosity packageDBs repoCtxt comp platform progdb _ freezeFlags = do installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb sourcePkgDb <- getSourcePackages verbosity repoCtxt pkgConfigDb <- readPkgConfigDb verbosity progdb pkgSpecifiers <- resolveUserTargets verbosity repoCtxt (packageIndex sourcePkgDb) [UserTargetLocalDir "."] sanityCheck pkgSpecifiers planPackages verbosity comp platform freezeFlags installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers where sanityCheck :: [PackageSpecifier UnresolvedSourcePackage] -> IO () sanityCheck pkgSpecifiers = do when (not . null $ [n | n@(NamedPackage _ _) <- pkgSpecifiers]) $ die' verbosity $ "internal error: 'resolveUserTargets' returned " ++ "unexpected named package specifiers!" when (length pkgSpecifiers /= 1) $ die' verbosity $ "internal error: 'resolveUserTargets' returned " ++ "unexpected source package specifiers!" planPackages :: Verbosity -> Compiler -> Platform -> FreezeFlags -> InstalledPackageIndex -> SourcePackageDb -> PkgConfigDb -> [PackageSpecifier UnresolvedSourcePackage] -> IO [SolverPlanPackage] planPackages verbosity comp platform freezeFlags installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers = do solver <- chooseSolver verbosity (fromFlag (freezeSolver freezeFlags)) (compilerInfo comp) notice verbosity "Resolving dependencies..." installPlan <- foldProgress logMsg (die' verbosity) return $ resolveDependencies platform (compilerInfo comp) pkgConfigDb solver resolverParams return $ pruneInstallPlan installPlan pkgSpecifiers where resolverParams :: DepResolverParams resolverParams = setMaxBackjumps (if maxBackjumps < 0 then Nothing else Just maxBackjumps) . setIndependentGoals independentGoals . setReorderGoals reorderGoals . setCountConflicts countConflicts . setFineGrainedConflicts fineGrainedConflicts . 
setMinimizeConflictSet minimizeConflictSet . setShadowPkgs shadowPkgs . setStrongFlags strongFlags . setAllowBootLibInstalls allowBootLibInstalls . setOnlyConstrained onlyConstrained . setSolverVerbosity verbosity . addConstraints [ let pkg = pkgSpecifierTarget pkgSpecifier pc = PackageConstraint (scopeToplevel pkg) (PackagePropertyStanzas stanzas) in LabeledPackageConstraint pc ConstraintSourceFreeze | pkgSpecifier <- pkgSpecifiers ] $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers logMsg message rest = debug verbosity message >> rest stanzas = [ TestStanzas | testsEnabled ] ++ [ BenchStanzas | benchmarksEnabled ] testsEnabled = fromFlagOrDefault False $ freezeTests freezeFlags benchmarksEnabled = fromFlagOrDefault False $ freezeBenchmarks freezeFlags reorderGoals = fromFlag (freezeReorderGoals freezeFlags) countConflicts = fromFlag (freezeCountConflicts freezeFlags) fineGrainedConflicts = fromFlag (freezeFineGrainedConflicts freezeFlags) minimizeConflictSet = fromFlag (freezeMinimizeConflictSet freezeFlags) independentGoals = fromFlag (freezeIndependentGoals freezeFlags) shadowPkgs = fromFlag (freezeShadowPkgs freezeFlags) strongFlags = fromFlag (freezeStrongFlags freezeFlags) maxBackjumps = fromFlag (freezeMaxBackjumps freezeFlags) allowBootLibInstalls = fromFlag (freezeAllowBootLibInstalls freezeFlags) onlyConstrained = fromFlag (freezeOnlyConstrained freezeFlags) -- | Remove all unneeded packages from an install plan. -- -- A package is unneeded if it is either -- -- 1) the package that we are freezing, or -- -- 2) not a dependency (directly or transitively) of the package we are -- freezing. This is useful for removing previously installed packages -- which are no longer required from the install plan. -- -- Invariant: @pkgSpecifiers@ must refer to packages which are not -- 'PreExisting' in the 'SolverInstallPlan'. 
pruneInstallPlan :: SolverInstallPlan -> [PackageSpecifier UnresolvedSourcePackage] -> [SolverPlanPackage] pruneInstallPlan installPlan pkgSpecifiers = removeSelf pkgIds $ SolverInstallPlan.dependencyClosure installPlan pkgIds where pkgIds = [ PlannedId (packageId pkg) | SpecificSourcePackage pkg <- pkgSpecifiers ] removeSelf [thisPkg] = filter (\pp -> packageId pp /= packageId thisPkg) removeSelf _ = error $ "internal error: 'pruneInstallPlan' given " ++ "unexpected package specifiers!" freezePackages :: Package pkg => Verbosity -> GlobalFlags -> [pkg] -> IO () freezePackages verbosity globalFlags pkgs = do pkgEnv <- fmap (createPkgEnv . addFrozenConstraints) $ loadUserConfig verbosity "" (flagToMaybe . globalConstraintsFile $ globalFlags) writeFileAtomic userPackageEnvironmentFile $ showPkgEnv pkgEnv where addFrozenConstraints config = config { savedConfigureExFlags = (savedConfigureExFlags config) { configExConstraints = map constraint pkgs } } constraint pkg = (pkgIdToConstraint $ packageId pkg ,ConstraintSourceUserConfig userPackageEnvironmentFile) where pkgIdToConstraint pkgId = UserConstraint (UserQualified UserQualToplevel (packageName pkgId)) (PackagePropertyVersion $ thisVersion (packageVersion pkgId)) createPkgEnv config = mempty { pkgEnvSavedConfig = config } showPkgEnv = toUTF8LBS . showPackageEnvironment formatPkgs :: Package pkg => [pkg] -> [String] formatPkgs = map $ showPkg . packageId where showPkg pid = name pid ++ " == " ++ version pid name = prettyShow . packageName version = prettyShow . 
packageVersion cabal-install-3.8.1.0/src/Distribution/Client/GZipUtils.hs0000644000000000000000000000714207346545000021476 0ustar0000000000000000{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.GZipUtils -- Copyright : (c) Dmitry Astapov 2010 -- License : BSD-like -- -- Maintainer : cabal-devel@gmail.com -- Stability : provisional -- Portability : portable -- -- Provides a convenience functions for working with files that may or may not -- be zipped. ----------------------------------------------------------------------------- module Distribution.Client.GZipUtils ( maybeDecompress, ) where import Prelude () import Distribution.Client.Compat.Prelude import Codec.Compression.Zlib.Internal import Data.ByteString.Lazy.Internal as BS (ByteString(Empty, Chunk)) #ifndef MIN_VERSION_zlib #define MIN_VERSION_zlib(x,y,z) 1 #endif #if MIN_VERSION_zlib(0,6,0) import Control.Exception (throw) import Control.Monad.ST.Lazy (ST, runST) import qualified Data.ByteString as Strict #endif -- | Attempts to decompress the `bytes' under the assumption that -- "data format" error at the very beginning of the stream means -- that it is already decompressed. Caller should make sanity checks -- to verify that it is not, in fact, garbage. -- -- This is to deal with http proxies that lie to us and transparently -- decompress without removing the content-encoding header. See: -- -- maybeDecompress :: ByteString -> ByteString #if MIN_VERSION_zlib(0,6,0) maybeDecompress bytes = runST (go bytes decompressor) where decompressor :: DecompressStream (ST s) decompressor = decompressST gzipOrZlibFormat defaultDecompressParams -- DataError at the beginning of the stream probably means that stream is -- not compressed, so we return it as-is. -- TODO: alternatively, we might consider looking for the two magic bytes -- at the beginning of the gzip header. 
(not an option for zlib, though.) go :: Monad m => ByteString -> DecompressStream m -> m ByteString go cs (DecompressOutputAvailable bs k) = liftM (Chunk bs) $ go' cs =<< k go _ (DecompressStreamEnd _bs ) = return Empty go _ (DecompressStreamError _err ) = return bytes go cs (DecompressInputRequired k) = go cs' =<< k c where (c, cs') = uncons cs -- Once we have received any output though we regard errors as actual errors -- and we throw them (as pure exceptions). -- TODO: We could (and should) avoid these pure exceptions. go' :: Monad m => ByteString -> DecompressStream m -> m ByteString go' cs (DecompressOutputAvailable bs k) = liftM (Chunk bs) $ go' cs =<< k go' _ (DecompressStreamEnd _bs ) = return Empty go' _ (DecompressStreamError err ) = throw err go' cs (DecompressInputRequired k) = go' cs' =<< k c where (c, cs') = uncons cs uncons :: ByteString -> (Strict.ByteString, ByteString) uncons Empty = (Strict.empty, Empty) uncons (Chunk c cs) = (c, cs) #else maybeDecompress bytes = foldStream $ decompressWithErrors gzipOrZlibFormat defaultDecompressParams bytes where -- DataError at the beginning of the stream probably means that stream is not compressed. -- Returning it as-is. -- TODO: alternatively, we might consider looking for the two magic bytes -- at the beginning of the gzip header. 
foldStream (StreamError _ _) = bytes foldStream somethingElse = doFold somethingElse doFold StreamEnd = BS.Empty doFold (StreamChunk bs stream) = BS.Chunk bs (doFold stream) doFold (StreamError _ msg) = error $ "Codec.Compression.Zlib: " ++ msg #endif cabal-install-3.8.1.0/src/Distribution/Client/GenBounds.hs0000644000000000000000000001343107346545000021466 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.GenBounds -- Copyright : (c) Doug Beardsley 2015 -- License : BSD-like -- -- Maintainer : cabal-devel@gmail.com -- Stability : provisional -- Portability : portable -- -- The cabal gen-bounds command for generating PVP-compliant version bounds. ----------------------------------------------------------------------------- module Distribution.Client.GenBounds ( genBounds ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Client.Utils ( incVersion ) import Distribution.Client.Freeze ( getFreezePkgs ) import Distribution.Client.Setup ( GlobalFlags(..), FreezeFlags(..), RepoContext ) import Distribution.Package ( Package(..), unPackageName, packageName, packageVersion ) import Distribution.PackageDescription ( enabledBuildDepends ) import Distribution.PackageDescription.Configuration ( finalizePD ) import Distribution.Types.ComponentRequestedSpec ( defaultComponentRequestedSpec ) import Distribution.Types.Dependency import Distribution.Simple.Compiler ( Compiler, PackageDBStack, compilerInfo ) import Distribution.Simple.PackageDescription ( readGenericPackageDescription ) import Distribution.Simple.Program ( ProgramDb ) import Distribution.Simple.Utils ( tryFindPackageDesc ) import Distribution.System ( Platform ) import Distribution.Version ( Version, alterVersion, VersionInterval (..) 
, LowerBound(..), UpperBound(..), VersionRange, asVersionIntervals , orLaterVersion, earlierVersion, intersectVersionRanges, hasUpperBound) import System.Directory ( getCurrentDirectory ) -- | Given a version, return an API-compatible (according to PVP) version range. -- -- Example: @0.4.1.2@ produces the version range @>= 0.4.1 && < 0.5@. -- -- This version is slightly different than the one in -- 'Distribution.Client.Init'. This one uses a.b.c as the lower bound because -- the user could be using a new function introduced in a.b.c which would make -- ">= a.b" incorrect. pvpize :: Version -> VersionRange pvpize v = orLaterVersion (vn 3) `intersectVersionRanges` earlierVersion (incVersion 1 (vn 2)) where vn n = alterVersion (take n) v -- | Show the PVP-mandated version range for this package. The @padTo@ parameter -- specifies the width of the package name column. showBounds :: Package pkg => Int -> pkg -> String showBounds padTo p = unwords $ (padAfter padTo $ unPackageName $ packageName p) : -- TODO: use normaliseVersionRange map showInterval (asVersionIntervals $ pvpize $ packageVersion p) where padAfter :: Int -> String -> String padAfter n str = str ++ replicate (n - length str) ' ' showInterval :: VersionInterval -> String showInterval (VersionInterval (LowerBound _ _) NoUpperBound) = error "Error: expected upper bound...this should never happen!" showInterval (VersionInterval (LowerBound l _) (UpperBound u _)) = unwords [">=", prettyShow l, "&& <", prettyShow u] -- | Entry point for the @gen-bounds@ command. 
genBounds :: Verbosity -> PackageDBStack -> RepoContext -> Compiler -> Platform -> ProgramDb -> GlobalFlags -> FreezeFlags -> IO () genBounds verbosity packageDBs repoCtxt comp platform progdb globalFlags freezeFlags = do let cinfo = compilerInfo comp cwd <- getCurrentDirectory path <- tryFindPackageDesc verbosity cwd gpd <- readGenericPackageDescription verbosity path -- NB: We don't enable tests or benchmarks, since often they -- don't really have useful bounds. let epd = finalizePD mempty defaultComponentRequestedSpec (const True) platform cinfo [] gpd case epd of Left _ -> putStrLn "finalizePD failed" Right (pd,_) -> do let needBounds = filter (not . hasUpperBound . depVersion) $ enabledBuildDepends pd defaultComponentRequestedSpec if (null needBounds) then putStrLn "Congratulations, all your dependencies have upper bounds!" else go needBounds where go needBounds = do pkgs <- getFreezePkgs verbosity packageDBs repoCtxt comp platform progdb globalFlags freezeFlags putStrLn boundsNeededMsg let isNeeded pkg = unPackageName (packageName pkg) `elem` map depName needBounds let thePkgs = filter isNeeded pkgs let padTo = maximum $ map (length . unPackageName . packageName) pkgs traverse_ (putStrLn . (++",") . showBounds padTo) thePkgs depName :: Dependency -> String depName (Dependency pn _ _) = unPackageName pn depVersion :: Dependency -> VersionRange depVersion (Dependency _ vr _) = vr -- | The message printed when some dependencies are found to be lacking proper -- PVP-mandated bounds. boundsNeededMsg :: String boundsNeededMsg = unlines [ "" , "The following packages need bounds and here is a suggested starting point." , "You can copy and paste this into the build-depends section in your .cabal" , "file and it should work (with the appropriate removal of commas)." 
, "" , "Note that version bounds are a statement that you've successfully built and" , "tested your package and expect it to work with any of the specified package" , "versions (PROVIDED that those packages continue to conform with the PVP)." , "Therefore, the version bounds generated here are the most conservative" , "based on the versions that you are currently building with. If you know" , "your package will work with versions outside the ranges generated here," , "feel free to widen them." , "" ] cabal-install-3.8.1.0/src/Distribution/Client/Get.hs0000644000000000000000000003345107346545000020325 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Get -- Copyright : (c) Andrea Vezzosi 2008 -- Duncan Coutts 2011 -- John Millikin 2012 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- The 'cabal get' command. ----------------------------------------------------------------------------- module Distribution.Client.Get ( get, -- * Cloning 'SourceRepo's -- | Mainly exported for testing purposes clonePackagesFromSourceRepo, ClonePackageException(..), ) where import Prelude () import Distribution.Client.Compat.Prelude hiding (get) import Distribution.Compat.Directory ( listDirectory ) import Distribution.Package ( PackageId, packageId, packageName ) import Distribution.Simple.Setup ( Flag(..), fromFlag, fromFlagOrDefault, flagToMaybe ) import Distribution.Simple.Utils ( notice, die', info, writeFileAtomic ) import qualified Distribution.PackageDescription as PD import Distribution.Simple.Program ( programName ) import Distribution.Types.SourceRepo (RepoKind (..)) import Distribution.Client.Types.SourceRepo (SourceRepositoryPackage (..), SourceRepoProxy, srpToProxy) import Distribution.Client.Setup ( GlobalFlags(..), GetFlags(..), RepoContext(..) 
) import Distribution.Client.Types import Distribution.Client.Targets import Distribution.Client.Dependency import Distribution.Client.VCS import Distribution.Client.FetchUtils import qualified Distribution.Client.Tar as Tar (extractTarGzFile) import Distribution.Client.IndexUtils ( getSourcePackagesAtIndexState, TotalIndexState, ActiveRepos ) import Distribution.Solver.Types.SourcePackage import qualified Data.Map as Map import System.Directory ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist ) import System.FilePath ( (), (<.>), addTrailingPathSeparator ) -- | Entry point for the 'cabal get' command. get :: Verbosity -> RepoContext -> GlobalFlags -> GetFlags -> [UserTarget] -> IO () get verbosity _ _ _ [] = notice verbosity "No packages requested. Nothing to do." get verbosity repoCtxt _ getFlags userTargets = do let useSourceRepo = case getSourceRepository getFlags of NoFlag -> False _ -> True unless useSourceRepo $ traverse_ (checkTarget verbosity) userTargets let idxState :: Maybe TotalIndexState idxState = flagToMaybe $ getIndexState getFlags activeRepos :: Maybe ActiveRepos activeRepos = flagToMaybe $ getActiveRepos getFlags (sourcePkgDb, _, _) <- getSourcePackagesAtIndexState verbosity repoCtxt idxState activeRepos pkgSpecifiers <- resolveUserTargets verbosity repoCtxt (packageIndex sourcePkgDb) userTargets pkgs <- either (die' verbosity . unlines . 
map show) return $ resolveWithoutDependencies (resolverParams sourcePkgDb pkgSpecifiers) unless (null prefix) $ createDirectoryIfMissing True prefix if useSourceRepo then clone pkgs else unpack pkgs where resolverParams :: SourcePackageDb -> [PackageSpecifier UnresolvedSourcePackage] -> DepResolverParams resolverParams sourcePkgDb pkgSpecifiers = --TODO: add command-line constraint and preference args for unpack standardInstallPolicy mempty sourcePkgDb pkgSpecifiers prefix :: String prefix = fromFlagOrDefault "" (getDestDir getFlags) clone :: [UnresolvedSourcePackage] -> IO () clone = clonePackagesFromSourceRepo verbosity prefix kind . map (\pkg -> (packageId pkg, packageSourceRepos pkg)) where kind :: Maybe RepoKind kind = fromFlag . getSourceRepository $ getFlags packageSourceRepos :: SourcePackage loc -> [PD.SourceRepo] packageSourceRepos = PD.sourceRepos . PD.packageDescription . srcpkgDescription unpack :: [UnresolvedSourcePackage] -> IO () unpack pkgs = do for_ pkgs $ \pkg -> do location <- fetchPackage verbosity repoCtxt (srcpkgSource pkg) let pkgid = packageId pkg descOverride | usePristine = Nothing | otherwise = srcpkgDescrOverride pkg case location of LocalTarballPackage tarballPath -> unpackPackage verbosity prefix pkgid descOverride tarballPath RemoteTarballPackage _tarballURL tarballPath -> unpackPackage verbosity prefix pkgid descOverride tarballPath RepoTarballPackage _repo _pkgid tarballPath -> unpackPackage verbosity prefix pkgid descOverride tarballPath RemoteSourceRepoPackage _repo _ -> die' verbosity $ "The 'get' command does no yet support targets " ++ "that are remote source repositories." LocalUnpackedPackage _ -> error "Distribution.Client.Get.unpack: the impossible happened." 
where usePristine :: Bool usePristine = fromFlagOrDefault False (getPristine getFlags) checkTarget :: Verbosity -> UserTarget -> IO () checkTarget verbosity target = case target of UserTargetLocalDir dir -> die' verbosity (notTarball dir) UserTargetLocalCabalFile file -> die' verbosity (notTarball file) _ -> return () where notTarball t = "The 'get' command is for tarball packages. " ++ "The target '" ++ t ++ "' is not a tarball." -- ------------------------------------------------------------ -- * Unpacking the source tarball -- ------------------------------------------------------------ unpackPackage :: Verbosity -> FilePath -> PackageId -> PackageDescriptionOverride -> FilePath -> IO () unpackPackage verbosity prefix pkgid descOverride pkgPath = do let pkgdirname = prettyShow pkgid pkgdir = prefix pkgdirname pkgdir' = addTrailingPathSeparator pkgdir emptyDirectory directory = null <$> listDirectory directory existsDir <- doesDirectoryExist pkgdir when existsDir $ do isEmpty <- emptyDirectory pkgdir unless isEmpty $ die' verbosity $ "The directory \"" ++ pkgdir' ++ "\" already exists and is not empty, not unpacking." existsFile <- doesFileExist pkgdir when existsFile $ die' verbosity $ "A file \"" ++ pkgdir ++ "\" is in the way, not unpacking." notice verbosity $ "Unpacking to " ++ pkgdir' Tar.extractTarGzFile prefix pkgdirname pkgPath case descOverride of Nothing -> return () Just pkgtxt -> do let descFilePath = pkgdir prettyShow (packageName pkgid) <.> "cabal" info verbosity $ "Updating " ++ descFilePath ++ " with the latest revision from the index." 
writeFileAtomic descFilePath pkgtxt -- ------------------------------------------------------------ -- * Cloning packages from their declared source repositories -- ------------------------------------------------------------ data ClonePackageException = ClonePackageNoSourceRepos PackageId | ClonePackageNoSourceReposOfKind PackageId (Maybe RepoKind) | ClonePackageNoRepoType PackageId PD.SourceRepo | ClonePackageUnsupportedRepoType PackageId SourceRepoProxy RepoType | ClonePackageNoRepoLocation PackageId PD.SourceRepo | ClonePackageDestinationExists PackageId FilePath Bool | ClonePackageFailedWithExitCode PackageId SourceRepoProxy String ExitCode deriving (Show, Eq) instance Exception ClonePackageException where displayException (ClonePackageNoSourceRepos pkgid) = "Cannot fetch a source repository for package " ++ prettyShow pkgid ++ ". The package does not specify any source repositories." displayException (ClonePackageNoSourceReposOfKind pkgid repoKind) = "Cannot fetch a source repository for package " ++ prettyShow pkgid ++ ". The package does not specify a source repository of the requested " ++ "kind" ++ maybe "." (\k -> " (kind " ++ prettyShow k ++ ").") repoKind displayException (ClonePackageNoRepoType pkgid _repo) = "Cannot fetch the source repository for package " ++ prettyShow pkgid ++ ". The package's description specifies a source repository but does " ++ "not specify the repository 'type' field (e.g. git, darcs or hg)." displayException (ClonePackageUnsupportedRepoType pkgid _ repoType) = "Cannot fetch the source repository for package " ++ prettyShow pkgid ++ ". The repository type '" ++ prettyShow repoType ++ "' is not yet supported." displayException (ClonePackageNoRepoLocation pkgid _repo) = "Cannot fetch the source repository for package " ++ prettyShow pkgid ++ ". The package's description specifies a source repository but does " ++ "not specify the repository 'location' field (i.e. the URL)." 
displayException (ClonePackageDestinationExists pkgid dest isdir) = "Not fetching the source repository for package " ++ prettyShow pkgid ++ ". " ++ if isdir then "The destination directory " ++ dest ++ " already exists." else "A file " ++ dest ++ " is in the way." displayException (ClonePackageFailedWithExitCode pkgid repo vcsprogname exitcode) = "Failed to fetch the source repository for package " ++ prettyShow pkgid ++ ", repository location " ++ srpLocation repo ++ " (" ++ vcsprogname ++ " failed with " ++ show exitcode ++ ")." -- | Given a bunch of package ids and their corresponding available -- 'SourceRepo's, pick a single 'SourceRepo' for each one and clone into -- new subdirs of the given directory. -- clonePackagesFromSourceRepo :: Verbosity -> FilePath -- ^ destination dir prefix -> Maybe RepoKind -- ^ preferred 'RepoKind' -> [(PackageId, [PD.SourceRepo])] -- ^ the packages and their -- available 'SourceRepo's -> IO () clonePackagesFromSourceRepo verbosity destDirPrefix preferredRepoKind pkgrepos = do -- Do a bunch of checks and collect the required info pkgrepos' <- traverse preCloneChecks pkgrepos -- Configure the VCS drivers for all the repository types we may need vcss <- configureVCSs verbosity $ Map.fromList [ (vcsRepoType vcs, vcs) | (_, _, vcs, _) <- pkgrepos' ] -- Now execute all the required commands for each repo sequence_ [ cloneSourceRepo verbosity vcs' repo destDir `catch` \exitcode -> throwIO (ClonePackageFailedWithExitCode pkgid (srpToProxy repo) (programName (vcsProgram vcs)) exitcode) | (pkgid, repo, vcs, destDir) <- pkgrepos' , let vcs' = Map.findWithDefault (error $ "Cannot configure " ++ prettyShow (vcsRepoType vcs)) (vcsRepoType vcs) vcss ] where preCloneChecks :: (PackageId, [PD.SourceRepo]) -> IO (PackageId, SourceRepositoryPackage Maybe, VCS Program, FilePath) preCloneChecks (pkgid, repos) = do repo <- case selectPackageSourceRepo preferredRepoKind repos of Just repo -> return repo Nothing | null repos -> throwIO 
(ClonePackageNoSourceRepos pkgid) Nothing -> throwIO (ClonePackageNoSourceReposOfKind pkgid preferredRepoKind) (repo', vcs) <- case validatePDSourceRepo repo of Right (repo', _, _, vcs) -> return (repo', vcs) Left SourceRepoRepoTypeUnspecified -> throwIO (ClonePackageNoRepoType pkgid repo) Left (SourceRepoRepoTypeUnsupported repo' repoType) -> throwIO (ClonePackageUnsupportedRepoType pkgid repo' repoType) Left SourceRepoLocationUnspecified -> throwIO (ClonePackageNoRepoLocation pkgid repo) let destDir :: FilePath destDir = destDirPrefix prettyShow (packageName pkgid) destDirExists <- doesDirectoryExist destDir destFileExists <- doesFileExist destDir when (destDirExists || destFileExists) $ throwIO (ClonePackageDestinationExists pkgid destDir destDirExists) return (pkgid, repo', vcs, destDir) ------------------------------------------------------------------------------- -- Selecting ------------------------------------------------------------------------------- -- | Pick the 'SourceRepo' to use to get the package sources from. -- -- Note that this does /not/ depend on what 'VCS' drivers we are able to -- successfully configure. It is based only on the 'SourceRepo's declared -- in the package, and optionally on a preferred 'RepoKind'. -- selectPackageSourceRepo :: Maybe RepoKind -> [PD.SourceRepo] -> Maybe PD.SourceRepo selectPackageSourceRepo preferredRepoKind = listToMaybe -- Sort repositories by kind, from This to Head to Unknown. Repositories -- with equivalent kinds are selected based on the order they appear in -- the Cabal description file. . sortBy (comparing thisFirst) -- If the user has specified the repo kind, filter out the repositories -- they're not interested in. . 
filter (\repo -> maybe True (PD.repoKind repo ==) preferredRepoKind) where thisFirst :: PD.SourceRepo -> Int thisFirst r = case PD.repoKind r of RepoThis -> 0 RepoHead -> case PD.repoTag r of -- If the type is 'head' but the author specified a tag, they -- probably meant to create a 'this' repository but screwed up. Just _ -> 0 Nothing -> 1 RepoKindUnknown _ -> 2 cabal-install-3.8.1.0/src/Distribution/Client/Glob.hs0000644000000000000000000002105107346545000020462 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} --TODO: [code cleanup] plausibly much of this module should be merged with -- similar functionality in Cabal. module Distribution.Client.Glob ( FilePathGlob(..) , FilePathRoot(..) , FilePathGlobRel(..) , Glob , GlobPiece(..) , matchFileGlob , matchFileGlobRel , matchGlob , isTrivialFilePathGlob , getFilePathRootDirectory ) where import Distribution.Client.Compat.Prelude import Prelude () import Data.List (stripPrefix) import System.Directory import System.FilePath import qualified Distribution.Compat.CharParsing as P import qualified Text.PrettyPrint as Disp -- | A file path specified by globbing -- data FilePathGlob = FilePathGlob FilePathRoot FilePathGlobRel deriving (Eq, Show, Generic) data FilePathGlobRel = GlobDir !Glob !FilePathGlobRel | GlobFile !Glob | GlobDirTrailing -- ^ trailing dir, a glob ending in @/@ deriving (Eq, Show, Generic) -- | A single directory or file component of a globbed path type Glob = [GlobPiece] -- | A piece of a globbing pattern data GlobPiece = WildCard | Literal String | Union [Glob] deriving (Eq, Show, Generic) data FilePathRoot = FilePathRelative | FilePathRoot FilePath -- ^ e.g. 
@"/"@, @"c:\"@ or result of 'takeDrive' | FilePathHomeDir deriving (Eq, Show, Generic) instance Binary FilePathGlob instance Binary FilePathRoot instance Binary FilePathGlobRel instance Binary GlobPiece instance Structured FilePathGlob instance Structured FilePathRoot instance Structured FilePathGlobRel instance Structured GlobPiece -- | Check if a 'FilePathGlob' doesn't actually make use of any globbing and -- is in fact equivalent to a non-glob 'FilePath'. -- -- If it is trivial in this sense then the result is the equivalent constant -- 'FilePath'. On the other hand if it is not trivial (so could in principle -- match more than one file) then the result is @Nothing@. -- isTrivialFilePathGlob :: FilePathGlob -> Maybe FilePath isTrivialFilePathGlob (FilePathGlob root pathglob) = case root of FilePathRelative -> go [] pathglob FilePathRoot root' -> go [root'] pathglob FilePathHomeDir -> Nothing where go paths (GlobDir [Literal path] globs) = go (path:paths) globs go paths (GlobFile [Literal path]) = Just (joinPath (reverse (path:paths))) go paths GlobDirTrailing = Just (addTrailingPathSeparator (joinPath (reverse paths))) go _ _ = Nothing -- | Get the 'FilePath' corresponding to a 'FilePathRoot'. -- -- The 'FilePath' argument is required to supply the path for the -- 'FilePathRelative' case. -- getFilePathRootDirectory :: FilePathRoot -> FilePath -- ^ root for relative paths -> IO FilePath getFilePathRootDirectory FilePathRelative root = return root getFilePathRootDirectory (FilePathRoot root) _ = return root getFilePathRootDirectory FilePathHomeDir _ = getHomeDirectory ------------------------------------------------------------------------------ -- Matching -- -- | Match a 'FilePathGlob' against the file system, starting from a given -- root directory for relative paths. The results of relative globs are -- relative to the given root. Matches for absolute globs are absolute. 
-- matchFileGlob :: FilePath -> FilePathGlob -> IO [FilePath] matchFileGlob relroot (FilePathGlob globroot glob) = do root <- getFilePathRootDirectory globroot relroot matches <- matchFileGlobRel root glob case globroot of FilePathRelative -> return matches _ -> return (map (root ) matches) -- | Match a 'FilePathGlobRel' against the file system, starting from a -- given root directory. The results are all relative to the given root. -- matchFileGlobRel :: FilePath -> FilePathGlobRel -> IO [FilePath] matchFileGlobRel root glob0 = go glob0 "" where go (GlobFile glob) dir = do entries <- getDirectoryContents (root dir) let files = filter (matchGlob glob) entries return (map (dir ) files) go (GlobDir glob globPath) dir = do entries <- getDirectoryContents (root dir) subdirs <- filterM (\subdir -> doesDirectoryExist (root dir subdir)) $ filter (matchGlob glob) entries concat <$> traverse (\subdir -> go globPath (dir subdir)) subdirs go GlobDirTrailing dir = return [dir] -- | Match a globbing pattern against a file path component -- matchGlob :: Glob -> String -> Bool matchGlob = goStart where -- From the man page, glob(7): -- "If a filename starts with a '.', this character must be -- matched explicitly." 
go, goStart :: [GlobPiece] -> String -> Bool goStart (WildCard:_) ('.':_) = False goStart (Union globs:rest) cs = any (\glob -> goStart (glob ++ rest) cs) globs goStart rest cs = go rest cs go [] "" = True go (Literal lit:rest) cs | Just cs' <- stripPrefix lit cs = go rest cs' | otherwise = False go [WildCard] "" = True go (WildCard:rest) (c:cs) = go rest (c:cs) || go (WildCard:rest) cs go (Union globs:rest) cs = any (\glob -> go (glob ++ rest) cs) globs go [] (_:_) = False go (_:_) "" = False ------------------------------------------------------------------------------ -- Parsing & printing -- instance Pretty FilePathGlob where pretty (FilePathGlob root pathglob) = pretty root Disp.<> pretty pathglob instance Parsec FilePathGlob where parsec = do root <- parsec case root of FilePathRelative -> FilePathGlob root <$> parsec _ -> FilePathGlob root <$> parsec <|> pure (FilePathGlob root GlobDirTrailing) instance Pretty FilePathRoot where pretty FilePathRelative = Disp.empty pretty (FilePathRoot root) = Disp.text root pretty FilePathHomeDir = Disp.char '~' Disp.<> Disp.char '/' instance Parsec FilePathRoot where parsec = root <|> P.try home <|> P.try drive <|> pure FilePathRelative where root = FilePathRoot "/" <$ P.char '/' home = FilePathHomeDir <$ P.string "~/" drive = do dr <- P.satisfy $ \c -> (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') _ <- P.char ':' _ <- P.char '/' <|> P.char '\\' return (FilePathRoot (toUpper dr : ":\\")) instance Pretty FilePathGlobRel where pretty (GlobDir glob pathglob) = dispGlob glob Disp.<> Disp.char '/' Disp.<> pretty pathglob pretty (GlobFile glob) = dispGlob glob pretty GlobDirTrailing = Disp.empty instance Parsec FilePathGlobRel where parsec = parsecPath where parsecPath :: CabalParsing m => m FilePathGlobRel parsecPath = do glob <- parsecGlob dirSep *> (GlobDir glob <$> parsecPath <|> pure (GlobDir glob GlobDirTrailing)) <|> pure (GlobFile glob) dirSep :: CabalParsing m => m () dirSep = () <$ P.char '/' <|> P.try (do _ <- 
P.char '\\' -- check this isn't an escape code P.notFollowedBy (P.satisfy isGlobEscapedChar)) dispGlob :: Glob -> Disp.Doc dispGlob = Disp.hcat . map dispPiece where dispPiece WildCard = Disp.char '*' dispPiece (Literal str) = Disp.text (escape str) dispPiece (Union globs) = Disp.braces (Disp.hcat (Disp.punctuate (Disp.char ',') (map dispGlob globs))) escape [] = [] escape (c:cs) | isGlobEscapedChar c = '\\' : c : escape cs | otherwise = c : escape cs parsecGlob :: CabalParsing m => m Glob parsecGlob = some parsecPiece where parsecPiece = P.choice [ literal, wildcard, union ] wildcard = WildCard <$ P.char '*' union = Union . toList <$> P.between (P.char '{') (P.char '}') (P.sepByNonEmpty parsecGlob (P.char ',')) literal = Literal <$> some litchar litchar = normal <|> escape normal = P.satisfy (\c -> not (isGlobEscapedChar c) && c /= '/' && c /= '\\') escape = P.try $ P.char '\\' >> P.satisfy isGlobEscapedChar isGlobEscapedChar :: Char -> Bool isGlobEscapedChar '*' = True isGlobEscapedChar '{' = True isGlobEscapedChar '}' = True isGlobEscapedChar ',' = True isGlobEscapedChar _ = False cabal-install-3.8.1.0/src/Distribution/Client/GlobalFlags.hs0000644000000000000000000002733407346545000021766 0ustar0000000000000000{-# LANGUAGE BangPatterns #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE RecordWildCards #-} module Distribution.Client.GlobalFlags ( GlobalFlags(..) , defaultGlobalFlags , RepoContext(..) 
, withRepoContext , withRepoContext' ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Client.Types ( Repo(..), unRepoName, RemoteRepo(..), LocalRepo (..), localRepoCacheKey ) import Distribution.Simple.Setup ( Flag(..), fromFlag, flagToMaybe ) import Distribution.Utils.NubList ( NubList, fromNubList ) import Distribution.Client.HttpUtils ( HttpTransport, configureTransport ) import Distribution.Simple.Utils ( info, warn ) import Distribution.Client.IndexUtils.ActiveRepos ( ActiveRepos ) import Control.Concurrent ( MVar, newMVar, modifyMVar ) import System.FilePath ( () ) import Network.URI ( URI, uriScheme, uriPath ) import qualified Data.Map as Map import qualified Hackage.Security.Client as Sec import qualified Hackage.Security.Util.Path as Sec import qualified Hackage.Security.Util.Pretty as Sec import qualified Hackage.Security.Client.Repository.Cache as Sec import qualified Hackage.Security.Client.Repository.Local as Sec.Local import qualified Hackage.Security.Client.Repository.Remote as Sec.Remote import qualified Distribution.Client.Security.HTTP as Sec.HTTP import qualified Distribution.Client.Security.DNS as Sec.DNS import qualified System.FilePath.Posix as FilePath.Posix -- ------------------------------------------------------------ -- * Global flags -- ------------------------------------------------------------ -- | Flags that apply at the top level, not to any sub-command. data GlobalFlags = GlobalFlags { globalVersion :: Flag Bool , globalNumericVersion :: Flag Bool , globalConfigFile :: Flag FilePath , globalConstraintsFile :: Flag FilePath , globalRemoteRepos :: NubList RemoteRepo -- ^ Available Hackage servers. 
, globalCacheDir :: Flag FilePath , globalLocalNoIndexRepos :: NubList LocalRepo , globalActiveRepos :: Flag ActiveRepos , globalLogsDir :: Flag FilePath , globalIgnoreExpiry :: Flag Bool -- ^ Ignore security expiry dates , globalHttpTransport :: Flag String , globalNix :: Flag Bool -- ^ Integrate with Nix , globalStoreDir :: Flag FilePath , globalProgPathExtra :: NubList FilePath -- ^ Extra program path used for packagedb lookups in a global context (i.e. for http transports) } deriving (Show, Generic) defaultGlobalFlags :: GlobalFlags defaultGlobalFlags = GlobalFlags { globalVersion = Flag False , globalNumericVersion = Flag False , globalConfigFile = mempty , globalConstraintsFile = mempty , globalRemoteRepos = mempty , globalCacheDir = mempty , globalLocalNoIndexRepos = mempty , globalActiveRepos = mempty , globalLogsDir = mempty , globalIgnoreExpiry = Flag False , globalHttpTransport = mempty , globalNix = Flag False , globalStoreDir = mempty , globalProgPathExtra = mempty } instance Monoid GlobalFlags where mempty = gmempty mappend = (<>) instance Semigroup GlobalFlags where (<>) = gmappend -- ------------------------------------------------------------ -- * Repo context -- ------------------------------------------------------------ -- | Access to repositories data RepoContext = RepoContext { -- | All user-specified repositories repoContextRepos :: [Repo] -- | Get the HTTP transport -- -- The transport will be initialized on the first call to this function. -- -- NOTE: It is important that we don't eagerly initialize the transport. -- Initializing the transport is not free, and especially in contexts where -- we don't know a priori whether or not we need the transport (for instance -- when using cabal in "nix mode") incurring the overhead of transport -- initialization on _every_ invocation (eg @cabal build@) is undesirable. 
, repoContextGetTransport :: IO HttpTransport -- | Get the (initialized) secure repo -- -- (the 'Repo' type itself is stateless and must remain so, because it -- must be serializable) , repoContextWithSecureRepo :: forall a. Repo -> (forall down. Sec.Repository down -> IO a) -> IO a -- | Should we ignore expiry times (when checking security)? , repoContextIgnoreExpiry :: Bool } -- | Wrapper around 'Repository', hiding the type argument data SecureRepo = forall down. SecureRepo (Sec.Repository down) withRepoContext :: Verbosity -> GlobalFlags -> (RepoContext -> IO a) -> IO a withRepoContext verbosity globalFlags = withRepoContext' verbosity (fromNubList (globalRemoteRepos globalFlags)) (fromNubList (globalLocalNoIndexRepos globalFlags)) (fromFlag (globalCacheDir globalFlags)) (flagToMaybe (globalHttpTransport globalFlags)) (flagToMaybe (globalIgnoreExpiry globalFlags)) (fromNubList (globalProgPathExtra globalFlags)) withRepoContext' :: Verbosity -> [RemoteRepo] -> [LocalRepo] -> FilePath -> Maybe String -> Maybe Bool -> [FilePath] -> (RepoContext -> IO a) -> IO a withRepoContext' verbosity remoteRepos localNoIndexRepos sharedCacheDir httpTransport ignoreExpiry extraPaths = \callback -> do for_ localNoIndexRepos $ \local -> unless (FilePath.Posix.isAbsolute (localRepoPath local)) $ warn verbosity $ "file+noindex " ++ unRepoName (localRepoName local) ++ " repository path is not absolute; this is fragile, and not recommended" transportRef <- newMVar Nothing let httpLib = Sec.HTTP.transportAdapter verbosity (getTransport transportRef) initSecureRepos verbosity httpLib secureRemoteRepos $ \secureRepos' -> callback RepoContext { repoContextRepos = allRemoteRepos ++ allLocalNoIndexRepos , repoContextGetTransport = getTransport transportRef , repoContextWithSecureRepo = withSecureRepo secureRepos' , repoContextIgnoreExpiry = fromMaybe False ignoreExpiry } where secureRemoteRepos = [ (remote, cacheDir) | RepoSecure remote cacheDir <- allRemoteRepos ] allRemoteRepos :: [Repo] 
allRemoteRepos = [ (if isSecure then RepoSecure else RepoRemote) remote cacheDir | remote <- remoteRepos , let cacheDir = sharedCacheDir unRepoName (remoteRepoName remote) isSecure = remoteRepoSecure remote == Just True ] allLocalNoIndexRepos :: [Repo] allLocalNoIndexRepos = [ RepoLocalNoIndex local cacheDir | local <- localNoIndexRepos , let cacheDir | localRepoSharedCache local = sharedCacheDir localRepoCacheKey local | otherwise = localRepoPath local ] getTransport :: MVar (Maybe HttpTransport) -> IO HttpTransport getTransport transportRef = modifyMVar transportRef $ \mTransport -> do transport <- case mTransport of Just tr -> return tr Nothing -> configureTransport verbosity extraPaths httpTransport return (Just transport, transport) withSecureRepo :: Map Repo SecureRepo -> Repo -> (forall down. Sec.Repository down -> IO a) -> IO a withSecureRepo secureRepos repo callback = case Map.lookup repo secureRepos of Just (SecureRepo secureRepo) -> callback secureRepo Nothing -> throwIO $ userError "repoContextWithSecureRepo: unknown repo" -- | Initialize the provided secure repositories -- -- Assumed invariant: `remoteRepoSecure` should be set for all these repos. initSecureRepos :: forall a. Verbosity -> Sec.HTTP.HttpLib -> [(RemoteRepo, FilePath)] -> (Map Repo SecureRepo -> IO a) -> IO a initSecureRepos verbosity httpLib repos callback = go Map.empty repos where go :: Map Repo SecureRepo -> [(RemoteRepo, FilePath)] -> IO a go !acc [] = callback acc go !acc ((r,cacheDir):rs) = do cachePath <- Sec.makeAbsolute $ Sec.fromFilePath cacheDir initSecureRepo verbosity httpLib r cachePath $ \r' -> go (Map.insert (RepoSecure r cacheDir) r' acc) rs -- | Initialize the given secure repo -- -- The security library has its own concept of a "local" repository, distinct -- from @cabal-install@'s; these are secure repositories, but live in the local -- file system. We use the convention that these repositories are identified by -- URLs of the form @file:/path/to/local/repo@. 
initSecureRepo :: Verbosity -> Sec.HTTP.HttpLib -> RemoteRepo -- ^ Secure repo ('remoteRepoSecure' assumed) -> Sec.Path Sec.Absolute -- ^ Cache dir -> (SecureRepo -> IO a) -- ^ Callback -> IO a initSecureRepo verbosity httpLib RemoteRepo{..} cachePath = \callback -> do requiresBootstrap <- withRepo [] Sec.requiresBootstrap mirrors <- if requiresBootstrap then do info verbosity $ "Trying to locate mirrors via DNS for " ++ "initial bootstrap of secure " ++ "repository '" ++ show remoteRepoURI ++ "' ..." Sec.DNS.queryBootstrapMirrors verbosity remoteRepoURI else pure [] withRepo mirrors $ \r -> do when requiresBootstrap $ Sec.uncheckClientErrors $ Sec.bootstrap r (map Sec.KeyId remoteRepoRootKeys) (Sec.KeyThreshold (fromIntegral remoteRepoKeyThreshold)) callback $ SecureRepo r where -- Initialize local or remote repo depending on the URI withRepo :: [URI] -> (forall down. Sec.Repository down -> IO a) -> IO a withRepo _ callback | uriScheme remoteRepoURI == "file:" = do dir <- Sec.makeAbsolute $ Sec.fromFilePath (uriPath remoteRepoURI) Sec.Local.withRepository dir cache Sec.hackageRepoLayout Sec.hackageIndexLayout logTUF callback withRepo mirrors callback = Sec.Remote.withRepository httpLib (remoteRepoURI:mirrors) Sec.Remote.defaultRepoOpts cache Sec.hackageRepoLayout Sec.hackageIndexLayout logTUF callback cache :: Sec.Cache cache = Sec.Cache { cacheRoot = cachePath , cacheLayout = Sec.cabalCacheLayout { Sec.cacheLayoutIndexTar = cacheFn "01-index.tar" , Sec.cacheLayoutIndexIdx = cacheFn "01-index.tar.idx" , Sec.cacheLayoutIndexTarGz = cacheFn "01-index.tar.gz" } } cacheFn :: FilePath -> Sec.CachePath cacheFn = Sec.rootPath . Sec.fragment -- We display any TUF progress only in verbose mode, including any transient -- verification errors. If verification fails, then the final exception that -- is thrown will of course be shown. logTUF :: Sec.LogMessage -> IO () logTUF = info verbosity . 
Sec.pretty cabal-install-3.8.1.0/src/Distribution/Client/Haddock.hs0000644000000000000000000000524007346545000021136 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Haddock -- Copyright : (c) Andrea Vezzosi 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Interfacing with Haddock -- ----------------------------------------------------------------------------- module Distribution.Client.Haddock ( regenerateHaddockIndex ) where import Distribution.Client.Compat.Prelude import Prelude () import Data.List (maximumBy) import System.Directory (createDirectoryIfMissing, renameFile) import System.FilePath ((), splitFileName) import Distribution.Package ( packageVersion ) import Distribution.Simple.Haddock (haddockPackagePaths) import Distribution.Simple.Program (haddockProgram, ProgramDb , runProgram, requireProgramVersion) import Distribution.Version (mkVersion, orLaterVersion) import Distribution.Simple.PackageIndex ( InstalledPackageIndex, allPackagesByName ) import Distribution.Simple.Utils ( debug, installDirectoryContents, withTempDirectory ) import Distribution.InstalledPackageInfo as InstalledPackageInfo ( InstalledPackageInfo(exposed) ) regenerateHaddockIndex :: Verbosity -> InstalledPackageIndex -> ProgramDb -> FilePath -> IO () regenerateHaddockIndex verbosity pkgs progdb index = do (paths, warns) <- haddockPackagePaths pkgs' Nothing let paths' = [ (interface, html) | (interface, Just html, _) <- paths] for_ warns (debug verbosity) (confHaddock, _, _) <- requireProgramVersion verbosity haddockProgram (orLaterVersion (mkVersion [0,6])) progdb createDirectoryIfMissing True destDir withTempDirectory verbosity destDir "tmphaddock" $ \tempDir -> do let flags = [ "--gen-contents" , "--gen-index" , "--odir=" ++ tempDir , "--title=Haskell modules on this system" ] ++ [ "--read-interface=" ++ html ++ "," ++ interface | (interface, 
html) <- paths' ] runProgram verbosity confHaddock flags renameFile (tempDir "index.html") (tempDir destFile) installDirectoryContents verbosity tempDir destDir where (destDir,destFile) = splitFileName index pkgs' :: [InstalledPackageInfo] pkgs' = [ maximumBy (comparing packageVersion) pkgvers' | (_pname, pkgvers) <- allPackagesByName pkgs , let pkgvers' = filter exposed pkgvers , not (null pkgvers') ] cabal-install-3.8.1.0/src/Distribution/Client/HashValue.hs0000644000000000000000000000625307346545000021466 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} module Distribution.Client.HashValue ( HashValue, hashValue, truncateHash, showHashValue, readFileHashValue, hashFromTUF, ) where import Distribution.Client.Compat.Prelude import Prelude () import qualified Hackage.Security.Client as Sec import qualified Crypto.Hash.SHA256 as SHA256 import qualified Data.ByteString.Base16 as Base16 import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.Lazy.Char8 as LBS import System.IO (IOMode (..), withBinaryFile) ----------------------------------------------- -- The specific choice of hash implementation -- -- Is a crypto hash necessary here? One thing to consider is who controls the -- inputs and what's the result of a hash collision. Obviously we should not -- install packages we don't trust because they can run all sorts of code, but -- if I've checked there's no TH, no custom Setup etc, is there still a -- problem? If someone provided us a tarball that hashed to the same value as -- some other package and we installed it, we could end up re-using that -- installed package in place of another one we wanted. So yes, in general -- there is some value in preventing intentional hash collisions in installed -- package ids. newtype HashValue = HashValue BS.ByteString deriving (Eq, Generic, Show, Typeable) -- Cannot do any sensible validation here. 
Although we use SHA256 -- for stuff we hash ourselves, we can also get hashes from TUF -- and that can in principle use different hash functions in future. -- -- Therefore, we simply derive this structurally. instance Binary HashValue instance Structured HashValue -- | Hash some data. Currently uses SHA256. -- hashValue :: LBS.ByteString -> HashValue hashValue = HashValue . SHA256.hashlazy showHashValue :: HashValue -> String showHashValue (HashValue digest) = BS.unpack (Base16.encode digest) -- | Hash the content of a file. Uses SHA256. -- readFileHashValue :: FilePath -> IO HashValue readFileHashValue tarball = withBinaryFile tarball ReadMode $ \hnd -> evaluate . hashValue =<< LBS.hGetContents hnd -- | Convert a hash from TUF metadata into a 'PackageSourceHash'. -- -- Note that TUF hashes don't necessarily have to be SHA256, since it can -- support new algorithms in future. -- hashFromTUF :: Sec.Hash -> HashValue hashFromTUF (Sec.Hash hashstr) = --TODO: [code cleanup] either we should get TUF to use raw bytestrings or -- perhaps we should also just use a base16 string as the internal rep. case Base16.decode (BS.pack hashstr) of #if MIN_VERSION_base16_bytestring(1,0,0) Right hash -> HashValue hash Left _ -> error "hashFromTUF: cannot decode base16" #else (hash, trailing) | not (BS.null hash) && BS.null trailing -> HashValue hash _ -> error "hashFromTUF: cannot decode base16 hash" #endif -- | Truncate a 32 byte SHA256 hash to -- -- For example 20 bytes render as 40 hex chars, which we use for unit-ids. 
-- Or even 4 bytes for 'hashedInstalledPackageIdShort' -- truncateHash :: Int -> HashValue -> HashValue truncateHash n (HashValue h) = HashValue (BS.take n h) cabal-install-3.8.1.0/src/Distribution/Client/HttpUtils.hs0000644000000000000000000011252207346545000021543 0ustar0000000000000000{-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | Separate module for HTTP actions, using a proxy server if one exists. ----------------------------------------------------------------------------- module Distribution.Client.HttpUtils ( DownloadResult(..), configureTransport, HttpTransport(..), HttpCode, downloadURI, transportCheckHttps, remoteRepoCheckHttps, remoteRepoTryUpgradeToHttps, isOldHackageURI ) where import Prelude () import Distribution.Client.Compat.Prelude hiding (Proxy (..)) import Distribution.Utils.Generic import Network.HTTP ( Request (..), Response (..), RequestMethod (..) , Header(..), HeaderName(..), lookupHeader ) import Network.HTTP.Proxy ( Proxy(..), fetchProxy) import Network.URI ( URI (..), URIAuth (..), uriToString ) import Network.Browser ( browse, setOutHandler, setErrHandler, setProxy , setAuthorityGen, request, setAllowBasicAuth, setUserAgent ) import qualified Control.Exception as Exception import Distribution.Simple.Utils ( die', info, warn, debug, notice , copyFileVerbose, withTempFile, IOData (..) ) import Distribution.Utils.String (trim) import Distribution.Client.Utils ( withTempFileName ) import Distribution.Client.Version ( cabalInstallVersion ) import Distribution.Client.Types ( unRepoName, RemoteRepo(..) 
) import Distribution.System ( buildOS, buildArch ) import qualified System.FilePath.Posix as FilePath.Posix ( splitDirectories ) import System.FilePath ( (<.>), takeFileName, takeDirectory ) import System.Directory ( doesFileExist, renameFile, canonicalizePath ) import System.IO ( withFile, IOMode(ReadMode), hGetContents, hClose ) import System.IO.Error ( isDoesNotExistError ) import Distribution.Simple.Program ( Program, simpleProgram, ConfiguredProgram, programPath , ProgramInvocation(..), programInvocation , ProgramSearchPathEntry(..) , getProgramInvocationOutput ) import Distribution.Simple.Program.Db ( ProgramDb, emptyProgramDb, addKnownPrograms , configureAllKnownPrograms , requireProgram, lookupProgram , modifyProgramSearchPath ) import Distribution.Simple.Program.Run ( getProgramInvocationOutputAndErrors ) import Numeric (showHex) import System.Random (randomRIO) import qualified Crypto.Hash.SHA256 as SHA256 import qualified Data.ByteString.Base16 as Base16 import qualified Distribution.Compat.CharParsing as P import qualified Data.ByteString as BS import qualified Data.ByteString.Char8 as BS8 import qualified Data.ByteString.Lazy as LBS import qualified Data.ByteString.Lazy.Char8 as LBS8 ------------------------------------------------------------------------------ -- Downloading a URI, given an HttpTransport -- data DownloadResult = FileAlreadyInCache | FileDownloaded FilePath deriving (Eq) data DownloadCheck = Downloaded -- ^ already downloaded and sha256 matches | CheckETag String -- ^ already downloaded and we have etag | NeedsDownload (Maybe BS.ByteString) -- ^ needs download with optional hash check deriving Eq downloadURI :: HttpTransport -> Verbosity -> URI -- ^ What to download -> FilePath -- ^ Where to put it -> IO DownloadResult downloadURI _transport verbosity uri path | uriScheme uri == "file:" = do copyFileVerbose verbosity (uriPath uri) path return (FileDownloaded path) -- Can we store the hash of the file so we can safely return path when 
the -- hash matches to avoid unnecessary computation? downloadURI transport verbosity uri path = do targetExists <- doesFileExist path downloadCheck <- -- if we have uriFrag, then we expect there to be #sha256=... if not (null uriFrag) then case sha256parsed of -- we know the hash, and target exists Right expected | targetExists -> do contents <- LBS.readFile path let actual = SHA256.hashlazy contents if expected == actual then return Downloaded else return (NeedsDownload (Just expected)) -- we known the hash, target doesn't exist Right expected -> return (NeedsDownload (Just expected)) -- we failed to parse uriFragment Left err -> die' verbosity $ "Cannot parse URI fragment " ++ uriFrag ++ " " ++ err -- if there are no uri fragment, use ETag else do etagPathExists <- doesFileExist etagPath -- In rare cases the target file doesn't exist, but the etag does. if targetExists && etagPathExists then return (CheckETag etagPath) else return (NeedsDownload Nothing) -- Only use the external http transports if we actually have to -- (or have been told to do so) let transport' | uriScheme uri == "http:" , not (transportManuallySelected transport) = plainHttpTransport | otherwise = transport case downloadCheck of Downloaded -> return FileAlreadyInCache CheckETag etag -> makeDownload transport' Nothing (Just etag) NeedsDownload hash -> makeDownload transport' hash Nothing where makeDownload :: HttpTransport -> Maybe BS8.ByteString -> Maybe String -> IO DownloadResult makeDownload transport' sha256 etag = withTempFileName (takeDirectory path) (takeFileName path) $ \tmpFile -> do result <- getHttp transport' verbosity uri etag tmpFile [] -- Only write the etag if we get a 200 response code. -- A 304 still sends us an etag header. case result of -- if we have hash, we don't care about etag. 
(200, _) | Just expected <- sha256 -> do contents <- LBS.readFile tmpFile let actual = SHA256.hashlazy contents unless (actual == expected) $ die' verbosity $ unwords [ "Failed to download", show uri , ": SHA256 don't match; expected:", BS8.unpack (Base16.encode expected) , "actual:", BS8.unpack (Base16.encode actual) ] (200, Just newEtag) -> writeFile etagPath newEtag _ -> return () case fst result of 200 -> do info verbosity ("Downloaded to " ++ path) renameFile tmpFile path return (FileDownloaded path) 304 -> do notice verbosity "Skipping download: local and remote files match." return FileAlreadyInCache errCode -> die' verbosity $ "failed to download " ++ show uri ++ " : HTTP code " ++ show errCode etagPath = path <.> "etag" uriFrag = uriFragment uri sha256parsed :: Either String BS.ByteString sha256parsed = explicitEitherParsec fragmentParser uriFrag fragmentParser = do _ <- P.string "#sha256=" str <- some P.hexDigit let bs = Base16.decode (BS8.pack str) #if MIN_VERSION_base16_bytestring(1,0,0) either fail return bs #else return (fst bs) #endif ------------------------------------------------------------------------------ -- Utilities for repo url management -- remoteRepoCheckHttps :: Verbosity -> HttpTransport -> RemoteRepo -> IO () remoteRepoCheckHttps verbosity transport repo | uriScheme (remoteRepoURI repo) == "https:" , not (transportSupportsHttps transport) = die' verbosity $ "The remote repository '" ++ unRepoName (remoteRepoName repo) ++ "' specifies a URL that " ++ requiresHttpsErrorMessage | otherwise = return () transportCheckHttps :: Verbosity -> HttpTransport -> URI -> IO () transportCheckHttps verbosity transport uri | uriScheme uri == "https:" , not (transportSupportsHttps transport) = die' verbosity $ "The URL " ++ show uri ++ " " ++ requiresHttpsErrorMessage | otherwise = return () requiresHttpsErrorMessage :: String requiresHttpsErrorMessage = "requires HTTPS however the built-in HTTP implementation " ++ "does not support HTTPS. 
The transport implementations with HTTPS " ++ "support are " ++ intercalate ", " [ name | (name, _, True, _ ) <- supportedTransports ] ++ ". One of these will be selected automatically if the corresponding " ++ "external program is available, or one can be selected specifically " ++ "with the global flag --http-transport=" remoteRepoTryUpgradeToHttps :: Verbosity -> HttpTransport -> RemoteRepo -> IO RemoteRepo remoteRepoTryUpgradeToHttps verbosity transport repo | remoteRepoShouldTryHttps repo , uriScheme (remoteRepoURI repo) == "http:" , not (transportSupportsHttps transport) , not (transportManuallySelected transport) = die' verbosity $ "The builtin HTTP implementation does not support HTTPS, but using " ++ "HTTPS for authenticated uploads is recommended. " ++ "The transport implementations with HTTPS support are " ++ intercalate ", " [ name | (name, _, True, _ ) <- supportedTransports ] ++ "but they require the corresponding external program to be " ++ "available. You can either make one available or use plain HTTP by " ++ "using the global flag --http-transport=plain-http (or putting the " ++ "equivalent in the config file). With plain HTTP, your password " ++ "is sent using HTTP digest authentication so it cannot be easily " ++ "intercepted, but it is not as secure as using HTTPS." | remoteRepoShouldTryHttps repo , uriScheme (remoteRepoURI repo) == "http:" , transportSupportsHttps transport = return repo { remoteRepoURI = (remoteRepoURI repo) { uriScheme = "https:" } } | otherwise = return repo -- | Utility function for legacy support. 
isOldHackageURI :: URI -> Bool isOldHackageURI uri = case uriAuthority uri of Just (URIAuth {uriRegName = "hackage.haskell.org"}) -> FilePath.Posix.splitDirectories (uriPath uri) == ["/","packages","archive"] _ -> False ------------------------------------------------------------------------------ -- Setting up a HttpTransport -- data HttpTransport = HttpTransport { -- | GET a URI, with an optional ETag (to do a conditional fetch), -- write the resource to the given file and return the HTTP status code, -- and optional ETag. getHttp :: Verbosity -> URI -> Maybe ETag -> FilePath -> [Header] -> IO (HttpCode, Maybe ETag), -- | POST a resource to a URI, with optional auth (username, password) -- and return the HTTP status code and any redirect URL. postHttp :: Verbosity -> URI -> String -> Maybe Auth -> IO (HttpCode, String), -- | POST a file resource to a URI using multipart\/form-data encoding, -- with optional auth (username, password) and return the HTTP status -- code and any error string. postHttpFile :: Verbosity -> URI -> FilePath -> Maybe Auth -> IO (HttpCode, String), -- | PUT a file resource to a URI, with optional auth -- (username, password), extra headers and return the HTTP status code -- and any error string. putHttpFile :: Verbosity -> URI -> FilePath -> Maybe Auth -> [Header] -> IO (HttpCode, String), -- | Whether this transport supports https or just http. transportSupportsHttps :: Bool, -- | Whether this transport implementation was specifically chosen by -- the user via configuration, or whether it was automatically selected. -- Strictly speaking this is not a property of the transport itself but -- about how it was chosen. Nevertheless it's convenient to keep here. transportManuallySelected :: Bool } --TODO: why does postHttp return a redirect, but postHttpFile return errors? 
type HttpCode = Int type ETag = String type Auth = (String, String) noPostYet :: Verbosity -> URI -> String -> Maybe (String, String) -> IO (Int, String) noPostYet verbosity _ _ _ = die' verbosity "Posting (for report upload) is not implemented yet" supportedTransports :: [(String, Maybe Program, Bool, ProgramDb -> Maybe HttpTransport)] supportedTransports = [ let prog = simpleProgram "curl" in ( "curl", Just prog, True , \db -> curlTransport <$> lookupProgram prog db ) , let prog = simpleProgram "wget" in ( "wget", Just prog, True , \db -> wgetTransport <$> lookupProgram prog db ) , let prog = simpleProgram "powershell" in ( "powershell", Just prog, True , \db -> powershellTransport <$> lookupProgram prog db ) , ( "plain-http", Nothing, False , \_ -> Just plainHttpTransport ) ] configureTransport :: Verbosity -> [FilePath] -> Maybe String -> IO HttpTransport configureTransport verbosity extraPath (Just name) = -- the user specifically selected a transport by name so we'll try and -- configure that one case find (\(name',_,_,_) -> name' == name) supportedTransports of Just (_, mprog, _tls, mkTrans) -> do let baseProgDb = modifyProgramSearchPath (\p -> map ProgramSearchPathDir extraPath ++ p) emptyProgramDb progdb <- case mprog of Nothing -> return emptyProgramDb Just prog -> snd <$> requireProgram verbosity prog baseProgDb -- ^^ if it fails, it'll fail here let transport = fromMaybe (error "configureTransport: failed to make transport") $ mkTrans progdb return transport { transportManuallySelected = True } Nothing -> die' verbosity $ "Unknown HTTP transport specified: " ++ name ++ ". 
The supported transports are " ++ intercalate ", " [ name' | (name', _, _, _ ) <- supportedTransports ] configureTransport verbosity extraPath Nothing = do -- the user hasn't selected a transport, so we'll pick the first one we -- can configure successfully, provided that it supports tls -- for all the transports except plain-http we need to try and find -- their external executable let baseProgDb = modifyProgramSearchPath (\p -> map ProgramSearchPathDir extraPath ++ p) emptyProgramDb progdb <- configureAllKnownPrograms verbosity $ addKnownPrograms [ prog | (_, Just prog, _, _) <- supportedTransports ] baseProgDb let availableTransports = [ (name, transport) | (name, _, _, mkTrans) <- supportedTransports , transport <- maybeToList (mkTrans progdb) ] let (name, transport) = fromMaybe ("plain-http", plainHttpTransport) (safeHead availableTransports) debug verbosity $ "Selected http transport implementation: " ++ name return transport { transportManuallySelected = False } ------------------------------------------------------------------------------ -- The HttpTransports based on external programs -- curlTransport :: ConfiguredProgram -> HttpTransport curlTransport prog = HttpTransport gethttp posthttp posthttpfile puthttpfile True False where gethttp verbosity uri etag destPath reqHeaders = do withTempFile (takeDirectory destPath) "curl-headers.txt" $ \tmpFile tmpHandle -> do hClose tmpHandle let args = [ show uri , "--output", destPath , "--location" , "--write-out", "%{http_code}" , "--user-agent", userAgent , "--silent", "--show-error" , "--dump-header", tmpFile ] ++ concat [ ["--header", "If-None-Match: " ++ t] | t <- maybeToList etag ] ++ concat [ ["--header", show name ++ ": " ++ value] | Header name value <- reqHeaders ] resp <- getProgramInvocationOutput verbosity $ addAuthConfig Nothing uri (programInvocation prog args) withFile tmpFile ReadMode $ \hnd -> do headers <- hGetContents hnd (code, _err, etag') <- parseResponse verbosity uri resp headers evaluate 
$ force (code, etag') posthttp = noPostYet addAuthConfig explicitAuth uri progInvocation = do -- attempt to derive a u/p pair from the uri authority if one exists -- all `uriUserInfo` values have '@' as a suffix. drop it. let uriDerivedAuth = case uriAuthority uri of (Just (URIAuth u _ _)) | not (null u) -> Just $ filter (/= '@') u _ -> Nothing -- prefer passed in auth to auth derived from uri. If neither exist, then no auth let mbAuthString = case (explicitAuth, uriDerivedAuth) of (Just (uname, passwd), _) -> Just (uname ++ ":" ++ passwd) (Nothing, Just a) -> Just a (Nothing, Nothing) -> Nothing case mbAuthString of Just up -> progInvocation { progInvokeInput = Just . IODataText . unlines $ [ "--digest" , "--user " ++ up ] , progInvokeArgs = ["--config", "-"] ++ progInvokeArgs progInvocation } Nothing -> progInvocation posthttpfile verbosity uri path auth = do let args = [ show uri , "--form", "package=@"++path , "--write-out", "\n%{http_code}" , "--user-agent", userAgent , "--silent", "--show-error" , "--header", "Accept: text/plain" , "--location" ] resp <- getProgramInvocationOutput verbosity $ addAuthConfig auth uri (programInvocation prog args) (code, err, _etag) <- parseResponse verbosity uri resp "" return (code, err) puthttpfile verbosity uri path auth headers = do let args = [ show uri , "--request", "PUT", "--data-binary", "@"++path , "--write-out", "\n%{http_code}" , "--user-agent", userAgent , "--silent", "--show-error" , "--location" , "--header", "Accept: text/plain" ] ++ concat [ ["--header", show name ++ ": " ++ value] | Header name value <- headers ] resp <- getProgramInvocationOutput verbosity $ addAuthConfig auth uri (programInvocation prog args) (code, err, _etag) <- parseResponse verbosity uri resp "" return (code, err) -- on success these curl invocations produces an output like "200" -- and on failure it has the server error response first parseResponse :: Verbosity -> URI -> String -> String -> IO (Int, String, Maybe ETag) parseResponse 
verbosity uri resp headers = let codeerr = case reverse (lines resp) of (codeLine:rerrLines) -> case readMaybe (trim codeLine) of Just i -> let errstr = mkErrstr rerrLines in Just (i, errstr) Nothing -> Nothing [] -> Nothing mkErrstr = unlines . reverse . dropWhile (all isSpace) mb_etag :: Maybe ETag mb_etag = listToMaybe $ reverse [ etag | ["ETag:", etag] <- map words (lines headers) ] in case codeerr of Just (i, err) -> return (i, err, mb_etag) _ -> statusParseFail verbosity uri resp wgetTransport :: ConfiguredProgram -> HttpTransport wgetTransport prog = HttpTransport gethttp posthttp posthttpfile puthttpfile True False where gethttp verbosity uri etag destPath reqHeaders = do resp <- runWGet verbosity uri args -- wget doesn't support range requests. -- so, we not only ignore range request headers, -- but we also display a warning message when we see them. let hasRangeHeader = any isRangeHeader reqHeaders warningMsg = "the 'wget' transport currently doesn't support" ++ " range requests, which wastes network bandwidth." ++ " To fix this, set 'http-transport' to 'curl' or" ++ " 'plain-http' in '~/.cabal/config'." ++ " Note that the 'plain-http' transport doesn't" ++ " support HTTPS.\n" when (hasRangeHeader) $ warn verbosity warningMsg (code, etag') <- parseOutput verbosity uri resp return (code, etag') where args = [ "--output-document=" ++ destPath , "--user-agent=" ++ userAgent , "--tries=5" , "--timeout=15" , "--server-response" ] ++ concat [ ["--header", "If-None-Match: " ++ t] | t <- maybeToList etag ] ++ [ "--header=" ++ show name ++ ": " ++ value | hdr@(Header name value) <- reqHeaders , (not (isRangeHeader hdr)) ] -- wget doesn't support range requests. -- so, we ignore range request headers, lest we get errors. 
isRangeHeader :: Header -> Bool isRangeHeader (Header HdrRange _) = True isRangeHeader _ = False posthttp = noPostYet posthttpfile verbosity uri path auth = withTempFile (takeDirectory path) (takeFileName path) $ \tmpFile tmpHandle -> withTempFile (takeDirectory path) "response" $ \responseFile responseHandle -> do hClose responseHandle (body, boundary) <- generateMultipartBody path LBS.hPut tmpHandle body hClose tmpHandle let args = [ "--post-file=" ++ tmpFile , "--user-agent=" ++ userAgent , "--server-response" , "--output-document=" ++ responseFile , "--header=Accept: text/plain" , "--header=Content-type: multipart/form-data; " ++ "boundary=" ++ boundary ] out <- runWGet verbosity (addUriAuth auth uri) args (code, _etag) <- parseOutput verbosity uri out withFile responseFile ReadMode $ \hnd -> do resp <- hGetContents hnd evaluate $ force (code, resp) puthttpfile verbosity uri path auth headers = withTempFile (takeDirectory path) "response" $ \responseFile responseHandle -> do hClose responseHandle let args = [ "--method=PUT", "--body-file="++path , "--user-agent=" ++ userAgent , "--server-response" , "--output-document=" ++ responseFile , "--header=Accept: text/plain" ] ++ [ "--header=" ++ show name ++ ": " ++ value | Header name value <- headers ] out <- runWGet verbosity (addUriAuth auth uri) args (code, _etag) <- parseOutput verbosity uri out withFile responseFile ReadMode $ \hnd -> do resp <- hGetContents hnd evaluate $ force (code, resp) addUriAuth Nothing uri = uri addUriAuth (Just (user, pass)) uri = uri { uriAuthority = Just a { uriUserInfo = user ++ ":" ++ pass ++ "@" } } where a = fromMaybe (URIAuth "" "" "") (uriAuthority uri) runWGet verbosity uri args = do -- We pass the URI via STDIN because it contains the users' credentials -- and sensitive data should not be passed via command line arguments. 
let invocation = (programInvocation prog ("--input-file=-" : args)) { progInvokeInput = Just $ IODataText $ uriToString id uri "" } -- wget returns its output on stderr rather than stdout (_, resp, exitCode) <- getProgramInvocationOutputAndErrors verbosity invocation -- wget returns exit code 8 for server "errors" like "304 not modified" if exitCode == ExitSuccess || exitCode == ExitFailure 8 then return resp else die' verbosity $ "'" ++ programPath prog ++ "' exited with an error:\n" ++ resp -- With the --server-response flag, wget produces output with the full -- http server response with all headers, we want to find a line like -- "HTTP/1.1 200 OK", but only the last one, since we can have multiple -- requests due to redirects. parseOutput verbosity uri resp = let parsedCode = listToMaybe [ code | (protocol:codestr:_err) <- map words (reverse (lines resp)) , "HTTP/" `isPrefixOf` protocol , code <- maybeToList (readMaybe codestr) ] mb_etag :: Maybe ETag mb_etag = listToMaybe [ etag | ["ETag:", etag] <- map words (reverse (lines resp)) ] in case parsedCode of Just i -> return (i, mb_etag) _ -> statusParseFail verbosity uri resp powershellTransport :: ConfiguredProgram -> HttpTransport powershellTransport prog = HttpTransport gethttp posthttp posthttpfile puthttpfile True False where gethttp verbosity uri etag destPath reqHeaders = do resp <- runPowershellScript verbosity $ webclientScript (escape (show uri)) (("$targetStream = New-Object -TypeName System.IO.FileStream -ArgumentList " ++ (escape destPath) ++ ", Create") :(setupHeaders ((useragentHeader : etagHeader) ++ reqHeaders))) [ "$response = $request.GetResponse()" , "$responseStream = $response.GetResponseStream()" , "$buffer = new-object byte[] 10KB" , "$count = $responseStream.Read($buffer, 0, $buffer.length)" , "while ($count -gt 0)" , "{" , " $targetStream.Write($buffer, 0, $count)" , " $count = $responseStream.Read($buffer, 0, $buffer.length)" , "}" , "Write-Host ($response.StatusCode -as [int]);" , 
"Write-Host $response.GetResponseHeader(\"ETag\").Trim('\"')" ] [ "$targetStream.Flush()" , "$targetStream.Close()" , "$targetStream.Dispose()" , "$responseStream.Dispose()" ] parseResponse resp where parseResponse :: String -> IO (HttpCode, Maybe ETag) parseResponse x = case lines $ trim x of (code:etagv:_) -> fmap (\c -> (c, Just etagv)) $ parseCode code x (code: _) -> fmap (\c -> (c, Nothing )) $ parseCode code x _ -> statusParseFail verbosity uri x parseCode :: String -> String -> IO HttpCode parseCode code x = case readMaybe code of Just i -> return i Nothing -> statusParseFail verbosity uri x etagHeader = [ Header HdrIfNoneMatch t | t <- maybeToList etag ] posthttp = noPostYet posthttpfile verbosity uri path auth = withTempFile (takeDirectory path) (takeFileName path) $ \tmpFile tmpHandle -> do (body, boundary) <- generateMultipartBody path LBS.hPut tmpHandle body hClose tmpHandle fullPath <- canonicalizePath tmpFile let contentHeader = Header HdrContentType ("multipart/form-data; boundary=" ++ boundary) resp <- runPowershellScript verbosity $ webclientScript (escape (show uri)) (setupHeaders (contentHeader : extraHeaders) ++ setupAuth auth) (uploadFileAction "POST" uri fullPath) uploadFileCleanup parseUploadResponse verbosity uri resp puthttpfile verbosity uri path auth headers = do fullPath <- canonicalizePath path resp <- runPowershellScript verbosity $ webclientScript (escape (show uri)) (setupHeaders (extraHeaders ++ headers) ++ setupAuth auth) (uploadFileAction "PUT" uri fullPath) uploadFileCleanup parseUploadResponse verbosity uri resp runPowershellScript verbosity script = do let args = [ "-InputFormat", "None" -- the default execution policy doesn't allow running -- unsigned scripts, so we need to tell powershell to bypass it , "-ExecutionPolicy", "bypass" , "-NoProfile", "-NonInteractive" , "-Command", "-" ] debug verbosity script getProgramInvocationOutput verbosity (programInvocation prog args) { progInvokeInput = Just $ IODataText $ script ++ 
"\nExit(0);" } escape = show useragentHeader = Header HdrUserAgent userAgent extraHeaders = [Header HdrAccept "text/plain", useragentHeader] setupHeaders headers = [ "$request." ++ addHeader name value | Header name value <- headers ] where addHeader header value = case header of HdrAccept -> "Accept = " ++ escape value HdrUserAgent -> "UserAgent = " ++ escape value HdrConnection -> "Connection = " ++ escape value HdrContentLength -> "ContentLength = " ++ escape value HdrContentType -> "ContentType = " ++ escape value HdrDate -> "Date = " ++ escape value HdrExpect -> "Expect = " ++ escape value HdrHost -> "Host = " ++ escape value HdrIfModifiedSince -> "IfModifiedSince = " ++ escape value HdrReferer -> "Referer = " ++ escape value HdrTransferEncoding -> "TransferEncoding = " ++ escape value HdrRange -> let (start, end) = if "bytes=" `isPrefixOf` value then case break (== '-') value' of (start', '-':end') -> (start', end') _ -> error $ "Could not decode range: " ++ value else error $ "Could not decode range: " ++ value value' = drop 6 value in "AddRange(\"bytes\", " ++ escape start ++ ", " ++ escape end ++ ");" name -> "Headers.Add(" ++ escape (show name) ++ "," ++ escape value ++ ");" setupAuth auth = [ "$request.Credentials = new-object System.Net.NetworkCredential(" ++ escape uname ++ "," ++ escape passwd ++ ",\"\");" | (uname,passwd) <- maybeToList auth ] uploadFileAction method _uri fullPath = [ "$request.Method = " ++ show method , "$requestStream = $request.GetRequestStream()" , "$fileStream = [System.IO.File]::OpenRead(" ++ escape fullPath ++ ")" , "$bufSize=10000" , "$chunk = New-Object byte[] $bufSize" , "while( $bytesRead = $fileStream.Read($chunk,0,$bufsize) )" , "{" , " $requestStream.write($chunk, 0, $bytesRead)" , " $requestStream.Flush()" , "}" , "" , "$responseStream = $request.getresponse()" , "$responseReader = new-object System.IO.StreamReader $responseStream.GetResponseStream()" , "$code = $response.StatusCode -as [int]" , "if ($code -eq 0) {" , 
" $code = 200;" , "}" , "Write-Host $code" , "Write-Host $responseReader.ReadToEnd()" ] uploadFileCleanup = [ "$fileStream.Close()" , "$requestStream.Close()" , "$responseStream.Close()" ] parseUploadResponse verbosity uri resp = case lines (trim resp) of (codeStr : message) | Just code <- readMaybe codeStr -> return (code, unlines message) _ -> statusParseFail verbosity uri resp webclientScript uri setup action cleanup = unlines [ "[Net.ServicePointManager]::SecurityProtocol = \"tls12, tls11, tls\"" , "$uri = New-Object \"System.Uri\" " ++ uri , "$request = [System.Net.HttpWebRequest]::Create($uri)" , unlines setup , "Try {" , unlines (map (" " ++) action) , "} Catch [System.Net.WebException] {" , " $exception = $_.Exception;" , " If ($exception.Status -eq " ++ "[System.Net.WebExceptionStatus]::ProtocolError) {" , " $response = $exception.Response -as [System.Net.HttpWebResponse];" , " $reader = new-object " ++ "System.IO.StreamReader($response.GetResponseStream());" , " Write-Host ($response.StatusCode -as [int]);" , " Write-Host $reader.ReadToEnd();" , " } Else {" , " Write-Host $exception.Message;" , " }" , "} Catch {" , " Write-Host $_.Exception.Message;" , "} finally {" , unlines (map (" " ++) cleanup) , "}" ] ------------------------------------------------------------------------------ -- The builtin plain HttpTransport -- plainHttpTransport :: HttpTransport plainHttpTransport = HttpTransport gethttp posthttp posthttpfile puthttpfile False False where gethttp verbosity uri etag destPath reqHeaders = do let req = Request{ rqURI = uri, rqMethod = GET, rqHeaders = [ Header HdrIfNoneMatch t | t <- maybeToList etag ] ++ reqHeaders, rqBody = LBS.empty } (_, resp) <- cabalBrowse verbosity Nothing (request req) let code = convertRspCode (rspCode resp) etag' = lookupHeader HdrETag (rspHeaders resp) -- 206 Partial Content is a normal response to a range request; see #3385. 
when (code==200 || code==206) $ writeFileAtomic destPath $ rspBody resp return (code, etag') posthttp = noPostYet posthttpfile verbosity uri path auth = do (body, boundary) <- generateMultipartBody path let headers = [ Header HdrContentType ("multipart/form-data; boundary="++boundary) , Header HdrContentLength (show (LBS8.length body)) , Header HdrAccept ("text/plain") ] req = Request { rqURI = uri, rqMethod = POST, rqHeaders = headers, rqBody = body } (_, resp) <- cabalBrowse verbosity auth (request req) return (convertRspCode (rspCode resp), rspErrorString resp) puthttpfile verbosity uri path auth headers = do body <- LBS8.readFile path let req = Request { rqURI = uri, rqMethod = PUT, rqHeaders = Header HdrContentLength (show (LBS8.length body)) : Header HdrAccept "text/plain" : headers, rqBody = body } (_, resp) <- cabalBrowse verbosity auth (request req) return (convertRspCode (rspCode resp), rspErrorString resp) convertRspCode (a,b,c) = a*100 + b*10 + c rspErrorString resp = case lookupHeader HdrContentType (rspHeaders resp) of Just contenttype | takeWhile (/= ';') contenttype == "text/plain" -> LBS8.unpack (rspBody resp) _ -> rspReason resp cabalBrowse verbosity auth act = do p <- fixupEmptyProxy <$> fetchProxy True Exception.handleJust (guard . isDoesNotExistError) (const . die' verbosity $ "Couldn't establish HTTP connection. " ++ "Possible cause: HTTP proxy server is down.") $ browse $ do setProxy p setErrHandler (warn verbosity . 
("http error: "++)) setOutHandler (debug verbosity) setUserAgent userAgent setAllowBasicAuth False setAuthorityGen (\_ _ -> return auth) act fixupEmptyProxy (Proxy uri _) | null uri = NoProxy fixupEmptyProxy p = p ------------------------------------------------------------------------------ -- Common stuff used by multiple transport impls -- userAgent :: String userAgent = concat [ "cabal-install/", prettyShow cabalInstallVersion , " (", prettyShow buildOS, "; ", prettyShow buildArch, ")" ] statusParseFail :: Verbosity -> URI -> String -> IO a statusParseFail verbosity uri r = die' verbosity $ "Failed to download " ++ show uri ++ " : " ++ "No Status Code could be parsed from response: " ++ r ------------------------------------------------------------------------------ -- Multipart stuff partially taken from cgi package. -- generateMultipartBody :: FilePath -> IO (LBS.ByteString, String) generateMultipartBody path = do content <- LBS.readFile path boundary <- genBoundary let !body = formatBody content (LBS8.pack boundary) return (body, boundary) where formatBody content boundary = LBS8.concat $ [ crlf, dd, boundary, crlf ] ++ [ LBS8.pack (show header) | header <- headers ] ++ [ crlf , content , crlf, dd, boundary, dd, crlf ] headers = [ Header (HdrCustom "Content-disposition") ("form-data; name=package; " ++ "filename=\"" ++ takeFileName path ++ "\"") , Header HdrContentType "application/x-gzip" ] crlf = LBS8.pack "\r\n" dd = LBS8.pack "--" genBoundary :: IO String genBoundary = do i <- randomRIO (0x10000000000000,0xFFFFFFFFFFFFFF) :: IO Integer return $ showHex i "" cabal-install-3.8.1.0/src/Distribution/Client/IndexUtils.hs0000644000000000000000000014605607346545000021704 0ustar0000000000000000{-# LANGUAGE LambdaCase #-} {-# LANGUAGE CPP #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE BangPatterns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE GADTs #-} 
----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.IndexUtils -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : duncan@community.haskell.org -- Stability : provisional -- Portability : portable -- -- Extra utils related to the package indexes. ----------------------------------------------------------------------------- module Distribution.Client.IndexUtils ( getIndexFileAge, getInstalledPackages, indexBaseName, Configure.getInstalledPackagesMonitorFiles, getSourcePackages, getSourcePackagesMonitorFiles, TotalIndexState, getSourcePackagesAtIndexState, ActiveRepos, filterSkippedActiveRepos, Index(..), RepoIndexState (..), PackageEntry(..), parsePackageIndex, updateRepoIndexCache, updatePackageIndexCacheFile, writeIndexTimestamp, currentIndexTimestamp, BuildTreeRefType(..), refTypeFromTypeCode, typeCodeFromRefType, -- * preferred-versions utilities preferredVersions, isPreferredVersions, parsePreferredVersionsWarnings, PreferredVersionsParseError(..) ) where import Prelude () import Distribution.Client.Compat.Prelude import qualified Codec.Archive.Tar as Tar import qualified Codec.Archive.Tar.Entry as Tar import qualified Codec.Archive.Tar.Index as Tar import qualified Distribution.Client.Tar as Tar import Distribution.Client.IndexUtils.ActiveRepos import Distribution.Client.IndexUtils.IndexState import Distribution.Client.IndexUtils.Timestamp import Distribution.Client.Types import Distribution.Verbosity import Distribution.Parsec (simpleParsecBS) import Distribution.Package ( PackageId, PackageIdentifier(..), mkPackageName , Package(..), packageVersion, packageName ) import Distribution.Types.Dependency import Distribution.Simple.PackageIndex (InstalledPackageIndex) import Distribution.PackageDescription ( GenericPackageDescription(..) 
, PackageDescription(..), emptyPackageDescription ) import Distribution.Simple.Compiler ( Compiler, PackageDBStack ) import Distribution.Simple.Program ( ProgramDb ) import qualified Distribution.Simple.Configure as Configure ( getInstalledPackages, getInstalledPackagesMonitorFiles ) import Distribution.Types.PackageName (PackageName) import Distribution.Version ( Version, VersionRange, mkVersion, intersectVersionRanges ) import Distribution.Simple.Utils ( die', warn, info, createDirectoryIfMissingVerbose, fromUTF8LBS ) import Distribution.Client.Setup ( RepoContext(..) ) import Distribution.PackageDescription.Parsec ( parseGenericPackageDescription, parseGenericPackageDescriptionMaybe ) import qualified Distribution.PackageDescription.Parsec as PackageDesc.Parse import qualified Distribution.Simple.PackageDescription as PackageDesc.Parse import Distribution.Solver.Types.PackageIndex (PackageIndex) import qualified Distribution.Solver.Types.PackageIndex as PackageIndex import Distribution.Solver.Types.SourcePackage import Data.Either ( rights ) import qualified Data.Map as Map import qualified Data.Set as Set import Control.Exception import Data.List (stripPrefix) import qualified Data.ByteString.Lazy as BS import qualified Data.ByteString.Char8 as BSS import Data.ByteString.Lazy (ByteString) import Distribution.Client.GZipUtils (maybeDecompress) import Distribution.Client.Utils ( byteStringToFilePath , tryFindAddSourcePackageDesc ) import Distribution.Utils.Structured (Structured (..), nominalStructure, structuredEncodeFile, structuredDecodeFileOrFail) import Distribution.Compat.Time (getFileAge, getModTime) import System.Directory (doesFileExist, doesDirectoryExist) import System.FilePath ( (), (<.>), takeFileName, takeExtension, replaceExtension, splitDirectories, normalise, takeDirectory ) import qualified System.FilePath.Posix as FilePath.Posix import System.IO import System.IO.Unsafe (unsafeInterleaveIO) import System.IO.Error (isDoesNotExistError) import 
Distribution.Compat.Directory (listDirectory) import Distribution.Utils.Generic (fstOf3) import qualified Codec.Compression.GZip as GZip import qualified Hackage.Security.Client as Sec import qualified Hackage.Security.Util.Some as Sec -- | Reduced-verbosity version of 'Configure.getInstalledPackages' getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack -> ProgramDb -> IO InstalledPackageIndex getInstalledPackages verbosity comp packageDbs progdb = Configure.getInstalledPackages verbosity' comp packageDbs progdb where verbosity' = lessVerbose verbosity -- | Get filename base (i.e. without file extension) for index-related files -- -- /Secure/ cabal repositories use a new extended & incremental -- @01-index.tar@. In order to avoid issues resulting from clobbering -- new/old-style index data, we save them locally to different names. -- -- Example: Use @indexBaseName repo <.> "tar.gz"@ to compute the 'FilePath' of the -- @00-index.tar.gz@/@01-index.tar.gz@ file. indexBaseName :: Repo -> FilePath indexBaseName repo = repoLocalDir repo fn where fn = case repo of RepoSecure {} -> "01-index" RepoRemote {} -> "00-index" RepoLocalNoIndex {} -> "noindex" ------------------------------------------------------------------------ -- Reading the source package index -- -- Note: 'data IndexState' is defined in -- "Distribution.Client.IndexUtils.Timestamp" to avoid import cycles -- | 'IndexStateInfo' contains meta-information about the resulting -- filtered 'Cache' 'after applying 'filterCache' according to a -- requested 'IndexState'. data IndexStateInfo = IndexStateInfo { isiMaxTime :: !Timestamp -- ^ 'Timestamp' of maximum/latest 'Timestamp' in the current -- filtered view of the cache. -- -- The following property holds -- -- > filterCache (IndexState (isiMaxTime isi)) cache == (cache, isi) -- , isiHeadTime :: !Timestamp -- ^ 'Timestamp' equivalent to 'IndexStateHead', i.e. the latest -- known 'Timestamp'; 'isiHeadTime' is always greater or equal to -- 'isiMaxTime'. 
} emptyStateInfo :: IndexStateInfo emptyStateInfo = IndexStateInfo nullTimestamp nullTimestamp -- | Filters a 'Cache' according to an 'IndexState' -- specification. Also returns 'IndexStateInfo' describing the -- resulting index cache. -- -- Note: 'filterCache' is idempotent in the 'Cache' value filterCache :: RepoIndexState -> Cache -> (Cache, IndexStateInfo) filterCache IndexStateHead cache = (cache, IndexStateInfo{..}) where isiMaxTime = cacheHeadTs cache isiHeadTime = cacheHeadTs cache filterCache (IndexStateTime ts0) cache0 = (cache, IndexStateInfo{..}) where cache = Cache { cacheEntries = ents, cacheHeadTs = isiMaxTime } isiHeadTime = cacheHeadTs cache0 isiMaxTime = maximumTimestamp (map cacheEntryTimestamp ents) ents = filter ((<= ts0) . cacheEntryTimestamp) (cacheEntries cache0) -- | Read a repository index from disk, from the local files specified by -- a list of 'Repo's. -- -- All the 'SourcePackage's are marked as having come from the appropriate -- 'Repo'. -- -- This is a higher level wrapper used internally in cabal-install. getSourcePackages :: Verbosity -> RepoContext -> IO SourcePackageDb getSourcePackages verbosity repoCtxt = fstOf3 <$> getSourcePackagesAtIndexState verbosity repoCtxt Nothing Nothing -- | Variant of 'getSourcePackages' which allows getting the source -- packages at a particular 'IndexState'. -- -- Current choices are either the latest (aka HEAD), or the index as -- it was at a particular time. -- -- Returns also the total index where repositories' -- RepoIndexState's are not HEAD. This is used in v2-freeze. 
-- getSourcePackagesAtIndexState :: Verbosity -> RepoContext -> Maybe TotalIndexState -> Maybe ActiveRepos -> IO (SourcePackageDb, TotalIndexState, ActiveRepos) getSourcePackagesAtIndexState verbosity repoCtxt _ _ | null (repoContextRepos repoCtxt) = do -- In the test suite, we routinely don't have any remote package -- servers, so don't bleat about it warn (verboseUnmarkOutput verbosity) $ "No remote package servers have been specified. Usually " ++ "you would have one specified in the config file." return (SourcePackageDb { packageIndex = mempty, packagePreferences = mempty }, headTotalIndexState, ActiveRepos []) getSourcePackagesAtIndexState verbosity repoCtxt mb_idxState mb_activeRepos = do let describeState IndexStateHead = "most recent state" describeState (IndexStateTime time) = "historical state as of " ++ prettyShow time pkgss <- for (repoContextRepos repoCtxt) $ \r -> do let rname :: RepoName rname = repoName r info verbosity ("Reading available packages of " ++ unRepoName rname ++ "...") idxState <- case mb_idxState of Just totalIdxState -> do let idxState = lookupIndexState rname totalIdxState info verbosity $ "Using " ++ describeState idxState ++ " as explicitly requested (via command line / project configuration)" return idxState Nothing -> do mb_idxState' <- readIndexTimestamp verbosity (RepoIndex repoCtxt r) case mb_idxState' of Nothing -> do info verbosity "Using most recent state (could not read timestamp file)" return IndexStateHead Just idxState -> do info verbosity $ "Using " ++ describeState idxState ++ " specified from most recent cabal update" return idxState unless (idxState == IndexStateHead) $ case r of RepoLocalNoIndex {} -> warn verbosity "index-state ignored for file+noindex repositories" RepoRemote {} -> warn verbosity ("index-state ignored for old-format (remote repository '" ++ unRepoName rname ++ "')") RepoSecure {} -> pure () let idxState' = case r of RepoSecure {} -> idxState _ -> IndexStateHead (pis,deps,isi) <- readRepoIndex 
verbosity repoCtxt r idxState' case idxState' of IndexStateHead -> do info verbosity ("index-state("++ unRepoName rname ++") = " ++ prettyShow (isiHeadTime isi)) return () IndexStateTime ts0 -> do when (isiMaxTime isi /= ts0) $ if ts0 > isiMaxTime isi then warn verbosity $ "Requested index-state " ++ prettyShow ts0 ++ " is newer than '" ++ unRepoName rname ++ "'!" ++ " Falling back to older state (" ++ prettyShow (isiMaxTime isi) ++ ")." else info verbosity $ "Requested index-state " ++ prettyShow ts0 ++ " does not exist in '"++ unRepoName rname ++"'!" ++ " Falling back to older state (" ++ prettyShow (isiMaxTime isi) ++ ")." info verbosity ("index-state("++ unRepoName rname ++") = " ++ prettyShow (isiMaxTime isi) ++ " (HEAD = " ++ prettyShow (isiHeadTime isi) ++ ")") pure RepoData { rdRepoName = rname , rdTimeStamp = isiMaxTime isi , rdIndex = pis , rdPreferences = deps } let activeRepos :: ActiveRepos activeRepos = fromMaybe defaultActiveRepos mb_activeRepos pkgss' <- case organizeByRepos activeRepos rdRepoName pkgss of Right x -> return x Left err -> warn verbosity err >> return (map (\x -> (x, CombineStrategyMerge)) pkgss) let activeRepos' :: ActiveRepos activeRepos' = ActiveRepos [ ActiveRepo (rdRepoName rd) strategy | (rd, strategy) <- pkgss' ] let totalIndexState :: TotalIndexState totalIndexState = makeTotalIndexState IndexStateHead $ Map.fromList [ (n, IndexStateTime ts) | (RepoData n ts _idx _prefs, _strategy) <- pkgss' -- e.g. 
file+noindex have nullTimestamp as their timestamp , ts /= nullTimestamp ] let addIndex :: PackageIndex UnresolvedSourcePackage -> (RepoData, CombineStrategy) -> PackageIndex UnresolvedSourcePackage addIndex acc (RepoData _ _ _ _, CombineStrategySkip) = acc addIndex acc (RepoData _ _ idx _, CombineStrategyMerge) = PackageIndex.merge acc idx addIndex acc (RepoData _ _ idx _, CombineStrategyOverride) = PackageIndex.override acc idx let pkgs :: PackageIndex UnresolvedSourcePackage pkgs = foldl' addIndex mempty pkgss' -- Note: preferences combined without using CombineStrategy let prefs :: Map PackageName VersionRange prefs = Map.fromListWith intersectVersionRanges [ (name, range) | (RepoData _n _ts _idx prefs', _strategy) <- pkgss' , Dependency name range _ <- prefs' ] _ <- evaluate pkgs _ <- evaluate prefs _ <- evaluate totalIndexState return (SourcePackageDb { packageIndex = pkgs, packagePreferences = prefs }, totalIndexState, activeRepos') -- auxiliary data used in getSourcePackagesAtIndexState data RepoData = RepoData { rdRepoName :: RepoName , rdTimeStamp :: Timestamp , rdIndex :: PackageIndex UnresolvedSourcePackage , rdPreferences :: [Dependency] } -- | Read a repository index from disk, from the local file specified by -- the 'Repo'. -- -- All the 'SourcePackage's are marked as having come from the given 'Repo'. -- -- This is a higher level wrapper used internally in cabal-install. -- readRepoIndex :: Verbosity -> RepoContext -> Repo -> RepoIndexState -> IO (PackageIndex UnresolvedSourcePackage, [Dependency], IndexStateInfo) readRepoIndex verbosity repoCtxt repo idxState = handleNotFound $ do when (isRepoRemote repo) $ warnIfIndexIsOld =<< getIndexFileAge repo -- note that if this step fails due to a bad repo cache, the the procedure can still succeed by reading from the existing cache, which is updated regardless. 
updateRepoIndexCache verbosity (RepoIndex repoCtxt repo) `catchIO` (\e -> warn verbosity $ "unable to update the repo index cache -- " ++ displayException e) readPackageIndexCacheFile verbosity mkAvailablePackage (RepoIndex repoCtxt repo) idxState where mkAvailablePackage pkgEntry = SourcePackage { srcpkgPackageId = pkgid , srcpkgDescription = pkgdesc , srcpkgSource = case pkgEntry of NormalPackage _ _ _ _ -> RepoTarballPackage repo pkgid Nothing BuildTreeRef _ _ _ path _ -> LocalUnpackedPackage path , srcpkgDescrOverride = case pkgEntry of NormalPackage _ _ pkgtxt _ -> Just pkgtxt _ -> Nothing } where pkgdesc = packageDesc pkgEntry pkgid = packageId pkgEntry handleNotFound action = catchIO action $ \e -> if isDoesNotExistError e then do case repo of RepoRemote{..} -> warn verbosity $ errMissingPackageList repoRemote RepoSecure{..} -> warn verbosity $ errMissingPackageList repoRemote RepoLocalNoIndex local _ -> warn verbosity $ "Error during construction of local+noindex " ++ unRepoName (localRepoName local) ++ " repository index: " ++ show e return (mempty,mempty,emptyStateInfo) else ioError e isOldThreshold = 15 --days warnIfIndexIsOld dt = do when (dt >= isOldThreshold) $ case repo of RepoRemote{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt RepoSecure{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt RepoLocalNoIndex {} -> return () errMissingPackageList repoRemote = "The package list for '" ++ unRepoName (remoteRepoName repoRemote) ++ "' does not exist. Run 'cabal update' to download it." errOutdatedPackageList repoRemote dt = "The package list for '" ++ unRepoName (remoteRepoName repoRemote) ++ "' is " ++ shows (floor dt :: Int) " days old.\nRun " ++ "'cabal update' to get the latest list of available packages." -- | Return the age of the index file in days (as a Double). 
getIndexFileAge :: Repo -> IO Double getIndexFileAge repo = getFileAge $ indexBaseName repo <.> "tar" -- | A set of files (or directories) that can be monitored to detect when -- there might have been a change in the source packages. -- getSourcePackagesMonitorFiles :: [Repo] -> [FilePath] getSourcePackagesMonitorFiles repos = concat [ [ indexBaseName repo <.> "cache" , indexBaseName repo <.> "timestamp" ] | repo <- repos ] -- | It is not necessary to call this, as the cache will be updated when the -- index is read normally. However you can do the work earlier if you like. -- updateRepoIndexCache :: Verbosity -> Index -> IO () updateRepoIndexCache verbosity index = whenCacheOutOfDate index $ updatePackageIndexCacheFile verbosity index whenCacheOutOfDate :: Index -> IO () -> IO () whenCacheOutOfDate index action = do exists <- doesFileExist $ cacheFile index if not exists then action else if localNoIndex index then return () -- TODO: don't update cache for local+noindex repositories else do indexTime <- getModTime $ indexFile index cacheTime <- getModTime $ cacheFile index when (indexTime > cacheTime) action localNoIndex :: Index -> Bool localNoIndex (RepoIndex _ (RepoLocalNoIndex {})) = True localNoIndex _ = False ------------------------------------------------------------------------ -- Reading the index file -- -- | An index entry is either a normal package, or a local build tree reference. data PackageEntry = NormalPackage PackageId GenericPackageDescription ByteString BlockNo | BuildTreeRef BuildTreeRefType PackageId GenericPackageDescription FilePath BlockNo -- | A build tree reference is either a link or a snapshot. 
data BuildTreeRefType = SnapshotRef | LinkRef deriving (Eq,Show,Generic) instance Binary BuildTreeRefType instance Structured BuildTreeRefType refTypeFromTypeCode :: Tar.TypeCode -> BuildTreeRefType refTypeFromTypeCode t | t == Tar.buildTreeRefTypeCode = LinkRef | t == Tar.buildTreeSnapshotTypeCode = SnapshotRef | otherwise = error "Distribution.Client.IndexUtils.refTypeFromTypeCode: unknown type code" typeCodeFromRefType :: BuildTreeRefType -> Tar.TypeCode typeCodeFromRefType LinkRef = Tar.buildTreeRefTypeCode typeCodeFromRefType SnapshotRef = Tar.buildTreeSnapshotTypeCode instance Package PackageEntry where packageId (NormalPackage pkgid _ _ _) = pkgid packageId (BuildTreeRef _ pkgid _ _ _) = pkgid packageDesc :: PackageEntry -> GenericPackageDescription packageDesc (NormalPackage _ descr _ _) = descr packageDesc (BuildTreeRef _ _ descr _ _) = descr -- | Parse an uncompressed \"00-index.tar\" repository index file represented -- as a 'ByteString'. -- data PackageOrDep = Pkg PackageEntry | Dep Dependency -- | Read @00-index.tar.gz@ and extract @.cabal@ and @preferred-versions@ files -- -- We read the index using 'Tar.read', which gives us a lazily constructed -- 'TarEntries'. We translate it to a list of entries using 'tarEntriesList', -- which preserves the lazy nature of 'TarEntries', and finally 'concatMap' a -- function over this to translate it to a list of IO actions returning -- 'PackageOrDep's. We can use 'lazySequence' to turn this into a list of -- 'PackageOrDep's, still maintaining the lazy nature of the original tar read. parsePackageIndex :: Verbosity -> ByteString -> [IO (Maybe PackageOrDep)] parsePackageIndex verbosity = concatMap (uncurry extract) . tarEntriesList . 
Tar.read where extract :: BlockNo -> Tar.Entry -> [IO (Maybe PackageOrDep)] extract blockNo entry = tryExtractPkg ++ tryExtractPrefs where tryExtractPkg = do mkPkgEntry <- maybeToList $ extractPkg verbosity entry blockNo return $ fmap (fmap Pkg) mkPkgEntry tryExtractPrefs = do prefs' <- maybeToList $ extractPrefs entry fmap (return . Just . Dep) prefs' -- | Turn the 'Entries' data structure from the @tar@ package into a list, -- and pair each entry with its block number. -- -- NOTE: This preserves the lazy nature of 'Entries': the tar file is only read -- as far as the list is evaluated. tarEntriesList :: Show e => Tar.Entries e -> [(BlockNo, Tar.Entry)] tarEntriesList = go 0 where go !_ Tar.Done = [] go !_ (Tar.Fail e) = error ("tarEntriesList: " ++ show e) go !n (Tar.Next e es') = (n, e) : go (Tar.nextEntryOffset e n) es' extractPkg :: Verbosity -> Tar.Entry -> BlockNo -> Maybe (IO (Maybe PackageEntry)) extractPkg verbosity entry blockNo = case Tar.entryContent entry of Tar.NormalFile content _ | takeExtension fileName == ".cabal" -> case splitDirectories (normalise fileName) of [pkgname,vers,_] -> case simpleParsec vers of Just ver -> Just . return $ Just (NormalPackage pkgid descr content blockNo) where pkgid = PackageIdentifier (mkPackageName pkgname) ver parsed = parseGenericPackageDescriptionMaybe (BS.toStrict content) descr = case parsed of Just d -> d Nothing -> error $ "Couldn't read cabal file " ++ show fileName _ -> Nothing _ -> Nothing Tar.OtherEntryType typeCode content _ | Tar.isBuildTreeRefTypeCode typeCode -> Just $ do let path = byteStringToFilePath content dirExists <- doesDirectoryExist path result <- if not dirExists then return Nothing else do cabalFile <- tryFindAddSourcePackageDesc verbosity path "Error reading package index." descr <- PackageDesc.Parse.readGenericPackageDescription normal cabalFile return . 
Just $ BuildTreeRef (refTypeFromTypeCode typeCode) (packageId descr) descr path blockNo return result _ -> Nothing where fileName = Tar.entryPath entry extractPrefs :: Tar.Entry -> Maybe [Dependency] extractPrefs entry = case Tar.entryContent entry of Tar.NormalFile content _ | isPreferredVersions entrypath -> Just prefs where entrypath = Tar.entryPath entry prefs = parsePreferredVersions content _ -> Nothing ------------------------------------------------------------------------ -- Filename and parsers for 'preferred-versions' file. -- -- | Expected name of the 'preferred-versions' file. -- -- Contains special constraints, such as a preferred version of a package -- or deprecations of certain package versions. -- -- Expected format: -- -- @ -- binary > 0.9.0.0 || < 0.9.0.0 -- text == 1.2.1.0 -- @ preferredVersions :: FilePath preferredVersions = "preferred-versions" -- | Does the given filename match with the expected name of 'preferred-versions'? isPreferredVersions :: FilePath -> Bool isPreferredVersions = (== preferredVersions) . takeFileName -- | Parse `preferred-versions` file, ignoring any parse failures. -- -- To obtain parse errors, use 'parsePreferredVersionsWarnings'. parsePreferredVersions :: ByteString -> [Dependency] parsePreferredVersions = rights . parsePreferredVersionsWarnings -- | Parser error of the `preferred-versions` file. data PreferredVersionsParseError = PreferredVersionsParseError { preferredVersionsParsecError :: String -- ^ Parser error to show to a user. , preferredVersionsOriginalDependency :: String -- ^ Original input that produced the parser error. } deriving (Generic, Read, Show, Eq, Ord, Typeable) -- | Parse `preferred-versions` file, collecting parse errors that can be shown -- in error messages. parsePreferredVersionsWarnings :: ByteString -> [Either PreferredVersionsParseError Dependency] parsePreferredVersionsWarnings = map parsePreference . filter (not . isPrefixOf "--") . lines . 
fromUTF8LBS where parsePreference :: String -> Either PreferredVersionsParseError Dependency parsePreference s = case eitherParsec s of Left err -> Left $ PreferredVersionsParseError { preferredVersionsParsecError = err , preferredVersionsOriginalDependency = s } Right dep -> Right dep ------------------------------------------------------------------------ -- Reading and updating the index cache -- -- | Variation on 'sequence' which evaluates the actions lazily -- -- Pattern matching on the result list will execute just the first action; -- more generally pattern matching on the first @n@ '(:)' nodes will execute -- the first @n@ actions. lazySequence :: [IO a] -> IO [a] lazySequence = unsafeInterleaveIO . go where go [] = return [] go (x:xs) = do x' <- x xs' <- lazySequence xs return (x' : xs') -- | A lazy unfolder for lookup operations which return the current -- value and (possibly) the next key lazyUnfold :: (k -> IO (v, Maybe k)) -> k -> IO [(k,v)] lazyUnfold step = goLazy . Just where goLazy s = unsafeInterleaveIO (go s) go Nothing = return [] go (Just k) = do (v, mk') <- step k vs' <- goLazy mk' return ((k,v):vs') -- | Which index do we mean? 
data Index = -- | The main index for the specified repository RepoIndex RepoContext Repo -- | A sandbox-local repository -- Argument is the location of the index file | SandboxIndex FilePath indexFile :: Index -> FilePath indexFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "tar" indexFile (SandboxIndex index) = index cacheFile :: Index -> FilePath cacheFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "cache" cacheFile (SandboxIndex index) = index `replaceExtension` "cache" timestampFile :: Index -> FilePath timestampFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "timestamp" timestampFile (SandboxIndex index) = index `replaceExtension` "timestamp" -- | Return 'True' if 'Index' uses 01-index format (aka secure repo) is01Index :: Index -> Bool is01Index (RepoIndex _ repo) = case repo of RepoSecure {} -> True RepoRemote {} -> False RepoLocalNoIndex {} -> True is01Index (SandboxIndex _) = False updatePackageIndexCacheFile :: Verbosity -> Index -> IO () updatePackageIndexCacheFile verbosity index = do info verbosity ("Updating index cache file " ++ cacheFile index ++ " ...") withIndexEntries verbosity index callback callbackNoIndex where callback entries = do let !maxTs = maximumTimestamp (map cacheEntryTimestamp entries) cache = Cache { cacheHeadTs = maxTs , cacheEntries = entries } writeIndexCache index cache info verbosity ("Index cache updated to index-state " ++ prettyShow (cacheHeadTs cache)) callbackNoIndex entries = do writeNoIndexCache verbosity index $ NoIndexCache entries info verbosity "Index cache updated" -- | Read the index (for the purpose of building a cache) -- -- The callback is provided with list of cache entries, which is guaranteed to -- be lazily constructed. This list must ONLY be used in the scope of the -- callback; when the callback is terminated the file handle to the index will -- be closed and further attempts to read from the list will result in (pure) -- I/O exceptions. 
-- -- In the construction of the index for a secure repo we take advantage of the -- index built by the @hackage-security@ library to avoid reading the @.tar@ -- file as much as possible (we need to read it only to extract preferred -- versions). This helps performance, but is also required for correctness: -- the new @01-index.tar.gz@ may have multiple versions of preferred-versions -- files, and 'parsePackageIndex' does not correctly deal with that (see #2956); -- by reading the already-built cache from the security library we will be sure -- to only read the latest versions of all files. -- -- TODO: It would be nicer if we actually incrementally updated @cabal@'s -- cache, rather than reconstruct it from zero on each update. However, this -- would require a change in the cache format. withIndexEntries :: Verbosity -> Index -> ([IndexCacheEntry] -> IO a) -> ([NoIndexCacheEntry] -> IO a) -> IO a withIndexEntries _ (RepoIndex repoCtxt repo@RepoSecure{}) callback _ = repoContextWithSecureRepo repoCtxt repo $ \repoSecure -> Sec.withIndex repoSecure $ \Sec.IndexCallbacks{..} -> do -- Incrementally (lazily) read all the entries in the tar file in order, -- including all revisions, not just the last revision of each file indexEntries <- lazyUnfold indexLookupEntry (Sec.directoryFirst indexDirectory) callback [ cacheEntry | (dirEntry, indexEntry) <- indexEntries , cacheEntry <- toCacheEntries dirEntry indexEntry ] where toCacheEntries :: Sec.DirectoryEntry -> Sec.Some Sec.IndexEntry -> [IndexCacheEntry] toCacheEntries dirEntry (Sec.Some sie) = case Sec.indexEntryPathParsed sie of Nothing -> [] -- skip unrecognized file Just (Sec.IndexPkgMetadata _pkgId) -> [] -- skip metadata Just (Sec.IndexPkgCabal pkgId) -> force [CachePackageId pkgId blockNo timestamp] Just (Sec.IndexPkgPrefs _pkgName) -> force [ CachePreference dep blockNo timestamp | dep <- parsePreferredVersions (Sec.indexEntryContent sie) ] where blockNo = Sec.directoryEntryBlockNo dirEntry timestamp = fromMaybe 
(error "withIndexEntries: invalid timestamp") $ epochTimeToTimestamp $ Sec.indexEntryTime sie withIndexEntries verbosity (RepoIndex _repoCtxt (RepoLocalNoIndex (LocalRepo name localDir _) _cacheDir)) _ callback = do dirContents <- listDirectory localDir let contentSet = Set.fromList dirContents entries <- handle handler $ fmap catMaybes $ for dirContents $ \file -> do case isTarGz file of Nothing | isPreferredVersions file -> do contents <- BS.readFile (localDir file) let versionPreferencesParsed = parsePreferredVersionsWarnings contents let (warnings, versionPreferences) = partitionEithers versionPreferencesParsed unless (null warnings) $ do warn verbosity $ "withIndexEntries: failed to parse some entries of \"preferred-versions\" found at: " ++ (localDir file) for_ warnings $ \err -> do warn verbosity $ "* \"" ++ preferredVersionsOriginalDependency err warn verbosity $ "Parser Error: " ++ preferredVersionsParsecError err return $ Just $ NoIndexCachePreference versionPreferences | otherwise -> do unless (takeFileName file == "noindex.cache" || ".cabal" `isSuffixOf` file) $ info verbosity $ "Skipping " ++ file return Nothing Just pkgid | cabalPath `Set.member` contentSet -> do contents <- BSS.readFile (localDir cabalPath) for (parseGenericPackageDescriptionMaybe contents) $ \gpd -> return (CacheGPD gpd contents) where cabalPath = prettyShow pkgid ++ ".cabal" Just pkgId -> do -- check for the right named .cabal file in the compressed tarball tarGz <- BS.readFile (localDir file) let tar = GZip.decompress tarGz entries = Tar.read tar case Tar.foldEntries (readCabalEntry pkgId) Nothing (const Nothing) entries of Just ce -> return (Just ce) Nothing -> die' verbosity $ "Cannot read .cabal file inside " ++ file let (prefs, gpds) = partitionEithers $ map (\case NoIndexCachePreference deps -> Left deps CacheGPD gpd _ -> Right gpd ) entries info verbosity $ "Entries in file+noindex repository " ++ unRepoName name for_ gpds $ \gpd -> info verbosity $ "- " ++ prettyShow 
(package $ Distribution.PackageDescription.packageDescription gpd) unless (null prefs) $ do info verbosity $ "Preferred versions in file+noindex repository " ++ unRepoName name for_ (concat prefs) $ \pref -> info verbosity ("* " ++ prettyShow pref) callback entries where handler :: IOException -> IO a handler e = die' verbosity $ "Error while updating index for " ++ unRepoName name ++ " repository " ++ show e isTarGz :: FilePath -> Maybe PackageIdentifier isTarGz fp = do pfx <- stripSuffix ".tar.gz" fp simpleParsec pfx stripSuffix sfx str = fmap reverse (stripPrefix (reverse sfx) (reverse str)) -- look for /.cabal inside the tarball readCabalEntry :: PackageIdentifier -> Tar.Entry -> Maybe NoIndexCacheEntry -> Maybe NoIndexCacheEntry readCabalEntry pkgId entry Nothing | filename == Tar.entryPath entry , Tar.NormalFile contents _ <- Tar.entryContent entry = let bs = BS.toStrict contents in fmap (\gpd -> CacheGPD gpd bs) $ parseGenericPackageDescriptionMaybe bs where filename = prettyShow pkgId FilePath.Posix. 
prettyShow (packageName pkgId) ++ ".cabal" readCabalEntry _ _ x = x withIndexEntries verbosity index callback _ = do -- non-secure repositories withFile (indexFile index) ReadMode $ \h -> do bs <- maybeDecompress `fmap` BS.hGetContents h pkgsOrPrefs <- lazySequence $ parsePackageIndex verbosity bs callback $ map toCache (catMaybes pkgsOrPrefs) where toCache :: PackageOrDep -> IndexCacheEntry toCache (Pkg (NormalPackage pkgid _ _ blockNo)) = CachePackageId pkgid blockNo nullTimestamp toCache (Pkg (BuildTreeRef refType _ _ _ blockNo)) = CacheBuildTreeRef refType blockNo toCache (Dep d) = CachePreference d 0 nullTimestamp readPackageIndexCacheFile :: Package pkg => Verbosity -> (PackageEntry -> pkg) -> Index -> RepoIndexState -> IO (PackageIndex pkg, [Dependency], IndexStateInfo) readPackageIndexCacheFile verbosity mkPkg index idxState | localNoIndex index = do cache0 <- readNoIndexCache verbosity index (pkgs, prefs) <- packageNoIndexFromCache verbosity mkPkg cache0 pure (pkgs, prefs, emptyStateInfo) | otherwise = do cache0 <- readIndexCache verbosity index indexHnd <- openFile (indexFile index) ReadMode let (cache,isi) = filterCache idxState cache0 (pkgs,deps) <- packageIndexFromCache verbosity mkPkg indexHnd cache pure (pkgs,deps,isi) packageIndexFromCache :: Package pkg => Verbosity -> (PackageEntry -> pkg) -> Handle -> Cache -> IO (PackageIndex pkg, [Dependency]) packageIndexFromCache verbosity mkPkg hnd cache = do (pkgs, prefs) <- packageListFromCache verbosity mkPkg hnd cache pkgIndex <- evaluate $ PackageIndex.fromList pkgs return (pkgIndex, prefs) packageNoIndexFromCache :: forall pkg. 
Package pkg => Verbosity -> (PackageEntry -> pkg) -> NoIndexCache -> IO (PackageIndex pkg, [Dependency]) packageNoIndexFromCache _verbosity mkPkg cache = do let (pkgs, prefs) = packageListFromNoIndexCache pkgIndex <- evaluate $ PackageIndex.fromList pkgs pure (pkgIndex, prefs) where packageListFromNoIndexCache :: ([pkg], [Dependency]) packageListFromNoIndexCache = foldr go mempty (noIndexCacheEntries cache) go :: NoIndexCacheEntry -> ([pkg], [Dependency]) -> ([pkg], [Dependency]) go (CacheGPD gpd bs) (pkgs, prefs) = let pkgId = package $ Distribution.PackageDescription.packageDescription gpd in (mkPkg (NormalPackage pkgId gpd (BS.fromStrict bs) 0) : pkgs, prefs) go (NoIndexCachePreference deps) (pkgs, prefs) = (pkgs, deps ++ prefs) -- | Read package list -- -- The result package releases and preference entries are guaranteed -- to be unique. -- -- Note: 01-index.tar is an append-only index and therefore contains -- all .cabal edits and preference-updates. The masking happens -- here, i.e. the semantics that later entries in a tar file mask -- earlier ones is resolved in this function. packageListFromCache :: Verbosity -> (PackageEntry -> pkg) -> Handle -> Cache -> IO ([pkg], [Dependency]) packageListFromCache verbosity mkPkg hnd Cache{..} = accum mempty [] mempty cacheEntries where accum !srcpkgs btrs !prefs [] = return (Map.elems srcpkgs ++ btrs, Map.elems prefs) accum srcpkgs btrs prefs (CachePackageId pkgid blockno _ : entries) = do -- Given the cache entry, make a package index entry. -- The magic here is that we use lazy IO to read the .cabal file -- from the index tarball if it turns out that we need it. -- Most of the time we only need the package id. 
~(pkg, pkgtxt) <- unsafeInterleaveIO $ do pkgtxt <- getEntryContent blockno pkg <- readPackageDescription pkgid pkgtxt return (pkg, pkgtxt) let srcpkg = mkPkg (NormalPackage pkgid pkg pkgtxt blockno) accum (Map.insert pkgid srcpkg srcpkgs) btrs prefs entries accum srcpkgs btrs prefs (CacheBuildTreeRef refType blockno : entries) = do -- We have to read the .cabal file eagerly here because we can't cache the -- package id for build tree references - the user might edit the .cabal -- file after the reference was added to the index. path <- liftM byteStringToFilePath . getEntryContent $ blockno pkg <- do let err = "Error reading package index from cache." file <- tryFindAddSourcePackageDesc verbosity path err PackageDesc.Parse.readGenericPackageDescription normal file let srcpkg = mkPkg (BuildTreeRef refType (packageId pkg) pkg path blockno) accum srcpkgs (srcpkg:btrs) prefs entries accum srcpkgs btrs prefs (CachePreference pref@(Dependency pn _ _) _ _ : entries) = accum srcpkgs btrs (Map.insert pn pref prefs) entries getEntryContent :: BlockNo -> IO ByteString getEntryContent blockno = do entry <- Tar.hReadEntry hnd blockno case Tar.entryContent entry of Tar.NormalFile content _size -> return content Tar.OtherEntryType typecode content _size | Tar.isBuildTreeRefTypeCode typecode -> return content _ -> interror "unexpected tar entry type" readPackageDescription :: PackageIdentifier -> ByteString -> IO GenericPackageDescription readPackageDescription pkgid content = case snd $ PackageDesc.Parse.runParseResult $ parseGenericPackageDescription $ BS.toStrict content of Right gpd -> return gpd Left (Just specVer, _) | specVer >= mkVersion [2,2] -> return (dummyPackageDescription specVer) Left _ -> interror "failed to parse .cabal file" where dummyPackageDescription :: Version -> GenericPackageDescription dummyPackageDescription specVer = GenericPackageDescription { packageDescription = emptyPackageDescription { package = pkgid , synopsis = dummySynopsis } , 
gpdScannedVersion = Just specVer -- tells index scanner to skip this file. , genPackageFlags = [] , condLibrary = Nothing , condSubLibraries = [] , condForeignLibs = [] , condExecutables = [] , condTestSuites = [] , condBenchmarks = [] } dummySynopsis = "" interror :: String -> IO a interror msg = die' verbosity $ "internal error when reading package index: " ++ msg ++ "The package index or index cache is probably " ++ "corrupt. Running cabal update might fix it." ------------------------------------------------------------------------ -- Index cache data structure -- -- | Read the 'Index' cache from the filesystem -- -- If a corrupted index cache is detected this function regenerates -- the index cache and then reattempt to read the index once (and -- 'die's if it fails again). readIndexCache :: Verbosity -> Index -> IO Cache readIndexCache verbosity index = do cacheOrFail <- readIndexCache' index case cacheOrFail of Left msg -> do warn verbosity $ concat [ "Parsing the index cache failed (", msg, "). " , "Trying to regenerate the index cache..." ] updatePackageIndexCacheFile verbosity index either (die' verbosity) (return . hashConsCache) =<< readIndexCache' index Right res -> return (hashConsCache res) readNoIndexCache :: Verbosity -> Index -> IO NoIndexCache readNoIndexCache verbosity index = do cacheOrFail <- readNoIndexCache' index case cacheOrFail of Left msg -> do warn verbosity $ concat [ "Parsing the index cache failed (", msg, "). " , "Trying to regenerate the index cache..." ] updatePackageIndexCacheFile verbosity index either (die' verbosity) return =<< readNoIndexCache' index -- we don't hash cons local repository cache, they are hopefully small Right res -> return res -- | Read the 'Index' cache from the filesystem without attempting to -- regenerate on parsing failures. 
readIndexCache' :: Index -> IO (Either String Cache) readIndexCache' index | is01Index index = structuredDecodeFileOrFail (cacheFile index) | otherwise = liftM (Right .read00IndexCache) $ BSS.readFile (cacheFile index) readNoIndexCache' :: Index -> IO (Either String NoIndexCache) readNoIndexCache' index = structuredDecodeFileOrFail (cacheFile index) -- | Write the 'Index' cache to the filesystem writeIndexCache :: Index -> Cache -> IO () writeIndexCache index cache | is01Index index = structuredEncodeFile (cacheFile index) cache | otherwise = writeFile (cacheFile index) (show00IndexCache cache) writeNoIndexCache :: Verbosity -> Index -> NoIndexCache -> IO () writeNoIndexCache verbosity index cache = do let path = cacheFile index createDirectoryIfMissingVerbose verbosity True (takeDirectory path) structuredEncodeFile path cache -- | Write the 'IndexState' to the filesystem writeIndexTimestamp :: Index -> RepoIndexState -> IO () writeIndexTimestamp index st = writeFile (timestampFile index) (prettyShow st) -- | Read out the "current" index timestamp, i.e., what -- timestamp you would use to revert to this version currentIndexTimestamp :: Verbosity -> RepoContext -> Repo -> IO Timestamp currentIndexTimestamp verbosity repoCtxt r = do mb_is <- readIndexTimestamp verbosity (RepoIndex repoCtxt r) case mb_is of Just (IndexStateTime ts) -> return ts _ -> do (_,_,isi) <- readRepoIndex verbosity repoCtxt r IndexStateHead return (isiHeadTime isi) -- | Read the 'IndexState' from the filesystem readIndexTimestamp :: Verbosity -> Index -> IO (Maybe RepoIndexState) readIndexTimestamp verbosity index = fmap simpleParsec (readFile (timestampFile index)) `catchIO` \e -> if isDoesNotExistError e then return Nothing else do warn verbosity $ "Warning: could not read current index timestamp: " ++ displayException e return Nothing -- | Optimise sharing of equal values inside 'Cache' -- -- c.f. 
https://en.wikipedia.org/wiki/Hash_consing hashConsCache :: Cache -> Cache hashConsCache cache0 = cache0 { cacheEntries = go mempty mempty (cacheEntries cache0) } where -- TODO/NOTE: -- -- If/when we redo the binary serialisation via e.g. CBOR and we -- are able to use incremental decoding, we may want to move the -- hash-consing into the incremental deserialisation, or -- alternatively even do something like -- http://cbor.schmorp.de/value-sharing -- go _ _ [] = [] -- for now we only optimise only CachePackageIds since those -- represent the vast majority go !pns !pvs (CachePackageId pid bno ts : rest) = CachePackageId pid' bno ts : go pns' pvs' rest where !pid' = PackageIdentifier pn' pv' (!pn',!pns') = mapIntern pn pns (!pv',!pvs') = mapIntern pv pvs PackageIdentifier pn pv = pid go pns pvs (x:xs) = x : go pns pvs xs mapIntern :: Ord k => k -> Map.Map k k -> (k,Map.Map k k) mapIntern k m = maybe (k,Map.insert k k m) (\k' -> (k',m)) (Map.lookup k m) -- | Cabal caches various information about the Hackage index data Cache = Cache { cacheHeadTs :: Timestamp -- ^ maximum/latest 'Timestamp' among 'cacheEntries'; unless the -- invariant of 'cacheEntries' being in chronological order is -- violated, this corresponds to the last (seen) 'Timestamp' in -- 'cacheEntries' , cacheEntries :: [IndexCacheEntry] } deriving (Show, Generic) instance NFData Cache where rnf = rnf . cacheEntries -- | Cache format for 'file+noindex' repositories newtype NoIndexCache = NoIndexCache { noIndexCacheEntries :: [NoIndexCacheEntry] } deriving (Show, Generic) instance NFData NoIndexCache where rnf = rnf . noIndexCacheEntries -- | Tar files are block structured with 512 byte blocks. Every header and file -- content starts on a block boundary. 
-- type BlockNo = Word32 -- Tar.TarEntryOffset data IndexCacheEntry = CachePackageId PackageId !BlockNo !Timestamp | CachePreference Dependency !BlockNo !Timestamp | CacheBuildTreeRef !BuildTreeRefType !BlockNo -- NB: CacheBuildTreeRef is irrelevant for 01-index & v2-build deriving (Eq,Show,Generic) data NoIndexCacheEntry = CacheGPD GenericPackageDescription !BSS.ByteString | NoIndexCachePreference [Dependency] deriving (Eq,Show,Generic) instance NFData IndexCacheEntry where rnf (CachePackageId pkgid _ _) = rnf pkgid rnf (CachePreference dep _ _) = rnf dep rnf (CacheBuildTreeRef _ _) = () instance NFData NoIndexCacheEntry where rnf (CacheGPD gpd bs) = rnf gpd `seq` rnf bs rnf (NoIndexCachePreference dep) = rnf dep cacheEntryTimestamp :: IndexCacheEntry -> Timestamp cacheEntryTimestamp (CacheBuildTreeRef _ _) = nullTimestamp cacheEntryTimestamp (CachePreference _ _ ts) = ts cacheEntryTimestamp (CachePackageId _ _ ts) = ts ---------------------------------------------------------------------------- -- new binary 01-index.cache format instance Binary Cache instance Binary IndexCacheEntry instance Binary NoIndexCache instance Structured Cache instance Structured IndexCacheEntry instance Structured NoIndexCache -- | We need to save only .cabal file contents instance Binary NoIndexCacheEntry where put (CacheGPD _ bs) = do put (0 :: Word8) put bs put (NoIndexCachePreference dep) = do put (1 :: Word8) put dep get = do t :: Word8 <- get case t of 0 -> do bs <- get case parseGenericPackageDescriptionMaybe bs of Just gpd -> return (CacheGPD gpd bs) Nothing -> fail "Failed to parse GPD" 1 -> do dep <- get pure $ NoIndexCachePreference dep _ -> fail "Failed to parse NoIndexCacheEntry" instance Structured NoIndexCacheEntry where structure = nominalStructure ---------------------------------------------------------------------------- -- legacy 00-index.cache format packageKey, blocknoKey, buildTreeRefKey, preferredVersionKey :: String packageKey = "pkg:" blocknoKey = "b#" 
buildTreeRefKey = "build-tree-ref:" preferredVersionKey = "pref-ver:" -- legacy 00-index.cache format read00IndexCache :: BSS.ByteString -> Cache read00IndexCache bs = Cache { cacheHeadTs = nullTimestamp , cacheEntries = mapMaybe read00IndexCacheEntry $ BSS.lines bs } read00IndexCacheEntry :: BSS.ByteString -> Maybe IndexCacheEntry read00IndexCacheEntry = \line -> case BSS.words line of [key, pkgnamestr, pkgverstr, sep, blocknostr] | key == BSS.pack packageKey && sep == BSS.pack blocknoKey -> case (parseName pkgnamestr, parseVer pkgverstr [], parseBlockNo blocknostr) of (Just pkgname, Just pkgver, Just blockno) -> Just (CachePackageId (PackageIdentifier pkgname pkgver) blockno nullTimestamp) _ -> Nothing [key, typecodestr, blocknostr] | key == BSS.pack buildTreeRefKey -> case (parseRefType typecodestr, parseBlockNo blocknostr) of (Just refType, Just blockno) -> Just (CacheBuildTreeRef refType blockno) _ -> Nothing (key: remainder) | key == BSS.pack preferredVersionKey -> do pref <- simpleParsecBS (BSS.unwords remainder) return $ CachePreference pref 0 nullTimestamp _ -> Nothing where parseName str | BSS.all (\c -> isAlphaNum c || c == '-') str = Just (mkPackageName (BSS.unpack str)) | otherwise = Nothing parseVer str vs = case BSS.readInt str of Nothing -> Nothing Just (v, str') -> case BSS.uncons str' of Just ('.', str'') -> parseVer str'' (v:vs) Just _ -> Nothing Nothing -> Just (mkVersion (reverse (v:vs))) parseBlockNo str = case BSS.readInt str of Just (blockno, remainder) | BSS.null remainder -> Just (fromIntegral blockno) _ -> Nothing parseRefType str = case BSS.uncons str of Just (typeCode, remainder) | BSS.null remainder && Tar.isBuildTreeRefTypeCode typeCode -> Just (refTypeFromTypeCode typeCode) _ -> Nothing -- legacy 00-index.cache format show00IndexCache :: Cache -> String show00IndexCache Cache{..} = unlines $ map show00IndexCacheEntry cacheEntries show00IndexCacheEntry :: IndexCacheEntry -> String show00IndexCacheEntry entry = unwords $ case entry of 
CachePackageId pkgid b _ -> [ packageKey , prettyShow (packageName pkgid) , prettyShow (packageVersion pkgid) , blocknoKey , show b ] CacheBuildTreeRef tr b -> [ buildTreeRefKey , [typeCodeFromRefType tr] , show b ] CachePreference dep _ _ -> [ preferredVersionKey , prettyShow dep ] cabal-install-3.8.1.0/src/Distribution/Client/IndexUtils/0000755000000000000000000000000007346545000021334 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/IndexUtils/ActiveRepos.hs0000644000000000000000000001611107346545000024114 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} module Distribution.Client.IndexUtils.ActiveRepos ( ActiveRepos (..), defaultActiveRepos, filterSkippedActiveRepos, ActiveRepoEntry (..), CombineStrategy (..), organizeByRepos, ) where import Distribution.Client.Compat.Prelude import Distribution.Client.Types.RepoName (RepoName (..)) import Prelude () import Distribution.Parsec (parsecLeadingCommaNonEmpty) import qualified Distribution.Compat.CharParsing as P import qualified Text.PrettyPrint as Disp -- $setup -- >>> import Distribution.Parsec ------------------------------------------------------------------------------- -- Types ------------------------------------------------------------------------------- -- | Ordered list of active repositories. newtype ActiveRepos = ActiveRepos [ActiveRepoEntry] deriving (Eq, Show, Generic) defaultActiveRepos :: ActiveRepos defaultActiveRepos = ActiveRepos [ ActiveRepoRest CombineStrategyMerge ] -- | Note, this does nothing if 'ActiveRepoRest' is present. 
filterSkippedActiveRepos :: ActiveRepos -> ActiveRepos filterSkippedActiveRepos repos@(ActiveRepos entries) | any isActiveRepoRest entries = repos | otherwise = ActiveRepos (filter notSkipped entries) where isActiveRepoRest (ActiveRepoRest _) = True isActiveRepoRest _ = False notSkipped (ActiveRepo _ CombineStrategySkip) = False notSkipped _ = True instance Binary ActiveRepos instance Structured ActiveRepos instance NFData ActiveRepos instance Pretty ActiveRepos where pretty (ActiveRepos []) = Disp.text ":none" pretty (ActiveRepos repos) = Disp.hsep $ Disp.punctuate Disp.comma $ map pretty repos -- | Note: empty string is not valid 'ActiveRepos'. -- -- >>> simpleParsec "" :: Maybe ActiveRepos -- Nothing -- -- >>> simpleParsec ":none" :: Maybe ActiveRepos -- Just (ActiveRepos []) -- -- >>> simpleParsec ":rest" :: Maybe ActiveRepos -- Just (ActiveRepos [ActiveRepoRest CombineStrategyMerge]) -- -- >>> simpleParsec "hackage.haskell.org, :rest, head.hackage:override" :: Maybe ActiveRepos -- Just (ActiveRepos [ActiveRepo (RepoName "hackage.haskell.org") CombineStrategyMerge,ActiveRepoRest CombineStrategyMerge,ActiveRepo (RepoName "head.hackage") CombineStrategyOverride]) -- instance Parsec ActiveRepos where parsec = ActiveRepos [] <$ P.try (P.string ":none") <|> do repos <- parsecLeadingCommaNonEmpty parsec return (ActiveRepos (toList repos)) data ActiveRepoEntry = ActiveRepoRest CombineStrategy -- ^ rest repositories, i.e. 
not explicitly listed as 'ActiveRepo' | ActiveRepo RepoName CombineStrategy -- ^ explicit repository name deriving (Eq, Show, Generic) instance Binary ActiveRepoEntry instance Structured ActiveRepoEntry instance NFData ActiveRepoEntry instance Pretty ActiveRepoEntry where pretty (ActiveRepoRest s) = Disp.text ":rest" <<>> Disp.colon <<>> pretty s pretty (ActiveRepo r s) = pretty r <<>> Disp.colon <<>> pretty s instance Parsec ActiveRepoEntry where parsec = leadColon <|> leadRepo where leadColon = do _ <- P.char ':' token <- P.munch1 isAlpha case token of "rest" -> ActiveRepoRest <$> strategyP "repo" -> P.char ':' *> leadRepo _ -> P.unexpected $ "Unknown active repository entry type: " ++ token leadRepo = do r <- parsec s <- strategyP return (ActiveRepo r s) strategyP = P.option CombineStrategyMerge (P.char ':' *> parsec) data CombineStrategy = CombineStrategySkip -- ^ skip this repository | CombineStrategyMerge -- ^ merge existing versions | CombineStrategyOverride -- ^ if later repository specifies a package, -- all package versions are replaced deriving (Eq, Show, Enum, Bounded, Generic) instance Binary CombineStrategy instance Structured CombineStrategy instance NFData CombineStrategy instance Pretty CombineStrategy where pretty CombineStrategySkip = Disp.text "skip" pretty CombineStrategyMerge = Disp.text "merge" pretty CombineStrategyOverride = Disp.text "override" instance Parsec CombineStrategy where parsec = P.choice [ CombineStrategySkip <$ P.string "skip" , CombineStrategyMerge <$ P.string "merge" , CombineStrategyOverride <$ P.string "override" ] ------------------------------------------------------------------------------- -- Organisation ------------------------------------------------------------------------------- -- | Sort values 'RepoName' according to 'ActiveRepos' list. 
-- -- >>> let repos = [RepoName "a", RepoName "b", RepoName "c"] -- >>> organizeByRepos (ActiveRepos [ActiveRepoRest CombineStrategyMerge]) id repos -- Right [(RepoName "a",CombineStrategyMerge),(RepoName "b",CombineStrategyMerge),(RepoName "c",CombineStrategyMerge)] -- -- >>> organizeByRepos (ActiveRepos [ActiveRepo (RepoName "b") CombineStrategyOverride, ActiveRepoRest CombineStrategyMerge]) id repos -- Right [(RepoName "b",CombineStrategyOverride),(RepoName "a",CombineStrategyMerge),(RepoName "c",CombineStrategyMerge)] -- -- >>> organizeByRepos (ActiveRepos [ActiveRepoRest CombineStrategyMerge, ActiveRepo (RepoName "b") CombineStrategyOverride]) id repos -- Right [(RepoName "a",CombineStrategyMerge),(RepoName "c",CombineStrategyMerge),(RepoName "b",CombineStrategyOverride)] -- -- >>> organizeByRepos (ActiveRepos [ActiveRepoRest CombineStrategyMerge, ActiveRepo (RepoName "d") CombineStrategyOverride]) id repos -- Left "no repository provided d" -- -- Note: currently if 'ActiveRepoRest' is provided more than once, -- rest-repositories will be multiple times in the output. -- organizeByRepos :: forall a. 
ActiveRepos -> (a -> RepoName) -> [a] -> Either String [(a, CombineStrategy)] organizeByRepos (ActiveRepos xs0) sel ys0 = -- here we use lazyness to do only one traversal let (rest, result) = case go rest xs0 ys0 of Right (rest', result') -> (rest', Right result') Left err -> ([], Left err) in result where go :: [a] -> [ActiveRepoEntry] -> [a] -> Either String ([a], [(a, CombineStrategy)]) go _rest [] ys = Right (ys, []) go rest (ActiveRepoRest s : xs) ys = go rest xs ys <&> \(rest', result) -> (rest', map (\x -> (x, s)) rest ++ result) go rest (ActiveRepo r s : xs) ys = do (z, zs) <- extract r ys go rest xs zs <&> \(rest', result) -> (rest', (z, s) : result) extract :: RepoName -> [a] -> Either String (a, [a]) extract r = loop id where loop _acc [] = Left $ "no repository provided " ++ prettyShow r loop acc (x:xs) | sel x == r = Right (x, acc xs) | otherwise = loop (acc . (x :)) xs (<&>) :: Either err ([s], b) -> (([s], b) -> ([s], c)) -> Either err ([s], c) (<&>) = flip fmap cabal-install-3.8.1.0/src/Distribution/Client/IndexUtils/IndexState.hs0000644000000000000000000001213707346545000023744 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.IndexUtils.IndexUtils -- Copyright : (c) 2016 Herbert Valerio Riedel -- License : BSD3 -- -- Package repositories index state. 
-- module Distribution.Client.IndexUtils.IndexState ( RepoIndexState(..), TotalIndexState, headTotalIndexState, makeTotalIndexState, lookupIndexState, insertIndexState, ) where import Distribution.Client.Compat.Prelude import Distribution.Client.IndexUtils.Timestamp (Timestamp) import Distribution.Client.Types.RepoName (RepoName (..)) import Distribution.Parsec (parsecLeadingCommaNonEmpty) import qualified Data.Map.Strict as Map import qualified Distribution.Compat.CharParsing as P import qualified Text.PrettyPrint as Disp -- $setup -- >>> import Distribution.Parsec ------------------------------------------------------------------------------- -- Total index state ------------------------------------------------------------------------------- -- | Index state of multiple repositories data TotalIndexState = TIS RepoIndexState (Map RepoName RepoIndexState) deriving (Eq, Show, Generic) instance Binary TotalIndexState instance Structured TotalIndexState instance NFData TotalIndexState instance Pretty TotalIndexState where pretty (TIS IndexStateHead m) | not (Map.null m) = Disp.hsep $ Disp.punctuate Disp.comma [ pretty rn Disp.<+> pretty idx | (rn, idx) <- Map.toList m ] pretty (TIS def m) = foldl' go (pretty def) (Map.toList m) where go doc (rn, idx) = doc <<>> Disp.comma Disp.<+> pretty rn Disp.<+> pretty idx -- | -- -- >>> simpleParsec "HEAD" :: Maybe TotalIndexState -- Just (TIS IndexStateHead (fromList [])) -- -- >>> simpleParsec "" :: Maybe TotalIndexState -- Nothing -- -- >>> simpleParsec "hackage.haskell.org HEAD" :: Maybe TotalIndexState -- Just (TIS IndexStateHead (fromList [])) -- -- >>> simpleParsec "2020-02-04T12:34:56Z, hackage.haskell.org HEAD" :: Maybe TotalIndexState -- Just (TIS (IndexStateTime (TS 1580819696)) (fromList [(RepoName "hackage.haskell.org",IndexStateHead)])) -- -- >>> simpleParsec "hackage.haskell.org 2020-02-04T12:34:56Z" :: Maybe TotalIndexState -- Just (TIS IndexStateHead (fromList [(RepoName "hackage.haskell.org",IndexStateTime (TS 
1580819696))])) -- instance Parsec TotalIndexState where parsec = normalise . foldl' add headTotalIndexState <$> parsecLeadingCommaNonEmpty single0 where single0 = startsWithRepoName <|> TokTimestamp <$> parsec startsWithRepoName = do reponame <- parsec -- the "HEAD" is technically a valid reponame... if reponame == RepoName "HEAD" then return TokHead else do P.spaces TokRepo reponame <$> parsec add :: TotalIndexState -> Tok -> TotalIndexState add _ TokHead = headTotalIndexState add _ (TokTimestamp ts) = TIS (IndexStateTime ts) Map.empty add (TIS def m) (TokRepo rn idx) = TIS def (Map.insert rn idx m) -- used in Parsec TotalIndexState implementation data Tok = TokRepo RepoName RepoIndexState | TokTimestamp Timestamp | TokHead -- | Remove non-default values from 'TotalIndexState'. normalise :: TotalIndexState -> TotalIndexState normalise (TIS def m) = TIS def (Map.filter (/= def) m) -- | 'TotalIndexState' where all repositories are at @HEAD@ index state. headTotalIndexState :: TotalIndexState headTotalIndexState = TIS IndexStateHead Map.empty -- | Create 'TotalIndexState'. makeTotalIndexState :: RepoIndexState -> Map RepoName RepoIndexState -> TotalIndexState makeTotalIndexState def m = normalise (TIS def m) -- | Lookup a 'RepoIndexState' for an individual repository from 'TotalIndexState'. lookupIndexState :: RepoName -> TotalIndexState -> RepoIndexState lookupIndexState rn (TIS def m) = Map.findWithDefault def rn m -- | Insert a 'RepoIndexState' to 'TotalIndexState'. 
insertIndexState :: RepoName -> RepoIndexState -> TotalIndexState -> TotalIndexState insertIndexState rn idx (TIS def m) | idx == def = TIS def (Map.delete rn m) | otherwise = TIS def (Map.insert rn idx m) ------------------------------------------------------------------------------- -- Repository index state ------------------------------------------------------------------------------- -- | Specification of the state of a specific repo package index data RepoIndexState = IndexStateHead -- ^ Use all available entries | IndexStateTime !Timestamp -- ^ Use all entries that existed at the specified time deriving (Eq,Generic,Show) instance Binary RepoIndexState instance Structured RepoIndexState instance NFData RepoIndexState instance Pretty RepoIndexState where pretty IndexStateHead = Disp.text "HEAD" pretty (IndexStateTime ts) = pretty ts instance Parsec RepoIndexState where parsec = parseHead <|> parseTime where parseHead = IndexStateHead <$ P.string "HEAD" parseTime = IndexStateTime <$> parsec cabal-install-3.8.1.0/src/Distribution/Client/IndexUtils/Timestamp.hs0000644000000000000000000001266407346545000023644 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.IndexUtils.Timestamp -- Copyright : (c) 2016 Herbert Valerio Riedel -- License : BSD3 -- -- Timestamp type used in package indexes module Distribution.Client.IndexUtils.Timestamp ( Timestamp , nullTimestamp , epochTimeToTimestamp , timestampToUTCTime , utcTimeToTimestamp , maximumTimestamp ) where import Distribution.Client.Compat.Prelude -- read is needed for Text instance import Prelude (read) import Data.Time (UTCTime (..), fromGregorianValid, makeTimeOfDayValid, showGregorian, timeOfDayToTime, timeToTimeOfDay) import Data.Time.Clock.POSIX (posixSecondsToUTCTime, 
utcTimeToPOSIXSeconds) import qualified Codec.Archive.Tar.Entry as Tar import qualified Distribution.Compat.CharParsing as P import qualified Text.PrettyPrint as Disp -- | UNIX timestamp (expressed in seconds since unix epoch, i.e. 1970). newtype Timestamp = TS Int64 -- Tar.EpochTime deriving (Eq,Ord,Enum,NFData,Show,Generic) epochTimeToTimestamp :: Tar.EpochTime -> Maybe Timestamp epochTimeToTimestamp et | ts == nullTimestamp = Nothing | otherwise = Just ts where ts = TS et timestampToUTCTime :: Timestamp -> Maybe UTCTime timestampToUTCTime (TS t) | t == minBound = Nothing | otherwise = Just $ posixSecondsToUTCTime (fromIntegral t) utcTimeToTimestamp :: UTCTime -> Maybe Timestamp utcTimeToTimestamp utct | minTime <= t, t <= maxTime = Just (TS (fromIntegral t)) | otherwise = Nothing where maxTime = toInteger (maxBound :: Int64) minTime = toInteger (succ minBound :: Int64) t :: Integer t = round . utcTimeToPOSIXSeconds $ utct -- | Compute the maximum 'Timestamp' value -- -- Returns 'nullTimestamp' for the empty list. Also note that -- 'nullTimestamp' compares as smaller to all non-'nullTimestamp' -- values. maximumTimestamp :: [Timestamp] -> Timestamp maximumTimestamp [] = nullTimestamp maximumTimestamp xs@(_:_) = maximum xs -- returns 'Nothing' if not representable as 'Timestamp' posixSecondsToTimestamp :: Integer -> Maybe Timestamp posixSecondsToTimestamp pt | minTs <= pt, pt <= maxTs = Just (TS (fromInteger pt)) | otherwise = Nothing where maxTs = toInteger (maxBound :: Int64) minTs = toInteger (succ minBound :: Int64) -- | Pretty-prints 'Timestamp' in ISO8601/RFC3339 format -- (e.g. @"2017-12-31T23:59:59Z"@) -- -- Returns empty string for 'nullTimestamp' in order for -- -- > null (display nullTimestamp) == True -- -- to hold. 
showTimestamp :: Timestamp -> String showTimestamp ts = case timestampToUTCTime ts of Nothing -> "" -- Note: we don't use 'formatTime' here to avoid incurring a -- dependency on 'old-locale' for older `time` libs Just UTCTime{..} -> showGregorian utctDay ++ ('T':showTOD utctDayTime) ++ "Z" where showTOD = show . timeToTimeOfDay instance Binary Timestamp instance Structured Timestamp instance Pretty Timestamp where pretty = Disp.text . showTimestamp instance Parsec Timestamp where parsec = parsePosix <|> parseUTC where -- | Parses unix timestamps, e.g. @"\@1474626019"@ parsePosix = do _ <- P.char '@' t <- P.integral -- note, no negative timestamps maybe (fail (show t ++ " is not representable as timestamp")) return $ posixSecondsToTimestamp t -- | Parses ISO8601/RFC3339-style UTC timestamps, -- e.g. @"2017-12-31T23:59:59Z"@ -- -- TODO: support numeric tz offsets; allow to leave off seconds parseUTC = do -- Note: we don't use 'Data.Time.Format.parseTime' here since -- we want more control over the accepted formats. ye <- parseYear _ <- P.char '-' mo <- parseTwoDigits _ <- P.char '-' da <- parseTwoDigits _ <- P.char 'T' utctDay <- maybe (fail (show (ye,mo,da) ++ " is not valid gregorian date")) return $ fromGregorianValid ye mo da ho <- parseTwoDigits _ <- P.char ':' mi <- parseTwoDigits _ <- P.char ':' se <- parseTwoDigits _ <- P.char 'Z' utctDayTime <- maybe (fail (show (ho,mi,se) ++ " is not valid time of day")) (return . timeOfDayToTime) $ makeTimeOfDayValid ho mi (realToFrac (se::Int)) let utc = UTCTime {..} maybe (fail (show utc ++ " is not representable as timestamp")) return $ utcTimeToTimestamp utc parseTwoDigits = do d1 <- P.satisfy isDigit d2 <- P.satisfy isDigit return (read [d1,d2]) -- A year must have at least 4 digits; e.g. "0097" is fine, -- while "97" is not c.f. 
RFC3339 which -- deprecates 2-digit years parseYear = do sign <- P.option ' ' (P.char '-') ds <- P.munch1 isDigit when (length ds < 4) $ fail "Year should have at least 4 digits" return (read (sign:ds)) -- | Special timestamp value to be used when 'timestamp' is -- missing/unknown/invalid nullTimestamp :: Timestamp nullTimestamp = TS minBound cabal-install-3.8.1.0/src/Distribution/Client/Init.hs0000644000000000000000000000433607346545000020511 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Init -- Copyright : (c) Brent Yorgey 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Implementation of the 'cabal init' command, which creates an initial .cabal -- file for a project. -- ----------------------------------------------------------------------------- module Distribution.Client.Init ( -- * Commands initCmd ) where import qualified Distribution.Client.Init.Interactive.Command as Interactive import qualified Distribution.Client.Init.NonInteractive.Command as NonInteractive import qualified Distribution.Client.Init.Simple as Simple import Distribution.Verbosity import Distribution.Client.Setup (RepoContext) import Distribution.Simple.Compiler import Distribution.Simple.Program (ProgramDb) import Distribution.Client.Init.Types import Distribution.Simple.Setup import Distribution.Client.IndexUtils import System.IO (hSetBuffering, stdout, BufferMode (NoBuffering)) import Distribution.Client.Init.FileCreators -- | This is the main driver for the init script. 
-- initCmd :: Verbosity -> PackageDBStack -> RepoContext -> Compiler -> ProgramDb -> InitFlags -> IO () initCmd v packageDBs repoCtxt comp progdb initFlags = do installedPkgIndex <- getInstalledPackages v comp packageDBs progdb sourcePkgDb <- getSourcePackages v repoCtxt hSetBuffering stdout NoBuffering settings <- createProject v installedPkgIndex sourcePkgDb initFlags writeProject settings where -- When no flag is set, default to interactive. -- -- When `--interactive` is set, if we also set `--simple`, -- then we interactive generate a simple project with sensible defaults. -- -- If `--simple` is not set, default to interactive. When the flag -- is explicitly set to `--non-interactive`, then we choose non-interactive. -- createProject | fromFlagOrDefault False (simpleProject initFlags) = Simple.createProject | otherwise = case interactive initFlags of Flag False -> NonInteractive.createProject comp _ -> Interactive.createProject cabal-install-3.8.1.0/src/Distribution/Client/Init/0000755000000000000000000000000007346545000020147 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/Init/Defaults.hs0000644000000000000000000001061707346545000022257 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Init.Defaults -- Copyright : (c) Brent Yorgey 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Default values to use in cabal init (if not specified in config/flags). 
-- ----------------------------------------------------------------------------- module Distribution.Client.Init.Defaults ( -- * default init values defaultApplicationDir , defaultSourceDir , defaultCabalVersion , defaultCabalVersions , defaultPackageType , defaultLicense , defaultLicenseIds , defaultMainIs , defaultChangelog , defaultCategories , defaultInitFlags , defaultLanguage , defaultVersion , defaultTestDir -- * MyLib defaults , myLibModule , myLibTestFile , myLibFile , myLibHs , myExeHs , myLibExeHs , myTestHs ) where import Distribution.ModuleName (ModuleName) import qualified Distribution.ModuleName as ModuleName(fromString) import Distribution.CabalSpecVersion (CabalSpecVersion (..)) import Distribution.Client.Init.Types (PackageType(..), InitFlags(..), HsFilePath, toHsFilePath) import qualified Distribution.SPDX.License as SPDX import qualified Distribution.SPDX.LicenseId as SPDX import Distribution.Simple.Flag (toFlag) import Distribution.Verbosity (normal) import Distribution.Types.Version import Distribution.FieldGrammar.Newtypes import Distribution.Simple (Language(..), License(..)) -- -------------------------------------------------------------------- -- -- Default flag and init values defaultVersion :: Version defaultVersion = mkVersion [0,1,0,0] defaultApplicationDir :: String defaultApplicationDir = "app" defaultSourceDir :: String defaultSourceDir = "src" defaultTestDir :: String defaultTestDir = "test" defaultCabalVersion :: CabalSpecVersion defaultCabalVersion = CabalSpecV3_0 defaultPackageType :: PackageType defaultPackageType = Executable defaultChangelog :: FilePath defaultChangelog = "CHANGELOG.md" defaultLicense :: CabalSpecVersion -> SpecLicense defaultLicense csv | csv < CabalSpecV2_2 = SpecLicense $ Right AllRightsReserved | otherwise = SpecLicense $ Left SPDX.NONE defaultMainIs :: HsFilePath defaultMainIs = toHsFilePath "Main.hs" defaultLanguage :: Language defaultLanguage = Haskell2010 defaultLicenseIds :: [SPDX.LicenseId] 
defaultLicenseIds = [ SPDX.BSD_2_Clause , SPDX.BSD_3_Clause , SPDX.Apache_2_0 , SPDX.MIT , SPDX.MPL_2_0 , SPDX.ISC , SPDX.GPL_2_0_only , SPDX.GPL_3_0_only , SPDX.LGPL_2_1_only , SPDX.LGPL_3_0_only , SPDX.AGPL_3_0_only , SPDX.GPL_2_0_or_later , SPDX.GPL_3_0_or_later , SPDX.LGPL_2_1_or_later , SPDX.LGPL_3_0_or_later , SPDX.AGPL_3_0_or_later ] defaultCategories :: [String] defaultCategories = [ "Codec" , "Concurrency" , "Control" , "Data" , "Database" , "Development" , "Distribution" , "Game" , "Graphics" , "Language" , "Math" , "Network" , "Sound" , "System" , "Testing" , "Text" , "Web" ] defaultCabalVersions :: [CabalSpecVersion] defaultCabalVersions = [ CabalSpecV1_24 , CabalSpecV2_0 , CabalSpecV2_2 , CabalSpecV2_4 , CabalSpecV3_0 , CabalSpecV3_4 ] defaultInitFlags :: InitFlags defaultInitFlags = mempty { initVerbosity = toFlag normal } -- -------------------------------------------------------------------- -- -- MyLib defaults myLibModule :: ModuleName myLibModule = ModuleName.fromString "MyLib" myLibTestFile :: HsFilePath myLibTestFile = toHsFilePath "MyLibTest.hs" myLibFile :: HsFilePath myLibFile = toHsFilePath "MyLib.hs" -- | Default MyLib.hs file. Used when no Lib.hs exists. myLibHs :: String myLibHs = unlines [ "module MyLib (someFunc) where" , "" , "someFunc :: IO ()" , "someFunc = putStrLn \"someFunc\"" ] myExeHs :: [String] myExeHs = [ "module Main where" , "" , "main :: IO ()" , "main = putStrLn \"Hello, Haskell!\"" ] myLibExeHs :: [String] myLibExeHs = [ "module Main where" , "" , "import qualified MyLib (someFunc)" , "" , "main :: IO ()" , "main = do" , " putStrLn \"Hello, Haskell!\"" , " MyLib.someFunc" ] -- | Default MyLibTest.hs file. 
myTestHs :: String myTestHs = unlines [ "module Main (main) where" , "" , "main :: IO ()" , "main = putStrLn \"Test suite not yet implemented.\"" ] cabal-install-3.8.1.0/src/Distribution/Client/Init/FileCreators.hs0000644000000000000000000002456407346545000023100 0ustar0000000000000000{-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Init.FileCreators -- Copyright : (c) Brent Yorgey 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Functions to create files during 'cabal init'. -- ----------------------------------------------------------------------------- module Distribution.Client.Init.FileCreators ( -- * Commands writeProject , writeLicense , writeChangeLog , prepareLibTarget , prepareExeTarget , prepareTestTarget ) where import Prelude hiding (writeFile, readFile) import Distribution.Client.Compat.Prelude hiding (head, empty, writeFile, readFile) import qualified Data.Set as Set (member) import Distribution.Client.Init.Defaults import Distribution.Client.Init.Licenses ( bsd2, bsd3, gplv2, gplv3, lgpl21, lgpl3, agplv3, apache20, mit, mpl20, isc ) import Distribution.Client.Init.Types hiding (putStrLn, putStr, message) import qualified Distribution.Client.Init.Types as T import Distribution.Fields.Pretty (PrettyField(..), showFields') import qualified Distribution.SPDX as SPDX import Distribution.Types.PackageName import Distribution.Client.Init.Format import Distribution.CabalSpecVersion (showCabalSpecVersion) import System.FilePath ((), (<.>)) import Distribution.FieldGrammar.Newtypes import Distribution.License (licenseToSPDX) -- -------------------------------------------------------------------- -- -- File generation writeProject :: Interactive m => ProjectSettings -> m () writeProject (ProjectSettings opts pkgDesc libTarget exeTarget testTarget) | null 
pkgName = do message opts T.Error "no package name given, so no .cabal file can be generated\n" | otherwise = do -- clear prompt history a bit" message opts T.Log $ "Using cabal specification: " ++ showCabalSpecVersion (_optCabalSpec opts) writeLicense opts pkgDesc writeChangeLog opts pkgDesc let pkgFields = mkPkgDescription opts pkgDesc commonStanza = mkCommonStanza opts libStanza <- prepareLibTarget opts libTarget exeStanza <- prepareExeTarget opts exeTarget testStanza <- prepareTestTarget opts testTarget (reusedCabal, cabalContents) <- writeCabalFile opts $ pkgFields ++ [commonStanza, libStanza, exeStanza, testStanza] when (null $ _pkgSynopsis pkgDesc) $ message opts T.Warning "No synopsis given. You should edit the .cabal file and add one." message opts T.Info "You may want to edit the .cabal file and add a Description field." when reusedCabal $ do existingCabal <- readFile $ unPackageName (_optPkgName opts) ++ ".cabal" when (existingCabal /= cabalContents) $ message opts T.Warning "A .cabal file was found and not updated, if updating is desired please use the '--overwrite' option." -- clear out last line for presentation. T.putStrLn "" where pkgName = unPackageName $ _optPkgName opts prepareLibTarget :: Interactive m => WriteOpts -> Maybe LibTarget -> m (PrettyField FieldAnnotation) prepareLibTarget _ Nothing = return PrettyEmpty prepareLibTarget opts (Just libTarget) = do void $ writeDirectoriesSafe opts $ filter (/= ".") srcDirs -- avoid writing when conflicting exposed paths may -- exist. when (expMods == (myLibModule :| [])) . 
void $ writeFileSafe opts libPath myLibHs return $ mkLibStanza opts libTarget where expMods = _libExposedModules libTarget srcDirs = _libSourceDirs libTarget libPath = case srcDirs of path:_ -> path _hsFilePath myLibFile _ -> _hsFilePath myLibFile prepareExeTarget :: Interactive m => WriteOpts -> Maybe ExeTarget -> m (PrettyField FieldAnnotation) prepareExeTarget _ Nothing = return PrettyEmpty prepareExeTarget opts (Just exeTarget) = do void $ writeDirectoriesSafe opts appDirs void $ writeFileSafe opts mainPath mainHs return $ mkExeStanza opts exeTarget where exeMainIs = _exeMainIs exeTarget pkgType = _optPkgType opts appDirs = _exeApplicationDirs exeTarget mainFile = _hsFilePath exeMainIs mainPath = case appDirs of appPath:_ -> appPath mainFile _ -> mainFile mainHs = unlines . mkLiterate exeMainIs $ if pkgType == LibraryAndExecutable then myLibExeHs else myExeHs prepareTestTarget :: Interactive m => WriteOpts -> Maybe TestTarget -> m (PrettyField FieldAnnotation) prepareTestTarget _ Nothing = return PrettyEmpty prepareTestTarget opts (Just testTarget) = do void $ writeDirectoriesSafe opts testDirs' void $ writeFileSafe opts testPath myTestHs return $ mkTestStanza opts testTarget where testDirs' = _testDirs testTarget testMainIs = _hsFilePath $ _testMainIs testTarget testPath = case testDirs' of p:_ -> p testMainIs _ -> testMainIs writeCabalFile :: Interactive m => WriteOpts -> [PrettyField FieldAnnotation] -- ^ .cabal fields -> m (Bool, String) writeCabalFile opts fields = do let cabalContents = showFields' annCommentLines postProcessFieldLines 4 fields reusedCabal <- writeFileSafe opts cabalFileName cabalContents return (reusedCabal, cabalContents) where cabalFileName = pkgName ++ ".cabal" pkgName = unPackageName $ _optPkgName opts -- | Write the LICENSE file. -- -- For licenses that contain the author's name(s), the values are taken -- from the 'authors' field of 'InitFlags', and if not specified will -- be the string "???". 
-- -- If the license type is unknown no license file will be prepared and -- a warning will be raised. -- writeLicense :: Interactive m => WriteOpts -> PkgDescription -> m () writeLicense writeOpts pkgDesc = do year <- show <$> getCurrentYear case licenseFile year (_pkgAuthor pkgDesc) of Just licenseText -> void $ writeFileSafe writeOpts "LICENSE" licenseText Nothing -> message writeOpts T.Warning "unknown license type, you must put a copy in LICENSE yourself." where getLid (Left (SPDX.License (SPDX.ELicense (SPDX.ELicenseId lid) Nothing))) = Just lid getLid (Right l) = getLid . Left $ licenseToSPDX l getLid _ = Nothing licenseFile year auth = case getLid . getSpecLicense $ _pkgLicense pkgDesc of Just SPDX.BSD_2_Clause -> Just $ bsd2 auth year Just SPDX.BSD_3_Clause -> Just $ bsd3 auth year Just SPDX.Apache_2_0 -> Just apache20 Just SPDX.MIT -> Just $ mit auth year Just SPDX.MPL_2_0 -> Just mpl20 Just SPDX.ISC -> Just $ isc auth year Just SPDX.GPL_2_0_only -> Just gplv2 Just SPDX.GPL_3_0_only -> Just gplv3 Just SPDX.LGPL_2_1_only -> Just lgpl21 Just SPDX.LGPL_3_0_only -> Just lgpl3 Just SPDX.AGPL_3_0_only -> Just agplv3 Just SPDX.GPL_2_0_or_later -> Just gplv2 Just SPDX.GPL_3_0_or_later -> Just gplv3 Just SPDX.LGPL_2_1_or_later -> Just lgpl21 Just SPDX.LGPL_3_0_or_later -> Just lgpl3 Just SPDX.AGPL_3_0_or_later -> Just agplv3 _ -> Nothing -- | Writes the changelog to the current directory. -- writeChangeLog :: Interactive m => WriteOpts -> PkgDescription -> m () writeChangeLog opts pkgDesc | Just docs <- _pkgExtraDocFiles pkgDesc , defaultChangelog `Set.member` docs = go | defaultChangelog `elem` _pkgExtraSrcFiles pkgDesc = go | otherwise = return () where changeLog = unlines [ "# Revision history for " ++ prettyShow (_pkgName pkgDesc) , "" , "## " ++ prettyShow (_pkgVersion pkgDesc) ++ " -- YYYY-mm-dd" , "" , "* First version. Released on an unsuspecting world." 
] go = void $ writeFileSafe opts defaultChangelog changeLog -- -------------------------------------------------------------------- -- -- Utilities data WriteAction = Overwrite | Fresh | Existing deriving Eq instance Show WriteAction where show Overwrite = "Overwriting" show Fresh = "Creating fresh" show Existing = "Using existing" -- | Possibly generate a message to stdout, taking into account the -- --quiet flag. message :: Interactive m => WriteOpts -> T.Severity -> String -> m () message opts = T.message (_optVerbosity opts) -- | Write a file \"safely\" if it doesn't exist, backing up any existing version when -- the overwrite flag is set. writeFileSafe :: Interactive m => WriteOpts -> FilePath -> String -> m Bool writeFileSafe opts fileName content = do exists <- doesFileExist fileName let action | exists && doOverwrite = Overwrite | not exists = Fresh | otherwise = Existing go exists message opts T.Log $ show action ++ " file " ++ fileName ++ "..." return $ action == Existing where doOverwrite = _optOverwrite opts go exists | not exists = do writeFile fileName content | exists && doOverwrite = do newName <- findNewPath fileName message opts T.Log $ concat [ fileName , " already exists. Backing up old version in " , newName ] copyFile fileName newName -- backups the old file removeExistingFile fileName -- removes the original old file writeFile fileName content -- writes the new file | otherwise = return () writeDirectoriesSafe :: Interactive m => WriteOpts -> [String] -> m Bool writeDirectoriesSafe opts dirs = fmap or $ for dirs $ \dir -> do exists <- doesDirectoryExist dir let action | exists && doOverwrite = Overwrite | not exists = Fresh | otherwise = Existing go dir exists message opts T.Log $ show action ++ " directory ./" ++ dir ++ "..." 
return $ action == Existing where doOverwrite = _optOverwrite opts go dir exists | not exists = do createDirectory dir | exists && doOverwrite = do newDir <- findNewPath dir message opts T.Log $ concat [ dir , " already exists. Backing up old version in " , newDir ] renameDirectory dir newDir -- backups the old directory createDirectory dir -- creates the new directory | otherwise = return () findNewPath :: Interactive m => FilePath -> m FilePath findNewPath dir = go (0 :: Int) where go n = do let newDir = dir <.> ("save" ++ show n) e <- doesDirectoryExist newDir if e then go (succ n) else return newDir cabal-install-3.8.1.0/src/Distribution/Client/Init/FlagExtractors.hs0000644000000000000000000002360207346545000023436 0ustar0000000000000000{-# LANGUAGE LambdaCase #-} module Distribution.Client.Init.FlagExtractors ( -- * Flag extractors getPackageDir , getSimpleProject , getMinimal , getCabalVersion , getCabalVersionNoPrompt , getPackageName , getVersion , getLicense , getAuthor , getEmail , getHomepage , getSynopsis , getCategory , getExtraSrcFiles , getExtraDocFiles , getPackageType , getMainFile , getInitializeTestSuite , getTestDirs , getLanguage , getNoComments , getAppDirs , getSrcDirs , getExposedModules , getBuildTools , getDependencies , getOtherExts , getOverwrite , getOtherModules -- * Shared prompts , simpleProjectPrompt , initializeTestSuitePrompt , packageTypePrompt , testMainPrompt , dependenciesPrompt ) where import Prelude () import Distribution.Client.Compat.Prelude hiding (putStr, putStrLn, getLine, last) import qualified Data.List.NonEmpty as NEL import Distribution.CabalSpecVersion (CabalSpecVersion(..)) import Distribution.Version (Version) import Distribution.ModuleName (ModuleName) import Distribution.Types.Dependency (Dependency(..)) import Distribution.Types.PackageName (PackageName) import Distribution.Client.Init.Defaults import Distribution.FieldGrammar.Newtypes (SpecLicense) import Distribution.Client.Init.Types import 
Distribution.Simple.Setup (Flag(..), fromFlagOrDefault, flagToMaybe) import Distribution.Simple.Flag (flagElim) import Language.Haskell.Extension (Language(..), Extension(..)) import Distribution.Client.Init.Prompt import qualified Data.Set as Set import Distribution.Simple.PackageIndex import Distribution.Client.Init.Utils -- -------------------------------------------------------------------- -- -- Flag extraction getPackageDir :: Interactive m => InitFlags -> m FilePath getPackageDir = flagElim getCurrentDirectory return . packageDir -- | Ask if a simple project with sensible defaults should be created. getSimpleProject :: Interactive m => InitFlags -> m Bool -> m Bool getSimpleProject flags = fromFlagOrPrompt (simpleProject flags) -- | Extract minimal cabal file flag (implies nocomments) getMinimal :: Interactive m => InitFlags -> m Bool getMinimal = return . fromFlagOrDefault False . minimal -- | Get the version of the cabal spec to use. -- -- The spec version can be specified by the InitFlags cabalVersion field. If -- none is specified then the user is prompted to pick from a list of -- supported versions (see code below). getCabalVersion :: Interactive m => InitFlags -> m CabalSpecVersion -> m CabalSpecVersion getCabalVersion flags = fromFlagOrPrompt (cabalVersion flags) getCabalVersionNoPrompt :: InitFlags -> CabalSpecVersion getCabalVersionNoPrompt = fromFlagOrDefault defaultCabalVersion . cabalVersion -- | Get the package name: use the package directory (supplied, or the current -- directory by default) as a guess. It looks at the SourcePackageDb to avoid -- using an existing package name. getPackageName :: Interactive m => InitFlags -> m PackageName -> m PackageName getPackageName flags = fromFlagOrPrompt (packageName flags) -- | Package version: use 0.1.0.0 as a last resort, but try prompting the user -- if possible. 
getVersion :: Interactive m => InitFlags -> m Version -> m Version getVersion flags = fromFlagOrPrompt (version flags) -- | Choose a license for the package. -- The license can come from Initflags (license field), if it is not present -- then prompt the user from a predefined list of licenses. getLicense :: Interactive m => InitFlags -> m SpecLicense -> m SpecLicense getLicense flags = fromFlagOrPrompt (license flags) -- | The author's name. Prompt, or try to guess from an existing -- darcs repo. getAuthor :: Interactive m => InitFlags -> m String -> m String getAuthor flags = fromFlagOrPrompt (author flags) -- | The author's email. Prompt, or try to guess from an existing -- darcs repo. getEmail :: Interactive m => InitFlags -> m String -> m String getEmail flags = fromFlagOrPrompt (email flags) -- | Prompt for a homepage URL for the package. getHomepage :: Interactive m => InitFlags -> m String -> m String getHomepage flags = fromFlagOrPrompt (homepage flags) -- | Prompt for a project synopsis. getSynopsis :: Interactive m => InitFlags -> m String -> m String getSynopsis flags = fromFlagOrPrompt (synopsis flags) -- | Prompt for a package category. -- Note that it should be possible to do some smarter guessing here too, i.e. -- look at the name of the top level source directory. getCategory :: Interactive m => InitFlags -> m String -> m String getCategory flags = fromFlagOrPrompt (category flags) -- | Try to guess extra source files (don't prompt the user). getExtraSrcFiles :: Interactive m => InitFlags -> m (Set String) getExtraSrcFiles = pure . flagElim mempty Set.fromList . extraSrc -- | Try to guess extra source files (don't prompt the user). getExtraDocFiles :: Interactive m => InitFlags -> m (Maybe (Set String)) getExtraDocFiles = pure . Just . flagElim (Set.singleton defaultChangelog) Set.fromList . extraDoc -- | Ask whether the project builds a library or executable. 
getPackageType :: Interactive m => InitFlags -> m PackageType -> m PackageType getPackageType InitFlags { initializeTestSuite = Flag True , packageType = NoFlag } _ = return TestSuite getPackageType flags act = fromFlagOrPrompt (packageType flags) act getMainFile :: Interactive m => InitFlags -> m HsFilePath -> m HsFilePath getMainFile flags act = case mainIs flags of Flag a | isHsFilePath a -> return $ toHsFilePath a | otherwise -> act NoFlag -> act getInitializeTestSuite :: Interactive m => InitFlags -> m Bool -> m Bool getInitializeTestSuite flags = fromFlagOrPrompt (initializeTestSuite flags) getTestDirs :: Interactive m => InitFlags -> m [String] -> m [String] getTestDirs flags = fromFlagOrPrompt (testDirs flags) -- | Ask for the Haskell base language of the package. getLanguage :: Interactive m => InitFlags -> m Language -> m Language getLanguage flags = fromFlagOrPrompt (language flags) -- | Ask whether to generate explanatory comments. getNoComments :: Interactive m => InitFlags -> m Bool -> m Bool getNoComments flags = fromFlagOrPrompt (noComments flags) -- | Ask for the application root directory. getAppDirs :: Interactive m => InitFlags -> m [String] -> m [String] getAppDirs flags = fromFlagOrPrompt (applicationDirs flags) -- | Ask for the source (library) root directory. getSrcDirs :: Interactive m => InitFlags -> m [String] -> m [String] getSrcDirs flags = fromFlagOrPrompt (sourceDirs flags) -- | Retrieve the list of exposed modules getExposedModules :: Interactive m => InitFlags -> m (NonEmpty ModuleName) getExposedModules = return . fromMaybe (myLibModule NEL.:| []) . join . flagToMaybe . fmap NEL.nonEmpty . exposedModules -- | Retrieve the list of other modules getOtherModules :: Interactive m => InitFlags -> m [ModuleName] getOtherModules = return . fromFlagOrDefault [] . otherModules -- | Retrieve the list of build tools getBuildTools :: Interactive m => InitFlags -> m [Dependency] getBuildTools = flagElim (return []) (foldM go []) . 
buildTools where go acc dep = case eitherParsec dep of Left e -> do putStrLn $ "Failed to parse dependency: " ++ e putStrLn "Skipping..." return acc Right d -> return $ acc ++ [d] -- | Retrieve the list of dependencies getDependencies :: Interactive m => InitFlags -> m [Dependency] -> m [Dependency] getDependencies flags = fromFlagOrPrompt (dependencies flags) -- | Retrieve the list of extensions getOtherExts :: Interactive m => InitFlags -> m [Extension] getOtherExts = return . fromFlagOrDefault [] . otherExts -- | Tell whether to overwrite files on write -- getOverwrite :: Interactive m => InitFlags -> m Bool getOverwrite = return . fromFlagOrDefault False . overwrite -- -------------------------------------------------------------------- -- -- Shared prompts simpleProjectPrompt :: Interactive m => InitFlags -> m Bool simpleProjectPrompt flags = getSimpleProject flags $ promptYesNo "Should I generate a simple project with sensible defaults" (DefaultPrompt True) initializeTestSuitePrompt :: Interactive m => InitFlags -> m Bool initializeTestSuitePrompt flags = getInitializeTestSuite flags $ promptYesNo "Should I generate a test suite for the library" (DefaultPrompt True) packageTypePrompt :: Interactive m => InitFlags -> m PackageType packageTypePrompt flags = getPackageType flags $ do pt <- promptList "What does the package build" packageTypes (DefaultPrompt "Executable") Nothing False return $ fromMaybe Executable (parsePackageType pt) where packageTypes = [ "Library" , "Executable" , "Library and Executable" , "Test suite" ] parsePackageType = \case "Library" -> Just Library "Executable" -> Just Executable "Library and Executable" -> Just LibraryAndExecutable "Test suite" -> Just TestSuite _ -> Nothing testMainPrompt :: Interactive m => m HsFilePath testMainPrompt = do fp <- promptList "What is the main module of the test suite?" 
[defaultMainIs', "Main.lhs"] (DefaultPrompt defaultMainIs') Nothing True let hs = toHsFilePath fp case _hsFileType hs of InvalidHsPath -> do putStrLn $ concat [ "Main file " , show hs , " is not a valid haskell file. Source files must end in .hs or .lhs." ] testMainPrompt _ -> return hs where defaultMainIs' = show defaultMainIs dependenciesPrompt :: Interactive m => InstalledPackageIndex -> InitFlags -> m [Dependency] dependenciesPrompt pkgIx flags = getDependencies flags (getBaseDep pkgIx flags) -- -------------------------------------------------------------------- -- -- utilities -- | If a flag is defined, return its value or else execute -- an interactive action. -- fromFlagOrPrompt :: Interactive m => Flag a -> m a -> m a fromFlagOrPrompt flag action = flagElim action return flag cabal-install-3.8.1.0/src/Distribution/Client/Init/Format.hs0000644000000000000000000003157107346545000021742 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} -- | -- Module : Distribution.Client.Init.Format -- Copyright : (c) Brent Yorgey 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Pretty printing and field formatting utilities used for file creation. 
-- module Distribution.Client.Init.Format ( -- * cabal file formatters listFieldS , field , fieldD , commentedOutWithComments , withComments , annNoComments , postProcessFieldLines -- * stanza generation , mkCommonStanza , mkLibStanza , mkExeStanza , mkTestStanza , mkPkgDescription ) where import Distribution.Pretty import Distribution.Fields import Distribution.Client.Init.Types import Distribution.License import Text.PrettyPrint import Distribution.Solver.Compat.Prelude hiding (empty) import Distribution.PackageDescription.FieldGrammar import Distribution.Simple.Utils hiding (cabalVersion) import Distribution.Utils.Path import Distribution.Package (unPackageName) import qualified Distribution.SPDX.License as SPDX import Distribution.CabalSpecVersion import Distribution.FieldGrammar.Newtypes (SpecLicense(SpecLicense)) -- | Construct a 'PrettyField' from a field that can be automatically -- converted to a 'Doc' via 'display'. field :: Pretty b => FieldName -> (a -> b) -> a -> [String] -> Bool -> WriteOpts -> PrettyField FieldAnnotation field fieldName modifier fieldContents = fieldD fieldName (pretty $ modifier fieldContents) -- | Construct a 'PrettyField' from a 'Doc' Flag. fieldD :: FieldName -- ^ Name of the field -> Doc -- ^ Field contents -> [String] -- ^ Comment to explain the field -> Bool -- ^ Should the field be included (commented out) even if blank? -> WriteOpts -> PrettyField FieldAnnotation fieldD fieldName fieldContents fieldComments includeField opts -- If the "--no-comments" or "--minimal" flag is set, strip comments. | hasNoComments || isMinimal = contents NoComment | otherwise = contents $ commentPositionFor fieldName fieldComments where commentPositionFor fn | fn == "cabal-version" = CommentAfter | otherwise = CommentBefore isMinimal = _optMinimal opts hasNoComments = _optNoComments opts contents -- If there is no content, optionally produce a commented out field. 
| fieldContents == empty = fieldSEmptyContents | otherwise = fieldSWithContents fieldSEmptyContents cs | not includeField || isMinimal = PrettyEmpty | otherwise = PrettyField (commentedOutWithComments cs) fieldName empty fieldSWithContents cs = PrettyField (withComments cs) fieldName fieldContents -- | A field annotation instructing the pretty printer to comment out the field -- and any contents, with no comments. commentedOutWithComments :: CommentPosition -> FieldAnnotation commentedOutWithComments (CommentBefore cs) = FieldAnnotation True . CommentBefore $ map commentNoTrailing cs commentedOutWithComments (CommentAfter cs) = FieldAnnotation True . CommentAfter $ map commentNoTrailing cs commentedOutWithComments NoComment = FieldAnnotation True NoComment -- | A field annotation with the specified comment lines. withComments :: CommentPosition -> FieldAnnotation withComments (CommentBefore cs) = FieldAnnotation False . CommentBefore $ map commentNoTrailing cs withComments (CommentAfter cs) = FieldAnnotation False . CommentAfter $ map commentNoTrailing cs withComments NoComment = FieldAnnotation False NoComment -- | A field annotation with no comments. 
annNoComments :: FieldAnnotation annNoComments = FieldAnnotation False NoComment postProcessFieldLines :: FieldAnnotation -> [String] -> [String] postProcessFieldLines ann | annCommentedOut ann = fmap commentNoTrailing | otherwise = id -- -------------------------------------------------------------------- -- -- Stanzas -- The common stanzas are hardcoded for simplicity purposes, -- see https://github.com/haskell/cabal/pull/7558#discussion_r693173846 mkCommonStanza :: WriteOpts -> PrettyField FieldAnnotation mkCommonStanza opts = case specHasCommonStanzas $ _optCabalSpec opts of NoCommonStanzas -> PrettyEmpty _ -> PrettySection annNoComments "common" [text "warnings"] [field "ghc-options" text "-Wall" [] False opts] mkLibStanza :: WriteOpts -> LibTarget -> PrettyField FieldAnnotation mkLibStanza opts (LibTarget srcDirs lang expMods otherMods exts deps tools) = PrettySection annNoComments (toUTF8BS "library") [] [ case specHasCommonStanzas $ _optCabalSpec opts of NoCommonStanzas -> PrettyEmpty _ -> field "import" (hsep . map text) ["warnings"] ["Import common warning flags."] False opts , field "exposed-modules" formatExposedModules (toList expMods) ["Modules exported by the library."] True opts , field "other-modules" formatOtherModules otherMods ["Modules included in this library but not exported."] True opts , field "other-extensions" formatOtherExtensions exts ["LANGUAGE extensions used by modules in this package."] True opts , field "build-depends" formatDependencyList deps ["Other library packages from which modules are imported."] True opts , field "hs-source-dirs" formatHsSourceDirs (unsafeMakeSymbolicPath <$> srcDirs) ["Directories containing source files."] True opts , field (buildToolTag opts) formatDependencyList tools ["Extra tools (e.g. alex, hsc2hs, ...) 
needed to build the source."] False opts , field "default-language" id lang ["Base language which the package is written in."] True opts ] mkExeStanza :: WriteOpts -> ExeTarget -> PrettyField FieldAnnotation mkExeStanza opts (ExeTarget exeMain appDirs lang otherMods exts deps tools) = PrettySection annNoComments (toUTF8BS "executable") [exeName] [ case specHasCommonStanzas $ _optCabalSpec opts of NoCommonStanzas -> PrettyEmpty _ -> field "import" (hsep . map text) ["warnings"] ["Import common warning flags."] False opts , field "main-is" unsafeFromHs exeMain [".hs or .lhs file containing the Main module."] True opts , field "other-modules" formatOtherModules otherMods [ "Modules included in this executable, other than Main." ] True opts , field "other-extensions" formatOtherExtensions exts ["LANGUAGE extensions used by modules in this package."] True opts , field "build-depends" formatDependencyList deps ["Other library packages from which modules are imported."] True opts , field "hs-source-dirs" formatHsSourceDirs (unsafeMakeSymbolicPath <$> appDirs) ["Directories containing source files."] True opts , field (buildToolTag opts) formatDependencyList tools ["Extra tools (e.g. alex, hsc2hs, ...) needed to build the source."] False opts , field "default-language" id lang ["Base language which the package is written in."] True opts ] where exeName = pretty $ _optPkgName opts mkTestStanza :: WriteOpts -> TestTarget -> PrettyField FieldAnnotation mkTestStanza opts (TestTarget testMain dirs lang otherMods exts deps tools) = PrettySection annNoComments (toUTF8BS "test-suite") [suiteName] [ case specHasCommonStanzas $ _optCabalSpec opts of NoCommonStanzas -> PrettyEmpty _ -> field "import" (hsep . map text) ["warnings"] ["Import common warning flags."] False opts , field "default-language" id lang ["Base language which the package is written in."] True opts , field "other-modules" formatOtherModules otherMods [ "Modules included in this executable, other than Main." 
] True opts , field "other-extensions" formatOtherExtensions exts ["LANGUAGE extensions used by modules in this package."] True opts , field "type" text "exitcode-stdio-1.0" ["The interface type and version of the test suite."] True opts , field "hs-source-dirs" formatHsSourceDirs (unsafeMakeSymbolicPath <$> dirs) ["Directories containing source files."] True opts , field "main-is" unsafeFromHs testMain ["The entrypoint to the test suite."] True opts , field "build-depends" formatDependencyList deps ["Test dependencies."] True opts , field (buildToolTag opts) formatDependencyList tools ["Extra tools (e.g. alex, hsc2hs, ...) needed to build the source."] False opts ] where suiteName = text $ unPackageName (_optPkgName opts) ++ "-test" mkPkgDescription :: WriteOpts -> PkgDescription -> [PrettyField FieldAnnotation] mkPkgDescription opts pkgDesc = [ field "cabal-version" text (showCabalSpecVersion cabalSpec) [ "The cabal-version field refers to the version of the .cabal specification," , "and can be different from the cabal-install (the tool) version and the" , "Cabal (the library) version you are using. As such, the Cabal (the library)" , "version used must be equal or greater than the version stated in this field." , "Starting from the specification version 2.2, the cabal-version field must be" , "the first thing in the cabal file." ] False opts , field "name" pretty (_pkgName pkgDesc) ["Initial package description '" ++ prettyShow (_optPkgName opts) ++ "' generated by" , "'cabal init'. For further documentation, see:" , " http://haskell.org/cabal/users-guide/" , "" , "The name of the package." 
] True opts , field "version" pretty (_pkgVersion pkgDesc) ["The package version.", "See the Haskell package versioning policy (PVP) for standards", "guiding when and how versions should be incremented.", "https://pvp.haskell.org", "PVP summary: +-+------- breaking API changes", " | | +----- non-breaking API additions", " | | | +--- code changes with no API change"] True opts , field "synopsis" text (_pkgSynopsis pkgDesc) ["A short (one-line) description of the package."] True opts , field "description" text "" ["A longer description of the package."] True opts , field "homepage" text (_pkgHomePage pkgDesc) ["URL for the project homepage or repository."] False opts , field "bug-reports" text "" ["A URL where users can report bugs."] False opts , field "license" pretty (_pkgLicense pkgDesc) ["The license under which the package is released."] True opts , case _pkgLicense pkgDesc of SpecLicense (Left SPDX.NONE) -> PrettyEmpty SpecLicense (Right AllRightsReserved) -> PrettyEmpty SpecLicense (Right UnspecifiedLicense) -> PrettyEmpty _ -> field "license-file" text "LICENSE" ["The file containing the license text."] False opts , field "author" text (_pkgAuthor pkgDesc) ["The package author(s)."] True opts , field "maintainer" text (_pkgEmail pkgDesc) ["An email address to which users can send suggestions, bug reports, and patches."] True opts , field "copyright" text "" ["A copyright notice."] True opts , field "category" text (_pkgCategory pkgDesc) [] False opts , field "build-type" text "Simple" [] False opts , case _pkgExtraDocFiles pkgDesc of Nothing -> PrettyEmpty Just fs -> field "extra-doc-files" formatExtraSourceFiles (toList fs) ["Extra doc files to be distributed with the package, such as a CHANGELOG or a README."] True opts , field "extra-source-files" formatExtraSourceFiles (toList $ _pkgExtraSrcFiles pkgDesc) ["Extra source files to be distributed with the package, such as examples, or a tutorial module."] True opts ] where cabalSpec = _pkgCabalVersion 
pkgDesc -- -------------------------------------------------------------------- -- -- Utils listFieldS :: [String] -> Doc listFieldS = text . intercalate ", " unsafeFromHs :: HsFilePath -> Doc unsafeFromHs = text . _hsFilePath buildToolTag :: WriteOpts -> FieldName buildToolTag opts | _optCabalSpec opts < CabalSpecV3_0 = "build-tools" | otherwise = "build-tool-depends" commentNoTrailing :: String -> String commentNoTrailing "" = "--" commentNoTrailing c = "-- " ++ c cabal-install-3.8.1.0/src/Distribution/Client/Init/Interactive/0000755000000000000000000000000007346545000022424 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/Init/Interactive/Command.hs0000644000000000000000000003716707346545000024354 0ustar0000000000000000{-# LANGUAGE LambdaCase, MultiWayIf #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Init.Command -- Copyright : (c) Brent Yorgey 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Implementation of the 'cabal init' command, which creates an initial .cabal -- file for a project. 
-- ----------------------------------------------------------------------------- module Distribution.Client.Init.Interactive.Command ( -- * Commands createProject -- ** Target generation , genPkgDescription , genLibTarget , genExeTarget , genTestTarget -- ** Prompts , cabalVersionPrompt , packageNamePrompt , versionPrompt , licensePrompt , authorPrompt , emailPrompt , homepagePrompt , synopsisPrompt , categoryPrompt , mainFilePrompt , testDirsPrompt , languagePrompt , noCommentsPrompt , appDirsPrompt , dependenciesPrompt , srcDirsPrompt ) where import Prelude () import Distribution.Client.Compat.Prelude hiding (putStr, putStrLn, getLine, last) import Distribution.CabalSpecVersion (CabalSpecVersion(..), showCabalSpecVersion) import Distribution.Version (Version) import Distribution.Types.PackageName (PackageName, unPackageName) import qualified Distribution.SPDX as SPDX import Distribution.Client.Init.Defaults import Distribution.Client.Init.FlagExtractors import Distribution.Client.Init.Prompt import Distribution.Client.Init.Types import Distribution.Client.Init.Utils import Distribution.Client.Init.NonInteractive.Heuristics (guessAuthorName, guessAuthorEmail) import Distribution.FieldGrammar.Newtypes (SpecLicense(..)) import Distribution.Simple.Setup (Flag(..), fromFlagOrDefault) import Distribution.Simple.PackageIndex (InstalledPackageIndex) import Distribution.Client.Types (SourcePackageDb(..)) import Distribution.Solver.Types.PackageIndex (elemByPackageName) import Language.Haskell.Extension (Language(..)) import Distribution.License (knownLicenses) import Distribution.Parsec (simpleParsec') -- | Main driver for interactive prompt code. -- createProject :: Interactive m => Verbosity -> InstalledPackageIndex -> SourcePackageDb -> InitFlags -> m ProjectSettings createProject v pkgIx srcDb initFlags = do -- The workflow is as follows: -- -- 1. Get the package type, supplied as either a program input or -- via user prompt. 
This determines what targets will be built -- in later steps. -- -- 2. Generate package description and the targets specified by -- the package type. Once this is done, a prompt for building -- test suites is initiated, and this determines if we build -- test targets as well. Then we ask if the user wants to -- comment their .cabal file with pretty comments. -- -- 3. The targets are passed to the file creator script, and associated -- directories/files/modules are created, with the a .cabal file -- being generated as a final result. -- pkgType <- packageTypePrompt initFlags isMinimal <- getMinimal initFlags doOverwrite <- overwritePrompt initFlags pkgDir <- getPackageDir initFlags pkgDesc <- fixupDocFiles v =<< genPkgDescription initFlags srcDb let pkgName = _pkgName pkgDesc cabalSpec = _pkgCabalVersion pkgDesc mkOpts cs = WriteOpts doOverwrite isMinimal cs v pkgDir pkgType pkgName initFlags' = initFlags { cabalVersion = Flag cabalSpec } case pkgType of Library -> do libTarget <- genLibTarget initFlags' pkgIx testTarget <- addLibDepToTest pkgName <$> genTestTarget initFlags' pkgIx comments <- noCommentsPrompt initFlags' return $ ProjectSettings (mkOpts comments cabalSpec) pkgDesc (Just libTarget) Nothing testTarget Executable -> do exeTarget <- genExeTarget initFlags' pkgIx comments <- noCommentsPrompt initFlags' return $ ProjectSettings (mkOpts comments cabalSpec) pkgDesc Nothing (Just exeTarget) Nothing LibraryAndExecutable -> do libTarget <- genLibTarget initFlags' pkgIx exeTarget <- addLibDepToExe pkgName <$> genExeTarget initFlags' pkgIx testTarget <- addLibDepToTest pkgName <$> genTestTarget initFlags' pkgIx comments <- noCommentsPrompt initFlags' return $ ProjectSettings (mkOpts comments cabalSpec) pkgDesc (Just libTarget) (Just exeTarget) testTarget TestSuite -> do -- the line below is necessary because if both package type and test flags -- are *not* passed, the user will be prompted for a package type (which -- includes TestSuite in the list). 
It prevents that the user end up with a -- TestSuite target with initializeTestSuite set to NoFlag, thus avoiding the prompt. let initFlags'' = initFlags' { initializeTestSuite = Flag True } testTarget <- genTestTarget initFlags'' pkgIx comments <- noCommentsPrompt initFlags'' return $ ProjectSettings (mkOpts comments cabalSpec) pkgDesc Nothing Nothing testTarget -- -------------------------------------------------------------------- -- -- Target and pkg description generation -- | Extract flags relevant to a package description and interactively -- generate a 'PkgDescription' object for creation. If the user specifies -- the generation of a simple package, then a simple target with defaults -- is generated. -- genPkgDescription :: Interactive m => InitFlags -> SourcePackageDb -> m PkgDescription genPkgDescription flags' srcDb = do csv <- cabalVersionPrompt flags' let flags = flags' { cabalVersion = Flag csv } PkgDescription csv <$> packageNamePrompt srcDb flags <*> versionPrompt flags <*> licensePrompt flags <*> authorPrompt flags <*> emailPrompt flags <*> homepagePrompt flags <*> synopsisPrompt flags <*> categoryPrompt flags <*> getExtraSrcFiles flags <*> getExtraDocFiles flags -- | Extract flags relevant to a library target and interactively -- generate a 'LibTarget' object for creation. If the user specifies -- the generation of a simple package, then a simple target with defaults -- is generated. -- genLibTarget :: Interactive m => InitFlags -> InstalledPackageIndex -> m LibTarget genLibTarget flags pkgs = LibTarget <$> srcDirsPrompt flags <*> languagePrompt flags "library" <*> getExposedModules flags <*> getOtherModules flags <*> getOtherExts flags <*> dependenciesPrompt pkgs flags <*> getBuildTools flags -- | Extract flags relevant to a executable target and interactively -- generate a 'ExeTarget' object for creation. If the user specifies -- the generation of a simple package, then a simple target with defaults -- is generated. 
-- genExeTarget :: Interactive m => InitFlags -> InstalledPackageIndex -> m ExeTarget genExeTarget flags pkgs = ExeTarget <$> mainFilePrompt flags <*> appDirsPrompt flags <*> languagePrompt flags "executable" <*> getOtherModules flags <*> getOtherExts flags <*> dependenciesPrompt pkgs flags <*> getBuildTools flags -- | Extract flags relevant to a test target and interactively -- generate a 'TestTarget' object for creation. If the user specifies -- the generation of a simple package, then a simple target with defaults -- is generated. -- -- Note: this workflow is only enabled if the user answers affirmatively -- when prompted, or if the user passes in the flag to enable -- test suites at command line. -- genTestTarget :: Interactive m => InitFlags -> InstalledPackageIndex -> m (Maybe TestTarget) genTestTarget flags pkgs = initializeTestSuitePrompt flags >>= go where go initialized | not initialized = return Nothing | otherwise = fmap Just $ TestTarget <$> testMainPrompt <*> testDirsPrompt flags <*> languagePrompt flags "test suite" <*> getOtherModules flags <*> getOtherExts flags <*> dependenciesPrompt pkgs flags <*> getBuildTools flags -- -------------------------------------------------------------------- -- -- Prompts overwritePrompt :: Interactive m => InitFlags -> m Bool overwritePrompt flags = do isOverwrite <- getOverwrite flags promptYesNo "Do you wish to overwrite existing files (backups will be created) (y/n)" (DefaultPrompt isOverwrite) cabalVersionPrompt :: Interactive m => InitFlags -> m CabalSpecVersion cabalVersionPrompt flags = getCabalVersion flags $ do v <- promptList "Please choose version of the Cabal specification to use" ppVersions (DefaultPrompt ppDefault) (Just takeVersion) False -- take just the version numbers for convenience return $ parseCabalVersion (takeVersion v) where -- only used when presenting the default in prompt takeVersion = takeWhile (/= ' ') ppDefault = displayCabalVersion defaultCabalVersion ppVersions = displayCabalVersion 
<$> defaultCabalVersions parseCabalVersion :: String -> CabalSpecVersion parseCabalVersion "1.24" = CabalSpecV1_24 parseCabalVersion "2.0" = CabalSpecV2_0 parseCabalVersion "2.2" = CabalSpecV2_2 parseCabalVersion "2.4" = CabalSpecV2_4 parseCabalVersion "3.0" = CabalSpecV3_0 parseCabalVersion "3.4" = CabalSpecV3_4 parseCabalVersion _ = defaultCabalVersion -- 2.4 displayCabalVersion :: CabalSpecVersion -> String displayCabalVersion v = case v of CabalSpecV1_24 -> "1.24 (legacy)" CabalSpecV2_0 -> "2.0 (+ support for Backpack, internal sub-libs, '^>=' operator)" CabalSpecV2_2 -> "2.2 (+ support for 'common', 'elif', redundant commas, SPDX)" CabalSpecV2_4 -> "2.4 (+ support for '**' globbing)" CabalSpecV3_0 -> "3.0 (+ set notation for ==, common stanzas in ifs, more redundant commas, better pkgconfig-depends)" CabalSpecV3_4 -> "3.4 (+ sublibraries in 'mixins', optional 'default-language')" _ -> showCabalSpecVersion v packageNamePrompt :: Interactive m => SourcePackageDb -> InitFlags -> m PackageName packageNamePrompt srcDb flags = getPackageName flags $ do defName <- case packageDir flags of Flag b -> return $ filePathToPkgName b NoFlag -> currentDirPkgName go $ DefaultPrompt defName where go defName = prompt "Package name" defName >>= \n -> if isPkgRegistered n then do don'tUseName <- promptYesNo (promptOtherNameMsg n) (DefaultPrompt True) if don'tUseName then go defName else return n else return n isPkgRegistered = elemByPackageName (packageIndex srcDb) inUseMsg pn = "The name " ++ unPackageName pn ++ " is already in use by another package on Hackage." promptOtherNameMsg pn = inUseMsg pn ++ " Do you want to choose a different name (y/n)" versionPrompt :: Interactive m => InitFlags -> m Version versionPrompt flags = getVersion flags go where go = do vv <- promptStr "Package version" (DefaultPrompt $ prettyShow defaultVersion) case simpleParsec vv of Nothing -> do putStrLn $ "Version must be a valid PVP format (e.g. 
0.1.0.0): " ++ vv go Just v -> return v licensePrompt :: Interactive m => InitFlags -> m SpecLicense licensePrompt flags = getLicense flags $ do let csv = fromFlagOrDefault defaultCabalVersion (cabalVersion flags) l <- promptList "Please choose a license" (licenses csv) MandatoryPrompt Nothing True case simpleParsec' csv l of Nothing -> do putStrLn ( "The license must be a valid SPDX expression:" ++ "\n - On the SPDX License List: https://spdx.org/licenses/" ++ "\n - NONE, if you do not want to grant any license" ++ "\n - LicenseRef-( alphanumeric | - | . )+" ) licensePrompt flags Just l' -> return l' where licenses csv = if csv >= CabalSpecV2_2 then SPDX.licenseId <$> defaultLicenseIds else fmap prettyShow knownLicenses authorPrompt :: Interactive m => InitFlags -> m String authorPrompt flags = getAuthor flags $ do name <- guessAuthorName promptStr "Author name" (DefaultPrompt name) emailPrompt :: Interactive m => InitFlags -> m String emailPrompt flags = getEmail flags $ do email' <- guessAuthorEmail promptStr "Maintainer email" (DefaultPrompt email') homepagePrompt :: Interactive m => InitFlags -> m String homepagePrompt flags = getHomepage flags $ promptStr "Project homepage URL" OptionalPrompt synopsisPrompt :: Interactive m => InitFlags -> m String synopsisPrompt flags = getSynopsis flags $ promptStr "Project synopsis" OptionalPrompt categoryPrompt :: Interactive m => InitFlags -> m String categoryPrompt flags = getCategory flags $ promptList "Project category" defaultCategories (DefaultPrompt "") (Just matchNone) True where matchNone s | null s = "(none)" | otherwise = s mainFilePrompt :: Interactive m => InitFlags -> m HsFilePath mainFilePrompt flags = getMainFile flags go where defaultMainIs' = show defaultMainIs go = do fp <- promptList "What is the main module of the executable" [defaultMainIs', "Main.lhs"] (DefaultPrompt defaultMainIs') Nothing True let hs = toHsFilePath fp case _hsFileType hs of InvalidHsPath -> do putStrLn $ concat [ "Main file " , 
show hs , " is not a valid haskell file. Source files must end in .hs or .lhs." ] go _ -> return hs testDirsPrompt :: Interactive m => InitFlags -> m [String] testDirsPrompt flags = getTestDirs flags $ do dir <- promptStr "Test directory" (DefaultPrompt defaultTestDir) return [dir] languagePrompt :: Interactive m => InitFlags -> String -> m Language languagePrompt flags pkgType = getLanguage flags $ do let h2010 = "Haskell2010" h98 = "Haskell98" ghc2021 = "GHC2021 (requires at least GHC 9.2)" l <- promptList ("Choose a language for your " ++ pkgType) [h2010, h98, ghc2021] (DefaultPrompt h2010) Nothing True if | l == h2010 -> return Haskell2010 | l == h98 -> return Haskell98 | l == ghc2021 -> return GHC2021 | all isAlphaNum l -> return $ UnknownLanguage l | otherwise -> do putStrLn $ "\nThe language must be alphanumeric. " ++ "Please enter a different language." languagePrompt flags pkgType noCommentsPrompt :: Interactive m => InitFlags -> m Bool noCommentsPrompt flags = getNoComments flags $ do doComments <- promptYesNo "Add informative comments to each field in the cabal file. (y/n)" (DefaultPrompt True) -- -- if --no-comments is flagged, then we choose not to generate comments -- for fields in the cabal file, but it's a nicer UX to present the -- affirmative question which must be negated. -- return (not doComments) -- | Ask for the application root directory. appDirsPrompt :: Interactive m => InitFlags -> m [String] appDirsPrompt flags = getAppDirs flags $ do dir <- promptList promptMsg [defaultApplicationDir, "exe", "src-exe"] (DefaultPrompt defaultApplicationDir) Nothing True return [dir] where promptMsg = case mainIs flags of Flag p -> "Application (" ++ p ++ ") directory" NoFlag -> "Application directory" -- | Ask for the source (library) root directory. 
srcDirsPrompt :: Interactive m => InitFlags -> m [String] srcDirsPrompt flags = getSrcDirs flags $ do dir <- promptList "Library source directory" [defaultSourceDir, "lib", "src-lib"] (DefaultPrompt defaultSourceDir) Nothing True return [dir] cabal-install-3.8.1.0/src/Distribution/Client/Init/Licenses.hs0000644000000000000000000053744107346545000022266 0ustar0000000000000000{-| Module : Distribution.Client.Init.Licenses Description : Factory functions for producing known license types. License : BSD-like Maintainer : cabal-devel@haskell.org Stability : provisional Portability : portable -} module Distribution.Client.Init.Licenses ( License , bsd2 , bsd3 , gplv2 , gplv3 , lgpl21 , lgpl3 , agplv3 , apache20 , mit , mpl20 , isc ) where import Prelude (String, unlines, (++)) type License = String bsd2 :: String -> String -> License bsd2 authors year = unlines [ "Copyright (c) " ++ year ++ ", " ++ authors , "All rights reserved." , "" , "Redistribution and use in source and binary forms, with or without" , "modification, are permitted provided that the following conditions are" , "met:" , "" , "1. Redistributions of source code must retain the above copyright" , " notice, this list of conditions and the following disclaimer." , "" , "2. Redistributions in binary form must reproduce the above copyright" , " notice, this list of conditions and the following disclaimer in the" , " documentation and/or other materials provided with the" , " distribution." , "" , "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS" , "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT" , "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR" , "A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT" , "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL," , "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT" , "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE," , "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY" , "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT" , "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE" , "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." ] bsd3 :: String -> String -> License bsd3 authors year = unlines [ "Copyright (c) " ++ year ++ ", " ++ authors , "" , "All rights reserved." , "" , "Redistribution and use in source and binary forms, with or without" , "modification, are permitted provided that the following conditions are met:" , "" , " * Redistributions of source code must retain the above copyright" , " notice, this list of conditions and the following disclaimer." , "" , " * Redistributions in binary form must reproduce the above" , " copyright notice, this list of conditions and the following" , " disclaimer in the documentation and/or other materials provided" , " with the distribution." , "" , " * Neither the name of " ++ authors ++ " nor the names of other" , " contributors may be used to endorse or promote products derived" , " from this software without specific prior written permission." , "" , "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS" , "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT" , "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR" , "A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT" , "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL," , "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT" , "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE," , "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY" , "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT" , "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE" , "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." ] gplv2 :: License gplv2 = unlines [ " GNU GENERAL PUBLIC LICENSE" , " Version 2, June 1991" , "" , " Copyright (C) 1989, 1991 Free Software Foundation, Inc.," , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" , " Everyone is permitted to copy and distribute verbatim copies" , " of this license document, but changing it is not allowed." , "" , " Preamble" , "" , " The licenses for most software are designed to take away your" , "freedom to share and change it. By contrast, the GNU General Public" , "License is intended to guarantee your freedom to share and change free" , "software--to make sure the software is free for all its users. This" , "General Public License applies to most of the Free Software" , "Foundation's software and to any other program whose authors commit to" , "using it. (Some other Free Software Foundation software is covered by" , "the GNU Lesser General Public License instead.) You can apply it to" , "your programs, too." , "" , " When we speak of free software, we are referring to freedom, not" , "price. Our General Public Licenses are designed to make sure that you" , "have the freedom to distribute copies of free software (and charge for" , "this service if you wish), that you receive source code or can get it" , "if you want it, that you can change the software or use pieces of it" , "in new free programs; and that you know you can do these things." 
, "" , " To protect your rights, we need to make restrictions that forbid" , "anyone to deny you these rights or to ask you to surrender the rights." , "These restrictions translate to certain responsibilities for you if you" , "distribute copies of the software, or if you modify it." , "" , " For example, if you distribute copies of such a program, whether" , "gratis or for a fee, you must give the recipients all the rights that" , "you have. You must make sure that they, too, receive or can get the" , "source code. And you must show them these terms so they know their" , "rights." , "" , " We protect your rights with two steps: (1) copyright the software, and" , "(2) offer you this license which gives you legal permission to copy," , "distribute and/or modify the software." , "" , " Also, for each author's protection and ours, we want to make certain" , "that everyone understands that there is no warranty for this free" , "software. If the software is modified by someone else and passed on, we" , "want its recipients to know that what they have is not the original, so" , "that any problems introduced by others will not reflect on the original" , "authors' reputations." , "" , " Finally, any free program is threatened constantly by software" , "patents. We wish to avoid the danger that redistributors of a free" , "program will individually obtain patent licenses, in effect making the" , "program proprietary. To prevent this, we have made it clear that any" , "patent must be licensed for everyone's free use or not licensed at all." , "" , " The precise terms and conditions for copying, distribution and" , "modification follow." , "" , " GNU GENERAL PUBLIC LICENSE" , " TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION" , "" , " 0. This License applies to any program or other work which contains" , "a notice placed by the copyright holder saying it may be distributed" , "under the terms of this General Public License. 
The \"Program\", below," , "refers to any such program or work, and a \"work based on the Program\"" , "means either the Program or any derivative work under copyright law:" , "that is to say, a work containing the Program or a portion of it," , "either verbatim or with modifications and/or translated into another" , "language. (Hereinafter, translation is included without limitation in" , "the term \"modification\".) Each licensee is addressed as \"you\"." , "" , "Activities other than copying, distribution and modification are not" , "covered by this License; they are outside its scope. The act of" , "running the Program is not restricted, and the output from the Program" , "is covered only if its contents constitute a work based on the" , "Program (independent of having been made by running the Program)." , "Whether that is true depends on what the Program does." , "" , " 1. You may copy and distribute verbatim copies of the Program's" , "source code as you receive it, in any medium, provided that you" , "conspicuously and appropriately publish on each copy an appropriate" , "copyright notice and disclaimer of warranty; keep intact all the" , "notices that refer to this License and to the absence of any warranty;" , "and give any other recipients of the Program a copy of this License" , "along with the Program." , "" , "You may charge a fee for the physical act of transferring a copy, and" , "you may at your option offer warranty protection in exchange for a fee." , "" , " 2. You may modify your copy or copies of the Program or any portion" , "of it, thus forming a work based on the Program, and copy and" , "distribute such modifications or work under the terms of Section 1" , "above, provided that you also meet all of these conditions:" , "" , " a) You must cause the modified files to carry prominent notices" , " stating that you changed the files and the date of any change." 
, "" , " b) You must cause any work that you distribute or publish, that in" , " whole or in part contains or is derived from the Program or any" , " part thereof, to be licensed as a whole at no charge to all third" , " parties under the terms of this License." , "" , " c) If the modified program normally reads commands interactively" , " when run, you must cause it, when started running for such" , " interactive use in the most ordinary way, to print or display an" , " announcement including an appropriate copyright notice and a" , " notice that there is no warranty (or else, saying that you provide" , " a warranty) and that users may redistribute the program under" , " these conditions, and telling the user how to view a copy of this" , " License. (Exception: if the Program itself is interactive but" , " does not normally print such an announcement, your work based on" , " the Program is not required to print an announcement.)" , "" , "These requirements apply to the modified work as a whole. If" , "identifiable sections of that work are not derived from the Program," , "and can be reasonably considered independent and separate works in" , "themselves, then this License, and its terms, do not apply to those" , "sections when you distribute them as separate works. But when you" , "distribute the same sections as part of a whole which is a work based" , "on the Program, the distribution of the whole must be on the terms of" , "this License, whose permissions for other licensees extend to the" , "entire whole, and thus to each and every part regardless of who wrote it." , "" , "Thus, it is not the intent of this section to claim rights or contest" , "your rights to work written entirely by you; rather, the intent is to" , "exercise the right to control the distribution of derivative or" , "collective works based on the Program." 
, "" , "In addition, mere aggregation of another work not based on the Program" , "with the Program (or with a work based on the Program) on a volume of" , "a storage or distribution medium does not bring the other work under" , "the scope of this License." , "" , " 3. You may copy and distribute the Program (or a work based on it," , "under Section 2) in object code or executable form under the terms of" , "Sections 1 and 2 above provided that you also do one of the following:" , "" , " a) Accompany it with the complete corresponding machine-readable" , " source code, which must be distributed under the terms of Sections" , " 1 and 2 above on a medium customarily used for software interchange; or," , "" , " b) Accompany it with a written offer, valid for at least three" , " years, to give any third party, for a charge no more than your" , " cost of physically performing source distribution, a complete" , " machine-readable copy of the corresponding source code, to be" , " distributed under the terms of Sections 1 and 2 above on a medium" , " customarily used for software interchange; or," , "" , " c) Accompany it with the information you received as to the offer" , " to distribute corresponding source code. (This alternative is" , " allowed only for noncommercial distribution and only if you" , " received the program in object code or executable form with such" , " an offer, in accord with Subsection b above.)" , "" , "The source code for a work means the preferred form of the work for" , "making modifications to it. For an executable work, complete source" , "code means all the source code for all modules it contains, plus any" , "associated interface definition files, plus the scripts used to" , "control compilation and installation of the executable. 
However, as a" , "special exception, the source code distributed need not include" , "anything that is normally distributed (in either source or binary" , "form) with the major components (compiler, kernel, and so on) of the" , "operating system on which the executable runs, unless that component" , "itself accompanies the executable." , "" , "If distribution of executable or object code is made by offering" , "access to copy from a designated place, then offering equivalent" , "access to copy the source code from the same place counts as" , "distribution of the source code, even though third parties are not" , "compelled to copy the source along with the object code." , "" , " 4. You may not copy, modify, sublicense, or distribute the Program" , "except as expressly provided under this License. Any attempt" , "otherwise to copy, modify, sublicense or distribute the Program is" , "void, and will automatically terminate your rights under this License." , "However, parties who have received copies, or rights, from you under" , "this License will not have their licenses terminated so long as such" , "parties remain in full compliance." , "" , " 5. You are not required to accept this License, since you have not" , "signed it. However, nothing else grants you permission to modify or" , "distribute the Program or its derivative works. These actions are" , "prohibited by law if you do not accept this License. Therefore, by" , "modifying or distributing the Program (or any work based on the" , "Program), you indicate your acceptance of this License to do so, and" , "all its terms and conditions for copying, distributing or modifying" , "the Program or works based on it." , "" , " 6. Each time you redistribute the Program (or any work based on the" , "Program), the recipient automatically receives a license from the" , "original licensor to copy, distribute or modify the Program subject to" , "these terms and conditions. 
You may not impose any further" , "restrictions on the recipients' exercise of the rights granted herein." , "You are not responsible for enforcing compliance by third parties to" , "this License." , "" , " 7. If, as a consequence of a court judgment or allegation of patent" , "infringement or for any other reason (not limited to patent issues)," , "conditions are imposed on you (whether by court order, agreement or" , "otherwise) that contradict the conditions of this License, they do not" , "excuse you from the conditions of this License. If you cannot" , "distribute so as to satisfy simultaneously your obligations under this" , "License and any other pertinent obligations, then as a consequence you" , "may not distribute the Program at all. For example, if a patent" , "license would not permit royalty-free redistribution of the Program by" , "all those who receive copies directly or indirectly through you, then" , "the only way you could satisfy both it and this License would be to" , "refrain entirely from distribution of the Program." , "" , "If any portion of this section is held invalid or unenforceable under" , "any particular circumstance, the balance of the section is intended to" , "apply and the section as a whole is intended to apply in other" , "circumstances." , "" , "It is not the purpose of this section to induce you to infringe any" , "patents or other property right claims or to contest validity of any" , "such claims; this section has the sole purpose of protecting the" , "integrity of the free software distribution system, which is" , "implemented by public license practices. Many people have made" , "generous contributions to the wide range of software distributed" , "through that system in reliance on consistent application of that" , "system; it is up to the author/donor to decide if he or she is willing" , "to distribute software through any other system and a licensee cannot" , "impose that choice." 
, "" , "This section is intended to make thoroughly clear what is believed to" , "be a consequence of the rest of this License." , "" , " 8. If the distribution and/or use of the Program is restricted in" , "certain countries either by patents or by copyrighted interfaces, the" , "original copyright holder who places the Program under this License" , "may add an explicit geographical distribution limitation excluding" , "those countries, so that distribution is permitted only in or among" , "countries not thus excluded. In such case, this License incorporates" , "the limitation as if written in the body of this License." , "" , " 9. The Free Software Foundation may publish revised and/or new versions" , "of the General Public License from time to time. Such new versions will" , "be similar in spirit to the present version, but may differ in detail to" , "address new problems or concerns." , "" , "Each version is given a distinguishing version number. If the Program" , "specifies a version number of this License which applies to it and \"any" , "later version\", you have the option of following the terms and conditions" , "either of that version or of any later version published by the Free" , "Software Foundation. If the Program does not specify a version number of" , "this License, you may choose any version ever published by the Free Software" , "Foundation." , "" , " 10. If you wish to incorporate parts of the Program into other free" , "programs whose distribution conditions are different, write to the author" , "to ask for permission. For software which is copyrighted by the Free" , "Software Foundation, write to the Free Software Foundation; we sometimes" , "make exceptions for this. Our decision will be guided by the two goals" , "of preserving the free status of all derivatives of our free software and" , "of promoting the sharing and reuse of software generally." , "" , " NO WARRANTY" , "" , " 11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY" , "FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN" , "OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES" , "PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED" , "OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF" , "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS" , "TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE" , "PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING," , "REPAIR OR CORRECTION." , "" , " 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR" , "REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES," , "INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING" , "OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED" , "TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY" , "YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER" , "PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE" , "POSSIBILITY OF SUCH DAMAGES." , "" , " END OF TERMS AND CONDITIONS" , "" , " How to Apply These Terms to Your New Programs" , "" , " If you develop a new program, and you want it to be of the greatest" , "possible use to the public, the best way to achieve this is to make it" , "free software which everyone can redistribute and change under these terms." , "" , " To do so, attach the following notices to the program. It is safest" , "to attach them to the start of each source file to most effectively" , "convey the exclusion of warranty; and each file should have at least" , "the \"copyright\" line and a pointer to where the full notice is found." 
, "" , " " , " Copyright (C) " , "" , " This program is free software; you can redistribute it and/or modify" , " it under the terms of the GNU General Public License as published by" , " the Free Software Foundation; either version 2 of the License, or" , " (at your option) any later version." , "" , " This program is distributed in the hope that it will be useful," , " but WITHOUT ANY WARRANTY; without even the implied warranty of" , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" , " GNU General Public License for more details." , "" , " You should have received a copy of the GNU General Public License along" , " with this program; if not, write to the Free Software Foundation, Inc.," , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA." , "" , "Also add information on how to contact you by electronic and paper mail." , "" , "If the program is interactive, make it output a short notice like this" , "when it starts in an interactive mode:" , "" , " Gnomovision version 69, Copyright (C) year name of author" , " Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'." , " This is free software, and you are welcome to redistribute it" , " under certain conditions; type `show c' for details." , "" , "The hypothetical commands `show w' and `show c' should show the appropriate" , "parts of the General Public License. Of course, the commands you use may" , "be called something other than `show w' and `show c'; they could even be" , "mouse-clicks or menu items--whatever suits your program." , "" , "You should also get your employer (if you work as a programmer) or your" , "school, if any, to sign a \"copyright disclaimer\" for the program, if" , "necessary. Here is a sample; alter the names:" , "" , " Yoyodyne, Inc., hereby disclaims all copyright interest in the program" , " `Gnomovision' (which makes passes at compilers) written by James Hacker." 
, "" , " , 1 April 1989" , " Ty Coon, President of Vice" , "" , "This General Public License does not permit incorporating your program into" , "proprietary programs. If your program is a subroutine library, you may" , "consider it more useful to permit linking proprietary applications with the" , "library. If this is what you want to do, use the GNU Lesser General" , "Public License instead of this License." ] gplv3 :: License gplv3 = unlines [ " GNU GENERAL PUBLIC LICENSE" , " Version 3, 29 June 2007" , "" , " Copyright (C) 2007 Free Software Foundation, Inc. " , " Everyone is permitted to copy and distribute verbatim copies" , " of this license document, but changing it is not allowed." , "" , " Preamble" , "" , " The GNU General Public License is a free, copyleft license for" , "software and other kinds of works." , "" , " The licenses for most software and other practical works are designed" , "to take away your freedom to share and change the works. By contrast," , "the GNU General Public License is intended to guarantee your freedom to" , "share and change all versions of a program--to make sure it remains free" , "software for all its users. We, the Free Software Foundation, use the" , "GNU General Public License for most of our software; it applies also to" , "any other work released this way by its authors. You can apply it to" , "your programs, too." , "" , " When we speak of free software, we are referring to freedom, not" , "price. Our General Public Licenses are designed to make sure that you" , "have the freedom to distribute copies of free software (and charge for" , "them if you wish), that you receive source code or can get it if you" , "want it, that you can change the software or use pieces of it in new" , "free programs, and that you know you can do these things." , "" , " To protect your rights, we need to prevent others from denying you" , "these rights or asking you to surrender the rights. 
Therefore, you have" , "certain responsibilities if you distribute copies of the software, or if" , "you modify it: responsibilities to respect the freedom of others." , "" , " For example, if you distribute copies of such a program, whether" , "gratis or for a fee, you must pass on to the recipients the same" , "freedoms that you received. You must make sure that they, too, receive" , "or can get the source code. And you must show them these terms so they" , "know their rights." , "" , " Developers that use the GNU GPL protect your rights with two steps:" , "(1) assert copyright on the software, and (2) offer you this License" , "giving you legal permission to copy, distribute and/or modify it." , "" , " For the developers' and authors' protection, the GPL clearly explains" , "that there is no warranty for this free software. For both users' and" , "authors' sake, the GPL requires that modified versions be marked as" , "changed, so that their problems will not be attributed erroneously to" , "authors of previous versions." , "" , " Some devices are designed to deny users access to install or run" , "modified versions of the software inside them, although the manufacturer" , "can do so. This is fundamentally incompatible with the aim of" , "protecting users' freedom to change the software. The systematic" , "pattern of such abuse occurs in the area of products for individuals to" , "use, which is precisely where it is most unacceptable. Therefore, we" , "have designed this version of the GPL to prohibit the practice for those" , "products. If such problems arise substantially in other domains, we" , "stand ready to extend this provision to those domains in future versions" , "of the GPL, as needed to protect the freedom of users." , "" , " Finally, every program is threatened constantly by software patents." 
, "States should not allow patents to restrict development and use of" , "software on general-purpose computers, but in those that do, we wish to" , "avoid the special danger that patents applied to a free program could" , "make it effectively proprietary. To prevent this, the GPL assures that" , "patents cannot be used to render the program non-free." , "" , " The precise terms and conditions for copying, distribution and" , "modification follow." , "" , " TERMS AND CONDITIONS" , "" , " 0. Definitions." , "" , " \"This License\" refers to version 3 of the GNU General Public License." , "" , " \"Copyright\" also means copyright-like laws that apply to other kinds of" , "works, such as semiconductor masks." , "" , " \"The Program\" refers to any copyrightable work licensed under this" , "License. Each licensee is addressed as \"you\". \"Licensees\" and" , "\"recipients\" may be individuals or organizations." , "" , " To \"modify\" a work means to copy from or adapt all or part of the work" , "in a fashion requiring copyright permission, other than the making of an" , "exact copy. The resulting work is called a \"modified version\" of the" , "earlier work or a work \"based on\" the earlier work." , "" , " A \"covered work\" means either the unmodified Program or a work based" , "on the Program." , "" , " To \"propagate\" a work means to do anything with it that, without" , "permission, would make you directly or secondarily liable for" , "infringement under applicable copyright law, except executing it on a" , "computer or modifying a private copy. Propagation includes copying," , "distribution (with or without modification), making available to the" , "public, and in some countries other activities as well." , "" , " To \"convey\" a work means any kind of propagation that enables other" , "parties to make or receive copies. Mere interaction with a user through" , "a computer network, with no transfer of a copy, is not conveying." 
, "" , " An interactive user interface displays \"Appropriate Legal Notices\"" , "to the extent that it includes a convenient and prominently visible" , "feature that (1) displays an appropriate copyright notice, and (2)" , "tells the user that there is no warranty for the work (except to the" , "extent that warranties are provided), that licensees may convey the" , "work under this License, and how to view a copy of this License. If" , "the interface presents a list of user commands or options, such as a" , "menu, a prominent item in the list meets this criterion." , "" , " 1. Source Code." , "" , " The \"source code\" for a work means the preferred form of the work" , "for making modifications to it. \"Object code\" means any non-source" , "form of a work." , "" , " A \"Standard Interface\" means an interface that either is an official" , "standard defined by a recognized standards body, or, in the case of" , "interfaces specified for a particular programming language, one that" , "is widely used among developers working in that language." , "" , " The \"System Libraries\" of an executable work include anything, other" , "than the work as a whole, that (a) is included in the normal form of" , "packaging a Major Component, but which is not part of that Major" , "Component, and (b) serves only to enable use of the work with that" , "Major Component, or to implement a Standard Interface for which an" , "implementation is available to the public in source code form. A" , "\"Major Component\", in this context, means a major essential component" , "(kernel, window system, and so on) of the specific operating system" , "(if any) on which the executable work runs, or a compiler used to" , "produce the work, or an object code interpreter used to run it." 
, "" , " The \"Corresponding Source\" for a work in object code form means all" , "the source code needed to generate, install, and (for an executable" , "work) run the object code and to modify the work, including scripts to" , "control those activities. However, it does not include the work's" , "System Libraries, or general-purpose tools or generally available free" , "programs which are used unmodified in performing those activities but" , "which are not part of the work. For example, Corresponding Source" , "includes interface definition files associated with source files for" , "the work, and the source code for shared libraries and dynamically" , "linked subprograms that the work is specifically designed to require," , "such as by intimate data communication or control flow between those" , "subprograms and other parts of the work." , "" , " The Corresponding Source need not include anything that users" , "can regenerate automatically from other parts of the Corresponding" , "Source." , "" , " The Corresponding Source for a work in source code form is that" , "same work." , "" , " 2. Basic Permissions." , "" , " All rights granted under this License are granted for the term of" , "copyright on the Program, and are irrevocable provided the stated" , "conditions are met. This License explicitly affirms your unlimited" , "permission to run the unmodified Program. The output from running a" , "covered work is covered by this License only if the output, given its" , "content, constitutes a covered work. This License acknowledges your" , "rights of fair use or other equivalent, as provided by copyright law." , "" , " You may make, run and propagate covered works that you do not" , "convey, without conditions so long as your license otherwise remains" , "in force. 
You may convey covered works to others for the sole purpose" , "of having them make modifications exclusively for you, or provide you" , "with facilities for running those works, provided that you comply with" , "the terms of this License in conveying all material for which you do" , "not control copyright. Those thus making or running the covered works" , "for you must do so exclusively on your behalf, under your direction" , "and control, on terms that prohibit them from making any copies of" , "your copyrighted material outside their relationship with you." , "" , " Conveying under any other circumstances is permitted solely under" , "the conditions stated below. Sublicensing is not allowed; section 10" , "makes it unnecessary." , "" , " 3. Protecting Users' Legal Rights From Anti-Circumvention Law." , "" , " No covered work shall be deemed part of an effective technological" , "measure under any applicable law fulfilling obligations under article" , "11 of the WIPO copyright treaty adopted on 20 December 1996, or" , "similar laws prohibiting or restricting circumvention of such" , "measures." , "" , " When you convey a covered work, you waive any legal power to forbid" , "circumvention of technological measures to the extent such circumvention" , "is effected by exercising rights under this License with respect to" , "the covered work, and you disclaim any intention to limit operation or" , "modification of the work as a means of enforcing, against the work's" , "users, your or third parties' legal rights to forbid circumvention of" , "technological measures." , "" , " 4. Conveying Verbatim Copies." 
, "" , " You may convey verbatim copies of the Program's source code as you" , "receive it, in any medium, provided that you conspicuously and" , "appropriately publish on each copy an appropriate copyright notice;" , "keep intact all notices stating that this License and any" , "non-permissive terms added in accord with section 7 apply to the code;" , "keep intact all notices of the absence of any warranty; and give all" , "recipients a copy of this License along with the Program." , "" , " You may charge any price or no price for each copy that you convey," , "and you may offer support or warranty protection for a fee." , "" , " 5. Conveying Modified Source Versions." , "" , " You may convey a work based on the Program, or the modifications to" , "produce it from the Program, in the form of source code under the" , "terms of section 4, provided that you also meet all of these conditions:" , "" , " a) The work must carry prominent notices stating that you modified" , " it, and giving a relevant date." , "" , " b) The work must carry prominent notices stating that it is" , " released under this License and any conditions added under section" , " 7. This requirement modifies the requirement in section 4 to" , " \"keep intact all notices\"." , "" , " c) You must license the entire work, as a whole, under this" , " License to anyone who comes into possession of a copy. This" , " License will therefore apply, along with any applicable section 7" , " additional terms, to the whole of the work, and all its parts," , " regardless of how they are packaged. This License gives no" , " permission to license the work in any other way, but it does not" , " invalidate such permission if you have separately received it." , "" , " d) If the work has interactive user interfaces, each must display" , " Appropriate Legal Notices; however, if the Program has interactive" , " interfaces that do not display Appropriate Legal Notices, your" , " work need not make them do so." 
, "" , " A compilation of a covered work with other separate and independent" , "works, which are not by their nature extensions of the covered work," , "and which are not combined with it such as to form a larger program," , "in or on a volume of a storage or distribution medium, is called an" , "\"aggregate\" if the compilation and its resulting copyright are not" , "used to limit the access or legal rights of the compilation's users" , "beyond what the individual works permit. Inclusion of a covered work" , "in an aggregate does not cause this License to apply to the other" , "parts of the aggregate." , "" , " 6. Conveying Non-Source Forms." , "" , " You may convey a covered work in object code form under the terms" , "of sections 4 and 5, provided that you also convey the" , "machine-readable Corresponding Source under the terms of this License," , "in one of these ways:" , "" , " a) Convey the object code in, or embodied in, a physical product" , " (including a physical distribution medium), accompanied by the" , " Corresponding Source fixed on a durable physical medium" , " customarily used for software interchange." , "" , " b) Convey the object code in, or embodied in, a physical product" , " (including a physical distribution medium), accompanied by a" , " written offer, valid for at least three years and valid for as" , " long as you offer spare parts or customer support for that product" , " model, to give anyone who possesses the object code either (1) a" , " copy of the Corresponding Source for all the software in the" , " product that is covered by this License, on a durable physical" , " medium customarily used for software interchange, for a price no" , " more than your reasonable cost of physically performing this" , " conveying of source, or (2) access to copy the" , " Corresponding Source from a network server at no charge." 
, "" , " c) Convey individual copies of the object code with a copy of the" , " written offer to provide the Corresponding Source. This" , " alternative is allowed only occasionally and noncommercially, and" , " only if you received the object code with such an offer, in accord" , " with subsection 6b." , "" , " d) Convey the object code by offering access from a designated" , " place (gratis or for a charge), and offer equivalent access to the" , " Corresponding Source in the same way through the same place at no" , " further charge. You need not require recipients to copy the" , " Corresponding Source along with the object code. If the place to" , " copy the object code is a network server, the Corresponding Source" , " may be on a different server (operated by you or a third party)" , " that supports equivalent copying facilities, provided you maintain" , " clear directions next to the object code saying where to find the" , " Corresponding Source. Regardless of what server hosts the" , " Corresponding Source, you remain obligated to ensure that it is" , " available for as long as needed to satisfy these requirements." , "" , " e) Convey the object code using peer-to-peer transmission, provided" , " you inform other peers where the object code and Corresponding" , " Source of the work are being offered to the general public at no" , " charge under subsection 6d." , "" , " A separable portion of the object code, whose source code is excluded" , "from the Corresponding Source as a System Library, need not be" , "included in conveying the object code work." , "" , " A \"User Product\" is either (1) a \"consumer product\", which means any" , "tangible personal property which is normally used for personal, family," , "or household purposes, or (2) anything designed or sold for incorporation" , "into a dwelling. In determining whether a product is a consumer product," , "doubtful cases shall be resolved in favor of coverage. 
For a particular" , "product received by a particular user, \"normally used\" refers to a" , "typical or common use of that class of product, regardless of the status" , "of the particular user or of the way in which the particular user" , "actually uses, or expects or is expected to use, the product. A product" , "is a consumer product regardless of whether the product has substantial" , "commercial, industrial or non-consumer uses, unless such uses represent" , "the only significant mode of use of the product." , "" , " \"Installation Information\" for a User Product means any methods," , "procedures, authorization keys, or other information required to install" , "and execute modified versions of a covered work in that User Product from" , "a modified version of its Corresponding Source. The information must" , "suffice to ensure that the continued functioning of the modified object" , "code is in no case prevented or interfered with solely because" , "modification has been made." , "" , " If you convey an object code work under this section in, or with, or" , "specifically for use in, a User Product, and the conveying occurs as" , "part of a transaction in which the right of possession and use of the" , "User Product is transferred to the recipient in perpetuity or for a" , "fixed term (regardless of how the transaction is characterized), the" , "Corresponding Source conveyed under this section must be accompanied" , "by the Installation Information. But this requirement does not apply" , "if neither you nor any third party retains the ability to install" , "modified object code on the User Product (for example, the work has" , "been installed in ROM)." , "" , " The requirement to provide Installation Information does not include a" , "requirement to continue to provide support service, warranty, or updates" , "for a work that has been modified or installed by the recipient, or for" , "the User Product in which it has been modified or installed. 
Access to a" , "network may be denied when the modification itself materially and" , "adversely affects the operation of the network or violates the rules and" , "protocols for communication across the network." , "" , " Corresponding Source conveyed, and Installation Information provided," , "in accord with this section must be in a format that is publicly" , "documented (and with an implementation available to the public in" , "source code form), and must require no special password or key for" , "unpacking, reading or copying." , "" , " 7. Additional Terms." , "" , " \"Additional permissions\" are terms that supplement the terms of this" , "License by making exceptions from one or more of its conditions." , "Additional permissions that are applicable to the entire Program shall" , "be treated as though they were included in this License, to the extent" , "that they are valid under applicable law. If additional permissions" , "apply only to part of the Program, that part may be used separately" , "under those permissions, but the entire Program remains governed by" , "this License without regard to the additional permissions." , "" , " When you convey a copy of a covered work, you may at your option" , "remove any additional permissions from that copy, or from any part of" , "it. (Additional permissions may be written to require their own" , "removal in certain cases when you modify the work.) You may place" , "additional permissions on material, added by you to a covered work," , "for which you have or can give appropriate copyright permission." 
, "" , " Notwithstanding any other provision of this License, for material you" , "add to a covered work, you may (if authorized by the copyright holders of" , "that material) supplement the terms of this License with terms:" , "" , " a) Disclaiming warranty or limiting liability differently from the" , " terms of sections 15 and 16 of this License; or" , "" , " b) Requiring preservation of specified reasonable legal notices or" , " author attributions in that material or in the Appropriate Legal" , " Notices displayed by works containing it; or" , "" , " c) Prohibiting misrepresentation of the origin of that material, or" , " requiring that modified versions of such material be marked in" , " reasonable ways as different from the original version; or" , "" , " d) Limiting the use for publicity purposes of names of licensors or" , " authors of the material; or" , "" , " e) Declining to grant rights under trademark law for use of some" , " trade names, trademarks, or service marks; or" , "" , " f) Requiring indemnification of licensors and authors of that" , " material by anyone who conveys the material (or modified versions of" , " it) with contractual assumptions of liability to the recipient, for" , " any liability that these contractual assumptions directly impose on" , " those licensors and authors." , "" , " All other non-permissive additional terms are considered \"further" , "restrictions\" within the meaning of section 10. If the Program as you" , "received it, or any part of it, contains a notice stating that it is" , "governed by this License along with a term that is a further" , "restriction, you may remove that term. If a license document contains" , "a further restriction but permits relicensing or conveying under this" , "License, you may add to a covered work material governed by the terms" , "of that license document, provided that the further restriction does" , "not survive such relicensing or conveying." 
, "" , " If you add terms to a covered work in accord with this section, you" , "must place, in the relevant source files, a statement of the" , "additional terms that apply to those files, or a notice indicating" , "where to find the applicable terms." , "" , " Additional terms, permissive or non-permissive, may be stated in the" , "form of a separately written license, or stated as exceptions;" , "the above requirements apply either way." , "" , " 8. Termination." , "" , " You may not propagate or modify a covered work except as expressly" , "provided under this License. Any attempt otherwise to propagate or" , "modify it is void, and will automatically terminate your rights under" , "this License (including any patent licenses granted under the third" , "paragraph of section 11)." , "" , " However, if you cease all violation of this License, then your" , "license from a particular copyright holder is reinstated (a)" , "provisionally, unless and until the copyright holder explicitly and" , "finally terminates your license, and (b) permanently, if the copyright" , "holder fails to notify you of the violation by some reasonable means" , "prior to 60 days after the cessation." , "" , " Moreover, your license from a particular copyright holder is" , "reinstated permanently if the copyright holder notifies you of the" , "violation by some reasonable means, this is the first time you have" , "received notice of violation of this License (for any work) from that" , "copyright holder, and you cure the violation prior to 30 days after" , "your receipt of the notice." , "" , " Termination of your rights under this section does not terminate the" , "licenses of parties who have received copies or rights from you under" , "this License. If your rights have been terminated and not permanently" , "reinstated, you do not qualify to receive new licenses for the same" , "material under section 10." , "" , " 9. Acceptance Not Required for Having Copies." 
, "" , " You are not required to accept this License in order to receive or" , "run a copy of the Program. Ancillary propagation of a covered work" , "occurring solely as a consequence of using peer-to-peer transmission" , "to receive a copy likewise does not require acceptance. However," , "nothing other than this License grants you permission to propagate or" , "modify any covered work. These actions infringe copyright if you do" , "not accept this License. Therefore, by modifying or propagating a" , "covered work, you indicate your acceptance of this License to do so." , "" , " 10. Automatic Licensing of Downstream Recipients." , "" , " Each time you convey a covered work, the recipient automatically" , "receives a license from the original licensors, to run, modify and" , "propagate that work, subject to this License. You are not responsible" , "for enforcing compliance by third parties with this License." , "" , " An \"entity transaction\" is a transaction transferring control of an" , "organization, or substantially all assets of one, or subdividing an" , "organization, or merging organizations. If propagation of a covered" , "work results from an entity transaction, each party to that" , "transaction who receives a copy of the work also receives whatever" , "licenses to the work the party's predecessor in interest had or could" , "give under the previous paragraph, plus a right to possession of the" , "Corresponding Source of the work from the predecessor in interest, if" , "the predecessor has it or can get it with reasonable efforts." , "" , " You may not impose any further restrictions on the exercise of the" , "rights granted or affirmed under this License. 
For example, you may" , "not impose a license fee, royalty, or other charge for exercise of" , "rights granted under this License, and you may not initiate litigation" , "(including a cross-claim or counterclaim in a lawsuit) alleging that" , "any patent claim is infringed by making, using, selling, offering for" , "sale, or importing the Program or any portion of it." , "" , " 11. Patents." , "" , " A \"contributor\" is a copyright holder who authorizes use under this" , "License of the Program or a work on which the Program is based. The" , "work thus licensed is called the contributor's \"contributor version\"." , "" , " A contributor's \"essential patent claims\" are all patent claims" , "owned or controlled by the contributor, whether already acquired or" , "hereafter acquired, that would be infringed by some manner, permitted" , "by this License, of making, using, or selling its contributor version," , "but do not include claims that would be infringed only as a" , "consequence of further modification of the contributor version. For" , "purposes of this definition, \"control\" includes the right to grant" , "patent sublicenses in a manner consistent with the requirements of" , "this License." , "" , " Each contributor grants you a non-exclusive, worldwide, royalty-free" , "patent license under the contributor's essential patent claims, to" , "make, use, sell, offer for sale, import and otherwise run, modify and" , "propagate the contents of its contributor version." , "" , " In the following three paragraphs, a \"patent license\" is any express" , "agreement or commitment, however denominated, not to enforce a patent" , "(such as an express permission to practice a patent or covenant not to" , "sue for patent infringement). To \"grant\" such a patent license to a" , "party means to make such an agreement or commitment not to enforce a" , "patent against the party." 
, "" , " If you convey a covered work, knowingly relying on a patent license," , "and the Corresponding Source of the work is not available for anyone" , "to copy, free of charge and under the terms of this License, through a" , "publicly available network server or other readily accessible means," , "then you must either (1) cause the Corresponding Source to be so" , "available, or (2) arrange to deprive yourself of the benefit of the" , "patent license for this particular work, or (3) arrange, in a manner" , "consistent with the requirements of this License, to extend the patent" , "license to downstream recipients. \"Knowingly relying\" means you have" , "actual knowledge that, but for the patent license, your conveying the" , "covered work in a country, or your recipient's use of the covered work" , "in a country, would infringe one or more identifiable patents in that" , "country that you have reason to believe are valid." , "" , " If, pursuant to or in connection with a single transaction or" , "arrangement, you convey, or propagate by procuring conveyance of, a" , "covered work, and grant a patent license to some of the parties" , "receiving the covered work authorizing them to use, propagate, modify" , "or convey a specific copy of the covered work, then the patent license" , "you grant is automatically extended to all recipients of the covered" , "work and works based on it." , "" , " A patent license is \"discriminatory\" if it does not include within" , "the scope of its coverage, prohibits the exercise of, or is" , "conditioned on the non-exercise of one or more of the rights that are" , "specifically granted under this License. 
You may not convey a covered" , "work if you are a party to an arrangement with a third party that is" , "in the business of distributing software, under which you make payment" , "to the third party based on the extent of your activity of conveying" , "the work, and under which the third party grants, to any of the" , "parties who would receive the covered work from you, a discriminatory" , "patent license (a) in connection with copies of the covered work" , "conveyed by you (or copies made from those copies), or (b) primarily" , "for and in connection with specific products or compilations that" , "contain the covered work, unless you entered into that arrangement," , "or that patent license was granted, prior to 28 March 2007." , "" , " Nothing in this License shall be construed as excluding or limiting" , "any implied license or other defenses to infringement that may" , "otherwise be available to you under applicable patent law." , "" , " 12. No Surrender of Others' Freedom." , "" , " If conditions are imposed on you (whether by court order, agreement or" , "otherwise) that contradict the conditions of this License, they do not" , "excuse you from the conditions of this License. If you cannot convey a" , "covered work so as to satisfy simultaneously your obligations under this" , "License and any other pertinent obligations, then as a consequence you may" , "not convey it at all. For example, if you agree to terms that obligate you" , "to collect a royalty for further conveying from those to whom you convey" , "the Program, the only way you could satisfy both those terms and this" , "License would be to refrain entirely from conveying the Program." , "" , " 13. Use with the GNU Affero General Public License." 
, "" , " Notwithstanding any other provision of this License, you have" , "permission to link or combine any covered work with a work licensed" , "under version 3 of the GNU Affero General Public License into a single" , "combined work, and to convey the resulting work. The terms of this" , "License will continue to apply to the part which is the covered work," , "but the special requirements of the GNU Affero General Public License," , "section 13, concerning interaction through a network will apply to the" , "combination as such." , "" , " 14. Revised Versions of this License." , "" , " The Free Software Foundation may publish revised and/or new versions of" , "the GNU General Public License from time to time. Such new versions will" , "be similar in spirit to the present version, but may differ in detail to" , "address new problems or concerns." , "" , " Each version is given a distinguishing version number. If the" , "Program specifies that a certain numbered version of the GNU General" , "Public License \"or any later version\" applies to it, you have the" , "option of following the terms and conditions either of that numbered" , "version or of any later version published by the Free Software" , "Foundation. If the Program does not specify a version number of the" , "GNU General Public License, you may choose any version ever published" , "by the Free Software Foundation." , "" , " If the Program specifies that a proxy can decide which future" , "versions of the GNU General Public License can be used, that proxy's" , "public statement of acceptance of a version permanently authorizes you" , "to choose that version for the Program." , "" , " Later license versions may give you additional or different" , "permissions. However, no additional obligations are imposed on any" , "author or copyright holder as a result of your choosing to follow a" , "later version." , "" , " 15. Disclaimer of Warranty." 
, "" , " THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY" , "APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT" , "HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY" , "OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO," , "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM" , "IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF" , "ALL NECESSARY SERVICING, REPAIR OR CORRECTION." , "" , " 16. Limitation of Liability." , "" , " IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS" , "THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY" , "GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE" , "USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF" , "DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD" , "PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS)," , "EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF" , "SUCH DAMAGES." , "" , " 17. Interpretation of Sections 15 and 16." , "" , " If the disclaimer of warranty and limitation of liability provided" , "above cannot be given local legal effect according to their terms," , "reviewing courts shall apply local law that most closely approximates" , "an absolute waiver of all civil liability in connection with the" , "Program, unless a warranty or assumption of liability accompanies a" , "copy of the Program in return for a fee." 
, "" , " END OF TERMS AND CONDITIONS" , "" , " How to Apply These Terms to Your New Programs" , "" , " If you develop a new program, and you want it to be of the greatest" , "possible use to the public, the best way to achieve this is to make it" , "free software which everyone can redistribute and change under these terms." , "" , " To do so, attach the following notices to the program. It is safest" , "to attach them to the start of each source file to most effectively" , "state the exclusion of warranty; and each file should have at least" , "the \"copyright\" line and a pointer to where the full notice is found." , "" , " " , " Copyright (C) " , "" , " This program is free software: you can redistribute it and/or modify" , " it under the terms of the GNU General Public License as published by" , " the Free Software Foundation, either version 3 of the License, or" , " (at your option) any later version." , "" , " This program is distributed in the hope that it will be useful," , " but WITHOUT ANY WARRANTY; without even the implied warranty of" , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" , " GNU General Public License for more details." , "" , " You should have received a copy of the GNU General Public License" , " along with this program. If not, see ." , "" , "Also add information on how to contact you by electronic and paper mail." , "" , " If the program does terminal interaction, make it output a short" , "notice like this when it starts in an interactive mode:" , "" , " Copyright (C) " , " This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'." , " This is free software, and you are welcome to redistribute it" , " under certain conditions; type `show c' for details." , "" , "The hypothetical commands `show w' and `show c' should show the appropriate" , "parts of the General Public License. Of course, your program's commands" , "might be different; for a GUI interface, you would use an \"about box\"." 
, "" , " You should also get your employer (if you work as a programmer) or school," , "if any, to sign a \"copyright disclaimer\" for the program, if necessary." , "For more information on this, and how to apply and follow the GNU GPL, see" , "." , "" , " The GNU General Public License does not permit incorporating your program" , "into proprietary programs. If your program is a subroutine library, you" , "may consider it more useful to permit linking proprietary applications with" , "the library. If this is what you want to do, use the GNU Lesser General" , "Public License instead of this License. But first, please read" , "." ] agplv3 :: License agplv3 = unlines [ " GNU AFFERO GENERAL PUBLIC LICENSE" , " Version 3, 19 November 2007" , "" , " Copyright (C) 2007 Free Software Foundation, Inc. " , " Everyone is permitted to copy and distribute verbatim copies" , " of this license document, but changing it is not allowed." , "" , " Preamble" , "" , " The GNU Affero General Public License is a free, copyleft license for" , "software and other kinds of works, specifically designed to ensure" , "cooperation with the community in the case of network server software." , "" , " The licenses for most software and other practical works are designed" , "to take away your freedom to share and change the works. By contrast," , "our General Public Licenses are intended to guarantee your freedom to" , "share and change all versions of a program--to make sure it remains free" , "software for all its users." , "" , " When we speak of free software, we are referring to freedom, not" , "price. Our General Public Licenses are designed to make sure that you" , "have the freedom to distribute copies of free software (and charge for" , "them if you wish), that you receive source code or can get it if you" , "want it, that you can change the software or use pieces of it in new" , "free programs, and that you know you can do these things." 
, "" , " Developers that use our General Public Licenses protect your rights" , "with two steps: (1) assert copyright on the software, and (2) offer" , "you this License which gives you legal permission to copy, distribute" , "and/or modify the software." , "" , " A secondary benefit of defending all users' freedom is that" , "improvements made in alternate versions of the program, if they" , "receive widespread use, become available for other developers to" , "incorporate. Many developers of free software are heartened and" , "encouraged by the resulting cooperation. However, in the case of" , "software used on network servers, this result may fail to come about." , "The GNU General Public License permits making a modified version and" , "letting the public access it on a server without ever releasing its" , "source code to the public." , "" , " The GNU Affero General Public License is designed specifically to" , "ensure that, in such cases, the modified source code becomes available" , "to the community. It requires the operator of a network server to" , "provide the source code of the modified version running there to the" , "users of that server. Therefore, public use of a modified version, on" , "a publicly accessible server, gives the public access to the source" , "code of the modified version." , "" , " An older license, called the Affero General Public License and" , "published by Affero, was designed to accomplish similar goals. This is" , "a different license, not a version of the Affero GPL, but Affero has" , "released a new version of the Affero GPL which permits relicensing under" , "this license." , "" , " The precise terms and conditions for copying, distribution and" , "modification follow." , "" , " TERMS AND CONDITIONS" , "" , " 0. Definitions." , "" , " \"This License\" refers to version 3 of the GNU Affero General Public License." 
, "" , " \"Copyright\" also means copyright-like laws that apply to other kinds of" , "works, such as semiconductor masks." , "" , " \"The Program\" refers to any copyrightable work licensed under this" , "License. Each licensee is addressed as \"you\". \"Licensees\" and" , "\"recipients\" may be individuals or organizations." , "" , " To \"modify\" a work means to copy from or adapt all or part of the work" , "in a fashion requiring copyright permission, other than the making of an" , "exact copy. The resulting work is called a \"modified version\" of the" , "earlier work or a work \"based on\" the earlier work." , "" , " A \"covered work\" means either the unmodified Program or a work based" , "on the Program." , "" , " To \"propagate\" a work means to do anything with it that, without" , "permission, would make you directly or secondarily liable for" , "infringement under applicable copyright law, except executing it on a" , "computer or modifying a private copy. Propagation includes copying," , "distribution (with or without modification), making available to the" , "public, and in some countries other activities as well." , "" , " To \"convey\" a work means any kind of propagation that enables other" , "parties to make or receive copies. Mere interaction with a user through" , "a computer network, with no transfer of a copy, is not conveying." , "" , " An interactive user interface displays \"Appropriate Legal Notices\"" , "to the extent that it includes a convenient and prominently visible" , "feature that (1) displays an appropriate copyright notice, and (2)" , "tells the user that there is no warranty for the work (except to the" , "extent that warranties are provided), that licensees may convey the" , "work under this License, and how to view a copy of this License. If" , "the interface presents a list of user commands or options, such as a" , "menu, a prominent item in the list meets this criterion." , "" , " 1. Source Code." 
, "" , " The \"source code\" for a work means the preferred form of the work" , "for making modifications to it. \"Object code\" means any non-source" , "form of a work." , "" , " A \"Standard Interface\" means an interface that either is an official" , "standard defined by a recognized standards body, or, in the case of" , "interfaces specified for a particular programming language, one that" , "is widely used among developers working in that language." , "" , " The \"System Libraries\" of an executable work include anything, other" , "than the work as a whole, that (a) is included in the normal form of" , "packaging a Major Component, but which is not part of that Major" , "Component, and (b) serves only to enable use of the work with that" , "Major Component, or to implement a Standard Interface for which an" , "implementation is available to the public in source code form. A" , "\"Major Component\", in this context, means a major essential component" , "(kernel, window system, and so on) of the specific operating system" , "(if any) on which the executable work runs, or a compiler used to" , "produce the work, or an object code interpreter used to run it." , "" , " The \"Corresponding Source\" for a work in object code form means all" , "the source code needed to generate, install, and (for an executable" , "work) run the object code and to modify the work, including scripts to" , "control those activities. However, it does not include the work's" , "System Libraries, or general-purpose tools or generally available free" , "programs which are used unmodified in performing those activities but" , "which are not part of the work. 
For example, Corresponding Source" , "includes interface definition files associated with source files for" , "the work, and the source code for shared libraries and dynamically" , "linked subprograms that the work is specifically designed to require," , "such as by intimate data communication or control flow between those" , "subprograms and other parts of the work." , "" , " The Corresponding Source need not include anything that users" , "can regenerate automatically from other parts of the Corresponding" , "Source." , "" , " The Corresponding Source for a work in source code form is that" , "same work." , "" , " 2. Basic Permissions." , "" , " All rights granted under this License are granted for the term of" , "copyright on the Program, and are irrevocable provided the stated" , "conditions are met. This License explicitly affirms your unlimited" , "permission to run the unmodified Program. The output from running a" , "covered work is covered by this License only if the output, given its" , "content, constitutes a covered work. This License acknowledges your" , "rights of fair use or other equivalent, as provided by copyright law." , "" , " You may make, run and propagate covered works that you do not" , "convey, without conditions so long as your license otherwise remains" , "in force. You may convey covered works to others for the sole purpose" , "of having them make modifications exclusively for you, or provide you" , "with facilities for running those works, provided that you comply with" , "the terms of this License in conveying all material for which you do" , "not control copyright. Those thus making or running the covered works" , "for you must do so exclusively on your behalf, under your direction" , "and control, on terms that prohibit them from making any copies of" , "your copyrighted material outside their relationship with you." , "" , " Conveying under any other circumstances is permitted solely under" , "the conditions stated below. 
Sublicensing is not allowed; section 10" , "makes it unnecessary." , "" , " 3. Protecting Users' Legal Rights From Anti-Circumvention Law." , "" , " No covered work shall be deemed part of an effective technological" , "measure under any applicable law fulfilling obligations under article" , "11 of the WIPO copyright treaty adopted on 20 December 1996, or" , "similar laws prohibiting or restricting circumvention of such" , "measures." , "" , " When you convey a covered work, you waive any legal power to forbid" , "circumvention of technological measures to the extent such circumvention" , "is effected by exercising rights under this License with respect to" , "the covered work, and you disclaim any intention to limit operation or" , "modification of the work as a means of enforcing, against the work's" , "users, your or third parties' legal rights to forbid circumvention of" , "technological measures." , "" , " 4. Conveying Verbatim Copies." , "" , " You may convey verbatim copies of the Program's source code as you" , "receive it, in any medium, provided that you conspicuously and" , "appropriately publish on each copy an appropriate copyright notice;" , "keep intact all notices stating that this License and any" , "non-permissive terms added in accord with section 7 apply to the code;" , "keep intact all notices of the absence of any warranty; and give all" , "recipients a copy of this License along with the Program." , "" , " You may charge any price or no price for each copy that you convey," , "and you may offer support or warranty protection for a fee." , "" , " 5. Conveying Modified Source Versions." , "" , " You may convey a work based on the Program, or the modifications to" , "produce it from the Program, in the form of source code under the" , "terms of section 4, provided that you also meet all of these conditions:" , "" , " a) The work must carry prominent notices stating that you modified" , " it, and giving a relevant date." 
, "" , " b) The work must carry prominent notices stating that it is" , " released under this License and any conditions added under section" , " 7. This requirement modifies the requirement in section 4 to" , " \"keep intact all notices\"." , "" , " c) You must license the entire work, as a whole, under this" , " License to anyone who comes into possession of a copy. This" , " License will therefore apply, along with any applicable section 7" , " additional terms, to the whole of the work, and all its parts," , " regardless of how they are packaged. This License gives no" , " permission to license the work in any other way, but it does not" , " invalidate such permission if you have separately received it." , "" , " d) If the work has interactive user interfaces, each must display" , " Appropriate Legal Notices; however, if the Program has interactive" , " interfaces that do not display Appropriate Legal Notices, your" , " work need not make them do so." , "" , " A compilation of a covered work with other separate and independent" , "works, which are not by their nature extensions of the covered work," , "and which are not combined with it such as to form a larger program," , "in or on a volume of a storage or distribution medium, is called an" , "\"aggregate\" if the compilation and its resulting copyright are not" , "used to limit the access or legal rights of the compilation's users" , "beyond what the individual works permit. Inclusion of a covered work" , "in an aggregate does not cause this License to apply to the other" , "parts of the aggregate." , "" , " 6. Conveying Non-Source Forms." 
, "" , " You may convey a covered work in object code form under the terms" , "of sections 4 and 5, provided that you also convey the" , "machine-readable Corresponding Source under the terms of this License," , "in one of these ways:" , "" , " a) Convey the object code in, or embodied in, a physical product" , " (including a physical distribution medium), accompanied by the" , " Corresponding Source fixed on a durable physical medium" , " customarily used for software interchange." , "" , " b) Convey the object code in, or embodied in, a physical product" , " (including a physical distribution medium), accompanied by a" , " written offer, valid for at least three years and valid for as" , " long as you offer spare parts or customer support for that product" , " model, to give anyone who possesses the object code either (1) a" , " copy of the Corresponding Source for all the software in the" , " product that is covered by this License, on a durable physical" , " medium customarily used for software interchange, for a price no" , " more than your reasonable cost of physically performing this" , " conveying of source, or (2) access to copy the" , " Corresponding Source from a network server at no charge." , "" , " c) Convey individual copies of the object code with a copy of the" , " written offer to provide the Corresponding Source. This" , " alternative is allowed only occasionally and noncommercially, and" , " only if you received the object code with such an offer, in accord" , " with subsection 6b." , "" , " d) Convey the object code by offering access from a designated" , " place (gratis or for a charge), and offer equivalent access to the" , " Corresponding Source in the same way through the same place at no" , " further charge. You need not require recipients to copy the" , " Corresponding Source along with the object code. 
If the place to" , " copy the object code is a network server, the Corresponding Source" , " may be on a different server (operated by you or a third party)" , " that supports equivalent copying facilities, provided you maintain" , " clear directions next to the object code saying where to find the" , " Corresponding Source. Regardless of what server hosts the" , " Corresponding Source, you remain obligated to ensure that it is" , " available for as long as needed to satisfy these requirements." , "" , " e) Convey the object code using peer-to-peer transmission, provided" , " you inform other peers where the object code and Corresponding" , " Source of the work are being offered to the general public at no" , " charge under subsection 6d." , "" , " A separable portion of the object code, whose source code is excluded" , "from the Corresponding Source as a System Library, need not be" , "included in conveying the object code work." , "" , " A \"User Product\" is either (1) a \"consumer product\", which means any" , "tangible personal property which is normally used for personal, family," , "or household purposes, or (2) anything designed or sold for incorporation" , "into a dwelling. In determining whether a product is a consumer product," , "doubtful cases shall be resolved in favor of coverage. For a particular" , "product received by a particular user, \"normally used\" refers to a" , "typical or common use of that class of product, regardless of the status" , "of the particular user or of the way in which the particular user" , "actually uses, or expects or is expected to use, the product. A product" , "is a consumer product regardless of whether the product has substantial" , "commercial, industrial or non-consumer uses, unless such uses represent" , "the only significant mode of use of the product." 
, "" , " \"Installation Information\" for a User Product means any methods," , "procedures, authorization keys, or other information required to install" , "and execute modified versions of a covered work in that User Product from" , "a modified version of its Corresponding Source. The information must" , "suffice to ensure that the continued functioning of the modified object" , "code is in no case prevented or interfered with solely because" , "modification has been made." , "" , " If you convey an object code work under this section in, or with, or" , "specifically for use in, a User Product, and the conveying occurs as" , "part of a transaction in which the right of possession and use of the" , "User Product is transferred to the recipient in perpetuity or for a" , "fixed term (regardless of how the transaction is characterized), the" , "Corresponding Source conveyed under this section must be accompanied" , "by the Installation Information. But this requirement does not apply" , "if neither you nor any third party retains the ability to install" , "modified object code on the User Product (for example, the work has" , "been installed in ROM)." , "" , " The requirement to provide Installation Information does not include a" , "requirement to continue to provide support service, warranty, or updates" , "for a work that has been modified or installed by the recipient, or for" , "the User Product in which it has been modified or installed. Access to a" , "network may be denied when the modification itself materially and" , "adversely affects the operation of the network or violates the rules and" , "protocols for communication across the network." 
, "" , " Corresponding Source conveyed, and Installation Information provided," , "in accord with this section must be in a format that is publicly" , "documented (and with an implementation available to the public in" , "source code form), and must require no special password or key for" , "unpacking, reading or copying." , "" , " 7. Additional Terms." , "" , " \"Additional permissions\" are terms that supplement the terms of this" , "License by making exceptions from one or more of its conditions." , "Additional permissions that are applicable to the entire Program shall" , "be treated as though they were included in this License, to the extent" , "that they are valid under applicable law. If additional permissions" , "apply only to part of the Program, that part may be used separately" , "under those permissions, but the entire Program remains governed by" , "this License without regard to the additional permissions." , "" , " When you convey a copy of a covered work, you may at your option" , "remove any additional permissions from that copy, or from any part of" , "it. (Additional permissions may be written to require their own" , "removal in certain cases when you modify the work.) You may place" , "additional permissions on material, added by you to a covered work," , "for which you have or can give appropriate copyright permission." 
, "" , " Notwithstanding any other provision of this License, for material you" , "add to a covered work, you may (if authorized by the copyright holders of" , "that material) supplement the terms of this License with terms:" , "" , " a) Disclaiming warranty or limiting liability differently from the" , " terms of sections 15 and 16 of this License; or" , "" , " b) Requiring preservation of specified reasonable legal notices or" , " author attributions in that material or in the Appropriate Legal" , " Notices displayed by works containing it; or" , "" , " c) Prohibiting misrepresentation of the origin of that material, or" , " requiring that modified versions of such material be marked in" , " reasonable ways as different from the original version; or" , "" , " d) Limiting the use for publicity purposes of names of licensors or" , " authors of the material; or" , "" , " e) Declining to grant rights under trademark law for use of some" , " trade names, trademarks, or service marks; or" , "" , " f) Requiring indemnification of licensors and authors of that" , " material by anyone who conveys the material (or modified versions of" , " it) with contractual assumptions of liability to the recipient, for" , " any liability that these contractual assumptions directly impose on" , " those licensors and authors." , "" , " All other non-permissive additional terms are considered \"further" , "restrictions\" within the meaning of section 10. If the Program as you" , "received it, or any part of it, contains a notice stating that it is" , "governed by this License along with a term that is a further" , "restriction, you may remove that term. If a license document contains" , "a further restriction but permits relicensing or conveying under this" , "License, you may add to a covered work material governed by the terms" , "of that license document, provided that the further restriction does" , "not survive such relicensing or conveying." 
, "" , " If you add terms to a covered work in accord with this section, you" , "must place, in the relevant source files, a statement of the" , "additional terms that apply to those files, or a notice indicating" , "where to find the applicable terms." , "" , " Additional terms, permissive or non-permissive, may be stated in the" , "form of a separately written license, or stated as exceptions;" , "the above requirements apply either way." , "" , " 8. Termination." , "" , " You may not propagate or modify a covered work except as expressly" , "provided under this License. Any attempt otherwise to propagate or" , "modify it is void, and will automatically terminate your rights under" , "this License (including any patent licenses granted under the third" , "paragraph of section 11)." , "" , " However, if you cease all violation of this License, then your" , "license from a particular copyright holder is reinstated (a)" , "provisionally, unless and until the copyright holder explicitly and" , "finally terminates your license, and (b) permanently, if the copyright" , "holder fails to notify you of the violation by some reasonable means" , "prior to 60 days after the cessation." , "" , " Moreover, your license from a particular copyright holder is" , "reinstated permanently if the copyright holder notifies you of the" , "violation by some reasonable means, this is the first time you have" , "received notice of violation of this License (for any work) from that" , "copyright holder, and you cure the violation prior to 30 days after" , "your receipt of the notice." , "" , " Termination of your rights under this section does not terminate the" , "licenses of parties who have received copies or rights from you under" , "this License. If your rights have been terminated and not permanently" , "reinstated, you do not qualify to receive new licenses for the same" , "material under section 10." , "" , " 9. Acceptance Not Required for Having Copies." 
, "" , " You are not required to accept this License in order to receive or" , "run a copy of the Program. Ancillary propagation of a covered work" , "occurring solely as a consequence of using peer-to-peer transmission" , "to receive a copy likewise does not require acceptance. However," , "nothing other than this License grants you permission to propagate or" , "modify any covered work. These actions infringe copyright if you do" , "not accept this License. Therefore, by modifying or propagating a" , "covered work, you indicate your acceptance of this License to do so." , "" , " 10. Automatic Licensing of Downstream Recipients." , "" , " Each time you convey a covered work, the recipient automatically" , "receives a license from the original licensors, to run, modify and" , "propagate that work, subject to this License. You are not responsible" , "for enforcing compliance by third parties with this License." , "" , " An \"entity transaction\" is a transaction transferring control of an" , "organization, or substantially all assets of one, or subdividing an" , "organization, or merging organizations. If propagation of a covered" , "work results from an entity transaction, each party to that" , "transaction who receives a copy of the work also receives whatever" , "licenses to the work the party's predecessor in interest had or could" , "give under the previous paragraph, plus a right to possession of the" , "Corresponding Source of the work from the predecessor in interest, if" , "the predecessor has it or can get it with reasonable efforts." , "" , " You may not impose any further restrictions on the exercise of the" , "rights granted or affirmed under this License. 
For example, you may" , "not impose a license fee, royalty, or other charge for exercise of" , "rights granted under this License, and you may not initiate litigation" , "(including a cross-claim or counterclaim in a lawsuit) alleging that" , "any patent claim is infringed by making, using, selling, offering for" , "sale, or importing the Program or any portion of it." , "" , " 11. Patents." , "" , " A \"contributor\" is a copyright holder who authorizes use under this" , "License of the Program or a work on which the Program is based. The" , "work thus licensed is called the contributor's \"contributor version\"." , "" , " A contributor's \"essential patent claims\" are all patent claims" , "owned or controlled by the contributor, whether already acquired or" , "hereafter acquired, that would be infringed by some manner, permitted" , "by this License, of making, using, or selling its contributor version," , "but do not include claims that would be infringed only as a" , "consequence of further modification of the contributor version. For" , "purposes of this definition, \"control\" includes the right to grant" , "patent sublicenses in a manner consistent with the requirements of" , "this License." , "" , " Each contributor grants you a non-exclusive, worldwide, royalty-free" , "patent license under the contributor's essential patent claims, to" , "make, use, sell, offer for sale, import and otherwise run, modify and" , "propagate the contents of its contributor version." , "" , " In the following three paragraphs, a \"patent license\" is any express" , "agreement or commitment, however denominated, not to enforce a patent" , "(such as an express permission to practice a patent or covenant not to" , "sue for patent infringement). To \"grant\" such a patent license to a" , "party means to make such an agreement or commitment not to enforce a" , "patent against the party." 
, "" , " If you convey a covered work, knowingly relying on a patent license," , "and the Corresponding Source of the work is not available for anyone" , "to copy, free of charge and under the terms of this License, through a" , "publicly available network server or other readily accessible means," , "then you must either (1) cause the Corresponding Source to be so" , "available, or (2) arrange to deprive yourself of the benefit of the" , "patent license for this particular work, or (3) arrange, in a manner" , "consistent with the requirements of this License, to extend the patent" , "license to downstream recipients. \"Knowingly relying\" means you have" , "actual knowledge that, but for the patent license, your conveying the" , "covered work in a country, or your recipient's use of the covered work" , "in a country, would infringe one or more identifiable patents in that" , "country that you have reason to believe are valid." , "" , " If, pursuant to or in connection with a single transaction or" , "arrangement, you convey, or propagate by procuring conveyance of, a" , "covered work, and grant a patent license to some of the parties" , "receiving the covered work authorizing them to use, propagate, modify" , "or convey a specific copy of the covered work, then the patent license" , "you grant is automatically extended to all recipients of the covered" , "work and works based on it." , "" , " A patent license is \"discriminatory\" if it does not include within" , "the scope of its coverage, prohibits the exercise of, or is" , "conditioned on the non-exercise of one or more of the rights that are" , "specifically granted under this License. 
You may not convey a covered" , "work if you are a party to an arrangement with a third party that is" , "in the business of distributing software, under which you make payment" , "to the third party based on the extent of your activity of conveying" , "the work, and under which the third party grants, to any of the" , "parties who would receive the covered work from you, a discriminatory" , "patent license (a) in connection with copies of the covered work" , "conveyed by you (or copies made from those copies), or (b) primarily" , "for and in connection with specific products or compilations that" , "contain the covered work, unless you entered into that arrangement," , "or that patent license was granted, prior to 28 March 2007." , "" , " Nothing in this License shall be construed as excluding or limiting" , "any implied license or other defenses to infringement that may" , "otherwise be available to you under applicable patent law." , "" , " 12. No Surrender of Others' Freedom." , "" , " If conditions are imposed on you (whether by court order, agreement or" , "otherwise) that contradict the conditions of this License, they do not" , "excuse you from the conditions of this License. If you cannot convey a" , "covered work so as to satisfy simultaneously your obligations under this" , "License and any other pertinent obligations, then as a consequence you may" , "not convey it at all. For example, if you agree to terms that obligate you" , "to collect a royalty for further conveying from those to whom you convey" , "the Program, the only way you could satisfy both those terms and this" , "License would be to refrain entirely from conveying the Program." , "" , " 13. Remote Network Interaction; Use with the GNU General Public License." 
, "" , " Notwithstanding any other provision of this License, if you modify the" , "Program, your modified version must prominently offer all users" , "interacting with it remotely through a computer network (if your version" , "supports such interaction) an opportunity to receive the Corresponding" , "Source of your version by providing access to the Corresponding Source" , "from a network server at no charge, through some standard or customary" , "means of facilitating copying of software. This Corresponding Source" , "shall include the Corresponding Source for any work covered by version 3" , "of the GNU General Public License that is incorporated pursuant to the" , "following paragraph." , "" , " Notwithstanding any other provision of this License, you have" , "permission to link or combine any covered work with a work licensed" , "under version 3 of the GNU General Public License into a single" , "combined work, and to convey the resulting work. The terms of this" , "License will continue to apply to the part which is the covered work," , "but the work with which it is combined will remain governed by version" , "3 of the GNU General Public License." , "" , " 14. Revised Versions of this License." , "" , " The Free Software Foundation may publish revised and/or new versions of" , "the GNU Affero General Public License from time to time. Such new versions" , "will be similar in spirit to the present version, but may differ in detail to" , "address new problems or concerns." , "" , " Each version is given a distinguishing version number. If the" , "Program specifies that a certain numbered version of the GNU Affero General" , "Public License \"or any later version\" applies to it, you have the" , "option of following the terms and conditions either of that numbered" , "version or of any later version published by the Free Software" , "Foundation. 
If the Program does not specify a version number of the" , "GNU Affero General Public License, you may choose any version ever published" , "by the Free Software Foundation." , "" , " If the Program specifies that a proxy can decide which future" , "versions of the GNU Affero General Public License can be used, that proxy's" , "public statement of acceptance of a version permanently authorizes you" , "to choose that version for the Program." , "" , " Later license versions may give you additional or different" , "permissions. However, no additional obligations are imposed on any" , "author or copyright holder as a result of your choosing to follow a" , "later version." , "" , " 15. Disclaimer of Warranty." , "" , " THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY" , "APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT" , "HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY" , "OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO," , "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM" , "IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF" , "ALL NECESSARY SERVICING, REPAIR OR CORRECTION." , "" , " 16. Limitation of Liability." , "" , " IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS" , "THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY" , "GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE" , "USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF" , "DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD" , "PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS)," , "EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF" , "SUCH DAMAGES." , "" , " 17. 
Interpretation of Sections 15 and 16." , "" , " If the disclaimer of warranty and limitation of liability provided" , "above cannot be given local legal effect according to their terms," , "reviewing courts shall apply local law that most closely approximates" , "an absolute waiver of all civil liability in connection with the" , "Program, unless a warranty or assumption of liability accompanies a" , "copy of the Program in return for a fee." , "" , " END OF TERMS AND CONDITIONS" , "" , " How to Apply These Terms to Your New Programs" , "" , " If you develop a new program, and you want it to be of the greatest" , "possible use to the public, the best way to achieve this is to make it" , "free software which everyone can redistribute and change under these terms." , "" , " To do so, attach the following notices to the program. It is safest" , "to attach them to the start of each source file to most effectively" , "state the exclusion of warranty; and each file should have at least" , "the \"copyright\" line and a pointer to where the full notice is found." , "" , " " , " Copyright (C) " , "" , " This program is free software: you can redistribute it and/or modify" , " it under the terms of the GNU Affero General Public License as published by" , " the Free Software Foundation, either version 3 of the License, or" , " (at your option) any later version." , "" , " This program is distributed in the hope that it will be useful," , " but WITHOUT ANY WARRANTY; without even the implied warranty of" , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" , " GNU Affero General Public License for more details." , "" , " You should have received a copy of the GNU Affero General Public License" , " along with this program. If not, see ." , "" , "Also add information on how to contact you by electronic and paper mail." 
, "" , " If your software can interact with users remotely through a computer" , "network, you should also make sure that it provides a way for users to" , "get its source. For example, if your program is a web application, its" , "interface could display a \"Source\" link that leads users to an archive" , "of the code. There are many ways you could offer source, and different" , "solutions will be better for different programs; see section 13 for the" , "specific requirements." , "" , " You should also get your employer (if you work as a programmer) or school," , "if any, to sign a \"copyright disclaimer\" for the program, if necessary." , "For more information on this, and how to apply and follow the GNU AGPL, see" , "." ] lgpl21 :: License lgpl21 = unlines [ " GNU LESSER GENERAL PUBLIC LICENSE" , " Version 2.1, February 1999" , "" , " Copyright (C) 1991, 1999 Free Software Foundation, Inc." , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" , " Everyone is permitted to copy and distribute verbatim copies" , " of this license document, but changing it is not allowed." , "" , "[This is the first released version of the Lesser GPL. It also counts" , " as the successor of the GNU Library Public License, version 2, hence" , " the version number 2.1.]" , "" , " Preamble" , "" , " The licenses for most software are designed to take away your" , "freedom to share and change it. By contrast, the GNU General Public" , "Licenses are intended to guarantee your freedom to share and change" , "free software--to make sure the software is free for all its users." , "" , " This license, the Lesser General Public License, applies to some" , "specially designated software packages--typically libraries--of the" , "Free Software Foundation and other authors who decide to use it. 
You" , "can use it too, but we suggest you first think carefully about whether" , "this license or the ordinary General Public License is the better" , "strategy to use in any particular case, based on the explanations below." , "" , " When we speak of free software, we are referring to freedom of use," , "not price. Our General Public Licenses are designed to make sure that" , "you have the freedom to distribute copies of free software (and charge" , "for this service if you wish); that you receive source code or can get" , "it if you want it; that you can change the software and use pieces of" , "it in new free programs; and that you are informed that you can do" , "these things." , "" , " To protect your rights, we need to make restrictions that forbid" , "distributors to deny you these rights or to ask you to surrender these" , "rights. These restrictions translate to certain responsibilities for" , "you if you distribute copies of the library or if you modify it." , "" , " For example, if you distribute copies of the library, whether gratis" , "or for a fee, you must give the recipients all the rights that we gave" , "you. You must make sure that they, too, receive or can get the source" , "code. If you link other code with the library, you must provide" , "complete object files to the recipients, so that they can relink them" , "with the library after making changes to the library and recompiling" , "it. And you must show them these terms so they know their rights." , "" , " We protect your rights with a two-step method: (1) we copyright the" , "library, and (2) we offer you this license, which gives you legal" , "permission to copy, distribute and/or modify the library." , "" , " To protect each distributor, we want to make it very clear that" , "there is no warranty for the free library. 
Also, if the library is" , "modified by someone else and passed on, the recipients should know" , "that what they have is not the original version, so that the original" , "author's reputation will not be affected by problems that might be" , "introduced by others." , "" , " Finally, software patents pose a constant threat to the existence of" , "any free program. We wish to make sure that a company cannot" , "effectively restrict the users of a free program by obtaining a" , "restrictive license from a patent holder. Therefore, we insist that" , "any patent license obtained for a version of the library must be" , "consistent with the full freedom of use specified in this license." , "" , " Most GNU software, including some libraries, is covered by the" , "ordinary GNU General Public License. This license, the GNU Lesser" , "General Public License, applies to certain designated libraries, and" , "is quite different from the ordinary General Public License. We use" , "this license for certain libraries in order to permit linking those" , "libraries into non-free programs." , "" , " When a program is linked with a library, whether statically or using" , "a shared library, the combination of the two is legally speaking a" , "combined work, a derivative of the original library. The ordinary" , "General Public License therefore permits such linking only if the" , "entire combination fits its criteria of freedom. The Lesser General" , "Public License permits more lax criteria for linking other code with" , "the library." , "" , " We call this license the \"Lesser\" General Public License because it" , "does Less to protect the user's freedom than the ordinary General" , "Public License. It also provides other free software developers Less" , "of an advantage over competing non-free programs. These disadvantages" , "are the reason we use the ordinary General Public License for many" , "libraries. 
However, the Lesser license provides advantages in certain" , "special circumstances." , "" , " For example, on rare occasions, there may be a special need to" , "encourage the widest possible use of a certain library, so that it becomes" , "a de-facto standard. To achieve this, non-free programs must be" , "allowed to use the library. A more frequent case is that a free" , "library does the same job as widely used non-free libraries. In this" , "case, there is little to gain by limiting the free library to free" , "software only, so we use the Lesser General Public License." , "" , " In other cases, permission to use a particular library in non-free" , "programs enables a greater number of people to use a large body of" , "free software. For example, permission to use the GNU C Library in" , "non-free programs enables many more people to use the whole GNU" , "operating system, as well as its variant, the GNU/Linux operating" , "system." , "" , " Although the Lesser General Public License is Less protective of the" , "users' freedom, it does ensure that the user of a program that is" , "linked with the Library has the freedom and the wherewithal to run" , "that program using a modified version of the Library." , "" , " The precise terms and conditions for copying, distribution and" , "modification follow. Pay close attention to the difference between a" , "\"work based on the library\" and a \"work that uses the library\". The" , "former contains code derived from the library, whereas the latter must" , "be combined with the library in order to run." , "" , " GNU LESSER GENERAL PUBLIC LICENSE" , " TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION" , "" , " 0. This License Agreement applies to any software library or other" , "program which contains a notice placed by the copyright holder or" , "other authorized party saying it may be distributed under the terms of" , "this Lesser General Public License (also called \"this License\")." 
, "Each licensee is addressed as \"you\"." , "" , " A \"library\" means a collection of software functions and/or data" , "prepared so as to be conveniently linked with application programs" , "(which use some of those functions and data) to form executables." , "" , " The \"Library\", below, refers to any such software library or work" , "which has been distributed under these terms. A \"work based on the" , "Library\" means either the Library or any derivative work under" , "copyright law: that is to say, a work containing the Library or a" , "portion of it, either verbatim or with modifications and/or translated" , "straightforwardly into another language. (Hereinafter, translation is" , "included without limitation in the term \"modification\".)" , "" , " \"Source code\" for a work means the preferred form of the work for" , "making modifications to it. For a library, complete source code means" , "all the source code for all modules it contains, plus any associated" , "interface definition files, plus the scripts used to control compilation" , "and installation of the library." , "" , " Activities other than copying, distribution and modification are not" , "covered by this License; they are outside its scope. The act of" , "running a program using the Library is not restricted, and output from" , "such a program is covered only if its contents constitute a work based" , "on the Library (independent of the use of the Library in a tool for" , "writing it). Whether that is true depends on what the Library does" , "and what the program that uses the Library does." , "" , " 1. 
You may copy and distribute verbatim copies of the Library's" , "complete source code as you receive it, in any medium, provided that" , "you conspicuously and appropriately publish on each copy an" , "appropriate copyright notice and disclaimer of warranty; keep intact" , "all the notices that refer to this License and to the absence of any" , "warranty; and distribute a copy of this License along with the" , "Library." , "" , " You may charge a fee for the physical act of transferring a copy," , "and you may at your option offer warranty protection in exchange for a" , "fee." , "" , " 2. You may modify your copy or copies of the Library or any portion" , "of it, thus forming a work based on the Library, and copy and" , "distribute such modifications or work under the terms of Section 1" , "above, provided that you also meet all of these conditions:" , "" , " a) The modified work must itself be a software library." , "" , " b) You must cause the files modified to carry prominent notices" , " stating that you changed the files and the date of any change." , "" , " c) You must cause the whole of the work to be licensed at no" , " charge to all third parties under the terms of this License." , "" , " d) If a facility in the modified Library refers to a function or a" , " table of data to be supplied by an application program that uses" , " the facility, other than as an argument passed when the facility" , " is invoked, then you must make a good faith effort to ensure that," , " in the event an application does not supply such function or" , " table, the facility still operates, and performs whatever part of" , " its purpose remains meaningful." , "" , " (For example, a function in a library to compute square roots has" , " a purpose that is entirely well-defined independent of the" , " application. 
Therefore, Subsection 2d requires that any" , " application-supplied function or table used by this function must" , " be optional: if the application does not supply it, the square" , " root function must still compute square roots.)" , "" , "These requirements apply to the modified work as a whole. If" , "identifiable sections of that work are not derived from the Library," , "and can be reasonably considered independent and separate works in" , "themselves, then this License, and its terms, do not apply to those" , "sections when you distribute them as separate works. But when you" , "distribute the same sections as part of a whole which is a work based" , "on the Library, the distribution of the whole must be on the terms of" , "this License, whose permissions for other licensees extend to the" , "entire whole, and thus to each and every part regardless of who wrote" , "it." , "" , "Thus, it is not the intent of this section to claim rights or contest" , "your rights to work written entirely by you; rather, the intent is to" , "exercise the right to control the distribution of derivative or" , "collective works based on the Library." , "" , "In addition, mere aggregation of another work not based on the Library" , "with the Library (or with a work based on the Library) on a volume of" , "a storage or distribution medium does not bring the other work under" , "the scope of this License." , "" , " 3. You may opt to apply the terms of the ordinary GNU General Public" , "License instead of this License to a given copy of the Library. To do" , "this, you must alter all the notices that refer to this License, so" , "that they refer to the ordinary GNU General Public License, version 2," , "instead of to this License. (If a newer version than version 2 of the" , "ordinary GNU General Public License has appeared, then you can specify" , "that version instead if you wish.) Do not make any other change in" , "these notices." 
, "" , " Once this change is made in a given copy, it is irreversible for" , "that copy, so the ordinary GNU General Public License applies to all" , "subsequent copies and derivative works made from that copy." , "" , " This option is useful when you wish to copy part of the code of" , "the Library into a program that is not a library." , "" , " 4. You may copy and distribute the Library (or a portion or" , "derivative of it, under Section 2) in object code or executable form" , "under the terms of Sections 1 and 2 above provided that you accompany" , "it with the complete corresponding machine-readable source code, which" , "must be distributed under the terms of Sections 1 and 2 above on a" , "medium customarily used for software interchange." , "" , " If distribution of object code is made by offering access to copy" , "from a designated place, then offering equivalent access to copy the" , "source code from the same place satisfies the requirement to" , "distribute the source code, even though third parties are not" , "compelled to copy the source along with the object code." , "" , " 5. A program that contains no derivative of any portion of the" , "Library, but is designed to work with the Library by being compiled or" , "linked with it, is called a \"work that uses the Library\". Such a" , "work, in isolation, is not a derivative work of the Library, and" , "therefore falls outside the scope of this License." , "" , " However, linking a \"work that uses the Library\" with the Library" , "creates an executable that is a derivative of the Library (because it" , "contains portions of the Library), rather than a \"work that uses the" , "library\". The executable is therefore covered by this License." , "Section 6 states terms for distribution of such executables." 
, "" , " When a \"work that uses the Library\" uses material from a header file" , "that is part of the Library, the object code for the work may be a" , "derivative work of the Library even though the source code is not." , "Whether this is true is especially significant if the work can be" , "linked without the Library, or if the work is itself a library. The" , "threshold for this to be true is not precisely defined by law." , "" , " If such an object file uses only numerical parameters, data" , "structure layouts and accessors, and small macros and small inline" , "functions (ten lines or less in length), then the use of the object" , "file is unrestricted, regardless of whether it is legally a derivative" , "work. (Executables containing this object code plus portions of the" , "Library will still fall under Section 6.)" , "" , " Otherwise, if the work is a derivative of the Library, you may" , "distribute the object code for the work under the terms of Section 6." , "Any executables containing that work also fall under Section 6," , "whether or not they are linked directly with the Library itself." , "" , " 6. As an exception to the Sections above, you may also combine or" , "link a \"work that uses the Library\" with the Library to produce a" , "work containing portions of the Library, and distribute that work" , "under terms of your choice, provided that the terms permit" , "modification of the work for the customer's own use and reverse" , "engineering for debugging such modifications." , "" , " You must give prominent notice with each copy of the work that the" , "Library is used in it and that the Library and its use are covered by" , "this License. You must supply a copy of this License. If the work" , "during execution displays copyright notices, you must include the" , "copyright notice for the Library among them, as well as a reference" , "directing the user to the copy of this License. 
Also, you must do one" , "of these things:" , "" , " a) Accompany the work with the complete corresponding" , " machine-readable source code for the Library including whatever" , " changes were used in the work (which must be distributed under" , " Sections 1 and 2 above); and, if the work is an executable linked" , " with the Library, with the complete machine-readable \"work that" , " uses the Library\", as object code and/or source code, so that the" , " user can modify the Library and then relink to produce a modified" , " executable containing the modified Library. (It is understood" , " that the user who changes the contents of definitions files in the" , " Library will not necessarily be able to recompile the application" , " to use the modified definitions.)" , "" , " b) Use a suitable shared library mechanism for linking with the" , " Library. A suitable mechanism is one that (1) uses at run time a" , " copy of the library already present on the user's computer system," , " rather than copying library functions into the executable, and (2)" , " will operate properly with a modified version of the library, if" , " the user installs one, as long as the modified version is" , " interface-compatible with the version that the work was made with." , "" , " c) Accompany the work with a written offer, valid for at" , " least three years, to give the same user the materials" , " specified in Subsection 6a, above, for a charge no more" , " than the cost of performing this distribution." , "" , " d) If distribution of the work is made by offering access to copy" , " from a designated place, offer equivalent access to copy the above" , " specified materials from the same place." , "" , " e) Verify that the user has already received a copy of these" , " materials or that you have already sent this user a copy." 
, "" , " For an executable, the required form of the \"work that uses the" , "Library\" must include any data and utility programs needed for" , "reproducing the executable from it. However, as a special exception," , "the materials to be distributed need not include anything that is" , "normally distributed (in either source or binary form) with the major" , "components (compiler, kernel, and so on) of the operating system on" , "which the executable runs, unless that component itself accompanies" , "the executable." , "" , " It may happen that this requirement contradicts the license" , "restrictions of other proprietary libraries that do not normally" , "accompany the operating system. Such a contradiction means you cannot" , "use both them and the Library together in an executable that you" , "distribute." , "" , " 7. You may place library facilities that are a work based on the" , "Library side-by-side in a single library together with other library" , "facilities not covered by this License, and distribute such a combined" , "library, provided that the separate distribution of the work based on" , "the Library and of the other library facilities is otherwise" , "permitted, and provided that you do these two things:" , "" , " a) Accompany the combined library with a copy of the same work" , " based on the Library, uncombined with any other library" , " facilities. This must be distributed under the terms of the" , " Sections above." , "" , " b) Give prominent notice with the combined library of the fact" , " that part of it is a work based on the Library, and explaining" , " where to find the accompanying uncombined form of the same work." , "" , " 8. You may not copy, modify, sublicense, link with, or distribute" , "the Library except as expressly provided under this License. Any" , "attempt otherwise to copy, modify, sublicense, link with, or" , "distribute the Library is void, and will automatically terminate your" , "rights under this License. 
However, parties who have received copies," , "or rights, from you under this License will not have their licenses" , "terminated so long as such parties remain in full compliance." , "" , " 9. You are not required to accept this License, since you have not" , "signed it. However, nothing else grants you permission to modify or" , "distribute the Library or its derivative works. These actions are" , "prohibited by law if you do not accept this License. Therefore, by" , "modifying or distributing the Library (or any work based on the" , "Library), you indicate your acceptance of this License to do so, and" , "all its terms and conditions for copying, distributing or modifying" , "the Library or works based on it." , "" , " 10. Each time you redistribute the Library (or any work based on the" , "Library), the recipient automatically receives a license from the" , "original licensor to copy, distribute, link with or modify the Library" , "subject to these terms and conditions. You may not impose any further" , "restrictions on the recipients' exercise of the rights granted herein." , "You are not responsible for enforcing compliance by third parties with" , "this License." , "" , " 11. If, as a consequence of a court judgment or allegation of patent" , "infringement or for any other reason (not limited to patent issues)," , "conditions are imposed on you (whether by court order, agreement or" , "otherwise) that contradict the conditions of this License, they do not" , "excuse you from the conditions of this License. If you cannot" , "distribute so as to satisfy simultaneously your obligations under this" , "License and any other pertinent obligations, then as a consequence you" , "may not distribute the Library at all. 
For example, if a patent" , "license would not permit royalty-free redistribution of the Library by" , "all those who receive copies directly or indirectly through you, then" , "the only way you could satisfy both it and this License would be to" , "refrain entirely from distribution of the Library." , "" , "If any portion of this section is held invalid or unenforceable under any" , "particular circumstance, the balance of the section is intended to apply," , "and the section as a whole is intended to apply in other circumstances." , "" , "It is not the purpose of this section to induce you to infringe any" , "patents or other property right claims or to contest validity of any" , "such claims; this section has the sole purpose of protecting the" , "integrity of the free software distribution system which is" , "implemented by public license practices. Many people have made" , "generous contributions to the wide range of software distributed" , "through that system in reliance on consistent application of that" , "system; it is up to the author/donor to decide if he or she is willing" , "to distribute software through any other system and a licensee cannot" , "impose that choice." , "" , "This section is intended to make thoroughly clear what is believed to" , "be a consequence of the rest of this License." , "" , " 12. If the distribution and/or use of the Library is restricted in" , "certain countries either by patents or by copyrighted interfaces, the" , "original copyright holder who places the Library under this License may add" , "an explicit geographical distribution limitation excluding those countries," , "so that distribution is permitted only in or among countries not thus" , "excluded. In such case, this License incorporates the limitation as if" , "written in the body of this License." , "" , " 13. The Free Software Foundation may publish revised and/or new" , "versions of the Lesser General Public License from time to time." 
, "Such new versions will be similar in spirit to the present version," , "but may differ in detail to address new problems or concerns." , "" , "Each version is given a distinguishing version number. If the Library" , "specifies a version number of this License which applies to it and" , "\"any later version\", you have the option of following the terms and" , "conditions either of that version or of any later version published by" , "the Free Software Foundation. If the Library does not specify a" , "license version number, you may choose any version ever published by" , "the Free Software Foundation." , "" , " 14. If you wish to incorporate parts of the Library into other free" , "programs whose distribution conditions are incompatible with these," , "write to the author to ask for permission. For software which is" , "copyrighted by the Free Software Foundation, write to the Free" , "Software Foundation; we sometimes make exceptions for this. Our" , "decision will be guided by the two goals of preserving the free status" , "of all derivatives of our free software and of promoting the sharing" , "and reuse of software generally." , "" , " NO WARRANTY" , "" , " 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO" , "WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW." , "EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR" , "OTHER PARTIES PROVIDE THE LIBRARY \"AS IS\" WITHOUT WARRANTY OF ANY" , "KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE" , "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE" , "LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME" , "THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION." , "" , " 16. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN" , "WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY" , "AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU" , "FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR" , "CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE" , "LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING" , "RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A" , "FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF" , "SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH" , "DAMAGES." , "" , " END OF TERMS AND CONDITIONS" , "" , " How to Apply These Terms to Your New Libraries" , "" , " If you develop a new library, and you want it to be of the greatest" , "possible use to the public, we recommend making it free software that" , "everyone can redistribute and change. You can do so by permitting" , "redistribution under these terms (or, alternatively, under the terms of the" , "ordinary General Public License)." , "" , " To apply these terms, attach the following notices to the library. It is" , "safest to attach them to the start of each source file to most effectively" , "convey the exclusion of warranty; and each file should have at least the" , "\"copyright\" line and a pointer to where the full notice is found." , "" , " " , " Copyright (C) " , "" , " This library is free software; you can redistribute it and/or" , " modify it under the terms of the GNU Lesser General Public" , " License as published by the Free Software Foundation; either" , " version 2.1 of the License, or (at your option) any later version." , "" , " This library is distributed in the hope that it will be useful," , " but WITHOUT ANY WARRANTY; without even the implied warranty of" , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU" , " Lesser General Public License for more details." 
, "" , " You should have received a copy of the GNU Lesser General Public" , " License along with this library; if not, write to the Free Software" , " Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" , "" , "Also add information on how to contact you by electronic and paper mail." , "" , "You should also get your employer (if you work as a programmer) or your" , "school, if any, to sign a \"copyright disclaimer\" for the library, if" , "necessary. Here is a sample; alter the names:" , "" , " Yoyodyne, Inc., hereby disclaims all copyright interest in the" , " library `Frob' (a library for tweaking knobs) written by James Random Hacker." , "" , " , 1 April 1990" , " Ty Coon, President of Vice" , "" , "That's all there is to it!" ] lgpl3 :: License lgpl3 = unlines [ " GNU LESSER GENERAL PUBLIC LICENSE" , " Version 3, 29 June 2007" , "" , " Copyright (C) 2007 Free Software Foundation, Inc. " , " Everyone is permitted to copy and distribute verbatim copies" , " of this license document, but changing it is not allowed." , "" , "" , " This version of the GNU Lesser General Public License incorporates" , "the terms and conditions of version 3 of the GNU General Public" , "License, supplemented by the additional permissions listed below." , "" , " 0. Additional Definitions." , "" , " As used herein, \"this License\" refers to version 3 of the GNU Lesser" , "General Public License, and the \"GNU GPL\" refers to version 3 of the GNU" , "General Public License." , "" , " \"The Library\" refers to a covered work governed by this License," , "other than an Application or a Combined Work as defined below." , "" , " An \"Application\" is any work that makes use of an interface provided" , "by the Library, but which is not otherwise based on the Library." , "Defining a subclass of a class defined by the Library is deemed a mode" , "of using an interface provided by the Library." 
, "" , " A \"Combined Work\" is a work produced by combining or linking an" , "Application with the Library. The particular version of the Library" , "with which the Combined Work was made is also called the \"Linked" , "Version\"." , "" , " The \"Minimal Corresponding Source\" for a Combined Work means the" , "Corresponding Source for the Combined Work, excluding any source code" , "for portions of the Combined Work that, considered in isolation, are" , "based on the Application, and not on the Linked Version." , "" , " The \"Corresponding Application Code\" for a Combined Work means the" , "object code and/or source code for the Application, including any data" , "and utility programs needed for reproducing the Combined Work from the" , "Application, but excluding the System Libraries of the Combined Work." , "" , " 1. Exception to Section 3 of the GNU GPL." , "" , " You may convey a covered work under sections 3 and 4 of this License" , "without being bound by section 3 of the GNU GPL." , "" , " 2. Conveying Modified Versions." , "" , " If you modify a copy of the Library, and, in your modifications, a" , "facility refers to a function or data to be supplied by an Application" , "that uses the facility (other than as an argument passed when the" , "facility is invoked), then you may convey a copy of the modified" , "version:" , "" , " a) under this License, provided that you make a good faith effort to" , " ensure that, in the event an Application does not supply the" , " function or data, the facility still operates, and performs" , " whatever part of its purpose remains meaningful, or" , "" , " b) under the GNU GPL, with none of the additional permissions of" , " this License applicable to that copy." , "" , " 3. Object Code Incorporating Material from Library Header Files." , "" , " The object code form of an Application may incorporate material from" , "a header file that is part of the Library. 
You may convey such object" , "code under terms of your choice, provided that, if the incorporated" , "material is not limited to numerical parameters, data structure" , "layouts and accessors, or small macros, inline functions and templates" , "(ten or fewer lines in length), you do both of the following:" , "" , " a) Give prominent notice with each copy of the object code that the" , " Library is used in it and that the Library and its use are" , " covered by this License." , "" , " b) Accompany the object code with a copy of the GNU GPL and this license" , " document." , "" , " 4. Combined Works." , "" , " You may convey a Combined Work under terms of your choice that," , "taken together, effectively do not restrict modification of the" , "portions of the Library contained in the Combined Work and reverse" , "engineering for debugging such modifications, if you also do each of" , "the following:" , "" , " a) Give prominent notice with each copy of the Combined Work that" , " the Library is used in it and that the Library and its use are" , " covered by this License." , "" , " b) Accompany the Combined Work with a copy of the GNU GPL and this license" , " document." , "" , " c) For a Combined Work that displays copyright notices during" , " execution, include the copyright notice for the Library among" , " these notices, as well as a reference directing the user to the" , " copies of the GNU GPL and this license document." , "" , " d) Do one of the following:" , "" , " 0) Convey the Minimal Corresponding Source under the terms of this" , " License, and the Corresponding Application Code in a form" , " suitable for, and under terms that permit, the user to" , " recombine or relink the Application with a modified version of" , " the Linked Version to produce a modified Combined Work, in the" , " manner specified by section 6 of the GNU GPL for conveying" , " Corresponding Source." 
, "" , " 1) Use a suitable shared library mechanism for linking with the" , " Library. A suitable mechanism is one that (a) uses at run time" , " a copy of the Library already present on the user's computer" , " system, and (b) will operate properly with a modified version" , " of the Library that is interface-compatible with the Linked" , " Version." , "" , " e) Provide Installation Information, but only if you would otherwise" , " be required to provide such information under section 6 of the" , " GNU GPL, and only to the extent that such information is" , " necessary to install and execute a modified version of the" , " Combined Work produced by recombining or relinking the" , " Application with a modified version of the Linked Version. (If" , " you use option 4d0, the Installation Information must accompany" , " the Minimal Corresponding Source and Corresponding Application" , " Code. If you use option 4d1, you must provide the Installation" , " Information in the manner specified by section 6 of the GNU GPL" , " for conveying Corresponding Source.)" , "" , " 5. Combined Libraries." , "" , " You may place library facilities that are a work based on the" , "Library side by side in a single library together with other library" , "facilities that are not Applications and are not covered by this" , "License, and convey such a combined library under terms of your" , "choice, if you do both of the following:" , "" , " a) Accompany the combined library with a copy of the same work based" , " on the Library, uncombined with any other library facilities," , " conveyed under the terms of this License." , "" , " b) Give prominent notice with the combined library that part of it" , " is a work based on the Library, and explaining where to find the" , " accompanying uncombined form of the same work." , "" , " 6. Revised Versions of the GNU Lesser General Public License." 
, "" , " The Free Software Foundation may publish revised and/or new versions" , "of the GNU Lesser General Public License from time to time. Such new" , "versions will be similar in spirit to the present version, but may" , "differ in detail to address new problems or concerns." , "" , " Each version is given a distinguishing version number. If the" , "Library as you received it specifies that a certain numbered version" , "of the GNU Lesser General Public License \"or any later version\"" , "applies to it, you have the option of following the terms and" , "conditions either of that published version or of any later version" , "published by the Free Software Foundation. If the Library as you" , "received it does not specify a version number of the GNU Lesser" , "General Public License, you may choose any version of the GNU Lesser" , "General Public License ever published by the Free Software Foundation." , "" , " If the Library as you received it specifies that a proxy can decide" , "whether future versions of the GNU Lesser General Public License shall" , "apply, that proxy's public statement of acceptance of any version is" , "permanent authorization for you to choose that version for the" , "Library." ] apache20 :: License apache20 = unlines [ "" , " Apache License" , " Version 2.0, January 2004" , " http://www.apache.org/licenses/" , "" , " TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION" , "" , " 1. Definitions." , "" , " \"License\" shall mean the terms and conditions for use, reproduction," , " and distribution as defined by Sections 1 through 9 of this document." , "" , " \"Licensor\" shall mean the copyright owner or entity authorized by" , " the copyright owner that is granting the License." , "" , " \"Legal Entity\" shall mean the union of the acting entity and all" , " other entities that control, are controlled by, or are under common" , " control with that entity. 
For the purposes of this definition," , " \"control\" means (i) the power, direct or indirect, to cause the" , " direction or management of such entity, whether by contract or" , " otherwise, or (ii) ownership of fifty percent (50%) or more of the" , " outstanding shares, or (iii) beneficial ownership of such entity." , "" , " \"You\" (or \"Your\") shall mean an individual or Legal Entity" , " exercising permissions granted by this License." , "" , " \"Source\" form shall mean the preferred form for making modifications," , " including but not limited to software source code, documentation" , " source, and configuration files." , "" , " \"Object\" form shall mean any form resulting from mechanical" , " transformation or translation of a Source form, including but" , " not limited to compiled object code, generated documentation," , " and conversions to other media types." , "" , " \"Work\" shall mean the work of authorship, whether in Source or" , " Object form, made available under the License, as indicated by a" , " copyright notice that is included in or attached to the work" , " (an example is provided in the Appendix below)." , "" , " \"Derivative Works\" shall mean any work, whether in Source or Object" , " form, that is based on (or derived from) the Work and for which the" , " editorial revisions, annotations, elaborations, or other modifications" , " represent, as a whole, an original work of authorship. For the purposes" , " of this License, Derivative Works shall not include works that remain" , " separable from, or merely link (or bind by name) to the interfaces of," , " the Work and Derivative Works thereof." 
, "" , " \"Contribution\" shall mean any work of authorship, including" , " the original version of the Work and any modifications or additions" , " to that Work or Derivative Works thereof, that is intentionally" , " submitted to Licensor for inclusion in the Work by the copyright owner" , " or by an individual or Legal Entity authorized to submit on behalf of" , " the copyright owner. For the purposes of this definition, \"submitted\"" , " means any form of electronic, verbal, or written communication sent" , " to the Licensor or its representatives, including but not limited to" , " communication on electronic mailing lists, source code control systems," , " and issue tracking systems that are managed by, or on behalf of, the" , " Licensor for the purpose of discussing and improving the Work, but" , " excluding communication that is conspicuously marked or otherwise" , " designated in writing by the copyright owner as \"Not a Contribution.\"" , "" , " \"Contributor\" shall mean Licensor and any individual or Legal Entity" , " on behalf of whom a Contribution has been received by Licensor and" , " subsequently incorporated within the Work." , "" , " 2. Grant of Copyright License. Subject to the terms and conditions of" , " this License, each Contributor hereby grants to You a perpetual," , " worldwide, non-exclusive, no-charge, royalty-free, irrevocable" , " copyright license to reproduce, prepare Derivative Works of," , " publicly display, publicly perform, sublicense, and distribute the" , " Work and such Derivative Works in Source or Object form." , "" , " 3. Grant of Patent License. 
Subject to the terms and conditions of" , " this License, each Contributor hereby grants to You a perpetual," , " worldwide, non-exclusive, no-charge, royalty-free, irrevocable" , " (except as stated in this section) patent license to make, have made," , " use, offer to sell, sell, import, and otherwise transfer the Work," , " where such license applies only to those patent claims licensable" , " by such Contributor that are necessarily infringed by their" , " Contribution(s) alone or by combination of their Contribution(s)" , " with the Work to which such Contribution(s) was submitted. If You" , " institute patent litigation against any entity (including a" , " cross-claim or counterclaim in a lawsuit) alleging that the Work" , " or a Contribution incorporated within the Work constitutes direct" , " or contributory patent infringement, then any patent licenses" , " granted to You under this License for that Work shall terminate" , " as of the date such litigation is filed." , "" , " 4. Redistribution. 
You may reproduce and distribute copies of the" , " Work or Derivative Works thereof in any medium, with or without" , " modifications, and in Source or Object form, provided that You" , " meet the following conditions:" , "" , " (a) You must give any other recipients of the Work or" , " Derivative Works a copy of this License; and" , "" , " (b) You must cause any modified files to carry prominent notices" , " stating that You changed the files; and" , "" , " (c) You must retain, in the Source form of any Derivative Works" , " that You distribute, all copyright, patent, trademark, and" , " attribution notices from the Source form of the Work," , " excluding those notices that do not pertain to any part of" , " the Derivative Works; and" , "" , " (d) If the Work includes a \"NOTICE\" text file as part of its" , " distribution, then any Derivative Works that You distribute must" , " include a readable copy of the attribution notices contained" , " within such NOTICE file, excluding those notices that do not" , " pertain to any part of the Derivative Works, in at least one" , " of the following places: within a NOTICE text file distributed" , " as part of the Derivative Works; within the Source form or" , " documentation, if provided along with the Derivative Works; or," , " within a display generated by the Derivative Works, if and" , " wherever such third-party notices normally appear. The contents" , " of the NOTICE file are for informational purposes only and" , " do not modify the License. You may add Your own attribution" , " notices within Derivative Works that You distribute, alongside" , " or as an addendum to the NOTICE text from the Work, provided" , " that such additional attribution notices cannot be construed" , " as modifying the License." 
, "" , " You may add Your own copyright statement to Your modifications and" , " may provide additional or different license terms and conditions" , " for use, reproduction, or distribution of Your modifications, or" , " for any such Derivative Works as a whole, provided Your use," , " reproduction, and distribution of the Work otherwise complies with" , " the conditions stated in this License." , "" , " 5. Submission of Contributions. Unless You explicitly state otherwise," , " any Contribution intentionally submitted for inclusion in the Work" , " by You to the Licensor shall be under the terms and conditions of" , " this License, without any additional terms or conditions." , " Notwithstanding the above, nothing herein shall supersede or modify" , " the terms of any separate license agreement you may have executed" , " with Licensor regarding such Contributions." , "" , " 6. Trademarks. This License does not grant permission to use the trade" , " names, trademarks, service marks, or product names of the Licensor," , " except as required for reasonable and customary use in describing the" , " origin of the Work and reproducing the content of the NOTICE file." , "" , " 7. Disclaimer of Warranty. Unless required by applicable law or" , " agreed to in writing, Licensor provides the Work (and each" , " Contributor provides its Contributions) on an \"AS IS\" BASIS," , " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or" , " implied, including, without limitation, any warranties or conditions" , " of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A" , " PARTICULAR PURPOSE. You are solely responsible for determining the" , " appropriateness of using or redistributing the Work and assume any" , " risks associated with Your exercise of permissions under this License." , "" , " 8. Limitation of Liability. 
In no event and under no legal theory," , " whether in tort (including negligence), contract, or otherwise," , " unless required by applicable law (such as deliberate and grossly" , " negligent acts) or agreed to in writing, shall any Contributor be" , " liable to You for damages, including any direct, indirect, special," , " incidental, or consequential damages of any character arising as a" , " result of this License or out of the use or inability to use the" , " Work (including but not limited to damages for loss of goodwill," , " work stoppage, computer failure or malfunction, or any and all" , " other commercial damages or losses), even if such Contributor" , " has been advised of the possibility of such damages." , "" , " 9. Accepting Warranty or Additional Liability. While redistributing" , " the Work or Derivative Works thereof, You may choose to offer," , " and charge a fee for, acceptance of support, warranty, indemnity," , " or other liability obligations and/or rights consistent with this" , " License. However, in accepting such obligations, You may act only" , " on Your own behalf and on Your sole responsibility, not on behalf" , " of any other Contributor, and only if You agree to indemnify," , " defend, and hold each Contributor harmless for any liability" , " incurred by, or claims asserted against, such Contributor by reason" , " of your accepting any such warranty or additional liability." , "" , " END OF TERMS AND CONDITIONS" , "" , " APPENDIX: How to apply the Apache License to your work." , "" , " To apply the Apache License to your work, attach the following" , " boilerplate notice, with the fields enclosed by brackets \"[]\"" , " replaced with your own identifying information. (Don't include" , " the brackets!) The text should be enclosed in the appropriate" , " comment syntax for the file format. 
We also recommend that a" , " file or class name and description of purpose be included on the" , " same \"printed page\" as the copyright notice for easier" , " identification within third-party archives." , "" , " Copyright [yyyy] [name of copyright owner]" , "" , " Licensed under the Apache License, Version 2.0 (the \"License\");" , " you may not use this file except in compliance with the License." , " You may obtain a copy of the License at" , "" , " http://www.apache.org/licenses/LICENSE-2.0" , "" , " Unless required by applicable law or agreed to in writing, software" , " distributed under the License is distributed on an \"AS IS\" BASIS," , " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied." , " See the License for the specific language governing permissions and" , " limitations under the License." ] mit :: String -> String -> License mit authors year = unlines [ "Copyright (c) " ++ year ++ " " ++ authors , "" , "Permission is hereby granted, free of charge, to any person obtaining" , "a copy of this software and associated documentation files (the" , "\"Software\"), to deal in the Software without restriction, including" , "without limitation the rights to use, copy, modify, merge, publish," , "distribute, sublicense, and/or sell copies of the Software, and to" , "permit persons to whom the Software is furnished to do so, subject to" , "the following conditions:" , "" , "The above copyright notice and this permission notice shall be included" , "in all copies or substantial portions of the Software." , "" , "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND," , "EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF" , "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT." 
, "IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY" , "CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT," , "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE" , "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE." ] mpl20 :: License mpl20 = unlines [ "Mozilla Public License Version 2.0" , "==================================" , "" , "1. Definitions" , "--------------" , "" , "1.1. \"Contributor\"" , " means each individual or legal entity that creates, contributes to" , " the creation of, or owns Covered Software." , "" , "1.2. \"Contributor Version\"" , " means the combination of the Contributions of others (if any) used" , " by a Contributor and that particular Contributor's Contribution." , "" , "1.3. \"Contribution\"" , " means Covered Software of a particular Contributor." , "" , "1.4. \"Covered Software\"" , " means Source Code Form to which the initial Contributor has attached" , " the notice in Exhibit A, the Executable Form of such Source Code" , " Form, and Modifications of such Source Code Form, in each case" , " including portions thereof." , "" , "1.5. \"Incompatible With Secondary Licenses\"" , " means" , "" , " (a) that the initial Contributor has attached the notice described" , " in Exhibit B to the Covered Software; or" , "" , " (b) that the Covered Software was made available under the terms of" , " version 1.1 or earlier of the License, but not also under the" , " terms of a Secondary License." , "" , "1.6. \"Executable Form\"" , " means any form of the work other than Source Code Form." , "" , "1.7. \"Larger Work\"" , " means a work that combines Covered Software with other material, in" , " a separate file or files, that is not Covered Software." , "" , "1.8. \"License\"" , " means this document." , "" , "1.9. 
\"Licensable\"" , " means having the right to grant, to the maximum extent possible," , " whether at the time of the initial grant or subsequently, any and" , " all of the rights conveyed by this License." , "" , "1.10. \"Modifications\"" , " means any of the following:" , "" , " (a) any file in Source Code Form that results from an addition to," , " deletion from, or modification of the contents of Covered" , " Software; or" , "" , " (b) any new file in Source Code Form that contains any Covered" , " Software." , "" , "1.11. \"Patent Claims\" of a Contributor" , " means any patent claim(s), including without limitation, method," , " process, and apparatus claims, in any patent Licensable by such" , " Contributor that would be infringed, but for the grant of the" , " License, by the making, using, selling, offering for sale, having" , " made, import, or transfer of either its Contributions or its" , " Contributor Version." , "" , "1.12. \"Secondary License\"" , " means either the GNU General Public License, Version 2.0, the GNU" , " Lesser General Public License, Version 2.1, the GNU Affero General" , " Public License, Version 3.0, or any later versions of those" , " licenses." , "" , "1.13. \"Source Code Form\"" , " means the form of the work preferred for making modifications." , "" , "1.14. \"You\" (or \"Your\")" , " means an individual or a legal entity exercising rights under this" , " License. For legal entities, \"You\" includes any entity that" , " controls, is controlled by, or is under common control with You. For" , " purposes of this definition, \"control\" means (a) the power, direct" , " or indirect, to cause the direction or management of such entity," , " whether by contract or otherwise, or (b) ownership of more than" , " fifty percent (50%) of the outstanding shares or beneficial" , " ownership of such entity." , "" , "2. License Grants and Conditions" , "--------------------------------" , "" , "2.1. 
Grants" , "" , "Each Contributor hereby grants You a world-wide, royalty-free," , "non-exclusive license:" , "" , "(a) under intellectual property rights (other than patent or trademark)" , " Licensable by such Contributor to use, reproduce, make available," , " modify, display, perform, distribute, and otherwise exploit its" , " Contributions, either on an unmodified basis, with Modifications, or" , " as part of a Larger Work; and" , "" , "(b) under Patent Claims of such Contributor to make, use, sell, offer" , " for sale, have made, import, and otherwise transfer either its" , " Contributions or its Contributor Version." , "" , "2.2. Effective Date" , "" , "The licenses granted in Section 2.1 with respect to any Contribution" , "become effective for each Contribution on the date the Contributor first" , "distributes such Contribution." , "" , "2.3. Limitations on Grant Scope" , "" , "The licenses granted in this Section 2 are the only rights granted under" , "this License. No additional rights or licenses will be implied from the" , "distribution or licensing of Covered Software under this License." , "Notwithstanding Section 2.1(b) above, no patent license is granted by a" , "Contributor:" , "" , "(a) for any code that a Contributor has removed from Covered Software;" , " or" , "" , "(b) for infringements caused by: (i) Your and any other third party's" , " modifications of Covered Software, or (ii) the combination of its" , " Contributions with other software (except as part of its Contributor" , " Version); or" , "" , "(c) under Patent Claims infringed by Covered Software in the absence of" , " its Contributions." , "" , "This License does not grant any rights in the trademarks, service marks," , "or logos of any Contributor (except as may be necessary to comply with" , "the notice requirements in Section 3.4)." , "" , "2.4. 
Subsequent Licenses" , "" , "No Contributor makes additional grants as a result of Your choice to" , "distribute the Covered Software under a subsequent version of this" , "License (see Section 10.2) or under the terms of a Secondary License (if" , "permitted under the terms of Section 3.3)." , "" , "2.5. Representation" , "" , "Each Contributor represents that the Contributor believes its" , "Contributions are its original creation(s) or it has sufficient rights" , "to grant the rights to its Contributions conveyed by this License." , "" , "2.6. Fair Use" , "" , "This License is not intended to limit any rights You have under" , "applicable copyright doctrines of fair use, fair dealing, or other" , "equivalents." , "" , "2.7. Conditions" , "" , "Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted" , "in Section 2.1." , "" , "3. Responsibilities" , "-------------------" , "" , "3.1. Distribution of Source Form" , "" , "All distribution of Covered Software in Source Code Form, including any" , "Modifications that You create or to which You contribute, must be under" , "the terms of this License. You must inform recipients that the Source" , "Code Form of the Covered Software is governed by the terms of this" , "License, and how they can obtain a copy of this License. You may not" , "attempt to alter or restrict the recipients' rights in the Source Code" , "Form." , "" , "3.2. 
Distribution of Executable Form" , "" , "If You distribute Covered Software in Executable Form then:" , "" , "(a) such Covered Software must also be made available in Source Code" , " Form, as described in Section 3.1, and You must inform recipients of" , " the Executable Form how they can obtain a copy of such Source Code" , " Form by reasonable means in a timely manner, at a charge no more" , " than the cost of distribution to the recipient; and" , "" , "(b) You may distribute such Executable Form under the terms of this" , " License, or sublicense it under different terms, provided that the" , " license for the Executable Form does not attempt to limit or alter" , " the recipients' rights in the Source Code Form under this License." , "" , "3.3. Distribution of a Larger Work" , "" , "You may create and distribute a Larger Work under terms of Your choice," , "provided that You also comply with the requirements of this License for" , "the Covered Software. If the Larger Work is a combination of Covered" , "Software with a work governed by one or more Secondary Licenses, and the" , "Covered Software is not Incompatible With Secondary Licenses, this" , "License permits You to additionally distribute such Covered Software" , "under the terms of such Secondary License(s), so that the recipient of" , "the Larger Work may, at their option, further distribute the Covered" , "Software under the terms of either this License or such Secondary" , "License(s)." , "" , "3.4. Notices" , "" , "You may not remove or alter the substance of any license notices" , "(including copyright notices, patent notices, disclaimers of warranty," , "or limitations of liability) contained within the Source Code Form of" , "the Covered Software, except that You may alter any license notices to" , "the extent required to remedy known factual inaccuracies." , "" , "3.5. 
Application of Additional Terms" , "" , "You may choose to offer, and to charge a fee for, warranty, support," , "indemnity or liability obligations to one or more recipients of Covered" , "Software. However, You may do so only on Your own behalf, and not on" , "behalf of any Contributor. You must make it absolutely clear that any" , "such warranty, support, indemnity, or liability obligation is offered by" , "You alone, and You hereby agree to indemnify every Contributor for any" , "liability incurred by such Contributor as a result of warranty, support," , "indemnity or liability terms You offer. You may include additional" , "disclaimers of warranty and limitations of liability specific to any" , "jurisdiction." , "" , "4. Inability to Comply Due to Statute or Regulation" , "---------------------------------------------------" , "" , "If it is impossible for You to comply with any of the terms of this" , "License with respect to some or all of the Covered Software due to" , "statute, judicial order, or regulation then You must: (a) comply with" , "the terms of this License to the maximum extent possible; and (b)" , "describe the limitations and the code they affect. Such description must" , "be placed in a text file included with all distributions of the Covered" , "Software under this License. Except to the extent prohibited by statute" , "or regulation, such description must be sufficiently detailed for a" , "recipient of ordinary skill to be able to understand it." , "" , "5. Termination" , "--------------" , "" , "5.1. The rights granted under this License will terminate automatically" , "if You fail to comply with any of its terms. 
However, if You become" , "compliant, then the rights granted under this License from a particular" , "Contributor are reinstated (a) provisionally, unless and until such" , "Contributor explicitly and finally terminates Your grants, and (b) on an" , "ongoing basis, if such Contributor fails to notify You of the" , "non-compliance by some reasonable means prior to 60 days after You have" , "come back into compliance. Moreover, Your grants from a particular" , "Contributor are reinstated on an ongoing basis if such Contributor" , "notifies You of the non-compliance by some reasonable means, this is the" , "first time You have received notice of non-compliance with this License" , "from such Contributor, and You become compliant prior to 30 days after" , "Your receipt of the notice." , "" , "5.2. If You initiate litigation against any entity by asserting a patent" , "infringement claim (excluding declaratory judgment actions," , "counter-claims, and cross-claims) alleging that a Contributor Version" , "directly or indirectly infringes any patent, then the rights granted to" , "You by any and all Contributors for the Covered Software under Section" , "2.1 of this License shall terminate." , "" , "5.3. In the event of termination under Sections 5.1 or 5.2 above, all" , "end user license agreements (excluding distributors and resellers) which" , "have been validly granted by You or Your distributors under this License" , "prior to termination shall survive termination." , "" , "************************************************************************" , "* *" , "* 6. Disclaimer of Warranty *" , "* ------------------------- *" , "* *" , "* Covered Software is provided under this License on an \"as is\" *" , "* basis, without warranty of any kind, either expressed, implied, or *" , "* statutory, including, without limitation, warranties that the *" , "* Covered Software is free of defects, merchantable, fit for a *" , "* particular purpose or non-infringing. 
The entire risk as to the *" , "* quality and performance of the Covered Software is with You. *" , "* Should any Covered Software prove defective in any respect, You *" , "* (not any Contributor) assume the cost of any necessary servicing, *" , "* repair, or correction. This disclaimer of warranty constitutes an *" , "* essential part of this License. No use of any Covered Software is *" , "* authorized under this License except under this disclaimer. *" , "* *" , "************************************************************************" , "" , "************************************************************************" , "* *" , "* 7. Limitation of Liability *" , "* -------------------------- *" , "* *" , "* Under no circumstances and under no legal theory, whether tort *" , "* (including negligence), contract, or otherwise, shall any *" , "* Contributor, or anyone who distributes Covered Software as *" , "* permitted above, be liable to You for any direct, indirect, *" , "* special, incidental, or consequential damages of any character *" , "* including, without limitation, damages for lost profits, loss of *" , "* goodwill, work stoppage, computer failure or malfunction, or any *" , "* and all other commercial damages or losses, even if such party *" , "* shall have been informed of the possibility of such damages. This *" , "* limitation of liability shall not apply to liability for death or *" , "* personal injury resulting from such party's negligence to the *" , "* extent applicable law prohibits such limitation. Some *" , "* jurisdictions do not allow the exclusion or limitation of *" , "* incidental or consequential damages, so this exclusion and *" , "* limitation may not apply to You. *" , "* *" , "************************************************************************" , "" , "8. 
Litigation" , "-------------" , "" , "Any litigation relating to this License may be brought only in the" , "courts of a jurisdiction where the defendant maintains its principal" , "place of business and such litigation shall be governed by laws of that" , "jurisdiction, without reference to its conflict-of-law provisions." , "Nothing in this Section shall prevent a party's ability to bring" , "cross-claims or counter-claims." , "" , "9. Miscellaneous" , "----------------" , "" , "This License represents the complete agreement concerning the subject" , "matter hereof. If any provision of this License is held to be" , "unenforceable, such provision shall be reformed only to the extent" , "necessary to make it enforceable. Any law or regulation which provides" , "that the language of a contract shall be construed against the drafter" , "shall not be used to construe this License against a Contributor." , "" , "10. Versions of the License" , "---------------------------" , "" , "10.1. New Versions" , "" , "Mozilla Foundation is the license steward. Except as provided in Section" , "10.3, no one other than the license steward has the right to modify or" , "publish new versions of this License. Each version will be given a" , "distinguishing version number." , "" , "10.2. Effect of New Versions" , "" , "You may distribute the Covered Software under the terms of the version" , "of the License under which You originally received the Covered Software," , "or under the terms of any subsequent version published by the license" , "steward." , "" , "10.3. Modified Versions" , "" , "If you create software not governed by this License, and you want to" , "create a new license for such software, you may create and use a" , "modified version of this License if you rename the license and remove" , "any references to the name of the license steward (except to note that" , "such modified license differs from this License)." , "" , "10.4. 
Distributing Source Code Form that is Incompatible With Secondary" , "Licenses" , "" , "If You choose to distribute Source Code Form that is Incompatible With" , "Secondary Licenses under the terms of this version of the License, the" , "notice described in Exhibit B of this License must be attached." , "" , "Exhibit A - Source Code Form License Notice" , "-------------------------------------------" , "" , " This Source Code Form is subject to the terms of the Mozilla Public" , " License, v. 2.0. If a copy of the MPL was not distributed with this" , " file, You can obtain one at http://mozilla.org/MPL/2.0/." , "" , "If it is not possible or desirable to put the notice in a particular" , "file, then You may include the notice in a location (such as a LICENSE" , "file in a relevant directory) where a recipient would be likely to look" , "for such a notice." , "" , "You may add additional accurate notices of copyright ownership." , "" , "Exhibit B - \"Incompatible With Secondary Licenses\" Notice" , "---------------------------------------------------------" , "" , " This Source Code Form is \"Incompatible With Secondary Licenses\", as" , " defined by the Mozilla Public License, v. 2.0." ] isc :: String -> String -> License isc authors year = unlines [ "Copyright (c) " ++ year ++ " " ++ authors , "" , "Permission to use, copy, modify, and/or distribute this software for any purpose" , "with or without fee is hereby granted, provided that the above copyright notice" , "and this permission notice appear in all copies." , "" , "THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH" , "REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND" , "FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT," , "INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS" , "OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER" , "TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF" , "THIS SOFTWARE." ] cabal-install-3.8.1.0/src/Distribution/Client/Init/NonInteractive/0000755000000000000000000000000007346545000023077 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/Init/NonInteractive/Command.hs0000644000000000000000000004110607346545000025013 0ustar0000000000000000{-# LANGUAGE LambdaCase #-} module Distribution.Client.Init.NonInteractive.Command ( genPkgDescription , genLibTarget , genExeTarget , genTestTarget , createProject , packageTypeHeuristics , authorHeuristics , emailHeuristics , cabalVersionHeuristics , packageNameHeuristics , versionHeuristics , mainFileHeuristics , testDirsHeuristics , initializeTestSuiteHeuristics , exposedModulesHeuristics , libOtherModulesHeuristics , exeOtherModulesHeuristics , testOtherModulesHeuristics , buildToolsHeuristics , dependenciesHeuristics , otherExtsHeuristics , licenseHeuristics , homepageHeuristics , synopsisHeuristics , categoryHeuristics , extraDocFileHeuristics , appDirsHeuristics , srcDirsHeuristics , languageHeuristics , noCommentsHeuristics , minimalHeuristics , overwriteHeuristics ) where import Distribution.Client.Init.Types import Prelude () import Distribution.Client.Compat.Prelude hiding (putStr, putStrLn, getLine, last, head) import Data.List (last, head) import qualified Data.List.NonEmpty as NEL import Distribution.CabalSpecVersion (CabalSpecVersion(..)) import Distribution.Version (Version) import Distribution.ModuleName (ModuleName, components) import Distribution.Types.Dependency (Dependency(..)) import Distribution.Types.PackageName (PackageName, unPackageName) import Distribution.Client.Init.Defaults import 
Distribution.Client.Init.NonInteractive.Heuristics import Distribution.Client.Init.Utils import Distribution.Client.Init.FlagExtractors import Distribution.Simple.Setup (Flag(..), fromFlagOrDefault) import Distribution.Simple.PackageIndex (InstalledPackageIndex) import Distribution.Client.Types (SourcePackageDb(..)) import Distribution.Solver.Types.PackageIndex (elemByPackageName) import Distribution.Utils.Generic (safeHead) import Distribution.Verbosity import Language.Haskell.Extension (Language(..), Extension(..)) import System.FilePath (splitDirectories, ()) import Distribution.Simple.Compiler import qualified Data.Set as Set import Distribution.FieldGrammar.Newtypes -- | Main driver for interactive prompt code. -- createProject :: Interactive m => Compiler -> Verbosity -> InstalledPackageIndex -> SourcePackageDb -> InitFlags -> m ProjectSettings createProject comp v pkgIx srcDb initFlags = do -- The workflow is as follows: -- -- 1. Get the package type, supplied as either a program input or -- via user prompt. This determines what targets will be built -- in later steps. -- -- 2. Determine whether we generate simple targets or prompt the -- user for inputs when not supplied as a flag. In general, -- flag inputs are preferred, and "simple" here means -- reasonable defaults defined in @Defaults.hs@. -- -- 3. Generate package description and the targets specified by -- the package type. Once this is done, a prompt for building -- test suites is initiated, and this determines if we build -- test targets as well. Then we ask if the user wants to -- comment their .cabal file with pretty comments. -- -- 4. The targets are passed to the file creator script, and associated -- directories/files/modules are created, with the a .cabal file -- being generated as a final result. 
-- pkgType <- packageTypeHeuristics initFlags isMinimal <- getMinimal initFlags doOverwrite <- getOverwrite initFlags pkgDir <- packageDirHeuristics initFlags pkgDesc <- fixupDocFiles v =<< genPkgDescription initFlags srcDb comments <- noCommentsHeuristics initFlags let pkgName = _pkgName pkgDesc cabalSpec = _pkgCabalVersion pkgDesc mkOpts cs = WriteOpts doOverwrite isMinimal cs v pkgDir pkgType pkgName case pkgType of Library -> do libTarget <- genLibTarget initFlags comp pkgIx cabalSpec testTarget <- addLibDepToTest pkgName <$> genTestTarget initFlags comp pkgIx cabalSpec return $ ProjectSettings (mkOpts comments cabalSpec) pkgDesc (Just libTarget) Nothing testTarget Executable -> do exeTarget <- genExeTarget initFlags comp pkgIx cabalSpec return $ ProjectSettings (mkOpts comments cabalSpec) pkgDesc Nothing (Just exeTarget) Nothing LibraryAndExecutable -> do libTarget <- genLibTarget initFlags comp pkgIx cabalSpec exeTarget <- addLibDepToExe pkgName <$> genExeTarget initFlags comp pkgIx cabalSpec testTarget <- addLibDepToTest pkgName <$> genTestTarget initFlags comp pkgIx cabalSpec return $ ProjectSettings (mkOpts comments cabalSpec) pkgDesc (Just libTarget) (Just exeTarget) testTarget TestSuite -> do testTarget <- genTestTarget initFlags comp pkgIx cabalSpec return $ ProjectSettings (mkOpts comments cabalSpec) pkgDesc Nothing Nothing testTarget genPkgDescription :: Interactive m => InitFlags -> SourcePackageDb -> m PkgDescription genPkgDescription flags srcDb = PkgDescription <$> cabalVersionHeuristics flags <*> packageNameHeuristics srcDb flags <*> versionHeuristics flags <*> licenseHeuristics flags <*> authorHeuristics flags <*> emailHeuristics flags <*> homepageHeuristics flags <*> synopsisHeuristics flags <*> categoryHeuristics flags <*> getExtraSrcFiles flags <*> extraDocFileHeuristics flags genLibTarget :: Interactive m => InitFlags -> Compiler -> InstalledPackageIndex -> CabalSpecVersion -> m LibTarget genLibTarget flags comp pkgs v = do srcDirs <- 
srcDirsHeuristics flags let srcDir = fromMaybe defaultSourceDir $ safeHead srcDirs LibTarget srcDirs <$> languageHeuristics flags comp <*> exposedModulesHeuristics flags <*> libOtherModulesHeuristics flags <*> otherExtsHeuristics flags srcDir <*> dependenciesHeuristics flags srcDir pkgs <*> buildToolsHeuristics flags srcDir v genExeTarget :: Interactive m => InitFlags -> Compiler -> InstalledPackageIndex -> CabalSpecVersion -> m ExeTarget genExeTarget flags comp pkgs v = do appDirs <- appDirsHeuristics flags let appDir = fromMaybe defaultApplicationDir $ safeHead appDirs ExeTarget <$> mainFileHeuristics flags <*> pure appDirs <*> languageHeuristics flags comp <*> exeOtherModulesHeuristics flags <*> otherExtsHeuristics flags appDir <*> dependenciesHeuristics flags appDir pkgs <*> buildToolsHeuristics flags appDir v genTestTarget :: Interactive m => InitFlags -> Compiler -> InstalledPackageIndex -> CabalSpecVersion -> m (Maybe TestTarget) genTestTarget flags comp pkgs v = do initialized <- initializeTestSuiteHeuristics flags testDirs' <- testDirsHeuristics flags let testDir = fromMaybe defaultTestDir $ safeHead testDirs' if not initialized then return Nothing else fmap Just $ TestTarget <$> testMainHeuristics flags <*> pure testDirs' <*> languageHeuristics flags comp <*> testOtherModulesHeuristics flags <*> otherExtsHeuristics flags testDir <*> dependenciesHeuristics flags testDir pkgs <*> buildToolsHeuristics flags testDir v -- -------------------------------------------------------------------- -- -- Get flags from init config minimalHeuristics :: Interactive m => InitFlags -> m Bool minimalHeuristics = getMinimal overwriteHeuristics :: Interactive m => InitFlags -> m Bool overwriteHeuristics = getOverwrite packageDirHeuristics :: Interactive m => InitFlags -> m FilePath packageDirHeuristics = getPackageDir -- | Get the version of the cabal spec to use. -- The spec version can be specified by the InitFlags cabalVersion field. 
-- If none is specified then the default version is used.
cabalVersionHeuristics :: Interactive m => InitFlags -> m CabalSpecVersion
cabalVersionHeuristics flags = getCabalVersion flags guessCabalSpecVersion

-- | Get the package name: use the package directory (supplied, or the current
--   directory by default) as a guess. It looks at the SourcePackageDb to avoid
--   using an existing package name.
packageNameHeuristics :: Interactive m => SourcePackageDb -> InitFlags -> m PackageName
packageNameHeuristics sourcePkgDb flags = getPackageName flags $ do
    -- Prefer an explicitly supplied package dir; otherwise derive the guess
    -- from the last component of the current working directory.
    defName <- guessPackageName =<< case packageDir flags of
      Flag a -> return a
      NoFlag -> last . splitDirectories <$> getCurrentDirectory

    -- Warn (but do not fail) when the guessed name is already on Hackage.
    when (isPkgRegistered defName) $
      putStrLn (inUseMsg defName)

    return defName
  where
    isPkgRegistered = elemByPackageName (packageIndex sourcePkgDb)

    inUseMsg pn = "The name "
      ++ unPackageName pn
      ++ " is already in use by another package on Hackage."

-- | Package version: use 0.1.0.0 as a last resort
versionHeuristics :: Interactive m => InitFlags -> m Version
versionHeuristics flags = getVersion flags $ return defaultVersion

-- | Choose a license for the package.
-- The license can come from Initflags (license field), if it is not present
-- then prompt the user from a predefined list of licenses.
licenseHeuristics :: Interactive m => InitFlags -> m SpecLicense
licenseHeuristics flags = getLicense flags $ guessLicense flags

-- | The author's name. Prompt, or try to guess from an existing
--   darcs repo.
--
-- BUGFIX: the original called 'guessAuthorEmail' here (and
-- 'guessAuthorName' in 'emailHeuristics'), i.e. the two guessers were
-- swapped: @cabal init@ offered the git email as the author name and
-- vice versa ('guessAuthorName' reads @user.name@, 'guessAuthorEmail'
-- reads @user.email@).
authorHeuristics :: Interactive m => InitFlags -> m String
authorHeuristics flags = getAuthor flags guessAuthorName

-- | The author's email. Prompt, or try to guess from an existing
--   darcs repo.
emailHeuristics :: Interactive m => InitFlags -> m String
emailHeuristics flags = getEmail flags guessAuthorEmail

-- | Prompt for a homepage URL for the package.
homepageHeuristics :: Interactive m => InitFlags -> m String
homepageHeuristics flags = getHomepage flags $ return ""

-- | Prompt for a project synopsis.
synopsisHeuristics :: Interactive m => InitFlags -> m String
synopsisHeuristics flags = getSynopsis flags $ return ""

-- | Prompt for a package category.
-- Note that it should be possible to do some smarter guessing here too, i.e.
-- look at the name of the top level source directory.
categoryHeuristics :: Interactive m => InitFlags -> m String
categoryHeuristics flags = getCategory flags $ return ""

-- | Try to guess extra source files.
extraDocFileHeuristics :: Interactive m => InitFlags -> m (Maybe (Set FilePath))
extraDocFileHeuristics flags = case extraDoc flags of
  Flag x -> return $ Just $ Set.fromList x
  _      -> guessExtraDocFiles flags

-- | Try to guess if the project builds a library, an executable, or both.
packageTypeHeuristics :: Interactive m => InitFlags -> m PackageType
packageTypeHeuristics flags = getPackageType flags $ guessPackageType flags

-- | Try to guess the main file, if nothing is found, fallback
-- to a default value.
mainFileHeuristics :: Interactive m => InitFlags -> m HsFilePath
mainFileHeuristics flags = do
    -- Totality fix: use 'safeHead' with the default application dir instead
    -- of the partial 'head' — the applicationDirs flag may legitimately carry
    -- an empty list, which would previously have crashed here.
    appDir <- fromMaybe defaultApplicationDir . safeHead <$> appDirsHeuristics flags
    getMainFile flags . guessMainFile $ appDir

testMainHeuristics :: Interactive m => InitFlags -> m HsFilePath
testMainHeuristics flags = do
    -- Same totality fix as 'mainFileHeuristics': avoid partial 'head' when
    -- the test-dirs list is empty.
    testDir <- fromMaybe defaultTestDir . safeHead <$> testDirsHeuristics flags
    guessMainFile testDir

initializeTestSuiteHeuristics :: Interactive m => InitFlags -> m Bool
initializeTestSuiteHeuristics flags = getInitializeTestSuite flags $ return False

testDirsHeuristics :: Interactive m => InitFlags -> m [String]
testDirsHeuristics flags = getTestDirs flags $ return [defaultTestDir]

-- | Ask for the Haskell base language of the package.
languageHeuristics :: Interactive m => InitFlags -> Compiler -> m Language
languageHeuristics flags comp = getLanguage flags $ guessLanguage comp

-- | Ask whether to generate explanatory comments.
noCommentsHeuristics :: Interactive m => InitFlags -> m Bool noCommentsHeuristics flags = getNoComments flags $ return False -- | Ask for the application root directory. appDirsHeuristics :: Interactive m => InitFlags -> m [String] appDirsHeuristics flags = getAppDirs flags $ guessApplicationDirectories flags -- | Ask for the source (library) root directory. srcDirsHeuristics :: Interactive m => InitFlags -> m [String] srcDirsHeuristics flags = getSrcDirs flags $ guessSourceDirectories flags -- | Retrieve the list of exposed modules exposedModulesHeuristics :: Interactive m => InitFlags -> m (NonEmpty ModuleName) exposedModulesHeuristics flags = do mods <- case exposedModules flags of Flag x -> return x NoFlag -> do srcDir <- fromMaybe defaultSourceDir . safeHead <$> srcDirsHeuristics flags exists <- doesDirectoryExist srcDir if exists then do modules <- filter isHaskell <$> listFilesRecursive srcDir modulesNames <- catMaybes <$> traverse retrieveModuleName modules otherModules' <- libOtherModulesHeuristics flags return $ filter (`notElem` otherModules') modulesNames else return [] return $ if null mods then myLibModule NEL.:| [] else NEL.fromList mods -- | Retrieve the list of other modules for Libraries, filtering them -- based on the last component of the module name libOtherModulesHeuristics :: Interactive m => InitFlags -> m [ModuleName] libOtherModulesHeuristics flags = case otherModules flags of Flag x -> return x NoFlag -> do let otherCandidates = ["Internal", "Utils"] srcDir = case sourceDirs flags of Flag x -> fromMaybe defaultSourceDir $ safeHead x NoFlag -> defaultSourceDir libDir <- ( srcDir) <$> case packageDir flags of Flag x -> return x NoFlag -> getCurrentDirectory exists <- doesDirectoryExist libDir if exists then do otherModules' <- filter isHaskell <$> listFilesRecursive libDir filter ((`elem` otherCandidates) . last . components) . 
catMaybes <$> traverse retrieveModuleName otherModules' else return [] -- | Retrieve the list of other modules for Executables, it lists everything -- that is a Haskell file within the application directory, excluding the main file exeOtherModulesHeuristics :: Interactive m => InitFlags -> m [ModuleName] exeOtherModulesHeuristics flags = case otherModules flags of Flag x -> return x NoFlag -> do let appDir = case applicationDirs flags of Flag x -> fromMaybe defaultApplicationDir $ safeHead x NoFlag -> defaultApplicationDir exeDir <- ( appDir) <$> case packageDir flags of Flag x -> return x NoFlag -> getCurrentDirectory exists <- doesDirectoryExist exeDir if exists then do otherModules' <- filter (\f -> not (isMain f) && isHaskell f) <$> listFilesRecursive exeDir catMaybes <$> traverse retrieveModuleName otherModules' else return [] -- | Retrieve the list of other modules for Tests, it lists everything -- that is a Haskell file within the tests directory, excluding the main file testOtherModulesHeuristics :: Interactive m => InitFlags -> m [ModuleName] testOtherModulesHeuristics flags = case otherModules flags of Flag x -> return x NoFlag -> do let testDir = case testDirs flags of Flag x -> fromMaybe defaultTestDir $ safeHead x NoFlag -> defaultTestDir testDir' <- ( testDir) <$> case packageDir flags of Flag x -> return x NoFlag -> getCurrentDirectory exists <- doesDirectoryExist testDir' if exists then do otherModules' <- filter (\f -> not (isMain f) && isHaskell f) <$> listFilesRecursive testDir' catMaybes <$> traverse retrieveModuleName otherModules' else return [] -- | Retrieve the list of build tools buildToolsHeuristics :: Interactive m => InitFlags -> FilePath -> CabalSpecVersion -> m [Dependency] buildToolsHeuristics flags fp v = case buildTools flags of Flag{} -> getBuildTools flags NoFlag -> retrieveBuildTools v fp -- | Retrieve the list of dependencies dependenciesHeuristics :: Interactive m => InitFlags -> FilePath -> InstalledPackageIndex -> m 
[Dependency] dependenciesHeuristics flags fp pkgIx = getDependencies flags $ do sources <- retrieveSourceFiles fp let mods = case exposedModules flags of Flag x -> x NoFlag -> map moduleName sources groupedDeps = concatMap (\s -> map (\i -> (moduleName s, i)) (imports s)) sources filteredDeps = filter ((`notElem` mods) . snd) groupedDeps preludeNub = nubBy (\a b -> snd a == snd b) $ (fromString "Prelude", fromString "Prelude") : filteredDeps retrieveDependencies (fromFlagOrDefault normal $ initVerbosity flags) flags preludeNub pkgIx -- | Retrieve the list of extensions otherExtsHeuristics :: Interactive m => InitFlags -> FilePath -> m [Extension] otherExtsHeuristics flags fp = case otherExts flags of Flag x -> return x NoFlag -> do exists <- doesDirectoryExist fp if exists then do sources <- listFilesRecursive fp extensions' <- traverse retrieveModuleExtensions . filter isHaskell $ sources return $ nub . join $ extensions' else return [] cabal-install-3.8.1.0/src/Distribution/Client/Init/NonInteractive/Heuristics.hs0000644000000000000000000001657607346545000025574 0ustar0000000000000000{-# LANGUAGE LambdaCase #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Init.NonInteractive.Heuristics -- Copyright : (c) Benedikt Huber 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Heuristics for creating initial cabal files. 
-- ----------------------------------------------------------------------------- module Distribution.Client.Init.NonInteractive.Heuristics ( guessPackageName , guessMainFile , guessLicense , guessExtraDocFiles , guessAuthorName , guessAuthorEmail , guessCabalSpecVersion , guessLanguage , guessPackageType , guessSourceDirectories , guessApplicationDirectories ) where import Distribution.Client.Compat.Prelude hiding (readFile, (<|>), many) import Distribution.Utils.Generic (safeLast) import Distribution.Simple.Setup (fromFlagOrDefault) import qualified Data.List as L import Distribution.Client.Init.Defaults import Distribution.Client.Init.FlagExtractors (getCabalVersionNoPrompt) import Distribution.Client.Init.Types import Distribution.Client.Init.Utils import System.FilePath import Distribution.CabalSpecVersion import Language.Haskell.Extension import Distribution.Version import Distribution.Types.PackageName (PackageName, mkPackageName) import Distribution.Simple.Compiler import qualified Data.Set as Set import Distribution.FieldGrammar.Newtypes -- | Guess the main file, returns a default value if none is found. guessMainFile :: Interactive m => FilePath -> m HsFilePath guessMainFile pkgDir = do exists <- doesDirectoryExist pkgDir if exists then do files <- filter isMain <$> listFilesRecursive pkgDir return $ if null files then defaultMainIs else toHsFilePath $ L.head files else return defaultMainIs -- | Juggling characters around to guess the desired cabal version based on -- the system's cabal version. guessCabalSpecVersion :: Interactive m => m CabalSpecVersion guessCabalSpecVersion = do (_, verString, _) <- readProcessWithExitCode "cabal" ["--version"] "" case simpleParsec $ takeWhile (not . isSpace) $ dropWhile (not . 
isDigit) verString of Just v -> pure $ fromMaybe defaultCabalVersion $ case versionNumbers v of [x,y,_,_] -> cabalSpecFromVersionDigits [x,y] [x,y,_] -> cabalSpecFromVersionDigits [x,y] _ -> Just defaultCabalVersion Nothing -> pure defaultCabalVersion -- | Guess the language specification based on the GHC version guessLanguage :: Interactive m => Compiler -> m Language guessLanguage Compiler {compilerId = CompilerId GHC ver} = return $ if ver < mkVersion [7,0,1] then Haskell98 else Haskell2010 guessLanguage _ = return defaultLanguage -- | Guess the package name based on the given root directory. guessPackageName :: Interactive m => FilePath -> m PackageName guessPackageName = fmap (mkPackageName . repair . fromMaybe "" . safeLast . splitDirectories) . canonicalizePathNoThrow where -- Treat each span of non-alphanumeric characters as a hyphen. Each -- hyphenated component of a package name must contain at least one -- alphabetic character. An arbitrary character ('x') will be prepended if -- this is not the case for the first component, and subsequent components -- will simply be run together. For example, "1+2_foo-3" will become -- "x12-foo3". repair = repair' ('x' :) id repair' invalid valid x = case dropWhile (not . isAlphaNum) x of "" -> repairComponent "" x' -> let (c, r) = first repairComponent $ span isAlphaNum x' in c ++ repairRest r where repairComponent c | all isDigit c = invalid c | otherwise = valid c repairRest = repair' id ('-' :) -- | Try to guess the license from an already existing @LICENSE@ file in -- the package directory, comparing the file contents with the ones -- listed in @Licenses.hs@, for now it only returns a default value. guessLicense :: Interactive m => InitFlags -> m SpecLicense guessLicense flags = return . 
defaultLicense $ getCabalVersionNoPrompt flags guessExtraDocFiles :: Interactive m => InitFlags -> m (Maybe (Set FilePath)) guessExtraDocFiles flags = do pkgDir <- fromFlagOrDefault getCurrentDirectory $ return <$> packageDir flags files <- getDirectoryContents pkgDir let extraDocCandidates = ["CHANGES", "CHANGELOG", "README"] extraDocs = [y | x <- extraDocCandidates, y <- files, x == map toUpper (takeBaseName y)] return $ Just $ if null extraDocs then Set.singleton defaultChangelog else Set.fromList extraDocs -- | Try to guess the package type from the files in the package directory, -- looking for unique characteristics from each type, defaults to Executable. guessPackageType :: Interactive m => InitFlags -> m PackageType guessPackageType flags = do if fromFlagOrDefault False (initializeTestSuite flags) then return TestSuite else do let lastDir dirs = L.last . splitDirectories $ dirs srcCandidates = [defaultSourceDir, "src", "source"] testCandidates = [defaultTestDir, "test", "tests"] pkgDir <- fromFlagOrDefault getCurrentDirectory $ return <$> packageDir flags files <- listFilesInside (\x -> return $ lastDir x `notElem` testCandidates) pkgDir files' <- filter (not . null . map (`elem` testCandidates) . splitDirectories) <$> listFilesRecursive pkgDir let hasExe = not $ null [f | f <- files, isMain $ takeFileName f] hasLib = not $ null [f | f <- files, lastDir f `elem` srcCandidates] hasTest = not $ null [f | f <- files', isMain $ takeFileName f] return $ case (hasLib, hasExe, hasTest) of (True , True , _ ) -> LibraryAndExecutable (True , False, _ ) -> Library (False, False, True) -> TestSuite _ -> Executable -- | Try to guess the application directories from the package directory, -- using a default value as fallback. 
guessApplicationDirectories :: Interactive m => InitFlags -> m [FilePath] guessApplicationDirectories flags = do pkgDirs <- fromFlagOrDefault getCurrentDirectory (return <$> packageDir flags) pkgDirsContents <- listDirectory pkgDirs let candidates = [defaultApplicationDir, "app", "src-exe"] in return $ case [y | x <- candidates, y <- pkgDirsContents, x == y] of [] -> [defaultApplicationDir] x -> map ( pkgDirs) . nub $ x -- | Try to guess the source directories, using a default value as fallback. guessSourceDirectories :: Interactive m => InitFlags -> m [FilePath] guessSourceDirectories flags = do pkgDir <- fromFlagOrDefault getCurrentDirectory $ return <$> packageDir flags doesDirectoryExist (pkgDir "src") >>= return . \case False -> [defaultSourceDir] True -> ["src"] -- | Guess author and email using git configuration options. guessAuthorName :: Interactive m => m String guessAuthorName = guessGitInfo "user.name" guessAuthorEmail :: Interactive m => m String guessAuthorEmail = guessGitInfo "user.email" guessGitInfo :: Interactive m => String -> m String guessGitInfo target = do info <- readProcessWithExitCode "git" ["config", "--local", target] "" if null $ snd' info then trim . snd' <$> readProcessWithExitCode "git" ["config", "--global", target] "" else return . trim $ snd' info where snd' (_, x, _) = x cabal-install-3.8.1.0/src/Distribution/Client/Init/Prompt.hs0000644000000000000000000001164207346545000021770 0ustar0000000000000000{-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoImplicitPrelude #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Init.Prompt -- Copyright : (c) Brent Yorgey 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- User prompt utility functions for use by the 'cabal init' command. 
-- -----------------------------------------------------------------------------
module Distribution.Client.Init.Prompt
( prompt
, promptYesNo
, promptStr
, promptList
) where

import Prelude hiding (break, putStrLn, getLine, putStr)
import Distribution.Client.Compat.Prelude hiding (break, empty, getLine, putStr, putStrLn)

import Distribution.Client.Init.Types

import qualified System.IO

-- | Create a prompt with optional default value that returns a
-- String.
promptStr :: Interactive m => String -> DefaultPrompt String -> m String
promptStr = promptDefault Right id

-- | Create a yes/no prompt with optional default value.
promptYesNo
    :: Interactive m
    => String
       -- ^ prompt message
    -> DefaultPrompt Bool
       -- ^ optional default value
    -> m Bool
promptYesNo =
    promptDefault recogniseYesNo showYesNo
  where
    -- Case-insensitive parse of a single y/n answer.  The original
    -- additionally tested @s == "N"@ in the second guard, but lower-casing
    -- the input already covers that case, so the dead test is dropped.
    recogniseYesNo s
      | (toLower <$> s) == "y" = Right True
      | (toLower <$> s) == "n" = Right False
      | otherwise = Left $ "Cannot parse input: " ++ s

    showYesNo True  = "y"
    showYesNo False = "n"

-- | Create a prompt with optional default value that returns a value
-- of some Text instance.
prompt :: (Interactive m, Parsec t, Pretty t) => String -> DefaultPrompt t -> m t
prompt = promptDefault eitherParsec prettyShow

-- | Create a prompt from a prompt string and a String representation
-- of an optional default value.
mkDefPrompt :: String -> DefaultPrompt String -> String
mkDefPrompt msg def = msg ++ "?" ++ format def
  where
    format MandatoryPrompt   = " "
    format OptionalPrompt    = " [optional] "
    format (DefaultPrompt s) = " [default: " ++ s ++ "] "

-- | Create a prompt from a list of strings
promptList
  :: Interactive m
  => String
     -- ^ prompt
  -> [String]
     -- ^ choices
  -> DefaultPrompt String
     -- ^ optional default value
  -> Maybe (String -> String)
     -- ^ modify the default value to present in-prompt
     -- e.g. empty string maps to "(none)", but only in the
     -- prompt.
-> Bool -- ^ whether to allow an 'other' option -> m String promptList msg choices def modDef hasOther = do putStrLn $ msg ++ ":" -- Output nicely formatted list of options for_ prettyChoices $ \(i,c) -> do let star = if DefaultPrompt c == def then "*" else " " let output = concat $ if i < 10 then [" ", star, " ", show i, ") ", c] else [" ", star, show i, ") ", c] putStrLn output go where prettyChoices = let cs = if hasOther then choices ++ ["Other (specify)"] else choices in zip [1::Int .. length choices + 1] cs numChoices = length choices invalidChoice input = do let msg' = if null input then "Empty input is not a valid choice." else concat [ input , " is not a valid choice. Please choose a number from 1 to " , show (length prettyChoices) , "." ] putStrLn msg' breakOrContinue ("promptList: " ++ input) go go = do putStr $ mkDefPrompt "Your choice" $ maybe def (<$> def) modDef input <- getLine case def of DefaultPrompt d | null input -> return d _ -> case readMaybe input of Nothing -> invalidChoice input Just n | n > 0, n <= numChoices -> return $ choices !! (n-1) | n == numChoices + 1, hasOther -> promptStr "Please specify" OptionalPrompt | otherwise -> invalidChoice (show n) -- | Create a prompt with an optional default value. promptDefault :: Interactive m => (String -> Either String t) -- ^ parser -> (t -> String) -- ^ pretty-printer -> String -- ^ prompt message -> (DefaultPrompt t) -- ^ optional default value -> m t promptDefault parse pprint msg def = do putStr $ mkDefPrompt msg (pprint <$> def) hFlush System.IO.stdout input <- getLine case def of DefaultPrompt d | null input -> return d _ -> case parse input of Right t -> return t Left err -> do putStrLn $ "Couldn't parse " ++ input ++ ", please try again!" 
breakOrContinue ("promptDefault: " ++ err ++ " on input: " ++ input) (promptDefault parse pprint msg def) -- | Prompt utility for breaking out of an interactive loop -- in the pure case -- breakOrContinue :: Interactive m => String -> m a -> m a breakOrContinue msg act = break >>= \case True -> throwPrompt $ BreakException msg False -> act cabal-install-3.8.1.0/src/Distribution/Client/Init/Simple.hs0000644000000000000000000001512207346545000021735 0ustar0000000000000000module Distribution.Client.Init.Simple ( -- * Project creation createProject -- * Gen targets , genSimplePkgDesc , genSimpleLibTarget , genSimpleExeTarget , genSimpleTestTarget ) where import Distribution.Client.Init.Types import Distribution.Verbosity import Distribution.Simple.PackageIndex import Distribution.Client.Types.SourcePackageDb (SourcePackageDb(..)) import qualified Data.List.NonEmpty as NEL import Distribution.Client.Init.Utils (currentDirPkgName, mkPackageNameDep, fixupDocFiles) import Distribution.Client.Init.Defaults import Distribution.Simple.Flag (fromFlagOrDefault, flagElim, Flag(..)) import Distribution.Client.Init.FlagExtractors import qualified Data.Set as Set import Distribution.Types.Dependency import Distribution.Types.PackageName (unPackageName) createProject :: Interactive m => Verbosity -> InstalledPackageIndex -> SourcePackageDb -> InitFlags -> m ProjectSettings createProject v pkgIx _srcDb initFlags = do pkgType <- packageTypePrompt initFlags isMinimal <- getMinimal initFlags doOverwrite <- getOverwrite initFlags pkgDir <- getPackageDir initFlags pkgDesc <- fixupDocFiles v =<< genSimplePkgDesc initFlags let pkgName = _pkgName pkgDesc cabalSpec = _pkgCabalVersion pkgDesc mkOpts cs = WriteOpts doOverwrite isMinimal cs v pkgDir pkgType pkgName basedFlags <- addBaseDepToFlags pkgIx initFlags case pkgType of Library -> do libTarget <- genSimpleLibTarget basedFlags testTarget <- addLibDepToTest pkgName <$> genSimpleTestTarget basedFlags return $ ProjectSettings (mkOpts False 
cabalSpec) pkgDesc (Just libTarget) Nothing testTarget Executable -> do exeTarget <- genSimpleExeTarget basedFlags return $ ProjectSettings (mkOpts False cabalSpec) pkgDesc Nothing (Just exeTarget) Nothing LibraryAndExecutable -> do libTarget <- genSimpleLibTarget basedFlags testTarget <- addLibDepToTest pkgName <$> genSimpleTestTarget basedFlags exeTarget <- addLibDepToExe pkgName <$> genSimpleExeTarget basedFlags return $ ProjectSettings (mkOpts False cabalSpec) pkgDesc (Just libTarget) (Just exeTarget) testTarget TestSuite -> do testTarget <- genSimpleTestTarget basedFlags return $ ProjectSettings (mkOpts False cabalSpec) pkgDesc Nothing Nothing testTarget where -- Add package name as dependency of test suite -- addLibDepToTest _ Nothing = Nothing addLibDepToTest n (Just t) = Just $ t { _testDependencies = _testDependencies t ++ [mkPackageNameDep n] } -- Add package name as dependency of executable -- addLibDepToExe n exe = exe { _exeDependencies = _exeDependencies exe ++ [mkPackageNameDep n] } genSimplePkgDesc :: Interactive m => InitFlags -> m PkgDescription genSimplePkgDesc flags = mkPkgDesc <$> currentDirPkgName where defaultExtraDoc = Just $ Set.singleton defaultChangelog extractExtraDoc [] = defaultExtraDoc extractExtraDoc fs = Just $ Set.fromList fs mkPkgDesc pkgName = PkgDescription (fromFlagOrDefault defaultCabalVersion (cabalVersion flags)) pkgName (fromFlagOrDefault defaultVersion (version flags)) (fromFlagOrDefault (defaultLicense $ getCabalVersionNoPrompt flags) (license flags)) (fromFlagOrDefault "" (author flags)) (fromFlagOrDefault "" (email flags)) (fromFlagOrDefault "" (homepage flags)) (fromFlagOrDefault "" (synopsis flags)) (fromFlagOrDefault "" (category flags)) (flagElim mempty Set.fromList (extraSrc flags)) (flagElim defaultExtraDoc extractExtraDoc (extraDoc flags)) genSimpleLibTarget :: Interactive m => InitFlags -> m LibTarget genSimpleLibTarget flags = do buildToolDeps <- getBuildTools flags return $ LibTarget { _libSourceDirs = 
fromFlagOrDefault [defaultSourceDir] $ sourceDirs flags , _libLanguage = fromFlagOrDefault defaultLanguage $ language flags , _libExposedModules = flagElim (myLibModule NEL.:| []) extractMods $ exposedModules flags , _libOtherModules = fromFlagOrDefault [] $ otherModules flags , _libOtherExts = fromFlagOrDefault [] $ otherExts flags , _libDependencies = fromFlagOrDefault [] $ dependencies flags , _libBuildTools = buildToolDeps } where extractMods [] = myLibModule NEL.:| [] extractMods as = NEL.fromList as genSimpleExeTarget :: Interactive m => InitFlags -> m ExeTarget genSimpleExeTarget flags = do buildToolDeps <- getBuildTools flags return $ ExeTarget { _exeMainIs = flagElim defaultMainIs toHsFilePath $ mainIs flags , _exeApplicationDirs = fromFlagOrDefault [defaultApplicationDir] $ applicationDirs flags , _exeLanguage = fromFlagOrDefault defaultLanguage $ language flags , _exeOtherModules = fromFlagOrDefault [] $ otherModules flags , _exeOtherExts = fromFlagOrDefault [] $ otherExts flags , _exeDependencies = fromFlagOrDefault [] $ dependencies flags , _exeBuildTools = buildToolDeps } genSimpleTestTarget :: Interactive m => InitFlags -> m (Maybe TestTarget) genSimpleTestTarget flags = go =<< initializeTestSuitePrompt flags where go initialized | not initialized = return Nothing | otherwise = do buildToolDeps <- getBuildTools flags return $ Just $ TestTarget { _testMainIs = flagElim defaultMainIs toHsFilePath $ mainIs flags , _testDirs = fromFlagOrDefault [defaultTestDir] $ testDirs flags , _testLanguage = fromFlagOrDefault defaultLanguage $ language flags , _testOtherModules = fromFlagOrDefault [] $ otherModules flags , _testOtherExts = fromFlagOrDefault [] $ otherExts flags , _testDependencies = fromFlagOrDefault [] $ dependencies flags , _testBuildTools = buildToolDeps } -- -------------------------------------------------------------------- -- -- Utils -- | If deps are defined, and base is present, we skip the search for base. 
-- otherwise, we look up @base@ and add it to the list. addBaseDepToFlags :: Interactive m => InstalledPackageIndex -> InitFlags -> m InitFlags addBaseDepToFlags pkgIx initFlags = case dependencies initFlags of Flag as | any ((==) "base" . unPackageName . depPkgName) as -> return initFlags | otherwise -> do based <- dependenciesPrompt pkgIx initFlags return $ initFlags { dependencies = Flag $ based ++ as } _ -> do based <- dependenciesPrompt pkgIx initFlags return initFlags { dependencies = Flag based } cabal-install-3.8.1.0/src/Distribution/Client/Init/Types.hs0000644000000000000000000003422307346545000021613 0ustar0000000000000000{-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE BangPatterns #-} -- | -- Module : Distribution.Client.Init.Types -- Copyright : (c) Brent Yorgey, Benedikt Huber 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Some types used by the 'cabal init' command. -- module Distribution.Client.Init.Types ( -- * Data InitFlags(..) -- ** Targets and descriptions , PkgDescription(..) , LibTarget(..) , ExeTarget(..) , TestTarget(..) -- ** package types , PackageType(..) -- ** Main file , HsFilePath(..) , HsFileType(..) , fromHsFilePath , toHsFilePath , toLiterateHs , toStandardHs , mkLiterate , isHsFilePath -- * Typeclasses , Interactive(..) , BreakException(..) , PurePrompt(..) , evalPrompt , Severity(..) -- * Aliases , IsLiterate , IsSimple -- * File creator opts , WriteOpts(..) , ProjectSettings(..) -- * Formatters , FieldAnnotation(..) -- * Other conveniences , DefaultPrompt(..) 
) where import qualified Distribution.Client.Compat.Prelude as P import Distribution.Client.Compat.Prelude as P hiding (getLine, putStr, putStrLn) import Prelude (read) import Control.Monad.Catch import Data.List.NonEmpty (fromList) import Distribution.Simple.Setup (Flag(..)) import Distribution.Types.Dependency as P import Distribution.Verbosity (silent) import Distribution.Version import qualified Distribution.Package as P import Distribution.ModuleName import Distribution.CabalSpecVersion import Distribution.Client.Utils as P import Distribution.Fields.Pretty import Language.Haskell.Extension ( Language(..), Extension ) import qualified System.IO import qualified System.Directory as P import qualified System.Process as P import qualified Distribution.Compat.Environment as P import System.FilePath import Distribution.FieldGrammar.Newtypes (SpecLicense) -- -------------------------------------------------------------------- -- -- Flags -- | InitFlags is a subset of flags available in the -- @.cabal@ file that represent options that are relevant to the -- init command process. 
-- data InitFlags = InitFlags { interactive :: Flag Bool , quiet :: Flag Bool , packageDir :: Flag FilePath , noComments :: Flag Bool , minimal :: Flag Bool , simpleProject :: Flag Bool , packageName :: Flag P.PackageName , version :: Flag Version , cabalVersion :: Flag CabalSpecVersion , license :: Flag SpecLicense , author :: Flag String , email :: Flag String , homepage :: Flag String , synopsis :: Flag String , category :: Flag String , extraSrc :: Flag [String] , extraDoc :: Flag [String] , packageType :: Flag PackageType , mainIs :: Flag FilePath , language :: Flag Language , exposedModules :: Flag [ModuleName] , otherModules :: Flag [ModuleName] , otherExts :: Flag [Extension] , dependencies :: Flag [P.Dependency] , applicationDirs :: Flag [String] , sourceDirs :: Flag [String] , buildTools :: Flag [String] , initializeTestSuite :: Flag Bool , testDirs :: Flag [String] , initHcPath :: Flag FilePath , initVerbosity :: Flag Verbosity , overwrite :: Flag Bool } deriving (Eq, Show, Generic) instance Monoid InitFlags where mempty = gmempty mappend = (<>) instance Semigroup InitFlags where (<>) = gmappend -- -------------------------------------------------------------------- -- -- Targets -- | 'PkgDescription' represents the relevant options set by the -- user when building a package description during the init command -- process. -- data PkgDescription = PkgDescription { _pkgCabalVersion :: CabalSpecVersion , _pkgName :: P.PackageName , _pkgVersion :: Version , _pkgLicense :: SpecLicense , _pkgAuthor :: String , _pkgEmail :: String , _pkgHomePage :: String , _pkgSynopsis :: String , _pkgCategory :: String , _pkgExtraSrcFiles :: Set String , _pkgExtraDocFiles :: Maybe (Set String) } deriving (Show, Eq) -- | 'LibTarget' represents the relevant options set by the -- user when building a library package during the init command -- process. 
-- data LibTarget = LibTarget { _libSourceDirs :: [String] , _libLanguage :: Language , _libExposedModules :: NonEmpty ModuleName , _libOtherModules :: [ModuleName] , _libOtherExts :: [Extension] , _libDependencies :: [P.Dependency] , _libBuildTools :: [P.Dependency] } deriving (Show, Eq) -- | 'ExeTarget' represents the relevant options set by the -- user when building an executable package. -- data ExeTarget = ExeTarget { _exeMainIs :: HsFilePath , _exeApplicationDirs :: [String] , _exeLanguage :: Language , _exeOtherModules :: [ModuleName] , _exeOtherExts :: [Extension] , _exeDependencies :: [P.Dependency] , _exeBuildTools :: [P.Dependency] } deriving (Show, Eq) -- | 'TestTarget' represents the relevant options set by the -- user when building a library package. -- data TestTarget = TestTarget { _testMainIs :: HsFilePath , _testDirs :: [String] , _testLanguage :: Language , _testOtherModules :: [ModuleName] , _testOtherExts :: [Extension] , _testDependencies :: [P.Dependency] , _testBuildTools :: [P.Dependency] } deriving (Show, Eq) -- -------------------------------------------------------------------- -- -- File creator options data WriteOpts = WriteOpts { _optOverwrite :: Bool , _optMinimal :: Bool , _optNoComments :: Bool , _optVerbosity :: Verbosity , _optPkgDir :: FilePath , _optPkgType :: PackageType , _optPkgName :: P.PackageName , _optCabalSpec :: CabalSpecVersion } deriving (Eq, Show) data ProjectSettings = ProjectSettings { _pkgOpts :: WriteOpts , _pkgDesc :: PkgDescription , _pkgLibTarget :: Maybe LibTarget , _pkgExeTarget :: Maybe ExeTarget , _pkgTestTarget :: Maybe TestTarget } deriving (Eq, Show) -- -------------------------------------------------------------------- -- -- Other types -- | Enum to denote whether the user wants to build a library target, -- executable target, library and executable targets, or a standalone test suite. 
-- data PackageType = Library | Executable | LibraryAndExecutable | TestSuite deriving (Eq, Show, Generic) data HsFileType = Literate | Standard | InvalidHsPath deriving (Eq, Show) data HsFilePath = HsFilePath { _hsFilePath :: FilePath , _hsFileType :: HsFileType } deriving Eq instance Show HsFilePath where show (HsFilePath fp ty) = case ty of Literate -> fp Standard -> fp InvalidHsPath -> "Invalid haskell source file: " ++ fp fromHsFilePath :: HsFilePath -> Maybe FilePath fromHsFilePath (HsFilePath fp ty) = case ty of Literate -> Just fp Standard -> Just fp InvalidHsPath -> Nothing isHsFilePath :: FilePath -> Bool isHsFilePath fp = case _hsFileType $ toHsFilePath fp of InvalidHsPath -> False _ -> True toHsFilePath :: FilePath -> HsFilePath toHsFilePath fp | takeExtension fp == ".lhs" = HsFilePath fp Literate | takeExtension fp == ".hs" = HsFilePath fp Standard | otherwise = HsFilePath fp InvalidHsPath toLiterateHs :: HsFilePath -> HsFilePath toLiterateHs (HsFilePath fp Standard) = HsFilePath (dropExtension fp ++ ".lhs") Literate toLiterateHs a = a toStandardHs :: HsFilePath -> HsFilePath toStandardHs (HsFilePath fp Literate) = HsFilePath (dropExtension fp ++ ".hs") Standard toStandardHs a = a mkLiterate :: HsFilePath -> [String] -> [String] mkLiterate (HsFilePath _ Literate) hs = (\line -> if null line then line else "> " ++ line) <$> hs mkLiterate _ hs = hs -- -------------------------------------------------------------------- -- -- Interactive prompt monad newtype PurePrompt a = PurePrompt { _runPrompt :: NonEmpty String -> Either BreakException (a, NonEmpty String) } deriving (Functor) evalPrompt :: PurePrompt a -> NonEmpty String -> a evalPrompt act s = case _runPrompt act s of Left e -> error $ show e Right (a,_) -> a instance Applicative PurePrompt where pure a = PurePrompt $ \s -> Right (a, s) PurePrompt ff <*> PurePrompt aa = PurePrompt $ \s -> case ff s of Left e -> Left e Right (f, s') -> case aa s' of Left e -> Left e Right (a, s'') -> Right (f a, s'') 
instance Monad PurePrompt where return = pure PurePrompt a >>= k = PurePrompt $ \s -> case a s of Left e -> Left e Right (a', s') -> _runPrompt (k a') s' class Monad m => Interactive m where -- input functions getLine :: m String readFile :: FilePath -> m String getCurrentDirectory :: m FilePath getHomeDirectory :: m FilePath getDirectoryContents :: FilePath -> m [FilePath] listDirectory :: FilePath -> m [FilePath] doesDirectoryExist :: FilePath -> m Bool doesFileExist :: FilePath -> m Bool canonicalizePathNoThrow :: FilePath -> m FilePath readProcessWithExitCode :: FilePath -> [String] -> String -> m (ExitCode, String, String) getEnvironment :: m [(String, String)] getCurrentYear :: m Integer listFilesInside :: (FilePath -> m Bool) -> FilePath -> m [FilePath] listFilesRecursive :: FilePath -> m [FilePath] -- output functions putStr :: String -> m () putStrLn :: String -> m () createDirectory :: FilePath -> m () removeDirectory :: FilePath -> m () writeFile :: FilePath -> String -> m () removeExistingFile :: FilePath -> m () copyFile :: FilePath -> FilePath -> m () renameDirectory :: FilePath -> FilePath -> m () hFlush :: System.IO.Handle -> m () message :: Verbosity -> Severity -> String -> m () -- misc functions break :: m Bool throwPrompt :: BreakException -> m a instance Interactive IO where getLine = P.getLine readFile = P.readFile getCurrentDirectory = P.getCurrentDirectory getHomeDirectory = P.getHomeDirectory getDirectoryContents = P.getDirectoryContents listDirectory = P.listDirectory doesDirectoryExist = P.doesDirectoryExist doesFileExist = P.doesFileExist canonicalizePathNoThrow = P.canonicalizePathNoThrow readProcessWithExitCode = P.readProcessWithExitCode getEnvironment = P.getEnvironment getCurrentYear = P.getCurrentYear listFilesInside = P.listFilesInside listFilesRecursive = P.listFilesRecursive putStr = P.putStr putStrLn = P.putStrLn createDirectory = P.createDirectory removeDirectory = P.removeDirectoryRecursive writeFile = P.writeFile 
removeExistingFile = P.removeExistingFile copyFile = P.copyFile renameDirectory = P.renameDirectory hFlush = System.IO.hFlush message q severity msg | q == silent = pure () | otherwise = putStrLn $ "[" ++ show severity ++ "] " ++ msg break = return False throwPrompt = throwM instance Interactive PurePrompt where getLine = pop readFile !_ = pop getCurrentDirectory = popAbsolute getHomeDirectory = popAbsolute -- expects stack input of form "[\"foo\", \"bar\", \"baz\"]" getDirectoryContents !_ = popList listDirectory !_ = popList doesDirectoryExist !_ = popBool doesFileExist !_ = popBool canonicalizePathNoThrow !_ = popAbsolute readProcessWithExitCode !_ !_ !_ = do input <- pop return (ExitSuccess, input, "") getEnvironment = fmap (map read) popList getCurrentYear = fmap read pop listFilesInside pred' !_ = do input <- map splitDirectories <$> popList map joinPath <$> filterM (fmap and . traverse pred') input listFilesRecursive !_ = popList putStr !_ = return () putStrLn !_ = return () createDirectory !d = checkInvalidPath d () removeDirectory !d = checkInvalidPath d () writeFile !f !_ = checkInvalidPath f () removeExistingFile !f = checkInvalidPath f () copyFile !f !_ = checkInvalidPath f () renameDirectory !d !_ = checkInvalidPath d () hFlush _ = return () message !_ !severity !msg = case severity of Error -> PurePrompt $ \_ -> Left $ BreakException (show severity ++ ": " ++ msg) _ -> return () break = return True throwPrompt (BreakException e) = PurePrompt $ \s -> Left $ BreakException ("Error: " ++ e ++ "\nStacktrace: " ++ show s) pop :: PurePrompt String pop = PurePrompt $ \ (p:|ps) -> Right (p,fromList ps) popAbsolute :: PurePrompt String popAbsolute = do input <- pop return $ "/home/test/" ++ input popBool :: PurePrompt Bool popBool = pop >>= \case "True" -> pure True "False" -> pure False s -> throwPrompt $ BreakException $ "popBool: " ++ s popList :: PurePrompt [String] popList = pop >>= \a -> case P.safeRead a of Nothing -> throwPrompt $ BreakException 
("popList: " ++ show a) Just as -> return as checkInvalidPath :: String -> a -> PurePrompt a checkInvalidPath path act = -- The check below is done this way so it's easier to append -- more invalid paths in the future, if necessary if path `elem` ["."] then throwPrompt $ BreakException $ "Invalid path: " ++ path else return act -- | A pure exception thrown exclusively by the pure prompter -- to cancel infinite loops in the prompting process. -- -- For example, in order to break on parse errors, or user-driven -- continuations that do not make sense to test. -- newtype BreakException = BreakException String deriving (Eq, Show) instance Exception BreakException -- | Used to inform the intent of prompted messages. -- data Severity = Log | Info | Warning | Error deriving (Eq, Show) -- | Convenience alias for the literate haskell flag -- type IsLiterate = Bool -- | Convenience alias for generating simple projects -- type IsSimple = Bool -- | Defines whether or not a prompt will have a default value, -- is optional, or is mandatory. data DefaultPrompt t = DefaultPrompt t | OptionalPrompt | MandatoryPrompt deriving (Eq, Functor) -- -------------------------------------------------------------------- -- -- Field annotation for pretty formatters -- | Annotations for cabal file PrettyField. data FieldAnnotation = FieldAnnotation { annCommentedOut :: Bool -- ^ True iif the field and its contents should be commented out. , annCommentLines :: CommentPosition -- ^ Comment lines to place before the field or section. } cabal-install-3.8.1.0/src/Distribution/Client/Init/Utils.hs0000644000000000000000000002715507346545000021615 0ustar0000000000000000{-# LANGUAGE RecordWildCards #-} module Distribution.Client.Init.Utils ( SourceFileEntry(..) 
, retrieveSourceFiles , retrieveModuleName , retrieveModuleImports , retrieveModuleExtensions , retrieveBuildTools , retrieveDependencies , isMain , isHaskell , isSourceFile , trim , currentDirPkgName , filePathToPkgName , mkPackageNameDep , fixupDocFiles , mkStringyDep , getBaseDep , addLibDepToExe , addLibDepToTest ) where import qualified Prelude import Distribution.Client.Compat.Prelude hiding (putStrLn, empty, readFile, Parsec, many) import Distribution.Utils.Generic (isInfixOf) import Control.Monad (forM) import qualified Data.List.NonEmpty as NE import qualified Data.Map as M import Language.Haskell.Extension (Extension(..)) import System.FilePath import Distribution.CabalSpecVersion (CabalSpecVersion(..)) import Distribution.ModuleName (ModuleName) import Distribution.InstalledPackageInfo (InstalledPackageInfo, exposed) import qualified Distribution.Package as P import qualified Distribution.Types.PackageName as PN import Distribution.Simple.PackageIndex (InstalledPackageIndex, moduleNameIndex) import Distribution.Simple.Setup (Flag(..)) import Distribution.Utils.String (trim) import Distribution.Version import Distribution.Client.Init.Defaults import Distribution.Client.Init.Types import Distribution.Client.Utils (pvpize) import Distribution.Types.PackageName import Distribution.Types.Dependency (Dependency, mkDependency) import qualified Distribution.Compat.NonEmptySet as NES import Distribution.Types.LibraryName import Distribution.Verbosity (silent) -- |Data type of source files found in the working directory data SourceFileEntry = SourceFileEntry { relativeSourcePath :: FilePath , moduleName :: ModuleName , fileExtension :: String , imports :: [ModuleName] , extensions :: [Extension] } deriving Show -- Unfortunately we cannot use the version exported by Distribution.Simple.Program knownSuffixHandlers :: CabalSpecVersion -> String -> String knownSuffixHandlers v s | v < CabalSpecV3_0 = case s of ".gc" -> "greencard" ".chs" -> "chs" ".hsc" -> "hsc2hs" 
".x" -> "alex" ".y" -> "happy" ".ly" -> "happy" ".cpphs" -> "cpp" _ -> "" | otherwise = case s of ".gc" -> "greencard:greencard" ".chs" -> "chs:chs" ".hsc" -> "hsc2hs:hsc2hs" ".x" -> "alex:alex" ".y" -> "happy:happy" ".ly" -> "happy:happy" ".cpphs" -> "cpp:cpp" _ -> "" -- | Check if a given file has main file characteristics isMain :: String -> Bool isMain f = (isInfixOf "Main" f || isInfixOf "main" f) && isSuffixOf ".hs" f || isSuffixOf ".lhs" f -- | Check if a given file has a Haskell extension isHaskell :: String -> Bool isHaskell f = isSuffixOf ".hs" f || isSuffixOf ".lhs" f isBuildTool :: CabalSpecVersion -> String -> Bool isBuildTool v = not . null . knownSuffixHandlers v . takeExtension retrieveBuildTools :: Interactive m => CabalSpecVersion -> FilePath -> m [Dependency] retrieveBuildTools v fp = do exists <- doesDirectoryExist fp if exists then do files <- fmap takeExtension <$> listFilesRecursive fp let tools = [ mkStringyDep (knownSuffixHandlers v f) | f <- files, isBuildTool v f ] return tools else return [] retrieveSourceFiles :: Interactive m => FilePath -> m [SourceFileEntry] retrieveSourceFiles fp = do exists <- doesDirectoryExist fp if exists then do files <- filter isHaskell <$> listFilesRecursive fp entries <- forM files $ \f -> do exists' <- doesFileExist f if exists' then do maybeModuleName <- retrieveModuleName f case maybeModuleName of Nothing -> return Nothing Just moduleName -> do let fileExtension = takeExtension f relativeSourcePath <- makeRelative f <$> getCurrentDirectory imports <- retrieveModuleImports f extensions <- retrieveModuleExtensions f return . Just $ SourceFileEntry {..} else return Nothing return . catMaybes $ entries else return [] -- | Given a module, retrieve its name retrieveModuleName :: Interactive m => FilePath -> m (Maybe ModuleName) retrieveModuleName m = do rawModule <- trim . 
grabModuleName <$> readFile m if isInfixOf rawModule (dirToModuleName m) then return $ Just $ fromString rawModule else do putStrLn $ "Warning: found module that doesn't match directory structure: " ++ rawModule return Nothing where dirToModuleName = map (\x -> if x == '/' || x == '\\' then '.' else x) stop c = (c /= '\n') && (c /= ' ') grabModuleName [] = [] grabModuleName ('-':'-':xs) = grabModuleName $ dropWhile' (/= '\n') xs grabModuleName ('m':'o':'d':'u':'l':'e':' ':xs) = takeWhile' stop xs grabModuleName (_:xs) = grabModuleName xs -- | Given a module, retrieve all of its imports retrieveModuleImports :: Interactive m => FilePath -> m [ModuleName] retrieveModuleImports m = do map (fromString . trim) . grabModuleImports <$> readFile m where stop c = (c /= '\n') && (c /= ' ') && (c /= '(') grabModuleImports [] = [] grabModuleImports ('-':'-':xs) = grabModuleImports $ dropWhile' (/= '\n') xs grabModuleImports ('i':'m':'p':'o':'r':'t':' ':xs) = case trim xs of -- in case someone uses a weird formatting ('q':'u':'a':'l':'i':'f':'i':'e':'d':' ':ys) -> takeWhile' stop ys : grabModuleImports (dropWhile' stop ys) _ -> takeWhile' stop xs : grabModuleImports (dropWhile' stop xs) grabModuleImports (_:xs) = grabModuleImports xs -- | Given a module, retrieve all of its language pragmas retrieveModuleExtensions :: Interactive m => FilePath -> m [Extension] retrieveModuleExtensions m = do catMaybes <$> map (simpleParsec . trim) . 
grabModuleExtensions <$> readFile m where stop c = (c /= '\n') && (c /= ' ') && (c /= ',') && (c /= '#') grabModuleExtensions [] = [] grabModuleExtensions ('-':'-':xs) = grabModuleExtensions $ dropWhile' (/= '\n') xs grabModuleExtensions ('L':'A':'N':'G':'U':'A':'G':'E':xs) = takeWhile' stop xs : grabModuleExtensions' (dropWhile' stop xs) grabModuleExtensions (_:xs) = grabModuleExtensions xs grabModuleExtensions' [] = [] grabModuleExtensions' ('#':xs) = grabModuleExtensions xs grabModuleExtensions' (',':xs) = takeWhile' stop xs : grabModuleExtensions' (dropWhile' stop xs) grabModuleExtensions' (_:xs) = grabModuleExtensions xs takeWhile' :: (Char -> Bool) -> String -> String takeWhile' p = takeWhile p . trim dropWhile' :: (Char -> Bool) -> String -> String dropWhile' p = dropWhile p . trim -- | Check whether a potential source file is located in one of the -- source directories. isSourceFile :: Maybe [FilePath] -> SourceFileEntry -> Bool isSourceFile Nothing sf = isSourceFile (Just ["."]) sf isSourceFile (Just srcDirs) sf = any (equalFilePath (relativeSourcePath sf)) srcDirs retrieveDependencies :: Interactive m => Verbosity -> InitFlags -> [(ModuleName, ModuleName)] -> InstalledPackageIndex -> m [P.Dependency] retrieveDependencies v flags mods' pkgIx = do let mods = mods' modMap :: M.Map ModuleName [InstalledPackageInfo] modMap = M.map (filter exposed) $ moduleNameIndex pkgIx modDeps :: [(ModuleName, ModuleName, Maybe [InstalledPackageInfo])] modDeps = map (\(mn, ds) -> (mn, ds, M.lookup ds modMap)) mods -- modDeps = map (id &&& flip M.lookup modMap) mods message v Log "Guessing dependencies..." nub . catMaybes <$> traverse (chooseDep v flags) modDeps -- Given a module and a list of installed packages providing it, -- choose a dependency (i.e. package + version range) to use for that -- module. 
chooseDep :: Interactive m => Verbosity -> InitFlags -> (ModuleName, ModuleName, Maybe [InstalledPackageInfo]) -> m (Maybe P.Dependency) chooseDep v flags (importer, m, mipi) = case mipi of -- We found some packages: group them by name. Just ps@(_:_) -> case NE.groupBy (\x y -> P.pkgName x == P.pkgName y) $ map P.packageId ps of -- if there's only one group, i.e. multiple versions of a single package, -- we make it into a dependency, choosing the latest-ish version. -- Given a list of available versions of the same package, pick a dependency. [grp] -> fmap Just $ case grp of -- If only one version, easy. We change e.g. 0.4.2 into 0.4.* (pid:|[]) -> return $ P.Dependency (P.pkgName pid) (pvpize desugar . P.pkgVersion $ pid) P.mainLibSet --TODO sublibraries -- Otherwise, choose the latest version and issue a warning. pids -> do message v Warning ("multiple versions of " ++ prettyShow (P.pkgName . NE.head $ pids) ++ " provide " ++ prettyShow m ++ ", choosing the latest.") return $ P.Dependency (P.pkgName . NE.head $ pids) (pvpize desugar . maximum . fmap P.pkgVersion $ pids) P.mainLibSet --TODO take into account sublibraries -- if multiple packages are found, we refuse to choose between -- different packages and make the user do it grps -> do message v Warning ("multiple packages found providing " ++ prettyShow m ++ ": " ++ intercalate ", " (fmap (prettyShow . P.pkgName . NE.head) grps)) message v Warning "You will need to pick one and manually add it to the build-depends field." return Nothing _ -> do message v Warning ("no package found providing " ++ prettyShow m ++ " in " ++ prettyShow importer ++ ".") return Nothing where -- desugar if cabal version lower than 2.0 desugar = case cabalVersion flags of Flag x -> x < CabalSpecV2_0 NoFlag -> defaultCabalVersion < CabalSpecV2_0 filePathToPkgName :: FilePath -> P.PackageName filePathToPkgName = PN.mkPackageName . Prelude.last . 
splitDirectories currentDirPkgName :: Interactive m => m P.PackageName currentDirPkgName = filePathToPkgName <$> getCurrentDirectory mkPackageNameDep :: PackageName -> Dependency mkPackageNameDep pkg = mkDependency pkg anyVersion (NES.singleton LMainLibName) -- when cabal-version < 1.18, extra-doc-files is not supported -- so whatever the user wants as doc files should be dumped into -- extra-src-files. -- fixupDocFiles :: Interactive m => Verbosity -> PkgDescription -> m PkgDescription fixupDocFiles v pkgDesc | _pkgCabalVersion pkgDesc < CabalSpecV1_18 = do message v Warning $ concat [ "Cabal spec versions < 1.18 do not support extra-doc-files. " , "Doc files will be treated as extra-src-files." ] return $ pkgDesc { _pkgExtraSrcFiles =_pkgExtraSrcFiles pkgDesc <> fromMaybe mempty (_pkgExtraDocFiles pkgDesc) , _pkgExtraDocFiles = Nothing } | otherwise = return pkgDesc mkStringyDep :: String -> Dependency mkStringyDep = mkPackageNameDep . mkPackageName getBaseDep :: Interactive m => InstalledPackageIndex -> InitFlags -> m [Dependency] getBaseDep pkgIx flags = retrieveDependencies silent flags [(fromString "Prelude", fromString "Prelude")] pkgIx -- Add package name as dependency of test suite -- addLibDepToTest :: PackageName -> Maybe TestTarget -> Maybe TestTarget addLibDepToTest _ Nothing = Nothing addLibDepToTest n (Just t) = Just $ t { _testDependencies = _testDependencies t ++ [mkPackageNameDep n] } -- Add package name as dependency of executable -- addLibDepToExe :: PackageName -> ExeTarget -> ExeTarget addLibDepToExe n exe = exe { _exeDependencies = _exeDependencies exe ++ [mkPackageNameDep n] } cabal-install-3.8.1.0/src/Distribution/Client/Install.hs0000644000000000000000000017703307346545000021221 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Install -- Copyright : (c) 2005 David Himmelstrup -- 2007 Bjorn Bringert -- 2007-2010 Duncan Coutts -- 
License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- High level interface to package installation. ----------------------------------------------------------------------------- module Distribution.Client.Install ( -- * High-level interface install, -- * Lower-level interface that allows to manipulate the install plan makeInstallContext, makeInstallPlan, processInstallPlan, InstallArgs, InstallContext, -- * Prune certain packages from the install plan pruneInstallPlan ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Utils.Generic(safeLast) import qualified Data.List.NonEmpty as NE import qualified Data.Map as Map import Control.Exception as Exception ( bracket, catches, Handler(Handler), handleJust ) import System.Directory ( getTemporaryDirectory, doesDirectoryExist, doesFileExist, createDirectoryIfMissing, removeFile, renameDirectory, getDirectoryContents ) import System.FilePath ( (), (<.>), equalFilePath, takeDirectory ) import System.IO ( openFile, IOMode(AppendMode), hClose ) import System.IO.Error ( isDoesNotExistError, ioeGetFileName ) import Distribution.Client.Targets import Distribution.Client.Configure ( chooseCabalVersion, configureSetupScript, checkConfigExFlags ) import Distribution.Client.Dependency import Distribution.Client.Dependency.Types ( Solver(..) ) import Distribution.Client.FetchUtils import Distribution.Client.HttpUtils ( HttpTransport (..) 
) import Distribution.Solver.Types.PackageFixedDeps import qualified Distribution.Client.Haddock as Haddock (regenerateHaddockIndex) import Distribution.Client.IndexUtils as IndexUtils ( getSourcePackagesAtIndexState, getInstalledPackages ) import qualified Distribution.Client.InstallPlan as InstallPlan import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan import Distribution.Client.InstallPlan (InstallPlan) import Distribution.Client.SolverInstallPlan (SolverInstallPlan) import Distribution.Client.Setup ( GlobalFlags(..), RepoContext(..) , ConfigFlags(..), configureCommand, filterConfigureFlags , ConfigExFlags(..), InstallFlags(..) , filterTestFlags ) import Distribution.Client.Config ( getCabalDir, defaultUserInstall ) import Distribution.Client.Tar (extractTarGzFile) import Distribution.Client.Types as Source import Distribution.Client.BuildReports.Types ( ReportLevel(..) ) import Distribution.Client.SetupWrapper ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) import Distribution.Client.BuildReports.Anonymous (showBuildReport) import qualified Distribution.Client.BuildReports.Anonymous as BuildReports import qualified Distribution.Client.BuildReports.Storage as BuildReports ( storeAnonymous, storeLocal, fromInstallPlan, fromPlanningFailure ) import qualified Distribution.Client.InstallSymlink as InstallSymlink ( symlinkBinaries ) import Distribution.Client.Types.OverwritePolicy (OverwritePolicy (..)) import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade import qualified Distribution.InstalledPackageInfo as Installed import Distribution.Client.JobControl import qualified Distribution.Solver.Types.ComponentDeps as CD import Distribution.Solver.Types.ConstraintSource import Distribution.Solver.Types.Settings import Distribution.Solver.Types.LabeledPackageConstraint import Distribution.Solver.Types.OptionalStanza import qualified Distribution.Solver.Types.PackageIndex as SourcePackageIndex import 
Distribution.Solver.Types.PkgConfigDb ( PkgConfigDb, readPkgConfigDb ) import Distribution.Solver.Types.SourcePackage as SourcePackage import Distribution.Utils.NubList import Distribution.Simple.Compiler ( CompilerId(..), Compiler(compilerId), compilerFlavor , CompilerInfo(..), compilerInfo, PackageDB(..), PackageDBStack ) import Distribution.Simple.Program (ProgramDb) import qualified Distribution.Simple.InstallDirs as InstallDirs import qualified Distribution.Simple.PackageIndex as PackageIndex import Distribution.Simple.PackageIndex (InstalledPackageIndex) import Distribution.Simple.Setup ( haddockCommand, HaddockFlags(..) , buildCommand, BuildFlags(..), emptyBuildFlags , TestFlags, BenchmarkFlags , toFlag, fromFlag, fromFlagOrDefault, flagToMaybe, defaultDistPref ) import qualified Distribution.Simple.Setup as Cabal ( Flag(..) , copyCommand, CopyFlags(..), emptyCopyFlags , registerCommand, RegisterFlags(..), emptyRegisterFlags , testCommand, TestFlags(..) ) import Distribution.Simple.Utils ( createDirectoryIfMissingVerbose, writeFileAtomic ) import Distribution.Simple.InstallDirs as InstallDirs ( PathTemplate, fromPathTemplate, toPathTemplate, substPathTemplate , initialPathTemplateEnv, installDirsTemplateEnv ) import Distribution.Simple.Configure (interpretPackageDbFlags) import Distribution.Simple.Register (registerPackage, defaultRegisterOptions) import Distribution.Package ( PackageIdentifier(..), PackageId, packageName, packageVersion , Package(..), HasMungedPackageId(..), HasUnitId(..) , UnitId ) import Distribution.Types.GivenComponent ( GivenComponent(..) ) import Distribution.Types.PackageVersionConstraint ( PackageVersionConstraint(..), thisPackageVersionConstraint ) import Distribution.Types.MungedPackageId import qualified Distribution.PackageDescription as PackageDescription import Distribution.PackageDescription ( PackageDescription, GenericPackageDescription(..) 
) import Distribution.Types.Flag ( PackageFlag(..), FlagAssignment, mkFlagAssignment , showFlagAssignment, diffFlagAssignment, nullFlagAssignment ) import Distribution.PackageDescription.Configuration ( finalizePD ) import Distribution.Version ( Version, VersionRange, foldVersionRange ) import Distribution.Simple.Utils as Utils ( notice, info, warn, debug, debugNoWrap, die' , withTempDirectory ) import Distribution.Client.Utils ( determineNumJobs, logDirChange, mergeBy, MergeResult(..) , ProgressPhase(..), progressMessage ) import Distribution.System ( Platform, OS(Windows), buildOS, buildPlatform ) import Distribution.Verbosity as Verbosity ( modifyVerbosity, normal, verbose ) import Distribution.Simple.BuildPaths ( exeExtension ) import qualified Data.ByteString as BS --TODO: -- * assign flags to packages individually -- * complain about flags that do not apply to any package given as target -- so flags do not apply to dependencies, only listed, can use flag -- constraints for dependencies -- * allow flag constraints -- * allow installed constraints -- * allow flag and installed preferences -- * allow persistent configure flags for each package individually -- ------------------------------------------------------------ -- * Top level user actions -- ------------------------------------------------------------ -- | Installs the packages needed to satisfy a list of dependencies. 
-- install :: Verbosity -> PackageDBStack -> RepoContext -> Compiler -> Platform -> ProgramDb -> GlobalFlags -> ConfigFlags -> ConfigExFlags -> InstallFlags -> HaddockFlags -> TestFlags -> BenchmarkFlags -> [UserTarget] -> IO () install verbosity packageDBs repos comp platform progdb globalFlags configFlags configExFlags installFlags haddockFlags testFlags benchmarkFlags userTargets0 = do unless (installRootCmd installFlags == Cabal.NoFlag) $ warn verbosity $ "--root-cmd is no longer supported, " ++ "see https://github.com/haskell/cabal/issues/3353" ++ " (if you didn't type --root-cmd, comment out root-cmd" ++ " in your ~/.cabal/config file)" let userOrSandbox = fromFlag (configUserInstall configFlags) unless userOrSandbox $ warn verbosity $ "the --global flag is deprecated -- " ++ "it is generally considered a bad idea to install packages " ++ "into the global store" installContext <- makeInstallContext verbosity args (Just userTargets0) planResult <- foldProgress logMsg (return . Left) (return . Right) =<< makeInstallPlan verbosity args installContext case planResult of Left message -> do reportPlanningFailure verbosity args installContext message die'' message Right installPlan -> processInstallPlan verbosity args installContext installPlan where args :: InstallArgs args = (packageDBs, repos, comp, platform, progdb, globalFlags, configFlags, configExFlags, installFlags, haddockFlags, testFlags, benchmarkFlags) die'' = die' verbosity logMsg message rest = debugNoWrap verbosity message >> rest -- TODO: Make InstallContext a proper data type with documented fields. -- | Common context for makeInstallPlan and processInstallPlan. type InstallContext = ( InstalledPackageIndex, SourcePackageDb , PkgConfigDb , [UserTarget], [PackageSpecifier UnresolvedSourcePackage] , HttpTransport ) -- TODO: Make InstallArgs a proper data type with documented fields or just get -- rid of it completely. -- | Initial arguments given to 'install' or 'makeInstallContext'. 
type InstallArgs = ( PackageDBStack , RepoContext , Compiler , Platform , ProgramDb , GlobalFlags , ConfigFlags , ConfigExFlags , InstallFlags , HaddockFlags , TestFlags , BenchmarkFlags ) -- | Make an install context given install arguments. makeInstallContext :: Verbosity -> InstallArgs -> Maybe [UserTarget] -> IO InstallContext makeInstallContext verbosity (packageDBs, repoCtxt, comp, _, progdb, _, _, configExFlags, installFlags, _, _, _) mUserTargets = do let idxState = flagToMaybe (installIndexState installFlags) installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb (sourcePkgDb, _, _) <- getSourcePackagesAtIndexState verbosity repoCtxt idxState Nothing pkgConfigDb <- readPkgConfigDb verbosity progdb checkConfigExFlags verbosity installedPkgIndex (packageIndex sourcePkgDb) configExFlags transport <- repoContextGetTransport repoCtxt (userTargets, pkgSpecifiers) <- case mUserTargets of Nothing -> -- We want to distinguish between the case where the user has given an -- empty list of targets on the command-line and the case where we -- specifically want to have an empty list of targets. return ([], []) Just userTargets0 -> do -- For install, if no target is given it means we use the current -- directory as the single target. let userTargets | null userTargets0 = [UserTargetLocalDir "."] | otherwise = userTargets0 pkgSpecifiers <- resolveUserTargets verbosity repoCtxt (packageIndex sourcePkgDb) userTargets return (userTargets, pkgSpecifiers) return (installedPkgIndex, sourcePkgDb, pkgConfigDb, userTargets ,pkgSpecifiers, transport) -- | Make an install plan given install context and install arguments. 
-- | Resolve the install plan for the given targets: choose a solver and run
-- dependency resolution.  Pure planning only — nothing is built or installed.
makeInstallPlan :: Verbosity -> InstallArgs -> InstallContext
                -> IO (Progress String String SolverInstallPlan)
makeInstallPlan verbosity
  (_, _, comp, platform,_, _,
   configFlags, configExFlags, installFlags,
   _, _, _)
  (installedPkgIndex, sourcePkgDb, pkgConfigDb,
   _, pkgSpecifiers, _) = do

    solver <- chooseSolver verbosity (fromFlag (configSolver configExFlags))
              (compilerInfo comp)
    notice verbosity "Resolving dependencies..."
    return $ planPackages verbosity comp platform solver
          configFlags configExFlags installFlags
          installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers

-- | Given an install plan, perform the actual installations.
processInstallPlan :: Verbosity -> InstallArgs -> InstallContext
                   -> SolverInstallPlan
                   -> IO ()
processInstallPlan verbosity
  args@(_,_, _, _, _, _, configFlags, _, installFlags, _, _, _)
  (installedPkgIndex, sourcePkgDb, _, userTargets, pkgSpecifiers, _)
  installPlan0 = do

    checkPrintPlan verbosity installedPkgIndex installPlan sourcePkgDb
      installFlags pkgSpecifiers

    unless (dryRun || nothingToInstall) $ do
      buildOutcomes <- performInstallations verbosity
                         args installedPkgIndex installPlan
      postInstallActions verbosity args userTargets installPlan buildOutcomes
  where
    -- Elaborate the solver plan with the configure flags before execution.
    installPlan = InstallPlan.configureInstallPlan configFlags installPlan0
    dryRun = fromFlag (installDryRun installFlags)
    -- "Nothing to install" == no package in the plan is ready to build.
    nothingToInstall = null (fst (InstallPlan.ready installPlan))

-- ------------------------------------------------------------
-- * Installation planning
-- ------------------------------------------------------------

-- | Build the resolver parameters from the various command-line/config flags
-- and run the dependency solver.  When @--only-dependencies@ was given, the
-- user targets themselves are pruned from the resulting plan afterwards.
planPackages :: Verbosity -> Compiler -> Platform -> Solver -> ConfigFlags
             -> ConfigExFlags -> InstallFlags
             -> InstalledPackageIndex -> SourcePackageDb -> PkgConfigDb
             -> [PackageSpecifier UnresolvedSourcePackage]
             -> Progress String String SolverInstallPlan
planPackages verbosity comp platform solver
             configFlags configExFlags installFlags
             installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers =

        resolveDependencies
          platform (compilerInfo comp) pkgConfigDb
          solver
          resolverParams

    >>= if onlyDeps then pruneInstallPlan pkgSpecifiers else return

  where
    -- Each 'set*' below threads one flag into the solver configuration; the
    -- whole chain is applied to the standard install policy at the end.
    resolverParams =

        setMaxBackjumps (if maxBackjumps < 0 then Nothing
                                             else Just maxBackjumps)

      . setIndependentGoals independentGoals

      . setReorderGoals reorderGoals

      . setCountConflicts countConflicts

      . setFineGrainedConflicts fineGrainedConflicts

      . setMinimizeConflictSet minimizeConflictSet

      . setAvoidReinstalls avoidReinstalls

      . setShadowPkgs shadowPkgs

      . setStrongFlags strongFlags

      . setAllowBootLibInstalls allowBootLibInstalls

      . setOnlyConstrained onlyConstrained

      . setSolverVerbosity verbosity

      . setPreferenceDefault (if upgradeDeps then PreferAllLatest
                                             else PreferLatestForSelected)

      . removeLowerBounds allowOlder
      . removeUpperBounds allowNewer

      . addPreferences
          -- preferences from the config file or command line
          [ PackageVersionPreference name ver
          | PackageVersionConstraint name ver <- configPreferences configExFlags ]

      . addConstraints
          -- version constraints from the config file or command line
          [ LabeledPackageConstraint (userToPackageConstraint pc) src
          | (pc, src) <- configExConstraints configExFlags ]

      . addConstraints
          --FIXME: this just applies all flags to all targets which
          -- is silly. We should check if the flags are appropriate
          [ let pc = PackageConstraint
                     (scopeToplevel $ pkgSpecifierTarget pkgSpecifier)
                     (PackagePropertyFlags flags)
            in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget
          | let flags = configConfigurationsFlags configFlags
          , not (nullFlagAssignment flags)
          , pkgSpecifier <- pkgSpecifiers ]

      . addConstraints
          [ let pc = PackageConstraint
                     (scopeToplevel $ pkgSpecifierTarget pkgSpecifier)
                     (PackagePropertyStanzas stanzas)
            in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget
          | pkgSpecifier <- pkgSpecifiers ]

      . (if reinstall then reinstallTargets else id)

        -- Don't solve for executables, the legacy install codepath
        -- doesn't understand how to install them
      . setSolveExecutables (SolveExecutables False)

      $ standardInstallPolicy
        installedPkgIndex sourcePkgDb pkgSpecifiers

    -- Stanzas (tests/benchmarks) are only requested when the corresponding
    -- configure flag is set.
    stanzas           = [ TestStanzas | testsEnabled ]
                     ++ [ BenchStanzas | benchmarksEnabled ]
    testsEnabled      = fromFlagOrDefault False $ configTests configFlags
    benchmarksEnabled = fromFlagOrDefault False $ configBenchmarks configFlags

    -- Either --reinstall or --force-reinstalls requests reinstall targets.
    reinstall        = fromFlag (installOverrideReinstall installFlags) ||
                       fromFlag (installReinstall         installFlags)
    reorderGoals     = fromFlag (installReorderGoals     installFlags)
    countConflicts   = fromFlag (installCountConflicts   installFlags)
    fineGrainedConflicts = fromFlag (installFineGrainedConflicts installFlags)
    minimizeConflictSet = fromFlag (installMinimizeConflictSet installFlags)
    independentGoals = fromFlag (installIndependentGoals installFlags)
    avoidReinstalls  = fromFlag (installAvoidReinstalls  installFlags)
    shadowPkgs       = fromFlag (installShadowPkgs       installFlags)
    strongFlags      = fromFlag (installStrongFlags      installFlags)
    maxBackjumps     = fromFlag (installMaxBackjumps     installFlags)
    allowBootLibInstalls = fromFlag (installAllowBootLibInstalls installFlags)
    onlyConstrained  = fromFlag (installOnlyConstrained  installFlags)
    upgradeDeps      = fromFlag (installUpgradeDeps      installFlags)
    onlyDeps         = fromFlag (installOnlyDeps         installFlags)

    allowOlder = fromMaybe (AllowOlder mempty)
                           (configAllowOlder configExFlags)
    allowNewer = fromMaybe (AllowNewer mempty)
                           (configAllowNewer configExFlags)

-- | Remove the provided targets from the install plan.
pruneInstallPlan :: Package targetpkg
                 => [PackageSpecifier targetpkg]
                 -> SolverInstallPlan
                 -> Progress String String SolverInstallPlan
pruneInstallPlan pkgSpecifiers =
  -- TODO: this is a general feature and should be moved to D.C.Dependency
  -- Also, the InstallPlan.remove should return info more precise to the
  -- problem, rather than the very general PlanProblem type.
  either (Fail . explain) Done
  . SolverInstallPlan.remove (\pkg -> packageName pkg `elem` targetnames)
  where
    -- Render the plan problems into a user-facing error message; only the
    -- missing dependencies that are themselves targets are reported.
    explain :: [SolverInstallPlan.SolverPlanProblem] -> String
    explain problems =
      "Cannot select only the dependencies (as requested by the "
      ++ "'--only-dependencies' flag), "
      ++ (case pkgids of
             [pkgid] -> "the package " ++ prettyShow pkgid ++ " is "
             _       -> "the packages "
                        ++ intercalate ", " (map prettyShow pkgids) ++ " are ")
      ++ "required by a dependency of one of the other targets."
      where
        pkgids =
          nub [ depid
              | SolverInstallPlan.PackageMissingDeps _ depids <- problems
              , depid <- depids
              , packageName depid `elem` targetnames ]

    targetnames = map pkgSpecifierTarget pkgSpecifiers

-- ------------------------------------------------------------
-- * Informational messages
-- ------------------------------------------------------------

-- | Perform post-solver checks of the install plan and print it if
-- either requested or needed.
checkPrintPlan :: Verbosity
               -> InstalledPackageIndex
               -> InstallPlan
               -> SourcePackageDb
               -> InstallFlags
               -> [PackageSpecifier UnresolvedSourcePackage]
               -> IO ()
checkPrintPlan verbosity installed installPlan sourcePkgDb
  installFlags pkgSpecifiers = do

  -- User targets that are already installed.
  let preExistingTargets =
        [ p | let tgts = map pkgSpecifierTarget pkgSpecifiers,
              InstallPlan.PreExisting p <- InstallPlan.toList installPlan,
              packageName p `elem` tgts ]

  -- If there's nothing to install, we print the already existing
  -- target packages as an explanation.
  when nothingToInstall $
    notice verbosity $ unlines $
         "All the requested packages are already installed:"
       : map (prettyShow . packageId) preExistingTargets
      ++ ["Use --reinstall if you want to reinstall anyway."]

  -- Classify every package in execution order (new / new version / reinstall).
  let lPlan =
        [ (pkg, status)
        | pkg <- InstallPlan.executionOrder installPlan
        , let status = packageStatus installed pkg ]
  -- Are any packages classified as reinstalls?
  let reinstalledPkgs =
        [ ipkg
        | (_pkg, status) <- lPlan
        , ipkg <- extractReinstalls status ]
  -- Packages that are already broken.
  let oldBrokenPkgs =
          map Installed.installedUnitId
        . PackageIndex.reverseDependencyClosure installed
        . map (Installed.installedUnitId . fst)
        . PackageIndex.brokenPackages
        $ installed
  let excluded = reinstalledPkgs ++ oldBrokenPkgs
  -- Packages that are reverse dependencies of replaced packages are very
  -- likely to be broken. We exclude packages that are already broken.
  let newBrokenPkgs =
        filter (\ p -> not (Installed.installedUnitId p `elem` excluded))
               (PackageIndex.reverseDependencyClosure installed reinstalledPkgs)
  let containsReinstalls = not (null reinstalledPkgs)
  let breaksPkgs         = not (null newBrokenPkgs)

  -- Dangerous plans get printed at least at 'verbose' level.
  let adaptedVerbosity
        | containsReinstalls
        , not overrideReinstall = modifyVerbosity (max verbose) verbosity
        | otherwise             = verbosity

  -- We print the install plan if we are in a dry-run or if we are confronted
  -- with a dangerous install plan.
  when (dryRun || containsReinstalls && not overrideReinstall) $
    printPlan (dryRun || breaksPkgs && not overrideReinstall)
      adaptedVerbosity lPlan sourcePkgDb

  -- If the install plan is dangerous, we print various warning messages. In
  -- particular, if we can see that packages are likely to be broken, we even
  -- bail out (unless installation has been forced with --force-reinstalls).
  when containsReinstalls $ do
    if breaksPkgs
      then do
        (if dryRun || overrideReinstall then warn else die') verbosity $ unlines $
            "The following packages are likely to be broken by the reinstalls:"
          : map (prettyShow . mungedId) newBrokenPkgs
          ++ if overrideReinstall
               then if dryRun then [] else
                 ["Continuing even though " ++
                  "the plan contains dangerous reinstalls."]
               else
                 ["Use --force-reinstalls if you want to install anyway."]
      else unless dryRun $ warn verbosity
             "Note that reinstalls are always dangerous. Continuing anyway..."

  -- If we are explicitly told to not download anything, check that all packages
  -- are already fetched.
  let offline = fromFlagOrDefault False (installOfflineMode installFlags)
  when offline $ do
    let pkgs = [ confPkgSource cpkg
               | InstallPlan.Configured cpkg <- InstallPlan.toList installPlan ]
    notFetched <- fmap (map packageId)
                  . filterM (fmap isNothing . checkFetched . srcpkgSource)
                  $ pkgs
    unless (null notFetched) $
      die' verbosity $ "Can't download packages in offline mode. "
      ++ "Must download the following packages to proceed:\n"
      ++ intercalate ", " (map prettyShow notFetched)
      ++ "\nTry using 'cabal fetch'."

  where
    nothingToInstall = null (fst (InstallPlan.ready installPlan))

    dryRun            = fromFlag (installDryRun            installFlags)
    overrideReinstall = fromFlag (installOverrideReinstall installFlags)

-- | How a to-be-installed package relates to what is already installed:
-- completely new, a new version of an installed package, or a reinstall
-- (same munged id already present) together with its dependency changes.
data PackageStatus = NewPackage
                   | NewVersion [Version]
                   | Reinstall  [UnitId] [PackageChange]

type PackageChange = MergeResult MungedPackageId MungedPackageId

-- | The unit ids being replaced, when the status is a reinstall.
extractReinstalls :: PackageStatus -> [UnitId]
extractReinstalls (Reinstall ipids _) = ipids
extractReinstalls _                   = []

-- | Classify a ready package against the installed package index.
packageStatus :: InstalledPackageIndex -> ReadyPackage -> PackageStatus
packageStatus installedPkgIndex cpkg =
  case PackageIndex.lookupPackageName installedPkgIndex
                                      (packageName cpkg) of
    [] -> NewPackage
    ps ->  case filter ((== mungedId cpkg)
                        . mungedId) (concatMap snd ps) of
      []           -> NewVersion (map fst ps)
      pkgs@(pkg:_) -> Reinstall (map Installed.installedUnitId pkgs)
                                (changes pkg cpkg)

  where

    -- Dependency diff between the installed and the newly configured package.
    changes :: Installed.InstalledPackageInfo
            -> ReadyPackage
            -> [PackageChange]
    changes pkg (ReadyPackage pkg') = filter changed $
      mergeBy (comparing mungedName)
        -- deps of installed pkg
        (resolveInstalledIds $ Installed.depends pkg)
        -- deps of configured pkg
        (resolveInstalledIds $ CD.nonSetupDeps (depends pkg'))

    -- convert to source pkg ids via index
    resolveInstalledIds :: [UnitId] -> [MungedPackageId]
    resolveInstalledIds =
        nub
      . sort
      . map mungedId
      . mapMaybe (PackageIndex.lookupUnitId installedPkgIndex)

    changed (InBoth    pkgid pkgid') = pkgid /= pkgid'
    changed _                        = True

-- | Print the ordered install plan: a compact one-line-per-package form at
-- normal verbosity, or the full form (flags, stanzas, reverse deps, reason)
-- at verbose and above.
printPlan :: Bool -- is dry run
          -> Verbosity
          -> [(ReadyPackage, PackageStatus)]
          -> SourcePackageDb
          -> IO ()
printPlan dryRun verbosity plan sourcePkgDb = case plan of
  []   -> return ()
  pkgs
    | verbosity >= Verbosity.verbose -> notice verbosity $ unlines $
        ("In order, the following " ++ wouldWill ++ " be installed:")
      : map showPkgAndReason pkgs
    | otherwise -> notice verbosity $ unlines $
        ("In order, the following " ++ wouldWill
         ++ " be installed (use -v for more details):")
      : map showPkg pkgs
  where
    wouldWill | dryRun    = "would"
              | otherwise = "will"

    showPkg (pkg, _) = prettyShow (packageId pkg) ++
                       showLatest (pkg)

    showPkgAndReason (ReadyPackage pkg', pr) = unwords
        [ prettyShow (packageId pkg')
        , showLatest pkg'
        , showFlagAssignment (nonDefaultFlags pkg')
        , showStanzas (confPkgStanzas pkg')
        , showDep pkg'
        , case pr of
            NewPackage     -> "(new package)"
            NewVersion _   -> "(new version)"
            Reinstall _ cs -> "(reinstall)" ++ case cs of
                []   -> ""
                diff -> "(changes: "  ++ intercalate ", " (map change diff)
                        ++ ")"
        ]

    -- Note when a newer version than the chosen one exists in the source db.
    showLatest :: Package srcpkg => srcpkg -> String
    showLatest pkg = case mLatestVersion of
        Just latestVersion ->
            if packageVersion pkg < latestVersion
            then ("(latest: " ++ prettyShow latestVersion ++ ")")
            else ""
        Nothing -> ""
      where
        mLatestVersion :: Maybe Version
        mLatestVersion =
            fmap packageVersion $
            safeLast $
            SourcePackageIndex.lookupPackageName
                                 (packageIndex sourcePkgDb)
                                 (packageName pkg)

    toFlagAssignment :: [PackageFlag] -> FlagAssignment
    toFlagAssignment = mkFlagAssignment . map (\ f -> (flagName f, flagDefault f))

    -- Only flags that differ from the package's declared defaults are shown.
    nonDefaultFlags :: ConfiguredPackage loc -> FlagAssignment
    nonDefaultFlags cpkg =
      let defaultAssignment =
            toFlagAssignment
             (genPackageFlags (SourcePackage.srcpkgDescription $
                               confPkgSource cpkg))
      in  confPkgFlags cpkg `diffFlagAssignment` defaultAssignment

    change (OnlyInLeft pkgid)        = prettyShow pkgid ++ " removed"
    change (InBoth     pkgid pkgid') = prettyShow pkgid ++ " -> "
                                       ++ prettyShow (mungedVersion pkgid')
    change (OnlyInRight      pkgid') = prettyShow pkgid' ++ " added"

    -- Show which packages in this plan depend on the given one.
    showDep pkg | Just rdeps <- Map.lookup (packageId pkg) revDeps
                  = " (via: " ++ unwords (map prettyShow rdeps) ++  ")"
                | otherwise = ""

    -- Library-dependency edges within the plan, reversed.
    revDepGraphEdges :: [(PackageId, PackageId)]
    revDepGraphEdges = [ (rpid, packageId cpkg)
                       | (ReadyPackage cpkg, _) <- plan
                       , ConfiguredId rpid (Just
                           (PackageDescription.CLibName
                            PackageDescription.LMainLibName)) _
                         <- CD.flatDeps (confPkgDeps cpkg) ]

    revDeps :: Map.Map PackageId [PackageId]
    revDeps = Map.fromListWith (++) (map (fmap (:[])) revDepGraphEdges)

-- ------------------------------------------------------------
-- * Post installation stuff
-- ------------------------------------------------------------

-- | Report a solver failure. This works slightly differently to
-- 'postInstallActions', as (by definition) we don't have an install plan.
reportPlanningFailure :: Verbosity -> InstallArgs -> InstallContext -> String
                      -> IO ()
reportPlanningFailure verbosity
  (_, _, comp, platform, _
  ,_, configFlags, _, installFlags, _, _, _)
  (_, sourcePkgDb, _, _, pkgSpecifiers, _)
  message = do

  when reportFailure $ do

    -- Only create reports for explicitly named packages
    let pkgids = filter
          (SourcePackageIndex.elemByPackageId (packageIndex sourcePkgDb)) $
          mapMaybe theSpecifiedPackage pkgSpecifiers

        buildReports = BuildReports.fromPlanningFailure platform
                       (compilerId comp) pkgids
                       (configConfigurationsFlags configFlags)

    unless (null buildReports) $
      info verbosity $
        "Solver failure will be reported for "
        ++ intercalate "," (map prettyShow pkgids)

    -- Save reports
    BuildReports.storeLocal (compilerInfo comp)
                            (fromNubList $ installSummaryFile installFlags)
                            buildReports platform

    -- Save solver log
    case logFile of
      Nothing -> return ()
      Just template -> for_ pkgids $ \pkgid ->
        let env = initialPathTemplateEnv pkgid dummyIpid
                    (compilerInfo comp) platform
            path = fromPathTemplate $ substPathTemplate env template
        in  writeFile path message

  where
    reportFailure = fromFlag (installReportPlanningFailure installFlags)
    logFile = flagToMaybe (installLogFile installFlags)

    -- A IPID is calculated from the transitive closure of
    -- dependencies, but when the solver fails we don't have that.
    -- So we fail.
    dummyIpid = error "reportPlanningFailure: installed package ID not available"

-- | If a 'PackageSpecifier' refers to a single package, return Just that
-- package.
theSpecifiedPackage :: Package pkg => PackageSpecifier pkg -> Maybe PackageId
theSpecifiedPackage pkgSpec =
  case pkgSpec of
    NamedPackage name [PackagePropertyVersion version]
      -> PackageIdentifier name <$> trivialRange version
    NamedPackage _ _ -> Nothing
    SpecificSourcePackage pkg -> Just $ packageId pkg
  where
    -- | If a range includes only a single version, return Just that version.
    trivialRange :: VersionRange -> Maybe Version
    trivialRange = foldVersionRange
        Nothing
        Just     -- "== v"
        (\_ -> Nothing)
        (\_ -> Nothing)
        (\_ _ -> Nothing)
        (\_ _ -> Nothing)

-- | Various stuff we do after successful or unsuccessfully installing a bunch
-- of packages. This includes:
--
--  * build reporting, local and remote
--  * symlinking binaries
--  * updating indexes
--  * error reporting
--
postInstallActions :: Verbosity
                   -> InstallArgs
                   -> [UserTarget]
                   -> InstallPlan
                   -> BuildOutcomes
                   -> IO ()
postInstallActions verbosity
  (packageDBs, _, comp, platform, progdb
  ,globalFlags, configFlags, _, installFlags, _, _, _)
  _ installPlan buildOutcomes = do

  -- Always store the summary reports locally; anonymous/detailed uploads
  -- depend on the configured reporting level.
  let buildReports = BuildReports.fromInstallPlan platform (compilerId comp)
                                                  installPlan buildOutcomes
  BuildReports.storeLocal (compilerInfo comp)
                          (fromNubList $ installSummaryFile installFlags)
                          buildReports
                          platform
  when (reportingLevel >= AnonymousReports) $
    BuildReports.storeAnonymous buildReports
  when (reportingLevel == DetailedReports) $
    storeDetailedBuildReports verbosity logsDir buildReports

  regenerateHaddockIndex verbosity packageDBs comp platform progdb
                         configFlags installFlags buildOutcomes

  symlinkBinaries verbosity platform comp configFlags installFlags
                  installPlan buildOutcomes

  printBuildFailures verbosity buildOutcomes

  where
    reportingLevel = fromFlag (installBuildReports installFlags)
    logsDir        = fromFlag (globalLogsDir globalFlags)

-- | Copy the per-package build logs into the per-repo reports directory,
-- paired with their build reports, for outcomes that plausibly produced a
-- log.  A missing log file is only warned about, not fatal.
storeDetailedBuildReports :: Verbosity -> FilePath
                          -> [(BuildReports.BuildReport, Maybe Repo)] -> IO ()
storeDetailedBuildReports verbosity logsDir reports = sequence_
  [ do dotCabal <- getCabalDir
       let logFileName = prettyShow (BuildReports.package report) <.> "log"
           logFile     = logsDir </> logFileName
           reportsDir  = dotCabal </> "reports"
                                  </> unRepoName (remoteRepoName remoteRepo)
           reportFile  = reportsDir </> logFileName

       handleMissingLogFile $ do
         buildLog <- readFile logFile
         createDirectoryIfMissing True reportsDir -- FIXME
         writeFile reportFile (show (showBuildReport report, buildLog))

  | (report, Just repo) <- reports
  , Just remoteRepo <- [maybeRepoRemote repo]
  , isLikelyToHaveLogFile (BuildReports.installOutcome report) ]

  where
    isLikelyToHaveLogFile BuildReports.ConfigureFailed {} = True
    isLikelyToHaveLogFile BuildReports.BuildFailed     {} = True
    isLikelyToHaveLogFile BuildReports.InstallFailed   {} = True
    isLikelyToHaveLogFile BuildReports.InstallOk       {} = True
    isLikelyToHaveLogFile _                               = False

    -- Downgrade a does-not-exist error on the log file to a warning.
    handleMissingLogFile = Exception.handleJust missingFile $ \ioe ->
      warn verbosity $ "Missing log file for build report: "
                    ++ fromMaybe "" (ioeGetFileName ioe)

    missingFile ioe
      | isDoesNotExistError ioe = Just ioe
    missingFile _               = Nothing

-- | Regenerate the per-user haddock contents index, but only when
-- documentation was requested with an index file and some docs were actually
-- installed into a normal user package db.
regenerateHaddockIndex :: Verbosity
                       -> [PackageDB]
                       -> Compiler
                       -> Platform
                       -> ProgramDb
                       -> ConfigFlags
                       -> InstallFlags
                       -> BuildOutcomes
                       -> IO ()
regenerateHaddockIndex verbosity packageDBs comp platform progdb
                       configFlags installFlags buildOutcomes
  | haddockIndexFileIsRequested && shouldRegenerateHaddockIndex = do

  defaultDirs <- InstallDirs.defaultInstallDirs
                   (compilerFlavor comp)
                   (fromFlag (configUserInstall configFlags))
                   True
  let indexFileTemplate = fromFlag (installHaddockIndex installFlags)
      indexFile = substHaddockIndexFileName defaultDirs indexFileTemplate

  notice verbosity $
     "Updating documentation index " ++ indexFile

  --TODO: might be nice if the install plan gave us the new InstalledPackageInfo
  installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb
  Haddock.regenerateHaddockIndex verbosity installedPkgIndex progdb indexFile

  | otherwise = return ()
  where
    haddockIndexFileIsRequested =
         fromFlag (installDocumentation installFlags)
      && isJust (flagToMaybe (installHaddockIndex installFlags))

    -- We want to regenerate the index if some new documentation was actually
    -- installed. Since the index can be only per-user or per-sandbox (see
    -- #1337), we don't do it for global installs or special cases where we're
    -- installing into a specific db.
    shouldRegenerateHaddockIndex = normalUserInstall
                                && someDocsWereInstalled buildOutcomes
      where
        someDocsWereInstalled = any installedDocs . Map.elems
        installedDocs (Right (BuildResult DocsOk _ _)) = True
        installedDocs _                                = False

        normalUserInstall = (UserPackageDB `elem` packageDBs)
                         && all (not . isSpecificPackageDB) packageDBs
        isSpecificPackageDB (SpecificPackageDB _) = True
        isSpecificPackageDB _                     = False

    -- Expand the $-style path template with the compiler/platform/installdir
    -- template environment.
    substHaddockIndexFileName defaultDirs = fromPathTemplate
                                          . substPathTemplate env
      where
        env  = env0 ++ installDirsTemplateEnv absoluteDirs
        env0 = InstallDirs.compilerTemplateEnv (compilerInfo comp)
            ++ InstallDirs.platformTemplateEnv platform
            ++ InstallDirs.abiTemplateEnv (compilerInfo comp) platform
        absoluteDirs = InstallDirs.substituteInstallDirTemplates
                         env0 templateDirs
        templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault
                         defaultDirs (configInstallDirs configFlags)

-- | Create symlinks for the installed executables; never overwrites files
-- that are not managed by cabal, and warns about any it had to skip.
symlinkBinaries :: Verbosity
                -> Platform -> Compiler
                -> ConfigFlags
                -> InstallFlags
                -> InstallPlan
                -> BuildOutcomes
                -> IO ()
symlinkBinaries verbosity platform comp configFlags installFlags
                plan buildOutcomes = do
  failed <- InstallSymlink.symlinkBinaries platform comp
                                           NeverOverwrite
                                           configFlags installFlags
                                           plan buildOutcomes
  case failed of
    [] -> return ()
    [(_, exe, path)] ->
      warn verbosity $
           "could not create a symlink in " ++ bindir ++ " for "
        ++ prettyShow exe ++ " because the file exists there already but is not "
        ++ "managed by cabal. You can create a symlink for this executable "
        ++ "manually if you wish. The executable file has been installed at "
        ++ path
    exes ->
      warn verbosity $
           "could not create symlinks in " ++ bindir ++ " for "
        ++ intercalate ", " [ prettyShow exe | (_, exe, _) <- exes ]
        ++ " because the files exist there already and are not "
        ++ "managed by cabal. You can create symlinks for these executables "
        ++ "manually if you wish. The executable files have been installed at "
        ++ intercalate ", " [ path | (_, _, path) <- exes ]
  where
    bindir = fromFlag (installSymlinkBinDir installFlags)

-- | Die with a summary of every package whose build outcome was a failure,
-- including the phase it failed in and the exception, if any.
printBuildFailures :: Verbosity -> BuildOutcomes -> IO ()
printBuildFailures verbosity buildOutcomes =
  case [ (pkgid, failure)
       | (pkgid, Left failure) <- Map.toList buildOutcomes ] of
    []     -> return ()
    failed -> die' verbosity . unlines
            $ "Some packages failed to install:"
            : [ prettyShow pkgid ++ printFailureReason reason
              | (pkgid, reason) <- failed ]
  where
    printFailureReason reason = case reason of
      DependentFailed pkgid -> " depends on " ++ prettyShow pkgid
                            ++ " which failed to install."
      DownloadFailed  e -> " failed while downloading the package."
                        ++ showException e
      UnpackFailed    e -> " failed while unpacking the package."
                        ++ showException e
      ConfigureFailed e -> " failed during the configure step."
                        ++ showException e
      BuildFailed     e -> " failed during the building phase."
                        ++ showException e
      TestsFailed     e -> " failed during the tests phase."
                        ++ showException e
      InstallFailed   e -> " failed during the final install step."
                        ++ showException e

      -- This will never happen, but we include it for completeness
      PlanningFailed -> " failed during the planning phase."

    showException e = " The exception was:\n " ++ show e ++ maybeOOM e

    -- Detect builds killed by signal 9 (the Linux OOM killer uses SIGKILL)
    -- and add a hint; on Windows exit codes carry no such meaning.
#ifdef mingw32_HOST_OS
    maybeOOM _ = ""
#else
    maybeOOM e = maybe "" onExitFailure (fromException e)
    onExitFailure (ExitFailure n)
      | n == 9 || n == -9 =
        "\nThis may be due to an out-of-memory condition."
    onExitFailure _ = ""
#endif

-- ------------------------------------------------------------
-- * Actually do the installations
-- ------------------------------------------------------------

data InstallMisc = InstallMisc {
    libVersion :: Maybe Version
  }

-- | If logging is enabled, contains location of the log file and the verbosity
-- level for logging.
type UseLogFile = Maybe (PackageIdentifier -> UnitId -> FilePath, Verbosity)

-- | Execute the install plan: for each ready package, fetch its source,
-- unpack it, and run the Setup.hs configure/build/install pipeline, with
-- optional parallelism and per-package log files.
performInstallations :: Verbosity
                     -> InstallArgs
                     -> InstalledPackageIndex
                     -> InstallPlan
                     -> IO BuildOutcomes
performInstallations verbosity
  (packageDBs, repoCtxt, comp, platform, progdb, globalFlags, configFlags,
   configExFlags, installFlags, haddockFlags, testFlags, _)
  installedPkgIndex installPlan = do

  info verbosity $ "Number of threads used: " ++ (show numJobs) ++ "."

  jobControl   <- if parallelInstall then newParallelJobControl numJobs
                                     else newSerialJobControl
  fetchLimit   <- newJobLimit (min numJobs numFetchJobs)
  installLock  <- newLock -- serialise installation
  cacheLock    <- newLock -- serialise access to setup exe cache

  executeInstallPlan verbosity jobControl keepGoing useLogFile
                     installPlan $ \rpkg ->
    installReadyPackage platform cinfo configFlags
                        rpkg $ \configFlags' src pkg pkgoverride ->
      fetchSourcePackage verbosity repoCtxt fetchLimit src $ \src' ->
        installLocalPackage verbosity (packageId pkg) src' distPref $
          \mpath ->
            installUnpackedPackage verbosity installLock numJobs
                                   (setupScriptOptions installedPkgIndex
                                    cacheLock rpkg)
                                   configFlags'
                                   installFlags haddockFlags testFlags comp
                                   progdb platform pkg rpkg pkgoverride mpath
                                   useLogFile

  where
    cinfo = compilerInfo comp

    numJobs         = determineNumJobs (installNumJobs installFlags)
    -- Downloads are capped separately from build jobs.
    numFetchJobs    = 2
    parallelInstall = numJobs >= 2
    keepGoing       = fromFlag (installKeepGoing installFlags)
    distPref        = fromFlagOrDefault (useDistPref defaultSetupScriptOptions)
                      (configDistPref configFlags)

    setupScriptOptions index lock rpkg =
      configureSetupScript
        packageDBs
        comp
        platform
        progdb
        distPref
        (chooseCabalVersion configExFlags (libVersion miscOptions))
        (Just lock)
        parallelInstall
        index
        (Just rpkg)

    reportingLevel = fromFlag (installBuildReports installFlags)
    logsDir        = fromFlag (globalLogsDir globalFlags)

    -- Should the build output be written to a log file instead of stdout?
    useLogFile :: UseLogFile
    useLogFile = fmap ((\f -> (f, loggingVerbosity)) . substLogFileName)
                 logFileTemplate
      where
        installLogFile' = flagToMaybe $ installLogFile installFlags
        defaultTemplate = toPathTemplate $
                            logsDir </> "$compiler" </> "$libname" <.> "log"

        -- If the user has specified --remote-build-reporting=detailed, use the
        -- default log file location. If the --build-log option is set, use the
        -- provided location. Otherwise don't use logging, unless building in
        -- parallel (in which case the default location is used).
        logFileTemplate :: Maybe PathTemplate
        logFileTemplate
          | useDefaultTemplate = Just defaultTemplate
          | otherwise          = installLogFile'

        -- If the user has specified --remote-build-reporting=detailed or
        -- --build-log, use more verbose logging.
        loggingVerbosity :: Verbosity
        loggingVerbosity | overrideVerbosity = modifyVerbosity (max verbose)
                                               verbosity
                         | otherwise         = verbosity

        useDefaultTemplate :: Bool
        useDefaultTemplate
          | reportingLevel == DetailedReports = True
          | isJust installLogFile'            = False
          | parallelInstall                   = True
          | otherwise                         = False

        overrideVerbosity :: Bool
        overrideVerbosity
          | reportingLevel == DetailedReports = True
          | isJust installLogFile'            = True
          | parallelInstall                   = False
          | otherwise                         = False

    -- Expand the log-file path template for a particular package/unit.
    substLogFileName :: PathTemplate -> PackageIdentifier -> UnitId -> FilePath
    substLogFileName template pkg uid = fromPathTemplate
                                      . substPathTemplate env
                                      $ template
      where env = initialPathTemplateEnv (packageId pkg) uid
                    (compilerInfo comp) platform

    miscOptions = InstallMisc {
      libVersion = flagToMaybe (configCabalVersion configExFlags)
    }

-- | Run the plan through the job controller, reporting each package's
-- outcome as it completes.
executeInstallPlan
  :: Verbosity
  -> JobControl IO (UnitId, BuildOutcome)
  -> Bool
  -> UseLogFile
  -> InstallPlan
  -> (ReadyPackage -> IO BuildOutcome)
  -> IO BuildOutcomes
executeInstallPlan verbosity jobCtl keepGoing useLogFile plan0 installPkg =
    InstallPlan.execute
      jobCtl keepGoing depsFailure plan0 $ \pkg -> do
        buildOutcome <- installPkg pkg
        printBuildResult (packageId pkg) (installedUnitId pkg) buildOutcome
        return buildOutcome

  where
    depsFailure = DependentFailed . packageId

    -- Print build log if something went wrong, and 'Installed $PKGID'
    -- otherwise.
    printBuildResult :: PackageId -> UnitId -> BuildOutcome -> IO ()
    printBuildResult pkgid uid buildOutcome = case buildOutcome of
        (Right _) -> progressMessage verbosity ProgressCompleted
                       (prettyShow pkgid)
        (Left _)  -> do
          notice verbosity $ "Failed to install " ++ prettyShow pkgid
          when (verbosity >= normal) $
            case useLogFile of
              Nothing                 -> return ()
              Just (mkLogFileName, _) -> do
                let logName = mkLogFileName pkgid uid
                putStr $ "Build log ( " ++ logName ++ " ):\n"
                printFile logName

    printFile :: FilePath -> IO ()
    printFile path = readFile path >>= putStr

-- | Call an installer for an 'SourcePackage' but override the configure
-- flags with the ones given by the 'ReadyPackage'. In particular the
-- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly
-- versioned package dependencies. So we ignore any previous partial flag
-- assignment or dependency constraints and use the new ones.
--
-- NB: when updating this function, don't forget to also update
-- 'configurePackage' in D.C.Configure.
installReadyPackage :: Platform -> CompilerInfo
                    -> ConfigFlags
                    -> ReadyPackage
                    -> (ConfigFlags -> UnresolvedPkgLoc
                                    -> PackageDescription
                                    -> PackageDescriptionOverride
                                    -> a)
                    -> a
installReadyPackage platform cinfo configFlags
                    (ReadyPackage (ConfiguredPackage ipid
                                    (SourcePackage _ gpkg source pkgoverride)
                                    flags stanzas deps))
                    installPkg =
  installPkg configFlags {
    configIPID = toFlag (prettyShow ipid),
    configConfigurationsFlags = flags,
    -- We generate the legacy constraints as well as the new style precise deps.
    -- In the end only one set gets passed to Setup.hs configure, depending on
    -- the Cabal version we are talking to.
    configConstraints  =
      [ thisPackageVersionConstraint srcid
      | ConfiguredId srcid (Just (PackageDescription.CLibName
                                  PackageDescription.LMainLibName)) _ipid
        <- CD.nonSetupDeps deps ],
    configDependencies =
      [ GivenComponent (packageName srcid) cname dep_ipid
      | ConfiguredId srcid (Just (PackageDescription.CLibName cname)) dep_ipid
        <- CD.nonSetupDeps deps ],
    -- Use '--exact-configuration' if supported.
    configExactConfiguration = toFlag True,
    configBenchmarks         = toFlag False,
    configTests              = toFlag (TestStanzas `optStanzaSetMember` stanzas)
  } source pkg pkgoverride
  where
    -- Finalize the generic package description with the solver's exact flag
    -- assignment and stanza choices.
    pkg = case finalizePD flags (enableStanzas stanzas)
           (const True)
           platform cinfo [] gpkg of
      Left _ -> error "finalizePD ReadyPackage failed"
      Right (desc, _) -> desc

-- | Fetch the package source if it is not already fetched, then hand the
-- resolved location to the continuation; download errors become
-- 'DownloadFailed' outcomes.
fetchSourcePackage
  :: Verbosity
  -> RepoContext
  -> JobLimit
  -> UnresolvedPkgLoc
  -> (ResolvedPkgLoc -> IO BuildOutcome)
  -> IO BuildOutcome
fetchSourcePackage verbosity repoCtxt fetchLimit src installPkg = do
  fetched <- checkFetched src
  case fetched of
    Just src' -> installPkg src'
    Nothing   -> onFailure DownloadFailed $ do
                   loc <- withJobLimit fetchLimit $
                            fetchPackage verbosity repoCtxt src
                   installPkg loc

-- | Dispatch on the package location: unpacked dirs are used in place,
-- tarballs are unpacked to a temporary directory first.
installLocalPackage
  :: Verbosity
  -> PackageIdentifier -> ResolvedPkgLoc -> FilePath
  -> (Maybe FilePath -> IO BuildOutcome)
  -> IO BuildOutcome
installLocalPackage verbosity pkgid location distPref installPkg =

  case location of

    LocalUnpackedPackage dir ->
      installPkg (Just dir)

    RemoteSourceRepoPackage _repo dir ->
      installPkg (Just dir)

    LocalTarballPackage tarballPath ->
      installLocalTarballPackage verbosity
        pkgid tarballPath distPref installPkg

    RemoteTarballPackage _ tarballPath ->
      installLocalTarballPackage verbosity
        pkgid tarballPath distPref installPkg

    RepoTarballPackage _ _ tarballPath ->
      installLocalTarballPackage verbosity
        pkgid tarballPath distPref installPkg

-- | Unpack a source tarball into a temporary directory, sanity-check that
-- the .cabal file is present, and run the installer on the unpacked tree.
installLocalTarballPackage
  :: Verbosity
  -> PackageIdentifier -> FilePath -> FilePath
  -> (Maybe FilePath -> IO BuildOutcome)
  -> IO BuildOutcome
installLocalTarballPackage verbosity pkgid
                           tarballPath distPref installPkg = do
  tmp <- getTemporaryDirectory
  withTempDirectory verbosity tmp "cabal-tmp" $ \tmpDirPath ->
    onFailure UnpackFailed $ do
      let relUnpackedPath = prettyShow pkgid
          absUnpackedPath = tmpDirPath </> relUnpackedPath
          descFilePath = absUnpackedPath
                     </> prettyShow (packageName pkgid) <.> "cabal"
      info verbosity $ "Extracting " ++ tarballPath
                    ++ " to " ++ tmpDirPath ++ "..."
      extractTarGzFile tmpDirPath relUnpackedPath tarballPath
      exists <- doesFileExist descFilePath
      unless exists $
        die' verbosity $ "Package .cabal file not found: " ++ show descFilePath
      maybeRenameDistDir absUnpackedPath
      installPkg (Just absUnpackedPath)

  where
    -- 'cabal sdist' puts pre-generated files in the 'dist'
    -- directory. This fails when a nonstandard build directory name
    -- is used (as is the case with sandboxes), so we need to rename
    -- the 'dist' dir here.
    --
    -- TODO: 'cabal get happy && cd sandbox && cabal install ../happy' still
    -- fails even with this workaround. We probably can live with that.
    maybeRenameDistDir :: FilePath -> IO ()
    maybeRenameDistDir absUnpackedPath = do
      let distDirPath    = absUnpackedPath </> defaultDistPref
          distDirPathTmp = absUnpackedPath </> (defaultDistPref ++ "-tmp")
          distDirPathNew = absUnpackedPath </> distPref
      distDirExists <- doesDirectoryExist distDirPath
      when (distDirExists
            && (not $ distDirPath `equalFilePath` distDirPathNew)) $ do
        -- NB: we need to handle the case when 'distDirPathNew' is a
        -- subdirectory of 'distDirPath' (e.g. the former is
        -- 'dist/dist-sandbox-3688fbc2' and the latter is 'dist').
        debug verbosity $ "Renaming '" ++ distDirPath ++ "' to '"
          ++ distDirPathTmp ++ "'."
        renameDirectory distDirPath distDirPathTmp
        when (distDirPath `isPrefixOf` distDirPathNew) $
          createDirectoryIfMissingVerbose verbosity False distDirPath
        debug verbosity $ "Renaming '" ++ distDirPathTmp ++ "' to '"
          ++ distDirPathNew ++ "'."
        renameDirectory distDirPathTmp distDirPathNew

-- | Run the full Setup.hs pipeline (configure, build, haddock, test, copy,
-- register) for one unpacked package, turning each phase's failure into the
-- corresponding 'BuildFailure'.
installUnpackedPackage
  :: Verbosity
  -> Lock
  -> Int
  -> SetupScriptOptions
  -> ConfigFlags
  -> InstallFlags
  -> HaddockFlags
  -> TestFlags
  -> Compiler
  -> ProgramDb
  -> Platform
  -> PackageDescription
  -> ReadyPackage
  -> PackageDescriptionOverride
  -> Maybe FilePath -- ^ Directory to change to before starting the installation.
  -> UseLogFile -- ^ File to log output to (if any)
  -> IO BuildOutcome
installUnpackedPackage verbosity installLock numJobs
                       scriptOptions
                       configFlags installFlags haddockFlags testFlags comp
                       progdb platform pkg rpkg pkgoverride workingDir
                       useLogFile = do
  -- Override the .cabal file if necessary
  case pkgoverride of
    Nothing     -> return ()
    Just pkgtxt -> do
      let descFilePath = fromMaybe "." workingDir
                     </> prettyShow (packageName pkgid) <.> "cabal"
      info verbosity $
        "Updating " ++ prettyShow (packageName pkgid) <.> "cabal"
                    ++ " with the latest revision from the index."
      writeFileAtomic descFilePath pkgtxt

  -- Make sure that we pass --libsubdir etc to 'setup configure' (necessary if
  -- the setup script was compiled against an old version of the Cabal lib).
  configFlags' <- addDefaultInstallDirs configFlags
  -- Filter out flags not supported by the old versions of the Cabal lib.
  let configureFlags :: Version -> ConfigFlags
      configureFlags = filterConfigureFlags configFlags' {
        configVerbosity = toFlag verbosity'
      }

  -- Path to the optional log file.
  mLogPath <- maybeLogPath

  logDirChange (maybe (const (return ())) appendFile mLogPath) workingDir $ do
    -- Configure phase
    onFailure ConfigureFailed $ do
      noticeProgress ProgressStarting
      setup configureCommand configureFlags mLogPath

    -- Build phase
      onFailure BuildFailed $ do
        noticeProgress ProgressBuilding
        setup buildCommand' buildFlags mLogPath

    -- Doc generation phase
        docsResult <- if shouldHaddock
          then (do setup haddockCommand haddockFlags' mLogPath
                   return DocsOk)
                 `catchIO`   (\_ -> return DocsFailed)
                 `catchExit` (\_ -> return DocsFailed)
          else return DocsNotTried

    -- Tests phase
        onFailure TestsFailed $ do
          when (testsEnabled && PackageDescription.hasTests pkg) $
              setup Cabal.testCommand testFlags' mLogPath

          let testsResult | testsEnabled = TestsOk
                          | otherwise = TestsNotTried

          -- Install phase
          onFailure InstallFailed $ criticalSection installLock $ do
            -- Actual installation
            withWin32SelfUpgrade verbosity uid configFlags
                                 cinfo platform pkg $ do
              setup Cabal.copyCommand copyFlags mLogPath

            -- Capture installed package configuration file, so that
            -- it can be incorporated into the final InstallPlan
            ipkgs <- genPkgConfs mLogPath
            let ipkgs' = case ipkgs of
                           [ipkg] -> [ipkg { Installed.installedUnitId = uid }]
                           _      -> ipkgs
            let packageDBs = interpretPackageDbFlags
                               (fromFlag (configUserInstall configFlags))
                               (configPackageDBs configFlags)
            for_ ipkgs' $ \ipkg' ->
                registerPackage verbosity comp progdb
                                packageDBs ipkg'
                                defaultRegisterOptions

            return (Right (BuildResult docsResult testsResult
                            (find ((==uid).installedUnitId) ipkgs')))

  where
    pkgid           = packageId pkg
    uid             = installedUnitId rpkg
    cinfo           = compilerInfo comp
    buildCommand'   = buildCommand progdb
    dispname        = prettyShow pkgid
    isParallelBuild = numJobs >= 2

    -- Per-package progress lines are only useful when builds interleave.
    noticeProgress phase = when isParallelBuild $
        progressMessage verbosity phase dispname

    buildFlags _ = emptyBuildFlags {
      buildDistPref  = configDistPref configFlags,
      buildVerbosity = toFlag verbosity'
    }
    shouldHaddock = fromFlag (installDocumentation installFlags)
    haddockFlags' _ = haddockFlags {
      haddockVerbosity = toFlag verbosity',
      haddockDistPref  = configDistPref configFlags
    }
    testsEnabled = fromFlag (configTests configFlags)
                   && fromFlagOrDefault False (installRunTests installFlags)
    testFlags' = filterTestFlags testFlags {
      Cabal.testDistPref = configDistPref configFlags
    }
    copyFlags _ = Cabal.emptyCopyFlags {
      Cabal.copyDistPref  = configDistPref configFlags,
      Cabal.copyDest      = toFlag InstallDirs.NoCopyDest,
      Cabal.copyVerbosity = toFlag verbosity'
    }
    -- Only packages with libraries need registering.
    shouldRegister = PackageDescription.hasLibs pkg
    registerFlags _ = Cabal.emptyRegisterFlags {
      Cabal.regDistPref  = configDistPref configFlags,
      Cabal.regVerbosity = toFlag verbosity'
    }
    -- When logging to a file, use the (possibly raised) logging verbosity.
    verbosity' = maybe verbosity snd useLogFile
    tempTemplate name = name ++ "-" ++ prettyShow pkgid

    addDefaultInstallDirs :: ConfigFlags -> IO ConfigFlags
    addDefaultInstallDirs configFlags' = do
      defInstallDirs <- InstallDirs.defaultInstallDirs flavor userInstall False
      return $ configFlags' {
          configInstallDirs = fmap Cabal.Flag .
                              InstallDirs.substituteInstallDirTemplates env $
                              InstallDirs.combineInstallDirs fromFlagOrDefault
                                defInstallDirs (configInstallDirs configFlags)
          }
      where
        CompilerId flavor _ = compilerInfoId cinfo
        env         = initialPathTemplateEnv pkgid uid cinfo platform
        userInstall = fromFlagOrDefault defaultUserInstall
                      (configUserInstall configFlags')

    -- Run 'setup register --gen-pkg-config' and parse the generated package
    -- configuration(s); returns [] for packages with no library.
    genPkgConfs :: Maybe FilePath
                -> IO [Installed.InstalledPackageInfo]
    genPkgConfs mLogPath =
      if shouldRegister then do
        tmp <- getTemporaryDirectory
        withTempDirectory verbosity tmp (tempTemplate "pkgConf") $ \dir -> do
          let pkgConfDest = dir </> "pkgConf"
              registerFlags' version = (registerFlags version) {
                Cabal.regGenPkgConf = toFlag (Just pkgConfDest)
              }
          setup Cabal.registerCommand registerFlags' mLogPath
          is_dir <- doesDirectoryExist pkgConfDest
          let notHidden = not . isHidden
              isHidden name = "." `isPrefixOf` name
          if is_dir
            -- Sort so that each prefix of the package
            -- configurations is well formed
            then traverse (readPkgConf pkgConfDest) . sort . filter notHidden
                    =<< getDirectoryContents pkgConfDest
            else fmap (:[]) $ readPkgConf "." pkgConfDest
      else return []

    readPkgConf :: FilePath -> FilePath
                -> IO Installed.InstalledPackageInfo
    readPkgConf pkgConfDir pkgConfFile = do
      pkgConfText <- BS.readFile (pkgConfDir </> pkgConfFile)
      case Installed.parseInstalledPackageInfo pkgConfText of
        Left perrors -> pkgConfParseFailed $ unlines $ NE.toList perrors
        Right (warns, pkgConf) -> do
          unless (null warns) $
            warn verbosity $ unlines warns
          return pkgConf

    pkgConfParseFailed :: String -> IO a
    pkgConfParseFailed perror =
      die' verbosity $
        "Couldn't parse the output of 'setup register --gen-pkg-config':"
        ++ show perror

    -- Compute the log file path and make sure it starts out fresh.
    maybeLogPath :: IO (Maybe FilePath)
    maybeLogPath =
      case useLogFile of
         Nothing                 -> return Nothing
         Just (mkLogFileName, _) -> do
           let logFileName = mkLogFileName (packageId pkg) uid
               logDir      = takeDirectory logFileName
           unless (null logDir) $ createDirectoryIfMissing True logDir
           logFileExists <- doesFileExist logFileName
           when logFileExists $ removeFile logFileName
           return (Just logFileName)

    -- Run one Setup.hs command, appending its output to the log file (if
    -- any); bracket guarantees the log handle is closed on any exit.
    setup cmd flags mLogPath =
      Exception.bracket
      (traverse (\path -> openFile path AppendMode) mLogPath)
      (traverse_ hClose)
      (\logFileHandle ->
        setupWrapper
          verbosity
          scriptOptions { useLoggingHandle = logFileHandle
                        , useWorkingDir   = workingDir }
          (Just pkg)
          cmd flags (const []))

-- helper
onFailure :: (SomeException -> BuildFailure) -> IO BuildOutcome -> IO BuildOutcome
onFailure result action =
  action `catches`
    [ Handler $ \ioe  -> handler (ioe  :: IOException)
    , Handler $ \exit -> handler (exit :: ExitCode)
    ]
  where
    handler :: Exception e => e -> IO BuildOutcome
    handler = return . Left . result .
toException -- ------------------------------------------------------------ -- * Weird windows hacks -- ------------------------------------------------------------ withWin32SelfUpgrade :: Verbosity -> UnitId -> ConfigFlags -> CompilerInfo -> Platform -> PackageDescription -> IO a -> IO a withWin32SelfUpgrade _ _ _ _ _ _ action | buildOS /= Windows = action withWin32SelfUpgrade verbosity uid configFlags cinfo platform pkg action = do defaultDirs <- InstallDirs.defaultInstallDirs compFlavor (fromFlag (configUserInstall configFlags)) (PackageDescription.hasLibs pkg) Win32SelfUpgrade.possibleSelfUpgrade verbosity (exeInstallPaths defaultDirs) action where pkgid = packageId pkg (CompilerId compFlavor _) = compilerInfoId cinfo exeInstallPaths defaultDirs = [ InstallDirs.bindir absoluteDirs exeName <.> exeExtension buildPlatform | exe <- PackageDescription.executables pkg , PackageDescription.buildable (PackageDescription.buildInfo exe) , let exeName = prefix ++ prettyShow (PackageDescription.exeName exe) ++ suffix prefix = substTemplate prefixTemplate suffix = substTemplate suffixTemplate ] where fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "") prefixTemplate = fromFlagTemplate (configProgPrefix configFlags) suffixTemplate = fromFlagTemplate (configProgSuffix configFlags) templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault defaultDirs (configInstallDirs configFlags) absoluteDirs = InstallDirs.absoluteInstallDirs pkgid uid cinfo InstallDirs.NoCopyDest platform templateDirs substTemplate = InstallDirs.fromPathTemplate . 
InstallDirs.substPathTemplate env where env = InstallDirs.initialPathTemplateEnv pkgid uid cinfo platform cabal-install-3.8.1.0/src/Distribution/Client/InstallPlan.hs0000644000000000000000000011561307346545000022030 0ustar0000000000000000{-# LANGUAGE BangPatterns #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE FlexibleContexts #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.InstallPlan -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : duncan@community.haskell.org -- Stability : provisional -- Portability : portable -- -- Package installation plan -- ----------------------------------------------------------------------------- module Distribution.Client.InstallPlan ( InstallPlan, GenericInstallPlan, PlanPackage, GenericPlanPackage(..), foldPlanPackage, IsUnit, -- * Operations on 'InstallPlan's new, toGraph, toList, toMap, keys, keysSet, planIndepGoals, depends, fromSolverInstallPlan, fromSolverInstallPlanWithProgress, configureInstallPlan, remove, installed, lookup, directDeps, revDirectDeps, -- * Traversal executionOrder, execute, BuildOutcomes, lookupBuildOutcome, -- ** Traversal helpers -- $traversal Processing, ready, completed, failed, -- * Display showPlanGraph, showInstallPlan, -- * Graph-like operations dependencyClosure, reverseTopologicalOrder, reverseDependencyClosure, ) where import Distribution.Client.Compat.Prelude hiding (toList, lookup, tail) import Prelude (tail) import Distribution.Compat.Stack (WithCallStack) import Distribution.Client.Types hiding (BuildOutcomes) import qualified Distribution.PackageDescription as PD import qualified Distribution.Simple.Configure as Configure import qualified Distribution.Simple.Setup as Cabal import Distribution.InstalledPackageInfo ( InstalledPackageInfo ) import 
These two sets of packages can and usually do overlap.
This is important: -- PackageInstalled returns only library dependencies, but for packages that -- aren't yet installed we know many more kinds of dependencies (setup -- dependencies, exe, test-suite, benchmark, ..).
-- dependencies in cabal-install should consider what to do with these
-- dependencies; if we give a 'PackageInstalled' instance it would be too easy
-- to get this wrong (and, for instance, call graph traversal functions from
-- Cabal rather than from cabal-install). Instead, see 'PackageInstalled'.

-- | A package as it appears in an install plan: either a pre-existing
-- installed package, a source package that has been configured, or a
-- configured package that has since been installed while executing the plan.
data GenericPlanPackage ipkg srcpkg
   = PreExisting ipkg
   | Configured  srcpkg
   | Installed   srcpkg
  deriving (Eq, Show, Generic)

-- | Render a plan package as its constructor tag followed by its node key,
-- for use in internal error messages.
displayGenericPlanPackage :: (IsUnit ipkg, IsUnit srcpkg) => GenericPlanPackage ipkg srcpkg -> String
displayGenericPlanPackage (PreExisting pkg) = "PreExisting " ++ prettyShow (nodeKey pkg)
displayGenericPlanPackage (Configured  pkg) = "Configured " ++ prettyShow (nodeKey pkg)
displayGenericPlanPackage (Installed   pkg) = "Installed " ++ prettyShow (nodeKey pkg)

-- | Convenience combinator for destructing 'GenericPlanPackage'.
-- This is handy because if you case manually, you have to handle
-- 'Configured' and 'Installed' separately (where often you want
-- them to be the same.)
foldPlanPackage :: (ipkg -> a)
                -> (srcpkg -> a)
                -> GenericPlanPackage ipkg srcpkg
                -> a
foldPlanPackage f _ (PreExisting ipkg)  = f ipkg
foldPlanPackage _ g (Configured srcpkg) = g srcpkg
foldPlanPackage _ g (Installed  srcpkg) = g srcpkg

-- | Constraint synonym for graph nodes keyed by 'UnitId'.
type IsUnit a = (IsNode a, Key a ~ UnitId)

-- | The direct dependencies of a node, as 'UnitId's.
depends :: IsUnit a => a -> [UnitId]
depends = nodeNeighbors

-- NB: Expanded constraint synonym here to avoid undecidable
-- instance errors in GHC 7.8 and earlier.
instance (IsNode ipkg, IsNode srcpkg, Key ipkg ~ UnitId, Key srcpkg ~ UnitId)
         => IsNode (GenericPlanPackage ipkg srcpkg) where
  type Key (GenericPlanPackage ipkg srcpkg) = UnitId
  nodeKey (PreExisting ipkg) = nodeKey ipkg
  nodeKey (Configured  spkg) = nodeKey spkg
  nodeKey (Installed   spkg) = nodeKey spkg
  nodeNeighbors (PreExisting ipkg) = nodeNeighbors ipkg
  nodeNeighbors (Configured  spkg) = nodeNeighbors spkg
  nodeNeighbors (Installed   spkg) = nodeNeighbors spkg

instance (Binary ipkg, Binary srcpkg) => Binary (GenericPlanPackage ipkg srcpkg)
instance (Structured ipkg, Structured srcpkg) => Structured (GenericPlanPackage ipkg srcpkg)

-- | 'GenericPlanPackage' specialised to the concrete package types used by
-- cabal-install.
type PlanPackage = GenericPlanPackage
                   InstalledPackageInfo (ConfiguredPackage UnresolvedPkgLoc)

instance (Package ipkg, Package srcpkg) =>
         Package (GenericPlanPackage ipkg srcpkg) where
  packageId (PreExisting ipkg) = packageId ipkg
  packageId (Configured  spkg) = packageId spkg
  packageId (Installed   spkg) = packageId spkg

instance (HasMungedPackageId ipkg, HasMungedPackageId srcpkg) =>
         HasMungedPackageId (GenericPlanPackage ipkg srcpkg) where
  mungedId (PreExisting ipkg) = mungedId ipkg
  mungedId (Configured  spkg) = mungedId spkg
  mungedId (Installed   spkg) = mungedId spkg

instance (HasUnitId ipkg, HasUnitId srcpkg) =>
         HasUnitId (GenericPlanPackage ipkg srcpkg) where
  installedUnitId (PreExisting ipkg) = installedUnitId ipkg
  installedUnitId (Configured  spkg) = installedUnitId spkg
  installedUnitId (Installed   spkg) = installedUnitId spkg

instance (HasConfiguredId ipkg, HasConfiguredId srcpkg) =>
         HasConfiguredId (GenericPlanPackage ipkg srcpkg) where
  configuredId (PreExisting ipkg) = configuredId ipkg
  configuredId (Configured  spkg) = configuredId spkg
  configuredId (Installed   spkg) = configuredId spkg

-- | An install plan: the dependency graph of plan packages, plus a record
-- of whether the solver was run with independent goals.
data GenericInstallPlan ipkg srcpkg = GenericInstallPlan {
    planGraph      :: !(Graph (GenericPlanPackage ipkg srcpkg)),
    planIndepGoals :: !IndependentGoals
  }
  deriving (Typeable)

-- | 'GenericInstallPlan' specialised to most commonly used types.
type InstallPlan = GenericInstallPlan
                   InstalledPackageInfo (ConfiguredPackage UnresolvedPkgLoc)

-- | Smart constructor that deals with caching the 'Graph' representation.
--
-- The @loc@ argument names the calling operation; it is used in the message
-- if the validity assertion fails.
mkInstallPlan :: (IsUnit ipkg, IsUnit srcpkg)
              => String
              -> Graph (GenericPlanPackage ipkg srcpkg)
              -> IndependentGoals
              -> GenericInstallPlan ipkg srcpkg
mkInstallPlan loc graph indepGoals =
    assert (valid loc graph)
    GenericInstallPlan {
      planGraph      = graph,
      planIndepGoals = indepGoals
    }

-- | Report an internal error, tagged with the name of the 'InstallPlan'
-- operation in which it occurred.
internalError :: WithCallStack (String -> String -> a)
internalError loc msg = error $ "internal error in InstallPlan." ++ loc
                             ++ if null msg then "" else ": " ++ msg

instance (Structured ipkg, Structured srcpkg) => Structured (GenericInstallPlan ipkg srcpkg) where
    structure p = Nominal (typeRep p) 0 "GenericInstallPlan"
        [ structure (Proxy :: Proxy ipkg)
        , structure (Proxy :: Proxy srcpkg)
        ]

instance (IsNode ipkg, Key ipkg ~ UnitId, IsNode srcpkg, Key srcpkg ~ UnitId,
          Binary ipkg, Binary srcpkg)
       => Binary (GenericInstallPlan ipkg srcpkg) where
    put GenericInstallPlan { planGraph = graph, planIndepGoals = indepGoals } =
        put graph >> put indepGoals

    get = do
      graph <- get
      indepGoals <- get
      -- NB: rebuild via the smart constructor so the validity assertion runs
      -- on deserialised plans too.
      return $! mkInstallPlan "(instance Binary)" graph indepGoals

-- | Pretty-print a plan graph: one entry per package showing its tag,
-- package id and unit id, with its dependency keys indented beneath it.
showPlanGraph :: (Package ipkg, Package srcpkg,
                  IsUnit ipkg, IsUnit srcpkg)
              => Graph (GenericPlanPackage ipkg srcpkg) -> String
showPlanGraph graph = renderStyle defaultStyle $
    vcat (map dispPlanPackage (Foldable.toList graph))
  where
    dispPlanPackage p =
        hang (hsep [ text (showPlanPackageTag p)
                   , pretty (packageId p)
                   , parens (pretty (nodeKey p))]) 2
             (vcat (map pretty (nodeNeighbors p)))

-- | Pretty-print a whole plan via 'showPlanGraph'.
showInstallPlan :: (Package ipkg, Package srcpkg, IsUnit ipkg, IsUnit srcpkg)
                => GenericInstallPlan ipkg srcpkg -> String
showInstallPlan = showPlanGraph . planGraph

-- | The constructor tag of a plan package, for display.
showPlanPackageTag :: GenericPlanPackage ipkg srcpkg -> String
showPlanPackageTag (PreExisting _) = "PreExisting"
showPlanPackageTag (Configured  _) = "Configured"
showPlanPackageTag (Installed   _) = "Installed"

-- | Build an installation plan from a valid set of resolved packages.
--
new :: (IsUnit ipkg, IsUnit srcpkg)
    => IndependentGoals
    -> Graph (GenericPlanPackage ipkg srcpkg)
    -> GenericInstallPlan ipkg srcpkg
new indepGoals graph = mkInstallPlan "new" graph indepGoals

-- | The underlying dependency graph of the plan.
toGraph :: GenericInstallPlan ipkg srcpkg
        -> Graph (GenericPlanPackage ipkg srcpkg)
toGraph = planGraph

-- | All packages in the plan, as a list.
toList :: GenericInstallPlan ipkg srcpkg
       -> [GenericPlanPackage ipkg srcpkg]
toList = Foldable.toList . planGraph

-- | All packages in the plan, keyed by 'UnitId'.
toMap :: GenericInstallPlan ipkg srcpkg
      -> Map UnitId (GenericPlanPackage ipkg srcpkg)
toMap = Graph.toMap . planGraph

-- | The 'UnitId's of all packages in the plan.
keys :: GenericInstallPlan ipkg srcpkg -> [UnitId]
keys = Graph.keys . planGraph

-- | The 'UnitId's of all packages in the plan, as a set.
keysSet :: GenericInstallPlan ipkg srcpkg -> Set UnitId
keysSet = Graph.keysSet . planGraph

-- | Remove packages from the install plan. This will result in an
-- error if there are remaining packages that depend on any matching
-- package. This is primarily useful for obtaining an install plan for
-- the dependencies of a package or set of packages without actually
-- installing the package itself, as when doing development.
--
remove :: (IsUnit ipkg, IsUnit srcpkg)
       => (GenericPlanPackage ipkg srcpkg -> Bool)
       -> GenericInstallPlan ipkg srcpkg
       -> GenericInstallPlan ipkg srcpkg
remove shouldRemove plan =
    mkInstallPlan "remove" newGraph (planIndepGoals plan)
  where
    newGraph = Graph.fromDistinctList $
                 filter (not . shouldRemove) (toList plan)

-- | Change a number of packages in the 'Configured' state to the 'Installed'
-- state.
--
-- To preserve invariants, the package must have all of its dependencies
-- already installed too (that is 'PreExisting' or 'Installed').
--
installed :: (IsUnit ipkg, IsUnit srcpkg)
          => (srcpkg -> Bool)
          -> GenericInstallPlan ipkg srcpkg
          -> GenericInstallPlan ipkg srcpkg
installed shouldBeInstalled installPlan =
    foldl' markInstalled installPlan
      [ pkg
      | Configured pkg <- reverseTopologicalOrder installPlan
      , shouldBeInstalled pkg ]
  where
    -- The assertion checks the stated precondition: every direct dep of the
    -- package being marked must itself already be installed.
    markInstalled plan pkg =
      assert (all isInstalled (directDeps plan (nodeKey pkg))) $
      plan { planGraph = Graph.insert (Installed pkg) (planGraph plan) }

-- | Lookup a package in the plan.
--
lookup :: (IsUnit ipkg, IsUnit srcpkg)
       => GenericInstallPlan ipkg srcpkg
       -> UnitId
       -> Maybe (GenericPlanPackage ipkg srcpkg)
lookup plan pkgid = Graph.lookup pkgid (planGraph plan)

-- | Find all the direct dependencies of the given package.
--
-- Note that the package must exist in the plan or it is an error.
--
directDeps :: GenericInstallPlan ipkg srcpkg
           -> UnitId
           -> [GenericPlanPackage ipkg srcpkg]
directDeps plan pkgid =
  case Graph.neighbors (planGraph plan) pkgid of
    Just deps -> deps
    Nothing   -> internalError "directDeps" "package not in graph"

-- | Find all the direct reverse dependencies of the given package.
--
-- Note that the package must exist in the plan or it is an error.
--
revDirectDeps :: GenericInstallPlan ipkg srcpkg
              -> UnitId
              -> [GenericPlanPackage ipkg srcpkg]
revDirectDeps plan pkgid =
  case Graph.revNeighbors (planGraph plan) pkgid of
    Just deps -> deps
    Nothing   -> internalError "revDirectDeps" "package not in graph"

-- | Return all the packages in the 'InstallPlan' in reverse topological order.
-- That is, for each package, all dependencies of the package appear first.
--
-- Compared to 'executionOrder', this function returns all the installed and
-- source packages rather than just the source ones. Also, while both this
-- and 'executionOrder' produce reverse topological orderings of the package
-- dependency graph, it is not necessarily exactly the same order.
--
reverseTopologicalOrder :: GenericInstallPlan ipkg srcpkg
                        -> [GenericPlanPackage ipkg srcpkg]
reverseTopologicalOrder plan = Graph.revTopSort (planGraph plan)

-- | Return the packages in the plan that are direct or indirect dependencies
-- of the given packages.
--
dependencyClosure :: GenericInstallPlan ipkg srcpkg
                  -> [UnitId]
                  -> [GenericPlanPackage ipkg srcpkg]
dependencyClosure plan = fromMaybe [] . Graph.closure (planGraph plan)

-- | Return the packages in the plan that depend directly or indirectly on the
-- given packages.
--
reverseDependencyClosure :: GenericInstallPlan ipkg srcpkg
                         -> [UnitId]
                         -> [GenericPlanPackage ipkg srcpkg]
reverseDependencyClosure plan = fromMaybe [] . Graph.revClosure (planGraph plan)

-- Alert alert! Why does SolverId map to a LIST of plan packages?
-- The sordid story has to do with 'build-depends' on a package
-- with libraries and executables. In an ideal world, we would
-- ONLY depend on the library in this situation. But c.f. #3661
-- some people rely on the build-depends to ALSO implicitly
-- depend on an executable.
--
-- I don't want to commit to a strategy yet, so the only possible
-- thing you can do in this case is return EVERYTHING and let
-- the client filter out what they want (executables? libraries?
-- etc). This similarly implies we can't return a 'ConfiguredId'
-- because that's not enough information.
-- | Convert a 'SolverInstallPlan' into a 'GenericInstallPlan', using the
-- supplied function to expand each solver package into zero or more plan
-- packages.  The expansion function is given a lookup that resolves a
-- 'SolverId' dependency to the plan packages it was expanded to (a list;
-- see the comment above on why).
fromSolverInstallPlan ::
      (IsUnit ipkg, IsUnit srcpkg)
    => (   (SolverId -> [GenericPlanPackage ipkg srcpkg])
        -> SolverInstallPlan.SolverPlanPackage
        -> [GenericPlanPackage ipkg srcpkg]    )
    -> SolverInstallPlan
    -> GenericInstallPlan ipkg srcpkg
fromSolverInstallPlan f plan =
    mkInstallPlan "fromSolverInstallPlan"
      (Graph.fromDistinctList pkgs'')
      (SolverInstallPlan.planIndepGoals plan)
  where
    -- Fold over the solver plan in reverse topological order (dependencies
    -- first), accumulating two lookup maps -- one keyed by pre-existing
    -- 'UnitId', one by planned package id -- plus the converted packages.
    (_, _, pkgs'') = foldl' f' (Map.empty, Map.empty, [])
                            (SolverInstallPlan.reverseTopologicalOrder plan)

    f' (pidMap, ipiMap, pkgs) pkg = (pidMap', ipiMap', pkgs' ++ pkgs)
      where
        pkgs' = f (mapDep pidMap ipiMap) pkg

        -- Record this package's expansion under the appropriate key so that
        -- later packages (its reverse deps) can look it up.
        (pidMap', ipiMap') =
          case nodeKey pkg of
            PreExistingId _ uid -> (pidMap, Map.insert uid pkgs' ipiMap)
            PlannedId     pid   -> (Map.insert pid pkgs' pidMap, ipiMap)

    mapDep _ ipiMap (PreExistingId _pid uid)
        | Just pkgs <- Map.lookup uid ipiMap = pkgs
        | otherwise = error ("fromSolverInstallPlan: PreExistingId " ++ prettyShow uid)
    mapDep pidMap _ (PlannedId pid)
        | Just pkgs <- Map.lookup pid pidMap = pkgs
        | otherwise = error ("fromSolverInstallPlan: PlannedId " ++ prettyShow pid)
        -- This shouldn't happen, since mapDep should only be called
        -- on neighbor SolverId, which must have all been done already
        -- by the reverse top-sort (we assume the graph is not broken).
-- | Variant of 'fromSolverInstallPlan' in which the per-package expansion
-- runs in the 'LogProgress' monad, so the conversion can report progress.
-- The accumulator structure is otherwise identical to
-- 'fromSolverInstallPlan'.
fromSolverInstallPlanWithProgress ::
      (IsUnit ipkg, IsUnit srcpkg)
    => (   (SolverId -> [GenericPlanPackage ipkg srcpkg])
        -> SolverInstallPlan.SolverPlanPackage
        -> LogProgress [GenericPlanPackage ipkg srcpkg]    )
    -> SolverInstallPlan
    -> LogProgress (GenericInstallPlan ipkg srcpkg)
fromSolverInstallPlanWithProgress f plan = do
    (_, _, pkgs'') <- foldM f' (Map.empty, Map.empty, [])
                        (SolverInstallPlan.reverseTopologicalOrder plan)
    return $ mkInstallPlan "fromSolverInstallPlanWithProgress"
               (Graph.fromDistinctList pkgs'')
               (SolverInstallPlan.planIndepGoals plan)
  where
    f' (pidMap, ipiMap, pkgs) pkg = do
      pkgs' <- f (mapDep pidMap ipiMap) pkg
      -- Record this package's expansion under the appropriate key so that
      -- later packages (its reverse deps) can look it up.
      let (pidMap', ipiMap') =
            case nodeKey pkg of
              PreExistingId _ uid -> (pidMap, Map.insert uid pkgs' ipiMap)
              PlannedId     pid   -> (Map.insert pid pkgs' pidMap, ipiMap)
      return (pidMap', ipiMap', pkgs' ++ pkgs)

    mapDep _ ipiMap (PreExistingId _pid uid)
        | Just pkgs <- Map.lookup uid ipiMap = pkgs
        | otherwise = error ("fromSolverInstallPlan: PreExistingId " ++ prettyShow uid)
    mapDep pidMap _ (PlannedId pid)
        | Just pkgs <- Map.lookup pid pidMap = pkgs
        | otherwise = error ("fromSolverInstallPlan: PlannedId " ++ prettyShow pid)
        -- This shouldn't happen, since mapDep should only be called
        -- on neighbor SolverId, which must have all been done already
        -- by the reverse top-sort (we assume the graph is not broken).

-- | Conversion of 'SolverInstallPlan' to 'InstallPlan'.
-- Similar to 'elaboratedInstallPlan'
configureInstallPlan :: Cabal.ConfigFlags -> SolverInstallPlan -> InstallPlan
configureInstallPlan configFlags solverPlan =
    flip fromSolverInstallPlan solverPlan $ \mapDep planpkg ->
      [ case planpkg of
          SolverInstallPlan.PreExisting pkg ->
            PreExisting (instSolverPkgIPI pkg)
          SolverInstallPlan.Configured pkg ->
            Configured (configureSolverPackage mapDep pkg)
      ]
  where
    -- Turn a solver package into a 'ConfiguredPackage', computing its
    -- component id from the config flags, flags and library deps.
    configureSolverPackage :: (SolverId -> [PlanPackage])
                           -> SolverPackage UnresolvedPkgLoc
                           -> ConfiguredPackage UnresolvedPkgLoc
    configureSolverPackage mapDep spkg =
      ConfiguredPackage {
        confPkgId = Configure.computeComponentId
                      (Cabal.fromFlagOrDefault False
                          (Cabal.configDeterministic configFlags))
                      Cabal.NoFlag
                      Cabal.NoFlag
                      (packageId spkg)
                      (PD.CLibName PD.LMainLibName)
                      (Just (map confInstId (CD.libraryDeps deps),
                             solverPkgFlags spkg)),
        confPkgSource  = solverPkgSource spkg,
        confPkgFlags   = solverPkgFlags spkg,
        confPkgStanzas = solverPkgStanzas spkg,
        confPkgDeps    = deps
        -- NB: no support for executable dependencies
      }
      where
        -- Resolve each solver dependency to the 'ConfiguredId's of its
        -- expansion, per component.
        deps = fmap (concatMap (map configuredId . mapDep)) (solverPkgLibDeps spkg)

-- ------------------------------------------------------------
-- * Primitives for traversing plans
-- ------------------------------------------------------------

-- $traversal
--
-- Algorithms to traverse or execute an 'InstallPlan', especially in parallel,
-- may make use of the 'Processing' type and the associated operations
-- 'ready', 'completed' and 'failed'.
--
-- The 'Processing' type is used to keep track of the state of a traversal and
-- includes the set of packages that are in the processing state, e.g. in the
-- process of being installed, plus those that have been completed and those
-- where processing failed.
--
-- Traversal algorithms start with an 'InstallPlan':
--
-- * Initially there will be certain packages that can be processed
--   immediately (since they are configured source packages and have all their
--   dependencies installed already). The function 'ready' returns these
--   packages plus a 'Processing' state that marks these same packages as
--   being in the processing state.
--
-- * The algorithm must now arrange for these packages to be processed
--   (possibly in parallel). When a package has completed processing, the
--   algorithm needs to know which other packages (if any) are now ready to
--   process as a result. The 'completed' function marks a package as
--   completed and returns any packages that are newly in the processing state
--   (ie ready to process), along with the updated 'Processing' state.
--
-- * If failure is possible then when processing a package fails, the
--   algorithm needs to know which other packages have also failed as a
--   result. The 'failed' function marks the given package as failed as well
--   as all the other packages that depend on the failed package. In addition
--   it returns the other failed packages.

-- | The 'Processing' type is used to keep track of the state of a traversal
-- and includes the set of packages that are in the processing state, e.g. in
-- the process of being installed, plus those that have been completed and
-- those where processing failed.
--
data Processing = Processing !(Set UnitId) !(Set UnitId) !(Set UnitId)
                  -- processing, completed, failed

-- | The packages in the plan that are initially ready to be installed.
-- That is they are in the configured state and have all their dependencies
-- installed already.
--
-- The result is both the packages that are now ready to be installed and also
-- a 'Processing' state containing those same packages. The assumption is that
-- all the packages that are ready will now be processed and so we can
-- consider them to be in the processing state.
--
ready :: (IsUnit ipkg, IsUnit srcpkg)
      => GenericInstallPlan ipkg srcpkg
      -> ([GenericReadyPackage srcpkg], Processing)
ready plan =
    assert (processingInvariant plan processing) $
    (readyPackages, processing)
  where
    -- Initial state: the ready packages are processing, everything already
    -- installed counts as completed, and nothing has failed yet.
    !processing =
      Processing
        (Set.fromList [ nodeKey pkg | pkg <- readyPackages ])
        (Set.fromList [ nodeKey pkg | pkg <- toList plan, isInstalled pkg ])
        Set.empty
    readyPackages =
      [ ReadyPackage pkg
      | Configured pkg <- toList plan
      , all isInstalled (directDeps plan (nodeKey pkg))
      ]

-- | Is this plan package in an installed state ('PreExisting' or
-- 'Installed', as opposed to merely 'Configured')?
isInstalled :: GenericPlanPackage a b -> Bool
isInstalled (PreExisting {}) = True
isInstalled (Installed   {}) = True
isInstalled _                = False

-- | Given a package in the processing state, mark the package as completed
-- and return any packages that are newly in the processing state (ie ready to
-- process), along with the updated 'Processing' state.
--
completed :: forall ipkg srcpkg. (IsUnit ipkg, IsUnit srcpkg)
          => GenericInstallPlan ipkg srcpkg
          -> Processing -> UnitId
          -> ([GenericReadyPackage srcpkg], Processing)
completed plan (Processing processingSet completedSet failedSet) pkgid =
    assert (pkgid `Set.member` processingSet) $
    assert (processingInvariant plan processing') $

    ( map asReadyPackage newlyReady
    , processing' )
  where
    completedSet'  = Set.insert pkgid completedSet

    -- each direct reverse dep where all direct deps are completed
    newlyReady     = [ dep
                     | dep <- revDirectDeps plan pkgid
                     , all ((`Set.member` completedSet') . nodeKey)
                           (directDeps plan (nodeKey dep))
                     ]

    processingSet' = foldl' (flip Set.insert)
                            (Set.delete pkgid processingSet)
                            (map nodeKey newlyReady)
    processing'    = Processing processingSet' completedSet' failedSet

    asReadyPackage :: GenericPlanPackage ipkg srcpkg -> GenericReadyPackage srcpkg
    asReadyPackage (Configured pkg) = ReadyPackage pkg
    asReadyPackage pkg = internalError "completed" $ "not in configured state: " ++ displayGenericPlanPackage pkg

-- | Given a package in the processing state, mark it (and everything that
-- depends on it) as failed, returning the other newly-failed packages along
-- with the updated 'Processing' state.
failed :: (IsUnit ipkg, IsUnit srcpkg)
       => GenericInstallPlan ipkg srcpkg
       -> Processing -> UnitId
       -> ([srcpkg], Processing)
failed plan (Processing processingSet completedSet failedSet) pkgid =
    assert (pkgid `Set.member` processingSet) $
    assert (all (`Set.notMember` processingSet) (tail newlyFailedIds)) $
    assert (all (`Set.notMember` completedSet) (tail newlyFailedIds)) $
    -- but note that some newlyFailed may already be in the failed set
    -- since one package can depend on two packages that both fail and
    -- so would be in the rev-dep closure for both.
    assert (processingInvariant plan processing') $

    ( map asConfiguredPackage (tail newlyFailed)
    , processing' )
  where
    processingSet' = Set.delete pkgid processingSet
    failedSet'     = failedSet `Set.union` Set.fromList newlyFailedIds
    newlyFailedIds = map nodeKey newlyFailed
    -- NOTE(review): the 'tail' calls above assume the rev-dep closure of
    -- [pkgid] is non-empty with pkgid itself as the first element -- TODO
    -- confirm against 'Graph.revClosure' ordering.
    newlyFailed    = fromMaybe (internalError "failed" "package not in graph")
                   $ Graph.revClosure (planGraph plan) [pkgid]
    processing'    = Processing processingSet' completedSet failedSet'

    asConfiguredPackage (Configured pkg) = pkg
    asConfiguredPackage pkg = internalError "failed" $ "not in configured state: " ++ displayGenericPlanPackage pkg

-- | Sanity checks on a 'Processing' state relative to its plan; used from
-- assertions in 'ready', 'completed' and 'failed'.
processingInvariant :: (IsUnit ipkg, IsUnit srcpkg)
                    => GenericInstallPlan ipkg srcpkg
                    -> Processing -> Bool
processingInvariant plan (Processing processingSet completedSet failedSet) =

    -- All the packages in the three sets are actually in the graph
    assert (Foldable.all (flip Graph.member (planGraph plan)) processingSet) $
    assert (Foldable.all (flip Graph.member (planGraph plan)) completedSet) $
    assert (Foldable.all (flip Graph.member (planGraph plan)) failedSet) $

    -- The processing, completed and failed sets are disjoint from each other
    assert (noIntersection processingSet completedSet) $
    assert (noIntersection processingSet failedSet) $
    assert (noIntersection failedSet completedSet) $

    -- Packages that depend on a package that's still processing cannot be
    -- completed
    assert (noIntersection (reverseClosure processingSet) completedSet) $

    -- On the other hand, packages that depend on a package that's still
    -- processing /can/ have failed (since they may have depended on multiple
    -- packages that were processing, but it only takes one to fail to cause
    -- knock-on failures) so it is quite possible to have an
    -- intersection (reverseClosure processingSet) failedSet

    -- The failed set is upwards closed, i.e. equal to its own rev dep closure
    assert (failedSet == reverseClosure failedSet) $

    -- All immediate reverse deps of packages that are currently processing
    -- are not currently being processed (ie not in the processing set).
    assert (and [ rdeppkgid `Set.notMember` processingSet
                | pkgid     <- Set.toList processingSet
                , rdeppkgid <- maybe (internalError "processingInvariant" "")
                                     (map nodeKey)
                                     (Graph.revNeighbors (planGraph plan) pkgid)
                ]) $

    -- Packages from the processing or failed sets are only ever in the
    -- configured state.
    assert (and [ case Graph.lookup pkgid (planGraph plan) of
                    Just (Configured  _) -> True
                    Just (PreExisting _) -> False
                    Just (Installed   _) -> False
                    Nothing              -> False
                | pkgid <- Set.toList processingSet ++ Set.toList failedSet ])

    -- We use asserts rather than returning False so that on failure we get
    -- better details on which bit of the invariant was violated.
    True
  where
    reverseClosure    = Set.fromList
                      . map nodeKey
                      . fromMaybe (internalError "processingInvariant" "")
                      . Graph.revClosure (planGraph plan)
                      . Set.toList
    noIntersection a b = Set.null (Set.intersection a b)

-- ------------------------------------------------------------
-- * Traversing plans
-- ------------------------------------------------------------

-- | Flatten an 'InstallPlan', producing the sequence of source packages in
-- the order in which they would be processed when the plan is executed. This
-- can be used for simulations or presenting execution dry-runs.
--
-- It is guaranteed to give the same order as using 'execute' (with a serial
-- in-order 'JobControl'), which is a reverse topological orderings of the
-- source packages in the dependency graph, albeit not necessarily exactly the
-- same ordering as that produced by 'reverseTopologicalOrder'.
-- executionOrder :: (IsUnit ipkg, IsUnit srcpkg) => GenericInstallPlan ipkg srcpkg -> [GenericReadyPackage srcpkg] executionOrder plan = let (newpkgs, processing) = ready plan in tryNewTasks processing newpkgs where tryNewTasks _processing [] = [] tryNewTasks processing (p:todo) = waitForTasks processing p todo waitForTasks processing p todo = p : tryNewTasks processing' (todo++nextpkgs) where (nextpkgs, processing') = completed plan processing (nodeKey p) -- ------------------------------------------------------------ -- * Executing plans -- ------------------------------------------------------------ -- | The set of results we get from executing an install plan. -- type BuildOutcomes failure result = Map UnitId (Either failure result) -- | Lookup the build result for a single package. -- lookupBuildOutcome :: HasUnitId pkg => pkg -> BuildOutcomes failure result -> Maybe (Either failure result) lookupBuildOutcome = Map.lookup . installedUnitId -- | Execute an install plan. This traverses the plan in dependency order. -- -- Executing each individual package can fail and if so all dependents fail -- too. The result for each package is collected as a 'BuildOutcomes' map. -- -- Visiting each package happens with optional parallelism, as determined by -- the 'JobControl'. By default, after any failure we stop as soon as possible -- (using the 'JobControl' to try to cancel in-progress tasks). This behaviour -- can be reversed to keep going and build as many packages as possible. -- -- Note that the 'BuildOutcomes' is /not/ guaranteed to cover all the packages -- in the plan. In particular in the default mode where we stop as soon as -- possible after a failure then there may be packages which are skipped and -- these will have no 'BuildOutcome'. -- execute :: forall m ipkg srcpkg result failure. 
(IsUnit ipkg, IsUnit srcpkg, Monad m) => JobControl m (UnitId, Either failure result) -> Bool -- ^ Keep going after failure -> (srcpkg -> failure) -- ^ Value for dependents of failed packages -> GenericInstallPlan ipkg srcpkg -> (GenericReadyPackage srcpkg -> m (Either failure result)) -> m (BuildOutcomes failure result) execute jobCtl keepGoing depFailure plan installPkg = let (newpkgs, processing) = ready plan in tryNewTasks Map.empty False False processing newpkgs where tryNewTasks :: BuildOutcomes failure result -> Bool -> Bool -> Processing -> [GenericReadyPackage srcpkg] -> m (BuildOutcomes failure result) tryNewTasks !results tasksFailed tasksRemaining !processing newpkgs -- we were in the process of cancelling and now we're finished | tasksFailed && not keepGoing && not tasksRemaining = return results -- we are still in the process of cancelling, wait for remaining tasks | tasksFailed && not keepGoing && tasksRemaining = waitForTasks results tasksFailed processing -- no new tasks to do and all tasks are done so we're finished | null newpkgs && not tasksRemaining = return results -- no new tasks to do, remaining tasks to wait for | null newpkgs = waitForTasks results tasksFailed processing -- new tasks to do, spawn them, then wait for tasks to complete | otherwise = do sequence_ [ spawnJob jobCtl $ do result <- installPkg pkg return (nodeKey pkg, result) | pkg <- newpkgs ] waitForTasks results tasksFailed processing waitForTasks :: BuildOutcomes failure result -> Bool -> Processing -> m (BuildOutcomes failure result) waitForTasks !results tasksFailed !processing = do (pkgid, result) <- collectJob jobCtl case result of Right _success -> do tasksRemaining <- remainingJobs jobCtl tryNewTasks results' tasksFailed tasksRemaining processing' nextpkgs where results' = Map.insert pkgid result results (nextpkgs, processing') = completed plan processing pkgid Left _failure -> do -- if this is the first failure and we're not trying to keep going -- then try to cancel 
as many of the remaining jobs as possible when (not tasksFailed && not keepGoing) $ cancelJobs jobCtl tasksRemaining <- remainingJobs jobCtl tryNewTasks results' True tasksRemaining processing' [] where (depsfailed, processing') = failed plan processing pkgid results' = Map.insert pkgid result results `Map.union` depResults depResults = Map.fromList [ (nodeKey deppkg, Left (depFailure deppkg)) | deppkg <- depsfailed ] -- ------------------------------------------------------------ -- * Checking validity of plans -- ------------------------------------------------------------ -- | A valid installation plan is a set of packages that is closed, acyclic -- and respects the package state relation. -- -- * if the result is @False@ use 'problems' to get a detailed list. -- valid :: (IsUnit ipkg, IsUnit srcpkg) => String -> Graph (GenericPlanPackage ipkg srcpkg) -> Bool valid loc graph = case problems graph of [] -> True ps -> internalError loc ('\n' : unlines (map showPlanProblem ps)) data PlanProblem ipkg srcpkg = PackageMissingDeps (GenericPlanPackage ipkg srcpkg) [UnitId] | PackageCycle [GenericPlanPackage ipkg srcpkg] | PackageStateInvalid (GenericPlanPackage ipkg srcpkg) (GenericPlanPackage ipkg srcpkg) showPlanProblem :: (IsUnit ipkg, IsUnit srcpkg) => PlanProblem ipkg srcpkg -> String showPlanProblem (PackageMissingDeps pkg missingDeps) = "Package " ++ prettyShow (nodeKey pkg) ++ " depends on the following packages which are missing from the plan: " ++ intercalate ", " (map prettyShow missingDeps) showPlanProblem (PackageCycle cycleGroup) = "The following packages are involved in a dependency cycle " ++ intercalate ", " (map (prettyShow . 
nodeKey) cycleGroup) showPlanProblem (PackageStateInvalid pkg pkg') = "Package " ++ prettyShow (nodeKey pkg) ++ " is in the " ++ showPlanPackageTag pkg ++ " state but it depends on package " ++ prettyShow (nodeKey pkg') ++ " which is in the " ++ showPlanPackageTag pkg' ++ " state" -- | For an invalid plan, produce a detailed list of problems as human readable -- error messages. This is mainly intended for debugging purposes. -- Use 'showPlanProblem' for a human readable explanation. -- problems :: (IsUnit ipkg, IsUnit srcpkg) => Graph (GenericPlanPackage ipkg srcpkg) -> [PlanProblem ipkg srcpkg] problems graph = [ PackageMissingDeps pkg (mapMaybe (fmap nodeKey . flip Graph.lookup graph) missingDeps) | (pkg, missingDeps) <- Graph.broken graph ] ++ [ PackageCycle cycleGroup | cycleGroup <- Graph.cycles graph ] {- ++ [ PackageInconsistency name inconsistencies | (name, inconsistencies) <- dependencyInconsistencies indepGoals graph ] --TODO: consider re-enabling this one, see SolverInstallPlan -} ++ [ PackageStateInvalid pkg pkg' | pkg <- Foldable.toList graph , Just pkg' <- map (flip Graph.lookup graph) (nodeNeighbors pkg) , not (stateDependencyRelation pkg pkg') ] -- | The states of packages have that depend on each other must respect -- this relation. That is for very case where package @a@ depends on -- package @b@ we require that @stateDependencyRelation a b = True@. 
-- stateDependencyRelation :: GenericPlanPackage ipkg srcpkg -> GenericPlanPackage ipkg srcpkg -> Bool stateDependencyRelation PreExisting{} PreExisting{} = True stateDependencyRelation Installed{} PreExisting{} = True stateDependencyRelation Installed{} Installed{} = True stateDependencyRelation Configured{} PreExisting{} = True stateDependencyRelation Configured{} Installed{} = True stateDependencyRelation Configured{} Configured{} = True stateDependencyRelation _ _ = False cabal-install-3.8.1.0/src/Distribution/Client/InstallSymlink.hs0000644000000000000000000003043307346545000022560 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE DeriveGeneric #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.InstallSymlink -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Managing installing binaries with symlinks. ----------------------------------------------------------------------------- module Distribution.Client.InstallSymlink ( symlinkBinaries, symlinkBinary, trySymlink, promptRun ) where import Distribution.Client.Compat.Prelude hiding (ioError) import Prelude () import Distribution.Client.Types ( ConfiguredPackage(..), BuildOutcomes ) import Distribution.Client.Setup ( InstallFlags(installSymlinkBinDir) ) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.InstallPlan (InstallPlan) import Distribution.Solver.Types.SourcePackage import Distribution.Solver.Types.OptionalStanza import Distribution.Package ( PackageIdentifier, Package(packageId), UnitId, installedUnitId ) import Distribution.Types.UnqualComponentName import Distribution.Compiler ( CompilerId(..) 
) import qualified Distribution.PackageDescription as PackageDescription import Distribution.PackageDescription ( PackageDescription ) import Distribution.PackageDescription.Configuration ( finalizePD ) import Distribution.Simple.Setup ( ConfigFlags(..), fromFlag, fromFlagOrDefault, flagToMaybe ) import qualified Distribution.Simple.InstallDirs as InstallDirs import Distribution.Simple.Compiler ( Compiler, compilerInfo, CompilerInfo(..) ) import Distribution.System ( Platform ) import Distribution.Simple.Utils ( info, withTempDirectory ) import System.Directory ( canonicalizePath, getTemporaryDirectory, removeFile ) import System.FilePath ( (), splitPath, joinPath, isAbsolute ) import System.IO.Error ( isDoesNotExistError, ioError ) import Control.Exception ( assert ) import Distribution.Client.Compat.Directory ( createFileLink, getSymbolicLinkTarget, pathIsSymbolicLink ) import Distribution.Client.Types.OverwritePolicy import Distribution.Client.Init.Types ( DefaultPrompt(MandatoryPrompt) ) import Distribution.Client.Init.Prompt ( promptYesNo ) import qualified Data.ByteString as BS import qualified Data.ByteString.Char8 as BS8 -- | We would like by default to install binaries into some location that is on -- the user's PATH. For per-user installations on Unix systems that basically -- means the @~/bin/@ directory. On the majority of platforms the @~/bin/@ -- directory will be on the user's PATH. However some people are a bit nervous -- about letting a package manager install programs into @~/bin/@. -- -- A compromise solution is that instead of installing binaries directly into -- @~/bin/@, we could install them in a private location under @~/.cabal/bin@ -- and then create symlinks in @~/bin/@. We can be careful when setting up the -- symlinks that we do not overwrite any binary that the user installed. We can -- check if it was a symlink we made because it would point to the private dir -- where we install our binaries. 
This means we can install normally without -- worrying and in a later phase set up symlinks, and if that fails then we -- report it to the user, but even in this case the package is still in an OK -- installed state. -- -- This is an optional feature that users can choose to use or not. It is -- controlled from the config file. Of course it only works on POSIX systems -- with symlinks so is not available to Windows users. -- symlinkBinaries :: Platform -> Compiler -> OverwritePolicy -> ConfigFlags -> InstallFlags -> InstallPlan -> BuildOutcomes -> IO [(PackageIdentifier, UnqualComponentName, FilePath)] symlinkBinaries platform comp overwritePolicy configFlags installFlags plan buildOutcomes = case flagToMaybe (installSymlinkBinDir installFlags) of Nothing -> return [] Just symlinkBinDir | null exes -> return [] | otherwise -> do publicBinDir <- canonicalizePath symlinkBinDir -- TODO: do we want to do this here? : -- createDirectoryIfMissing True publicBinDir fmap catMaybes $ sequenceA [ do privateBinDir <- pkgBinDir pkg ipid ok <- symlinkBinary overwritePolicy publicBinDir privateBinDir (prettyShow publicExeName) privateExeName if ok then return Nothing else return (Just (pkgid, publicExeName, privateBinDir privateExeName)) | (rpkg, pkg, exe) <- exes , let pkgid = packageId pkg -- This is a bit dodgy; probably won't work for Backpack packages ipid = installedUnitId rpkg publicExeName = PackageDescription.exeName exe privateExeName = prefix ++ unUnqualComponentName publicExeName ++ suffix prefix = substTemplate pkgid ipid prefixTemplate suffix = substTemplate pkgid ipid suffixTemplate ] where exes = [ (cpkg, pkg, exe) | InstallPlan.Configured cpkg <- InstallPlan.toList plan , case InstallPlan.lookupBuildOutcome cpkg buildOutcomes of Just (Right _success) -> True _ -> False , let pkg :: PackageDescription pkg = pkgDescription cpkg , exe <- PackageDescription.executables pkg , PackageDescription.buildable (PackageDescription.buildInfo exe) ] pkgDescription 
(ConfiguredPackage _ (SourcePackage _ gpd _ _) flags stanzas _) = case finalizePD flags (enableStanzas stanzas) (const True) platform cinfo [] gpd of Left _ -> error "finalizePD ReadyPackage failed" Right (desc, _) -> desc -- This is sadly rather complicated. We're kind of re-doing part of the -- configuration for the package. :-( pkgBinDir :: PackageDescription -> UnitId -> IO FilePath pkgBinDir pkg ipid = do defaultDirs <- InstallDirs.defaultInstallDirs compilerFlavor (fromFlag (configUserInstall configFlags)) (PackageDescription.hasLibs pkg) let templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault defaultDirs (configInstallDirs configFlags) absoluteDirs = InstallDirs.absoluteInstallDirs (packageId pkg) ipid cinfo InstallDirs.NoCopyDest platform templateDirs canonicalizePath (InstallDirs.bindir absoluteDirs) substTemplate pkgid ipid = InstallDirs.fromPathTemplate . InstallDirs.substPathTemplate env where env = InstallDirs.initialPathTemplateEnv pkgid ipid cinfo platform fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "") prefixTemplate = fromFlagTemplate (configProgPrefix configFlags) suffixTemplate = fromFlagTemplate (configProgSuffix configFlags) cinfo = compilerInfo comp (CompilerId compilerFlavor _) = compilerInfoId cinfo -- | Symlink binary. -- -- The paths are take in pieces, so we can make relative link when possible. -- symlinkBinary :: OverwritePolicy -- ^ Whether to force overwrite an existing file -> FilePath -- ^ The canonical path of the public bin dir eg -- @/home/user/bin@ -> FilePath -- ^ The canonical path of the private bin dir eg -- @/home/user/.cabal/bin@ -> FilePath -- ^ The name of the executable to go in the public bin -- dir, eg @foo@ -> String -- ^ The name of the executable to in the private bin -- dir, eg @foo-1.0@ -> IO Bool -- ^ If creating the symlink was successful. @False@ if -- there was another file there already that we did -- not own. 
Other errors like permission errors just -- propagate as exceptions. symlinkBinary overwritePolicy publicBindir privateBindir publicName privateName = do ok <- targetOkToOverwrite (publicBindir publicName) (privateBindir privateName) case ok of NotExists -> mkLink OkToOverwrite -> overwrite NotOurFile -> case overwritePolicy of NeverOverwrite -> return False AlwaysOverwrite -> overwrite PromptOverwrite -> maybeOverwrite where relativeBindir = makeRelative publicBindir privateBindir mkLink :: IO Bool mkLink = True <$ createFileLink (relativeBindir privateName) (publicBindir publicName) rmLink :: IO Bool rmLink = True <$ removeFile (publicBindir publicName) overwrite :: IO Bool overwrite = rmLink *> mkLink maybeOverwrite :: IO Bool maybeOverwrite = promptRun "Existing file found while installing symlink. Do you want to overwrite that file? (y/n)" overwrite promptRun :: String -> IO Bool -> IO Bool promptRun s m = do a <- promptYesNo s MandatoryPrompt if a then m else pure a -- | Check a file path of a symlink that we would like to create to see if it -- is OK. For it to be OK to overwrite it must either not already exist yet or -- be a symlink to our target (in which case we can assume ownership). -- targetOkToOverwrite :: FilePath -- ^ The file path of the symlink to the private -- binary that we would like to create -> FilePath -- ^ The canonical path of the private binary. -- Use 'canonicalizePath' to make this. -> IO SymlinkStatus targetOkToOverwrite symlink target = handleNotExist $ do isLink <- pathIsSymbolicLink symlink if not isLink then return NotOurFile else do target' <- canonicalizePath =<< getSymbolicLinkTarget symlink -- This partially relies on canonicalizePath handling symlinks if target == target' then return OkToOverwrite else return NotOurFile where handleNotExist action = catchIO action $ \ioexception -> -- If the target doesn't exist then there's no problem overwriting it! 
if isDoesNotExistError ioexception then return NotExists else ioError ioexception data SymlinkStatus = NotExists -- ^ The file doesn't exist so we can make a symlink. | OkToOverwrite -- ^ A symlink already exists, though it is ours. We'll -- have to delete it first before we make a new symlink. | NotOurFile -- ^ A file already exists and it is not one of our existing -- symlinks (either because it is not a symlink or because -- it points somewhere other than our managed space). deriving Show -- | Take two canonical paths and produce a relative path to get from the first -- to the second, even if it means adding @..@ path components. -- makeRelative :: FilePath -> FilePath -> FilePath makeRelative a b = assert (isAbsolute a && isAbsolute b) $ let as = splitPath a bs = splitPath b commonLen = length $ takeWhile id $ zipWith (==) as bs in joinPath $ [ ".." | _ <- drop commonLen as ] ++ drop commonLen bs -- | Try to make a symlink in a temporary directory. -- -- If this works, we can try to symlink: even on Windows. 
-- trySymlink :: Verbosity -> IO Bool trySymlink verbosity = do tmp <- getTemporaryDirectory withTempDirectory verbosity tmp "cabal-symlink-test" $ \tmpDirPath -> do let from = tmpDirPath "file.txt" let to = tmpDirPath "file2.txt" -- create a file BS.writeFile from (BS8.pack "TEST") -- create a symbolic link let create :: IO Bool create = do createFileLink from to info verbosity $ "Symlinking seems to work" return True create `catchIO` \exc -> do info verbosity $ "Symlinking doesn't seem to be working: " ++ show exc return False cabal-install-3.8.1.0/src/Distribution/Client/JobControl.hs0000644000000000000000000001222107346545000021651 0ustar0000000000000000{-# LANGUAGE FlexibleContexts #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.JobControl -- Copyright : (c) Duncan Coutts 2012 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- A job control concurrency abstraction ----------------------------------------------------------------------------- module Distribution.Client.JobControl ( JobControl, newSerialJobControl, newParallelJobControl, spawnJob, collectJob, remainingJobs, cancelJobs, JobLimit, newJobLimit, withJobLimit, Lock, newLock, criticalSection ) where import Distribution.Client.Compat.Prelude import Prelude () import Control.Monad (forever, replicateM_) import Control.Concurrent (forkIO) import Control.Concurrent.MVar import Control.Concurrent.STM (STM, atomically) import Control.Concurrent.STM.TVar import Control.Concurrent.STM.TChan import Control.Exception (bracket_, try) import Distribution.Compat.Stack import Distribution.Client.Compat.Semaphore -- | A simple concurrency abstraction. Jobs can be spawned and can complete -- in any order. This allows both serial and parallel implementations. 
-- data JobControl m a = JobControl { -- | Add a new job to the pool of jobs spawnJob :: m a -> m (), -- | Wait until one job is complete collectJob :: m a, -- | Returns True if there are any outstanding jobs -- (ie spawned but yet to be collected) remainingJobs :: m Bool, -- | Try to cancel any outstanding but not-yet-started jobs. -- Call 'remainingJobs' after this to find out if any jobs are left -- (ie could not be cancelled). cancelJobs :: m () } -- | Make a 'JobControl' that executes all jobs serially and in order. -- It only executes jobs on demand when they are collected, not eagerly. -- -- Cancelling will cancel /all/ jobs that have not been collected yet. -- newSerialJobControl :: IO (JobControl IO a) newSerialJobControl = do qVar <- newTChanIO return JobControl { spawnJob = spawn qVar, collectJob = collect qVar, remainingJobs = remaining qVar, cancelJobs = cancel qVar } where spawn :: TChan (IO a) -> IO a -> IO () spawn qVar job = atomically $ writeTChan qVar job collect :: TChan (IO a) -> IO a collect qVar = join $ atomically $ readTChan qVar remaining :: TChan (IO a) -> IO Bool remaining qVar = fmap not $ atomically $ isEmptyTChan qVar cancel :: TChan (IO a) -> IO () cancel qVar = do _ <- atomically $ readAllTChan qVar return () -- | Make a 'JobControl' that eagerly executes jobs in parallel, with a given -- maximum degree of parallelism. -- -- Cancelling will cancel jobs that have not yet begun executing, but jobs -- that have already been executed or are currently executing cannot be -- cancelled. 
-- newParallelJobControl :: WithCallStack (Int -> IO (JobControl IO a)) newParallelJobControl n | n < 1 || n > 1000 = error $ "newParallelJobControl: not a sensible number of jobs: " ++ show n newParallelJobControl maxJobLimit = do inqVar <- newTChanIO outqVar <- newTChanIO countVar <- newTVarIO 0 replicateM_ maxJobLimit $ forkIO $ worker inqVar outqVar return JobControl { spawnJob = spawn inqVar countVar, collectJob = collect outqVar countVar, remainingJobs = remaining countVar, cancelJobs = cancel inqVar countVar } where worker :: TChan (IO a) -> TChan (Either SomeException a) -> IO () worker inqVar outqVar = forever $ do job <- atomically $ readTChan inqVar res <- try job atomically $ writeTChan outqVar res spawn :: TChan (IO a) -> TVar Int -> IO a -> IO () spawn inqVar countVar job = atomically $ do modifyTVar' countVar (+1) writeTChan inqVar job collect :: TChan (Either SomeException a) -> TVar Int -> IO a collect outqVar countVar = do res <- atomically $ do modifyTVar' countVar (subtract 1) readTChan outqVar either throwIO return res remaining :: TVar Int -> IO Bool remaining countVar = fmap (/=0) $ atomically $ readTVar countVar cancel :: TChan (IO a) -> TVar Int -> IO () cancel inqVar countVar = atomically $ do xs <- readAllTChan inqVar modifyTVar' countVar (subtract (length xs)) readAllTChan :: TChan a -> STM [a] readAllTChan qvar = go [] where go xs = do mx <- tryReadTChan qvar case mx of Nothing -> return (reverse xs) Just x -> go (x:xs) ------------------------- -- Job limits and locks -- data JobLimit = JobLimit QSem newJobLimit :: Int -> IO JobLimit newJobLimit n = fmap JobLimit (newQSem n) withJobLimit :: JobLimit -> IO a -> IO a withJobLimit (JobLimit sem) = bracket_ (waitQSem sem) (signalQSem sem) newtype Lock = Lock (MVar ()) newLock :: IO Lock newLock = fmap Lock $ newMVar () criticalSection :: Lock -> IO a -> IO a criticalSection (Lock lck) act = bracket_ (takeMVar lck) (putMVar lck ()) act 
cabal-install-3.8.1.0/src/Distribution/Client/List.hs0000644000000000000000000006263507346545000020527 0ustar0000000000000000{-# LANGUAGE ScopedTypeVariables #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.List -- Copyright : (c) David Himmelstrup 2005 -- Duncan Coutts 2008-2011 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- -- Search for and print information about packages ----------------------------------------------------------------------------- module Distribution.Client.List ( list, info ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Package ( PackageName, Package(..), packageName , packageVersion, UnitId ) import Distribution.Types.Dependency import Distribution.Types.UnqualComponentName import Distribution.ModuleName (ModuleName) import Distribution.License (License) import qualified Distribution.InstalledPackageInfo as Installed import qualified Distribution.PackageDescription as Source import Distribution.PackageDescription ( PackageFlag(..), unFlagName ) import Distribution.PackageDescription.Configuration ( flattenPackageDescription ) import Distribution.Simple.Compiler ( Compiler, PackageDBStack ) import Distribution.Simple.Program (ProgramDb) import Distribution.Simple.Utils ( equating, die', notice ) import Distribution.Simple.Setup (fromFlag, fromFlagOrDefault) import Distribution.Simple.PackageIndex (InstalledPackageIndex) import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex import Distribution.Version ( Version, mkVersion, versionNumbers, VersionRange, withinRange, anyVersion , intersectVersionRanges, simplifyVersionRange ) import qualified Distribution.SPDX as SPDX import Distribution.Solver.Types.PackageConstraint import qualified Distribution.Solver.Types.PackageIndex as PackageIndex import Distribution.Solver.Types.SourcePackage import Distribution.Client.Types ( SourcePackageDb(..), 
PackageSpecifier(..), UnresolvedSourcePackage ) import Distribution.Client.Targets ( UserTarget, resolveUserTargets ) import Distribution.Client.Setup ( GlobalFlags(..), ListFlags(..), InfoFlags(..) , RepoContext(..) ) import Distribution.Client.Utils ( mergeBy, MergeResult(..) ) import Distribution.Client.IndexUtils as IndexUtils ( getSourcePackages, getInstalledPackages ) import Distribution.Client.FetchUtils ( isFetched ) import Data.Bits ((.|.)) import Data.List ( maximumBy ) import Data.List.NonEmpty (groupBy) import qualified Data.List as L import Data.Maybe ( fromJust ) import qualified Data.Map as Map import Data.Tree as Tree import Control.Exception ( assert ) import qualified Text.PrettyPrint as Disp import Text.PrettyPrint ( lineLength, ribbonsPerLine, Doc, renderStyle, char , nest, ($+$), text, vcat, style, parens, fsep) import System.Directory ( doesDirectoryExist ) import Distribution.Utils.ShortText (ShortText) import qualified Distribution.Utils.ShortText as ShortText import qualified Text.Regex.Base as Regex import qualified Text.Regex.Posix.String as Regex -- | Return a list of packages matching given search strings. 
getPkgList :: Verbosity -> PackageDBStack -> RepoContext -> Maybe (Compiler, ProgramDb) -> ListFlags -> [String] -> IO [PackageDisplayInfo] getPkgList verbosity packageDBs repoCtxt mcompprogdb listFlags pats = do installedPkgIndex <- for mcompprogdb $ \(comp, progdb) -> getInstalledPackages verbosity comp packageDBs progdb sourcePkgDb <- getSourcePackages verbosity repoCtxt regexps <- for pats $ \pat -> do e <- Regex.compile compOption Regex.execBlank pat case e of Right r -> return r Left err -> die' verbosity $ "Failed to compile regex " ++ pat ++ ": " ++ snd err let sourcePkgIndex = packageIndex sourcePkgDb prefs name = fromMaybe anyVersion (Map.lookup name (packagePreferences sourcePkgDb)) pkgsInfoMatching :: [(PackageName, [Installed.InstalledPackageInfo], [UnresolvedSourcePackage])] pkgsInfoMatching = let matchingInstalled = maybe [] (matchingPackages InstalledPackageIndex.searchWithPredicate regexps) installedPkgIndex matchingSource = matchingPackages (\ idx n -> concatMap snd (PackageIndex.searchWithPredicate idx n)) regexps sourcePkgIndex in mergePackages matchingInstalled matchingSource pkgsInfo :: [(PackageName, [Installed.InstalledPackageInfo], [UnresolvedSourcePackage])] pkgsInfo -- gather info for all packages | null regexps = mergePackages (maybe [] InstalledPackageIndex.allPackages installedPkgIndex) ( PackageIndex.allPackages sourcePkgIndex) -- gather info for packages matching search term | otherwise = pkgsInfoMatching matches :: [PackageDisplayInfo] matches = [ mergePackageInfo pref installedPkgs sourcePkgs selectedPkg False | (pkgname, installedPkgs, sourcePkgs) <- pkgsInfo , not onlyInstalled || not (null installedPkgs) , let pref = prefs pkgname selectedPkg = latestWithPref pref sourcePkgs ] return matches where onlyInstalled = fromFlagOrDefault False (listInstalled listFlags) caseInsensitive = fromFlagOrDefault True (listCaseInsensitive listFlags) compOption | caseInsensitive = Regex.compExtended .|. 
Regex.compIgnoreCase | otherwise = Regex.compExtended matchingPackages search regexps index = [ pkg | re <- regexps , pkg <- search index (Regex.matchTest re) ] -- | Show information about packages. list :: Verbosity -> PackageDBStack -> RepoContext -> Maybe (Compiler, ProgramDb) -> ListFlags -> [String] -> IO () list verbosity packageDBs repos mcompProgdb listFlags pats = do matches <- getPkgList verbosity packageDBs repos mcompProgdb listFlags pats if simpleOutput then putStr $ unlines [ prettyShow (pkgName pkg) ++ " " ++ prettyShow version | pkg <- matches , version <- if onlyInstalled then installedVersions pkg else nub . sort $ installedVersions pkg ++ sourceVersions pkg ] -- Note: this only works because for 'list', one cannot currently -- specify any version constraints, so listing all installed -- and source ones works. else if null matches then notice verbosity "No matches found." else putStr $ unlines (map showPackageSummaryInfo matches) where onlyInstalled = fromFlag (listInstalled listFlags) simpleOutput = fromFlag (listSimpleOutput listFlags) info :: Verbosity -> PackageDBStack -> RepoContext -> Compiler -> ProgramDb -> GlobalFlags -> InfoFlags -> [UserTarget] -> IO () info verbosity _ _ _ _ _ _ [] = notice verbosity "No packages requested. Nothing to do." info verbosity packageDBs repoCtxt comp progdb _ _listFlags userTargets = do installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb sourcePkgDb <- getSourcePackages verbosity repoCtxt let sourcePkgIndex = packageIndex sourcePkgDb prefs name = fromMaybe anyVersion (Map.lookup name (packagePreferences sourcePkgDb)) -- Users may specify names of packages that are only installed, not -- just available source packages, so we must resolve targets using -- the combination of installed and source packages. 
let sourcePkgs' = PackageIndex.fromList $ map packageId (InstalledPackageIndex.allPackages installedPkgIndex) ++ map packageId (PackageIndex.allPackages sourcePkgIndex) pkgSpecifiers <- resolveUserTargets verbosity repoCtxt sourcePkgs' userTargets pkgsinfo <- sequenceA [ do pkginfo <- either (die' verbosity) return $ gatherPkgInfo prefs installedPkgIndex sourcePkgIndex pkgSpecifier updateFileSystemPackageDetails pkginfo | pkgSpecifier <- pkgSpecifiers ] putStr $ unlines (map showPackageDetailedInfo pkgsinfo) where gatherPkgInfo :: (PackageName -> VersionRange) -> InstalledPackageIndex -> PackageIndex.PackageIndex UnresolvedSourcePackage -> PackageSpecifier UnresolvedSourcePackage -> Either String PackageDisplayInfo gatherPkgInfo prefs installedPkgIndex sourcePkgIndex (NamedPackage name props) | null (selectedInstalledPkgs) && null (selectedSourcePkgs) = Left $ "There is no available version of " ++ prettyShow name ++ " that satisfies " ++ prettyShow (simplifyVersionRange verConstraint) | otherwise = Right $ mergePackageInfo pref installedPkgs sourcePkgs selectedSourcePkg' showPkgVersion where (pref, installedPkgs, sourcePkgs) = sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex selectedInstalledPkgs = InstalledPackageIndex.lookupDependency installedPkgIndex name verConstraint selectedSourcePkgs = PackageIndex.lookupDependency sourcePkgIndex name verConstraint selectedSourcePkg' = latestWithPref pref selectedSourcePkgs -- display a specific package version if the user -- supplied a non-trivial version constraint showPkgVersion = not (null verConstraints) verConstraint = foldr intersectVersionRanges anyVersion verConstraints verConstraints = [ vr | PackagePropertyVersion vr <- props ] gatherPkgInfo prefs installedPkgIndex sourcePkgIndex (SpecificSourcePackage pkg) = Right $ mergePackageInfo pref installedPkgs sourcePkgs selectedPkg True where name = packageName pkg selectedPkg = Just pkg (pref, installedPkgs, sourcePkgs) = sourcePkgsInfo prefs name 
installedPkgIndex sourcePkgIndex sourcePkgsInfo :: (PackageName -> VersionRange) -> PackageName -> InstalledPackageIndex -> PackageIndex.PackageIndex UnresolvedSourcePackage -> (VersionRange, [Installed.InstalledPackageInfo], [UnresolvedSourcePackage]) sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex = (pref, installedPkgs, sourcePkgs) where pref = prefs name installedPkgs = concatMap snd (InstalledPackageIndex.lookupPackageName installedPkgIndex name) sourcePkgs = PackageIndex.lookupPackageName sourcePkgIndex name -- | The info that we can display for each package. It is information per -- package name and covers all installed and available versions. -- data PackageDisplayInfo = PackageDisplayInfo { pkgName :: PackageName, selectedVersion :: Maybe Version, selectedSourcePkg :: Maybe UnresolvedSourcePackage, installedVersions :: [Version], sourceVersions :: [Version], preferredVersions :: VersionRange, homepage :: ShortText, bugReports :: ShortText, sourceRepo :: String, -- TODO synopsis :: ShortText, description :: ShortText, category :: ShortText, license :: Either SPDX.License License, author :: ShortText, maintainer :: ShortText, dependencies :: [ExtDependency], flags :: [PackageFlag], hasLib :: Bool, hasExe :: Bool, executables :: [UnqualComponentName], modules :: [ModuleName], haddockHtml :: FilePath, haveTarball :: Bool } -- | Covers source dependencies and installed dependencies in -- one type. 
data ExtDependency = SourceDependency Dependency | InstalledDependency UnitId showPackageSummaryInfo :: PackageDisplayInfo -> String showPackageSummaryInfo pkginfo = renderStyle (style {lineLength = 80, ribbonsPerLine = 1}) $ char '*' <+> pretty (pkgName pkginfo) $+$ (nest 4 $ vcat [ maybeShowST (synopsis pkginfo) "Synopsis:" reflowParagraphs , text "Default available version:" <+> case selectedSourcePkg pkginfo of Nothing -> text "[ Not available from any configured repository ]" Just pkg -> pretty (packageVersion pkg) , text "Installed versions:" <+> case installedVersions pkginfo of [] | hasLib pkginfo -> text "[ Not installed ]" | otherwise -> text "[ Unknown ]" versions -> dispTopVersions 4 (preferredVersions pkginfo) versions , maybeShowST (homepage pkginfo) "Homepage:" text , text "License: " <+> either pretty pretty (license pkginfo) ]) $+$ text "" where maybeShowST l s f | ShortText.null l = Disp.empty | otherwise = text s <+> f (ShortText.fromShortText l) showPackageDetailedInfo :: PackageDisplayInfo -> String showPackageDetailedInfo pkginfo = renderStyle (style {lineLength = 80, ribbonsPerLine = 1}) $ char '*' <+> pretty (pkgName pkginfo) <<>> maybe Disp.empty (\v -> char '-' Disp.<> pretty v) (selectedVersion pkginfo) <+> text (replicate (16 - length (prettyShow (pkgName pkginfo))) ' ') <<>> parens pkgkind $+$ (nest 4 $ vcat [ entryST "Synopsis" synopsis hideIfNull reflowParagraphs , entry "Versions available" sourceVersions (altText null "[ Not available from server ]") (dispTopVersions 9 (preferredVersions pkginfo)) , entry "Versions installed" installedVersions (altText null (if hasLib pkginfo then "[ Not installed ]" else "[ Unknown ]")) (dispTopVersions 4 (preferredVersions pkginfo)) , entryST "Homepage" homepage orNotSpecified text , entryST "Bug reports" bugReports orNotSpecified text , entryST "Description" description hideIfNull reflowParagraphs , entryST "Category" category hideIfNull text , entry "License" license alwaysShow (either pretty 
pretty) , entryST "Author" author hideIfNull reflowLines , entryST "Maintainer" maintainer hideIfNull reflowLines , entry "Source repo" sourceRepo orNotSpecified text , entry "Executables" executables hideIfNull (commaSep pretty) , entry "Flags" flags hideIfNull (commaSep dispFlag) , entry "Dependencies" dependencies hideIfNull (commaSep dispExtDep) , entry "Documentation" haddockHtml showIfInstalled text , entry "Cached" haveTarball alwaysShow dispYesNo , if not (hasLib pkginfo) then mempty else text "Modules:" $+$ nest 4 (vcat (map pretty . sort . modules $ pkginfo)) ]) $+$ text "" where entry fname field cond format = case cond (field pkginfo) of Nothing -> label <+> format (field pkginfo) Just Nothing -> mempty Just (Just other) -> label <+> text other where label = text fname Disp.<> char ':' Disp.<> padding padding = text (replicate (13 - length fname ) ' ') entryST fname field = entry fname (ShortText.fromShortText . field) normal = Nothing hide = Just Nothing replace msg = Just (Just msg) alwaysShow = const normal hideIfNull v = if null v then hide else normal showIfInstalled v | not isInstalled = hide | null v = replace "[ Not installed ]" | otherwise = normal altText nul msg v = if nul v then replace msg else normal orNotSpecified = altText null "[ Not specified ]" commaSep f = Disp.fsep . Disp.punctuate (Disp.char ',') . map f dispFlag = text . unFlagName . flagName dispYesNo True = text "Yes" dispYesNo False = text "No" dispExtDep (SourceDependency dep) = pretty dep dispExtDep (InstalledDependency dep) = pretty dep isInstalled = not (null (installedVersions pkginfo)) hasExes = length (executables pkginfo) >= 2 --TODO: exclude non-buildable exes pkgkind | hasLib pkginfo && hasExes = text "programs and library" | hasLib pkginfo && hasExe pkginfo = text "program and library" | hasLib pkginfo = text "library" | hasExes = text "programs" | hasExe pkginfo = text "program" | otherwise = mempty reflowParagraphs :: String -> Doc reflowParagraphs = vcat . 
intersperse (text "") -- re-insert blank lines . map (fsep . map text . concatMap words) -- reflow paragraphs . filter (/= [""]) . L.groupBy (\x y -> "" `notElem` [x,y]) -- break on blank lines . lines reflowLines :: String -> Doc reflowLines = vcat . map text . lines -- | We get the 'PackageDisplayInfo' by combining the info for the installed -- and available versions of a package. -- -- * We're building info about a various versions of a single named package so -- the input package info records are all supposed to refer to the same -- package name. -- mergePackageInfo :: VersionRange -> [Installed.InstalledPackageInfo] -> [UnresolvedSourcePackage] -> Maybe UnresolvedSourcePackage -> Bool -> PackageDisplayInfo mergePackageInfo versionPref installedPkgs sourcePkgs selectedPkg showVer = assert (length installedPkgs + length sourcePkgs > 0) $ PackageDisplayInfo { pkgName = combine packageName source packageName installed, selectedVersion = if showVer then fmap packageVersion selectedPkg else Nothing, selectedSourcePkg = sourceSelected, installedVersions = map packageVersion installedPkgs, sourceVersions = map packageVersion sourcePkgs, preferredVersions = versionPref, license = combine Source.licenseRaw source Installed.license installed, maintainer = combine Source.maintainer source Installed.maintainer installed, author = combine Source.author source Installed.author installed, homepage = combine Source.homepage source Installed.homepage installed, bugReports = maybe mempty Source.bugReports source, sourceRepo = fromMaybe mempty . join . fmap (uncons Nothing Source.repoLocation . sortBy (comparing Source.repoKind) . 
Source.sourceRepos) $ source, --TODO: installed package info is missing synopsis synopsis = maybe mempty Source.synopsis source, description = combine Source.description source Installed.description installed, category = combine Source.category source Installed.category installed, flags = maybe [] Source.genPackageFlags sourceGeneric, hasLib = isJust installed || maybe False (isJust . Source.condLibrary) sourceGeneric, hasExe = maybe False (not . null . Source.condExecutables) sourceGeneric, executables = map fst (maybe [] Source.condExecutables sourceGeneric), modules = combine (map Installed.exposedName . Installed.exposedModules) installed -- NB: only for the PUBLIC library (concatMap getListOfExposedModules . maybeToList . Source.library) source, dependencies = combine (map (SourceDependency . simplifyDependency) . Source.allBuildDepends) source (map InstalledDependency . Installed.depends) installed, haddockHtml = fromMaybe "" . join . fmap (listToMaybe . Installed.haddockHTMLs) $ installed, haveTarball = False } where combine f x g y = fromJust (fmap f x `mplus` fmap g y) installed :: Maybe Installed.InstalledPackageInfo installed = latestWithPref versionPref installedPkgs getListOfExposedModules lib = Source.exposedModules lib ++ map Source.moduleReexportName (Source.reexportedModules lib) sourceSelected | isJust selectedPkg = selectedPkg | otherwise = latestWithPref versionPref sourcePkgs sourceGeneric = fmap srcpkgDescription sourceSelected source = fmap flattenPackageDescription sourceGeneric uncons :: b -> (a -> b) -> [a] -> b uncons z _ [] = z uncons _ f (x:_) = f x -- | Not all the info is pure. We have to check if the docs really are -- installed, because the registered package info lies. Similarly we have to -- check if the tarball has indeed been fetched. -- updateFileSystemPackageDetails :: PackageDisplayInfo -> IO PackageDisplayInfo updateFileSystemPackageDetails pkginfo = do fetched <- maybe (return False) (isFetched . 
srcpkgSource) (selectedSourcePkg pkginfo) docsExist <- doesDirectoryExist (haddockHtml pkginfo) return pkginfo { haveTarball = fetched, haddockHtml = if docsExist then haddockHtml pkginfo else "" } latestWithPref :: Package pkg => VersionRange -> [pkg] -> Maybe pkg latestWithPref _ [] = Nothing latestWithPref pref pkgs = Just (maximumBy (comparing prefThenVersion) pkgs) where prefThenVersion pkg = let ver = packageVersion pkg in (withinRange ver pref, ver) -- | Rearrange installed and source packages into groups referring to the -- same package by name. In the result pairs, the lists are guaranteed to not -- both be empty. -- mergePackages :: [Installed.InstalledPackageInfo] -> [UnresolvedSourcePackage] -> [( PackageName , [Installed.InstalledPackageInfo] , [UnresolvedSourcePackage] )] mergePackages installedPkgs sourcePkgs = map collect $ mergeBy (\i a -> fst i `compare` fst a) (groupOn packageName installedPkgs) (groupOn packageName sourcePkgs) where collect (OnlyInLeft (name,is) ) = (name, is, []) collect ( InBoth (_,is) (name,as)) = (name, is, as) collect (OnlyInRight (name,as)) = (name, [], as) groupOn :: Ord key => (a -> key) -> [a] -> [(key,[a])] groupOn key = map (\xs -> (key (head xs), toList xs)) . groupBy (equating key) . sortBy (comparing key) dispTopVersions :: Int -> VersionRange -> [Version] -> Doc dispTopVersions n pref vs = (Disp.fsep . Disp.punctuate (Disp.char ',') . map (\ver -> if ispref ver then pretty ver else parens (pretty ver)) . sort . take n . interestingVersions ispref $ vs) <+> trailingMessage where ispref ver = withinRange ver pref extra = length vs - n trailingMessage | extra <= 0 = Disp.empty | otherwise = Disp.parens $ Disp.text "and" <+> Disp.int (length vs - n) <+> if extra == 1 then Disp.text "other" else Disp.text "others" -- | Reorder a bunch of versions to put the most interesting / significant -- versions first. A preferred version range is taken into account. 
-- -- This may be used in a user interface to select a small number of versions -- to present to the user, e.g. -- -- > let selectVersions = sort . take 5 . interestingVersions pref -- interestingVersions :: (Version -> Bool) -> [Version] -> [Version] interestingVersions pref = map (mkVersion . fst) . filter snd . concat . Tree.levels . swizzleTree . reorderTree (\(Node (v,_) _) -> pref (mkVersion v)) . reverseTree . mkTree . map (or0 . versionNumbers) where or0 [] = 0 :| [] or0 (x:xs) = x :| xs swizzleTree = unfoldTree (spine []) where spine ts' (Node x []) = (x, ts') spine ts' (Node x (t:ts)) = spine (Node x ts:ts') t reorderTree _ (Node x []) = Node x [] reorderTree p (Node x ts) = Node x (ts' ++ ts'') where (ts',ts'') = partition p (map (reorderTree p) ts) reverseTree (Node x cs) = Node x (reverse (map reverseTree cs)) mkTree :: forall a. Eq a => [NonEmpty a] -> Tree ([a], Bool) mkTree xs = unfoldTree step (False, [], xs) where step :: (Bool, [a], [NonEmpty a]) -> (([a], Bool), [(Bool, [a], [NonEmpty a])]) step (node,ns,vs) = ( (reverse ns, node) , [ (any null vs', n:ns, mapMaybe nonEmpty (toList vs')) | (n, vs') <- groups vs ] ) groups :: [NonEmpty a] -> [(a, NonEmpty [a])] groups = map (\g -> (head (head g), fmap tail g)) . groupBy (equating head) cabal-install-3.8.1.0/src/Distribution/Client/Manpage.hs0000644000000000000000000002124107346545000021150 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Manpage -- Copyright : (c) Maciek Makowski 2015 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Functions for building the manual page. 
module Distribution.Client.Manpage ( -- * Manual page generation manpage , manpageCmd , ManpageFlags , defaultManpageFlags , manpageOptions ) where import Distribution.Client.Compat.Prelude import Prelude () import qualified Data.List.NonEmpty as List1 import Distribution.Client.Init.Utils (trim) import Distribution.Client.ManpageFlags import Distribution.Client.Setup (globalCommand) import Distribution.Simple.Command import Distribution.Simple.Flag (fromFlagOrDefault) import Distribution.Simple.Utils ( IOData(..), IODataMode(..), createProcessWithEnv, ignoreSigPipe, rawSystemStdInOut ) import qualified Distribution.Verbosity as Verbosity import System.IO (hClose, hPutStr) import System.Environment (lookupEnv) import System.FilePath (takeFileName) import qualified System.Process as Process data FileInfo = FileInfo String String -- ^ path, description ------------------------------------------------------------------------------- -- ------------------------------------------------------------------------------- -- | A list of files that should be documented in the manual page. files :: [FileInfo] files = [ (FileInfo "~/.cabal/config" "The defaults that can be overridden with command-line options.") ] manpageCmd :: String -> [CommandSpec a] -> ManpageFlags -> IO () manpageCmd pname commands flags | fromFlagOrDefault False (manpageRaw flags) = putStrLn contents | otherwise = ignoreSigPipe $ do -- 2021-10-08, issue #7714 -- @cabal man --raw | man -l -@ does not work on macOS/BSD, -- because BSD-man does not support option @-l@, rather would -- accept directly a file argument, e.g. @man /dev/stdin@. -- The following works both on macOS and Linux -- (but not on Windows out-of-the-box): -- -- cabal man --raw | nroff -man /dev/stdin | less -- -- So let us simulate this! 
-- Feed contents into @nroff -man /dev/stdin@ (formatted, _errors, ec1) <- rawSystemStdInOut Verbosity.normal "nroff" [ "-man", "/dev/stdin" ] Nothing -- Inherit working directory Nothing -- Inherit environment (Just $ IODataText contents) IODataModeText unless (ec1 == ExitSuccess) $ exitWith ec1 pager <- fromMaybe "less" <$> lookupEnv "PAGER" -- 'less' is borked with color sequences otherwise let pagerArgs = if takeFileName pager == "less" then ["-R"] else [] -- Pipe output of @nroff@ into @less@ (Just inLess, _, _, procLess) <- createProcessWithEnv Verbosity.normal pager pagerArgs Nothing -- Inherit working directory Nothing -- Inherit environment Process.CreatePipe -- in Process.Inherit -- out Process.Inherit -- err hPutStr inLess formatted hClose inLess exitWith =<< Process.waitForProcess procLess where contents :: String contents = manpage pname commands -- | Produces a manual page with @troff@ markup. manpage :: String -> [CommandSpec a] -> String manpage pname commands = unlines $ [ ".TH " ++ map toUpper pname ++ " 1" , ".SH NAME" , pname ++ " \\- a system for building and packaging Haskell libraries and programs" , ".SH SYNOPSIS" , ".B " ++ pname , ".I command" , ".RI < arguments |[ options ]>..." , "" , "Where the" , ".I commands" , "are" , "" ] ++ concatMap (commandSynopsisLines pname) commands ++ [ ".SH DESCRIPTION" , "Cabal is the standard package system for Haskell software. It helps people to configure, " , "build and install Haskell software and to distribute it easily to other users and developers." , "" , "The command line " ++ pname ++ " tool (also referred to as cabal-install) helps with " , "installing existing packages and developing new packages. " , "It can be used to work with local packages or to install packages from online package archives, " , "including automatically installing dependencies. 
By default it is configured to use Hackage, " , "which is Haskell's central package archive that contains thousands of libraries and applications " , "in the Cabal package format." , ".SH OPTIONS" , "Global options:" , "" ] ++ optionsLines (globalCommand []) ++ [ ".SH COMMANDS" ] ++ concatMap (commandDetailsLines pname) commands ++ [ ".SH FILES" ] ++ concatMap fileLines files ++ [ ".SH BUGS" , "To browse the list of known issues or report a new one please see " , "https://github.com/haskell/cabal/labels/cabal-install." ] commandSynopsisLines :: String -> CommandSpec action -> [String] commandSynopsisLines pname (CommandSpec ui _ NormalCommand) = [ ".B " ++ pname ++ " " ++ (commandName ui) , "- " ++ commandSynopsis ui , ".br" ] commandSynopsisLines _ (CommandSpec _ _ HiddenCommand) = [] commandDetailsLines :: String -> CommandSpec action -> [String] commandDetailsLines pname (CommandSpec ui _ NormalCommand) = [ ".B " ++ pname ++ " " ++ (commandName ui) , "" , commandUsage ui pname , "" ] ++ optional removeLineBreaks commandDescription ++ optional id commandNotes ++ [ "Flags:" , ".RS" ] ++ optionsLines ui ++ [ ".RE" , "" ] where optional f field = case field ui of Just text -> [ f $ text pname, "" ] Nothing -> [] -- 2021-10-12, https://github.com/haskell/cabal/issues/7714#issuecomment-940842905 -- Line breaks just before e.g. 'new-build' cause weird @nroff@ warnings. -- Thus: -- Remove line breaks but preserve paragraph breaks. -- We group lines by empty/non-empty and then 'unwords' -- blocks consisting of non-empty lines. removeLineBreaks = unlines . concatMap unwordsNonEmpty . List1.groupWith null . map trim . 
lines unwordsNonEmpty :: List1.NonEmpty String -> [String] unwordsNonEmpty ls1 = if null (List1.head ls1) then ls else [unwords ls] where ls = List1.toList ls1 commandDetailsLines _ (CommandSpec _ _ HiddenCommand) = [] optionsLines :: CommandUI flags -> [String] optionsLines command = concatMap optionLines (concatMap optionDescr (commandOptions command ParseArgs)) data ArgumentRequired = Optional | Required type OptionArg = (ArgumentRequired, ArgPlaceHolder) optionLines :: OptDescr flags -> [String] optionLines (ReqArg description (optionChars, optionStrings) placeHolder _ _) = argOptionLines description optionChars optionStrings (Required, placeHolder) optionLines (OptArg description (optionChars, optionStrings) placeHolder _ _ _) = argOptionLines description optionChars optionStrings (Optional, placeHolder) optionLines (BoolOpt description (trueChars, trueStrings) (falseChars, falseStrings) _ _) = optionLinesIfPresent trueChars trueStrings ++ optionLinesIfPresent falseChars falseStrings ++ optionDescriptionLines description optionLines (ChoiceOpt options) = concatMap choiceLines options where choiceLines (description, (optionChars, optionStrings), _, _) = [ optionsLine optionChars optionStrings ] ++ optionDescriptionLines description argOptionLines :: String -> [Char] -> [String] -> OptionArg -> [String] argOptionLines description optionChars optionStrings arg = [ optionsLine optionChars optionStrings , optionArgLine arg ] ++ optionDescriptionLines description optionLinesIfPresent :: [Char] -> [String] -> [String] optionLinesIfPresent optionChars optionStrings = if null optionChars && null optionStrings then [] else [ optionsLine optionChars optionStrings, ".br" ] optionDescriptionLines :: String -> [String] optionDescriptionLines description = [ ".RS" , description , ".RE" , "" ] optionsLine :: [Char] -> [String] -> String optionsLine optionChars optionStrings = intercalate ", " (shortOptions optionChars ++ longOptions optionStrings) shortOptions :: [Char] -> 
[String] shortOptions = map (\c -> "\\-" ++ [c]) longOptions :: [String] -> [String] longOptions = map (\s -> "\\-\\-" ++ s) optionArgLine :: OptionArg -> String optionArgLine (Required, placeHolder) = ".I " ++ placeHolder optionArgLine (Optional, placeHolder) = ".RI [ " ++ placeHolder ++ " ]" fileLines :: FileInfo -> [String] fileLines (FileInfo path description) = [ path , ".RS" , description , ".RE" , "" ] cabal-install-3.8.1.0/src/Distribution/Client/ManpageFlags.hs0000644000000000000000000000217507346545000022132 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE LambdaCase #-} module Distribution.Client.ManpageFlags ( ManpageFlags (..) , defaultManpageFlags , manpageOptions, ) where import Distribution.Client.Compat.Prelude import Distribution.Simple.Command (OptionField (..), ShowOrParseArgs (..), option) import Distribution.Simple.Setup (Flag (..), toFlag, trueArg, optionVerbosity) import Distribution.Verbosity (normal) data ManpageFlags = ManpageFlags { manpageVerbosity :: Flag Verbosity , manpageRaw :: Flag Bool } deriving (Eq, Show, Generic) instance Monoid ManpageFlags where mempty = gmempty mappend = (<>) instance Semigroup ManpageFlags where (<>) = gmappend defaultManpageFlags :: ManpageFlags defaultManpageFlags = ManpageFlags { manpageVerbosity = toFlag normal , manpageRaw = toFlag False } manpageOptions :: ShowOrParseArgs -> [OptionField ManpageFlags] manpageOptions _ = [ optionVerbosity manpageVerbosity (\v flags -> flags { manpageVerbosity = v }) , option "" ["raw"] "Output raw troff content" manpageRaw (\v flags -> flags { manpageRaw = v }) trueArg ] cabal-install-3.8.1.0/src/Distribution/Client/Nix.hs0000644000000000000000000001310407346545000020335 0ustar0000000000000000{-# LANGUAGE LambdaCase #-} {-# LANGUAGE ViewPatterns #-} module Distribution.Client.Nix ( findNixExpr , inNixShell , nixInstantiate , nixShell ) where import Distribution.Client.Compat.Prelude import Control.Exception (bracket) import System.Directory ( 
canonicalizePath, createDirectoryIfMissing, doesDirectoryExist , doesFileExist, removeDirectoryRecursive, removeFile ) import System.Environment (getArgs, getExecutablePath) import System.FilePath ( (), replaceExtension, takeDirectory, takeFileName ) import System.IO (IOMode(..), hClose, openFile) import System.IO.Error (isDoesNotExistError) import System.Process (showCommandForUser) import Distribution.Compat.Environment ( lookupEnv, setEnv, unsetEnv ) import Distribution.Simple.Program ( Program(..), ProgramDb , addKnownProgram, configureProgram, emptyProgramDb, getDbProgramOutput , runDbProgram, simpleProgram ) import Distribution.Simple.Setup (fromFlagOrDefault) import Distribution.Simple.Utils (debug, existsAndIsMoreRecentThan) import Distribution.Client.Config (SavedConfig(..)) import Distribution.Client.GlobalFlags (GlobalFlags(..)) configureOneProgram :: Verbosity -> Program -> IO ProgramDb configureOneProgram verb prog = configureProgram verb prog (addKnownProgram prog emptyProgramDb) touchFile :: FilePath -> IO () touchFile path = do catch (removeFile path) (\e -> when (isDoesNotExistError e) (return ())) createDirectoryIfMissing True (takeDirectory path) openFile path WriteMode >>= hClose findNixExpr :: GlobalFlags -> SavedConfig -> IO (Maybe FilePath) findNixExpr globalFlags config = do -- criteria for deciding to run nix-shell let nixEnabled = fromFlagOrDefault False (globalNix (savedGlobalFlags config) <> globalNix globalFlags) if nixEnabled then do let exprPaths = [ "shell.nix", "default.nix" ] filterM doesFileExist exprPaths >>= \case [] -> return Nothing (path : _) -> return (Just path) else return Nothing -- set IN_NIX_SHELL so that builtins.getEnv in Nix works as in nix-shell inFakeNixShell :: IO a -> IO a inFakeNixShell f = bracket (fakeEnv "IN_NIX_SHELL" "1") (resetEnv "IN_NIX_SHELL") (\_ -> f) where fakeEnv var new = do old <- lookupEnv var setEnv var new return old resetEnv var = maybe (unsetEnv var) (setEnv var) nixInstantiate :: Verbosity 
-> FilePath -> Bool -> GlobalFlags -> SavedConfig -> IO () nixInstantiate verb dist force' globalFlags config = findNixExpr globalFlags config >>= \case Nothing -> return () Just shellNix -> do alreadyInShell <- inNixShell shellDrv <- drvPath dist shellNix instantiated <- doesFileExist shellDrv -- an extra timestamp file is necessary because the derivation lives in -- the store so its mtime is always 1. let timestamp = timestampPath dist shellNix upToDate <- existsAndIsMoreRecentThan timestamp shellNix let ready = alreadyInShell || (instantiated && upToDate && not force') unless ready $ do let prog = simpleProgram "nix-instantiate" progdb <- configureOneProgram verb prog removeGCRoots verb dist touchFile timestamp _ <- inFakeNixShell (getDbProgramOutput verb prog progdb [ "--add-root", shellDrv, "--indirect", shellNix ]) return () nixShell :: Verbosity -> FilePath -> GlobalFlags -> SavedConfig -> IO () -- ^ The action to perform inside a nix-shell. This is also the action -- that will be performed immediately if Nix is disabled. -> IO () nixShell verb dist globalFlags config go = do alreadyInShell <- inNixShell if alreadyInShell then go else do findNixExpr globalFlags config >>= \case Nothing -> go Just shellNix -> do let prog = simpleProgram "nix-shell" progdb <- configureOneProgram verb prog cabal <- getExecutablePath -- alreadyInShell == True in child process setEnv "CABAL_IN_NIX_SHELL" "1" -- Run cabal with the same arguments inside nix-shell. -- When the child process reaches the top of nixShell, it will -- detect that it is running inside the shell and fall back -- automatically. shellDrv <- drvPath dist shellNix args <- getArgs runDbProgram verb prog progdb [ "--add-root", gcrootPath dist "result", "--indirect", shellDrv , "--run", showCommandForUser cabal args ] drvPath :: FilePath -> FilePath -> IO FilePath drvPath dist path = do -- We do not actually care about canonicity, but makeAbsolute is only -- available in newer versions of directory. 
-- We expect the path to be a symlink if it exists, so we do not canonicalize -- the entire path because that would dereference the symlink. distNix <- canonicalizePath (dist "nix") -- Nix garbage collector roots must be absolute paths return (distNix replaceExtension (takeFileName path) "drv") timestampPath :: FilePath -> FilePath -> FilePath timestampPath dist path = dist "nix" replaceExtension (takeFileName path) "drv.timestamp" gcrootPath :: FilePath -> FilePath gcrootPath dist = dist "nix" "gcroots" inNixShell :: IO Bool inNixShell = isJust <$> lookupEnv "CABAL_IN_NIX_SHELL" removeGCRoots :: Verbosity -> FilePath -> IO () removeGCRoots verb dist = do let tgt = gcrootPath dist exists <- doesDirectoryExist tgt when exists $ do debug verb ("removing Nix gcroots from " ++ tgt) removeDirectoryRecursive tgt cabal-install-3.8.1.0/src/Distribution/Client/NixStyleOptions.hs0000644000000000000000000000706007346545000022736 0ustar0000000000000000-- | Command line options for nix-style / v2 commands. -- -- The commands take a lot of the same options, which affect how install plan -- is constructed. 
module Distribution.Client.NixStyleOptions ( NixStyleFlags (..), nixStyleOptions, defaultNixStyleFlags, ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Simple.Command (OptionField (..), ShowOrParseArgs) import Distribution.Simple.Setup (BenchmarkFlags, HaddockFlags, TestFlags) import Distribution.Solver.Types.ConstraintSource (ConstraintSource (..)) import Distribution.Client.ProjectFlags (ProjectFlags (..), defaultProjectFlags, projectFlagsOptions) import Distribution.Client.Setup (ConfigExFlags, ConfigFlags (..), InstallFlags (..), benchmarkOptions, configureExOptions, configureOptions, haddockOptions, installOptions, liftOptions, testOptions) data NixStyleFlags a = NixStyleFlags { configFlags :: ConfigFlags , configExFlags :: ConfigExFlags , installFlags :: InstallFlags , haddockFlags :: HaddockFlags , testFlags :: TestFlags , benchmarkFlags :: BenchmarkFlags , projectFlags :: ProjectFlags , extraFlags :: a } nixStyleOptions :: (ShowOrParseArgs -> [OptionField a]) -> ShowOrParseArgs -> [OptionField (NixStyleFlags a)] nixStyleOptions commandOptions showOrParseArgs = liftOptions configFlags set1 -- Note: [Hidden Flags] -- hide "constraint", "dependency", and -- "exact-configuration" from the configure options. (filter ((`notElem` ["constraint", "dependency" , "exact-configuration"]) . optionName) $ configureOptions showOrParseArgs) ++ liftOptions configExFlags set2 (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag) ++ liftOptions installFlags set3 -- hide "target-package-db" and "symlink-bindir" flags from the -- install options. -- "symlink-bindir" is obsoleted by "installdir" in ClientInstallFlags (filter ((`notElem` ["target-package-db", "symlink-bindir"]) . optionName) $ installOptions showOrParseArgs) ++ liftOptions haddockFlags set4 -- hide "verbose" and "builddir" flags from the -- haddock options. (filter ((`notElem` ["v", "verbose", "builddir"]) . 
optionName) $ haddockOptions showOrParseArgs) ++ liftOptions testFlags set5 (testOptions showOrParseArgs) ++ liftOptions benchmarkFlags set6 (benchmarkOptions showOrParseArgs) ++ liftOptions projectFlags set7 (projectFlagsOptions showOrParseArgs) ++ liftOptions extraFlags set8 (commandOptions showOrParseArgs) where set1 x flags = flags { configFlags = x } set2 x flags = flags { configExFlags = x } set3 x flags = flags { installFlags = x } set4 x flags = flags { haddockFlags = x } set5 x flags = flags { testFlags = x } set6 x flags = flags { benchmarkFlags = x } set7 x flags = flags { projectFlags = x } set8 x flags = flags { extraFlags = x } defaultNixStyleFlags :: a -> NixStyleFlags a defaultNixStyleFlags x = NixStyleFlags { configFlags = mempty , configExFlags = mempty , installFlags = mempty , haddockFlags = mempty , testFlags = mempty , benchmarkFlags = mempty , projectFlags = defaultProjectFlags , extraFlags = x } cabal-install-3.8.1.0/src/Distribution/Client/PackageHash.hs0000644000000000000000000003526007346545000021745 0ustar0000000000000000{-# LANGUAGE RecordWildCards, NamedFieldPuns #-} {-# LANGUAGE DeriveDataTypeable, DeriveGeneric #-} -- | Functions to calculate nix-style hashes for package ids. 
-- -- The basic idea is simple, hash the combination of: -- -- * the package tarball -- * the ids of all the direct dependencies -- * other local configuration (flags, profiling, etc) -- module Distribution.Client.PackageHash ( -- * Calculating package hashes PackageHashInputs(..), PackageHashConfigInputs(..), PackageSourceHash, hashedInstalledPackageId, hashPackageHashInputs, renderPackageHashInputs, -- ** Platform-specific variations hashedInstalledPackageIdLong, hashedInstalledPackageIdShort, ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Package ( PackageId, PackageIdentifier(..), mkComponentId , PkgconfigName ) import Distribution.System ( Platform, OS(Windows, OSX), buildOS ) import Distribution.Types.Flag ( FlagAssignment, showFlagAssignment ) import Distribution.Simple.Compiler ( CompilerId, OptimisationLevel(..), DebugInfoLevel(..) , ProfDetailLevel(..), PackageDB, showProfDetailLevel ) import Distribution.Simple.InstallDirs ( PathTemplate, fromPathTemplate ) import Distribution.Types.PkgconfigVersion (PkgconfigVersion) import Distribution.Client.HashValue import Distribution.Client.Types ( InstalledPackageId ) import qualified Distribution.Solver.Types.ComponentDeps as CD import qualified Data.ByteString.Lazy.Char8 as LBS import qualified Data.Map as Map import qualified Data.Set as Set ------------------------------- -- Calculating package hashes -- -- | Calculate a 'InstalledPackageId' for a package using our nix-style -- inputs hashing method. -- -- Note that due to path length limitations on Windows, this function uses -- a different method on Windows that produces shorted package ids. -- See 'hashedInstalledPackageIdLong' vs 'hashedInstalledPackageIdShort'. 
-- hashedInstalledPackageId :: PackageHashInputs -> InstalledPackageId hashedInstalledPackageId | buildOS == Windows = hashedInstalledPackageIdShort | buildOS == OSX = hashedInstalledPackageIdVeryShort | otherwise = hashedInstalledPackageIdLong -- | Calculate a 'InstalledPackageId' for a package using our nix-style -- inputs hashing method. -- -- This produces large ids with big hashes. It is only suitable for systems -- without significant path length limitations (ie not Windows). -- hashedInstalledPackageIdLong :: PackageHashInputs -> InstalledPackageId hashedInstalledPackageIdLong pkghashinputs@PackageHashInputs{pkgHashPkgId,pkgHashComponent} = mkComponentId $ prettyShow pkgHashPkgId -- to be a bit user friendly ++ maybe "" displayComponent pkgHashComponent ++ "-" ++ showHashValue (hashPackageHashInputs pkghashinputs) where displayComponent :: CD.Component -> String displayComponent CD.ComponentLib = "" displayComponent (CD.ComponentSubLib s) = "-l-" ++ prettyShow s displayComponent (CD.ComponentFLib s) = "-f-" ++ prettyShow s displayComponent (CD.ComponentExe s) = "-e-" ++ prettyShow s displayComponent (CD.ComponentTest s) = "-t-" ++ prettyShow s displayComponent (CD.ComponentBench s) = "-b-" ++ prettyShow s displayComponent CD.ComponentSetup = "-setup" -- | On Windows we have serious problems with path lengths. Windows imposes a -- maximum path length of 260 chars, and even if we can use the windows long -- path APIs ourselves, we cannot guarantee that ghc, gcc, ld, ar, etc etc all -- do so too. -- -- So our only choice is to limit the lengths of the paths, and the only real -- way to do that is to limit the size of the 'InstalledPackageId's that we -- generate. We do this by truncating the package names and versions and also -- by truncating the hash sizes. -- -- Truncating the package names and versions is technically ok because they are -- just included for human convenience, the full source package id is included -- in the hash. 
-- -- Truncating the hash size is disappointing but also technically ok. We -- rely on the hash primarily for collision avoidance not for any security -- properties (at least for now). -- hashedInstalledPackageIdShort :: PackageHashInputs -> InstalledPackageId hashedInstalledPackageIdShort pkghashinputs@PackageHashInputs{pkgHashPkgId} = mkComponentId $ intercalate "-" -- max length now 64 [ truncateStr 14 (prettyShow name) , truncateStr 8 (prettyShow version) , showHashValue (truncateHash 20 (hashPackageHashInputs pkghashinputs)) ] where PackageIdentifier name version = pkgHashPkgId -- Truncate a string, with a visual indication that it is truncated. truncateStr n s | length s <= n = s | otherwise = take (n-1) s ++ "_" -- | On macOS we shorten the name very aggressively. The mach-o linker on -- macOS has a limited load command size, to which the name of the library -- as well as its relative path (\@rpath) entry count. To circumvent this, -- on macOS the libraries are not stored as -- @store//libHS.dylib@ -- where libraryname contains the libraries name, version and abi hash, but in -- @store/lib/libHS.dylib@ -- where the very short library name drops all vowels from the package name, -- and truncates the hash to 4 bytes. -- -- We therefore we only need one \@rpath entry to @store/lib@ instead of one -- \@rpath entry for each library. And the reduced library name saves some -- additional space. -- -- This however has two major drawbacks: -- 1) Packages can collide more easily due to the shortened hash. -- 2) The libraries are *not* prefix relocatable anymore as they all end up -- in the same @store/lib@ folder. -- -- The ultimate solution would have to include generating proxy dynamic -- libraries on macOS, such that the proxy libraries and the linked libraries -- stay under the load command limit, and the recursive linker is still able -- to link all of them. 
hashedInstalledPackageIdVeryShort :: PackageHashInputs -> InstalledPackageId hashedInstalledPackageIdVeryShort pkghashinputs@PackageHashInputs{pkgHashPkgId} = mkComponentId $ intercalate "-" [ filter (not . flip elem "aeiou") (prettyShow name) , prettyShow version , showHashValue (truncateHash 4 (hashPackageHashInputs pkghashinputs)) ] where PackageIdentifier name version = pkgHashPkgId -- | All the information that contributes to a package's hash, and thus its -- 'InstalledPackageId'. -- data PackageHashInputs = PackageHashInputs { pkgHashPkgId :: PackageId, pkgHashComponent :: Maybe CD.Component, pkgHashSourceHash :: PackageSourceHash, pkgHashPkgConfigDeps :: Set (PkgconfigName, Maybe PkgconfigVersion), pkgHashDirectDeps :: Set InstalledPackageId, pkgHashOtherConfig :: PackageHashConfigInputs } type PackageSourceHash = HashValue -- | Those parts of the package configuration that contribute to the -- package hash. -- data PackageHashConfigInputs = PackageHashConfigInputs { pkgHashCompilerId :: CompilerId, pkgHashPlatform :: Platform, pkgHashFlagAssignment :: FlagAssignment, -- complete not partial pkgHashConfigureScriptArgs :: [String], -- just ./configure for build-type Configure pkgHashVanillaLib :: Bool, pkgHashSharedLib :: Bool, pkgHashDynExe :: Bool, pkgHashFullyStaticExe :: Bool, pkgHashGHCiLib :: Bool, pkgHashProfLib :: Bool, pkgHashProfExe :: Bool, pkgHashProfLibDetail :: ProfDetailLevel, pkgHashProfExeDetail :: ProfDetailLevel, pkgHashCoverage :: Bool, pkgHashOptimization :: OptimisationLevel, pkgHashSplitObjs :: Bool, pkgHashSplitSections :: Bool, pkgHashStripLibs :: Bool, pkgHashStripExes :: Bool, pkgHashDebugInfo :: DebugInfoLevel, pkgHashProgramArgs :: Map String [String], pkgHashExtraLibDirs :: [FilePath], pkgHashExtraFrameworkDirs :: [FilePath], pkgHashExtraIncludeDirs :: [FilePath], pkgHashProgPrefix :: Maybe PathTemplate, pkgHashProgSuffix :: Maybe PathTemplate, pkgHashPackageDbs :: [Maybe PackageDB], -- Haddock options pkgHashDocumentation :: 
Bool, pkgHashHaddockHoogle :: Bool, pkgHashHaddockHtml :: Bool, pkgHashHaddockHtmlLocation :: Maybe String, pkgHashHaddockForeignLibs :: Bool, pkgHashHaddockExecutables :: Bool, pkgHashHaddockTestSuites :: Bool, pkgHashHaddockBenchmarks :: Bool, pkgHashHaddockInternal :: Bool, pkgHashHaddockCss :: Maybe FilePath, pkgHashHaddockLinkedSource :: Bool, pkgHashHaddockQuickJump :: Bool, pkgHashHaddockContents :: Maybe PathTemplate -- TODO: [required eventually] pkgHashToolsVersions ? -- TODO: [required eventually] pkgHashToolsExtraOptions ? } deriving Show -- | Calculate the overall hash to be used for an 'InstalledPackageId'. -- hashPackageHashInputs :: PackageHashInputs -> HashValue hashPackageHashInputs = hashValue . renderPackageHashInputs -- | Render a textual representation of the 'PackageHashInputs'. -- -- The 'hashValue' of this text is the overall package hash. -- renderPackageHashInputs :: PackageHashInputs -> LBS.ByteString renderPackageHashInputs PackageHashInputs{ pkgHashPkgId, pkgHashComponent, pkgHashSourceHash, pkgHashDirectDeps, pkgHashPkgConfigDeps, pkgHashOtherConfig = PackageHashConfigInputs{..} } = -- The purpose of this somewhat laboured rendering (e.g. why not just -- use show?) is so that existing package hashes do not change -- unnecessarily when new configuration inputs are added into the hash. -- In particular, the assumption is that when a new configuration input -- is included into the hash, that existing packages will typically get -- the default value for that feature. So if we avoid adding entries with -- the default value then most of the time adding new features will not -- change the hashes of existing packages and so fewer packages will need -- to be rebuilt. --TODO: [nice to have] ultimately we probably want to put this config info -- into the ghc-pkg db. At that point this should probably be changed to -- use the config file infrastructure so it can be read back in again. 
LBS.pack $ unlines $ catMaybes $ [ entry "pkgid" prettyShow pkgHashPkgId , mentry "component" show pkgHashComponent , entry "src" showHashValue pkgHashSourceHash , entry "pkg-config-deps" (intercalate ", " . map (\(pn, mb_v) -> prettyShow pn ++ case mb_v of Nothing -> "" Just v -> " " ++ prettyShow v) . Set.toList) pkgHashPkgConfigDeps , entry "deps" (intercalate ", " . map prettyShow . Set.toList) pkgHashDirectDeps -- and then all the config , entry "compilerid" prettyShow pkgHashCompilerId , entry "platform" prettyShow pkgHashPlatform , opt "flags" mempty showFlagAssignment pkgHashFlagAssignment , opt "configure-script" [] unwords pkgHashConfigureScriptArgs , opt "vanilla-lib" True prettyShow pkgHashVanillaLib , opt "shared-lib" False prettyShow pkgHashSharedLib , opt "dynamic-exe" False prettyShow pkgHashDynExe , opt "fully-static-exe" False prettyShow pkgHashFullyStaticExe , opt "ghci-lib" False prettyShow pkgHashGHCiLib , opt "prof-lib" False prettyShow pkgHashProfLib , opt "prof-exe" False prettyShow pkgHashProfExe , opt "prof-lib-detail" ProfDetailDefault showProfDetailLevel pkgHashProfLibDetail , opt "prof-exe-detail" ProfDetailDefault showProfDetailLevel pkgHashProfExeDetail , opt "hpc" False prettyShow pkgHashCoverage , opt "optimisation" NormalOptimisation (show . fromEnum) pkgHashOptimization , opt "split-objs" False prettyShow pkgHashSplitObjs , opt "split-sections" False prettyShow pkgHashSplitSections , opt "stripped-lib" False prettyShow pkgHashStripLibs , opt "stripped-exe" True prettyShow pkgHashStripExes , opt "debug-info" NormalDebugInfo (show . fromEnum) pkgHashDebugInfo , opt "extra-lib-dirs" [] unwords pkgHashExtraLibDirs , opt "extra-framework-dirs" [] unwords pkgHashExtraFrameworkDirs , opt "extra-include-dirs" [] unwords pkgHashExtraIncludeDirs , opt "prog-prefix" Nothing (maybe "" fromPathTemplate) pkgHashProgPrefix , opt "prog-suffix" Nothing (maybe "" fromPathTemplate) pkgHashProgSuffix , opt "package-dbs" [] (unwords . 
map show) pkgHashPackageDbs , opt "documentation" False prettyShow pkgHashDocumentation , opt "haddock-hoogle" False prettyShow pkgHashHaddockHoogle , opt "haddock-html" False prettyShow pkgHashHaddockHtml , opt "haddock-html-location" Nothing (fromMaybe "") pkgHashHaddockHtmlLocation , opt "haddock-foreign-libraries" False prettyShow pkgHashHaddockForeignLibs , opt "haddock-executables" False prettyShow pkgHashHaddockExecutables , opt "haddock-tests" False prettyShow pkgHashHaddockTestSuites , opt "haddock-benchmarks" False prettyShow pkgHashHaddockBenchmarks , opt "haddock-internal" False prettyShow pkgHashHaddockInternal , opt "haddock-css" Nothing (fromMaybe "") pkgHashHaddockCss , opt "haddock-hyperlink-source" False prettyShow pkgHashHaddockLinkedSource , opt "haddock-quickjump" False prettyShow pkgHashHaddockQuickJump , opt "haddock-contents-location" Nothing (maybe "" fromPathTemplate) pkgHashHaddockContents ] ++ Map.foldrWithKey (\prog args acc -> opt (prog ++ "-options") [] unwords args : acc) [] pkgHashProgramArgs where entry key format value = Just (key ++ ": " ++ format value) mentry key format value = fmap (\v -> key ++ ": " ++ format v) value opt key def format value | value == def = Nothing | otherwise = entry key format value cabal-install-3.8.1.0/src/Distribution/Client/ParseUtils.hs0000644000000000000000000003235007346545000021676 0ustar0000000000000000{-# LANGUAGE ExistentialQuantification, NamedFieldPuns, RankNTypes #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.ParseUtils -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Parsing utilities. 
----------------------------------------------------------------------------- module Distribution.Client.ParseUtils ( -- * Fields and field utilities FieldDescr(..), liftField, liftFields, filterFields, mapFieldNames, commandOptionToField, commandOptionsToFields, -- * Sections and utilities SectionDescr(..), liftSection, -- * FieldGrammar sections FGSectionDescr(..), -- * Parsing and printing flat config parseFields, ppFields, ppSection, -- * Parsing and printing config with sections and subsections parseFieldsAndSections, ppFieldsAndSections, -- ** Top level of config files parseConfig, showConfig, ) where import Distribution.Client.Compat.Prelude hiding (empty, get) import Prelude () import Distribution.Deprecated.ParseUtils ( FieldDescr(..), ParseResult(..), warning, LineNo, lineNo , Field(..), liftField, readFields ) import Distribution.Deprecated.ViewAsFieldDescr ( viewAsFieldDescr ) import Distribution.Simple.Command ( OptionField ) import Text.PrettyPrint ( ($+$) ) import qualified Data.ByteString as BS import qualified Data.Map as Map import qualified Text.PrettyPrint as Disp ( (<>), Doc, text, colon, vcat, empty, isEmpty, nest ) -- For new parser stuff import Distribution.CabalSpecVersion (cabalSpecLatest) import Distribution.FieldGrammar (partitionFields, parseFieldGrammar) import Distribution.Fields.ParseResult (runParseResult) import Distribution.Parsec.Error (showPError) import Distribution.Parsec.Position (Position (..)) import Distribution.Parsec.Warning (showPWarning) import Distribution.Simple.Utils (fromUTF8BS, toUTF8BS) import qualified Distribution.Fields as F import qualified Distribution.FieldGrammar as FG ------------------------- -- FieldDescr utilities -- liftFields :: (b -> a) -> (a -> b -> b) -> [FieldDescr a] -> [FieldDescr b] liftFields get set = map (liftField get set) -- | Given a collection of field descriptions, keep only a given list of them, -- identified by name. 
-- filterFields :: [String] -> [FieldDescr a] -> [FieldDescr a] filterFields includeFields = filter ((`elem` includeFields) . fieldName) -- | Apply a name mangling function to the field names of all the field -- descriptions. The typical use case is to apply some prefix. -- mapFieldNames :: (String -> String) -> [FieldDescr a] -> [FieldDescr a] mapFieldNames mangleName = map (\descr -> descr { fieldName = mangleName (fieldName descr) }) -- | Reuse a command line 'OptionField' as a config file 'FieldDescr'. -- commandOptionToField :: OptionField a -> FieldDescr a commandOptionToField = viewAsFieldDescr -- | Reuse a bunch of command line 'OptionField's as config file 'FieldDescr's. -- commandOptionsToFields :: [OptionField a] -> [FieldDescr a] commandOptionsToFields = map viewAsFieldDescr ------------------------------------------ -- SectionDescr definition and utilities -- -- | The description of a section in a config file. It can contain both -- fields and optionally further subsections. See also 'FieldDescr'. -- data SectionDescr a = forall b. SectionDescr { sectionName :: String, sectionFields :: [FieldDescr b], sectionSubsections :: [SectionDescr b], sectionGet :: a -> [(String, b)], sectionSet :: LineNo -> String -> b -> a -> ParseResult a, sectionEmpty :: b } -- | 'FieldGrammar' section description data FGSectionDescr g a = forall s. FGSectionDescr { fgSectionName :: String , fgSectionGrammar :: g s s -- todo: add subsections? , fgSectionGet :: a -> [(String, s)] , fgSectionSet :: LineNo -> String -> s -> a -> ParseResult a } -- | To help construction of config file descriptions in a modular way it is -- useful to define fields and sections on local types and then hoist them -- into the parent types when combining them in bigger descriptions. -- -- This is essentially a lens operation for 'SectionDescr' to help embedding -- one inside another. 
-- liftSection :: (b -> a) -> (a -> b -> b) -> SectionDescr a -> SectionDescr b liftSection get' set' (SectionDescr name fields sections get set empty) = let sectionGet' = get . get' sectionSet' lineno param x y = do x' <- set lineno param x (get' y) return (set' x' y) in SectionDescr name fields sections sectionGet' sectionSet' empty ------------------------------------- -- Parsing and printing flat config -- -- | Parse a bunch of semi-parsed 'Field's according to a set of field -- descriptions. It accumulates the result on top of a given initial value. -- -- This only covers the case of flat configuration without subsections. See -- also 'parseFieldsAndSections'. -- parseFields :: [FieldDescr a] -> a -> [Field] -> ParseResult a parseFields fieldDescrs = foldM setField where fieldMap = Map.fromList [ (fieldName f, f) | f <- fieldDescrs ] setField accum (F line name value) = case Map.lookup name fieldMap of Just (FieldDescr _ _ set) -> set line value accum Nothing -> do -- the 'world-file' field was removed in 3.8, however -- it was automatically added to many config files -- before that, so its warning is silently ignored unless (name == "world-file") $ warning $ "Unrecognized field " ++ name ++ " on line " ++ show line return accum setField accum f = do warning $ "Unrecognized stanza on line " ++ show (lineNo f) return accum -- | This is a customised version of the functions from Distribution.Deprecated.ParseUtils -- that also optionally print default values for empty fields as comments. -- ppFields :: [FieldDescr a] -> (Maybe a) -> a -> Disp.Doc ppFields fields def cur = Disp.vcat [ ppField name (fmap getter def) (getter cur) | FieldDescr name getter _ <- fields] ppField :: String -> (Maybe Disp.Doc) -> Disp.Doc -> Disp.Doc ppField name mdef cur | Disp.isEmpty cur = maybe Disp.empty (\def -> Disp.text "--" <+> Disp.text name Disp.<> Disp.colon <+> def) mdef | otherwise = Disp.text name Disp.<> Disp.colon <+> cur -- | Pretty print a section. 
-- -- Since 'ppFields' does not cover subsections you can use this to add them. -- Or alternatively use a 'SectionDescr' and use 'ppFieldsAndSections'. -- ppSection :: String -> String -> [FieldDescr a] -> (Maybe a) -> a -> Disp.Doc ppSection name arg fields def cur | Disp.isEmpty fieldsDoc = Disp.empty | otherwise = Disp.text name <+> argDoc $+$ (Disp.nest 2 fieldsDoc) where fieldsDoc = ppFields fields def cur argDoc | arg == "" = Disp.empty | otherwise = Disp.text arg ----------------------------------------- -- Parsing and printing non-flat config -- -- | Much like 'parseFields' but it also allows subsections. The permitted -- subsections are given by a list of 'SectionDescr's. -- parseFieldsAndSections :: [FieldDescr a] -- ^ field -> [SectionDescr a] -- ^ legacy sections -> [FGSectionDescr FG.ParsecFieldGrammar a] -- ^ FieldGrammar sections -> a -> [Field] -> ParseResult a parseFieldsAndSections fieldDescrs sectionDescrs fgSectionDescrs = foldM setField where fieldMap = Map.fromList [ (fieldName f, f) | f <- fieldDescrs ] sectionMap = Map.fromList [ (sectionName s, s) | s <- sectionDescrs ] fgSectionMap = Map.fromList [ (fgSectionName s, s) | s <- fgSectionDescrs ] setField a (F line name value) = case Map.lookup name fieldMap of Just (FieldDescr _ _ set) -> set line value a Nothing -> do warning $ "Unrecognized field '" ++ name ++ "' on line " ++ show line return a setField a (Section line name param fields) = case Left <$> Map.lookup name sectionMap <|> Right <$> Map.lookup name fgSectionMap of Just (Left (SectionDescr _ fieldDescrs' sectionDescrs' _ set sectionEmpty)) -> do b <- parseFieldsAndSections fieldDescrs' sectionDescrs' [] sectionEmpty fields set line param b a Just (Right (FGSectionDescr _ grammar _getter setter)) -> do let fields1 = map convertField fields (fields2, sections) = partitionFields fields1 -- TODO: recurse into sections for_ (concat sections) $ \(FG.MkSection (F.Name (Position line' _) name') _ _) -> warning $ "Unrecognized section '" 
++ fromUTF8BS name' ++ "' on line " ++ show line' case runParseResult $ parseFieldGrammar cabalSpecLatest fields2 grammar of (warnings, Right b) -> do for_ warnings $ \w -> warning $ showPWarning "???" w setter line param b a (warnings, Left (_, errs)) -> do for_ warnings $ \w -> warning $ showPWarning "???" w case errs of err :| _errs -> fail $ showPError "???" err Nothing -> do warning $ "Unrecognized section '" ++ name ++ "' on line " ++ show line return a convertField :: Field -> F.Field Position convertField (F line name str) = F.Field (F.Name pos (toUTF8BS name)) [ F.FieldLine pos $ toUTF8BS str ] where pos = Position line 0 -- arguments omitted convertField (Section line name _arg fields) = F.Section (F.Name pos (toUTF8BS name)) [] (map convertField fields) where pos = Position line 0 -- | Much like 'ppFields' but also pretty prints any subsections. Subsection -- are only shown if they are non-empty. -- -- Note that unlike 'ppFields', at present it does not support printing -- default values. If needed, adding such support would be quite reasonable. -- ppFieldsAndSections :: [FieldDescr a] -> [SectionDescr a] -> [FGSectionDescr FG.PrettyFieldGrammar a] -> a -> Disp.Doc ppFieldsAndSections fieldDescrs sectionDescrs fgSectionDescrs val = ppFields fieldDescrs Nothing val $+$ Disp.vcat ( [ Disp.text "" $+$ sectionDoc | SectionDescr { sectionName, sectionGet, sectionFields, sectionSubsections } <- sectionDescrs , (param, x) <- sectionGet val , let sectionDoc = ppSectionAndSubsections sectionName param sectionFields sectionSubsections [] x , not (Disp.isEmpty sectionDoc) ] ++ [ Disp.text "" $+$ sectionDoc | FGSectionDescr { fgSectionName, fgSectionGrammar, fgSectionGet } <- fgSectionDescrs , (param, x) <- fgSectionGet val , let sectionDoc = ppFgSection fgSectionName param fgSectionGrammar x , not (Disp.isEmpty sectionDoc) ]) -- | Unlike 'ppSection' which has to be called directly, this gets used via -- 'ppFieldsAndSections' and so does not need to be exported. 
-- ppSectionAndSubsections :: String -> String -> [FieldDescr a] -> [SectionDescr a] -> [FGSectionDescr FG.PrettyFieldGrammar a] -> a -> Disp.Doc ppSectionAndSubsections name arg fields sections fgSections cur | Disp.isEmpty fieldsDoc = Disp.empty | otherwise = Disp.text name <+> argDoc $+$ (Disp.nest 2 fieldsDoc) where fieldsDoc = showConfig fields sections fgSections cur argDoc | arg == "" = Disp.empty | otherwise = Disp.text arg -- | -- -- TODO: subsections -- TODO: this should simply build 'PrettyField' ppFgSection :: String -- ^ section name -> String -- ^ parameter -> FG.PrettyFieldGrammar a a -> a -> Disp.Doc ppFgSection secName arg grammar x | null prettyFields = Disp.empty | otherwise = Disp.text secName <+> argDoc $+$ (Disp.nest 2 fieldsDoc) where prettyFields = FG.prettyFieldGrammar cabalSpecLatest grammar x argDoc | arg == "" = Disp.empty | otherwise = Disp.text arg fieldsDoc = Disp.vcat [ Disp.text fname' <<>> Disp.colon <<>> doc | F.PrettyField _ fname doc <- prettyFields -- TODO: this skips sections , let fname' = fromUTF8BS fname ] ----------------------------------------------- -- Top level config file parsing and printing -- -- | Parse a string in the config file syntax into a value, based on a -- description of the configuration file in terms of its fields and sections. -- -- It accumulates the result on top of a given initial (typically empty) value. -- parseConfig :: [FieldDescr a] -> [SectionDescr a] -> [FGSectionDescr FG.ParsecFieldGrammar a] -> a -> BS.ByteString -> ParseResult a parseConfig fieldDescrs sectionDescrs fgSectionDescrs empty str = parseFieldsAndSections fieldDescrs sectionDescrs fgSectionDescrs empty =<< readFields str -- | Render a value in the config file syntax, based on a description of the -- configuration file in terms of its fields and sections. 
-- showConfig :: [FieldDescr a] -> [SectionDescr a] -> [FGSectionDescr FG.PrettyFieldGrammar a] -> a -> Disp.Doc showConfig = ppFieldsAndSections cabal-install-3.8.1.0/src/Distribution/Client/ProjectBuilding.hs0000644000000000000000000017773307346545000022706 0ustar0000000000000000{-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE NoMonoLocalBinds #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeFamilies #-} -- | -- module Distribution.Client.ProjectBuilding ( -- * Dry run phase -- | What bits of the plan will we execute? The dry run does not change -- anything but tells us what will need to be built. rebuildTargetsDryRun, improveInstallPlanWithUpToDatePackages, -- ** Build status -- | This is the detailed status information we get from the dry run. BuildStatusMap, BuildStatus(..), BuildStatusRebuild(..), BuildReason(..), MonitorChangedReason(..), buildStatusToString, -- * Build phase -- | Now we actually execute the plan. rebuildTargets, -- ** Build outcomes -- | This is the outcome for each package of executing the plan. -- For each package, did the build succeed or fail? 
BuildOutcomes, BuildOutcome, BuildResult(..), BuildFailure(..), BuildFailureReason(..), ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.PackageHash (renderPackageHashInputs) import Distribution.Client.RebuildMonad import Distribution.Client.ProjectConfig import Distribution.Client.ProjectPlanning import Distribution.Client.ProjectPlanning.Types import Distribution.Client.ProjectBuilding.Types import Distribution.Client.Store import Distribution.Client.Types hiding (BuildOutcomes, BuildOutcome, BuildResult(..), BuildFailure(..)) import Distribution.Client.InstallPlan ( GenericInstallPlan, GenericPlanPackage, IsUnit ) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.DistDirLayout import Distribution.Client.FileMonitor import Distribution.Client.SetupWrapper import Distribution.Client.JobControl import Distribution.Client.FetchUtils import Distribution.Client.GlobalFlags (RepoContext) import qualified Distribution.Client.Tar as Tar import Distribution.Client.Setup ( filterConfigureFlags, filterHaddockArgs , filterHaddockFlags, filterTestFlags ) import Distribution.Client.SourceFiles import Distribution.Client.SrcDist (allPackageSourceFiles) import Distribution.Client.Utils ( ProgressPhase(..), findOpenProgramLocation, progressMessage, removeExistingFile ) import Distribution.Compat.Lens import Distribution.Package import qualified Distribution.PackageDescription as PD import Distribution.InstalledPackageInfo (InstalledPackageInfo) import qualified Distribution.InstalledPackageInfo as Installed import Distribution.Simple.BuildPaths (haddockDirName) import qualified Distribution.Simple.InstallDirs as InstallDirs import Distribution.Types.BuildType import Distribution.Types.PackageDescription.Lens (componentModules) import Distribution.Simple.Program import qualified Distribution.Simple.Setup as Cabal import Distribution.Simple.Command (CommandUI) import qualified 
Distribution.Simple.Register as Cabal import Distribution.Simple.LocalBuildInfo ( ComponentName(..), LibraryName(..) ) import Distribution.Simple.Compiler ( Compiler, compilerId, PackageDB(..) ) import Distribution.Simple.Utils import Distribution.Version import Distribution.Compat.Graph (IsNode(..)) import qualified Data.List.NonEmpty as NE import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.ByteString as BS import qualified Data.ByteString.Lazy as LBS import qualified Data.ByteString.Lazy.Char8 as LBS.Char8 import Control.Exception (Handler (..), SomeAsyncException, assert, catches, handle) import System.Directory (canonicalizePath, createDirectoryIfMissing, doesDirectoryExist, doesFileExist, removeFile, renameDirectory) import System.FilePath (dropDrive, makeRelative, normalise, takeDirectory, (<.>), ()) import System.IO (IOMode (AppendMode), Handle, withFile) import Distribution.Compat.Directory (listDirectory) ------------------------------------------------------------------------------ -- * Overall building strategy. ------------------------------------------------------------------------------ -- -- We start with an 'ElaboratedInstallPlan' that has already been improved by -- reusing packages from the store, and pruned to include only the targets of -- interest and their dependencies. So the remaining packages in the -- 'InstallPlan.Configured' state are ones we either need to build or rebuild. -- -- First, we do a preliminary dry run phase where we work out which packages -- we really need to (re)build, and for the ones we do need to build which -- build phase to start at. -- -- We use this to improve the 'ElaboratedInstallPlan' again by changing -- up-to-date 'InstallPlan.Configured' packages to 'InstallPlan.Installed' -- so that the build phase will skip them. -- -- Then we execute the plan, that is actually build packages. The outcomes of -- trying to build all the packages are collected and returned. 
--
-- We split things like this (dry run and execute) for a couple of reasons.
-- Firstly we need to be able to do dry runs anyway, and these need to be
-- reasonably accurate in terms of letting users know what (and why) things
-- are going to be (re)built.
--
-- Given that we need to be able to do dry runs, it would not be great if
-- we had to repeat all the same work when we do it for real. Not only is
-- it duplicate work, but it's duplicate code which is likely to get out of
-- sync. So we do things only once. We preserve info we discover in the dry
-- run phase and rely on it later when we build things for real. This also
-- somewhat simplifies the build phase. So this way the dry run can't so
-- easily drift out of sync with the real thing since we're relying on the
-- info it produces.
--
-- An additional advantage is that it makes it easier to debug rebuild
-- errors (i.e. rebuilding too much or too little), since all the rebuild
-- decisions are made without making any state changes at the same time
-- (that would make it harder to reproduce the problem situation).
--
-- Finally, we can use the dry run build status and the build outcomes to
-- give us some information on the overall status of packages in the project.
-- This includes limited information about the status of things that were
-- not actually in the subset of the plan that was used for the dry run or
-- execution phases. In particular we may know that some packages are now
-- definitely out of date. See "Distribution.Client.ProjectPlanOutput" for
-- details.


------------------------------------------------------------------------------
-- * Dry run: what bits of the 'ElaboratedInstallPlan' will we execute?
------------------------------------------------------------------------------

-- Refer to ProjectBuilding.Types for details of these important types:

-- type BuildStatusMap = ...
-- data BuildStatus = ...
-- data BuildStatusRebuild = ...
-- data BuildReason = ...

-- | Do the dry run pass.
This is a prerequisite of 'rebuildTargets'.
--
-- It gives us the 'BuildStatusMap'. This should be used with
-- 'improveInstallPlanWithUpToDatePackages' to give an improved version of
-- the 'ElaboratedInstallPlan' with packages switched to the
-- 'InstallPlan.Installed' state when we find that they're already up to date.
--
rebuildTargetsDryRun :: DistDirLayout
                     -> ElaboratedSharedConfig
                     -> ElaboratedInstallPlan
                     -> IO BuildStatusMap
rebuildTargetsDryRun distDirLayout@DistDirLayout{..} shared =
    -- Do the various checks to work out the 'BuildStatus' of each package
    foldMInstallPlanDepOrder dryRunPkg
  where
    -- Work out the 'BuildStatus' of a single plan package, given the statuses
    -- already computed for its immediate dependencies.
    dryRunPkg :: ElaboratedPlanPackage
              -> [BuildStatus]
              -> IO BuildStatus
    dryRunPkg (InstallPlan.PreExisting _pkg) _depsBuildStatus =
      return BuildStatusPreExisting

    dryRunPkg (InstallPlan.Installed _pkg) _depsBuildStatus =
      return BuildStatusInstalled

    dryRunPkg (InstallPlan.Configured pkg) depsBuildStatus = do
      mloc <- checkFetched (elabPkgSourceLocation pkg)
      case mloc of
        -- Not fetched yet: it will have to be downloaded first.
        Nothing -> return BuildStatusDownload

        Just (LocalUnpackedPackage srcdir) ->
          -- For the case of a user-managed local dir, irrespective of the
          -- build style, we build from that directory and put build
          -- artifacts under the shared dist directory.
          dryRunLocalPkg pkg depsBuildStatus srcdir

        -- The remaining cases are all tarball cases, and are handled the same
        -- as each other, though depending on the build style.
        Just (LocalTarballPackage tarball) ->
          dryRunTarballPkg pkg depsBuildStatus tarball

        Just (RemoteTarballPackage _ tarball) ->
          dryRunTarballPkg pkg depsBuildStatus tarball

        Just (RepoTarballPackage _ _ tarball) ->
          dryRunTarballPkg pkg depsBuildStatus tarball

        Just (RemoteSourceRepoPackage _repo tarball) ->
          dryRunTarballPkg pkg depsBuildStatus tarball

    -- For tarballs: a 'BuildAndInstall' package always starts from the unpack
    -- phase; a 'BuildInplaceOnly' package is treated like a local package if
    -- its unpack directory already exists.
    dryRunTarballPkg :: ElaboratedConfiguredPackage
                     -> [BuildStatus]
                     -> FilePath
                     -> IO BuildStatus
    dryRunTarballPkg pkg depsBuildStatus tarball =
      case elabBuildStyle pkg of
        BuildAndInstall  -> return (BuildStatusUnpack tarball)
        BuildInplaceOnly -> do
          -- TODO: [nice to have] use a proper file monitor rather
          -- than this dir exists test
          exists <- doesDirectoryExist srcdir
          if exists
            then dryRunLocalPkg pkg depsBuildStatus srcdir
            else return (BuildStatusUnpack tarball)
      where
        srcdir :: FilePath
        srcdir = distUnpackedSrcDirectory (packageId pkg)

    -- For a package with sources already on disk: consult the file monitors
    -- to decide whether (and from which phase) it needs rebuilding.
    dryRunLocalPkg :: ElaboratedConfiguredPackage
                   -> [BuildStatus]
                   -> FilePath
                   -> IO BuildStatus
    dryRunLocalPkg pkg depsBuildStatus srcdir = do
        -- Go and do lots of I/O, reading caches and probing files to work out
        -- if anything has changed
        change <- checkPackageFileMonitorChanged
                    packageFileMonitor pkg srcdir depsBuildStatus
        case change of
          -- It did change, giving us 'BuildStatusRebuild' info on why
          Left rebuild ->
            return (BuildStatusRebuild srcdir rebuild)

          -- No changes, the package is up to date. Use the saved build results.
          Right buildResult ->
            return (BuildStatusUpToDate buildResult)
      where
        packageFileMonitor :: PackageFileMonitor
        packageFileMonitor =
          newPackageFileMonitor shared distDirLayout
            (elabDistDirParams shared pkg)


-- | A specialised traversal over the packages in an install plan.
--
-- The packages are visited in dependency order, starting with packages with no
-- dependencies. The result for each package is accumulated into a 'Map' and
-- returned as the final result.
In addition, when visiting a package, the
-- visiting function is passed the results for all the immediate package
-- dependencies. This can be used to propagate information from dependencies.
--
foldMInstallPlanDepOrder
  :: forall m ipkg srcpkg b.
     (Monad m, IsUnit ipkg, IsUnit srcpkg)
  => (GenericPlanPackage ipkg srcpkg -> [b] -> m b)
  -> GenericInstallPlan ipkg srcpkg
  -> m (Map UnitId b)
foldMInstallPlanDepOrder visit plan =
    loop Map.empty (InstallPlan.reverseTopologicalOrder plan)
  where
    -- Walk the packages in dependency order, threading a strict accumulator
    -- map of the per-package results computed so far.
    loop :: Map UnitId b
         -> [GenericPlanPackage ipkg srcpkg]
         -> m (Map UnitId b)
    loop !acc []           = return acc
    loop !acc (pkg : rest) = do
      -- Visiting in dependency order guarantees that every dependency of
      -- 'pkg' already has an entry in the accumulator.
      let lookupDep depid =
            Map.findWithDefault (error "foldMInstallPlanDepOrder") depid acc
          depResults :: [b]
          depResults = map lookupDep (InstallPlan.depends pkg)
      res <- visit pkg depResults
      loop (Map.insert (nodeKey pkg) res acc) rest

-- | Flip every configured package whose dry-run status says it is already
-- up to date into the 'InstallPlan.Installed' state, so the execution phase
-- skips it.
improveInstallPlanWithUpToDatePackages :: BuildStatusMap
                                       -> ElaboratedInstallPlan
                                       -> ElaboratedInstallPlan
improveInstallPlanWithUpToDatePackages pkgsBuildStatus =
    InstallPlan.installed isUpToDate
  where
    -- A package may be promoted exactly when the dry run recorded it as
    -- up to date; every configured package must appear in the status map.
    isUpToDate :: ElaboratedConfiguredPackage -> Bool
    isUpToDate pkg =
      case Map.lookup (installedUnitId pkg) pkgsBuildStatus of
        Just BuildStatusUpToDate {} -> True
        Just _                      -> False
        Nothing ->
          error $ "improveInstallPlanWithUpToDatePackages: "
               ++ prettyShow (packageId pkg) ++ " not in status map"


-----------------------------
-- Package change detection
--

-- | As part of the dry run for local unpacked packages we have to check if the
-- package config or files have changed. That is the purpose of
-- 'PackageFileMonitor' and 'checkPackageFileMonitorChanged'.
--
-- When a package is (re)built, the monitor must be updated to reflect the new
-- state of the package.
Because we sometimes build without reconfiguring the
-- state updates are split into two, one for package config changes and one
-- for other changes. This is the purpose of 'updatePackageConfigFileMonitor'
-- and 'updatePackageBuildFileMonitor'.
--
data PackageFileMonitor = PackageFileMonitor {
       -- Guards the configure step: key is the (nulled-out) package config.
       pkgFileMonitorConfig :: FileMonitor ElaboratedConfiguredPackage (),
       -- Guards the build step: key is the set of components to build.
       pkgFileMonitorBuild  :: FileMonitor (Set ComponentName) BuildResultMisc,
       -- Guards registration: caches the registration result, if any.
       pkgFileMonitorReg    :: FileMonitor () (Maybe InstalledPackageInfo)
     }

-- | This is all the components of the 'BuildResult' other than the
-- @['InstalledPackageInfo']@.
--
-- We have to split up the 'BuildResult' components since they get produced
-- at different times (or rather, when different things change).
--
type BuildResultMisc = (DocsResult, TestsResult)

-- | Construct the three file monitors for a package, with their cache files
-- placed under the per-package dist cache directory.
newPackageFileMonitor :: ElaboratedSharedConfig -> DistDirLayout
                      -> DistDirParams -> PackageFileMonitor
newPackageFileMonitor shared
                      DistDirLayout{distPackageCacheFile}
                      dparams =
    PackageFileMonitor {
      pkgFileMonitorConfig =
        FileMonitor {
          fileMonitorCacheFile = distPackageCacheFile dparams "config",
          -- Compare configs modulo normalisation, so irrelevant differences
          -- do not trigger a reconfigure.
          fileMonitorKeyValid =
            (==) `on` normaliseConfiguredPackage shared,
          fileMonitorCheckIfOnlyValueChanged = False
        },

      pkgFileMonitorBuild =
        FileMonitor {
          fileMonitorCacheFile = distPackageCacheFile dparams "build",
          -- The build is still valid if everything we want to build now has
          -- already been built, i.e. a subset of what was built before.
          fileMonitorKeyValid = \componentsToBuild componentsAlreadyBuilt ->
            componentsToBuild `Set.isSubsetOf` componentsAlreadyBuilt,
          fileMonitorCheckIfOnlyValueChanged = True
        },

      pkgFileMonitorReg =
        newFileMonitor (distPackageCacheFile dparams "registration")
    }

-- | Helper function for 'checkPackageFileMonitorChanged',
-- 'updatePackageConfigFileMonitor' and 'updatePackageBuildFileMonitor'.
--
-- It selects the info from a 'ElaboratedConfiguredPackage' that are used by
-- the 'FileMonitor's (in the 'PackageFileMonitor') to detect value changes.
--
packageFileMonitorKeyValues :: ElaboratedConfiguredPackage
                            -> (ElaboratedConfiguredPackage, Set ComponentName)
packageFileMonitorKeyValues elab =
    (elab_config, buildComponents)
  where
    -- The first part is the value used to guard (re)configuring the package.
    -- That is, if this value changes then we will reconfigure.
    -- The ElaboratedConfiguredPackage consists mostly (but not entirely) of
    -- information that affects the (re)configure step. But those parts that
    -- do not affect the configure step need to be nulled out. Those parts are
    -- the specific targets that we're going to build.
    --
    -- Additionally we null out the parts that don't affect the configure step
    -- because they're simply about how tests or benchmarks are run.
    --
    -- TODO there may be more things to null here too, in the future.
    elab_config :: ElaboratedConfiguredPackage
    elab_config =
        elab {
          elabBuildTargets   = [],
          elabTestTargets    = [],
          elabBenchTargets   = [],
          elabReplTarget     = Nothing,
          elabHaddockTargets = [],
          elabBuildHaddocks  = False,

          elabTestMachineLog   = Nothing,
          elabTestHumanLog     = Nothing,
          elabTestShowDetails  = Nothing,
          elabTestKeepTix      = False,
          elabTestTestOptions  = [],
          elabBenchmarkOptions = []
        }

    -- The second part is the value used to guard the build step. So this is
    -- more or less the opposite of the first part, as it's just the info about
    -- what targets we're going to build.
    --
    buildComponents :: Set ComponentName
    buildComponents = elabBuildTargetWholeComponents elab

-- | Do all the checks on whether a package has changed and thus needs either
-- rebuilding or reconfiguring and rebuilding.
--
checkPackageFileMonitorChanged :: PackageFileMonitor
                               -> ElaboratedConfiguredPackage
                               -> FilePath
                               -> [BuildStatus]
                               -> IO (Either BuildStatusRebuild BuildResult)
checkPackageFileMonitorChanged PackageFileMonitor{..}
                               pkg srcdir depsBuildStatus = do
    --TODO: [nice to have] some debug-level message about file
    --changes, like rerunIfChanged
    configChanged <- checkFileMonitorChanged
                       pkgFileMonitorConfig srcdir pkgconfig
    case configChanged of
      -- The config monitor fired: a reconfigure (and everything after) is
      -- needed. We drop the monitor's payload, only keeping the reason.
      MonitorChanged monitorReason ->
          return (Left (BuildStatusConfigure monitorReason'))
        where
          monitorReason' = fmap (const ()) monitorReason

      MonitorUnchanged () _
          -- The configChanged here includes the identity of the dependencies,
          -- so depsBuildStatus is just needed for the changes in the content
          -- of dependencies.
        | any buildStatusRequiresBuild depsBuildStatus -> do
            regChanged <- checkFileMonitorChanged pkgFileMonitorReg srcdir ()
            let mreg = changedToMaybe regChanged
            return (Left (BuildStatusBuild mreg BuildReasonDepsRebuilt))

        | otherwise -> do
            -- Config is unchanged and no dep rebuilt: check the build and
            -- registration monitors to decide between rebuild and up-to-date.
            buildChanged  <- checkFileMonitorChanged
                               pkgFileMonitorBuild srcdir buildComponents
            regChanged    <- checkFileMonitorChanged
                               pkgFileMonitorReg srcdir ()
            let mreg = changedToMaybe regChanged
            case (buildChanged, regChanged) of
              -- Only the set of requested components grew: build the extras.
              (MonitorChanged (MonitoredValueChanged prevBuildComponents), _) ->
                  return (Left (BuildStatusBuild mreg buildReason))
                where
                  buildReason = BuildReasonExtraTargets prevBuildComponents

              (MonitorChanged monitorReason, _) ->
                  return (Left (BuildStatusBuild mreg buildReason))
                where
                  buildReason    = BuildReasonFilesChanged monitorReason'
                  monitorReason' = fmap (const ()) monitorReason

              (MonitorUnchanged _ _, MonitorChanged monitorReason) ->
                -- this should only happen if the file is corrupt or been
                -- manually deleted. We don't want to bother with another
                -- phase just for this, so we'll reregister by doing a build.
                  return (Left (BuildStatusBuild Nothing buildReason))
                where
                  buildReason    = BuildReasonFilesChanged monitorReason'
                  monitorReason' = fmap (const ()) monitorReason

              -- Ephemeral targets (e.g. repl) are never recorded as built,
              -- so they always force a build even when nothing changed.
              (MonitorUnchanged _ _, MonitorUnchanged _ _)
                | pkgHasEphemeralBuildTargets pkg ->
                  return (Left (BuildStatusBuild mreg buildReason))
                where
                  buildReason = BuildReasonEphemeralTargets

              -- Nothing changed at all: reuse the saved build results.
              (MonitorUnchanged buildResult _, MonitorUnchanged _ _) ->
                  return $ Right BuildResult {
                    buildResultDocs    = docsResult,
                    buildResultTests   = testsResult,
                    buildResultLogFile = Nothing
                  }
                where
                  (docsResult, testsResult) = buildResult
  where
    (pkgconfig, buildComponents) = packageFileMonitorKeyValues pkg

-- | Extract the cached value from a monitor check result, if it is still
-- valid.
changedToMaybe :: MonitorChanged a b -> Maybe b
changedToMaybe (MonitorChanged     _) = Nothing
changedToMaybe (MonitorUnchanged x _) = Just x


-- | Record a successful (re)configure in the config file monitor.
updatePackageConfigFileMonitor :: PackageFileMonitor
                               -> FilePath
                               -> ElaboratedConfiguredPackage
                               -> IO ()
updatePackageConfigFileMonitor PackageFileMonitor{pkgFileMonitorConfig}
                               srcdir pkg =
    updateFileMonitor pkgFileMonitorConfig srcdir Nothing
                      [] pkgconfig ()
  where
    (pkgconfig, _buildComponents) = packageFileMonitorKeyValues pkg

-- | Record a successful build in the build file monitor, remembering which
-- components have now been built.
updatePackageBuildFileMonitor :: PackageFileMonitor
                              -> FilePath
                              -> MonitorTimestamp
                              -> ElaboratedConfiguredPackage
                              -> BuildStatusRebuild
                              -> [MonitorFilePath]
                              -> BuildResultMisc
                              -> IO ()
updatePackageBuildFileMonitor PackageFileMonitor{pkgFileMonitorBuild}
                              srcdir timestamp pkg pkgBuildStatus
                              monitors buildResult =
    updateFileMonitor pkgFileMonitorBuild srcdir (Just timestamp)
                      monitors buildComponents' buildResult
  where
    (_pkgconfig, buildComponents) = packageFileMonitorKeyValues pkg

    -- If the only thing that's changed is that we're now building extra
    -- components, then we can avoid later unnecessary rebuilds by saving the
    -- total set of components that have been built, namely the union of the
    -- existing ones plus the new ones. If files also changed this would be
    -- the wrong thing to do. Note that we rely on the
    -- fileMonitorCheckIfOnlyValueChanged = True mode to get this guarantee
    -- that it's /only/ the value that changed not any files that changed.
    buildComponents' =
      case pkgBuildStatus of
        BuildStatusBuild _ (BuildReasonExtraTargets prevBuildComponents)
          -> buildComponents `Set.union` prevBuildComponents
        _ -> buildComponents

-- | Record the registration outcome in the registration file monitor.
updatePackageRegFileMonitor :: PackageFileMonitor
                            -> FilePath
                            -> Maybe InstalledPackageInfo
                            -> IO ()
updatePackageRegFileMonitor PackageFileMonitor{pkgFileMonitorReg}
                            srcdir mipkg =
    updateFileMonitor pkgFileMonitorReg srcdir Nothing
                      [] () mipkg

-- | Drop the registration monitor's cache so the next check reports a change.
invalidatePackageRegFileMonitor :: PackageFileMonitor -> IO ()
invalidatePackageRegFileMonitor PackageFileMonitor{pkgFileMonitorReg} =
    removeExistingFile (fileMonitorCacheFile pkgFileMonitorReg)


------------------------------------------------------------------------------
-- * Doing it: executing an 'ElaboratedInstallPlan'
------------------------------------------------------------------------------

-- Refer to ProjectBuilding.Types for details of these important types:

-- type BuildOutcomes = ...
-- type BuildOutcome = ...
-- data BuildResult = ...
-- data BuildFailure = ...
-- data BuildFailureReason = ...

-- | Build things for real.
--
-- It requires the 'BuildStatusMap' gathered by 'rebuildTargetsDryRun'.
--
rebuildTargets :: Verbosity
               -> DistDirLayout
               -> StoreDirLayout
               -> ElaboratedInstallPlan
               -> ElaboratedSharedConfig
               -> BuildStatusMap
               -> BuildTimeSettings
               -> IO BuildOutcomes
rebuildTargets verbosity
               distDirLayout@DistDirLayout{..}
               storeDirLayout
               installPlan
               sharedPackageConfig@ElaboratedSharedConfig {
                 pkgConfigCompiler      = compiler,
                 pkgConfigCompilerProgs = progdb
               }
               pkgsBuildStatus
               buildSettings@BuildTimeSettings{
                 buildSettingNumJobs,
                 buildSettingKeepGoing
               } = do

    -- Concurrency control: create the job controller and concurrency limits
    -- for downloading, building and installing.
    jobControl   <- if isParallelBuild
                      then newParallelJobControl buildSettingNumJobs
                      else newSerialJobControl
    registerLock <- newLock -- serialise registration
    cacheLock    <- newLock -- serialise access to setup exe cache
                            --TODO: [code cleanup] eliminate setup exe cache

    debug verbosity $
        "Executing install plan "
     ++ if isParallelBuild
          then " in parallel using " ++ show buildSettingNumJobs ++ " threads."
          else " serially."

    createDirectoryIfMissingVerbose verbosity True distBuildRootDirectory
    createDirectoryIfMissingVerbose verbosity True distTempDirectory
    traverse_ (createPackageDBIfMissing verbosity compiler progdb)
              packageDBsToUse

    -- Before traversing the install plan, preemptively find all packages that
    -- will need to be downloaded and start downloading them.
    asyncDownloadPackages verbosity withRepoCtx
                          installPlan pkgsBuildStatus $ \downloadMap ->

      -- For each package in the plan, in dependency order, but in parallel...
      InstallPlan.execute jobControl keepGoing
                          (BuildFailure Nothing . DependentFailed . packageId)
                          installPlan $ \pkg ->
        --TODO: review exception handling
        handle (\(e :: BuildFailure) -> return (Left e)) $ fmap Right $
          let uid = installedUnitId pkg
              pkgBuildStatus =
                Map.findWithDefault (error "rebuildTargets") uid pkgsBuildStatus
          in
          rebuildTarget
            verbosity
            distDirLayout
            storeDirLayout
            buildSettings downloadMap
            registerLock cacheLock
            sharedPackageConfig
            installPlan pkg
            pkgBuildStatus
  where
    isParallelBuild = buildSettingNumJobs >= 2
    keepGoing       = buildSettingKeepGoing
    withRepoCtx     = projectConfigWithBuilderRepoContext verbosity
                        buildSettings
    packageDBsToUse = -- all the package dbs we may need to create
      (Set.toList . Set.fromList)
        [ pkgdb
        | InstallPlan.Configured elab <- InstallPlan.toList installPlan
        , pkgdb <- concat [ elabBuildPackageDBStack elab
                          , elabRegisterPackageDBStack elab
                          , elabSetupPackageDBStack elab ]
        ]

-- | Create a package DB if it does not currently exist. Note that this action
-- is /not/ safe to run concurrently.
--
createPackageDBIfMissing :: Verbosity -> Compiler -> ProgramDb
                         -> PackageDB -> IO ()
createPackageDBIfMissing verbosity compiler progdb
                         (SpecificPackageDB dbPath) = do
    exists <- Cabal.doesPackageDBExist dbPath
    unless exists $ do
      createDirectoryIfMissingVerbose verbosity True (takeDirectory dbPath)
      Cabal.createPackageDB verbosity compiler progdb False dbPath
-- Global/user DBs are managed by the compiler installation; nothing to do.
createPackageDBIfMissing _ _ _ _ = return ()


-- | Given all the context and resources, (re)build an individual package.
--
rebuildTarget :: Verbosity
              -> DistDirLayout
              -> StoreDirLayout
              -> BuildTimeSettings
              -> AsyncFetchMap
              -> Lock -> Lock
              -> ElaboratedSharedConfig
              -> ElaboratedInstallPlan
              -> ElaboratedReadyPackage
              -> BuildStatus
              -> IO BuildResult
rebuildTarget verbosity
              distDirLayout@DistDirLayout{distBuildDirectory}
              storeDirLayout
              buildSettings downloadMap
              registerLock cacheLock
              sharedPackageConfig
              plan rpkg@(ReadyPackage pkg)
              pkgBuildStatus

    -- Technically, doing the --only-download filtering only in this function is
    -- not perfect. We could also prune the plan at an earlier stage, like it's
    -- done with --only-dependencies. But...
    --   * the benefit would be minimal (practically just avoiding to print the
    --     "requires build" parts of the plan)
    --   * we currently don't have easy access to the BuildStatus of packages
    --     in the pruning phase
    --   * we still have to check it here to avoid performing successive phases
  | buildSettingOnlyDownload buildSettings = do
      case pkgBuildStatus of
        BuildStatusDownload ->
          void $ waitAsyncPackageDownload verbosity downloadMap pkg
        _ -> return ()
      return $ BuildResult DocsNotTried TestsNotTried Nothing

  | otherwise =
    -- We rely on the 'BuildStatus' to decide which phase to start from:
    case pkgBuildStatus of
      BuildStatusDownload              -> downloadPhase
      BuildStatusUnpack tarball        -> unpackTarballPhase tarball
      BuildStatusRebuild srcdir status -> rebuildPhase status srcdir

      -- TODO: perhaps re-nest the types to make these impossible
      BuildStatusPreExisting {} -> unexpectedState
      BuildStatusInstalled   {} -> unexpectedState
      BuildStatusUpToDate    {} -> unexpectedState
  where
    unexpectedState = error "rebuildTarget: unexpected package status"

    -- Wait for the in-flight download, then continue with the unpack phase.
    downloadPhase :: IO BuildResult
    downloadPhase = do
        downsrcloc <- annotateFailureNoLog DownloadFailed $
                        waitAsyncPackageDownload verbosity downloadMap pkg
        case downsrcloc of
          DownloadedTarball tarball -> unpackTarballPhase tarball
          --TODO: [nice to have] git/darcs repos etc

    unpackTarballPhase :: FilePath -> IO BuildResult
    unpackTarballPhase tarball =
        withTarballLocalDirectory
          verbosity distDirLayout tarball
          (packageId pkg) (elabDistDirParams sharedPackageConfig pkg)
          (elabBuildStyle pkg)
          (elabPkgDescriptionOverride pkg) $

          case elabBuildStyle pkg of
            BuildAndInstall  -> buildAndInstall
            BuildInplaceOnly -> buildInplace buildStatus
              where
                buildStatus = BuildStatusConfigure MonitorFirstRun

    -- Note that this really is rebuild, not build. It can only happen for
    -- 'BuildInplaceOnly' style packages. 'BuildAndInstall' style packages
    -- would only start from download or unpack phases.
    --
    rebuildPhase :: BuildStatusRebuild -> FilePath -> IO BuildResult
    rebuildPhase buildStatus srcdir =
        assert (elabBuildStyle pkg == BuildInplaceOnly) $

        buildInplace buildStatus srcdir builddir
      where
        builddir = distBuildDirectory
                     (elabDistDirParams sharedPackageConfig pkg)

    buildAndInstall :: FilePath -> FilePath -> IO BuildResult
    buildAndInstall srcdir builddir =
        buildAndInstallUnpackedPackage
          verbosity distDirLayout storeDirLayout
          buildSettings registerLock cacheLock
          sharedPackageConfig
          plan rpkg
          srcdir builddir'
      where
        builddir' = makeRelative srcdir builddir
        --TODO: [nice to have] ^^ do this relative stuff better

    buildInplace :: BuildStatusRebuild -> FilePath -> FilePath
                 -> IO BuildResult
    buildInplace buildStatus srcdir builddir =
        --TODO: [nice to have] use a relative build dir rather than absolute
        buildInplaceUnpackedPackage
          verbosity distDirLayout
          buildSettings registerLock cacheLock
          sharedPackageConfig
          plan rpkg
          buildStatus
          srcdir builddir

-- TODO: [nice to have] do we need to use a with-style for the temp
-- files for downloading http packages, or are we going to cache them
-- persistently?

-- | Given the current 'InstallPlan' and 'BuildStatusMap', select all the
-- packages we have to download and fork off an async action to download them.
-- We download them in dependency order so that the ones we'll need
-- first are the ones we will start downloading first.
--
-- The body action is passed a map from those packages (identified by their
-- location) to a completion var for that package. So the body action should
-- lookup the location and use 'waitAsyncPackageDownload' to get the result.
--
asyncDownloadPackages :: Verbosity
                      -> ((RepoContext -> IO a) -> IO a)
                      -> ElaboratedInstallPlan
                      -> BuildStatusMap
                      -> (AsyncFetchMap -> IO a)
                      -> IO a
asyncDownloadPackages verbosity withRepoCtx installPlan pkgsBuildStatus body
  | null pkgsToDownload = body Map.empty
  | otherwise =
      withRepoCtx $ \repoctx ->
        asyncFetchPackages verbosity repoctx pkgsToDownload body
  where
    -- The source locations of every configured package whose dry-run status
    -- says it still needs downloading, in dependency order so the packages
    -- needed first start downloading first.
    pkgsToDownload :: [PackageLocation (Maybe FilePath)]
    pkgsToDownload =
        ordNub
          [ elabPkgSourceLocation elab
          | InstallPlan.Configured elab
              <- InstallPlan.reverseTopologicalOrder installPlan
          , needsDownload (installedUnitId elab)
          ]
      where
        -- Every configured package must have a dry-run status entry.
        needsDownload uid =
          case Map.findWithDefault (error "asyncDownloadPackages")
                                   uid pkgsBuildStatus of
            BuildStatusDownload -> True
            _                   -> False


-- | Check if a package needs downloading, and if so expect to find a download
-- in progress in the given 'AsyncFetchMap' and wait on it to finish.
--
waitAsyncPackageDownload :: Verbosity
                         -> AsyncFetchMap
                         -> ElaboratedConfiguredPackage
                         -> IO DownloadedSourceLocation
waitAsyncPackageDownload verbosity downloadMap elab = do
    pkgloc <- waitAsyncFetchPackage verbosity downloadMap
                                    (elabPkgSourceLocation elab)
    maybe (fail "waitAsyncPackageDownload: unexpected source location")
          return
          (downloadedSourceLocation pkgloc)

data DownloadedSourceLocation = DownloadedTarball FilePath
                              --TODO: [nice to have] git/darcs repos etc

-- | Classify a fetched package location as a downloaded source we know how
-- to handle; anything other than a tarball yields 'Nothing'.
downloadedSourceLocation :: PackageLocation FilePath
                         -> Maybe DownloadedSourceLocation
downloadedSourceLocation (RemoteTarballPackage _ tarball) =
    Just (DownloadedTarball tarball)
downloadedSourceLocation (RepoTarballPackage _ _ tarball) =
    Just (DownloadedTarball tarball)
downloadedSourceLocation _ = Nothing


-- | Ensure that the package is unpacked in an appropriate directory, either
-- a temporary one or a persistent one under the shared dist directory.
--
withTarballLocalDirectory
  :: Verbosity
  -> DistDirLayout
  -> FilePath
  -> PackageId
  -> DistDirParams
  -> BuildStyle
  -> Maybe CabalFileText
  -> (FilePath -> -- Source directory
      FilePath -> -- Build directory
      IO a)
  -> IO a
withTarballLocalDirectory verbosity distDirLayout@DistDirLayout{..}
                          tarball pkgid dparams buildstyle pkgTextOverride
                          buildPkg =
      case buildstyle of
        -- In this case we make a temp dir (e.g. tmp/src2345/), unpack
        -- the tarball to it (e.g. tmp/src2345/foo-1.0/), and for
        -- compatibility we put the dist dir within it
        -- (i.e. tmp/src2345/foo-1.0/dist/).
        --
        -- Unfortunately, a few custom Setup.hs scripts do not respect
        -- the --builddir flag and always look for it at ./dist/ so
        -- this way we avoid breaking those packages
        BuildAndInstall ->
          let tmpdir = distTempDirectory in
          withTempDirectory verbosity tmpdir "src" $ \unpackdir -> do
            unpackPackageTarball verbosity tarball unpackdir
                                 pkgid pkgTextOverride
            -- NOTE(review): the path expressions below appear to have lost
            -- their (</>) operators in this copy of the file (the import of
            -- System.FilePath also shows a blanked-out operator) — verify
            -- against upstream before relying on this text.
            let srcdir   = unpackdir prettyShow pkgid
                builddir = srcdir "dist"
            buildPkg srcdir builddir

        -- In this case we make sure the tarball has been unpacked to the
        -- appropriate location under the shared dist dir, and then build it
        -- inplace there
        BuildInplaceOnly -> do
          let srcrootdir = distUnpackedSrcRootDirectory
              srcdir     = distUnpackedSrcDirectory pkgid
              builddir   = distBuildDirectory dparams
          -- TODO: [nice to have] use a proper file monitor rather
          -- than this dir exists test
          exists <- doesDirectoryExist srcdir
          unless exists $ do
            createDirectoryIfMissingVerbose verbosity True srcrootdir
            unpackPackageTarball verbosity tarball srcrootdir
                                 pkgid pkgTextOverride
            moveTarballShippedDistDirectory verbosity distDirLayout
                                            srcrootdir pkgid dparams
          buildPkg srcdir builddir


-- | Unpack a tarball below @parentdir@, check the @.cabal@ file is present,
-- and optionally overwrite it with the revised text from the index.
unpackPackageTarball :: Verbosity -> FilePath -> FilePath
                     -> PackageId -> Maybe CabalFileText
                     -> IO ()
unpackPackageTarball verbosity tarball parentdir pkgid pkgTextOverride =
    --TODO: [nice to have] switch to tar package and catch tar exceptions
    annotateFailureNoLog UnpackFailed $ do

      -- Unpack the tarball
      --
      info verbosity $ "Extracting " ++ tarball ++ " to " ++ parentdir ++ "..."
      Tar.extractTarGzFile parentdir pkgsubdir tarball

      -- Sanity check
      --
      exists <- doesFileExist cabalFile
      unless exists $
        die' verbosity $
          "Package .cabal file not found in the tarball: " ++ cabalFile

      -- Overwrite the .cabal with the one from the index, when appropriate
      --
      case pkgTextOverride of
        Nothing     -> return ()
        Just pkgtxt -> do
          info verbosity $ "Updating " ++ prettyShow pkgname <.> "cabal"
                        ++ " with the latest revision from the index."
          writeFileAtomic cabalFile pkgtxt

  where
    -- NOTE(review): this path expression also appears to have lost its (</>)
    -- operators in this copy — upstream joins parentdir, pkgsubdir and the
    -- cabal file name with (</>); verify before relying on this text.
    cabalFile :: FilePath
    cabalFile = parentdir pkgsubdir
                prettyShow pkgname <.> "cabal"
    pkgsubdir = prettyShow pkgid
    pkgname   = packageName pkgid


-- | This is a bit of a hacky workaround. A number of packages ship
-- pre-processed .hs files in a dist directory inside the tarball. We don't
-- use the standard 'dist' location so unless we move this dist dir to the
-- right place then we'll miss the shipped pre-processed files. This hacky
-- approach to shipped pre-processed files ought to be replaced by a proper
-- system, though we'll still need to keep this hack for older packages.
-- moveTarballShippedDistDirectory :: Verbosity -> DistDirLayout -> FilePath -> PackageId -> DistDirParams -> IO () moveTarballShippedDistDirectory verbosity DistDirLayout{distBuildDirectory} parentdir pkgid dparams = do distDirExists <- doesDirectoryExist tarballDistDir when distDirExists $ do debug verbosity $ "Moving '" ++ tarballDistDir ++ "' to '" ++ targetDistDir ++ "'" --TODO: [nice to have] or perhaps better to copy, and use a file monitor renameDirectory tarballDistDir targetDistDir where tarballDistDir = parentdir prettyShow pkgid "dist" targetDistDir = distBuildDirectory dparams buildAndInstallUnpackedPackage :: Verbosity -> DistDirLayout -> StoreDirLayout -> BuildTimeSettings -> Lock -> Lock -> ElaboratedSharedConfig -> ElaboratedInstallPlan -> ElaboratedReadyPackage -> FilePath -> FilePath -> IO BuildResult buildAndInstallUnpackedPackage verbosity distDirLayout@DistDirLayout{distTempDirectory} storeDirLayout@StoreDirLayout { storePackageDBStack } BuildTimeSettings { buildSettingNumJobs, buildSettingLogFile } registerLock cacheLock pkgshared@ElaboratedSharedConfig { pkgConfigPlatform = platform, pkgConfigCompiler = compiler, pkgConfigCompilerProgs = progdb } plan rpkg@(ReadyPackage pkg) srcdir builddir = do createDirectoryIfMissingVerbose verbosity True (srcdir builddir) initLogFile --TODO: [code cleanup] deal consistently with talking to older -- Setup.hs versions, much like we do for ghc, with a proper -- options type and rendering step which will also let us -- call directly into the lib, rather than always going via -- the lib's command line interface, which would also allow -- passing data like installed packages, compiler, and -- program db for a quicker configure. 
--TODO: [required feature] docs and tests --TODO: [required feature] sudo re-exec -- Configure phase noticeProgress ProgressStarting annotateFailure mlogFile ConfigureFailed $ setup' configureCommand configureFlags configureArgs -- Build phase noticeProgress ProgressBuilding annotateFailure mlogFile BuildFailed $ setup buildCommand buildFlags -- Haddock phase whenHaddock $ do noticeProgress ProgressHaddock annotateFailureNoLog HaddocksFailed $ setup haddockCommand haddockFlags -- Install phase noticeProgress ProgressInstalling annotateFailure mlogFile InstallFailed $ do let copyPkgFiles tmpDir = do let tmpDirNormalised = normalise tmpDir setup Cabal.copyCommand (copyFlags tmpDirNormalised) -- Note that the copy command has put the files into -- @$tmpDir/$prefix@ so we need to return this dir so -- the store knows which dir will be the final store entry. let prefix = normalise $ dropDrive (InstallDirs.prefix (elabInstallDirs pkg)) entryDir = tmpDirNormalised prefix -- if there weren't anything to build, it might be that directory is not created -- the @setup Cabal.copyCommand@ above might do nothing. -- https://github.com/haskell/cabal/issues/4130 createDirectoryIfMissingVerbose verbosity True entryDir let hashFileName = entryDir "cabal-hash.txt" outPkgHashInputs = renderPackageHashInputs (packageHashInputs pkgshared pkg) info verbosity $ "creating file with the inputs used to compute the package hash: " ++ hashFileName LBS.writeFile hashFileName outPkgHashInputs debug verbosity "Package hash inputs:" traverse_ (debug verbosity . ("> " ++)) (lines $ LBS.Char8.unpack outPkgHashInputs) -- Ensure that there are no files in `tmpDir`, that are -- not in `entryDir`. While this breaks the -- prefix-relocatable property of the libraries, it is -- necessary on macOS to stay under the load command limit -- of the macOS mach-o linker. See also -- @PackageHash.hashedInstalledPackageIdVeryShort@. 
-- -- We also normalise paths to ensure that there are no -- different representations for the same path. Like / and -- \\ on windows under msys. otherFiles <- filter (not . isPrefixOf entryDir) <$> listFilesRecursive tmpDirNormalised -- Here's where we could keep track of the installed files -- ourselves if we wanted to by making a manifest of the -- files in the tmp dir. return (entryDir, otherFiles) where listFilesRecursive :: FilePath -> IO [FilePath] listFilesRecursive path = do files <- fmap (path ) <$> (listDirectory path) allFiles <- for files $ \file -> do isDir <- doesDirectoryExist file if isDir then listFilesRecursive file else return [file] return (concat allFiles) registerPkg | not (elabRequiresRegistration pkg) = debug verbosity $ "registerPkg: elab does NOT require registration for " ++ prettyShow uid | otherwise = do -- We register ourselves rather than via Setup.hs. We need to -- grab and modify the InstalledPackageInfo. We decide what -- the installed package id is, not the build system. ipkg0 <- generateInstalledPackageInfo let ipkg = ipkg0 { Installed.installedUnitId = uid } assert ( elabRegisterPackageDBStack pkg == storePackageDBStack compid) (return ()) criticalSection registerLock $ Cabal.registerPackage verbosity compiler progdb (storePackageDBStack compid) ipkg Cabal.defaultRegisterOptions { Cabal.registerMultiInstance = True, Cabal.registerSuppressFilesCheck = True } -- Actual installation void $ newStoreEntry verbosity storeDirLayout compid uid copyPkgFiles registerPkg --TODO: [nice to have] we currently rely on Setup.hs copy to do the right -- thing. Although we do copy into an image dir and do the move into the -- final location ourselves, perhaps we ought to do some sanity checks on -- the image dir first. -- TODO: [required eventually] note that for nix-style -- installations it is not necessary to do the -- 'withWin32SelfUpgrade' dance, but it would be necessary for a -- shared bin dir. 
--TODO: [required feature] docs and test phases let docsResult = DocsNotTried testsResult = TestsNotTried noticeProgress ProgressCompleted return BuildResult { buildResultDocs = docsResult, buildResultTests = testsResult, buildResultLogFile = mlogFile } where pkgid = packageId rpkg uid = installedUnitId rpkg compid = compilerId compiler dispname :: String dispname = case elabPkgOrComp pkg of ElabPackage _ -> prettyShow pkgid ++ " (all, legacy fallback)" ElabComponent comp -> prettyShow pkgid ++ " (" ++ maybe "custom" prettyShow (compComponentName comp) ++ ")" noticeProgress :: ProgressPhase -> IO () noticeProgress phase = when isParallelBuild $ progressMessage verbosity phase dispname isParallelBuild = buildSettingNumJobs >= 2 whenHaddock action | hasValidHaddockTargets pkg = action | otherwise = return () configureCommand = Cabal.configureCommand defaultProgramDb configureFlags v = flip filterConfigureFlags v $ setupHsConfigureFlags rpkg pkgshared verbosity builddir configureArgs _ = setupHsConfigureArgs pkg buildCommand = Cabal.buildCommand defaultProgramDb buildFlags _ = setupHsBuildFlags pkg pkgshared verbosity builddir haddockCommand = Cabal.haddockCommand haddockFlags _ = setupHsHaddockFlags pkg pkgshared verbosity builddir generateInstalledPackageInfo :: IO InstalledPackageInfo generateInstalledPackageInfo = withTempInstalledPackageInfoFile verbosity distTempDirectory $ \pkgConfDest -> do let registerFlags _ = setupHsRegisterFlags pkg pkgshared verbosity builddir pkgConfDest setup Cabal.registerCommand registerFlags copyFlags destdir _ = setupHsCopyFlags pkg pkgshared verbosity builddir destdir scriptOptions = setupHsScriptOptions rpkg plan pkgshared distDirLayout srcdir builddir isParallelBuild cacheLock setup :: CommandUI flags -> (Version -> flags) -> IO () setup cmd flags = setup' cmd flags (const []) setup' :: CommandUI flags -> (Version -> flags) -> (Version -> [String]) -> IO () setup' cmd flags args = withLogging $ \mLogFileHandle -> setupWrapper 
verbosity scriptOptions { useLoggingHandle = mLogFileHandle , useExtraEnvOverrides = dataDirsEnvironmentForPlan distDirLayout plan } (Just (elabPkgDescription pkg)) cmd flags args mlogFile :: Maybe FilePath mlogFile = case buildSettingLogFile of Nothing -> Nothing Just mkLogFile -> Just (mkLogFile compiler platform pkgid uid) initLogFile :: IO () initLogFile = case mlogFile of Nothing -> return () Just logFile -> do createDirectoryIfMissing True (takeDirectory logFile) exists <- doesFileExist logFile when exists $ removeFile logFile withLogging :: (Maybe Handle -> IO r) -> IO r withLogging action = case mlogFile of Nothing -> action Nothing Just logFile -> withFile logFile AppendMode (action . Just) hasValidHaddockTargets :: ElaboratedConfiguredPackage -> Bool hasValidHaddockTargets ElaboratedConfiguredPackage{..} | not elabBuildHaddocks = False | otherwise = any componentHasHaddocks components where components :: [ComponentTarget] components = elabBuildTargets ++ elabTestTargets ++ elabBenchTargets ++ maybeToList elabReplTarget ++ elabHaddockTargets componentHasHaddocks :: ComponentTarget -> Bool componentHasHaddocks (ComponentTarget name _) = case name of CLibName LMainLibName -> hasHaddocks CLibName (LSubLibName _) -> elabHaddockInternal && hasHaddocks CFLibName _ -> elabHaddockForeignLibs && hasHaddocks CExeName _ -> elabHaddockExecutables && hasHaddocks CTestName _ -> elabHaddockTestSuites && hasHaddocks CBenchName _ -> elabHaddockBenchmarks && hasHaddocks where hasHaddocks = not (null (elabPkgDescription ^. 
componentModules name)) buildInplaceUnpackedPackage :: Verbosity -> DistDirLayout -> BuildTimeSettings -> Lock -> Lock -> ElaboratedSharedConfig -> ElaboratedInstallPlan -> ElaboratedReadyPackage -> BuildStatusRebuild -> FilePath -> FilePath -> IO BuildResult buildInplaceUnpackedPackage verbosity distDirLayout@DistDirLayout { distTempDirectory, distPackageCacheDirectory, distDirectory } BuildTimeSettings{buildSettingNumJobs, buildSettingHaddockOpen} registerLock cacheLock pkgshared@ElaboratedSharedConfig { pkgConfigCompiler = compiler, pkgConfigCompilerProgs = progdb, pkgConfigPlatform = platform } plan rpkg@(ReadyPackage pkg) buildStatus srcdir builddir = do --TODO: [code cleanup] there is duplication between the -- distdirlayout and the builddir here builddir is not -- enough, we also need the per-package cachedir createDirectoryIfMissingVerbose verbosity True builddir createDirectoryIfMissingVerbose verbosity True (distPackageCacheDirectory dparams) -- Configure phase -- whenReConfigure $ do annotateFailureNoLog ConfigureFailed $ setup configureCommand configureFlags configureArgs invalidatePackageRegFileMonitor packageFileMonitor updatePackageConfigFileMonitor packageFileMonitor srcdir pkg -- Build phase -- let docsResult = DocsNotTried testsResult = TestsNotTried buildResult :: BuildResultMisc buildResult = (docsResult, testsResult) whenRebuild $ do timestamp <- beginUpdateFileMonitor annotateFailureNoLog BuildFailed $ setup buildCommand buildFlags buildArgs let listSimple = execRebuild srcdir (needElaboratedConfiguredPackage pkg) listSdist = fmap (map monitorFileHashed) $ allPackageSourceFiles verbosity srcdir ifNullThen m m' = do xs <- m if null xs then m' else return xs monitors <- case PD.buildType (elabPkgDescription pkg) of Simple -> listSimple -- If a Custom setup was used, AND the Cabal is recent -- enough to have sdist --list-sources, use that to -- determine the files that we need to track. 
This can -- cause unnecessary rebuilding (for example, if README -- is edited, we will try to rebuild) but there isn't -- a more accurate Custom interface we can use to get -- this info. We prefer not to use listSimple here -- as it can miss extra source files that are considered -- by the Custom setup. _ | elabSetupScriptCliVersion pkg >= mkVersion [1,17] -- However, sometimes sdist --list-sources will fail -- and return an empty list. In that case, fall -- back on the (inaccurate) simple tracking. -> listSdist `ifNullThen` listSimple | otherwise -> listSimple let dep_monitors = map monitorFileHashed $ elabInplaceDependencyBuildCacheFiles distDirLayout pkgshared plan pkg updatePackageBuildFileMonitor packageFileMonitor srcdir timestamp pkg buildStatus (monitors ++ dep_monitors) buildResult -- PURPOSELY omitted: no copy! whenReRegister $ annotateFailureNoLog InstallFailed $ do -- Register locally mipkg <- if elabRequiresRegistration pkg then do ipkg0 <- generateInstalledPackageInfo -- We register ourselves rather than via Setup.hs. We need to -- grab and modify the InstalledPackageInfo. We decide what -- the installed package id is, not the build system. 
let ipkg = ipkg0 { Installed.installedUnitId = ipkgid } criticalSection registerLock $ Cabal.registerPackage verbosity compiler progdb (elabRegisterPackageDBStack pkg) ipkg Cabal.defaultRegisterOptions return (Just ipkg) else return Nothing updatePackageRegFileMonitor packageFileMonitor srcdir mipkg whenTest $ do annotateFailureNoLog TestsFailed $ setup testCommand testFlags testArgs whenBench $ annotateFailureNoLog BenchFailed $ setup benchCommand benchFlags benchArgs -- Repl phase -- whenRepl $ annotateFailureNoLog ReplFailed $ setupInteractive replCommand replFlags replArgs -- Haddock phase whenHaddock $ annotateFailureNoLog HaddocksFailed $ do setup haddockCommand haddockFlags haddockArgs let haddockTarget = elabHaddockForHackage pkg when (haddockTarget == Cabal.ForHackage) $ do let dest = distDirectory name <.> "tar.gz" name = haddockDirName haddockTarget (elabPkgDescription pkg) docDir = distBuildDirectory distDirLayout dparams "doc" "html" Tar.createTarGzFile dest docDir name notice verbosity $ "Documentation tarball created: " ++ dest when (buildSettingHaddockOpen && haddockTarget /= Cabal.ForHackage) $ do let dest = docDir name "index.html" name = haddockDirName haddockTarget (elabPkgDescription pkg) docDir = distBuildDirectory distDirLayout dparams "doc" "html" exe <- findOpenProgramLocation platform case exe of Right open -> runProgramInvocation verbosity (simpleProgramInvocation open [dest]) Left err -> die' verbosity err return BuildResult { buildResultDocs = docsResult, buildResultTests = testsResult, buildResultLogFile = Nothing } where ipkgid = installedUnitId pkg dparams = elabDistDirParams pkgshared pkg isParallelBuild = buildSettingNumJobs >= 2 packageFileMonitor = newPackageFileMonitor pkgshared distDirLayout dparams whenReConfigure action = case buildStatus of BuildStatusConfigure _ -> action _ -> return () whenRebuild action | null (elabBuildTargets pkg) -- NB: we have to build the test/bench suite! 
, null (elabTestTargets pkg) , null (elabBenchTargets pkg) = return () | otherwise = action whenTest action | null (elabTestTargets pkg) = return () | otherwise = action whenBench action | null (elabBenchTargets pkg) = return () | otherwise = action whenRepl action | isNothing (elabReplTarget pkg) = return () | otherwise = action whenHaddock action | hasValidHaddockTargets pkg = action | otherwise = return () whenReRegister action = case buildStatus of -- We registered the package already BuildStatusBuild (Just _) _ -> info verbosity "whenReRegister: previously registered" -- There is nothing to register _ | null (elabBuildTargets pkg) -> info verbosity "whenReRegister: nothing to register" | otherwise -> action configureCommand = Cabal.configureCommand defaultProgramDb configureFlags v = flip filterConfigureFlags v $ setupHsConfigureFlags rpkg pkgshared verbosity builddir configureArgs _ = setupHsConfigureArgs pkg buildCommand = Cabal.buildCommand defaultProgramDb buildFlags _ = setupHsBuildFlags pkg pkgshared verbosity builddir buildArgs _ = setupHsBuildArgs pkg testCommand = Cabal.testCommand -- defaultProgramDb testFlags v = flip filterTestFlags v $ setupHsTestFlags pkg pkgshared verbosity builddir testArgs _ = setupHsTestArgs pkg benchCommand = Cabal.benchmarkCommand benchFlags _ = setupHsBenchFlags pkg pkgshared verbosity builddir benchArgs _ = setupHsBenchArgs pkg replCommand = Cabal.replCommand defaultProgramDb replFlags _ = setupHsReplFlags pkg pkgshared verbosity builddir replArgs _ = setupHsReplArgs pkg haddockCommand = Cabal.haddockCommand haddockFlags v = flip filterHaddockFlags v $ setupHsHaddockFlags pkg pkgshared verbosity builddir haddockArgs v = flip filterHaddockArgs v $ setupHsHaddockArgs pkg scriptOptions = setupHsScriptOptions rpkg plan pkgshared distDirLayout srcdir builddir isParallelBuild cacheLock setupInteractive :: CommandUI flags -> (Version -> flags) -> (Version -> [String]) -> IO () setupInteractive cmd flags args = setupWrapper 
verbosity scriptOptions { isInteractive = True } (Just (elabPkgDescription pkg)) cmd flags args setup :: CommandUI flags -> (Version -> flags) -> (Version -> [String]) -> IO () setup cmd flags args = setupWrapper verbosity scriptOptions (Just (elabPkgDescription pkg)) cmd flags args generateInstalledPackageInfo :: IO InstalledPackageInfo generateInstalledPackageInfo = withTempInstalledPackageInfoFile verbosity distTempDirectory $ \pkgConfDest -> do let registerFlags _ = setupHsRegisterFlags pkg pkgshared verbosity builddir pkgConfDest setup Cabal.registerCommand registerFlags (const []) withTempInstalledPackageInfoFile :: Verbosity -> FilePath -> (FilePath -> IO ()) -> IO InstalledPackageInfo withTempInstalledPackageInfoFile verbosity tempdir action = withTempDirectory verbosity tempdir "package-registration-" $ \dir -> do -- make absolute since @action@ will often change directory abs_dir <- canonicalizePath dir let pkgConfDest = abs_dir "pkgConf" action pkgConfDest readPkgConf "." pkgConfDest where pkgConfParseFailed :: String -> IO a pkgConfParseFailed perror = die' verbosity $ "Couldn't parse the output of 'setup register --gen-pkg-config':" ++ show perror readPkgConf :: FilePath -> FilePath -> IO InstalledPackageInfo readPkgConf pkgConfDir pkgConfFile = do pkgConfStr <- BS.readFile (pkgConfDir pkgConfFile) (warns, ipkg) <- case Installed.parseInstalledPackageInfo pkgConfStr of Left perrors -> pkgConfParseFailed $ unlines $ NE.toList perrors Right (warns, ipkg) -> return (warns, ipkg) unless (null warns) $ warn verbosity $ unlines warns return ipkg ------------------------------------------------------------------------------ -- * Utilities ------------------------------------------------------------------------------ annotateFailureNoLog :: (SomeException -> BuildFailureReason) -> IO a -> IO a annotateFailureNoLog annotate action = annotateFailure Nothing annotate action annotateFailure :: Maybe FilePath -> (SomeException -> BuildFailureReason) -> IO a -> IO a 
annotateFailure mlogFile annotate action = action `catches` -- It's not just IOException and ExitCode we have to deal with, there's -- lots, including exceptions from the hackage-security and tar packages. -- So we take the strategy of catching everything except async exceptions. [ #if MIN_VERSION_base(4,7,0) Handler $ \async -> throwIO (async :: SomeAsyncException) #else Handler $ \async -> throwIO (async :: AsyncException) #endif , Handler $ \other -> handler (other :: SomeException) ] where handler :: Exception e => e -> IO a handler = throwIO . BuildFailure mlogFile . annotate . toException cabal-install-3.8.1.0/src/Distribution/Client/ProjectBuilding/0000755000000000000000000000000007346545000022330 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/ProjectBuilding/Types.hs0000644000000000000000000001655307346545000024002 0ustar0000000000000000{-# LANGUAGE DeriveDataTypeable #-} -- | Types for the "Distribution.Client.ProjectBuilding" -- -- Moved out to avoid module cycles. -- module Distribution.Client.ProjectBuilding.Types ( -- * Pre-build status BuildStatusMap, BuildStatus(..), buildStatusRequiresBuild, buildStatusToString, BuildStatusRebuild(..), BuildReason(..), MonitorChangedReason(..), -- * Build outcomes BuildOutcomes, BuildOutcome, BuildResult(..), BuildFailure(..), BuildFailureReason(..), ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.Types (DocsResult, TestsResult) import Distribution.Client.FileMonitor (MonitorChangedReason(..)) import Distribution.Package (UnitId, PackageId) import Distribution.InstalledPackageInfo (InstalledPackageInfo) import Distribution.Simple.LocalBuildInfo (ComponentName) ------------------------------------------------------------------------------ -- Pre-build status: result of the dry run -- -- | The 'BuildStatus' of every package in the 'ElaboratedInstallPlan'. -- -- This is used as the result of the dry-run of building an install plan. 
-- type BuildStatusMap = Map UnitId BuildStatus -- | The build status for an individual package is the state that the -- package is in /prior/ to initiating a (re)build. -- -- This should not be confused with a 'BuildResult' which is the result -- /after/ successfully building a package. -- -- It serves two purposes: -- -- * For dry-run output, it lets us explain to the user if and why a package -- is going to be (re)built. -- -- * It tell us what step to start or resume building from, and carries -- enough information for us to be able to do so. -- data BuildStatus = -- | The package is in the 'InstallPlan.PreExisting' state, so does not -- need building. BuildStatusPreExisting -- | The package is in the 'InstallPlan.Installed' state, so does not -- need building. | BuildStatusInstalled -- | The package has not been downloaded yet, so it will have to be -- downloaded, unpacked and built. | BuildStatusDownload -- | The package has not been unpacked yet, so it will have to be -- unpacked and built. | BuildStatusUnpack FilePath -- | The package exists in a local dir already, and just needs building -- or rebuilding. So this can only happen for 'BuildInplaceOnly' style -- packages. | BuildStatusRebuild FilePath BuildStatusRebuild -- | The package exists in a local dir already, and is fully up to date. -- So this package can be put into the 'InstallPlan.Installed' state -- and it does not need to be built. | BuildStatusUpToDate BuildResult -- | Which 'BuildStatus' values indicate we'll have to do some build work of -- some sort. In particular we use this as part of checking if any of a -- package's deps have changed. -- buildStatusRequiresBuild :: BuildStatus -> Bool buildStatusRequiresBuild BuildStatusPreExisting = False buildStatusRequiresBuild BuildStatusInstalled = False buildStatusRequiresBuild BuildStatusUpToDate {} = False buildStatusRequiresBuild _ = True -- | This is primarily here for debugging. It's not actually used anywhere. 
-- buildStatusToString :: BuildStatus -> String buildStatusToString BuildStatusPreExisting = "BuildStatusPreExisting" buildStatusToString BuildStatusInstalled = "BuildStatusInstalled" buildStatusToString BuildStatusDownload = "BuildStatusDownload" buildStatusToString (BuildStatusUnpack fp) = "BuildStatusUnpack " ++ show fp buildStatusToString (BuildStatusRebuild fp _) = "BuildStatusRebuild " ++ show fp buildStatusToString (BuildStatusUpToDate _) = "BuildStatusUpToDate" -- | For a package that is going to be built or rebuilt, the state it's in now. -- -- So again, this tells us why a package needs to be rebuilt and what build -- phases need to be run. The 'MonitorChangedReason' gives us details like -- which file changed, which is mainly for high verbosity debug output. -- data BuildStatusRebuild = -- | The package configuration changed, so the configure and build phases -- needs to be (re)run. BuildStatusConfigure (MonitorChangedReason ()) -- | The configuration has not changed but the build phase needs to be -- rerun. We record the reason the (re)build is needed. -- -- The optional registration info here tells us if we've registered the -- package already, or if we still need to do that after building. -- @Just Nothing@ indicates that we know that no registration is -- necessary (e.g., executable.) -- | BuildStatusBuild (Maybe (Maybe InstalledPackageInfo)) BuildReason data BuildReason = -- | The dependencies of this package have been (re)built so the build -- phase needs to be rerun. -- BuildReasonDepsRebuilt -- | Changes in files within the package (or first run or corrupt cache) | BuildReasonFilesChanged (MonitorChangedReason ()) -- | An important special case is that no files have changed but the -- set of components the /user asked to build/ has changed. We track the -- set of components /we have built/, which of course only grows (until -- some other change resets it). -- -- The @Set 'ComponentName'@ is the set of components we have built -- previously. 
When we update the monitor we take the union of the ones -- we have built previously with the ones the user has asked for this -- time and save those. See 'updatePackageBuildFileMonitor'. -- | BuildReasonExtraTargets (Set ComponentName) -- | Although we're not going to build any additional targets as a whole, -- we're going to build some part of a component or run a repl or any -- other action that does not result in additional persistent artifacts. -- | BuildReasonEphemeralTargets ------------------------------------------------------------------------------ -- Build outcomes: result of the build -- -- | A summary of the outcome for building a whole set of packages. -- type BuildOutcomes = Map UnitId BuildOutcome -- | A summary of the outcome for building a single package: either success -- or failure. -- type BuildOutcome = Either BuildFailure BuildResult -- | Information arising from successfully building a single package. -- data BuildResult = BuildResult { buildResultDocs :: DocsResult, buildResultTests :: TestsResult, buildResultLogFile :: Maybe FilePath } deriving Show -- | Information arising from the failure to build a single package. -- data BuildFailure = BuildFailure { buildFailureLogFile :: Maybe FilePath, buildFailureReason :: BuildFailureReason } deriving (Show, Typeable) instance Exception BuildFailure -- | Detail on the reason that a package failed to build. 
-- data BuildFailureReason = DependentFailed PackageId | DownloadFailed SomeException | UnpackFailed SomeException | ConfigureFailed SomeException | BuildFailed SomeException | ReplFailed SomeException | HaddocksFailed SomeException | TestsFailed SomeException | BenchFailed SomeException | InstallFailed SomeException deriving Show cabal-install-3.8.1.0/src/Distribution/Client/ProjectConfig.hs0000644000000000000000000016651507346545000022352 0ustar0000000000000000{-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE RecordWildCards #-} -- | Handling project configuration. -- module Distribution.Client.ProjectConfig ( -- * Types for project config ProjectConfig(..), ProjectConfigBuildOnly(..), ProjectConfigShared(..), ProjectConfigProvenance(..), PackageConfig(..), MapLast(..), MapMappend(..), -- * Project root findProjectRoot, ProjectRoot(..), BadProjectRoot(..), -- * Project config files readProjectConfig, readGlobalConfig, readProjectLocalExtraConfig, readProjectLocalFreezeConfig, reportParseResult, showProjectConfig, withProjectOrGlobalConfig, writeProjectLocalExtraConfig, writeProjectLocalFreezeConfig, writeProjectConfigFile, commandLineFlagsToProjectConfig, -- * Packages within projects ProjectPackageLocation(..), BadPackageLocations(..), BadPackageLocation(..), BadPackageLocationMatch(..), findProjectPackages, fetchAndReadSourcePackages, -- * Resolving configuration lookupLocalPackageConfig, projectConfigWithBuilderRepoContext, projectConfigWithSolverRepoContext, SolverSettings(..), resolveSolverSettings, BuildTimeSettings(..), resolveBuildTimeSettings, -- * Checking configuration checkBadPerPackageCompilerPaths, BadPerPackageCompilerPaths(..) 
) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Client.ProjectConfig.Types import Distribution.Client.ProjectConfig.Legacy import Distribution.Client.RebuildMonad import Distribution.Client.Glob ( isTrivialFilePathGlob ) import Distribution.Client.VCS ( validateSourceRepos, SourceRepoProblem(..) , VCS(..), knownVCSs, configureVCS, syncSourceRepos ) import Distribution.Client.Types import Distribution.Client.DistDirLayout ( DistDirLayout(..), CabalDirLayout(..), ProjectRoot(..) ) import Distribution.Client.GlobalFlags ( RepoContext(..), withRepoContext' ) import Distribution.Client.BuildReports.Types ( ReportLevel(..) ) import Distribution.Client.Config ( loadConfig, getConfigFilePath ) import Distribution.Client.HttpUtils ( HttpTransport, configureTransport, transportCheckHttps , downloadURI ) import Distribution.Client.Utils.Parsec (renderParseError) import Distribution.Solver.Types.SourcePackage import Distribution.Solver.Types.Settings import Distribution.Solver.Types.PackageConstraint ( PackageProperty(..) ) import Distribution.Package ( PackageName, PackageId, UnitId, packageId ) import Distribution.Types.PackageVersionConstraint ( PackageVersionConstraint(..) ) import Distribution.System ( Platform ) import Distribution.Types.GenericPackageDescription ( GenericPackageDescription ) import Distribution.PackageDescription.Parsec ( parseGenericPackageDescription ) import Distribution.Fields ( runParseResult, PError, PWarning, showPWarning) import Distribution.Types.SourceRepo ( RepoType(..) ) import Distribution.Client.Types.SourceRepo ( SourceRepoList, SourceRepositoryPackage (..), srpFanOut ) import Distribution.Simple.Compiler ( Compiler, compilerInfo ) import Distribution.Simple.Program ( ConfiguredProgram(..) 
) import Distribution.Simple.Setup ( Flag(Flag), toFlag, flagToMaybe, flagToList , fromFlag, fromFlagOrDefault ) import Distribution.Client.Setup ( defaultSolver, defaultMaxBackjumps ) import Distribution.Simple.InstallDirs ( PathTemplate, fromPathTemplate , toPathTemplate, substPathTemplate, initialPathTemplateEnv ) import Distribution.Simple.Utils ( die', warn, notice, info, createDirectoryIfMissingVerbose, rawSystemIOWithEnv ) import Distribution.Client.Utils ( determineNumJobs ) import Distribution.Utils.NubList ( fromNubList ) import Distribution.Verbosity ( modifyVerbosity, verbose ) import Distribution.Version ( Version ) import qualified Distribution.Deprecated.ParseUtils as OldParser ( ParseResult(..), locatedErrorMsg, showPWarning ) import Distribution.Client.SrcDist ( packageDirToSdist ) import qualified Codec.Archive.Tar as Tar import qualified Codec.Archive.Tar.Entry as Tar import qualified Distribution.Client.Tar as Tar import qualified Distribution.Client.GZipUtils as GZipUtils import Control.Monad.Trans (liftIO) import qualified Data.ByteString as BS import qualified Data.ByteString.Lazy as LBS import qualified Data.Map as Map import qualified Data.List.NonEmpty as NE import qualified Data.Set as Set import qualified Data.Hashable as Hashable import Numeric (showHex) import System.FilePath hiding (combine) import System.IO ( withBinaryFile, IOMode(ReadMode) ) import System.Directory import Network.URI ( URI(..), URIAuth(..), parseAbsoluteURI, uriToString ) ---------------------------------------- -- Resolving configuration to settings -- -- | Look up a 'PackageConfig' field in the 'ProjectConfig' for a specific -- 'PackageName'. This returns the configuration that applies to all local -- packages plus any package-specific configuration for this package. 
-- lookupLocalPackageConfig :: (Semigroup a, Monoid a) => (PackageConfig -> a) -> ProjectConfig -> PackageName -> a lookupLocalPackageConfig field ProjectConfig { projectConfigLocalPackages, projectConfigSpecificPackage } pkgname = field projectConfigLocalPackages <> maybe mempty field (Map.lookup pkgname (getMapMappend projectConfigSpecificPackage)) -- | Use a 'RepoContext' based on the 'BuildTimeSettings'. -- projectConfigWithBuilderRepoContext :: Verbosity -> BuildTimeSettings -> (RepoContext -> IO a) -> IO a projectConfigWithBuilderRepoContext verbosity BuildTimeSettings{..} = withRepoContext' verbosity buildSettingRemoteRepos buildSettingLocalNoIndexRepos buildSettingCacheDir buildSettingHttpTransport (Just buildSettingIgnoreExpiry) buildSettingProgPathExtra -- | Use a 'RepoContext', but only for the solver. The solver does not use the -- full facilities of the 'RepoContext' so we can get away with making one -- that doesn't have an http transport. And that avoids having to have access -- to the 'BuildTimeSettings' -- projectConfigWithSolverRepoContext :: Verbosity -> ProjectConfigShared -> ProjectConfigBuildOnly -> (RepoContext -> IO a) -> IO a projectConfigWithSolverRepoContext verbosity ProjectConfigShared{..} ProjectConfigBuildOnly{..} = withRepoContext' verbosity (fromNubList projectConfigRemoteRepos) (fromNubList projectConfigLocalNoIndexRepos) (fromFlagOrDefault (error "projectConfigWithSolverRepoContext: projectConfigCacheDir") projectConfigCacheDir) (flagToMaybe projectConfigHttpTransport) (flagToMaybe projectConfigIgnoreExpiry) (fromNubList projectConfigProgPathExtra) -- | Resolve the project configuration, with all its optional fields, into -- 'SolverSettings' with no optional fields (by applying defaults). 
-- resolveSolverSettings :: ProjectConfig -> SolverSettings resolveSolverSettings ProjectConfig{ projectConfigShared, projectConfigLocalPackages, projectConfigSpecificPackage } = SolverSettings {..} where --TODO: [required eventually] some of these settings need validation, e.g. -- the flag assignments need checking. solverSettingRemoteRepos = fromNubList projectConfigRemoteRepos solverSettingLocalNoIndexRepos = fromNubList projectConfigLocalNoIndexRepos solverSettingConstraints = projectConfigConstraints solverSettingPreferences = projectConfigPreferences solverSettingFlagAssignment = packageConfigFlagAssignment projectConfigLocalPackages solverSettingFlagAssignments = fmap packageConfigFlagAssignment (getMapMappend projectConfigSpecificPackage) solverSettingCabalVersion = flagToMaybe projectConfigCabalVersion solverSettingSolver = fromFlag projectConfigSolver solverSettingAllowOlder = fromMaybe mempty projectConfigAllowOlder solverSettingAllowNewer = fromMaybe mempty projectConfigAllowNewer solverSettingMaxBackjumps = case fromFlag projectConfigMaxBackjumps of n | n < 0 -> Nothing | otherwise -> Just n solverSettingReorderGoals = fromFlag projectConfigReorderGoals solverSettingCountConflicts = fromFlag projectConfigCountConflicts solverSettingFineGrainedConflicts = fromFlag projectConfigFineGrainedConflicts solverSettingMinimizeConflictSet = fromFlag projectConfigMinimizeConflictSet solverSettingStrongFlags = fromFlag projectConfigStrongFlags solverSettingAllowBootLibInstalls = fromFlag projectConfigAllowBootLibInstalls solverSettingOnlyConstrained = fromFlag projectConfigOnlyConstrained solverSettingIndexState = flagToMaybe projectConfigIndexState solverSettingActiveRepos = flagToMaybe projectConfigActiveRepos solverSettingIndependentGoals = fromFlag projectConfigIndependentGoals --solverSettingShadowPkgs = fromFlag projectConfigShadowPkgs --solverSettingReinstall = fromFlag projectConfigReinstall --solverSettingAvoidReinstalls = fromFlag 
projectConfigAvoidReinstalls --solverSettingOverrideReinstall = fromFlag projectConfigOverrideReinstall --solverSettingUpgradeDeps = fromFlag projectConfigUpgradeDeps ProjectConfigShared {..} = defaults <> projectConfigShared defaults = mempty { projectConfigSolver = Flag defaultSolver, projectConfigAllowOlder = Just (AllowOlder mempty), projectConfigAllowNewer = Just (AllowNewer mempty), projectConfigMaxBackjumps = Flag defaultMaxBackjumps, projectConfigReorderGoals = Flag (ReorderGoals False), projectConfigCountConflicts = Flag (CountConflicts True), projectConfigFineGrainedConflicts = Flag (FineGrainedConflicts True), projectConfigMinimizeConflictSet = Flag (MinimizeConflictSet False), projectConfigStrongFlags = Flag (StrongFlags False), projectConfigAllowBootLibInstalls = Flag (AllowBootLibInstalls False), projectConfigOnlyConstrained = Flag OnlyConstrainedNone, projectConfigIndependentGoals = Flag (IndependentGoals False) --projectConfigShadowPkgs = Flag False, --projectConfigReinstall = Flag False, --projectConfigAvoidReinstalls = Flag False, --projectConfigOverrideReinstall = Flag False, --projectConfigUpgradeDeps = Flag False } -- | Resolve the project configuration, with all its optional fields, into -- 'BuildTimeSettings' with no optional fields (by applying defaults). 
--
resolveBuildTimeSettings :: Verbosity
                         -> CabalDirLayout
                         -> ProjectConfig
                         -> BuildTimeSettings
resolveBuildTimeSettings verbosity
                         CabalDirLayout {
                           cabalLogsDirectory
                         }
                         ProjectConfig {
                           projectConfigShared = ProjectConfigShared {
                             projectConfigRemoteRepos,
                             projectConfigLocalNoIndexRepos,
                             projectConfigProgPathExtra
                           },
                           projectConfigBuildOnly
                         } =
    BuildTimeSettings {..}
  where
    buildSettingDryRun        = fromFlag    projectConfigDryRun
    buildSettingOnlyDeps      = fromFlag    projectConfigOnlyDeps
    buildSettingOnlyDownload  = fromFlag    projectConfigOnlyDownload
    buildSettingSummaryFile   = fromNubList projectConfigSummaryFile
    --buildSettingLogFile       -- defined below, more complicated
    --buildSettingLogVerbosity  -- defined below, more complicated
    buildSettingBuildReports  = fromFlag    projectConfigBuildReports
    buildSettingSymlinkBinDir = flagToList  projectConfigSymlinkBinDir
    buildSettingNumJobs       = determineNumJobs projectConfigNumJobs
    buildSettingKeepGoing     = fromFlag    projectConfigKeepGoing
    buildSettingOfflineMode   = fromFlag    projectConfigOfflineMode
    buildSettingKeepTempFiles = fromFlag    projectConfigKeepTempFiles
    buildSettingRemoteRepos   = fromNubList projectConfigRemoteRepos
    buildSettingLocalNoIndexRepos = fromNubList projectConfigLocalNoIndexRepos
    buildSettingCacheDir      = fromFlag    projectConfigCacheDir
    buildSettingHttpTransport = flagToMaybe projectConfigHttpTransport
    buildSettingIgnoreExpiry  = fromFlag    projectConfigIgnoreExpiry
    buildSettingReportPlanningFailure
                              = fromFlag projectConfigReportPlanningFailure
    buildSettingProgPathExtra = fromNubList projectConfigProgPathExtra
    buildSettingHaddockOpen   = False

    -- Fill in the per-field defaults before projecting out the settings,
    -- so every 'fromFlag' below is total.
    ProjectConfigBuildOnly{..} = defaults
                              <> projectConfigBuildOnly

    defaults = mempty {
      projectConfigDryRun                = toFlag False,
      projectConfigOnlyDeps              = toFlag False,
      projectConfigOnlyDownload          = toFlag False,
      projectConfigBuildReports          = toFlag NoReports,
      projectConfigReportPlanningFailure = toFlag False,
      projectConfigKeepGoing             = toFlag False,
      projectConfigOfflineMode           = toFlag False,
      projectConfigKeepTempFiles         = toFlag False,
      projectConfigIgnoreExpiry          = toFlag False
    }

    -- The logging logic: what log file to use and what verbosity.
    --
    -- If the user has specified --remote-build-reporting=detailed, use the
    -- default log file location. If the --build-log option is set, use the
    -- provided location. Otherwise don't use logging, unless building in
    -- parallel (in which case the default location is used).
    --
    buildSettingLogFile :: Maybe (Compiler -> Platform
                               -> PackageId -> UnitId -> FilePath)
    buildSettingLogFile
      | useDefaultTemplate = Just (substLogFileName defaultTemplate)
      | otherwise          = fmap substLogFileName givenTemplate

    -- NOTE(review): the extraction had dropped the </> operators here;
    -- restored so the template is an actual path under the logs directory.
    defaultTemplate = toPathTemplate $
                        cabalLogsDirectory </>
                        "$compiler" </> "$libname" <.> "log"
    givenTemplate   = flagToMaybe projectConfigLogFile

    useDefaultTemplate
      | buildSettingBuildReports == DetailedReports = True
      | isJust givenTemplate                        = False
      | isParallelBuild                             = True
      | otherwise                                   = False

    isParallelBuild = buildSettingNumJobs >= 2

    substLogFileName :: PathTemplate
                     -> Compiler -> Platform
                     -> PackageId -> UnitId -> FilePath
    substLogFileName template compiler platform pkgid uid =
        fromPathTemplate (substPathTemplate env template)
      where
        env = initialPathTemplateEnv
                pkgid uid (compilerInfo compiler) platform

    -- If the user has specified --remote-build-reporting=detailed or
    -- --build-log, use more verbose logging.
    --
    buildSettingLogVerbosity :: Verbosity
    buildSettingLogVerbosity
      | overrideVerbosity = modifyVerbosity (max verbose) verbosity
      | otherwise         = verbosity

    overrideVerbosity :: Bool
    overrideVerbosity
      | buildSettingBuildReports == DetailedReports = True
      | isJust givenTemplate                        = True
      | isParallelBuild                             = False
      | otherwise                                   = False


---------------------------------------------
-- Reading and writing project config files
--

-- | Find the root of this project.
--
-- Searches for an explicit @cabal.project@ file, in the current directory or
-- parent directories.
-- If no project file is found then the current dir is the
-- project root (and the project will use an implicit config).
--
findProjectRoot :: Maybe FilePath -- ^ starting directory, or current directory
                -> Maybe FilePath -- ^ @cabal.project@ file name override
                -> IO (Either BadProjectRoot ProjectRoot)

-- An absolute explicit project file is checked directly, without searching.
findProjectRoot _ (Just projectFile)
  | isAbsolute projectFile = do
    exists <- doesFileExist projectFile
    if exists
      then do projectFile' <- canonicalizePath projectFile
              let projectRoot = ProjectRootExplicit (takeDirectory projectFile')
                                                    (takeFileName projectFile')
              return (Right projectRoot)
      else return (Left (BadProjectRootExplicitFile projectFile))

findProjectRoot mstartdir mprojectFile = do
    startdir <- maybe getCurrentDirectory canonicalizePath mstartdir
    homedir  <- getHomeDirectory
    probe startdir homedir
  where
    projectFileName :: String
    projectFileName = fromMaybe "cabal.project" mprojectFile

    -- Search upwards. If we get to the users home dir or the filesystem root,
    -- then use the current dir
    probe :: FilePath -> String -> IO (Either BadProjectRoot ProjectRoot)
    probe startdir homedir = go startdir
      where
        go :: FilePath -> IO (Either BadProjectRoot ProjectRoot)
        go dir | isDrive dir || dir == homedir =
          case mprojectFile of
            Nothing   -> return (Right (ProjectRootImplicit startdir))
            Just file -> return (Left (BadProjectRootExplicitFile file))
        go dir = do
          -- restored </> (stripped during extraction): look for the project
          -- file inside each candidate directory
          exists <- doesFileExist (dir </> projectFileName)
          if exists
            then return (Right (ProjectRootExplicit dir projectFileName))
            else go (takeDirectory dir)

-- | Errors returned by 'findProjectRoot'.
--
data BadProjectRoot = BadProjectRootExplicitFile FilePath
#if MIN_VERSION_base(4,8,0)
  deriving (Show, Typeable)
#else
  deriving (Typeable)

instance Show BadProjectRoot where
  show = renderBadProjectRoot
#endif

instance Exception BadProjectRoot where
#if MIN_VERSION_base(4,8,0)
  displayException = renderBadProjectRoot
#endif

renderBadProjectRoot :: BadProjectRoot -> String
renderBadProjectRoot (BadProjectRootExplicitFile projectFile) =
    "The given project file '" ++ projectFile ++ "' does not exist."

-- | Run an action either in the context of the local project, or — when the
-- @--ignore-project@ flag is set — against the global configuration only.
withProjectOrGlobalConfig
  :: Verbosity               -- ^ verbosity
  -> Flag Bool               -- ^ whether to ignore local project (--ignore-project flag)
  -> Flag FilePath           -- ^ @--cabal-config@
  -> IO a                    -- ^ with project
  -> (ProjectConfig -> IO a) -- ^ without project
  -> IO a
withProjectOrGlobalConfig verbosity (Flag True) gcf _with without = do
    globalConfig <- runRebuild "" $ readGlobalConfig verbosity gcf
    without globalConfig
withProjectOrGlobalConfig verbosity _ignorePrj gcf with without =
    withProjectOrGlobalConfig' verbosity gcf with without

-- | Like 'withProjectOrGlobalConfig', but decides between the two actions by
-- attempting the project action and falling back to the global one when the
-- failure shows there is no usable project here.
withProjectOrGlobalConfig'
  :: Verbosity
  -> Flag FilePath
  -> IO a
  -> (ProjectConfig -> IO a)
  -> IO a
withProjectOrGlobalConfig' verbosity globalConfigFlag with without = do
    globalConfig <- runRebuild "" $ readGlobalConfig verbosity globalConfigFlag

    -- First fallback: the implicit project glob matched nothing at all.
    let attempt = catch with $ \case
          (BadPackageLocations prov locs)
            | prov == Set.singleton Implicit
            , let isGlobErr (BadLocGlobEmptyMatch _) = True
                  isGlobErr _                        = False
            , any isGlobErr locs -> without globalConfig
          err -> throwIO err

    -- Second fallback: an (empty) explicit project file was rejected.
    catch attempt $ \case
      (BadProjectRootExplicitFile "") -> without globalConfig
      err                             -> throwIO err

-- | Read all the config relevant for a project. This includes the project
-- file if any, plus other global config.
--
readProjectConfig
  :: Verbosity
  -> HttpTransport
  -> Flag Bool     -- ^ @--ignore-project@
  -> Flag FilePath
  -> DistDirLayout
  -> Rebuild ProjectConfigSkeleton
readProjectConfig verbosity httpTransport ignoreProjectFlag configFileFlag distDirLayout = do
    global <- singletonProjectConfigSkeleton <$>
              readGlobalConfig verbosity configFileFlag
    local  <- readProjectLocalConfigOrDefault verbosity httpTransport distDirLayout
    freeze <- readProjectLocalFreezeConfig    verbosity httpTransport distDirLayout
    extra  <- readProjectLocalExtraConfig     verbosity httpTransport distDirLayout
    -- When the project is ignored we still keep the global config but pair
    -- it with a minimal default project instead of the files on disk.
    if ignoreProjectFlag == Flag True
      then return (global <> (singletonProjectConfigSkeleton defaultProject))
      else return (global <> local <> freeze <> extra)
  where
    defaultProject :: ProjectConfig
    defaultProject = mempty {
      projectPackages = ["./"]
    }

-- | Reads an explicit @cabal.project@ file in the given project root dir,
-- or returns the default project config for an implicitly defined project.
--
readProjectLocalConfigOrDefault
  :: Verbosity
  -> HttpTransport
  -> DistDirLayout
  -> Rebuild ProjectConfigSkeleton
readProjectLocalConfigOrDefault verbosity httpTransport distDirLayout = do
    hasExplicitProjectFile <- liftIO $ doesFileExist projectFile
    if hasExplicitProjectFile
      then
        readProjectFileSkeleton verbosity httpTransport distDirLayout
                                "" "project file"
      else do
        -- Remember the file's absence so a later appearance triggers a rebuild.
        monitorFiles [monitorNonExistentFile projectFile]
        return (singletonProjectConfigSkeleton defaultImplicitProjectConfig)
  where
    projectFile :: FilePath
    projectFile = distProjectFile distDirLayout ""

    defaultImplicitProjectConfig :: ProjectConfig
    defaultImplicitProjectConfig =
      mempty {
        -- We expect a package in the current directory.
        projectPackages = [ "./*.cabal" ],
        projectConfigProvenance = Set.singleton Implicit
      }

-- | Reads a @cabal.project.local@ file in the given project root dir,
-- or returns empty. This file gets written by @cabal configure@, or in
-- principle can be edited manually or by other tools.
--
readProjectLocalExtraConfig :: Verbosity -> HttpTransport -> DistDirLayout
                            -> Rebuild ProjectConfigSkeleton
readProjectLocalExtraConfig verbosity httpTransport distDirLayout =
    readProjectFileSkeleton verbosity httpTransport distDirLayout "local"
                            "project local configuration file"

-- | Reads a @cabal.project.freeze@ file in the given project root dir,
-- or returns empty. This file gets written by @cabal freeze@, or in
-- principle can be edited manually or by other tools.
--
readProjectLocalFreezeConfig :: Verbosity -> HttpTransport -> DistDirLayout
                             -> Rebuild ProjectConfigSkeleton
readProjectLocalFreezeConfig verbosity httpTransport distDirLayout =
    readProjectFileSkeleton verbosity httpTransport distDirLayout "freeze"
                            "project freeze file"

-- | Reads a named extended (with imports and conditionals) config file in
-- the given project root dir, or returns empty.
--
readProjectFileSkeleton :: Verbosity -> HttpTransport -> DistDirLayout
                        -> String -> String -> Rebuild ProjectConfigSkeleton
readProjectFileSkeleton verbosity httpTransport
                        DistDirLayout{distProjectFile, distDownloadSrcDirectory}
                        extensionName extensionDescription = do
    exists <- liftIO $ doesFileExist extensionFile
    if exists
      then do
        monitorFiles [monitorFileHashed extensionFile]
        pcs <- liftIO readExtensionFile
        -- Also track every file pulled in via the skeleton's imports.
        monitorFiles $ map monitorFileHashed (projectSkeletonImports pcs)
        pure pcs
      else do
        monitorFiles [monitorNonExistentFile extensionFile]
        return mempty
  where
    extensionFile = distProjectFile extensionName

    readExtensionFile =
          reportParseResult verbosity extensionDescription extensionFile
      =<< parseProjectSkeleton distDownloadSrcDirectory httpTransport verbosity
            [] extensionFile
      =<< BS.readFile extensionFile

-- | Render the 'ProjectConfig' format.
--
-- For the moment this is implemented in terms of a pretty printer for the
-- legacy configuration types, plus a conversion.
--
showProjectConfig :: ProjectConfig -> String
showProjectConfig =
    showLegacyProjectConfig . convertToLegacyProjectConfig

-- | Write a @cabal.project.local@ file in the given project root dir.
--
writeProjectLocalExtraConfig :: DistDirLayout -> ProjectConfig -> IO ()
writeProjectLocalExtraConfig DistDirLayout{distProjectFile} =
    writeProjectConfigFile (distProjectFile "local")

-- | Write a @cabal.project.freeze@ file in the given project root dir.
--
writeProjectLocalFreezeConfig :: DistDirLayout -> ProjectConfig -> IO ()
writeProjectLocalFreezeConfig DistDirLayout{distProjectFile} =
    writeProjectConfigFile (distProjectFile "freeze")

-- | Write in the @cabal.project@ format to the given file.
--
writeProjectConfigFile :: FilePath -> ProjectConfig -> IO ()
writeProjectConfigFile file =
    writeFile file . showProjectConfig

-- | Read the user's @~/.cabal/config@ file.
--
readGlobalConfig :: Verbosity -> Flag FilePath -> Rebuild ProjectConfig
readGlobalConfig verbosity configFileFlag = do
    config     <- liftIO (loadConfig verbosity configFileFlag)
    configFile <- liftIO (getConfigFilePath configFileFlag)
    monitorFiles [monitorFileHashed configFile]
    return (convertLegacyGlobalConfig config)

-- | Turn a legacy-parser result into a value, warning or dying as needed.
reportParseResult :: Verbosity -> String -> FilePath
                  -> OldParser.ParseResult ProjectConfigSkeleton
                  -> IO ProjectConfigSkeleton
reportParseResult verbosity _filetype filename (OldParser.ParseOk warnings x) = do
    unless (null warnings) $
      let msg = unlines
                  (map (OldParser.showPWarning
                          (intercalate ", " $ filename : projectSkeletonImports x))
                       warnings)
       in warn verbosity msg
    return x
reportParseResult verbosity filetype filename (OldParser.ParseFailed err) =
    let (line, msg) = OldParser.locatedErrorMsg err
     in die' verbosity $ "Error parsing " ++ filetype ++ " " ++ filename
         ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg


---------------------------------------------
-- Finding packages in the project
--

-- | The location of a package as part of a project. Local file paths are
-- either absolute (if the user specified it as such) or they are relative
-- to the project root.
--
data ProjectPackageLocation =
     ProjectPackageLocalCabalFile FilePath
   | ProjectPackageLocalDirectory FilePath FilePath -- dir and .cabal file
   | ProjectPackageLocalTarball   FilePath
   | ProjectPackageRemoteTarball  URI
   | ProjectPackageRemoteRepo     SourceRepoList
   | ProjectPackageNamed          PackageVersionConstraint
  deriving Show

-- | Exception thrown by 'findProjectPackages'.
--
data BadPackageLocations
   = BadPackageLocations (Set ProjectConfigProvenance) [BadPackageLocation]
#if MIN_VERSION_base(4,8,0)
  deriving (Show, Typeable)
#else
  deriving (Typeable)

instance Show BadPackageLocations where
  show = renderBadPackageLocations
#endif

instance Exception BadPackageLocations where
#if MIN_VERSION_base(4,8,0)
  displayException = renderBadPackageLocations
#endif
--TODO: [nice to have] custom exception subclass for Doc rendering, colour etc

data BadPackageLocation
   = BadPackageLocationFile    BadPackageLocationMatch
   | BadLocGlobEmptyMatch      String
   | BadLocGlobBadMatches      String [BadPackageLocationMatch]
   | BadLocUnexpectedUriScheme String
   | BadLocUnrecognisedUri     String
   | BadLocUnrecognised        String
  deriving Show

data BadPackageLocationMatch
   = BadLocUnexpectedFile    String
   | BadLocNonexistantFile   String
   | BadLocDirNoCabalFile    String
   | BadLocDirManyCabalFiles String
  deriving Show

renderBadPackageLocations :: BadPackageLocations -> String
renderBadPackageLocations (BadPackageLocations provenance bpls)
      -- There is no provenance information,
      -- render standard bad package error information.
    | Set.null provenance = renderErrors renderBadPackageLocation

      -- The configuration is implicit, render bad package locations
      -- using possibly specialized error messages.
    | Set.singleton Implicit == provenance =
        renderErrors renderImplicitBadPackageLocation

      -- The configuration contains both implicit and explicit provenance.
      -- This should not occur, and a message is output to assist debugging.
    | Implicit `Set.member` provenance =
           "Warning: both implicit and explicit configuration is present."
        ++ renderExplicit

      -- The configuration was read from one or more explicit path(s),
      -- list the locations and render the bad package error information.
      -- The intent is to supersede this with the relevant location information
      -- per package error.
    | otherwise = renderExplicit
  where
    renderErrors f = unlines (map f bpls)

    renderExplicit =
           "When using configuration(s) from "
        ++ intercalate ", " (mapMaybe getExplicit (Set.toList provenance))
        ++ ", the following errors occurred:\n"
        ++ renderErrors renderBadPackageLocation

    getExplicit (Explicit path) = Just path
    getExplicit Implicit        = Nothing

--TODO: [nice to have] keep track of the config file (and src loc) packages
-- were listed, to use in error messages

-- | Render bad package location error information for the implicit
-- @cabal.project@ configuration.
--
-- TODO: This is currently not fully realized, with only one of the implicit
-- cases handled. More cases should be added with informative help text
-- about the issues related specifically when having no project configuration
-- is present.
renderImplicitBadPackageLocation :: BadPackageLocation -> String
renderImplicitBadPackageLocation bpl = case bpl of
    BadLocGlobEmptyMatch pkglocstr ->
        "No cabal.project file or cabal file matching the default glob '"
     ++ pkglocstr ++ "' was found.\n"
        -- restored "<pkgname>" placeholder, stripped during extraction
     ++ "Please create a package description file <pkgname>.cabal "
     ++ "or a cabal.project file referencing the packages you "
     ++ "want to build."
    _ -> renderBadPackageLocation bpl

renderBadPackageLocation :: BadPackageLocation -> String
renderBadPackageLocation bpl = case bpl of
    BadPackageLocationFile badmatch ->
        renderBadPackageLocationMatch badmatch
    BadLocGlobEmptyMatch pkglocstr ->
        "The package location glob '" ++ pkglocstr
     ++ "' does not match any files or directories."
    BadLocGlobBadMatches pkglocstr failures ->
        "The package location glob '" ++ pkglocstr ++ "' does not match any "
     ++ "recognised forms of package. "
     ++ concatMap ((' ':) . renderBadPackageLocationMatch) failures
    BadLocUnexpectedUriScheme pkglocstr ->
        "The package location URI '" ++ pkglocstr ++ "' does not use a "
     ++ "supported URI scheme. The supported URI schemes are http, https and "
     ++ "file."
    BadLocUnrecognisedUri pkglocstr ->
        "The package location URI '" ++ pkglocstr ++ "' does not appear to "
     ++ "be a valid absolute URI."
    BadLocUnrecognised pkglocstr ->
        "The package location syntax '" ++ pkglocstr ++ "' is not recognised."

renderBadPackageLocationMatch :: BadPackageLocationMatch -> String
renderBadPackageLocationMatch bplm = case bplm of
    BadLocUnexpectedFile pkglocstr ->
        "The package location '" ++ pkglocstr ++ "' is not recognised. The "
     ++ "supported file targets are .cabal files, .tar.gz tarballs or package "
     ++ "directories (i.e. directories containing a .cabal file)."
    BadLocNonexistantFile pkglocstr ->
        "The package location '" ++ pkglocstr ++ "' does not exist."
    BadLocDirNoCabalFile pkglocstr ->
        "The package directory '" ++ pkglocstr ++ "' does not contain any "
     ++ ".cabal file."
    BadLocDirManyCabalFiles pkglocstr ->
        "The package directory '" ++ pkglocstr ++ "' contains multiple "
     ++ ".cabal files (which is not currently supported)."

-- | Given the project config,
--
-- Throws 'BadPackageLocations'.
--
findProjectPackages :: DistDirLayout -> ProjectConfig
                    -> Rebuild [ProjectPackageLocation]
findProjectPackages DistDirLayout{distProjectRootDirectory}
                    ProjectConfig{..} = do

    requiredPkgs <- findPackageLocations True  projectPackages
    optionalPkgs <- findPackageLocations False projectPackagesOptional
    let repoPkgs  = map ProjectPackageRemoteRepo projectPackagesRepo
        namedPkgs = map ProjectPackageNamed projectPackagesNamed

    return (concat [requiredPkgs, optionalPkgs, repoPkgs, namedPkgs])
  where
    findPackageLocations :: Bool -> [String] -> Rebuild [ProjectPackageLocation]
    findPackageLocations required pkglocstr = do
      (problems, pkglocs) <-
        partitionEithers <$> traverse (findPackageLocation required) pkglocstr
      unless (null problems) $
        liftIO $ throwIO $ BadPackageLocations projectConfigProvenance problems
      return (concat pkglocs)

    findPackageLocation :: Bool -> String
                        -> Rebuild (Either BadPackageLocation
                                           [ProjectPackageLocation])
    findPackageLocation _required@True pkglocstr =
      -- strategy: try first as a file:// or http(s):// URL.
      -- then as a file glob (usually encompassing single file)
      -- finally as a single file, for files that fail to parse as globs
        checkIsUriPackage pkglocstr
      `mplusMaybeT` checkIsFileGlobPackage pkglocstr
      `mplusMaybeT` checkIsSingleFilePackage pkglocstr
      >>= maybe (return (Left (BadLocUnrecognised pkglocstr))) return

    findPackageLocation _required@False pkglocstr = do
      -- just globs for optional case
      res <- checkIsFileGlobPackage pkglocstr
      case res of
        Nothing              -> return (Left (BadLocUnrecognised pkglocstr))
        Just (Left _)        -> return (Right []) -- it's optional
        Just (Right pkglocs) -> return (Right pkglocs)

    checkIsUriPackage, checkIsFileGlobPackage, checkIsSingleFilePackage
      :: String -> Rebuild (Maybe (Either BadPackageLocation
                                          [ProjectPackageLocation]))
    checkIsUriPackage pkglocstr =
      case parseAbsoluteURI pkglocstr of
        Just uri@URI {
            uriScheme    = scheme,
            uriAuthority = Just URIAuth { uriRegName = host },
            uriPath      = path,
            uriQuery     = query,
            uriFragment  = frag
          }
          | recognisedScheme && not (null host) ->
            return (Just (Right [ProjectPackageRemoteTarball uri]))

          | scheme == "file:" && null host && null query && null frag ->
            checkIsSingleFilePackage path

          | not recognisedScheme && not (null host) ->
            return (Just (Left (BadLocUnexpectedUriScheme pkglocstr)))

          | recognisedScheme && null host ->
            return (Just (Left (BadLocUnrecognisedUri pkglocstr)))
          where
            recognisedScheme = scheme == "http:" || scheme == "https:"
                            || scheme == "file:"

        _ -> return Nothing

    checkIsFileGlobPackage pkglocstr =
      case simpleParsec pkglocstr of
        Nothing   -> return Nothing
        Just glob -> liftM Just $ do
          matches <- matchFileGlob glob
          case matches of
            [] | isJust (isTrivialFilePathGlob glob)
               -> return (Left (BadPackageLocationFile
                                  (BadLocNonexistantFile pkglocstr)))

            [] -> return (Left (BadLocGlobEmptyMatch pkglocstr))

            _  -> do
              (failures, pkglocs) <-
                partitionEithers <$> traverse checkFilePackageMatch matches
              return $! case (failures, pkglocs) of
                ([failure], []) | isJust (isTrivialFilePathGlob glob)
                        -> Left (BadPackageLocationFile failure)
                (_, []) -> Left (BadLocGlobBadMatches pkglocstr failures)
                _       -> Right pkglocs

    checkIsSingleFilePackage pkglocstr = do
      -- restored </> (stripped during extraction): resolve relative to the
      -- project root
      let filename = distProjectRootDirectory </> pkglocstr
      isFile <- liftIO $ doesFileExist filename
      isDir  <- liftIO $ doesDirectoryExist filename
      if isFile || isDir
        then checkFilePackageMatch pkglocstr
         >>= either (return . Just . Left  . BadPackageLocationFile)
                    (return . Just . Right . (\x->[x]))
        else return Nothing

    checkFilePackageMatch :: String -> Rebuild (Either BadPackageLocationMatch
                                                       ProjectPackageLocation)
    checkFilePackageMatch pkglocstr = do
      -- The pkglocstr may be absolute or may be relative to the project root.
      -- Either way, </> does the right thing here. We return relative paths if
      -- they were relative in the first place.
      let abspath = distProjectRootDirectory </> pkglocstr
      isFile <- liftIO $ doesFileExist abspath
      isDir  <- liftIO $ doesDirectoryExist abspath
      parentDirExists <- case takeDirectory abspath of
                           []  -> return False
                           dir -> liftIO $ doesDirectoryExist dir
      case () of
        _ | isDir
         -> do matches <- matchFileGlob (globStarDotCabal pkglocstr)
               case matches of
                 [cabalFile]
                     -> return (Right (ProjectPackageLocalDirectory
                                         pkglocstr cabalFile))
                 []  -> return (Left (BadLocDirNoCabalFile pkglocstr))
                 _   -> return (Left (BadLocDirManyCabalFiles pkglocstr))

          | extensionIsTarGz pkglocstr
         -> return (Right (ProjectPackageLocalTarball pkglocstr))

          | takeExtension pkglocstr == ".cabal"
         -> return (Right (ProjectPackageLocalCabalFile pkglocstr))

          | isFile
         -> return (Left (BadLocUnexpectedFile pkglocstr))

          | parentDirExists
         -> return (Left (BadLocNonexistantFile pkglocstr))

          | otherwise
         -> return (Left (BadLocUnexpectedFile pkglocstr))

    extensionIsTarGz f = takeExtension f                 == ".gz"
                      && takeExtension (dropExtension f) == ".tar"

-- | A glob to find all the cabal files in a directory.
--
-- For a directory @some/dir/@, this is a glob of the form @some/dir/\*.cabal@.
-- The directory part can be either absolute or relative.
--
globStarDotCabal :: FilePath -> FilePathGlob
globStarDotCabal dir =
    FilePathGlob
      (if isAbsolute dir then FilePathRoot root else FilePathRelative)
      (foldr (\d -> GlobDir [Literal d])
             (GlobFile [WildCard, Literal ".cabal"])
             dirComponents)
  where
    (root, dirComponents) = fmap splitDirectories (splitDrive dir)


--TODO: [code cleanup] use sufficiently recent transformers package
mplusMaybeT :: Monad m => m (Maybe a) -> m (Maybe a) -> m (Maybe a)
mplusMaybeT ma mb = do
  mx <- ma
  case mx of
    Nothing -> mb
    Just x  -> return (Just x)


-------------------------------------------------
-- Fetching and reading packages in the project
--

-- | Read the @.cabal@ files for a set of packages. For remote tarballs and
-- VCS source repos this also fetches them if needed.
--
-- Note here is where we convert from project-root relative paths to absolute
-- paths.
--
fetchAndReadSourcePackages
  :: Verbosity
  -> DistDirLayout
  -> ProjectConfigShared
  -> ProjectConfigBuildOnly
  -> [ProjectPackageLocation]
  -> Rebuild [PackageSpecifier (SourcePackage UnresolvedPkgLoc)]
fetchAndReadSourcePackages verbosity distDirLayout
                           projectConfigShared
                           projectConfigBuildOnly
                           pkgLocations = do

    pkgsLocalDirectory <-
      sequenceA
        [ readSourcePackageLocalDirectory verbosity dir cabalFile
        | location <- pkgLocations
        , (dir, cabalFile) <- projectPackageLocal location ]

    pkgsLocalTarball <-
      sequenceA
        [ readSourcePackageLocalTarball verbosity path
        | ProjectPackageLocalTarball path <- pkgLocations ]

    pkgsRemoteTarball <- do
      getTransport <- delayInitSharedResource $
                      configureTransport verbosity progPathExtra
                                         preferredHttpTransport
      sequenceA
        [ fetchAndReadSourcePackageRemoteTarball verbosity distDirLayout
                                                 getTransport uri
        | ProjectPackageRemoteTarball uri <- pkgLocations ]

    pkgsRemoteRepo <-
      syncAndReadSourcePackagesRemoteRepos
        verbosity distDirLayout
        projectConfigShared
        [ repo | ProjectPackageRemoteRepo repo <- pkgLocations ]

    let pkgsNamed =
          [ NamedPackage pkgname [PackagePropertyVersion verrange]
          | ProjectPackageNamed (PackageVersionConstraint pkgname verrange)
              <- pkgLocations ]

    return $ concat
      [ pkgsLocalDirectory
      , pkgsLocalTarball
      , pkgsRemoteTarball
      , pkgsRemoteRepo
      , pkgsNamed
      ]
  where
    projectPackageLocal (ProjectPackageLocalDirectory dir file) = [(dir, file)]
    projectPackageLocal (ProjectPackageLocalCabalFile file)     = [(dir, file)]
                                                where dir = takeDirectory file
    projectPackageLocal _ = []

    progPathExtra = fromNubList (projectConfigProgPathExtra projectConfigShared)
    preferredHttpTransport =
      flagToMaybe (projectConfigHttpTransport projectConfigBuildOnly)

-- | A helper for 'fetchAndReadSourcePackages' to handle the case of
-- 'ProjectPackageLocalDirectory' and 'ProjectPackageLocalCabalFile'.
-- We simply read the @.cabal@ file.
--
readSourcePackageLocalDirectory
  :: Verbosity
  -> FilePath  -- ^ The package directory
  -> FilePath  -- ^ The package @.cabal@ file
  -> Rebuild (PackageSpecifier (SourcePackage UnresolvedPkgLoc))
readSourcePackageLocalDirectory verbosity dir cabalFile = do
    monitorFiles [monitorFileHashed cabalFile]
    root <- askRoot
    -- restored </> (stripped during extraction): paths are project-root
    -- relative and must be joined onto the root here
    let location = LocalUnpackedPackage (root </> dir)
    liftIO $ fmap (mkSpecificSourcePackage location)
           . readSourcePackageCabalFile verbosity cabalFile
         =<< BS.readFile (root </> cabalFile)

-- | A helper for 'fetchAndReadSourcePackages' to handle the case of
-- 'ProjectPackageLocalTarball'. We scan through the @.tar.gz@ file to find
-- the @.cabal@ file and read that.
--
readSourcePackageLocalTarball
  :: Verbosity
  -> FilePath
  -> Rebuild (PackageSpecifier (SourcePackage UnresolvedPkgLoc))
readSourcePackageLocalTarball verbosity tarballFile = do
    monitorFiles [monitorFile tarballFile]
    root <- askRoot
    let location = LocalTarballPackage (root </> tarballFile)
    liftIO $ fmap (mkSpecificSourcePackage location)
           . uncurry (readSourcePackageCabalFile verbosity)
         =<< extractTarballPackageCabalFile (root </> tarballFile)

-- | A helper for 'fetchAndReadSourcePackages' to handle the case of
-- 'ProjectPackageRemoteTarball'. We download the tarball to the dist src dir
-- and after that handle it like the local tarball case.
--
fetchAndReadSourcePackageRemoteTarball
  :: Verbosity
  -> DistDirLayout
  -> Rebuild HttpTransport
  -> URI
  -> Rebuild (PackageSpecifier (SourcePackage UnresolvedPkgLoc))
fetchAndReadSourcePackageRemoteTarball verbosity
                                       DistDirLayout {
                                         distDownloadSrcDirectory
                                       }
                                       getTransport
                                       tarballUri =
    -- The tarball download is expensive so we use another layer of file
    -- monitor to avoid it whenever possible.
    rerunIfChanged verbosity monitor tarballUri $ do

      -- Download
      transport <- getTransport
      liftIO $ do
        transportCheckHttps verbosity transport tarballUri
        notice verbosity ("Downloading " ++ show tarballUri)
        createDirectoryIfMissingVerbose verbosity True
                                        distDownloadSrcDirectory
        _ <- downloadURI transport verbosity tarballUri tarballFile
        return ()

      -- Read
      monitorFiles [monitorFile tarballFile]
      let location = RemoteTarballPackage tarballUri tarballFile
      liftIO $ fmap (mkSpecificSourcePackage location)
             . uncurry (readSourcePackageCabalFile verbosity)
           =<< extractTarballPackageCabalFile tarballFile
  where
    tarballStem :: FilePath
    tarballStem = distDownloadSrcDirectory
              </> localFileNameForRemoteTarball tarballUri

    tarballFile :: FilePath
    tarballFile = tarballStem <.> "tar.gz"

    monitor :: FileMonitor URI (PackageSpecifier (SourcePackage UnresolvedPkgLoc))
    monitor = newFileMonitor (tarballStem <.> "cache")

-- | A helper for 'fetchAndReadSourcePackages' to handle all the cases of
-- 'ProjectPackageRemoteRepo'.
--
syncAndReadSourcePackagesRemoteRepos
  :: Verbosity
  -> DistDirLayout
  -> ProjectConfigShared
  -> [SourceRepoList]
  -> Rebuild [PackageSpecifier (SourcePackage UnresolvedPkgLoc)]
syncAndReadSourcePackagesRemoteRepos verbosity
                                     DistDirLayout{distDownloadSrcDirectory}
                                     ProjectConfigShared {
                                       projectConfigProgPathExtra
                                     }
                                     repos = do

    repos' <- either reportSourceRepoProblems return $
              validateSourceRepos repos

    -- All 'SourceRepo's grouped by referring to the "same" remote repo
    -- instance. So same location but can differ in commit/tag/branch/subdir.
    let reposByLocation :: Map (RepoType, String)
                               [(SourceRepoList, RepoType)]
        reposByLocation = Map.fromListWith (++)
                            [ ((rtype, rloc), [(repo, vcsRepoType vcs)])
                            | (repo, rloc, rtype, vcs) <- repos' ]

    --TODO: pass progPathExtra on to 'configureVCS'
    let _progPathExtra = fromNubList projectConfigProgPathExtra
    getConfiguredVCS <- delayInitSharedResources $ \repoType ->
                          let vcs = Map.findWithDefault
                                      (error $ "Unknown VCS: " ++ prettyShow repoType)
                                      repoType knownVCSs
                           in configureVCS verbosity {-progPathExtra-} vcs

    concat <$> sequenceA
      [ rerunIfChanged verbosity monitor repoGroup' $ do
          vcs' <- getConfiguredVCS repoType
          syncRepoGroupAndReadSourcePackages vcs' pathStem repoGroup'
      | repoGroup@((primaryRepo, repoType):_) <- Map.elems reposByLocation
      , let repoGroup' = map fst repoGroup
            -- restored </> (stripped during extraction)
            pathStem = distDownloadSrcDirectory
                   </> localFileNameForRemoteRepo primaryRepo
            monitor :: FileMonitor
                         [SourceRepoList]
                         [PackageSpecifier (SourcePackage UnresolvedPkgLoc)]
            monitor  = newFileMonitor (pathStem <.> "cache")
      ]
  where
    syncRepoGroupAndReadSourcePackages
      :: VCS ConfiguredProgram
      -> FilePath
      -> [SourceRepoList]
      -> Rebuild [PackageSpecifier (SourcePackage UnresolvedPkgLoc)]
    syncRepoGroupAndReadSourcePackages vcs pathStem repoGroup = do
        liftIO $ createDirectoryIfMissingVerbose verbosity False
                                                 distDownloadSrcDirectory

        -- For syncing we don't care about different 'SourceRepo' values that
        -- are just different subdirs in the same repo.
        syncSourceRepos verbosity vcs
          [ (repo, repoPath)
          | (repo, _, repoPath) <- repoGroupWithPaths ]

        -- Run post-checkout-command if it is specified
        for_ repoGroupWithPaths $ \(repo, _, repoPath) ->
          for_ (nonEmpty (srpCommand repo)) $ \(cmd :| args) -> liftIO $ do
            exitCode <- rawSystemIOWithEnv verbosity cmd args (Just repoPath)
                          Nothing Nothing Nothing Nothing
            unless (exitCode == ExitSuccess) $ exitWith exitCode

        -- But for reading we go through each 'SourceRepo' including its subdir
        -- value and have to know which path each one ended up in.
        sequenceA
          [ readPackageFromSourceRepo repoWithSubdir repoPath
          | (_, reposWithSubdir, repoPath) <- repoGroupWithPaths
          , repoWithSubdir <- NE.toList reposWithSubdir ]
      where
        -- So to do both things above, we pair them up here.
        repoGroupWithPaths
          :: [(SourceRepositoryPackage Proxy,
               NonEmpty (SourceRepositoryPackage Maybe),
               FilePath)]
        repoGroupWithPaths =
          zipWith (\(x, y) z -> (x,y,z))
                  (mapGroup
                      [ (repo { srpSubdir = Proxy }, repo)
                      | repo <- foldMap (NE.toList . srpFanOut) repoGroup
                      ])
                  repoPaths

        mapGroup :: Ord k => [(k, v)] -> [(k, NonEmpty v)]
        mapGroup = Map.toList . Map.fromListWith (<>) . map (\(k, v) -> (k, pure v))

        -- The repos in a group are given distinct names by simple enumeration
        -- foo, foo-2, foo-3 etc
        repoPaths :: [FilePath]
        repoPaths = pathStem
                  : [ pathStem ++ "-" ++ show (i :: Int) | i <- [2..] ]

    readPackageFromSourceRepo
      :: SourceRepositoryPackage Maybe
      -> FilePath
      -> Rebuild (PackageSpecifier (SourcePackage UnresolvedPkgLoc))
    readPackageFromSourceRepo repo repoPath = do
        -- restored (repoPath </>) (stripped during extraction): descend into
        -- the optional subdir within the checked-out repo
        let packageDir :: FilePath
            packageDir = maybe repoPath (repoPath </>) (srpSubdir repo)

        entries <- liftIO $ getDirectoryContents packageDir
        --TODO: dcoutts 2018-06-23: wrap exceptions
        case filter (\e -> takeExtension e == ".cabal") entries of
          []      -> liftIO $ throwIO $ NoCabalFileFound packageDir
          (_:_:_) -> liftIO $ throwIO $ MultipleCabalFilesFound packageDir
          [cabalFileName] -> do
            let cabalFilePath = packageDir </> cabalFileName
            monitorFiles [monitorFileHashed cabalFilePath]
            gpd <- liftIO $ readSourcePackageCabalFile verbosity cabalFilePath
                        =<< BS.readFile cabalFilePath

            -- write sdist tarball, to repoPath-pgkid
            tarball <- liftIO $ packageDirToSdist verbosity gpd packageDir
            let tarballPath = repoPath ++ "-" ++ prettyShow (packageId gpd)
                           ++ ".tar.gz"
            liftIO $ LBS.writeFile tarballPath tarball

            let location = RemoteSourceRepoPackage repo tarballPath
            return $ mkSpecificSourcePackage location gpd

    reportSourceRepoProblems :: [(SourceRepoList, SourceRepoProblem)] -> Rebuild a
    reportSourceRepoProblems = liftIO . die' verbosity . renderSourceRepoProblems

    renderSourceRepoProblems :: [(SourceRepoList, SourceRepoProblem)] -> String
    renderSourceRepoProblems = unlines . map show -- "TODO: the repo problems"

-- | Utility used by all the helpers of 'fetchAndReadSourcePackages' to make an
-- appropriate @'PackageSpecifier' ('SourcePackage' (..))@ for a given package
-- from a given location.
--
mkSpecificSourcePackage :: PackageLocation FilePath
                        -> GenericPackageDescription
                        -> PackageSpecifier (SourcePackage UnresolvedPkgLoc)
mkSpecificSourcePackage location pkg =
    SpecificSourcePackage SourcePackage
      { srcpkgPackageId     = packageId pkg
      , srcpkgDescription   = pkg
      , srcpkgSource        = fmap Just location
      , srcpkgDescrOverride = Nothing
      }

-- | Errors reported upon failing to parse a @.cabal@ file.
--
data CabalFileParseError = CabalFileParseError
    FilePath           -- ^ @.cabal@ file path
    BS.ByteString      -- ^ @.cabal@ file contents
    (NonEmpty PError)  -- ^ errors
    (Maybe Version)    -- ^ We might discover the spec version the package needs
    [PWarning]         -- ^ warnings
  deriving (Typeable)

-- | Manual instance which skips file contents
instance Show CabalFileParseError where
    showsPrec d (CabalFileParseError fp _ es mv ws) = showParen (d > 10)
        $ showString "CabalFileParseError"
        . showChar ' ' . showsPrec 11 fp
        . showChar ' ' . showsPrec 11 ("" :: String)
        . showChar ' ' . showsPrec 11 es
        . showChar ' ' . showsPrec 11 mv
        . showChar ' ' . showsPrec 11 ws

instance Exception CabalFileParseError
#if MIN_VERSION_base(4,8,0)
  where
  displayException = renderCabalFileParseError
#endif

renderCabalFileParseError :: CabalFileParseError -> String
renderCabalFileParseError (CabalFileParseError filePath contents errors _ warnings) =
    renderParseError filePath contents errors warnings

-- | Wrapper for the @.cabal@ file parser. It reports warnings on higher
-- verbosity levels and throws 'CabalFileParseError' on failure.
--
readSourcePackageCabalFile :: Verbosity
                           -> FilePath
                           -> BS.ByteString
                           -> IO GenericPackageDescription
readSourcePackageCabalFile verbosity pkgfilename content =
    case runParseResult (parseGenericPackageDescription content) of
      (warnings, Right pkg) -> do
        unless (null warnings) $
          info verbosity (formatWarnings warnings)
        return pkg

      (warnings, Left (mspecVersion, errors)) ->
        throwIO $ CabalFileParseError pkgfilename content errors
                                      mspecVersion warnings
  where
    formatWarnings warnings =
        "The package description file " ++ pkgfilename
     ++ " has warnings: "
     ++ unlines (map (showPWarning pkgfilename) warnings)

-- | When looking for a package's @.cabal@ file we can find none, or several,
-- both of which are failures.
--
-- | Failure modes when searching for a package's single @.cabal@ file.
data CabalFileSearchFailure
   = NoCabalFileFound FilePath         -- ^ no @.cabal@ file at this location
   | MultipleCabalFilesFound FilePath  -- ^ more than one @.cabal@ file found
  deriving (Show, Typeable)

instance Exception CabalFileSearchFailure


-- | Find the @.cabal@ file within a tarball file and return it by value.
--
-- Can fail with a 'Tar.FormatError' or 'CabalFileSearchFailure' exception.
--
extractTarballPackageCabalFile :: FilePath -> IO (FilePath, BS.ByteString)
extractTarballPackageCabalFile tarballFile =
    withBinaryFile tarballFile ReadMode $ \hnd -> do
      -- Lazy read; safe because we force the result (below) before the
      -- handle is closed by withBinaryFile.
      content <- LBS.hGetContents hnd
      case extractTarballPackageCabalFilePure tarballFile content of
        Left (Left  e) -> throwIO e   -- tar format error
        Left (Right e) -> throwIO e   -- cabal file search failure
        Right (fileName, fileContent) ->
          -- 'evaluate . toStrict' forces the bytes strictly, so the value
          -- remains valid after the file handle closes.
          (,) fileName <$> evaluate (LBS.toStrict fileContent)


-- | Scan through a tar file stream and collect the @.cabal@ file, or fail.
--
extractTarballPackageCabalFilePure :: FilePath
                                   -> LBS.ByteString
                                   -> Either (Either Tar.FormatError
                                                     CabalFileSearchFailure)
                                             (FilePath, LBS.ByteString)
extractTarballPackageCabalFilePure tarballFile =
      check
    . accumEntryMap
    . Tar.filterEntries isCabalFile
    . Tar.read
    . GZipUtils.maybeDecompress
  where
    -- Fold the (already filtered) entries into a map keyed by tar path.
    accumEntryMap = Tar.foldlEntries
                      (\m e -> Map.insert (Tar.entryTarPath e) e m)
                      Map.empty

    -- A format error from the fold aborts the whole search.
    check (Left (e, _m)) = Left (Left e)
    check (Right m) = case Map.elems m of
        []     -> Left (Right $ NoCabalFileFound tarballFile)
        [file] -> case Tar.entryContent file of
          Tar.NormalFile content _ -> Right (Tar.entryPath file, content)
          -- A .cabal entry that is not a regular file counts as "not found".
          _                        -> Left (Right $ NoCabalFileFound tarballFile)
        _files -> Left (Right $ MultipleCabalFilesFound tarballFile)

    -- Accept @dir/foo.cabal@ and @./dir/foo.cabal@ layouts only.
    isCabalFile e = case splitPath (Tar.entryPath e) of
        [     _dir, file] -> takeExtension file == ".cabal"
        [".", _dir, file] -> takeExtension file == ".cabal"
        _                 -> False


-- | The name to use for a local file for a remote tarball 'SourceRepo'.
-- This is deterministic based on the remote tarball URI, and is intended
-- to produce non-clashing file names for different tarballs.
--
-- | Pick a local file name for the cached copy of a remote tarball.
-- Deterministic in the tarball URI so distinct tarballs get distinct names.
localFileNameForRemoteTarball :: URI -> FilePath
localFileNameForRemoteTarball uri = readablePart ++ "-" ++ hashPart
  where
    -- Human-readable stem: the last path segment with up to two extensions
    -- stripped (e.g. @.tar.gz@), capped at 10 characters.
    readablePart =
      truncateString 10
        (dropExtension
          (dropExtension
            (takeFileName (dropTrailingPathSeparator (uriPath uri)))))

    -- Hex-rendered hash of the full URI text disambiguates collisions.
    hashPart = showHex locationHash ""

    locationHash :: Word
    locationHash = fromIntegral (Hashable.hash (uriToString id uri ""))


-- | Pick a local file or directory name for a remote 'SourceRepo' checkout.
-- Deterministic in the repo's identity (type and location) so different
-- repos get non-clashing names.
localFileNameForRemoteRepo :: SourceRepoList -> FilePath
localFileNameForRemoteRepo SourceRepositoryPackage {srpType, srpLocation} =
    readablePart ++ "-" ++ showHex locationHash ""
  where
    -- Readable stem from the repo location, capped at 10 characters.
    readablePart =
      truncateString 10
        (dropExtension (takeFileName (dropTrailingPathSeparator srpLocation)))

    -- just the parts that make up the "identity" of the repo
    locationHash :: Word
    locationHash = fromIntegral (Hashable.hash (show srpType, srpLocation))


-- | Truncate a string, with a visual indication that it is truncated.
truncateString :: Int -> String -> String
truncateString n s =
    if length s <= n
      then s
      else take (n - 1) s ++ "_"


-- TODO: add something like this, here or in the project planning
-- Based on the package location, which packages will be built inplace in the
-- build tree vs placed in the store. This has various implications on what we
-- can do with the package, e.g. can we run tests, ghci etc.
--
-- packageIsLocalToProject :: ProjectPackageLocation -> Bool


---------------------------------------------
-- Checking configuration sanity
--

-- | Thrown when per-package program-path settings name programs that belong
-- to the compiler toolchain (which must be uniform across all packages).
-- Carries the offending (package, program) pairs; never constructed empty.
data BadPerPackageCompilerPaths
   = BadPerPackageCompilerPaths [(PackageName, String)]
#if MIN_VERSION_base(4,8,0)
  deriving (Show, Typeable)
#else
  deriving (Typeable)

-- Pre base-4.8 there is no 'displayException', so render via 'show' instead.
instance Show BadPerPackageCompilerPaths where
  show = renderBadPerPackageCompilerPaths
#endif

instance Exception BadPerPackageCompilerPaths where
#if MIN_VERSION_base(4,8,0)
  displayException = renderBadPerPackageCompilerPaths
#endif
--TODO: [nice to have] custom exception subclass for Doc rendering, colour etc

-- | Render the error, reporting only the first offending pair.
-- The second equation is unreachable because the exception is only ever
-- raised with a non-empty list (see 'checkBadPerPackageCompilerPaths').
renderBadPerPackageCompilerPaths :: BadPerPackageCompilerPaths -> String
renderBadPerPackageCompilerPaths
  (BadPerPackageCompilerPaths ((pkgname, progname) : _)) =
    "The path to the compiler program (or programs used by the compiler) "
 ++ "cannot be specified on a per-package basis in the cabal.project file "
 ++ "(i.e. setting the '" ++ progname ++ "-location' for package '"
 ++ prettyShow pkgname ++ "'). All packages have to use the same compiler, so "
 ++ "specify the path in a global 'program-locations' section."
 --TODO: [nice to have] better format control so we can pretty-print the
 -- offending part of the project file. Currently the line wrapping breaks any
 -- formatting.
renderBadPerPackageCompilerPaths _ = error "renderBadPerPackageCompilerPaths"

-- | The project configuration is not allowed to specify program locations for
-- programs used by the compiler as these have to be the same for each set of
-- packages.
--
-- We cannot check this until we know which programs the compiler uses, which
-- in principle is not until we've configured the compiler.
--
-- Throws 'BadPerPackageCompilerPaths'
--
checkBadPerPackageCompilerPaths :: [ConfiguredProgram]
                                -> Map PackageName PackageConfig
                                -> IO ()
checkBadPerPackageCompilerPaths compilerPrograms packagesConfig =
    -- Collect every (package, program) pair where a per-package program path
    -- overrides a program that the compiler itself uses.
    case [ (pkgname, progname)
         | let compProgNames = Set.fromList (map programId compilerPrograms)
         ,  (pkgname, pkgconf) <- Map.toList packagesConfig
         , progname <- Map.keys (getMapLast (packageConfigProgramPaths pkgconf))
         , progname `Set.member` compProgNames ] of
      [] -> return ()
      -- Non-empty by construction, as 'renderBadPerPackageCompilerPaths'
      -- assumes.
      ps -> throwIO (BadPerPackageCompilerPaths ps)
cabal-install-3.8.1.0/src/Distribution/Client/ProjectConfig/0000755000000000000000000000000007346545000022000 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/ProjectConfig/Legacy.hs0000644000000000000000000021142607346545000023546 0ustar0000000000000000{-# LANGUAGE RecordWildCards, NamedFieldPuns, DeriveGeneric, ConstraintKinds, FlexibleInstances #-}

-- | Project configuration, implementation in terms of legacy types.
--
module Distribution.Client.ProjectConfig.Legacy (

    -- Project config skeletons
    ProjectConfigSkeleton,
    parseProjectSkeleton,
    instantiateProjectConfigSkeleton,
    singletonProjectConfigSkeleton,
    projectSkeletonImports,

    -- * Project config in terms of legacy types
    LegacyProjectConfig,
    parseLegacyProjectConfig,
    showLegacyProjectConfig,

    -- * Conversion to and from legacy config types
    commandLineFlagsToProjectConfig,
    convertLegacyProjectConfig,
    convertLegacyGlobalConfig,
    convertToLegacyProjectConfig,

    -- * Internals, just for tests
    parsePackageLocationTokenQ,
    renderPackageLocationToken,
  ) where

import Distribution.Client.Compat.Prelude

import Distribution.Types.Flag (parsecFlagAssignment, FlagName)

import Distribution.Client.ProjectConfig.Types
import Distribution.Client.Types.RepoName (RepoName (..), unRepoName)
import Distribution.Client.Types.Repo (RemoteRepo(..), LocalRepo (..), emptyRemoteRepo)
import Distribution.Client.Types.AllowNewer (AllowNewer(..), AllowOlder(..))
import Distribution.Client.Types.SourceRepo
(sourceRepositoryPackageGrammar, SourceRepoList) import Distribution.Client.Config ( SavedConfig(..), remoteRepoFields, postProcessRepo ) import Distribution.Client.CmdInstall.ClientInstallFlags ( ClientInstallFlags(..), defaultClientInstallFlags , clientInstallOptions ) import Distribution.Compat.Lens (view) import Distribution.Solver.Types.ConstraintSource import Distribution.FieldGrammar import Distribution.Package import Distribution.Types.SourceRepo (RepoType) import Distribution.Types.CondTree ( CondTree (..), CondBranch (..), mapTreeConds, traverseCondTreeC ) import Distribution.PackageDescription ( dispFlagAssignment, Condition (..), ConfVar (..), FlagAssignment ) import Distribution.PackageDescription.Configuration (simplifyWithSysParams) import Distribution.Simple.Compiler ( OptimisationLevel(..), DebugInfoLevel(..), CompilerInfo(..) ) import Distribution.Simple.InstallDirs ( CopyDest (NoCopyDest) ) import Distribution.Simple.Setup ( Flag(..), toFlag, fromFlagOrDefault , ConfigFlags(..), configureOptions , HaddockFlags(..), haddockOptions, defaultHaddockFlags , TestFlags(..), testOptions', defaultTestFlags , BenchmarkFlags(..), benchmarkOptions', defaultBenchmarkFlags , programDbPaths', splitArgs, DumpBuildInfo (NoDumpBuildInfo, DumpBuildInfo) , readPackageDb, showPackageDb ) import Distribution.Client.NixStyleOptions (NixStyleFlags (..)) import Distribution.Client.ProjectFlags (ProjectFlags (..), projectFlagsOptions, defaultProjectFlags) import Distribution.Client.Setup ( GlobalFlags(..), globalCommand , ConfigExFlags(..), configureExOptions, defaultConfigExFlags , InstallFlags(..), installOptions, defaultInstallFlags ) import Distribution.Simple.Program ( programName, knownPrograms ) import Distribution.Simple.Program.Db ( ProgramDb, defaultProgramDb ) import Distribution.Simple.Utils ( lowercase ) import Distribution.Utils.NubList ( toNubList, fromNubList, overNubList ) import Distribution.Simple.LocalBuildInfo ( toPathTemplate, fromPathTemplate ) 
import qualified Distribution.Deprecated.ReadP as Parse
import Distribution.Deprecated.ReadP
         ( ReadP, (+++) )
import qualified Text.PrettyPrint as Disp
import Text.PrettyPrint
         ( Doc, ($+$) )
import qualified Distribution.Deprecated.ParseUtils as ParseUtils
import Distribution.Deprecated.ParseUtils
         ( ParseResult(..), PError(..), syntaxError, PWarning(..)
         , commaNewLineListFieldParsec, newLineListField, parseTokenQ
         , parseHaskellString, showToken
         , simpleFieldParsec, parseFail )
import Distribution.Client.ParseUtils
import Distribution.Simple.Command
         ( CommandUI(commandOptions), ShowOrParseArgs(..)
         , OptionField, option, reqArg' )
import Distribution.Types.PackageVersionConstraint
         ( PackageVersionConstraint )
import Distribution.Parsec (ParsecParser, parsecToken)
import Distribution.System (OS, Arch)

import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.ByteString.Char8 as BS
import Network.URI (URI (..), parseURI)

import Distribution.Fields.ConfVar (parseConditionConfVarFromClause)

import Distribution.Client.HttpUtils

-- (review) restored the </> operator lost in extraction
import System.FilePath ((</>), isPathSeparator, makeValid)
import System.Directory (createDirectoryIfMissing)

------------------------------------------------------------------
-- Handle extended project config files with conditionals and imports.
--

-- | ProjectConfigSkeleton is a tree of conditional blocks and imports wrapping
-- a config. It can be finalized by providing the conditional resolution info
-- and then resolving and downloading the imports
type ProjectConfigSkeleton = CondTree ConfVar [ProjectConfigImport] ProjectConfig

-- | An import is a file path or URI as written in the project file.
type ProjectConfigImport = String

-- | Wrap a plain config as a trivial (conditional- and import-free) skeleton.
singletonProjectConfigSkeleton :: ProjectConfig -> ProjectConfigSkeleton
singletonProjectConfigSkeleton x = CondNode x mempty mempty

-- | Resolve all conditionals against the given platform/compiler parameters
-- and flatten the skeleton into a single 'ProjectConfig'.
instantiateProjectConfigSkeleton :: OS -> Arch -> CompilerInfo -> FlagAssignment
                                 -> ProjectConfigSkeleton -> ProjectConfig
instantiateProjectConfigSkeleton os arch impl _flags skel =
    go $ mapTreeConds (fst . simplifyWithSysParams os arch impl) skel
  where
    go :: CondTree FlagName [ProjectConfigImport] ProjectConfig -> ProjectConfig
    go (CondNode l _imps ts) =
        let branches = concatMap processBranch ts
        in l <> mconcat branches
    -- After simplification every condition should be a literal; anything
    -- else is a (currently unsupported) flag-dependent condition.
    processBranch (CondBranch cnd t mf) = case cnd of
      (Lit True)  -> [go t]
      (Lit False) -> maybe ([]) ((:[]) . go) mf
      _ -> error $ "unable to process condition: " ++ show cnd
           -- TODO it would be nice if there were a pretty printer

-- | All imports mentioned anywhere in the skeleton.
projectSkeletonImports :: ProjectConfigSkeleton -> [ProjectConfigImport]
projectSkeletonImports = view traverseCondTreeC

-- | Parse a project file (given as raw bytes) into a skeleton, recursively
-- fetching and parsing its imports. Remote (URI) imports are downloaded into
-- @cacheDir@; cyclical imports are detected via the @seenImports@ accumulator
-- and reported as parse failures.
parseProjectSkeleton :: FilePath -> HttpTransport -> Verbosity
                     -> [ProjectConfigImport] -> FilePath -> BS.ByteString
                     -> IO (ParseResult ProjectConfigSkeleton)
parseProjectSkeleton cacheDir httpTransport verbosity seenImports source bs =
    (sanityWalkPCS False =<<) <$> liftPR (go []) (ParseUtils.readFields bs)
  where
    -- Walk the field list, accumulating plain fields in @acc@ (in reverse)
    -- until an import or conditional section forces us to flush them.
    go :: [ParseUtils.Field] -> [ParseUtils.Field]
       -> IO (ParseResult ProjectConfigSkeleton)
    go acc (x:xs) = case x of
      (ParseUtils.F l "import" importLoc) ->
        if importLoc `elem` seenImports
          then pure . parseFail $
                 ParseUtils.FromString ("cyclical import of " ++ importLoc) (Just l)
          else do
            -- The fields seen so far become a node tagged with this import.
            let fs = fmap (\z -> CondNode z [importLoc] mempty) $
                       fieldsToConfig (reverse acc)
            res <- parseProjectSkeleton cacheDir httpTransport verbosity
                     (importLoc : seenImports) importLoc
                     =<< fetchImportConfig importLoc
            rest <- go [] xs
            pure . fmap mconcat . sequence $ [fs, res, rest]
      (ParseUtils.Section l "if" p xs') -> do
        subpcs <- go [] xs'
        let fs = fmap singletonProjectConfigSkeleton $ fieldsToConfig (reverse acc)
        (elseClauses, rest) <- parseElseClauses xs
        let condNode =
              (\c pcs e -> CondNode mempty mempty [CondBranch c pcs e]) <$>
                -- we rewrap as as a section so the readFields lexer of the
                -- conditional parser doesn't get confused
                adaptParseError l
                  (parseConditionConfVarFromClause . BS.pack $ "if(" <> p <> ")")
                <*> subpcs
                <*> elseClauses
        pure . fmap mconcat . sequence $ [fs, condNode, rest]
      _ -> go (x:acc) xs
    go acc [] = pure . fmap singletonProjectConfigSkeleton . fieldsToConfig $ reverse acc

    -- Parse a chain of else/elif sections following an "if"; returns the
    -- optional else-branch and the skeleton for whatever follows the chain.
    parseElseClauses :: [ParseUtils.Field]
                     -> IO ( ParseResult (Maybe ProjectConfigSkeleton)
                           , ParseResult ProjectConfigSkeleton )
    parseElseClauses x = case x of
      (ParseUtils.Section _l "else" _p xs':xs) -> do
        subpcs <- go [] xs'
        rest <- go [] xs
        pure (Just <$> subpcs, rest)
      (ParseUtils.Section l "elif" p xs':xs) -> do
        subpcs <- go [] xs'
        (elseClauses, rest) <- parseElseClauses xs
        let condNode =
              (\c pcs e -> CondNode mempty mempty [CondBranch c pcs e]) <$>
                adaptParseError l
                  (parseConditionConfVarFromClause . BS.pack $ "else(" <> p <> ")")
                <*> subpcs
                <*> elseClauses
        pure (Just <$> condNode, rest)
      _ -> (\r -> (pure Nothing, r)) <$> go [] x

    -- Convert accumulated legacy fields into a ProjectConfig, recording the
    -- source file in the provenance.
    fieldsToConfig xs =
      fmap (addProvenance . convertLegacyProjectConfig) $
        parseLegacyProjectConfigFields source xs
    addProvenance x = x {projectConfigProvenance = Set.singleton (Explicit source)}

    adaptParseError _ (Right x) = pure x
    adaptParseError l (Left e)  = parseFail $ ParseUtils.FromString (show e) (Just l)

    -- Thread a ParseResult through an IO continuation, preserving warnings.
    liftPR :: (a -> IO (ParseResult b)) -> ParseResult a -> IO (ParseResult b)
    liftPR f (ParseOk ws x) = addWarnings <$> f x
      where
        addWarnings (ParseOk ws' x') = ParseOk (ws' ++ ws) x'
        addWarnings x' = x'
    liftPR _ (ParseFailed e) = pure $ ParseFailed e

    -- Read an import: URIs are downloaded into the cache dir (under a
    -- sanitized file name), plain paths are read directly.
    fetchImportConfig :: ProjectConfigImport -> IO BS.ByteString
    fetchImportConfig pci = case parseURI pci of
      Just uri -> do
        -- (review) restored the </> operator lost in extraction
        let fp = cacheDir </> map (\x -> if isPathSeparator x then '_' else x)
                                  (makeValid $ show uri)
        createDirectoryIfMissing True cacheDir
        _ <- downloadURI httpTransport verbosity uri fp
        BS.readFile fp
      Nothing -> BS.readFile pci

    -- Does this config fragment try to pick the compiler?
    modifiesCompiler :: ProjectConfig -> Bool
    modifiesCompiler pc =
        isSet projectConfigHcFlavor
     || isSet projectConfigHcPath
     || isSet projectConfigHcPkg
      where
        isSet f = f (projectConfigShared pc) /= NoFlag

    -- Reject compiler settings inside conditional clauses: the conditionals
    -- are resolved against a compiler, so letting them change it is circular.
    sanityWalkPCS :: Bool -> ProjectConfigSkeleton
                  -> ParseResult ProjectConfigSkeleton
    sanityWalkPCS underConditional t@(CondNode d _c comps)
      | underConditional && modifiesCompiler d =
          parseFail $ ParseUtils.FromString
            "Cannot set compiler in a conditional clause of a cabal project file"
            Nothing
      | otherwise = mapM_ sanityWalkBranch comps >> pure t

    sanityWalkBranch :: CondBranch ConfVar [ProjectConfigImport] ProjectConfig
                     -> ParseResult ()
    sanityWalkBranch (CondBranch _c t f) =
        traverse (sanityWalkPCS True) f >> sanityWalkPCS True t >> pure ()


------------------------------------------------------------------
-- Representing the project config file in terms of legacy types
--

-- | We already have parsers\/pretty-printers for almost all the fields in the
-- project config file, but they're in terms of the types used for the command
-- line flags for Setup.hs or cabal commands. We don't want to redefine them
-- all, at least not yet so for the moment we use the parsers at the old types
-- and use conversion functions.
--
-- Ultimately if\/when this project-based approach becomes the default then we
-- can redefine the parsers directly for the new types.
--
-- | Legacy mirror of 'ProjectConfig', expressed with the old command-line
-- flag types so the existing field parsers/printers can be reused.
data LegacyProjectConfig = LegacyProjectConfig {
       legacyPackages          :: [String],
       legacyPackagesOptional  :: [String],
       legacyPackagesRepo      :: [SourceRepoList],
       legacyPackagesNamed     :: [PackageVersionConstraint],

       legacySharedConfig      :: LegacySharedConfig,
       legacyAllConfig         :: LegacyPackageConfig,
       legacyLocalConfig       :: LegacyPackageConfig,
       legacySpecificConfig    :: MapMappend PackageName LegacyPackageConfig
     } deriving (Show, Generic)

-- Monoid/Semigroup via generic field-wise combination (gmempty/gmappend).
instance Monoid LegacyProjectConfig where
  mempty  = gmempty
  mappend = (<>)

instance Semigroup LegacyProjectConfig where
  (<>) = gmappend

-- | Per-package flag groups in legacy form.
data LegacyPackageConfig = LegacyPackageConfig {
       legacyConfigureFlags    :: ConfigFlags,
       legacyInstallPkgFlags   :: InstallFlags,
       legacyHaddockFlags      :: HaddockFlags,
       legacyTestFlags         :: TestFlags,
       legacyBenchmarkFlags    :: BenchmarkFlags
     } deriving (Show, Generic)

instance Monoid LegacyPackageConfig where
  mempty  = gmempty
  mappend = (<>)

instance Semigroup LegacyPackageConfig where
  (<>) = gmappend

-- | Project-wide (shared) flag groups in legacy form.
data LegacySharedConfig = LegacySharedConfig {
       legacyGlobalFlags       :: GlobalFlags,
       legacyConfigureShFlags  :: ConfigFlags,
       legacyConfigureExFlags  :: ConfigExFlags,
       legacyInstallFlags      :: InstallFlags,
       legacyClientInstallFlags:: ClientInstallFlags,
       legacyProjectFlags      :: ProjectFlags
     } deriving (Show, Generic)

instance Monoid LegacySharedConfig where
  mempty  = gmempty
  mappend = (<>)

instance Semigroup LegacySharedConfig where
  (<>) = gmappend


------------------------------------------------------------------
-- Converting from and to the legacy types
--

-- | Convert configuration from the @cabal configure@ or @cabal build@ command
-- line into a 'ProjectConfig' value that can combined with configuration from
-- other sources.
--
-- At the moment this uses the legacy command line flag types. See
-- 'LegacyProjectConfig' for an explanation.
--
commandLineFlagsToProjectConfig :: GlobalFlags
                                -> NixStyleFlags a
                                -> ClientInstallFlags
                                -> ProjectConfig
commandLineFlagsToProjectConfig globalFlags NixStyleFlags {..}
                                clientInstallFlags =
    mempty {
      projectConfigBuildOnly     = convertLegacyBuildOnlyFlags
                                     globalFlags configFlags
                                     installFlags clientInstallFlags
                                     haddockFlags testFlags benchmarkFlags,
      projectConfigShared        = convertLegacyAllPackageFlags
                                     globalFlags configFlags
                                     configExFlags installFlags projectFlags,
      projectConfigLocalPackages = localConfig,
      projectConfigAllPackages   = allConfig
    }
  where
    (localConfig, allConfig) = splitConfig
                                 (convertLegacyPerPackageFlags
                                    configFlags installFlags
                                    haddockFlags testFlags benchmarkFlags)
    -- split the package config (from command line arguments) into
    -- those applied to all packages and those to local only.
    --
    -- for now we will just copy over the ProgramPaths/Extra into
    -- the AllPackages.  The LocalPackages do not inherit them from
    -- AllPackages, and as such need to retain them.
    --
    -- The general decision rule for what to put into allConfig
    -- into localConfig is the following:
    --
    -- - anything that is host/toolchain/env specific should be applied
    --   to all packages, as packagesets have to be host/toolchain/env
    --   consistent.
    -- - anything else should be in the local config and could potentially
    --   be lifted into all-packages vial the `package *` cabal.project
    --   section.
    --
    splitConfig :: PackageConfig -> (PackageConfig, PackageConfig)
    splitConfig pc = (pc
                     , mempty { packageConfigProgramPaths     = packageConfigProgramPaths pc
                              , packageConfigProgramPathExtra = packageConfigProgramPathExtra pc
                              , packageConfigDocumentation    = packageConfigDocumentation pc })

-- | Convert from the types currently used for the user-wide @~/.cabal/config@
-- file into the 'ProjectConfig' type.
--
-- Only a subset of the 'ProjectConfig' can be represented in the user-wide
-- config.
-- In particular it does not include packages that are in the project,
-- and it also doesn't support package-specific configuration (only
-- configuration that applies to all packages).
--
convertLegacyGlobalConfig :: SavedConfig -> ProjectConfig
convertLegacyGlobalConfig
    SavedConfig {
      savedGlobalFlags       = globalFlags,
      savedInstallFlags      = installFlags,
      savedClientInstallFlags= clientInstallFlags,
      savedConfigureFlags    = configFlags,
      savedConfigureExFlags  = configExFlags,
      savedUserInstallDirs   = _,
      savedGlobalInstallDirs = _,
      savedUploadFlags       = _,
      savedReportFlags       = _,
      savedHaddockFlags      = haddockFlags,
      savedTestFlags         = testFlags,
      savedBenchmarkFlags    = benchmarkFlags,
      savedProjectFlags      = projectFlags
    } =
    mempty {
      projectConfigBuildOnly   = configBuildOnly,
      projectConfigShared      = configShared,
      projectConfigAllPackages = configAllPackages
    }
  where
    --TODO: [code cleanup] eliminate use of default*Flags here and specify the
    -- defaults in the various resolve functions in terms of the new types.
    -- Note: '<>' on flags is left-biased defaulting — saved values win over
    -- the defaults where set.
    configExFlags'      = defaultConfigExFlags      <> configExFlags
    installFlags'       = defaultInstallFlags       <> installFlags
    clientInstallFlags' = defaultClientInstallFlags <> clientInstallFlags
    haddockFlags'       = defaultHaddockFlags       <> haddockFlags
    testFlags'          = defaultTestFlags          <> testFlags
    benchmarkFlags'     = defaultBenchmarkFlags     <> benchmarkFlags
    projectFlags'       = defaultProjectFlags       <> projectFlags

    configAllPackages   = convertLegacyPerPackageFlags
                            configFlags installFlags'
                            haddockFlags' testFlags' benchmarkFlags'
    configShared        = convertLegacyAllPackageFlags
                            globalFlags configFlags
                            configExFlags' installFlags' projectFlags'
    configBuildOnly     = convertLegacyBuildOnlyFlags
                            globalFlags configFlags
                            installFlags' clientInstallFlags'
                            haddockFlags' testFlags' benchmarkFlags'


-- | Convert the project config from the legacy types to the 'ProjectConfig'
-- and associated types. See 'LegacyProjectConfig' for an explanation of the
-- approach.
--
convertLegacyProjectConfig :: LegacyProjectConfig -> ProjectConfig
convertLegacyProjectConfig
  LegacyProjectConfig {
    legacyPackages,
    legacyPackagesOptional,
    legacyPackagesRepo,
    legacyPackagesNamed,
    legacySharedConfig = LegacySharedConfig globalFlags configShFlags
                                            configExFlags installSharedFlags
                                            clientInstallFlags projectFlags,
    legacyAllConfig,
    legacyLocalConfig  = LegacyPackageConfig configFlags installPerPkgFlags
                                             haddockFlags testFlags
                                             benchmarkFlags,
    legacySpecificConfig
  } =
    ProjectConfig {
      projectPackages              = legacyPackages,
      projectPackagesOptional      = legacyPackagesOptional,
      projectPackagesRepo          = legacyPackagesRepo,
      projectPackagesNamed         = legacyPackagesNamed,

      projectConfigBuildOnly       = configBuildOnly,
      projectConfigShared          = configPackagesShared,
      -- Provenance is attached later by the parser, not here.
      projectConfigProvenance      = mempty,
      projectConfigAllPackages     = configAllPackages,
      projectConfigLocalPackages   = configLocalPackages,
      projectConfigSpecificPackage = fmap perPackage legacySpecificConfig
    }
  where
    configAllPackages   = convertLegacyPerPackageFlags g i h t b
                            where LegacyPackageConfig g i h t b = legacyAllConfig
    configLocalPackages = convertLegacyPerPackageFlags
                            configFlags installPerPkgFlags haddockFlags
                            testFlags benchmarkFlags
    -- The shared config merges the local configure flags over the shared
    -- ones (left-biased '<>').
    configPackagesShared= convertLegacyAllPackageFlags
                            globalFlags (configFlags <> configShFlags)
                            configExFlags installSharedFlags projectFlags
    configBuildOnly     = convertLegacyBuildOnlyFlags
                            globalFlags configShFlags
                            installSharedFlags clientInstallFlags
                            haddockFlags testFlags benchmarkFlags

    -- Convert one package's legacy per-package flag groups.
    perPackage (LegacyPackageConfig perPkgConfigFlags perPkgInstallFlags
                                    perPkgHaddockFlags perPkgTestFlags
                                    perPkgBenchmarkFlags) =
      convertLegacyPerPackageFlags
        perPkgConfigFlags perPkgInstallFlags perPkgHaddockFlags
        perPkgTestFlags perPkgBenchmarkFlags

-- | Helper used by other conversion functions that returns the
-- 'ProjectConfigShared' subset of the 'ProjectConfig'.
--
convertLegacyAllPackageFlags :: GlobalFlags -> ConfigFlags
                             -> ConfigExFlags -> InstallFlags -> ProjectFlags
                             -> ProjectConfigShared
convertLegacyAllPackageFlags globalFlags configFlags
                             configExFlags installFlags projectFlags =
    ProjectConfigShared{..}
  where
    -- Each pattern bind below renames legacy flag fields to the
    -- projectConfig* names that RecordWildCards picks up above.
    GlobalFlags {
      globalConfigFile        = projectConfigConfigFile,
      globalRemoteRepos       = projectConfigRemoteRepos,
      globalLocalNoIndexRepos = projectConfigLocalNoIndexRepos,
      globalActiveRepos       = projectConfigActiveRepos,
      globalProgPathExtra     = projectConfigProgPathExtra,
      globalStoreDir          = projectConfigStoreDir
    } = globalFlags

    ConfigFlags {
      configDistPref            = projectConfigDistDir,
      configHcFlavor            = projectConfigHcFlavor,
      configHcPath              = projectConfigHcPath,
      configHcPkg               = projectConfigHcPkg,
      --configProgramPathExtra  = projectConfigProgPathExtra DELETE ME
      --configInstallDirs       = projectConfigInstallDirs,
      --configUserInstall       = projectConfigUserInstall,
      configPackageDBs          = projectConfigPackageDBs
    } = configFlags

    ConfigExFlags {
      configCabalVersion        = projectConfigCabalVersion,
      configExConstraints       = projectConfigConstraints,
      configPreferences         = projectConfigPreferences,
      configSolver              = projectConfigSolver,
      configAllowOlder          = projectConfigAllowOlder,
      configAllowNewer          = projectConfigAllowNewer,
      configWriteGhcEnvironmentFilesPolicy
                                = projectConfigWriteGhcEnvironmentFilesPolicy
    } = configExFlags

    InstallFlags {
      installHaddockIndex       = projectConfigHaddockIndex,
      --installReinstall        = projectConfigReinstall,
      --installAvoidReinstalls  = projectConfigAvoidReinstalls,
      --installOverrideReinstall = projectConfigOverrideReinstall,
      installIndexState         = projectConfigIndexState,
      installMaxBackjumps       = projectConfigMaxBackjumps,
      --installUpgradeDeps      = projectConfigUpgradeDeps,
      installReorderGoals       = projectConfigReorderGoals,
      installCountConflicts     = projectConfigCountConflicts,
      installFineGrainedConflicts = projectConfigFineGrainedConflicts,
      installMinimizeConflictSet = projectConfigMinimizeConflictSet,
      installPerComponent       = projectConfigPerComponent,
      installIndependentGoals   = projectConfigIndependentGoals,
      --installShadowPkgs       = projectConfigShadowPkgs,
      installStrongFlags        = projectConfigStrongFlags,
      installAllowBootLibInstalls = projectConfigAllowBootLibInstalls,
      installOnlyConstrained    = projectConfigOnlyConstrained
    } = installFlags

    ProjectFlags
      { flagProjectFileName     = projectConfigProjectFile
      , flagIgnoreProject       = projectConfigIgnoreProject
      } = projectFlags

-- | Helper used by other conversion functions that returns the
-- 'PackageConfig' subset of the 'ProjectConfig'.
--
convertLegacyPerPackageFlags :: ConfigFlags -> InstallFlags
                             -> HaddockFlags -> TestFlags -> BenchmarkFlags
                             -> PackageConfig
convertLegacyPerPackageFlags configFlags installFlags
                             haddockFlags testFlags benchmarkFlags =
    PackageConfig{..}
  where
    -- Same renaming-by-pattern-bind scheme as in
    -- convertLegacyAllPackageFlags above.
    ConfigFlags {
      configProgramPaths,
      configProgramArgs,
      configProgramPathExtra    = packageConfigProgramPathExtra,
      configVanillaLib          = packageConfigVanillaLib,
      configProfLib             = packageConfigProfLib,
      configSharedLib           = packageConfigSharedLib,
      configStaticLib           = packageConfigStaticLib,
      configDynExe              = packageConfigDynExe,
      configFullyStaticExe      = packageConfigFullyStaticExe,
      configProfExe             = packageConfigProfExe,
      configProf                = packageConfigProf,
      configProfDetail          = packageConfigProfDetail,
      configProfLibDetail       = packageConfigProfLibDetail,
      configConfigureArgs       = packageConfigConfigureArgs,
      configOptimization        = packageConfigOptimization,
      configProgPrefix          = packageConfigProgPrefix,
      configProgSuffix          = packageConfigProgSuffix,
      configGHCiLib             = packageConfigGHCiLib,
      configSplitSections       = packageConfigSplitSections,
      configSplitObjs           = packageConfigSplitObjs,
      configStripExes           = packageConfigStripExes,
      configStripLibs           = packageConfigStripLibs,
      configExtraLibDirs        = packageConfigExtraLibDirs,
      configExtraLibDirsStatic  = packageConfigExtraLibDirsStatic,
      configExtraFrameworkDirs  = packageConfigExtraFrameworkDirs,
      configExtraIncludeDirs    = packageConfigExtraIncludeDirs,
      configConfigurationsFlags = packageConfigFlagAssignment,
      configTests               = packageConfigTests,
      configBenchmarks          = packageConfigBenchmarks,
      configCoverage            = coverage,
      configLibCoverage         = libcoverage, --deprecated
      configDebugInfo           = packageConfigDebugInfo,
      configDumpBuildInfo       = packageConfigDumpBuildInfo,
      configRelocatable         = packageConfigRelocatable
    } = configFlags

    -- Program paths/args come as assoc lists; fold them into the map-based
    -- representations used by PackageConfig.
    packageConfigProgramPaths  = MapLast    (Map.fromList configProgramPaths)
    packageConfigProgramArgs   = MapMappend (Map.fromListWith (++) configProgramArgs)

    -- Merge the current and deprecated coverage flags.
    packageConfigCoverage      = coverage <> libcoverage
    --TODO: defer this merging to the resolve phase

    InstallFlags {
      installDocumentation      = packageConfigDocumentation,
      installRunTests           = packageConfigRunTests
    } = installFlags

    HaddockFlags {
      haddockHoogle             = packageConfigHaddockHoogle,
      haddockHtml               = packageConfigHaddockHtml,
      haddockHtmlLocation       = packageConfigHaddockHtmlLocation,
      haddockForeignLibs        = packageConfigHaddockForeignLibs,
      haddockForHackage         = packageConfigHaddockForHackage,
      haddockExecutables        = packageConfigHaddockExecutables,
      haddockTestSuites         = packageConfigHaddockTestSuites,
      haddockBenchmarks         = packageConfigHaddockBenchmarks,
      haddockInternal           = packageConfigHaddockInternal,
      haddockCss                = packageConfigHaddockCss,
      haddockLinkedSource       = packageConfigHaddockLinkedSource,
      haddockQuickJump          = packageConfigHaddockQuickJump,
      haddockHscolourCss        = packageConfigHaddockHscolourCss,
      haddockContents           = packageConfigHaddockContents
    } = haddockFlags

    TestFlags {
      testHumanLog              = packageConfigTestHumanLog,
      testMachineLog            = packageConfigTestMachineLog,
      testShowDetails           = packageConfigTestShowDetails,
      testKeepTix               = packageConfigTestKeepTix,
      testWrapper               = packageConfigTestWrapper,
      testFailWhenNoTestSuites  = packageConfigTestFailWhenNoTestSuites,
      testOptions               = packageConfigTestTestOptions
    } = testFlags

    BenchmarkFlags {
      benchmarkOptions          = packageConfigBenchmarkOptions
    } = benchmarkFlags

-- | Helper used by other conversion functions that returns the
-- 'ProjectConfigBuildOnly' subset of the 'ProjectConfig'.
-- convertLegacyBuildOnlyFlags :: GlobalFlags -> ConfigFlags -> InstallFlags -> ClientInstallFlags -> HaddockFlags -> TestFlags -> BenchmarkFlags -> ProjectConfigBuildOnly convertLegacyBuildOnlyFlags globalFlags configFlags installFlags clientInstallFlags haddockFlags _ _ = ProjectConfigBuildOnly{..} where projectConfigClientInstallFlags = clientInstallFlags GlobalFlags { globalCacheDir = projectConfigCacheDir, globalLogsDir = projectConfigLogsDir, globalHttpTransport = projectConfigHttpTransport, globalIgnoreExpiry = projectConfigIgnoreExpiry } = globalFlags ConfigFlags { configVerbosity = projectConfigVerbosity } = configFlags InstallFlags { installDryRun = projectConfigDryRun, installOnlyDownload = projectConfigOnlyDownload, installOnly = _, installOnlyDeps = projectConfigOnlyDeps, installRootCmd = _, installSummaryFile = projectConfigSummaryFile, installLogFile = projectConfigLogFile, installBuildReports = projectConfigBuildReports, installReportPlanningFailure = projectConfigReportPlanningFailure, installSymlinkBinDir = projectConfigSymlinkBinDir, installNumJobs = projectConfigNumJobs, installKeepGoing = projectConfigKeepGoing, installOfflineMode = projectConfigOfflineMode } = installFlags HaddockFlags { haddockKeepTempFiles = projectConfigKeepTempFiles --TODO: this ought to live elsewhere } = haddockFlags convertToLegacyProjectConfig :: ProjectConfig -> LegacyProjectConfig convertToLegacyProjectConfig projectConfig@ProjectConfig { projectPackages, projectPackagesOptional, projectPackagesRepo, projectPackagesNamed, projectConfigAllPackages, projectConfigLocalPackages, projectConfigSpecificPackage } = LegacyProjectConfig { legacyPackages = projectPackages, legacyPackagesOptional = projectPackagesOptional, legacyPackagesRepo = projectPackagesRepo, legacyPackagesNamed = projectPackagesNamed, legacySharedConfig = convertToLegacySharedConfig projectConfig, legacyAllConfig = convertToLegacyPerPackageConfig projectConfigAllPackages, legacyLocalConfig = 
convertToLegacyAllPackageConfig projectConfig <> convertToLegacyPerPackageConfig projectConfigLocalPackages, legacySpecificConfig = fmap convertToLegacyPerPackageConfig projectConfigSpecificPackage } convertToLegacySharedConfig :: ProjectConfig -> LegacySharedConfig convertToLegacySharedConfig ProjectConfig { projectConfigBuildOnly = ProjectConfigBuildOnly {..}, projectConfigShared = ProjectConfigShared {..}, projectConfigAllPackages = PackageConfig { packageConfigDocumentation } } = LegacySharedConfig { legacyGlobalFlags = globalFlags , legacyConfigureShFlags = configFlags , legacyConfigureExFlags = configExFlags , legacyInstallFlags = installFlags , legacyClientInstallFlags = projectConfigClientInstallFlags , legacyProjectFlags = projectFlags } where globalFlags = GlobalFlags { globalVersion = mempty, globalNumericVersion = mempty, globalConfigFile = projectConfigConfigFile, globalConstraintsFile = mempty, globalRemoteRepos = projectConfigRemoteRepos, globalCacheDir = projectConfigCacheDir, globalLocalNoIndexRepos = projectConfigLocalNoIndexRepos, globalActiveRepos = projectConfigActiveRepos, globalLogsDir = projectConfigLogsDir, globalIgnoreExpiry = projectConfigIgnoreExpiry, globalHttpTransport = projectConfigHttpTransport, globalNix = mempty, globalStoreDir = projectConfigStoreDir, globalProgPathExtra = projectConfigProgPathExtra } configFlags = mempty { configVerbosity = projectConfigVerbosity, configDistPref = projectConfigDistDir, configPackageDBs = projectConfigPackageDBs } configExFlags = ConfigExFlags { configCabalVersion = projectConfigCabalVersion, configAppend = mempty, configBackup = mempty, configExConstraints = projectConfigConstraints, configPreferences = projectConfigPreferences, configSolver = projectConfigSolver, configAllowOlder = projectConfigAllowOlder, configAllowNewer = projectConfigAllowNewer, configWriteGhcEnvironmentFilesPolicy = projectConfigWriteGhcEnvironmentFilesPolicy } installFlags = InstallFlags { installDocumentation = 
packageConfigDocumentation, installHaddockIndex = projectConfigHaddockIndex, installDest = Flag NoCopyDest, installDryRun = projectConfigDryRun, installOnlyDownload = projectConfigOnlyDownload, installReinstall = mempty, --projectConfigReinstall, installAvoidReinstalls = mempty, --projectConfigAvoidReinstalls, installOverrideReinstall = mempty, --projectConfigOverrideReinstall, installMaxBackjumps = projectConfigMaxBackjumps, installUpgradeDeps = mempty, --projectConfigUpgradeDeps, installReorderGoals = projectConfigReorderGoals, installCountConflicts = projectConfigCountConflicts, installFineGrainedConflicts = projectConfigFineGrainedConflicts, installMinimizeConflictSet = projectConfigMinimizeConflictSet, installIndependentGoals = projectConfigIndependentGoals, installShadowPkgs = mempty, --projectConfigShadowPkgs, installStrongFlags = projectConfigStrongFlags, installAllowBootLibInstalls = projectConfigAllowBootLibInstalls, installOnlyConstrained = projectConfigOnlyConstrained, installOnly = mempty, installOnlyDeps = projectConfigOnlyDeps, installIndexState = projectConfigIndexState, installRootCmd = mempty, --no longer supported installSummaryFile = projectConfigSummaryFile, installLogFile = projectConfigLogFile, installBuildReports = projectConfigBuildReports, installReportPlanningFailure = projectConfigReportPlanningFailure, installSymlinkBinDir = projectConfigSymlinkBinDir, installPerComponent = projectConfigPerComponent, installNumJobs = projectConfigNumJobs, installKeepGoing = projectConfigKeepGoing, installRunTests = mempty, installOfflineMode = projectConfigOfflineMode } projectFlags = ProjectFlags { flagProjectFileName = projectConfigProjectFile , flagIgnoreProject = projectConfigIgnoreProject } convertToLegacyAllPackageConfig :: ProjectConfig -> LegacyPackageConfig convertToLegacyAllPackageConfig ProjectConfig { projectConfigBuildOnly = ProjectConfigBuildOnly {..}, projectConfigShared = ProjectConfigShared {..} } = LegacyPackageConfig { 
legacyConfigureFlags = configFlags, legacyInstallPkgFlags= mempty, legacyHaddockFlags = haddockFlags, legacyTestFlags = mempty, legacyBenchmarkFlags = mempty } where configFlags = ConfigFlags { configArgs = mempty, configPrograms_ = mempty, configProgramPaths = mempty, configProgramArgs = mempty, configProgramPathExtra = mempty, configHcFlavor = projectConfigHcFlavor, configHcPath = projectConfigHcPath, configHcPkg = projectConfigHcPkg, configInstantiateWith = mempty, configVanillaLib = mempty, configProfLib = mempty, configSharedLib = mempty, configStaticLib = mempty, configDynExe = mempty, configFullyStaticExe = mempty, configProfExe = mempty, configProf = mempty, configProfDetail = mempty, configProfLibDetail = mempty, configConfigureArgs = mempty, configOptimization = mempty, configProgPrefix = mempty, configProgSuffix = mempty, configInstallDirs = mempty, configScratchDir = mempty, configDistPref = mempty, configCabalFilePath = mempty, configVerbosity = mempty, configUserInstall = mempty, --projectConfigUserInstall, configPackageDBs = mempty, configGHCiLib = mempty, configSplitSections = mempty, configSplitObjs = mempty, configStripExes = mempty, configStripLibs = mempty, configExtraLibDirs = mempty, configExtraLibDirsStatic = mempty, configExtraFrameworkDirs = mempty, configConstraints = mempty, configDependencies = mempty, configExtraIncludeDirs = mempty, configDeterministic = mempty, configIPID = mempty, configCID = mempty, configConfigurationsFlags = mempty, configTests = mempty, configCoverage = mempty, --TODO: don't merge configLibCoverage = mempty, --TODO: don't merge configExactConfiguration = mempty, configBenchmarks = mempty, configFlagError = mempty, --TODO: ??? 
configRelocatable = mempty, configDebugInfo = mempty, configUseResponseFiles = mempty, configDumpBuildInfo = mempty, configAllowDependingOnPrivateLibs = mempty } haddockFlags = mempty { haddockKeepTempFiles = projectConfigKeepTempFiles } convertToLegacyPerPackageConfig :: PackageConfig -> LegacyPackageConfig convertToLegacyPerPackageConfig PackageConfig {..} = LegacyPackageConfig { legacyConfigureFlags = configFlags, legacyInstallPkgFlags = installFlags, legacyHaddockFlags = haddockFlags, legacyTestFlags = testFlags, legacyBenchmarkFlags = benchmarkFlags } where configFlags = ConfigFlags { configArgs = mempty, configPrograms_ = configPrograms_ mempty, configProgramPaths = Map.toList (getMapLast packageConfigProgramPaths), configProgramArgs = Map.toList (getMapMappend packageConfigProgramArgs), configProgramPathExtra = packageConfigProgramPathExtra, configHcFlavor = mempty, configHcPath = mempty, configHcPkg = mempty, configInstantiateWith = mempty, configVanillaLib = packageConfigVanillaLib, configProfLib = packageConfigProfLib, configSharedLib = packageConfigSharedLib, configStaticLib = packageConfigStaticLib, configDynExe = packageConfigDynExe, configFullyStaticExe = packageConfigFullyStaticExe, configProfExe = packageConfigProfExe, configProf = packageConfigProf, configProfDetail = packageConfigProfDetail, configProfLibDetail = packageConfigProfLibDetail, configConfigureArgs = packageConfigConfigureArgs, configOptimization = packageConfigOptimization, configProgPrefix = packageConfigProgPrefix, configProgSuffix = packageConfigProgSuffix, configInstallDirs = mempty, configScratchDir = mempty, configDistPref = mempty, configCabalFilePath = mempty, configVerbosity = mempty, configUserInstall = mempty, configPackageDBs = mempty, configGHCiLib = packageConfigGHCiLib, configSplitSections = packageConfigSplitSections, configSplitObjs = packageConfigSplitObjs, configStripExes = packageConfigStripExes, configStripLibs = packageConfigStripLibs, configExtraLibDirs = 
packageConfigExtraLibDirs, configExtraLibDirsStatic = packageConfigExtraLibDirsStatic, configExtraFrameworkDirs = packageConfigExtraFrameworkDirs, configConstraints = mempty, configDependencies = mempty, configExtraIncludeDirs = packageConfigExtraIncludeDirs, configIPID = mempty, configCID = mempty, configDeterministic = mempty, configConfigurationsFlags = packageConfigFlagAssignment, configTests = packageConfigTests, configCoverage = packageConfigCoverage, --TODO: don't merge configLibCoverage = packageConfigCoverage, --TODO: don't merge configExactConfiguration = mempty, configBenchmarks = packageConfigBenchmarks, configFlagError = mempty, --TODO: ??? configRelocatable = packageConfigRelocatable, configDebugInfo = packageConfigDebugInfo, configUseResponseFiles = mempty, configDumpBuildInfo = packageConfigDumpBuildInfo, configAllowDependingOnPrivateLibs = mempty } installFlags = mempty { installDocumentation = packageConfigDocumentation, installRunTests = packageConfigRunTests } haddockFlags = HaddockFlags { haddockProgramPaths = mempty, haddockProgramArgs = mempty, haddockHoogle = packageConfigHaddockHoogle, haddockHtml = packageConfigHaddockHtml, haddockHtmlLocation = packageConfigHaddockHtmlLocation, haddockForHackage = packageConfigHaddockForHackage, haddockForeignLibs = packageConfigHaddockForeignLibs, haddockExecutables = packageConfigHaddockExecutables, haddockTestSuites = packageConfigHaddockTestSuites, haddockBenchmarks = packageConfigHaddockBenchmarks, haddockInternal = packageConfigHaddockInternal, haddockCss = packageConfigHaddockCss, haddockLinkedSource = packageConfigHaddockLinkedSource, haddockQuickJump = packageConfigHaddockQuickJump, haddockHscolourCss = packageConfigHaddockHscolourCss, haddockContents = packageConfigHaddockContents, haddockDistPref = mempty, haddockKeepTempFiles = mempty, haddockVerbosity = mempty, haddockCabalFilePath = mempty, haddockArgs = mempty } testFlags = TestFlags { testDistPref = mempty, testVerbosity = mempty, 
testHumanLog = packageConfigTestHumanLog, testMachineLog = packageConfigTestMachineLog, testShowDetails = packageConfigTestShowDetails, testKeepTix = packageConfigTestKeepTix, testWrapper = packageConfigTestWrapper, testFailWhenNoTestSuites = packageConfigTestFailWhenNoTestSuites, testOptions = packageConfigTestTestOptions } benchmarkFlags = BenchmarkFlags { benchmarkDistPref = mempty, benchmarkVerbosity = mempty, benchmarkOptions = packageConfigBenchmarkOptions } ------------------------------------------------ -- Parsing and showing the project config file -- parseLegacyProjectConfigFields :: FilePath -> [ParseUtils.Field] -> ParseResult LegacyProjectConfig parseLegacyProjectConfigFields source = parseFieldsAndSections (legacyProjectConfigFieldDescrs constraintSrc) legacyPackageConfigSectionDescrs legacyPackageConfigFGSectionDescrs mempty where constraintSrc = ConstraintSourceProjectConfig source parseLegacyProjectConfig :: FilePath -> BS.ByteString -> ParseResult LegacyProjectConfig parseLegacyProjectConfig source bs = parseLegacyProjectConfigFields source =<< ParseUtils.readFields bs showLegacyProjectConfig :: LegacyProjectConfig -> String showLegacyProjectConfig config = Disp.render $ showConfig (legacyProjectConfigFieldDescrs constraintSrc) legacyPackageConfigSectionDescrs legacyPackageConfigFGSectionDescrs config $+$ Disp.text "" where -- Note: ConstraintSource is unused when pretty-printing. We fake -- it here to avoid having to pass it on call-sites. It's not great -- but requires re-work of how we annotate provenance. constraintSrc = ConstraintSourceProjectConfig "unused" legacyProjectConfigFieldDescrs :: ConstraintSource -> [FieldDescr LegacyProjectConfig] legacyProjectConfigFieldDescrs constraintSrc = [ newLineListField "packages" (Disp.text . renderPackageLocationToken) parsePackageLocationTokenQ legacyPackages (\v flags -> flags { legacyPackages = v }) , newLineListField "optional-packages" (Disp.text . 
renderPackageLocationToken) parsePackageLocationTokenQ legacyPackagesOptional (\v flags -> flags { legacyPackagesOptional = v }) , commaNewLineListFieldParsec "extra-packages" pretty parsec legacyPackagesNamed (\v flags -> flags { legacyPackagesNamed = v }) ] ++ map (liftField legacySharedConfig (\flags conf -> conf { legacySharedConfig = flags })) (legacySharedConfigFieldDescrs constraintSrc) ++ map (liftField legacyLocalConfig (\flags conf -> conf { legacyLocalConfig = flags })) legacyPackageConfigFieldDescrs -- | This is a bit tricky since it has to cover globs which have embedded @,@ -- chars. But we don't just want to parse strictly as a glob since we want to -- allow http urls which don't parse as globs, and possibly some -- system-dependent file paths. So we parse fairly liberally as a token, but -- we allow @,@ inside matched @{}@ braces. -- parsePackageLocationTokenQ :: ReadP r String parsePackageLocationTokenQ = parseHaskellString Parse.<++ parsePackageLocationToken where parsePackageLocationToken :: ReadP r String parsePackageLocationToken = fmap fst (Parse.gather outerTerm) where outerTerm = alternateEither1 outerToken (braces innerTerm) innerTerm = alternateEither innerToken (braces innerTerm) outerToken = Parse.munch1 outerChar >> return () innerToken = Parse.munch1 innerChar >> return () outerChar c = not (isSpace c || c == '{' || c == '}' || c == ',') innerChar c = not (isSpace c || c == '{' || c == '}') braces = Parse.between (Parse.char '{') (Parse.char '}') alternateEither, alternateEither1, alternatePQs, alternate1PQs, alternateQsP, alternate1QsP :: ReadP r () -> ReadP r () -> ReadP r () alternateEither1 p q = alternate1PQs p q +++ alternate1QsP q p alternateEither p q = alternateEither1 p q +++ return () alternate1PQs p q = p >> alternateQsP q p alternatePQs p q = alternate1PQs p q +++ return () alternate1QsP q p = Parse.many1 q >> alternatePQs p q alternateQsP q p = alternate1QsP q p +++ return () renderPackageLocationToken :: String -> String 
renderPackageLocationToken s | needsQuoting = show s | otherwise = s where needsQuoting = not (ok 0 s) || s == "." -- . on its own on a line has special meaning || take 2 s == "--" -- on its own line is comment syntax --TODO: [code cleanup] these "." and "--" escaping issues -- ought to be dealt with systematically in ParseUtils. ok :: Int -> String -> Bool ok n [] = n == 0 ok _ ('"':_) = False ok n ('{':cs) = ok (n+1) cs ok n ('}':cs) = ok (n-1) cs ok n (',':cs) = (n > 0) && ok n cs ok _ (c:_) | isSpace c = False ok n (_ :cs) = ok n cs legacySharedConfigFieldDescrs :: ConstraintSource -> [FieldDescr LegacySharedConfig] legacySharedConfigFieldDescrs constraintSrc = concat [ liftFields legacyGlobalFlags (\flags conf -> conf { legacyGlobalFlags = flags }) . addFields [ newLineListField "extra-prog-path-shared-only" showTokenQ parseTokenQ (fromNubList . globalProgPathExtra) (\v conf -> conf { globalProgPathExtra = toNubList v }) ] . filterFields [ "remote-repo-cache" , "logs-dir", "store-dir", "ignore-expiry", "http-transport" , "active-repositories" ] . commandOptionsToFields $ commandOptions (globalCommand []) ParseArgs , liftFields legacyConfigureShFlags (\flags conf -> conf { legacyConfigureShFlags = flags }) . addFields [ commaNewLineListFieldParsec "package-dbs" (Disp.text . showPackageDb) (fmap readPackageDb parsecToken) configPackageDBs (\v conf -> conf { configPackageDBs = v }) ] . filterFields ["verbose", "builddir" ] . commandOptionsToFields $ configureOptions ParseArgs , liftFields legacyConfigureExFlags (\flags conf -> conf { legacyConfigureExFlags = flags }) . addFields [ commaNewLineListFieldParsec "constraints" (pretty . 
fst) (fmap (\constraint -> (constraint, constraintSrc)) parsec) configExConstraints (\v conf -> conf { configExConstraints = v }) , commaNewLineListFieldParsec "preferences" pretty parsec configPreferences (\v conf -> conf { configPreferences = v }) , monoidFieldParsec "allow-older" (maybe mempty pretty) (fmap Just parsec) (fmap unAllowOlder . configAllowOlder) (\v conf -> conf { configAllowOlder = fmap AllowOlder v }) , monoidFieldParsec "allow-newer" (maybe mempty pretty) (fmap Just parsec) (fmap unAllowNewer . configAllowNewer) (\v conf -> conf { configAllowNewer = fmap AllowNewer v }) ] . filterFields [ "cabal-lib-version", "solver", "write-ghc-environment-files" -- not "constraint" or "preference", we use our own plural ones above ] . commandOptionsToFields $ configureExOptions ParseArgs constraintSrc , liftFields legacyInstallFlags (\flags conf -> conf { legacyInstallFlags = flags }) . addFields [ newLineListField "build-summary" (showTokenQ . fromPathTemplate) (fmap toPathTemplate parseTokenQ) (fromNubList . installSummaryFile) (\v conf -> conf { installSummaryFile = toNubList v }) ] . filterFields [ "doc-index-file" , "root-cmd", "symlink-bindir" , "build-log" , "remote-build-reporting", "report-planning-failure" , "jobs", "keep-going", "offline", "per-component" -- solver flags: , "max-backjumps", "reorder-goals", "count-conflicts" , "fine-grained-conflicts" , "minimize-conflict-set", "independent-goals" , "strong-flags" , "allow-boot-library-installs" , "reject-unconstrained-dependencies", "index-state" ] . commandOptionsToFields $ installOptions ParseArgs , liftFields legacyClientInstallFlags (\flags conf -> conf { legacyClientInstallFlags = flags }) . commandOptionsToFields $ clientInstallOptions ParseArgs , liftFields legacyProjectFlags (\flags conf -> conf { legacyProjectFlags = flags }) . 
commandOptionsToFields $ projectFlagsOptions ParseArgs ] legacyPackageConfigFieldDescrs :: [FieldDescr LegacyPackageConfig] legacyPackageConfigFieldDescrs = ( liftFields legacyConfigureFlags (\flags conf -> conf { legacyConfigureFlags = flags }) . addFields [ newLineListField "extra-include-dirs" showTokenQ parseTokenQ configExtraIncludeDirs (\v conf -> conf { configExtraIncludeDirs = v }) , newLineListField "extra-lib-dirs" showTokenQ parseTokenQ configExtraLibDirs (\v conf -> conf { configExtraLibDirs = v }) , newLineListField "extra-lib-dirs-static" showTokenQ parseTokenQ configExtraLibDirsStatic (\v conf -> conf { configExtraLibDirsStatic = v }) , newLineListField "extra-framework-dirs" showTokenQ parseTokenQ configExtraFrameworkDirs (\v conf -> conf { configExtraFrameworkDirs = v }) , newLineListField "extra-prog-path" showTokenQ parseTokenQ (fromNubList . configProgramPathExtra) (\v conf -> conf { configProgramPathExtra = toNubList v }) , newLineListField "configure-options" showTokenQ parseTokenQ configConfigureArgs (\v conf -> conf { configConfigureArgs = v }) , simpleFieldParsec "flags" dispFlagAssignment parsecFlagAssignment configConfigurationsFlags (\v conf -> conf { configConfigurationsFlags = v }) , overrideDumpBuildInfo ] . filterFields [ "with-compiler", "with-hc-pkg" , "program-prefix", "program-suffix" , "library-vanilla", "library-profiling" , "shared", "static", "executable-dynamic", "executable-static" , "profiling", "executable-profiling" , "profiling-detail", "library-profiling-detail" , "library-for-ghci", "split-objs", "split-sections" , "executable-stripping", "library-stripping" , "tests", "benchmarks" , "coverage", "library-coverage" , "relocatable" -- not "extra-include-dirs", "extra-lib-dirs", "extra-framework-dirs" -- or "extra-prog-path". We use corrected ones above that parse -- as list fields. ] . 
commandOptionsToFields ) (configureOptions ParseArgs) ++ liftFields legacyConfigureFlags (\flags conf -> conf { legacyConfigureFlags = flags }) [ overrideFieldCompiler , overrideFieldOptimization , overrideFieldDebugInfo ] ++ ( liftFields legacyInstallPkgFlags (\flags conf -> conf { legacyInstallPkgFlags = flags }) . filterFields [ "documentation", "run-tests" ] . commandOptionsToFields ) (installOptions ParseArgs) ++ ( liftFields legacyHaddockFlags (\flags conf -> conf { legacyHaddockFlags = flags }) . mapFieldNames ("haddock-"++) . addFields [ simpleFieldParsec "for-hackage" -- TODO: turn this into a library function (fromFlagOrDefault Disp.empty . fmap pretty) (toFlag <$> parsec <|> pure mempty) haddockForHackage (\v conf -> conf { haddockForHackage = v }) ] . filterFields [ "hoogle", "html", "html-location" , "foreign-libraries" , "executables", "tests", "benchmarks", "all", "internal", "css" , "hyperlink-source", "quickjump", "hscolour-css" , "contents-location", "keep-temp-files" ] . commandOptionsToFields ) (haddockOptions ParseArgs) ++ ( liftFields legacyTestFlags (\flags conf -> conf { legacyTestFlags = flags }) . mapFieldNames prefixTest . addFields [ newLineListField "test-options" (showTokenQ . fromPathTemplate) (fmap toPathTemplate parseTokenQ) testOptions (\v conf -> conf { testOptions = v }) ] . filterFields [ "log", "machine-log", "show-details", "keep-tix-files" , "fail-when-no-test-suites", "test-wrapper" ] . commandOptionsToFields ) (testOptions' ParseArgs) ++ ( liftFields legacyBenchmarkFlags (\flags conf -> conf { legacyBenchmarkFlags = flags }) . addFields [ newLineListField "benchmark-options" (showTokenQ . fromPathTemplate) (fmap toPathTemplate parseTokenQ) benchmarkOptions (\v conf -> conf { benchmarkOptions = v }) ] . filterFields [] . commandOptionsToFields ) (benchmarkOptions' ParseArgs) where overrideFieldCompiler = simpleFieldParsec "compiler" (fromFlagOrDefault Disp.empty . 
fmap pretty) (toFlag <$> parsec <|> pure mempty) configHcFlavor (\v flags -> flags { configHcFlavor = v }) overrideDumpBuildInfo = liftField configDumpBuildInfo (\v flags -> flags { configDumpBuildInfo = v }) $ let name = "build-info" in FieldDescr name (\f -> case f of Flag NoDumpBuildInfo -> Disp.text "False" Flag DumpBuildInfo -> Disp.text "True" _ -> Disp.empty) (\line str _ -> case () of _ | str == "False" -> ParseOk [] (Flag NoDumpBuildInfo) | str == "True" -> ParseOk [] (Flag DumpBuildInfo) | lstr == "false" -> ParseOk [caseWarning name] (Flag NoDumpBuildInfo) | lstr == "true" -> ParseOk [caseWarning name] (Flag DumpBuildInfo) | otherwise -> ParseFailed (NoParse name line) where lstr = lowercase str) -- TODO: [code cleanup] The following is a hack. The "optimization" and -- "debug-info" fields are OptArg, and viewAsFieldDescr fails on that. -- Instead of a hand-written parser and printer, we should handle this case -- properly in the library. overrideFieldOptimization = liftField configOptimization (\v flags -> flags { configOptimization = v }) $ let name = "optimization" in FieldDescr name (\f -> case f of Flag NoOptimisation -> Disp.text "False" Flag NormalOptimisation -> Disp.text "True" Flag MaximumOptimisation -> Disp.text "2" _ -> Disp.empty) (\line str _ -> case () of _ | str == "False" -> ParseOk [] (Flag NoOptimisation) | str == "True" -> ParseOk [] (Flag NormalOptimisation) | str == "0" -> ParseOk [] (Flag NoOptimisation) | str == "1" -> ParseOk [] (Flag NormalOptimisation) | str == "2" -> ParseOk [] (Flag MaximumOptimisation) | lstr == "false" -> ParseOk [caseWarning name] (Flag NoOptimisation) | lstr == "true" -> ParseOk [caseWarning name] (Flag NormalOptimisation) | otherwise -> ParseFailed (NoParse name line) where lstr = lowercase str) overrideFieldDebugInfo = liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $ let name = "debug-info" in FieldDescr name (\f -> case f of Flag NoDebugInfo -> Disp.text "False" Flag 
MinimalDebugInfo -> Disp.text "1" Flag NormalDebugInfo -> Disp.text "True" Flag MaximalDebugInfo -> Disp.text "3" _ -> Disp.empty) (\line str _ -> case () of _ | str == "False" -> ParseOk [] (Flag NoDebugInfo) | str == "True" -> ParseOk [] (Flag NormalDebugInfo) | str == "0" -> ParseOk [] (Flag NoDebugInfo) | str == "1" -> ParseOk [] (Flag MinimalDebugInfo) | str == "2" -> ParseOk [] (Flag NormalDebugInfo) | str == "3" -> ParseOk [] (Flag MaximalDebugInfo) | lstr == "false" -> ParseOk [caseWarning name] (Flag NoDebugInfo) | lstr == "true" -> ParseOk [caseWarning name] (Flag NormalDebugInfo) | otherwise -> ParseFailed (NoParse name line) where lstr = lowercase str) caseWarning name = PWarning $ "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'." prefixTest name | "test-" `isPrefixOf` name = name | otherwise = "test-" ++ name legacyPackageConfigFGSectionDescrs :: ( FieldGrammar c g, Applicative (g SourceRepoList) , c (Identity RepoType) , c (List NoCommaFSep FilePathNT String) , c (NonEmpty' NoCommaFSep Token String) ) => [FGSectionDescr g LegacyProjectConfig] legacyPackageConfigFGSectionDescrs = [ packageRepoSectionDescr ] legacyPackageConfigSectionDescrs :: [SectionDescr LegacyProjectConfig] legacyPackageConfigSectionDescrs = [ packageSpecificOptionsSectionDescr , liftSection legacyLocalConfig (\flags conf -> conf { legacyLocalConfig = flags }) programOptionsSectionDescr , liftSection legacyLocalConfig (\flags conf -> conf { legacyLocalConfig = flags }) programLocationsSectionDescr , liftSection legacySharedConfig (\flags conf -> conf { legacySharedConfig = flags }) $ liftSection legacyGlobalFlags (\flags conf -> conf { legacyGlobalFlags = flags }) remoteRepoSectionDescr ] packageRepoSectionDescr :: ( FieldGrammar c g, Applicative (g SourceRepoList) , c (Identity RepoType) , c (List NoCommaFSep FilePathNT String) , c (NonEmpty' NoCommaFSep Token String) ) => FGSectionDescr g LegacyProjectConfig packageRepoSectionDescr = FGSectionDescr { 
fgSectionName = "source-repository-package" , fgSectionGrammar = sourceRepositoryPackageGrammar , fgSectionGet = map (\x->("", x)) . legacyPackagesRepo , fgSectionSet = \lineno unused pkgrepo projconf -> do unless (null unused) $ syntaxError lineno "the section 'source-repository-package' takes no arguments" return projconf { legacyPackagesRepo = legacyPackagesRepo projconf ++ [pkgrepo] } } -- | The definitions of all the fields that can appear in the @package pkgfoo@ -- and @package *@ sections of the @cabal.project@-format files. -- packageSpecificOptionsFieldDescrs :: [FieldDescr LegacyPackageConfig] packageSpecificOptionsFieldDescrs = legacyPackageConfigFieldDescrs ++ programOptionsFieldDescrs (configProgramArgs . legacyConfigureFlags) (\args pkgconf -> pkgconf { legacyConfigureFlags = (legacyConfigureFlags pkgconf) { configProgramArgs = args } } ) ++ liftFields legacyConfigureFlags (\flags pkgconf -> pkgconf { legacyConfigureFlags = flags } ) programLocationsFieldDescrs -- | The definition of the @package pkgfoo@ sections of the @cabal.project@-format -- files. This section is per-package name. The special package @*@ applies to all -- packages used anywhere by the project, locally or as dependencies. -- packageSpecificOptionsSectionDescr :: SectionDescr LegacyProjectConfig packageSpecificOptionsSectionDescr = SectionDescr { sectionName = "package", sectionFields = packageSpecificOptionsFieldDescrs, sectionSubsections = [], sectionGet = \projconf -> [ (prettyShow pkgname, pkgconf) | (pkgname, pkgconf) <- Map.toList . getMapMappend . 
legacySpecificConfig $ projconf ] ++ [ ("*", legacyAllConfig projconf) ], sectionSet = \lineno pkgnamestr pkgconf projconf -> case pkgnamestr of "*" -> return projconf { legacyAllConfig = legacyAllConfig projconf <> pkgconf } _ -> do pkgname <- case simpleParsec pkgnamestr of Just pkgname -> return pkgname Nothing -> syntaxError lineno $ "a 'package' section requires a package name " ++ "as an argument" return projconf { legacySpecificConfig = MapMappend $ Map.insertWith mappend pkgname pkgconf (getMapMappend $ legacySpecificConfig projconf) }, sectionEmpty = mempty } programOptionsFieldDescrs :: (a -> [(String, [String])]) -> ([(String, [String])] -> a -> a) -> [FieldDescr a] programOptionsFieldDescrs get' set = commandOptionsToFields $ programDbOptions defaultProgramDb ParseArgs get' set programOptionsSectionDescr :: SectionDescr LegacyPackageConfig programOptionsSectionDescr = SectionDescr { sectionName = "program-options", sectionFields = programOptionsFieldDescrs configProgramArgs (\args conf -> conf { configProgramArgs = args }), sectionSubsections = [], sectionGet = (\x->[("", x)]) . legacyConfigureFlags, sectionSet = \lineno unused confflags pkgconf -> do unless (null unused) $ syntaxError lineno "the section 'program-options' takes no arguments" return pkgconf { legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags }, sectionEmpty = mempty } programLocationsFieldDescrs :: [FieldDescr ConfigFlags] programLocationsFieldDescrs = commandOptionsToFields $ programDbPaths' (++ "-location") defaultProgramDb ParseArgs configProgramPaths (\paths conf -> conf { configProgramPaths = paths }) programLocationsSectionDescr :: SectionDescr LegacyPackageConfig programLocationsSectionDescr = SectionDescr { sectionName = "program-locations", sectionFields = programLocationsFieldDescrs, sectionSubsections = [], sectionGet = (\x->[("", x)]) . 
legacyConfigureFlags, sectionSet = \lineno unused confflags pkgconf -> do unless (null unused) $ syntaxError lineno "the section 'program-locations' takes no arguments" return pkgconf { legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags }, sectionEmpty = mempty } -- | For each known program @PROG@ in 'progDb', produce a @PROG-options@ -- 'OptionField'. programDbOptions :: ProgramDb -> ShowOrParseArgs -> (flags -> [(String, [String])]) -> ([(String, [String])] -> (flags -> flags)) -> [OptionField flags] programDbOptions progDb showOrParseArgs get' set = case showOrParseArgs of -- we don't want a verbose help text list so we just show a generic one: ShowArgs -> [programOptions "PROG"] ParseArgs -> map (programOptions . programName . fst) (knownPrograms progDb) where programOptions prog = option "" [prog ++ "-options"] ("give extra options to " ++ prog) get' set (reqArg' "OPTS" (\args -> [(prog, splitArgs args)]) (\progArgs -> [ joinsArgs args | (prog', args) <- progArgs, prog==prog' ])) joinsArgs = unwords . map escape escape arg | any isSpace arg = "\"" ++ arg ++ "\"" | otherwise = arg -- The implementation is slight hack: we parse all as remote repository -- but if the url schema is file+noindex, we switch to local. 
remoteRepoSectionDescr :: SectionDescr GlobalFlags remoteRepoSectionDescr = SectionDescr { sectionName = "repository" , sectionEmpty = emptyRemoteRepo (RepoName "") , sectionFields = remoteRepoFields , sectionSubsections = [] , sectionGet = getS , sectionSet = setS } where getS :: GlobalFlags -> [(String, RemoteRepo)] getS gf = map (\x->(unRepoName $ remoteRepoName x, x)) (fromNubList (globalRemoteRepos gf)) ++ map (\x->(unRepoName $ localRepoName x, localToRemote x)) (fromNubList (globalLocalNoIndexRepos gf)) setS :: Int -> String -> RemoteRepo -> GlobalFlags -> ParseResult GlobalFlags setS lineno reponame repo0 conf = do repo1 <- postProcessRepo lineno reponame repo0 case repo1 of Left repo -> return conf { globalLocalNoIndexRepos = overNubList (++[repo]) (globalLocalNoIndexRepos conf) } Right repo -> return conf { globalRemoteRepos = overNubList (++[repo]) (globalRemoteRepos conf) } localToRemote :: LocalRepo -> RemoteRepo localToRemote (LocalRepo name path sharedCache) = (emptyRemoteRepo name) { remoteRepoURI = URI "file+noindex:" Nothing path "" (if sharedCache then "#shared-cache" else "") } ------------------------------- -- Local field utils -- -- | Parser combinator for simple fields which uses the field type's -- 'Monoid' instance for combining multiple occurrences of the field. monoidFieldParsec :: Monoid a => String -> (a -> Doc) -> ParsecParser a -> (b -> a) -> (a -> b -> b) -> FieldDescr b monoidFieldParsec name showF readF get' set = liftField get' set' $ ParseUtils.fieldParsec name showF readF where set' xs b = set (get' b `mappend` xs) b --TODO: [code cleanup] local redefinition that should replace the version in -- D.ParseUtils called showFilePath. This version escapes "." and "--" which -- otherwise are special syntax. 
showTokenQ :: String -> Doc showTokenQ "" = Disp.empty showTokenQ x@('-':'-':_) = Disp.text (show x) showTokenQ x@('.':[]) = Disp.text (show x) showTokenQ x = showToken x -- Handy util addFields :: [FieldDescr a] -> ([FieldDescr a] -> [FieldDescr a]) addFields = (++) cabal-install-3.8.1.0/src/Distribution/Client/ProjectConfig/Types.hs0000644000000000000000000005006207346545000023443 0ustar0000000000000000{-# LANGUAGE DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving #-} -- | Handling project configuration, types. -- module Distribution.Client.ProjectConfig.Types ( -- * Types for project config ProjectConfig(..), ProjectConfigBuildOnly(..), ProjectConfigShared(..), ProjectConfigProvenance(..), PackageConfig(..), -- * Resolving configuration SolverSettings(..), BuildTimeSettings(..), -- * Extra useful Monoids MapLast(..), MapMappend(..), ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.Types.Repo ( RemoteRepo, LocalRepo ) import Distribution.Client.Types.AllowNewer ( AllowNewer(..), AllowOlder(..) ) import Distribution.Client.Types.WriteGhcEnvironmentFilesPolicy ( WriteGhcEnvironmentFilesPolicy ) import Distribution.Client.Dependency.Types ( PreSolver ) import Distribution.Client.Targets ( UserConstraint ) import Distribution.Client.BuildReports.Types ( ReportLevel(..) ) import Distribution.Client.Types.SourceRepo (SourceRepoList) import Distribution.Client.IndexUtils.IndexState ( TotalIndexState ) import Distribution.Client.IndexUtils.ActiveRepos ( ActiveRepos ) import Distribution.Client.CmdInstall.ClientInstallFlags ( ClientInstallFlags(..) 
) import Distribution.Solver.Types.Settings import Distribution.Solver.Types.ConstraintSource import Distribution.Package ( PackageName, PackageId, UnitId ) import Distribution.Types.PackageVersionConstraint ( PackageVersionConstraint ) import Distribution.Version ( Version ) import Distribution.System ( Platform ) import Distribution.PackageDescription ( FlagAssignment ) import Distribution.Simple.Compiler ( Compiler, CompilerFlavor, PackageDB , OptimisationLevel(..), ProfDetailLevel, DebugInfoLevel(..) ) import Distribution.Simple.Setup ( Flag, HaddockTarget(..), TestShowDetails(..), DumpBuildInfo (..) ) import Distribution.Simple.InstallDirs ( PathTemplate ) import Distribution.Utils.NubList ( NubList ) import qualified Data.Map as Map ------------------------------- -- Project config types -- -- | This type corresponds directly to what can be written in the -- @cabal.project@ file. Other sources of configuration can also be injected -- into this type, such as the user-wide @~/.cabal/config@ file and the -- command line of @cabal configure@ or @cabal build@. -- -- Since it corresponds to the external project file it is an instance of -- 'Monoid' and all the fields can be empty. This also means there has to -- be a step where we resolve configuration. At a minimum resolving means -- applying defaults but it can also mean merging information from multiple -- sources. For example for package-specific configuration the project file -- can specify configuration that applies to all local packages, and then -- additional configuration for a specific package. -- -- Future directions: multiple profiles, conditionals. If we add these -- features then the gap between configuration as written in the config file -- and resolved settings we actually use will become even bigger. -- data ProjectConfig = ProjectConfig { -- | Packages in this project, including local dirs, local .cabal files -- local and remote tarballs. 
When these are file globs, they must -- match at least one package. projectPackages :: [String], -- | Like 'projectConfigPackageGlobs' but /optional/ in the sense that -- file globs are allowed to match nothing. The primary use case for -- this is to be able to say @optional-packages: */@ to automagically -- pick up deps that we unpack locally without erroring when -- there aren't any. projectPackagesOptional :: [String], -- | Packages in this project from remote source repositories. projectPackagesRepo :: [SourceRepoList], -- | Packages in this project from hackage repositories. projectPackagesNamed :: [PackageVersionConstraint], -- See respective types for an explanation of what these -- values are about: projectConfigBuildOnly :: ProjectConfigBuildOnly, projectConfigShared :: ProjectConfigShared, projectConfigProvenance :: Set ProjectConfigProvenance, -- | Configuration to be applied to *all* packages, -- whether named in `cabal.project` or not. projectConfigAllPackages :: PackageConfig, -- | Configuration to be applied to *local* packages; i.e., -- any packages which are explicitly named in `cabal.project`. projectConfigLocalPackages :: PackageConfig, projectConfigSpecificPackage :: MapMappend PackageName PackageConfig } deriving (Eq, Show, Generic, Typeable) -- | That part of the project configuration that only affects /how/ we build -- and not the /value/ of the things we build. This means this information -- does not need to be tracked for changes since it does not affect the -- outcome. 
-- data ProjectConfigBuildOnly = ProjectConfigBuildOnly { projectConfigVerbosity :: Flag Verbosity, projectConfigDryRun :: Flag Bool, projectConfigOnlyDeps :: Flag Bool, projectConfigOnlyDownload :: Flag Bool, projectConfigSummaryFile :: NubList PathTemplate, projectConfigLogFile :: Flag PathTemplate, projectConfigBuildReports :: Flag ReportLevel, projectConfigReportPlanningFailure :: Flag Bool, projectConfigSymlinkBinDir :: Flag FilePath, projectConfigNumJobs :: Flag (Maybe Int), projectConfigKeepGoing :: Flag Bool, projectConfigOfflineMode :: Flag Bool, projectConfigKeepTempFiles :: Flag Bool, projectConfigHttpTransport :: Flag String, projectConfigIgnoreExpiry :: Flag Bool, projectConfigCacheDir :: Flag FilePath, projectConfigLogsDir :: Flag FilePath, projectConfigClientInstallFlags :: ClientInstallFlags } deriving (Eq, Show, Generic) -- | Project configuration that is shared between all packages in the project. -- In particular this includes configuration that affects the solver. -- data ProjectConfigShared = ProjectConfigShared { projectConfigDistDir :: Flag FilePath, projectConfigConfigFile :: Flag FilePath, projectConfigProjectFile :: Flag FilePath, projectConfigIgnoreProject :: Flag Bool, projectConfigHcFlavor :: Flag CompilerFlavor, projectConfigHcPath :: Flag FilePath, projectConfigHcPkg :: Flag FilePath, projectConfigHaddockIndex :: Flag PathTemplate, -- Things that only make sense for manual mode, not --local mode -- too much control! --projectConfigUserInstall :: Flag Bool, --projectConfigInstallDirs :: InstallDirs (Flag PathTemplate), --TODO: [required eventually] decide what to do with InstallDirs -- currently we don't allow it to be specified in the config file projectConfigPackageDBs :: [Maybe PackageDB], -- configuration used both by the solver and other phases projectConfigRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers. 
projectConfigLocalNoIndexRepos :: NubList LocalRepo, projectConfigActiveRepos :: Flag ActiveRepos, projectConfigIndexState :: Flag TotalIndexState, projectConfigStoreDir :: Flag FilePath, -- solver configuration projectConfigConstraints :: [(UserConstraint, ConstraintSource)], projectConfigPreferences :: [PackageVersionConstraint], projectConfigCabalVersion :: Flag Version, --TODO: [required eventually] unused projectConfigSolver :: Flag PreSolver, projectConfigAllowOlder :: Maybe AllowOlder, projectConfigAllowNewer :: Maybe AllowNewer, projectConfigWriteGhcEnvironmentFilesPolicy :: Flag WriteGhcEnvironmentFilesPolicy, projectConfigMaxBackjumps :: Flag Int, projectConfigReorderGoals :: Flag ReorderGoals, projectConfigCountConflicts :: Flag CountConflicts, projectConfigFineGrainedConflicts :: Flag FineGrainedConflicts, projectConfigMinimizeConflictSet :: Flag MinimizeConflictSet, projectConfigStrongFlags :: Flag StrongFlags, projectConfigAllowBootLibInstalls :: Flag AllowBootLibInstalls, projectConfigOnlyConstrained :: Flag OnlyConstrained, projectConfigPerComponent :: Flag Bool, projectConfigIndependentGoals :: Flag IndependentGoals, projectConfigProgPathExtra :: NubList FilePath -- More things that only make sense for manual mode, not --local mode -- too much control! --projectConfigShadowPkgs :: Flag Bool, --projectConfigReinstall :: Flag Bool, --projectConfigAvoidReinstalls :: Flag Bool, --projectConfigOverrideReinstall :: Flag Bool, --projectConfigUpgradeDeps :: Flag Bool } deriving (Eq, Show, Generic) -- | Specifies the provenance of project configuration, whether defaults were -- used or if the configuration was read from an explicit file path. data ProjectConfigProvenance -- | The configuration is implicit due to no explicit configuration -- being found. See 'Distribution.Client.ProjectConfig.readProjectConfig' -- for how implicit configuration is determined. = Implicit -- | The path the project configuration was explicitly read from. 
-- | The configuration was explicitly read from the specified 'FilePath'. | Explicit FilePath deriving (Eq, Ord, Show, Generic) -- | Project configuration that is specific to each package, that is where we -- can in principle have different values for different packages in the same -- project. -- data PackageConfig = PackageConfig { packageConfigProgramPaths :: MapLast String FilePath, packageConfigProgramArgs :: MapMappend String [String], packageConfigProgramPathExtra :: NubList FilePath, packageConfigFlagAssignment :: FlagAssignment, packageConfigVanillaLib :: Flag Bool, packageConfigSharedLib :: Flag Bool, packageConfigStaticLib :: Flag Bool, packageConfigDynExe :: Flag Bool, packageConfigFullyStaticExe :: Flag Bool, packageConfigProf :: Flag Bool, --TODO: [code cleanup] sort out packageConfigProfLib :: Flag Bool, -- this duplication packageConfigProfExe :: Flag Bool, -- and consistency packageConfigProfDetail :: Flag ProfDetailLevel, packageConfigProfLibDetail :: Flag ProfDetailLevel, packageConfigConfigureArgs :: [String], packageConfigOptimization :: Flag OptimisationLevel, packageConfigProgPrefix :: Flag PathTemplate, packageConfigProgSuffix :: Flag PathTemplate, packageConfigExtraLibDirs :: [FilePath], packageConfigExtraLibDirsStatic :: [FilePath], packageConfigExtraFrameworkDirs :: [FilePath], packageConfigExtraIncludeDirs :: [FilePath], packageConfigGHCiLib :: Flag Bool, packageConfigSplitSections :: Flag Bool, packageConfigSplitObjs :: Flag Bool, packageConfigStripExes :: Flag Bool, packageConfigStripLibs :: Flag Bool, packageConfigTests :: Flag Bool, packageConfigBenchmarks :: Flag Bool, packageConfigCoverage :: Flag Bool, packageConfigRelocatable :: Flag Bool, packageConfigDebugInfo :: Flag DebugInfoLevel, packageConfigDumpBuildInfo :: Flag DumpBuildInfo, packageConfigRunTests :: Flag Bool, --TODO: [required eventually] use this packageConfigDocumentation :: Flag Bool, --TODO: [required eventually] use this -- Haddock options 
packageConfigHaddockHoogle :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockHtml :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockHtmlLocation :: Flag String, --TODO: [required eventually] use this packageConfigHaddockForeignLibs :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockExecutables :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockTestSuites :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockBenchmarks :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockInternal :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockCss :: Flag FilePath, --TODO: [required eventually] use this packageConfigHaddockLinkedSource :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockQuickJump :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockHscolourCss :: Flag FilePath, --TODO: [required eventually] use this packageConfigHaddockContents :: Flag PathTemplate, --TODO: [required eventually] use this packageConfigHaddockForHackage :: Flag HaddockTarget, -- Test options packageConfigTestHumanLog :: Flag PathTemplate, packageConfigTestMachineLog :: Flag PathTemplate, packageConfigTestShowDetails :: Flag TestShowDetails, packageConfigTestKeepTix :: Flag Bool, packageConfigTestWrapper :: Flag FilePath, packageConfigTestFailWhenNoTestSuites :: Flag Bool, packageConfigTestTestOptions :: [PathTemplate], -- Benchmark options packageConfigBenchmarkOptions :: [PathTemplate] } deriving (Eq, Show, Generic) instance Binary ProjectConfig instance Binary ProjectConfigBuildOnly instance Binary ProjectConfigShared instance Binary ProjectConfigProvenance instance Binary PackageConfig instance Structured ProjectConfig instance Structured ProjectConfigBuildOnly instance Structured ProjectConfigShared instance Structured ProjectConfigProvenance instance Structured PackageConfig -- | Newtype wrapper for 'Map' 
that provides a 'Monoid' instance that takes -- the last value rather than the first value for overlapping keys. newtype MapLast k v = MapLast { getMapLast :: Map k v } deriving (Eq, Show, Functor, Generic, Binary, Typeable) instance (Structured k, Structured v) => Structured (MapLast k v) instance Ord k => Monoid (MapLast k v) where mempty = MapLast Map.empty mappend = (<>) instance Ord k => Semigroup (MapLast k v) where MapLast a <> MapLast b = MapLast $ Map.union b a -- rather than Map.union which is the normal Map monoid instance -- | Newtype wrapper for 'Map' that provides a 'Monoid' instance that -- 'mappend's values of overlapping keys rather than taking the first. newtype MapMappend k v = MapMappend { getMapMappend :: Map k v } deriving (Eq, Show, Functor, Generic, Binary, Typeable) instance (Structured k, Structured v) => Structured (MapMappend k v) instance (Semigroup v, Ord k) => Monoid (MapMappend k v) where mempty = MapMappend Map.empty mappend = (<>) instance (Semigroup v, Ord k) => Semigroup (MapMappend k v) where MapMappend a <> MapMappend b = MapMappend (Map.unionWith (<>) a b) -- rather than Map.union which is the normal Map monoid instance instance Monoid ProjectConfig where mempty = gmempty mappend = (<>) instance Semigroup ProjectConfig where (<>) = gmappend instance Monoid ProjectConfigBuildOnly where mempty = gmempty mappend = (<>) instance Semigroup ProjectConfigBuildOnly where (<>) = gmappend instance Monoid ProjectConfigShared where mempty = gmempty mappend = (<>) instance Semigroup ProjectConfigShared where (<>) = gmappend instance Monoid PackageConfig where mempty = gmempty mappend = (<>) instance Semigroup PackageConfig where (<>) = gmappend ---------------------------------------- -- Resolving configuration to settings -- -- | Resolved configuration for the solver. The idea is that this is easier to -- use than the raw configuration because in the raw configuration everything -- is optional (monoidial). 
In the 'BuildTimeSettings' every field is filled -- in, if only with the defaults. -- -- Use 'resolveSolverSettings' to make one from the project config (by -- applying defaults etc). -- data SolverSettings = SolverSettings { solverSettingRemoteRepos :: [RemoteRepo], -- ^ Available Hackage servers. solverSettingLocalNoIndexRepos :: [LocalRepo], solverSettingConstraints :: [(UserConstraint, ConstraintSource)], solverSettingPreferences :: [PackageVersionConstraint], solverSettingFlagAssignment :: FlagAssignment, -- ^ For all local packages solverSettingFlagAssignments :: Map PackageName FlagAssignment, solverSettingCabalVersion :: Maybe Version, --TODO: [required eventually] unused solverSettingSolver :: PreSolver, solverSettingAllowOlder :: AllowOlder, solverSettingAllowNewer :: AllowNewer, solverSettingMaxBackjumps :: Maybe Int, solverSettingReorderGoals :: ReorderGoals, solverSettingCountConflicts :: CountConflicts, solverSettingFineGrainedConflicts :: FineGrainedConflicts, solverSettingMinimizeConflictSet :: MinimizeConflictSet, solverSettingStrongFlags :: StrongFlags, solverSettingAllowBootLibInstalls :: AllowBootLibInstalls, solverSettingOnlyConstrained :: OnlyConstrained, solverSettingIndexState :: Maybe TotalIndexState, solverSettingActiveRepos :: Maybe ActiveRepos, solverSettingIndependentGoals :: IndependentGoals -- Things that only make sense for manual mode, not --local mode -- too much control! --solverSettingShadowPkgs :: Bool, --solverSettingReinstall :: Bool, --solverSettingAvoidReinstalls :: Bool, --solverSettingOverrideReinstall :: Bool, --solverSettingUpgradeDeps :: Bool } deriving (Eq, Show, Generic, Typeable) instance Binary SolverSettings instance Structured SolverSettings -- | Resolved configuration for things that affect how we build and not the -- value of the things we build. The idea is that this is easier to use than -- the raw configuration because in the raw configuration everything is -- optional (monoidial). 
In the 'BuildTimeSettings' every field is filled in, -- if only with the defaults. -- -- Use 'resolveBuildTimeSettings' to make one from the project config (by -- applying defaults etc). -- data BuildTimeSettings = BuildTimeSettings { buildSettingDryRun :: Bool, buildSettingOnlyDeps :: Bool, buildSettingOnlyDownload :: Bool, buildSettingSummaryFile :: [PathTemplate], buildSettingLogFile :: Maybe (Compiler -> Platform -> PackageId -> UnitId -> FilePath), buildSettingLogVerbosity :: Verbosity, buildSettingBuildReports :: ReportLevel, buildSettingReportPlanningFailure :: Bool, buildSettingSymlinkBinDir :: [FilePath], buildSettingNumJobs :: Int, buildSettingKeepGoing :: Bool, buildSettingOfflineMode :: Bool, buildSettingKeepTempFiles :: Bool, buildSettingRemoteRepos :: [RemoteRepo], buildSettingLocalNoIndexRepos :: [LocalRepo], buildSettingCacheDir :: FilePath, buildSettingHttpTransport :: Maybe String, buildSettingIgnoreExpiry :: Bool, buildSettingProgPathExtra :: [FilePath], buildSettingHaddockOpen :: Bool } cabal-install-3.8.1.0/src/Distribution/Client/ProjectFlags.hs0000644000000000000000000000577207346545000022176 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} module Distribution.Client.ProjectFlags ( ProjectFlags(..), defaultProjectFlags, projectFlagsOptions, removeIgnoreProjectOption, ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.ReadE (succeedReadE) import Distribution.Simple.Command ( MkOptDescr, OptionField(optionName), ShowOrParseArgs (..), boolOpt', option , reqArg ) import Distribution.Simple.Setup (Flag (..), flagToList, flagToMaybe, toFlag, trueArg) data ProjectFlags = ProjectFlags { flagProjectFileName :: Flag FilePath -- ^ The cabal project file name; defaults to @cabal.project@. 
-- The name itself denotes the cabal project file name, but it also -- is the base of auxiliary project files, such as -- @cabal.project.local@ and @cabal.project.freeze@ which are also -- read and written out in some cases. If the path is not found -- in the current working directory, we will successively probe -- relative to parent directories until this name is found. , flagIgnoreProject :: Flag Bool -- ^ Whether to ignore the local project (i.e. don't search for cabal.project) -- The exact interpretation might be slightly different per command. } deriving (Show, Generic) defaultProjectFlags :: ProjectFlags defaultProjectFlags = ProjectFlags { flagProjectFileName = mempty , flagIgnoreProject = toFlag False -- Should we use 'Last' here? } projectFlagsOptions :: ShowOrParseArgs -> [OptionField ProjectFlags] projectFlagsOptions showOrParseArgs = [ option [] ["project-file"] "Set the name of the cabal.project file to search for in parent directories" flagProjectFileName (\pf flags -> flags { flagProjectFileName = pf }) (reqArg "FILE" (succeedReadE Flag) flagToList) , option ['z'] ["ignore-project"] "Ignore local project configuration" -- Flag True: --ignore-project is given and --project-file is not given -- Flag False: --ignore-project and --project-file is given -- NoFlag: neither --ignore-project or --project-file is given flagIgnoreProject (\v flags -> flags { flagIgnoreProject = if v == NoFlag then NoFlag else toFlag ((flagProjectFileName flags) == NoFlag && v == Flag True) }) (yesNoOpt showOrParseArgs) ] -- | As almost all commands use 'ProjectFlags' but not all can honour -- "ignore-project" flag, provide this utility to remove the flag -- parsing from the help message. 
removeIgnoreProjectOption :: [OptionField a] -> [OptionField a] removeIgnoreProjectOption = filter (\o -> optionName o /= "ignore-project") instance Monoid ProjectFlags where mempty = gmempty mappend = (<>) instance Semigroup ProjectFlags where (<>) = gmappend yesNoOpt :: ShowOrParseArgs -> MkOptDescr (b -> Flag Bool) (Flag Bool -> b -> b) b yesNoOpt ShowArgs sf lf = trueArg sf lf yesNoOpt _ sf lf = boolOpt' flagToMaybe Flag (sf, lf) ([], map ("no-" ++) lf) sf lf cabal-install-3.8.1.0/src/Distribution/Client/ProjectOrchestration.hs0000644000000000000000000015542107346545000023763 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE RecordWildCards, NamedFieldPuns #-} {-# LANGUAGE RankNTypes, ScopedTypeVariables #-} -- | This module deals with building and incrementally rebuilding a collection -- of packages. It is what backs the @cabal build@ and @configure@ commands, -- as well as being a core part of @run@, @test@, @bench@ and others. -- -- The primary thing is in fact rebuilding (and trying to make that quick by -- not redoing unnecessary work), so building from scratch is just a special -- case. -- -- The build process and the code can be understood by breaking it down into -- three major parts: -- -- * The 'ElaboratedInstallPlan' type -- -- * The \"what to do\" phase, where we look at the all input configuration -- (project files, .cabal files, command line etc) and produce a detailed -- plan of what to do -- the 'ElaboratedInstallPlan'. -- -- * The \"do it\" phase, where we take the 'ElaboratedInstallPlan' and we -- re-execute it. -- -- As far as possible, the \"what to do\" phase embodies all the policy, leaving -- the \"do it\" phase policy free. The first phase contains more of the -- complicated logic, but it is contained in code that is either pure or just -- has read effects (except cache updates). 
Then the second phase does all the -- actions to build packages, but as far as possible it just follows the -- instructions and avoids any logic for deciding what to do (apart from -- recompilation avoidance in executing the plan). -- -- This division helps us keep the code under control, making it easier to -- understand, test and debug. So when you are extending these modules, please -- think about which parts of your change belong in which part. It is -- perfectly ok to extend the description of what to do (i.e. the -- 'ElaboratedInstallPlan') if that helps keep the policy decisions in the -- first phase. Also, the second phase does not have direct access to any of -- the input configuration anyway; all the information has to flow via the -- 'ElaboratedInstallPlan'. -- module Distribution.Client.ProjectOrchestration ( -- * Discovery phase: what is in the project? CurrentCommand(..), establishProjectBaseContext, establishProjectBaseContextWithRoot, ProjectBaseContext(..), BuildTimeSettings(..), commandLineFlagsToProjectConfig, -- * Pre-build phase: decide what to do. withInstallPlan, runProjectPreBuildPhase, ProjectBuildContext(..), -- ** Selecting what targets we mean readTargetSelectors, reportTargetSelectorProblems, resolveTargets, TargetsMap, allTargetSelectors, uniqueTargetSelectors, TargetSelector(..), TargetImplicitCwd(..), PackageId, AvailableTarget(..), AvailableTargetStatus(..), TargetRequested(..), ComponentName(..), ComponentKind(..), ComponentTarget(..), SubComponentTarget(..), selectComponentTargetBasic, distinctTargetComponents, -- ** Utils for selecting targets filterTargetsKind, filterTargetsKindWith, selectBuildableTargets, selectBuildableTargetsWith, selectBuildableTargets', selectBuildableTargetsWith', forgetTargetsDetail, -- ** Adjusting the plan pruneInstallPlanToTargets, TargetAction(..), pruneInstallPlanToDependencies, CannotPruneDependencies(..), printPlan, -- * Build phase: now do it. 
runProjectBuildPhase, -- * Post build actions runProjectPostBuildPhase, dieOnBuildFailures, -- * Dummy projects establishDummyProjectBaseContext, establishDummyDistDirLayout, ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Compat.Directory ( makeAbsolute ) import Distribution.Client.ProjectConfig import Distribution.Client.ProjectPlanning hiding ( pruneInstallPlanToTargets ) import qualified Distribution.Client.ProjectPlanning as ProjectPlanning ( pruneInstallPlanToTargets ) import Distribution.Client.ProjectPlanning.Types import Distribution.Client.ProjectBuilding import Distribution.Client.ProjectPlanOutput import Distribution.Client.TargetProblem ( TargetProblem (..) ) import Distribution.Client.Types ( GenericReadyPackage(..), UnresolvedSourcePackage , PackageSpecifier(..) , SourcePackageDb(..) , WriteGhcEnvironmentFilesPolicy(..) , PackageLocation(..) , DocsResult(..) , TestsResult(..) ) import Distribution.Solver.Types.PackageIndex ( lookupPackageName ) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.TargetSelector ( TargetSelector(..), TargetImplicitCwd(..) 
, ComponentKind(..), componentKind , readTargetSelectors, reportTargetSelectorProblems ) import Distribution.Client.DistDirLayout import Distribution.Client.BuildReports.Anonymous (cabalInstallID) import qualified Distribution.Client.BuildReports.Anonymous as BuildReports import qualified Distribution.Client.BuildReports.Storage as BuildReports ( storeLocal ) import Distribution.Client.Config (getCabalDir) import Distribution.Client.HttpUtils import Distribution.Client.Setup hiding (packageName) import Distribution.Compiler ( CompilerFlavor(GHC) ) import Distribution.Types.ComponentName ( componentNameString ) import Distribution.Types.InstalledPackageInfo ( InstalledPackageInfo ) import Distribution.Types.UnqualComponentName ( UnqualComponentName, packageNameToUnqualComponentName ) import Distribution.Solver.Types.OptionalStanza import Distribution.Package import Distribution.Types.Flag ( FlagAssignment, showFlagAssignment, diffFlagAssignment ) import Distribution.Simple.LocalBuildInfo ( ComponentName(..), pkgComponents ) import Distribution.Simple.Flag ( fromFlagOrDefault, flagToMaybe ) import qualified Distribution.Simple.Setup as Setup import Distribution.Simple.Command (commandShowOptions) import Distribution.Simple.Configure (computeEffectiveProfiling) import Distribution.Simple.Utils ( die', warn, notice, noticeNoWrap, debugNoWrap, createDirectoryIfMissingVerbose, ordNub ) import Distribution.Verbosity import Distribution.Version ( mkVersion ) import Distribution.Simple.Compiler ( compilerCompatVersion, showCompilerId, compilerId, compilerInfo , OptimisationLevel(..)) import Distribution.Utils.NubList ( fromNubList ) import Distribution.System ( Platform(Platform) ) import qualified Data.List.NonEmpty as NE import qualified Data.Set as Set import qualified Data.Map as Map import Control.Exception ( assert ) #ifdef MIN_VERSION_unix import System.Posix.Signals (sigKILL, sigSEGV) #endif -- | Tracks what command is being executed, because we need to hide this 
somewhere -- for cases that need special handling (usually for error reporting). data CurrentCommand = InstallCommand | HaddockCommand | OtherCommand deriving (Show, Eq) -- | This holds the context of a project prior to solving: the content of the -- @cabal.project@ and all the local package @.cabal@ files. -- data ProjectBaseContext = ProjectBaseContext { distDirLayout :: DistDirLayout, cabalDirLayout :: CabalDirLayout, projectConfig :: ProjectConfig, localPackages :: [PackageSpecifier UnresolvedSourcePackage], buildSettings :: BuildTimeSettings, currentCommand :: CurrentCommand } establishProjectBaseContext :: Verbosity -> ProjectConfig -> CurrentCommand -> IO ProjectBaseContext establishProjectBaseContext verbosity cliConfig currentCommand = do projectRoot <- either throwIO return =<< findProjectRoot Nothing mprojectFile establishProjectBaseContextWithRoot verbosity cliConfig projectRoot currentCommand where mprojectFile = Setup.flagToMaybe projectConfigProjectFile ProjectConfigShared { projectConfigProjectFile} = projectConfigShared cliConfig -- | Like 'establishProjectBaseContext' but doesn't search for project root. establishProjectBaseContextWithRoot :: Verbosity -> ProjectConfig -> ProjectRoot -> CurrentCommand -> IO ProjectBaseContext establishProjectBaseContextWithRoot verbosity cliConfig projectRoot currentCommand = do cabalDir <- getCabalDir let distDirLayout = defaultDistDirLayout projectRoot mdistDirectory httpTransport <- configureTransport verbosity (fromNubList . projectConfigProgPathExtra $ projectConfigShared cliConfig) (flagToMaybe . 
projectConfigHttpTransport $ projectConfigBuildOnly cliConfig) (projectConfig, localPackages) <- rebuildProjectConfig verbosity httpTransport distDirLayout cliConfig let ProjectConfigBuildOnly { projectConfigLogsDir } = projectConfigBuildOnly projectConfig ProjectConfigShared { projectConfigStoreDir } = projectConfigShared projectConfig mlogsDir = Setup.flagToMaybe projectConfigLogsDir mstoreDir <- sequenceA $ makeAbsolute <$> Setup.flagToMaybe projectConfigStoreDir let cabalDirLayout = mkCabalDirLayout cabalDir mstoreDir mlogsDir buildSettings = resolveBuildTimeSettings verbosity cabalDirLayout projectConfig -- https://github.com/haskell/cabal/issues/6013 when (null (projectPackages projectConfig) && null (projectPackagesOptional projectConfig)) $ warn verbosity "There are no packages or optional-packages in the project" return ProjectBaseContext { distDirLayout, cabalDirLayout, projectConfig, localPackages, buildSettings, currentCommand } where mdistDirectory = Setup.flagToMaybe projectConfigDistDir ProjectConfigShared { projectConfigDistDir } = projectConfigShared cliConfig -- | This holds the context between the pre-build, build and post-build phases. -- data ProjectBuildContext = ProjectBuildContext { -- | This is the improved plan, before we select a plan subset based on -- the build targets, and before we do the dry-run. So this contains -- all packages in the project. elaboratedPlanOriginal :: ElaboratedInstallPlan, -- | This is the 'elaboratedPlanOriginal' after we select a plan subset -- and do the dry-run phase to find out what is up-to or out-of date. -- This is the plan that will be executed during the build phase. So -- this contains only a subset of packages in the project. elaboratedPlanToExecute:: ElaboratedInstallPlan, -- | The part of the install plan that's shared between all packages in -- the plan. This does not change between the two plan variants above, -- so there is just the one copy. 
elaboratedShared :: ElaboratedSharedConfig, -- | The result of the dry-run phase. This tells us about each member of -- the 'elaboratedPlanToExecute'. pkgsBuildStatus :: BuildStatusMap, -- | The targets selected by @selectPlanSubset@. This is useful eg. in -- CmdRun, where we need a valid target to execute. targetsMap :: TargetsMap } -- | Pre-build phase: decide what to do. -- withInstallPlan :: Verbosity -> ProjectBaseContext -> (ElaboratedInstallPlan -> ElaboratedSharedConfig -> IO a) -> IO a withInstallPlan verbosity ProjectBaseContext { distDirLayout, cabalDirLayout, projectConfig, localPackages } action = do -- Take the project configuration and make a plan for how to build -- everything in the project. This is independent of any specific targets -- the user has asked for. -- (elaboratedPlan, _, elaboratedShared, _, _) <- rebuildInstallPlan verbosity distDirLayout cabalDirLayout projectConfig localPackages action elaboratedPlan elaboratedShared runProjectPreBuildPhase :: Verbosity -> ProjectBaseContext -> (ElaboratedInstallPlan -> IO (ElaboratedInstallPlan, TargetsMap)) -> IO ProjectBuildContext runProjectPreBuildPhase verbosity ProjectBaseContext { distDirLayout, cabalDirLayout, projectConfig, localPackages } selectPlanSubset = do -- Take the project configuration and make a plan for how to build -- everything in the project. This is independent of any specific targets -- the user has asked for. -- (elaboratedPlan, _, elaboratedShared, _, _) <- rebuildInstallPlan verbosity distDirLayout cabalDirLayout projectConfig localPackages -- The plan for what to do is represented by an 'ElaboratedInstallPlan' -- Now given the specific targets the user has asked for, decide -- which bits of the plan we will want to execute. -- (elaboratedPlan', targets) <- selectPlanSubset elaboratedPlan -- Check which packages need rebuilding. -- This also gives us more accurate reasons for the --dry-run output. 
-- pkgsBuildStatus <- rebuildTargetsDryRun distDirLayout elaboratedShared elaboratedPlan' -- Improve the plan by marking up-to-date packages as installed. -- let elaboratedPlan'' = improveInstallPlanWithUpToDatePackages pkgsBuildStatus elaboratedPlan' debugNoWrap verbosity (InstallPlan.showInstallPlan elaboratedPlan'') return ProjectBuildContext { elaboratedPlanOriginal = elaboratedPlan, elaboratedPlanToExecute = elaboratedPlan'', elaboratedShared, pkgsBuildStatus, targetsMap = targets } -- | Build phase: now do it. -- -- Execute all or parts of the description of what to do to build or -- rebuild the various packages needed. -- runProjectBuildPhase :: Verbosity -> ProjectBaseContext -> ProjectBuildContext -> IO BuildOutcomes runProjectBuildPhase _ ProjectBaseContext{buildSettings} _ | buildSettingDryRun buildSettings = return Map.empty runProjectBuildPhase verbosity ProjectBaseContext{..} ProjectBuildContext {..} = fmap (Map.union (previousBuildOutcomes pkgsBuildStatus)) $ rebuildTargets verbosity distDirLayout (cabalStoreDirLayout cabalDirLayout) elaboratedPlanToExecute elaboratedShared pkgsBuildStatus buildSettings where previousBuildOutcomes :: BuildStatusMap -> BuildOutcomes previousBuildOutcomes = Map.mapMaybe $ \status -> case status of BuildStatusUpToDate buildSuccess -> Just (Right buildSuccess) --TODO: [nice to have] record build failures persistently _ -> Nothing -- | Post-build phase: various administrative tasks -- -- Update bits of state based on the build outcomes and report any failures. 
-- runProjectPostBuildPhase :: Verbosity -> ProjectBaseContext -> ProjectBuildContext -> BuildOutcomes -> IO () runProjectPostBuildPhase _ ProjectBaseContext{buildSettings} _ _ | buildSettingDryRun buildSettings = return () runProjectPostBuildPhase verbosity ProjectBaseContext {..} bc@ProjectBuildContext {..} buildOutcomes = do -- Update other build artefacts -- TODO: currently none, but could include: -- - bin symlinks/wrappers -- - haddock/hoogle/ctags indexes -- - delete stale lib registrations -- - delete stale package dirs postBuildStatus <- updatePostBuildProjectStatus verbosity distDirLayout elaboratedPlanOriginal pkgsBuildStatus buildOutcomes -- Write the .ghc.environment file (if allowed by the env file write policy). let writeGhcEnvFilesPolicy = projectConfigWriteGhcEnvironmentFilesPolicy . projectConfigShared $ projectConfig shouldWriteGhcEnvironment :: Bool shouldWriteGhcEnvironment = case fromFlagOrDefault NeverWriteGhcEnvironmentFiles writeGhcEnvFilesPolicy of AlwaysWriteGhcEnvironmentFiles -> True NeverWriteGhcEnvironmentFiles -> False WriteGhcEnvironmentFilesOnlyForGhc844AndNewer -> let compiler = pkgConfigCompiler elaboratedShared ghcCompatVersion = compilerCompatVersion GHC compiler in maybe False (>= mkVersion [8,4,4]) ghcCompatVersion when shouldWriteGhcEnvironment $ void $ writePlanGhcEnvironment (distProjectRootDirectory distDirLayout) elaboratedPlanOriginal elaboratedShared postBuildStatus -- Write the build reports writeBuildReports buildSettings bc elaboratedPlanToExecute buildOutcomes -- Finally if there were any build failures then report them and throw -- an exception to terminate the program dieOnBuildFailures verbosity currentCommand elaboratedPlanToExecute buildOutcomes -- Note that it is a deliberate design choice that the 'buildTargets' is -- not passed to phase 1, and the various bits of input config is not -- passed to phase 2. -- -- We make the install plan without looking at the particular targets the -- user asks us to build. 
The set of available things we can build is -- discovered from the env and config and is used to make the install plan. -- The targets just tell us which parts of the install plan to execute. -- -- Conversely, executing the plan does not directly depend on any of the -- input config. The bits that are needed (or better, the decisions based -- on it) all go into the install plan. -- Notionally, the 'BuildFlags' should be things that do not affect what -- we build, just how we do it. These ones of course do ------------------------------------------------------------------------------ -- Taking targets into account, selecting what to build -- -- | The set of components to build, represented as a mapping from 'UnitId's -- to the 'ComponentTarget's within the unit that will be selected -- (e.g. selected to build, test or repl). -- -- Associated with each 'ComponentTarget' is the set of 'TargetSelector's that -- matched this target. Typically this is exactly one, but in general it is -- possible to for different selectors to match the same target. This extra -- information is primarily to help make helpful error messages. -- type TargetsMap = Map UnitId [(ComponentTarget, NonEmpty TargetSelector)] -- | Get all target selectors. allTargetSelectors :: TargetsMap -> [TargetSelector] allTargetSelectors = concatMap (NE.toList . snd) . concat . Map.elems -- | Get all unique target selectors. uniqueTargetSelectors :: TargetsMap -> [TargetSelector] uniqueTargetSelectors = ordNub . allTargetSelectors -- | Given a set of 'TargetSelector's, resolve which 'UnitId's and -- 'ComponentTarget's they ought to refer to. -- -- The idea is that every user target identifies one or more roots in the -- 'ElaboratedInstallPlan', which we will use to determine the closure -- of what packages need to be built, dropping everything from the plan -- that is unnecessary. 
This closure and pruning is done by -- 'pruneInstallPlanToTargets' and this needs to be told the roots in terms -- of 'UnitId's and the 'ComponentTarget's within those. -- -- This means we first need to translate the 'TargetSelector's into the -- 'UnitId's and 'ComponentTarget's. This translation has to be different for -- the different command line commands, like @build@, @repl@ etc. For example -- the command @build pkgfoo@ could select a different set of components in -- pkgfoo than @repl pkgfoo@. The @build@ command would select any library and -- all executables, whereas @repl@ would select the library or a single -- executable. Furthermore, both of these examples could fail, and fail in -- different ways and each needs to be able to produce helpful error messages. -- -- So 'resolveTargets' takes two helpers: one to select the targets to be used -- by user targets that refer to a whole package ('TargetPackage'), and -- another to check user targets that refer to a component (or a module or -- file within a component). These helpers can fail, and use their own error -- type. Both helpers get given the 'AvailableTarget' info about the -- component(s). -- -- While commands vary quite a bit in their behaviour about which components to -- select for a whole-package target, most commands have the same behaviour for -- checking a user target that refers to a specific component. To help with -- this commands can use 'selectComponentTargetBasic', either directly or as -- a basis for their own @selectComponentTarget@ implementation. -- resolveTargets :: forall err. (forall k. TargetSelector -> [AvailableTarget k] -> Either (TargetProblem err) [k]) -> (forall k. SubComponentTarget -> AvailableTarget k -> Either (TargetProblem err) k ) -> ElaboratedInstallPlan -> Maybe (SourcePackageDb) -> [TargetSelector] -> Either [TargetProblem err] TargetsMap resolveTargets selectPackageTargets selectComponentTarget installPlan mPkgDb = fmap mkTargetsMap . either (Left . 
toList) Right . checkErrors . map (\ts -> (,) ts <$> checkTarget ts) where mkTargetsMap :: [(TargetSelector, [(UnitId, ComponentTarget)])] -> TargetsMap mkTargetsMap targets = Map.map nubComponentTargets $ Map.fromListWith (<>) [ (uid, [(ct, ts)]) | (ts, cts) <- targets , (uid, ct) <- cts ] AvailableTargetIndexes{..} = availableTargetIndexes installPlan checkTarget :: TargetSelector -> Either (TargetProblem err) [(UnitId, ComponentTarget)] -- We can ask to build any whole package, project-local or a dependency checkTarget bt@(TargetPackage _ [pkgid] mkfilter) | Just ats <- fmap (maybe id filterTargetsKind mkfilter) $ Map.lookup pkgid availableTargetsByPackageId = fmap (componentTargets WholeComponent) $ selectPackageTargets bt ats | otherwise = Left (TargetProblemNoSuchPackage pkgid) checkTarget (TargetPackage _ pkgids _) = error ("TODO: add support for multiple packages in a directory. Got\n" ++ unlines (map prettyShow pkgids)) -- For the moment this error cannot happen here, because it gets -- detected when the package config is being constructed. This case -- will need handling properly when we do add support. -- -- TODO: how should this use case play together with the -- '--cabal-file' option of 'configure' which allows using multiple -- .cabal files for a single package? checkTarget bt@(TargetAllPackages mkfilter) = fmap (componentTargets WholeComponent) . selectPackageTargets bt . maybe id filterTargetsKind mkfilter . 
filter availableTargetLocalToProject $ concat (Map.elems availableTargetsByPackageId) checkTarget (TargetComponent pkgid cname subtarget) | Just ats <- Map.lookup (pkgid, cname) availableTargetsByPackageIdAndComponentName = fmap (componentTargets subtarget) $ selectComponentTargets subtarget ats | Map.member pkgid availableTargetsByPackageId = Left (TargetProblemNoSuchComponent pkgid cname) | otherwise = Left (TargetProblemNoSuchPackage pkgid) checkTarget (TargetComponentUnknown pkgname ecname subtarget) | Just ats <- case ecname of Left ucname -> Map.lookup (pkgname, ucname) availableTargetsByPackageNameAndUnqualComponentName Right cname -> Map.lookup (pkgname, cname) availableTargetsByPackageNameAndComponentName = fmap (componentTargets subtarget) $ selectComponentTargets subtarget ats | Map.member pkgname availableTargetsByPackageName = Left (TargetProblemUnknownComponent pkgname ecname) | otherwise = Left (TargetNotInProject pkgname) checkTarget bt@(TargetPackageNamed pkgname mkfilter) | Just ats <- fmap (maybe id filterTargetsKind mkfilter) $ Map.lookup pkgname availableTargetsByPackageName = fmap (componentTargets WholeComponent) . selectPackageTargets bt $ ats | Just SourcePackageDb{ packageIndex } <- mPkgDb , let pkg = lookupPackageName packageIndex pkgname , not (null pkg) = Left (TargetAvailableInIndex pkgname) | otherwise = Left (TargetNotInProject pkgname) componentTargets :: SubComponentTarget -> [(b, ComponentName)] -> [(b, ComponentTarget)] componentTargets subtarget = map (fmap (\cname -> ComponentTarget cname subtarget)) selectComponentTargets :: SubComponentTarget -> [AvailableTarget k] -> Either (TargetProblem err) [k] selectComponentTargets subtarget = either (Left . NE.head) Right . checkErrors . map (selectComponentTarget subtarget) checkErrors :: [Either e a] -> Either (NonEmpty e) [a] checkErrors = (\(es, xs) -> case es of { [] -> Right xs; (e:es') -> Left (e:|es') }) . 
partitionEithers data AvailableTargetIndexes = AvailableTargetIndexes { availableTargetsByPackageIdAndComponentName :: AvailableTargetsMap (PackageId, ComponentName), availableTargetsByPackageId :: AvailableTargetsMap PackageId, availableTargetsByPackageName :: AvailableTargetsMap PackageName, availableTargetsByPackageNameAndComponentName :: AvailableTargetsMap (PackageName, ComponentName), availableTargetsByPackageNameAndUnqualComponentName :: AvailableTargetsMap (PackageName, UnqualComponentName) } type AvailableTargetsMap k = Map k [AvailableTarget (UnitId, ComponentName)] -- We define a bunch of indexes to help 'resolveTargets' with resolving -- 'TargetSelector's to specific 'UnitId's. -- -- They are all derived from the 'availableTargets' index. -- The 'availableTargetsByPackageIdAndComponentName' is just that main index, -- while the others are derived by re-grouping on the index key. -- -- They are all constructed lazily because they are not necessarily all used. -- availableTargetIndexes :: ElaboratedInstallPlan -> AvailableTargetIndexes availableTargetIndexes installPlan = AvailableTargetIndexes{..} where availableTargetsByPackageIdAndComponentName :: Map (PackageId, ComponentName) [AvailableTarget (UnitId, ComponentName)] availableTargetsByPackageIdAndComponentName = availableTargets installPlan availableTargetsByPackageId :: Map PackageId [AvailableTarget (UnitId, ComponentName)] availableTargetsByPackageId = Map.mapKeysWith (++) (\(pkgid, _cname) -> pkgid) availableTargetsByPackageIdAndComponentName `Map.union` availableTargetsEmptyPackages availableTargetsByPackageName :: Map PackageName [AvailableTarget (UnitId, ComponentName)] availableTargetsByPackageName = Map.mapKeysWith (++) packageName availableTargetsByPackageId availableTargetsByPackageNameAndComponentName :: Map (PackageName, ComponentName) [AvailableTarget (UnitId, ComponentName)] availableTargetsByPackageNameAndComponentName = Map.mapKeysWith (++) (\(pkgid, cname) -> (packageName pkgid, 
cname)) availableTargetsByPackageIdAndComponentName availableTargetsByPackageNameAndUnqualComponentName :: Map (PackageName, UnqualComponentName) [AvailableTarget (UnitId, ComponentName)] availableTargetsByPackageNameAndUnqualComponentName = Map.mapKeysWith (++) (\(pkgid, cname) -> let pname = packageName pkgid cname' = unqualComponentName pname cname in (pname, cname')) availableTargetsByPackageIdAndComponentName where unqualComponentName :: PackageName -> ComponentName -> UnqualComponentName unqualComponentName pkgname = fromMaybe (packageNameToUnqualComponentName pkgname) . componentNameString -- Add in all the empty packages. These do not appear in the -- availableTargetsByComponent map, since that only contains -- components, so packages with no components are invisible from -- that perspective. The empty packages need to be there for -- proper error reporting, so users can select the empty package -- and then we can report that it is empty, otherwise we falsely -- report there is no such package at all. availableTargetsEmptyPackages = Map.fromList [ (packageId pkg, []) | InstallPlan.Configured pkg <- InstallPlan.toList installPlan , case elabPkgOrComp pkg of ElabComponent _ -> False ElabPackage _ -> null (pkgComponents (elabPkgDescription pkg)) ] --TODO: [research required] what if the solution has multiple -- versions of this package? -- e.g. due to setup deps or due to multiple independent sets -- of packages being built (e.g. 
ghc + ghcjs in a project) filterTargetsKind :: ComponentKind -> [AvailableTarget k] -> [AvailableTarget k] filterTargetsKind ckind = filterTargetsKindWith (== ckind) filterTargetsKindWith :: (ComponentKind -> Bool) -> [AvailableTarget k] -> [AvailableTarget k] filterTargetsKindWith p ts = [ t | t@(AvailableTarget _ cname _ _) <- ts , p (componentKind cname) ] selectBuildableTargets :: [AvailableTarget k] -> [k] selectBuildableTargets = selectBuildableTargetsWith (const True) zipBuildableTargetsWith :: (TargetRequested -> Bool) -> [AvailableTarget k] -> [(k, AvailableTarget k)] zipBuildableTargetsWith p ts = [ (k, t) | t@(AvailableTarget _ _ (TargetBuildable k req) _) <- ts, p req ] selectBuildableTargetsWith :: (TargetRequested -> Bool) -> [AvailableTarget k] -> [k] selectBuildableTargetsWith p = map fst . zipBuildableTargetsWith p selectBuildableTargets' :: [AvailableTarget k] -> ([k], [AvailableTarget ()]) selectBuildableTargets' = selectBuildableTargetsWith' (const True) selectBuildableTargetsWith' :: (TargetRequested -> Bool) -> [AvailableTarget k] -> ([k], [AvailableTarget ()]) selectBuildableTargetsWith' p = (fmap . map) forgetTargetDetail . unzip . zipBuildableTargetsWith p forgetTargetDetail :: AvailableTarget k -> AvailableTarget () forgetTargetDetail = fmap (const ()) forgetTargetsDetail :: [AvailableTarget k] -> [AvailableTarget ()] forgetTargetsDetail = map forgetTargetDetail -- | A basic @selectComponentTarget@ implementation to use or pass to -- 'resolveTargets', that does the basic checks that the component is -- buildable and isn't a test suite or benchmark that is disabled. 
This -- can also be used to do these basic checks as part of a custom impl that -- selectComponentTargetBasic :: SubComponentTarget -> AvailableTarget k -> Either (TargetProblem a) k selectComponentTargetBasic subtarget AvailableTarget { availableTargetPackageId = pkgid, availableTargetComponentName = cname, availableTargetStatus } = case availableTargetStatus of TargetDisabledByUser -> Left (TargetOptionalStanzaDisabledByUser pkgid cname subtarget) TargetDisabledBySolver -> Left (TargetOptionalStanzaDisabledBySolver pkgid cname subtarget) TargetNotLocal -> Left (TargetComponentNotProjectLocal pkgid cname subtarget) TargetNotBuildable -> Left (TargetComponentNotBuildable pkgid cname subtarget) TargetBuildable targetKey _ -> Right targetKey -- | Wrapper around 'ProjectPlanning.pruneInstallPlanToTargets' that adjusts -- for the extra unneeded info in the 'TargetsMap'. -- pruneInstallPlanToTargets :: TargetAction -> TargetsMap -> ElaboratedInstallPlan -> ElaboratedInstallPlan pruneInstallPlanToTargets targetActionType targetsMap elaboratedPlan = assert (Map.size targetsMap > 0) $ ProjectPlanning.pruneInstallPlanToTargets targetActionType (Map.map (map fst) targetsMap) elaboratedPlan -- | Utility used by repl and run to check if the targets spans multiple -- components, since those commands do not support multiple components. -- distinctTargetComponents :: TargetsMap -> Set.Set (UnitId, ComponentName) distinctTargetComponents targetsMap = Set.fromList [ (uid, cname) | (uid, cts) <- Map.toList targetsMap , (ComponentTarget cname _, _) <- cts ] ------------------------------------------------------------------------------ -- Displaying what we plan to do -- -- | Print a user-oriented presentation of the install plan, indicating what -- will be built. 
-- printPlan :: Verbosity -> ProjectBaseContext -> ProjectBuildContext -> IO () printPlan verbosity ProjectBaseContext { buildSettings = BuildTimeSettings{buildSettingDryRun}, projectConfig = ProjectConfig { projectConfigLocalPackages = PackageConfig {packageConfigOptimization} } } ProjectBuildContext { elaboratedPlanToExecute = elaboratedPlan, elaboratedShared, pkgsBuildStatus } | null pkgs = notice verbosity "Up to date" | otherwise = noticeNoWrap verbosity $ unlines $ (showBuildProfile ++ "In order, the following " ++ wouldWill ++ " be built" ++ ifNormal " (use -v for more details)" ++ ":") : map showPkgAndReason pkgs where pkgs = InstallPlan.executionOrder elaboratedPlan ifVerbose s | verbosity >= verbose = s | otherwise = "" ifNormal s | verbosity >= verbose = "" | otherwise = s wouldWill | buildSettingDryRun = "would" | otherwise = "will" showPkgAndReason :: ElaboratedReadyPackage -> String showPkgAndReason (ReadyPackage elab) = unwords $ filter (not . null) $ [ " -" , if verbosity >= deafening then prettyShow (installedUnitId elab) else prettyShow (packageId elab) , case elabPkgOrComp elab of ElabPackage pkg -> showTargets elab ++ ifVerbose (showStanzas (pkgStanzasEnabled pkg)) ElabComponent comp -> "(" ++ showComp elab comp ++ ")" , showFlagAssignment (nonDefaultFlags elab) , showConfigureFlags elab , let buildStatus = pkgsBuildStatus Map.! 
installedUnitId elab in "(" ++ showBuildStatus buildStatus ++ ")" ] showComp :: ElaboratedConfiguredPackage -> ElaboratedComponent -> String showComp elab comp = maybe "custom" prettyShow (compComponentName comp) ++ if Map.null (elabInstantiatedWith elab) then "" else " with " ++ intercalate ", " -- TODO: Abbreviate the UnitIds [ prettyShow k ++ "=" ++ prettyShow v | (k,v) <- Map.toList (elabInstantiatedWith elab) ] nonDefaultFlags :: ElaboratedConfiguredPackage -> FlagAssignment nonDefaultFlags elab = elabFlagAssignment elab `diffFlagAssignment` elabFlagDefaults elab showTargets :: ElaboratedConfiguredPackage -> String showTargets elab | null (elabBuildTargets elab) = "" | otherwise = "(" ++ intercalate ", " [ showComponentTarget (packageId elab) t | t <- elabBuildTargets elab ] ++ ")" showConfigureFlags :: ElaboratedConfiguredPackage -> String showConfigureFlags elab = let fullConfigureFlags = setupHsConfigureFlags (ReadyPackage elab) elaboratedShared verbosity "$builddir" -- | Given a default value @x@ for a flag, nub @Flag x@ -- into @NoFlag@. This gives us a tidier command line -- rendering. nubFlag :: Eq a => a -> Setup.Flag a -> Setup.Flag a nubFlag x (Setup.Flag x') | x == x' = Setup.NoFlag nubFlag _ f = f (tryLibProfiling, tryExeProfiling) = computeEffectiveProfiling fullConfigureFlags partialConfigureFlags = mempty { configProf = nubFlag False (configProf fullConfigureFlags), configProfExe = nubFlag tryExeProfiling (configProfExe fullConfigureFlags), configProfLib = nubFlag tryLibProfiling (configProfLib fullConfigureFlags) -- Maybe there are more we can add } -- Not necessary to "escape" it, it's just for user output in unwords . 
("":) $ commandShowOptions (Setup.configureCommand (pkgConfigCompilerProgs elaboratedShared)) partialConfigureFlags showBuildStatus :: BuildStatus -> String showBuildStatus status = case status of BuildStatusPreExisting -> "existing package" BuildStatusInstalled -> "already installed" BuildStatusDownload {} -> "requires download & build" BuildStatusUnpack {} -> "requires build" BuildStatusRebuild _ rebuild -> case rebuild of BuildStatusConfigure (MonitoredValueChanged _) -> "configuration changed" BuildStatusConfigure mreason -> showMonitorChangedReason mreason BuildStatusBuild _ buildreason -> case buildreason of BuildReasonDepsRebuilt -> "dependency rebuilt" BuildReasonFilesChanged mreason -> showMonitorChangedReason mreason BuildReasonExtraTargets _ -> "additional components to build" BuildReasonEphemeralTargets -> "ephemeral targets" BuildStatusUpToDate {} -> "up to date" -- doesn't happen showMonitorChangedReason :: MonitorChangedReason a -> String showMonitorChangedReason (MonitoredFileChanged file) = "file " ++ file ++ " changed" showMonitorChangedReason (MonitoredValueChanged _) = "value changed" showMonitorChangedReason MonitorFirstRun = "first run" showMonitorChangedReason MonitorCorruptCache = "cannot read state cache" showBuildProfile :: String showBuildProfile = "Build profile: " ++ unwords [ "-w " ++ (showCompilerId . pkgConfigCompiler) elaboratedShared, "-O" ++ (case packageConfigOptimization of Setup.Flag NoOptimisation -> "0" Setup.Flag NormalOptimisation -> "1" Setup.Flag MaximumOptimisation -> "2" Setup.NoFlag -> "1")] ++ "\n" writeBuildReports :: BuildTimeSettings -> ProjectBuildContext -> ElaboratedInstallPlan -> BuildOutcomes -> IO () writeBuildReports settings buildContext plan buildOutcomes = do let plat@(Platform arch os) = pkgConfigPlatform . elaboratedShared $ buildContext comp = pkgConfigCompiler . 
elaboratedShared $ buildContext getRepo (RepoTarballPackage r _ _) = Just r getRepo _ = Nothing fromPlanPackage (InstallPlan.Configured pkg) (Just result) = let installOutcome = case result of Left bf -> case buildFailureReason bf of DependentFailed p -> BuildReports.DependencyFailed p DownloadFailed _ -> BuildReports.DownloadFailed UnpackFailed _ -> BuildReports.UnpackFailed ConfigureFailed _ -> BuildReports.ConfigureFailed BuildFailed _ -> BuildReports.BuildFailed TestsFailed _ -> BuildReports.TestsFailed InstallFailed _ -> BuildReports.InstallFailed ReplFailed _ -> BuildReports.InstallOk HaddocksFailed _ -> BuildReports.InstallOk BenchFailed _ -> BuildReports.InstallOk Right _br -> BuildReports.InstallOk docsOutcome = case result of Left bf -> case buildFailureReason bf of HaddocksFailed _ -> BuildReports.Failed _ -> BuildReports.NotTried Right br -> case buildResultDocs br of DocsNotTried -> BuildReports.NotTried DocsFailed -> BuildReports.Failed DocsOk -> BuildReports.Ok testsOutcome = case result of Left bf -> case buildFailureReason bf of TestsFailed _ -> BuildReports.Failed _ -> BuildReports.NotTried Right br -> case buildResultTests br of TestsNotTried -> BuildReports.NotTried TestsOk -> BuildReports.Ok in Just $ (BuildReports.BuildReport (packageId pkg) os arch (compilerId comp) cabalInstallID (elabFlagAssignment pkg) (map packageId $ elabLibDependencies pkg) installOutcome docsOutcome testsOutcome, getRepo . elabPkgSourceLocation $ pkg) -- TODO handle failure log files? 
fromPlanPackage _ _ = Nothing buildReports = mapMaybe (\x -> fromPlanPackage x (InstallPlan.lookupBuildOutcome x buildOutcomes)) $ InstallPlan.toList plan BuildReports.storeLocal (compilerInfo comp) (buildSettingSummaryFile settings) buildReports plat -- Note this doesn't handle the anonymous build reports set by buildSettingBuildReports but those appear to not be used or missed from v1 -- The usage pattern appears to be that rather than rely on flags to cabal to send build logs to the right place and package them with reports, etc, it is easier to simply capture its output to an appropriate handle. -- | If there are build failures then report them and throw an exception. -- dieOnBuildFailures :: Verbosity -> CurrentCommand -> ElaboratedInstallPlan -> BuildOutcomes -> IO () dieOnBuildFailures verbosity currentCommand plan buildOutcomes | null failures = return () | isSimpleCase = exitFailure | otherwise = do -- For failures where we have a build log, print the log plus a header sequence_ [ do notice verbosity $ '\n' : renderFailureDetail False pkg reason ++ "\nBuild log ( " ++ logfile ++ " ):" readFile logfile >>= noticeNoWrap verbosity | (pkg, ShowBuildSummaryAndLog reason logfile) <- failuresClassification ] -- For all failures, print either a short summary (if we showed the -- build log) or all details dieIfNotHaddockFailure verbosity $ unlines [ case failureClassification of ShowBuildSummaryAndLog reason _ | verbosity > normal -> renderFailureDetail mentionDepOf pkg reason | otherwise -> renderFailureSummary mentionDepOf pkg reason ++ ". See the build log above for details." 
ShowBuildSummaryOnly reason -> renderFailureDetail mentionDepOf pkg reason | let mentionDepOf = verbosity <= normal , (pkg, failureClassification) <- failuresClassification ] where failures :: [(UnitId, BuildFailure)] failures = [ (pkgid, failure) | (pkgid, Left failure) <- Map.toList buildOutcomes ] failuresClassification :: [(ElaboratedConfiguredPackage, BuildFailurePresentation)] failuresClassification = [ (pkg, classifyBuildFailure failure) | (pkgid, failure) <- failures , case buildFailureReason failure of DependentFailed {} -> verbosity > normal _ -> True , InstallPlan.Configured pkg <- maybeToList (InstallPlan.lookup plan pkgid) ] dieIfNotHaddockFailure :: Verbosity -> String -> IO () dieIfNotHaddockFailure | currentCommand == HaddockCommand = die' | all isHaddockFailure failuresClassification = warn | otherwise = die' where isHaddockFailure (_, ShowBuildSummaryOnly (HaddocksFailed _) ) = True isHaddockFailure (_, ShowBuildSummaryAndLog (HaddocksFailed _) _) = True isHaddockFailure _ = False classifyBuildFailure :: BuildFailure -> BuildFailurePresentation classifyBuildFailure BuildFailure { buildFailureReason = reason, buildFailureLogFile = mlogfile } = maybe (ShowBuildSummaryOnly reason) (ShowBuildSummaryAndLog reason) $ do logfile <- mlogfile e <- buildFailureException reason ExitFailure 1 <- fromException e return logfile -- Special case: we don't want to report anything complicated in the case -- of just doing build on the current package, since it's clear from -- context which package failed. -- -- We generalise this rule as follows: -- - if only one failure occurs, and it is in a single root -- package (i.e. a package with nothing else depending on it) -- - and that failure is of a kind that always reports enough -- detail itself (e.g. ghc reporting errors on stdout) -- - then we do not report additional error detail or context. 
-- isSimpleCase :: Bool isSimpleCase | [(pkgid, failure)] <- failures , [pkg] <- rootpkgs , installedUnitId pkg == pkgid , isFailureSelfExplanatory (buildFailureReason failure) , currentCommand /= InstallCommand = True | otherwise = False -- NB: if the Setup script segfaulted or was interrupted, -- we should give more detailed information. So only -- assume that exit code 1 is "pedestrian failure." isFailureSelfExplanatory :: BuildFailureReason -> Bool isFailureSelfExplanatory (BuildFailed e) | Just (ExitFailure 1) <- fromException e = True isFailureSelfExplanatory (ConfigureFailed e) | Just (ExitFailure 1) <- fromException e = True isFailureSelfExplanatory _ = False rootpkgs :: [ElaboratedConfiguredPackage] rootpkgs = [ pkg | InstallPlan.Configured pkg <- InstallPlan.toList plan , hasNoDependents pkg ] ultimateDeps :: UnitId -> [InstallPlan.GenericPlanPackage InstalledPackageInfo ElaboratedConfiguredPackage] ultimateDeps pkgid = filter (\pkg -> hasNoDependents pkg && installedUnitId pkg /= pkgid) (InstallPlan.reverseDependencyClosure plan [pkgid]) hasNoDependents :: HasUnitId pkg => pkg -> Bool hasNoDependents = null . InstallPlan.revDirectDeps plan . installedUnitId renderFailureDetail :: Bool -> ElaboratedConfiguredPackage -> BuildFailureReason -> String renderFailureDetail mentionDepOf pkg reason = renderFailureSummary mentionDepOf pkg reason ++ "." 
++ renderFailureExtraDetail reason ++ maybe "" showException (buildFailureException reason) renderFailureSummary :: Bool -> ElaboratedConfiguredPackage -> BuildFailureReason -> String renderFailureSummary mentionDepOf pkg reason = case reason of DownloadFailed _ -> "Failed to download " ++ pkgstr UnpackFailed _ -> "Failed to unpack " ++ pkgstr ConfigureFailed _ -> "Failed to build " ++ pkgstr BuildFailed _ -> "Failed to build " ++ pkgstr ReplFailed _ -> "repl failed for " ++ pkgstr HaddocksFailed _ -> "Failed to build documentation for " ++ pkgstr TestsFailed _ -> "Tests failed for " ++ pkgstr BenchFailed _ -> "Benchmarks failed for " ++ pkgstr InstallFailed _ -> "Failed to build " ++ pkgstr DependentFailed depid -> "Failed to build " ++ prettyShow (packageId pkg) ++ " because it depends on " ++ prettyShow depid ++ " which itself failed to build" where pkgstr = elabConfiguredName verbosity pkg ++ if mentionDepOf then renderDependencyOf (installedUnitId pkg) else "" renderFailureExtraDetail :: BuildFailureReason -> String renderFailureExtraDetail (ConfigureFailed _) = " The failure occurred during the configure step." renderFailureExtraDetail (InstallFailed _) = " The failure occurred during the final install step." renderFailureExtraDetail _ = "" renderDependencyOf :: UnitId -> String renderDependencyOf pkgid = case ultimateDeps pkgid of [] -> "" (p1:[]) -> " (which is required by " ++ elabPlanPackageName verbosity p1 ++ ")" (p1:p2:[]) -> " (which is required by " ++ elabPlanPackageName verbosity p1 ++ " and " ++ elabPlanPackageName verbosity p2 ++ ")" (p1:p2:_) -> " (which is required by " ++ elabPlanPackageName verbosity p1 ++ ", " ++ elabPlanPackageName verbosity p2 ++ " and others)" showException e = case fromException e of Just (ExitFailure 1) -> "" #ifdef MIN_VERSION_unix -- Note [Positive "signal" exit code] -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -- What's the business with the test for negative and positive -- signal values? 
The API for process specifies that if the -- process died due to a signal, it returns a *negative* exit -- code. So that's the negative test. -- -- What about the positive test? Well, when we find out that -- a process died due to a signal, we ourselves exit with that -- exit code. However, we don't "kill ourselves" with the -- signal; we just exit with the same code as the signal: thus -- the caller sees a *positive* exit code. So that's what -- happens when we get a positive exit code. Just (ExitFailure n) | -n == fromIntegral sigSEGV -> " The build process segfaulted (i.e. SIGSEGV)." | n == fromIntegral sigSEGV -> " The build process terminated with exit code " ++ show n ++ " which may be because some part of it segfaulted. (i.e. SIGSEGV)." | -n == fromIntegral sigKILL -> " The build process was killed (i.e. SIGKILL). " ++ explanation | n == fromIntegral sigKILL -> " The build process terminated with exit code " ++ show n ++ " which may be because some part of it was killed " ++ "(i.e. SIGKILL). " ++ explanation where explanation = "The typical reason for this is that there is not " ++ "enough memory available (e.g. the OS killed a process " ++ "using lots of memory)." 
#endif Just (ExitFailure n) -> " The build process terminated with exit code " ++ show n _ -> " The exception was:\n " #if MIN_VERSION_base(4,8,0) ++ displayException e #else ++ show e #endif buildFailureException :: BuildFailureReason -> Maybe SomeException buildFailureException reason = case reason of DownloadFailed e -> Just e UnpackFailed e -> Just e ConfigureFailed e -> Just e BuildFailed e -> Just e ReplFailed e -> Just e HaddocksFailed e -> Just e TestsFailed e -> Just e BenchFailed e -> Just e InstallFailed e -> Just e DependentFailed _ -> Nothing data BuildFailurePresentation = ShowBuildSummaryOnly BuildFailureReason | ShowBuildSummaryAndLog BuildFailureReason FilePath ------------------------------------------------------------------------------- -- Dummy projects ------------------------------------------------------------------------------- -- | Create a dummy project context, without a .cabal or a .cabal.project file -- (a place where to put a temporary dist directory is still needed) establishDummyProjectBaseContext :: Verbosity -> ProjectConfig -- ^ Project configuration including the global config if needed -> DistDirLayout -- ^ Where to put the dist directory -> [PackageSpecifier UnresolvedSourcePackage] -- ^ The packages to be included in the project -> CurrentCommand -> IO ProjectBaseContext establishDummyProjectBaseContext verbosity projectConfig distDirLayout localPackages currentCommand = do cabalDir <- getCabalDir let ProjectConfigBuildOnly { projectConfigLogsDir } = projectConfigBuildOnly projectConfig ProjectConfigShared { projectConfigStoreDir } = projectConfigShared projectConfig mlogsDir = flagToMaybe projectConfigLogsDir mstoreDir = flagToMaybe projectConfigStoreDir cabalDirLayout = mkCabalDirLayout cabalDir mstoreDir mlogsDir buildSettings :: BuildTimeSettings buildSettings = resolveBuildTimeSettings verbosity cabalDirLayout projectConfig return ProjectBaseContext { distDirLayout, cabalDirLayout, projectConfig, localPackages, 
buildSettings, currentCommand } establishDummyDistDirLayout :: Verbosity -> ProjectConfig -> FilePath -> IO DistDirLayout establishDummyDistDirLayout verbosity cliConfig tmpDir = do let distDirLayout = defaultDistDirLayout projectRoot mdistDirectory -- Create the dist directories createDirectoryIfMissingVerbose verbosity True $ distDirectory distDirLayout createDirectoryIfMissingVerbose verbosity True $ distProjectCacheDirectory distDirLayout return distDirLayout where mdistDirectory = flagToMaybe $ projectConfigDistDir $ projectConfigShared cliConfig projectRoot = ProjectRootImplicit tmpDir cabal-install-3.8.1.0/src/Distribution/Client/ProjectPlanOutput.hs0000644000000000000000000012546007346545000023252 0ustar0000000000000000{-# LANGUAGE BangPatterns, RecordWildCards, NamedFieldPuns, DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving, ScopedTypeVariables #-} module Distribution.Client.ProjectPlanOutput ( -- * Plan output writePlanExternalRepresentation, -- * Project status -- | Several outputs rely on having a general overview of PostBuildProjectStatus(..), updatePostBuildProjectStatus, createPackageEnvironment, writePlanGhcEnvironment, argsEquivalentOfGhcEnvironmentFile, ) where import Distribution.Client.ProjectPlanning.Types import Distribution.Client.ProjectBuilding.Types import Distribution.Client.DistDirLayout import Distribution.Client.Types.Repo (Repo(..), RemoteRepo(..)) import Distribution.Client.Types.PackageLocation (PackageLocation(..)) import Distribution.Client.Types.ConfiguredId (confInstId) import Distribution.Client.Types.SourceRepo (SourceRepoMaybe, SourceRepositoryPackage (..)) import Distribution.Client.HashValue (showHashValue, hashValue) import Distribution.Client.Version (cabalInstallVersion) import qualified Distribution.Client.InstallPlan as InstallPlan import qualified Distribution.Client.Utils.Json as J import qualified Distribution.Simple.InstallDirs as InstallDirs import qualified Distribution.Solver.Types.ComponentDeps as 
ComponentDeps import Distribution.Package import Distribution.System import Distribution.InstalledPackageInfo (InstalledPackageInfo) import qualified Distribution.PackageDescription as PD import Distribution.Compiler (CompilerFlavor(GHC, GHCJS)) import Distribution.Simple.Compiler ( PackageDBStack, PackageDB(..) , compilerVersion, compilerFlavor, showCompilerId , compilerId, CompilerId(..), Compiler ) import Distribution.Simple.GHC ( getImplInfo, GhcImplInfo(supportsPkgEnvFiles) , GhcEnvironmentFileEntry(..), simpleGhcEnvironmentFile , writeGhcEnvironmentFile ) import Distribution.Simple.BuildPaths ( dllExtension, exeExtension, buildInfoPref ) import qualified Distribution.Compat.Graph as Graph import Distribution.Compat.Graph (Graph, Node) import qualified Distribution.Compat.Binary as Binary import Distribution.Simple.Utils import Distribution.Types.Version ( mkVersion ) import Distribution.Verbosity import Prelude () import Distribution.Client.Compat.Prelude import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.ByteString.Lazy as BS import qualified Data.ByteString.Builder as BB import System.FilePath import System.IO import Distribution.Simple.Program.GHC (packageDbArgsDb) ----------------------------------------------------------------------------- -- Writing plan.json files -- -- | Write out a representation of the elaborated install plan. -- -- This is for the benefit of debugging and external tools like editors. -- writePlanExternalRepresentation :: DistDirLayout -> ElaboratedInstallPlan -> ElaboratedSharedConfig -> IO () writePlanExternalRepresentation distDirLayout elaboratedInstallPlan elaboratedSharedConfig = writeFileAtomic (distProjectCacheFile distDirLayout "plan.json") $ BB.toLazyByteString . J.encodeToBuilder $ encodePlanAsJson distDirLayout elaboratedInstallPlan elaboratedSharedConfig -- | Renders a subset of the elaborated install plan in a semi-stable JSON -- format. 
-- encodePlanAsJson :: DistDirLayout -> ElaboratedInstallPlan -> ElaboratedSharedConfig -> J.Value encodePlanAsJson distDirLayout elaboratedInstallPlan elaboratedSharedConfig = --TODO: [nice to have] include all of the sharedPackageConfig and all of -- the parts of the elaboratedInstallPlan J.object [ "cabal-version" J..= jdisplay cabalInstallVersion , "cabal-lib-version" J..= jdisplay cabalVersion , "compiler-id" J..= (J.String . showCompilerId . pkgConfigCompiler) elaboratedSharedConfig , "os" J..= jdisplay os , "arch" J..= jdisplay arch , "install-plan" J..= installPlanToJ elaboratedInstallPlan ] where plat :: Platform plat@(Platform arch os) = pkgConfigPlatform elaboratedSharedConfig installPlanToJ :: ElaboratedInstallPlan -> [J.Value] installPlanToJ = map planPackageToJ . InstallPlan.toList planPackageToJ :: ElaboratedPlanPackage -> J.Value planPackageToJ pkg = case pkg of InstallPlan.PreExisting ipi -> installedPackageInfoToJ ipi InstallPlan.Configured elab -> elaboratedPackageToJ False elab InstallPlan.Installed elab -> elaboratedPackageToJ True elab -- Note that the plan.json currently only uses the elaborated plan, -- not the improved plan. So we will not get the Installed state for -- that case, but the code supports it in case we want to use this -- later in some use case where we want the status of the build. installedPackageInfoToJ :: InstalledPackageInfo -> J.Value installedPackageInfoToJ ipi = -- Pre-existing packages lack configuration information such as their flag -- settings or non-lib components. We only get pre-existing packages for -- the global/core packages however, so this isn't generally a problem. -- So these packages are never local to the project. -- J.object [ "type" J..= J.String "pre-existing" , "id" J..= (jdisplay . installedUnitId) ipi , "pkg-name" J..= (jdisplay . pkgName . packageId) ipi , "pkg-version" J..= (jdisplay . pkgVersion . 
packageId) ipi , "depends" J..= map jdisplay (installedDepends ipi) ] elaboratedPackageToJ :: Bool -> ElaboratedConfiguredPackage -> J.Value elaboratedPackageToJ isInstalled elab = J.object $ [ "type" J..= J.String (if isInstalled then "installed" else "configured") , "id" J..= (jdisplay . installedUnitId) elab , "pkg-name" J..= (jdisplay . pkgName . packageId) elab , "pkg-version" J..= (jdisplay . pkgVersion . packageId) elab , "flags" J..= J.object [ PD.unFlagName fn J..= v | (fn,v) <- PD.unFlagAssignment (elabFlagAssignment elab) ] , "style" J..= J.String (style2str (elabLocalToProject elab) (elabBuildStyle elab)) , "pkg-src" J..= packageLocationToJ (elabPkgSourceLocation elab) ] ++ [ "pkg-cabal-sha256" J..= J.String (showHashValue hash) | Just hash <- [ fmap hashValue (elabPkgDescriptionOverride elab) ] ] ++ [ "pkg-src-sha256" J..= J.String (showHashValue hash) | Just hash <- [elabPkgSourceHash elab] ] ++ (case elabBuildStyle elab of BuildInplaceOnly -> ["dist-dir" J..= J.String dist_dir] ++ [buildInfoFileLocation] BuildAndInstall -> -- TODO: install dirs? [] ) ++ case elabPkgOrComp elab of ElabPackage pkg -> let components = J.object $ [ comp2str c J..= (J.object $ [ "depends" J..= map (jdisplay . confInstId) ldeps , "exe-depends" J..= map (jdisplay . confInstId) edeps ] ++ bin_file c) | (c,(ldeps,edeps)) <- ComponentDeps.toList $ ComponentDeps.zip (pkgLibDependencies pkg) (pkgExeDependencies pkg) ] in ["components" J..= components] ElabComponent comp -> ["depends" J..= map (jdisplay . confInstId) (elabLibDependencies elab) ,"exe-depends" J..= map jdisplay (elabExeDependencies elab) ,"component-name" J..= J.String (comp2str (compSolverName comp)) ] ++ bin_file (compSolverName comp) where -- | Only add build-info file location if the Setup.hs CLI -- is recent enough to be able to generate build info files. -- Otherwise, write 'null'. 
-- -- Consumers of `plan.json` can use the nullability of this file location -- to indicate that the given component uses `build-type: Custom` -- with an old lib:Cabal version. buildInfoFileLocation :: J.Pair buildInfoFileLocation | elabSetupScriptCliVersion elab < mkVersion [3, 7, 0, 0] = "build-info" J..= J.Null | otherwise = "build-info" J..= J.String (buildInfoPref dist_dir) packageLocationToJ :: PackageLocation (Maybe FilePath) -> J.Value packageLocationToJ pkgloc = case pkgloc of LocalUnpackedPackage local -> J.object [ "type" J..= J.String "local" , "path" J..= J.String local ] LocalTarballPackage local -> J.object [ "type" J..= J.String "local-tar" , "path" J..= J.String local ] RemoteTarballPackage uri _ -> J.object [ "type" J..= J.String "remote-tar" , "uri" J..= J.String (show uri) ] RepoTarballPackage repo _ _ -> J.object [ "type" J..= J.String "repo-tar" , "repo" J..= repoToJ repo ] RemoteSourceRepoPackage srcRepo _ -> J.object [ "type" J..= J.String "source-repo" , "source-repo" J..= sourceRepoToJ srcRepo ] repoToJ :: Repo -> J.Value repoToJ repo = case repo of RepoLocalNoIndex{..} -> J.object [ "type" J..= J.String "local-repo-no-index" , "path" J..= J.String repoLocalDir ] RepoRemote{..} -> J.object [ "type" J..= J.String "remote-repo" , "uri" J..= J.String (show (remoteRepoURI repoRemote)) ] RepoSecure{..} -> J.object [ "type" J..= J.String "secure-repo" , "uri" J..= J.String (show (remoteRepoURI repoRemote)) ] sourceRepoToJ :: SourceRepoMaybe -> J.Value sourceRepoToJ SourceRepositoryPackage{..} = J.object $ filter ((/= J.Null) . 
snd) $ [ "type" J..= jdisplay srpType , "location" J..= J.String srpLocation , "branch" J..= fmap J.String srpBranch , "tag" J..= fmap J.String srpTag , "subdir" J..= fmap J.String srpSubdir ] dist_dir :: FilePath dist_dir = distBuildDirectory distDirLayout (elabDistDirParams elaboratedSharedConfig elab) bin_file :: ComponentDeps.Component -> [J.Pair] bin_file c = case c of ComponentDeps.ComponentExe s -> bin_file' s ComponentDeps.ComponentTest s -> bin_file' s ComponentDeps.ComponentBench s -> bin_file' s ComponentDeps.ComponentFLib s -> flib_file' s _ -> [] bin_file' s = ["bin-file" J..= J.String bin] where bin = if elabBuildStyle elab == BuildInplaceOnly then dist_dir "build" prettyShow s prettyShow s <.> exeExtension plat else InstallDirs.bindir (elabInstallDirs elab) prettyShow s <.> exeExtension plat flib_file' :: (Pretty a, Show a) => a -> [J.Pair] flib_file' s = ["bin-file" J..= J.String bin] where bin = if elabBuildStyle elab == BuildInplaceOnly then dist_dir "build" prettyShow s ("lib" ++ prettyShow s) <.> dllExtension plat else InstallDirs.bindir (elabInstallDirs elab) ("lib" ++ prettyShow s) <.> dllExtension plat comp2str :: ComponentDeps.Component -> String comp2str = prettyShow style2str :: Bool -> BuildStyle -> String style2str True _ = "local" style2str False BuildInplaceOnly = "inplace" style2str False BuildAndInstall = "global" jdisplay :: Pretty a => a -> J.Value jdisplay = J.String . prettyShow ----------------------------------------------------------------------------- -- Project status -- -- So, what is the status of a project after a build? That is, how do the -- inputs (package source files etc) compare to the output artefacts (build -- libs, exes etc)? Do the outputs reflect the current values of the inputs -- or are outputs out of date or invalid? -- -- First of all, what do we mean by out-of-date and what do we mean by -- invalid? 
We think of the build system as a morally pure function that -- computes the output artefacts given input values. We say an output artefact -- is out of date when its value is not the value that would be computed by a -- build given the current values of the inputs. An output artefact can be -- out-of-date but still be perfectly usable; it simply correspond to a -- previous state of the inputs. -- -- On the other hand there are cases where output artefacts cannot safely be -- used. For example libraries and dynamically linked executables cannot be -- used when the libs they depend on change without them being recompiled -- themselves. Whether an artefact is still usable depends on what it is, e.g. -- dynamically linked vs statically linked and on how it gets updated (e.g. -- only atomically on success or if failure can leave invalid states). We need -- a definition (or two) that is independent of the kind of artefact and can -- be computed just in terms of changes in package graphs, but are still -- useful for determining when particular kinds of artefacts are invalid. -- -- Note that when we talk about packages in this context we just mean nodes -- in the elaborated install plan, which can be components or packages. -- -- There's obviously a close connection between packages being out of date and -- their output artefacts being unusable: most of the time if a package -- remains out of date at the end of a build then some of its output artefacts -- will be unusable. That is true most of the time because a build will have -- attempted to build one of the out-of-date package's dependencies. If the -- build of the dependency succeeded then it changed output artefacts (like -- libs) and if it failed then it may have failed after already changing -- things (think failure after updating some but not all .hi files). -- -- There are a few reasons we may end up with still-usable output artefacts -- for a package even when it remains out of date at the end of a build. 
-- Firstly if executing a plan fails then packages can be skipped, and thus we -- may have packages where all their dependencies were skipped. Secondly we -- have artefacts like statically linked executables which are not affected by -- libs they depend on being recompiled. Furthermore, packages can be out of -- date due to changes in build tools or Setup.hs scripts they depend on, but -- again libraries or executables in those out-of-date packages remain usable. -- -- So we have two useful definitions of invalid. Both are useful, for -- different purposes, so we will compute both. The first corresponds to the -- invalid libraries and dynamic executables. We say a package is invalid by -- changed deps if any of the packages it depends on (via library dep edges) -- were rebuilt (successfully or unsuccessfully). The second definition -- corresponds to invalid static executables. We say a package is invalid by -- a failed build simply if the package was built but unsuccessfully. -- -- So how do we find out what packages are out of date or invalid? -- -- Obviously we know something for all the packages that were part of the plan -- that was executed, but that is just a subset since we prune the plan down -- to the targets and their dependencies. -- -- Recall the steps we go though: -- -- + starting with the initial improved plan (this is the full project); -- -- + prune the plan to the user's build targets; -- -- + rebuildTargetsDryRun on the pruned plan giving us a BuildStatusMap -- covering the pruned subset of the original plan; -- -- + execute the plan giving us BuildOutcomes which tell us success/failure -- for each package. -- -- So given that the BuildStatusMap and BuildOutcomes do not cover everything -- in the original plan, what can they tell us about the original plan? -- -- The BuildStatusMap tells us directly that some packages are up to date and -- others out of date (but only for the pruned subset). 
But we know that -- everything that is a reverse dependency of an out-of-date package is itself -- out-of-date (whether or not it is in the pruned subset). Of course after -- a build the BuildOutcomes may tell us that some of those out-of-date -- packages are now up to date (ie a successful build outcome). -- -- The difference is packages that are reverse dependencies of out-of-date -- packages but are not brought up-to-date by the build (i.e. did not have -- successful outcomes, either because they failed or were not in the pruned -- subset to be built). We also know which packages were rebuilt, so we can -- use this to find the now-invalid packages. -- -- Note that there are still packages for which we cannot discover full status -- information. There may be packages outside of the pruned plan that do not -- depend on packages within the pruned plan that were discovered to be -- out-of-date. For these packages we do not know if their build artefacts -- are out-of-date or not. We do know however that they are not invalid, as -- that's not possible given our definition of invalid. Intuitively it is -- because we have not disturbed anything that these packages depend on, e.g. -- we've not rebuilt any libs they depend on. Recall that our widest -- definition of invalid was only concerned about dependencies on libraries -- (to cover problems like shared libs or GHC seeing inconsistent .hi files). -- -- So our algorithm for out-of-date packages is relatively simple: take the -- reverse dependency closure in the original improved plan (pre-pruning) of -- the out-of-date packages (as determined by the BuildStatusMap from the dry -- run). That gives a set of packages that were definitely out of date after -- the dry run. Now we remove from this set the packages that the -- BuildOutcomes tells us are now up-to-date after the build. The remaining -- set is the out-of-date packages. 
-- -- As for packages that are invalid by changed deps, we start with the plan -- dependency graph but keep only those edges that point to libraries (so -- ignoring deps on exes and setup scripts). We take the packages for which a -- build was attempted (successfully or unsuccessfully, but not counting -- knock-on failures) and take the reverse dependency closure. We delete from -- this set all the packages that were built successfully. Note that we do not -- need to intersect with the out-of-date packages since this follows -- automatically: all rev deps of packages we attempted to build must have -- been out of date at the start of the build, and if they were not built -- successfully then they're still out of date -- meeting our definition of -- invalid. type PackageIdSet = Set UnitId type PackagesUpToDate = PackageIdSet data PostBuildProjectStatus = PostBuildProjectStatus { -- | Packages that are known to be up to date. These were found to be -- up to date before the build, or they have a successful build outcome -- afterwards. -- -- This does not include any packages outside of the subset of the plan -- that was executed because we did not check those and so don't know -- for sure that they're still up to date. -- packagesDefinitelyUpToDate :: PackageIdSet, -- | Packages that are probably still up to date (and at least not -- known to be out of date, and certainly not invalid). This includes -- 'packagesDefinitelyUpToDate' plus packages that were up to date -- previously and are outside of the subset of the plan that was -- executed. It excludes 'packagesOutOfDate'. -- packagesProbablyUpToDate :: PackageIdSet, -- | Packages that are known to be out of date. These are packages -- that were determined to be out of date before the build, and they -- do not have a successful build outcome afterwards. -- -- Note that this can sometimes include packages outside of the subset -- of the plan that was executed. 
For example suppose package A and B -- depend on C, and A is the target so only A and C are in the subset -- to be built. Now suppose C is found to have changed, then both A -- and B are out-of-date before the build and since B is outside the -- subset to be built then it will remain out of date. -- -- Note also that this is /not/ the inverse of -- 'packagesDefinitelyUpToDate' or 'packagesProbablyUpToDate'. -- There are packages where we have no information (ones that were not -- in the subset of the plan that was executed). -- packagesOutOfDate :: PackageIdSet, -- | Packages that depend on libraries that have changed during the -- build (either build success or failure). -- -- This corresponds to the fact that libraries and dynamic executables -- are invalid once any of the libs they depend on change. -- -- This does include packages that themselves failed (i.e. it is a -- superset of 'packagesInvalidByFailedBuild'). It does not include -- changes in dependencies on executables (i.e. build tools). -- packagesInvalidByChangedLibDeps :: PackageIdSet, -- | Packages that themselves failed during the build (i.e. them -- directly not a dep). -- -- This corresponds to the fact that static executables are invalid -- in unlucky circumstances such as linking failing half way though, -- or data file generation failing. -- -- This is a subset of 'packagesInvalidByChangedLibDeps'. -- packagesInvalidByFailedBuild :: PackageIdSet, -- | A subset of the plan graph, including only dependency-on-library -- edges. That is, dependencies /on/ libraries, not dependencies /of/ -- libraries. This tells us all the libraries that packages link to. -- -- This is here as a convenience, as strictly speaking it's not status -- as it's just a function of the original 'ElaboratedInstallPlan'. 
-- packagesLibDepGraph :: Graph (Node UnitId ElaboratedPlanPackage), -- | As a convenience for 'Set.intersection' with any of the other -- 'PackageIdSet's to select only packages that are part of the -- project locally (i.e. with a local source dir). -- packagesBuildLocal :: PackageIdSet, -- | As a convenience for 'Set.intersection' with any of the other -- 'PackageIdSet's to select only packages that are being built -- in-place within the project (i.e. not destined for the store). -- packagesBuildInplace :: PackageIdSet, -- | As a convenience for 'Set.intersection' or 'Set.difference' with -- any of the other 'PackageIdSet's to select only packages that were -- pre-installed or already in the store prior to the build. -- packagesAlreadyInStore :: PackageIdSet } -- | Work out which packages are out of date or invalid after a build. -- postBuildProjectStatus :: ElaboratedInstallPlan -> PackagesUpToDate -> BuildStatusMap -> BuildOutcomes -> PostBuildProjectStatus postBuildProjectStatus plan previousPackagesUpToDate pkgBuildStatus buildOutcomes = PostBuildProjectStatus { packagesDefinitelyUpToDate, packagesProbablyUpToDate, packagesOutOfDate, packagesInvalidByChangedLibDeps, packagesInvalidByFailedBuild, -- convenience stuff packagesLibDepGraph, packagesBuildLocal, packagesBuildInplace, packagesAlreadyInStore } where packagesDefinitelyUpToDate = packagesUpToDatePreBuild `Set.union` packagesSuccessfulPostBuild packagesProbablyUpToDate = packagesDefinitelyUpToDate `Set.union` (previousPackagesUpToDate' `Set.difference` packagesOutOfDatePreBuild) packagesOutOfDate = packagesOutOfDatePreBuild `Set.difference` packagesSuccessfulPostBuild packagesInvalidByChangedLibDeps = packagesDepOnChangedLib `Set.difference` packagesSuccessfulPostBuild packagesInvalidByFailedBuild = packagesFailurePostBuild -- Note: if any of the intermediate values below turn out to be useful in -- their own right then we can simply promote them to the result record -- The previous set of up-to-date 
packages will contain bogus package ids -- when the solver plan or config contributing to the hash changes. -- So keep only the ones where the package id (i.e. hash) is the same. previousPackagesUpToDate' = Set.intersection previousPackagesUpToDate (InstallPlan.keysSet plan) packagesUpToDatePreBuild = Set.filter (\ipkgid -> not (lookupBuildStatusRequiresBuild True ipkgid)) -- For packages not in the plan subset we did the dry-run on we don't -- know anything about their status, so not known to be /up to date/. (InstallPlan.keysSet plan) packagesOutOfDatePreBuild = Set.fromList . map installedUnitId $ InstallPlan.reverseDependencyClosure plan [ ipkgid | pkg <- InstallPlan.toList plan , let ipkgid = installedUnitId pkg , lookupBuildStatusRequiresBuild False ipkgid -- For packages not in the plan subset we did the dry-run on we don't -- know anything about their status, so not known to be /out of date/. ] packagesSuccessfulPostBuild = Set.fromList [ ikgid | (ikgid, Right _) <- Map.toList buildOutcomes ] -- direct failures, not failures due to deps packagesFailurePostBuild = Set.fromList [ ikgid | (ikgid, Left failure) <- Map.toList buildOutcomes , case buildFailureReason failure of DependentFailed _ -> False _ -> True ] -- Packages that have a library dependency on a package for which a build -- was attempted packagesDepOnChangedLib = Set.fromList . map Graph.nodeKey $ fromMaybe (error "packagesBuildStatusAfterBuild: broken dep closure") $ Graph.revClosure packagesLibDepGraph ( Map.keys . 
Map.filter (uncurry buildAttempted) $ Map.intersectionWith (,) pkgBuildStatus buildOutcomes ) -- The plan graph but only counting dependency-on-library edges packagesLibDepGraph :: Graph (Node UnitId ElaboratedPlanPackage) packagesLibDepGraph = Graph.fromDistinctList [ Graph.N pkg (installedUnitId pkg) libdeps | pkg <- InstallPlan.toList plan , let libdeps = case pkg of InstallPlan.PreExisting ipkg -> installedDepends ipkg InstallPlan.Configured srcpkg -> elabLibDeps srcpkg InstallPlan.Installed srcpkg -> elabLibDeps srcpkg ] elabLibDeps :: ElaboratedConfiguredPackage -> [UnitId] elabLibDeps = map (newSimpleUnitId . confInstId) . elabLibDependencies -- Was a build was attempted for this package? -- If it doesn't have both a build status and outcome then the answer is no. buildAttempted :: BuildStatus -> BuildOutcome -> Bool -- And not if it didn't need rebuilding in the first place. buildAttempted buildStatus _buildOutcome | not (buildStatusRequiresBuild buildStatus) = False -- And not if it was skipped due to a dep failing first. buildAttempted _ (Left BuildFailure {buildFailureReason}) | DependentFailed _ <- buildFailureReason = False -- Otherwise, succeeded or failed, yes the build was tried. 
buildAttempted _ (Left BuildFailure {}) = True buildAttempted _ (Right _) = True lookupBuildStatusRequiresBuild :: Bool -> UnitId -> Bool lookupBuildStatusRequiresBuild def ipkgid = case Map.lookup ipkgid pkgBuildStatus of Nothing -> def -- Not in the plan subset we did the dry-run on Just buildStatus -> buildStatusRequiresBuild buildStatus packagesBuildLocal :: Set UnitId packagesBuildLocal = selectPlanPackageIdSet $ \pkg -> case pkg of InstallPlan.PreExisting _ -> False InstallPlan.Installed _ -> False InstallPlan.Configured srcpkg -> elabLocalToProject srcpkg packagesBuildInplace :: Set UnitId packagesBuildInplace = selectPlanPackageIdSet $ \pkg -> case pkg of InstallPlan.PreExisting _ -> False InstallPlan.Installed _ -> False InstallPlan.Configured srcpkg -> elabBuildStyle srcpkg == BuildInplaceOnly packagesAlreadyInStore :: Set UnitId packagesAlreadyInStore = selectPlanPackageIdSet $ \pkg -> case pkg of InstallPlan.PreExisting _ -> True InstallPlan.Installed _ -> True InstallPlan.Configured _ -> False selectPlanPackageIdSet :: (InstallPlan.GenericPlanPackage InstalledPackageInfo ElaboratedConfiguredPackage -> Bool) -> Set UnitId selectPlanPackageIdSet p = Map.keysSet . 
Map.filter p $ InstallPlan.toMap plan updatePostBuildProjectStatus :: Verbosity -> DistDirLayout -> ElaboratedInstallPlan -> BuildStatusMap -> BuildOutcomes -> IO PostBuildProjectStatus updatePostBuildProjectStatus verbosity distDirLayout elaboratedInstallPlan pkgsBuildStatus buildOutcomes = do -- Read the previous up-to-date set, update it and write it back previousUpToDate <- readPackagesUpToDateCacheFile distDirLayout let currentBuildStatus@PostBuildProjectStatus{..} = postBuildProjectStatus elaboratedInstallPlan previousUpToDate pkgsBuildStatus buildOutcomes let currentUpToDate = packagesProbablyUpToDate writePackagesUpToDateCacheFile distDirLayout currentUpToDate -- Report various possibly interesting things -- We additionally intersect with the packagesBuildInplace so that -- we don't show huge numbers of boring packages from the store. debugNoWrap verbosity $ "packages definitely up to date: " ++ displayPackageIdSet (packagesDefinitelyUpToDate `Set.intersection` packagesBuildInplace) debugNoWrap verbosity $ "packages previously probably up to date: " ++ displayPackageIdSet (previousUpToDate `Set.intersection` packagesBuildInplace) debugNoWrap verbosity $ "packages now probably up to date: " ++ displayPackageIdSet (packagesProbablyUpToDate `Set.intersection` packagesBuildInplace) debugNoWrap verbosity $ "packages newly up to date: " ++ displayPackageIdSet (packagesDefinitelyUpToDate `Set.difference` previousUpToDate `Set.intersection` packagesBuildInplace) debugNoWrap verbosity $ "packages out to date: " ++ displayPackageIdSet (packagesOutOfDate `Set.intersection` packagesBuildInplace) debugNoWrap verbosity $ "packages invalid due to dep change: " ++ displayPackageIdSet packagesInvalidByChangedLibDeps debugNoWrap verbosity $ "packages invalid due to build failure: " ++ displayPackageIdSet packagesInvalidByFailedBuild return currentBuildStatus where displayPackageIdSet = intercalate ", " . map prettyShow . Set.toList -- | Helper for reading the cache file. 
-- -- This determines the type and format of the binary cache file. -- readPackagesUpToDateCacheFile :: DistDirLayout -> IO PackagesUpToDate readPackagesUpToDateCacheFile DistDirLayout{distProjectCacheFile} = handleDoesNotExist Set.empty $ handleDecodeFailure $ withBinaryFile (distProjectCacheFile "up-to-date") ReadMode $ \hnd -> Binary.decodeOrFailIO =<< BS.hGetContents hnd where handleDecodeFailure = fmap (either (const Set.empty) id) -- | Helper for writing the package up-to-date cache file. -- -- This determines the type and format of the binary cache file. -- writePackagesUpToDateCacheFile :: DistDirLayout -> PackagesUpToDate -> IO () writePackagesUpToDateCacheFile DistDirLayout{distProjectCacheFile} upToDate = writeFileAtomic (distProjectCacheFile "up-to-date") $ Binary.encode upToDate -- | Prepare a package environment that includes all the library dependencies -- for a plan. -- -- When running cabal new-exec, we want to set things up so that the compiler -- can find all the right packages (and nothing else). This function is -- intended to do that work. It takes a location where it can write files -- temporarily, in case the compiler wants to learn this information via the -- filesystem, and returns any environment variable overrides the compiler -- needs. 
createPackageEnvironment :: Verbosity
                         -> FilePath
                         -> ElaboratedInstallPlan
                         -> ElaboratedSharedConfig
                         -> PostBuildProjectStatus
                         -> IO [(String, Maybe String)]
createPackageEnvironment verbosity
                         path
                         elaboratedPlan
                         elaboratedShared
                         buildStatus
  -- Only GHC has a notion of environment files we can write; for GHC we
  -- write one and point the compiler at it via GHC_ENVIRONMENT.
  | compilerFlavor (pkgConfigCompiler elaboratedShared) == GHC
  = do
      envFileM <- writePlanGhcEnvironment
        path
        elaboratedPlan
        elaboratedShared
        buildStatus
      case envFileM of
        Just envFile -> return [("GHC_ENVIRONMENT", Just envFile)]
        -- 'writePlanGhcEnvironment' returns Nothing when the configured GHC
        -- is too old to support env files; warn and return no overrides.
        Nothing -> do
          warn verbosity "the configured version of GHC does not support reading package lists from the environment; commands that need the current project's package database are likely to fail"
          return []
  | otherwise
  = do
      warn verbosity "package environment configuration is not supported for the currently configured compiler; commands that need the current project's package database are likely to fail"
      return []

-- Writing .ghc.environment files
--

-- | Write a @.ghc.environment@ file for the project, if the configured
-- compiler is a GHC new enough to support package environment files.
-- Returns the path of the file written, or Nothing if unsupported.
writePlanGhcEnvironment :: FilePath
                        -> ElaboratedInstallPlan
                        -> ElaboratedSharedConfig
                        -> PostBuildProjectStatus
                        -> IO (Maybe FilePath)
writePlanGhcEnvironment path
                        elaboratedInstallPlan
                        ElaboratedSharedConfig {
                          pkgConfigCompiler = compiler,
                          pkgConfigPlatform = platform
                        }
                        postBuildStatus
  | compilerFlavor compiler == GHC
  , supportsPkgEnvFiles (getImplInfo compiler)
  --TODO: check ghcjs compat
  = fmap Just $ writeGhcEnvironmentFile
      path
      platform (compilerVersion compiler)
      (renderGhcEnvironmentFile path
                                elaboratedInstallPlan
                                postBuildStatus)
    --TODO: [required eventually] support for writing user-wide package
    -- environments, e.g. like a global project, but we would not put the
    -- env file in the home dir, rather it lives under ~/.ghc/

writePlanGhcEnvironment _ _ _ _ = return Nothing

-- | Render the contents of the @.ghc.environment@ file: a header comment
-- explaining what the file is, followed by the package dbs and the unit ids
-- of the project's up-to-date libraries.
renderGhcEnvironmentFile :: FilePath
                         -> ElaboratedInstallPlan
                         -> PostBuildProjectStatus
                         -> [GhcEnvironmentFileEntry]
renderGhcEnvironmentFile projectRootDir elaboratedInstallPlan
                         postBuildStatus =
    headerComment
  : simpleGhcEnvironmentFile packageDBs unitIds
  where
    headerComment =
        GhcEnvFileComment
      $ "This is a GHC environment file written by cabal. This means you can\n"
     ++ "run ghc or ghci and get the environment of the project as a whole.\n"
     ++ "But you still need to use cabal repl $target to get the environment\n"
     ++ "of specific components (libs, exes, tests etc) because each one can\n"
     ++ "have its own source dirs, cpp flags etc.\n\n"
    unitIds    = selectGhcEnvironmentFileLibraries postBuildStatus
    -- Paths are made relative to the project root so the env file survives
    -- the project directory being moved.
    packageDBs = relativePackageDBPaths projectRootDir $
                 selectGhcEnvironmentFilePackageDbs elaboratedInstallPlan

-- | The command-line flag equivalent of what 'renderGhcEnvironmentFile'
-- produces, for compilers/situations where we pass flags instead of
-- writing an environment file.
argsEquivalentOfGhcEnvironmentFile
  :: Compiler
  -> DistDirLayout
  -> ElaboratedInstallPlan
  -> PostBuildProjectStatus
  -> [String]
argsEquivalentOfGhcEnvironmentFile compiler =
  case compilerId compiler
    of CompilerId GHC   _ -> argsEquivalentOfGhcEnvironmentFileGhc
       CompilerId GHCJS _ -> argsEquivalentOfGhcEnvironmentFileGhc
       CompilerId _     _ -> error "Only GHC and GHCJS are supported"

-- TODO remove this when we drop support for non-.ghc.env ghc
argsEquivalentOfGhcEnvironmentFileGhc
  :: DistDirLayout
  -> ElaboratedInstallPlan
  -> PostBuildProjectStatus
  -> [String]
argsEquivalentOfGhcEnvironmentFileGhc
  distDirLayout
  elaboratedInstallPlan
  postBuildStatus =
    -- Start from a clean package db stack, then add the project's dbs and
    -- expose exactly the selected library unit ids.
    clearPackageDbStackFlag
 ++ packageDbArgsDb packageDBs
 ++ foldMap packageIdFlag packageIds
  where
    projectRootDir = distProjectRootDirectory distDirLayout
    packageIds = selectGhcEnvironmentFileLibraries postBuildStatus
    packageDBs = relativePackageDBPaths projectRootDir $
                 selectGhcEnvironmentFilePackageDbs elaboratedInstallPlan
    -- TODO use proper flags? but packageDbArgsDb is private
    clearPackageDbStackFlag = ["-clear-package-db", "-global-package-db"]
    packageIdFlag uid = ["-package-id", prettyShow uid]

-- We're producing an environment for users to use in ghci, so of course
-- that means libraries only (can't put exes into the ghc package env!).
-- The library environment should be /consistent/ with the environment
-- that each of the packages in the project use (ie same lib versions).
-- So that means all the normal library dependencies of all the things
-- in the project (including deps of exes that are local to the project).
-- We do not however want to include the dependencies of Setup.hs scripts,
-- since these are generally uninteresting but also they need not in
-- general be consistent with the library versions that packages local to
-- the project use (recall that Setup.hs script's deps can be picked
-- independently of other packages in the project).
--
-- So, our strategy is as follows:
--
-- produce a dependency graph of all the packages in the install plan,
-- but only consider normal library deps as edges in the graph. Thus we
-- exclude the dependencies on Setup.hs scripts (in the case of
-- per-component granularity) or of Setup.hs scripts (in the case of
-- per-package granularity). Then take a dependency closure, using as
-- roots all the packages/components local to the project. This will
-- exclude Setup scripts and their dependencies.
--
-- Note: this algorithm will have to be adapted if/when the install plan
-- is extended to cover multiple compilers at once, and may also have to
-- change if we start to treat unshared deps of test suites in a similar
-- way to how we treat Setup.hs script deps (ie being able to pick them
-- independently).
--
-- Since we had to use all the local packages, including exes, (as roots
-- to find the libs) then those exes still end up in our list so we have
-- to filter them out at the end.
-- selectGhcEnvironmentFileLibraries :: PostBuildProjectStatus -> [UnitId] selectGhcEnvironmentFileLibraries PostBuildProjectStatus{..} = case Graph.closure packagesLibDepGraph (Set.toList packagesBuildLocal) of Nothing -> error "renderGhcEnvironmentFile: broken dep closure" Just nodes -> [ pkgid | Graph.N pkg pkgid _ <- nodes , hasUpToDateLib pkg ] where hasUpToDateLib planpkg = case planpkg of -- A pre-existing global lib InstallPlan.PreExisting _ -> True -- A package in the store. Check it's a lib. InstallPlan.Installed pkg -> elabRequiresRegistration pkg -- A package we were installing this time, either destined for the store -- or just locally. Check it's a lib and that it is probably up to date. InstallPlan.Configured pkg -> elabRequiresRegistration pkg && installedUnitId pkg `Set.member` packagesProbablyUpToDate selectGhcEnvironmentFilePackageDbs :: ElaboratedInstallPlan -> PackageDBStack selectGhcEnvironmentFilePackageDbs elaboratedInstallPlan = -- If we have any inplace packages then their package db stack is the -- one we should use since it'll include the store + the local db but -- it's certainly possible to have no local inplace packages -- e.g. just "extra" packages coming from the store. case (inplacePackages, sourcePackages) of ([], pkgs) -> checkSamePackageDBs pkgs (pkgs, _) -> checkSamePackageDBs pkgs where checkSamePackageDBs :: [ElaboratedConfiguredPackage] -> PackageDBStack checkSamePackageDBs pkgs = case ordNub (map elabBuildPackageDBStack pkgs) of [packageDbs] -> packageDbs [] -> [] _ -> error $ "renderGhcEnvironmentFile: packages with " ++ "different package db stacks" -- This should not happen at the moment but will happen as soon -- as we support projects where we build packages with different -- compilers, at which point we have to consider how to adapt -- this feature, e.g. write out multiple env files, one for each -- compiler / project profile. 
inplacePackages :: [ElaboratedConfiguredPackage] inplacePackages = [ srcpkg | srcpkg <- sourcePackages , elabBuildStyle srcpkg == BuildInplaceOnly ] sourcePackages :: [ElaboratedConfiguredPackage] sourcePackages = [ srcpkg | pkg <- InstallPlan.toList elaboratedInstallPlan , srcpkg <- maybeToList $ case pkg of InstallPlan.Configured srcpkg -> Just srcpkg InstallPlan.Installed srcpkg -> Just srcpkg InstallPlan.PreExisting _ -> Nothing ] relativePackageDBPaths :: FilePath -> PackageDBStack -> PackageDBStack relativePackageDBPaths relroot = map (relativePackageDBPath relroot) relativePackageDBPath :: FilePath -> PackageDB -> PackageDB relativePackageDBPath relroot pkgdb = case pkgdb of GlobalPackageDB -> GlobalPackageDB UserPackageDB -> UserPackageDB SpecificPackageDB path -> SpecificPackageDB relpath where relpath = makeRelative relroot path cabal-install-3.8.1.0/src/Distribution/Client/ProjectPlanning.hs0000644000000000000000000054751607346545000022717 0ustar0000000000000000{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, RankNTypes #-} {-# LANGUAGE ViewPatterns #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE NoMonoLocalBinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveFunctor #-} -- | Planning how to build everything in a project. 
-- module Distribution.Client.ProjectPlanning ( -- * elaborated install plan types ElaboratedInstallPlan, ElaboratedConfiguredPackage(..), ElaboratedPlanPackage, ElaboratedSharedConfig(..), ElaboratedReadyPackage, BuildStyle(..), CabalFileText, -- * Producing the elaborated install plan rebuildProjectConfig, rebuildInstallPlan, -- * Build targets availableTargets, AvailableTarget(..), AvailableTargetStatus(..), TargetRequested(..), ComponentTarget(..), SubComponentTarget(..), showComponentTarget, nubComponentTargets, -- * Selecting a plan subset pruneInstallPlanToTargets, TargetAction(..), pruneInstallPlanToDependencies, CannotPruneDependencies(..), -- * Utils required for building pkgHasEphemeralBuildTargets, elabBuildTargetWholeComponents, configureCompiler, -- * Setup.hs CLI flags for building setupHsScriptOptions, setupHsConfigureFlags, setupHsConfigureArgs, setupHsBuildFlags, setupHsBuildArgs, setupHsReplFlags, setupHsReplArgs, setupHsTestFlags, setupHsTestArgs, setupHsBenchFlags, setupHsBenchArgs, setupHsCopyFlags, setupHsRegisterFlags, setupHsHaddockFlags, setupHsHaddockArgs, packageHashInputs, -- * Path construction binDirectoryFor, binDirectories, storePackageInstallDirs, storePackageInstallDirs' ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Client.HashValue import Distribution.Client.HttpUtils import Distribution.Client.ProjectPlanning.Types as Ty import Distribution.Client.PackageHash import Distribution.Client.RebuildMonad import Distribution.Client.Store import Distribution.Client.ProjectConfig import Distribution.Client.ProjectConfig.Legacy import Distribution.Client.ProjectPlanOutput import Distribution.Client.Types import qualified Distribution.Client.InstallPlan as InstallPlan import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan import Distribution.Client.Dependency import Distribution.Client.Dependency.Types import qualified Distribution.Client.IndexUtils as IndexUtils import 
Distribution.Client.Utils (incVersion) import Distribution.Client.Targets (userToPackageConstraint) import Distribution.Client.DistDirLayout import Distribution.Client.SetupWrapper import Distribution.Client.JobControl import Distribution.Client.FetchUtils import Distribution.Client.Config import qualified Hackage.Security.Client as Sec import Distribution.Client.Setup hiding (packageName, cabalVersion) import Distribution.Utils.NubList import Distribution.Utils.LogProgress import Distribution.Utils.MapAccum import qualified Distribution.Client.BuildReports.Storage as BuildReports ( storeLocal, fromPlanningFailure ) import qualified Distribution.Solver.Types.ComponentDeps as CD import Distribution.Solver.Types.ComponentDeps (ComponentDeps) import Distribution.Solver.Types.ConstraintSource import Distribution.Solver.Types.LabeledPackageConstraint import Distribution.Solver.Types.OptionalStanza import Distribution.Solver.Types.PkgConfigDb import Distribution.Solver.Types.ResolverPackage import Distribution.Solver.Types.SolverId import Distribution.Solver.Types.SolverPackage import Distribution.Solver.Types.InstSolverPackage import Distribution.Solver.Types.SourcePackage import Distribution.Solver.Types.Settings import Distribution.CabalSpecVersion import Distribution.ModuleName import Distribution.Package import Distribution.Types.AnnotatedId import Distribution.Types.ComponentName import Distribution.Types.DumpBuildInfo ( DumpBuildInfo (..) 
) import Distribution.Types.LibraryName import Distribution.Types.GivenComponent (GivenComponent(..)) import Distribution.Types.PackageVersionConstraint import Distribution.Types.PkgconfigDependency import Distribution.Types.UnqualComponentName import Distribution.System import qualified Distribution.PackageDescription as Cabal import qualified Distribution.PackageDescription as PD import qualified Distribution.PackageDescription.Configuration as PD import Distribution.Simple.PackageIndex (InstalledPackageIndex) import Distribution.Simple.Compiler import qualified Distribution.Simple.GHC as GHC --TODO: [code cleanup] eliminate import qualified Distribution.Simple.GHCJS as GHCJS --TODO: [code cleanup] eliminate import Distribution.Simple.Program import Distribution.Simple.Program.Db import Distribution.Simple.Program.Find import qualified Distribution.Simple.Setup as Cabal import Distribution.Simple.Setup (Flag(..), toFlag, flagToMaybe, flagToList, fromFlagOrDefault) import qualified Distribution.Simple.Configure as Cabal import qualified Distribution.Simple.LocalBuildInfo as Cabal import Distribution.Simple.LocalBuildInfo ( Component(..), pkgComponents, componentBuildInfo , componentName ) import qualified Distribution.Simple.InstallDirs as InstallDirs import qualified Distribution.InstalledPackageInfo as IPI import Distribution.Backpack.ConfiguredComponent import Distribution.Backpack.LinkedComponent import Distribution.Backpack.ComponentsGraph import Distribution.Backpack.ModuleShape import Distribution.Backpack.FullUnitId import Distribution.Backpack import Distribution.Types.ComponentInclude import Distribution.Simple.Utils import Distribution.Version import qualified Distribution.Compat.Graph as Graph import Distribution.Compat.Graph(IsNode(..)) import Data.Foldable (fold) import Text.PrettyPrint (text, hang, quotes, colon, vcat, ($$), fsep, punctuate, comma) import qualified Text.PrettyPrint as Disp import qualified Data.Map as Map import qualified Data.Set 
as Set import Control.Monad.State as State import Control.Exception (assert) import Data.List (groupBy, deleteBy) import qualified Data.List.NonEmpty as NE import System.FilePath ------------------------------------------------------------------------------ -- * Elaborated install plan ------------------------------------------------------------------------------ -- "Elaborated" -- worked out with great care and nicety of detail; -- executed with great minuteness: elaborate preparations; -- elaborate care. -- -- So here's the idea: -- -- Rather than a miscellaneous collection of 'ConfigFlags', 'InstallFlags' etc -- all passed in as separate args and which are then further selected, -- transformed etc during the execution of the build. Instead we construct -- an elaborated install plan that includes everything we will need, and then -- during the execution of the plan we do as little transformation of this -- info as possible. -- -- So we're trying to split the work into two phases: construction of the -- elaborated install plan (which as far as possible should be pure) and -- then simple execution of that plan without any smarts, just doing what the -- plan says to do. -- -- So that means we need a representation of this fully elaborated install -- plan. The representation consists of two parts: -- -- * A 'ElaboratedInstallPlan'. This is a 'GenericInstallPlan' with a -- representation of source packages that includes a lot more detail about -- that package's individual configuration -- -- * A 'ElaboratedSharedConfig'. Some package configuration is the same for -- every package in a plan. Rather than duplicate that info every entry in -- the 'GenericInstallPlan' we keep that separately. -- -- The division between the shared and per-package config is /not set in stone -- for all time/. 
For example if we wanted to generalise the install plan to -- describe a situation where we want to build some packages with GHC and some -- with GHCJS then the platform and compiler would no longer be shared between -- all packages but would have to be per-package (probably with some sanity -- condition on the graph structure). -- -- Refer to ProjectPlanning.Types for details of these important types: -- type ElaboratedInstallPlan = ... -- type ElaboratedPlanPackage = ... -- data ElaboratedSharedConfig = ... -- data ElaboratedConfiguredPackage = ... -- data BuildStyle = -- | Check that an 'ElaboratedConfiguredPackage' actually makes -- sense under some 'ElaboratedSharedConfig'. sanityCheckElaboratedConfiguredPackage :: ElaboratedSharedConfig -> ElaboratedConfiguredPackage -> a -> a sanityCheckElaboratedConfiguredPackage sharedConfig elab@ElaboratedConfiguredPackage{..} = (case elabPkgOrComp of ElabPackage pkg -> sanityCheckElaboratedPackage elab pkg ElabComponent comp -> sanityCheckElaboratedComponent elab comp) -- either a package is being built inplace, or the -- 'installedPackageId' we assigned is consistent with -- the 'hashedInstalledPackageId' we would compute from -- the elaborated configured package . assert (elabBuildStyle == BuildInplaceOnly || elabComponentId == hashedInstalledPackageId (packageHashInputs sharedConfig elab)) -- the stanzas explicitly disabled should not be available . assert (optStanzaSetNull $ optStanzaKeysFilteredByValue (maybe False not) elabStanzasRequested `optStanzaSetIntersection` elabStanzasAvailable) -- either a package is built inplace, or we are not attempting to -- build any test suites or benchmarks (we never build these -- for remote packages!) . 
assert (elabBuildStyle == BuildInplaceOnly || optStanzaSetNull elabStanzasAvailable) sanityCheckElaboratedComponent :: ElaboratedConfiguredPackage -> ElaboratedComponent -> a -> a sanityCheckElaboratedComponent ElaboratedConfiguredPackage{..} ElaboratedComponent{..} = -- Should not be building bench or test if not inplace. assert (elabBuildStyle == BuildInplaceOnly || case compComponentName of Nothing -> True Just (CLibName _) -> True Just (CExeName _) -> True -- This is interesting: there's no way to declare a dependency -- on a foreign library at the moment, but you may still want -- to install these to the store Just (CFLibName _) -> True Just (CBenchName _) -> False Just (CTestName _) -> False) sanityCheckElaboratedPackage :: ElaboratedConfiguredPackage -> ElaboratedPackage -> a -> a sanityCheckElaboratedPackage ElaboratedConfiguredPackage{..} ElaboratedPackage{..} = -- we should only have enabled stanzas that actually can be built -- (according to the solver) assert (pkgStanzasEnabled `optStanzaSetIsSubset` elabStanzasAvailable) -- the stanzas that the user explicitly requested should be -- enabled (by the previous test, they are also available) . assert (optStanzaKeysFilteredByValue (fromMaybe False) elabStanzasRequested `optStanzaSetIsSubset` pkgStanzasEnabled) ------------------------------------------------------------------------------ -- * Deciding what to do: making an 'ElaboratedInstallPlan' ------------------------------------------------------------------------------ -- | Return the up-to-date project config and information about the local -- packages within the project. 
--
rebuildProjectConfig :: Verbosity
                     -> HttpTransport
                     -> DistDirLayout
                     -> ProjectConfig
                     -> IO ( ProjectConfig
                           , [PackageSpecifier UnresolvedSourcePackage] )
rebuildProjectConfig verbosity
                     httpTransport
                     distDirLayout@DistDirLayout {
                       distProjectRootDirectory,
                       distDirectory,
                       distProjectCacheFile,
                       distProjectCacheDirectory,
                       distProjectFile
                     }
                     cliConfig = do

    -- The monitor key: the resolved config file path plus the project file
    -- path, so a change of either invalidates the cached result.
    fileMonitorProjectConfigKey <- do
      configPath <- getConfigFilePath projectConfigConfigFile
      return (configPath, distProjectFile "")

    (projectConfig, localPackages) <-
      runRebuild distProjectRootDirectory
      $ rerunIfChanged verbosity
                       fileMonitorProjectConfig
                       fileMonitorProjectConfigKey -- todo check deps too?
      $ do
          liftIO $ info verbosity "Project settings changed, reconfiguring..."
          liftIO $ createDirectoryIfMissingVerbose verbosity True
                     distProjectCacheDirectory
          projectConfigSkeleton <- phaseReadProjectConfig
          -- have to create the cache directory before configuring the compiler
          (compiler, Platform arch os, _) <-
            configureCompiler verbosity distDirLayout
              ((fst $ PD.ignoreConditions projectConfigSkeleton) <> cliConfig)
          -- Instantiate the conditional project config skeleton against the
          -- os/arch/compiler we just configured.
          let projectConfig = instantiateProjectConfigSkeleton
                                os arch (compilerInfo compiler)
                                mempty projectConfigSkeleton
          localPackages <- phaseReadLocalPackages (projectConfig <> cliConfig)
          return (projectConfig, localPackages)

    info verbosity
      $ unlines
      $ ("this build was affected by the following (project) config files:" :)
      $ [ "- " ++ path
        | Explicit path <- Set.toList $ projectConfigProvenance projectConfig
        ]

    -- CLI config always overrides whatever came from the project files.
    return (projectConfig <> cliConfig, localPackages)

  where
    ProjectConfigShared { projectConfigConfigFile } =
      projectConfigShared cliConfig

    ProjectConfigShared { projectConfigIgnoreProject } =
      projectConfigShared cliConfig

    fileMonitorProjectConfig ::
      FileMonitor
        (FilePath, FilePath)
        (ProjectConfig, [PackageSpecifier UnresolvedSourcePackage])
    fileMonitorProjectConfig =
      newFileMonitor (distProjectCacheFile "config")

    -- Read the cabal.project (or implicit config) and combine it with
    -- arguments from the command line
    --
    phaseReadProjectConfig :: Rebuild ProjectConfigSkeleton
    phaseReadProjectConfig = do
      readProjectConfig verbosity httpTransport projectConfigIgnoreProject
        projectConfigConfigFile distDirLayout

    -- Look for all the cabal packages in the project
    -- some of which may be local src dirs, tarballs etc
    --
    phaseReadLocalPackages :: ProjectConfig
                           -> Rebuild [PackageSpecifier UnresolvedSourcePackage]
    phaseReadLocalPackages projectConfig@ProjectConfig {
                               projectConfigShared,
                               projectConfigBuildOnly
                             } = do
      pkgLocations <- findProjectPackages distDirLayout projectConfig
      -- Create folder only if findProjectPackages did not throw a
      -- BadPackageLocations exception.
      liftIO $ do
        createDirectoryIfMissingVerbose verbosity True distDirectory
        createDirectoryIfMissingVerbose verbosity True
          distProjectCacheDirectory

      fetchAndReadSourcePackages verbosity distDirLayout
                                 projectConfigShared
                                 projectConfigBuildOnly
                                 pkgLocations

-- | Configure the compiler for the project, caching the result via a file
-- monitor keyed on the compiler settings, program paths and search path.
configureCompiler :: Verbosity
                  -> DistDirLayout
                  -> ProjectConfig
                  -> Rebuild (Compiler, Platform, ProgramDb)
configureCompiler verbosity
                  DistDirLayout {
                    distProjectCacheFile
                  }
                  ProjectConfig {
                    projectConfigShared = ProjectConfigShared {
                      projectConfigHcFlavor,
                      projectConfigHcPath,
                      projectConfigHcPkg
                    },
                    projectConfigLocalPackages = PackageConfig {
                      packageConfigProgramPaths,
                      packageConfigProgramPathExtra
                    }
                  } = do
    let fileMonitorCompiler =
          newFileMonitor . distProjectCacheFile $ "compiler"

    progsearchpath <- liftIO $ getSystemSearchPath
    rerunIfChanged verbosity fileMonitorCompiler
                   (hcFlavor, hcPath, hcPkg, progsearchpath,
                    packageConfigProgramPaths,
                    packageConfigProgramPathExtra) $ do

      liftIO $ info verbosity "Compiler settings changed, reconfiguring..."
      result@(_, _, progdb') <- liftIO $
        Cabal.configCompilerEx
          hcFlavor hcPath hcPkg
          progdb verbosity

      -- Note that we added the user-supplied program locations and args
      -- for /all/ programs, not just those for the compiler prog and
      -- compiler-related utils. In principle we don't know which programs
      -- the compiler will configure (and it does vary between compilers).
      -- We do know however that the compiler will only configure the
      -- programs it cares about, and those are the ones we monitor here.
      monitorFiles (programsMonitorFiles progdb')

      return result
  where
    hcFlavor = flagToMaybe projectConfigHcFlavor
    hcPath   = flagToMaybe projectConfigHcPath
    hcPkg    = flagToMaybe projectConfigHcPkg
    progdb   =
        userSpecifyPaths (Map.toList (getMapLast packageConfigProgramPaths))
      . modifyProgramSearchPath
          (++ [ ProgramSearchPathDir dir
              | dir <- fromNubList packageConfigProgramPathExtra ])
      $ defaultProgramDb

-- | Return an up-to-date elaborated install plan.
--
-- Two variants of the install plan are returned: with and without packages
-- from the store. That is, the \"improved\" plan where source packages are
-- replaced by pre-existing installed packages from the store (when their ids
-- match), and also the original elaborated plan which uses primarily source
-- packages.

-- The improved plan is what we use for building, but the original elaborated
-- plan is useful for reporting and configuration. For example the @freeze@
-- command needs the source package info to know about flag choices and
-- dependencies of executables and setup scripts.
-- rebuildInstallPlan :: Verbosity -> DistDirLayout -> CabalDirLayout -> ProjectConfig -> [PackageSpecifier UnresolvedSourcePackage] -> IO ( ElaboratedInstallPlan -- with store packages , ElaboratedInstallPlan -- with source packages , ElaboratedSharedConfig , IndexUtils.TotalIndexState , IndexUtils.ActiveRepos ) -- ^ @(improvedPlan, elaboratedPlan, _, _, _)@ rebuildInstallPlan verbosity distDirLayout@DistDirLayout { distProjectRootDirectory, distProjectCacheFile } CabalDirLayout { cabalStoreDirLayout } = \projectConfig localPackages -> runRebuild distProjectRootDirectory $ do progsearchpath <- liftIO $ getSystemSearchPath let projectConfigMonitored = projectConfig { projectConfigBuildOnly = mempty } -- The overall improved plan is cached rerunIfChanged verbosity fileMonitorImprovedPlan -- react to changes in the project config, -- the package .cabal files and the path (projectConfigMonitored, localPackages, progsearchpath) $ do -- And so is the elaborated plan that the improved plan based on (elaboratedPlan, elaboratedShared, totalIndexState, activeRepos) <- rerunIfChanged verbosity fileMonitorElaboratedPlan (projectConfigMonitored, localPackages, progsearchpath) $ do compilerEtc <- phaseConfigureCompiler projectConfig _ <- phaseConfigurePrograms projectConfig compilerEtc (solverPlan, pkgConfigDB, totalIndexState, activeRepos) <- phaseRunSolver projectConfig compilerEtc localPackages (elaboratedPlan, elaboratedShared) <- phaseElaboratePlan projectConfig compilerEtc pkgConfigDB solverPlan localPackages phaseMaintainPlanOutputs elaboratedPlan elaboratedShared return (elaboratedPlan, elaboratedShared, totalIndexState, activeRepos) -- The improved plan changes each time we install something, whereas -- the underlying elaborated plan only changes when input config -- changes, so it's worth caching them separately. 
improvedPlan <- phaseImprovePlan elaboratedPlan elaboratedShared return (improvedPlan, elaboratedPlan, elaboratedShared, totalIndexState, activeRepos) where fileMonitorSolverPlan = newFileMonitorInCacheDir "solver-plan" fileMonitorSourceHashes = newFileMonitorInCacheDir "source-hashes" fileMonitorElaboratedPlan = newFileMonitorInCacheDir "elaborated-plan" fileMonitorImprovedPlan = newFileMonitorInCacheDir "improved-plan" newFileMonitorInCacheDir :: Eq a => FilePath -> FileMonitor a b newFileMonitorInCacheDir = newFileMonitor . distProjectCacheFile -- Configure the compiler we're using. -- -- This is moderately expensive and doesn't change that often so we cache -- it independently. -- phaseConfigureCompiler :: ProjectConfig -> Rebuild (Compiler, Platform, ProgramDb) phaseConfigureCompiler = configureCompiler verbosity distDirLayout -- Configuring other programs. -- -- Having configred the compiler, now we configure all the remaining -- programs. This is to check we can find them, and to monitor them for -- changes. -- -- TODO: [required eventually] we don't actually do this yet. -- -- We rely on the fact that the previous phase added the program config for -- all local packages, but that all the programs configured so far are the -- compiler program or related util programs. -- phaseConfigurePrograms :: ProjectConfig -> (Compiler, Platform, ProgramDb) -> Rebuild () phaseConfigurePrograms projectConfig (_, _, compilerprogdb) = do -- Users are allowed to specify program locations independently for -- each package (e.g. to use a particular version of a pre-processor -- for some packages). However they cannot do this for the compiler -- itself as that's just not going to work. So we check for this. liftIO $ checkBadPerPackageCompilerPaths (configuredPrograms compilerprogdb) (getMapMappend (projectConfigSpecificPackage projectConfig)) --TODO: [required eventually] find/configure other programs that the -- user specifies. 
--TODO: [required eventually] find/configure all build-tools -- but note that some of them may be built as part of the plan. -- Run the solver to get the initial install plan. -- This is expensive so we cache it independently. -- phaseRunSolver :: ProjectConfig -> (Compiler, Platform, ProgramDb) -> [PackageSpecifier UnresolvedSourcePackage] -> Rebuild (SolverInstallPlan, PkgConfigDb, IndexUtils.TotalIndexState, IndexUtils.ActiveRepos) phaseRunSolver projectConfig@ProjectConfig { projectConfigShared, projectConfigBuildOnly } (compiler, platform, progdb) localPackages = rerunIfChanged verbosity fileMonitorSolverPlan (solverSettings, localPackages, localPackagesEnabledStanzas, compiler, platform, programDbSignature progdb) $ do installedPkgIndex <- getInstalledPackages verbosity compiler progdb platform corePackageDbs (sourcePkgDb, tis, ar) <- getSourcePackages verbosity withRepoCtx (solverSettingIndexState solverSettings) (solverSettingActiveRepos solverSettings) pkgConfigDB <- getPkgConfigDb verbosity progdb --TODO: [code cleanup] it'd be better if the Compiler contained the -- ConfiguredPrograms that it needs, rather than relying on the progdb -- since we don't need to depend on all the programs here, just the -- ones relevant for the compiler. liftIO $ do solver <- chooseSolver verbosity (solverSettingSolver solverSettings) (compilerInfo compiler) notice verbosity "Resolving dependencies..." planOrError <- foldProgress logMsg (pure . Left) (pure . 
Right) $ planPackages verbosity compiler platform solver solverSettings installedPkgIndex sourcePkgDb pkgConfigDB localPackages localPackagesEnabledStanzas case planOrError of Left msg -> do reportPlanningFailure projectConfig compiler platform localPackages die' verbosity msg Right plan -> return (plan, pkgConfigDB, tis, ar) where corePackageDbs :: [PackageDB] corePackageDbs = applyPackageDbFlags [GlobalPackageDB] (projectConfigPackageDBs projectConfigShared) withRepoCtx = projectConfigWithSolverRepoContext verbosity projectConfigShared projectConfigBuildOnly solverSettings = resolveSolverSettings projectConfig logMsg message rest = debugNoWrap verbosity message >> rest localPackagesEnabledStanzas = Map.fromList [ (pkgname, stanzas) | pkg <- localPackages -- TODO: misnomer: we should separate -- builtin/global/inplace/local packages -- and packages explicitly mentioned in the project -- , let pkgname = pkgSpecifierTarget pkg testsEnabled = lookupLocalPackageConfig packageConfigTests projectConfig pkgname benchmarksEnabled = lookupLocalPackageConfig packageConfigBenchmarks projectConfig pkgname isLocal = isJust (shouldBeLocal pkg) stanzas | isLocal = Map.fromList $ [ (TestStanzas, enabled) | enabled <- flagToList testsEnabled ] ++ [ (BenchStanzas , enabled) | enabled <- flagToList benchmarksEnabled ] | otherwise = Map.fromList [(TestStanzas, False), (BenchStanzas, False) ] ] -- Elaborate the solver's install plan to get a fully detailed plan. This -- version of the plan has the final nix-style hashed ids. 
-- phaseElaboratePlan :: ProjectConfig -> (Compiler, Platform, ProgramDb) -> PkgConfigDb -> SolverInstallPlan -> [PackageSpecifier (SourcePackage (PackageLocation loc))] -> Rebuild ( ElaboratedInstallPlan , ElaboratedSharedConfig ) phaseElaboratePlan ProjectConfig { projectConfigShared, projectConfigAllPackages, projectConfigLocalPackages, projectConfigSpecificPackage, projectConfigBuildOnly } (compiler, platform, progdb) pkgConfigDB solverPlan localPackages = do liftIO $ debug verbosity "Elaborating the install plan..." sourcePackageHashes <- rerunIfChanged verbosity fileMonitorSourceHashes (packageLocationsSignature solverPlan) $ getPackageSourceHashes verbosity withRepoCtx solverPlan defaultInstallDirs <- liftIO $ userInstallDirTemplates compiler (elaboratedPlan, elaboratedShared) <- liftIO . runLogProgress verbosity $ elaborateInstallPlan verbosity platform compiler progdb pkgConfigDB distDirLayout cabalStoreDirLayout solverPlan localPackages sourcePackageHashes defaultInstallDirs projectConfigShared projectConfigAllPackages projectConfigLocalPackages (getMapMappend projectConfigSpecificPackage) let instantiatedPlan = instantiateInstallPlan cabalStoreDirLayout defaultInstallDirs elaboratedShared elaboratedPlan liftIO $ debugNoWrap verbosity (InstallPlan.showInstallPlan instantiatedPlan) return (instantiatedPlan, elaboratedShared) where withRepoCtx = projectConfigWithSolverRepoContext verbosity projectConfigShared projectConfigBuildOnly -- Update the files we maintain that reflect our current build environment. -- In particular we maintain a JSON representation of the elaborated -- install plan (but not the improved plan since that reflects the state -- of the build rather than just the input environment). 
--
    -- Phase: keep the externally visible plan outputs (plan.json) in sync
    -- with the plan we just elaborated.
    phaseMaintainPlanOutputs :: ElaboratedInstallPlan
                             -> ElaboratedSharedConfig
                             -> Rebuild ()
    phaseMaintainPlanOutputs elaboratedPlan elaboratedShared = liftIO $ do
        debug verbosity "Updating plan.json"
        writePlanExternalRepresentation
          distDirLayout
          elaboratedPlan
          elaboratedShared

    -- Improve the elaborated install plan. The elaborated plan consists
    -- mostly of source packages (with full nix-style hashed ids). Where
    -- corresponding installed packages already exist in the store, replace
    -- them in the plan.
    --
    -- Note that we do monitor the store's package db here, so we will redo
    -- this improvement phase when the db changes -- including as a result of
    -- executing a plan and installing things.
    --
    phaseImprovePlan :: ElaboratedInstallPlan
                     -> ElaboratedSharedConfig
                     -> Rebuild ElaboratedInstallPlan
    phaseImprovePlan elaboratedPlan elaboratedShared = do

        liftIO $ debug verbosity "Improving the install plan..."
        storePkgIdSet <- getStoreEntries cabalStoreDirLayout compid
        let improvedPlan = improveInstallPlanWithInstalledPackages
                             storePkgIdSet
                             elaboratedPlan
        liftIO $ debugNoWrap verbosity (InstallPlan.showInstallPlan improvedPlan)
        -- TODO: [nice to have] having checked which packages from the store
        -- we're using, it may be sensible to sanity check those packages
        -- by loading up the compiler package db and checking everything
        -- matches up as expected, e.g. no dangling deps, files deleted.
        return improvedPlan
      where
        compid = compilerId (pkgConfigCompiler elaboratedShared)


-- | Report a failure of the planning phase: when enabled via
-- 'projectConfigReportPlanningFailure', store a local build report for each
-- package that was specified for the failed solve.
reportPlanningFailure :: ProjectConfig -> Compiler -> Platform
                      -> [PackageSpecifier UnresolvedSourcePackage] -> IO ()
reportPlanningFailure projectConfig comp platform pkgSpecifiers =
  when reportFailure $
    BuildReports.storeLocal (compilerInfo comp)
                            (fromNubList $ projectConfigSummaryFile
                                         . projectConfigBuildOnly
                                         $ projectConfig)
                            buildReports platform
    -- TODO may want to handle the projectConfigLogFile parameter here, or
    -- just remove it entirely?

  where
    reportFailure = Cabal.fromFlag
                  . projectConfigReportPlanningFailure
                  . projectConfigBuildOnly
                  $ projectConfig

    pkgids = mapMaybe theSpecifiedPackage pkgSpecifiers

    buildReports = BuildReports.fromPlanningFailure platform
                     (compilerId comp)
                     pkgids
                     -- TODO we may want to get more flag assignments and
                     -- merge them here?
                     (packageConfigFlagAssignment
                        . projectConfigAllPackages
                        $ projectConfig)

    -- | If a 'PackageSpecifier' refers to a single package, return Just that
    -- package.
    theSpecifiedPackage :: Package pkg => PackageSpecifier pkg -> Maybe PackageId
    theSpecifiedPackage pkgSpec =
      case pkgSpec of
        NamedPackage name [PackagePropertyVersion version]
          -> PackageIdentifier name <$> trivialRange version
        NamedPackage _ _ -> Nothing
        SpecificSourcePackage pkg -> Just $ packageId pkg

    -- | If a range includes only a single version, return Just that version.
    trivialRange :: VersionRange -> Maybe Version
    trivialRange = foldVersionRange
        Nothing
        Just           -- "== v"
        (\_   -> Nothing)
        (\_   -> Nothing)
        (\_ _ -> Nothing)
        (\_ _ -> Nothing)


-- | The file-monitor entries for all configured programs: their search
-- paths and resolved executable locations.
programsMonitorFiles :: ProgramDb -> [MonitorFilePath]
programsMonitorFiles progdb =
    [ monitor
    | prog    <- configuredPrograms progdb
    , monitor <- monitorFileSearchPath (programMonitorFiles prog)
                                       (programPath prog)
    ]

-- | Select the bits of a 'ProgramDb' to monitor for value changes.
-- Use 'programsMonitorFiles' for the files to monitor.
--
-- Note we deliberately drop the PATH override (already covered by the file
-- monitors above) and the per-program monitor files themselves.
programDbSignature :: ProgramDb -> [ConfiguredProgram]
programDbSignature progdb =
    [ prog { programMonitorFiles = []
           , programOverrideEnv  = filter ((/="PATH") . fst)
                                          (programOverrideEnv prog) }
    | prog <- configuredPrograms progdb ]

-- | Read the installed-package index for the given package db stack,
-- registering monitors on the db files/dirs so we rerun when they change.
getInstalledPackages :: Verbosity
                     -> Compiler -> ProgramDb -> Platform
                     -> PackageDBStack
                     -> Rebuild InstalledPackageIndex
getInstalledPackages verbosity compiler progdb platform packagedbs = do
    monitorFiles . map monitorFileOrDirectory
      =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles
                    verbosity compiler
                    packagedbs progdb platform)
    liftIO $ IndexUtils.getInstalledPackages
               verbosity compiler
               packagedbs progdb

{- --TODO: [nice to have] use this but for sanity / consistency checking
getPackageDBContents :: Verbosity
                     -> Compiler -> ProgramDb -> Platform
                     -> PackageDB
                     -> Rebuild InstalledPackageIndex
getPackageDBContents verbosity compiler progdb platform packagedb = do
    monitorFiles . map monitorFileOrDirectory
      =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles
                    verbosity compiler
                    [packagedb] progdb platform)
    liftIO $ do
      createPackageDBIfMissing verbosity compiler progdb packagedb
      Cabal.getPackageDBContents verbosity compiler
                                 packagedb progdb
-}

-- | Read the source package db from the configured repositories at the
-- requested index state, monitoring the repo index files for changes.
getSourcePackages :: Verbosity -> (forall a. (RepoContext -> IO a) -> IO a)
                  -> Maybe IndexUtils.TotalIndexState
                  -> Maybe IndexUtils.ActiveRepos
                  -> Rebuild (SourcePackageDb, IndexUtils.TotalIndexState, IndexUtils.ActiveRepos)
getSourcePackages verbosity withRepoCtx idxState activeRepos = do
    (sourcePkgDbWithTIS, repos) <-
      liftIO $
        withRepoCtx $ \repoctx -> do
          sourcePkgDbWithTIS <- IndexUtils.getSourcePackagesAtIndexState
                                  verbosity repoctx idxState activeRepos
          return (sourcePkgDbWithTIS, repoContextRepos repoctx)

    traverse_ needIfExists . IndexUtils.getSourcePackagesMonitorFiles $ repos
    return sourcePkgDbWithTIS

-- | Read the pkg-config database, monitoring its directories so we notice
-- additions and removals of @.pc@ files.
getPkgConfigDb :: Verbosity -> ProgramDb -> Rebuild PkgConfigDb
getPkgConfigDb verbosity progdb = do
    dirs <- liftIO $ getPkgConfigDbDirs verbosity progdb
    -- Just monitor the dirs so we'll notice new .pc files.
    -- Alternatively we could monitor all the .pc files too.
    traverse_ monitorDirectoryStatus dirs
    liftIO $ readPkgConfigDb verbosity progdb


-- | Select the config values to monitor for changes to the package source
-- hashes.
packageLocationsSignature :: SolverInstallPlan
                          -> [(PackageId, PackageLocation (Maybe FilePath))]
packageLocationsSignature solverPlan =
    -- Only 'Configured' plan entries have a source location to monitor.
    [ (packageId pkg, srcpkgSource pkg)
    | SolverInstallPlan.Configured (SolverPackage { solverPkgSource = pkg})
        <- SolverInstallPlan.toList solverPlan
    ]


-- | Get the 'HashValue' for all the source packages where we use hashes,
-- and download any packages required to do so.
--
-- Note that we don't get hashes for local unpacked packages.
--
-- The hashes come from two sources, combined at the end: repo metadata
-- (secure repos publish a hash in the index) and direct hashing of tarball
-- files (local tarballs, remote/source-repo tarballs already fetched, and
-- repo tarballs without metadata, downloading those that are missing).
getPackageSourceHashes :: Verbosity
                       -> (forall a. (RepoContext -> IO a) -> IO a)
                       -> SolverInstallPlan
                       -> Rebuild (Map PackageId PackageSourceHash)
getPackageSourceHashes verbosity withRepoCtx solverPlan = do

    -- Determine if and where to get the package's source hash from.
    --
    let allPkgLocations :: [(PackageId, PackageLocation (Maybe FilePath))]
        allPkgLocations =
          [ (packageId pkg, srcpkgSource pkg)
          | SolverInstallPlan.Configured (SolverPackage { solverPkgSource = pkg})
              <- SolverInstallPlan.toList solverPlan ]

        -- Tarballs that were local in the first place.
        -- We'll hash these tarball files directly.
        localTarballPkgs :: [(PackageId, FilePath)]
        localTarballPkgs =
          [ (pkgid, tarball)
          | (pkgid, LocalTarballPackage tarball) <- allPkgLocations ]

        -- Tarballs from remote URLs. We must have downloaded these already
        -- (since we extracted the .cabal file earlier)
        remoteTarballPkgs =
          [ (pkgid, tarball)
          | (pkgid, RemoteTarballPackage _ (Just tarball)) <- allPkgLocations ]

        -- tarballs from source-repository-package stanzas
        sourceRepoTarballPkgs =
          [ (pkgid, tarball)
          | (pkgid, RemoteSourceRepoPackage _ (Just tarball)) <- allPkgLocations ]

        -- Tarballs from repositories, either where the repository provides
        -- hashes as part of the repo metadata, or where we will have to
        -- download and hash the tarball.
        repoTarballPkgsWithMetadata    :: [(PackageId, Repo)]
        repoTarballPkgsWithoutMetadata :: [(PackageId, Repo)]
        (repoTarballPkgsWithMetadata,
         repoTarballPkgsWithoutMetadata) =
          partitionEithers
          [ case repo of
              RepoSecure{} -> Left  (pkgid, repo)
              _            -> Right (pkgid, repo)
          | (pkgid, RepoTarballPackage repo _ _) <- allPkgLocations ]

    -- For tarballs from repos that do not have hashes available we now have
    -- to check if the packages were downloaded already.
    --
    (repoTarballPkgsToDownload,
     repoTarballPkgsDownloaded)
      <- fmap partitionEithers $
         liftIO $ sequence
           [ do mtarball <- checkRepoTarballFetched repo pkgid
                case mtarball of
                  Nothing      -> return (Left  (pkgid, repo))
                  Just tarball -> return (Right (pkgid, tarball))
           | (pkgid, repo) <- repoTarballPkgsWithoutMetadata ]

    (hashesFromRepoMetadata,
     repoTarballPkgsNewlyDownloaded) <-
      -- Avoid having to initialise the repository (ie 'withRepoCtx') if we
      -- don't have to. (The main cost is configuring the http client.)
      if null repoTarballPkgsToDownload && null repoTarballPkgsWithMetadata
      then return (Map.empty, [])
      else liftIO $ withRepoCtx $ \repoctx -> do

        -- For tarballs from repos that do have hashes available as part of the
        -- repo metadata we now load up the index for each repo and retrieve
        -- the hashes for the packages
        --
        hashesFromRepoMetadata <-
          Sec.uncheckClientErrors $ --TODO: [code cleanup] wrap in our own exceptions
          fmap (Map.fromList . concat) $
          sequence
            -- Reading the repo index is expensive so we group the packages by repo
            [ repoContextWithSecureRepo repoctx repo $ \secureRepo ->
                Sec.withIndex secureRepo $ \repoIndex ->
                  sequence
                    [ do hash <- Sec.trusted <$> -- strip off Trusted tag
                                 Sec.indexLookupHash repoIndex pkgid
                         -- Note that hackage-security currently uses SHA256
                         -- but this API could in principle give us some other
                         -- choice in future.
                         return (pkgid, hashFromTUF hash)
                    | pkgid <- pkgids ]
            | (repo, pkgids) <-
                map (\grp@((_,repo):|_) -> (repo, map fst (NE.toList grp)))
              . NE.groupBy ((==)    `on` (remoteRepoName . repoRemote . snd))
              . sortBy     (compare `on` (remoteRepoName . repoRemote . snd))
              $ repoTarballPkgsWithMetadata ]

        -- For tarballs from repos that do not have hashes available, download
        -- the ones we previously determined we need.
        --
        repoTarballPkgsNewlyDownloaded <-
          sequence
            [ do tarball <- fetchRepoTarball verbosity repoctx repo pkgid
                 return (pkgid, tarball)
            | (pkgid, repo) <- repoTarballPkgsToDownload ]

        return (hashesFromRepoMetadata,
                repoTarballPkgsNewlyDownloaded)

    -- Hash tarball files for packages where we have to do that. This includes
    -- tarballs that were local in the first place, plus tarballs from repos,
    -- either previously cached or freshly downloaded.
    --
    let allTarballFilePkgs :: [(PackageId, FilePath)]
        allTarballFilePkgs = localTarballPkgs
                          ++ remoteTarballPkgs
                          ++ sourceRepoTarballPkgs
                          ++ repoTarballPkgsDownloaded
                          ++ repoTarballPkgsNewlyDownloaded
    hashesFromTarballFiles <- liftIO $
      fmap Map.fromList $
      sequence
        [ do srchash <- readFileHashValue tarball
             return (pkgid, srchash)
        | (pkgid, tarball) <- allTarballFilePkgs
        ]
    -- Monitor the hashed tarballs so we recompute if any file changes.
    monitorFiles [ monitorFile tarball
                 | (_pkgid, tarball) <- allTarballFilePkgs ]

    -- Return the combination
    return $! hashesFromRepoMetadata <> hashesFromTarballFiles


-- | Append the given package databases to an existing PackageDBStack.
-- A @Nothing@ entry will clear everything before it.
applyPackageDbFlags :: PackageDBStack -> [Maybe PackageDB] -> PackageDBStack
applyPackageDbFlags dbs' []            = dbs'
applyPackageDbFlags _    (Nothing:dbs) = applyPackageDbFlags []             dbs
applyPackageDbFlags dbs' (Just db:dbs) = applyPackageDbFlags (dbs' ++ [db]) dbs

-- ------------------------------------------------------------
-- * Installation planning
-- ------------------------------------------------------------

-- | Run the dependency solver, translating all the project and solver
-- settings into 'DepResolverParams' (preferences, constraints, stanza and
-- flag choices, and the setup-script Cabal version bounds).
planPackages :: Verbosity -> Compiler -> Platform
             -> Solver -> SolverSettings
             -> InstalledPackageIndex
             -> SourcePackageDb
             -> PkgConfigDb
             -> [PackageSpecifier UnresolvedSourcePackage]
             -> Map PackageName (Map OptionalStanza Bool)
             -> Progress String String SolverInstallPlan
planPackages verbosity comp platform solver SolverSettings{..}
             installedPkgIndex sourcePkgDb pkgConfigDB
             localPackages pkgStanzasEnable =

    resolveDependencies
      platform (compilerInfo comp)
      pkgConfigDB solver
      resolverParams

  where
    --TODO: [nice to have] disable multiple instances restriction in
    -- the solver, but then make sure we can cope with that in the
    -- output.
    resolverParams :: DepResolverParams
    resolverParams =

        setMaxBackjumps solverSettingMaxBackjumps

      . setIndependentGoals solverSettingIndependentGoals

      . setReorderGoals solverSettingReorderGoals

      . setCountConflicts solverSettingCountConflicts

      . setFineGrainedConflicts solverSettingFineGrainedConflicts

      . setMinimizeConflictSet solverSettingMinimizeConflictSet

        --TODO: [required eventually] should only be configurable for
        --custom installs
   -- . setAvoidReinstalls solverSettingAvoidReinstalls

        --TODO: [required eventually] should only be configurable for
        --custom installs
   -- . setShadowPkgs solverSettingShadowPkgs

      . setStrongFlags solverSettingStrongFlags

      . setAllowBootLibInstalls solverSettingAllowBootLibInstalls

      . setOnlyConstrained solverSettingOnlyConstrained

      . setSolverVerbosity verbosity

        --TODO: [required eventually] decide if we need to prefer
        -- installed for global packages, or prefer latest even for
        -- global packages. Perhaps should be configurable but with a
        -- different name than "upgrade-dependencies".
      . setPreferenceDefault PreferLatestForSelected
                           {-(if solverSettingUpgradeDeps
                                then PreferAllLatest
                                else PreferLatestForSelected)-}

      . removeLowerBounds solverSettingAllowOlder
      . removeUpperBounds solverSettingAllowNewer

      . addDefaultSetupDependencies (defaultSetupDeps comp platform
                                   . PD.packageDescription
                                   . srcpkgDescription)

      . addSetupCabalMinVersionConstraint setupMinCabalVersionConstraint
      . addSetupCabalMaxVersionConstraint setupMaxCabalVersionConstraint

      . addPreferences
          -- preferences from the config file or command line
          [ PackageVersionPreference name ver
          | PackageVersionConstraint name ver <- solverSettingPreferences ]

      . addConstraints
          -- version constraints from the config file or command line
            [ LabeledPackageConstraint (userToPackageConstraint pc) src
            | (pc, src) <- solverSettingConstraints ]

      . addPreferences
          -- enable stanza preference unilaterally, regardless if the user asked
          -- accordingly or expressed no preference, to help hint the solver
          [ PackageStanzasPreference pkgname stanzas
          | pkg <- localPackages
          , let pkgname = pkgSpecifierTarget pkg
                stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable
                stanzas = [ stanza | stanza <- [minBound..maxBound]
                          , Map.lookup stanza stanzaM /= Just False ]
          , not (null stanzas)
          ]

      . addConstraints
          -- enable stanza constraints where the user asked to enable
          [ LabeledPackageConstraint
              (PackageConstraint (scopeToplevel pkgname)
                                 (PackagePropertyStanzas stanzas))
              ConstraintSourceConfigFlagOrTarget
          | pkg <- localPackages
          , let pkgname = pkgSpecifierTarget pkg
                stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable
                stanzas = [ stanza | stanza <- [minBound..maxBound]
                          , Map.lookup stanza stanzaM == Just True ]
          , not (null stanzas)
          ]

      . addConstraints
          --TODO: [nice to have] should have checked at some point that the
          -- package in question actually has these flags.
          [ LabeledPackageConstraint
              (PackageConstraint (scopeToplevel pkgname)
                                 (PackagePropertyFlags flags))
              ConstraintSourceConfigFlagOrTarget
          | (pkgname, flags) <- Map.toList solverSettingFlagAssignments ]

      . addConstraints
          --TODO: [nice to have] we have user-supplied flags for unspecified
          -- local packages (as well as specific per-package flags). For the
          -- former we just apply all these flags to all local targets which
          -- is silly. We should check if the flags are appropriate.
          [ LabeledPackageConstraint
              (PackageConstraint (scopeToplevel pkgname)
                                 (PackagePropertyFlags flags))
              ConstraintSourceConfigFlagOrTarget
          | let flags = solverSettingFlagAssignment
          , not (PD.nullFlagAssignment flags)
          , pkg <- localPackages
          , let pkgname = pkgSpecifierTarget pkg ]

      $ stdResolverParams

    stdResolverParams :: DepResolverParams
    stdResolverParams =
      -- Note: we don't use the standardInstallPolicy here, since that uses
      -- its own addDefaultSetupDependencies that is not appropriate for us.
      basicInstallPolicy
        installedPkgIndex sourcePkgDb
        localPackages

    -- While we can talk to older Cabal versions (we need to be able to
    -- do so for custom Setup scripts that require older Cabal lib
    -- versions), we have problems talking to some older versions that
    -- don't support certain features.
    --
    -- For example, Cabal-1.16 and older do not know about build targets.
    -- Even worse, 1.18 and older only supported the --constraint flag
    -- with source package ids, not --dependency with installed package
    -- ids. That is bad because we cannot reliably select the right
    -- dependencies in the presence of multiple instances (i.e. the
    -- store). See issue #3932. So we require Cabal 1.20 as a minimum.
    --
    -- Moreover, lib:Cabal generally only supports the interface of
    -- current and past compilers; in fact recent lib:Cabal versions
    -- will warn when they encounter a too new or unknown GHC compiler
    -- version (c.f. #415). To avoid running into unsupported
    -- configurations we encode the compatibility matrix as lower
    -- bounds on lib:Cabal here (effectively corresponding to the
    -- respective major Cabal version bundled with the respective GHC
    -- release).
    --
    -- GHC 9.4  needs Cabal >= 3.8
    -- GHC 9.2  needs Cabal >= 3.6
    -- GHC 9.0  needs Cabal >= 3.4
    -- GHC 8.10 needs Cabal >= 3.2
    -- GHC 8.8  needs Cabal >= 3.0
    -- GHC 8.6  needs Cabal >= 2.4
    -- GHC 8.4  needs Cabal >= 2.2
    -- GHC 8.2  needs Cabal >= 2.0
    -- GHC 8.0  needs Cabal >= 1.24
    -- GHC 7.10 needs Cabal >= 1.22
    --
    -- (NB: we don't need to consider older GHCs as Cabal >= 1.20 is
    -- the absolute lower bound)
    --
    -- TODO: long-term, this compatibility matrix should be
    --       stored as a field inside 'Distribution.Compiler.Compiler'
    setupMinCabalVersionConstraint
      | isGHC, compVer >= mkVersion [9,4]  = mkVersion [3,8]
      | isGHC, compVer >= mkVersion [9,2]  = mkVersion [3,6]
      | isGHC, compVer >= mkVersion [9,0]  = mkVersion [3,4]
      | isGHC, compVer >= mkVersion [8,10] = mkVersion [3,2]
      | isGHC, compVer >= mkVersion [8,8]  = mkVersion [3,0]
      | isGHC, compVer >= mkVersion [8,6]  = mkVersion [2,4]
      | isGHC, compVer >= mkVersion [8,4]  = mkVersion [2,2]
      | isGHC, compVer >= mkVersion [8,2]  = mkVersion [2,0]
      | isGHC, compVer >= mkVersion [8,0]  = mkVersion [1,24]
      | isGHC, compVer >= mkVersion [7,10] = mkVersion [1,22]
      | otherwise                          = mkVersion [1,20]
      where
        isGHC    = compFlav `elem` [GHC,GHCJS]
        compFlav = compilerFlavor comp
        compVer  = compilerVersion comp

    -- As we can't predict the future, we also place a global upper
    -- bound on the lib:Cabal version we know how to interact with:
    --
    -- The upper bound is computed by incrementing the current major
    -- version twice in order to allow for the current version, as
    -- well as the next adjacent major version (one of which will not
    -- be released, as only "even major" versions of Cabal are
    -- released to Hackage or bundled with proper GHC releases).
    --
    -- For instance, if the current version of cabal-install is an odd
    -- development version, e.g. Cabal-2.1.0.0, then we impose an
    -- upper bound `setup.Cabal < 2.3`; if `cabal-install` is on a
    -- stable/release even version, e.g. Cabal-2.2.1.0, the upper
    -- bound is `setup.Cabal < 2.4`. This gives us enough flexibility
    -- when dealing with development snapshots of Cabal and cabal-install.
    --
    setupMaxCabalVersionConstraint =
      alterVersion (take 2) $ incVersion 1 $ incVersion 1 cabalVersion

------------------------------------------------------------------------------
-- * Install plan post-processing
------------------------------------------------------------------------------

-- This phase goes from the InstallPlan we get from the solver and has to
-- make an elaborated install plan.
--
-- We go in two steps:
--
--   1. elaborate all the source packages that the solver has chosen.
--   2. swap source packages for pre-existing installed packages wherever
--      possible.
--
-- We do it in this order, elaborating and then replacing, because the easiest
-- way to calculate the installed package ids used for the replacement step is
-- from the elaborated configuration for each package.

------------------------------------------------------------------------------
-- * Install plan elaboration
------------------------------------------------------------------------------

-- Note [SolverId to ConfiguredId]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Dependency solving is a per package affair, so after we're done, we
-- end up with 'SolverInstallPlan' that records in 'solverPkgLibDeps'
-- and 'solverPkgExeDeps' what packages provide the libraries and executables
-- needed by each component of the package (phew!) For example, if I have
--
--      library
--          build-depends: lib
--          build-tool-depends: pkg:exe1
--          build-tools: alex
--
-- After dependency solving, I find out that this library component has
-- library dependencies on lib-0.2, and executable dependencies on pkg-0.1
-- and alex-0.3 (other components of the package may have different
-- dependencies).
-- Note that I've "lost" the knowledge that I depend
-- *specifically* on the exe1 executable from pkg.
--
-- So, we have this graph of packages, and we need to transform it into
-- a graph of components which we are actually going to build.  In particular:
--
-- NODE changes from PACKAGE (SolverPackage) to COMPONENTS (ElaboratedConfiguredPackage)
-- EDGE changes from PACKAGE DEP (SolverId) to COMPONENT DEPS (ConfiguredId)
--
-- In both cases, what was previously a single node/edge may turn into multiple
-- nodes/edges.  Multiple components, because there may be multiple components
-- in a package; multiple component deps, because we may depend upon multiple
-- executables from the same package (and maybe, some day, multiple libraries
-- from the same package.)
--
-- Let's talk about how to do this transformation. Naively, we might consider
-- just processing each package, converting it into (zero or) one or more
-- components.  But we also have to update the edges; this leads to
-- two complications:
--
--      1. We don't know what the ConfiguredId of a component is until
--      we've configured it, but we cannot configure a component unless
--      we know the ConfiguredId of all its dependencies.  Thus, we must
--      process the 'SolverInstallPlan' in topological order.
--
--      2. When we process a package, we know the SolverIds of its
--      dependencies, but we have to do some work to turn these into
--      ConfiguredIds.  For example, in the case of build-tool-depends, the
--      SolverId isn't enough to uniquely determine the ConfiguredId we should
--      elaborate to: we have to look at the executable name attached to
--      the package name in the package description to figure it out.
--      At the same time, we NEED to use the SolverId, because there might
--      be multiple versions of the same package in the build plan
--      (due to setup dependencies); we can't just look up the package name
--      from the package description.
-- -- We can adopt the following strategy: -- -- * When a package is transformed into components, record -- a mapping from SolverId to ALL of the components -- which were elaborated. -- -- * When we look up an edge, we use our knowledge of the -- component name to *filter* the list of components into -- the ones we actually wanted to refer to. -- -- By the way, we can tell that SolverInstallPlan is not the "right" type -- because a SolverId cannot adequately represent all possible dependency -- solver states: we may need to record foo-0.1 multiple times in -- the solver install plan with different dependencies. This imprecision in the -- type currently doesn't cause any problems because the dependency solver -- continues to enforce the single instance restriction regardless of compiler -- version. The right way to solve this is to come up with something very much -- like a 'ConfiguredId', in that it incorporates the version choices of its -- dependencies, but less fine grained. -- | Produce an elaborated install plan using the policy for local builds with -- a nix-style shared store. 
-- -- In theory should be able to make an elaborated install plan with a policy -- matching that of the classic @cabal install --user@ or @--global@ -- elaborateInstallPlan :: Verbosity -> Platform -> Compiler -> ProgramDb -> PkgConfigDb -> DistDirLayout -> StoreDirLayout -> SolverInstallPlan -> [PackageSpecifier (SourcePackage (PackageLocation loc))] -> Map PackageId PackageSourceHash -> InstallDirs.InstallDirTemplates -> ProjectConfigShared -> PackageConfig -> PackageConfig -> Map PackageName PackageConfig -> LogProgress (ElaboratedInstallPlan, ElaboratedSharedConfig) elaborateInstallPlan verbosity platform compiler compilerprogdb pkgConfigDB distDirLayout@DistDirLayout{..} storeDirLayout@StoreDirLayout{storePackageDBStack} solverPlan localPackages sourcePackageHashes defaultInstallDirs sharedPackageConfig allPackagesConfig localPackagesConfig perPackageConfig = do x <- elaboratedInstallPlan return (x, elaboratedSharedConfig) where elaboratedSharedConfig = ElaboratedSharedConfig { pkgConfigPlatform = platform, pkgConfigCompiler = compiler, pkgConfigCompilerProgs = compilerprogdb, pkgConfigReplOptions = mempty } preexistingInstantiatedPkgs :: Map UnitId FullUnitId preexistingInstantiatedPkgs = Map.fromList (mapMaybe f (SolverInstallPlan.toList solverPlan)) where f (SolverInstallPlan.PreExisting inst) | let ipkg = instSolverPkgIPI inst , not (IPI.indefinite ipkg) = Just (IPI.installedUnitId ipkg, (FullUnitId (IPI.installedComponentId ipkg) (Map.fromList (IPI.instantiatedWith ipkg)))) f _ = Nothing elaboratedInstallPlan :: LogProgress (InstallPlan.GenericInstallPlan IPI.InstalledPackageInfo ElaboratedConfiguredPackage) elaboratedInstallPlan = flip InstallPlan.fromSolverInstallPlanWithProgress solverPlan $ \mapDep planpkg -> case planpkg of SolverInstallPlan.PreExisting pkg -> return [InstallPlan.PreExisting (instSolverPkgIPI pkg)] SolverInstallPlan.Configured pkg -> let inplace_doc | shouldBuildInplaceOnly pkg = text "inplace" | otherwise = Disp.empty in 
addProgressCtx (text "In the" <+> inplace_doc <+> text "package" <+> quotes (pretty (packageId pkg))) $ map InstallPlan.Configured <$> elaborateSolverToComponents mapDep pkg -- NB: We don't INSTANTIATE packages at this point. That's -- a post-pass. This makes it simpler to compute dependencies. elaborateSolverToComponents :: (SolverId -> [ElaboratedPlanPackage]) -> SolverPackage UnresolvedPkgLoc -> LogProgress [ElaboratedConfiguredPackage] elaborateSolverToComponents mapDep spkg@(SolverPackage _ _ _ deps0 exe_deps0) = case mkComponentsGraph (elabEnabledSpec elab0) pd of Right g -> do let src_comps = componentsGraphToList g infoProgress $ hang (text "Component graph for" <+> pretty pkgid <<>> colon) 4 (dispComponentsWithDeps src_comps) (_, comps) <- mapAccumM buildComponent (Map.empty, Map.empty, Map.empty) (map fst src_comps) let not_per_component_reasons = why_not_per_component src_comps if null not_per_component_reasons then return comps else do checkPerPackageOk comps not_per_component_reasons return [elaborateSolverToPackage spkg g $ comps ++ maybeToList setupComponent] Left cns -> dieProgress $ hang (text "Dependency cycle between the following components:") 4 (vcat (map (text . componentNameStanza) cns)) where -- You are eligible to per-component build if this list is empty why_not_per_component g = cuz_buildtype ++ cuz_spec ++ cuz_length ++ cuz_flag ++ cuz_coverage where cuz reason = [text reason] -- We have to disable per-component for now with -- Configure-type scripts in order to prevent parallel -- invocation of the same `./configure` script. -- See https://github.com/haskell/cabal/issues/4548 -- -- Moreover, at this point in time, only non-Custom setup scripts -- are supported. Implementing per-component builds with -- Custom would require us to create a new 'ElabSetup' -- type, and teach all of the code paths how to handle it. -- Once you've implemented this, swap it for the code below. 
cuz_buildtype = case PD.buildType (elabPkgDescription elab0) of PD.Configure -> cuz "build-type is Configure" PD.Custom -> cuz "build-type is Custom" _ -> [] -- cabal-format versions prior to 1.8 have different build-depends semantics -- for now it's easier to just fallback to legacy-mode when specVersion < 1.8 -- see, https://github.com/haskell/cabal/issues/4121 cuz_spec | PD.specVersion pd >= CabalSpecV1_8 = [] | otherwise = cuz "cabal-version is less than 1.8" -- In the odd corner case that a package has no components at all -- then keep it as a whole package, since otherwise it turns into -- 0 component graph nodes and effectively vanishes. We want to -- keep it around at least for error reporting purposes. cuz_length | length g > 0 = [] | otherwise = cuz "there are no buildable components" -- For ease of testing, we let per-component builds be toggled -- at the top level cuz_flag | fromFlagOrDefault True (projectConfigPerComponent sharedPackageConfig) = [] | otherwise = cuz "you passed --disable-per-component" -- Enabling program coverage introduces odd runtime dependencies -- between components. cuz_coverage | fromFlagOrDefault False (packageConfigCoverage localPackagesConfig) = cuz "program coverage is enabled" | otherwise = [] -- | Sometimes a package may make use of features which are only -- supported in per-package mode. If this is the case, we should -- give an error when this occurs. checkPerPackageOk comps reasons = do let is_sublib (CLibName (LSubLibName _)) = True is_sublib _ = False when (any (matchElabPkg is_sublib) comps) $ dieProgress $ text "Internal libraries only supported with per-component builds." 
$$ text "Per-component builds were disabled because" <+> fsep (punctuate comma reasons) -- TODO: Maybe exclude Backpack too elab0 = elaborateSolverToCommon spkg pkgid = elabPkgSourceId elab0 pd = elabPkgDescription elab0 -- TODO: This is just a skeleton to get elaborateSolverToPackage -- working correctly -- TODO: When we actually support building these components, we -- have to add dependencies on this from all other components setupComponent :: Maybe ElaboratedConfiguredPackage setupComponent | PD.buildType (elabPkgDescription elab0) == PD.Custom = Just elab0 { elabModuleShape = emptyModuleShape, elabUnitId = notImpl "elabUnitId", elabComponentId = notImpl "elabComponentId", elabLinkedInstantiatedWith = Map.empty, elabInstallDirs = notImpl "elabInstallDirs", elabPkgOrComp = ElabComponent (ElaboratedComponent {..}) } | otherwise = Nothing where compSolverName = CD.ComponentSetup compComponentName = Nothing dep_pkgs = elaborateLibSolverId mapDep =<< CD.setupDeps deps0 compLibDependencies = map configuredId dep_pkgs compLinkedLibDependencies = notImpl "compLinkedLibDependencies" compOrderLibDependencies = notImpl "compOrderLibDependencies" -- Not supported: compExeDependencies = [] compExeDependencyPaths = [] compPkgConfigDependencies = [] notImpl f = error $ "Distribution.Client.ProjectPlanning.setupComponent: " ++ f ++ " not implemented yet" buildComponent :: (ConfiguredComponentMap, LinkedComponentMap, Map ComponentId FilePath) -> Cabal.Component -> LogProgress ((ConfiguredComponentMap, LinkedComponentMap, Map ComponentId FilePath), ElaboratedConfiguredPackage) buildComponent (cc_map, lc_map, exe_map) comp = addProgressCtx (text "In the stanza" <+> quotes (text (componentNameStanza cname))) $ do -- 1. Configure the component, but with a place holder ComponentId. 
cc0 <- toConfiguredComponent pd (error "Distribution.Client.ProjectPlanning.cc_cid: filled in later") (Map.unionWith Map.union external_lib_cc_map cc_map) (Map.unionWith Map.union external_exe_cc_map cc_map) comp -- 2. Read out the dependencies from the ConfiguredComponent cc0 let compLibDependencies = -- Nub because includes can show up multiple times ordNub (map (annotatedIdToConfiguredId . ci_ann_id) (cc_includes cc0)) compExeDependencies = map annotatedIdToConfiguredId (cc_exe_deps cc0) compExeDependencyPaths = [ (annotatedIdToConfiguredId aid', path) | aid' <- cc_exe_deps cc0 , Just paths <- [Map.lookup (ann_id aid') exe_map1] , path <- paths ] elab_comp = ElaboratedComponent {..} -- 3. Construct a preliminary ElaboratedConfiguredPackage, -- and use this to compute the component ID. Fix up cc_id -- correctly. let elab1 = elab0 { elabPkgOrComp = ElabComponent $ elab_comp } cid = case elabBuildStyle elab0 of BuildInplaceOnly -> mkComponentId $ prettyShow pkgid ++ "-inplace" ++ (case Cabal.componentNameString cname of Nothing -> "" Just s -> "-" ++ prettyShow s) BuildAndInstall -> hashedInstalledPackageId (packageHashInputs elaboratedSharedConfig elab1) -- knot tied cc = cc0 { cc_ann_id = fmap (const cid) (cc_ann_id cc0) } infoProgress $ dispConfiguredComponent cc -- 4. Perform mix-in linking let lookup_uid def_uid = case Map.lookup (unDefUnitId def_uid) preexistingInstantiatedPkgs of Just full -> full Nothing -> error ("lookup_uid: " ++ prettyShow def_uid) lc <- toLinkedComponent verbosity lookup_uid (elabPkgSourceId elab0) (Map.union external_lc_map lc_map) cc infoProgress $ dispLinkedComponent lc -- NB: elab is setup to be the correct form for an -- indefinite library, or a definite library with no holes. -- We will modify it in 'instantiateInstallPlan' to handle -- instantiated packages. -- 5. 
Construct the final ElaboratedConfiguredPackage let elab2 = elab1 { elabModuleShape = lc_shape lc, elabUnitId = abstractUnitId (lc_uid lc), elabComponentId = lc_cid lc, elabLinkedInstantiatedWith = Map.fromList (lc_insts lc), elabPkgOrComp = ElabComponent $ elab_comp { compLinkedLibDependencies = ordNub (map ci_id (lc_includes lc)), compOrderLibDependencies = ordNub (map (abstractUnitId . ci_id) (lc_includes lc ++ lc_sig_includes lc)) } } elab = elab2 { elabInstallDirs = computeInstallDirs storeDirLayout defaultInstallDirs elaboratedSharedConfig elab2 } -- 6. Construct the updated local maps let cc_map' = extendConfiguredComponentMap cc cc_map lc_map' = extendLinkedComponentMap lc lc_map exe_map' = Map.insert cid (inplace_bin_dir elab) exe_map return ((cc_map', lc_map', exe_map'), elab) where compLinkedLibDependencies = error "buildComponent: compLinkedLibDependencies" compOrderLibDependencies = error "buildComponent: compOrderLibDependencies" cname = Cabal.componentName comp compComponentName = Just cname compSolverName = CD.componentNameToComponent cname -- NB: compLinkedLibDependencies and -- compOrderLibDependencies are defined when we define -- 'elab'. external_lib_dep_sids = CD.select (== compSolverName) deps0 external_exe_dep_sids = CD.select (== compSolverName) exe_deps0 external_lib_dep_pkgs = concatMap mapDep external_lib_dep_sids -- Combine library and build-tool dependencies, for backwards -- compatibility (See issue #5412 and the documentation for -- InstallPlan.fromSolverInstallPlan), but prefer the versions -- specified as build-tools. external_exe_dep_pkgs = concatMap mapDep $ ordNubBy (pkgName . 
packageId) $ external_exe_dep_sids ++ external_lib_dep_sids external_exe_map = Map.fromList $ [ (getComponentId pkg, paths) | pkg <- external_exe_dep_pkgs , let paths = planPackageExePaths pkg ] exe_map1 = Map.union external_exe_map $ fmap (\x -> [x]) exe_map external_lib_cc_map = Map.fromListWith Map.union $ map mkCCMapping external_lib_dep_pkgs external_exe_cc_map = Map.fromListWith Map.union $ map mkCCMapping external_exe_dep_pkgs external_lc_map = Map.fromList $ map mkShapeMapping $ external_lib_dep_pkgs ++ concatMap mapDep external_exe_dep_sids compPkgConfigDependencies = [ (pn, fromMaybe (error $ "compPkgConfigDependencies: impossible! " ++ prettyShow pn ++ " from " ++ prettyShow (elabPkgSourceId elab0)) (pkgConfigDbPkgVersion pkgConfigDB pn)) | PkgconfigDependency pn _ <- PD.pkgconfigDepends (Cabal.componentBuildInfo comp) ] inplace_bin_dir elab = binDirectoryFor distDirLayout elaboratedSharedConfig elab $ case Cabal.componentNameString cname of Just n -> prettyShow n Nothing -> "" -- | Given a 'SolverId' referencing a dependency on a library, return -- the 'ElaboratedPlanPackage' corresponding to the library. This -- returns at most one result. elaborateLibSolverId :: (SolverId -> [ElaboratedPlanPackage]) -> SolverId -> [ElaboratedPlanPackage] elaborateLibSolverId mapDep = filter (matchPlanPkg (== (CLibName LMainLibName))) . mapDep -- | Given an 'ElaboratedPlanPackage', return the paths to where the -- executables that this package represents would be installed. -- The only case where multiple paths can be returned is the inplace -- monolithic package one, since there can be multiple exes and each one -- has its own directory. planPackageExePaths :: ElaboratedPlanPackage -> [FilePath] planPackageExePaths = -- Pre-existing executables are assumed to be in PATH -- already. In fact, this should be impossible. 
InstallPlan.foldPlanPackage (const []) $ \elab -> let executables :: [FilePath] executables = case elabPkgOrComp elab of -- Monolithic mode: all exes of the package ElabPackage _ -> unUnqualComponentName . PD.exeName <$> PD.executables (elabPkgDescription elab) -- Per-component mode: just the selected exe ElabComponent comp -> case fmap Cabal.componentNameString (compComponentName comp) of Just (Just n) -> [prettyShow n] _ -> [""] in binDirectoryFor distDirLayout elaboratedSharedConfig elab <$> executables elaborateSolverToPackage :: SolverPackage UnresolvedPkgLoc -> ComponentsGraph -> [ElaboratedConfiguredPackage] -> ElaboratedConfiguredPackage elaborateSolverToPackage pkg@(SolverPackage (SourcePackage pkgid _gpd _srcloc _descOverride) _flags _stanzas _deps0 _exe_deps0) compGraph comps = -- Knot tying: the final elab includes the -- pkgInstalledId, which is calculated by hashing many -- of the other fields of the elaboratedPackage. elab where elab0@ElaboratedConfiguredPackage{..} = elaborateSolverToCommon pkg elab1 = elab0 { elabUnitId = newSimpleUnitId pkgInstalledId, elabComponentId = pkgInstalledId, elabLinkedInstantiatedWith = Map.empty, elabPkgOrComp = ElabPackage $ ElaboratedPackage {..}, elabModuleShape = modShape } elab = elab1 { elabInstallDirs = computeInstallDirs storeDirLayout defaultInstallDirs elaboratedSharedConfig elab1 } modShape = case find (matchElabPkg (== (CLibName LMainLibName))) comps of Nothing -> emptyModuleShape Just e -> Ty.elabModuleShape e pkgInstalledId | shouldBuildInplaceOnly pkg = mkComponentId (prettyShow pkgid ++ "-inplace") | otherwise = assert (isJust elabPkgSourceHash) $ hashedInstalledPackageId (packageHashInputs elaboratedSharedConfig elab) -- recursive use of elab -- Need to filter out internal dependencies, because they don't -- correspond to anything real anymore. isExt confid = confSrcId confid /= pkgid filterExt = filter isExt filterExt' = filter (isExt . fst) pkgLibDependencies = buildComponentDeps (filterExt . 
compLibDependencies) pkgExeDependencies = buildComponentDeps (filterExt . compExeDependencies) pkgExeDependencyPaths = buildComponentDeps (filterExt' . compExeDependencyPaths) -- TODO: Why is this flat? pkgPkgConfigDependencies = CD.flatDeps $ buildComponentDeps compPkgConfigDependencies pkgDependsOnSelfLib = CD.fromList [ (CD.componentNameToComponent cn, [()]) | Graph.N _ cn _ <- fromMaybe [] mb_closure ] where mb_closure = Graph.revClosure compGraph [ k | k <- Graph.keys compGraph, is_lib k ] -- NB: the sublib case should not occur, because sub-libraries -- are not supported without per-component builds is_lib (CLibName _) = True is_lib _ = False buildComponentDeps f = CD.fromList [ (compSolverName comp, f comp) | ElaboratedConfiguredPackage{ elabPkgOrComp = ElabComponent comp } <- comps ] -- NB: This is not the final setting of 'pkgStanzasEnabled'. -- See [Sticky enabled testsuites]; we may enable some extra -- stanzas opportunistically when it is cheap to do so. -- -- However, we start off by enabling everything that was -- requested, so that we can maintain an invariant that -- pkgStanzasEnabled is a superset of elabStanzasRequested pkgStanzasEnabled = optStanzaKeysFilteredByValue (fromMaybe False) elabStanzasRequested elaborateSolverToCommon :: SolverPackage UnresolvedPkgLoc -> ElaboratedConfiguredPackage elaborateSolverToCommon pkg@(SolverPackage (SourcePackage pkgid gdesc srcloc descOverride) flags stanzas deps0 _exe_deps0) = elaboratedPackage where elaboratedPackage = ElaboratedConfiguredPackage {..} -- These get filled in later elabUnitId = error "elaborateSolverToCommon: elabUnitId" elabComponentId = error "elaborateSolverToCommon: elabComponentId" elabInstantiatedWith = Map.empty elabLinkedInstantiatedWith = error "elaborateSolverToCommon: elabLinkedInstantiatedWith" elabPkgOrComp = error "elaborateSolverToCommon: elabPkgOrComp" elabInstallDirs = error "elaborateSolverToCommon: elabInstallDirs" elabModuleShape = error "elaborateSolverToCommon: 
elabModuleShape" elabIsCanonical = True elabPkgSourceId = pkgid elabPkgDescription = case PD.finalizePD flags elabEnabledSpec (const True) platform (compilerInfo compiler) [] gdesc of Right (desc, _) -> desc Left _ -> error "Failed to finalizePD in elaborateSolverToCommon" elabFlagAssignment = flags elabFlagDefaults = PD.mkFlagAssignment [ (Cabal.flagName flag, Cabal.flagDefault flag) | flag <- PD.genPackageFlags gdesc ] elabEnabledSpec = enableStanzas stanzas elabStanzasAvailable = stanzas elabStanzasRequested :: OptionalStanzaMap (Maybe Bool) elabStanzasRequested = optStanzaTabulate $ \o -> case o of -- NB: even if a package stanza is requested, if the package -- doesn't actually have any of that stanza we omit it from -- the request, to ensure that we don't decide that this -- package needs to be rebuilt. (It needs to be done here, -- because the ElaboratedConfiguredPackage is where we test -- whether or not there have been changes.) TestStanzas -> listToMaybe [ v | v <- maybeToList tests, _ <- PD.testSuites elabPkgDescription ] BenchStanzas -> listToMaybe [ v | v <- maybeToList benchmarks, _ <- PD.benchmarks elabPkgDescription ] where tests, benchmarks :: Maybe Bool tests = perPkgOptionMaybe pkgid packageConfigTests benchmarks = perPkgOptionMaybe pkgid packageConfigBenchmarks -- This is a placeholder which will get updated by 'pruneInstallPlanPass1' -- and 'pruneInstallPlanPass2'. We can't populate it here -- because whether or not tests/benchmarks should be enabled -- is heuristically calculated based on whether or not the -- dependencies of the test suite have already been installed, -- but this function doesn't know what is installed (since -- we haven't improved the plan yet), so we do it in another pass. -- Check the comments of those functions for more details. 
elabConfigureTargets = [] elabBuildTargets = [] elabTestTargets = [] elabBenchTargets = [] elabReplTarget = Nothing elabHaddockTargets = [] elabBuildHaddocks = perPkgOptionFlag pkgid False packageConfigDocumentation elabPkgSourceLocation = srcloc elabPkgSourceHash = Map.lookup pkgid sourcePackageHashes elabLocalToProject = isLocalToProject pkg elabBuildStyle = if shouldBuildInplaceOnly pkg then BuildInplaceOnly else BuildAndInstall elabPackageDbs = projectConfigPackageDBs sharedPackageConfig elabBuildPackageDBStack = buildAndRegisterDbs elabRegisterPackageDBStack = buildAndRegisterDbs elabSetupScriptStyle = packageSetupScriptStyle elabPkgDescription elabSetupScriptCliVersion = packageSetupScriptSpecVersion elabSetupScriptStyle elabPkgDescription libDepGraph deps0 elabSetupPackageDBStack = buildAndRegisterDbs elabInplaceBuildPackageDBStack = inplacePackageDbs elabInplaceRegisterPackageDBStack = inplacePackageDbs elabInplaceSetupPackageDBStack = inplacePackageDbs buildAndRegisterDbs | shouldBuildInplaceOnly pkg = inplacePackageDbs | otherwise = corePackageDbs elabPkgDescriptionOverride = descOverride elabVanillaLib = perPkgOptionFlag pkgid True packageConfigVanillaLib --TODO: [required feature]: also needs to be handled recursively elabSharedLib = pkgid `Set.member` pkgsUseSharedLibrary elabStaticLib = perPkgOptionFlag pkgid False packageConfigStaticLib elabDynExe = perPkgOptionFlag pkgid False packageConfigDynExe elabFullyStaticExe = perPkgOptionFlag pkgid False packageConfigFullyStaticExe elabGHCiLib = perPkgOptionFlag pkgid False packageConfigGHCiLib --TODO: [required feature] needs to default to enabled on windows still elabProfExe = perPkgOptionFlag pkgid False packageConfigProf elabProfLib = pkgid `Set.member` pkgsUseProfilingLibrary (elabProfExeDetail, elabProfLibDetail) = perPkgOptionLibExeFlag pkgid ProfDetailDefault packageConfigProfDetail packageConfigProfLibDetail elabCoverage = perPkgOptionFlag pkgid False packageConfigCoverage elabOptimization = 
perPkgOptionFlag pkgid NormalOptimisation packageConfigOptimization elabSplitObjs = perPkgOptionFlag pkgid False packageConfigSplitObjs elabSplitSections = perPkgOptionFlag pkgid False packageConfigSplitSections elabStripLibs = perPkgOptionFlag pkgid False packageConfigStripLibs elabStripExes = perPkgOptionFlag pkgid False packageConfigStripExes elabDebugInfo = perPkgOptionFlag pkgid NoDebugInfo packageConfigDebugInfo elabDumpBuildInfo = perPkgOptionFlag pkgid NoDumpBuildInfo packageConfigDumpBuildInfo -- Combine the configured compiler prog settings with the user-supplied -- config. For the compiler progs any user-supplied config was taken -- into account earlier when configuring the compiler so its ok that -- our configured settings for the compiler override the user-supplied -- config here. elabProgramPaths = Map.fromList [ (programId prog, programPath prog) | prog <- configuredPrograms compilerprogdb ] <> perPkgOptionMapLast pkgid packageConfigProgramPaths elabProgramArgs = Map.fromList [ (programId prog, args) | prog <- configuredPrograms compilerprogdb , let args = programOverrideArgs prog , not (null args) ] <> perPkgOptionMapMappend pkgid packageConfigProgramArgs elabProgramPathExtra = perPkgOptionNubList pkgid packageConfigProgramPathExtra elabConfigureScriptArgs = perPkgOptionList pkgid packageConfigConfigureArgs elabExtraLibDirs = perPkgOptionList pkgid packageConfigExtraLibDirs elabExtraLibDirsStatic = perPkgOptionList pkgid packageConfigExtraLibDirsStatic elabExtraFrameworkDirs = perPkgOptionList pkgid packageConfigExtraFrameworkDirs elabExtraIncludeDirs = perPkgOptionList pkgid packageConfigExtraIncludeDirs elabProgPrefix = perPkgOptionMaybe pkgid packageConfigProgPrefix elabProgSuffix = perPkgOptionMaybe pkgid packageConfigProgSuffix elabHaddockHoogle = perPkgOptionFlag pkgid False packageConfigHaddockHoogle elabHaddockHtml = perPkgOptionFlag pkgid False packageConfigHaddockHtml elabHaddockHtmlLocation = perPkgOptionMaybe pkgid 
packageConfigHaddockHtmlLocation elabHaddockForeignLibs = perPkgOptionFlag pkgid False packageConfigHaddockForeignLibs elabHaddockForHackage = perPkgOptionFlag pkgid Cabal.ForDevelopment packageConfigHaddockForHackage elabHaddockExecutables = perPkgOptionFlag pkgid False packageConfigHaddockExecutables elabHaddockTestSuites = perPkgOptionFlag pkgid False packageConfigHaddockTestSuites elabHaddockBenchmarks = perPkgOptionFlag pkgid False packageConfigHaddockBenchmarks elabHaddockInternal = perPkgOptionFlag pkgid False packageConfigHaddockInternal elabHaddockCss = perPkgOptionMaybe pkgid packageConfigHaddockCss elabHaddockLinkedSource = perPkgOptionFlag pkgid False packageConfigHaddockLinkedSource elabHaddockQuickJump = perPkgOptionFlag pkgid False packageConfigHaddockQuickJump elabHaddockHscolourCss = perPkgOptionMaybe pkgid packageConfigHaddockHscolourCss elabHaddockContents = perPkgOptionMaybe pkgid packageConfigHaddockContents elabTestMachineLog = perPkgOptionMaybe pkgid packageConfigTestMachineLog elabTestHumanLog = perPkgOptionMaybe pkgid packageConfigTestHumanLog elabTestShowDetails = perPkgOptionMaybe pkgid packageConfigTestShowDetails elabTestKeepTix = perPkgOptionFlag pkgid False packageConfigTestKeepTix elabTestWrapper = perPkgOptionMaybe pkgid packageConfigTestWrapper elabTestFailWhenNoTestSuites = perPkgOptionFlag pkgid False packageConfigTestFailWhenNoTestSuites elabTestTestOptions = perPkgOptionList pkgid packageConfigTestTestOptions elabBenchmarkOptions = perPkgOptionList pkgid packageConfigBenchmarkOptions perPkgOptionFlag :: PackageId -> a -> (PackageConfig -> Flag a) -> a perPkgOptionMaybe :: PackageId -> (PackageConfig -> Flag a) -> Maybe a perPkgOptionList :: PackageId -> (PackageConfig -> [a]) -> [a] perPkgOptionFlag pkgid def f = fromFlagOrDefault def (lookupPerPkgOption pkgid f) perPkgOptionMaybe pkgid f = flagToMaybe (lookupPerPkgOption pkgid f) perPkgOptionList pkgid f = lookupPerPkgOption pkgid f perPkgOptionNubList pkgid f = fromNubList 
(lookupPerPkgOption pkgid f) perPkgOptionMapLast pkgid f = getMapLast (lookupPerPkgOption pkgid f) perPkgOptionMapMappend pkgid f = getMapMappend (lookupPerPkgOption pkgid f) perPkgOptionLibExeFlag pkgid def fboth flib = (exe, lib) where exe = fromFlagOrDefault def bothflag lib = fromFlagOrDefault def (bothflag <> libflag) bothflag = lookupPerPkgOption pkgid fboth libflag = lookupPerPkgOption pkgid flib lookupPerPkgOption :: (Package pkg, Monoid m) => pkg -> (PackageConfig -> m) -> m lookupPerPkgOption pkg f = -- This is where we merge the options from the project config that -- apply to all packages, all project local packages, and to specific -- named packages global `mappend` local `mappend` perpkg where global = f allPackagesConfig local | isLocalToProject pkg = f localPackagesConfig | otherwise = mempty perpkg = maybe mempty f (Map.lookup (packageName pkg) perPackageConfig) inplacePackageDbs = corePackageDbs ++ [ distPackageDB (compilerId compiler) ] corePackageDbs = applyPackageDbFlags (storePackageDBStack (compilerId compiler)) (projectConfigPackageDBs sharedPackageConfig) -- For this local build policy, every package that lives in a local source -- dir (as opposed to a tarball), or depends on such a package, will be -- built inplace into a shared dist dir. Tarball packages that depend on -- source dir packages will also get unpacked locally. 
shouldBuildInplaceOnly :: SolverPackage loc -> Bool shouldBuildInplaceOnly pkg = Set.member (packageId pkg) pkgsToBuildInplaceOnly pkgsToBuildInplaceOnly :: Set PackageId pkgsToBuildInplaceOnly = Set.fromList $ map packageId $ SolverInstallPlan.reverseDependencyClosure solverPlan (map PlannedId (Set.toList pkgsLocalToProject)) isLocalToProject :: Package pkg => pkg -> Bool isLocalToProject pkg = Set.member (packageId pkg) pkgsLocalToProject pkgsLocalToProject :: Set PackageId pkgsLocalToProject = Set.fromList (catMaybes (map shouldBeLocal localPackages)) --TODO: localPackages is a misnomer, it's all project packages -- here is where we decide which ones will be local! pkgsUseSharedLibrary :: Set PackageId pkgsUseSharedLibrary = packagesWithLibDepsDownwardClosedProperty needsSharedLib where needsSharedLib pkg = fromMaybe compilerShouldUseSharedLibByDefault (liftM2 (||) pkgSharedLib pkgDynExe) where pkgid = packageId pkg pkgSharedLib = perPkgOptionMaybe pkgid packageConfigSharedLib pkgDynExe = perPkgOptionMaybe pkgid packageConfigDynExe --TODO: [code cleanup] move this into the Cabal lib. It's currently open -- coded in Distribution.Simple.Configure, but should be made a proper -- function of the Compiler or CompilerInfo. compilerShouldUseSharedLibByDefault = case compilerFlavor compiler of GHC -> GHC.isDynamic compiler GHCJS -> GHCJS.isDynamic compiler _ -> False pkgsUseProfilingLibrary :: Set PackageId pkgsUseProfilingLibrary = packagesWithLibDepsDownwardClosedProperty needsProfilingLib where needsProfilingLib pkg = fromFlagOrDefault False (profBothFlag <> profLibFlag) where pkgid = packageId pkg profBothFlag = lookupPerPkgOption pkgid packageConfigProf profLibFlag = lookupPerPkgOption pkgid packageConfigProfLib --TODO: [code cleanup] unused: the old deprecated packageConfigProfExe libDepGraph = Graph.fromDistinctList $ map NonSetupLibDepSolverPlanPackage (SolverInstallPlan.toList solverPlan) packagesWithLibDepsDownwardClosedProperty property = Set.fromList . 
map packageId . fromMaybe [] $ Graph.closure libDepGraph [ Graph.nodeKey pkg | pkg <- SolverInstallPlan.toList solverPlan , property pkg ] -- just the packages that satisfy the property --TODO: [nice to have] this does not check the config consistency, -- e.g. a package explicitly turning off profiling, but something -- depending on it that needs profiling. This really needs a separate -- package config validation/resolution pass. --TODO: [nice to have] config consistency checking: -- + profiling libs & exes, exe needs lib, recursive -- + shared libs & exes, exe needs lib, recursive -- + vanilla libs & exes, exe needs lib, recursive -- + ghci or shared lib needed by TH, recursive, ghc version dependent -- TODO: Drop matchPlanPkg/matchElabPkg in favor of mkCCMapping shouldBeLocal :: PackageSpecifier (SourcePackage (PackageLocation loc)) -> Maybe PackageId shouldBeLocal NamedPackage{} = Nothing shouldBeLocal (SpecificSourcePackage pkg) = case srcpkgSource pkg of LocalUnpackedPackage _ -> Just (packageId pkg) _ -> Nothing -- | Given a 'ElaboratedPlanPackage', report if it matches a 'ComponentName'. matchPlanPkg :: (ComponentName -> Bool) -> ElaboratedPlanPackage -> Bool matchPlanPkg p = InstallPlan.foldPlanPackage (p . ipiComponentName) (matchElabPkg p) -- | Get the appropriate 'ComponentName' which identifies an installed -- component. ipiComponentName :: IPI.InstalledPackageInfo -> ComponentName ipiComponentName = CLibName . IPI.sourceLibName -- | Given a 'ElaboratedConfiguredPackage', report if it matches a -- 'ComponentName'. matchElabPkg :: (ComponentName -> Bool) -> ElaboratedConfiguredPackage -> Bool matchElabPkg p elab = case elabPkgOrComp elab of ElabComponent comp -> maybe False p (compComponentName comp) ElabPackage _ -> -- So, what should we do here? One possibility is to -- unconditionally return 'True', because whatever it is -- that we're looking for, it better be in this package. 
            -- (continuation of the ElabPackage branch of 'matchElabPkg')
            -- But this is a bit dodgy if the package doesn't actually
            -- have, e.g., a library. Fortunately, it's not possible
            -- for the build of the library/executables to be toggled
            -- by 'pkgStanzasEnabled', so the only thing we have to
            -- test is if the component in question is *buildable.*
            any (p . componentName)
                (Cabal.pkgBuildableComponents (elabPkgDescription elab))

-- | Given an 'ElaboratedPlanPackage', generate the mapping from 'PackageName'
-- and 'ComponentName' to the 'ComponentId' that should be used
-- in this case.
--
-- For a pre-existing (installed) package the map has a single entry for its
-- library; for a configured package it has one entry per component (or per
-- buildable component, in monolithic package mode).
mkCCMapping :: ElaboratedPlanPackage
            -> (PackageName, Map ComponentName (AnnotatedId ComponentId))
mkCCMapping =
    InstallPlan.foldPlanPackage
       (\ipkg -> (packageName ipkg,
                    Map.singleton (ipiComponentName ipkg)
                                  -- TODO: libify
                                  (AnnotatedId {
                                    ann_id = IPI.installedComponentId ipkg,
                                    ann_pid = packageId ipkg,
                                    ann_cname = IPI.sourceComponentName ipkg
                                  })))
      $ \elab ->
        -- All components of a configured package share the package's
        -- 'elabComponentId'; only the component name differs per entry.
        let mk_aid cn = AnnotatedId {
                            ann_id = elabComponentId elab,
                            ann_pid = packageId elab,
                            ann_cname = cn
                        }
        in (packageName elab,
            case elabPkgOrComp elab of
                ElabComponent comp ->
                    -- Per-component mode: at most one entry (none if the
                    -- component has no name, e.g. a setup component).
                    case compComponentName comp of
                        Nothing -> Map.empty
                        Just n  -> Map.singleton n (mk_aid n)
                ElabPackage _ ->
                    -- Monolithic mode: one entry per buildable component.
                    Map.fromList $ map (\comp -> let cn = Cabal.componentName comp
                                                 in (cn, mk_aid cn))
                                       (Cabal.pkgBuildableComponents
                                           (elabPkgDescription elab)))

-- | Given an 'ElaboratedPlanPackage', generate the mapping from 'ComponentId'
-- to the shape of this package, as per mix-in linking.
mkShapeMapping :: ElaboratedPlanPackage
               -> (ComponentId, (OpenUnitId, ModuleShape))
mkShapeMapping dpkg =
    (getComponentId dpkg, (indef_uid, shape))
  where
    -- Uses Monad (->): liftM2 here pairs up two projections applied to the
    -- same package value.
    (dcid, shape) = InstallPlan.foldPlanPackage
        (liftM2 (,) IPI.installedComponentId shapeInstalledPackage)
        (liftM2 (,) elabComponentId elabModuleShape)
        dpkg
    -- Each required (hole) module is mapped to a module variable of itself,
    -- i.e. the fully uninstantiated unit id.
    indef_uid = IndefFullUnitId dcid
        (Map.fromList [ (req, OpenModuleVar req)
                      | req <- Set.toList (modShapeRequires shape)])

-- | Get the bin\/ directories that a package's executables should reside in.
--
-- The result may be empty if the package does not build any executables.
--
-- The result may have several entries if this is an inplace build of a package
-- with multiple executables.
binDirectories
  :: DistDirLayout
  -> ElaboratedSharedConfig
  -> ElaboratedConfiguredPackage
  -> [FilePath]
binDirectories layout config package = case elabBuildStyle package of
  -- quick sanity check: no sense returning a bin directory if we're not going
  -- to put any executables in it, that will just clog up the PATH
  _ | noExecutables -> []
  BuildAndInstall -> [installedBinDirectory package]
  -- NOTE(review): restored the '</>' operators in this branch and in 'root'
  -- below; they had been stripped from this copy of the source (leaving the
  -- ill-typed 'map (root) $ ...'), presumably by markup removal.
  BuildInplaceOnly -> map (root </>) $ case elabPkgOrComp package of
    -- Per-component build: only an executable component owns a bin dir.
    ElabComponent comp -> case compSolverName comp of
      CD.ComponentExe n -> [prettyShow n]
      _                 -> []
    -- Monolithic build: one bin dir per executable of the package.
    ElabPackage _ -> map (prettyShow . PD.exeName)
                   . PD.executables
                   . elabPkgDescription
                   $ package
  where
    noExecutables = null . PD.executables . elabPkgDescription $ package
    root = distBuildDirectory layout (elabDistDirParams config package)
       </> "build"

-- | A newtype for 'SolverInstallPlan.SolverPlanPackage' for which the
-- dependency graph considers only dependencies on libraries which are
-- NOT from setup dependencies. Used to compute the set
-- of packages needed for profiling and dynamic libraries.
newtype NonSetupLibDepSolverPlanPackage = NonSetupLibDepSolverPlanPackage
    { unNonSetupLibDepSolverPlanPackage :: SolverInstallPlan.SolverPlanPackage }

instance Package NonSetupLibDepSolverPlanPackage where
    packageId = packageId . unNonSetupLibDepSolverPlanPackage

instance IsNode NonSetupLibDepSolverPlanPackage where
    type Key NonSetupLibDepSolverPlanPackage = SolverId
    nodeKey = nodeKey . unNonSetupLibDepSolverPlanPackage
    -- Neighbours deliberately exclude setup dependencies; see the newtype's
    -- Haddock above for why.
    nodeNeighbors (NonSetupLibDepSolverPlanPackage spkg)
        = ordNub $ CD.nonSetupDeps (resolverPackageLibDeps spkg)

-- | State threaded through install-plan instantiation: the accumulated map
-- from 'UnitId' to the (possibly newly minted) plan package for it.
type InstS = Map UnitId ElaboratedPlanPackage
type InstM a = State InstS a

-- | Extract the 'ComponentId' of any kind of plan package.
getComponentId :: ElaboratedPlanPackage
               -> ComponentId
getComponentId (InstallPlan.PreExisting dipkg) = IPI.installedComponentId dipkg
getComponentId (InstallPlan.Configured elab) = elabComponentId elab
getComponentId (InstallPlan.Installed elab) = elabComponentId elab

-- | Read off the 'BuildStyle' of a configured plan package; anything else
-- (pre-existing/installed) is by definition already installed.
extractElabBuildStyle :: InstallPlan.GenericPlanPackage ipkg ElaboratedConfiguredPackage
                      -> BuildStyle
extractElabBuildStyle (InstallPlan.Configured elab) = elabBuildStyle elab
extractElabBuildStyle _ = BuildAndInstall

-- instantiateInstallPlan is responsible for filling out an InstallPlan
-- with all of the extra Configured packages that would be generated by
-- recursively instantiating the dependencies of packages.
--
-- Suppose we are compiling the following packages:
--
--  unit f where
--    signature H
--
--  unit g where
--    dependency f[H=containers:Data.Map]
--
-- At entry, we have an InstallPlan with a single plan package per
-- actual source package, e.g., only (indefinite!) f and g.  The job of
-- instantiation is to turn this into three plan packages: each of the
-- packages as before, but also a new, definite package f[H=containers:Data.Map]
--
-- How do we do this?  The general strategy is to iterate over every
-- package in the existing plan and recursively create new entries for
-- each of its dependencies which is an instantiated package (e.g.,
-- f[H=p:G]).  This process must be recursive, as f itself may depend on
-- OTHER packages which it instantiated using its hole H.
--
-- Some subtleties:
--
--  * We have to keep track of whether or not we are instantiating with
--    inplace packages, because instantiating a non-inplace package with
--    an inplace packages makes it inplace (since it depends on
--    something in the inplace store)!
--    The rule is that if any of the
--    modules in an instantiation are inplace, then the instantiated
--    unit itself must be inplace.  There is then a bunch of faffing
--    about to keep track of BuildStyle.
--
--  * ElaboratedConfiguredPackage was never really designed for post
--    facto instantiation, so some of the steps for generating new
--    instantiations are a little fraught.  For example, the act of
--    flipping a package to be inplace involves faffing about with four
--    fields, because these fields are precomputed.  A good refactor
--    would be to reduce the amount of precomputation to simplify the
--    algorithm here.
--
--  * We use the state monad to cache already instantiated modules, so
--    we don't instantiate the same thing multiple times.
--
instantiateInstallPlan :: StoreDirLayout -> InstallDirs.InstallDirTemplates -> ElaboratedSharedConfig -> ElaboratedInstallPlan -> ElaboratedInstallPlan
instantiateInstallPlan storeDirLayout defaultInstallDirs elaboratedShared plan =
    InstallPlan.new (IndependentGoals False)
                    (Graph.fromDistinctList (Map.elems ready_map))
  where
    -- All packages of the incoming plan, and an index of them by
    -- 'ComponentId' for the lookups below.
    pkgs = InstallPlan.toList plan

    cmap = Map.fromList [ (getComponentId pkg, pkg) | pkg <- pkgs ]

    -- Instantiate a component with the given module substitution, caching
    -- the result in 'InstS' so each instantiation is minted only once.
    instantiateUnitId :: ComponentId -> Map ModuleName (Module, BuildStyle)
                      -> InstM (DefUnitId, BuildStyle)
    instantiateUnitId cid insts = state $ \s ->
        case Map.lookup uid s of
            Nothing ->
                -- Knot tied: 'r' is inserted into the state that is used to
                -- compute 'r' itself.  Do not force 'r' here.
                -- TODO: I don't think the knot tying actually does
                -- anything useful
                let (r, s') = runState (instantiateComponent uid cid insts)
                                       (Map.insert uid r s)
                in ((def_uid, extractElabBuildStyle r), Map.insert uid r s')
            Just r -> ((def_uid, extractElabBuildStyle r), s)
      where
        def_uid = mkDefUnitId cid (fmap fst insts)
        uid = unDefUnitId def_uid

    -- No need to InplaceT; the inplace-ness is properly computed for
    -- the ElaboratedPlanPackage, so that will implicitly pass it on
    instantiateComponent
        :: UnitId -> ComponentId -> Map ModuleName (Module, BuildStyle)
        -> InstM ElaboratedPlanPackage
    instantiateComponent uid cid insts
      | Just planpkg <- Map.lookup cid cmap
      = case planpkg of
          InstallPlan.Configured (elab0@ElaboratedConfiguredPackage
            { elabPkgOrComp = ElabComponent comp }) -> do
              -- Recursively instantiate our library dependencies.
              deps <- traverse (fmap fst . substUnitId insts)
                               (compLinkedLibDependencies comp)
              -- If any module we are instantiated with is inplace, the
              -- result is inplace (BuildStyle is a monoid with that rule).
              let build_style = fold (fmap snd insts)
              let getDep (Module dep_uid _) = [dep_uid]
                  elab1 = fixupBuildStyle build_style $ elab0 {
                        elabUnitId = uid,
                        elabComponentId = cid,
                        elabInstantiatedWith = fmap fst insts,
                        elabIsCanonical = Map.null (fmap fst insts),
                        elabPkgOrComp = ElabComponent comp {
                            compOrderLibDependencies =
                                (if Map.null insts then [] else [newSimpleUnitId cid]) ++
                                ordNub (map unDefUnitId
                                    (deps ++ concatMap (getDep . fst) (Map.elems insts)))
                        }
                    }
                  -- Install dirs depend on the (just updated) unit id, so
                  -- they must be recomputed after the fixups above.
                  elab = elab1 {
                        elabInstallDirs = computeInstallDirs storeDirLayout
                                                            defaultInstallDirs
                                                            elaboratedShared
                                                            elab1
                    }
              return $ InstallPlan.Configured elab
          _ -> return planpkg
      | otherwise = error ("instantiateComponent: " ++ prettyShow cid)

    -- Turn an 'OpenUnitId' into a definite unit id, instantiating it if
    -- it still has holes.
    substUnitId :: Map ModuleName (Module, BuildStyle) -> OpenUnitId -> InstM (DefUnitId, BuildStyle)
    substUnitId _ (DefiniteUnitId uid) =
        -- This COULD actually, secretly, be an inplace package, but in
        -- that case it doesn't matter as it's already been recorded
        -- in the package that depends on this
        return (uid, BuildAndInstall)
    substUnitId subst (IndefFullUnitId cid insts) = do
        insts' <- substSubst subst insts
        instantiateUnitId cid insts'

    -- NB: NOT composition
    substSubst :: Map ModuleName (Module, BuildStyle)
               -> Map ModuleName OpenModule
               -> InstM (Map ModuleName (Module, BuildStyle))
    substSubst subst insts = traverse (substModule subst) insts

    substModule :: Map ModuleName (Module, BuildStyle) -> OpenModule -> InstM (Module, BuildStyle)
    substModule subst (OpenModuleVar mod_name)
        | Just m <- Map.lookup mod_name subst = return m
        | otherwise = error "substModule: non-closing substitution"
    substModule subst (OpenModule uid mod_name) = do
        (uid', build_style) <- substUnitId subst uid
        return (Module uid' mod_name, build_style)

    -- Record the (uninstantiated) unit for an indefinite component,
    -- keyed by its simple unit id.
    indefiniteUnitId :: ComponentId -> InstM UnitId
    indefiniteUnitId cid = do
        let uid = newSimpleUnitId cid
        r <- indefiniteComponent uid cid
        state $ \s -> (uid, Map.insert uid r s)

    indefiniteComponent :: UnitId -> ComponentId -> InstM ElaboratedPlanPackage
    indefiniteComponent _uid cid
      -- Only need Configured; this phase happens before improvement, so
      -- there shouldn't be any Installed packages here.
      | Just (InstallPlan.Configured epkg) <- Map.lookup cid cmap
      , ElabComponent elab_comp <- elabPkgOrComp epkg
      = do -- We need to do a little more processing of the includes: some
           -- of them are fully definite even without substitution.  We
           -- want to build those too; see #5634.
           --
           -- This code mimics similar code in Distribution.Backpack.ReadyComponent;
           -- however, unlike the conversion from LinkedComponent to
           -- ReadyComponent, this transformation is done *without*
           -- changing the type in question; and what we are simply
           -- doing is enforcing tighter invariants on the data
           -- structure in question.  The new invariant is that there
           -- is no IndefFullUnitId in compLinkedLibDependencies that actually
           -- has no holes.  We couldn't specify this invariant when
           -- we initially created the ElaboratedPlanPackage because
           -- we have no way of actually reifying the UnitId into a
           -- DefiniteUnitId (that's what substUnitId does!)
           new_deps <- for (compLinkedLibDependencies elab_comp) $ \uid ->
               if Set.null (openUnitIdFreeHoles uid)
                   then fmap (DefiniteUnitId . fst) (substUnitId Map.empty uid)
                   else return uid
           -- NB: no fixupBuildStyle needed here, as if the indefinite
           -- component depends on any inplace packages, it itself must
           -- be indefinite!  There is no substitution here, we can't
           -- post facto add inplace deps
           return . InstallPlan.Configured $ epkg {
               elabPkgOrComp = ElabComponent elab_comp {
                   compLinkedLibDependencies = new_deps,
                   -- I think this is right: any new definite unit ids we
                   -- minted in the phase above need to be built before us.
                   -- Add 'em in.  This doesn't remove any old dependencies
                   -- on the indefinite package; they're harmless.
                   compOrderLibDependencies =
                       ordNub $ compOrderLibDependencies elab_comp
                                ++ [unDefUnitId d | DefiniteUnitId d <- new_deps]
               }
             }
      | Just planpkg <- Map.lookup cid cmap
      = return planpkg
      | otherwise = error ("indefiniteComponent: " ++ prettyShow cid)

    -- Flipping a package to inplace also requires switching it over to
    -- the inplace package DB stacks (see the commentary above).
    fixupBuildStyle BuildAndInstall elab = elab
    fixupBuildStyle _ (elab@ElaboratedConfiguredPackage { elabBuildStyle = BuildInplaceOnly }) = elab
    fixupBuildStyle BuildInplaceOnly elab = elab {
      elabBuildStyle = BuildInplaceOnly,
      elabBuildPackageDBStack = elabInplaceBuildPackageDBStack elab,
      elabRegisterPackageDBStack = elabInplaceRegisterPackageDBStack elab,
      elabSetupPackageDBStack = elabInplaceSetupPackageDBStack elab
    }

    -- The final unit-id-keyed map; 'work' seeds it by walking every
    -- package of the original plan.
    ready_map = execState work Map.empty

    work = for_ pkgs $ \pkg ->
            case pkg of
                InstallPlan.Configured elab
                    -- A package with a non-trivial instantiation is
                    -- indefinite: record it uninstantiated (its definite
                    -- instantiations are minted on demand by dependents).
                    | not (Map.null (elabLinkedInstantiatedWith elab))
                    -> indefiniteUnitId (elabComponentId elab) >> return ()
                _ -> instantiateUnitId (getComponentId pkg) Map.empty >> return ()

---------------------------
-- Build targets
--

-- Refer to ProjectPlanning.Types for details of these important types:

-- data ComponentTarget = ...
-- data SubComponentTarget = ...

-- One step in the build system is to translate higher level intentions like
-- "build this package", "test that package", or "repl that component" into
-- a more detailed specification of exactly which components to build (or other
-- actions like repl or build docs). This translation is somewhat different for
-- different commands. For example "test" for a package will build a different
-- set of components than "build". In addition, the translation of these
-- intentions can fail. For example "run" for a package is only unambiguous
-- when the package has a single executable.
--
-- So we need a little bit of infrastructure to make it easy for the command
-- implementations to select what component targets are meant when a user asks
-- to do something with a package or component. To do this (and to be able to
-- produce good error messages for mistakes and when targets are not available)
-- we need to gather and summarise accurate information about all the possible
-- targets, both available and unavailable. Then a command implementation can
-- decide which of the available component targets should be selected.

-- | An available target represents a component within a package that a user
-- command could plausibly refer to. In this sense, all the components defined
-- within the package are things the user could refer to, whether or not it
-- would actually be possible to build that component.
--
-- In particular the available target contains an 'AvailableTargetStatus' which
-- informs us about whether it's actually possible to select this component to
-- be built, and if not why not. This detail makes it possible for command
-- implementations (like @build@, @test@ etc) to accurately report why a target
-- cannot be used.
--
-- Note that the type parameter is used to help enforce that command
-- implementations can only select targets that can actually be built (by
-- forcing them to return the @k@ value for the selected targets).
-- In particular 'resolveTargets' makes use of this (with @k@ as
-- @('UnitId', ComponentName')@) to identify the targets thus selected.
--
data AvailableTarget k = AvailableTarget {
       -- | The package the component belongs to.
       availableTargetPackageId      :: PackageId,
       -- | The name of the component within that package.
       availableTargetComponentName  :: ComponentName,
       -- | Whether this component can be selected for building, and if not,
       -- why not.
       availableTargetStatus         :: AvailableTargetStatus k,
       -- | Whether the component's package is local to the project.
       availableTargetLocalToProject :: Bool
     }
  deriving (Eq, Show, Functor)

-- | The status of an 'AvailableTarget' component. This tells us whether
-- it's actually possible to select this component to be built, and if not
-- why not.
--
data AvailableTargetStatus k =
       TargetDisabledByUser    -- ^ When the user does @tests: False@
     | TargetDisabledBySolver  -- ^ When the solver could not enable tests
     | TargetNotBuildable      -- ^ When the component has @buildable: False@
     | TargetNotLocal          -- ^ When the component is non-core in a non-local package
     | TargetBuildable k TargetRequested -- ^ The target can or should be built
  deriving (Eq, Ord, Show, Functor)

-- | This tells us whether a target ought to be built by default, or only if
-- specifically requested. The policy is that components like libraries and
-- executables are built by default by @build@, but test suites and benchmarks
-- are not, unless this is overridden in the project configuration.
--
data TargetRequested =
       TargetRequestedByDefault    -- ^ To be built by default
     | TargetNotRequestedByDefault -- ^ Not to be built by default
  deriving (Eq, Ord, Show)

-- | Given the install plan, produce the set of 'AvailableTarget's for each
-- package-component pair.
--
-- Typically there will only be one such target for each component, but for
-- example if we have a plan with both normal and profiling variants of a
-- component then we would get both as available targets, or similarly if we
-- had a plan that contained two instances of the same version of a package.
-- This approach makes it relatively easy to select all instances\/variants
-- of a component.
--
availableTargets :: ElaboratedInstallPlan
                 -> Map (PackageId, ComponentName)
                        [AvailableTarget (UnitId, ComponentName)]
availableTargets installPlan =
    let rs = [ (pkgid, cname, fake, target)
             | pkg <- InstallPlan.toList installPlan
             , (pkgid, cname, fake, target) <- case pkg of
                 InstallPlan.PreExisting ipkg -> availableInstalledTargets ipkg
                 InstallPlan.Installed   elab -> availableSourceTargets elab
                 InstallPlan.Configured  elab -> availableSourceTargets elab
             ]
     in Map.union
         (Map.fromListWith (++)
            [ ((pkgid, cname), [target])
            | (pkgid, cname, fake, target) <- rs, not fake])
         (Map.fromList
            [ ((pkgid, cname), [target])
            | (pkgid, cname, fake, target) <- rs, fake])
    -- The normal targets mask the fake ones. We get all instances of the
    -- normal ones and only one copy of the fake ones (as there are many
    -- duplicates of the fake ones). See 'availableSourceTargets' below for
    -- more details on what this fake stuff is about.

-- | The available targets of a pre-existing installed package: just its
-- public library, which is always buildable (indeed, already built).
availableInstalledTargets :: IPI.InstalledPackageInfo
                          -> [(PackageId, ComponentName, Bool,
                               AvailableTarget (UnitId, ComponentName))]
availableInstalledTargets ipkg =
    let unitid = installedUnitId ipkg
        cname  = CLibName LMainLibName
        status = TargetBuildable (unitid, cname) TargetRequestedByDefault
        target = AvailableTarget (packageId ipkg) cname status False
        fake   = False
    in [(packageId ipkg, cname, fake, target)]

-- | All available targets (buildable or not) of a source package in the
-- plan, including \"fake\" entries for components that were pruned from the
-- plan; see the long note in the body for what \"fake\" means.
availableSourceTargets :: ElaboratedConfiguredPackage
                       -> [(PackageId, ComponentName, Bool,
                            AvailableTarget (UnitId, ComponentName))]
availableSourceTargets elab =
    -- We have a somewhat awkward problem here. We need to know /all/ the
    -- components from /all/ the packages because these are the things that
    -- users could refer to. Unfortunately, at this stage the elaborated install
    -- plan does /not/ contain all components: some components have already
    -- been deleted because they cannot possibly be built. This is the case
    -- for components that are marked @buildable: False@ in their .cabal files.
    -- (It's not unreasonable that the unbuildable components have been pruned
    -- as the plan invariant is considerably simpler if all nodes can be built)
    --
    -- We can recover the missing components but it's not exactly elegant. For
    -- a graph node corresponding to a component we still have the information
    -- about the package that it came from, and this includes the names of
    -- /all/ the other components in the package. So in principle this lets us
    -- find the names of all components, plus full details of the buildable
    -- components.
    --
    -- Consider for example a package with 3 exe components: foo, bar and baz
    -- where foo and bar are buildable, but baz is not. So the plan contains
    -- nodes for the components foo and bar. Now we look at each of these two
    -- nodes and look at the package they come from and the names of the
    -- components in this package. This will give us the names foo, bar and
    -- baz, twice (once for each of the two buildable components foo and bar).
    --
    -- We refer to these reconstructed missing components as fake targets.
    -- It is an invariant that they are not available to be built.
    --
    -- To produce the final set of targets we put the fake targets in a finite
    -- map (thus eliminating the duplicates) and then we overlay that map with
    -- the normal buildable targets. (This is done above in 'availableTargets'.)
    --
    [ (packageId elab, cname, fake, target)
    | component <- pkgComponents (elabPkgDescription elab)
    , let cname  = componentName component
          status = componentAvailableTargetStatus component
          target = AvailableTarget {
                     availableTargetPackageId      = packageId elab,
                     availableTargetComponentName  = cname,
                     availableTargetStatus         = status,
                     availableTargetLocalToProject = elabLocalToProject elab
                   }
          fake   = isFakeTarget cname

      -- TODO: The goal of this test is to exclude "instantiated"
      -- packages as available targets. This means that you can't
      -- ask for a particular instantiated component to be built;
      -- it will only get built by a dependency. Perhaps the
      -- correct way to implement this is to run selection
      -- prior to instantiating packages. If you refactor
      -- this, then you can delete this test.
    , elabIsCanonical elab

      -- Filter out some bogus parts of the cross product that are never needed
    , case status of
        TargetBuildable{} | fake -> False
        _                        -> True
    ]
  where
    -- A target is fake when this plan node is a single component and the
    -- name asked about is one of the package's /other/ components.
    isFakeTarget cname =
      case elabPkgOrComp elab of
        ElabPackage _               -> False
        ElabComponent elabComponent ->
          compComponentName elabComponent /= Just cname

    componentAvailableTargetStatus
      :: Component -> AvailableTargetStatus (UnitId, ComponentName)
    componentAvailableTargetStatus component =
        case componentOptionalStanza $ CD.componentNameToComponent cname of
          -- it is not an optional stanza, so a library, exe or foreign lib
          Nothing | not buildable  -> TargetNotBuildable
                  | otherwise      -> TargetBuildable (elabUnitId elab, cname)
                                                      TargetRequestedByDefault
          -- it is an optional stanza, so a testsuite or benchmark
          Just stanza ->
            case (optStanzaLookup stanza (elabStanzasRequested elab), -- TODO
                  optStanzaSetMember stanza (elabStanzasAvailable elab)) of
              _ | not withinPlan -> TargetNotLocal
              (Just False,   _)  -> TargetDisabledByUser
              (Nothing,  False)  -> TargetDisabledBySolver
              _ | not buildable  -> TargetNotBuildable
              (Just True, True)  -> TargetBuildable (elabUnitId elab, cname)
                                                    TargetRequestedByDefault
              (Nothing,   True)  -> TargetBuildable (elabUnitId elab, cname)
                                                    TargetNotRequestedByDefault
              (Just True, False) ->
                error $ "componentAvailableTargetStatus: impossible; cname="
                     ++ prettyShow cname
      where
        cname      = componentName component
        buildable  = PD.buildable (componentBuildInfo component)
        withinPlan = elabLocalToProject elab
                  || case elabPkgOrComp elab of
                       ElabComponent elabComponent ->
                         compComponentName elabComponent == Just cname
                       ElabPackage _ ->
                         case componentName component of
                           CLibName (LMainLibName) -> True
                           CExeName _              -> True
                           --TODO: what about sub-libs and foreign libs?
                           _                       -> False

-- | Merge component targets that overlap each other.
-- Specifically, when we have
-- multiple targets for the same component and one of them refers to the whole
-- component (rather than a module or file within) then all the other targets
-- for that component are subsumed.
--
-- We also allow for information associated with each component target, and
-- whenever targets subsume each other we aggregate their associated info.
--
nubComponentTargets :: [(ComponentTarget, a)] -> [(ComponentTarget, NonEmpty a)]
nubComponentTargets =
    concatMap (wholeComponentOverrides . map snd)
  . groupBy ((==)    `on` fst)
  . sortBy  (compare `on` fst)
  . map (\t@((ComponentTarget cname _, _)) -> (cname, t))
  . map compatSubComponentTargets
  where
    -- If we're building the whole component then that is the only target we
    -- need, otherwise we can have several targets within the component.
    wholeComponentOverrides :: [(ComponentTarget,  a )]
                            -> [(ComponentTarget, NonEmpty a)]
    wholeComponentOverrides ts =
      case [ ta | ta@(ComponentTarget _ WholeComponent, _) <- ts ] of
        ((t, x):_) ->
              let -- Delete tuple (t, x) from original list to avoid duplicates.
                  -- Use 'deleteBy', to avoid additional Class constraint on
                  -- 'nubComponentTargets'.
                  ts' = deleteBy (\(t1, _) (t2, _) -> t1 == t2) (t, x) ts
              in [ (t, x :| map snd ts') ]
        []         -> [ (t, x :| []) | (t,x) <- ts ]

    -- Not all Cabal Setup.hs versions support sub-component targets, so switch
    -- them over to the whole component
    compatSubComponentTargets :: (ComponentTarget, a) -> (ComponentTarget, a)
    compatSubComponentTargets target@(ComponentTarget cname _subtarget, x)
      | not setupHsSupportsSubComponentTargets
                  = (ComponentTarget cname WholeComponent, x)
      | otherwise = target

    -- Actually the reality is that no current version of Cabal's Setup.hs
    -- build command actually support building specific files or modules.
    setupHsSupportsSubComponentTargets = False
    -- TODO: when that changes, adjust this test, e.g.
    --    | pkgSetupScriptCliVersion >= Version [x,y] []

-- | Does the package have any "ephemeral" targets: a repl target, test,
-- bench or haddock targets, or build targets narrower than a whole component?
pkgHasEphemeralBuildTargets :: ElaboratedConfiguredPackage -> Bool
pkgHasEphemeralBuildTargets elab =
    isJust (elabReplTarget elab)
 || (not . null) (elabTestTargets elab)
 || (not . null) (elabBenchTargets elab)
 || (not . null) (elabHaddockTargets elab)
 || (not . null) [ () | ComponentTarget _ subtarget <- elabBuildTargets elab
                      , subtarget /= WholeComponent ]

-- | The components that we'll build all of, meaning that after they're built
-- we can skip building them again (unlike with building just some modules or
-- other files within a component).
--
elabBuildTargetWholeComponents :: ElaboratedConfiguredPackage
                               -> Set ComponentName
elabBuildTargetWholeComponents elab =
    Set.fromList
      [ cname | ComponentTarget cname WholeComponent <- elabBuildTargets elab ]


------------------------------------------------------------------------------
-- * Install plan pruning
------------------------------------------------------------------------------

-- | How 'pruneInstallPlanToTargets' should interpret the per-package
-- 'ComponentTarget's: as build, repl or haddock targets.
--
data TargetAction = TargetActionConfigure
                  | TargetActionBuild
                  | TargetActionRepl
                  | TargetActionTest
                  | TargetActionBench
                  | TargetActionHaddock

-- | Given a set of per-package\/per-component targets, take the subset of the
-- install plan needed to build those targets. Also, update the package config
-- to specify which optional stanzas to enable, and which targets within each
-- package to build.
--
-- NB: Pruning happens after improvement, which is important because we
-- will prune differently depending on what is already installed (to
-- implement "sticky" test suite enabling behavior).
--
pruneInstallPlanToTargets :: TargetAction
                          -> Map UnitId [ComponentTarget]
                          -> ElaboratedInstallPlan -> ElaboratedInstallPlan
pruneInstallPlanToTargets targetActionType perPkgTargetsMap elaboratedPlan =
    InstallPlan.new (InstallPlan.planIndepGoals elaboratedPlan)
  . Graph.fromDistinctList
    -- We have to do the pruning in two passes
  . pruneInstallPlanPass2
  . pruneInstallPlanPass1
    -- Set the targets that will be the roots for pruning
  . setRootTargets targetActionType perPkgTargetsMap
  . InstallPlan.toList
  $ elaboratedPlan

-- | This is a temporary data type, where we temporarily
-- override the graph dependencies of an 'ElaboratedPackage',
-- so we can take a closure over them. We'll throw out the
-- overridden dependencies when we're done so it's strictly temporary.
--
-- For 'ElaboratedComponent', the cached unit IDs always
-- coincide with the real thing.
data PrunedPackage = PrunedPackage ElaboratedConfiguredPackage [UnitId]

instance Package PrunedPackage where
    packageId (PrunedPackage elab _) = packageId elab

instance HasUnitId PrunedPackage where
    installedUnitId = nodeKey

instance IsNode PrunedPackage where
    type Key PrunedPackage = UnitId
    nodeKey (PrunedPackage elab _)  = nodeKey elab
    nodeNeighbors (PrunedPackage _ deps) = deps

-- | Recover the real package, discarding the temporary dependency override.
fromPrunedPackage :: PrunedPackage -> ElaboratedConfiguredPackage
fromPrunedPackage (PrunedPackage elab _) = elab

-- | Set the build targets based on the user targets (but not rev deps yet).
-- This is required before we can prune anything.
--
setRootTargets :: TargetAction
               -> Map UnitId [ComponentTarget]
               -> [ElaboratedPlanPackage]
               -> [ElaboratedPlanPackage]
setRootTargets targetAction perPkgTargetsMap =
    assert (not (Map.null perPkgTargetsMap)) $
    assert (all (not . null) (Map.elems perPkgTargetsMap)) $

    map (mapConfiguredPackage setElabBuildTargets)
  where
    -- Set the targets we'll build for this package/component. This is just
    -- based on the root targets from the user, not targets implied by reverse
    -- dependencies. Those come in the second pass once we know the rev deps.
    --
    setElabBuildTargets elab =
      case (Map.lookup (installedUnitId elab) perPkgTargetsMap,
            targetAction) of
        (Nothing, _)                        -> elab
        (Just tgts,  TargetActionConfigure) -> elab { elabConfigureTargets = tgts }
        (Just tgts,  TargetActionBuild)     -> elab { elabBuildTargets = tgts }
        (Just tgts,  TargetActionTest)      -> elab { elabTestTargets  = tgts }
        (Just tgts,  TargetActionBench)     -> elab { elabBenchTargets = tgts }
        (Just [tgt], TargetActionRepl)      -> elab { elabReplTarget = Just tgt
                                                    , elabBuildHaddocks = False }
        (Just tgts,  TargetActionHaddock)   ->
          foldr setElabHaddockTargets
                (elab { elabHaddockTargets = tgts
                      , elabBuildHaddocks = True })
                tgts
        (Just _,     TargetActionRepl)      ->
          error "pruneInstallPlanToTargets: multiple repl targets"

    -- For each kind of haddock target, switch on documentation of the
    -- corresponding optional component kind.
    setElabHaddockTargets tgt elab
      | isTestComponentTarget tgt       = elab { elabHaddockTestSuites  = True }
      | isBenchComponentTarget tgt      = elab { elabHaddockBenchmarks  = True }
      | isForeignLibComponentTarget tgt = elab { elabHaddockForeignLibs = True }
      | isExeComponentTarget tgt        = elab { elabHaddockExecutables = True }
      | isSubLibComponentTarget tgt     = elab { elabHaddockInternal    = True }
      | otherwise                       = elab

-- | Assuming we have previously set the root build targets (i.e. the user
-- targets but not rev deps yet), the first pruning pass does two things:
--
-- * A first go at determining which optional stanzas (testsuites, benchmarks)
--   are needed. We have a second go in the next pass.
-- * Take the dependency closure using pruned dependencies. We prune deps that
--   are used only by unneeded optional stanzas. These pruned deps are only
--   used for the dependency closure and are not persisted in this pass.
--
pruneInstallPlanPass1 :: [ElaboratedPlanPackage]
                      -> [ElaboratedPlanPackage]
pruneInstallPlanPass1 pkgs =
    map (mapConfiguredPackage fromPrunedPackage)
        (fromMaybe [] $ Graph.closure graph roots)
  where
    pkgs' = map (mapConfiguredPackage prune) pkgs
    graph = Graph.fromDistinctList pkgs'
    roots = mapMaybe find_root pkgs'

    -- Wrap each configured package with its stanza-pruned dependency list,
    -- which is what the closure above traverses.
    prune elab = PrunedPackage elab' (pruneOptionalDependencies elab')
      where elab' = setDocumentation $ addOptionalStanzas elab

    -- A package is a root for pruning iff it has at least one target set.
    is_root :: PrunedPackage -> Maybe UnitId
    is_root (PrunedPackage elab _) =
      if not $ and [ null (elabConfigureTargets elab)
                   , null (elabBuildTargets elab)
                   , null (elabTestTargets elab)
                   , null (elabBenchTargets elab)
                   , isNothing (elabReplTarget elab)
                   , null (elabHaddockTargets elab)
                   ]
        then Just (installedUnitId elab)
        else Nothing

    find_root (InstallPlan.Configured pkg) = is_root pkg
    -- When using the extra-packages stanza we need to
    -- look at installed packages as well.
    find_root (InstallPlan.Installed pkg)  = is_root pkg
    find_root _ = Nothing

    -- Note [Sticky enabled testsuites]
    -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    -- The testsuite and benchmark targets are somewhat special in that we need
    -- to configure the packages with them enabled, and we need to do that even
    -- if we only want to build one of several testsuites.
    --
    -- There are two cases in which we will enable the testsuites (or
    -- benchmarks): if one of the targets is a testsuite, or if all of the
    -- testsuite dependencies are already cached in the store. The rationale
    -- for the latter is to minimise how often we have to reconfigure due to
    -- the particular targets we choose to build. Otherwise choosing to build
    -- a testsuite target, and then later choosing to build an exe target
    -- would involve unnecessarily reconfiguring the package with testsuites
    -- disabled. Technically this introduces a little bit of stateful
    -- behaviour to make this "sticky", but it should be benign.

    -- Decide whether or not to enable testsuites and benchmarks.
    -- See Note [Sticky enabled testsuites]
    addOptionalStanzas :: ElaboratedConfiguredPackage -> ElaboratedConfiguredPackage
    addOptionalStanzas elab@ElaboratedConfiguredPackage{ elabPkgOrComp = ElabPackage pkg } =
        elab {
            elabPkgOrComp = ElabPackage (pkg { pkgStanzasEnabled = stanzas })
        }
      where
        stanzas :: OptionalStanzaSet
                  -- By default, we enable all stanzas requested by the user,
                  -- as per elabStanzasRequested, done in
                  -- 'elaborateSolverToPackage'
        stanzas = pkgStanzasEnabled pkg
                  -- optionalStanzasRequiredByTargets has to be done at
                  -- prune-time because it depends on 'elabTestTargets'
                  -- et al, which is done by 'setRootTargets' at the
                  -- beginning of pruning.
               <> optionalStanzasRequiredByTargets elab
                  -- optionalStanzasWithDepsAvailable has to be done at
                  -- prune-time because it depends on what packages are
                  -- installed, which is not known until after improvement
                  -- (pruning is done after improvement)
               <> optionalStanzasWithDepsAvailable availablePkgs elab pkg
    addOptionalStanzas elab = elab

    -- For per-component nodes, only keep haddock enabled for the kinds of
    -- components the user asked to document; per-package nodes are untouched.
    setDocumentation :: ElaboratedConfiguredPackage -> ElaboratedConfiguredPackage
    setDocumentation elab@ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent comp } =
      elab {
        elabBuildHaddocks =
            elabBuildHaddocks elab && documentationEnabled (compSolverName comp) elab
      }
      where
        documentationEnabled c =
          case c of
            CD.ComponentLib      -> const True
            CD.ComponentSubLib _ -> elabHaddockInternal
            CD.ComponentFLib _   -> elabHaddockForeignLibs
            CD.ComponentExe _    -> elabHaddockExecutables
            CD.ComponentTest _   -> elabHaddockTestSuites
            CD.ComponentBench _  -> elabHaddockBenchmarks
            CD.ComponentSetup    -> const False
    setDocumentation elab = elab

    -- Calculate package dependencies but cut out those needed only by
    -- optional stanzas that we've determined we will not enable.
    -- These pruned deps are not persisted in this pass since they're based on
    -- the optional stanzas and we'll make further tweaks to the optional
    -- stanzas in the next pass.
    pruneOptionalDependencies :: ElaboratedConfiguredPackage -> [UnitId]
    pruneOptionalDependencies elab@ElaboratedConfiguredPackage{ elabPkgOrComp = ElabComponent _ }
        = InstallPlan.depends elab -- no pruning
    pruneOptionalDependencies ElaboratedConfiguredPackage{ elabPkgOrComp = ElabPackage pkg }
        = (CD.flatDeps . CD.filterDeps keepNeeded) (pkgOrderDependencies pkg)
      where
        keepNeeded (CD.ComponentTest  _) _ = TestStanzas  `optStanzaSetMember` stanzas
        keepNeeded (CD.ComponentBench _) _ = BenchStanzas `optStanzaSetMember` stanzas
        keepNeeded _                     _ = True
        stanzas = pkgStanzasEnabled pkg

    -- The stanzas that must be enabled because some root target refers to a
    -- component that lives inside them.
    optionalStanzasRequiredByTargets :: ElaboratedConfiguredPackage
                                     -> OptionalStanzaSet
    optionalStanzasRequiredByTargets pkg =
      optStanzaSetFromList
        [ stanza
        | ComponentTarget cname _ <- elabBuildTargets pkg
                                  ++ elabTestTargets pkg
                                  ++ elabBenchTargets pkg
                                  ++ maybeToList (elabReplTarget pkg)
                                  ++ elabHaddockTargets pkg
        , stanza <- maybeToList $
                    componentOptionalStanza $
                    CD.componentNameToComponent cname
        ]

    -- The units already installed (pre-existing in the plan).
    availablePkgs =
      Set.fromList
        [ installedUnitId pkg
        | InstallPlan.PreExisting pkg <- pkgs ]

-- | Given a set of already installed packages @availablePkgs@,
-- determine the set of available optional stanzas from @pkg@
-- which have all of their dependencies already installed. This is used
-- to implement "sticky" testsuites, where once we have installed
-- all of the deps needed for the test suite, we go ahead and
-- enable it always.
optionalStanzasWithDepsAvailable :: Set UnitId
                                 -> ElaboratedConfiguredPackage
                                 -> ElaboratedPackage
                                 -> OptionalStanzaSet
optionalStanzasWithDepsAvailable availablePkgs elab pkg =
    optStanzaSetFromList
      [ stanza
      | stanza <- optStanzaSetToList (elabStanzasAvailable elab)
      , let deps :: [UnitId]
            deps = CD.select (optionalStanzaDeps stanza)
                             -- TODO: probably need to select other
                             -- dep types too eventually
                             (pkgOrderDependencies pkg)
      , all (`Set.member` availablePkgs) deps
      ]
  where
    optionalStanzaDeps TestStanzas  (CD.ComponentTest  _) = True
    optionalStanzaDeps BenchStanzas (CD.ComponentBench _) = True
    optionalStanzaDeps _            _                     = False

-- The second pass does three things:
--
-- * A second go at deciding which optional stanzas to enable.
-- * Prune the dependencies based on the final choice of optional stanzas.
-- * Extend the targets within each package to build, now we know the reverse
--   dependencies, ie we know which libs are needed as deps by other packages.
--
-- Achieving sticky behaviour with enabling\/disabling optional stanzas is
-- tricky. The first approximation was handled by the first pass above, but
-- it's not quite enough. That pass will enable stanzas if all of the deps
-- of the optional stanza are already installed /in the store/. That's important
-- but it does not account for dependencies that get built inplace as part of
-- the project. We cannot take those inplace build deps into account in the
-- pruning pass however because we don't yet know which ones we're going to
-- build. Once we do know, we can have another go and enable stanzas that have
-- all their deps available. Now we can consider all packages in the pruned
-- plan to be available, including ones we already decided to build from
-- source.
--
-- Deciding which targets to build depends on knowing which packages have
-- reverse dependencies (ie are needed). This requires the result of first
-- pass, which is another reason we have to split it into two passes.
--
-- Note that just because we might enable testsuites or benchmarks (in the
-- first or second pass) doesn't mean that we build all (or even any) of them.
-- That depends on which targets we picked in the first pass.
--
pruneInstallPlanPass2 :: [ElaboratedPlanPackage]
                      -> [ElaboratedPlanPackage]
pruneInstallPlanPass2 pkgs =
    map (mapConfiguredPackage setStanzasDepsAndTargets) pkgs
  where
    setStanzasDepsAndTargets elab =
        elab {
          elabBuildTargets = ordNub
                           $ elabBuildTargets elab
                          ++ libTargetsRequiredForRevDeps
                          ++ exeTargetsRequiredForRevDeps,
          elabPkgOrComp =
            case elabPkgOrComp elab of
              ElabPackage pkg ->
                let stanzas = pkgStanzasEnabled pkg
                           <> optionalStanzasWithDepsAvailable availablePkgs elab pkg
                    keepNeeded (CD.ComponentTest  _) _ = TestStanzas  `optStanzaSetMember` stanzas
                    keepNeeded (CD.ComponentBench _) _ = BenchStanzas `optStanzaSetMember` stanzas
                    keepNeeded _                     _ = True
                in ElabPackage $ pkg {
                  pkgStanzasEnabled = stanzas,
                  pkgLibDependencies = CD.filterDeps keepNeeded (pkgLibDependencies pkg),
                  pkgExeDependencies = CD.filterDeps keepNeeded (pkgExeDependencies pkg),
                  pkgExeDependencyPaths = CD.filterDeps keepNeeded (pkgExeDependencyPaths pkg)
                }
              r@(ElabComponent _) -> r
        }
      where
        libTargetsRequiredForRevDeps =
          [ ComponentTarget (CLibName Cabal.defaultLibName) WholeComponent
          | installedUnitId elab `Set.member` hasReverseLibDeps
          ]
        exeTargetsRequiredForRevDeps =
          -- TODO: allow requesting executable with different name
          -- than package name
          [ ComponentTarget (Cabal.CExeName
                             $ packageNameToUnqualComponentName
                             $ packageName $ elabPkgSourceId elab)
                            WholeComponent
          | installedUnitId elab `Set.member` hasReverseExeDeps
          ]

    -- Everything in the pruned plan counts as available now, including
    -- inplace packages we've decided to build from source.
    availablePkgs :: Set UnitId
    availablePkgs = Set.fromList (map installedUnitId pkgs)

    hasReverseLibDeps :: Set UnitId
    hasReverseLibDeps =
      Set.fromList [ depid
                   | InstallPlan.Configured pkg <- pkgs
                   , depid <- elabOrderLibDependencies pkg ]

    hasReverseExeDeps :: Set UnitId
    hasReverseExeDeps =
      Set.fromList [ depid
                   | InstallPlan.Configured pkg <- pkgs
                   , depid <- elabOrderExeDependencies pkg ]

-- | Apply a function to the source package in a plan node, leaving
-- pre-existing (installed) nodes untouched.
mapConfiguredPackage :: (srcpkg -> srcpkg')
                     -> InstallPlan.GenericPlanPackage ipkg srcpkg
                     -> InstallPlan.GenericPlanPackage ipkg srcpkg'
mapConfiguredPackage f (InstallPlan.Configured pkg) =
  InstallPlan.Configured (f pkg)
mapConfiguredPackage f (InstallPlan.Installed pkg) =
  InstallPlan.Installed (f pkg)
mapConfiguredPackage _ (InstallPlan.PreExisting pkg) =
  InstallPlan.PreExisting pkg

------------------------------------
-- Support for --only-dependencies
--

-- | Try to remove the given targets from the install plan.
--
-- This is not always possible.
--
pruneInstallPlanToDependencies :: Set UnitId
                               -> ElaboratedInstallPlan
                               -> Either CannotPruneDependencies
                                         ElaboratedInstallPlan
pruneInstallPlanToDependencies pkgTargets installPlan =
    assert (all (isJust . InstallPlan.lookup installPlan)
                (Set.toList pkgTargets)) $

    fmap (InstallPlan.new (InstallPlan.planIndepGoals installPlan))
  . checkBrokenDeps
  . Graph.fromDistinctList
  . filter (\pkg -> installedUnitId pkg `Set.notMember` pkgTargets)
  . InstallPlan.toList
  $ installPlan
  where
    -- Our strategy is to remove the packages we don't want and then check
    -- if the remaining graph is broken or not, ie any packages with dangling
    -- dependencies. If there are then we cannot prune the given targets.
    checkBrokenDeps :: Graph.Graph ElaboratedPlanPackage
                    -> Either CannotPruneDependencies
                              (Graph.Graph ElaboratedPlanPackage)
    checkBrokenDeps graph =
      case Graph.broken graph of
        []             -> Right graph
        brokenPackages ->
          Left $ CannotPruneDependencies
            [ (pkg, missingDeps)
            | (pkg, missingDepIds) <- brokenPackages
            , let missingDeps = mapMaybe lookupDep missingDepIds
            ]
          where
            -- lookup in the original unpruned graph
            lookupDep = InstallPlan.lookup installPlan

-- | It is not always possible to prune to only the dependencies of a set of
-- targets. It may be the case that removing a package leaves something else
-- that still needed the pruned package.
--
-- This lists all the packages that would be broken, and their dependencies
-- that would be missing if we did prune.
--
newtype CannotPruneDependencies =
        CannotPruneDependencies [(ElaboratedPlanPackage,
                                  [ElaboratedPlanPackage])]
  deriving (Show)


---------------------------
-- Setup.hs script policy
--

-- Handling for Setup.hs scripts is a bit tricky, part of it lives in the
-- solver phase, and part in the elaboration phase. We keep the helper
-- functions for both phases together here so at least you can see all of it
-- in one place.
--
-- There are four major cases for Setup.hs handling:
--
--  1. @build-type@ Custom with a @custom-setup@ section
--  2. @build-type@ Custom without a @custom-setup@ section
--  3. @build-type@ not Custom with @cabal-version >  $our-cabal-version@
--  4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@
--
-- It's also worth noting that packages specifying @cabal-version: >= 1.23@
-- or later that have @build-type@ Custom will always have a @custom-setup@
-- section. Therefore in case 2, the specified @cabal-version@ will always be
-- less than 1.23.
--
-- In cases 1 and 2 we obviously have to build an external Setup.hs script,
-- while in case 4 we can use the internal library API.
--
-- TODO: In case 3 we should fail. We don't know how to talk to
-- newer ./Setup.hs
--
-- data SetupScriptStyle = ...  -- see ProjectPlanning.Types

-- | Work out the 'SetupScriptStyle' given the package description.
--
packageSetupScriptStyle :: PD.PackageDescription -> SetupScriptStyle
packageSetupScriptStyle pkg
  | buildType == PD.Custom
  , Just setupbi <- PD.setupBuildInfo pkg -- does have a custom-setup stanza
  , not (PD.defaultSetupDepends setupbi)  -- but not one we added internally
  = SetupCustomExplicitDeps

  | buildType == PD.Custom
  , Just setupbi <- PD.setupBuildInfo pkg -- we get this case post-solver as
  , PD.defaultSetupDepends setupbi        -- the solver fills in the deps
  = SetupCustomImplicitDeps

  | buildType == PD.Custom
  , Nothing <- PD.setupBuildInfo pkg      -- we get this case pre-solver
  = SetupCustomImplicitDeps

  -- here we should fail.
  | PD.specVersion pkg > cabalSpecLatest  -- one cabal-install is built against
  = SetupNonCustomExternalLib

  | otherwise
  = SetupNonCustomInternalLib
  where
    buildType = PD.buildType pkg

-- | Part of our Setup.hs handling policy is implemented by getting the solver
-- to work out setup dependencies for packages. The solver already handles
-- packages that explicitly specify setup dependencies, but we can also tell
-- the solver to treat other packages as if they had setup dependencies.
-- That's what this function does, it gets called by the solver for all
-- packages that don't already have setup dependencies.
--
-- The dependencies we want to add is different for each 'SetupScriptStyle'.
--
-- Note that adding default deps means these deps are actually /added/ to the
-- packages that we get out of the solver in the 'SolverInstallPlan'. Making
-- implicit setup deps explicit is a problem in the post-solver stages because
-- we still need to distinguish the case of explicit and implicit setup deps.
-- See 'rememberImplicitSetupDeps'.
--
-- Note in addition to adding default setup deps, we also use
-- 'addSetupCabalMinVersionConstraint' (in 'planPackages') to require
-- @Cabal >= 1.20@ for Setup scripts.
--
defaultSetupDeps :: Compiler -> Platform
                 -> PD.PackageDescription
                 -> Maybe [Dependency]
defaultSetupDeps compiler platform pkg =
    case packageSetupScriptStyle pkg of

      -- For packages with build type custom that do not specify explicit
      -- setup dependencies, we add a dependency on Cabal and a number
      -- of other packages.
      SetupCustomImplicitDeps ->
        Just $ [ Dependency depPkgname anyVersion mainLibSet
               | depPkgname <- legacyCustomSetupPkgs compiler platform ]
            ++ [ Dependency cabalPkgname cabalConstraint mainLibSet
               | packageName pkg /= cabalPkgname ]
        where
          -- The Cabal dep is slightly special:
          -- * We omit the dep for the Cabal lib itself, since it bootstraps.
          -- * We constrain it to be < 1.25
          --
          -- Note: we also add a global constraint to require Cabal >= 1.20
          -- for Setup scripts (see use addSetupCabalMinVersionConstraint).
          --
          cabalConstraint   = orLaterVersion (csvToVersion (PD.specVersion pkg))
                              `intersectVersionRanges`
                              earlierVersion cabalCompatMaxVer
          -- The idea here is that at some point we will make significant
          -- breaking changes to the Cabal API that Setup.hs scripts use.
          -- So for old custom Setup scripts that do not specify explicit
          -- constraints, we constrain them to use a compatible Cabal version.
          cabalCompatMaxVer = mkVersion [1,25]

      -- For other build types (like Simple) if we still need to compile an
      -- external Setup.hs, it'll be one of the simple ones that only depends
      -- on Cabal and base.
      SetupNonCustomExternalLib ->
        Just [ Dependency cabalPkgname cabalConstraint mainLibSet
             , Dependency basePkgname  anyVersion      mainLibSet ]
        where
          cabalConstraint = orLaterVersion (csvToVersion (PD.specVersion pkg))

      -- The internal setup wrapper method has no deps at all.
      SetupNonCustomInternalLib -> Just []

      -- This case gets ruled out by the caller, planPackages, see the note
      -- above in the SetupCustomImplicitDeps case.
      SetupCustomExplicitDeps ->
        error $ "defaultSetupDeps: called for a package with explicit "
             ++ "setup deps: " ++ prettyShow (packageId pkg)
  where
    -- we require one less
    --
    -- This maps e.g. CabalSpecV3_0 to mkVersion [2,5]
    csvToVersion :: CabalSpecVersion -> Version
    csvToVersion = mkVersion . cabalSpecMinimumLibraryVersion

-- | Work out which version of the Cabal we will be using to talk to the
-- Setup.hs interface for this package.
--
-- This depends somewhat on the 'SetupScriptStyle' but most cases are a result
-- of what the solver picked for us, based on the explicit setup deps or the
-- ones added implicitly by 'defaultSetupDeps'.
--
packageSetupScriptSpecVersion :: SetupScriptStyle
                              -> PD.PackageDescription
                              -> Graph.Graph NonSetupLibDepSolverPlanPackage
                              -> ComponentDeps [SolverId]
                              -> Version

-- We're going to be using the internal Cabal library, so the spec version of
-- that is simply the version of the Cabal library that cabal-install has been
-- built with.
packageSetupScriptSpecVersion SetupNonCustomInternalLib _ _ _ =
    cabalVersion

-- If we happen to be building the Cabal lib itself then because that
-- bootstraps itself then we use the version of the lib we're building.
packageSetupScriptSpecVersion SetupCustomImplicitDeps pkg _ _
  | packageName pkg == cabalPkgname
  = packageVersion pkg

-- In all other cases we have a look at what version of the Cabal lib the
-- solver picked. Or if it didn't depend on Cabal at all (which is very rare)
-- then we look at the .cabal file to see what spec version it declares.
packageSetupScriptSpecVersion _ pkg libDepGraph deps =
    case find ((cabalPkgname ==) . packageName) setupLibDeps of
      Just dep -> packageVersion dep
      Nothing  -> mkVersion (cabalSpecMinimumLibraryVersion (PD.specVersion pkg))
  where
    -- the closure of the setup-component deps within the (non-setup-lib) plan
    setupLibDeps = map packageId $ fromMaybe [] $
                   Graph.closure libDepGraph (CD.setupDeps deps)


cabalPkgname, basePkgname :: PackageName
cabalPkgname = mkPackageName "Cabal"
basePkgname  = mkPackageName "base"


-- | The set of packages that old custom Setup.hs scripts were implicitly
-- allowed to depend on, before explicit @custom-setup@ stanzas existed.
-- Varies by OS (Win32 vs unix) and by compiler (GHC-only packages).
legacyCustomSetupPkgs :: Compiler -> Platform -> [PackageName]
legacyCustomSetupPkgs compiler (Platform _ os) =
    map mkPackageName $
        [ "array", "base", "binary", "bytestring", "containers"
        , "deepseq", "directory", "filepath", "pretty"
        , "process", "time", "transformers" ]
     ++ [ "Win32" | os == Windows ]
     ++ [ "unix"  | os /= Windows ]
     ++ [ "ghc-prim"         | isGHC ]
     ++ [ "template-haskell" | isGHC ]
     ++ [ "old-time"         | notGHC710 ]
  where
    isGHC     = compilerCompatFlavor GHC compiler
    -- True for GHC versions before 7.10 (i.e. <= 7.9 development series)
    notGHC710 = case compilerCompatVersion GHC compiler of
                  Nothing -> False
                  Just v  -> v <= mkVersion [7,9]

-- The other aspects of our Setup.hs policy lives here where we decide on
-- the 'SetupScriptOptions'.
--
-- Our current policy for the 'SetupCustomImplicitDeps' case is that we
-- try to make the implicit deps cover everything, and we don't allow the
-- compiler to pick up other deps. This may or may not be sustainable, and
-- we might have to allow the deps to be non-exclusive, but that itself would
-- be tricky since we would have to allow the Setup access to all the packages
-- in the store and local dbs.

setupHsScriptOptions :: ElaboratedReadyPackage
                     -> ElaboratedInstallPlan
                     -> ElaboratedSharedConfig
                     -> DistDirLayout
                     -> FilePath
                     -> FilePath
                     -> Bool
                     -> Lock
                     -> SetupScriptOptions
-- TODO: Fix this so custom is a separate component.  Custom can ALWAYS
-- be a separate component!!!
setupHsScriptOptions (ReadyPackage elab@ElaboratedConfiguredPackage{..})
                     plan ElaboratedSharedConfig{..} distdir srcdir builddir
                     isParallelBuild cacheLock =
    SetupScriptOptions {
      useCabalVersion          = thisVersion elabSetupScriptCliVersion,
      useCabalSpecVersion      = Just elabSetupScriptCliVersion,
      useCompiler              = Just pkgConfigCompiler,
      usePlatform              = Just pkgConfigPlatform,
      usePackageDB             = elabSetupPackageDBStack,
      usePackageIndex          = Nothing,
      -- only main-library deps are passed to the Setup.hs build
      useDependencies          = [ (uid, srcid)
                                 | ConfiguredId srcid
                                     (Just (CLibName LMainLibName)) uid
                                     <- elabSetupDependencies elab ],
      useDependenciesExclusive = True,
      useVersionMacros         = elabSetupScriptStyle == SetupCustomExplicitDeps,
      useProgramDb             = pkgConfigCompilerProgs,
      useDistPref              = builddir,
      useLoggingHandle         = Nothing, -- this gets set later
      useWorkingDir            = Just srcdir,
      useExtraPathEnv          = elabExeDependencyPaths elab,
      useExtraEnvOverrides     = dataDirsEnvironmentForPlan distdir plan,
      useWin32CleanHack        = False,   --TODO: [required eventually]
      forceExternalSetupMethod = isParallelBuild,
      setupCacheLock           = Just cacheLock,
      isInteractive            = False
    }

-- | To be used for the input for elaborateInstallPlan.
--
-- TODO: [code cleanup] make InstallDirs.defaultInstallDirs pure.
--
userInstallDirTemplates :: Compiler
                        -> IO InstallDirs.InstallDirTemplates
userInstallDirTemplates compiler = do
    InstallDirs.defaultInstallDirs
                  (compilerFlavor compiler)
                  True  -- user install
                  False -- unused

-- | Install dirs for a store package identified by 'InstalledPackageId';
-- convenience wrapper over 'storePackageInstallDirs''.
storePackageInstallDirs :: StoreDirLayout
                        -> CompilerId
                        -> InstalledPackageId
                        -> InstallDirs.InstallDirs FilePath
storePackageInstallDirs storeDirLayout compid ipkgid =
    storePackageInstallDirs' storeDirLayout compid $ newSimpleUnitId ipkgid

-- | The simplified fixed install-dir layout used for packages installed
-- into the nix-style store.
storePackageInstallDirs' :: StoreDirLayout
                         -> CompilerId
                         -> UnitId
                         -> InstallDirs.InstallDirs FilePath
storePackageInstallDirs' StoreDirLayout{ storePackageDirectory
                                       , storeDirectory }
                         compid unitid =
    InstallDirs.InstallDirs {..}
  where
    store        = storeDirectory compid
    prefix       = storePackageDirectory compid unitid
    bindir       = prefix </> "bin"
    libdir       = prefix </> "lib"
    libsubdir    = ""
    -- Note: on macOS, we place libraries into
    -- @store/lib@ to work around the load
    -- command size limit of macOSs mach-o linker.
    -- See also @PackageHash.hashedInstalledPackageIdVeryShort@
    dynlibdir    | buildOS == OSX = store </> "lib"
                 | otherwise      = libdir
    flibdir      = libdir
    libexecdir   = prefix </> "libexec"
    libexecsubdir= ""
    includedir   = libdir </> "include"
    datadir      = prefix </> "share"
    datasubdir   = ""
    docdir       = datadir </> "doc"
    mandir       = datadir </> "man"
    htmldir      = docdir </> "html"
    haddockdir   = htmldir
    sysconfdir   = prefix </> "etc"


-- | Choose install dirs for a package: ordinary defaults for inplace builds,
-- the simplified store layout otherwise.
computeInstallDirs :: StoreDirLayout
                   -> InstallDirs.InstallDirTemplates
                   -> ElaboratedSharedConfig
                   -> ElaboratedConfiguredPackage
                   -> InstallDirs.InstallDirs FilePath
computeInstallDirs storeDirLayout defaultInstallDirs elaboratedShared elab
  | elabBuildStyle elab == BuildInplaceOnly
  -- use the ordinary default install dirs
  = (InstallDirs.absoluteInstallDirs
       (elabPkgSourceId elab)
       (elabUnitId elab)
       (compilerInfo (pkgConfigCompiler elaboratedShared))
       InstallDirs.NoCopyDest
       (pkgConfigPlatform elaboratedShared)
       defaultInstallDirs) {

      -- absoluteInstallDirs sets these as 'undefined' but we have
      -- to use them as "Setup.hs configure" args
      InstallDirs.libsubdir     = "",
      InstallDirs.libexecsubdir = "",
      InstallDirs.datasubdir    = ""
    }

  | otherwise
  -- use special simplified install dirs
  = storePackageInstallDirs'
      storeDirLayout
      (compilerId (pkgConfigCompiler elaboratedShared))
      (elabUnitId elab)


--TODO: [code cleanup] perhaps reorder this code
-- based on the ElaboratedInstallPlan + ElaboratedSharedConfig,
-- make the various Setup.hs {configure,build,copy} flags


setupHsConfigureFlags :: ElaboratedReadyPackage
                      -> ElaboratedSharedConfig
                      -> Verbosity
                      -> FilePath
                      -> Cabal.ConfigFlags
setupHsConfigureFlags (ReadyPackage elab@ElaboratedConfiguredPackage{..})
                      sharedConfig@ElaboratedSharedConfig{..}
                      verbosity builddir =
    -- NB: RecordWildCards — every @config*@ binding below becomes the
    -- correspondingly named field of the 'Cabal.ConfigFlags' record.
    sanityCheckElaboratedConfiguredPackage sharedConfig elab
        (Cabal.ConfigFlags {..})
  where
    configArgs                = mempty -- unused, passed via args
    configDistPref            = toFlag builddir
    configCabalFilePath       = mempty
    configVerbosity           = toFlag verbosity

    configInstantiateWith     = Map.toList elabInstantiatedWith

    configDeterministic       = mempty -- doesn't matter, configIPID/configCID overrides

    configIPID                = case elabPkgOrComp of
                                  ElabPackage pkg
                                    -> toFlag (prettyShow (pkgInstalledId pkg))
                                  ElabComponent _ -> mempty
    configCID                 = case elabPkgOrComp of
                                  ElabPackage _ -> mempty
                                  ElabComponent _ -> toFlag elabComponentId

    configProgramPaths        = Map.toList elabProgramPaths
    configProgramArgs
        | {- elabSetupScriptCliVersion < mkVersion [1,24,3] -} True
          -- workaround (upstream issue link lost in extraction)
          --
          -- It turns out, that even with Cabal 2.0, there's still cases such as e.g.
          -- custom Setup.hs scripts calling out to GHC even when going via
          -- @runProgram ghcProgram@, as e.g. happy does in its
          -- (see also the related upstream discussion)
          --
          -- So for now, let's pass the rather harmless and idempotent
          -- `-hide-all-packages` flag to all invocations (which has
          -- the benefit that every GHC invocation starts with a
          -- consistently well-defined clean slate) until we find a
          -- better way.
          = Map.toList $
            Map.insertWith (++) "ghc" ["-hide-all-packages"]
            elabProgramArgs
    configProgramPathExtra    = toNubList elabProgramPathExtra
    configHcFlavor            = toFlag (compilerFlavor pkgConfigCompiler)
    configHcPath              = mempty -- we use configProgramPaths instead
    configHcPkg               = mempty -- we use configProgramPaths instead

    configVanillaLib          = toFlag elabVanillaLib
    configSharedLib           = toFlag elabSharedLib
    configStaticLib           = toFlag elabStaticLib

    configDynExe              = toFlag elabDynExe
    configFullyStaticExe      = toFlag elabFullyStaticExe
    configGHCiLib             = toFlag elabGHCiLib
    configProfExe             = mempty
    configProfLib             = toFlag elabProfLib
    configProf                = toFlag elabProfExe

    -- configProfDetail is for exe+lib, but overridden by configProfLibDetail
    -- so we specify both so we can specify independently
    configProfDetail          = toFlag elabProfExeDetail
    configProfLibDetail       = toFlag elabProfLibDetail

    configCoverage            = toFlag elabCoverage
    configLibCoverage         = mempty

    configOptimization        = toFlag elabOptimization
    configSplitSections       = toFlag elabSplitSections
    configSplitObjs           = toFlag elabSplitObjs
    configStripExes           = toFlag elabStripExes
    configStripLibs           = toFlag elabStripLibs
    configDebugInfo           = toFlag elabDebugInfo
    configDumpBuildInfo       = toFlag elabDumpBuildInfo

    configConfigurationsFlags = elabFlagAssignment
    configConfigureArgs       = elabConfigureScriptArgs
    configExtraLibDirs        = elabExtraLibDirs
    configExtraLibDirsStatic  = elabExtraLibDirsStatic
    configExtraFrameworkDirs  = elabExtraFrameworkDirs
    configExtraIncludeDirs    = elabExtraIncludeDirs
    configProgPrefix          = maybe mempty toFlag elabProgPrefix
    configProgSuffix          = maybe mempty toFlag elabProgSuffix

    configInstallDirs         = fmap (toFlag . InstallDirs.toPathTemplate)
                                     elabInstallDirs

    -- we only use configDependencies, unless we're talking to an old Cabal
    -- in which case we use configConstraints
    -- NB: This does NOT use InstallPlan.depends, which includes executable
    -- dependencies which should NOT be fed in here (also you don't have
    -- enough info anyway)
    configDependencies        = [ GivenComponent
                                    (packageName srcid)
                                    ln
                                    cid
                                | ConfiguredId srcid mb_cn cid
                                    <- elabLibDependencies elab
                                , let ln = case mb_cn of
                                        Just (CLibName lname) -> lname
                                        Just _  -> error "non-library dependency"
                                        Nothing -> LMainLibName
                                ]
    configConstraints         =
        case elabPkgOrComp of
          ElabPackage _ ->
            [ thisPackageVersionConstraint srcid
            | ConfiguredId srcid _ _uid <- elabLibDependencies elab ]
          ElabComponent _ -> []

    -- explicitly clear, then our package db stack
    -- TODO: [required eventually] have to do this differently for older Cabal versions
    configPackageDBs          = Nothing : map Just elabBuildPackageDBStack

    configTests               = case elabPkgOrComp of
                                  ElabPackage pkg ->
                                    toFlag (TestStanzas `optStanzaSetMember`
                                              pkgStanzasEnabled pkg)
                                  ElabComponent _ -> mempty
    configBenchmarks          = case elabPkgOrComp of
                                  ElabPackage pkg ->
                                    toFlag (BenchStanzas `optStanzaSetMember`
                                              pkgStanzasEnabled pkg)
                                  ElabComponent _ -> mempty

    configExactConfiguration  = toFlag True
    configFlagError           = mempty --TODO: [research required] appears not to be implemented
    configRelocatable         = mempty --TODO: [research required] ???
    configScratchDir          = mempty -- never use
    configUserInstall         = mempty -- don't rely on defaults
    configPrograms_           = mempty -- never use, shouldn't exist
    configUseResponseFiles    = mempty
    configAllowDependingOnPrivateLibs =
        Flag $ not $ libraryVisibilitySupported pkgConfigCompiler


setupHsConfigureArgs :: ElaboratedConfiguredPackage
                     -> [String]
setupHsConfigureArgs (ElaboratedConfiguredPackage
                        { elabPkgOrComp = ElabPackage _ }) = []
setupHsConfigureArgs elab@(ElaboratedConfiguredPackage
                             { elabPkgOrComp = ElabComponent comp }) =
    [showComponentTarget (packageId elab) (ComponentTarget cname WholeComponent)]
  where
    cname = fromMaybe (error "setupHsConfigureArgs: trying to configure setup")
                      (compComponentName comp)


setupHsBuildFlags :: ElaboratedConfiguredPackage
                  -> ElaboratedSharedConfig
                  -> Verbosity
                  -> FilePath
                  -> Cabal.BuildFlags
setupHsBuildFlags _ _ verbosity builddir =
    Cabal.BuildFlags {
      buildProgramPaths = mempty, --unused, set at configure time
      buildProgramArgs  = mempty, --unused, set at configure time
      buildVerbosity    = toFlag verbosity,
      buildDistPref     = toFlag builddir,
      buildNumJobs      = mempty, --TODO: [nice to have] sometimes want to use toFlag (Just numBuildJobs),
      buildArgs         = mempty, -- unused, passed via args not flags
      buildCabalFilePath= mempty
    }


setupHsBuildArgs :: ElaboratedConfiguredPackage -> [String]
setupHsBuildArgs elab@(ElaboratedConfiguredPackage
                         { elabPkgOrComp = ElabPackage _ })
    -- Fix for #3335, don't pass build arguments if it's not supported
    | elabSetupScriptCliVersion elab >= mkVersion [1,17]
    = map (showComponentTarget (packageId elab)) (elabBuildTargets elab)
    | otherwise = []
setupHsBuildArgs (ElaboratedConfiguredPackage
                    { elabPkgOrComp = ElabComponent _ })
    = []


setupHsTestFlags :: ElaboratedConfiguredPackage
                 -> ElaboratedSharedConfig
                 -> Verbosity
                 -> FilePath
                 -> Cabal.TestFlags
setupHsTestFlags (ElaboratedConfiguredPackage{..}) _ verbosity builddir =
    Cabal.TestFlags
      { testDistPref    = toFlag builddir
      , testVerbosity   = toFlag verbosity
      , testMachineLog  = maybe mempty toFlag elabTestMachineLog
      , testHumanLog    = maybe mempty toFlag elabTestHumanLog
      , testShowDetails = maybe (Flag Cabal.Always) toFlag elabTestShowDetails
      , testKeepTix     = toFlag elabTestKeepTix
      , testWrapper     = maybe mempty toFlag elabTestWrapper
      , testFailWhenNoTestSuites = toFlag elabTestFailWhenNoTestSuites
      , testOptions     = elabTestTestOptions
      }

setupHsTestArgs :: ElaboratedConfiguredPackage -> [String]
-- TODO: Does the issue #3335 affects test as well
setupHsTestArgs elab =
    mapMaybe (showTestComponentTarget (packageId elab)) (elabTestTargets elab)


setupHsBenchFlags :: ElaboratedConfiguredPackage
                  -> ElaboratedSharedConfig
                  -> Verbosity
                  -> FilePath
                  -> Cabal.BenchmarkFlags
setupHsBenchFlags (ElaboratedConfiguredPackage{..}) _ verbosity builddir =
    Cabal.BenchmarkFlags
      { benchmarkDistPref  = toFlag builddir
      , benchmarkVerbosity = toFlag verbosity
      , benchmarkOptions   = elabBenchmarkOptions
      }

setupHsBenchArgs :: ElaboratedConfiguredPackage -> [String]
setupHsBenchArgs elab =
    mapMaybe (showBenchComponentTarget (packageId elab)) (elabBenchTargets elab)


setupHsReplFlags :: ElaboratedConfiguredPackage
                 -> ElaboratedSharedConfig
                 -> Verbosity
                 -> FilePath
                 -> Cabal.ReplFlags
setupHsReplFlags _ sharedConfig verbosity builddir =
    Cabal.ReplFlags {
      replProgramPaths = mempty, --unused, set at configure time
      replProgramArgs  = mempty, --unused, set at configure time
      replVerbosity    = toFlag verbosity,
      replDistPref     = toFlag builddir,
      replReload       = mempty, --only used as callback from repl
      replReplOptions  = pkgConfigReplOptions sharedConfig --runtime override for repl flags
    }


setupHsReplArgs :: ElaboratedConfiguredPackage -> [String]
setupHsReplArgs elab =
    maybe [] (\t -> [showComponentTarget (packageId elab) t]) (elabReplTarget elab)
    --TODO: should be able to give multiple modules in one component


setupHsCopyFlags :: ElaboratedConfiguredPackage
                 -> ElaboratedSharedConfig
                 -> Verbosity
                 -> FilePath
                 -> FilePath
                 -> Cabal.CopyFlags
setupHsCopyFlags _ _ verbosity builddir destdir =
    Cabal.CopyFlags {
      copyArgs      = [], -- TODO: could use this to only copy what we enabled
      copyDest      = toFlag (InstallDirs.CopyTo destdir),
      copyDistPref  = toFlag builddir,
      copyVerbosity = toFlag verbosity,
      copyCabalFilePath = mempty
    }


setupHsRegisterFlags :: ElaboratedConfiguredPackage
                     -> ElaboratedSharedConfig
                     -> Verbosity
                     -> FilePath
                     -> FilePath
                     -> Cabal.RegisterFlags
setupHsRegisterFlags ElaboratedConfiguredPackage{..} _
                     verbosity builddir pkgConfFile =
    Cabal.RegisterFlags {
      regPackageDB   = mempty,  -- misfeature
      regGenScript   = mempty,  -- never use
      regGenPkgConf  = toFlag (Just pkgConfFile),
      regInPlace     = case elabBuildStyle of
                         BuildInplaceOnly -> toFlag True
                         _                -> toFlag False,
      regPrintId     = mempty,  -- never use
      regDistPref    = toFlag builddir,
      regArgs        = [],
      regVerbosity   = toFlag verbosity,
      regCabalFilePath = mempty
    }


setupHsHaddockFlags :: ElaboratedConfiguredPackage
                    -> ElaboratedSharedConfig
                    -> Verbosity
                    -> FilePath
                    -> Cabal.HaddockFlags
setupHsHaddockFlags (ElaboratedConfiguredPackage{..}) _ verbosity builddir =
    Cabal.HaddockFlags {
      haddockProgramPaths  = mempty, --unused, set at configure time
      haddockProgramArgs   = mempty, --unused, set at configure time
      haddockHoogle        = toFlag elabHaddockHoogle,
      haddockHtml          = toFlag elabHaddockHtml,
      haddockHtmlLocation  = maybe mempty toFlag elabHaddockHtmlLocation,
      haddockForHackage    = toFlag elabHaddockForHackage,
      haddockForeignLibs   = toFlag elabHaddockForeignLibs,
      haddockExecutables   = toFlag elabHaddockExecutables,
      haddockTestSuites    = toFlag elabHaddockTestSuites,
      haddockBenchmarks    = toFlag elabHaddockBenchmarks,
      haddockInternal      = toFlag elabHaddockInternal,
      haddockCss           = maybe mempty toFlag elabHaddockCss,
      haddockLinkedSource  = toFlag elabHaddockLinkedSource,
      haddockQuickJump     = toFlag elabHaddockQuickJump,
      haddockHscolourCss   = maybe mempty toFlag elabHaddockHscolourCss,
      haddockContents      = maybe mempty toFlag elabHaddockContents,
      haddockDistPref      = toFlag builddir,
      haddockKeepTempFiles = mempty, --TODO: from build settings
      haddockVerbosity     = toFlag verbosity,
      haddockCabalFilePath = mempty,
      haddockArgs          = mempty
    }

setupHsHaddockArgs :: ElaboratedConfiguredPackage -> [String]
-- TODO: Does the issue #3335 affects test as well
setupHsHaddockArgs elab =
    map (showComponentTarget (packageId elab)) (elabHaddockTargets elab)

{-
setupHsTestFlags :: ElaboratedConfiguredPackage
                 -> ElaboratedSharedConfig
                 -> Verbosity
                 -> FilePath
                 -> Cabal.TestFlags
setupHsTestFlags _ _ verbosity builddir =
    Cabal.TestFlags {
    }
-}

------------------------------------------------------------------------------
-- * Sharing installed packages
------------------------------------------------------------------------------

--
-- Nix style store management for tarball packages
--
-- So here's our strategy:
--
-- We use a per-user nix-style hashed store, but /only/ for tarball packages.
-- So that includes packages from hackage repos (and other http and local
-- tarballs). For packages in local directories we do not register them into
-- the shared store by default, we just build them locally inplace.
--
-- The reason we do it like this is that it's easy to make stable hashes for
-- tarball packages, and these packages benefit most from sharing. By contrast
-- unpacked dir packages are harder to hash and they tend to change more
-- frequently so there's less benefit to sharing them.
--
-- When using the nix store approach we have to run the solver *without*
-- looking at the packages installed in the store, just at the source packages
-- (plus core\/global installed packages). Then we do a post-processing pass
-- to replace configured packages in the plan with pre-existing ones, where
-- possible. Where possible of course means where the nix-style package hash
-- equals one that's already in the store.
--
-- One extra wrinkle is that unless we know package tarball hashes upfront, we
-- will have to download the tarballs to find their hashes.
-- So we have two
-- options: delay replacing source with pre-existing installed packages until
-- the point during the execution of the install plan where we have the
-- tarball, or try to do as much up-front as possible and then check again
-- during plan execution. The former isn't great because we would end up
-- telling users we're going to re-install loads of packages when in fact we
-- would just share them. It'd be better to give as accurate a prediction as
-- we can. The latter is better for users, but we do still have to check
-- during plan execution because it's important that we don't replace existing
-- installed packages even if they have the same package hash, because we
-- don't guarantee ABI stability.

-- TODO: [required eventually] for safety of concurrent installs, we must make sure we register but
-- not replace installed packages with ghc-pkg.

-- | Collect all the inputs that feed into a package's store hash: its
-- source hash, its direct (relevant) dependencies and its configuration.
-- Only valid for packages that have a source hash; errors otherwise.
packageHashInputs :: ElaboratedSharedConfig
                  -> ElaboratedConfiguredPackage
                  -> PackageHashInputs
packageHashInputs
    pkgshared
    elab@(ElaboratedConfiguredPackage {
            elabPkgSourceHash = Just srchash
          }) =
    PackageHashInputs {
      pkgHashPkgId         = packageId elab,
      pkgHashComponent     =
        case elabPkgOrComp elab of
          ElabPackage _ -> Nothing
          ElabComponent comp -> Just (compSolverName comp),
      pkgHashSourceHash    = srchash,
      pkgHashPkgConfigDeps = Set.fromList (elabPkgConfigDependencies elab),
      pkgHashDirectDeps    =
        case elabPkgOrComp elab of
          ElabPackage (ElaboratedPackage{..}) ->
            Set.fromList $
              [ confInstId dep
              | dep <- CD.select relevantDeps pkgLibDependencies ] ++
              [ confInstId dep
              | dep <- CD.select relevantDeps pkgExeDependencies ]
          ElabComponent comp ->
            Set.fromList (map confInstId
                              (compLibDependencies comp
                               ++ compExeDependencies comp)),
      pkgHashOtherConfig   = packageHashConfigInputs pkgshared elab
    }
  where
    -- Obviously the main deps are relevant
    relevantDeps CD.ComponentLib        = True
    relevantDeps (CD.ComponentSubLib _) = True
    relevantDeps (CD.ComponentFLib _)   = True
    relevantDeps (CD.ComponentExe _)    = True
    -- Setup deps can affect the Setup.hs behaviour and thus what is built
    relevantDeps  CD.ComponentSetup     = True
    -- However testsuites and benchmarks do not get installed and should not
    -- affect the result, so we do not include them.
    relevantDeps (CD.ComponentTest _)   = False
    relevantDeps (CD.ComponentBench _)  = False

packageHashInputs _ pkg =
    error $ "packageHashInputs: only for packages with source hashes. "
         ++ prettyShow (packageId pkg)

-- | The subset of a package's configuration that participates in its store
-- hash, taken from the normalised form of the configured package.
packageHashConfigInputs :: ElaboratedSharedConfig
                        -> ElaboratedConfiguredPackage
                        -> PackageHashConfigInputs
packageHashConfigInputs shared@ElaboratedSharedConfig{..} pkg =
    PackageHashConfigInputs {
      pkgHashCompilerId          = compilerId pkgConfigCompiler,
      pkgHashPlatform            = pkgConfigPlatform,
      pkgHashFlagAssignment      = elabFlagAssignment,
      pkgHashConfigureScriptArgs = elabConfigureScriptArgs,
      pkgHashVanillaLib          = elabVanillaLib,
      pkgHashSharedLib           = elabSharedLib,
      pkgHashDynExe              = elabDynExe,
      pkgHashFullyStaticExe      = elabFullyStaticExe,
      pkgHashGHCiLib             = elabGHCiLib,
      pkgHashProfLib             = elabProfLib,
      pkgHashProfExe             = elabProfExe,
      pkgHashProfLibDetail       = elabProfLibDetail,
      pkgHashProfExeDetail       = elabProfExeDetail,
      pkgHashCoverage            = elabCoverage,
      pkgHashOptimization        = elabOptimization,
      pkgHashSplitSections       = elabSplitSections,
      pkgHashSplitObjs           = elabSplitObjs,
      pkgHashStripLibs           = elabStripLibs,
      pkgHashStripExes           = elabStripExes,
      pkgHashDebugInfo           = elabDebugInfo,
      pkgHashProgramArgs         = elabProgramArgs,
      pkgHashExtraLibDirs        = elabExtraLibDirs,
      pkgHashExtraFrameworkDirs  = elabExtraFrameworkDirs,
      pkgHashExtraIncludeDirs    = elabExtraIncludeDirs,
      pkgHashProgPrefix          = elabProgPrefix,
      pkgHashProgSuffix          = elabProgSuffix,
      pkgHashPackageDbs          = elabPackageDbs,
      pkgHashDocumentation       = elabBuildHaddocks,
      pkgHashHaddockHoogle       = elabHaddockHoogle,
      pkgHashHaddockHtml         = elabHaddockHtml,
      pkgHashHaddockHtmlLocation = elabHaddockHtmlLocation,
      pkgHashHaddockForeignLibs  = elabHaddockForeignLibs,
      pkgHashHaddockExecutables  = elabHaddockExecutables,
      pkgHashHaddockTestSuites   = elabHaddockTestSuites,
      pkgHashHaddockBenchmarks   = elabHaddockBenchmarks,
      pkgHashHaddockInternal     = elabHaddockInternal,
      pkgHashHaddockCss          = elabHaddockCss,
      pkgHashHaddockLinkedSource = elabHaddockLinkedSource,
      pkgHashHaddockQuickJump    = elabHaddockQuickJump,
      pkgHashHaddockContents     = elabHaddockContents
    }
  where
    -- bind the elab* fields from the normalised package, not the raw one
    ElaboratedConfiguredPackage{..} = normaliseConfiguredPackage shared pkg

-- | Given the 'InstalledPackageIndex' for a nix-style package store, and an
-- 'ElaboratedInstallPlan', replace configured source packages by installed
-- packages from the store whenever they exist.
--
-- NOTE(review): the first argument is actually a @Set UnitId@ of installed
-- units, despite the mention of 'InstalledPackageIndex' above.
improveInstallPlanWithInstalledPackages :: Set UnitId
                                        -> ElaboratedInstallPlan
                                        -> ElaboratedInstallPlan
improveInstallPlanWithInstalledPackages installedPkgIdSet =
    InstallPlan.installed canPackageBeImproved
  where
    canPackageBeImproved pkg =
      installedUnitId pkg `Set.member` installedPkgIdSet
    --TODO: sanity checks:
    -- * the installed package must have the expected deps etc
    -- * the installed package must not be broken, valid dep closure

    --TODO: decide what to do if we encounter broken installed packages,
    -- since overwriting is never safe.


-- Path construction
------

-- | The path to the directory that contains a specific executable.
-- NB: For inplace NOT InstallPaths.bindir installDirs; for an
-- inplace build those values are utter nonsense.  So we
-- have to guess where the directory is going to be.
-- Fortunately this is "stable" part of Cabal API.
-- But the way we get the build directory is A HORRIBLE
-- HACK.
binDirectoryFor
  :: DistDirLayout
  -> ElaboratedSharedConfig
  -> ElaboratedConfiguredPackage
  -> FilePath
  -> FilePath
binDirectoryFor layout config package exe = case elabBuildStyle package of
  BuildAndInstall  -> installedBinDirectory package
  BuildInplaceOnly -> inplaceBinRoot layout config package </> exe

-- | The @bin@ install directory of an executable, once the
-- package has been built and installed.
installedBinDirectory :: ElaboratedConfiguredPackage -> FilePath
installedBinDirectory = InstallDirs.bindir .
elabInstallDirs -- | The path to the @build@ directory for an inplace build. inplaceBinRoot :: DistDirLayout -> ElaboratedSharedConfig -> ElaboratedConfiguredPackage -> FilePath inplaceBinRoot layout config package = distBuildDirectory layout (elabDistDirParams config package) "build" cabal-install-3.8.1.0/src/Distribution/Client/ProjectPlanning/0000755000000000000000000000000007346545000022341 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/ProjectPlanning/Types.hs0000644000000000000000000010727007346545000024010 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE TypeFamilies #-} -- | Types used while planning how to build everything in a project. -- -- Primarily this is the 'ElaboratedInstallPlan'. -- module Distribution.Client.ProjectPlanning.Types ( SolverInstallPlan, -- * Elaborated install plan types ElaboratedInstallPlan, normaliseConfiguredPackage, ElaboratedConfiguredPackage(..), elabDistDirParams, elabExeDependencyPaths, elabLibDependencies, elabOrderLibDependencies, elabExeDependencies, elabOrderExeDependencies, elabSetupDependencies, elabPkgConfigDependencies, elabInplaceDependencyBuildCacheFiles, elabRequiresRegistration, dataDirsEnvironmentForPlan, elabPlanPackageName, elabConfiguredName, elabComponentName, ElaboratedPackageOrComponent(..), ElaboratedComponent(..), ElaboratedPackage(..), pkgOrderDependencies, ElaboratedPlanPackage, ElaboratedSharedConfig(..), ElaboratedReadyPackage, BuildStyle(..), CabalFileText, -- * Build targets ComponentTarget(..), showComponentTarget, showTestComponentTarget, showBenchComponentTarget, SubComponentTarget(..), isSubLibComponentTarget, isForeignLibComponentTarget, isExeComponentTarget, isTestComponentTarget, isBenchComponentTarget, componentOptionalStanza, -- * Setup script SetupScriptStyle(..), ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.TargetSelector ( 
SubComponentTarget(..) ) import Distribution.Client.PackageHash import Distribution.Client.Types import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.InstallPlan ( GenericInstallPlan, GenericPlanPackage(..) ) import Distribution.Client.SolverInstallPlan ( SolverInstallPlan ) import Distribution.Client.DistDirLayout import Distribution.Backpack import Distribution.Backpack.ModuleShape import Distribution.Verbosity (normal) import Distribution.Types.ComponentRequestedSpec import Distribution.Types.PkgconfigVersion import Distribution.Types.PackageDescription (PackageDescription(..)) import Distribution.Package import Distribution.System import qualified Distribution.PackageDescription as Cabal import Distribution.InstalledPackageInfo (InstalledPackageInfo) import Distribution.Simple.Compiler import Distribution.Simple.Build.PathsModule (pkgPathEnvVar) import qualified Distribution.Simple.BuildTarget as Cabal import Distribution.Simple.Program import Distribution.ModuleName (ModuleName) import Distribution.Simple.LocalBuildInfo ( ComponentName(..), LibraryName(..) ) import qualified Distribution.Simple.InstallDirs as InstallDirs import Distribution.Simple.InstallDirs (PathTemplate) import Distribution.Simple.Setup ( HaddockTarget, TestShowDetails, DumpBuildInfo (..), ReplOptions ) import Distribution.Version import qualified Distribution.Solver.Types.ComponentDeps as CD import Distribution.Solver.Types.ComponentDeps (ComponentDeps) import Distribution.Solver.Types.OptionalStanza import Distribution.Compat.Graph (IsNode(..)) import Distribution.Simple.Utils (ordNub) import qualified Data.Map as Map import qualified Data.ByteString.Lazy as LBS import qualified Data.Monoid as Mon import System.FilePath (()) -- | The combination of an elaborated install plan plus a -- 'ElaboratedSharedConfig' contains all the details necessary to be able -- to execute the plan without having to make further policy decisions. 
--
-- It does not include dynamic elements such as resources (such as http
-- connections).
--
type ElaboratedInstallPlan
   = GenericInstallPlan InstalledPackageInfo
                        ElaboratedConfiguredPackage

type ElaboratedPlanPackage
   = GenericPlanPackage InstalledPackageInfo
                        ElaboratedConfiguredPackage

-- | User-friendly display string for an 'ElaboratedPlanPackage'.
--
-- At 'normal' verbosity this is the package name; at higher verbosities
-- the full 'UnitId' is shown instead.
elabPlanPackageName :: Verbosity -> ElaboratedPlanPackage -> String
elabPlanPackageName verbosity (PreExisting ipkg)
    | verbosity <= normal = prettyShow (packageName ipkg)
    | otherwise           = prettyShow (installedUnitId ipkg)
elabPlanPackageName verbosity (Configured elab)
    = elabConfiguredName verbosity elab
elabPlanPackageName verbosity (Installed elab)
    = elabConfiguredName verbosity elab

--TODO: [code cleanup] decide if we really need this, there's not much in it, and in principle
--      even platform and compiler could be different if we're building things
--      like a server + client with ghc + ghcjs
data ElaboratedSharedConfig
   = ElaboratedSharedConfig {
       pkgConfigPlatform      :: Platform,
       pkgConfigCompiler      :: Compiler, --TODO: [code cleanup] replace with CompilerInfo

       -- | The programs that the compiler configured (e.g. for GHC, the progs
       -- ghc & ghc-pkg). Once constructed, only the 'configuredPrograms' are
       -- used.
       pkgConfigCompilerProgs :: ProgramDb,

       pkgConfigReplOptions   :: ReplOptions
     }
  deriving (Show, Generic, Typeable)
  --TODO: [code cleanup] no Eq instance

instance Binary ElaboratedSharedConfig
instance Structured ElaboratedSharedConfig

data ElaboratedConfiguredPackage
   = ElaboratedConfiguredPackage {
       -- | The 'UnitId' which uniquely identifies this item in a build plan
       elabUnitId :: UnitId,

       elabComponentId :: ComponentId,
       elabInstantiatedWith :: Map ModuleName Module,
       elabLinkedInstantiatedWith :: Map ModuleName OpenModule,

       -- | This is true if this is an indefinite package, or this is a
       -- package with no signatures.  (Notably, it's not true for
       -- instantiated packages.)  The motivation for this is if you ask
       -- to build @foo-indef@, this probably means that you want to
       -- typecheck it, NOT that you want to rebuild all of the various
       -- instantiations of it.
       elabIsCanonical :: Bool,

       -- | The 'PackageId' of the originating package
       elabPkgSourceId :: PackageId,

       -- | Shape of the package/component, for Backpack.
       elabModuleShape :: ModuleShape,

       -- | A total flag assignment for the package.
       -- TODO: Actually this can be per-component if we drop
       -- all flags that don't affect a component.
       elabFlagAssignment :: Cabal.FlagAssignment,

       -- | The original default flag assignment, used only for reporting.
       elabFlagDefaults :: Cabal.FlagAssignment,

       elabPkgDescription :: Cabal.PackageDescription,

       -- | Where the package comes from, e.g. tarball, local dir etc. This
       --   is not the same as where it may be unpacked to for the build.
       elabPkgSourceLocation :: PackageLocation (Maybe FilePath),

       -- | The hash of the source, e.g. the tarball. We don't have this for
       -- local source dir packages.
       elabPkgSourceHash :: Maybe PackageSourceHash,

       -- | Is this package one of the ones specified by location in the
       -- project file? (As opposed to a dependency, or a named package
       -- pulled in)
       elabLocalToProject :: Bool,

       -- | Are we going to build and install this package to the store, or
       -- are we going to build it and register it locally.
       elabBuildStyle :: BuildStyle,

       -- | Another way of phrasing 'pkgStanzasAvailable'.
       elabEnabledSpec :: ComponentRequestedSpec,

       -- | Which optional stanzas (ie testsuites, benchmarks) can be built.
       -- This means the solver produced a plan that has them available.
       -- This doesn't necessary mean we build them by default.
       elabStanzasAvailable :: OptionalStanzaSet,

       -- | Which optional stanzas the user explicitly asked to enable or
       -- to disable. This tells us which ones we build by default, and
       -- helps with error messages when the user asks to build something
       -- they explicitly disabled.
       --
       -- TODO: The 'Bool' here should be refined into an ADT with three
       -- cases: NotRequested, ExplicitlyRequested and
       -- ImplicitlyRequested.  A stanza is explicitly requested if
       -- the user asked, for this *specific* package, that the stanza
       -- be enabled; it's implicitly requested if the user asked for
       -- all global packages to have this stanza enabled.  The
       -- difference between an explicit and implicit request is
       -- error reporting behavior: if a user asks for tests to be
       -- enabled for a specific package that doesn't have any tests,
       -- we should warn them about it, but we shouldn't complain
       -- that a user enabled tests globally, and some local packages
       -- just happen not to have any tests.  (But perhaps we should
       -- warn if ALL local packages don't have any tests.)
       elabStanzasRequested :: OptionalStanzaMap (Maybe Bool),

       elabPackageDbs                    :: [Maybe PackageDB],
       elabSetupPackageDBStack           :: PackageDBStack,
       elabBuildPackageDBStack           :: PackageDBStack,
       elabRegisterPackageDBStack        :: PackageDBStack,
       elabInplaceSetupPackageDBStack    :: PackageDBStack,
       elabInplaceBuildPackageDBStack    :: PackageDBStack,
       elabInplaceRegisterPackageDBStack :: PackageDBStack,

       elabPkgDescriptionOverride :: Maybe CabalFileText,

       -- TODO: make per-component variants of these flags
       elabVanillaLib     :: Bool,
       elabSharedLib      :: Bool,
       elabStaticLib      :: Bool,
       elabDynExe         :: Bool,
       elabFullyStaticExe :: Bool,
       elabGHCiLib        :: Bool,
       elabProfLib        :: Bool,
       elabProfExe        :: Bool,
       elabProfLibDetail  :: ProfDetailLevel,
       elabProfExeDetail  :: ProfDetailLevel,
       elabCoverage       :: Bool,
       elabOptimization   :: OptimisationLevel,
       elabSplitObjs      :: Bool,
       elabSplitSections  :: Bool,
       elabStripLibs      :: Bool,
       elabStripExes      :: Bool,
       elabDebugInfo      :: DebugInfoLevel,
       elabDumpBuildInfo  :: DumpBuildInfo,

       elabProgramPaths        :: Map String FilePath,
       elabProgramArgs         :: Map String [String],
       elabProgramPathExtra    :: [FilePath],
       elabConfigureScriptArgs :: [String],
       elabExtraLibDirs        :: [FilePath],
       elabExtraLibDirsStatic  :: [FilePath],
       elabExtraFrameworkDirs  :: [FilePath],
       elabExtraIncludeDirs    :: [FilePath],
       elabProgPrefix          :: Maybe PathTemplate,
       elabProgSuffix          :: Maybe PathTemplate,

       elabInstallDirs :: InstallDirs.InstallDirs FilePath,

       elabHaddockHoogle       :: Bool,
       elabHaddockHtml         :: Bool,
       elabHaddockHtmlLocation :: Maybe String,
       elabHaddockForeignLibs  :: Bool,
       elabHaddockForHackage   :: HaddockTarget,
       elabHaddockExecutables  :: Bool,
       elabHaddockTestSuites   :: Bool,
       elabHaddockBenchmarks   :: Bool,
       elabHaddockInternal     :: Bool,
       elabHaddockCss          :: Maybe FilePath,
       elabHaddockLinkedSource :: Bool,
       elabHaddockQuickJump    :: Bool,
       elabHaddockHscolourCss  :: Maybe FilePath,
       elabHaddockContents     :: Maybe PathTemplate,

       elabTestMachineLog           :: Maybe PathTemplate,
       elabTestHumanLog             :: Maybe PathTemplate,
       elabTestShowDetails          :: Maybe TestShowDetails,
       elabTestKeepTix              :: Bool,
       elabTestWrapper              :: Maybe FilePath,
       elabTestFailWhenNoTestSuites :: Bool,
       elabTestTestOptions          :: [PathTemplate],

       elabBenchmarkOptions :: [PathTemplate],

       -- Setup.hs related things:

       -- | One of four modes for how we build and interact with the Setup.hs
       -- script, based on whether it's a build-type Custom, with or without
       -- explicit deps and the cabal spec version the .cabal file needs.
       elabSetupScriptStyle :: SetupScriptStyle,

       -- | The version of the Cabal command line interface that we are using
       -- for this package. This is typically the version of the Cabal lib
       -- that the Setup.hs is built against.
       --
       -- TODO: We might want to turn this into a enum,
       -- yet different enum than 'CabalSpecVersion'.
       elabSetupScriptCliVersion :: Version,

       -- Build time related:
       elabConfigureTargets :: [ComponentTarget],
       elabBuildTargets     :: [ComponentTarget],
       elabTestTargets      :: [ComponentTarget],
       elabBenchTargets     :: [ComponentTarget],
       elabReplTarget       :: Maybe ComponentTarget,
       elabHaddockTargets   :: [ComponentTarget],

       elabBuildHaddocks :: Bool,

       --pkgSourceDir ? -- currently passed in later because they can use temp locations
       --pkgBuildDir  ? -- but could in principle still have it here, with optional instr to use temp loc

       -- | Component/package specific information
       elabPkgOrComp :: ElaboratedPackageOrComponent
   }
  deriving (Eq, Show, Generic, Typeable)

-- | Normalise the per-program argument lists recorded in the package
-- (via 'programNormaliseArgs' for programs known to the configured
-- 'ProgramDb'), dropping entries whose normalised argument list is empty.
normaliseConfiguredPackage :: ElaboratedSharedConfig
                           -> ElaboratedConfiguredPackage
                           -> ElaboratedConfiguredPackage
normaliseConfiguredPackage ElaboratedSharedConfig{pkgConfigCompilerProgs} pkg =
    pkg { elabProgramArgs = Map.mapMaybeWithKey lookupFilter (elabProgramArgs pkg) }
  where
    knownProgramDb = addKnownPrograms builtinPrograms pkgConfigCompilerProgs

    pkgDesc :: PackageDescription
    pkgDesc = elabPkgDescription pkg

    -- An empty argument list is represented as an absent map entry.
    removeEmpty :: [String] -> Maybe [String]
    removeEmpty [] = Nothing
    removeEmpty xs = Just xs

    -- Programs unknown to the db pass their arguments through unchanged.
    lookupFilter :: String -> [String] -> Maybe [String]
    lookupFilter n args = removeEmpty $ case lookupKnownProgram n knownProgramDb of
        Just p  -> programNormaliseArgs p (getVersion p) pkgDesc args
        Nothing -> args

    getVersion :: Program -> Maybe Version
    getVersion p = lookupProgram p knownProgramDb >>= programVersion

-- | The package/component contains/is a library and so must be registered
elabRequiresRegistration :: ElaboratedConfiguredPackage -> Bool
elabRequiresRegistration elab =
    case elabPkgOrComp elab of
        ElabComponent comp ->
            case compComponentName comp of
                Just cn -> is_lib cn && build_target
                _       -> False
        ElabPackage pkg ->
            -- Tricky!  Not only do we have to test if the user selected
            -- a library as a build target, we also have to test if
            -- the library was TRANSITIVELY depended upon, since we will
            -- also require a register in this case.
            --
            -- NB: It would have been far nicer to just unconditionally
            -- register in all cases, but some Custom Setups will fall
            -- over if you try to do that, ESPECIALLY if there actually is
            -- a library but they hadn't built it.
            --
            -- However, as the case of `cpphs-1.20.8` has shown in
            -- #5379, in cases when a monolithic package gets
            -- installed due to its executable components
            -- (i.e. exe:cpphs) into the store we *have* to register
            -- if there's a buildable public library (i.e. lib:cpphs)
            -- that was built and installed into the same store folder
            -- as otherwise this will cause build failures once a
            -- target actually depends on lib:cpphs.
            build_target || (elabBuildStyle elab == BuildAndInstall &&
                             Cabal.hasPublicLib (elabPkgDescription elab))
                -- the next sub-condition below is currently redundant
                -- (see discussion in #5604 for more details), but it's
                -- being kept intentionally here as a safeguard because if
                -- internal libraries ever start working with
                -- non-per-component builds this condition won't be
                -- redundant anymore.
                || any (depends_on_lib pkg) (elabBuildTargets elab)
  where
    depends_on_lib pkg (ComponentTarget cn _) =
        not (null (CD.select (== CD.componentNameToComponent cn)
                             (pkgDependsOnSelfLib pkg)))
    build_target =
        if not (null (elabBuildTargets elab))
            then any is_lib_target (elabBuildTargets elab)
            -- Empty build targets mean we build /everything/;
            -- that means we have to look more carefully to see
            -- if there is anything to register
            else Cabal.hasLibs (elabPkgDescription elab)
    -- NB: this means we DO NOT reregister if you just built a
    -- single file
    is_lib_target (ComponentTarget cn WholeComponent) = is_lib cn
    is_lib_target _ = False
    is_lib (CLibName _) = True
    is_lib _ = False

-- | Construct the environment needed for the data files to work.
-- This consists of a separate @*_datadir@ variable for each
-- inplace package in the plan.
dataDirsEnvironmentForPlan :: DistDirLayout
                           -> ElaboratedInstallPlan
                           -> [(String, Maybe FilePath)]
dataDirsEnvironmentForPlan distDirLayout =
    catMaybes
  . fmap (InstallPlan.foldPlanPackage
            (const Nothing)
            (dataDirEnvVarForPackage distDirLayout))
  . InstallPlan.toList

-- | Construct an environment variable that points
-- the package's datadir to its correct location.
-- This might be:
-- * 'Just' the package's source directory plus the data subdirectory
--   for inplace packages.
-- * 'Nothing' for packages installed in the store (the path was
--   already included in the package at install/build time).
dataDirEnvVarForPackage :: DistDirLayout
                        -> ElaboratedConfiguredPackage
                        -> Maybe (String, Maybe FilePath)
dataDirEnvVarForPackage distDirLayout pkg =
  case elabBuildStyle pkg
  of BuildAndInstall  -> Nothing
     BuildInplaceOnly -> Just
       ( pkgPathEnvVar (elabPkgDescription pkg) "datadir"
       -- FIX: the source path and the data subdirectory must be joined
       -- with '</>'; without it a FilePath would be applied as a function.
       , Just $ srcPath (elabPkgSourceLocation pkg)
            </> dataDir (elabPkgDescription pkg))
  where
    srcPath (LocalUnpackedPackage path) = path
    srcPath (LocalTarballPackage _path) = unpackedPath
    srcPath (RemoteTarballPackage _uri _localTar) = unpackedPath
    srcPath (RepoTarballPackage _repo _packageId _localTar) = unpackedPath
    srcPath (RemoteSourceRepoPackage _sourceRepo (Just localCheckout)) = localCheckout
    -- TODO: see https://github.com/haskell/cabal/wiki/Potential-Refactors#unresolvedpkgloc
    srcPath (RemoteSourceRepoPackage _sourceRepo Nothing) = error
      "calling dataDirEnvVarForPackage on a not-downloaded repo is an error"
    unpackedPath =
      distUnpackedSrcDirectory distDirLayout $ elabPkgSourceId pkg

instance Package ElaboratedConfiguredPackage where
  packageId = elabPkgSourceId

instance HasConfiguredId ElaboratedConfiguredPackage where
  configuredId elab =
    ConfiguredId (packageId elab) (elabComponentName elab) (elabComponentId elab)

instance HasUnitId ElaboratedConfiguredPackage where
  installedUnitId = elabUnitId

instance IsNode ElaboratedConfiguredPackage where
  type Key ElaboratedConfiguredPackage = UnitId
  nodeKey       = elabUnitId
  nodeNeighbors = elabOrderDependencies

instance Binary ElaboratedConfiguredPackage
instance Structured ElaboratedConfiguredPackage

data ElaboratedPackageOrComponent
   = ElabPackage   ElaboratedPackage
   | ElabComponent ElaboratedComponent
  deriving (Eq, Show, Generic)

instance Binary ElaboratedPackageOrComponent
instance Structured ElaboratedPackageOrComponent

elabComponentName :: ElaboratedConfiguredPackage -> Maybe ComponentName
elabComponentName elab =
    case elabPkgOrComp elab of
      ElabPackage _      -> Just $ CLibName LMainLibName -- there could be more, but default this
      ElabComponent comp -> compComponentName comp

-- | A user-friendly descriptor for an 'ElaboratedConfiguredPackage'.
elabConfiguredName :: Verbosity -> ElaboratedConfiguredPackage -> String
elabConfiguredName verbosity elab
    | verbosity <= normal
    = (case elabPkgOrComp elab of
        ElabPackage _ -> ""
        ElabComponent comp ->
            case compComponentName comp of
                Nothing                      -> "setup from "
                Just (CLibName LMainLibName) -> ""
                Just cname                   -> prettyShow cname ++ " from ")
      ++ prettyShow (packageId elab)
    | otherwise
    = prettyShow (elabUnitId elab)

elabDistDirParams :: ElaboratedSharedConfig -> ElaboratedConfiguredPackage -> DistDirParams
elabDistDirParams shared elab = DistDirParams {
      distParamUnitId = installedUnitId elab,
      distParamComponentId = elabComponentId elab,
      distParamPackageId = elabPkgSourceId elab,
      distParamComponentName = case elabPkgOrComp elab of
        ElabComponent comp -> compComponentName comp
        ElabPackage _      -> Nothing,
      distParamCompilerId = compilerId (pkgConfigCompiler shared),
      distParamPlatform = pkgConfigPlatform shared,
      distParamOptimization = elabOptimization elab
    }

-- | The full set of dependencies which dictate what order we
-- need to build things in the install plan: "order dependencies"
-- balls everything together. This is mostly only useful for
-- ordering; if you are, for example, trying to compute what
-- @--dependency@ flags to pass to a Setup script, you need to
-- use 'elabLibDependencies'.  This method is the same as
-- 'nodeNeighbors'.
--
-- NB: this method DOES include setup deps.
elabOrderDependencies :: ElaboratedConfiguredPackage -> [UnitId]
elabOrderDependencies elab =
    case elabPkgOrComp elab of
        -- Important not to have duplicates: otherwise InstallPlan gets
        -- confused.
        ElabPackage pkg    -> ordNub (CD.flatDeps (pkgOrderDependencies pkg))
        ElabComponent comp -> compOrderDependencies comp

-- | Like 'elabOrderDependencies', but only returns dependencies on
-- libraries.
elabOrderLibDependencies :: ElaboratedConfiguredPackage -> [UnitId]
elabOrderLibDependencies elab =
    case elabPkgOrComp elab of
        ElabPackage pkg    -> map (newSimpleUnitId . confInstId) $
                              ordNub $ CD.flatDeps (pkgLibDependencies pkg)
        ElabComponent comp -> compOrderLibDependencies comp

-- | The library dependencies (i.e., the libraries we depend on, NOT
-- the dependencies of the library), NOT including setup dependencies.
-- These are passed to the @Setup@ script via @--dependency@.
elabLibDependencies :: ElaboratedConfiguredPackage -> [ConfiguredId]
elabLibDependencies elab =
    case elabPkgOrComp elab of
        ElabPackage pkg    -> ordNub (CD.nonSetupDeps (pkgLibDependencies pkg))
        ElabComponent comp -> compLibDependencies comp

-- | Like 'elabOrderDependencies', but only returns dependencies on
-- executables.  (This coincides with 'elabExeDependencies'.)
elabOrderExeDependencies :: ElaboratedConfiguredPackage -> [UnitId]
elabOrderExeDependencies =
    map newSimpleUnitId . elabExeDependencies

-- | The executable dependencies (i.e., the executables we depend on);
-- these are the executables we must add to the PATH before we invoke
-- the setup script.
elabExeDependencies :: ElaboratedConfiguredPackage -> [ComponentId]
elabExeDependencies elab = map confInstId $
    case elabPkgOrComp elab of
        ElabPackage pkg    -> CD.nonSetupDeps (pkgExeDependencies pkg)
        ElabComponent comp -> compExeDependencies comp

-- | This returns the paths of all the executables we depend on; we
-- must add these paths to PATH before invoking the setup script.
-- (This is usually what you want, not 'elabExeDependencies', if you
-- actually want to build something.)
elabExeDependencyPaths :: ElaboratedConfiguredPackage -> [FilePath]
elabExeDependencyPaths ElaboratedConfiguredPackage{elabPkgOrComp = ElabPackage pkg} =
    map snd (CD.nonSetupDeps (pkgExeDependencyPaths pkg))
elabExeDependencyPaths ElaboratedConfiguredPackage{elabPkgOrComp = ElabComponent comp} =
    map snd (compExeDependencyPaths comp)

-- | The setup dependencies (the library dependencies of the setup executable;
-- note that it is not legal for setup scripts to have executable
-- dependencies at the moment.)
elabSetupDependencies :: ElaboratedConfiguredPackage -> [ConfiguredId]
elabSetupDependencies ElaboratedConfiguredPackage{elabPkgOrComp = ElabPackage pkg} =
    CD.setupDeps (pkgLibDependencies pkg)
-- TODO: Custom setups not supported for components yet.  When
-- they are, need to do this differently
elabSetupDependencies ElaboratedConfiguredPackage{elabPkgOrComp = ElabComponent _} =
    []

elabPkgConfigDependencies :: ElaboratedConfiguredPackage
                          -> [(PkgconfigName, Maybe PkgconfigVersion)]
elabPkgConfigDependencies elab =
    case elabPkgOrComp elab of
      ElabPackage pkg    -> pkgPkgConfigDependencies pkg
      ElabComponent comp -> compPkgConfigDependencies comp

-- | The cache files of all our inplace dependencies which,
-- when updated, require us to rebuild.  See #4202 for
-- more details.  Essentially, this is a list of filepaths
-- that, if our dependencies get rebuilt, will themselves
-- get updated.
--
-- Note: the hash of these cache files gets built into
-- the build cache ourselves, which means that we end
-- up tracking transitive dependencies!
--
-- Note: This tracks the "build" cache file, but not
-- "registration" or "config" cache files.  Why not?
-- Arguably we should...
--
-- Note: This is a bit of a hack, because it is not really
-- the hashes of the SOURCES of our (transitive) dependencies
-- that we should use to decide whether or not to rebuild,
-- but the output BUILD PRODUCTS.  The strategy we use
-- here will never work if we want to implement unchanging
-- rebuilds.
elabInplaceDependencyBuildCacheFiles
    :: DistDirLayout
    -> ElaboratedSharedConfig
    -> ElaboratedInstallPlan
    -> ElaboratedConfiguredPackage
    -> [FilePath]
elabInplaceDependencyBuildCacheFiles layout sconf plan root_elab =
    go =<< InstallPlan.directDeps plan (nodeKey root_elab)
  where
    -- Pre-existing (installed) packages contribute no cache files;
    -- only 'BuildInplaceOnly' dependencies are tracked.
    go = InstallPlan.foldPlanPackage (const []) $ \elab -> do
            guard (elabBuildStyle elab == BuildInplaceOnly)
            return $ distPackageCacheFile layout (elabDistDirParams sconf elab) "build"

-- | Some extra metadata associated with an
-- 'ElaboratedConfiguredPackage' which indicates that the "package"
-- in question is actually a single component to be built.  Arguably
-- it would be clearer if there were an ADT which branched into
-- package work items and component work items, but I've structured
-- it this way to minimize change to the existing code (which I
-- don't feel qualified to rewrite.)
data ElaboratedComponent
   = ElaboratedComponent {
    -- | The name of the component to be built according to the solver
    compSolverName :: CD.Component,
    -- | The name of the component to be built.  Nothing if
    -- it's a setup dep.
    compComponentName :: Maybe ComponentName,
    -- | The *external* library dependencies of this component.  We
    -- pass this to the configure script.
    compLibDependencies :: [ConfiguredId],
    -- | In a component prior to instantiation, this list specifies
    -- the 'OpenUnitId's which, after instantiation, are the
    -- actual dependencies of this package.  Note that this does
    -- NOT include signature packages, which do not turn into real
    -- ordering dependencies when we instantiate.  This is intended to be
    -- a purely temporary field, to carry some information to the
    -- instantiation phase. It's more precise than
    -- 'compLibDependencies', and also stores information about internal
    -- dependencies.
    compLinkedLibDependencies :: [OpenUnitId],
    -- | The executable dependencies of this component (including
    -- internal executables).
    compExeDependencies :: [ConfiguredId],
    -- | The @pkg-config@ dependencies of the component
    compPkgConfigDependencies :: [(PkgconfigName, Maybe PkgconfigVersion)],
    -- | The paths all our executable dependencies will be installed
    -- to once they are installed.
    compExeDependencyPaths :: [(ConfiguredId, FilePath)],
    -- | The UnitIds of the libraries (identifying elaborated packages/
    -- components) that must be built before this project.  This
    -- is used purely for ordering purposes.  It can contain both
    -- references to definite and indefinite packages; an indefinite
    -- UnitId indicates that we must typecheck that indefinite package
    -- before we can build this one.
    compOrderLibDependencies :: [UnitId]
   }
  deriving (Eq, Show, Generic)

instance Binary ElaboratedComponent
instance Structured ElaboratedComponent

-- | See 'elabOrderDependencies'.
compOrderDependencies :: ElaboratedComponent -> [UnitId]
compOrderDependencies comp =
    compOrderLibDependencies comp
 ++ compOrderExeDependencies comp

-- | See 'elabOrderExeDependencies'.
compOrderExeDependencies :: ElaboratedComponent -> [UnitId]
compOrderExeDependencies = map (newSimpleUnitId . confInstId) . compExeDependencies

data ElaboratedPackage
   = ElaboratedPackage {
       pkgInstalledId :: InstalledPackageId,

       -- | The exact dependencies (on other plan packages)
       --
       pkgLibDependencies :: ComponentDeps [ConfiguredId],

       -- | Components which depend (transitively) on an internally
       -- defined library.  These are used by 'elabRequiresRegistration',
       -- to determine if a user-requested build is going to need
       -- a library registration
       --
       pkgDependsOnSelfLib :: ComponentDeps [()],

       -- | Dependencies on executable packages.
       --
       pkgExeDependencies :: ComponentDeps [ConfiguredId],

       -- | Paths where executable dependencies live.
       --
       pkgExeDependencyPaths :: ComponentDeps [(ConfiguredId, FilePath)],

       -- | Dependencies on @pkg-config@ packages.
       -- NB: this is NOT per-component (although it could be)
       -- because Cabal library does not track per-component
       -- pkg-config depends; it always does them all at once.
       --
       pkgPkgConfigDependencies :: [(PkgconfigName, Maybe PkgconfigVersion)],

       -- | Which optional stanzas (ie testsuites, benchmarks) will actually
       -- be enabled during the package configure step.
       pkgStanzasEnabled :: OptionalStanzaSet
     }
  deriving (Eq, Show, Generic)

instance Binary ElaboratedPackage
instance Structured ElaboratedPackage

-- | See 'elabOrderDependencies'.  This gives the unflattened version,
-- which can be useful in some circumstances.
pkgOrderDependencies :: ElaboratedPackage -> ComponentDeps [UnitId]
pkgOrderDependencies pkg =
    fmap (map (newSimpleUnitId . confInstId)) (pkgLibDependencies pkg)
    `Mon.mappend`
    fmap (map (newSimpleUnitId . confInstId)) (pkgExeDependencies pkg)

-- | This is used in the install plan to indicate how the package will be
-- built.
--
data BuildStyle =
    -- | The classic approach where the package is built, then the files
    -- installed into some location and the result registered in a package db.
    --
    -- If the package came from a tarball then it's built in a temp dir and
    -- the results discarded.
    BuildAndInstall

    -- | The package is built, but the files are not installed anywhere,
    -- rather the build dir is kept and the package is registered inplace.
    --
    -- Such packages can still subsequently be installed.
    --
    -- Typically 'BuildAndInstall' packages will only depend on other
    -- 'BuildAndInstall' style packages and not on 'BuildInplaceOnly' ones.
    --
  | BuildInplaceOnly
  deriving (Eq, Show, Generic)

instance Binary BuildStyle
instance Structured BuildStyle

-- 'BuildInplaceOnly' dominates: if either operand is inplace the
-- combination is inplace.
instance Semigroup BuildStyle where
    BuildInplaceOnly <> _ = BuildInplaceOnly
    _ <> BuildInplaceOnly = BuildInplaceOnly
    _ <> _                = BuildAndInstall
instance Monoid BuildStyle where
    mempty  = BuildAndInstall
    mappend = (<>)

type CabalFileText = LBS.ByteString

type ElaboratedReadyPackage = GenericReadyPackage ElaboratedConfiguredPackage

---------------------------
-- Build targets
--

-- | Specific targets within a package or component to act on e.g. to build,
-- haddock or open a repl.
--
data ComponentTarget = ComponentTarget ComponentName SubComponentTarget
  deriving (Eq, Ord, Show, Generic)

instance Binary ComponentTarget
instance Structured ComponentTarget

-- | Unambiguously render a 'ComponentTarget', e.g., to pass
-- to a Cabal Setup script.
showComponentTarget :: PackageId -> ComponentTarget -> String
showComponentTarget pkgid =
    Cabal.showBuildTarget pkgid . toBuildTarget
  where
    toBuildTarget :: ComponentTarget -> Cabal.BuildTarget
    toBuildTarget (ComponentTarget cname subtarget) =
      case subtarget of
        WholeComponent     -> Cabal.BuildTargetComponent cname
        ModuleTarget mname -> Cabal.BuildTargetModule    cname mname
        FileTarget   fname -> Cabal.BuildTargetFile      cname fname

showTestComponentTarget :: PackageId -> ComponentTarget -> Maybe String
showTestComponentTarget _ (ComponentTarget (CTestName n) _) = Just $ prettyShow n
showTestComponentTarget _ _                                 = Nothing

isTestComponentTarget :: ComponentTarget -> Bool
isTestComponentTarget (ComponentTarget (CTestName _) _) = True
isTestComponentTarget _                                 = False

showBenchComponentTarget :: PackageId -> ComponentTarget -> Maybe String
showBenchComponentTarget _ (ComponentTarget (CBenchName n) _) = Just $ prettyShow n
showBenchComponentTarget _ _                                  = Nothing

isBenchComponentTarget :: ComponentTarget -> Bool
isBenchComponentTarget (ComponentTarget (CBenchName _) _) = True
isBenchComponentTarget _                                  = False

isForeignLibComponentTarget :: ComponentTarget -> Bool
isForeignLibComponentTarget (ComponentTarget (CFLibName _) _) = True
isForeignLibComponentTarget _                                 = False

isExeComponentTarget :: ComponentTarget -> Bool
isExeComponentTarget (ComponentTarget (CExeName _) _ ) = True
isExeComponentTarget _                                 = False

isSubLibComponentTarget :: ComponentTarget -> Bool
isSubLibComponentTarget (ComponentTarget (CLibName (LSubLibName _)) _) = True
isSubLibComponentTarget _                                              = False

componentOptionalStanza :: CD.Component -> Maybe OptionalStanza
componentOptionalStanza (CD.ComponentTest _)  = Just TestStanzas
componentOptionalStanza (CD.ComponentBench _) = Just BenchStanzas
componentOptionalStanza _                     = Nothing

---------------------------
-- Setup.hs script policy
--

-- | There are four major cases for Setup.hs handling:
--
--  1. @build-type@ Custom with a @custom-setup@ section
--  2. @build-type@ Custom without a @custom-setup@ section
--  3. @build-type@ not Custom with @cabal-version >  $our-cabal-version@
--  4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@
--
-- It's also worth noting that packages specifying @cabal-version: >= 1.23@
-- or later that have @build-type@ Custom will always have a @custom-setup@
-- section. Therefore in case 2, the specified @cabal-version@ will always be
-- less than 1.23.
--
-- In cases 1 and 2 we obviously have to build an external Setup.hs script,
-- while in case 4 we can use the internal library API. In case 3 we also have
-- to build an external Setup.hs script because the package needs a later
-- Cabal lib version than we can support internally.
--
data SetupScriptStyle = SetupCustomExplicitDeps
                      | SetupCustomImplicitDeps
                      | SetupNonCustomExternalLib
                      | SetupNonCustomInternalLib
  deriving (Eq, Show, Generic, Typeable)

instance Binary SetupScriptStyle
instance Structured SetupScriptStyle
cabal-install-3.8.1.0/src/Distribution/Client/RebuildMonad.hs0000644000000000000000000002522507346545000022153 0ustar0000000000000000
{-# LANGUAGE GeneralizedNewtypeDeriving, ScopedTypeVariables, BangPatterns #-}

-- | An abstraction for re-running actions if values or files have changed.
--
-- This is not a full-blown make-style incremental build system, it's a bit
-- more ad-hoc than that, but it's easier to integrate with existing code.
--
-- It's a convenient interface to the "Distribution.Client.FileMonitor"
-- functions.
--
module Distribution.Client.RebuildMonad (
    -- * Rebuild monad
    Rebuild,
    runRebuild,
    execRebuild,
    askRoot,

    -- * Setting up file monitoring
    monitorFiles,
    MonitorFilePath,
    monitorFile,
    monitorFileHashed,
    monitorNonExistentFile,
    monitorDirectory,
    monitorNonExistentDirectory,
    monitorDirectoryExistence,
    monitorFileOrDirectory,
    monitorFileSearchPath,
    monitorFileHashedSearchPath,
    -- ** Monitoring file globs
    monitorFileGlob,
    monitorFileGlobExistence,
    FilePathGlob(..),
    FilePathRoot(..),
    FilePathGlobRel(..),
    GlobPiece(..),

    -- * Using a file monitor
    FileMonitor(..),
    newFileMonitor,
    rerunIfChanged,

    -- * Utils
    delayInitSharedResource,
    delayInitSharedResources,
    matchFileGlob,
    getDirectoryContentsMonitored,
    createDirectoryMonitored,
    monitorDirectoryStatus,
    doesFileExistMonitored,
    need,
    needIfExists,
    findFileWithExtensionMonitored,
    findFirstFileMonitored,
    findFileMonitored,
  ) where

import Prelude ()
import Distribution.Client.Compat.Prelude

import Distribution.Client.FileMonitor
import Distribution.Client.Glob hiding (matchFileGlob)
import qualified Distribution.Client.Glob as Glob (matchFileGlob)

import Distribution.Simple.Utils (debug)

import qualified Data.Map.Strict as Map
import Control.Monad.State as State
import Control.Monad.Reader as Reader
import Control.Concurrent.MVar (MVar, newMVar, modifyMVar)
import System.FilePath
import System.Directory

-- | A monad layered on top of 'IO' to help with re-running actions when the
-- input files and values they depend on change. The crucial operations are
-- 'rerunIfChanged' and 'monitorFiles'.
--
-- The ReaderT carries the root directory; the StateT accumulates the
-- 'MonitorFilePath's declared so far.
newtype Rebuild a = Rebuild (ReaderT FilePath (StateT [MonitorFilePath] IO) a)
  deriving (Functor, Applicative, Monad, MonadIO)

-- | Use this within the body action of 'rerunIfChanged' to declare that the
-- action depends on the given files. This can be based on what the action
-- actually did. It is these files that will be checked for changes next
-- time 'rerunIfChanged' is called for that 'FileMonitor'.
--
-- Relative paths are interpreted as relative to an implicit root, ultimately
-- passed in to 'runRebuild'.
--
monitorFiles :: [MonitorFilePath] -> Rebuild ()
monitorFiles filespecs = Rebuild (State.modify (filespecs++))

-- | Run a 'Rebuild' IO action, returning both the result and the
-- accumulated monitors.
unRebuild :: FilePath -> Rebuild a -> IO (a, [MonitorFilePath])
unRebuild rootDir (Rebuild action) = runStateT (runReaderT action rootDir) []

-- | Run a 'Rebuild' IO action, keeping only the result.
runRebuild :: FilePath -> Rebuild a -> IO a
runRebuild rootDir (Rebuild action) = evalStateT (runReaderT action rootDir) []

-- | Run a 'Rebuild' IO action, keeping only the accumulated monitors.
execRebuild :: FilePath -> Rebuild a -> IO [MonitorFilePath]
execRebuild rootDir (Rebuild action) = execStateT (runReaderT action rootDir) []

-- | The root that relative paths are interpreted as being relative to.
askRoot :: Rebuild FilePath
askRoot = Rebuild Reader.ask

-- | This captures the standard use pattern for a 'FileMonitor': given a
-- monitor, an action and the input value the action depends on, either
-- re-run the action to get its output, or if the value and files the action
-- depends on have not changed then return a previously cached action result.
--
-- The result is still in the 'Rebuild' monad, so these can be nested.
--
-- Do not share 'FileMonitor's between different uses of 'rerunIfChanged'.
--
rerunIfChanged :: (Binary a, Structured a, Binary b, Structured b)
               => Verbosity
               -> FileMonitor a b
               -> a
               -> Rebuild b
               -> Rebuild b
rerunIfChanged verbosity monitor key action = do
    rootDir <- askRoot
    changed <- liftIO $ checkFileMonitorChanged monitor rootDir key
    case changed of
      MonitorUnchanged result files -> do
        liftIO $ debug verbosity $ "File monitor '" ++ monitorName
                                ++ "' unchanged."
        -- Propagate the cached monitors so outer monitors still see them.
        monitorFiles files
        return result

      MonitorChanged reason -> do
        liftIO $ debug verbosity $ "File monitor '" ++ monitorName
                                ++ "' changed: " ++ showReason reason
        startTime       <- liftIO $ beginUpdateFileMonitor
        (result, files) <- liftIO $ unRebuild rootDir action
        liftIO $ updateFileMonitor monitor rootDir
                                   (Just startTime) files key result
        monitorFiles files
        return result
  where
    monitorName = takeFileName (fileMonitorCacheFile monitor)

    showReason (MonitoredFileChanged file) = "file " ++ file
    showReason (MonitoredValueChanged _)   = "monitor value changed"
    showReason  MonitorFirstRun            = "first run"
    showReason  MonitorCorruptCache        = "invalid cache file"

-- | When using 'rerunIfChanged' for each element of a list of actions, it is
-- sometimes the case that each action needs to make use of some resource. e.g.
--
-- > sequence
-- >   [ rerunIfChanged verbosity monitor key $ do
-- >       resource <- mkResource
-- >       ... -- use the resource
-- >   | ... ]
--
-- For efficiency one would like to share the resource between the actions
-- but the straightforward way of doing this means initialising it every time
-- even when no actions need re-running.
--
-- > resource <- mkResource
-- > sequence
-- >   [ rerunIfChanged verbosity monitor key $ do
-- >       ... -- use the resource
-- >   | ... ]
--
-- This utility allows one to get the best of both worlds:
--
-- > getResource <- delayInitSharedResource mkResource
-- > sequence
-- >   [ rerunIfChanged verbosity monitor key $ do
-- >       resource <- getResource
-- >       ... -- use the resource
-- >   | ...
-- ]
--
-- | Lazily initialise a resource shared by several 'Rebuild' actions: the
-- initialiser runs at most once, the first time the returned action is used.
delayInitSharedResource :: forall a. IO a -> Rebuild (Rebuild a)
delayInitSharedResource action = do
    resourceVar <- liftIO (newMVar Nothing)
    return (liftIO (readOrCreate resourceVar))
  where
    -- Atomically return the cached resource, initialising it on first use.
    readOrCreate :: MVar (Maybe a) -> IO a
    readOrCreate resourceVar =
      modifyMVar resourceVar $ \cached ->
        case cached of
          Just resource -> return (cached, resource)
          Nothing       -> do
            resource <- action
            return (Just resource, resource)

-- | Much like 'delayInitSharedResource' but for a keyed set of resources:
-- each key's resource is initialised at most once, on first request.
--
-- > getResource <- delayInitSharedResource mkResource
-- > sequence
-- >   [ rerunIfChanged verbosity monitor key $ do
-- >       resource <- getResource key
-- >       ... -- use the resource
-- >   | ... ]
--
delayInitSharedResources :: forall k v. Ord k
                         => (k -> IO v)
                         -> Rebuild (k -> Rebuild v)
delayInitSharedResources mkResource = do
    cacheVar <- liftIO (newMVar Map.empty)
    return (liftIO . fetch cacheVar)
  where
    -- Atomically look up (or create and insert) the resource for a key.
    fetch :: MVar (Map k v) -> k -> IO v
    fetch cacheVar key =
      modifyMVar cacheVar $ \cache ->
        case Map.lookup key cache of
          Just resource -> return (cache, resource)
          Nothing       -> do
            resource <- mkResource key
            -- Strict insert so the map carries no thunks.
            let !cache' = Map.insert key resource cache
            return (cache', resource)

-- | Utility to match a file glob against the file system, starting from a
-- given root directory.  The results are all relative to the given root.
--
-- Since this operates in the 'Rebuild' monad, it also monitors the given glob
-- for changes.
--
matchFileGlob :: FilePathGlob -> Rebuild [FilePath]
matchFileGlob glob = do
    root <- askRoot
    monitorFiles [monitorFileGlobExistence glob]
    liftIO $ Glob.matchFileGlob root glob

getDirectoryContentsMonitored :: FilePath -> Rebuild [FilePath]
getDirectoryContentsMonitored dir = do
    exists <- monitorDirectoryStatus dir
    if exists
      then liftIO $ getDirectoryContents dir
      else return []

createDirectoryMonitored :: Bool -> FilePath -> Rebuild ()
createDirectoryMonitored createParents dir = do
    monitorFiles [monitorDirectoryExistence dir]
    liftIO $ createDirectoryIfMissing createParents dir

-- | Monitor a directory as in 'monitorDirectory' if it currently exists or
-- as 'monitorNonExistentDirectory' if it does not.
monitorDirectoryStatus :: FilePath -> Rebuild Bool
monitorDirectoryStatus dir = do
    exists <- liftIO $ doesDirectoryExist dir
    monitorFiles [if exists
                    then monitorDirectory dir
                    else monitorNonExistentDirectory dir]
    return exists

-- | Like 'doesFileExist', but in the 'Rebuild' monad.  This does
-- NOT track the contents of 'FilePath'; use 'need' in that case.
doesFileExistMonitored :: FilePath -> Rebuild Bool
doesFileExistMonitored f = do
    root <- askRoot
    -- FIX: relative paths must be resolved against the rebuild root with
    -- '</>'; without it the FilePath would be applied as a function.
    exists <- liftIO $ doesFileExist (root </> f)
    monitorFiles [if exists
                    then monitorFileExistence f
                    else monitorNonExistentFile f]
    return exists

-- | Monitor a single file
need :: FilePath -> Rebuild ()
need f = monitorFiles [monitorFileHashed f]

-- | Monitor a file if it exists; otherwise check for when it
-- gets created.  This is a bit better for recompilation avoidance
-- because sometimes users give bad package metadata, and we don't
-- want to repeatedly rebuild in this case (which we would if we
-- need'ed a non-existent file).
needIfExists :: FilePath -> Rebuild ()
needIfExists f = do
    root <- askRoot
    -- FIX: '</>' restored here as well (see 'doesFileExistMonitored').
    exists <- liftIO $ doesFileExist (root </> f)
    monitorFiles [if exists
                    then monitorFileHashed f
                    else monitorNonExistentFile f]

-- | Like 'findFileWithExtension', but in the 'Rebuild' monad.
findFileWithExtensionMonitored :: [String] -> [FilePath] -> FilePath -> Rebuild (Maybe FilePath) findFileWithExtensionMonitored extensions searchPath baseName = findFirstFileMonitored id [ path baseName <.> ext | path <- nub searchPath , ext <- nub extensions ] -- | Like 'findFirstFile', but in the 'Rebuild' monad. findFirstFileMonitored :: forall a. (a -> FilePath) -> [a] -> Rebuild (Maybe a) findFirstFileMonitored file = findFirst where findFirst :: [a] -> Rebuild (Maybe a) findFirst [] = return Nothing findFirst (x:xs) = do exists <- doesFileExistMonitored (file x) if exists then return (Just x) else findFirst xs -- | Like 'findFile', but in the 'Rebuild' monad. findFileMonitored :: [FilePath] -> FilePath -> Rebuild (Maybe FilePath) findFileMonitored searchPath fileName = findFirstFileMonitored id [ path fileName | path <- nub searchPath] cabal-install-3.8.1.0/src/Distribution/Client/Reconfigure.hs0000644000000000000000000001667607346545000022070 0ustar0000000000000000module Distribution.Client.Reconfigure ( Check(..), reconfigure ) where import Distribution.Client.Compat.Prelude import Data.Monoid ( Any(..) ) import System.Directory ( doesFileExist ) import Distribution.Simple.Configure ( localBuildInfoFile ) import Distribution.Simple.Setup ( Flag, flagToMaybe, toFlag ) import Distribution.Simple.Utils ( existsAndIsMoreRecentThan, defaultPackageDesc, info ) import Distribution.Client.Config ( SavedConfig(..) ) import Distribution.Client.Configure ( readConfigFlags ) import Distribution.Client.Nix ( findNixExpr, inNixShell, nixInstantiate ) import Distribution.Client.Sandbox ( findSavedDistPref, updateInstallDirs ) import Distribution.Client.Sandbox.PackageEnvironment ( userPackageEnvironmentFile ) import Distribution.Client.Setup ( ConfigFlags(..), ConfigExFlags, GlobalFlags(..) ) -- | @Check@ represents a function to check some condition on type @a@. The -- returned 'Any' is 'True' if any part of the condition failed. 
newtype Check a = Check { runCheck :: Any -- Did any previous check fail? -> a -- value returned by previous checks -> IO (Any, a) -- Did this check fail? What value is returned? } instance Semigroup (Check a) where (<>) c d = Check $ \any0 a0 -> do (any1, a1) <- runCheck c any0 a0 (any2, a2) <- runCheck d (any0 <> any1) a1 return (any0 <> any1 <> any2, a2) instance Monoid (Check a) where mempty = Check $ \_ a -> return (mempty, a) mappend = (<>) -- | Re-configure the package in the current directory if needed. Deciding -- when to reconfigure and with which options is convoluted: -- -- If we are reconfiguring, we must always run @configure@ with the -- verbosity option we are given; however, that a previous configuration -- uses a different verbosity setting is not reason enough to reconfigure. -- -- The package should be configured to use the same \"dist\" prefix as -- given to the @build@ command, otherwise the build will probably -- fail. Not only does this determine the \"dist\" prefix setting if we -- need to reconfigure anyway, but an existing configuration should be -- invalidated if its \"dist\" prefix differs. -- -- If the package has never been configured (i.e., there is no -- LocalBuildInfo), we must configure first, using the default options. -- -- If the package has been configured, there will be a 'LocalBuildInfo'. -- If there no package description file, we assume that the -- 'PackageDescription' is up to date, though the configuration may need -- to be updated for other reasons (see above). If there is a package -- description file, and it has been modified since the 'LocalBuildInfo' -- was generated, then we need to reconfigure. -- -- The caller of this function may also have specific requirements -- regarding the flags the last configuration used. For example, -- 'testAction' requires that the package be configured with test suites -- enabled. 
The caller may pass the required settings to this function -- along with a function to check the validity of the saved 'ConfigFlags'; -- these required settings will be checked first upon determining that -- a previous configuration exists. reconfigure :: ((ConfigFlags, ConfigExFlags) -> [String] -> GlobalFlags -> IO ()) -- ^ configure action -> Verbosity -- ^ Verbosity setting -> FilePath -- ^ \"dist\" prefix -> Flag (Maybe Int) -- ^ -j flag for reinstalling add-source deps. -> Check (ConfigFlags, ConfigExFlags) -- ^ Check that the required flags are set. -- If they are not set, provide a message explaining the -- reason for reconfiguration. -> [String] -- ^ Extra arguments -> GlobalFlags -- ^ Global flags -> SavedConfig -> IO SavedConfig reconfigure configureAction verbosity dist _numJobsFlag check extraArgs globalFlags config = do savedFlags@(_, _) <- readConfigFlags dist useNix <- fmap isJust (findNixExpr globalFlags config) alreadyInNixShell <- inNixShell if useNix && not alreadyInNixShell then do -- If we are using Nix, we must reinstantiate the derivation outside -- the shell. Eventually, the caller will invoke 'nixShell' which will -- rerun cabal inside the shell. That will bring us back to 'reconfigure', -- but inside the shell we'll take the second branch, below. -- This seems to have a problem: won't 'configureAction' call 'nixShell' -- yet again, spawning an infinite tree of subprocesses? -- No, because 'nixShell' doesn't spawn a new process if it is already -- running in a Nix shell. 
nixInstantiate verbosity dist False globalFlags config return config else do let checks :: Check (ConfigFlags, ConfigExFlags) checks = checkVerb <> checkDist <> checkOutdated <> check (Any frc, flags@(configFlags, _)) <- runCheck checks mempty savedFlags let config' :: SavedConfig config' = updateInstallDirs (configUserInstall configFlags) config when frc $ configureAction flags extraArgs globalFlags return config' where -- Changing the verbosity does not require reconfiguration, but the new -- verbosity should be used if reconfiguring. checkVerb :: Check (ConfigFlags, b) checkVerb = Check $ \_ (configFlags, configExFlags) -> do let configFlags' :: ConfigFlags configFlags' = configFlags { configVerbosity = toFlag verbosity} return (mempty, (configFlags', configExFlags)) -- Reconfiguration is required if @--build-dir@ changes. checkDist :: Check (ConfigFlags, b) checkDist = Check $ \_ (configFlags, configExFlags) -> do -- Always set the chosen @--build-dir@ before saving the flags, -- or bad things could happen. savedDist <- findSavedDistPref config (configDistPref configFlags) let distChanged :: Bool distChanged = dist /= savedDist when distChanged $ info verbosity "build directory changed" let configFlags' :: ConfigFlags configFlags' = configFlags { configDistPref = toFlag dist } return (Any distChanged, (configFlags', configExFlags)) checkOutdated :: Check (ConfigFlags, b) checkOutdated = Check $ \_ flags@(configFlags, _) -> do let buildConfig :: FilePath buildConfig = localBuildInfoFile dist -- Has the package ever been configured? If not, reconfiguration is -- required. configured <- doesFileExist buildConfig unless configured $ info verbosity "package has never been configured" -- Is the @cabal.config@ file newer than @dist/setup.config@? Then we need -- to force reconfigure. Note that it's possible to use @cabal.config@ -- even without sandboxes. 
userPackageEnvironmentFileModified <- existsAndIsMoreRecentThan userPackageEnvironmentFile buildConfig when userPackageEnvironmentFileModified $ info verbosity ("user package environment file ('" ++ userPackageEnvironmentFile ++ "') was modified") -- Is the configuration older than the package description? descrFile <- maybe (defaultPackageDesc verbosity) return (flagToMaybe (configCabalFilePath configFlags)) outdated <- existsAndIsMoreRecentThan descrFile buildConfig when outdated $ info verbosity (descrFile ++ " was changed") let failed :: Any failed = Any outdated <> Any userPackageEnvironmentFileModified <> Any (not configured) return (failed, flags) cabal-install-3.8.1.0/src/Distribution/Client/Run.hs0000644000000000000000000001470307346545000020351 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Run -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Implementation of the 'run' command. 
----------------------------------------------------------------------------- module Distribution.Client.Run ( run, splitRunArgs ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Types.TargetInfo (targetCLBI) import Distribution.Types.LocalBuildInfo (componentNameTargets') import Distribution.Client.Utils (tryCanonicalizePath) import Distribution.Types.UnqualComponentName import Distribution.PackageDescription (Executable (..), TestSuite(..), Benchmark(..), PackageDescription (..), BuildInfo(buildable)) import Distribution.Simple.Compiler (compilerFlavor, CompilerFlavor(..)) import Distribution.Simple.Build.PathsModule (pkgPathEnvVar) import Distribution.Simple.BuildPaths (exeExtension) import Distribution.Simple.LocalBuildInfo (ComponentName (..), LocalBuildInfo (..), depLibraryPaths) import Distribution.Simple.Utils (die', notice, warn, rawSystemExitWithEnv, addLibraryPath) import Distribution.System (Platform (..)) import qualified Distribution.Simple.GHCJS as GHCJS import System.Directory (getCurrentDirectory) import Distribution.Compat.Environment (getEnvironment) import System.FilePath ((<.>), ()) -- | Return the executable to run and any extra arguments that should be -- forwarded to it. Die in case of error. splitRunArgs :: Verbosity -> LocalBuildInfo -> [String] -> IO (Executable, [String]) splitRunArgs verbosity lbi args = case whichExecutable of -- Either err (wasManuallyChosen, exe, paramsRest) Left err -> do warn verbosity `traverse_` maybeWarning -- If there is a warning, print it. die' verbosity err Right (True, exe, xs) -> return (exe, xs) Right (False, exe, xs) -> do let addition = " Interpreting all parameters to `run` as a parameter to" ++ " the default executable." -- If there is a warning, print it together with the addition. warn verbosity `traverse_` fmap (++addition) maybeWarning return (exe, xs) where pkg_descr = localPkgDescr lbi whichExecutable :: Either String -- Error string. 
( Bool -- If it was manually chosen. , Executable -- The executable. , [String] -- The remaining parameters. ) whichExecutable = case (enabledExes, args) of ([] , _) -> Left "Couldn't find any enabled executables." ([exe], []) -> return (False, exe, []) ([exe], (x:xs)) | x == unUnqualComponentName (exeName exe) -> return (True, exe, xs) | otherwise -> return (False, exe, args) (_ , []) -> Left $ "This package contains multiple executables. " ++ "You must pass the executable name as the first argument " ++ "to 'cabal run'." (_ , (x:xs)) -> case find (\exe -> unUnqualComponentName (exeName exe) == x) enabledExes of Nothing -> Left $ "No executable named '" ++ x ++ "'." Just exe -> return (True, exe, xs) where enabledExes = filter (buildable . buildInfo) (executables pkg_descr) maybeWarning :: Maybe String maybeWarning = case args of [] -> Nothing (x:_) -> lookup (mkUnqualComponentName x) components where components :: [(UnqualComponentName, String)] -- Component name, message. components = [ (name, "The executable '" ++ prettyShow name ++ "' is disabled.") | e <- executables pkg_descr , not . buildable . buildInfo $ e, let name = exeName e] ++ [ (name, "There is a test-suite '" ++ prettyShow name ++ "'," ++ " but the `run` command is only for executables.") | t <- testSuites pkg_descr , let name = testName t] ++ [ (name, "There is a benchmark '" ++ prettyShow name ++ "'," ++ " but the `run` command is only for executables.") | b <- benchmarks pkg_descr , let name = benchmarkName b] -- | Run a given executable. 
run :: Verbosity -> LocalBuildInfo -> Executable -> [String] -> IO () run verbosity lbi exe exeArgs = do curDir <- getCurrentDirectory let buildPref = buildDir lbi pkg_descr = localPkgDescr lbi dataDirEnvVar = (pkgPathEnvVar pkg_descr "datadir", curDir dataDir pkg_descr) (path, runArgs) <- let exeName' = prettyShow $ exeName exe in case compilerFlavor (compiler lbi) of GHCJS -> do let (script, cmd, cmdArgs) = GHCJS.runCmd (withPrograms lbi) (buildPref exeName' exeName') script' <- tryCanonicalizePath script return (cmd, cmdArgs ++ [script']) _ -> do p <- tryCanonicalizePath $ buildPref exeName' (exeName' <.> exeExtension (hostPlatform lbi)) return (p, []) env <- (dataDirEnvVar:) <$> getEnvironment -- Add (DY)LD_LIBRARY_PATH if needed env' <- if withDynExe lbi then do let (Platform _ os) = hostPlatform lbi clbi <- case componentNameTargets' pkg_descr lbi (CExeName (exeName exe)) of [target] -> return (targetCLBI target) [] -> die' verbosity "run: Could not find executable in LocalBuildInfo" _ -> die' verbosity "run: Found multiple matching exes in LocalBuildInfo" paths <- depLibraryPaths True False lbi clbi return (addLibraryPath os paths env) else return env notice verbosity $ "Running " ++ prettyShow (exeName exe) ++ "..." rawSystemExitWithEnv verbosity path (runArgs++exeArgs) env' cabal-install-3.8.1.0/src/Distribution/Client/Sandbox.hs0000644000000000000000000001131007346545000021172 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE RankNTypes #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Sandbox -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- UI for the sandboxing functionality. 
----------------------------------------------------------------------------- module Distribution.Client.Sandbox ( loadConfigOrSandboxConfig, findSavedDistPref, updateInstallDirs, getPersistOrConfigCompiler ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Client.Setup ( ConfigFlags(..), GlobalFlags(..), configCompilerAux' ) import Distribution.Client.Config ( SavedConfig(..), defaultUserInstall, loadConfig ) import Distribution.Client.Sandbox.PackageEnvironment ( PackageEnvironmentType(..) , classifyPackageEnvironment , loadUserConfig ) import Distribution.Client.SetupWrapper ( SetupScriptOptions(..), defaultSetupScriptOptions ) import Distribution.Simple.Compiler ( Compiler(..) ) import Distribution.Simple.Configure ( maybeGetPersistBuildConfig , findDistPrefOrDefault , findDistPref ) import qualified Distribution.Simple.LocalBuildInfo as LocalBuildInfo import Distribution.Simple.Program ( ProgramDb ) import Distribution.Simple.Setup ( Flag(..) , fromFlagOrDefault, flagToMaybe ) import Distribution.System ( Platform ) import System.Directory ( getCurrentDirectory ) -- * Basic sandbox functions. -- updateInstallDirs :: Flag Bool -> SavedConfig -> SavedConfig updateInstallDirs userInstallFlag savedConfig = savedConfig { savedConfigureFlags = configureFlags { configInstallDirs = installDirs } } where configureFlags = savedConfigureFlags savedConfig userInstallDirs = savedUserInstallDirs savedConfig globalInstallDirs = savedGlobalInstallDirs savedConfig installDirs | userInstall = userInstallDirs | otherwise = globalInstallDirs userInstall = fromFlagOrDefault defaultUserInstall (configUserInstall configureFlags `mappend` userInstallFlag) -- | Check which type of package environment we're in and return a -- correctly-initialised @SavedConfig@ and a @UseSandbox@ value that indicates -- whether we're working in a sandbox. loadConfigOrSandboxConfig :: Verbosity -> GlobalFlags -- ^ For @--config-file@ and -- @--sandbox-config-file@. 
-> IO SavedConfig loadConfigOrSandboxConfig verbosity globalFlags = do let configFileFlag = globalConfigFile globalFlags pkgEnvDir <- getCurrentDirectory pkgEnvType <- classifyPackageEnvironment pkgEnvDir case pkgEnvType of -- Only @cabal.config@ is present. UserPackageEnvironment -> do config <- loadConfig verbosity configFileFlag userConfig <- loadUserConfig verbosity pkgEnvDir Nothing let config' = config `mappend` userConfig return config' -- Neither @cabal.sandbox.config@ nor @cabal.config@ are present. AmbientPackageEnvironment -> do config <- loadConfig verbosity configFileFlag let globalConstraintsOpt = flagToMaybe . globalConstraintsFile . savedGlobalFlags $ config globalConstraintConfig <- loadUserConfig verbosity pkgEnvDir globalConstraintsOpt let config' = config `mappend` globalConstraintConfig return config' -- | Return the saved \"dist/\" prefix, or the default prefix. findSavedDistPref :: SavedConfig -> Flag FilePath -> IO FilePath findSavedDistPref config flagDistPref = do let defDistPref = useDistPref defaultSetupScriptOptions flagDistPref' = configDistPref (savedConfigureFlags config) `mappend` flagDistPref findDistPref defDistPref flagDistPref' -- Utils (transitionary) -- -- | Try to read the most recently configured compiler from the -- 'localBuildInfoFile', falling back on 'configCompilerAuxEx' if it -- cannot be read. 
getPersistOrConfigCompiler :: ConfigFlags -> IO (Compiler, Platform, ProgramDb) getPersistOrConfigCompiler configFlags = do distPref <- findDistPrefOrDefault (configDistPref configFlags) mlbi <- maybeGetPersistBuildConfig distPref case mlbi of Nothing -> do configCompilerAux' configFlags Just lbi -> return ( LocalBuildInfo.compiler lbi , LocalBuildInfo.hostPlatform lbi , LocalBuildInfo.withPrograms lbi ) cabal-install-3.8.1.0/src/Distribution/Client/Sandbox/0000755000000000000000000000000007346545000020642 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/Sandbox/PackageEnvironment.hs0000644000000000000000000002732507346545000024767 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Sandbox.PackageEnvironment -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Utilities for working with the package environment file. Patterned after -- Distribution.Client.Config. ----------------------------------------------------------------------------- module Distribution.Client.Sandbox.PackageEnvironment ( PackageEnvironment(..) , PackageEnvironmentType(..) , classifyPackageEnvironment , readPackageEnvironmentFile , showPackageEnvironment , showPackageEnvironmentWithComments , loadUserConfig , userPackageEnvironmentFile ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.Config ( SavedConfig(..) , configFieldDescriptions , haddockFlagsFields , installDirsFields, withProgramsFields , withProgramOptionsFields ) import Distribution.Client.ParseUtils ( parseFields, ppFields, ppSection ) import Distribution.Client.Setup ( ConfigExFlags(..) ) import Distribution.Client.Targets ( userConstraintPackageName ) import Distribution.Simple.InstallDirs ( InstallDirs(..), PathTemplate ) import Distribution.Simple.Setup ( Flag(..) , ConfigFlags(..), HaddockFlags(..) 
) import Distribution.Simple.Utils ( warn, debug ) import Distribution.Solver.Types.ConstraintSource import Distribution.Deprecated.ParseUtils ( FieldDescr(..), ParseResult(..) , commaListFieldParsec, commaNewLineListFieldParsec , liftField, lineNo, locatedErrorMsg , readFields , showPWarning , syntaxError, warning ) import System.Directory ( doesFileExist ) import System.FilePath ( () ) import System.IO.Error ( isDoesNotExistError ) import Text.PrettyPrint ( ($+$) ) import qualified Data.ByteString as BS import qualified Text.PrettyPrint as Disp import qualified Distribution.Deprecated.ParseUtils as ParseUtils ( Field(..) ) -- -- * Configuration saved in the package environment file -- -- TODO: would be nice to remove duplication between -- D.C.Sandbox.PackageEnvironment and D.C.Config. data PackageEnvironment = PackageEnvironment { pkgEnvSavedConfig :: SavedConfig } deriving Generic instance Monoid PackageEnvironment where mempty = gmempty mappend = (<>) instance Semigroup PackageEnvironment where (<>) = gmappend -- | Optional package environment file that can be used to customize the default -- settings. Created by the user. userPackageEnvironmentFile :: FilePath userPackageEnvironmentFile = "cabal.config" -- | Type of the current package environment. data PackageEnvironmentType = UserPackageEnvironment -- ^ './cabal.config' | AmbientPackageEnvironment -- ^ '~/.cabal/config' -- | Is there a 'cabal.config' in this directory? classifyPackageEnvironment :: FilePath -> IO PackageEnvironmentType classifyPackageEnvironment pkgEnvDir = do isUser <- configExists userPackageEnvironmentFile return (classify isUser) where configExists fname = doesFileExist (pkgEnvDir fname) classify :: Bool -> PackageEnvironmentType classify True = UserPackageEnvironment classify False = AmbientPackageEnvironment -- | Load the user package environment if it exists (the optional "cabal.config" -- file). If it does not exist locally, attempt to load an optional global one. 
userPackageEnvironment :: Verbosity -> FilePath -> Maybe FilePath -> IO PackageEnvironment userPackageEnvironment verbosity pkgEnvDir globalConfigLocation = do let path = pkgEnvDir userPackageEnvironmentFile minp <- readPackageEnvironmentFile (ConstraintSourceUserConfig path) mempty path case (minp, globalConfigLocation) of (Just parseRes, _) -> processConfigParse path parseRes (_, Just globalLoc) -> do minp' <- readPackageEnvironmentFile (ConstraintSourceUserConfig globalLoc) mempty globalLoc maybe (warn verbosity ("no constraints file found at " ++ globalLoc) >> return mempty) (processConfigParse globalLoc) minp' _ -> do debug verbosity ("no user package environment file found at " ++ pkgEnvDir) return mempty where processConfigParse path (ParseOk warns parseResult) = do unless (null warns) $ warn verbosity $ unlines (map (showPWarning path) warns) return parseResult processConfigParse path (ParseFailed err) = do let (line, msg) = locatedErrorMsg err warn verbosity $ "Error parsing package environment file " ++ path ++ maybe "" (\n -> ":" ++ show n) line ++ ":\n" ++ msg return mempty -- | Same as @userPackageEnvironmentFile@, but returns a SavedConfig. loadUserConfig :: Verbosity -> FilePath -> Maybe FilePath -> IO SavedConfig loadUserConfig verbosity pkgEnvDir globalConfigLocation = fmap pkgEnvSavedConfig $ userPackageEnvironment verbosity pkgEnvDir globalConfigLocation -- | Descriptions of all fields in the package environment file. pkgEnvFieldDescrs :: ConstraintSource -> [FieldDescr PackageEnvironment] pkgEnvFieldDescrs src = [ commaNewLineListFieldParsec "constraints" (pretty . fst) ((\pc -> (pc, src)) `fmap` parsec) (sortConstraints . configExConstraints . savedConfigureExFlags . pkgEnvSavedConfig) (\v pkgEnv -> updateConfigureExFlags pkgEnv (\flags -> flags { configExConstraints = v })) , commaListFieldParsec "preferences" pretty parsec (configPreferences . savedConfigureExFlags . 
pkgEnvSavedConfig) (\v pkgEnv -> updateConfigureExFlags pkgEnv (\flags -> flags { configPreferences = v })) ] ++ map toPkgEnv configFieldDescriptions' where configFieldDescriptions' :: [FieldDescr SavedConfig] configFieldDescriptions' = filter (\(FieldDescr name _ _) -> name /= "preference" && name /= "constraint") (configFieldDescriptions src) toPkgEnv :: FieldDescr SavedConfig -> FieldDescr PackageEnvironment toPkgEnv fieldDescr = liftField pkgEnvSavedConfig (\savedConfig pkgEnv -> pkgEnv { pkgEnvSavedConfig = savedConfig}) fieldDescr updateConfigureExFlags :: PackageEnvironment -> (ConfigExFlags -> ConfigExFlags) -> PackageEnvironment updateConfigureExFlags pkgEnv f = pkgEnv { pkgEnvSavedConfig = (pkgEnvSavedConfig pkgEnv) { savedConfigureExFlags = f . savedConfigureExFlags . pkgEnvSavedConfig $ pkgEnv } } sortConstraints = sortBy (comparing $ userConstraintPackageName . fst) -- | Read the package environment file. readPackageEnvironmentFile :: ConstraintSource -> PackageEnvironment -> FilePath -> IO (Maybe (ParseResult PackageEnvironment)) readPackageEnvironmentFile src initial file = handleNotExists $ fmap (Just . parsePackageEnvironment src initial) (BS.readFile file) where handleNotExists action = catchIO action $ \ioe -> if isDoesNotExistError ioe then return Nothing else ioError ioe -- | Parse the package environment file. 
parsePackageEnvironment :: ConstraintSource -> PackageEnvironment -> BS.ByteString -> ParseResult PackageEnvironment parsePackageEnvironment src initial str = do fields <- readFields str let (knownSections, others) = partition isKnownSection fields pkgEnv <- parse others let config = pkgEnvSavedConfig pkgEnv installDirs0 = savedUserInstallDirs config (haddockFlags, installDirs, paths, args) <- foldM parseSections (savedHaddockFlags config, installDirs0, [], []) knownSections return pkgEnv { pkgEnvSavedConfig = config { savedConfigureFlags = (savedConfigureFlags config) { configProgramPaths = paths, configProgramArgs = args }, savedHaddockFlags = haddockFlags, savedUserInstallDirs = installDirs, savedGlobalInstallDirs = installDirs } } where isKnownSection :: ParseUtils.Field -> Bool isKnownSection (ParseUtils.Section _ "haddock" _ _) = True isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True isKnownSection _ = False parse :: [ParseUtils.Field] -> ParseResult PackageEnvironment parse = parseFields (pkgEnvFieldDescrs src) initial parseSections :: SectionsAccum -> ParseUtils.Field -> ParseResult SectionsAccum parseSections accum@(h,d,p,a) (ParseUtils.Section _ "haddock" name fs) | name == "" = do h' <- parseFields haddockFlagsFields h fs return (h', d, p, a) | otherwise = do warning "The 'haddock' section should be unnamed" return accum parseSections (h,d,p,a) (ParseUtils.Section line "install-dirs" name fs) | name == "" = do d' <- parseFields installDirsFields d fs return (h, d',p,a) | otherwise = syntaxError line $ "Named 'install-dirs' section: '" ++ name ++ "'. Note that named 'install-dirs' sections are not allowed in the '" ++ userPackageEnvironmentFile ++ "' file." 
parseSections accum@(h, d,p,a) (ParseUtils.Section _ "program-locations" name fs) | name == "" = do p' <- parseFields withProgramsFields p fs return (h, d, p', a) | otherwise = do warning "The 'program-locations' section should be unnamed" return accum parseSections accum@(h, d, p, a) (ParseUtils.Section _ "program-default-options" name fs) | name == "" = do a' <- parseFields withProgramOptionsFields a fs return (h, d, p, a') | otherwise = do warning "The 'program-default-options' section should be unnamed" return accum parseSections accum f = do warning $ "Unrecognized stanza on line " ++ show (lineNo f) return accum -- | Accumulator type for 'parseSections'. type SectionsAccum = (HaddockFlags, InstallDirs (Flag PathTemplate) , [(String, FilePath)], [(String, [String])]) -- | Pretty-print the package environment. showPackageEnvironment :: PackageEnvironment -> String showPackageEnvironment pkgEnv = showPackageEnvironmentWithComments Nothing pkgEnv -- | Pretty-print the package environment with default values for empty fields -- commented out (just like the default ~/.cabal/config). showPackageEnvironmentWithComments :: (Maybe PackageEnvironment) -> PackageEnvironment -> String showPackageEnvironmentWithComments mdefPkgEnv pkgEnv = Disp.render $ ppFields (pkgEnvFieldDescrs ConstraintSourceUnknown) mdefPkgEnv pkgEnv $+$ Disp.text "" $+$ ppSection "install-dirs" "" installDirsFields (fmap installDirsSection mdefPkgEnv) (installDirsSection pkgEnv) where installDirsSection = savedUserInstallDirs . 
pkgEnvSavedConfig cabal-install-3.8.1.0/src/Distribution/Client/SavedFlags.hs0000644000000000000000000000553507346545000021627 0ustar0000000000000000{-# LANGUAGE DeriveDataTypeable #-} module Distribution.Client.SavedFlags ( readCommandFlags, writeCommandFlags , readSavedArgs, writeSavedArgs ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Simple.Command import Distribution.Simple.UserHooks ( Args ) import Distribution.Simple.Utils ( createDirectoryIfMissingVerbose, unintersperse ) import Distribution.Verbosity import System.Directory ( doesFileExist ) import System.FilePath ( takeDirectory ) writeSavedArgs :: Verbosity -> FilePath -> [String] -> IO () writeSavedArgs verbosity path args = do createDirectoryIfMissingVerbose (lessVerbose verbosity) True (takeDirectory path) writeFile path (intercalate "\0" args) -- | Write command-line flags to a file, separated by null characters. This -- format is also suitable for the @xargs -0@ command. Using the null -- character also avoids the problem of escaping newlines or spaces, -- because unlike other whitespace characters, the null character is -- not valid in command-line arguments. writeCommandFlags :: Verbosity -> FilePath -> CommandUI flags -> flags -> IO () writeCommandFlags verbosity path command flags = writeSavedArgs verbosity path (commandShowOptions command flags) readSavedArgs :: FilePath -> IO (Maybe [String]) readSavedArgs path = do exists <- doesFileExist path if exists then fmap (Just . unintersperse '\0') (readFile path) else return Nothing -- | Read command-line arguments, separated by null characters, from a file. -- Returns the default flags if the file does not exist. 
readCommandFlags :: FilePath -> CommandUI flags -> IO flags readCommandFlags path command = do savedArgs <- fmap (fromMaybe []) (readSavedArgs path) case (commandParseArgs command True savedArgs) of CommandHelp _ -> throwIO (SavedArgsErrorHelp savedArgs) CommandList _ -> throwIO (SavedArgsErrorList savedArgs) CommandErrors errs -> throwIO (SavedArgsErrorOther savedArgs errs) CommandReadyToGo (mkFlags, _) -> return (mkFlags (commandDefaultFlags command)) -- ----------------------------------------------------------------------------- -- * Exceptions -- ----------------------------------------------------------------------------- data SavedArgsError = SavedArgsErrorHelp Args | SavedArgsErrorList Args | SavedArgsErrorOther Args [String] deriving (Typeable) instance Show SavedArgsError where show (SavedArgsErrorHelp args) = "unexpected flag '--help', saved command line was:\n" ++ intercalate " " args show (SavedArgsErrorList args) = "unexpected flag '--list-options', saved command line was:\n" ++ intercalate " " args show (SavedArgsErrorOther args errs) = "saved command line was:\n" ++ intercalate " " args ++ "\n" ++ "encountered errors:\n" ++ intercalate "\n" errs instance Exception SavedArgsError cabal-install-3.8.1.0/src/Distribution/Client/ScriptUtils.hs0000644000000000000000000004550307346545000022074 0ustar0000000000000000{-# LANGUAGE LambdaCase #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} -- | Utilities to help commands with scripts -- module Distribution.Client.ScriptUtils ( getScriptCacheDirectoryRoot, getScriptHash, getScriptCacheDirectory, ensureScriptCacheDirectory, withContextAndSelectors, AcceptNoTargets(..), TargetContext(..), updateContextAndWriteProjectFile, updateContextAndWriteProjectFile', fakeProjectSourcePackage, lSrcpkgDescription ) where import Prelude () import Distribution.Client.Compat.Prelude hiding (toList) import Distribution.Compat.Lens import qualified Distribution.Types.Lens as 
L import Distribution.CabalSpecVersion ( CabalSpecVersion (..), cabalSpecLatest) import Distribution.Client.ProjectOrchestration import Distribution.Client.Config ( getCabalDir ) import Distribution.Client.DistDirLayout ( DistDirLayout(..) ) import Distribution.Client.HashValue ( hashValue, showHashValue ) import Distribution.Client.HttpUtils ( HttpTransport, configureTransport ) import Distribution.Client.NixStyleOptions ( NixStyleFlags (..) ) import Distribution.Client.ProjectConfig ( ProjectConfig(..), ProjectConfigShared(..) , reportParseResult, withProjectOrGlobalConfig , projectConfigHttpTransport ) import Distribution.Client.ProjectConfig.Legacy ( ProjectConfigSkeleton , parseProjectSkeleton, instantiateProjectConfigSkeleton ) import Distribution.Client.ProjectFlags ( flagIgnoreProject ) import Distribution.Client.RebuildMonad ( runRebuild ) import Distribution.Client.Setup ( ConfigFlags(..), GlobalFlags(..) ) import Distribution.Client.TargetSelector ( TargetSelectorProblem(..), TargetString(..) ) import Distribution.Client.Types ( PackageLocation(..), PackageSpecifier(..), UnresolvedSourcePackage ) import Distribution.FieldGrammar ( parseFieldGrammar, takeFields ) import Distribution.Fields ( ParseResult, parseFatalFailure, readFields ) import Distribution.PackageDescription ( ignoreConditions ) import Distribution.PackageDescription.FieldGrammar ( executableFieldGrammar ) import Distribution.PackageDescription.PrettyPrint ( showGenericPackageDescription ) import Distribution.Parsec ( Position(..) ) import Distribution.Simple.Flag ( fromFlagOrDefault, flagToMaybe ) import Distribution.Simple.PackageDescription ( parseString ) import Distribution.Simple.Setup ( Flag(..) 
) import Distribution.Simple.Compiler ( compilerInfo ) import Distribution.Simple.Utils ( createDirectoryIfMissingVerbose, createTempDirectory, die', handleDoesNotExist, readUTF8File, warn, writeUTF8File ) import qualified Distribution.SPDX.License as SPDX import Distribution.Solver.Types.SourcePackage as SP ( SourcePackage(..) ) import Distribution.System ( Platform(..) ) import Distribution.Types.BuildInfo ( BuildInfo(..) ) import Distribution.Types.CondTree ( CondTree(..) ) import Distribution.Types.Executable ( Executable(..) ) import Distribution.Types.GenericPackageDescription as GPD ( GenericPackageDescription(..), emptyGenericPackageDescription ) import Distribution.Types.PackageDescription ( PackageDescription(..), emptyPackageDescription ) import Distribution.Types.PackageName.Magic ( fakePackageCabalFileName, fakePackageId ) import Distribution.Utils.NubList ( fromNubList ) import Distribution.Client.ProjectPlanning ( configureCompiler ) import Distribution.Verbosity ( normal ) import Language.Haskell.Extension ( Language(..) ) import Control.Concurrent.MVar ( newEmptyMVar, putMVar, tryTakeMVar ) import Control.Exception ( bracket ) import qualified Data.ByteString.Char8 as BS import Data.ByteString.Lazy () import qualified Data.Set as S import System.Directory ( canonicalizePath, doesFileExist, getTemporaryDirectory, removeDirectoryRecursive ) import System.FilePath ( (), takeFileName ) import qualified Text.Parsec as P -- A note on multi-module script support #6787: -- Multi-module scripts are not supported and support is non-trivial. -- What you want to do is pass the absolute path to the script's directory in hs-source-dirs, -- but hs-source-dirs only accepts relative paths. This leaves you with several options none -- of which are particularly appealing. 
-- 1) Loosen the requirement that hs-source-dirs take relative paths -- 2) Add a field to BuildInfo that acts like an hs-source-dir, but accepts an absolute path -- 3) Use a path relative to the project root in hs-source-dirs, and pass extra flags to the -- repl to deal with the fact that the repl is relative to the working directory and not -- the project root. -- | Get the directory where script builds are cached. -- -- @CABAL_DIR\/script-builds\/@ getScriptCacheDirectoryRoot :: IO FilePath getScriptCacheDirectoryRoot = do cabalDir <- getCabalDir return $ cabalDir "script-builds" -- | Get the hash of a script's absolute path) -- -- Two hashes will be the same as long as the absolute paths -- are the same. getScriptHash :: FilePath -> IO String getScriptHash script = showHashValue . hashValue . fromString <$> canonicalizePath script -- | Get the directory for caching a script build. -- -- The only identity of a script is it's absolute path, so append the -- hashed path to @CABAL_DIR\/script-builds\/@ to get the cache directory. getScriptCacheDirectory :: FilePath -> IO FilePath getScriptCacheDirectory script = () <$> getScriptCacheDirectoryRoot <*> getScriptHash script -- | Get the directory for caching a script build and ensure it exists. -- -- The only identity of a script is it's absolute path, so append the -- hashed path to @CABAL_DIR\/script-builds\/@ to get the cache directory. ensureScriptCacheDirectory :: Verbosity -> FilePath -> IO FilePath ensureScriptCacheDirectory verbosity script = do cacheDir <- getScriptCacheDirectory script createDirectoryIfMissingVerbose verbosity True cacheDir return cacheDir -- | What your command should do when no targets are found. data AcceptNoTargets = RejectNoTargets -- ^ die on 'TargetSelectorNoTargetsInProject' | AcceptNoTargets -- ^ return a default 'TargetSelector' deriving (Eq, Show) -- | Information about the context in which we found the 'TargetSelector's. 
data TargetContext = ProjectContext -- ^ The target selectors are part of a project. | GlobalContext -- ^ The target selectors are from the global context. | ScriptContext FilePath Executable -- ^ The target selectors refer to a script. Contains the path to the script and -- the executable metadata parsed from the script deriving (Eq, Show) -- | Determine whether the targets represent regular targets or a script -- and return the proper context and target selectors. -- Die with an error message if selectors are valid as neither regular targets or as a script. -- -- In the case that the context refers to a temporary directory, -- delete it after the action finishes. withContextAndSelectors :: AcceptNoTargets -- ^ What your command should do when no targets are found. -> Maybe ComponentKind -- ^ A target filter -> NixStyleFlags a -- ^ Command line flags -> [String] -- ^ Target strings or a script and args. -> GlobalFlags -- ^ Global flags. -> (TargetContext -> ProjectBaseContext -> [TargetSelector] -> IO b) -- ^ The body of your command action. -> IO b withContextAndSelectors noTargets kind flags@NixStyleFlags {..} targetStrings globalFlags act = withTemporaryTempDirectory $ \mkTmpDir -> do (tc, ctx) <- withProjectOrGlobalConfig verbosity ignoreProject globalConfigFlag with (without mkTmpDir) (tc', ctx', sels) <- case targetStrings of -- Only script targets may contain spaces and or end with ':'. -- Trying to readTargetSelectors such a target leads to a parse error. 
[target] | any (\c -> isSpace c) target || ":" `isSuffixOf` target -> do scriptOrError target [TargetSelectorNoScript $ TargetString1 target] _ -> do -- In the case where a selector is both a valid target and script, assume it is a target, -- because you can disambiguate the script with "./script" readTargetSelectors (localPackages ctx) kind targetStrings >>= \case Left err@(TargetSelectorNoTargetsInProject:_) | [] <- targetStrings , AcceptNoTargets <- noTargets -> return (tc, ctx, defaultTarget) | (script:_) <- targetStrings -> scriptOrError script err Left err@(TargetSelectorNoSuch t _:_) | TargetString1 script <- t -> scriptOrError script err Left err@(TargetSelectorExpected t _ _:_) | TargetString1 script <- t -> scriptOrError script err Left err@(MatchingInternalError _ _ _:_) -- Handle ':' in middle of script name. | [script] <- targetStrings -> scriptOrError script err Left err -> reportTargetSelectorProblems verbosity err Right sels -> return (tc, ctx, sels) act tc' ctx' sels where verbosity = fromFlagOrDefault normal (configVerbosity configFlags) ignoreProject = flagIgnoreProject projectFlags cliConfig = commandLineFlagsToProjectConfig globalFlags flags mempty globalConfigFlag = projectConfigConfigFile (projectConfigShared cliConfig) defaultTarget = [TargetPackage TargetExplicitNamed [fakePackageId] Nothing] with = do ctx <- establishProjectBaseContext verbosity cliConfig OtherCommand return (ProjectContext, ctx) without mkDir globalConfig = do distDirLayout <- establishDummyDistDirLayout verbosity (globalConfig <> cliConfig) =<< mkDir ctx <- establishDummyProjectBaseContext verbosity (globalConfig <> cliConfig) distDirLayout [] OtherCommand return (GlobalContext, ctx) scriptOrError script err = do exists <- doesFileExist script if exists then do -- In the script case we always want a dummy context even when ignoreProject is False let mkCacheDir = ensureScriptCacheDirectory verbosity script (_, ctx) <- withProjectOrGlobalConfig verbosity (Flag True) 
globalConfigFlag with (without mkCacheDir) let projectRoot = distProjectRootDirectory $ distDirLayout ctx writeFile (projectRoot "scriptlocation") =<< canonicalizePath script scriptContents <- BS.readFile script executable <- readExecutableBlockFromScript verbosity scriptContents httpTransport <- configureTransport verbosity (fromNubList . projectConfigProgPathExtra $ projectConfigShared cliConfig) (flagToMaybe . projectConfigHttpTransport $ projectConfigBuildOnly cliConfig) projectCfgSkeleton <- readProjectBlockFromScript verbosity httpTransport (distDirLayout ctx) (takeFileName script) scriptContents (compiler, Platform arch os, _) <- runRebuild (distProjectRootDirectory . distDirLayout $ ctx) $ configureCompiler verbosity (distDirLayout ctx) ((fst $ ignoreConditions projectCfgSkeleton) <> projectConfig ctx) let projectCfg = instantiateProjectConfigSkeleton os arch (compilerInfo compiler) mempty projectCfgSkeleton :: ProjectConfig let executable' = executable & L.buildInfo . L.defaultLanguage %~ maybe (Just Haskell2010) Just ctx' = ctx & lProjectConfig %~ (<> projectCfg) return (ScriptContext script executable', ctx', defaultTarget) else reportTargetSelectorProblems verbosity err withTemporaryTempDirectory :: (IO FilePath -> IO a) -> IO a withTemporaryTempDirectory act = newEmptyMVar >>= \m -> bracket (getMkTmp m) (rmTmp m) act where -- We return an (IO Filepath) instead of a FilePath for two reasons: -- 1) To give the consumer the discretion to not create the tmpDir, -- but still grantee that it's deleted if they do create it -- 2) Because the path returned by createTempDirectory is not predicable getMkTmp m = return $ do tmpDir <- getTemporaryDirectory >>= flip createTempDirectory "cabal-repl." putMVar m tmpDir return tmpDir rmTmp m _ = tryTakeMVar m >>= maybe (return ()) (handleDoesNotExist () . removeDirectoryRecursive) -- | Add the 'SourcePackage' to the context and use it to write a .cabal file. 
updateContextAndWriteProjectFile' :: ProjectBaseContext -> SourcePackage (PackageLocation (Maybe FilePath)) -> IO ProjectBaseContext updateContextAndWriteProjectFile' ctx srcPkg = do let projectRoot = distProjectRootDirectory $ distDirLayout ctx packageFile = projectRoot fakePackageCabalFileName contents = showGenericPackageDescription (srcpkgDescription srcPkg) writePackageFile = writeUTF8File packageFile contents -- TODO This is here to prevent reconfiguration of cached repl packages. -- It's worth investigating why it's needed in the first place. packageFileExists <- doesFileExist packageFile if packageFileExists then do cached <- force <$> readUTF8File packageFile when (cached /= contents) writePackageFile else writePackageFile return (ctx & lLocalPackages %~ (++ [SpecificSourcePackage srcPkg])) -- | Add add the executable metadata to the context and write a .cabal file. updateContextAndWriteProjectFile :: ProjectBaseContext -> FilePath -> Executable -> IO ProjectBaseContext updateContextAndWriteProjectFile ctx scriptPath scriptExecutable = do let projectRoot = distProjectRootDirectory $ distDirLayout ctx absScript <- canonicalizePath scriptPath let -- Replace characters which aren't allowed in the executable component name with '_' -- Prefix with "cabal-script-" to make it clear to end users that the name may be mangled scriptExeName = "cabal-script-" ++ map censor (takeFileName scriptPath) censor c | c `S.member` ccNamecore = c | otherwise = '_' sourcePackage = fakeProjectSourcePackage projectRoot & lSrcpkgDescription . 
L.condExecutables .~ [(fromString scriptExeName, CondNode executable (targetBuildDepends $ buildInfo executable) [])] executable = scriptExecutable & L.modulePath .~ absScript updateContextAndWriteProjectFile' ctx sourcePackage parseScriptBlock :: BS.ByteString -> ParseResult Executable parseScriptBlock str = case readFields str of Right fs -> do let (fields, _) = takeFields fs parseFieldGrammar cabalSpecLatest fields (executableFieldGrammar "script") Left perr -> parseFatalFailure pos (show perr) where ppos = P.errorPos perr pos = Position (P.sourceLine ppos) (P.sourceColumn ppos) readScriptBlock :: Verbosity -> BS.ByteString -> IO Executable readScriptBlock verbosity = parseString parseScriptBlock verbosity "script block" -- | Extract the first encountered executable metadata block started and -- terminated by the below tokens or die. -- -- * @{- cabal:@ -- -- * @-}@ -- -- Return the metadata. readExecutableBlockFromScript :: Verbosity -> BS.ByteString -> IO Executable readExecutableBlockFromScript verbosity str = do str' <- case extractScriptBlock "cabal" str of Left e -> die' verbosity $ "Failed extracting script block: " ++ e Right x -> return x when (BS.all isSpace str') $ warn verbosity "Empty script block" readScriptBlock verbosity str' -- | Extract the first encountered project metadata block started and -- terminated by the below tokens. -- -- * @{- project:@ -- -- * @-}@ -- -- Return the metadata. readProjectBlockFromScript :: Verbosity -> HttpTransport -> DistDirLayout -> String -> BS.ByteString -> IO ProjectConfigSkeleton readProjectBlockFromScript verbosity httpTransport DistDirLayout{distDownloadSrcDirectory} scriptName str = do case extractScriptBlock "project" str of Left _ -> return mempty Right x -> reportParseResult verbosity "script" scriptName =<< parseProjectSkeleton distDownloadSrcDirectory httpTransport verbosity [] scriptName x -- | Extract the first encountered script metadata block started end -- terminated by the tokens -- -- * @{-
:@ -- -- * @-}@ -- -- appearing alone on lines (while tolerating trailing whitespace). -- These tokens are not part of the 'Right' result. -- -- In case of missing or unterminated blocks a 'Left'-error is -- returned. extractScriptBlock :: BS.ByteString -> BS.ByteString -> Either String BS.ByteString extractScriptBlock header str = goPre (BS.lines str) where isStartMarker = (== startMarker) . stripTrailSpace isEndMarker = (== endMarker) . stripTrailSpace stripTrailSpace = fst . BS.spanEnd isSpace -- before start marker goPre ls = case dropWhile (not . isStartMarker) ls of [] -> Left $ "`" ++ BS.unpack startMarker ++ "` start marker not found" (_:ls') -> goBody [] ls' goBody _ [] = Left $ "`" ++ BS.unpack endMarker ++ "` end marker not found" goBody acc (l:ls) | isEndMarker l = Right $! BS.unlines $ reverse acc | otherwise = goBody (l:acc) ls startMarker, endMarker :: BS.ByteString startMarker = "{- " <> header <> ":" endMarker = "-}" -- | The base for making a 'SourcePackage' for a fake project. -- It needs a 'Distribution.Types.Library.Library' or 'Executable' depending on the command. 
fakeProjectSourcePackage :: FilePath -> SourcePackage (PackageLocation loc) fakeProjectSourcePackage projectRoot = sourcePackage where sourcePackage = SourcePackage { srcpkgPackageId = fakePackageId , srcpkgDescription = genericPackageDescription , srcpkgSource = LocalUnpackedPackage projectRoot , srcpkgDescrOverride = Nothing } genericPackageDescription = emptyGenericPackageDescription { GPD.packageDescription = packageDescription } packageDescription = emptyPackageDescription { package = fakePackageId , specVersion = CabalSpecV2_2 , licenseRaw = Left SPDX.NONE } -- Lenses -- | A lens for the 'srcpkgDescription' field of 'SourcePackage' lSrcpkgDescription :: Lens' (SourcePackage loc) GenericPackageDescription lSrcpkgDescription f s = fmap (\x -> s { srcpkgDescription = x }) (f (srcpkgDescription s)) {-# inline lSrcpkgDescription #-} lLocalPackages :: Lens' ProjectBaseContext [PackageSpecifier UnresolvedSourcePackage] lLocalPackages f s = fmap (\x -> s { localPackages = x }) (f (localPackages s)) {-# inline lLocalPackages #-} lProjectConfig :: Lens' ProjectBaseContext ProjectConfig lProjectConfig f s = fmap (\x -> s { projectConfig = x }) (f (projectConfig s)) {-# inline lProjectConfig #-} -- Character classes -- Transcribed from "templates/Lexer.x" ccSpace, ccCtrlchar, ccPrintable, ccSymbol', ccParen, ccNamecore :: Set Char ccSpace = S.fromList " " ccCtrlchar = S.fromList $ [chr 0x0 .. chr 0x1f] ++ [chr 0x7f] ccPrintable = S.fromList [chr 0x0 .. 
chr 0xff] S.\\ ccCtrlchar ccSymbol' = S.fromList ",=<>+*&|!$%^@#?/\\~" ccParen = S.fromList "()[]" ccNamecore = ccPrintable S.\\ S.unions [ccSpace, S.fromList ":\"{}", ccParen, ccSymbol'] cabal-install-3.8.1.0/src/Distribution/Client/Security/0000755000000000000000000000000007346545000021053 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/Security/DNS.hs0000644000000000000000000001653207346545000022042 0ustar0000000000000000{-# LANGUAGE CPP #-} module Distribution.Client.Security.DNS ( queryBootstrapMirrors ) where import Prelude () import Distribution.Client.Compat.Prelude import Network.URI (URI(..), URIAuth(..), parseURI) import Control.Exception (try) import Distribution.Simple.Utils #if defined(MIN_VERSION_resolv) || defined(MIN_VERSION_windns) import Network.DNS (queryTXT, Name(..), CharStr(..)) import qualified Data.ByteString.Char8 as BS.Char8 #else import Distribution.Simple.Program.Db ( emptyProgramDb, addKnownProgram , configureAllKnownPrograms, lookupProgram ) import Distribution.Simple.Program ( simpleProgram , programInvocation , getProgramInvocationOutput ) #endif -- | Try to lookup RFC1464-encoded mirror urls for a Hackage -- repository url by performing a DNS TXT lookup on the -- @_mirrors.@-prefixed URL hostname. -- -- Example: for @http://hackage.haskell.org/@ -- perform a DNS TXT query for the hostname -- @_mirrors.hackage.haskell.org@ which may look like e.g. -- -- > _mirrors.hackage.haskell.org. 300 IN TXT -- > "0.urlbase=http://hackage.fpcomplete.com/" -- > "1.urlbase=http://objects-us-west-1.dream.io/hackage-mirror/" -- -- NB: hackage-security doesn't require DNS lookups being trustworthy, -- as the trust is established via the cryptographically signed TUF -- meta-data that is retrieved from the resolved Hackage repository. 
-- Moreover, we already have to protect against a compromised -- @hackage.haskell.org@ DNS entry, so an the additional -- @_mirrors.hackage.haskell.org@ DNS entry in the same SOA doesn't -- constitute a significant new attack vector anyway. -- queryBootstrapMirrors :: Verbosity -> URI -> IO [URI] #if defined(MIN_VERSION_resolv) || defined(MIN_VERSION_windns) -- use @resolv@ package for performing DNS queries queryBootstrapMirrors verbosity repoUri | Just auth <- uriAuthority repoUri = do let mirrorsDnsName = Name (BS.Char8.pack ("_mirrors." ++ uriRegName auth)) mirrors' <- try $ do txts <- queryTXT mirrorsDnsName evaluate (force $ extractMirrors (map snd txts)) mirrors <- case mirrors' of Left e -> do warn verbosity ("Caught exception during _mirrors lookup:"++ displayException (e :: SomeException)) return [] Right v -> return v if null mirrors then warn verbosity ("No mirrors found for " ++ show repoUri) else do info verbosity ("located " ++ show (length mirrors) ++ " mirrors for " ++ show repoUri ++ " :") for_ mirrors $ \url -> info verbosity ("- " ++ show url) return mirrors | otherwise = return [] -- | Extract list of mirrors from 'queryTXT' result extractMirrors :: [[CharStr]] -> [URI] extractMirrors txtChunks = mapMaybe (parseURI . snd) . sort $ vals where vals = [ (kn,v) | CharStr e <- concat txtChunks , Just (k,v) <- [splitRfc1464 (BS.Char8.unpack e)] , Just kn <- [isUrlBase k] ] ---------------------------------------------------------------------------- #else /* !defined(MIN_VERSION_resolv) */ -- use external method via @nslookup@ queryBootstrapMirrors verbosity repoUri | Just auth <- uriAuthority repoUri = do progdb <- configureAllKnownPrograms verbosity $ addKnownProgram nslookupProg emptyProgramDb case lookupProgram nslookupProg progdb of Nothing -> do warn verbosity "'nslookup' tool missing - can't locate mirrors" return [] Just nslookup -> do let mirrorsDnsName = "_mirrors." 
++ uriRegName auth mirrors' <- try $ do out <- getProgramInvocationOutput verbosity $ programInvocation nslookup ["-query=TXT", mirrorsDnsName] evaluate (force $ extractMirrors mirrorsDnsName out) mirrors <- case mirrors' of Left e -> do warn verbosity ("Caught exception during _mirrors lookup:"++ displayException (e :: SomeException)) return [] Right v -> return v if null mirrors then warn verbosity ("No mirrors found for " ++ show repoUri) else do info verbosity ("located " ++ show (length mirrors) ++ " mirrors for " ++ show repoUri ++ " :") for_ mirrors $ \url -> info verbosity ("- " ++ show url) return mirrors | otherwise = return [] where nslookupProg = simpleProgram "nslookup" -- | Extract list of mirrors from @nslookup -query=TXT@ output. extractMirrors :: String -> String -> [URI] extractMirrors hostname s0 = mapMaybe (parseURI . snd) . sort $ vals where vals = [ (kn,v) | (h,ents) <- fromMaybe [] $ parseNsLookupTxt s0 , h == hostname , e <- ents , Just (k,v) <- [splitRfc1464 e] , Just kn <- [isUrlBase k] ] -- | Parse output of @nslookup -query=TXT $HOSTNAME@ tolerantly parseNsLookupTxt :: String -> Maybe [(String,[String])] parseNsLookupTxt = go0 [] [] where -- approximate grammar: -- := { } -- ( starts at begin of line, but may span multiple lines) -- := ^ TAB "text =" { } -- := string enclosed by '"'s ('\' and '"' are \-escaped) -- scan for ^ "text =" go0 [] _ [] = Nothing go0 res _ [] = Just (reverse res) go0 res _ ('\n':xs) = go0 res [] xs go0 res lw ('\t':'t':'e':'x':'t':' ':'=':xs) = go1 res (reverse lw) [] (dropWhile isSpace xs) go0 res lw (x:xs) = go0 res (x:lw) xs -- collect at least one go1 res lw qs ('"':xs) = case qstr "" xs of Just (s, xs') -> go1 res lw (s:qs) (dropWhile isSpace xs') Nothing -> Nothing -- bad quoting go1 _ _ [] _ = Nothing -- missing qstring go1 res lw qs xs = go0 ((lw,reverse qs):res) [] xs qstr _ ('\n':_) = Nothing -- We don't support unquoted LFs qstr acc ('\\':'\\':cs) = qstr ('\\':acc) cs qstr acc ('\\':'"':cs) = qstr 
('"':acc) cs qstr acc ('"':cs) = Just (reverse acc, cs) qstr acc (c:cs) = qstr (c:acc) cs qstr _ [] = Nothing #endif ---------------------------------------------------------------------------- -- | Helper used by 'extractMirrors' for extracting @urlbase@ keys from Rfc1464-encoded data isUrlBase :: String -> Maybe Int isUrlBase s | ".urlbase" `isSuffixOf` s, not (null ns), all isDigit ns = readMaybe ns | otherwise = Nothing where ns = take (length s - 8) s -- | Split a TXT string into key and value according to RFC1464. -- Returns 'Nothing' if parsing fails. splitRfc1464 :: String -> Maybe (String,String) splitRfc1464 = go "" where go _ [] = Nothing go acc ('`':c:cs) = go (c:acc) cs go acc ('=':cs) = go2 (reverse acc) "" cs go acc (c:cs) | isSpace c = go acc cs | otherwise = go (c:acc) cs go2 k acc [] = Just (k,reverse acc) go2 _ _ ['`'] = Nothing go2 k acc ('`':c:cs) = go2 k (c:acc) cs go2 k acc (c:cs) = go2 k (c:acc) cs cabal-install-3.8.1.0/src/Distribution/Client/Security/HTTP.hs0000644000000000000000000001706107346545000022173 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE StandaloneDeriving #-} -- | Implementation of 'HttpLib' using cabal-install's own 'HttpTransport' module Distribution.Client.Security.HTTP (HttpLib, transportAdapter) where import Distribution.Solver.Compat.Prelude import Prelude () -- stdlibs import System.Directory ( getTemporaryDirectory ) import Network.URI ( URI ) import qualified Data.ByteString.Lazy as BS.L import qualified Network.HTTP as HTTP -- Cabal/cabal-install import Distribution.Verbosity ( Verbosity ) import Distribution.Client.HttpUtils ( HttpTransport(..), HttpCode ) import Distribution.Client.Utils ( withTempFileName ) -- hackage-security import Hackage.Security.Client.Repository.HttpLib (HttpLib (..)) import qualified Hackage.Security.Client as HC 
import qualified Hackage.Security.Client.Repository.HttpLib as HC import qualified Hackage.Security.Util.Checked as HC import qualified Hackage.Security.Util.Pretty as HC {------------------------------------------------------------------------------- 'HttpLib' implementation -------------------------------------------------------------------------------} -- | Translate from hackage-security's 'HttpLib' to cabal-install's 'HttpTransport' -- -- NOTE: The match between these two APIs is currently not perfect: -- -- * We don't get any response headers back from the 'HttpTransport', so we -- don't know if the server supports range requests. For now we optimistically -- assume that it does. -- * The 'HttpTransport' wants to know where to place the resulting file, -- whereas the 'HttpLib' expects an 'IO' action which streams the download; -- the security library then makes sure that the file gets written to a -- location which is suitable (in particular, to a temporary file in the -- directory where the file needs to end up, so that it can "finalize" the -- file simply by doing 'renameFile'). Right now we write the file to a -- temporary file in the system temp directory here and then read it again -- to pass it to the security library; this is a problem for two reasons: it -- is a source of inefficiency; and it means that the security library cannot -- insist on a minimum download rate (potential security attack). -- Fixing it however would require changing the 'HttpTransport'. 
transportAdapter :: Verbosity -> IO HttpTransport -> HttpLib transportAdapter verbosity getTransport = HttpLib{ httpGet = \headers uri callback -> do transport <- getTransport httpGetImpl verbosity transport headers uri callback , httpGetRange = \headers uri range callback -> do transport <- getTransport getRange verbosity transport headers uri range callback } httpGetImpl :: HC.Throws HC.SomeRemoteError => Verbosity -> HttpTransport -> [HC.HttpRequestHeader] -> URI -> ([HC.HttpResponseHeader] -> HC.BodyReader -> IO a) -> IO a httpGetImpl verbosity transport reqHeaders uri callback = wrapCustomEx $ do get' verbosity transport reqHeaders uri Nothing $ \code respHeaders br -> case code of 200 -> callback respHeaders br _ -> HC.throwChecked $ UnexpectedResponse uri code getRange :: HC.Throws HC.SomeRemoteError => Verbosity -> HttpTransport -> [HC.HttpRequestHeader] -> URI -> (Int, Int) -> (HC.HttpStatus -> [HC.HttpResponseHeader] -> HC.BodyReader -> IO a) -> IO a getRange verbosity transport reqHeaders uri range callback = wrapCustomEx $ do get' verbosity transport reqHeaders uri (Just range) $ \code respHeaders br -> case code of 200 -> callback HC.HttpStatus200OK respHeaders br 206 -> callback HC.HttpStatus206PartialContent respHeaders br _ -> HC.throwChecked $ UnexpectedResponse uri code -- | Internal generalization of 'get' and 'getRange' get' :: Verbosity -> HttpTransport -> [HC.HttpRequestHeader] -> URI -> Maybe (Int, Int) -> (HttpCode -> [HC.HttpResponseHeader] -> HC.BodyReader -> IO a) -> IO a get' verbosity transport reqHeaders uri mRange callback = do tempDir <- getTemporaryDirectory withTempFileName tempDir "transportAdapterGet" $ \temp -> do (code, _etag) <- getHttp transport verbosity uri Nothing temp reqHeaders' br <- HC.bodyReaderFromBS =<< BS.L.readFile temp callback code [HC.HttpResponseAcceptRangesBytes] br where reqHeaders' = mkReqHeaders reqHeaders mRange {------------------------------------------------------------------------------- Request 
headers -------------------------------------------------------------------------------} mkRangeHeader :: Int -> Int -> HTTP.Header mkRangeHeader from to = HTTP.Header HTTP.HdrRange rangeHeader where -- Content-Range header uses inclusive rather than exclusive bounds -- See rangeHeader = "bytes=" ++ show from ++ "-" ++ show (to - 1) mkReqHeaders :: [HC.HttpRequestHeader] -> Maybe (Int, Int) -> [HTTP.Header] mkReqHeaders reqHeaders mRange' = concat [ tr [] reqHeaders , [mkRangeHeader fr to | Just (fr, to) <- [mRange]] ] where -- guard against malformed range headers. mRange = case mRange' of Just (fr, to) | fr >= to -> Nothing _ -> mRange' tr :: [(HTTP.HeaderName, [String])] -> [HC.HttpRequestHeader] -> [HTTP.Header] tr acc [] = concatMap finalize acc tr acc (HC.HttpRequestMaxAge0:os) = tr (insert HTTP.HdrCacheControl ["max-age=0"] acc) os tr acc (HC.HttpRequestNoTransform:os) = tr (insert HTTP.HdrCacheControl ["no-transform"] acc) os -- Some headers are comma-separated, others need multiple headers for -- multiple options. -- -- TODO: Right we just comma-separate all of them. 
finalize :: (HTTP.HeaderName, [String]) -> [HTTP.Header] finalize (name, strs) = [HTTP.Header name (intercalate ", " (reverse strs))] insert :: Eq a => a -> [b] -> [(a, [b])] -> [(a, [b])] insert x y = modifyAssocList x (++ y) -- modify the first matching element modifyAssocList :: Eq a => a -> (b -> b) -> [(a, b)] -> [(a, b)] modifyAssocList a f = go where go [] = [] go (p@(a', b) : xs) | a == a' = (a', f b) : xs | otherwise = p : go xs {------------------------------------------------------------------------------- Custom exceptions -------------------------------------------------------------------------------} data UnexpectedResponse = UnexpectedResponse URI Int deriving (Typeable) instance HC.Pretty UnexpectedResponse where pretty (UnexpectedResponse uri code) = "Unexpected response " ++ show code ++ " for " ++ show uri #if MIN_VERSION_base(4,8,0) deriving instance Show UnexpectedResponse instance Exception UnexpectedResponse where displayException = HC.pretty #else instance Show UnexpectedResponse where show = HC.pretty instance Exception UnexpectedResponse #endif wrapCustomEx :: ( ( HC.Throws UnexpectedResponse , HC.Throws IOException ) => IO a) -> (HC.Throws HC.SomeRemoteError => IO a) wrapCustomEx act = HC.handleChecked (\(ex :: UnexpectedResponse) -> go ex) $ HC.handleChecked (\(ex :: IOException) -> go ex) $ act where go ex = HC.throwChecked (HC.SomeRemoteError ex) cabal-install-3.8.1.0/src/Distribution/Client/Setup.hs0000644000000000000000000031234207346545000020705 0ustar0000000000000000{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE LambdaCase #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Setup -- Copyright : (c) David Himmelstrup 2005 -- License : BSD-like -- -- Maintainer : lemmih@gmail.com -- Stability : provisional -- Portability : portable -- -- 
----------------------------------------------------------------------------- module Distribution.Client.Setup ( globalCommand, GlobalFlags(..), defaultGlobalFlags , RepoContext(..), withRepoContext , configureCommand, ConfigFlags(..), configureOptions, filterConfigureFlags , configPackageDB', configCompilerAux' , configureExCommand, ConfigExFlags(..), defaultConfigExFlags , buildCommand, BuildFlags(..) , filterTestFlags , replCommand, testCommand, benchmarkCommand, testOptions, benchmarkOptions , configureExOptions, reconfigureCommand , installCommand, InstallFlags(..), installOptions, defaultInstallFlags , filterHaddockArgs, filterHaddockFlags, haddockOptions , defaultSolver, defaultMaxBackjumps , listCommand, ListFlags(..), listNeedsCompiler ,UpdateFlags(..), defaultUpdateFlags , infoCommand, InfoFlags(..) , fetchCommand, FetchFlags(..) , freezeCommand, FreezeFlags(..) , genBoundsCommand , getCommand, unpackCommand, GetFlags(..) , checkCommand , formatCommand , uploadCommand, UploadFlags(..), IsCandidate(..) , reportCommand, ReportFlags(..) , runCommand , initCommand, initOptions, IT.InitFlags(..) , actAsSetupCommand, ActAsSetupFlags(..) , userConfigCommand, UserConfigFlags(..) , manpageCommand , haddockCommand , cleanCommand , copyCommand , registerCommand , liftOptions , yesNoOpt ) where import Prelude () import Distribution.Client.Compat.Prelude hiding (get) import Distribution.Client.Types.Credentials (Username (..), Password (..)) import Distribution.Client.Types.Repo (RemoteRepo(..), LocalRepo (..)) import Distribution.Client.Types.AllowNewer (AllowNewer(..), AllowOlder(..), RelaxDeps(..)) import Distribution.Client.Types.WriteGhcEnvironmentFilesPolicy import Distribution.Client.BuildReports.Types ( ReportLevel(..) ) import Distribution.Client.Dependency.Types ( PreSolver(..) 
) import Distribution.Client.IndexUtils.ActiveRepos ( ActiveRepos ) import Distribution.Client.IndexUtils.IndexState ( TotalIndexState, headTotalIndexState ) import qualified Distribution.Client.Init.Types as IT import qualified Distribution.Client.Init.Defaults as IT import Distribution.Client.Targets ( UserConstraint, readUserConstraint ) import Distribution.Utils.NubList ( NubList, toNubList, fromNubList) import Distribution.Solver.Types.ConstraintSource import Distribution.Solver.Types.Settings import Distribution.Simple.Compiler ( Compiler, PackageDB, PackageDBStack ) import Distribution.Simple.Program (ProgramDb, defaultProgramDb) import Distribution.Simple.Command hiding (boolOpt, boolOpt') import qualified Distribution.Simple.Command as Command import Distribution.Simple.Configure ( configCompilerAuxEx, interpretPackageDbFlags, computeEffectiveProfiling ) import qualified Distribution.Simple.Setup as Cabal import Distribution.Simple.Flag ( Flag(..), toFlag, flagToMaybe, flagToList, maybeToFlag , flagElim, fromFlagOrDefault ) import Distribution.Simple.Setup ( ConfigFlags(..), BuildFlags(..), ReplFlags , TestFlags, BenchmarkFlags , HaddockFlags(..) , CleanFlags(..) , CopyFlags(..), RegisterFlags(..) , readPackageDbList, showPackageDbList , BooleanFlag(..), optionVerbosity , boolOpt, boolOpt', trueArg, falseArg , optionNumJobs ) import Distribution.Simple.InstallDirs ( PathTemplate, InstallDirs(..) , toPathTemplate, fromPathTemplate, combinePathTemplate ) import Distribution.Version ( Version, mkVersion ) import Distribution.Types.GivenComponent ( GivenComponent(..) ) import Distribution.Types.PackageVersionConstraint ( PackageVersionConstraint(..) ) import Distribution.Types.UnqualComponentName ( unqualComponentNameToPackageName ) import Distribution.PackageDescription ( BuildType(..), RepoKind(..), LibraryName(..) 
) import Distribution.System ( Platform ) import Distribution.ReadE ( ReadE(..), succeedReadE, parsecToReadE, parsecToReadEErr, unexpectMsgString ) import qualified Distribution.Compat.CharParsing as P import Distribution.Verbosity ( lessVerbose, normal, verboseNoFlags, verboseNoTimestamp ) import Distribution.Simple.Utils ( wrapText ) import Distribution.Client.GlobalFlags ( GlobalFlags(..), defaultGlobalFlags , RepoContext(..), withRepoContext ) import Distribution.Client.ManpageFlags (ManpageFlags, defaultManpageFlags, manpageOptions) import Distribution.FieldGrammar.Newtypes (SpecVersion (..)) import Data.List ( deleteFirstsBy ) import System.FilePath ( () ) globalCommand :: [Command action] -> CommandUI GlobalFlags globalCommand commands = CommandUI { commandName = "", commandSynopsis = "Command line interface to the Haskell Cabal infrastructure.", commandUsage = \pname -> "See http://www.haskell.org/cabal/ for more information.\n" ++ "\n" ++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n", commandDescription = Just $ \pname -> let commands' = commands ++ [commandAddAction helpCommandUI undefined] cmdDescs = getNormalCommandDescriptions commands' -- if new commands are added, we want them to appear even if they -- are not included in the custom listing below. Thus, we calculate -- the `otherCmds` list and append it under the `other` category. -- Alternatively, a new testcase could be added that ensures that -- the set of commands listed here is equal to the set of commands -- that are actually available. 
otherCmds = deleteFirstsBy (==) (map fst cmdDescs) [ "help" , "update" , "install" , "fetch" , "list" , "info" , "user-config" , "get" , "unpack" , "init" , "configure" , "build" , "clean" , "run" , "repl" , "test" , "bench" , "check" , "sdist" , "upload" , "report" , "freeze" , "gen-bounds" , "outdated" , "haddock" , "hscolour" , "exec" , "new-build" , "new-configure" , "new-repl" , "new-freeze" , "new-run" , "new-test" , "new-bench" , "new-haddock" , "new-exec" , "new-update" , "new-install" , "new-clean" , "new-sdist" , "list-bin" -- v1 commands, stateful style , "v1-build" , "v1-configure" , "v1-repl" , "v1-freeze" , "v1-run" , "v1-test" , "v1-bench" , "v1-haddock" , "v1-exec" , "v1-update" , "v1-install" , "v1-clean" , "v1-sdist" , "v1-doctest" , "v1-copy" , "v1-register" , "v1-reconfigure" -- v2 commands, nix-style , "v2-build" , "v2-configure" , "v2-repl" , "v2-freeze" , "v2-run" , "v2-test" , "v2-bench" , "v2-haddock" , "v2-exec" , "v2-update" , "v2-install" , "v2-clean" , "v2-sdist" ] maxlen = maximum $ [length name | (name, _) <- cmdDescs] align str = str ++ replicate (maxlen - length str) ' ' startGroup n = " ["++n++"]" par = "" addCmd n = case lookup n cmdDescs of Nothing -> "" Just d -> " " ++ align n ++ " " ++ d in "Commands:\n" ++ unlines ( [ startGroup "global" , addCmd "update" , addCmd "install" , par , addCmd "help" , addCmd "info" , addCmd "list" , addCmd "fetch" , addCmd "user-config" , par , startGroup "package" , addCmd "get" , addCmd "unpack" , addCmd "init" , par , addCmd "configure" , addCmd "build" , addCmd "clean" , par , addCmd "run" , addCmd "repl" , addCmd "test" , addCmd "bench" , par , addCmd "check" , addCmd "sdist" , addCmd "upload" , addCmd "report" , par , addCmd "freeze" , addCmd "gen-bounds" , addCmd "outdated" , addCmd "haddock" , addCmd "hscolour" , addCmd "exec" , addCmd "list-bin" , par , startGroup "new-style projects (forwards-compatible aliases)" , addCmd "v2-build" , addCmd "v2-configure" , addCmd "v2-repl" , addCmd 
"v2-run" , addCmd "v2-test" , addCmd "v2-bench" , addCmd "v2-freeze" , addCmd "v2-haddock" , addCmd "v2-exec" , addCmd "v2-update" , addCmd "v2-install" , addCmd "v2-clean" , addCmd "v2-sdist" , par , startGroup "legacy command aliases" , addCmd "v1-build" , addCmd "v1-configure" , addCmd "v1-repl" , addCmd "v1-run" , addCmd "v1-test" , addCmd "v1-bench" , addCmd "v1-freeze" , addCmd "v1-haddock" , addCmd "v1-exec" , addCmd "v1-update" , addCmd "v1-install" , addCmd "v1-clean" , addCmd "v1-sdist" , addCmd "v1-doctest" , addCmd "v1-copy" , addCmd "v1-register" , addCmd "v1-reconfigure" ] ++ if null otherCmds then [] else par :startGroup "other" :[addCmd n | n <- otherCmds]) ++ "\n" ++ "For more information about a command use:\n" ++ " " ++ pname ++ " COMMAND --help\n" ++ "or " ++ pname ++ " help COMMAND\n" ++ "\n" ++ "To install Cabal packages from hackage use:\n" ++ " " ++ pname ++ " install foo [--dry-run]\n" ++ "\n" ++ "Occasionally you need to update the list of available packages:\n" ++ " " ++ pname ++ " update\n", commandNotes = Nothing, commandDefaultFlags = mempty, commandOptions = args } where args :: ShowOrParseArgs -> [OptionField GlobalFlags] args ShowArgs = argsShown args ParseArgs = argsShown ++ argsNotShown -- arguments we want to show in the help argsShown :: [OptionField GlobalFlags] argsShown = [ option ['V'] ["version"] "Print version information" globalVersion (\v flags -> flags { globalVersion = v }) trueArg ,option [] ["numeric-version"] "Print just the version number" globalNumericVersion (\v flags -> flags { globalNumericVersion = v }) trueArg ,option [] ["config-file"] "Set an alternate location for the config file" globalConfigFile (\v flags -> flags { globalConfigFile = v }) (reqArgFlag "FILE") ,option [] ["ignore-expiry"] "Ignore expiry dates on signed metadata (use only in exceptional circumstances)" globalIgnoreExpiry (\v flags -> flags { globalIgnoreExpiry = v }) trueArg ,option [] ["http-transport"] "Set a transport for http(s) 
requests. Accepts 'curl', 'wget', 'powershell', and 'plain-http'. (default: 'curl')" globalHttpTransport (\v flags -> flags { globalHttpTransport = v }) (reqArgFlag "HttpTransport") ,multiOption "nix" globalNix (\v flags -> flags { globalNix = v }) [ noArg (Flag True) [] ["enable-nix"] "Enable Nix integration: run commands through nix-shell if a 'shell.nix' file exists", noArg (Flag False) [] ["disable-nix"] "Disable Nix integration" ] ,option [] ["store-dir", "storedir"] "The location of the build store" globalStoreDir (\v flags -> flags { globalStoreDir = v }) (reqArgFlag "DIR") , option [] ["active-repositories"] "The active package repositories (set to ':none' to disable all repositories)" globalActiveRepos (\v flags -> flags { globalActiveRepos = v }) (reqArg "REPOS" (parsecToReadE (\err -> "Error parsing active-repositories: " ++ err) (toFlag `fmap` parsec)) (map prettyShow . flagToList)) ] -- arguments we don't want shown in the help -- the remote repo flags are not useful compared to the more general "active-repositories" flag. -- the global logs directory was only used in v1, while in v2 we have specific project config logs dirs -- default-user-config is support for a relatively obscure workflow for v1-freeze. argsNotShown :: [OptionField GlobalFlags] argsNotShown = [ option [] ["remote-repo"] "The name and url for a remote repository" globalRemoteRepos (\v flags -> flags { globalRemoteRepos = v }) (reqArg' "NAME:URL" (toNubList . maybeToList . readRemoteRepo) (map showRemoteRepo . fromNubList)) ,option [] ["local-no-index-repo"] "The name and a path for a local no-index repository" globalLocalNoIndexRepos (\v flags -> flags { globalLocalNoIndexRepos = v }) (reqArg' "NAME:PATH" (toNubList . maybeToList . readLocalRepo) (map showLocalRepo . 
fromNubList)) ,option [] ["remote-repo-cache"] "The location where downloads from all remote repos are cached" globalCacheDir (\v flags -> flags { globalCacheDir = v }) (reqArgFlag "DIR") ,option [] ["logs-dir", "logsdir"] "The location to put log files" globalLogsDir (\v flags -> flags { globalLogsDir = v }) (reqArgFlag "DIR") ,option [] ["default-user-config"] "Set a location for a cabal.config file for projects without their own cabal.config freeze file." globalConstraintsFile (\v flags -> flags {globalConstraintsFile = v}) (reqArgFlag "FILE") ] -- ------------------------------------------------------------ -- * Config flags -- ------------------------------------------------------------ configureCommand :: CommandUI ConfigFlags configureCommand = c { commandName = "configure" , commandDefaultFlags = mempty , commandDescription = Just $ \_ -> wrapText $ "Configure how the package is built by setting " ++ "package (and other) flags.\n" ++ "\n" ++ "The configuration affects several other commands, " ++ "including v1-build, v1-test, v1-bench, v1-run, v1-repl.\n" , commandUsage = \pname -> "Usage: " ++ pname ++ " v1-configure [FLAGS]\n" , commandNotes = Just $ \pname -> (Cabal.programFlagsDescription defaultProgramDb ++ "\n") ++ "Examples:\n" ++ " " ++ pname ++ " v1-configure\n" ++ " Configure with defaults;\n" ++ " " ++ pname ++ " v1-configure --enable-tests -fcustomflag\n" ++ " Configure building package including tests,\n" ++ " with some package-specific flag.\n" } where c = Cabal.configureCommand defaultProgramDb configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags] configureOptions = commandOptions configureCommand -- | Given some 'ConfigFlags' for the version of Cabal that -- cabal-install was built with, and a target older 'Version' of -- Cabal that we want to pass these flags to, convert the -- flags into a form that will be accepted by the older -- Setup script. 
Generally speaking, this just means filtering -- out flags that the old Cabal library doesn't understand, but -- in some cases it may also mean "emulating" a feature using -- some more legacy flags. filterConfigureFlags :: ConfigFlags -> Version -> ConfigFlags filterConfigureFlags flags cabalLibVersion -- NB: we expect the latest version to be the most common case, -- so test it first. | cabalLibVersion >= mkVersion [3,7,0] = flags_latest -- The naming convention is that flags_version gives flags with -- all flags *introduced* in version eliminated. -- It is NOT the latest version of Cabal library that -- these flags work for; version of introduction is a more -- natural metric. | cabalLibVersion < mkVersion [1,3,10] = flags_1_3_10 | cabalLibVersion < mkVersion [1,10,0] = flags_1_10_0 | cabalLibVersion < mkVersion [1,12,0] = flags_1_12_0 | cabalLibVersion < mkVersion [1,14,0] = flags_1_14_0 | cabalLibVersion < mkVersion [1,18,0] = flags_1_18_0 | cabalLibVersion < mkVersion [1,19,1] = flags_1_19_1 | cabalLibVersion < mkVersion [1,19,2] = flags_1_19_2 | cabalLibVersion < mkVersion [1,21,1] = flags_1_21_1 | cabalLibVersion < mkVersion [1,22,0] = flags_1_22_0 | cabalLibVersion < mkVersion [1,22,1] = flags_1_22_1 | cabalLibVersion < mkVersion [1,23,0] = flags_1_23_0 | cabalLibVersion < mkVersion [1,25,0] = flags_1_25_0 | cabalLibVersion < mkVersion [2,1,0] = flags_2_1_0 | cabalLibVersion < mkVersion [2,5,0] = flags_2_5_0 | cabalLibVersion < mkVersion [3,7,0] = flags_3_7_0 | otherwise = error "the impossible just happened" -- see first guard where flags_latest = flags { -- Cabal >= 1.19.1 uses '--dependency' and does not need '--constraint'. -- Note: this is not in the wrong place. configConstraints gets -- repopulated in flags_1_19_1 but it needs to be set to empty for -- newer versions first. 
configConstraints = [] } flags_3_7_0 = flags_latest { -- Cabal < 3.7 does not know about --extra-lib-dirs-static configExtraLibDirsStatic = [], -- Cabal < 3.7 does not understand '--enable-build-info' or '--disable-build-info' configDumpBuildInfo = NoFlag } flags_2_5_0 = flags_3_7_0 { -- Cabal < 2.5 does not understand --dependency=pkg:component=cid -- (public sublibraries), so we convert it to the legacy -- --dependency=pkg_or_internal_compoent=cid configDependencies = let convertToLegacyInternalDep (GivenComponent _ (LSubLibName cn) cid) = Just $ GivenComponent (unqualComponentNameToPackageName cn) LMainLibName cid convertToLegacyInternalDep (GivenComponent pn LMainLibName cid) = Just $ GivenComponent pn LMainLibName cid in catMaybes $ convertToLegacyInternalDep <$> configDependencies flags -- Cabal < 2.5 doesn't know about '--allow-depending-on-private-libs'. , configAllowDependingOnPrivateLibs = NoFlag -- Cabal < 2.5 doesn't know about '--enable/disable-executable-static'. , configFullyStaticExe = NoFlag } flags_2_1_0 = flags_2_5_0 { -- Cabal < 2.1 doesn't know about -v +timestamp modifier configVerbosity = fmap verboseNoTimestamp (configVerbosity flags_latest) -- Cabal < 2.1 doesn't know about ---static , configStaticLib = NoFlag , configSplitSections = NoFlag } flags_1_25_0 = flags_2_1_0 { -- Cabal < 1.25.0 doesn't know about --dynlibdir. configInstallDirs = configInstallDirs_1_25_0, -- Cabal < 1.25 doesn't have extended verbosity syntax configVerbosity = fmap verboseNoFlags (configVerbosity flags_2_1_0), -- Cabal < 1.25 doesn't support --deterministic configDeterministic = mempty } configInstallDirs_1_25_0 = let dirs = configInstallDirs flags in dirs { dynlibdir = NoFlag , libexecsubdir = NoFlag , libexecdir = maybeToFlag $ combinePathTemplate <$> flagToMaybe (libexecdir dirs) <*> flagToMaybe (libexecsubdir dirs) } -- Cabal < 1.23 doesn't know about '--profiling-detail'. -- Cabal < 1.23 has a hacked up version of 'enable-profiling' -- which we shouldn't use. 
(tryLibProfiling, tryExeProfiling) = computeEffectiveProfiling flags flags_1_23_0 = flags_1_25_0 { configProfDetail = NoFlag , configProfLibDetail = NoFlag , configIPID = NoFlag , configProf = NoFlag , configProfExe = Flag tryExeProfiling , configProfLib = Flag tryLibProfiling } -- Cabal == 1.22.0.* had a discontinuity (see #5946 or e9a8d48a3adce34d) -- due to temporary amnesia of the --*-executable-profiling flags flags_1_22_1 = flags_1_23_0 { configDebugInfo = NoFlag , configProfExe = NoFlag } -- Cabal < 1.22 doesn't know about '--disable-debug-info'. flags_1_22_0 = flags_1_23_0 { configDebugInfo = NoFlag } -- Cabal < 1.21.1 doesn't know about 'disable-relocatable' -- Cabal < 1.21.1 doesn't know about 'enable-profiling' -- (but we already dealt with it in flags_1_23_0) flags_1_21_1 = flags_1_22_0 { configRelocatable = NoFlag , configCoverage = NoFlag , configLibCoverage = configCoverage flags } -- Cabal < 1.19.2 doesn't know about '--exact-configuration' and -- '--enable-library-stripping'. flags_1_19_2 = flags_1_21_1 { configExactConfiguration = NoFlag , configStripLibs = NoFlag } -- Cabal < 1.19.1 uses '--constraint' instead of '--dependency'. flags_1_19_1 = flags_1_19_2 { configDependencies = [] , configConstraints = configConstraints flags } -- Cabal < 1.18.0 doesn't know about --extra-prog-path and --sysconfdir. flags_1_18_0 = flags_1_19_1 { configProgramPathExtra = toNubList [] , configInstallDirs = configInstallDirs_1_18_0} configInstallDirs_1_18_0 = (configInstallDirs flags_1_19_1) { sysconfdir = NoFlag } -- Cabal < 1.14.0 doesn't know about '--disable-benchmarks'. flags_1_14_0 = flags_1_18_0 { configBenchmarks = NoFlag } -- Cabal < 1.12.0 doesn't know about '--enable/disable-executable-dynamic' -- and '--enable/disable-library-coverage'. flags_1_12_0 = flags_1_14_0 { configLibCoverage = NoFlag , configDynExe = NoFlag } -- Cabal < 1.10.0 doesn't know about '--disable-tests'. 
flags_1_10_0 = flags_1_12_0 { configTests = NoFlag } -- Cabal < 1.3.10 does not grok the '--constraints' flag. flags_1_3_10 = flags_1_10_0 { configConstraints = [] } -- | Get the package database settings from 'ConfigFlags', accounting for -- @--package-db@ and @--user@ flags. configPackageDB' :: ConfigFlags -> PackageDBStack configPackageDB' cfg = interpretPackageDbFlags userInstall (configPackageDBs cfg) where userInstall = Cabal.fromFlagOrDefault True (configUserInstall cfg) -- | Configure the compiler, but reduce verbosity during this step. configCompilerAux' :: ConfigFlags -> IO (Compiler, Platform, ProgramDb) configCompilerAux' configFlags = configCompilerAuxEx configFlags --FIXME: make configCompilerAux use a sensible verbosity { configVerbosity = fmap lessVerbose (configVerbosity configFlags) } -- ------------------------------------------------------------ -- * Config extra flags -- ------------------------------------------------------------ -- | cabal configure takes some extra flags beyond runghc Setup configure -- data ConfigExFlags = ConfigExFlags { configCabalVersion :: Flag Version, configAppend :: Flag Bool, configBackup :: Flag Bool, configExConstraints :: [(UserConstraint, ConstraintSource)], configPreferences :: [PackageVersionConstraint], configSolver :: Flag PreSolver, configAllowNewer :: Maybe AllowNewer, configAllowOlder :: Maybe AllowOlder, configWriteGhcEnvironmentFilesPolicy :: Flag WriteGhcEnvironmentFilesPolicy } deriving (Eq, Show, Generic) defaultConfigExFlags :: ConfigExFlags defaultConfigExFlags = mempty { configSolver = Flag defaultSolver } configureExCommand :: CommandUI (ConfigFlags, ConfigExFlags) configureExCommand = configureCommand { commandDefaultFlags = (mempty, defaultConfigExFlags), commandOptions = \showOrParseArgs -> liftOptions fst setFst (filter ((`notElem` ["constraint", "dependency", "exact-configuration"]) . 
optionName) $ configureOptions showOrParseArgs) ++ liftOptions snd setSnd (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag) } where setFst a (_,b) = (a,b) setSnd b (a,_) = (a,b) configureExOptions :: ShowOrParseArgs -> ConstraintSource -> [OptionField ConfigExFlags] configureExOptions _showOrParseArgs src = [ option [] ["cabal-lib-version"] ("Select which version of the Cabal lib to use to build packages " ++ "(useful for testing).") configCabalVersion (\v flags -> flags { configCabalVersion = v }) (reqArg "VERSION" (parsecToReadE ("Cannot parse cabal lib version: "++) (fmap toFlag parsec)) (map prettyShow. flagToList)) , option "" ["append"] "appending the new config to the old config file" configAppend (\v flags -> flags { configAppend = v }) (boolOpt [] []) , option "" ["backup"] "the backup of the config file before any alterations" configBackup (\v flags -> flags { configBackup = v }) (boolOpt [] []) , option "c" ["constraint"] "Specify constraints on a package (version, installed/source, flags)" configExConstraints (\v flags -> flags { configExConstraints = v }) (reqArg "CONSTRAINT" ((\x -> [(x, src)]) `fmap` ReadE readUserConstraint) (map $ prettyShow . fst)) , option [] ["preference"] "Specify preferences (soft constraints) on the version of a package" configPreferences (\v flags -> flags { configPreferences = v }) (reqArg "CONSTRAINT" (parsecToReadE (const "dependency expected") (fmap (\x -> [x]) parsec)) (map prettyShow)) , optionSolver configSolver (\v flags -> flags { configSolver = v }) , option [] ["allow-older"] ("Ignore lower bounds in all dependencies or DEPS") (fmap unAllowOlder . configAllowOlder) (\v flags -> flags { configAllowOlder = fmap AllowOlder v}) (optArg "DEPS" (parsecToReadEErr unexpectMsgString relaxDepsParser) (Just RelaxDepsAll) relaxDepsPrinter) , option [] ["allow-newer"] ("Ignore upper bounds in all dependencies or DEPS") (fmap unAllowNewer . 
configAllowNewer) (\v flags -> flags { configAllowNewer = fmap AllowNewer v}) (optArg "DEPS" (parsecToReadEErr unexpectMsgString relaxDepsParser) (Just RelaxDepsAll) relaxDepsPrinter) , option [] ["write-ghc-environment-files"] ("Whether to create a .ghc.environment file after a successful build" ++ " (v2-build only)") configWriteGhcEnvironmentFilesPolicy (\v flags -> flags { configWriteGhcEnvironmentFilesPolicy = v}) (reqArg "always|never|ghc8.4.4+" writeGhcEnvironmentFilesPolicyParser writeGhcEnvironmentFilesPolicyPrinter) ] writeGhcEnvironmentFilesPolicyParser :: ReadE (Flag WriteGhcEnvironmentFilesPolicy) writeGhcEnvironmentFilesPolicyParser = ReadE $ \case "always" -> Right $ Flag AlwaysWriteGhcEnvironmentFiles "never" -> Right $ Flag NeverWriteGhcEnvironmentFiles "ghc8.4.4+" -> Right $ Flag WriteGhcEnvironmentFilesOnlyForGhc844AndNewer policy -> Left $ "Cannot parse the GHC environment file write policy '" <> policy <> "'" writeGhcEnvironmentFilesPolicyPrinter :: Flag WriteGhcEnvironmentFilesPolicy -> [String] writeGhcEnvironmentFilesPolicyPrinter = \case (Flag AlwaysWriteGhcEnvironmentFiles) -> ["always"] (Flag NeverWriteGhcEnvironmentFiles) -> ["never"] (Flag WriteGhcEnvironmentFilesOnlyForGhc844AndNewer) -> ["ghc8.4.4+"] NoFlag -> [] relaxDepsParser :: CabalParsing m => m (Maybe RelaxDeps) relaxDepsParser = do rs <- P.sepBy parsec (P.char ',') if null rs then fail $ "empty argument list is not allowed. " ++ "Note: use --allow-newer without the equals sign to permit all " ++ "packages to use newer versions." else return . Just . RelaxDepsSome . toList $ rs relaxDepsPrinter :: (Maybe RelaxDeps) -> [Maybe String] relaxDepsPrinter Nothing = [] relaxDepsPrinter (Just RelaxDepsAll) = [Nothing] relaxDepsPrinter (Just (RelaxDepsSome pkgs)) = map (Just . 
prettyShow) $ pkgs instance Monoid ConfigExFlags where mempty = gmempty mappend = (<>) instance Semigroup ConfigExFlags where (<>) = gmappend reconfigureCommand :: CommandUI (ConfigFlags, ConfigExFlags) reconfigureCommand = configureExCommand { commandName = "reconfigure" , commandSynopsis = "Reconfigure the package if necessary." , commandDescription = Just $ \pname -> wrapText $ "Run `configure` with the most recently used flags, or append FLAGS " ++ "to the most recently used configuration. " ++ "Accepts the same flags as `" ++ pname ++ " v1-configure'. " ++ "If the package has never been configured, the default flags are " ++ "used." , commandNotes = Just $ \pname -> "Examples:\n" ++ " " ++ pname ++ " v1-reconfigure\n" ++ " Configure with the most recently used flags.\n" ++ " " ++ pname ++ " v1-reconfigure -w PATH\n" ++ " Reconfigure with the most recently used flags,\n" ++ " but use the compiler at PATH.\n\n" , commandUsage = usageAlternatives "v1-reconfigure" [ "[FLAGS]" ] , commandDefaultFlags = mempty } -- ------------------------------------------------------------ -- * Build flags -- ------------------------------------------------------------ buildCommand :: CommandUI BuildFlags buildCommand = parent { commandName = "build", commandDescription = Just $ \_ -> wrapText $ "Components encompass executables, tests, and benchmarks.\n" ++ "\n" ++ "Affected by configuration options, see `v1-configure`.\n", commandDefaultFlags = commandDefaultFlags parent, commandUsage = usageAlternatives "v1-build" $ [ "[FLAGS]", "COMPONENTS [FLAGS]" ], commandOptions = commandOptions parent , commandNotes = Just $ \pname -> "Examples:\n" ++ " " ++ pname ++ " v1-build " ++ " All the components in the package\n" ++ " " ++ pname ++ " v1-build foo " ++ " A component (i.e. 
lib, exe, test suite)\n\n" ++ Cabal.programFlagsDescription defaultProgramDb } where parent = Cabal.buildCommand defaultProgramDb -- ------------------------------------------------------------ -- * Test flags -- ------------------------------------------------------------ -- | Given some 'TestFlags' for the version of Cabal that -- cabal-install was built with, and a target older 'Version' of -- Cabal that we want to pass these flags to, convert the -- flags into a form that will be accepted by the older -- Setup script. Generally speaking, this just means filtering -- out flags that the old Cabal library doesn't understand, but -- in some cases it may also mean "emulating" a feature using -- some more legacy flags. filterTestFlags :: TestFlags -> Version -> TestFlags filterTestFlags flags cabalLibVersion -- NB: we expect the latest version to be the most common case, -- so test it first. | cabalLibVersion >= mkVersion [3,0,0] = flags_latest -- The naming convention is that flags_version gives flags with -- all flags *introduced* in version eliminated. -- It is NOT the latest version of Cabal library that -- these flags work for; version of introduction is a more -- natural metric. | cabalLibVersion < mkVersion [3,0,0] = flags_3_0_0 | otherwise = error "the impossible just happened" -- see first guard where flags_latest = flags flags_3_0_0 = flags_latest { -- Cabal < 3.0 doesn't know about --test-wrapper Cabal.testWrapper = NoFlag } -- ------------------------------------------------------------ -- * Repl command -- ------------------------------------------------------------ replCommand :: CommandUI ReplFlags replCommand = parent { commandName = "repl", commandDescription = Just $ \pname -> wrapText $ "If the current directory contains no package, ignores COMPONENT " ++ "parameters and opens an interactive interpreter session;\n" ++ "\n" ++ "Otherwise, (re)configures with the given or default flags, and " ++ "loads the interpreter with the relevant modules. 
For executables, " ++ "tests and benchmarks, loads the main module (and its " ++ "dependencies); for libraries all exposed/other modules.\n" ++ "\n" ++ "The default component is the library itself, or the executable " ++ "if that is the only component.\n" ++ "\n" ++ "Support for loading specific modules is planned but not " ++ "implemented yet. For certain scenarios, `" ++ pname ++ " v1-exec -- ghci :l Foo` may be used instead. Note that `v1-exec` will " ++ "not (re)configure and you will have to specify the location of " ++ "other modules, if required.\n", commandUsage = \pname -> "Usage: " ++ pname ++ " v1-repl [COMPONENT] [FLAGS]\n", commandDefaultFlags = commandDefaultFlags parent, commandOptions = commandOptions parent, commandNotes = Just $ \pname -> "Examples:\n" ++ " " ++ pname ++ " v1-repl " ++ " The first component in the package\n" ++ " " ++ pname ++ " v1-repl foo " ++ " A named component (i.e. lib, exe, test suite)\n" ++ " " ++ pname ++ " v1-repl --ghc-options=\"-lstdc++\"" ++ " Specifying flags for interpreter\n" } where parent = Cabal.replCommand defaultProgramDb -- ------------------------------------------------------------ -- * Test command -- ------------------------------------------------------------ testCommand :: CommandUI (BuildFlags, TestFlags) testCommand = parent { commandName = "test", commandDescription = Just $ \pname -> wrapText $ "If necessary (re)configures with `--enable-tests` flag and builds" ++ " the test suite.\n" ++ "\n" ++ "Remember that the tests' dependencies must be installed if there" ++ " are additional ones; e.g. 
with `" ++ pname ++ " v1-install --only-dependencies --enable-tests`.\n" ++ "\n" ++ "By defining UserHooks in a custom Setup.hs, the package can" ++ " define actions to be executed before and after running tests.\n", commandUsage = usageAlternatives "v1-test" [ "[FLAGS]", "TESTCOMPONENTS [FLAGS]" ], commandDefaultFlags = (Cabal.defaultBuildFlags, commandDefaultFlags parent), commandOptions = \showOrParseArgs -> liftOptions get1 set1 (Cabal.buildOptions progDb showOrParseArgs) ++ liftOptions get2 set2 (commandOptions parent showOrParseArgs) } where get1 (a,_) = a; set1 a (_,b) = (a,b) get2 (_,b) = b; set2 b (a,_) = (a,b) parent = Cabal.testCommand progDb = defaultProgramDb -- ------------------------------------------------------------ -- * Bench command -- ------------------------------------------------------------ benchmarkCommand :: CommandUI (BuildFlags, BenchmarkFlags) benchmarkCommand = parent { commandName = "bench", commandUsage = usageAlternatives "v1-bench" [ "[FLAGS]", "BENCHCOMPONENTS [FLAGS]" ], commandDescription = Just $ \pname -> wrapText $ "If necessary (re)configures with `--enable-benchmarks` flag and" ++ " builds the benchmarks.\n" ++ "\n" ++ "Remember that the benchmarks' dependencies must be installed if" ++ " there are additional ones; e.g. 
with `" ++ pname ++ " v1-install --only-dependencies --enable-benchmarks`.\n" ++ "\n" ++ "By defining UserHooks in a custom Setup.hs, the package can" ++ " define actions to be executed before and after running" ++ " benchmarks.\n", commandDefaultFlags = (Cabal.defaultBuildFlags, commandDefaultFlags parent), commandOptions = \showOrParseArgs -> liftOptions get1 set1 (Cabal.buildOptions progDb showOrParseArgs) ++ liftOptions get2 set2 (commandOptions parent showOrParseArgs) } where get1 (a,_) = a; set1 a (_,b) = (a,b) get2 (_,b) = b; set2 b (a,_) = (a,b) parent = Cabal.benchmarkCommand progDb = defaultProgramDb -- ------------------------------------------------------------ -- * Fetch command -- ------------------------------------------------------------ data FetchFlags = FetchFlags { -- fetchOutput :: Flag FilePath, fetchDeps :: Flag Bool, fetchDryRun :: Flag Bool, fetchSolver :: Flag PreSolver, fetchMaxBackjumps :: Flag Int, fetchReorderGoals :: Flag ReorderGoals, fetchCountConflicts :: Flag CountConflicts, fetchFineGrainedConflicts :: Flag FineGrainedConflicts, fetchMinimizeConflictSet :: Flag MinimizeConflictSet, fetchIndependentGoals :: Flag IndependentGoals, fetchShadowPkgs :: Flag ShadowPkgs, fetchStrongFlags :: Flag StrongFlags, fetchAllowBootLibInstalls :: Flag AllowBootLibInstalls, fetchOnlyConstrained :: Flag OnlyConstrained, fetchTests :: Flag Bool, fetchBenchmarks :: Flag Bool, fetchVerbosity :: Flag Verbosity } defaultFetchFlags :: FetchFlags defaultFetchFlags = FetchFlags { -- fetchOutput = mempty, fetchDeps = toFlag True, fetchDryRun = toFlag False, fetchSolver = Flag defaultSolver, fetchMaxBackjumps = Flag defaultMaxBackjumps, fetchReorderGoals = Flag (ReorderGoals False), fetchCountConflicts = Flag (CountConflicts True), fetchFineGrainedConflicts = Flag (FineGrainedConflicts True), fetchMinimizeConflictSet = Flag (MinimizeConflictSet False), fetchIndependentGoals = Flag (IndependentGoals False), fetchShadowPkgs = Flag (ShadowPkgs False), 
fetchStrongFlags = Flag (StrongFlags False), fetchAllowBootLibInstalls = Flag (AllowBootLibInstalls False), fetchOnlyConstrained = Flag OnlyConstrainedNone, fetchTests = toFlag False, fetchBenchmarks = toFlag False, fetchVerbosity = toFlag normal } fetchCommand :: CommandUI FetchFlags fetchCommand = CommandUI { commandName = "fetch", commandSynopsis = "Downloads packages for later installation.", commandUsage = usageAlternatives "fetch" [ "[FLAGS] PACKAGES" ], commandDescription = Just $ \_ -> "Note that it currently is not possible to fetch the dependencies for a\n" ++ "package in the current directory.\n", commandNotes = Nothing, commandDefaultFlags = defaultFetchFlags, commandOptions = \ showOrParseArgs -> [ optionVerbosity fetchVerbosity (\v flags -> flags { fetchVerbosity = v }) -- , option "o" ["output"] -- "Put the package(s) somewhere specific rather than the usual cache." -- fetchOutput (\v flags -> flags { fetchOutput = v }) -- (reqArgFlag "PATH") , option [] ["dependencies", "deps"] "Resolve and fetch dependencies (default)" fetchDeps (\v flags -> flags { fetchDeps = v }) trueArg , option [] ["no-dependencies", "no-deps"] "Ignore dependencies" fetchDeps (\v flags -> flags { fetchDeps = v }) falseArg , option [] ["dry-run"] "Do not install anything, only print what would be installed." fetchDryRun (\v flags -> flags { fetchDryRun = v }) trueArg , option "" ["tests"] "dependency checking and compilation for test suites listed in the package description file." fetchTests (\v flags -> flags { fetchTests = v }) (boolOpt [] []) , option "" ["benchmarks"] "dependency checking and compilation for benchmarks listed in the package description file." 
fetchBenchmarks (\v flags -> flags { fetchBenchmarks = v }) (boolOpt [] []) ] ++ optionSolver fetchSolver (\v flags -> flags { fetchSolver = v }) : optionSolverFlags showOrParseArgs fetchMaxBackjumps (\v flags -> flags { fetchMaxBackjumps = v }) fetchReorderGoals (\v flags -> flags { fetchReorderGoals = v }) fetchCountConflicts (\v flags -> flags { fetchCountConflicts = v }) fetchFineGrainedConflicts (\v flags -> flags { fetchFineGrainedConflicts = v }) fetchMinimizeConflictSet (\v flags -> flags { fetchMinimizeConflictSet = v }) fetchIndependentGoals (\v flags -> flags { fetchIndependentGoals = v }) fetchShadowPkgs (\v flags -> flags { fetchShadowPkgs = v }) fetchStrongFlags (\v flags -> flags { fetchStrongFlags = v }) fetchAllowBootLibInstalls (\v flags -> flags { fetchAllowBootLibInstalls = v }) fetchOnlyConstrained (\v flags -> flags { fetchOnlyConstrained = v }) } -- ------------------------------------------------------------ -- * Freeze command -- ------------------------------------------------------------ data FreezeFlags = FreezeFlags { freezeDryRun :: Flag Bool, freezeTests :: Flag Bool, freezeBenchmarks :: Flag Bool, freezeSolver :: Flag PreSolver, freezeMaxBackjumps :: Flag Int, freezeReorderGoals :: Flag ReorderGoals, freezeCountConflicts :: Flag CountConflicts, freezeFineGrainedConflicts :: Flag FineGrainedConflicts, freezeMinimizeConflictSet :: Flag MinimizeConflictSet, freezeIndependentGoals :: Flag IndependentGoals, freezeShadowPkgs :: Flag ShadowPkgs, freezeStrongFlags :: Flag StrongFlags, freezeAllowBootLibInstalls :: Flag AllowBootLibInstalls, freezeOnlyConstrained :: Flag OnlyConstrained, freezeVerbosity :: Flag Verbosity } defaultFreezeFlags :: FreezeFlags defaultFreezeFlags = FreezeFlags { freezeDryRun = toFlag False, freezeTests = toFlag False, freezeBenchmarks = toFlag False, freezeSolver = Flag defaultSolver, freezeMaxBackjumps = Flag defaultMaxBackjumps, freezeReorderGoals = Flag (ReorderGoals False), freezeCountConflicts = Flag 
(CountConflicts True), freezeFineGrainedConflicts = Flag (FineGrainedConflicts True), freezeMinimizeConflictSet = Flag (MinimizeConflictSet False), freezeIndependentGoals = Flag (IndependentGoals False), freezeShadowPkgs = Flag (ShadowPkgs False), freezeStrongFlags = Flag (StrongFlags False), freezeAllowBootLibInstalls = Flag (AllowBootLibInstalls False), freezeOnlyConstrained = Flag OnlyConstrainedNone, freezeVerbosity = toFlag normal } freezeCommand :: CommandUI FreezeFlags freezeCommand = CommandUI { commandName = "freeze", commandSynopsis = "Freeze dependencies.", commandDescription = Just $ \_ -> wrapText $ "Calculates a valid set of dependencies and their exact versions. " ++ "If successful, saves the result to the file `cabal.config`.\n" ++ "\n" ++ "The package versions specified in `cabal.config` will be used for " ++ "any future installs.\n" ++ "\n" ++ "An existing `cabal.config` is ignored and overwritten.\n", commandNotes = Nothing, commandUsage = usageFlags "freeze", commandDefaultFlags = defaultFreezeFlags, commandOptions = \ showOrParseArgs -> [ optionVerbosity freezeVerbosity (\v flags -> flags { freezeVerbosity = v }) , option [] ["dry-run"] "Do not freeze anything, only print what would be frozen" freezeDryRun (\v flags -> flags { freezeDryRun = v }) trueArg , option [] ["tests"] ("freezing of the dependencies of any tests suites " ++ "in the package description file.") freezeTests (\v flags -> flags { freezeTests = v }) (boolOpt [] []) , option [] ["benchmarks"] ("freezing of the dependencies of any benchmarks suites " ++ "in the package description file.") freezeBenchmarks (\v flags -> flags { freezeBenchmarks = v }) (boolOpt [] []) ] ++ optionSolver freezeSolver (\v flags -> flags { freezeSolver = v }): optionSolverFlags showOrParseArgs freezeMaxBackjumps (\v flags -> flags { freezeMaxBackjumps = v }) freezeReorderGoals (\v flags -> flags { freezeReorderGoals = v }) freezeCountConflicts (\v flags -> flags { freezeCountConflicts = v }) 
-- (continuation of 'freezeCommand' from the previous chunk: the remaining
-- accessor/update pairs handed to 'optionSolverFlags' for the shared
-- dependency-solver options)
             freezeFineGrainedConflicts (\v flags -> flags { freezeFineGrainedConflicts = v })
             freezeMinimizeConflictSet (\v flags -> flags { freezeMinimizeConflictSet = v })
             freezeIndependentGoals (\v flags -> flags { freezeIndependentGoals = v })
             freezeShadowPkgs (\v flags -> flags { freezeShadowPkgs = v })
             freezeStrongFlags (\v flags -> flags { freezeStrongFlags = v })
             freezeAllowBootLibInstalls (\v flags -> flags { freezeAllowBootLibInstalls = v })
             freezeOnlyConstrained (\v flags -> flags { freezeOnlyConstrained = v })
  }

-- ------------------------------------------------------------
-- * 'gen-bounds' command
-- ------------------------------------------------------------

-- | Command UI for @cabal gen-bounds@.  Reuses 'FreezeFlags' (and
-- 'defaultFreezeFlags'), but exposes only the verbosity option.
genBoundsCommand :: CommandUI FreezeFlags
genBoundsCommand = CommandUI {
    commandName         = "gen-bounds",
    commandSynopsis     = "Generate dependency bounds.",
    commandDescription  = Just $ \_ -> wrapText $
         "Generates bounds for all dependencies that do not currently have them. "
      ++ "Generated bounds are printed to stdout. "
      ++ "You can then paste them into your .cabal file.\n"
      ++ "\n",
    commandNotes        = Nothing,
    commandUsage        = usageFlags "gen-bounds",
    commandDefaultFlags = defaultFreezeFlags,
    commandOptions      = \ _ -> [
        optionVerbosity freezeVerbosity (\v flags -> flags { freezeVerbosity = v })
      ]
  }

-- ------------------------------------------------------------
-- * Update command
-- ------------------------------------------------------------

-- | Flags accepted by @cabal update@.
data UpdateFlags
    = UpdateFlags {
        -- How chatty to be while refreshing the package index.
        updateVerbosity  :: Flag Verbosity,
        -- Index state to advance/pin the package repositories to.
        updateIndexState :: Flag TotalIndexState
    } deriving Generic

-- | Defaults for @update@: normal verbosity, index pinned to HEAD.
defaultUpdateFlags :: UpdateFlags
defaultUpdateFlags
    = UpdateFlags {
        updateVerbosity  = toFlag normal,
        updateIndexState = toFlag headTotalIndexState
    }

-- ------------------------------------------------------------
-- * Other commands
-- ------------------------------------------------------------

-- | @v1-clean@: the Cabal library's clean command with the usage string
-- adjusted to the v1-prefixed name.
cleanCommand :: CommandUI CleanFlags
cleanCommand = Cabal.cleanCommand
  { commandUsage = \pname -> "Usage: " ++ pname ++ " v1-clean [FLAGS]\n"
  }

-- (signature continues in the next chunk with 'CommandUI (Flag Verbosity)')
checkCommand ::
CommandUI (Flag Verbosity)
-- (the 'checkCommand ::' opening this signature is in the previous chunk)

-- | @cabal check@: lint the package description in the current directory
-- against Hackage's acceptance requirements.  The whole flags value is just
-- a verbosity, hence the 'id'/'const' accessors below.
checkCommand = CommandUI {
    commandName         = "check",
    commandSynopsis     = "Check the package for common mistakes.",
    commandDescription  = Just $ \_ -> wrapText $
         "Expects a .cabal package file in the current directory.\n"
      ++ "\n"
      ++ "The checks correspond to the requirements to packages on Hackage. "
      ++ "If no errors and warnings are reported, Hackage will accept this "
      ++ "package.\n",
    commandNotes        = Nothing,
    commandUsage        = usageFlags "check",
    commandDefaultFlags = toFlag normal,
    commandOptions      = \_ -> [optionVerbosity id const]
  }

-- | @cabal format@: reformat a .cabal file.  Exposes no options of its own.
formatCommand :: CommandUI (Flag Verbosity)
formatCommand = CommandUI {
    commandName         = "format",
    commandSynopsis     = "Reformat the .cabal file using the standard style.",
    commandDescription  = Nothing,
    commandNotes        = Nothing,
    commandUsage        = usageAlternatives "format" ["[FILE]"],
    commandDefaultFlags = toFlag normal,
    commandOptions      = \_ -> []
  }

-- | @cabal man@: emit the manual page source on stdout.
manpageCommand :: CommandUI ManpageFlags
manpageCommand = CommandUI {
    commandName         = "man",
    commandSynopsis     = "Outputs manpage source.",
    commandDescription  = Just $ \_ -> "Output manpage source to STDOUT.\n",
    commandNotes        = Nothing,
    commandUsage        = usageFlags "man",
    commandDefaultFlags = defaultManpageFlags,
    commandOptions      = manpageOptions
  }

-- | @v1-run@: build and execute an executable component.  Shares its flags
-- ('BuildFlags') and option set with the v1 build command (see 'parent').
runCommand :: CommandUI BuildFlags
runCommand = CommandUI {
    commandName         = "run",
    commandSynopsis     = "Builds and runs an executable.",
    commandDescription  = Just $ \pname -> wrapText $
         "Builds and then runs the specified executable. 
If no executable is " ++ "specified, but the package contains just one executable, that one " ++ "is built and executed.\n" ++ "\n" ++ "Use `" ++ pname ++ " v1-test --show-details=streaming` to run a " ++ "test-suite and get its full output.\n", commandNotes = Just $ \pname -> "Examples:\n" ++ " " ++ pname ++ " v1-run\n" ++ " Run the only executable in the current package;\n" ++ " " ++ pname ++ " v1-run foo -- --fooflag\n" ++ " Works similar to `./foo --fooflag`.\n", commandUsage = usageAlternatives "v1-run" ["[FLAGS] [EXECUTABLE] [-- EXECUTABLE_FLAGS]"], commandDefaultFlags = mempty, commandOptions = commandOptions parent } where parent = Cabal.buildCommand defaultProgramDb -- ------------------------------------------------------------ -- * Report flags -- ------------------------------------------------------------ data ReportFlags = ReportFlags { reportUsername :: Flag Username, reportPassword :: Flag Password, reportVerbosity :: Flag Verbosity } deriving Generic defaultReportFlags :: ReportFlags defaultReportFlags = ReportFlags { reportUsername = mempty, reportPassword = mempty, reportVerbosity = toFlag normal } reportCommand :: CommandUI ReportFlags reportCommand = CommandUI { commandName = "report", commandSynopsis = "Upload build reports to a remote server.", commandDescription = Nothing, commandNotes = Just $ \_ -> "You can store your Hackage login in the ~/.cabal/config file\n", commandUsage = usageAlternatives "report" ["[FLAGS]"], commandDefaultFlags = defaultReportFlags, commandOptions = \_ -> [optionVerbosity reportVerbosity (\v flags -> flags { reportVerbosity = v }) ,option ['u'] ["username"] "Hackage username." reportUsername (\v flags -> flags { reportUsername = v }) (reqArg' "USERNAME" (toFlag . Username) (flagToList . fmap unUsername)) ,option ['p'] ["password"] "Hackage password." reportPassword (\v flags -> flags { reportPassword = v }) (reqArg' "PASSWORD" (toFlag . Password) (flagToList . 
fmap unPassword)) ] } instance Monoid ReportFlags where mempty = gmempty mappend = (<>) instance Semigroup ReportFlags where (<>) = gmappend -- ------------------------------------------------------------ -- * Get flags -- ------------------------------------------------------------ data GetFlags = GetFlags { getDestDir :: Flag FilePath, getPristine :: Flag Bool, getIndexState :: Flag TotalIndexState, getActiveRepos :: Flag ActiveRepos, getSourceRepository :: Flag (Maybe RepoKind), getVerbosity :: Flag Verbosity } deriving Generic defaultGetFlags :: GetFlags defaultGetFlags = GetFlags { getDestDir = mempty, getPristine = mempty, getIndexState = mempty, getActiveRepos = mempty, getSourceRepository = mempty, getVerbosity = toFlag normal } getCommand :: CommandUI GetFlags getCommand = CommandUI { commandName = "get", commandSynopsis = "Download/Extract a package's source code (repository).", commandDescription = Just $ \_ -> wrapText $ unlines descriptionOfGetCommand, commandNotes = Just $ \pname -> unlines $ notesOfGetCommand "get" pname, commandUsage = usagePackages "get", commandDefaultFlags = defaultGetFlags, commandOptions = \_ -> [ optionVerbosity getVerbosity (\v flags -> flags { getVerbosity = v }) ,option "d" ["destdir"] "Where to place the package source, defaults to the current directory." getDestDir (\v flags -> flags { getDestDir = v }) (reqArgFlag "PATH") ,option "s" ["source-repository"] "Copy the package's source repository (ie git clone, darcs get, etc as appropriate)." getSourceRepository (\v flags -> flags { getSourceRepository = v }) (optArg "[head|this|...]" (parsecToReadE (const "invalid source-repository") (fmap (toFlag . Just) parsec)) (Flag Nothing) (map (fmap show) . flagToList)) , option [] ["index-state"] ("Use source package index state as it existed at a previous time. " ++ "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++ "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD'). 
" ++
         -- (continuation of 'getCommand' from the previous chunk: tail of the
         -- --index-state help text, its parser, and the --pristine option)
         "This determines which package versions are available as well as " ++
         ".cabal file revision is selected (unless --pristine is used).")
        getIndexState (\v flags -> flags { getIndexState = v })
        (reqArg "STATE" (parsecToReadE (const $ "index-state must be a " ++
                                        "unix-timestamps (e.g. '@1474732068'), " ++
                                        "a ISO8601 UTC timestamp " ++
                                        "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'")
                        (toFlag `fmap` parsec))
                (flagToList . fmap prettyShow))

     , option [] ["pristine"]
         ("Unpack the original pristine tarball, rather than updating the " ++
          ".cabal file with the latest revision from the package archive.")
         getPristine (\v flags -> flags { getPristine = v })
         trueArg
     ]
  }

-- | List of lines describing command @get@.
descriptionOfGetCommand :: [String]
descriptionOfGetCommand =
  [ "Creates a local copy of a package's source code. By default it gets the source"
  , "tarball and unpacks it in a local subdirectory. Alternatively, with -s it will"
  , "get the code from the source repository specified by the package."
  ]

-- | Notes for the command @get@.  Shared with the deprecated alias
-- 'unpackCommand', which passes its own command name.
notesOfGetCommand
  :: String      -- ^ Either @"get"@ or @"unpack"@.
  -> String      -- ^ E.g. @"cabal"@.
  -> [String]    -- ^ List of lines.
notesOfGetCommand cmd pname =
  [ "Examples:"
  , " " ++ unwords [ pname, cmd, "hlint" ]
  , " Download the latest stable version of hlint;"
  , " " ++ unwords [ pname, cmd, "lens --source-repository=head" ]
  , " Download the source repository of lens (i.e. git clone from github)."
  ]

-- 'cabal unpack' is a deprecated alias for 'cabal get'.
unpackCommand :: CommandUI GetFlags
unpackCommand = getCommand
  { commandName     = "unpack"
  , commandSynopsis = synopsis
  , commandNotes    = Just $ \ pname -> unlines $
      notesOfGetCommand "unpack" pname
  , commandUsage    = usagePackages "unpack"
  }
  where
    synopsis = "Deprecated alias for 'get'."
-- | 'GetFlags' are combined field-wise via the 'Generic' representation.
instance Monoid GetFlags where
  mempty  = gmempty
  mappend = (<>)

instance Semigroup GetFlags where
  (<>) = gmappend

-- ------------------------------------------------------------
-- * List flags
-- ------------------------------------------------------------

-- | Flags accepted by @cabal list@.
data ListFlags = ListFlags
    { listInstalled       :: Flag Bool        -- only list installed packages
    , listSimpleOutput    :: Flag Bool        -- easy-to-parse output format
    , listCaseInsensitive :: Flag Bool        -- case-insensitive matching
    , listVerbosity       :: Flag Verbosity
    , listPackageDBs      :: [Maybe PackageDB]
    , listHcPath          :: Flag FilePath    -- path to a particular compiler
    } deriving Generic

-- | Defaults for @list@: not restricted to installed packages, human-readable
-- output, case-insensitive search, normal verbosity.
defaultListFlags :: ListFlags
defaultListFlags = ListFlags
    { listInstalled       = Flag False
    , listSimpleOutput    = Flag False
    , listCaseInsensitive = Flag True
    , listVerbosity       = toFlag normal
    , listPackageDBs      = []
    , listHcPath          = mempty
    }

-- | @cabal list@: search the package index (or installed packages) by name.
listCommand :: CommandUI ListFlags
listCommand = CommandUI {
    commandName         = "list",
    commandSynopsis     = "List packages matching a search string.",
    commandDescription  = Just $ \_ -> wrapText $
         "List all packages, or all packages matching one of the search"
      ++ " strings.\n"
      ++ "\n"
      ++ "Use the package database specified with --package-db. 
" ++ "If not specified, use the user package database.\n", commandNotes = Just $ \pname -> "Examples:\n" ++ " " ++ pname ++ " list pandoc\n" ++ " Will find pandoc, pandoc-citeproc, pandoc-lens, ...\n", commandUsage = usageAlternatives "list" [ "[FLAGS]" , "[FLAGS] STRINGS"], commandDefaultFlags = defaultListFlags, commandOptions = const listOptions } listOptions :: [OptionField ListFlags] listOptions = [ optionVerbosity listVerbosity (\v flags -> flags { listVerbosity = v }) , option [] ["installed"] "Only print installed packages" listInstalled (\v flags -> flags { listInstalled = v }) trueArg , option [] ["simple-output"] "Print in a easy-to-parse format" listSimpleOutput (\v flags -> flags { listSimpleOutput = v }) trueArg , option ['i'] ["ignore-case"] "Ignore case distinctions" listCaseInsensitive (\v flags -> flags { listCaseInsensitive = v }) (boolOpt' (['i'], ["ignore-case"]) (['I'], ["strict-case"])) , option "" ["package-db"] ( "Append the given package database to the list of package" ++ " databases used (to satisfy dependencies and register into)." ++ " May be a specific file, 'global' or 'user'. The initial list" ++ " is ['global'], ['global', 'user']," ++ " depending on context. Use 'clear' to reset the list to empty." 
++ " See the user guide for details.")
        -- (continuation of 'listOptions' from the previous chunk)
        listPackageDBs (\v flags -> flags { listPackageDBs = v })
        (reqArg' "DB" readPackageDbList showPackageDbList)

    , option "w" ["with-compiler"]
        "give the path to a particular compiler"
        listHcPath (\v flags -> flags { listHcPath = v })
        (reqArgFlag "PATH")
    ]

-- | Does this @list@ invocation require a compiler?  True when a compiler
-- path was given explicitly (--with-compiler) or when only installed
-- packages were requested (--installed).
listNeedsCompiler :: ListFlags -> Bool
listNeedsCompiler f =
    flagElim False (const True) (listHcPath f)
    || fromFlagOrDefault False (listInstalled f)

instance Monoid ListFlags where
  mempty  = gmempty
  mappend = (<>)

instance Semigroup ListFlags where
  (<>) = gmappend

-- ------------------------------------------------------------
-- * Info flags
-- ------------------------------------------------------------

-- | Flags accepted by @cabal info@.
data InfoFlags = InfoFlags {
    infoVerbosity  :: Flag Verbosity,
    infoPackageDBs :: [Maybe PackageDB]
  } deriving Generic

-- | Defaults for @info@: normal verbosity, no extra package databases.
defaultInfoFlags :: InfoFlags
defaultInfoFlags = InfoFlags {
    infoVerbosity  = toFlag normal,
    infoPackageDBs = []
  }

-- | @cabal info@: show detailed information about particular packages.
infoCommand :: CommandUI InfoFlags
infoCommand = CommandUI {
    commandName         = "info",
    commandSynopsis     = "Display detailed information about a particular package.",
    commandDescription  = Just $ \_ -> wrapText $
         "Use the package database specified with --package-db. "
      ++ "If not specified, use the user package database.\n",
    commandNotes        = Nothing,
    commandUsage        = usageAlternatives "info" ["[FLAGS] PACKAGES"],
    commandDefaultFlags = defaultInfoFlags,
    commandOptions      = \_ -> [
        optionVerbosity infoVerbosity (\v flags -> flags { infoVerbosity = v })
      , option "" ["package-db"]
          (  "Append the given package database to the list of package"
          ++ " databases used (to satisfy dependencies and register into)."
          ++ " May be a specific file, 'global' or 'user'. The initial list"
          ++ " is ['global'], ['global', 'user'],"
          ++ " depending on context. Use 'clear' to reset the list to empty."
++ " See the user guide for details.") infoPackageDBs (\v flags -> flags { infoPackageDBs = v }) (reqArg' "DB" readPackageDbList showPackageDbList) ] } instance Monoid InfoFlags where mempty = gmempty mappend = (<>) instance Semigroup InfoFlags where (<>) = gmappend -- ------------------------------------------------------------ -- * Install flags -- ------------------------------------------------------------ -- | Install takes the same flags as configure along with a few extras. -- data InstallFlags = InstallFlags { installDocumentation :: Flag Bool, installHaddockIndex :: Flag PathTemplate, installDest :: Flag Cabal.CopyDest, installDryRun :: Flag Bool, installOnlyDownload :: Flag Bool, installMaxBackjumps :: Flag Int, installReorderGoals :: Flag ReorderGoals, installCountConflicts :: Flag CountConflicts, installFineGrainedConflicts :: Flag FineGrainedConflicts, installMinimizeConflictSet :: Flag MinimizeConflictSet, installIndependentGoals :: Flag IndependentGoals, installShadowPkgs :: Flag ShadowPkgs, installStrongFlags :: Flag StrongFlags, installAllowBootLibInstalls :: Flag AllowBootLibInstalls, installOnlyConstrained :: Flag OnlyConstrained, installReinstall :: Flag Bool, installAvoidReinstalls :: Flag AvoidReinstalls, installOverrideReinstall :: Flag Bool, installUpgradeDeps :: Flag Bool, installOnly :: Flag Bool, installOnlyDeps :: Flag Bool, installIndexState :: Flag TotalIndexState, installRootCmd :: Flag String, installSummaryFile :: NubList PathTemplate, installLogFile :: Flag PathTemplate, installBuildReports :: Flag ReportLevel, installReportPlanningFailure :: Flag Bool, -- Note: symlink-bindir is no longer used by v2-install and can be removed -- when removing v1 commands installSymlinkBinDir :: Flag FilePath, installPerComponent :: Flag Bool, installNumJobs :: Flag (Maybe Int), installKeepGoing :: Flag Bool, installRunTests :: Flag Bool, installOfflineMode :: Flag Bool } deriving (Eq, Show, Generic) instance Binary InstallFlags defaultInstallFlags 
:: InstallFlags
-- | Defaults for the v1 @install@ command.  Mirrors the solver defaults used
-- by the other commands ('defaultMaxBackjumps' backjumps, count conflicts,
-- fine-grained conflicts, no independent goals, ...) and builds/registers
-- per-component by default.
defaultInstallFlags = InstallFlags {
    installDocumentation        = Flag False,
    installHaddockIndex         = Flag docIndexFile,
    installDest                 = Flag Cabal.NoCopyDest,
    installDryRun               = Flag False,
    installOnlyDownload         = Flag False,
    installMaxBackjumps         = Flag defaultMaxBackjumps,
    installReorderGoals         = Flag (ReorderGoals False),
    installCountConflicts       = Flag (CountConflicts True),
    installFineGrainedConflicts = Flag (FineGrainedConflicts True),
    installMinimizeConflictSet  = Flag (MinimizeConflictSet False),
    installIndependentGoals     = Flag (IndependentGoals False),
    installShadowPkgs           = Flag (ShadowPkgs False),
    installStrongFlags          = Flag (StrongFlags False),
    installAllowBootLibInstalls = Flag (AllowBootLibInstalls False),
    installOnlyConstrained      = Flag OnlyConstrainedNone,
    installReinstall            = Flag False,
    installAvoidReinstalls      = Flag (AvoidReinstalls False),
    installOverrideReinstall    = Flag False,
    installUpgradeDeps          = Flag False,
    installOnly                 = Flag False,
    installOnlyDeps             = Flag False,
    installIndexState           = mempty,
    installRootCmd              = mempty,
    installSummaryFile          = mempty,
    installLogFile              = mempty,
    installBuildReports         = Flag NoReports,
    installReportPlanningFailure= Flag False,
    installSymlinkBinDir        = mempty,
    installPerComponent         = Flag True,
    installNumJobs              = mempty,
    installKeepGoing            = Flag False,
    installRunTests             = mempty,
    installOfflineMode          = Flag False
  }
  where
    -- FIX: the path components were bare juxtaposed string literals
    -- ("$datadir" "doc" ...), which does not type-check; they must be joined
    -- with the FilePath combinator '</>' (in scope via this module's
    -- imports), matching the upstream source.
    docIndexFile = toPathTemplate ("$datadir" </> "doc"
                                   </> "$arch-$os-$compiler" </> "index.html")

-- | Hard limit on dependency-solver backjumps before giving up.
defaultMaxBackjumps :: Int
defaultMaxBackjumps = 4000

-- | The dependency solver used when none is selected explicitly.
defaultSolver :: PreSolver
defaultSolver = AlwaysModular

-- | Comma-separated rendering of every available 'PreSolver', for help text.
allSolvers :: String
allSolvers = intercalate ", " (map prettyShow ([minBound ..
maxBound] :: [PreSolver])) installCommand :: CommandUI ( ConfigFlags, ConfigExFlags, InstallFlags , HaddockFlags, TestFlags, BenchmarkFlags ) installCommand = CommandUI { commandName = "install", commandSynopsis = "Install packages.", commandUsage = usageAlternatives "v1-install" [ "[FLAGS]" , "[FLAGS] PACKAGES" ], commandDescription = Just $ \_ -> wrapText $ "Installs one or more packages. By default, the installed package" ++ " will be registered in the user's package database." ++ "\n" ++ "If PACKAGES are specified, downloads and installs those packages." ++ " Otherwise, install the package in the current directory (and/or its" ++ " dependencies) (there must be exactly one .cabal file in the current" ++ " directory).\n" ++ "\n" ++ "The flags to `v1-install` are saved and" ++ " affect future commands such as `v1-build` and `v1-repl`. See the help for" ++ " `v1-configure` for a list of commands being affected.\n" ++ "\n" ++ "Installed executables will by default" ++ " be put into `~/.cabal/bin/`." 
++ " If you want installed executable to be available globally, make" ++ " sure that the PATH environment variable contains that directory.\n" ++ "\n", commandNotes = Just $ \pname -> ( case commandNotes $ Cabal.configureCommand defaultProgramDb of Just desc -> desc pname ++ "\n" Nothing -> "" ) ++ "Examples:\n" ++ " " ++ pname ++ " v1-install " ++ " Package in the current directory\n" ++ " " ++ pname ++ " v1-install foo " ++ " Package from the hackage server\n" ++ " " ++ pname ++ " v1-install foo-1.0 " ++ " Specific version of a package\n" ++ " " ++ pname ++ " v1-install 'foo < 2' " ++ " Constrained package version\n" ++ " " ++ pname ++ " v1-install haddock --bindir=$HOME/hask-bin/ --datadir=$HOME/hask-data/\n" ++ " " ++ (map (const ' ') pname) ++ " " ++ " Change installation destination\n", commandDefaultFlags = (mempty, mempty, mempty, mempty, mempty, mempty), commandOptions = \showOrParseArgs -> liftOptions get1 set1 -- Note: [Hidden Flags] -- hide "constraint", "dependency", and -- "exact-configuration" from the configure options. (filter ((`notElem` ["constraint", "dependency" , "exact-configuration"]) . optionName) $ configureOptions showOrParseArgs) ++ liftOptions get2 set2 (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag) ++ liftOptions get3 set3 -- hide "target-package-db" flag from the -- install options. (filter ((`notElem` ["target-package-db"]) . 
optionName) $
                     -- (continuation of 'installCommand' from the previous
                     -- chunk: the remaining lifted option groups)
                     installOptions showOrParseArgs)
      ++ liftOptions get4 set4 (haddockOptions showOrParseArgs)
      ++ liftOptions get5 set5 (testOptions showOrParseArgs)
      ++ liftOptions get6 set6 (benchmarkOptions showOrParseArgs)
  }
  where
    -- Getter/setter pairs over the six-way flag tuple
    -- (ConfigFlags, ConfigExFlags, InstallFlags,
    --  HaddockFlags, TestFlags, BenchmarkFlags).
    get1 (a,_,_,_,_,_) = a; set1 a (_,b,c,d,e,f) = (a,b,c,d,e,f)
    get2 (_,b,_,_,_,_) = b; set2 b (a,_,c,d,e,f) = (a,b,c,d,e,f)
    get3 (_,_,c,_,_,_) = c; set3 c (a,b,_,d,e,f) = (a,b,c,d,e,f)
    get4 (_,_,_,d,_,_) = d; set4 d (a,b,c,_,e,f) = (a,b,c,d,e,f)
    get5 (_,_,_,_,e,_) = e; set5 e (a,b,c,d,_,f) = (a,b,c,d,e,f)
    get6 (_,_,_,_,_,f) = f; set6 f (a,b,c,d,e,_) = (a,b,c,d,e,f)

-- | @v1-haddock@: the Cabal library's haddock command with v1-prefixed usage.
haddockCommand :: CommandUI HaddockFlags
haddockCommand = Cabal.haddockCommand
  { commandUsage = usageAlternatives "v1-haddock" $
      [ "[FLAGS]", "COMPONENTS [FLAGS]" ]
  }

-- | Drop per-component haddock arguments when the Cabal library in use is
-- too old (< 2.3) to understand them.
filterHaddockArgs :: [String] -> Version -> [String]
filterHaddockArgs args cabalLibVersion
  | cabalLibVersion >= mkVersion [2,3,0] = args_latest
  | cabalLibVersion <  mkVersion [2,3,0] = args_2_3_0
  | otherwise = args_latest  -- unreachable; keeps the guard set total
  where
    args_latest = args

    -- Cabal < 2.3 doesn't know about per-component haddock
    args_2_3_0 = []

-- | As 'filterHaddockArgs', but for the 'HaddockFlags' record: clears
-- 'haddockArgs' when targeting Cabal < 2.3.
filterHaddockFlags :: HaddockFlags -> Version -> HaddockFlags
filterHaddockFlags flags cabalLibVersion
  | cabalLibVersion >= mkVersion [2,3,0] = flags_latest
  | cabalLibVersion <  mkVersion [2,3,0] = flags_2_3_0
  | otherwise = flags_latest  -- unreachable; keeps the guard set total
  where
    flags_latest = flags

    flags_2_3_0 = flags_latest {
      -- Cabal < 2.3 doesn't know about per-component haddock
      haddockArgs = []
      }

-- | Re-export a fixed subset of Cabal's haddock options under @haddock-*@
-- long names (short flags are dropped) so they can be mixed into other
-- commands' option lists without clashing.
haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags]
haddockOptions showOrParseArgs
  = [ opt { optionName = "haddock-" ++ name,
            optionDescr = [ fmapOptFlags (\(_, lflags) -> ([], map ("haddock-" ++) lflags)) descr
                          | descr <- optionDescr opt] }
    | opt <- commandOptions Cabal.haddockCommand showOrParseArgs
    , let name = optionName opt
    , name `elem` ["hoogle", "html", "html-location"
                  ,"executables", "tests", "benchmarks", "all", "internal", "css"
                  ,"hyperlink-source", "quickjump", "hscolour-css"
                  ,"contents-location", "for-hackage"]
    ]
-- | Re-export a fixed subset of Cabal's test options under @test-*@ long
-- names; names already starting with @test-@ are kept as-is.
testOptions :: ShowOrParseArgs -> [OptionField TestFlags]
testOptions showOrParseArgs
  = [ opt { optionName = prefixTest name,
            optionDescr = [ fmapOptFlags (\(_, lflags) -> ([], map prefixTest lflags)) descr
                          | descr <- optionDescr opt] }
    | opt <- commandOptions Cabal.testCommand showOrParseArgs
    , let name = optionName opt
    , name `elem` ["log", "machine-log", "show-details", "keep-tix-files"
                  ,"fail-when-no-test-suites", "test-options", "test-option"
                  ,"test-wrapper"]
    ]
  where
    prefixTest name | "test-" `isPrefixOf` name = name
                    | otherwise = "test-" ++ name

-- | As 'testOptions', but for Cabal's benchmark options under @benchmark-*@.
benchmarkOptions :: ShowOrParseArgs -> [OptionField BenchmarkFlags]
benchmarkOptions showOrParseArgs
  = [ opt { optionName = prefixBenchmark name,
            optionDescr = [ fmapOptFlags (\(_, lflags) -> ([], map prefixBenchmark lflags)) descr
                          | descr <- optionDescr opt] }
    | opt <- commandOptions Cabal.benchmarkCommand showOrParseArgs
    , let name = optionName opt
    , name `elem` ["benchmark-options", "benchmark-option"]
    ]
  where
    prefixBenchmark name | "benchmark-" `isPrefixOf` name = name
                         | otherwise = "benchmark-" ++ name

-- | Apply a transformation to the flag-name pair of every 'OptDescr'
-- constructor (used above to rewrite short/long option names).
fmapOptFlags :: (OptFlags -> OptFlags) -> OptDescr a -> OptDescr a
fmapOptFlags modify (ReqArg d f p r w)    = ReqArg d (modify f) p r w
fmapOptFlags modify (OptArg d f p r i w)  = OptArg d (modify f) p r i w
fmapOptFlags modify (ChoiceOpt xs)        = ChoiceOpt [(d, modify f, i, w) | (d, f, i, w) <- xs]
fmapOptFlags modify (BoolOpt d f1 f2 r w) = BoolOpt d (modify f1) (modify f2) r w

-- | Option fields specific to the v1 @install@ command; the shared solver
-- options are appended via 'optionSolverFlags' (continues in later chunks).
installOptions :: ShowOrParseArgs -> [OptionField InstallFlags]
installOptions showOrParseArgs =
      [ option "" ["documentation"]
          "building of documentation"
          installDocumentation (\v flags -> flags { installDocumentation = v })
          (boolOpt [] [])

      , option [] ["doc-index-file"]
          "A central index of haddock API documentation (template cannot use $pkgid)"
          installHaddockIndex (\v flags -> flags { installHaddockIndex = v })
          (reqArg' "TEMPLATE" (toFlag.toPathTemplate)
                              (flagToList .
fmap fromPathTemplate)) , option [] ["dry-run"] "Do not install anything, only print what would be installed." installDryRun (\v flags -> flags { installDryRun = v }) trueArg , option [] ["only-download"] "Do not build anything, only fetch the packages." installOnlyDownload (\v flags -> flags { installOnlyDownload = v }) trueArg , option "" ["target-package-db"] "package database to install into. Required when using ${pkgroot} prefix." installDest (\v flags -> flags { installDest = v }) (reqArg "DATABASE" (succeedReadE (Flag . Cabal.CopyToDb)) (\f -> case f of Flag (Cabal.CopyToDb p) -> [p]; _ -> [])) ] ++ optionSolverFlags showOrParseArgs installMaxBackjumps (\v flags -> flags { installMaxBackjumps = v }) installReorderGoals (\v flags -> flags { installReorderGoals = v }) installCountConflicts (\v flags -> flags { installCountConflicts = v }) installFineGrainedConflicts (\v flags -> flags { installFineGrainedConflicts = v }) installMinimizeConflictSet (\v flags -> flags { installMinimizeConflictSet = v }) installIndependentGoals (\v flags -> flags { installIndependentGoals = v }) installShadowPkgs (\v flags -> flags { installShadowPkgs = v }) installStrongFlags (\v flags -> flags { installStrongFlags = v }) installAllowBootLibInstalls (\v flags -> flags { installAllowBootLibInstalls = v }) installOnlyConstrained (\v flags -> flags { installOnlyConstrained = v }) ++ [ option [] ["reinstall"] "Install even if it means installing the same version again." installReinstall (\v flags -> flags { installReinstall = v }) (yesNoOpt showOrParseArgs) , option [] ["avoid-reinstalls"] "Do not select versions that would destructively overwrite installed packages." (fmap asBool . installAvoidReinstalls) (\v flags -> flags { installAvoidReinstalls = fmap AvoidReinstalls v }) (yesNoOpt showOrParseArgs) , option [] ["force-reinstalls"] "Reinstall packages even if they will most likely break other installed packages." 
installOverrideReinstall (\v flags -> flags { installOverrideReinstall = v }) (yesNoOpt showOrParseArgs) , option [] ["upgrade-dependencies"] "Pick the latest version for all dependencies, rather than trying to pick an installed version." installUpgradeDeps (\v flags -> flags { installUpgradeDeps = v }) (yesNoOpt showOrParseArgs) , option [] ["only-dependencies"] "Install only the dependencies necessary to build the given packages" installOnlyDeps (\v flags -> flags { installOnlyDeps = v }) (yesNoOpt showOrParseArgs) , option [] ["dependencies-only"] "A synonym for --only-dependencies" installOnlyDeps (\v flags -> flags { installOnlyDeps = v }) (yesNoOpt showOrParseArgs) , option [] ["index-state"] ("Use source package index state as it existed at a previous time. " ++ "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++ "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD').") installIndexState (\v flags -> flags { installIndexState = v }) (reqArg "STATE" (parsecToReadE (const $ "index-state must be a " ++ "unix-timestamps (e.g. '@1474732068'), " ++ "a ISO8601 UTC timestamp " ++ "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'") (toFlag `fmap` parsec)) (flagToList . fmap prettyShow)) , option [] ["root-cmd"] "(No longer supported, do not use.)" installRootCmd (\v flags -> flags { installRootCmd = v }) (reqArg' "COMMAND" toFlag flagToList) , option [] ["symlink-bindir"] "Add symlinks to installed executables into this directory." installSymlinkBinDir (\v flags -> flags { installSymlinkBinDir = v }) (reqArgFlag "DIR") , option [] ["build-summary"] "Save build summaries to file (name template can use $pkgid, $compiler, $os, $arch)" installSummaryFile (\v flags -> flags { installSummaryFile = v }) (reqArg' "TEMPLATE" (\x -> toNubList [toPathTemplate x]) (map fromPathTemplate . 
fromNubList)) , option [] ["build-log"] "Log all builds to file (name template can use $pkgid, $compiler, $os, $arch)" installLogFile (\v flags -> flags { installLogFile = v }) (reqArg' "TEMPLATE" (toFlag.toPathTemplate) (flagToList . fmap fromPathTemplate)) , option [] ["remote-build-reporting"] "Generate build reports to send to a remote server (none, anonymous or detailed)." installBuildReports (\v flags -> flags { installBuildReports = v }) (reqArg "LEVEL" (parsecToReadE (const $ "report level must be 'none', " ++ "'anonymous' or 'detailed'") (toFlag `fmap` parsec)) (flagToList . fmap prettyShow)) , option [] ["report-planning-failure"] "Generate build reports when the dependency solver fails. This is used by the Hackage build bot." installReportPlanningFailure (\v flags -> flags { installReportPlanningFailure = v }) trueArg , option "" ["per-component"] "Per-component builds when possible" installPerComponent (\v flags -> flags { installPerComponent = v }) (boolOpt [] []) , option [] ["run-tests"] "Run package test suites during installation." installRunTests (\v flags -> flags { installRunTests = v }) trueArg , optionNumJobs installNumJobs (\v flags -> flags { installNumJobs = v }) , option [] ["keep-going"] "After a build failure, continue to build other unaffected packages." installKeepGoing (\v flags -> flags { installKeepGoing = v }) trueArg , option [] ["offline"] "Don't download packages from the Internet." installOfflineMode (\v flags -> flags { installOfflineMode = v }) (yesNoOpt showOrParseArgs) ] ++ case showOrParseArgs of -- TODO: remove when "cabal install" -- avoids ParseArgs -> [ option [] ["only"] "Only installs the package in the current directory." 
installOnly (\v flags -> flags { installOnly = v }) trueArg ] _ -> [] instance Monoid InstallFlags where mempty = gmempty mappend = (<>) instance Semigroup InstallFlags where (<>) = gmappend -- ------------------------------------------------------------ -- * Upload flags -- ------------------------------------------------------------ -- | Is this a candidate package or a package to be published? data IsCandidate = IsCandidate | IsPublished deriving Eq data UploadFlags = UploadFlags { uploadCandidate :: Flag IsCandidate, uploadDoc :: Flag Bool, uploadUsername :: Flag Username, uploadPassword :: Flag Password, uploadPasswordCmd :: Flag [String], uploadVerbosity :: Flag Verbosity } deriving Generic defaultUploadFlags :: UploadFlags defaultUploadFlags = UploadFlags { uploadCandidate = toFlag IsCandidate, uploadDoc = toFlag False, uploadUsername = mempty, uploadPassword = mempty, uploadPasswordCmd = mempty, uploadVerbosity = toFlag normal } uploadCommand :: CommandUI UploadFlags uploadCommand = CommandUI { commandName = "upload", commandSynopsis = "Uploads source packages or documentation to Hackage.", commandDescription = Nothing, commandNotes = Just $ \_ -> "You can store your Hackage login in the ~/.cabal/config file\n" ++ relevantConfigValuesText ["username", "password", "password-command"], commandUsage = \pname -> "Usage: " ++ pname ++ " upload [FLAGS] TARFILES\n", commandDefaultFlags = defaultUploadFlags, commandOptions = \_ -> [optionVerbosity uploadVerbosity (\v flags -> flags { uploadVerbosity = v }) ,option [] ["publish"] "Publish the package instead of uploading it as a candidate." uploadCandidate (\v flags -> flags { uploadCandidate = v }) (noArg (Flag IsPublished)) ,option ['d'] ["documentation"] ("Upload documentation instead of a source package. " ++ "By default, this uploads documentation for a package candidate. 
" ++ "To upload documentation for " ++ "a published package, combine with --publish.") uploadDoc (\v flags -> flags { uploadDoc = v }) trueArg ,option ['u'] ["username"] "Hackage username." uploadUsername (\v flags -> flags { uploadUsername = v }) (reqArg' "USERNAME" (toFlag . Username) (flagToList . fmap unUsername)) ,option ['p'] ["password"] "Hackage password." uploadPassword (\v flags -> flags { uploadPassword = v }) (reqArg' "PASSWORD" (toFlag . Password) (flagToList . fmap unPassword)) ,option ['P'] ["password-command"] "Command to get Hackage password." uploadPasswordCmd (\v flags -> flags { uploadPasswordCmd = v }) (reqArg' "PASSWORD" (Flag . words) (fromMaybe [] . flagToMaybe)) ] } instance Monoid UploadFlags where mempty = gmempty mappend = (<>) instance Semigroup UploadFlags where (<>) = gmappend -- ------------------------------------------------------------ -- * Init flags -- ------------------------------------------------------------ initCommand :: CommandUI IT.InitFlags initCommand = CommandUI { commandName = "init", commandSynopsis = "Create a new cabal package.", commandDescription = Just $ \_ -> wrapText $ "Create a .cabal, CHANGELOG.md, minimal initial Haskell code and optionally a LICENSE file.\n" ++ "\n" ++ "Calling init with no arguments runs interactive mode, " ++ "which will try to guess as much as possible and prompt you for the rest.\n" ++ "Non-interactive mode can be invoked by the -n/--non-interactive flag, " ++ "which will let you specify the options via flags and will use the defaults for the rest.\n" ++ "It is also possible to call init with a single argument, which denotes the project's desired " ++ "root directory.\n", commandNotes = Nothing, commandUsage = \pname -> "Usage: " ++ pname ++ " init [PROJECT ROOT] [FLAGS]\n", commandDefaultFlags = IT.defaultInitFlags, commandOptions = initOptions } initOptions :: ShowOrParseArgs -> [OptionField IT.InitFlags] initOptions _ = [ option ['i'] ["interactive"] "interactive mode." 
IT.interactive (\v flags -> flags { IT.interactive = v }) (boolOpt' (['i'], ["interactive"]) (['n'], ["non-interactive"])) , option ['q'] ["quiet"] "Do not generate log messages to stdout." IT.quiet (\v flags -> flags { IT.quiet = v }) trueArg , option [] ["no-comments"] "Do not generate explanatory comments in the .cabal file." IT.noComments (\v flags -> flags { IT.noComments = v }) trueArg , option ['m'] ["minimal"] "Generate a minimal .cabal file, that is, do not include extra empty fields. Also implies --no-comments." IT.minimal (\v flags -> flags { IT.minimal = v }) trueArg , option [] ["overwrite"] "Overwrite any existing .cabal, LICENSE, or Setup.hs files without warning." IT.overwrite (\v flags -> flags { IT.overwrite = v }) trueArg , option [] ["package-dir", "packagedir"] "Root directory of the package (default = current directory)." IT.packageDir (\v flags -> flags { IT.packageDir = v }) (reqArgFlag "DIRECTORY") , option ['p'] ["package-name"] "Name of the Cabal package to create." IT.packageName (\v flags -> flags { IT.packageName = v }) (reqArg "PACKAGE" (parsecToReadE ("Cannot parse package name: "++) (toFlag `fmap` parsec)) (flagToList . fmap prettyShow)) , option [] ["version"] "Initial version of the package." IT.version (\v flags -> flags { IT.version = v }) (reqArg "VERSION" (parsecToReadE ("Cannot parse package version: "++) (toFlag `fmap` parsec)) (flagToList . fmap prettyShow)) , option [] ["cabal-version"] "Version of the Cabal specification." IT.cabalVersion (\v flags -> flags { IT.cabalVersion = v }) (reqArg "CABALSPECVERSION" (parsecToReadE ("Cannot parse Cabal specification version: "++) (fmap (toFlag . getSpecVersion) parsec)) (flagToList . fmap (prettyShow . SpecVersion))) , option ['l'] ["license"] "Project license." IT.license (\v flags -> flags { IT.license = v }) (reqArg "LICENSE" (parsecToReadE ("Cannot parse license: "++) (toFlag `fmap` parsec)) (flagToList . 
fmap prettyShow)) , option ['a'] ["author"] "Name of the project's author." IT.author (\v flags -> flags { IT.author = v }) (reqArgFlag "NAME") , option ['e'] ["email"] "Email address of the maintainer." IT.email (\v flags -> flags { IT.email = v }) (reqArgFlag "EMAIL") , option ['u'] ["homepage"] "Project homepage and/or repository." IT.homepage (\v flags -> flags { IT.homepage = v }) (reqArgFlag "URL") , option ['s'] ["synopsis"] "Short project synopsis." IT.synopsis (\v flags -> flags { IT.synopsis = v }) (reqArgFlag "TEXT") , option ['c'] ["category"] "Project category." IT.category (\v flags -> flags { IT.category = v }) (reqArgFlag "CATEGORY") , option ['x'] ["extra-source-file"] "Extra source file to be distributed with tarball." IT.extraSrc (\v flags -> flags { IT.extraSrc = v }) (reqArg' "FILE" (Flag . (:[])) (fromFlagOrDefault [])) , option [] ["extra-doc-file"] "Extra doc file to be distributed with tarball." IT.extraDoc (\v flags -> flags { IT.extraDoc = v }) (reqArg' "FILE" (Flag . (:[])) (fromFlagOrDefault [])) , option [] ["lib", "is-library"] "Build a library." IT.packageType (\v flags -> flags { IT.packageType = v }) (noArg (Flag IT.Library)) , option [] ["exe", "is-executable"] "Build an executable." IT.packageType (\v flags -> flags { IT.packageType = v }) (noArg (Flag IT.Executable)) , option [] ["libandexe", "is-libandexe"] "Build a library and an executable." IT.packageType (\v flags -> flags { IT.packageType = v }) (noArg (Flag IT.LibraryAndExecutable)) , option [] ["tests"] "Generate a test suite, standalone or for a library." IT.initializeTestSuite (\v flags -> flags { IT.initializeTestSuite = v }) trueArg , option [] ["test-dir"] "Directory containing tests." IT.testDirs (\v flags -> flags { IT.testDirs = v }) (reqArg' "DIR" (Flag . (:[])) (fromFlagOrDefault [])) , option [] ["simple"] "Create a simple project with sensible defaults." 
IT.simpleProject (\v flags -> flags { IT.simpleProject = v }) trueArg , option [] ["main-is"] "Specify the main module." IT.mainIs (\v flags -> flags { IT.mainIs = v }) (reqArgFlag "FILE") , option [] ["language"] "Specify the default language." IT.language (\v flags -> flags { IT.language = v }) (reqArg "LANGUAGE" (parsecToReadE ("Cannot parse language: "++) (toFlag `fmap` parsec)) (flagToList . fmap prettyShow)) , option ['o'] ["expose-module"] "Export a module from the package." IT.exposedModules (\v flags -> flags { IT.exposedModules = v }) (reqArg "MODULE" (parsecToReadE ("Cannot parse module name: "++) (Flag . (:[]) <$> parsec)) (flagElim [] (fmap prettyShow))) , option [] ["extension"] "Use a LANGUAGE extension (in the other-extensions field)." IT.otherExts (\v flags -> flags { IT.otherExts = v }) (reqArg "EXTENSION" (parsecToReadE ("Cannot parse extension: "++) (Flag . (:[]) <$> parsec)) (flagElim [] (fmap prettyShow))) , option ['d'] ["dependency"] "Package dependency." IT.dependencies (\v flags -> flags { IT.dependencies = v }) (reqArg "PACKAGE" (parsecToReadE ("Cannot parse dependency: "++) (Flag . (:[]) <$> parsec)) (flagElim [] (fmap prettyShow))) , option [] ["application-dir"] "Directory containing package application executable." IT.applicationDirs (\v flags -> flags { IT.applicationDirs = v}) (reqArg' "DIR" (Flag . (:[])) (fromFlagOrDefault [])) , option [] ["source-dir", "sourcedir"] "Directory containing package library source." IT.sourceDirs (\v flags -> flags { IT.sourceDirs = v }) (reqArg' "DIR" (Flag. (:[])) (fromFlagOrDefault [])) , option [] ["build-tool"] "Required external build tool." IT.buildTools (\v flags -> flags { IT.buildTools = v }) (reqArg' "TOOL" (Flag . (:[])) (fromFlagOrDefault [])) , option "w" ["with-compiler"] "give the path to a particular compiler. For 'init', this flag is used \ \to set the bounds inferred for the 'base' package." 
IT.initHcPath (\v flags -> flags { IT.initHcPath = v }) (reqArgFlag "PATH") , optionVerbosity IT.initVerbosity (\v flags -> flags { IT.initVerbosity = v }) ] -- ------------------------------------------------------------ -- * Copy and Register -- ------------------------------------------------------------ copyCommand :: CommandUI CopyFlags copyCommand = Cabal.copyCommand { commandNotes = Just $ \pname -> "Examples:\n" ++ " " ++ pname ++ " v1-copy " ++ " All the components in the package\n" ++ " " ++ pname ++ " v1-copy foo " ++ " A component (i.e. lib, exe, test suite)" , commandUsage = usageAlternatives "v1-copy" $ [ "[FLAGS]" , "COMPONENTS [FLAGS]" ] } registerCommand :: CommandUI RegisterFlags registerCommand = Cabal.registerCommand { commandUsage = \pname -> "Usage: " ++ pname ++ " v1-register [FLAGS]\n" } -- ------------------------------------------------------------ -- * ActAsSetup flags -- ------------------------------------------------------------ data ActAsSetupFlags = ActAsSetupFlags { actAsSetupBuildType :: Flag BuildType } deriving Generic defaultActAsSetupFlags :: ActAsSetupFlags defaultActAsSetupFlags = ActAsSetupFlags { actAsSetupBuildType = toFlag Simple } actAsSetupCommand :: CommandUI ActAsSetupFlags actAsSetupCommand = CommandUI { commandName = "act-as-setup", commandSynopsis = "Run as-if this was a Setup.hs", commandDescription = Nothing, commandNotes = Nothing, commandUsage = \pname -> "Usage: " ++ pname ++ " act-as-setup\n", commandDefaultFlags = defaultActAsSetupFlags, commandOptions = \_ -> [option "" ["build-type"] "Use the given build type." actAsSetupBuildType (\v flags -> flags { actAsSetupBuildType = v }) (reqArg "BUILD-TYPE" (parsecToReadE ("Cannot parse build type: "++) (fmap toFlag parsec)) (map prettyShow . 
flagToList)) ] } instance Monoid ActAsSetupFlags where mempty = gmempty mappend = (<>) instance Semigroup ActAsSetupFlags where (<>) = gmappend -- ------------------------------------------------------------ -- * UserConfig flags -- ------------------------------------------------------------ data UserConfigFlags = UserConfigFlags { userConfigVerbosity :: Flag Verbosity, userConfigForce :: Flag Bool, userConfigAppendLines :: Flag [String] } deriving Generic instance Monoid UserConfigFlags where mempty = UserConfigFlags { userConfigVerbosity = toFlag normal, userConfigForce = toFlag False, userConfigAppendLines = toFlag [] } mappend = (<>) instance Semigroup UserConfigFlags where (<>) = gmappend userConfigCommand :: CommandUI UserConfigFlags userConfigCommand = CommandUI { commandName = "user-config", commandSynopsis = "Display and update the user's global cabal configuration.", commandDescription = Just $ \_ -> wrapText $ "When upgrading cabal, the set of configuration keys and their default" ++ " values may change. This command provides means to merge the existing" ++ " config in ~/.cabal/config" ++ " (i.e. all bindings that are actually defined and not commented out)" ++ " and the default config of the new version.\n" ++ "\n" ++ "init: Creates a new config file at either ~/.cabal/config or as" ++ " specified by --config-file, if given. 
An existing file won't be " ++ " overwritten unless -f or --force is given.\n" ++ "diff: Shows a pseudo-diff of the user's ~/.cabal/config file and" ++ " the default configuration that would be created by cabal if the" ++ " config file did not exist.\n" ++ "update: Applies the pseudo-diff to the configuration that would be" ++ " created by default, and write the result back to ~/.cabal/config.", commandNotes = Nothing, commandUsage = usageAlternatives "user-config" ["init", "diff", "update"], commandDefaultFlags = mempty, commandOptions = \ _ -> [ optionVerbosity userConfigVerbosity (\v flags -> flags { userConfigVerbosity = v }) , option ['f'] ["force"] "Overwrite the config file if it already exists." userConfigForce (\v flags -> flags { userConfigForce = v }) trueArg , option ['a'] ["augment"] "Additional setting to augment the config file (replacing a previous setting if it existed)." userConfigAppendLines (\v flags -> flags {userConfigAppendLines = Flag $ concat (flagToList (userConfigAppendLines flags) ++ flagToList v)}) (reqArg' "CONFIGLINE" (Flag . (:[])) (fromMaybe [] . flagToMaybe)) ] } -- ------------------------------------------------------------ -- * GetOpt Utils -- ------------------------------------------------------------ reqArgFlag :: ArgPlaceHolder -> MkOptDescr (b -> Flag String) (Flag String -> b -> b) b reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList liftOptions :: (b -> a) -> (a -> b -> b) -> [OptionField a] -> [OptionField b] liftOptions get set = map (liftOption get set) yesNoOpt :: ShowOrParseArgs -> MkOptDescr (b -> Flag Bool) (Flag Bool -> b -> b) b yesNoOpt ShowArgs sf lf = trueArg sf lf yesNoOpt _ sf lf = Command.boolOpt' flagToMaybe Flag (sf, lf) ([], map ("no-" ++) lf) sf lf optionSolver :: (flags -> Flag PreSolver) -> (Flag PreSolver -> flags -> flags) -> OptionField flags optionSolver get set = option [] ["solver"] ("Select dependency solver to use (default: " ++ prettyShow defaultSolver ++ "). 
Choices: " ++ allSolvers ++ ".") get set (reqArg "SOLVER" (parsecToReadE (const $ "solver must be one of: " ++ allSolvers) (toFlag `fmap` parsec)) (flagToList . fmap prettyShow)) optionSolverFlags :: ShowOrParseArgs -> (flags -> Flag Int ) -> (Flag Int -> flags -> flags) -> (flags -> Flag ReorderGoals) -> (Flag ReorderGoals -> flags -> flags) -> (flags -> Flag CountConflicts) -> (Flag CountConflicts -> flags -> flags) -> (flags -> Flag FineGrainedConflicts) -> (Flag FineGrainedConflicts -> flags -> flags) -> (flags -> Flag MinimizeConflictSet) -> (Flag MinimizeConflictSet -> flags -> flags) -> (flags -> Flag IndependentGoals) -> (Flag IndependentGoals -> flags -> flags) -> (flags -> Flag ShadowPkgs) -> (Flag ShadowPkgs -> flags -> flags) -> (flags -> Flag StrongFlags) -> (Flag StrongFlags -> flags -> flags) -> (flags -> Flag AllowBootLibInstalls) -> (Flag AllowBootLibInstalls -> flags -> flags) -> (flags -> Flag OnlyConstrained) -> (Flag OnlyConstrained -> flags -> flags) -> [OptionField flags] optionSolverFlags showOrParseArgs getmbj setmbj getrg setrg getcc setcc getfgc setfgc getmc setmc getig setig getsip setsip getstrfl setstrfl getib setib getoc setoc = [ option [] ["max-backjumps"] ("Maximum number of backjumps allowed while solving (default: " ++ show defaultMaxBackjumps ++ "). Use a negative number to enable unlimited backtracking. Use 0 to disable backtracking completely.") getmbj setmbj (reqArg "NUM" (parsecToReadE ("Cannot parse number: "++) (fmap toFlag P.signedIntegral)) (map show . flagToList)) , option [] ["reorder-goals"] "Try to reorder goals according to certain heuristics. Slows things down on average, but may make backtracking faster for some packages." (fmap asBool . getrg) (setrg . fmap ReorderGoals) (yesNoOpt showOrParseArgs) , option [] ["count-conflicts"] "Try to speed up solving by preferring goals that are involved in a lot of conflicts (default)." (fmap asBool . getcc) (setcc . 
fmap CountConflicts) (yesNoOpt showOrParseArgs) , option [] ["fine-grained-conflicts"] "Skip a version of a package if it does not resolve the conflicts encountered in the last version, as a solver optimization (default)." (fmap asBool . getfgc) (setfgc . fmap FineGrainedConflicts) (yesNoOpt showOrParseArgs) , option [] ["minimize-conflict-set"] ("When there is no solution, try to improve the error message by finding " ++ "a minimal conflict set (default: false). May increase run time " ++ "significantly.") (fmap asBool . getmc) (setmc . fmap MinimizeConflictSet) (yesNoOpt showOrParseArgs) , option [] ["independent-goals"] "Treat several goals on the command line as independent. If several goals depend on the same package, different versions can be chosen." (fmap asBool . getig) (setig . fmap IndependentGoals) (yesNoOpt showOrParseArgs) , option [] ["shadow-installed-packages"] "If multiple package instances of the same version are installed, treat all but one as shadowed." (fmap asBool . getsip) (setsip . fmap ShadowPkgs) (yesNoOpt showOrParseArgs) , option [] ["strong-flags"] "Do not defer flag choices (this used to be the default in cabal-install <= 1.20)." (fmap asBool . getstrfl) (setstrfl . fmap StrongFlags) (yesNoOpt showOrParseArgs) , option [] ["allow-boot-library-installs"] "Allow cabal to install base, ghc-prim, integer-simple, integer-gmp, and template-haskell." (fmap asBool . getib) (setib . fmap AllowBootLibInstalls) (yesNoOpt showOrParseArgs) , option [] ["reject-unconstrained-dependencies"] "Require these packages to have constraints on them if they are to be selected (default: none)." getoc setoc (reqArg "none|all" (parsecToReadE (const "reject-unconstrained-dependencies must be 'none' or 'all'") (toFlag `fmap` parsec)) (flagToList . 
fmap prettyShow)) ] usagePackages :: String -> String -> String usagePackages name pname = "Usage: " ++ pname ++ " " ++ name ++ " [PACKAGES]\n" usageFlags :: String -> String -> String usageFlags name pname = "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n" -- ------------------------------------------------------------ -- * Repo helpers -- ------------------------------------------------------------ showRemoteRepo :: RemoteRepo -> String showRemoteRepo = prettyShow readRemoteRepo :: String -> Maybe RemoteRepo readRemoteRepo = simpleParsec showLocalRepo :: LocalRepo -> String showLocalRepo = prettyShow readLocalRepo :: String -> Maybe LocalRepo readLocalRepo = simpleParsec -- ------------------------------------------------------------ -- * Helpers for Documentation -- ------------------------------------------------------------ relevantConfigValuesText :: [String] -> String relevantConfigValuesText vs = "Relevant global configuration keys:\n" ++ concat [" " ++ v ++ "\n" |v <- vs] cabal-install-3.8.1.0/src/Distribution/Client/SetupWrapper.hs0000644000000000000000000012230207346545000022241 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE OverloadedStrings #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.SetupWrapper -- Copyright : (c) The University of Glasgow 2006, -- Duncan Coutts 2008 -- -- Maintainer : cabal-devel@haskell.org -- Stability : alpha -- Portability : portable -- -- An interface to building and installing Cabal packages. -- If the @Built-Type@ field is specified as something other than -- 'Custom', and the current version of Cabal is acceptable, this performs -- setup actions directly. Otherwise it builds the setup script and -- runs it with the given arguments. 
module Distribution.Client.SetupWrapper ( getSetup, runSetup, runSetupCommand, setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions, ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.CabalSpecVersion (cabalSpecMinimumLibraryVersion) import qualified Distribution.Make as Make import qualified Distribution.Simple as Simple import Distribution.Version ( Version, mkVersion, versionNumbers, VersionRange, anyVersion , intersectVersionRanges, orLaterVersion , withinRange ) import qualified Distribution.Backpack as Backpack import Distribution.Package ( newSimpleUnitId, unsafeMkDefUnitId, ComponentId , PackageId, mkPackageName , PackageIdentifier(..), packageVersion, packageName ) import Distribution.PackageDescription ( GenericPackageDescription(packageDescription) , PackageDescription(..), specVersion, buildType , BuildType(..) ) import Distribution.Types.ModuleRenaming (defaultRenaming) import Distribution.Simple.Configure ( configCompilerEx ) import Distribution.Compiler ( buildCompilerId, CompilerFlavor(GHC, GHCJS) ) import Distribution.Simple.Compiler ( Compiler(compilerId), compilerFlavor, PackageDB(..), PackageDBStack ) import Distribution.Simple.PackageDescription ( readGenericPackageDescription ) import Distribution.Simple.PreProcess ( runSimplePreProcessor, ppUnlit ) import Distribution.Simple.Build.Macros ( generatePackageVersionMacros ) import Distribution.Simple.Program ( ProgramDb, emptyProgramDb , getProgramSearchPath, getDbProgramOutput, runDbProgram, ghcProgram , ghcjsProgram ) import Distribution.Simple.Program.Find ( programSearchPathAsPATHVar , ProgramSearchPathEntry(ProgramSearchPathDir) ) import Distribution.Simple.Program.Run ( getEffectiveEnvironment ) import qualified Distribution.Simple.Program.Strip as Strip import Distribution.Simple.BuildPaths ( defaultDistPref, exeExtension ) import Distribution.Simple.Command ( CommandUI(..), commandShowOptions ) import Distribution.Simple.Program.GHC ( 
GhcMode(..), GhcOptions(..), renderGhcOptions ) import qualified Distribution.Simple.PackageIndex as PackageIndex import Distribution.Simple.PackageIndex (InstalledPackageIndex) import qualified Distribution.InstalledPackageInfo as IPI import Distribution.Client.Types import Distribution.Client.Config ( getCabalDir ) import Distribution.Client.IndexUtils ( getInstalledPackages ) import Distribution.Client.JobControl ( Lock, criticalSection ) import Distribution.Simple.Setup ( Flag(..) ) import Distribution.Utils.Generic ( safeHead ) import Distribution.Simple.Utils ( die', debug, info, infoNoWrap , cabalVersion, tryFindPackageDesc , createDirectoryIfMissingVerbose, installExecutableFile , copyFileVerbose, rewriteFileEx, rewriteFileLBS ) import Distribution.Client.Utils ( inDir, tryCanonicalizePath, withExtraPathEnv , existsAndIsMoreRecentThan, moreRecentFile, withEnv, withEnvOverrides #ifdef mingw32_HOST_OS , canonicalizePathNoThrow #endif ) import Distribution.ReadE import Distribution.System ( Platform(..), buildPlatform ) import Distribution.Utils.NubList ( toNubListR ) import Distribution.Verbosity import Distribution.Compat.Stack import System.Directory ( doesFileExist ) import System.FilePath ( (), (<.>) ) import System.IO ( Handle, hPutStr ) import Distribution.Compat.Process (createProcess) import System.Process ( StdStream(..), proc, waitForProcess , ProcessHandle ) import qualified System.Process as Process import Data.List ( foldl1' ) import Distribution.Client.Compat.ExecutablePath ( getExecutablePath ) import qualified Data.ByteString.Lazy as BS #ifdef mingw32_HOST_OS import Distribution.Simple.Utils ( withTempDirectory ) import Control.Exception ( bracket ) import System.FilePath ( equalFilePath, takeDirectory ) import System.Directory ( doesDirectoryExist ) import qualified System.Win32 as Win32 #endif -- | @Setup@ encapsulates the outcome of configuring a setup method to build a -- particular package. 
data Setup = Setup
  { setupMethod        :: SetupMethod
  , setupScriptOptions :: SetupScriptOptions
  , setupVersion       :: Version
  , setupBuildType     :: BuildType
  , setupPackage       :: PackageDescription
  }

-- | @SetupMethod@ represents one of the methods used to run Cabal commands.
data SetupMethod
  = InternalMethod
    -- ^ run Cabal commands through \"cabal\" in the
    -- current process
  | SelfExecMethod
    -- ^ run Cabal commands through \"cabal\" as a
    -- child process
  | ExternalMethod FilePath
    -- ^ run Cabal commands through a custom \"Setup\" executable

-- TODO: The 'setupWrapper' and 'SetupScriptOptions' should be split into two
-- parts: one that has no policy and just does as it's told with all the
-- explicit options, and an optional initial part that applies certain
-- policies (like if we should add the Cabal lib as a dep, and if so which
-- version). This could be structured as an action that returns a fully
-- elaborated 'SetupScriptOptions' containing no remaining policy choices.
--
-- See also the discussion at https://github.com/haskell/cabal/pull/3094

-- | @SetupScriptOptions@ are options used to configure and run 'Setup', as
-- opposed to options given to the Cabal command at runtime.
data SetupScriptOptions = SetupScriptOptions
  { -- | The version of the Cabal library to use (if 'useDependenciesExclusive'
    -- is not set). A suitable version of the Cabal library must be installed
    -- (or for some build-types be the one cabal-install was built with).
    --
    -- The version found also determines the version of the Cabal specification
    -- that we use for talking to the Setup.hs, unless overridden by
    -- 'useCabalSpecVersion'.
    --
    useCabalVersion          :: VersionRange

    -- | This is the version of the Cabal specification that we believe that
    -- this package uses. This affects the semantics and in particular the
    -- Setup command line interface.
    --
    -- This is similar to 'useCabalVersion' but instead of probing the system
    -- for a version of the /Cabal library/ you just say exactly which version
    -- of the /spec/ we will use. Using this also avoids adding the Cabal
    -- library as an additional dependency, so add it to 'useDependencies'
    -- if needed.
    --
  , useCabalSpecVersion      :: Maybe Version
  , useCompiler              :: Maybe Compiler
  , usePlatform              :: Maybe Platform
  , usePackageDB             :: PackageDBStack
  , usePackageIndex          :: Maybe InstalledPackageIndex
  , useProgramDb             :: ProgramDb
  , useDistPref              :: FilePath
  , useLoggingHandle         :: Maybe Handle
  , useWorkingDir            :: Maybe FilePath

    -- | Extra things to add to PATH when invoking the setup script.
  , useExtraPathEnv          :: [FilePath]

    -- | Extra environment variables paired with overrides, where
    --
    -- * @'Just' v@ means \"set the environment variable's value to @v@\".
    -- * 'Nothing' means \"unset the environment variable\".
  , useExtraEnvOverrides     :: [(String, Maybe FilePath)]

  , forceExternalSetupMethod :: Bool

    -- | List of dependencies to use when building Setup.hs.
  , useDependencies          :: [(ComponentId, PackageId)]

    -- | Is the list of setup dependencies exclusive?
    --
    -- When this is @False@, if we compile the Setup.hs script we do so with the
    -- list in 'useDependencies' but all other packages in the environment are
    -- also visible. A suitable version of @Cabal@ library (see
    -- 'useCabalVersion') is also added to the list of dependencies, unless
    -- 'useDependencies' already contains a Cabal dependency.
    --
    -- When @True@, only the 'useDependencies' packages are used, with other
    -- packages in the environment hidden.
    --
    -- This feature is here to support the setup stanza in .cabal files that
    -- specifies explicit (and exclusive) dependencies, as well as the old
    -- style with no dependencies.
  , useDependenciesExclusive :: Bool

    -- | Should we build the Setup.hs with CPP version macros available?
    -- We turn this on when we have a setup stanza in .cabal that declares
    -- explicit setup dependencies.
    --
  , useVersionMacros         :: Bool

    -- Used only by 'cabal clean' on Windows.
    --
    -- Note: win32 clean hack
    -------------------------
    -- On Windows, running './dist/setup/setup clean' doesn't work because the
    -- setup script will try to delete itself (which causes it to fail horribly,
    -- unlike on Linux). So we have to move the setup exe out of the way first
    -- and then delete it manually. This applies only to the external setup
    -- method.
  , useWin32CleanHack        :: Bool

    -- Used only when calling setupWrapper from parallel code to serialise
    -- access to the setup cache; should be Nothing otherwise.
    --
    -- Note: setup exe cache
    ------------------------
    -- When we are installing in parallel, we always use the external setup
    -- method. Since compiling the setup script each time adds noticeable
    -- overhead, we use a shared setup script cache
    -- ('~/.cabal/setup-exe-cache'). For each (compiler, platform, Cabal
    -- version) combination the cache holds a compiled setup script
    -- executable. This only affects the Simple build type; for the Custom,
    -- Configure and Make build types we always compile the setup script anew.
  , setupCacheLock           :: Maybe Lock

    -- | Is the task we are going to run an interactive foreground task,
    -- or a non-interactive background task? Based on this flag we
    -- decide whether or not to delegate ctrl+c to the spawned task
  , isInteractive            :: Bool
  }

defaultSetupScriptOptions :: SetupScriptOptions
defaultSetupScriptOptions = SetupScriptOptions
  { useCabalVersion          = anyVersion
  , useCabalSpecVersion      = Nothing
  , useCompiler              = Nothing
  , usePlatform              = Nothing
  , usePackageDB             = [GlobalPackageDB, UserPackageDB]
  , usePackageIndex          = Nothing
  , useDependencies          = []
  , useDependenciesExclusive = False
  , useVersionMacros         = False
  , useProgramDb             = emptyProgramDb
  , useDistPref              = defaultDistPref
  , useLoggingHandle         = Nothing
  , useWorkingDir            = Nothing
  , useExtraPathEnv          = []
  , useExtraEnvOverrides     = []
  , useWin32CleanHack        = False
  , forceExternalSetupMethod = False
  , setupCacheLock           = Nothing
  , isInteractive            = False
  }

-- | The effective working directory: 'useWorkingDir' when set and non-empty,
-- otherwise @\".\"@.
workingDir :: SetupScriptOptions -> FilePath
workingDir options = case fromMaybe "" (useWorkingDir options) of
  []  -> "."
  dir -> dir

-- | A @SetupRunner@ implements a 'SetupMethod'.
type SetupRunner = Verbosity
                -> SetupScriptOptions
                -> BuildType
                -> [String]
                -> IO ()

-- | Prepare to build a package by configuring a 'SetupMethod'. The returned
-- 'Setup' object identifies the method. The 'SetupScriptOptions' may be changed
-- during the configuration process; the final values are given by
-- 'setupScriptOptions'.
getSetup :: Verbosity
         -> SetupScriptOptions
         -> Maybe PackageDescription
         -> IO Setup
getSetup verbosity options mpkg = do
  pkg <- maybe getPkg return mpkg
  let options' = options
        { useCabalVersion = intersectVersionRanges
            (useCabalVersion options)
            (orLaterVersion
              (mkVersion (cabalSpecMinimumLibraryVersion (specVersion pkg))))
        }
      buildType' = buildType pkg
  (version, method, options'') <- getSetupMethod verbosity options' pkg buildType'
  return Setup
    { setupMethod        = method
    , setupScriptOptions = options''
    , setupVersion       = version
    , setupBuildType     = buildType'
    , setupPackage       = pkg
    }
  where
    -- Read the package description from the working directory when the caller
    -- did not supply one. (Idiom fix: 'fmap' instead of '>>= return .'.)
    getPkg = fmap packageDescription $
                 tryFindPackageDesc verbosity (fromMaybe "." (useWorkingDir options))
             >>= readGenericPackageDescription verbosity

-- | Decide if we're going to be able to do a direct internal call to the
-- entry point in the Cabal library or if we're going to have to compile
-- and execute an external Setup.hs script.
--
getSetupMethod
  :: Verbosity -> SetupScriptOptions -> PackageDescription -> BuildType
  -> IO (Version, SetupMethod, SetupScriptOptions)
getSetupMethod verbosity options pkg buildType'
  | buildType' == Custom
    || maybe False (cabalVersion /=) (useCabalSpecVersion options)
    || not (cabalVersion `withinRange` useCabalVersion options)
  = getExternalSetupMethod verbosity options pkg buildType'
  | isJust (useLoggingHandle options)
    -- Forcing is done to use an external process e.g. due to parallel
    -- build concerns.
    || forceExternalSetupMethod options
  = return (cabalVersion, SelfExecMethod, options)
  | otherwise
  = return (cabalVersion, InternalMethod, options)

-- | Dispatch a 'SetupMethod' to the 'SetupRunner' that implements it.
runSetupMethod :: WithCallStack (SetupMethod -> SetupRunner)
runSetupMethod InternalMethod        = internalSetupMethod
runSetupMethod (ExternalMethod path) = externalSetupMethod path
runSetupMethod SelfExecMethod        = selfExecSetupMethod

-- | Run a configured 'Setup' with specific arguments.
runSetup :: Verbosity -> Setup -> [String]  -- ^ command-line arguments
         -> IO ()
runSetup verbosity setup args0 = do
  let method  = setupMethod setup
      options = setupScriptOptions setup
      bt      = setupBuildType setup
      -- Rewrite verbosity flags that old Setup executables cannot parse.
      args    = verbosityHack (setupVersion setup) args0
  when (verbosity >= deafening {- avoid test if not debug -} && args /= args0) $
    infoNoWrap verbose $
        "Applied verbosity hack:\n" ++
        " Before: " ++ show args0 ++ "\n" ++
        " After: " ++ show args ++ "\n"
  runSetupMethod method verbosity options bt args

-- | This is a horrible hack to make sure passing fancy verbosity
-- flags (e.g., @-v'info +callstack'@) doesn't break horribly on
-- old Setup. We can't do it in 'filterConfigureFlags' because
-- verbosity applies to ALL commands.
verbosityHack :: Version -> [String] -> [String]
verbosityHack ver args0
  -- Cabal >= 2.1 understands the fancy verbosity syntax; pass args through.
  | ver >= mkVersion [2,1] = args0
  | otherwise = go args0
  where
    -- Walk the argument list, rewriting anything that looks like a verbosity
    -- flag (-vX, --verbose=X, or --verbose X) when 'munch' can simplify it.
    go (('-':'v':rest) : args)
      | Just rest' <- munch rest = ("-v" ++ rest') : go args
    go (('-':'-':'v':'e':'r':'b':'o':'s':'e':'=':rest) : args)
      | Just rest' <- munch rest = ("--verbose=" ++ rest') : go args
    go ("--verbose" : rest : args)
      | Just rest' <- munch rest = "--verbose" : rest' : go args
    -- Arguments after "--" are positional; leave them untouched.
    go rest@("--" : _) = rest
    go (arg:args) = arg : go args
    go [] = []

    -- Try to parse a verbosity value and strip the parts the target Cabal
    -- version cannot handle; 'Nothing' means leave the argument as-is.
    munch rest =
      case runReadE flagToVerbosity rest of
        Right v
          | ver < mkVersion [2,0], verboseHasFlags v
          -- We could preserve the prefix, but since we're assuming
          -- it's Cabal's verbosity flag, we can assume that
          -- any format is OK
          -> Just (showForCabal (verboseNoFlags v))
          | ver < mkVersion [2,1], isVerboseTimestamp v
          -- +timestamp wasn't yet available in Cabal-2.0.0
          -> Just (showForCabal (verboseNoTimestamp v))
        _ -> Nothing

-- | Run a command through a configured 'Setup'.
runSetupCommand :: Verbosity -> Setup
                -> CommandUI flags  -- ^ command definition
                -> flags            -- ^ command flags
                -> [String]         -- ^ extra command-line arguments
                -> IO ()
runSetupCommand verbosity setup cmd flags extraArgs = do
  -- The Setup command line is "<command> <rendered flags> <extra args>".
  let args = commandName cmd : commandShowOptions cmd flags ++ extraArgs
  runSetup verbosity setup args

-- | Configure a 'Setup' and run a command in one step. The command flags
-- may depend on the Cabal library version in use.
setupWrapper :: Verbosity -> SetupScriptOptions -> Maybe PackageDescription -> CommandUI flags -> (Version -> flags) -- ^ produce command flags given the Cabal library version -> (Version -> [String]) -> IO () setupWrapper verbosity options mpkg cmd flags extraArgs = do setup <- getSetup verbosity options mpkg runSetupCommand verbosity setup cmd (flags $ setupVersion setup) (extraArgs $ setupVersion setup) -- ------------------------------------------------------------ -- * Internal SetupMethod -- ------------------------------------------------------------ internalSetupMethod :: SetupRunner internalSetupMethod verbosity options bt args = do info verbosity $ "Using internal setup method with build-type " ++ show bt ++ " and args:\n " ++ show args inDir (useWorkingDir options) $ do withEnv "HASKELL_DIST_DIR" (useDistPref options) $ withExtraPathEnv (useExtraPathEnv options) $ withEnvOverrides (useExtraEnvOverrides options) $ buildTypeAction bt args buildTypeAction :: BuildType -> ([String] -> IO ()) buildTypeAction Simple = Simple.defaultMainArgs buildTypeAction Configure = Simple.defaultMainWithHooksArgs Simple.autoconfUserHooks buildTypeAction Make = Make.defaultMainArgs buildTypeAction Custom = error "buildTypeAction Custom" -- | @runProcess'@ is a version of @runProcess@ where we have -- the additional option to decide whether or not we should -- delegate CTRL+C to the spawned process. runProcess' :: FilePath -- ^ Filename of the executable -> [String] -- ^ Arguments to pass to executable -> Maybe FilePath -- ^ Optional path to working directory -> Maybe [(String, String)] -- ^ Optional environment -> Maybe Handle -- ^ Handle for @stdin@ -> Maybe Handle -- ^ Handle for @stdout@ -> Maybe Handle -- ^ Handle for @stderr@ -> Bool -- ^ Delegate Ctrl+C ? 
-> IO ProcessHandle runProcess' cmd args mb_cwd mb_env mb_stdin mb_stdout mb_stderr _delegate = do (_,_,_,ph) <- createProcess (proc cmd args){ Process.cwd = mb_cwd , Process.env = mb_env , Process.std_in = mbToStd mb_stdin , Process.std_out = mbToStd mb_stdout , Process.std_err = mbToStd mb_stderr , Process.delegate_ctlc = _delegate } return ph where mbToStd :: Maybe Handle -> StdStream mbToStd Nothing = Inherit mbToStd (Just hdl) = UseHandle hdl -- ------------------------------------------------------------ -- * Self-Exec SetupMethod -- ------------------------------------------------------------ selfExecSetupMethod :: SetupRunner selfExecSetupMethod verbosity options bt args0 = do let args = ["act-as-setup", "--build-type=" ++ prettyShow bt, "--"] ++ args0 info verbosity $ "Using self-exec internal setup method with build-type " ++ show bt ++ " and args:\n " ++ show args path <- getExecutablePath info verbosity $ unwords (path : args) case useLoggingHandle options of Nothing -> return () Just logHandle -> info verbosity $ "Redirecting build log to " ++ show logHandle searchpath <- programSearchPathAsPATHVar (map ProgramSearchPathDir (useExtraPathEnv options) ++ getProgramSearchPath (useProgramDb options)) env <- getEffectiveEnvironment $ [ ("PATH", Just searchpath) , ("HASKELL_DIST_DIR", Just (useDistPref options)) ] ++ useExtraEnvOverrides options process <- runProcess' path args (useWorkingDir options) env Nothing (useLoggingHandle options) (useLoggingHandle options) (isInteractive options) exitCode <- waitForProcess process unless (exitCode == ExitSuccess) $ exitWith exitCode -- ------------------------------------------------------------ -- * External SetupMethod -- ------------------------------------------------------------ externalSetupMethod :: WithCallStack (FilePath -> SetupRunner) externalSetupMethod path verbosity options _ args = do info verbosity $ unwords (path : args) case useLoggingHandle options of Nothing -> return () Just logHandle -> info 
verbosity $ "Redirecting build log to " ++ show logHandle -- See 'Note: win32 clean hack' above. #ifdef mingw32_HOST_OS if useWin32CleanHack options then doWin32CleanHack path else doInvoke path #else doInvoke path #endif where doInvoke path' = do searchpath <- programSearchPathAsPATHVar (map ProgramSearchPathDir (useExtraPathEnv options) ++ getProgramSearchPath (useProgramDb options)) env <- getEffectiveEnvironment $ [ ("PATH", Just searchpath) , ("HASKELL_DIST_DIR", Just (useDistPref options)) ] ++ useExtraEnvOverrides options debug verbosity $ "Setup arguments: "++unwords args process <- runProcess' path' args (useWorkingDir options) env Nothing (useLoggingHandle options) (useLoggingHandle options) (isInteractive options) exitCode <- waitForProcess process unless (exitCode == ExitSuccess) $ exitWith exitCode #ifdef mingw32_HOST_OS doWin32CleanHack path' = do info verbosity $ "Using the Win32 clean hack." -- Recursively removes the temp dir on exit. withTempDirectory verbosity (workingDir options) "cabal-tmp" $ \tmpDir -> bracket (moveOutOfTheWay tmpDir path') (maybeRestore path') doInvoke moveOutOfTheWay tmpDir path' = do let newPath = tmpDir "setup" <.> exeExtension buildPlatform Win32.moveFile path' newPath return newPath maybeRestore oldPath path' = do let oldPathDir = takeDirectory oldPath oldPathDirExists <- doesDirectoryExist oldPathDir -- 'setup clean' didn't complete, 'dist/setup' still exists. 
when oldPathDirExists $ Win32.moveFile path' oldPath #endif getExternalSetupMethod :: Verbosity -> SetupScriptOptions -> PackageDescription -> BuildType -> IO (Version, SetupMethod, SetupScriptOptions) getExternalSetupMethod verbosity options pkg bt = do debug verbosity $ "Using external setup method with build-type " ++ show bt debug verbosity $ "Using explicit dependencies: " ++ show (useDependenciesExclusive options) createDirectoryIfMissingVerbose verbosity True setupDir (cabalLibVersion, mCabalLibInstalledPkgId, options') <- cabalLibVersionToUse debug verbosity $ "Using Cabal library version " ++ prettyShow cabalLibVersion path <- if useCachedSetupExecutable then getCachedSetupExecutable options' cabalLibVersion mCabalLibInstalledPkgId else compileSetupExecutable options' cabalLibVersion mCabalLibInstalledPkgId False -- Since useWorkingDir can change the relative path, the path argument must -- be turned into an absolute path. On some systems, runProcess' will take -- path as relative to the new working directory instead of the current -- working directory. path' <- tryCanonicalizePath path -- See 'Note: win32 clean hack' above. #ifdef mingw32_HOST_OS -- setupProgFile may not exist if we're using a cached program setupProgFile' <- canonicalizePathNoThrow setupProgFile let win32CleanHackNeeded = (useWin32CleanHack options) -- Skip when a cached setup script is used. 
&& setupProgFile' `equalFilePath` path' #else let win32CleanHackNeeded = False #endif let options'' = options' { useWin32CleanHack = win32CleanHackNeeded } return (cabalLibVersion, ExternalMethod path', options'') where setupDir = workingDir options useDistPref options "setup" setupVersionFile = setupDir "setup" <.> "version" setupHs = setupDir "setup" <.> "hs" setupProgFile = setupDir "setup" <.> exeExtension buildPlatform platform = fromMaybe buildPlatform (usePlatform options) useCachedSetupExecutable = (bt == Simple || bt == Configure || bt == Make) maybeGetInstalledPackages :: SetupScriptOptions -> Compiler -> ProgramDb -> IO InstalledPackageIndex maybeGetInstalledPackages options' comp progdb = case usePackageIndex options' of Just index -> return index Nothing -> getInstalledPackages verbosity comp (usePackageDB options') progdb -- Choose the version of Cabal to use if the setup script has a dependency on -- Cabal, and possibly update the setup script options. The version also -- determines how to filter the flags to Setup. -- -- We first check whether the dependency solver has specified a Cabal version. -- If it has, we use the solver's version without looking at the installed -- package index (See issue #3436). Otherwise, we pick the Cabal version by -- checking 'useCabalSpecVersion', then the saved version, and finally the -- versions available in the index. -- -- The version chosen here must match the one used in 'compileSetupExecutable' -- (See issue #3433). cabalLibVersionToUse :: IO (Version, Maybe ComponentId ,SetupScriptOptions) cabalLibVersionToUse = case find (isCabalPkgId . 
snd) (useDependencies options) of Just (unitId, pkgId) -> do let version = pkgVersion pkgId updateSetupScript version bt writeSetupVersionFile version return (version, Just unitId, options) Nothing -> case useCabalSpecVersion options of Just version -> do updateSetupScript version bt writeSetupVersionFile version return (version, Nothing, options) Nothing -> do savedVer <- savedVersion case savedVer of Just version | version `withinRange` useCabalVersion options -> do updateSetupScript version bt -- Does the previously compiled setup executable -- still exist and is it up-to date? useExisting <- canUseExistingSetup version if useExisting then return (version, Nothing, options) else installedVersion _ -> installedVersion where -- This check duplicates the checks in 'getCachedSetupExecutable' / -- 'compileSetupExecutable'. Unfortunately, we have to perform it twice -- because the selected Cabal version may change as a result of this -- check. canUseExistingSetup :: Version -> IO Bool canUseExistingSetup version = if useCachedSetupExecutable then do (_, cachedSetupProgFile) <- cachedSetupDirAndProg options version doesFileExist cachedSetupProgFile else (&&) <$> setupProgFile `existsAndIsMoreRecentThan` setupHs <*> setupProgFile `existsAndIsMoreRecentThan` setupVersionFile writeSetupVersionFile :: Version -> IO () writeSetupVersionFile version = writeFile setupVersionFile (show version ++ "\n") installedVersion :: IO (Version, Maybe InstalledPackageId ,SetupScriptOptions) installedVersion = do (comp, progdb, options') <- configureCompiler options (version, mipkgid, options'') <- installedCabalVersion options' comp progdb updateSetupScript version bt writeSetupVersionFile version return (version, mipkgid, options'') savedVersion :: IO (Maybe Version) savedVersion = do versionString <- readFile setupVersionFile `catchIO` \_ -> return "" case reads versionString of [(version,s)] | all isSpace s -> return (Just version) _ -> return Nothing -- | Update a Setup.hs script, 
creating it if necessary. updateSetupScript :: Version -> BuildType -> IO () updateSetupScript _ Custom = do useHs <- doesFileExist customSetupHs useLhs <- doesFileExist customSetupLhs unless (useHs || useLhs) $ die' verbosity "Using 'build-type: Custom' but there is no Setup.hs or Setup.lhs script." let src = (if useHs then customSetupHs else customSetupLhs) srcNewer <- src `moreRecentFile` setupHs when srcNewer $ if useHs then copyFileVerbose verbosity src setupHs else runSimplePreProcessor ppUnlit src setupHs verbosity where customSetupHs = workingDir options "Setup.hs" customSetupLhs = workingDir options "Setup.lhs" updateSetupScript cabalLibVersion _ = rewriteFileLBS verbosity setupHs (buildTypeScript cabalLibVersion) buildTypeScript :: Version -> BS.ByteString buildTypeScript cabalLibVersion = case bt of Simple -> "import Distribution.Simple; main = defaultMain\n" Configure | cabalLibVersion >= mkVersion [1,3,10] -> "import Distribution.Simple; main = defaultMainWithHooks autoconfUserHooks\n" | otherwise -> "import Distribution.Simple; main = defaultMainWithHooks defaultUserHooks\n" Make -> "import Distribution.Make; main = defaultMain\n" Custom -> error "buildTypeScript Custom" installedCabalVersion :: SetupScriptOptions -> Compiler -> ProgramDb -> IO (Version, Maybe InstalledPackageId ,SetupScriptOptions) installedCabalVersion options' _ _ | packageName pkg == mkPackageName "Cabal" && bt == Custom = return (packageVersion pkg, Nothing, options') installedCabalVersion options' compiler progdb = do index <- maybeGetInstalledPackages options' compiler progdb let cabalDepName = mkPackageName "Cabal" cabalDepVersion = useCabalVersion options' options'' = options' { usePackageIndex = Just index } case PackageIndex.lookupDependency index cabalDepName cabalDepVersion of [] -> die' verbosity $ "The package '" ++ prettyShow (packageName pkg) ++ "' requires Cabal library version " ++ prettyShow (useCabalVersion options) ++ " but no suitable version is installed." 
pkgs -> let ipkginfo = fromMaybe err $ safeHead . snd . bestVersion fst $ pkgs err = error "Distribution.Client.installedCabalVersion: empty version list" in return (packageVersion ipkginfo ,Just . IPI.installedComponentId $ ipkginfo, options'') bestVersion :: (a -> Version) -> [a] -> a bestVersion f = firstMaximumBy (comparing (preference . f)) where -- Like maximumBy, but picks the first maximum element instead of the -- last. In general, we expect the preferred version to go first in the -- list. For the default case, this has the effect of choosing the version -- installed in the user package DB instead of the global one. See #1463. -- -- Note: firstMaximumBy could be written as just -- `maximumBy cmp . reverse`, but the problem is that the behaviour of -- maximumBy is not fully specified in the case when there is not a single -- greatest element. firstMaximumBy :: (a -> a -> Ordering) -> [a] -> a firstMaximumBy _ [] = error "Distribution.Client.firstMaximumBy: empty list" firstMaximumBy cmp xs = foldl1' maxBy xs where maxBy x y = case cmp x y of { GT -> x; EQ -> x; LT -> y; } preference version = (sameVersion, sameMajorVersion ,stableVersion, latestVersion) where sameVersion = version == cabalVersion sameMajorVersion = majorVersion version == majorVersion cabalVersion majorVersion = take 2 . versionNumbers stableVersion = case versionNumbers version of (_:x:_) -> even x _ -> False latestVersion = version configureCompiler :: SetupScriptOptions -> IO (Compiler, ProgramDb, SetupScriptOptions) configureCompiler options' = do (comp, progdb) <- case useCompiler options' of Just comp -> return (comp, useProgramDb options') Nothing -> do (comp, _, progdb) <- configCompilerEx (Just GHC) Nothing Nothing (useProgramDb options') verbosity return (comp, progdb) -- Whenever we need to call configureCompiler, we also need to access the -- package index, so let's cache it in SetupScriptOptions. 
index <- maybeGetInstalledPackages options' comp progdb return (comp, progdb, options' { useCompiler = Just comp, usePackageIndex = Just index, useProgramDb = progdb }) -- | Path to the setup exe cache directory and path to the cached setup -- executable. cachedSetupDirAndProg :: SetupScriptOptions -> Version -> IO (FilePath, FilePath) cachedSetupDirAndProg options' cabalLibVersion = do cabalDir <- getCabalDir let setupCacheDir = cabalDir "setup-exe-cache" cachedSetupProgFile = setupCacheDir ("setup-" ++ buildTypeString ++ "-" ++ cabalVersionString ++ "-" ++ platformString ++ "-" ++ compilerVersionString) <.> exeExtension buildPlatform return (setupCacheDir, cachedSetupProgFile) where buildTypeString = show bt cabalVersionString = "Cabal-" ++ prettyShow cabalLibVersion compilerVersionString = prettyShow $ maybe buildCompilerId compilerId $ useCompiler options' platformString = prettyShow platform -- | Look up the setup executable in the cache; update the cache if the setup -- executable is not found. getCachedSetupExecutable :: SetupScriptOptions -> Version -> Maybe InstalledPackageId -> IO FilePath getCachedSetupExecutable options' cabalLibVersion maybeCabalLibInstalledPkgId = do (setupCacheDir, cachedSetupProgFile) <- cachedSetupDirAndProg options' cabalLibVersion cachedSetupExists <- doesFileExist cachedSetupProgFile if cachedSetupExists then debug verbosity $ "Found cached setup executable: " ++ cachedSetupProgFile else criticalSection' $ do -- The cache may have been populated while we were waiting. cachedSetupExists' <- doesFileExist cachedSetupProgFile if cachedSetupExists' then debug verbosity $ "Found cached setup executable: " ++ cachedSetupProgFile else do debug verbosity $ "Setup executable not found in the cache." 
src <- compileSetupExecutable options' cabalLibVersion maybeCabalLibInstalledPkgId True createDirectoryIfMissingVerbose verbosity True setupCacheDir installExecutableFile verbosity src cachedSetupProgFile -- Do not strip if we're using GHCJS, since the result may be a script when (maybe True ((/=GHCJS).compilerFlavor) $ useCompiler options') $ Strip.stripExe verbosity platform (useProgramDb options') cachedSetupProgFile return cachedSetupProgFile where criticalSection' = maybe id criticalSection $ setupCacheLock options' -- | If the Setup.hs is out of date wrt the executable then recompile it. -- Currently this is GHC/GHCJS only. It should really be generalised. -- compileSetupExecutable :: SetupScriptOptions -> Version -> Maybe ComponentId -> Bool -> IO FilePath compileSetupExecutable options' cabalLibVersion maybeCabalLibInstalledPkgId forceCompile = do setupHsNewer <- setupHs `moreRecentFile` setupProgFile cabalVersionNewer <- setupVersionFile `moreRecentFile` setupProgFile let outOfDate = setupHsNewer || cabalVersionNewer when (outOfDate || forceCompile) $ do debug verbosity "Setup executable needs to be updated, compiling..." (compiler, progdb, options'') <- configureCompiler options' let cabalPkgid = PackageIdentifier (mkPackageName "Cabal") cabalLibVersion (program, extraOpts) = case compilerFlavor compiler of GHCJS -> (ghcjsProgram, ["-build-runner"]) _ -> (ghcProgram, ["-threaded"]) cabalDep = maybe [] (\ipkgid -> [(ipkgid, cabalPkgid)]) maybeCabalLibInstalledPkgId -- With 'useDependenciesExclusive' we enforce the deps specified, -- so only the given ones can be used. Otherwise we allow the use -- of packages in the ambient environment, and add on a dep on the -- Cabal library (unless 'useDependencies' already contains one). -- -- With 'useVersionMacros' we use a version CPP macros .h file. -- -- Both of these options should be enabled for packages that have -- opted-in and declared a custom-settup stanza. 
-- selectedDeps | useDependenciesExclusive options' = useDependencies options' | otherwise = useDependencies options' ++ if any (isCabalPkgId . snd) (useDependencies options') then [] else cabalDep addRenaming (ipid, _) = -- Assert 'DefUnitId' invariant (Backpack.DefiniteUnitId (unsafeMkDefUnitId (newSimpleUnitId ipid)) ,defaultRenaming) cppMacrosFile = setupDir "setup_macros.h" ghcOptions = mempty { -- Respect -v0, but don't crank up verbosity on GHC if -- Cabal verbosity is requested. For that, use -- --ghc-option=-v instead! ghcOptVerbosity = Flag (min verbosity normal) , ghcOptMode = Flag GhcModeMake , ghcOptInputFiles = toNubListR [setupHs] , ghcOptOutputFile = Flag setupProgFile , ghcOptObjDir = Flag setupDir , ghcOptHiDir = Flag setupDir , ghcOptSourcePathClear = Flag True , ghcOptSourcePath = case bt of Custom -> toNubListR [workingDir options'] _ -> mempty , ghcOptPackageDBs = usePackageDB options'' , ghcOptHideAllPackages = Flag (useDependenciesExclusive options') , ghcOptCabal = Flag (useDependenciesExclusive options') , ghcOptPackages = toNubListR $ map addRenaming selectedDeps , ghcOptCppIncludes = toNubListR [ cppMacrosFile | useVersionMacros options' ] , ghcOptExtra = extraOpts } let ghcCmdLine = renderGhcOptions compiler platform ghcOptions when (useVersionMacros options') $ rewriteFileEx verbosity cppMacrosFile $ generatePackageVersionMacros (pkgVersion $ package pkg) (map snd selectedDeps) case useLoggingHandle options of Nothing -> runDbProgram verbosity program progdb ghcCmdLine -- If build logging is enabled, redirect compiler output to -- the log file. 
(Just logHandle) -> do output <- getDbProgramOutput verbosity program progdb ghcCmdLine hPutStr logHandle output return setupProgFile isCabalPkgId :: PackageIdentifier -> Bool isCabalPkgId (PackageIdentifier pname _) = pname == mkPackageName "Cabal" cabal-install-3.8.1.0/src/Distribution/Client/SolverInstallPlan.hs0000644000000000000000000004044407346545000023222 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE TypeFamilies #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.SolverInstallPlan -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : duncan@community.haskell.org -- Stability : provisional -- Portability : portable -- -- The 'SolverInstallPlan' is the graph of packages produced by the -- dependency solver, and specifies at the package-granularity what -- things are going to be installed. To put it another way: the -- dependency solver produces a 'SolverInstallPlan', which is then -- consumed by various other parts of Cabal. 
-- ----------------------------------------------------------------------------- module Distribution.Client.SolverInstallPlan( SolverInstallPlan(..), SolverPlanPackage, ResolverPackage(..), -- * Operations on 'SolverInstallPlan's new, toList, toMap, remove, showPlanIndex, showInstallPlan, -- * Checking validity of plans valid, closed, consistent, acyclic, -- ** Details on invalid plans SolverPlanProblem(..), showPlanProblem, problems, -- ** Querying the install plan dependencyClosure, reverseDependencyClosure, topologicalOrder, reverseTopologicalOrder, ) where import Distribution.Client.Compat.Prelude hiding (toList) import Prelude () import Distribution.Package ( PackageIdentifier(..), Package(..), PackageName , HasUnitId(..), PackageId, packageVersion, packageName ) import Distribution.Types.Flag (nullFlagAssignment) import qualified Distribution.Solver.Types.ComponentDeps as CD import Distribution.Client.Types ( UnresolvedPkgLoc ) import Distribution.Version ( Version ) import Distribution.Solver.Types.Settings import Distribution.Solver.Types.ResolverPackage import Distribution.Solver.Types.SolverId import Distribution.Solver.Types.SolverPackage import Distribution.Compat.Graph (Graph, IsNode(..)) import qualified Data.Foldable as Foldable import qualified Data.Graph as OldGraph import qualified Distribution.Compat.Graph as Graph import qualified Data.Map as Map import Data.Array ((!)) type SolverPlanPackage = ResolverPackage UnresolvedPkgLoc type SolverPlanIndex = Graph SolverPlanPackage data SolverInstallPlan = SolverInstallPlan { planIndex :: !SolverPlanIndex, planIndepGoals :: !IndependentGoals } deriving (Typeable, Generic) {- -- | Much like 'planPkgIdOf', but mapping back to full packages. 
planPkgOf :: SolverInstallPlan -> Graph.Vertex -> SolverPlanPackage planPkgOf plan v = case Graph.lookupKey (planIndex plan) (planPkgIdOf plan v) of Just pkg -> pkg Nothing -> error "InstallPlan: internal error: planPkgOf lookup failed" -} instance Binary SolverInstallPlan instance Structured SolverInstallPlan showPlanIndex :: [SolverPlanPackage] -> String showPlanIndex = intercalate "\n" . map showPlanPackage showInstallPlan :: SolverInstallPlan -> String showInstallPlan = showPlanIndex . toList showPlanPackage :: SolverPlanPackage -> String showPlanPackage (PreExisting ipkg) = "PreExisting " ++ prettyShow (packageId ipkg) ++ " (" ++ prettyShow (installedUnitId ipkg) ++ ")" showPlanPackage (Configured spkg) = "Configured " ++ prettyShow (packageId spkg) ++ flags ++ comps where flags | nullFlagAssignment fa = "" | otherwise = " " ++ prettyShow (solverPkgFlags spkg) where fa = solverPkgFlags spkg comps | null deps = "" | otherwise = " " ++ unwords (map prettyShow $ Foldable.toList deps) where deps :: Set CD.Component deps = CD.components (solverPkgLibDeps spkg) <> CD.components (solverPkgExeDeps spkg) -- | Build an installation plan from a valid set of resolved packages. -- new :: IndependentGoals -> SolverPlanIndex -> Either [SolverPlanProblem] SolverInstallPlan new indepGoals index = case problems indepGoals index of [] -> Right (SolverInstallPlan index indepGoals) probs -> Left probs toList :: SolverInstallPlan -> [SolverPlanPackage] toList = Foldable.toList . planIndex toMap :: SolverInstallPlan -> Map SolverId SolverPlanPackage toMap = Graph.toMap . planIndex -- | Remove packages from the install plan. This will result in an -- error if there are remaining packages that depend on any matching -- package. This is primarily useful for obtaining an install plan for -- the dependencies of a package or set of packages without actually -- installing the package itself, as when doing development. 
--
remove :: (SolverPlanPackage -> Bool)
       -> SolverInstallPlan
       -> Either [SolverPlanProblem]
                 (SolverInstallPlan)
remove shouldRemove plan =
    new (planIndepGoals plan) prunedIndex
  where
    -- Rebuild the graph from every package the predicate keeps; 'new'
    -- re-validates the result, so removing a still-depended-on package
    -- surfaces as a list of problems rather than a broken plan.
    prunedIndex = Graph.fromDistinctList
                $ filter (not . shouldRemove) (toList plan)

-- ------------------------------------------------------------
-- * Checking validity of plans
-- ------------------------------------------------------------

-- | A valid installation plan is a set of packages that is 'acyclic',
-- 'closed' and 'consistent'. Also, every 'ConfiguredPackage' in the
-- plan has to have a valid configuration (see 'configuredPackageValid').
--
-- * if the result is @False@ use 'problems' to get a detailed list.
--
valid :: IndependentGoals
      -> SolverPlanIndex
      -> Bool
valid indepGoals index =
    null (problems indepGoals index)

data SolverPlanProblem =
     PackageMissingDeps   SolverPlanPackage [PackageIdentifier]
   | PackageCycle         [SolverPlanPackage]
   | PackageInconsistency PackageName [(PackageIdentifier, Version)]
   | PackageStateInvalid  SolverPlanPackage SolverPlanPackage

-- | Render one 'SolverPlanProblem' as a human-readable message.
showPlanProblem :: SolverPlanProblem -> String
showPlanProblem problem = case problem of
    PackageMissingDeps pkg missingDeps ->
         "Package " ++ prettyShow (packageId pkg)
      ++ " depends on the following packages which are missing from the plan: "
      ++ intercalate ", " (map prettyShow missingDeps)

    PackageCycle cycleGroup ->
         "The following packages are involved in a dependency cycle "
      ++ intercalate ", " (map (prettyShow.packageId) cycleGroup)

    PackageInconsistency name inconsistencies ->
         "Package " ++ prettyShow name
      ++ " is required by several packages,"
      ++ " but they require inconsistent versions:\n"
      ++ unlines [ "  package " ++ prettyShow pkg ++ " requires "
                              ++ prettyShow (PackageIdentifier name ver)
                 | (pkg, ver) <- inconsistencies ]

    PackageStateInvalid pkg pkg' ->
         "Package " ++ prettyShow (packageId pkg)
      ++ " is in the " ++ showPlanState pkg
      ++ " state but it depends on package " ++ prettyShow (packageId pkg')
      ++ " which is in the " ++ showPlanState pkg'
      ++ " state"
  where
    showPlanState (PreExisting _) = "pre-existing"
    showPlanState (Configured  _) = "configured"

-- | For an invalid plan, produce a detailed list of problems as human readable
-- error messages. This is mainly intended for debugging purposes.
-- Use 'showPlanProblem' for a human readable explanation.
--
problems :: IndependentGoals
         -> SolverPlanIndex
         -> [SolverPlanProblem]
problems indepGoals index =
    concat
      [ -- Packages whose declared dependencies are absent from the graph.
        [ PackageMissingDeps pkg
            (mapMaybe (fmap packageId . flip Graph.lookup index) missingDeps)
        | (pkg, missingDeps) <- Graph.broken index ]

      , -- Groups of packages forming dependency cycles.
        [ PackageCycle cycleGroup
        | cycleGroup <- Graph.cycles index ]

      , -- Package names required at conflicting versions.
        [ PackageInconsistency name inconsistencies
        | (name, inconsistencies) <- dependencyInconsistencies indepGoals index ]

      , -- Pre-existing packages must not depend on configured ones.
        [ PackageStateInvalid pkg pkg'
        | pkg <- Foldable.toList index
        , Just pkg' <- map (flip Graph.lookup index) (nodeNeighbors pkg)
        , not (stateDependencyRelation pkg pkg') ]
      ]

-- | Compute all roots of the install plan, and verify that the transitive
-- plans from those roots are all consistent.
--
-- NOTE: This does not check for dependency cycles. Moreover, dependency cycles
-- may be absent from the subplans even if the larger plan contains a dependency
-- cycle. Such cycles may or may not be an issue; either way, we don't check
-- for them here.
dependencyInconsistencies :: IndependentGoals
                          -> SolverPlanIndex
                          -> [(PackageName, [(PackageIdentifier, Version)])]
dependencyInconsistencies indepGoals index =
    concatMap dependencyInconsistencies' subplans
  where
    subplans :: [SolverPlanIndex]
    subplans = -- Not Graph.closure!!
               map (nonSetupClosure index)
                   (rootSets indepGoals index)
    -- NB: When we check for inconsistencies, packages from the setup
    -- scripts don't count as part of the closure (this way, we
    -- can build, e.g., Cabal-1.24.1 even if its setup script is
    -- built with Cabal-1.24.0).

-- This is a best effort function that swallows any non-existent
-- SolverIds.
nonSetupClosure :: SolverPlanIndex -> [SolverId] -> SolverPlanIndex nonSetupClosure index pkgids0 = closure Graph.empty pkgids0 where closure :: Graph SolverPlanPackage -> [SolverId] -> SolverPlanIndex closure completed [] = completed closure completed (pkgid:pkgids) = case Graph.lookup pkgid index of Nothing -> closure completed pkgids Just pkg -> case Graph.lookup (nodeKey pkg) completed of Just _ -> closure completed pkgids Nothing -> closure completed' pkgids' where completed' = Graph.insert pkg completed pkgids' = CD.nonSetupDeps (resolverPackageLibDeps pkg) ++ pkgids -- | Compute the root sets of a plan -- -- A root set is a set of packages whose dependency closure must be consistent. -- This is the set of all top-level library roots (taken together normally, or -- as singletons sets if we are considering them as independent goals), along -- with all setup dependencies of all packages. rootSets :: IndependentGoals -> SolverPlanIndex -> [[SolverId]] rootSets (IndependentGoals indepGoals) index = if indepGoals then map (:[]) libRoots else [libRoots] ++ setupRoots index where libRoots :: [SolverId] libRoots = libraryRoots index -- | Compute the library roots of a plan -- -- The library roots are the set of packages with no reverse dependencies -- (no reverse library dependencies but also no reverse setup dependencies). libraryRoots :: SolverPlanIndex -> [SolverId] libraryRoots index = map (nodeKey . toPkgId) roots where (graph, toPkgId, _) = Graph.toGraph index indegree = OldGraph.indegree graph roots = filter isRoot (OldGraph.vertices graph) isRoot v = indegree ! v == 0 -- | The setup dependencies of each package in the plan setupRoots :: SolverPlanIndex -> [[SolverId]] setupRoots = filter (not . null) . map (CD.setupDeps . resolverPackageLibDeps) . 
Foldable.toList -- | Given a package index where we assume we want to use all the packages -- (use 'dependencyClosure' if you need to get such a index subset) find out -- if the dependencies within it use consistent versions of each package. -- Return all cases where multiple packages depend on different versions of -- some other package. -- -- Each element in the result is a package name along with the packages that -- depend on it and the versions they require. These are guaranteed to be -- distinct. -- dependencyInconsistencies' :: SolverPlanIndex -> [(PackageName, [(PackageIdentifier, Version)])] dependencyInconsistencies' index = [ (name, [ (pid, packageVersion dep) | (dep,pids) <- uses, pid <- pids]) | (name, ipid_map) <- Map.toList inverseIndex , let uses = Map.elems ipid_map , reallyIsInconsistent (map fst uses) ] where -- For each package name (of a dependency, somewhere) -- and each installed ID of that package -- the associated package instance -- and a list of reverse dependencies (as source IDs) inverseIndex :: Map PackageName (Map SolverId (SolverPlanPackage, [PackageId])) inverseIndex = Map.fromListWith (Map.unionWith (\(a,b) (_,b') -> (a,b++b'))) [ (packageName dep, Map.fromList [(sid,(dep,[packageId pkg]))]) | -- For each package @pkg@ pkg <- Foldable.toList index -- Find out which @sid@ @pkg@ depends on , sid <- CD.nonSetupDeps (resolverPackageLibDeps pkg) -- And look up those @sid@ (i.e., @sid@ is the ID of @dep@) , Just dep <- [Graph.lookup sid index] ] -- If, in a single install plan, we depend on more than one version of a -- package, then this is ONLY okay in the (rather special) case that we -- depend on precisely two versions of that package, and one of them -- depends on the other. This is necessary for example for the base where -- we have base-3 depending on base-4. 
reallyIsInconsistent :: [SolverPlanPackage] -> Bool reallyIsInconsistent [] = False reallyIsInconsistent [_p] = False reallyIsInconsistent [p1, p2] = let pid1 = nodeKey p1 pid2 = nodeKey p2 in pid1 `notElem` CD.nonSetupDeps (resolverPackageLibDeps p2) && pid2 `notElem` CD.nonSetupDeps (resolverPackageLibDeps p1) reallyIsInconsistent _ = True -- | The graph of packages (nodes) and dependencies (edges) must be acyclic. -- -- * if the result is @False@ use 'PackageIndex.dependencyCycles' to find out -- which packages are involved in dependency cycles. -- acyclic :: SolverPlanIndex -> Bool acyclic = null . Graph.cycles -- | An installation plan is closed if for every package in the set, all of -- its dependencies are also in the set. That is, the set is closed under the -- dependency relation. -- -- * if the result is @False@ use 'PackageIndex.brokenPackages' to find out -- which packages depend on packages not in the index. -- closed :: SolverPlanIndex -> Bool closed = null . Graph.broken -- | An installation plan is consistent if all dependencies that target a -- single package name, target the same version. -- -- This is slightly subtle. It is not the same as requiring that there be at -- most one version of any package in the set. It only requires that of -- packages which have more than one other package depending on them. We could -- actually make the condition even more precise and say that different -- versions are OK so long as they are not both in the transitive closure of -- any other package (or equivalently that their inverse closures do not -- intersect). The point is we do not want to have any packages depending -- directly or indirectly on two different versions of the same package. The -- current definition is just a safe approximation of that. -- -- * if the result is @False@ use 'PackageIndex.dependencyInconsistencies' to -- find out which packages are. -- consistent :: SolverPlanIndex -> Bool consistent = null . 
dependencyInconsistencies (IndependentGoals False) -- | The states of packages have that depend on each other must respect -- this relation. That is for very case where package @a@ depends on -- package @b@ we require that @dependencyStatesOk a b = True@. -- stateDependencyRelation :: SolverPlanPackage -> SolverPlanPackage -> Bool stateDependencyRelation PreExisting{} PreExisting{} = True stateDependencyRelation (Configured _) PreExisting{} = True stateDependencyRelation (Configured _) (Configured _) = True stateDependencyRelation _ _ = False -- | Compute the dependency closure of a package in a install plan -- dependencyClosure :: SolverInstallPlan -> [SolverId] -> [SolverPlanPackage] dependencyClosure plan = fromMaybe [] . Graph.closure (planIndex plan) reverseDependencyClosure :: SolverInstallPlan -> [SolverId] -> [SolverPlanPackage] reverseDependencyClosure plan = fromMaybe [] . Graph.revClosure (planIndex plan) topologicalOrder :: SolverInstallPlan -> [SolverPlanPackage] topologicalOrder plan = Graph.topSort (planIndex plan) reverseTopologicalOrder :: SolverInstallPlan -> [SolverPlanPackage] reverseTopologicalOrder plan = Graph.revTopSort (planIndex plan) cabal-install-3.8.1.0/src/Distribution/Client/SourceFiles.hs0000644000000000000000000001542007346545000022025 0ustar0000000000000000-- | Contains an @sdist@ like function which computes the source files -- that we should track to determine if a rebuild is necessary. -- Unlike @sdist@, we can operate directly on the true -- 'PackageDescription' (not flattened). -- -- The naming convention, roughly, is that to declare we need the -- source for some type T, you use the function needT; some functions -- need auxiliary information. -- -- We can only use this code for non-Custom scripts; Custom scripts -- may have arbitrary extra dependencies (esp. new preprocessors) which -- we cannot "see" easily. 
module Distribution.Client.SourceFiles (needElaboratedConfiguredPackage) where import Control.Monad.IO.Class import Distribution.Client.ProjectPlanning.Types import Distribution.Client.RebuildMonad import Distribution.Solver.Types.OptionalStanza import Distribution.Simple.Glob (matchDirFileGlobWithDie) import Distribution.Simple.PreProcess import Distribution.Types.PackageDescription import Distribution.Types.Component import Distribution.Types.ComponentRequestedSpec (ComponentRequestedSpec) import Distribution.Types.Library import Distribution.Types.Executable import Distribution.Types.Benchmark import Distribution.Types.BenchmarkInterface import Distribution.Types.TestSuite import Distribution.Types.TestSuiteInterface import Distribution.Types.BuildInfo import Distribution.Types.ForeignLib import Distribution.Utils.Path import Distribution.ModuleName import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Verbosity (silent) import System.FilePath needElaboratedConfiguredPackage :: ElaboratedConfiguredPackage -> Rebuild () needElaboratedConfiguredPackage elab = case elabPkgOrComp elab of ElabComponent ecomp -> needElaboratedComponent elab ecomp ElabPackage epkg -> needElaboratedPackage elab epkg needElaboratedPackage :: ElaboratedConfiguredPackage -> ElaboratedPackage -> Rebuild () needElaboratedPackage elab epkg = traverse_ (needComponent pkg_descr) (enabledComponents pkg_descr enabled) where pkg_descr :: PackageDescription pkg_descr = elabPkgDescription elab enabled_stanzas :: OptionalStanzaSet enabled_stanzas = pkgStanzasEnabled epkg enabled :: ComponentRequestedSpec enabled = enableStanzas enabled_stanzas needElaboratedComponent :: ElaboratedConfiguredPackage -> ElaboratedComponent -> Rebuild () needElaboratedComponent elab ecomp = case mb_comp of Nothing -> needSetup Just comp -> needComponent pkg_descr comp where pkg_descr :: PackageDescription pkg_descr = elabPkgDescription elab mb_comp :: Maybe Component mb_comp = fmap (getComponent 
pkg_descr) (compComponentName ecomp) needComponent :: PackageDescription -> Component -> Rebuild () needComponent pkg_descr comp = case comp of CLib lib -> needLibrary pkg_descr lib CFLib flib -> needForeignLib pkg_descr flib CExe exe -> needExecutable pkg_descr exe CTest test -> needTestSuite pkg_descr test CBench bench -> needBenchmark pkg_descr bench needSetup :: Rebuild () needSetup = findFirstFileMonitored id ["Setup.hs", "Setup.lhs"] >> return () needLibrary :: PackageDescription -> Library -> Rebuild () needLibrary pkg_descr (Library { exposedModules = modules , signatures = sigs , libBuildInfo = bi }) = needBuildInfo pkg_descr bi (modules ++ sigs) needForeignLib :: PackageDescription -> ForeignLib -> Rebuild () needForeignLib pkg_descr (ForeignLib { foreignLibModDefFile = fs , foreignLibBuildInfo = bi }) = do traverse_ needIfExists fs needBuildInfo pkg_descr bi [] needExecutable :: PackageDescription -> Executable -> Rebuild () needExecutable pkg_descr (Executable { modulePath = mainPath , buildInfo = bi }) = do needBuildInfo pkg_descr bi [] needMainFile bi mainPath needTestSuite :: PackageDescription -> TestSuite -> Rebuild () needTestSuite pkg_descr t = case testInterface t of TestSuiteExeV10 _ mainPath -> do needBuildInfo pkg_descr bi [] needMainFile bi mainPath TestSuiteLibV09 _ m -> needBuildInfo pkg_descr bi [m] TestSuiteUnsupported _ -> return () -- soft fail where bi :: BuildInfo bi = testBuildInfo t needMainFile :: BuildInfo -> FilePath -> Rebuild () needMainFile bi mainPath = do -- The matter here is subtle. It might *seem* that we -- should just search for mainPath, but as per -- b61cb051f63ed5869b8f4a6af996ff7e833e4b39 'main-is' -- will actually be the source file AFTER preprocessing, -- whereas we need to get the file *prior* to preprocessing. 
ppFile <- findFileWithExtensionMonitored (ppSuffixes knownSuffixHandlers) (map getSymbolicPath (hsSourceDirs bi)) (dropExtension mainPath) case ppFile of -- But check the original path in the end, because -- maybe it's a non-preprocessed file with a non-traditional -- extension. Nothing -> findFileMonitored (map getSymbolicPath (hsSourceDirs bi)) mainPath >>= maybe (return ()) need Just pp -> need pp needBenchmark :: PackageDescription -> Benchmark -> Rebuild () needBenchmark pkg_descr bm = case benchmarkInterface bm of BenchmarkExeV10 _ mainPath -> do needBuildInfo pkg_descr bi [] needMainFile bi mainPath BenchmarkUnsupported _ -> return () -- soft fail where bi :: BuildInfo bi = benchmarkBuildInfo bm needBuildInfo :: PackageDescription -> BuildInfo -> [ModuleName] -> Rebuild () needBuildInfo pkg_descr bi modules = do -- NB: These are separate because there may be both A.hs and -- A.hs-boot; need to track both. findNeededModules ["hs", "lhs", "hsig", "lhsig"] findNeededModules ["hs-boot", "lhs-boot"] expandedExtraSrcFiles <- liftIO $ fmap concat . for (extraSrcFiles pkg_descr) $ \fpath -> matchDirFileGlobWithDie silent (\ _ _ -> return []) (specVersion pkg_descr) "." fpath traverse_ needIfExists $ concat [ cSources bi , cxxSources bi , jsSources bi , cmmSources bi , asmSources bi , expandedExtraSrcFiles ] for_ (installIncludes bi) $ \f -> findFileMonitored ("." 
: includeDirs bi) f >>= maybe (return ()) need where findNeededModules :: [String] -> Rebuild () findNeededModules exts = traverse_ (findNeededModule exts) (modules ++ otherModules bi) findNeededModule :: [String] -> ModuleName -> Rebuild () findNeededModule exts m = findFileWithExtensionMonitored (ppSuffixes knownSuffixHandlers ++ exts) (map getSymbolicPath (hsSourceDirs bi)) (toFilePath m) >>= maybe (return ()) need cabal-install-3.8.1.0/src/Distribution/Client/SrcDist.hs0000644000000000000000000001104107346545000021150 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} -- | Utilities to implement cabal @v2-sdist@. module Distribution.Client.SrcDist ( allPackageSourceFiles, packageDirToSdist, ) where import Distribution.Client.Compat.Prelude import Prelude () import Control.Monad.State.Lazy (StateT, evalStateT, gets, modify) import Control.Monad.Trans (liftIO) import Control.Monad.Writer.Lazy (WriterT, execWriterT, tell) import System.FilePath (normalise, takeDirectory, ()) import Distribution.Client.Utils (tryFindAddSourcePackageDesc) import Distribution.Package (Package (packageId)) import Distribution.PackageDescription.Configuration (flattenPackageDescription) import Distribution.Simple.PackageDescription (readGenericPackageDescription) import Distribution.Simple.PreProcess (knownSuffixHandlers) import Distribution.Simple.SrcDist (listPackageSourcesWithDie) import Distribution.Simple.Utils (die') import Distribution.Types.GenericPackageDescription (GenericPackageDescription) import qualified Codec.Archive.Tar as Tar import qualified Codec.Archive.Tar.Entry as Tar import qualified Codec.Compression.GZip as GZip import qualified Data.ByteString as BS import qualified Data.ByteString.Lazy as BSL import qualified Data.Set as Set -- | List all source files of a given add-source dependency. Exits with error if -- something is wrong (e.g. there is no .cabal file in the given directory). -- -- Used in sandbox and projectbuilding. 
-- TODO: when sandboxes are removed, move to ProjectBuilding. -- allPackageSourceFiles :: Verbosity -> FilePath -> IO [FilePath] allPackageSourceFiles verbosity packageDir = do pd <- do let err = "Error reading source files of package." desc <- tryFindAddSourcePackageDesc verbosity packageDir err flattenPackageDescription `fmap` readGenericPackageDescription verbosity desc listPackageSourcesWithDie verbosity (\_ _ -> return []) packageDir pd knownSuffixHandlers -- | Create a tarball for a package in a directory packageDirToSdist :: Verbosity -> GenericPackageDescription -- ^ read in GPD -> FilePath -- ^ directory containing that GPD -> IO BSL.ByteString -- ^ resulting sdist tarball packageDirToSdist verbosity gpd dir = do let thisDie :: Verbosity -> String -> IO a thisDie v s = die' v $ "sdist of " <> prettyShow (packageId gpd) ++ ": " ++ s files' <- listPackageSourcesWithDie verbosity thisDie dir (flattenPackageDescription gpd) knownSuffixHandlers let files :: [FilePath] files = nub $ sort $ map normalise files' let entriesM :: StateT (Set.Set FilePath) (WriterT [Tar.Entry] IO) () entriesM = do let prefix = prettyShow (packageId gpd) modify (Set.insert prefix) case Tar.toTarPath True prefix of Left err -> liftIO $ die' verbosity ("Error packing sdist: " ++ err) Right path -> tell [Tar.directoryEntry path] for_ files $ \file -> do let fileDir = takeDirectory (prefix file) needsEntry <- gets (Set.notMember fileDir) when needsEntry $ do modify (Set.insert fileDir) case Tar.toTarPath True fileDir of Left err -> liftIO $ die' verbosity ("Error packing sdist: " ++ err) Right path -> tell [Tar.directoryEntry path] contents <- liftIO . fmap BSL.fromStrict . 
BS.readFile $ dir file case Tar.toTarPath False (prefix file) of Left err -> liftIO $ die' verbosity ("Error packing sdist: " ++ err) Right path -> tell [(Tar.fileEntry path contents) { Tar.entryPermissions = Tar.ordinaryFilePermissions }] entries <- execWriterT (evalStateT entriesM mempty) let -- Pretend our GZip file is made on Unix. normalize bs = BSL.concat [pfx, "\x03", rest'] where (pfx, rest) = BSL.splitAt 9 bs rest' = BSL.tail rest -- The Unix epoch, which is the default value, is -- unsuitable because it causes unpacking problems on -- Windows; we need a post-1980 date. One gigasecond -- after the epoch is during 2001-09-09, so that does -- nicely. See #5596. setModTime :: Tar.Entry -> Tar.Entry setModTime entry = entry { Tar.entryTime = 1000000000 } return . normalize . GZip.compress . Tar.write $ fmap setModTime entries cabal-install-3.8.1.0/src/Distribution/Client/Store.hs0000644000000000000000000002562307346545000020704 0ustar0000000000000000{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns #-} -- | Management for the installed package store. 
-- module Distribution.Client.Store ( -- * The store layout StoreDirLayout(..), defaultStoreDirLayout, -- * Reading store entries getStoreEntries, doesStoreEntryExist, -- * Creating store entries newStoreEntry, NewStoreEntryOutcome(..), -- * Concurrency strategy -- $concurrency ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Client.DistDirLayout import Distribution.Client.RebuildMonad import Distribution.Package (UnitId, mkUnitId) import Distribution.Compiler (CompilerId) import Distribution.Simple.Utils ( withTempDirectory, debug, info ) import Distribution.Verbosity ( silent ) import qualified Data.Set as Set import Control.Exception import System.FilePath import System.Directory #ifdef MIN_VERSION_lukko import Lukko #else import System.IO (openFile, IOMode(ReadWriteMode), hClose) import GHC.IO.Handle.Lock (hLock, hTryLock, LockMode(ExclusiveLock)) #if MIN_VERSION_base(4,11,0) import GHC.IO.Handle.Lock (hUnlock) #endif #endif -- $concurrency -- -- We access and update the store concurrently. Our strategy to do that safely -- is as follows. -- -- The store entries once created are immutable. This alone simplifies matters -- considerably. -- -- Additionally, the way 'UnitId' hashes are constructed means that if a store -- entry exists already then we can assume its content is ok to reuse, rather -- than having to re-recreate. This is the nix-style input hashing concept. -- -- A consequence of this is that with a little care it is /safe/ to race -- updates against each other. Consider two independent concurrent builds that -- both want to build a particular 'UnitId', where that entry does not yet -- exist in the store. It is safe for both to build and try to install this -- entry into the store provided that: -- -- * only one succeeds -- * the looser discovers that they lost, they abandon their own build and -- re-use the store entry installed by the winner. 
-- -- Note that because builds are not reproducible in general (nor even -- necessarily ABI compatible) then it is essential that the loser abandon -- their build and use the one installed by the winner, so that subsequent -- packages are built against the exact package from the store rather than some -- morally equivalent package that may not be ABI compatible. -- -- Our overriding goal is that store reads be simple, cheap and not require -- locking. We will derive our write-side protocol to make this possible. -- -- The read-side protocol is simply: -- -- * check for the existence of a directory entry named after the 'UnitId' in -- question. That is, if the dir entry @$root/foo-1.0-fe56a...@ exists then -- the store entry can be assumed to be complete and immutable. -- -- Given our read-side protocol, the final step on the write side must be to -- atomically rename a fully-formed store entry directory into its final -- location. While this will indeed be the final step, the preparatory steps -- are more complicated. The tricky aspect is that the store also contains a -- number of shared package databases (one per compiler version). Our read -- strategy means that by the time we install the store dir entry the package -- db must already have been updated. We cannot do the package db update -- as part of atomically renaming the store entry directory however. Furthermore -- it is not safe to allow either package db update because the db entry -- contains the ABI hash and this is not guaranteed to be deterministic. So we -- must register the new package prior to the atomic dir rename. Since this -- combination of steps are not atomic then we need locking. -- -- The write-side protocol is: -- -- * Create a unique temp dir and write all store entry files into it. -- -- * Take a lock named after the 'UnitId' in question. -- -- * Once holding the lock, check again for the existence of the final store -- entry directory. 
If the entry exists then the process lost the race and it -- must abandon, unlock and re-use the existing store entry. If the entry -- does not exist then the process won the race and it can proceed. -- -- * Register the package into the package db. Note that the files are not in -- their final location at this stage so registration file checks may need -- to be disabled. -- -- * Atomically rename the temp dir to the final store entry location. -- -- * Release the previously-acquired lock. -- -- Obviously this means it is possible to fail after registering but before -- installing the store entry, leaving a dangling package db entry. This is not -- much of a problem because this entry does not determine package existence -- for cabal. It does mean however that the package db update should be insert -- or replace, i.e. not failing if the db entry already exists. -- | Check if a particular 'UnitId' exists in the store. -- doesStoreEntryExist :: StoreDirLayout -> CompilerId -> UnitId -> IO Bool doesStoreEntryExist StoreDirLayout{storePackageDirectory} compid unitid = doesDirectoryExist (storePackageDirectory compid unitid) -- | Return the 'UnitId's of all packages\/components already installed in the -- store. -- getStoreEntries :: StoreDirLayout -> CompilerId -> Rebuild (Set UnitId) getStoreEntries StoreDirLayout{storeDirectory} compid = do paths <- getDirectoryContentsMonitored (storeDirectory compid) return $! mkEntries paths where mkEntries = Set.delete (mkUnitId "package.db") . Set.delete (mkUnitId "incoming") . Set.fromList . map mkUnitId . filter valid valid ('.':_) = False valid _ = True -- | The outcome of 'newStoreEntry': either the store entry was newly created -- or it existed already. The latter case happens if there was a race between -- two builds of the same store entry. -- data NewStoreEntryOutcome = UseNewStoreEntry | UseExistingStoreEntry deriving (Eq, Show) -- | Place a new entry into the store. 
See the concurrency strategy description -- for full details. -- -- In particular, it takes two actions: one to place files into a temporary -- location, and a second to perform any necessary registration. The first -- action is executed without any locks held (the temp dir is unique). The -- second action holds a lock that guarantees that only one cabal process is -- able to install this store entry. This means it is safe to register into -- the compiler package DB or do other similar actions. -- -- Note that if you need to use the registration information later then you -- /must/ check the 'NewStoreEntryOutcome' and if it's'UseExistingStoreEntry' -- then you must read the existing registration information (unless your -- registration information is constructed fully deterministically). -- newStoreEntry :: Verbosity -> StoreDirLayout -> CompilerId -> UnitId -> (FilePath -> IO (FilePath, [FilePath])) -- ^ Action to place files. -> IO () -- ^ Register action, if necessary. -> IO NewStoreEntryOutcome newStoreEntry verbosity storeDirLayout@StoreDirLayout{..} compid unitid copyFiles register = -- See $concurrency above for an explanation of the concurrency protocol withTempIncomingDir storeDirLayout compid $ \incomingTmpDir -> do -- Write all store entry files within the temp dir and return the prefix. (incomingEntryDir, otherFiles) <- copyFiles incomingTmpDir -- Take a lock named after the 'UnitId' in question. withIncomingUnitIdLock verbosity storeDirLayout compid unitid $ do -- Check for the existence of the final store entry directory. exists <- doesStoreEntryExist storeDirLayout compid unitid if exists -- If the entry exists then we lost the race and we must abandon, -- unlock and re-use the existing store entry. then do info verbosity $ "Concurrent build race: abandoning build in favour of existing " ++ "store entry " ++ prettyShow compid prettyShow unitid return UseExistingStoreEntry -- If the entry does not exist then we won the race and can proceed. 
else do -- Register the package into the package db (if appropriate). register -- Atomically rename the temp dir to the final store entry location. renameDirectory incomingEntryDir finalEntryDir for_ otherFiles $ \file -> do let finalStoreFile = storeDirectory compid makeRelative (incomingTmpDir (dropDrive (storeDirectory compid))) file createDirectoryIfMissing True (takeDirectory finalStoreFile) renameFile file finalStoreFile debug verbosity $ "Installed store entry " ++ prettyShow compid prettyShow unitid return UseNewStoreEntry where finalEntryDir = storePackageDirectory compid unitid withTempIncomingDir :: StoreDirLayout -> CompilerId -> (FilePath -> IO a) -> IO a withTempIncomingDir StoreDirLayout{storeIncomingDirectory} compid action = do createDirectoryIfMissing True incomingDir withTempDirectory silent incomingDir "new" action where incomingDir = storeIncomingDirectory compid withIncomingUnitIdLock :: Verbosity -> StoreDirLayout -> CompilerId -> UnitId -> IO a -> IO a withIncomingUnitIdLock verbosity StoreDirLayout{storeIncomingLock} compid unitid action = bracket takeLock releaseLock (\_hnd -> action) where #ifdef MIN_VERSION_lukko takeLock | fileLockingSupported = do fd <- fdOpen (storeIncomingLock compid unitid) gotLock <- fdTryLock fd ExclusiveLock unless gotLock $ do info verbosity $ "Waiting for file lock on store entry " ++ prettyShow compid prettyShow unitid fdLock fd ExclusiveLock return fd -- if there's no locking, do nothing. Be careful on AIX. | otherwise = return undefined -- :( releaseLock fd | fileLockingSupported = do fdUnlock fd fdClose fd | otherwise = return () #else takeLock = do h <- openFile (storeIncomingLock compid unitid) ReadWriteMode -- First try non-blocking, but if we would have to wait then -- log an explanation and do it again in blocking mode. 
gotlock <- hTryLock h ExclusiveLock unless gotlock $ do info verbosity $ "Waiting for file lock on store entry " ++ prettyShow compid prettyShow unitid hLock h ExclusiveLock return h releaseLock h = hUnlock h >> hClose h #endif cabal-install-3.8.1.0/src/Distribution/Client/Tar.hs0000644000000000000000000000677207346545000020342 0ustar0000000000000000{-# LANGUAGE DeriveFunctor #-} {-# OPTIONS_GHC -fno-warn-orphans #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Tar -- Copyright : (c) 2007 Bjorn Bringert, -- 2008 Andrea Vezzosi, -- 2008-2009 Duncan Coutts -- License : BSD3 -- -- Maintainer : duncan@community.haskell.org -- Portability : portable -- -- Reading, writing and manipulating \"@.tar@\" archive files. -- ----------------------------------------------------------------------------- module Distribution.Client.Tar ( -- * @tar.gz@ operations createTarGzFile, extractTarGzFile, -- * Other local utils buildTreeRefTypeCode, buildTreeSnapshotTypeCode, isBuildTreeRefTypeCode, filterEntries, filterEntriesM, entriesToList, ) where import Distribution.Client.Compat.Prelude import Prelude () import qualified Data.ByteString.Lazy as BS import qualified Codec.Archive.Tar as Tar import qualified Codec.Archive.Tar.Entry as Tar import qualified Codec.Archive.Tar.Check as Tar import qualified Codec.Compression.GZip as GZip import qualified Distribution.Client.GZipUtils as GZipUtils -- for foldEntries... import Control.Exception (throw) -- -- * High level operations -- createTarGzFile :: FilePath -- ^ Full Tarball path -> FilePath -- ^ Base directory -> FilePath -- ^ Directory to archive, relative to base dir -> IO () createTarGzFile tar base dir = BS.writeFile tar . GZip.compress . 
Tar.write =<< Tar.pack base [dir] extractTarGzFile :: FilePath -- ^ Destination directory -> FilePath -- ^ Expected subdir (to check for tarbombs) -> FilePath -- ^ Tarball -> IO () extractTarGzFile dir expected tar = Tar.unpack dir . Tar.checkTarbomb expected . Tar.read . GZipUtils.maybeDecompress =<< BS.readFile tar instance (Exception a, Exception b) => Exception (Either a b) where toException (Left e) = toException e toException (Right e) = toException e fromException e = case fromException e of Just e' -> Just (Left e') Nothing -> case fromException e of Just e' -> Just (Right e') Nothing -> Nothing -- | Type code for the local build tree reference entry type. We don't use the -- symbolic link entry type because it allows only 100 ASCII characters for the -- path. buildTreeRefTypeCode :: Tar.TypeCode buildTreeRefTypeCode = 'C' -- | Type code for the local build tree snapshot entry type. buildTreeSnapshotTypeCode :: Tar.TypeCode buildTreeSnapshotTypeCode = 'S' -- | Is this a type code for a build tree reference? isBuildTreeRefTypeCode :: Tar.TypeCode -> Bool isBuildTreeRefTypeCode typeCode | (typeCode == buildTreeRefTypeCode || typeCode == buildTreeSnapshotTypeCode) = True | otherwise = False filterEntries :: (Tar.Entry -> Bool) -> Tar.Entries e -> Tar.Entries e filterEntries p = Tar.foldEntries (\e es -> if p e then Tar.Next e es else es) Tar.Done Tar.Fail filterEntriesM :: Monad m => (Tar.Entry -> m Bool) -> Tar.Entries e -> m (Tar.Entries e) filterEntriesM p = Tar.foldEntries (\entry rest -> do keep <- p entry xs <- rest if keep then return (Tar.Next entry xs) else return xs) (return Tar.Done) (return . 
Tar.Fail) entriesToList :: Exception e => Tar.Entries e -> [Tar.Entry] entriesToList = Tar.foldEntries (:) [] throw cabal-install-3.8.1.0/src/Distribution/Client/TargetProblem.hs0000644000000000000000000000375407346545000022360 0ustar0000000000000000{-# LANGUAGE DeriveFunctor #-} module Distribution.Client.TargetProblem ( TargetProblem(..), TargetProblem', ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.ProjectPlanning (AvailableTarget) import Distribution.Client.TargetSelector (SubComponentTarget, TargetSelector) import Distribution.Package (PackageId, PackageName) import Distribution.Simple.LocalBuildInfo (ComponentName (..)) import Distribution.Types.UnqualComponentName (UnqualComponentName) -- | Target problems that occur during project orchestration. data TargetProblem a = TargetNotInProject PackageName | TargetAvailableInIndex PackageName | TargetComponentNotProjectLocal PackageId ComponentName SubComponentTarget | TargetComponentNotBuildable PackageId ComponentName SubComponentTarget | TargetOptionalStanzaDisabledByUser PackageId ComponentName SubComponentTarget | TargetOptionalStanzaDisabledBySolver PackageId ComponentName SubComponentTarget | TargetProblemUnknownComponent PackageName (Either UnqualComponentName ComponentName) | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] -- ^ The 'TargetSelector' matches component (test/benchmark/...) but none are buildable | TargetProblemNoTargets TargetSelector -- ^ There are no targets at all -- The target matching stuff only returns packages local to the project, -- so these lookups should never fail, but if 'resolveTargets' is called -- directly then of course it can. | TargetProblemNoSuchPackage PackageId | TargetProblemNoSuchComponent PackageId ComponentName -- | A custom target problem | CustomTargetProblem a deriving (Eq, Show, Functor) -- | Type alias for a 'TargetProblem' with no user-defined problems/errors. 
-- -- Can use the utilities below for reporting/rendering problems. type TargetProblem' = TargetProblem Void cabal-install-3.8.1.0/src/Distribution/Client/TargetSelector.hs0000644000000000000000000027017507346545000022543 0ustar0000000000000000{-# LANGUAGE CPP, DeriveGeneric, DeriveFunctor, RecordWildCards, NamedFieldPuns #-} {-# LANGUAGE ScopedTypeVariables #-} -- TODO {-# OPTIONS_GHC -fno-warn-incomplete-uni-patterns #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.TargetSelector -- Copyright : (c) Duncan Coutts 2012, 2015, 2016 -- License : BSD-like -- -- Maintainer : duncan@community.haskell.org -- -- Handling for user-specified target selectors. -- ----------------------------------------------------------------------------- module Distribution.Client.TargetSelector ( -- * Target selectors TargetSelector(..), TargetImplicitCwd(..), ComponentKind(..), ComponentKindFilter, SubComponentTarget(..), QualLevel(..), componentKind, -- * Reading target selectors readTargetSelectors, TargetSelectorProblem(..), reportTargetSelectorProblems, showTargetSelector, TargetString(..), showTargetString, parseTargetString, -- ** non-IO readTargetSelectorsWith, DirActions(..), defaultDirActions, ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Package ( Package(..), PackageId, PackageName, packageName ) import Distribution.Types.UnqualComponentName ( UnqualComponentName, mkUnqualComponentName, unUnqualComponentName , packageNameToUnqualComponentName ) import Distribution.Client.Types ( PackageLocation(..), PackageSpecifier(..) ) import Distribution.PackageDescription ( PackageDescription , Executable(..) 
, TestSuite(..), TestSuiteInterface(..), testModules , Benchmark(..), BenchmarkInterface(..), benchmarkModules , BuildInfo(..), explicitLibModules, exeModules ) import Distribution.PackageDescription.Configuration ( flattenPackageDescription ) import Distribution.Solver.Types.SourcePackage ( SourcePackage(..) ) import Distribution.ModuleName ( ModuleName, toFilePath ) import Distribution.Simple.LocalBuildInfo ( Component(..), ComponentName(..), LibraryName(..) , pkgComponents, componentName, componentBuildInfo ) import Distribution.Types.ForeignLib import Distribution.Simple.Utils ( die', lowercase, ordNub ) import Distribution.Client.Utils ( makeRelativeCanonical ) import Data.List ( stripPrefix, groupBy ) import qualified Data.List.NonEmpty as NE import qualified Data.Map.Lazy as Map.Lazy import qualified Data.Map.Strict as Map import qualified Data.Set as Set import Control.Arrow ((&&&)) import Control.Monad hiding ( mfilter ) import qualified Distribution.Deprecated.ReadP as Parse import Distribution.Deprecated.ReadP ( (+++), (<++) ) import Distribution.Deprecated.ParseUtils ( readPToMaybe ) import System.FilePath as FilePath ( takeExtension, dropExtension , splitDirectories, joinPath, splitPath ) import qualified System.Directory as IO ( doesFileExist, doesDirectoryExist, canonicalizePath , getCurrentDirectory ) import System.FilePath ( (), (<.>), normalise, dropTrailingPathSeparator, equalFilePath ) import Text.EditDistance ( defaultEditCosts, restrictedDamerauLevenshteinDistance ) import Distribution.Utils.Path import qualified Prelude (foldr1) -- ------------------------------------------------------------ -- * Target selector terms -- ------------------------------------------------------------ -- | A target selector is expression selecting a set of components (as targets -- for a actions like @build@, @run@, @test@ etc). A target selector -- corresponds to the user syntax for referring to targets on the command line. 
-- -- From the users point of view a target can be many things: packages, dirs, -- component names, files etc. Internally we consider a target to be a specific -- component (or module\/file within a component), and all the users' notions -- of targets are just different ways of referring to these component targets. -- -- So target selectors are expressions in the sense that they are interpreted -- to refer to one or more components. For example a 'TargetPackage' gets -- interpreted differently by different commands to refer to all or a subset -- of components within the package. -- -- The syntax has lots of optional parts: -- -- > [ package name | package dir | package .cabal file ] -- > [ [lib:|exe:] component name ] -- > [ module name | source file ] -- data TargetSelector = -- | One (or more) packages as a whole, or all the components of a -- particular kind within the package(s). -- -- These are always packages that are local to the project. In the case -- that there is more than one, they all share the same directory location. -- TargetPackage TargetImplicitCwd [PackageId] (Maybe ComponentKindFilter) -- | A package specified by name. This may refer to @extra-packages@ from -- the @cabal.project@ file, or a dependency of a known project package or -- could refer to a package from a hackage archive. It needs further -- context to resolve to a specific package. -- | TargetPackageNamed PackageName (Maybe ComponentKindFilter) -- | All packages, or all components of a particular kind in all packages. -- | TargetAllPackages (Maybe ComponentKindFilter) -- | A specific component in a package within the project. -- | TargetComponent PackageId ComponentName SubComponentTarget -- | A component in a package, but where it cannot be verified that the -- package has such a component, or because the package is itself not -- known. 
-- | TargetComponentUnknown PackageName (Either UnqualComponentName ComponentName) SubComponentTarget deriving (Eq, Ord, Show, Generic) -- | Does this 'TargetPackage' selector arise from syntax referring to a -- package in the current directory (e.g. @tests@ or no giving no explicit -- target at all) or does it come from syntax referring to a package name -- or location. -- data TargetImplicitCwd = TargetImplicitCwd | TargetExplicitNamed deriving (Eq, Ord, Show, Generic) data ComponentKind = LibKind | FLibKind | ExeKind | TestKind | BenchKind deriving (Eq, Ord, Enum, Show) type ComponentKindFilter = ComponentKind -- | Either the component as a whole or detail about a file or module target -- within a component. -- data SubComponentTarget = -- | The component as a whole WholeComponent -- | A specific module within a component. | ModuleTarget ModuleName -- | A specific file within a component. Note that this does not carry the -- file extension. | FileTarget FilePath deriving (Eq, Ord, Show, Generic) instance Binary SubComponentTarget instance Structured SubComponentTarget -- ------------------------------------------------------------ -- * Top level, do everything -- ------------------------------------------------------------ -- | Parse a bunch of command line args as 'TargetSelector's, failing with an -- error if any are unrecognised. The possible target selectors are based on -- the available packages (and their locations). -- readTargetSelectors :: [PackageSpecifier (SourcePackage (PackageLocation a))] -> Maybe ComponentKindFilter -- ^ This parameter is used when there are ambiguous selectors. -- If it is 'Just', then we attempt to resolve ambiguity -- by applying it, since otherwise there is no way to allow -- contextually valid yet syntactically ambiguous selectors. 
                     --   (#4676, #5461)
                    -> [String]
                    -> IO (Either [TargetSelectorProblem] [TargetSelector])
readTargetSelectors = readTargetSelectorsWith defaultDirActions

-- | Generalisation of 'readTargetSelectors' over the directory-probing
-- operations, so resolution can run in 'IO' or in a pure test monad.
readTargetSelectorsWith :: (Applicative m, Monad m) => DirActions m
                        -> [PackageSpecifier (SourcePackage (PackageLocation a))]
                        -> Maybe ComponentKindFilter
                        -> [String]
                        -> m (Either [TargetSelectorProblem] [TargetSelector])
readTargetSelectorsWith dirActions@DirActions{} pkgs mfilter targetStrs =
    case parseTargetStrings targetStrs of
      ([], usertargets) -> do
        -- Annotate each syntactic target with the file-system status of its
        -- first field, then resolve against the project's known targets.
        usertargets' <- traverse (getTargetStringFileStatus dirActions) usertargets
        knowntargets <- getKnownTargets dirActions pkgs
        case resolveTargetSelectors knowntargets usertargets' mfilter of
          ([], btargets) -> return (Right btargets)
          (problems, _)  -> return (Left problems)
      -- Any strings that failed the outline parse are reported wholesale.
      (strs, _)          -> return (Left (map TargetSelectorUnrecognised strs))

-- | The subset of file-system operations target resolution needs, abstracted
-- as a record so tests can substitute pure implementations.
data DirActions m = DirActions {
       doesFileExist       :: FilePath -> m Bool,
       doesDirectoryExist  :: FilePath -> m Bool,
       canonicalizePath    :: FilePath -> m FilePath,
       getCurrentDirectory :: m FilePath
     }

-- | The real 'IO' implementations from "System.Directory".
defaultDirActions :: DirActions IO
defaultDirActions =
    DirActions {
      doesFileExist       = IO.doesFileExist,
      doesDirectoryExist  = IO.doesDirectoryExist,
      -- Workaround: strip any trailing path separator before canonicalizing.
      -- NOTE(review): the original comment was truncated here; presumably this
      -- refers to a known @directory@ package issue with trailing separators
      -- (haskell/directory#63) -- confirm against upstream history.
      canonicalizePath    = IO.canonicalizePath . dropTrailingPathSeparator,
      getCurrentDirectory = IO.getCurrentDirectory
    }

makeRelativeToCwd :: Applicative m => DirActions m -> FilePath -> m FilePath
makeRelativeToCwd DirActions{..} path =
    makeRelativeCanonical <$> canonicalizePath path <*> getCurrentDirectory


-- ------------------------------------------------------------
-- * Parsing target strings
-- ------------------------------------------------------------

-- | The outline parse of a target selector. It takes one of the forms:
--
-- > str1
-- > str1:str2
-- > str1:str2:str3
-- > str1:str2:str3:str4
--
-- (plus the 5- and 7-field fully-qualified forms used by the
-- @TargetString5@ and @TargetString7@ constructors below)
--
data TargetString =
     TargetString1 String
   | TargetString2 String String
   | TargetString3 String String String
   | TargetString4 String String String String
   | TargetString5 String String String String String
   | TargetString7 String String String String String String String
  deriving (Show, Eq)

-- | Parse a bunch of 'TargetString's (purely without throwing exceptions).
-- Returns the unparseable inputs on the left and parses on the right.
--
parseTargetStrings :: [String] -> ([String], [TargetString])
parseTargetStrings =
    partitionEithers
  . map (\str -> maybe (Left str) Right (parseTargetString str))

-- | Outline-parse a single target string into colon-separated fields.
-- Fields may be quoted with Haskell string syntax; the first field of
-- multi-field forms may be empty (for the @:pkg:...@ meta forms).
parseTargetString :: String -> Maybe TargetString
parseTargetString =
    readPToMaybe parseTargetApprox
  where
    parseTargetApprox :: Parse.ReadP r TargetString
    parseTargetApprox =
          (do a <- tokenQ
              return (TargetString1 a))
      +++ (do a <- tokenQ0
              _ <- Parse.char ':'
              b <- tokenQ
              return (TargetString2 a b))
      +++ (do a <- tokenQ0
              _ <- Parse.char ':'
              b <- tokenQ
              _ <- Parse.char ':'
              c <- tokenQ
              return (TargetString3 a b c))
      +++ (do a <- tokenQ0
              _ <- Parse.char ':'
              b <- token
              _ <- Parse.char ':'
              c <- tokenQ
              _ <- Parse.char ':'
              d <- tokenQ
              return (TargetString4 a b c d))
      +++ (do a <- tokenQ0
              _ <- Parse.char ':'
              b <- token
              _ <- Parse.char ':'
              c <- tokenQ
              _ <- Parse.char ':'
              d <- tokenQ
              _ <- Parse.char ':'
              e <- tokenQ
              return (TargetString5 a b c d e))
      +++ (do a <- tokenQ0
              _ <- Parse.char ':'
              b <- token
              _ <- Parse.char ':'
              c <- tokenQ
              _ <- Parse.char ':'
              d <- tokenQ
              _ <- Parse.char ':'
              e <- tokenQ
              _ <- Parse.char ':'
              f <- tokenQ
              _ <- Parse.char ':'
              g <- tokenQ
              return (TargetString7 a b c d e f g))

    -- a non-empty run of chars that are neither whitespace nor ':'
    token  = Parse.munch1 (\x -> not (isSpace x) && x /= ':')
    tokenQ = parseHaskellString <++ token
    -- like 'token' but possibly empty (leading ':' meta forms)
    token0 = Parse.munch (\x -> not (isSpace x) && x /= ':')
    tokenQ0= parseHaskellString <++ token0
    parseHaskellString :: Parse.ReadP r String
    parseHaskellString = Parse.readS_to_P reads

-- | Render a 'TargetString' back as the external syntax. This is mainly for
-- error messages.
--
showTargetString :: TargetString -> String
showTargetString = intercalate ":" . components
  where
    components (TargetString1 s1)          = [s1]
    components (TargetString2 s1 s2)       = [s1,s2]
    components (TargetString3 s1 s2 s3)    = [s1,s2,s3]
    components (TargetString4 s1 s2 s3 s4) = [s1,s2,s3,s4]
    components (TargetString5 s1 s2 s3 s4 s5) = [s1,s2,s3,s4,s5]
    components (TargetString7 s1 s2 s3 s4 s5 s6 s7) = [s1,s2,s3,s4,s5,s6,s7]

-- | Render a 'TargetSelector' using the shortest qualification level at
-- which it has any rendering at all; "" if it has none.
showTargetSelector :: TargetSelector -> String
showTargetSelector ts =
  case [ t | ql <- [QL1 .. QLFull]
           , t  <- renderTargetSelector ql ts ] of
    (t':_) -> showTargetString (forgetFileStatus t')
    []     -> ""

-- | A short noun describing what kind of thing a selector refers to,
-- used in error messages.
showTargetSelectorKind :: TargetSelector -> String
showTargetSelectorKind bt = case bt of
  TargetPackage TargetExplicitNamed _ Nothing  -> "package"
  TargetPackage TargetExplicitNamed _ (Just _) -> "package:filter"
  TargetPackage TargetImplicitCwd   _ Nothing  -> "cwd-package"
  TargetPackage TargetImplicitCwd   _ (Just _) -> "cwd-package:filter"
  TargetPackageNamed                _ Nothing  -> "named-package"
  TargetPackageNamed                _ (Just _) -> "named-package:filter"
  TargetAllPackages Nothing                    -> "package *"
  TargetAllPackages (Just _)                   -> "package *:filter"
  TargetComponent        _ _ WholeComponent    -> "component"
  TargetComponent        _ _ ModuleTarget{}    -> "module"
  TargetComponent        _ _ FileTarget{}      -> "file"
  TargetComponentUnknown _ _ WholeComponent    -> "unknown-component"
  TargetComponentUnknown _ _ ModuleTarget{}    -> "unknown-module"
  TargetComponentUnknown _ _ FileTarget{}      -> "unknown-file"


-- ------------------------------------------------------------
-- * Checking if targets exist as files
-- ------------------------------------------------------------

-- | A 'TargetString' augmented with the file-system status of its first
-- field (only forms 1-3 can denote files/dirs, so only they carry one).
data TargetStringFileStatus =
     TargetStringFileStatus1 String FileStatus
   | TargetStringFileStatus2 String FileStatus String
   | TargetStringFileStatus3 String FileStatus String String
   | TargetStringFileStatus4 String String String String
   | TargetStringFileStatus5 String String String String String
   | TargetStringFileStatus7 String String String String
                             String String String
  deriving (Eq, Ord, Show)

data FileStatus = FileStatusExistsFile FilePath -- the canonicalised filepath
                | FileStatusExistsDir  FilePath -- the canonicalised filepath
                | FileStatusNotExists  Bool -- does the parent dir exist even?
  deriving (Eq, Ord, Show)

noFileStatus :: FileStatus
noFileStatus = FileStatusNotExists False

-- | Probe the file system (via 'DirActions') for the first field of a
-- 'TargetString' and pair the string with the result.
getTargetStringFileStatus :: (Applicative m, Monad m) => DirActions m
                          -> TargetString -> m TargetStringFileStatus
getTargetStringFileStatus DirActions{..} t =
    case t of
      TargetString1 s1 ->
        (\f1 -> TargetStringFileStatus1 s1 f1)       <$> fileStatus s1
      TargetString2 s1 s2 ->
        (\f1 -> TargetStringFileStatus2 s1 f1 s2)    <$> fileStatus s1
      TargetString3 s1 s2 s3 ->
        (\f1 -> TargetStringFileStatus3 s1 f1 s2 s3) <$> fileStatus s1
      TargetString4 s1 s2 s3 s4 ->
        return (TargetStringFileStatus4 s1 s2 s3 s4)
      TargetString5 s1 s2 s3 s4 s5 ->
        return (TargetStringFileStatus5 s1 s2 s3 s4 s5)
      TargetString7 s1 s2 s3 s4 s5 s6 s7 ->
        return (TargetStringFileStatus7 s1 s2 s3 s4 s5 s6 s7)
  where
    fileStatus f = do
      fexists <- doesFileExist f
      dexists <- doesDirectoryExist f
      case splitPath f of
        _ | fexists -> FileStatusExistsFile <$> canonicalizePath f
          | dexists -> FileStatusExistsDir  <$> canonicalizePath f
        -- neither exists: record whether at least the parent dir does
        (d:_)       -> FileStatusNotExists  <$> doesDirectoryExist d
        _           -> pure (FileStatusNotExists False)

-- | Drop the file-status annotation again.
forgetFileStatus :: TargetStringFileStatus -> TargetString
forgetFileStatus t = case t of
    TargetStringFileStatus1 s1 _              -> TargetString1 s1
    TargetStringFileStatus2 s1 _ s2           -> TargetString2 s1 s2
    TargetStringFileStatus3 s1 _ s2 s3        -> TargetString3 s1 s2 s3
    TargetStringFileStatus4 s1   s2 s3 s4     -> TargetString4 s1 s2 s3 s4
    TargetStringFileStatus5 s1   s2 s3 s4
                                 s5           -> TargetString5 s1 s2 s3 s4 s5
    TargetStringFileStatus7 s1   s2 s3 s4
                                 s5 s6 s7     -> TargetString7 s1 s2 s3 s4
                                                               s5 s6 s7

getFileStatus :: TargetStringFileStatus -> Maybe FileStatus
getFileStatus (TargetStringFileStatus1 _ f)     = Just f
getFileStatus (TargetStringFileStatus2 _ f _)   = Just f
getFileStatus (TargetStringFileStatus3 _ f _ _) = Just f
getFileStatus _                                 = Nothing

setFileStatus :: FileStatus -> TargetStringFileStatus -> TargetStringFileStatus
setFileStatus f (TargetStringFileStatus1 s1 _)       = TargetStringFileStatus1 s1 f
setFileStatus f (TargetStringFileStatus2 s1 _ s2)    = TargetStringFileStatus2 s1 f s2
setFileStatus f (TargetStringFileStatus3 s1 _ s2 s3) = TargetStringFileStatus3 s1 f s2 s3
setFileStatus _ t                                    = t

-- | Copy the file-status annotation (if any) from one target string onto
-- another; forms without a status slot are left unchanged.
copyFileStatus :: TargetStringFileStatus -> TargetStringFileStatus
               -> TargetStringFileStatus
copyFileStatus src dst =
    case getFileStatus src of
      Just f  -> setFileStatus f dst
      Nothing -> dst


-- ------------------------------------------------------------
-- * Resolving target strings to target selectors
-- ------------------------------------------------------------

-- | Given a bunch of user-specified targets, try to resolve what it is they
-- refer to.
--
resolveTargetSelectors :: KnownTargets
                       -> [TargetStringFileStatus]
                       -> Maybe ComponentKindFilter
                       -> ([TargetSelectorProblem],
                           [TargetSelector])
-- default local dir target if there's no given target:
resolveTargetSelectors (KnownTargets{knownPackagesAll = []}) [] _ =
    ([TargetSelectorNoTargetsInProject], [])

-- if the component kind filter is just exes, we don't want to suggest "all" as a target.
resolveTargetSelectors (KnownTargets{knownPackagesPrimary = []}) [] ckf =
    ([TargetSelectorNoTargetsInCwd (ckf /= Just ExeKind) ], [])

resolveTargetSelectors (KnownTargets{knownPackagesPrimary}) [] _ =
    ([], [TargetPackage TargetImplicitCwd pkgids Nothing])
  where
    pkgids = [ pinfoId | KnownPackage{pinfoId} <- knownPackagesPrimary ]

resolveTargetSelectors knowntargets targetStrs mfilter =
    partitionEithers
  . map (resolveTargetSelector knowntargets mfilter)
  $ targetStrs

-- | Resolve a single target string: run the syntax matcher and classify
-- the outcome as a selector, an ambiguity, or a diagnosable problem.
resolveTargetSelector :: KnownTargets
                      -> Maybe ComponentKindFilter
                      -> TargetStringFileStatus
                      -> Either TargetSelectorProblem TargetSelector
resolveTargetSelector knowntargets@KnownTargets{..} mfilter targetStrStatus =
    case findMatch (matcher targetStrStatus) of
      Unambiguous _
        | projectIsEmpty -> Left TargetSelectorNoTargetsInProject

      -- "tests" etc. with no package in the cwd: matched syntactically but
      -- refers to nothing
      Unambiguous (TargetPackage TargetImplicitCwd [] _)
                         -> Left (TargetSelectorNoCurrentPackage targetStr)

      Unambiguous target -> Right target

      None errs
        | projectIsEmpty -> Left TargetSelectorNoTargetsInProject
        | otherwise      -> Left (classifyMatchErrors errs)

      -- an explicit kind filter can break an otherwise-ambiguous match
      Ambiguous _ targets
        | Just kfilter <- mfilter
        , [target] <- applyKindFilter kfilter targets -> Right target

      Ambiguous exactMatch targets ->
        case disambiguateTargetSelectors
               matcher targetStrStatus exactMatch
               targets of
          Right targets'   -> Left (TargetSelectorAmbiguous targetStr targets')
          Left ((m, ms):_) -> Left (MatchingInternalError targetStr m ms)
          Left []          -> internalError "resolveTargetSelector"
  where
    matcher = matchTargetSelector knowntargets

    targetStr = forgetFileStatus targetStrStatus

    projectIsEmpty = null knownPackagesAll

    -- Turn low-level match errors into a single user-facing problem:
    -- either "expected X, got Y" or "no such X" with nearby suggestions.
    classifyMatchErrors errs
      | Just expectedNE <- NE.nonEmpty expected
      = let (things, got:|_) = NE.unzip expectedNE in
        TargetSelectorExpected targetStr (NE.toList things) got

      | not (null nosuch)
      = TargetSelectorNoSuch targetStr nosuch

      | otherwise
      = internalError $ "classifyMatchErrors: " ++ show errs
      where
        expected = [ (thing, got)
                   | (_, MatchErrorExpected thing got)
                           <- map (innerErr Nothing) errs ]
        -- Trim the list of alternatives by dropping duplicates and
        -- retaining only at most three most similar (by edit distance) ones.
        nosuch = Map.foldrWithKey genResults [] $
                 Map.fromListWith Set.union $
                 [ ((inside, thing, got), Set.fromList alts)
                 | (inside, MatchErrorNoSuch thing got alts)
                     <- map (innerErr Nothing) errs
                 ]

        genResults (inside, thing, got) alts acc = (
            inside
          , thing
          , got
          , take maxResults
            $ map fst
            $ takeWhile distanceLow
            $ sortBy (comparing snd)
            $ map addLevDist
            $ Set.toList alts
          ) : acc
          where
            addLevDist = id &&& restrictedDamerauLevenshteinDistance
                                defaultEditCosts got

            -- only suggest names within half the length in edit distance
            distanceLow (_, dist) = dist < length got `div` 2

            maxResults = 3

        -- unwrap MatchErrorIn wrappers, remembering the innermost context
        innerErr _ (MatchErrorIn kind thing m)
                     = innerErr (Just (kind,thing)) m
        innerErr c m = (c,m)

-- | Keep only the selectors compatible with the given component kind.
-- Selectors with no kind information pass through unchanged.
applyKindFilter :: ComponentKindFilter -> [TargetSelector] -> [TargetSelector]
applyKindFilter kfilter = filter go
  where
    go (TargetPackage      _ _ (Just filter')) = kfilter == filter'
    go (TargetPackageNamed _   (Just filter')) = kfilter == filter'
    go (TargetAllPackages      (Just filter')) = kfilter == filter'
    go (TargetComponent _ cname _)
      | CLibName   _ <- cname                  = kfilter == LibKind
      | CFLibName  _ <- cname                  = kfilter == FLibKind
      | CExeName   _ <- cname                  = kfilter == ExeKind
      | CTestName  _ <- cname                  = kfilter == TestKind
      | CBenchName _ <- cname                  = kfilter == BenchKind
    go _                                       = True

-- | The various ways that trying to resolve a 'TargetString' to a
-- 'TargetSelector' can fail.
--
data TargetSelectorProblem
   = TargetSelectorExpected TargetString [String] String
     -- ^  [expected thing] (actually got)
   | TargetSelectorNoSuch  TargetString
                           [(Maybe (String, String), String, String, [String])]
     -- ^ [([in thing], no such thing, actually got, alternatives)]
   | TargetSelectorAmbiguous  TargetString
                              [(TargetString, TargetSelector)]
   | MatchingInternalError TargetString TargetSelector
                           [(TargetString, [TargetSelector])]
   | TargetSelectorUnrecognised String
     -- ^ Syntax error when trying to parse a target string.
   | TargetSelectorNoCurrentPackage TargetString
   | TargetSelectorNoTargetsInCwd Bool
     -- ^ bool that flags when it is acceptable to suggest "all" as a target
   | TargetSelectorNoTargetsInProject
   | TargetSelectorNoScript TargetString
  deriving (Show, Eq)

-- | Qualification levels.
-- Given the filepath src/F, executable component A, and package foo:
data QualLevel = QL1     -- ^ @src/F@
               | QL2     -- ^ @foo:src/F | A:src/F@
               | QL3     -- ^ @foo:A:src/F | exe:A:src/F@
               | QLFull  -- ^ @pkg:foo:exe:A:file:src/F@
  deriving (Eq, Enum, Show)

-- | Given several selectors that all matched the same input, try to find a
-- more qualified syntax for each one that matches it /unambiguously/, so the
-- user can be shown how to say exactly what they mean. 'Left' results are
-- internal errors (a selector with no unambiguous syntax at all).
disambiguateTargetSelectors
  :: (TargetStringFileStatus -> Match TargetSelector)
  -> TargetStringFileStatus -> MatchClass
  -> [TargetSelector]
  -> Either [(TargetSelector, [(TargetString, [TargetSelector])])]
            [(TargetString, TargetSelector)]
disambiguateTargetSelectors matcher matchInput exactMatch matchResults =
    case partitionEithers results of
      (errs@(_:_), _) -> Left errs
      ([], ok)        -> Right ok
  where
    -- So, here's the strategy. We take the original match results, and make a
    -- table of all their renderings at all qualification levels.
    -- Note there can be multiple renderings at each qualification level.

    -- Note that renderTargetSelector won't immediately work on any file syntax
    -- When rendering syntax, the FileStatus is always FileStatusNotExists,
    -- which will never match on syntaxForm1File!
    -- Because matchPackageDirectoryPrefix expects a FileStatusExistsFile.
    -- So we need to copy over the file status from the input
    -- TargetStringFileStatus, onto the new rendered TargetStringFileStatus
    matchResultsRenderings :: [(TargetSelector, [TargetStringFileStatus])]
    matchResultsRenderings =
      [ (matchResult, matchRenderings)
      | matchResult <- matchResults
      , let matchRenderings =
              [ copyFileStatus matchInput rendering
              | ql <- [QL1 .. QLFull]
              , rendering <- renderTargetSelector ql matchResult ]
      ]

    -- Of course the point is that we're looking for renderings that are
    -- unambiguous matches. So we build another memo table of all the matches
    -- for all of those renderings. So by looking up in this table we can see
    -- if we've got an unambiguous match.
    memoisedMatches :: Map TargetStringFileStatus (Match TargetSelector)
    memoisedMatches =
        -- avoid recomputing the main one if it was an exact match
        (if exactMatch == Exact
           then Map.insert matchInput (Match Exact 0 matchResults)
           else id)
      $ Map.Lazy.fromList
          -- (matcher rendering) should *always* be a Match! Otherwise we will
          -- hit the internal error later on.
          [ (rendering, matcher rendering)
          | rendering <- concatMap snd matchResultsRenderings ]

    -- Finally, for each of the match results, we go through all their
    -- possible renderings (in order of qualification level, though remember
    -- there can be multiple renderings per level), and find the first one
    -- that has an unambiguous match.
    results :: [Either (TargetSelector, [(TargetString, [TargetSelector])])
                       (TargetString, TargetSelector)]
    results =
      [ case findUnambiguous originalMatch matchRenderings of
          Just unambiguousRendering ->
            Right ( forgetFileStatus unambiguousRendering
                  , originalMatch)

          -- This case is an internal error, but we bubble it up and report it
          Nothing ->
            Left  ( originalMatch
                  , [ (forgetFileStatus rendering, matches)
                    | rendering <- matchRenderings
                    , let Match m _ matches = memoisedMatches Map.! rendering
                    , m /= Inexact
                    ] )

      | (originalMatch, matchRenderings) <- matchResultsRenderings ]

    -- Walk the renderings in order, returning the first whose memoised match
    -- is exactly the selector we started from (and nothing else).
    findUnambiguous :: TargetSelector
                    -> [TargetStringFileStatus]
                    -> Maybe TargetStringFileStatus
    findUnambiguous _ []     = Nothing
    findUnambiguous t (r:rs) =
      case memoisedMatches Map.! r of
        Match Exact _ [t'] | t == t'
                           -> Just r
        Match Exact   _ _  -> findUnambiguous t rs
        Match Unknown _ _  -> findUnambiguous t rs
        Match Inexact _ _  -> internalError "Match Inexact"
        NoMatch       _ _  -> internalError "NoMatch"

internalError :: String -> a
internalError msg =
  error $ "TargetSelector: internal error: " ++ msg

-- | Throw an exception with a formatted message if there are any problems.
-- reportTargetSelectorProblems :: Verbosity -> [TargetSelectorProblem] -> IO a reportTargetSelectorProblems verbosity problems = do case [ str | TargetSelectorUnrecognised str <- problems ] of [] -> return () targets -> die' verbosity $ unlines [ "Unrecognised target syntax for '" ++ name ++ "'." | name <- targets ] case [ (t, m, ms) | MatchingInternalError t m ms <- problems ] of [] -> return () ((target, originalMatch, renderingsAndMatches):_) -> die' verbosity $ "Internal error in target matching. It should always " ++ "be possible to find a syntax that's sufficiently qualified to " ++ "give an unambiguous match. However when matching '" ++ showTargetString target ++ "' we found " ++ showTargetSelector originalMatch ++ " (" ++ showTargetSelectorKind originalMatch ++ ") which does " ++ "not have an unambiguous syntax. The possible syntax and the " ++ "targets they match are as follows:\n" ++ unlines [ "'" ++ showTargetString rendering ++ "' which matches " ++ intercalate ", " [ showTargetSelector match ++ " (" ++ showTargetSelectorKind match ++ ")" | match <- matches ] | (rendering, matches) <- renderingsAndMatches ] case [ (t, e, g) | TargetSelectorExpected t e g <- problems ] of [] -> return () targets -> die' verbosity $ unlines [ "Unrecognised target '" ++ showTargetString target ++ "'.\n" ++ "Expected a " ++ intercalate " or " expected ++ ", rather than '" ++ got ++ "'." | (target, expected, got) <- targets ] case [ (t, e) | TargetSelectorNoSuch t e <- problems ] of [] -> return () targets -> die' verbosity $ unlines [ "Unknown target '" ++ showTargetString target ++ "'.\n" ++ unlines [ (case inside of Just (kind, "") -> "The " ++ kind ++ " has no " Just (kind, thing) -> "The " ++ kind ++ " " ++ thing ++ " has no " Nothing -> "There is no ") ++ intercalate " or " [ mungeThing thing ++ " '" ++ got ++ "'" | (thing, got, _alts) <- nosuch' ] ++ "." 
++ if null alternatives then "" else "\nPerhaps you meant " ++ intercalate ";\nor " [ "the " ++ thing ++ " '" ++ intercalate "' or '" alts ++ "'?" | (thing, alts) <- alternatives ] | (inside, nosuch') <- groupByContainer nosuch , let alternatives = [ (thing, alts) | (thing,_got,alts@(_:_)) <- nosuch' ] ] | (target, nosuch) <- targets , let groupByContainer = map (\g@((inside,_,_,_):_) -> (inside, [ (thing,got,alts) | (_,thing,got,alts) <- g ])) . groupBy ((==) `on` (\(x,_,_,_) -> x)) . sortBy (compare `on` (\(x,_,_,_) -> x)) ] where mungeThing "file" = "file target" mungeThing thing = thing case [ (t, ts) | TargetSelectorAmbiguous t ts <- problems ] of [] -> return () targets -> die' verbosity $ unlines [ "Ambiguous target '" ++ showTargetString target ++ "'. It could be:\n " ++ unlines [ " "++ showTargetString ut ++ " (" ++ showTargetSelectorKind bt ++ ")" | (ut, bt) <- amb ] | (target, amb) <- targets ] case [ t | TargetSelectorNoCurrentPackage t <- problems ] of [] -> return () target:_ -> die' verbosity $ "The target '" ++ showTargetString target ++ "' refers to the " ++ "components in the package in the current directory, but there " ++ "is no package in the current directory (or at least not listed " ++ "as part of the project)." --TODO: report a different error if there is a .cabal file but it's -- not a member of the project case [ () | TargetSelectorNoTargetsInCwd True <- problems ] of [] -> return () _:_ -> die' verbosity $ "No targets given and there is no package in the current " ++ "directory. Use the target 'all' for all packages in the " ++ "project or specify packages or components by name or location. " ++ "See 'cabal build --help' for more details on target options." case [ () | TargetSelectorNoTargetsInCwd False <- problems ] of [] -> return () _:_ -> die' verbosity $ "No targets given and there is no package in the current " ++ "directory. Specify packages or components by name or location. 
" ++ "See 'cabal build --help' for more details on target options." case [ () | TargetSelectorNoTargetsInProject <- problems ] of [] -> return () _:_ -> die' verbosity $ "There is no .cabal package file or cabal.project file. " ++ "To build packages locally you need at minimum a .cabal " ++ "file. You can use 'cabal init' to create one.\n" ++ "\n" ++ "For non-trivial projects you will also want a cabal.project " ++ "file in the root directory of your project. This file lists the " ++ "packages in your project and all other build configuration. " ++ "See the Cabal user guide for full details." case [ t | TargetSelectorNoScript t <- problems ] of [] -> return () target:_ -> die' verbosity $ "The script '" ++ showTargetString target ++ "' does not exist, " ++ "and only script targets may contain whitespace characters or end " ++ "with ':'" fail "reportTargetSelectorProblems: internal error" ---------------------------------- -- Syntax type -- -- | Syntax for the 'TargetSelector': the matcher and renderer -- data Syntax = Syntax QualLevel Matcher Renderer | AmbiguousAlternatives Syntax Syntax | ShadowingAlternatives Syntax Syntax type Matcher = TargetStringFileStatus -> Match TargetSelector type Renderer = TargetSelector -> [TargetStringFileStatus] foldSyntax :: (a -> a -> a) -> (a -> a -> a) -> (QualLevel -> Matcher -> Renderer -> a) -> (Syntax -> a) foldSyntax ambiguous unambiguous syntax = go where go (Syntax ql match render) = syntax ql match render go (AmbiguousAlternatives a b) = ambiguous (go a) (go b) go (ShadowingAlternatives a b) = unambiguous (go a) (go b) ---------------------------------- -- Top level renderer and matcher -- renderTargetSelector :: QualLevel -> TargetSelector -> [TargetStringFileStatus] renderTargetSelector ql ts = foldSyntax (++) (++) (\ql' _ render -> guard (ql == ql') >> render ts) syntax where syntax = syntaxForms emptyKnownTargets -- don't need known targets for rendering matchTargetSelector :: KnownTargets -> TargetStringFileStatus 
-> Match TargetSelector matchTargetSelector knowntargets = \usertarget -> nubMatchesBy (==) $ let ql = targetQualLevel usertarget in foldSyntax (<|>) () (\ql' match _ -> guard (ql == ql') >> match usertarget) syntax where syntax = syntaxForms knowntargets targetQualLevel TargetStringFileStatus1{} = QL1 targetQualLevel TargetStringFileStatus2{} = QL2 targetQualLevel TargetStringFileStatus3{} = QL3 targetQualLevel TargetStringFileStatus4{} = QLFull targetQualLevel TargetStringFileStatus5{} = QLFull targetQualLevel TargetStringFileStatus7{} = QLFull ---------------------------------- -- Syntax forms -- -- | All the forms of syntax for 'TargetSelector'. -- syntaxForms :: KnownTargets -> Syntax syntaxForms KnownTargets { knownPackagesAll = pinfo, knownPackagesPrimary = ppinfo, knownComponentsAll = cinfo, knownComponentsPrimary = pcinfo, knownComponentsOther = ocinfo } = -- The various forms of syntax here are ambiguous in many cases. -- Our policy is by default we expose that ambiguity and report -- ambiguous matches. In certain cases we override the ambiguity -- by having some forms shadow others. -- -- We make modules shadow files because module name "Q" clashes -- with file "Q" with no extension but these refer to the same -- thing anyway so it's not a useful ambiguity. Other cases are -- not ambiguous like "Q" vs "Q.hs" or "Data.Q" vs "Data/Q". 
ambiguousAlternatives -- convenient single-component forms [ shadowingAlternatives [ ambiguousAlternatives [ syntaxForm1All , syntaxForm1Filter ppinfo , shadowingAlternatives [ syntaxForm1Component pcinfo , syntaxForm1Package pinfo ] ] , syntaxForm1Component ocinfo , syntaxForm1Module cinfo , syntaxForm1File pinfo ] -- two-component partially qualified forms -- fully qualified form for 'all' , syntaxForm2MetaAll , syntaxForm2AllFilter , syntaxForm2NamespacePackage pinfo , syntaxForm2PackageComponent pinfo , syntaxForm2PackageFilter pinfo , syntaxForm2KindComponent cinfo , shadowingAlternatives [ syntaxForm2PackageModule pinfo , syntaxForm2PackageFile pinfo ] , shadowingAlternatives [ syntaxForm2ComponentModule cinfo , syntaxForm2ComponentFile cinfo ] -- rarely used partially qualified forms , syntaxForm3PackageKindComponent pinfo , shadowingAlternatives [ syntaxForm3PackageComponentModule pinfo , syntaxForm3PackageComponentFile pinfo ] , shadowingAlternatives [ syntaxForm3KindComponentModule cinfo , syntaxForm3KindComponentFile cinfo ] , syntaxForm3NamespacePackageFilter pinfo -- fully-qualified forms for all and cwd with filter , syntaxForm3MetaAllFilter , syntaxForm3MetaCwdFilter ppinfo -- fully-qualified form for package and package with filter , syntaxForm3MetaNamespacePackage pinfo , syntaxForm4MetaNamespacePackageFilter pinfo -- fully-qualified forms for component, module and file , syntaxForm5MetaNamespacePackageKindComponent pinfo , syntaxForm7MetaNamespacePackageKindComponentNamespaceModule pinfo , syntaxForm7MetaNamespacePackageKindComponentNamespaceFile pinfo ] where ambiguousAlternatives = Prelude.foldr1 AmbiguousAlternatives shadowingAlternatives = Prelude.foldr1 ShadowingAlternatives -- | Syntax: "all" to select all packages in the project -- -- > cabal build all -- syntaxForm1All :: Syntax syntaxForm1All = syntaxForm1 render $ \str1 _fstatus1 -> do guardMetaAll str1 return (TargetAllPackages Nothing) where render (TargetAllPackages Nothing) = 
[TargetStringFileStatus1 "all" noFileStatus] render _ = [] -- | Syntax: filter -- -- > cabal build tests -- syntaxForm1Filter :: [KnownPackage] -> Syntax syntaxForm1Filter ps = syntaxForm1 render $ \str1 _fstatus1 -> do kfilter <- matchComponentKindFilter str1 return (TargetPackage TargetImplicitCwd pids (Just kfilter)) where pids = [ pinfoId | KnownPackage{pinfoId} <- ps ] render (TargetPackage TargetImplicitCwd _ (Just kfilter)) = [TargetStringFileStatus1 (dispF kfilter) noFileStatus] render _ = [] -- | Syntax: package (name, dir or file) -- -- > cabal build foo -- > cabal build ../bar ../bar/bar.cabal -- syntaxForm1Package :: [KnownPackage] -> Syntax syntaxForm1Package pinfo = syntaxForm1 render $ \str1 fstatus1 -> do guardPackage str1 fstatus1 p <- matchPackage pinfo str1 fstatus1 case p of KnownPackage{pinfoId} -> return (TargetPackage TargetExplicitNamed [pinfoId] Nothing) KnownPackageName pn -> return (TargetPackageNamed pn Nothing) where render (TargetPackage TargetExplicitNamed [p] Nothing) = [TargetStringFileStatus1 (dispP p) noFileStatus] render (TargetPackageNamed pn Nothing) = [TargetStringFileStatus1 (dispPN pn) noFileStatus] render _ = [] -- | Syntax: component -- -- > cabal build foo -- syntaxForm1Component :: [KnownComponent] -> Syntax syntaxForm1Component cs = syntaxForm1 render $ \str1 _fstatus1 -> do guardComponentName str1 c <- matchComponentName cs str1 return (TargetComponent (cinfoPackageId c) (cinfoName c) WholeComponent) where render (TargetComponent p c WholeComponent) = [TargetStringFileStatus1 (dispC p c) noFileStatus] render _ = [] -- | Syntax: module -- -- > cabal build Data.Foo -- syntaxForm1Module :: [KnownComponent] -> Syntax syntaxForm1Module cs = syntaxForm1 render $ \str1 _fstatus1 -> do guardModuleName str1 let ms = [ (m,c) | c <- cs, m <- cinfoModules c ] (m,c) <- matchModuleNameAnd ms str1 return (TargetComponent (cinfoPackageId c) (cinfoName c) (ModuleTarget m)) where render (TargetComponent _p _c (ModuleTarget m)) = 
[TargetStringFileStatus1 (dispM m) noFileStatus] render _ = [] -- | Syntax: file name -- -- > cabal build Data/Foo.hs bar/Main.hsc -- syntaxForm1File :: [KnownPackage] -> Syntax syntaxForm1File ps = -- Note there's a bit of an inconsistency here vs the other syntax forms -- for files. For the single-part syntax the target has to point to a file -- that exists (due to our use of matchPackageDirectoryPrefix), whereas for -- all the other forms we don't require that. syntaxForm1 render $ \str1 fstatus1 -> expecting "file" str1 $ do (pkgfile, ~KnownPackage{pinfoId, pinfoComponents}) -- always returns the KnownPackage case <- matchPackageDirectoryPrefix ps fstatus1 orNoThingIn "package" (prettyShow (packageName pinfoId)) $ do (filepath, c) <- matchComponentFile pinfoComponents pkgfile return (TargetComponent pinfoId (cinfoName c) (FileTarget filepath)) where render (TargetComponent _p _c (FileTarget f)) = [TargetStringFileStatus1 f noFileStatus] render _ = [] --- -- | Syntax: :all -- -- > cabal build :all -- syntaxForm2MetaAll :: Syntax syntaxForm2MetaAll = syntaxForm2 render $ \str1 _fstatus1 str2 -> do guardNamespaceMeta str1 guardMetaAll str2 return (TargetAllPackages Nothing) where render (TargetAllPackages Nothing) = [TargetStringFileStatus2 "" noFileStatus "all"] render _ = [] -- | Syntax: all : filer -- -- > cabal build all:tests -- syntaxForm2AllFilter :: Syntax syntaxForm2AllFilter = syntaxForm2 render $ \str1 _fstatus1 str2 -> do guardMetaAll str1 kfilter <- matchComponentKindFilter str2 return (TargetAllPackages (Just kfilter)) where render (TargetAllPackages (Just kfilter)) = [TargetStringFileStatus2 "all" noFileStatus (dispF kfilter)] render _ = [] -- | Syntax: package : filer -- -- > cabal build foo:tests -- syntaxForm2PackageFilter :: [KnownPackage] -> Syntax syntaxForm2PackageFilter ps = syntaxForm2 render $ \str1 fstatus1 str2 -> do guardPackage str1 fstatus1 p <- matchPackage ps str1 fstatus1 kfilter <- matchComponentKindFilter str2 case p of 
KnownPackage{pinfoId} -> return (TargetPackage TargetExplicitNamed [pinfoId] (Just kfilter)) KnownPackageName pn -> return (TargetPackageNamed pn (Just kfilter)) where render (TargetPackage TargetExplicitNamed [p] (Just kfilter)) = [TargetStringFileStatus2 (dispP p) noFileStatus (dispF kfilter)] render (TargetPackageNamed pn (Just kfilter)) = [TargetStringFileStatus2 (dispPN pn) noFileStatus (dispF kfilter)] render _ = [] -- | Syntax: pkg : package name -- -- > cabal build pkg:foo -- syntaxForm2NamespacePackage :: [KnownPackage] -> Syntax syntaxForm2NamespacePackage pinfo = syntaxForm2 render $ \str1 _fstatus1 str2 -> do guardNamespacePackage str1 guardPackageName str2 p <- matchPackage pinfo str2 noFileStatus case p of KnownPackage{pinfoId} -> return (TargetPackage TargetExplicitNamed [pinfoId] Nothing) KnownPackageName pn -> return (TargetPackageNamed pn Nothing) where render (TargetPackage TargetExplicitNamed [p] Nothing) = [TargetStringFileStatus2 "pkg" noFileStatus (dispP p)] render (TargetPackageNamed pn Nothing) = [TargetStringFileStatus2 "pkg" noFileStatus (dispPN pn)] render _ = [] -- | Syntax: package : component -- -- > cabal build foo:foo -- > cabal build ./foo:foo -- > cabal build ./foo.cabal:foo -- syntaxForm2PackageComponent :: [KnownPackage] -> Syntax syntaxForm2PackageComponent ps = syntaxForm2 render $ \str1 fstatus1 str2 -> do guardPackage str1 fstatus1 guardComponentName str2 p <- matchPackage ps str1 fstatus1 case p of KnownPackage{pinfoId, pinfoComponents} -> orNoThingIn "package" (prettyShow (packageName pinfoId)) $ do c <- matchComponentName pinfoComponents str2 return (TargetComponent pinfoId (cinfoName c) WholeComponent) --TODO: the error here ought to say there's no component by that name in -- this package, and name the package KnownPackageName pn -> let cn = mkUnqualComponentName str2 in return (TargetComponentUnknown pn (Left cn) WholeComponent) where render (TargetComponent p c WholeComponent) = [TargetStringFileStatus2 (dispP p) 
noFileStatus (dispC p c)] render (TargetComponentUnknown pn (Left cn) WholeComponent) = [TargetStringFileStatus2 (dispPN pn) noFileStatus (prettyShow cn)] render _ = [] -- | Syntax: namespace : component -- -- > cabal build lib:foo exe:foo -- syntaxForm2KindComponent :: [KnownComponent] -> Syntax syntaxForm2KindComponent cs = syntaxForm2 render $ \str1 _fstatus1 str2 -> do ckind <- matchComponentKind str1 guardComponentName str2 c <- matchComponentKindAndName cs ckind str2 return (TargetComponent (cinfoPackageId c) (cinfoName c) WholeComponent) where render (TargetComponent p c WholeComponent) = [TargetStringFileStatus2 (dispCK c) noFileStatus (dispC p c)] render _ = [] -- | Syntax: package : module -- -- > cabal build foo:Data.Foo -- > cabal build ./foo:Data.Foo -- > cabal build ./foo.cabal:Data.Foo -- syntaxForm2PackageModule :: [KnownPackage] -> Syntax syntaxForm2PackageModule ps = syntaxForm2 render $ \str1 fstatus1 str2 -> do guardPackage str1 fstatus1 guardModuleName str2 p <- matchPackage ps str1 fstatus1 case p of KnownPackage{pinfoId, pinfoComponents} -> orNoThingIn "package" (prettyShow (packageName pinfoId)) $ do let ms = [ (m,c) | c <- pinfoComponents, m <- cinfoModules c ] (m,c) <- matchModuleNameAnd ms str2 return (TargetComponent pinfoId (cinfoName c) (ModuleTarget m)) KnownPackageName pn -> do m <- matchModuleNameUnknown str2 -- We assume the primary library component of the package: return (TargetComponentUnknown pn (Right $ CLibName LMainLibName) (ModuleTarget m)) where render (TargetComponent p _c (ModuleTarget m)) = [TargetStringFileStatus2 (dispP p) noFileStatus (dispM m)] render _ = [] -- | Syntax: component : module -- -- > cabal build foo:Data.Foo -- syntaxForm2ComponentModule :: [KnownComponent] -> Syntax syntaxForm2ComponentModule cs = syntaxForm2 render $ \str1 _fstatus1 str2 -> do guardComponentName str1 guardModuleName str2 c <- matchComponentName cs str1 orNoThingIn "component" (cinfoStrName c) $ do let ms = cinfoModules c m <- 
matchModuleName ms str2 return (TargetComponent (cinfoPackageId c) (cinfoName c) (ModuleTarget m)) where render (TargetComponent p c (ModuleTarget m)) = [TargetStringFileStatus2 (dispC p c) noFileStatus (dispM m)] render _ = [] -- | Syntax: package : filename -- -- > cabal build foo:Data/Foo.hs -- > cabal build ./foo:Data/Foo.hs -- > cabal build ./foo.cabal:Data/Foo.hs -- syntaxForm2PackageFile :: [KnownPackage] -> Syntax syntaxForm2PackageFile ps = syntaxForm2 render $ \str1 fstatus1 str2 -> do guardPackage str1 fstatus1 p <- matchPackage ps str1 fstatus1 case p of KnownPackage{pinfoId, pinfoComponents} -> orNoThingIn "package" (prettyShow (packageName pinfoId)) $ do (filepath, c) <- matchComponentFile pinfoComponents str2 return (TargetComponent pinfoId (cinfoName c) (FileTarget filepath)) KnownPackageName pn -> let filepath = str2 in -- We assume the primary library component of the package: return (TargetComponentUnknown pn (Right $ CLibName LMainLibName) (FileTarget filepath)) where render (TargetComponent p _c (FileTarget f)) = [TargetStringFileStatus2 (dispP p) noFileStatus f] render _ = [] -- | Syntax: component : filename -- -- > cabal build foo:Data/Foo.hs -- syntaxForm2ComponentFile :: [KnownComponent] -> Syntax syntaxForm2ComponentFile cs = syntaxForm2 render $ \str1 _fstatus1 str2 -> do guardComponentName str1 c <- matchComponentName cs str1 orNoThingIn "component" (cinfoStrName c) $ do (filepath, _) <- matchComponentFile [c] str2 return (TargetComponent (cinfoPackageId c) (cinfoName c) (FileTarget filepath)) where render (TargetComponent p c (FileTarget f)) = [TargetStringFileStatus2 (dispC p c) noFileStatus f] render _ = [] --- -- | Syntax: :all : filter -- -- > cabal build :all:tests -- syntaxForm3MetaAllFilter :: Syntax syntaxForm3MetaAllFilter = syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do guardNamespaceMeta str1 guardMetaAll str2 kfilter <- matchComponentKindFilter str3 return (TargetAllPackages (Just kfilter)) where render 
(TargetAllPackages (Just kfilter)) = [TargetStringFileStatus3 "" noFileStatus "all" (dispF kfilter)] render _ = [] syntaxForm3MetaCwdFilter :: [KnownPackage] -> Syntax syntaxForm3MetaCwdFilter ps = syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do guardNamespaceMeta str1 guardNamespaceCwd str2 kfilter <- matchComponentKindFilter str3 return (TargetPackage TargetImplicitCwd pids (Just kfilter)) where pids = [ pinfoId | KnownPackage{pinfoId} <- ps ] render (TargetPackage TargetImplicitCwd _ (Just kfilter)) = [TargetStringFileStatus3 "" noFileStatus "cwd" (dispF kfilter)] render _ = [] -- | Syntax: :pkg : package name -- -- > cabal build :pkg:foo -- syntaxForm3MetaNamespacePackage :: [KnownPackage] -> Syntax syntaxForm3MetaNamespacePackage pinfo = syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do guardNamespaceMeta str1 guardNamespacePackage str2 guardPackageName str3 p <- matchPackage pinfo str3 noFileStatus case p of KnownPackage{pinfoId} -> return (TargetPackage TargetExplicitNamed [pinfoId] Nothing) KnownPackageName pn -> return (TargetPackageNamed pn Nothing) where render (TargetPackage TargetExplicitNamed [p] Nothing) = [TargetStringFileStatus3 "" noFileStatus "pkg" (dispP p)] render (TargetPackageNamed pn Nothing) = [TargetStringFileStatus3 "" noFileStatus "pkg" (dispPN pn)] render _ = [] -- | Syntax: package : namespace : component -- -- > cabal build foo:lib:foo -- > cabal build foo/:lib:foo -- > cabal build foo.cabal:lib:foo -- syntaxForm3PackageKindComponent :: [KnownPackage] -> Syntax syntaxForm3PackageKindComponent ps = syntaxForm3 render $ \str1 fstatus1 str2 str3 -> do guardPackage str1 fstatus1 ckind <- matchComponentKind str2 guardComponentName str3 p <- matchPackage ps str1 fstatus1 case p of KnownPackage{pinfoId, pinfoComponents} -> orNoThingIn "package" (prettyShow (packageName pinfoId)) $ do c <- matchComponentKindAndName pinfoComponents ckind str3 return (TargetComponent pinfoId (cinfoName c) WholeComponent) KnownPackageName pn -> let cn = 
mkComponentName pn ckind (mkUnqualComponentName str3) in return (TargetComponentUnknown pn (Right cn) WholeComponent) where render (TargetComponent p c WholeComponent) = [TargetStringFileStatus3 (dispP p) noFileStatus (dispCK c) (dispC p c)] render (TargetComponentUnknown pn (Right c) WholeComponent) = [TargetStringFileStatus3 (dispPN pn) noFileStatus (dispCK c) (dispC' pn c)] render _ = [] -- | Syntax: package : component : module -- -- > cabal build foo:foo:Data.Foo -- > cabal build foo/:foo:Data.Foo -- > cabal build foo.cabal:foo:Data.Foo -- syntaxForm3PackageComponentModule :: [KnownPackage] -> Syntax syntaxForm3PackageComponentModule ps = syntaxForm3 render $ \str1 fstatus1 str2 str3 -> do guardPackage str1 fstatus1 guardComponentName str2 guardModuleName str3 p <- matchPackage ps str1 fstatus1 case p of KnownPackage{pinfoId, pinfoComponents} -> orNoThingIn "package" (prettyShow (packageName pinfoId)) $ do c <- matchComponentName pinfoComponents str2 orNoThingIn "component" (cinfoStrName c) $ do let ms = cinfoModules c m <- matchModuleName ms str3 return (TargetComponent pinfoId (cinfoName c) (ModuleTarget m)) KnownPackageName pn -> do let cn = mkUnqualComponentName str2 m <- matchModuleNameUnknown str3 return (TargetComponentUnknown pn (Left cn) (ModuleTarget m)) where render (TargetComponent p c (ModuleTarget m)) = [TargetStringFileStatus3 (dispP p) noFileStatus (dispC p c) (dispM m)] render (TargetComponentUnknown pn (Left c) (ModuleTarget m)) = [TargetStringFileStatus3 (dispPN pn) noFileStatus (dispCN c) (dispM m)] render _ = [] -- | Syntax: namespace : component : module -- -- > cabal build lib:foo:Data.Foo -- syntaxForm3KindComponentModule :: [KnownComponent] -> Syntax syntaxForm3KindComponentModule cs = syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do ckind <- matchComponentKind str1 guardComponentName str2 guardModuleName str3 c <- matchComponentKindAndName cs ckind str2 orNoThingIn "component" (cinfoStrName c) $ do let ms = cinfoModules c m <- 
matchModuleName ms str3 return (TargetComponent (cinfoPackageId c) (cinfoName c) (ModuleTarget m)) where render (TargetComponent p c (ModuleTarget m)) = [TargetStringFileStatus3 (dispCK c) noFileStatus (dispC p c) (dispM m)] render _ = [] -- | Syntax: package : component : filename -- -- > cabal build foo:foo:Data/Foo.hs -- > cabal build foo/:foo:Data/Foo.hs -- > cabal build foo.cabal:foo:Data/Foo.hs -- syntaxForm3PackageComponentFile :: [KnownPackage] -> Syntax syntaxForm3PackageComponentFile ps = syntaxForm3 render $ \str1 fstatus1 str2 str3 -> do guardPackage str1 fstatus1 guardComponentName str2 p <- matchPackage ps str1 fstatus1 case p of KnownPackage{pinfoId, pinfoComponents} -> orNoThingIn "package" (prettyShow (packageName pinfoId)) $ do c <- matchComponentName pinfoComponents str2 orNoThingIn "component" (cinfoStrName c) $ do (filepath, _) <- matchComponentFile [c] str3 return (TargetComponent pinfoId (cinfoName c) (FileTarget filepath)) KnownPackageName pn -> let cn = mkUnqualComponentName str2 filepath = str3 in return (TargetComponentUnknown pn (Left cn) (FileTarget filepath)) where render (TargetComponent p c (FileTarget f)) = [TargetStringFileStatus3 (dispP p) noFileStatus (dispC p c) f] render (TargetComponentUnknown pn (Left c) (FileTarget f)) = [TargetStringFileStatus3 (dispPN pn) noFileStatus (dispCN c) f] render _ = [] -- | Syntax: namespace : component : filename -- -- > cabal build lib:foo:Data/Foo.hs -- syntaxForm3KindComponentFile :: [KnownComponent] -> Syntax syntaxForm3KindComponentFile cs = syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do ckind <- matchComponentKind str1 guardComponentName str2 c <- matchComponentKindAndName cs ckind str2 orNoThingIn "component" (cinfoStrName c) $ do (filepath, _) <- matchComponentFile [c] str3 return (TargetComponent (cinfoPackageId c) (cinfoName c) (FileTarget filepath)) where render (TargetComponent p c (FileTarget f)) = [TargetStringFileStatus3 (dispCK c) noFileStatus (dispC p c) f] render _ = [] 
syntaxForm3NamespacePackageFilter :: [KnownPackage] -> Syntax syntaxForm3NamespacePackageFilter ps = syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do guardNamespacePackage str1 guardPackageName str2 p <- matchPackage ps str2 noFileStatus kfilter <- matchComponentKindFilter str3 case p of KnownPackage{pinfoId} -> return (TargetPackage TargetExplicitNamed [pinfoId] (Just kfilter)) KnownPackageName pn -> return (TargetPackageNamed pn (Just kfilter)) where render (TargetPackage TargetExplicitNamed [p] (Just kfilter)) = [TargetStringFileStatus3 "pkg" noFileStatus (dispP p) (dispF kfilter)] render (TargetPackageNamed pn (Just kfilter)) = [TargetStringFileStatus3 "pkg" noFileStatus (dispPN pn) (dispF kfilter)] render _ = [] -- syntaxForm4MetaNamespacePackageFilter :: [KnownPackage] -> Syntax syntaxForm4MetaNamespacePackageFilter ps = syntaxForm4 render $ \str1 str2 str3 str4 -> do guardNamespaceMeta str1 guardNamespacePackage str2 guardPackageName str3 p <- matchPackage ps str3 noFileStatus kfilter <- matchComponentKindFilter str4 case p of KnownPackage{pinfoId} -> return (TargetPackage TargetExplicitNamed [pinfoId] (Just kfilter)) KnownPackageName pn -> return (TargetPackageNamed pn (Just kfilter)) where render (TargetPackage TargetExplicitNamed [p] (Just kfilter)) = [TargetStringFileStatus4 "" "pkg" (dispP p) (dispF kfilter)] render (TargetPackageNamed pn (Just kfilter)) = [TargetStringFileStatus4 "" "pkg" (dispPN pn) (dispF kfilter)] render _ = [] -- | Syntax: :pkg : package : namespace : component -- -- > cabal build :pkg:foo:lib:foo -- syntaxForm5MetaNamespacePackageKindComponent :: [KnownPackage] -> Syntax syntaxForm5MetaNamespacePackageKindComponent ps = syntaxForm5 render $ \str1 str2 str3 str4 str5 -> do guardNamespaceMeta str1 guardNamespacePackage str2 guardPackageName str3 ckind <- matchComponentKind str4 guardComponentName str5 p <- matchPackage ps str3 noFileStatus case p of KnownPackage{pinfoId, pinfoComponents} -> orNoThingIn "package" (prettyShow 
(packageName pinfoId)) $ do c <- matchComponentKindAndName pinfoComponents ckind str5 return (TargetComponent pinfoId (cinfoName c) WholeComponent) KnownPackageName pn -> let cn = mkComponentName pn ckind (mkUnqualComponentName str5) in return (TargetComponentUnknown pn (Right cn) WholeComponent) where render (TargetComponent p c WholeComponent) = [TargetStringFileStatus5 "" "pkg" (dispP p) (dispCK c) (dispC p c)] render (TargetComponentUnknown pn (Right c) WholeComponent) = [TargetStringFileStatus5 "" "pkg" (dispPN pn) (dispCK c) (dispC' pn c)] render _ = [] -- | Syntax: :pkg : package : namespace : component : module : module -- -- > cabal build :pkg:foo:lib:foo:module:Data.Foo -- syntaxForm7MetaNamespacePackageKindComponentNamespaceModule :: [KnownPackage] -> Syntax syntaxForm7MetaNamespacePackageKindComponentNamespaceModule ps = syntaxForm7 render $ \str1 str2 str3 str4 str5 str6 str7 -> do guardNamespaceMeta str1 guardNamespacePackage str2 guardPackageName str3 ckind <- matchComponentKind str4 guardComponentName str5 guardNamespaceModule str6 p <- matchPackage ps str3 noFileStatus case p of KnownPackage{pinfoId, pinfoComponents} -> orNoThingIn "package" (prettyShow (packageName pinfoId)) $ do c <- matchComponentKindAndName pinfoComponents ckind str5 orNoThingIn "component" (cinfoStrName c) $ do let ms = cinfoModules c m <- matchModuleName ms str7 return (TargetComponent pinfoId (cinfoName c) (ModuleTarget m)) KnownPackageName pn -> do let cn = mkComponentName pn ckind (mkUnqualComponentName str2) m <- matchModuleNameUnknown str7 return (TargetComponentUnknown pn (Right cn) (ModuleTarget m)) where render (TargetComponent p c (ModuleTarget m)) = [TargetStringFileStatus7 "" "pkg" (dispP p) (dispCK c) (dispC p c) "module" (dispM m)] render (TargetComponentUnknown pn (Right c) (ModuleTarget m)) = [TargetStringFileStatus7 "" "pkg" (dispPN pn) (dispCK c) (dispC' pn c) "module" (dispM m)] render _ = [] -- | Syntax: :pkg : package : namespace : component : file : 
filename -- -- > cabal build :pkg:foo:lib:foo:file:Data/Foo.hs -- syntaxForm7MetaNamespacePackageKindComponentNamespaceFile :: [KnownPackage] -> Syntax syntaxForm7MetaNamespacePackageKindComponentNamespaceFile ps = syntaxForm7 render $ \str1 str2 str3 str4 str5 str6 str7 -> do guardNamespaceMeta str1 guardNamespacePackage str2 guardPackageName str3 ckind <- matchComponentKind str4 guardComponentName str5 guardNamespaceFile str6 p <- matchPackage ps str3 noFileStatus case p of KnownPackage{pinfoId, pinfoComponents} -> orNoThingIn "package" (prettyShow (packageName pinfoId)) $ do c <- matchComponentKindAndName pinfoComponents ckind str5 orNoThingIn "component" (cinfoStrName c) $ do (filepath,_) <- matchComponentFile [c] str7 return (TargetComponent pinfoId (cinfoName c) (FileTarget filepath)) KnownPackageName pn -> let cn = mkComponentName pn ckind (mkUnqualComponentName str5) filepath = str7 in return (TargetComponentUnknown pn (Right cn) (FileTarget filepath)) where render (TargetComponent p c (FileTarget f)) = [TargetStringFileStatus7 "" "pkg" (dispP p) (dispCK c) (dispC p c) "file" f] render (TargetComponentUnknown pn (Right c) (FileTarget f)) = [TargetStringFileStatus7 "" "pkg" (dispPN pn) (dispCK c) (dispC' pn c) "file" f] render _ = [] --------------------------------------- -- Syntax utils -- type Match1 = String -> FileStatus -> Match TargetSelector type Match2 = String -> FileStatus -> String -> Match TargetSelector type Match3 = String -> FileStatus -> String -> String -> Match TargetSelector type Match4 = String -> String -> String -> String -> Match TargetSelector type Match5 = String -> String -> String -> String -> String -> Match TargetSelector type Match7 = String -> String -> String -> String -> String -> String -> String -> Match TargetSelector syntaxForm1 :: Renderer -> Match1 -> Syntax syntaxForm2 :: Renderer -> Match2 -> Syntax syntaxForm3 :: Renderer -> Match3 -> Syntax syntaxForm4 :: Renderer -> Match4 -> Syntax syntaxForm5 :: Renderer -> 
Match5 -> Syntax syntaxForm7 :: Renderer -> Match7 -> Syntax syntaxForm1 render f = Syntax QL1 match render where match = \(TargetStringFileStatus1 str1 fstatus1) -> f str1 fstatus1 syntaxForm2 render f = Syntax QL2 match render where match = \(TargetStringFileStatus2 str1 fstatus1 str2) -> f str1 fstatus1 str2 syntaxForm3 render f = Syntax QL3 match render where match = \(TargetStringFileStatus3 str1 fstatus1 str2 str3) -> f str1 fstatus1 str2 str3 syntaxForm4 render f = Syntax QLFull match render where match (TargetStringFileStatus4 str1 str2 str3 str4) = f str1 str2 str3 str4 match _ = mzero syntaxForm5 render f = Syntax QLFull match render where match (TargetStringFileStatus5 str1 str2 str3 str4 str5) = f str1 str2 str3 str4 str5 match _ = mzero syntaxForm7 render f = Syntax QLFull match render where match (TargetStringFileStatus7 str1 str2 str3 str4 str5 str6 str7) = f str1 str2 str3 str4 str5 str6 str7 match _ = mzero dispP :: Package p => p -> String dispP = prettyShow . packageName dispPN :: PackageName -> String dispPN = prettyShow dispC :: PackageId -> ComponentName -> String dispC = componentStringName . packageName dispC' :: PackageName -> ComponentName -> String dispC' = componentStringName dispCN :: UnqualComponentName -> String dispCN = prettyShow dispK :: ComponentKind -> String dispK = showComponentKindShort dispCK :: ComponentName -> String dispCK = dispK . 
componentKind dispF :: ComponentKind -> String dispF = showComponentKindFilterShort dispM :: ModuleName -> String dispM = prettyShow ------------------------------- -- Package and component info -- data KnownTargets = KnownTargets { knownPackagesAll :: [KnownPackage], knownPackagesPrimary :: [KnownPackage], knownPackagesOther :: [KnownPackage], knownComponentsAll :: [KnownComponent], knownComponentsPrimary :: [KnownComponent], knownComponentsOther :: [KnownComponent] } deriving Show data KnownPackage = KnownPackage { pinfoId :: PackageId, pinfoDirectory :: Maybe (FilePath, FilePath), pinfoPackageFile :: Maybe (FilePath, FilePath), pinfoComponents :: [KnownComponent] } | KnownPackageName { pinfoName :: PackageName } deriving Show data KnownComponent = KnownComponent { cinfoName :: ComponentName, cinfoStrName :: ComponentStringName, cinfoPackageId :: PackageId, cinfoSrcDirs :: [FilePath], cinfoModules :: [ModuleName], cinfoHsFiles :: [FilePath], -- other hs files (like main.hs) cinfoCFiles :: [FilePath], cinfoJsFiles :: [FilePath] } deriving Show type ComponentStringName = String knownPackageName :: KnownPackage -> PackageName knownPackageName KnownPackage{pinfoId} = packageName pinfoId knownPackageName KnownPackageName{pinfoName} = pinfoName emptyKnownTargets :: KnownTargets emptyKnownTargets = KnownTargets [] [] [] [] [] [] getKnownTargets :: forall m a. 
(Applicative m, Monad m) => DirActions m -> [PackageSpecifier (SourcePackage (PackageLocation a))] -> m KnownTargets getKnownTargets dirActions@DirActions{..} pkgs = do pinfo <- traverse (collectKnownPackageInfo dirActions) pkgs cwd <- getCurrentDirectory (ppinfo, opinfo) <- selectPrimaryPackage cwd pinfo return KnownTargets { knownPackagesAll = pinfo, knownPackagesPrimary = ppinfo, knownPackagesOther = opinfo, knownComponentsAll = allComponentsIn pinfo, knownComponentsPrimary = allComponentsIn ppinfo, knownComponentsOther = allComponentsIn opinfo } where mPkgDir :: KnownPackage -> Maybe FilePath mPkgDir KnownPackage { pinfoDirectory = Just (dir,_) } = Just dir mPkgDir _ = Nothing selectPrimaryPackage :: FilePath -> [KnownPackage] -> m ([KnownPackage], [KnownPackage]) selectPrimaryPackage _ [] = return ([] , []) selectPrimaryPackage cwd (pkg : packages) = do (ppinfo, opinfo) <- selectPrimaryPackage cwd packages isPkgDirCwd <- maybe (pure False) (compareFilePath dirActions cwd) (mPkgDir pkg) return (if isPkgDirCwd then (pkg : ppinfo, opinfo) else (ppinfo, pkg : opinfo)) allComponentsIn ps = [ c | KnownPackage{pinfoComponents} <- ps, c <- pinfoComponents ] collectKnownPackageInfo :: (Applicative m, Monad m) => DirActions m -> PackageSpecifier (SourcePackage (PackageLocation a)) -> m KnownPackage collectKnownPackageInfo _ (NamedPackage pkgname _props) = return (KnownPackageName pkgname) collectKnownPackageInfo dirActions@DirActions{..} (SpecificSourcePackage SourcePackage { srcpkgDescription = pkg, srcpkgSource = loc }) = do (pkgdir, pkgfile) <- case loc of --TODO: local tarballs, remote tarballs etc LocalUnpackedPackage dir -> do dirabs <- canonicalizePath dir dirrel <- makeRelativeToCwd dirActions dirabs --TODO: ought to get this earlier in project reading let fileabs = dirabs prettyShow (packageName pkg) <.> "cabal" filerel = dirrel prettyShow (packageName pkg) <.> "cabal" exists <- doesFileExist fileabs return ( Just (dirabs, dirrel) , if exists then Just 
(fileabs, filerel) else Nothing ) _ -> return (Nothing, Nothing) let pinfo = KnownPackage { pinfoId = packageId pkg, pinfoDirectory = pkgdir, pinfoPackageFile = pkgfile, pinfoComponents = collectKnownComponentInfo (flattenPackageDescription pkg) } return pinfo collectKnownComponentInfo :: PackageDescription -> [KnownComponent] collectKnownComponentInfo pkg = [ KnownComponent { cinfoName = componentName c, cinfoStrName = componentStringName (packageName pkg) (componentName c), cinfoPackageId = packageId pkg, cinfoSrcDirs = ordNub (map getSymbolicPath (hsSourceDirs bi)), cinfoModules = ordNub (componentModules c), cinfoHsFiles = ordNub (componentHsFiles c), cinfoCFiles = ordNub (cSources bi), cinfoJsFiles = ordNub (jsSources bi) } | c <- pkgComponents pkg , let bi = componentBuildInfo c ] componentStringName :: PackageName -> ComponentName -> ComponentStringName componentStringName pkgname (CLibName LMainLibName) = prettyShow pkgname componentStringName _ (CLibName (LSubLibName name)) = unUnqualComponentName name componentStringName _ (CFLibName name) = unUnqualComponentName name componentStringName _ (CExeName name) = unUnqualComponentName name componentStringName _ (CTestName name) = unUnqualComponentName name componentStringName _ (CBenchName name) = unUnqualComponentName name componentModules :: Component -> [ModuleName] -- I think it's unlikely users will ask to build a requirement -- which is not mentioned locally. 
componentModules (CLib lib) = explicitLibModules lib componentModules (CFLib flib) = foreignLibModules flib componentModules (CExe exe) = exeModules exe componentModules (CTest test) = testModules test componentModules (CBench bench) = benchmarkModules bench componentHsFiles :: Component -> [FilePath] componentHsFiles (CExe exe) = [modulePath exe] componentHsFiles (CTest TestSuite { testInterface = TestSuiteExeV10 _ mainfile }) = [mainfile] componentHsFiles (CBench Benchmark { benchmarkInterface = BenchmarkExeV10 _ mainfile }) = [mainfile] componentHsFiles _ = [] ------------------------------ -- Matching meta targets -- guardNamespaceMeta :: String -> Match () guardNamespaceMeta = guardToken [""] "meta namespace" guardMetaAll :: String -> Match () guardMetaAll = guardToken ["all"] "meta-target 'all'" guardNamespacePackage :: String -> Match () guardNamespacePackage = guardToken ["pkg", "package"] "'pkg' namespace" guardNamespaceCwd :: String -> Match () guardNamespaceCwd = guardToken ["cwd"] "'cwd' namespace" guardNamespaceModule :: String -> Match () guardNamespaceModule = guardToken ["mod", "module"] "'module' namespace" guardNamespaceFile :: String -> Match () guardNamespaceFile = guardToken ["file"] "'file' namespace" guardToken :: [String] -> String -> String -> Match () guardToken tokens msg s | caseFold s `elem` tokens = increaseConfidence | otherwise = matchErrorExpected msg s ------------------------------ -- Matching component kinds -- componentKind :: ComponentName -> ComponentKind componentKind (CLibName _) = LibKind componentKind (CFLibName _) = FLibKind componentKind (CExeName _) = ExeKind componentKind (CTestName _) = TestKind componentKind (CBenchName _) = BenchKind cinfoKind :: KnownComponent -> ComponentKind cinfoKind = componentKind . 
cinfoName matchComponentKind :: String -> Match ComponentKind matchComponentKind s | s' `elem` liblabels = increaseConfidence >> return LibKind | s' `elem` fliblabels = increaseConfidence >> return FLibKind | s' `elem` exelabels = increaseConfidence >> return ExeKind | s' `elem` testlabels = increaseConfidence >> return TestKind | s' `elem` benchlabels = increaseConfidence >> return BenchKind | otherwise = matchErrorExpected "component kind" s where s' = caseFold s liblabels = ["lib", "library"] fliblabels = ["flib", "foreign-library"] exelabels = ["exe", "executable"] testlabels = ["tst", "test", "test-suite"] benchlabels = ["bench", "benchmark"] matchComponentKindFilter :: String -> Match ComponentKind matchComponentKindFilter s | s' `elem` liblabels = increaseConfidence >> return LibKind | s' `elem` fliblabels = increaseConfidence >> return FLibKind | s' `elem` exelabels = increaseConfidence >> return ExeKind | s' `elem` testlabels = increaseConfidence >> return TestKind | s' `elem` benchlabels = increaseConfidence >> return BenchKind | otherwise = matchErrorExpected "component kind filter" s where s' = caseFold s liblabels = ["libs", "libraries"] fliblabels = ["flibs", "foreign-libraries"] exelabels = ["exes", "executables"] testlabels = ["tests", "test-suites"] benchlabels = ["benches", "benchmarks"] showComponentKind :: ComponentKind -> String showComponentKind LibKind = "library" showComponentKind FLibKind = "foreign library" showComponentKind ExeKind = "executable" showComponentKind TestKind = "test-suite" showComponentKind BenchKind = "benchmark" showComponentKindShort :: ComponentKind -> String showComponentKindShort LibKind = "lib" showComponentKindShort FLibKind = "flib" showComponentKindShort ExeKind = "exe" showComponentKindShort TestKind = "test" showComponentKindShort BenchKind = "bench" showComponentKindFilterShort :: ComponentKind -> String showComponentKindFilterShort LibKind = "libs" showComponentKindFilterShort FLibKind = "flibs" 
showComponentKindFilterShort ExeKind = "exes" showComponentKindFilterShort TestKind = "tests" showComponentKindFilterShort BenchKind = "benchmarks" ------------------------------ -- Matching package targets -- guardPackage :: String -> FileStatus -> Match () guardPackage str fstatus = guardPackageName str <|> guardPackageDir str fstatus <|> guardPackageFile str fstatus guardPackageName :: String -> Match () guardPackageName s | validPackageName s = increaseConfidence | otherwise = matchErrorExpected "package name" s validPackageName :: String -> Bool validPackageName s = all validPackageNameChar s && not (null s) where validPackageNameChar c = isAlphaNum c || c == '-' guardPackageDir :: String -> FileStatus -> Match () guardPackageDir _ (FileStatusExistsDir _) = increaseConfidence guardPackageDir str _ = matchErrorExpected "package directory" str guardPackageFile :: String -> FileStatus -> Match () guardPackageFile _ (FileStatusExistsFile file) | takeExtension file == ".cabal" = increaseConfidence guardPackageFile str _ = matchErrorExpected "package .cabal file" str matchPackage :: [KnownPackage] -> String -> FileStatus -> Match KnownPackage matchPackage pinfo = \str fstatus -> orNoThingIn "project" "" $ matchPackageName pinfo str (matchPackageNameUnknown str <|> matchPackageDir pinfo str fstatus <|> matchPackageFile pinfo str fstatus) matchPackageName :: [KnownPackage] -> String -> Match KnownPackage matchPackageName ps = \str -> do guard (validPackageName str) orNoSuchThing "package" str (map (prettyShow . knownPackageName) ps) $ increaseConfidenceFor $ matchInexactly caseFold (prettyShow . 
knownPackageName) ps str matchPackageNameUnknown :: String -> Match KnownPackage matchPackageNameUnknown str = do pn <- matchParse str unknownMatch (KnownPackageName pn) matchPackageDir :: [KnownPackage] -> String -> FileStatus -> Match KnownPackage matchPackageDir ps = \str fstatus -> case fstatus of FileStatusExistsDir canondir -> orNoSuchThing "package directory" str (map (snd . fst) dirs) $ increaseConfidenceFor $ fmap snd $ matchExactly (fst . fst) dirs canondir _ -> mzero where dirs = [ ((dabs,drel),p) | p@KnownPackage{ pinfoDirectory = Just (dabs,drel) } <- ps ] matchPackageFile :: [KnownPackage] -> String -> FileStatus -> Match KnownPackage matchPackageFile ps = \str fstatus -> do case fstatus of FileStatusExistsFile canonfile -> orNoSuchThing "package .cabal file" str (map (snd . fst) files) $ increaseConfidenceFor $ fmap snd $ matchExactly (fst . fst) files canonfile _ -> mzero where files = [ ((fabs,frel),p) | p@KnownPackage{ pinfoPackageFile = Just (fabs,frel) } <- ps ] --TODO: test outcome when dir exists but doesn't match any known one --TODO: perhaps need another distinction, vs no such thing, point is the -- thing is not known, within the project, but could be outside project ------------------------------ -- Matching component targets -- guardComponentName :: String -> Match () guardComponentName s | all validComponentChar s && not (null s) = increaseConfidence | otherwise = matchErrorExpected "component name" s where validComponentChar c = isAlphaNum c || c == '.' 
|| c == '_' || c == '-' || c == '\'' matchComponentName :: [KnownComponent] -> String -> Match KnownComponent matchComponentName cs str = orNoSuchThing "component" str (map cinfoStrName cs) $ increaseConfidenceFor $ matchInexactly caseFold cinfoStrName cs str matchComponentKindAndName :: [KnownComponent] -> ComponentKind -> String -> Match KnownComponent matchComponentKindAndName cs ckind str = orNoSuchThing (showComponentKind ckind ++ " component") str (map render cs) $ increaseConfidenceFor $ matchInexactly (\(ck, cn) -> (ck, caseFold cn)) (\c -> (cinfoKind c, cinfoStrName c)) cs (ckind, str) where render c = showComponentKindShort (cinfoKind c) ++ ":" ++ cinfoStrName c ------------------------------ -- Matching module targets -- guardModuleName :: String -> Match () guardModuleName s = case simpleParsec s :: Maybe ModuleName of Just _ -> increaseConfidence _ | all validModuleChar s && not (null s) -> return () | otherwise -> matchErrorExpected "module name" s where validModuleChar c = isAlphaNum c || c == '.' || c == '_' || c == '\'' matchModuleName :: [ModuleName] -> String -> Match ModuleName matchModuleName ms str = orNoSuchThing "module" str (map prettyShow ms) $ increaseConfidenceFor $ matchInexactly caseFold prettyShow ms str matchModuleNameAnd :: [(ModuleName, a)] -> String -> Match (ModuleName, a) matchModuleNameAnd ms str = orNoSuchThing "module" str (map (prettyShow . fst) ms) $ increaseConfidenceFor $ matchInexactly caseFold (prettyShow . 
fst) ms str matchModuleNameUnknown :: String -> Match ModuleName matchModuleNameUnknown str = expecting "module" str $ increaseConfidenceFor $ matchParse str ------------------------------ -- Matching file targets -- matchPackageDirectoryPrefix :: [KnownPackage] -> FileStatus -> Match (FilePath, KnownPackage) matchPackageDirectoryPrefix ps (FileStatusExistsFile filepath) = increaseConfidenceFor $ matchDirectoryPrefix pkgdirs filepath where pkgdirs = [ (dir, p) | p@KnownPackage { pinfoDirectory = Just (dir,_) } <- ps ] matchPackageDirectoryPrefix _ _ = mzero matchComponentFile :: [KnownComponent] -> String -> Match (FilePath, KnownComponent) matchComponentFile cs str = orNoSuchThing "file" str [] $ matchComponentModuleFile cs str <|> matchComponentOtherFile cs str matchComponentOtherFile :: [KnownComponent] -> String -> Match (FilePath, KnownComponent) matchComponentOtherFile cs = matchFile [ (normalise (srcdir file), c) | c <- cs , srcdir <- cinfoSrcDirs c , file <- cinfoHsFiles c ++ cinfoCFiles c ++ cinfoJsFiles c ] . normalise matchComponentModuleFile :: [KnownComponent] -> String -> Match (FilePath, KnownComponent) matchComponentModuleFile cs str = do matchFile [ (normalise (d toFilePath m), c) | c <- cs , d <- cinfoSrcDirs c , m <- cinfoModules c ] (dropExtension (normalise str)) -- Drop the extension because FileTarget -- is stored without the extension -- utils -- | Compare two filepaths for equality using DirActions' canonicalizePath -- to normalize AND canonicalize filepaths before comparison. compareFilePath :: (Applicative m, Monad m) => DirActions m -> FilePath -> FilePath -> m Bool compareFilePath DirActions{..} fp1 fp2 | equalFilePath fp1 fp2 = pure True -- avoid unnecessary IO if we can match earlier | otherwise = do c1 <- canonicalizePath fp1 c2 <- canonicalizePath fp2 pure $ equalFilePath c1 c2 matchFile :: [(FilePath, a)] -> FilePath -> Match (FilePath, a) matchFile fs = increaseConfidenceFor . 
matchInexactly caseFold fst fs matchDirectoryPrefix :: [(FilePath, a)] -> FilePath -> Match (FilePath, a) matchDirectoryPrefix dirs filepath = tryEach $ [ (file, x) | (dir,x) <- dirs , file <- maybeToList (stripDirectory dir) ] where stripDirectory :: FilePath -> Maybe FilePath stripDirectory dir = joinPath `fmap` stripPrefix (splitDirectories dir) filepathsplit filepathsplit = splitDirectories filepath ------------------------------ -- Matching monad -- -- | A matcher embodies a way to match some input as being some recognised -- value. In particular it deals with multiple and ambiguous matches. -- -- There are various matcher primitives ('matchExactly', 'matchInexactly'), -- ways to combine matchers ('matchPlus', 'matchPlusShadowing') and finally we -- can run a matcher against an input using 'findMatch'. -- data Match a = NoMatch !Confidence [MatchError] | Match !MatchClass !Confidence [a] deriving Show -- | The kind of match, inexact or exact. We keep track of this so we can -- prefer exact over inexact matches. The 'Ord' here is important: we try -- to maximise this, so 'Exact' is the top value and 'Inexact' the bottom. -- data MatchClass = Unknown -- ^ Matches an unknown thing e.g. parses as a package -- name without it being a specific known package | Inexact -- ^ Matches a known thing inexactly -- e.g. matches a known package case insensitively | Exact -- ^ Exactly matches a known thing, -- e.g. 
matches a known package case sensitively deriving (Show, Eq, Ord) type Confidence = Int data MatchError = MatchErrorExpected String String -- thing got | MatchErrorNoSuch String String [String] -- thing got alts | MatchErrorIn String String MatchError -- kind thing deriving (Show, Eq) instance Functor Match where fmap _ (NoMatch d ms) = NoMatch d ms fmap f (Match m d xs) = Match m d (fmap f xs) instance Applicative Match where pure a = Match Exact 0 [a] (<*>) = ap instance Alternative Match where empty = NoMatch 0 [] (<|>) = matchPlus instance Monad Match where return = pure NoMatch d ms >>= _ = NoMatch d ms Match m d xs >>= f = -- To understand this, it needs to be read in context with the -- implementation of 'matchPlus' below case msum (map f xs) of Match m' d' xs' -> Match (min m m') (d + d') xs' -- The minimum match class is the one we keep. The match depth is -- tracked but not used in the Match case. NoMatch d' ms -> NoMatch (d + d') ms -- Here is where we transfer the depth we were keeping track of in -- the Match case over to the NoMatch case where it finally gets used. instance MonadPlus Match where mzero = empty mplus = matchPlus () :: Match a -> Match a -> Match a () = matchPlusShadowing infixl 3 -- | Combine two matchers. Exact matches are used over inexact matches -- but if we have multiple exact, or inexact then the we collect all the -- ambiguous matches. -- -- This operator is associative, has unit 'mzero' and is also commutative. -- matchPlus :: Match a -> Match a -> Match a matchPlus a@(Match _ _ _ ) (NoMatch _ _) = a matchPlus (NoMatch _ _ ) b@(Match _ _ _) = b matchPlus a@(NoMatch d_a ms_a) b@(NoMatch d_b ms_b) | d_a > d_b = a -- We only really make use of the depth in the NoMatch case. 
| d_a < d_b = b | otherwise = NoMatch d_a (ms_a ++ ms_b) matchPlus a@(Match m_a d_a xs_a) b@(Match m_b d_b xs_b) | m_a > m_b = a -- exact over inexact | m_a < m_b = b -- exact over inexact | otherwise = Match m_a (max d_a d_b) (xs_a ++ xs_b) -- | Combine two matchers. This is similar to 'matchPlus' with the -- difference that an exact match from the left matcher shadows any exact -- match on the right. Inexact matches are still collected however. -- -- This operator is associative, has unit 'mzero' and is not commutative. -- matchPlusShadowing :: Match a -> Match a -> Match a matchPlusShadowing a@(Match Exact _ _) _ = a matchPlusShadowing a b = matchPlus a b ------------------------------ -- Various match primitives -- matchErrorExpected :: String -> String -> Match a matchErrorExpected thing got = NoMatch 0 [MatchErrorExpected thing got] matchErrorNoSuch :: String -> String -> [String] -> Match a matchErrorNoSuch thing got alts = NoMatch 0 [MatchErrorNoSuch thing got alts] expecting :: String -> String -> Match a -> Match a expecting thing got (NoMatch 0 _) = matchErrorExpected thing got expecting _ _ m = m orNoSuchThing :: String -> String -> [String] -> Match a -> Match a orNoSuchThing thing got alts (NoMatch 0 _) = matchErrorNoSuch thing got alts orNoSuchThing _ _ _ m = m orNoThingIn :: String -> String -> Match a -> Match a orNoThingIn kind name (NoMatch n ms) = NoMatch n [ MatchErrorIn kind name m | m <- ms ] orNoThingIn _ _ m = m increaseConfidence :: Match () increaseConfidence = Match Exact 1 [()] increaseConfidenceFor :: Match a -> Match a increaseConfidenceFor m = m >>= \r -> increaseConfidence >> return r nubMatchesBy :: (a -> a -> Bool) -> Match a -> Match a nubMatchesBy _ (NoMatch d msgs) = NoMatch d msgs nubMatchesBy eq (Match m d xs) = Match m d (nubBy eq xs) -- | Lift a list of matches to an exact match. 
-- exactMatches, inexactMatches :: [a] -> Match a exactMatches [] = mzero exactMatches xs = Match Exact 0 xs inexactMatches [] = mzero inexactMatches xs = Match Inexact 0 xs unknownMatch :: a -> Match a unknownMatch x = Match Unknown 0 [x] tryEach :: [a] -> Match a tryEach = exactMatches ------------------------------ -- Top level match runner -- -- | Given a matcher and a key to look up, use the matcher to find all the -- possible matches. There may be 'None', a single 'Unambiguous' match or -- you may have an 'Ambiguous' match with several possibilities. -- findMatch :: Match a -> MaybeAmbiguous a findMatch match = case match of NoMatch _ msgs -> None msgs Match _ _ [x] -> Unambiguous x Match m d [] -> error $ "findMatch: impossible: " ++ show match' where match' = Match m d [] :: Match () -- TODO: Maybe use Data.List.NonEmpty inside -- Match so that this case would be correct -- by construction? Match m _ xs -> Ambiguous m xs data MaybeAmbiguous a = None [MatchError] | Unambiguous a | Ambiguous MatchClass [a] deriving Show ------------------------------ -- Basic matchers -- -- | A primitive matcher that looks up a value in a finite 'Map'. The -- value must match exactly. -- matchExactly :: Ord k => (a -> k) -> [a] -> (k -> Match a) matchExactly key xs = \k -> case Map.lookup k m of Nothing -> mzero Just ys -> exactMatches ys where m = Map.fromListWith (++) [ (key x, [x]) | x <- xs ] -- | A primitive matcher that looks up a value in a finite 'Map'. It checks -- for an exact or inexact match. We get an inexact match if the match -- is not exact, but the canonical forms match. It takes a canonicalisation -- function for this purpose. -- -- So for example if we used string case fold as the canonicalisation -- function, then we would get case insensitive matching (but it will still -- report an exact match when the case matches too). 
-- matchInexactly :: (Ord k, Ord k') => (k -> k') -> (a -> k) -> [a] -> (k -> Match a) matchInexactly cannonicalise key xs = \k -> case Map.lookup k m of Just ys -> exactMatches ys Nothing -> case Map.lookup (cannonicalise k) m' of Just ys -> inexactMatches ys Nothing -> mzero where m = Map.fromListWith (++) [ (key x, [x]) | x <- xs ] -- the map of canonicalised keys to groups of inexact matches m' = Map.mapKeysWith (++) cannonicalise m matchParse :: Parsec a => String -> Match a matchParse = maybe mzero return . simpleParsec ------------------------------ -- Utils -- caseFold :: String -> String caseFold = lowercase -- | Make a 'ComponentName' given an 'UnqualComponentName' and knowing the -- 'ComponentKind'. We also need the 'PackageName' to distinguish the package's -- primary library from named private libraries. -- mkComponentName :: PackageName -> ComponentKind -> UnqualComponentName -> ComponentName mkComponentName pkgname ckind ucname = case ckind of LibKind | packageNameToUnqualComponentName pkgname == ucname -> CLibName LMainLibName | otherwise -> CLibName $ LSubLibName ucname FLibKind -> CFLibName ucname ExeKind -> CExeName ucname TestKind -> CTestName ucname BenchKind -> CBenchName ucname ------------------------------ -- Example inputs -- {- ex1pinfo :: [KnownPackage] ex1pinfo = [ addComponent (CExeName (mkUnqualComponentName "foo-exe")) [] ["Data.Foo"] $ KnownPackage { pinfoId = PackageIdentifier (mkPackageName "foo") (mkVersion [1]), pinfoDirectory = Just ("/the/foo", "foo"), pinfoPackageFile = Just ("/the/foo/foo.cabal", "foo/foo.cabal"), pinfoComponents = [] } , KnownPackage { pinfoId = PackageIdentifier (mkPackageName "bar") (mkVersion [1]), pinfoDirectory = Just ("/the/bar", "bar"), pinfoPackageFile = Just ("/the/bar/bar.cabal", "bar/bar.cabal"), pinfoComponents = [] } ] where addComponent n ds ms p = p { pinfoComponents = KnownComponent n (componentStringName (pinfoId p) n) p ds (map mkMn ms) [] [] [] : pinfoComponents p } mkMn :: String -> 
ModuleName mkMn = ModuleName.fromString -} {- stargets = [ TargetComponent (CExeName "foo") WholeComponent , TargetComponent (CExeName "foo") (ModuleTarget (mkMn "Foo")) , TargetComponent (CExeName "tst") (ModuleTarget (mkMn "Foo")) ] where mkMn :: String -> ModuleName mkMn = fromJust . simpleParse ex_pkgid :: PackageIdentifier Just ex_pkgid = simpleParse "thelib" -} {- ex_cs :: [KnownComponent] ex_cs = [ (mkC (CExeName "foo") ["src1", "src1/src2"] ["Foo", "Src2.Bar", "Bar"]) , (mkC (CExeName "tst") ["src1", "test"] ["Foo"]) ] where mkC n ds ms = KnownComponent n (componentStringName n) ds (map mkMn ms) mkMn :: String -> ModuleName mkMn = fromJust . simpleParse pkgid :: PackageIdentifier Just pkgid = simpleParse "thelib" -} cabal-install-3.8.1.0/src/Distribution/Client/Targets.hs0000644000000000000000000006576607346545000021235 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Targets -- Copyright : (c) Duncan Coutts 2011 -- License : BSD-like -- -- Maintainer : duncan@community.haskell.org -- -- Handling for user-specified targets ----------------------------------------------------------------------------- module Distribution.Client.Targets ( -- * User targets UserTarget(..), readUserTargets, -- * Resolving user targets to package specifiers resolveUserTargets, -- ** Detailed interface UserTargetProblem(..), readUserTarget, reportUserTargetProblems, expandUserTarget, PackageTarget(..), fetchPackageTarget, readPackageTarget, PackageTargetProblem(..), reportPackageTargetProblems, disambiguatePackageTargets, disambiguatePackageName, -- * User constraints UserQualifier(..), UserConstraintScope(..), UserConstraint(..), userConstraintPackageName, readUserConstraint, userToPackageConstraint, ) where import Prelude () import 
Distribution.Client.Compat.Prelude import Distribution.Package ( Package(..), PackageName, unPackageName, mkPackageName , packageName ) import Distribution.Client.Types ( PackageLocation(..), ResolvedPkgLoc, UnresolvedSourcePackage , PackageSpecifier(..) ) import Distribution.Solver.Types.OptionalStanza import Distribution.Solver.Types.PackageConstraint import Distribution.Solver.Types.PackagePath import Distribution.Solver.Types.PackageIndex (PackageIndex) import qualified Distribution.Solver.Types.PackageIndex as PackageIndex import Distribution.Solver.Types.SourcePackage import qualified Codec.Archive.Tar as Tar import qualified Codec.Archive.Tar.Entry as Tar import qualified Distribution.Client.Tar as Tar import Distribution.Client.FetchUtils import Distribution.Client.Utils ( tryFindPackageDesc ) import Distribution.Client.GlobalFlags ( RepoContext(..) ) import Distribution.Types.PackageVersionConstraint ( PackageVersionConstraint (..) ) import Distribution.PackageDescription ( GenericPackageDescription ) import Distribution.Types.Flag ( parsecFlagAssignmentNonEmpty ) import Distribution.Version ( isAnyVersion ) import Distribution.Simple.Utils ( die', lowercase ) import Distribution.PackageDescription.Parsec ( parseGenericPackageDescriptionMaybe ) import Distribution.Simple.PackageDescription ( readGenericPackageDescription ) import qualified Data.Map as Map import qualified Data.ByteString.Lazy as BS import qualified Distribution.Client.GZipUtils as GZipUtils import qualified Distribution.Compat.CharParsing as P import System.FilePath ( takeExtension, dropExtension, takeDirectory, splitPath ) import System.Directory ( doesFileExist, doesDirectoryExist ) import Network.URI ( URI(..), URIAuth(..), parseAbsoluteURI ) -- ------------------------------------------------------------ -- * User targets -- ------------------------------------------------------------ -- | Various ways that a user may specify a package or package collection. 
-- data UserTarget = -- | A partially specified package, identified by name and possibly with -- an exact version or a version constraint. -- -- > cabal install foo -- > cabal install foo-1.0 -- > cabal install 'foo < 2' -- UserTargetNamed PackageVersionConstraint -- | A specific package that is unpacked in a local directory, often the -- current directory. -- -- > cabal install . -- > cabal install ../lib/other -- -- * Note: in future, if multiple @.cabal@ files are allowed in a single -- directory then this will refer to the collection of packages. -- | UserTargetLocalDir FilePath -- | A specific local unpacked package, identified by its @.cabal@ file. -- -- > cabal install foo.cabal -- > cabal install ../lib/other/bar.cabal -- | UserTargetLocalCabalFile FilePath -- | A specific package that is available as a local tarball file -- -- > cabal install dist/foo-1.0.tar.gz -- > cabal install ../build/baz-1.0.tar.gz -- | UserTargetLocalTarball FilePath -- | A specific package that is available as a remote tarball file -- -- > cabal install http://code.haskell.org/~user/foo/foo-0.9.tar.gz -- | UserTargetRemoteTarball URI deriving (Show,Eq) -- ------------------------------------------------------------ -- * Parsing and checking user targets -- ------------------------------------------------------------ readUserTargets :: Verbosity -> [String] -> IO [UserTarget] readUserTargets verbosity targetStrs = do (problems, targets) <- liftM partitionEithers (traverse readUserTarget targetStrs) reportUserTargetProblems verbosity problems return targets data UserTargetProblem = UserTargetUnexpectedFile String | UserTargetNonexistantFile String | UserTargetUnexpectedUriScheme String | UserTargetUnrecognisedUri String | UserTargetUnrecognised String deriving Show readUserTarget :: String -> IO (Either UserTargetProblem UserTarget) readUserTarget targetstr = case eitherParsec targetstr of Right dep -> return (Right (UserTargetNamed dep)) Left _err -> do fileTarget <- testFileTargets 
targetstr case fileTarget of Just target -> return target Nothing -> case testUriTargets targetstr of Just target -> return target Nothing -> return (Left (UserTargetUnrecognised targetstr)) where testFileTargets :: FilePath -> IO (Maybe (Either UserTargetProblem UserTarget)) testFileTargets filename = do isDir <- doesDirectoryExist filename isFile <- doesFileExist filename parentDirExists <- case takeDirectory filename of [] -> return False dir -> doesDirectoryExist dir let result :: Maybe (Either UserTargetProblem UserTarget) result | isDir = Just (Right (UserTargetLocalDir filename)) | isFile && extensionIsTarGz filename = Just (Right (UserTargetLocalTarball filename)) | isFile && takeExtension filename == ".cabal" = Just (Right (UserTargetLocalCabalFile filename)) | isFile = Just (Left (UserTargetUnexpectedFile filename)) | parentDirExists = Just (Left (UserTargetNonexistantFile filename)) | otherwise = Nothing return result testUriTargets :: String -> Maybe (Either UserTargetProblem UserTarget) testUriTargets str = case parseAbsoluteURI str of Just uri@URI { uriScheme = scheme, uriAuthority = Just URIAuth { uriRegName = host } } | scheme /= "http:" && scheme /= "https:" -> Just (Left (UserTargetUnexpectedUriScheme targetstr)) | null host -> Just (Left (UserTargetUnrecognisedUri targetstr)) | otherwise -> Just (Right (UserTargetRemoteTarball uri)) _ -> Nothing extensionIsTarGz :: FilePath -> Bool extensionIsTarGz f = takeExtension f == ".gz" && takeExtension (dropExtension f) == ".tar" reportUserTargetProblems :: Verbosity -> [UserTargetProblem] -> IO () reportUserTargetProblems verbosity problems = do case [ target | UserTargetUnrecognised target <- problems ] of [] -> return () target -> die' verbosity $ unlines [ "Unrecognised target '" ++ name ++ "'." | name <- target ] ++ "Targets can be:\n" ++ " - package names, e.g. 
'pkgname', 'pkgname-1.0.1', 'pkgname < 2.0'\n" ++ " - cabal files 'pkgname.cabal' or package directories 'pkgname/'\n" ++ " - package tarballs 'pkgname.tar.gz' or 'http://example.com/pkgname.tar.gz'" case [ target | UserTargetNonexistantFile target <- problems ] of [] -> return () target -> die' verbosity $ unlines [ "The file does not exist '" ++ name ++ "'." | name <- target ] case [ target | UserTargetUnexpectedFile target <- problems ] of [] -> return () target -> die' verbosity $ unlines [ "Unrecognised file target '" ++ name ++ "'." | name <- target ] ++ "File targets can be either package tarballs 'pkgname.tar.gz' " ++ "or cabal files 'pkgname.cabal'." case [ target | UserTargetUnexpectedUriScheme target <- problems ] of [] -> return () target -> die' verbosity $ unlines [ "URL target not supported '" ++ name ++ "'." | name <- target ] ++ "Only 'http://' and 'https://' URLs are supported." case [ target | UserTargetUnrecognisedUri target <- problems ] of [] -> return () target -> die' verbosity $ unlines [ "Unrecognise URL target '" ++ name ++ "'." | name <- target ] -- ------------------------------------------------------------ -- * Resolving user targets to package specifiers -- ------------------------------------------------------------ -- | Given a bunch of user-specified targets, try to resolve what it is they -- refer to. They can either be specific packages (local dirs, tarballs etc) -- or they can be named packages (with or without version info). -- resolveUserTargets :: Package pkg => Verbosity -> RepoContext -> PackageIndex pkg -> [UserTarget] -> IO [PackageSpecifier UnresolvedSourcePackage] resolveUserTargets verbosity repoCtxt available userTargets = do -- given the user targets, get a list of fully or partially resolved -- package references packageTargets <- traverse (readPackageTarget verbosity) =<< traverse (fetchPackageTarget verbosity repoCtxt) . 
concat =<< traverse (expandUserTarget verbosity) userTargets -- users are allowed to give package names case-insensitively, so we must -- disambiguate named package references let (problems, packageSpecifiers) :: ([PackageTargetProblem], [PackageSpecifier UnresolvedSourcePackage]) = disambiguatePackageTargets available availableExtra packageTargets -- use any extra specific available packages to help us disambiguate availableExtra :: [PackageName] availableExtra = [ packageName pkg | PackageTargetLocation pkg <- packageTargets ] reportPackageTargetProblems verbosity problems return packageSpecifiers -- ------------------------------------------------------------ -- * Package targets -- ------------------------------------------------------------ -- | An intermediate between a 'UserTarget' and a resolved 'PackageSpecifier'. -- Unlike a 'UserTarget', a 'PackageTarget' refers only to a single package. -- data PackageTarget pkg = PackageTargetNamed PackageName [PackageProperty] UserTarget -- | A package identified by name, but case insensitively, so it needs -- to be resolved to the right case-sensitive name. | PackageTargetNamedFuzzy PackageName [PackageProperty] UserTarget | PackageTargetLocation pkg deriving (Show, Functor, Foldable, Traversable) -- ------------------------------------------------------------ -- * Converting user targets to package targets -- ------------------------------------------------------------ -- | Given a user-specified target, expand it to a bunch of package targets -- (each of which refers to only one package). 
-- expandUserTarget :: Verbosity -> UserTarget -> IO [PackageTarget (PackageLocation ())] expandUserTarget verbosity userTarget = case userTarget of UserTargetNamed (PackageVersionConstraint name vrange) -> let props = [ PackagePropertyVersion vrange | not (isAnyVersion vrange) ] in return [PackageTargetNamedFuzzy name props userTarget] UserTargetLocalDir dir -> return [ PackageTargetLocation (LocalUnpackedPackage dir) ] UserTargetLocalCabalFile file -> do let dir = takeDirectory file _ <- tryFindPackageDesc verbosity dir (localPackageError dir) -- just as a check return [ PackageTargetLocation (LocalUnpackedPackage dir) ] UserTargetLocalTarball tarballFile -> return [ PackageTargetLocation (LocalTarballPackage tarballFile) ] UserTargetRemoteTarball tarballURL -> return [ PackageTargetLocation (RemoteTarballPackage tarballURL ()) ] localPackageError :: FilePath -> String localPackageError dir = "Error reading local package.\nCouldn't find .cabal file in: " ++ dir -- ------------------------------------------------------------ -- * Fetching and reading package targets -- ------------------------------------------------------------ -- | Fetch any remote targets so that they can be read. -- fetchPackageTarget :: Verbosity -> RepoContext -> PackageTarget (PackageLocation ()) -> IO (PackageTarget ResolvedPkgLoc) fetchPackageTarget verbosity repoCtxt = traverse $ fetchPackage verbosity repoCtxt . fmap (const Nothing) -- | Given a package target that has been fetched, read the .cabal file. -- -- This only affects targets given by location, named targets are unaffected. 
-- readPackageTarget :: Verbosity -> PackageTarget ResolvedPkgLoc -> IO (PackageTarget UnresolvedSourcePackage) readPackageTarget verbosity = traverse modifyLocation where modifyLocation :: ResolvedPkgLoc -> IO UnresolvedSourcePackage modifyLocation location = case location of LocalUnpackedPackage dir -> do pkg <- tryFindPackageDesc verbosity dir (localPackageError dir) >>= readGenericPackageDescription verbosity return SourcePackage { srcpkgPackageId = packageId pkg , srcpkgDescription = pkg , srcpkgSource = fmap Just location , srcpkgDescrOverride = Nothing } LocalTarballPackage tarballFile -> readTarballPackageTarget location tarballFile tarballFile RemoteTarballPackage tarballURL tarballFile -> readTarballPackageTarget location tarballFile (show tarballURL) RepoTarballPackage _repo _pkgid _ -> error "TODO: readPackageTarget RepoTarballPackage" -- For repo tarballs this info should be obtained from the index. RemoteSourceRepoPackage _srcRepo _ -> error "TODO: readPackageTarget RemoteSourceRepoPackage" -- This can't happen, because it would have errored out already -- in fetchPackage, via fetchPackageTarget before it gets to this -- function. -- -- When that is corrected, this will also need to be fixed. readTarballPackageTarget :: ResolvedPkgLoc -> FilePath -> FilePath -> IO UnresolvedSourcePackage readTarballPackageTarget location tarballFile tarballOriginalLoc = do (filename, content) <- extractTarballPackageCabalFile tarballFile tarballOriginalLoc case parsePackageDescription' content of Nothing -> die' verbosity $ "Could not parse the cabal file " ++ filename ++ " in " ++ tarballFile Just pkg -> return SourcePackage { srcpkgPackageId = packageId pkg , srcpkgDescription = pkg , srcpkgSource = fmap Just location , srcpkgDescrOverride = Nothing } extractTarballPackageCabalFile :: FilePath -> String -> IO (FilePath, BS.ByteString) extractTarballPackageCabalFile tarballFile tarballOriginalLoc = either (die' verbosity . formatErr) return . check . accumEntryMap . 
Tar.filterEntries isCabalFile . Tar.read . GZipUtils.maybeDecompress =<< BS.readFile tarballFile where formatErr msg = "Error reading " ++ tarballOriginalLoc ++ ": " ++ msg accumEntryMap :: Tar.Entries Tar.FormatError -> Either (Tar.FormatError, Map Tar.TarPath Tar.Entry) (Map Tar.TarPath Tar.Entry) accumEntryMap = Tar.foldlEntries (\m e -> Map.insert (Tar.entryTarPath e) e m) Map.empty check (Left e) = Left (show e) check (Right m) = case Map.elems m of [] -> Left noCabalFile [file] -> case Tar.entryContent file of Tar.NormalFile content _ -> Right (Tar.entryPath file, content) _ -> Left noCabalFile _files -> Left multipleCabalFiles where noCabalFile = "No cabal file found" multipleCabalFiles = "Multiple cabal files found" isCabalFile :: Tar.Entry -> Bool isCabalFile e = case splitPath (Tar.entryPath e) of [ _dir, file] -> takeExtension file == ".cabal" [".", _dir, file] -> takeExtension file == ".cabal" _ -> False parsePackageDescription' :: BS.ByteString -> Maybe GenericPackageDescription parsePackageDescription' bs = parseGenericPackageDescriptionMaybe (BS.toStrict bs) -- ------------------------------------------------------------ -- * Checking package targets -- ------------------------------------------------------------ data PackageTargetProblem = PackageNameUnknown PackageName UserTarget | PackageNameAmbiguous PackageName [PackageName] UserTarget deriving Show -- | Users are allowed to give package names case-insensitively, so we must -- disambiguate named package references. 
-- disambiguatePackageTargets :: Package pkg' => PackageIndex pkg' -> [PackageName] -> [PackageTarget pkg] -> ( [PackageTargetProblem] , [PackageSpecifier pkg] ) disambiguatePackageTargets availablePkgIndex availableExtra targets = partitionEithers (map disambiguatePackageTarget targets) where disambiguatePackageTarget packageTarget = case packageTarget of PackageTargetLocation pkg -> Right (SpecificSourcePackage pkg) PackageTargetNamed pkgname props userTarget | null (PackageIndex.lookupPackageName availablePkgIndex pkgname) -> Left (PackageNameUnknown pkgname userTarget) | otherwise -> Right (NamedPackage pkgname props) PackageTargetNamedFuzzy pkgname props userTarget -> case disambiguatePackageName packageNameEnv pkgname of None -> Left (PackageNameUnknown pkgname userTarget) Ambiguous pkgnames -> Left (PackageNameAmbiguous pkgname pkgnames userTarget) Unambiguous pkgname' -> Right (NamedPackage pkgname' props) -- use any extra specific available packages to help us disambiguate packageNameEnv :: PackageNameEnv packageNameEnv = mappend (indexPackageNameEnv availablePkgIndex) (extraPackageNameEnv availableExtra) -- | Report problems to the user. That is, if there are any problems -- then raise an exception. reportPackageTargetProblems :: Verbosity -> [PackageTargetProblem] -> IO () reportPackageTargetProblems verbosity problems = do case [ pkg | PackageNameUnknown pkg _ <- problems ] of [] -> return () pkgs -> die' verbosity $ unlines [ "There is no package named '" ++ prettyShow name ++ "'. " | name <- pkgs ] ++ "You may need to run 'cabal update' to get the latest " ++ "list of available packages." case [ (pkg, matches) | PackageNameAmbiguous pkg matches _ <- problems ] of [] -> return () ambiguities -> die' verbosity $ unlines [ "There is no package named '" ++ prettyShow name ++ "'. 
" ++ (if length matches > 1 then "However, the following package names exist: " else "However, the following package name exists: ") ++ intercalate ", " [ "'" ++ prettyShow m ++ "'" | m <- matches] ++ "." | (name, matches) <- ambiguities ] -- ------------------------------------------------------------ -- * Disambiguating package names -- ------------------------------------------------------------ data MaybeAmbiguous a = None | Unambiguous a | Ambiguous [a] -- | Given a package name and a list of matching names, figure out -- which one it might be referring to. If there is an exact -- case-sensitive match then that's ok (i.e. returned via -- 'Unambiguous'). If it matches just one package case-insensitively -- or if it matches multiple packages case-insensitively, in that case -- the result is 'Ambiguous'. -- -- Note: Before cabal 2.2, when only a single package matched -- case-insensitively it would be considered 'Unambiguous'. -- disambiguatePackageName :: PackageNameEnv -> PackageName -> MaybeAmbiguous PackageName disambiguatePackageName (PackageNameEnv pkgNameLookup) name = case nub (pkgNameLookup name) of [] -> None names -> case find (name==) names of Just name' -> Unambiguous name' Nothing -> Ambiguous names newtype PackageNameEnv = PackageNameEnv (PackageName -> [PackageName]) instance Monoid PackageNameEnv where mempty = PackageNameEnv (const []) mappend = (<>) instance Semigroup PackageNameEnv where PackageNameEnv lookupA <> PackageNameEnv lookupB = PackageNameEnv (\name -> lookupA name ++ lookupB name) indexPackageNameEnv :: PackageIndex pkg -> PackageNameEnv indexPackageNameEnv pkgIndex = PackageNameEnv pkgNameLookup where pkgNameLookup pname = map fst (PackageIndex.searchByName pkgIndex $ unPackageName pname) extraPackageNameEnv :: [PackageName] -> PackageNameEnv extraPackageNameEnv names = PackageNameEnv pkgNameLookup where pkgNameLookup pname = [ pname' | let lname = lowercase (unPackageName pname) , pname' <- names , lowercase (unPackageName pname') 
== lname ] -- ------------------------------------------------------------ -- * Package constraints -- ------------------------------------------------------------ -- | Version of 'Qualifier' that a user may specify on the -- command line. data UserQualifier = -- | Top-level dependency. UserQualToplevel -- | Setup dependency. | UserQualSetup PackageName -- | Executable dependency. | UserQualExe PackageName PackageName deriving (Eq, Show, Generic) instance Binary UserQualifier instance Structured UserQualifier -- | Version of 'ConstraintScope' that a user may specify on the -- command line. data UserConstraintScope = -- | Scope that applies to the package when it has the specified qualifier. UserQualified UserQualifier PackageName -- | Scope that applies to the package when it has a setup qualifier. | UserAnySetupQualifier PackageName -- | Scope that applies to the package when it has any qualifier. | UserAnyQualifier PackageName deriving (Eq, Show, Generic) instance Binary UserConstraintScope instance Structured UserConstraintScope fromUserQualifier :: UserQualifier -> Qualifier fromUserQualifier UserQualToplevel = QualToplevel fromUserQualifier (UserQualSetup name) = QualSetup name fromUserQualifier (UserQualExe name1 name2) = QualExe name1 name2 fromUserConstraintScope :: UserConstraintScope -> ConstraintScope fromUserConstraintScope (UserQualified q pn) = ScopeQualified (fromUserQualifier q) pn fromUserConstraintScope (UserAnySetupQualifier pn) = ScopeAnySetupQualifier pn fromUserConstraintScope (UserAnyQualifier pn) = ScopeAnyQualifier pn -- | Version of 'PackageConstraint' that the user can specify on -- the command line. 
data UserConstraint = UserConstraint UserConstraintScope PackageProperty deriving (Eq, Show, Generic) instance Binary UserConstraint instance Structured UserConstraint userConstraintPackageName :: UserConstraint -> PackageName userConstraintPackageName (UserConstraint scope _) = scopePN scope where scopePN (UserQualified _ pn) = pn scopePN (UserAnyQualifier pn) = pn scopePN (UserAnySetupQualifier pn) = pn userToPackageConstraint :: UserConstraint -> PackageConstraint userToPackageConstraint (UserConstraint scope prop) = PackageConstraint (fromUserConstraintScope scope) prop readUserConstraint :: String -> Either String UserConstraint readUserConstraint str = case explicitEitherParsec parsec str of Left err -> Left $ msgCannotParse ++ err Right c -> Right c where msgCannotParse = "expected a (possibly qualified) package name followed by a " ++ "constraint, which is either a version range, 'installed', " ++ "'source', 'test', 'bench', or flags. " instance Pretty UserConstraint where pretty (UserConstraint scope prop) = dispPackageConstraint $ PackageConstraint (fromUserConstraintScope scope) prop instance Parsec UserConstraint where parsec = do scope <- parseConstraintScope P.spaces prop <- P.choice [ PackagePropertyFlags <$> parsecFlagAssignmentNonEmpty -- headed by "+-" , PackagePropertyVersion <$> parsec -- headed by "<=>" (will be) , PackagePropertyInstalled <$ P.string "installed" , PackagePropertySource <$ P.string "source" , PackagePropertyStanzas [TestStanzas] <$ P.string "test" , PackagePropertyStanzas [BenchStanzas] <$ P.string "bench" ] return (UserConstraint scope prop) where parseConstraintScope :: forall m. CabalParsing m => m UserConstraintScope parseConstraintScope = do pn <- parsec P.choice [ P.char '.' 
*> withDot pn , P.char ':' *> withColon pn , return (UserQualified UserQualToplevel pn) ] where withDot :: PackageName -> m UserConstraintScope withDot pn | pn == mkPackageName "any" = UserAnyQualifier <$> parsec | pn == mkPackageName "setup" = UserAnySetupQualifier <$> parsec | otherwise = P.unexpected $ "constraint scope: " ++ unPackageName pn withColon :: PackageName -> m UserConstraintScope withColon pn = UserQualified (UserQualSetup pn) <$ P.string "setup." <*> parsec cabal-install-3.8.1.0/src/Distribution/Client/Types.hs0000644000000000000000000000346507346545000020714 0ustar0000000000000000{-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeFamilies #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Types -- Copyright : (c) David Himmelstrup 2005 -- Duncan Coutts 2011 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Various common data types for the entire cabal-install system ----------------------------------------------------------------------------- module Distribution.Client.Types ( module Distribution.Client.Types.AllowNewer, module Distribution.Client.Types.ConfiguredId, module Distribution.Client.Types.ConfiguredPackage, module Distribution.Client.Types.BuildResults, module Distribution.Client.Types.PackageLocation, module Distribution.Client.Types.PackageSpecifier, module Distribution.Client.Types.ReadyPackage, module Distribution.Client.Types.Repo, module Distribution.Client.Types.RepoName, module Distribution.Client.Types.SourcePackageDb, module Distribution.Client.Types.WriteGhcEnvironmentFilesPolicy, ) where import Distribution.Client.Types.AllowNewer import Distribution.Client.Types.BuildResults import Distribution.Client.Types.ConfiguredId import 
Distribution.Client.Types.ConfiguredPackage import Distribution.Client.Types.PackageLocation import Distribution.Client.Types.PackageSpecifier import Distribution.Client.Types.Repo import Distribution.Client.Types.RepoName import Distribution.Client.Types.ReadyPackage import Distribution.Client.Types.SourcePackageDb import Distribution.Client.Types.WriteGhcEnvironmentFilesPolicy cabal-install-3.8.1.0/src/Distribution/Client/Types/0000755000000000000000000000000007346545000020350 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/Types/AllowNewer.hs0000644000000000000000000002117007346545000022764 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} module Distribution.Client.Types.AllowNewer ( AllowNewer (..), AllowOlder (..), RelaxDeps (..), mkRelaxDepSome, RelaxDepMod (..), RelaxDepScope (..), RelaxDepSubject (..), RelaxedDep (..), isRelaxDeps, ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Parsec (parsecLeadingCommaNonEmpty) import Distribution.Types.PackageId (PackageId, PackageIdentifier (..)) import Distribution.Types.PackageName (PackageName, mkPackageName) import Distribution.Types.Version (nullVersion) import qualified Distribution.Compat.CharParsing as P import qualified Text.PrettyPrint as Disp -- $setup -- >>> import Distribution.Parsec -- TODO: When https://github.com/haskell/cabal/issues/4203 gets tackled, -- it may make sense to move these definitions to the Solver.Types -- module -- | 'RelaxDeps' in the context of upper bounds (i.e. for @--allow-newer@ flag) newtype AllowNewer = AllowNewer { unAllowNewer :: RelaxDeps } deriving (Eq, Read, Show, Generic) -- | 'RelaxDeps' in the context of lower bounds (i.e. for @--allow-older@ flag) newtype AllowOlder = AllowOlder { unAllowOlder :: RelaxDeps } deriving (Eq, Read, Show, Generic) -- | Generic data type for policy when relaxing bounds in dependencies. 
-- Don't use this directly: use 'AllowOlder' or 'AllowNewer' depending -- on whether or not you are relaxing an lower or upper bound -- (respectively). data RelaxDeps = -- | Ignore upper (resp. lower) bounds in some (or no) dependencies on the given packages. -- -- @RelaxDepsSome []@ is the default, i.e. honor the bounds in all -- dependencies, never choose versions newer (resp. older) than allowed. RelaxDepsSome [RelaxedDep] -- | Ignore upper (resp. lower) bounds in dependencies on all packages. -- -- __Note__: This is should be semantically equivalent to -- -- > RelaxDepsSome [RelaxedDep RelaxDepScopeAll RelaxDepModNone RelaxDepSubjectAll] -- -- (TODO: consider normalising 'RelaxDeps' and/or 'RelaxedDep') | RelaxDepsAll deriving (Eq, Read, Show, Generic) -- | Dependencies can be relaxed either for all packages in the install plan, or -- only for some packages. data RelaxedDep = RelaxedDep !RelaxDepScope !RelaxDepMod !RelaxDepSubject deriving (Eq, Read, Show, Generic) -- | Specify the scope of a relaxation, i.e. limit which depending -- packages are allowed to have their version constraints relaxed. 
data RelaxDepScope = RelaxDepScopeAll -- ^ Apply relaxation in any package | RelaxDepScopePackage !PackageName -- ^ Apply relaxation to in all versions of a package | RelaxDepScopePackageId !PackageId -- ^ Apply relaxation to a specific version of a package only deriving (Eq, Read, Show, Generic) -- | Modifier for dependency relaxation data RelaxDepMod = RelaxDepModNone -- ^ Default semantics | RelaxDepModCaret -- ^ Apply relaxation only to @^>=@ constraints deriving (Eq, Read, Show, Generic) -- | Express whether to relax bounds /on/ @all@ packages, or a single package data RelaxDepSubject = RelaxDepSubjectAll | RelaxDepSubjectPkg !PackageName deriving (Eq, Ord, Read, Show, Generic) instance Pretty RelaxedDep where pretty (RelaxedDep scope rdmod subj) = case scope of RelaxDepScopeAll -> Disp.text "*:" Disp.<> modDep RelaxDepScopePackage p0 -> pretty p0 Disp.<> Disp.colon Disp.<> modDep RelaxDepScopePackageId p0 -> pretty p0 Disp.<> Disp.colon Disp.<> modDep where modDep = case rdmod of RelaxDepModNone -> pretty subj RelaxDepModCaret -> Disp.char '^' Disp.<> pretty subj instance Parsec RelaxedDep where parsec = P.char '*' *> relaxedDepStarP <|> (parsec >>= relaxedDepPkgidP) -- continuation after * relaxedDepStarP :: CabalParsing m => m RelaxedDep relaxedDepStarP = RelaxedDep RelaxDepScopeAll <$ P.char ':' <*> modP <*> parsec <|> pure (RelaxedDep RelaxDepScopeAll RelaxDepModNone RelaxDepSubjectAll) -- continuation after package identifier relaxedDepPkgidP :: CabalParsing m => PackageIdentifier -> m RelaxedDep relaxedDepPkgidP pid@(PackageIdentifier pn v) | pn == mkPackageName "all" , v == nullVersion = RelaxedDep RelaxDepScopeAll <$ P.char ':' <*> modP <*> parsec <|> pure (RelaxedDep RelaxDepScopeAll RelaxDepModNone RelaxDepSubjectAll) | v == nullVersion = RelaxedDep (RelaxDepScopePackage pn) <$ P.char ':' <*> modP <*> parsec <|> pure (RelaxedDep RelaxDepScopeAll RelaxDepModNone (RelaxDepSubjectPkg pn)) | otherwise = RelaxedDep (RelaxDepScopePackageId pid) <$ P.char 
':' <*> modP <*> parsec modP :: P.CharParsing m => m RelaxDepMod modP = RelaxDepModCaret <$ P.char '^' <|> pure RelaxDepModNone instance Pretty RelaxDepSubject where pretty RelaxDepSubjectAll = Disp.text "*" pretty (RelaxDepSubjectPkg pn) = pretty pn instance Parsec RelaxDepSubject where parsec = RelaxDepSubjectAll <$ P.char '*' <|> pkgn where pkgn = do pn <- parsec pure $ if pn == mkPackageName "all" then RelaxDepSubjectAll else RelaxDepSubjectPkg pn instance Pretty RelaxDeps where pretty rd | not (isRelaxDeps rd) = Disp.text "none" pretty (RelaxDepsSome pkgs) = Disp.fsep . Disp.punctuate Disp.comma . map pretty $ pkgs pretty RelaxDepsAll = Disp.text "all" -- | -- -- >>> simpleParsec "all" :: Maybe RelaxDeps -- Just RelaxDepsAll -- -- >>> simpleParsec "none" :: Maybe RelaxDeps -- Just (RelaxDepsSome []) -- -- >>> simpleParsec "*, *" :: Maybe RelaxDeps -- Just RelaxDepsAll -- -- >>> simpleParsec "*:*" :: Maybe RelaxDeps -- Just RelaxDepsAll -- -- >>> simpleParsec "foo:bar, quu:puu" :: Maybe RelaxDeps -- Just (RelaxDepsSome [RelaxedDep (RelaxDepScopePackage (PackageName "foo")) RelaxDepModNone (RelaxDepSubjectPkg (PackageName "bar")),RelaxedDep (RelaxDepScopePackage (PackageName "quu")) RelaxDepModNone (RelaxDepSubjectPkg (PackageName "puu"))]) -- -- This is not a glitch, even it looks like: -- -- >>> simpleParsec ", all" :: Maybe RelaxDeps -- Just RelaxDepsAll -- -- >>> simpleParsec "" :: Maybe RelaxDeps -- Nothing -- instance Parsec RelaxDeps where parsec = do xs <- parsecLeadingCommaNonEmpty parsec pure $ case toList xs of [RelaxedDep RelaxDepScopeAll RelaxDepModNone RelaxDepSubjectAll] -> RelaxDepsAll [RelaxedDep RelaxDepScopeAll RelaxDepModNone (RelaxDepSubjectPkg pn)] | pn == mkPackageName "none" -> mempty xs' -> mkRelaxDepSome xs' instance Binary RelaxDeps instance Binary RelaxDepMod instance Binary RelaxDepScope instance Binary RelaxDepSubject instance Binary RelaxedDep instance Binary AllowNewer instance Binary AllowOlder instance Structured RelaxDeps 
instance Structured RelaxDepMod instance Structured RelaxDepScope instance Structured RelaxDepSubject instance Structured RelaxedDep instance Structured AllowNewer instance Structured AllowOlder -- | Return 'True' if 'RelaxDeps' specifies a non-empty set of relaxations -- -- Equivalent to @isRelaxDeps = (/= 'mempty')@ isRelaxDeps :: RelaxDeps -> Bool isRelaxDeps (RelaxDepsSome []) = False isRelaxDeps (RelaxDepsSome (_:_)) = True isRelaxDeps RelaxDepsAll = True -- | A smarter 'RelaxedDepsSome', @*:*@ is the same as @all@. mkRelaxDepSome :: [RelaxedDep] -> RelaxDeps mkRelaxDepSome xs | any (== RelaxedDep RelaxDepScopeAll RelaxDepModNone RelaxDepSubjectAll) xs = RelaxDepsAll | otherwise = RelaxDepsSome xs -- | 'RelaxDepsAll' is the /absorbing element/ instance Semigroup RelaxDeps where -- identity element RelaxDepsSome [] <> r = r l@(RelaxDepsSome _) <> RelaxDepsSome [] = l -- absorbing element l@RelaxDepsAll <> _ = l (RelaxDepsSome _) <> r@RelaxDepsAll = r -- combining non-{identity,absorbing} elements (RelaxDepsSome a) <> (RelaxDepsSome b) = RelaxDepsSome (a ++ b) -- | @'RelaxDepsSome' []@ is the /identity element/ instance Monoid RelaxDeps where mempty = RelaxDepsSome [] mappend = (<>) instance Semigroup AllowNewer where AllowNewer x <> AllowNewer y = AllowNewer (x <> y) instance Semigroup AllowOlder where AllowOlder x <> AllowOlder y = AllowOlder (x <> y) instance Monoid AllowNewer where mempty = AllowNewer mempty mappend = (<>) instance Monoid AllowOlder where mempty = AllowOlder mempty mappend = (<>) cabal-install-3.8.1.0/src/Distribution/Client/Types/BuildResults.hs0000644000000000000000000000370307346545000023330 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} module Distribution.Client.Types.BuildResults ( BuildOutcome, BuildOutcomes, BuildFailure (..), BuildResult (..), TestsResult (..), DocsResult (..), ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Types.InstalledPackageInfo (InstalledPackageInfo) import 
Distribution.Types.PackageId (PackageId) import Distribution.Types.UnitId (UnitId) -- | A summary of the outcome for building a single package. -- type BuildOutcome = Either BuildFailure BuildResult -- | A summary of the outcome for building a whole set of packages. -- type BuildOutcomes = Map UnitId BuildOutcome data BuildFailure = PlanningFailed | DependentFailed PackageId | DownloadFailed SomeException | UnpackFailed SomeException | ConfigureFailed SomeException | BuildFailed SomeException | TestsFailed SomeException | InstallFailed SomeException deriving (Show, Typeable, Generic) instance Exception BuildFailure -- Note that the @Maybe InstalledPackageInfo@ is a slight hack: we only -- the public library's 'InstalledPackageInfo' is stored here, even if -- there were 'InstalledPackageInfo' from internal libraries. This -- 'InstalledPackageInfo' is not used anyway, so it makes no difference. data BuildResult = BuildResult DocsResult TestsResult (Maybe InstalledPackageInfo) deriving (Show, Generic) data DocsResult = DocsNotTried | DocsFailed | DocsOk deriving (Show, Generic, Typeable) data TestsResult = TestsNotTried | TestsOk deriving (Show, Generic, Typeable) instance Binary BuildFailure instance Binary BuildResult instance Binary DocsResult instance Binary TestsResult instance Structured BuildFailure instance Structured BuildResult instance Structured DocsResult instance Structured TestsResult cabal-install-3.8.1.0/src/Distribution/Client/Types/ConfiguredId.hs0000644000000000000000000000634107346545000023252 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} module Distribution.Client.Types.ConfiguredId ( InstalledPackageId, ConfiguredId (..), annotatedIdToConfiguredId, HasConfiguredId (..), ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.InstalledPackageInfo (InstalledPackageInfo, sourceComponentName, installedComponentId) import Distribution.Package (Package (..)) import Distribution.Types.AnnotatedId (AnnotatedId 
(..)) import Distribution.Types.ComponentId (ComponentId) import Distribution.Types.ComponentName (ComponentName) import Distribution.Types.PackageId (PackageId) ------------------------------------------------------------------------------- -- InstalledPackageId ------------------------------------------------------------------------------- -- | Within Cabal the library we no longer have a @InstalledPackageId@ type. -- That's because it deals with the compilers' notion of a registered library, -- and those really are libraries not packages. Those are now named units. -- -- The package management layer does however deal with installed packages, as -- whole packages not just as libraries. So we do still need a type for -- installed package ids. At the moment however we track installed packages via -- their primary library, which is a unit id. In future this may change -- slightly and we may distinguish these two types and have an explicit -- conversion when we register units with the compiler. -- type InstalledPackageId = ComponentId ------------------------------------------------------------------------------- -- ConfiguredId ------------------------------------------------------------------------------- -- | A ConfiguredId is a package ID for a configured package. -- -- Once we configure a source package we know its UnitId. It is still -- however useful in lots of places to also know the source ID for the package. -- We therefore bundle the two. -- -- An already installed package of course is also "configured" (all its -- configuration parameters and dependencies have been specified). 
data ConfiguredId = ConfiguredId { confSrcId :: PackageId , confCompName :: Maybe ComponentName , confInstId :: ComponentId } deriving (Eq, Ord, Generic) annotatedIdToConfiguredId :: AnnotatedId ComponentId -> ConfiguredId annotatedIdToConfiguredId aid = ConfiguredId { confSrcId = ann_pid aid, confCompName = Just (ann_cname aid), confInstId = ann_id aid } instance Binary ConfiguredId instance Structured ConfiguredId instance Show ConfiguredId where show cid = show (confInstId cid) instance Package ConfiguredId where packageId = confSrcId ------------------------------------------------------------------------------- -- HasConfiguredId class ------------------------------------------------------------------------------- class HasConfiguredId a where configuredId :: a -> ConfiguredId -- NB: This instance is slightly dangerous, in that you'll lose -- information about the specific UnitId you depended on. instance HasConfiguredId InstalledPackageInfo where configuredId ipkg = ConfiguredId (packageId ipkg) (Just (sourceComponentName ipkg)) (installedComponentId ipkg) cabal-install-3.8.1.0/src/Distribution/Client/Types/ConfiguredPackage.hs0000644000000000000000000000670207346545000024252 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE TypeFamilies #-} module Distribution.Client.Types.ConfiguredPackage ( ConfiguredPackage (..), ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Compat.Graph (IsNode (..)) import Distribution.Package (newSimpleUnitId, HasMungedPackageId (..), HasUnitId (..), Package (..), PackageInstalled (..), UnitId) import Distribution.Types.Flag (FlagAssignment) import Distribution.Types.ComponentName import Distribution.Types.LibraryName (LibraryName (..)) import Distribution.Types.MungedPackageId (computeCompatPackageId) import Distribution.Simple.Utils (ordNub) import Distribution.Client.Types.ConfiguredId import Distribution.Solver.Types.OptionalStanza (OptionalStanzaSet) import 
Distribution.Solver.Types.PackageFixedDeps import Distribution.Solver.Types.SourcePackage (SourcePackage) import qualified Distribution.Solver.Types.ComponentDeps as CD -- | A 'ConfiguredPackage' is a not-yet-installed package along with the -- total configuration information. The configuration information is total in -- the sense that it provides all the configuration information and so the -- final configure process will be independent of the environment. -- -- 'ConfiguredPackage' is assumed to not support Backpack. Only the -- @v2-build@ codepath supports Backpack. -- data ConfiguredPackage loc = ConfiguredPackage { confPkgId :: InstalledPackageId , confPkgSource :: SourcePackage loc -- ^ package info, including repo , confPkgFlags :: FlagAssignment -- ^ complete flag assignment for the package , confPkgStanzas :: OptionalStanzaSet -- ^ list of enabled optional stanzas for the package , confPkgDeps :: CD.ComponentDeps [ConfiguredId] -- ^ set of exact dependencies (installed or source). -- -- These must be consistent with the 'buildDepends' -- in the 'PackageDescription' that you'd get by -- applying the flag assignment and optional stanzas. } deriving (Eq, Show, Generic) -- | 'HasConfiguredId' indicates data types which have a 'ConfiguredId'. -- This type class is mostly used to conveniently finesse between -- 'ElaboratedPackage' and 'ElaboratedComponent'. -- instance HasConfiguredId (ConfiguredPackage loc) where configuredId pkg = ConfiguredId (packageId pkg) (Just (CLibName LMainLibName)) (confPkgId pkg) -- 'ConfiguredPackage' is the legacy codepath, we are guaranteed -- to never have a nontrivial 'UnitId' instance PackageFixedDeps (ConfiguredPackage loc) where depends = fmap (map (newSimpleUnitId . confInstId)) . confPkgDeps instance IsNode (ConfiguredPackage loc) where type Key (ConfiguredPackage loc) = UnitId nodeKey = newSimpleUnitId . confPkgId -- TODO: if we update ConfiguredPackage to support order-only -- dependencies, need to include those here. 
-- NB: have to deduplicate, otherwise the planner gets confused nodeNeighbors = ordNub . CD.flatDeps . depends instance (Binary loc) => Binary (ConfiguredPackage loc) instance Package (ConfiguredPackage loc) where packageId cpkg = packageId (confPkgSource cpkg) instance HasMungedPackageId (ConfiguredPackage loc) where mungedId cpkg = computeCompatPackageId (packageId cpkg) LMainLibName -- Never has nontrivial UnitId instance HasUnitId (ConfiguredPackage loc) where installedUnitId = newSimpleUnitId . confPkgId instance PackageInstalled (ConfiguredPackage loc) where installedDepends = CD.flatDeps . depends cabal-install-3.8.1.0/src/Distribution/Client/Types/Credentials.hs0000644000000000000000000000034107346545000023137 0ustar0000000000000000module Distribution.Client.Types.Credentials ( Username (..), Password (..), ) where import Prelude (String) newtype Username = Username { unUsername :: String } newtype Password = Password { unPassword :: String } cabal-install-3.8.1.0/src/Distribution/Client/Types/InstallMethod.hs0000644000000000000000000000162007346545000023452 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} module Distribution.Client.Types.InstallMethod where import Distribution.Client.Compat.Prelude import Prelude () import qualified Distribution.Compat.CharParsing as P import qualified Text.PrettyPrint as PP data InstallMethod = InstallMethodCopy | InstallMethodSymlink deriving (Eq, Show, Generic, Bounded, Enum) instance Binary InstallMethod instance Structured InstallMethod -- | Last instance Semigroup InstallMethod where _ <> x = x instance Parsec InstallMethod where parsec = do name <- P.munch1 isAlpha case name of "copy" -> pure InstallMethodCopy "symlink" -> pure InstallMethodSymlink _ -> P.unexpected $ "InstallMethod: " ++ name instance Pretty InstallMethod where pretty InstallMethodCopy = PP.text "copy" pretty InstallMethodSymlink = PP.text "symlink" 
cabal-install-3.8.1.0/src/Distribution/Client/Types/OverwritePolicy.hs0000644000000000000000000000166107346545000024056 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} module Distribution.Client.Types.OverwritePolicy where import Distribution.Client.Compat.Prelude import Prelude () import qualified Distribution.Compat.CharParsing as P import qualified Text.PrettyPrint as PP data OverwritePolicy = NeverOverwrite | AlwaysOverwrite | PromptOverwrite deriving (Show, Eq, Generic, Bounded, Enum) instance Binary OverwritePolicy instance Structured OverwritePolicy instance Parsec OverwritePolicy where parsec = do name <- P.munch1 isAlpha case name of "always" -> pure AlwaysOverwrite "never" -> pure NeverOverwrite "prompt" -> pure PromptOverwrite _ -> P.unexpected $ "OverwritePolicy: " ++ name instance Pretty OverwritePolicy where pretty NeverOverwrite = PP.text "never" pretty AlwaysOverwrite = PP.text "always" pretty PromptOverwrite = PP.text "prompt" cabal-install-3.8.1.0/src/Distribution/Client/Types/PackageLocation.hs0000644000000000000000000000313007346545000023725 0ustar0000000000000000{-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE DeriveGeneric #-} module Distribution.Client.Types.PackageLocation ( PackageLocation (..), UnresolvedPkgLoc, ResolvedPkgLoc, UnresolvedSourcePackage, ) where import Distribution.Client.Compat.Prelude import Prelude () import Network.URI (URI) import Distribution.Types.PackageId (PackageId) import Distribution.Client.Types.Repo import Distribution.Client.Types.SourceRepo (SourceRepoMaybe) import Distribution.Solver.Types.SourcePackage (SourcePackage) type UnresolvedPkgLoc = PackageLocation (Maybe FilePath) type ResolvedPkgLoc = PackageLocation FilePath data PackageLocation local = -- | An unpacked package in the given dir, or current dir LocalUnpackedPackage FilePath -- | A package as a tarball that's available as a local tarball | LocalTarballPackage FilePath -- | A package as a tarball from a remote URI | RemoteTarballPackage URI local 
-- | A package available as a tarball from a repository. -- -- It may be from a local repository or from a remote repository, with a -- locally cached copy. ie a package available from hackage | RepoTarballPackage Repo PackageId local -- | A package available from a version control system source repository | RemoteSourceRepoPackage SourceRepoMaybe local deriving (Show, Functor, Eq, Ord, Generic, Typeable) instance Binary local => Binary (PackageLocation local) instance Structured local => Structured (PackageLocation local) -- | Convenience alias for 'SourcePackage UnresolvedPkgLoc'. type UnresolvedSourcePackage = SourcePackage UnresolvedPkgLoc cabal-install-3.8.1.0/src/Distribution/Client/Types/PackageSpecifier.hs0000644000000000000000000000401207346545000024066 0ustar0000000000000000{-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE DeriveGeneric #-} module Distribution.Client.Types.PackageSpecifier ( PackageSpecifier (..), pkgSpecifierTarget, pkgSpecifierConstraints, ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Package (Package (..), packageName, packageVersion) import Distribution.Types.PackageName (PackageName) import Distribution.Version (thisVersion) import Distribution.Solver.Types.ConstraintSource import Distribution.Solver.Types.LabeledPackageConstraint import Distribution.Solver.Types.PackageConstraint -- | A fully or partially resolved reference to a package. -- data PackageSpecifier pkg = -- | A partially specified reference to a package (either source or -- installed). It is specified by package name and optionally some -- required properties. Use a dependency resolver to pick a specific -- package satisfying these properties. -- NamedPackage PackageName [PackageProperty] -- | A fully specified source package. 
-- | SpecificSourcePackage pkg deriving (Eq, Show, Functor, Generic) instance Binary pkg => Binary (PackageSpecifier pkg) instance Structured pkg => Structured (PackageSpecifier pkg) pkgSpecifierTarget :: Package pkg => PackageSpecifier pkg -> PackageName pkgSpecifierTarget (NamedPackage name _) = name pkgSpecifierTarget (SpecificSourcePackage pkg) = packageName pkg pkgSpecifierConstraints :: Package pkg => PackageSpecifier pkg -> [LabeledPackageConstraint] pkgSpecifierConstraints (NamedPackage name props) = map toLpc props where toLpc prop = LabeledPackageConstraint (PackageConstraint (scopeToplevel name) prop) ConstraintSourceUserTarget pkgSpecifierConstraints (SpecificSourcePackage pkg) = [LabeledPackageConstraint pc ConstraintSourceUserTarget] where pc = PackageConstraint (ScopeTarget $ packageName pkg) (PackagePropertyVersion $ thisVersion (packageVersion pkg)) cabal-install-3.8.1.0/src/Distribution/Client/Types/ReadyPackage.hs0000644000000000000000000000240507346545000023225 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} module Distribution.Client.Types.ReadyPackage ( GenericReadyPackage (..), ReadyPackage, ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Compat.Graph (IsNode (..)) import Distribution.Package (HasMungedPackageId, HasUnitId, Package, PackageInstalled) import Distribution.Client.Types.ConfiguredPackage (ConfiguredPackage) import Distribution.Client.Types.PackageLocation (UnresolvedPkgLoc) import Distribution.Solver.Types.PackageFixedDeps -- | Like 'ConfiguredPackage', but with all dependencies guaranteed to be -- installed already, hence itself ready to be installed. newtype GenericReadyPackage srcpkg = ReadyPackage srcpkg -- see 'ConfiguredPackage'. 
deriving (Eq, Show, Generic, Package, PackageFixedDeps, HasMungedPackageId, HasUnitId, PackageInstalled, Binary) -- Can't newtype derive this instance IsNode srcpkg => IsNode (GenericReadyPackage srcpkg) where type Key (GenericReadyPackage srcpkg) = Key srcpkg nodeKey (ReadyPackage spkg) = nodeKey spkg nodeNeighbors (ReadyPackage spkg) = nodeNeighbors spkg type ReadyPackage = GenericReadyPackage (ConfiguredPackage UnresolvedPkgLoc) cabal-install-3.8.1.0/src/Distribution/Client/Types/Repo.hs0000644000000000000000000001435007346545000021614 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} module Distribution.Client.Types.Repo ( -- * Remote repository RemoteRepo (..), emptyRemoteRepo, -- * Local repository (no-index) LocalRepo (..), emptyLocalRepo, localRepoCacheKey, -- * Repository Repo (..), repoName, isRepoRemote, maybeRepoRemote, ) where import Distribution.Client.Compat.Prelude import Prelude () import Network.URI (URI (..), nullURI, parseAbsoluteURI, uriToString) import Distribution.Simple.Utils (toUTF8BS) import Distribution.Client.HashValue (hashValue, showHashValue, truncateHash) import qualified Data.ByteString.Lazy.Char8 as LBS import qualified Distribution.Compat.CharParsing as P import qualified Text.PrettyPrint as Disp import Distribution.Client.Types.RepoName ------------------------------------------------------------------------------- -- Remote repository ------------------------------------------------------------------------------- data RemoteRepo = RemoteRepo { remoteRepoName :: RepoName, remoteRepoURI :: URI, -- | Enable secure access? 
-- -- 'Nothing' here represents "whatever the default is"; this is important -- to allow for a smooth transition from opt-in to opt-out security -- (once we switch to opt-out, all access to the central Hackage -- repository should be secure by default) remoteRepoSecure :: Maybe Bool, -- | Root key IDs (for bootstrapping) remoteRepoRootKeys :: [String], -- | Threshold for verification during bootstrapping remoteRepoKeyThreshold :: Int, -- | Normally a repo just specifies an HTTP or HTTPS URI, but as a -- special case we may know a repo supports both and want to try HTTPS -- if we can, but still allow falling back to HTTP. -- -- This field is not currently stored in the config file, but is filled -- in automagically for known repos. remoteRepoShouldTryHttps :: Bool } deriving (Show, Eq, Ord, Generic) instance Binary RemoteRepo instance Structured RemoteRepo instance Pretty RemoteRepo where pretty r = pretty (remoteRepoName r) <<>> Disp.colon <<>> Disp.text (uriToString id (remoteRepoURI r) []) -- | Note: serialised format represents 'RemoteRepo' only partially. instance Parsec RemoteRepo where parsec = do name <- parsec _ <- P.char ':' uriStr <- P.munch1 (\c -> isAlphaNum c || c `elem` ("+-=._/*()@'$:;&!?~" :: String)) uri <- maybe (fail $ "Cannot parse URI:" ++ uriStr) return (parseAbsoluteURI uriStr) return RemoteRepo { remoteRepoName = name , remoteRepoURI = uri , remoteRepoSecure = Nothing , remoteRepoRootKeys = [] , remoteRepoKeyThreshold = 0 , remoteRepoShouldTryHttps = False } -- | Construct a partial 'RemoteRepo' value to fold the field parser list over. emptyRemoteRepo :: RepoName -> RemoteRepo emptyRemoteRepo name = RemoteRepo name nullURI Nothing [] 0 False ------------------------------------------------------------------------------- -- Local repository ------------------------------------------------------------------------------- -- | /no-index/ style local repositories. 
-- -- https://github.com/haskell/cabal/issues/6359 data LocalRepo = LocalRepo { localRepoName :: RepoName , localRepoPath :: FilePath , localRepoSharedCache :: Bool } deriving (Show, Eq, Ord, Generic) instance Binary LocalRepo instance Structured LocalRepo -- | Note: doesn't parse 'localRepoSharedCache' field. instance Parsec LocalRepo where parsec = do n <- parsec _ <- P.char ':' p <- P.munch1 (const True) -- restrict what can be a path? return (LocalRepo n p False) instance Pretty LocalRepo where pretty (LocalRepo n p _) = pretty n <<>> Disp.colon <<>> Disp.text p -- | Construct a partial 'LocalRepo' value to fold the field parser list over. emptyLocalRepo :: RepoName -> LocalRepo emptyLocalRepo name = LocalRepo name "" False -- | Calculate a cache key for local-repo. -- -- For remote repositories we just use name, but local repositories may -- all be named "local", so we add a bit of `localRepoPath` into the -- mix. localRepoCacheKey :: LocalRepo -> String localRepoCacheKey local = unRepoName (localRepoName local) ++ "-" ++ hashPart where hashPart = showHashValue $ truncateHash 8 $ hashValue $ LBS.fromStrict $ toUTF8BS $ localRepoPath local ------------------------------------------------------------------------------- -- Any repository ------------------------------------------------------------------------------- -- | Different kinds of repositories -- -- NOTE: It is important that this type remains serializable. data Repo -- | Local repository, without index. -- -- https://github.com/haskell/cabal/issues/6359 = RepoLocalNoIndex { repoLocal :: LocalRepo , repoLocalDir :: FilePath } -- | Standard (unsecured) remote repositories | RepoRemote { repoRemote :: RemoteRepo , repoLocalDir :: FilePath } -- | Secure repositories -- -- Although this contains the same fields as 'RepoRemote', we use a separate -- constructor to avoid confusing the two. 
-- -- Not all access to a secure repo goes through the hackage-security -- library currently; code paths that do not still make use of the -- 'repoRemote' and 'repoLocalDir' fields directly. | RepoSecure { repoRemote :: RemoteRepo , repoLocalDir :: FilePath } deriving (Show, Eq, Ord, Generic) instance Binary Repo instance Structured Repo -- | Check if this is a remote repo isRepoRemote :: Repo -> Bool isRepoRemote RepoLocalNoIndex{} = False isRepoRemote _ = True -- | Extract @RemoteRepo@ from @Repo@ if remote. maybeRepoRemote :: Repo -> Maybe RemoteRepo maybeRepoRemote (RepoLocalNoIndex _ _localDir) = Nothing maybeRepoRemote (RepoRemote r _localDir) = Just r maybeRepoRemote (RepoSecure r _localDir) = Just r repoName :: Repo -> RepoName repoName (RepoLocalNoIndex r _) = localRepoName r repoName (RepoRemote r _) = remoteRepoName r repoName (RepoSecure r _) = remoteRepoName r cabal-install-3.8.1.0/src/Distribution/Client/Types/RepoName.hs0000644000000000000000000000204707346545000022415 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} module Distribution.Client.Types.RepoName ( RepoName (..), ) where import Distribution.Client.Compat.Prelude import Prelude () import qualified Distribution.Compat.CharParsing as P import qualified Text.PrettyPrint as Disp -- $setup -- >>> import Distribution.Parsec -- | Repository name. -- -- May be used as path segment. -- newtype RepoName = RepoName { unRepoName :: String } deriving (Show, Eq, Ord, Generic) instance Binary RepoName instance Structured RepoName instance NFData RepoName instance Pretty RepoName where pretty = Disp.text . 
unRepoName -- | -- -- >>> simpleParsec "hackage.haskell.org" :: Maybe RepoName -- Just (RepoName "hackage.haskell.org") -- -- >>> simpleParsec "0123" :: Maybe RepoName -- Nothing -- instance Parsec RepoName where parsec = RepoName <$> parser where parser = (:) <$> lead <*> rest lead = P.satisfy (\c -> isAlpha c || c == '_' || c == '-' || c == '.') rest = P.munch (\c -> isAlphaNum c || c == '_' || c == '-' || c == '.') cabal-install-3.8.1.0/src/Distribution/Client/Types/SourcePackageDb.hs0000644000000000000000000000512607346545000023672 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} module Distribution.Client.Types.SourcePackageDb ( SourcePackageDb (..), lookupDependency, lookupPackageName, ) where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Types.PackageName (PackageName) import Distribution.Types.VersionRange (VersionRange, withinRange) import Distribution.Package (packageVersion) import Distribution.Client.Types.PackageLocation (UnresolvedSourcePackage) import qualified Distribution.Solver.Types.PackageIndex as PackageIndex import Distribution.Solver.Types.PackageIndex (PackageIndex) import qualified Data.Map as Map -- | This is the information we get from a @00-index.tar.gz@ hackage index. -- data SourcePackageDb = SourcePackageDb { packageIndex :: PackageIndex UnresolvedSourcePackage , packagePreferences :: Map PackageName VersionRange } deriving (Eq, Generic) instance Binary SourcePackageDb -- | Does a case-sensitive search by package name and a range of versions. -- -- We get back any number of versions of the specified package name, all -- satisfying the version range constraint. 
-- -- Additionally, `preferred-versions` (such as version deprecation) are -- honoured in this lookup, which is the only difference to -- 'PackageIndex.lookupDependency' lookupDependency :: SourcePackageDb -> PackageName -> VersionRange -> [UnresolvedSourcePackage] lookupDependency sourceDb pname version = filterPreferredVersions pref $ PackageIndex.lookupDependency (packageIndex sourceDb) pname version where pref = Map.lookup pname (packagePreferences sourceDb) -- | Does a case-sensitive search by package name. -- -- Additionally, `preferred-versions` (such as version deprecation) are -- honoured in this lookup, which is the only difference to -- 'PackageIndex.lookupPackageName' lookupPackageName :: SourcePackageDb -> PackageName -> [UnresolvedSourcePackage] lookupPackageName sourceDb pname = filterPreferredVersions pref $ PackageIndex.lookupPackageName (packageIndex sourceDb) pname where pref = Map.lookup pname (packagePreferences sourceDb) -- | @filterPreferredVersions 'range' 'versions'@. -- If a 'range' is given, only keep versions that satisfy the range. -- If 'range' is 'Nothing', all versions are kept. -- -- The 'range' is expected to be obtained from the 'SourcePackageDb.packagePreferences'. filterPreferredVersions :: Maybe VersionRange -> [UnresolvedSourcePackage] -> [UnresolvedSourcePackage] filterPreferredVersions Nothing versions = versions filterPreferredVersions (Just range) versions = filter ((`withinRange` range) . 
packageVersion) versions cabal-install-3.8.1.0/src/Distribution/Client/Types/SourceRepo.hs0000644000000000000000000001171407346545000022776 0ustar0000000000000000{-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE UndecidableInstances #-} module Distribution.Client.Types.SourceRepo ( SourceRepositoryPackage (..), SourceRepoList, SourceRepoMaybe, SourceRepoProxy, srpHoist, srpToProxy, srpFanOut, sourceRepositoryPackageGrammar, ) where import Distribution.Client.Compat.Prelude import Distribution.Compat.Lens (Lens, Lens') import Prelude () import Distribution.FieldGrammar import Distribution.Types.SourceRepo (RepoType (..)) -- | @source-repository-package@ definition -- data SourceRepositoryPackage f = SourceRepositoryPackage { srpType :: !RepoType , srpLocation :: !String , srpTag :: !(Maybe String) , srpBranch :: !(Maybe String) , srpSubdir :: !(f FilePath) , srpCommand :: ![String] } deriving (Generic) deriving instance (Eq (f FilePath)) => Eq (SourceRepositoryPackage f) deriving instance (Ord (f FilePath)) => Ord (SourceRepositoryPackage f) deriving instance (Show (f FilePath)) => Show (SourceRepositoryPackage f) deriving instance (Binary (f FilePath)) => Binary (SourceRepositoryPackage f) deriving instance (Typeable f, Structured (f FilePath)) => Structured (SourceRepositoryPackage f) -- | Read from @cabal.project@ type SourceRepoList = SourceRepositoryPackage [] -- | Distilled from 'Distribution.Types.SourceRepo.SourceRepo' type SourceRepoMaybe = SourceRepositoryPackage Maybe -- | 'SourceRepositoryPackage' without subdir. Used in clone errors. Cloning doesn't care about subdirectory. type SourceRepoProxy = SourceRepositoryPackage Proxy srpHoist :: (forall x. 
f x -> g x) -> SourceRepositoryPackage f -> SourceRepositoryPackage g srpHoist nt s = s { srpSubdir = nt (srpSubdir s) } srpToProxy :: SourceRepositoryPackage f -> SourceRepositoryPackage Proxy srpToProxy s = s { srpSubdir = Proxy } -- | Split single @source-repository-package@ declaration with multiple subdirs, -- into multiple ones with at most single subdir. srpFanOut :: SourceRepositoryPackage [] -> NonEmpty (SourceRepositoryPackage Maybe) srpFanOut s@SourceRepositoryPackage { srpSubdir = [] } = s { srpSubdir = Nothing } :| [] srpFanOut s@SourceRepositoryPackage { srpSubdir = d:ds } = f d :| map f ds where f subdir = s { srpSubdir = Just subdir } ------------------------------------------------------------------------------- -- Lens ------------------------------------------------------------------------------- srpTypeLens :: Lens' (SourceRepositoryPackage f) RepoType srpTypeLens f s = fmap (\x -> s { srpType = x }) (f (srpType s)) {-# INLINE srpTypeLens #-} srpLocationLens :: Lens' (SourceRepositoryPackage f) String srpLocationLens f s = fmap (\x -> s { srpLocation = x }) (f (srpLocation s)) {-# INLINE srpLocationLens #-} srpTagLens :: Lens' (SourceRepositoryPackage f) (Maybe String) srpTagLens f s = fmap (\x -> s { srpTag = x }) (f (srpTag s)) {-# INLINE srpTagLens #-} srpBranchLens :: Lens' (SourceRepositoryPackage f) (Maybe String) srpBranchLens f s = fmap (\x -> s { srpBranch = x }) (f (srpBranch s)) {-# INLINE srpBranchLens #-} srpSubdirLens :: Lens (SourceRepositoryPackage f) (SourceRepositoryPackage g) (f FilePath) (g FilePath) srpSubdirLens f s = fmap (\x -> s { srpSubdir = x }) (f (srpSubdir s)) {-# INLINE srpSubdirLens #-} srpCommandLensNE :: Lens' (SourceRepositoryPackage f) (Maybe (NonEmpty String)) srpCommandLensNE f s = fmap (\x -> s { srpCommand = maybe [] toList x }) (f (nonEmpty (srpCommand s))) {-# INLINE srpCommandLensNE #-} ------------------------------------------------------------------------------- -- Parser & PPrinter 
------------------------------------------------------------------------------- sourceRepositoryPackageGrammar :: ( FieldGrammar c g, Applicative (g SourceRepoList) , c (Identity RepoType) , c (List NoCommaFSep FilePathNT String) , c (NonEmpty' NoCommaFSep Token String) ) => g SourceRepoList SourceRepoList sourceRepositoryPackageGrammar = SourceRepositoryPackage <$> uniqueField "type" srpTypeLens <*> uniqueFieldAla "location" Token srpLocationLens <*> optionalFieldAla "tag" Token srpTagLens <*> optionalFieldAla "branch" Token srpBranchLens <*> monoidalFieldAla "subdir" (alaList' NoCommaFSep FilePathNT) srpSubdirLens -- note: NoCommaFSep is somewhat important for roundtrip, as "." is there... <*> fmap (maybe [] toList) pcc where pcc = optionalFieldAla "post-checkout-command" (alaNonEmpty' NoCommaFSep Token) srpCommandLensNE {-# SPECIALIZE sourceRepositoryPackageGrammar :: ParsecFieldGrammar' SourceRepoList #-} {-# SPECIALIZE sourceRepositoryPackageGrammar :: PrettyFieldGrammar' SourceRepoList #-} cabal-install-3.8.1.0/src/Distribution/Client/Types/WriteGhcEnvironmentFilesPolicy.hs0000644000000000000000000000131707346545000027012 0ustar0000000000000000{-# LANGUAGE DeriveGeneric #-} module Distribution.Client.Types.WriteGhcEnvironmentFilesPolicy ( WriteGhcEnvironmentFilesPolicy (..), ) where import Prelude () import Distribution.Client.Compat.Prelude -- | Whether 'v2-build' should write a .ghc.environment file after -- success. Possible values: 'always', 'never' (the default), 'ghc8.4.4+' -- (8.4.4 is the earliest version that supports -- '-package-env -'). 
data WriteGhcEnvironmentFilesPolicy = AlwaysWriteGhcEnvironmentFiles | NeverWriteGhcEnvironmentFiles | WriteGhcEnvironmentFilesOnlyForGhc844AndNewer deriving (Eq, Enum, Bounded, Generic, Show) instance Binary WriteGhcEnvironmentFilesPolicy instance Structured WriteGhcEnvironmentFilesPolicy cabal-install-3.8.1.0/src/Distribution/Client/Upload.hs0000644000000000000000000002357407346545000021037 0ustar0000000000000000module Distribution.Client.Upload (upload, uploadDoc, report) where import Distribution.Client.Compat.Prelude import qualified Prelude as Unsafe (tail, head, read) import Distribution.Client.Types.Credentials ( Username(..), Password(..) ) import Distribution.Client.Types.Repo (Repo, RemoteRepo(..), maybeRepoRemote) import Distribution.Client.Types.RepoName (unRepoName) import Distribution.Client.HttpUtils ( HttpTransport(..), remoteRepoTryUpgradeToHttps ) import Distribution.Client.Setup ( IsCandidate(..), RepoContext(..) ) import Distribution.Simple.Utils (notice, warn, info, die', toUTF8BS) import Distribution.Utils.String (trim) import Distribution.Client.Config import qualified Distribution.Client.BuildReports.Anonymous as BuildReport import Distribution.Client.BuildReports.Anonymous (parseBuildReport) import qualified Distribution.Client.BuildReports.Upload as BuildReport import Network.URI (URI(uriPath, uriAuthority), URIAuth(uriRegName)) import Network.HTTP (Header(..), HeaderName(..)) import System.IO (hFlush, stdout) import System.IO.Echo (withoutInputEcho) import System.FilePath ((), takeExtension, takeFileName, dropExtension) import qualified System.FilePath.Posix as FilePath.Posix (()) import System.Directory type Auth = Maybe (String, String) -- > stripExtensions ["tar", "gz"] "foo.tar.gz" -- Just "foo" -- > stripExtensions ["tar", "gz"] "foo.gz.tar" -- Nothing stripExtensions :: [String] -> FilePath -> Maybe String stripExtensions exts path = foldM f path (reverse exts) where f p e | takeExtension p == '.':e = Just (dropExtension p) | 
otherwise = Nothing upload :: Verbosity -> RepoContext -> Maybe Username -> Maybe Password -> IsCandidate -> [FilePath] -> IO () upload verbosity repoCtxt mUsername mPassword isCandidate paths = do let repos :: [Repo] repos = repoContextRepos repoCtxt transport <- repoContextGetTransport repoCtxt targetRepo <- case [ remoteRepo | Just remoteRepo <- map maybeRepoRemote repos ] of [] -> die' verbosity "Cannot upload. No remote repositories are configured." (r:rs) -> remoteRepoTryUpgradeToHttps verbosity transport (last (r:|rs)) let targetRepoURI :: URI targetRepoURI = remoteRepoURI targetRepo domain :: String domain = maybe "Hackage" uriRegName $ uriAuthority targetRepoURI rootIfEmpty x = if null x then "/" else x uploadURI :: URI uploadURI = targetRepoURI { uriPath = rootIfEmpty (uriPath targetRepoURI) FilePath.Posix. case isCandidate of IsCandidate -> "packages/candidates" IsPublished -> "upload" } packageURI pkgid = targetRepoURI { uriPath = rootIfEmpty (uriPath targetRepoURI) FilePath.Posix. concat [ "package/", pkgid , case isCandidate of IsCandidate -> "/candidate" IsPublished -> "" ] } Username username <- maybe (promptUsername domain) return mUsername Password password <- maybe (promptPassword domain) return mPassword let auth = Just (username,password) for_ paths $ \path -> do notice verbosity $ "Uploading " ++ path ++ "... " case fmap takeFileName (stripExtensions ["tar", "gz"] path) of Just pkgid -> handlePackage transport verbosity uploadURI (packageURI pkgid) auth isCandidate path -- This case shouldn't really happen, since we check in Main that we -- only pass tar.gz files to upload. 
Nothing -> die' verbosity $ "Not a tar.gz file: " ++ path uploadDoc :: Verbosity -> RepoContext -> Maybe Username -> Maybe Password -> IsCandidate -> FilePath -> IO () uploadDoc verbosity repoCtxt mUsername mPassword isCandidate path = do let repos = repoContextRepos repoCtxt transport <- repoContextGetTransport repoCtxt targetRepo <- case [ remoteRepo | Just remoteRepo <- map maybeRepoRemote repos ] of [] -> die' verbosity $ "Cannot upload. No remote repositories are configured." (r:rs) -> remoteRepoTryUpgradeToHttps verbosity transport (last (r:|rs)) let targetRepoURI = remoteRepoURI targetRepo domain = maybe "Hackage" uriRegName $ uriAuthority targetRepoURI rootIfEmpty x = if null x then "/" else x uploadURI = targetRepoURI { uriPath = rootIfEmpty (uriPath targetRepoURI) FilePath.Posix. concat [ "package/", pkgid , case isCandidate of IsCandidate -> "/candidate" IsPublished -> "" , "/docs" ] } packageUri = targetRepoURI { uriPath = rootIfEmpty (uriPath targetRepoURI) FilePath.Posix. concat [ "package/", pkgid , case isCandidate of IsCandidate -> "/candidate" IsPublished -> "" ] } (reverseSuffix, reversePkgid) = break (== '-') (reverse (takeFileName path)) pkgid = reverse $ Unsafe.tail reversePkgid when (reverse reverseSuffix /= "docs.tar.gz" || null reversePkgid || Unsafe.head reversePkgid /= '-') $ die' verbosity "Expected a file name matching the pattern -docs.tar.gz" Username username <- maybe (promptUsername domain) return mUsername Password password <- maybe (promptPassword domain) return mPassword let auth = Just (username,password) headers = [ Header HdrContentType "application/x-tar" , Header HdrContentEncoding "gzip" ] notice verbosity $ "Uploading documentation " ++ path ++ "... " resp <- putHttpFile transport verbosity uploadURI path auth headers case resp of -- Hackage responds with 204 No Content when docs are uploaded -- successfully. 
(code,_) | code `elem` [200,204] -> do notice verbosity $ okMessage packageUri (code,err) -> do notice verbosity $ "Error uploading documentation " ++ path ++ ": " ++ "http code " ++ show code ++ "\n" ++ err exitFailure where okMessage packageUri = case isCandidate of IsCandidate -> "Documentation successfully uploaded for package candidate. " ++ "You can now preview the result at '" ++ show packageUri ++ "'. To upload non-candidate documentation, use 'cabal upload --publish'." IsPublished -> "Package documentation successfully published. You can now view it at '" ++ show packageUri ++ "'." promptUsername :: String -> IO Username promptUsername domain = do putStr $ domain ++ " username: " hFlush stdout fmap Username getLine promptPassword :: String -> IO Password promptPassword domain = do putStr $ domain ++ " password: " hFlush stdout -- save/restore the terminal echoing status (no echoing for entering the password) passwd <- withoutInputEcho $ fmap Password getLine putStrLn "" return passwd report :: Verbosity -> RepoContext -> Maybe Username -> Maybe Password -> IO () report verbosity repoCtxt mUsername mPassword = do let repos :: [Repo] repos = repoContextRepos repoCtxt remoteRepos :: [RemoteRepo] remoteRepos = mapMaybe maybeRepoRemote repos for_ remoteRepos $ \remoteRepo -> do let domain = maybe "Hackage" uriRegName $ uriAuthority (remoteRepoURI remoteRepo) Username username <- maybe (promptUsername domain) return mUsername Password password <- maybe (promptPassword domain) return mPassword let auth :: (String, String) auth = (username, password) dotCabal <- getCabalDir let srcDir :: FilePath srcDir = dotCabal "reports" unRepoName (remoteRepoName remoteRepo) -- We don't want to bomb out just because we haven't built any packages -- from this repo yet. 
srcExists <- doesDirectoryExist srcDir when srcExists $ do contents <- getDirectoryContents srcDir for_ (filter (\c -> takeExtension c ==".log") contents) $ \logFile -> do inp <- readFile (srcDir logFile) let (reportStr, buildLog) = Unsafe.read inp :: (String,String) -- TODO: eradicateNoParse case parseBuildReport (toUTF8BS reportStr) of Left errs -> warn verbosity $ "Errors: " ++ errs -- FIXME Right report' -> do info verbosity $ "Uploading report for " ++ prettyShow (BuildReport.package report') BuildReport.uploadReports verbosity repoCtxt auth (remoteRepoURI remoteRepo) [(report', Just buildLog)] return () handlePackage :: HttpTransport -> Verbosity -> URI -> URI -> Auth -> IsCandidate -> FilePath -> IO () handlePackage transport verbosity uri packageUri auth isCandidate path = do resp <- postHttpFile transport verbosity uri path auth case resp of (code,warnings) | code `elem` [200, 204] -> notice verbosity $ okMessage isCandidate ++ if null warnings then "" else "\n" ++ formatWarnings (trim warnings) (code,err) -> do notice verbosity $ "Error uploading " ++ path ++ ": " ++ "http code " ++ show code ++ "\n" ++ err exitFailure where okMessage :: IsCandidate -> String okMessage IsCandidate = "Package successfully uploaded as candidate. " ++ "You can now preview the result at '" ++ show packageUri ++ "'. To publish the candidate, use 'cabal upload --publish'." okMessage IsPublished = "Package successfully published. You can now view it at '" ++ show packageUri ++ "'." formatWarnings :: String -> String formatWarnings x = "Warnings:\n" ++ (unlines . map ("- " ++) . lines) x cabal-install-3.8.1.0/src/Distribution/Client/Utils.hs0000644000000000000000000004241307346545000020704 0ustar0000000000000000{-# LANGUAGE ForeignFunctionInterface, ScopedTypeVariables, CPP #-} module Distribution.Client.Utils ( MergeResult(..) 
, mergeBy, duplicates, duplicatesBy , readMaybe , inDir, withEnv, withEnvOverrides , logDirChange, withExtraPathEnv , determineNumJobs, numberOfProcessors , removeExistingFile , withTempFileName , makeAbsoluteToCwd , makeRelativeToCwd, makeRelativeToDir , makeRelativeCanonical , filePathToByteString , byteStringToFilePath, tryCanonicalizePath , canonicalizePathNoThrow , moreRecentFile, existsAndIsMoreRecentThan , tryFindAddSourcePackageDesc , tryFindPackageDesc , findOpenProgramLocation , relaxEncodingErrors , ProgressPhase (..) , progressMessage , pvpize , incVersion , getCurrentYear , listFilesRecursive , listFilesInside , safeRead ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Compat.Environment import Distribution.Compat.Time ( getModTime ) import Distribution.Simple.Setup ( Flag(..) ) import Distribution.Version import Distribution.Simple.Utils ( die', findPackageDesc, noticeNoWrap ) import Distribution.System ( Platform (..), OS(..)) import qualified Data.ByteString.Lazy as BS import Data.Bits ( (.|.), shiftL, shiftR ) import System.FilePath import Control.Monad ( zipWithM_ ) import Data.List ( groupBy ) import Foreign.C.Types ( CInt(..) 
) import qualified Control.Exception as Exception ( finally ) import qualified Control.Exception.Safe as Safe ( bracket ) import System.Directory ( canonicalizePath, doesFileExist, findExecutable, getCurrentDirectory , removeFile, setCurrentDirectory, getDirectoryContents, doesDirectoryExist ) import System.IO ( Handle, hClose, openTempFile , hGetEncoding, hSetEncoding ) import System.IO.Unsafe ( unsafePerformIO ) import GHC.IO.Encoding ( recover, TextEncoding(TextEncoding) ) import GHC.IO.Encoding.Failure ( recoverEncode, CodingFailureMode(TransliterateCodingFailure) ) import Data.Time.Clock.POSIX (getCurrentTime) import Data.Time.LocalTime (getCurrentTimeZone, localDay) import Data.Time (utcToLocalTime) import Data.Time.Calendar (toGregorian) #if defined(mingw32_HOST_OS) || MIN_VERSION_directory(1,2,3) import qualified System.Directory as Dir import qualified System.IO.Error as IOError #endif -- | Generic merging utility. For sorted input lists this is a full outer join. -- mergeBy :: forall a b. (a -> b -> Ordering) -> [a] -> [b] -> [MergeResult a b] mergeBy cmp = merge where merge :: [a] -> [b] -> [MergeResult a b] merge [] ys = [ OnlyInRight y | y <- ys] merge xs [] = [ OnlyInLeft x | x <- xs] merge (x:xs) (y:ys) = case x `cmp` y of GT -> OnlyInRight y : merge (x:xs) ys EQ -> InBoth x y : merge xs ys LT -> OnlyInLeft x : merge xs (y:ys) data MergeResult a b = OnlyInLeft a | InBoth a b | OnlyInRight b duplicates :: Ord a => [a] -> [[a]] duplicates = duplicatesBy compare duplicatesBy :: forall a. (a -> a -> Ordering) -> [a] -> [[a]] duplicatesBy cmp = filter moreThanOne . groupBy eq . sortBy cmp where eq :: a -> a -> Bool eq a b = case cmp a b of EQ -> True _ -> False moreThanOne (_:_:_) = True moreThanOne _ = False -- | Like 'removeFile', but does not throw an exception when the file does not -- exist. 
removeExistingFile :: FilePath -> IO () removeExistingFile path = do exists <- doesFileExist path when exists $ removeFile path -- | A variant of 'withTempFile' that only gives us the file name, and while -- it will clean up the file afterwards, it's lenient if the file is -- moved\/deleted. -- withTempFileName :: FilePath -> String -> (FilePath -> IO a) -> IO a withTempFileName tmpDir template action = Safe.bracket (openTempFile tmpDir template) (\(name, _) -> removeExistingFile name) (\(name, h) -> hClose h >> action name) -- | Executes the action in the specified directory. -- -- Warning: This operation is NOT thread-safe, because current -- working directory is a process-global concept. inDir :: Maybe FilePath -> IO a -> IO a inDir Nothing m = m inDir (Just d) m = do old <- getCurrentDirectory setCurrentDirectory d m `Exception.finally` setCurrentDirectory old -- | Executes the action with an environment variable set to some -- value. -- -- Warning: This operation is NOT thread-safe, because current -- environment is a process-global concept. withEnv :: String -> String -> IO a -> IO a withEnv k v m = do mb_old <- lookupEnv k setEnv k v m `Exception.finally` (case mb_old of Nothing -> unsetEnv k Just old -> setEnv k old) -- | Executes the action with a list of environment variables and -- corresponding overrides, where -- -- * @'Just' v@ means \"set the environment variable's value to @v@\". -- * 'Nothing' means \"unset the environment variable\". -- -- Warning: This operation is NOT thread-safe, because current -- environment is a process-global concept. 
withEnvOverrides :: [(String, Maybe FilePath)] -> IO a -> IO a withEnvOverrides overrides m = do mb_olds <- traverse lookupEnv envVars traverse_ (uncurry update) overrides m `Exception.finally` zipWithM_ update envVars mb_olds where envVars :: [String] envVars = map fst overrides update :: String -> Maybe FilePath -> IO () update var Nothing = unsetEnv var update var (Just val) = setEnv var val -- | Executes the action, increasing the PATH environment -- in some way -- -- Warning: This operation is NOT thread-safe, because the -- environment variables are a process-global concept. withExtraPathEnv :: [FilePath] -> IO a -> IO a withExtraPathEnv paths m = do oldPathSplit <- getSearchPath let newPath :: String newPath = mungePath $ intercalate [searchPathSeparator] (paths ++ oldPathSplit) oldPath :: String oldPath = mungePath $ intercalate [searchPathSeparator] oldPathSplit -- TODO: This is a horrible hack to work around the fact that -- setEnv can't take empty values as an argument mungePath p | p == "" = "/dev/null" | otherwise = p setEnv "PATH" newPath m `Exception.finally` setEnv "PATH" oldPath -- | Log directory change in 'make' compatible syntax logDirChange :: (String -> IO ()) -> Maybe FilePath -> IO a -> IO a logDirChange _ Nothing m = m logDirChange l (Just d) m = do l $ "cabal: Entering directory '" ++ d ++ "'\n" m `Exception.finally` (l $ "cabal: Leaving directory '" ++ d ++ "'\n") foreign import ccall "getNumberOfProcessors" c_getNumberOfProcessors :: IO CInt -- The number of processors is not going to change during the duration of the -- program, so unsafePerformIO is safe here. numberOfProcessors :: Int numberOfProcessors = fromEnum $ unsafePerformIO c_getNumberOfProcessors -- | Determine the number of jobs to use given the value of the '-j' flag. 
determineNumJobs :: Flag (Maybe Int) -> Int determineNumJobs numJobsFlag = case numJobsFlag of NoFlag -> 1 Flag Nothing -> numberOfProcessors Flag (Just n) -> n -- | Given a relative path, make it absolute relative to the current -- directory. Absolute paths are returned unmodified. makeAbsoluteToCwd :: FilePath -> IO FilePath makeAbsoluteToCwd path | isAbsolute path = return path | otherwise = do cwd <- getCurrentDirectory return $! cwd path -- | Given a path (relative or absolute), make it relative to the current -- directory, including using @../..@ if necessary. makeRelativeToCwd :: FilePath -> IO FilePath makeRelativeToCwd path = makeRelativeCanonical <$> canonicalizePath path <*> getCurrentDirectory -- | Given a path (relative or absolute), make it relative to the given -- directory, including using @../..@ if necessary. makeRelativeToDir :: FilePath -> FilePath -> IO FilePath makeRelativeToDir path dir = makeRelativeCanonical <$> canonicalizePath path <*> canonicalizePath dir -- | Given a canonical absolute path and canonical absolute dir, make the path -- relative to the directory, including using @../..@ if necessary. Returns -- the original absolute path if it is not on the same drive as the given dir. makeRelativeCanonical :: FilePath -> FilePath -> FilePath makeRelativeCanonical path dir | takeDrive path /= takeDrive dir = path | otherwise = go (splitPath path) (splitPath dir) where go (p:ps) (d:ds) | p' == d' = go ps ds where (p', d') = (dropTrailingPathSeparator p, dropTrailingPathSeparator d) go [] [] = "./" go ps ds = joinPath (replicate (length ds) ".." ++ ps) -- | Convert a 'FilePath' to a lazy 'ByteString'. Each 'Char' is -- encoded as a little-endian 'Word32'. filePathToByteString :: FilePath -> BS.ByteString filePathToByteString p = BS.pack $ foldr conv [] codepts where codepts :: [Word32] codepts = map (fromIntegral . 
ord) p conv :: Word32 -> [Word8] -> [Word8] conv w32 rest = b0:b1:b2:b3:rest where b0 = fromIntegral $ w32 b1 = fromIntegral $ w32 `shiftR` 8 b2 = fromIntegral $ w32 `shiftR` 16 b3 = fromIntegral $ w32 `shiftR` 24 -- | Reverse operation to 'filePathToByteString'. byteStringToFilePath :: BS.ByteString -> FilePath byteStringToFilePath bs | bslen `mod` 4 /= 0 = unexpected | otherwise = go 0 where unexpected = "Distribution.Client.Utils.byteStringToFilePath: unexpected" bslen = BS.length bs go i | i == bslen = [] | otherwise = (chr . fromIntegral $ w32) : go (i+4) where w32 :: Word32 w32 = b0 .|. (b1 `shiftL` 8) .|. (b2 `shiftL` 16) .|. (b3 `shiftL` 24) b0 = fromIntegral $ BS.index bs i b1 = fromIntegral $ BS.index bs (i + 1) b2 = fromIntegral $ BS.index bs (i + 2) b3 = fromIntegral $ BS.index bs (i + 3) -- | Workaround for the inconsistent behaviour of 'canonicalizePath'. Always -- throws an error if the path refers to a non-existent file. tryCanonicalizePath :: FilePath -> IO FilePath tryCanonicalizePath path = do ret <- canonicalizePath path #if defined(mingw32_HOST_OS) || MIN_VERSION_directory(1,2,3) exists <- liftM2 (||) (doesFileExist ret) (Dir.doesDirectoryExist ret) unless exists $ IOError.ioError $ IOError.mkIOError IOError.doesNotExistErrorType "canonicalizePath" Nothing (Just ret) #endif return ret -- | A non-throwing wrapper for 'canonicalizePath'. If 'canonicalizePath' throws -- an exception, returns the path argument unmodified. canonicalizePathNoThrow :: FilePath -> IO FilePath canonicalizePathNoThrow path = do canonicalizePath path `catchIO` (\_ -> return path) -------------------- -- Modification time -- | Like Distribution.Simple.Utils.moreRecentFile, but uses getModTime instead -- of getModificationTime for higher precision. We can't merge the two because -- Distribution.Client.Time uses MIN_VERSION macros. 
moreRecentFile :: FilePath -> FilePath -> IO Bool moreRecentFile a b = do exists <- doesFileExist b if not exists then return True else do tb <- getModTime b ta <- getModTime a return (ta > tb) -- | Like 'moreRecentFile', but also checks that the first file exists. existsAndIsMoreRecentThan :: FilePath -> FilePath -> IO Bool existsAndIsMoreRecentThan a b = do exists <- doesFileExist a if not exists then return False else a `moreRecentFile` b -- | Sets the handler for encoding errors to one that transliterates invalid -- characters into one present in the encoding (i.e., \'?\'). -- This is opposed to the default behavior, which is to throw an exception on -- error. This function will ignore file handles that have a Unicode encoding -- set. It's a no-op for versions of `base` less than 4.4. relaxEncodingErrors :: Handle -> IO () relaxEncodingErrors handle = do maybeEncoding <- hGetEncoding handle case maybeEncoding of Just (TextEncoding name decoder encoder) | not ("UTF" `isPrefixOf` name) -> let relax x = x { recover = recoverEncode TransliterateCodingFailure } in hSetEncoding handle (TextEncoding name decoder (fmap relax encoder)) _ -> return () -- |Like 'tryFindPackageDesc', but with error specific to add-source deps. tryFindAddSourcePackageDesc :: Verbosity -> FilePath -> String -> IO FilePath tryFindAddSourcePackageDesc verbosity depPath err = tryFindPackageDesc verbosity depPath $ err ++ "\n" ++ "Failed to read cabal file of add-source dependency: " ++ depPath -- |Try to find a @.cabal@ file, in directory @depPath@. Fails if one cannot be -- found, with @err@ prefixing the error message. This function simply allows -- us to give a more descriptive error than that provided by @findPackageDesc@. 
tryFindPackageDesc :: Verbosity -> FilePath -> String -> IO FilePath tryFindPackageDesc verbosity depPath err = do errOrCabalFile <- findPackageDesc depPath case errOrCabalFile of Right file -> return file Left _ -> die' verbosity err findOpenProgramLocation :: Platform -> IO (Either String FilePath) findOpenProgramLocation (Platform _ os) = let locate name = do exe <- findExecutable name case exe of Just s -> pure (Right s) Nothing -> pure (Left ("Couldn't find file-opener program `" <> name <> "`")) xdg = locate "xdg-open" in case os of Windows -> pure (Right "start") OSX -> locate "open" Linux -> xdg FreeBSD -> xdg OpenBSD -> xdg NetBSD -> xdg DragonFly -> xdg _ -> pure (Left ("Couldn't determine file-opener program for " <> show os)) -- | Phase of building a dependency. Represents current status of package -- dependency processing. See #4040 for details. data ProgressPhase = ProgressDownloading | ProgressDownloaded | ProgressStarting | ProgressBuilding | ProgressHaddock | ProgressInstalling | ProgressCompleted progressMessage :: Verbosity -> ProgressPhase -> String -> IO () progressMessage verbosity phase subject = do noticeNoWrap verbosity $ phaseStr ++ subject ++ "\n" where phaseStr = case phase of ProgressDownloading -> "Downloading " ProgressDownloaded -> "Downloaded " ProgressStarting -> "Starting " ProgressBuilding -> "Building " ProgressHaddock -> "Haddock " ProgressInstalling -> "Installing " ProgressCompleted -> "Completed " -- | Given a version, return an API-compatible (according to PVP) version range. -- -- If the boolean argument denotes whether to use a desugared -- representation (if 'True') or the new-style @^>=@-form (if -- 'False'). -- -- Example: @pvpize True (mkVersion [0,4,1])@ produces the version range @>= 0.4 && < 0.5@ (which is the -- same as @0.4.*@). 
pvpize :: Bool -> Version -> VersionRange pvpize False v = majorBoundVersion v pvpize True v = orLaterVersion v' `intersectVersionRanges` earlierVersion (incVersion 1 v') where v' = alterVersion (take 2) v -- | Increment the nth version component (counting from 0). incVersion :: Int -> Version -> Version incVersion n = alterVersion (incVersion' n) where incVersion' 0 [] = [1] incVersion' 0 (v:_) = [v+1] incVersion' m [] = replicate m 0 ++ [1] incVersion' m (v:vs) = v : incVersion' (m-1) vs -- | Returns the current calendar year. getCurrentYear :: IO Integer getCurrentYear = do u <- getCurrentTime z <- getCurrentTimeZone let l = utcToLocalTime z u (y, _, _) = toGregorian $ localDay l return y -- | From System.Directory.Extra -- https://hackage.haskell.org/package/extra-1.7.9 listFilesInside :: (FilePath -> IO Bool) -> FilePath -> IO [FilePath] listFilesInside test dir = ifM (notM $ test $ dropTrailingPathSeparator dir) (pure []) $ do (dirs,files) <- partitionM doesDirectoryExist =<< listContents dir rest <- concatMapM (listFilesInside test) dirs pure $ files ++ rest -- | From System.Directory.Extra -- https://hackage.haskell.org/package/extra-1.7.9 listFilesRecursive :: FilePath -> IO [FilePath] listFilesRecursive = listFilesInside (const $ pure True) -- | From System.Directory.Extra -- https://hackage.haskell.org/package/extra-1.7.9 listContents :: FilePath -> IO [FilePath] listContents dir = do xs <- getDirectoryContents dir pure $ sort [dir x | x <- xs, not $ all (== '.') x] -- | From Control.Monad.Extra -- https://hackage.haskell.org/package/extra-1.7.9 ifM :: Monad m => m Bool -> m a -> m a -> m a ifM b t f = do b' <- b; if b' then t else f -- | From Control.Monad.Extra -- https://hackage.haskell.org/package/extra-1.7.9 concatMapM :: Monad m => (a -> m [b]) -> [a] -> m [b] {-# INLINE concatMapM #-} concatMapM op = foldr f (pure []) where f x xs = do x' <- op x; if null x' then xs else do xs' <- xs; pure $ x' ++ xs' -- | From Control.Monad.Extra -- 
https://hackage.haskell.org/package/extra-1.7.9 partitionM :: Monad m => (a -> m Bool) -> [a] -> m ([a], [a]) partitionM _ [] = pure ([], []) partitionM f (x:xs) = do res <- f x (as,bs) <- partitionM f xs pure ([x | res]++as, [x | not res]++bs) -- | From Control.Monad.Extra -- https://hackage.haskell.org/package/extra-1.7.9 notM :: Functor m => m Bool -> m Bool notM = fmap not safeRead :: Read a => String -> Maybe a safeRead s | [(x, "")] <- reads s = Just x | otherwise = Nothing cabal-install-3.8.1.0/src/Distribution/Client/Utils/0000755000000000000000000000000007346545000020344 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Client/Utils/Json.hs0000644000000000000000000001517507346545000021622 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} -- | Minimal JSON / RFC 7159 support -- -- The API is heavily inspired by @aeson@'s API but puts emphasis on -- simplicity rather than performance. The 'ToJSON' instances are -- intended to have an encoding compatible with @aeson@'s encoding. -- module Distribution.Client.Utils.Json ( Value(..) , Object, object, Pair, (.=) , encodeToString , encodeToBuilder , ToJSON(toJSON) ) where import Distribution.Client.Compat.Prelude import Data.Char (intToDigit) import Data.ByteString.Builder (Builder) import qualified Data.ByteString.Builder as BB -- TODO: We may want to replace 'String' with 'Text' or 'ByteString' -- | A JSON value represented as a Haskell value. data Value = Object !Object | Array [Value] | String String | Number !Double | Bool !Bool | Null deriving (Eq, Read, Show) -- | A key\/value pair for an 'Object' type Pair = (String, Value) -- | A JSON \"object\" (key/value map). type Object = [Pair] infixr 8 .= -- | A key-value pair for encoding a JSON object. (.=) :: ToJSON v => String -> v -> Pair k .= v = (k, toJSON v) -- | Create a 'Value' from a list of name\/value 'Pair's. 
object :: [Pair] -> Value object = Object instance IsString Value where fromString = String -- | A type that can be converted to JSON. class ToJSON a where -- | Convert a Haskell value to a JSON-friendly intermediate type. toJSON :: a -> Value instance ToJSON () where toJSON () = Array [] instance ToJSON Value where toJSON = id instance ToJSON Bool where toJSON = Bool instance ToJSON a => ToJSON [a] where toJSON = Array . map toJSON instance ToJSON a => ToJSON (Maybe a) where toJSON Nothing = Null toJSON (Just a) = toJSON a instance (ToJSON a,ToJSON b) => ToJSON (a,b) where toJSON (a,b) = Array [toJSON a, toJSON b] instance (ToJSON a,ToJSON b,ToJSON c) => ToJSON (a,b,c) where toJSON (a,b,c) = Array [toJSON a, toJSON b, toJSON c] instance (ToJSON a,ToJSON b,ToJSON c, ToJSON d) => ToJSON (a,b,c,d) where toJSON (a,b,c,d) = Array [toJSON a, toJSON b, toJSON c, toJSON d] instance ToJSON Float where toJSON = Number . realToFrac instance ToJSON Double where toJSON = Number instance ToJSON Int where toJSON = Number . realToFrac instance ToJSON Int8 where toJSON = Number . realToFrac instance ToJSON Int16 where toJSON = Number . realToFrac instance ToJSON Int32 where toJSON = Number . realToFrac instance ToJSON Word where toJSON = Number . realToFrac instance ToJSON Word8 where toJSON = Number . realToFrac instance ToJSON Word16 where toJSON = Number . realToFrac instance ToJSON Word32 where toJSON = Number . realToFrac -- | Possibly lossy due to conversion to 'Double' instance ToJSON Int64 where toJSON = Number . realToFrac -- | Possibly lossy due to conversion to 'Double' instance ToJSON Word64 where toJSON = Number . realToFrac -- | Possibly lossy due to conversion to 'Double' instance ToJSON Integer where toJSON = Number . fromInteger ------------------------------------------------------------------------------ -- 'BB.Builder'-based encoding -- | Serialise value as JSON/UTF8-encoded 'Builder' encodeToBuilder :: ToJSON a => a -> Builder encodeToBuilder = encodeValueBB . 
toJSON encodeValueBB :: Value -> Builder encodeValueBB jv = case jv of Bool True -> "true" Bool False -> "false" Null -> "null" Number n | isNaN n || isInfinite n -> encodeValueBB Null | Just i <- doubleToInt64 n -> BB.int64Dec i | otherwise -> BB.doubleDec n Array a -> encodeArrayBB a String s -> encodeStringBB s Object o -> encodeObjectBB o encodeArrayBB :: [Value] -> Builder encodeArrayBB [] = "[]" encodeArrayBB jvs = BB.char8 '[' <> go jvs <> BB.char8 ']' where go = mconcat . intersperse (BB.char8 ',') . map encodeValueBB encodeObjectBB :: Object -> Builder encodeObjectBB [] = "{}" encodeObjectBB jvs = BB.char8 '{' <> go jvs <> BB.char8 '}' where go = mconcat . intersperse (BB.char8 ',') . map encPair encPair (l,x) = encodeStringBB l <> BB.char8 ':' <> encodeValueBB x encodeStringBB :: String -> Builder encodeStringBB str = BB.char8 '"' <> go str <> BB.char8 '"' where go = BB.stringUtf8 . escapeString ------------------------------------------------------------------------------ -- 'String'-based encoding -- | Serialise value as JSON-encoded Unicode 'String' encodeToString :: ToJSON a => a -> String encodeToString jv = encodeValue (toJSON jv) [] encodeValue :: Value -> ShowS encodeValue jv = case jv of Bool b -> showString (if b then "true" else "false") Null -> showString "null" Number n | isNaN n || isInfinite n -> encodeValue Null | Just i <- doubleToInt64 n -> shows i | otherwise -> shows n Array a -> encodeArray a String s -> encodeString s Object o -> encodeObject o encodeArray :: [Value] -> ShowS encodeArray [] = showString "[]" encodeArray jvs = ('[':) . go jvs . (']':) where go [] = id go [x] = encodeValue x go (x:xs) = encodeValue x . (',':) . go xs encodeObject :: Object -> ShowS encodeObject [] = showString "{}" encodeObject jvs = ('{':) . go jvs . ('}':) where go [] = id go [(l,x)] = encodeString l . (':':) . encodeValue x go ((l,x):lxs) = encodeString l . (':':) . encodeValue x . (',':) . 
go lxs encodeString :: String -> ShowS encodeString str = ('"':) . showString (escapeString str) . ('"':) ------------------------------------------------------------------------------ -- helpers -- | Try to convert 'Double' into 'Int64', return 'Nothing' if not -- representable loss-free as integral 'Int64' value. doubleToInt64 :: Double -> Maybe Int64 doubleToInt64 x | fromInteger x' == x , x' <= toInteger (maxBound :: Int64) , x' >= toInteger (minBound :: Int64) = Just (fromIntegral x') | otherwise = Nothing where x' = round x -- | Minimally escape a 'String' in accordance with RFC 7159, "7. Strings" escapeString :: String -> String escapeString s | not (any needsEscape s) = s | otherwise = escape s where escape [] = [] escape (x:xs) = case x of '\\' -> '\\':'\\':escape xs '"' -> '\\':'"':escape xs '\b' -> '\\':'b':escape xs '\f' -> '\\':'f':escape xs '\n' -> '\\':'n':escape xs '\r' -> '\\':'r':escape xs '\t' -> '\\':'t':escape xs c | ord c < 0x10 -> '\\':'u':'0':'0':'0':intToDigit (ord c):escape xs | ord c < 0x20 -> '\\':'u':'0':'0':'1':intToDigit (ord c - 0x10):escape xs | otherwise -> c : escape xs -- unescaped = %x20-21 / %x23-5B / %x5D-10FFFF needsEscape c = ord c < 0x20 || c `elem` ['\\','"'] cabal-install-3.8.1.0/src/Distribution/Client/Utils/Parsec.hs0000644000000000000000000000724407346545000022124 0ustar0000000000000000module Distribution.Client.Utils.Parsec ( renderParseError, ) where import Distribution.Client.Compat.Prelude import Prelude () import System.FilePath (normalise) import qualified Data.ByteString as BS import qualified Data.ByteString.Char8 as BS8 import Distribution.Parsec (PError (..), PWarning (..), Position (..), showPos, zeroPos) import Distribution.Simple.Utils (fromUTF8BS) -- | Render parse error highlighting the part of the input file. 
renderParseError :: FilePath -> BS.ByteString -> NonEmpty PError -> [PWarning] -> String renderParseError filepath contents errors warnings = unlines $ [ "Errors encountered when parsing cabal file " <> filepath <> ":" ] ++ renderedErrors ++ renderedWarnings where filepath' = normalise filepath -- lines of the input file. 'lines' is taken, so they are called rows -- contents, line number, whether it's empty line rows :: [(String, Int, Bool)] rows = zipWith f (BS8.lines contents) [1..] where f bs i = let s = fromUTF8BS bs in (s, i, isEmptyOrComment s) rowsZipper = listToZipper rows isEmptyOrComment :: String -> Bool isEmptyOrComment s = case dropWhile (== ' ') s of "" -> True -- empty ('-':'-':_) -> True -- comment _ -> False renderedErrors = concatMap renderError errors renderedWarnings = concatMap renderWarning warnings renderError :: PError -> [String] renderError (PError pos@(Position row col) msg) -- if position is 0:0, then it doesn't make sense to show input -- looks like, Parsec errors have line-feed in them | pos == zeroPos = msgs | otherwise = msgs ++ formatInput row col where msgs = [ "", filepath' ++ ":" ++ showPos pos ++ ": error:", trimLF msg, "" ] renderWarning :: PWarning -> [String] renderWarning (PWarning _ pos@(Position row col) msg) | pos == zeroPos = msgs | otherwise = msgs ++ formatInput row col where msgs = [ "", filepath' ++ ":" ++ showPos pos ++ ": warning:", trimLF msg, "" ] -- sometimes there are (especially trailing) newlines. trimLF :: String -> String trimLF = dropWhile (== '\n') . reverse . dropWhile (== '\n') . 
reverse -- format line: prepend the given line number formatInput :: Int -> Int -> [String] formatInput row col = case advance (row - 1) rowsZipper of Zipper xs ys -> before ++ after where before = case span (\(_, _, b) -> b) xs of (_, []) -> [] (zs, z : _) -> map formatInputLine $ z : reverse zs after = case ys of [] -> [] (z : _zs) -> [ formatInputLine z -- error line , " | " ++ replicate (col - 1) ' ' ++ "^" -- pointer: ^ ] -- do we need rows after? -- ++ map formatInputLine (take 1 zs) -- one row after formatInputLine :: (String, Int, Bool) -> String formatInputLine (str, row, _) = leftPadShow row ++ " | " ++ str -- hopefully we don't need to work with over 99999 lines .cabal files -- at that point small glitches in error messages are hopefully fine. leftPadShow :: Int -> String leftPadShow n = let s = show n in replicate (5 - length s) ' ' ++ s data Zipper a = Zipper [a] [a] listToZipper :: [a] -> Zipper a listToZipper = Zipper [] advance :: Int -> Zipper a -> Zipper a advance n z@(Zipper xs ys) | n <= 0 = z | otherwise = case ys of [] -> z (y:ys') -> advance (n - 1) $ Zipper (y:xs) ys' cabal-install-3.8.1.0/src/Distribution/Client/VCS.hs0000644000000000000000000006573607346545000020254 0ustar0000000000000000{-# LANGUAGE LambdaCase #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE NamedFieldPuns, RecordWildCards, RankNTypes #-} module Distribution.Client.VCS ( -- * VCS driver type VCS, vcsRepoType, vcsProgram, -- ** Type re-exports RepoType, Program, ConfiguredProgram, -- * Validating 'SourceRepo's and configuring VCS drivers validatePDSourceRepo, validateSourceRepo, validateSourceRepos, SourceRepoProblem(..), configureVCS, configureVCSs, -- * Running the VCS driver cloneSourceRepo, syncSourceRepos, -- * The individual VCS drivers knownVCSs, vcsBzr, vcsDarcs, vcsGit, vcsHg, vcsSvn, vcsPijul, ) where import Prelude () import Distribution.Client.Compat.Prelude import Distribution.Types.SourceRepo ( RepoType(..), KnownRepoType (..) 
) import Distribution.Client.Types.SourceRepo (SourceRepoMaybe, SourceRepositoryPackage (..), srpToProxy) import Distribution.Client.RebuildMonad ( Rebuild, monitorFiles, MonitorFilePath, monitorDirectoryExistence ) import Distribution.Verbosity as Verbosity ( normal ) import Distribution.Simple.Program ( Program(programFindVersion) , ConfiguredProgram(programVersion) , simpleProgram, findProgramVersion , ProgramInvocation(..), programInvocation, runProgramInvocation, getProgramInvocationOutput , emptyProgramDb, requireProgram ) import Distribution.Version ( mkVersion ) import qualified Distribution.PackageDescription as PD import Control.Applicative ( liftA2 ) import Control.Exception ( throw, try ) import Control.Monad.Trans ( liftIO ) import qualified Data.Char as Char import qualified Data.List as List import qualified Data.Map as Map import System.FilePath ( takeDirectory, () ) import System.Directory ( doesDirectoryExist , removeDirectoryRecursive ) import System.IO.Error ( isDoesNotExistError ) -- | A driver for a version control system, e.g. git, darcs etc. -- data VCS program = VCS { -- | The type of repository this driver is for. vcsRepoType :: RepoType, -- | The vcs program itself. -- This is used at type 'Program' and 'ConfiguredProgram'. vcsProgram :: program, -- | The program invocation(s) to get\/clone a repository into a fresh -- local directory. vcsCloneRepo :: forall f. Verbosity -> ConfiguredProgram -> SourceRepositoryPackage f -> FilePath -- Source URI -> FilePath -- Destination directory -> [ProgramInvocation], -- | The program invocation(s) to synchronise a whole set of /related/ -- repositories with corresponding local directories. Also returns the -- files that the command depends on, for change monitoring. vcsSyncRepos :: forall f. 
Verbosity -> ConfiguredProgram -> [(SourceRepositoryPackage f, FilePath)] -> IO [MonitorFilePath] } -- ------------------------------------------------------------ -- * Selecting repos and drivers -- ------------------------------------------------------------ data SourceRepoProblem = SourceRepoRepoTypeUnspecified | SourceRepoRepoTypeUnsupported (SourceRepositoryPackage Proxy) RepoType | SourceRepoLocationUnspecified deriving Show -- | Validates that the 'SourceRepo' specifies a location URI and a repository -- type that is supported by a VCS driver. -- -- | It also returns the 'VCS' driver we should use to work with it. -- validateSourceRepo :: SourceRepositoryPackage f -> Either SourceRepoProblem (SourceRepositoryPackage f, String, RepoType, VCS Program) validateSourceRepo = \repo -> do let rtype = srpType repo vcs <- Map.lookup rtype knownVCSs ?! SourceRepoRepoTypeUnsupported (srpToProxy repo) rtype let uri = srpLocation repo return (repo, uri, rtype, vcs) where a ?! e = maybe (Left e) Right a validatePDSourceRepo :: PD.SourceRepo -> Either SourceRepoProblem (SourceRepoMaybe, String, RepoType, VCS Program) validatePDSourceRepo repo = do rtype <- PD.repoType repo ?! SourceRepoRepoTypeUnspecified uri <- PD.repoLocation repo ?! SourceRepoLocationUnspecified validateSourceRepo SourceRepositoryPackage { srpType = rtype , srpLocation = uri , srpTag = PD.repoTag repo , srpBranch = PD.repoBranch repo , srpSubdir = PD.repoSubdir repo , srpCommand = mempty } where a ?! e = maybe (Left e) Right a -- | As 'validateSourceRepo' but for a bunch of 'SourceRepo's, and return -- things in a convenient form to pass to 'configureVCSs', or to report -- problems. 
-- validateSourceRepos :: [SourceRepositoryPackage f] -> Either [(SourceRepositoryPackage f, SourceRepoProblem)] [(SourceRepositoryPackage f, String, RepoType, VCS Program)] validateSourceRepos rs = case partitionEithers (map validateSourceRepo' rs) of (problems@(_:_), _) -> Left problems ([], vcss) -> Right vcss where validateSourceRepo' :: SourceRepositoryPackage f -> Either (SourceRepositoryPackage f, SourceRepoProblem) (SourceRepositoryPackage f, String, RepoType, VCS Program) validateSourceRepo' r = either (Left . (,) r) Right (validateSourceRepo r) configureVCS :: Verbosity -> VCS Program -> IO (VCS ConfiguredProgram) configureVCS verbosity vcs@VCS{vcsProgram = prog} = asVcsConfigured <$> requireProgram verbosity prog emptyProgramDb where asVcsConfigured (prog', _) = vcs { vcsProgram = prog' } configureVCSs :: Verbosity -> Map RepoType (VCS Program) -> IO (Map RepoType (VCS ConfiguredProgram)) configureVCSs verbosity = traverse (configureVCS verbosity) -- ------------------------------------------------------------ -- * Running the driver -- ------------------------------------------------------------ -- | Clone a single source repo into a fresh directory, using a configured VCS. -- -- This is for making a new copy, not synchronising an existing copy. It will -- fail if the destination directory already exists. -- -- Make sure to validate the 'SourceRepo' using 'validateSourceRepo' first. -- cloneSourceRepo :: Verbosity -> VCS ConfiguredProgram -> SourceRepositoryPackage f -> [Char] -> IO () cloneSourceRepo verbosity vcs repo@SourceRepositoryPackage{ srpLocation = srcuri } destdir = traverse_ (runProgramInvocation verbosity) invocations where invocations = vcsCloneRepo vcs verbosity (vcsProgram vcs) repo srcuri destdir -- | Syncronise a set of 'SourceRepo's referring to the same repository with -- corresponding local directories. The local directories may or may not -- already exist. 
-- -- The 'SourceRepo' values used in a single invocation of 'syncSourceRepos', -- or used across a series of invocations with any local directory must refer -- to the /same/ repository. That means it must be the same location but they -- can differ in the branch, or tag or subdir. -- -- The reason to allow multiple related 'SourceRepo's is to allow for the -- network or storage to be shared between different checkouts of the repo. -- For example if a single repo contains multiple packages in different subdirs -- and in some project it may make sense to use a different state of the repo -- for one subdir compared to another. -- syncSourceRepos :: Verbosity -> VCS ConfiguredProgram -> [(SourceRepositoryPackage f, FilePath)] -> Rebuild () syncSourceRepos verbosity vcs repos = do files <- liftIO $ vcsSyncRepos vcs verbosity (vcsProgram vcs) repos monitorFiles files -- ------------------------------------------------------------ -- * The various VCS drivers -- ------------------------------------------------------------ -- | The set of all supported VCS drivers, organised by 'RepoType'. -- knownVCSs :: Map RepoType (VCS Program) knownVCSs = Map.fromList [ (vcsRepoType vcs, vcs) | vcs <- vcss ] where vcss = [ vcsBzr, vcsDarcs, vcsGit, vcsHg, vcsSvn ] -- | VCS driver for Bazaar. 
-- vcsBzr :: VCS Program vcsBzr = VCS { vcsRepoType = KnownRepoType Bazaar, vcsProgram = bzrProgram, vcsCloneRepo, vcsSyncRepos } where vcsCloneRepo :: Verbosity -> ConfiguredProgram -> SourceRepositoryPackage f -> FilePath -> FilePath -> [ProgramInvocation] vcsCloneRepo verbosity prog repo srcuri destdir = [ programInvocation prog ([branchCmd, srcuri, destdir] ++ tagArgs ++ verboseArg) ] where -- The @get@ command was deprecated in version 2.4 in favour of -- the alias @branch@ branchCmd | programVersion prog >= Just (mkVersion [2,4]) = "branch" | otherwise = "get" tagArgs :: [String] tagArgs = case srpTag repo of Nothing -> [] Just tag -> ["-r", "tag:" ++ tag] verboseArg :: [String] verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] vcsSyncRepos :: Verbosity -> ConfiguredProgram -> [(SourceRepositoryPackage f, FilePath)] -> IO [MonitorFilePath] vcsSyncRepos _v _p _rs = fail "sync repo not yet supported for bzr" bzrProgram :: Program bzrProgram = (simpleProgram "bzr") { programFindVersion = findProgramVersion "--version" $ \str -> case words str of -- "Bazaar (bzr) 2.6.0\n ... lots of extra stuff" (_:_:ver:_) -> ver _ -> "" } -- | VCS driver for Darcs. -- vcsDarcs :: VCS Program vcsDarcs = VCS { vcsRepoType = KnownRepoType Darcs, vcsProgram = darcsProgram, vcsCloneRepo, vcsSyncRepos } where vcsCloneRepo :: Verbosity -> ConfiguredProgram -> SourceRepositoryPackage f -> FilePath -> FilePath -> [ProgramInvocation] vcsCloneRepo verbosity prog repo srcuri destdir = [ programInvocation prog cloneArgs ] where cloneArgs :: [String] cloneArgs = [cloneCmd, srcuri, destdir] ++ tagArgs ++ verboseArg -- At some point the @clone@ command was introduced as an alias for -- @get@, and @clone@ seems to be the recommended one now. 
cloneCmd :: String cloneCmd | programVersion prog >= Just (mkVersion [2,8]) = "clone" | otherwise = "get" tagArgs :: [String] tagArgs = case srpTag repo of Nothing -> [] Just tag -> ["-t", tag] verboseArg :: [String] verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] vcsSyncRepos :: Verbosity -> ConfiguredProgram -> [(SourceRepositoryPackage f, FilePath)] -> IO [MonitorFilePath] vcsSyncRepos _ _ [] = return [] vcsSyncRepos verbosity prog ((primaryRepo, primaryLocalDir) : secondaryRepos) = monitors <$ do vcsSyncRepo verbosity prog primaryRepo primaryLocalDir Nothing for_ secondaryRepos $ \ (repo, localDir) -> vcsSyncRepo verbosity prog repo localDir $ Just primaryLocalDir where dirs :: [FilePath] dirs = primaryLocalDir : (snd <$> secondaryRepos) monitors :: [MonitorFilePath] monitors = monitorDirectoryExistence <$> dirs vcsSyncRepo verbosity prog SourceRepositoryPackage{..} localDir _peer = try (lines <$> darcsWithOutput localDir ["log", "--last", "1"]) >>= \ case Right (_:_:_:x:_) | Just tag <- (List.stripPrefix "tagged " . 
List.dropWhile Char.isSpace) x , Just tag' <- srpTag , tag == tag' -> pure () Left e | not (isDoesNotExistError e) -> throw e _ -> do removeDirectoryRecursive localDir `catch` liftA2 unless isDoesNotExistError throw darcs (takeDirectory localDir) cloneArgs where darcs :: FilePath -> [String] -> IO () darcs = darcs' runProgramInvocation darcsWithOutput :: FilePath -> [String] -> IO String darcsWithOutput = darcs' getProgramInvocationOutput darcs' :: (Verbosity -> ProgramInvocation -> t) -> FilePath -> [String] -> t darcs' f cwd args = f verbosity (programInvocation prog args) { progInvokeCwd = Just cwd } cloneArgs :: [String] cloneArgs = ["clone"] ++ tagArgs ++ [srpLocation, localDir] ++ verboseArg tagArgs :: [String] tagArgs = case srpTag of Nothing -> [] Just tag -> ["-t" ++ tag] verboseArg :: [String] verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] darcsProgram :: Program darcsProgram = (simpleProgram "darcs") { programFindVersion = findProgramVersion "--version" $ \str -> case words str of -- "2.8.5 (release)" (ver:_) -> ver _ -> "" } -- | VCS driver for Git. 
-- vcsGit :: VCS Program vcsGit = VCS { vcsRepoType = KnownRepoType Git, vcsProgram = gitProgram, vcsCloneRepo, vcsSyncRepos } where vcsCloneRepo :: Verbosity -> ConfiguredProgram -> SourceRepositoryPackage f -> FilePath -> FilePath -> [ProgramInvocation] vcsCloneRepo verbosity prog repo srcuri destdir = [ programInvocation prog cloneArgs ] -- And if there's a tag, we have to do that in a second step: ++ [ git (resetArgs tag) | tag <- maybeToList (srpTag repo) ] ++ [ git (["submodule", "sync", "--recursive"] ++ verboseArg) , git (["submodule", "update", "--init", "--force", "--recursive"] ++ verboseArg) ] where git args = (programInvocation prog args) {progInvokeCwd = Just destdir} cloneArgs = ["clone", srcuri, destdir] ++ branchArgs ++ verboseArg branchArgs = case srpBranch repo of Just b -> ["--branch", b] Nothing -> [] resetArgs tag = "reset" : verboseArg ++ ["--hard", tag, "--"] verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] vcsSyncRepos :: Verbosity -> ConfiguredProgram -> [(SourceRepositoryPackage f, FilePath)] -> IO [MonitorFilePath] vcsSyncRepos _ _ [] = return [] vcsSyncRepos verbosity gitProg ((primaryRepo, primaryLocalDir) : secondaryRepos) = do vcsSyncRepo verbosity gitProg primaryRepo primaryLocalDir Nothing sequence_ [ vcsSyncRepo verbosity gitProg repo localDir (Just primaryLocalDir) | (repo, localDir) <- secondaryRepos ] return [ monitorDirectoryExistence dir | dir <- (primaryLocalDir : map snd secondaryRepos) ] vcsSyncRepo verbosity gitProg SourceRepositoryPackage{..} localDir peer = do exists <- doesDirectoryExist localDir if exists then git localDir ["fetch"] else git (takeDirectory localDir) cloneArgs -- Before trying to checkout other commits, all submodules must be -- de-initialised and the .git/modules directory must be deleted. This -- is needed because sometimes `git submodule sync` does not actually -- update the submodule source URL. 
Detailed description here: -- https://git.coop/-/snippets/85 git localDir ["submodule", "deinit", "--force", "--all"] let gitModulesDir = localDir ".git" "modules" gitModulesExists <- doesDirectoryExist gitModulesDir when gitModulesExists $ removeDirectoryRecursive gitModulesDir git localDir resetArgs git localDir $ ["submodule", "sync", "--recursive"] ++ verboseArg git localDir $ ["submodule", "update", "--force", "--init", "--recursive"] ++ verboseArg git localDir $ ["submodule", "foreach", "--recursive"] ++ verboseArg ++ ["git clean -ffxdq"] git localDir $ ["clean", "-ffxdq"] where git :: FilePath -> [String] -> IO () git cwd args = runProgramInvocation verbosity $ (programInvocation gitProg args) { progInvokeCwd = Just cwd } cloneArgs = ["clone", "--no-checkout", loc, localDir] ++ case peer of Nothing -> [] Just peerLocalDir -> ["--reference", peerLocalDir] ++ verboseArg where loc = srpLocation resetArgs = "reset" : verboseArg ++ ["--hard", resetTarget, "--" ] resetTarget = fromMaybe "HEAD" (srpBranch `mplus` srpTag) verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] gitProgram :: Program gitProgram = (simpleProgram "git") { programFindVersion = findProgramVersion "--version" $ \str -> case words str of -- "git version 2.5.5" (_:_:ver:_) | all isTypical ver -> ver -- or annoyingly "git version 2.17.1.windows.2" yes, really (_:_:ver:_) -> intercalate "." . takeWhile (all isNum) . split $ ver _ -> "" } where isNum c = c >= '0' && c <= '9' isTypical c = isNum c || c == '.' split cs = case break (=='.') cs of (chunk,[]) -> chunk : [] (chunk,_:rest) -> chunk : split rest -- | VCS driver for Mercurial. 
-- vcsHg :: VCS Program vcsHg = VCS { vcsRepoType = KnownRepoType Mercurial, vcsProgram = hgProgram, vcsCloneRepo, vcsSyncRepos } where vcsCloneRepo :: Verbosity -> ConfiguredProgram -> SourceRepositoryPackage f -> FilePath -> FilePath -> [ProgramInvocation] vcsCloneRepo verbosity prog repo srcuri destdir = [ programInvocation prog cloneArgs ] where cloneArgs = ["clone", srcuri, destdir] ++ branchArgs ++ tagArgs ++ verboseArg branchArgs = case srpBranch repo of Just b -> ["--branch", b] Nothing -> [] tagArgs = case srpTag repo of Just t -> ["--rev", t] Nothing -> [] verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] vcsSyncRepos :: Verbosity -> ConfiguredProgram -> [(SourceRepositoryPackage f, FilePath)] -> IO [MonitorFilePath] vcsSyncRepos _ _ [] = return [] vcsSyncRepos verbosity hgProg ((primaryRepo, primaryLocalDir) : secondaryRepos) = do vcsSyncRepo verbosity hgProg primaryRepo primaryLocalDir sequence_ [ vcsSyncRepo verbosity hgProg repo localDir | (repo, localDir) <- secondaryRepos ] return [ monitorDirectoryExistence dir | dir <- (primaryLocalDir : map snd secondaryRepos) ] vcsSyncRepo verbosity hgProg repo localDir = do exists <- doesDirectoryExist localDir if exists then hg localDir ["pull"] else hg (takeDirectory localDir) cloneArgs hg localDir checkoutArgs where hg :: FilePath -> [String] -> IO () hg cwd args = runProgramInvocation verbosity $ (programInvocation hgProg args) { progInvokeCwd = Just cwd } cloneArgs = ["clone", "--noupdate", (srpLocation repo), localDir] ++ verboseArg verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] checkoutArgs = [ "checkout", "--clean" ] ++ tagArgs tagArgs = case srpTag repo of Just t -> ["--rev", t] Nothing -> [] hgProgram :: Program hgProgram = (simpleProgram "hg") { programFindVersion = findProgramVersion "--version" $ \str -> case words str of -- Mercurial Distributed SCM (version 3.5.2)\n ... 
long message (_:_:_:_:ver:_) -> takeWhile (\c -> Char.isDigit c || c == '.') ver _ -> "" } -- | VCS driver for Subversion. -- vcsSvn :: VCS Program vcsSvn = VCS { vcsRepoType = KnownRepoType SVN, vcsProgram = svnProgram, vcsCloneRepo, vcsSyncRepos } where vcsCloneRepo :: Verbosity -> ConfiguredProgram -> SourceRepositoryPackage f -> FilePath -> FilePath -> [ProgramInvocation] vcsCloneRepo verbosity prog _repo srcuri destdir = [ programInvocation prog checkoutArgs ] where checkoutArgs = ["checkout", srcuri, destdir] ++ verboseArg verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] --TODO: branch or tag? vcsSyncRepos :: Verbosity -> ConfiguredProgram -> [(SourceRepositoryPackage f, FilePath)] -> IO [MonitorFilePath] vcsSyncRepos _v _p _rs = fail "sync repo not yet supported for svn" svnProgram :: Program svnProgram = (simpleProgram "svn") { programFindVersion = findProgramVersion "--version" $ \str -> case words str of -- svn, version 1.9.4 (r1740329)\n ... long message (_:_:ver:_) -> ver _ -> "" } -- | VCS driver for Pijul. -- Documentation for Pijul can be found at -- -- 2020-04-09 Oleg: -- -- As far as I understand pijul, there are branches and "tags" in pijul, -- but there aren't a "commit hash" identifying an arbitrary state. -- -- One can create `a pijul tag`, which will make a patch hash, -- which depends on everything currently in the repository. -- I guess if you try to apply that patch, you'll be forced to apply -- all the dependencies too. In other words, there are no named tags. -- -- It's not clear to me whether there is an option to -- "apply this patch *and* all of its dependencies". -- And relatedly, whether how to make sure that there are no other -- patches applied. -- -- With branches it's easier, as you can `pull` and `checkout` them, -- and they seem to be similar enough. 
Yet, pijul documentations says -- -- > Note that the purpose of branches in Pijul is quite different from Git, -- since Git's "feature branches" can usually be implemented by just -- patches. -- -- I guess it means that indeed instead of creating a branch and making PR -- in "GitHub" workflow, you'd just create a patch and offer it. -- You can do that with `git` too. Push (a branch with) commit to remote -- and ask other to cherry-pick that commit. Yet, in git identity of commit -- changes when it applied to other trees, where patches in pijul have -- will continue to have the same hash. -- -- Unfortunately pijul doesn't talk about conflict resolution. -- It seems that you get something like: -- -- % pijul status -- On branch merge -- -- Unresolved conflicts: -- (fix conflicts and record the resolution with "pijul record ...") -- -- foo -- -- % cat foo -- first line -- >> >>>>>>>>>>>>>>>>>>>>>>>>>>>>> -- branch BBB -- ================================ -- branch AAA -- <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< -- last line -- -- And then the `pijul dependencies` would draw you a graph like -- -- -- -----> foo on branch B -----> -- resolve confict Initial patch -- -----> foo on branch A -----> -- -- Which is seems reasonable. -- -- So currently, pijul support is very experimental, and most likely -- won't work, even the basics are in place. Tests are also written -- but disabled, as the branching model differs from `git` one, -- for which tests are written. 
-- vcsPijul :: VCS Program vcsPijul = VCS { vcsRepoType = KnownRepoType Pijul, vcsProgram = pijulProgram, vcsCloneRepo, vcsSyncRepos } where vcsCloneRepo :: Verbosity -- ^ it seems that pijul does not have verbose flag -> ConfiguredProgram -> SourceRepositoryPackage f -> FilePath -> FilePath -> [ProgramInvocation] vcsCloneRepo _verbosity prog repo srcuri destdir = [ programInvocation prog cloneArgs ] -- And if there's a tag, we have to do that in a second step: ++ [ (programInvocation prog (checkoutArgs tag)) { progInvokeCwd = Just destdir } | tag <- maybeToList (srpTag repo) ] where cloneArgs :: [String] cloneArgs = ["clone", srcuri, destdir] ++ branchArgs branchArgs :: [String] branchArgs = case srpBranch repo of Just b -> ["--from-branch", b] Nothing -> [] checkoutArgs tag = "checkout" : [tag] -- TODO: this probably doesn't work either vcsSyncRepos :: Verbosity -> ConfiguredProgram -> [(SourceRepositoryPackage f, FilePath)] -> IO [MonitorFilePath] vcsSyncRepos _ _ [] = return [] vcsSyncRepos verbosity pijulProg ((primaryRepo, primaryLocalDir) : secondaryRepos) = do vcsSyncRepo verbosity pijulProg primaryRepo primaryLocalDir Nothing sequence_ [ vcsSyncRepo verbosity pijulProg repo localDir (Just primaryLocalDir) | (repo, localDir) <- secondaryRepos ] return [ monitorDirectoryExistence dir | dir <- (primaryLocalDir : map snd secondaryRepos) ] vcsSyncRepo verbosity pijulProg SourceRepositoryPackage{..} localDir peer = do exists <- doesDirectoryExist localDir if exists then pijul localDir ["pull"] -- TODO: this probably doesn't work. 
else pijul (takeDirectory localDir) cloneArgs pijul localDir checkoutArgs where pijul :: FilePath -> [String] -> IO () pijul cwd args = runProgramInvocation verbosity $ (programInvocation pijulProg args) { progInvokeCwd = Just cwd } cloneArgs :: [String] cloneArgs = ["clone", loc, localDir] ++ case peer of Nothing -> [] Just peerLocalDir -> [peerLocalDir] where loc = srpLocation checkoutArgs :: [String] checkoutArgs = "checkout" : ["--force", checkoutTarget, "--" ] checkoutTarget = fromMaybe "HEAD" (srpBranch `mplus` srpTag) -- TODO: this is definitely wrong. pijulProgram :: Program pijulProgram = (simpleProgram "pijul") { programFindVersion = findProgramVersion "--version" $ \str -> case words str of -- "pijul 0.12.2 (_:ver:_) | all isTypical ver -> ver _ -> "" } where isNum c = c >= '0' && c <= '9' isTypical c = isNum c || c == '.' cabal-install-3.8.1.0/src/Distribution/Client/Version.hs0000644000000000000000000000114607346545000021227 0ustar0000000000000000-- | Provides the version number of @cabal-install@. module Distribution.Client.Version ( cabalInstallVersion ) where import Distribution.Version -- This value determines the `cabal-install --version` output. -- -- It is used in several places throughout the project, including anonymous build reports, client configuration, -- and project planning output. Historically, this was a @Paths_*@ module, however, this conflicted with -- program coverage information generated by HPC, and hence was moved to be a standalone value. 
-- cabalInstallVersion :: Version cabalInstallVersion = mkVersion [3,8,1,0] cabal-install-3.8.1.0/src/Distribution/Client/Win32SelfUpgrade.hs0000644000000000000000000001677307346545000022642 0ustar0000000000000000{-# LANGUAGE CPP, ForeignFunctionInterface #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Win32SelfUpgrade -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Support for self-upgrading executables on Windows platforms. ----------------------------------------------------------------------------- module Distribution.Client.Win32SelfUpgrade ( -- * Explanation -- -- | Windows inherited a design choice from DOS that while initially innocuous -- has rather unfortunate consequences. It maintains the invariant that every -- open file has a corresponding name on disk. One positive consequence of this -- is that an executable can always find its own executable file. The downside -- is that a program cannot be deleted or upgraded while it is running without -- hideous workarounds. This module implements one such hideous workaround. 
-- -- The basic idea is: -- -- * Move our own exe file to a new name -- * Copy a new exe file to the previous name -- * Run the new exe file, passing our own PID and new path -- * Wait for the new process to start -- * Close the new exe file -- * Exit old process -- -- Then in the new process: -- -- * Inform the old process that we've started -- * Wait for the old process to die -- * Delete the old exe file -- * Exit new process -- possibleSelfUpgrade, deleteOldExeFile, ) where import Distribution.Client.Compat.Prelude hiding (log) import Prelude () #ifdef mingw32_HOST_OS import qualified System.Win32 as Win32 import System.Win32 (DWORD, BOOL, HANDLE, LPCTSTR) import Foreign.Ptr (Ptr, nullPtr) import System.Process (runProcess) import System.Directory (canonicalizePath) import System.FilePath (takeBaseName, replaceBaseName, equalFilePath) import Distribution.Verbosity as Verbosity (showForCabal) import Distribution.Simple.Utils (debug, info) -- | If one of the given files is our own exe file then we arrange things such -- that the nested action can replace our own exe file. -- -- We require that the new process accepts a command line invocation that -- calls 'deleteOldExeFile', passing in the PID and exe file. -- possibleSelfUpgrade :: Verbosity -> [FilePath] -> IO a -> IO a possibleSelfUpgrade verbosity newPaths action = do dstPath <- canonicalizePath =<< Win32.getModuleFileName Win32.nullHANDLE newPaths' <- traverse canonicalizePath newPaths let doingSelfUpgrade = any (equalFilePath dstPath) newPaths' if not doingSelfUpgrade then action else do info verbosity $ "cabal-install does the replace-own-exe-file dance..." tmpPath <- moveOurExeOutOfTheWay verbosity result <- action scheduleOurDemise verbosity dstPath tmpPath (\pid path -> ["win32selfupgrade", pid, path ,"--verbose=" ++ Verbosity.showForCabal verbosity]) return result -- | The name of a Win32 Event object that we use to synchronise between the -- old and new processes. 
We need to synchronise to make sure that the old -- process has not yet terminated by the time the new one starts up and looks -- for the old process. Otherwise the old one might have already terminated -- and we could not wait on it terminating reliably (eg the PID might get -- re-used). -- syncEventName :: String syncEventName = "Local\\cabal-install-upgrade" -- | The first part of allowing our exe file to be replaced is to move the -- existing exe file out of the way. Although we cannot delete our exe file -- while we're still running, fortunately we can rename it, at least within -- the same directory. -- moveOurExeOutOfTheWay :: Verbosity -> IO FilePath moveOurExeOutOfTheWay verbosity = do ourPID <- getCurrentProcessId dstPath <- Win32.getModuleFileName Win32.nullHANDLE let tmpPath = replaceBaseName dstPath (takeBaseName dstPath ++ show ourPID) debug verbosity $ "moving " ++ dstPath ++ " to " ++ tmpPath Win32.moveFile dstPath tmpPath return tmpPath -- | Assuming we've now installed the new exe file in the right place, we -- launch it and ask it to delete our exe file when we eventually terminate. -- scheduleOurDemise :: Verbosity -> FilePath -> FilePath -> (String -> FilePath -> [String]) -> IO () scheduleOurDemise verbosity dstPath tmpPath mkArgs = do ourPID <- getCurrentProcessId event <- createEvent syncEventName let args = mkArgs (show ourPID) tmpPath log $ "launching child " ++ unwords (dstPath : map show args) _ <- runProcess dstPath args Nothing Nothing Nothing Nothing Nothing log $ "waiting for the child to start up" waitForSingleObject event (10*1000) -- wait at most 10 sec log $ "child started ok" where log msg = debug verbosity ("Win32Reinstall.parent: " ++ msg) -- | Assuming we're now in the new child process, we've been asked by the old -- process to wait for it to terminate and then we can remove the old exe file -- that it renamed itself to. 
-- deleteOldExeFile :: Verbosity -> Int -> FilePath -> IO () deleteOldExeFile verbosity oldPID tmpPath = do log $ "process started. Will delete exe file of process " ++ show oldPID ++ " at path " ++ tmpPath log $ "getting handle of parent process " ++ show oldPID oldPHANDLE <- Win32.openProcess Win32.sYNCHRONIZE False (fromIntegral oldPID) log $ "synchronising with parent" event <- openEvent syncEventName setEvent event log $ "waiting for parent process to terminate" waitForSingleObject oldPHANDLE Win32.iNFINITE log $ "parent process terminated" log $ "deleting parent's old .exe file" Win32.deleteFile tmpPath where log msg = debug verbosity ("Win32Reinstall.child: " ++ msg) ------------------------ -- Win32 foreign imports -- -- A bunch of functions sadly not provided by the Win32 package. #ifdef x86_64_HOST_ARCH #define CALLCONV ccall #else #define CALLCONV stdcall #endif foreign import CALLCONV unsafe "windows.h GetCurrentProcessId" getCurrentProcessId :: IO DWORD foreign import CALLCONV unsafe "windows.h WaitForSingleObject" waitForSingleObject_ :: HANDLE -> DWORD -> IO DWORD waitForSingleObject :: HANDLE -> DWORD -> IO () waitForSingleObject handle timeout = Win32.failIf_ bad "WaitForSingleObject" $ waitForSingleObject_ handle timeout where bad result = not (result == 0 || result == wAIT_TIMEOUT) wAIT_TIMEOUT = 0x00000102 foreign import CALLCONV unsafe "windows.h CreateEventW" createEvent_ :: Ptr () -> BOOL -> BOOL -> LPCTSTR -> IO HANDLE createEvent :: String -> IO HANDLE createEvent name = do Win32.failIfNull "CreateEvent" $ Win32.withTString name $ createEvent_ nullPtr False False foreign import CALLCONV unsafe "windows.h OpenEventW" openEvent_ :: DWORD -> BOOL -> LPCTSTR -> IO HANDLE openEvent :: String -> IO HANDLE openEvent name = do Win32.failIfNull "OpenEvent" $ Win32.withTString name $ openEvent_ eVENT_MODIFY_STATE False where eVENT_MODIFY_STATE :: DWORD eVENT_MODIFY_STATE = 0x0002 foreign import CALLCONV unsafe "windows.h SetEvent" setEvent_ :: HANDLE 
-> IO BOOL setEvent :: HANDLE -> IO () setEvent handle = Win32.failIfFalse_ "SetEvent" $ setEvent_ handle #else import Distribution.Simple.Utils (die') possibleSelfUpgrade :: Verbosity -> [FilePath] -> IO a -> IO a possibleSelfUpgrade _ _ action = action deleteOldExeFile :: Verbosity -> Int -> FilePath -> IO () deleteOldExeFile verbosity _ _ = die' verbosity "win32selfupgrade not needed except on win32" #endif cabal-install-3.8.1.0/src/Distribution/Deprecated/0000755000000000000000000000000007346545000020066 5ustar0000000000000000cabal-install-3.8.1.0/src/Distribution/Deprecated/ParseUtils.hs0000644000000000000000000003256307346545000022526 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Deprecated.ParseUtils -- Copyright : (c) The University of Glasgow 2004 -- License : BSD3 -- -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Utilities for parsing 'PackageDescription' and 'InstalledPackageInfo'. -- -- The @.cabal@ file format is not trivial, especially with the introduction -- of configurations and the section syntax that goes with that. This module -- has a bunch of parsing functions that is used by the @.cabal@ parser and a -- couple others. It has the parsing framework code and also little parsers for -- many of the formats we get in various @.cabal@ file fields, like module -- names, comma separated lists etc. -- This module is meant to be local-only to Distribution... 
{-# OPTIONS_HADDOCK hide #-} {-# LANGUAGE Rank2Types #-} module Distribution.Deprecated.ParseUtils ( LineNo, PError(..), PWarning(..), locatedErrorMsg, syntaxError, warning, runP, runE, ParseResult(..), parseFail, showPWarning, Field(..), lineNo, FieldDescr(..), readFields, parseHaskellString, parseTokenQ, parseOptCommaList, showFilePath, showToken, showFreeText, field, simpleField, listField, listFieldWithSep, spaceListField, newLineListField, liftField, readPToMaybe, fieldParsec, simpleFieldParsec, listFieldParsec, commaListFieldParsec, commaNewLineListFieldParsec, UnrecFieldParser, ) where import Distribution.Client.Compat.Prelude hiding (get) import Prelude () import Distribution.Deprecated.ReadP as ReadP hiding (get) import Distribution.Pretty import Distribution.ReadE import Distribution.Utils.Generic import System.FilePath (normalise) import Text.PrettyPrint (Doc, punctuate, comma, fsep, sep) import qualified Text.Read as Read import qualified Control.Monad.Fail as Fail import Distribution.Parsec (ParsecParser, parsecLeadingCommaList, parsecLeadingOptCommaList) import qualified Data.ByteString as BS import qualified Distribution.Fields as Fields import qualified Distribution.Fields.Field as Fields import qualified Distribution.Parsec as Parsec import qualified Distribution.Fields.LexerMonad as Fields import qualified Text.Parsec.Error as PE import qualified Text.Parsec.Pos as PP -- ----------------------------------------------------------------------------- type LineNo = Int data PError = AmbiguousParse String LineNo | NoParse String LineNo | TabsError LineNo | FromString String (Maybe LineNo) deriving (Eq, Show) data PWarning = PWarning String | UTFWarning LineNo String deriving (Eq, Show) showPWarning :: FilePath -> PWarning -> String showPWarning fpath (PWarning msg) = normalise fpath ++ ": " ++ msg showPWarning fpath (UTFWarning line fname) = normalise fpath ++ ":" ++ show line ++ ": Invalid UTF-8 text in the '" ++ fname ++ "' field." 
data ParseResult a = ParseFailed PError | ParseOk [PWarning] a deriving Show instance Functor ParseResult where fmap _ (ParseFailed err) = ParseFailed err fmap f (ParseOk ws x) = ParseOk ws $ f x instance Applicative ParseResult where pure = ParseOk [] (<*>) = ap instance Monad ParseResult where return = pure ParseFailed err >>= _ = ParseFailed err ParseOk ws x >>= f = case f x of ParseFailed err -> ParseFailed err ParseOk ws' x' -> ParseOk (ws'++ws) x' #if !(MIN_VERSION_base(4,9,0)) fail = parseResultFail #elif !(MIN_VERSION_base(4,13,0)) fail = Fail.fail #endif instance Foldable ParseResult where foldMap _ (ParseFailed _ ) = mempty foldMap f (ParseOk _ x) = f x instance Traversable ParseResult where traverse _ (ParseFailed err) = pure (ParseFailed err) traverse f (ParseOk ws x) = ParseOk ws <$> f x instance Fail.MonadFail ParseResult where fail = parseResultFail parseResultFail :: String -> ParseResult a parseResultFail s = parseFail (FromString s Nothing) parseFail :: PError -> ParseResult a parseFail = ParseFailed runP :: LineNo -> String -> ReadP a a -> String -> ParseResult a runP line fieldname p s = case [ x | (x,"") <- results ] of [a] -> ParseOk (utf8Warnings line fieldname s) a --TODO: what is this double parse thing all about? -- Can't we just do the all isSpace test the first time? [] -> case [ x | (x,ys) <- results, all isSpace ys ] of [a] -> ParseOk (utf8Warnings line fieldname s) a [] -> ParseFailed (NoParse fieldname line) _ -> ParseFailed (AmbiguousParse fieldname line) _ -> ParseFailed (AmbiguousParse fieldname line) where results = readP_to_S p s runE :: LineNo -> String -> ReadE a -> String -> ParseResult a runE line fieldname p s = case runReadE p s of Right a -> ParseOk (utf8Warnings line fieldname s) a Left e -> syntaxError line $ "Parse of field '" ++ fieldname ++ "' failed (" ++ e ++ "): " ++ s utf8Warnings :: LineNo -> String -> String -> [PWarning] utf8Warnings line fieldname s = take 1 [ UTFWarning n fieldname | (n,l) <- zip [line..] 
(lines s) , '\xfffd' `elem` l ] locatedErrorMsg :: PError -> (Maybe LineNo, String) locatedErrorMsg (AmbiguousParse f n) = (Just n, "Ambiguous parse in field '"++f++"'.") locatedErrorMsg (NoParse f n) = (Just n, "Parse of field '"++f++"' failed.") locatedErrorMsg (TabsError n) = (Just n, "Tab used as indentation.") locatedErrorMsg (FromString s n) = (n, s) syntaxError :: LineNo -> String -> ParseResult a syntaxError n s = ParseFailed $ FromString s (Just n) warning :: String -> ParseResult () warning s = ParseOk [PWarning s] () -- | Field descriptor. The parameter @a@ parameterizes over where the field's -- value is stored in. data FieldDescr a = FieldDescr { fieldName :: String , fieldGet :: a -> Doc , fieldSet :: LineNo -> String -> a -> ParseResult a -- ^ @fieldSet n str x@ Parses the field value from the given input -- string @str@ and stores the result in @x@ if the parse was -- successful. Otherwise, reports an error on line number @n@. } field :: String -> (a -> Doc) -> ReadP a a -> FieldDescr a field name showF readF = FieldDescr name showF (\line val _st -> runP line name readF val) fieldParsec :: String -> (a -> Doc) -> ParsecParser a -> FieldDescr a fieldParsec name showF readF = FieldDescr name showF $ \line val _st -> case explicitEitherParsec readF val of Left err -> ParseFailed (FromString err (Just line)) Right x -> ParseOk [] x -- Lift a field descriptor storing into an 'a' to a field descriptor storing -- into a 'b'. liftField :: (b -> a) -> (a -> b -> b) -> FieldDescr a -> FieldDescr b liftField get set (FieldDescr name showF parseF) = FieldDescr name (showF . get) (\line str b -> do a <- parseF line str (get b) return (set a b)) -- Parser combinator for simple fields. Takes a field name, a pretty printer, -- a parser function, an accessor, and a setter, returns a FieldDescr over the -- compoid structure. 
simpleField :: String -> (a -> Doc) -> ReadP a a -> (b -> a) -> (a -> b -> b) -> FieldDescr b simpleField name showF readF get set = liftField get set $ field name showF readF simpleFieldParsec :: String -> (a -> Doc) -> ParsecParser a -> (b -> a) -> (a -> b -> b) -> FieldDescr b simpleFieldParsec name showF readF get set = liftField get set $ fieldParsec name showF readF commaListFieldWithSepParsec :: Separator -> String -> (a -> Doc) -> ParsecParser a -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b commaListFieldWithSepParsec separator name showF readF get set = liftField get set' $ fieldParsec name showF' (parsecLeadingCommaList readF) where set' xs b = set (get b ++ xs) b showF' = separator . punctuate comma . map showF commaListFieldParsec :: String -> (a -> Doc) -> ParsecParser a -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b commaListFieldParsec = commaListFieldWithSepParsec fsep commaNewLineListFieldParsec :: String -> (a -> Doc) -> ParsecParser a -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b commaNewLineListFieldParsec = commaListFieldWithSepParsec sep spaceListField :: String -> (a -> Doc) -> ReadP [a] a -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b spaceListField name showF readF get set = liftField get set' $ field name showF' (parseSpaceList readF) where set' xs b = set (get b ++ xs) b showF' = fsep . map showF -- this is a different definition from listField, like -- commaNewLineListField it pretty prints on multiple lines newLineListField :: String -> (a -> Doc) -> ReadP [a] a -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b newLineListField = listFieldWithSep sep listFieldWithSep :: Separator -> String -> (a -> Doc) -> ReadP [a] a -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b listFieldWithSep separator name showF readF get set = liftField get set' $ field name showF' (parseOptCommaList readF) where set' xs b = set (get b ++ xs) b showF' = separator . 
map showF listFieldWithSepParsec :: Separator -> String -> (a -> Doc) -> ParsecParser a -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b listFieldWithSepParsec separator name showF readF get set = liftField get set' $ fieldParsec name showF' (parsecLeadingOptCommaList readF) where set' xs b = set (get b ++ xs) b showF' = separator . map showF listField :: String -> (a -> Doc) -> ReadP [a] a -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b listField = listFieldWithSep fsep listFieldParsec :: String -> (a -> Doc) -> ParsecParser a -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b listFieldParsec = listFieldWithSepParsec fsep -- | The type of a function which, given a name-value pair of an -- unrecognized field, and the current structure being built, -- decides whether to incorporate the unrecognized field -- (by returning Just x, where x is a possibly modified version -- of the structure being built), or not (by returning Nothing). type UnrecFieldParser a = (String,String) -> a -> Maybe a ------------------------------------------------------------------------------ -- The data type for our three syntactic categories data Field = F LineNo String String -- ^ A regular @: @ field | Section LineNo String String [Field] -- ^ A section with a name and possible parameter. 
The syntactic -- structure is: -- -- @ -- { -- * -- } -- @ deriving (Show ,Eq) -- for testing lineNo :: Field -> LineNo lineNo (F n _ _) = n lineNo (Section n _ _ _) = n readFields :: BS.ByteString -> ParseResult [Field] readFields input = case Fields.readFields' input of Right (fs, ws) -> ParseOk [ PWarning msg | Fields.PWarning _ _ msg <- Fields.toPWarnings ws ] (legacyFields fs) Left perr -> ParseFailed $ NoParse (PE.showErrorMessages "or" "unknown parse error" "expecting" "unexpected" "end of file" (PE.errorMessages perr)) (PP.sourceLine pos) where pos = PE.errorPos perr legacyFields :: [Fields.Field Parsec.Position] -> [Field] legacyFields = map legacyField legacyField :: Fields.Field Parsec.Position -> Field legacyField (Fields.Field (Fields.Name pos name) fls) = F (posToLineNo pos) (fromUTF8BS name) (Fields.fieldLinesToString fls) legacyField (Fields.Section (Fields.Name pos name) args fs) = Section (posToLineNo pos) (fromUTF8BS name) (Fields.sectionArgsToString args) (legacyFields fs) posToLineNo :: Parsec.Position -> LineNo posToLineNo (Parsec.Position row _) = row ------------------------------------------------------------------------------ -- urgh, we can't define optQuotes :: ReadP r a -> ReadP r a -- because the "compat" version of ReadP isn't quite powerful enough. In -- particular, the type of <++ is ReadP r r -> ReadP r a -> ReadP r a -- Hence the trick above to make 'lic' polymorphic. -- Different than the naive version. it turns out Read instance for String accepts -- the ['a', 'b'] syntax, which we do not want. In particular it messes -- up any token starting with []. 
parseHaskellString :: ReadP r String parseHaskellString = readS_to_P $ Read.readPrec_to_S (do Read.String s <- Read.lexP; return s) 0 parseTokenQ :: ReadP r String parseTokenQ = parseHaskellString <++ munch1 (\x -> not (isSpace x) && x /= ',') parseSpaceList :: ReadP r a -- ^The parser for the stuff between commas -> ReadP r [a] parseSpaceList p = sepBy p skipSpaces -- This version avoid parse ambiguity for list element parsers -- that have multiple valid parses of prefixes. parseOptCommaList :: ReadP r a -> ReadP r [a] parseOptCommaList p = sepBy p localSep where -- The separator must not be empty or it introduces ambiguity localSep = (skipSpaces >> char ',' >> skipSpaces) +++ (satisfy isSpace >> skipSpaces) readPToMaybe :: ReadP a a -> String -> Maybe a readPToMaybe p str = listToMaybe [ r | (r,s) <- readP_to_S p str , all isSpace s ] cabal-install-3.8.1.0/src/Distribution/Deprecated/ReadP.hs0000644000000000000000000003503407346545000021422 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE GADTs #-} ----------------------------------------------------------------------------- -- | -- -- Module : Distribution.Deprecated.ReadP -- Copyright : (c) The University of Glasgow 2002 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : libraries@haskell.org -- Portability : portable -- -- This is a library of parser combinators, originally written by Koen Claessen. -- It parses all alternatives in parallel, so it never keeps hold of -- the beginning of the input string, a common source of space leaks with -- other parsers. The '(+++)' choice combinator is genuinely commutative; -- it makes no difference which branch is \"shorter\". -- -- See also Koen's paper /Parallel Parsing Processes/ -- (). 
-- -- This version of ReadP has been locally hacked to make it H98, by -- Martin Sjögren -- -- The unit tests have been moved to UnitTest.Distribution.Deprecated.ReadP, by -- Mark Lentczner ----------------------------------------------------------------------------- module Distribution.Deprecated.ReadP ( -- * The 'ReadP' type ReadP, -- :: * -> *; instance Functor, Monad, MonadPlus -- * Primitive operations get, -- :: ReadP Char look, -- :: ReadP String (+++), -- :: ReadP a -> ReadP a -> ReadP a (<++), -- :: ReadP a -> ReadP a -> ReadP a gather, -- :: ReadP a -> ReadP (String, a) -- * Other operations pfail, -- :: ReadP a eof, -- :: ReadP () satisfy, -- :: (Char -> Bool) -> ReadP Char char, -- :: Char -> ReadP Char string, -- :: String -> ReadP String munch, -- :: (Char -> Bool) -> ReadP String munch1, -- :: (Char -> Bool) -> ReadP String skipSpaces, -- :: ReadP () skipSpaces1,-- :: ReadP () choice, -- :: [ReadP a] -> ReadP a count, -- :: Int -> ReadP a -> ReadP [a] between, -- :: ReadP open -> ReadP close -> ReadP a -> ReadP a option, -- :: a -> ReadP a -> ReadP a optional, -- :: ReadP a -> ReadP () many, -- :: ReadP a -> ReadP [a] many1, -- :: ReadP a -> ReadP [a] skipMany, -- :: ReadP a -> ReadP () skipMany1, -- :: ReadP a -> ReadP () sepBy, -- :: ReadP a -> ReadP sep -> ReadP [a] sepBy1, -- :: ReadP a -> ReadP sep -> ReadP [a] endBy, -- :: ReadP a -> ReadP sep -> ReadP [a] endBy1, -- :: ReadP a -> ReadP sep -> ReadP [a] chainr, -- :: ReadP a -> ReadP (a -> a -> a) -> a -> ReadP a chainl, -- :: ReadP a -> ReadP (a -> a -> a) -> a -> ReadP a chainl1, -- :: ReadP a -> ReadP (a -> a -> a) -> ReadP a chainr1, -- :: ReadP a -> ReadP (a -> a -> a) -> ReadP a manyTill, -- :: ReadP a -> ReadP end -> ReadP [a] -- * Running a parser ReadS, -- :: *; = String -> [(a,String)] readP_to_S, -- :: ReadP a -> ReadS a readS_to_P, -- :: ReadS a -> ReadP a readP_to_E, -- ** Internal Parser, ) where import Prelude () import Distribution.Client.Compat.Prelude hiding (many, get) import 
Control.Monad( replicateM, (>=>) ) import qualified Control.Monad.Fail as Fail import Distribution.ReadE (ReadE (..)) infixr 5 +++, <++ -- --------------------------------------------------------------------------- -- The P type -- is representation type -- should be kept abstract data P s a = Get (s -> P s a) | Look ([s] -> P s a) | Fail | Result a (P s a) | Final [(a,[s])] -- invariant: list is non-empty! -- Monad, MonadPlus instance Functor (P s) where fmap = liftM instance Applicative (P s) where pure x = Result x Fail (<*>) = ap instance Monad (P s) where return = pure (Get f) >>= k = Get (f >=> k) (Look f) >>= k = Look (f >=> k) Fail >>= _ = Fail (Result x p) >>= k = k x `mplus` (p >>= k) (Final r) >>= k = final [ys' | (x,s) <- r, ys' <- run (k x) s] #if !(MIN_VERSION_base(4,9,0)) fail _ = Fail #elif !(MIN_VERSION_base(4,13,0)) fail = Fail.fail #endif instance Fail.MonadFail (P s) where fail _ = Fail instance Alternative (P s) where empty = mzero (<|>) = mplus instance MonadPlus (P s) where mzero = Fail -- most common case: two gets are combined Get f1 `mplus` Get f2 = Get (\c -> f1 c `mplus` f2 c) -- results are delivered as soon as possible Result x p `mplus` q = Result x (p `mplus` q) p `mplus` Result x q = Result x (p `mplus` q) -- fail disappears Fail `mplus` p = p p `mplus` Fail = p -- two finals are combined -- final + look becomes one look and one final (=optimization) -- final + sthg else becomes one look and one final Final r `mplus` Final t = Final (r ++ t) Final r `mplus` Look f = Look (\s -> Final (r ++ run (f s) s)) Final r `mplus` p = Look (\s -> Final (r ++ run p s)) Look f `mplus` Final r = Look (\s -> Final (run (f s) s ++ r)) p `mplus` Final r = Look (\s -> Final (run p s ++ r)) -- two looks are combined (=optimization) -- look + sthg else floats upwards Look f `mplus` Look g = Look (\s -> f s `mplus` g s) Look f `mplus` p = Look (\s -> f s `mplus` p) p `mplus` Look f = Look (\s -> p `mplus` f s) -- 
--------------------------------------------------------------------------- -- The ReadP type newtype Parser r s a = R ((a -> P s r) -> P s r) type ReadP r a = Parser r Char a -- Functor, Monad, MonadPlus instance Functor (Parser r s) where fmap h (R f) = R (\k -> f (k . h)) instance Applicative (Parser r s) where pure x = R (\k -> k x) (<*>) = ap instance s ~ Char => Alternative (Parser r s) where empty = pfail (<|>) = (+++) instance Monad (Parser r s) where return = pure R m >>= f = R (\k -> m (\a -> let R m' = f a in m' k)) #if !(MIN_VERSION_base(4,9,0)) fail _ = R (const Fail) #elif !(MIN_VERSION_base(4,13,0)) fail = Fail.fail #endif instance Fail.MonadFail (Parser r s) where fail _ = R (const Fail) instance s ~ Char => MonadPlus (Parser r s) where mzero = pfail mplus = (+++) -- --------------------------------------------------------------------------- -- Operations over P final :: [(a,[s])] -> P s a -- Maintains invariant for Final constructor final [] = Fail final r = Final r run :: P c a -> ([c] -> [(a, [c])]) run (Get f) (c:s) = run (f c) s run (Look f) s = run (f s) s run (Result x p) s = (x,s) : run p s run (Final r) _ = r run _ _ = [] -- --------------------------------------------------------------------------- -- Operations over ReadP get :: ReadP r Char -- ^ Consumes and returns the next character. -- Fails if there is no input left. get = R Get look :: ReadP r String -- ^ Look-ahead: returns the part of the input that is left, without -- consuming it. look = R Look pfail :: ReadP r a -- ^ Always fails. pfail = R (const Fail) eof :: ReadP r () -- ^ Succeeds iff we are at the end of input eof = do { s <- look ; if null s then return () else pfail } (+++) :: ReadP r a -> ReadP r a -> ReadP r a -- ^ Symmetric choice. R f1 +++ R f2 = R (\k -> f1 k `mplus` f2 k) (<++) :: ReadP a a -> ReadP r a -> ReadP r a -- ^ Local, exclusive, left-biased choice: If left parser -- locally produces any result at all, then right parser is -- not used. 
R f <++ q = do s <- look probe (f return) s 0 where probe (Get f') (c:s) n = probe (f' c) s (n+1 :: Int) probe (Look f') s n = probe (f' s) s n probe p@(Result _ _) _ n = discard n >> R (p >>=) probe (Final r) _ _ = R (Final r >>=) probe _ _ _ = q discard 0 = return () discard n = get >> discard (n-1 :: Int) gather :: ReadP (String -> P Char r) a -> ReadP r (String, a) -- ^ Transforms a parser into one that does the same, but -- in addition returns the exact characters read. -- IMPORTANT NOTE: 'gather' gives a runtime error if its first argument -- is built using any occurrences of readS_to_P. gather (R m) = R (\k -> gath id (m (\a -> return (\s -> k (s,a))))) where gath l (Get f) = Get (\c -> gath (l.(c:)) (f c)) gath _ Fail = Fail gath l (Look f) = Look (gath l . f) gath l (Result k p) = k (l []) `mplus` gath l p gath _ (Final _) = error "do not use readS_to_P in gather!" -- --------------------------------------------------------------------------- -- Derived operations satisfy :: (Char -> Bool) -> ReadP r Char -- ^ Consumes and returns the next character, if it satisfies the -- specified predicate. satisfy p = do c <- get; if p c then return c else pfail char :: Char -> ReadP r Char -- ^ Parses and returns the specified character. char c = satisfy (c ==) string :: String -> ReadP r String -- ^ Parses and returns the specified string. string this = do s <- look; scan this s where scan [] _ = return this scan (x:xs) (y:ys) | x == y = get >> scan xs ys scan _ _ = pfail munch :: (Char -> Bool) -> ReadP r String -- ^ Parses the first zero or more characters satisfying the predicate. munch p = do s <- look scan s where scan (c:cs) | p c = do _ <- get; s <- scan cs; return (c:s) scan _ = do return "" munch1 :: (Char -> Bool) -> ReadP r String -- ^ Parses the first one or more characters satisfying the predicate. 
munch1 p = do c <- get if p c then do s <- munch p; return (c:s) else pfail choice :: [ReadP r a] -> ReadP r a -- ^ Combines all parsers in the specified list. choice [] = pfail choice [p] = p choice (p:ps) = p +++ choice ps skipSpaces :: ReadP r () -- ^ Skips all whitespace. skipSpaces = do s <- look skip s where skip (c:s) | isSpace c = do _ <- get; skip s skip _ = do return () skipSpaces1 :: ReadP r () -- ^ Like 'skipSpaces' but succeeds only if there is at least one -- whitespace character to skip. skipSpaces1 = satisfy isSpace >> skipSpaces count :: Int -> ReadP r a -> ReadP r [a] -- ^ @ count n p @ parses @n@ occurrences of @p@ in sequence. A list of -- results is returned. count n p = replicateM n p between :: ReadP r open -> ReadP r close -> ReadP r a -> ReadP r a -- ^ @ between open close p @ parses @open@, followed by @p@ and finally -- @close@. Only the value of @p@ is returned. between open close p = do _ <- open x <- p _ <- close return x option :: a -> ReadP r a -> ReadP r a -- ^ @option x p@ will either parse @p@ or return @x@ without consuming -- any input. option x p = p +++ return x optional :: ReadP r a -> ReadP r () -- ^ @optional p@ optionally parses @p@ and always returns @()@. optional p = (p >> return ()) +++ return () many :: ReadP r a -> ReadP r [a] -- ^ Parses zero or more occurrences of the given parser. many p = return [] +++ many1 p many1 :: ReadP r a -> ReadP r [a] -- ^ Parses one or more occurrences of the given parser. many1 p = liftM2 (:) p (many p) skipMany :: ReadP r a -> ReadP r () -- ^ Like 'many', but discards the result. skipMany p = many p >> return () skipMany1 :: ReadP r a -> ReadP r () -- ^ Like 'many1', but discards the result. skipMany1 p = p >> skipMany p sepBy :: ReadP r a -> ReadP r sep -> ReadP r [a] -- ^ @sepBy p sep@ parses zero or more occurrences of @p@, separated by @sep@. -- Returns a list of values returned by @p@. 
sepBy p sep = sepBy1 p sep +++ return [] sepBy1 :: ReadP r a -> ReadP r sep -> ReadP r [a] -- ^ @sepBy1 p sep@ parses one or more occurrences of @p@, separated by @sep@. -- Returns a list of values returned by @p@. sepBy1 p sep = liftM2 (:) p (many (sep >> p)) endBy :: ReadP r a -> ReadP r sep -> ReadP r [a] -- ^ @endBy p sep@ parses zero or more occurrences of @p@, separated and ended -- by @sep@. endBy p sep = many (do x <- p ; _ <- sep ; return x) endBy1 :: ReadP r a -> ReadP r sep -> ReadP r [a] -- ^ @endBy p sep@ parses one or more occurrences of @p@, separated and ended -- by @sep@. endBy1 p sep = many1 (do x <- p ; _ <- sep ; return x) chainr :: ReadP r a -> ReadP r (a -> a -> a) -> a -> ReadP r a -- ^ @chainr p op x@ parses zero or more occurrences of @p@, separated by @op@. -- Returns a value produced by a /right/ associative application of all -- functions returned by @op@. If there are no occurrences of @p@, @x@ is -- returned. chainr p op x = chainr1 p op +++ return x chainl :: ReadP r a -> ReadP r (a -> a -> a) -> a -> ReadP r a -- ^ @chainl p op x@ parses zero or more occurrences of @p@, separated by @op@. -- Returns a value produced by a /left/ associative application of all -- functions returned by @op@. If there are no occurrences of @p@, @x@ is -- returned. chainl p op x = chainl1 p op +++ return x chainr1 :: ReadP r a -> ReadP r (a -> a -> a) -> ReadP r a -- ^ Like 'chainr', but parses one or more occurrences of @p@. chainr1 p op = scan where scan = p >>= rest rest x = do f <- op y <- scan return (f x y) +++ return x chainl1 :: ReadP r a -> ReadP r (a -> a -> a) -> ReadP r a -- ^ Like 'chainl', but parses one or more occurrences of @p@. chainl1 p op = p >>= rest where rest x = do f <- op y <- p rest (f x y) +++ return x manyTill :: ReadP r a -> ReadP [a] end -> ReadP r [a] -- ^ @manyTill p end@ parses zero or more occurrences of @p@, until @end@ -- succeeds. Returns a list of values returned by @p@. 
manyTill p end = scan where scan = (end >> return []) <++ (liftM2 (:) p scan) -- --------------------------------------------------------------------------- -- Converting between ReadP and Read readP_to_S :: ReadP a a -> ReadS a -- ^ Converts a parser into a Haskell ReadS-style function. -- This is the main way in which you can \"run\" a 'ReadP' parser: -- the expanded type is -- @ readP_to_S :: ReadP a -> String -> [(a,String)] @ readP_to_S (R f) = run (f return) readS_to_P :: ReadS a -> ReadP r a -- ^ Converts a Haskell ReadS-style function into a parser. -- Warning: This introduces local backtracking in the resulting -- parser, and therefore a possible inefficiency. readS_to_P r = R (\k -> Look (\s -> final [bs'' | (a,s') <- r s, bs'' <- run (k a) s'])) ------------------------------------------------------------------------------- -- ReadE ------------------------------------------------------------------------------- readP_to_E :: (String -> String) -> ReadP a a -> ReadE a readP_to_E err r = ReadE $ \txt -> case [ p | (p, s) <- readP_to_S r txt , all isSpace s ] of [] -> Left (err txt) (p:_) -> Right p cabal-install-3.8.1.0/src/Distribution/Deprecated/ViewAsFieldDescr.hs0000644000000000000000000000652507346545000023555 0ustar0000000000000000module Distribution.Deprecated.ViewAsFieldDescr ( viewAsFieldDescr ) where import Distribution.Client.Compat.Prelude hiding (get) import Prelude () import qualified Data.List.NonEmpty as NE import Distribution.ReadE (parsecToReadE) import Distribution.Simple.Command import Text.PrettyPrint (cat, comma, punctuate, text) import Text.PrettyPrint as PP (empty) import Distribution.Deprecated.ParseUtils (FieldDescr (..), runE, syntaxError) -- | to view as a FieldDescr, we sort the list of interfaces (Req > Bool > -- Choice > Opt) and consider only the first one. 
viewAsFieldDescr :: OptionField a -> FieldDescr a viewAsFieldDescr (OptionField _n []) = error "Distribution.command.viewAsFieldDescr: unexpected" viewAsFieldDescr (OptionField n (d:dd)) = FieldDescr n get set where optDescr = head $ NE.sortBy cmp (d:|dd) cmp :: OptDescr a -> OptDescr a -> Ordering ReqArg{} `cmp` ReqArg{} = EQ ReqArg{} `cmp` _ = GT BoolOpt{} `cmp` ReqArg{} = LT BoolOpt{} `cmp` BoolOpt{} = EQ BoolOpt{} `cmp` _ = GT ChoiceOpt{} `cmp` ReqArg{} = LT ChoiceOpt{} `cmp` BoolOpt{} = LT ChoiceOpt{} `cmp` ChoiceOpt{} = EQ ChoiceOpt{} `cmp` _ = GT OptArg{} `cmp` OptArg{} = EQ OptArg{} `cmp` _ = LT -- get :: a -> Doc get t = case optDescr of ReqArg _ _ _ _ ppr -> (cat . punctuate comma . map text . ppr) t OptArg _ _ _ _ _ ppr -> case ppr t of [] -> PP.empty (Nothing : _) -> text "True" (Just a : _) -> text a ChoiceOpt alts -> fromMaybe PP.empty $ listToMaybe [ text lf | (_,(_,lf:_), _,enabled) <- alts, enabled t] BoolOpt _ _ _ _ enabled -> (maybe PP.empty pretty . enabled) t -- set :: LineNo -> String -> a -> ParseResult a set line val a = case optDescr of ReqArg _ _ _ readE _ -> ($ a) `liftM` runE line n readE val -- We parse for a single value instead of a -- list, as one can't really implement -- parseList :: ReadE a -> ReadE [a] with -- the current ReadE definition ChoiceOpt{} -> case getChoiceByLongFlag optDescr val of Just f -> return (f a) _ -> syntaxError line val BoolOpt _ _ _ setV _ -> (`setV` a) `liftM` runE line n (parsecToReadE ("" ++) parsec) val OptArg _ _ _ readE _ _ -> ($ a) `liftM` runE line n readE val -- Optional arguments are parsed just like -- required arguments here; we don't -- provide a method to set an OptArg field -- to the default value. 
getChoiceByLongFlag :: OptDescr a -> String -> Maybe (a -> a) getChoiceByLongFlag (ChoiceOpt alts) val = listToMaybe [ set | (_,(_sf,lf:_), set, _) <- alts , lf == val] getChoiceByLongFlag _ _ = error "Distribution.command.getChoiceByLongFlag: expected a choice option" cabal-install-3.8.1.0/tests/0000755000000000000000000000000007346545000013722 5ustar0000000000000000cabal-install-3.8.1.0/tests/IntegrationTests2.hs0000644000000000000000000023132507346545000017654 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE BangPatterns #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE OverloadedStrings #-} -- For the handy instance IsString PackageIdentifier {-# OPTIONS_GHC -fno-warn-orphans #-} module Main where import Distribution.Client.Compat.Prelude import Prelude () import Distribution.Client.DistDirLayout import Distribution.Client.ProjectConfig import Distribution.Client.Config (getCabalDir) import Distribution.Client.HttpUtils import Distribution.Client.TargetSelector hiding (DirActions(..)) import qualified Distribution.Client.TargetSelector as TS (DirActions(..)) import Distribution.Client.ProjectPlanning import Distribution.Client.ProjectPlanning.Types import Distribution.Client.ProjectBuilding import Distribution.Client.ProjectOrchestration ( resolveTargets, distinctTargetComponents ) import Distribution.Client.TargetProblem ( TargetProblem', TargetProblem (..) ) import Distribution.Client.Types ( PackageLocation(..), UnresolvedSourcePackage , PackageSpecifier(..) 
) import Distribution.Client.Targets ( UserConstraint(..), UserConstraintScope(UserAnyQualifier) ) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Solver.Types.SourcePackage as SP import Distribution.Solver.Types.ConstraintSource ( ConstraintSource(ConstraintSourceUnknown) ) import Distribution.Solver.Types.PackageConstraint ( PackageProperty(PackagePropertySource) ) import qualified Distribution.Client.CmdBuild as CmdBuild import qualified Distribution.Client.CmdRepl as CmdRepl import qualified Distribution.Client.CmdRun as CmdRun import qualified Distribution.Client.CmdTest as CmdTest import qualified Distribution.Client.CmdBench as CmdBench import qualified Distribution.Client.CmdHaddock as CmdHaddock import qualified Distribution.Client.CmdListBin as CmdListBin import Distribution.Package import Distribution.PackageDescription import Distribution.InstalledPackageInfo (InstalledPackageInfo) import Distribution.Simple.Setup (toFlag, HaddockFlags(..), defaultHaddockFlags) import Distribution.Client.Setup (globalCommand) import Distribution.Simple.Compiler import Distribution.Simple.Command import qualified Distribution.Simple.Flag as Flag import Distribution.System import Distribution.Version import Distribution.ModuleName (ModuleName) import Distribution.Text import Distribution.Utils.Path import qualified Data.Map as Map import qualified Data.Set as Set import Control.Monad import Control.Concurrent (threadDelay) import Control.Exception hiding (assert) import System.FilePath import System.Directory import System.IO (hPutStrLn, stderr) import Test.Tasty import Test.Tasty.HUnit import Test.Tasty.Options import Data.Tagged (Tagged(..)) import qualified Data.ByteString as BS import Distribution.Client.GlobalFlags (GlobalFlags, globalNix) import Distribution.Simple.Flag (Flag (Flag, NoFlag)) import Data.Maybe (fromJust) #if !MIN_VERSION_directory(1,2,7) removePathForcibly :: FilePath -> IO () removePathForcibly = 
removeDirectoryRecursive #endif main :: IO () main = defaultMainWithIngredients (defaultIngredients ++ [includingOptions projectConfigOptionDescriptions]) (withProjectConfig $ \config -> testGroup "Integration tests (internal)" (tests config)) tests :: ProjectConfig -> [TestTree] tests config = --TODO: tests for: -- * normal success -- * dry-run tests with changes [ testGroup "Discovery and planning" $ [ testCase "find root" testFindProjectRoot , testCase "find root fail" testExceptionFindProjectRoot , testCase "no package" (testExceptionInFindingPackage config) , testCase "no package2" (testExceptionInFindingPackage2 config) , testCase "proj conf1" (testExceptionInProjectConfig config) ] , testGroup "Target selectors" $ [ testCaseSteps "valid" testTargetSelectors , testCase "bad syntax" testTargetSelectorBadSyntax , testCaseSteps "ambiguous syntax" testTargetSelectorAmbiguous , testCase "no current pkg" testTargetSelectorNoCurrentPackage , testCase "no targets" testTargetSelectorNoTargets , testCase "project empty" testTargetSelectorProjectEmpty , testCase "canonicalized path" testTargetSelectorCanonicalizedPath , testCase "problems (common)" (testTargetProblemsCommon config) , testCaseSteps "problems (build)" (testTargetProblemsBuild config) , testCaseSteps "problems (repl)" (testTargetProblemsRepl config) , testCaseSteps "problems (run)" (testTargetProblemsRun config) , testCaseSteps "problems (list-bin)" (testTargetProblemsListBin config) , testCaseSteps "problems (test)" (testTargetProblemsTest config) , testCaseSteps "problems (bench)" (testTargetProblemsBench config) , testCaseSteps "problems (haddock)" (testTargetProblemsHaddock config) ] , testGroup "Exceptions during building (local inplace)" $ [ testCase "configure" (testExceptionInConfigureStep config) , testCase "build" (testExceptionInBuildStep config) -- , testCase "register" testExceptionInRegisterStep ] --TODO: need to repeat for packages for the store --TODO: need to check we can build sub-libs, 
foreign libs and exes -- components for non-local packages / packages in the store. , testGroup "Successful builds" $ [ testCaseSteps "Setup script styles" (testSetupScriptStyles config) , testCase "keep-going" (testBuildKeepGoing config) #ifndef mingw32_HOST_OS -- disabled because https://github.com/haskell/cabal/issues/6272 , testCase "local tarball" (testBuildLocalTarball config) #endif ] , testGroup "Regression tests" $ [ testCase "issue #3324" (testRegressionIssue3324 config) , testCase "program options scope all" (testProgramOptionsAll config) , testCase "program options scope local" (testProgramOptionsLocal config) , testCase "program options scope specific" (testProgramOptionsSpecific config) ] , testGroup "Flag tests" $ [ testCase "Test Nix Flag" testNixFlags, testCase "Test Ignore Project Flag" testIgnoreProjectFlag ] ] testFindProjectRoot :: Assertion testFindProjectRoot = do Left (BadProjectRootExplicitFile file) <- findProjectRoot (Just testdir) (Just testfile) file @?= testfile where testdir = basedir "exception" "no-pkg2" testfile = "bklNI8O1OpOUuDu3F4Ij4nv3oAqN" testExceptionFindProjectRoot :: Assertion testExceptionFindProjectRoot = do Right (ProjectRootExplicit dir _) <- findProjectRoot (Just testdir) Nothing cwd <- getCurrentDirectory dir @?= cwd testdir where testdir = basedir "exception" "no-pkg2" testTargetSelectors :: (String -> IO ()) -> Assertion testTargetSelectors reportSubCase = do (_, _, _, localPackages, _) <- configureProject testdir config let readTargetSelectors' = readTargetSelectorsWith (dirActions testdir) localPackages Nothing reportSubCase "cwd" do Right ts <- readTargetSelectors' [] ts @?= [TargetPackage TargetImplicitCwd ["p-0.1"] Nothing] reportSubCase "all" do Right ts <- readTargetSelectors' ["all", ":all"] ts @?= replicate 2 (TargetAllPackages Nothing) reportSubCase "filter" do Right ts <- readTargetSelectors' [ "libs", ":cwd:libs" , "flibs", ":cwd:flibs" , "exes", ":cwd:exes" , "tests", ":cwd:tests" , "benchmarks", 
":cwd:benchmarks"] zipWithM_ (@?=) ts [ TargetPackage TargetImplicitCwd ["p-0.1"] (Just kind) | kind <- concatMap (replicate 2) [LibKind .. ] ] reportSubCase "all:filter" do Right ts <- readTargetSelectors' [ "all:libs", ":all:libs" , "all:flibs", ":all:flibs" , "all:exes", ":all:exes" , "all:tests", ":all:tests" , "all:benchmarks", ":all:benchmarks"] zipWithM_ (@?=) ts [ TargetAllPackages (Just kind) | kind <- concatMap (replicate 2) [LibKind .. ] ] reportSubCase "pkg" do Right ts <- readTargetSelectors' [ ":pkg:p", ".", "./", "p.cabal" , "q", ":pkg:q", "q/", "./q/", "q/q.cabal"] ts @?= replicate 4 (mkTargetPackage "p-0.1") ++ replicate 5 (mkTargetPackage "q-0.1") reportSubCase "pkg:filter" do Right ts <- readTargetSelectors' [ "p:libs", ".:libs", ":pkg:p:libs" , "p:flibs", ".:flibs", ":pkg:p:flibs" , "p:exes", ".:exes", ":pkg:p:exes" , "p:tests", ".:tests", ":pkg:p:tests" , "p:benchmarks", ".:benchmarks", ":pkg:p:benchmarks" , "q:libs", "q/:libs", ":pkg:q:libs" , "q:flibs", "q/:flibs", ":pkg:q:flibs" , "q:exes", "q/:exes", ":pkg:q:exes" , "q:tests", "q/:tests", ":pkg:q:tests" , "q:benchmarks", "q/:benchmarks", ":pkg:q:benchmarks"] zipWithM_ (@?=) ts $ [ TargetPackage TargetExplicitNamed ["p-0.1"] (Just kind) | kind <- concatMap (replicate 3) [LibKind .. ] ] ++ [ TargetPackage TargetExplicitNamed ["q-0.1"] (Just kind) | kind <- concatMap (replicate 3) [LibKind .. 
] ] reportSubCase "component" do Right ts <- readTargetSelectors' [ "p", "lib:p", "p:lib:p", ":pkg:p:lib:p" , "lib:q", "q:lib:q", ":pkg:q:lib:q" ] ts @?= replicate 4 (TargetComponent "p-0.1" (CLibName LMainLibName) WholeComponent) ++ replicate 3 (TargetComponent "q-0.1" (CLibName LMainLibName) WholeComponent) reportSubCase "module" do Right ts <- readTargetSelectors' [ "P", "lib:p:P", "p:p:P", ":pkg:p:lib:p:module:P" , "QQ", "lib:q:QQ", "q:q:QQ", ":pkg:q:lib:q:module:QQ" , "pexe:PMain" -- p:P or q:QQ would be ambiguous here , "qexe:QMain" -- package p vs component p ] ts @?= replicate 4 (TargetComponent "p-0.1" (CLibName LMainLibName) (ModuleTarget "P")) ++ replicate 4 (TargetComponent "q-0.1" (CLibName LMainLibName) (ModuleTarget "QQ")) ++ [ TargetComponent "p-0.1" (CExeName "pexe") (ModuleTarget "PMain") , TargetComponent "q-0.1" (CExeName "qexe") (ModuleTarget "QMain") ] reportSubCase "file" do Right ts <- readTargetSelectors' [ "./P.hs", "p:P.lhs", "lib:p:P.hsc", "p:p:P.hsc", ":pkg:p:lib:p:file:P.y" , "q/QQ.hs", "q:QQ.lhs", "lib:q:QQ.hsc", "q:q:QQ.hsc", ":pkg:q:lib:q:file:QQ.y" , "q/Q.hs", "q:Q.lhs", "lib:q:Q.hsc", "q:q:Q.hsc", ":pkg:q:lib:q:file:Q.y" , "app/Main.hs", "p:app/Main.hs", "exe:ppexe:app/Main.hs", "p:ppexe:app/Main.hs", ":pkg:p:exe:ppexe:file:app/Main.hs" ] ts @?= replicate 5 (TargetComponent "p-0.1" (CLibName LMainLibName) (FileTarget "P")) ++ replicate 5 (TargetComponent "q-0.1" (CLibName LMainLibName) (FileTarget "QQ")) ++ replicate 5 (TargetComponent "q-0.1" (CLibName LMainLibName) (FileTarget "Q")) ++ replicate 5 (TargetComponent "p-0.1" (CExeName "ppexe") (FileTarget ("app" "Main.hs"))) -- Note there's a bit of an inconsistency here: for the single-part -- syntax the target has to point to a file that exists, whereas for -- all the other forms we don't require that. 
cleanProject testdir where testdir = "targets/simple" config = mempty testTargetSelectorBadSyntax :: Assertion testTargetSelectorBadSyntax = do (_, _, _, localPackages, _) <- configureProject testdir config let targets = [ "foo bar", " foo" , "foo:", "foo::bar" , "foo: ", "foo: :bar" , "a:b:c:d:e:f", "a:b:c:d:e:f:g:h" ] Left errs <- readTargetSelectors localPackages Nothing targets zipWithM_ (@?=) errs (map TargetSelectorUnrecognised targets) cleanProject testdir where testdir = "targets/empty" config = mempty testTargetSelectorAmbiguous :: (String -> IO ()) -> Assertion testTargetSelectorAmbiguous reportSubCase = do -- 'all' is ambiguous with packages and cwd components reportSubCase "ambiguous: all vs pkg" assertAmbiguous "all" [mkTargetPackage "all", mkTargetAllPackages] [mkpkg "all" []] reportSubCase "ambiguous: all vs cwd component" assertAmbiguous "all" [mkTargetComponent "other" (CExeName "all"), mkTargetAllPackages] [mkpkg "other" [mkexe "all"]] -- but 'all' is not ambiguous with non-cwd components, modules or files reportSubCase "unambiguous: all vs non-cwd comp, mod, file" assertUnambiguous "All" mkTargetAllPackages [ mkpkgAt "foo" [mkexe "All"] "foo" , mkpkg "bar" [ mkexe "bar" `withModules` ["All"] , mkexe "baz" `withCFiles` ["All"] ] ] -- filters 'libs', 'exes' etc are ambiguous with packages and -- local components reportSubCase "ambiguous: cwd-pkg filter vs pkg" assertAmbiguous "libs" [ mkTargetPackage "libs" , TargetPackage TargetImplicitCwd ["libs"] (Just LibKind) ] [mkpkg "libs" []] reportSubCase "ambiguous: filter vs cwd component" assertAmbiguous "exes" [ mkTargetComponent "other" (CExeName "exes") , TargetPackage TargetImplicitCwd ["other"] (Just ExeKind) ] [mkpkg "other" [mkexe "exes"]] -- but filters are not ambiguous with non-cwd components, modules or files reportSubCase "unambiguous: filter vs non-cwd comp, mod, file" assertUnambiguous "Libs" (TargetPackage TargetImplicitCwd ["bar"] (Just LibKind)) [ mkpkgAt "foo" [mkexe "Libs"] "foo" , 
mkpkg "bar" [ mkexe "bar" `withModules` ["Libs"] , mkexe "baz" `withCFiles` ["Libs"] ] ] -- local components shadow packages and other components reportSubCase "unambiguous: cwd comp vs pkg, non-cwd comp" assertUnambiguous "foo" (mkTargetComponent "other" (CExeName "foo")) [ mkpkg "other" [mkexe "foo"] , mkpkgAt "other2" [mkexe "foo"] "other2" -- shadows non-local foo , mkpkg "foo" [] ] -- shadows package foo -- local components shadow modules and files reportSubCase "unambiguous: cwd comp vs module, file" assertUnambiguous "Foo" (mkTargetComponent "bar" (CExeName "Foo")) [ mkpkg "bar" [mkexe "Foo"] , mkpkg "other" [ mkexe "other" `withModules` ["Foo"] , mkexe "other2" `withCFiles` ["Foo"] ] ] -- packages shadow non-local components reportSubCase "unambiguous: pkg vs non-cwd comp" assertUnambiguous "foo" (mkTargetPackage "foo") [ mkpkg "foo" [] , mkpkgAt "other" [mkexe "foo"] "other" -- shadows non-local foo ] -- packages shadow modules and files reportSubCase "unambiguous: pkg vs module, file" assertUnambiguous "Foo" (mkTargetPackage "Foo") [ mkpkgAt "Foo" [] "foo" , mkpkg "other" [ mkexe "other" `withModules` ["Foo"] , mkexe "other2" `withCFiles` ["Foo"] ] ] -- File target is ambiguous, part of multiple components reportSubCase "ambiguous: file in multiple comps" assertAmbiguous "Bar.hs" [ mkTargetFile "foo" (CExeName "bar") "Bar" , mkTargetFile "foo" (CExeName "bar2") "Bar" ] [ mkpkg "foo" [ mkexe "bar" `withModules` ["Bar"] , mkexe "bar2" `withModules` ["Bar"] ] ] reportSubCase "ambiguous: file in multiple comps with path" assertAmbiguous ("src" "Bar.hs") [ mkTargetFile "foo" (CExeName "bar") ("src" "Bar") , mkTargetFile "foo" (CExeName "bar2") ("src" "Bar") ] [ mkpkg "foo" [ mkexe "bar" `withModules` ["Bar"] `withHsSrcDirs` ["src"] , mkexe "bar2" `withModules` ["Bar"] `withHsSrcDirs` ["src"] ] ] -- non-exact case packages and components are ambiguous reportSubCase "ambiguous: non-exact-case pkg names" assertAmbiguous "Foo" [ mkTargetPackage "foo", 
mkTargetPackage "FOO" ] [ mkpkg "foo" [], mkpkg "FOO" [] ] reportSubCase "ambiguous: non-exact-case comp names" assertAmbiguous "Foo" [ mkTargetComponent "bar" (CExeName "foo") , mkTargetComponent "bar" (CExeName "FOO") ] [ mkpkg "bar" [mkexe "foo", mkexe "FOO"] ] -- exact-case Module or File over non-exact case package or component reportSubCase "unambiguous: module vs non-exact-case pkg, comp" assertUnambiguous "Baz" (mkTargetModule "other" (CExeName "other") "Baz") [ mkpkg "baz" [mkexe "BAZ"] , mkpkg "other" [ mkexe "other" `withModules` ["Baz"] ] ] reportSubCase "unambiguous: file vs non-exact-case pkg, comp" assertUnambiguous "Baz" (mkTargetFile "other" (CExeName "other") "Baz") [ mkpkg "baz" [mkexe "BAZ"] , mkpkg "other" [ mkexe "other" `withCFiles` ["Baz"] ] ] where assertAmbiguous :: String -> [TargetSelector] -> [SourcePackage (PackageLocation a)] -> Assertion assertAmbiguous str tss pkgs = do res <- readTargetSelectorsWith fakeDirActions (map SpecificSourcePackage pkgs) Nothing [str] case res of Left [TargetSelectorAmbiguous _ tss'] -> sort (map snd tss') @?= sort tss _ -> assertFailure $ "expected Left [TargetSelectorAmbiguous _ _], " ++ "got " ++ show res assertUnambiguous :: String -> TargetSelector -> [SourcePackage (PackageLocation a)] -> Assertion assertUnambiguous str ts pkgs = do res <- readTargetSelectorsWith fakeDirActions (map SpecificSourcePackage pkgs) Nothing [str] case res of Right [ts'] -> ts' @?= ts _ -> assertFailure $ "expected Right [Target...], " ++ "got " ++ show res fakeDirActions = TS.DirActions { TS.doesFileExist = \_p -> return True, TS.doesDirectoryExist = \_p -> return True, TS.canonicalizePath = \p -> return ("/" p), -- FilePath.Unix. ? 
TS.getCurrentDirectory = return "/" } mkpkg :: String -> [Executable] -> SourcePackage (PackageLocation a) mkpkg pkgidstr exes = mkpkgAt pkgidstr exes "" mkpkgAt :: String -> [Executable] -> FilePath -> SourcePackage (PackageLocation a) mkpkgAt pkgidstr exes loc = SourcePackage { srcpkgPackageId = pkgid, srcpkgSource = LocalUnpackedPackage loc, srcpkgDescrOverride = Nothing, srcpkgDescription = GenericPackageDescription { packageDescription = emptyPackageDescription { package = pkgid }, gpdScannedVersion = Nothing, genPackageFlags = [], condLibrary = Nothing, condSubLibraries = [], condForeignLibs = [], condExecutables = [ ( exeName exe, CondNode exe [] [] ) | exe <- exes ], condTestSuites = [], condBenchmarks = [] } } where pkgid = fromMaybe (error $ "failed to parse " ++ pkgidstr) $ simpleParse pkgidstr mkexe :: String -> Executable mkexe name = mempty { exeName = fromString name } withModules :: Executable -> [String] -> Executable withModules exe mods = exe { buildInfo = (buildInfo exe) { otherModules = map fromString mods } } withCFiles :: Executable -> [FilePath] -> Executable withCFiles exe files = exe { buildInfo = (buildInfo exe) { cSources = files } } withHsSrcDirs :: Executable -> [FilePath] -> Executable withHsSrcDirs exe srcDirs = exe { buildInfo = (buildInfo exe) { hsSourceDirs = map unsafeMakeSymbolicPath srcDirs }} mkTargetPackage :: PackageId -> TargetSelector mkTargetPackage pkgid = TargetPackage TargetExplicitNamed [pkgid] Nothing mkTargetComponent :: PackageId -> ComponentName -> TargetSelector mkTargetComponent pkgid cname = TargetComponent pkgid cname WholeComponent mkTargetModule :: PackageId -> ComponentName -> ModuleName -> TargetSelector mkTargetModule pkgid cname mname = TargetComponent pkgid cname (ModuleTarget mname) mkTargetFile :: PackageId -> ComponentName -> String -> TargetSelector mkTargetFile pkgid cname fname = TargetComponent pkgid cname (FileTarget fname) mkTargetAllPackages :: TargetSelector mkTargetAllPackages = 
TargetAllPackages Nothing instance IsString PackageIdentifier where fromString pkgidstr = pkgid where pkgid = fromMaybe (error $ "fromString @PackageIdentifier " ++ show pkgidstr) $ simpleParse pkgidstr testTargetSelectorNoCurrentPackage :: Assertion testTargetSelectorNoCurrentPackage = do (_, _, _, localPackages, _) <- configureProject testdir config let readTargetSelectors' = readTargetSelectorsWith (dirActions testdir) localPackages Nothing targets = [ "libs", ":cwd:libs" , "flibs", ":cwd:flibs" , "exes", ":cwd:exes" , "tests", ":cwd:tests" , "benchmarks", ":cwd:benchmarks"] Left errs <- readTargetSelectors' targets zipWithM_ (@?=) errs [ TargetSelectorNoCurrentPackage ts | target <- targets , let ts = fromMaybe (error $ "failed to parse target string " ++ target) $ parseTargetString target ] cleanProject testdir where testdir = "targets/complex" config = mempty testTargetSelectorNoTargets :: Assertion testTargetSelectorNoTargets = do (_, _, _, localPackages, _) <- configureProject testdir config Left errs <- readTargetSelectors localPackages Nothing [] errs @?= [TargetSelectorNoTargetsInCwd True] cleanProject testdir where testdir = "targets/complex" config = mempty testTargetSelectorProjectEmpty :: Assertion testTargetSelectorProjectEmpty = do (_, _, _, localPackages, _) <- configureProject testdir config Left errs <- readTargetSelectors localPackages Nothing [] errs @?= [TargetSelectorNoTargetsInProject] cleanProject testdir where testdir = "targets/empty" config = mempty -- | Ensure we don't miss primary package and produce -- TargetSelectorNoTargetsInCwd error due to symlink or -- drive capitalisation mismatch when no targets are given testTargetSelectorCanonicalizedPath :: Assertion testTargetSelectorCanonicalizedPath = do (_, _, _, localPackages, _) <- configureProject testdir config cwd <- getCurrentDirectory let virtcwd = cwd basedir symlink -- Check that the symlink is there before running test as on Windows -- some versions/configurations of git won't 
pull down/create the symlink canRunTest <- doesDirectoryExist virtcwd when canRunTest (do let dirActions' = (dirActions symlink) { TS.getCurrentDirectory = return virtcwd } Right ts <- readTargetSelectorsWith dirActions' localPackages Nothing [] ts @?= [TargetPackage TargetImplicitCwd ["p-0.1"] Nothing]) cleanProject testdir where testdir = "targets/simple" symlink = "targets/symbolic-link-to-simple" config = mempty testTargetProblemsCommon :: ProjectConfig -> Assertion testTargetProblemsCommon config0 = do (_,elaboratedPlan,_) <- planProject testdir config let pkgIdMap :: Map.Map PackageName PackageId pkgIdMap = Map.fromList [ (packageName p, packageId p) | p <- InstallPlan.toList elaboratedPlan ] cases :: [( TargetSelector -> TargetProblem' , TargetSelector )] cases = [ -- Cannot resolve packages outside of the project ( \_ -> TargetProblemNoSuchPackage "foobar" , mkTargetPackage "foobar" ) -- We cannot currently build components like testsuites or -- benchmarks from packages that are not local to the project , ( \_ -> TargetComponentNotProjectLocal (pkgIdMap Map.! "filepath") (CTestName "filepath-tests") WholeComponent , mkTargetComponent (pkgIdMap Map.! "filepath") (CTestName "filepath-tests") ) -- Components can be explicitly @buildable: False@ , ( \_ -> TargetComponentNotBuildable "q-0.1" (CExeName "buildable-false") WholeComponent , mkTargetComponent "q-0.1" (CExeName "buildable-false") ) -- Testsuites and benchmarks can be disabled by the solver if it -- cannot satisfy deps , ( \_ -> TargetOptionalStanzaDisabledBySolver "q-0.1" (CTestName "solver-disabled") WholeComponent , mkTargetComponent "q-0.1" (CTestName "solver-disabled") ) -- Testsuites and benchmarks can be disabled explicitly by the -- user via config , ( \_ -> TargetOptionalStanzaDisabledByUser "q-0.1" (CBenchName "user-disabled") WholeComponent , mkTargetComponent "q-0.1" (CBenchName "user-disabled") ) -- An unknown package. 
The target selector resolution should only -- produce known packages, so this should not happen with the -- output from 'readTargetSelectors'. , ( \_ -> TargetProblemNoSuchPackage "foobar" , mkTargetPackage "foobar" ) -- An unknown component of a known package. The target selector -- resolution should only produce known packages, so this should -- not happen with the output from 'readTargetSelectors'. , ( \_ -> TargetProblemNoSuchComponent "q-0.1" (CExeName "no-such") , mkTargetComponent "q-0.1" (CExeName "no-such") ) ] assertTargetProblems elaboratedPlan CmdBuild.selectPackageTargets CmdBuild.selectComponentTarget cases where testdir = "targets/complex" config = config0 { projectConfigLocalPackages = (projectConfigLocalPackages config0) { packageConfigBenchmarks = toFlag False } , projectConfigShared = (projectConfigShared config0) { projectConfigConstraints = [( UserConstraint (UserAnyQualifier "filepath") PackagePropertySource , ConstraintSourceUnknown )] } } testTargetProblemsBuild :: ProjectConfig -> (String -> IO ()) -> Assertion testTargetProblemsBuild config reportSubCase = do reportSubCase "empty-pkg" assertProjectTargetProblems "targets/empty-pkg" config CmdBuild.selectPackageTargets CmdBuild.selectComponentTarget [ ( TargetProblemNoTargets, mkTargetPackage "p-0.1" ) ] reportSubCase "all-disabled" assertProjectTargetProblems "targets/all-disabled" config { projectConfigLocalPackages = (projectConfigLocalPackages config) { packageConfigBenchmarks = toFlag False } } CmdBuild.selectPackageTargets CmdBuild.selectComponentTarget [ ( flip TargetProblemNoneEnabled [ AvailableTarget "p-0.1" (CBenchName "user-disabled") TargetDisabledByUser True , AvailableTarget "p-0.1" (CTestName "solver-disabled") TargetDisabledBySolver True , AvailableTarget "p-0.1" (CExeName "buildable-false") TargetNotBuildable True , AvailableTarget "p-0.1" (CLibName LMainLibName) TargetNotBuildable True ] , mkTargetPackage "p-0.1" ) ] reportSubCase "enabled component kinds" -- When we 
explicitly enable all the component kinds then selecting the -- whole package selects those component kinds too do (_,elaboratedPlan,_) <- planProject "targets/variety" config { projectConfigLocalPackages = (projectConfigLocalPackages config) { packageConfigTests = toFlag True, packageConfigBenchmarks = toFlag True } } assertProjectDistinctTargets elaboratedPlan CmdBuild.selectPackageTargets CmdBuild.selectComponentTarget [ mkTargetPackage "p-0.1" ] [ ("p-0.1-inplace", (CLibName LMainLibName)) , ("p-0.1-inplace-a-benchmark", CBenchName "a-benchmark") , ("p-0.1-inplace-a-testsuite", CTestName "a-testsuite") , ("p-0.1-inplace-an-exe", CExeName "an-exe") , ("p-0.1-inplace-libp", CFLibName "libp") ] reportSubCase "disabled component kinds" -- When we explicitly disable all the component kinds then selecting the -- whole package only selects the library, foreign lib and exes do (_,elaboratedPlan,_) <- planProject "targets/variety" config { projectConfigLocalPackages = (projectConfigLocalPackages config) { packageConfigTests = toFlag False, packageConfigBenchmarks = toFlag False } } assertProjectDistinctTargets elaboratedPlan CmdBuild.selectPackageTargets CmdBuild.selectComponentTarget [ mkTargetPackage "p-0.1" ] [ ("p-0.1-inplace", (CLibName LMainLibName)) , ("p-0.1-inplace-an-exe", CExeName "an-exe") , ("p-0.1-inplace-libp", CFLibName "libp") ] reportSubCase "requested component kinds" -- When we selecting the package with an explicit filter then we get those -- components even though we did not explicitly enable tests/benchmarks do (_,elaboratedPlan,_) <- planProject "targets/variety" config assertProjectDistinctTargets elaboratedPlan CmdBuild.selectPackageTargets CmdBuild.selectComponentTarget [ TargetPackage TargetExplicitNamed ["p-0.1"] (Just TestKind) , TargetPackage TargetExplicitNamed ["p-0.1"] (Just BenchKind) ] [ ("p-0.1-inplace-a-benchmark", CBenchName "a-benchmark") , ("p-0.1-inplace-a-testsuite", CTestName "a-testsuite") ] testTargetProblemsRepl :: 
ProjectConfig -> (String -> IO ()) -> Assertion testTargetProblemsRepl config reportSubCase = do reportSubCase "multiple-libs" assertProjectTargetProblems "targets/multiple-libs" config CmdRepl.selectPackageTargets CmdRepl.selectComponentTarget [ ( flip CmdRepl.matchesMultipleProblem [ AvailableTarget "p-0.1" (CLibName LMainLibName) (TargetBuildable () TargetRequestedByDefault) True , AvailableTarget "q-0.1" (CLibName LMainLibName) (TargetBuildable () TargetRequestedByDefault) True ] , mkTargetAllPackages ) ] reportSubCase "multiple-exes" assertProjectTargetProblems "targets/multiple-exes" config CmdRepl.selectPackageTargets CmdRepl.selectComponentTarget [ ( flip CmdRepl.matchesMultipleProblem [ AvailableTarget "p-0.1" (CExeName "p2") (TargetBuildable () TargetRequestedByDefault) True , AvailableTarget "p-0.1" (CExeName "p1") (TargetBuildable () TargetRequestedByDefault) True ] , mkTargetPackage "p-0.1" ) ] reportSubCase "multiple-tests" assertProjectTargetProblems "targets/multiple-tests" config CmdRepl.selectPackageTargets CmdRepl.selectComponentTarget [ ( flip CmdRepl.matchesMultipleProblem [ AvailableTarget "p-0.1" (CTestName "p2") (TargetBuildable () TargetNotRequestedByDefault) True , AvailableTarget "p-0.1" (CTestName "p1") (TargetBuildable () TargetNotRequestedByDefault) True ] , TargetPackage TargetExplicitNamed ["p-0.1"] (Just TestKind) ) ] reportSubCase "multiple targets" do (_,elaboratedPlan,_) <- planProject "targets/multiple-exes" config assertProjectDistinctTargets elaboratedPlan CmdRepl.selectPackageTargets CmdRepl.selectComponentTarget [ mkTargetComponent "p-0.1" (CExeName "p1") , mkTargetComponent "p-0.1" (CExeName "p2") ] [ ("p-0.1-inplace-p1", CExeName "p1") , ("p-0.1-inplace-p2", CExeName "p2") ] reportSubCase "libs-disabled" assertProjectTargetProblems "targets/libs-disabled" config CmdRepl.selectPackageTargets CmdRepl.selectComponentTarget [ ( flip TargetProblemNoneEnabled [ AvailableTarget "p-0.1" (CLibName LMainLibName) TargetNotBuildable 
True ] , mkTargetPackage "p-0.1" ) ] reportSubCase "exes-disabled" assertProjectTargetProblems "targets/exes-disabled" config CmdRepl.selectPackageTargets CmdRepl.selectComponentTarget [ ( flip TargetProblemNoneEnabled [ AvailableTarget "p-0.1" (CExeName "p") TargetNotBuildable True ] , mkTargetPackage "p-0.1" ) ] reportSubCase "test-only" assertProjectTargetProblems "targets/test-only" config CmdRepl.selectPackageTargets CmdRepl.selectComponentTarget [ ( flip TargetProblemNoneEnabled [ AvailableTarget "p-0.1" (CTestName "pexe") (TargetBuildable () TargetNotRequestedByDefault) True ] , mkTargetPackage "p-0.1" ) ] reportSubCase "empty-pkg" assertProjectTargetProblems "targets/empty-pkg" config CmdRepl.selectPackageTargets CmdRepl.selectComponentTarget [ ( TargetProblemNoTargets, mkTargetPackage "p-0.1" ) ] reportSubCase "requested component kinds" do (_,elaboratedPlan,_) <- planProject "targets/variety" config -- by default we only get the lib assertProjectDistinctTargets elaboratedPlan CmdRepl.selectPackageTargets CmdRepl.selectComponentTarget [ TargetPackage TargetExplicitNamed ["p-0.1"] Nothing ] [ ("p-0.1-inplace", (CLibName LMainLibName)) ] -- When we select the package with an explicit filter then we get those -- components even though we did not explicitly enable tests/benchmarks assertProjectDistinctTargets elaboratedPlan CmdRepl.selectPackageTargets CmdRepl.selectComponentTarget [ TargetPackage TargetExplicitNamed ["p-0.1"] (Just TestKind) ] [ ("p-0.1-inplace-a-testsuite", CTestName "a-testsuite") ] assertProjectDistinctTargets elaboratedPlan CmdRepl.selectPackageTargets CmdRepl.selectComponentTarget [ TargetPackage TargetExplicitNamed ["p-0.1"] (Just BenchKind) ] [ ("p-0.1-inplace-a-benchmark", CBenchName "a-benchmark") ] testTargetProblemsListBin :: ProjectConfig -> (String -> IO ()) -> Assertion testTargetProblemsListBin config reportSubCase = do reportSubCase "one-of-each" do (_,elaboratedPlan,_) <- planProject "targets/one-of-each" config 
assertProjectDistinctTargets elaboratedPlan CmdListBin.selectPackageTargets CmdListBin.selectComponentTarget [ TargetPackage TargetExplicitNamed ["p-0.1"] Nothing ] [ ("p-0.1-inplace-p1", CExeName "p1") ] reportSubCase "multiple-exes" assertProjectTargetProblems "targets/multiple-exes" config CmdListBin.selectPackageTargets CmdListBin.selectComponentTarget [ ( flip CmdListBin.matchesMultipleProblem [ AvailableTarget "p-0.1" (CExeName "p2") (TargetBuildable () TargetRequestedByDefault) True , AvailableTarget "p-0.1" (CExeName "p1") (TargetBuildable () TargetRequestedByDefault) True ] , mkTargetPackage "p-0.1" ) ] reportSubCase "multiple targets" do (_,elaboratedPlan,_) <- planProject "targets/multiple-exes" config assertProjectDistinctTargets elaboratedPlan CmdListBin.selectPackageTargets CmdListBin.selectComponentTarget [ mkTargetComponent "p-0.1" (CExeName "p1") , mkTargetComponent "p-0.1" (CExeName "p2") ] [ ("p-0.1-inplace-p1", CExeName "p1") , ("p-0.1-inplace-p2", CExeName "p2") ] reportSubCase "exes-disabled" assertProjectTargetProblems "targets/exes-disabled" config CmdListBin.selectPackageTargets CmdListBin.selectComponentTarget [ ( flip TargetProblemNoneEnabled [ AvailableTarget "p-0.1" (CExeName "p") TargetNotBuildable True ] , mkTargetPackage "p-0.1" ) ] reportSubCase "empty-pkg" assertProjectTargetProblems "targets/empty-pkg" config CmdListBin.selectPackageTargets CmdListBin.selectComponentTarget [ ( TargetProblemNoTargets, mkTargetPackage "p-0.1" ) ] reportSubCase "lib-only" assertProjectTargetProblems "targets/lib-only" config CmdListBin.selectPackageTargets CmdListBin.selectComponentTarget [ (CmdListBin.noComponentsProblem, mkTargetPackage "p-0.1" ) ] testTargetProblemsRun :: ProjectConfig -> (String -> IO ()) -> Assertion testTargetProblemsRun config reportSubCase = do reportSubCase "one-of-each" do (_,elaboratedPlan,_) <- planProject "targets/one-of-each" config assertProjectDistinctTargets elaboratedPlan CmdRun.selectPackageTargets 
CmdRun.selectComponentTarget [ TargetPackage TargetExplicitNamed ["p-0.1"] Nothing ] [ ("p-0.1-inplace-p1", CExeName "p1") ] reportSubCase "multiple-exes" assertProjectTargetProblems "targets/multiple-exes" config CmdRun.selectPackageTargets CmdRun.selectComponentTarget [ ( flip CmdRun.matchesMultipleProblem [ AvailableTarget "p-0.1" (CExeName "p2") (TargetBuildable () TargetRequestedByDefault) True , AvailableTarget "p-0.1" (CExeName "p1") (TargetBuildable () TargetRequestedByDefault) True ] , mkTargetPackage "p-0.1" ) ] reportSubCase "multiple targets" do (_,elaboratedPlan,_) <- planProject "targets/multiple-exes" config assertProjectDistinctTargets elaboratedPlan CmdRun.selectPackageTargets CmdRun.selectComponentTarget [ mkTargetComponent "p-0.1" (CExeName "p1") , mkTargetComponent "p-0.1" (CExeName "p2") ] [ ("p-0.1-inplace-p1", CExeName "p1") , ("p-0.1-inplace-p2", CExeName "p2") ] reportSubCase "exes-disabled" assertProjectTargetProblems "targets/exes-disabled" config CmdRun.selectPackageTargets CmdRun.selectComponentTarget [ ( flip TargetProblemNoneEnabled [ AvailableTarget "p-0.1" (CExeName "p") TargetNotBuildable True ] , mkTargetPackage "p-0.1" ) ] reportSubCase "empty-pkg" assertProjectTargetProblems "targets/empty-pkg" config CmdRun.selectPackageTargets CmdRun.selectComponentTarget [ ( TargetProblemNoTargets, mkTargetPackage "p-0.1" ) ] reportSubCase "lib-only" assertProjectTargetProblems "targets/lib-only" config CmdRun.selectPackageTargets CmdRun.selectComponentTarget [ (CmdRun.noExesProblem, mkTargetPackage "p-0.1" ) ] testTargetProblemsTest :: ProjectConfig -> (String -> IO ()) -> Assertion testTargetProblemsTest config reportSubCase = do reportSubCase "disabled by config" assertProjectTargetProblems "targets/tests-disabled" config { projectConfigLocalPackages = (projectConfigLocalPackages config) { packageConfigTests = toFlag False } } CmdTest.selectPackageTargets CmdTest.selectComponentTarget [ ( flip TargetProblemNoneEnabled [ AvailableTarget 
"p-0.1" (CTestName "user-disabled") TargetDisabledByUser True , AvailableTarget "p-0.1" (CTestName "solver-disabled") TargetDisabledByUser True ] , mkTargetPackage "p-0.1" ) ] reportSubCase "disabled by solver & buildable false" assertProjectTargetProblems "targets/tests-disabled" config CmdTest.selectPackageTargets CmdTest.selectComponentTarget [ ( flip TargetProblemNoneEnabled [ AvailableTarget "p-0.1" (CTestName "user-disabled") TargetDisabledBySolver True , AvailableTarget "p-0.1" (CTestName "solver-disabled") TargetDisabledBySolver True ] , mkTargetPackage "p-0.1" ) , ( flip TargetProblemNoneEnabled [ AvailableTarget "q-0.1" (CTestName "buildable-false") TargetNotBuildable True ] , mkTargetPackage "q-0.1" ) ] reportSubCase "empty-pkg" assertProjectTargetProblems "targets/empty-pkg" config CmdTest.selectPackageTargets CmdTest.selectComponentTarget [ ( TargetProblemNoTargets, mkTargetPackage "p-0.1" ) ] reportSubCase "no tests" assertProjectTargetProblems "targets/simple" config CmdTest.selectPackageTargets CmdTest.selectComponentTarget [ ( CmdTest.noTestsProblem, mkTargetPackage "p-0.1" ) , ( CmdTest.noTestsProblem, mkTargetPackage "q-0.1" ) ] reportSubCase "not a test" assertProjectTargetProblems "targets/variety" config CmdTest.selectPackageTargets CmdTest.selectComponentTarget $ [ ( const (CmdTest.notTestProblem "p-0.1" (CLibName LMainLibName)) , mkTargetComponent "p-0.1" (CLibName LMainLibName) ) , ( const (CmdTest.notTestProblem "p-0.1" (CExeName "an-exe")) , mkTargetComponent "p-0.1" (CExeName "an-exe") ) , ( const (CmdTest.notTestProblem "p-0.1" (CFLibName "libp")) , mkTargetComponent "p-0.1" (CFLibName "libp") ) , ( const (CmdTest.notTestProblem "p-0.1" (CBenchName "a-benchmark")) , mkTargetComponent "p-0.1" (CBenchName "a-benchmark") ) ] ++ [ ( const (CmdTest.isSubComponentProblem "p-0.1" cname (ModuleTarget modname)) , mkTargetModule "p-0.1" cname modname ) | (cname, modname) <- [ (CTestName "a-testsuite", "TestModule") , (CBenchName "a-benchmark", 
"BenchModule") , (CExeName "an-exe", "ExeModule") , ((CLibName LMainLibName), "P") ] ] ++ [ ( const (CmdTest.isSubComponentProblem "p-0.1" cname (FileTarget fname)) , mkTargetFile "p-0.1" cname fname) | (cname, fname) <- [ (CTestName "a-testsuite", "Test.hs") , (CBenchName "a-benchmark", "Bench.hs") , (CExeName "an-exe", "Main.hs") ] ] testTargetProblemsBench :: ProjectConfig -> (String -> IO ()) -> Assertion testTargetProblemsBench config reportSubCase = do reportSubCase "disabled by config" assertProjectTargetProblems "targets/benchmarks-disabled" config { projectConfigLocalPackages = (projectConfigLocalPackages config) { packageConfigBenchmarks = toFlag False } } CmdBench.selectPackageTargets CmdBench.selectComponentTarget [ ( flip TargetProblemNoneEnabled [ AvailableTarget "p-0.1" (CBenchName "user-disabled") TargetDisabledByUser True , AvailableTarget "p-0.1" (CBenchName "solver-disabled") TargetDisabledByUser True ] , mkTargetPackage "p-0.1" ) ] reportSubCase "disabled by solver & buildable false" assertProjectTargetProblems "targets/benchmarks-disabled" config CmdBench.selectPackageTargets CmdBench.selectComponentTarget [ ( flip TargetProblemNoneEnabled [ AvailableTarget "p-0.1" (CBenchName "user-disabled") TargetDisabledBySolver True , AvailableTarget "p-0.1" (CBenchName "solver-disabled") TargetDisabledBySolver True ] , mkTargetPackage "p-0.1" ) , ( flip TargetProblemNoneEnabled [ AvailableTarget "q-0.1" (CBenchName "buildable-false") TargetNotBuildable True ] , mkTargetPackage "q-0.1" ) ] reportSubCase "empty-pkg" assertProjectTargetProblems "targets/empty-pkg" config CmdBench.selectPackageTargets CmdBench.selectComponentTarget [ ( TargetProblemNoTargets, mkTargetPackage "p-0.1" ) ] reportSubCase "no benchmarks" assertProjectTargetProblems "targets/simple" config CmdBench.selectPackageTargets CmdBench.selectComponentTarget [ ( CmdBench.noBenchmarksProblem, mkTargetPackage "p-0.1" ) , ( CmdBench.noBenchmarksProblem, mkTargetPackage "q-0.1" ) ] 
reportSubCase "not a benchmark" assertProjectTargetProblems "targets/variety" config CmdBench.selectPackageTargets CmdBench.selectComponentTarget $ [ ( const (CmdBench.componentNotBenchmarkProblem "p-0.1" (CLibName LMainLibName)) , mkTargetComponent "p-0.1" (CLibName LMainLibName) ) , ( const (CmdBench.componentNotBenchmarkProblem "p-0.1" (CExeName "an-exe")) , mkTargetComponent "p-0.1" (CExeName "an-exe") ) , ( const (CmdBench.componentNotBenchmarkProblem "p-0.1" (CFLibName "libp")) , mkTargetComponent "p-0.1" (CFLibName "libp") ) , ( const (CmdBench.componentNotBenchmarkProblem "p-0.1" (CTestName "a-testsuite")) , mkTargetComponent "p-0.1" (CTestName "a-testsuite") ) ] ++ [ ( const (CmdBench.isSubComponentProblem "p-0.1" cname (ModuleTarget modname)) , mkTargetModule "p-0.1" cname modname ) | (cname, modname) <- [ (CTestName "a-testsuite", "TestModule") , (CBenchName "a-benchmark", "BenchModule") , (CExeName "an-exe", "ExeModule") , ((CLibName LMainLibName), "P") ] ] ++ [ ( const (CmdBench.isSubComponentProblem "p-0.1" cname (FileTarget fname)) , mkTargetFile "p-0.1" cname fname) | (cname, fname) <- [ (CTestName "a-testsuite", "Test.hs") , (CBenchName "a-benchmark", "Bench.hs") , (CExeName "an-exe", "Main.hs") ] ] testTargetProblemsHaddock :: ProjectConfig -> (String -> IO ()) -> Assertion testTargetProblemsHaddock config reportSubCase = do reportSubCase "all-disabled" assertProjectTargetProblems "targets/all-disabled" config (let haddockFlags = mkHaddockFlags False True True False in CmdHaddock.selectPackageTargets haddockFlags) CmdHaddock.selectComponentTarget [ ( flip TargetProblemNoneEnabled [ AvailableTarget "p-0.1" (CBenchName "user-disabled") TargetDisabledByUser True , AvailableTarget "p-0.1" (CTestName "solver-disabled") TargetDisabledBySolver True , AvailableTarget "p-0.1" (CExeName "buildable-false") TargetNotBuildable True , AvailableTarget "p-0.1" (CLibName LMainLibName) TargetNotBuildable True ] , mkTargetPackage "p-0.1" ) ] reportSubCase 
"empty-pkg" assertProjectTargetProblems "targets/empty-pkg" config (let haddockFlags = mkHaddockFlags False False False False in CmdHaddock.selectPackageTargets haddockFlags) CmdHaddock.selectComponentTarget [ ( TargetProblemNoTargets, mkTargetPackage "p-0.1" ) ] reportSubCase "enabled component kinds" -- When we explicitly enable all the component kinds then selecting the -- whole package selects those component kinds too (_,elaboratedPlan,_) <- planProject "targets/variety" config let haddockFlags = mkHaddockFlags True True True True in assertProjectDistinctTargets elaboratedPlan (CmdHaddock.selectPackageTargets haddockFlags) CmdHaddock.selectComponentTarget [ mkTargetPackage "p-0.1" ] [ ("p-0.1-inplace", (CLibName LMainLibName)) , ("p-0.1-inplace-a-benchmark", CBenchName "a-benchmark") , ("p-0.1-inplace-a-testsuite", CTestName "a-testsuite") , ("p-0.1-inplace-an-exe", CExeName "an-exe") , ("p-0.1-inplace-libp", CFLibName "libp") ] reportSubCase "disabled component kinds" -- When we explicitly disable all the component kinds then selecting the -- whole package only selects the library let haddockFlags = mkHaddockFlags False False False False in assertProjectDistinctTargets elaboratedPlan (CmdHaddock.selectPackageTargets haddockFlags) CmdHaddock.selectComponentTarget [ mkTargetPackage "p-0.1" ] [ ("p-0.1-inplace", (CLibName LMainLibName)) ] reportSubCase "requested component kinds" -- When we selecting the package with an explicit filter then it does not -- matter if the config was to disable all the component kinds let haddockFlags = mkHaddockFlags False False False False in assertProjectDistinctTargets elaboratedPlan (CmdHaddock.selectPackageTargets haddockFlags) CmdHaddock.selectComponentTarget [ TargetPackage TargetExplicitNamed ["p-0.1"] (Just FLibKind) , TargetPackage TargetExplicitNamed ["p-0.1"] (Just ExeKind) , TargetPackage TargetExplicitNamed ["p-0.1"] (Just TestKind) , TargetPackage TargetExplicitNamed ["p-0.1"] (Just BenchKind) ] [ 
("p-0.1-inplace-a-benchmark", CBenchName "a-benchmark") , ("p-0.1-inplace-a-testsuite", CTestName "a-testsuite") , ("p-0.1-inplace-an-exe", CExeName "an-exe") , ("p-0.1-inplace-libp", CFLibName "libp") ] where mkHaddockFlags flib exe test bench = defaultHaddockFlags { haddockForeignLibs = toFlag flib, haddockExecutables = toFlag exe, haddockTestSuites = toFlag test, haddockBenchmarks = toFlag bench } assertProjectDistinctTargets :: forall err. (Eq err, Show err) => ElaboratedInstallPlan -> (forall k. TargetSelector -> [AvailableTarget k] -> Either (TargetProblem err) [k]) -> (forall k. SubComponentTarget -> AvailableTarget k -> Either (TargetProblem err) k ) -> [TargetSelector] -> [(UnitId, ComponentName)] -> Assertion assertProjectDistinctTargets elaboratedPlan selectPackageTargets selectComponentTarget targetSelectors expectedTargets | Right targets <- results = distinctTargetComponents targets @?= Set.fromList expectedTargets | otherwise = assertFailure $ "assertProjectDistinctTargets: expected " ++ "(Right targets) but got " ++ show results where results = resolveTargets selectPackageTargets selectComponentTarget elaboratedPlan Nothing targetSelectors assertProjectTargetProblems :: forall err. (Eq err, Show err) => FilePath -> ProjectConfig -> (forall k. TargetSelector -> [AvailableTarget k] -> Either (TargetProblem err) [k]) -> (forall k. SubComponentTarget -> AvailableTarget k -> Either (TargetProblem err) k ) -> [(TargetSelector -> TargetProblem err, TargetSelector)] -> Assertion assertProjectTargetProblems testdir config selectPackageTargets selectComponentTarget cases = do (_,elaboratedPlan,_) <- planProject testdir config assertTargetProblems elaboratedPlan selectPackageTargets selectComponentTarget cases assertTargetProblems :: forall err. (Eq err, Show err) => ElaboratedInstallPlan -> (forall k. TargetSelector -> [AvailableTarget k] -> Either (TargetProblem err) [k]) -> (forall k. 
SubComponentTarget -> AvailableTarget k -> Either (TargetProblem err) k ) -> [(TargetSelector -> TargetProblem err, TargetSelector)] -> Assertion assertTargetProblems elaboratedPlan selectPackageTargets selectComponentTarget = mapM_ (uncurry assertTargetProblem) where assertTargetProblem expected targetSelector = let res = resolveTargets selectPackageTargets selectComponentTarget elaboratedPlan Nothing [targetSelector] in case res of Left [problem] -> problem @?= expected targetSelector unexpected -> assertFailure $ "expected resolveTargets result: (Left [problem]) " ++ "but got: " ++ show unexpected testExceptionInFindingPackage :: ProjectConfig -> Assertion testExceptionInFindingPackage config = do BadPackageLocations _ locs <- expectException "BadPackageLocations" $ void $ planProject testdir config case locs of [BadLocGlobEmptyMatch "./*.cabal"] -> return () _ -> assertFailure "expected BadLocGlobEmptyMatch" cleanProject testdir where testdir = "exception/no-pkg" testExceptionInFindingPackage2 :: ProjectConfig -> Assertion testExceptionInFindingPackage2 config = do BadPackageLocations _ locs <- expectException "BadPackageLocations" $ void $ planProject testdir config case locs of [BadPackageLocationFile (BadLocDirNoCabalFile ".")] -> return () _ -> assertFailure $ "expected BadLocDirNoCabalFile, got " ++ show locs cleanProject testdir where testdir = "exception/no-pkg2" testExceptionInProjectConfig :: ProjectConfig -> Assertion testExceptionInProjectConfig config = do BadPerPackageCompilerPaths ps <- expectException "BadPerPackageCompilerPaths" $ void $ planProject testdir config case ps of [(pn,"ghc")] | "foo" == pn -> return () _ -> assertFailure $ "expected (PackageName \"foo\",\"ghc\"), got " ++ show ps cleanProject testdir where testdir = "exception/bad-config" testExceptionInConfigureStep :: ProjectConfig -> Assertion testExceptionInConfigureStep config = do (plan, res) <- executePlan =<< planProject testdir config (_pkga1, failure) <- expectPackageFailed 
plan res pkgidA1 case buildFailureReason failure of ConfigureFailed _ -> return () _ -> assertFailure $ "expected ConfigureFailed, got " ++ show failure cleanProject testdir where testdir = "exception/configure" pkgidA1 = PackageIdentifier "a" (mkVersion [1]) testExceptionInBuildStep :: ProjectConfig -> Assertion testExceptionInBuildStep config = do (plan, res) <- executePlan =<< planProject testdir config (_pkga1, failure) <- expectPackageFailed plan res pkgidA1 expectBuildFailed failure where testdir = "exception/build" pkgidA1 = PackageIdentifier "a" (mkVersion [1]) testSetupScriptStyles :: ProjectConfig -> (String -> IO ()) -> Assertion testSetupScriptStyles config reportSubCase = do reportSubCase (show SetupCustomExplicitDeps) plan0@(_,_,sharedConfig) <- planProject testdir1 config let isOSX (Platform _ OSX) = True isOSX _ = False -- Skip the Custom tests when the shipped Cabal library is buggy unless (isOSX (pkgConfigPlatform sharedConfig) && compilerVersion (pkgConfigCompiler sharedConfig) < mkVersion [7,10]) $ do (plan1, res1) <- executePlan plan0 pkg1 <- expectPackageInstalled plan1 res1 pkgidA elabSetupScriptStyle pkg1 @?= SetupCustomExplicitDeps hasDefaultSetupDeps pkg1 @?= Just False marker1 <- readFile (basedir testdir1 "marker") marker1 @?= "ok" removeFile (basedir testdir1 "marker") -- implicit deps implies 'Cabal < 2' which conflicts w/ GHC 8.2 or later when (compilerVersion (pkgConfigCompiler sharedConfig) < mkVersion [8,2]) $ do reportSubCase (show SetupCustomImplicitDeps) (plan2, res2) <- executePlan =<< planProject testdir2 config pkg2 <- expectPackageInstalled plan2 res2 pkgidA elabSetupScriptStyle pkg2 @?= SetupCustomImplicitDeps hasDefaultSetupDeps pkg2 @?= Just True marker2 <- readFile (basedir testdir2 "marker") marker2 @?= "ok" removeFile (basedir testdir2 "marker") reportSubCase (show SetupNonCustomInternalLib) (plan3, res3) <- executePlan =<< planProject testdir3 config pkg3 <- expectPackageInstalled plan3 res3 pkgidA 
elabSetupScriptStyle pkg3 @?= SetupNonCustomInternalLib {- --TODO: the SetupNonCustomExternalLib case is hard to test since it -- requires a version of Cabal that's later than the one we're testing -- e.g. needs a .cabal file that specifies cabal-version: >= 2.0 -- and a corresponding Cabal package that we can use to try and build a -- default Setup.hs. reportSubCase (show SetupNonCustomExternalLib) (plan4, res4) <- executePlan =<< planProject testdir4 config pkg4 <- expectPackageInstalled plan4 res4 pkgidA pkgSetupScriptStyle pkg4 @?= SetupNonCustomExternalLib -} where testdir1 = "build/setup-custom1" testdir2 = "build/setup-custom2" testdir3 = "build/setup-simple" pkgidA = PackageIdentifier "a" (mkVersion [0,1]) -- The solver fills in default setup deps explicitly, but marks them as such hasDefaultSetupDeps = fmap defaultSetupDepends . setupBuildInfo . elabPkgDescription -- | Test the behaviour with and without @--keep-going@ -- testBuildKeepGoing :: ProjectConfig -> Assertion testBuildKeepGoing config = do -- P is expected to fail, Q does not depend on P but without -- parallel build and without keep-going then we don't build Q yet. (plan1, res1) <- executePlan =<< planProject testdir (config `mappend` keepGoing False) (_, failure1) <- expectPackageFailed plan1 res1 "p-0.1" expectBuildFailed failure1 _ <- expectPackageConfigured plan1 res1 "q-0.1" -- With keep-going then we should go on to successfully build Q (plan2, res2) <- executePlan =<< planProject testdir (config `mappend` keepGoing True) (_, failure2) <- expectPackageFailed plan2 res2 "p-0.1" expectBuildFailed failure2 _ <- expectPackageInstalled plan2 res2 "q-0.1" return () where testdir = "build/keep-going" keepGoing kg = mempty { projectConfigBuildOnly = mempty { projectConfigKeepGoing = toFlag kg } } -- | Test we can successfully build packages from local tarball files. 
-- testBuildLocalTarball :: ProjectConfig -> Assertion testBuildLocalTarball config = do -- P is a tarball package, Q is a local dir package that depends on it. (plan, res) <- executePlan =<< planProject testdir config _ <- expectPackageInstalled plan res "p-0.1" _ <- expectPackageInstalled plan res "q-0.1" return () where testdir = "build/local-tarball" -- | See -- -- This test just doesn't seem to work on Windows, -- due filesystem woes. -- testRegressionIssue3324 :: ProjectConfig -> Assertion testRegressionIssue3324 config = when (buildOS /= Windows) $ do -- expected failure first time due to missing dep (plan1, res1) <- executePlan =<< planProject testdir config (_pkgq, failure) <- expectPackageFailed plan1 res1 "q-0.1" expectBuildFailed failure -- add the missing dep, now it should work let qcabal = basedir testdir "q" "q.cabal" withFileFinallyRestore qcabal $ do tryFewTimes $ BS.appendFile qcabal (" build-depends: p\n") (plan2, res2) <- executePlan =<< planProject testdir config _ <- expectPackageInstalled plan2 res2 "p-0.1" _ <- expectPackageInstalled plan2 res2 "q-0.1" return () where testdir = "regression/3324" -- | Test global program options are propagated correctly -- from ProjectConfig to ElaboratedInstallPlan testProgramOptionsAll :: ProjectConfig -> Assertion testProgramOptionsAll config0 = do -- P is a tarball package, Q is a local dir package that depends on it. 
(_, elaboratedPlan, _) <- planProject testdir config let packages = filterConfiguredPackages $ InstallPlan.toList elaboratedPlan assertEqual "q" (Just [ghcFlag]) (getProgArgs packages "q") assertEqual "p" (Just [ghcFlag]) (getProgArgs packages "p") where testdir = "regression/program-options" programArgs = MapMappend (Map.fromList [("ghc", [ghcFlag])]) ghcFlag = "-fno-full-laziness" -- Insert flag into global config config = config0 { projectConfigAllPackages = (projectConfigAllPackages config0) { packageConfigProgramArgs = programArgs } } -- | Test local program options are propagated correctly -- from ProjectConfig to ElaboratedInstallPlan testProgramOptionsLocal :: ProjectConfig -> Assertion testProgramOptionsLocal config0 = do (_, elaboratedPlan, _) <- planProject testdir config let localPackages = filterConfiguredPackages $ InstallPlan.toList elaboratedPlan assertEqual "q" (Just [ghcFlag]) (getProgArgs localPackages "q") assertEqual "p" Nothing (getProgArgs localPackages "p") where testdir = "regression/program-options" programArgs = MapMappend (Map.fromList [("ghc", [ghcFlag])]) ghcFlag = "-fno-full-laziness" -- Insert flag into local config config = config0 { projectConfigLocalPackages = (projectConfigLocalPackages config0) { packageConfigProgramArgs = programArgs } } -- | Test package specific program options are propagated correctly -- from ProjectConfig to ElaboratedInstallPlan testProgramOptionsSpecific :: ProjectConfig -> Assertion testProgramOptionsSpecific config0 = do (_, elaboratedPlan, _) <- planProject testdir config let packages = filterConfiguredPackages $ InstallPlan.toList elaboratedPlan assertEqual "q" (Nothing) (getProgArgs packages "q") assertEqual "p" (Just [ghcFlag]) (getProgArgs packages "p") where testdir = "regression/program-options" programArgs = MapMappend (Map.fromList [("ghc", [ghcFlag])]) ghcFlag = "-fno-full-laziness" -- Insert flag into package "p" config config = config0 { projectConfigSpecificPackage = MapMappend 
(Map.fromList [(mkPackageName "p", configArgs)]) } configArgs = mempty { packageConfigProgramArgs = programArgs } filterConfiguredPackages :: [ElaboratedPlanPackage] -> [ElaboratedConfiguredPackage] filterConfiguredPackages [] = [] filterConfiguredPackages (InstallPlan.PreExisting _ : pkgs) = filterConfiguredPackages pkgs filterConfiguredPackages (InstallPlan.Installed elab : pkgs) = elab : filterConfiguredPackages pkgs filterConfiguredPackages (InstallPlan.Configured elab : pkgs) = elab : filterConfiguredPackages pkgs getProgArgs :: [ElaboratedConfiguredPackage] -> String -> Maybe [String] getProgArgs [] _ = Nothing getProgArgs (elab : pkgs) name | pkgName (elabPkgSourceId elab) == mkPackageName name = Map.lookup "ghc" (elabProgramArgs elab) | otherwise = getProgArgs pkgs name --------------------------------- -- Test utils to plan and build -- basedir :: FilePath basedir = "tests" "IntegrationTests2" dirActions :: FilePath -> TS.DirActions IO dirActions testdir = defaultDirActions { TS.doesFileExist = \p -> TS.doesFileExist defaultDirActions (virtcwd p), TS.doesDirectoryExist = \p -> TS.doesDirectoryExist defaultDirActions (virtcwd p), TS.canonicalizePath = \p -> TS.canonicalizePath defaultDirActions (virtcwd p), TS.getCurrentDirectory = TS.canonicalizePath defaultDirActions virtcwd } where virtcwd = basedir testdir type ProjDetails = (DistDirLayout, CabalDirLayout, ProjectConfig, [PackageSpecifier UnresolvedSourcePackage], BuildTimeSettings) configureProject :: FilePath -> ProjectConfig -> IO ProjDetails configureProject testdir cliConfig = do cabalDir <- getCabalDir let cabalDirLayout = defaultCabalDirLayout cabalDir projectRootDir <- canonicalizePath (basedir testdir) isexplict <- doesFileExist (projectRootDir "cabal.project") let projectRoot | isexplict = ProjectRootExplicit projectRootDir (projectRootDir "cabal.project") | otherwise = ProjectRootImplicit projectRootDir distDirLayout = defaultDistDirLayout projectRoot Nothing -- Clear state between test runs. 
The state remains if the previous run -- ended in an exception (as we leave the files to help with debugging). cleanProject testdir httpTransport <- configureTransport verbosity [] Nothing (projectConfig, localPackages) <- rebuildProjectConfig verbosity httpTransport distDirLayout cliConfig let buildSettings = resolveBuildTimeSettings verbosity cabalDirLayout projectConfig return (distDirLayout, cabalDirLayout, projectConfig, localPackages, buildSettings) type PlanDetails = (ProjDetails, ElaboratedInstallPlan, ElaboratedSharedConfig) planProject :: FilePath -> ProjectConfig -> IO PlanDetails planProject testdir cliConfig = do projDetails@( distDirLayout, cabalDirLayout, projectConfig, localPackages, _buildSettings) <- configureProject testdir cliConfig (elaboratedPlan, _, elaboratedShared, _, _) <- rebuildInstallPlan verbosity distDirLayout cabalDirLayout projectConfig localPackages return (projDetails, elaboratedPlan, elaboratedShared) executePlan :: PlanDetails -> IO (ElaboratedInstallPlan, BuildOutcomes) executePlan ((distDirLayout, cabalDirLayout, _, _, buildSettings), elaboratedPlan, elaboratedShared) = do let targets :: Map.Map UnitId [ComponentTarget] targets = Map.fromList [ (unitid, [ComponentTarget cname WholeComponent]) | ts <- Map.elems (availableTargets elaboratedPlan) , AvailableTarget { availableTargetStatus = TargetBuildable (unitid, cname) _ } <- ts ] elaboratedPlan' = pruneInstallPlanToTargets TargetActionBuild targets elaboratedPlan pkgsBuildStatus <- rebuildTargetsDryRun distDirLayout elaboratedShared elaboratedPlan' let elaboratedPlan'' = improveInstallPlanWithUpToDatePackages pkgsBuildStatus elaboratedPlan' buildOutcomes <- rebuildTargets verbosity distDirLayout (cabalStoreDirLayout cabalDirLayout) elaboratedPlan'' elaboratedShared pkgsBuildStatus -- Avoid trying to use act-as-setup mode: buildSettings { buildSettingNumJobs = 1 } return (elaboratedPlan'', buildOutcomes) cleanProject :: FilePath -> IO () cleanProject testdir = do alreadyExists 
<- doesDirectoryExist distDir when alreadyExists $ removePathForcibly distDir where projectRoot = ProjectRootImplicit (basedir testdir) distDirLayout = defaultDistDirLayout projectRoot Nothing distDir = distDirectory distDirLayout verbosity :: Verbosity verbosity = minBound --normal --verbose --maxBound --minBound ------------------------------------------- -- Tasty integration to adjust the config -- withProjectConfig :: (ProjectConfig -> TestTree) -> TestTree withProjectConfig testtree = askOption $ \ghcPath -> testtree (mkProjectConfig ghcPath) mkProjectConfig :: GhcPath -> ProjectConfig mkProjectConfig (GhcPath ghcPath) = mempty { projectConfigShared = mempty { projectConfigHcPath = maybeToFlag ghcPath }, projectConfigBuildOnly = mempty { projectConfigNumJobs = toFlag (Just 1) } } where maybeToFlag = maybe mempty toFlag data GhcPath = GhcPath (Maybe FilePath) deriving Typeable instance IsOption GhcPath where defaultValue = GhcPath Nothing optionName = Tagged "with-ghc" optionHelp = Tagged "The ghc compiler to use" parseValue = Just . GhcPath . 
Just projectConfigOptionDescriptions :: [OptionDescription] projectConfigOptionDescriptions = [Option (Proxy :: Proxy GhcPath)] --------------------------------------- -- HUint style utils for this context -- expectException :: Exception e => String -> IO a -> IO e expectException expected action = do res <- try action case res of Left e -> return e Right _ -> throwIO $ HUnitFailure Nothing $ "expected an exception " ++ expected expectPackagePreExisting :: ElaboratedInstallPlan -> BuildOutcomes -> PackageId -> IO InstalledPackageInfo expectPackagePreExisting plan buildOutcomes pkgid = do planpkg <- expectPlanPackage plan pkgid case (planpkg, InstallPlan.lookupBuildOutcome planpkg buildOutcomes) of (InstallPlan.PreExisting pkg, Nothing) -> return pkg (_, buildResult) -> unexpectedBuildResult "PreExisting" planpkg buildResult expectPackageConfigured :: ElaboratedInstallPlan -> BuildOutcomes -> PackageId -> IO ElaboratedConfiguredPackage expectPackageConfigured plan buildOutcomes pkgid = do planpkg <- expectPlanPackage plan pkgid case (planpkg, InstallPlan.lookupBuildOutcome planpkg buildOutcomes) of (InstallPlan.Configured pkg, Nothing) -> return pkg (_, buildResult) -> unexpectedBuildResult "Configured" planpkg buildResult expectPackageInstalled :: ElaboratedInstallPlan -> BuildOutcomes -> PackageId -> IO ElaboratedConfiguredPackage expectPackageInstalled plan buildOutcomes pkgid = do planpkg <- expectPlanPackage plan pkgid case (planpkg, InstallPlan.lookupBuildOutcome planpkg buildOutcomes) of (InstallPlan.Configured pkg, Just (Right _result)) -- result isn't used by any test -> return pkg -- package can be installed in the global .store! -- (when installing from tarball!) 
(InstallPlan.Installed pkg, Nothing) -> return pkg (_, buildResult) -> unexpectedBuildResult "Installed" planpkg buildResult expectPackageFailed :: ElaboratedInstallPlan -> BuildOutcomes -> PackageId -> IO (ElaboratedConfiguredPackage, BuildFailure) expectPackageFailed plan buildOutcomes pkgid = do planpkg <- expectPlanPackage plan pkgid case (planpkg, InstallPlan.lookupBuildOutcome planpkg buildOutcomes) of (InstallPlan.Configured pkg, Just (Left failure)) -> return (pkg, failure) (_, buildResult) -> unexpectedBuildResult "Failed" planpkg buildResult unexpectedBuildResult :: String -> ElaboratedPlanPackage -> Maybe (Either BuildFailure BuildResult) -> IO a unexpectedBuildResult expected planpkg buildResult = throwIO $ HUnitFailure Nothing $ "expected to find " ++ display (packageId planpkg) ++ " in the " ++ expected ++ " state, but it is actually in the " ++ actual ++ " state." where actual = case (buildResult, planpkg) of (Nothing, InstallPlan.PreExisting{}) -> "PreExisting" (Nothing, InstallPlan.Configured{}) -> "Configured" (Just (Right _), InstallPlan.Configured{}) -> "Installed" (Just (Left _), InstallPlan.Configured{}) -> "Failed" (Nothing, InstallPlan.Installed{}) -> "Installed globally" _ -> "Impossible! 
" ++ show buildResult ++ show planpkg expectPlanPackage :: ElaboratedInstallPlan -> PackageId -> IO ElaboratedPlanPackage expectPlanPackage plan pkgid = case [ pkg | pkg <- InstallPlan.toList plan , packageId pkg == pkgid ] of [pkg] -> return pkg [] -> throwIO $ HUnitFailure Nothing $ "expected to find " ++ display pkgid ++ " in the install plan but it's not there" _ -> throwIO $ HUnitFailure Nothing $ "expected to find only one instance of " ++ display pkgid ++ " in the install plan but there's several" expectBuildFailed :: BuildFailure -> IO () expectBuildFailed (BuildFailure _ (BuildFailed _)) = return () expectBuildFailed (BuildFailure _ reason) = assertFailure $ "expected BuildFailed, got " ++ show reason --------------------------------------- -- Other utils -- -- | Allow altering a file during a test, but then restore it afterwards -- -- We read into the memory, as filesystems are tricky. (especially Windows) -- withFileFinallyRestore :: FilePath -> IO a -> IO a withFileFinallyRestore file action = do originalContents <- BS.readFile file action `finally` handle onIOError (tryFewTimes $ BS.writeFile file originalContents) where onIOError :: IOException -> IO () onIOError e = putStrLn $ "WARNING: Cannot restore " ++ file ++ "; " ++ show e -- Hopefully works around some Windows file-locking things. -- Use with care: -- -- Try action 4 times, with small sleep in between, -- retrying if it fails for 'IOException' reason. -- tryFewTimes :: forall a. 
IO a -> IO a tryFewTimes action = go (3 :: Int) where go :: Int -> IO a go !n | n <= 0 = action | otherwise = action `catch` onIOError n onIOError :: Int -> IOException -> IO a onIOError n e = do hPutStrLn stderr $ "Trying " ++ show n ++ " after " ++ show e threadDelay 10000 go (n - 1) testNixFlags :: Assertion testNixFlags = do let gc = globalCommand [] -- changing from the v1 to v2 build command does not change whether the "--enable-nix" flag -- sets the globalNix param of the GlobalFlags type to True even though the v2 command doesn't use it let nixEnabledFlags = getFlags gc . commandParseArgs gc True $ ["--enable-nix", "build"] let nixDisabledFlags = getFlags gc . commandParseArgs gc True $ ["--disable-nix", "build"] let nixDefaultFlags = getFlags gc . commandParseArgs gc True $ ["build"] True @=? isJust nixDefaultFlags True @=? isJust nixEnabledFlags True @=? isJust nixDisabledFlags Just True @=? (fromFlag . globalNix . fromJust $ nixEnabledFlags) Just False @=? (fromFlag . globalNix . fromJust $ nixDisabledFlags) Nothing @=? (fromFlag . globalNix . fromJust $ nixDefaultFlags) where fromFlag :: Flag Bool -> Maybe Bool fromFlag (Flag x) = Just x fromFlag NoFlag = Nothing getFlags :: CommandUI GlobalFlags -> CommandParse (GlobalFlags -> GlobalFlags, [String]) -> Maybe GlobalFlags getFlags cui (CommandReadyToGo (mkflags, _)) = Just . mkflags . commandDefaultFlags $ cui getFlags _ _ = Nothing testIgnoreProjectFlag :: Assertion testIgnoreProjectFlag = do -- Coverage flag should be false globally by default (~/.cabal folder) (_, _, prjConfigGlobal, _, _) <- configureProject testdir ignoreSetConfig let globalCoverageFlag = packageConfigCoverage . projectConfigLocalPackages $ prjConfigGlobal False @=? Flag.fromFlagOrDefault False globalCoverageFlag -- It is set to true in the cabal.project file (_, _, prjConfigLocal, _, _) <- configureProject testdir emptyConfig let localCoverageFlag = packageConfigCoverage . projectConfigLocalPackages $ prjConfigLocal True @=? 
Flag.fromFlagOrDefault False localCoverageFlag where testdir = "build/ignore-project" emptyConfig = mempty ignoreSetConfig :: ProjectConfig ignoreSetConfig = mempty { projectConfigShared = mempty { projectConfigIgnoreProject = Flag True } } cabal-install-3.8.1.0/tests/LongTests.hs0000644000000000000000000000316507346545000016205 0ustar0000000000000000module Main (main) where import Test.Tasty import Distribution.Simple.Utils import Distribution.Verbosity import Distribution.Compat.Time import qualified UnitTests.Distribution.Client.FileMonitor import qualified UnitTests.Distribution.Client.VCS import qualified UnitTests.Distribution.Solver.Modular.QuickCheck import qualified UnitTests.Distribution.Client.Described import UnitTests.Options main :: IO () main = do (mtimeChange, mtimeChange') <- calibrateMtimeChangeDelay let toMillis :: Int -> Double toMillis x = fromIntegral x / 1000.0 notice normal $ "File modification time resolution calibration completed, " ++ "maximum delay observed: " ++ (show . toMillis $ mtimeChange ) ++ " ms. " ++ "Will be using delay of " ++ (show . toMillis $ mtimeChange') ++ " for test runs." 
defaultMainWithIngredients (includingOptions extraOptions : defaultIngredients) (tests mtimeChange') tests :: Int -> TestTree tests mtimeChangeCalibrated = askOption $ \(OptionMtimeChangeDelay mtimeChangeProvided) -> let mtimeChange = if mtimeChangeProvided /= 0 then mtimeChangeProvided else mtimeChangeCalibrated in testGroup "Long-running tests" [ testGroup "Solver QuickCheck" UnitTests.Distribution.Solver.Modular.QuickCheck.tests , testGroup "UnitTests.Distribution.Client.VCS" $ UnitTests.Distribution.Client.VCS.tests mtimeChange , testGroup "UnitTests.Distribution.Client.FileMonitor" $ UnitTests.Distribution.Client.FileMonitor.tests mtimeChange , UnitTests.Distribution.Client.Described.tests ] cabal-install-3.8.1.0/tests/MemoryUsageTests.hs0000644000000000000000000000051507346545000017537 0ustar0000000000000000module Main where import Test.Tasty import qualified UnitTests.Distribution.Solver.Modular.MemoryUsage tests :: TestTree tests = testGroup "Memory Usage" [ testGroup "UnitTests.Distribution.Solver.Modular.MemoryUsage" UnitTests.Distribution.Solver.Modular.MemoryUsage.tests ] main :: IO () main = defaultMain tests cabal-install-3.8.1.0/tests/UnitTests.hs0000644000000000000000000000753107346545000016226 0ustar0000000000000000module Main (main) where import Test.Tasty import qualified UnitTests.Distribution.Client.BuildReport import qualified UnitTests.Distribution.Client.Configure import qualified UnitTests.Distribution.Client.FetchUtils import qualified UnitTests.Distribution.Client.Get import qualified UnitTests.Distribution.Client.Glob import qualified UnitTests.Distribution.Client.GZipUtils import qualified UnitTests.Distribution.Client.IndexUtils import qualified UnitTests.Distribution.Client.IndexUtils.Timestamp import qualified UnitTests.Distribution.Client.Init import qualified UnitTests.Distribution.Client.InstallPlan import qualified UnitTests.Distribution.Client.JobControl import qualified UnitTests.Distribution.Client.ProjectConfig import 
qualified UnitTests.Distribution.Client.ProjectPlanning import qualified UnitTests.Distribution.Client.Store import qualified UnitTests.Distribution.Client.Tar import qualified UnitTests.Distribution.Client.Targets import qualified UnitTests.Distribution.Client.UserConfig import qualified UnitTests.Distribution.Solver.Modular.Builder import qualified UnitTests.Distribution.Solver.Modular.RetryLog import qualified UnitTests.Distribution.Solver.Modular.Solver import qualified UnitTests.Distribution.Solver.Modular.WeightedPSQ import qualified UnitTests.Distribution.Solver.Types.OptionalStanza main :: IO () main = do initTests <- UnitTests.Distribution.Client.Init.tests defaultMain $ testGroup "Unit Tests" [ testGroup "UnitTests.Distribution.Client.BuildReport" UnitTests.Distribution.Client.BuildReport.tests , testGroup "UnitTests.Distribution.Client.Configure" UnitTests.Distribution.Client.Configure.tests , testGroup "UnitTests.Distribution.Client.FetchUtils" UnitTests.Distribution.Client.FetchUtils.tests , testGroup "UnitTests.Distribution.Client.Get" UnitTests.Distribution.Client.Get.tests , testGroup "UnitTests.Distribution.Client.Glob" UnitTests.Distribution.Client.Glob.tests , testGroup "Distribution.Client.GZipUtils" UnitTests.Distribution.Client.GZipUtils.tests , testGroup "UnitTests.Distribution.Client.IndexUtils" UnitTests.Distribution.Client.IndexUtils.tests , testGroup "UnitTests.Distribution.Client.IndexUtils.Timestamp" UnitTests.Distribution.Client.IndexUtils.Timestamp.tests , testGroup "Distribution.Client.Init" initTests , testGroup "UnitTests.Distribution.Client.InstallPlan" UnitTests.Distribution.Client.InstallPlan.tests , testGroup "UnitTests.Distribution.Client.JobControl" UnitTests.Distribution.Client.JobControl.tests , testGroup "UnitTests.Distribution.Client.ProjectConfig" UnitTests.Distribution.Client.ProjectConfig.tests , testGroup "UnitTests.Distribution.Client.ProjectPlanning" UnitTests.Distribution.Client.ProjectPlanning.tests , testGroup 
"Distribution.Client.Store" UnitTests.Distribution.Client.Store.tests , testGroup "Distribution.Client.Tar" UnitTests.Distribution.Client.Tar.tests , testGroup "Distribution.Client.Targets" UnitTests.Distribution.Client.Targets.tests , testGroup "UnitTests.Distribution.Client.UserConfig" UnitTests.Distribution.Client.UserConfig.tests , testGroup "UnitTests.Distribution.Solver.Modular.Builder" UnitTests.Distribution.Solver.Modular.Builder.tests , testGroup "UnitTests.Distribution.Solver.Modular.RetryLog" UnitTests.Distribution.Solver.Modular.RetryLog.tests , testGroup "UnitTests.Distribution.Solver.Modular.Solver" UnitTests.Distribution.Solver.Modular.Solver.tests , testGroup "UnitTests.Distribution.Solver.Modular.WeightedPSQ" UnitTests.Distribution.Solver.Modular.WeightedPSQ.tests , testGroup "UnitTests.Distribution.Solver.Types.OptionalStanza" UnitTests.Distribution.Solver.Types.OptionalStanza.tests ] cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/0000755000000000000000000000000007346545000021561 5ustar0000000000000000cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/ArbitraryInstances.hs0000644000000000000000000003330407346545000025727 0ustar0000000000000000{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module UnitTests.Distribution.Client.ArbitraryInstances ( adjustSize, shortListOf, shortListOf1, arbitraryFlag, ShortToken(..), arbitraryShortToken, NonMEmpty(..), NoShrink(..), -- * Shrinker Shrinker, runShrinker, shrinker, shrinkerPP, shrinkerAla, ) where import Distribution.Client.Compat.Prelude import Prelude () import Data.Char (isLetter) import Data.List ((\\)) import Distribution.Simple.Setup import Distribution.Types.Flag (mkFlagAssignment) import Distribution.Client.BuildReports.Types (BuildReport, InstallOutcome, Outcome, ReportLevel (..)) import Distribution.Client.CmdInstall.ClientInstallFlags (InstallMethod) import Distribution.Client.Glob (FilePathGlob 
(..), FilePathGlobRel (..), FilePathRoot (..), GlobPiece (..)) import Distribution.Client.IndexUtils.ActiveRepos (ActiveRepoEntry (..), ActiveRepos (..), CombineStrategy (..)) import Distribution.Client.IndexUtils.IndexState (RepoIndexState (..), TotalIndexState, makeTotalIndexState) import Distribution.Client.IndexUtils.Timestamp (Timestamp, epochTimeToTimestamp) import Distribution.Client.Targets import Distribution.Client.Types (RepoName (..), WriteGhcEnvironmentFilesPolicy) import Distribution.Client.Types.AllowNewer import Distribution.Client.Types.OverwritePolicy (OverwritePolicy) import Distribution.Solver.Types.OptionalStanza (OptionalStanza (..), OptionalStanzaMap, OptionalStanzaSet, optStanzaSetFromList, optStanzaTabulate) import Distribution.Solver.Types.PackageConstraint (PackageProperty (..)) import Data.Coerce (Coercible, coerce) import Network.URI (URI (..), URIAuth (..), isUnreserved) import Test.QuickCheck import Test.QuickCheck.GenericArbitrary import Test.QuickCheck.Instances.Cabal () -- note: there are plenty of instances defined in ProjectConfig test file. -- they should be moved here or into Cabal-quickcheck ------------------------------------------------------------------------------- -- Utilities ------------------------------------------------------------------------------- data Shrinker a = Shrinker a [a] instance Functor Shrinker where fmap f (Shrinker x xs) = Shrinker (f x) (map f xs) instance Applicative Shrinker where pure x = Shrinker x [] Shrinker f fs <*> Shrinker x xs = Shrinker (f x) (map f xs ++ map ($ x) fs) runShrinker :: Shrinker a -> [a] runShrinker (Shrinker _ xs) = xs shrinker :: Arbitrary a => a -> Shrinker a shrinker x = Shrinker x (shrink x) shrinkerAla :: (Coercible a b, Arbitrary b) => (a -> b) -> a -> Shrinker a shrinkerAla pack = shrinkerPP pack coerce -- | shrinker with pre and post functions. 
shrinkerPP :: Arbitrary b => (a -> b) -> (b -> a) -> a -> Shrinker a shrinkerPP pack unpack x = Shrinker x (map unpack (shrink (pack x))) ------------------------------------------------------------------------------- -- Non-Cabal instances ------------------------------------------------------------------------------- instance Arbitrary URI where arbitrary = URI <$> elements ["file:", "http:", "https:"] <*> (Just <$> arbitrary) <*> (('/':) <$> arbitraryURIToken) <*> (('?':) <$> arbitraryURIToken) <*> pure "" instance Arbitrary URIAuth where arbitrary = URIAuth <$> pure "" -- no password as this does not roundtrip <*> arbitraryURIToken <*> arbitraryURIPort arbitraryURIToken :: Gen String arbitraryURIToken = shortListOf1 6 (elements (filter isUnreserved ['\0'..'\255'])) arbitraryURIPort :: Gen String arbitraryURIPort = oneof [ pure "", (':':) <$> shortListOf1 4 (choose ('0','9')) ] ------------------------------------------------------------------------------- -- cabal-install (and Cabal) types ------------------------------------------------------------------------------- shrinkBoundedEnum :: (Eq a, Enum a, Bounded a) => a -> [a] shrinkBoundedEnum x | x == minBound = [] | otherwise = [pred x] adjustSize :: (Int -> Int) -> Gen a -> Gen a adjustSize adjust gen = sized (\n -> resize (adjust n) gen) shortListOf :: Int -> Gen a -> Gen [a] shortListOf bound gen = sized $ \n -> do k <- choose (0, (n `div` 2) `min` bound) vectorOf k gen shortListOf1 :: Int -> Gen a -> Gen [a] shortListOf1 bound gen = sized $ \n -> do k <- choose (1, 1 `max` ((n `div` 2) `min` bound)) vectorOf k gen newtype ShortToken = ShortToken { getShortToken :: String } deriving Show instance Arbitrary ShortToken where arbitrary = ShortToken <$> (shortListOf1 5 (choose ('#', '~')) `suchThat` (all (`notElem` "{}")) `suchThat` (not . ("[]" `isPrefixOf`))) --TODO: [code cleanup] need to replace parseHaskellString impl to stop -- accepting Haskell list syntax [], ['a'] etc, just allow String syntax. 
-- Workaround, don't generate [] as this does not round trip. shrink (ShortToken cs) = [ ShortToken cs' | cs' <- shrink cs, not (null cs') ] arbitraryShortToken :: Gen String arbitraryShortToken = getShortToken <$> arbitrary newtype NonMEmpty a = NonMEmpty { getNonMEmpty :: a } deriving (Eq, Ord, Show) instance (Arbitrary a, Monoid a, Eq a) => Arbitrary (NonMEmpty a) where arbitrary = NonMEmpty <$> (arbitrary `suchThat` (/= mempty)) shrink (NonMEmpty x) = [ NonMEmpty x' | x' <- shrink x, x' /= mempty ] newtype NoShrink a = NoShrink { getNoShrink :: a } deriving (Eq, Ord, Show) instance Arbitrary a => Arbitrary (NoShrink a) where arbitrary = NoShrink <$> arbitrary shrink _ = [] instance Arbitrary Timestamp where -- note: no negative timestamps -- -- >>> utcTimeToPOSIXSeconds $ UTCTime (fromGregorian 100000 01 01) 0 -- >>> 3093527980800s -- arbitrary = maybe (toEnum 0) id . epochTimeToTimestamp . (`mod` 3093527980800) . abs <$> arbitrary instance Arbitrary RepoIndexState where arbitrary = frequency [ (1, pure IndexStateHead) , (50, IndexStateTime <$> arbitrary) ] instance Arbitrary TotalIndexState where arbitrary = makeTotalIndexState <$> arbitrary <*> arbitrary instance Arbitrary WriteGhcEnvironmentFilesPolicy where arbitrary = arbitraryBoundedEnum arbitraryFlag :: Gen a -> Gen (Flag a) arbitraryFlag = liftArbitrary instance Arbitrary RepoName where -- TODO: rename refinement? arbitrary = RepoName <$> (mk `suchThat` \x -> not $ "--" `isPrefixOf` x) where mk = (:) <$> lead <*> rest lead = elements [ c | c <- [ '\NUL' .. '\255' ], isAlpha c || c `elem` "_-."] rest = listOf (elements [ c | c <- [ '\NUL' .. 
'\255' ], isAlphaNum c || c `elem` "_-."]) instance Arbitrary ReportLevel where arbitrary = arbitraryBoundedEnum instance Arbitrary OverwritePolicy where arbitrary = arbitraryBoundedEnum instance Arbitrary InstallMethod where arbitrary = arbitraryBoundedEnum ------------------------------------------------------------------------------- -- ActiveRepos ------------------------------------------------------------------------------- instance Arbitrary ActiveRepos where arbitrary = ActiveRepos <$> shortListOf 5 arbitrary instance Arbitrary ActiveRepoEntry where arbitrary = frequency [ (10, ActiveRepo <$> arbitrary <*> arbitrary) , (1, ActiveRepoRest <$> arbitrary) ] instance Arbitrary CombineStrategy where arbitrary = arbitraryBoundedEnum shrink = shrinkBoundedEnum ------------------------------------------------------------------------------- -- AllowNewer ------------------------------------------------------------------------------- instance Arbitrary AllowNewer where arbitrary = AllowNewer <$> arbitrary instance Arbitrary AllowOlder where arbitrary = AllowOlder <$> arbitrary instance Arbitrary RelaxDeps where arbitrary = oneof [ pure mempty , mkRelaxDepSome <$> shortListOf1 3 arbitrary , pure RelaxDepsAll ] instance Arbitrary RelaxDepMod where arbitrary = elements [RelaxDepModNone, RelaxDepModCaret] shrink RelaxDepModCaret = [RelaxDepModNone] shrink _ = [] instance Arbitrary RelaxDepScope where arbitrary = genericArbitrary shrink = genericShrink instance Arbitrary RelaxDepSubject where arbitrary = genericArbitrary shrink = genericShrink instance Arbitrary RelaxedDep where arbitrary = genericArbitrary shrink = genericShrink ------------------------------------------------------------------------------- -- UserConstraint ------------------------------------------------------------------------------- instance Arbitrary UserConstraintScope where arbitrary = genericArbitrary shrink = genericShrink instance Arbitrary UserQualifier where arbitrary = oneof [ pure 
UserQualToplevel , UserQualSetup <$> arbitrary -- -- TODO: Re-enable UserQualExe tests once we decide on a syntax. -- , UserQualExe <$> arbitrary <*> arbitrary ] instance Arbitrary UserConstraint where arbitrary = genericArbitrary shrink = genericShrink instance Arbitrary PackageProperty where arbitrary = oneof [ PackagePropertyVersion <$> arbitrary , pure PackagePropertyInstalled , pure PackagePropertySource , PackagePropertyFlags . mkFlagAssignment <$> shortListOf1 3 arbitrary , PackagePropertyStanzas . (\x->[x]) <$> arbitrary ] instance Arbitrary OptionalStanza where arbitrary = elements [minBound..maxBound] instance Arbitrary OptionalStanzaSet where arbitrary = fmap optStanzaSetFromList arbitrary instance Arbitrary a => Arbitrary (OptionalStanzaMap a) where arbitrary = do x1 <- arbitrary x2 <- arbitrary return $ optStanzaTabulate $ \x -> case x of TestStanzas -> x1 BenchStanzas -> x2 ------------------------------------------------------------------------------- -- BuildReport ------------------------------------------------------------------------------- instance Arbitrary BuildReport where arbitrary = genericArbitrary shrink = genericShrink instance Arbitrary InstallOutcome where arbitrary = genericArbitrary shrink = genericShrink instance Arbitrary Outcome where arbitrary = genericArbitrary shrink = genericShrink ------------------------------------------------------------------------------- -- Glob ------------------------------------------------------------------------------- instance Arbitrary FilePathGlob where arbitrary = (FilePathGlob <$> arbitrary <*> arbitrary) `suchThat` validFilePathGlob shrink (FilePathGlob root pathglob) = [ FilePathGlob root' pathglob' | (root', pathglob') <- shrink (root, pathglob) , validFilePathGlob (FilePathGlob root' pathglob') ] validFilePathGlob :: FilePathGlob -> Bool validFilePathGlob (FilePathGlob FilePathRelative pathglob) = case pathglob of GlobDirTrailing -> False GlobDir [Literal "~"] _ -> False GlobDir [Literal 
(d:":")] _ | isLetter d -> False _ -> True validFilePathGlob _ = True instance Arbitrary FilePathRoot where arbitrary = frequency [ (3, pure FilePathRelative) , (1, pure (FilePathRoot unixroot)) , (1, FilePathRoot <$> windrive) , (1, pure FilePathHomeDir) ] where unixroot = "/" windrive = do d <- choose ('A', 'Z'); return (d : ":\\") shrink FilePathRelative = [] shrink (FilePathRoot _) = [FilePathRelative] shrink FilePathHomeDir = [FilePathRelative] instance Arbitrary FilePathGlobRel where arbitrary = sized $ \sz -> oneof $ take (max 1 sz) [ pure GlobDirTrailing , GlobFile <$> (getGlobPieces <$> arbitrary) , GlobDir <$> (getGlobPieces <$> arbitrary) <*> resize (sz `div` 2) arbitrary ] shrink GlobDirTrailing = [] shrink (GlobFile glob) = GlobDirTrailing : [ GlobFile (getGlobPieces glob') | glob' <- shrink (GlobPieces glob) ] shrink (GlobDir glob pathglob) = pathglob : GlobFile glob : [ GlobDir (getGlobPieces glob') pathglob' | (glob', pathglob') <- shrink (GlobPieces glob, pathglob) ] newtype GlobPieces = GlobPieces { getGlobPieces :: [GlobPiece] } deriving Eq instance Arbitrary GlobPieces where arbitrary = GlobPieces . 
mergeLiterals <$> shortListOf1 5 arbitrary shrink (GlobPieces glob) = [ GlobPieces (mergeLiterals (getNonEmpty glob')) | glob' <- shrink (NonEmpty glob) ] mergeLiterals :: [GlobPiece] -> [GlobPiece] mergeLiterals (Literal a : Literal b : ps) = mergeLiterals (Literal (a++b) : ps) mergeLiterals (Union as : ps) = Union (map mergeLiterals as) : mergeLiterals ps mergeLiterals (p:ps) = p : mergeLiterals ps mergeLiterals [] = [] instance Arbitrary GlobPiece where arbitrary = sized $ \sz -> frequency [ (3, Literal <$> shortListOf1 10 (elements globLiteralChars)) , (1, pure WildCard) , (1, Union <$> resize (sz `div` 2) (shortListOf1 5 (shortListOf1 5 arbitrary))) ] shrink (Literal str) = [ Literal str' | str' <- shrink str , not (null str') , all (`elem` globLiteralChars) str' ] shrink WildCard = [] shrink (Union as) = [ Union (map getGlobPieces (getNonEmpty as')) | as' <- shrink (NonEmpty (map GlobPieces as)) ] globLiteralChars :: [Char] globLiteralChars = ['\0'..'\128'] \\ "*{},/\\" cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/BuildReport.hs0000644000000000000000000000162407346545000024353 0ustar0000000000000000module UnitTests.Distribution.Client.BuildReport ( tests, ) where import Distribution.Client.Compat.Prelude import Prelude () import UnitTests.Distribution.Client.ArbitraryInstances () import UnitTests.Distribution.Client.TreeDiffInstances () import Data.TreeDiff.QuickCheck (ediffEq) import Test.QuickCheck (Property, counterexample) import Test.Tasty (TestTree) import Test.Tasty.QuickCheck (testProperty) import Distribution.Client.BuildReports.Anonymous (BuildReport, parseBuildReport, showBuildReport) import Distribution.Simple.Utils (toUTF8BS) -- instances import Test.QuickCheck.Instances.Cabal () tests :: [TestTree] tests = [ testProperty "test" roundtrip ] roundtrip :: BuildReport -> Property roundtrip br = counterexample str $ Right br `ediffEq` parseBuildReport (toUTF8BS str) where str :: String str = showBuildReport br 
cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Configure.hs0000644000000000000000000001070607346545000024042 0ustar0000000000000000{-# LANGUAGE RecordWildCards #-} module UnitTests.Distribution.Client.Configure (tests) where import Distribution.Client.CmdConfigure import Test.Tasty import Test.Tasty.HUnit import Control.Monad import qualified Data.Map as Map import System.Directory import System.FilePath import Distribution.Verbosity import Distribution.Client.Setup import Distribution.Client.NixStyleOptions import Distribution.Client.ProjectConfig.Types import Distribution.Client.ProjectFlags import Distribution.Simple import Distribution.Simple.Flag tests :: [TestTree] tests = [ configureTests ] configureTests :: TestTree configureTests = testGroup "Configure tests" [ testCase "New config" $ do let flags = (defaultNixStyleFlags ()) { configFlags = mempty { configOptimization = Flag MaximumOptimisation , configVerbosity = Flag silent } } projConfig <- configureAction' flags [] defaultGlobalFlags Flag MaximumOptimisation @=? (packageConfigOptimization . projectConfigLocalPackages $ snd projConfig) , testCase "Replacement + new config" $ do let flags = (defaultNixStyleFlags ()) { configExFlags = mempty { configAppend = Flag True } , configFlags = mempty { configOptimization = Flag NoOptimisation , configVerbosity = Flag silent } , projectFlags = mempty { flagProjectFileName = Flag projectFile } } (_, ProjectConfig {..}) <- configureAction' flags [] defaultGlobalFlags Flag NoOptimisation @=? packageConfigOptimization projectConfigLocalPackages Flag silent @=? projectConfigVerbosity projectConfigBuildOnly , testCase "Old + new config" $ do let flags = (defaultNixStyleFlags ()) { configExFlags = mempty { configAppend = Flag True } , configFlags = mempty { configVerbosity = Flag silent } , projectFlags = mempty { flagProjectFileName = Flag projectFile } } (_, ProjectConfig {..}) <- configureAction' flags [] defaultGlobalFlags Flag MaximumOptimisation @=? 
packageConfigOptimization projectConfigLocalPackages Flag silent @=? projectConfigVerbosity projectConfigBuildOnly , testCase "Old + new config, no appending" $ do let flags = (defaultNixStyleFlags ()) { configFlags = mempty { configVerbosity = Flag silent } , projectFlags = mempty { flagProjectFileName = Flag projectFile } } (_, ProjectConfig {..}) <- configureAction' flags [] defaultGlobalFlags NoFlag @=? packageConfigOptimization projectConfigLocalPackages Flag silent @=? projectConfigVerbosity projectConfigBuildOnly , testCase "Old + new config, backup check" $ do let flags = (defaultNixStyleFlags ()) { configFlags = mempty { configVerbosity = Flag silent } , projectFlags = mempty { flagProjectFileName = Flag projectFile } } backup = projectFile <.> "local~" exists <- doesFileExist backup when exists $ removeFile backup _ <- configureAction' flags [] defaultGlobalFlags doesFileExist backup >>= assertBool ("No file found, expected: " ++ backup) , testCase "Local program options" $ do let ghcFlags = ["-fno-full-laziness"] flags = (defaultNixStyleFlags ()) { configFlags = mempty { configVerbosity = Flag silent , configProgramArgs = [("ghc", ghcFlags)] } , projectFlags = mempty { flagProjectFileName = Flag projectFile } } (_, ProjectConfig {..}) <- configureAction' flags [] defaultGlobalFlags assertEqual "global" Nothing (Map.lookup "ghc" (getMapMappend (packageConfigProgramArgs projectConfigAllPackages))) assertEqual "local" (Just ghcFlags) (Map.lookup "ghc" (getMapMappend (packageConfigProgramArgs projectConfigLocalPackages))) ] projectFile :: FilePath projectFile = "tests" "fixtures" "configure" "cabal.project" cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Described.hs0000644000000000000000000000302007346545000023774 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} module UnitTests.Distribution.Client.Described where import Distribution.Client.Compat.Prelude import Prelude () import 
Test.QuickCheck.Instances.Cabal () import UnitTests.Distribution.Client.ArbitraryInstances () import UnitTests.Distribution.Client.DescribedInstances () import Distribution.Described (testDescribed) import Test.Tasty (TestTree, testGroup) import Distribution.Client.BuildReports.Types (InstallOutcome, Outcome) import Distribution.Client.IndexUtils.ActiveRepos (ActiveRepos) import Distribution.Client.IndexUtils.IndexState (RepoIndexState, TotalIndexState) import Distribution.Client.IndexUtils.Timestamp (Timestamp) import Distribution.Client.Targets (UserConstraint) import Distribution.Client.Types (RepoName) import Distribution.Client.Types.AllowNewer (RelaxDepSubject, RelaxDeps, RelaxedDep) tests :: TestTree tests = testGroup "Described" [ testDescribed (Proxy :: Proxy Timestamp) , testDescribed (Proxy :: Proxy RepoIndexState) , testDescribed (Proxy :: Proxy TotalIndexState) , testDescribed (Proxy :: Proxy RepoName) , testDescribed (Proxy :: Proxy ActiveRepos) , testDescribed (Proxy :: Proxy RelaxDepSubject) , testDescribed (Proxy :: Proxy RelaxedDep) , testDescribed (Proxy :: Proxy RelaxDeps) , testDescribed (Proxy :: Proxy UserConstraint) , testDescribed (Proxy :: Proxy InstallOutcome) , testDescribed (Proxy :: Proxy Outcome) ] cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/DescribedInstances.hs0000644000000000000000000002321707346545000025656 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module UnitTests.Distribution.Client.DescribedInstances where import Distribution.Client.Compat.Prelude import Distribution.Described import Data.List ((\\)) import Distribution.Types.PackageId (PackageIdentifier) import Distribution.Types.PackageName (PackageName) import Distribution.Types.VersionRange (VersionRange) import Distribution.Client.BuildReports.Types (InstallOutcome, Outcome) import Distribution.Client.IndexUtils.ActiveRepos (ActiveRepoEntry, ActiveRepos, CombineStrategy) import 
Distribution.Client.IndexUtils.IndexState (RepoIndexState, TotalIndexState) import Distribution.Client.IndexUtils.Timestamp (Timestamp) import Distribution.Client.Targets (UserConstraint) import Distribution.Client.Types (RepoName) import Distribution.Client.Types.AllowNewer (RelaxDepSubject, RelaxDeps, RelaxedDep) import Distribution.Client.Glob (FilePathGlob) ------------------------------------------------------------------------------- -- BuildReport ------------------------------------------------------------------------------- instance Described InstallOutcome where describe _ = REUnion [ "PlanningFailed" , "DependencyFailed" <> RESpaces1 <> describe (Proxy :: Proxy PackageIdentifier) , "DownloadFailed" , "UnpackFailed" , "SetupFailed" , "ConfigureFailed" , "BuildFailed" , "TestsFailed" , "InstallFailed" , "InstallOk" ] instance Described Outcome where describe _ = REUnion [ fromString (prettyShow o) | o <- [minBound .. maxBound :: Outcome] ] ------------------------------------------------------------------------------- -- Glob ------------------------------------------------------------------------------- -- This instance is incorrect as it may generate C:\dir\{foo,bar} instance Described FilePathGlob where describe _ = REUnion [ root, relative, homedir ] where root = REUnion [ fromString "/" , reChars (['a'..'z'] ++ ['A' .. 
'Z']) <> ":" <> reChars "/\\" ] <> REOpt pieces homedir = "~/" <> REOpt pieces relative = pieces pieces :: GrammarRegex void pieces = REMunch1 sep piece <> REOpt "/" piece :: GrammarRegex void piece = RERec "glob" $ REMunch1 mempty $ REUnion [ normal , escape , wildcard , "{" <> REMunch1 "," (REVar Nothing) <> "}" ] sep :: GrammarRegex void sep = reChars "/\\" wildcard :: GrammarRegex void wildcard = "*" normal = reChars $ ['\0'..'\128'] \\ "*{},/\\" escape = fromString "\\" <> reChars "*{}," ------------------------------------------------------------------------------- -- AllowNewer ------------------------------------------------------------------------------- instance Described RelaxedDep where describe _ = REOpt (describeRelaxDepScope <> ":" <> REOpt ("^")) <> describe (Proxy :: Proxy RelaxDepSubject) where describeRelaxDepScope = REUnion [ "*" , "all" , RENamed "package-name" (describe (Proxy :: Proxy PackageName)) , RENamed "package-id" (describe (Proxy :: Proxy PackageIdentifier)) ] instance Described RelaxDepSubject where describe _ = REUnion [ "*" , "all" , RENamed "package-name" (describe (Proxy :: Proxy PackageName)) ] instance Described RelaxDeps where describe _ = REUnion [ "*" , "all" , "none" , RECommaNonEmpty (describe (Proxy :: Proxy RelaxedDep)) ] ------------------------------------------------------------------------------- -- ActiveRepos ------------------------------------------------------------------------------- instance Described ActiveRepos where describe _ = REUnion [ ":none" , RECommaNonEmpty (describe (Proxy :: Proxy ActiveRepoEntry)) ] instance Described ActiveRepoEntry where describe _ = REUnion [ ":rest" <> strategy , REOpt ":repo:" <> describe (Proxy :: Proxy RepoName) <> strategy ] where strategy = REOpt $ ":" <> describe (Proxy :: Proxy CombineStrategy) instance Described CombineStrategy where describe _ = REUnion [ "skip" , "merge" , "override" ] ------------------------------------------------------------------------------- -- 
UserConstraint ------------------------------------------------------------------------------- instance Described UserConstraint where describe _ = REAppend [ describeConstraintScope , describeConstraintProperty ] where describeConstraintScope :: GrammarRegex void describeConstraintScope = REUnion [ "any." <> describePN , "setup." <> describePN , describePN , describePN <> ":setup." <> describePN ] describeConstraintProperty :: GrammarRegex void describeConstraintProperty = REUnion [ RESpaces <> RENamed "version-range" (describe (Proxy :: Proxy VersionRange)) , RESpaces1 <> describeConstraintProperty' ] describeConstraintProperty' :: GrammarRegex void describeConstraintProperty' = REUnion [ "installed" , "source" , "test" , "bench" , describeFlagAssignmentNonEmpty ] describePN :: GrammarRegex void describePN = RENamed "package-name" (describe (Proxy :: Proxy PackageName)) ------------------------------------------------------------------------------- -- IndexState ------------------------------------------------------------------------------- instance Described TotalIndexState where describe _ = reCommaNonEmpty $ REUnion [ describe (Proxy :: Proxy RepoName) <> RESpaces1 <> ris , ris ] where ris = describe (Proxy :: Proxy RepoIndexState) instance Described RepoName where describe _ = lead <> rest where lead = RECharSet $ csAlpha <> "_-." rest = reMunchCS $ csAlphaNum <> "_-." 
instance Described RepoIndexState where describe _ = REUnion [ "HEAD" , RENamed "timestamp" (describe (Proxy :: Proxy Timestamp)) ] instance Described Timestamp where describe _ = REUnion [ posix , utc ] where posix = reChar '@' <> reMunch1CS "0123456789" utc = RENamed "date" date <> reChar 'T' <> RENamed "time" time <> reChar 'Z' date = REOpt digit <> REUnion [ leapYear <> reChar '-' <> leapMD , commonYear <> reChar '-' <> commonMD ] -- leap year: either -- * divisible by 400 -- * not divisible by 100 and divisible by 4 leapYear = REUnion [ div4 <> "00" , digit <> digit <> div4not0 ] -- common year: either -- * not divisible by 400 but divisible by 100 -- * not divisible by 4 commonYear = REUnion [ notDiv4 <> "00" , digit <> digit <> notDiv4 ] div4 = REUnion [ "0" <> reChars "048" , "1" <> reChars "26" , "2" <> reChars "048" , "3" <> reChars "26" , "4" <> reChars "048" , "5" <> reChars "26" , "6" <> reChars "048" , "7" <> reChars "26" , "8" <> reChars "048" , "9" <> reChars "26" ] div4not0 = REUnion [ "0" <> reChars "48" -- no zero , "1" <> reChars "26" , "2" <> reChars "048" , "3" <> reChars "26" , "4" <> reChars "048" , "5" <> reChars "26" , "6" <> reChars "048" , "7" <> reChars "26" , "8" <> reChars "048" , "9" <> reChars "26" ] notDiv4 = REUnion [ "0" <> reChars "1235679" , "1" <> reChars "01345789" , "2" <> reChars "1235679" , "3" <> reChars "01345789" , "4" <> reChars "1235679" , "5" <> reChars "01345789" , "6" <> reChars "1235679" , "7" <> reChars "01345789" , "8" <> reChars "1235679" , "9" <> reChars "01345789" ] leapMD = REUnion [ jan, fe', mar, apr, may, jun, jul, aug, sep, oct, nov, dec ] commonMD = REUnion [ jan, feb, mar, apr, may, jun, jul, aug, sep, oct, nov, dec ] jan = "01-" <> d31 feb = "02-" <> d28 fe' = "02-" <> d29 mar = "03-" <> d31 apr = "04-" <> d30 may = "05-" <> d31 jun = "06-" <> d30 jul = "07-" <> d31 aug = "08-" <> d31 sep = "09-" <> d30 oct = "10-" <> d31 nov = "11-" <> d30 dec = "12-" <> d31 d28 = REUnion [ "0" <> digit1, "1" <> 
digit, "2" <> reChars "012345678" ] d29 = REUnion [ "0" <> digit1, "1" <> digit, "2" <> digit ] d30 = REUnion [ "0" <> digit1, "1" <> digit, "2" <> digit, "30" ] d31 = REUnion [ "0" <> digit1, "1" <> digit, "2" <> digit, "30", "31" ] time = ho <> reChar ':' <> minSec <> reChar ':' <> minSec -- 0..23 ho = REUnion [ "0" <> digit , "1" <> digit , "2" <> reChars "0123" ] -- 0..59 minSec = reChars "012345" <> digit digit = reChars "0123456789" digit1 = reChars "123456789" cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/FetchUtils.hs0000644000000000000000000001762607346545000024203 0ustar0000000000000000{-# LANGUAGE ScopedTypeVariables #-} module UnitTests.Distribution.Client.FetchUtils ( tests, ) where import Control.Concurrent (threadDelay) import Control.Exception import Data.Time.Clock (NominalDiffTime, UTCTime, diffUTCTime, getCurrentTime) import Distribution.Client.FetchUtils import Distribution.Client.GlobalFlags (RepoContext (..)) import Distribution.Client.HttpUtils (HttpCode, HttpTransport (..)) import Distribution.Client.Types.PackageLocation (PackageLocation (..), ResolvedPkgLoc) import Distribution.Client.Types.Repo (Repo (..), emptyRemoteRepo) import Distribution.Client.Types.RepoName (RepoName (..)) import Distribution.Types.PackageId (PackageIdentifier (..)) import Distribution.Types.PackageName (mkPackageName) import qualified Distribution.Verbosity as Verbosity import Distribution.Version (mkVersion) import Network.URI (URI, uriPath) import Test.Tasty import Test.Tasty.HUnit import UnitTests.TempTestDir (withTestDir) tests :: [TestTree] tests = [ testGroup "asyncFetchPackages" [ testCase "handles an empty package list" testEmpty, testCase "passes an unpacked local package through" testPassLocalPackage, testCase "handles http" testHttp, testCase "aborts on interrupt in GET" $ testGetInterrupt, testCase "aborts on other exception in GET" $ testGetException, testCase "aborts on interrupt in GET (uncollected download)" $ testUncollectedInterrupt, 
testCase "continues on other exception in GET (uncollected download)" $ testUncollectedException ] ] verbosity :: Verbosity.Verbosity verbosity = Verbosity.silent -- | An interval that we use to assert that something happens "immediately". -- Must be shorter than 'longSleep' to ensure those are interrupted. -- 1s would be a reasonable value, but failed tempfile cleanup on Windows CI -- takes ~1s. shortDelta :: NominalDiffTime shortDelta = 5 -- 5s longSleep :: IO () longSleep = threadDelay 10000000 -- 10s testEmpty :: Assertion testEmpty = do let repoCtxt = undefined pkgLocs = [] res <- asyncFetchPackages verbosity repoCtxt pkgLocs $ \_ -> return () res @?= () testPassLocalPackage :: Assertion testPassLocalPackage = do let repoCtxt = error "repoCtxt undefined" loc = LocalUnpackedPackage "a" res <- asyncFetchPackages verbosity repoCtxt [loc] $ \downloadMap -> waitAsyncFetchPackage verbosity downloadMap loc res @?= LocalUnpackedPackage "a" testHttp :: Assertion testHttp = withFakeRepoCtxt get200 $ \repoCtxt repo -> do let pkgId = mkPkgId "foo" loc = RepoTarballPackage repo pkgId Nothing res <- asyncFetchPackages verbosity repoCtxt [loc] $ \downloadMap -> waitAsyncFetchPackage verbosity downloadMap loc case res of RepoTarballPackage repo' pkgId' _ -> do repo' @?= repo pkgId' @?= pkgId _ -> assertFailure $ "expected RepoTarballPackage, got " ++ show res where get200 = \_uri -> return 200 testGetInterrupt :: Assertion testGetInterrupt = testGetAny UserInterrupt testGetException :: Assertion testGetException = testGetAny $ userError "some error" -- | Test that if a GET request fails with the given exception, -- we exit promptly. We queue two slow downloads after the failing -- download to cover a buggy scenario where -- 1. first download throws -- 2. second download is cancelled, but swallows AsyncCancelled -- 3. 
third download keeps running testGetAny :: Exception e => e -> Assertion testGetAny exc = withFakeRepoCtxt get $ \repoCtxt repo -> do let loc pkgId = RepoTarballPackage repo pkgId Nothing pkgLocs = [loc throws, loc slowA, loc slowB] start <- getCurrentTime res :: Either SomeException ResolvedPkgLoc <- try $ asyncFetchPackages verbosity repoCtxt pkgLocs $ \downloadMap -> do waitAsyncFetchPackage verbosity downloadMap (loc throws) assertFaster start shortDelta case res of Left _ -> pure () Right _ -> assertFailure $ "expected an exception, got " ++ show res where throws = mkPkgId "throws" slowA = mkPkgId "slowA" slowB = mkPkgId "slowB" get uri = case uriPath uri of "package/throws-1.0.tar.gz" -> throwIO exc "package/slowA-1.0.tar.gz" -> longSleep >> return 200 "package/slowB-1.0.tar.gz" -> longSleep >> return 200 _ -> assertFailure $ "unexpected URI: " ++ show uri -- | Test that when an undemanded download is interrupted (Ctrl-C), -- we still abort directly. testUncollectedInterrupt :: Assertion testUncollectedInterrupt = withFakeRepoCtxt get $ \repoCtxt repo -> do let loc pkgId = RepoTarballPackage repo pkgId Nothing pkgLocs = [loc throws, loc slowA, loc slowB] start <- getCurrentTime res :: Either SomeException ResolvedPkgLoc <- try $ asyncFetchPackages verbosity repoCtxt pkgLocs $ \downloadMap -> do waitAsyncFetchPackage verbosity downloadMap (loc slowA) assertFaster start shortDelta case res of Left _ -> pure () Right _ -> assertFailure $ "expected an exception, got " ++ show res where throws = mkPkgId "throws" slowA = mkPkgId "slowA" slowB = mkPkgId "slowB" get uri = case uriPath uri of "package/throws-1.0.tar.gz" -> throwIO UserInterrupt "package/slowA-1.0.tar.gz" -> longSleep >> return 200 "package/slowB-1.0.tar.gz" -> longSleep >> return 200 _ -> assertFailure $ "unexpected URI: " ++ show uri -- | Test that a download failure doesn't automatically abort things, -- e.g. if we don't collect the download. 
(In practice, we might collect -- the download and handle its exception.) testUncollectedException :: Assertion testUncollectedException = withFakeRepoCtxt get $ \repoCtxt repo -> do let loc pkgId = RepoTarballPackage repo pkgId Nothing pkgLocs = [loc throws, loc foo] start <- getCurrentTime res <- asyncFetchPackages verbosity repoCtxt pkgLocs $ \downloadMap -> do waitAsyncFetchPackage verbosity downloadMap (loc foo) assertFaster start shortDelta case res of RepoTarballPackage repo' pkgId' _ -> do repo' @?= repo pkgId' @?= foo _ -> assertFailure $ "expected RepoTarballPackage, got " ++ show res where throws = mkPkgId "throws" foo = mkPkgId "foo" get uri = case uriPath uri of "package/throws-1.0.tar.gz" -> throwIO $ userError "failed download" "package/foo-1.0.tar.gz" -> return 200 _ -> assertFailure $ "unexpected URI: " ++ show uri assertFaster :: UTCTime -> NominalDiffTime -> Assertion assertFaster start delta = do t <- getCurrentTime assertBool ("took longer than " ++ show delta) (diffUTCTime t start < delta) mkPkgId :: String -> PackageIdentifier mkPkgId name = PackageIdentifier (mkPackageName name) (mkVersion [1, 0]) -- | Provide a repo and a repo context with the given GET handler. 
withFakeRepoCtxt :: (URI -> IO HttpCode) -> (RepoContext -> Repo -> IO a) -> IO a withFakeRepoCtxt handleGet action = withTestDir verbosity "fake repo" $ \tmpDir -> let repo = RepoRemote { repoRemote = emptyRemoteRepo $ RepoName "fake", repoLocalDir = tmpDir } repoCtxt = RepoContext { repoContextRepos = [repo], repoContextGetTransport = return httpTransport, repoContextWithSecureRepo = \_ _ -> error "fake repo ctxt: repoContextWithSecureRepo not implemented", repoContextIgnoreExpiry = error "fake repo ctxt: repoContextIgnoreExpiry not implemented" } in action repoCtxt repo where httpTransport = HttpTransport { getHttp = \_verbosity uri _etag _filepath _headers -> do code <- handleGet uri return (code, Nothing), postHttp = error "fake transport: postHttp not implemented", postHttpFile = error "fake transport: postHttpFile not implemented", putHttpFile = error "fake transport: putHttp not implemented", transportSupportsHttps = error "fake transport: transportSupportsHttps not implemented", transportManuallySelected = True } cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/FileMonitor.hs0000644000000000000000000010411707346545000024350 0ustar0000000000000000module UnitTests.Distribution.Client.FileMonitor (tests) where import Distribution.Parsec (simpleParsec) import Data.Proxy (Proxy (..)) import Control.Monad import Control.Exception import Control.Concurrent (threadDelay) import qualified Data.Set as Set import System.FilePath import qualified System.Directory as IO import Prelude hiding (writeFile) import qualified Prelude as IO (writeFile) import Distribution.Compat.Binary import Distribution.Simple.Utils (withTempDirectory) import Distribution.System (buildOS, OS (Windows)) import Distribution.Verbosity (silent) import Distribution.Client.FileMonitor import Distribution.Compat.Time import Distribution.Utils.Structured (structureHash, Structured) import GHC.Fingerprint (Fingerprint (..)) import Test.Tasty import Test.Tasty.ExpectedFailure import 
Test.Tasty.HUnit tests :: Int -> [TestTree] tests mtimeChange = [ testGroup "Structured hashes" [ testCase "MonitorStateFile" $ structureHash (Proxy :: Proxy MonitorStateFile) @?= Fingerprint 0xe4108804c34962f6 0x06e94f8fc9e48e13 , testCase "MonitorStateGlob" $ structureHash (Proxy :: Proxy MonitorStateGlob) @?= Fingerprint 0xfd8f6be0e8258fe7 0xdb5fac737139bca6 , testCase "MonitorStateFileSet" $ structureHash (Proxy :: Proxy MonitorStateFileSet) @?= Fingerprint 0xb745f4ea498389a5 0x70db6adb5078aa27 ] , testCase "sanity check mtimes" $ testFileMTimeSanity mtimeChange , testCase "sanity check dirs" $ testDirChangeSanity mtimeChange , testCase "no monitor cache" testNoMonitorCache , testCaseSteps "corrupt monitor cache" testCorruptMonitorCache , testCase "empty monitor" testEmptyMonitor , testCase "missing file" testMissingFile , testCase "change file" $ testChangedFile mtimeChange , testCase "file mtime vs content" $ testChangedFileMtimeVsContent mtimeChange , testCase "update during action" $ testUpdateDuringAction mtimeChange , testCase "remove file" testRemoveFile , testCase "non-existent file" testNonExistentFile , testCase "changed file type" $ testChangedFileType mtimeChange , testCase "several monitor kinds" $ testMultipleMonitorKinds mtimeChange , testGroup "glob matches" [ testCase "no change" testGlobNoChange , testCase "add match" $ testGlobAddMatch mtimeChange , testCase "remove match" $ testGlobRemoveMatch mtimeChange , testCase "change match" $ testGlobChangeMatch mtimeChange , testCase "add match subdir" $ testGlobAddMatchSubdir mtimeChange , testCase "remove match subdir" $ testGlobRemoveMatchSubdir mtimeChange , testCase "change match subdir" $ testGlobChangeMatchSubdir mtimeChange , testCase "match toplevel dir" $ testGlobMatchTopDir mtimeChange , testCase "add non-match" $ testGlobAddNonMatch mtimeChange , testCase "remove non-match" $ testGlobRemoveNonMatch mtimeChange , knownBrokenInWindows "See issue #3126" $ testCase "add non-match subdir" $ 
testGlobAddNonMatchSubdir mtimeChange , testCase "remove non-match subdir" $ testGlobRemoveNonMatchSubdir mtimeChange , testCase "invariant sorted 1" $ testInvariantMonitorStateGlobFiles mtimeChange , testCase "invariant sorted 2" $ testInvariantMonitorStateGlobDirs mtimeChange , testCase "match dirs" $ testGlobMatchDir mtimeChange , knownBrokenInWindows "See issue #3126" $ testCase "match dirs only" $ testGlobMatchDirOnly mtimeChange , testCase "change file type" $ testGlobChangeFileType mtimeChange , testCase "absolute paths" $ testGlobAbsolutePath mtimeChange ] , testCase "value unchanged" testValueUnchanged , testCase "value changed" testValueChanged , testCase "value & file changed" $ testValueAndFileChanged mtimeChange , testCase "value updated" testValueUpdated ] where knownBrokenInWindows msg = case buildOS of Windows -> expectFailBecause msg _ -> id -- Check the file system behaves the way we expect it to -- we rely on file mtimes having a reasonable resolution testFileMTimeSanity :: Int -> Assertion testFileMTimeSanity mtimeChange = withTempDirectory silent "." "file-status-" $ \dir -> do replicateM_ 10 $ do IO.writeFile (dir "a") "content" t1 <- getModTime (dir "a") threadDelay mtimeChange IO.writeFile (dir "a") "content" t2 <- getModTime (dir "a") assertBool "expected different file mtimes" (t2 > t1) -- We rely on directories changing mtime when entries are added or removed testDirChangeSanity :: Int -> Assertion testDirChangeSanity mtimeChange = withTempDirectory silent "." 
"dir-mtime-" $ \dir -> do expectMTimeChange dir "file add" $ IO.writeFile (dir "file") "content" expectMTimeSame dir "file content change" $ IO.writeFile (dir "file") "new content" expectMTimeChange dir "file del" $ IO.removeFile (dir "file") expectMTimeChange dir "subdir add" $ IO.createDirectory (dir "dir") expectMTimeSame dir "subdir file add" $ IO.writeFile (dir "dir" "file") "content" expectMTimeChange dir "subdir file move in" $ IO.renameFile (dir "dir" "file") (dir "file") expectMTimeChange dir "subdir file move out" $ IO.renameFile (dir "file") (dir "dir" "file") expectMTimeSame dir "subdir dir add" $ IO.createDirectory (dir "dir" "subdir") expectMTimeChange dir "subdir dir move in" $ IO.renameDirectory (dir "dir" "subdir") (dir "subdir") expectMTimeChange dir "subdir dir move out" $ IO.renameDirectory (dir "subdir") (dir "dir" "subdir") where expectMTimeChange, expectMTimeSame :: FilePath -> String -> IO () -> Assertion expectMTimeChange dir descr action = do t <- getModTime dir threadDelay mtimeChange action t' <- getModTime dir assertBool ("expected dir mtime change on " ++ descr) (t' > t) expectMTimeSame dir descr action = do t <- getModTime dir threadDelay mtimeChange action t' <- getModTime dir assertBool ("expected same dir mtime on " ++ descr) (t' == t) -- Now for the FileMonitor tests proper... 
-- first run, where we don't even call updateMonitor testNoMonitorCache :: Assertion testNoMonitorCache = withFileMonitor $ \root monitor -> do reason <- expectMonitorChanged root (monitor :: FileMonitor () ()) () reason @?= MonitorFirstRun -- write garbage into the binary cache file testCorruptMonitorCache :: (String -> IO ()) -> Assertion testCorruptMonitorCache step = withFileMonitor $ \root monitor -> do step "Writing broken file" IO.writeFile (fileMonitorCacheFile monitor) "broken" reason <- expectMonitorChanged root monitor () reason @?= MonitorCorruptCache step "Updating file monitor" updateMonitor root monitor [] () () (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [] step "Writing broken file again" IO.writeFile (fileMonitorCacheFile monitor) "broken" reason2 <- expectMonitorChanged root monitor () reason2 @?= MonitorCorruptCache -- no files to monitor testEmptyMonitor :: Assertion testEmptyMonitor = withFileMonitor $ \root monitor -> do touchFile root "a" updateMonitor root monitor [] () () touchFile root "b" (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [] -- monitor a file that is expected to exist testMissingFile :: Assertion testMissingFile = do test monitorFile touchFile "a" test monitorFileHashed touchFile "a" test monitorFile touchFile ("dir" "a") test monitorFileHashed touchFile ("dir" "a") test monitorDirectory touchDir "a" test monitorDirectory touchDir ("dir" "a") where test :: (FilePath -> MonitorFilePath) -> (RootPath -> FilePath -> IO ()) -> FilePath -> IO () test monitorKind touch file = withFileMonitor $ \root monitor -> do -- a file that doesn't exist at snapshot time is considered to have -- changed updateMonitor root monitor [monitorKind file] () () reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged file -- a file doesn't exist at snapshot time, but gets added afterwards is -- also considered to have changed updateMonitor root monitor [monitorKind 
file] () () touch root file reason2 <- expectMonitorChanged root monitor () reason2 @?= MonitoredFileChanged file testChangedFile :: Int -> Assertion testChangedFile mtimeChange = do test monitorFile touchFile touchFile "a" test monitorFileHashed touchFile touchFileContent "a" test monitorFile touchFile touchFile ("dir" "a") test monitorFileHashed touchFile touchFileContent ("dir" "a") test monitorDirectory touchDir touchDir "a" test monitorDirectory touchDir touchDir ("dir" "a") where test :: (FilePath -> MonitorFilePath) -> (RootPath -> FilePath -> IO ()) -> (RootPath -> FilePath -> IO ()) -> FilePath -> IO () test monitorKind touch touch' file = withFileMonitor $ \root monitor -> do touch root file updateMonitor root monitor [monitorKind file] () () threadDelay mtimeChange touch' root file reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged file testChangedFileMtimeVsContent :: Int -> Assertion testChangedFileMtimeVsContent mtimeChange = withFileMonitor $ \root monitor -> do -- if we don't touch the file, it's unchanged touchFile root "a" updateMonitor root monitor [monitorFile "a"] () () (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFile "a"] -- if we do touch the file, it's changed if we only consider mtime updateMonitor root monitor [monitorFile "a"] () () threadDelay mtimeChange touchFile root "a" reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged "a" -- but if we touch the file, it's unchanged if we consider content hash updateMonitor root monitor [monitorFileHashed "a"] () () threadDelay mtimeChange touchFile root "a" (res2, files2) <- expectMonitorUnchanged root monitor () res2 @?= () files2 @?= [monitorFileHashed "a"] -- finally if we change the content it's changed updateMonitor root monitor [monitorFileHashed "a"] () () threadDelay mtimeChange touchFileContent root "a" reason2 <- expectMonitorChanged root monitor () reason2 @?= MonitoredFileChanged "a" 
testUpdateDuringAction :: Int -> Assertion testUpdateDuringAction mtimeChange = do test (monitorFile "a") touchFile "a" test (monitorFileHashed "a") touchFile "a" test (monitorDirectory "a") touchDir "a" test (monitorFileGlobStr "*") touchFile "a" test (monitorFileGlobStr "*") { monitorKindDir = DirModTime } touchDir "a" where test :: MonitorFilePath -> (RootPath -> FilePath -> IO ()) -> FilePath -> IO () test monitorSpec touch file = withFileMonitor $ \root monitor -> do touch root file updateMonitor root monitor [monitorSpec] () () -- start doing an update action... threadDelay mtimeChange -- some time passes touch root file -- a file gets updates during the action threadDelay mtimeChange -- some time passes then we finish updateMonitor root monitor [monitorSpec] () () -- we don't notice this change since we took the timestamp after the -- action finished (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorSpec] -- Let's try again, this time taking the timestamp before the action timestamp' <- beginUpdateFileMonitor threadDelay mtimeChange -- some time passes touch root file -- a file gets updates during the action threadDelay mtimeChange -- some time passes then we finish updateMonitorWithTimestamp root monitor timestamp' [monitorSpec] () () -- now we do notice the change since we took the snapshot before the -- action finished reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged file testRemoveFile :: Assertion testRemoveFile = do test monitorFile touchFile removeFile "a" test monitorFileHashed touchFile removeFile "a" test monitorFile touchFile removeFile ("dir" "a") test monitorFileHashed touchFile removeFile ("dir" "a") test monitorDirectory touchDir removeDir "a" test monitorDirectory touchDir removeDir ("dir" "a") where test :: (FilePath -> MonitorFilePath) -> (RootPath -> FilePath -> IO ()) -> (RootPath -> FilePath -> IO ()) -> FilePath -> IO () test monitorKind touch remove file = withFileMonitor 
$ \root monitor -> do touch root file updateMonitor root monitor [monitorKind file] () () remove root file reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged file -- monitor a file that we expect not to exist testNonExistentFile :: Assertion testNonExistentFile = withFileMonitor $ \root monitor -> do -- a file that doesn't exist at snapshot time or check time is unchanged updateMonitor root monitor [monitorNonExistentFile "a"] () () (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorNonExistentFile "a"] -- if the file then exists it has changed touchFile root "a" reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged "a" -- if the file then exists at snapshot and check time it has changed updateMonitor root monitor [monitorNonExistentFile "a"] () () reason2 <- expectMonitorChanged root monitor () reason2 @?= MonitoredFileChanged "a" -- but if the file existed at snapshot time and doesn't exist at check time -- it is consider unchanged. This is unlike files we expect to exist, but -- that's because files that exist can have different content and actions -- can depend on that content, whereas if the action expected a file not to -- exist and it now does not, it'll give the same result, irrespective of -- the fact that the file might have existed in the meantime. 
updateMonitor root monitor [monitorNonExistentFile "a"] () () removeFile root "a" (res2, files2) <- expectMonitorUnchanged root monitor () res2 @?= () files2 @?= [monitorNonExistentFile "a"] testChangedFileType :: Int-> Assertion testChangedFileType mtimeChange = do test (monitorFile "a") touchFile removeFile createDir test (monitorFileHashed "a") touchFile removeFile createDir test (monitorDirectory "a") createDir removeDir touchFile test (monitorFileOrDirectory "a") createDir removeDir touchFile test (monitorFileGlobStr "*") { monitorKindDir = DirModTime } touchFile removeFile createDir test (monitorFileGlobStr "*") { monitorKindDir = DirModTime } createDir removeDir touchFile where test :: MonitorFilePath -> (RootPath -> String -> IO ()) -> (RootPath -> String -> IO ()) -> (RootPath -> String -> IO ()) -> IO () test monitorKind touch remove touch' = withFileMonitor $ \root monitor -> do touch root "a" updateMonitor root monitor [monitorKind] () () threadDelay mtimeChange remove root "a" touch' root "a" reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged "a" -- Monitoring the same file with two different kinds of monitor should work -- both should be kept, and both checked for changes. -- We had a bug where only one monitor kind was kept per file. 
-- https://github.com/haskell/cabal/pull/3863#issuecomment-248495178 testMultipleMonitorKinds :: Int -> Assertion testMultipleMonitorKinds mtimeChange = withFileMonitor $ \root monitor -> do touchFile root "a" updateMonitor root monitor [monitorFile "a", monitorFileHashed "a"] () () (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFile "a", monitorFileHashed "a"] threadDelay mtimeChange touchFile root "a" -- not changing content, just mtime reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged "a" createDir root "dir" updateMonitor root monitor [monitorDirectory "dir", monitorDirectoryExistence "dir"] () () (res2, files2) <- expectMonitorUnchanged root monitor () res2 @?= () files2 @?= [monitorDirectory "dir", monitorDirectoryExistence "dir"] threadDelay mtimeChange touchFile root ("dir" "a") -- changing dir mtime, not existence reason2 <- expectMonitorChanged root monitor () reason2 @?= MonitoredFileChanged "dir" ------------------ -- globs -- testGlobNoChange :: Assertion testGlobNoChange = withFileMonitor $ \root monitor -> do touchFile root ("dir" "good-a") touchFile root ("dir" "good-b") updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFileGlobStr "dir/good-*"] testGlobAddMatch :: Int -> Assertion testGlobAddMatch mtimeChange = withFileMonitor $ \root monitor -> do touchFile root ("dir" "good-a") updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFileGlobStr "dir/good-*"] threadDelay mtimeChange touchFile root ("dir" "good-b") reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged ("dir" "good-b") testGlobRemoveMatch :: Int -> Assertion testGlobRemoveMatch mtimeChange = withFileMonitor $ \root monitor -> do touchFile root ("dir" "good-a") touchFile root ("dir" "good-b") 
updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () threadDelay mtimeChange removeFile root "dir/good-a" reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged ("dir" "good-a") testGlobChangeMatch :: Int -> Assertion testGlobChangeMatch mtimeChange = withFileMonitor $ \root monitor -> do touchFile root ("dir" "good-a") touchFile root ("dir" "good-b") updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () threadDelay mtimeChange touchFile root ("dir" "good-b") (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFileGlobStr "dir/good-*"] touchFileContent root ("dir" "good-b") reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged ("dir" "good-b") testGlobAddMatchSubdir :: Int -> Assertion testGlobAddMatchSubdir mtimeChange = withFileMonitor $ \root monitor -> do touchFile root ("dir" "a" "good-a") updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () threadDelay mtimeChange touchFile root ("dir" "b" "good-b") reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged ("dir" "b" "good-b") testGlobRemoveMatchSubdir :: Int -> Assertion testGlobRemoveMatchSubdir mtimeChange = withFileMonitor $ \root monitor -> do touchFile root ("dir" "a" "good-a") touchFile root ("dir" "b" "good-b") updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () threadDelay mtimeChange removeDir root ("dir" "a") reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged ("dir" "a" "good-a") testGlobChangeMatchSubdir :: Int -> Assertion testGlobChangeMatchSubdir mtimeChange = withFileMonitor $ \root monitor -> do touchFile root ("dir" "a" "good-a") touchFile root ("dir" "b" "good-b") updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () threadDelay mtimeChange touchFile root ("dir" "b" "good-b") (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFileGlobStr "dir/*/good-*"] 
touchFileContent root "dir/b/good-b" reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged ("dir" "b" "good-b") -- check nothing goes squiffy with matching in the top dir testGlobMatchTopDir :: Int -> Assertion testGlobMatchTopDir mtimeChange = withFileMonitor $ \root monitor -> do updateMonitor root monitor [monitorFileGlobStr "*"] () () threadDelay mtimeChange touchFile root "a" reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged "a" testGlobAddNonMatch :: Int -> Assertion testGlobAddNonMatch mtimeChange = withFileMonitor $ \root monitor -> do touchFile root ("dir" "good-a") updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () threadDelay mtimeChange touchFile root ("dir" "bad") (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFileGlobStr "dir/good-*"] testGlobRemoveNonMatch :: Int -> Assertion testGlobRemoveNonMatch mtimeChange = withFileMonitor $ \root monitor -> do touchFile root ("dir" "good-a") touchFile root ("dir" "bad") updateMonitor root monitor [monitorFileGlobStr "dir/good-*"] () () threadDelay mtimeChange removeFile root "dir/bad" (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFileGlobStr "dir/good-*"] testGlobAddNonMatchSubdir :: Int -> Assertion testGlobAddNonMatchSubdir mtimeChange = withFileMonitor $ \root monitor -> do touchFile root ("dir" "a" "good-a") updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () threadDelay mtimeChange touchFile root ("dir" "b" "bad") (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFileGlobStr "dir/*/good-*"] testGlobRemoveNonMatchSubdir :: Int -> Assertion testGlobRemoveNonMatchSubdir mtimeChange = withFileMonitor $ \root monitor -> do touchFile root ("dir" "a" "good-a") touchFile root ("dir" "b" "bad") updateMonitor root monitor [monitorFileGlobStr "dir/*/good-*"] () () threadDelay mtimeChange removeDir root ("dir" "b") (res, 
files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFileGlobStr "dir/*/good-*"] -- try and tickle a bug that happens if we don't maintain the invariant that -- MonitorStateGlobFiles entries are sorted testInvariantMonitorStateGlobFiles :: Int -> Assertion testInvariantMonitorStateGlobFiles mtimeChange = withFileMonitor $ \root monitor -> do touchFile root ("dir" "a") touchFile root ("dir" "b") touchFile root ("dir" "c") touchFile root ("dir" "d") updateMonitor root monitor [monitorFileGlobStr "dir/*"] () () threadDelay mtimeChange -- so there should be no change (since we're doing content checks) -- but if we can get the dir entries to appear in the wrong order -- then if the sorted invariant is not maintained then we can fool -- the 'probeGlobStatus' into thinking there's changes removeFile root ("dir" "a") removeFile root ("dir" "b") removeFile root ("dir" "c") removeFile root ("dir" "d") touchFile root ("dir" "d") touchFile root ("dir" "c") touchFile root ("dir" "b") touchFile root ("dir" "a") (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFileGlobStr "dir/*"] -- same thing for the subdirs case testInvariantMonitorStateGlobDirs :: Int -> Assertion testInvariantMonitorStateGlobDirs mtimeChange = withFileMonitor $ \root monitor -> do touchFile root ("dir" "a" "file") touchFile root ("dir" "b" "file") touchFile root ("dir" "c" "file") touchFile root ("dir" "d" "file") updateMonitor root monitor [monitorFileGlobStr "dir/*/file"] () () threadDelay mtimeChange removeDir root ("dir" "a") removeDir root ("dir" "b") removeDir root ("dir" "c") removeDir root ("dir" "d") touchFile root ("dir" "d" "file") touchFile root ("dir" "c" "file") touchFile root ("dir" "b" "file") touchFile root ("dir" "a" "file") (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFileGlobStr "dir/*/file"] -- ensure that a glob can match a directory as well as a file testGlobMatchDir :: Int -> Assertion 
testGlobMatchDir mtimeChange = withFileMonitor $ \root monitor -> do createDir root ("dir" "a") updateMonitor root monitor [monitorFileGlobStr "dir/*"] () () threadDelay mtimeChange -- nothing changed yet (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFileGlobStr "dir/*"] -- expect dir/b to match and be detected as changed createDir root ("dir" "b") reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged ("dir" "b") -- now remove dir/a and expect it to be detected as changed updateMonitor root monitor [monitorFileGlobStr "dir/*"] () () threadDelay mtimeChange removeDir root ("dir" "a") reason2 <- expectMonitorChanged root monitor () reason2 @?= MonitoredFileChanged ("dir" "a") testGlobMatchDirOnly :: Int -> Assertion testGlobMatchDirOnly mtimeChange = withFileMonitor $ \root monitor -> do updateMonitor root monitor [monitorFileGlobStr "dir/*/"] () () threadDelay mtimeChange -- expect file dir/a to not match, so not detected as changed touchFile root ("dir" "a") (res, files) <- expectMonitorUnchanged root monitor () res @?= () files @?= [monitorFileGlobStr "dir/*/"] -- note that checking the file monitor for changes can updates the -- cached dir mtimes (when it has to record that there's new matches) -- so we need an extra mtime delay threadDelay mtimeChange -- but expect dir/b to match createDir root ("dir" "b") reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged ("dir" "b") testGlobChangeFileType :: Int -> Assertion testGlobChangeFileType mtimeChange = withFileMonitor $ \root monitor -> do -- change file to dir touchFile root ("dir" "a") updateMonitor root monitor [monitorFileGlobStr "dir/*"] () () threadDelay mtimeChange removeFile root ("dir" "a") createDir root ("dir" "a") reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged ("dir" "a") -- change dir to file updateMonitor root monitor [monitorFileGlobStr "dir/*"] () () threadDelay mtimeChange 
removeDir root ("dir" "a") touchFile root ("dir" "a") reason2 <- expectMonitorChanged root monitor () reason2 @?= MonitoredFileChanged ("dir" "a") testGlobAbsolutePath :: Int -> Assertion testGlobAbsolutePath mtimeChange = withFileMonitor $ \root monitor -> do root' <- absoluteRoot root -- absolute glob, removing a file touchFile root ("dir/good-a") touchFile root ("dir/good-b") updateMonitor root monitor [monitorFileGlobStr (root' "dir/good-*")] () () threadDelay mtimeChange removeFile root "dir/good-a" reason <- expectMonitorChanged root monitor () reason @?= MonitoredFileChanged (root' "dir" "good-a") -- absolute glob, adding a file updateMonitor root monitor [monitorFileGlobStr (root' "dir/good-*")] () () threadDelay mtimeChange touchFile root ("dir/good-a") reason2 <- expectMonitorChanged root monitor () reason2 @?= MonitoredFileChanged (root' "dir" "good-a") -- absolute glob, changing a file updateMonitor root monitor [monitorFileGlobStr (root' "dir/good-*")] () () threadDelay mtimeChange touchFileContent root "dir/good-b" reason3 <- expectMonitorChanged root monitor () reason3 @?= MonitoredFileChanged (root' "dir" "good-b") ------------------ -- value changes -- testValueUnchanged :: Assertion testValueUnchanged = withFileMonitor $ \root monitor -> do touchFile root "a" updateMonitor root monitor [monitorFile "a"] (42 :: Int) "ok" (res, files) <- expectMonitorUnchanged root monitor 42 res @?= "ok" files @?= [monitorFile "a"] testValueChanged :: Assertion testValueChanged = withFileMonitor $ \root monitor -> do touchFile root "a" updateMonitor root monitor [monitorFile "a"] (42 :: Int) "ok" reason <- expectMonitorChanged root monitor 43 reason @?= MonitoredValueChanged 42 testValueAndFileChanged :: Int -> Assertion testValueAndFileChanged mtimeChange = withFileMonitor $ \root monitor -> do touchFile root "a" -- we change the value and the file, and the value change is reported updateMonitor root monitor [monitorFile "a"] (42 :: Int) "ok" threadDelay 
mtimeChange touchFile root "a" reason <- expectMonitorChanged root monitor 43 reason @?= MonitoredValueChanged 42 -- if fileMonitorCheckIfOnlyValueChanged then if only the value changed -- then it's reported as MonitoredValueChanged let monitor' :: FileMonitor Int String monitor' = monitor { fileMonitorCheckIfOnlyValueChanged = True } updateMonitor root monitor' [monitorFile "a"] 42 "ok" reason2 <- expectMonitorChanged root monitor' 43 reason2 @?= MonitoredValueChanged 42 -- but if a file changed too then we don't report MonitoredValueChanged updateMonitor root monitor' [monitorFile "a"] 42 "ok" threadDelay mtimeChange touchFile root "a" reason3 <- expectMonitorChanged root monitor' 43 reason3 @?= MonitoredFileChanged "a" testValueUpdated :: Assertion testValueUpdated = withFileMonitor $ \root monitor -> do touchFile root "a" let monitor' :: FileMonitor (Set.Set Int) String monitor' = (monitor :: FileMonitor (Set.Set Int) String) { fileMonitorCheckIfOnlyValueChanged = True, fileMonitorKeyValid = Set.isSubsetOf } updateMonitor root monitor' [monitorFile "a"] (Set.fromList [42,43]) "ok" (res,_files) <- expectMonitorUnchanged root monitor' (Set.fromList [42]) res @?= "ok" reason <- expectMonitorChanged root monitor' (Set.fromList [42,44]) reason @?= MonitoredValueChanged (Set.fromList [42,43]) ------------- -- Utils newtype RootPath = RootPath FilePath touchFile :: RootPath -> FilePath -> IO () touchFile (RootPath root) fname = do let path = root fname IO.createDirectoryIfMissing True (takeDirectory path) IO.writeFile path "touched" touchFileContent :: RootPath -> FilePath -> IO () touchFileContent (RootPath root) fname = do let path = root fname IO.createDirectoryIfMissing True (takeDirectory path) IO.writeFile path "different" removeFile :: RootPath -> FilePath -> IO () removeFile (RootPath root) fname = IO.removeFile (root fname) touchDir :: RootPath -> FilePath -> IO () touchDir root@(RootPath rootdir) dname = do IO.createDirectoryIfMissing True (rootdir dname) 
touchFile root (dname "touch") removeFile root (dname "touch") createDir :: RootPath -> FilePath -> IO () createDir (RootPath root) dname = do let path = root dname IO.createDirectoryIfMissing True (takeDirectory path) IO.createDirectory path removeDir :: RootPath -> FilePath -> IO () removeDir (RootPath root) dname = IO.removeDirectoryRecursive (root dname) absoluteRoot :: RootPath -> IO FilePath absoluteRoot (RootPath root) = IO.canonicalizePath root monitorFileGlobStr :: String -> MonitorFilePath monitorFileGlobStr globstr | Just glob <- simpleParsec globstr = monitorFileGlob glob | otherwise = error $ "Failed to parse " ++ globstr expectMonitorChanged :: (Binary a, Structured a, Binary b, Structured b) => RootPath -> FileMonitor a b -> a -> IO (MonitorChangedReason a) expectMonitorChanged root monitor key = do res <- checkChanged root monitor key case res of MonitorChanged reason -> return reason MonitorUnchanged _ _ -> throwIO $ HUnitFailure Nothing "expected change" expectMonitorUnchanged :: (Binary a, Structured a, Binary b, Structured b) => RootPath -> FileMonitor a b -> a -> IO (b, [MonitorFilePath]) expectMonitorUnchanged root monitor key = do res <- checkChanged root monitor key case res of MonitorChanged _reason -> throwIO $ HUnitFailure Nothing "expected no change" MonitorUnchanged b files -> return (b, files) checkChanged :: (Binary a, Structured a, Binary b, Structured b) => RootPath -> FileMonitor a b -> a -> IO (MonitorChanged a b) checkChanged (RootPath root) monitor key = checkFileMonitorChanged monitor root key updateMonitor :: (Binary a, Structured a, Binary b, Structured b) => RootPath -> FileMonitor a b -> [MonitorFilePath] -> a -> b -> IO () updateMonitor (RootPath root) monitor files key result = updateFileMonitor monitor root Nothing files key result updateMonitorWithTimestamp :: (Binary a, Structured a, Binary b, Structured b) => RootPath -> FileMonitor a b -> MonitorTimestamp -> [MonitorFilePath] -> a -> b -> IO () 
updateMonitorWithTimestamp (RootPath root) monitor timestamp files key result = updateFileMonitor monitor root (Just timestamp) files key result withFileMonitor :: Eq a => (RootPath -> FileMonitor a b -> IO c) -> IO c withFileMonitor action = do withTempDirectory silent "." "file-status-" $ \root -> do let file = root <.> "monitor" monitor = newFileMonitor file finally (action (RootPath root) monitor) $ do exists <- IO.doesFileExist file when exists $ IO.removeFile file cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/GZipUtils.hs0000644000000000000000000000531707346545000024015 0ustar0000000000000000module UnitTests.Distribution.Client.GZipUtils ( tests ) where import Prelude () import Distribution.Client.Compat.Prelude import Codec.Compression.GZip as GZip import Codec.Compression.Zlib as Zlib import Control.Exception (try) import Data.ByteString as BS (null) import Data.ByteString.Lazy as BSL (pack, toChunks) import Data.ByteString.Lazy.Char8 as BSLL (pack, init, length) import Distribution.Client.GZipUtils (maybeDecompress) import Test.Tasty import Test.Tasty.HUnit import Test.Tasty.QuickCheck tests :: [TestTree] tests = [ testCase "maybeDecompress" maybeDecompressUnitTest -- "decompress plain" property is non-trivial to state, -- maybeDecompress returns input bytestring only if error occurs right at the beginning of the decompression process -- generating such input would essentially duplicate maybeDecompress implementation , testProperty "decompress zlib" prop_maybeDecompress_zlib , testProperty "decompress gzip" prop_maybeDecompress_gzip ] maybeDecompressUnitTest :: Assertion maybeDecompressUnitTest = assertBool "decompress plain" (maybeDecompress original == original) >> assertBool "decompress zlib (with show)" (show (maybeDecompress compressedZlib) == show original) >> assertBool "decompress gzip (with show)" (show (maybeDecompress compressedGZip) == show original) >> assertBool "decompress zlib" (maybeDecompress compressedZlib == original) >> 
assertBool "decompress gzip" (maybeDecompress compressedGZip == original) >> assertBool "have no empty chunks" (all (not . BS.null) . BSL.toChunks . maybeDecompress $ compressedZlib) >> (runBrokenStream >>= assertBool "decompress broken stream" . isLeft) where original = BSLL.pack "original uncompressed input" compressedZlib = Zlib.compress original compressedGZip = GZip.compress original runBrokenStream :: IO (Either SomeException ()) runBrokenStream = try . void . evaluate . BSLL.length $ maybeDecompress (BSLL.init compressedZlib <> BSLL.pack "*") prop_maybeDecompress_zlib :: [Word8] -> Property prop_maybeDecompress_zlib ws = property $ maybeDecompress compressedZlib === original where original = BSL.pack ws compressedZlib = Zlib.compress original prop_maybeDecompress_gzip :: [Word8] -> Property prop_maybeDecompress_gzip ws = property $ maybeDecompress compressedGZip === original where original = BSL.pack ws compressedGZip = GZip.compress original -- (Only available from "Data.Either" since 7.8.) 
isLeft :: Either a b -> Bool isLeft (Right _) = False isLeft (Left _) = True cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Get.hs0000644000000000000000000002132707346545000022641 0ustar0000000000000000{-# LANGUAGE ScopedTypeVariables, FlexibleContexts #-} module UnitTests.Distribution.Client.Get (tests) where import Distribution.Client.Get import Distribution.Types.PackageId import Distribution.Types.PackageName import Distribution.Types.SourceRepo (SourceRepo (..), emptySourceRepo, RepoKind (..), RepoType (..), KnownRepoType (..)) import Distribution.Client.Types.SourceRepo (SourceRepositoryPackage (..)) import Distribution.Verbosity as Verbosity import Distribution.Version import Control.Monad import Control.Exception import Data.Typeable import System.FilePath import System.Directory import System.Exit import System.IO.Error import Test.Tasty import Test.Tasty.HUnit import UnitTests.Options (RunNetworkTests (..)) import UnitTests.TempTestDir (withTestDir) tests :: [TestTree] tests = [ testGroup "forkPackages" [ testCase "no repos" testNoRepos , testCase "no repos of requested kind" testNoReposOfKind , testCase "no repo type specified" testNoRepoType , testCase "unsupported repo type" testUnsupportedRepoType , testCase "no repo location specified" testNoRepoLocation , testCase "correct repo kind selection" testSelectRepoKind , testCase "repo destination exists" testRepoDestinationExists , testCase "git fetch failure" testGitFetchFailed ] , askOption $ \(RunNetworkTests doRunNetTests) -> testGroup "forkPackages, network tests" $ includeTestsIf doRunNetTests $ [ testCase "git clone" testNetworkGitClone ] ] where includeTestsIf True xs = xs includeTestsIf False _ = [] verbosity :: Verbosity verbosity = Verbosity.silent -- for debugging try verbose pkgidfoo :: PackageId pkgidfoo = PackageIdentifier (mkPackageName "foo") (mkVersion [1,0]) -- ------------------------------------------------------------ -- * Unit tests -- 
------------------------------------------------------------ testNoRepos :: Assertion testNoRepos = do e <- assertException $ clonePackagesFromSourceRepo verbosity "." Nothing pkgrepos e @?= ClonePackageNoSourceRepos pkgidfoo where pkgrepos = [(pkgidfoo, [])] testNoReposOfKind :: Assertion testNoReposOfKind = do e <- assertException $ clonePackagesFromSourceRepo verbosity "." repokind pkgrepos e @?= ClonePackageNoSourceReposOfKind pkgidfoo repokind where pkgrepos = [(pkgidfoo, [repo])] repo = emptySourceRepo RepoHead repokind = Just RepoThis testNoRepoType :: Assertion testNoRepoType = do e <- assertException $ clonePackagesFromSourceRepo verbosity "." Nothing pkgrepos e @?= ClonePackageNoRepoType pkgidfoo repo where pkgrepos = [(pkgidfoo, [repo])] repo = emptySourceRepo RepoHead testUnsupportedRepoType :: Assertion testUnsupportedRepoType = do e <- assertException $ clonePackagesFromSourceRepo verbosity "." Nothing pkgrepos e @?= ClonePackageUnsupportedRepoType pkgidfoo repo' repotype where pkgrepos = [(pkgidfoo, [repo])] repo = (emptySourceRepo RepoHead) { repoType = Just repotype , repoLocation = Just "loc" } repo' = SourceRepositoryPackage { srpType = repotype , srpLocation = "loc" , srpTag = Nothing , srpBranch = Nothing , srpSubdir = Proxy , srpCommand = [] } repotype = OtherRepoType "baz" testNoRepoLocation :: Assertion testNoRepoLocation = do e <- assertException $ clonePackagesFromSourceRepo verbosity "." Nothing pkgrepos e @?= ClonePackageNoRepoLocation pkgidfoo repo where pkgrepos = [(pkgidfoo, [repo])] repo = (emptySourceRepo RepoHead) { repoType = Just repotype } repotype = KnownRepoType Darcs testSelectRepoKind :: Assertion testSelectRepoKind = sequence_ [ do e <- test requestedRepoType pkgrepos e @?= ClonePackageNoRepoType pkgidfoo expectedRepo e' <- test requestedRepoType (reverse pkgrepos) e' @?= ClonePackageNoRepoType pkgidfoo expectedRepo | let test rt rs = assertException $ clonePackagesFromSourceRepo verbosity "." 
rt rs , (requestedRepoType, expectedRepo) <- cases ] where pkgrepos = [(pkgidfoo, [repo1, repo2, repo3])] repo1 = emptySourceRepo RepoThis repo2 = emptySourceRepo RepoHead repo3 = emptySourceRepo (RepoKindUnknown "bar") cases = [ (Nothing, repo1) , (Just RepoThis, repo1) , (Just RepoHead, repo2) , (Just (RepoKindUnknown "bar"), repo3) ] testRepoDestinationExists :: Assertion testRepoDestinationExists = withTestDir verbosity "repos" $ \tmpdir -> do let pkgdir = tmpdir "foo" createDirectory pkgdir e1 <- assertException $ clonePackagesFromSourceRepo verbosity tmpdir Nothing pkgrepos e1 @?= ClonePackageDestinationExists pkgidfoo pkgdir True {- isdir -} removeDirectory pkgdir writeFile pkgdir "" e2 <- assertException $ clonePackagesFromSourceRepo verbosity tmpdir Nothing pkgrepos e2 @?= ClonePackageDestinationExists pkgidfoo pkgdir False {- isfile -} where pkgrepos = [(pkgidfoo, [repo])] repo = (emptySourceRepo RepoHead) { repoType = Just (KnownRepoType Darcs), repoLocation = Just "" } testGitFetchFailed :: Assertion testGitFetchFailed = withTestDir verbosity "repos" $ \tmpdir -> do let srcdir = tmpdir "src" repo = (emptySourceRepo RepoHead) { repoType = Just (KnownRepoType Git), repoLocation = Just srcdir } repo' = SourceRepositoryPackage { srpType = KnownRepoType Git , srpLocation = srcdir , srpTag = Nothing , srpBranch = Nothing , srpSubdir = Proxy , srpCommand = [] } pkgrepos = [(pkgidfoo, [repo])] e1 <- assertException $ clonePackagesFromSourceRepo verbosity tmpdir Nothing pkgrepos e1 @?= ClonePackageFailedWithExitCode pkgidfoo repo' "git" (ExitFailure 128) testNetworkGitClone :: Assertion testNetworkGitClone = withTestDir verbosity "repos" $ \tmpdir -> do let repo1 = (emptySourceRepo RepoHead) { repoType = Just (KnownRepoType Git), repoLocation = Just "https://github.com/haskell/zlib.git" } clonePackagesFromSourceRepo verbosity tmpdir Nothing [(mkpkgid "zlib1", [repo1])] assertFileContains (tmpdir "zlib1/zlib.cabal") ["name:", "zlib"] let repo2 = (emptySourceRepo 
RepoHead) { repoType = Just (KnownRepoType Git), repoLocation = Just (tmpdir "zlib1") } clonePackagesFromSourceRepo verbosity tmpdir Nothing [(mkpkgid "zlib2", [repo2])] assertFileContains (tmpdir "zlib2/zlib.cabal") ["name:", "zlib"] let repo3 = (emptySourceRepo RepoHead) { repoType = Just (KnownRepoType Git), repoLocation = Just (tmpdir "zlib1"), repoTag = Just "0.5.0.0" } clonePackagesFromSourceRepo verbosity tmpdir Nothing [(mkpkgid "zlib3", [repo3])] assertFileContains (tmpdir "zlib3/zlib.cabal") ["version:", "0.5.0.0"] where mkpkgid nm = PackageIdentifier (mkPackageName nm) (mkVersion []) -- ------------------------------------------------------------ -- * HUnit utils -- ------------------------------------------------------------ assertException :: forall e a. (Exception e, HasCallStack) => IO a -> IO e assertException action = do r <- try action case r of Left e -> return e Right _ -> assertFailure $ "expected exception of type " ++ show (typeOf (undefined :: e)) -- | Expect that one line in a file matches exactly the given words (i.e. 
at -- least insensitive to whitespace) -- assertFileContains :: HasCallStack => FilePath -> [String] -> Assertion assertFileContains file expected = do c <- readFile file `catch` \e -> if isDoesNotExistError e then assertFailure $ "expected a file to exist: " ++ file else throwIO e unless (expected `elem` map words (lines c)) $ assertFailure $ "expected the file " ++ file ++ " to contain " ++ show (take 100 expected) cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Glob.hs0000644000000000000000000000766007346545000023011 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module UnitTests.Distribution.Client.Glob (tests) where import Distribution.Client.Compat.Prelude hiding (last) import Prelude () import Distribution.Client.Glob import Distribution.Utils.Structured (structureHash) import UnitTests.Distribution.Client.ArbitraryInstances () import Test.Tasty import Test.Tasty.QuickCheck import Test.Tasty.HUnit import GHC.Fingerprint (Fingerprint (..)) tests :: [TestTree] tests = [ testProperty "print/parse roundtrip" prop_roundtrip_printparse , testCase "parse examples" testParseCases , testGroup "Structured hashes" [ testCase "GlobPiece" $ structureHash (Proxy :: Proxy GlobPiece) @?= Fingerprint 0xd5e5361866a30ea2 0x31fbfe7b58864782 , testCase "FilePathGlobRel" $ structureHash (Proxy :: Proxy FilePathGlobRel) @?= Fingerprint 0x76fa5bcb865a8501 0xb152f68915316f98 , testCase "FilePathRoot" $ structureHash (Proxy :: Proxy FilePathRoot) @?= Fingerprint 0x713373d51426ec64 0xda7376a38ecee5a5 , testCase "FilePathGlob" $ structureHash (Proxy :: Proxy FilePathGlob) @?= Fingerprint 0x3c11c41f3f03a1f0 0x96e69d85c37d0024 ] ] --TODO: [nice to have] tests for trivial globs, tests for matching, -- tests for windows style file paths prop_roundtrip_printparse :: FilePathGlob -> Property prop_roundtrip_printparse pathglob = counterexample (prettyShow pathglob) $ eitherParsec (prettyShow pathglob) === Right pathglob -- first run, where we don't 
even call updateMonitor testParseCases :: Assertion testParseCases = do FilePathGlob (FilePathRoot "/") GlobDirTrailing <- testparse "/" FilePathGlob FilePathHomeDir GlobDirTrailing <- testparse "~/" FilePathGlob (FilePathRoot "A:\\") GlobDirTrailing <- testparse "A:/" FilePathGlob (FilePathRoot "Z:\\") GlobDirTrailing <- testparse "z:/" FilePathGlob (FilePathRoot "C:\\") GlobDirTrailing <- testparse "C:\\" FilePathGlob FilePathRelative (GlobFile [Literal "_:"]) <- testparse "_:" FilePathGlob FilePathRelative (GlobFile [Literal "."]) <- testparse "." FilePathGlob FilePathRelative (GlobFile [Literal "~"]) <- testparse "~" FilePathGlob FilePathRelative (GlobDir [Literal "."] GlobDirTrailing) <- testparse "./" FilePathGlob FilePathRelative (GlobFile [Literal "foo"]) <- testparse "foo" FilePathGlob FilePathRelative (GlobDir [Literal "foo"] (GlobFile [Literal "bar"])) <- testparse "foo/bar" FilePathGlob FilePathRelative (GlobDir [Literal "foo"] (GlobDir [Literal "bar"] GlobDirTrailing)) <- testparse "foo/bar/" FilePathGlob (FilePathRoot "/") (GlobDir [Literal "foo"] (GlobDir [Literal "bar"] GlobDirTrailing)) <- testparse "/foo/bar/" FilePathGlob (FilePathRoot "C:\\") (GlobDir [Literal "foo"] (GlobDir [Literal "bar"] GlobDirTrailing)) <- testparse "C:\\foo\\bar\\" FilePathGlob FilePathRelative (GlobFile [WildCard]) <- testparse "*" FilePathGlob FilePathRelative (GlobFile [WildCard,WildCard]) <- testparse "**" -- not helpful but valid FilePathGlob FilePathRelative (GlobFile [WildCard, Literal "foo", WildCard]) <- testparse "*foo*" FilePathGlob FilePathRelative (GlobFile [Literal "foo", WildCard, Literal "bar"]) <- testparse "foo*bar" FilePathGlob FilePathRelative (GlobFile [Union [[WildCard], [Literal "foo"]]]) <- testparse "{*,foo}" parseFail "{" parseFail "}" parseFail "," parseFail "{" parseFail "{{}" parseFail "{}" parseFail "{,}" parseFail "{foo,}" parseFail "{,foo}" return () testparse :: String -> IO FilePathGlob testparse s = case eitherParsec s of Right p -> 
return p Left err -> throwIO $ HUnitFailure Nothing ("expected parse of: " ++ s ++ " -- " ++ err) parseFail :: String -> Assertion parseFail s = case eitherParsec s :: Either String FilePathGlob of Right p -> throwIO $ HUnitFailure Nothing ("expected no parse of: " ++ s ++ " -- " ++ show p) Left _ -> return () cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/IndexUtils.hs0000644000000000000000000000557207346545000024216 0ustar0000000000000000module UnitTests.Distribution.Client.IndexUtils where import Distribution.Client.IndexUtils import qualified Distribution.Compat.NonEmptySet as NES import Distribution.Simple.Utils (toUTF8LBS) import Distribution.Version import Distribution.Types.Dependency import Distribution.Types.PackageName import Distribution.Types.LibraryName import Test.Tasty import Test.Tasty.HUnit tests :: [TestTree] tests = [ simpleVersionsParserTests ] simpleVersionsParserTests :: TestTree simpleVersionsParserTests = testGroup "Simple preferred-versions Parser Tests" [ testCase "simple deprecation dependency" $ do let prefs = parsePreferredVersionsWarnings (toUTF8LBS "binary < 0.9.0.0 || > 0.9.0.0") prefs @?= [ Right (Dependency (mkPackageName "binary") (unionVersionRanges (earlierVersion $ mkVersion [0,9,0,0]) (laterVersion $ mkVersion [0,9,0,0]) ) (NES.singleton LMainLibName) ) ] , testCase "multiple deprecation dependency" $ do let prefs = parsePreferredVersionsWarnings (toUTF8LBS "binary < 0.9.0.0 || > 0.9.0.0\ncontainers == 0.6.4.1") prefs @?= [ Right (Dependency (mkPackageName "binary") (unionVersionRanges (earlierVersion $ mkVersion [0,9,0,0]) (laterVersion $ mkVersion [0,9,0,0]) ) (NES.singleton LMainLibName) ) , Right (Dependency (mkPackageName "containers") (thisVersion $ mkVersion [0,6,4,1]) (NES.singleton LMainLibName) ) ] , testCase "unparsable dependency" $ do let prefs = parsePreferredVersionsWarnings (toUTF8LBS "binary 0.9.0.0 || > 0.9.0.0") prefs @?= [ Left binaryDepParseError ] , testCase "partial parse" $ do let prefs = 
parsePreferredVersionsWarnings (toUTF8LBS "binary 0.9.0.0 || > 0.9.0.0\ncontainers == 0.6.4.1") prefs @?= [ Left binaryDepParseError , Right (Dependency (mkPackageName "containers") (thisVersion $ mkVersion [0,6,4,1]) (NES.singleton LMainLibName) ) ] ] where binaryDepParseError = PreferredVersionsParseError { preferredVersionsParsecError = mconcat [ "\"\" (line 1, column 8):\n" , "unexpected '0'\n" , "expecting space, white space, opening paren, operator or end of input" ] , preferredVersionsOriginalDependency = "binary 0.9.0.0 || > 0.9.0.0" }cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/IndexUtils/0000755000000000000000000000000007346545000023651 5ustar0000000000000000cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/IndexUtils/Timestamp.hs0000644000000000000000000000351207346545000026151 0ustar0000000000000000module UnitTests.Distribution.Client.IndexUtils.Timestamp (tests) where import Distribution.Parsec (simpleParsec) import Distribution.Pretty (prettyShow) import Data.Time import Data.Time.Clock.POSIX import Distribution.Client.IndexUtils.Timestamp import Test.Tasty import Test.Tasty.QuickCheck tests :: [TestTree] tests = [ testProperty "Timestamp1" prop_timestamp1 , testProperty "Timestamp2" prop_timestamp2 , testProperty "Timestamp3" prop_timestamp3 , testProperty "Timestamp4" prop_timestamp4 , testProperty "Timestamp5" prop_timestamp5 ] -- test unixtime format parsing prop_timestamp1 :: NonNegative Int -> Bool prop_timestamp1 (NonNegative t0) = Just t == simpleParsec ('@':show t0) where t = toEnum t0 :: Timestamp -- test prettyShow/simpleParse roundtrip prop_timestamp2 :: Int -> Bool prop_timestamp2 t0 | t /= nullTimestamp = simpleParsec (prettyShow t) == Just t | otherwise = prettyShow t == "" where t = toEnum t0 :: Timestamp -- test prettyShow against reference impl prop_timestamp3 :: Int -> Bool prop_timestamp3 t0 | t /= nullTimestamp = refDisp t == prettyShow t | otherwise = prettyShow t == "" where t = toEnum t0 :: Timestamp 
refDisp = maybe undefined (formatTime undefined "%FT%TZ") . timestampToUTCTime -- test utcTimeToTimestamp/timestampToUTCTime roundtrip prop_timestamp4 :: Int -> Bool prop_timestamp4 t0 | t /= nullTimestamp = (utcTimeToTimestamp =<< timestampToUTCTime t) == Just t | otherwise = timestampToUTCTime t == Nothing where t = toEnum t0 :: Timestamp prop_timestamp5 :: Int -> Bool prop_timestamp5 t0 | t /= nullTimestamp = timestampToUTCTime t == Just ut | otherwise = timestampToUTCTime t == Nothing where t = toEnum t0 :: Timestamp ut = posixSecondsToUTCTime (fromIntegral t0) cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Init.hs0000644000000000000000000000334007346545000023020 0ustar0000000000000000module UnitTests.Distribution.Client.Init ( tests ) where import Test.Tasty import qualified UnitTests.Distribution.Client.Init.Interactive as Interactive import qualified UnitTests.Distribution.Client.Init.NonInteractive as NonInteractive import qualified UnitTests.Distribution.Client.Init.Golden as Golden import qualified UnitTests.Distribution.Client.Init.Simple as Simple import qualified UnitTests.Distribution.Client.Init.FileCreators as FileCreators import UnitTests.Distribution.Client.Init.Utils import Distribution.Client.Config import Distribution.Client.IndexUtils import Distribution.Client.Init.Types import Distribution.Client.Sandbox import Distribution.Client.Setup import Distribution.Verbosity tests :: IO [TestTree] tests = do confFlags <- loadConfigOrSandboxConfig v defaultGlobalFlags let confFlags' = savedConfigureFlags confFlags `mappend` compFlags initFlags' = savedInitFlags confFlags `mappend` emptyFlags globalFlags' = savedGlobalFlags confFlags `mappend` defaultGlobalFlags (comp, _, progdb) <- configCompilerAux' confFlags' withRepoContext v globalFlags' $ \repoCtx -> do let pkgDb = configPackageDB' confFlags' pkgIx <- getInstalledPackages v comp pkgDb progdb srcDb <- getSourcePackages v repoCtx return [ Interactive.tests v initFlags' pkgIx srcDb , 
NonInteractive.tests v initFlags' comp pkgIx srcDb , Golden.tests v initFlags' pkgIx srcDb , Simple.tests v initFlags' pkgIx srcDb , FileCreators.tests v initFlags' comp pkgIx srcDb ] where v :: Verbosity v = normal compFlags :: ConfigFlags compFlags = mempty { configHcPath = initHcPath emptyFlags } cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Init/0000755000000000000000000000000007346545000022464 5ustar0000000000000000cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Init/FileCreators.hs0000644000000000000000000000505407346545000025406 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE OverloadedLists #-} module UnitTests.Distribution.Client.Init.FileCreators ( tests ) where import Test.Tasty import Test.Tasty.HUnit import UnitTests.Distribution.Client.Init.Utils import Distribution.Client.Init.FileCreators import Distribution.Client.Init.NonInteractive.Command import Distribution.Client.Init.Types import Distribution.Client.Types import Distribution.Simple import Distribution.Simple.Flag import Distribution.Simple.PackageIndex import Distribution.Verbosity tests :: Verbosity -> InitFlags -> Compiler -> InstalledPackageIndex -> SourcePackageDb -> TestTree tests _v _initFlags comp pkgIx srcDb = testGroup "Distribution.Client.Init.FileCreators" [ testCase "Check . 
as source directory" $ do let dummyFlags' = dummyFlags { packageType = Flag LibraryAndExecutable , minimal = Flag False , overwrite = Flag False , packageDir = Flag "/home/test/test-package" , extraDoc = Flag ["CHANGELOG.md"] , exposedModules = Flag [] , otherModules = Flag [] , otherExts = Flag [] , buildTools = Flag [] , mainIs = Flag "quxApp/Main.hs" , dependencies = Flag [] , sourceDirs = Flag ["."] } inputs = -- createProject stuff [ "True" , "[\"quxTest/Main.hs\"]" -- writeProject stuff -- writeLicense , "2021" -- writeFileSafe , "True" -- findNewPath , "False" -- writeChangeLog -- writeFileSafe , "False" -- prepareLibTarget -- writeDirectoriesSafe , "True" -- findNewPath , "False" -- prepareExeTarget -- writeDirectoriesSafe , "False" -- writeFileSafe , "False" -- prepareTestTarget -- writeDirectoriesSafe , "False" -- writeFileSafe , "False" -- writeCabalFile -- writeFileSafe , "False" ] case flip _runPrompt inputs $ do projSettings <- createProject comp silent pkgIx srcDb dummyFlags' writeProject projSettings of Left (BreakException ex) -> assertFailure $ show ex Right _ -> return () ]cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Init/Golden.hs0000644000000000000000000003556707346545000024250 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE LambdaCase #-} module UnitTests.Distribution.Client.Init.Golden ( tests ) where import Test.Tasty import Test.Tasty.Golden import Test.Tasty.HUnit import qualified Data.ByteString.Lazy.Char8 as BS8 import Data.List.NonEmpty (fromList) import Data.List.NonEmpty as NEL (NonEmpty, drop) #if __GLASGOW_HASKELL__ < 804 import Data.Semigroup ((<>)) #endif import Distribution.Client.Init.Types import Distribution.Simple.PackageIndex hiding (fromList) import Distribution.Verbosity import Distribution.Client.Types.SourcePackageDb import Distribution.Client.Init.Interactive.Command import Distribution.Client.Init.Format import Distribution.Fields.Pretty import Distribution.Types.PackageName (PackageName) import 
Distribution.Client.Init.FlagExtractors import Distribution.Simple.Flag import Distribution.CabalSpecVersion import System.FilePath import UnitTests.Distribution.Client.Init.Utils import Distribution.Client.Init.Defaults -- -------------------------------------------------------------------- -- -- golden test suite -- | Golden executable tests. -- -- We test target generation against a golden file in @tests/fixtures/init/@ for -- executables, libraries, and test targets with the following: -- -- * Empty flags, non-simple target gen, no special options -- * Empty flags, simple target gen, no special options -- * Empty flags, non-simple target gen, with generated comments (no minimal setting) -- * Empty flags, non-simple target gen, with minimal setting (no generated comments) -- * Empty flags, non-simple target gen, minimal and generated comments set. -- -- Additionally, we test whole @.cabal@ file generation for every combination -- of library, lib + tests, exe, exe + tests, exe + lib, exe + lib + tests -- and so on against the same options. 
-- tests :: Verbosity -> InitFlags -> InstalledPackageIndex -> SourcePackageDb -> TestTree tests v initFlags pkgIx srcDb = testGroup "golden" [ goldenLibTests v pkgIx pkgDir pkgName , goldenExeTests v pkgIx pkgDir pkgName , goldenTestTests v pkgIx pkgDir pkgName , goldenPkgDescTests v srcDb pkgDir pkgName , goldenCabalTests v pkgIx srcDb ] where pkgDir = evalPrompt (getPackageDir initFlags) $ fromList ["."] pkgName = evalPrompt (packageNamePrompt srcDb initFlags) $ fromList ["test-package", "y"] goldenPkgDescTests :: Verbosity -> SourcePackageDb -> FilePath -> PackageName -> TestTree goldenPkgDescTests v srcDb pkgDir pkgName = testGroup "package description golden tests" [ goldenVsString "Empty flags, non-simple, no comments" (goldenPkgDesc "pkg.golden") $ let opts = WriteOpts False False False v pkgDir Library pkgName defaultCabalVersion in runPkgDesc opts emptyFlags pkgArgs , goldenVsString "Empty flags, non-simple, with comments" (goldenPkgDesc "pkg-with-comments.golden") $ let opts = WriteOpts False False False v pkgDir Library pkgName defaultCabalVersion in runPkgDesc opts emptyFlags pkgArgs , goldenVsString "Dummy flags, with comments" (goldenPkgDesc "pkg-with-flags.golden") $ let opts = WriteOpts False False False v pkgDir Library pkgName defaultCabalVersion in runPkgDesc opts dummyFlags pkgArgs , goldenVsString "Dummy flags, old cabal version, with comments" (goldenPkgDesc "pkg-old-cabal-with-flags.golden") $ let opts = WriteOpts False False False v pkgDir Library pkgName defaultCabalVersion in runPkgDesc opts (dummyFlags {cabalVersion = Flag CabalSpecV2_0}) pkgArgs ] where runPkgDesc opts flags args = do case _runPrompt (genPkgDescription flags srcDb) args of Left e -> assertFailure $ show e Right (pkg, _) -> mkStanza $ mkPkgDescription opts pkg goldenExeTests :: Verbosity -> InstalledPackageIndex -> FilePath -> PackageName -> TestTree goldenExeTests v pkgIx pkgDir pkgName = testGroup "exe golden tests" [ goldenVsString "Empty flags, not simple, no 
options, no comments" (goldenExe "exe-no-comments.golden") $ let opts = WriteOpts False False True v pkgDir Executable pkgName defaultCabalVersion in runGoldenExe opts exeArgs emptyFlags , goldenVsString "Empty flags, not simple, with comments + no minimal" (goldenExe "exe-with-comments.golden") $ let opts = WriteOpts False False False v pkgDir Executable pkgName defaultCabalVersion in runGoldenExe opts exeArgs emptyFlags , goldenVsString "Empty flags, not simple, with minimal + no comments" (goldenExe "exe-minimal-no-comments.golden") $ let opts = WriteOpts False True True v pkgDir Executable pkgName defaultCabalVersion in runGoldenExe opts exeArgs emptyFlags , goldenVsString "Empty flags, not simple, with minimal + comments" (goldenExe "exe-simple-minimal-with-comments.golden") $ let opts = WriteOpts False True False v pkgDir Executable pkgName defaultCabalVersion in runGoldenExe opts exeArgs emptyFlags , goldenVsString "Build tools flag, not simple, with comments + no minimal" (goldenExe "exe-build-tools-with-comments.golden") $ let opts = WriteOpts False False False v pkgDir Executable pkgName defaultCabalVersion in runGoldenExe opts exeArgs (emptyFlags {buildTools = Flag ["happy"]}) ] where runGoldenExe opts args flags = case _runPrompt (genExeTarget flags pkgIx) args of Right (t, _) -> mkStanza [mkExeStanza opts $ t {_exeDependencies = mangleBaseDep t _exeDependencies}] Left e -> assertFailure $ show e goldenLibTests :: Verbosity -> InstalledPackageIndex -> FilePath -> PackageName -> TestTree goldenLibTests v pkgIx pkgDir pkgName = testGroup "lib golden tests" [ goldenVsString "Empty flags, not simple, no options, no comments" (goldenLib "lib-no-comments.golden") $ let opts = WriteOpts False False True v pkgDir Library pkgName defaultCabalVersion in runGoldenLib opts libArgs emptyFlags , goldenVsString "Empty flags, simple, no options, no comments" (goldenLib "lib-simple-no-comments.golden") $ let opts = WriteOpts False False True v pkgDir Library pkgName 
defaultCabalVersion in runGoldenLib opts libArgs emptyFlags , goldenVsString "Empty flags, not simple, with comments + no minimal" (goldenLib "lib-with-comments.golden") $ let opts = WriteOpts False False False v pkgDir Library pkgName defaultCabalVersion in runGoldenLib opts libArgs emptyFlags , goldenVsString "Empty flags, not simple, with minimal + no comments" (goldenLib "lib-minimal-no-comments.golden") $ let opts = WriteOpts False True True v pkgDir Library pkgName defaultCabalVersion in runGoldenLib opts libArgs emptyFlags , goldenVsString "Empty flags, not simple, with minimal + comments" (goldenLib "lib-simple-minimal-with-comments.golden") $ let opts = WriteOpts False True False v pkgDir Library pkgName defaultCabalVersion in runGoldenLib opts libArgs emptyFlags , goldenVsString "Build tools flag, not simple, with comments + no minimal" (goldenLib "lib-build-tools-with-comments.golden") $ let opts = WriteOpts False False False v pkgDir Library pkgName defaultCabalVersion in runGoldenLib opts libArgs (emptyFlags {buildTools = Flag ["happy"]}) ] where runGoldenLib opts args flags = case _runPrompt (genLibTarget flags pkgIx) args of Right (t, _) -> mkStanza [mkLibStanza opts $ t {_libDependencies = mangleBaseDep t _libDependencies}] Left e -> assertFailure $ show e goldenTestTests :: Verbosity -> InstalledPackageIndex -> FilePath -> PackageName -> TestTree goldenTestTests v pkgIx pkgDir pkgName = testGroup "test golden tests" [ goldenVsString "Empty flags, not simple, no options, no comments" (goldenTest "test-no-comments.golden") $ let opts = WriteOpts False False True v pkgDir Library pkgName defaultCabalVersion in runGoldenTest opts testArgs emptyFlags , goldenVsString "Empty flags, not simple, with comments + no minimal" (goldenTest "test-with-comments.golden") $ let opts = WriteOpts False False False v pkgDir Library pkgName defaultCabalVersion in runGoldenTest opts testArgs emptyFlags , goldenVsString "Empty flags, not simple, with minimal + no 
comments" (goldenTest "test-minimal-no-comments.golden") $ let opts = WriteOpts False True True v pkgDir Library pkgName defaultCabalVersion in runGoldenTest opts testArgs emptyFlags , goldenVsString "Empty flags, not simple, with minimal + comments" (goldenTest "test-simple-minimal-with-comments.golden") $ let opts = WriteOpts False True False v pkgDir Library pkgName defaultCabalVersion in runGoldenTest opts testArgs emptyFlags , goldenVsString "Build tools flag, not simple, with comments + no minimal" (goldenTest "test-build-tools-with-comments.golden") $ let opts = WriteOpts False False False v pkgDir Library pkgName defaultCabalVersion in runGoldenTest opts testArgs (emptyFlags {buildTools = Flag ["happy"]}) , goldenVsString "Standalone tests, empty flags, not simple, no options, no comments" (goldenTest "standalone-test-no-comments.golden") $ let opts = WriteOpts False False True v pkgDir TestSuite pkgName defaultCabalVersion in runGoldenTest opts testArgs emptyFlags , goldenVsString "Standalone tests, empty flags, not simple, with comments + no minimal" (goldenTest "standalone-test-with-comments.golden") $ let opts = WriteOpts False False False v pkgDir TestSuite pkgName defaultCabalVersion in runGoldenTest opts testArgs emptyFlags ] where runGoldenTest opts args flags = case _runPrompt (genTestTarget flags pkgIx) args of Left e -> assertFailure $ show e Right (Nothing, _) -> assertFailure "goldenTestTests: Tests not enabled." 
Right (Just t, _) -> mkStanza [mkTestStanza opts $ t {_testDependencies = mangleBaseDep t _testDependencies}] -- | Full cabal file golden tests goldenCabalTests :: Verbosity -> InstalledPackageIndex -> SourcePackageDb -> TestTree goldenCabalTests v pkgIx srcDb = testGroup ".cabal file golden tests" [ goldenVsString "Library and executable, empty flags, not simple, with comments + no minimal" (goldenCabal "cabal-lib-and-exe-with-comments.golden") $ runGoldenTest (fullProjArgs "Y") emptyFlags , goldenVsString "Library and executable, empty flags, not simple, no comments + no minimal" (goldenCabal "cabal-lib-and-exe-no-comments.golden") $ runGoldenTest (fullProjArgs "N") emptyFlags , goldenVsString "Library, empty flags, not simple, with comments + no minimal" (goldenCabal "cabal-lib-with-comments.golden") $ runGoldenTest (libProjArgs "Y") emptyFlags , goldenVsString "Library, empty flags, not simple, no comments + no minimal" (goldenCabal "cabal-lib-no-comments.golden") $ runGoldenTest (libProjArgs "N") emptyFlags , goldenVsString "Test suite, empty flags, not simple, with comments + no minimal" (goldenCabal "cabal-test-suite-with-comments.golden") $ runGoldenTest (testProjArgs "Y") emptyFlags , goldenVsString "Test suite, empty flags, not simple, no comments + no minimal" (goldenCabal "cabal-test-suite-no-comments.golden") $ runGoldenTest (testProjArgs "N") emptyFlags ] where runGoldenTest args flags = case _runPrompt (createProject v pkgIx srcDb flags) args of Left e -> assertFailure $ show e (Right (ProjectSettings opts pkgDesc (Just libTarget) (Just exeTarget) (Just testTarget), _)) -> do let pkgFields = mkPkgDescription opts pkgDesc commonStanza = mkCommonStanza opts libStanza = mkLibStanza opts $ libTarget {_libDependencies = mangleBaseDep libTarget _libDependencies} exeStanza = mkExeStanza opts $ exeTarget {_exeDependencies = mangleBaseDep exeTarget _exeDependencies} testStanza = mkTestStanza opts $ testTarget {_testDependencies = mangleBaseDep testTarget 
_testDependencies} mkStanza $ pkgFields ++ [commonStanza, libStanza, exeStanza, testStanza] (Right (ProjectSettings opts pkgDesc (Just libTarget) Nothing (Just testTarget), _)) -> do let pkgFields = mkPkgDescription opts pkgDesc commonStanza = mkCommonStanza opts libStanza = mkLibStanza opts $ libTarget {_libDependencies = mangleBaseDep libTarget _libDependencies} testStanza = mkTestStanza opts $ testTarget {_testDependencies = mangleBaseDep testTarget _testDependencies} mkStanza $ pkgFields ++ [commonStanza, libStanza, testStanza] (Right (ProjectSettings opts pkgDesc Nothing Nothing (Just testTarget), _)) -> do let pkgFields = mkPkgDescription opts pkgDesc commonStanza = mkCommonStanza opts testStanza = mkTestStanza opts $ testTarget {_testDependencies = mangleBaseDep testTarget _testDependencies} mkStanza $ pkgFields ++ [commonStanza, testStanza] (Right (ProjectSettings _ _ l e t, _)) -> assertFailure $ show l ++ "\n" ++ show e ++ "\n" ++ show t -- -------------------------------------------------------------------- -- -- utils mkStanza :: [PrettyField FieldAnnotation] -> IO BS8.ByteString mkStanza fields = return . 
BS8.pack $ showFields' annCommentLines postProcessFieldLines 4 fields golden :: FilePath golden = "tests" "fixtures" "init" "golden" goldenExe :: FilePath -> FilePath goldenExe file = golden "exe" file goldenTest :: FilePath -> FilePath goldenTest file = golden "test" file goldenLib :: FilePath -> FilePath goldenLib file = golden "lib" file goldenCabal :: FilePath -> FilePath goldenCabal file = golden "cabal" file goldenPkgDesc :: FilePath -> FilePath goldenPkgDesc file = golden "pkg-desc" file libArgs :: NonEmpty String libArgs = fromList ["1", "2"] exeArgs :: NonEmpty String exeArgs = fromList ["1", "2", "1"] testArgs :: NonEmpty String testArgs = fromList ["y", "1", "test", "1"] pkgArgs :: NonEmpty String pkgArgs = fromList [ "5" , "foo-package" , "y" , "0.1.0.0" , "2" , "git username" , "foo-kmett" , "git email" , "foo-kmett@kmett.kmett" , "home" , "synopsis" , "4" ] testProjArgs :: String -> NonEmpty String testProjArgs comments = fromList ["4", "n", "foo-package"] <> pkgArgs <> fromList (NEL.drop 1 testArgs) <> fromList [comments] libProjArgs :: String -> NonEmpty String libProjArgs comments = fromList ["1", "n", "foo-package"] <> pkgArgs <> libArgs <> testArgs <> fromList [comments] fullProjArgs :: String -> NonEmpty String fullProjArgs comments = fromList ["3", "n", "foo-package"] <> pkgArgs <> libArgs <> exeArgs <> testArgs <> fromList [comments] cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Init/Interactive.hs0000644000000000000000000010761307346545000025305 0ustar0000000000000000module UnitTests.Distribution.Client.Init.Interactive ( tests ) where import Prelude as P import Test.Tasty import Test.Tasty.HUnit import Distribution.Client.Init.Defaults import Distribution.Client.Init.Interactive.Command import Distribution.Client.Init.Types import qualified Distribution.SPDX as SPDX import Data.List.NonEmpty hiding (zip) import Distribution.Client.Types import Distribution.Simple.PackageIndex hiding (fromList) import 
Distribution.Types.PackageName import Distribution.Types.Version import Distribution.Verbosity import Language.Haskell.Extension import UnitTests.Distribution.Client.Init.Utils import Distribution.Client.Init.FlagExtractors import Distribution.Simple.Setup import Distribution.CabalSpecVersion import qualified Data.Set as Set import Distribution.FieldGrammar.Newtypes -- -------------------------------------------------------------------- -- -- Init Test main tests :: Verbosity -> InitFlags -> InstalledPackageIndex -> SourcePackageDb -> TestTree tests _v initFlags pkgIx srcDb = testGroup "Distribution.Client.Init.Interactive.Command.hs" [ createProjectTest pkgIx srcDb , fileCreatorTests pkgIx srcDb pkgName , interactiveTests srcDb ] where pkgName = evalPrompt (packageNamePrompt srcDb initFlags) $ fromList ["test-package", "y"] -- pkgNm = evalPrompt (getPackageName srcDb initFlags) $ fromList ["test-package", "y"] createProjectTest :: InstalledPackageIndex -> SourcePackageDb -> TestTree createProjectTest pkgIx srcDb = testGroup "createProject tests" [ testGroup "with flags" [ testCase "Check the interactive workflow" $ do let dummyFlags' = dummyFlags { packageType = Flag LibraryAndExecutable , minimal = Flag False , overwrite = Flag False , packageDir = Flag "/home/test/test-package" , extraSrc = NoFlag , exposedModules = Flag [] , otherModules = Flag [] , otherExts = Flag [] , buildTools = Flag [] , mainIs = Flag "quxApp/Main.hs" , dependencies = Flag [] } case (_runPrompt $ createProject silent pkgIx srcDb dummyFlags') (fromList ["n", "3", "quxTest/Main.hs"]) of Right (ProjectSettings opts desc (Just lib) (Just exe) (Just test), _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= True _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= LibraryAndExecutable _optPkgName opts @?= mkPackageName "QuxPackage" _pkgCabalVersion desc @?= CabalSpecV2_2 _pkgName desc @?= mkPackageName 
"QuxPackage" _pkgVersion desc @?= mkVersion [4,2,6] _pkgLicense desc @?! (SpecLicense . Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "qux.com" _pkgSynopsis desc @?= "We are Qux, and this is our package" _pkgCategory desc @?= "Control" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _libSourceDirs lib @?= ["quxSrc"] _libLanguage lib @?= Haskell98 _libExposedModules lib @?= myLibModule :| [] _libOtherModules lib @?= [] _libOtherExts lib @?= [] _libDependencies lib @?= [] _libBuildTools lib @?= [] _exeMainIs exe @?= HsFilePath "quxApp/Main.hs" Standard _exeApplicationDirs exe @?= ["quxApp"] _exeLanguage exe @?= Haskell98 _exeOtherModules exe @?= [] _exeOtherExts exe @?= [] _exeDependencies exe @?! [] _exeBuildTools exe @?= [] _testMainIs test @?= HsFilePath "quxTest/Main.hs" Standard _testDirs test @?= ["quxTest"] _testLanguage test @?= Haskell98 _testOtherModules test @?= [] _testOtherExts test @?= [] _testDependencies test @?! [] _testBuildTools test @?= [] Right (ProjectSettings _ _ lib exe test, _) -> do lib @?! Nothing exe @?! Nothing test @?! 
Nothing Left e -> assertFailure $ show e ] , testGroup "with tests" [ testCase "Check the interactive library and executable workflow" $ do let inputs = fromList -- package type [ "3" -- overwrite , "n" -- package dir , "test-package" -- package description -- cabal version , "4" -- package name , "test-package" , "test-package" -- version , "3.1.2.3" -- license , "3" -- author , "git username" , "Foobar" -- email , "git email" , "foobar@qux.com" -- homepage , "qux.com" -- synopsis , "Qux's package" -- category , "3" -- library target -- source dir , "1" -- language , "2" -- executable target -- main file , "1" -- application dir , "2" -- language , "2" -- test target , "y" -- main file , "1" -- test dir , "test" -- language , "1" -- comments , "y" ] case (_runPrompt $ createProject silent pkgIx srcDb emptyFlags) inputs of Right (ProjectSettings opts desc (Just lib) (Just exe) (Just test), _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= False _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= LibraryAndExecutable _optPkgName opts @?= mkPackageName "test-package" _pkgCabalVersion desc @?= CabalSpecV2_4 _pkgName desc @?= mkPackageName "test-package" _pkgVersion desc @?= mkVersion [3,1,2,3] _pkgLicense desc @?! (SpecLicense . Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "qux.com" _pkgSynopsis desc @?= "Qux's package" _pkgCategory desc @?= "Control" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _libSourceDirs lib @?= ["src"] _libLanguage lib @?= Haskell98 _libExposedModules lib @?= myLibModule :| [] _libOtherModules lib @?= [] _libOtherExts lib @?= [] _libDependencies lib @?! 
[] _libBuildTools lib @?= [] _exeMainIs exe @?= HsFilePath "Main.hs" Standard _exeApplicationDirs exe @?= ["exe"] _exeLanguage exe @?= Haskell98 _exeOtherModules exe @?= [] _exeOtherExts exe @?= [] _exeDependencies exe @?! [] _exeBuildTools exe @?= [] _testMainIs test @?= HsFilePath "Main.hs" Standard _testDirs test @?= ["test"] _testLanguage test @?= Haskell2010 _testOtherModules test @?= [] _testOtherExts test @?= [] _testDependencies test @?! [] _testBuildTools test @?= [] Right (ProjectSettings _ _ lib exe test, _) -> do lib @?! Nothing exe @?! Nothing test @?! Nothing Left e -> assertFailure $ show e , testCase "Check the interactive library workflow" $ do let inputs = fromList -- package type [ "1" -- overwrite , "n" -- package dir , "test-package" -- package description -- cabal version , "4" -- package name , "test-package" , "test-package" -- version , "3.1.2.3" -- license , "3" -- author , "git username" , "Foobar" -- email , "git email" , "foobar@qux.com" -- homepage , "qux.com" -- synopsis , "Qux's package" -- category , "3" -- library target -- source dir , "1" -- language , "2" -- test target , "y" -- main file , "1" -- test dir , "test" -- language , "1" -- comments , "y" ] case (_runPrompt $ createProject silent pkgIx srcDb emptyFlags) inputs of Right (ProjectSettings opts desc (Just lib) Nothing (Just test), _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= False _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= Library _optPkgName opts @?= mkPackageName "test-package" _pkgCabalVersion desc @?= CabalSpecV2_4 _pkgName desc @?= mkPackageName "test-package" _pkgVersion desc @?= mkVersion [3,1,2,3] _pkgLicense desc @?! (SpecLicense . 
Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "qux.com" _pkgSynopsis desc @?= "Qux's package" _pkgCategory desc @?= "Control" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _libSourceDirs lib @?= ["src"] _libLanguage lib @?= Haskell98 _libExposedModules lib @?= myLibModule :| [] _libOtherModules lib @?= [] _libOtherExts lib @?= [] _libDependencies lib @?! [] _libBuildTools lib @?= [] _testMainIs test @?= HsFilePath "Main.hs" Standard _testDirs test @?= ["test"] _testLanguage test @?= Haskell2010 _testOtherModules test @?= [] _testOtherExts test @?= [] _testDependencies test @?! [] _testBuildTools test @?= [] Right (ProjectSettings _ _ lib exe test, _) -> do lib @?! Nothing exe @?= Nothing test @?! Nothing Left e -> assertFailure $ show e , testCase "Check the interactive library workflow" $ do let inputs = fromList -- package type [ "4" -- overwrite , "n" -- package dir , "test-package" -- package description -- cabal version , "4" -- package name , "test-package" , "test-package" -- version , "3.1.2.3" -- license , "3" -- author , "git username" , "Foobar" -- email , "git email" , "foobar@qux.com" -- homepage , "qux.com" -- synopsis , "Qux's package" -- category , "3" -- test target -- main file , "1" -- test dir , "test" -- language , "1" -- comments , "y" ] case (_runPrompt $ createProject silent pkgIx srcDb emptyFlags) inputs of Right (ProjectSettings opts desc Nothing Nothing (Just test), _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= False _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= TestSuite _optPkgName opts @?= mkPackageName "test-package" _pkgCabalVersion desc @?= CabalSpecV2_4 _pkgName desc @?= mkPackageName "test-package" _pkgVersion desc @?= mkVersion [3,1,2,3] _pkgLicense desc @?! (SpecLicense . 
Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "qux.com" _pkgSynopsis desc @?= "Qux's package" _pkgCategory desc @?= "Control" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _testMainIs test @?= HsFilePath "Main.hs" Standard _testDirs test @?= ["test"] _testLanguage test @?= Haskell2010 _testOtherModules test @?= [] _testOtherExts test @?= [] _testDependencies test @?! [] _testBuildTools test @?= [] Right (ProjectSettings _ _ lib exe test, _) -> do lib @?= Nothing exe @?= Nothing test @?! Nothing Left e -> assertFailure $ show e ] , testGroup "without tests" [ testCase "Check the interactive library and executable workflow" $ do let inputs = fromList -- package type [ "3" -- overwrite , "n" -- package dir , "test-package" -- package description -- cabal version , "4" -- package name , "test-package" , "test-package" -- version , "3.1.2.3" -- license , "3" -- author , "git username" , "Foobar" -- email , "git email" , "foobar@qux.com" -- homepage , "qux.com" -- synopsis , "Qux's package" -- category , "3" -- library target -- source dir , "1" -- language , "2" -- executable target -- main file , "1" -- application dir , "2" -- language , "2" -- test suite , "n" -- comments , "y" ] case (_runPrompt $ createProject silent pkgIx srcDb emptyFlags) inputs of Right (ProjectSettings opts desc (Just lib) (Just exe) Nothing, _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= False _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= LibraryAndExecutable _optPkgName opts @?= mkPackageName "test-package" _pkgCabalVersion desc @?= CabalSpecV2_4 _pkgName desc @?= mkPackageName "test-package" _pkgVersion desc @?= mkVersion [3,1,2,3] _pkgLicense desc @?! (SpecLicense . 
Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "qux.com" _pkgSynopsis desc @?= "Qux's package" _pkgCategory desc @?= "Control" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _libSourceDirs lib @?= ["src"] _libLanguage lib @?= Haskell98 _libExposedModules lib @?= myLibModule :| [] _libOtherModules lib @?= [] _libOtherExts lib @?= [] _libDependencies lib @?! [] _libBuildTools lib @?= [] _exeMainIs exe @?= HsFilePath "Main.hs" Standard _exeApplicationDirs exe @?= ["exe"] _exeLanguage exe @?= Haskell98 _exeOtherModules exe @?= [] _exeOtherExts exe @?= [] _exeDependencies exe @?! [] _exeBuildTools exe @?= [] Right (ProjectSettings _ _ lib exe test, _) -> do lib @?! Nothing exe @?! Nothing test @?= Nothing Left e -> assertFailure $ show e , testCase "Check the interactive library workflow" $ do let inputs = fromList -- package type [ "1" -- overwrite , "n" -- package dir , "test-package" -- package description -- cabal version , "4" -- package name , "test-package" , "test-package" -- version , "3.1.2.3" -- license , "3" -- author , "git username" , "Foobar" -- email , "git email" , "foobar@qux.com" -- homepage , "qux.com" -- synopsis , "Qux's package" -- category , "3" -- library target -- source dir , "1" -- language , "2" -- test suite , "n" -- comments , "y" ] case (_runPrompt $ createProject silent pkgIx srcDb emptyFlags) inputs of Right (ProjectSettings opts desc (Just lib) Nothing Nothing, _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= False _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= Library _optPkgName opts @?= mkPackageName "test-package" _pkgCabalVersion desc @?= CabalSpecV2_4 _pkgName desc @?= mkPackageName "test-package" _pkgVersion desc @?= mkVersion [3,1,2,3] _pkgLicense desc @?! (SpecLicense . 
Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "qux.com" _pkgSynopsis desc @?= "Qux's package" _pkgCategory desc @?= "Control" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _libSourceDirs lib @?= ["src"] _libLanguage lib @?= Haskell98 _libExposedModules lib @?= myLibModule :| [] _libOtherModules lib @?= [] _libOtherExts lib @?= [] _libDependencies lib @?! [] _libBuildTools lib @?= [] Right (ProjectSettings _ _ lib exe test, _) -> do lib @?! Nothing exe @?= Nothing test @?= Nothing Left e -> assertFailure $ show e , testCase "Check the interactive library workflow - cabal < 1.18" $ do let inputs = fromList -- package type [ "1" -- overwrite , "n" -- package dir , "test-package" -- package description -- cabal version , "4" -- package name , "test-package" , "test-package" -- version , "3.1.2.3" -- license , "3" -- author , "git username" , "Foobar" -- email , "git email" , "foobar@qux.com" -- homepage , "qux.com" -- synopsis , "Qux's package" -- category , "3" -- library target -- source dir , "1" -- language , "2" -- test suite , "n" -- comments , "y" ] flags = emptyFlags { cabalVersion = Flag CabalSpecV1_10 , extraDoc = Flag [defaultChangelog] , extraSrc = Flag ["README.md"] } case (_runPrompt $ createProject silent pkgIx srcDb flags) inputs of Right (ProjectSettings opts desc (Just lib) Nothing Nothing, _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= False _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= Library _optPkgName opts @?= mkPackageName "test-package" _pkgCabalVersion desc @?= CabalSpecV1_10 _pkgName desc @?= mkPackageName "test-package" _pkgVersion desc @?= mkVersion [3,1,2,3] _pkgLicense desc @?! (SpecLicense . 
Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "qux.com" _pkgSynopsis desc @?= "Qux's package" _pkgCategory desc @?= "Control" _pkgExtraSrcFiles desc @?= Set.fromList [defaultChangelog, "README.md"] _pkgExtraDocFiles desc @?= Nothing _libSourceDirs lib @?= ["src"] _libLanguage lib @?= Haskell98 _libExposedModules lib @?= myLibModule :| [] _libOtherModules lib @?= [] _libOtherExts lib @?= [] _libDependencies lib @?! [] _libBuildTools lib @?= [] Right (ProjectSettings _ _ lib exe test, _) -> do lib @?! Nothing exe @?= Nothing test @?= Nothing Left e -> assertFailure $ show e , testCase "Check the interactive executable workflow" $ do let inputs = fromList -- package type [ "2" -- overwrite , "n" -- package dir , "test-package" -- package description -- cabal version , "4" -- package name , "test-package" , "test-package" -- version , "3.1.2.3" -- license , "3" -- author , "git username" , "Foobar" -- email , "git email" , "foobar@qux.com" -- homepage , "qux.com" -- synopsis , "Qux's package" -- category , "3" -- executable target -- main file , "1" -- application dir , "2" -- language , "2" -- comments , "y" ] case (_runPrompt $ createProject silent pkgIx srcDb emptyFlags) inputs of Right (ProjectSettings opts desc Nothing (Just exe) Nothing, _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= False _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= Executable _optPkgName opts @?= mkPackageName "test-package" _pkgCabalVersion desc @?= CabalSpecV2_4 _pkgName desc @?= mkPackageName "test-package" _pkgVersion desc @?= mkVersion [3,1,2,3] _pkgLicense desc @?! (SpecLicense . 
Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "qux.com" _pkgSynopsis desc @?= "Qux's package" _pkgCategory desc @?= "Control" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _exeMainIs exe @?= HsFilePath "Main.hs" Standard _exeApplicationDirs exe @?= ["exe"] _exeLanguage exe @?= Haskell98 _exeOtherModules exe @?= [] _exeOtherExts exe @?= [] _exeDependencies exe @?! [] _exeBuildTools exe @?= [] Right (ProjectSettings _ _ lib exe test, _) -> do lib @?= Nothing exe @?! Nothing test @?= Nothing Left e -> assertFailure $ show e ] ] fileCreatorTests :: InstalledPackageIndex -> SourcePackageDb -> PackageName -> TestTree fileCreatorTests pkgIx srcDb _pkgName = testGroup "generators" [ testGroup "genPkgDescription" [ testCase "Check common package flags workflow" $ do let inputs = fromList [ "1" -- pick the first cabal version in the list , "my-test-package" -- package name , "y" -- "yes to prompt internal to package name" , "0.2.0.1" -- package version , "2" -- pick the second license in the list , "git username" -- name guessed by calling "git config user.name" , "Foobar" -- author name , "git email" -- email guessed by calling "git config user.email" , "foobar@qux.com" -- maintainer email , "qux.com" -- package homepage , "Qux's package" -- package synopsis , "3" -- pick the third category in the list ] runGenTest inputs $ genPkgDescription emptyFlags srcDb ] , testGroup "genLibTarget" [ testCase "Check library package flags workflow" $ do let inputs = fromList [ "1" -- pick the first source directory in the list , "2" -- pick the second language in the list ] runGenTest inputs $ genLibTarget emptyFlags pkgIx ] , testGroup "genExeTarget" [ testCase "Check executable package flags workflow" $ do let inputs = fromList [ "1" -- pick the first main file option in the list , "2" -- pick the second application directory in the list , "1" -- pick the first language in the 
list ] runGenTest inputs $ genExeTarget emptyFlags pkgIx ] , testGroup "genTestTarget" [ testCase "Check test package flags workflow" $ do let inputs = fromList [ "y" -- say yes to tests , "1" -- pick the first main file option in the list , "test" -- package test dir , "1" -- pick the first language in the list ] runGenTest inputs $ genTestTarget emptyFlags pkgIx ] ] where runGenTest inputs go = case _runPrompt go inputs of Left e -> assertFailure $ show e Right{} -> return () interactiveTests :: SourcePackageDb -> TestTree interactiveTests srcDb = testGroup "Check top level getter functions" [ testGroup "Simple prompt tests" [ testGroup "Check packageNamePrompt output" [ testSimplePrompt "New package name 1" (packageNamePrompt srcDb) (mkPackageName "test-package") [ "test-package" , "test-package" ] , testSimplePrompt "New package name 2" (packageNamePrompt srcDb) (mkPackageName "test-package") [ "test-package" , "" ] , testSimplePrompt "Existing package name 1" (packageNamePrompt srcDb) (mkPackageName "test-package") [ "test-package" , "cabal-install" , "y" , "test-package" ] , testSimplePrompt "Existing package name 2" (packageNamePrompt srcDb) (mkPackageName "cabal-install") [ "test-package" , "cabal-install" , "n" ] ] , testGroup "Check mainFilePrompt output" [ testSimplePrompt "New valid main file" mainFilePrompt defaultMainIs [ "1" ] , testSimplePrompt "New valid other main file" mainFilePrompt (HsFilePath "Main.hs" Standard) [ "3" , "Main.hs" ] , testSimplePrompt "Invalid other main file" mainFilePrompt (HsFilePath "Main.lhs" Literate) [ "3" , "Yoink.jl" , "2" ] ] , testGroup "Check versionPrompt output" [ testSimplePrompt "Proper PVP" versionPrompt (mkVersion [0,3,1,0]) [ "0.3.1.0" ] , testSimplePrompt "No PVP" versionPrompt (mkVersion [0,3,1,0]) [ "yee-haw" , "0.3.1.0" ] ] , testGroup "Check synopsisPrompt output" [ testSimplePrompt "1" synopsisPrompt "We are Qux, and this is our package" ["We are Qux, and this is our package"] , testSimplePrompt "2" 
synopsisPrompt "Resistance is futile, you will be assimilated" ["Resistance is futile, you will be assimilated"] ] , testSimplePrompt "Check authorPrompt output (name supplied by the user)" authorPrompt "Foobar" ["git username", "Foobar"] , testSimplePrompt "Check authorPrompt output (name guessed from git config)" authorPrompt "git username" ["git username", ""] , testSimplePrompt "Check emailPrompt output (email supplied by the user)" emailPrompt "foobar@qux.com" ["git email", "foobar@qux.com"] , testSimplePrompt "Check emailPrompt output (email guessed from git config)" emailPrompt "git@email" ["git@email", ""] , testSimplePrompt "Check homepagePrompt output" homepagePrompt "qux.com" ["qux.com"] , testSimplePrompt "Check testDirsPrompt output" testDirsPrompt ["quxTest"] ["quxTest"] -- this tests 4) other, and can be used to model more inputs in case of failure , testSimplePrompt "Check srcDirsPrompt output" srcDirsPrompt ["app"] ["4", "app"] ] , testGroup "Numbered prompt tests" [ testGroup "Check categoryPrompt output" [ testNumberedPrompt "Category indices" categoryPrompt defaultCategories , testSimplePrompt "Other category" categoryPrompt "Unlisted" [ show $ P.length defaultCategories + 1 , "Unlisted" ] , testSimplePrompt "No category" categoryPrompt "" [ "" ] ] , testGroup "Check licensePrompt output" $ let other = show (1 + P.length defaultLicenseIds) in [ testNumberedPrompt "License indices" licensePrompt $ fmap (\l -> SpecLicense . Left . SPDX.License $ SPDX.ELicense (SPDX.ELicenseId l) Nothing) defaultLicenseIds , testSimplePrompt "Other license 1" licensePrompt (SpecLicense . Left $ mkLicense SPDX.CC_BY_NC_ND_4_0) [ other , "CC-BY-NC-ND-4.0" ] , testSimplePrompt "Other license 2" licensePrompt (SpecLicense . Left $ mkLicense SPDX.D_FSL_1_0) [ other , "D-FSL-1.0" ] , testSimplePrompt "Other license 3" licensePrompt (SpecLicense . 
Left $ mkLicense SPDX.NPOSL_3_0) [ other , "NPOSL-3.0" ] , testSimplePrompt "Invalid license" licensePrompt (SpecLicense $ Left SPDX.NONE) [ other , "yay" , other , "NONE" ] , testPromptBreak "Invalid index" licensePrompt [ "42" ] ] , testGroup "Check languagePrompt output" [ testNumberedPrompt "Language indices" (`languagePrompt` "test") [Haskell2010, Haskell98, GHC2021] , testSimplePrompt "Other language" (`languagePrompt` "test") (UnknownLanguage "Haskell2022") [ "4" , "Haskell2022" ] , testSimplePrompt "Invalid language" (`languagePrompt` "test") Haskell2010 [ "4" , "Lang_TS!" , "1" ] ] , testGroup "Check srcDirsPrompt output" [ testNumberedPrompt "Soruce dirs indices" srcDirsPrompt [[defaultSourceDir], ["lib"], ["src-lib"]] , testSimplePrompt "Other source dir" srcDirsPrompt ["src"] [ "4" , "src" ] ] , testGroup "Check appDirsPrompt output" [ testNumberedPrompt "App dirs indices" appDirsPrompt [[defaultApplicationDir], ["exe"], ["src-exe"]] , testSimplePrompt "Other app dir" appDirsPrompt ["app"] [ "4" , "app" ] ] , testNumberedPrompt "Check packageTypePrompt output" packageTypePrompt [Library, Executable, LibraryAndExecutable] , testNumberedPrompt "Check cabalVersionPrompt output" cabalVersionPrompt defaultCabalVersions ] , testGroup "Bool prompt tests" [ testBoolPrompt "Check noCommentsPrompt output - y" noCommentsPrompt False "y" , testBoolPrompt "Check noCommentsPrompt output - Y" noCommentsPrompt False "Y" , testBoolPrompt "Check noCommentsPrompt output - n" noCommentsPrompt True "n" , testBoolPrompt "Check noCommentsPrompt output - N" noCommentsPrompt True "N" ] ] -- -------------------------------------------------------------------- -- -- Prompt test utils testSimplePrompt :: Eq a => Show a => String -> (InitFlags -> PurePrompt a) -> a -> [String] -> TestTree testSimplePrompt label f target = testPrompt label f (assertFailure . show) (\(a,_) -> target @=? 
a) testPromptBreak :: Eq a => Show a => String -> (InitFlags -> PurePrompt a) -> [String] -> TestTree testPromptBreak label f = testPrompt label f go (assertFailure . show) where go BreakException{} = return () testPrompt :: Eq a => Show a => String -> (InitFlags -> PurePrompt a) -> (BreakException -> Assertion) -> ((a, NonEmpty String) -> Assertion) -> [String] -> TestTree testPrompt label f g h input = testCase label $ case (_runPrompt $ f emptyFlags) (fromList input) of Left x -> g x -- :: BreakException Right x -> h x -- :: (a, other inputs) testNumberedPrompt :: (Eq a, Show a) => String -> (InitFlags -> PurePrompt a) -> [a] -> TestTree testNumberedPrompt label act = testGroup label . (++ goBreak) . fmap go . indexed1 where indexed1 = zip [1 :: Int ..] mkLabel a n = "testing index " ++ show n ++ ") with: " ++ show a go (n, a) = testSimplePrompt (mkLabel a n) act a [show n] goBreak = [ testPromptBreak "testing index -1" act ["-1"] , testPromptBreak "testing index 1000" act ["1000"] ] testBoolPrompt :: String -> (InitFlags -> PurePrompt Bool) -> Bool -> String -> TestTree testBoolPrompt label act target b = testSimplePrompt label act target [b] cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Init/NonInteractive.hs0000644000000000000000000014266607346545000025767 0ustar0000000000000000module UnitTests.Distribution.Client.Init.NonInteractive ( tests ) where import Test.Tasty import Test.Tasty.HUnit import UnitTests.Distribution.Client.Init.Utils import qualified Data.List.NonEmpty as NEL import qualified Distribution.SPDX as SPDX import Distribution.Client.Init.Defaults import Distribution.Client.Init.NonInteractive.Command import Distribution.Client.Init.Types import Distribution.Client.Types import Distribution.Simple import Distribution.Simple.PackageIndex import Distribution.Verbosity import Distribution.CabalSpecVersion import Distribution.ModuleName (fromString) import Distribution.Simple.Flag import Data.List (foldl') import qualified Data.Set as 
Set import Distribution.Client.Init.Utils (mkPackageNameDep, mkStringyDep) import Distribution.FieldGrammar.Newtypes tests :: Verbosity -> InitFlags -> Compiler -> InstalledPackageIndex -> SourcePackageDb -> TestTree tests _v _initFlags comp pkgIx srcDb = testGroup "Distribution.Client.Init.NonInteractive.Command" [ testGroup "driver function test" [ driverFunctionTest pkgIx srcDb comp ] , testGroup "target creator tests" [ fileCreatorTests pkgIx srcDb comp ] , testGroup "non-interactive tests" [ nonInteractiveTests pkgIx srcDb comp ] ] driverFunctionTest :: InstalledPackageIndex -> SourcePackageDb -> Compiler -> TestTree driverFunctionTest pkgIx srcDb comp = testGroup "createProject" [ testGroup "with flags" [ testCase "Check the non-interactive workflow 1" $ do let dummyFlags' = dummyFlags { packageType = Flag LibraryAndExecutable , minimal = Flag False , overwrite = Flag False , packageDir = Flag "/home/test/test-package" , extraDoc = Flag ["CHANGELOG.md"] , exposedModules = Flag [] , otherModules = Flag [] , otherExts = Flag [] , buildTools = Flag [] , mainIs = Flag "quxApp/Main.hs" , dependencies = Flag [] } inputs = NEL.fromList [ "True" , "[\"quxTest/Main.hs\"]" ] case (_runPrompt $ createProject comp silent pkgIx srcDb dummyFlags') inputs of Right (ProjectSettings opts desc (Just lib) (Just exe) (Just test), _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= True _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= LibraryAndExecutable _optPkgName opts @?= mkPackageName "QuxPackage" _pkgCabalVersion desc @?= CabalSpecV2_2 _pkgName desc @?= mkPackageName "QuxPackage" _pkgVersion desc @?= mkVersion [4,2,6] _pkgLicense desc @?! (SpecLicense . 
Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "qux.com" _pkgSynopsis desc @?= "We are Qux, and this is our package" _pkgCategory desc @?= "Control" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _libSourceDirs lib @?= ["quxSrc"] _libLanguage lib @?= Haskell98 _libExposedModules lib @?= myLibModule NEL.:| [] _libOtherModules lib @?= [] _libOtherExts lib @?= [] _libDependencies lib @?= [] _libBuildTools lib @?= [] _exeMainIs exe @?= HsFilePath "quxApp/Main.hs" Standard _exeApplicationDirs exe @?= ["quxApp"] _exeLanguage exe @?= Haskell98 _exeOtherModules exe @?= [] _exeOtherExts exe @?= [] _exeDependencies exe @?! [] _exeBuildTools exe @?= [] _testMainIs test @?= HsFilePath "quxTest/Main.hs" Standard _testDirs test @?= ["quxTest"] _testLanguage test @?= Haskell98 _testOtherModules test @?= [] _testOtherExts test @?= [] _testDependencies test @?! [] _testBuildTools test @?= [] assertBool "The library should be a dependency of the executable" $ mkPackageNameDep (_optPkgName opts) `elem` _exeDependencies exe assertBool "The library should be a dependency of the test executable" $ mkPackageNameDep (_optPkgName opts) `elem` _testDependencies test Right (ProjectSettings _ _ lib exe test, _) -> do lib @?! Nothing exe @?! Nothing test @?! 
Nothing Left e -> assertFailure $ show e , testCase "Check the non-interactive workflow 2" $ do let dummyFlags' = dummyFlags { packageType = Flag LibraryAndExecutable , minimal = Flag False , overwrite = Flag False , packageDir = Flag "/home/test/test-package" , extraSrc = Flag [] , exposedModules = Flag [] , otherModules = NoFlag , otherExts = Flag [] , buildTools = Flag [] , mainIs = Flag "quxApp/Main.hs" , dependencies = Flag [] } inputs = NEL.fromList -- extra sources [ "[\"CHANGELOG.md\"]" -- lib other modules , "False" -- exe other modules , "False" -- test main file , "True" , "[\"quxTest/Main.hs\"]" -- test other modules , "False" ] case (_runPrompt $ createProject comp silent pkgIx srcDb dummyFlags') inputs of Right (ProjectSettings opts desc (Just lib) (Just exe) (Just test), _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= True _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= LibraryAndExecutable _optPkgName opts @?= mkPackageName "QuxPackage" _pkgCabalVersion desc @?= CabalSpecV2_2 _pkgName desc @?= mkPackageName "QuxPackage" _pkgVersion desc @?= mkVersion [4,2,6] _pkgLicense desc @?! (SpecLicense . Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "qux.com" _pkgSynopsis desc @?= "We are Qux, and this is our package" _pkgCategory desc @?= "Control" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _libSourceDirs lib @?= ["quxSrc"] _libLanguage lib @?= Haskell98 _libExposedModules lib @?= myLibModule NEL.:| [] _libOtherModules lib @?= [] _libOtherExts lib @?= [] _libDependencies lib @?= [] _libBuildTools lib @?= [] _exeMainIs exe @?= HsFilePath "quxApp/Main.hs" Standard _exeApplicationDirs exe @?= ["quxApp"] _exeLanguage exe @?= Haskell98 _exeOtherModules exe @?= [] _exeOtherExts exe @?= [] _exeDependencies exe @?! 
[] _exeBuildTools exe @?= [] _testMainIs test @?= HsFilePath "quxTest/Main.hs" Standard _testDirs test @?= ["quxTest"] _testLanguage test @?= Haskell98 _testOtherModules test @?= [] _testOtherExts test @?= [] _testDependencies test @?! [] _testBuildTools test @?= [] assertBool "The library should be a dependency of the executable" $ mkPackageNameDep (_optPkgName opts) `elem` _exeDependencies exe assertBool "The library should be a dependency of the test executable" $ mkPackageNameDep (_optPkgName opts) `elem` _testDependencies test Right (ProjectSettings _ _ lib exe test, _) -> do lib @?! Nothing exe @?! Nothing test @?! Nothing Left e -> assertFailure $ show e ] , testGroup "with tests" [ testCase "Check the non-interactive library and executable workflow" $ do let inputs = NEL.fromList -- package dir [ "test-package" -- package description -- cabal version , "cabal-install version 3.4.0.0\ncompiled using version 3.4.0.0 of the Cabal library \n" -- package name , "test-package" , "test-package" -- author name , "" , "Foobar" -- author email , "" , "foobar@qux.com" -- extra source files , "test-package" , "[]" -- library target -- source dirs , "src" , "True" -- exposed modules , "src" , "True" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "module Foo where" , "module Bar where" , "test-package" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "module Foo where" , "module Bar where" -- other modules , "test-package" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\", \"src/Baz/Internal.hs\"]" , "module Foo where" , "module Bar where" , "module Baz.Internal where" -- other extensions , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "\"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n{-# LANGUAGE RankNTypes #-}\"" , "\"{-# LANGUAGE RecordWildCards #-}\"" -- dependencies , "True" , "[\"src/Foo.hs\"]" , "True" , "test-package" , "module Main where" , "import Control.Monad.Extra" , "{-# LANGUAGE OverloadedStrings, LambdaCase #-}" -- build tools , "True" , "[\"app/Main.hs\", 
\"src/Foo.hs\", \"src/bar.y\"]" -- executable target -- application dirs , "app" , "[]" -- main file , "test-package" , "[\"test-package/app/\"]" , "True" , "[]" -- other modules , "test-package" , "True" , "[\"app/Main.hs\", \"app/Foo.hs\", \"app/Bar.hs\"]" , "module Foo where" , "module Bar where" -- other extensions , "True" , "[\"app/Foo.hs\", \"app/Bar.hs\"]" , "\"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n{-# LANGUAGE RankNTypes #-}\"" , "\"{-# LANGUAGE RecordWildCards #-}\"" -- dependencies , "True" , "[\"app/Main.hs\"]" , "True" , "test-package" , "module Main where" , "import Control.Monad.Extra" , "{-# LANGUAGE OverloadedStrings, DataKinds #-}" -- build tools , "True" , "[\"app/Main.hs\", \"src/Foo.hs\", \"src/bar.y\"]" -- test target -- main file , "True" , "[\"test-package/test/\"]" -- other modules , "test-package" , "True" , "[\"test/Main.hs\", \"test/Foo.hs\", \"test/Bar.hs\"]" , "module Foo where" , "module Bar where" -- other extensions , "True" , "[\"test/Foo.hs\", \"test/Bar.hs\"]" , "\"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n{-# LANGUAGE RankNTypes #-}\"" , "\"{-# LANGUAGE RecordWildCards #-}\"" -- dependencies , "True" , "[\"test/Main.hs\"]" , "True" , "test-package" , "module Main where" , "import Test.Tasty\nimport Test.Tasty.HUnit" , "{-# LANGUAGE OverloadedStrings, LambdaCase #-}" -- build tools , "True" , "[\"test/Main.hs\", \"test/Foo.hs\", \"test/bar.y\"]" ] case (_runPrompt $ createProject comp silent pkgIx srcDb (emptyFlags { initializeTestSuite = Flag True , packageType = Flag LibraryAndExecutable })) inputs of Right (ProjectSettings opts desc (Just lib) (Just exe) (Just test), _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= False _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= LibraryAndExecutable _optPkgName opts @?= mkPackageName "test-package" _pkgCabalVersion desc @?= CabalSpecV3_4 _pkgName desc @?= mkPackageName "test-package" 
_pkgVersion desc @?= mkVersion [0,1,0,0] _pkgLicense desc @?= (SpecLicense . Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "" _pkgSynopsis desc @?= "" _pkgCategory desc @?= "" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _libSourceDirs lib @?= ["src"] _libLanguage lib @?= Haskell2010 _libExposedModules lib @?= NEL.fromList (map fromString ["Foo", "Bar"]) _libOtherModules lib @?= map fromString ["Baz.Internal"] _libOtherExts lib @?= map EnableExtension [OverloadedStrings, LambdaCase, RankNTypes, RecordWildCards] _libDependencies lib @?! [] _libBuildTools lib @?= [mkStringyDep "happy:happy"] _exeMainIs exe @?= HsFilePath "Main.hs" Standard _exeApplicationDirs exe @?= ["app"] _exeLanguage exe @?= Haskell2010 _exeOtherModules exe @?= map fromString ["Foo", "Bar"] _exeOtherExts exe @?= map EnableExtension [OverloadedStrings, LambdaCase, RankNTypes, RecordWildCards] _exeDependencies exe @?! [] _exeBuildTools exe @?= [mkStringyDep "happy:happy"] _testMainIs test @?= HsFilePath "Main.hs" Standard _testDirs test @?= ["test"] _testLanguage test @?= Haskell2010 _testOtherModules test @?= map fromString ["Foo", "Bar"] _testOtherExts test @?= map EnableExtension [OverloadedStrings, LambdaCase, RankNTypes, RecordWildCards] _testDependencies test @?! [] _testBuildTools test @?= [mkStringyDep "happy:happy"] assertBool "The library should be a dependency of the executable" $ mkPackageNameDep (_optPkgName opts) `elem` _exeDependencies exe assertBool "The library should be a dependency of the test executable" $ mkPackageNameDep (_optPkgName opts) `elem` _testDependencies test Right (ProjectSettings _ _ lib exe test, _) -> do lib @?! Nothing exe @?! Nothing test @?! 
Nothing Left e -> assertFailure $ show e , testCase "Check the non-interactive library workflow" $ do let inputs = NEL.fromList -- package dir [ "test-package" -- package description -- cabal version , "cabal-install version 3.4.0.0\ncompiled using version 3.4.0.0 of the Cabal library \n" -- package name , "test-package" , "test-package" -- author name , "Foobar" -- author email , "foobar@qux.com" -- extra source files , "test-package" , "[]" -- library target -- source dirs , "src" , "True" -- exposed modules , "src" , "True" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "module Foo where" , "module Bar where" , "test-package" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "module Foo where" , "module Bar where" -- other modules , "test-package" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\", \"src/Baz/Internal.hs\"]" , "module Foo where" , "module Bar where" , "module Baz.Internal where" -- other extensions , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "\"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n{-# LANGUAGE RankNTypes #-}\"" , "\"{-# LANGUAGE RecordWildCards #-}\"" -- dependencies , "True" , "[\"src/Foo.hs\"]" , "True" , "test-package" , "module Main where" , "import Control.Monad.Extra" , "{-# LANGUAGE OverloadedStrings, LambdaCase #-}" -- build tools , "True" , "[\"app/Main.hs\", \"src/Foo.hs\", \"src/bar.y\"]" -- test target -- main file , "True" , "[\"test-package/test/\"]" -- other modules , "test-package" , "True" , "[\"test/Main.hs\", \"test/Foo.hs\", \"test/Bar.hs\"]" , "module Foo where" , "module Bar where" -- other extensions , "True" , "[\"test/Foo.hs\", \"test/Bar.hs\"]" , "\"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n{-# LANGUAGE RankNTypes #-}\"" , "\"{-# LANGUAGE RecordWildCards #-}\"" -- dependencies , "True" , "[\"test/Main.hs\"]" , "True" , "test-package" , "module Main where" , "import Test.Tasty\nimport Test.Tasty.HUnit" , "{-# LANGUAGE OverloadedStrings, LambdaCase #-}" -- build tools , "True" , "[\"test/Main.hs\", 
\"test/Foo.hs\", \"test/bar.y\"]" ] case (_runPrompt $ createProject comp silent pkgIx srcDb (emptyFlags { initializeTestSuite = Flag True , packageType = Flag Library })) inputs of Right (ProjectSettings opts desc (Just lib) Nothing (Just test), _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= False _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= Library _optPkgName opts @?= mkPackageName "test-package" _pkgCabalVersion desc @?= CabalSpecV3_4 _pkgName desc @?= mkPackageName "test-package" _pkgVersion desc @?= mkVersion [0,1,0,0] _pkgLicense desc @?= (SpecLicense . Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "" _pkgSynopsis desc @?= "" _pkgCategory desc @?= "" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _libSourceDirs lib @?= ["src"] _libLanguage lib @?= Haskell2010 _libExposedModules lib @?= NEL.fromList (map fromString ["Foo", "Bar"]) _libOtherModules lib @?= map fromString ["Baz.Internal"] _libOtherExts lib @?= map EnableExtension [OverloadedStrings, LambdaCase, RankNTypes, RecordWildCards] _libDependencies lib @?! [] _libBuildTools lib @?= [mkStringyDep "happy:happy"] _testMainIs test @?= HsFilePath "Main.hs" Standard _testDirs test @?= ["test"] _testLanguage test @?= Haskell2010 _testOtherModules test @?= map fromString ["Foo", "Bar"] _testOtherExts test @?= map EnableExtension [OverloadedStrings, LambdaCase, RankNTypes, RecordWildCards] _testDependencies test @?! [] _testBuildTools test @?= [mkStringyDep "happy:happy"] assertBool "The library should be a dependency of the test executable" $ mkPackageNameDep (_optPkgName opts) `elem` _testDependencies test Right (ProjectSettings _ _ lib exe test, _) -> do lib @?! Nothing exe @?= Nothing test @?! 
Nothing Left e -> assertFailure $ show e ] , testGroup "without tests" [ testCase "Check the non-interactive library and executable workflow" $ do let inputs = NEL.fromList -- package type [ "test-package" , "[\".\", \"..\", \"src/\", \"app/Main.hs\"]" , "[\".\", \"..\", \"src/\", \"app/Main.hs\"]" -- package dir , "test-package" -- package description -- cabal version , "cabal-install version 3.4.0.0\ncompiled using version 3.4.0.0 of the Cabal library \n" -- package name , "test-package" , "test-package" -- author name , "" , "Foobar" -- author email , "" , "foobar@qux.com" -- extra source files , "test-package" , "[]" -- library target -- source dirs , "src" , "True" -- exposed modules , "src" , "True" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "module Foo where" , "module Bar where" , "test-package" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "module Foo where" , "module Bar where" -- other modules , "test-package" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\", \"src/Baz/Internal.hs\"]" , "module Foo where" , "module Bar where" , "module Baz.Internal where" -- other extensions , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "\"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n{-# LANGUAGE RankNTypes #-}\"" , "\"{-# LANGUAGE RecordWildCards #-}\"" -- dependencies , "True" , "[\"src/Foo.hs\"]" , "True" , "test-package" , "module Main where" , "import Control.Monad.Extra" , "{-# LANGUAGE OverloadedStrings, LambdaCase #-}" -- build tools , "True" , "[\"app/Main.hs\", \"src/Foo.hs\", \"src/bar.y\"]" -- executable target -- application dirs , "app" , "[]" -- main file , "test-package" , "[\"test-package/app/\"]" , "True" , "[]" -- other modules , "test-package" , "True" , "[\"app/Main.hs\", \"app/Foo.hs\", \"app/Bar.hs\"]" , "module Foo where" , "module Bar where" -- other extensions , "True" , "[\"app/Foo.hs\", \"app/Bar.hs\"]" , "\"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n{-# LANGUAGE RankNTypes #-}\"" , "\"{-# LANGUAGE RecordWildCards #-}\"" -- 
dependencies , "True" , "[\"app/Main.hs\"]" , "True" , "test-package" , "module Main where" , "import Control.Monad.Extra" , "{-# LANGUAGE OverloadedStrings, DataKinds #-}" -- build tools , "True" , "[\"app/Main.hs\", \"src/Foo.hs\", \"src/bar.y\"]" ] case (_runPrompt $ createProject comp silent pkgIx srcDb emptyFlags) inputs of Right (ProjectSettings opts desc (Just lib) (Just exe) Nothing, _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= False _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= LibraryAndExecutable _optPkgName opts @?= mkPackageName "test-package" _pkgCabalVersion desc @?= CabalSpecV3_4 _pkgName desc @?= mkPackageName "test-package" _pkgVersion desc @?= mkVersion [0,1,0,0] _pkgLicense desc @?= (SpecLicense . Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "" _pkgSynopsis desc @?= "" _pkgCategory desc @?= "" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _libSourceDirs lib @?= ["src"] _libLanguage lib @?= Haskell2010 _libExposedModules lib @?= NEL.fromList (map fromString ["Foo", "Bar"]) _libOtherModules lib @?= map fromString ["Baz.Internal"] _libOtherExts lib @?= map EnableExtension [OverloadedStrings, LambdaCase, RankNTypes, RecordWildCards] _libDependencies lib @?! [] _libBuildTools lib @?= [mkStringyDep "happy:happy"] _exeMainIs exe @?= HsFilePath "Main.hs" Standard _exeApplicationDirs exe @?= ["app"] _exeLanguage exe @?= Haskell2010 _exeOtherModules exe @?= map fromString ["Foo", "Bar"] _exeOtherExts exe @?= map EnableExtension [OverloadedStrings, LambdaCase, RankNTypes, RecordWildCards] _exeDependencies exe @?! [] _exeBuildTools exe @?= [mkStringyDep "happy:happy"] assertBool "The library should be a dependency of the executable" $ mkPackageNameDep (_optPkgName opts) `elem` _exeDependencies exe Right (ProjectSettings _ _ lib exe test, _) -> do lib @?! 
Nothing exe @?! Nothing test @?= Nothing Left e -> assertFailure $ show e , testCase "Check the non-interactive library workflow" $ do let inputs = NEL.fromList -- package type [ "test-package" , "[\".\", \"..\", \"src/\"]" , "[\".\", \"..\", \"src/\"]" -- package dir , "test-package" -- package description -- cabal version , "cabal-install version 3.4.0.0\ncompiled using version 3.4.0.0 of the Cabal library \n" -- package name , "test-package" , "test-package" -- author name , "" , "Foobar" -- author email , "" , "foobar@qux.com" -- extra source files , "test-package" , "[]" -- library target -- source dirs , "src" , "True" -- exposed modules , "src" , "True" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "module Foo where" , "module Bar where" , "test-package" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "module Foo where" , "module Bar where" -- other modules , "test-package" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\", \"src/Baz/Internal.hs\"]" , "module Foo where" , "module Bar where" , "module Baz.Internal where" -- other extensions , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "\"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n{-# LANGUAGE RankNTypes #-}\"" , "\"{-# LANGUAGE RecordWildCards #-}\"" -- dependencies , "True" , "[\"src/Foo.hs\"]" , "True" , "test-package" , "module Main where" , "import Control.Monad.Extra" , "{-# LANGUAGE OverloadedStrings, LambdaCase #-}" -- build tools , "True" , "[\"app/Main.hs\", \"src/Foo.hs\", \"src/bar.y\"]" ] case (_runPrompt $ createProject comp silent pkgIx srcDb emptyFlags) inputs of Right (ProjectSettings opts desc (Just lib) Nothing Nothing, _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= False _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= Library _optPkgName opts @?= mkPackageName "test-package" _pkgCabalVersion desc @?= CabalSpecV3_4 _pkgName desc @?= mkPackageName "test-package" _pkgVersion desc @?= mkVersion 
[0,1,0,0] _pkgLicense desc @?= (SpecLicense . Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "" _pkgSynopsis desc @?= "" _pkgCategory desc @?= "" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _libSourceDirs lib @?= ["src"] _libLanguage lib @?= Haskell2010 _libExposedModules lib @?= NEL.fromList (map fromString ["Foo", "Bar"]) _libOtherModules lib @?= map fromString ["Baz.Internal"] _libOtherExts lib @?= map EnableExtension [OverloadedStrings, LambdaCase, RankNTypes, RecordWildCards] _libDependencies lib @?! [] _libBuildTools lib @?= [mkStringyDep "happy:happy"] Right (ProjectSettings _ _ lib exe test, _) -> do lib @?! Nothing exe @?= Nothing test @?= Nothing Left e -> assertFailure $ show e , testCase "Check the non-interactive executable workflow" $ do let inputs = NEL.fromList -- package type [ "test-package" , "[\".\", \"..\", \"app/Main.hs\"]" , "[\".\", \"..\", \"app/Main.hs\"]" -- package dir , "test-package" -- package description -- cabal version , "cabal-install version 3.4.0.0\ncompiled using version 3.4.0.0 of the Cabal library \n" -- package name , "test-package" , "test-package" -- author name , "" , "Foobar" -- author email , "" , "foobar@qux.com" -- extra source files , "test-package" , "[]" -- executable target -- application dirs , "app" , "[]" -- main file , "test-package" , "[\"test-package/app/\"]" , "True" , "[]" -- other modules , "test-package" , "True" , "[\"app/Main.hs\", \"app/Foo.hs\", \"app/Bar.hs\"]" , "module Foo where" , "module Bar where" -- other extensions , "True" , "[\"app/Foo.hs\", \"app/Bar.hs\"]" , "\"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n{-# LANGUAGE RankNTypes #-}\"" , "\"{-# LANGUAGE RecordWildCards #-}\"" -- dependencies , "True" , "[\"app/Main.hs\"]" , "True" , "test-package" , "module Main where" , "import Control.Monad.Extra" , "{-# LANGUAGE OverloadedStrings, DataKinds #-}" -- build tools , "True" , 
"[\"app/Main.hs\", \"src/Foo.hs\", \"src/bar.y\"]" ] case (_runPrompt $ createProject comp silent pkgIx srcDb emptyFlags) inputs of Right (ProjectSettings opts desc Nothing (Just exe) Nothing, _) -> do _optOverwrite opts @?= False _optMinimal opts @?= False _optNoComments opts @?= False _optVerbosity opts @?= silent _optPkgDir opts @?= "/home/test/test-package" _optPkgType opts @?= Executable _optPkgName opts @?= mkPackageName "test-package" _pkgCabalVersion desc @?= CabalSpecV3_4 _pkgName desc @?= mkPackageName "test-package" _pkgVersion desc @?= mkVersion [0,1,0,0] _pkgLicense desc @?= (SpecLicense . Left $ SPDX.NONE) _pkgAuthor desc @?= "Foobar" _pkgEmail desc @?= "foobar@qux.com" _pkgHomePage desc @?= "" _pkgSynopsis desc @?= "" _pkgCategory desc @?= "" _pkgExtraSrcFiles desc @?= mempty _pkgExtraDocFiles desc @?= pure (Set.singleton "CHANGELOG.md") _exeMainIs exe @?= HsFilePath "Main.hs" Standard _exeApplicationDirs exe @?= ["app"] _exeLanguage exe @?= Haskell2010 _exeOtherModules exe @?= map fromString ["Foo", "Bar"] _exeOtherExts exe @?= map EnableExtension [OverloadedStrings, LambdaCase, RankNTypes, RecordWildCards] _exeDependencies exe @?! [] _exeBuildTools exe @?= [mkStringyDep "happy:happy"] Right (ProjectSettings _ _ lib exe test, _) -> do lib @?= Nothing exe @?! 
Nothing test @?= Nothing Left e -> assertFailure $ show e ] ] fileCreatorTests :: InstalledPackageIndex -> SourcePackageDb -> Compiler -> TestTree fileCreatorTests pkgIx srcDb comp = testGroup "generators" [ testGroup "genPkgDescription" [ testCase "Check common package flags workflow" $ do let inputs = NEL.fromList -- cabal version [ "cabal-install version 2.4.0.0\ncompiled using version 2.4.0.0 of the Cabal library \n" -- package name , "test-package" , "test-package" -- author name , "" , "Foobar" -- author email , "" , "foobar@qux.com" -- extra source files , "test-package" , "[]" ] case (_runPrompt $ genPkgDescription emptyFlags srcDb) inputs of Left e -> assertFailure $ show e Right{} -> return () ] , testGroup "genLibTarget" [ testCase "Check library package flags workflow" $ do let inputs = NEL.fromList -- source dirs [ "src" , "True" -- exposed modules , "src" , "True" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "module Foo where" , "module Bar where" , "test-package" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "module Foo where" , "module Bar where" -- other modules , "test-package" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\", \"src/Baz/Internal.hs\"]" , "module Foo where" , "module Bar where" , "module Baz.Internal where" -- other extensions , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "\"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n{-# LANGUAGE RankNTypes #-}\"" , "\"{-# LANGUAGE RecordWildCards #-}\"" -- dependencies , "True" , "[\"src/Foo.hs\"]" , "True" , "test-package" , "module Main where" , "import Control.Monad.Extra" , "{-# LANGUAGE OverloadedStrings, LambdaCase #-}" -- build tools , "True" , "[\"app/Main.hs\", \"src/Foo.hs\", \"src/bar.y\"]" ] case (_runPrompt $ genLibTarget emptyFlags comp pkgIx defaultCabalVersion) inputs of Left e -> assertFailure $ show e Right{} -> return () ] , testGroup "genExeTarget" [ testCase "Check executable package flags workflow" $ do let inputs = NEL.fromList -- application dirs [ "app" , "[]" -- 
main file , "test-package" , "[\"test-package/app/\"]" , "True" , "[]" -- other modules , "test-package" , "True" , "[\"app/Main.hs\", \"app/Foo.hs\", \"app/Bar.hs\"]" , "module Foo where" , "module Bar where" -- other extensions , "True" , "[\"app/Foo.hs\", \"app/Bar.hs\"]" , "\"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n{-# LANGUAGE RankNTypes #-}\"" , "\"{-# LANGUAGE RecordWildCards #-}\"" -- dependencies , "True" , "[\"app/Main.hs\"]" , "True" , "test-package" , "module Main where" , "import Control.Monad.Extra" , "{-# LANGUAGE OverloadedStrings, LambdaCase #-}" -- build tools , "True" , "[\"app/Main.hs\", \"src/Foo.hs\", \"src/bar.y\"]" ] case (_runPrompt $ genExeTarget emptyFlags comp pkgIx defaultCabalVersion) inputs of Left e -> assertFailure $ show e Right{} -> return () ] , testGroup "genTestTarget" [ testCase "Check test package flags workflow" $ do let inputs = NEL.fromList -- main file [ "True" , "[]" -- other modules , "test-package" , "True" , "[\"test/Main.hs\", \"test/Foo.hs\", \"test/Bar.hs\"]" , "module Foo where" , "module Bar where" -- other extensions , "True" , "[\"test/Foo.hs\", \"test/Bar.hs\"]" , "\"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n{-# LANGUAGE RankNTypes #-}\"" , "\"{-# LANGUAGE RecordWildCards #-}\"" -- dependencies , "True" , "[\"test/Main.hs\"]" , "True" , "test-package" , "module Main where" , "import Test.Tasty\nimport Test.Tasty.HUnit" , "{-# LANGUAGE OverloadedStrings, LambdaCase #-}" -- build tools , "True" , "[\"test/Main.hs\", \"test/Foo.hs\", \"test/bar.y\"]" ] flags = emptyFlags {initializeTestSuite = Flag True} case (_runPrompt $ genTestTarget flags comp pkgIx defaultCabalVersion) inputs of Left e -> assertFailure $ show e Right{} -> return () ] ] nonInteractiveTests :: InstalledPackageIndex -> SourcePackageDb -> Compiler -> TestTree nonInteractiveTests pkgIx srcDb comp = testGroup "Check top level getter functions" [ testGroup "Simple heuristics tests" [ testGroup "Check packageNameHeuristics output" [ 
testSimple "New package name" (packageNameHeuristics srcDb) (mkPackageName "test-package") [ "test-package" , "test-package" ] , testSimple "Existing package name" (packageNameHeuristics srcDb) (mkPackageName "cabal-install") [ "test-package" , "cabal-install" ] ] , testSimple "Check authorHeuristics output" authorHeuristics "Foobar" [ "" , "Foobar" ] , testSimple "Check emailHeuristics output" emailHeuristics "foobar@qux.com" [ "" , "foobar@qux.com" ] , testSimple "Check srcDirsHeuristics output" srcDirsHeuristics ["src"] [ "src" , "True" ] , testSimple "Check appDirsHeuristics output" appDirsHeuristics ["app"] [ "test-package" , "[\"test-package/app/\"]" ] , testGroup "Check packageTypeHeuristics output" [ testSimple "Library" packageTypeHeuristics Library [ "test-package" , "[\".\", \"..\", \"test/Main.hs\", \"src/\"]" , "[\".\", \"..\", \"test/Main.hs\", \"src/\"]" ] , testSimple "Executable" packageTypeHeuristics Executable [ "test-package" , "[\".\", \"..\", \"app/Main.hs\"]" , "[\".\", \"..\", \"app/Main.hs\"]" ] , testSimple "Library and Executable" packageTypeHeuristics LibraryAndExecutable [ "test-package" , "[\".\", \"..\", \"src/\", \"app/Main.hs\"]" , "[\".\", \"..\", \"src/\", \"app/Main.hs\"]" ] , testSimple "TestSuite" packageTypeHeuristics TestSuite [ "test-package" , "[\".\", \"..\", \"test/Main.hs\"]" , "[\".\", \"..\", \"test/Main.hs\"]" ] ] , testGroup "Check cabalVersionHeuristics output" [ testSimple "Broken command" cabalVersionHeuristics defaultCabalVersion [""] , testSimple "Proper answer" cabalVersionHeuristics CabalSpecV2_4 ["cabal-install version 2.4.0.0\ncompiled using version 2.4.0.0 of the Cabal library \n"] ] , testGroup "Check languageHeuristics output" [ testSimple "Non GHC compiler" (`languageHeuristics` (comp {compilerId = CompilerId Helium $ mkVersion [1,8,1]})) Haskell2010 [] , testSimple "Higher version compiler" (`languageHeuristics` (comp {compilerId = CompilerId GHC $ mkVersion [8,10,4]})) Haskell2010 [] , testSimple 
"Lower version compiler" (`languageHeuristics` (comp {compilerId = CompilerId GHC $ mkVersion [6,0,1]})) Haskell98 [] ] , testGroup "Check extraDocFileHeuristics output" [ testSimple "No extra sources" extraDocFileHeuristics (pure (Set.singleton "CHANGELOG.md")) [ "test-package" , "[]" ] , testSimple "Extra doc files present" extraDocFileHeuristics (pure $ Set.singleton "README.md") [ "test-package" , "[\"README.md\"]" ] ] , testGroup "Check mainFileHeuristics output" [ testSimple "No main file defined" mainFileHeuristics (toHsFilePath "Main.hs") [ "test-package" , "[\"test-package/app/\"]" , "True" , "[]" ] , testSimple "Main file already defined" mainFileHeuristics (toHsFilePath "app/Main.hs") [ "test-package" , "[\"test-package/app/\"]" , "True" , "[\"app/Main.hs\"]" ] , testSimple "Main lhs file already defined" mainFileHeuristics (toHsFilePath "app/Main.lhs") [ "test-package" , "[\"test-package/app/\"]" , "True" , "[\"app/Main.lhs\"]" ] ] , testGroup "Check exposedModulesHeuristics output" [ testSimple "Default exposed modules" exposedModulesHeuristics (myLibModule NEL.:| []) [ "src" , "True" , "True" , "[]" , "test-package" , "True" , "[]" ] , testSimple "Contains exposed modules" exposedModulesHeuristics (NEL.fromList $ map fromString ["Foo", "Bar"]) [ "src" , "True" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "module Foo where" , "module Bar where" , "test-package" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "module Foo where" , "module Bar where" ] ] , testGroup "Check libOtherModulesHeuristics output" [ testSimple "Library directory exists" libOtherModulesHeuristics (map fromString ["Baz.Internal"]) [ "test-package" , "True" , "[\"src/Foo.hs\", \"src/Bar.hs\", \"src/Baz/Internal.hs\"]" , "module Foo where" , "module Bar where" , "module Baz.Internal where" ] , testSimple "Library directory doesn't exist" libOtherModulesHeuristics [] [ "test-package" , "False" ] ] , testGroup "Check exeOtherModulesHeuristics output" [ testSimple "Executable 
directory exists" exeOtherModulesHeuristics (map fromString ["Foo", "Bar"]) [ "test-package" , "True" , "[\"app/Main.hs\", \"app/Foo.hs\", \"app/Bar.hs\"]" , "module Foo where" , "module Bar where" ] , testSimple "Executable directory doesn't exist" exeOtherModulesHeuristics [] [ "test-package" , "False" ] ] , testGroup "Check testOtherModulesHeuristics output" [ testSimple "Test directory exists" testOtherModulesHeuristics (map fromString ["Foo", "Bar"]) [ "test-package" , "True" , "[\"test/Main.hs\", \"test/Foo.hs\", \"test/Bar.hs\"]" , "module Foo where" , "module Bar where" ] , testSimple "Test directory doesn't exist" testOtherModulesHeuristics [] [ "test-package" , "False" ] ] , testGroup "Check dependenciesHeuristics output" [ testSimple "base version bounds is correct" (fmap (flip foldl' anyVersion $ \a (Dependency n v _) -> if unPackageName n == "base" && baseVersion comp /= anyVersion then v else a) . (\x -> dependenciesHeuristics x "" pkgIx)) (baseVersion comp) [ "True" , "[]" ] ] , testSimple "Check buildToolsHeuristics output" (\a -> buildToolsHeuristics a "" defaultCabalVersion) [mkStringyDep "happy:happy"] [ "True" , "[\"app/Main.hs\", \"src/Foo.hs\", \"src/bar.y\"]" ] , testSimple "Check otherExtsHeuristics output" (`otherExtsHeuristics` "") (map EnableExtension [OverloadedStrings, LambdaCase, RankNTypes, RecordWildCards]) [ "True" , "[\"src/Foo.hs\", \"src/Bar.hs\"]" , "\"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n{-# LANGUAGE RankNTypes #-}\"" , "\"{-# LANGUAGE RecordWildCards #-}\"" ] , testSimple "Check versionHeuristics output" versionHeuristics (mkVersion [0,1,0,0]) [""] , testSimple "Check homepageHeuristics output" homepageHeuristics "" [""] , testSimple "Check synopsisHeuristics output" synopsisHeuristics "" [""] , testSimple "Check testDirsHeuristics output" testDirsHeuristics ["test"] [""] , testSimple "Check categoryHeuristics output" categoryHeuristics "" [""] , testSimple "Check minimalHeuristics output" minimalHeuristics False 
[""] , testSimple "Check overwriteHeuristics output" overwriteHeuristics False [""] , testSimple "Check initializeTestSuiteHeuristics output" initializeTestSuiteHeuristics False [""] , testSimple "Check licenseHeuristics output" licenseHeuristics (SpecLicense $ Left SPDX.NONE) [""] ] , testGroup "Bool heuristics tests" [ testBool "Check noCommentsHeuristics output" noCommentsHeuristics False "" ] ] testSimple :: Eq a => Show a => String -> (InitFlags -> PurePrompt a) -> a -> [String] -> TestTree testSimple label f target = testGo label f (assertFailure . show) (\(a, _) -> target @=? a) testBool :: String -> (InitFlags -> PurePrompt Bool) -> Bool -> String -> TestTree testBool label f target input = testSimple label f target [input] testGo :: Eq a => Show a => String -> (InitFlags -> PurePrompt a) -> (BreakException -> Assertion) -> ((a, NEL.NonEmpty String) -> Assertion) -> [String] -> TestTree testGo label f g h inputs = testCase label $ case (_runPrompt $ f emptyFlags) (NEL.fromList inputs) of Left x -> g x Right x -> h x cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Init/Simple.hs0000644000000000000000000001476507346545000024266 0ustar0000000000000000module UnitTests.Distribution.Client.Init.Simple ( tests ) where import Prelude as P import Test.Tasty import Test.Tasty.HUnit import Distribution.Client.Init.Defaults import Distribution.Client.Init.Simple import Distribution.Client.Init.Types import Data.List.NonEmpty hiding (zip) import Distribution.Client.Types import Distribution.Simple.PackageIndex hiding (fromList) import Distribution.Types.PackageName import Distribution.Verbosity import UnitTests.Distribution.Client.Init.Utils import Distribution.Simple.Setup import qualified Data.List.NonEmpty as NEL import Distribution.Types.Dependency import Distribution.Client.Init.Utils (mkPackageNameDep, getBaseDep) import qualified Data.Set as Set import Distribution.Client.Init.FlagExtractors (getCabalVersionNoPrompt) tests :: Verbosity -> InitFlags -> 
InstalledPackageIndex -> SourcePackageDb -> TestTree tests v _initFlags pkgIx srcDb = testGroup "Distribution.Client.Init.Simple.hs" [ simpleCreateProjectTests v pkgIx srcDb pkgName ] where pkgName = mkPackageName "simple-test" simpleCreateProjectTests :: Verbosity -> InstalledPackageIndex -> SourcePackageDb -> PackageName -> TestTree simpleCreateProjectTests v pkgIx srcDb pkgName = testGroup "Simple createProject tests" [ testCase "Simple lib createProject - no tests" $ do let inputs = fromList [ "1" -- package type: Library , "simple-test" -- package dir (ignored, piped to current dir due to prompt monad) , "n" -- no tests ] flags = emptyFlags { packageType = Flag Library } settings = ProjectSettings (WriteOpts False False False v "/home/test/1" Library pkgName defaultCabalVersion) (simplePkgDesc pkgName) (Just $ simpleLibTarget baseDep) Nothing Nothing case _runPrompt (createProject v pkgIx srcDb flags) inputs of Left e -> assertFailure $ "Failed to create simple lib project: " ++ show e Right (settings', _) -> settings @=? settings' , testCase "Simple lib createProject - with tests" $ do let inputs = fromList ["1", "simple-test", "y", "1"] flags = emptyFlags { packageType = Flag Library } settings = ProjectSettings (WriteOpts False False False v "/home/test/1" Library pkgName defaultCabalVersion) (simplePkgDesc pkgName) (Just $ simpleLibTarget baseDep) Nothing (Just $ simpleTestTarget (Just pkgName) baseDep) case _runPrompt (createProject v pkgIx srcDb flags) inputs of Left e -> assertFailure $ "Failed to create simple lib (with tests)project: " ++ show e Right (settings', _) -> settings @=? 
settings' , testCase "Simple exe createProject" $ do let inputs = fromList ["2", "simple-test"] flags = emptyFlags { packageType = Flag Executable } settings = ProjectSettings (WriteOpts False False False v "/home/test/2" Executable pkgName defaultCabalVersion) (simplePkgDesc pkgName) Nothing (Just $ simpleExeTarget Nothing baseDep) Nothing case _runPrompt (createProject v pkgIx srcDb flags) inputs of Left e -> assertFailure $ "Failed to create simple exe project: " ++ show e Right (settings', _) -> settings @=? settings' , testCase "Simple lib+exe createProject - no tests" $ do let inputs = fromList ["2", "simple-test", "n"] flags = emptyFlags { packageType = Flag LibraryAndExecutable } settings = ProjectSettings (WriteOpts False False False v "/home/test/2" LibraryAndExecutable pkgName defaultCabalVersion) (simplePkgDesc pkgName) (Just $ simpleLibTarget baseDep) (Just $ simpleExeTarget (Just pkgName) baseDep) Nothing case _runPrompt (createProject v pkgIx srcDb flags) inputs of Left e -> assertFailure $ "Failed to create simple lib+exe project: " ++ show e Right (settings', _) -> settings @=? settings' , testCase "Simple lib+exe createProject - with tests" $ do let inputs = fromList ["2", "simple-test", "y", "1"] flags = emptyFlags { packageType = Flag LibraryAndExecutable } settings = ProjectSettings (WriteOpts False False False v "/home/test/2" LibraryAndExecutable pkgName defaultCabalVersion) (simplePkgDesc pkgName) (Just $ simpleLibTarget baseDep) (Just $ simpleExeTarget (Just pkgName) baseDep) (Just $ simpleTestTarget (Just pkgName) baseDep) case _runPrompt (createProject v pkgIx srcDb flags) inputs of Left e -> assertFailure $ "Failed to create simple lib+exe (with tests) project: " ++ show e Right (settings', _) -> settings @=? 
settings' , testCase "Simple standalone tests" $ do let inputs = fromList ["2", "simple-test", "y", "1"] flags = emptyFlags { packageType = Flag TestSuite } settings = ProjectSettings (WriteOpts False False False v "/home/test/2" TestSuite pkgName defaultCabalVersion) (simplePkgDesc pkgName) Nothing Nothing (Just $ simpleTestTarget Nothing baseDep) case _runPrompt (createProject v pkgIx srcDb flags) inputs of Left e -> assertFailure $ "Failed to create simple standalone test project: " ++ show e Right (settings', _) -> settings @=? settings' ] where baseDep = case _runPrompt (getBaseDep pkgIx emptyFlags) $ fromList [] of Left e -> error $ show e Right a -> fst a -- -------------------------------------------------------------------- -- -- Utils mkPkgDep :: Maybe PackageName -> [Dependency] mkPkgDep Nothing = [] mkPkgDep (Just pn) = [mkPackageNameDep pn] simplePkgDesc :: PackageName -> PkgDescription simplePkgDesc pkgName = PkgDescription defaultCabalVersion pkgName defaultVersion (defaultLicense $ getCabalVersionNoPrompt dummyFlags) "" "" "" "" "" mempty (Just $ Set.singleton defaultChangelog) simpleLibTarget :: [Dependency] -> LibTarget simpleLibTarget baseDep = LibTarget [defaultSourceDir] defaultLanguage (myLibModule NEL.:| []) [] [] baseDep [] simpleExeTarget :: Maybe PackageName -> [Dependency] -> ExeTarget simpleExeTarget pn baseDep = ExeTarget defaultMainIs [defaultApplicationDir] defaultLanguage [] [] (baseDep ++ mkPkgDep pn) [] simpleTestTarget :: Maybe PackageName -> [Dependency] -> TestTarget simpleTestTarget pn baseDep = TestTarget defaultMainIs [defaultTestDir] defaultLanguage [] [] (baseDep ++ mkPkgDep pn) [] cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Init/Utils.hs0000644000000000000000000000621007346545000024117 0ustar0000000000000000module UnitTests.Distribution.Client.Init.Utils ( dummyFlags , emptyFlags , mkLicense , baseVersion , mangleBaseDep , (@?!) , (@!?) 
) where import Distribution.Client.Init.Types import qualified Distribution.SPDX as SPDX import Distribution.CabalSpecVersion import Distribution.Simple.Setup import Distribution.Types.PackageName import Distribution.Types.Version import Language.Haskell.Extension import Test.Tasty.HUnit import Distribution.Types.Dependency import Distribution.Types.VersionRange import Distribution.Simple.Compiler import Distribution.Pretty import Distribution.FieldGrammar.Newtypes -- -------------------------------------------------------------------- -- -- Test flags dummyFlags :: InitFlags dummyFlags = emptyFlags { noComments = Flag True , packageName = Flag (mkPackageName "QuxPackage") , version = Flag (mkVersion [4,2,6]) , cabalVersion = Flag CabalSpecV2_2 , license = Flag $ SpecLicense $ Left $ SPDX.License $ SPDX.ELicense (SPDX.ELicenseId SPDX.MIT) Nothing , author = Flag "Foobar" , email = Flag "foobar@qux.com" , homepage = Flag "qux.com" , synopsis = Flag "We are Qux, and this is our package" , category = Flag "Control" , language = Flag Haskell98 , initializeTestSuite = Flag True , sourceDirs = Flag ["quxSrc"] , testDirs = Flag ["quxTest"] , applicationDirs = Flag ["quxApp"] } emptyFlags :: InitFlags emptyFlags = mempty -- | Retireves the proper base version based on the GHC version baseVersion :: Compiler -> VersionRange baseVersion Compiler {compilerId = CompilerId GHC ver} = let ghcToBase = baseVersion' . 
prettyShow $ ver in if null ghcToBase then anyVersion else majorBoundVersion $ mkVersion ghcToBase baseVersion _ = anyVersion baseVersion' :: String -> [Int] baseVersion' "9.0.1" = [4,15,0,0] baseVersion' "8.10.4" = [4,14,1,0] baseVersion' "8.8.4" = [4,13,0,0] baseVersion' "8.6.5" = [4,12,0,0] baseVersion' "8.4.4" = [4,11,1,0] baseVersion' "8.2.2" = [4,10,1,0] baseVersion' "8.0.2" = [4,10,0,0] baseVersion' "7.10.3" = [4,9,0,0] baseVersion' "7.8.4" = [4,8,0,0] baseVersion' "7.6.3" = [4,7,0,0] baseVersion' _ = [] -- -------------------------------------------------------------------- -- -- Test utils mkLicense :: SPDX.LicenseId -> SPDX.License mkLicense lid = SPDX.License (SPDX.ELicense (SPDX.ELicenseId lid) Nothing) mangleBaseDep :: a -> (a -> [Dependency]) -> [Dependency] mangleBaseDep target f = [ if unPackageName x == "base" then Dependency x anyVersion z else dep | dep@(Dependency x _ z) <- f target ] infix 1 @?!, @!? -- | Just like @'@?='@, except it checks for difference rather than equality. (@?!) :: (Eq a, Show a, HasCallStack) => a -> a -> Assertion actual @?! unexpected = assertBool ("unexpected: " ++ show unexpected) (actual /= unexpected) -- | Just like @'@=?'@, except it checks for difference rather than equality. (@!?) :: (Eq a, Show a, HasCallStack) => a -> a -> Assertion (@!?) = flip (@?!) 
cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/InstallPlan.hs0000644000000000000000000002601407346545000024341 0ustar0000000000000000{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE NoMonoLocalBinds #-} {-# LANGUAGE ConstraintKinds #-} module UnitTests.Distribution.Client.InstallPlan (tests) where import Distribution.Client.Compat.Prelude import qualified Prelude as Unsafe (tail) import Distribution.Package import Distribution.Version import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.InstallPlan (GenericInstallPlan, IsUnit) import qualified Distribution.Compat.Graph as Graph import Distribution.Compat.Graph (IsNode(..)) import Distribution.Solver.Types.Settings import Distribution.Solver.Types.PackageFixedDeps import qualified Distribution.Solver.Types.ComponentDeps as CD import Distribution.Client.Types import Distribution.Client.JobControl import Data.Graph import Data.Array hiding (index) import Data.List () import Control.Monad (replicateM) import qualified Data.Map as Map import qualified Data.Set as Set import Data.IORef import Control.Concurrent (threadDelay) import System.Random import Test.QuickCheck import Test.Tasty import Test.Tasty.QuickCheck tests :: [TestTree] tests = [ testProperty "reverseTopologicalOrder" prop_reverseTopologicalOrder , testProperty "executionOrder" prop_executionOrder , testProperty "execute serial" prop_execute_serial , testProperty "execute parallel" prop_execute_parallel , testProperty "execute/executionOrder" prop_execute_vs_executionOrder ] prop_reverseTopologicalOrder :: TestInstallPlan -> Bool prop_reverseTopologicalOrder (TestInstallPlan plan graph toVertex _) = isReverseTopologicalOrder graph (map (toVertex . 
installedUnitId) (InstallPlan.reverseTopologicalOrder plan)) -- | @executionOrder@ is in reverse topological order prop_executionOrder :: TestInstallPlan -> Bool prop_executionOrder (TestInstallPlan plan graph toVertex _) = isReversePartialTopologicalOrder graph (map toVertex pkgids) && allConfiguredPackages plan == Set.fromList pkgids where pkgids = map installedUnitId (InstallPlan.executionOrder plan) -- | @execute@ is in reverse topological order prop_execute_serial :: TestInstallPlan -> Property prop_execute_serial tplan@(TestInstallPlan plan graph toVertex _) = ioProperty $ do jobCtl <- newSerialJobControl pkgids <- executeTestInstallPlan jobCtl tplan (\_ -> return ()) return $ isReversePartialTopologicalOrder graph (map toVertex pkgids) && allConfiguredPackages plan == Set.fromList pkgids prop_execute_parallel :: Positive (Small Int) -> TestInstallPlan -> Property prop_execute_parallel (Positive (Small maxJobLimit)) tplan@(TestInstallPlan plan graph toVertex _) = ioProperty $ do jobCtl <- newParallelJobControl maxJobLimit pkgids <- executeTestInstallPlan jobCtl tplan $ \_ -> do delay <- randomRIO (0,1000) threadDelay delay return $ isReversePartialTopologicalOrder graph (map toVertex pkgids) && allConfiguredPackages plan == Set.fromList pkgids -- | return the packages that are visited by execute, in order. 
executeTestInstallPlan :: JobControl IO (UnitId, Either () ()) -> TestInstallPlan -> (TestPkg -> IO ()) -> IO [UnitId] executeTestInstallPlan jobCtl (TestInstallPlan plan _ _ _) visit = do resultsRef <- newIORef [] _ <- InstallPlan.execute jobCtl False (const ()) plan $ \(ReadyPackage pkg) -> do visit pkg atomicModifyIORef resultsRef $ \pkgs -> (installedUnitId pkg:pkgs, ()) return (Right ()) fmap reverse (readIORef resultsRef) -- | @execute@ visits the packages in the same order as @executionOrder@ prop_execute_vs_executionOrder :: TestInstallPlan -> Property prop_execute_vs_executionOrder tplan@(TestInstallPlan plan _ _ _) = ioProperty $ do jobCtl <- newSerialJobControl pkgids <- executeTestInstallPlan jobCtl tplan (\_ -> return ()) let pkgids' = map installedUnitId (InstallPlan.executionOrder plan) return (pkgids == pkgids') -------------------------- -- Property helper utils -- -- | A graph topological ordering is a linear ordering of its vertices such -- that for every directed edge uv from vertex u to vertex v, u comes before v -- in the ordering. -- -- A reverse topological ordering is the swapped: for every directed edge uv -- from vertex u to vertex v, v comes before u in the ordering. -- isReverseTopologicalOrder :: Graph -> [Vertex] -> Bool isReverseTopologicalOrder g vs = and [ ixs ! u > ixs ! v | let ixs = array (bounds g) (zip vs [0::Int ..]) , (u,v) <- edges g ] isReversePartialTopologicalOrder :: Graph -> [Vertex] -> Bool isReversePartialTopologicalOrder g vs = and [ case (ixs ! u, ixs ! 
v) of (Just ixu, Just ixv) -> ixu > ixv _ -> True | let ixs = array (bounds g) (zip (range (bounds g)) (repeat Nothing) ++ zip vs (map Just [0::Int ..])) , (u,v) <- edges g ] allConfiguredPackages :: HasUnitId srcpkg => GenericInstallPlan ipkg srcpkg -> Set UnitId allConfiguredPackages plan = Set.fromList [ installedUnitId pkg | InstallPlan.Configured pkg <- InstallPlan.toList plan ] -------------------- -- Test generators -- data TestInstallPlan = TestInstallPlan (GenericInstallPlan TestPkg TestPkg) Graph (UnitId -> Vertex) (Vertex -> UnitId) instance Show TestInstallPlan where show (TestInstallPlan plan _ _ _) = InstallPlan.showInstallPlan plan data TestPkg = TestPkg PackageId UnitId [UnitId] deriving (Eq, Show) instance IsNode TestPkg where type Key TestPkg = UnitId nodeKey (TestPkg _ ipkgid _) = ipkgid nodeNeighbors (TestPkg _ _ deps) = deps instance Package TestPkg where packageId (TestPkg pkgid _ _) = pkgid instance HasUnitId TestPkg where installedUnitId (TestPkg _ ipkgid _) = ipkgid instance PackageFixedDeps TestPkg where depends (TestPkg _ _ deps) = CD.singleton CD.ComponentLib deps instance PackageInstalled TestPkg where installedDepends (TestPkg _ _ deps) = deps instance Arbitrary TestInstallPlan where arbitrary = arbitraryTestInstallPlan arbitraryTestInstallPlan :: Gen TestInstallPlan arbitraryTestInstallPlan = do graph <- arbitraryAcyclicGraph (choose (2,5)) (choose (1,5)) 0.3 plan <- arbitraryInstallPlan mkTestPkg mkTestPkg 0.5 graph let toVertexMap = Map.fromList [ (mkUnitIdV v, v) | v <- vertices graph ] fromVertexMap = Map.fromList [ (v, mkUnitIdV v) | v <- vertices graph ] toVertex = (toVertexMap Map.!) fromVertex = (fromVertexMap Map.!) return (TestInstallPlan plan graph toVertex fromVertex) where mkTestPkg pkgv depvs = return (TestPkg pkgid ipkgid deps) where pkgid = mkPkgId pkgv ipkgid = mkUnitIdV pkgv deps = map mkUnitIdV depvs mkUnitIdV = mkUnitId . 
show mkPkgId v = PackageIdentifier (mkPackageName ("pkg" ++ show v)) (mkVersion [1]) -- | Generate a random 'InstallPlan' following the structure of an existing -- 'Graph'. -- -- It takes generators for installed and source packages and the chance that -- each package is installed (for those packages with no prerequisites). -- arbitraryInstallPlan :: (IsUnit ipkg, IsUnit srcpkg) => (Vertex -> [Vertex] -> Gen ipkg) -> (Vertex -> [Vertex] -> Gen srcpkg) -> Float -> Graph -> Gen (InstallPlan.GenericInstallPlan ipkg srcpkg) arbitraryInstallPlan mkIPkg mkSrcPkg ipkgProportion graph = do (ipkgvs, srcpkgvs) <- fmap ((\(ipkgs, srcpkgs) -> (map fst ipkgs, map fst srcpkgs)) . partition snd) $ sequenceA [ do isipkg <- if isRoot then pick ipkgProportion else return False return (v, isipkg) | (v,n) <- assocs (outdegree graph) , let isRoot = n == 0 ] ipkgs <- sequenceA [ mkIPkg pkgv depvs | pkgv <- ipkgvs , let depvs = graph ! pkgv ] srcpkgs <- sequenceA [ mkSrcPkg pkgv depvs | pkgv <- srcpkgvs , let depvs = graph ! pkgv ] let index = Graph.fromDistinctList (map InstallPlan.PreExisting ipkgs ++ map InstallPlan.Configured srcpkgs) return $ InstallPlan.new (IndependentGoals False) index -- | Generate a random directed acyclic graph, based on the algorithm presented -- here -- -- It generates a DAG based on ranks of nodes. Nodes in each rank can only -- have edges to nodes in subsequent ranks. -- -- The generator is parametrised by a generator for the number of ranks and -- the number of nodes within each rank. It is also parametrised by the -- chance that each node in each rank will have an edge from each node in -- each previous rank. Thus a higher chance will produce a more densely -- connected graph. 
-- arbitraryAcyclicGraph :: Gen Int -> Gen Int -> Float -> Gen Graph arbitraryAcyclicGraph genNRanks genNPerRank edgeChance = do nranks <- genNRanks rankSizes <- replicateM nranks genNPerRank let rankStarts = scanl (+) 0 rankSizes rankRanges = drop 1 (zip rankStarts (Unsafe.tail rankStarts)) totalRange = sum rankSizes rankEdges <- traverse (uncurry genRank) rankRanges return $ buildG (0, totalRange-1) (concat rankEdges) where genRank :: Vertex -> Vertex -> Gen [Edge] genRank rankStart rankEnd = filterM (const (pick edgeChance)) [ (i,j) | i <- [0..rankStart-1] , j <- [rankStart..rankEnd-1] ] pick :: Float -> Gen Bool pick chance = do p <- choose (0,1) return (p < chance) -------------------------------- -- Inspecting generated graphs -- {- -- Handy util for checking the generated graphs look sensible writeDotFile :: FilePath -> Graph -> IO () writeDotFile file = writeFile file . renderDotGraph renderDotGraph :: Graph -> String renderDotGraph graph = unlines ( [header ,graphDefaultAtribs ,nodeDefaultAtribs ,edgeDefaultAtribs] ++ map renderNode (vertices graph) ++ map renderEdge (edges graph) ++ [footer] ) where renderNode n = "\t" ++ show n ++ " [label=\"" ++ show n ++ "\"];" renderEdge (n, n') = "\t" ++ show n ++ " -> " ++ show n' ++ "[];" header, footer, graphDefaultAtribs, nodeDefaultAtribs, edgeDefaultAtribs :: String header = "digraph packages {" footer = "}" graphDefaultAtribs = "\tgraph [fontsize=14, fontcolor=black, color=black];" nodeDefaultAtribs = "\tnode [label=\"\\N\", width=\"0.75\", shape=ellipse];" edgeDefaultAtribs = "\tedge [fontsize=10];" -} cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/JobControl.hs0000644000000000000000000001545107346545000024176 0ustar0000000000000000{-# LANGUAGE DeriveDataTypeable #-} module UnitTests.Distribution.Client.JobControl (tests) where import Distribution.Client.JobControl import Distribution.Client.Compat.Prelude import Prelude () import Data.IORef (newIORef, atomicModifyIORef) import Control.Monad 
(replicateM_, replicateM) import Control.Concurrent (threadDelay) import Control.Exception (try) import qualified Data.Set as Set import Test.Tasty import Test.Tasty.QuickCheck hiding (collect) tests :: [TestTree] tests = [ testGroup "serial" [ testProperty "submit batch" prop_submit_serial , testProperty "submit batch" prop_remaining_serial , testProperty "submit interleaved" prop_interleaved_serial , testProperty "concurrent jobs" prop_concurrent_serial , testProperty "cancel" prop_cancel_serial , testProperty "exceptions" prop_exception_serial ] , testGroup "parallel" [ testProperty "submit batch" prop_submit_parallel , testProperty "submit batch" prop_remaining_parallel , testProperty "submit interleaved" prop_interleaved_parallel , testProperty "concurrent jobs" prop_concurrent_parallel , testProperty "cancel" prop_cancel_parallel , testProperty "exceptions" prop_exception_parallel ] ] prop_submit_serial :: [Int] -> Property prop_submit_serial xs = ioProperty $ do jobCtl <- newSerialJobControl prop_submit jobCtl xs prop_submit_parallel :: Positive (Small Int) -> [Int] -> Property prop_submit_parallel (Positive (Small maxJobLimit)) xs = ioProperty $ do jobCtl <- newParallelJobControl maxJobLimit prop_submit jobCtl xs prop_remaining_serial :: [Int] -> Property prop_remaining_serial xs = ioProperty $ do jobCtl <- newSerialJobControl prop_remaining jobCtl xs prop_remaining_parallel :: Positive (Small Int) -> [Int] -> Property prop_remaining_parallel (Positive (Small maxJobLimit)) xs = ioProperty $ do jobCtl <- newParallelJobControl maxJobLimit prop_remaining jobCtl xs prop_interleaved_serial :: [Int] -> Property prop_interleaved_serial xs = ioProperty $ do jobCtl <- newSerialJobControl prop_submit_interleaved jobCtl xs prop_interleaved_parallel :: Positive (Small Int) -> [Int] -> Property prop_interleaved_parallel (Positive (Small maxJobLimit)) xs = ioProperty $ do jobCtl <- newParallelJobControl maxJobLimit prop_submit_interleaved jobCtl xs prop_submit :: 
JobControl IO Int -> [Int] -> IO Bool prop_submit jobCtl xs = do traverse_ (\x -> spawnJob jobCtl (return x)) xs xs' <- traverse (\_ -> collectJob jobCtl) xs return (sort xs == sort xs') prop_remaining :: JobControl IO Int -> [Int] -> IO Bool prop_remaining jobCtl xs = do traverse_ (\x -> spawnJob jobCtl (return x)) xs xs' <- collectRemainingJobs jobCtl return (sort xs == sort xs') collectRemainingJobs :: Monad m => JobControl m a -> m [a] collectRemainingJobs jobCtl = go [] where go xs = do remaining <- remainingJobs jobCtl if remaining then do x <- collectJob jobCtl go (x:xs) else return xs prop_submit_interleaved :: JobControl IO (Maybe Int) -> [Int] -> IO Bool prop_submit_interleaved jobCtl xs = do xs' <- sequenceA [ spawn >> collect | let spawns = map (\x -> spawnJob jobCtl (return (Just x))) xs ++ repeat (return ()) collects = replicate 5 (return Nothing) ++ map (\_ -> collectJob jobCtl) xs , (spawn, collect) <- zip spawns collects ] return (sort xs == sort (catMaybes xs')) prop_concurrent_serial :: NonNegative (Small Int) -> Property prop_concurrent_serial (NonNegative (Small ntasks)) = ioProperty $ do jobCtl <- newSerialJobControl countRef <- newIORef (0 :: Int) replicateM_ ntasks (spawnJob jobCtl (task countRef)) counts <- replicateM ntasks (collectJob jobCtl) return $ length counts == ntasks && all (\(n0, n1) -> n0 == 0 && n1 == 1) counts where task countRef = do n0 <- atomicModifyIORef countRef (\n -> (n+1, n)) threadDelay 100 n1 <- atomicModifyIORef countRef (\n -> (n-1, n)) return (n0, n1) prop_concurrent_parallel :: Positive (Small Int) -> NonNegative Int -> Property prop_concurrent_parallel (Positive (Small maxJobLimit)) (NonNegative ntasks) = ioProperty $ do jobCtl <- newParallelJobControl maxJobLimit countRef <- newIORef (0 :: Int) replicateM_ ntasks (spawnJob jobCtl (task countRef)) counts <- replicateM ntasks (collectJob jobCtl) return $ length counts == ntasks && all (\(n0, n1) -> n0 >= 0 && n0 < maxJobLimit && n1 > 0 && n1 <= maxJobLimit) 
counts -- we do hit the concurrency limit (in the right circumstances) && if ntasks >= maxJobLimit*2 -- give us enough of a margin then any (\(_,n1) -> n1 == maxJobLimit) counts else True where task countRef = do n0 <- atomicModifyIORef countRef (\n -> (n+1, n)) threadDelay 100 n1 <- atomicModifyIORef countRef (\n -> (n-1, n)) return (n0, n1) prop_cancel_serial :: [Int] -> [Int] -> Property prop_cancel_serial xs ys = ioProperty $ do jobCtl <- newSerialJobControl traverse_ (\x -> spawnJob jobCtl (return x)) (xs++ys) xs' <- traverse (\_ -> collectJob jobCtl) xs cancelJobs jobCtl ys' <- collectRemainingJobs jobCtl return (sort xs == sort xs' && null ys') prop_cancel_parallel :: Positive (Small Int) -> [Int] -> [Int] -> Property prop_cancel_parallel (Positive (Small maxJobLimit)) xs ys = do ioProperty $ do jobCtl <- newParallelJobControl maxJobLimit traverse_ (\x -> spawnJob jobCtl (threadDelay 100 >> return x)) (xs++ys) xs' <- traverse (\_ -> collectJob jobCtl) xs cancelJobs jobCtl ys' <- collectRemainingJobs jobCtl return $ Set.fromList (xs'++ys') `Set.isSubsetOf` Set.fromList (xs++ys) data TestException = TestException Int deriving (Typeable, Show) instance Exception TestException prop_exception_serial :: [Either Int Int] -> Property prop_exception_serial xs = ioProperty $ do jobCtl <- newSerialJobControl prop_exception jobCtl xs prop_exception_parallel :: Positive (Small Int) -> [Either Int Int] -> Property prop_exception_parallel (Positive (Small maxJobLimit)) xs = ioProperty $ do jobCtl <- newParallelJobControl maxJobLimit prop_exception jobCtl xs prop_exception :: JobControl IO Int -> [Either Int Int] -> IO Bool prop_exception jobCtl xs = do traverse_ (\x -> spawnJob jobCtl (either (throwIO . 
TestException) return x)) xs xs' <- replicateM (length xs) $ do mx <- try (collectJob jobCtl) return $ case mx of Left (TestException n) -> Left n Right n -> Right n return (sort xs == sort xs') cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/ProjectConfig.hs0000644000000000000000000010236607346545000024661 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE RecordWildCards #-} {-# OPTIONS_GHC -fno-warn-orphans #-} -- simplifier goes nuts otherwise #if __GLASGOW_HASKELL__ < 806 {-# OPTIONS_GHC -funfolding-use-threshold=30 #-} #endif module UnitTests.Distribution.Client.ProjectConfig (tests) where #if !MIN_VERSION_base(4,8,0) import Data.Monoid import Control.Applicative #endif import Data.Map (Map) import qualified Data.Map as Map import Data.List (isPrefixOf, intercalate, (\\)) import Network.URI (URI) import Distribution.Deprecated.ParseUtils import qualified Distribution.Deprecated.ReadP as Parse import Distribution.Package import Distribution.PackageDescription import Distribution.Compiler import Distribution.Version import Distribution.Simple.Program.Types import Distribution.Simple.Program.Db import Distribution.Simple.Utils (toUTF8BS) import Distribution.Types.PackageVersionConstraint import Distribution.Parsec import Distribution.Pretty import Distribution.Client.Types import Distribution.Client.CmdInstall.ClientInstallFlags import Distribution.Client.Dependency.Types import Distribution.Client.Targets import Distribution.Client.Types.SourceRepo import Distribution.Utils.NubList import Distribution.Solver.Types.PackageConstraint import Distribution.Solver.Types.ConstraintSource import Distribution.Solver.Types.Settings import Distribution.Client.ProjectConfig import Distribution.Client.ProjectConfig.Legacy import UnitTests.Distribution.Client.ArbitraryInstances import UnitTests.Distribution.Client.TreeDiffInstances () import Data.TreeDiff.Class import Data.TreeDiff.QuickCheck import Test.Tasty import 
Test.Tasty.QuickCheck tests :: [TestTree] tests = [ testGroup "ProjectConfig <-> LegacyProjectConfig round trip" $ [ testProperty "packages" prop_roundtrip_legacytypes_packages , testProperty "buildonly" prop_roundtrip_legacytypes_buildonly , testProperty "specific" prop_roundtrip_legacytypes_specific ] ++ -- a couple tests seem to trigger a RTS fault in ghc-7.6 and older -- unclear why as of yet concat [ [ testProperty "shared" prop_roundtrip_legacytypes_shared , testProperty "local" prop_roundtrip_legacytypes_local , testProperty "all" prop_roundtrip_legacytypes_all ] | not usingGhc76orOlder ] , testGroup "individual parser tests" [ testProperty "package location" prop_parsePackageLocationTokenQ , testProperty "RelaxedDep" prop_roundtrip_printparse_RelaxedDep , testProperty "RelaxDeps" prop_roundtrip_printparse_RelaxDeps , testProperty "RelaxDeps'" prop_roundtrip_printparse_RelaxDeps' ] , testGroup "ProjectConfig printing/parsing round trip" [ testProperty "packages" prop_roundtrip_printparse_packages , testProperty "buildonly" prop_roundtrip_printparse_buildonly , testProperty "shared" prop_roundtrip_printparse_shared , testProperty "local" prop_roundtrip_printparse_local , testProperty "specific" prop_roundtrip_printparse_specific , testProperty "all" prop_roundtrip_printparse_all ] ] where usingGhc76orOlder = case buildCompilerId of CompilerId GHC v -> v < mkVersion [7,7] _ -> False ------------------------------------------------ -- Round trip: conversion to/from legacy types -- roundtrip :: (Eq a, ToExpr a, Show b) => (a -> b) -> (b -> a) -> a -> Property roundtrip f f_inv x = counterexample (show y) $ x `ediffEq` f_inv y -- no counterexample with y, as they not have ToExpr where y = f x roundtrip_legacytypes :: ProjectConfig -> Property roundtrip_legacytypes = roundtrip convertToLegacyProjectConfig convertLegacyProjectConfig prop_roundtrip_legacytypes_all :: ProjectConfig -> Property prop_roundtrip_legacytypes_all config = roundtrip_legacytypes config { 
projectConfigProvenance = mempty } prop_roundtrip_legacytypes_packages :: ProjectConfig -> Property prop_roundtrip_legacytypes_packages config = roundtrip_legacytypes config { projectConfigBuildOnly = mempty, projectConfigShared = mempty, projectConfigProvenance = mempty, projectConfigLocalPackages = mempty, projectConfigSpecificPackage = mempty } prop_roundtrip_legacytypes_buildonly :: ProjectConfigBuildOnly -> Property prop_roundtrip_legacytypes_buildonly config = roundtrip_legacytypes mempty { projectConfigBuildOnly = config } prop_roundtrip_legacytypes_shared :: ProjectConfigShared -> Property prop_roundtrip_legacytypes_shared config = roundtrip_legacytypes mempty { projectConfigShared = config } prop_roundtrip_legacytypes_local :: PackageConfig -> Property prop_roundtrip_legacytypes_local config = roundtrip_legacytypes mempty { projectConfigLocalPackages = config } prop_roundtrip_legacytypes_specific :: Map PackageName PackageConfig -> Property prop_roundtrip_legacytypes_specific config = roundtrip_legacytypes mempty { projectConfigSpecificPackage = MapMappend config } -------------------------------------------- -- Round trip: printing and parsing config -- roundtrip_printparse :: ProjectConfig -> Property roundtrip_printparse config = case fmap convertLegacyProjectConfig (parseLegacyProjectConfig "unused" (toUTF8BS str)) of ParseOk _ x -> counterexample ("shown:\n" ++ str) $ x `ediffEq` config { projectConfigProvenance = mempty } ParseFailed err -> counterexample ("shown:\n" ++ str ++ "\nERROR: " ++ show err) False where str :: String str = showLegacyProjectConfig (convertToLegacyProjectConfig config) prop_roundtrip_printparse_all :: ProjectConfig -> Property prop_roundtrip_printparse_all config = roundtrip_printparse config { projectConfigBuildOnly = hackProjectConfigBuildOnly (projectConfigBuildOnly config), projectConfigShared = hackProjectConfigShared (projectConfigShared config) } prop_roundtrip_printparse_packages :: [PackageLocationString] -> 
[PackageLocationString] -> [SourceRepoList] -> [PackageVersionConstraint] -> Property prop_roundtrip_printparse_packages pkglocstrs1 pkglocstrs2 repos named = roundtrip_printparse mempty { projectPackages = map getPackageLocationString pkglocstrs1, projectPackagesOptional = map getPackageLocationString pkglocstrs2, projectPackagesRepo = repos, projectPackagesNamed = named } prop_roundtrip_printparse_buildonly :: ProjectConfigBuildOnly -> Property prop_roundtrip_printparse_buildonly config = roundtrip_printparse mempty { projectConfigBuildOnly = hackProjectConfigBuildOnly config } hackProjectConfigBuildOnly :: ProjectConfigBuildOnly -> ProjectConfigBuildOnly hackProjectConfigBuildOnly config = config { -- These fields are only command line transitory things, not -- something to be recorded persistently in a config file projectConfigOnlyDeps = mempty, projectConfigOnlyDownload = mempty, projectConfigDryRun = mempty } prop_roundtrip_printparse_shared :: ProjectConfigShared -> Property prop_roundtrip_printparse_shared config = roundtrip_printparse mempty { projectConfigShared = hackProjectConfigShared config } hackProjectConfigShared :: ProjectConfigShared -> ProjectConfigShared hackProjectConfigShared config = config { projectConfigProjectFile = mempty, -- not present within project files projectConfigConfigFile = mempty, -- ditto projectConfigConstraints = --TODO: [required eventually] parse ambiguity in constraint -- "pkgname -any" as either any version or disabled flag "any". let ambiguous (UserConstraint _ (PackagePropertyFlags flags), _) = (not . null) [ () | (name, False) <- unFlagAssignment flags , "any" `isPrefixOf` unFlagName name ] ambiguous _ = False in filter (not . 
ambiguous) (projectConfigConstraints config) } prop_roundtrip_printparse_local :: PackageConfig -> Property prop_roundtrip_printparse_local config = roundtrip_printparse mempty { projectConfigLocalPackages = config } prop_roundtrip_printparse_specific :: Map PackageName (NonMEmpty PackageConfig) -> Property prop_roundtrip_printparse_specific config = roundtrip_printparse mempty { projectConfigSpecificPackage = MapMappend (fmap getNonMEmpty config) } ---------------------------- -- Individual Parser tests -- -- | Helper to parse a given string -- -- Succeeds only if there is a unique complete parse runReadP :: Parse.ReadP a a -> String -> Maybe a runReadP parser s = case [ x | (x,"") <- Parse.readP_to_S parser s ] of [x'] -> Just x' _ -> Nothing prop_parsePackageLocationTokenQ :: PackageLocationString -> Bool prop_parsePackageLocationTokenQ (PackageLocationString str) = runReadP parsePackageLocationTokenQ (renderPackageLocationToken str) == Just str prop_roundtrip_printparse_RelaxedDep :: RelaxedDep -> Property prop_roundtrip_printparse_RelaxedDep rdep = counterexample (prettyShow rdep) $ eitherParsec (prettyShow rdep) == Right rdep prop_roundtrip_printparse_RelaxDeps :: RelaxDeps -> Property prop_roundtrip_printparse_RelaxDeps rdep = counterexample (prettyShow rdep) $ Right rdep `ediffEq` eitherParsec (prettyShow rdep) prop_roundtrip_printparse_RelaxDeps' :: RelaxDeps -> Property prop_roundtrip_printparse_RelaxDeps' rdep = counterexample rdep' $ Right rdep `ediffEq` eitherParsec rdep' where rdep' = go (prettyShow rdep) -- replace 'all' tokens by '*' go :: String -> String go [] = [] go "all" = "*" go ('a':'l':'l':c:rest) | c `elem` ":," = '*' : go (c:rest) go rest = let (x,y) = break (`elem` ":,") rest (x',y') = span (`elem` ":,^") y in x++x'++go y' ------------------------ -- Arbitrary instances -- instance Arbitrary ProjectConfig where arbitrary = ProjectConfig <$> (map getPackageLocationString <$> arbitrary) <*> (map getPackageLocationString <$> arbitrary) <*> 
shortListOf 3 arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> (MapMappend . fmap getNonMEmpty . Map.fromList <$> shortListOf 3 arbitrary) -- package entries with no content are equivalent to -- the entry not existing at all, so exclude empty shrink ProjectConfig { projectPackages = x0 , projectPackagesOptional = x1 , projectPackagesRepo = x2 , projectPackagesNamed = x3 , projectConfigBuildOnly = x4 , projectConfigShared = x5 , projectConfigProvenance = x6 , projectConfigLocalPackages = x7 , projectConfigSpecificPackage = x8 , projectConfigAllPackages = x9 } = [ ProjectConfig { projectPackages = x0' , projectPackagesOptional = x1' , projectPackagesRepo = x2' , projectPackagesNamed = x3' , projectConfigBuildOnly = x4' , projectConfigShared = x5' , projectConfigProvenance = x6' , projectConfigLocalPackages = x7' , projectConfigSpecificPackage = (MapMappend (fmap getNonMEmpty x8')) , projectConfigAllPackages = x9' } | ((x0', x1', x2', x3'), (x4', x5', x6', x7', x8', x9')) <- shrink ((x0, x1, x2, x3), (x4, x5, x6, x7, fmap NonMEmpty (getMapMappend x8), x9)) ] newtype PackageLocationString = PackageLocationString { getPackageLocationString :: String } deriving Show instance Arbitrary PackageLocationString where arbitrary = PackageLocationString <$> oneof [ show . 
getNonEmpty <$> (arbitrary :: Gen (NonEmptyList String)) , arbitraryGlobLikeStr , show <$> (arbitrary :: Gen URI) ] `suchThat` (\xs -> not ("{" `isPrefixOf` xs)) arbitraryGlobLikeStr :: Gen String arbitraryGlobLikeStr = outerTerm where outerTerm = concat <$> shortListOf1 4 (frequency [ (2, token) , (1, braces <$> innerTerm) ]) innerTerm = intercalate "," <$> shortListOf1 3 (frequency [ (3, token) , (1, braces <$> innerTerm) ]) token = shortListOf1 4 (elements (['#'..'~'] \\ "{,}")) braces s = "{" ++ s ++ "}" instance Arbitrary ClientInstallFlags where arbitrary = ClientInstallFlags <$> arbitrary <*> arbitraryFlag arbitraryShortToken <*> arbitrary <*> arbitrary <*> arbitraryFlag arbitraryShortToken instance Arbitrary ProjectConfigBuildOnly where arbitrary = ProjectConfigBuildOnly <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> (toNubList <$> shortListOf 2 arbitrary) <*> arbitrary <*> arbitrary <*> arbitrary <*> (fmap getShortToken <$> arbitrary) <*> arbitraryNumJobs <*> arbitrary <*> arbitrary <*> arbitrary <*> (fmap getShortToken <$> arbitrary) <*> arbitrary <*> (fmap getShortToken <$> arbitrary) <*> (fmap getShortToken <$> arbitrary) <*> arbitrary where arbitraryNumJobs = fmap (fmap getPositive) <$> arbitrary shrink ProjectConfigBuildOnly { projectConfigVerbosity = x00 , projectConfigDryRun = x01 , projectConfigOnlyDeps = x02 , projectConfigOnlyDownload = x18 , projectConfigSummaryFile = x03 , projectConfigLogFile = x04 , projectConfigBuildReports = x05 , projectConfigReportPlanningFailure = x06 , projectConfigSymlinkBinDir = x07 , projectConfigNumJobs = x09 , projectConfigKeepGoing = x10 , projectConfigOfflineMode = x11 , projectConfigKeepTempFiles = x12 , projectConfigHttpTransport = x13 , projectConfigIgnoreExpiry = x14 , projectConfigCacheDir = x15 , projectConfigLogsDir = x16 , projectConfigClientInstallFlags = x17 } = [ ProjectConfigBuildOnly { projectConfigVerbosity = x00' , projectConfigDryRun = x01' , projectConfigOnlyDeps = x02' , 
projectConfigOnlyDownload = x18' , projectConfigSummaryFile = x03' , projectConfigLogFile = x04' , projectConfigBuildReports = x05' , projectConfigReportPlanningFailure = x06' , projectConfigSymlinkBinDir = x07' , projectConfigNumJobs = postShrink_NumJobs x09' , projectConfigKeepGoing = x10' , projectConfigOfflineMode = x11' , projectConfigKeepTempFiles = x12' , projectConfigHttpTransport = x13 , projectConfigIgnoreExpiry = x14' , projectConfigCacheDir = x15 , projectConfigLogsDir = x16 , projectConfigClientInstallFlags = x17' } | ((x00', x01', x02', x03', x04'), (x05', x06', x07', x09'), (x10', x11', x12', x14'), ( x17', x18' )) <- shrink ((x00, x01, x02, x03, x04), (x05, x06, x07, preShrink_NumJobs x09), (x10, x11, x12, x14), ( x17, x18 )) ] where preShrink_NumJobs = fmap (fmap Positive) postShrink_NumJobs = fmap (fmap getPositive) instance Arbitrary ProjectConfigShared where arbitrary = do projectConfigDistDir <- arbitraryFlag arbitraryShortToken projectConfigConfigFile <- arbitraryFlag arbitraryShortToken projectConfigProjectFile <- arbitraryFlag arbitraryShortToken projectConfigIgnoreProject <- arbitrary projectConfigHcFlavor <- arbitrary projectConfigHcPath <- arbitraryFlag arbitraryShortToken projectConfigHcPkg <- arbitraryFlag arbitraryShortToken projectConfigHaddockIndex <- arbitrary projectConfigPackageDBs <- shortListOf 2 arbitrary projectConfigRemoteRepos <- arbitrary projectConfigLocalNoIndexRepos <- arbitrary projectConfigActiveRepos <- arbitrary projectConfigIndexState <- arbitrary projectConfigStoreDir <- arbitraryFlag arbitraryShortToken projectConfigConstraints <- arbitraryConstraints projectConfigPreferences <- shortListOf 2 arbitrary projectConfigCabalVersion <- arbitrary projectConfigSolver <- arbitrary projectConfigAllowOlder <- arbitrary projectConfigAllowNewer <- arbitrary projectConfigWriteGhcEnvironmentFilesPolicy <- arbitrary projectConfigMaxBackjumps <- arbitrary projectConfigReorderGoals <- arbitrary projectConfigCountConflicts <- 
arbitrary projectConfigFineGrainedConflicts <- arbitrary projectConfigMinimizeConflictSet <- arbitrary projectConfigStrongFlags <- arbitrary projectConfigAllowBootLibInstalls <- arbitrary projectConfigOnlyConstrained <- arbitrary projectConfigPerComponent <- arbitrary projectConfigIndependentGoals <- arbitrary projectConfigProgPathExtra <- toNubList <$> listOf arbitraryShortToken return ProjectConfigShared {..} where arbitraryConstraints :: Gen [(UserConstraint, ConstraintSource)] arbitraryConstraints = fmap (\uc -> (uc, projectConfigConstraintSource)) <$> arbitrary shrink ProjectConfigShared {..} = runShrinker $ pure ProjectConfigShared <*> shrinker projectConfigDistDir <*> shrinker projectConfigConfigFile <*> shrinker projectConfigProjectFile <*> shrinker projectConfigIgnoreProject <*> shrinker projectConfigHcFlavor <*> shrinkerAla (fmap NonEmpty) projectConfigHcPath <*> shrinkerAla (fmap NonEmpty) projectConfigHcPkg <*> shrinker projectConfigHaddockIndex <*> shrinker projectConfigPackageDBs <*> shrinker projectConfigRemoteRepos <*> shrinker projectConfigLocalNoIndexRepos <*> shrinker projectConfigActiveRepos <*> shrinker projectConfigIndexState <*> shrinker projectConfigStoreDir <*> shrinkerPP preShrink_Constraints postShrink_Constraints projectConfigConstraints <*> shrinker projectConfigPreferences <*> shrinker projectConfigCabalVersion <*> shrinker projectConfigSolver <*> shrinker projectConfigAllowOlder <*> shrinker projectConfigAllowNewer <*> shrinker projectConfigWriteGhcEnvironmentFilesPolicy <*> shrinker projectConfigMaxBackjumps <*> shrinker projectConfigReorderGoals <*> shrinker projectConfigCountConflicts <*> shrinker projectConfigFineGrainedConflicts <*> shrinker projectConfigMinimizeConflictSet <*> shrinker projectConfigStrongFlags <*> shrinker projectConfigAllowBootLibInstalls <*> shrinker projectConfigOnlyConstrained <*> shrinker projectConfigPerComponent <*> shrinker projectConfigIndependentGoals <*> shrinker projectConfigProgPathExtra where 
preShrink_Constraints = map fst postShrink_Constraints = map (\uc -> (uc, projectConfigConstraintSource)) projectConfigConstraintSource :: ConstraintSource projectConfigConstraintSource = ConstraintSourceProjectConfig "unused" instance Arbitrary ProjectConfigProvenance where arbitrary = elements [Implicit, Explicit "cabal.project"] instance Arbitrary PackageConfig where arbitrary = PackageConfig <$> (MapLast . Map.fromList <$> shortListOf 10 ((,) <$> arbitraryProgramName <*> arbitraryShortToken)) <*> (MapMappend . Map.fromList <$> shortListOf 10 ((,) <$> arbitraryProgramName <*> listOf arbitraryShortToken)) <*> (toNubList <$> listOf arbitraryShortToken) <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> shortListOf 5 arbitraryShortToken <*> arbitrary <*> arbitrary <*> arbitrary <*> shortListOf 5 arbitraryShortToken <*> shortListOf 5 arbitraryShortToken <*> shortListOf 5 arbitraryShortToken <*> shortListOf 5 arbitraryShortToken <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitraryFlag arbitraryShortToken <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitraryFlag arbitraryShortToken <*> arbitrary <*> arbitrary <*> arbitraryFlag arbitraryShortToken <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitraryFlag arbitraryShortToken <*> arbitrary <*> shortListOf 5 arbitrary <*> shortListOf 5 arbitrary where arbitraryProgramName :: Gen String arbitraryProgramName = elements [ programName prog | (prog, _) <- knownPrograms (defaultProgramDb) ] shrink PackageConfig { packageConfigProgramPaths = x00 , packageConfigProgramArgs = x01 , packageConfigProgramPathExtra = x02 , packageConfigFlagAssignment = x03 , packageConfigVanillaLib = x04 , 
packageConfigSharedLib = x05 , packageConfigStaticLib = x42 , packageConfigDynExe = x06 , packageConfigFullyStaticExe = x50 , packageConfigProf = x07 , packageConfigProfLib = x08 , packageConfigProfExe = x09 , packageConfigProfDetail = x10 , packageConfigProfLibDetail = x11 , packageConfigConfigureArgs = x12 , packageConfigOptimization = x13 , packageConfigProgPrefix = x14 , packageConfigProgSuffix = x15 , packageConfigExtraLibDirs = x16 , packageConfigExtraLibDirsStatic = x53 , packageConfigExtraFrameworkDirs = x17 , packageConfigExtraIncludeDirs = x18 , packageConfigGHCiLib = x19 , packageConfigSplitSections = x20 , packageConfigSplitObjs = x20_1 , packageConfigStripExes = x21 , packageConfigStripLibs = x22 , packageConfigTests = x23 , packageConfigBenchmarks = x24 , packageConfigCoverage = x25 , packageConfigRelocatable = x26 , packageConfigDebugInfo = x27 , packageConfigDumpBuildInfo = x27_1 , packageConfigRunTests = x28 , packageConfigDocumentation = x29 , packageConfigHaddockHoogle = x30 , packageConfigHaddockHtml = x31 , packageConfigHaddockHtmlLocation = x32 , packageConfigHaddockForeignLibs = x33 , packageConfigHaddockExecutables = x33_1 , packageConfigHaddockTestSuites = x34 , packageConfigHaddockBenchmarks = x35 , packageConfigHaddockInternal = x36 , packageConfigHaddockCss = x37 , packageConfigHaddockLinkedSource = x38 , packageConfigHaddockQuickJump = x43 , packageConfigHaddockHscolourCss = x39 , packageConfigHaddockContents = x40 , packageConfigHaddockForHackage = x41 , packageConfigTestHumanLog = x44 , packageConfigTestMachineLog = x45 , packageConfigTestShowDetails = x46 , packageConfigTestKeepTix = x47 , packageConfigTestWrapper = x48 , packageConfigTestFailWhenNoTestSuites = x49 , packageConfigTestTestOptions = x51 , packageConfigBenchmarkOptions = x52 } = [ PackageConfig { packageConfigProgramPaths = postShrink_Paths x00' , packageConfigProgramArgs = postShrink_Args x01' , packageConfigProgramPathExtra = x02' , packageConfigFlagAssignment = x03' 
, packageConfigVanillaLib = x04' , packageConfigSharedLib = x05' , packageConfigStaticLib = x42' , packageConfigDynExe = x06' , packageConfigFullyStaticExe = x50' , packageConfigProf = x07' , packageConfigProfLib = x08' , packageConfigProfExe = x09' , packageConfigProfDetail = x10' , packageConfigProfLibDetail = x11' , packageConfigConfigureArgs = map getNonEmpty x12' , packageConfigOptimization = x13' , packageConfigProgPrefix = x14' , packageConfigProgSuffix = x15' , packageConfigExtraLibDirs = map getNonEmpty x16' , packageConfigExtraLibDirsStatic = map getNonEmpty x53' , packageConfigExtraFrameworkDirs = map getNonEmpty x17' , packageConfigExtraIncludeDirs = map getNonEmpty x18' , packageConfigGHCiLib = x19' , packageConfigSplitSections = x20' , packageConfigSplitObjs = x20_1' , packageConfigStripExes = x21' , packageConfigStripLibs = x22' , packageConfigTests = x23' , packageConfigBenchmarks = x24' , packageConfigCoverage = x25' , packageConfigRelocatable = x26' , packageConfigDebugInfo = x27' , packageConfigDumpBuildInfo = x27_1' , packageConfigRunTests = x28' , packageConfigDocumentation = x29' , packageConfigHaddockHoogle = x30' , packageConfigHaddockHtml = x31' , packageConfigHaddockHtmlLocation = x32' , packageConfigHaddockForeignLibs = x33' , packageConfigHaddockExecutables = x33_1' , packageConfigHaddockTestSuites = x34' , packageConfigHaddockBenchmarks = x35' , packageConfigHaddockInternal = x36' , packageConfigHaddockCss = fmap getNonEmpty x37' , packageConfigHaddockLinkedSource = x38' , packageConfigHaddockQuickJump = x43' , packageConfigHaddockHscolourCss = fmap getNonEmpty x39' , packageConfigHaddockContents = x40' , packageConfigHaddockForHackage = x41' , packageConfigTestHumanLog = x44' , packageConfigTestMachineLog = x45' , packageConfigTestShowDetails = x46' , packageConfigTestKeepTix = x47' , packageConfigTestWrapper = x48' , packageConfigTestFailWhenNoTestSuites = x49' , packageConfigTestTestOptions = x51' , packageConfigBenchmarkOptions = 
x52' } | (((x00', x01', x02', x03', x04'), (x05', x42', x06', x50', x07', x08', x09'), (x10', x11', x12', x13', x14'), (x15', x16', x53', x17', x18', x19')), ((x20', x20_1', x21', x22', x23', x24'), (x25', x26', x27', x27_1', x28', x29'), (x30', x31', x32', (x33', x33_1'), x34'), (x35', x36', x37', x38', x43', x39'), (x40', x41'), (x44', x45', x46', x47', x48', x49', x51', x52'))) <- shrink (((preShrink_Paths x00, preShrink_Args x01, x02, x03, x04), (x05, x42, x06, x50, x07, x08, x09), (x10, x11, map NonEmpty x12, x13, x14), (x15, map NonEmpty x16, map NonEmpty x53, map NonEmpty x17, map NonEmpty x18, x19)), ((x20, x20_1, x21, x22, x23, x24), (x25, x26, x27, x27_1, x28, x29), (x30, x31, x32, (x33, x33_1), x34), (x35, x36, fmap NonEmpty x37, x38, x43, fmap NonEmpty x39), (x40, x41), (x44, x45, x46, x47, x48, x49, x51, x52))) ] where preShrink_Paths = Map.map NonEmpty . Map.mapKeys NoShrink . getMapLast postShrink_Paths = MapLast . Map.map getNonEmpty . Map.mapKeys getNoShrink preShrink_Args = Map.map (NonEmpty . map NonEmpty) . Map.mapKeys NoShrink . getMapMappend postShrink_Args = MapMappend . Map.map (map getNonEmpty . getNonEmpty) . 
Map.mapKeys getNoShrink instance f ~ [] => Arbitrary (SourceRepositoryPackage f) where arbitrary = SourceRepositoryPackage <$> arbitrary <*> (getShortToken <$> arbitrary) <*> (fmap getShortToken <$> arbitrary) <*> (fmap getShortToken <$> arbitrary) <*> (fmap getShortToken <$> shortListOf 3 arbitrary) <*> (fmap getShortToken <$> shortListOf 3 arbitrary) shrink SourceRepositoryPackage {..} = runShrinker $ pure SourceRepositoryPackage <*> shrinker srpType <*> shrinkerAla ShortToken srpLocation <*> shrinkerAla (fmap ShortToken) srpTag <*> shrinkerAla (fmap ShortToken) srpBranch <*> shrinkerAla (fmap ShortToken) srpSubdir <*> shrinkerAla (fmap ShortToken) srpCommand instance Arbitrary RemoteRepo where arbitrary = RemoteRepo <$> arbitrary <*> arbitrary -- URI <*> arbitrary <*> listOf arbitraryRootKey <*> fmap getNonNegative arbitrary <*> pure False where arbitraryRootKey = shortListOf1 5 (oneof [ choose ('0', '9') , choose ('a', 'f') ]) instance Arbitrary LocalRepo where arbitrary = LocalRepo <$> arbitrary <*> elements ["/tmp/foo", "/tmp/bar"] -- TODO: generate valid absolute paths <*> arbitrary instance Arbitrary PreSolver where arbitrary = elements [minBound..maxBound] instance Arbitrary ReorderGoals where arbitrary = ReorderGoals <$> arbitrary instance Arbitrary CountConflicts where arbitrary = CountConflicts <$> arbitrary instance Arbitrary FineGrainedConflicts where arbitrary = FineGrainedConflicts <$> arbitrary instance Arbitrary MinimizeConflictSet where arbitrary = MinimizeConflictSet <$> arbitrary instance Arbitrary IndependentGoals where arbitrary = IndependentGoals <$> arbitrary instance Arbitrary StrongFlags where arbitrary = StrongFlags <$> arbitrary instance Arbitrary AllowBootLibInstalls where arbitrary = AllowBootLibInstalls <$> arbitrary instance Arbitrary OnlyConstrained where arbitrary = oneof [ pure OnlyConstrainedAll , pure OnlyConstrainedNone ] 
cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/ProjectPlanning.hs0000644000000000000000000000631407346545000025216 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} module UnitTests.Distribution.Client.ProjectPlanning (tests) where import Data.List.NonEmpty import Distribution.Client.ProjectPlanning (ComponentTarget (..), SubComponentTarget (..), nubComponentTargets) import Distribution.Types.ComponentName import Distribution.Types.LibraryName import Test.Tasty import Test.Tasty.HUnit tests :: [TestTree] tests = [ testGroup "Build Target Tests" buildTargetTests ] -- ---------------------------------------------------------------------------- -- Build Target Tests -- ---------------------------------------------------------------------------- buildTargetTests :: [TestTree] buildTargetTests = [ testGroup "nubComponentTargets" nubComponentTargetsTests ] nubComponentTargetsTests :: [TestTree] nubComponentTargetsTests = [ testCase "Works on empty list" $ nubComponentTargets [] @?= ([] :: [(ComponentTarget, NonEmpty Int)]) , testCase "Merges targets to same component" $ nubComponentTargets [ (mainLibModuleTarget, 1 :: Int) , (mainLibFileTarget, 2) ] @?= [(mainLibWholeCompTarget, 1 :| [2])] , testCase "Merges whole component targets" $ nubComponentTargets [(mainLibFileTarget, 2), (mainLibWholeCompTarget, 1 :: Int)] @?= [(mainLibWholeCompTarget, 2 :| [1])], testCase "Don't merge unrelated targets" $ nubComponentTargets [ (mainLibWholeCompTarget, 1 :: Int) , (exeWholeCompTarget, 2) ] @?= [(mainLibWholeCompTarget, pure 1), (exeWholeCompTarget, pure 2)] , testCase "Merge multiple related targets" $ nubComponentTargets [ (mainLibWholeCompTarget, 1 :: Int) , (mainLibModuleTarget, 4) , (exeWholeCompTarget, 2) , (exeFileTarget, 3) ] @?= [(mainLibWholeCompTarget, 1 :| [4]), (exeWholeCompTarget, 2 :| [3])] , testCase "Merge related targets, don't merge unrelated ones" $ nubComponentTargets [ (mainLibFileTarget, 1 :: Int) , (mainLibModuleTarget, 4) , 
(exeWholeCompTarget, 2) , (exeFileTarget, 3) , (exe2FileTarget, 5) ] @?= [ (mainLibWholeCompTarget, 1 :| [4]) , (exeWholeCompTarget, 2 :| [3]) , (exe2WholeCompTarget, 5 :| []) ] ] -- ---------------------------------------------------------------------------- -- Utils -- ---------------------------------------------------------------------------- mainLibWholeCompTarget :: ComponentTarget mainLibWholeCompTarget = ComponentTarget (CLibName LMainLibName) WholeComponent mainLibModuleTarget :: ComponentTarget mainLibModuleTarget = ComponentTarget (CLibName LMainLibName) (ModuleTarget "Lib") mainLibFileTarget :: ComponentTarget mainLibFileTarget = ComponentTarget (CLibName LMainLibName) (FileTarget "./Lib.hs") exeWholeCompTarget :: ComponentTarget exeWholeCompTarget = ComponentTarget (CExeName "exe") WholeComponent exeFileTarget :: ComponentTarget exeFileTarget = ComponentTarget (CExeName "exe") (FileTarget "./Main.hs") exe2WholeCompTarget :: ComponentTarget exe2WholeCompTarget = ComponentTarget (CExeName "exe2") WholeComponent exe2FileTarget :: ComponentTarget exe2FileTarget = ComponentTarget (CExeName "exe2") (FileTarget "./Main2.hs") cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Store.hs0000644000000000000000000001432007346545000023211 0ustar0000000000000000module UnitTests.Distribution.Client.Store (tests) where --import Control.Monad --import Control.Concurrent (forkIO, threadDelay) --import Control.Concurrent.MVar import qualified Data.Set as Set import System.FilePath import System.Directory --import System.Random import Distribution.Package (UnitId, mkUnitId) import Distribution.Compiler (CompilerId(..), CompilerFlavor(..)) import Distribution.Version (mkVersion) import Distribution.Verbosity (Verbosity, silent) import Distribution.Simple.Utils (withTempDirectory) import Distribution.Client.Store import Distribution.Client.RebuildMonad import Test.Tasty import Test.Tasty.HUnit tests :: [TestTree] tests = [ testCase "list content empty" testListEmpty , 
testCase "install serial" testInstallSerial --, testCase "install parallel" testInstallParallel --TODO: figure out some way to do a parallel test, see issue below ] testListEmpty :: Assertion testListEmpty = withTempDirectory verbosity "." "store-" $ \tmp -> do let storeDirLayout = defaultStoreDirLayout (tmp "store") assertStoreEntryExists storeDirLayout compid unitid False assertStoreContent tmp storeDirLayout compid Set.empty where compid = CompilerId GHC (mkVersion [1,0]) unitid = mkUnitId "foo-1.0-xyz" testInstallSerial :: Assertion testInstallSerial = withTempDirectory verbosity "." "store-" $ \tmp -> do let storeDirLayout = defaultStoreDirLayout (tmp "store") copyFiles file content dir = do -- we copy into a prefix inside the tmp dir and return the prefix let destprefix = dir "prefix" createDirectory destprefix writeFile (destprefix file) content return (destprefix,[]) assertNewStoreEntry tmp storeDirLayout compid unitid1 (copyFiles "file1" "content-foo") (return ()) UseNewStoreEntry assertNewStoreEntry tmp storeDirLayout compid unitid1 (copyFiles "file1" "content-foo") (return ()) UseExistingStoreEntry assertNewStoreEntry tmp storeDirLayout compid unitid2 (copyFiles "file2" "content-bar") (return ()) UseNewStoreEntry let pkgDir :: UnitId -> FilePath pkgDir = storePackageDirectory storeDirLayout compid assertFileEqual (pkgDir unitid1 "file1") "content-foo" assertFileEqual (pkgDir unitid2 "file2") "content-bar" where compid = CompilerId GHC (mkVersion [1,0]) unitid1 = mkUnitId "foo-1.0-xyz" unitid2 = mkUnitId "bar-2.0-xyz" {- -- unfortunately a parallel test like the one below is thwarted by the normal -- process-internal file locking. If that locking were not in place then we -- ought to get the blocking behaviour, but due to the normal Handle locking -- it just fails instead. testInstallParallel :: Assertion testInstallParallel = withTempDirectory verbosity "." 
"store-" $ \tmp -> do let storeDirLayout = defaultStoreDirLayout (tmp "store") sync1 <- newEmptyMVar sync2 <- newEmptyMVar outv <- newEmptyMVar regv <- newMVar (0 :: Int) sequence_ [ do forkIO $ do let copyFiles dir = do delay <- randomRIO (1,100000) writeFile (dir "file") (show n) putMVar sync1 () readMVar sync2 threadDelay delay register = do modifyMVar_ regv (return . (+1)) threadDelay 200000 o <- newStoreEntry verbosity storeDirLayout compid unitid copyFiles register putMVar outv (n, o) | n <- [0..9 :: Int] ] replicateM_ 10 (takeMVar sync1) -- all threads are in the copyFiles action concurrently, release them: putMVar sync2 () outcomes <- replicateM 10 (takeMVar outv) regcount <- readMVar regv let regcount' = length [ () | (_, UseNewStoreEntry) <- outcomes ] assertEqual "num registrations" 1 regcount assertEqual "num registrations" 1 regcount' assertStoreContent tmp storeDirLayout compid (Set.singleton unitid) let pkgDir :: UnitId -> FilePath pkgDir = storePackageDirectory storeDirLayout compid case [ n | (n, UseNewStoreEntry) <- outcomes ] of [n] -> assertFileEqual (pkgDir unitid "file") (show n) _ -> assertFailure "impossible" where compid = CompilerId GHC (mkVersion [1,0]) unitid = mkUnitId "foo-1.0-xyz" -} ------------- -- Utils assertNewStoreEntry :: FilePath -> StoreDirLayout -> CompilerId -> UnitId -> (FilePath -> IO (FilePath,[FilePath])) -> IO () -> NewStoreEntryOutcome -> Assertion assertNewStoreEntry tmp storeDirLayout compid unitid copyFiles register expectedOutcome = do entries <- runRebuild tmp $ getStoreEntries storeDirLayout compid outcome <- newStoreEntry verbosity storeDirLayout compid unitid copyFiles register assertEqual "newStoreEntry outcome" expectedOutcome outcome assertStoreEntryExists storeDirLayout compid unitid True let expected = Set.insert unitid entries assertStoreContent tmp storeDirLayout compid expected assertStoreEntryExists :: StoreDirLayout -> CompilerId -> UnitId -> Bool -> Assertion assertStoreEntryExists storeDirLayout 
compid unitid expected = do actual <- doesStoreEntryExist storeDirLayout compid unitid assertEqual "store entry exists" expected actual assertStoreContent :: FilePath -> StoreDirLayout -> CompilerId -> Set.Set UnitId -> Assertion assertStoreContent tmp storeDirLayout compid expected = do actual <- runRebuild tmp $ getStoreEntries storeDirLayout compid assertEqual "store content" actual expected assertFileEqual :: FilePath -> String -> Assertion assertFileEqual path expected = do exists <- doesFileExist path assertBool ("file does not exist:\n" ++ path) exists actual <- readFile path assertEqual ("file content for:\n" ++ path) expected actual verbosity :: Verbosity verbosity = silent cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Tar.hs0000644000000000000000000000571107346545000022647 0ustar0000000000000000module UnitTests.Distribution.Client.Tar ( tests ) where import Distribution.Client.Tar ( filterEntries , filterEntriesM ) import Codec.Archive.Tar ( Entries(..) , foldEntries ) import Codec.Archive.Tar.Entry ( EntryContent(..) , simpleEntry , Entry(..) 
, toTarPath ) import Test.Tasty import Test.Tasty.HUnit import qualified Data.ByteString.Lazy as BS import qualified Data.ByteString.Lazy.Char8 as BS.Char8 import Control.Monad.Writer.Lazy (runWriterT, tell) tests :: [TestTree] tests = [ testCase "filterEntries" filterTest , testCase "filterEntriesM" filterMTest ] filterTest :: Assertion filterTest = do let e1 = getFileEntry "file1" "x" e2 = getFileEntry "file2" "y" p = (\e -> let str = BS.Char8.unpack $ case entryContent e of NormalFile dta _ -> dta _ -> error "Invalid entryContent" in str /= "y") assertEqual "Unexpected result for filter" "xz" $ entriesToString $ filterEntries p $ Next e1 $ Next e2 Done assertEqual "Unexpected result for filter" "z" $ entriesToString $ filterEntries p $ Done assertEqual "Unexpected result for filter" "xf" $ entriesToString $ filterEntries p $ Next e1 $ Next e2 $ Fail "f" filterMTest :: Assertion filterMTest = do let e1 = getFileEntry "file1" "x" e2 = getFileEntry "file2" "y" p = (\e -> let str = BS.Char8.unpack $ case entryContent e of NormalFile dta _ -> dta _ -> error "Invalid entryContent" in tell "t" >> return (str /= "y")) (r, w) <- runWriterT $ filterEntriesM p $ Next e1 $ Next e2 Done assertEqual "Unexpected result for filterM" "xz" $ entriesToString r assertEqual "Unexpected result for filterM w" "tt" w (r1, w1) <- runWriterT $ filterEntriesM p $ Done assertEqual "Unexpected result for filterM" "z" $ entriesToString r1 assertEqual "Unexpected result for filterM w" "" w1 (r2, w2) <- runWriterT $ filterEntriesM p $ Next e1 $ Next e2 $ Fail "f" assertEqual "Unexpected result for filterM" "xf" $ entriesToString r2 assertEqual "Unexpected result for filterM w" "tt" w2 getFileEntry :: FilePath -> [Char] -> Entry getFileEntry pth dta = simpleEntry tp $ NormalFile dta' $ BS.length dta' where tp = case toTarPath False pth of Right tp' -> tp' Left e -> error e dta' = BS.Char8.pack dta entriesToString :: Entries String -> String entriesToString = foldEntries (\e acc -> let str = 
BS.Char8.unpack $ case entryContent e of NormalFile dta _ -> dta _ -> error "Invalid entryContent" in str ++ acc) "z" id cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/Targets.hs0000644000000000000000000000762307346545000023536 0ustar0000000000000000module UnitTests.Distribution.Client.Targets ( tests ) where import Distribution.Client.Targets (UserQualifier(..) ,UserConstraintScope(..) ,UserConstraint(..), readUserConstraint) import Distribution.Package (mkPackageName) import Distribution.PackageDescription (mkFlagName, mkFlagAssignment) import Distribution.Version (anyVersion, thisVersion, mkVersion) import Distribution.Parsec (explicitEitherParsec, parsec, parsecCommaList) import Distribution.Solver.Types.PackageConstraint (PackageProperty(..)) import Distribution.Solver.Types.OptionalStanza (OptionalStanza(..)) import Test.Tasty import Test.Tasty.HUnit import Data.List (intercalate) -- Helper function: makes a test group by mapping each element -- of a list to a test case. makeGroup :: String -> (a -> Assertion) -> [a] -> TestTree makeGroup name f xs = testGroup name $ zipWith testCase (map show [0 :: Integer ..]) (map f xs) tests :: [TestTree] tests = [ makeGroup "readUserConstraint" (uncurry readUserConstraintTest) exampleConstraints , makeGroup "parseUserConstraint" (uncurry parseUserConstraintTest) exampleConstraints , makeGroup "readUserConstraints" (uncurry readUserConstraintsTest) [-- First example only. (head exampleStrs, take 1 exampleUcs), -- All examples separated by commas. 
(intercalate ", " exampleStrs, exampleUcs)] ] where (exampleStrs, exampleUcs) = unzip exampleConstraints exampleConstraints :: [(String, UserConstraint)] exampleConstraints = [ ("template-haskell installed", UserConstraint (UserQualified UserQualToplevel (pn "template-haskell")) PackagePropertyInstalled) , ("bytestring >= 0", UserConstraint (UserQualified UserQualToplevel (pn "bytestring")) (PackagePropertyVersion anyVersion)) , ("any.directory test", UserConstraint (UserAnyQualifier (pn "directory")) (PackagePropertyStanzas [TestStanzas])) , ("setup.Cabal installed", UserConstraint (UserAnySetupQualifier (pn "Cabal")) PackagePropertyInstalled) , ("process:setup.bytestring ==5.2", UserConstraint (UserQualified (UserQualSetup (pn "process")) (pn "bytestring")) (PackagePropertyVersion (thisVersion (mkVersion [5, 2])))) -- flag MUST be prefixed with - or + , ("network:setup.containers +foo -bar +baz", UserConstraint (UserQualified (UserQualSetup (pn "network")) (pn "containers")) (PackagePropertyFlags (mkFlagAssignment [(fn "foo", True), (fn "bar", False), (fn "baz", True)]))) -- -- TODO: Re-enable UserQualExe tests once we decide on a syntax. 
-- -- , ("foo:happy:exe.template-haskell test", -- UserConstraint (UserQualified (UserQualExe (pn "foo") (pn "happy")) (pn "template-haskell")) -- (PackagePropertyStanzas [TestStanzas])) ] where pn = mkPackageName fn = mkFlagName readUserConstraintTest :: String -> UserConstraint -> Assertion readUserConstraintTest str uc = assertEqual ("Couldn't read constraint: '" ++ str ++ "'") expected actual where expected = Right uc actual = readUserConstraint str parseUserConstraintTest :: String -> UserConstraint -> Assertion parseUserConstraintTest str uc = assertEqual ("Couldn't parse constraint: '" ++ str ++ "'") expected actual where expected = Right uc actual = explicitEitherParsec parsec str readUserConstraintsTest :: String -> [UserConstraint] -> Assertion readUserConstraintsTest str ucs = assertEqual ("Couldn't read constraints: '" ++ str ++ "'") expected actual where expected = Right ucs actual = explicitEitherParsec (parsecCommaList parsec) str cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/TreeDiffInstances.hs0000644000000000000000000000516607346545000025465 0ustar0000000000000000{-# LANGUAGE UndecidableInstances #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module UnitTests.Distribution.Client.TreeDiffInstances () where import Distribution.Solver.Types.ConstraintSource import Distribution.Solver.Types.OptionalStanza import Distribution.Solver.Types.PackageConstraint import Distribution.Solver.Types.Settings import Distribution.Client.BuildReports.Types import Distribution.Client.CmdInstall.ClientInstallFlags import Distribution.Client.Dependency.Types import Distribution.Client.IndexUtils.ActiveRepos import Distribution.Client.IndexUtils.IndexState import Distribution.Client.IndexUtils.Timestamp import Distribution.Client.ProjectConfig.Types import Distribution.Client.Targets import Distribution.Client.Types import Distribution.Client.Types.OverwritePolicy (OverwritePolicy) import Distribution.Client.Types.SourceRepo (SourceRepositoryPackage) import 
Distribution.Simple.Compiler (PackageDB) import Data.TreeDiff.Class import Data.TreeDiff.Instances.Cabal () import Network.URI instance (ToExpr k, ToExpr v) => ToExpr (MapMappend k v) instance (ToExpr k, ToExpr v) => ToExpr (MapLast k v) instance ToExpr (f FilePath) => ToExpr (SourceRepositoryPackage f) instance ToExpr ActiveRepoEntry instance ToExpr ActiveRepos instance ToExpr AllowBootLibInstalls instance ToExpr AllowNewer instance ToExpr AllowOlder instance ToExpr BuildReport instance ToExpr ClientInstallFlags instance ToExpr CombineStrategy instance ToExpr ConstraintSource instance ToExpr CountConflicts instance ToExpr FineGrainedConflicts instance ToExpr IndependentGoals instance ToExpr InstallMethod instance ToExpr InstallOutcome instance ToExpr LocalRepo instance ToExpr MinimizeConflictSet instance ToExpr OnlyConstrained instance ToExpr OptionalStanza instance ToExpr Outcome instance ToExpr OverwritePolicy instance ToExpr PackageConfig instance ToExpr PackageDB instance ToExpr PackageProperty instance ToExpr PreSolver instance ToExpr ProjectConfig instance ToExpr ProjectConfigBuildOnly instance ToExpr ProjectConfigProvenance instance ToExpr ProjectConfigShared instance ToExpr RelaxDepMod instance ToExpr RelaxDeps instance ToExpr RelaxDepScope instance ToExpr RelaxDepSubject instance ToExpr RelaxedDep instance ToExpr RemoteRepo instance ToExpr ReorderGoals instance ToExpr RepoIndexState instance ToExpr RepoName instance ToExpr ReportLevel instance ToExpr StrongFlags instance ToExpr Timestamp instance ToExpr TotalIndexState instance ToExpr UserConstraint instance ToExpr UserConstraintScope instance ToExpr UserQualifier instance ToExpr WriteGhcEnvironmentFilesPolicy instance ToExpr URI instance ToExpr URIAuth cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/UserConfig.hs0000644000000000000000000001012707346545000024162 0ustar0000000000000000{-# LANGUAGE CPP #-} module UnitTests.Distribution.Client.UserConfig ( tests ) where import Control.Exception 
(bracket) import Control.Monad (replicateM_) import Data.List (sort, nub) #if !MIN_VERSION_base(4,8,0) import Data.Monoid #endif import System.Directory (doesFileExist, getCurrentDirectory, getTemporaryDirectory) import System.FilePath (()) import Test.Tasty import Test.Tasty.HUnit import Distribution.Client.Config import Distribution.Utils.NubList (fromNubList) import Distribution.Client.Setup (GlobalFlags (..), InstallFlags (..)) import Distribution.Client.Utils (removeExistingFile) import Distribution.Simple.Setup (Flag (..), ConfigFlags (..), fromFlag) import Distribution.Simple.Utils (withTempDirectory) import Distribution.Verbosity (silent) tests :: [TestTree] tests = [ testCase "nullDiffOnCreate" nullDiffOnCreateTest , testCase "canDetectDifference" canDetectDifference , testCase "canUpdateConfig" canUpdateConfig , testCase "doubleUpdateConfig" doubleUpdateConfig , testCase "newDefaultConfig" newDefaultConfig ] nullDiffOnCreateTest :: Assertion nullDiffOnCreateTest = bracketTest $ \configFile -> do -- Create a new default config file in our test directory. _ <- createDefaultConfigFile silent [] configFile -- Now we read it in and compare it against the default. diff <- userConfigDiff silent (globalFlags configFile) [] assertBool (unlines $ "Following diff should be empty:" : diff) $ null diff canDetectDifference :: Assertion canDetectDifference = bracketTest $ \configFile -> do -- Create a new default config file in our test directory. _ <- createDefaultConfigFile silent [] configFile appendFile configFile "verbose: 0\n" diff <- userConfigDiff silent (globalFlags configFile) [] assertBool (unlines $ "Should detect a difference:" : diff) $ diff == [ "+ verbose: 0" ] canUpdateConfig :: Assertion canUpdateConfig = bracketTest $ \configFile -> do -- Write a trivial cabal file. writeFile configFile "tests: True\n" -- Update the config file. userConfigUpdate silent (globalFlags configFile) [] -- Load it again. 
updated <- loadConfig silent (Flag configFile) assertBool ("Field 'tests' should be True") $ fromFlag (configTests $ savedConfigureFlags updated) doubleUpdateConfig :: Assertion doubleUpdateConfig = bracketTest $ \configFile -> do -- Create a new default config file in our test directory. _ <- createDefaultConfigFile silent [] configFile -- Update it twice. replicateM_ 2 $ userConfigUpdate silent (globalFlags configFile) [] -- Load it again. updated <- loadConfig silent (Flag configFile) assertBool ("Field 'remote-repo' doesn't contain duplicates") $ listUnique (map show . fromNubList . globalRemoteRepos $ savedGlobalFlags updated) assertBool ("Field 'extra-prog-path' doesn't contain duplicates") $ listUnique (map show . fromNubList . configProgramPathExtra $ savedConfigureFlags updated) assertBool ("Field 'build-summary' doesn't contain duplicates") $ listUnique (map show . fromNubList . installSummaryFile $ savedInstallFlags updated) newDefaultConfig :: Assertion newDefaultConfig = do sysTmpDir <- getTemporaryDirectory withTempDirectory silent sysTmpDir "cabal-test" $ \tmpDir -> do let configFile = tmpDir "tmp.config" _ <- createDefaultConfigFile silent [] configFile exists <- doesFileExist configFile assertBool ("Config file should be written to " ++ configFile) exists globalFlags :: FilePath -> GlobalFlags globalFlags configFile = mempty { globalConfigFile = Flag configFile } listUnique :: Ord a => [a] -> Bool listUnique xs = let sorted = sort xs in nub sorted == xs bracketTest :: (FilePath -> IO ()) -> Assertion bracketTest = bracket testSetup testTearDown where testSetup :: IO FilePath testSetup = fmap ( "test-user-config") getCurrentDirectory testTearDown :: FilePath -> IO () testTearDown configFile = mapM_ removeExistingFile [configFile, configFile ++ ".backup"] cabal-install-3.8.1.0/tests/UnitTests/Distribution/Client/VCS.hs0000644000000000000000000011225407346545000022555 0ustar0000000000000000{-# LANGUAGE RecordWildCards, NamedFieldPuns, KindSignatures, 
DataKinds #-} {-# LANGUAGE AllowAmbiguousTypes, TypeApplications, ScopedTypeVariables #-} module UnitTests.Distribution.Client.VCS (tests) where import Distribution.Client.Compat.Prelude import Distribution.Client.VCS import Distribution.Client.RebuildMonad ( execRebuild ) import Distribution.Simple.Program import Distribution.System ( buildOS, OS (Windows) ) import Distribution.Verbosity as Verbosity import Distribution.Client.Types.SourceRepo (SourceRepositoryPackage (..), SourceRepoProxy) import Data.List (mapAccumL) import Data.Tuple import qualified Data.Map as Map import qualified Data.Set as Set import qualified Control.Monad.State as State import Control.Monad.State (StateT, liftIO, execStateT) import Control.Exception import Control.Concurrent (threadDelay) import System.IO import System.FilePath import System.Directory import System.Random import Test.Tasty import Test.Tasty.QuickCheck import Test.Tasty.ExpectedFailure import UnitTests.Distribution.Client.ArbitraryInstances import UnitTests.TempTestDir (withTestDir, removeDirectoryRecursiveHack) -- | These tests take the following approach: we generate a pure representation -- of a repository plus a corresponding real repository, and then run various -- test operations and compare the actual working state with the expected -- working state. -- -- The first test simply checks that the test infrastructure works. It -- constructs a repository on disk and then checks out every tag or commit -- and checks that the working state is the same as the pure representation. -- -- The second test works in a similar way but tests 'syncSourceRepos'. It -- uses an arbitrary source repo and a set of (initially empty) destination -- directories. It picks a number of tags or commits from the source repo and -- synchronises the destination directories to those target states, and then -- checks that the working state is as expected (given the pure representation). 
-- tests :: MTimeChange -> [TestTree] tests mtimeChange = map (localOption $ QuickCheckTests 10) [ ignoreInWindows "See issue #8048" $ testGroup "git" [ testProperty "check VCS test framework" prop_framework_git , testProperty "cloneSourceRepo" prop_cloneRepo_git , testProperty "syncSourceRepos" prop_syncRepos_git ] -- , ignoreTestBecause "for the moment they're not yet working" $ testGroup "darcs" [ testProperty "check VCS test framework" $ prop_framework_darcs mtimeChange , testProperty "cloneSourceRepo" $ prop_cloneRepo_darcs mtimeChange , testProperty "syncSourceRepos" $ prop_syncRepos_darcs mtimeChange ] , ignoreTestBecause "for the moment they're not yet working" $ testGroup "pijul" [ testProperty "check VCS test framework" prop_framework_pijul , testProperty "cloneSourceRepo" prop_cloneRepo_pijul , testProperty "syncSourceRepos" prop_syncRepos_pijul ] , ignoreTestBecause "for the moment they're not yet working" $ testGroup "mercurial" [ testProperty "check VCS test framework" prop_framework_hg , testProperty "cloneSourceRepo" prop_cloneRepo_hg , testProperty "syncSourceRepos" prop_syncRepos_hg ] ] where ignoreInWindows msg = case buildOS of Windows -> ignoreTestBecause msg _ -> id prop_framework_git :: BranchingRepoRecipe 'SubmodulesSupported -> Property prop_framework_git = ioProperty . prop_framework vcsGit vcsTestDriverGit . WithBranchingSupport prop_framework_darcs :: MTimeChange -> NonBranchingRepoRecipe 'SubmodulesNotSupported -> Property prop_framework_darcs mtimeChange = ioProperty . prop_framework vcsDarcs (vcsTestDriverDarcs mtimeChange) . WithoutBranchingSupport prop_framework_pijul :: BranchingRepoRecipe 'SubmodulesNotSupported -> Property prop_framework_pijul = ioProperty . prop_framework vcsPijul vcsTestDriverPijul . WithBranchingSupport prop_framework_hg :: BranchingRepoRecipe 'SubmodulesNotSupported -> Property prop_framework_hg = ioProperty . prop_framework vcsHg vcsTestDriverHg . 
WithBranchingSupport prop_cloneRepo_git :: BranchingRepoRecipe 'SubmodulesSupported -> Property prop_cloneRepo_git = ioProperty . prop_cloneRepo vcsGit vcsTestDriverGit . WithBranchingSupport prop_cloneRepo_darcs :: MTimeChange -> NonBranchingRepoRecipe 'SubmodulesNotSupported -> Property prop_cloneRepo_darcs mtimeChange = ioProperty . prop_cloneRepo vcsDarcs (vcsTestDriverDarcs mtimeChange) . WithoutBranchingSupport prop_cloneRepo_pijul :: BranchingRepoRecipe 'SubmodulesNotSupported -> Property prop_cloneRepo_pijul = ioProperty . prop_cloneRepo vcsPijul vcsTestDriverPijul . WithBranchingSupport prop_cloneRepo_hg :: BranchingRepoRecipe 'SubmodulesNotSupported -> Property prop_cloneRepo_hg = ioProperty . prop_cloneRepo vcsHg vcsTestDriverHg . WithBranchingSupport prop_syncRepos_git :: RepoDirSet -> SyncTargetIterations -> PrngSeed -> BranchingRepoRecipe 'SubmodulesSupported -> Property prop_syncRepos_git destRepoDirs syncTargetSetIterations seed = ioProperty . prop_syncRepos vcsGit vcsTestDriverGit destRepoDirs syncTargetSetIterations seed . WithBranchingSupport prop_syncRepos_darcs :: MTimeChange -> RepoDirSet -> SyncTargetIterations -> PrngSeed -> NonBranchingRepoRecipe 'SubmodulesNotSupported -> Property prop_syncRepos_darcs mtimeChange destRepoDirs syncTargetSetIterations seed = ioProperty . prop_syncRepos vcsDarcs (vcsTestDriverDarcs mtimeChange) destRepoDirs syncTargetSetIterations seed . WithoutBranchingSupport prop_syncRepos_pijul :: RepoDirSet -> SyncTargetIterations -> PrngSeed -> BranchingRepoRecipe 'SubmodulesNotSupported -> Property prop_syncRepos_pijul destRepoDirs syncTargetSetIterations seed = ioProperty . prop_syncRepos vcsPijul vcsTestDriverPijul destRepoDirs syncTargetSetIterations seed . WithBranchingSupport prop_syncRepos_hg :: RepoDirSet -> SyncTargetIterations -> PrngSeed -> BranchingRepoRecipe 'SubmodulesNotSupported -> Property prop_syncRepos_hg destRepoDirs syncTargetSetIterations seed = ioProperty . 
prop_syncRepos vcsHg vcsTestDriverHg destRepoDirs syncTargetSetIterations seed . WithBranchingSupport -- ------------------------------------------------------------ -- * General test setup -- ------------------------------------------------------------ testSetup :: VCS Program -> (Verbosity -> VCS ConfiguredProgram -> FilePath -> FilePath -> VCSTestDriver) -> RepoRecipe submodules -> (VCSTestDriver -> FilePath -> RepoState -> IO a) -> IO a testSetup vcs mkVCSTestDriver repoRecipe theTest = do -- test setup vcs' <- configureVCS verbosity vcs withTestDir verbosity "vcstest" $ \tmpdir -> do let srcRepoPath = tmpdir "src" submodulesPath = tmpdir "submodules" vcsDriver = mkVCSTestDriver verbosity vcs' submodulesPath srcRepoPath repoState <- createRepo vcsDriver repoRecipe -- actual test result <- theTest vcsDriver tmpdir repoState return result where verbosity = silent -- ------------------------------------------------------------ -- * Test 1: VCS infrastructure -- ------------------------------------------------------------ -- | This test simply checks that the test infrastructure works. It constructs -- a repository on disk and then checks out every tag or commit and checks that -- the working state is the same as the pure representation. -- prop_framework :: VCS Program -> (Verbosity -> VCS ConfiguredProgram -> FilePath -> FilePath -> VCSTestDriver) -> RepoRecipe submodules -> IO () prop_framework vcs mkVCSTestDriver repoRecipe = testSetup vcs mkVCSTestDriver repoRecipe $ \vcsDriver tmpdir repoState -> mapM_ (checkAtTag vcsDriver tmpdir) (Map.toList (allTags repoState)) where -- Check for any given tag/commit in the 'RepoState' that the working state -- matches the actual working state from the repository at that tag/commit. checkAtTag VCSTestDriver {..} tmpdir (tagname, expectedState) = case vcsCheckoutTag of -- We handle two cases: inplace checkouts for VCSs that support it -- (e.g. git) and separate dir otherwise (e.g. 
darcs) Left checkoutInplace -> do checkoutInplace tagname checkExpectedWorkingState vcsIgnoreFiles vcsRepoRoot expectedState Right checkoutCloneTo -> do checkoutCloneTo tagname destRepoPath checkExpectedWorkingState vcsIgnoreFiles destRepoPath expectedState removeDirectoryRecursiveHack silent destRepoPath where destRepoPath = tmpdir "dest" -- ------------------------------------------------------------ -- * Test 2: 'cloneSourceRepo' -- ------------------------------------------------------------ prop_cloneRepo :: VCS Program -> (Verbosity -> VCS ConfiguredProgram -> FilePath -> FilePath -> VCSTestDriver) -> RepoRecipe submodules -> IO () prop_cloneRepo vcs mkVCSTestDriver repoRecipe = testSetup vcs mkVCSTestDriver repoRecipe $ \vcsDriver tmpdir repoState -> mapM_ (checkAtTag vcsDriver tmpdir) (Map.toList (allTags repoState)) where checkAtTag VCSTestDriver{..} tmpdir (tagname, expectedState) = do cloneSourceRepo verbosity vcsVCS repo destRepoPath checkExpectedWorkingState vcsIgnoreFiles destRepoPath expectedState removeDirectoryRecursiveHack verbosity destRepoPath where destRepoPath = tmpdir "dest" repo = SourceRepositoryPackage { srpType = vcsRepoType vcsVCS , srpLocation = vcsRepoRoot , srpTag = Just tagname , srpBranch = Nothing , srpSubdir = [] , srpCommand = [] } verbosity = silent -- ------------------------------------------------------------ -- * Test 3: 'syncSourceRepos' -- ------------------------------------------------------------ newtype RepoDirSet = RepoDirSet Int deriving Show newtype SyncTargetIterations = SyncTargetIterations Int deriving Show newtype PrngSeed = PrngSeed Int deriving Show prop_syncRepos :: VCS Program -> (Verbosity -> VCS ConfiguredProgram -> FilePath -> FilePath -> VCSTestDriver) -> RepoDirSet -> SyncTargetIterations -> PrngSeed -> RepoRecipe submodules -> IO () prop_syncRepos vcs mkVCSTestDriver repoDirs syncTargetSetIterations seed repoRecipe = testSetup vcs mkVCSTestDriver repoRecipe $ \vcsDriver tmpdir repoState -> let 
srcRepoPath = vcsRepoRoot vcsDriver destRepoPaths = map (tmpdir ) (getRepoDirs repoDirs) in checkSyncRepos verbosity vcsDriver repoState srcRepoPath destRepoPaths syncTargetSetIterations seed where verbosity = silent getRepoDirs :: RepoDirSet -> [FilePath] getRepoDirs (RepoDirSet n) = [ "dest" ++ show i | i <- [1..n] ] -- | The purpose of this test is to check that irrespective of the local cached -- repo dir we can sync it to an arbitrary target state. So we do that by -- syncing each target dir to a sequence of target states without cleaning it -- in between. -- -- One slight complication is that 'syncSourceRepos' takes a whole list of -- target dirs to sync in one go (to allow for sharing). So we must actually -- generate and sync to a sequence of list of target repo states. -- -- So, given a source repo dir, the corresponding 'RepoState' and a number of -- target repo dirs, pick a sequence of (lists of) sync targets from the -- 'RepoState' and syncronise the target dirs with those targets, checking for -- each one that the actual working state matches the expected repo state. 
-- checkSyncRepos :: Verbosity -> VCSTestDriver -> RepoState -> FilePath -> [FilePath] -> SyncTargetIterations -> PrngSeed -> IO () checkSyncRepos verbosity VCSTestDriver { vcsVCS = vcs, vcsIgnoreFiles } repoState srcRepoPath destRepoPath (SyncTargetIterations syncTargetSetIterations) (PrngSeed seed) = mapM_ checkSyncTargetSet syncTargetSets where checkSyncTargetSet :: [(SourceRepoProxy, FilePath, RepoWorkingState)] -> IO () checkSyncTargetSet syncTargets = do _ <- execRebuild "root-unused" $ syncSourceRepos verbosity vcs [ (repo, repoPath) | (repo, repoPath, _) <- syncTargets ] sequence_ [ checkExpectedWorkingState vcsIgnoreFiles repoPath workingState | (_, repoPath, workingState) <- syncTargets ] syncTargetSets = take syncTargetSetIterations $ pickSyncTargetSets (vcsRepoType vcs) repoState srcRepoPath destRepoPath (mkStdGen seed) pickSyncTargetSets :: RepoType -> RepoState -> FilePath -> [FilePath] -> StdGen -> [[(SourceRepoProxy, FilePath, RepoWorkingState)]] pickSyncTargetSets repoType repoState srcRepoPath dstReposPath = assert (Map.size (allTags repoState) > 0) $ unfoldr (Just . swap . 
pickSyncTargetSet) where pickSyncTargetSet :: Rand [(SourceRepoProxy, FilePath, RepoWorkingState)] pickSyncTargetSet = flip (mapAccumL (flip pickSyncTarget)) dstReposPath pickSyncTarget :: FilePath -> Rand (SourceRepoProxy, FilePath, RepoWorkingState) pickSyncTarget destRepoPath prng = (prng', (repo, destRepoPath, workingState)) where repo = SourceRepositoryPackage { srpType = repoType , srpLocation = srcRepoPath , srpTag = Just tag , srpBranch = Nothing , srpSubdir = Proxy , srpCommand = [] } (tag, workingState) = Map.elemAt tagIdx (allTags repoState) (tagIdx, prng') = randomR (0, Map.size (allTags repoState) - 1) prng type Rand a = StdGen -> (StdGen, a) instance Arbitrary RepoDirSet where arbitrary = sized $ \n -> oneof $ [ RepoDirSet <$> pure 1 ] ++ [ RepoDirSet <$> choose (2,5) | n >= 3 ] shrink (RepoDirSet n) = [ RepoDirSet i | i <- shrink n, i > 0 ] instance Arbitrary SyncTargetIterations where arbitrary = sized $ \n -> SyncTargetIterations <$> elements [ 1 .. min 20 (n + 1) ] shrink (SyncTargetIterations n) = [ SyncTargetIterations i | i <- shrink n, i > 0 ] instance Arbitrary PrngSeed where arbitrary = PrngSeed <$> arbitraryBoundedRandom -- ------------------------------------------------------------ -- * Instructions for constructing repositories -- ------------------------------------------------------------ -- These instructions for constructing a repository can be interpreted in two -- ways: to make a pure representation of repository state, and to execute -- VCS commands to make a repository on-disk. 
data SubmodulesSupport = SubmodulesSupported | SubmodulesNotSupported class KnownSubmodulesSupport (a :: SubmodulesSupport) where submoduleSupport :: SubmodulesSupport instance KnownSubmodulesSupport 'SubmodulesSupported where submoduleSupport = SubmodulesSupported instance KnownSubmodulesSupport 'SubmodulesNotSupported where submoduleSupport = SubmodulesNotSupported data FileUpdate = FileUpdate FilePath String deriving Show data SubmoduleAdd = SubmoduleAdd FilePath FilePath (Commit 'SubmodulesSupported) deriving Show newtype Commit (submodules :: SubmodulesSupport) = Commit [Either FileUpdate SubmoduleAdd] deriving Show data TaggedCommits (submodules :: SubmodulesSupport) = TaggedCommits TagName [Commit submodules] deriving Show data BranchCommits (submodules :: SubmodulesSupport) = BranchCommits BranchName [Commit submodules] deriving Show type BranchName = String type TagName = String -- | Instructions to make a repository without branches, for VCSs that do not -- support branches (e.g. darcs). newtype NonBranchingRepoRecipe submodules = NonBranchingRepoRecipe [TaggedCommits submodules] deriving Show -- | Instructions to make a repository with branches, for VCSs that do -- support branches (e.g. git). 
newtype BranchingRepoRecipe submodules = BranchingRepoRecipe [Either (TaggedCommits submodules) (BranchCommits submodules)] deriving Show data RepoRecipe submodules = WithBranchingSupport (BranchingRepoRecipe submodules) | WithoutBranchingSupport (NonBranchingRepoRecipe submodules) deriving Show -- --------------------------------------------------------------------------- -- Arbitrary instances for them genFileName :: Gen FilePath genFileName = (\c -> "file" [c]) <$> choose ('A', 'E') instance Arbitrary FileUpdate where arbitrary = genOnlyFileUpdate where genOnlyFileUpdate = FileUpdate <$> genFileName <*> genFileContent genFileContent = vectorOf 10 (choose ('#', '~')) instance Arbitrary SubmoduleAdd where arbitrary = genOnlySubmoduleAdd where genOnlySubmoduleAdd = SubmoduleAdd <$> genFileName <*> genSubmoduleSrc <*> arbitrary genSubmoduleSrc = vectorOf 20 (choose ('a', 'z')) instance forall submodules.KnownSubmodulesSupport submodules => Arbitrary (Commit submodules) where arbitrary = Commit <$> shortListOf1 5 fileUpdateOrSubmoduleAdd where fileUpdateOrSubmoduleAdd = case submoduleSupport @submodules of SubmodulesSupported -> frequency [ (10, Left <$> arbitrary) , (1, Right <$> arbitrary) ] SubmodulesNotSupported -> Left <$> arbitrary shrink (Commit writes) = Commit <$> filter (not . null) (shrink writes) instance KnownSubmodulesSupport submodules => Arbitrary (TaggedCommits submodules) where arbitrary = TaggedCommits <$> genTagName <*> shortListOf1 5 arbitrary where genTagName = ("tag_" ++) <$> shortListOf1 5 (choose ('A', 'Z')) shrink (TaggedCommits tag commits) = TaggedCommits tag <$> filter (not . null) (shrink commits) instance KnownSubmodulesSupport submodules => Arbitrary (BranchCommits submodules) where arbitrary = BranchCommits <$> genBranchName <*> shortListOf1 5 arbitrary where genBranchName = sized $ \n -> (\c -> "branch_" ++ [c]) <$> elements (take (max 1 n) ['A'..'E']) shrink (BranchCommits branch commits) = BranchCommits branch <$> filter (not . 
null) (shrink commits) instance KnownSubmodulesSupport submodules => Arbitrary (NonBranchingRepoRecipe submodules) where arbitrary = NonBranchingRepoRecipe <$> shortListOf1 15 arbitrary shrink (NonBranchingRepoRecipe xs) = NonBranchingRepoRecipe <$> filter (not . null) (shrink xs) instance KnownSubmodulesSupport submodules => Arbitrary (BranchingRepoRecipe submodules) where arbitrary = BranchingRepoRecipe <$> shortListOf1 15 taggedOrBranch where taggedOrBranch = frequency [ (3, Left <$> arbitrary) , (1, Right <$> arbitrary) ] shrink (BranchingRepoRecipe xs) = BranchingRepoRecipe <$> filter (not . null) (shrink xs) -- ------------------------------------------------------------ -- * A pure model of repository state -- ------------------------------------------------------------ -- | The full state of a repository. In particular it records the full working -- state for every tag. -- -- This is also the interpreter state for executing a 'RepoRecipe'. -- -- This allows us to compare expected working states with the actual files in -- the working directory of a repository. See 'checkExpectedWorkingState'. 
-- data RepoState = RepoState { currentBranch :: BranchName, currentWorking :: RepoWorkingState, allTags :: Map TagOrCommitId RepoWorkingState, allBranches :: Map BranchName RepoWorkingState } deriving Show type RepoWorkingState = Map FilePath String type CommitId = String type TagOrCommitId = String ------------------------------------------------------------------------------ -- Functions used to interpret instructions for constructing repositories initialRepoState :: RepoState initialRepoState = RepoState { currentBranch = "branch_master", currentWorking = Map.empty, allTags = Map.empty, allBranches = Map.empty } updateFile :: FilePath -> String -> RepoState -> RepoState updateFile filename content state@RepoState{currentWorking} = let removeSubmodule = Map.filterWithKey (\path _ -> not $ filename `isPrefixOf` path) currentWorking in state { currentWorking = Map.insert filename content removeSubmodule } addSubmodule :: FilePath -> RepoState -> RepoState -> RepoState addSubmodule submodulePath submoduleState mainState = let newFiles = Map.mapKeys (submodulePath ) (currentWorking submoduleState) removeSubmodule = Map.filterWithKey (\path _ -> not $ submodulePath `isPrefixOf` path ) (currentWorking mainState) newWorking = Map.union removeSubmodule newFiles in mainState { currentWorking = newWorking} addTagOrCommit :: TagOrCommitId -> RepoState -> RepoState addTagOrCommit commit state@RepoState{currentWorking, allTags} = state { allTags = Map.insert commit currentWorking allTags } switchBranch :: BranchName -> RepoState -> RepoState switchBranch branch state@RepoState{currentWorking, currentBranch, allBranches} = -- Use updated allBranches to cover case of switching to the same branch let allBranches' = Map.insert currentBranch currentWorking allBranches in state { currentBranch = branch, currentWorking = case Map.lookup branch allBranches' of Just working -> working -- otherwise we're creating a new branch, which starts -- from our current branch state Nothing -> 
currentWorking, allBranches = allBranches' } -- ------------------------------------------------------------ -- * Comparing on-disk with expected 'RepoWorkingState' -- ------------------------------------------------------------ -- | Compare expected working states with the actual files in -- the working directory of a repository. -- checkExpectedWorkingState :: Set FilePath -> FilePath -> RepoWorkingState -> IO () checkExpectedWorkingState ignore repoPath expectedState = do currentState <- getCurrentWorkingState ignore repoPath unless (currentState == expectedState) $ throwIO (WorkingStateMismatch expectedState currentState) data WorkingStateMismatch = WorkingStateMismatch RepoWorkingState -- expected RepoWorkingState -- actual deriving Show instance Exception WorkingStateMismatch getCurrentWorkingState :: Set FilePath -> FilePath -> IO RepoWorkingState getCurrentWorkingState ignore repoRoot = do entries <- getDirectoryContentsRecursive ignore repoRoot "" Map.fromList <$> mapM getFileEntry [ file | (file, isDir) <- entries, not isDir ] where getFileEntry name = withBinaryFile (repoRoot name) ReadMode $ \h -> do str <- hGetContents h _ <- evaluate (length str) return (name, str) getDirectoryContentsRecursive :: Set FilePath -> FilePath -> FilePath -> IO [(FilePath, Bool)] getDirectoryContentsRecursive ignore dir0 dir = do entries <- getDirectoryContents (dir0 dir) entries' <- sequence [ do isdir <- doesDirectoryExist (dir0 dir entry) return (dir entry, isdir) | entry <- entries , not (isPrefixOf "." 
entry) , (dir entry) `Set.notMember` ignore ] let subdirs = [ d | (d, True) <- entries' ] subdirEntries <- mapM (getDirectoryContentsRecursive ignore dir0) subdirs return (concat (entries' : subdirEntries)) -- ------------------------------------------------------------ -- * Executing instructions to make on-disk VCS repos -- ------------------------------------------------------------ -- | Execute the instructions in a 'RepoRecipe' using the given 'VCSTestDriver' -- to make an on-disk repository. -- -- This also returns a 'RepoState'. This is done as part of construction to -- support VCSs like git that have commit ids, so that those commit ids can be -- included in the 'RepoState's 'allTags' set. -- createRepo :: VCSTestDriver -> RepoRecipe submodules -> IO RepoState createRepo vcsDriver@VCSTestDriver{vcsRepoRoot, vcsInit} recipe = do createDirectoryIfMissing True vcsRepoRoot createDirectoryIfMissing True (vcsRepoRoot "file") vcsInit execStateT createRepoAction initialRepoState where createRepoAction :: StateT RepoState IO () createRepoAction = case recipe of WithoutBranchingSupport r -> execNonBranchingRepoRecipe vcsDriver r WithBranchingSupport r -> execBranchingRepoRecipe vcsDriver r type CreateRepoAction a = VCSTestDriver -> a -> StateT RepoState IO () execNonBranchingRepoRecipe :: CreateRepoAction (NonBranchingRepoRecipe submodules) execNonBranchingRepoRecipe vcsDriver (NonBranchingRepoRecipe taggedCommits) = mapM_ (execTaggdCommits vcsDriver) taggedCommits execBranchingRepoRecipe :: CreateRepoAction (BranchingRepoRecipe submodules) execBranchingRepoRecipe vcsDriver (BranchingRepoRecipe taggedCommits) = mapM_ (either (execTaggdCommits vcsDriver) (execBranchCommits vcsDriver)) taggedCommits execBranchCommits :: CreateRepoAction (BranchCommits submodules) execBranchCommits vcsDriver@VCSTestDriver{vcsSwitchBranch} (BranchCommits branch commits) = do mapM_ (execCommit vcsDriver) commits -- add commits and then switch branch State.modify (switchBranch branch) 
state <- State.get -- repo state after the commits and branch switch liftIO $ vcsSwitchBranch state branch -- It may seem odd that we add commits on the existing branch and then -- switch branch. In part this is because git cannot branch from an empty -- repo state, it complains that the master branch doesn't exist yet. execTaggdCommits :: CreateRepoAction (TaggedCommits submodules) execTaggdCommits vcsDriver@VCSTestDriver{vcsTagState} (TaggedCommits tagname commits) = do mapM_ (execCommit vcsDriver) commits -- add commits then tag state <- State.get -- repo state after the commits liftIO $ vcsTagState state tagname State.modify (addTagOrCommit tagname) execCommit :: CreateRepoAction (Commit submodules) execCommit vcsDriver@VCSTestDriver{..} (Commit fileUpdates) = do mapM_ (either (execFileUpdate vcsDriver) (execSubmoduleAdd vcsDriver)) fileUpdates state <- State.get -- existing state, not updated mcommit <- liftIO $ vcsCommitChanges state State.modify (maybe id addTagOrCommit mcommit) execFileUpdate :: CreateRepoAction FileUpdate execFileUpdate VCSTestDriver{..} (FileUpdate filename content) = do isDir <- liftIO $ doesDirectoryExist (vcsRepoRoot filename) liftIO . 
when isDir $ removeDirectoryRecursive (vcsRepoRoot filename) liftIO $ writeFile (vcsRepoRoot filename) content state <- State.get -- existing state, not updated liftIO $ vcsAddFile state filename State.modify (updateFile filename content) execSubmoduleAdd :: CreateRepoAction SubmoduleAdd execSubmoduleAdd vcsDriver (SubmoduleAdd submodulePath source submoduleCommit) = do submoduleVcsDriver <- liftIO $ vcsSubmoduleDriver vcsDriver source let submoduleRecipe = WithoutBranchingSupport $ NonBranchingRepoRecipe [TaggedCommits "submodule-tag" [submoduleCommit]] submoduleState <- liftIO $ createRepo submoduleVcsDriver submoduleRecipe mainState <- State.get -- existing state, not updated liftIO $ vcsAddSubmodule vcsDriver mainState (vcsRepoRoot submoduleVcsDriver) submodulePath State.modify $ addSubmodule submodulePath submoduleState -- ------------------------------------------------------------ -- * VCSTestDriver for various VCSs -- ------------------------------------------------------------ -- | Extends 'VCS' with extra methods to construct a repository. Used by -- 'createRepo'. -- -- Several of the methods are allowed to rely on the current 'RepoState' -- because some VCSs need different commands for initial vs later actions -- (like adding a file to the tracked set, or creating a new branch). -- -- The driver instance knows the particular repo directory. 
-- data VCSTestDriver = VCSTestDriver { vcsVCS :: VCS ConfiguredProgram, vcsRepoRoot :: FilePath, vcsIgnoreFiles :: Set FilePath, vcsInit :: IO (), vcsAddFile :: RepoState -> FilePath -> IO (), vcsSubmoduleDriver :: FilePath -> IO VCSTestDriver, vcsAddSubmodule :: RepoState -> FilePath -> FilePath -> IO (), vcsCommitChanges :: RepoState -> IO (Maybe CommitId), vcsTagState :: RepoState -> TagName -> IO (), vcsSwitchBranch :: RepoState -> BranchName -> IO (), vcsCheckoutTag :: Either (TagName -> IO ()) (TagName -> FilePath -> IO ()) } vcsTestDriverGit :: Verbosity -> VCS ConfiguredProgram -> FilePath -> FilePath -> VCSTestDriver vcsTestDriverGit verbosity vcs submoduleDir repoRoot = VCSTestDriver { vcsVCS = vcs , vcsRepoRoot = repoRoot , vcsIgnoreFiles = Set.empty , vcsInit = git $ ["init"] ++ verboseArg , vcsAddFile = \_ filename -> git ["add", filename] , vcsCommitChanges = \_state -> do git $ [ "-c", "user.name=A", "-c", "user.email=a@example.com" , "commit", "--all", "--message=a patch" , "--author=A " ] ++ verboseArg commit <- git' ["log", "--format=%H", "-1"] let commit' = takeWhile (not . isSpace) commit return (Just commit') , vcsTagState = \_ tagname -> git ["tag", "--force", "--no-sign", tagname] , vcsSubmoduleDriver = pure . vcsTestDriverGit verbosity vcs submoduleDir . (submoduleDir ) , vcsAddSubmodule = \_ source dest -> do destExists <- (||) <$> doesFileExist (repoRoot dest) <*> doesDirectoryExist (repoRoot dest) when destExists $ git ["rm", "-f", dest] -- If there is an old submodule git dir with the same name, remove it. -- It most likely has a different URL and `git submodule add` will fai. 
submoduleGitDirExists <- doesDirectoryExist $ submoduleGitDir dest when submoduleGitDirExists $ removeDirectoryRecursive (submoduleGitDir dest) git ["submodule", "add", source, dest] git ["submodule", "update", "--init", "--recursive", "--force"] , vcsSwitchBranch = \RepoState{allBranches} branchname -> do deinitAndRemoveCachedSubmodules unless (branchname `Map.member` allBranches) $ git ["branch", branchname] git $ ["checkout", branchname] ++ verboseArg updateSubmodulesAndCleanup , vcsCheckoutTag = Left $ \tagname -> do deinitAndRemoveCachedSubmodules git $ ["checkout", "--detach", "--force", tagname] ++ verboseArg updateSubmodulesAndCleanup } where gitInvocation args = (programInvocation (vcsProgram vcs) args) { progInvokeCwd = Just repoRoot } git = runProgramInvocation verbosity . gitInvocation git' = getProgramInvocationOutput verbosity . gitInvocation verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] submoduleGitDir path = repoRoot ".git" "modules" path deinitAndRemoveCachedSubmodules = do git $ ["submodule", "deinit", "--force", "--all"] ++ verboseArg let gitModulesDir = repoRoot ".git" "modules" gitModulesExists <- doesDirectoryExist gitModulesDir when gitModulesExists $ removeDirectoryRecursive gitModulesDir updateSubmodulesAndCleanup = do git $ ["submodule", "sync", "--recursive"] ++ verboseArg git $ ["submodule", "update", "--init", "--recursive", "--force"] ++ verboseArg git $ ["submodule", "foreach", "--recursive"] ++ verboseArg ++ ["git clean -ffxdq"] git $ ["clean", "-ffxdq"] ++ verboseArg type MTimeChange = Int vcsTestDriverDarcs :: MTimeChange -> Verbosity -> VCS ConfiguredProgram -> FilePath -> FilePath -> VCSTestDriver vcsTestDriverDarcs mtimeChange verbosity vcs _ repoRoot = VCSTestDriver { vcsVCS = vcs , vcsRepoRoot = repoRoot , vcsIgnoreFiles = Set.singleton "_darcs" , vcsInit = darcs ["initialize"] , vcsAddFile = \state filename -> do threadDelay mtimeChange unless (filename `Map.member` currentWorking state) $ darcs ["add", filename] 
-- Darcs's file change tracking relies on mtime changes, -- so we have to be careful with doing stuff too quickly: , vcsSubmoduleDriver = \_-> fail "vcsSubmoduleDriver: darcs does not support submodules" , vcsAddSubmodule = \_ _ _ -> fail "vcsAddSubmodule: darcs does not support submodules" , vcsCommitChanges = \_state -> do threadDelay mtimeChange darcs ["record", "--all", "--author=author", "--name=a patch"] return Nothing , vcsTagState = \_ tagname -> darcs ["tag", "--author=author", tagname] , vcsSwitchBranch = \_ _ -> fail "vcsSwitchBranch: darcs does not support branches within a repo" , vcsCheckoutTag = Right $ \tagname dest -> darcs ["clone", "--lazy", "--tag=^" ++ tagname ++ "$", ".", dest] } where darcsInvocation args = (programInvocation (vcsProgram vcs) args) { progInvokeCwd = Just repoRoot } darcs = runProgramInvocation verbosity . darcsInvocation vcsTestDriverPijul :: Verbosity -> VCS ConfiguredProgram -> FilePath -> FilePath -> VCSTestDriver vcsTestDriverPijul verbosity vcs _ repoRoot = VCSTestDriver { vcsVCS = vcs , vcsRepoRoot = repoRoot , vcsIgnoreFiles = Set.empty , vcsInit = pijul $ ["init"] , vcsAddFile = \_ filename -> pijul ["add", filename] , vcsSubmoduleDriver = \_ -> fail "vcsSubmoduleDriver: pijul does not support submodules" , vcsAddSubmodule = \_ _ _ -> fail "vcsAddSubmodule: pijul does not support submodules" , vcsCommitChanges = \_state -> do pijul $ ["record", "-a", "-m 'a patch'" , "-A 'A '" ] commit <- pijul' ["log"] let commit' = takeWhile (not . isSpace) commit return (Just commit') -- tags work differently in pijul... 
-- so this is wrong , vcsTagState = \_ tagname -> pijul ["tag", tagname] , vcsSwitchBranch = \_ branchname -> do -- unless (branchname `Map.member` allBranches) $ -- pijul ["from-branch", branchname] pijul $ ["checkout", branchname] , vcsCheckoutTag = Left $ \tagname -> pijul $ ["checkout", tagname] } where gitInvocation args = (programInvocation (vcsProgram vcs) args) { progInvokeCwd = Just repoRoot } pijul = runProgramInvocation verbosity . gitInvocation pijul' = getProgramInvocationOutput verbosity . gitInvocation vcsTestDriverHg :: Verbosity -> VCS ConfiguredProgram -> FilePath -> FilePath -> VCSTestDriver vcsTestDriverHg verbosity vcs _ repoRoot = VCSTestDriver { vcsVCS = vcs , vcsRepoRoot = repoRoot , vcsIgnoreFiles = Set.empty , vcsInit = hg $ ["init"] ++ verboseArg , vcsAddFile = \_ filename -> hg ["add", filename] , vcsSubmoduleDriver = \_ -> fail "vcsSubmoduleDriver: hg submodules not supported" , vcsAddSubmodule = \_ _ _ -> fail "vcsAddSubmodule: hg submodules not supported" , vcsCommitChanges = \_state -> do hg $ [ "--user='A '" , "commit", "--message=a patch" ] ++ verboseArg commit <- hg' ["log", "--template='{node}\\n' -l1"] let commit' = takeWhile (not . isSpace) commit return (Just commit') , vcsTagState = \_ tagname -> hg ["tag", "--force", tagname] , vcsSwitchBranch = \RepoState{allBranches} branchname -> do unless (branchname `Map.member` allBranches) $ hg ["branch", branchname] hg $ ["checkout", branchname] ++ verboseArg , vcsCheckoutTag = Left $ \tagname -> hg $ ["checkout", "--rev", tagname] ++ verboseArg } where hgInvocation args = (programInvocation (vcsProgram vcs) args) { progInvokeCwd = Just repoRoot } hg = runProgramInvocation verbosity . hgInvocation hg' = getProgramInvocationOutput verbosity . 
hgInvocation verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Modular/0000755000000000000000000000000007346545000023220 5ustar0000000000000000cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Modular/Builder.hs0000644000000000000000000000074407346545000025147 0ustar0000000000000000module UnitTests.Distribution.Solver.Modular.Builder ( tests ) where import Distribution.Solver.Modular.Builder import Test.Tasty import Test.Tasty.QuickCheck tests :: [TestTree] tests = [ testProperty "splitsAltImplementation" splitsTest ] -- | Simpler splits implementation splits' :: [a] -> [(a, [a])] splits' [] = [] splits' (x : xs) = (x, xs) : map (\ (y, ys) -> (y, x : ys)) (splits' xs) splitsTest :: [Int] -> Property splitsTest xs = splits' xs === splits xs cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Modular/DSL.hs0000644000000000000000000010477707346545000024216 0ustar0000000000000000{-# LANGUAGE RecordWildCards #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE OverloadedStrings #-} -- | DSL for testing the modular solver module UnitTests.Distribution.Solver.Modular.DSL ( ExampleDependency(..) , Dependencies(..) , ExSubLib(..) , ExTest(..) , ExExe(..) , ExConstraint(..) , ExPreference(..) , ExampleDb , ExampleVersionRange , ExamplePkgVersion , ExamplePkgName , ExampleFlagName , ExFlag(..) , ExampleAvailable(..) , ExampleInstalled(..) , ExampleQualifier(..) , ExampleVar(..) , EnableAllTests(..) 
, dependencies , publicDependencies , unbuildableDependencies , exAv , exAvNoLibrary , exInst , exSubLib , exTest , exExe , exFlagged , exResolve , extractInstallPlan , declareFlags , withSubLibrary , withSubLibraries , withSetupDeps , withTest , withTests , withExe , withExes , runProgress , mkSimpleVersion , mkVersionRange ) where import Prelude () import Distribution.Solver.Compat.Prelude import Distribution.Utils.Generic -- base import Control.Arrow (second) import qualified Data.Map as Map import qualified Distribution.Compat.NonEmptySet as NonEmptySet -- Cabal import qualified Distribution.CabalSpecVersion as C import qualified Distribution.Compiler as C import qualified Distribution.InstalledPackageInfo as IPI import Distribution.License (License(..)) import qualified Distribution.ModuleName as Module import qualified Distribution.Package as C hiding (HasUnitId(..)) import qualified Distribution.Types.ExeDependency as C import qualified Distribution.Types.ForeignLib as C import qualified Distribution.Types.LegacyExeDependency as C import qualified Distribution.Types.LibraryVisibility as C import qualified Distribution.Types.PkgconfigDependency as C import qualified Distribution.Types.PkgconfigVersion as C import qualified Distribution.Types.PkgconfigVersionRange as C import qualified Distribution.Types.UnqualComponentName as C import qualified Distribution.Types.CondTree as C import qualified Distribution.PackageDescription as C import qualified Distribution.PackageDescription.Check as C import qualified Distribution.Simple.PackageIndex as C.PackageIndex import Distribution.Simple.Setup (BooleanFlag(..)) import qualified Distribution.System as C import Distribution.Text (display) import qualified Distribution.Verbosity as C import qualified Distribution.Version as C import qualified Distribution.Utils.Path as C import Language.Haskell.Extension (Extension(..), Language(..)) -- cabal-install import Distribution.Client.Dependency import 
Distribution.Client.Dependency.Types import Distribution.Client.Types import qualified Distribution.Client.SolverInstallPlan as CI.SolverInstallPlan import Distribution.Solver.Types.ComponentDeps (ComponentDeps) import qualified Distribution.Solver.Types.ComponentDeps as CD import Distribution.Solver.Types.ConstraintSource import Distribution.Solver.Types.Flag import Distribution.Solver.Types.LabeledPackageConstraint import Distribution.Solver.Types.OptionalStanza import qualified Distribution.Solver.Types.PackageIndex as CI.PackageIndex import Distribution.Solver.Types.PackageConstraint import qualified Distribution.Solver.Types.PackagePath as P import qualified Distribution.Solver.Types.PkgConfigDb as PC import Distribution.Solver.Types.Settings import Distribution.Solver.Types.SolverPackage import Distribution.Solver.Types.SourcePackage import Distribution.Solver.Types.Variable {------------------------------------------------------------------------------- Example package database DSL In order to be able to set simple examples up quickly, we define a very simple version of the package database here explicitly designed for use in tests. The design of `ExampleDb` takes the perspective of the solver, not the perspective of the package DB. This makes it easier to set up tests for various parts of the solver, but makes the mapping somewhat awkward, because it means we first map from "solver perspective" `ExampleDb` to the package database format, and then the modular solver internally in `IndexConversion` maps this back to the solver specific data structures. IMPLEMENTATION NOTES -------------------- TODO: Perhaps these should be made comments of the corresponding data type definitions. For now these are just my own conclusions and may be wrong. * The difference between `GenericPackageDescription` and `PackageDescription` is that `PackageDescription` describes a particular _configuration_ of a package (for instance, see documentation for `checkPackage`). 
A `GenericPackageDescription` can be turned into a `PackageDescription` in two ways: a. `finalizePD` does the proper translation, by taking into account the platform, available dependencies, etc. and picks a flag assignment (or gives an error if no flag assignment can be found) b. `flattenPackageDescription` ignores flag assignment and just joins all components together. The slightly odd thing is that a `GenericPackageDescription` contains a `PackageDescription` as a field; both of the above functions do the same thing: they take the embedded `PackageDescription` as a basis for the result value, but override `library`, `executables`, `testSuites`, `benchmarks` and `buildDepends`. * The `condTreeComponents` fields of a `CondTree` is a list of triples `(condition, then-branch, else-branch)`, where the `else-branch` is optional. -------------------------------------------------------------------------------} type ExamplePkgName = String type ExamplePkgVersion = Int type ExamplePkgHash = String -- for example "installed" packages type ExampleFlagName = String type ExampleSubLibName = String type ExampleTestName = String type ExampleExeName = String type ExampleVersionRange = C.VersionRange data Dependencies = Dependencies { depsVisibility :: C.LibraryVisibility , depsIsBuildable :: Bool , depsExampleDependencies :: [ExampleDependency] } deriving Show instance Semigroup Dependencies where deps1 <> deps2 = Dependencies { depsVisibility = depsVisibility deps1 <> depsVisibility deps2 , depsIsBuildable = depsIsBuildable deps1 && depsIsBuildable deps2 , depsExampleDependencies = depsExampleDependencies deps1 ++ depsExampleDependencies deps2 } instance Monoid Dependencies where mempty = Dependencies { depsVisibility = mempty , depsIsBuildable = True , depsExampleDependencies = [] } mappend = (<>) dependencies :: [ExampleDependency] -> Dependencies dependencies deps = mempty { depsExampleDependencies = deps } publicDependencies :: Dependencies publicDependencies = mempty { 
depsVisibility = C.LibraryVisibilityPublic } unbuildableDependencies :: Dependencies unbuildableDependencies = mempty { depsIsBuildable = False } data ExampleDependency = -- | Simple dependency on any version ExAny ExamplePkgName -- | Simple dependency on a fixed version | ExFix ExamplePkgName ExamplePkgVersion -- | Simple dependency on a range of versions, with an inclusive lower bound -- and an exclusive upper bound. | ExRange ExamplePkgName ExamplePkgVersion ExamplePkgVersion -- | Sub-library dependency | ExSubLibAny ExamplePkgName ExampleSubLibName -- | Sub-library dependency on a fixed version | ExSubLibFix ExamplePkgName ExampleSubLibName ExamplePkgVersion -- | Build-tool-depends dependency | ExBuildToolAny ExamplePkgName ExampleExeName -- | Build-tool-depends dependency on a fixed version | ExBuildToolFix ExamplePkgName ExampleExeName ExamplePkgVersion -- | Legacy build-tools dependency | ExLegacyBuildToolAny ExamplePkgName -- | Legacy build-tools dependency on a fixed version | ExLegacyBuildToolFix ExamplePkgName ExamplePkgVersion -- | Dependencies indexed by a flag | ExFlagged ExampleFlagName Dependencies Dependencies -- | Dependency on a language extension | ExExt Extension -- | Dependency on a language version | ExLang Language -- | Dependency on a pkg-config package | ExPkg (ExamplePkgName, ExamplePkgVersion) deriving Show -- | Simplified version of D.Types.GenericPackageDescription.Flag for use in -- example source packages. 
data ExFlag = ExFlag { exFlagName :: ExampleFlagName , exFlagDefault :: Bool , exFlagType :: FlagType } deriving Show data ExSubLib = ExSubLib ExampleSubLibName Dependencies data ExTest = ExTest ExampleTestName Dependencies data ExExe = ExExe ExampleExeName Dependencies exSubLib :: ExampleSubLibName -> [ExampleDependency] -> ExSubLib exSubLib name deps = ExSubLib name (dependencies deps) exTest :: ExampleTestName -> [ExampleDependency] -> ExTest exTest name deps = ExTest name (dependencies deps) exExe :: ExampleExeName -> [ExampleDependency] -> ExExe exExe name deps = ExExe name (dependencies deps) exFlagged :: ExampleFlagName -> [ExampleDependency] -> [ExampleDependency] -> ExampleDependency exFlagged n t e = ExFlagged n (dependencies t) (dependencies e) data ExConstraint = ExVersionConstraint ConstraintScope ExampleVersionRange | ExFlagConstraint ConstraintScope ExampleFlagName Bool | ExStanzaConstraint ConstraintScope [OptionalStanza] deriving Show data ExPreference = ExPkgPref ExamplePkgName ExampleVersionRange | ExStanzaPref ExamplePkgName [OptionalStanza] deriving Show data ExampleAvailable = ExAv { exAvName :: ExamplePkgName , exAvVersion :: ExamplePkgVersion , exAvDeps :: ComponentDeps Dependencies -- Setting flags here is only necessary to override the default values of -- the fields in C.Flag. , exAvFlags :: [ExFlag] } deriving Show data ExampleVar = P ExampleQualifier ExamplePkgName | F ExampleQualifier ExamplePkgName ExampleFlagName | S ExampleQualifier ExamplePkgName OptionalStanza data ExampleQualifier = QualNone | QualIndep ExamplePkgName | QualSetup ExamplePkgName -- The two package names are the build target and the package containing the -- setup script. | QualIndepSetup ExamplePkgName ExamplePkgName -- The two package names are the package depending on the exe and the -- package containing the exe. | QualExe ExamplePkgName ExamplePkgName -- | Whether to enable tests in all packages in a test case. 
newtype EnableAllTests = EnableAllTests Bool deriving BooleanFlag -- | Constructs an 'ExampleAvailable' package for the 'ExampleDb', -- given: -- -- 1. The name 'ExamplePkgName' of the available package, -- 2. The version 'ExamplePkgVersion' available -- 3. The list of dependency constraints ('ExampleDependency') -- for this package's library component. 'ExampleDependency' -- provides a number of pre-canned dependency types to look at. -- exAv :: ExamplePkgName -> ExamplePkgVersion -> [ExampleDependency] -> ExampleAvailable exAv n v ds = (exAvNoLibrary n v) { exAvDeps = CD.fromLibraryDeps (dependencies ds) } -- | Constructs an 'ExampleAvailable' package without a default library -- component. exAvNoLibrary :: ExamplePkgName -> ExamplePkgVersion -> ExampleAvailable exAvNoLibrary n v = ExAv { exAvName = n , exAvVersion = v , exAvDeps = CD.empty , exAvFlags = [] } -- | Override the default settings (e.g., manual vs. automatic) for a subset of -- a package's flags. declareFlags :: [ExFlag] -> ExampleAvailable -> ExampleAvailable declareFlags flags ex = ex { exAvFlags = flags } withSubLibrary :: ExampleAvailable -> ExSubLib -> ExampleAvailable withSubLibrary ex lib = withSubLibraries ex [lib] withSubLibraries :: ExampleAvailable -> [ExSubLib] -> ExampleAvailable withSubLibraries ex libs = let subLibCDs = CD.fromList [(CD.ComponentSubLib $ C.mkUnqualComponentName name, deps) | ExSubLib name deps <- libs] in ex { exAvDeps = exAvDeps ex <> subLibCDs } withSetupDeps :: ExampleAvailable -> [ExampleDependency] -> ExampleAvailable withSetupDeps ex setupDeps = ex { exAvDeps = exAvDeps ex <> CD.fromSetupDeps (dependencies setupDeps) } withTest :: ExampleAvailable -> ExTest -> ExampleAvailable withTest ex test = withTests ex [test] withTests :: ExampleAvailable -> [ExTest] -> ExampleAvailable withTests ex tests = let testCDs = CD.fromList [(CD.ComponentTest $ C.mkUnqualComponentName name, deps) | ExTest name deps <- tests] in ex { exAvDeps = exAvDeps ex <> testCDs } withExe :: 
ExampleAvailable -> ExExe -> ExampleAvailable withExe ex exe = withExes ex [exe] withExes :: ExampleAvailable -> [ExExe] -> ExampleAvailable withExes ex exes = let exeCDs = CD.fromList [(CD.ComponentExe $ C.mkUnqualComponentName name, deps) | ExExe name deps <- exes] in ex { exAvDeps = exAvDeps ex <> exeCDs } -- | An installed package in 'ExampleDb'; construct me with 'exInst'. data ExampleInstalled = ExInst { exInstName :: ExamplePkgName , exInstVersion :: ExamplePkgVersion , exInstHash :: ExamplePkgHash , exInstBuildAgainst :: [ExamplePkgHash] } deriving Show -- | Constructs an example installed package given: -- -- 1. The name of the package 'ExamplePkgName', i.e., 'String' -- 2. The version of the package 'ExamplePkgVersion', i.e., 'Int' -- 3. The IPID for the package 'ExamplePkgHash', i.e., 'String' -- (just some unique identifier for the package.) -- 4. The 'ExampleInstalled' packages which this package was -- compiled against.) -- exInst :: ExamplePkgName -> ExamplePkgVersion -> ExamplePkgHash -> [ExampleInstalled] -> ExampleInstalled exInst pn v hash deps = ExInst pn v hash (map exInstHash deps) -- | An example package database is a list of installed packages -- 'ExampleInstalled' and available packages 'ExampleAvailable'. -- Generally, you want to use 'exInst' and 'exAv' to construct -- these packages. 
type ExampleDb = [Either ExampleInstalled ExampleAvailable] type DependencyTree a = C.CondTree C.ConfVar [C.Dependency] a type DependencyComponent a = C.CondBranch C.ConfVar [C.Dependency] a exDbPkgs :: ExampleDb -> [ExamplePkgName] exDbPkgs = map (either exInstName exAvName) exAvSrcPkg :: ExampleAvailable -> UnresolvedSourcePackage exAvSrcPkg ex = let pkgId = exAvPkgId ex flags :: [C.PackageFlag] flags = let declaredFlags :: Map ExampleFlagName C.PackageFlag declaredFlags = Map.fromListWith (\f1 f2 -> error $ "duplicate flag declarations: " ++ show [f1, f2]) [(exFlagName flag, mkFlag flag) | flag <- exAvFlags ex] usedFlags :: Map ExampleFlagName C.PackageFlag usedFlags = Map.fromList [(fn, mkDefaultFlag fn) | fn <- names] where names = extractFlags $ CD.flatDeps (exAvDeps ex) in -- 'declaredFlags' overrides 'usedFlags' to give flags non-default settings: Map.elems $ declaredFlags `Map.union` usedFlags subLibraries = [(name, deps) | (CD.ComponentSubLib name, deps) <- CD.toList (exAvDeps ex)] foreignLibraries = [(name, deps) | (CD.ComponentFLib name, deps) <- CD.toList (exAvDeps ex)] testSuites = [(name, deps) | (CD.ComponentTest name, deps) <- CD.toList (exAvDeps ex)] benchmarks = [(name, deps) | (CD.ComponentBench name, deps) <- CD.toList (exAvDeps ex)] executables = [(name, deps) | (CD.ComponentExe name, deps) <- CD.toList (exAvDeps ex)] setup = case depsExampleDependencies $ CD.setupDeps (exAvDeps ex) of [] -> Nothing deps -> Just C.SetupBuildInfo { C.setupDepends = mkSetupDeps deps, C.defaultSetupDepends = False } package = SourcePackage { srcpkgPackageId = pkgId , srcpkgSource = LocalTarballPackage "<>" , srcpkgDescrOverride = Nothing , srcpkgDescription = C.GenericPackageDescription { C.packageDescription = C.emptyPackageDescription { C.package = pkgId , C.setupBuildInfo = setup , C.licenseRaw = Right BSD3 , C.buildTypeRaw = if isNothing setup then Just C.Simple else Just C.Custom , C.category = "category" , C.maintainer = "maintainer" , C.description = 
"description" , C.synopsis = "synopsis" , C.licenseFiles = [C.unsafeMakeSymbolicPath "LICENSE"] -- Version 2.0 is required for internal libraries. , C.specVersion = C.CabalSpecV2_0 } , C.gpdScannedVersion = Nothing , C.genPackageFlags = flags , C.condLibrary = let mkLib v bi = mempty { C.libVisibility = v, C.libBuildInfo = bi } -- Avoid using the Monoid instance for [a] when getting -- the library dependencies, to allow for the possibility -- that the package doesn't have a library: libDeps = lookup CD.ComponentLib (CD.toList (exAvDeps ex)) in mkTopLevelCondTree defaultLib mkLib <$> libDeps , C.condSubLibraries = let mkTree = mkTopLevelCondTree defaultSubLib mkLib mkLib v bi = mempty { C.libVisibility = v, C.libBuildInfo = bi } in map (second mkTree) subLibraries , C.condForeignLibs = let mkTree = mkTopLevelCondTree (mkLib defaultTopLevelBuildInfo) (const mkLib) mkLib bi = mempty { C.foreignLibBuildInfo = bi } in map (second mkTree) foreignLibraries , C.condExecutables = let mkTree = mkTopLevelCondTree defaultExe (const mkExe) mkExe bi = mempty { C.buildInfo = bi } in map (second mkTree) executables , C.condTestSuites = let mkTree = mkTopLevelCondTree defaultTest (const mkTest) mkTest bi = mempty { C.testBuildInfo = bi } in map (second mkTree) testSuites , C.condBenchmarks = let mkTree = mkTopLevelCondTree defaultBenchmark (const mkBench) mkBench bi = mempty { C.benchmarkBuildInfo = bi } in map (second mkTree) benchmarks } } pkgCheckErrors = -- We ignore these warnings because some unit tests test that the -- solver allows unknown extensions/languages when the compiler -- supports them. 
let ignore = ["Unknown extensions:", "Unknown languages:"] in [ err | err <- C.checkPackage (srcpkgDescription package) Nothing , not $ any (`isPrefixOf` C.explanation err) ignore ] in if null pkgCheckErrors then package else error $ "invalid GenericPackageDescription for package " ++ display pkgId ++ ": " ++ show pkgCheckErrors where defaultTopLevelBuildInfo :: C.BuildInfo defaultTopLevelBuildInfo = mempty { C.defaultLanguage = Just Haskell98 } defaultLib :: C.Library defaultLib = mempty { C.libBuildInfo = defaultTopLevelBuildInfo , C.exposedModules = [Module.fromString "Module"] , C.libVisibility = C.LibraryVisibilityPublic } defaultSubLib :: C.Library defaultSubLib = mempty { C.libBuildInfo = defaultTopLevelBuildInfo , C.exposedModules = [Module.fromString "Module"] } defaultExe :: C.Executable defaultExe = mempty { C.buildInfo = defaultTopLevelBuildInfo , C.modulePath = "Main.hs" } defaultTest :: C.TestSuite defaultTest = mempty { C.testBuildInfo = defaultTopLevelBuildInfo , C.testInterface = C.TestSuiteExeV10 (C.mkVersion [1,0]) "Test.hs" } defaultBenchmark :: C.Benchmark defaultBenchmark = mempty { C.benchmarkBuildInfo = defaultTopLevelBuildInfo , C.benchmarkInterface = C.BenchmarkExeV10 (C.mkVersion [1,0]) "Benchmark.hs" } -- Split the set of dependencies into the set of dependencies of the library, -- the dependencies of the test suites and extensions. 
splitTopLevel :: [ExampleDependency] -> ( [ExampleDependency] , [Extension] , Maybe Language , [(ExamplePkgName, ExamplePkgVersion)] -- pkg-config , [(ExamplePkgName, ExampleExeName, C.VersionRange)] -- build tools , [(ExamplePkgName, C.VersionRange)] -- legacy build tools ) splitTopLevel [] = ([], [], Nothing, [], [], []) splitTopLevel (ExBuildToolAny p e:deps) = let (other, exts, lang, pcpkgs, exes, legacyExes) = splitTopLevel deps in (other, exts, lang, pcpkgs, (p, e, C.anyVersion):exes, legacyExes) splitTopLevel (ExBuildToolFix p e v:deps) = let (other, exts, lang, pcpkgs, exes, legacyExes) = splitTopLevel deps in (other, exts, lang, pcpkgs, (p, e, C.thisVersion (mkSimpleVersion v)):exes, legacyExes) splitTopLevel (ExLegacyBuildToolAny p:deps) = let (other, exts, lang, pcpkgs, exes, legacyExes) = splitTopLevel deps in (other, exts, lang, pcpkgs, exes, (p, C.anyVersion):legacyExes) splitTopLevel (ExLegacyBuildToolFix p v:deps) = let (other, exts, lang, pcpkgs, exes, legacyExes) = splitTopLevel deps in (other, exts, lang, pcpkgs, exes, (p, C.thisVersion (mkSimpleVersion v)):legacyExes) splitTopLevel (ExExt ext:deps) = let (other, exts, lang, pcpkgs, exes, legacyExes) = splitTopLevel deps in (other, ext:exts, lang, pcpkgs, exes, legacyExes) splitTopLevel (ExLang lang:deps) = case splitTopLevel deps of (other, exts, Nothing, pcpkgs, exes, legacyExes) -> (other, exts, Just lang, pcpkgs, exes, legacyExes) _ -> error "Only 1 Language dependency is supported" splitTopLevel (ExPkg pkg:deps) = let (other, exts, lang, pcpkgs, exes, legacyExes) = splitTopLevel deps in (other, exts, lang, pkg:pcpkgs, exes, legacyExes) splitTopLevel (dep:deps) = let (other, exts, lang, pcpkgs, exes, legacyExes) = splitTopLevel deps in (dep:other, exts, lang, pcpkgs, exes, legacyExes) -- Extract the total set of flags used extractFlags :: Dependencies -> [ExampleFlagName] extractFlags deps = concatMap go (depsExampleDependencies deps) where go :: ExampleDependency -> [ExampleFlagName] go 
(ExAny _) = [] go (ExFix _ _) = [] go (ExRange _ _ _) = [] go (ExSubLibAny _ _) = [] go (ExSubLibFix _ _ _) = [] go (ExBuildToolAny _ _) = [] go (ExBuildToolFix _ _ _) = [] go (ExLegacyBuildToolAny _) = [] go (ExLegacyBuildToolFix _ _) = [] go (ExFlagged f a b) = f : extractFlags a ++ extractFlags b go (ExExt _) = [] go (ExLang _) = [] go (ExPkg _) = [] -- Convert 'Dependencies' into a tree of a specific component type, using -- the given top level component and function for creating a component at -- any level. mkTopLevelCondTree :: forall a. Semigroup a => a -> (C.LibraryVisibility -> C.BuildInfo -> a) -> Dependencies -> DependencyTree a mkTopLevelCondTree defaultTopLevel mkComponent deps = let condNode = mkCondTree mkComponent deps in condNode { C.condTreeData = defaultTopLevel <> C.condTreeData condNode } -- Convert 'Dependencies' into a tree of a specific component type, using -- the given function to generate each component. mkCondTree :: (C.LibraryVisibility -> C.BuildInfo -> a) -> Dependencies -> DependencyTree a mkCondTree mkComponent deps = let (libraryDeps, exts, mlang, pcpkgs, buildTools, legacyBuildTools) = splitTopLevel (depsExampleDependencies deps) (directDeps, flaggedDeps) = splitDeps libraryDeps component = mkComponent (depsVisibility deps) bi bi = mempty { C.otherExtensions = exts , C.defaultLanguage = mlang , C.buildToolDepends = [ C.ExeDependency (C.mkPackageName p) (C.mkUnqualComponentName e) vr | (p, e, vr) <- buildTools] , C.buildTools = [ C.LegacyExeDependency n vr | (n,vr) <- legacyBuildTools] , C.pkgconfigDepends = [ C.PkgconfigDependency n' v' | (n,v) <- pcpkgs , let n' = C.mkPkgconfigName n , let v' = C.PcThisVersion (mkSimplePkgconfigVersion v) ] , C.buildable = depsIsBuildable deps } in C.CondNode { C.condTreeData = component -- TODO: Arguably, build-tools dependencies should also -- effect constraints on conditional tree. 
But no way to -- distinguish between them , C.condTreeConstraints = map mkDirect directDeps , C.condTreeComponents = map (mkFlagged mkComponent) flaggedDeps } mkDirect :: (ExamplePkgName, C.LibraryName, C.VersionRange) -> C.Dependency mkDirect (dep, name, vr) = C.Dependency (C.mkPackageName dep) vr (NonEmptySet.singleton name) mkFlagged :: (C.LibraryVisibility -> C.BuildInfo -> a) -> (ExampleFlagName, Dependencies, Dependencies) -> DependencyComponent a mkFlagged mkComponent (f, a, b) = C.CondBranch (C.Var (C.PackageFlag (C.mkFlagName f))) (mkCondTree mkComponent a) (Just (mkCondTree mkComponent b)) -- Split a set of dependencies into direct dependencies and flagged -- dependencies. A direct dependency is a tuple of the name of package and -- its version range meant to be converted to a 'C.Dependency' with -- 'mkDirect' for example. A flagged dependency is the set of dependencies -- guarded by a flag. splitDeps :: [ExampleDependency] -> ( [(ExamplePkgName, C.LibraryName, C.VersionRange)] , [(ExampleFlagName, Dependencies, Dependencies)] ) splitDeps [] = ([], []) splitDeps (ExAny p:deps) = let (directDeps, flaggedDeps) = splitDeps deps in ((p, C.LMainLibName, C.anyVersion):directDeps, flaggedDeps) splitDeps (ExFix p v:deps) = let (directDeps, flaggedDeps) = splitDeps deps in ((p, C.LMainLibName, C.thisVersion $ mkSimpleVersion v):directDeps, flaggedDeps) splitDeps (ExRange p v1 v2:deps) = let (directDeps, flaggedDeps) = splitDeps deps in ((p, C.LMainLibName, mkVersionRange v1 v2):directDeps, flaggedDeps) splitDeps (ExSubLibAny p lib:deps) = let (directDeps, flaggedDeps) = splitDeps deps in ((p, C.LSubLibName (C.mkUnqualComponentName lib), C.anyVersion):directDeps, flaggedDeps) splitDeps (ExSubLibFix p lib v:deps) = let (directDeps, flaggedDeps) = splitDeps deps in ((p, C.LSubLibName (C.mkUnqualComponentName lib), C.thisVersion $ mkSimpleVersion v):directDeps, flaggedDeps) splitDeps (ExFlagged f a b:deps) = let (directDeps, flaggedDeps) = splitDeps deps in 
(directDeps, (f, a, b):flaggedDeps) splitDeps (dep:_) = error $ "Unexpected dependency: " ++ show dep -- custom-setup only supports simple dependencies mkSetupDeps :: [ExampleDependency] -> [C.Dependency] mkSetupDeps deps = case splitDeps deps of (directDeps, []) -> map mkDirect directDeps _ -> error "mkSetupDeps: custom setup has non-simple deps" mkSimpleVersion :: ExamplePkgVersion -> C.Version mkSimpleVersion n = C.mkVersion [n, 0, 0] mkSimplePkgconfigVersion :: ExamplePkgVersion -> C.PkgconfigVersion mkSimplePkgconfigVersion = C.versionToPkgconfigVersion . mkSimpleVersion mkVersionRange :: ExamplePkgVersion -> ExamplePkgVersion -> C.VersionRange mkVersionRange v1 v2 = C.intersectVersionRanges (C.orLaterVersion $ mkSimpleVersion v1) (C.earlierVersion $ mkSimpleVersion v2) mkFlag :: ExFlag -> C.PackageFlag mkFlag flag = C.MkPackageFlag { C.flagName = C.mkFlagName $ exFlagName flag , C.flagDescription = "" , C.flagDefault = exFlagDefault flag , C.flagManual = case exFlagType flag of Manual -> True Automatic -> False } mkDefaultFlag :: ExampleFlagName -> C.PackageFlag mkDefaultFlag flag = C.MkPackageFlag { C.flagName = C.mkFlagName flag , C.flagDescription = "" , C.flagDefault = True , C.flagManual = False } exAvPkgId :: ExampleAvailable -> C.PackageIdentifier exAvPkgId ex = C.PackageIdentifier { pkgName = C.mkPackageName (exAvName ex) , pkgVersion = C.mkVersion [exAvVersion ex, 0, 0] } exInstInfo :: ExampleInstalled -> IPI.InstalledPackageInfo exInstInfo ex = IPI.emptyInstalledPackageInfo { IPI.installedUnitId = C.mkUnitId (exInstHash ex) , IPI.sourcePackageId = exInstPkgId ex , IPI.depends = map C.mkUnitId (exInstBuildAgainst ex) } exInstPkgId :: ExampleInstalled -> C.PackageIdentifier exInstPkgId ex = C.PackageIdentifier { pkgName = C.mkPackageName (exInstName ex) , pkgVersion = C.mkVersion [exInstVersion ex, 0, 0] } exAvIdx :: [ExampleAvailable] -> CI.PackageIndex.PackageIndex UnresolvedSourcePackage exAvIdx = CI.PackageIndex.fromList . 
map exAvSrcPkg exInstIdx :: [ExampleInstalled] -> C.PackageIndex.InstalledPackageIndex exInstIdx = C.PackageIndex.fromList . map exInstInfo exResolve :: ExampleDb -- List of extensions supported by the compiler, or Nothing if unknown. -> Maybe [Extension] -- List of languages supported by the compiler, or Nothing if unknown. -> Maybe [Language] -> PC.PkgConfigDb -> [ExamplePkgName] -> Maybe Int -> CountConflicts -> FineGrainedConflicts -> MinimizeConflictSet -> IndependentGoals -> ReorderGoals -> AllowBootLibInstalls -> OnlyConstrained -> EnableBackjumping -> SolveExecutables -> Maybe (Variable P.QPN -> Variable P.QPN -> Ordering) -> [ExConstraint] -> [ExPreference] -> C.Verbosity -> EnableAllTests -> Progress String String CI.SolverInstallPlan.SolverInstallPlan exResolve db exts langs pkgConfigDb targets mbj countConflicts fineGrainedConflicts minimizeConflictSet indepGoals reorder allowBootLibInstalls onlyConstrained enableBj solveExes goalOrder constraints prefs verbosity enableAllTests = resolveDependencies C.buildPlatform compiler pkgConfigDb Modular params where defaultCompiler = C.unknownCompilerInfo C.buildCompilerId C.NoAbiTag compiler = defaultCompiler { C.compilerInfoExtensions = exts , C.compilerInfoLanguages = langs } (inst, avai) = partitionEithers db instIdx = exInstIdx inst avaiIdx = SourcePackageDb { packageIndex = exAvIdx avai , packagePreferences = Map.empty } enableTests | asBool enableAllTests = fmap (\p -> PackageConstraint (scopeToplevel (C.mkPackageName p)) (PackagePropertyStanzas [TestStanzas])) (exDbPkgs db) | otherwise = [] targets' = fmap (\p -> NamedPackage (C.mkPackageName p) []) targets params = addConstraints (fmap toConstraint constraints) $ addConstraints (fmap toLpc enableTests) $ addPreferences (fmap toPref prefs) $ setCountConflicts countConflicts $ setFineGrainedConflicts fineGrainedConflicts $ setMinimizeConflictSet minimizeConflictSet $ setIndependentGoals indepGoals $ setReorderGoals reorder $ setMaxBackjumps mbj $ 
setAllowBootLibInstalls allowBootLibInstalls $ setOnlyConstrained onlyConstrained $ setEnableBackjumping enableBj $ setSolveExecutables solveExes $ setGoalOrder goalOrder $ setSolverVerbosity verbosity $ standardInstallPolicy instIdx avaiIdx targets' toLpc pc = LabeledPackageConstraint pc ConstraintSourceUnknown toConstraint (ExVersionConstraint scope v) = toLpc $ PackageConstraint scope (PackagePropertyVersion v) toConstraint (ExFlagConstraint scope fn b) = toLpc $ PackageConstraint scope (PackagePropertyFlags (C.mkFlagAssignment [(C.mkFlagName fn, b)])) toConstraint (ExStanzaConstraint scope stanzas) = toLpc $ PackageConstraint scope (PackagePropertyStanzas stanzas) toPref (ExPkgPref n v) = PackageVersionPreference (C.mkPackageName n) v toPref (ExStanzaPref n stanzas) = PackageStanzasPreference (C.mkPackageName n) stanzas extractInstallPlan :: CI.SolverInstallPlan.SolverInstallPlan -> [(ExamplePkgName, ExamplePkgVersion)] extractInstallPlan = catMaybes . map confPkg . CI.SolverInstallPlan.toList where confPkg :: CI.SolverInstallPlan.SolverPlanPackage -> Maybe (String, Int) confPkg (CI.SolverInstallPlan.Configured pkg) = srcPkg pkg confPkg _ = Nothing srcPkg :: SolverPackage UnresolvedPkgLoc -> Maybe (String, Int) srcPkg cpkg = let C.PackageIdentifier pn ver = C.packageId (solverPkgSource cpkg) in (\vn -> (C.unPackageName pn, vn)) <$> safeHead (C.versionNumbers ver) {------------------------------------------------------------------------------- Auxiliary -------------------------------------------------------------------------------} -- | Run Progress computation runProgress :: Progress step e a -> ([step], Either e a) runProgress = go where go (Step s p) = let (ss, result) = go p in (s:ss, result) go (Fail e) = ([], Left e) go (Done a) = ([], Right a) cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Modular/DSL/0000755000000000000000000000000007346545000023642 
5ustar0000000000000000cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Modular/DSL/TestCaseUtils.hs0000644000000000000000000002562607346545000026745 0ustar0000000000000000{-# LANGUAGE RecordWildCards #-} -- | Utilities for creating HUnit test cases with the solver DSL. module UnitTests.Distribution.Solver.Modular.DSL.TestCaseUtils ( SolverTest , SolverResult(..) , maxBackjumps , disableFineGrainedConflicts , minimizeConflictSet , independentGoals , allowBootLibInstalls , onlyConstrained , disableBackjumping , disableSolveExecutables , goalOrder , constraints , preferences , setVerbose , enableAllTests , solverSuccess , solverFailure , anySolverFailure , mkTest , mkTestExts , mkTestLangs , mkTestPCDepends , mkTestExtLangPC , runTest ) where import Prelude () import Distribution.Solver.Compat.Prelude import Data.List (elemIndex) -- test-framework import Test.Tasty as TF import Test.Tasty.HUnit (testCase, assertEqual, assertBool) -- Cabal import qualified Distribution.PackageDescription as C import Language.Haskell.Extension (Extension(..), Language(..)) import Distribution.Verbosity -- cabal-install import qualified Distribution.Solver.Types.PackagePath as P import Distribution.Solver.Types.PkgConfigDb (PkgConfigDb (..), pkgConfigDbFromList) import Distribution.Solver.Types.Settings import Distribution.Solver.Types.Variable import Distribution.Client.Dependency (foldProgress) import UnitTests.Distribution.Solver.Modular.DSL import UnitTests.Options maxBackjumps :: Maybe Int -> SolverTest -> SolverTest maxBackjumps mbj test = test { testMaxBackjumps = mbj } disableFineGrainedConflicts :: SolverTest -> SolverTest disableFineGrainedConflicts test = test { testFineGrainedConflicts = FineGrainedConflicts False } minimizeConflictSet :: SolverTest -> SolverTest minimizeConflictSet test = test { testMinimizeConflictSet = MinimizeConflictSet True } -- | Combinator to turn on --independent-goals behavior, i.e. 
solve -- for the goals as if we were solving for each goal independently. independentGoals :: SolverTest -> SolverTest independentGoals test = test { testIndepGoals = IndependentGoals True } allowBootLibInstalls :: SolverTest -> SolverTest allowBootLibInstalls test = test { testAllowBootLibInstalls = AllowBootLibInstalls True } onlyConstrained :: SolverTest -> SolverTest onlyConstrained test = test { testOnlyConstrained = OnlyConstrainedAll } disableBackjumping :: SolverTest -> SolverTest disableBackjumping test = test { testEnableBackjumping = EnableBackjumping False } disableSolveExecutables :: SolverTest -> SolverTest disableSolveExecutables test = test { testSolveExecutables = SolveExecutables False } goalOrder :: [ExampleVar] -> SolverTest -> SolverTest goalOrder order test = test { testGoalOrder = Just order } constraints :: [ExConstraint] -> SolverTest -> SolverTest constraints cs test = test { testConstraints = cs } preferences :: [ExPreference] -> SolverTest -> SolverTest preferences prefs test = test { testSoftConstraints = prefs } -- | Increase the solver's verbosity. This is necessary for test cases that -- check the contents of the verbose log. 
setVerbose :: SolverTest -> SolverTest setVerbose test = test { testVerbosity = verbose } enableAllTests :: SolverTest -> SolverTest enableAllTests test = test { testEnableAllTests = EnableAllTests True } {------------------------------------------------------------------------------- Solver tests -------------------------------------------------------------------------------} data SolverTest = SolverTest { testLabel :: String , testTargets :: [String] , testResult :: SolverResult , testMaxBackjumps :: Maybe Int , testFineGrainedConflicts :: FineGrainedConflicts , testMinimizeConflictSet :: MinimizeConflictSet , testIndepGoals :: IndependentGoals , testAllowBootLibInstalls :: AllowBootLibInstalls , testOnlyConstrained :: OnlyConstrained , testEnableBackjumping :: EnableBackjumping , testSolveExecutables :: SolveExecutables , testGoalOrder :: Maybe [ExampleVar] , testConstraints :: [ExConstraint] , testSoftConstraints :: [ExPreference] , testVerbosity :: Verbosity , testDb :: ExampleDb , testSupportedExts :: Maybe [Extension] , testSupportedLangs :: Maybe [Language] , testPkgConfigDb :: PkgConfigDb , testEnableAllTests :: EnableAllTests } -- | Expected result of a solver test. data SolverResult = SolverResult { -- | The solver's log should satisfy this predicate. Note that we also print -- the log, so evaluating a large log here can cause a space leak. resultLogPredicate :: [String] -> Bool, -- | Fails with an error message satisfying the predicate, or succeeds with -- the given plan. resultErrorMsgPredicateOrPlan :: Either (String -> Bool) [(String, Int)] } solverSuccess :: [(String, Int)] -> SolverResult solverSuccess = SolverResult (const True) . Right solverFailure :: (String -> Bool) -> SolverResult solverFailure = SolverResult (const True) . Left -- | Can be used for test cases where we just want to verify that -- they fail, but do not care about the error message. 
anySolverFailure :: SolverResult anySolverFailure = solverFailure (const True) -- | Makes a solver test case, consisting of the following components: -- -- 1. An 'ExampleDb', representing the package database (both -- installed and remote) we are doing dependency solving over, -- 2. A 'String' name for the test, -- 3. A list '[String]' of package names to solve for -- 4. The expected result, either 'Nothing' if there is no -- satisfying solution, or a list '[(String, Int)]' of -- packages to install, at which versions. -- -- See 'UnitTests.Distribution.Solver.Modular.DSL' for how -- to construct an 'ExampleDb', as well as definitions of 'db1' etc. -- in this file. mkTest :: ExampleDb -> String -> [String] -> SolverResult -> SolverTest mkTest = mkTestExtLangPC Nothing Nothing (Just []) mkTestExts :: [Extension] -> ExampleDb -> String -> [String] -> SolverResult -> SolverTest mkTestExts exts = mkTestExtLangPC (Just exts) Nothing (Just []) mkTestLangs :: [Language] -> ExampleDb -> String -> [String] -> SolverResult -> SolverTest mkTestLangs langs = mkTestExtLangPC Nothing (Just langs) (Just []) mkTestPCDepends :: Maybe [(String, String)] -> ExampleDb -> String -> [String] -> SolverResult -> SolverTest mkTestPCDepends mPkgConfigDb = mkTestExtLangPC Nothing Nothing mPkgConfigDb mkTestExtLangPC :: Maybe [Extension] -> Maybe [Language] -> Maybe [(String, String)] -> ExampleDb -> String -> [String] -> SolverResult -> SolverTest mkTestExtLangPC exts langs mPkgConfigDb db label targets result = SolverTest { testLabel = label , testTargets = targets , testResult = result , testMaxBackjumps = Nothing , testFineGrainedConflicts = FineGrainedConflicts True , testMinimizeConflictSet = MinimizeConflictSet False , testIndepGoals = IndependentGoals False , testAllowBootLibInstalls = AllowBootLibInstalls False , testOnlyConstrained = OnlyConstrainedNone , testEnableBackjumping = EnableBackjumping True , testSolveExecutables = SolveExecutables True , testGoalOrder = Nothing , 
testConstraints = [] , testSoftConstraints = [] , testVerbosity = normal , testDb = db , testSupportedExts = exts , testSupportedLangs = langs , testPkgConfigDb = maybe NoPkgConfigDb pkgConfigDbFromList mPkgConfigDb , testEnableAllTests = EnableAllTests False } runTest :: SolverTest -> TF.TestTree runTest SolverTest{..} = askOption $ \(OptionShowSolverLog showSolverLog) -> testCase testLabel $ do let progress = exResolve testDb testSupportedExts testSupportedLangs testPkgConfigDb testTargets testMaxBackjumps (CountConflicts True) testFineGrainedConflicts testMinimizeConflictSet testIndepGoals (ReorderGoals False) testAllowBootLibInstalls testOnlyConstrained testEnableBackjumping testSolveExecutables (sortGoals <$> testGoalOrder) testConstraints testSoftConstraints testVerbosity testEnableAllTests printMsg msg = when showSolverLog $ putStrLn msg msgs = foldProgress (:) (const []) (const []) progress assertBool ("Unexpected solver log:\n" ++ unlines msgs) $ resultLogPredicate testResult $ concatMap lines msgs result <- foldProgress ((>>) . printMsg) (return . Left) (return . Right) progress case result of Left err -> assertBool ("Unexpected error:\n" ++ err) (checkErrorMsg testResult err) Right plan -> assertEqual "" (toMaybe testResult) (Just (extractInstallPlan plan)) where toMaybe :: SolverResult -> Maybe [(String, Int)] toMaybe = either (const Nothing) Just . resultErrorMsgPredicateOrPlan checkErrorMsg :: SolverResult -> String -> Bool checkErrorMsg result msg = case resultErrorMsgPredicateOrPlan result of Left f -> f msg Right _ -> False sortGoals :: [ExampleVar] -> Variable P.QPN -> Variable P.QPN -> Ordering sortGoals = orderFromList . map toVariable -- Sort elements in the list ahead of elements not in the list. Otherwise, -- follow the order in the list. 
orderFromList :: Eq a => [a] -> a -> a -> Ordering orderFromList xs = comparing $ \x -> let i = elemIndex x xs in (isNothing i, i) toVariable :: ExampleVar -> Variable P.QPN toVariable (P q pn) = PackageVar (toQPN q pn) toVariable (F q pn fn) = FlagVar (toQPN q pn) (C.mkFlagName fn) toVariable (S q pn stanza) = StanzaVar (toQPN q pn) stanza toQPN :: ExampleQualifier -> ExamplePkgName -> P.QPN toQPN q pn = P.Q pp (C.mkPackageName pn) where pp = case q of QualNone -> P.PackagePath P.DefaultNamespace P.QualToplevel QualIndep p -> P.PackagePath (P.Independent $ C.mkPackageName p) P.QualToplevel QualSetup s -> P.PackagePath P.DefaultNamespace (P.QualSetup (C.mkPackageName s)) QualIndepSetup p s -> P.PackagePath (P.Independent $ C.mkPackageName p) (P.QualSetup (C.mkPackageName s)) QualExe p1 p2 -> P.PackagePath P.DefaultNamespace (P.QualExe (C.mkPackageName p1) (C.mkPackageName p2)) cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Modular/MemoryUsage.hs0000644000000000000000000001735307346545000026022 0ustar0000000000000000-- | Tests for detecting space leaks in the dependency solver. module UnitTests.Distribution.Solver.Modular.MemoryUsage (tests) where import Test.Tasty (TestTree) import UnitTests.Distribution.Solver.Modular.DSL import UnitTests.Distribution.Solver.Modular.DSL.TestCaseUtils tests :: [TestTree] tests = [ runTest $ basicTest "basic space leak test" , runTest $ flagsTest "package with many flags" , runTest $ issue2899 "issue #2899" , runTest $ duplicateDependencies "duplicate dependencies" , runTest $ duplicateFlaggedDependencies "duplicate flagged dependencies" ] -- | This test solves for n packages that each have two versions. There is no -- solution, because the nth package depends on another package that doesn't fit -- its version constraint. Backjumping and fine grained conflicts are disabled, -- so the solver must explore a search tree of size 2^n. It should fail if -- memory usage is proportional to the size of the tree. 
basicTest :: String -> SolverTest basicTest name = disableBackjumping $ disableFineGrainedConflicts $ mkTest pkgs name ["target"] anySolverFailure where n :: Int n = 18 pkgs :: ExampleDb pkgs = map Right $ [ exAv "target" 1 [ExAny $ pkgName 1]] ++ [ exAv (pkgName i) v [ExRange (pkgName $ i + 1) 2 4] | i <- [1..n], v <- [2, 3]] ++ [exAv (pkgName $ n + 1) 1 []] pkgName :: Int -> ExamplePkgName pkgName x = "pkg-" ++ show x -- | This test is similar to 'basicTest', except that it has one package with n -- flags, flag-1 through flag-n. The solver assigns flags in order, so it -- doesn't discover the unknown dependencies under flag-n until it has assigned -- all of the flags. It has to explore the whole search tree. flagsTest :: String -> SolverTest flagsTest name = disableBackjumping $ disableFineGrainedConflicts $ goalOrder orderedFlags $ mkTest pkgs name ["pkg"] anySolverFailure where n :: Int n = 16 pkgs :: ExampleDb pkgs = [Right $ exAv "pkg" 1 $ [exFlagged (numberedFlag n) [ExAny "unknown1"] [ExAny "unknown2"]] -- The remaining flags have no effect: ++ [exFlagged (numberedFlag i) [] [] | i <- [1..n - 1]] ] orderedFlags :: [ExampleVar] orderedFlags = [F QualNone "pkg" (numberedFlag i) | i <- [1..n]] -- | Test for a space leak caused by sharing of search trees under packages with -- link choices (issue #2899). -- -- The goal order is fixed so that the solver chooses setup-dep and then -- target-setup.setup-dep at the top of the search tree. target-setup.setup-dep -- has two choices: link to setup-dep, and don't link to setup-dep. setup-dep -- has a long chain of dependencies (pkg-1 through pkg-n). However, pkg-n -- depends on pkg-n+1, which doesn't exist, so there is no solution. Since each -- dependency has two versions, the solver must try 2^n combinations when -- backjumping and fine grained conflicts are disabled. These combinations -- create large search trees under each of the two choices for -- target-setup.setup-dep. 
Although the choice to not link is disallowed by the -- Single Instance Restriction, the solver doesn't know that until it has -- explored (and evaluated) the whole tree under the choice to link. If the two -- trees are shared, memory usage spikes. issue2899 :: String -> SolverTest issue2899 name = disableBackjumping $ disableFineGrainedConflicts $ goalOrder goals $ mkTest pkgs name ["target"] anySolverFailure where n :: Int n = 16 pkgs :: ExampleDb pkgs = map Right $ [ exAv "target" 1 [ExAny "setup-dep"] `withSetupDeps` [ExAny "setup-dep"] , exAv "setup-dep" 1 [ExAny $ pkgName 1]] ++ [ exAv (pkgName i) v [ExAny $ pkgName (i + 1)] | i <- [1..n], v <- [1, 2]] pkgName :: Int -> ExamplePkgName pkgName x = "pkg-" ++ show x goals :: [ExampleVar] goals = [P QualNone "setup-dep", P (QualSetup "target") "setup-dep"] -- | Test for an issue related to lifting dependencies out of conditionals when -- converting a PackageDescription to the solver's internal representation. -- -- Issue: -- For each conditional and each package B, the solver combined each dependency -- on B in the true branch with each dependency on B in the false branch. It -- added the combined dependencies to the build-depends outside of the -- conditional. Since dependencies could be lifted out of multiple levels of -- conditionals, the number of new dependencies could grow exponentially in the -- number of levels. For example, the following package generated 4 copies of B -- under flag-2=False, 8 copies under flag-1=False, and 16 copies at the top -- level: -- -- if flag(flag-1) -- build-depends: B, B -- else -- if flag(flag-2) -- build-depends: B, B -- else -- if flag(flag-3) -- build-depends: B, B -- else -- build-depends: B, B -- -- This issue caused the quickcheck tests to start frequently running out of -- memory after an optimization that pruned unreachable branches (See PR #4929). 
-- Each problematic test case contained at least one build-depends field with -- duplicate dependencies, which was then duplicated under multiple levels of -- conditionals by the solver's "buildable: False" transformation, when -- "buildable: False" was under multiple flags. Finally, the branch pruning -- feature put all build-depends fields in consecutive levels of the condition -- tree, causing the solver's representation of the package to follow the -- pattern in the example above. -- -- Now the solver avoids this issue by combining all dependencies on the same -- package before lifting them out of conditionals. -- -- This test case is an expanded version of the example above, with library and -- build-tool dependencies. duplicateDependencies :: String -> SolverTest duplicateDependencies name = mkTest pkgs name ["A"] $ solverSuccess [("A", 1), ("B", 1)] where copies, depth :: Int copies = 50 depth = 50 pkgs :: ExampleDb pkgs = [ Right $ exAv "A" 1 (dependencyTree 1) , Right $ exAv "B" 1 [] `withExe` exExe "exe" [] ] dependencyTree :: Int -> [ExampleDependency] dependencyTree n | n > depth = buildDepends | otherwise = [exFlagged (numberedFlag n) buildDepends (dependencyTree (n + 1))] where buildDepends = replicate copies (ExFix "B" 1) ++ replicate copies (ExBuildToolFix "B" "exe" 1) -- | This test is similar to duplicateDependencies, except that every dependency -- on B is replaced by a conditional that contains B in both branches. It tests -- that the solver doesn't just combine dependencies within one build-depends or -- build-tool-depends field; it also needs to combine dependencies after they -- are lifted out of conditionals. 
duplicateFlaggedDependencies :: String -> SolverTest duplicateFlaggedDependencies name = mkTest pkgs name ["A"] $ solverSuccess [("A", 1), ("B", 1)] where copies, depth :: Int copies = 15 depth = 15 pkgs :: ExampleDb pkgs = [ Right $ exAv "A" 1 (dependencyTree 1) , Right $ exAv "B" 1 [] `withExe` exExe "exe" [] ] dependencyTree :: Int -> [ExampleDependency] dependencyTree n | n > depth = flaggedDeps | otherwise = [exFlagged (numberedFlag n) flaggedDeps (dependencyTree (n + 1))] where flaggedDeps = zipWith ($) (replicate copies flaggedDep) [0 :: Int ..] flaggedDep m = exFlagged (numberedFlag n ++ "-" ++ show m) buildDepends buildDepends buildDepends = [ExFix "B" 1, ExBuildToolFix "B" "exe" 1] numberedFlag :: Int -> ExampleFlagName numberedFlag n = "flag-" ++ show n cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Modular/QuickCheck.hs0000644000000000000000000005270107346545000025573 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE StandaloneDeriving #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module UnitTests.Distribution.Solver.Modular.QuickCheck (tests) where import Prelude () import Distribution.Client.Compat.Prelude import Control.Arrow ((&&&)) import Data.Either (lefts) import Data.Hashable (Hashable(..)) import Data.List (groupBy, isInfixOf) import Text.Show.Pretty (parseValue, valToStr) import Test.Tasty (TestTree) import Test.QuickCheck (Arbitrary (..), Gen, Positive (..), frequency, oneof, shrinkList, shuffle, listOf, shrinkNothing, vectorOf, elements, sublistOf, counterexample, (===), (==>), Blind (..)) import Test.QuickCheck.Instances.Cabal () import Distribution.Types.Flag (FlagName) import Distribution.Utils.ShortText (ShortText) import Distribution.Client.Setup (defaultMaxBackjumps) import Distribution.Types.LibraryVisibility import Distribution.Types.PackageName import Distribution.Types.UnqualComponentName import qualified Distribution.Solver.Types.ComponentDeps as CD import 
Distribution.Solver.Types.ComponentDeps ( Component(..), ComponentDep, ComponentDeps ) import Distribution.Solver.Types.OptionalStanza import Distribution.Solver.Types.PackageConstraint import qualified Distribution.Solver.Types.PackagePath as P import Distribution.Solver.Types.PkgConfigDb (pkgConfigDbFromList) import Distribution.Solver.Types.Settings import Distribution.Solver.Types.Variable import Distribution.Verbosity import Distribution.Version import UnitTests.Distribution.Solver.Modular.DSL import UnitTests.Distribution.Solver.Modular.QuickCheck.Utils ( testPropertyWithSeed ) tests :: [TestTree] tests = [ -- This test checks that certain solver parameters do not affect the -- existence of a solution. It runs the solver twice, and only sets those -- parameters on the second run. The test also applies parameters that -- can affect the existence of a solution to both runs. testPropertyWithSeed "target and goal order do not affect solvability" $ \test targetOrder mGoalOrder1 mGoalOrder2 indepGoals -> let r1 = solve' mGoalOrder1 test r2 = solve' mGoalOrder2 test { testTargets = targets2 } solve' goalOrder = solve (EnableBackjumping True) (FineGrainedConflicts True) (ReorderGoals False) (CountConflicts True) indepGoals (getBlind <$> goalOrder) targets = testTargets test targets2 = case targetOrder of SameOrder -> targets ReverseOrder -> reverse targets in counterexample (showResults r1 r2) $ noneReachedBackjumpLimit [r1, r2] ==> isRight (resultPlan r1) === isRight (resultPlan r2) , testPropertyWithSeed "solvable without --independent-goals => solvable with --independent-goals" $ \test reorderGoals -> let r1 = solve' (IndependentGoals False) test r2 = solve' (IndependentGoals True) test solve' indep = solve (EnableBackjumping True) (FineGrainedConflicts True) reorderGoals (CountConflicts True) indep Nothing in counterexample (showResults r1 r2) $ noneReachedBackjumpLimit [r1, r2] ==> isRight (resultPlan r1) `implies` isRight (resultPlan r2) , testPropertyWithSeed 
"backjumping does not affect solvability" $ \test reorderGoals indepGoals -> let r1 = solve' (EnableBackjumping True) test r2 = solve' (EnableBackjumping False) test solve' enableBj = solve enableBj (FineGrainedConflicts False) reorderGoals (CountConflicts True) indepGoals Nothing in counterexample (showResults r1 r2) $ noneReachedBackjumpLimit [r1, r2] ==> isRight (resultPlan r1) === isRight (resultPlan r2) , testPropertyWithSeed "fine-grained conflicts does not affect solvability" $ \test reorderGoals indepGoals -> let r1 = solve' (FineGrainedConflicts True) test r2 = solve' (FineGrainedConflicts False) test solve' fineGrainedConflicts = solve (EnableBackjumping True) fineGrainedConflicts reorderGoals (CountConflicts True) indepGoals Nothing in counterexample (showResults r1 r2) $ noneReachedBackjumpLimit [r1, r2] ==> isRight (resultPlan r1) === isRight (resultPlan r2) -- The next two tests use --no-count-conflicts, because the goal order used -- with --count-conflicts depends on the total set of conflicts seen by the -- solver. The solver explores more of the tree and encounters more -- conflicts when it doesn't backjump. The different goal orders can lead to -- different solutions and cause the test to fail. -- TODO: Find a faster way to randomly sort goals, and then use a random -- goal order in these tests. 
, testPropertyWithSeed "backjumping does not affect the result (with static goal order)" $ \test reorderGoals indepGoals -> let r1 = solve' (EnableBackjumping True) test r2 = solve' (EnableBackjumping False) test solve' enableBj = solve enableBj (FineGrainedConflicts False) reorderGoals (CountConflicts False) indepGoals Nothing in counterexample (showResults r1 r2) $ noneReachedBackjumpLimit [r1, r2] ==> resultPlan r1 === resultPlan r2 , testPropertyWithSeed "fine-grained conflicts does not affect the result (with static goal order)" $ \test reorderGoals indepGoals -> let r1 = solve' (FineGrainedConflicts True) test r2 = solve' (FineGrainedConflicts False) test solve' fineGrainedConflicts = solve (EnableBackjumping True) fineGrainedConflicts reorderGoals (CountConflicts False) indepGoals Nothing in counterexample (showResults r1 r2) $ noneReachedBackjumpLimit [r1, r2] ==> resultPlan r1 === resultPlan r2 ] where noneReachedBackjumpLimit :: [Result] -> Bool noneReachedBackjumpLimit = not . 
any (\r -> resultPlan r == Left BackjumpLimitReached) showResults :: Result -> Result -> String showResults r1 r2 = showResult 1 r1 ++ showResult 2 r2 showResult :: Int -> Result -> String showResult n result = unlines $ ["", "Run " ++ show n ++ ":"] ++ resultLog result ++ ["result: " ++ show (resultPlan result)] implies :: Bool -> Bool -> Bool implies x y = not x || y isRight :: Either a b -> Bool isRight (Right _) = True isRight _ = False newtype VarOrdering = VarOrdering { unVarOrdering :: Variable P.QPN -> Variable P.QPN -> Ordering } solve :: EnableBackjumping -> FineGrainedConflicts -> ReorderGoals -> CountConflicts -> IndependentGoals -> Maybe VarOrdering -> SolverTest -> Result solve enableBj fineGrainedConflicts reorder countConflicts indep goalOrder test = let (lg, result) = runProgress $ exResolve (unTestDb (testDb test)) Nothing Nothing (pkgConfigDbFromList []) (map unPN (testTargets test)) -- The backjump limit prevents individual tests from using -- too much time and memory. (Just defaultMaxBackjumps) countConflicts fineGrainedConflicts (MinimizeConflictSet False) indep reorder (AllowBootLibInstalls False) OnlyConstrainedNone enableBj (SolveExecutables True) (unVarOrdering <$> goalOrder) (testConstraints test) (testPreferences test) normal (EnableAllTests False) failure :: String -> Failure failure msg | "Backjump limit reached" `isInfixOf` msg = BackjumpLimitReached | otherwise = OtherFailure in Result { resultLog = lg , resultPlan = -- Force the result so that we check for internal errors when we check -- for success or failure. See D.C.Dependency.validateSolverResult. force $ either (Left . failure) (Right . extractInstallPlan) result } -- | How to modify the order of the input targets. 
data TargetOrder = SameOrder | ReverseOrder deriving Show instance Arbitrary TargetOrder where arbitrary = elements [SameOrder, ReverseOrder] shrink SameOrder = [] shrink ReverseOrder = [SameOrder] data Result = Result { resultLog :: [String] , resultPlan :: Either Failure [(ExamplePkgName, ExamplePkgVersion)] } data Failure = BackjumpLimitReached | OtherFailure deriving (Eq, Generic, Show) instance NFData Failure -- | Package name. newtype PN = PN { unPN :: String } deriving (Eq, Ord, Show) instance Arbitrary PN where arbitrary = PN <$> elements ("base" : [[pn] | pn <- ['A'..'G']]) -- | Package version. newtype PV = PV { unPV :: Int } deriving (Eq, Ord, Show) instance Arbitrary PV where arbitrary = PV <$> elements [1..10] type TestPackage = Either ExampleInstalled ExampleAvailable getName :: TestPackage -> PN getName = PN . either exInstName exAvName getVersion :: TestPackage -> PV getVersion = PV . either exInstVersion exAvVersion data SolverTest = SolverTest { testDb :: TestDb , testTargets :: [PN] , testConstraints :: [ExConstraint] , testPreferences :: [ExPreference] } -- | Pretty-print the test when quickcheck calls 'show'. 
instance Show SolverTest where show test = let str = "SolverTest {testDb = " ++ show (testDb test) ++ ", testTargets = " ++ show (testTargets test) ++ ", testConstraints = " ++ show (testConstraints test) ++ ", testPreferences = " ++ show (testPreferences test) ++ "}" in maybe str valToStr $ parseValue str instance Arbitrary SolverTest where arbitrary = do db <- arbitrary let pkgVersions = nub $ map (getName &&& getVersion) (unTestDb db) pkgs = nub $ map fst pkgVersions Positive n <- arbitrary targets <- randomSubset n pkgs constraints <- case pkgVersions of [] -> return [] _ -> boundedListOf 1 $ arbitraryConstraint pkgVersions prefs <- case pkgVersions of [] -> return [] _ -> boundedListOf 3 $ arbitraryPreference pkgVersions return (SolverTest db targets constraints prefs) shrink test = [test { testDb = db } | db <- shrink (testDb test)] ++ [test { testTargets = targets } | targets <- shrink (testTargets test)] ++ [test { testConstraints = cs } | cs <- shrink (testConstraints test)] ++ [test { testPreferences = prefs } | prefs <- shrink (testPreferences test)] -- | Collection of source and installed packages. newtype TestDb = TestDb { unTestDb :: ExampleDb } deriving Show instance Arbitrary TestDb where arbitrary = do -- Avoid cyclic dependencies by grouping packages by name and only -- allowing each package to depend on packages in the groups before it. groupedPkgs <- shuffle . groupBy ((==) `on` fst) . nub . sort =<< boundedListOf 10 arbitrary db <- foldM nextPkgs (TestDb []) groupedPkgs TestDb <$> shuffle (unTestDb db) where nextPkgs :: TestDb -> [(PN, PV)] -> Gen TestDb nextPkgs db pkgs = TestDb . 
(++ unTestDb db) <$> traverse (nextPkg db) pkgs nextPkg :: TestDb -> (PN, PV) -> Gen TestPackage nextPkg db (pn, v) = do installed <- arbitrary if installed then Left <$> arbitraryExInst pn v (lefts $ unTestDb db) else Right <$> arbitraryExAv pn v db shrink (TestDb pkgs) = map TestDb $ shrink pkgs arbitraryExAv :: PN -> PV -> TestDb -> Gen ExampleAvailable arbitraryExAv pn v db = (\cds -> ExAv (unPN pn) (unPV v) cds []) <$> arbitraryComponentDeps pn db arbitraryExInst :: PN -> PV -> [ExampleInstalled] -> Gen ExampleInstalled arbitraryExInst pn v pkgs = do pkgHash <- vectorOf 10 $ elements $ ['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9'] numDeps <- min 3 <$> arbitrary deps <- randomSubset numDeps pkgs return $ ExInst (unPN pn) (unPV v) pkgHash (map exInstHash deps) arbitraryComponentDeps :: PN -> TestDb -> Gen (ComponentDeps Dependencies) arbitraryComponentDeps _ (TestDb []) = return $ CD.fromLibraryDeps (dependencies []) arbitraryComponentDeps pn db = do -- dedupComponentNames removes components with duplicate names, for example, -- 'ComponentExe x' and 'ComponentTest x', and then CD.fromList combines -- duplicate unnamed components. cds <- CD.fromList . dedupComponentNames . filter (isValid . fst) <$> boundedListOf 5 (arbitraryComponentDep db) return $ if isCompleteComponentDeps cds then cds else -- Add a library if the ComponentDeps isn't complete. CD.fromLibraryDeps (dependencies []) <> cds where isValid :: Component -> Bool isValid (ComponentSubLib name) = name /= mkUnqualComponentName (unPN pn) isValid _ = True dedupComponentNames = nubBy ((\x y -> isJust x && isJust y && x == y) `on` componentName . 
fst) componentName :: Component -> Maybe UnqualComponentName componentName ComponentLib = Nothing componentName ComponentSetup = Nothing componentName (ComponentSubLib n) = Just n componentName (ComponentFLib n) = Just n componentName (ComponentExe n) = Just n componentName (ComponentTest n) = Just n componentName (ComponentBench n) = Just n -- | Returns true if the ComponentDeps forms a complete package, i.e., it -- contains a library, exe, test, or benchmark. isCompleteComponentDeps :: ComponentDeps a -> Bool isCompleteComponentDeps = any (completesPkg . fst) . CD.toList where completesPkg ComponentLib = True completesPkg (ComponentExe _) = True completesPkg (ComponentTest _) = True completesPkg (ComponentBench _) = True completesPkg (ComponentSubLib _) = False completesPkg (ComponentFLib _) = False completesPkg ComponentSetup = False arbitraryComponentDep :: TestDb -> Gen (ComponentDep Dependencies) arbitraryComponentDep db = do comp <- arbitrary deps <- case comp of ComponentSetup -> smallListOf (arbitraryExDep db SetupDep) _ -> boundedListOf 5 (arbitraryExDep db NonSetupDep) return ( comp , Dependencies { depsExampleDependencies = deps -- TODO: Test different values for visibility and buildability. , depsVisibility = LibraryVisibilityPublic , depsIsBuildable = True } ) -- | Location of an 'ExampleDependency'. It determines which values are valid. data ExDepLocation = SetupDep | NonSetupDep arbitraryExDep :: TestDb -> ExDepLocation -> Gen ExampleDependency arbitraryExDep db@(TestDb pkgs) level = let flag = ExFlagged <$> arbitraryFlagName <*> arbitraryDeps db <*> arbitraryDeps db other = -- Package checks require dependencies on "base" to have bounds. let notBase = filter ((/= PN "base") . getName) pkgs in [ExAny . unPN <$> elements (map getName notBase) | not (null notBase)] ++ [ -- existing version let fixed pkg = ExFix (unPN $ getName pkg) (unPV $ getVersion pkg) in fixed <$> elements pkgs -- random version of an existing package , ExFix . unPN . 
getName <$> elements pkgs <*> (unPV <$> arbitrary) ] in oneof $ case level of NonSetupDep -> flag : other SetupDep -> other arbitraryDeps :: TestDb -> Gen Dependencies arbitraryDeps db = frequency [ (1, return unbuildableDependencies) , (20, dependencies <$> smallListOf (arbitraryExDep db NonSetupDep)) ] arbitraryFlagName :: Gen String arbitraryFlagName = (:[]) <$> elements ['A'..'E'] arbitraryConstraint :: [(PN, PV)] -> Gen ExConstraint arbitraryConstraint pkgs = do (PN pn, v) <- elements pkgs let anyQualifier = ScopeAnyQualifier (mkPackageName pn) oneof [ ExVersionConstraint anyQualifier <$> arbitraryVersionRange v , ExStanzaConstraint anyQualifier <$> sublistOf [TestStanzas, BenchStanzas] ] arbitraryPreference :: [(PN, PV)] -> Gen ExPreference arbitraryPreference pkgs = do (PN pn, v) <- elements pkgs oneof [ ExStanzaPref pn <$> sublistOf [TestStanzas, BenchStanzas] , ExPkgPref pn <$> arbitraryVersionRange v ] arbitraryVersionRange :: PV -> Gen VersionRange arbitraryVersionRange (PV v) = let version = mkSimpleVersion v in elements [ thisVersion version , notThisVersion version , earlierVersion version , orLaterVersion version , noVersion ] instance Arbitrary ReorderGoals where arbitrary = ReorderGoals <$> arbitrary shrink (ReorderGoals reorder) = [ReorderGoals False | reorder] instance Arbitrary IndependentGoals where arbitrary = IndependentGoals <$> arbitrary shrink (IndependentGoals indep) = [IndependentGoals False | indep] instance Arbitrary Component where arbitrary = oneof [ return ComponentLib , ComponentSubLib <$> arbitraryUQN , ComponentExe <$> arbitraryUQN , ComponentFLib <$> arbitraryUQN , ComponentTest <$> arbitraryUQN , ComponentBench <$> arbitraryUQN , return ComponentSetup ] shrink ComponentLib = [] shrink _ = [ComponentLib] -- The "component-" prefix prevents component names and build-depends -- dependency names from overlapping. -- TODO: Remove the prefix once the QuickCheck tests support dependencies on -- internal libraries. 
arbitraryUQN :: Gen UnqualComponentName arbitraryUQN = mkUnqualComponentName <$> (\c -> "component-" ++ [c]) <$> elements "ABC" instance Arbitrary ExampleInstalled where arbitrary = error "arbitrary not implemented: ExampleInstalled" shrink ei = [ ei { exInstBuildAgainst = deps } | deps <- shrinkList shrinkNothing (exInstBuildAgainst ei)] instance Arbitrary ExampleAvailable where arbitrary = error "arbitrary not implemented: ExampleAvailable" shrink ea = [ea { exAvDeps = deps } | deps <- shrink (exAvDeps ea)] instance (Arbitrary a, Monoid a) => Arbitrary (ComponentDeps a) where arbitrary = error "arbitrary not implemented: ComponentDeps" shrink = filter isCompleteComponentDeps . map CD.fromList . shrink . CD.toList instance Arbitrary ExampleDependency where arbitrary = error "arbitrary not implemented: ExampleDependency" shrink (ExAny _) = [] shrink (ExFix "base" _) = [] -- preserve bounds on base shrink (ExFix pn _) = [ExAny pn] shrink (ExFlagged flag th el) = depsExampleDependencies th ++ depsExampleDependencies el ++ [ExFlagged flag th' el | th' <- shrink th] ++ [ExFlagged flag th el' | el' <- shrink el] shrink dep = error $ "Dependency not handled: " ++ show dep instance Arbitrary Dependencies where arbitrary = error "arbitrary not implemented: Dependencies" shrink deps = [ deps { depsVisibility = v } | v <- shrink $ depsVisibility deps ] ++ [ deps { depsIsBuildable = b } | b <- shrink $ depsIsBuildable deps ] ++ [ deps { depsExampleDependencies = ds } | ds <- shrink $ depsExampleDependencies deps ] instance Arbitrary ExConstraint where arbitrary = error "arbitrary not implemented: ExConstraint" shrink (ExStanzaConstraint scope stanzas) = [ExStanzaConstraint scope stanzas' | stanzas' <- shrink stanzas] shrink (ExVersionConstraint scope vr) = [ExVersionConstraint scope vr' | vr' <- shrink vr] shrink _ = [] instance Arbitrary ExPreference where arbitrary = error "arbitrary not implemented: ExPreference" shrink (ExStanzaPref pn stanzas) = [ExStanzaPref pn stanzas' 
| stanzas' <- shrink stanzas] shrink (ExPkgPref pn vr) = [ExPkgPref pn vr' | vr' <- shrink vr] instance Arbitrary OptionalStanza where arbitrary = error "arbitrary not implemented: OptionalStanza" shrink BenchStanzas = [TestStanzas] shrink TestStanzas = [] -- Randomly sorts solver variables using 'hash'. -- TODO: Sorting goals with this function is very slow. instance Arbitrary VarOrdering where arbitrary = do f <- arbitrary :: Gen (Int -> Int) return $ VarOrdering (comparing (f . hash)) instance Hashable pn => Hashable (Variable pn) instance Hashable a => Hashable (P.Qualified a) instance Hashable P.PackagePath instance Hashable P.Qualifier instance Hashable P.Namespace instance Hashable OptionalStanza instance Hashable FlagName instance Hashable PackageName instance Hashable ShortText deriving instance Generic (Variable pn) deriving instance Generic (P.Qualified a) deriving instance Generic P.PackagePath deriving instance Generic P.Namespace deriving instance Generic P.Qualifier randomSubset :: Int -> [a] -> Gen [a] randomSubset n xs = take n <$> shuffle xs boundedListOf :: Int -> Gen a -> Gen [a] boundedListOf n gen = take n <$> listOf gen -- | Generates lists with average length less than 1. 
smallListOf :: Gen a -> Gen [a] smallListOf gen = frequency [ (fr, vectorOf n gen) | (fr, n) <- [(3, 0), (5, 1), (2, 2)]] cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Modular/QuickCheck/0000755000000000000000000000000007346545000025232 5ustar0000000000000000cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Modular/QuickCheck/Utils.hs0000644000000000000000000000250407346545000026667 0ustar0000000000000000module UnitTests.Distribution.Solver.Modular.QuickCheck.Utils ( testPropertyWithSeed ) where import Data.Tagged (Tagged, retag) import System.Random (getStdRandom, random) import Test.Tasty (TestTree) import Test.Tasty.Options (OptionDescription, lookupOption, setOption) import Test.Tasty.Providers (IsTest (..), singleTest) import Test.Tasty.QuickCheck ( QC (..), QuickCheckReplay (..), Testable, property ) import Distribution.Simple.Utils import Distribution.Verbosity -- | Create a QuickCheck test that prints the seed before testing the property. -- The seed can be useful for debugging non-terminating test cases. This is -- related to https://github.com/feuerbach/tasty/issues/86. testPropertyWithSeed :: Testable a => String -> a -> TestTree testPropertyWithSeed name = singleTest name . QCWithSeed . QC . 
property newtype QCWithSeed = QCWithSeed QC instance IsTest QCWithSeed where testOptions = retag (testOptions :: Tagged QC [OptionDescription]) run options (QCWithSeed test) progress = do replay <- case lookupOption options of QuickCheckReplay (Just override) -> return override QuickCheckReplay Nothing -> getStdRandom random notice normal $ "Using --quickcheck-replay=" ++ show replay run (setOption (QuickCheckReplay (Just replay)) options) test progress cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Modular/RetryLog.hs0000644000000000000000000000500607346545000025324 0ustar0000000000000000{-# LANGUAGE StandaloneDeriving #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module UnitTests.Distribution.Solver.Modular.RetryLog ( tests ) where import Distribution.Solver.Modular.Message import Distribution.Solver.Modular.RetryLog import Distribution.Solver.Types.Progress import Test.Tasty (TestTree) import Test.Tasty.HUnit (testCase, (@?=)) import Test.Tasty.QuickCheck ( Arbitrary(..), Blind(..), listOf, oneof, testProperty, (===)) type Log a = Progress a String String tests :: [TestTree] tests = [ testProperty "'toProgress . fromProgress' is identity" $ \p -> toProgress (fromProgress p) === (p :: Log Int) , testProperty "'mapFailure f' is like 'foldProgress Step (Fail . f) Done'" $ let mapFailureProgress f = foldProgress Step (Fail . f) Done in \(Blind f) p -> toProgress (mapFailure f (fromProgress p)) === mapFailureProgress (f :: String -> Int) (p :: Log Int) , testProperty "'retry p f' is like 'foldProgress Step f Done p'" $ \p (Blind f) -> toProgress (retry (fromProgress p) (fromProgress . 
f)) === (foldProgress Step f Done (p :: Log Int) :: Log Int) , testProperty "failWith" $ \step failure -> toProgress (failWith step failure) === (Step step (Fail failure) :: Log Int) , testProperty "succeedWith" $ \step success -> toProgress (succeedWith step success) === (Step step (Done success) :: Log Int) , testProperty "continueWith" $ \step p -> toProgress (continueWith step (fromProgress p)) === (Step step p :: Log Int) , testCase "tryWith with failure" $ let failure = Fail "Error" s = Step Success in toProgress (tryWith Success $ fromProgress (s (s failure))) @?= (s (Step Enter (s (s (Step Leave failure)))) :: Log Message) , testCase "tryWith with success" $ let done = Done "Done" s = Step Success in toProgress (tryWith Success $ fromProgress (s (s done))) @?= (s (Step Enter (s (s done))) :: Log Message) ] instance (Arbitrary step, Arbitrary fail, Arbitrary done) => Arbitrary (Progress step fail done) where arbitrary = do steps <- listOf arbitrary end <- oneof [Fail `fmap` arbitrary, Done `fmap` arbitrary] return $ foldr Step end steps deriving instance (Eq step, Eq fail, Eq done) => Eq (Progress step fail done) deriving instance (Show step, Show fail, Show done) => Show (Progress step fail done) deriving instance Eq Message deriving instance Show Message cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Modular/Solver.hs0000644000000000000000000030347307346545000025040 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} -- | This is a set of unit tests for the dependency solver, -- which uses the solver DSL ("UnitTests.Distribution.Solver.Modular.DSL") -- to more conveniently create package databases to run the solver tests on. module UnitTests.Distribution.Solver.Modular.Solver (tests) where -- base import Data.List (isInfixOf) import qualified Distribution.Version as V -- test-framework import Test.Tasty as TF -- Cabal import Language.Haskell.Extension ( Extension(..) 
, KnownExtension(..), Language(..)) -- cabal-install import Distribution.Solver.Types.Flag import Distribution.Solver.Types.OptionalStanza import Distribution.Solver.Types.PackageConstraint import qualified Distribution.Solver.Types.PackagePath as P import UnitTests.Distribution.Solver.Modular.DSL import UnitTests.Distribution.Solver.Modular.DSL.TestCaseUtils tests :: [TF.TestTree] tests = [ testGroup "Simple dependencies" [ runTest $ mkTest db1 "alreadyInstalled" ["A"] (solverSuccess []) , runTest $ mkTest db1 "installLatest" ["B"] (solverSuccess [("B", 2)]) , runTest $ mkTest db1 "simpleDep1" ["C"] (solverSuccess [("B", 1), ("C", 1)]) , runTest $ mkTest db1 "simpleDep2" ["D"] (solverSuccess [("B", 2), ("D", 1)]) , runTest $ mkTest db1 "failTwoVersions" ["C", "D"] anySolverFailure , runTest $ indep $ mkTest db1 "indepTwoVersions" ["C", "D"] (solverSuccess [("B", 1), ("B", 2), ("C", 1), ("D", 1)]) , runTest $ indep $ mkTest db1 "aliasWhenPossible1" ["C", "E"] (solverSuccess [("B", 1), ("C", 1), ("E", 1)]) , runTest $ indep $ mkTest db1 "aliasWhenPossible2" ["D", "E"] (solverSuccess [("B", 2), ("D", 1), ("E", 1)]) , runTest $ indep $ mkTest db2 "aliasWhenPossible3" ["C", "D"] (solverSuccess [("A", 1), ("A", 2), ("B", 1), ("B", 2), ("C", 1), ("D", 1)]) , runTest $ mkTest db1 "buildDepAgainstOld" ["F"] (solverSuccess [("B", 1), ("E", 1), ("F", 1)]) , runTest $ mkTest db1 "buildDepAgainstNew" ["G"] (solverSuccess [("B", 2), ("E", 1), ("G", 1)]) , runTest $ indep $ mkTest db1 "multipleInstances" ["F", "G"] anySolverFailure , runTest $ mkTest db21 "unknownPackage1" ["A"] (solverSuccess [("A", 1), ("B", 1)]) , runTest $ mkTest db22 "unknownPackage2" ["A"] (solverFailure (isInfixOf "unknown package: C")) , runTest $ mkTest db23 "unknownPackage3" ["A"] (solverFailure (isInfixOf "unknown package: B")) , runTest $ mkTest [] "unknown target" ["A"] (solverFailure (isInfixOf "unknown package: A")) ] , testGroup "Flagged dependencies" [ runTest $ mkTest db3 "forceFlagOn" ["C"] 
(solverSuccess [("A", 1), ("B", 1), ("C", 1)]) , runTest $ mkTest db3 "forceFlagOff" ["D"] (solverSuccess [("A", 2), ("B", 1), ("D", 1)]) , runTest $ indep $ mkTest db3 "linkFlags1" ["C", "D"] anySolverFailure , runTest $ indep $ mkTest db4 "linkFlags2" ["C", "D"] anySolverFailure , runTest $ indep $ mkTest db18 "linkFlags3" ["A", "B"] (solverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1), ("D", 2), ("F", 1)]) ] , testGroup "Lifting dependencies out of conditionals" [ runTest $ commonDependencyLogMessage "common dependency log message" , runTest $ twoLevelDeepCommonDependencyLogMessage "two level deep common dependency log message" , runTest $ testBackjumpingWithCommonDependency "backjumping with common dependency" ] , testGroup "Manual flags" [ runTest $ mkTest dbManualFlags "Use default value for manual flag" ["pkg"] $ solverSuccess [("pkg", 1), ("true-dep", 1)] , let checkFullLog = any $ isInfixOf "rejecting: pkg:-flag (manual flag can only be changed explicitly)" in runTest $ setVerbose $ constraints [ExVersionConstraint (ScopeAnyQualifier "true-dep") V.noVersion] $ mkTest dbManualFlags "Don't toggle manual flag to avoid conflict" ["pkg"] $ -- TODO: We should check the summarized log instead of the full log -- for the manual flags error message, but it currently only -- appears in the full log. 
SolverResult checkFullLog (Left $ const True) , let cs = [ExFlagConstraint (ScopeAnyQualifier "pkg") "flag" False] in runTest $ constraints cs $ mkTest dbManualFlags "Toggle manual flag with flag constraint" ["pkg"] $ solverSuccess [("false-dep", 1), ("pkg", 1)] ] , testGroup "Qualified manual flag constraints" [ let name = "Top-level flag constraint does not constrain setup dep's flag" cs = [ExFlagConstraint (ScopeQualified P.QualToplevel "B") "flag" False] in runTest $ constraints cs $ mkTest dbSetupDepWithManualFlag name ["A"] $ solverSuccess [ ("A", 1), ("B", 1), ("B", 2) , ("b-1-false-dep", 1), ("b-2-true-dep", 1) ] , let name = "Solver can toggle setup dep's flag to match top-level constraint" cs = [ ExFlagConstraint (ScopeQualified P.QualToplevel "B") "flag" False , ExVersionConstraint (ScopeAnyQualifier "b-2-true-dep") V.noVersion ] in runTest $ constraints cs $ mkTest dbSetupDepWithManualFlag name ["A"] $ solverSuccess [ ("A", 1), ("B", 1), ("B", 2) , ("b-1-false-dep", 1), ("b-2-false-dep", 1) ] , let name = "User can constrain flags separately with qualified constraints" cs = [ ExFlagConstraint (ScopeQualified P.QualToplevel "B") "flag" True , ExFlagConstraint (ScopeQualified (P.QualSetup "A") "B") "flag" False ] in runTest $ constraints cs $ mkTest dbSetupDepWithManualFlag name ["A"] $ solverSuccess [ ("A", 1), ("B", 1), ("B", 2) , ("b-1-true-dep", 1), ("b-2-false-dep", 1) ] -- Regression test for #4299 , let name = "Solver can link deps when only one has constrained manual flag" cs = [ExFlagConstraint (ScopeQualified P.QualToplevel "B") "flag" False] in runTest $ constraints cs $ mkTest dbLinkedSetupDepWithManualFlag name ["A"] $ solverSuccess [ ("A", 1), ("B", 1), ("b-1-false-dep", 1) ] , let name = "Solver cannot link deps that have conflicting manual flag constraints" cs = [ ExFlagConstraint (ScopeQualified P.QualToplevel "B") "flag" True , ExFlagConstraint (ScopeQualified (P.QualSetup "A") "B") "flag" False ] failureReason = "(constraint from 
unknown source requires opposite flag selection)" checkFullLog lns = all (\msg -> any (msg `isInfixOf`) lns) [ "rejecting: B:-flag " ++ failureReason , "rejecting: A:setup.B:+flag " ++ failureReason ] in runTest $ constraints cs $ setVerbose $ mkTest dbLinkedSetupDepWithManualFlag name ["A"] $ SolverResult checkFullLog (Left $ const True) ] , testGroup "Stanzas" [ runTest $ enableAllTests $ mkTest db5 "simpleTest1" ["C"] (solverSuccess [("A", 2), ("C", 1)]) , runTest $ enableAllTests $ mkTest db5 "simpleTest2" ["D"] anySolverFailure , runTest $ enableAllTests $ mkTest db5 "simpleTest3" ["E"] (solverSuccess [("A", 1), ("E", 1)]) , runTest $ enableAllTests $ mkTest db5 "simpleTest4" ["F"] anySolverFailure -- TODO , runTest $ enableAllTests $ mkTest db5 "simpleTest5" ["G"] (solverSuccess [("A", 2), ("G", 1)]) , runTest $ enableAllTests $ mkTest db5 "simpleTest6" ["E", "G"] anySolverFailure , runTest $ indep $ enableAllTests $ mkTest db5 "simpleTest7" ["E", "G"] (solverSuccess [("A", 1), ("A", 2), ("E", 1), ("G", 1)]) , runTest $ enableAllTests $ mkTest db6 "depsWithTests1" ["C"] (solverSuccess [("A", 1), ("B", 1), ("C", 1)]) , runTest $ indep $ enableAllTests $ mkTest db6 "depsWithTests2" ["C", "D"] (solverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1)]) , runTest $ testTestSuiteWithFlag "test suite with flag" ] , testGroup "Setup dependencies" [ runTest $ mkTest db7 "setupDeps1" ["B"] (solverSuccess [("A", 2), ("B", 1)]) , runTest $ mkTest db7 "setupDeps2" ["C"] (solverSuccess [("A", 2), ("C", 1)]) , runTest $ mkTest db7 "setupDeps3" ["D"] (solverSuccess [("A", 1), ("D", 1)]) , runTest $ mkTest db7 "setupDeps4" ["E"] (solverSuccess [("A", 1), ("A", 2), ("E", 1)]) , runTest $ mkTest db7 "setupDeps5" ["F"] (solverSuccess [("A", 1), ("A", 2), ("F", 1)]) , runTest $ mkTest db8 "setupDeps6" ["C", "D"] (solverSuccess [("A", 1), ("B", 1), ("B", 2), ("C", 1), ("D", 1)]) , runTest $ mkTest db9 "setupDeps7" ["F", "G"] (solverSuccess [("A", 1), ("B", 1), ("B",2 ), ("C", 1), 
("D", 1), ("E", 1), ("E", 2), ("F", 1), ("G", 1)]) , runTest $ mkTest db10 "setupDeps8" ["C"] (solverSuccess [("C", 1)]) , runTest $ indep $ mkTest dbSetupDeps "setupDeps9" ["A", "B"] (solverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1), ("D", 2)]) ] , testGroup "Base shim" [ runTest $ mkTest db11 "baseShim1" ["A"] (solverSuccess [("A", 1)]) , runTest $ mkTest db12 "baseShim2" ["A"] (solverSuccess [("A", 1)]) , runTest $ mkTest db12 "baseShim3" ["B"] (solverSuccess [("B", 1)]) , runTest $ mkTest db12 "baseShim4" ["C"] (solverSuccess [("A", 1), ("B", 1), ("C", 1)]) , runTest $ mkTest db12 "baseShim5" ["D"] anySolverFailure , runTest $ mkTest db12 "baseShim6" ["E"] (solverSuccess [("E", 1), ("syb", 2)]) ] , testGroup "Base" [ runTest $ mkTest dbBase "Refuse to install base without --allow-boot-library-installs" ["base"] $ solverFailure (isInfixOf "only already installed instances can be used") , runTest $ allowBootLibInstalls $ mkTest dbBase "Install base with --allow-boot-library-installs" ["base"] $ solverSuccess [("base", 1), ("ghc-prim", 1), ("integer-gmp", 1), ("integer-simple", 1)] ] , testGroup "reject-unconstrained" [ runTest $ onlyConstrained $ mkTest db12 "missing syb" ["E"] $ solverFailure (isInfixOf "not a user-provided goal") , runTest $ onlyConstrained $ mkTest db12 "all goals" ["E", "syb"] $ solverSuccess [("E", 1), ("syb", 2)] , runTest $ onlyConstrained $ mkTest db17 "backtracking" ["A", "B"] $ solverSuccess [("A", 2), ("B", 1)] , runTest $ onlyConstrained $ mkTest db17 "failure message" ["A"] $ solverFailure $ isInfixOf $ "Could not resolve dependencies:\n" ++ "[__0] trying: A-3.0.0 (user goal)\n" ++ "[__1] next goal: C (dependency of A)\n" ++ "[__1] fail (not a user-provided goal nor mentioned as a constraint, " ++ "but reject-unconstrained-dependencies was set)\n" ++ "[__1] fail (backjumping, conflict set: A, C)\n" ++ "After searching the rest of the dependency tree exhaustively, " ++ "these were the goals I've had most trouble fulfilling: A, C, 
B" ] , testGroup "Cycles" [ runTest $ mkTest db14 "simpleCycle1" ["A"] anySolverFailure , runTest $ mkTest db14 "simpleCycle2" ["A", "B"] anySolverFailure , runTest $ mkTest db14 "cycleWithFlagChoice1" ["C"] (solverSuccess [("C", 1), ("E", 1)]) , runTest $ mkTest db15 "cycleThroughSetupDep1" ["A"] anySolverFailure , runTest $ mkTest db15 "cycleThroughSetupDep2" ["B"] anySolverFailure , runTest $ mkTest db15 "cycleThroughSetupDep3" ["C"] (solverSuccess [("C", 2), ("D", 1)]) , runTest $ mkTest db15 "cycleThroughSetupDep4" ["D"] (solverSuccess [("D", 1)]) , runTest $ mkTest db15 "cycleThroughSetupDep5" ["E"] (solverSuccess [("C", 2), ("D", 1), ("E", 1)]) , runTest $ issue4161 "detect cycle between package and its setup script" , runTest $ testCyclicDependencyErrorMessages "cyclic dependency error messages" ] , testGroup "Extensions" [ runTest $ mkTestExts [EnableExtension CPP] dbExts1 "unsupported" ["A"] anySolverFailure , runTest $ mkTestExts [EnableExtension CPP] dbExts1 "unsupportedIndirect" ["B"] anySolverFailure , runTest $ mkTestExts [EnableExtension RankNTypes] dbExts1 "supported" ["A"] (solverSuccess [("A",1)]) , runTest $ mkTestExts (map EnableExtension [CPP,RankNTypes]) dbExts1 "supportedIndirect" ["C"] (solverSuccess [("A",1),("B",1), ("C",1)]) , runTest $ mkTestExts [EnableExtension CPP] dbExts1 "disabledExtension" ["D"] anySolverFailure , runTest $ mkTestExts (map EnableExtension [CPP,RankNTypes]) dbExts1 "disabledExtension" ["D"] anySolverFailure , runTest $ mkTestExts (UnknownExtension "custom" : map EnableExtension [CPP,RankNTypes]) dbExts1 "supportedUnknown" ["E"] (solverSuccess [("A",1),("B",1),("C",1),("E",1)]) ] , testGroup "Languages" [ runTest $ mkTestLangs [Haskell98] dbLangs1 "unsupported" ["A"] anySolverFailure , runTest $ mkTestLangs [Haskell98,Haskell2010] dbLangs1 "supported" ["A"] (solverSuccess [("A",1)]) , runTest $ mkTestLangs [Haskell98] dbLangs1 "unsupportedIndirect" ["B"] anySolverFailure , runTest $ mkTestLangs [Haskell98, 
Haskell2010, UnknownLanguage "Haskell3000"] dbLangs1 "supportedUnknown" ["C"] (solverSuccess [("A",1),("B",1),("C",1)]) ] , testGroup "Qualified Package Constraints" [ runTest $ mkTest dbConstraints "install latest versions without constraints" ["A", "B", "C"] $ solverSuccess [("A", 7), ("B", 8), ("C", 9), ("D", 7), ("D", 8), ("D", 9)] , let cs = [ ExVersionConstraint (ScopeAnyQualifier "D") $ mkVersionRange 1 4 ] in runTest $ constraints cs $ mkTest dbConstraints "force older versions with unqualified constraint" ["A", "B", "C"] $ solverSuccess [("A", 1), ("B", 2), ("C", 3), ("D", 1), ("D", 2), ("D", 3)] , let cs = [ ExVersionConstraint (ScopeQualified P.QualToplevel "D") $ mkVersionRange 1 4 , ExVersionConstraint (ScopeQualified (P.QualSetup "B") "D") $ mkVersionRange 4 7 ] in runTest $ constraints cs $ mkTest dbConstraints "force multiple versions with qualified constraints" ["A", "B", "C"] $ solverSuccess [("A", 1), ("B", 5), ("C", 9), ("D", 1), ("D", 5), ("D", 9)] , let cs = [ ExVersionConstraint (ScopeAnySetupQualifier "D") $ mkVersionRange 1 4 ] in runTest $ constraints cs $ mkTest dbConstraints "constrain package across setup scripts" ["A", "B", "C"] $ solverSuccess [("A", 7), ("B", 2), ("C", 3), ("D", 2), ("D", 3), ("D", 7)] ] , testGroup "Package Preferences" [ runTest $ preferences [ ExPkgPref "A" $ mkvrThis 1] $ mkTest db13 "selectPreferredVersionSimple" ["A"] (solverSuccess [("A", 1)]) , runTest $ preferences [ ExPkgPref "A" $ mkvrOrEarlier 2] $ mkTest db13 "selectPreferredVersionSimple2" ["A"] (solverSuccess [("A", 2)]) , runTest $ preferences [ ExPkgPref "A" $ mkvrOrEarlier 2 , ExPkgPref "A" $ mkvrOrEarlier 1] $ mkTest db13 "selectPreferredVersionMultiple" ["A"] (solverSuccess [("A", 1)]) , runTest $ preferences [ ExPkgPref "A" $ mkvrOrEarlier 1 , ExPkgPref "A" $ mkvrOrEarlier 2] $ mkTest db13 "selectPreferredVersionMultiple2" ["A"] (solverSuccess [("A", 1)]) , runTest $ preferences [ ExPkgPref "A" $ mkvrThis 1 , ExPkgPref "A" $ mkvrThis 2] $ mkTest 
db13 "selectPreferredVersionMultiple3" ["A"] (solverSuccess [("A", 2)]) , runTest $ preferences [ ExPkgPref "A" $ mkvrThis 1 , ExPkgPref "A" $ mkvrOrEarlier 2] $ mkTest db13 "selectPreferredVersionMultiple4" ["A"] (solverSuccess [("A", 1)]) ] , testGroup "Stanza Preferences" [ runTest $ mkTest dbStanzaPreferences1 "disable tests by default" ["pkg"] $ solverSuccess [("pkg", 1)] , runTest $ preferences [ExStanzaPref "pkg" [TestStanzas]] $ mkTest dbStanzaPreferences1 "enable tests with testing preference" ["pkg"] $ solverSuccess [("pkg", 1), ("test-dep", 1)] , runTest $ preferences [ExStanzaPref "pkg" [TestStanzas]] $ mkTest dbStanzaPreferences2 "disable testing when it's not possible" ["pkg"] $ solverSuccess [("pkg", 1)] , testStanzaPreference "test stanza preference" ] , testGroup "Buildable Field" [ testBuildable "avoid building component with unknown dependency" (ExAny "unknown") , testBuildable "avoid building component with unknown extension" (ExExt (UnknownExtension "unknown")) , testBuildable "avoid building component with unknown language" (ExLang (UnknownLanguage "unknown")) , runTest $ mkTest dbBuildable1 "choose flags that set buildable to false" ["pkg"] (solverSuccess [("flag1-false", 1), ("flag2-true", 1), ("pkg", 1)]) , runTest $ mkTest dbBuildable2 "choose version that sets buildable to false" ["A"] (solverSuccess [("A", 1), ("B", 2)]) ] , testGroup "Pkg-config dependencies" [ runTest $ mkTestPCDepends (Just []) dbPC1 "noPkgs" ["A"] anySolverFailure , runTest $ mkTestPCDepends (Just [("pkgA", "0")]) dbPC1 "tooOld" ["A"] anySolverFailure , runTest $ mkTestPCDepends (Just [("pkgA", "1.0.0"), ("pkgB", "1.0.0")]) dbPC1 "pruneNotFound" ["C"] (solverSuccess [("A", 1), ("B", 1), ("C", 1)]) , runTest $ mkTestPCDepends (Just [("pkgA", "1.0.0"), ("pkgB", "2.0.0")]) dbPC1 "chooseNewest" ["C"] (solverSuccess [("A", 1), ("B", 2), ("C", 1)]) , runTest $ mkTestPCDepends Nothing dbPC1 "noPkgConfigFailure" ["A"] anySolverFailure , runTest $ mkTestPCDepends Nothing 
dbPC1 "noPkgConfigSuccess" ["D"] (solverSuccess [("D",1)]) ] , testGroup "Independent goals" [ runTest $ indep $ mkTest db16 "indepGoals1" ["A", "B"] (solverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1), ("D", 2), ("E", 1)]) , runTest $ testIndepGoals2 "indepGoals2" , runTest $ testIndepGoals3 "indepGoals3" , runTest $ testIndepGoals4 "indepGoals4" , runTest $ testIndepGoals5 "indepGoals5 - fixed goal order" FixedGoalOrder , runTest $ testIndepGoals5 "indepGoals5 - default goal order" DefaultGoalOrder , runTest $ testIndepGoals6 "indepGoals6 - fixed goal order" FixedGoalOrder , runTest $ testIndepGoals6 "indepGoals6 - default goal order" DefaultGoalOrder ] -- Tests designed for the backjumping blog post , testGroup "Backjumping" [ runTest $ mkTest dbBJ1a "bj1a" ["A"] (solverSuccess [("A", 1), ("B", 1)]) , runTest $ mkTest dbBJ1b "bj1b" ["A"] (solverSuccess [("A", 1), ("B", 1)]) , runTest $ mkTest dbBJ1c "bj1c" ["A"] (solverSuccess [("A", 1), ("B", 1)]) , runTest $ mkTest dbBJ2 "bj2" ["A"] (solverSuccess [("A", 1), ("B", 1), ("C", 1)]) , runTest $ mkTest dbBJ3 "bj3" ["A"] (solverSuccess [("A", 1), ("Ba", 1), ("C", 1)]) , runTest $ mkTest dbBJ4 "bj4" ["A"] (solverSuccess [("A", 1), ("B", 1), ("C", 1)]) , runTest $ mkTest dbBJ5 "bj5" ["A"] (solverSuccess [("A", 1), ("B", 1), ("D", 1)]) , runTest $ mkTest dbBJ6 "bj6" ["A"] (solverSuccess [("A", 1), ("B", 1)]) , runTest $ mkTest dbBJ7 "bj7" ["A"] (solverSuccess [("A", 1), ("B", 1), ("C", 1)]) , runTest $ indep $ mkTest dbBJ8 "bj8" ["A", "B"] (solverSuccess [("A", 1), ("B", 1), ("C", 1)]) ] , testGroup "main library dependencies" [ let db = [Right $ exAvNoLibrary "A" 1 `withExe` exExe "exe" []] in runTest $ mkTest db "install build target without a library" ["A"] $ solverSuccess [("A", 1)] , let db = [ Right $ exAv "A" 1 [ExAny "B"] , Right $ exAvNoLibrary "B" 1 `withExe` exExe "exe" [] ] in runTest $ mkTest db "reject build-depends dependency with no library" ["A"] $ solverFailure (isInfixOf "rejecting: B-1.0.0 (does 
not contain library, which is required by A)") , let exe = exExe "exe" [] db = [ Right $ exAv "A" 1 [ExAny "B"] , Right $ exAvNoLibrary "B" 2 `withExe` exe , Right $ exAv "B" 1 [] `withExe` exe ] in runTest $ mkTest db "choose version of build-depends dependency that has a library" ["A"] $ solverSuccess [("A", 1), ("B", 1)] ] , testGroup "sub-library dependencies" [ let db = [ Right $ exAv "A" 1 [ExSubLibAny "B" "sub-lib"] , Right $ exAv "B" 1 [] ] in runTest $ mkTest db "reject package that is missing required sub-library" ["A"] $ solverFailure $ isInfixOf $ "rejecting: B-1.0.0 (does not contain library 'sub-lib', which is required by A)" , let db = [ Right $ exAv "A" 1 [ExSubLibAny "B" "sub-lib"] , Right $ exAvNoLibrary "B" 1 `withSubLibrary` exSubLib "sub-lib" [] ] in runTest $ mkTest db "reject package with private but required sub-library" ["A"] $ solverFailure $ isInfixOf $ "rejecting: B-1.0.0 (library 'sub-lib' is private, but it is required by A)" , let db = [ Right $ exAv "A" 1 [ExSubLibAny "B" "sub-lib"] , Right $ exAvNoLibrary "B" 1 `withSubLibrary` exSubLib "sub-lib" [ExFlagged "make-lib-private" (dependencies []) publicDependencies] ] in runTest $ constraints [ExFlagConstraint (ScopeAnyQualifier "B") "make-lib-private" True] $ mkTest db "reject package with sub-library made private by flag constraint" ["A"] $ solverFailure $ isInfixOf $ "rejecting: B-1.0.0 (library 'sub-lib' is private, but it is required by A)" , let db = [ Right $ exAv "A" 1 [ExSubLibAny "B" "sub-lib"] , Right $ exAvNoLibrary "B" 1 `withSubLibrary` exSubLib "sub-lib" [ExFlagged "make-lib-private" (dependencies []) publicDependencies] ] in runTest $ mkTest db "treat sub-library as visible even though flag choice could make it private" ["A"] $ solverSuccess [("A", 1), ("B", 1)] , let db = [ Right $ exAv "A" 1 [ExAny "B"] , Right $ exAv "B" 1 [] `withSubLibrary` exSubLib "sub-lib" [] , Right $ exAv "C" 1 [ExSubLibAny "B" "sub-lib"] ] goals :: [ExampleVar] goals = [ P QualNone "A" , P 
QualNone "B" , P QualNone "C" ] in runTest $ goalOrder goals $ mkTest db "reject package that requires a private sub-library" ["A", "C"] $ solverFailure $ isInfixOf $ "rejecting: C-1.0.0 (requires library 'sub-lib' from B, but the component is private)" , let db = [ Right $ exAv "A" 1 [ExSubLibAny "B" "sub-lib-v1"] , Right $ exAv "B" 2 [] `withSubLibrary` ExSubLib "sub-lib-v2" publicDependencies , Right $ exAv "B" 1 [] `withSubLibrary` ExSubLib "sub-lib-v1" publicDependencies ] in runTest $ mkTest db "choose version of package containing correct sub-library" ["A"] $ solverSuccess [("A", 1), ("B", 1)] , let db = [ Right $ exAv "A" 1 [ExSubLibAny "B" "sub-lib"] , Right $ exAv "B" 2 [] `withSubLibrary` ExSubLib "sub-lib" (dependencies []) , Right $ exAv "B" 1 [] `withSubLibrary` ExSubLib "sub-lib" publicDependencies ] in runTest $ mkTest db "choose version of package with public sub-library" ["A"] $ solverSuccess [("A", 1), ("B", 1)] ] -- build-tool-depends dependencies , testGroup "build-tool-depends" [ runTest $ mkTest dbBuildTools "simple exe dependency" ["A"] (solverSuccess [("A", 1), ("bt-pkg", 2)]) , runTest $ disableSolveExecutables $ mkTest dbBuildTools "don't install build tool packages in legacy mode" ["A"] (solverSuccess [("A", 1)]) , runTest $ mkTest dbBuildTools "flagged exe dependency" ["B"] (solverSuccess [("B", 1), ("bt-pkg", 2)]) , runTest $ enableAllTests $ mkTest dbBuildTools "test suite exe dependency" ["C"] (solverSuccess [("C", 1), ("bt-pkg", 2)]) , runTest $ mkTest dbBuildTools "unknown exe" ["D"] $ solverFailure (isInfixOf "does not contain executable 'unknown-exe', which is required by D") , runTest $ disableSolveExecutables $ mkTest dbBuildTools "don't check for build tool executables in legacy mode" ["D"] $ solverSuccess [("D", 1)] , runTest $ mkTest dbBuildTools "unknown build tools package error mentions package, not exe" ["E"] $ solverFailure (isInfixOf "unknown package: E:unknown-pkg:exe.unknown-pkg (dependency of E)") , runTest $ mkTest 
dbBuildTools "unknown flagged exe" ["F"] $ solverFailure (isInfixOf "does not contain executable 'unknown-exe', which is required by F +flagF") , runTest $ enableAllTests $ mkTest dbBuildTools "unknown test suite exe" ["G"] $ solverFailure (isInfixOf "does not contain executable 'unknown-exe', which is required by G *test") , runTest $ mkTest dbBuildTools "wrong exe for build tool package version" ["H"] $ solverFailure $ isInfixOf $ -- The solver reports the version conflict when a version conflict -- and an executable conflict apply to the same package version. "[__1] rejecting: H:bt-pkg:exe.bt-pkg-4.0.0 (conflict: H => H:bt-pkg:exe.bt-pkg (exe exe1)==3.0.0)\n" ++ "[__1] rejecting: H:bt-pkg:exe.bt-pkg-3.0.0 (does not contain executable 'exe1', which is required by H)\n" ++ "[__1] rejecting: H:bt-pkg:exe.bt-pkg-2.0.0 (conflict: H => H:bt-pkg:exe.bt-pkg (exe exe1)==3.0.0)" , runTest $ chooseExeAfterBuildToolsPackage True "choose exe after choosing its package - success" , runTest $ chooseExeAfterBuildToolsPackage False "choose exe after choosing its package - failure" , runTest $ rejectInstalledBuildToolPackage "reject installed package for build-tool dependency" , runTest $ requireConsistentBuildToolVersions "build tool versions must be consistent within one package" ] -- build-tools dependencies , testGroup "legacy build-tools" [ runTest $ mkTest dbLegacyBuildTools1 "bt1" ["A"] (solverSuccess [("A", 1), ("alex", 1)]) , runTest $ disableSolveExecutables $ mkTest dbLegacyBuildTools1 "bt1 - don't install build tool packages in legacy mode" ["A"] (solverSuccess [("A", 1)]) , runTest $ mkTest dbLegacyBuildTools2 "bt2" ["A"] $ solverFailure (isInfixOf "does not contain executable 'alex', which is required by A") , runTest $ disableSolveExecutables $ mkTest dbLegacyBuildTools2 "bt2 - don't check for build tool executables in legacy mode" ["A"] (solverSuccess [("A", 1)]) , runTest $ mkTest dbLegacyBuildTools3 "bt3" ["A"] (solverSuccess [("A", 1)]) , runTest $ mkTest 
dbLegacyBuildTools4 "bt4" ["C"] (solverSuccess [("A", 1), ("B", 1), ("C", 1), ("alex", 1), ("alex", 2)]) , runTest $ mkTest dbLegacyBuildTools5 "bt5" ["B"] (solverSuccess [("A", 1), ("A", 2), ("B", 1), ("alex", 1)]) , runTest $ mkTest dbLegacyBuildTools6 "bt6" ["A"] (solverSuccess [("A", 1), ("alex", 1), ("happy", 1)]) ] -- internal dependencies , testGroup "internal dependencies" [ runTest $ mkTest dbIssue3775 "issue #3775" ["B"] (solverSuccess [("A", 2), ("B", 2), ("warp", 1)]) ] -- tests for partial fix for issue #5325 , testGroup "Components that are unbuildable in the current environment" $ let flagConstraint = ExFlagConstraint . ScopeAnyQualifier in [ let db = [ Right $ exAv "A" 1 [ExFlagged "build-lib" (dependencies []) unbuildableDependencies] ] in runTest $ constraints [flagConstraint "A" "build-lib" False] $ mkTest db "install unbuildable library" ["A"] $ solverSuccess [("A", 1)] , let db = [ Right $ exAvNoLibrary "A" 1 `withExe` exExe "exe" [ExFlagged "build-exe" (dependencies []) unbuildableDependencies] ] in runTest $ constraints [flagConstraint "A" "build-exe" False] $ mkTest db "install unbuildable exe" ["A"] $ solverSuccess [("A", 1)] , let db = [ Right $ exAv "A" 1 [ExAny "B"] , Right $ exAv "B" 1 [ExFlagged "build-lib" (dependencies []) unbuildableDependencies] ] in runTest $ constraints [flagConstraint "B" "build-lib" False] $ mkTest db "reject library dependency with unbuildable library" ["A"] $ solverFailure $ isInfixOf $ "rejecting: B-1.0.0 (library is not buildable in the " ++ "current environment, but it is required by A)" , let db = [ Right $ exAv "A" 1 [ExBuildToolAny "B" "bt"] , Right $ exAv "B" 1 [ExFlagged "build-lib" (dependencies []) unbuildableDependencies] `withExe` exExe "bt" [] ] in runTest $ constraints [flagConstraint "B" "build-lib" False] $ mkTest db "allow build-tool dependency with unbuildable library" ["A"] $ solverSuccess [("A", 1), ("B", 1)] , let db = [ Right $ exAv "A" 1 [ExBuildToolAny "B" "bt"] , Right $ exAv "B" 1 [] 
`withExe` exExe "bt" [ExFlagged "build-exe" (dependencies []) unbuildableDependencies] ] in runTest $ constraints [flagConstraint "B" "build-exe" False] $ mkTest db "reject build-tool dependency with unbuildable exe" ["A"] $ solverFailure $ isInfixOf $ "rejecting: A:B:exe.B-1.0.0 (executable 'bt' is not " ++ "buildable in the current environment, but it is required by A)" , runTest $ chooseUnbuildableExeAfterBuildToolsPackage "choose unbuildable exe after choosing its package" ] , testGroup "--fine-grained-conflicts" [ -- Skipping a version because of a problematic dependency: -- -- When the solver explores A-4, it finds that it cannot satisfy B's -- dependencies. This allows the solver to skip the subsequent -- versions of A that also depend on B. runTest $ let db = [ Right $ exAv "A" 4 [ExAny "B"] , Right $ exAv "A" 3 [ExAny "B"] , Right $ exAv "A" 2 [ExAny "B"] , Right $ exAv "A" 1 [] , Right $ exAv "B" 2 [ExAny "unknown1"] , Right $ exAv "B" 1 [ExAny "unknown2"] ] msg = [ "[__0] trying: A-4.0.0 (user goal)" , "[__1] trying: B-2.0.0 (dependency of A)" , "[__2] unknown package: unknown1 (dependency of B)" , "[__2] fail (backjumping, conflict set: B, unknown1)" , "[__1] trying: B-1.0.0" , "[__2] unknown package: unknown2 (dependency of B)" , "[__2] fail (backjumping, conflict set: B, unknown2)" , "[__1] fail (backjumping, conflict set: A, B, unknown1, unknown2)" , "[__0] skipping: A-3.0.0, A-2.0.0 (has the same characteristics that " ++ "caused the previous version to fail: depends on 'B')" , "[__0] trying: A-1.0.0" , "[__1] done" ] in setVerbose $ mkTest db "skip version due to problematic dependency" ["A"] $ SolverResult (isInfixOf msg) $ Right [("A", 1)] , -- Skipping a version because of a restrictive constraint on a -- dependency: -- -- The solver rejects A-4 because its constraint on B excludes B-1. -- Then the solver is able to skip A-3 and A-2 because they also -- exclude B-1, even though they don't have the exact same constraints -- on B. 
runTest $ let db = [ Right $ exAv "A" 4 [ExFix "B" 14] , Right $ exAv "A" 3 [ExFix "B" 13] , Right $ exAv "A" 2 [ExFix "B" 12] , Right $ exAv "A" 1 [ExFix "B" 11] , Right $ exAv "B" 11 [] ] msg = [ "[__0] trying: A-4.0.0 (user goal)" , "[__1] next goal: B (dependency of A)" , "[__1] rejecting: B-11.0.0 (conflict: A => B==14.0.0)" , "[__1] fail (backjumping, conflict set: A, B)" , "[__0] skipping: A-3.0.0, A-2.0.0 (has the same characteristics that " ++ "caused the previous version to fail: depends on 'B' but excludes " ++ "version 11.0.0)" , "[__0] trying: A-1.0.0" , "[__1] next goal: B (dependency of A)" , "[__1] trying: B-11.0.0" , "[__2] done" ] in setVerbose $ mkTest db "skip version due to restrictive constraint on its dependency" ["A"] $ SolverResult (isInfixOf msg) $ Right [("A", 1), ("B", 11)] , -- This test tests the case where the solver chooses a version for one -- package, B, before choosing a version for one of its reverse -- dependencies, C. While the solver is exploring the subtree rooted -- at B-3, it finds that C-2's dependency on B conflicts with B-3. -- Then the solver is able to skip C-1, because it also excludes B-3. -- -- --fine-grained-conflicts could have a benefit in this case even -- though the solver would have found the conflict between B-3 and C-1 -- immediately after trying C-1 anyway. It prevents C-1 from -- introducing any other conflicts which could increase the size of -- the conflict set. 
runTest $ let db = [ Right $ exAv "A" 1 [ExAny "B", ExAny "C"] , Right $ exAv "B" 3 [] , Right $ exAv "B" 2 [] , Right $ exAv "B" 1 [] , Right $ exAv "C" 2 [ExFix "B" 2] , Right $ exAv "C" 1 [ExFix "B" 1] ] goals = [P QualNone pkg | pkg <- ["A", "B", "C"]] expectedMsg = [ "[__0] trying: A-1.0.0 (user goal)" , "[__1] trying: B-3.0.0 (dependency of A)" , "[__2] next goal: C (dependency of A)" , "[__2] rejecting: C-2.0.0 (conflict: B==3.0.0, C => B==2.0.0)" , "[__2] skipping: C-1.0.0 (has the same characteristics that caused the " ++ "previous version to fail: excludes 'B' version 3.0.0)" , "[__2] fail (backjumping, conflict set: A, B, C)" , "[__1] trying: B-2.0.0" , "[__2] next goal: C (dependency of A)" , "[__2] trying: C-2.0.0" , "[__3] done" ] in setVerbose $ goalOrder goals $ mkTest db "skip version that excludes dependency that was already chosen" ["A"] $ SolverResult (isInfixOf expectedMsg) $ Right [("A", 1), ("B", 2), ("C", 2)] , -- This test tests how the solver merges conflicts when it has -- multiple reasons to add a variable to the conflict set. In this -- case, package A conflicts with B and C. The solver should take the -- union of the conflicts and then only skip a version if it does not -- resolve any of the conflicts. -- -- The solver rejects A-3 because it can't find consistent versions for -- its two dependencies, B and C. Then it skips A-2 because A-2 also -- depends on B and C. This test ensures that the solver considers -- A-1 even though A-1 only resolves one of the conflicts (A-1 removes -- the dependency on C). 
runTest $ let db = [ Right $ exAv "A" 3 [ExAny "B", ExAny "C"] , Right $ exAv "A" 2 [ExAny "B", ExAny "C"] , Right $ exAv "A" 1 [ExAny "B"] , Right $ exAv "B" 1 [ExFix "D" 1] , Right $ exAv "C" 1 [ExFix "D" 2] , Right $ exAv "D" 1 [] , Right $ exAv "D" 2 [] ] goals = [P QualNone pkg | pkg <- ["A", "B", "C", "D"]] msg = [ "[__0] trying: A-3.0.0 (user goal)" , "[__1] trying: B-1.0.0 (dependency of A)" , "[__2] trying: C-1.0.0 (dependency of A)" , "[__3] next goal: D (dependency of B)" , "[__3] rejecting: D-2.0.0 (conflict: B => D==1.0.0)" , "[__3] rejecting: D-1.0.0 (conflict: C => D==2.0.0)" , "[__3] fail (backjumping, conflict set: B, C, D)" , "[__2] fail (backjumping, conflict set: A, B, C, D)" , "[__1] fail (backjumping, conflict set: A, B, C, D)" , "[__0] skipping: A-2.0.0 (has the same characteristics that caused the " ++ "previous version to fail: depends on 'B'; depends on 'C')" , "[__0] trying: A-1.0.0" , "[__1] trying: B-1.0.0 (dependency of A)" , "[__2] next goal: D (dependency of B)" , "[__2] rejecting: D-2.0.0 (conflict: B => D==1.0.0)" , "[__2] trying: D-1.0.0" , "[__3] done" ] in setVerbose $ goalOrder goals $ mkTest db "only skip a version if it resolves none of the previous conflicts" ["A"] $ SolverResult (isInfixOf msg) $ Right [("A", 1), ("B", 1), ("D", 1)] , -- This test ensures that the solver log doesn't show all conflicts -- that the solver encountered in a subtree. The solver should only -- show the conflicts that are contained in the current conflict set. -- -- The goal order forces the solver to try A-4, encounter a conflict -- with B-2, try B-1, and then try C. A-4 conflicts with the only -- version of C, so the solver backjumps with a conflict set of -- {A, C}. When the solver skips the next version of A, the log should -- mention the conflict with C but not B. 
runTest $ let db = [ Right $ exAv "A" 4 [ExFix "B" 1, ExFix "C" 1] , Right $ exAv "A" 3 [ExFix "B" 1, ExFix "C" 1] , Right $ exAv "A" 2 [ExFix "C" 1] , Right $ exAv "A" 1 [ExFix "C" 2] , Right $ exAv "B" 2 [] , Right $ exAv "B" 1 [] , Right $ exAv "C" 2 [] ] goals = [P QualNone pkg | pkg <- ["A", "B", "C"]] msg = [ "[__0] trying: A-4.0.0 (user goal)" , "[__1] next goal: B (dependency of A)" , "[__1] rejecting: B-2.0.0 (conflict: A => B==1.0.0)" , "[__1] trying: B-1.0.0" , "[__2] next goal: C (dependency of A)" , "[__2] rejecting: C-2.0.0 (conflict: A => C==1.0.0)" , "[__2] fail (backjumping, conflict set: A, C)" , "[__0] skipping: A-3.0.0, A-2.0.0 (has the same characteristics that caused the " ++ "previous version to fail: depends on 'C' but excludes version 2.0.0)" , "[__0] trying: A-1.0.0" , "[__1] next goal: C (dependency of A)" , "[__1] trying: C-2.0.0" , "[__2] done" ] in setVerbose $ goalOrder goals $ mkTest db "don't show conflicts that aren't part of the conflict set" ["A"] $ SolverResult (isInfixOf msg) $ Right [("A", 1), ("C", 2)] , -- Tests that the conflict set is properly updated when a version is -- skipped due to being excluded by one of its reverse dependencies' -- constraints. runTest $ let db = [ Right $ exAv "A" 2 [ExFix "B" 3] , Right $ exAv "A" 1 [ExFix "B" 1] , Right $ exAv "B" 2 [] , Right $ exAv "B" 1 [] ] msg = [ "[__0] trying: A-2.0.0 (user goal)" , "[__1] next goal: B (dependency of A)" -- During this step, the solver adds A and B to the -- conflict set, with the details of each package's -- conflict: -- -- A: A's constraint rejected B-2. -- B: B was rejected by A's B==3 constraint , "[__1] rejecting: B-2.0.0 (conflict: A => B==3.0.0)" -- When the solver skips B-1, it cannot simply reuse the -- previous conflict set. It also needs to update A's -- entry to say that A also rejected B-1. Otherwise, the -- solver wouldn't know that A-1 could resolve one of -- the conflicts encountered while exploring A-2. 
The -- solver would skip A-1, even though it leads to the -- solution. , "[__1] skipping: B-1.0.0 (has the same characteristics that caused " ++ "the previous version to fail: excluded by constraint '==3.0.0' from 'A')" , "[__1] fail (backjumping, conflict set: A, B)" , "[__0] trying: A-1.0.0" , "[__1] next goal: B (dependency of A)" , "[__1] rejecting: B-2.0.0 (conflict: A => B==1.0.0)" , "[__1] trying: B-1.0.0" , "[__2] done" ] in setVerbose $ mkTest db "update conflict set after skipping version - 1" ["A"] $ SolverResult (isInfixOf msg) $ Right [("A", 1), ("B", 1)] , -- Tests that the conflict set is properly updated when a version is -- skipped due to excluding a version of one of its dependencies. -- This test is similar the previous one, with the goal order reversed. runTest $ let db = [ Right $ exAv "A" 2 [] , Right $ exAv "A" 1 [] , Right $ exAv "B" 2 [ExFix "A" 3] , Right $ exAv "B" 1 [ExFix "A" 1] ] goals = [P QualNone pkg | pkg <- ["A", "B"]] msg = [ "[__0] trying: A-2.0.0 (user goal)" , "[__1] next goal: B (user goal)" , "[__1] rejecting: B-2.0.0 (conflict: A==2.0.0, B => A==3.0.0)" , "[__1] skipping: B-1.0.0 (has the same characteristics that caused " ++ "the previous version to fail: excludes 'A' version 2.0.0)" , "[__1] fail (backjumping, conflict set: A, B)" , "[__0] trying: A-1.0.0" , "[__1] next goal: B (user goal)" , "[__1] rejecting: B-2.0.0 (conflict: A==1.0.0, B => A==3.0.0)" , "[__1] trying: B-1.0.0" , "[__2] done" ] in setVerbose $ goalOrder goals $ mkTest db "update conflict set after skipping version - 2" ["A", "B"] $ SolverResult (isInfixOf msg) $ Right [("A", 1), ("B", 1)] ] -- Tests for the contents of the solver's log , testGroup "Solver log" [ -- See issue #3203. The solver should only choose a version for A once. 
runTest $ let db = [Right $ exAv "A" 1 []] p :: [String] -> Bool p lg = elem "targets: A" lg && length (filter ("trying: A" `isInfixOf`) lg) == 1 in setVerbose $ mkTest db "deduplicate targets" ["A", "A"] $ SolverResult p $ Right [("A", 1)] , runTest $ let db = [Right $ exAv "A" 1 [ExAny "B"]] msg = "After searching the rest of the dependency tree exhaustively, " ++ "these were the goals I've had most trouble fulfilling: A, B" in mkTest db "exhaustive search failure message" ["A"] $ solverFailure (isInfixOf msg) , testSummarizedLog "show conflicts from final conflict set after exhaustive search" Nothing $ "Could not resolve dependencies:\n" ++ "[__0] trying: A-1.0.0 (user goal)\n" ++ "[__1] unknown package: F (dependency of A)\n" ++ "[__1] fail (backjumping, conflict set: A, F)\n" ++ "After searching the rest of the dependency tree exhaustively, " ++ "these were the goals I've had most trouble fulfilling: A, F" , testSummarizedLog "show first conflicts after inexhaustive search" (Just 3) $ "Could not resolve dependencies:\n" ++ "[__0] trying: A-1.0.0 (user goal)\n" ++ "[__1] trying: B-3.0.0 (dependency of A)\n" ++ "[__2] unknown package: C (dependency of B)\n" ++ "[__2] fail (backjumping, conflict set: B, C)\n" ++ "Backjump limit reached (currently 3, change with --max-backjumps " ++ "or try to run with --reorder-goals).\n" , testSummarizedLog "don't show summarized log when backjump limit is too low" (Just 1) $ "Backjump limit reached (currently 1, change with --max-backjumps " ++ "or try to run with --reorder-goals).\n" ++ "Failed to generate a summarized dependency solver log due to low backjump limit." 
, testMinimizeConflictSet "minimize conflict set with --minimize-conflict-set" , testNoMinimizeConflictSet "show original conflict set with --no-minimize-conflict-set" , runTest $ let db = [ Right $ exAv "my-package" 1 [ExFix "other-package" 3] , Left $ exInst "other-package" 2 "other-package-2.0.0" []] msg = "rejecting: other-package-2.0.0/installed-2.0.0" in mkTest db "show full installed package version (issue #5892)" ["my-package"] $ solverFailure (isInfixOf msg) , runTest $ let db = [ Right $ exAv "my-package" 1 [ExFix "other-package" 3] , Left $ exInst "other-package" 2 "other-package-AbCdEfGhIj0123456789" [] ] msg = "rejecting: other-package-2.0.0/installed-AbCdEfGhIj0123456789" in mkTest db "show full installed package ABI hash (issue #5892)" ["my-package"] $ solverFailure (isInfixOf msg) ] ] where indep = independentGoals mkvrThis = V.thisVersion . makeV mkvrOrEarlier = V.orEarlierVersion . makeV makeV v = V.mkVersion [v,0,0] data GoalOrder = FixedGoalOrder | DefaultGoalOrder {------------------------------------------------------------------------------- Specific example database for the tests -------------------------------------------------------------------------------} db1 :: ExampleDb db1 = let a = exInst "A" 1 "A-1" [] in [ Left a , Right $ exAv "B" 1 [ExAny "A"] , Right $ exAv "B" 2 [ExAny "A"] , Right $ exAv "C" 1 [ExFix "B" 1] , Right $ exAv "D" 1 [ExFix "B" 2] , Right $ exAv "E" 1 [ExAny "B"] , Right $ exAv "F" 1 [ExFix "B" 1, ExAny "E"] , Right $ exAv "G" 1 [ExFix "B" 2, ExAny "E"] , Right $ exAv "Z" 1 [] ] -- In this example, we _can_ install C and D as independent goals, but we have -- to pick two different versions for B (arbitrarily) db2 :: ExampleDb db2 = [ Right $ exAv "A" 1 [] , Right $ exAv "A" 2 [] , Right $ exAv "B" 1 [ExAny "A"] , Right $ exAv "B" 2 [ExAny "A"] , Right $ exAv "C" 1 [ExAny "B", ExFix "A" 1] , Right $ exAv "D" 1 [ExAny "B", ExFix "A" 2] ] db3 :: ExampleDb db3 = [ Right $ exAv "A" 1 [] , Right $ exAv "A" 2 [] , Right $ 
exAv "B" 1 [exFlagged "flagB" [ExFix "A" 1] [ExFix "A" 2]] , Right $ exAv "C" 1 [ExFix "A" 1, ExAny "B"] , Right $ exAv "D" 1 [ExFix "A" 2, ExAny "B"] ] -- | Like db3, but the flag picks a different package rather than a -- different package version -- -- In db3 we cannot install C and D as independent goals because: -- -- * The multiple instance restriction says C and D _must_ share B -- * Since C relies on A-1, C needs B to be compiled with flagB on -- * Since D relies on A-2, D needs B to be compiled with flagB off -- * Hence C and D have incompatible requirements on B's flags. -- -- However, _even_ if we don't check explicitly that we pick the same flag -- assignment for 0.B and 1.B, we will still detect the problem because -- 0.B depends on 0.A-1, 1.B depends on 1.A-2, hence we cannot link 0.A to -- 1.A and therefore we cannot link 0.B to 1.B. -- -- In db4 the situation however is trickier. We again cannot install -- packages C and D as independent goals because: -- -- * As above, the multiple instance restriction says that C and D _must_ share B -- * Since C relies on Ax-2, it requires B to be compiled with flagB off -- * Since D relies on Ay-2, it requires B to be compiled with flagB on -- * Hence C and D have incompatible requirements on B's flags. -- -- But now this requirement is more indirect. If we only check dependencies -- we don't see the problem: -- -- * We link 0.B to 1.B -- * 0.B relies on Ay-1 -- * 1.B relies on Ax-1 -- -- We will insist that 0.Ay will be linked to 1.Ay, and 0.Ax to 1.Ax, but since -- we only ever assign to one of these, these constraints are never broken. db4 :: ExampleDb db4 = [ Right $ exAv "Ax" 1 [] , Right $ exAv "Ax" 2 [] , Right $ exAv "Ay" 1 [] , Right $ exAv "Ay" 2 [] , Right $ exAv "B" 1 [exFlagged "flagB" [ExFix "Ax" 1] [ExFix "Ay" 1]] , Right $ exAv "C" 1 [ExFix "Ax" 2, ExAny "B"] , Right $ exAv "D" 1 [ExFix "Ay" 2, ExAny "B"] ] -- | Simple database containing one package with a manual flag. 
dbManualFlags :: ExampleDb
dbManualFlags =
  [ Right $ declareFlags [ExFlag "flag" True Manual] $
            exAv "pkg" 1 [exFlagged "flag" [ExAny "true-dep"] [ExAny "false-dep"]]
  , Right $ exAv "true-dep"  1 []
  , Right $ exAv "false-dep" 1 []
  ]

-- | Database containing a setup dependency with a manual flag. A's library and
-- setup script depend on two different versions of B. B's manual flag can be
-- set to different values in the two places where it is used.
dbSetupDepWithManualFlag :: ExampleDb
dbSetupDepWithManualFlag =
    [ Right $ exAv "A" 1 [ExFix "B" 1] `withSetupDeps` [ExFix "B" 2]
    , Right $ declareFlags manualFlag $
              exAv "B" 1 [exFlagged "flag" [ExAny "b-1-true-dep"]
                                           [ExAny "b-1-false-dep"]]
    , Right $ declareFlags manualFlag $
              exAv "B" 2 [exFlagged "flag" [ExAny "b-2-true-dep"]
                                           [ExAny "b-2-false-dep"]]
    , Right $ exAv "b-1-true-dep"  1 []
    , Right $ exAv "b-1-false-dep" 1 []
    , Right $ exAv "b-2-true-dep"  1 []
    , Right $ exAv "b-2-false-dep" 1 []
    ]
  where
    -- Both versions of B declare the same manual flag.
    manualFlag = [ExFlag "flag" True Manual]

-- | A database similar to 'dbSetupDepWithManualFlag', except that the library
-- and setup script both depend on B-1. B must be linked because of the Single
-- Instance Restriction, and its flag can only have one value.
dbLinkedSetupDepWithManualFlag :: ExampleDb
dbLinkedSetupDepWithManualFlag =
  [ Right $ exAv "A" 1 [ExFix "B" 1] `withSetupDeps` [ExFix "B" 1]
  , Right $ declareFlags [ExFlag "flag" True Manual] $
            exAv "B" 1 [exFlagged "flag" [ExAny "b-1-true-dep"]
                                         [ExAny "b-1-false-dep"]]
  , Right $ exAv "b-1-true-dep"  1 []
  , Right $ exAv "b-1-false-dep" 1 []
  ]

-- | Some tests involving testsuites
--
-- Note that in this test framework test suites are always enabled; if you
-- want to test without test suites just set up a test database without
-- test suites.
--
-- * C depends on A (through its test suite)
-- * D depends on B-2 (through its test suite), but B-2 is unavailable
-- * E depends on A-1 directly and on A through its test suite. We prefer
--   to use A-1 for the test suite in this case.
-- * F depends on A-1 directly and on A-2 through its test suite. In this
--   case we currently fail to install F, although strictly speaking
--   test suites should be considered independent goals.
-- * G is like E, but for version A-2. This means that if we cannot install
--   E and G together, unless we regard them as independent goals.
db5 :: ExampleDb
db5 =
  [ Right $ exAv "A" 1 []
  , Right $ exAv "A" 2 []
  , Right $ exAv "B" 1 []
  , Right $ exAv "C" 1 []           `withTest` exTest "testC" [ExAny "A"]
  , Right $ exAv "D" 1 []           `withTest` exTest "testD" [ExFix "B" 2]
  , Right $ exAv "E" 1 [ExFix "A" 1] `withTest` exTest "testE" [ExAny "A"]
  , Right $ exAv "F" 1 [ExFix "A" 1] `withTest` exTest "testF" [ExFix "A" 2]
  , Right $ exAv "G" 1 [ExFix "A" 2] `withTest` exTest "testG" [ExAny "A"]
  ]

-- Now the _dependencies_ have test suites
--
-- * Installing C is a simple example. C wants version 1 of A, but depends on
--   B, and B's testsuite depends on an any version of A. In this case we prefer
--   to link (if we don't regard test suites as independent goals then of course
--   linking here doesn't even come into it).
-- * Installing [C, D] means that we prefer to link B -- depending on how we
--   set things up, this means that we should also link their test suites.
db6 :: ExampleDb
db6 =
  [ Right $ exAv "A" 1 []
  , Right $ exAv "A" 2 []
  , Right $ exAv "B" 1 [] `withTest` exTest "testA" [ExAny "A"]
  , Right $ exAv "C" 1 [ExFix "A" 1, ExAny "B"]
  , Right $ exAv "D" 1 [ExAny "B"]
  ]

-- | This test checks that the solver can backjump to disable a flag, even if
-- the problematic dependency is also under a test suite. (issue #4390)
--
-- The goal order forces the solver to choose the flag before enabling testing.
-- Previously, the solver couldn't handle this case, because it only tried to
-- disable testing, and when that failed, it backjumped past the flag choice.
-- The solver should also try to set the flag to false, because that avoids the
-- dependency on B.
testTestSuiteWithFlag :: String -> SolverTest
testTestSuiteWithFlag name =
    goalOrder goals $
    enableAllTests $
    mkTest db name ["A", "B"] $
    solverSuccess [("A", 1), ("B", 1)]
  where
    db :: ExampleDb
    db =
      [ Right $ exAv "A" 1 []
            `withTest` exTest "test" [exFlagged "flag" [ExFix "B" 2] []]
      , Right $ exAv "B" 1 []
      ]

    -- Force the flag choice (F) to come before the test stanza choice (S).
    goals :: [ExampleVar]
    goals =
      [ P QualNone "B"
      , P QualNone "A"
      , F QualNone "A" "flag"
      , S QualNone "A" TestStanzas
      ]

-- Packages with setup dependencies
--
-- Install..
-- * B: Simple example, just make sure setup deps are taken into account at all
-- * C: Both the package and the setup script depend on any version of A.
--      In this case we prefer to link
-- * D: Variation on C.1 where the package requires a specific (not latest)
--      version but the setup dependency is not fixed. Again, we prefer to
--      link (picking the older version)
-- * E: Variation on C.2 with the setup dependency the more inflexible.
--      Currently, in this case we do not see the opportunity to link because
--      we consider setup dependencies after normal dependencies; we will
--      pick A.2 for E, then realize we cannot link E.setup.A to A.2, and pick
--      A.1 instead. This isn't so easy to fix (if we want to fix it at all);
--      in particular, considering setup dependencies _before_ other deps is
--      not an improvement, because in general we would prefer to link setup
--      setups to package deps, rather than the other way around. (For example,
--      if we change this ordering then the test for D would start to install
--      two versions of A).
-- * F: The package and the setup script depend on different versions of A.
--      This will only work if setup dependencies are considered independent.
-- | Database for the setup-dependency scenarios B..F described above.
db7 :: ExampleDb
db7 =
  [ Right $ exAv "A" 1 []
  , Right $ exAv "A" 2 []
  , Right $ exAv "B" 1 []            `withSetupDeps` [ExAny "A" ]
  , Right $ exAv "C" 1 [ExAny "A" ]  `withSetupDeps` [ExAny "A" ]
  , Right $ exAv "D" 1 [ExFix "A" 1] `withSetupDeps` [ExAny "A" ]
  , Right $ exAv "E" 1 [ExAny "A" ]  `withSetupDeps` [ExFix "A" 1]
  , Right $ exAv "F" 1 [ExFix "A" 2] `withSetupDeps` [ExFix "A" 1]
  ]

-- If we install C and D together (not as independent goals), we need to build
-- both B.1 and B.2, both of which depend on A.
db8 :: ExampleDb
db8 =
  [ Right $ exAv "A" 1 []
  , Right $ exAv "B" 1 [ExAny "A"]
  , Right $ exAv "B" 2 [ExAny "A"]
  , Right $ exAv "C" 1 [] `withSetupDeps` [ExFix "B" 1]
  , Right $ exAv "D" 1 [] `withSetupDeps` [ExFix "B" 2]
  ]

-- Extended version of `db8` so that we have nested setup dependencies
db9 :: ExampleDb
db9 = db8 ++
  [ Right $ exAv "E" 1 [ExAny "C"]
  , Right $ exAv "E" 2 [ExAny "D"]
  , Right $ exAv "F" 1 [] `withSetupDeps` [ExFix "E" 1]
  , Right $ exAv "G" 1 [] `withSetupDeps` [ExFix "E" 2]
  ]

-- Multiple already-installed packages with inter-dependencies, and one package
-- (C) that depends on package A-1 for its setup script and package A-2 as a
-- library dependency.
db10 :: ExampleDb
db10 =
  let rts      = exInst "rts"      1 "rts-inst"      []
      ghc_prim = exInst "ghc-prim" 1 "ghc-prim-inst" [rts]
      base     = exInst "base"     1 "base-inst"     [rts, ghc_prim]
      a1       = exInst "A"        1 "A1-inst"       [base]
      a2       = exInst "A"        2 "A2-inst"       [base]
  in [ Left rts
     , Left ghc_prim
     , Left base
     , Left a1
     , Left a2
     , Right $ exAv "C" 1 [ExFix "A" 2] `withSetupDeps` [ExFix "A" 1]
     ]

-- | This database tests that a package's setup dependencies are correctly
-- linked when the package is linked. See pull request #3268.
--
-- When A and B are installed as independent goals, their dependencies on C must
-- be linked, due to the single instance restriction. Since C depends on D, 0.D
-- and 1.D must be linked. C also has a setup dependency on D, so 0.C-setup.D
-- and 1.C-setup.D must be linked.
-- However, D's two link groups must remain
-- independent. The solver should be able to choose D-1 for C's library and D-2
-- for C's setup script.
dbSetupDeps :: ExampleDb
dbSetupDeps =
  [ Right $ exAv "A" 1 [ExAny "C"]
  , Right $ exAv "B" 1 [ExAny "C"]
  , Right $ exAv "C" 1 [ExFix "D" 1] `withSetupDeps` [ExFix "D" 2]
  , Right $ exAv "D" 1 []
  , Right $ exAv "D" 2 []
  ]

-- | Tests for dealing with base shims
db11 :: ExampleDb
db11 =
  let base3 = exInst "base" 3 "base-3-inst" [base4]
      base4 = exInst "base" 4 "base-4-inst" []
  in [ Left base3
     , Left base4
     , Right $ exAv "A" 1 [ExFix "base" 3]
     ]

-- | Slightly more realistic version of db11 where base-3 depends on syb
-- This means that if a package depends on base-3 and on syb, then they MUST
-- share the version of syb
--
-- * Package A relies on base-3 (which relies on base-4)
-- * Package B relies on base-4
-- * Package C relies on both A and B
-- * Package D relies on base-3 and on syb-2, which is not possible because
--   base-3 has a dependency on syb-1 (non-inheritance of the Base qualifier)
-- * Package E relies on base-4 and on syb-2, which is fine.
db12 :: ExampleDb
db12 =
  let base3 = exInst "base" 3 "base-3-inst" [base4, syb1]
      base4 = exInst "base" 4 "base-4-inst" []
      syb1  = exInst "syb" 1 "syb-1-inst" [base4]
  in [ Left base3
     , Left base4
     , Left syb1
     , Right $ exAv "syb" 2 [ExFix "base" 4]
     , Right $ exAv "A" 1 [ExFix "base" 3, ExAny "syb"]
     , Right $ exAv "B" 1 [ExFix "base" 4, ExAny "syb"]
     , Right $ exAv "C" 1 [ExAny "A", ExAny "B"]
     , Right $ exAv "D" 1 [ExFix "base" 3, ExFix "syb" 2]
     , Right $ exAv "E" 1 [ExFix "base" 4, ExFix "syb" 2]
     ]

-- | A minimal base-like package hierarchy.
dbBase :: ExampleDb
dbBase =
  [ Right $ exAv "base" 1
      [ExAny "ghc-prim", ExAny "integer-simple", ExAny "integer-gmp"]
  , Right $ exAv "ghc-prim" 1 []
  , Right $ exAv "integer-simple" 1 []
  , Right $ exAv "integer-gmp" 1 []
  ]

-- | Three versions of a single package with no dependencies.
db13 :: ExampleDb
db13 =
  [ Right $ exAv "A" 1 []
  , Right $ exAv "A" 2 []
  , Right $ exAv "A" 3 []
  ]

-- | A, B, and C have three different dependencies on D that can be set to
-- different versions with qualified constraints. Each version of D can only
-- be depended upon by one version of A, B, or C, so that the versions of A, B,
-- and C in the install plan indicate which version of D was chosen for each
-- dependency. The one-to-one correspondence between versions of A, B, and C and
-- versions of D also prevents linking, which would complicate the solver's
-- behavior.
dbConstraints :: ExampleDb
dbConstraints =
     [Right $ exAv "A" v [ExFix "D" v] | v <- [1, 4, 7]]
  ++ [Right $ exAv "B" v [] `withSetupDeps` [ExFix "D" v] | v <- [2, 5, 8]]
  ++ [Right $ exAv "C" v [] `withSetupDeps` [ExFix "D" v] | v <- [3, 6, 9]]
  ++ [Right $ exAv "D" v [] | v <- [1..9]]

dbStanzaPreferences1 :: ExampleDb
dbStanzaPreferences1 =
  [ Right $ exAv "pkg" 1 [] `withTest` exTest "test" [ExAny "test-dep"]
  , Right $ exAv "test-dep" 1 []
  ]

-- | Like 'dbStanzaPreferences1', but the test dependency is unavailable.
dbStanzaPreferences2 :: ExampleDb
dbStanzaPreferences2 =
  [ Right $ exAv "pkg" 1 [] `withTest` exTest "test" [ExAny "unknown"]
  ]

-- | This is a test case for a bug in stanza preferences (#3930).
-- The solver
-- should be able to install 'A' by enabling 'flag' and disabling testing. When
-- it tries goals in the specified order and prefers testing, it encounters
-- 'unknown-pkg2'. 'unknown-pkg2' is only introduced by testing and 'flag', so
-- the conflict set should contain both of those variables. Before the fix, it
-- only contained 'flag'. The solver backjumped past the choice to disable
-- testing and failed to find the solution.
testStanzaPreference :: String -> TestTree
testStanzaPreference name =
  let pkg = exAv "A" 1 [exFlagged "flag" [] [ExAny "unknown-pkg1"]]
              `withTest`
            exTest "test" [exFlagged "flag" [ExAny "unknown-pkg2"] []]
      goals = [ P QualNone "A"
              , F QualNone "A" "flag"
              , S QualNone "A" TestStanzas
              ]
  in runTest $
     goalOrder goals $
     preferences [ ExStanzaPref "A" [TestStanzas]] $
     mkTest [Right pkg] name ["A"] $
     solverSuccess [("A", 1)]

-- | Database with some cycles
--
-- * Simplest non-trivial cycle: A -> B and B -> A
-- * There is a cycle C -> D -> C, but it can be broken by picking the
--   right flag assignment.
db14 :: ExampleDb
db14 =
  [ Right $ exAv "A" 1 [ExAny "B"]
  , Right $ exAv "B" 1 [ExAny "A"]
  , Right $ exAv "C" 1 [exFlagged "flagC" [ExAny "D"] [ExAny "E"]]
  , Right $ exAv "D" 1 [ExAny "C"]
  , Right $ exAv "E" 1 []
  ]

-- | Cycles through setup dependencies
--
-- The first cycle is unsolvable: package A has a setup dependency on B,
-- B has a regular dependency on A, and we only have a single version available
-- for both.
--
-- The second cycle can be broken by picking different versions: package C-2.0
-- has a setup dependency on D, and D has a regular dependency on C-*. However,
-- version C-1.0 is already available (perhaps it didn't have this setup dep).
-- Thus, we should be able to break this cycle even if we are installing package
-- E, which explicitly depends on C-2.0.
db15 :: ExampleDb
db15 =
  [ -- First example (real cycle, no solution)
    Right $ exAv "A" 1 [] `withSetupDeps` [ExAny "B"]
  , Right $ exAv "B" 1 [ExAny "A"]
    -- Second example (cycle can be broken by picking versions carefully)
  , Left  $ exInst "C" 1 "C-1-inst" []
  , Right $ exAv "C" 2 [] `withSetupDeps` [ExAny "D"]
  , Right $ exAv "D" 1 [ExAny "C" ]
  , Right $ exAv "E" 1 [ExFix "C" 2]
  ]

-- | Detect a cycle between a package and its setup script.
--
-- This type of cycle can easily occur when v2-build adds default setup
-- dependencies to packages without custom-setup stanzas. For example, cabal
-- adds 'time' as a setup dependency for 'time'. The solver should detect the
-- cycle when it attempts to link the setup and non-setup instances of the
-- package and then choose a different version for the setup dependency.
issue4161 :: String -> SolverTest
issue4161 name =
    setVerbose $ mkTest db name ["target"] $
    SolverResult checkFullLog $ Right [("target", 1), ("time", 1), ("time", 2)]
  where
    db :: ExampleDb
    db =
      [ Right $ exAv "target" 1 [ExFix "time" 2]
      , Right $ exAv "time" 2 [] `withSetupDeps` [ExAny "time"]
      , Right $ exAv "time" 1 []
      ]

    checkFullLog :: [String] -> Bool
    checkFullLog = any $ isInfixOf $
        "rejecting: time:setup.time~>time-2.0.0 (cyclic dependencies; "
     ++ "conflict set: time:setup.time)"

-- | Packages pkg-A, pkg-B, and pkg-C form a cycle. The solver should backtrack
-- as soon as it chooses the last package in the cycle, to avoid searching parts
-- of the tree that have no solution. Since there is no way to break the cycle,
-- it should fail with an error message describing the cycle.
testCyclicDependencyErrorMessages :: String -> SolverTest
testCyclicDependencyErrorMessages name =
    goalOrder goals $
    mkTest db name ["pkg-A"] $
    SolverResult checkFullLog $ Left checkSummarizedLog
  where
    db :: ExampleDb
    db =
      [ Right $ exAv "pkg-A" 1 [ExAny "pkg-B"]
      , Right $ exAv "pkg-B" 1 [ExAny "pkg-C"]
      , Right $ exAv "pkg-C" 1 [ExAny "pkg-A", ExAny "pkg-D"]
      , Right $ exAv "pkg-D" 1 [ExAny "pkg-E"]
      , Right $ exAv "pkg-E" 1 []
      ]

    -- The solver should backtrack as soon as pkg-A, pkg-B, and pkg-C form a
    -- cycle. It shouldn't try pkg-D or pkg-E.
    checkFullLog :: [String] -> Bool
    checkFullLog =
        not . any (\l -> "pkg-D" `isInfixOf` l || "pkg-E" `isInfixOf` l)

    checkSummarizedLog :: String -> Bool
    checkSummarizedLog =
        isInfixOf "rejecting: pkg-C-1.0.0 (cyclic dependencies; conflict set: pkg-A, pkg-B, pkg-C)"

    -- Solve for pkg-D and pkg-E last.
    goals :: [ExampleVar]
    goals = [P QualNone ("pkg-" ++ [c]) | c <- ['A'..'E']]

-- | Check that the solver can backtrack after encountering the SIR (issue #2843)
--
-- When A and B are installed as independent goals, the single instance
-- restriction prevents B from depending on C. This database tests that the
-- solver can backtrack after encountering the single instance restriction and
-- choose the only valid flag assignment (-flagA +flagB):
--
-- > flagA flagB  B depends on
-- > On    _      C-*
-- > Off   On     E-*          <-- only valid flag assignment
-- > Off   Off    D-2.0, C-*
--
-- Since A depends on C-* and D-1.0, and C-1.0 depends on any version of D,
-- we must build C-1.0 against D-1.0. Since B depends on D-2.0, we cannot have
-- C in the transitive closure of B's dependencies, because that would mean we
-- would need two instances of C: one built against D-1.0 and one built against
-- D-2.0.
db16 :: ExampleDb
db16 =
  [ Right $ exAv "A" 1 [ExAny "C", ExFix "D" 1]
  , Right $ exAv "B" 1
      [ ExFix "D" 2
      , exFlagged "flagA"
            [ExAny "C"]
            [exFlagged "flagB"
                [ExAny "E"]
                [ExAny "C"]]]
  , Right $ exAv "C" 1 [ExAny "D"]
  , Right $ exAv "D" 1 []
  , Right $ exAv "D" 2 []
  , Right $ exAv "E" 1 []
  ]

-- Try to get the solver to backtrack while satisfying
-- reject-unconstrained-dependencies: both the first and last versions of A
-- require packages outside the closed set, so it will have to try the
-- middle one.
db17 :: ExampleDb
db17 =
  [ Right $ exAv "A" 1 [ExAny "C"]
  , Right $ exAv "A" 2 [ExAny "B"]
  , Right $ exAv "A" 3 [ExAny "C"]
  , Right $ exAv "B" 1 []
  , Right $ exAv "C" 1 [ExAny "B"]
  ]

-- | This test checks that when the solver discovers a constraint on a
-- package's version after choosing to link that package, it can backtrack to
-- try alternative versions for the linked-to package. See pull request #3327.
--
-- When A and B are installed as independent goals, their dependencies on C
-- must be linked. Since C depends on D, A and B's dependencies on D must also
-- be linked. This test fixes the goal order so that the solver chooses D-2 for
-- both 0.D and 1.D before it encounters the test suites' constraints. The
-- solver must backtrack to try D-1 for both 0.D and 1.D.
testIndepGoals2 :: String -> SolverTest
testIndepGoals2 name =
    goalOrder goals $
    independentGoals $
    enableAllTests $
    mkTest db name ["A", "B"] $
    solverSuccess [("A", 1), ("B", 1), ("C", 1), ("D", 1)]
  where
    db :: ExampleDb
    db =
      [ Right $ exAv "A" 1 [ExAny "C"] `withTest` exTest "test" [ExFix "D" 1]
      , Right $ exAv "B" 1 [ExAny "C"] `withTest` exTest "test" [ExFix "D" 1]
      , Right $ exAv "C" 1 [ExAny "D"]
      , Right $ exAv "D" 1 []
      , Right $ exAv "D" 2 []
      ]

    -- All package choices come before the test stanzas that constrain D.
    goals :: [ExampleVar]
    goals =
      [ P (QualIndep "A") "A"
      , P (QualIndep "A") "C"
      , P (QualIndep "A") "D"
      , P (QualIndep "B") "B"
      , P (QualIndep "B") "C"
      , P (QualIndep "B") "D"
      , S (QualIndep "B") "B" TestStanzas
      , S (QualIndep "A") "A" TestStanzas
      ]

-- | Issue #2834
-- When both A and B are installed as independent goals, their dependencies on
-- C must be linked. The only combination of C's flags that is consistent with
-- A and B's dependencies on D is -flagA +flagB. This database tests that the
-- solver can backtrack to find the right combination of flags (requiring F, but
-- not E or G) and apply it to both 0.C and 1.C.
--
-- > flagA flagB  C depends on
-- > On    _      D-1, E-*
-- > Off   On     F-*          <-- Only valid choice
-- > Off   Off    D-2, G-*
--
-- The single instance restriction means we cannot have one instance of C
-- built against D-1 and one instance built against D-2; since A depends on
-- D-1, and B depends on D-2, it is therefore important that C cannot depend
-- on any version of D.
db18 :: ExampleDb
db18 =
  [ Right $ exAv "A" 1 [ExAny "C", ExFix "D" 1]
  , Right $ exAv "B" 1 [ExAny "C", ExFix "D" 2]
  , Right $ exAv "C" 1 [exFlagged "flagA"
                           [ExFix "D" 1, ExAny "E"]
                           [exFlagged "flagB"
                               [ExAny "F"]
                               [ExFix "D" 2, ExAny "G"]]]
  , Right $ exAv "D" 1 []
  , Right $ exAv "D" 2 []
  , Right $ exAv "E" 1 []
  , Right $ exAv "F" 1 []
  , Right $ exAv "G" 1 []
  ]

-- | When both values for flagA introduce package B, the solver should be able
-- to choose B before choosing a value for flagA.
-- It should try to choose a
-- version for B that is in the union of the version ranges required by +flagA
-- and -flagA.
commonDependencyLogMessage :: String -> SolverTest
commonDependencyLogMessage name =
    mkTest db name ["A"] $ solverFailure $ isInfixOf $
        "[__0] trying: A-1.0.0 (user goal)\n"
     ++ "[__1] next goal: B (dependency of A +/-flagA)\n"
     ++ "[__1] rejecting: B-2.0.0 (conflict: A +/-flagA => B==1.0.0 || ==3.0.0)"
  where
    db :: ExampleDb
    db =
      [ Right $ exAv "A" 1 [exFlagged "flagA"
                               [ExFix "B" 1]
                               [ExFix "B" 3]]
      , Right $ exAv "B" 2 []
      ]

-- | Test lifting dependencies out of multiple levels of conditionals.
twoLevelDeepCommonDependencyLogMessage :: String -> SolverTest
twoLevelDeepCommonDependencyLogMessage name =
    mkTest db name ["A"] $ solverFailure $ isInfixOf $
        "unknown package: B (dependency of A +/-flagA +/-flagB)"
  where
    db :: ExampleDb
    db =
      [ Right $ exAv "A" 1 [exFlagged "flagA"
                               [exFlagged "flagB"
                                   [ExAny "B"]
                                   [ExAny "B"]]
                               [exFlagged "flagB"
                                   [ExAny "B"]
                                   [ExAny "B"]]]
      ]

-- | Test handling nested conditionals that are controlled by the same flag.
-- The solver should treat flagA as introducing 'unknown' with value true, not
-- both true and false. That means that when +flagA causes a conflict, the
-- solver should try flipping flagA to false to resolve the conflict, rather
-- than backjumping past flagA.
testBackjumpingWithCommonDependency :: String -> SolverTest
testBackjumpingWithCommonDependency name =
    mkTest db name ["A"] $ solverSuccess [("A", 1), ("B", 1)]
  where
    db :: ExampleDb
    db =
      [ Right $ exAv "A" 1 [exFlagged "flagA"
                               [exFlagged "flagA"
                                   [ExAny "unknown"]
                                   [ExAny "unknown"]]
                               [ExAny "B"]]
      , Right $ exAv "B" 1 []
      ]

-- | Tricky test case with independent goals (issue #2842)
--
-- Suppose we are installing D, E, and F as independent goals:
--
-- * D depends on A-* and C-1, requiring A-1 to be built against C-1
-- * E depends on B-* and C-2, requiring B-1 to be built against C-2
-- * F depends on A-* and B-*; this means we need A-1 and B-1 both to be built
--   against the same version of C, violating the single instance restriction.
--
-- We can visualize this DB as:
--
-- >    C-1   C-2
-- >    /|\   /|\
-- >   / | \ / | \
-- >  /  |  X  |  \
-- > |   | / \ |   |
-- > |   |/   \|   |
-- > |   +     +   |
-- > |   |     |   |
-- > |   A     B   |
-- >  \  |\   /|  /
-- >   \ | \ / | /
-- >    \|  V  |/
-- >     D  F  E
testIndepGoals3 :: String -> SolverTest
testIndepGoals3 name =
    goalOrder goals $
    independentGoals $
    mkTest db name ["D", "E", "F"] anySolverFailure
  where
    db :: ExampleDb
    db =
      [ Right $ exAv "A" 1 [ExAny "C"]
      , Right $ exAv "B" 1 [ExAny "C"]
      , Right $ exAv "C" 1 []
      , Right $ exAv "C" 2 []
      , Right $ exAv "D" 1 [ExAny "A", ExFix "C" 1]
      , Right $ exAv "E" 1 [ExAny "B", ExFix "C" 2]
      , Right $ exAv "F" 1 [ExAny "A", ExAny "B"]
      ]

    goals :: [ExampleVar]
    goals =
      [ P (QualIndep "D") "D"
      , P (QualIndep "D") "C"
      , P (QualIndep "D") "A"
      , P (QualIndep "E") "E"
      , P (QualIndep "E") "C"
      , P (QualIndep "E") "B"
      , P (QualIndep "F") "F"
      , P (QualIndep "F") "B"
      , P (QualIndep "F") "C"
      , P (QualIndep "F") "A"
      ]

-- | This test checks that the solver correctly backjumps when dependencies
-- of linked packages are not linked. It is an example where the conflict set
-- from enforcing the single instance restriction is not sufficient. See pull
-- request #3327.
--
-- When A, B, and C are installed as independent goals with the specified goal
-- order, the first choice that the solver makes for E is 0.E-2. Then, when it
-- chooses dependencies for B and C, it links both 1.E and 2.E to 0.E. Finally,
-- the solver discovers C's test's constraint on E. It must backtrack to try
-- 1.E-1 and then link 2.E to 1.E. Backjumping all the way to 0.E does not lead
-- to a solution, because 0.E's version is constrained by A and cannot be
-- changed.
testIndepGoals4 :: String -> SolverTest
testIndepGoals4 name =
    goalOrder goals $
    independentGoals $
    enableAllTests $
    mkTest db name ["A", "B", "C"] $
    solverSuccess [("A",1), ("B",1), ("C",1), ("D",1), ("E",1), ("E",2)]
  where
    db :: ExampleDb
    db =
      [ Right $ exAv "A" 1 [ExFix "E" 2]
      , Right $ exAv "B" 1 [ExAny "D"]
      , Right $ exAv "C" 1 [ExAny "D"] `withTest` exTest "test" [ExFix "E" 1]
      , Right $ exAv "D" 1 [ExAny "E"]
      , Right $ exAv "E" 1 []
      , Right $ exAv "E" 2 []
      ]

    goals :: [ExampleVar]
    goals =
      [ P (QualIndep "A") "A"
      , P (QualIndep "A") "E"
      , P (QualIndep "B") "B"
      , P (QualIndep "B") "D"
      , P (QualIndep "B") "E"
      , P (QualIndep "C") "C"
      , P (QualIndep "C") "D"
      , P (QualIndep "C") "E"
      , S (QualIndep "C") "C" TestStanzas
      ]

-- | Test the trace messages that we get when a package refers to an unknown pkg
--
-- TODO: Currently we don't actually test the trace messages, and this particular
-- test still succeeds. The trace can only be verified by hand.
db21 :: ExampleDb
db21 =
  [ Right $ exAv "A" 1 [ExAny "B"]
  , Right $ exAv "A" 2 [ExAny "C"] -- A-2.0 will be tried first, but C unknown
  , Right $ exAv "B" 1 []
  ]

-- | A variant of 'db21', which actually fails.
db22 :: ExampleDb
db22 =
  [ Right $ exAv "A" 1 [ExAny "B"]
  , Right $ exAv "A" 2 [ExAny "C"]
  ]

-- | Another test for the unknown package message. This database tests that
-- filtering out redundant conflict set messages in the solver log doesn't
-- interfere with generating a message about a missing package (part of issue
-- #3617).
-- The conflict set for
-- the missing package is {A, B}. That conflict set
-- is propagated up the tree to the level of A. Since the conflict set is the
-- same at both levels, the solver only keeps one of the backjumping messages.
db23 :: ExampleDb
db23 =
  [ Right $ exAv "A" 1 [ExAny "B"]
  ]

-- | Database for (unsuccessfully) trying to expose a bug in the handling
-- of implied linking constraints. The question is whether an implied linking
-- constraint should only have the introducing package in its conflict set,
-- or also its link target.
--
-- It turns out that as long as the Single Instance Restriction is in place,
-- it does not matter, because there will always be an option that is failing
-- due to the SIR, which contains the link target in its conflict set.
--
-- Even if the SIR is not in place, if there is a solution, one will always
-- be found, because without the SIR, linking is always optional, but never
-- necessary.
--
testIndepGoals5 :: String -> GoalOrder -> SolverTest
testIndepGoals5 name fixGoalOrder =
    case fixGoalOrder of
      FixedGoalOrder   -> goalOrder goals test
      DefaultGoalOrder -> test
  where
    test :: SolverTest
    test =
        independentGoals $
        mkTest db name ["X", "Y"] $
        solverSuccess
          [("A", 1), ("A", 2), ("B", 1), ("C", 1), ("C", 2), ("X", 1), ("Y", 1)]

    db :: ExampleDb
    db =
      [ Right $ exAv "X" 1 [ExFix "C" 2, ExAny "A"]
      , Right $ exAv "Y" 1 [ExFix "C" 1, ExFix "A" 2]
      , Right $ exAv "A" 1 []
      , Right $ exAv "A" 2 [ExAny "B"]
      , Right $ exAv "B" 1 [ExAny "C"]
      , Right $ exAv "C" 1 []
      , Right $ exAv "C" 2 []
      ]

    goals :: [ExampleVar]
    goals =
      [ P (QualIndep "X") "X"
      , P (QualIndep "X") "A"
      , P (QualIndep "X") "B"
      , P (QualIndep "X") "C"
      , P (QualIndep "Y") "Y"
      , P (QualIndep "Y") "A"
      , P (QualIndep "Y") "B"
      , P (QualIndep "Y") "C"
      ]

-- | A simplified version of 'testIndepGoals5'.
testIndepGoals6 :: String -> GoalOrder -> SolverTest
testIndepGoals6 name fixGoalOrder =
    case fixGoalOrder of
      FixedGoalOrder   -> goalOrder goals test
      DefaultGoalOrder -> test
  where
    test :: SolverTest
    test =
        independentGoals $
        mkTest db name ["X", "Y"] $
        solverSuccess
          [("A", 1), ("A", 2), ("B", 1), ("B", 2), ("X", 1), ("Y", 1)]

    db :: ExampleDb
    db =
      [ Right $ exAv "X" 1 [ExFix "B" 2, ExAny "A"]
      , Right $ exAv "Y" 1 [ExFix "B" 1, ExFix "A" 2]
      , Right $ exAv "A" 1 []
      , Right $ exAv "A" 2 [ExAny "B"]
      , Right $ exAv "B" 1 []
      , Right $ exAv "B" 2 []
      ]

    goals :: [ExampleVar]
    goals =
      [ P (QualIndep "X") "X"
      , P (QualIndep "X") "A"
      , P (QualIndep "X") "B"
      , P (QualIndep "Y") "Y"
      , P (QualIndep "Y") "A"
      , P (QualIndep "Y") "B"
      ]

-- | Database with language-extension dependencies.
dbExts1 :: ExampleDb
dbExts1 =
  [ Right $ exAv "A" 1 [ExExt (EnableExtension RankNTypes)]
  , Right $ exAv "B" 1 [ExExt (EnableExtension CPP), ExAny "A"]
  , Right $ exAv "C" 1 [ExAny "B"]
  , Right $ exAv "D" 1 [ExExt (DisableExtension CPP), ExAny "B"]
  , Right $ exAv "E" 1 [ExExt (UnknownExtension "custom"), ExAny "C"]
  ]

-- | Database with language-version dependencies.
dbLangs1 :: ExampleDb
dbLangs1 =
  [ Right $ exAv "A" 1 [ExLang Haskell2010]
  , Right $ exAv "B" 1 [ExLang Haskell98, ExAny "A"]
  , Right $ exAv "C" 1 [ExLang (UnknownLanguage "Haskell3000"), ExAny "B"]
  ]

-- | cabal must set enable-exe to false in order to avoid the unavailable
-- dependency. Flags are true by default. The flag choice causes "pkg" to
-- depend on "false-dep".
testBuildable :: String -> ExampleDependency -> TestTree
testBuildable testName unavailableDep =
    runTest $
    mkTestExtLangPC (Just []) (Just [Haskell98]) (Just []) db testName
      ["pkg"] expected
  where
    expected = solverSuccess [("false-dep", 1), ("pkg", 1)]
    db =
      [ Right $ exAv "pkg" 1
            [exFlagged "enable-exe" [ExAny "true-dep"] [ExAny "false-dep"]]
         `withExe`
            exExe "exe"
              [ unavailableDep
              , ExFlagged "enable-exe" (dependencies []) unbuildableDependencies
              ]
      , Right $ exAv "true-dep" 1 []
      , Right $ exAv "false-dep" 1 []
      ]

-- | cabal must choose -flag1 +flag2 for "pkg", which requires packages
-- "flag1-false" and "flag2-true".
dbBuildable1 :: ExampleDb
dbBuildable1 =
  [ Right $ exAv "pkg" 1
        [ exFlagged "flag1" [ExAny "flag1-true"] [ExAny "flag1-false"]
        , exFlagged "flag2" [ExAny "flag2-true"] [ExAny "flag2-false"]]
     `withExes`
        [ exExe "exe1"
            [ ExAny "unknown"
            , ExFlagged "flag1" (dependencies []) unbuildableDependencies
            , ExFlagged "flag2" (dependencies []) unbuildableDependencies]
        , exExe "exe2"
            [ ExAny "unknown"
            , ExFlagged "flag1"
                  (dependencies [])
                  (dependencies [ExFlagged "flag2"
                                    unbuildableDependencies
                                    (dependencies [])])]
        ]
  , Right $ exAv "flag1-true" 1 []
  , Right $ exAv "flag1-false" 1 []
  , Right $ exAv "flag2-true" 1 []
  , Right $ exAv "flag2-false" 1 []
  ]

-- | cabal must pick B-2 to avoid the unknown dependency.
dbBuildable2 :: ExampleDb
dbBuildable2 =
  [ Right $ exAv "A" 1 [ExAny "B"]
  , Right $ exAv "B" 1 [ExAny "unknown"]
  , Right $ exAv "B" 2 []
     `withExe`
        exExe "exe"
          [ ExAny "unknown"
          , ExFlagged "disable-exe" unbuildableDependencies (dependencies [])
          ]
  , Right $ exAv "B" 3 [ExAny "unknown"]
  ]

-- | Package databases for testing @pkg-config@ dependencies.
-- when no pkgconfig db is present, cabal must pick flag1 false and flag2 true
-- to avoid the pkg dependency.
dbPC1 :: ExampleDb
dbPC1 =
  [ Right $ exAv "A" 1 [ExPkg ("pkgA", 1)]
  , Right $ exAv "B" 1 [ExPkg ("pkgB", 1), ExAny "A"]
  , Right $ exAv "B" 2 [ExPkg ("pkgB", 2), ExAny "A"]
  , Right $ exAv "C" 1 [ExAny "B"]
  , Right $ exAv "D" 1 [ exFlagged "flag1" [ExAny "A"] []
                       , exFlagged "flag2" [] [ExAny "A"]]
  ]

-- | Test for the solver's summarized log. The final conflict set is {A, F},
-- though the goal order forces the solver to find the (avoidable) conflict
-- between B and C first. When the solver reaches the backjump limit, it should
-- only show the log to the first conflict. When the backjump limit is high
-- enough to allow an exhaustive search, the solver should make use of the final
-- conflict set to only show the conflict between A and F in the summarized log.
testSummarizedLog :: String -> Maybe Int -> String -> TestTree
testSummarizedLog testName mbj expectedMsg =
    runTest $
    maxBackjumps mbj $
    goalOrder goals $
    mkTest db testName ["A"] $
    solverFailure (== expectedMsg)
  where
    db =
      [ Right $ exAv "A" 1 [ExAny "B", ExAny "F"]
      , Right $ exAv "B" 3 [ExAny "C"]
      , Right $ exAv "B" 2 [ExAny "D"]
      , Right $ exAv "B" 1 [ExAny "E"]
      , Right $ exAv "E" 1 []
      ]

    goals :: [ExampleVar]
    goals = [P QualNone pkg | pkg <- ["A", "B", "C", "D", "E", "F"]]

-- | Database used by the conflict-set-minimization tests below. Every version
-- of A conflicts with D; only A-3 conflicts with B, and only A-2 with C.
dbMinimizeConflictSet :: ExampleDb
dbMinimizeConflictSet =
  [ Right $ exAv "A" 3 [ExFix "B" 2, ExFix "C" 1, ExFix "D" 2]
  , Right $ exAv "A" 2 [ExFix "B" 1, ExFix "C" 2, ExFix "D" 2]
  , Right $ exAv "A" 1 [ExFix "B" 1, ExFix "C" 1, ExFix "D" 2]
  , Right $ exAv "B" 1 []
  , Right $ exAv "C" 1 []
  , Right $ exAv "D" 1 []
  ]

-- | Test that the solver can find a minimal conflict set with
-- --minimize-conflict-set. In the first run, the goal order causes the solver
-- to find that A-3 conflicts with B, A-2 conflicts with C, and A-1 conflicts
-- with D. The full log should show that the original final conflict set is
-- {A, B, C, D}. Then the solver should be able to reduce the conflict set to
-- {A, D}, since all versions of A conflict with D.
-- The summarized log should
-- only mention A and D.
testMinimizeConflictSet :: String -> TestTree
testMinimizeConflictSet testName =
    runTest $
    minimizeConflictSet $
    goalOrder goals $
    setVerbose $
    mkTest dbMinimizeConflictSet testName ["A"] $
    SolverResult checkFullLog (Left (== expectedMsg))
  where
    checkFullLog :: [String] -> Bool
    checkFullLog = containsInOrder
      [ "[__0] fail (backjumping, conflict set: A, B, C, D)"
      , "Found no solution after exhaustively searching the dependency tree. "
          ++ "Rerunning the dependency solver to minimize the conflict set ({A, B, C, D})."
      , "Trying to remove variable \"A\" from the conflict set."
      , "Failed to remove \"A\" from the conflict set. Continuing with {A, B, C, D}."
      , "Trying to remove variable \"B\" from the conflict set."
      , "Successfully removed \"B\" from the conflict set. Continuing with {A, D}."
      , "Trying to remove variable \"D\" from the conflict set."
      , "Failed to remove \"D\" from the conflict set. Continuing with {A, D}."
      ]

    expectedMsg =
        "Could not resolve dependencies:\n"
     ++ "[__0] trying: A-3.0.0 (user goal)\n"
     ++ "[__1] next goal: D (dependency of A)\n"
     ++ "[__1] rejecting: D-1.0.0 (conflict: A => D==2.0.0)\n"
     ++ "[__1] fail (backjumping, conflict set: A, D)\n"
     ++ "After searching the rest of the dependency tree exhaustively, these "
     ++ "were the goals I've had most trouble fulfilling: A (5), D (4)"

    goals :: [ExampleVar]
    goals = [P QualNone pkg | pkg <- ["A", "B", "C", "D"]]

-- | This test uses the same packages and goal order as testMinimizeConflictSet,
-- but it doesn't set --minimize-conflict-set. The solver should print the
-- original final conflict set and the conflict between A and B. It should also
-- suggest rerunning with --minimize-conflict-set.
testNoMinimizeConflictSet :: String -> TestTree
testNoMinimizeConflictSet testName =
    runTest $
    goalOrder goals $
    setVerbose $
    mkTest dbMinimizeConflictSet testName ["A"] $
    solverFailure (== expectedMsg)
  where
    expectedMsg =
        "Could not resolve dependencies:\n"
     ++ "[__0] trying: A-3.0.0 (user goal)\n"
     ++ "[__1] next goal: B (dependency of A)\n"
     ++ "[__1] rejecting: B-1.0.0 (conflict: A => B==2.0.0)\n"
     ++ "[__1] fail (backjumping, conflict set: A, B)\n"
     ++ "After searching the rest of the dependency tree exhaustively, "
     ++ "these were the goals I've had most trouble fulfilling: "
     ++ "A (7), B (2), C (2), D (2)\n"
     ++ "Try running with --minimize-conflict-set to improve the error message."

    goals :: [ExampleVar]
    goals = [P QualNone pkg | pkg <- ["A", "B", "C", "D"]]

{-------------------------------------------------------------------------------
  Simple databases for the illustrations for the backjumping blog post
-------------------------------------------------------------------------------}

-- | Motivate conflict sets
dbBJ1a :: ExampleDb
dbBJ1a =
  [ Right $ exAv "A" 1 [ExFix "B" 1]
  , Right $ exAv "A" 2 [ExFix "B" 2]
  , Right $ exAv "B" 1 []
  ]

-- | Show that we can skip some decisions
dbBJ1b :: ExampleDb
dbBJ1b =
  [ Right $ exAv "A" 1 [ExFix "B" 1]
  , Right $ exAv "A" 2 [ExFix "B" 2, ExAny "C"]
  , Right $ exAv "B" 1 []
  , Right $ exAv "C" 1 []
  , Right $ exAv "C" 2 []
  ]

-- | Motivate why both A and B need to be in the conflict set
dbBJ1c :: ExampleDb
dbBJ1c =
  [ Right $ exAv "A" 1 [ExFix "B" 1]
  , Right $ exAv "B" 1 []
  , Right $ exAv "B" 2 []
  ]

-- | Motivate the need for accumulating conflict sets while we walk the tree
dbBJ2 :: ExampleDb
dbBJ2 =
  [ Right $ exAv "A" 1 [ExFix "B" 1]
  , Right $ exAv "A" 2 [ExFix "B" 2]
  , Right $ exAv "B" 1 [ExFix "C" 1]
  , Right $ exAv "B" 2 [ExFix "C" 2]
  , Right $ exAv "C" 1 []
  ]

-- | Motivate the need for `QGoalReason`
dbBJ3 :: ExampleDb
dbBJ3 =
  [ Right $ exAv "A" 1 [ExAny "Ba"]
  , Right $ exAv "A" 2 [ExAny "Bb"]
  , Right $ exAv "Ba" 1 [ExFix "C" 1]
  , Right $ exAv "Bb" 1 [ExFix "C" 2]
  , Right $ exAv "C" 1 []
  ]

-- | `QGoalReason` not unique
dbBJ4 :: ExampleDb
dbBJ4 =
  [ Right $ exAv "A" 1 [ExAny "B", ExAny "C"]
  , Right $ exAv "B" 1 [ExAny "C"]
  , Right $ exAv "C" 1 []
  ]

-- | Flags are represented somewhat strangely in the tree
--
-- This example probably won't be in the blog post itself but as a separate
-- bug report (#3409)
dbBJ5 :: ExampleDb
dbBJ5 =
  [ Right $ exAv "A" 1 [exFlagged "flagA" [ExFix "B" 1] [ExFix "C" 1]]
  , Right $ exAv "B" 1 [ExFix "D" 1]
  , Right $ exAv "C" 1 [ExFix "D" 2]
  , Right $ exAv "D" 1 []
  ]

-- | Conflict sets for cycles
dbBJ6 :: ExampleDb
dbBJ6 =
  [ Right $ exAv "A" 1 [ExAny "B"]
  , Right $ exAv "B" 1 []
  , Right $ exAv "B" 2 [ExAny "C"]
  , Right $ exAv "C" 1 [ExAny "A"]
  ]

-- | Conflicts not unique
dbBJ7 :: ExampleDb
dbBJ7 =
  [ Right $ exAv "A" 1 [ExAny "B", ExFix "C" 1]
  , Right $ exAv "B" 1 [ExFix "C" 1]
  , Right $ exAv "C" 1 []
  , Right $ exAv "C" 2 []
  ]

-- | Conflict sets for SIR (C shared subgoal of independent goals A, B)
dbBJ8 :: ExampleDb
dbBJ8 =
  [ Right $ exAv "A" 1 [ExAny "C"]
  , Right $ exAv "B" 1 [ExAny "C"]
  , Right $ exAv "C" 1 []
  ]

{-------------------------------------------------------------------------------
  Databases for build-tool-depends
-------------------------------------------------------------------------------}

-- | Multiple packages depending on exes from 'bt-pkg'.
dbBuildTools :: ExampleDb dbBuildTools = [ Right $ exAv "A" 1 [ExBuildToolAny "bt-pkg" "exe1"] , Right $ exAv "B" 1 [exFlagged "flagB" [ExAny "unknown"] [ExBuildToolAny "bt-pkg" "exe1"]] , Right $ exAv "C" 1 [] `withTest` exTest "testC" [ExBuildToolAny "bt-pkg" "exe1"] , Right $ exAv "D" 1 [ExBuildToolAny "bt-pkg" "unknown-exe"] , Right $ exAv "E" 1 [ExBuildToolAny "unknown-pkg" "exe1"] , Right $ exAv "F" 1 [exFlagged "flagF" [ExBuildToolAny "bt-pkg" "unknown-exe"] [ExAny "unknown"]] , Right $ exAv "G" 1 [] `withTest` exTest "testG" [ExBuildToolAny "bt-pkg" "unknown-exe"] , Right $ exAv "H" 1 [ExBuildToolFix "bt-pkg" "exe1" 3] , Right $ exAv "bt-pkg" 4 [] , Right $ exAv "bt-pkg" 3 [] `withExe` exExe "exe2" [] , Right $ exAv "bt-pkg" 2 [] `withExe` exExe "exe1" [] , Right $ exAv "bt-pkg" 1 [] ] -- The solver should never choose an installed package for a build tool -- dependency. rejectInstalledBuildToolPackage :: String -> SolverTest rejectInstalledBuildToolPackage name = mkTest db name ["A"] $ solverFailure $ isInfixOf $ "rejecting: A:B:exe.B-1.0.0/installed-1 " ++ "(does not contain executable 'exe', which is required by A)" where db :: ExampleDb db = [ Right $ exAv "A" 1 [ExBuildToolAny "B" "exe"] , Left $ exInst "B" 1 "B-1" [] ] -- | This test forces the solver to choose B as a build-tool dependency before -- it sees the dependency on executable exe2 from B. The solver needs to check -- that the version that it already chose for B contains the necessary -- executable. This order causes a different "missing executable" error message -- than when the solver checks for the executable in the same step that it -- chooses the build-tool package. -- -- This case may become impossible if we ever add the executable name to the -- build-tool goal qualifier. Then this test would involve two qualified goals -- for B, one for exe1 and another for exe2. 
chooseExeAfterBuildToolsPackage :: Bool -> String -> SolverTest chooseExeAfterBuildToolsPackage shouldSucceed name = goalOrder goals $ mkTest db name ["A"] $ if shouldSucceed then solverSuccess [("A", 1), ("B", 1)] else solverFailure $ isInfixOf $ "rejecting: A:+flagA (requires executable 'exe2' from A:B:exe.B, " ++ "but the component does not exist)" where db :: ExampleDb db = [ Right $ exAv "A" 1 [ ExBuildToolAny "B" "exe1" , exFlagged "flagA" [ExBuildToolAny "B" "exe2"] [ExAny "unknown"]] , Right $ exAv "B" 1 [] `withExes` [exExe exe [] | exe <- if shouldSucceed then ["exe1", "exe2"] else ["exe1"]] ] goals :: [ExampleVar] goals = [ P QualNone "A" , P (QualExe "A" "B") "B" , F QualNone "A" "flagA" ] -- | Test that when one package depends on two executables from another package, -- both executables must come from the same instance of that package. We could -- lift this restriction in the future by adding the executable name to the goal -- qualifier. requireConsistentBuildToolVersions :: String -> SolverTest requireConsistentBuildToolVersions name = mkTest db name ["A"] $ solverFailure $ isInfixOf $ "[__1] rejecting: A:B:exe.B-2.0.0 (conflict: A => A:B:exe.B (exe exe1)==1.0.0)\n" ++ "[__1] rejecting: A:B:exe.B-1.0.0 (conflict: A => A:B:exe.B (exe exe2)==2.0.0)" where db :: ExampleDb db = [ Right $ exAv "A" 1 [ ExBuildToolFix "B" "exe1" 1 , ExBuildToolFix "B" "exe2" 2 ] , Right $ exAv "B" 2 [] `withExes` exes , Right $ exAv "B" 1 [] `withExes` exes ] exes = [exExe "exe1" [], exExe "exe2" []] -- | This test is similar to the failure case for -- chooseExeAfterBuildToolsPackage, except that the build tool is unbuildable -- instead of missing. 
chooseUnbuildableExeAfterBuildToolsPackage :: String -> SolverTest chooseUnbuildableExeAfterBuildToolsPackage name = constraints [ExFlagConstraint (ScopeAnyQualifier "B") "build-bt2" False] $ goalOrder goals $ mkTest db name ["A"] $ solverFailure $ isInfixOf $ "rejecting: A:+use-bt2 (requires executable 'bt2' from A:B:exe.B, but " ++ "the component is not buildable in the current environment)" where db :: ExampleDb db = [ Right $ exAv "A" 1 [ ExBuildToolAny "B" "bt1" , exFlagged "use-bt2" [ExBuildToolAny "B" "bt2"] [ExAny "unknown"]] , Right $ exAvNoLibrary "B" 1 `withExes` [ exExe "bt1" [] , exExe "bt2" [ExFlagged "build-bt2" (dependencies []) unbuildableDependencies] ] ] goals :: [ExampleVar] goals = [ P QualNone "A" , P (QualExe "A" "B") "B" , F QualNone "A" "use-bt2" ] {------------------------------------------------------------------------------- Databases for legacy build-tools -------------------------------------------------------------------------------} dbLegacyBuildTools1 :: ExampleDb dbLegacyBuildTools1 = [ Right $ exAv "alex" 1 [] `withExe` exExe "alex" [], Right $ exAv "A" 1 [ExLegacyBuildToolAny "alex"] ] -- Test that a recognized build tool dependency specifies the name of both the -- package and the executable. This db has no solution. 
dbLegacyBuildTools2 :: ExampleDb dbLegacyBuildTools2 = [ Right $ exAv "alex" 1 [] `withExe` exExe "other-exe" [], Right $ exAv "other-package" 1 [] `withExe` exExe "alex" [], Right $ exAv "A" 1 [ExLegacyBuildToolAny "alex"] ] -- Test that build-tools on a random thing doesn't matter (only -- the ones we recognize need to be in db) dbLegacyBuildTools3 :: ExampleDb dbLegacyBuildTools3 = [ Right $ exAv "A" 1 [ExLegacyBuildToolAny "otherdude"] ] -- Test that we can solve for different versions of executables dbLegacyBuildTools4 :: ExampleDb dbLegacyBuildTools4 = [ Right $ exAv "alex" 1 [] `withExe` exExe "alex" [], Right $ exAv "alex" 2 [] `withExe` exExe "alex" [], Right $ exAv "A" 1 [ExLegacyBuildToolFix "alex" 1], Right $ exAv "B" 1 [ExLegacyBuildToolFix "alex" 2], Right $ exAv "C" 1 [ExAny "A", ExAny "B"] ] -- Test that exe is not related to library choices dbLegacyBuildTools5 :: ExampleDb dbLegacyBuildTools5 = [ Right $ exAv "alex" 1 [ExFix "A" 1] `withExe` exExe "alex" [], Right $ exAv "A" 1 [], Right $ exAv "A" 2 [], Right $ exAv "B" 1 [ExLegacyBuildToolFix "alex" 1, ExFix "A" 2] ] -- Test that build-tools on build-tools works dbLegacyBuildTools6 :: ExampleDb dbLegacyBuildTools6 = [ Right $ exAv "alex" 1 [] `withExe` exExe "alex" [], Right $ exAv "happy" 1 [ExLegacyBuildToolAny "alex"] `withExe` exExe "happy" [], Right $ exAv "A" 1 [ExLegacyBuildToolAny "happy"] ] -- Test that build-depends on library/executable package works. -- Extracted from https://github.com/haskell/cabal/issues/3775 dbIssue3775 :: ExampleDb dbIssue3775 = [ Right $ exAv "warp" 1 [], -- NB: the warp build-depends refers to the package, not the internal -- executable! Right $ exAv "A" 2 [ExFix "warp" 1] `withExe` exExe "warp" [ExAny "A"], Right $ exAv "B" 2 [ExAny "A", ExAny "warp"] ] -- | Returns true if the second list contains all elements of the first list, in -- order. 
containsInOrder :: Eq a => [a] -> [a] -> Bool containsInOrder [] _ = True containsInOrder _ [] = False containsInOrder (x:xs) (y:ys) | x == y = containsInOrder xs ys | otherwise = containsInOrder (x:xs) ys cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Modular/WeightedPSQ.hs0000644000000000000000000000373307346545000025706 0ustar0000000000000000{-# LANGUAGE ParallelListComp #-} module UnitTests.Distribution.Solver.Modular.WeightedPSQ ( tests ) where import qualified Distribution.Solver.Modular.WeightedPSQ as W import Data.List (sort) import Test.Tasty (TestTree) import Test.Tasty.HUnit (testCase, (@?=)) import Test.Tasty.QuickCheck (Blind(..), testProperty) tests :: [TestTree] tests = [ testProperty "'toList . fromList' preserves elements" $ \xs -> sort (xs :: [(Int, Char, Bool)]) == sort (W.toList (W.fromList xs)) , testProperty "'toList . fromList' sorts stably" $ \xs -> let indexAsValue :: [(Int, (), Int)] indexAsValue = [(x, (), i) | x <- xs | i <- [0..]] in isSorted $ W.toList $ W.fromList indexAsValue , testProperty "'mapWeightsWithKey' sorts by weight" $ \xs (Blind f) -> isSorted $ W.weights $ W.mapWeightsWithKey (f :: Int -> Int -> Int) $ W.fromList (xs :: [(Int, Int, Int)]) , testCase "applying 'mapWeightsWithKey' twice sorts twice" $ let indexAsKey :: [((), Int, ())] indexAsKey = [((), i, ()) | i <- [0..10]] actual = W.toList $ W.mapWeightsWithKey (\_ _ -> ()) $ W.mapWeightsWithKey (\i _ -> -i) $ -- should not be ignored W.fromList indexAsKey in reverse indexAsKey @?= actual , testProperty "'union' sorts by weight" $ \xs ys -> isSorted $ W.weights $ W.union (W.fromList xs) (W.fromList (ys :: [(Int, Int, Int)])) , testProperty "'union' preserves elements" $ \xs ys -> let union = W.union (W.fromList xs) (W.fromList (ys :: [(Int, Int, Int)])) in sort (xs ++ ys) == sort (W.toList union) , testCase "'lookup' returns first occurrence" $ let xs = W.fromList [((), False, 'A'), ((), True, 'C'), ((), True, 'B')] in Just 'C' @?= W.lookup True xs ] 
isSorted :: Ord a => [a] -> Bool isSorted (x1 : xs@(x2 : _)) = x1 <= x2 && isSorted xs isSorted _ = True cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Types/0000755000000000000000000000000007346545000022721 5ustar0000000000000000cabal-install-3.8.1.0/tests/UnitTests/Distribution/Solver/Types/OptionalStanza.hs0000644000000000000000000000234607346545000026230 0ustar0000000000000000{-# LANGUAGE CPP #-} module UnitTests.Distribution.Solver.Types.OptionalStanza ( tests, ) where import Distribution.Solver.Types.OptionalStanza import UnitTests.Distribution.Client.ArbitraryInstances () import Test.Tasty import Test.Tasty.QuickCheck #if !MIN_VERSION_base(4,8,0) import Data.Monoid #endif tests :: [TestTree] tests = [ testProperty "fromList . toList = id" $ \xs -> optStanzaSetFromList (optStanzaSetToList xs) === xs , testProperty "member x (insert x xs) = True" $ \x xs -> optStanzaSetMember x (optStanzaSetInsert x xs) === True , testProperty "member x (singleton y) = (x == y)" $ \x y -> optStanzaSetMember x (optStanzaSetSingleton y) === (x == y) , testProperty "(subset xs ys, member x xs) ==> member x ys" $ \x xs ys -> optStanzaSetIsSubset xs ys && optStanzaSetMember x xs ==> optStanzaSetMember x ys , testProperty "tabulate index = id" $ \xs -> optStanzaTabulate (optStanzaIndex xs) === (xs :: OptionalStanzaMap Int) , testProperty "keysFilteredByValue" $ \xs -> let set i = if optStanzaIndex xs i then optStanzaSetSingleton i else mempty in optStanzaKeysFilteredByValue id xs === set TestStanzas `mappend` set BenchStanzas ] cabal-install-3.8.1.0/tests/UnitTests/0000755000000000000000000000000007346545000015664 5ustar0000000000000000cabal-install-3.8.1.0/tests/UnitTests/Options.hs0000644000000000000000000000342707346545000017661 0ustar0000000000000000{-# LANGUAGE DeriveDataTypeable #-} module UnitTests.Options ( OptionShowSolverLog(..) , OptionMtimeChangeDelay(..) , RunNetworkTests(..) 
, extraOptions ) where import Data.Proxy import Data.Typeable import Test.Tasty.Options {------------------------------------------------------------------------------- Test options -------------------------------------------------------------------------------} extraOptions :: [OptionDescription] extraOptions = [ Option (Proxy :: Proxy OptionShowSolverLog) , Option (Proxy :: Proxy OptionMtimeChangeDelay) , Option (Proxy :: Proxy RunNetworkTests) ] newtype OptionShowSolverLog = OptionShowSolverLog Bool deriving Typeable instance IsOption OptionShowSolverLog where defaultValue = OptionShowSolverLog False parseValue = fmap OptionShowSolverLog . safeReadBool optionName = return "show-solver-log" optionHelp = return "Show full log from the solver" optionCLParser = flagCLParser Nothing (OptionShowSolverLog True) newtype OptionMtimeChangeDelay = OptionMtimeChangeDelay Int deriving Typeable instance IsOption OptionMtimeChangeDelay where defaultValue = OptionMtimeChangeDelay 0 parseValue = fmap OptionMtimeChangeDelay . safeRead optionName = return "mtime-change-delay" optionHelp = return $ "How long to wait before attempting to detect" ++ "file modification, in microseconds" newtype RunNetworkTests = RunNetworkTests Bool deriving Typeable instance IsOption RunNetworkTests where defaultValue = RunNetworkTests True parseValue = fmap RunNetworkTests . safeReadBool optionName = return "run-network-tests" optionHelp = return "Run tests that need network access (default true)." 
cabal-install-3.8.1.0/tests/UnitTests/TempTestDir.hs0000644000000000000000000000654107346545000020432 0ustar0000000000000000{-# LANGUAGE CPP #-} module UnitTests.TempTestDir ( withTestDir, removeDirectoryRecursiveHack ) where import Distribution.Verbosity import Distribution.Compat.Internal.TempFile (createTempDirectory) import Distribution.Simple.Utils (warn) import Control.Monad (when) import Control.Exception (bracket, try, throwIO) import Control.Concurrent (threadDelay) import System.IO.Error import System.Directory #if !(MIN_VERSION_directory(1,2,7)) import System.FilePath (()) #endif import qualified System.Info (os) -- | Much like 'withTemporaryDirectory' but with a number of hacks to make -- sure on windows that we can clean up the directory at the end. -- withTestDir :: Verbosity -> String -> (FilePath -> IO a) -> IO a withTestDir verbosity template action = do systmpdir <- getTemporaryDirectory bracket (createTempDirectory systmpdir template) (removeDirectoryRecursiveHack verbosity) action -- | On Windows, file locks held by programs we run (in this case VCSs) -- are not always released prior to completing process termination! -- -- This means we run into stale locks when trying to delete the test -- directory. There is no sane way to wait on those locks being released, -- we just have to wait, try again and hope. -- -- In addition, on Windows a file that is not writable also cannot be deleted, -- so we must try setting the permissions to readable before deleting files. -- Some VCS tools on Windows create files with read-only attributes. -- removeDirectoryRecursiveHack :: Verbosity -> FilePath -> IO () removeDirectoryRecursiveHack verbosity dir | isWindows = go 1 where isWindows = System.Info.os == "mingw32" limit = 3 go :: Int -> IO () go n = do res <- try $ removePathForcibly dir case res of Left e -- wait a second and try again | isPermissionError e && n < limit -> do threadDelay 1000000 go (n+1) -- but if we hit the limt warn and fail. 
| isPermissionError e -> do warn verbosity $ "Windows file locking hack: hit the retry limit " ++ show limit ++ " while trying to remove " ++ dir throwIO e -- or it's a different error fail. | otherwise -> throwIO e Right () -> when (n > 1) $ warn verbosity $ "Windows file locking hack: had to try " ++ show n ++ " times to remove " ++ dir removeDirectoryRecursiveHack _ dir = removeDirectoryRecursive dir #if !(MIN_VERSION_directory(1,2,7)) -- A simplified version that ought to work for our use case here, and does -- not rely on directory internals. removePathForcibly :: FilePath -> IO () removePathForcibly path = do makeRemovable path `catchIOError` \ _ -> pure () isDir <- doesDirectoryExist path if isDir then do entries <- getDirectoryContents path sequence_ [ removePathForcibly (path entry) | entry <- entries, entry /= ".", entry /= ".." ] removeDirectory path else removeFile path where makeRemovable :: FilePath -> IO () makeRemovable p = setPermissions p emptyPermissions { readable = True, searchable = True, writable = True } #endif