cabal-install-1.22.6.0/0000755000000000000000000000000012540225656012643 5ustar0000000000000000cabal-install-1.22.6.0/LICENSE0000644000000000000000000000335512540225656013656 0ustar0000000000000000Copyright (c) 2003-2008, Isaac Jones, Simon Marlow, Martin Sjögren, Bjorn Bringert, Krasimir Angelov, Malcolm Wallace, Ross Patterson, Lemmih, Paolo Martini, Don Stewart, Duncan Coutts All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Isaac Jones nor the names of other contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. cabal-install-1.22.6.0/cabal-install.cabal0000644000000000000000000002107712540225656016344 0ustar0000000000000000Name: cabal-install Version: 1.22.6.0 Synopsis: The command-line interface for Cabal and Hackage. Description: The \'cabal\' command-line program simplifies the process of managing Haskell software by automating the fetching, configuration, compilation and installation of Haskell libraries and programs. 
homepage: http://www.haskell.org/cabal/ bug-reports: https://github.com/haskell/cabal/issues License: BSD3 License-File: LICENSE Author: Lemmih Paolo Martini Bjorn Bringert Isaac Potoczny-Jones Duncan Coutts Maintainer: cabal-devel@haskell.org Copyright: 2005 Lemmih 2006 Paolo Martini 2007 Bjorn Bringert 2007 Isaac Potoczny-Jones 2007-2012 Duncan Coutts Category: Distribution Build-type: Simple Cabal-Version: >= 1.10 Extra-Source-Files: README.md bash-completion/cabal bootstrap.sh changelog -- Generated with '../Cabal/misc/gen-extra-source-files.sh | sort' tests/PackageTests/Freeze/my.cabal source-repository head type: git location: https://github.com/haskell/cabal/ subdir: cabal-install Flag old-directory description: Use directory < 1.2 and old-time default: False Flag network-uri description: Get Network.URI from the network-uri package default: True executable cabal main-is: Main.hs ghc-options: -Wall -fwarn-tabs other-modules: Distribution.Client.BuildReports.Anonymous Distribution.Client.BuildReports.Storage Distribution.Client.BuildReports.Types Distribution.Client.BuildReports.Upload Distribution.Client.Check Distribution.Client.Config Distribution.Client.Configure Distribution.Client.Dependency Distribution.Client.Dependency.TopDown Distribution.Client.Dependency.TopDown.Constraints Distribution.Client.Dependency.TopDown.Types Distribution.Client.Dependency.Types Distribution.Client.Dependency.Modular Distribution.Client.Dependency.Modular.Assignment Distribution.Client.Dependency.Modular.Builder Distribution.Client.Dependency.Modular.Configured Distribution.Client.Dependency.Modular.ConfiguredConversion Distribution.Client.Dependency.Modular.Dependency Distribution.Client.Dependency.Modular.Explore Distribution.Client.Dependency.Modular.Flag Distribution.Client.Dependency.Modular.Index Distribution.Client.Dependency.Modular.IndexConversion Distribution.Client.Dependency.Modular.Log Distribution.Client.Dependency.Modular.Message Distribution.Client.Dependency.Modular.Package Distribution.Client.Dependency.Modular.Preference Distribution.Client.Dependency.Modular.PSQ Distribution.Client.Dependency.Modular.Solver Distribution.Client.Dependency.Modular.Tree Distribution.Client.Dependency.Modular.Validate Distribution.Client.Dependency.Modular.Version Distribution.Client.Exec Distribution.Client.Fetch Distribution.Client.FetchUtils Distribution.Client.Freeze Distribution.Client.Get Distribution.Client.GZipUtils Distribution.Client.Haddock Distribution.Client.HttpUtils Distribution.Client.IndexUtils Distribution.Client.Init Distribution.Client.Init.Heuristics Distribution.Client.Init.Licenses Distribution.Client.Init.Types Distribution.Client.Install Distribution.Client.InstallPlan Distribution.Client.InstallSymlink Distribution.Client.JobControl Distribution.Client.List Distribution.Client.PackageIndex Distribution.Client.PackageUtils Distribution.Client.ParseUtils Distribution.Client.Run Distribution.Client.Sandbox Distribution.Client.Sandbox.Index Distribution.Client.Sandbox.PackageEnvironment Distribution.Client.Sandbox.Timestamp Distribution.Client.Sandbox.Types Distribution.Client.Setup Distribution.Client.SetupWrapper Distribution.Client.SrcDist Distribution.Client.Tar Distribution.Client.Targets Distribution.Client.Types Distribution.Client.Update Distribution.Client.Upload Distribution.Client.Utils Distribution.Client.World Distribution.Client.Win32SelfUpgrade Distribution.Client.Compat.Environment Distribution.Client.Compat.ExecutablePath Distribution.Client.Compat.FilePerms 
Distribution.Client.Compat.Process Distribution.Client.Compat.Semaphore Distribution.Client.Compat.Time Paths_cabal_install -- NOTE: when updating build-depends, don't forget to update version regexps -- in bootstrap.sh. build-depends: array >= 0.1 && < 0.6, base >= 4.3 && < 5, bytestring >= 0.9 && < 1, Cabal >= 1.22.2 && < 1.23, containers >= 0.1 && < 0.6, filepath >= 1.0 && < 1.5, HTTP >= 4000.2.5 && < 4000.3, mtl >= 2.0 && < 3, pretty >= 1 && < 1.2, random >= 1 && < 1.2, stm >= 2.0 && < 3, time >= 1.1 && < 1.6, zlib >= 0.5.3 && < 0.6 if flag(old-directory) build-depends: directory >= 1 && < 1.2, old-time >= 1 && < 1.2, process >= 1.0.1.1 && < 1.1.0.2 else build-depends: directory >= 1.2 && < 1.3, process >= 1.1.0.2 && < 1.3 -- NOTE: you MUST include the network dependency even when network-uri -- is pulled in, otherwise the constraint solver doesn't have enough -- information if flag(network-uri) build-depends: network-uri >= 2.6, network >= 2.6 else build-depends: network >= 2.4 && < 2.6 if os(windows) build-depends: Win32 >= 2 && < 3 cpp-options: -DWIN32 else build-depends: unix >= 2.0 && < 2.8 if arch(arm) && impl(ghc < 7.6) -- older ghc on arm does not support -threaded cc-options: -DCABAL_NO_THREADED else ghc-options: -threaded c-sources: cbits/getnumcores.c default-language: Haskell2010 -- Small, fast running tests. Test-Suite unit-tests type: exitcode-stdio-1.0 main-is: UnitTests.hs hs-source-dirs: tests, . ghc-options: -Wall -fwarn-tabs other-modules: UnitTests.Distribution.Client.Targets UnitTests.Distribution.Client.Dependency.Modular.PSQ UnitTests.Distribution.Client.Sandbox UnitTests.Distribution.Client.UserConfig build-depends: base, array, bytestring, Cabal, containers, mtl, pretty, process, directory, filepath, stm, time, HTTP, zlib, test-framework, test-framework-hunit, test-framework-quickcheck2 >= 0.3, HUnit, QuickCheck >= 2.5 if flag(old-directory) build-depends: old-time if flag(network-uri) build-depends: network-uri >= 2.6, network >= 2.6 else build-depends: network-uri < 2.6, network < 2.6 if os(windows) build-depends: Win32 cpp-options: -DWIN32 else build-depends: unix if arch(arm) cc-options: -DCABAL_NO_THREADED else ghc-options: -threaded default-language: Haskell2010 -- Large, system tests that build packages. test-suite package-tests type: exitcode-stdio-1.0 hs-source-dirs: tests main-is: PackageTests.hs other-modules: PackageTests.Exec.Check PackageTests.Freeze.Check PackageTests.MultipleSource.Check PackageTests.PackageTester build-depends: Cabal, HUnit, QuickCheck >= 2.1.0.1 && < 2.8, base, bytestring, directory, extensible-exceptions, filepath, process, regex-posix, test-framework, test-framework-hunit, test-framework-quickcheck2 >= 0.2.12 if os(windows) build-depends: Win32 >= 2 && < 3 cpp-options: -DWIN32 else build-depends: unix >= 2.0 && < 2.8 if arch(arm) cc-options: -DCABAL_NO_THREADED else ghc-options: -threaded ghc-options: -Wall default-language: Haskell2010 cabal-install-1.22.6.0/bootstrap.sh0000755000000000000000000003607712540225656015234 0ustar0000000000000000#!/usr/bin/env sh # A script to bootstrap cabal-install. # It works by downloading and installing the Cabal, zlib and # HTTP packages. It then installs cabal-install itself. # It expects to be run inside the cabal-install directory. # Install settings, you can override these by setting environment vars. E.g. 
if # you don't want profiling and dynamic versions of libraries to be installed in # addition to vanilla, run 'EXTRA_CONFIGURE_OPTS="" ./bootstrap.sh' #VERBOSE DEFAULT_CONFIGURE_OPTS="--enable-library-profiling --enable-shared" EXTRA_CONFIGURE_OPTS=${EXTRA_CONFIGURE_OPTS-$DEFAULT_CONFIGURE_OPTS} #EXTRA_BUILD_OPTS #EXTRA_INSTALL_OPTS die () { printf "\nError during cabal-install bootstrap:\n$1\n" >&2 && exit 2 ;} # programs, you can override these by setting environment vars GHC="${GHC:-ghc}" GHC_PKG="${GHC_PKG:-ghc-pkg}" GHC_VER="$(${GHC} --numeric-version)" HADDOCK=${HADDOCK:-haddock} WGET="${WGET:-wget}" CURL="${CURL:-curl}" FETCH="${FETCH:-fetch}" TAR="${TAR:-tar}" GZIP_PROGRAM="${GZIP_PROGRAM:-gzip}" # The variable SCOPE_OF_INSTALLATION can be set on the command line to # use/install the libaries needed to build cabal-install to a custom package # database instead of the user or global package database. # # Example: # # $ ghc-pkg init /my/package/database # $ SCOPE_OF_INSTALLATION='--package-db=/my/package/database' ./bootstrap.sh # # You can also combine SCOPE_OF_INSTALLATION with PREFIX: # # $ ghc-pkg init /my/prefix/packages.conf.d # $ SCOPE_OF_INSTALLATION='--package-db=/my/prefix/packages.conf.d' \ # PREFIX=/my/prefix ./bootstrap.sh # # If you use the --global,--user or --sandbox arguments, this will # override the SCOPE_OF_INSTALLATION setting and not use the package # database you pass in the SCOPE_OF_INSTALLATION variable. SCOPE_OF_INSTALLATION="${SCOPE_OF_INSTALLATION:---user}" DEFAULT_PREFIX="${HOME}/.cabal" # Try to respect $TMPDIR but override if needed - see #1710. [ -"$TMPDIR"- = -""- ] || echo "$TMPDIR" | grep -q ld && export TMPDIR=/tmp/cabal-$(echo $(od -XN4 -An /dev/random)) && mkdir $TMPDIR # Check for a C compiler. [ ! -x "$CC" ] && for ccc in gcc clang cc icc; do ${ccc} --version > /dev/null 2>&1 && CC=$ccc && echo "Using $CC for C compiler. If this is not what you want, set CC." >&2 && break done # None found. [ ! -x `which "$CC"` ] && die "C compiler not found (or could not be run). If a C compiler is installed make sure it is on your PATH, or set the CC variable." # Check the C compiler/linker work. LINK="$(for link in collect2 ld; do echo 'main;' | ${CC} -v -x c - -o /dev/null -\#\#\# 2>&1 | grep -qw $link && echo 'main;' | ${CC} -v -x c - -o /dev/null -\#\#\# 2>&1 | grep -w $link | sed -e "s|\(.*$link\).*|\1|" -e 's/ //g' -e 's|"||' && break done)" # They don't. [ -z "$LINK" ] && die "C compiler and linker could not compile a simple test program. Please check your toolchain." ## Warn that were's overriding $LD if set (if you want). [ -x "$LD" ] && [ "$LD" != "$LINK" ] && echo "Warning: value set in $LD is not the same as C compiler's $LINK." >&2 echo "Using $LINK instead." >&2 # Set LD, overriding environment if necessary. LD=$LINK # Check we're in the right directory, etc. grep "cabal-install" ./cabal-install.cabal > /dev/null 2>&1 || die "The bootstrap.sh script must be run in the cabal-install directory" ${GHC} --numeric-version > /dev/null 2>&1 || die "${GHC} not found (or could not be run). If ghc is installed, make sure it is on your PATH, or set the GHC and GHC_PKG vars." ${GHC_PKG} --version > /dev/null 2>&1 || die "${GHC_PKG} not found." GHC_VER="$(${GHC} --numeric-version)" GHC_PKG_VER="$(${GHC_PKG} --version | cut -d' ' -f 5)" [ ${GHC_VER} = ${GHC_PKG_VER} ] || die "Version mismatch between ${GHC} and ${GHC_PKG}. If you set the GHC variable then set GHC_PKG too." 
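# As a concrete, purely illustrative example of the environment overrides
# described at the top of this script: the compiler names and prefix below
# are placeholders, substitute your own. This bootstraps with a specific
# ghc/ghc-pkg pair, installs under /opt/cabal, skips the extra profiling and
# shared library ways, and skips documentation:
#
#   GHC=ghc-7.8.4 GHC_PKG=ghc-pkg-7.8.4 PREFIX=/opt/cabal \
#     EXTRA_CONFIGURE_OPTS="" ./bootstrap.sh --no-doc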
while [ "$#" -gt 0 ]; do case "${1}" in "--user") SCOPE_OF_INSTALLATION="${1}" shift;; "--global") SCOPE_OF_INSTALLATION="${1}" DEFAULT_PREFIX="/usr/local" shift;; "--sandbox") shift # check if there is another argument which doesn't start with -- if [ "$#" -le 0 ] || [ ! -z $(echo "${1}" | grep "^--") ] then SANDBOX=".cabal-sandbox" else SANDBOX="${1}" shift fi;; "--no-doc") NO_DOCUMENTATION=1 shift;; *) echo "Unknown argument or option, quitting: ${1}" echo "usage: bootstrap.sh [OPTION]" echo echo "options:" echo " --user Install for the local user (default)" echo " --global Install systemwide (must be run as root)" echo " --no-doc Do not generate documentation for installed "\ "packages" echo " --sandbox Install to a sandbox in the default location"\ "(.cabal-sandbox)" echo " --sandbox path Install to a sandbox located at path" exit;; esac done abspath () { case "$1" in /*)printf "%s\n" "$1";; *)printf "%s\n" "$PWD/$1";; esac; } if [ ! -z "$SANDBOX" ] then # set up variables for sandbox bootstrap # Make the sandbox path absolute since it will be used from # different working directories when the dependency packages are # installed. SANDBOX=$(abspath "$SANDBOX") # Get the name of the package database which cabal sandbox would use. GHC_ARCH=$(ghc --info | sed -n 's/.*"Target platform".*"\([^-]\+\)-[^-]\+-\([^"]\+\)".*/\1-\2/p') PACKAGEDB="$SANDBOX/${GHC_ARCH}-ghc-${GHC_VER}-packages.conf.d" # Assume that if the directory is already there, it is already a # package database. We will get an error immediately below if it # isn't. Uses -r to try to be compatible with Solaris, and allow # symlinks as well as a normal dir/file. [ ! -r "$PACKAGEDB" ] && ghc-pkg init "$PACKAGEDB" PREFIX="$SANDBOX" SCOPE_OF_INSTALLATION="--package-db=$PACKAGEDB" echo Bootstrapping in sandbox at \'$SANDBOX\'. fi # Check for haddock unless no documentation should be generated. if [ ! ${NO_DOCUMENTATION} ] then ${HADDOCK} --version > /dev/null 2>&1 || die "${HADDOCK} not found." fi PREFIX=${PREFIX:-${DEFAULT_PREFIX}} # Versions of the packages to install. # The version regex says what existing installed versions are ok. PARSEC_VER="3.1.7"; PARSEC_VER_REGEXP="[3]\.[01]\." # >= 3.0 && < 3.2 DEEPSEQ_VER="1.4.0.0"; DEEPSEQ_VER_REGEXP="1\.[1-9]\." # >= 1.1 && < 2 BINARY_VER="0.7.2.3"; BINARY_VER_REGEXP="[0]\.[7]\." # == 0.7.* TEXT_VER="1.2.0.3"; TEXT_VER_REGEXP="((1\.[012]\.)|(0\.([2-9]|(1[0-1]))\.))" # >= 0.2 && < 1.3 NETWORK_VER="2.6.0.2"; NETWORK_VER_REGEXP="2\.[0-6]\." # >= 2.0 && < 2.7 NETWORK_URI_VER="2.6.0.1"; NETWORK_URI_VER_REGEXP="2\.6\." # >= 2.6 && < 2.7 CABAL_VER="1.22.4.0"; CABAL_VER_REGEXP="1\.22" # >= 1.22 && < 1.23 TRANS_VER="0.4.2.0"; TRANS_VER_REGEXP="0\.[4]\." # >= 0.2.* && < 0.5 MTL_VER="2.2.1"; MTL_VER_REGEXP="[2]\." # >= 2.0 && < 3 HTTP_VER="4000.2.19"; HTTP_VER_REGEXP="4000\.2\.([5-9]|1[0-9]|2[0-9])" # >= 4000.2.5 < 4000.3 ZLIB_VER="0.5.4.2"; ZLIB_VER_REGEXP="0\.[45]\." # == 0.4.* || == 0.5.* TIME_VER="1.5" TIME_VER_REGEXP="1\.[12345]\.?" # >= 1.1 && < 1.6 RANDOM_VER="1.1" RANDOM_VER_REGEXP="1\.[01]\.?" # >= 1 && < 1.2 STM_VER="2.4.4"; STM_VER_REGEXP="2\." # == 2.* OLD_TIME_VER="1.1.0.3"; OLD_TIME_VER_REGEXP="1\.[01]\.?" # >=1.0.0.0 && <1.2 OLD_LOCALE_VER="1.0.0.7"; OLD_LOCALE_VER_REGEXP="1\.0\.?" # >=1.0.0.0 && <1.1 HACKAGE_URL="https://hackage.haskell.org/package" # Haddock fails for network-2.5.0.0. NO_DOCS_PACKAGES_VER_REGEXP="network-uri-2\.5\.[0-9]+\.[0-9]+" # Cache the list of packages: echo "Checking installed packages for ghc-${GHC_VER}..." 
${GHC_PKG} list --global ${SCOPE_OF_INSTALLATION} > ghc-pkg.list || die "running '${GHC_PKG} list' failed" # Will we need to install this package, or is a suitable version installed? need_pkg () { PKG=$1 VER_MATCH=$2 if egrep " ${PKG}-${VER_MATCH}" ghc-pkg.list > /dev/null 2>&1 then return 1; else return 0; fi #Note: we cannot use "! grep" here as Solaris 9 /bin/sh doesn't like it. } info_pkg () { PKG=$1 VER=$2 VER_MATCH=$3 if need_pkg ${PKG} ${VER_MATCH} then if [ -r "${PKG}-${VER}.tar.gz" ] then echo "${PKG}-${VER} will be installed from local tarball." else echo "${PKG}-${VER} will be downloaded and installed." fi else echo "${PKG} is already installed and the version is ok." fi } fetch_pkg () { PKG=$1 VER=$2 URL=${HACKAGE_URL}/${PKG}-${VER}/${PKG}-${VER}.tar.gz if which ${CURL} > /dev/null then # TODO: switch back to resuming curl command once # https://github.com/haskell/hackage-server/issues/111 is resolved #${CURL} -L --fail -C - -O ${URL} || die "Failed to download ${PKG}." ${CURL} -L --fail -O ${URL} || die "Failed to download ${PKG}." elif which ${WGET} > /dev/null then ${WGET} -c ${URL} || die "Failed to download ${PKG}." elif which ${FETCH} > /dev/null then ${FETCH} ${URL} || die "Failed to download ${PKG}." else die "Failed to find a downloader. 'curl', 'wget' or 'fetch' is required." fi [ -f "${PKG}-${VER}.tar.gz" ] || die "Downloading ${URL} did not create ${PKG}-${VER}.tar.gz" } unpack_pkg () { PKG=$1 VER=$2 rm -rf "${PKG}-${VER}.tar" "${PKG}-${VER}" ${GZIP_PROGRAM} -d < "${PKG}-${VER}.tar.gz" | ${TAR} -xf - [ -d "${PKG}-${VER}" ] || die "Failed to unpack ${PKG}-${VER}.tar.gz" } install_pkg () { PKG=$1 VER=$2 [ -x Setup ] && ./Setup clean [ -f Setup ] && rm Setup ${GHC} --make Setup -o Setup || die "Compiling the Setup script failed." [ -x Setup ] || die "The Setup script does not exist or cannot be run" args="${SCOPE_OF_INSTALLATION} --prefix=${PREFIX} --with-compiler=${GHC}" args="$args --with-hc-pkg=${GHC_PKG} --with-gcc=${CC} --with-ld=${LD}" args="$args ${EXTRA_CONFIGURE_OPTS} ${VERBOSE}" ./Setup configure $args || die "Configuring the ${PKG} package failed." ./Setup build ${EXTRA_BUILD_OPTS} ${VERBOSE} || die "Building the ${PKG} package failed." if [ ! ${NO_DOCUMENTATION} ] then if echo "${PKG}-${VER}" | egrep ${NO_DOCS_PACKAGES_VER_REGEXP} > /dev/null 2>&1 then echo "Skipping documentation for the ${PKG} package." else ./Setup haddock --with-ghc=${GHC} --with-haddock=${HADDOCK} ${VERBOSE} || die "Documenting the ${PKG} package failed." fi fi ./Setup install ${EXTRA_INSTALL_OPTS} ${VERBOSE} || die "Installing the ${PKG} package failed." } do_pkg () { PKG=$1 VER=$2 VER_MATCH=$3 if need_pkg ${PKG} ${VER_MATCH} then echo if [ -r "${PKG}-${VER}.tar.gz" ] then echo "Using local tarball for ${PKG}-${VER}." else echo "Downloading ${PKG}-${VER}..." fetch_pkg ${PKG} ${VER} fi unpack_pkg ${PKG} ${VER} cd "${PKG}-${VER}" install_pkg ${PKG} ${VER} cd .. fi } # Replicate the flag selection logic for network-uri in the .cabal file. do_network_uri_pkg () { # Refresh installed package list. ${GHC_PKG} list --global ${SCOPE_OF_INSTALLATION} > ghc-pkg-stage2.list \ || die "running '${GHC_PKG} list' failed" NETWORK_URI_DUMMY_VER="2.5.0.0"; NETWORK_URI_DUMMY_VER_REGEXP="2\.5\." # < 2.6 if egrep " network-2\.[6-9]\." 
ghc-pkg-stage2.list > /dev/null 2>&1 then # Use network >= 2.6 && network-uri >= 2.6 info_pkg "network-uri" ${NETWORK_URI_VER} ${NETWORK_URI_VER_REGEXP} do_pkg "network-uri" ${NETWORK_URI_VER} ${NETWORK_URI_VER_REGEXP} else # Use network < 2.6 && network-uri < 2.6 info_pkg "network-uri" ${NETWORK_URI_DUMMY_VER} ${NETWORK_URI_DUMMY_VER_REGEXP} do_pkg "network-uri" ${NETWORK_URI_DUMMY_VER} ${NETWORK_URI_DUMMY_VER_REGEXP} fi } # Actually do something! info_pkg "deepseq" ${DEEPSEQ_VER} ${DEEPSEQ_VER_REGEXP} info_pkg "binary" ${BINARY_VER} ${BINARY_VER_REGEXP} info_pkg "time" ${TIME_VER} ${TIME_VER_REGEXP} info_pkg "Cabal" ${CABAL_VER} ${CABAL_VER_REGEXP} info_pkg "transformers" ${TRANS_VER} ${TRANS_VER_REGEXP} info_pkg "mtl" ${MTL_VER} ${MTL_VER_REGEXP} info_pkg "text" ${TEXT_VER} ${TEXT_VER_REGEXP} info_pkg "parsec" ${PARSEC_VER} ${PARSEC_VER_REGEXP} info_pkg "network" ${NETWORK_VER} ${NETWORK_VER_REGEXP} info_pkg "old-locale" ${OLD_LOCALE_VER} ${OLD_LOCALE_VER_REGEXP} info_pkg "old-time" ${OLD_TIME_VER} ${OLD_TIME_VER_REGEXP} info_pkg "HTTP" ${HTTP_VER} ${HTTP_VER_REGEXP} info_pkg "zlib" ${ZLIB_VER} ${ZLIB_VER_REGEXP} info_pkg "random" ${RANDOM_VER} ${RANDOM_VER_REGEXP} info_pkg "stm" ${STM_VER} ${STM_VER_REGEXP} do_pkg "deepseq" ${DEEPSEQ_VER} ${DEEPSEQ_VER_REGEXP} do_pkg "binary" ${BINARY_VER} ${BINARY_VER_REGEXP} do_pkg "time" ${TIME_VER} ${TIME_VER_REGEXP} do_pkg "Cabal" ${CABAL_VER} ${CABAL_VER_REGEXP} do_pkg "transformers" ${TRANS_VER} ${TRANS_VER_REGEXP} do_pkg "mtl" ${MTL_VER} ${MTL_VER_REGEXP} do_pkg "text" ${TEXT_VER} ${TEXT_VER_REGEXP} do_pkg "parsec" ${PARSEC_VER} ${PARSEC_VER_REGEXP} do_pkg "network" ${NETWORK_VER} ${NETWORK_VER_REGEXP} # We conditionally install network-uri, depending on the network version. do_network_uri_pkg do_pkg "old-locale" ${OLD_LOCALE_VER} ${OLD_LOCALE_VER_REGEXP} do_pkg "old-time" ${OLD_TIME_VER} ${OLD_TIME_VER_REGEXP} do_pkg "HTTP" ${HTTP_VER} ${HTTP_VER_REGEXP} do_pkg "zlib" ${ZLIB_VER} ${ZLIB_VER_REGEXP} do_pkg "random" ${RANDOM_VER} ${RANDOM_VER_REGEXP} do_pkg "stm" ${STM_VER} ${STM_VER_REGEXP} install_pkg "cabal-install" # Use the newly built cabal to turn the prefix/package database into a # legit cabal sandbox. This works because 'cabal sandbox init' will # reuse the already existing package database and other files if they # are in the expected locations. [ ! -z "$SANDBOX" ] && $SANDBOX/bin/cabal sandbox init --sandbox $SANDBOX echo echo "===========================================" CABAL_BIN="$PREFIX/bin" if [ -x "$CABAL_BIN/cabal" ] then echo "The 'cabal' program has been installed in $CABAL_BIN/" echo "You should either add $CABAL_BIN to your PATH" echo "or copy the cabal program to a directory that is on your PATH." echo echo "The first thing to do is to get the latest list of packages with:" echo " cabal update" echo "This will also create a default config file (if it does not already" echo "exist) at $HOME/.cabal/config" echo echo "By default cabal will install programs to $HOME/.cabal/bin" echo "If you do not want to add this directory to your PATH then you can" echo "change the setting in the config file, for example you could use:" echo "symlink-bindir: $HOME/bin" else echo "Sorry, something went wrong." echo "The 'cabal' executable was not successfully installed into" echo "$CABAL_BIN/" fi echo rm ghc-pkg.list cabal-install-1.22.6.0/README.md0000644000000000000000000001256612540225656014134 0ustar0000000000000000The cabal-install package ========================= See the [Cabal web site] for more information. 
The `cabal-install` package provides a command line tool named `cabal`. It uses the [Cabal] library and provides a user interface to the Cabal/[Hackage] build automation and package management system. It can build and install both local and remote packages, including dependencies. [Cabal web site]: http://www.haskell.org/cabal/ [Cabal]: ../Cabal/README.md Installing the `cabal` command-line tool ======================================== The `cabal-install` package requires a number of other packages, most of which come with a standard GHC installation. It requires the [network] package, which is sometimes packaged separately by Linux distributions; for example, on Debian or Ubuntu, it is located in the "libghc6-network-dev" package. `cabal` requires a few other Haskell packages that are not always installed. The exact list is specified in the [.cabal] file or in the [bootstrap.sh] file. All these packages are available from [Hackage]. Note that on some Unix systems you may need to install an additional zlib development package using your system package manager; for example, on Debian or Ubuntu, it is located in the "zlib1g-dev" package; on Fedora, it is located in the "zlib-devel" package. It is required because the Haskell zlib package uses the system zlib C library and header files. The `cabal-install` package is now part of the [Haskell Platform], so you do not usually need to install it separately. However, if you are starting from a minimal GHC installation, you need to install `cabal-install` manually. Since it is an ordinary Cabal package, `cabal-install` can be built the standard way; to facilitate this, the process has been partially automated. It is described below. [.cabal]: cabal-install.cabal [network]: http://hackage.haskell.org/package/network [Haskell Platform]: http://www.haskell.org/platform/ Quick start on Unix-like systems -------------------------------- As a convenience for users on Unix-like systems, there is a [bootstrap.sh] script that will download and install each of `cabal-install`'s dependencies in turn. $ ./bootstrap.sh It will download and install the dependencies. The script will install the library packages (vanilla, profiling and shared) into `$HOME/.cabal/` and the `cabal` program into `$HOME/.cabal/bin/`. If you don't want to install profiling and shared versions of the libraries, use $ EXTRA_CONFIGURE_OPTS="" ./bootstrap.sh You then have the choice either to place `$HOME/.cabal/bin` on your `$PATH` or move the `cabal` program to somewhere on your `$PATH`. Next, you can get the latest list of packages by running: $ cabal update This will also create a default configuration file, if it does not already exist, at `$HOME/.cabal/config`. By default, `cabal` will install programs to `$HOME/.cabal/bin`. If you do not want to add this directory to your `$PATH`, you can change the setting in the config file; for example, you could use the following: symlink-bindir: $HOME/bin Quick start on Windows systems ------------------------------ For Windows users, a precompiled program ([cabal.exe]) is provided. Download and put it somewhere on your `%PATH%` (for example, `C:\Program Files\Haskell\bin`.) Next, you can get the latest list of packages by running: $ cabal update This will also create a default configuration file (if it does not already exist) at `C:\Documents and Settings\%USERNAME%\Application Data\cabal\config`. 
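A quick way to check that the installation works (on either platform) is to
query the package list you have just downloaded; for example, the following
searches the available packages for names containing "xml" (the `cabal list`
command, along with the rest of the command set, is described in the next
section):

    $ cabal list xml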
[cabal.exe]: http://www.haskell.org/cabal/release/cabal-install-latest/ Using `cabal` ============= There are two sets of commands: commands for working with a local project build tree and those for working with packages distributed from [Hackage]. For the list of the full set of commands and flags for each command, run: $ cabal help Commands for developers for local build trees --------------------------------------------- The commands for local project build trees are almost the same as the `runghc Setup` command-line interface you may already be familiar with. In particular, it has the following commands: * `cabal configure` * `cabal build` * `cabal haddock` * `cabal clean` * `cabal sdist` The `install` command is somewhat different; it is an all-in-one operation. If you run `cabal install` in your build tree, it will configure, build, and install. It takes all the flags that `configure` takes such as `--global` and `--prefix`. In addition, `cabal` will download and install any dependencies that are not already installed. It can also rebuild packages to ensure a consistent set of dependencies. Commands for released Hackage packages -------------------------------------- $ cabal update This command gets the latest list of packages from the [Hackage] server. On occasion, this command must be run manually--for instance, if you want to install a newly released package. $ cabal install xmonad This command installs one or more named packages, and all their dependencies, from Hackage. By default, it installs the latest available version; however, you may specify exact versions or version ranges. For example, `cabal install alex-2.2` or `cabal install parsec < 3`. $ cabal list xml This does a search of the installed and available packages. It does a case-insensitive substring match on the package name. [Hackage]: http://hackage.haskell.org [bootstrap.sh]: bootstrap.sh cabal-install-1.22.6.0/changelog0000644000000000000000000002067112540225656014523 0ustar0000000000000000-*-change-log-*- 1.22.6.0 Ryan Thomas June 2015 * A fix for @ezyang's fix for #2502. (Mikhail Glushenkov) 1.22.5.0 Ryan Thomas June 2015 * Reduce temporary directory name length, fixes #2502. (Edward Z. Yang) 1.22.4.0 Ryan Thomas May 2015 * Force cabal upload to always use digest auth and never basic auth. * Add dependency-graph information to `printPlan` output * bootstrap.sh: fixes linker matching to avoid cases where tested linker names appear unexpectedly in compiler output (fixes #2542) 1.22.3.0 Ryan Thomas April 2015 * Fix bash completion for sandbox subcommands - Fixes #2513 (Mikhail Glushenkov) * filterConfigureFlags: filter more flags (Mikhail Glushenkov) 1.22.2.0 Ryan Thomas March 2015 * Don't pass '--{en,dis}able-profiling' to old setup exes. * -Wall police * Allow filepath 1.4 1.21.x (current development version) * New command: user-config (#2159). * Implement 'cabal repl --only' (#2016). * Fix an issue when 'cabal repl' was doing unnecessary compilation (#1715). * Prompt the user to specify source directory in 'cabal init' (#1989). * Remove the self-upgrade check (#2090). * Don't redownload already downloaded packages when bootstrapping (#2133). * Support sandboxes in 'bootstrap.sh' (#2137). * Install profiling and shared libs by default in 'bootstrap.sh' (#2009). 1.20.0.3 Johan Tibell June 2014 * Don't attempt to rename dist if it is already named correctly * Treat all flags of a package as interdependent. 
* Allow template-haskell to be upgradable again 1.20.0.2 Johan Tibell May 2014 * Increase max-backjumps to 2000. * Fix solver bug which led to missed install plans. * Fix streaming test output. * Tweak solver heuristics to avoid reinstalls. 1.20.0.1 Johan Tibell May 2014 * Fix cabal repl search path bug on Windows * Include OS and arch in cabal-install user agent * Revert --constraint flag behavior in configure to 1.18 behavior 1.20.0.0 Johan Tibell April 2014 * Build only selected executables * Add -j flag to build/test/bench/run * Improve install log file * Don't symlink executables when in a sandbox * Add --package-db flag to 'list' and 'info' * Make upload more efficient * Add --require-sandbox option * Add experimental Cabal file format command * Add haddock section to config file * Add --main-is flag to init 0.14.0 Andres Loeh April 2012 * Works with ghc-7.4 * Completely new modular dependency solver (default in most cases) * Some tweaks to old topdown dependency solver * Install plans are now checked for reinstalls that break packages * Flags --constraint and --preference work for nonexisting packages * New constraint forms for source and installed packages * New constraint form for package-specific use flags * New constraint form for package-specific stanza flags * Test suite dependencies are pulled in on demand * No longer install packages on --enable-tests when tests fail * New "cabal bench" command * Various "cabal init" tweaks 0.10.0 Duncan Coutts February 2011 * New package targets: local dirs, local and remote tarballs * Initial support for a "world" package target * Partial fix for situation where user packages mask global ones * Removed cabal upgrade, new --upgrade-dependencies flag * New cabal install --only-dependencies flag * New cabal fetch --no-dependencies and --dry-run flags * Improved output for cabal info * Simpler and faster bash command line completion * Fix for broken proxies that decompress wrongly * Fix for cabal unpack to preserve executable permissions * Adjusted the output for the -v verbosity level in a few places 0.8.2 Duncan Coutts March 2010 * Fix for cabal update on Windows * On windows switch to per-user installs (rather than global) * Handle intra-package dependencies in dependency planning * Minor tweaks to cabal init feature * Fix various -Wall warnings * Fix for cabal sdist --snapshot 0.8.0 Duncan Coutts Dec 2009 * Works with ghc-6.12 * New "cabal init" command for making initial project .cabal file * New feature to maintain an index of haddock documentation 0.6.4 Duncan Coutts Nov 2009 * Improve the algorithm for selecting the base package version * Hackage errors now reported by "cabal upload [--check]" * Improved format of messages from "cabal check" * Config file can now be selected by an env var * Updated tar reading/writing code * Improve instructions in the README and bootstrap output * Fix bootstrap.sh on Solaris 9 * Fix bootstrap for systems where network uses parsec 3 * Fix building with ghc-6.6 0.6.2 Duncan Coutts Feb 2009 * The upgrade command has been disabled in this release * The configure and install commands now have consistent behaviour * Reduce the tendancy to re-install already existing packages * The --constraint= flag now works for the install command * New --preference= flag for soft constraints / version preferences * Improved bootstrap.sh script, smarter and better error checking * New cabal info command to display detailed info on packages * New cabal unpack command to download and untar a package * HTTP-4000 package 
required, should fix bugs with http proxies * Now works with authenticated proxies. * On Windows can now override the proxy setting using an env var * Fix compatability with config files generated by older versions * Warn if the hackage package list is very old * More helpful --help output, mention config file and examples * Better documentation in ~/.cabal/config file * Improved command line interface for logging and build reporting * Minor improvements to some messages 0.6.0 Duncan Coutts Oct 2008 * Constraint solver can now cope with base 3 and base 4 * Allow use of package version preferences from hackage index * More detailed output from cabal install --dry-run -v * Improved bootstrap.sh 0.5.2 Duncan Coutts Aug 2008 * Suport building haddock documentaion * Self-reinstall now works on Windows * Allow adding symlinks to excutables into a separate bindir * New self-documenting config file * New install --reinstall flag * More helpful status messages in a couple places * Upload failures now report full text error message from the server * Support for local package repositories * New build logging and reporting * New command to upload build reports to (a compatible) server * Allow tilde in hackage server URIs * Internal code improvements * Many other minor improvements and bug fixes 0.5.1 Duncan Coutts June 2008 * Restore minimal hugs support in dependency resolver * Fix for disabled http proxies on Windows * Revert to global installs on Windows by default 0.5.0 Duncan Coutts June 2008 * New package dependency resolver, solving diamond dep problem * Integrate cabal-setup functionality * Integrate cabal-upload functionality * New cabal update and check commands * Improved behavior for install and upgrade commands * Full Windows support * New command line handling * Bash command line completion * Allow case insensitive package names on command line * New --dry-run flag for install, upgrade and fetch commands * New --root-cmd flag to allow installing as root * New --cabal-lib-version flag to select different Cabal lib versions * Support for HTTP proxies * Improved cabal list output * Build other non-dependent packages even when some fail * Report a summary of all build failures at the end * Partial support for hugs * Partial implementation of build reporting and logging * More consistent logging and verbosity * Significant internal code restructuring 0.4 Duncan Coutts Oct 2007 * Renamed executable from 'cabal-install' to 'cabal' * Partial Windows compatability * Do per-user installs by default * cabal install now installs the package in the current directory * Allow multiple remote servers * Use zlib lib and internal tar code and rather than external tar * Reorganised configuration files * Significant code restructuring * Cope with packages with conditional dependencies 0.3 and older versions by Lemmih, Paolo Martini and others 2006-2007 * Switch from smart-server, dumb-client model to the reverse * New .tar.gz based index format * New remote and local package archive format cabal-install-1.22.6.0/Main.hs0000644000000000000000000014432412540225656014073 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Main -- Copyright : (c) David Himmelstrup 2005 -- License : BSD-like -- -- Maintainer : lemmih@gmail.com -- Stability : provisional -- Portability : portable -- -- Entry point to the default cabal-install front-end. 
----------------------------------------------------------------------------- module Main (main) where import Distribution.Client.Setup ( GlobalFlags(..), globalCommand, globalRepos , ConfigFlags(..) , ConfigExFlags(..), defaultConfigExFlags, configureExCommand , BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..) , buildCommand, replCommand, testCommand, benchmarkCommand , InstallFlags(..), defaultInstallFlags , installCommand, upgradeCommand , FetchFlags(..), fetchCommand , FreezeFlags(..), freezeCommand , GetFlags(..), getCommand, unpackCommand , checkCommand , formatCommand , updateCommand , ListFlags(..), listCommand , InfoFlags(..), infoCommand , UploadFlags(..), uploadCommand , ReportFlags(..), reportCommand , runCommand , InitFlags(initVerbosity), initCommand , SDistFlags(..), SDistExFlags(..), sdistCommand , Win32SelfUpgradeFlags(..), win32SelfUpgradeCommand , SandboxFlags(..), sandboxCommand , ExecFlags(..), execCommand , UserConfigFlags(..), userConfigCommand , reportCommand ) import Distribution.Simple.Setup ( HaddockFlags(..), haddockCommand, defaultHaddockFlags , HscolourFlags(..), hscolourCommand , ReplFlags(..) , CopyFlags(..), copyCommand , RegisterFlags(..), registerCommand , CleanFlags(..), cleanCommand , TestFlags(..), BenchmarkFlags(..) , Flag(..), fromFlag, fromFlagOrDefault, flagToMaybe, toFlag , configAbsolutePaths ) import Distribution.Client.SetupWrapper ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) import Distribution.Client.Config ( SavedConfig(..), loadConfig, defaultConfigFile, userConfigDiff , userConfigUpdate ) import Distribution.Client.Targets ( readUserTargets ) import qualified Distribution.Client.List as List ( list, info ) import Distribution.Client.Install (install) import Distribution.Client.Configure (configure) import Distribution.Client.Update (update) import Distribution.Client.Exec (exec) import Distribution.Client.Fetch (fetch) import Distribution.Client.Freeze (freeze) import Distribution.Client.Check as Check (check) --import Distribution.Client.Clean (clean) import Distribution.Client.Upload as Upload (upload, check, report) import Distribution.Client.Run (run, splitRunArgs) import Distribution.Client.SrcDist (sdist) import Distribution.Client.Get (get) import Distribution.Client.Sandbox (sandboxInit ,sandboxAddSource ,sandboxDelete ,sandboxDeleteSource ,sandboxListSources ,sandboxHcPkg ,dumpPackageEnvironment ,getSandboxConfigFilePath ,loadConfigOrSandboxConfig ,initPackageDBIfNeeded ,maybeWithSandboxDirOnSearchPath ,maybeWithSandboxPackageInfo ,WereDepsReinstalled(..) 
,maybeReinstallAddSourceDeps ,tryGetIndexFilePath ,sandboxBuildDir ,updateSandboxConfigFileFlag ,configCompilerAux' ,configPackageDB') import Distribution.Client.Sandbox.PackageEnvironment (setPackageDB ,userPackageEnvironmentFile) import Distribution.Client.Sandbox.Timestamp (maybeAddCompilerTimestampRecord) import Distribution.Client.Sandbox.Types (UseSandbox(..), whenUsingSandbox) import Distribution.Client.Init (initCabal) import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade import Distribution.Client.Utils (determineNumJobs #if defined(mingw32_HOST_OS) ,relaxEncodingErrors #endif ,existsAndIsMoreRecentThan) import Distribution.PackageDescription ( Executable(..), benchmarkName, benchmarkBuildInfo, testName , testBuildInfo, buildable ) import Distribution.PackageDescription.Parse ( readPackageDescription ) import Distribution.PackageDescription.PrettyPrint ( writeGenericPackageDescription ) import Distribution.Simple.Build ( startInterpreter ) import Distribution.Simple.Command ( CommandParse(..), CommandUI(..), Command , commandsRun, commandAddAction, hiddenCommand ) import Distribution.Simple.Compiler ( Compiler(..) ) import Distribution.Simple.Configure ( checkPersistBuildConfigOutdated, configCompilerAuxEx , ConfigStateFileError(..), localBuildInfoFile , getPersistBuildConfig, tryGetPersistBuildConfig ) import qualified Distribution.Simple.LocalBuildInfo as LBI import Distribution.Simple.Program (defaultProgramConfiguration ,configureAllKnownPrograms) import qualified Distribution.Simple.Setup as Cabal import Distribution.Simple.Utils ( cabalVersion, die, notice, info, topHandler , findPackageDesc, tryFindPackageDesc ) import Distribution.Text ( display ) import Distribution.Verbosity as Verbosity ( Verbosity, normal ) import Distribution.Version ( Version(..), orLaterVersion ) import qualified Paths_cabal_install (version) import System.Environment (getArgs, getProgName) import System.Exit (exitFailure) import System.FilePath (splitExtension, takeExtension) import System.IO ( BufferMode(LineBuffering), hSetBuffering #ifdef mingw32_HOST_OS , stderr #endif , stdout ) import System.Directory (doesFileExist, getCurrentDirectory) import Data.List (intercalate) import Data.Maybe (mapMaybe) #if !MIN_VERSION_base(4,8,0) import Data.Monoid (Monoid(..)) #endif import Control.Monad (when, unless) -- | Entry point -- main :: IO () main = do -- Enable line buffering so that we can get fast feedback even when piped. -- This is especially important for CI and build systems. hSetBuffering stdout LineBuffering -- The default locale encoding for Windows CLI is not UTF-8 and printing -- Unicode characters to it will fail unless we relax the handling of encoding -- errors when writing to stderr and stdout. 
#ifdef mingw32_HOST_OS relaxEncodingErrors stdout relaxEncodingErrors stderr #endif getArgs >>= mainWorker mainWorker :: [String] -> IO () mainWorker args = topHandler $ case commandsRun (globalCommand commands) commands args of CommandHelp help -> printGlobalHelp help CommandList opts -> printOptionsList opts CommandErrors errs -> printErrors errs CommandReadyToGo (globalFlags, commandParse) -> case commandParse of _ | fromFlagOrDefault False (globalVersion globalFlags) -> printVersion | fromFlagOrDefault False (globalNumericVersion globalFlags) -> printNumericVersion CommandHelp help -> printCommandHelp help CommandList opts -> printOptionsList opts CommandErrors errs -> printErrors errs CommandReadyToGo action -> do globalFlags' <- updateSandboxConfigFileFlag globalFlags action globalFlags' where printCommandHelp help = do pname <- getProgName putStr (help pname) printGlobalHelp help = do pname <- getProgName configFile <- defaultConfigFile putStr (help pname) putStr $ "\nYou can edit the cabal configuration file to set defaults:\n" ++ " " ++ configFile ++ "\n" exists <- doesFileExist configFile when (not exists) $ putStrLn $ "This file will be generated with sensible " ++ "defaults if you run 'cabal update'." printOptionsList = putStr . unlines printErrors errs = die $ intercalate "\n" errs printNumericVersion = putStrLn $ display Paths_cabal_install.version printVersion = putStrLn $ "cabal-install version " ++ display Paths_cabal_install.version ++ "\nusing version " ++ display cabalVersion ++ " of the Cabal library " commands = [installCommand `commandAddAction` installAction ,updateCommand `commandAddAction` updateAction ,listCommand `commandAddAction` listAction ,infoCommand `commandAddAction` infoAction ,fetchCommand `commandAddAction` fetchAction ,freezeCommand `commandAddAction` freezeAction ,getCommand `commandAddAction` getAction ,hiddenCommand $ unpackCommand `commandAddAction` unpackAction ,checkCommand `commandAddAction` checkAction ,sdistCommand `commandAddAction` sdistAction ,uploadCommand `commandAddAction` uploadAction ,reportCommand `commandAddAction` reportAction ,runCommand `commandAddAction` runAction ,initCommand `commandAddAction` initAction ,configureExCommand `commandAddAction` configureAction ,buildCommand `commandAddAction` buildAction ,replCommand `commandAddAction` replAction ,sandboxCommand `commandAddAction` sandboxAction ,haddockCommand `commandAddAction` haddockAction ,execCommand `commandAddAction` execAction ,userConfigCommand `commandAddAction` userConfigAction ,cleanCommand `commandAddAction` cleanAction ,wrapperAction copyCommand copyVerbosity copyDistPref ,wrapperAction hscolourCommand hscolourVerbosity hscolourDistPref ,wrapperAction registerCommand regVerbosity regDistPref ,testCommand `commandAddAction` testAction ,benchmarkCommand `commandAddAction` benchmarkAction ,hiddenCommand $ formatCommand `commandAddAction` formatAction ,hiddenCommand $ upgradeCommand `commandAddAction` upgradeAction ,hiddenCommand $ win32SelfUpgradeCommand`commandAddAction` win32SelfUpgradeAction ] wrapperAction :: Monoid flags => CommandUI flags -> (flags -> Flag Verbosity) -> (flags -> Flag String) -> Command (GlobalFlags -> IO ()) wrapperAction command verbosityFlag distPrefFlag = commandAddAction command { commandDefaultFlags = mempty } $ \flags extraArgs _globalFlags -> do let verbosity = fromFlagOrDefault normal (verbosityFlag flags) setupScriptOptions = defaultSetupScriptOptions { useDistPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) 
(distPrefFlag flags) } setupWrapper verbosity setupScriptOptions Nothing command (const flags) extraArgs configureAction :: (ConfigFlags, ConfigExFlags) -> [String] -> GlobalFlags -> IO () configureAction (configFlags, configExFlags) extraArgs globalFlags = do let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags (configUserInstall configFlags) let configFlags' = savedConfigureFlags config `mappend` configFlags configExFlags' = savedConfigureExFlags config `mappend` configExFlags globalFlags' = savedGlobalFlags config `mappend` globalFlags (comp, platform, conf) <- configCompilerAuxEx configFlags' -- If we're working inside a sandbox and the user has set the -w option, we -- may need to create a sandbox-local package DB for this compiler and add a -- timestamp record for this compiler to the timestamp file. let configFlags'' = case useSandbox of NoSandbox -> configFlags' (UseSandbox sandboxDir) -> setPackageDB sandboxDir comp platform configFlags' whenUsingSandbox useSandbox $ \sandboxDir -> do initPackageDBIfNeeded verbosity configFlags'' comp conf -- NOTE: We do not write the new sandbox package DB location to -- 'cabal.sandbox.config' here because 'configure -w' must not affect -- subsequent 'install' (for UI compatibility with non-sandboxed mode). indexFile <- tryGetIndexFilePath config maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile (compilerId comp) platform maybeWithSandboxDirOnSearchPath useSandbox $ configure verbosity (configPackageDB' configFlags'') (globalRepos globalFlags') comp platform conf configFlags'' configExFlags' extraArgs buildAction :: (BuildFlags, BuildExFlags) -> [String] -> GlobalFlags -> IO () buildAction (buildFlags, buildExFlags) extraArgs globalFlags = do let distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) (buildDistPref buildFlags) verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck (buildOnly buildExFlags) -- Calls 'configureAction' to do the real work, so nothing special has to be -- done to support sandboxes. (useSandbox, config) <- reconfigure verbosity distPref mempty [] globalFlags noAddSource (buildNumJobs buildFlags) (const Nothing) maybeWithSandboxDirOnSearchPath useSandbox $ build verbosity config distPref buildFlags extraArgs -- | Actually do the work of building the package. This is separate from -- 'buildAction' so that 'testAction' and 'benchmarkAction' do not invoke -- 'reconfigure' twice. build :: Verbosity -> SavedConfig -> FilePath -> BuildFlags -> [String] -> IO () build verbosity config distPref buildFlags extraArgs = setupWrapper verbosity setupOptions Nothing (Cabal.buildCommand progConf) mkBuildFlags extraArgs where progConf = defaultProgramConfiguration setupOptions = defaultSetupScriptOptions { useDistPref = distPref } mkBuildFlags version = filterBuildFlags version config buildFlags' buildFlags' = buildFlags { buildVerbosity = toFlag verbosity , buildDistPref = toFlag distPref } -- | Make sure that we don't pass new flags to setup scripts compiled against -- old versions of Cabal. filterBuildFlags :: Version -> SavedConfig -> BuildFlags -> BuildFlags filterBuildFlags version config buildFlags | version >= Version [1,19,1] [] = buildFlags_latest -- Cabal < 1.19.1 doesn't support 'build -j'. 
| otherwise = buildFlags_pre_1_19_1 where buildFlags_pre_1_19_1 = buildFlags { buildNumJobs = NoFlag } buildFlags_latest = buildFlags { -- Take the 'jobs' setting '~/.cabal/config' into account. buildNumJobs = Flag . Just . determineNumJobs $ (numJobsConfigFlag `mappend` numJobsCmdLineFlag) } numJobsConfigFlag = installNumJobs . savedInstallFlags $ config numJobsCmdLineFlag = buildNumJobs buildFlags replAction :: (ReplFlags, BuildExFlags) -> [String] -> GlobalFlags -> IO () replAction (replFlags, buildExFlags) extraArgs globalFlags = do cwd <- getCurrentDirectory pkgDesc <- findPackageDesc cwd either (const onNoPkgDesc) (const onPkgDesc) pkgDesc where verbosity = fromFlagOrDefault normal (replVerbosity replFlags) -- There is a .cabal file in the current directory: start a REPL and load -- the project's modules. onPkgDesc = do let distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) (replDistPref replFlags) noAddSource = case replReload replFlags of Flag True -> SkipAddSourceDepsCheck _ -> fromFlagOrDefault DontSkipAddSourceDepsCheck (buildOnly buildExFlags) progConf = defaultProgramConfiguration setupOptions = defaultSetupScriptOptions { useCabalVersion = orLaterVersion $ Version [1,18,0] [] , useDistPref = distPref } replFlags' = replFlags { replVerbosity = toFlag verbosity , replDistPref = toFlag distPref } -- Calls 'configureAction' to do the real work, so nothing special has to -- be done to support sandboxes. (useSandbox, _config) <- reconfigure verbosity distPref mempty [] globalFlags noAddSource NoFlag (const Nothing) maybeWithSandboxDirOnSearchPath useSandbox $ setupWrapper verbosity setupOptions Nothing (Cabal.replCommand progConf) (const replFlags') extraArgs -- No .cabal file in the current directory: just start the REPL (possibly -- using the sandbox package DB). onNoPkgDesc = do (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags mempty let configFlags = savedConfigureFlags config (comp, _platform, programDb) <- configCompilerAux' configFlags startInterpreter verbosity programDb comp (configPackageDB' configFlags) -- | Re-configure the package in the current directory if needed. Deciding -- when to reconfigure and with which options is convoluted: -- -- If we are reconfiguring, we must always run @configure@ with the -- verbosity option we are given; however, that a previous configuration -- uses a different verbosity setting is not reason enough to reconfigure. -- -- The package should be configured to use the same \"dist\" prefix as -- given to the @build@ command, otherwise the build will probably -- fail. Not only does this determine the \"dist\" prefix setting if we -- need to reconfigure anyway, but an existing configuration should be -- invalidated if its \"dist\" prefix differs. -- -- If the package has never been configured (i.e., there is no -- LocalBuildInfo), we must configure first, using the default options. -- -- If the package has been configured, there will be a 'LocalBuildInfo'. -- If there no package description file, we assume that the -- 'PackageDescription' is up to date, though the configuration may need -- to be updated for other reasons (see above). If there is a package -- description file, and it has been modified since the 'LocalBuildInfo' -- was generated, then we need to reconfigure. -- -- The caller of this function may also have specific requirements -- regarding the flags the last configuration used. For example, -- 'testAction' requires that the package be configured with test suites -- enabled. 
The caller may pass the required settings to this function -- along with a function to check the validity of the saved 'ConfigFlags'; -- these required settings will be checked first upon determining that -- a previous configuration exists. reconfigure :: Verbosity -- ^ Verbosity setting -> FilePath -- ^ \"dist\" prefix -> ConfigFlags -- ^ Additional config flags to set. These flags -- will be 'mappend'ed to the last used or -- default 'ConfigFlags' as appropriate, so -- this value should be 'mempty' with only the -- required flags set. The required verbosity -- and \"dist\" prefix flags will be set -- automatically because they are always -- required; therefore, it is not necessary to -- set them here. -> [String] -- ^ Extra arguments -> GlobalFlags -- ^ Global flags -> SkipAddSourceDepsCheck -- ^ Should we skip the timestamp check for modified -- add-source dependencies? -> Flag (Maybe Int) -- ^ -j flag for reinstalling add-source deps. -> (ConfigFlags -> Maybe String) -- ^ Check that the required flags are set in -- the last used 'ConfigFlags'. If the required -- flags are not set, provide a message to the -- user explaining the reason for -- reconfiguration. Because the correct \"dist\" -- prefix setting is always required, it is checked -- automatically; this function need not check -- for it. -> IO (UseSandbox, SavedConfig) reconfigure verbosity distPref addConfigFlags extraArgs globalFlags skipAddSourceDepsCheck numJobsFlag checkFlags = do eLbi <- tryGetPersistBuildConfig distPref case eLbi of Left err -> onNoBuildConfig err Right lbi -> onBuildConfig lbi where -- We couldn't load the saved package config file. -- -- If we're in a sandbox: add-source deps don't have to be reinstalled -- (since we don't know the compiler & platform). onNoBuildConfig :: ConfigStateFileError -> IO (UseSandbox, SavedConfig) onNoBuildConfig err = do let msg = case err of ConfigStateFileMissing -> "Package has never been configured." ConfigStateFileNoParse -> "Saved package config file seems " ++ "to be corrupt." _ -> show err case err of ConfigStateFileBadVersion _ _ _ -> info verbosity msg _ -> do notice verbosity $ msg ++ " Configuring with default flags." ++ configureManually configureAction (defaultFlags, defaultConfigExFlags) extraArgs globalFlags loadConfigOrSandboxConfig verbosity globalFlags mempty -- Package has been configured, but the configuration may be out of -- date or required flags may not be set. -- -- If we're in a sandbox: reinstall the modified add-source deps and -- force reconfigure if we did. onBuildConfig :: LBI.LocalBuildInfo -> IO (UseSandbox, SavedConfig) onBuildConfig lbi = do let configFlags = LBI.configFlags lbi flags = mconcat [configFlags, addConfigFlags, distVerbFlags] -- Was the sandbox created after the package was already configured? We -- may need to skip reinstallation of add-source deps and force -- reconfigure. let buildConfig = localBuildInfoFile distPref sandboxConfig <- getSandboxConfigFilePath globalFlags isSandboxConfigNewer <- sandboxConfig `existsAndIsMoreRecentThan` buildConfig let skipAddSourceDepsCheck' | isSandboxConfigNewer = SkipAddSourceDepsCheck | otherwise = skipAddSourceDepsCheck when (skipAddSourceDepsCheck' == SkipAddSourceDepsCheck) $ info verbosity "Skipping add-source deps check..." 
(useSandbox, config, depsReinstalled) <- case skipAddSourceDepsCheck' of DontSkipAddSourceDepsCheck -> maybeReinstallAddSourceDeps verbosity numJobsFlag flags globalFlags SkipAddSourceDepsCheck -> do (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags (configUserInstall flags) return (useSandbox, config, NoDepsReinstalled) -- Is the @cabal.config@ file newer than @dist/setup.config@? Then we need -- to force reconfigure. Note that it's possible to use @cabal.config@ -- even without sandboxes. isUserPackageEnvironmentFileNewer <- userPackageEnvironmentFile `existsAndIsMoreRecentThan` buildConfig -- Determine whether we need to reconfigure and which message to show to -- the user if that is the case. mMsg <- determineMessageToShow lbi configFlags depsReinstalled isSandboxConfigNewer isUserPackageEnvironmentFileNewer case mMsg of -- No message for the user indicates that reconfiguration -- is not required. Nothing -> return (useSandbox, config) -- Show the message and reconfigure. Just msg -> do notice verbosity msg configureAction (flags, defaultConfigExFlags) extraArgs globalFlags return (useSandbox, config) -- Determine what message, if any, to display to the user if reconfiguration -- is required. determineMessageToShow :: LBI.LocalBuildInfo -> ConfigFlags -> WereDepsReinstalled -> Bool -> Bool -> IO (Maybe String) determineMessageToShow _ _ _ True _ = -- The sandbox was created after the package was already configured. return $! Just $! sandboxConfigNewerMessage determineMessageToShow _ _ _ False True = -- The user package environment file was modified. return $! Just $! userPackageEnvironmentFileModifiedMessage determineMessageToShow lbi configFlags depsReinstalled False False = do let savedDistPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) (configDistPref configFlags) case depsReinstalled of ReinstalledSomeDeps -> -- Some add-source deps were reinstalled. return $! Just $! reinstalledDepsMessage NoDepsReinstalled -> case checkFlags configFlags of -- Flag required by the caller is not set. Just msg -> return $! Just $! msg ++ configureManually Nothing -- Required "dist" prefix is not set. | savedDistPref /= distPref -> return $! Just distPrefMessage -- All required flags are set, but the configuration -- may be outdated. | otherwise -> case LBI.pkgDescrFile lbi of Nothing -> return Nothing Just pdFile -> do outdated <- checkPersistBuildConfigOutdated distPref pdFile return $! if outdated then Just $! outdatedMessage pdFile else Nothing defaultFlags = mappend addConfigFlags distVerbFlags distVerbFlags = mempty { configVerbosity = toFlag verbosity , configDistPref = toFlag distPref } reconfiguringMostRecent = " Re-configuring with most recently used options." configureManually = " If this fails, please run configure manually." sandboxConfigNewerMessage = "The sandbox was created after the package was already configured." ++ reconfiguringMostRecent ++ configureManually userPackageEnvironmentFileModifiedMessage = "The user package environment file ('" ++ userPackageEnvironmentFile ++ "') was modified." ++ reconfiguringMostRecent ++ configureManually distPrefMessage = "Package previously configured with different \"dist\" prefix." ++ reconfiguringMostRecent ++ configureManually outdatedMessage pdFile = pdFile ++ " has been changed." ++ reconfiguringMostRecent ++ configureManually reinstalledDepsMessage = "Some add-source dependencies have been reinstalled." 
++ reconfiguringMostRecent ++ configureManually installAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) -> [String] -> GlobalFlags -> IO () installAction (configFlags, _, installFlags, _) _ _globalFlags | fromFlagOrDefault False (installOnly installFlags) = let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) in setupWrapper verbosity defaultSetupScriptOptions Nothing installCommand (const mempty) [] installAction (configFlags, configExFlags, installFlags, haddockFlags) extraArgs globalFlags = do let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags (configUserInstall configFlags) targets <- readUserTargets verbosity extraArgs -- TODO: It'd be nice if 'cabal install' picked up the '-w' flag passed to -- 'configure' when run inside a sandbox. Right now, running -- -- $ cabal sandbox init && cabal configure -w /path/to/ghc -- && cabal build && cabal install -- -- performs the compilation twice unless you also pass -w to 'install'. -- However, this is the same behaviour that 'cabal install' has in the normal -- mode of operation, so we stick to it for consistency. let sandboxDistPref = case useSandbox of NoSandbox -> NoFlag UseSandbox sandboxDir -> Flag $ sandboxBuildDir sandboxDir configFlags' = maybeForceTests installFlags' $ savedConfigureFlags config `mappend` configFlags configExFlags' = defaultConfigExFlags `mappend` savedConfigureExFlags config `mappend` configExFlags installFlags' = defaultInstallFlags `mappend` savedInstallFlags config `mappend` installFlags haddockFlags' = defaultHaddockFlags `mappend` savedHaddockFlags config `mappend` haddockFlags globalFlags' = savedGlobalFlags config `mappend` globalFlags (comp, platform, conf) <- configCompilerAux' configFlags' -- TODO: Redesign ProgramDB API to prevent such problems as #2241 in the future. conf' <- configureAllKnownPrograms verbosity conf -- If we're working inside a sandbox and the user has set the -w option, we -- may need to create a sandbox-local package DB for this compiler and add a -- timestamp record for this compiler to the timestamp file. configFlags'' <- case useSandbox of NoSandbox -> configAbsolutePaths $ configFlags' (UseSandbox sandboxDir) -> return $ (setPackageDB sandboxDir comp platform configFlags') { configDistPref = sandboxDistPref } whenUsingSandbox useSandbox $ \sandboxDir -> do initPackageDBIfNeeded verbosity configFlags'' comp conf' indexFile <- tryGetIndexFilePath config maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile (compilerId comp) platform -- FIXME: Passing 'SandboxPackageInfo' to install unconditionally here means -- that 'cabal install some-package' inside a sandbox will sometimes reinstall -- modified add-source deps, even if they are not among the dependencies of -- 'some-package'. This can also prevent packages that depend on older -- versions of add-source'd packages from building (see #1362). maybeWithSandboxPackageInfo verbosity configFlags'' globalFlags' comp platform conf useSandbox $ \mSandboxPkgInfo -> maybeWithSandboxDirOnSearchPath useSandbox $ install verbosity (configPackageDB' configFlags'') (globalRepos globalFlags') comp platform conf' useSandbox mSandboxPkgInfo globalFlags' configFlags'' configExFlags' installFlags' haddockFlags' targets where -- '--run-tests' implies '--enable-tests'. 
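    -- In command-line terms the intent is that these two invocations behave
    -- the same (illustrative):
    --
    --   $ cabal install --run-tests
    --   $ cabal install --enable-tests --run-tests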
maybeForceTests installFlags' configFlags' = if fromFlagOrDefault False (installRunTests installFlags') then configFlags' { configTests = toFlag True } else configFlags' testAction :: (TestFlags, BuildFlags, BuildExFlags) -> [String] -> GlobalFlags -> IO () testAction (testFlags, buildFlags, buildExFlags) extraArgs globalFlags = do let verbosity = fromFlagOrDefault normal (testVerbosity testFlags) distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) (testDistPref testFlags) setupOptions = defaultSetupScriptOptions { useDistPref = distPref } buildFlags' = buildFlags { buildVerbosity = testVerbosity testFlags , buildDistPref = testDistPref testFlags } addConfigFlags = mempty { configTests = toFlag True } checkFlags flags | fromFlagOrDefault False (configTests flags) = Nothing | otherwise = Just "Re-configuring with test suites enabled." noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck (buildOnly buildExFlags) -- reconfigure also checks if we're in a sandbox and reinstalls add-source -- deps if needed. (useSandbox, config) <- reconfigure verbosity distPref addConfigFlags [] globalFlags noAddSource (buildNumJobs buildFlags') checkFlags -- the package was just configured, so the LBI must be available lbi <- getPersistBuildConfig distPref let pkgDescr = LBI.localPkgDescr lbi nameTestsOnly = LBI.foldComponent (const Nothing) (const Nothing) (\t -> if buildable (testBuildInfo t) then Just (testName t) else Nothing) (const Nothing) tests = mapMaybe nameTestsOnly $ LBI.pkgComponents pkgDescr extraArgs' | null extraArgs = tests | otherwise = extraArgs if null tests then notice verbosity "Package has no buildable test suites." else do maybeWithSandboxDirOnSearchPath useSandbox $ build verbosity config distPref buildFlags' extraArgs' maybeWithSandboxDirOnSearchPath useSandbox $ setupWrapper verbosity setupOptions Nothing Cabal.testCommand (const testFlags) extraArgs' benchmarkAction :: (BenchmarkFlags, BuildFlags, BuildExFlags) -> [String] -> GlobalFlags -> IO () benchmarkAction (benchmarkFlags, buildFlags, buildExFlags) extraArgs globalFlags = do let verbosity = fromFlagOrDefault normal (benchmarkVerbosity benchmarkFlags) distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) (benchmarkDistPref benchmarkFlags) setupOptions = defaultSetupScriptOptions { useDistPref = distPref } buildFlags' = buildFlags { buildVerbosity = benchmarkVerbosity benchmarkFlags , buildDistPref = benchmarkDistPref benchmarkFlags } addConfigFlags = mempty { configBenchmarks = toFlag True } checkFlags flags | fromFlagOrDefault False (configBenchmarks flags) = Nothing | otherwise = Just "Re-configuring with benchmarks enabled." noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck (buildOnly buildExFlags) -- reconfigure also checks if we're in a sandbox and reinstalls add-source -- deps if needed. (useSandbox, config) <- reconfigure verbosity distPref addConfigFlags [] globalFlags noAddSource (buildNumJobs buildFlags') checkFlags -- the package was just configured, so the LBI must be available lbi <- getPersistBuildConfig distPref let pkgDescr = LBI.localPkgDescr lbi nameBenchsOnly = LBI.foldComponent (const Nothing) (const Nothing) (const Nothing) (\b -> if buildable (benchmarkBuildInfo b) then Just (benchmarkName b) else Nothing) benchs = mapMaybe nameBenchsOnly $ LBI.pkgComponents pkgDescr extraArgs' | null extraArgs = benchs | otherwise = extraArgs if null benchs then notice verbosity "Package has no buildable benchmarks." 
else do maybeWithSandboxDirOnSearchPath useSandbox $ build verbosity config distPref buildFlags' extraArgs' maybeWithSandboxDirOnSearchPath useSandbox $ setupWrapper verbosity setupOptions Nothing Cabal.benchmarkCommand (const benchmarkFlags) extraArgs' haddockAction :: HaddockFlags -> [String] -> GlobalFlags -> IO () haddockAction haddockFlags extraArgs globalFlags = do let verbosity = fromFlag (haddockVerbosity haddockFlags) (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags mempty let haddockFlags' = defaultHaddockFlags `mappend` savedHaddockFlags config `mappend` haddockFlags setupScriptOptions = defaultSetupScriptOptions { useDistPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) (haddockDistPref haddockFlags') } setupWrapper verbosity setupScriptOptions Nothing haddockCommand (const haddockFlags') extraArgs cleanAction :: CleanFlags -> [String] -> GlobalFlags -> IO () cleanAction cleanFlags extraArgs _globalFlags = setupWrapper verbosity setupScriptOptions Nothing cleanCommand (const cleanFlags) extraArgs where verbosity = fromFlagOrDefault normal (cleanVerbosity cleanFlags) setupScriptOptions = defaultSetupScriptOptions { useDistPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) (cleanDistPref cleanFlags), useWin32CleanHack = True } listAction :: ListFlags -> [String] -> GlobalFlags -> IO () listAction listFlags extraArgs globalFlags = do let verbosity = fromFlag (listVerbosity listFlags) (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags mempty let configFlags' = savedConfigureFlags config configFlags = configFlags' { configPackageDBs = configPackageDBs configFlags' `mappend` listPackageDBs listFlags } globalFlags' = savedGlobalFlags config `mappend` globalFlags (comp, _, conf) <- configCompilerAux' configFlags List.list verbosity (configPackageDB' configFlags) (globalRepos globalFlags') comp conf listFlags extraArgs infoAction :: InfoFlags -> [String] -> GlobalFlags -> IO () infoAction infoFlags extraArgs globalFlags = do let verbosity = fromFlag (infoVerbosity infoFlags) targets <- readUserTargets verbosity extraArgs (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags mempty let configFlags' = savedConfigureFlags config configFlags = configFlags' { configPackageDBs = configPackageDBs configFlags' `mappend` infoPackageDBs infoFlags } globalFlags' = savedGlobalFlags config `mappend` globalFlags (comp, _, conf) <- configCompilerAuxEx configFlags List.info verbosity (configPackageDB' configFlags) (globalRepos globalFlags') comp conf globalFlags' infoFlags targets updateAction :: Flag Verbosity -> [String] -> GlobalFlags -> IO () updateAction verbosityFlag extraArgs globalFlags = do unless (null extraArgs) $ die $ "'update' doesn't take any extra arguments: " ++ unwords extraArgs let verbosity = fromFlag verbosityFlag (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags NoFlag let globalFlags' = savedGlobalFlags config `mappend` globalFlags update verbosity (globalRepos globalFlags') upgradeAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) -> [String] -> GlobalFlags -> IO () upgradeAction _ _ _ = die $ "Use the 'cabal install' command instead of 'cabal upgrade'.\n" ++ "You can install the latest version of a package using 'cabal install'. 
" ++ "The 'cabal upgrade' command has been removed because people found it " ++ "confusing and it often led to broken packages.\n" ++ "If you want the old upgrade behaviour then use the install command " ++ "with the --upgrade-dependencies flag (but check first with --dry-run " ++ "to see what would happen). This will try to pick the latest versions " ++ "of all dependencies, rather than the usual behaviour of trying to pick " ++ "installed versions of all dependencies. If you do use " ++ "--upgrade-dependencies, it is recommended that you do not upgrade core " ++ "packages (e.g. by using appropriate --constraint= flags)." fetchAction :: FetchFlags -> [String] -> GlobalFlags -> IO () fetchAction fetchFlags extraArgs globalFlags = do let verbosity = fromFlag (fetchVerbosity fetchFlags) targets <- readUserTargets verbosity extraArgs config <- loadConfig verbosity (globalConfigFile globalFlags) mempty let configFlags = savedConfigureFlags config globalFlags' = savedGlobalFlags config `mappend` globalFlags (comp, platform, conf) <- configCompilerAux' configFlags fetch verbosity (configPackageDB' configFlags) (globalRepos globalFlags') comp platform conf globalFlags' fetchFlags targets freezeAction :: FreezeFlags -> [String] -> GlobalFlags -> IO () freezeAction freezeFlags _extraArgs globalFlags = do let verbosity = fromFlag (freezeVerbosity freezeFlags) (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags mempty let configFlags = savedConfigureFlags config globalFlags' = savedGlobalFlags config `mappend` globalFlags (comp, platform, conf) <- configCompilerAux' configFlags maybeWithSandboxPackageInfo verbosity configFlags globalFlags' comp platform conf useSandbox $ \mSandboxPkgInfo -> maybeWithSandboxDirOnSearchPath useSandbox $ freeze verbosity (configPackageDB' configFlags) (globalRepos globalFlags') comp platform conf mSandboxPkgInfo globalFlags' freezeFlags uploadAction :: UploadFlags -> [String] -> GlobalFlags -> IO () uploadAction uploadFlags extraArgs globalFlags = do let verbosity = fromFlag (uploadVerbosity uploadFlags) config <- loadConfig verbosity (globalConfigFile globalFlags) mempty let uploadFlags' = savedUploadFlags config `mappend` uploadFlags globalFlags' = savedGlobalFlags config `mappend` globalFlags tarfiles = extraArgs checkTarFiles extraArgs if fromFlag (uploadCheck uploadFlags') then Upload.check verbosity tarfiles else upload verbosity (globalRepos globalFlags') (flagToMaybe $ uploadUsername uploadFlags') (flagToMaybe $ uploadPassword uploadFlags') tarfiles where checkTarFiles tarfiles | null tarfiles = die "the 'upload' command expects one or more .tar.gz packages." | not (null otherFiles) = die $ "the 'upload' command expects only .tar.gz packages: " ++ intercalate ", " otherFiles | otherwise = sequence_ [ do exists <- doesFileExist tarfile unless exists $ die $ "file not found: " ++ tarfile | tarfile <- tarfiles ] where otherFiles = filter (not . 
isTarGzFile) tarfiles isTarGzFile file = case splitExtension file of (file', ".gz") -> takeExtension file' == ".tar" _ -> False checkAction :: Flag Verbosity -> [String] -> GlobalFlags -> IO () checkAction verbosityFlag extraArgs _globalFlags = do unless (null extraArgs) $ die $ "'check' doesn't take any extra arguments: " ++ unwords extraArgs allOk <- Check.check (fromFlag verbosityFlag) unless allOk exitFailure formatAction :: Flag Verbosity -> [String] -> GlobalFlags -> IO () formatAction verbosityFlag extraArgs _globalFlags = do let verbosity = fromFlag verbosityFlag path <- case extraArgs of [] -> do cwd <- getCurrentDirectory tryFindPackageDesc cwd (p:_) -> return p pkgDesc <- readPackageDescription verbosity path -- Uses 'writeFileAtomic' under the hood. writeGenericPackageDescription path pkgDesc sdistAction :: (SDistFlags, SDistExFlags) -> [String] -> GlobalFlags -> IO () sdistAction (sdistFlags, sdistExFlags) extraArgs _globalFlags = do unless (null extraArgs) $ die $ "'sdist' doesn't take any extra arguments: " ++ unwords extraArgs sdist sdistFlags sdistExFlags reportAction :: ReportFlags -> [String] -> GlobalFlags -> IO () reportAction reportFlags extraArgs globalFlags = do unless (null extraArgs) $ die $ "'report' doesn't take any extra arguments: " ++ unwords extraArgs let verbosity = fromFlag (reportVerbosity reportFlags) config <- loadConfig verbosity (globalConfigFile globalFlags) mempty let globalFlags' = savedGlobalFlags config `mappend` globalFlags reportFlags' = savedReportFlags config `mappend` reportFlags Upload.report verbosity (globalRepos globalFlags') (flagToMaybe $ reportUsername reportFlags') (flagToMaybe $ reportPassword reportFlags') runAction :: (BuildFlags, BuildExFlags) -> [String] -> GlobalFlags -> IO () runAction (buildFlags, buildExFlags) extraArgs globalFlags = do let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) (buildDistPref buildFlags) noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck (buildOnly buildExFlags) -- reconfigure also checks if we're in a sandbox and reinstalls add-source -- deps if needed. 
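  -- Unlike 'testAction' and 'benchmarkAction' above, 'run' has no extra
  -- requirements on the saved configuration, so it passes 'const Nothing'
  -- as the flag check below.  A hypothetical action that required, say,
  -- profiling could pass a check along these lines instead (sketch only):
  --
  -- > \flags -> if fromFlagOrDefault False (configProf flags)
  -- >             then Nothing
  -- >             else Just "Re-configuring with profiling enabled."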
(useSandbox, config) <- reconfigure verbosity distPref mempty [] globalFlags noAddSource (buildNumJobs buildFlags) (const Nothing) lbi <- getPersistBuildConfig distPref (exe, exeArgs) <- splitRunArgs lbi extraArgs maybeWithSandboxDirOnSearchPath useSandbox $ build verbosity config distPref buildFlags ["exe:" ++ exeName exe] maybeWithSandboxDirOnSearchPath useSandbox $ run verbosity lbi exe exeArgs getAction :: GetFlags -> [String] -> GlobalFlags -> IO () getAction getFlags extraArgs globalFlags = do let verbosity = fromFlag (getVerbosity getFlags) targets <- readUserTargets verbosity extraArgs config <- loadConfig verbosity (globalConfigFile globalFlags) mempty let globalFlags' = savedGlobalFlags config `mappend` globalFlags get verbosity (globalRepos (savedGlobalFlags config)) globalFlags' getFlags targets unpackAction :: GetFlags -> [String] -> GlobalFlags -> IO () unpackAction getFlags extraArgs globalFlags = do getAction getFlags extraArgs globalFlags initAction :: InitFlags -> [String] -> GlobalFlags -> IO () initAction initFlags _extraArgs globalFlags = do let verbosity = fromFlag (initVerbosity initFlags) (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags mempty let configFlags = savedConfigureFlags config (comp, _, conf) <- configCompilerAux' configFlags initCabal verbosity (configPackageDB' configFlags) comp conf initFlags sandboxAction :: SandboxFlags -> [String] -> GlobalFlags -> IO () sandboxAction sandboxFlags extraArgs globalFlags = do let verbosity = fromFlag (sandboxVerbosity sandboxFlags) case extraArgs of -- Basic sandbox commands. ["init"] -> sandboxInit verbosity sandboxFlags globalFlags ["delete"] -> sandboxDelete verbosity sandboxFlags globalFlags ("add-source":extra) -> do when (noExtraArgs extra) $ die "The 'sandbox add-source' command expects at least one argument" sandboxAddSource verbosity extra sandboxFlags globalFlags ("delete-source":extra) -> do when (noExtraArgs extra) $ die "The 'sandbox delete-source' command expects \ \at least one argument" sandboxDeleteSource verbosity extra sandboxFlags globalFlags ["list-sources"] -> sandboxListSources verbosity sandboxFlags globalFlags -- More advanced commands. ("hc-pkg":extra) -> do when (noExtraArgs extra) $ die $ "The 'sandbox hc-pkg' command expects at least one argument" sandboxHcPkg verbosity sandboxFlags globalFlags extra ["buildopts"] -> die "Not implemented!" -- Hidden commands. ["dump-pkgenv"] -> dumpPackageEnvironment verbosity sandboxFlags globalFlags -- Error handling. [] -> die $ "Please specify a subcommand (see 'help sandbox')" _ -> die $ "Unknown 'sandbox' subcommand: " ++ unwords extraArgs where noExtraArgs = (<1) . length execAction :: ExecFlags -> [String] -> GlobalFlags -> IO () execAction execFlags extraArgs globalFlags = do let verbosity = fromFlag (execVerbosity execFlags) (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags mempty let configFlags = savedConfigureFlags config (comp, platform, conf) <- configCompilerAux' configFlags exec verbosity useSandbox comp platform conf extraArgs userConfigAction :: UserConfigFlags -> [String] -> GlobalFlags -> IO () userConfigAction ucflags extraArgs globalFlags = do let verbosity = fromFlag (userConfigVerbosity ucflags) case extraArgs of ("diff":_) -> mapM_ putStrLn =<< userConfigDiff globalFlags ("update":_) -> userConfigUpdate verbosity globalFlags -- Error handling. 
[] -> die $ "Please specify a subcommand (see 'help user-config')" _ -> die $ "Unknown 'user-config' subcommand: " ++ unwords extraArgs -- | See 'Distribution.Client.Install.withWin32SelfUpgrade' for details. -- win32SelfUpgradeAction :: Win32SelfUpgradeFlags -> [String] -> GlobalFlags -> IO () win32SelfUpgradeAction selfUpgradeFlags (pid:path:_extraArgs) _globalFlags = do let verbosity = fromFlag (win32SelfUpgradeVerbosity selfUpgradeFlags) Win32SelfUpgrade.deleteOldExeFile verbosity (read pid) path win32SelfUpgradeAction _ _ _ = return () cabal-install-1.22.6.0/Setup.hs0000644000000000000000000000005612540225656014300 0ustar0000000000000000import Distribution.Simple main = defaultMain cabal-install-1.22.6.0/cbits/0000755000000000000000000000000012540225656013747 5ustar0000000000000000cabal-install-1.22.6.0/cbits/getnumcores.c0000644000000000000000000000206412540225656016450 0ustar0000000000000000#if defined(__GLASGOW_HASKELL__) && (__GLASGOW_HASKELL__ >= 612) && !defined(CABAL_NO_THREADED) /* Since version 6.12, GHC's threaded RTS includes a getNumberOfProcessors function, so we try to use that if available. cabal-install is always built with -threaded nowadays. */ #define HAS_GET_NUMBER_OF_PROCESSORS #endif #ifndef HAS_GET_NUMBER_OF_PROCESSORS #ifdef _WIN32 #include #elif MACOS #include #include #elif __linux__ #include #endif int getNumberOfProcessors() { #ifdef WIN32 SYSTEM_INFO sysinfo; GetSystemInfo(&sysinfo); return sysinfo.dwNumberOfProcessors; #elif MACOS int nm[2]; size_t len = 4; uint32_t count; nm[0] = CTL_HW; nm[1] = HW_AVAILCPU; sysctl(nm, 2, &count, &len, NULL, 0); if(count < 1) { nm[1] = HW_NCPU; sysctl(nm, 2, &count, &len, NULL, 0); if(count < 1) { count = 1; } } return count; #elif __linux__ return sysconf(_SC_NPROCESSORS_ONLN); #else return 1; #endif } #endif /* HAS_GET_NUMBER_OF_PROCESSORS */ cabal-install-1.22.6.0/bash-completion/0000755000000000000000000000000012540225656015727 5ustar0000000000000000cabal-install-1.22.6.0/bash-completion/cabal0000644000000000000000000000527512540225656016725 0ustar0000000000000000# cabal command line completion # Copyright 2007-2008 "Lennart Kolmodin" # "Duncan Coutts" # # List cabal targets by type, pass: # - test-suite for test suites # - benchmark for benchmarks # - executable for executables # - executable|test-suite|benchmark for the three _cabal_list() { cat *.cabal | grep -Ei "^[[:space:]]*($1)[[:space:]]" | sed -e "s/.* \([^ ]*\).*/\1/" } # List possible targets depending on the command supplied as parameter. The # ideal option would be to implement this via --list-options on cabal directly. # This is a temporary workaround. _cabal_targets() { # If command ($*) contains build, repl, test or bench completes with # targets of according type. [ -f *.cabal ] || return 0 local comp for comp in $*; do [ $comp == build ] && _cabal_list "executable|test-suite|benchmark" && break [ $comp == repl ] && _cabal_list "executable|test-suite|benchmark" && break [ $comp == run ] && _cabal_list "executable" && break [ $comp == test ] && _cabal_list "test-suite" && break [ $comp == bench ] && _cabal_list "benchmark" && break done } # List possible subcommands of a cabal subcommand. # # In example "sandbox" is a cabal subcommand that itself has subcommands. Since # "cabal --list-options" doesn't work in such cases we have to get the list # using other means. _cabal_subcommands() { local word for word in "$@"; do case "$word" in sandbox) # Get list of "cabal sandbox" subcommands from its help message. 
# # Following command short-circuits if it reaches flags section. # This is to prevent any problems that might arise from unfortunate # word combinations in flag descriptions. Usage section is parsed # using simple regexp and "sandbox" subcommand is printed for each # successful substitution. "$1" help sandbox | sed -rn '/Flags/q;s/^.* cabal sandbox *([^ ]*).*/\1/;t p;b;: p;p' break # Terminate for loop. ;; esac done } _cabal() { # get the word currently being completed local cur cur=${COMP_WORDS[$COMP_CWORD]} # create a command line to run local cmd # copy all words the user has entered cmd=( ${COMP_WORDS[@]} ) # replace the current word with --list-options cmd[${COMP_CWORD}]="--list-options" # the resulting completions should be put into this array COMPREPLY=( $( compgen -W "$( ${cmd[@]} ) $( _cabal_targets ${cmd[@]} ) $( _cabal_subcommands ${COMP_WORDS[@]} )" -- $cur ) ) } complete -F _cabal -o default cabal cabal-install-1.22.6.0/tests/0000755000000000000000000000000012540225656014005 5ustar0000000000000000cabal-install-1.22.6.0/tests/PackageTests.hs0000644000000000000000000000670712540225656016731 0ustar0000000000000000-- | Groups black-box tests of cabal-install and configures them to test -- the correct binary. -- -- This file should do nothing but import tests from other modules and run -- them with the path to the correct cabal-install binary. module Main where -- Modules from Cabal. import Distribution.Simple.Program.Builtin (ghcPkgProgram) import Distribution.Simple.Program.Db (defaultProgramDb, requireProgram, setProgramSearchPath) import Distribution.Simple.Program.Find (ProgramSearchPathEntry(ProgramSearchPathDir), defaultProgramSearchPath) import Distribution.Simple.Program.Types ( Program(..), simpleProgram, programPath) import Distribution.Simple.Utils ( findProgramVersion ) import Distribution.Verbosity (normal) -- Third party modules. import qualified Control.Exception.Extensible as E import System.Directory ( canonicalizePath, getCurrentDirectory, setCurrentDirectory , removeFile, doesFileExist ) import System.FilePath (()) import Test.Framework (Test, defaultMain, testGroup) import Control.Monad ( when ) -- Module containing common test code. import PackageTests.PackageTester ( TestsPaths(..) , packageTestsDirectory , packageTestsConfigFile ) -- Modules containing the tests. import qualified PackageTests.Exec.Check import qualified PackageTests.Freeze.Check import qualified PackageTests.MultipleSource.Check -- List of tests to run. Each test will be called with the path to the -- cabal binary to use. 
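-- Each test group receives a 'TestsPaths' record (defined in
-- "PackageTests.PackageTester"); 'main' below fills it in from the local
-- build tree, so for a typical checkout it looks roughly like this
-- (values are illustrative only):
--
-- > TestsPaths { cabalPath  = ".../dist/build/cabal/cabal"
-- >            , ghcPkgPath = "/usr/bin/ghc-pkg"
-- >            , configPath = ".../tests/PackageTests/cabal-config" }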
tests :: PackageTests.PackageTester.TestsPaths -> [Test] tests paths = [ testGroup "Freeze" $ PackageTests.Freeze.Check.tests paths , testGroup "Exec" $ PackageTests.Exec.Check.tests paths , testGroup "MultipleSource" $ PackageTests.MultipleSource.Check.tests paths ] cabalProgram :: Program cabalProgram = (simpleProgram "cabal") { programFindVersion = findProgramVersion "--numeric-version" id } main :: IO () main = do buildDir <- canonicalizePath "dist/build/cabal" let programSearchPath = ProgramSearchPathDir buildDir : defaultProgramSearchPath (cabal, _) <- requireProgram normal cabalProgram (setProgramSearchPath programSearchPath defaultProgramDb) (ghcPkg, _) <- requireProgram normal ghcPkgProgram defaultProgramDb canonicalConfigPath <- canonicalizePath $ "tests" packageTestsDirectory let testsPaths = TestsPaths { cabalPath = programPath cabal, ghcPkgPath = programPath ghcPkg, configPath = canonicalConfigPath packageTestsConfigFile } putStrLn $ "Using cabal: " ++ cabalPath testsPaths putStrLn $ "Using ghc-pkg: " ++ ghcPkgPath testsPaths cwd <- getCurrentDirectory let confFile = packageTestsDirectory "cabal-config" removeConf = do b <- doesFileExist confFile when b $ removeFile confFile let runTests = do setCurrentDirectory "tests" removeConf -- assert that there is no existing config file -- (we want deterministic testing with the default -- config values) defaultMain $ tests testsPaths runTests `E.finally` do -- remove the default config file that got created by the tests removeConf -- Change back to the old working directory so that the tests can be -- repeatedly run in `cabal repl` via `:main`. setCurrentDirectory cwd cabal-install-1.22.6.0/tests/UnitTests.hs0000644000000000000000000000142712540225656016307 0ustar0000000000000000module Main where import Test.Framework import qualified UnitTests.Distribution.Client.Sandbox import qualified UnitTests.Distribution.Client.UserConfig import qualified UnitTests.Distribution.Client.Targets import qualified UnitTests.Distribution.Client.Dependency.Modular.PSQ tests :: [Test] tests = [ testGroup "UnitTests.Distribution.Client.UserConfig" UnitTests.Distribution.Client.UserConfig.tests ,testGroup "Distribution.Client.Sandbox" UnitTests.Distribution.Client.Sandbox.tests ,testGroup "Distribution.Client.Targets" UnitTests.Distribution.Client.Targets.tests ,testGroup "UnitTests.Distribution.Client.Dependency.Modular.PSQ" UnitTests.Distribution.Client.Dependency.Modular.PSQ.tests ] main :: IO () main = defaultMain tests cabal-install-1.22.6.0/tests/PackageTests/0000755000000000000000000000000012540225656016363 5ustar0000000000000000cabal-install-1.22.6.0/tests/PackageTests/PackageTester.hs0000644000000000000000000002010612540225656021440 0ustar0000000000000000{-# LANGUAGE ScopedTypeVariables #-} -- TODO This module was originally based on the PackageTests.PackageTester -- module in Cabal, however it has a few differences. I suspect that as -- this module ages the two modules will diverge further. As such, I have -- not attempted to merge them into a single module nor to extract a common -- module from them. Refactor this module and/or Cabal's -- PackageTests.PackageTester to remove commonality. -- 2014-05-15 Ben Armston -- | Routines for black-box testing cabal-install. -- -- Instead of driving the tests by making library calls into -- Distribution.Simple.* or Distribution.Client.* this module only every -- executes the `cabal-install` binary. 
-- -- You can set the following VERBOSE environment variable to control -- the verbosity of the output generated by this module. module PackageTests.PackageTester ( TestsPaths(..) , Result(..) , packageTestsDirectory , packageTestsConfigFile -- * Running cabal commands , cabal_clean , cabal_exec , cabal_freeze , cabal_install , cabal_sandbox , run -- * Test helpers , assertCleanSucceeded , assertExecFailed , assertExecSucceeded , assertFreezeSucceeded , assertInstallSucceeded , assertSandboxSucceeded ) where import qualified Control.Exception.Extensible as E import Control.Monad (when, unless) import Data.Maybe (fromMaybe) import System.Directory (canonicalizePath, doesFileExist) import System.Environment (getEnv) import System.Exit (ExitCode(ExitSuccess)) import System.FilePath ( (<.>) ) import System.IO (hClose, hGetChar, hIsEOF) import System.IO.Error (isDoesNotExistError) import System.Process (runProcess, waitForProcess) import Test.HUnit (Assertion, assertFailure) import Distribution.Simple.BuildPaths (exeExtension) import Distribution.Simple.Utils (printRawCommandAndArgs) import Distribution.Compat.CreatePipe (createPipe) import Distribution.ReadE (readEOrFail) import Distribution.Verbosity (Verbosity, flagToVerbosity, normal) data Success = Failure -- | ConfigureSuccess -- | BuildSuccess -- | TestSuccess -- | BenchSuccess | CleanSuccess | ExecSuccess | FreezeSuccess | InstallSuccess | SandboxSuccess deriving (Eq, Show) data TestsPaths = TestsPaths { cabalPath :: FilePath -- ^ absolute path to cabal executable. , ghcPkgPath :: FilePath -- ^ absolute path to ghc-pkg executable. , configPath :: FilePath -- ^ absolute path of the default config file -- to use for tests (tests are free to use -- a different one). } data Result = Result { successful :: Bool , success :: Success , outputText :: String } deriving Show nullResult :: Result nullResult = Result True Failure "" ------------------------------------------------------------------------ -- * Config packageTestsDirectory :: FilePath packageTestsDirectory = "PackageTests" packageTestsConfigFile :: FilePath packageTestsConfigFile = "cabal-config" ------------------------------------------------------------------------ -- * Running cabal commands recordRun :: (String, ExitCode, String) -> Success -> Result -> Result recordRun (cmd, exitCode, exeOutput) thisSucc res = res { successful = successful res && exitCode == ExitSuccess , success = if exitCode == ExitSuccess then thisSucc else success res , outputText = (if null $ outputText res then "" else outputText res ++ "\n") ++ cmd ++ "\n" ++ exeOutput } -- | Run the clean command and return its result. cabal_clean :: TestsPaths -> FilePath -> [String] -> IO Result cabal_clean paths dir args = do res <- cabal paths dir (["clean"] ++ args) return $ recordRun res CleanSuccess nullResult -- | Run the exec command and return its result. cabal_exec :: TestsPaths -> FilePath -> [String] -> IO Result cabal_exec paths dir args = do res <- cabal paths dir (["exec"] ++ args) return $ recordRun res ExecSuccess nullResult -- | Run the freeze command and return its result. cabal_freeze :: TestsPaths -> FilePath -> [String] -> IO Result cabal_freeze paths dir args = do res <- cabal paths dir (["freeze"] ++ args) return $ recordRun res FreezeSuccess nullResult -- | Run the install command and return its result. 
cabal_install :: TestsPaths -> FilePath -> [String] -> IO Result cabal_install paths dir args = do res <- cabal paths dir (["install"] ++ args) return $ recordRun res InstallSuccess nullResult -- | Run the sandbox command and return its result. cabal_sandbox :: TestsPaths -> FilePath -> [String] -> IO Result cabal_sandbox paths dir args = do res <- cabal paths dir (["sandbox"] ++ args) return $ recordRun res SandboxSuccess nullResult -- | Returns the command that was issued, the return code, and the output text. cabal :: TestsPaths -> FilePath -> [String] -> IO (String, ExitCode, String) cabal paths dir cabalArgs = do run (Just dir) (cabalPath paths) args where args = configFileArg : cabalArgs configFileArg = "--config-file=" ++ configPath paths -- | Returns the command that was issued, the return code, and the output text run :: Maybe FilePath -> String -> [String] -> IO (String, ExitCode, String) run cwd path args = do verbosity <- getVerbosity -- path is relative to the current directory; canonicalizePath makes it -- absolute, so that runProcess will find it even when changing directory. path' <- do pathExists <- doesFileExist path canonicalizePath (if pathExists then path else path <.> exeExtension) printRawCommandAndArgs verbosity path' args (readh, writeh) <- createPipe pid <- runProcess path' args cwd Nothing Nothing (Just writeh) (Just writeh) -- fork off a thread to start consuming the output out <- suckH [] readh hClose readh -- wait for the program to terminate exitcode <- waitForProcess pid let fullCmd = unwords (path' : args) return ("\"" ++ fullCmd ++ "\" in " ++ fromMaybe "" cwd, exitcode, out) where suckH output h = do eof <- hIsEOF h if eof then return (reverse output) else do c <- hGetChar h suckH (c:output) h ------------------------------------------------------------------------ -- * Test helpers assertCleanSucceeded :: Result -> Assertion assertCleanSucceeded result = unless (successful result) $ assertFailure $ "expected: \'cabal clean\' should succeed\n" ++ " output: " ++ outputText result assertExecSucceeded :: Result -> Assertion assertExecSucceeded result = unless (successful result) $ assertFailure $ "expected: \'cabal exec\' should succeed\n" ++ " output: " ++ outputText result assertExecFailed :: Result -> Assertion assertExecFailed result = when (successful result) $ assertFailure $ "expected: \'cabal exec\' should fail\n" ++ " output: " ++ outputText result assertFreezeSucceeded :: Result -> Assertion assertFreezeSucceeded result = unless (successful result) $ assertFailure $ "expected: \'cabal freeze\' should succeed\n" ++ " output: " ++ outputText result assertInstallSucceeded :: Result -> Assertion assertInstallSucceeded result = unless (successful result) $ assertFailure $ "expected: \'cabal install\' should succeed\n" ++ " output: " ++ outputText result assertSandboxSucceeded :: Result -> Assertion assertSandboxSucceeded result = unless (successful result) $ assertFailure $ "expected: \'cabal sandbox\' should succeed\n" ++ " output: " ++ outputText result ------------------------------------------------------------------------ -- Verbosity lookupEnv :: String -> IO (Maybe String) lookupEnv name = (fmap Just $ getEnv name) `E.catch` \ (e :: IOError) -> if isDoesNotExistError e then return Nothing else E.throw e -- TODO: Convert to a "-v" flag instead. 
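-- The VERBOSE environment variable mentioned in the module header is read
-- here and parsed with 'flagToVerbosity', so numeric values 0-3 are
-- accepted.  For example (illustrative invocation):
--
--   $ VERBOSE=3 cabal test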
getVerbosity :: IO Verbosity getVerbosity = do maybe normal (readEOrFail flagToVerbosity) `fmap` lookupEnv "VERBOSE" cabal-install-1.22.6.0/tests/PackageTests/Exec/0000755000000000000000000000000012540225656017247 5ustar0000000000000000cabal-install-1.22.6.0/tests/PackageTests/Exec/Check.hs0000644000000000000000000001275112540225656020626 0ustar0000000000000000{-# LANGUAGE CPP #-} module PackageTests.Exec.Check ( tests ) where import PackageTests.PackageTester import Test.Framework as TF (Test) import Test.Framework.Providers.HUnit (testCase) import Test.HUnit (assertBool) #if !MIN_VERSION_base(4,8,0) import Control.Applicative ((<$>)) #endif import Control.Monad (when) import Data.List (intercalate, isInfixOf) import System.FilePath (()) import System.Directory (getDirectoryContents) dir :: FilePath dir = packageTestsDirectory "Exec" tests :: TestsPaths -> [TF.Test] tests paths = [ testCase "exits with failure if given no argument" $ do result <- cabal_exec paths dir [] assertExecFailed result , testCase "prints error message if given no argument" $ do result <- cabal_exec paths dir [] assertExecFailed result let output = outputText result expected = "specify an executable to run" errMsg = "should have requested an executable be specified\n" ++ output assertBool errMsg $ expected `isInfixOf` (intercalate " " . lines $ output) , testCase "runs the given command" $ do result <- cabal_exec paths dir ["echo", "this", "string"] assertExecSucceeded result let output = outputText result expected = "this string" errMsg = "should have ran the given command\n" ++ output assertBool errMsg $ expected `isInfixOf` (intercalate " " . lines $ output) , testCase "can run executables installed in the sandbox" $ do -- Test that an executable installed into the sandbox can be found. -- We do this by removing any existing sandbox. Checking that the -- executable cannot be found. Creating a new sandbox. Installing -- the executable and checking it can be run. cleanPreviousBuilds paths assertMyExecutableNotFound paths assertPackageInstall paths result <- cabal_exec paths dir ["my-executable"] assertExecSucceeded result let output = outputText result expected = "This is my-executable" errMsg = "should have found a my-executable\n" ++ output assertBool errMsg $ expected `isInfixOf` (intercalate " " . lines $ output) , testCase "adds the sandbox bin directory to the PATH" $ do cleanPreviousBuilds paths assertMyExecutableNotFound paths assertPackageInstall paths result <- cabal_exec paths dir ["bash", "--", "-c", "my-executable"] assertExecSucceeded result let output = outputText result expected = "This is my-executable" errMsg = "should have found a my-executable\n" ++ output assertBool errMsg $ expected `isInfixOf` (intercalate " " . lines $ output) , testCase "configures GHC to use the sandbox" $ do let libNameAndVersion = "my-0.1" cleanPreviousBuilds paths assertPackageInstall paths assertMyLibIsNotAvailableOutsideofSandbox paths libNameAndVersion result <- cabal_exec paths dir ["ghc-pkg", "list"] assertExecSucceeded result let output = outputText result errMsg = "my library should have been found" assertBool errMsg $ libNameAndVersion `isInfixOf` (intercalate " " . 
lines $ output) -- , testCase "can find executables built from the package" $ do , testCase "configures cabal to use the sandbox" $ do let libNameAndVersion = "my-0.1" cleanPreviousBuilds paths assertPackageInstall paths assertMyLibIsNotAvailableOutsideofSandbox paths libNameAndVersion result <- cabal_exec paths dir ["bash", "--", "-c", "cd subdir ; cabal sandbox hc-pkg list"] assertExecSucceeded result let output = outputText result errMsg = "my library should have been found" assertBool errMsg $ libNameAndVersion `isInfixOf` (intercalate " " . lines $ output) ] cleanPreviousBuilds :: TestsPaths -> IO () cleanPreviousBuilds paths = do sandboxExists <- not . null . filter (== "cabal.sandbox.config") <$> getDirectoryContents dir assertCleanSucceeded =<< cabal_clean paths dir [] when sandboxExists $ do assertSandboxSucceeded =<< cabal_sandbox paths dir ["delete"] assertPackageInstall :: TestsPaths -> IO () assertPackageInstall paths = do assertSandboxSucceeded =<< cabal_sandbox paths dir ["init"] assertInstallSucceeded =<< cabal_install paths dir [] assertMyExecutableNotFound :: TestsPaths -> IO () assertMyExecutableNotFound paths = do result <- cabal_exec paths dir ["my-executable"] assertExecFailed result let output = outputText result expected = "cabal: The program 'my-executable' is required but it " ++ "could not be found" errMsg = "should not have found a my-executable\n" ++ output assertBool errMsg $ expected `isInfixOf` (intercalate " " . lines $ output) assertMyLibIsNotAvailableOutsideofSandbox :: TestsPaths -> String -> IO () assertMyLibIsNotAvailableOutsideofSandbox paths libNameAndVersion = do (_, _, output) <- run (Just $ dir) (ghcPkgPath paths) ["list"] assertBool "my library should not have been found" $ not $ libNameAndVersion `isInfixOf` (intercalate " " . 
lines $ output) cabal-install-1.22.6.0/tests/PackageTests/Freeze/0000755000000000000000000000000012540225656017603 5ustar0000000000000000cabal-install-1.22.6.0/tests/PackageTests/Freeze/my.cabal0000644000000000000000000000074012540225656021215 0ustar0000000000000000name: my version: 0.1 license: BSD3 cabal-version: >= 1.20.0 build-type: Simple library exposed-modules: Foo build-depends: base test-suite test-Foo type: exitcode-stdio-1.0 hs-source-dirs: tests main-is: test-Foo.hs build-depends: base, my, test-framework benchmark bench-Foo type: exitcode-stdio-1.0 hs-source-dirs: benchmarks main-is: benchmark-Foo.hs build-depends: base, my, criterion cabal-install-1.22.6.0/tests/PackageTests/Freeze/Check.hs0000644000000000000000000001050312540225656021153 0ustar0000000000000000{-# LANGUAGE ScopedTypeVariables #-} module PackageTests.Freeze.Check ( tests ) where import PackageTests.PackageTester import Test.Framework as TF (Test) import Test.Framework.Providers.HUnit (testCase) import Test.HUnit (assertBool) import qualified Control.Exception.Extensible as E import Data.List (intercalate, isInfixOf) import System.Directory (doesFileExist, removeFile) import System.FilePath (()) import System.IO.Error (isDoesNotExistError) dir :: FilePath dir = packageTestsDirectory "Freeze" tests :: TestsPaths -> [TF.Test] tests paths = [ testCase "runs without error" $ do removeCabalConfig result <- cabal_freeze paths dir [] assertFreezeSucceeded result , testCase "freezes direct dependencies" $ do removeCabalConfig result <- cabal_freeze paths dir [] assertFreezeSucceeded result c <- readCabalConfig assertBool ("should have frozen base\n" ++ c) $ " base ==" `isInfixOf` (intercalate " " $ lines $ c) , testCase "freezes transitory dependencies" $ do removeCabalConfig result <- cabal_freeze paths dir [] assertFreezeSucceeded result c <- readCabalConfig assertBool ("should have frozen ghc-prim\n" ++ c) $ " ghc-prim ==" `isInfixOf` (intercalate " " $ lines $ c) , testCase "does not freeze packages which are not dependend upon" $ do -- XXX Test this against a package installed in the sandbox but -- not depended upon. 
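          -- These tests assert against the raw text of the generated
          -- cabal.config file.  For a successful freeze that file holds a
          -- single constraints block, roughly of the following shape; the
          -- exact package versions depend on the build environment
          -- (illustrative only):
          --
          -- > constraints: base ==4.8.0.0,
          -- >              ghc-prim ==0.4.0.0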
removeCabalConfig result <- cabal_freeze paths dir [] assertFreezeSucceeded result c <- readCabalConfig assertBool ("should not have frozen exceptions\n" ++ c) $ not $ " exceptions ==" `isInfixOf` (intercalate " " $ lines $ c) , testCase "does not include a constraint for the package being frozen" $ do removeCabalConfig result <- cabal_freeze paths dir [] assertFreezeSucceeded result c <- readCabalConfig assertBool ("should not have frozen self\n" ++ c) $ not $ " my ==" `isInfixOf` (intercalate " " $ lines $ c) , testCase "--dry-run does not modify the cabal.config file" $ do removeCabalConfig result <- cabal_freeze paths dir ["--dry-run"] assertFreezeSucceeded result c <- doesFileExist $ dir "cabal.config" assertBool "cabal.config file should not have been created" (not c) , testCase "--enable-tests freezes test dependencies" $ do removeCabalConfig result <- cabal_freeze paths dir ["--enable-tests"] assertFreezeSucceeded result c <- readCabalConfig assertBool ("should have frozen test-framework\n" ++ c) $ " test-framework ==" `isInfixOf` (intercalate " " $ lines $ c) , testCase "--disable-tests does not freeze test dependencies" $ do removeCabalConfig result <- cabal_freeze paths dir ["--disable-tests"] assertFreezeSucceeded result c <- readCabalConfig assertBool ("should not have frozen test-framework\n" ++ c) $ not $ " test-framework ==" `isInfixOf` (intercalate " " $ lines $ c) , testCase "--enable-benchmarks freezes benchmark dependencies" $ do removeCabalConfig result <- cabal_freeze paths dir ["--disable-benchmarks"] assertFreezeSucceeded result c <- readCabalConfig assertBool ("should not have frozen criterion\n" ++ c) $ not $ " criterion ==" `isInfixOf` (intercalate " " $ lines $ c) , testCase "--disable-benchmarks does not freeze benchmark dependencies" $ do removeCabalConfig result <- cabal_freeze paths dir ["--disable-benchmarks"] assertFreezeSucceeded result c <- readCabalConfig assertBool ("should not have frozen criterion\n" ++ c) $ not $ " criterion ==" `isInfixOf` (intercalate " " $ lines $ c) ] removeCabalConfig :: IO () removeCabalConfig = do removeFile (dir "cabal.config") `E.catch` \ (e :: IOError) -> if isDoesNotExistError e then return () else E.throw e readCabalConfig :: IO String readCabalConfig = do readFile $ dir "cabal.config" cabal-install-1.22.6.0/tests/PackageTests/MultipleSource/0000755000000000000000000000000012540225656021337 5ustar0000000000000000cabal-install-1.22.6.0/tests/PackageTests/MultipleSource/Check.hs0000644000000000000000000000170112540225656022707 0ustar0000000000000000module PackageTests.MultipleSource.Check ( tests ) where import PackageTests.PackageTester import Test.Framework as TF (Test) import Test.Framework.Providers.HUnit (testCase) import Control.Monad (void, when) import System.Directory (doesDirectoryExist) import System.FilePath (()) dir :: FilePath dir = packageTestsDirectory "MultipleSource" tests :: TestsPaths -> [TF.Test] tests paths = [ testCase "finds second source of multiple source" $ do sandboxExists <- doesDirectoryExist $ dir ".cabal-sandbox" when sandboxExists $ void $ cabal_sandbox paths dir ["delete"] assertSandboxSucceeded =<< cabal_sandbox paths dir ["init"] assertSandboxSucceeded =<< cabal_sandbox paths dir ["add-source", "p"] assertSandboxSucceeded =<< cabal_sandbox paths dir ["add-source", "q"] assertInstallSucceeded =<< cabal_install paths dir ["q"] ] cabal-install-1.22.6.0/tests/UnitTests/0000755000000000000000000000000012540225656015747 
5ustar0000000000000000cabal-install-1.22.6.0/tests/UnitTests/Distribution/0000755000000000000000000000000012540225656020426 5ustar0000000000000000cabal-install-1.22.6.0/tests/UnitTests/Distribution/Client/0000755000000000000000000000000012540225656021644 5ustar0000000000000000cabal-install-1.22.6.0/tests/UnitTests/Distribution/Client/UserConfig.hs0000644000000000000000000000730112540225656024245 0ustar0000000000000000{-# LANGUAGE CPP #-} module UnitTests.Distribution.Client.UserConfig ( tests ) where import Control.Exception (bracket) import Data.List (sort, nub) #if !MIN_VERSION_base(4,8,0) import Data.Monoid #endif import System.Directory (getCurrentDirectory, removeDirectoryRecursive, createDirectoryIfMissing) import System.FilePath (takeDirectory) import Test.Framework as TF (Test) import Test.Framework.Providers.HUnit (testCase) import Test.HUnit (Assertion, assertBool) import Distribution.Client.Compat.Environment (lookupEnv, setEnv) import Distribution.Client.Config import Distribution.Utils.NubList (fromNubList) import Distribution.Client.Setup (GlobalFlags (..), InstallFlags (..)) import Distribution.Simple.Setup (ConfigFlags (..), fromFlag) import Distribution.Verbosity (silent) tests :: [TF.Test] tests = [ testCase "nullDiffOnCreate" nullDiffOnCreateTest , testCase "canDetectDifference" canDetectDifference , testCase "canUpdateConfig" canUpdateConfig , testCase "doubleUpdateConfig" doubleUpdateConfig ] nullDiffOnCreateTest :: Assertion nullDiffOnCreateTest = bracketTest . const $ do -- Create a new default config file in our test directory. _ <- loadConfig silent mempty mempty -- Now we read it in and compare it against the default. diff <- userConfigDiff mempty assertBool (unlines $ "Following diff should be empty:" : diff) $ null diff canDetectDifference :: Assertion canDetectDifference = bracketTest . const $ do -- Create a new default config file in our test directory. _ <- loadConfig silent mempty mempty cabalFile <- defaultConfigFile appendFile cabalFile "verbose: 0\n" diff <- userConfigDiff mempty assertBool (unlines $ "Should detect a difference:" : diff) $ diff == [ "- verbose: 1", "+ verbose: 0" ] canUpdateConfig :: Assertion canUpdateConfig = bracketTest . const $ do cabalFile <- defaultConfigFile createDirectoryIfMissing True $ takeDirectory cabalFile -- Write a trivial cabal file. writeFile cabalFile "tests: True\n" -- Update the config file. userConfigUpdate silent mempty -- Load it again. updated <- loadConfig silent mempty mempty assertBool ("Field 'tests' should be True") $ fromFlag (configTests $ savedConfigureFlags updated) doubleUpdateConfig :: Assertion doubleUpdateConfig = bracketTest . const $ do -- Create a new default config file in our test directory. _ <- loadConfig silent mempty mempty -- Update it. userConfigUpdate silent mempty userConfigUpdate silent mempty -- Load it again. updated <- loadConfig silent mempty mempty assertBool ("Field 'remote-repo' doesn't contain duplicates") $ listUnique (map show . fromNubList . globalRemoteRepos $ savedGlobalFlags updated) assertBool ("Field 'extra-prog-path' doesn't contain duplicates") $ listUnique (map show . fromNubList . configProgramPathExtra $ savedConfigureFlags updated) assertBool ("Field 'build-summary' doesn't contain duplicates") $ listUnique (map show . fromNubList . 
installSummaryFile $ savedInstallFlags updated) listUnique :: Ord a => [a] -> Bool listUnique xs = let sorted = sort xs in nub sorted == xs bracketTest :: ((FilePath, FilePath) -> IO ()) -> Assertion bracketTest = bracket testSetup testTearDown where testSetup :: IO (FilePath, FilePath) testSetup = do Just oldHome <- lookupEnv "HOME" testdir <- fmap (++ "/test-user-config") getCurrentDirectory setEnv "HOME" testdir return (oldHome, testdir) testTearDown :: (FilePath, FilePath) -> IO () testTearDown (oldHome, testdir) = do setEnv "HOME" oldHome removeDirectoryRecursive testdir cabal-install-1.22.6.0/tests/UnitTests/Distribution/Client/Targets.hs0000644000000000000000000000412512540225656023613 0ustar0000000000000000module UnitTests.Distribution.Client.Targets ( tests ) where import Distribution.Client.Targets (UserConstraint (..), readUserConstraint) import Distribution.Compat.ReadP (ReadP, readP_to_S) import Distribution.Package (PackageName (..)) import Distribution.ParseUtils (parseCommaList) import Distribution.Text (parse) import Test.Framework as TF (Test) import Test.Framework.Providers.HUnit (testCase) import Test.HUnit (Assertion, assertEqual) import Data.Char (isSpace) tests :: [TF.Test] tests = [ testCase "readUserConstraint" readUserConstraintTest , testCase "parseUserConstraint" parseUserConstraintTest , testCase "readUserConstraints" readUserConstraintsTest ] readUserConstraintTest :: Assertion readUserConstraintTest = assertEqual ("Couldn't read constraint: '" ++ constr ++ "'") expected actual where pkgName = "template-haskell" constr = pkgName ++ " installed" expected = UserConstraintInstalled (PackageName pkgName) actual = let (Right r) = readUserConstraint constr in r parseUserConstraintTest :: Assertion parseUserConstraintTest = assertEqual ("Couldn't parse constraint: '" ++ constr ++ "'") expected actual where pkgName = "template-haskell" constr = pkgName ++ " installed" expected = [UserConstraintInstalled (PackageName pkgName)] actual = [ x | (x, ys) <- readP_to_S parseUserConstraint constr , all isSpace ys] parseUserConstraint :: ReadP r UserConstraint parseUserConstraint = parse readUserConstraintsTest :: Assertion readUserConstraintsTest = assertEqual ("Couldn't read constraints: '" ++ constr ++ "'") expected actual where pkgName = "template-haskell" constr = pkgName ++ " installed" expected = [[UserConstraintInstalled (PackageName pkgName)]] actual = [ x | (x, ys) <- readP_to_S parseUserConstraints constr , all isSpace ys] parseUserConstraints :: ReadP r [UserConstraint] parseUserConstraints = parseCommaList parse cabal-install-1.22.6.0/tests/UnitTests/Distribution/Client/Sandbox.hs0000644000000000000000000000172512540225656023603 0ustar0000000000000000module UnitTests.Distribution.Client.Sandbox ( tests ) where import Distribution.Client.Sandbox (withSandboxBinDirOnSearchPath) import Test.Framework as TF (Test) import Test.Framework.Providers.HUnit (testCase) import Test.HUnit (Assertion, assertBool, assertEqual) import System.FilePath (getSearchPath, ()) tests :: [TF.Test] tests = [ testCase "sandboxBinDirOnSearchPath" sandboxBinDirOnSearchPathTest , testCase "oldSearchPathRestored" oldSearchPathRestoreTest ] sandboxBinDirOnSearchPathTest :: Assertion sandboxBinDirOnSearchPathTest = withSandboxBinDirOnSearchPath "foo" $ do r <- getSearchPath assertBool "'foo/bin' not on search path" $ ("foo" "bin") `elem` r oldSearchPathRestoreTest :: Assertion oldSearchPathRestoreTest = do r <- getSearchPath withSandboxBinDirOnSearchPath "foo" $ return () r' <- getSearchPath 
assertEqual "Old search path wasn't restored" r r' cabal-install-1.22.6.0/tests/UnitTests/Distribution/Client/Dependency/0000755000000000000000000000000012540225656023722 5ustar0000000000000000cabal-install-1.22.6.0/tests/UnitTests/Distribution/Client/Dependency/Modular/0000755000000000000000000000000012540225656025325 5ustar0000000000000000cabal-install-1.22.6.0/tests/UnitTests/Distribution/Client/Dependency/Modular/PSQ.hs0000644000000000000000000000112212540225656026320 0ustar0000000000000000module UnitTests.Distribution.Client.Dependency.Modular.PSQ ( tests ) where import Distribution.Client.Dependency.Modular.PSQ import Test.Framework as TF (Test) import Test.Framework.Providers.QuickCheck2 tests :: [TF.Test] tests = [ testProperty "splitsAltImplementation" splitsTest ] -- | Original splits implementation splits' :: PSQ k a -> PSQ k (a, PSQ k a) splits' xs = casePSQ xs (PSQ []) (\ k v ys -> cons k (v, ys) (fmap (\ (w, zs) -> (w, cons k v zs)) (splits' ys))) splitsTest :: [(Int, Int)] -> Bool splitsTest psq = splits' (PSQ psq) == splits (PSQ psq) cabal-install-1.22.6.0/Distribution/0000755000000000000000000000000012540225656015322 5ustar0000000000000000cabal-install-1.22.6.0/Distribution/Client/0000755000000000000000000000000012540225656016540 5ustar0000000000000000cabal-install-1.22.6.0/Distribution/Client/InstallSymlink.hs0000644000000000000000000002552512540225656022062 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.InstallSymlink -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Managing installing binaries with symlinks. ----------------------------------------------------------------------------- module Distribution.Client.InstallSymlink ( symlinkBinaries, symlinkBinary, ) where #if mingw32_HOST_OS import Distribution.Package (PackageIdentifier) import Distribution.Client.InstallPlan (InstallPlan) import Distribution.Client.Setup (InstallFlags) import Distribution.Simple.Setup (ConfigFlags) import Distribution.Simple.Compiler symlinkBinaries :: Compiler -> ConfigFlags -> InstallFlags -> InstallPlan -> IO [(PackageIdentifier, String, FilePath)] symlinkBinaries _ _ _ _ = return [] symlinkBinary :: FilePath -> FilePath -> String -> String -> IO Bool symlinkBinary _ _ _ _ = fail "Symlinking feature not available on Windows" #else import Distribution.Client.Types ( SourcePackage(..), ReadyPackage(..), enableStanzas ) import Distribution.Client.Setup ( InstallFlags(installSymlinkBinDir) ) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.InstallPlan (InstallPlan) import Distribution.Package ( PackageIdentifier, Package(packageId), mkPackageKey, PackageKey ) import Distribution.Compiler ( CompilerId(..) 
) import qualified Distribution.PackageDescription as PackageDescription import Distribution.PackageDescription ( PackageDescription ) import Distribution.PackageDescription.Configuration ( finalizePackageDescription ) import Distribution.Simple.Setup ( ConfigFlags(..), fromFlag, fromFlagOrDefault, flagToMaybe ) import qualified Distribution.Simple.InstallDirs as InstallDirs import qualified Distribution.InstalledPackageInfo as Installed import Distribution.Simple.Compiler ( Compiler, CompilerInfo(..), packageKeySupported ) import System.Posix.Files ( getSymbolicLinkStatus, isSymbolicLink, createSymbolicLink , removeLink ) import System.Directory ( canonicalizePath ) import System.FilePath ( (), splitPath, joinPath, isAbsolute ) import Prelude hiding (ioError) import System.IO.Error ( isDoesNotExistError, ioError ) import Distribution.Compat.Exception ( catchIO ) import Control.Exception ( assert ) import Data.Maybe ( catMaybes ) -- | We would like by default to install binaries into some location that is on -- the user's PATH. For per-user installations on Unix systems that basically -- means the @~/bin/@ directory. On the majority of platforms the @~/bin/@ -- directory will be on the user's PATH. However some people are a bit nervous -- about letting a package manager install programs into @~/bin/@. -- -- A compromise solution is that instead of installing binaries directly into -- @~/bin/@, we could install them in a private location under @~/.cabal/bin@ -- and then create symlinks in @~/bin/@. We can be careful when setting up the -- symlinks that we do not overwrite any binary that the user installed. We can -- check if it was a symlink we made because it would point to the private dir -- where we install our binaries. This means we can install normally without -- worrying and in a later phase set up symlinks, and if that fails then we -- report it to the user, but even in this case the package is still in an OK -- installed state. -- -- This is an optional feature that users can choose to use or not. It is -- controlled from the config file. Of course it only works on POSIX systems -- with symlinks so is not available to Windows users. -- symlinkBinaries :: Compiler -> ConfigFlags -> InstallFlags -> InstallPlan -> IO [(PackageIdentifier, String, FilePath)] symlinkBinaries comp configFlags installFlags plan = case flagToMaybe (installSymlinkBinDir installFlags) of Nothing -> return [] Just symlinkBinDir | null exes -> return [] | otherwise -> do publicBinDir <- canonicalizePath symlinkBinDir -- TODO: do we want to do this here? 
: -- createDirectoryIfMissing True publicBinDir fmap catMaybes $ sequence [ do privateBinDir <- pkgBinDir pkg pkg_key ok <- symlinkBinary publicBinDir privateBinDir publicExeName privateExeName if ok then return Nothing else return (Just (pkgid, publicExeName, privateBinDir privateExeName)) | (ReadyPackage _ _flags _ deps, pkg, exe) <- exes , let pkgid = packageId pkg pkg_key = mkPackageKey (packageKeySupported comp) pkgid (map Installed.packageKey deps) [] publicExeName = PackageDescription.exeName exe privateExeName = prefix ++ publicExeName ++ suffix prefix = substTemplate pkgid pkg_key prefixTemplate suffix = substTemplate pkgid pkg_key suffixTemplate ] where exes = [ (cpkg, pkg, exe) | InstallPlan.Installed cpkg _ <- InstallPlan.toList plan , let pkg = pkgDescription cpkg , exe <- PackageDescription.executables pkg , PackageDescription.buildable (PackageDescription.buildInfo exe) ] pkgDescription :: ReadyPackage -> PackageDescription pkgDescription (ReadyPackage (SourcePackage _ pkg _ _) flags stanzas _) = case finalizePackageDescription flags (const True) platform cinfo [] (enableStanzas stanzas pkg) of Left _ -> error "finalizePackageDescription ReadyPackage failed" Right (desc, _) -> desc -- This is sadly rather complicated. We're kind of re-doing part of the -- configuration for the package. :-( pkgBinDir :: PackageDescription -> PackageKey -> IO FilePath pkgBinDir pkg pkg_key = do defaultDirs <- InstallDirs.defaultInstallDirs compilerFlavor (fromFlag (configUserInstall configFlags)) (PackageDescription.hasLibs pkg) let templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault defaultDirs (configInstallDirs configFlags) absoluteDirs = InstallDirs.absoluteInstallDirs (packageId pkg) pkg_key cinfo InstallDirs.NoCopyDest platform templateDirs canonicalizePath (InstallDirs.bindir absoluteDirs) substTemplate pkgid pkg_key = InstallDirs.fromPathTemplate . InstallDirs.substPathTemplate env where env = InstallDirs.initialPathTemplateEnv pkgid pkg_key cinfo platform fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "") prefixTemplate = fromFlagTemplate (configProgPrefix configFlags) suffixTemplate = fromFlagTemplate (configProgSuffix configFlags) platform = InstallPlan.planPlatform plan cinfo = InstallPlan.planCompiler plan (CompilerId compilerFlavor _) = compilerInfoId cinfo symlinkBinary :: FilePath -- ^ The canonical path of the public bin dir -- eg @/home/user/bin@ -> FilePath -- ^ The canonical path of the private bin dir -- eg @/home/user/.cabal/bin@ -> String -- ^ The name of the executable to go in the public -- bin dir, eg @foo@ -> String -- ^ The name of the executable to in the private bin -- dir, eg @foo-1.0@ -> IO Bool -- ^ If creating the symlink was successful. @False@ -- if there was another file there already that we -- did not own. Other errors like permission errors -- just propagate as exceptions. symlinkBinary publicBindir privateBindir publicName privateName = do ok <- targetOkToOverwrite (publicBindir publicName) (privateBindir privateName) case ok of NotOurFile -> return False NotExists -> mkLink >> return True OkToOverwrite -> rmLink >> mkLink >> return True where relativeBindir = makeRelative publicBindir privateBindir mkLink = createSymbolicLink (relativeBindir privateName) (publicBindir publicName) rmLink = removeLink (publicBindir publicName) -- | Check a file path of a symlink that we would like to create to see if it -- is OK. 
For it to be OK to overwrite it must either not already exist yet or -- be a symlink to our target (in which case we can assume ownership). -- targetOkToOverwrite :: FilePath -- ^ The file path of the symlink to the private -- binary that we would like to create -> FilePath -- ^ The canonical path of the private binary. -- Use 'canonicalizePath' to make this. -> IO SymlinkStatus targetOkToOverwrite symlink target = handleNotExist $ do status <- getSymbolicLinkStatus symlink if not (isSymbolicLink status) then return NotOurFile else do target' <- canonicalizePath symlink -- This relies on canonicalizePath handling symlinks if target == target' then return OkToOverwrite else return NotOurFile where handleNotExist action = catchIO action $ \ioexception -> -- If the target doesn't exist then there's no problem overwriting it! if isDoesNotExistError ioexception then return NotExists else ioError ioexception data SymlinkStatus = NotExists -- ^ The file doesn't exist so we can make a symlink. | OkToOverwrite -- ^ A symlink already exists, though it is ours. We'll -- have to delete it first before we make a new symlink. | NotOurFile -- ^ A file already exists and it is not one of our existing -- symlinks (either because it is not a symlink or because -- it points somewhere other than our managed space). deriving Show -- | Take two canonical paths and produce a relative path to get from the first -- to the second, even if it means adding @..@ path components. -- makeRelative :: FilePath -> FilePath -> FilePath makeRelative a b = assert (isAbsolute a && isAbsolute b) $ let as = splitPath a bs = splitPath b commonLen = length $ takeWhile id $ zipWith (==) as bs in joinPath $ [ ".." | _ <- drop commonLen as ] ++ drop commonLen bs #endif cabal-install-1.22.6.0/Distribution/Client/FetchUtils.hs0000644000000000000000000001461312540225656021153 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.FetchUtils -- Copyright : (c) David Himmelstrup 2005 -- Duncan Coutts 2011 -- License : BSD-like -- -- Maintainer : cabal-devel@gmail.com -- Stability : provisional -- Portability : portable -- -- Functions for fetching packages ----------------------------------------------------------------------------- module Distribution.Client.FetchUtils ( -- * fetching packages fetchPackage, isFetched, checkFetched, -- ** specifically for repo packages fetchRepoTarball, -- * fetching other things downloadIndex, ) where import Distribution.Client.Types import Distribution.Client.HttpUtils ( downloadURI, isOldHackageURI, DownloadResult(..) ) import Distribution.Package ( PackageId, packageName, packageVersion ) import Distribution.Simple.Utils ( notice, info, setupMessage ) import Distribution.Text ( display ) import Distribution.Verbosity ( Verbosity ) import Data.Maybe import System.Directory ( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory ) import System.IO ( openTempFile, hClose ) import System.FilePath ( (), (<.>) ) import qualified System.FilePath.Posix as FilePath.Posix ( combine, joinPath ) import Network.URI ( URI(uriPath) ) -- ------------------------------------------------------------ -- * Actually fetch things -- ------------------------------------------------------------ -- | Returns @True@ if the package has already been fetched -- or does not need fetching. 
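-- A small usage sketch (the values are illustrative, not real packages):
--
-- > isFetched (LocalUnpackedPackage "/path/to/pkg")   -- yields True
-- > isFetched (RemoteTarballPackage uri Nothing)      -- yields False, no local copy yet
-- > isFetched (RemoteTarballPackage uri (Just file))  -- yields True, already downloaded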
-- isFetched :: PackageLocation (Maybe FilePath) -> IO Bool isFetched loc = case loc of LocalUnpackedPackage _dir -> return True LocalTarballPackage _file -> return True RemoteTarballPackage _uri local -> return (isJust local) RepoTarballPackage repo pkgid _ -> doesFileExist (packageFile repo pkgid) checkFetched :: PackageLocation (Maybe FilePath) -> IO (Maybe (PackageLocation FilePath)) checkFetched loc = case loc of LocalUnpackedPackage dir -> return (Just $ LocalUnpackedPackage dir) LocalTarballPackage file -> return (Just $ LocalTarballPackage file) RemoteTarballPackage uri (Just file) -> return (Just $ RemoteTarballPackage uri file) RepoTarballPackage repo pkgid (Just file) -> return (Just $ RepoTarballPackage repo pkgid file) RemoteTarballPackage _uri Nothing -> return Nothing RepoTarballPackage repo pkgid Nothing -> do let file = packageFile repo pkgid exists <- doesFileExist file if exists then return (Just $ RepoTarballPackage repo pkgid file) else return Nothing -- | Fetch a package if we don't have it already. -- fetchPackage :: Verbosity -> PackageLocation (Maybe FilePath) -> IO (PackageLocation FilePath) fetchPackage verbosity loc = case loc of LocalUnpackedPackage dir -> return (LocalUnpackedPackage dir) LocalTarballPackage file -> return (LocalTarballPackage file) RemoteTarballPackage uri (Just file) -> return (RemoteTarballPackage uri file) RepoTarballPackage repo pkgid (Just file) -> return (RepoTarballPackage repo pkgid file) RemoteTarballPackage uri Nothing -> do path <- downloadTarballPackage uri return (RemoteTarballPackage uri path) RepoTarballPackage repo pkgid Nothing -> do local <- fetchRepoTarball verbosity repo pkgid return (RepoTarballPackage repo pkgid local) where downloadTarballPackage uri = do notice verbosity ("Downloading " ++ show uri) tmpdir <- getTemporaryDirectory (path, hnd) <- openTempFile tmpdir "cabal-.tar.gz" hClose hnd _ <- downloadURI verbosity uri path return path -- | Fetch a repo package if we don't have it already. -- fetchRepoTarball :: Verbosity -> Repo -> PackageId -> IO FilePath fetchRepoTarball verbosity repo pkgid = do fetched <- doesFileExist (packageFile repo pkgid) if fetched then do info verbosity $ display pkgid ++ " has already been downloaded." return (packageFile repo pkgid) else do setupMessage verbosity "Downloading" pkgid downloadRepoPackage where downloadRepoPackage = case repoKind repo of Right LocalRepo -> return (packageFile repo pkgid) Left remoteRepo -> do let uri = packageURI remoteRepo pkgid dir = packageDir repo pkgid path = packageFile repo pkgid createDirectoryIfMissing True dir _ <- downloadURI verbosity uri path return path -- | Downloads an index file to [config-dir/packages/serv-id]. -- downloadIndex :: Verbosity -> RemoteRepo -> FilePath -> IO DownloadResult downloadIndex verbosity repo cacheDir = do let uri = (remoteRepoURI repo) { uriPath = uriPath (remoteRepoURI repo) `FilePath.Posix.combine` "00-index.tar.gz" } path = cacheDir "00-index" <.> "tar.gz" createDirectoryIfMissing True cacheDir downloadURI verbosity uri path -- ------------------------------------------------------------ -- * Path utilities -- ------------------------------------------------------------ -- | Generate the full path to the locally cached copy of -- the tarball for a given @PackageIdentifer@. 
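-- For example, for a (hypothetical) package id @foo-1.0@ the cache layout is:
--
-- > packageDir  repo pkgid == repoLocalDir repo </> "foo" </> "1.0"
-- > packageFile repo pkgid == packageDir repo pkgid </> "foo-1.0.tar.gz"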
-- packageFile :: Repo -> PackageId -> FilePath packageFile repo pkgid = packageDir repo pkgid </> display pkgid <.> "tar.gz" -- | Generate the full path to the directory where the local cached copy of -- the tarball for a given @PackageIdentifier@ is stored. -- packageDir :: Repo -> PackageId -> FilePath packageDir repo pkgid = repoLocalDir repo </> display (packageName pkgid) </> display (packageVersion pkgid) -- | Generate the URI of the tarball for a given package. -- packageURI :: RemoteRepo -> PackageId -> URI packageURI repo pkgid | isOldHackageURI (remoteRepoURI repo) = (remoteRepoURI repo) { uriPath = FilePath.Posix.joinPath [uriPath (remoteRepoURI repo) ,display (packageName pkgid) ,display (packageVersion pkgid) ,display pkgid <.> "tar.gz"] } packageURI repo pkgid = (remoteRepoURI repo) { uriPath = FilePath.Posix.joinPath [uriPath (remoteRepoURI repo) ,"package" ,display pkgid <.> "tar.gz"] } cabal-install-1.22.6.0/Distribution/Client/PackageUtils.hs0000644000000000000000000000234112540225656021450 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.PackageUtils -- Copyright : (c) Duncan Coutts 2010 -- License : BSD-like -- -- Maintainer : cabal-devel@gmail.com -- Stability : provisional -- Portability : portable -- -- Various package description utils that should be in the Cabal lib ----------------------------------------------------------------------------- module Distribution.Client.PackageUtils ( externalBuildDepends, ) where import Distribution.Package ( packageVersion, packageName, Dependency(..) ) import Distribution.PackageDescription ( PackageDescription(..) ) import Distribution.Version ( withinRange ) -- | The list of dependencies that refer to external packages -- rather than internal package components. -- externalBuildDepends :: PackageDescription -> [Dependency] externalBuildDepends pkg = filter (not . internal) (buildDepends pkg) where -- True if this dependency is an internal one (depends on a library -- defined in the same package). internal (Dependency depName versionRange) = depName == packageName pkg && packageVersion pkg `withinRange` versionRange cabal-install-1.22.6.0/Distribution/Client/Exec.hs0000644000000000000000000001136312540225656017764 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Exec -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Implementation of the 'exec' command. Runs an arbitrary executable in an -- environment suitable for making use of the sandbox.
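-- For example, running
--
-- > cabal exec ghc
--
-- inside a sandboxed project starts @ghc@ with @GHC_PACKAGE_PATH@ pointing at
-- the sandbox package DB (followed by the global DB), and with
-- @CABAL_SANDBOX_PACKAGE_PATH@ and @CABAL_SANDBOX_CONFIG@ set, so the compiler
-- sees the sandbox's packages. (A sketch of the behaviour implemented below.)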
----------------------------------------------------------------------------- module Distribution.Client.Exec ( exec ) where import qualified Distribution.Simple.GHC as GHC import qualified Distribution.Simple.GHCJS as GHCJS import Distribution.Client.Sandbox (getSandboxConfigFilePath) import Distribution.Client.Sandbox.PackageEnvironment (sandboxPackageDBPath) import Distribution.Client.Sandbox.Types (UseSandbox (..)) import Distribution.Simple.Compiler (Compiler, CompilerFlavor(..), compilerFlavor) import Distribution.Simple.Program (ghcProgram, ghcjsProgram, lookupProgram) import Distribution.Simple.Program.Db (ProgramDb, requireProgram, modifyProgramSearchPath) import Distribution.Simple.Program.Find (ProgramSearchPathEntry(..)) import Distribution.Simple.Program.Run (programInvocation, runProgramInvocation) import Distribution.Simple.Program.Types ( simpleProgram, ConfiguredProgram(..) ) import Distribution.Simple.Utils (die) import Distribution.System (Platform) import Distribution.Verbosity (Verbosity) import System.FilePath (searchPathSeparator, ()) #if !MIN_VERSION_base(4,8,0) import Control.Applicative ((<$>)) import Data.Monoid (mempty) #endif -- | Execute the given command in the package's environment. -- -- The given command is executed with GHC configured to use the correct -- package database and with the sandbox bin directory added to the PATH. exec :: Verbosity -> UseSandbox -> Compiler -> Platform -> ProgramDb -> [String] -> IO () exec verbosity useSandbox comp platform programDb extraArgs = case extraArgs of (exe:args) -> do program <- requireProgram' verbosity useSandbox programDb exe env <- ((++) (programOverrideEnv program)) <$> environmentOverrides let invocation = programInvocation program { programOverrideEnv = env } args runProgramInvocation verbosity invocation [] -> die "Please specify an executable to run" where environmentOverrides = case useSandbox of NoSandbox -> return [] (UseSandbox sandboxDir) -> sandboxEnvironment verbosity sandboxDir comp platform programDb -- | Return the package's sandbox environment. -- -- The environment sets GHC_PACKAGE_PATH so that GHC will use the sandbox. sandboxEnvironment :: Verbosity -> FilePath -> Compiler -> Platform -> ProgramDb -> IO [(String, Maybe String)] sandboxEnvironment verbosity sandboxDir comp platform programDb = case compilerFlavor comp of GHC -> env GHC.getGlobalPackageDB ghcProgram "GHC_PACKAGE_PATH" GHCJS -> env GHCJS.getGlobalPackageDB ghcjsProgram "GHCJS_PACKAGE_PATH" _ -> die "exec only works with GHC and GHCJS" where env getGlobalPackageDB hcProgram packagePathEnvVar = do let Just program = lookupProgram hcProgram programDb gDb <- getGlobalPackageDB verbosity program sandboxConfigFilePath <- getSandboxConfigFilePath mempty let compilerPackagePath = hcPackagePath gDb return [ (packagePathEnvVar, compilerPackagePath) , ("CABAL_SANDBOX_PACKAGE_PATH", compilerPackagePath) , ("CABAL_SANDBOX_CONFIG", Just sandboxConfigFilePath) ] hcPackagePath gDb = let s = sandboxPackageDBPath sandboxDir comp platform in Just $ prependToSearchPath gDb s prependToSearchPath path newValue = newValue ++ [searchPathSeparator] ++ path -- | Check that a program is configured and available to be run. If -- a sandbox is available check in the sandbox's directory. 
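-- For example (all names hypothetical):
--
-- > requireProgram' verbosity (UseSandbox "/src/proj/.cabal-sandbox") progDb "hlint"
--
-- consults @/src/proj/.cabal-sandbox/bin@ before the entries already on the
-- program search path.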
requireProgram' :: Verbosity -> UseSandbox -> ProgramDb -> String -> IO ConfiguredProgram requireProgram' verbosity useSandbox programDb exe = do (program, _) <- requireProgram verbosity (simpleProgram exe) updateSearchPath return program where updateSearchPath = flip modifyProgramSearchPath programDb $ \searchPath -> case useSandbox of NoSandbox -> searchPath UseSandbox sandboxDir -> ProgramSearchPathDir (sandboxDir "bin") : searchPath cabal-install-1.22.6.0/Distribution/Client/SrcDist.hs0000644000000000000000000001373412540225656020457 0ustar0000000000000000-- Implements the \"@.\/cabal sdist@\" command, which creates a source -- distribution for this package. That is, packs up the source code -- into a tarball, making use of the corresponding Cabal module. module Distribution.Client.SrcDist ( sdist ) where import Distribution.Client.SetupWrapper ( SetupScriptOptions(..), defaultSetupScriptOptions, setupWrapper ) import Distribution.Client.Tar (createTarGzFile) import Distribution.Package ( Package(..) ) import Distribution.PackageDescription ( PackageDescription ) import Distribution.PackageDescription.Configuration ( flattenPackageDescription ) import Distribution.PackageDescription.Parse ( readPackageDescription ) import Distribution.Simple.Utils ( createDirectoryIfMissingVerbose, defaultPackageDesc , die, notice, withTempDirectory ) import Distribution.Client.Setup ( SDistFlags(..), SDistExFlags(..), ArchiveFormat(..) ) import Distribution.Simple.Setup ( Flag(..), sdistCommand, flagToList, fromFlag, fromFlagOrDefault ) import Distribution.Simple.BuildPaths ( srcPref) import Distribution.Simple.Program (requireProgram, simpleProgram, programPath) import Distribution.Simple.Program.Db (emptyProgramDb) import Distribution.Text ( display ) import Distribution.Verbosity (Verbosity) import Distribution.Version (Version(..), orLaterVersion) import System.FilePath ((), (<.>)) import Control.Monad (when, unless) import System.Directory (doesFileExist, removeFile, canonicalizePath) import System.Process (runProcess, waitForProcess) import System.Exit (ExitCode(..)) -- |Create a source distribution. sdist :: SDistFlags -> SDistExFlags -> IO () sdist flags exflags = do pkg <- return . flattenPackageDescription =<< readPackageDescription verbosity =<< defaultPackageDesc verbosity let withDir = if not needMakeArchive then (\f -> f tmpTargetDir) else withTempDirectory verbosity tmpTargetDir "sdist." -- 'withTempDir' fails if we don't create 'tmpTargetDir'... when needMakeArchive $ createDirectoryIfMissingVerbose verbosity True tmpTargetDir withDir $ \tmpDir -> do let outDir = if isOutDirectory then tmpDir else tmpDir tarBallName pkg flags' = (if not needMakeArchive then flags else flags { sDistDirectory = Flag outDir }) unless isListSources $ createDirectoryIfMissingVerbose verbosity True outDir -- Run 'setup sdist --output-directory=tmpDir' (or -- '--list-source'/'--output-directory=someOtherDir') in case we were passed -- those options. setupWrapper verbosity setupOpts (Just pkg) sdistCommand (const flags') [] -- Unless we were given --list-sources or --output-directory ourselves, -- create an archive. when needMakeArchive $ createArchive verbosity pkg tmpDir distPref when isOutDirectory $ notice verbosity $ "Source directory created: " ++ tmpTargetDir when isListSources $ notice verbosity $ "List of package sources written to file '" ++ (fromFlag . sDistListSources $ flags) ++ "'" where flagEnabled f = not . null . flagToList . 
f $ flags isListSources = flagEnabled sDistListSources isOutDirectory = flagEnabled sDistDirectory needMakeArchive = not (isListSources || isOutDirectory) verbosity = fromFlag (sDistVerbosity flags) distPref = fromFlag (sDistDistPref flags) tmpTargetDir = fromFlagOrDefault (srcPref distPref) (sDistDirectory flags) setupOpts = defaultSetupScriptOptions { -- The '--output-directory' sdist flag was introduced in Cabal 1.12, and -- '--list-sources' in 1.17. useCabalVersion = if isListSources then orLaterVersion $ Version [1,17,0] [] else orLaterVersion $ Version [1,12,0] [] } format = fromFlag (sDistFormat exflags) createArchive = case format of TargzFormat -> createTarGzArchive ZipFormat -> createZipArchive tarBallName :: PackageDescription -> String tarBallName = display . packageId -- | Create a tar.gz archive from a tree of source files. createTarGzArchive :: Verbosity -> PackageDescription -> FilePath -> FilePath -> IO () createTarGzArchive verbosity pkg tmpDir targetPref = do createTarGzFile tarBallFilePath tmpDir (tarBallName pkg) notice verbosity $ "Source tarball created: " ++ tarBallFilePath where tarBallFilePath = targetPref </> tarBallName pkg <.> "tar.gz" -- | Create a zip archive from a tree of source files. createZipArchive :: Verbosity -> PackageDescription -> FilePath -> FilePath -> IO () createZipArchive verbosity pkg tmpDir targetPref = do let dir = tarBallName pkg zipfile = targetPref </> dir <.> "zip" (zipProg, _) <- requireProgram verbosity zipProgram emptyProgramDb -- zip has an annoying habit of updating the target rather than creating -- it from scratch. While that might sound like an optimisation, it doesn't -- remove files already in the archive that are no longer present in the -- uncompressed tree. alreadyExists <- doesFileExist zipfile when alreadyExists $ removeFile zipfile -- We call zip with a different CWD, so have to make the path -- absolute. Can't just use 'canonicalizePath zipfile' since this function -- requires its argument to refer to an existing file. zipfileAbs <- fmap (</> dir <.> "zip") .
canonicalizePath $ targetPref --TODO: use runProgramInvocation, but has to be able to set CWD hnd <- runProcess (programPath zipProg) ["-q", "-r", zipfileAbs, dir] (Just tmpDir) Nothing Nothing Nothing Nothing exitCode <- waitForProcess hnd unless (exitCode == ExitSuccess) $ die $ "Generating the zip file failed " ++ "(zip returned exit code " ++ show exitCode ++ ")" notice verbosity $ "Source zip archive created: " ++ zipfile where zipProgram = simpleProgram "zip" cabal-install-1.22.6.0/Distribution/Client/Haddock.hs0000644000000000000000000000526412540225656020440 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Haddock -- Copyright : (c) Andrea Vezzosi 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Interfacing with Haddock -- ----------------------------------------------------------------------------- module Distribution.Client.Haddock ( regenerateHaddockIndex ) where import Data.List (maximumBy) import System.Directory (createDirectoryIfMissing, renameFile) import System.FilePath ((), splitFileName) import Distribution.Package ( packageVersion ) import Distribution.Simple.Haddock (haddockPackagePaths) import Distribution.Simple.Program (haddockProgram, ProgramConfiguration , rawSystemProgram, requireProgramVersion) import Distribution.Version (Version(Version), orLaterVersion) import Distribution.Verbosity (Verbosity) import Distribution.Simple.PackageIndex ( InstalledPackageIndex, allPackagesByName ) import Distribution.Simple.Utils ( comparing, debug, installDirectoryContents, withTempDirectory ) import Distribution.InstalledPackageInfo as InstalledPackageInfo ( InstalledPackageInfo_(exposed) ) regenerateHaddockIndex :: Verbosity -> InstalledPackageIndex -> ProgramConfiguration -> FilePath -> IO () regenerateHaddockIndex verbosity pkgs conf index = do (paths, warns) <- haddockPackagePaths pkgs' Nothing let paths' = [ (interface, html) | (interface, Just html) <- paths] case warns of Nothing -> return () Just m -> debug verbosity m (confHaddock, _, _) <- requireProgramVersion verbosity haddockProgram (orLaterVersion (Version [0,6] [])) conf createDirectoryIfMissing True destDir withTempDirectory verbosity destDir "tmphaddock" $ \tempDir -> do let flags = [ "--gen-contents" , "--gen-index" , "--odir=" ++ tempDir , "--title=Haskell modules on this system" ] ++ [ "--read-interface=" ++ html ++ "," ++ interface | (interface, html) <- paths' ] rawSystemProgram verbosity confHaddock flags renameFile (tempDir "index.html") (tempDir destFile) installDirectoryContents verbosity tempDir destDir where (destDir,destFile) = splitFileName index pkgs' = [ maximumBy (comparing packageVersion) pkgvers' | (_pname, pkgvers) <- allPackagesByName pkgs , let pkgvers' = filter exposed pkgvers , not (null pkgvers') ] cabal-install-1.22.6.0/Distribution/Client/PackageIndex.hs0000644000000000000000000004123512540225656021424 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE DeriveFunctor #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.PackageIndex -- Copyright : (c) David Himmelstrup 2005, -- Bjorn Bringert 2007, -- Duncan Coutts 2008 -- -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- An index of packages. 
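-- A minimal usage sketch (the package list and version range are assumed to
-- be in scope):
--
-- > let index = fromList pkgs
-- > lookupPackageName index (PackageName "foo")
-- > lookupDependency index (Dependency (PackageName "foo") someRange)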
-- module Distribution.Client.PackageIndex ( -- * Package index data type PackageIndex, -- * Creating an index fromList, -- * Updates merge, insert, deletePackageName, deletePackageId, deleteDependency, -- * Queries -- ** Precise lookups elemByPackageId, elemByPackageName, lookupPackageName, lookupPackageId, lookupDependency, -- ** Case-insensitive searches searchByName, SearchResult(..), searchByNameSubstring, -- ** Bulk queries allPackages, allPackagesByName, -- ** Special queries brokenPackages, dependencyClosure, reverseDependencyClosure, topologicalOrder, reverseTopologicalOrder, dependencyInconsistencies, dependencyCycles, dependencyGraph, ) where import Prelude hiding (lookup) import Control.Exception (assert) import qualified Data.Map as Map import Data.Map (Map) import qualified Data.Tree as Tree import qualified Data.Graph as Graph import qualified Data.Array as Array import Data.Array ((!)) import Data.List (groupBy, sortBy, nub, isInfixOf) #if !MIN_VERSION_base(4,8,0) import Data.Monoid (Monoid(..)) #endif import Data.Maybe (isJust, isNothing, fromMaybe, catMaybes) import Distribution.Package ( PackageName(..), PackageIdentifier(..) , Package(..), packageName, packageVersion , Dependency(Dependency), PackageFixedDeps(..) ) import Distribution.Version ( Version, withinRange ) import Distribution.Simple.Utils (lowercase, equating, comparing) -- | The collection of information about packages from one or more 'PackageDB's. -- -- It can be searched efficiently by package name and version. -- newtype PackageIndex pkg = PackageIndex -- This index package names to all the package records matching that package -- name case-sensitively. It includes all versions. -- -- This allows us to find all versions satisfying a dependency. -- Most queries are a map lookup followed by a linear scan of the bucket. -- (Map PackageName [pkg]) deriving (Show, Read, Functor) instance Package pkg => Monoid (PackageIndex pkg) where mempty = PackageIndex Map.empty mappend = merge --save one mappend with empty in the common case: mconcat [] = mempty mconcat xs = foldr1 mappend xs invariant :: Package pkg => PackageIndex pkg -> Bool invariant (PackageIndex m) = all (uncurry goodBucket) (Map.toList m) where goodBucket _ [] = False goodBucket name (pkg0:pkgs0) = check (packageId pkg0) pkgs0 where check pkgid [] = packageName pkgid == name check pkgid (pkg':pkgs) = packageName pkgid == name && pkgid < pkgid' && check pkgid' pkgs where pkgid' = packageId pkg' -- -- * Internal helpers -- mkPackageIndex :: Package pkg => Map PackageName [pkg] -> PackageIndex pkg mkPackageIndex index = assert (invariant (PackageIndex index)) (PackageIndex index) internalError :: String -> a internalError name = error ("PackageIndex." ++ name ++ ": internal error") -- | Lookup a name in the index to get all packages that match that name -- case-sensitively. -- lookup :: Package pkg => PackageIndex pkg -> PackageName -> [pkg] lookup (PackageIndex m) name = fromMaybe [] $ Map.lookup name m -- -- * Construction -- -- | Build an index out of a bunch of packages. -- -- If there are duplicates, later ones mask earlier ones. -- fromList :: Package pkg => [pkg] -> PackageIndex pkg fromList pkgs = mkPackageIndex . Map.map fixBucket . Map.fromListWith (++) $ [ (packageName pkg, [pkg]) | pkg <- pkgs ] where fixBucket = -- out of groups of duplicates, later ones mask earlier ones -- but Map.fromListWith (++) constructs groups in reverse order map head -- Eq instance for PackageIdentifier is wrong, so use Ord: . 
groupBy (\a b -> EQ == comparing packageId a b) -- relies on sortBy being a stable sort so we -- can pick consistently among duplicates . sortBy (comparing packageId) -- -- * Updates -- -- | Merge two indexes. -- -- Packages from the second mask packages of the same exact name -- (case-sensitively) from the first. -- merge :: Package pkg => PackageIndex pkg -> PackageIndex pkg -> PackageIndex pkg merge i1@(PackageIndex m1) i2@(PackageIndex m2) = assert (invariant i1 && invariant i2) $ mkPackageIndex (Map.unionWith mergeBuckets m1 m2) -- | Elements in the second list mask those in the first. mergeBuckets :: Package pkg => [pkg] -> [pkg] -> [pkg] mergeBuckets [] ys = ys mergeBuckets xs [] = xs mergeBuckets xs@(x:xs') ys@(y:ys') = case packageId x `compare` packageId y of GT -> y : mergeBuckets xs ys' EQ -> y : mergeBuckets xs' ys' LT -> x : mergeBuckets xs' ys -- | Inserts a single package into the index. -- -- This is equivalent to (but slightly quicker than) using 'mappend' or -- 'merge' with a singleton index. -- insert :: Package pkg => pkg -> PackageIndex pkg -> PackageIndex pkg insert pkg (PackageIndex index) = mkPackageIndex $ Map.insertWith (\_ -> insertNoDup) (packageName pkg) [pkg] index where pkgid = packageId pkg insertNoDup [] = [pkg] insertNoDup pkgs@(pkg':pkgs') = case compare pkgid (packageId pkg') of LT -> pkg : pkgs EQ -> pkg : pkgs' GT -> pkg' : insertNoDup pkgs' -- | Internal delete helper. -- delete :: Package pkg => PackageName -> (pkg -> Bool) -> PackageIndex pkg -> PackageIndex pkg delete name p (PackageIndex index) = mkPackageIndex $ Map.update filterBucket name index where filterBucket = deleteEmptyBucket . filter (not . p) deleteEmptyBucket [] = Nothing deleteEmptyBucket remaining = Just remaining -- | Removes a single package from the index. -- deletePackageId :: Package pkg => PackageIdentifier -> PackageIndex pkg -> PackageIndex pkg deletePackageId pkgid = delete (packageName pkgid) (\pkg -> packageId pkg == pkgid) -- | Removes all packages with this (case-sensitive) name from the index. -- deletePackageName :: Package pkg => PackageName -> PackageIndex pkg -> PackageIndex pkg deletePackageName name = delete name (\pkg -> packageName pkg == name) -- | Removes all packages satisfying this dependency from the index. -- deleteDependency :: Package pkg => Dependency -> PackageIndex pkg -> PackageIndex pkg deleteDependency (Dependency name verstionRange) = delete name (\pkg -> packageVersion pkg `withinRange` verstionRange) -- -- * Bulk queries -- -- | Get all the packages from the index. -- allPackages :: Package pkg => PackageIndex pkg -> [pkg] allPackages (PackageIndex m) = concat (Map.elems m) -- | Get all the packages from the index. -- -- They are grouped by package name, case-sensitively. -- allPackagesByName :: Package pkg => PackageIndex pkg -> [[pkg]] allPackagesByName (PackageIndex m) = Map.elems m -- -- * Lookups -- elemByPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> Bool elemByPackageId index = isJust . lookupPackageId index elemByPackageName :: Package pkg => PackageIndex pkg -> PackageName -> Bool elemByPackageName index = not . null . lookupPackageName index -- | Does a lookup by package id (name & version). -- -- Since multiple package DBs mask each other case-sensitively by package name, -- then we get back at most one package. 
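-- For example (hypothetical package id):
--
-- > lookupPackageId index (PackageIdentifier (PackageName "foo") (Version [1,0] []))
-- >   -- Just the matching package, or Nothing if that exact version is absent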
-- lookupPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> Maybe pkg lookupPackageId index pkgid = case [ pkg | pkg <- lookup index (packageName pkgid) , packageId pkg == pkgid ] of [] -> Nothing [pkg] -> Just pkg _ -> internalError "lookupPackageIdentifier" -- | Does a case-sensitive search by package name. -- lookupPackageName :: Package pkg => PackageIndex pkg -> PackageName -> [pkg] lookupPackageName index name = [ pkg | pkg <- lookup index name , packageName pkg == name ] -- | Does a case-sensitive search by package name and a range of versions. -- -- We get back any number of versions of the specified package name, all -- satisfying the version range constraint. -- lookupDependency :: Package pkg => PackageIndex pkg -> Dependency -> [pkg] lookupDependency index (Dependency name versionRange) = [ pkg | pkg <- lookup index name , packageName pkg == name , packageVersion pkg `withinRange` versionRange ] -- -- * Case insensitive name lookups -- -- | Does a case-insensitive search by package name. -- -- If there is only one package that compares case-insensitively to this name -- then the search is unambiguous and we get back all versions of that package. -- If several match case-insensitively but one matches exactly then it is also -- unambiguous. -- -- If however several match case-insensitively and none match exactly then we -- have an ambiguous result, and we get back all the versions of all the -- packages. The list of ambiguous results is split by exact package name. So -- it is a non-empty list of non-empty lists. -- searchByName :: Package pkg => PackageIndex pkg -> String -> [(PackageName, [pkg])] searchByName (PackageIndex m) name = [ pkgs | pkgs@(PackageName name',_) <- Map.toList m , lowercase name' == lname ] where lname = lowercase name data SearchResult a = None | Unambiguous a | Ambiguous [a] -- | Does a case-insensitive substring search by package name. -- -- That is, all packages that contain the given string in their name. -- searchByNameSubstring :: Package pkg => PackageIndex pkg -> String -> [(PackageName, [pkg])] searchByNameSubstring (PackageIndex m) searchterm = [ pkgs | pkgs@(PackageName name, _) <- Map.toList m , lsearchterm `isInfixOf` lowercase name ] where lsearchterm = lowercase searchterm -- -- * Special queries -- -- | All packages that have dependencies that are not in the index. -- -- Returns such packages along with the dependencies that they're missing. -- brokenPackages :: PackageFixedDeps pkg => PackageIndex pkg -> [(pkg, [PackageIdentifier])] brokenPackages index = [ (pkg, missing) | pkg <- allPackages index , let missing = [ pkg' | pkg' <- depends pkg , isNothing (lookupPackageId index pkg') ] , not (null missing) ] -- | Tries to take the transitive closure of the package dependencies. -- -- If the transitive closure is complete then it returns that subset of the -- index. Otherwise it returns the broken packages as in 'brokenPackages'. -- -- * Note that if the result is @Right []@ it is because at least one of -- the original given 'PackageIdentifier's do not occur in the index. 
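-- A sketch of how the result is typically consumed (names illustrative):
--
-- > case dependencyClosure index [packageId pkg] of
-- >   Left  closedIndex -> ... -- all dependencies found; use the sub-index
-- >   Right broken      -> ... -- packages paired with their missing dependencies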
-- dependencyClosure :: PackageFixedDeps pkg => PackageIndex pkg -> [PackageIdentifier] -> Either (PackageIndex pkg) [(pkg, [PackageIdentifier])] dependencyClosure index pkgids0 = case closure mempty [] pkgids0 of (completed, []) -> Left completed (completed, _) -> Right (brokenPackages completed) where closure completed failed [] = (completed, failed) closure completed failed (pkgid:pkgids) = case lookupPackageId index pkgid of Nothing -> closure completed (pkgid:failed) pkgids Just pkg -> case lookupPackageId completed (packageId pkg) of Just _ -> closure completed failed pkgids Nothing -> closure completed' failed pkgids' where completed' = insert pkg completed pkgids' = depends pkg ++ pkgids -- | Takes the transitive closure of the packages reverse dependencies. -- -- * The given 'PackageIdentifier's must be in the index. -- reverseDependencyClosure :: PackageFixedDeps pkg => PackageIndex pkg -> [PackageIdentifier] -> [pkg] reverseDependencyClosure index = map vertexToPkg . concatMap Tree.flatten . Graph.dfs reverseDepGraph . map (fromMaybe noSuchPkgId . pkgIdToVertex) where (depGraph, vertexToPkg, pkgIdToVertex) = dependencyGraph index reverseDepGraph = Graph.transposeG depGraph noSuchPkgId = error "reverseDependencyClosure: package is not in the graph" topologicalOrder :: PackageFixedDeps pkg => PackageIndex pkg -> [pkg] topologicalOrder index = map toPkgId . Graph.topSort $ graph where (graph, toPkgId, _) = dependencyGraph index reverseTopologicalOrder :: PackageFixedDeps pkg => PackageIndex pkg -> [pkg] reverseTopologicalOrder index = map toPkgId . Graph.topSort . Graph.transposeG $ graph where (graph, toPkgId, _) = dependencyGraph index -- | Given a package index where we assume we want to use all the packages -- (use 'dependencyClosure' if you need to get such a index subset) find out -- if the dependencies within it use consistent versions of each package. -- Return all cases where multiple packages depend on different versions of -- some other package. -- -- Each element in the result is a package name along with the packages that -- depend on it and the versions they require. These are guaranteed to be -- distinct. -- dependencyInconsistencies :: PackageFixedDeps pkg => PackageIndex pkg -> [(PackageName, [(PackageIdentifier, Version)])] dependencyInconsistencies index = [ (name, inconsistencies) | (name, uses) <- Map.toList inverseIndex , let inconsistencies = duplicatesBy uses versions = map snd inconsistencies , reallyIsInconsistent name (nub versions) ] where inverseIndex = Map.fromListWith (++) [ (packageName dep, [(packageId pkg, packageVersion dep)]) | pkg <- allPackages index , dep <- depends pkg ] duplicatesBy = (\groups -> if length groups == 1 then [] else concat groups) . groupBy (equating snd) . sortBy (comparing snd) reallyIsInconsistent :: PackageName -> [Version] -> Bool reallyIsInconsistent _ [] = False reallyIsInconsistent name [v1, v2] = case (mpkg1, mpkg2) of (Just pkg1, Just pkg2) -> pkgid1 `notElem` depends pkg2 && pkgid2 `notElem` depends pkg1 _ -> True where pkgid1 = PackageIdentifier name v1 pkgid2 = PackageIdentifier name v2 mpkg1 = lookupPackageId index pkgid1 mpkg2 = lookupPackageId index pkgid2 reallyIsInconsistent _ _ = True -- | Find if there are any cycles in the dependency graph. If there are no -- cycles the result is @[]@. -- -- This actually computes the strongly connected components. So it gives us a -- list of groups of packages where within each group they all depend on each -- other, directly or indirectly. 
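-- For example:
--
-- > case dependencyCycles index of
-- >   []     -> ... -- the dependency graph is acyclic
-- >   groups -> ... -- each group is a set of mutually dependent packages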
-- dependencyCycles :: PackageFixedDeps pkg => PackageIndex pkg -> [[pkg]] dependencyCycles index = [ vs | Graph.CyclicSCC vs <- Graph.stronglyConnComp adjacencyList ] where adjacencyList = [ (pkg, packageId pkg, depends pkg) | pkg <- allPackages index ] -- | Builds a graph of the package dependencies. -- -- Dependencies on other packages that are not in the index are discarded. -- You can check if there are any such dependencies with 'brokenPackages'. -- dependencyGraph :: PackageFixedDeps pkg => PackageIndex pkg -> (Graph.Graph, Graph.Vertex -> pkg, PackageIdentifier -> Maybe Graph.Vertex) dependencyGraph index = (graph, vertexToPkg, pkgIdToVertex) where graph = Array.listArray bounds $ map (catMaybes . map pkgIdToVertex . depends) pkgs vertexToPkg vertex = pkgTable ! vertex pkgIdToVertex = binarySearch 0 topBound pkgTable = Array.listArray bounds pkgs pkgIdTable = Array.listArray bounds (map packageId pkgs) pkgs = sortBy (comparing packageId) (allPackages index) topBound = length pkgs - 1 bounds = (0, topBound) binarySearch a b key | a > b = Nothing | otherwise = case compare key (pkgIdTable ! mid) of LT -> binarySearch a (mid-1) key EQ -> Just mid GT -> binarySearch (mid+1) b key where mid = (a + b) `div` 2 cabal-install-1.22.6.0/Distribution/Client/GZipUtils.hs0000644000000000000000000000347412540225656020776 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.GZipUtils -- Copyright : (c) Dmitry Astapov 2010 -- License : BSD-like -- -- Maintainer : cabal-devel@gmail.com -- Stability : provisional -- Portability : portable -- -- Provides a convenience functions for working with files that may or may not -- be zipped. ----------------------------------------------------------------------------- module Distribution.Client.GZipUtils ( maybeDecompress, ) where import qualified Data.ByteString.Lazy.Internal as BS (ByteString(..)) import Data.ByteString.Lazy (ByteString) import Codec.Compression.GZip import Codec.Compression.Zlib.Internal -- | Attempts to decompress the `bytes' under the assumption that -- "data format" error at the very beginning of the stream means -- that it is already decompressed. Caller should make sanity checks -- to verify that it is not, in fact, garbage. -- -- This is to deal with http proxies that lie to us and transparently -- decompress without removing the content-encoding header. See: -- -- maybeDecompress :: ByteString -> ByteString maybeDecompress bytes = foldStream $ decompressWithErrors gzipOrZlibFormat defaultDecompressParams bytes where -- DataError at the beginning of the stream probably means that stream is not compressed. -- Returning it as-is. -- TODO: alternatively, we might consider looking for the two magic bytes -- at the beginning of the gzip header. foldStream (StreamError DataError _) = bytes foldStream somethingElse = doFold somethingElse doFold StreamEnd = BS.Empty doFold (StreamChunk bs stream) = BS.Chunk bs (doFold stream) doFold (StreamError _ msg) = error $ "Codec.Compression.Zlib: " ++ msg cabal-install-1.22.6.0/Distribution/Client/Upload.hs0000644000000000000000000001660112540225656020324 0ustar0000000000000000-- This is a quick hack for uploading packages to Hackage. 
-- See http://hackage.haskell.org/trac/hackage/wiki/CabalUpload module Distribution.Client.Upload (check, upload, report) where import qualified Data.ByteString.Lazy.Char8 as B (concat, length, pack, readFile, unpack) import Data.ByteString.Lazy.Char8 (ByteString) import Distribution.Client.Types (Username(..), Password(..),Repo(..),RemoteRepo(..)) import Distribution.Client.HttpUtils (isOldHackageURI, cabalBrowse) import Distribution.Simple.Utils (debug, notice, warn, info) import Distribution.Verbosity (Verbosity) import Distribution.Text (display) import Distribution.Client.Config import qualified Distribution.Client.BuildReports.Anonymous as BuildReport import qualified Distribution.Client.BuildReports.Upload as BuildReport import Network.Browser ( request ) import Network.HTTP ( Header(..), HeaderName(..), findHeader , Request(..), RequestMethod(..), Response(..) ) import Network.URI (URI(uriPath), parseURI) import Data.Char (intToDigit) import Numeric (showHex) import System.IO (hFlush, stdin, stdout, hGetEcho, hSetEcho) import Control.Exception (bracket) import System.Random (randomRIO) import System.FilePath ((), takeExtension, takeFileName) import qualified System.FilePath.Posix as FilePath.Posix (combine) import System.Directory import Control.Monad (forM_, when) --FIXME: how do we find this path for an arbitrary hackage server? -- is it always at some fixed location relative to the server root? legacyUploadURI :: URI Just legacyUploadURI = parseURI "http://hackage.haskell.org/cgi-bin/hackage-scripts/protected/upload-pkg" checkURI :: URI Just checkURI = parseURI "http://hackage.haskell.org/cgi-bin/hackage-scripts/check-pkg" upload :: Verbosity -> [Repo] -> Maybe Username -> Maybe Password -> [FilePath] -> IO () upload verbosity repos mUsername mPassword paths = do let uploadURI = if isOldHackageURI targetRepoURI then legacyUploadURI else targetRepoURI{uriPath = uriPath targetRepoURI `FilePath.Posix.combine` "upload"} Username username <- maybe promptUsername return mUsername Password password <- maybe promptPassword return mPassword let auth = Just (username, password) flip mapM_ paths $ \path -> do notice verbosity $ "Uploading " ++ path ++ "... 
" handlePackage verbosity uploadURI auth path where targetRepoURI = remoteRepoURI $ last [ remoteRepo | Left remoteRepo <- map repoKind repos ] --FIXME: better error message when no repos are given promptUsername :: IO Username promptUsername = do putStr "Hackage username: " hFlush stdout fmap Username getLine promptPassword :: IO Password promptPassword = do putStr "Hackage password: " hFlush stdout -- save/restore the terminal echoing status passwd <- bracket (hGetEcho stdin) (hSetEcho stdin) $ \_ -> do hSetEcho stdin False -- no echoing for entering the password fmap Password getLine putStrLn "" return passwd report :: Verbosity -> [Repo] -> Maybe Username -> Maybe Password -> IO () report verbosity repos mUsername mPassword = do Username username <- maybe promptUsername return mUsername Password password <- maybe promptPassword return mPassword let auth = Just (username, password) forM_ repos $ \repo -> case repoKind repo of Left remoteRepo -> do dotCabal <- defaultCabalDir let srcDir = dotCabal "reports" remoteRepoName remoteRepo -- We don't want to bomb out just because we haven't built any packages from this repo yet srcExists <- doesDirectoryExist srcDir when srcExists $ do contents <- getDirectoryContents srcDir forM_ (filter (\c -> takeExtension c == ".log") contents) $ \logFile -> do inp <- readFile (srcDir logFile) let (reportStr, buildLog) = read inp :: (String,String) case BuildReport.parse reportStr of Left errs -> do warn verbosity $ "Errors: " ++ errs -- FIXME Right report' -> do info verbosity $ "Uploading report for " ++ display (BuildReport.package report') cabalBrowse verbosity auth $ BuildReport.uploadReports (remoteRepoURI remoteRepo) [(report', Just buildLog)] return () Right{} -> return () check :: Verbosity -> [FilePath] -> IO () check verbosity paths = do flip mapM_ paths $ \path -> do notice verbosity $ "Checking " ++ path ++ "... 
" handlePackage verbosity checkURI Nothing path handlePackage :: Verbosity -> URI -> Maybe (String, String) -> FilePath -> IO () handlePackage verbosity uri auth path = do req <- mkRequest uri path debug verbosity $ "\n" ++ show req (_,resp) <- cabalBrowse verbosity auth $ request req debug verbosity $ show resp case rspCode resp of (2,0,0) -> do notice verbosity "Ok" (x,y,z) -> do notice verbosity $ "Error: " ++ path ++ ": " ++ map intToDigit [x,y,z] ++ " " ++ rspReason resp case findHeader HdrContentType resp of Just contenttype | takeWhile (/= ';') contenttype == "text/plain" -> notice verbosity $ B.unpack $ rspBody resp _ -> debug verbosity $ B.unpack $ rspBody resp mkRequest :: URI -> FilePath -> IO (Request ByteString) mkRequest uri path = do pkg <- readBinaryFile path boundary <- genBoundary let body = printMultiPart (B.pack boundary) (mkFormData path pkg) return $ Request { rqURI = uri, rqMethod = POST, rqHeaders = [Header HdrContentType ("multipart/form-data; boundary="++boundary), Header HdrContentLength (show (B.length body)), Header HdrAccept ("text/plain")], rqBody = body } readBinaryFile :: FilePath -> IO ByteString readBinaryFile = B.readFile genBoundary :: IO String genBoundary = do i <- randomRIO (0x10000000000000,0xFFFFFFFFFFFFFF) :: IO Integer return $ showHex i "" mkFormData :: FilePath -> ByteString -> [BodyPart] mkFormData path pkg = -- yes, web browsers are that stupid (re quoting) [BodyPart [Header hdrContentDisposition $ "form-data; name=package; filename=\""++takeFileName path++"\"", Header HdrContentType "application/x-gzip"] pkg] hdrContentDisposition :: HeaderName hdrContentDisposition = HdrCustom "Content-disposition" -- * Multipart, partly stolen from the cgi package. data BodyPart = BodyPart [Header] ByteString printMultiPart :: ByteString -> [BodyPart] -> ByteString printMultiPart boundary xs = B.concat $ map (printBodyPart boundary) xs ++ [crlf, dd, boundary, dd, crlf] printBodyPart :: ByteString -> BodyPart -> ByteString printBodyPart boundary (BodyPart hs c) = B.concat $ [crlf, dd, boundary, crlf] ++ map (B.pack . show) hs ++ [crlf, c] crlf :: ByteString crlf = B.pack "\r\n" dd :: ByteString dd = B.pack "--" cabal-install-1.22.6.0/Distribution/Client/Init.hs0000644000000000000000000010033612540225656020002 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Init -- Copyright : (c) Brent Yorgey 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Implementation of the 'cabal init' command, which creates an initial .cabal -- file for a project. -- ----------------------------------------------------------------------------- module Distribution.Client.Init ( -- * Commands initCabal ) where import System.IO ( hSetBuffering, stdout, BufferMode(..) 
) import System.Directory ( getCurrentDirectory, doesDirectoryExist, doesFileExist, copyFile , getDirectoryContents, createDirectoryIfMissing ) import System.FilePath ( (), (<.>), takeBaseName ) import Data.Time ( getCurrentTime, utcToLocalTime, toGregorian, localDay, getCurrentTimeZone ) import Data.Char ( toUpper ) import Data.List ( intercalate, nub, groupBy, (\\) ) import Data.Maybe ( fromMaybe, isJust, catMaybes, listToMaybe ) import Data.Function ( on ) import qualified Data.Map as M #if !MIN_VERSION_base(4,8,0) import Data.Traversable ( traverse ) import Control.Applicative ( (<$>) ) #endif import Control.Monad ( when, unless, (>=>), join, forM_ ) import Control.Arrow ( (&&&), (***) ) import Text.PrettyPrint hiding (mode, cat) import Data.Version ( Version(..) ) import Distribution.Version ( orLaterVersion, earlierVersion, intersectVersionRanges, VersionRange ) import Distribution.Verbosity ( Verbosity ) import Distribution.ModuleName ( ModuleName, fromString ) -- And for the Text instance import Distribution.InstalledPackageInfo ( InstalledPackageInfo, sourcePackageId, exposed ) import qualified Distribution.Package as P import Language.Haskell.Extension ( Language(..) ) import Distribution.Client.Init.Types ( InitFlags(..), PackageType(..), Category(..) ) import Distribution.Client.Init.Licenses ( bsd2, bsd3, gplv2, gplv3, lgpl21, lgpl3, agplv3, apache20, mit, mpl20, isc ) import Distribution.Client.Init.Heuristics ( guessPackageName, guessAuthorNameMail, guessMainFileCandidates, SourceFileEntry(..), scanForModules, neededBuildPrograms ) import Distribution.License ( License(..), knownLicenses ) import Distribution.ReadE ( runReadE, readP_to_E ) import Distribution.Simple.Setup ( Flag(..), flagToMaybe ) import Distribution.Simple.Configure ( getInstalledPackages ) import Distribution.Simple.Compiler ( PackageDBStack, Compiler ) import Distribution.Simple.Program ( ProgramConfiguration ) import Distribution.Simple.PackageIndex ( InstalledPackageIndex, moduleNameIndex ) import Distribution.Text ( display, Text(..) ) initCabal :: Verbosity -> PackageDBStack -> Compiler -> ProgramConfiguration -> InitFlags -> IO () initCabal verbosity packageDBs comp conf initFlags = do installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf hSetBuffering stdout NoBuffering initFlags' <- extendFlags installedPkgIndex initFlags writeLicense initFlags' writeSetupFile initFlags' createSourceDirectories initFlags' success <- writeCabalFile initFlags' when success $ generateWarnings initFlags' --------------------------------------------------------------------------- -- Flag acquisition ----------------------------------------------------- --------------------------------------------------------------------------- -- | Fill in more details by guessing, discovering, or prompting the -- user. extendFlags :: InstalledPackageIndex -> InitFlags -> IO InitFlags extendFlags pkgIx = getPackageName >=> getVersion >=> getLicense >=> getAuthorInfo >=> getHomepage >=> getSynopsis >=> getCategory >=> getExtraSourceFiles >=> getLibOrExec >=> getLanguage >=> getGenComments >=> getSrcDir >=> getModulesBuildToolsAndDeps pkgIx -- | Combine two actions which may return a value, preferring the first. That -- is, run the second action only if the first doesn't return a value. infixr 1 ?>> (?>>) :: IO (Maybe a) -> IO (Maybe a) -> IO (Maybe a) f ?>> g = do ma <- f if isJust ma then return ma else g -- | Witness the isomorphism between Maybe and Flag. 
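-- In other words:
--
-- > maybeToFlag Nothing  == NoFlag
-- > maybeToFlag (Just x) == Flag x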
maybeToFlag :: Maybe a -> Flag a maybeToFlag = maybe NoFlag Flag -- | Get the package name: use the package directory (supplied, or the current -- directory by default) as a guess. getPackageName :: InitFlags -> IO InitFlags getPackageName flags = do guess <- traverse guessPackageName (flagToMaybe $ packageDir flags) ?>> Just `fmap` (getCurrentDirectory >>= guessPackageName) pkgName' <- return (flagToMaybe $ packageName flags) ?>> maybePrompt flags (prompt "Package name" guess) ?>> return guess return $ flags { packageName = maybeToFlag pkgName' } -- | Package version: use 0.1.0.0 as a last resort, but try prompting the user -- if possible. getVersion :: InitFlags -> IO InitFlags getVersion flags = do let v = Just $ Version [0,1,0,0] [] v' <- return (flagToMaybe $ version flags) ?>> maybePrompt flags (prompt "Package version" v) ?>> return v return $ flags { version = maybeToFlag v' } -- | Choose a license. getLicense :: InitFlags -> IO InitFlags getLicense flags = do lic <- return (flagToMaybe $ license flags) ?>> fmap (fmap (either UnknownLicense id) . join) (maybePrompt flags (promptListOptional "Please choose a license" listedLicenses)) return $ flags { license = maybeToFlag lic } where listedLicenses = knownLicenses \\ [GPL Nothing, LGPL Nothing, AGPL Nothing , Apache Nothing, OtherLicense] -- | The author's name and email. Prompt, or try to guess from an existing -- darcs repo. getAuthorInfo :: InitFlags -> IO InitFlags getAuthorInfo flags = do (authorName, authorEmail) <- (flagToMaybe *** flagToMaybe) `fmap` guessAuthorNameMail authorName' <- return (flagToMaybe $ author flags) ?>> maybePrompt flags (promptStr "Author name" authorName) ?>> return authorName authorEmail' <- return (flagToMaybe $ email flags) ?>> maybePrompt flags (promptStr "Maintainer email" authorEmail) ?>> return authorEmail return $ flags { author = maybeToFlag authorName' , email = maybeToFlag authorEmail' } -- | Prompt for a homepage URL. getHomepage :: InitFlags -> IO InitFlags getHomepage flags = do hp <- queryHomepage hp' <- return (flagToMaybe $ homepage flags) ?>> maybePrompt flags (promptStr "Project homepage URL" hp) ?>> return hp return $ flags { homepage = maybeToFlag hp' } -- | Right now this does nothing, but it could be changed to do some -- intelligent guessing. queryHomepage :: IO (Maybe String) queryHomepage = return Nothing -- get default remote darcs repo? -- | Prompt for a project synopsis. getSynopsis :: InitFlags -> IO InitFlags getSynopsis flags = do syn <- return (flagToMaybe $ synopsis flags) ?>> maybePrompt flags (promptStr "Project synopsis" Nothing) return $ flags { synopsis = maybeToFlag syn } -- | Prompt for a package category. -- Note that it should be possible to do some smarter guessing here too, i.e. -- look at the name of the top level source directory. getCategory :: InitFlags -> IO InitFlags getCategory flags = do cat <- return (flagToMaybe $ category flags) ?>> fmap join (maybePrompt flags (promptListOptional "Project category" [Codec ..])) return $ flags { category = maybeToFlag cat } -- | Try to guess extra source files (don't prompt the user). getExtraSourceFiles :: InitFlags -> IO InitFlags getExtraSourceFiles flags = do extraSrcFiles <- return (extraSrc flags) ?>> Just `fmap` guessExtraSourceFiles flags return $ flags { extraSrc = extraSrcFiles } -- | Try to guess things to include in the extra-source-files field. -- For now, we just look for things in the root directory named -- 'readme', 'changes', or 'changelog', with any sort of -- capitalization and any extension. 
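-- For example @README.md@, @ChangeLog@ and @changes.txt@ are all picked up,
-- since the check is on the upper-cased base name:
--
-- > map toUpper (takeBaseName "ChangeLog.md") == "CHANGELOG"  -- accepted
-- > map toUpper (takeBaseName "notes.txt")    == "NOTES"      -- not in the list, ignored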
guessExtraSourceFiles :: InitFlags -> IO [FilePath] guessExtraSourceFiles flags = do dir <- maybe getCurrentDirectory return . flagToMaybe $ packageDir flags files <- getDirectoryContents dir return $ filter isExtra files where isExtra = (`elem` ["README", "CHANGES", "CHANGELOG"]) . map toUpper . takeBaseName -- | Ask whether the project builds a library or executable. getLibOrExec :: InitFlags -> IO InitFlags getLibOrExec flags = do isLib <- return (flagToMaybe $ packageType flags) ?>> maybePrompt flags (either (const Library) id `fmap` promptList "What does the package build" [Library, Executable] Nothing display False) ?>> return (Just Library) mainFile <- if isLib /= Just Executable then return Nothing else getMainFile flags return $ flags { packageType = maybeToFlag isLib , mainIs = maybeToFlag mainFile } -- | Try to guess the main file of the executable, and prompt the user to choose -- one of them. Top-level modules including the word 'Main' in the file name -- will be candidates, and shorter filenames will be preferred. getMainFile :: InitFlags -> IO (Maybe FilePath) getMainFile flags = return (flagToMaybe $ mainIs flags) ?>> do candidates <- guessMainFileCandidates flags let showCandidate = either (++" (does not yet exist)") id defaultFile = listToMaybe candidates maybePrompt flags (either id (either id id) `fmap` promptList "What is the main module of the executable" candidates defaultFile showCandidate True) ?>> return (fmap (either id id) defaultFile) -- | Ask for the base language of the package. getLanguage :: InitFlags -> IO InitFlags getLanguage flags = do lang <- return (flagToMaybe $ language flags) ?>> maybePrompt flags (either UnknownLanguage id `fmap` promptList "What base language is the package written in" [Haskell2010, Haskell98] (Just Haskell2010) display True) ?>> return (Just Haskell2010) return $ flags { language = maybeToFlag lang } -- | Ask whether to generate explanatory comments. getGenComments :: InitFlags -> IO InitFlags getGenComments flags = do genComments <- return (not <$> flagToMaybe (noComments flags)) ?>> maybePrompt flags (promptYesNo promptMsg (Just False)) ?>> return (Just False) return $ flags { noComments = maybeToFlag (fmap not genComments) } where promptMsg = "Include documentation on what each field means (y/n)" -- | Ask for the source root directory. getSrcDir :: InitFlags -> IO InitFlags getSrcDir flags = do srcDirs <- return (sourceDirs flags) ?>> fmap (:[]) `fmap` guessSourceDir flags ?>> fmap (fmap ((:[]) . either id id) . join) (maybePrompt flags (promptListOptional' "Source directory" ["src"] id)) return $ flags { sourceDirs = srcDirs } -- | Try to guess source directory. Could try harder; for the -- moment just looks to see whether there is a directory called 'src'. guessSourceDir :: InitFlags -> IO (Maybe String) guessSourceDir flags = do dir <- maybe getCurrentDirectory return . flagToMaybe $ packageDir flags srcIsDir <- doesDirectoryExist (dir "src") return $ if srcIsDir then Just "src" else Nothing -- | Get the list of exposed modules and extra tools needed to build them. getModulesBuildToolsAndDeps :: InstalledPackageIndex -> InitFlags -> IO InitFlags getModulesBuildToolsAndDeps pkgIx flags = do dir <- maybe getCurrentDirectory return . flagToMaybe $ packageDir flags -- XXX really should use guessed source roots. sourceFiles <- scanForModules dir Just mods <- return (exposedModules flags) ?>> (return . Just . map moduleName $ sourceFiles) tools <- return (buildTools flags) ?>> (return . Just . 
neededBuildPrograms $ sourceFiles) deps <- return (dependencies flags) ?>> Just <$> importsToDeps flags (fromString "Prelude" : -- to ensure we get base as a dep ( nub -- only need to consider each imported package once . filter (`notElem` mods) -- don't consider modules from -- this package itself . concatMap imports $ sourceFiles ) ) pkgIx exts <- return (otherExts flags) ?>> (return . Just . nub . concatMap extensions $ sourceFiles) return $ flags { exposedModules = Just mods , buildTools = tools , dependencies = deps , otherExts = exts } importsToDeps :: InitFlags -> [ModuleName] -> InstalledPackageIndex -> IO [P.Dependency] importsToDeps flags mods pkgIx = do let modMap :: M.Map ModuleName [InstalledPackageInfo] modMap = M.map (filter exposed) $ moduleNameIndex pkgIx modDeps :: [(ModuleName, Maybe [InstalledPackageInfo])] modDeps = map (id &&& flip M.lookup modMap) mods message flags "\nGuessing dependencies..." nub . catMaybes <$> mapM (chooseDep flags) modDeps -- Given a module and a list of installed packages providing it, -- choose a dependency (i.e. package + version range) to use for that -- module. chooseDep :: InitFlags -> (ModuleName, Maybe [InstalledPackageInfo]) -> IO (Maybe P.Dependency) chooseDep flags (m, Nothing) = message flags ("\nWarning: no package found providing " ++ display m ++ ".") >> return Nothing chooseDep flags (m, Just []) = message flags ("\nWarning: no package found providing " ++ display m ++ ".") >> return Nothing -- We found some packages: group them by name. chooseDep flags (m, Just ps) = case pkgGroups of -- if there's only one group, i.e. multiple versions of a single package, -- we make it into a dependency, choosing the latest-ish version (see toDep). [grp] -> Just <$> toDep grp -- otherwise, we refuse to choose between different packages and make the user -- do it. grps -> do message flags ("\nWarning: multiple packages found providing " ++ display m ++ ": " ++ intercalate ", " (map (display . P.pkgName . head) grps)) message flags "You will need to pick one and manually add it to the Build-depends: field." return Nothing where pkgGroups = groupBy ((==) `on` P.pkgName) (map sourcePackageId ps) -- Given a list of available versions of the same package, pick a dependency. toDep :: [P.PackageIdentifier] -> IO P.Dependency -- If only one version, easy. We change e.g. 0.4.2 into 0.4.* toDep [pid] = return $ P.Dependency (P.pkgName pid) (pvpize . P.pkgVersion $ pid) -- Otherwise, choose the latest version and issue a warning. toDep pids = do message flags ("\nWarning: multiple versions of " ++ display (P.pkgName . head $ pids) ++ " provide " ++ display m ++ ", choosing the latest.") return $ P.Dependency (P.pkgName . head $ pids) (pvpize . maximum . map P.pkgVersion $ pids) pvpize :: Version -> VersionRange pvpize v = orLaterVersion v' `intersectVersionRanges` earlierVersion (incVersion 1 v') where v' = (v { versionBranch = take 2 (versionBranch v) }) incVersion :: Int -> Version -> Version incVersion n (Version vlist tags) = Version (incVersion' n vlist) tags where incVersion' 0 [] = [1] incVersion' 0 (v:_) = [v+1] incVersion' m [] = replicate m 0 ++ [1] incVersion' m (v:vs) = v : incVersion' (m-1) vs --------------------------------------------------------------------------- -- Prompting/user interaction ------------------------------------------- --------------------------------------------------------------------------- -- | Run a prompt or not based on the nonInteractive flag of the -- InitFlags structure. 
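-- A minimal usage sketch (the prompt text is just an example):
--
-- > mname <- maybePrompt flags (promptStr "Author name" Nothing)
--
-- When the 'nonInteractive' flag is set this returns 'Nothing' without
-- running the prompt; otherwise the prompt runs and its answer is wrapped
-- in 'Just'.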
maybePrompt :: InitFlags -> IO t -> IO (Maybe t) maybePrompt flags p = case nonInteractive flags of Flag True -> return Nothing _ -> Just `fmap` p -- | Create a prompt with optional default value that returns a -- String. promptStr :: String -> Maybe String -> IO String promptStr = promptDefault' Just id -- | Create a yes/no prompt with optional default value. -- promptYesNo :: String -> Maybe Bool -> IO Bool promptYesNo = promptDefault' recogniseYesNo showYesNo where recogniseYesNo s | s == "y" || s == "Y" = Just True | s == "n" || s == "N" = Just False | otherwise = Nothing showYesNo True = "y" showYesNo False = "n" -- | Create a prompt with optional default value that returns a value -- of some Text instance. prompt :: Text t => String -> Maybe t -> IO t prompt = promptDefault' (either (const Nothing) Just . runReadE (readP_to_E id parse)) display -- | Create a prompt with an optional default value. promptDefault' :: (String -> Maybe t) -- ^ parser -> (t -> String) -- ^ pretty-printer -> String -- ^ prompt message -> Maybe t -- ^ optional default value -> IO t promptDefault' parser pretty pr def = do putStr $ mkDefPrompt pr (pretty `fmap` def) inp <- getLine case (inp, def) of ("", Just d) -> return d _ -> case parser inp of Just t -> return t Nothing -> do putStrLn $ "Couldn't parse " ++ inp ++ ", please try again!" promptDefault' parser pretty pr def -- | Create a prompt from a prompt string and a String representation -- of an optional default value. mkDefPrompt :: String -> Maybe String -> String mkDefPrompt pr def = pr ++ "?" ++ defStr def where defStr Nothing = " " defStr (Just s) = " [default: " ++ s ++ "] " promptListOptional :: (Text t, Eq t) => String -- ^ prompt -> [t] -- ^ choices -> IO (Maybe (Either String t)) promptListOptional pr choices = promptListOptional' pr choices display promptListOptional' :: Eq t => String -- ^ prompt -> [t] -- ^ choices -> (t -> String) -- ^ show an item -> IO (Maybe (Either String t)) promptListOptional' pr choices displayItem = fmap rearrange $ promptList pr (Nothing : map Just choices) (Just Nothing) (maybe "(none)" displayItem) True where rearrange = either (Just . Left) (fmap Right) -- | Create a prompt from a list of items. promptList :: Eq t => String -- ^ prompt -> [t] -- ^ choices -> Maybe t -- ^ optional default value -> (t -> String) -- ^ show an item -> Bool -- ^ whether to allow an 'other' option -> IO (Either String t) promptList pr choices def displayItem other = do putStrLn $ pr ++ ":" let options1 = map (\c -> (Just c == def, displayItem c)) choices options2 = zip ([1..]::[Int]) (options1 ++ [(False, "Other (specify)") | other]) mapM_ (putStrLn . \(n,(i,s)) -> showOption n i ++ s) options2 promptList' displayItem (length options2) choices def other where showOption n i | n < 10 = " " ++ star i ++ " " ++ rest | otherwise = " " ++ star i ++ rest where rest = show n ++ ") " star True = "*" star False = " " promptList' :: (t -> String) -> Int -> [t] -> Maybe t -> Bool -> IO (Either String t) promptList' displayItem numChoices choices def other = do putStr $ mkDefPrompt "Your choice" (displayItem `fmap` def) inp <- getLine case (inp, def) of ("", Just d) -> return $ Right d _ -> case readMaybe inp of Nothing -> invalidChoice inp Just n -> getChoice n where invalidChoice inp = do putStrLn $ inp ++ " is not a valid choice." promptList' displayItem numChoices choices def other getChoice n | n < 1 || n > numChoices = invalidChoice (show n) | n < numChoices || (n == numChoices && not other) = return . Right $ choices !! 
(n-1) | otherwise = Left `fmap` promptStr "Please specify" Nothing readMaybe :: (Read a) => String -> Maybe a readMaybe s = case reads s of [(a,"")] -> Just a _ -> Nothing --------------------------------------------------------------------------- -- File generation ------------------------------------------------------ --------------------------------------------------------------------------- writeLicense :: InitFlags -> IO () writeLicense flags = do message flags "\nGenerating LICENSE..." year <- show <$> getYear let authors = fromMaybe "???" . flagToMaybe . author $ flags let licenseFile = case license flags of Flag BSD2 -> Just $ bsd2 authors year Flag BSD3 -> Just $ bsd3 authors year Flag (GPL (Just (Version {versionBranch = [2]}))) -> Just gplv2 Flag (GPL (Just (Version {versionBranch = [3]}))) -> Just gplv3 Flag (LGPL (Just (Version {versionBranch = [2, 1]}))) -> Just lgpl21 Flag (LGPL (Just (Version {versionBranch = [3]}))) -> Just lgpl3 Flag (AGPL (Just (Version {versionBranch = [3]}))) -> Just agplv3 Flag (Apache (Just (Version {versionBranch = [2, 0]}))) -> Just apache20 Flag MIT -> Just $ mit authors year Flag (MPL (Version {versionBranch = [2, 0]})) -> Just mpl20 Flag ISC -> Just $ isc authors year _ -> Nothing case licenseFile of Just licenseText -> writeFileSafe flags "LICENSE" licenseText Nothing -> message flags "Warning: unknown license type, you must put a copy in LICENSE yourself." getYear :: IO Integer getYear = do u <- getCurrentTime z <- getCurrentTimeZone let l = utcToLocalTime z u (y, _, _) = toGregorian $ localDay l return y writeSetupFile :: InitFlags -> IO () writeSetupFile flags = do message flags "Generating Setup.hs..." writeFileSafe flags "Setup.hs" setupFile where setupFile = unlines [ "import Distribution.Simple" , "main = defaultMain" ] writeCabalFile :: InitFlags -> IO Bool writeCabalFile flags@(InitFlags{packageName = NoFlag}) = do message flags "Error: no package name provided." return False writeCabalFile flags@(InitFlags{packageName = Flag p}) = do let cabalFileName = display p ++ ".cabal" message flags $ "Generating " ++ cabalFileName ++ "..." writeFileSafe flags cabalFileName (generateCabalFile cabalFileName flags) return True -- | Write a file \"safely\", backing up any existing version (unless -- the overwrite flag is set). writeFileSafe :: InitFlags -> FilePath -> String -> IO () writeFileSafe flags fileName content = do moveExistingFile flags fileName writeFile fileName content -- | Create source directories, if they were given. createSourceDirectories :: InitFlags -> IO () createSourceDirectories flags = case sourceDirs flags of Just dirs -> forM_ dirs (createDirectoryIfMissing True) Nothing -> return () -- | Move an existing file, if there is one, and the overwrite flag is -- not set. moveExistingFile :: InitFlags -> FilePath -> IO () moveExistingFile flags fileName = unless (overwrite flags == Flag True) $ do e <- doesFileExist fileName when e $ do newName <- findNewName fileName message flags $ "Warning: " ++ fileName ++ " already exists, backing up old version in " ++ newName copyFile fileName newName findNewName :: FilePath -> IO FilePath findNewName oldName = findNewName' 0 where findNewName' :: Integer -> IO FilePath findNewName' n = do let newName = oldName <.> ("save" ++ show n) e <- doesFileExist newName if e then findNewName' (n+1) else return newName -- | Generate a .cabal file from an InitFlags structure. NOTE: this -- is rather ad-hoc! 
What we would REALLY like is to have a -- standard low-level AST type representing .cabal files, which -- preserves things like comments, and to write an *inverse* -- parser/pretty-printer pair between .cabal files and this AST. -- Then instead of this ad-hoc code we could just map an InitFlags -- structure onto a low-level AST structure and use the existing -- pretty-printing code to generate the file. generateCabalFile :: String -> InitFlags -> String generateCabalFile fileName c = renderStyle style { lineLength = 79, ribbonsPerLine = 1.1 } $ (if minimal c /= Flag True then showComment (Just $ "Initial " ++ fileName ++ " generated by cabal " ++ "init. For further documentation, see " ++ "http://haskell.org/cabal/users-guide/") $$ text "" else empty) $$ vcat [ field "name" (packageName c) (Just "The name of the package.") True , field "version" (version c) (Just $ "The package version. See the Haskell package versioning policy (PVP) for standards guiding when and how versions should be incremented.\nhttp://www.haskell.org/haskellwiki/Package_versioning_policy\n" ++ "PVP summary: +-+------- breaking API changes\n" ++ " | | +----- non-breaking API additions\n" ++ " | | | +--- code changes with no API change") True , fieldS "synopsis" (synopsis c) (Just "A short (one-line) description of the package.") True , fieldS "description" NoFlag (Just "A longer description of the package.") True , fieldS "homepage" (homepage c) (Just "URL for the project homepage or repository.") False , fieldS "bug-reports" NoFlag (Just "A URL where users can report bugs.") False , field "license" (license c) (Just "The license under which the package is released.") True , fieldS "license-file" (Flag "LICENSE") (Just "The file containing the license text.") True , fieldS "author" (author c) (Just "The package author(s).") True , fieldS "maintainer" (email c) (Just "An email address to which users can send suggestions, bug reports, and patches.") True , fieldS "copyright" NoFlag (Just "A copyright notice.") True , fieldS "category" (either id display `fmap` category c) Nothing True , fieldS "build-type" (Flag "Simple") Nothing True , fieldS "extra-source-files" (listFieldS (extraSrc c)) (Just "Extra files to be distributed with the package, such as examples or a README.") True , field "cabal-version" (Flag $ orLaterVersion (Version [1,10] [])) (Just "Constraint on the version of Cabal needed to build this package.") False , case packageType c of Flag Executable -> text "\nexecutable" <+> text (maybe "" display . flagToMaybe $ packageName c) $$ nest 2 (vcat [ fieldS "main-is" (mainIs c) (Just ".hs or .lhs file containing the Main module.") True , generateBuildInfo Executable c ]) Flag Library -> text "\nlibrary" $$ nest 2 (vcat [ fieldS "exposed-modules" (listField (exposedModules c)) (Just "Modules exported by the library.") True , generateBuildInfo Library c ]) _ -> empty ] where generateBuildInfo :: PackageType -> InitFlags -> Doc generateBuildInfo pkgtype c' = vcat [ fieldS "other-modules" (listField (otherModules c')) (Just $ case pkgtype of Library -> "Modules included in this library but not exported." 
Executable -> "Modules included in this executable, other than Main.") True , fieldS "other-extensions" (listField (otherExts c')) (Just "LANGUAGE extensions used by modules in this package.") True , fieldS "build-depends" (listField (dependencies c')) (Just "Other library packages from which modules are imported.") True , fieldS "hs-source-dirs" (listFieldS (sourceDirs c')) (Just "Directories containing source files.") True , fieldS "build-tools" (listFieldS (buildTools c')) (Just "Extra tools (e.g. alex, hsc2hs, ...) needed to build the source.") False , field "default-language" (language c') (Just "Base language which the package is written in.") True ] listField :: Text s => Maybe [s] -> Flag String listField = listFieldS . fmap (map display) listFieldS :: Maybe [String] -> Flag String listFieldS = Flag . maybe "" (intercalate ", ") field :: Text t => String -> Flag t -> Maybe String -> Bool -> Doc field s f = fieldS s (fmap display f) fieldS :: String -- ^ Name of the field -> Flag String -- ^ Field contents -> Maybe String -- ^ Comment to explain the field -> Bool -- ^ Should the field be included (commented out) even if blank? -> Doc fieldS _ NoFlag _ inc | not inc || (minimal c == Flag True) = empty fieldS _ (Flag "") _ inc | not inc || (minimal c == Flag True) = empty fieldS s f com _ = case (isJust com, noComments c, minimal c) of (_, _, Flag True) -> id (_, Flag True, _) -> id (True, _, _) -> (showComment com $$) . ($$ text "") (False, _, _) -> ($$ text "") $ comment f <> text s <> colon <> text (replicate (20 - length s) ' ') <> text (fromMaybe "" . flagToMaybe $ f) comment NoFlag = text "-- " comment (Flag "") = text "-- " comment _ = text "" showComment :: Maybe String -> Doc showComment (Just t) = vcat . map (text . ("-- "++)) . lines . renderStyle style { lineLength = 76, ribbonsPerLine = 1.05 } . vcat . map (fcat . map text . breakLine) . lines $ t showComment Nothing = text "" breakLine [] = [] breakLine cs = case break (==' ') cs of (w,cs') -> w : breakLine' cs' breakLine' [] = [] breakLine' cs = case span (==' ') cs of (w,cs') -> w : breakLine cs' -- | Generate warnings for missing fields etc. generateWarnings :: InitFlags -> IO () generateWarnings flags = do message flags "" when (synopsis flags `elem` [NoFlag, Flag ""]) (message flags "Warning: no synopsis given. You should edit the .cabal file and add one.") message flags "You may want to edit the .cabal file and add a Description field." -- | Possibly generate a message to stdout, taking into account the -- --quiet flag. message :: InitFlags -> String -> IO () message (InitFlags{quiet = Flag True}) _ = return () message _ s = putStrLn s cabal-install-1.22.6.0/Distribution/Client/Run.hs0000644000000000000000000000765412540225656017654 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Run -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Implementation of the 'run' command. 
----------------------------------------------------------------------------- module Distribution.Client.Run ( run, splitRunArgs ) where import Distribution.Client.Utils (tryCanonicalizePath) import Distribution.PackageDescription (Executable (..), PackageDescription (..)) import Distribution.Simple.Compiler (compilerFlavor, CompilerFlavor(..)) import Distribution.Simple.Build.PathsModule (pkgPathEnvVar) import Distribution.Simple.BuildPaths (exeExtension) import Distribution.Simple.LocalBuildInfo (ComponentName (..), LocalBuildInfo (..), getComponentLocalBuildInfo, depLibraryPaths) import Distribution.Simple.Utils (die, notice, rawSystemExitWithEnv, addLibraryPath) import Distribution.System (Platform (..)) import Distribution.Verbosity (Verbosity) import qualified Distribution.Simple.GHCJS as GHCJS #if !MIN_VERSION_base(4,8,0) import Data.Functor ((<$>)) #endif import Data.List (find) import System.Directory (getCurrentDirectory) import Distribution.Compat.Environment (getEnvironment) import System.FilePath ((<.>), ()) -- | Return the executable to run and any extra arguments that should be -- forwarded to it. splitRunArgs :: LocalBuildInfo -> [String] -> IO (Executable, [String]) splitRunArgs lbi args = case exes of [] -> die "Couldn't find any executables." [exe] -> case args of [] -> return (exe, []) (x:xs) | x == exeName exe -> return (exe, xs) | otherwise -> return (exe, args) _ -> case args of [] -> die $ "This package contains multiple executables. " ++ "You must pass the executable name as the first argument " ++ "to 'cabal run'." (x:xs) -> case find (\exe -> exeName exe == x) exes of Nothing -> die $ "No executable named '" ++ x ++ "'." Just exe -> return (exe, xs) where pkg_descr = localPkgDescr lbi exes = executables pkg_descr -- | Run a given executable. run :: Verbosity -> LocalBuildInfo -> Executable -> [String] -> IO () run verbosity lbi exe exeArgs = do curDir <- getCurrentDirectory let buildPref = buildDir lbi pkg_descr = localPkgDescr lbi dataDirEnvVar = (pkgPathEnvVar pkg_descr "datadir", curDir dataDir pkg_descr) (path, runArgs) <- case compilerFlavor (compiler lbi) of GHCJS -> do let (script, cmd, cmdArgs) = GHCJS.runCmd (withPrograms lbi) (buildPref exeName exe exeName exe) script' <- tryCanonicalizePath script return (cmd, cmdArgs ++ [script']) _ -> do p <- tryCanonicalizePath $ buildPref exeName exe (exeName exe <.> exeExtension) return (p, []) env <- (dataDirEnvVar:) <$> getEnvironment -- Add (DY)LD_LIBRARY_PATH if needed env' <- if withDynExe lbi then do let (Platform _ os) = hostPlatform lbi clbi = getComponentLocalBuildInfo lbi (CExeName (exeName exe)) paths <- depLibraryPaths True False lbi clbi return (addLibraryPath os paths env) else return env notice verbosity $ "Running " ++ exeName exe ++ "..." rawSystemExitWithEnv verbosity path (runArgs++exeArgs) env' cabal-install-1.22.6.0/Distribution/Client/World.hs0000644000000000000000000001437212540225656020172 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.World -- Copyright : (c) Peter Robinson 2009 -- License : BSD-like -- -- Maintainer : thaldyron@gmail.com -- Stability : provisional -- Portability : portable -- -- Interface to the world-file that contains a list of explicitly -- requested packages. Meant to be imported qualified. -- -- A world file entry stores the package-name, package-version, and -- user flags. 
-- For example, the entry generated by -- # cabal install stm-io-hooks --flags="-debug" -- looks like this: -- # stm-io-hooks -any --flags="-debug" -- To rebuild/upgrade the packages in world (e.g. when updating the compiler) -- use -- # cabal install world -- ----------------------------------------------------------------------------- module Distribution.Client.World ( WorldPkgInfo(..), insert, delete, getContents, ) where import Distribution.Package ( Dependency(..) ) import Distribution.PackageDescription ( FlagAssignment, FlagName(FlagName) ) import Distribution.Verbosity ( Verbosity ) import Distribution.Simple.Utils ( die, info, chattyTry, writeFileAtomic ) import Distribution.Text ( Text(..), display, simpleParse ) import qualified Distribution.Compat.ReadP as Parse import Distribution.Compat.Exception ( catchIO ) import qualified Text.PrettyPrint as Disp import Text.PrettyPrint ( (<>), (<+>) ) import Data.Char as Char import Data.List ( unionBy, deleteFirstsBy, nubBy ) import System.IO.Error ( isDoesNotExistError ) import qualified Data.ByteString.Lazy.Char8 as B import Prelude hiding (getContents) data WorldPkgInfo = WorldPkgInfo Dependency FlagAssignment deriving (Show,Eq) -- | Adds packages to the world file; creates the file if it doesn't -- exist yet. Version constraints and flag assignments for a package are -- updated if already present. IO errors are non-fatal. insert :: Verbosity -> FilePath -> [WorldPkgInfo] -> IO () insert = modifyWorld $ unionBy equalUDep -- | Removes packages from the world file. -- Note: Currently unused as there is no mechanism in Cabal (yet) to -- handle uninstalls. IO errors are non-fatal. delete :: Verbosity -> FilePath -> [WorldPkgInfo] -> IO () delete = modifyWorld $ flip (deleteFirstsBy equalUDep) -- | WorldPkgInfo values are considered equal if they refer to -- the same package, i.e., we don't care about differing versions or flags. equalUDep :: WorldPkgInfo -> WorldPkgInfo -> Bool equalUDep (WorldPkgInfo (Dependency pkg1 _) _) (WorldPkgInfo (Dependency pkg2 _) _) = pkg1 == pkg2 -- | Modifies the world file by applying an update-function ('unionBy' -- for 'insert', 'deleteFirstsBy' for 'delete') to the given list of -- packages. IO errors are considered non-fatal. modifyWorld :: ([WorldPkgInfo] -> [WorldPkgInfo] -> [WorldPkgInfo]) -- ^ Function that defines how -- the list of user packages are merged with -- existing world packages. -> Verbosity -> FilePath -- ^ Location of the world file -> [WorldPkgInfo] -- ^ list of user supplied packages -> IO () modifyWorld _ _ _ [] = return () modifyWorld f verbosity world pkgs = chattyTry "Error while updating world-file. " $ do pkgsOldWorld <- getContents world -- Filter out packages that are not in the world file: let pkgsNewWorld = nubBy equalUDep $ f pkgs pkgsOldWorld -- 'Dependency' is not an Ord instance, so we need to check for -- equivalence the awkward way: if not (all (`elem` pkgsOldWorld) pkgsNewWorld && all (`elem` pkgsNewWorld) pkgsOldWorld) then do info verbosity "Updating world file..." writeFileAtomic world . B.pack $ unlines [ (display pkg) | pkg <- pkgsNewWorld] else info verbosity "World file is already up to date." -- | Returns the content of the world file as a list getContents :: FilePath -> IO [WorldPkgInfo] getContents world = do content <- safelyReadFile world let result = map simpleParse (lines $ B.unpack content) case sequence result of Nothing -> die "Could not parse world file." 
Just xs -> return xs where safelyReadFile :: FilePath -> IO B.ByteString safelyReadFile file = B.readFile file `catchIO` handler where handler e | isDoesNotExistError e = return B.empty | otherwise = ioError e instance Text WorldPkgInfo where disp (WorldPkgInfo dep flags) = disp dep <+> dispFlags flags where dispFlags [] = Disp.empty dispFlags fs = Disp.text "--flags=" <> Disp.doubleQuotes (flagAssToDoc fs) flagAssToDoc = foldr (\(FlagName fname,val) flagAssDoc -> (if not val then Disp.char '-' else Disp.empty) Disp.<> Disp.text fname Disp.<+> flagAssDoc) Disp.empty parse = do dep <- parse Parse.skipSpaces flagAss <- Parse.option [] parseFlagAssignment return $ WorldPkgInfo dep flagAss where parseFlagAssignment :: Parse.ReadP r FlagAssignment parseFlagAssignment = do _ <- Parse.string "--flags" Parse.skipSpaces _ <- Parse.char '=' Parse.skipSpaces inDoubleQuotes $ Parse.many1 flag where inDoubleQuotes :: Parse.ReadP r a -> Parse.ReadP r a inDoubleQuotes = Parse.between (Parse.char '"') (Parse.char '"') flag = do Parse.skipSpaces val <- negative Parse.+++ positive name <- ident Parse.skipSpaces return (FlagName name,val) negative = do _ <- Parse.char '-' return False positive = return True ident :: Parse.ReadP r String ident = do -- First character must be a letter/digit to avoid flags -- like "+-debug": c <- Parse.satisfy Char.isAlphaNum cs <- Parse.munch (\ch -> Char.isAlphaNum ch || ch == '_' || ch == '-') return (c:cs) cabal-install-1.22.6.0/Distribution/Client/Configure.hs0000644000000000000000000002456712540225656021033 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Configure -- Copyright : (c) David Himmelstrup 2005, -- Duncan Coutts 2005 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- High level interface to configuring a package. 
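-- As a rough sketch (using the standard command-line flag names):
-- @cabal configure --enable-tests@ plans the local package with its test
-- stanzas enabled and then runs the Setup.hs configure step with an exact
-- flag assignment and exactly versioned dependencies.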
----------------------------------------------------------------------------- module Distribution.Client.Configure ( configure, chooseCabalVersion, ) where import Distribution.Client.Dependency import Distribution.Client.Dependency.Types (AllowNewer(..), isAllowNewer) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.InstallPlan (InstallPlan) import Distribution.Client.IndexUtils as IndexUtils ( getSourcePackages, getInstalledPackages ) import Distribution.Client.Setup ( ConfigExFlags(..), configureCommand, filterConfigureFlags ) import Distribution.Client.Types as Source import Distribution.Client.SetupWrapper ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) import Distribution.Client.Targets ( userToPackageConstraint ) import Distribution.Simple.Compiler ( Compiler, CompilerInfo, compilerInfo, PackageDB(..), PackageDBStack ) import Distribution.Simple.Program (ProgramConfiguration ) import Distribution.Simple.Setup ( ConfigFlags(..), fromFlag, toFlag, flagToMaybe, fromFlagOrDefault ) import Distribution.Simple.PackageIndex (InstalledPackageIndex) import Distribution.Simple.Utils ( defaultPackageDesc ) import qualified Distribution.InstalledPackageInfo as Installed import Distribution.Package ( Package(..), packageName, Dependency(..), thisPackageVersion ) import Distribution.PackageDescription.Parse ( readPackageDescription ) import Distribution.PackageDescription.Configuration ( finalizePackageDescription ) import Distribution.Version ( anyVersion, thisVersion ) import Distribution.Simple.Utils as Utils ( notice, info, debug, die ) import Distribution.System ( Platform ) import Distribution.Verbosity as Verbosity ( Verbosity ) import Distribution.Version ( Version(..), VersionRange, orLaterVersion ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid (Monoid(..)) #endif -- | Choose the Cabal version such that the setup scripts compiled against this -- version will support the given command-line flags. chooseCabalVersion :: ConfigExFlags -> Maybe Version -> VersionRange chooseCabalVersion configExFlags maybeVersion = maybe defaultVersionRange thisVersion maybeVersion where -- Cabal < 1.19.2 doesn't support '--exact-configuration' which is needed -- for '--allow-newer' to work. allowNewer = fromFlagOrDefault False $ fmap isAllowNewer (configAllowNewer configExFlags) defaultVersionRange = if allowNewer then orLaterVersion (Version [1,19,2] []) else anyVersion -- | Configure the package found in the local directory configure :: Verbosity -> PackageDBStack -> [Repo] -> Compiler -> Platform -> ProgramConfiguration -> ConfigFlags -> ConfigExFlags -> [String] -> IO () configure verbosity packageDBs repos comp platform conf configFlags configExFlags extraArgs = do installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf sourcePkgDb <- getSourcePackages verbosity repos progress <- planLocalPackage verbosity comp platform configFlags configExFlags installedPkgIndex sourcePkgDb notice verbosity "Resolving dependencies..." maybePlan <- foldProgress logMsg (return . Left) (return . 
Right) progress case maybePlan of Left message -> do info verbosity message setupWrapper verbosity (setupScriptOptions installedPkgIndex) Nothing configureCommand (const configFlags) extraArgs Right installPlan -> case InstallPlan.ready installPlan of [pkg@(ReadyPackage (SourcePackage _ _ (LocalUnpackedPackage _) _) _ _ _)] -> configurePackage verbosity (InstallPlan.planPlatform installPlan) (InstallPlan.planCompiler installPlan) (setupScriptOptions installedPkgIndex) configFlags pkg extraArgs _ -> die $ "internal error: configure install plan should have exactly " ++ "one local ready package." where setupScriptOptions index = SetupScriptOptions { useCabalVersion = chooseCabalVersion configExFlags (flagToMaybe (configCabalVersion configExFlags)), useCompiler = Just comp, usePlatform = Just platform, usePackageDB = packageDBs', usePackageIndex = index', useProgramConfig = conf, useDistPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) (configDistPref configFlags), useLoggingHandle = Nothing, useWorkingDir = Nothing, useWin32CleanHack = False, forceExternalSetupMethod = False, setupCacheLock = Nothing } where -- Hack: we typically want to allow the UserPackageDB for finding the -- Cabal lib when compiling any Setup.hs even if we're doing a global -- install. However we also allow looking in a specific package db. (packageDBs', index') = case packageDBs of (GlobalPackageDB:dbs) | UserPackageDB `notElem` dbs -> (GlobalPackageDB:UserPackageDB:dbs, Nothing) -- but if the user is using an odd db stack, don't touch it dbs -> (dbs, Just index) logMsg message rest = debug verbosity message >> rest -- | Make an 'InstallPlan' for the unpacked package in the current directory, -- and all its dependencies. -- planLocalPackage :: Verbosity -> Compiler -> Platform -> ConfigFlags -> ConfigExFlags -> InstalledPackageIndex -> SourcePackageDb -> IO (Progress String String InstallPlan) planLocalPackage verbosity comp platform configFlags configExFlags installedPkgIndex (SourcePackageDb _ packagePrefs) = do pkg <- readPackageDescription verbosity =<< defaultPackageDesc verbosity solver <- chooseSolver verbosity (fromFlag $ configSolver configExFlags) (compilerInfo comp) let -- We create a local package and ask to resolve a dependency on it localPkg = SourcePackage { packageInfoId = packageId pkg, Source.packageDescription = pkg, packageSource = LocalUnpackedPackage ".", packageDescrOverride = Nothing } testsEnabled = fromFlagOrDefault False $ configTests configFlags benchmarksEnabled = fromFlagOrDefault False $ configBenchmarks configFlags resolverParams = removeUpperBounds (fromFlagOrDefault AllowNewerNone $ configAllowNewer configExFlags) . addPreferences -- preferences from the config file or command line [ PackageVersionPreference name ver | Dependency name ver <- configPreferences configExFlags ] . addConstraints -- version constraints from the config file or command line -- TODO: should warn or error on constraints that are not on direct -- deps or flag constraints not on the package in question. (map userToPackageConstraint (configExConstraints configExFlags)) . addConstraints -- package flags from the config file or command line [ PackageConstraintFlags (packageName pkg) (configConfigurationsFlags configFlags) ] . 
addConstraints -- '--enable-tests' and '--enable-benchmarks' constraints from -- command line [ PackageConstraintStanzas (packageName pkg) $ [ TestStanzas | testsEnabled ] ++ [ BenchStanzas | benchmarksEnabled ] ] $ standardInstallPolicy installedPkgIndex (SourcePackageDb mempty packagePrefs) [SpecificSourcePackage localPkg] return (resolveDependencies platform (compilerInfo comp) solver resolverParams) -- | Call an installer for an 'SourcePackage' but override the configure -- flags with the ones given by the 'ReadyPackage'. In particular the -- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly -- versioned package dependencies. So we ignore any previous partial flag -- assignment or dependency constraints and use the new ones. -- -- NB: when updating this function, don't forget to also update -- 'installReadyPackage' in D.C.Install. configurePackage :: Verbosity -> Platform -> CompilerInfo -> SetupScriptOptions -> ConfigFlags -> ReadyPackage -> [String] -> IO () configurePackage verbosity platform comp scriptOptions configFlags (ReadyPackage (SourcePackage _ gpkg _ _) flags stanzas deps) extraArgs = setupWrapper verbosity scriptOptions (Just pkg) configureCommand configureFlags extraArgs where configureFlags = filterConfigureFlags configFlags { configConfigurationsFlags = flags, -- We generate the legacy constraints as well as the new style precise -- deps. In the end only one set gets passed to Setup.hs configure, -- depending on the Cabal version we are talking to. configConstraints = [ thisPackageVersion (packageId deppkg) | deppkg <- deps ], configDependencies = [ (packageName (Installed.sourcePackageId deppkg), Installed.installedPackageId deppkg) | deppkg <- deps ], -- Use '--exact-configuration' if supported. configExactConfiguration = toFlag True, configVerbosity = toFlag verbosity, configBenchmarks = toFlag (BenchStanzas `elem` stanzas), configTests = toFlag (TestStanzas `elem` stanzas) } pkg = case finalizePackageDescription flags (const True) platform comp [] (enableStanzas stanzas gpkg) of Left _ -> error "finalizePackageDescription ReadyPackage failed" Right (desc, _) -> desc cabal-install-1.22.6.0/Distribution/Client/Targets.hs0000644000000000000000000007321112540225656020511 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Targets -- Copyright : (c) Duncan Coutts 2011 -- License : BSD-like -- -- Maintainer : duncan@community.haskell.org -- -- Handling for user-specified targets ----------------------------------------------------------------------------- module Distribution.Client.Targets ( -- * User targets UserTarget(..), readUserTargets, -- * Package specifiers PackageSpecifier(..), pkgSpecifierTarget, pkgSpecifierConstraints, -- * Resolving user targets to package specifiers resolveUserTargets, -- ** Detailed interface UserTargetProblem(..), readUserTarget, reportUserTargetProblems, expandUserTarget, PackageTarget(..), fetchPackageTarget, readPackageTarget, PackageTargetProblem(..), reportPackageTargetProblems, disambiguatePackageTargets, disambiguatePackageName, -- * User constraints UserConstraint(..), readUserConstraint, userToPackageConstraint ) where import Distribution.Package ( Package(..), PackageName(..) , PackageIdentifier(..), packageName, packageVersion , Dependency(Dependency) ) import Distribution.Client.Types ( SourcePackage(..), PackageLocation(..), OptionalStanza(..) 
) import Distribution.Client.Dependency.Types ( PackageConstraint(..) ) import qualified Distribution.Client.World as World import Distribution.Client.PackageIndex (PackageIndex) import qualified Distribution.Client.PackageIndex as PackageIndex import qualified Distribution.Client.Tar as Tar import Distribution.Client.FetchUtils import Distribution.Client.Utils ( tryFindPackageDesc ) import Distribution.PackageDescription ( GenericPackageDescription, FlagName(..), FlagAssignment ) import Distribution.PackageDescription.Parse ( readPackageDescription, parsePackageDescription, ParseResult(..) ) import Distribution.Version ( Version(Version), thisVersion, anyVersion, isAnyVersion , VersionRange ) import Distribution.Text ( Text(..), display ) import Distribution.Verbosity (Verbosity) import Distribution.Simple.Utils ( die, warn, intercalate, fromUTF8, lowercase ) import Data.List ( find, nub ) import Data.Maybe ( listToMaybe ) import Data.Either ( partitionEithers ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( Monoid(..) ) #endif import qualified Data.Map as Map import qualified Data.ByteString.Lazy as BS import qualified Data.ByteString.Lazy.Char8 as BS.Char8 import qualified Distribution.Client.GZipUtils as GZipUtils import Control.Monad (liftM) import qualified Distribution.Compat.ReadP as Parse import Distribution.Compat.ReadP ( (+++), (<++) ) import qualified Text.PrettyPrint as Disp import Text.PrettyPrint ( (<>), (<+>) ) import Data.Char ( isSpace, isAlphaNum ) import System.FilePath ( takeExtension, dropExtension, takeDirectory, splitPath ) import System.Directory ( doesFileExist, doesDirectoryExist ) import Network.URI ( URI(..), URIAuth(..), parseAbsoluteURI ) -- ------------------------------------------------------------ -- * User targets -- ------------------------------------------------------------ -- | Various ways that a user may specify a package or package collection. -- data UserTarget = -- | A partially specified package, identified by name and possibly with -- an exact version or a version constraint. -- -- > cabal install foo -- > cabal install foo-1.0 -- > cabal install 'foo < 2' -- UserTargetNamed Dependency -- | A special virtual package that refers to the collection of packages -- recorded in the world file that the user specifically installed. -- -- > cabal install world -- | UserTargetWorld -- | A specific package that is unpacked in a local directory, often the -- current directory. -- -- > cabal install . -- > cabal install ../lib/other -- -- * Note: in future, if multiple @.cabal@ files are allowed in a single -- directory then this will refer to the collection of packages. -- | UserTargetLocalDir FilePath -- | A specific local unpacked package, identified by its @.cabal@ file. -- -- > cabal install foo.cabal -- > cabal install ../lib/other/bar.cabal -- | UserTargetLocalCabalFile FilePath -- | A specific package that is available as a local tarball file -- -- > cabal install dist/foo-1.0.tar.gz -- > cabal install ../build/baz-1.0.tar.gz -- | UserTargetLocalTarball FilePath -- | A specific package that is available as a remote tarball file -- -- > cabal install http://code.haskell.org/~user/foo/foo-0.9.tar.gz -- | UserTargetRemoteTarball URI deriving (Show,Eq) -- ------------------------------------------------------------ -- * Package specifier -- ------------------------------------------------------------ -- | A fully or partially resolved reference to a package. 
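-- As an illustration (targets invented for the example): a named target such
-- as @foo < 2@ ends up as a 'NamedPackage' carrying a version constraint for
-- the solver to settle, whereas a local directory or tarball target ends up
-- as a 'SpecificSourcePackage' wrapping the package found there.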
-- data PackageSpecifier pkg = -- | A partially specified reference to a package (either source or -- installed). It is specified by package name and optionally some -- additional constraints. Use a dependency resolver to pick a specific -- package satisfying these constraints. -- NamedPackage PackageName [PackageConstraint] -- | A fully specified source package. -- | SpecificSourcePackage pkg deriving Show pkgSpecifierTarget :: Package pkg => PackageSpecifier pkg -> PackageName pkgSpecifierTarget (NamedPackage name _) = name pkgSpecifierTarget (SpecificSourcePackage pkg) = packageName pkg pkgSpecifierConstraints :: Package pkg => PackageSpecifier pkg -> [PackageConstraint] pkgSpecifierConstraints (NamedPackage _ constraints) = constraints pkgSpecifierConstraints (SpecificSourcePackage pkg) = [PackageConstraintVersion (packageName pkg) (thisVersion (packageVersion pkg))] -- ------------------------------------------------------------ -- * Parsing and checking user targets -- ------------------------------------------------------------ readUserTargets :: Verbosity -> [String] -> IO [UserTarget] readUserTargets _verbosity targetStrs = do (problems, targets) <- liftM partitionEithers (mapM readUserTarget targetStrs) reportUserTargetProblems problems return targets data UserTargetProblem = UserTargetUnexpectedFile String | UserTargetNonexistantFile String | UserTargetUnexpectedUriScheme String | UserTargetUnrecognisedUri String | UserTargetUnrecognised String | UserTargetBadWorldPkg deriving Show readUserTarget :: String -> IO (Either UserTargetProblem UserTarget) readUserTarget targetstr = case testNamedTargets targetstr of Just (Dependency (PackageName "world") verrange) | verrange == anyVersion -> return (Right UserTargetWorld) | otherwise -> return (Left UserTargetBadWorldPkg) Just dep -> return (Right (UserTargetNamed dep)) Nothing -> do fileTarget <- testFileTargets targetstr case fileTarget of Just target -> return target Nothing -> case testUriTargets targetstr of Just target -> return target Nothing -> return (Left (UserTargetUnrecognised targetstr)) where testNamedTargets = readPToMaybe parseDependencyOrPackageId testFileTargets filename = do isDir <- doesDirectoryExist filename isFile <- doesFileExist filename parentDirExists <- case takeDirectory filename of [] -> return False dir -> doesDirectoryExist dir let result | isDir = Just (Right (UserTargetLocalDir filename)) | isFile && extensionIsTarGz filename = Just (Right (UserTargetLocalTarball filename)) | isFile && takeExtension filename == ".cabal" = Just (Right (UserTargetLocalCabalFile filename)) | isFile = Just (Left (UserTargetUnexpectedFile filename)) | parentDirExists = Just (Left (UserTargetNonexistantFile filename)) | otherwise = Nothing return result testUriTargets str = case parseAbsoluteURI str of Just uri@URI { uriScheme = scheme, uriAuthority = Just URIAuth { uriRegName = host } } | scheme /= "http:" -> Just (Left (UserTargetUnexpectedUriScheme targetstr)) | null host -> Just (Left (UserTargetUnrecognisedUri targetstr)) | otherwise -> Just (Right (UserTargetRemoteTarball uri)) _ -> Nothing extensionIsTarGz f = takeExtension f == ".gz" && takeExtension (dropExtension f) == ".tar" parseDependencyOrPackageId :: Parse.ReadP r Dependency parseDependencyOrPackageId = parse +++ liftM pkgidToDependency parse where pkgidToDependency :: PackageIdentifier -> Dependency pkgidToDependency p = case packageVersion p of Version [] _ -> Dependency (packageName p) anyVersion version -> Dependency (packageName p) (thisVersion version) 
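-- A few examples of how named target strings are read here (illustrative):
--
-- > "foo"      ==> a dependency on any version of foo
-- > "foo-1.0"  ==> a dependency on exactly version 1.0 of foo
-- > "foo < 2"  ==> a dependency with the given version range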
readPToMaybe :: Parse.ReadP a a -> String -> Maybe a readPToMaybe p str = listToMaybe [ r | (r,s) <- Parse.readP_to_S p str , all isSpace s ] reportUserTargetProblems :: [UserTargetProblem] -> IO () reportUserTargetProblems problems = do case [ target | UserTargetUnrecognised target <- problems ] of [] -> return () target -> die $ unlines [ "Unrecognised target '" ++ name ++ "'." | name <- target ] ++ "Targets can be:\n" ++ " - package names, e.g. 'pkgname', 'pkgname-1.0.1', 'pkgname < 2.0'\n" ++ " - the special 'world' target\n" ++ " - cabal files 'pkgname.cabal' or package directories 'pkgname/'\n" ++ " - package tarballs 'pkgname.tar.gz' or 'http://example.com/pkgname.tar.gz'" case [ () | UserTargetBadWorldPkg <- problems ] of [] -> return () _ -> die "The special 'world' target does not take any version." case [ target | UserTargetNonexistantFile target <- problems ] of [] -> return () target -> die $ unlines [ "The file does not exist '" ++ name ++ "'." | name <- target ] case [ target | UserTargetUnexpectedFile target <- problems ] of [] -> return () target -> die $ unlines [ "Unrecognised file target '" ++ name ++ "'." | name <- target ] ++ "File targets can be either package tarballs 'pkgname.tar.gz' " ++ "or cabal files 'pkgname.cabal'." case [ target | UserTargetUnexpectedUriScheme target <- problems ] of [] -> return () target -> die $ unlines [ "URL target not supported '" ++ name ++ "'." | name <- target ] ++ "Only 'http://' URLs are supported." case [ target | UserTargetUnrecognisedUri target <- problems ] of [] -> return () target -> die $ unlines [ "Unrecognise URL target '" ++ name ++ "'." | name <- target ] -- ------------------------------------------------------------ -- * Resolving user targets to package specifiers -- ------------------------------------------------------------ -- | Given a bunch of user-specified targets, try to resolve what it is they -- refer to. They can either be specific packages (local dirs, tarballs etc) -- or they can be named packages (with or without version info). -- resolveUserTargets :: Package pkg => Verbosity -> FilePath -> PackageIndex pkg -> [UserTarget] -> IO [PackageSpecifier SourcePackage] resolveUserTargets verbosity worldFile available userTargets = do -- given the user targets, get a list of fully or partially resolved -- package references packageTargets <- mapM (readPackageTarget verbosity) =<< mapM (fetchPackageTarget verbosity) . concat =<< mapM (expandUserTarget worldFile) userTargets -- users are allowed to give package names case-insensitively, so we must -- disambiguate named package references let (problems, packageSpecifiers) = disambiguatePackageTargets available availableExtra packageTargets -- use any extra specific available packages to help us disambiguate availableExtra = [ packageName pkg | PackageTargetLocation pkg <- packageTargets ] reportPackageTargetProblems verbosity problems return packageSpecifiers -- ------------------------------------------------------------ -- * Package targets -- ------------------------------------------------------------ -- | An intermediate between a 'UserTarget' and a resolved 'PackageSpecifier'. -- Unlike a 'UserTarget', a 'PackageTarget' refers only to a single package. -- data PackageTarget pkg = PackageTargetNamed PackageName [PackageConstraint] UserTarget -- | A package identified by name, but case insensitively, so it needs -- to be resolved to the right case-sensitive name. 
| PackageTargetNamedFuzzy PackageName [PackageConstraint] UserTarget | PackageTargetLocation pkg deriving Show -- ------------------------------------------------------------ -- * Converting user targets to package targets -- ------------------------------------------------------------ -- | Given a user-specified target, expand it to a bunch of package targets -- (each of which refers to only one package). -- expandUserTarget :: FilePath -> UserTarget -> IO [PackageTarget (PackageLocation ())] expandUserTarget worldFile userTarget = case userTarget of UserTargetNamed (Dependency name vrange) -> let constraints = [ PackageConstraintVersion name vrange | not (isAnyVersion vrange) ] in return [PackageTargetNamedFuzzy name constraints userTarget] UserTargetWorld -> do worldPkgs <- World.getContents worldFile --TODO: should we warn if there are no world targets? return [ PackageTargetNamed name constraints userTarget | World.WorldPkgInfo (Dependency name vrange) flags <- worldPkgs , let constraints = [ PackageConstraintVersion name vrange | not (isAnyVersion vrange) ] ++ [ PackageConstraintFlags name flags | not (null flags) ] ] UserTargetLocalDir dir -> return [ PackageTargetLocation (LocalUnpackedPackage dir) ] UserTargetLocalCabalFile file -> do let dir = takeDirectory file _ <- tryFindPackageDesc dir (localPackageError dir) -- just as a check return [ PackageTargetLocation (LocalUnpackedPackage dir) ] UserTargetLocalTarball tarballFile -> return [ PackageTargetLocation (LocalTarballPackage tarballFile) ] UserTargetRemoteTarball tarballURL -> return [ PackageTargetLocation (RemoteTarballPackage tarballURL ()) ] localPackageError :: FilePath -> String localPackageError dir = "Error reading local package.\nCouldn't find .cabal file in: " ++ dir -- ------------------------------------------------------------ -- * Fetching and reading package targets -- ------------------------------------------------------------ -- | Fetch any remote targets so that they can be read. -- fetchPackageTarget :: Verbosity -> PackageTarget (PackageLocation ()) -> IO (PackageTarget (PackageLocation FilePath)) fetchPackageTarget verbosity target = case target of PackageTargetNamed n cs ut -> return (PackageTargetNamed n cs ut) PackageTargetNamedFuzzy n cs ut -> return (PackageTargetNamedFuzzy n cs ut) PackageTargetLocation location -> do location' <- fetchPackage verbosity (fmap (const Nothing) location) return (PackageTargetLocation location') -- | Given a package target that has been fetched, read the .cabal file. -- -- This only affects targets given by location, named targets are unaffected. 
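-- As a sketch of the cases handled below: for a local unpacked directory the
-- @.cabal@ file is located and parsed; for a local or remote tarball the
-- single @.cabal@ entry is extracted from the archive and parsed; repo
-- tarballs are not read here, since their metadata comes from the index.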
-- readPackageTarget :: Verbosity -> PackageTarget (PackageLocation FilePath) -> IO (PackageTarget SourcePackage) readPackageTarget verbosity target = case target of PackageTargetNamed pkgname constraints userTarget -> return (PackageTargetNamed pkgname constraints userTarget) PackageTargetNamedFuzzy pkgname constraints userTarget -> return (PackageTargetNamedFuzzy pkgname constraints userTarget) PackageTargetLocation location -> case location of LocalUnpackedPackage dir -> do pkg <- tryFindPackageDesc dir (localPackageError dir) >>= readPackageDescription verbosity return $ PackageTargetLocation $ SourcePackage { packageInfoId = packageId pkg, packageDescription = pkg, packageSource = fmap Just location, packageDescrOverride = Nothing } LocalTarballPackage tarballFile -> readTarballPackageTarget location tarballFile tarballFile RemoteTarballPackage tarballURL tarballFile -> readTarballPackageTarget location tarballFile (show tarballURL) RepoTarballPackage _repo _pkgid _ -> error "TODO: readPackageTarget RepoTarballPackage" -- For repo tarballs this info should be obtained from the index. where readTarballPackageTarget location tarballFile tarballOriginalLoc = do (filename, content) <- extractTarballPackageCabalFile tarballFile tarballOriginalLoc case parsePackageDescription' content of Nothing -> die $ "Could not parse the cabal file " ++ filename ++ " in " ++ tarballFile Just pkg -> return $ PackageTargetLocation $ SourcePackage { packageInfoId = packageId pkg, packageDescription = pkg, packageSource = fmap Just location, packageDescrOverride = Nothing } extractTarballPackageCabalFile :: FilePath -> String -> IO (FilePath, BS.ByteString) extractTarballPackageCabalFile tarballFile tarballOriginalLoc = either (die . formatErr) return . check . Tar.entriesIndex . Tar.filterEntries isCabalFile . Tar.read . GZipUtils.maybeDecompress =<< BS.readFile tarballFile where formatErr msg = "Error reading " ++ tarballOriginalLoc ++ ": " ++ msg check (Left e) = Left e check (Right m) = case Map.elems m of [] -> Left noCabalFile [file] -> case Tar.entryContent file of Tar.NormalFile content _ -> Right (Tar.entryPath file, content) _ -> Left noCabalFile _files -> Left multipleCabalFiles where noCabalFile = "No cabal file found" multipleCabalFiles = "Multiple cabal files found" isCabalFile e = case splitPath (Tar.entryPath e) of [ _dir, file] -> takeExtension file == ".cabal" [".", _dir, file] -> takeExtension file == ".cabal" _ -> False parsePackageDescription' :: BS.ByteString -> Maybe GenericPackageDescription parsePackageDescription' content = case parsePackageDescription . fromUTF8 . BS.Char8.unpack $ content of ParseOk _ pkg -> Just pkg _ -> Nothing -- ------------------------------------------------------------ -- * Checking package targets -- ------------------------------------------------------------ data PackageTargetProblem = PackageNameUnknown PackageName UserTarget | PackageNameAmbigious PackageName [PackageName] UserTarget deriving Show -- | Users are allowed to give package names case-insensitively, so we must -- disambiguate named package references. 
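-- The resolution below also consults the names of any specific packages
-- given on the same invocation (local directories, tarballs and so on), so
-- that those names can settle an otherwise fuzzy case-insensitive match.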
-- disambiguatePackageTargets :: Package pkg' => PackageIndex pkg' -> [PackageName] -> [PackageTarget pkg] -> ( [PackageTargetProblem] , [PackageSpecifier pkg] ) disambiguatePackageTargets availablePkgIndex availableExtra targets = partitionEithers (map disambiguatePackageTarget targets) where disambiguatePackageTarget packageTarget = case packageTarget of PackageTargetLocation pkg -> Right (SpecificSourcePackage pkg) PackageTargetNamed pkgname constraints userTarget | null (PackageIndex.lookupPackageName availablePkgIndex pkgname) -> Left (PackageNameUnknown pkgname userTarget) | otherwise -> Right (NamedPackage pkgname constraints) PackageTargetNamedFuzzy pkgname constraints userTarget -> case disambiguatePackageName packageNameEnv pkgname of None -> Left (PackageNameUnknown pkgname userTarget) Ambiguous pkgnames -> Left (PackageNameAmbigious pkgname pkgnames userTarget) Unambiguous pkgname' -> Right (NamedPackage pkgname' constraints') where constraints' = map (renamePackageConstraint pkgname') constraints -- use any extra specific available packages to help us disambiguate packageNameEnv :: PackageNameEnv packageNameEnv = mappend (indexPackageNameEnv availablePkgIndex) (extraPackageNameEnv availableExtra) -- | Report problems to the user. That is, if there are any problems -- then raise an exception. reportPackageTargetProblems :: Verbosity -> [PackageTargetProblem] -> IO () reportPackageTargetProblems verbosity problems = do case [ pkg | PackageNameUnknown pkg originalTarget <- problems , not (isUserTagetWorld originalTarget) ] of [] -> return () pkgs -> die $ unlines [ "There is no package named '" ++ display name ++ "'. " | name <- pkgs ] ++ "You may need to run 'cabal update' to get the latest " ++ "list of available packages." case [ (pkg, matches) | PackageNameAmbigious pkg matches _ <- problems ] of [] -> return () ambiguities -> die $ unlines [ "The package name '" ++ display name ++ "' is ambigious. It could be: " ++ intercalate ", " (map display matches) | (name, matches) <- ambiguities ] case [ pkg | PackageNameUnknown pkg UserTargetWorld <- problems ] of [] -> return () pkgs -> warn verbosity $ "The following 'world' packages will be ignored because " ++ "they refer to packages that cannot be found: " ++ intercalate ", " (map display pkgs) ++ "\n" ++ "You can suppress this warning by correcting the world file." where isUserTagetWorld UserTargetWorld = True; isUserTagetWorld _ = False -- ------------------------------------------------------------ -- * Disambiguating package names -- ------------------------------------------------------------ data MaybeAmbigious a = None | Unambiguous a | Ambiguous [a] -- | Given a package name and a list of matching names, figure out which one it -- might be referring to. If there is an exact case-sensitive match then that's -- ok. If it matches just one package case-insensitively then that's also ok. -- The only problem is if it matches multiple packages case-insensitively, in -- that case it is ambigious. 
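-- A small illustration (package names invented): with known packages @HTTP@
-- and @http-client@,
--
-- > "http"  ==> Unambiguous (PackageName "HTTP")
-- > "HTTP"  ==> Unambiguous (PackageName "HTTP")
-- > "foo"   ==> None
--
-- and if both @HTTP@ and @Http@ were known, @"http"@ would be 'Ambiguous'.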
-- disambiguatePackageName :: PackageNameEnv -> PackageName -> MaybeAmbigious PackageName disambiguatePackageName (PackageNameEnv pkgNameLookup) name = case nub (pkgNameLookup name) of [] -> None [name'] -> Unambiguous name' names -> case find (name==) names of Just name' -> Unambiguous name' Nothing -> Ambiguous names newtype PackageNameEnv = PackageNameEnv (PackageName -> [PackageName]) instance Monoid PackageNameEnv where mempty = PackageNameEnv (const []) mappend (PackageNameEnv lookupA) (PackageNameEnv lookupB) = PackageNameEnv (\name -> lookupA name ++ lookupB name) indexPackageNameEnv :: Package pkg => PackageIndex pkg -> PackageNameEnv indexPackageNameEnv pkgIndex = PackageNameEnv pkgNameLookup where pkgNameLookup (PackageName name) = map fst (PackageIndex.searchByName pkgIndex name) extraPackageNameEnv :: [PackageName] -> PackageNameEnv extraPackageNameEnv names = PackageNameEnv pkgNameLookup where pkgNameLookup (PackageName name) = [ PackageName name' | let lname = lowercase name , PackageName name' <- names , lowercase name' == lname ] -- ------------------------------------------------------------ -- * Package constraints -- ------------------------------------------------------------ data UserConstraint = UserConstraintVersion PackageName VersionRange | UserConstraintInstalled PackageName | UserConstraintSource PackageName | UserConstraintFlags PackageName FlagAssignment | UserConstraintStanzas PackageName [OptionalStanza] deriving (Show,Eq) userToPackageConstraint :: UserConstraint -> PackageConstraint -- At the moment, the types happen to be directly equivalent userToPackageConstraint uc = case uc of UserConstraintVersion name ver -> PackageConstraintVersion name ver UserConstraintInstalled name -> PackageConstraintInstalled name UserConstraintSource name -> PackageConstraintSource name UserConstraintFlags name flags -> PackageConstraintFlags name flags UserConstraintStanzas name stanzas -> PackageConstraintStanzas name stanzas renamePackageConstraint :: PackageName -> PackageConstraint -> PackageConstraint renamePackageConstraint name pc = case pc of PackageConstraintVersion _ ver -> PackageConstraintVersion name ver PackageConstraintInstalled _ -> PackageConstraintInstalled name PackageConstraintSource _ -> PackageConstraintSource name PackageConstraintFlags _ flags -> PackageConstraintFlags name flags PackageConstraintStanzas _ stanzas -> PackageConstraintStanzas name stanzas readUserConstraint :: String -> Either String UserConstraint readUserConstraint str = case readPToMaybe parse str of Nothing -> Left msgCannotParse Just c -> Right c where msgCannotParse = "expected a package name followed by a constraint, which is " ++ "either a version range, 'installed', 'source' or flags" --FIXME: use Text instance for FlagName and FlagAssignment instance Text UserConstraint where disp (UserConstraintVersion pkgname verrange) = disp pkgname <+> disp verrange disp (UserConstraintInstalled pkgname) = disp pkgname <+> Disp.text "installed" disp (UserConstraintSource pkgname) = disp pkgname <+> Disp.text "source" disp (UserConstraintFlags pkgname flags) = disp pkgname <+> dispFlagAssignment flags where dispFlagAssignment = Disp.hsep . map dispFlagValue dispFlagValue (f, True) = Disp.char '+' <> dispFlagName f dispFlagValue (f, False) = Disp.char '-' <> dispFlagName f dispFlagName (FlagName f) = Disp.text f disp (UserConstraintStanzas pkgname stanzas) = disp pkgname <+> dispStanzas stanzas where dispStanzas = Disp.hsep . 
map dispStanza dispStanza TestStanzas = Disp.text "test" dispStanza BenchStanzas = Disp.text "bench" parse = parse >>= parseConstraint where spaces = Parse.satisfy isSpace >> Parse.skipSpaces parseConstraint pkgname = ((parse >>= return . UserConstraintVersion pkgname) +++ (do spaces _ <- Parse.string "installed" return (UserConstraintInstalled pkgname)) +++ (do spaces _ <- Parse.string "source" return (UserConstraintSource pkgname)) +++ (do spaces _ <- Parse.string "test" return (UserConstraintStanzas pkgname [TestStanzas])) +++ (do spaces _ <- Parse.string "bench" return (UserConstraintStanzas pkgname [BenchStanzas]))) <++ (parseFlagAssignment >>= (return . UserConstraintFlags pkgname)) parseFlagAssignment = Parse.many1 (spaces >> parseFlagValue) parseFlagValue = (do Parse.optional (Parse.char '+') f <- parseFlagName return (f, True)) +++ (do _ <- Parse.char '-' f <- parseFlagName return (f, False)) parseFlagName = liftM FlagName ident ident :: Parse.ReadP r String ident = Parse.munch1 identChar >>= \s -> check s >> return s where identChar c = isAlphaNum c || c == '_' || c == '-' check ('-':_) = Parse.pfail check _ = return () cabal-install-1.22.6.0/Distribution/Client/Config.hs0000644000000000000000000011353712540225656020313 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Config -- Copyright : (c) David Himmelstrup 2005 -- License : BSD-like -- -- Maintainer : lemmih@gmail.com -- Stability : provisional -- Portability : portable -- -- Utilities for handling saved state such as known packages, known servers and -- downloaded packages. ----------------------------------------------------------------------------- module Distribution.Client.Config ( SavedConfig(..), loadConfig, showConfig, showConfigWithComments, parseConfig, defaultCabalDir, defaultConfigFile, defaultCacheDir, defaultCompiler, defaultLogsDir, defaultUserInstall, baseSavedConfig, commentSavedConfig, initialSavedConfig, configFieldDescriptions, haddockFlagsFields, installDirsFields, withProgramsFields, withProgramOptionsFields, userConfigDiff, userConfigUpdate ) where import Distribution.Client.Types ( RemoteRepo(..), Username(..), Password(..) ) import Distribution.Client.BuildReports.Types ( ReportLevel(..) ) import Distribution.Client.Setup ( GlobalFlags(..), globalCommand, defaultGlobalFlags , ConfigExFlags(..), configureExOptions, defaultConfigExFlags , InstallFlags(..), installOptions, defaultInstallFlags , UploadFlags(..), uploadCommand , ReportFlags(..), reportCommand , showRepo, parseRepo ) import Distribution.Utils.NubList ( NubList, fromNubList, toNubList) import Distribution.Simple.Compiler ( DebugInfoLevel(..), OptimisationLevel(..) ) import Distribution.Simple.Setup ( ConfigFlags(..), configureOptions, defaultConfigFlags , HaddockFlags(..), haddockOptions, defaultHaddockFlags , installDirsOptions , programConfigurationPaths', programConfigurationOptions , Flag(..), toFlag, flagToMaybe, fromFlagOrDefault ) import Distribution.Simple.InstallDirs ( InstallDirs(..), defaultInstallDirs , PathTemplate, toPathTemplate ) import Distribution.ParseUtils ( FieldDescr(..), liftField , ParseResult(..), PError(..), PWarning(..) , locatedErrorMsg, showPWarning , readFields, warning, lineNo , simpleField, listField, parseFilePathQ, parseTokenQ ) import Distribution.Client.ParseUtils ( parseFields, ppFields, ppSection ) import qualified Distribution.ParseUtils as ParseUtils ( Field(..) 
) import qualified Distribution.Text as Text ( Text(..) ) import Distribution.Simple.Command ( CommandUI(commandOptions), commandDefaultFlags, ShowOrParseArgs(..) , viewAsFieldDescr ) import Distribution.Simple.Program ( defaultProgramConfiguration ) import Distribution.Simple.Utils ( die, notice, warn, lowercase, cabalVersion ) import Distribution.Compiler ( CompilerFlavor(..), defaultCompilerFlavor ) import Distribution.Verbosity ( Verbosity, normal ) import Data.List ( partition, find, foldl' ) import Data.Maybe ( fromMaybe ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( Monoid(..) ) #endif import Control.Monad ( unless, foldM, liftM, liftM2 ) import qualified Distribution.Compat.ReadP as Parse ( option ) import qualified Text.PrettyPrint as Disp ( render, text, empty ) import Text.PrettyPrint ( ($+$) ) import System.Directory ( createDirectoryIfMissing, getAppUserDataDirectory, renameFile ) import Network.URI ( URI(..), URIAuth(..) ) import System.FilePath ( (<.>), (</>), takeDirectory ) import System.IO.Error ( isDoesNotExistError ) import Distribution.Compat.Environment ( getEnvironment ) import Distribution.Compat.Exception ( catchIO ) import qualified Paths_cabal_install ( version ) import Data.Version ( showVersion ) import Data.Char ( isSpace ) import qualified Data.Map as M -- -- * Configuration saved in the config file -- data SavedConfig = SavedConfig { savedGlobalFlags :: GlobalFlags, savedInstallFlags :: InstallFlags, savedConfigureFlags :: ConfigFlags, savedConfigureExFlags :: ConfigExFlags, savedUserInstallDirs :: InstallDirs (Flag PathTemplate), savedGlobalInstallDirs :: InstallDirs (Flag PathTemplate), savedUploadFlags :: UploadFlags, savedReportFlags :: ReportFlags, savedHaddockFlags :: HaddockFlags } instance Monoid SavedConfig where mempty = SavedConfig { savedGlobalFlags = mempty, savedInstallFlags = mempty, savedConfigureFlags = mempty, savedConfigureExFlags = mempty, savedUserInstallDirs = mempty, savedGlobalInstallDirs = mempty, savedUploadFlags = mempty, savedReportFlags = mempty, savedHaddockFlags = mempty } mappend a b = SavedConfig { savedGlobalFlags = combinedSavedGlobalFlags, savedInstallFlags = combinedSavedInstallFlags, savedConfigureFlags = combinedSavedConfigureFlags, savedConfigureExFlags = combinedSavedConfigureExFlags, savedUserInstallDirs = combinedSavedUserInstallDirs, savedGlobalInstallDirs = combinedSavedGlobalInstallDirs, savedUploadFlags = combinedSavedUploadFlags, savedReportFlags = combinedSavedReportFlags, savedHaddockFlags = combinedSavedHaddockFlags } where -- This is ugly, but necessary. If we're mappending two config files, we -- want the values of the *non-empty* list fields from the second one to -- *override* the corresponding values from the first one. Default -- behaviour (concatenation) is confusing and makes some use cases (see -- #1884) impossible. -- -- However, we also want to allow specifying multiple values for a list -- field in a *single* config file. For example, we want the following to -- continue to work: -- -- remote-repo: hackage.haskell.org:http://hackage.haskell.org/ -- remote-repo: private-collection:http://hackage.local/ -- -- So we can't just wrap the list fields inside Flags; we have to do some -- special-casing just for SavedConfig. -- NB: the signature prevents us from using 'combine' on lists. combine' :: (SavedConfig -> flags) -> (flags -> Flag a) -> Flag a combine' field subfield = (subfield . field $ a) `mappend` (subfield . 
field $ b) lastNonEmpty' :: (SavedConfig -> flags) -> (flags -> [a]) -> [a] lastNonEmpty' field subfield = let a' = subfield . field $ a b' = subfield . field $ b in case b' of [] -> a' _ -> b' lastNonEmptyNL' :: (SavedConfig -> flags) -> (flags -> NubList a) -> NubList a lastNonEmptyNL' field subfield = let a' = subfield . field $ a b' = subfield . field $ b in case fromNubList b' of [] -> a' _ -> b' combinedSavedGlobalFlags = GlobalFlags { globalVersion = combine globalVersion, globalNumericVersion = combine globalNumericVersion, globalConfigFile = combine globalConfigFile, globalSandboxConfigFile = combine globalSandboxConfigFile, globalRemoteRepos = lastNonEmptyNL globalRemoteRepos, globalCacheDir = combine globalCacheDir, globalLocalRepos = lastNonEmptyNL globalLocalRepos, globalLogsDir = combine globalLogsDir, globalWorldFile = combine globalWorldFile, globalRequireSandbox = combine globalRequireSandbox, globalIgnoreSandbox = combine globalIgnoreSandbox } where combine = combine' savedGlobalFlags lastNonEmptyNL = lastNonEmptyNL' savedGlobalFlags combinedSavedInstallFlags = InstallFlags { installDocumentation = combine installDocumentation, installHaddockIndex = combine installHaddockIndex, installDryRun = combine installDryRun, installMaxBackjumps = combine installMaxBackjumps, installReorderGoals = combine installReorderGoals, installIndependentGoals = combine installIndependentGoals, installShadowPkgs = combine installShadowPkgs, installStrongFlags = combine installStrongFlags, installReinstall = combine installReinstall, installAvoidReinstalls = combine installAvoidReinstalls, installOverrideReinstall = combine installOverrideReinstall, installUpgradeDeps = combine installUpgradeDeps, installOnly = combine installOnly, installOnlyDeps = combine installOnlyDeps, installRootCmd = combine installRootCmd, installSummaryFile = lastNonEmptyNL installSummaryFile, installLogFile = combine installLogFile, installBuildReports = combine installBuildReports, installReportPlanningFailure = combine installReportPlanningFailure, installSymlinkBinDir = combine installSymlinkBinDir, installOneShot = combine installOneShot, installNumJobs = combine installNumJobs, installRunTests = combine installRunTests } where combine = combine' savedInstallFlags lastNonEmptyNL = lastNonEmptyNL' savedInstallFlags combinedSavedConfigureFlags = ConfigFlags { configPrograms = configPrograms . savedConfigureFlags $ b, -- TODO: NubListify configProgramPaths = lastNonEmpty configProgramPaths, -- TODO: NubListify configProgramArgs = lastNonEmpty configProgramArgs, configProgramPathExtra = lastNonEmptyNL configProgramPathExtra, configHcFlavor = combine configHcFlavor, configHcPath = combine configHcPath, configHcPkg = combine configHcPkg, configVanillaLib = combine configVanillaLib, configProfLib = combine configProfLib, configSharedLib = combine configSharedLib, configDynExe = combine configDynExe, configProfExe = combine configProfExe, -- TODO: NubListify configConfigureArgs = lastNonEmpty configConfigureArgs, configOptimization = combine configOptimization, configDebugInfo = combine configDebugInfo, configProgPrefix = combine configProgPrefix, configProgSuffix = combine configProgSuffix, -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. configInstallDirs = (configInstallDirs . savedConfigureFlags $ a) `mappend` (configInstallDirs . 
savedConfigureFlags $ b), configScratchDir = combine configScratchDir, -- TODO: NubListify configExtraLibDirs = lastNonEmpty configExtraLibDirs, -- TODO: NubListify configExtraIncludeDirs = lastNonEmpty configExtraIncludeDirs, configDistPref = combine configDistPref, configVerbosity = combine configVerbosity, configUserInstall = combine configUserInstall, -- TODO: NubListify configPackageDBs = lastNonEmpty configPackageDBs, configGHCiLib = combine configGHCiLib, configSplitObjs = combine configSplitObjs, configStripExes = combine configStripExes, configStripLibs = combine configStripLibs, -- TODO: NubListify configConstraints = lastNonEmpty configConstraints, -- TODO: NubListify configDependencies = lastNonEmpty configDependencies, configInstantiateWith = lastNonEmpty configInstantiateWith, -- TODO: NubListify configConfigurationsFlags = lastNonEmpty configConfigurationsFlags, configTests = combine configTests, configBenchmarks = combine configBenchmarks, configCoverage = combine configCoverage, configLibCoverage = combine configLibCoverage, configExactConfiguration = combine configExactConfiguration, configFlagError = combine configFlagError, configRelocatable = combine configRelocatable } where combine = combine' savedConfigureFlags lastNonEmpty = lastNonEmpty' savedConfigureFlags lastNonEmptyNL = lastNonEmptyNL' savedConfigureFlags combinedSavedConfigureExFlags = ConfigExFlags { configCabalVersion = combine configCabalVersion, -- TODO: NubListify configExConstraints = lastNonEmpty configExConstraints, -- TODO: NubListify configPreferences = lastNonEmpty configPreferences, configSolver = combine configSolver, configAllowNewer = combine configAllowNewer } where combine = combine' savedConfigureExFlags lastNonEmpty = lastNonEmpty' savedConfigureExFlags -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. combinedSavedUserInstallDirs = savedUserInstallDirs a `mappend` savedUserInstallDirs b -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. 
combinedSavedGlobalInstallDirs = savedGlobalInstallDirs a `mappend` savedGlobalInstallDirs b combinedSavedUploadFlags = UploadFlags { uploadCheck = combine uploadCheck, uploadUsername = combine uploadUsername, uploadPassword = combine uploadPassword, uploadVerbosity = combine uploadVerbosity } where combine = combine' savedUploadFlags combinedSavedReportFlags = ReportFlags { reportUsername = combine reportUsername, reportPassword = combine reportPassword, reportVerbosity = combine reportVerbosity } where combine = combine' savedReportFlags combinedSavedHaddockFlags = HaddockFlags { -- TODO: NubListify haddockProgramPaths = lastNonEmpty haddockProgramPaths, -- TODO: NubListify haddockProgramArgs = lastNonEmpty haddockProgramArgs, haddockHoogle = combine haddockHoogle, haddockHtml = combine haddockHtml, haddockHtmlLocation = combine haddockHtmlLocation, haddockExecutables = combine haddockExecutables, haddockTestSuites = combine haddockTestSuites, haddockBenchmarks = combine haddockBenchmarks, haddockInternal = combine haddockInternal, haddockCss = combine haddockCss, haddockHscolour = combine haddockHscolour, haddockHscolourCss = combine haddockHscolourCss, haddockContents = combine haddockContents, haddockDistPref = combine haddockDistPref, haddockKeepTempFiles = combine haddockKeepTempFiles, haddockVerbosity = combine haddockVerbosity } where combine = combine' savedHaddockFlags lastNonEmpty = lastNonEmpty' savedHaddockFlags updateInstallDirs :: Flag Bool -> SavedConfig -> SavedConfig updateInstallDirs userInstallFlag savedConfig@SavedConfig { savedConfigureFlags = configureFlags, savedUserInstallDirs = userInstallDirs, savedGlobalInstallDirs = globalInstallDirs } = savedConfig { savedConfigureFlags = configureFlags { configInstallDirs = installDirs } } where installDirs | userInstall = userInstallDirs | otherwise = globalInstallDirs userInstall = fromFlagOrDefault defaultUserInstall $ configUserInstall configureFlags `mappend` userInstallFlag -- -- * Default config -- -- | These are the absolute basic defaults. The fields that must be -- initialised. When we load the config from the file we layer the loaded -- values over these ones, so any missing fields in the file take their values -- from here. -- baseSavedConfig :: IO SavedConfig baseSavedConfig = do userPrefix <- defaultCabalDir logsDir <- defaultLogsDir worldFile <- defaultWorldFile return mempty { savedConfigureFlags = mempty { configHcFlavor = toFlag defaultCompiler, configUserInstall = toFlag defaultUserInstall, configVerbosity = toFlag normal }, savedUserInstallDirs = mempty { prefix = toFlag (toPathTemplate userPrefix) }, savedGlobalFlags = mempty { globalLogsDir = toFlag logsDir, globalWorldFile = toFlag worldFile } } -- | This is the initial configuration that we write out to to the config file -- if the file does not exist (or the config we use if the file cannot be read -- for some other reason). When the config gets loaded it gets layered on top -- of 'baseSavedConfig' so we do not need to include it into the initial -- values we save into the config file. 
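-- A rough sketch of the layering described above (not a literal code path,
-- just its shape; the real uses are in 'loadConfig' and 'userConfigDiff'
-- below):
--
-- > layered <- liftM2 mappend baseSavedConfig initialSavedConfig
--
-- Any field the second config leaves as 'mempty' falls back to the
-- corresponding field of 'baseSavedConfig'.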
-- initialSavedConfig :: IO SavedConfig initialSavedConfig = do cacheDir <- defaultCacheDir logsDir <- defaultLogsDir worldFile <- defaultWorldFile extraPath <- defaultExtraPath return mempty { savedGlobalFlags = mempty { globalCacheDir = toFlag cacheDir, globalRemoteRepos = toNubList [defaultRemoteRepo], globalWorldFile = toFlag worldFile }, savedConfigureFlags = mempty { configProgramPathExtra = toNubList extraPath }, savedInstallFlags = mempty { installSummaryFile = toNubList [toPathTemplate (logsDir </> "build.log")], installBuildReports= toFlag AnonymousReports, installNumJobs = toFlag Nothing } } --TODO: misleading, there's no way to override this default -- either make it possible or rename to simply getCabalDir. defaultCabalDir :: IO FilePath defaultCabalDir = getAppUserDataDirectory "cabal" defaultConfigFile :: IO FilePath defaultConfigFile = do dir <- defaultCabalDir return $ dir </> "config" defaultCacheDir :: IO FilePath defaultCacheDir = do dir <- defaultCabalDir return $ dir </> "packages" defaultLogsDir :: IO FilePath defaultLogsDir = do dir <- defaultCabalDir return $ dir </> "logs" -- | Default position of the world file defaultWorldFile :: IO FilePath defaultWorldFile = do dir <- defaultCabalDir return $ dir </> "world" defaultExtraPath :: IO [FilePath] defaultExtraPath = do dir <- defaultCabalDir return [dir </> "bin"] defaultCompiler :: CompilerFlavor defaultCompiler = fromMaybe GHC defaultCompilerFlavor defaultUserInstall :: Bool defaultUserInstall = True -- We do per-user installs by default on all platforms. We used to default to -- global installs on Windows but that no longer works on Windows Vista or 7. defaultRemoteRepo :: RemoteRepo defaultRemoteRepo = RemoteRepo name uri where name = "hackage.haskell.org" uri = URI "http:" (Just (URIAuth "" name "")) "/packages/archive" "" "" -- -- * Config file reading -- loadConfig :: Verbosity -> Flag FilePath -> Flag Bool -> IO SavedConfig loadConfig verbosity configFileFlag userInstallFlag = addBaseConf $ do let sources = [ ("commandline option", return . flagToMaybe $ configFileFlag), ("env var CABAL_CONFIG", lookup "CABAL_CONFIG" `liftM` getEnvironment), ("default config file", Just `liftM` defaultConfigFile) ] getSource [] = error "no config file path candidate found." getSource ((msg,action): xs) = action >>= maybe (getSource xs) (return . (,) msg) (source, configFile) <- getSource sources minp <- readConfigFile mempty configFile case minp of Nothing -> do notice verbosity $ "Config file path source is " ++ source ++ "." notice verbosity $ "Config file " ++ configFile ++ " not found." notice verbosity $ "Writing default configuration to " ++ configFile commentConf <- commentSavedConfig initialConf <- initialSavedConfig writeConfigFile configFile commentConf initialConf return initialConf Just (ParseOk ws conf) -> do unless (null ws) $ warn verbosity $ unlines (map (showPWarning configFile) ws) return conf Just (ParseFailed err) -> do let (line, msg) = locatedErrorMsg err die $ "Error parsing config file " ++ configFile ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg where addBaseConf body = do base <- baseSavedConfig extra <- body return (updateInstallDirs userInstallFlag (base `mappend` extra)) readConfigFile :: SavedConfig -> FilePath -> IO (Maybe (ParseResult SavedConfig)) readConfigFile initial file = handleNotExists $ fmap (Just . 
parseConfig initial) (readFile file) where handleNotExists action = catchIO action $ \ioe -> if isDoesNotExistError ioe then return Nothing else ioError ioe writeConfigFile :: FilePath -> SavedConfig -> SavedConfig -> IO () writeConfigFile file comments vals = do let tmpFile = file <.> "tmp" createDirectoryIfMissing True (takeDirectory file) writeFile tmpFile $ explanation ++ showConfigWithComments comments vals ++ "\n" renameFile tmpFile file where explanation = unlines ["-- This is the configuration file for the 'cabal' command line tool." ,"" ,"-- The available configuration options are listed below." ,"-- Some of them have default values listed." ,"" ,"-- Lines (like this one) beginning with '--' are comments." ,"-- Be careful with spaces and indentation because they are" ,"-- used to indicate layout for nested sections." ,"" ,"-- Cabal library version: " ++ showVersion cabalVersion ,"-- cabal-install version: " ++ showVersion Paths_cabal_install.version ,"","" ] -- | These are the default values that get used in Cabal if a no value is -- given. We use these here to include in comments when we write out the -- initial config file so that the user can see what default value they are -- overriding. -- commentSavedConfig :: IO SavedConfig commentSavedConfig = do userInstallDirs <- defaultInstallDirs defaultCompiler True True globalInstallDirs <- defaultInstallDirs defaultCompiler False True return SavedConfig { savedGlobalFlags = defaultGlobalFlags, savedInstallFlags = defaultInstallFlags, savedConfigureExFlags = defaultConfigExFlags, savedConfigureFlags = (defaultConfigFlags defaultProgramConfiguration) { configUserInstall = toFlag defaultUserInstall }, savedUserInstallDirs = fmap toFlag userInstallDirs, savedGlobalInstallDirs = fmap toFlag globalInstallDirs, savedUploadFlags = commandDefaultFlags uploadCommand, savedReportFlags = commandDefaultFlags reportCommand, savedHaddockFlags = defaultHaddockFlags } -- | All config file fields. -- configFieldDescriptions :: [FieldDescr SavedConfig] configFieldDescriptions = toSavedConfig liftGlobalFlag (commandOptions (globalCommand []) ParseArgs) ["version", "numeric-version", "config-file", "sandbox-config-file"] [] ++ toSavedConfig liftConfigFlag (configureOptions ParseArgs) (["builddir", "constraint", "dependency"] ++ map fieldName installDirsFields) --FIXME: this is only here because viewAsFieldDescr gives us a parser -- that only recognises 'ghc' etc, the case-sensitive flag names, not -- what the normal case-insensitive parser gives us. [simpleField "compiler" (fromFlagOrDefault Disp.empty . fmap Text.disp) (optional Text.parse) configHcFlavor (\v flags -> flags { configHcFlavor = v }) -- TODO: The following is a temporary fix. The "optimization" -- and "debug-info" fields are OptArg, and viewAsFieldDescr -- fails on that. Instead of a hand-written hackaged parser -- and printer, we should handle this case properly in the -- library. 
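-- To illustrate the hand-rolled parsers below, these are some of the accepted
-- spellings as they would appear in a config file (a sketch, not an
-- exhaustive list):
--
-- > optimization: 2     -- parsed as MaximumOptimisation
-- > debug-info: True    -- parsed as NormalDebugInfo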
,liftField configOptimization (\v flags -> flags { configOptimization = v }) $ let name = "optimization" in FieldDescr name (\f -> case f of Flag NoOptimisation -> Disp.text "False" Flag NormalOptimisation -> Disp.text "True" Flag MaximumOptimisation -> Disp.text "2" _ -> Disp.empty) (\line str _ -> case () of _ | str == "False" -> ParseOk [] (Flag NoOptimisation) | str == "True" -> ParseOk [] (Flag NormalOptimisation) | str == "0" -> ParseOk [] (Flag NoOptimisation) | str == "1" -> ParseOk [] (Flag NormalOptimisation) | str == "2" -> ParseOk [] (Flag MaximumOptimisation) | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation) | lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation) | otherwise -> ParseFailed (NoParse name line) where lstr = lowercase str caseWarning = PWarning $ "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") ,liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $ let name = "debug-info" in FieldDescr name (\f -> case f of Flag NoDebugInfo -> Disp.text "False" Flag MinimalDebugInfo -> Disp.text "1" Flag NormalDebugInfo -> Disp.text "True" Flag MaximalDebugInfo -> Disp.text "3" _ -> Disp.empty) (\line str _ -> case () of _ | str == "False" -> ParseOk [] (Flag NoDebugInfo) | str == "True" -> ParseOk [] (Flag NormalDebugInfo) | str == "0" -> ParseOk [] (Flag NoDebugInfo) | str == "1" -> ParseOk [] (Flag MinimalDebugInfo) | str == "2" -> ParseOk [] (Flag NormalDebugInfo) | str == "3" -> ParseOk [] (Flag MaximalDebugInfo) | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo) | lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo) | otherwise -> ParseFailed (NoParse name line) where lstr = lowercase str caseWarning = PWarning $ "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") ] ++ toSavedConfig liftConfigExFlag (configureExOptions ParseArgs) [] [] ++ toSavedConfig liftInstallFlag (installOptions ParseArgs) ["dry-run", "only", "only-dependencies", "dependencies-only"] [] ++ toSavedConfig liftUploadFlag (commandOptions uploadCommand ParseArgs) ["verbose", "check"] [] ++ toSavedConfig liftReportFlag (commandOptions reportCommand ParseArgs) ["verbose", "username", "password"] [] --FIXME: this is a hack, hiding the user name and password. -- But otherwise it masks the upload ones. Either need to -- share the options or make then distinct. In any case -- they should probably be per-server. where toSavedConfig lift options exclusions replacements = [ lift (fromMaybe field replacement) | opt <- options , let field = viewAsFieldDescr opt name = fieldName field replacement = find ((== name) . fieldName) replacements , name `notElem` exclusions ] optional = Parse.option mempty . fmap toFlag -- TODO: next step, make the deprecated fields elicit a warning. -- deprecatedFieldDescriptions :: [FieldDescr SavedConfig] deprecatedFieldDescriptions = [ liftGlobalFlag $ listField "repos" (Disp.text . showRepo) parseRepo (fromNubList . globalRemoteRepos) (\rs cfg -> cfg { globalRemoteRepos = toNubList rs }) , liftGlobalFlag $ simpleField "cachedir" (Disp.text . fromFlagOrDefault "") (optional parseFilePathQ) globalCacheDir (\d cfg -> cfg { globalCacheDir = d }) , liftUploadFlag $ simpleField "hackage-username" (Disp.text . fromFlagOrDefault "" . fmap unUsername) (optional (fmap Username parseTokenQ)) uploadUsername (\d cfg -> cfg { uploadUsername = d }) , liftUploadFlag $ simpleField "hackage-password" (Disp.text . fromFlagOrDefault "" . 
fmap unPassword) (optional (fmap Password parseTokenQ)) uploadPassword (\d cfg -> cfg { uploadPassword = d }) ] ++ map (modifyFieldName ("user-"++) . liftUserInstallDirs) installDirsFields ++ map (modifyFieldName ("global-"++) . liftGlobalInstallDirs) installDirsFields where optional = Parse.option mempty . fmap toFlag modifyFieldName :: (String -> String) -> FieldDescr a -> FieldDescr a modifyFieldName f d = d { fieldName = f (fieldName d) } liftUserInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) -> FieldDescr SavedConfig liftUserInstallDirs = liftField savedUserInstallDirs (\flags conf -> conf { savedUserInstallDirs = flags }) liftGlobalInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) -> FieldDescr SavedConfig liftGlobalInstallDirs = liftField savedGlobalInstallDirs (\flags conf -> conf { savedGlobalInstallDirs = flags }) liftGlobalFlag :: FieldDescr GlobalFlags -> FieldDescr SavedConfig liftGlobalFlag = liftField savedGlobalFlags (\flags conf -> conf { savedGlobalFlags = flags }) liftConfigFlag :: FieldDescr ConfigFlags -> FieldDescr SavedConfig liftConfigFlag = liftField savedConfigureFlags (\flags conf -> conf { savedConfigureFlags = flags }) liftConfigExFlag :: FieldDescr ConfigExFlags -> FieldDescr SavedConfig liftConfigExFlag = liftField savedConfigureExFlags (\flags conf -> conf { savedConfigureExFlags = flags }) liftInstallFlag :: FieldDescr InstallFlags -> FieldDescr SavedConfig liftInstallFlag = liftField savedInstallFlags (\flags conf -> conf { savedInstallFlags = flags }) liftUploadFlag :: FieldDescr UploadFlags -> FieldDescr SavedConfig liftUploadFlag = liftField savedUploadFlags (\flags conf -> conf { savedUploadFlags = flags }) liftReportFlag :: FieldDescr ReportFlags -> FieldDescr SavedConfig liftReportFlag = liftField savedReportFlags (\flags conf -> conf { savedReportFlags = flags }) parseConfig :: SavedConfig -> String -> ParseResult SavedConfig parseConfig initial = \str -> do fields <- readFields str let (knownSections, others) = partition isKnownSection fields config <- parse others let user0 = savedUserInstallDirs config global0 = savedGlobalInstallDirs config (haddockFlags, user, global, paths, args) <- foldM parseSections (savedHaddockFlags config, user0, global0, [], []) knownSections return config { savedConfigureFlags = (savedConfigureFlags config) { configProgramPaths = paths, configProgramArgs = args }, savedHaddockFlags = haddockFlags, savedUserInstallDirs = user, savedGlobalInstallDirs = global } where isKnownSection (ParseUtils.Section _ "haddock" _ _) = True isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True isKnownSection _ = False parse = parseFields (configFieldDescriptions ++ deprecatedFieldDescriptions) initial parseSections accum@(h,u,g,p,a) (ParseUtils.Section _ "haddock" name fs) | name == "" = do h' <- parseFields haddockFlagsFields h fs return (h', u, g, p, a) | otherwise = do warning "The 'haddock' section should be unnamed" return accum parseSections accum@(h,u,g,p,a) (ParseUtils.Section _ "install-dirs" name fs) | name' == "user" = do u' <- parseFields installDirsFields u fs return (h, u', g, p, a) | name' == "global" = do g' <- parseFields installDirsFields g fs return (h, u, g', p, a) | otherwise = do warning "The 'install-paths' section should be for 'user' or 'global'" return accum where name' = lowercase name parseSections accum@(h,u,g,p,a) (ParseUtils.Section 
_ "program-locations" name fs) | name == "" = do p' <- parseFields withProgramsFields p fs return (h, u, g, p', a) | otherwise = do warning "The 'program-locations' section should be unnamed" return accum parseSections accum@(h, u, g, p, a) (ParseUtils.Section _ "program-default-options" name fs) | name == "" = do a' <- parseFields withProgramOptionsFields a fs return (h, u, g, p, a') | otherwise = do warning "The 'program-default-options' section should be unnamed" return accum parseSections accum f = do warning $ "Unrecognized stanza on line " ++ show (lineNo f) return accum showConfig :: SavedConfig -> String showConfig = showConfigWithComments mempty showConfigWithComments :: SavedConfig -> SavedConfig -> String showConfigWithComments comment vals = Disp.render $ ppFields configFieldDescriptions mcomment vals $+$ Disp.text "" $+$ ppSection "haddock" "" haddockFlagsFields (fmap savedHaddockFlags mcomment) (savedHaddockFlags vals) $+$ Disp.text "" $+$ installDirsSection "user" savedUserInstallDirs $+$ Disp.text "" $+$ installDirsSection "global" savedGlobalInstallDirs $+$ Disp.text "" $+$ configFlagsSection "program-locations" withProgramsFields configProgramPaths $+$ Disp.text "" $+$ configFlagsSection "program-default-options" withProgramOptionsFields configProgramArgs where mcomment = Just comment installDirsSection name field = ppSection "install-dirs" name installDirsFields (fmap field mcomment) (field vals) configFlagsSection name fields field = ppSection name "" fields (fmap (field . savedConfigureFlags) mcomment) ((field . savedConfigureFlags) vals) -- | Fields for the 'install-dirs' sections. installDirsFields :: [FieldDescr (InstallDirs (Flag PathTemplate))] installDirsFields = map viewAsFieldDescr installDirsOptions -- | Fields for the 'haddock' section. haddockFlagsFields :: [FieldDescr HaddockFlags] haddockFlagsFields = [ field | opt <- haddockOptions ParseArgs , let field = viewAsFieldDescr opt name = fieldName field , name `notElem` exclusions ] where exclusions = ["verbose", "builddir"] -- | Fields for the 'program-locations' section. withProgramsFields :: [FieldDescr [(String, FilePath)]] withProgramsFields = map viewAsFieldDescr $ programConfigurationPaths' (++ "-location") defaultProgramConfiguration ParseArgs id (++) -- | Fields for the 'program-default-options' section. withProgramOptionsFields :: [FieldDescr [(String, [String])]] withProgramOptionsFields = map viewAsFieldDescr $ programConfigurationOptions defaultProgramConfiguration ParseArgs id (++) -- | Get the differences (as a pseudo code diff) between the user's -- '~/.cabal/config' and the one that cabal would generate if it didn't exist. userConfigDiff :: GlobalFlags -> IO [String] userConfigDiff globalFlags = do userConfig <- loadConfig normal (globalConfigFile globalFlags) mempty testConfig <- liftM2 mappend baseSavedConfig initialSavedConfig return $ reverse . foldl' createDiff [] . M.toList $ M.unionWith combine (M.fromList . map justFst $ filterShow testConfig) (M.fromList . 
map justSnd $ filterShow userConfig) where justFst (a, b) = (a, (Just b, Nothing)) justSnd (a, b) = (a, (Nothing, Just b)) combine (Nothing, Just b) (Just a, Nothing) = (Just a, Just b) combine (Just a, Nothing) (Nothing, Just b) = (Just a, Just b) combine x y = error $ "Can't happen : userConfigDiff " ++ show x ++ " " ++ show y createDiff :: [String] -> (String, (Maybe String, Maybe String)) -> [String] createDiff acc (key, (Just a, Just b)) | a == b = acc | otherwise = ("+ " ++ key ++ ": " ++ b) : ("- " ++ key ++ ": " ++ a) : acc createDiff acc (key, (Nothing, Just b)) = ("+ " ++ key ++ ": " ++ b) : acc createDiff acc (key, (Just a, Nothing)) = ("- " ++ key ++ ": " ++ a) : acc createDiff acc (_, (Nothing, Nothing)) = acc filterShow :: SavedConfig -> [(String, String)] filterShow cfg = map keyValueSplit . filter (\s -> not (null s) && any (== ':') s) . map nonComment . lines $ showConfig cfg nonComment [] = [] nonComment ('-':'-':_) = [] nonComment (x:xs) = x : nonComment xs topAndTail = reverse . dropWhile isSpace . reverse . dropWhile isSpace keyValueSplit s = let (left, right) = break (== ':') s in (topAndTail left, topAndTail (drop 1 right)) -- | Update the user's ~/.cabal/config' keeping the user's customizations. userConfigUpdate :: Verbosity -> GlobalFlags -> IO () userConfigUpdate verbosity globalFlags = do userConfig <- loadConfig normal (globalConfigFile globalFlags) mempty newConfig <- liftM2 mappend baseSavedConfig initialSavedConfig commentConf <- commentSavedConfig cabalFile <- defaultConfigFile let backup = cabalFile ++ ".backup" notice verbosity $ "Renaming " ++ cabalFile ++ " to " ++ backup ++ "." renameFile cabalFile backup notice verbosity $ "Writing merged config to " ++ cabalFile ++ "." writeConfigFile cabalFile commentConf (newConfig `mappend` userConfig) cabal-install-1.22.6.0/Distribution/Client/ParseUtils.hs0000644000000000000000000000466112540225656021176 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.ParseUtils -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Parsing utilities. ----------------------------------------------------------------------------- module Distribution.Client.ParseUtils ( parseFields, ppFields, ppSection ) where import Distribution.ParseUtils ( FieldDescr(..), ParseResult(..), warning, lineNo ) import qualified Distribution.ParseUtils as ParseUtils ( Field(..) ) import Control.Monad ( foldM ) import Text.PrettyPrint ( (<>), (<+>), ($+$) ) import qualified Data.Map as Map import qualified Text.PrettyPrint as Disp ( Doc, text, colon, vcat, empty, isEmpty, nest ) --FIXME: replace this with something better parseFields :: [FieldDescr a] -> a -> [ParseUtils.Field] -> ParseResult a parseFields fields = foldM setField where fieldMap = Map.fromList [ (name, f) | f@(FieldDescr name _ _) <- fields ] setField accum (ParseUtils.F line name value) = case Map.lookup name fieldMap of Just (FieldDescr _ _ set) -> set line value accum Nothing -> do warning $ "Unrecognized field " ++ name ++ " on line " ++ show line return accum setField accum f = do warning $ "Unrecognized stanza on line " ++ show (lineNo f) return accum -- | This is a customised version of the functions from Distribution.ParseUtils -- that also optionally print default values for empty fields as comments. 
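-- For example (an illustrative sketch; the field name is hypothetical):
--
-- > ppField "jobs" (Just (Disp.text "$ncpus")) Disp.empty
--
-- renders the commented default @-- jobs: $ncpus@, whereas a non-empty
-- current value renders as an ordinary @jobs: ...@ line.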
-- ppFields :: [FieldDescr a] -> (Maybe a) -> a -> Disp.Doc ppFields fields def cur = Disp.vcat [ ppField name (fmap getter def) (getter cur) | FieldDescr name getter _ <- fields] ppField :: String -> (Maybe Disp.Doc) -> Disp.Doc -> Disp.Doc ppField name mdef cur | Disp.isEmpty cur = maybe Disp.empty (\def -> Disp.text "--" <+> Disp.text name <> Disp.colon <+> def) mdef | otherwise = Disp.text name <> Disp.colon <+> cur ppSection :: String -> String -> [FieldDescr a] -> (Maybe a) -> a -> Disp.Doc ppSection name arg fields def cur | Disp.isEmpty fieldsDoc = Disp.empty | otherwise = Disp.text name <+> argDoc $+$ (Disp.nest 2 fieldsDoc) where fieldsDoc = ppFields fields def cur argDoc | arg == "" = Disp.empty | otherwise = Disp.text arg cabal-install-1.22.6.0/Distribution/Client/Tar.hs0000644000000000000000000010075112540225656017626 0ustar0000000000000000{-# LANGUAGE DeriveFunctor #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Tar -- Copyright : (c) 2007 Bjorn Bringert, -- 2008 Andrea Vezzosi, -- 2008-2009 Duncan Coutts -- License : BSD3 -- -- Maintainer : duncan@community.haskell.org -- Portability : portable -- -- Reading, writing and manipulating \"@.tar@\" archive files. -- ----------------------------------------------------------------------------- module Distribution.Client.Tar ( -- * High level \"all in one\" operations createTarGzFile, extractTarGzFile, -- * Converting between internal and external representation read, write, writeEntries, -- * Packing and unpacking files to\/from internal representation pack, unpack, -- * Tar entry and associated types Entry(..), entryPath, EntryContent(..), Ownership(..), FileSize, Permissions, EpochTime, DevMajor, DevMinor, TypeCode, Format(..), buildTreeRefTypeCode, buildTreeSnapshotTypeCode, isBuildTreeRefTypeCode, entrySizeInBlocks, entrySizeInBytes, -- * Constructing simple entry values simpleEntry, fileEntry, directoryEntry, -- * TarPath type TarPath, toTarPath, fromTarPath, -- ** Sequences of tar entries Entries(..), foldrEntries, foldlEntries, unfoldrEntries, mapEntries, filterEntries, entriesIndex, ) where import Data.Char (ord) import Data.Int (Int64) import Data.Bits (Bits, shiftL, testBit) import Data.List (foldl') import Numeric (readOct, showOct) import Control.Applicative (Applicative(..)) import Control.Monad (MonadPlus(mplus), when, ap, liftM) import qualified Data.Map as Map import qualified Data.ByteString.Lazy as BS import qualified Data.ByteString.Lazy.Char8 as BS.Char8 import Data.ByteString.Lazy (ByteString) import qualified Codec.Compression.GZip as GZip import qualified Distribution.Client.GZipUtils as GZipUtils import System.FilePath ( (</>) ) import qualified System.FilePath as FilePath.Native import qualified System.FilePath.Windows as FilePath.Windows import qualified System.FilePath.Posix as FilePath.Posix import System.Directory ( getDirectoryContents, doesDirectoryExist , getPermissions, createDirectoryIfMissing, copyFile ) import qualified System.Directory as Permissions ( Permissions(executable) ) import Distribution.Client.Compat.FilePerms ( setFileExecutable ) import System.Posix.Types ( FileMode ) import Distribution.Client.Compat.Time ( EpochTime, getModTime ) import System.IO ( IOMode(ReadMode), openBinaryFile, hFileSize ) import System.IO.Unsafe (unsafeInterleaveIO) import Prelude hiding (read) -- -- * High level operations -- createTarGzFile :: FilePath -- ^ Full Tarball path -> FilePath -- ^ 
Base directory -> FilePath -- ^ Directory to archive, relative to base dir -> IO () createTarGzFile tar base dir = BS.writeFile tar . GZip.compress . write =<< pack base [dir] extractTarGzFile :: FilePath -- ^ Destination directory -> FilePath -- ^ Expected subdir (to check for tarbombs) -> FilePath -- ^ Tarball -> IO () extractTarGzFile dir expected tar = unpack dir . checkTarbomb expected . read . GZipUtils.maybeDecompress =<< BS.readFile tar -- -- * Entry type -- type FileSize = Int64 type DevMajor = Int type DevMinor = Int type TypeCode = Char type Permissions = FileMode -- | Tar archive entry. -- data Entry = Entry { -- | The path of the file or directory within the archive. This is in a -- tar-specific form. Use 'entryPath' to get a native 'FilePath'. entryTarPath :: !TarPath, -- | The real content of the entry. For 'NormalFile' this includes the -- file data. An entry usually contains a 'NormalFile' or a 'Directory'. entryContent :: !EntryContent, -- | File permissions (Unix style file mode). entryPermissions :: !Permissions, -- | The user and group to which this file belongs. entryOwnership :: !Ownership, -- | The time the file was last modified. entryTime :: !EpochTime, -- | The tar format the archive is using. entryFormat :: !Format } -- | Type code for the local build tree reference entry type. We don't use the -- symbolic link entry type because it allows only 100 ASCII characters for the -- path. buildTreeRefTypeCode :: TypeCode buildTreeRefTypeCode = 'C' -- | Type code for the local build tree snapshot entry type. buildTreeSnapshotTypeCode :: TypeCode buildTreeSnapshotTypeCode = 'S' -- | Is this a type code for a build tree reference? isBuildTreeRefTypeCode :: TypeCode -> Bool isBuildTreeRefTypeCode typeCode | (typeCode == buildTreeRefTypeCode || typeCode == buildTreeSnapshotTypeCode) = True | otherwise = False -- | Native 'FilePath' of the file or directory within the archive. -- entryPath :: Entry -> FilePath entryPath = fromTarPath . entryTarPath -- | Return the size of an entry in bytes. entrySizeInBytes :: Entry -> FileSize entrySizeInBytes = (*512) . fromIntegral . entrySizeInBlocks -- | Return the number of blocks in an entry. entrySizeInBlocks :: Entry -> Int entrySizeInBlocks entry = 1 + case entryContent entry of NormalFile _ size -> bytesToBlocks size OtherEntryType _ _ size -> bytesToBlocks size _ -> 0 where bytesToBlocks s = 1 + ((fromIntegral s - 1) `div` 512) -- | The content of a tar archive entry, which depends on the type of entry. -- -- Portable archives should contain only 'NormalFile' and 'Directory'. -- data EntryContent = NormalFile ByteString !FileSize | Directory | SymbolicLink !LinkTarget | HardLink !LinkTarget | CharacterDevice !DevMajor !DevMinor | BlockDevice !DevMajor !DevMinor | NamedPipe | OtherEntryType !TypeCode ByteString !FileSize data Ownership = Ownership { -- | The owner user name. Should be set to @\"\"@ if unknown. ownerName :: String, -- | The owner group name. Should be set to @\"\"@ if unknown. groupName :: String, -- | Numeric owner user id. Should be set to @0@ if unknown. ownerId :: !Int, -- | Numeric owner group id. Should be set to @0@ if unknown. groupId :: !Int } -- | There have been a number of extensions to the tar file format over the -- years. They all share the basic entry fields and put more meta-data in -- different extended headers. -- data Format = -- | This is the classic Unix V7 tar format. It does not support owner and -- group names, just numeric Ids. It also does not support device numbers. 
V7Format -- | The \"USTAR\" format is an extension of the classic V7 format. It was -- later standardised by POSIX. It has some restrictions but is the most -- portable format. -- | UstarFormat -- | The GNU tar implementation also extends the classic V7 format, though -- in a slightly different way from the USTAR format. In general for new -- archives the standard USTAR/POSIX should be used. -- | GnuFormat deriving Eq -- | @rw-r--r--@ for normal files ordinaryFilePermissions :: Permissions ordinaryFilePermissions = 0o0644 -- | @rwxr-xr-x@ for executable files executableFilePermissions :: Permissions executableFilePermissions = 0o0755 -- | @rwxr-xr-x@ for directories directoryPermissions :: Permissions directoryPermissions = 0o0755 isExecutable :: Permissions -> Bool isExecutable p = testBit p 0 || testBit p 6 -- user or other executable -- | An 'Entry' with all default values except for the file name and type. It -- uses the portable USTAR/POSIX format (see 'UstarHeader'). -- -- You can use this as a basis and override specific fields, eg: -- -- > (emptyEntry name HardLink) { linkTarget = target } -- simpleEntry :: TarPath -> EntryContent -> Entry simpleEntry tarpath content = Entry { entryTarPath = tarpath, entryContent = content, entryPermissions = case content of Directory -> directoryPermissions _ -> ordinaryFilePermissions, entryOwnership = Ownership "" "" 0 0, entryTime = 0, entryFormat = UstarFormat } -- | A tar 'Entry' for a file. -- -- Entry fields such as file permissions and ownership have default values. -- -- You can use this as a basis and override specific fields. For example if you -- need an executable file you could use: -- -- > (fileEntry name content) { fileMode = executableFileMode } -- fileEntry :: TarPath -> ByteString -> Entry fileEntry name fileContent = simpleEntry name (NormalFile fileContent (BS.length fileContent)) -- | A tar 'Entry' for a directory. -- -- Entry fields such as file permissions and ownership have default values. -- directoryEntry :: TarPath -> Entry directoryEntry name = simpleEntry name Directory -- -- * Tar paths -- -- | The classic tar format allowed just 100 characters for the file name. The -- USTAR format extended this with an extra 155 characters, however it uses a -- complex method of splitting the name between the two sections. -- -- Instead of just putting any overflow into the extended area, it uses the -- extended area as a prefix. The aggravating insane bit however is that the -- prefix (if any) must only contain a directory prefix. That is the split -- between the two areas must be on a directory separator boundary. So there is -- no simple calculation to work out if a file name is too long. Instead we -- have to try to find a valid split that makes the name fit in the two areas. -- -- The rationale presumably was to make it a bit more compatible with old tar -- programs that only understand the classic format. A classic tar would be -- able to extract the file name and possibly some dir prefix, but not the -- full dir prefix. So the files would end up in the wrong place, but that's -- probably better than ending up with the wrong names too. -- -- So it's understandable but rather annoying. -- -- * Tar paths use POSIX format (ie @\'/\'@ directory separators), irrespective -- of the local path conventions. -- -- * The directory separator between the prefix and name is /not/ stored. -- data TarPath = TarPath FilePath -- path name, 100 characters max. FilePath -- path prefix, 155 characters max. 
deriving (Eq, Ord) -- | Convert a 'TarPath' to a native 'FilePath'. -- -- The native 'FilePath' will use the native directory separator but it is not -- otherwise checked for validity or sanity. In particular: -- -- * The tar path may be invalid as a native path, eg the filename @\"nul\"@ is -- not valid on Windows. -- -- * The tar path may be an absolute path or may contain @\"..\"@ components. -- For security reasons this should not usually be allowed, but it is your -- responsibility to check for these conditions (eg using 'checkSecurity'). -- fromTarPath :: TarPath -> FilePath fromTarPath (TarPath name prefix) = adjustDirectory $ FilePath.Native.joinPath $ FilePath.Posix.splitDirectories prefix ++ FilePath.Posix.splitDirectories name where adjustDirectory | FilePath.Posix.hasTrailingPathSeparator name = FilePath.Native.addTrailingPathSeparator | otherwise = id -- | Convert a native 'FilePath' to a 'TarPath'. -- -- The conversion may fail if the 'FilePath' is too long. See 'TarPath' for a -- description of the problem with splitting long 'FilePath's. -- toTarPath :: Bool -- ^ Is the path for a directory? This is needed because for -- directories a 'TarPath' must always use a trailing @\/@. -> FilePath -> Either String TarPath toTarPath isDir = splitLongPath . addTrailingSep . FilePath.Posix.joinPath . FilePath.Native.splitDirectories where addTrailingSep | isDir = FilePath.Posix.addTrailingPathSeparator | otherwise = id -- | Take a sanitized path, split on directory separators and try to pack it -- into the 155 + 100 tar file name format. -- -- The strategy is this: take the name-directory components in reverse order -- and try to fit as many components into the 100 long name area as possible. -- If all the remaining components fit in the 155 name area then we win. -- splitLongPath :: FilePath -> Either String TarPath splitLongPath path = case packName nameMax (reverse (FilePath.Posix.splitPath path)) of Left err -> Left err Right (name, []) -> Right (TarPath name "") Right (name, first:rest) -> case packName prefixMax remainder of Left err -> Left err Right (_ , _ : _) -> Left "File name too long (cannot split)" Right (prefix, []) -> Right (TarPath name prefix) where -- drop the '/' between the name and prefix: remainder = init first : rest where nameMax, prefixMax :: Int nameMax = 100 prefixMax = 155 packName _ [] = Left "File name empty" packName maxLen (c:cs) | n > maxLen = Left "File name too long" | otherwise = Right (packName' maxLen n [c] cs) where n = length c packName' maxLen n ok (c:cs) | n' <= maxLen = packName' maxLen n' (c:ok) cs where n' = n + length c packName' _ _ ok cs = (FilePath.Posix.joinPath ok, cs) -- | The tar format allows just 100 ASCII characters for the 'SymbolicLink' and -- 'HardLink' entry types. -- newtype LinkTarget = LinkTarget FilePath deriving (Eq, Ord) -- | Convert a tar 'LinkTarget' to a native 'FilePath'. -- fromLinkTarget :: LinkTarget -> FilePath fromLinkTarget (LinkTarget path) = adjustDirectory $ FilePath.Native.joinPath $ FilePath.Posix.splitDirectories path where adjustDirectory | FilePath.Posix.hasTrailingPathSeparator path = FilePath.Native.addTrailingPathSeparator | otherwise = id -- -- * Entries type -- -- | A tar archive is a sequence of entries. 
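-- A small usage sketch (the helper name is hypothetical): counting entries
-- while propagating a parse failure, written with 'foldrEntries' below:
--
-- > countEntries :: Entries -> Either String Int
-- > countEntries = foldrEntries (\_ n -> fmap (+1) n) (Right 0) Left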
data Entries = Next Entry Entries | Done | Fail String unfoldrEntries :: (a -> Either String (Maybe (Entry, a))) -> a -> Entries unfoldrEntries f = unfold where unfold x = case f x of Left err -> Fail err Right Nothing -> Done Right (Just (e, x')) -> Next e (unfold x') foldrEntries :: (Entry -> a -> a) -> a -> (String -> a) -> Entries -> a foldrEntries next done fail' = fold where fold (Next e es) = next e (fold es) fold Done = done fold (Fail err) = fail' err foldlEntries :: (a -> Entry -> a) -> a -> Entries -> Either String a foldlEntries f = fold where fold a (Next e es) = (fold $! f a e) es fold a Done = Right a fold _ (Fail err) = Left err mapEntries :: (Entry -> Entry) -> Entries -> Entries mapEntries f = foldrEntries (Next . f) Done Fail filterEntries :: (Entry -> Bool) -> Entries -> Entries filterEntries p = foldrEntries (\entry rest -> if p entry then Next entry rest else rest) Done Fail checkEntries :: (Entry -> Maybe String) -> Entries -> Entries checkEntries checkEntry = foldrEntries (\entry rest -> case checkEntry entry of Nothing -> Next entry rest Just err -> Fail err) Done Fail entriesIndex :: Entries -> Either String (Map.Map TarPath Entry) entriesIndex = foldlEntries (\m e -> Map.insert (entryTarPath e) e m) Map.empty -- -- * Checking -- -- | This function checks a sequence of tar entries for file name security -- problems. It checks that: -- -- * file paths are not absolute -- -- * file paths do not contain any path components that are \"@..@\" -- -- * file names are valid -- -- These checks are from the perspective of the current OS. That means we check -- for \"@C:\blah@\" files on Windows and \"\/blah\" files on Unix. For archive -- entry types 'HardLink' and 'SymbolicLink' the same checks are done for the -- link target. A failure in any entry terminates the sequence of entries with -- an error. -- checkSecurity :: Entries -> Entries checkSecurity = checkEntries checkEntrySecurity checkTarbomb :: FilePath -> Entries -> Entries checkTarbomb expectedTopDir = checkEntries (checkEntryTarbomb expectedTopDir) checkEntrySecurity :: Entry -> Maybe String checkEntrySecurity entry = case entryContent entry of HardLink link -> check (entryPath entry) `mplus` check (fromLinkTarget link) SymbolicLink link -> check (entryPath entry) `mplus` check (fromLinkTarget link) _ -> check (entryPath entry) where check name | not (FilePath.Native.isRelative name) = Just $ "Absolute file name in tar archive: " ++ show name | not (FilePath.Native.isValid name) = Just $ "Invalid file name in tar archive: " ++ show name | ".." `elem` FilePath.Native.splitDirectories name = Just $ "Invalid file name in tar archive: " ++ show name | otherwise = Nothing checkEntryTarbomb :: FilePath -> Entry -> Maybe String checkEntryTarbomb _ entry | nonFilesystemEntry = Nothing where -- Ignore some special entries we will not unpack anyway nonFilesystemEntry = case entryContent entry of OtherEntryType 'g' _ _ -> True --PAX global header OtherEntryType 'x' _ _ -> True --PAX individual header _ -> False checkEntryTarbomb expectedTopDir entry = case FilePath.Native.splitDirectories (entryPath entry) of (topDir:_) | topDir == expectedTopDir -> Nothing s -> Just $ "File in tar archive is not in the expected directory. 
" ++ "Expected: " ++ show expectedTopDir ++ " but got the following hierarchy: " ++ show s -- -- * Reading -- read :: ByteString -> Entries read = unfoldrEntries getEntry getEntry :: ByteString -> Either String (Maybe (Entry, ByteString)) getEntry bs | BS.length header < 512 = Left "truncated tar archive" -- Tar files end with at least two blocks of all '0'. Checking this serves -- two purposes. It checks the format but also forces the tail of the data -- which is necessary to close the file if it came from a lazily read file. | BS.head bs == 0 = case BS.splitAt 1024 bs of (end, trailing) | BS.length end /= 1024 -> Left "short tar trailer" | not (BS.all (== 0) end) -> Left "bad tar trailer" | not (BS.all (== 0) trailing) -> Left "tar file has trailing junk" | otherwise -> Right Nothing | otherwise = partial $ do case (chksum_, format_) of (Ok chksum, _ ) | correctChecksum header chksum -> return () (Ok _, Ok _) -> fail "tar checksum error" _ -> fail "data is not in tar format" -- These fields are partial, have to check them format <- format_; mode <- mode_; uid <- uid_; gid <- gid_; size <- size_; mtime <- mtime_; devmajor <- devmajor_; devminor <- devminor_; let content = BS.take size (BS.drop 512 bs) padding = (512 - size) `mod` 512 bs' = BS.drop (512 + size + padding) bs entry = Entry { entryTarPath = TarPath name prefix, entryContent = case typecode of '\0' -> NormalFile content size '0' -> NormalFile content size '1' -> HardLink (LinkTarget linkname) '2' -> SymbolicLink (LinkTarget linkname) '3' -> CharacterDevice devmajor devminor '4' -> BlockDevice devmajor devminor '5' -> Directory '6' -> NamedPipe '7' -> NormalFile content size _ -> OtherEntryType typecode content size, entryPermissions = mode, entryOwnership = Ownership uname gname uid gid, entryTime = mtime, entryFormat = format } return (Just (entry, bs')) where header = BS.take 512 bs name = getString 0 100 header mode_ = getOct 100 8 header uid_ = getOct 108 8 header gid_ = getOct 116 8 header size_ = getOct 124 12 header mtime_ = getOct 136 12 header chksum_ = getOct 148 8 header typecode = getByte 156 header linkname = getString 157 100 header magic = getChars 257 8 header uname = getString 265 32 header gname = getString 297 32 header devmajor_ = getOct 329 8 header devminor_ = getOct 337 8 header prefix = getString 345 155 header -- trailing = getBytes 500 12 header format_ = case magic of "\0\0\0\0\0\0\0\0" -> return V7Format "ustar\NUL00" -> return UstarFormat "ustar \NUL" -> return GnuFormat _ -> fail "tar entry not in a recognised format" correctChecksum :: ByteString -> Int -> Bool correctChecksum header checksum = checksum == checksum' where -- sum of all 512 bytes in the header block, -- treating each byte as an 8-bit unsigned value checksum' = BS.Char8.foldl' (\x y -> x + ord y) 0 header' -- treating the 8 bytes of chksum as blank characters. header' = BS.concat [BS.take 148 header, BS.Char8.replicate 8 ' ', BS.drop 156 header] -- * TAR format primitive input getOct :: (Integral a, Bits a) => Int64 -> Int64 -> ByteString -> Partial a getOct off len header | BS.head bytes == 128 = parseBinInt (BS.unpack (BS.tail bytes)) | null octstr = return 0 | otherwise = case readOct octstr of [(x,[])] -> return x _ -> fail "tar header is malformed (bad numeric encoding)" where bytes = getBytes off len header octstr = BS.Char8.unpack . BS.Char8.takeWhile (\c -> c /= '\NUL' && c /= ' ') . 
BS.Char8.dropWhile (== ' ') $ bytes -- Some tar programs switch into a binary format when they try to represent -- field values that will not fit in the required width when using the text -- octal format. In particular, the UID/GID fields can only hold up to 2^21 -- while in the binary format can hold up to 2^32. The binary format uses -- '\128' as the header which leaves 7 bytes. Only the last 4 are used. parseBinInt [0, 0, 0, byte3, byte2, byte1, byte0] = return $! shiftL (fromIntegral byte3) 24 + shiftL (fromIntegral byte2) 16 + shiftL (fromIntegral byte1) 8 + shiftL (fromIntegral byte0) 0 parseBinInt _ = fail "tar header uses non-standard number encoding" getBytes :: Int64 -> Int64 -> ByteString -> ByteString getBytes off len = BS.take len . BS.drop off getByte :: Int64 -> ByteString -> Char getByte off bs = BS.Char8.index bs off getChars :: Int64 -> Int64 -> ByteString -> String getChars off len = BS.Char8.unpack . getBytes off len getString :: Int64 -> Int64 -> ByteString -> String getString off len = BS.Char8.unpack . BS.Char8.takeWhile (/='\0') . getBytes off len data Partial a = Error String | Ok a deriving Functor partial :: Partial a -> Either String a partial (Error msg) = Left msg partial (Ok x) = Right x instance Applicative Partial where pure = return (<*>) = ap instance Monad Partial where return = Ok Error m >>= _ = Error m Ok x >>= k = k x fail = Error -- -- * Writing -- -- | Create the external representation of a tar archive by serialising a list -- of tar entries. -- -- * The conversion is done lazily. -- write :: [Entry] -> ByteString write es = BS.concat $ map putEntry es ++ [BS.replicate (512*2) 0] -- | Same as 'write', but for 'Entries'. writeEntries :: Entries -> ByteString writeEntries entries = BS.concat $ foldrEntries (\e res -> putEntry e : res) [BS.replicate (512*2) 0] error entries putEntry :: Entry -> ByteString putEntry entry = case entryContent entry of NormalFile content size -> BS.concat [ header, content, padding size ] OtherEntryType _ content size -> BS.concat [ header, content, padding size ] _ -> header where header = putHeader entry padding size = BS.replicate paddingSize 0 where paddingSize = fromIntegral (negate size `mod` 512) putHeader :: Entry -> ByteString putHeader entry = BS.concat [ BS.take 148 block , BS.Char8.pack $ putOct 7 checksum , BS.Char8.singleton ' ' , BS.drop 156 block ] where -- putHeaderNoChkSum returns a String, so we convert it to the final -- representation before calculating the checksum. block = BS.Char8.pack . 
putHeaderNoChkSum $ entry checksum = BS.Char8.foldl' (\x y -> x + ord y) 0 block putHeaderNoChkSum :: Entry -> String putHeaderNoChkSum Entry { entryTarPath = TarPath name prefix, entryContent = content, entryPermissions = permissions, entryOwnership = ownership, entryTime = modTime, entryFormat = format } = concat [ putString 100 $ name , putOct 8 $ permissions , putOct 8 $ ownerId ownership , putOct 8 $ groupId ownership , putOct 12 $ contentSize , putOct 12 $ modTime , fill 8 $ ' ' -- dummy checksum , putChar8 $ typeCode , putString 100 $ linkTarget ] ++ case format of V7Format -> fill 255 '\NUL' UstarFormat -> concat [ putString 8 $ "ustar\NUL00" , putString 32 $ ownerName ownership , putString 32 $ groupName ownership , putOct 8 $ deviceMajor , putOct 8 $ deviceMinor , putString 155 $ prefix , fill 12 $ '\NUL' ] GnuFormat -> concat [ putString 8 $ "ustar \NUL" , putString 32 $ ownerName ownership , putString 32 $ groupName ownership , putGnuDev 8 $ deviceMajor , putGnuDev 8 $ deviceMinor , putString 155 $ prefix , fill 12 $ '\NUL' ] where (typeCode, contentSize, linkTarget, deviceMajor, deviceMinor) = case content of NormalFile _ size -> ('0' , size, [], 0, 0) Directory -> ('5' , 0, [], 0, 0) SymbolicLink (LinkTarget link) -> ('2' , 0, link, 0, 0) HardLink (LinkTarget link) -> ('1' , 0, link, 0, 0) CharacterDevice major minor -> ('3' , 0, [], major, minor) BlockDevice major minor -> ('4' , 0, [], major, minor) NamedPipe -> ('6' , 0, [], 0, 0) OtherEntryType code _ size -> (code, size, [], 0, 0) putGnuDev w n = case content of CharacterDevice _ _ -> putOct w n BlockDevice _ _ -> putOct w n _ -> replicate w '\NUL' -- * TAR format primitive output type FieldWidth = Int putString :: FieldWidth -> String -> String putString n s = take n s ++ fill (n - length s) '\NUL' --TODO: check integer widths, eg for large file sizes putOct :: (Show a, Integral a) => FieldWidth -> a -> String putOct n x = let octStr = take (n-1) $ showOct x "" in fill (n - length octStr - 1) '0' ++ octStr ++ putChar8 '\NUL' putChar8 :: Char -> String putChar8 c = [c] fill :: FieldWidth -> Char -> String fill n c = replicate n c -- -- * Unpacking -- unpack :: FilePath -> Entries -> IO () unpack baseDir entries = unpackEntries [] (checkSecurity entries) >>= emulateLinks where -- We're relying here on 'checkSecurity' to make sure we're not scribbling -- files all over the place. unpackEntries _ (Fail err) = fail err unpackEntries links Done = return links unpackEntries links (Next entry es) = case entryContent entry of NormalFile file _ -> extractFile entry path file >> unpackEntries links es Directory -> extractDir path >> unpackEntries links es HardLink link -> (unpackEntries $! saveLink path link links) es SymbolicLink link -> (unpackEntries $! saveLink path link links) es _ -> unpackEntries links es --ignore other file types where path = entryPath entry extractFile entry path content = do -- Note that tar archives do not make sure each directory is created -- before files they contain, indeed we may have to create several -- levels of directory. 
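-- (editorial note: createDirectoryIfMissing True below therefore creates
--  every missing parent in one call.  Hard and symbolic links are not
--  extracted here at all: they are collected in 'links' and emulated at
--  the end by 'emulateLinks', which simply copies the resolved target
--  file, so no real links are created on disk.)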
createDirectoryIfMissing True absDir BS.writeFile absPath content when (isExecutable (entryPermissions entry)) (setFileExecutable absPath) where absDir = baseDir FilePath.Native.takeDirectory path absPath = baseDir path extractDir path = createDirectoryIfMissing True (baseDir path) saveLink path link links = seq (length path) $ seq (length link') $ (path, link'):links where link' = fromLinkTarget link emulateLinks = mapM_ $ \(relPath, relLinkTarget) -> let absPath = baseDir relPath absTarget = FilePath.Native.takeDirectory absPath relLinkTarget in copyFile absTarget absPath -- -- * Packing -- pack :: FilePath -- ^ Base directory -> [FilePath] -- ^ Files and directories to pack, relative to the base dir -> IO [Entry] pack baseDir paths0 = preparePaths baseDir paths0 >>= packPaths baseDir preparePaths :: FilePath -> [FilePath] -> IO [FilePath] preparePaths baseDir paths = fmap concat $ interleave [ do isDir <- doesDirectoryExist (baseDir path) if isDir then do entries <- getDirectoryContentsRecursive (baseDir path) return (FilePath.Native.addTrailingPathSeparator path : map (path ) entries) else return [path] | path <- paths ] packPaths :: FilePath -> [FilePath] -> IO [Entry] packPaths baseDir paths = interleave [ do tarpath <- either fail return (toTarPath isDir relpath) if isDir then packDirectoryEntry filepath tarpath else packFileEntry filepath tarpath | relpath <- paths , let isDir = FilePath.Native.hasTrailingPathSeparator filepath filepath = baseDir relpath ] interleave :: [IO a] -> IO [a] interleave = unsafeInterleaveIO . go where go [] = return [] go (x:xs) = do x' <- x xs' <- interleave xs return (x':xs') packFileEntry :: FilePath -- ^ Full path to find the file on the local disk -> TarPath -- ^ Path to use for the tar Entry in the archive -> IO Entry packFileEntry filepath tarpath = do mtime <- getModTime filepath perms <- getPermissions filepath file <- openBinaryFile filepath ReadMode size <- hFileSize file content <- BS.hGetContents file return (simpleEntry tarpath (NormalFile content (fromIntegral size))) { entryPermissions = if Permissions.executable perms then executableFilePermissions else ordinaryFilePermissions, entryTime = mtime } packDirectoryEntry :: FilePath -- ^ Full path to find the file on the local disk -> TarPath -- ^ Path to use for the tar Entry in the archive -> IO Entry packDirectoryEntry filepath tarpath = do mtime <- getModTime filepath return (directoryEntry tarpath) { entryTime = mtime } getDirectoryContentsRecursive :: FilePath -> IO [FilePath] getDirectoryContentsRecursive dir0 = fmap tail (recurseDirectories dir0 [""]) recurseDirectories :: FilePath -> [FilePath] -> IO [FilePath] recurseDirectories _ [] = return [] recurseDirectories base (dir:dirs) = unsafeInterleaveIO $ do (files, dirs') <- collect [] [] =<< getDirectoryContents (base dir) files' <- recurseDirectories base (dirs' ++ dirs) return (dir : files ++ files') where collect files dirs' [] = return (reverse files, reverse dirs') collect files dirs' (entry:entries) | ignore entry = collect files dirs' entries collect files dirs' (entry:entries) = do let dirEntry = dir entry dirEntry' = FilePath.Native.addTrailingPathSeparator dirEntry isDirectory <- doesDirectoryExist (base dirEntry) if isDirectory then collect files (dirEntry':dirs') entries else collect (dirEntry:files) dirs' entries ignore ['.'] = True ignore ['.', '.'] = True ignore _ = False cabal-install-1.22.6.0/Distribution/Client/Win32SelfUpgrade.hs0000644000000000000000000001675012540225656022131 0ustar0000000000000000{-# LANGUAGE 
CPP, ForeignFunctionInterface #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Win32SelfUpgrade -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Support for self-upgrading executables on Windows platforms. ----------------------------------------------------------------------------- module Distribution.Client.Win32SelfUpgrade ( -- * Explanation -- -- | Windows inherited a design choice from DOS that while initially innocuous -- has rather unfortunate consequences. It maintains the invariant that every -- open file has a corresponding name on disk. One positive consequence of this -- is that an executable can always find it's own executable file. The downside -- is that a program cannot be deleted or upgraded while it is running without -- hideous workarounds. This module implements one such hideous workaround. -- -- The basic idea is: -- -- * Move our own exe file to a new name -- * Copy a new exe file to the previous name -- * Run the new exe file, passing our own PID and new path -- * Wait for the new process to start -- * Close the new exe file -- * Exit old process -- -- Then in the new process: -- -- * Inform the old process that we've started -- * Wait for the old process to die -- * Delete the old exe file -- * Exit new process -- possibleSelfUpgrade, deleteOldExeFile, ) where #if mingw32_HOST_OS import qualified System.Win32 as Win32 import System.Win32 (DWORD, BOOL, HANDLE, LPCTSTR) import Foreign.Ptr (Ptr, nullPtr) import System.Process (runProcess) import System.Directory (canonicalizePath) import System.FilePath (takeBaseName, replaceBaseName, equalFilePath) import Distribution.Verbosity as Verbosity (Verbosity, showForCabal) import Distribution.Simple.Utils (debug, info) import Prelude hiding (log) -- | If one of the given files is our own exe file then we arrange things such -- that the nested action can replace our own exe file. -- -- We require that the new process accepts a command line invocation that -- calls 'deleteOldExeFile', passing in the PID and exe file. -- possibleSelfUpgrade :: Verbosity -> [FilePath] -> IO a -> IO a possibleSelfUpgrade verbosity newPaths action = do dstPath <- canonicalizePath =<< Win32.getModuleFileName Win32.nullHANDLE newPaths' <- mapM canonicalizePath newPaths let doingSelfUpgrade = any (equalFilePath dstPath) newPaths' if not doingSelfUpgrade then action else do info verbosity $ "cabal-install does the replace-own-exe-file dance..." tmpPath <- moveOurExeOutOfTheWay verbosity result <- action scheduleOurDemise verbosity dstPath tmpPath (\pid path -> ["win32selfupgrade", pid, path ,"--verbose=" ++ Verbosity.showForCabal verbosity]) return result -- | The name of a Win32 Event object that we use to synchronise between the -- old and new processes. We need to synchronise to make sure that the old -- process has not yet terminated by the time the new one starts up and looks -- for the old process. Otherwise the old one might have already terminated -- and we could not wait on it terminating reliably (eg the PID might get -- re-used). -- syncEventName :: String syncEventName = "Local\\cabal-install-upgrade" -- | The first part of allowing our exe file to be replaced is to move the -- existing exe file out of the way. Although we cannot delete our exe file -- while we're still running, fortunately we can rename it, at least within -- the same directory. 
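-- (editorial sketch with hypothetical paths: if the running binary is
--  C:\bin\cabal.exe and our PID is 1234, the dance performed by the
--  functions below is roughly
--    move  C:\bin\cabal.exe  to  C:\bin\cabal1234.exe
--    copy  the freshly built exe  to  C:\bin\cabal.exe
--    spawn C:\bin\cabal.exe win32selfupgrade 1234 C:\bin\cabal1234.exe ...
--  and the spawned child deletes C:\bin\cabal1234.exe once process 1234
--  has terminated.)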
-- moveOurExeOutOfTheWay :: Verbosity -> IO FilePath moveOurExeOutOfTheWay verbosity = do ourPID <- getCurrentProcessId dstPath <- Win32.getModuleFileName Win32.nullHANDLE let tmpPath = replaceBaseName dstPath (takeBaseName dstPath ++ show ourPID) debug verbosity $ "moving " ++ dstPath ++ " to " ++ tmpPath Win32.moveFile dstPath tmpPath return tmpPath -- | Assuming we've now installed the new exe file in the right place, we -- launch it and ask it to delete our exe file when we eventually terminate. -- scheduleOurDemise :: Verbosity -> FilePath -> FilePath -> (String -> FilePath -> [String]) -> IO () scheduleOurDemise verbosity dstPath tmpPath mkArgs = do ourPID <- getCurrentProcessId event <- createEvent syncEventName let args = mkArgs (show ourPID) tmpPath log $ "launching child " ++ unwords (dstPath : map show args) _ <- runProcess dstPath args Nothing Nothing Nothing Nothing Nothing log $ "waiting for the child to start up" waitForSingleObject event (10*1000) -- wait at most 10 sec log $ "child started ok" where log msg = debug verbosity ("Win32Reinstall.parent: " ++ msg) -- | Assuming we're now in the new child process, we've been asked by the old -- process to wait for it to terminate and then we can remove the old exe file -- that it renamed itself to. -- deleteOldExeFile :: Verbosity -> Int -> FilePath -> IO () deleteOldExeFile verbosity oldPID tmpPath = do log $ "process started. Will delete exe file of process " ++ show oldPID ++ " at path " ++ tmpPath log $ "getting handle of parent process " ++ show oldPID oldPHANDLE <- Win32.openProcess Win32.sYNCHORNIZE False (fromIntegral oldPID) log $ "synchronising with parent" event <- openEvent syncEventName setEvent event log $ "waiting for parent process to terminate" waitForSingleObject oldPHANDLE Win32.iNFINITE log $ "parent process terminated" log $ "deleting parent's old .exe file" Win32.deleteFile tmpPath where log msg = debug verbosity ("Win32Reinstall.child: " ++ msg) ------------------------ -- Win32 foreign imports -- -- A bunch of functions sadly not provided by the Win32 package. 
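-- (editorial note: the CALLCONV macro below exists because the Win32 API
--  uses stdcall on 32-bit Windows, whereas x86_64 Windows has a single
--  calling convention for which GHC expects ccall.)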
#ifdef x86_64_HOST_ARCH #define CALLCONV ccall #else #define CALLCONV stdcall #endif foreign import CALLCONV unsafe "windows.h GetCurrentProcessId" getCurrentProcessId :: IO DWORD foreign import CALLCONV unsafe "windows.h WaitForSingleObject" waitForSingleObject_ :: HANDLE -> DWORD -> IO DWORD waitForSingleObject :: HANDLE -> DWORD -> IO () waitForSingleObject handle timeout = Win32.failIf_ bad "WaitForSingleObject" $ waitForSingleObject_ handle timeout where bad result = not (result == 0 || result == wAIT_TIMEOUT) wAIT_TIMEOUT = 0x00000102 foreign import CALLCONV unsafe "windows.h CreateEventW" createEvent_ :: Ptr () -> BOOL -> BOOL -> LPCTSTR -> IO HANDLE createEvent :: String -> IO HANDLE createEvent name = do Win32.failIfNull "CreateEvent" $ Win32.withTString name $ createEvent_ nullPtr False False foreign import CALLCONV unsafe "windows.h OpenEventW" openEvent_ :: DWORD -> BOOL -> LPCTSTR -> IO HANDLE openEvent :: String -> IO HANDLE openEvent name = do Win32.failIfNull "OpenEvent" $ Win32.withTString name $ openEvent_ eVENT_MODIFY_STATE False where eVENT_MODIFY_STATE :: DWORD eVENT_MODIFY_STATE = 0x0002 foreign import CALLCONV unsafe "windows.h SetEvent" setEvent_ :: HANDLE -> IO BOOL setEvent :: HANDLE -> IO () setEvent handle = Win32.failIfFalse_ "SetEvent" $ setEvent_ handle #else import Distribution.Verbosity (Verbosity) import Distribution.Simple.Utils (die) possibleSelfUpgrade :: Verbosity -> [FilePath] -> IO a -> IO a possibleSelfUpgrade _ _ action = action deleteOldExeFile :: Verbosity -> Int -> FilePath -> IO () deleteOldExeFile _ _ _ = die "win32selfupgrade not needed except on win32" #endif cabal-install-1.22.6.0/Distribution/Client/Check.hs0000644000000000000000000000630712540225656020117 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Check -- Copyright : (c) Lennart Kolmodin 2008 -- License : BSD-like -- -- Maintainer : kolmodin@haskell.org -- Stability : provisional -- Portability : portable -- -- Check a package for common mistakes -- ----------------------------------------------------------------------------- module Distribution.Client.Check ( check ) where import Control.Monad ( when, unless ) import Distribution.PackageDescription.Parse ( readPackageDescription ) import Distribution.PackageDescription.Check import Distribution.PackageDescription.Configuration ( flattenPackageDescription ) import Distribution.Verbosity ( Verbosity ) import Distribution.Simple.Utils ( defaultPackageDesc, toUTF8, wrapText ) check :: Verbosity -> IO Bool check verbosity = do pdfile <- defaultPackageDesc verbosity ppd <- readPackageDescription verbosity pdfile -- flatten the generic package description into a regular package -- description -- TODO: this may give more warnings than it should give; -- consider two branches of a condition, one saying -- ghc-options: -Wall -- and the other -- ghc-options: -Werror -- joined into -- ghc-options: -Wall -Werror -- checkPackages will yield a warning on the last line, but it -- would not on each individual branch. -- Hovever, this is the same way hackage does it, so we will yield -- the exact same errors as it will. let pkg_desc = flattenPackageDescription ppd ioChecks <- checkPackageFiles pkg_desc "." 
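-- (editorial note: the IO-based file checks and the pure package checks
--  are merged below and split into four severities; per 'isDistError'
--  further down, everything except PackageDistSuspicious is treated as
--  grounds for Hackage to reject the package.)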
let packageChecks = ioChecks ++ checkPackage ppd (Just pkg_desc) buildImpossible = [ x | x@PackageBuildImpossible {} <- packageChecks ] buildWarning = [ x | x@PackageBuildWarning {} <- packageChecks ] distSuspicious = [ x | x@PackageDistSuspicious {} <- packageChecks ] distInexusable = [ x | x@PackageDistInexcusable {} <- packageChecks ] unless (null buildImpossible) $ do putStrLn "The package will not build sanely due to these errors:" printCheckMessages buildImpossible unless (null buildWarning) $ do putStrLn "The following warnings are likely affect your build negatively:" printCheckMessages buildWarning unless (null distSuspicious) $ do putStrLn "These warnings may cause trouble when distributing the package:" printCheckMessages distSuspicious unless (null distInexusable) $ do putStrLn "The following errors will cause portability problems on other environments:" printCheckMessages distInexusable let isDistError (PackageDistSuspicious {}) = False isDistError _ = True errors = filter isDistError packageChecks unless (null errors) $ putStrLn "Hackage would reject this package." when (null packageChecks) $ putStrLn "No errors or warnings could be found in the package." return (null packageChecks) where printCheckMessages = mapM_ (putStrLn . format . explanation) format = toUTF8 . wrapText . ("* "++) cabal-install-1.22.6.0/Distribution/Client/Get.hs0000644000000000000000000003071612540225656017622 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Get -- Copyright : (c) Andrea Vezzosi 2008 -- Duncan Coutts 2011 -- John Millikin 2012 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- The 'cabal get' command. ----------------------------------------------------------------------------- module Distribution.Client.Get ( get ) where import Distribution.Package ( PackageId, packageId, packageName ) import Distribution.Simple.Setup ( Flag(..), fromFlag, fromFlagOrDefault ) import Distribution.Simple.Utils ( notice, die, info, writeFileAtomic ) import Distribution.Verbosity ( Verbosity ) import Distribution.Text(display) import qualified Distribution.PackageDescription as PD import Distribution.Client.Setup ( GlobalFlags(..), GetFlags(..) ) import Distribution.Client.Types import Distribution.Client.Targets import Distribution.Client.Dependency import Distribution.Client.FetchUtils import qualified Distribution.Client.Tar as Tar (extractTarGzFile) import Distribution.Client.IndexUtils as IndexUtils ( getSourcePackages ) import Distribution.Client.Compat.Process ( readProcessWithExitCode ) import Distribution.Compat.Exception ( catchIO ) import Control.Exception ( finally ) import Control.Monad ( filterM, forM_, unless, when ) import Data.List ( sortBy ) import qualified Data.Map import Data.Maybe ( listToMaybe, mapMaybe ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( mempty ) #endif import Data.Ord ( comparing ) import System.Directory ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist , getCurrentDirectory, setCurrentDirectory ) import System.Exit ( ExitCode(..) ) import System.FilePath ( (), (<.>), addTrailingPathSeparator ) import System.Process ( rawSystem ) -- | Entry point for the 'cabal get' command. get :: Verbosity -> [Repo] -> GlobalFlags -> GetFlags -> [UserTarget] -> IO () get verbosity _ _ _ [] = notice verbosity "No packages requested. Nothing to do." 
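-- (editorial note: when --source-repository is given, useFork below is
--  True and the requested packages are cloned from their upstream
--  repositories via the branchers defined later in this module, so the
--  tarball-only restriction enforced by checkTarget is skipped.)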
get verbosity repos globalFlags getFlags userTargets = do let useFork = case (getSourceRepository getFlags) of NoFlag -> False _ -> True unless useFork $ mapM_ checkTarget userTargets sourcePkgDb <- getSourcePackages verbosity repos pkgSpecifiers <- resolveUserTargets verbosity (fromFlag $ globalWorldFile globalFlags) (packageIndex sourcePkgDb) userTargets pkgs <- either (die . unlines . map show) return $ resolveWithoutDependencies (resolverParams sourcePkgDb pkgSpecifiers) unless (null prefix) $ createDirectoryIfMissing True prefix if useFork then fork pkgs else unpack pkgs where resolverParams sourcePkgDb pkgSpecifiers = --TODO: add command-line constraint and preference args for unpack standardInstallPolicy mempty sourcePkgDb pkgSpecifiers prefix = fromFlagOrDefault "" (getDestDir getFlags) fork :: [SourcePackage] -> IO () fork pkgs = do let kind = fromFlag . getSourceRepository $ getFlags branchers <- findUsableBranchers mapM_ (forkPackage verbosity branchers prefix kind) pkgs unpack :: [SourcePackage] -> IO () unpack pkgs = do forM_ pkgs $ \pkg -> do location <- fetchPackage verbosity (packageSource pkg) let pkgid = packageId pkg descOverride | usePristine = Nothing | otherwise = packageDescrOverride pkg case location of LocalTarballPackage tarballPath -> unpackPackage verbosity prefix pkgid descOverride tarballPath RemoteTarballPackage _tarballURL tarballPath -> unpackPackage verbosity prefix pkgid descOverride tarballPath RepoTarballPackage _repo _pkgid tarballPath -> unpackPackage verbosity prefix pkgid descOverride tarballPath LocalUnpackedPackage _ -> error "Distribution.Client.Get.unpack: the impossible happened." where usePristine = fromFlagOrDefault False (getPristine getFlags) checkTarget :: UserTarget -> IO () checkTarget target = case target of UserTargetLocalDir dir -> die (notTarball dir) UserTargetLocalCabalFile file -> die (notTarball file) _ -> return () where notTarball t = "The 'get' command is for tarball packages. " ++ "The target '" ++ t ++ "' is not a tarball." -- ------------------------------------------------------------ -- * Unpacking the source tarball -- ------------------------------------------------------------ unpackPackage :: Verbosity -> FilePath -> PackageId -> PackageDescriptionOverride -> FilePath -> IO () unpackPackage verbosity prefix pkgid descOverride pkgPath = do let pkgdirname = display pkgid pkgdir = prefix pkgdirname pkgdir' = addTrailingPathSeparator pkgdir existsDir <- doesDirectoryExist pkgdir when existsDir $ die $ "The directory \"" ++ pkgdir' ++ "\" already exists, not unpacking." existsFile <- doesFileExist pkgdir when existsFile $ die $ "A file \"" ++ pkgdir ++ "\" is in the way, not unpacking." notice verbosity $ "Unpacking to " ++ pkgdir' Tar.extractTarGzFile prefix pkgdirname pkgPath case descOverride of Nothing -> return () Just pkgtxt -> do let descFilePath = pkgdir display (packageName pkgid) <.> "cabal" info verbosity $ "Updating " ++ descFilePath ++ " with the latest revision from the index." writeFileAtomic descFilePath pkgtxt -- ------------------------------------------------------------ -- * Forking the source repository -- ------------------------------------------------------------ data BranchCmd = BranchCmd (Verbosity -> FilePath -> IO ExitCode) data Brancher = Brancher { brancherBinary :: String , brancherBuildCmd :: PD.SourceRepo -> Maybe BranchCmd } -- | The set of all supported branch drivers. 
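-- (editorial sketch: a driver for a hypothetical VCS, following the same
--  pattern as the real drivers below; "fossil" is only an illustration
--  and is not supported by cabal-install:)
--
-- > branchFossil :: Brancher
-- > branchFossil = Brancher "fossil" $ \repo -> do
-- >   src <- PD.repoLocation repo
-- >   return $ BranchCmd $ \verbosity dst -> do
-- >     notice verbosity ("fossil: clone " ++ show src)
-- >     rawSystem "fossil" ["clone", src, dst]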
allBranchers :: [(PD.RepoType, Brancher)] allBranchers = [ (PD.Bazaar, branchBzr) , (PD.Darcs, branchDarcs) , (PD.Git, branchGit) , (PD.Mercurial, branchHg) , (PD.SVN, branchSvn) ] -- | Find which usable branch drivers (selected from 'allBranchers') are -- available and usable on the local machine. -- -- Each driver's main command is run with @--help@, and if the child process -- exits successfully, that brancher is considered usable. findUsableBranchers :: IO (Data.Map.Map PD.RepoType Brancher) findUsableBranchers = do let usable (_, brancher) = flip catchIO (const (return False)) $ do let cmd = brancherBinary brancher (exitCode, _, _) <- readProcessWithExitCode cmd ["--help"] "" return (exitCode == ExitSuccess) pairs <- filterM usable allBranchers return (Data.Map.fromList pairs) -- | Fork a single package from a remote source repository to the local -- file system. forkPackage :: Verbosity -> Data.Map.Map PD.RepoType Brancher -- ^ Branchers supported by the local machine. -> FilePath -- ^ The directory in which new branches or repositories will -- be created. -> (Maybe PD.RepoKind) -- ^ Which repo to choose. -> SourcePackage -- ^ The package to fork. -> IO () forkPackage verbosity branchers prefix kind src = do let desc = PD.packageDescription (packageDescription src) pkgid = display (packageId src) pkgname = display (packageName src) destdir = prefix pkgname destDirExists <- doesDirectoryExist destdir when destDirExists $ do die ("The directory " ++ show destdir ++ " already exists, not forking.") destFileExists <- doesFileExist destdir when destFileExists $ do die ("A file " ++ show destdir ++ " is in the way, not forking.") let repos = PD.sourceRepos desc case findBranchCmd branchers repos kind of Just (BranchCmd io) -> do exitCode <- io verbosity destdir case exitCode of ExitSuccess -> return () ExitFailure _ -> die ("Couldn't fork package " ++ pkgid) Nothing -> case repos of [] -> die ("Package " ++ pkgid ++ " does not have any source repositories.") _ -> die ("Package " ++ pkgid ++ " does not have any usable source repositories.") -- | Given a set of possible branchers, and a set of possible source -- repositories, find a repository that is both 1) likely to be specific to -- this source version and 2) is supported by the local machine. findBranchCmd :: Data.Map.Map PD.RepoType Brancher -> [PD.SourceRepo] -> (Maybe PD.RepoKind) -> Maybe BranchCmd findBranchCmd branchers allRepos maybeKind = cmd where -- Sort repositories by kind, from This to Head to Unknown. Repositories -- with equivalent kinds are selected based on the order they appear in -- the Cabal description file. repos' = sortBy (comparing thisFirst) allRepos thisFirst r = case PD.repoKind r of PD.RepoThis -> 0 :: Int PD.RepoHead -> case PD.repoTag r of -- If the type is 'head' but the author specified a tag, they -- probably meant to create a 'this' repository but screwed up. Just _ -> 0 Nothing -> 1 PD.RepoKindUnknown _ -> 2 -- If the user has specified the repo kind, filter out the repositories -- she's not interested in. repos = maybe repos' (\k -> filter ((==) k . PD.repoKind) repos') maybeKind repoBranchCmd repo = do t <- PD.repoType repo brancher <- Data.Map.lookup t branchers brancherBuildCmd brancher repo cmd = listToMaybe (mapMaybe repoBranchCmd repos) -- | Branch driver for Bazaar. 
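-- (editorial note on findBranchCmd above: when several repositories have
--  usable branchers, a 'this' repository is preferred over a 'head' one,
--  and a 'head' repository that carries a tag is ranked like 'this', on
--  the assumption that the author really meant 'this'.)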
branchBzr :: Brancher branchBzr = Brancher "bzr" $ \repo -> do src <- PD.repoLocation repo let args dst = case PD.repoTag repo of Just tag -> ["branch", src, dst, "-r", "tag:" ++ tag] Nothing -> ["branch", src, dst] return $ BranchCmd $ \verbosity dst -> do notice verbosity ("bzr: branch " ++ show src) rawSystem "bzr" (args dst) -- | Branch driver for Darcs. branchDarcs :: Brancher branchDarcs = Brancher "darcs" $ \repo -> do src <- PD.repoLocation repo let args dst = case PD.repoTag repo of Just tag -> ["get", src, dst, "-t", tag] Nothing -> ["get", src, dst] return $ BranchCmd $ \verbosity dst -> do notice verbosity ("darcs: get " ++ show src) rawSystem "darcs" (args dst) -- | Branch driver for Git. branchGit :: Brancher branchGit = Brancher "git" $ \repo -> do src <- PD.repoLocation repo let branchArgs = case PD.repoBranch repo of Just b -> ["--branch", b] Nothing -> [] let postClone dst = case PD.repoTag repo of Just t -> do cwd <- getCurrentDirectory setCurrentDirectory dst finally (rawSystem "git" (["checkout", t] ++ branchArgs)) (setCurrentDirectory cwd) Nothing -> return ExitSuccess return $ BranchCmd $ \verbosity dst -> do notice verbosity ("git: clone " ++ show src) code <- rawSystem "git" (["clone", src, dst] ++ branchArgs) case code of ExitFailure _ -> return code ExitSuccess -> postClone dst -- | Branch driver for Mercurial. branchHg :: Brancher branchHg = Brancher "hg" $ \repo -> do src <- PD.repoLocation repo let branchArgs = case PD.repoBranch repo of Just b -> ["--branch", b] Nothing -> [] let tagArgs = case PD.repoTag repo of Just t -> ["--rev", t] Nothing -> [] let args dst = ["clone", src, dst] ++ branchArgs ++ tagArgs return $ BranchCmd $ \verbosity dst -> do notice verbosity ("hg: clone " ++ show src) rawSystem "hg" (args dst) -- | Branch driver for Subversion. branchSvn :: Brancher branchSvn = Brancher "svn" $ \repo -> do src <- PD.repoLocation repo let args dst = ["checkout", src, dst] return $ BranchCmd $ \verbosity dst -> do notice verbosity ("svn: checkout " ++ show src) rawSystem "svn" (args dst) cabal-install-1.22.6.0/Distribution/Client/Fetch.hs0000644000000000000000000001525212540225656020132 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Fetch -- Copyright : (c) David Himmelstrup 2005 -- Duncan Coutts 2011 -- License : BSD-like -- -- Maintainer : cabal-devel@gmail.com -- Stability : provisional -- Portability : portable -- -- The cabal fetch command ----------------------------------------------------------------------------- module Distribution.Client.Fetch ( fetch, ) where import Distribution.Client.Types import Distribution.Client.Targets import Distribution.Client.FetchUtils hiding (fetchPackage) import Distribution.Client.Dependency import Distribution.Client.IndexUtils as IndexUtils ( getSourcePackages, getInstalledPackages ) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.Setup ( GlobalFlags(..), FetchFlags(..) 
) import Distribution.Package ( packageId ) import Distribution.Simple.Compiler ( Compiler, compilerInfo, PackageDBStack ) import Distribution.Simple.PackageIndex (InstalledPackageIndex) import Distribution.Simple.Program ( ProgramConfiguration ) import Distribution.Simple.Setup ( fromFlag ) import Distribution.Simple.Utils ( die, notice, debug ) import Distribution.System ( Platform ) import Distribution.Text ( display ) import Distribution.Verbosity ( Verbosity ) import Control.Monad ( filterM ) -- ------------------------------------------------------------ -- * The fetch command -- ------------------------------------------------------------ --TODO: -- * add fetch -o support -- * support tarball URLs via ad-hoc download cache (or in -o mode?) -- * suggest using --no-deps, unpack or fetch -o if deps cannot be satisfied -- * Port various flags from install: -- * --updage-dependencies -- * --constraint and --preference -- * --only-dependencies, but note it conflicts with --no-deps -- | Fetch a list of packages and their dependencies. -- fetch :: Verbosity -> PackageDBStack -> [Repo] -> Compiler -> Platform -> ProgramConfiguration -> GlobalFlags -> FetchFlags -> [UserTarget] -> IO () fetch verbosity _ _ _ _ _ _ _ [] = notice verbosity "No packages requested. Nothing to do." fetch verbosity packageDBs repos comp platform conf globalFlags fetchFlags userTargets = do mapM_ checkTarget userTargets installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf sourcePkgDb <- getSourcePackages verbosity repos pkgSpecifiers <- resolveUserTargets verbosity (fromFlag $ globalWorldFile globalFlags) (packageIndex sourcePkgDb) userTargets pkgs <- planPackages verbosity comp platform fetchFlags installedPkgIndex sourcePkgDb pkgSpecifiers pkgs' <- filterM (fmap not . isFetched . packageSource) pkgs if null pkgs' --TODO: when we add support for remote tarballs then this message -- will need to be changed because for remote tarballs we fetch them -- at the earlier phase. then notice verbosity $ "No packages need to be fetched. " ++ "All the requested packages are already local " ++ "or cached locally." else if dryRun then notice verbosity $ unlines $ "The following packages would be fetched:" : map (display . packageId) pkgs' else mapM_ (fetchPackage verbosity . packageSource) pkgs' where dryRun = fromFlag (fetchDryRun fetchFlags) planPackages :: Verbosity -> Compiler -> Platform -> FetchFlags -> InstalledPackageIndex -> SourcePackageDb -> [PackageSpecifier SourcePackage] -> IO [SourcePackage] planPackages verbosity comp platform fetchFlags installedPkgIndex sourcePkgDb pkgSpecifiers | includeDependencies = do solver <- chooseSolver verbosity (fromFlag (fetchSolver fetchFlags)) (compilerInfo comp) notice verbosity "Resolving dependencies..." installPlan <- foldProgress logMsg die return $ resolveDependencies platform (compilerInfo comp) solver resolverParams -- The packages we want to fetch are those packages the 'InstallPlan' -- that are in the 'InstallPlan.Configured' state. return [ pkg | (InstallPlan.Configured (InstallPlan.ConfiguredPackage pkg _ _ _)) <- InstallPlan.toList installPlan ] | otherwise = either (die . unlines . map show) return $ resolveWithoutDependencies resolverParams where resolverParams = setMaxBackjumps (if maxBackjumps < 0 then Nothing else Just maxBackjumps) . setIndependentGoals independentGoals . setReorderGoals reorderGoals . setShadowPkgs shadowPkgs . 
setStrongFlags strongFlags -- Reinstall the targets given on the command line so that the dep -- resolver will decide that they need fetching, even if they're -- already installed. Since we want to get the source packages of -- things we might have installed (but not have the sources for). . reinstallTargets $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers includeDependencies = fromFlag (fetchDeps fetchFlags) logMsg message rest = debug verbosity message >> rest reorderGoals = fromFlag (fetchReorderGoals fetchFlags) independentGoals = fromFlag (fetchIndependentGoals fetchFlags) shadowPkgs = fromFlag (fetchShadowPkgs fetchFlags) strongFlags = fromFlag (fetchStrongFlags fetchFlags) maxBackjumps = fromFlag (fetchMaxBackjumps fetchFlags) checkTarget :: UserTarget -> IO () checkTarget target = case target of UserTargetRemoteTarball _uri -> die $ "The 'fetch' command does not yet support remote tarballs. " ++ "In the meantime you can use the 'unpack' commands." _ -> return () fetchPackage :: Verbosity -> PackageLocation a -> IO () fetchPackage verbosity pkgsrc = case pkgsrc of LocalUnpackedPackage _dir -> return () LocalTarballPackage _file -> return () RemoteTarballPackage _uri _ -> die $ "The 'fetch' command does not yet support remote tarballs. " ++ "In the meantime you can use the 'unpack' commands." RepoTarballPackage repo pkgid _ -> do _ <- fetchRepoTarball verbosity repo pkgid return () cabal-install-1.22.6.0/Distribution/Client/HttpUtils.hs0000644000000000000000000001335612540225656021044 0ustar0000000000000000----------------------------------------------------------------------------- -- | Separate module for HTTP actions, using a proxy server if one exists ----------------------------------------------------------------------------- module Distribution.Client.HttpUtils ( DownloadResult(..), downloadURI, getHTTP, cabalBrowse, proxy, isOldHackageURI ) where import Network.HTTP ( Request (..), Response (..), RequestMethod (..) , Header(..), HeaderName(..), lookupHeader ) import Network.HTTP.Proxy ( Proxy(..), fetchProxy) import Network.URI ( URI (..), URIAuth (..) ) import Network.Browser ( BrowserAction, browse, setAllowBasicAuth, setAuthorityGen , setOutHandler, setErrHandler, setProxy, request) import Network.Stream ( Result, ConnError(..) ) import Control.Monad ( liftM ) import qualified Data.ByteString.Lazy.Char8 as ByteString import Data.ByteString.Lazy (ByteString) import qualified Paths_cabal_install (version) import Distribution.Verbosity (Verbosity) import Distribution.Simple.Utils ( die, info, warn, debug, notice , copyFileVerbose, writeFileAtomic ) import Distribution.System ( buildOS, buildArch ) import Distribution.Text ( display ) import Data.Char ( isSpace ) import qualified System.FilePath.Posix as FilePath.Posix ( splitDirectories ) import System.FilePath ( (<.>) ) import System.Directory ( doesFileExist ) data DownloadResult = FileAlreadyInCache | FileDownloaded FilePath deriving (Eq) -- Trim trim :: String -> String trim = f . f where f = reverse . dropWhile isSpace -- |Get the local proxy settings --TODO: print info message when we're using a proxy based on verbosity proxy :: Verbosity -> IO Proxy proxy _verbosity = do p <- fetchProxy True -- Handle empty proxy strings return $ case p of Proxy uri auth -> let uri' = trim uri in if uri' == "" then NoProxy else Proxy uri' auth _ -> p mkRequest :: URI -> Maybe String -- ^ Optional etag to be set in the If-None-Match HTTP header. 
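-- (editorial note on the caching scheme: downloadURI further below stores
--  the server's ETag next to the downloaded file as <path>.etag, sends it
--  back through this If-None-Match header on the next request, and treats
--  a 304 response as FileAlreadyInCache, while a 200 rewrites the file
--  and, when the server sends a new ETag, the .etag file as well.)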
-> Request ByteString mkRequest uri etag = Request{ rqURI = uri , rqMethod = GET , rqHeaders = Header HdrUserAgent userAgent : ifNoneMatchHdr , rqBody = ByteString.empty } where userAgent = concat [ "cabal-install/", display Paths_cabal_install.version , " (", display buildOS, "; ", display buildArch, ")" ] ifNoneMatchHdr = maybe [] (\t -> [Header HdrIfNoneMatch t]) etag -- |Carry out a GET request, using the local proxy settings getHTTP :: Verbosity -> URI -> Maybe String -- ^ Optional etag to check if we already have the latest file. -> IO (Result (Response ByteString)) getHTTP verbosity uri etag = liftM (\(_, resp) -> Right resp) $ cabalBrowse verbosity Nothing (request (mkRequest uri etag)) cabalBrowse :: Verbosity -> Maybe (String, String) -> BrowserAction s a -> IO a cabalBrowse verbosity auth act = do p <- proxy verbosity browse $ do setProxy p setErrHandler (warn verbosity . ("http error: "++)) setOutHandler (debug verbosity) setAllowBasicAuth False setAuthorityGen (\_ _ -> return auth) act downloadURI :: Verbosity -> URI -- ^ What to download -> FilePath -- ^ Where to put it -> IO DownloadResult downloadURI verbosity uri path | uriScheme uri == "file:" = do copyFileVerbose verbosity (uriPath uri) path return (FileDownloaded path) -- Can we store the hash of the file so we can safely return path when the -- hash matches to avoid unnecessary computation? downloadURI verbosity uri path = do let etagPath = path <.> "etag" targetExists <- doesFileExist path etagPathExists <- doesFileExist etagPath -- In rare cases the target file doesn't exist, but the etag does. etag <- if targetExists && etagPathExists then liftM Just $ readFile etagPath else return Nothing result <- getHTTP verbosity uri etag let result' = case result of Left err -> Left err Right rsp -> case rspCode rsp of (2,0,0) -> Right rsp (3,0,4) -> Right rsp (a,b,c) -> Left err where err = ErrorMisc $ "Error HTTP code: " ++ concatMap show [a,b,c] -- Only write the etag if we get a 200 response code. -- A 304 still sends us an etag header. case result' of Left _ -> return () Right rsp -> case rspCode rsp of (2,0,0) -> case lookupHeader HdrETag (rspHeaders rsp) of Nothing -> return () Just newEtag -> writeFile etagPath newEtag (_,_,_) -> return () case result' of Left err -> die $ "Failed to download " ++ show uri ++ " : " ++ show err Right rsp -> case rspCode rsp of (2,0,0) -> do info verbosity ("Downloaded to " ++ path) writeFileAtomic path $ rspBody rsp return (FileDownloaded path) (3,0,4) -> do notice verbosity "Skipping download: Local and remote files match." return FileAlreadyInCache (_,_,_) -> return (FileDownloaded path) --FIXME: check the content-length header matches the body length. --TODO: stream the download into the file rather than buffering the whole -- thing in memory. -- Utility function for legacy support. 
isOldHackageURI :: URI -> Bool isOldHackageURI uri = case uriAuthority uri of Just (URIAuth {uriRegName = "hackage.haskell.org"}) -> FilePath.Posix.splitDirectories (uriPath uri) == ["/","packages","archive"] _ -> False cabal-install-1.22.6.0/Distribution/Client/JobControl.hs0000644000000000000000000000436212540225656021154 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.JobControl -- Copyright : (c) Duncan Coutts 2012 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- A job control concurrency abstraction ----------------------------------------------------------------------------- module Distribution.Client.JobControl ( JobControl, newSerialJobControl, newParallelJobControl, spawnJob, collectJob, JobLimit, newJobLimit, withJobLimit, Lock, newLock, criticalSection ) where import Control.Monad import Control.Concurrent hiding (QSem, newQSem, waitQSem, signalQSem) import Control.Exception (SomeException, bracket_, mask, throw, try) import Distribution.Client.Compat.Semaphore data JobControl m a = JobControl { spawnJob :: m a -> m (), collectJob :: m a } newSerialJobControl :: IO (JobControl IO a) newSerialJobControl = do queue <- newChan return JobControl { spawnJob = spawn queue, collectJob = collect queue } where spawn :: Chan (IO a) -> IO a -> IO () spawn = writeChan collect :: Chan (IO a) -> IO a collect = join . readChan newParallelJobControl :: IO (JobControl IO a) newParallelJobControl = do resultVar <- newEmptyMVar return JobControl { spawnJob = spawn resultVar, collectJob = collect resultVar } where spawn :: MVar (Either SomeException a) -> IO a -> IO () spawn resultVar job = mask $ \restore -> forkIO (do res <- try (restore job) putMVar resultVar res) >> return () collect :: MVar (Either SomeException a) -> IO a collect resultVar = takeMVar resultVar >>= either throw return data JobLimit = JobLimit QSem newJobLimit :: Int -> IO JobLimit newJobLimit n = fmap JobLimit (newQSem n) withJobLimit :: JobLimit -> IO a -> IO a withJobLimit (JobLimit sem) = bracket_ (waitQSem sem) (signalQSem sem) newtype Lock = Lock (MVar ()) newLock :: IO Lock newLock = fmap Lock $ newMVar () criticalSection :: Lock -> IO a -> IO a criticalSection (Lock lck) act = bracket_ (takeMVar lck) (putMVar lck ()) act cabal-install-1.22.6.0/Distribution/Client/Setup.hs0000644000000000000000000024643212540225656020207 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Setup -- Copyright : (c) David Himmelstrup 2005 -- License : BSD-like -- -- Maintainer : lemmih@gmail.com -- Stability : provisional -- Portability : portable -- -- ----------------------------------------------------------------------------- module Distribution.Client.Setup ( globalCommand, GlobalFlags(..), defaultGlobalFlags, globalRepos , configureCommand, ConfigFlags(..), filterConfigureFlags , configureExCommand, ConfigExFlags(..), defaultConfigExFlags , configureExOptions , buildCommand, BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..) , replCommand, testCommand, benchmarkCommand , installCommand, InstallFlags(..), installOptions, defaultInstallFlags , listCommand, ListFlags(..) , updateCommand , upgradeCommand , infoCommand, InfoFlags(..) , fetchCommand, FetchFlags(..) , freezeCommand, FreezeFlags(..) , getCommand, unpackCommand, GetFlags(..) 
, checkCommand , formatCommand , uploadCommand, UploadFlags(..) , reportCommand, ReportFlags(..) , runCommand , initCommand, IT.InitFlags(..) , sdistCommand, SDistFlags(..), SDistExFlags(..), ArchiveFormat(..) , win32SelfUpgradeCommand, Win32SelfUpgradeFlags(..) , sandboxCommand, defaultSandboxLocation, SandboxFlags(..) , execCommand, ExecFlags(..) , userConfigCommand, UserConfigFlags(..) , parsePackageArgs --TODO: stop exporting these: , showRepo , parseRepo ) where import Distribution.Client.Types ( Username(..), Password(..), Repo(..), RemoteRepo(..), LocalRepo(..) ) import Distribution.Client.BuildReports.Types ( ReportLevel(..) ) import Distribution.Client.Dependency.Types ( AllowNewer(..), PreSolver(..) ) import qualified Distribution.Client.Init.Types as IT ( InitFlags(..), PackageType(..) ) import Distribution.Client.Targets ( UserConstraint, readUserConstraint ) import Distribution.Utils.NubList ( NubList, toNubList, fromNubList) import Distribution.Simple.Compiler (PackageDB) import Distribution.Simple.Program ( defaultProgramConfiguration ) import Distribution.Simple.Command hiding (boolOpt, boolOpt') import qualified Distribution.Simple.Command as Command import qualified Distribution.Simple.Setup as Cabal import Distribution.Simple.Setup ( ConfigFlags(..), BuildFlags(..), ReplFlags , TestFlags(..), BenchmarkFlags(..) , SDistFlags(..), HaddockFlags(..) , readPackageDbList, showPackageDbList , Flag(..), toFlag, fromFlag, flagToMaybe, flagToList , optionVerbosity, boolOpt, boolOpt', trueArg, falseArg, optionNumJobs ) import Distribution.Simple.InstallDirs ( PathTemplate, InstallDirs(sysconfdir) , toPathTemplate, fromPathTemplate ) import Distribution.Version ( Version(Version), anyVersion, thisVersion ) import Distribution.Package ( PackageIdentifier, packageName, packageVersion, Dependency(..) ) import Distribution.PackageDescription ( RepoKind(..) ) import Distribution.Text ( Text(..), display ) import Distribution.ReadE ( ReadE(..), readP_to_E, succeedReadE ) import qualified Distribution.Compat.ReadP as Parse ( ReadP, readP_to_S, readS_to_P, char, munch1, pfail, sepBy1, (+++) ) import Distribution.Verbosity ( Verbosity, normal ) import Distribution.Simple.Utils ( wrapText, wrapLine ) import Data.Char ( isSpace, isAlphaNum ) import Data.List ( intercalate, delete, deleteFirstsBy ) import Data.Maybe ( listToMaybe, maybeToList, fromMaybe ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( Monoid(..) ) #endif import Control.Monad ( liftM ) import System.FilePath ( () ) import Network.URI ( parseAbsoluteURI, uriToString ) -- ------------------------------------------------------------ -- * Global flags -- ------------------------------------------------------------ -- | Flags that apply at the top level, not to any sub-command. data GlobalFlags = GlobalFlags { globalVersion :: Flag Bool, globalNumericVersion :: Flag Bool, globalConfigFile :: Flag FilePath, globalSandboxConfigFile :: Flag FilePath, globalRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers. 
globalCacheDir :: Flag FilePath, globalLocalRepos :: NubList FilePath, globalLogsDir :: Flag FilePath, globalWorldFile :: Flag FilePath, globalRequireSandbox :: Flag Bool, globalIgnoreSandbox :: Flag Bool } defaultGlobalFlags :: GlobalFlags defaultGlobalFlags = GlobalFlags { globalVersion = Flag False, globalNumericVersion = Flag False, globalConfigFile = mempty, globalSandboxConfigFile = mempty, globalRemoteRepos = mempty, globalCacheDir = mempty, globalLocalRepos = mempty, globalLogsDir = mempty, globalWorldFile = mempty, globalRequireSandbox = Flag False, globalIgnoreSandbox = Flag False } globalCommand :: [Command action] -> CommandUI GlobalFlags globalCommand commands = CommandUI { commandName = "", commandSynopsis = "Command line interface to the Haskell Cabal infrastructure.", commandUsage = \pname -> "See http://www.haskell.org/cabal/ for more information.\n" ++ "\n" ++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n", commandDescription = Just $ \pname -> let commands' = commands ++ [commandAddAction helpCommandUI undefined] cmdDescs = getNormalCommandDescriptions commands' -- if new commands are added, we want them to appear even if they -- are not included in the custom listing below. Thus, we calculate -- the `otherCmds` list and append it under the `other` category. -- Alternatively, a new testcase could be added that ensures that -- the set of commands listed here is equal to the set of commands -- that are actually available. otherCmds = deleteFirstsBy (==) (map fst cmdDescs) [ "help" , "update" , "install" , "fetch" , "list" , "info" , "user-config" , "get" , "init" , "configure" , "build" , "clean" , "run" , "repl" , "test" , "bench" , "check" , "sdist" , "upload" , "report" , "freeze" , "haddock" , "hscolour" , "copy" , "register" , "sandbox" , "exec" ] maxlen = maximum $ [length name | (name, _) <- cmdDescs] align str = str ++ replicate (maxlen - length str) ' ' startGroup n = " ["++n++"]" par = "" addCmd n = case lookup n cmdDescs of Nothing -> "" Just d -> " " ++ align n ++ " " ++ d addCmdCustom n d = case lookup n cmdDescs of -- make sure that the -- command still exists. Nothing -> "" Just _ -> " " ++ align n ++ " " ++ d in "Commands:\n" ++ unlines ( [ startGroup "global" , addCmd "update" , addCmd "install" , par , addCmd "help" , addCmd "info" , addCmd "list" , addCmd "fetch" , addCmd "user-config" , par , startGroup "package" , addCmd "get" , addCmd "init" , par , addCmd "configure" , addCmd "build" , addCmd "clean" , par , addCmd "run" , addCmd "repl" , addCmd "test" , addCmd "bench" , par , addCmd "check" , addCmd "sdist" , addCmd "upload" , addCmd "report" , par , addCmd "freeze" , addCmd "haddock" , addCmd "hscolour" , addCmd "copy" , addCmd "register" , par , startGroup "sandbox" , addCmd "sandbox" , addCmd "exec" , addCmdCustom "repl" "Open interpreter with access to sandbox packages." 
] ++ if null otherCmds then [] else par :startGroup "other" :[addCmd n | n <- otherCmds]) ++ "\n" ++ "For more information about a command use:\n" ++ " " ++ pname ++ " COMMAND --help\n" ++ "or " ++ pname ++ " help COMMAND\n" ++ "\n" ++ "To install Cabal packages from hackage use:\n" ++ " " ++ pname ++ " install foo [--dry-run]\n" ++ "\n" ++ "Occasionally you need to update the list of available packages:\n" ++ " " ++ pname ++ " update\n", commandNotes = Nothing, commandDefaultFlags = mempty, commandOptions = \showOrParseArgs -> (case showOrParseArgs of ShowArgs -> take 6; ParseArgs -> id) [option ['V'] ["version"] "Print version information" globalVersion (\v flags -> flags { globalVersion = v }) trueArg ,option [] ["numeric-version"] "Print just the version number" globalNumericVersion (\v flags -> flags { globalNumericVersion = v }) trueArg ,option [] ["config-file"] "Set an alternate location for the config file" globalConfigFile (\v flags -> flags { globalConfigFile = v }) (reqArgFlag "FILE") ,option [] ["sandbox-config-file"] "Set an alternate location for the sandbox config file (default: './cabal.sandbox.config')" globalConfigFile (\v flags -> flags { globalSandboxConfigFile = v }) (reqArgFlag "FILE") ,option [] ["require-sandbox"] "requiring the presence of a sandbox for sandbox-aware commands" globalRequireSandbox (\v flags -> flags { globalRequireSandbox = v }) (boolOpt' ([], ["require-sandbox"]) ([], ["no-require-sandbox"])) ,option [] ["ignore-sandbox"] "Ignore any existing sandbox" globalIgnoreSandbox (\v flags -> flags { globalIgnoreSandbox = v }) trueArg ,option [] ["remote-repo"] "The name and url for a remote repository" globalRemoteRepos (\v flags -> flags { globalRemoteRepos = v }) (reqArg' "NAME:URL" (toNubList . maybeToList . readRepo) (map showRepo . 
fromNubList)) ,option [] ["remote-repo-cache"] "The location where downloads from all remote repos are cached" globalCacheDir (\v flags -> flags { globalCacheDir = v }) (reqArgFlag "DIR") ,option [] ["local-repo"] "The location of a local repository" globalLocalRepos (\v flags -> flags { globalLocalRepos = v }) (reqArg' "DIR" (\x -> toNubList [x]) fromNubList) ,option [] ["logs-dir"] "The location to put log files" globalLogsDir (\v flags -> flags { globalLogsDir = v }) (reqArgFlag "DIR") ,option [] ["world-file"] "The location of the world file" globalWorldFile (\v flags -> flags { globalWorldFile = v }) (reqArgFlag "FILE") ] } instance Monoid GlobalFlags where mempty = GlobalFlags { globalVersion = mempty, globalNumericVersion = mempty, globalConfigFile = mempty, globalSandboxConfigFile = mempty, globalRemoteRepos = mempty, globalCacheDir = mempty, globalLocalRepos = mempty, globalLogsDir = mempty, globalWorldFile = mempty, globalRequireSandbox = mempty, globalIgnoreSandbox = mempty } mappend a b = GlobalFlags { globalVersion = combine globalVersion, globalNumericVersion = combine globalNumericVersion, globalConfigFile = combine globalConfigFile, globalSandboxConfigFile = combine globalConfigFile, globalRemoteRepos = combine globalRemoteRepos, globalCacheDir = combine globalCacheDir, globalLocalRepos = combine globalLocalRepos, globalLogsDir = combine globalLogsDir, globalWorldFile = combine globalWorldFile, globalRequireSandbox = combine globalRequireSandbox, globalIgnoreSandbox = combine globalIgnoreSandbox } where combine field = field a `mappend` field b globalRepos :: GlobalFlags -> [Repo] globalRepos globalFlags = remoteRepos ++ localRepos where remoteRepos = [ Repo (Left remote) cacheDir | remote <- fromNubList $ globalRemoteRepos globalFlags , let cacheDir = fromFlag (globalCacheDir globalFlags) remoteRepoName remote ] localRepos = [ Repo (Right LocalRepo) local | local <- fromNubList $ globalLocalRepos globalFlags ] -- ------------------------------------------------------------ -- * Config flags -- ------------------------------------------------------------ configureCommand :: CommandUI ConfigFlags configureCommand = (Cabal.configureCommand defaultProgramConfiguration) { commandDefaultFlags = mempty } configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags] configureOptions = commandOptions configureCommand filterConfigureFlags :: ConfigFlags -> Version -> ConfigFlags filterConfigureFlags flags cabalLibVersion | cabalLibVersion >= Version [1,22,0] [] = flags_latest -- ^ NB: we expect the latest version to be the most common case. | cabalLibVersion < Version [1,3,10] [] = flags_1_3_10 | cabalLibVersion < Version [1,10,0] [] = flags_1_10_0 | cabalLibVersion < Version [1,14,0] [] = flags_1_14_0 | cabalLibVersion < Version [1,18,0] [] = flags_1_18_0 | cabalLibVersion < Version [1,19,1] [] = flags_1_19_0 | cabalLibVersion < Version [1,19,2] [] = flags_1_19_1 | cabalLibVersion < Version [1,21,1] [] = flags_1_20_0 | cabalLibVersion < Version [1,22,0] [] = flags_1_21_0 | otherwise = flags_latest where -- Cabal >= 1.19.1 uses '--dependency' and does not need '--constraint'. flags_latest = flags { configConstraints = [] } -- Cabal < 1.22 doesn't know about '--disable-debug-info'. 
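-- (editorial example: a project built against Cabal 1.16.x matches the
--  "< Version [1,18,0]" guard above and gets flags_1_18_0; since each
--  flags_* record below is defined in terms of the next newer one, that
--  single choice also applies the newer filters, clearing for instance
--  --exact-configuration, library stripping and the sysconfdir install
--  directory.)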
flags_1_21_0 = flags_latest { configDebugInfo = NoFlag } -- Cabal < 1.21.1 doesn't know about 'disable-relocatable' -- Cabal < 1.21.1 doesn't know about 'enable-profiling' flags_1_20_0 = flags_1_21_0 { configRelocatable = NoFlag , configProfExe = configProfExe flags , configProfLib = configProfLib flags , configCoverage = NoFlag , configLibCoverage = configCoverage flags -- HACK: See #2409. , configProgramPaths = ("cabalConfProf", "/TRUE") `delete` configProgramPaths flags } -- Cabal < 1.19.2 doesn't know about '--exact-configuration' and -- '--enable-library-stripping'. flags_1_19_1 = flags_1_20_0 { configExactConfiguration = NoFlag , configStripLibs = NoFlag } -- Cabal < 1.19.1 uses '--constraint' instead of '--dependency'. flags_1_19_0 = flags_1_19_1 { configDependencies = [] , configConstraints = configConstraints flags } -- Cabal < 1.18.0 doesn't know about --extra-prog-path and --sysconfdir. flags_1_18_0 = flags_1_19_0 { configProgramPathExtra = toNubList [] , configInstallDirs = configInstallDirs_1_18_0} configInstallDirs_1_18_0 = (configInstallDirs flags) { sysconfdir = NoFlag } -- Cabal < 1.14.0 doesn't know about '--disable-benchmarks'. flags_1_14_0 = flags_1_18_0 { configBenchmarks = NoFlag } -- Cabal < 1.10.0 doesn't know about '--disable-tests'. flags_1_10_0 = flags_1_14_0 { configTests = NoFlag } -- Cabal < 1.3.10 does not grok the '--constraints' flag. flags_1_3_10 = flags_1_10_0 { configConstraints = [] } -- ------------------------------------------------------------ -- * Config extra flags -- ------------------------------------------------------------ -- | cabal configure takes some extra flags beyond runghc Setup configure -- data ConfigExFlags = ConfigExFlags { configCabalVersion :: Flag Version, configExConstraints:: [UserConstraint], configPreferences :: [Dependency], configSolver :: Flag PreSolver, configAllowNewer :: Flag AllowNewer } defaultConfigExFlags :: ConfigExFlags defaultConfigExFlags = mempty { configSolver = Flag defaultSolver , configAllowNewer = Flag AllowNewerNone } configureExCommand :: CommandUI (ConfigFlags, ConfigExFlags) configureExCommand = configureCommand { commandDefaultFlags = (mempty, defaultConfigExFlags), commandOptions = \showOrParseArgs -> liftOptions fst setFst (filter ((`notElem` ["constraint", "dependency", "exact-configuration"]) . optionName) $ configureOptions showOrParseArgs) ++ liftOptions snd setSnd (configureExOptions showOrParseArgs) } where setFst a (_,b) = (a,b) setSnd b (a,_) = (a,b) configureExOptions :: ShowOrParseArgs -> [OptionField ConfigExFlags] configureExOptions _showOrParseArgs = [ option [] ["cabal-lib-version"] ("Select which version of the Cabal lib to use to build packages " ++ "(useful for testing).") configCabalVersion (\v flags -> flags { configCabalVersion = v }) (reqArg "VERSION" (readP_to_E ("Cannot parse cabal lib version: "++) (fmap toFlag parse)) (map display . 
flagToList)) , option [] ["constraint"] "Specify constraints on a package (version, installed/source, flags)" configExConstraints (\v flags -> flags { configExConstraints = v }) (reqArg "CONSTRAINT" (fmap (\x -> [x]) (ReadE readUserConstraint)) (map display)) , option [] ["preference"] "Specify preferences (soft constraints) on the version of a package" configPreferences (\v flags -> flags { configPreferences = v }) (reqArg "CONSTRAINT" (readP_to_E (const "dependency expected") (fmap (\x -> [x]) parse)) (map display)) , optionSolver configSolver (\v flags -> flags { configSolver = v }) , option [] ["allow-newer"] ("Ignore upper bounds in all dependencies or " ++ allowNewerArgument) configAllowNewer (\v flags -> flags { configAllowNewer = v}) (optArg allowNewerArgument (fmap Flag allowNewerParser) (Flag AllowNewerAll) allowNewerPrinter) ] where allowNewerArgument = "DEPS" instance Monoid ConfigExFlags where mempty = ConfigExFlags { configCabalVersion = mempty, configExConstraints= mempty, configPreferences = mempty, configSolver = mempty, configAllowNewer = mempty } mappend a b = ConfigExFlags { configCabalVersion = combine configCabalVersion, configExConstraints= combine configExConstraints, configPreferences = combine configPreferences, configSolver = combine configSolver, configAllowNewer = combine configAllowNewer } where combine field = field a `mappend` field b -- ------------------------------------------------------------ -- * Build flags -- ------------------------------------------------------------ data SkipAddSourceDepsCheck = SkipAddSourceDepsCheck | DontSkipAddSourceDepsCheck deriving Eq data BuildExFlags = BuildExFlags { buildOnly :: Flag SkipAddSourceDepsCheck } buildExOptions :: ShowOrParseArgs -> [OptionField BuildExFlags] buildExOptions _showOrParseArgs = option [] ["only"] "Don't reinstall add-source dependencies (sandbox-only)" buildOnly (\v flags -> flags { buildOnly = v }) (noArg (Flag SkipAddSourceDepsCheck)) : [] buildCommand :: CommandUI (BuildFlags, BuildExFlags) buildCommand = parent { commandDefaultFlags = (commandDefaultFlags parent, mempty), commandOptions = \showOrParseArgs -> liftOptions fst setFst (commandOptions parent showOrParseArgs) ++ liftOptions snd setSnd (buildExOptions showOrParseArgs) } where setFst a (_,b) = (a,b) setSnd b (a,_) = (a,b) parent = Cabal.buildCommand defaultProgramConfiguration instance Monoid BuildExFlags where mempty = BuildExFlags { buildOnly = mempty } mappend a b = BuildExFlags { buildOnly = combine buildOnly } where combine field = field a `mappend` field b -- ------------------------------------------------------------ -- * Repl command -- ------------------------------------------------------------ replCommand :: CommandUI (ReplFlags, BuildExFlags) replCommand = parent { commandDefaultFlags = (commandDefaultFlags parent, mempty), commandOptions = \showOrParseArgs -> liftOptions fst setFst (commandOptions parent showOrParseArgs) ++ liftOptions snd setSnd (buildExOptions showOrParseArgs) } where setFst a (_,b) = (a,b) setSnd b (a,_) = (a,b) parent = Cabal.replCommand defaultProgramConfiguration -- ------------------------------------------------------------ -- * Test command -- ------------------------------------------------------------ testCommand :: CommandUI (TestFlags, BuildFlags, BuildExFlags) testCommand = parent { commandDefaultFlags = (commandDefaultFlags parent, Cabal.defaultBuildFlags, mempty), commandOptions = \showOrParseArgs -> liftOptions get1 set1 (commandOptions parent showOrParseArgs) ++ liftOptions get2 set2 
(Cabal.buildOptions progConf showOrParseArgs) ++ liftOptions get3 set3 (buildExOptions showOrParseArgs) } where get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c) get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c) get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c) parent = Cabal.testCommand progConf = defaultProgramConfiguration -- ------------------------------------------------------------ -- * Bench command -- ------------------------------------------------------------ benchmarkCommand :: CommandUI (BenchmarkFlags, BuildFlags, BuildExFlags) benchmarkCommand = parent { commandDefaultFlags = (commandDefaultFlags parent, Cabal.defaultBuildFlags, mempty), commandOptions = \showOrParseArgs -> liftOptions get1 set1 (commandOptions parent showOrParseArgs) ++ liftOptions get2 set2 (Cabal.buildOptions progConf showOrParseArgs) ++ liftOptions get3 set3 (buildExOptions showOrParseArgs) } where get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c) get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c) get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c) parent = Cabal.benchmarkCommand progConf = defaultProgramConfiguration -- ------------------------------------------------------------ -- * Fetch command -- ------------------------------------------------------------ data FetchFlags = FetchFlags { -- fetchOutput :: Flag FilePath, fetchDeps :: Flag Bool, fetchDryRun :: Flag Bool, fetchSolver :: Flag PreSolver, fetchMaxBackjumps :: Flag Int, fetchReorderGoals :: Flag Bool, fetchIndependentGoals :: Flag Bool, fetchShadowPkgs :: Flag Bool, fetchStrongFlags :: Flag Bool, fetchVerbosity :: Flag Verbosity } defaultFetchFlags :: FetchFlags defaultFetchFlags = FetchFlags { -- fetchOutput = mempty, fetchDeps = toFlag True, fetchDryRun = toFlag False, fetchSolver = Flag defaultSolver, fetchMaxBackjumps = Flag defaultMaxBackjumps, fetchReorderGoals = Flag False, fetchIndependentGoals = Flag False, fetchShadowPkgs = Flag False, fetchStrongFlags = Flag False, fetchVerbosity = toFlag normal } fetchCommand :: CommandUI FetchFlags fetchCommand = CommandUI { commandName = "fetch", commandSynopsis = "Downloads packages for later installation.", commandUsage = usageAlternatives "fetch" [ "[FLAGS] PACKAGES" ], commandDescription = Just $ \_ -> "Note that it currently is not possible to fetch the dependencies for a\n" ++ "package in the current directory.\n", commandNotes = Nothing, commandDefaultFlags = defaultFetchFlags, commandOptions = \ showOrParseArgs -> [ optionVerbosity fetchVerbosity (\v flags -> flags { fetchVerbosity = v }) -- , option "o" ["output"] -- "Put the package(s) somewhere specific rather than the usual cache." -- fetchOutput (\v flags -> flags { fetchOutput = v }) -- (reqArgFlag "PATH") , option [] ["dependencies", "deps"] "Resolve and fetch dependencies (default)" fetchDeps (\v flags -> flags { fetchDeps = v }) trueArg , option [] ["no-dependencies", "no-deps"] "Ignore dependencies" fetchDeps (\v flags -> flags { fetchDeps = v }) falseArg , option [] ["dry-run"] "Do not install anything, only print what would be installed." 
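-- Illustrative usage (not part of the original source): `cabal fetch foo`
-- downloads foo together with its resolved dependencies, while
-- `cabal fetch --no-deps foo` downloads only foo's own tarball.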
fetchDryRun (\v flags -> flags { fetchDryRun = v }) trueArg ] ++ optionSolver fetchSolver (\v flags -> flags { fetchSolver = v }) : optionSolverFlags showOrParseArgs fetchMaxBackjumps (\v flags -> flags { fetchMaxBackjumps = v }) fetchReorderGoals (\v flags -> flags { fetchReorderGoals = v }) fetchIndependentGoals (\v flags -> flags { fetchIndependentGoals = v }) fetchShadowPkgs (\v flags -> flags { fetchShadowPkgs = v }) fetchStrongFlags (\v flags -> flags { fetchStrongFlags = v }) } -- ------------------------------------------------------------ -- * Freeze command -- ------------------------------------------------------------ data FreezeFlags = FreezeFlags { freezeDryRun :: Flag Bool, freezeTests :: Flag Bool, freezeBenchmarks :: Flag Bool, freezeSolver :: Flag PreSolver, freezeMaxBackjumps :: Flag Int, freezeReorderGoals :: Flag Bool, freezeIndependentGoals :: Flag Bool, freezeShadowPkgs :: Flag Bool, freezeStrongFlags :: Flag Bool, freezeVerbosity :: Flag Verbosity } defaultFreezeFlags :: FreezeFlags defaultFreezeFlags = FreezeFlags { freezeDryRun = toFlag False, freezeTests = toFlag False, freezeBenchmarks = toFlag False, freezeSolver = Flag defaultSolver, freezeMaxBackjumps = Flag defaultMaxBackjumps, freezeReorderGoals = Flag False, freezeIndependentGoals = Flag False, freezeShadowPkgs = Flag False, freezeStrongFlags = Flag False, freezeVerbosity = toFlag normal } freezeCommand :: CommandUI FreezeFlags freezeCommand = CommandUI { commandName = "freeze", commandSynopsis = "Freeze dependencies.", commandDescription = Just $ \_ -> wrapText $ "Calculates a valid set of dependencies and their exact versions. " ++ "If successful, saves the result to the file `cabal.config`.\n" ++ "\n" ++ "The package versions specified in `cabal.config` will be used for " ++ "any future installs.\n" ++ "\n" ++ "An existing `cabal.config` is ignored and overwritten.\n", commandNotes = Nothing, commandUsage = usageAlternatives "freeze" ["" ,"PACKAGES" ], commandDefaultFlags = defaultFreezeFlags, commandOptions = \ showOrParseArgs -> [ optionVerbosity freezeVerbosity (\v flags -> flags { freezeVerbosity = v }) , option [] ["dry-run"] "Do not freeze anything, only print what would be frozen" freezeDryRun (\v flags -> flags { freezeDryRun = v }) trueArg , option [] ["tests"] "freezing of the dependencies of any test suites in the package description file." freezeTests (\v flags -> flags { freezeTests = v }) (boolOpt [] []) , option [] ["benchmarks"] "freezing of the dependencies of any benchmark suites in the package description file." 
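-- Illustrative usage (not part of the original source): `cabal freeze --tests`
-- also pins the dependencies of the package's test suites, and `--dry-run`
-- prints the computed versions without writing `cabal.config`.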
freezeBenchmarks (\v flags -> flags { freezeBenchmarks = v }) (boolOpt [] []) ] ++ optionSolver freezeSolver (\v flags -> flags { freezeSolver = v }) : optionSolverFlags showOrParseArgs freezeMaxBackjumps (\v flags -> flags { freezeMaxBackjumps = v }) freezeReorderGoals (\v flags -> flags { freezeReorderGoals = v }) freezeIndependentGoals (\v flags -> flags { freezeIndependentGoals = v }) freezeShadowPkgs (\v flags -> flags { freezeShadowPkgs = v }) freezeStrongFlags (\v flags -> flags { freezeStrongFlags = v }) } -- ------------------------------------------------------------ -- * Other commands -- ------------------------------------------------------------ updateCommand :: CommandUI (Flag Verbosity) updateCommand = CommandUI { commandName = "update", commandSynopsis = "Updates list of known packages.", commandDescription = Just $ \_ -> "For all known remote repositories, download the package list.\n", commandNotes = Just $ \_ -> relevantConfigValuesText ["remote-repo" ,"remote-repo-cache" ,"local-repo"], commandUsage = usageFlags "update", commandDefaultFlags = toFlag normal, commandOptions = \_ -> [optionVerbosity id const] } upgradeCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) upgradeCommand = configureCommand { commandName = "upgrade", commandSynopsis = "(command disabled, use install instead)", commandDescription = Nothing, commandUsage = usageFlagsOrPackages "upgrade", commandDefaultFlags = (mempty, mempty, mempty, mempty), commandOptions = commandOptions installCommand } {- cleanCommand :: CommandUI () cleanCommand = makeCommand name shortDesc longDesc emptyFlags options where name = "clean" shortDesc = "Removes downloaded files" longDesc = Nothing emptyFlags = () options _ = [] -} checkCommand :: CommandUI (Flag Verbosity) checkCommand = CommandUI { commandName = "check", commandSynopsis = "Check the package for common mistakes.", commandDescription = Just $ \_ -> wrapText $ "Expects a .cabal package file in the current directory.\n" ++ "\n" ++ "The checks correspond to the requirements for packages on Hackage. " ++ "If no errors or warnings are reported, Hackage will accept this " ++ "package.\n", commandNotes = Nothing, commandUsage = \pname -> "Usage: " ++ pname ++ " check\n", commandDefaultFlags = toFlag normal, commandOptions = \_ -> [] } formatCommand :: CommandUI (Flag Verbosity) formatCommand = CommandUI { commandName = "format", commandSynopsis = "Reformat the .cabal file using the standard style.", commandDescription = Nothing, commandNotes = Nothing, commandUsage = usageAlternatives "format" ["[FILE]"], commandDefaultFlags = toFlag normal, commandOptions = \_ -> [] } runCommand :: CommandUI (BuildFlags, BuildExFlags) runCommand = CommandUI { commandName = "run", commandSynopsis = "Builds and runs an executable.", commandDescription = Just $ \_ -> wrapText $ "Builds and then runs the specified executable. 
If no executable is " ++ "specified, but the package contains just one executable, that one " ++ "is built and executed.\n", commandNotes = Nothing, commandUsage = usageAlternatives "run" ["[FLAGS] [EXECUTABLE] [-- EXECUTABLE_FLAGS]"], commandDefaultFlags = mempty, commandOptions = \showOrParseArgs -> liftOptions fst setFst (commandOptions parent showOrParseArgs) ++ liftOptions snd setSnd (buildExOptions showOrParseArgs) } where setFst a (_,b) = (a,b) setSnd b (a,_) = (a,b) parent = Cabal.buildCommand defaultProgramConfiguration -- ------------------------------------------------------------ -- * Report flags -- ------------------------------------------------------------ data ReportFlags = ReportFlags { reportUsername :: Flag Username, reportPassword :: Flag Password, reportVerbosity :: Flag Verbosity } defaultReportFlags :: ReportFlags defaultReportFlags = ReportFlags { reportUsername = mempty, reportPassword = mempty, reportVerbosity = toFlag normal } reportCommand :: CommandUI ReportFlags reportCommand = CommandUI { commandName = "report", commandSynopsis = "Upload build reports to a remote server.", commandDescription = Nothing, commandNotes = Just $ \_ -> "You can store your Hackage login in the ~/.cabal/config file\n", commandUsage = usageAlternatives "report" ["[FLAGS]"], commandDefaultFlags = defaultReportFlags, commandOptions = \_ -> [optionVerbosity reportVerbosity (\v flags -> flags { reportVerbosity = v }) ,option ['u'] ["username"] "Hackage username." reportUsername (\v flags -> flags { reportUsername = v }) (reqArg' "USERNAME" (toFlag . Username) (flagToList . fmap unUsername)) ,option ['p'] ["password"] "Hackage password." reportPassword (\v flags -> flags { reportPassword = v }) (reqArg' "PASSWORD" (toFlag . Password) (flagToList . fmap unPassword)) ] } instance Monoid ReportFlags where mempty = ReportFlags { reportUsername = mempty, reportPassword = mempty, reportVerbosity = mempty } mappend a b = ReportFlags { reportUsername = combine reportUsername, reportPassword = combine reportPassword, reportVerbosity = combine reportVerbosity } where combine field = field a `mappend` field b -- ------------------------------------------------------------ -- * Get flags -- ------------------------------------------------------------ data GetFlags = GetFlags { getDestDir :: Flag FilePath, getPristine :: Flag Bool, getSourceRepository :: Flag (Maybe RepoKind), getVerbosity :: Flag Verbosity } defaultGetFlags :: GetFlags defaultGetFlags = GetFlags { getDestDir = mempty, getPristine = mempty, getSourceRepository = mempty, getVerbosity = toFlag normal } getCommand :: CommandUI GetFlags getCommand = CommandUI { commandName = "get", commandSynopsis = "Download/Extract a package's source code (repository).", commandDescription = Just $ \_ -> wrapText $ "Creates a local copy of a package's source code. By default it gets " ++ "the source\ntarball and unpacks it in a local subdirectory. " ++ "Alternatively, with -s it will\nget the code from the source " ++ "repository specified by the package.\n", commandNotes = Nothing, commandUsage = usagePackages "get", commandDefaultFlags = defaultGetFlags, commandOptions = \_ -> [ optionVerbosity getVerbosity (\v flags -> flags { getVerbosity = v }) ,option "d" ["destdir"] "Where to place the package source, defaults to the current directory." getDestDir (\v flags -> flags { getDestDir = v }) (reqArgFlag "PATH") ,option "s" ["source-repository"] "Copy the package's source repository (ie git clone, darcs get, etc as appropriate)." 
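-- Illustrative usage (not part of the original source): `cabal get foo`
-- unpacks foo's released tarball, while `cabal get -s foo` instead clones
-- the source repository declared in foo's package description.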
getSourceRepository (\v flags -> flags { getSourceRepository = v }) (optArg "[head|this|...]" (readP_to_E (const "invalid source-repository") (fmap (toFlag . Just) parse)) (Flag Nothing) (map (fmap show) . flagToList)) , option [] ["pristine"] ("Unpack the original pristine tarball, rather than updating the " ++ ".cabal file with the latest revision from the package archive.") getPristine (\v flags -> flags { getPristine = v }) trueArg ] } -- 'cabal unpack' is a deprecated alias for 'cabal get'. unpackCommand :: CommandUI GetFlags unpackCommand = getCommand { commandName = "unpack", commandUsage = usagePackages "unpack" } instance Monoid GetFlags where mempty = GetFlags { getDestDir = mempty, getPristine = mempty, getSourceRepository = mempty, getVerbosity = mempty } mappend a b = GetFlags { getDestDir = combine getDestDir, getPristine = combine getPristine, getSourceRepository = combine getSourceRepository, getVerbosity = combine getVerbosity } where combine field = field a `mappend` field b -- ------------------------------------------------------------ -- * List flags -- ------------------------------------------------------------ data ListFlags = ListFlags { listInstalled :: Flag Bool, listSimpleOutput :: Flag Bool, listVerbosity :: Flag Verbosity, listPackageDBs :: [Maybe PackageDB] } defaultListFlags :: ListFlags defaultListFlags = ListFlags { listInstalled = Flag False, listSimpleOutput = Flag False, listVerbosity = toFlag normal, listPackageDBs = [] } listCommand :: CommandUI ListFlags listCommand = CommandUI { commandName = "list", commandSynopsis = "List packages matching a search string.", commandDescription = Just $ \_ -> wrapText $ "List all packages, or all packages matching one of the search" ++ " strings.\n" ++ "\n" ++ "If there is a sandbox in the current directory and " ++ "config:ignore-sandbox is False, use the sandbox package database. " ++ "Otherwise, use the package database specified with --package-db. " ++ "If not specified, use the user package database.\n", commandNotes = Nothing, commandUsage = usageAlternatives "list" [ "[FLAGS]" , "[FLAGS] STRINGS"], commandDefaultFlags = defaultListFlags, commandOptions = \_ -> [ optionVerbosity listVerbosity (\v flags -> flags { listVerbosity = v }) , option [] ["installed"] "Only print installed packages" listInstalled (\v flags -> flags { listInstalled = v }) trueArg , option [] ["simple-output"] "Print in an easy-to-parse format" listSimpleOutput (\v flags -> flags { listSimpleOutput = v }) trueArg , option "" ["package-db"] "Use a given package database. May be a specific file, 'global', 'user' or 'clear'." 
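-- Illustrative usage (not part of the original source): `cabal list
-- --installed --simple-output` prints one name/version pair per line;
-- repeated --package-db flags build up the database stack, with 'clear'
-- resetting it.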
listPackageDBs (\v flags -> flags { listPackageDBs = v }) (reqArg' "DB" readPackageDbList showPackageDbList) ] } instance Monoid ListFlags where mempty = ListFlags { listInstalled = mempty, listSimpleOutput = mempty, listVerbosity = mempty, listPackageDBs = mempty } mappend a b = ListFlags { listInstalled = combine listInstalled, listSimpleOutput = combine listSimpleOutput, listVerbosity = combine listVerbosity, listPackageDBs = combine listPackageDBs } where combine field = field a `mappend` field b -- ------------------------------------------------------------ -- * Info flags -- ------------------------------------------------------------ data InfoFlags = InfoFlags { infoVerbosity :: Flag Verbosity, infoPackageDBs :: [Maybe PackageDB] } defaultInfoFlags :: InfoFlags defaultInfoFlags = InfoFlags { infoVerbosity = toFlag normal, infoPackageDBs = [] } infoCommand :: CommandUI InfoFlags infoCommand = CommandUI { commandName = "info", commandSynopsis = "Display detailed information about a particular package.", commandDescription = Just $ \_ -> wrapText $ "If there is a sandbox in the current directory and " ++ "config:ignore-sandbox is False, use the sandbox package database. " ++ "Otherwise, use the package database specified with --package-db. " ++ "If not specified, use the user package database.\n", commandNotes = Nothing, commandUsage = usageAlternatives "info" ["[FLAGS] PACKAGES"], commandDefaultFlags = defaultInfoFlags, commandOptions = \_ -> [ optionVerbosity infoVerbosity (\v flags -> flags { infoVerbosity = v }) , option "" ["package-db"] "Use a given package database. May be a specific file, 'global', 'user' or 'clear'." infoPackageDBs (\v flags -> flags { infoPackageDBs = v }) (reqArg' "DB" readPackageDbList showPackageDbList) ] } instance Monoid InfoFlags where mempty = InfoFlags { infoVerbosity = mempty, infoPackageDBs = mempty } mappend a b = InfoFlags { infoVerbosity = combine infoVerbosity, infoPackageDBs = combine infoPackageDBs } where combine field = field a `mappend` field b -- ------------------------------------------------------------ -- * Install flags -- ------------------------------------------------------------ -- | Install takes the same flags as configure along with a few extras. 
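-- Each field below is a 'Flag' (or a 'NubList'), and the 'Flag' Monoid from
-- Distribution.Simple.Setup is right-biased, so combining two 'InstallFlags'
-- values with 'mappend' (see the Monoid instance further down) lets later
-- settings override earlier ones field by field.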
-- data InstallFlags = InstallFlags { installDocumentation :: Flag Bool, installHaddockIndex :: Flag PathTemplate, installDryRun :: Flag Bool, installMaxBackjumps :: Flag Int, installReorderGoals :: Flag Bool, installIndependentGoals :: Flag Bool, installShadowPkgs :: Flag Bool, installStrongFlags :: Flag Bool, installReinstall :: Flag Bool, installAvoidReinstalls :: Flag Bool, installOverrideReinstall :: Flag Bool, installUpgradeDeps :: Flag Bool, installOnly :: Flag Bool, installOnlyDeps :: Flag Bool, installRootCmd :: Flag String, installSummaryFile :: NubList PathTemplate, installLogFile :: Flag PathTemplate, installBuildReports :: Flag ReportLevel, installReportPlanningFailure :: Flag Bool, installSymlinkBinDir :: Flag FilePath, installOneShot :: Flag Bool, installNumJobs :: Flag (Maybe Int), installRunTests :: Flag Bool } defaultInstallFlags :: InstallFlags defaultInstallFlags = InstallFlags { installDocumentation = Flag False, installHaddockIndex = Flag docIndexFile, installDryRun = Flag False, installMaxBackjumps = Flag defaultMaxBackjumps, installReorderGoals = Flag False, installIndependentGoals= Flag False, installShadowPkgs = Flag False, installStrongFlags = Flag False, installReinstall = Flag False, installAvoidReinstalls = Flag False, installOverrideReinstall = Flag False, installUpgradeDeps = Flag False, installOnly = Flag False, installOnlyDeps = Flag False, installRootCmd = mempty, installSummaryFile = mempty, installLogFile = mempty, installBuildReports = Flag NoReports, installReportPlanningFailure = Flag False, installSymlinkBinDir = mempty, installOneShot = Flag False, installNumJobs = mempty, installRunTests = mempty } where docIndexFile = toPathTemplate ("$datadir" </> "doc" </> "$arch-$os-$compiler" </> "index.html") allowNewerParser :: ReadE AllowNewer allowNewerParser = ReadE $ \s -> case s of "" -> Right AllowNewerNone "False" -> Right AllowNewerNone "True" -> Right AllowNewerAll _ -> case readPToMaybe pkgsParser s of Just pkgs -> Right . AllowNewerSome $ pkgs Nothing -> Left ("Cannot parse the list of packages: " ++ s) where pkgsParser = Parse.sepBy1 parse (Parse.char ',') allowNewerPrinter :: Flag AllowNewer -> [Maybe String] allowNewerPrinter (Flag AllowNewerNone) = [Just "False"] allowNewerPrinter (Flag AllowNewerAll) = [Just "True"] allowNewerPrinter (Flag (AllowNewerSome pkgs)) = [Just . intercalate "," . map display $ pkgs] allowNewerPrinter NoFlag = [] defaultMaxBackjumps :: Int defaultMaxBackjumps = 2000 defaultSolver :: PreSolver defaultSolver = Choose allSolvers :: String allSolvers = intercalate ", " (map display ([minBound .. maxBound] :: [PreSolver])) installCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) installCommand = CommandUI { commandName = "install", commandSynopsis = "Install packages.", commandUsage = usageAlternatives "install" [ "[FLAGS]" , "[FLAGS] PACKAGES" ], commandDescription = Just $ \_ -> wrapText $ "Installs one or more packages. By default, the installed package" ++ " will be registered in the user's package database or, if a sandbox" ++ " is present in the current directory, inside the sandbox.\n" ++ "\n" ++ "If PACKAGES are specified, downloads and installs those packages." ++ " Otherwise, install the package in the current directory (and/or its" ++ " dependencies) (there must be exactly one .cabal file in the current" ++ " directory).\n" ++ "\n" ++ "When using a sandbox, the flags for `install` only affect the" ++ " current command and have no effect on future commands. 
(To achieve" ++ " that, `configure` must be used.)\n" ++ " In contrast, without a sandbox, the flags to `install` are saved and" ++ " affect future commands such as `build` and `repl`. See the help for" ++ " `configure` for a list of commands being affected.\n", commandNotes = Just $ \pname -> ( case commandNotes configureCommand of Just desc -> desc pname ++ "\n" Nothing -> "" ) ++ "Examples:\n" ++ " " ++ pname ++ " install " ++ " Package in the current directory\n" ++ " " ++ pname ++ " install foo " ++ " Package from the hackage server\n" ++ " " ++ pname ++ " install foo-1.0 " ++ " Specific version of a package\n" ++ " " ++ pname ++ " install 'foo < 2' " ++ " Constrained package version\n", commandDefaultFlags = (mempty, mempty, mempty, mempty), commandOptions = \showOrParseArgs -> liftOptions get1 set1 (filter ((`notElem` ["constraint", "dependency" , "exact-configuration"]) . optionName) $ configureOptions showOrParseArgs) ++ liftOptions get2 set2 (configureExOptions showOrParseArgs) ++ liftOptions get3 set3 (installOptions showOrParseArgs) ++ liftOptions get4 set4 (haddockOptions showOrParseArgs) } where get1 (a,_,_,_) = a; set1 a (_,b,c,d) = (a,b,c,d) get2 (_,b,_,_) = b; set2 b (a,_,c,d) = (a,b,c,d) get3 (_,_,c,_) = c; set3 c (a,b,_,d) = (a,b,c,d) get4 (_,_,_,d) = d; set4 d (a,b,c,_) = (a,b,c,d) haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags] haddockOptions showOrParseArgs = [ opt { optionName = "haddock-" ++ name, optionDescr = [ fmapOptFlags (\(_, lflags) -> ([], map ("haddock-" ++) lflags)) descr | descr <- optionDescr opt] } | opt <- commandOptions Cabal.haddockCommand showOrParseArgs , let name = optionName opt , name `elem` ["hoogle", "html", "html-location" ,"executables", "tests", "benchmarks", "all", "internal", "css" ,"hyperlink-source", "hscolour-css" ,"contents-location"] ] where fmapOptFlags :: (OptFlags -> OptFlags) -> OptDescr a -> OptDescr a fmapOptFlags modify (ReqArg d f p r w) = ReqArg d (modify f) p r w fmapOptFlags modify (OptArg d f p r i w) = OptArg d (modify f) p r i w fmapOptFlags modify (ChoiceOpt xs) = ChoiceOpt [(d, modify f, i, w) | (d, f, i, w) <- xs] fmapOptFlags modify (BoolOpt d f1 f2 r w) = BoolOpt d (modify f1) (modify f2) r w installOptions :: ShowOrParseArgs -> [OptionField InstallFlags] installOptions showOrParseArgs = [ option "" ["documentation"] "building of documentation" installDocumentation (\v flags -> flags { installDocumentation = v }) (boolOpt [] []) , option [] ["doc-index-file"] "A central index of haddock API documentation (template cannot use $pkgid)" installHaddockIndex (\v flags -> flags { installHaddockIndex = v }) (reqArg' "TEMPLATE" (toFlag.toPathTemplate) (flagToList . fmap fromPathTemplate)) , option [] ["dry-run"] "Do not install anything, only print what would be installed." installDryRun (\v flags -> flags { installDryRun = v }) trueArg ] ++ optionSolverFlags showOrParseArgs installMaxBackjumps (\v flags -> flags { installMaxBackjumps = v }) installReorderGoals (\v flags -> flags { installReorderGoals = v }) installIndependentGoals (\v flags -> flags { installIndependentGoals = v }) installShadowPkgs (\v flags -> flags { installShadowPkgs = v }) installStrongFlags (\v flags -> flags { installStrongFlags = v }) ++ [ option [] ["reinstall"] "Install even if it means installing the same version again." installReinstall (\v flags -> flags { installReinstall = v }) (yesNoOpt showOrParseArgs) , option [] ["avoid-reinstalls"] "Do not select versions that would destructively overwrite installed packages." 
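-- Note: 'yesNoOpt' (defined near the end of this module) accepts both the
-- positive and the "no-"-prefixed form when parsing, so for example
-- --avoid-reinstalls here can be negated with --no-avoid-reinstalls.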
installAvoidReinstalls (\v flags -> flags { installAvoidReinstalls = v }) (yesNoOpt showOrParseArgs) , option [] ["force-reinstalls"] "Reinstall packages even if they will most likely break other installed packages." installOverrideReinstall (\v flags -> flags { installOverrideReinstall = v }) (yesNoOpt showOrParseArgs) , option [] ["upgrade-dependencies"] "Pick the latest version for all dependencies, rather than trying to pick an installed version." installUpgradeDeps (\v flags -> flags { installUpgradeDeps = v }) (yesNoOpt showOrParseArgs) , option [] ["only-dependencies"] "Install only the dependencies necessary to build the given packages" installOnlyDeps (\v flags -> flags { installOnlyDeps = v }) (yesNoOpt showOrParseArgs) , option [] ["dependencies-only"] "A synonym for --only-dependencies" installOnlyDeps (\v flags -> flags { installOnlyDeps = v }) (yesNoOpt showOrParseArgs) , option [] ["root-cmd"] "Command used to gain root privileges, when installing with --global." installRootCmd (\v flags -> flags { installRootCmd = v }) (reqArg' "COMMAND" toFlag flagToList) , option [] ["symlink-bindir"] "Add symlinks to installed executables into this directory." installSymlinkBinDir (\v flags -> flags { installSymlinkBinDir = v }) (reqArgFlag "DIR") , option [] ["build-summary"] "Save build summaries to file (name template can use $pkgid, $compiler, $os, $arch)" installSummaryFile (\v flags -> flags { installSummaryFile = v }) (reqArg' "TEMPLATE" (\x -> toNubList [toPathTemplate x]) (map fromPathTemplate . fromNubList)) , option [] ["build-log"] "Log all builds to file (name template can use $pkgid, $compiler, $os, $arch)" installLogFile (\v flags -> flags { installLogFile = v }) (reqArg' "TEMPLATE" (toFlag.toPathTemplate) (flagToList . fmap fromPathTemplate)) , option [] ["remote-build-reporting"] "Generate build reports to send to a remote server (none, anonymous or detailed)." installBuildReports (\v flags -> flags { installBuildReports = v }) (reqArg "LEVEL" (readP_to_E (const $ "report level must be 'none', " ++ "'anonymous' or 'detailed'") (toFlag `fmap` parse)) (flagToList . fmap display)) , option [] ["report-planning-failure"] "Generate build reports when the dependency solver fails. This is used by the Hackage build bot." installReportPlanningFailure (\v flags -> flags { installReportPlanningFailure = v }) trueArg , option [] ["one-shot"] "Do not record the packages in the world file." installOneShot (\v flags -> flags { installOneShot = v }) (yesNoOpt showOrParseArgs) , option [] ["run-tests"] "Run package test suites during installation." installRunTests (\v flags -> flags { installRunTests = v }) trueArg , optionNumJobs installNumJobs (\v flags -> flags { installNumJobs = v }) ] ++ case showOrParseArgs of -- TODO: remove when "cabal install" -- avoids ParseArgs -> [ option [] ["only"] "Only installs the package in the current directory." 
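-- Note: the "--only" option here is registered only in the ParseArgs case,
-- so it is accepted on the command line but hidden from --help output.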
installOnly (\v flags -> flags { installOnly = v }) trueArg ] _ -> [] instance Monoid InstallFlags where mempty = InstallFlags { installDocumentation = mempty, installHaddockIndex = mempty, installDryRun = mempty, installReinstall = mempty, installAvoidReinstalls = mempty, installOverrideReinstall = mempty, installMaxBackjumps = mempty, installUpgradeDeps = mempty, installReorderGoals = mempty, installIndependentGoals= mempty, installShadowPkgs = mempty, installStrongFlags = mempty, installOnly = mempty, installOnlyDeps = mempty, installRootCmd = mempty, installSummaryFile = mempty, installLogFile = mempty, installBuildReports = mempty, installReportPlanningFailure = mempty, installSymlinkBinDir = mempty, installOneShot = mempty, installNumJobs = mempty, installRunTests = mempty } mappend a b = InstallFlags { installDocumentation = combine installDocumentation, installHaddockIndex = combine installHaddockIndex, installDryRun = combine installDryRun, installReinstall = combine installReinstall, installAvoidReinstalls = combine installAvoidReinstalls, installOverrideReinstall = combine installOverrideReinstall, installMaxBackjumps = combine installMaxBackjumps, installUpgradeDeps = combine installUpgradeDeps, installReorderGoals = combine installReorderGoals, installIndependentGoals= combine installIndependentGoals, installShadowPkgs = combine installShadowPkgs, installStrongFlags = combine installStrongFlags, installOnly = combine installOnly, installOnlyDeps = combine installOnlyDeps, installRootCmd = combine installRootCmd, installSummaryFile = combine installSummaryFile, installLogFile = combine installLogFile, installBuildReports = combine installBuildReports, installReportPlanningFailure = combine installReportPlanningFailure, installSymlinkBinDir = combine installSymlinkBinDir, installOneShot = combine installOneShot, installNumJobs = combine installNumJobs, installRunTests = combine installRunTests } where combine field = field a `mappend` field b -- ------------------------------------------------------------ -- * Upload flags -- ------------------------------------------------------------ data UploadFlags = UploadFlags { uploadCheck :: Flag Bool, uploadUsername :: Flag Username, uploadPassword :: Flag Password, uploadVerbosity :: Flag Verbosity } defaultUploadFlags :: UploadFlags defaultUploadFlags = UploadFlags { uploadCheck = toFlag False, uploadUsername = mempty, uploadPassword = mempty, uploadVerbosity = toFlag normal } uploadCommand :: CommandUI UploadFlags uploadCommand = CommandUI { commandName = "upload", commandSynopsis = "Uploads source packages to Hackage.", commandDescription = Nothing, commandNotes = Just $ \_ -> "You can store your Hackage login in the ~/.cabal/config file\n" ++ relevantConfigValuesText ["username", "password"], commandUsage = \pname -> "Usage: " ++ pname ++ " upload [FLAGS] TARFILES\n", commandDefaultFlags = defaultUploadFlags, commandOptions = \_ -> [optionVerbosity uploadVerbosity (\v flags -> flags { uploadVerbosity = v }) ,option ['c'] ["check"] "Do not upload, just do QA checks." uploadCheck (\v flags -> flags { uploadCheck = v }) trueArg ,option ['u'] ["username"] "Hackage username." uploadUsername (\v flags -> flags { uploadUsername = v }) (reqArg' "USERNAME" (toFlag . Username) (flagToList . fmap unUsername)) ,option ['p'] ["password"] "Hackage password." uploadPassword (\v flags -> flags { uploadPassword = v }) (reqArg' "PASSWORD" (toFlag . Password) (flagToList . 
fmap unPassword)) ] } instance Monoid UploadFlags where mempty = UploadFlags { uploadCheck = mempty, uploadUsername = mempty, uploadPassword = mempty, uploadVerbosity = mempty } mappend a b = UploadFlags { uploadCheck = combine uploadCheck, uploadUsername = combine uploadUsername, uploadPassword = combine uploadPassword, uploadVerbosity = combine uploadVerbosity } where combine field = field a `mappend` field b -- ------------------------------------------------------------ -- * Init flags -- ------------------------------------------------------------ emptyInitFlags :: IT.InitFlags emptyInitFlags = mempty defaultInitFlags :: IT.InitFlags defaultInitFlags = emptyInitFlags { IT.initVerbosity = toFlag normal } initCommand :: CommandUI IT.InitFlags initCommand = CommandUI { commandName = "init", commandSynopsis = "Create a new .cabal package file (interactively).", commandDescription = Just $ \_ -> wrapText $ "Cabalise a project by creating a .cabal, Setup.hs, and " ++ "optionally a LICENSE file.\n" ++ "\n" ++ "Calling init with no arguments (recommended) uses an " ++ "interactive mode, which will try to guess as much as " ++ "possible and prompt you for the rest. Command-line " ++ "arguments are provided for scripting purposes. " ++ "If you don't want interactive mode, be sure to pass " ++ "the -n flag.\n", commandNotes = Nothing, commandUsage = \pname -> "Usage: " ++ pname ++ " init [FLAGS]\n", commandDefaultFlags = defaultInitFlags, commandOptions = \_ -> [ option ['n'] ["non-interactive"] "Non-interactive mode." IT.nonInteractive (\v flags -> flags { IT.nonInteractive = v }) trueArg , option ['q'] ["quiet"] "Do not generate log messages to stdout." IT.quiet (\v flags -> flags { IT.quiet = v }) trueArg , option [] ["no-comments"] "Do not generate explanatory comments in the .cabal file." IT.noComments (\v flags -> flags { IT.noComments = v }) trueArg , option ['m'] ["minimal"] "Generate a minimal .cabal file, that is, do not include extra empty fields. Also implies --no-comments." IT.minimal (\v flags -> flags { IT.minimal = v }) trueArg , option [] ["overwrite"] "Overwrite any existing .cabal, LICENSE, or Setup.hs files without warning." IT.overwrite (\v flags -> flags { IT.overwrite = v }) trueArg , option [] ["package-dir"] "Root directory of the package (default = current directory)." IT.packageDir (\v flags -> flags { IT.packageDir = v }) (reqArgFlag "DIRECTORY") , option ['p'] ["package-name"] "Name of the Cabal package to create." IT.packageName (\v flags -> flags { IT.packageName = v }) (reqArg "PACKAGE" (readP_to_E ("Cannot parse package name: "++) (toFlag `fmap` parse)) (flagToList . fmap display)) , option [] ["version"] "Initial version of the package." IT.version (\v flags -> flags { IT.version = v }) (reqArg "VERSION" (readP_to_E ("Cannot parse package version: "++) (toFlag `fmap` parse)) (flagToList . fmap display)) , option [] ["cabal-version"] "Required version of the Cabal library." IT.cabalVersion (\v flags -> flags { IT.cabalVersion = v }) (reqArg "VERSION_RANGE" (readP_to_E ("Cannot parse Cabal version range: "++) (toFlag `fmap` parse)) (flagToList . fmap display)) , option ['l'] ["license"] "Project license." IT.license (\v flags -> flags { IT.license = v }) (reqArg "LICENSE" (readP_to_E ("Cannot parse license: "++) (toFlag `fmap` parse)) (flagToList . fmap display)) , option ['a'] ["author"] "Name of the project's author." IT.author (\v flags -> flags { IT.author = v }) (reqArgFlag "NAME") , option ['e'] ["email"] "Email address of the maintainer." 
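-- Illustrative usage (not part of the original source; package name, version
-- and license below are made-up examples):
--   cabal init -n --is-library -p my-package --version 0.1.0.0 -l BSD3
-- would generate a library package skeleton without interactive prompts.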
IT.email (\v flags -> flags { IT.email = v }) (reqArgFlag "EMAIL") , option ['u'] ["homepage"] "Project homepage and/or repository." IT.homepage (\v flags -> flags { IT.homepage = v }) (reqArgFlag "URL") , option ['s'] ["synopsis"] "Short project synopsis." IT.synopsis (\v flags -> flags { IT.synopsis = v }) (reqArgFlag "TEXT") , option ['c'] ["category"] "Project category." IT.category (\v flags -> flags { IT.category = v }) (reqArg' "CATEGORY" (\s -> toFlag $ maybe (Left s) Right (readMaybe s)) (flagToList . fmap (either id show))) , option ['x'] ["extra-source-file"] "Extra source file to be distributed with tarball." IT.extraSrc (\v flags -> flags { IT.extraSrc = v }) (reqArg' "FILE" (Just . (:[])) (fromMaybe [])) , option [] ["is-library"] "Build a library." IT.packageType (\v flags -> flags { IT.packageType = v }) (noArg (Flag IT.Library)) , option [] ["is-executable"] "Build an executable." IT.packageType (\v flags -> flags { IT.packageType = v }) (noArg (Flag IT.Executable)) , option [] ["main-is"] "Specify the main module." IT.mainIs (\v flags -> flags { IT.mainIs = v }) (reqArgFlag "FILE") , option [] ["language"] "Specify the default language." IT.language (\v flags -> flags { IT.language = v }) (reqArg "LANGUAGE" (readP_to_E ("Cannot parse language: "++) (toFlag `fmap` parse)) (flagToList . fmap display)) , option ['o'] ["expose-module"] "Export a module from the package." IT.exposedModules (\v flags -> flags { IT.exposedModules = v }) (reqArg "MODULE" (readP_to_E ("Cannot parse module name: "++) ((Just . (:[])) `fmap` parse)) (maybe [] (fmap display))) , option [] ["extension"] "Use a LANGUAGE extension (in the other-extensions field)." IT.otherExts (\v flags -> flags { IT.otherExts = v }) (reqArg "EXTENSION" (readP_to_E ("Cannot parse extension: "++) ((Just . (:[])) `fmap` parse)) (maybe [] (fmap display))) , option ['d'] ["dependency"] "Package dependency." IT.dependencies (\v flags -> flags { IT.dependencies = v }) (reqArg "PACKAGE" (readP_to_E ("Cannot parse dependency: "++) ((Just . (:[])) `fmap` parse)) (maybe [] (fmap display))) , option [] ["source-dir"] "Directory containing package source." IT.sourceDirs (\v flags -> flags { IT.sourceDirs = v }) (reqArg' "DIR" (Just . (:[])) (fromMaybe [])) , option [] ["build-tool"] "Required external build tool." IT.buildTools (\v flags -> flags { IT.buildTools = v }) (reqArg' "TOOL" (Just . (:[])) (fromMaybe [])) , optionVerbosity IT.initVerbosity (\v flags -> flags { IT.initVerbosity = v }) ] } where readMaybe s = case reads s of [(x,"")] -> Just x _ -> Nothing -- ------------------------------------------------------------ -- * SDist flags -- ------------------------------------------------------------ -- | Extra flags to @sdist@ beyond runghc Setup sdist -- data SDistExFlags = SDistExFlags { sDistFormat :: Flag ArchiveFormat } deriving Show data ArchiveFormat = TargzFormat | ZipFormat -- | ... 
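-- The two constructors correspond to the --targz (default) and --zip
-- choices wired up in 'sdistCommand' below.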
deriving (Show, Eq) defaultSDistExFlags :: SDistExFlags defaultSDistExFlags = SDistExFlags { sDistFormat = Flag TargzFormat } sdistCommand :: CommandUI (SDistFlags, SDistExFlags) sdistCommand = Cabal.sdistCommand { commandDefaultFlags = (commandDefaultFlags Cabal.sdistCommand, defaultSDistExFlags), commandOptions = \showOrParseArgs -> liftOptions fst setFst (commandOptions Cabal.sdistCommand showOrParseArgs) ++ liftOptions snd setSnd sdistExOptions } where setFst a (_,b) = (a,b) setSnd b (a,_) = (a,b) sdistExOptions = [option [] ["archive-format"] "archive-format" sDistFormat (\v flags -> flags { sDistFormat = v }) (choiceOpt [ (Flag TargzFormat, ([], ["targz"]), "Produce a '.tar.gz' format archive (default and required for uploading to hackage)") , (Flag ZipFormat, ([], ["zip"]), "Produce a '.zip' format archive") ]) ] instance Monoid SDistExFlags where mempty = SDistExFlags { sDistFormat = mempty } mappend a b = SDistExFlags { sDistFormat = combine sDistFormat } where combine field = field a `mappend` field b -- ------------------------------------------------------------ -- * Win32SelfUpgrade flags -- ------------------------------------------------------------ data Win32SelfUpgradeFlags = Win32SelfUpgradeFlags { win32SelfUpgradeVerbosity :: Flag Verbosity } defaultWin32SelfUpgradeFlags :: Win32SelfUpgradeFlags defaultWin32SelfUpgradeFlags = Win32SelfUpgradeFlags { win32SelfUpgradeVerbosity = toFlag normal } win32SelfUpgradeCommand :: CommandUI Win32SelfUpgradeFlags win32SelfUpgradeCommand = CommandUI { commandName = "win32selfupgrade", commandSynopsis = "Self-upgrade the executable on Windows", commandDescription = Nothing, commandNotes = Nothing, commandUsage = \pname -> "Usage: " ++ pname ++ " win32selfupgrade PID PATH\n", commandDefaultFlags = defaultWin32SelfUpgradeFlags, commandOptions = \_ -> [optionVerbosity win32SelfUpgradeVerbosity (\v flags -> flags { win32SelfUpgradeVerbosity = v}) ] } instance Monoid Win32SelfUpgradeFlags where mempty = Win32SelfUpgradeFlags { win32SelfUpgradeVerbosity = mempty } mappend a b = Win32SelfUpgradeFlags { win32SelfUpgradeVerbosity = combine win32SelfUpgradeVerbosity } where combine field = field a `mappend` field b -- ------------------------------------------------------------ -- * Sandbox-related flags -- ------------------------------------------------------------ data SandboxFlags = SandboxFlags { sandboxVerbosity :: Flag Verbosity, sandboxSnapshot :: Flag Bool, -- FIXME: this should be an 'add-source'-only -- flag. sandboxLocation :: Flag FilePath } defaultSandboxLocation :: FilePath defaultSandboxLocation = ".cabal-sandbox" defaultSandboxFlags :: SandboxFlags defaultSandboxFlags = SandboxFlags { sandboxVerbosity = toFlag normal, sandboxSnapshot = toFlag False, sandboxLocation = toFlag defaultSandboxLocation } sandboxCommand :: CommandUI SandboxFlags sandboxCommand = CommandUI { commandName = "sandbox", commandSynopsis = "Create/modify/delete a sandbox.", commandDescription = Just $ \pname -> concat [ paragraph $ "Sandboxes are isolated package databases that can be used" ++ " to prevent dependency conflicts that arise when many different" ++ " packages are installed in the same database (i.e. the user's" ++ " database in the home directory)." , paragraph $ "A sandbox in the current directory (created by" ++ " `sandbox init`) will be used instead of the user's database for" ++ " commands such as `install` and `build`. 
Note that (a directly" ++ " invoked) GHC will not automatically be aware of sandboxes;" ++ " only if called via appropriate " ++ pname ++ " commands, e.g. `repl`, `build`, `exec`." , paragraph $ "Currently, " ++ pname ++ " will not search for a sandbox" ++ " in folders above the current one, so cabal will not see the sandbox" ++ " if you are in a subfolder of a sandboxed project." , paragraph "Subcommands:" , headLine "init:" , indentParagraph $ "Initialize a sandbox in the current directory." ++ " An existing package database will not be modified, but settings" ++ " (such as the location of the database) can be modified this way." , headLine "delete:" , indentParagraph $ "Remove the sandbox, deleting all the packages" ++ " installed inside." , headLine "add-source:" , indentParagraph $ "Make one or more local packages available in the" ++ " sandbox. PATHS may be relative or absolute." ++ " A typical use case is when you need" ++ " to make a (temporary) modification to a dependency: You download" ++ " the package into a different directory, make the modification," ++ " and add that directory to the sandbox with `add-source`." , indentParagraph $ "Unless given `--snapshot`, any add-source'd" ++ " dependency that was modified since the last build will be" ++ " re-installed automatically." , headLine "delete-source:" , indentParagraph $ "Remove an add-source dependency; however, this will" ++ " not delete the package(s) that have been installed in the sandbox" ++ " from this dependency. You can either unregister the package(s) via" ++ " `" ++ pname ++ " sandbox hc-pkg unregister` or re-create the" ++ " sandbox (`sandbox delete; sandbox init`)." , headLine "list-sources:" , indentParagraph $ "List the directories of local packages made" ++ " available via `" ++ pname ++ " add-source`." , headLine "hc-pkg:" , indentParagraph $ "Similar to `ghc-pkg`, but for the sandbox package" ++ " database. Can be used to list specific/all packages that are" ++ " installed in the sandbox. For subcommands, see the help for" ++ " ghc-pkg. Affected by the compiler version specified by `configure`." ], commandNotes = Just $ \_ -> relevantConfigValuesText ["require-sandbox" ,"ignore-sandbox"], commandUsage = usageAlternatives "sandbox" [ "init [FLAGS]" , "delete [FLAGS]" , "add-source [FLAGS] PATHS" , "delete-source [FLAGS] PATHS" , "list-sources [FLAGS]" , "hc-pkg [FLAGS] [--] COMMAND [--] [ARGS]" ], commandDefaultFlags = defaultSandboxFlags, commandOptions = \_ -> [ optionVerbosity sandboxVerbosity (\v flags -> flags { sandboxVerbosity = v }) , option [] ["snapshot"] "Take a snapshot instead of creating a link (only applies to 'add-source')" sandboxSnapshot (\v flags -> flags { sandboxSnapshot = v }) trueArg , option [] ["sandbox"] "Sandbox location (default: './.cabal-sandbox')." 
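-- Illustrative workflow (not part of the original source; the add-source
-- path is just an example):
--   cabal sandbox init
--   cabal sandbox add-source ../my-patched-dependency
--   cabal install --only-dependencies
--   cabal build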
sandboxLocation (\v flags -> flags { sandboxLocation = v }) (reqArgFlag "DIR") ] } instance Monoid SandboxFlags where mempty = SandboxFlags { sandboxVerbosity = mempty, sandboxSnapshot = mempty, sandboxLocation = mempty } mappend a b = SandboxFlags { sandboxVerbosity = combine sandboxVerbosity, sandboxSnapshot = combine sandboxSnapshot, sandboxLocation = combine sandboxLocation } where combine field = field a `mappend` field b -- ------------------------------------------------------------ -- * Exec Flags -- ------------------------------------------------------------ data ExecFlags = ExecFlags { execVerbosity :: Flag Verbosity } defaultExecFlags :: ExecFlags defaultExecFlags = ExecFlags { execVerbosity = toFlag normal } execCommand :: CommandUI ExecFlags execCommand = CommandUI { commandName = "exec", commandSynopsis = "Give a command access to the sandbox package repository.", commandDescription = Just $ \pname -> wrapText $ -- TODO: this is too GHC-focused for my liking.. "A directly invoked GHC will not automatically be aware of any" ++ " sandboxes: the GHC_PACKAGE_PATH environment variable controls what" ++ " GHC uses. `" ++ pname ++ " exec` can be used to modify this variable:" ++ " COMMAND will be executed in a modified environment and thereby uses" ++ " the sandbox package database.\n" ++ "\n" ++ "If there is no sandbox, behaves as identity (executing COMMAND).\n" ++ "\n" ++ "Note that other " ++ pname ++ " commands change the environment" ++ " variable appropriately already, so there is no need to wrap those" ++ " in `" ++ pname ++ " exec`. But with `" ++ pname ++ " exec`, the user" ++ " has more control and can, for example, execute custom scripts which" ++ " indirectly execute GHC.\n" ++ "\n" ++ "See `" ++ pname ++ " sandbox`.", commandNotes = Just $ \pname -> "Examples:\n" ++ " Install the executable package pandoc into a sandbox and run it:\n" ++ " " ++ pname ++ " sandbox init\n" ++ " " ++ pname ++ " install pandoc\n" ++ " " ++ pname ++ " exec pandoc foo.md\n\n" ++ " Install the executable package hlint into the user package database\n" ++ " and run it:\n" ++ " " ++ pname ++ " install --user hlint\n" ++ " " ++ pname ++ " exec hlint Foo.hs\n\n" ++ " Execute runghc on Foo.hs with runghc configured to use the\n" ++ " sandbox package database (if a sandbox is being used):\n" ++ " " ++ pname ++ " exec runghc Foo.hs\n", commandUsage = \pname -> "Usage: " ++ pname ++ " exec [FLAGS] [--] COMMAND [--] [ARGS]\n", commandDefaultFlags = defaultExecFlags, commandOptions = \_ -> [ optionVerbosity execVerbosity (\v flags -> flags { execVerbosity = v }) ] } instance Monoid ExecFlags where mempty = ExecFlags { execVerbosity = mempty } mappend a b = ExecFlags { execVerbosity = combine execVerbosity } where combine field = field a `mappend` field b -- ------------------------------------------------------------ -- * UserConfig flags -- ------------------------------------------------------------ data UserConfigFlags = UserConfigFlags { userConfigVerbosity :: Flag Verbosity } instance Monoid UserConfigFlags where mempty = UserConfigFlags { userConfigVerbosity = toFlag normal } mappend a b = UserConfigFlags { userConfigVerbosity = combine userConfigVerbosity } where combine field = field a `mappend` field b userConfigCommand :: CommandUI UserConfigFlags userConfigCommand = CommandUI { commandName = "user-config", commandSynopsis = "Display and update the user's global cabal configuration.", commandDescription = Just $ \_ -> wrapText $ "When upgrading cabal, the set of configuration keys and their 
default" ++ " values may change. This command provides means to merge the existing" ++ " config in ~/.cabal/config" ++ " (i.e. all bindings that are actually defined and not commented out)" ++ " and the default config of the new version.\n" ++ "\n" ++ "diff: Shows a pseudo-diff of the user's ~/.cabal/config file and" ++ " the default configuration that would be created by cabal if the" ++ " config file did not exist.\n" ++ "update: Applies the pseudo-diff to the configuration that would be" ++ " created by default, and write the result back to ~/.cabal/config.", commandNotes = Nothing, commandUsage = usageAlternatives "user-config" ["diff", "update"], commandDefaultFlags = mempty, commandOptions = \ _ -> [ optionVerbosity userConfigVerbosity (\v flags -> flags { userConfigVerbosity = v }) ] } -- ------------------------------------------------------------ -- * GetOpt Utils -- ------------------------------------------------------------ reqArgFlag :: ArgPlaceHolder -> MkOptDescr (b -> Flag String) (Flag String -> b -> b) b reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList liftOptions :: (b -> a) -> (a -> b -> b) -> [OptionField a] -> [OptionField b] liftOptions get set = map (liftOption get set) yesNoOpt :: ShowOrParseArgs -> MkOptDescr (b -> Flag Bool) (Flag Bool -> b -> b) b yesNoOpt ShowArgs sf lf = trueArg sf lf yesNoOpt _ sf lf = Command.boolOpt' flagToMaybe Flag (sf, lf) ([], map ("no-" ++) lf) sf lf optionSolver :: (flags -> Flag PreSolver) -> (Flag PreSolver -> flags -> flags) -> OptionField flags optionSolver get set = option [] ["solver"] ("Select dependency solver to use (default: " ++ display defaultSolver ++ "). Choices: " ++ allSolvers ++ ", where 'choose' chooses between 'topdown' and 'modular' based on compiler version.") get set (reqArg "SOLVER" (readP_to_E (const $ "solver must be one of: " ++ allSolvers) (toFlag `fmap` parse)) (flagToList . fmap display)) optionSolverFlags :: ShowOrParseArgs -> (flags -> Flag Int ) -> (Flag Int -> flags -> flags) -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags) -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags) -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags) -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags) -> [OptionField flags] optionSolverFlags showOrParseArgs getmbj setmbj getrg setrg _getig _setig getsip setsip getstrfl setstrfl = [ option [] ["max-backjumps"] ("Maximum number of backjumps allowed while solving (default: " ++ show defaultMaxBackjumps ++ "). Use a negative number to enable unlimited backtracking. Use 0 to disable backtracking completely.") getmbj setmbj (reqArg "NUM" (readP_to_E ("Cannot parse number: "++) (fmap toFlag (Parse.readS_to_P reads))) (map show . flagToList)) , option [] ["reorder-goals"] "Try to reorder goals according to certain heuristics. Slows things down on average, but may make backtracking faster for some packages." getrg setrg (yesNoOpt showOrParseArgs) -- TODO: Disabled for now because it does not work as advertised (yet). {- , option [] ["independent-goals"] "Treat several goals on the command line as independent. If several goals depend on the same package, different versions can be chosen." getig setig (yesNoOpt showOrParseArgs) -} , option [] ["shadow-installed-packages"] "If multiple package instances of the same version are installed, treat all but one as shadowed." getsip setsip (yesNoOpt showOrParseArgs) , option [] ["strong-flags"] "Do not defer flag choices (this used to be the default in cabal-install <= 1.20)." 
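-- Illustrative usage (not part of the original source): --max-backjumps=-1
-- removes the backjump limit entirely, and --reorder-goals asks the solver
-- to pick its own goal order (at some cost in speed).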
getstrfl setstrfl (yesNoOpt showOrParseArgs) ] usageFlagsOrPackages :: String -> String -> String usageFlagsOrPackages name pname = "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n" ++ " or: " ++ pname ++ " " ++ name ++ " [PACKAGES]\n" usagePackages :: String -> String -> String usagePackages name pname = "Usage: " ++ pname ++ " " ++ name ++ " [PACKAGES]\n" usageFlags :: String -> String -> String usageFlags name pname = "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n" --TODO: do we want to allow per-package flags? parsePackageArgs :: [String] -> Either String [Dependency] parsePackageArgs = parsePkgArgs [] where parsePkgArgs ds [] = Right (reverse ds) parsePkgArgs ds (arg:args) = case readPToMaybe parseDependencyOrPackageId arg of Just dep -> parsePkgArgs (dep:ds) args Nothing -> Left $ show arg ++ " is not valid syntax for a package name or" ++ " package dependency." readPToMaybe :: Parse.ReadP a a -> String -> Maybe a readPToMaybe p str = listToMaybe [ r | (r,s) <- Parse.readP_to_S p str , all isSpace s ] parseDependencyOrPackageId :: Parse.ReadP r Dependency parseDependencyOrPackageId = parse Parse.+++ liftM pkgidToDependency parse where pkgidToDependency :: PackageIdentifier -> Dependency pkgidToDependency p = case packageVersion p of Version [] _ -> Dependency (packageName p) anyVersion version -> Dependency (packageName p) (thisVersion version) showRepo :: RemoteRepo -> String showRepo repo = remoteRepoName repo ++ ":" ++ uriToString id (remoteRepoURI repo) [] readRepo :: String -> Maybe RemoteRepo readRepo = readPToMaybe parseRepo parseRepo :: Parse.ReadP r RemoteRepo parseRepo = do name <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "_-.") _ <- Parse.char ':' uriStr <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "+-=._/*()@'$:;&!?~") uri <- maybe Parse.pfail return (parseAbsoluteURI uriStr) return $ RemoteRepo { remoteRepoName = name, remoteRepoURI = uri } -- ------------------------------------------------------------ -- * Helpers for Documentation -- ------------------------------------------------------------ headLine :: String -> String headLine = unlines . map unwords . wrapLine 79 . words paragraph :: String -> String paragraph = (++"\n") . unlines . map unwords . wrapLine 79 . words indentParagraph :: String -> String indentParagraph = unlines . map ((" "++).unwords) . wrapLine 77 . words relevantConfigValuesText :: [String] -> String relevantConfigValuesText vs = "Relevant global configuration keys:\n" ++ concat [" " ++ v ++ "\n" |v <- vs] cabal-install-1.22.6.0/Distribution/Client/Utils.hs0000644000000000000000000002120512540225656020174 0ustar0000000000000000{-# LANGUAGE ForeignFunctionInterface, CPP #-} module Distribution.Client.Utils ( MergeResult(..) , mergeBy, duplicates, duplicatesBy , inDir, determineNumJobs, numberOfProcessors , removeExistingFile , makeAbsoluteToCwd, filePathToByteString , byteStringToFilePath, tryCanonicalizePath , canonicalizePathNoThrow , moreRecentFile, existsAndIsMoreRecentThan , tryFindAddSourcePackageDesc , tryFindPackageDesc , relaxEncodingErrors) where import Distribution.Compat.Exception ( catchIO ) import Distribution.Client.Compat.Time ( getModTime ) import Distribution.Simple.Setup ( Flag(..) ) import Distribution.Simple.Utils ( die, findPackageDesc ) import qualified Data.ByteString.Lazy as BS import Control.Monad ( when ) import Data.Bits ( (.|.), shiftL, shiftR ) import Data.Char ( ord, chr ) import Data.List ( isPrefixOf, sortBy, groupBy ) import Data.Word ( Word8, Word32) import Foreign.C.Types ( CInt(..) 
) import qualified Control.Exception as Exception ( finally ) import System.Directory ( canonicalizePath, doesFileExist, getCurrentDirectory , removeFile, setCurrentDirectory ) import System.FilePath ( (</>), isAbsolute ) import System.IO ( Handle #if MIN_VERSION_base(4,4,0) , hGetEncoding, hSetEncoding #endif ) import System.IO.Unsafe ( unsafePerformIO ) #if MIN_VERSION_base(4,4,0) import GHC.IO.Encoding ( recover, TextEncoding(TextEncoding) ) import GHC.IO.Encoding.Failure ( recoverEncode, CodingFailureMode(TransliterateCodingFailure) ) #endif #if defined(mingw32_HOST_OS) import Prelude hiding (ioError) import Control.Monad (liftM2, unless) import System.Directory (doesDirectoryExist) import System.IO.Error (ioError, mkIOError, doesNotExistErrorType) #endif -- | Generic merging utility. For sorted input lists this is a full outer join. -- -- * The result list never contains @(Nothing, Nothing)@. -- mergeBy :: (a -> b -> Ordering) -> [a] -> [b] -> [MergeResult a b] mergeBy cmp = merge where merge [] ys = [ OnlyInRight y | y <- ys] merge xs [] = [ OnlyInLeft x | x <- xs] merge (x:xs) (y:ys) = case x `cmp` y of GT -> OnlyInRight y : merge (x:xs) ys EQ -> InBoth x y : merge xs ys LT -> OnlyInLeft x : merge xs (y:ys) data MergeResult a b = OnlyInLeft a | InBoth a b | OnlyInRight b duplicates :: Ord a => [a] -> [[a]] duplicates = duplicatesBy compare duplicatesBy :: (a -> a -> Ordering) -> [a] -> [[a]] duplicatesBy cmp = filter moreThanOne . groupBy eq . sortBy cmp where eq a b = case cmp a b of EQ -> True _ -> False moreThanOne (_:_:_) = True moreThanOne _ = False -- | Like 'removeFile', but does not throw an exception when the file does not -- exist. removeExistingFile :: FilePath -> IO () removeExistingFile path = do exists <- doesFileExist path when exists $ removeFile path -- | Executes the action in the specified directory. inDir :: Maybe FilePath -> IO a -> IO a inDir Nothing m = m inDir (Just d) m = do old <- getCurrentDirectory setCurrentDirectory d m `Exception.finally` setCurrentDirectory old foreign import ccall "getNumberOfProcessors" c_getNumberOfProcessors :: IO CInt -- The number of processors is not going to change during the duration of the -- program, so unsafePerformIO is safe here. numberOfProcessors :: Int numberOfProcessors = fromEnum $ unsafePerformIO c_getNumberOfProcessors -- | Determine the number of jobs to use given the value of the '-j' flag. determineNumJobs :: Flag (Maybe Int) -> Int determineNumJobs numJobsFlag = case numJobsFlag of NoFlag -> 1 Flag Nothing -> numberOfProcessors Flag (Just n) -> n -- | Given a relative path, make it absolute relative to the current -- directory. Absolute paths are returned unmodified. makeAbsoluteToCwd :: FilePath -> IO FilePath makeAbsoluteToCwd path | isAbsolute path = return path | otherwise = do cwd <- getCurrentDirectory return $! cwd </> path -- | Convert a 'FilePath' to a lazy 'ByteString'. Each 'Char' is -- encoded as a little-endian 'Word32'. filePathToByteString :: FilePath -> BS.ByteString filePathToByteString p = BS.pack $ foldr conv [] codepts where codepts :: [Word32] codepts = map (fromIntegral . ord) p conv :: Word32 -> [Word8] -> [Word8] conv w32 rest = b0:b1:b2:b3:rest where b0 = fromIntegral $ w32 b1 = fromIntegral $ w32 `shiftR` 8 b2 = fromIntegral $ w32 `shiftR` 16 b3 = fromIntegral $ w32 `shiftR` 24 -- | Reverse operation to 'filePathToByteString'. 
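-- Assuming input produced by 'filePathToByteString' (i.e. a length that is a
-- multiple of 4), the two functions form a round trip:
-- @byteStringToFilePath (filePathToByteString p) == p@.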
byteStringToFilePath :: BS.ByteString -> FilePath byteStringToFilePath bs | bslen `mod` 4 /= 0 = unexpected | otherwise = go 0 where unexpected = "Distribution.Client.Utils.byteStringToFilePath: unexpected" bslen = BS.length bs go i | i == bslen = [] | otherwise = (chr . fromIntegral $ w32) : go (i+4) where w32 :: Word32 w32 = b0 .|. (b1 `shiftL` 8) .|. (b2 `shiftL` 16) .|. (b3 `shiftL` 24) b0 = fromIntegral $ BS.index bs i b1 = fromIntegral $ BS.index bs (i + 1) b2 = fromIntegral $ BS.index bs (i + 2) b3 = fromIntegral $ BS.index bs (i + 3) -- | Workaround for the inconsistent behaviour of 'canonicalizePath'. It throws -- an error if the path refers to a non-existent file on *nix, but not on -- Windows. tryCanonicalizePath :: FilePath -> IO FilePath tryCanonicalizePath path = do ret <- canonicalizePath path #if defined(mingw32_HOST_OS) exists <- liftM2 (||) (doesFileExist ret) (doesDirectoryExist ret) unless exists $ ioError $ mkIOError doesNotExistErrorType "canonicalizePath" Nothing (Just ret) #endif return ret -- | A non-throwing wrapper for 'canonicalizePath'. If 'canonicalizePath' throws -- an exception, returns the path argument unmodified. canonicalizePathNoThrow :: FilePath -> IO FilePath canonicalizePathNoThrow path = do canonicalizePath path `catchIO` (\_ -> return path) -------------------- -- Modification time -- | Like Distribution.Simple.Utils.moreRecentFile, but uses getModTime instead -- of getModificationTime for higher precision. We can't merge the two because -- Distribution.Client.Time uses MIN_VERSION macros. moreRecentFile :: FilePath -> FilePath -> IO Bool moreRecentFile a b = do exists <- doesFileExist b if not exists then return True else do tb <- getModTime b ta <- getModTime a return (ta > tb) -- | Like 'moreRecentFile', but also checks that the first file exists. existsAndIsMoreRecentThan :: FilePath -> FilePath -> IO Bool existsAndIsMoreRecentThan a b = do exists <- doesFileExist a if not exists then return False else a `moreRecentFile` b -- | Sets the handler for encoding errors to one that transliterates invalid -- characters into one present in the encoding (i.e., \'?\'). -- This is opposed to the default behavior, which is to throw an exception on -- error. This function will ignore file handles that have a Unicode encoding -- set. It's a no-op for versions of `base` less than 4.4. relaxEncodingErrors :: Handle -> IO () relaxEncodingErrors handle = do #if MIN_VERSION_base(4,4,0) maybeEncoding <- hGetEncoding handle case maybeEncoding of Just (TextEncoding name decoder encoder) | not ("UTF" `isPrefixOf` name) -> let relax x = x { recover = recoverEncode TransliterateCodingFailure } in hSetEncoding handle (TextEncoding name decoder (fmap relax encoder)) _ -> #endif return () -- |Like 'tryFindPackageDesc', but with error specific to add-source deps. tryFindAddSourcePackageDesc :: FilePath -> String -> IO FilePath tryFindAddSourcePackageDesc depPath err = tryFindPackageDesc depPath $ err ++ "\n" ++ "Failed to read cabal file of add-source dependency: " ++ depPath -- |Try to find a @.cabal@ file, in directory @depPath@. Fails if one cannot be -- found, with @err@ prefixing the error message. This function simply allows -- us to give a more descriptive error than that provided by @findPackageDesc@. 
tryFindPackageDesc :: FilePath -> String -> IO FilePath tryFindPackageDesc depPath err = do errOrCabalFile <- findPackageDesc depPath case errOrCabalFile of Right file -> return file Left _ -> die err cabal-install-1.22.6.0/Distribution/Client/Dependency.hs0000644000000000000000000006455712540225656021173 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Dependency -- Copyright : (c) David Himmelstrup 2005, -- Bjorn Bringert 2007 -- Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : cabal-devel@gmail.com -- Stability : provisional -- Portability : portable -- -- Top level interface to dependency resolution. ----------------------------------------------------------------------------- module Distribution.Client.Dependency ( -- * The main package dependency resolver chooseSolver, resolveDependencies, Progress(..), foldProgress, -- * Alternate, simple resolver that does not do dependencies recursively resolveWithoutDependencies, -- * Constructing resolver policies DepResolverParams(..), PackageConstraint(..), PackagesPreferenceDefault(..), PackagePreference(..), InstalledPreference(..), -- ** Standard policy standardInstallPolicy, PackageSpecifier(..), -- ** Sandbox policy applySandboxInstallPolicy, -- ** Extra policy options dontUpgradeNonUpgradeablePackages, hideBrokenInstalledPackages, upgradeDependencies, reinstallTargets, -- ** Policy utils addConstraints, addPreferences, setPreferenceDefault, setReorderGoals, setIndependentGoals, setAvoidReinstalls, setShadowPkgs, setStrongFlags, setMaxBackjumps, addSourcePackages, hideInstalledPackagesSpecificByInstalledPackageId, hideInstalledPackagesSpecificBySourcePackageId, hideInstalledPackagesAllVersions, removeUpperBounds ) where import Distribution.Client.Dependency.TopDown ( topDownResolver ) import Distribution.Client.Dependency.Modular ( modularResolver, SolverConfig(..) ) import qualified Distribution.Client.PackageIndex as PackageIndex import Distribution.Simple.PackageIndex (InstalledPackageIndex) import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.InstallPlan (InstallPlan) import Distribution.Client.Types ( SourcePackageDb(SourcePackageDb) , SourcePackage(..) ) import Distribution.Client.Dependency.Types ( PreSolver(..), Solver(..), DependencyResolver, PackageConstraint(..) , debugPackageConstraint , AllowNewer(..), PackagePreferences(..), InstalledPreference(..) , PackagesPreferenceDefault(..) , Progress(..), foldProgress ) import Distribution.Client.Sandbox.Types ( SandboxPackageInfo(..) ) import Distribution.Client.Targets import qualified Distribution.InstalledPackageInfo as Installed import Distribution.Package ( PackageName(..), PackageId, Package(..), packageName, packageVersion , InstalledPackageId, Dependency(Dependency)) import qualified Distribution.PackageDescription as PD ( PackageDescription(..), GenericPackageDescription(..) , Library(..), Executable(..), TestSuite(..), Benchmark(..), CondTree) import Distribution.PackageDescription (BuildInfo(targetBuildDepends)) import Distribution.PackageDescription.Configuration (mapCondTree) import Distribution.Version ( Version(..), VersionRange, anyVersion, thisVersion, withinRange , removeUpperBound, simplifyVersionRange ) import Distribution.Compiler ( CompilerId(..), CompilerInfo(..), CompilerFlavor(..) 
) import Distribution.System ( Platform ) import Distribution.Simple.Utils ( comparing, warn, info ) import Distribution.Text ( display ) import Distribution.Verbosity ( Verbosity ) import Data.List (maximumBy, foldl', intercalate) import Data.Maybe (fromMaybe) import qualified Data.Map as Map import qualified Data.Set as Set import Data.Set (Set) -- ------------------------------------------------------------ -- * High level planner policy -- ------------------------------------------------------------ -- | The set of parameters to the dependency resolver. These parameters are -- relatively low level but many kinds of high level policies can be -- implemented in terms of adjustments to the parameters. -- data DepResolverParams = DepResolverParams { depResolverTargets :: [PackageName], depResolverConstraints :: [PackageConstraint], depResolverPreferences :: [PackagePreference], depResolverPreferenceDefault :: PackagesPreferenceDefault, depResolverInstalledPkgIndex :: InstalledPackageIndex, depResolverSourcePkgIndex :: PackageIndex.PackageIndex SourcePackage, depResolverReorderGoals :: Bool, depResolverIndependentGoals :: Bool, depResolverAvoidReinstalls :: Bool, depResolverShadowPkgs :: Bool, depResolverStrongFlags :: Bool, depResolverMaxBackjumps :: Maybe Int } debugDepResolverParams :: DepResolverParams -> String debugDepResolverParams p = "targets: " ++ intercalate ", " (map display (depResolverTargets p)) ++ "\nconstraints: " ++ concatMap (("\n " ++) . debugPackageConstraint) (depResolverConstraints p) ++ "\npreferences: " ++ concatMap (("\n " ++) . debugPackagePreference) (depResolverPreferences p) ++ "\nstrategy: " ++ show (depResolverPreferenceDefault p) -- | A package selection preference for a particular package. -- -- Preferences are soft constraints that the dependency resolver should try to -- respect where possible. It is not specified if preferences on some packages -- are more important than others. -- data PackagePreference = -- | A suggested constraint on the version number. PackageVersionPreference PackageName VersionRange -- | If we prefer versions of packages that are already installed. | PackageInstalledPreference PackageName InstalledPreference -- | Provide a textual representation of a package preference -- for debugging purposes. 
-- debugPackagePreference :: PackagePreference -> String debugPackagePreference (PackageVersionPreference pn vr) = display pn ++ " " ++ display (simplifyVersionRange vr) debugPackagePreference (PackageInstalledPreference pn ip) = display pn ++ " " ++ show ip basicDepResolverParams :: InstalledPackageIndex -> PackageIndex.PackageIndex SourcePackage -> DepResolverParams basicDepResolverParams installedPkgIndex sourcePkgIndex = DepResolverParams { depResolverTargets = [], depResolverConstraints = [], depResolverPreferences = [], depResolverPreferenceDefault = PreferLatestForSelected, depResolverInstalledPkgIndex = installedPkgIndex, depResolverSourcePkgIndex = sourcePkgIndex, depResolverReorderGoals = False, depResolverIndependentGoals = False, depResolverAvoidReinstalls = False, depResolverShadowPkgs = False, depResolverStrongFlags = False, depResolverMaxBackjumps = Nothing } addTargets :: [PackageName] -> DepResolverParams -> DepResolverParams addTargets extraTargets params = params { depResolverTargets = extraTargets ++ depResolverTargets params } addConstraints :: [PackageConstraint] -> DepResolverParams -> DepResolverParams addConstraints extraConstraints params = params { depResolverConstraints = extraConstraints ++ depResolverConstraints params } addPreferences :: [PackagePreference] -> DepResolverParams -> DepResolverParams addPreferences extraPreferences params = params { depResolverPreferences = extraPreferences ++ depResolverPreferences params } setPreferenceDefault :: PackagesPreferenceDefault -> DepResolverParams -> DepResolverParams setPreferenceDefault preferenceDefault params = params { depResolverPreferenceDefault = preferenceDefault } setReorderGoals :: Bool -> DepResolverParams -> DepResolverParams setReorderGoals b params = params { depResolverReorderGoals = b } setIndependentGoals :: Bool -> DepResolverParams -> DepResolverParams setIndependentGoals b params = params { depResolverIndependentGoals = b } setAvoidReinstalls :: Bool -> DepResolverParams -> DepResolverParams setAvoidReinstalls b params = params { depResolverAvoidReinstalls = b } setShadowPkgs :: Bool -> DepResolverParams -> DepResolverParams setShadowPkgs b params = params { depResolverShadowPkgs = b } setStrongFlags :: Bool -> DepResolverParams -> DepResolverParams setStrongFlags b params = params { depResolverStrongFlags = b } setMaxBackjumps :: Maybe Int -> DepResolverParams -> DepResolverParams setMaxBackjumps n params = params { depResolverMaxBackjumps = n } -- | Some packages are specific to a given compiler version and should never be -- upgraded. dontUpgradeNonUpgradeablePackages :: DepResolverParams -> DepResolverParams dontUpgradeNonUpgradeablePackages params = addConstraints extraConstraints params where extraConstraints = [ PackageConstraintInstalled pkgname | all (/=PackageName "base") (depResolverTargets params) , pkgname <- map PackageName [ "base", "ghc-prim", "integer-gmp" , "integer-simple" ] , isInstalled pkgname ] -- TODO: the top down resolver chokes on the base constraints -- below when there are no targets and thus no dep on base. -- Need to refactor constraints separate from needing packages. isInstalled = not . null . 
InstalledPackageIndex.lookupPackageName (depResolverInstalledPkgIndex params) addSourcePackages :: [SourcePackage] -> DepResolverParams -> DepResolverParams addSourcePackages pkgs params = params { depResolverSourcePkgIndex = foldl (flip PackageIndex.insert) (depResolverSourcePkgIndex params) pkgs } hideInstalledPackagesSpecificByInstalledPackageId :: [InstalledPackageId] -> DepResolverParams -> DepResolverParams hideInstalledPackagesSpecificByInstalledPackageId pkgids params = --TODO: this should work using exclude constraints instead params { depResolverInstalledPkgIndex = foldl' (flip InstalledPackageIndex.deleteInstalledPackageId) (depResolverInstalledPkgIndex params) pkgids } hideInstalledPackagesSpecificBySourcePackageId :: [PackageId] -> DepResolverParams -> DepResolverParams hideInstalledPackagesSpecificBySourcePackageId pkgids params = --TODO: this should work using exclude constraints instead params { depResolverInstalledPkgIndex = foldl' (flip InstalledPackageIndex.deleteSourcePackageId) (depResolverInstalledPkgIndex params) pkgids } hideInstalledPackagesAllVersions :: [PackageName] -> DepResolverParams -> DepResolverParams hideInstalledPackagesAllVersions pkgnames params = --TODO: this should work using exclude constraints instead params { depResolverInstalledPkgIndex = foldl' (flip InstalledPackageIndex.deletePackageName) (depResolverInstalledPkgIndex params) pkgnames } hideBrokenInstalledPackages :: DepResolverParams -> DepResolverParams hideBrokenInstalledPackages params = hideInstalledPackagesSpecificByInstalledPackageId pkgids params where pkgids = map Installed.installedPackageId . InstalledPackageIndex.reverseDependencyClosure (depResolverInstalledPkgIndex params) . map (Installed.installedPackageId . fst) . InstalledPackageIndex.brokenPackages $ depResolverInstalledPkgIndex params -- | Remove upper bounds in dependencies using the policy specified by the -- 'AllowNewer' argument (all/some/none). removeUpperBounds :: AllowNewer -> DepResolverParams -> DepResolverParams removeUpperBounds allowNewer params = params { -- NB: It's important to apply 'removeUpperBounds' after -- 'addSourcePackages'. Otherwise, the packages inserted by -- 'addSourcePackages' won't have upper bounds in dependencies relaxed. depResolverSourcePkgIndex = sourcePkgIndex' } where sourcePkgIndex = depResolverSourcePkgIndex params sourcePkgIndex' = case allowNewer of AllowNewerNone -> sourcePkgIndex AllowNewerAll -> fmap relaxAllPackageDeps sourcePkgIndex AllowNewerSome pkgs -> fmap (relaxSomePackageDeps pkgs) sourcePkgIndex relaxAllPackageDeps :: SourcePackage -> SourcePackage relaxAllPackageDeps = onAllBuildDepends doRelax where doRelax (Dependency pkgName verRange) = Dependency pkgName (removeUpperBound verRange) relaxSomePackageDeps :: [PackageName] -> SourcePackage -> SourcePackage relaxSomePackageDeps pkgNames = onAllBuildDepends doRelax where doRelax d@(Dependency pkgName verRange) | pkgName `elem` pkgNames = Dependency pkgName (removeUpperBound verRange) | otherwise = d -- Walk a 'GenericPackageDescription' and apply 'f' to all 'build-depends' -- fields. 
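-- Note: an illustrative, hypothetical example of what the 'doRelax' helpers
-- above do to a single dependency. 'removeUpperBound' (from
-- "Distribution.Version") drops the topmost upper bound of a version range,
-- so, for a made-up package @foo@:
--
-- > doRelax (Dependency (PackageName "foo") range)    -- range  ~  >= 1.2 && < 1.5
-- >   == Dependency (PackageName "foo") range'        -- range' ~  >= 1.2
--
-- The 'onAllBuildDepends' helper below then distributes such a rewrite over
-- every build-depends field of the package description.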
onAllBuildDepends :: (Dependency -> Dependency) -> SourcePackage -> SourcePackage onAllBuildDepends f srcPkg = srcPkg' where gpd = packageDescription srcPkg pd = PD.packageDescription gpd condLib = PD.condLibrary gpd condExes = PD.condExecutables gpd condTests = PD.condTestSuites gpd condBenchs = PD.condBenchmarks gpd f' = onBuildInfo f onBuildInfo g bi = bi { targetBuildDepends = map g (targetBuildDepends bi) } onLibrary lib = lib { PD.libBuildInfo = f' $ PD.libBuildInfo lib } onExecutable exe = exe { PD.buildInfo = f' $ PD.buildInfo exe } onTestSuite tst = tst { PD.testBuildInfo = f' $ PD.testBuildInfo tst } onBenchmark bmk = bmk { PD.benchmarkBuildInfo = f' $ PD.benchmarkBuildInfo bmk } srcPkg' = srcPkg { packageDescription = gpd' } gpd' = gpd { PD.packageDescription = pd', PD.condLibrary = condLib', PD.condExecutables = condExes', PD.condTestSuites = condTests', PD.condBenchmarks = condBenchs' } pd' = pd { PD.buildDepends = map f (PD.buildDepends pd), PD.library = fmap onLibrary (PD.library pd), PD.executables = map onExecutable (PD.executables pd), PD.testSuites = map onTestSuite (PD.testSuites pd), PD.benchmarks = map onBenchmark (PD.benchmarks pd) } condLib' = fmap (onCondTree onLibrary) condLib condExes' = map (mapSnd $ onCondTree onExecutable) condExes condTests' = map (mapSnd $ onCondTree onTestSuite) condTests condBenchs' = map (mapSnd $ onCondTree onBenchmark) condBenchs mapSnd :: (a -> b) -> (c,a) -> (c,b) mapSnd = fmap onCondTree :: (a -> b) -> PD.CondTree v [Dependency] a -> PD.CondTree v [Dependency] b onCondTree g = mapCondTree g (map f) id upgradeDependencies :: DepResolverParams -> DepResolverParams upgradeDependencies = setPreferenceDefault PreferAllLatest reinstallTargets :: DepResolverParams -> DepResolverParams reinstallTargets params = hideInstalledPackagesAllVersions (depResolverTargets params) params standardInstallPolicy :: InstalledPackageIndex -> SourcePackageDb -> [PackageSpecifier SourcePackage] -> DepResolverParams standardInstallPolicy installedPkgIndex (SourcePackageDb sourcePkgIndex sourcePkgPrefs) pkgSpecifiers = addPreferences [ PackageVersionPreference name ver | (name, ver) <- Map.toList sourcePkgPrefs ] . addConstraints (concatMap pkgSpecifierConstraints pkgSpecifiers) . addTargets (map pkgSpecifierTarget pkgSpecifiers) . hideInstalledPackagesSpecificBySourcePackageId [ packageId pkg | SpecificSourcePackage pkg <- pkgSpecifiers ] . addSourcePackages [ pkg | SpecificSourcePackage pkg <- pkgSpecifiers ] $ basicDepResolverParams installedPkgIndex sourcePkgIndex applySandboxInstallPolicy :: SandboxPackageInfo -> DepResolverParams -> DepResolverParams applySandboxInstallPolicy (SandboxPackageInfo modifiedDeps otherDeps allSandboxPkgs _allDeps) params = addPreferences [ PackageInstalledPreference n PreferInstalled | n <- installedNotModified ] . addTargets installedNotModified . addPreferences [ PackageVersionPreference (packageName pkg) (thisVersion (packageVersion pkg)) | pkg <- otherDeps ] . addConstraints [ PackageConstraintVersion (packageName pkg) (thisVersion (packageVersion pkg)) | pkg <- modifiedDeps ] . addTargets [ packageName pkg | pkg <- modifiedDeps ] . hideInstalledPackagesSpecificBySourcePackageId [ packageId pkg | pkg <- modifiedDeps ] -- We don't need to add source packages for add-source deps to the -- 'installedPkgIndex' since 'getSourcePackages' did that for us. $ params where installedPkgIds = map fst . 
InstalledPackageIndex.allPackagesBySourcePackageId $ allSandboxPkgs modifiedPkgIds = map packageId modifiedDeps installedNotModified = [ packageName pkg | pkg <- installedPkgIds, pkg `notElem` modifiedPkgIds ] -- ------------------------------------------------------------ -- * Interface to the standard resolver -- ------------------------------------------------------------ chooseSolver :: Verbosity -> PreSolver -> CompilerInfo -> IO Solver chooseSolver _ AlwaysTopDown _ = return TopDown chooseSolver _ AlwaysModular _ = return Modular chooseSolver verbosity Choose cinfo = do let (CompilerId f v) = compilerInfoId cinfo chosenSolver | f == GHC && v <= Version [7] [] = TopDown | otherwise = Modular msg TopDown = warn verbosity "Falling back to topdown solver for GHC < 7." msg Modular = info verbosity "Choosing modular solver." msg chosenSolver return chosenSolver runSolver :: Solver -> SolverConfig -> DependencyResolver runSolver TopDown = const topDownResolver -- TODO: warn about unsupported options runSolver Modular = modularResolver -- | Run the dependency solver. -- -- Since this is potentially an expensive operation, the result is wrapped in a -- a 'Progress' structure that can be unfolded to provide progress information, -- logging messages and the final result or an error. -- resolveDependencies :: Platform -> CompilerInfo -> Solver -> DepResolverParams -> Progress String String InstallPlan --TODO: is this needed here? see dontUpgradeNonUpgradeablePackages resolveDependencies platform comp _solver params | null (depResolverTargets params) = return (mkInstallPlan platform comp []) resolveDependencies platform comp solver params = Step (debugDepResolverParams finalparams) $ fmap (mkInstallPlan platform comp) $ runSolver solver (SolverConfig reorderGoals indGoals noReinstalls shadowing strFlags maxBkjumps) platform comp installedPkgIndex sourcePkgIndex preferences constraints targets where finalparams @ (DepResolverParams targets constraints prefs defpref installedPkgIndex sourcePkgIndex reorderGoals indGoals noReinstalls shadowing strFlags maxBkjumps) = dontUpgradeNonUpgradeablePackages -- TODO: -- The modular solver can properly deal with broken -- packages and won't select them. So the -- 'hideBrokenInstalledPackages' function should be moved -- into a module that is specific to the top-down solver. . (if solver /= Modular then hideBrokenInstalledPackages else id) $ params preferences = interpretPackagesPreference (Set.fromList targets) defpref prefs -- | Make an install plan from the output of the dep resolver. -- It checks that the plan is valid, or it's an error in the dep resolver. -- mkInstallPlan :: Platform -> CompilerInfo -> [InstallPlan.PlanPackage] -> InstallPlan mkInstallPlan platform comp pkgIndex = let index = InstalledPackageIndex.fromList pkgIndex in case InstallPlan.new platform comp index of Right plan -> plan Left problems -> error $ unlines $ "internal error: could not construct a valid install plan." : "The proposed (invalid) plan contained the following problems:" : map InstallPlan.showPlanProblem problems ++ "Proposed plan:" : [InstallPlan.showPlanIndex index] -- | Give an interpretation to the global 'PackagesPreference' as -- specific per-package 'PackageVersionPreference'. 
-- interpretPackagesPreference :: Set PackageName -> PackagesPreferenceDefault -> [PackagePreference] -> (PackageName -> PackagePreferences) interpretPackagesPreference selected defaultPref prefs = \pkgname -> PackagePreferences (versionPref pkgname) (installPref pkgname) where versionPref pkgname = fromMaybe anyVersion (Map.lookup pkgname versionPrefs) versionPrefs = Map.fromList [ (pkgname, pref) | PackageVersionPreference pkgname pref <- prefs ] installPref pkgname = fromMaybe (installPrefDefault pkgname) (Map.lookup pkgname installPrefs) installPrefs = Map.fromList [ (pkgname, pref) | PackageInstalledPreference pkgname pref <- prefs ] installPrefDefault = case defaultPref of PreferAllLatest -> \_ -> PreferLatest PreferAllInstalled -> \_ -> PreferInstalled PreferLatestForSelected -> \pkgname -> -- When you say cabal install foo, what you really mean is, prefer the -- latest version of foo, but the installed version of everything else if pkgname `Set.member` selected then PreferLatest else PreferInstalled -- ------------------------------------------------------------ -- * Simple resolver that ignores dependencies -- ------------------------------------------------------------ -- | A simplistic method of resolving a list of target package names to -- available packages. -- -- Specifically, it does not consider package dependencies at all. Unlike -- 'resolveDependencies', no attempt is made to ensure that the selected -- packages have dependencies that are satisfiable or consistent with -- each other. -- -- It is suitable for tasks such as selecting packages to download for user -- inspection. It is not suitable for selecting packages to install. -- -- Note: if no installed package index is available, it is OK to pass 'mempty'. -- It simply means preferences for installed packages will be ignored. -- resolveWithoutDependencies :: DepResolverParams -> Either [ResolveNoDepsError] [SourcePackage] resolveWithoutDependencies (DepResolverParams targets constraints prefs defpref installedPkgIndex sourcePkgIndex _reorderGoals _indGoals _avoidReinstalls _shadowing _strFlags _maxBjumps) = collectEithers (map selectPackage targets) where selectPackage :: PackageName -> Either ResolveNoDepsError SourcePackage selectPackage pkgname | null choices = Left $! ResolveUnsatisfiable pkgname requiredVersions | otherwise = Right $! maximumBy bestByPrefs choices where -- Constraints requiredVersions = packageConstraints pkgname pkgDependency = Dependency pkgname requiredVersions choices = PackageIndex.lookupDependency sourcePkgIndex pkgDependency -- Preferences PackagePreferences preferredVersions preferInstalled = packagePreferences pkgname bestByPrefs = comparing $ \pkg -> (installPref pkg, versionPref pkg, packageVersion pkg) installPref = case preferInstalled of PreferLatest -> const False PreferInstalled -> not . null . InstalledPackageIndex.lookupSourcePackageId installedPkgIndex . packageId versionPref pkg = packageVersion pkg `withinRange` preferredVersions packageConstraints :: PackageName -> VersionRange packageConstraints pkgname = Map.findWithDefault anyVersion pkgname packageVersionConstraintMap packageVersionConstraintMap = Map.fromList [ (name, range) | PackageConstraintVersion name range <- constraints ] packagePreferences :: PackageName -> PackagePreferences packagePreferences = interpretPackagesPreference (Set.fromList targets) defpref prefs collectEithers :: [Either a b] -> Either [a] [b] collectEithers = collect . 
partitionEithers where collect ([], xs) = Right xs collect (errs,_) = Left errs partitionEithers :: [Either a b] -> ([a],[b]) partitionEithers = foldr (either left right) ([],[]) where left a (l, r) = (a:l, r) right a (l, r) = (l, a:r) -- | Errors for 'resolveWithoutDependencies'. -- data ResolveNoDepsError = -- | A package name which cannot be resolved to a specific package. -- Also gives the constraint on the version and whether there was -- a constraint on the package being installed. ResolveUnsatisfiable PackageName VersionRange instance Show ResolveNoDepsError where show (ResolveUnsatisfiable name ver) = "There is no available version of " ++ display name ++ " that satisfies " ++ display (simplifyVersionRange ver) cabal-install-1.22.6.0/Distribution/Client/Install.hs0000644000000000000000000017771412540225656020523 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Install -- Copyright : (c) 2005 David Himmelstrup -- 2007 Bjorn Bringert -- 2007-2010 Duncan Coutts -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- High level interface to package installation. ----------------------------------------------------------------------------- module Distribution.Client.Install ( -- * High-level interface install, -- * Lower-level interface that allows to manipulate the install plan makeInstallContext, makeInstallPlan, processInstallPlan, InstallArgs, InstallContext, -- * Prune certain packages from the install plan pruneInstallPlan ) where import Data.List ( isPrefixOf, unfoldr, nub, sort, (\\) ) import qualified Data.Map as Map import qualified Data.Set as S import Data.Maybe ( isJust, fromMaybe, mapMaybe, maybeToList ) import Control.Exception as Exception ( Exception(toException), bracket, catches , Handler(Handler), handleJust, IOException, SomeException ) #ifndef mingw32_HOST_OS import Control.Exception as Exception ( Exception(fromException) ) #endif import System.Exit ( ExitCode(..) ) import Distribution.Compat.Exception ( catchIO, catchExit ) #if !MIN_VERSION_base(4,8,0) import Control.Applicative ( (<$>) ) #endif import Control.Monad ( forM_, when, unless ) import System.Directory ( getTemporaryDirectory, doesDirectoryExist, doesFileExist, createDirectoryIfMissing, removeFile, renameDirectory ) import System.FilePath ( (), (<.>), equalFilePath, takeDirectory ) import System.IO ( openFile, IOMode(AppendMode), hClose ) import System.IO.Error ( isDoesNotExistError, ioeGetFileName ) import Distribution.Client.Targets import Distribution.Client.Configure ( chooseCabalVersion ) import Distribution.Client.Dependency import Distribution.Client.Dependency.Types ( Solver(..) ) import Distribution.Client.FetchUtils import qualified Distribution.Client.Haddock as Haddock (regenerateHaddockIndex) import Distribution.Client.IndexUtils as IndexUtils ( getSourcePackages, getInstalledPackages ) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.InstallPlan (InstallPlan) import Distribution.Client.Setup ( GlobalFlags(..) , ConfigFlags(..), configureCommand, filterConfigureFlags , ConfigExFlags(..), InstallFlags(..) 
) import Distribution.Client.Config ( defaultCabalDir, defaultUserInstall ) import Distribution.Client.Sandbox.Timestamp ( withUpdateTimestamps ) import Distribution.Client.Sandbox.Types ( SandboxPackageInfo(..), UseSandbox(..), isUseSandbox , whenUsingSandbox ) import Distribution.Client.Tar (extractTarGzFile) import Distribution.Client.Types as Source import Distribution.Client.BuildReports.Types ( ReportLevel(..) ) import Distribution.Client.SetupWrapper ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) import qualified Distribution.Client.BuildReports.Anonymous as BuildReports import qualified Distribution.Client.BuildReports.Storage as BuildReports ( storeAnonymous, storeLocal, fromInstallPlan, fromPlanningFailure ) import qualified Distribution.Client.InstallSymlink as InstallSymlink ( symlinkBinaries ) import qualified Distribution.Client.PackageIndex as SourcePackageIndex import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade import qualified Distribution.Client.World as World import qualified Distribution.InstalledPackageInfo as Installed import Distribution.Client.Compat.ExecutablePath import Distribution.Client.JobControl import Distribution.Utils.NubList import Distribution.Simple.Compiler ( CompilerId(..), Compiler(compilerId), compilerFlavor , CompilerInfo(..), compilerInfo, PackageDB(..), PackageDBStack ) import Distribution.Simple.Program (ProgramConfiguration, defaultProgramConfiguration) import qualified Distribution.Simple.InstallDirs as InstallDirs import qualified Distribution.Simple.PackageIndex as PackageIndex import Distribution.Simple.PackageIndex (InstalledPackageIndex) import Distribution.Simple.Setup ( haddockCommand, HaddockFlags(..) , buildCommand, BuildFlags(..), emptyBuildFlags , toFlag, fromFlag, fromFlagOrDefault, flagToMaybe, defaultDistPref ) import qualified Distribution.Simple.Setup as Cabal ( Flag(..) , copyCommand, CopyFlags(..), emptyCopyFlags , registerCommand, RegisterFlags(..), emptyRegisterFlags , testCommand, TestFlags(..), emptyTestFlags ) import Distribution.Simple.Utils ( createDirectoryIfMissingVerbose, rawSystemExit, comparing , writeFileAtomic, withTempFile , withUTF8FileContents ) import Distribution.Simple.InstallDirs as InstallDirs ( PathTemplate, fromPathTemplate, toPathTemplate, substPathTemplate , initialPathTemplateEnv, installDirsTemplateEnv ) import Distribution.Package ( PackageIdentifier(..), PackageId, packageName, packageVersion , Package(..), PackageFixedDeps(..), PackageKey , Dependency(..), thisPackageVersion, InstalledPackageId, installedPackageId ) import qualified Distribution.PackageDescription as PackageDescription import Distribution.PackageDescription ( PackageDescription, GenericPackageDescription(..), Flag(..) , FlagName(..), FlagAssignment ) import Distribution.PackageDescription.Configuration ( finalizePackageDescription ) import Distribution.ParseUtils ( showPWarning ) import Distribution.Version ( Version, VersionRange, foldVersionRange ) import Distribution.Simple.Utils as Utils ( notice, info, warn, debug, debugNoWrap, die , intercalate, withTempDirectory ) import Distribution.Client.Utils ( determineNumJobs, inDir, mergeBy, MergeResult(..) 
, tryCanonicalizePath ) import Distribution.System ( Platform, OS(Windows), buildOS ) import Distribution.Text ( display ) import Distribution.Verbosity as Verbosity ( Verbosity, showForCabal, normal, verbose ) import Distribution.Simple.BuildPaths ( exeExtension ) --TODO: -- * assign flags to packages individually -- * complain about flags that do not apply to any package given as target -- so flags do not apply to dependencies, only listed, can use flag -- constraints for dependencies -- * only record applicable flags in world file -- * allow flag constraints -- * allow installed constraints -- * allow flag and installed preferences -- * change world file to use cabal section syntax -- * allow persistent configure flags for each package individually -- ------------------------------------------------------------ -- * Top level user actions -- ------------------------------------------------------------ -- | Installs the packages needed to satisfy a list of dependencies. -- install :: Verbosity -> PackageDBStack -> [Repo] -> Compiler -> Platform -> ProgramConfiguration -> UseSandbox -> Maybe SandboxPackageInfo -> GlobalFlags -> ConfigFlags -> ConfigExFlags -> InstallFlags -> HaddockFlags -> [UserTarget] -> IO () install verbosity packageDBs repos comp platform conf useSandbox mSandboxPkgInfo globalFlags configFlags configExFlags installFlags haddockFlags userTargets0 = do installContext <- makeInstallContext verbosity args (Just userTargets0) planResult <- foldProgress logMsg (return . Left) (return . Right) =<< makeInstallPlan verbosity args installContext case planResult of Left message -> do reportPlanningFailure verbosity args installContext message die' message Right installPlan -> processInstallPlan verbosity args installContext installPlan where args :: InstallArgs args = (packageDBs, repos, comp, platform, conf, useSandbox, mSandboxPkgInfo, globalFlags, configFlags, configExFlags, installFlags, haddockFlags) die' message = die (message ++ if isUseSandbox useSandbox then installFailedInSandbox else []) -- TODO: use a better error message, remove duplication. installFailedInSandbox = "\nNote: when using a sandbox, all packages are required to have " ++ "consistent dependencies. " ++ "Try reinstalling/unregistering the offending packages or " ++ "recreating the sandbox." logMsg message rest = debugNoWrap verbosity message >> rest -- TODO: Make InstallContext a proper data type with documented fields. -- | Common context for makeInstallPlan and processInstallPlan. type InstallContext = ( InstalledPackageIndex, SourcePackageDb , [UserTarget], [PackageSpecifier SourcePackage] ) -- TODO: Make InstallArgs a proper data type with documented fields or just get -- rid of it completely. -- | Initial arguments given to 'install' or 'makeInstallContext'. type InstallArgs = ( PackageDBStack , [Repo] , Compiler , Platform , ProgramConfiguration , UseSandbox , Maybe SandboxPackageInfo , GlobalFlags , ConfigFlags , ConfigExFlags , InstallFlags , HaddockFlags ) -- | Make an install context given install arguments. 
makeInstallContext :: Verbosity -> InstallArgs -> Maybe [UserTarget] -> IO InstallContext makeInstallContext verbosity (packageDBs, repos, comp, _, conf,_,_, globalFlags, _, _, _, _) mUserTargets = do installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf sourcePkgDb <- getSourcePackages verbosity repos (userTargets, pkgSpecifiers) <- case mUserTargets of Nothing -> -- We want to distinguish between the case where the user has given an -- empty list of targets on the command-line and the case where we -- specifically want to have an empty list of targets. return ([], []) Just userTargets0 -> do -- For install, if no target is given it means we use the current -- directory as the single target. let userTargets | null userTargets0 = [UserTargetLocalDir "."] | otherwise = userTargets0 pkgSpecifiers <- resolveUserTargets verbosity (fromFlag $ globalWorldFile globalFlags) (packageIndex sourcePkgDb) userTargets return (userTargets, pkgSpecifiers) return (installedPkgIndex, sourcePkgDb, userTargets, pkgSpecifiers) -- | Make an install plan given install context and install arguments. makeInstallPlan :: Verbosity -> InstallArgs -> InstallContext -> IO (Progress String String InstallPlan) makeInstallPlan verbosity (_, _, comp, platform, _, _, mSandboxPkgInfo, _, configFlags, configExFlags, installFlags, _) (installedPkgIndex, sourcePkgDb, _, pkgSpecifiers) = do solver <- chooseSolver verbosity (fromFlag (configSolver configExFlags)) (compilerInfo comp) notice verbosity "Resolving dependencies..." return $ planPackages comp platform mSandboxPkgInfo solver configFlags configExFlags installFlags installedPkgIndex sourcePkgDb pkgSpecifiers -- | Given an install plan, perform the actual installations. processInstallPlan :: Verbosity -> InstallArgs -> InstallContext -> InstallPlan -> IO () processInstallPlan verbosity args@(_,_, comp, _, _, _, _, _, _, _, installFlags, _) (installedPkgIndex, sourcePkgDb, userTargets, pkgSpecifiers) installPlan = do checkPrintPlan verbosity comp installedPkgIndex installPlan sourcePkgDb installFlags pkgSpecifiers unless (dryRun || nothingToInstall) $ do installPlan' <- performInstallations verbosity args installedPkgIndex installPlan postInstallActions verbosity args userTargets installPlan' where dryRun = fromFlag (installDryRun installFlags) nothingToInstall = null (InstallPlan.ready installPlan) -- ------------------------------------------------------------ -- * Installation planning -- ------------------------------------------------------------ planPackages :: Compiler -> Platform -> Maybe SandboxPackageInfo -> Solver -> ConfigFlags -> ConfigExFlags -> InstallFlags -> InstalledPackageIndex -> SourcePackageDb -> [PackageSpecifier SourcePackage] -> Progress String String InstallPlan planPackages comp platform mSandboxPkgInfo solver configFlags configExFlags installFlags installedPkgIndex sourcePkgDb pkgSpecifiers = resolveDependencies platform (compilerInfo comp) solver resolverParams >>= if onlyDeps then pruneInstallPlan pkgSpecifiers else return where resolverParams = setMaxBackjumps (if maxBackjumps < 0 then Nothing else Just maxBackjumps) . setIndependentGoals independentGoals . setReorderGoals reorderGoals . setAvoidReinstalls avoidReinstalls . setShadowPkgs shadowPkgs . setStrongFlags strongFlags . setPreferenceDefault (if upgradeDeps then PreferAllLatest else PreferLatestForSelected) . removeUpperBounds allowNewer . 
addPreferences -- preferences from the config file or command line [ PackageVersionPreference name ver | Dependency name ver <- configPreferences configExFlags ] . addConstraints -- version constraints from the config file or command line (map userToPackageConstraint (configExConstraints configExFlags)) . addConstraints --FIXME: this just applies all flags to all targets which -- is silly. We should check if the flags are appropriate [ PackageConstraintFlags (pkgSpecifierTarget pkgSpecifier) flags | let flags = configConfigurationsFlags configFlags , not (null flags) , pkgSpecifier <- pkgSpecifiers ] . addConstraints [ PackageConstraintStanzas (pkgSpecifierTarget pkgSpecifier) stanzas | pkgSpecifier <- pkgSpecifiers ] . maybe id applySandboxInstallPolicy mSandboxPkgInfo . (if reinstall then reinstallTargets else id) $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers stanzas = concat [ if testsEnabled then [TestStanzas] else [] , if benchmarksEnabled then [BenchStanzas] else [] ] testsEnabled = fromFlagOrDefault False $ configTests configFlags benchmarksEnabled = fromFlagOrDefault False $ configBenchmarks configFlags reinstall = fromFlag (installReinstall installFlags) reorderGoals = fromFlag (installReorderGoals installFlags) independentGoals = fromFlag (installIndependentGoals installFlags) avoidReinstalls = fromFlag (installAvoidReinstalls installFlags) shadowPkgs = fromFlag (installShadowPkgs installFlags) strongFlags = fromFlag (installStrongFlags installFlags) maxBackjumps = fromFlag (installMaxBackjumps installFlags) upgradeDeps = fromFlag (installUpgradeDeps installFlags) onlyDeps = fromFlag (installOnlyDeps installFlags) allowNewer = fromFlag (configAllowNewer configExFlags) -- | Remove the provided targets from the install plan. pruneInstallPlan :: Package pkg => [PackageSpecifier pkg] -> InstallPlan -> Progress String String InstallPlan pruneInstallPlan pkgSpecifiers = -- TODO: this is a general feature and should be moved to D.C.Dependency -- Also, the InstallPlan.remove should return info more precise to the -- problem, rather than the very general PlanProblem type. either (Fail . explain) Done . InstallPlan.remove (\pkg -> packageName pkg `elem` targetnames) where explain :: [InstallPlan.PlanProblem] -> String explain problems = "Cannot select only the dependencies (as requested by the " ++ "'--only-dependencies' flag), " ++ (case pkgids of [pkgid] -> "the package " ++ display pkgid ++ " is " _ -> "the packages " ++ intercalate ", " (map display pkgids) ++ " are ") ++ "required by a dependency of one of the other targets." where pkgids = nub [ depid | InstallPlan.PackageMissingDeps _ depids <- problems , depid <- depids , packageName depid `elem` targetnames ] targetnames = map pkgSpecifierTarget pkgSpecifiers -- ------------------------------------------------------------ -- * Informational messages -- ------------------------------------------------------------ -- | Perform post-solver checks of the install plan and print it if -- either requested or needed. checkPrintPlan :: Verbosity -> Compiler -> InstalledPackageIndex -> InstallPlan -> SourcePackageDb -> InstallFlags -> [PackageSpecifier SourcePackage] -> IO () checkPrintPlan verbosity comp installed installPlan sourcePkgDb installFlags pkgSpecifiers = do -- User targets that are already installed. 
let preExistingTargets = [ p | let tgts = map pkgSpecifierTarget pkgSpecifiers, InstallPlan.PreExisting p <- InstallPlan.toList installPlan, packageName p `elem` tgts ] -- If there's nothing to install, we print the already existing -- target packages as an explanation. when nothingToInstall $ notice verbosity $ unlines $ "All the requested packages are already installed:" : map (display . packageId) preExistingTargets ++ ["Use --reinstall if you want to reinstall anyway."] let lPlan = linearizeInstallPlan comp installed installPlan -- Are any packages classified as reinstalls? let reinstalledPkgs = concatMap (extractReinstalls . snd) lPlan -- Packages that are already broken. let oldBrokenPkgs = map Installed.installedPackageId . PackageIndex.reverseDependencyClosure installed . map (Installed.installedPackageId . fst) . PackageIndex.brokenPackages $ installed let excluded = reinstalledPkgs ++ oldBrokenPkgs -- Packages that are reverse dependencies of replaced packages are very -- likely to be broken. We exclude packages that are already broken. let newBrokenPkgs = filter (\ p -> not (Installed.installedPackageId p `elem` excluded)) (PackageIndex.reverseDependencyClosure installed reinstalledPkgs) let containsReinstalls = not (null reinstalledPkgs) let breaksPkgs = not (null newBrokenPkgs) let adaptedVerbosity | containsReinstalls && not overrideReinstall = verbosity `max` verbose | otherwise = verbosity -- We print the install plan if we are in a dry-run or if we are confronted -- with a dangerous install plan. when (dryRun || containsReinstalls && not overrideReinstall) $ printPlan (dryRun || breaksPkgs && not overrideReinstall) adaptedVerbosity lPlan sourcePkgDb -- If the install plan is dangerous, we print various warning messages. In -- particular, if we can see that packages are likely to be broken, we even -- bail out (unless installation has been forced with --force-reinstalls). when containsReinstalls $ do if breaksPkgs then do (if dryRun || overrideReinstall then warn verbosity else die) $ unlines $ "The following packages are likely to be broken by the reinstalls:" : map (display . Installed.sourcePackageId) newBrokenPkgs ++ if overrideReinstall then if dryRun then [] else ["Continuing even though the plan contains dangerous reinstalls."] else ["Use --force-reinstalls if you want to install anyway."] else unless dryRun $ warn verbosity "Note that reinstalls are always dangerous. Continuing anyway..." where nothingToInstall = null (InstallPlan.ready installPlan) dryRun = fromFlag (installDryRun installFlags) overrideReinstall = fromFlag (installOverrideReinstall installFlags) linearizeInstallPlan :: Compiler -> InstalledPackageIndex -> InstallPlan -> [(ReadyPackage, PackageStatus)] linearizeInstallPlan comp installedPkgIndex plan = unfoldr next plan where next plan' = case InstallPlan.ready plan' of [] -> Nothing (pkg:_) -> Just ((pkg, status), plan'') where pkgid = installedPackageId pkg status = packageStatus comp installedPkgIndex pkg plan'' = InstallPlan.completed pkgid (BuildOk DocsNotTried TestsNotTried (Just $ Installed.emptyInstalledPackageInfo { Installed.sourcePackageId = packageId pkg , Installed.installedPackageId = pkgid })) (InstallPlan.processing [pkg] plan') --FIXME: This is a bit of a hack, -- pretending that each package is installed -- It's doubly a hack because the installed package ID -- didn't get updated... 
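-- Note: a minimal, hypothetical sketch of how the 'MergeResult' values
-- produced by 'Distribution.Client.Utils.mergeBy' are read when the
-- dependencies of an installed package are compared against those of the
-- freshly configured one (see 'packageStatus' and the 'change' helper in
-- 'printPlan' below). The package ids are invented for illustration; both
-- input lists are sorted by package name, as 'packageStatus' ensures:
--
-- > mergeBy (comparing packageName)
-- >         [ bytestring-0.10, text-1.1 ]   -- deps of the installed copy
-- >         [ text-1.2, vector-0.10 ]       -- deps of the configured copy
-- >   == [ OnlyInLeft  bytestring-0.10      -- shown as "bytestring-0.10 removed"
-- >      , InBoth      text-1.1 text-1.2    -- shown as "text-1.1 -> 1.2"
-- >      , OnlyInRight vector-0.10 ]        -- shown as "vector-0.10 added"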
data PackageStatus = NewPackage | NewVersion [Version] | Reinstall [InstalledPackageId] [PackageChange] type PackageChange = MergeResult PackageIdentifier PackageIdentifier extractReinstalls :: PackageStatus -> [InstalledPackageId] extractReinstalls (Reinstall ipids _) = ipids extractReinstalls _ = [] packageStatus :: Compiler -> InstalledPackageIndex -> ReadyPackage -> PackageStatus packageStatus _comp installedPkgIndex cpkg = case PackageIndex.lookupPackageName installedPkgIndex (packageName cpkg) of [] -> NewPackage ps -> case filter ((== packageId cpkg) . Installed.sourcePackageId) (concatMap snd ps) of [] -> NewVersion (map fst ps) pkgs@(pkg:_) -> Reinstall (map Installed.installedPackageId pkgs) (changes pkg cpkg) where changes :: Installed.InstalledPackageInfo -> ReadyPackage -> [MergeResult PackageIdentifier PackageIdentifier] changes pkg pkg' = filter changed $ mergeBy (comparing packageName) -- get dependencies of installed package (convert to source pkg ids via -- index) (nub . sort . concatMap (maybeToList . fmap Installed.sourcePackageId . PackageIndex.lookupInstalledPackageId installedPkgIndex) . Installed.depends $ pkg) -- get dependencies of configured package (nub . sort . depends $ pkg') changed (InBoth pkgid pkgid') = pkgid /= pkgid' changed _ = True printPlan :: Bool -- is dry run -> Verbosity -> [(ReadyPackage, PackageStatus)] -> SourcePackageDb -> IO () printPlan dryRun verbosity plan sourcePkgDb = case plan of [] -> return () pkgs | verbosity >= Verbosity.verbose -> putStr $ unlines $ ("In order, the following " ++ wouldWill ++ " be installed:") : map showPkgAndReason pkgs | otherwise -> notice verbosity $ unlines $ ("In order, the following " ++ wouldWill ++ " be installed (use -v for more details):") : map showPkg pkgs where wouldWill | dryRun = "would" | otherwise = "will" showPkg (pkg, _) = display (packageId pkg) ++ showLatest (pkg) showPkgAndReason (pkg', pr) = display (packageId pkg') ++ showLatest pkg' ++ showFlagAssignment (nonDefaultFlags pkg') ++ showStanzas (stanzas pkg') ++ showDep pkg' ++ case pr of NewPackage -> " (new package)" NewVersion _ -> " (new version)" Reinstall _ cs -> " (reinstall)" ++ case cs of [] -> "" diff -> " (changes: " ++ intercalate ", " (map change diff) ++ ")" showLatest :: ReadyPackage -> String showLatest pkg = case mLatestVersion of Just latestVersion -> if packageVersion pkg < latestVersion then (" (latest: " ++ display latestVersion ++ ")") else "" Nothing -> "" where mLatestVersion :: Maybe Version mLatestVersion = case SourcePackageIndex.lookupPackageName (packageIndex sourcePkgDb) (packageName pkg) of [] -> Nothing x -> Just $ packageVersion $ last x toFlagAssignment :: [Flag] -> FlagAssignment toFlagAssignment = map (\ f -> (flagName f, flagDefault f)) nonDefaultFlags :: ReadyPackage -> FlagAssignment nonDefaultFlags (ReadyPackage spkg fa _ _) = let defaultAssignment = toFlagAssignment (genPackageFlags (Source.packageDescription spkg)) in fa \\ defaultAssignment stanzas :: ReadyPackage -> [OptionalStanza] stanzas (ReadyPackage _ _ sts _) = sts showStanzas :: [OptionalStanza] -> String showStanzas = concatMap ((' ' :) . showStanza) showStanza TestStanzas = "*test" showStanza BenchStanzas = "*bench" -- FIXME: this should be a proper function in a proper place showFlagAssignment :: FlagAssignment -> String showFlagAssignment = concatMap ((' ' :) . 
showFlagValue) showFlagValue (f, True) = '+' : showFlagName f showFlagValue (f, False) = '-' : showFlagName f showFlagName (FlagName f) = f change (OnlyInLeft pkgid) = display pkgid ++ " removed" change (InBoth pkgid pkgid') = display pkgid ++ " -> " ++ display (packageVersion pkgid') change (OnlyInRight pkgid') = display pkgid' ++ " added" showDep pkg | Just rdeps <- Map.lookup (packageId pkg) revDeps = " (via: " ++ unwords (map display rdeps) ++ ")" | otherwise = "" revDepGraphEdges = [ (rpid,packageId pkg) | (pkg,_) <- plan, rpid <- depends pkg ] revDeps = Map.fromListWith (++) (map (fmap (:[])) revDepGraphEdges) -- ------------------------------------------------------------ -- * Post installation stuff -- ------------------------------------------------------------ -- | Report a solver failure. This works slightly differently to -- 'postInstallActions', as (by definition) we don't have an install plan. reportPlanningFailure :: Verbosity -> InstallArgs -> InstallContext -> String -> IO () reportPlanningFailure verbosity (_, _, comp, platform, _, _, _ ,_, configFlags, _, installFlags, _) (_, sourcePkgDb, _, pkgSpecifiers) message = do when reportFailure $ do -- Only create reports for explicitly named packages let pkgids = filter (SourcePackageIndex.elemByPackageId (packageIndex sourcePkgDb)) $ mapMaybe theSpecifiedPackage pkgSpecifiers buildReports = BuildReports.fromPlanningFailure platform (compilerId comp) pkgids (configConfigurationsFlags configFlags) when (not (null buildReports)) $ info verbosity $ "Solver failure will be reported for " ++ intercalate "," (map display pkgids) -- Save reports BuildReports.storeLocal (compilerInfo comp) (fromNubList $ installSummaryFile installFlags) buildReports platform -- Save solver log case logFile of Nothing -> return () Just template -> forM_ pkgids $ \pkgid -> let env = initialPathTemplateEnv pkgid dummyPackageKey (compilerInfo comp) platform path = fromPathTemplate $ substPathTemplate env template in writeFile path message where reportFailure = fromFlag (installReportPlanningFailure installFlags) logFile = flagToMaybe (installLogFile installFlags) -- A PackageKey is calculated from the transitive closure of -- dependencies, but when the solver fails we don't have that. -- So we fail. dummyPackageKey = error "reportPlanningFailure: package key not available" -- | If a 'PackageSpecifier' refers to a single package, return Just that package. theSpecifiedPackage :: Package pkg => PackageSpecifier pkg -> Maybe PackageId theSpecifiedPackage pkgSpec = case pkgSpec of NamedPackage name [PackageConstraintVersion name' version] | name == name' -> PackageIdentifier name <$> trivialRange version NamedPackage _ _ -> Nothing SpecificSourcePackage pkg -> Just $ packageId pkg where -- | If a range includes only a single version, return Just that version. trivialRange :: VersionRange -> Maybe Version trivialRange = foldVersionRange Nothing Just -- "== v" (\_ -> Nothing) (\_ -> Nothing) (\_ _ -> Nothing) (\_ _ -> Nothing) -- | Various stuff we do after successful or unsuccessfully installing a bunch -- of packages. 
This includes: -- -- * build reporting, local and remote -- * symlinking binaries -- * updating indexes -- * updating world file -- * error reporting -- postInstallActions :: Verbosity -> InstallArgs -> [UserTarget] -> InstallPlan -> IO () postInstallActions verbosity (packageDBs, _, comp, platform, conf, useSandbox, mSandboxPkgInfo ,globalFlags, configFlags, _, installFlags, _) targets installPlan = do unless oneShot $ World.insert verbosity worldFile --FIXME: does not handle flags [ World.WorldPkgInfo dep [] | UserTargetNamed dep <- targets ] let buildReports = BuildReports.fromInstallPlan installPlan BuildReports.storeLocal (compilerInfo comp) (fromNubList $ installSummaryFile installFlags) buildReports (InstallPlan.planPlatform installPlan) when (reportingLevel >= AnonymousReports) $ BuildReports.storeAnonymous buildReports when (reportingLevel == DetailedReports) $ storeDetailedBuildReports verbosity logsDir buildReports regenerateHaddockIndex verbosity packageDBs comp platform conf useSandbox configFlags installFlags installPlan symlinkBinaries verbosity comp configFlags installFlags installPlan printBuildFailures installPlan updateSandboxTimestampsFile useSandbox mSandboxPkgInfo comp platform installPlan where reportingLevel = fromFlag (installBuildReports installFlags) logsDir = fromFlag (globalLogsDir globalFlags) oneShot = fromFlag (installOneShot installFlags) worldFile = fromFlag $ globalWorldFile globalFlags storeDetailedBuildReports :: Verbosity -> FilePath -> [(BuildReports.BuildReport, Maybe Repo)] -> IO () storeDetailedBuildReports verbosity logsDir reports = sequence_ [ do dotCabal <- defaultCabalDir let logFileName = display (BuildReports.package report) <.> "log" logFile = logsDir </> logFileName reportsDir = dotCabal </> "reports" </> remoteRepoName remoteRepo reportFile = reportsDir </> logFileName handleMissingLogFile $ do buildLog <- readFile logFile createDirectoryIfMissing True reportsDir -- FIXME writeFile reportFile (show (BuildReports.show report, buildLog)) | (report, Just Repo { repoKind = Left remoteRepo }) <- reports , isLikelyToHaveLogFile (BuildReports.installOutcome report) ] where isLikelyToHaveLogFile BuildReports.ConfigureFailed {} = True isLikelyToHaveLogFile BuildReports.BuildFailed {} = True isLikelyToHaveLogFile BuildReports.InstallFailed {} = True isLikelyToHaveLogFile BuildReports.InstallOk {} = True isLikelyToHaveLogFile _ = False handleMissingLogFile = Exception.handleJust missingFile $ \ioe -> warn verbosity $ "Missing log file for build report: " ++ fromMaybe "" (ioeGetFileName ioe) missingFile ioe | isDoesNotExistError ioe = Just ioe missingFile _ = Nothing regenerateHaddockIndex :: Verbosity -> [PackageDB] -> Compiler -> Platform -> ProgramConfiguration -> UseSandbox -> ConfigFlags -> InstallFlags -> InstallPlan -> IO () regenerateHaddockIndex verbosity packageDBs comp platform conf useSandbox configFlags installFlags installPlan | haddockIndexFileIsRequested && shouldRegenerateHaddockIndex = do defaultDirs <- InstallDirs.defaultInstallDirs (compilerFlavor comp) (fromFlag (configUserInstall configFlags)) True let indexFileTemplate = fromFlag (installHaddockIndex installFlags) indexFile = substHaddockIndexFileName defaultDirs indexFileTemplate notice verbosity $ "Updating documentation index " ++ indexFile --TODO: might be nice if the install plan gave us the new InstalledPackageInfo installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf Haddock.regenerateHaddockIndex verbosity installedPkgIndex conf indexFile | otherwise = return () 
where haddockIndexFileIsRequested = fromFlag (installDocumentation installFlags) && isJust (flagToMaybe (installHaddockIndex installFlags)) -- We want to regenerate the index if some new documentation was actually -- installed. Since the index can be only per-user or per-sandbox (see -- #1337), we don't do it for global installs or special cases where we're -- installing into a specific db. shouldRegenerateHaddockIndex = (isUseSandbox useSandbox || normalUserInstall) && someDocsWereInstalled installPlan where someDocsWereInstalled = any installedDocs . InstallPlan.toList normalUserInstall = (UserPackageDB `elem` packageDBs) && all (not . isSpecificPackageDB) packageDBs installedDocs (InstallPlan.Installed _ (BuildOk DocsOk _ _)) = True installedDocs _ = False isSpecificPackageDB (SpecificPackageDB _) = True isSpecificPackageDB _ = False substHaddockIndexFileName defaultDirs = fromPathTemplate . substPathTemplate env where env = env0 ++ installDirsTemplateEnv absoluteDirs env0 = InstallDirs.compilerTemplateEnv (compilerInfo comp) ++ InstallDirs.platformTemplateEnv platform ++ InstallDirs.abiTemplateEnv (compilerInfo comp) platform absoluteDirs = InstallDirs.substituteInstallDirTemplates env0 templateDirs templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault defaultDirs (configInstallDirs configFlags) symlinkBinaries :: Verbosity -> Compiler -> ConfigFlags -> InstallFlags -> InstallPlan -> IO () symlinkBinaries verbosity comp configFlags installFlags plan = do failed <- InstallSymlink.symlinkBinaries comp configFlags installFlags plan case failed of [] -> return () [(_, exe, path)] -> warn verbosity $ "could not create a symlink in " ++ bindir ++ " for " ++ exe ++ " because the file exists there already but is not " ++ "managed by cabal. You can create a symlink for this executable " ++ "manually if you wish. The executable file has been installed at " ++ path exes -> warn verbosity $ "could not create symlinks in " ++ bindir ++ " for " ++ intercalate ", " [ exe | (_, exe, _) <- exes ] ++ " because the files exist there already and are not " ++ "managed by cabal. You can create symlinks for these executables " ++ "manually if you wish. The executable files have been installed at " ++ intercalate ", " [ path | (_, _, path) <- exes ] where bindir = fromFlag (installSymlinkBinDir installFlags) printBuildFailures :: InstallPlan -> IO () printBuildFailures plan = case [ (pkg, reason) | InstallPlan.Failed pkg reason <- InstallPlan.toList plan ] of [] -> return () failed -> die . unlines $ "Error: some packages failed to install:" : [ display (packageId pkg) ++ printFailureReason reason | (pkg, reason) <- failed ] where printFailureReason reason = case reason of DependentFailed pkgid -> " depends on " ++ display pkgid ++ " which failed to install." DownloadFailed e -> " failed while downloading the package." ++ showException e UnpackFailed e -> " failed while unpacking the package." ++ showException e ConfigureFailed e -> " failed during the configure step." ++ showException e BuildFailed e -> " failed during the building phase." ++ showException e TestsFailed e -> " failed during the tests phase." ++ showException e InstallFailed e -> " failed during the final install step." ++ showException e -- This will never happen, but we include it for completeness PlanningFailed -> " failed during the planning phase." 
showException e = " The exception was:\n " ++ show e ++ maybeOOM e #ifdef mingw32_HOST_OS maybeOOM _ = "" #else maybeOOM e = maybe "" onExitFailure (fromException e) onExitFailure (ExitFailure n) | n == 9 || n == -9 = "\nThis may be due to an out-of-memory condition." onExitFailure _ = "" #endif -- | If we're working inside a sandbox and some add-source deps were installed, -- update the timestamps of those deps. updateSandboxTimestampsFile :: UseSandbox -> Maybe SandboxPackageInfo -> Compiler -> Platform -> InstallPlan -> IO () updateSandboxTimestampsFile (UseSandbox sandboxDir) (Just (SandboxPackageInfo _ _ _ allAddSourceDeps)) comp platform installPlan = withUpdateTimestamps sandboxDir (compilerId comp) platform $ \_ -> do let allInstalled = [ pkg | InstallPlan.Installed pkg _ <- InstallPlan.toList installPlan ] allSrcPkgs = [ pkg | ReadyPackage pkg _ _ _ <- allInstalled ] allPaths = [ pth | LocalUnpackedPackage pth <- map packageSource allSrcPkgs] allPathsCanonical <- mapM tryCanonicalizePath allPaths return $! filter (`S.member` allAddSourceDeps) allPathsCanonical updateSandboxTimestampsFile _ _ _ _ _ = return () -- ------------------------------------------------------------ -- * Actually do the installations -- ------------------------------------------------------------ data InstallMisc = InstallMisc { rootCmd :: Maybe FilePath, libVersion :: Maybe Version } -- | If logging is enabled, contains location of the log file and the verbosity -- level for logging. type UseLogFile = Maybe (PackageIdentifier -> PackageKey -> FilePath, Verbosity) performInstallations :: Verbosity -> InstallArgs -> InstalledPackageIndex -> InstallPlan -> IO InstallPlan performInstallations verbosity (packageDBs, _, comp, _, conf, useSandbox, _, globalFlags, configFlags, configExFlags, installFlags, haddockFlags) installedPkgIndex installPlan = do -- With 'install -j' it can be a bit hard to tell whether a sandbox is used. 
whenUsingSandbox useSandbox $ \sandboxDir -> when parallelInstall $ notice verbosity $ "Notice: installing into a sandbox located at " ++ sandboxDir jobControl <- if parallelInstall then newParallelJobControl else newSerialJobControl buildLimit <- newJobLimit numJobs fetchLimit <- newJobLimit (min numJobs numFetchJobs) installLock <- newLock -- serialise installation cacheLock <- newLock -- serialise access to setup exe cache executeInstallPlan verbosity comp jobControl useLogFile installPlan $ \rpkg -> -- Calculate the package key (ToDo: Is this right for source install) let pkg_key = readyPackageKey comp rpkg in installReadyPackage platform cinfo configFlags rpkg $ \configFlags' src pkg pkgoverride -> fetchSourcePackage verbosity fetchLimit src $ \src' -> installLocalPackage verbosity buildLimit (packageId pkg) src' distPref $ \mpath -> installUnpackedPackage verbosity buildLimit installLock numJobs pkg_key (setupScriptOptions installedPkgIndex cacheLock) miscOptions configFlags' installFlags haddockFlags cinfo platform pkg pkgoverride mpath useLogFile where platform = InstallPlan.planPlatform installPlan cinfo = InstallPlan.planCompiler installPlan numJobs = determineNumJobs (installNumJobs installFlags) numFetchJobs = 2 parallelInstall = numJobs >= 2 distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) (configDistPref configFlags) setupScriptOptions index lock = SetupScriptOptions { useCabalVersion = chooseCabalVersion configExFlags (libVersion miscOptions), useCompiler = Just comp, usePlatform = Just platform, -- Hack: we typically want to allow the UserPackageDB for finding the -- Cabal lib when compiling any Setup.hs even if we're doing a global -- install. However we also allow looking in a specific package db. usePackageDB = if UserPackageDB `elem` packageDBs then packageDBs else let (db@GlobalPackageDB:dbs) = packageDBs in db : UserPackageDB : dbs, --TODO: use Ord instance: -- insert UserPackageDB packageDBs usePackageIndex = if UserPackageDB `elem` packageDBs then Just index else Nothing, useProgramConfig = conf, useDistPref = distPref, useLoggingHandle = Nothing, useWorkingDir = Nothing, forceExternalSetupMethod = parallelInstall, useWin32CleanHack = False, setupCacheLock = Just lock } reportingLevel = fromFlag (installBuildReports installFlags) logsDir = fromFlag (globalLogsDir globalFlags) -- Should the build output be written to a log file instead of stdout? useLogFile :: UseLogFile useLogFile = fmap ((\f -> (f, loggingVerbosity)) . substLogFileName) logFileTemplate where installLogFile' = flagToMaybe $ installLogFile installFlags defaultTemplate = toPathTemplate $ logsDir "$pkgid" <.> "log" -- If the user has specified --remote-build-reporting=detailed, use the -- default log file location. If the --build-log option is set, use the -- provided location. Otherwise don't use logging, unless building in -- parallel (in which case the default location is used). logFileTemplate :: Maybe PathTemplate logFileTemplate | useDefaultTemplate = Just defaultTemplate | otherwise = installLogFile' -- If the user has specified --remote-build-reporting=detailed or -- --build-log, use more verbose logging. 
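-- [Editor's aside: illustrative sketch, not part of the original source.
-- The default build-log template above is logsDir </> "$pkgid" <.> "log";
-- real cabal-install expands it through PathTemplate and
-- initialPathTemplateEnv.  A minimal string-level stand-in for that
-- substitution, with hypothetical names:]
import Data.List (isPrefixOf)
import System.FilePath ((</>), (<.>))

substPkgId :: String   -- ^ rendered package id, e.g. "foo-1.0"
           -> String   -- ^ template containing "$pkgid"
           -> FilePath
substPkgId pkgid = go
  where
    go s | "$pkgid" `isPrefixOf` s = pkgid ++ go (drop (length "$pkgid") s)
    go (c:cs)                      = c : go cs
    go []                          = []

-- For example:
--   substPkgId "foo-1.0" ("/home/me/.cabal/logs" </> "$pkgid" <.> "log")
--     == "/home/me/.cabal/logs/foo-1.0.log"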
loggingVerbosity :: Verbosity loggingVerbosity | overrideVerbosity = max Verbosity.verbose verbosity | otherwise = verbosity useDefaultTemplate :: Bool useDefaultTemplate | reportingLevel == DetailedReports = True | isJust installLogFile' = False | parallelInstall = True | otherwise = False overrideVerbosity :: Bool overrideVerbosity | reportingLevel == DetailedReports = True | isJust installLogFile' = True | parallelInstall = False | otherwise = False substLogFileName :: PathTemplate -> PackageIdentifier -> PackageKey -> FilePath substLogFileName template pkg pkg_key = fromPathTemplate . substPathTemplate env $ template where env = initialPathTemplateEnv (packageId pkg) pkg_key (compilerInfo comp) platform miscOptions = InstallMisc { rootCmd = if fromFlag (configUserInstall configFlags) || (isUseSandbox useSandbox) then Nothing -- ignore --root-cmd if --user -- or working inside a sandbox. else flagToMaybe (installRootCmd installFlags), libVersion = flagToMaybe (configCabalVersion configExFlags) } executeInstallPlan :: Verbosity -> Compiler -> JobControl IO (PackageId, PackageKey, BuildResult) -> UseLogFile -> InstallPlan -> (ReadyPackage -> IO BuildResult) -> IO InstallPlan executeInstallPlan verbosity comp jobCtl useLogFile plan0 installPkg = tryNewTasks 0 plan0 where tryNewTasks taskCount plan = do case InstallPlan.ready plan of [] | taskCount == 0 -> return plan | otherwise -> waitForTasks taskCount plan pkgs -> do sequence_ [ do info verbosity $ "Ready to install " ++ display pkgid spawnJob jobCtl $ do buildResult <- installPkg pkg return (packageId pkg, pkg_key, buildResult) | pkg <- pkgs , let pkgid = packageId pkg pkg_key = readyPackageKey comp pkg ] let taskCount' = taskCount + length pkgs plan' = InstallPlan.processing pkgs plan waitForTasks taskCount' plan' waitForTasks taskCount plan = do info verbosity $ "Waiting for install task to finish..." (pkgid, pkg_key, buildResult) <- collectJob jobCtl printBuildResult pkgid pkg_key buildResult let taskCount' = taskCount-1 plan' = updatePlan pkgid buildResult plan tryNewTasks taskCount' plan' updatePlan :: PackageIdentifier -> BuildResult -> InstallPlan -> InstallPlan updatePlan pkgid (Right buildSuccess) = InstallPlan.completed (Source.fakeInstalledPackageId pkgid) buildSuccess updatePlan pkgid (Left buildFailure) = InstallPlan.failed (Source.fakeInstalledPackageId pkgid) buildFailure depsFailure where depsFailure = DependentFailed pkgid -- So this first pkgid failed for whatever reason (buildFailure). -- All the other packages that depended on this pkgid, which we -- now cannot build, we mark as failing due to 'DependentFailed' -- which kind of means it was not their fault. -- Print build log if something went wrong, and 'Installed $PKGID' -- otherwise. printBuildResult :: PackageId -> PackageKey -> BuildResult -> IO () printBuildResult pkgid pkg_key buildResult = case buildResult of (Right _) -> notice verbosity $ "Installed " ++ display pkgid (Left _) -> do notice verbosity $ "Failed to install " ++ display pkgid when (verbosity >= normal) $ case useLogFile of Nothing -> return () Just (mkLogFileName, _) -> do let logName = mkLogFileName pkgid pkg_key putStr $ "Build log ( " ++ logName ++ " ):\n" printFile logName printFile :: FilePath -> IO () printFile path = readFile path >>= putStr -- | Call an installer for an 'SourcePackage' but override the configure -- flags with the ones given by the 'ReadyPackage'. In particular the -- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly -- versioned package dependencies. 
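-- [Editor's aside: illustrative sketch, not part of the original source;
-- the Haddock comment for 'installReadyPackage' continues below.
-- 'executeInstallPlan' above needs only two operations from its job
-- control: spawn a job and collect any finished result.  A minimal
-- parallel variant built on a Chan, leaving out the error handling and
-- the serial variant that the real Distribution.Client.JobControl
-- provides (all names here are hypothetical):]
import Control.Concurrent (forkIO)
import Control.Concurrent.Chan (newChan, readChan, writeChan)

data SimpleJobControl a = SimpleJobControl
  { simpleSpawn   :: IO a -> IO ()  -- ^ start a job asynchronously
  , simpleCollect :: IO a           -- ^ block until some job finishes
  }

newParallelJobControl' :: IO (SimpleJobControl a)
newParallelJobControl' = do
  results <- newChan
  return SimpleJobControl
    { simpleSpawn   = \job -> () <$ forkIO (job >>= writeChan results)
    , simpleCollect = readChan results
    }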
So we ignore any previous partial flag -- assignment or dependency constraints and use the new ones. -- -- NB: when updating this function, don't forget to also update -- 'configurePackage' in D.C.Configure. installReadyPackage :: Platform -> CompilerInfo -> ConfigFlags -> ReadyPackage -> (ConfigFlags -> PackageLocation (Maybe FilePath) -> PackageDescription -> PackageDescriptionOverride -> a) -> a installReadyPackage platform cinfo configFlags (ReadyPackage (SourcePackage _ gpkg source pkgoverride) flags stanzas deps) installPkg = installPkg configFlags { configConfigurationsFlags = flags, -- We generate the legacy constraints as well as the new style precise deps. -- In the end only one set gets passed to Setup.hs configure, depending on -- the Cabal version we are talking to. configConstraints = [ thisPackageVersion (packageId deppkg) | deppkg <- deps ], configDependencies = [ (packageName (Installed.sourcePackageId deppkg), Installed.installedPackageId deppkg) | deppkg <- deps ], -- Use '--exact-configuration' if supported. configExactConfiguration = toFlag True, configBenchmarks = toFlag False, configTests = toFlag (TestStanzas `elem` stanzas) } source pkg pkgoverride where pkg = case finalizePackageDescription flags (const True) platform cinfo [] (enableStanzas stanzas gpkg) of Left _ -> error "finalizePackageDescription ReadyPackage failed" Right (desc, _) -> desc fetchSourcePackage :: Verbosity -> JobLimit -> PackageLocation (Maybe FilePath) -> (PackageLocation FilePath -> IO BuildResult) -> IO BuildResult fetchSourcePackage verbosity fetchLimit src installPkg = do fetched <- checkFetched src case fetched of Just src' -> installPkg src' Nothing -> onFailure DownloadFailed $ do loc <- withJobLimit fetchLimit $ fetchPackage verbosity src installPkg loc installLocalPackage :: Verbosity -> JobLimit -> PackageIdentifier -> PackageLocation FilePath -> FilePath -> (Maybe FilePath -> IO BuildResult) -> IO BuildResult installLocalPackage verbosity jobLimit pkgid location distPref installPkg = case location of LocalUnpackedPackage dir -> installPkg (Just dir) LocalTarballPackage tarballPath -> installLocalTarballPackage verbosity jobLimit pkgid tarballPath distPref installPkg RemoteTarballPackage _ tarballPath -> installLocalTarballPackage verbosity jobLimit pkgid tarballPath distPref installPkg RepoTarballPackage _ _ tarballPath -> installLocalTarballPackage verbosity jobLimit pkgid tarballPath distPref installPkg installLocalTarballPackage :: Verbosity -> JobLimit -> PackageIdentifier -> FilePath -> FilePath -> (Maybe FilePath -> IO BuildResult) -> IO BuildResult installLocalTarballPackage verbosity jobLimit pkgid tarballPath distPref installPkg = do tmp <- getTemporaryDirectory withTempDirectory verbosity tmp "cabal-tmp" $ \tmpDirPath -> onFailure UnpackFailed $ do let relUnpackedPath = display pkgid absUnpackedPath = tmpDirPath relUnpackedPath descFilePath = absUnpackedPath display (packageName pkgid) <.> "cabal" withJobLimit jobLimit $ do info verbosity $ "Extracting " ++ tarballPath ++ " to " ++ tmpDirPath ++ "..." extractTarGzFile tmpDirPath relUnpackedPath tarballPath exists <- doesFileExist descFilePath when (not exists) $ die $ "Package .cabal file not found: " ++ show descFilePath maybeRenameDistDir absUnpackedPath installPkg (Just absUnpackedPath) where -- 'cabal sdist' puts pre-generated files in the 'dist' -- directory. This fails when a nonstandard build directory name -- is used (as is the case with sandboxes), so we need to rename -- the 'dist' dir here. 
-- -- TODO: 'cabal get happy && cd sandbox && cabal install ../happy' still -- fails even with this workaround. We probably can live with that. maybeRenameDistDir :: FilePath -> IO () maybeRenameDistDir absUnpackedPath = do let distDirPath = absUnpackedPath defaultDistPref distDirPathTmp = absUnpackedPath (defaultDistPref ++ "-tmp") distDirPathNew = absUnpackedPath distPref distDirExists <- doesDirectoryExist distDirPath when (distDirExists && (not $ distDirPath `equalFilePath` distDirPathNew)) $ do -- NB: we need to handle the case when 'distDirPathNew' is a -- subdirectory of 'distDirPath' (e.g. the former is -- 'dist/dist-sandbox-3688fbc2' and the latter is 'dist'). debug verbosity $ "Renaming '" ++ distDirPath ++ "' to '" ++ distDirPathTmp ++ "'." renameDirectory distDirPath distDirPathTmp when (distDirPath `isPrefixOf` distDirPathNew) $ createDirectoryIfMissingVerbose verbosity False distDirPath debug verbosity $ "Renaming '" ++ distDirPathTmp ++ "' to '" ++ distDirPathNew ++ "'." renameDirectory distDirPathTmp distDirPathNew installUnpackedPackage :: Verbosity -> JobLimit -> Lock -> Int -> PackageKey -> SetupScriptOptions -> InstallMisc -> ConfigFlags -> InstallFlags -> HaddockFlags -> CompilerInfo -> Platform -> PackageDescription -> PackageDescriptionOverride -> Maybe FilePath -- ^ Directory to change to before starting the installation. -> UseLogFile -- ^ File to log output to (if any) -> IO BuildResult installUnpackedPackage verbosity buildLimit installLock numJobs pkg_key scriptOptions miscOptions configFlags installFlags haddockFlags cinfo platform pkg pkgoverride workingDir useLogFile = do -- Override the .cabal file if necessary case pkgoverride of Nothing -> return () Just pkgtxt -> do let descFilePath = fromMaybe "." workingDir display (packageName pkgid) <.> "cabal" info verbosity $ "Updating " ++ display (packageName pkgid) <.> "cabal" ++ " with the latest revision from the index." writeFileAtomic descFilePath pkgtxt -- Make sure that we pass --libsubdir etc to 'setup configure' (necessary if -- the setup script was compiled against an old version of the Cabal lib). configFlags' <- addDefaultInstallDirs configFlags -- Filter out flags not supported by the old versions of the Cabal lib. let configureFlags :: Version -> ConfigFlags configureFlags = filterConfigureFlags configFlags' { configVerbosity = toFlag verbosity' } -- Path to the optional log file. mLogPath <- maybeLogPath -- Configure phase onFailure ConfigureFailed $ withJobLimit buildLimit $ do when (numJobs > 1) $ notice verbosity $ "Configuring " ++ display pkgid ++ "..." setup configureCommand configureFlags mLogPath -- Build phase onFailure BuildFailed $ do when (numJobs > 1) $ notice verbosity $ "Building " ++ display pkgid ++ "..." 
setup buildCommand' buildFlags mLogPath -- Doc generation phase docsResult <- if shouldHaddock then (do setup haddockCommand haddockFlags' mLogPath return DocsOk) `catchIO` (\_ -> return DocsFailed) `catchExit` (\_ -> return DocsFailed) else return DocsNotTried -- Tests phase onFailure TestsFailed $ do when (testsEnabled && PackageDescription.hasTests pkg) $ setup Cabal.testCommand testFlags mLogPath let testsResult | testsEnabled = TestsOk | otherwise = TestsNotTried -- Install phase onFailure InstallFailed $ criticalSection installLock $ do -- Capture installed package configuration file maybePkgConf <- maybeGenPkgConf mLogPath -- Actual installation withWin32SelfUpgrade verbosity pkg_key configFlags cinfo platform pkg $ do case rootCmd miscOptions of (Just cmd) -> reexec cmd Nothing -> do setup Cabal.copyCommand copyFlags mLogPath when shouldRegister $ do setup Cabal.registerCommand registerFlags mLogPath return (Right (BuildOk docsResult testsResult maybePkgConf)) where pkgid = packageId pkg buildCommand' = buildCommand defaultProgramConfiguration buildFlags _ = emptyBuildFlags { buildDistPref = configDistPref configFlags, buildVerbosity = toFlag verbosity' } shouldHaddock = fromFlag (installDocumentation installFlags) haddockFlags' _ = haddockFlags { haddockVerbosity = toFlag verbosity', haddockDistPref = configDistPref configFlags } testsEnabled = fromFlag (configTests configFlags) && fromFlagOrDefault False (installRunTests installFlags) testFlags _ = Cabal.emptyTestFlags { Cabal.testDistPref = configDistPref configFlags } copyFlags _ = Cabal.emptyCopyFlags { Cabal.copyDistPref = configDistPref configFlags, Cabal.copyDest = toFlag InstallDirs.NoCopyDest, Cabal.copyVerbosity = toFlag verbosity' } shouldRegister = PackageDescription.hasLibs pkg registerFlags _ = Cabal.emptyRegisterFlags { Cabal.regDistPref = configDistPref configFlags, Cabal.regVerbosity = toFlag verbosity' } verbosity' = maybe verbosity snd useLogFile tempTemplate name = name ++ "-" ++ display pkgid addDefaultInstallDirs :: ConfigFlags -> IO ConfigFlags addDefaultInstallDirs configFlags' = do defInstallDirs <- InstallDirs.defaultInstallDirs flavor userInstall False return $ configFlags' { configInstallDirs = fmap Cabal.Flag . 
InstallDirs.substituteInstallDirTemplates env $ InstallDirs.combineInstallDirs fromFlagOrDefault defInstallDirs (configInstallDirs configFlags) } where CompilerId flavor _ = compilerInfoId cinfo env = initialPathTemplateEnv pkgid pkg_key cinfo platform userInstall = fromFlagOrDefault defaultUserInstall (configUserInstall configFlags') maybeGenPkgConf :: Maybe FilePath -> IO (Maybe Installed.InstalledPackageInfo) maybeGenPkgConf mLogPath = if shouldRegister then do tmp <- getTemporaryDirectory withTempFile tmp (tempTemplate "pkgConf") $ \pkgConfFile handle -> do hClose handle let registerFlags' version = (registerFlags version) { Cabal.regGenPkgConf = toFlag (Just pkgConfFile) } setup Cabal.registerCommand registerFlags' mLogPath withUTF8FileContents pkgConfFile $ \pkgConfText -> case Installed.parseInstalledPackageInfo pkgConfText of Installed.ParseFailed perror -> pkgConfParseFailed perror Installed.ParseOk warns pkgConf -> do unless (null warns) $ warn verbosity $ unlines (map (showPWarning pkgConfFile) warns) return (Just pkgConf) else return Nothing pkgConfParseFailed :: Installed.PError -> IO a pkgConfParseFailed perror = die $ "Couldn't parse the output of 'setup register --gen-pkg-config':" ++ show perror maybeLogPath :: IO (Maybe FilePath) maybeLogPath = case useLogFile of Nothing -> return Nothing Just (mkLogFileName, _) -> do let logFileName = mkLogFileName (packageId pkg) pkg_key logDir = takeDirectory logFileName unless (null logDir) $ createDirectoryIfMissing True logDir logFileExists <- doesFileExist logFileName when logFileExists $ removeFile logFileName return (Just logFileName) setup cmd flags mLogPath = Exception.bracket (maybe (return Nothing) (\path -> Just `fmap` openFile path AppendMode) mLogPath) (maybe (return ()) hClose) (\logFileHandle -> setupWrapper verbosity scriptOptions { useLoggingHandle = logFileHandle , useWorkingDir = workingDir } (Just pkg) cmd flags []) reexec cmd = do -- look for our own executable file and re-exec ourselves using a helper -- program like sudo to elevate privileges: self <- getExecutablePath weExist <- doesFileExist self if weExist then inDir workingDir $ rawSystemExit verbosity cmd [self, "install", "--only" ,"--verbose=" ++ showForCabal verbosity] else die $ "Unable to find cabal executable at: " ++ self -- helper onFailure :: (SomeException -> BuildFailure) -> IO BuildResult -> IO BuildResult onFailure result action = action `catches` [ Handler $ \ioe -> handler (ioe :: IOException) , Handler $ \exit -> handler (exit :: ExitCode) ] where handler :: Exception e => e -> IO BuildResult handler = return . Left . result . 
toException -- ------------------------------------------------------------ -- * Weird windows hacks -- ------------------------------------------------------------ withWin32SelfUpgrade :: Verbosity -> PackageKey -> ConfigFlags -> CompilerInfo -> Platform -> PackageDescription -> IO a -> IO a withWin32SelfUpgrade _ _ _ _ _ _ action | buildOS /= Windows = action withWin32SelfUpgrade verbosity pkg_key configFlags cinfo platform pkg action = do defaultDirs <- InstallDirs.defaultInstallDirs compFlavor (fromFlag (configUserInstall configFlags)) (PackageDescription.hasLibs pkg) Win32SelfUpgrade.possibleSelfUpgrade verbosity (exeInstallPaths defaultDirs) action where pkgid = packageId pkg (CompilerId compFlavor _) = compilerInfoId cinfo exeInstallPaths defaultDirs = [ InstallDirs.bindir absoluteDirs exeName <.> exeExtension | exe <- PackageDescription.executables pkg , PackageDescription.buildable (PackageDescription.buildInfo exe) , let exeName = prefix ++ PackageDescription.exeName exe ++ suffix prefix = substTemplate prefixTemplate suffix = substTemplate suffixTemplate ] where fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "") prefixTemplate = fromFlagTemplate (configProgPrefix configFlags) suffixTemplate = fromFlagTemplate (configProgSuffix configFlags) templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault defaultDirs (configInstallDirs configFlags) absoluteDirs = InstallDirs.absoluteInstallDirs pkgid pkg_key cinfo InstallDirs.NoCopyDest platform templateDirs substTemplate = InstallDirs.fromPathTemplate . InstallDirs.substPathTemplate env where env = InstallDirs.initialPathTemplateEnv pkgid pkg_key cinfo platform cabal-install-1.22.6.0/Distribution/Client/Types.hs0000644000000000000000000002302012540225656020175 0ustar0000000000000000{-# LANGUAGE DeriveFunctor #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Types -- Copyright : (c) David Himmelstrup 2005 -- Duncan Coutts 2011 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Various common data types for the entire cabal-install system ----------------------------------------------------------------------------- module Distribution.Client.Types where import Distribution.Package ( PackageName, PackageId, Package(..), PackageFixedDeps(..) , mkPackageKey, PackageKey, InstalledPackageId(..) , PackageInstalled(..) ) import Distribution.InstalledPackageInfo ( InstalledPackageInfo, packageKey ) import Distribution.PackageDescription ( Benchmark(..), GenericPackageDescription(..), FlagAssignment , TestSuite(..) ) import Distribution.PackageDescription.Configuration ( mapTreeData ) import Distribution.Client.PackageIndex ( PackageIndex ) import Distribution.Version ( VersionRange ) import Distribution.Simple.Compiler ( Compiler, packageKeySupported ) import Distribution.Text (display) import Data.Map (Map) import Network.URI (URI) import Data.ByteString.Lazy (ByteString) import Control.Exception ( SomeException ) newtype Username = Username { unUsername :: String } newtype Password = Password { unPassword :: String } -- | This is the information we get from a @00-index.tar.gz@ hackage index. 
-- data SourcePackageDb = SourcePackageDb { packageIndex :: PackageIndex SourcePackage, packagePreferences :: Map PackageName VersionRange } -- ------------------------------------------------------------ -- * Various kinds of information about packages -- ------------------------------------------------------------ -- | TODO: This is a hack to help us transition from Cabal-1.6 to 1.8. -- What is new in 1.8 is that installed packages and dependencies between -- installed packages are now identified by an opaque InstalledPackageId -- rather than a source PackageId. -- -- We should use simply an 'InstalledPackageInfo' here but to ease the -- transition we are temporarily using this variant where we pretend that -- installed packages still specify their deps in terms of PackageIds. -- -- Crucially this means that 'InstalledPackage' can be an instance of -- 'PackageFixedDeps' where as 'InstalledPackageInfo' is no longer an instance -- of that class. This means we can make 'PackageIndex'es of InstalledPackage -- where as the InstalledPackageInfo now has its own monomorphic index type. -- data InstalledPackage = InstalledPackage InstalledPackageInfo [PackageId] instance Package InstalledPackage where packageId (InstalledPackage pkg _) = packageId pkg instance PackageFixedDeps InstalledPackage where depends (InstalledPackage _ deps) = deps instance PackageInstalled InstalledPackage where installedPackageId (InstalledPackage pkg _) = installedPackageId pkg installedDepends (InstalledPackage pkg _) = installedDepends pkg -- | In order to reuse the implementation of PackageIndex which relies on -- 'InstalledPackageId', we need to be able to synthesize these IDs prior -- to installation. Eventually, we'll move to a representation of -- 'InstalledPackageId' which can be properly computed before compilation -- (of course, it's a bit of a misnomer since the packages are not actually -- installed yet.) In any case, we'll synthesize temporary installed package -- IDs to use as keys during install planning. These should never be written -- out! Additionally, they need to be guaranteed unique within the install -- plan. fakeInstalledPackageId :: PackageId -> InstalledPackageId fakeInstalledPackageId = InstalledPackageId . (".fake."++) . display -- | A 'ConfiguredPackage' is a not-yet-installed package along with the -- total configuration information. The configuration information is total in -- the sense that it provides all the configuration information and so the -- final configure process will be independent of the environment. -- data ConfiguredPackage = ConfiguredPackage SourcePackage -- package info, including repo FlagAssignment -- complete flag assignment for the package [OptionalStanza] -- list of enabled optional stanzas for the package [PackageId] -- set of exact dependencies. These must be -- consistent with the 'buildDepends' in the -- 'PackageDescription' that you'd get by applying -- the flag assignment and optional stanzas. deriving Show instance Package ConfiguredPackage where packageId (ConfiguredPackage pkg _ _ _) = packageId pkg instance PackageFixedDeps ConfiguredPackage where depends (ConfiguredPackage _ _ _ deps) = deps instance PackageInstalled ConfiguredPackage where installedPackageId = fakeInstalledPackageId . packageId installedDepends = map fakeInstalledPackageId . depends -- | Like 'ConfiguredPackage', but with all dependencies guaranteed to be -- installed already, hence itself ready to be installed. data ReadyPackage = ReadyPackage SourcePackage -- see 'ConfiguredPackage'. 
FlagAssignment -- [OptionalStanza] -- [InstalledPackageInfo] -- Installed dependencies. deriving Show instance Package ReadyPackage where packageId (ReadyPackage pkg _ _ _) = packageId pkg instance PackageFixedDeps ReadyPackage where depends (ReadyPackage _ _ _ deps) = map packageId deps instance PackageInstalled ReadyPackage where installedPackageId = fakeInstalledPackageId . packageId installedDepends (ReadyPackage _ _ _ ipis) = map installedPackageId ipis -- | Extracts a package key from ReadyPackage, a common operation needed -- to calculate build paths. readyPackageKey :: Compiler -> ReadyPackage -> PackageKey readyPackageKey comp (ReadyPackage pkg _ _ deps) = mkPackageKey (packageKeySupported comp) (packageId pkg) (map packageKey deps) [] -- | Sometimes we need to convert a 'ReadyPackage' back to a -- 'ConfiguredPackage'. For example, a failed 'PlanPackage' can be *either* -- Ready or Configured. readyPackageToConfiguredPackage :: ReadyPackage -> ConfiguredPackage readyPackageToConfiguredPackage (ReadyPackage srcpkg flags stanzas deps) = ConfiguredPackage srcpkg flags stanzas (map packageId deps) -- | A package description along with the location of the package sources. -- data SourcePackage = SourcePackage { packageInfoId :: PackageId, packageDescription :: GenericPackageDescription, packageSource :: PackageLocation (Maybe FilePath), packageDescrOverride :: PackageDescriptionOverride } deriving Show -- | We sometimes need to override the .cabal file in the tarball with -- the newer one from the package index. type PackageDescriptionOverride = Maybe ByteString instance Package SourcePackage where packageId = packageInfoId data OptionalStanza = TestStanzas | BenchStanzas deriving (Eq, Ord, Show) enableStanzas :: [OptionalStanza] -> GenericPackageDescription -> GenericPackageDescription enableStanzas stanzas gpkg = gpkg { condBenchmarks = flagBenchmarks $ condBenchmarks gpkg , condTestSuites = flagTests $ condTestSuites gpkg } where enableTest t = t { testEnabled = TestStanzas `elem` stanzas } enableBenchmark bm = bm { benchmarkEnabled = BenchStanzas `elem` stanzas } flagBenchmarks = map (\(n, bm) -> (n, mapTreeData enableBenchmark bm)) flagTests = map (\(n, t) -> (n, mapTreeData enableTest t)) -- ------------------------------------------------------------ -- * Package locations and repositories -- ------------------------------------------------------------ data PackageLocation local = -- | An unpacked package in the given dir, or current dir LocalUnpackedPackage FilePath -- | A package as a tarball that's available as a local tarball | LocalTarballPackage FilePath -- | A package as a tarball from a remote URI | RemoteTarballPackage URI local -- | A package available as a tarball from a repository. -- -- It may be from a local repository or from a remote repository, with a -- locally cached copy. 
ie a package available from hackage | RepoTarballPackage Repo PackageId local --TODO: -- * add support for darcs and other SCM style remote repos with a local cache -- | ScmPackage deriving (Show, Functor) data LocalRepo = LocalRepo deriving (Show,Eq) data RemoteRepo = RemoteRepo { remoteRepoName :: String, remoteRepoURI :: URI } deriving (Show,Eq,Ord) data Repo = Repo { repoKind :: Either RemoteRepo LocalRepo, repoLocalDir :: FilePath } deriving (Show,Eq) -- ------------------------------------------------------------ -- * Build results -- ------------------------------------------------------------ type BuildResult = Either BuildFailure BuildSuccess data BuildFailure = PlanningFailed | DependentFailed PackageId | DownloadFailed SomeException | UnpackFailed SomeException | ConfigureFailed SomeException | BuildFailed SomeException | TestsFailed SomeException | InstallFailed SomeException data BuildSuccess = BuildOk DocsResult TestsResult (Maybe InstalledPackageInfo) data DocsResult = DocsNotTried | DocsFailed | DocsOk data TestsResult = TestsNotTried | TestsOk cabal-install-1.22.6.0/Distribution/Client/SetupWrapper.hs0000644000000000000000000006311412540225656021542 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.SetupWrapper -- Copyright : (c) The University of Glasgow 2006, -- Duncan Coutts 2008 -- -- Maintainer : cabal-devel@haskell.org -- Stability : alpha -- Portability : portable -- -- An interface to building and installing Cabal packages. -- If the @Built-Type@ field is specified as something other than -- 'Custom', and the current version of Cabal is acceptable, this performs -- setup actions directly. Otherwise it builds the setup script and -- runs it with the given arguments. module Distribution.Client.SetupWrapper ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions, ) where import qualified Distribution.Make as Make import qualified Distribution.Simple as Simple import Distribution.Version ( Version(..), VersionRange, anyVersion , intersectVersionRanges, orLaterVersion , withinRange ) import Distribution.InstalledPackageInfo (installedPackageId) import Distribution.Package ( InstalledPackageId(..), PackageIdentifier(..), PackageName(..), Package(..), packageName , packageVersion, Dependency(..) 
) import Distribution.PackageDescription ( GenericPackageDescription(packageDescription) , PackageDescription(..), specVersion , BuildType(..), knownBuildTypes, defaultRenaming ) import Distribution.PackageDescription.Parse ( readPackageDescription ) import Distribution.Simple.Configure ( configCompilerEx ) import Distribution.Compiler ( buildCompilerId, CompilerFlavor(GHC, GHCJS) ) import Distribution.Simple.Compiler ( Compiler(compilerId), compilerFlavor, PackageDB(..), PackageDBStack ) import Distribution.Simple.PreProcess ( runSimplePreProcessor, ppUnlit ) import Distribution.Simple.Program ( ProgramConfiguration, emptyProgramConfiguration , getProgramSearchPath, getDbProgramOutput, runDbProgram, ghcProgram , ghcjsProgram ) import Distribution.Simple.Program.Find ( programSearchPathAsPATHVar ) import Distribution.Simple.Program.Run ( getEffectiveEnvironment ) import qualified Distribution.Simple.Program.Strip as Strip import Distribution.Simple.BuildPaths ( defaultDistPref, exeExtension ) import Distribution.Simple.Command ( CommandUI(..), commandShowOptions ) import Distribution.Simple.Program.GHC ( GhcMode(..), GhcOptions(..), renderGhcOptions ) import qualified Distribution.Simple.PackageIndex as PackageIndex import Distribution.Simple.PackageIndex (InstalledPackageIndex) import Distribution.Client.Config ( defaultCabalDir ) import Distribution.Client.IndexUtils ( getInstalledPackages ) import Distribution.Client.JobControl ( Lock, criticalSection ) import Distribution.Simple.Setup ( Flag(..) ) import Distribution.Simple.Utils ( die, debug, info, cabalVersion, tryFindPackageDesc, comparing , createDirectoryIfMissingVerbose, installExecutableFile , copyFileVerbose, rewriteFile, intercalate ) import Distribution.Client.Utils ( inDir, tryCanonicalizePath , existsAndIsMoreRecentThan, moreRecentFile #if mingw32_HOST_OS , canonicalizePathNoThrow #endif ) import Distribution.System ( Platform(..), buildPlatform ) import Distribution.Text ( display ) import Distribution.Utils.NubList ( toNubListR ) import Distribution.Verbosity ( Verbosity ) import Distribution.Compat.Exception ( catchIO ) import System.Directory ( doesFileExist ) import System.FilePath ( (), (<.>) ) import System.IO ( Handle, hPutStr ) import System.Exit ( ExitCode(..), exitWith ) import System.Process ( runProcess, waitForProcess ) #if !MIN_VERSION_base(4,8,0) import Control.Applicative ( (<$>), (<*>) ) #endif import Control.Monad ( when, unless ) import Data.List ( foldl1' ) import Data.Maybe ( fromMaybe, isJust ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( mempty ) #endif import Data.Char ( isSpace ) #ifdef mingw32_HOST_OS import Distribution.Simple.Utils ( withTempDirectory ) import Control.Exception ( bracket ) import System.FilePath ( equalFilePath, takeDirectory ) import System.Directory ( doesDirectoryExist ) import qualified System.Win32 as Win32 #endif data SetupScriptOptions = SetupScriptOptions { useCabalVersion :: VersionRange, useCompiler :: Maybe Compiler, usePlatform :: Maybe Platform, usePackageDB :: PackageDBStack, usePackageIndex :: Maybe InstalledPackageIndex, useProgramConfig :: ProgramConfiguration, useDistPref :: FilePath, useLoggingHandle :: Maybe Handle, useWorkingDir :: Maybe FilePath, forceExternalSetupMethod :: Bool, -- Used only by 'cabal clean' on Windows. -- -- Note: win32 clean hack ------------------------- -- On Windows, running './dist/setup/setup clean' doesn't work because the -- setup script will try to delete itself (which causes it to fail horribly, -- unlike on Linux). 
So we have to move the setup exe out of the way first -- and then delete it manually. This applies only to the external setup -- method. useWin32CleanHack :: Bool, -- Used only when calling setupWrapper from parallel code to serialise -- access to the setup cache; should be Nothing otherwise. -- -- Note: setup exe cache ------------------------ -- When we are installing in parallel, we always use the external setup -- method. Since compiling the setup script each time adds noticeable -- overhead, we use a shared setup script cache -- ('~/.cabal/setup-exe-cache'). For each (compiler, platform, Cabal -- version) combination the cache holds a compiled setup script -- executable. This only affects the Simple build type; for the Custom, -- Configure and Make build types we always compile the setup script anew. setupCacheLock :: Maybe Lock } defaultSetupScriptOptions :: SetupScriptOptions defaultSetupScriptOptions = SetupScriptOptions { useCabalVersion = anyVersion, useCompiler = Nothing, usePlatform = Nothing, usePackageDB = [GlobalPackageDB, UserPackageDB], usePackageIndex = Nothing, useProgramConfig = emptyProgramConfiguration, useDistPref = defaultDistPref, useLoggingHandle = Nothing, useWorkingDir = Nothing, useWin32CleanHack = False, forceExternalSetupMethod = False, setupCacheLock = Nothing } setupWrapper :: Verbosity -> SetupScriptOptions -> Maybe PackageDescription -> CommandUI flags -> (Version -> flags) -> [String] -> IO () setupWrapper verbosity options mpkg cmd flags extraArgs = do pkg <- maybe getPkg return mpkg let setupMethod = determineSetupMethod options' buildType' options' = options { useCabalVersion = intersectVersionRanges (useCabalVersion options) (orLaterVersion (specVersion pkg)) } buildType' = fromMaybe Custom (buildType pkg) mkArgs cabalLibVersion = commandName cmd : commandShowOptions cmd (flags cabalLibVersion) ++ extraArgs checkBuildType buildType' setupMethod verbosity options' (packageId pkg) buildType' mkArgs where getPkg = tryFindPackageDesc (fromMaybe "." (useWorkingDir options)) >>= readPackageDescription verbosity >>= return . packageDescription checkBuildType (UnknownBuildType name) = die $ "The build-type '" ++ name ++ "' is not known. Use one of: " ++ intercalate ", " (map display knownBuildTypes) ++ "." checkBuildType _ = return () -- | Decide if we're going to be able to do a direct internal call to the -- entry point in the Cabal library or if we're going to have to compile -- and execute an external Setup.hs script. 
-- determineSetupMethod :: SetupScriptOptions -> BuildType -> SetupMethod determineSetupMethod options buildType' | forceExternalSetupMethod options = externalSetupMethod | isJust (useLoggingHandle options) || buildType' == Custom = externalSetupMethod | cabalVersion `withinRange` useCabalVersion options = internalSetupMethod | otherwise = externalSetupMethod type SetupMethod = Verbosity -> SetupScriptOptions -> PackageIdentifier -> BuildType -> (Version -> [String]) -> IO () -- ------------------------------------------------------------ -- * Internal SetupMethod -- ------------------------------------------------------------ internalSetupMethod :: SetupMethod internalSetupMethod verbosity options _ bt mkargs = do let args = mkargs cabalVersion debug verbosity $ "Using internal setup method with build-type " ++ show bt ++ " and args:\n " ++ show args inDir (useWorkingDir options) $ buildTypeAction bt args buildTypeAction :: BuildType -> ([String] -> IO ()) buildTypeAction Simple = Simple.defaultMainArgs buildTypeAction Configure = Simple.defaultMainWithHooksArgs Simple.autoconfUserHooks buildTypeAction Make = Make.defaultMainArgs buildTypeAction Custom = error "buildTypeAction Custom" buildTypeAction (UnknownBuildType _) = error "buildTypeAction UnknownBuildType" -- ------------------------------------------------------------ -- * External SetupMethod -- ------------------------------------------------------------ externalSetupMethod :: SetupMethod externalSetupMethod verbosity options pkg bt mkargs = do debug verbosity $ "Using external setup method with build-type " ++ show bt createDirectoryIfMissingVerbose verbosity True setupDir (cabalLibVersion, mCabalLibInstalledPkgId, options') <- cabalLibVersionToUse debug verbosity $ "Using Cabal library version " ++ display cabalLibVersion path <- if useCachedSetupExecutable then getCachedSetupExecutable options' cabalLibVersion mCabalLibInstalledPkgId else compileSetupExecutable options' cabalLibVersion mCabalLibInstalledPkgId False invokeSetupScript options' path (mkargs cabalLibVersion) where workingDir = case fromMaybe "" (useWorkingDir options) of [] -> "." dir -> dir setupDir = workingDir useDistPref options "setup" setupVersionFile = setupDir "setup" <.> "version" setupHs = setupDir "setup" <.> "hs" setupProgFile = setupDir "setup" <.> exeExtension platform = fromMaybe buildPlatform (usePlatform options) useCachedSetupExecutable = (bt == Simple || bt == Configure || bt == Make) maybeGetInstalledPackages :: SetupScriptOptions -> Compiler -> ProgramConfiguration -> IO InstalledPackageIndex maybeGetInstalledPackages options' comp conf = case usePackageIndex options' of Just index -> return index Nothing -> getInstalledPackages verbosity comp (usePackageDB options') conf cabalLibVersionToUse :: IO (Version, (Maybe InstalledPackageId) ,SetupScriptOptions) cabalLibVersionToUse = do savedVer <- savedVersion case savedVer of Just version | version `withinRange` useCabalVersion options -> do updateSetupScript version bt -- Does the previously compiled setup executable still exist and -- is it up-to date? useExisting <- canUseExistingSetup version if useExisting then return (version, Nothing, options) else installedVersion _ -> installedVersion where -- This check duplicates the checks in 'getCachedSetupExecutable' / -- 'compileSetupExecutable'. Unfortunately, we have to perform it twice -- because the selected Cabal version may change as a result of this -- check. 
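-- [Editor's aside: illustrative sketch, not part of the original source.
-- 'canUseExistingSetup' below asks whether the previously compiled setup
-- executable is still newer than both Setup.hs and the setup.version
-- file.  A standalone approximation of that freshness test; the real
-- 'existsAndIsMoreRecentThan' lives in Distribution.Client.Utils and is
-- more careful about missing reference files, which this stand-in assumes
-- exist:]
import Control.Monad (liftM2)
import System.Directory (doesFileExist, getModificationTime)

newerThan :: FilePath -> FilePath -> IO Bool
newerThan f ref = do
  exists <- doesFileExist f
  if not exists
    then return False
    else liftM2 (>=) (getModificationTime f) (getModificationTime ref)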
canUseExistingSetup :: Version -> IO Bool canUseExistingSetup version = if useCachedSetupExecutable then do (_, cachedSetupProgFile) <- cachedSetupDirAndProg options version doesFileExist cachedSetupProgFile else (&&) <$> setupProgFile `existsAndIsMoreRecentThan` setupHs <*> setupProgFile `existsAndIsMoreRecentThan` setupVersionFile installedVersion :: IO (Version, Maybe InstalledPackageId ,SetupScriptOptions) installedVersion = do (comp, conf, options') <- configureCompiler options (version, mipkgid, options'') <- installedCabalVersion options' comp conf updateSetupScript version bt writeFile setupVersionFile (show version ++ "\n") return (version, mipkgid, options'') savedVersion :: IO (Maybe Version) savedVersion = do versionString <- readFile setupVersionFile `catchIO` \_ -> return "" case reads versionString of [(version,s)] | all isSpace s -> return (Just version) _ -> return Nothing -- | Update a Setup.hs script, creating it if necessary. updateSetupScript :: Version -> BuildType -> IO () updateSetupScript _ Custom = do useHs <- doesFileExist customSetupHs useLhs <- doesFileExist customSetupLhs unless (useHs || useLhs) $ die "Using 'build-type: Custom' but there is no Setup.hs or Setup.lhs script." let src = (if useHs then customSetupHs else customSetupLhs) srcNewer <- src `moreRecentFile` setupHs when srcNewer $ if useHs then copyFileVerbose verbosity src setupHs else runSimplePreProcessor ppUnlit src setupHs verbosity where customSetupHs = workingDir "Setup.hs" customSetupLhs = workingDir "Setup.lhs" updateSetupScript cabalLibVersion _ = rewriteFile setupHs (buildTypeScript cabalLibVersion) buildTypeScript :: Version -> String buildTypeScript cabalLibVersion = case bt of Simple -> "import Distribution.Simple; main = defaultMain\n" Configure -> "import Distribution.Simple; main = defaultMainWithHooks " ++ if cabalLibVersion >= Version [1,3,10] [] then "autoconfUserHooks\n" else "defaultUserHooks\n" Make -> "import Distribution.Make; main = defaultMain\n" Custom -> error "buildTypeScript Custom" UnknownBuildType _ -> error "buildTypeScript UnknownBuildType" installedCabalVersion :: SetupScriptOptions -> Compiler -> ProgramConfiguration -> IO (Version, Maybe InstalledPackageId ,SetupScriptOptions) installedCabalVersion options' _ _ | packageName pkg == PackageName "Cabal" = return (packageVersion pkg, Nothing, options') installedCabalVersion options' compiler conf = do index <- maybeGetInstalledPackages options' compiler conf let cabalDep = Dependency (PackageName "Cabal") (useCabalVersion options') options'' = options' { usePackageIndex = Just index } case PackageIndex.lookupDependency index cabalDep of [] -> die $ "The package '" ++ display (packageName pkg) ++ "' requires Cabal library version " ++ display (useCabalVersion options) ++ " but no suitable version is installed." pkgs -> let ipkginfo = head . snd . bestVersion fst $ pkgs in return (packageVersion ipkginfo ,Just . installedPackageId $ ipkginfo, options'') bestVersion :: (a -> Version) -> [a] -> a bestVersion f = firstMaximumBy (comparing (preference . f)) where -- Like maximumBy, but picks the first maximum element instead of the -- last. In general, we expect the preferred version to go first in the -- list. For the default case, this has the effect of choosing the version -- installed in the user package DB instead of the global one. See #1463. -- -- Note: firstMaximumBy could be written as just -- `maximumBy cmp . 
reverse`, but the problem is that the behaviour of -- maximumBy is not fully specified in the case when there is not a single -- greatest element. firstMaximumBy :: (a -> a -> Ordering) -> [a] -> a firstMaximumBy _ [] = error "Distribution.Client.firstMaximumBy: empty list" firstMaximumBy cmp xs = foldl1' maxBy xs where maxBy x y = case cmp x y of { GT -> x; EQ -> x; LT -> y; } preference version = (sameVersion, sameMajorVersion ,stableVersion, latestVersion) where sameVersion = version == cabalVersion sameMajorVersion = majorVersion version == majorVersion cabalVersion majorVersion = take 2 . versionBranch stableVersion = case versionBranch version of (_:x:_) -> even x _ -> False latestVersion = version configureCompiler :: SetupScriptOptions -> IO (Compiler, ProgramConfiguration, SetupScriptOptions) configureCompiler options' = do (comp, conf) <- case useCompiler options' of Just comp -> return (comp, useProgramConfig options') Nothing -> do (comp, _, conf) <- configCompilerEx (Just GHC) Nothing Nothing (useProgramConfig options') verbosity return (comp, conf) -- Whenever we need to call configureCompiler, we also need to access the -- package index, so let's cache it in SetupScriptOptions. index <- maybeGetInstalledPackages options' comp conf return (comp, conf, options' { useCompiler = Just comp, usePackageIndex = Just index, useProgramConfig = conf }) -- | Path to the setup exe cache directory and path to the cached setup -- executable. cachedSetupDirAndProg :: SetupScriptOptions -> Version -> IO (FilePath, FilePath) cachedSetupDirAndProg options' cabalLibVersion = do cabalDir <- defaultCabalDir let setupCacheDir = cabalDir "setup-exe-cache" cachedSetupProgFile = setupCacheDir ("setup-" ++ buildTypeString ++ "-" ++ cabalVersionString ++ "-" ++ platformString ++ "-" ++ compilerVersionString) <.> exeExtension return (setupCacheDir, cachedSetupProgFile) where buildTypeString = show bt cabalVersionString = "Cabal-" ++ (display cabalLibVersion) compilerVersionString = display $ fromMaybe buildCompilerId (fmap compilerId . useCompiler $ options') platformString = display platform -- | Look up the setup executable in the cache; update the cache if the setup -- executable is not found. getCachedSetupExecutable :: SetupScriptOptions -> Version -> Maybe InstalledPackageId -> IO FilePath getCachedSetupExecutable options' cabalLibVersion maybeCabalLibInstalledPkgId = do (setupCacheDir, cachedSetupProgFile) <- cachedSetupDirAndProg options' cabalLibVersion cachedSetupExists <- doesFileExist cachedSetupProgFile if cachedSetupExists then debug verbosity $ "Found cached setup executable: " ++ cachedSetupProgFile else criticalSection' $ do -- The cache may have been populated while we were waiting. cachedSetupExists' <- doesFileExist cachedSetupProgFile if cachedSetupExists' then debug verbosity $ "Found cached setup executable: " ++ cachedSetupProgFile else do debug verbosity $ "Setup executable not found in the cache." 
src <- compileSetupExecutable options' cabalLibVersion maybeCabalLibInstalledPkgId True createDirectoryIfMissingVerbose verbosity True setupCacheDir installExecutableFile verbosity src cachedSetupProgFile -- Do not strip if we're using GHCJS, since the result may be a script when (maybe True ((/=GHCJS).compilerFlavor) $ useCompiler options') $ Strip.stripExe verbosity platform (useProgramConfig options') cachedSetupProgFile return cachedSetupProgFile where criticalSection' = fromMaybe id (fmap criticalSection $ setupCacheLock options') -- | If the Setup.hs is out of date wrt the executable then recompile it. -- Currently this is GHC/GHCJS only. It should really be generalised. -- compileSetupExecutable :: SetupScriptOptions -> Version -> Maybe InstalledPackageId -> Bool -> IO FilePath compileSetupExecutable options' cabalLibVersion maybeCabalLibInstalledPkgId forceCompile = do setupHsNewer <- setupHs `moreRecentFile` setupProgFile cabalVersionNewer <- setupVersionFile `moreRecentFile` setupProgFile let outOfDate = setupHsNewer || cabalVersionNewer when (outOfDate || forceCompile) $ do debug verbosity "Setup executable needs to be updated, compiling..." (compiler, conf, options'') <- configureCompiler options' let cabalPkgid = PackageIdentifier (PackageName "Cabal") cabalLibVersion (program, extraOpts) = case compilerFlavor compiler of GHCJS -> (ghcjsProgram, ["-build-runner"]) _ -> (ghcProgram, ["-threaded"]) ghcOptions = mempty { ghcOptVerbosity = Flag verbosity , ghcOptMode = Flag GhcModeMake , ghcOptInputFiles = toNubListR [setupHs] , ghcOptOutputFile = Flag setupProgFile , ghcOptObjDir = Flag setupDir , ghcOptHiDir = Flag setupDir , ghcOptSourcePathClear = Flag True , ghcOptSourcePath = toNubListR [workingDir] , ghcOptPackageDBs = usePackageDB options'' , ghcOptPackages = toNubListR $ maybe [] (\ipkgid -> [(ipkgid, cabalPkgid, defaultRenaming)]) maybeCabalLibInstalledPkgId , ghcOptExtra = toNubListR extraOpts } let ghcCmdLine = renderGhcOptions compiler ghcOptions case useLoggingHandle options of Nothing -> runDbProgram verbosity program conf ghcCmdLine -- If build logging is enabled, redirect compiler output to the log file. (Just logHandle) -> do output <- getDbProgramOutput verbosity program conf ghcCmdLine hPutStr logHandle output return setupProgFile invokeSetupScript :: SetupScriptOptions -> FilePath -> [String] -> IO () invokeSetupScript options' path args = do info verbosity $ unwords (path : args) case useLoggingHandle options' of Nothing -> return () Just logHandle -> info verbosity $ "Redirecting build log to " ++ show logHandle -- Since useWorkingDir can change the relative path, the path argument must -- be turned into an absolute path. On some systems, runProcess will take -- path as relative to the new working directory instead of the current -- working directory. path' <- tryCanonicalizePath path -- See 'Note: win32 clean hack' above. #if mingw32_HOST_OS -- setupProgFile may not exist if we're using a cached program setupProgFile' <- canonicalizePathNoThrow setupProgFile let win32CleanHackNeeded = (useWin32CleanHack options') -- Skip when a cached setup script is used. 
&& setupProgFile' `equalFilePath` path' if win32CleanHackNeeded then doWin32CleanHack path' else doInvoke path' #else doInvoke path' #endif where doInvoke path' = do searchpath <- programSearchPathAsPATHVar (getProgramSearchPath (useProgramConfig options')) env <- getEffectiveEnvironment [("PATH", Just searchpath)] process <- runProcess path' args (useWorkingDir options') env Nothing (useLoggingHandle options') (useLoggingHandle options') exitCode <- waitForProcess process unless (exitCode == ExitSuccess) $ exitWith exitCode #if mingw32_HOST_OS doWin32CleanHack path' = do info verbosity $ "Using the Win32 clean hack." -- Recursively removes the temp dir on exit. withTempDirectory verbosity workingDir "cabal-tmp" $ \tmpDir -> bracket (moveOutOfTheWay tmpDir path') (maybeRestore path') doInvoke moveOutOfTheWay tmpDir path' = do let newPath = tmpDir "setup" <.> exeExtension Win32.moveFile path' newPath return newPath maybeRestore oldPath path' = do let oldPathDir = takeDirectory oldPath oldPathDirExists <- doesDirectoryExist oldPathDir -- 'setup clean' didn't complete, 'dist/setup' still exists. when oldPathDirExists $ Win32.moveFile path' oldPath #endif cabal-install-1.22.6.0/Distribution/Client/Freeze.hs0000644000000000000000000002056112540225656020320 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Freeze -- Copyright : (c) David Himmelstrup 2005 -- Duncan Coutts 2011 -- License : BSD-like -- -- Maintainer : cabal-devel@gmail.com -- Stability : provisional -- Portability : portable -- -- The cabal freeze command ----------------------------------------------------------------------------- module Distribution.Client.Freeze ( freeze, ) where import Distribution.Client.Config ( SavedConfig(..) ) import Distribution.Client.Types import Distribution.Client.Targets import Distribution.Client.Dependency import Distribution.Client.IndexUtils as IndexUtils ( getSourcePackages, getInstalledPackages ) import Distribution.Client.InstallPlan ( PlanPackage ) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.Setup ( GlobalFlags(..), FreezeFlags(..), ConfigExFlags(..) ) import Distribution.Client.Sandbox.PackageEnvironment ( loadUserConfig, pkgEnvSavedConfig, showPackageEnvironment, userPackageEnvironmentFile ) import Distribution.Client.Sandbox.Types ( SandboxPackageInfo(..) 
) import Distribution.Package ( Package, PackageIdentifier, packageId, packageName, packageVersion ) import Distribution.Simple.Compiler ( Compiler, compilerInfo, PackageDBStack ) import Distribution.Simple.PackageIndex (InstalledPackageIndex) import qualified Distribution.Client.PackageIndex as PackageIndex import Distribution.Simple.Program ( ProgramConfiguration ) import Distribution.Simple.Setup ( fromFlag, fromFlagOrDefault ) import Distribution.Simple.Utils ( die, notice, debug, writeFileAtomic ) import Distribution.System ( Platform ) import Distribution.Text ( display ) import Distribution.Verbosity ( Verbosity ) import Control.Monad ( when ) import qualified Data.ByteString.Lazy.Char8 as BS.Char8 #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( mempty ) #endif import Data.Version ( showVersion ) import Distribution.Version ( thisVersion ) -- ------------------------------------------------------------ -- * The freeze command -- ------------------------------------------------------------ -- | Freeze all of the dependencies by writing a constraints section -- constraining each dependency to an exact version. -- freeze :: Verbosity -> PackageDBStack -> [Repo] -> Compiler -> Platform -> ProgramConfiguration -> Maybe SandboxPackageInfo -> GlobalFlags -> FreezeFlags -> IO () freeze verbosity packageDBs repos comp platform conf mSandboxPkgInfo globalFlags freezeFlags = do installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf sourcePkgDb <- getSourcePackages verbosity repos pkgSpecifiers <- resolveUserTargets verbosity (fromFlag $ globalWorldFile globalFlags) (packageIndex sourcePkgDb) [UserTargetLocalDir "."] sanityCheck pkgSpecifiers pkgs <- planPackages verbosity comp platform mSandboxPkgInfo freezeFlags installedPkgIndex sourcePkgDb pkgSpecifiers if null pkgs then notice verbosity $ "No packages to be frozen. " ++ "As this package has no dependencies." else if dryRun then notice verbosity $ unlines $ "The following packages would be frozen:" : formatPkgs pkgs else freezePackages verbosity pkgs where dryRun = fromFlag (freezeDryRun freezeFlags) sanityCheck pkgSpecifiers = do when (not . null $ [n | n@(NamedPackage _ _) <- pkgSpecifiers]) $ die $ "internal error: 'resolveUserTargets' returned " ++ "unexpected named package specifiers!" when (length pkgSpecifiers /= 1) $ die $ "internal error: 'resolveUserTargets' returned " ++ "unexpected source package specifiers!" planPackages :: Verbosity -> Compiler -> Platform -> Maybe SandboxPackageInfo -> FreezeFlags -> InstalledPackageIndex -> SourcePackageDb -> [PackageSpecifier SourcePackage] -> IO [PlanPackage] planPackages verbosity comp platform mSandboxPkgInfo freezeFlags installedPkgIndex sourcePkgDb pkgSpecifiers = do solver <- chooseSolver verbosity (fromFlag (freezeSolver freezeFlags)) (compilerInfo comp) notice verbosity "Resolving dependencies..." installPlan <- foldProgress logMsg die return $ resolveDependencies platform (compilerInfo comp) solver resolverParams return $ either id (error "planPackages: installPlan contains broken packages") (pruneInstallPlan installPlan pkgSpecifiers) where resolverParams = setMaxBackjumps (if maxBackjumps < 0 then Nothing else Just maxBackjumps) . setIndependentGoals independentGoals . setReorderGoals reorderGoals . setShadowPkgs shadowPkgs . setStrongFlags strongFlags . addConstraints [ PackageConstraintStanzas (pkgSpecifierTarget pkgSpecifier) stanzas | pkgSpecifier <- pkgSpecifiers ] . 
maybe id applySandboxInstallPolicy mSandboxPkgInfo $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers logMsg message rest = debug verbosity message >> rest stanzas = concat [ if testsEnabled then [TestStanzas] else [] , if benchmarksEnabled then [BenchStanzas] else [] ] testsEnabled = fromFlagOrDefault False $ freezeTests freezeFlags benchmarksEnabled = fromFlagOrDefault False $ freezeBenchmarks freezeFlags reorderGoals = fromFlag (freezeReorderGoals freezeFlags) independentGoals = fromFlag (freezeIndependentGoals freezeFlags) shadowPkgs = fromFlag (freezeShadowPkgs freezeFlags) strongFlags = fromFlag (freezeStrongFlags freezeFlags) maxBackjumps = fromFlag (freezeMaxBackjumps freezeFlags) -- | Remove all unneeded packages from an install plan. -- -- A package is unneeded if it is either -- -- 1) the package that we are freezing, or -- -- 2) not a dependency (directly or transitively) of the package we are -- freezing. This is useful for removing previously installed packages -- which are no longer required from the install plan. pruneInstallPlan :: InstallPlan.InstallPlan -> [PackageSpecifier SourcePackage] -> Either [PlanPackage] [(PlanPackage, [PackageIdentifier])] pruneInstallPlan installPlan pkgSpecifiers = mapLeft (removeSelf pkgIds . PackageIndex.allPackages) $ PackageIndex.dependencyClosure pkgIdx pkgIds where pkgIdx = PackageIndex.fromList $ InstallPlan.toList installPlan pkgIds = [ packageId pkg | SpecificSourcePackage pkg <- pkgSpecifiers ] mapLeft f (Left v) = Left $ f v mapLeft _ (Right v) = Right v removeSelf [thisPkg] = filter (\pp -> packageId pp /= thisPkg) removeSelf _ = error $ "internal error: 'pruneInstallPlan' given " ++ "unexpected package specifiers!" freezePackages :: Package pkg => Verbosity -> [pkg] -> IO () freezePackages verbosity pkgs = do pkgEnv <- fmap (createPkgEnv . addFrozenConstraints) $ loadUserConfig verbosity "" writeFileAtomic userPackageEnvironmentFile $ showPkgEnv pkgEnv where addFrozenConstraints config = config { savedConfigureExFlags = (savedConfigureExFlags config) { configExConstraints = constraints pkgs } } constraints = map $ pkgIdToConstraint . packageId where pkgIdToConstraint pkg = UserConstraintVersion (packageName pkg) (thisVersion $ packageVersion pkg) createPkgEnv config = mempty { pkgEnvSavedConfig = config } showPkgEnv = BS.Char8.pack . showPackageEnvironment formatPkgs :: Package pkg => [pkg] -> [String] formatPkgs = map $ showPkg . packageId where showPkg pid = name pid ++ " == " ++ version pid name = display . packageName version = showVersion . packageVersion cabal-install-1.22.6.0/Distribution/Client/Update.hs0000644000000000000000000000366412540225656020327 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Update -- Copyright : (c) David Himmelstrup 2005 -- License : BSD-like -- -- Maintainer : lemmih@gmail.com -- Stability : provisional -- Portability : portable -- -- ----------------------------------------------------------------------------- module Distribution.Client.Update ( update ) where import Distribution.Client.Types ( Repo(..), RemoteRepo(..), LocalRepo(..) ) import Distribution.Client.HttpUtils ( DownloadResult(..) 
) import Distribution.Client.FetchUtils ( downloadIndex ) import Distribution.Client.IndexUtils ( updateRepoIndexCache ) import Distribution.Simple.Utils ( writeFileAtomic, warn, notice ) import Distribution.Verbosity ( Verbosity ) import qualified Data.ByteString.Lazy as BS import Distribution.Client.GZipUtils (maybeDecompress) import System.FilePath (dropExtension) -- | 'update' downloads the package list from all known servers update :: Verbosity -> [Repo] -> IO () update verbosity [] = warn verbosity $ "No remote package servers have been specified. Usually " ++ "you would have one specified in the config file." update verbosity repos = do mapM_ (updateRepo verbosity) repos updateRepo :: Verbosity -> Repo -> IO () updateRepo verbosity repo = case repoKind repo of Right LocalRepo -> return () Left remoteRepo -> do notice verbosity $ "Downloading the latest package list from " ++ remoteRepoName remoteRepo downloadResult <- downloadIndex verbosity remoteRepo (repoLocalDir repo) case downloadResult of FileAlreadyInCache -> return () FileDownloaded indexPath -> do writeFileAtomic (dropExtension indexPath) . maybeDecompress =<< BS.readFile indexPath updateRepoIndexCache verbosity repo cabal-install-1.22.6.0/Distribution/Client/InstallPlan.hs0000644000000000000000000006371312540225656021327 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.InstallPlan -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : duncan@community.haskell.org -- Stability : provisional -- Portability : portable -- -- Package installation plan -- ----------------------------------------------------------------------------- module Distribution.Client.InstallPlan ( InstallPlan, ConfiguredPackage(..), PlanPackage(..), -- * Operations on 'InstallPlan's new, toList, ready, processing, completed, failed, remove, showPlanIndex, showInstallPlan, -- ** Query functions planPlatform, planCompiler, -- * Checking validity of plans valid, closed, consistent, acyclic, configuredPackageValid, -- ** Details on invalid plans PlanProblem(..), showPlanProblem, PackageProblem(..), showPackageProblem, problems, configuredPackageProblems ) where import Distribution.Client.Types ( SourcePackage(packageDescription), ConfiguredPackage(..) , ReadyPackage(..), readyPackageToConfiguredPackage , InstalledPackage, BuildFailure, BuildSuccess(..), enableStanzas , InstalledPackage(..), fakeInstalledPackageId ) import Distribution.Package ( PackageIdentifier(..), PackageName(..), Package(..), packageName , PackageFixedDeps(..), Dependency(..), InstalledPackageId , PackageInstalled(..) ) import Distribution.Version ( Version, withinRange ) import Distribution.PackageDescription ( GenericPackageDescription(genPackageFlags) , Flag(flagName), FlagName(..) ) import Distribution.Client.PackageUtils ( externalBuildDepends ) import Distribution.PackageDescription.Configuration ( finalizePackageDescription ) import Distribution.Simple.PackageIndex ( PackageIndex, FakeMap ) import qualified Distribution.Simple.PackageIndex as PackageIndex import Distribution.Text ( display ) import Distribution.System ( Platform ) import Distribution.Compiler ( CompilerInfo(..) ) import Distribution.Client.Utils ( duplicates, duplicatesBy, mergeBy, MergeResult(..) 
) import Distribution.Simple.Utils ( comparing, intercalate ) import qualified Distribution.InstalledPackageInfo as Installed import Data.List ( sort, sortBy ) import Data.Maybe ( fromMaybe, maybeToList ) import qualified Data.Graph as Graph import Data.Graph (Graph) import Control.Exception ( assert ) import Data.Maybe (catMaybes) import qualified Data.Map as Map type PlanIndex = PackageIndex PlanPackage -- When cabal tries to install a number of packages, including all their -- dependencies, it has a non-trivial problem to solve. -- -- The Problem: -- -- In general we start with a set of installed packages and a set of source -- packages. -- -- Installed packages have fixed dependencies. They have already been built and -- we know exactly what packages they were built against, including their exact -- versions. -- -- Source packages have somewhat flexible dependencies. They are specified as -- version ranges, though really they're predicates. To make matters worse, they -- have conditional flexible dependencies. Configuration flags can affect which -- packages are required and can place additional constraints on their -- versions. -- -- These two sets of packages can and usually do overlap. There can be installed -- packages that are also available as source packages, which means they could -- be re-installed if required, though there will also be packages which are -- not available as source and cannot be re-installed. Very often there will be -- more versions available than are installed. Sometimes we may want to prefer -- installed packages over source ones, or perhaps always prefer the latest -- available version whether installed or not. -- -- The goal is to calculate an installation plan that is closed, acyclic and -- consistent, and where every configured package is valid. -- -- An installation plan is a set of packages that are going to be used -- together. It will consist of a mixture of installed packages and source -- packages along with their exact version dependencies. An installation plan -- is closed if for every package in the set, all of its dependencies are -- also in the set. It is consistent if for every package in the set, all -- dependencies which target that package have the same version. -- Note that plans do not necessarily compose. You might have a valid plan for -- package A and a valid plan for package B. That does not mean the composition -- is simultaneously valid for A and B. In particular you're most likely to -- have problems with inconsistent dependencies. -- On the other hand it is true that every closed sub plan is valid. data PlanPackage = PreExisting InstalledPackage | Configured ConfiguredPackage | Processing ReadyPackage | Installed ReadyPackage BuildSuccess | Failed ConfiguredPackage BuildFailure -- ^ NB: packages in the Failed state can be *either* Ready -- or Configured.
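-- The plan-package states above form a small state machine: 'processing',
-- 'completed' and 'failed' (defined further down) move a package from
-- Configured to Processing and then on to Installed or Failed, while
-- PreExisting packages never change state. The helper below is purely
-- illustrative (it is not part of the original module and is not exported):
-- it names the states that none of those operations will ever change again.
isTerminalPlanPackage :: PlanPackage -> Bool
isTerminalPlanPackage (PreExisting _) = True
isTerminalPlanPackage (Installed _ _) = True
isTerminalPlanPackage (Failed _ _)    = True
isTerminalPlanPackage _               = False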
instance Package PlanPackage where packageId (PreExisting pkg) = packageId pkg packageId (Configured pkg) = packageId pkg packageId (Processing pkg) = packageId pkg packageId (Installed pkg _) = packageId pkg packageId (Failed pkg _) = packageId pkg instance PackageFixedDeps PlanPackage where depends (PreExisting pkg) = depends pkg depends (Configured pkg) = depends pkg depends (Processing pkg) = depends pkg depends (Installed pkg _) = depends pkg depends (Failed pkg _) = depends pkg instance PackageInstalled PlanPackage where installedPackageId (PreExisting pkg) = installedPackageId pkg installedPackageId (Configured pkg) = installedPackageId pkg installedPackageId (Processing pkg) = installedPackageId pkg -- NB: defer to the actual installed package info in this case installedPackageId (Installed _ (BuildOk _ _ (Just ipkg))) = installedPackageId ipkg installedPackageId (Installed pkg _) = installedPackageId pkg installedPackageId (Failed pkg _) = installedPackageId pkg installedDepends (PreExisting pkg) = installedDepends pkg installedDepends (Configured pkg) = installedDepends pkg installedDepends (Processing pkg) = installedDepends pkg installedDepends (Installed _ (BuildOk _ _ (Just ipkg))) = installedDepends ipkg installedDepends (Installed pkg _) = installedDepends pkg installedDepends (Failed pkg _) = installedDepends pkg data InstallPlan = InstallPlan { planIndex :: PlanIndex, planFakeMap :: FakeMap, planGraph :: Graph, planGraphRev :: Graph, planPkgOf :: Graph.Vertex -> PlanPackage, planVertexOf :: InstalledPackageId -> Graph.Vertex, planPlatform :: Platform, planCompiler :: CompilerInfo } invariant :: InstallPlan -> Bool invariant plan = valid (planPlatform plan) (planCompiler plan) (planFakeMap plan) (planIndex plan) internalError :: String -> a internalError msg = error $ "InstallPlan: internal error: " ++ msg showPlanIndex :: PlanIndex -> String showPlanIndex index = intercalate "\n" (map showPlanPackage (PackageIndex.allPackages index)) where showPlanPackage p = showPlanPackageTag p ++ " " ++ display (packageId p) ++ " (" ++ display (installedPackageId p) ++ ")" showInstallPlan :: InstallPlan -> String showInstallPlan plan = showPlanIndex (planIndex plan) ++ "\n" ++ "fake map:\n " ++ intercalate "\n " (map showKV (Map.toList (planFakeMap plan))) where showKV (k,v) = display k ++ " -> " ++ display v showPlanPackageTag :: PlanPackage -> String showPlanPackageTag (PreExisting _) = "PreExisting" showPlanPackageTag (Configured _) = "Configured" showPlanPackageTag (Processing _) = "Processing" showPlanPackageTag (Installed _ _) = "Installed" showPlanPackageTag (Failed _ _) = "Failed" -- | Build an installation plan from a valid set of resolved packages. -- new :: Platform -> CompilerInfo -> PlanIndex -> Either [PlanProblem] InstallPlan new platform cinfo index = -- NB: Need to pre-initialize the fake-map with pre-existing -- packages let isPreExisting (PreExisting _) = True isPreExisting _ = False fakeMap = Map.fromList . map (\p -> (fakeInstalledPackageId (packageId p), installedPackageId p)) . filter isPreExisting $ PackageIndex.allPackages index in case problems platform cinfo fakeMap index of [] -> Right InstallPlan { planIndex = index, planFakeMap = fakeMap, planGraph = graph, planGraphRev = Graph.transposeG graph, planPkgOf = vertexToPkgId, planVertexOf = fromMaybe noSuchPkgId . 
pkgIdToVertex, planPlatform = platform, planCompiler = cinfo } where (graph, vertexToPkgId, pkgIdToVertex) = PackageIndex.dependencyGraph index -- NB: doesn't need to know planFakeMap because the -- fakemap is empty at this point. noSuchPkgId = internalError "package is not in the graph" probs -> Left probs toList :: InstallPlan -> [PlanPackage] toList = PackageIndex.allPackages . planIndex -- | Remove packages from the install plan. This will result in an -- error if there are remaining packages that depend on any matching -- package. This is primarily useful for obtaining an install plan for -- the dependencies of a package or set of packages without actually -- installing the package itself, as when doing development. -- remove :: (PlanPackage -> Bool) -> InstallPlan -> Either [PlanProblem] InstallPlan remove shouldRemove plan = new (planPlatform plan) (planCompiler plan) newIndex where newIndex = PackageIndex.fromList $ filter (not . shouldRemove) (toList plan) -- | The packages that are ready to be installed. That is, they are in the -- configured state and have all their dependencies installed already. -- The plan is complete if the result is @[]@. -- ready :: InstallPlan -> [ReadyPackage] ready plan = assert check readyPackages where check = if null readyPackages && null processingPackages then null configuredPackages else True configuredPackages = [ pkg | Configured pkg <- toList plan ] processingPackages = [ pkg | Processing pkg <- toList plan] readyPackages :: [ReadyPackage] readyPackages = [ ReadyPackage srcPkg flags stanzas deps | pkg@(ConfiguredPackage srcPkg flags stanzas _) <- configuredPackages -- select only the packages that have all of their deps installed: , deps <- maybeToList (hasAllInstalledDeps pkg) ] hasAllInstalledDeps :: ConfiguredPackage -> Maybe [Installed.InstalledPackageInfo] hasAllInstalledDeps = mapM isInstalledDep . installedDepends isInstalledDep :: InstalledPackageId -> Maybe Installed.InstalledPackageInfo isInstalledDep pkgid = -- NB: Need to check if the ID has been updated in planFakeMap, in which case we -- might be dealing with an old pointer case PackageIndex.fakeLookupInstalledPackageId (planFakeMap plan) (planIndex plan) pkgid of Just (Configured _) -> Nothing Just (Processing _) -> Nothing Just (Failed _ _) -> internalError depOnFailed Just (PreExisting (InstalledPackage instPkg _)) -> Just instPkg Just (Installed _ (BuildOk _ _ (Just instPkg))) -> Just instPkg Just (Installed _ (BuildOk _ _ Nothing)) -> internalError depOnNonLib Nothing -> internalError incomplete incomplete = "install plan is not closed" depOnFailed = "configured package depends on failed package" depOnNonLib = "configured package depends on a non-library package" -- | Marks packages in the graph as currently processing (e.g. building). -- -- * The package must exist in the graph and be in the configured state. -- processing :: [ReadyPackage] -> InstallPlan -> InstallPlan processing pkgs plan = assert (invariant plan') plan' where plan' = plan { planIndex = PackageIndex.merge (planIndex plan) processingPkgs } processingPkgs = PackageIndex.fromList [Processing pkg | pkg <- pkgs] -- | Marks a package in the graph as completed. Also saves the build result for -- the completed package in the plan. -- -- * The package must exist in the graph and be in the processing state. -- * The package must have had no uninstalled dependent packages.
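--
-- A sketch of how 'processing', 'completed' and 'failed' are meant to be
-- threaded together by an install driver (the real driver lives in
-- "Distribution.Client.Install"; @plan0@, @readyPkg@, @ipid@ and
-- @buildOutcome@ are illustrative names only):
--
-- > let plan1 = processing [readyPkg] plan0
-- > -- ... build the package, obtaining buildOutcome and its ipid ...
-- > let plan2 = case buildOutcome of
-- >       Right result -> completed ipid result plan1
-- >       Left  err    -> failed ipid err err plan1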
-- completed :: InstalledPackageId -> BuildSuccess -> InstallPlan -> InstallPlan completed pkgid buildResult plan = assert (invariant plan') plan' where plan' = plan { -- NB: installation can change the IPID, so better -- record it in the fake mapping... planFakeMap = insert_fake_mapping buildResult $ planFakeMap plan, planIndex = PackageIndex.insert installed . PackageIndex.deleteInstalledPackageId pkgid $ planIndex plan } -- ...but be sure to use the *old* IPID for the lookup for the -- preexisting record installed = Installed (lookupProcessingPackage plan pkgid) buildResult insert_fake_mapping (BuildOk _ _ (Just ipi)) = Map.insert pkgid (installedPackageId ipi) insert_fake_mapping _ = id -- | Marks a package in the graph as having failed. It also marks all the -- packages that depended on it as having failed. -- -- * The package must exist in the graph and be in the processing -- state. -- failed :: InstalledPackageId -- ^ The id of the package that failed to install -> BuildFailure -- ^ The build result to use for the failed package -> BuildFailure -- ^ The build result to use for its dependencies -> InstallPlan -> InstallPlan failed pkgid buildResult buildResult' plan = assert (invariant plan') plan' where -- NB: failures don't update IPIDs plan' = plan { planIndex = PackageIndex.merge (planIndex plan) failures } pkg = lookupProcessingPackage plan pkgid failures = PackageIndex.fromList $ Failed (readyPackageToConfiguredPackage pkg) buildResult : [ Failed pkg' buildResult' | Just pkg' <- map checkConfiguredPackage $ packagesThatDependOn plan pkgid ] -- | Lookup the reachable packages in the reverse dependency graph. -- packagesThatDependOn :: InstallPlan -> InstalledPackageId -> [PlanPackage] packagesThatDependOn plan pkgid = map (planPkgOf plan) . tail . Graph.reachable (planGraphRev plan) . planVertexOf plan $ Map.findWithDefault pkgid pkgid (planFakeMap plan) -- | Lookup a package that we expect to be in the processing state. -- lookupProcessingPackage :: InstallPlan -> InstalledPackageId -> ReadyPackage lookupProcessingPackage plan pkgid = -- NB: processing packages are guaranteed to not indirect through -- planFakeMap case PackageIndex.lookupInstalledPackageId (planIndex plan) pkgid of Just (Processing pkg) -> pkg _ -> internalError $ "not in processing state or no such pkg " ++ display pkgid -- | Check a package that we expect to be in the configured or failed state. -- checkConfiguredPackage :: PlanPackage -> Maybe ConfiguredPackage checkConfiguredPackage (Configured pkg) = Just pkg checkConfiguredPackage (Failed _ _) = Nothing checkConfiguredPackage pkg = internalError $ "not configured or no such pkg " ++ display (packageId pkg) -- ------------------------------------------------------------ -- * Checking validity of plans -- ------------------------------------------------------------ -- | A valid installation plan is a set of packages that is 'acyclic', -- 'closed' and 'consistent'. Also, every 'ConfiguredPackage' in the -- plan has to have a valid configuration (see 'configuredPackageValid'). -- -- * if the result is @False@ use 'problems' to get a detailed list. 
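--
-- As a rough orientation only (the authoritative check is 'problems' below,
-- which additionally consults the fake map and the package-state relation):
--
-- > valid platform cinfo fakeMap index    -- roughly
-- >   = acyclic index && closed index && consistent index
-- >     && and [ configuredPackageValid platform cinfo pkg
-- >            | Configured pkg <- PackageIndex.allPackages index ]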
-- valid :: Platform -> CompilerInfo -> FakeMap -> PlanIndex -> Bool valid platform cinfo fakeMap index = null (problems platform cinfo fakeMap index) data PlanProblem = PackageInvalid ConfiguredPackage [PackageProblem] | PackageMissingDeps PlanPackage [PackageIdentifier] | PackageCycle [PlanPackage] | PackageInconsistency PackageName [(PackageIdentifier, Version)] | PackageStateInvalid PlanPackage PlanPackage showPlanProblem :: PlanProblem -> String showPlanProblem (PackageInvalid pkg packageProblems) = "Package " ++ display (packageId pkg) ++ " has an invalid configuration, in particular:\n" ++ unlines [ " " ++ showPackageProblem problem | problem <- packageProblems ] showPlanProblem (PackageMissingDeps pkg missingDeps) = "Package " ++ display (packageId pkg) ++ " depends on the following packages which are missing from the plan: " ++ intercalate ", " (map display missingDeps) showPlanProblem (PackageCycle cycleGroup) = "The following packages are involved in a dependency cycle " ++ intercalate ", " (map (display.packageId) cycleGroup) showPlanProblem (PackageInconsistency name inconsistencies) = "Package " ++ display name ++ " is required by several packages," ++ " but they require inconsistent versions:\n" ++ unlines [ " package " ++ display pkg ++ " requires " ++ display (PackageIdentifier name ver) | (pkg, ver) <- inconsistencies ] showPlanProblem (PackageStateInvalid pkg pkg') = "Package " ++ display (packageId pkg) ++ " is in the " ++ showPlanState pkg ++ " state but it depends on package " ++ display (packageId pkg') ++ " which is in the " ++ showPlanState pkg' ++ " state" where showPlanState (PreExisting _) = "pre-existing" showPlanState (Configured _) = "configured" showPlanState (Processing _) = "processing" showPlanState (Installed _ _) = "installed" showPlanState (Failed _ _) = "failed" -- | For an invalid plan, produce a detailed list of problems as human readable -- error messages. This is mainly intended for debugging purposes. -- Use 'showPlanProblem' for a human readable explanation. -- problems :: Platform -> CompilerInfo -> FakeMap -> PlanIndex -> [PlanProblem] problems platform cinfo fakeMap index = [ PackageInvalid pkg packageProblems | Configured pkg <- PackageIndex.allPackages index , let packageProblems = configuredPackageProblems platform cinfo pkg , not (null packageProblems) ] ++ [ PackageMissingDeps pkg (catMaybes (map (fmap packageId . PackageIndex.fakeLookupInstalledPackageId fakeMap index) missingDeps)) | (pkg, missingDeps) <- PackageIndex.brokenPackages' fakeMap index ] ++ [ PackageCycle cycleGroup | cycleGroup <- PackageIndex.dependencyCycles' fakeMap index ] ++ [ PackageInconsistency name inconsistencies | (name, inconsistencies) <- PackageIndex.dependencyInconsistencies' fakeMap index ] ++ [ PackageStateInvalid pkg pkg' | pkg <- PackageIndex.allPackages index , Just pkg' <- map (PackageIndex.fakeLookupInstalledPackageId fakeMap index) (installedDepends pkg) , not (stateDependencyRelation pkg pkg') ] -- | The graph of packages (nodes) and dependencies (edges) must be acyclic. -- -- * if the result is @False@ use 'PackageIndex.dependencyCycles' to find out -- which packages are involved in dependency cycles. -- acyclic :: PlanIndex -> Bool acyclic = null . PackageIndex.dependencyCycles -- | An installation plan is closed if for every package in the set, all of -- its dependencies are also in the set. That is, the set is closed under the -- dependency relation. 
-- -- * if the result is @False@ use 'PackageIndex.brokenPackages' to find out -- which packages depend on packages not in the index. -- closed :: PlanIndex -> Bool closed = null . PackageIndex.brokenPackages -- | An installation plan is consistent if all dependencies that target a -- single package name target the same version. -- -- This is slightly subtle. It is not the same as requiring that there be at -- most one version of any package in the set. It only requires that of -- packages which have more than one other package depending on them. We could -- actually make the condition even more precise and say that different -- versions are OK so long as they are not both in the transitive closure of -- any other package (or equivalently that their inverse closures do not -- intersect). The point is we do not want to have any packages depending -- directly or indirectly on two different versions of the same package. The -- current definition is just a safe approximation of that. -- -- * if the result is @False@ use 'PackageIndex.dependencyInconsistencies' to -- find out which packages are inconsistent. -- consistent :: PlanIndex -> Bool consistent = null . PackageIndex.dependencyInconsistencies -- | The states of packages that depend on each other must respect -- this relation. That is, for every case where package @a@ depends on -- package @b@, we require that @stateDependencyRelation a b = True@. -- stateDependencyRelation :: PlanPackage -> PlanPackage -> Bool stateDependencyRelation (PreExisting _) (PreExisting _) = True stateDependencyRelation (Configured _) (PreExisting _) = True stateDependencyRelation (Configured _) (Configured _) = True stateDependencyRelation (Configured _) (Processing _) = True stateDependencyRelation (Configured _) (Installed _ _) = True stateDependencyRelation (Processing _) (PreExisting _) = True stateDependencyRelation (Processing _) (Installed _ _) = True stateDependencyRelation (Installed _ _) (PreExisting _) = True stateDependencyRelation (Installed _ _) (Installed _ _) = True stateDependencyRelation (Failed _ _) (PreExisting _) = True -- failed can depend on configured because a package can depend on -- several other packages, and if one of the deps fails then we fail, -- but we still depend on the other ones that did not fail: stateDependencyRelation (Failed _ _) (Configured _) = True stateDependencyRelation (Failed _ _) (Processing _) = True stateDependencyRelation (Failed _ _) (Installed _ _) = True stateDependencyRelation (Failed _ _) (Failed _ _) = True stateDependencyRelation _ _ = False -- | A 'ConfiguredPackage' is valid if the flag assignment is total and if -- in the configuration given by the flag assignment, all the package -- dependencies are satisfied by the specified packages.
-- configuredPackageValid :: Platform -> CompilerInfo -> ConfiguredPackage -> Bool configuredPackageValid platform cinfo pkg = null (configuredPackageProblems platform cinfo pkg) data PackageProblem = DuplicateFlag FlagName | MissingFlag FlagName | ExtraFlag FlagName | DuplicateDeps [PackageIdentifier] | MissingDep Dependency | ExtraDep PackageIdentifier | InvalidDep Dependency PackageIdentifier showPackageProblem :: PackageProblem -> String showPackageProblem (DuplicateFlag (FlagName flag)) = "duplicate flag in the flag assignment: " ++ flag showPackageProblem (MissingFlag (FlagName flag)) = "missing an assignment for the flag: " ++ flag showPackageProblem (ExtraFlag (FlagName flag)) = "extra flag given that is not used by the package: " ++ flag showPackageProblem (DuplicateDeps pkgids) = "duplicate packages specified as selected dependencies: " ++ intercalate ", " (map display pkgids) showPackageProblem (MissingDep dep) = "the package has a dependency " ++ display dep ++ " but no package has been selected to satisfy it." showPackageProblem (ExtraDep pkgid) = "the package configuration specifies " ++ display pkgid ++ " but (with the given flag assignment) the package does not actually" ++ " depend on any version of that package." showPackageProblem (InvalidDep dep pkgid) = "the package depends on " ++ display dep ++ " but the configuration specifies " ++ display pkgid ++ " which does not satisfy the dependency." configuredPackageProblems :: Platform -> CompilerInfo -> ConfiguredPackage -> [PackageProblem] configuredPackageProblems platform cinfo (ConfiguredPackage pkg specifiedFlags stanzas specifiedDeps) = [ DuplicateFlag flag | ((flag,_):_) <- duplicates specifiedFlags ] ++ [ MissingFlag flag | OnlyInLeft flag <- mergedFlags ] ++ [ ExtraFlag flag | OnlyInRight flag <- mergedFlags ] ++ [ DuplicateDeps pkgs | pkgs <- duplicatesBy (comparing packageName) specifiedDeps ] ++ [ MissingDep dep | OnlyInLeft dep <- mergedDeps ] ++ [ ExtraDep pkgid | OnlyInRight pkgid <- mergedDeps ] ++ [ InvalidDep dep pkgid | InBoth dep pkgid <- mergedDeps , not (packageSatisfiesDependency pkgid dep) ] where mergedFlags = mergeBy compare (sort $ map flagName (genPackageFlags (packageDescription pkg))) (sort $ map fst specifiedFlags) mergedDeps = mergeBy (\dep pkgid -> dependencyName dep `compare` packageName pkgid) (sortBy (comparing dependencyName) requiredDeps) (sortBy (comparing packageName) specifiedDeps) packageSatisfiesDependency (PackageIdentifier name version) (Dependency name' versionRange) = assert (name == name') $ version `withinRange` versionRange dependencyName (Dependency name _) = name requiredDeps :: [Dependency] requiredDeps = --TODO: use something lower level than finalizePackageDescription case finalizePackageDescription specifiedFlags (const True) platform cinfo [] (enableStanzas stanzas $ packageDescription pkg) of Right (resolvedPkg, _) -> externalBuildDepends resolvedPkg Left _ -> error "configuredPackageInvalidDeps internal error" cabal-install-1.22.6.0/Distribution/Client/Sandbox.hs0000644000000000000000000010654112540225656020501 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Sandbox -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- UI for the sandboxing functionality. 
----------------------------------------------------------------------------- module Distribution.Client.Sandbox ( sandboxInit, sandboxDelete, sandboxAddSource, sandboxAddSourceSnapshot, sandboxDeleteSource, sandboxListSources, sandboxHcPkg, dumpPackageEnvironment, withSandboxBinDirOnSearchPath, getSandboxConfigFilePath, loadConfigOrSandboxConfig, initPackageDBIfNeeded, maybeWithSandboxDirOnSearchPath, WereDepsReinstalled(..), reinstallAddSourceDeps, maybeReinstallAddSourceDeps, SandboxPackageInfo(..), maybeWithSandboxPackageInfo, tryGetIndexFilePath, sandboxBuildDir, getInstalledPackagesInSandbox, updateSandboxConfigFileFlag, -- FIXME: move somewhere else configPackageDB', configCompilerAux' ) where import Distribution.Client.Setup ( SandboxFlags(..), ConfigFlags(..), ConfigExFlags(..), InstallFlags(..) , GlobalFlags(..), defaultConfigExFlags, defaultInstallFlags , defaultSandboxLocation, globalRepos ) import Distribution.Client.Sandbox.Timestamp ( listModifiedDeps , maybeAddCompilerTimestampRecord , withAddTimestamps , withRemoveTimestamps ) import Distribution.Client.Config ( SavedConfig(..), loadConfig ) import Distribution.Client.Dependency ( foldProgress ) import Distribution.Client.IndexUtils ( BuildTreeRefType(..) ) import Distribution.Client.Install ( InstallArgs, makeInstallContext, makeInstallPlan, processInstallPlan ) import Distribution.Utils.NubList ( fromNubList ) import Distribution.Client.Sandbox.PackageEnvironment ( PackageEnvironment(..), IncludeComments(..), PackageEnvironmentType(..) , createPackageEnvironmentFile, classifyPackageEnvironment , tryLoadSandboxPackageEnvironmentFile, loadUserConfig , commentPackageEnvironment, showPackageEnvironmentWithComments , sandboxPackageEnvironmentFile, userPackageEnvironmentFile ) import Distribution.Client.Sandbox.Types ( SandboxPackageInfo(..) , UseSandbox(..) ) import Distribution.Client.Types ( PackageLocation(..) , SourcePackage(..) ) import Distribution.Client.Utils ( inDir, tryCanonicalizePath , tryFindAddSourcePackageDesc ) import Distribution.PackageDescription.Configuration ( flattenPackageDescription ) import Distribution.PackageDescription.Parse ( readPackageDescription ) import Distribution.Simple.Compiler ( Compiler(..), PackageDB(..) , PackageDBStack ) import Distribution.Simple.Configure ( configCompilerAuxEx , interpretPackageDbFlags , getPackageDBContents ) import Distribution.Simple.PreProcess ( knownSuffixHandlers ) import Distribution.Simple.Program ( ProgramConfiguration ) import Distribution.Simple.Setup ( Flag(..), HaddockFlags(..) , fromFlagOrDefault ) import Distribution.Simple.SrcDist ( prepareTree ) import Distribution.Simple.Utils ( die, debug, notice, info, warn , debugNoWrap, defaultPackageDesc , intercalate, topHandlerWith , createDirectoryIfMissingVerbose ) import Distribution.Package ( Package(..) 
) import Distribution.System ( Platform ) import Distribution.Text ( display ) import Distribution.Verbosity ( Verbosity, lessVerbose ) import Distribution.Client.Compat.Environment ( lookupEnv, setEnv ) import Distribution.Client.Compat.FilePerms ( setFileHidden ) import qualified Distribution.Client.Sandbox.Index as Index import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex import qualified Distribution.Simple.Register as Register import qualified Data.Map as M import qualified Data.Set as S import Control.Exception ( assert, bracket_ ) import Control.Monad ( forM, liftM2, unless, when ) import Data.Bits ( shiftL, shiftR, xor ) import Data.Char ( ord ) import Data.IORef ( newIORef, writeIORef, readIORef ) import Data.List ( delete, foldl' ) import Data.Maybe ( fromJust, fromMaybe ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( mempty, mappend ) #endif import Data.Word ( Word32 ) import Numeric ( showHex ) import System.Directory ( createDirectory , doesDirectoryExist , doesFileExist , getCurrentDirectory , removeDirectoryRecursive , removeFile , renameDirectory ) import System.FilePath ( (), getSearchPath , searchPathSeparator , takeDirectory ) -- -- * Constants -- -- | The name of the sandbox subdirectory where we keep snapshots of add-source -- dependencies. snapshotDirectoryName :: FilePath snapshotDirectoryName = "snapshots" -- | Non-standard build dir that is used for building add-source deps instead of -- "dist". Fixes surprising behaviour in some cases (see issue #1281). sandboxBuildDir :: FilePath -> FilePath sandboxBuildDir sandboxDir = "dist/dist-sandbox-" ++ showHex sandboxDirHash "" where sandboxDirHash = jenkins sandboxDir -- See http://en.wikipedia.org/wiki/Jenkins_hash_function jenkins :: String -> Word32 jenkins str = loop_finish $ foldl' loop 0 str where loop :: Word32 -> Char -> Word32 loop hash key_i' = hash''' where key_i = toEnum . ord $ key_i' hash' = hash + key_i hash'' = hash' + (shiftL hash' 10) hash''' = hash'' `xor` (shiftR hash'' 6) loop_finish :: Word32 -> Word32 loop_finish hash = hash''' where hash' = hash + (shiftL hash 3) hash'' = hash' `xor` (shiftR hash' 11) hash''' = hash'' + (shiftL hash'' 15) -- -- * Basic sandbox functions. -- -- | If @--sandbox-config-file@ wasn't given on the command-line, set it to the -- value of the @CABAL_SANDBOX_CONFIG@ environment variable, or else to -- 'NoFlag'. updateSandboxConfigFileFlag :: GlobalFlags -> IO GlobalFlags updateSandboxConfigFileFlag globalFlags = case globalSandboxConfigFile globalFlags of Flag _ -> return globalFlags NoFlag -> do f' <- fmap (fromMaybe NoFlag . fmap Flag) . lookupEnv $ "CABAL_SANDBOX_CONFIG" return globalFlags { globalSandboxConfigFile = f' } -- | Return the path to the sandbox config file - either the default or the one -- specified with @--sandbox-config-file@. getSandboxConfigFilePath :: GlobalFlags -> IO FilePath getSandboxConfigFilePath globalFlags = do let sandboxConfigFileFlag = globalSandboxConfigFile globalFlags case sandboxConfigFileFlag of NoFlag -> do pkgEnvDir <- getCurrentDirectory return (pkgEnvDir sandboxPackageEnvironmentFile) Flag path -> return path -- | Load the @cabal.sandbox.config@ file (and possibly the optional -- @cabal.config@). In addition to a @PackageEnvironment@, also return a -- canonical path to the sandbox. Exit with error if the sandbox directory or -- the package environment file do not exist. 
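--
-- A typical use, mirroring how the sandbox commands below call it (a sketch;
-- @verbosity@ and @globalFlags@ are whatever the caller already has in scope):
--
-- > (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
-- > let config = pkgEnvSavedConfig pkgEnv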
tryLoadSandboxConfig :: Verbosity -> GlobalFlags -> IO (FilePath, PackageEnvironment) tryLoadSandboxConfig verbosity globalFlags = do path <- getSandboxConfigFilePath globalFlags tryLoadSandboxPackageEnvironmentFile verbosity path (globalConfigFile globalFlags) -- | Return the name of the package index file for this package environment. tryGetIndexFilePath :: SavedConfig -> IO FilePath tryGetIndexFilePath config = tryGetIndexFilePath' (savedGlobalFlags config) -- | The same as 'tryGetIndexFilePath', but takes 'GlobalFlags' instead of -- 'SavedConfig'. tryGetIndexFilePath' :: GlobalFlags -> IO FilePath tryGetIndexFilePath' globalFlags = do let paths = fromNubList $ globalLocalRepos globalFlags case paths of [] -> die $ "Distribution.Client.Sandbox.tryGetIndexFilePath: " ++ "no local repos found. " ++ checkConfiguration _ -> return $ (last paths) Index.defaultIndexFileName where checkConfiguration = "Please check your configuration ('" ++ userPackageEnvironmentFile ++ "')." -- | Try to extract a 'PackageDB' from 'ConfigFlags'. Gives a better error -- message than just pattern-matching. getSandboxPackageDB :: ConfigFlags -> IO PackageDB getSandboxPackageDB configFlags = do case configPackageDBs configFlags of [Just sandboxDB@(SpecificPackageDB _)] -> return sandboxDB -- TODO: should we allow multiple package DBs (e.g. with 'inherit')? [] -> die $ "Sandbox package DB is not specified. " ++ sandboxConfigCorrupt [_] -> die $ "Unexpected contents of the 'package-db' field. " ++ sandboxConfigCorrupt _ -> die $ "Too many package DBs provided. " ++ sandboxConfigCorrupt where sandboxConfigCorrupt = "Your 'cabal.sandbox.config' is probably corrupt." -- | Which packages are installed in the sandbox package DB? getInstalledPackagesInSandbox :: Verbosity -> ConfigFlags -> Compiler -> ProgramConfiguration -> IO InstalledPackageIndex getInstalledPackagesInSandbox verbosity configFlags comp conf = do sandboxDB <- getSandboxPackageDB configFlags getPackageDBContents verbosity comp sandboxDB conf -- | Temporarily add $SANDBOX_DIR/bin to $PATH. withSandboxBinDirOnSearchPath :: FilePath -> IO a -> IO a withSandboxBinDirOnSearchPath sandboxDir = bracket_ addBinDir rmBinDir where -- TODO: Instead of modifying the global process state, it'd be better to -- set the environment individually for each subprocess invocation. This -- will have to wait until the Shell monad is implemented; without it the -- required changes are too intrusive. addBinDir :: IO () addBinDir = do mbOldPath <- lookupEnv "PATH" let newPath = maybe sandboxBin ((++) sandboxBin . (:) searchPathSeparator) mbOldPath setEnv "PATH" newPath rmBinDir :: IO () rmBinDir = do oldPath <- getSearchPath let newPath = intercalate [searchPathSeparator] (delete sandboxBin oldPath) setEnv "PATH" newPath sandboxBin = sandboxDir "bin" -- | Initialise a package DB for this compiler if it doesn't exist. initPackageDBIfNeeded :: Verbosity -> ConfigFlags -> Compiler -> ProgramConfiguration -> IO () initPackageDBIfNeeded verbosity configFlags comp conf = do SpecificPackageDB dbPath <- getSandboxPackageDB configFlags packageDBExists <- doesDirectoryExist dbPath unless packageDBExists $ Register.initPackageDB verbosity comp conf dbPath when packageDBExists $ debug verbosity $ "The package database already exists: " ++ dbPath -- | Entry point for the 'cabal sandbox dump-pkgenv' command. 
dumpPackageEnvironment :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () dumpPackageEnvironment verbosity _sandboxFlags globalFlags = do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags commentPkgEnv <- commentPackageEnvironment sandboxDir putStrLn . showPackageEnvironmentWithComments (Just commentPkgEnv) $ pkgEnv -- | Entry point for the 'cabal sandbox init' command. sandboxInit :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () sandboxInit verbosity sandboxFlags globalFlags = do -- Warn if there's a 'cabal-dev' sandbox. isCabalDevSandbox <- liftM2 (&&) (doesDirectoryExist "cabal-dev") (doesFileExist $ "cabal-dev" "cabal.config") when isCabalDevSandbox $ warn verbosity $ "You are apparently using a legacy (cabal-dev) sandbox. " ++ "Legacy sandboxes may interact badly with native Cabal sandboxes. " ++ "You may want to delete the 'cabal-dev' directory to prevent issues." -- Create the sandbox directory. let sandboxDir' = fromFlagOrDefault defaultSandboxLocation (sandboxLocation sandboxFlags) createDirectoryIfMissingVerbose verbosity True sandboxDir' sandboxDir <- tryCanonicalizePath sandboxDir' setFileHidden sandboxDir -- Determine which compiler to use (using the value from ~/.cabal/config). userConfig <- loadConfig verbosity (globalConfigFile globalFlags) NoFlag (comp, platform, conf) <- configCompilerAuxEx (savedConfigureFlags userConfig) -- Create the package environment file. pkgEnvFile <- getSandboxConfigFilePath globalFlags createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile NoComments comp platform (_sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags let config = pkgEnvSavedConfig pkgEnv configFlags = savedConfigureFlags config -- Create the index file if it doesn't exist. indexFile <- tryGetIndexFilePath config indexFileExists <- doesFileExist indexFile if indexFileExists then notice verbosity $ "Using an existing sandbox located at " ++ sandboxDir else notice verbosity $ "Creating a new sandbox at " ++ sandboxDir Index.createEmpty verbosity indexFile -- Create the package DB for the default compiler. initPackageDBIfNeeded verbosity configFlags comp conf maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile (compilerId comp) platform -- | Entry point for the 'cabal sandbox delete' command. sandboxDelete :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () sandboxDelete verbosity _sandboxFlags globalFlags = do (useSandbox, _) <- loadConfigOrSandboxConfig verbosity globalFlags { globalRequireSandbox = Flag False } mempty case useSandbox of NoSandbox -> warn verbosity "Not in a sandbox." UseSandbox sandboxDir -> do curDir <- getCurrentDirectory pkgEnvFile <- getSandboxConfigFilePath globalFlags -- Remove the @cabal.sandbox.config@ file, unless it's in a non-standard -- location. let isNonDefaultConfigLocation = pkgEnvFile /= (curDir sandboxPackageEnvironmentFile) if isNonDefaultConfigLocation then warn verbosity $ "Sandbox config file is in non-default location: '" ++ pkgEnvFile ++ "'.\n Please delete manually." else removeFile pkgEnvFile -- Remove the sandbox directory, unless we're using a shared sandbox. let isNonDefaultSandboxLocation = sandboxDir /= (curDir defaultSandboxLocation) when isNonDefaultSandboxLocation $ die $ "Non-default sandbox location used: '" ++ sandboxDir ++ "'.\nAssuming a shared sandbox. Please delete '" ++ sandboxDir ++ "' manually." 
notice verbosity $ "Deleting the sandbox located at " ++ sandboxDir removeDirectoryRecursive sandboxDir -- Common implementation of 'sandboxAddSource' and 'sandboxAddSourceSnapshot'. doAddSource :: Verbosity -> [FilePath] -> FilePath -> PackageEnvironment -> BuildTreeRefType -> IO () doAddSource verbosity buildTreeRefs sandboxDir pkgEnv refType = do let savedConfig = pkgEnvSavedConfig pkgEnv indexFile <- tryGetIndexFilePath savedConfig -- If we're running 'sandbox add-source' for the first time for this compiler, -- we need to create an initial timestamp record. (comp, platform, _) <- configCompilerAuxEx . savedConfigureFlags $ savedConfig maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile (compilerId comp) platform withAddTimestamps sandboxDir $ do -- FIXME: path canonicalisation is done in addBuildTreeRefs, but we do it -- twice because of the timestamps file. buildTreeRefs' <- mapM tryCanonicalizePath buildTreeRefs Index.addBuildTreeRefs verbosity indexFile buildTreeRefs' refType return buildTreeRefs' -- | Entry point for the 'cabal sandbox add-source' command. sandboxAddSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags -> IO () sandboxAddSource verbosity buildTreeRefs sandboxFlags globalFlags = do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags if fromFlagOrDefault False (sandboxSnapshot sandboxFlags) then sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv else doAddSource verbosity buildTreeRefs sandboxDir pkgEnv LinkRef -- | Entry point for the 'cabal sandbox add-source --snapshot' command. sandboxAddSourceSnapshot :: Verbosity -> [FilePath] -> FilePath -> PackageEnvironment -> IO () sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv = do let snapshotDir = sandboxDir snapshotDirectoryName -- Use 'D.S.SrcDist.prepareTree' to copy each package's files to our private -- location. createDirectoryIfMissingVerbose verbosity True snapshotDir -- Collect the package descriptions first, so that if some path does not refer -- to a cabal package, we fail immediately. pkgs <- forM buildTreeRefs $ \buildTreeRef -> inDir (Just buildTreeRef) $ return . flattenPackageDescription =<< readPackageDescription verbosity =<< defaultPackageDesc verbosity -- Copy the package sources to "snapshots/$PKGNAME-$VERSION-tmp". If -- 'prepareTree' throws an error at any point, the old snapshots will still be -- in consistent state. tmpDirs <- forM (zip buildTreeRefs pkgs) $ \(buildTreeRef, pkg) -> inDir (Just buildTreeRef) $ do let targetDir = snapshotDir (display . packageId $ pkg) targetTmpDir = targetDir ++ "-tmp" dirExists <- doesDirectoryExist targetTmpDir when dirExists $ removeDirectoryRecursive targetDir createDirectory targetTmpDir prepareTree verbosity pkg Nothing targetTmpDir knownSuffixHandlers return (targetTmpDir, targetDir) -- Now rename the "snapshots/$PKGNAME-$VERSION-tmp" dirs to -- "snapshots/$PKGNAME-$VERSION". snapshots <- forM tmpDirs $ \(targetTmpDir, targetDir) -> do dirExists <- doesDirectoryExist targetDir when dirExists $ removeDirectoryRecursive targetDir renameDirectory targetTmpDir targetDir return targetDir -- Once the packages are copied, just 'add-source' them as usual. doAddSource verbosity snapshots sandboxDir pkgEnv SnapshotRef -- | Entry point for the 'cabal sandbox delete-source' command. 
sandboxDeleteSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags -> IO () sandboxDeleteSource verbosity buildTreeRefs _sandboxFlags globalFlags = do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv) withRemoveTimestamps sandboxDir $ do Index.removeBuildTreeRefs verbosity indexFile buildTreeRefs notice verbosity $ "Note: 'sandbox delete-source' only unregisters the " ++ "source dependency, but does not remove the package " ++ "from the sandbox package DB.\n\n" ++ "Use 'sandbox hc-pkg -- unregister' to do that." -- | Entry point for the 'cabal sandbox list-sources' command. sandboxListSources :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () sandboxListSources verbosity _sandboxFlags globalFlags = do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv) refs <- Index.listBuildTreeRefs verbosity Index.ListIgnored Index.LinksAndSnapshots indexFile when (null refs) $ notice verbosity $ "Index file '" ++ indexFile ++ "' has no references to local build trees." when (not . null $ refs) $ do notice verbosity $ "Source dependencies registered " ++ "in the current sandbox ('" ++ sandboxDir ++ "'):\n\n" mapM_ putStrLn refs notice verbosity $ "\nTo unregister source dependencies, " ++ "use the 'sandbox delete-source' command." -- | Entry point for the 'cabal sandbox hc-pkg' command. Invokes the @hc-pkg@ -- tool with provided arguments, restricted to the sandbox. sandboxHcPkg :: Verbosity -> SandboxFlags -> GlobalFlags -> [String] -> IO () sandboxHcPkg verbosity _sandboxFlags globalFlags extraArgs = do (_sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags let configFlags = savedConfigureFlags . pkgEnvSavedConfig $ pkgEnv dbStack = configPackageDB' configFlags (comp, _platform, conf) <- configCompilerAux' configFlags Register.invokeHcPkg verbosity comp conf dbStack extraArgs -- | Check which type of package environment we're in and return a -- correctly-initialised @SavedConfig@ and a @UseSandbox@ value that indicates -- whether we're working in a sandbox. loadConfigOrSandboxConfig :: Verbosity -> GlobalFlags -- ^ For @--config-file@ and -- @--sandbox-config-file@. -> Flag Bool -- ^ Ignored if we're in a sandbox. -> IO (UseSandbox, SavedConfig) loadConfigOrSandboxConfig verbosity globalFlags userInstallFlag = do let configFileFlag = globalConfigFile globalFlags sandboxConfigFileFlag = globalSandboxConfigFile globalFlags ignoreSandboxFlag = globalIgnoreSandbox globalFlags pkgEnvDir <- getPkgEnvDir sandboxConfigFileFlag pkgEnvType <- classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag ignoreSandboxFlag case pkgEnvType of -- A @cabal.sandbox.config@ file (and possibly @cabal.config@) is present. SandboxPackageEnvironment -> do (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags -- ^ Prints an error message and exits on error. let config = pkgEnvSavedConfig pkgEnv return (UseSandbox sandboxDir, config) -- Only @cabal.config@ is present. UserPackageEnvironment -> do config <- loadConfig verbosity configFileFlag userInstallFlag userConfig <- loadUserConfig verbosity pkgEnvDir let config' = config `mappend` userConfig dieIfSandboxRequired config' return (NoSandbox, config') -- Neither @cabal.sandbox.config@ nor @cabal.config@ are present. 
AmbientPackageEnvironment -> do config <- loadConfig verbosity configFileFlag userInstallFlag dieIfSandboxRequired config return (NoSandbox, config) where -- Return the path to the package environment directory - either the -- current directory or the one that @--sandbox-config-file@ resides in. getPkgEnvDir :: (Flag FilePath) -> IO FilePath getPkgEnvDir sandboxConfigFileFlag = do case sandboxConfigFileFlag of NoFlag -> getCurrentDirectory Flag path -> tryCanonicalizePath . takeDirectory $ path -- Die if @--require-sandbox@ was specified and we're not inside a sandbox. dieIfSandboxRequired :: SavedConfig -> IO () dieIfSandboxRequired config = checkFlag flag where flag = (globalRequireSandbox . savedGlobalFlags $ config) `mappend` (globalRequireSandbox globalFlags) checkFlag (Flag True) = die $ "'require-sandbox' is set to True, but no sandbox is present. " ++ "Use '--no-require-sandbox' if you want to override " ++ "'require-sandbox' temporarily." checkFlag (Flag False) = return () checkFlag (NoFlag) = return () -- | If we're in a sandbox, call @withSandboxBinDirOnSearchPath@, otherwise do -- nothing. maybeWithSandboxDirOnSearchPath :: UseSandbox -> IO a -> IO a maybeWithSandboxDirOnSearchPath NoSandbox act = act maybeWithSandboxDirOnSearchPath (UseSandbox sandboxDir) act = withSandboxBinDirOnSearchPath sandboxDir $ act -- | Had reinstallAddSourceDeps actually reinstalled any dependencies? data WereDepsReinstalled = ReinstalledSomeDeps | NoDepsReinstalled -- | Reinstall those add-source dependencies that have been modified since -- we've last installed them. Assumes that we're working inside a sandbox. reinstallAddSourceDeps :: Verbosity -> ConfigFlags -> ConfigExFlags -> InstallFlags -> GlobalFlags -> FilePath -> IO WereDepsReinstalled reinstallAddSourceDeps verbosity configFlags' configExFlags installFlags globalFlags sandboxDir = topHandler' $ do let sandboxDistPref = sandboxBuildDir sandboxDir configFlags = configFlags' { configDistPref = Flag sandboxDistPref } haddockFlags = mempty { haddockDistPref = Flag sandboxDistPref } (comp, platform, conf) <- configCompilerAux' configFlags retVal <- newIORef NoDepsReinstalled withSandboxPackageInfo verbosity configFlags globalFlags comp platform conf sandboxDir $ \sandboxPkgInfo -> unless (null $ modifiedAddSourceDependencies sandboxPkgInfo) $ do let args :: InstallArgs args = ((configPackageDB' configFlags) ,(globalRepos globalFlags) ,comp, platform, conf ,UseSandbox sandboxDir, Just sandboxPkgInfo ,globalFlags, configFlags, configExFlags, installFlags ,haddockFlags) -- This can actually be replaced by a call to 'install', but we use a -- lower-level API because of layer separation reasons. Additionally, we -- might want to use some lower-level features this in the future. withSandboxBinDirOnSearchPath sandboxDir $ do installContext <- makeInstallContext verbosity args Nothing installPlan <- foldProgress logMsg die' return =<< makeInstallPlan verbosity args installContext processInstallPlan verbosity args installContext installPlan writeIORef retVal ReinstalledSomeDeps readIORef retVal where die' message = die (message ++ installFailedInSandbox) -- TODO: use a better error message, remove duplication. installFailedInSandbox = "Note: when using a sandbox, all packages are required to have " ++ "consistent dependencies. " ++ "Try reinstalling/unregistering the offending packages or " ++ "recreating the sandbox." 
logMsg message rest = debugNoWrap verbosity message >> rest topHandler' = topHandlerWith $ \_ -> do warn verbosity "Couldn't reinstall some add-source dependencies." -- Here we can't know whether any deps have been reinstalled, so we have -- to be conservative. return ReinstalledSomeDeps -- | Produce a 'SandboxPackageInfo' and feed it to the given action. Note that -- we don't update the timestamp file here - this is done in -- 'postInstallActions'. withSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags -> Compiler -> Platform -> ProgramConfiguration -> FilePath -> (SandboxPackageInfo -> IO ()) -> IO () withSandboxPackageInfo verbosity configFlags globalFlags comp platform conf sandboxDir cont = do -- List all add-source deps. indexFile <- tryGetIndexFilePath' globalFlags buildTreeRefs <- Index.listBuildTreeRefs verbosity Index.DontListIgnored Index.OnlyLinks indexFile let allAddSourceDepsSet = S.fromList buildTreeRefs -- List all packages installed in the sandbox. installedPkgIndex <- getInstalledPackagesInSandbox verbosity configFlags comp conf let err = "Error reading sandbox package information." -- Get the package descriptions for all add-source deps. depsCabalFiles <- mapM (flip tryFindAddSourcePackageDesc err) buildTreeRefs depsPkgDescs <- mapM (readPackageDescription verbosity) depsCabalFiles let depsMap = M.fromList (zip buildTreeRefs depsPkgDescs) isInstalled pkgid = not . null . InstalledPackageIndex.lookupSourcePackageId installedPkgIndex $ pkgid installedDepsMap = M.filter (isInstalled . packageId) depsMap -- Get the package ids of modified (and installed) add-source deps. modifiedAddSourceDeps <- listModifiedDeps verbosity sandboxDir (compilerId comp) platform installedDepsMap -- 'fromJust' here is safe because 'modifiedAddSourceDeps' are guaranteed to -- be a subset of the keys of 'depsMap'. let modifiedDeps = [ (modDepPath, fromJust $ M.lookup modDepPath depsMap) | modDepPath <- modifiedAddSourceDeps ] modifiedDepsMap = M.fromList modifiedDeps assert (all (`S.member` allAddSourceDepsSet) modifiedAddSourceDeps) (return ()) if (null modifiedDeps) then info verbosity $ "Found no modified add-source deps." else notice verbosity $ "Some add-source dependencies have been modified. " ++ "They will be reinstalled..." -- Get the package ids of the remaining add-source deps (some are possibly not -- installed). let otherDeps = M.assocs (depsMap `M.difference` modifiedDepsMap) -- Finally, assemble a 'SandboxPackageInfo'. cont $ SandboxPackageInfo (map toSourcePackage modifiedDeps) (map toSourcePackage otherDeps) installedPkgIndex allAddSourceDepsSet where toSourcePackage (path, pkgDesc) = SourcePackage (packageId pkgDesc) pkgDesc (LocalUnpackedPackage path) Nothing -- | Same as 'withSandboxPackageInfo' if we're inside a sandbox and a no-op -- otherwise. maybeWithSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags -> Compiler -> Platform -> ProgramConfiguration -> UseSandbox -> (Maybe SandboxPackageInfo -> IO ()) -> IO () maybeWithSandboxPackageInfo verbosity configFlags globalFlags comp platform conf useSandbox cont = case useSandbox of NoSandbox -> cont Nothing UseSandbox sandboxDir -> withSandboxPackageInfo verbosity configFlags globalFlags comp platform conf sandboxDir (\spi -> cont (Just spi)) -- | Check if a sandbox is present and call @reinstallAddSourceDeps@ in that -- case. 
maybeReinstallAddSourceDeps :: Verbosity -> Flag (Maybe Int) -- ^ The '-j' flag -> ConfigFlags -- ^ Saved configure flags -- (from dist/setup-config) -> GlobalFlags -> IO (UseSandbox, SavedConfig ,WereDepsReinstalled) maybeReinstallAddSourceDeps verbosity numJobsFlag configFlags' globalFlags' = do (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags' (configUserInstall configFlags') case useSandbox of NoSandbox -> return (NoSandbox, config, NoDepsReinstalled) UseSandbox sandboxDir -> do -- Reinstall the modified add-source deps. let configFlags = savedConfigureFlags config `mappendSomeSavedFlags` configFlags' configExFlags = defaultConfigExFlags `mappend` savedConfigureExFlags config installFlags' = defaultInstallFlags `mappend` savedInstallFlags config installFlags = installFlags' { installNumJobs = installNumJobs installFlags' `mappend` numJobsFlag } globalFlags = savedGlobalFlags config -- This makes it possible to override things like 'remote-repo-cache' -- from the command line. These options are hidden, and are only -- useful for debugging, so this should be fine. `mappend` globalFlags' depsReinstalled <- reinstallAddSourceDeps verbosity configFlags configExFlags installFlags globalFlags sandboxDir return (UseSandbox sandboxDir, config, depsReinstalled) where -- NOTE: we can't simply do @sandboxConfigFlags `mappend` savedFlags@ -- because we don't want to auto-enable things like 'library-profiling' for -- all add-source dependencies even if the user has passed -- '--enable-library-profiling' to 'cabal configure'. These options are -- supposed to be set in 'cabal.config'. mappendSomeSavedFlags :: ConfigFlags -> ConfigFlags -> ConfigFlags mappendSomeSavedFlags sandboxConfigFlags savedFlags = sandboxConfigFlags { configHcFlavor = configHcFlavor sandboxConfigFlags `mappend` configHcFlavor savedFlags, configHcPath = configHcPath sandboxConfigFlags `mappend` configHcPath savedFlags, configHcPkg = configHcPkg sandboxConfigFlags `mappend` configHcPkg savedFlags, configProgramPaths = configProgramPaths sandboxConfigFlags `mappend` configProgramPaths savedFlags, configProgramArgs = configProgramArgs sandboxConfigFlags `mappend` configProgramArgs savedFlags, -- NOTE: Unconditionally choosing the value from -- 'dist/setup-config'. Sandbox package DB location may have been -- changed by 'configure -w'. configPackageDBs = configPackageDBs savedFlags -- FIXME: Is this compatible with the 'inherit' feature? } -- -- Utils (transitionary) -- -- FIXME: configPackageDB' and configCompilerAux' don't really belong in this -- module -- configPackageDB' :: ConfigFlags -> PackageDBStack configPackageDB' cfg = interpretPackageDbFlags userInstall (configPackageDBs cfg) where userInstall = fromFlagOrDefault True (configUserInstall cfg) configCompilerAux' :: ConfigFlags -> IO (Compiler, Platform, ProgramConfiguration) configCompilerAux' configFlags = configCompilerAuxEx configFlags --FIXME: make configCompilerAux use a sensible verbosity { configVerbosity = fmap lessVerbose (configVerbosity configFlags) } cabal-install-1.22.6.0/Distribution/Client/IndexUtils.hs0000644000000000000000000005673312540225656021202 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.IndexUtils -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : duncan@community.haskell.org -- Stability : provisional -- Portability : portable -- -- Extra utils related to the package indexes. 
----------------------------------------------------------------------------- module Distribution.Client.IndexUtils ( getIndexFileAge, getInstalledPackages, getSourcePackages, getSourcePackagesStrict, convert, readPackageIndexFile, parsePackageIndex, readRepoIndex, updateRepoIndexCache, updatePackageIndexCacheFile, BuildTreeRefType(..), refTypeFromTypeCode, typeCodeFromRefType ) where import qualified Distribution.Client.Tar as Tar import Distribution.Client.Types import Distribution.Package ( PackageId, PackageIdentifier(..), PackageName(..) , Package(..), packageVersion, packageName , Dependency(Dependency), InstalledPackageId(..) ) import Distribution.Client.PackageIndex (PackageIndex) import qualified Distribution.Client.PackageIndex as PackageIndex import Distribution.Simple.PackageIndex (InstalledPackageIndex) import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo import qualified Distribution.PackageDescription.Parse as PackageDesc.Parse import Distribution.PackageDescription ( GenericPackageDescription ) import Distribution.PackageDescription.Parse ( parsePackageDescription ) import Distribution.Simple.Compiler ( Compiler, PackageDBStack ) import Distribution.Simple.Program ( ProgramConfiguration ) import qualified Distribution.Simple.Configure as Configure ( getInstalledPackages ) import Distribution.ParseUtils ( ParseResult(..) ) import Distribution.Version ( Version(Version), intersectVersionRanges ) import Distribution.Text ( display, simpleParse ) import Distribution.Verbosity ( Verbosity, normal, lessVerbose ) import Distribution.Simple.Utils ( die, warn, info, fromUTF8 ) import Data.Char (isAlphaNum) import Data.Maybe (mapMaybe, fromMaybe) import Data.List (isPrefixOf) #if !MIN_VERSION_base(4,8,0) import Data.Monoid (Monoid(..)) #endif import qualified Data.Map as Map import Control.Monad (MonadPlus(mplus), when, liftM) import Control.Exception (evaluate) import qualified Data.ByteString.Lazy as BS import qualified Data.ByteString.Lazy.Char8 as BS.Char8 import qualified Data.ByteString.Char8 as BSS import Data.ByteString.Lazy (ByteString) import Distribution.Client.GZipUtils (maybeDecompress) import Distribution.Client.Utils ( byteStringToFilePath , tryFindAddSourcePackageDesc ) import Distribution.Compat.Exception (catchIO) import Distribution.Client.Compat.Time (getFileAge, getModTime) import System.Directory (doesFileExist) import System.FilePath ((), takeExtension, splitDirectories, normalise) import System.FilePath.Posix as FilePath.Posix ( takeFileName ) import System.IO import System.IO.Unsafe (unsafeInterleaveIO) import System.IO.Error (isDoesNotExistError) import Numeric (showFFloat) getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack -> ProgramConfiguration -> IO InstalledPackageIndex getInstalledPackages verbosity comp packageDbs conf = Configure.getInstalledPackages verbosity' comp packageDbs conf where --FIXME: make getInstalledPackages use sensible verbosity in the first place verbosity' = lessVerbose verbosity convert :: InstalledPackageIndex -> PackageIndex InstalledPackage convert index' = PackageIndex.fromList -- There can be multiple installed instances of each package version, -- like when the same package is installed in the global & user DBs. -- InstalledPackageIndex.allPackagesBySourcePackageId gives us the -- installed packages with the most preferred instances first, so by -- picking the first we should get the user one. 
This is almost but not -- quite the same as what ghc does. [ InstalledPackage ipkg (sourceDeps index' ipkg) | (_,ipkg:_) <- InstalledPackageIndex.allPackagesBySourcePackageId index' ] where -- The InstalledPackageInfo only lists dependencies by the -- InstalledPackageId, which means we do not directly know the corresponding -- source dependency. The only way to find out is to lookup the -- InstalledPackageId to get the InstalledPackageInfo and look at its -- source PackageId. But if the package is broken because it depends on -- other packages that do not exist then we have a problem we cannot find -- the original source package id. Instead we make up a bogus package id. -- This should have the same effect since it should be a dependency on a -- nonexistent package. sourceDeps index ipkg = [ maybe (brokenPackageId depid) packageId mdep | let depids = InstalledPackageInfo.depends ipkg getpkg = InstalledPackageIndex.lookupInstalledPackageId index , (depid, mdep) <- zip depids (map getpkg depids) ] brokenPackageId (InstalledPackageId str) = PackageIdentifier (PackageName (str ++ "-broken")) (Version [] []) ------------------------------------------------------------------------ -- Reading the source package index -- -- | Read a repository index from disk, from the local files specified by -- a list of 'Repo's. -- -- All the 'SourcePackage's are marked as having come from the appropriate -- 'Repo'. -- -- This is a higher level wrapper used internally in cabal-install. -- getSourcePackages :: Verbosity -> [Repo] -> IO SourcePackageDb getSourcePackages verbosity repos = getSourcePackages' verbosity repos ReadPackageIndexLazyIO -- | Like 'getSourcePackages', but reads the package index strictly. Useful if -- you want to write to the package index after having read it. getSourcePackagesStrict :: Verbosity -> [Repo] -> IO SourcePackageDb getSourcePackagesStrict verbosity repos = getSourcePackages' verbosity repos ReadPackageIndexStrict -- | Common implementation used by getSourcePackages and -- getSourcePackagesStrict. getSourcePackages' :: Verbosity -> [Repo] -> ReadPackageIndexMode -> IO SourcePackageDb getSourcePackages' verbosity [] _mode = do warn verbosity $ "No remote package servers have been specified. Usually " ++ "you would have one specified in the config file." return SourcePackageDb { packageIndex = mempty, packagePreferences = mempty } getSourcePackages' verbosity repos mode = do info verbosity "Reading available packages..." pkgss <- mapM (\r -> readRepoIndex verbosity r mode) repos let (pkgs, prefs) = mconcat pkgss prefs' = Map.fromListWith intersectVersionRanges [ (name, range) | Dependency name range <- prefs ] _ <- evaluate pkgs _ <- evaluate prefs' return SourcePackageDb { packageIndex = pkgs, packagePreferences = prefs' } -- | Read a repository index from disk, from the local file specified by -- the 'Repo'. -- -- All the 'SourcePackage's are marked as having come from the given 'Repo'. -- -- This is a higher level wrapper used internally in cabal-install. 
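-- (Illustrative sketch for 'getSourcePackages'' above; not part of the
-- original sources.)  When several 'preferred-versions' entries name
-- the same package, 'Map.fromListWith intersectVersionRanges' collapses
-- them into a single range.  The package name and ranges below are
-- made up:
--
-- > import qualified Data.Map as Map
-- > import Distribution.Version
-- >
-- > prefs :: Map.Map String VersionRange
-- > prefs = Map.fromListWith intersectVersionRanges
-- >           [ ("bytestring", orLaterVersion (Version [0,10] []))
-- >           , ("bytestring", earlierVersion (Version [0,11] [])) ]
-- >
-- > -- Map.lookup "bytestring" prefs is the intersection of the two
-- > -- ranges, i.e. it admits exactly the versions >= 0.10 && < 0.11.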
-- readRepoIndex :: Verbosity -> Repo -> ReadPackageIndexMode -> IO (PackageIndex SourcePackage, [Dependency]) readRepoIndex verbosity repo mode = let indexFile = repoLocalDir repo "00-index.tar" cacheFile = repoLocalDir repo "00-index.cache" in handleNotFound $ do warnIfIndexIsOld =<< getIndexFileAge repo whenCacheOutOfDate indexFile cacheFile $ do updatePackageIndexCacheFile verbosity indexFile cacheFile readPackageIndexCacheFile mkAvailablePackage indexFile cacheFile mode where mkAvailablePackage pkgEntry = SourcePackage { packageInfoId = pkgid, packageDescription = packageDesc pkgEntry, packageSource = case pkgEntry of NormalPackage _ _ _ _ -> RepoTarballPackage repo pkgid Nothing BuildTreeRef _ _ _ path _ -> LocalUnpackedPackage path, packageDescrOverride = case pkgEntry of NormalPackage _ _ pkgtxt _ -> Just pkgtxt _ -> Nothing } where pkgid = packageId pkgEntry handleNotFound action = catchIO action $ \e -> if isDoesNotExistError e then do case repoKind repo of Left remoteRepo -> warn verbosity $ "The package list for '" ++ remoteRepoName remoteRepo ++ "' does not exist. Run 'cabal update' to download it." Right _localRepo -> warn verbosity $ "The package list for the local repo '" ++ repoLocalDir repo ++ "' is missing. The repo is invalid." return mempty else ioError e isOldThreshold = 15 --days warnIfIndexIsOld dt = do when (dt >= isOldThreshold) $ case repoKind repo of Left remoteRepo -> warn verbosity $ "The package list for '" ++ remoteRepoName remoteRepo ++ "' is " ++ showFFloat (Just 1) dt " days old.\nRun " ++ "'cabal update' to get the latest list of available packages." Right _localRepo -> return () -- | Return the age of the index file in days (as a Double). getIndexFileAge :: Repo -> IO Double getIndexFileAge repo = getFileAge $ repoLocalDir repo "00-index.tar" -- | It is not necessary to call this, as the cache will be updated when the -- index is read normally. However you can do the work earlier if you like. -- updateRepoIndexCache :: Verbosity -> Repo -> IO () updateRepoIndexCache verbosity repo = whenCacheOutOfDate indexFile cacheFile $ do updatePackageIndexCacheFile verbosity indexFile cacheFile where indexFile = repoLocalDir repo "00-index.tar" cacheFile = repoLocalDir repo "00-index.cache" whenCacheOutOfDate :: FilePath -> FilePath -> IO () -> IO () whenCacheOutOfDate origFile cacheFile action = do exists <- doesFileExist cacheFile if not exists then action else do origTime <- getModTime origFile cacheTime <- getModTime cacheFile when (origTime > cacheTime) action ------------------------------------------------------------------------ -- Reading the index file -- -- | An index entry is either a normal package, or a local build tree reference. data PackageEntry = NormalPackage PackageId GenericPackageDescription ByteString BlockNo | BuildTreeRef BuildTreeRefType PackageId GenericPackageDescription FilePath BlockNo -- | A build tree reference is either a link or a snapshot. 
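-- (Illustrative sketch for 'warnIfIndexIsOld' in 'readRepoIndex' above;
-- not part of the original sources.)  The index age is a 'Double'
-- measured in days and is rendered with one decimal place, e.g.:
--
-- > import Numeric (showFFloat)
-- >
-- > staleMessage :: Double -> String
-- > staleMessage dt = "The package list is "
-- >                ++ showFFloat (Just 1) dt " days old."
-- >
-- > -- staleMessage 16.2 == "The package list is 16.2 days old."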
data BuildTreeRefType = SnapshotRef | LinkRef deriving Eq refTypeFromTypeCode :: Tar.TypeCode -> BuildTreeRefType refTypeFromTypeCode t | t == Tar.buildTreeRefTypeCode = LinkRef | t == Tar.buildTreeSnapshotTypeCode = SnapshotRef | otherwise = error "Distribution.Client.IndexUtils.refTypeFromTypeCode: unknown type code" typeCodeFromRefType :: BuildTreeRefType -> Tar.TypeCode typeCodeFromRefType LinkRef = Tar.buildTreeRefTypeCode typeCodeFromRefType SnapshotRef = Tar.buildTreeSnapshotTypeCode type MkPackageEntry = IO PackageEntry instance Package PackageEntry where packageId (NormalPackage pkgid _ _ _) = pkgid packageId (BuildTreeRef _ pkgid _ _ _) = pkgid packageDesc :: PackageEntry -> GenericPackageDescription packageDesc (NormalPackage _ descr _ _) = descr packageDesc (BuildTreeRef _ _ descr _ _) = descr -- | Read a compressed \"00-index.tar.gz\" file into a 'PackageIndex'. -- -- This is supposed to be an \"all in one\" way to easily get at the info in -- the Hackage package index. -- -- It takes a function to map a 'GenericPackageDescription' into any more -- specific instance of 'Package' that you might want to use. In the simple -- case you can just use @\_ p -> p@ here. -- readPackageIndexFile :: Package pkg => (PackageEntry -> pkg) -> FilePath -> IO (PackageIndex pkg, [Dependency]) readPackageIndexFile mkPkg indexFile = do (mkPkgs, prefs) <- either fail return . parsePackageIndex . maybeDecompress =<< BS.readFile indexFile pkgEntries <- sequence mkPkgs pkgs <- evaluate $ PackageIndex.fromList (map mkPkg pkgEntries) return (pkgs, prefs) -- | Parse an uncompressed \"00-index.tar\" repository index file represented -- as a 'ByteString'. -- parsePackageIndex :: ByteString -> Either String ([MkPackageEntry], [Dependency]) parsePackageIndex = accum 0 [] [] . Tar.read where accum blockNo pkgs prefs es = case es of Tar.Fail err -> Left err Tar.Done -> Right (reverse pkgs, reverse prefs) Tar.Next e es' -> accum blockNo' pkgs' prefs' es' where (pkgs', prefs') = extract blockNo pkgs prefs e blockNo' = blockNo + Tar.entrySizeInBlocks e extract blockNo pkgs prefs entry = fromMaybe (pkgs, prefs) $ tryExtractPkg `mplus` tryExtractPrefs where tryExtractPkg = do mkPkgEntry <- extractPkg entry blockNo return (mkPkgEntry:pkgs, prefs) tryExtractPrefs = do prefs' <- extractPrefs entry return (pkgs, prefs'++prefs) extractPkg :: Tar.Entry -> BlockNo -> Maybe MkPackageEntry extractPkg entry blockNo = case Tar.entryContent entry of Tar.NormalFile content _ | takeExtension fileName == ".cabal" -> case splitDirectories (normalise fileName) of [pkgname,vers,_] -> case simpleParse vers of Just ver -> Just $ return (NormalPackage pkgid descr content blockNo) where pkgid = PackageIdentifier (PackageName pkgname) ver parsed = parsePackageDescription . fromUTF8 . BS.Char8.unpack $ content descr = case parsed of ParseOk _ d -> d _ -> error $ "Couldn't read cabal file " ++ show fileName _ -> Nothing _ -> Nothing Tar.OtherEntryType typeCode content _ | Tar.isBuildTreeRefTypeCode typeCode -> Just $ do let path = byteStringToFilePath content err = "Error reading package index." 
cabalFile <- tryFindAddSourcePackageDesc path err descr <- PackageDesc.Parse.readPackageDescription normal cabalFile return $ BuildTreeRef (refTypeFromTypeCode typeCode) (packageId descr) descr path blockNo _ -> Nothing where fileName = Tar.entryPath entry extractPrefs :: Tar.Entry -> Maybe [Dependency] extractPrefs entry = case Tar.entryContent entry of Tar.NormalFile content _ | takeFileName (Tar.entryPath entry) == "preferred-versions" -> Just . parsePreferredVersions . BS.Char8.unpack $ content _ -> Nothing parsePreferredVersions :: String -> [Dependency] parsePreferredVersions = mapMaybe simpleParse . filter (not . isPrefixOf "--") . lines ------------------------------------------------------------------------ -- Reading and updating the index cache -- updatePackageIndexCacheFile :: Verbosity -> FilePath -> FilePath -> IO () updatePackageIndexCacheFile verbosity indexFile cacheFile = do info verbosity "Updating the index cache file..." (mkPkgs, prefs) <- either fail return . parsePackageIndex . maybeDecompress =<< BS.readFile indexFile pkgEntries <- sequence mkPkgs let cache = mkCache pkgEntries prefs writeFile cacheFile (showIndexCache cache) where mkCache pkgs prefs = [ CachePreference pref | pref <- prefs ] ++ [ CachePackageId pkgid blockNo | (NormalPackage pkgid _ _ blockNo) <- pkgs ] ++ [ CacheBuildTreeRef refType blockNo | (BuildTreeRef refType _ _ _ blockNo) <- pkgs] data ReadPackageIndexMode = ReadPackageIndexStrict | ReadPackageIndexLazyIO readPackageIndexCacheFile :: Package pkg => (PackageEntry -> pkg) -> FilePath -> FilePath -> ReadPackageIndexMode -> IO (PackageIndex pkg, [Dependency]) readPackageIndexCacheFile mkPkg indexFile cacheFile mode = do cache <- liftM readIndexCache (BSS.readFile cacheFile) myWithFile indexFile ReadMode $ \indexHnd -> packageIndexFromCache mkPkg indexHnd cache mode where myWithFile f m act = case mode of ReadPackageIndexStrict -> withFile f m act ReadPackageIndexLazyIO -> do indexHnd <- openFile f m act indexHnd packageIndexFromCache :: Package pkg => (PackageEntry -> pkg) -> Handle -> [IndexCacheEntry] -> ReadPackageIndexMode -> IO (PackageIndex pkg, [Dependency]) packageIndexFromCache mkPkg hnd entrs mode = accum mempty [] entrs where accum srcpkgs prefs [] = do -- Have to reverse entries, since in a tar file, later entries mask -- earlier ones, and PackageIndex.fromList does the same, but we -- accumulate the list of entries in reverse order, so need to reverse. pkgIndex <- evaluate $ PackageIndex.fromList (reverse srcpkgs) return (pkgIndex, prefs) accum srcpkgs prefs (CachePackageId pkgid blockno : entries) = do -- Given the cache entry, make a package index entry. -- The magic here is that we use lazy IO to read the .cabal file -- from the index tarball if it turns out that we need it. -- Most of the time we only need the package id. ~(pkg, pkgtxt) <- unsafeInterleaveIO $ do pkgtxt <- getEntryContent blockno pkg <- readPackageDescription pkgtxt return (pkg, pkgtxt) let srcpkg = case mode of ReadPackageIndexLazyIO -> mkPkg (NormalPackage pkgid pkg pkgtxt blockno) ReadPackageIndexStrict -> pkg `seq` pkgtxt `seq` mkPkg (NormalPackage pkgid pkg pkgtxt blockno) accum (srcpkg:srcpkgs) prefs entries accum srcpkgs prefs (CacheBuildTreeRef refType blockno : entries) = do -- We have to read the .cabal file eagerly here because we can't cache the -- package id for build tree references - the user might edit the .cabal -- file after the reference was added to the index. path <- liftM byteStringToFilePath . 
getEntryContent $ blockno pkg <- do let err = "Error reading package index from cache." file <- tryFindAddSourcePackageDesc path err PackageDesc.Parse.readPackageDescription normal file let srcpkg = mkPkg (BuildTreeRef refType (packageId pkg) pkg path blockno) accum (srcpkg:srcpkgs) prefs entries accum srcpkgs prefs (CachePreference pref : entries) = accum srcpkgs (pref:prefs) entries getEntryContent :: BlockNo -> IO ByteString getEntryContent blockno = do hSeek hnd AbsoluteSeek (fromIntegral (blockno * 512)) header <- BS.hGet hnd 512 size <- getEntrySize header BS.hGet hnd (fromIntegral size) getEntrySize :: ByteString -> IO Tar.FileSize getEntrySize header = case Tar.read header of Tar.Next e _ -> case Tar.entryContent e of Tar.NormalFile _ size -> return size Tar.OtherEntryType typecode _ size | Tar.isBuildTreeRefTypeCode typecode -> return size _ -> interror "unexpected tar entry type" _ -> interror "could not read tar file entry" readPackageDescription :: ByteString -> IO GenericPackageDescription readPackageDescription content = case parsePackageDescription . fromUTF8 . BS.Char8.unpack $ content of ParseOk _ d -> return d _ -> interror "failed to parse .cabal file" interror msg = die $ "internal error when reading package index: " ++ msg ++ "The package index or index cache is probably " ++ "corrupt. Running cabal update might fix it." ------------------------------------------------------------------------ -- Index cache data structure -- -- | Tar files are block structured with 512 byte blocks. Every header and file -- content starts on a block boundary. -- type BlockNo = Int data IndexCacheEntry = CachePackageId PackageId BlockNo | CacheBuildTreeRef BuildTreeRefType BlockNo | CachePreference Dependency deriving (Eq) packageKey, blocknoKey, buildTreeRefKey, preferredVersionKey :: String packageKey = "pkg:" blocknoKey = "b#" buildTreeRefKey = "build-tree-ref:" preferredVersionKey = "pref-ver:" readIndexCacheEntry :: BSS.ByteString -> Maybe IndexCacheEntry readIndexCacheEntry = \line -> case BSS.words line of [key, pkgnamestr, pkgverstr, sep, blocknostr] | key == BSS.pack packageKey && sep == BSS.pack blocknoKey -> case (parseName pkgnamestr, parseVer pkgverstr [], parseBlockNo blocknostr) of (Just pkgname, Just pkgver, Just blockno) -> Just (CachePackageId (PackageIdentifier pkgname pkgver) blockno) _ -> Nothing [key, typecodestr, blocknostr] | key == BSS.pack buildTreeRefKey -> case (parseRefType typecodestr, parseBlockNo blocknostr) of (Just refType, Just blockno) -> Just (CacheBuildTreeRef refType blockno) _ -> Nothing (key: remainder) | key == BSS.pack preferredVersionKey -> fmap CachePreference (simpleParse (BSS.unpack (BSS.unwords remainder))) _ -> Nothing where parseName str | BSS.all (\c -> isAlphaNum c || c == '-') str = Just (PackageName (BSS.unpack str)) | otherwise = Nothing parseVer str vs = case BSS.readInt str of Nothing -> Nothing Just (v, str') -> case BSS.uncons str' of Just ('.', str'') -> parseVer str'' (v:vs) Just _ -> Nothing Nothing -> Just (Version (reverse (v:vs)) []) parseBlockNo str = case BSS.readInt str of Just (blockno, remainder) | BSS.null remainder -> Just blockno _ -> Nothing parseRefType str = case BSS.uncons str of Just (typeCode, remainder) | BSS.null remainder && Tar.isBuildTreeRefTypeCode typeCode -> Just (refTypeFromTypeCode typeCode) _ -> Nothing showIndexCacheEntry :: IndexCacheEntry -> String showIndexCacheEntry entry = unwords $ case entry of CachePackageId pkgid b -> [ packageKey , display (packageName pkgid) , display (packageVersion 
pkgid) , blocknoKey , show b ] CacheBuildTreeRef t b -> [ buildTreeRefKey , [typeCodeFromRefType t] , show b ] CachePreference dep -> [ preferredVersionKey , display dep ] readIndexCache :: BSS.ByteString -> [IndexCacheEntry] readIndexCache = mapMaybe readIndexCacheEntry . BSS.lines showIndexCache :: [IndexCacheEntry] -> String showIndexCache = unlines . map showIndexCacheEntry cabal-install-1.22.6.0/Distribution/Client/List.hs0000644000000000000000000005726012540225656020021 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.List -- Copyright : (c) David Himmelstrup 2005 -- Duncan Coutts 2008-2011 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- -- Search for and print information about packages ----------------------------------------------------------------------------- module Distribution.Client.List ( list, info ) where import Distribution.Package ( PackageName(..), Package(..), packageName, packageVersion , Dependency(..), simplifyDependency ) import Distribution.ModuleName (ModuleName) import Distribution.License (License) import qualified Distribution.InstalledPackageInfo as Installed import qualified Distribution.PackageDescription as Source import Distribution.PackageDescription ( Flag(..), FlagName(..) ) import Distribution.PackageDescription.Configuration ( flattenPackageDescription ) import Distribution.Simple.Compiler ( Compiler, PackageDBStack ) import Distribution.Simple.Program (ProgramConfiguration) import Distribution.Simple.Utils ( equating, comparing, die, notice ) import Distribution.Simple.Setup (fromFlag) import Distribution.Simple.PackageIndex (InstalledPackageIndex) import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex import qualified Distribution.Client.PackageIndex as PackageIndex import Distribution.Version ( Version(..), VersionRange, withinRange, anyVersion , intersectVersionRanges, simplifyVersionRange ) import Distribution.Verbosity (Verbosity) import Distribution.Text ( Text(disp), display ) import Distribution.Client.Types ( SourcePackage(..), Repo, SourcePackageDb(..) ) import Distribution.Client.Dependency.Types ( PackageConstraint(..), ExtDependency(..) ) import Distribution.Client.Targets ( UserTarget, resolveUserTargets, PackageSpecifier(..) ) import Distribution.Client.Setup ( GlobalFlags(..), ListFlags(..), InfoFlags(..) ) import Distribution.Client.Utils ( mergeBy, MergeResult(..) ) import Distribution.Client.IndexUtils as IndexUtils ( getSourcePackages, getInstalledPackages ) import Distribution.Client.FetchUtils ( isFetched ) import Data.List ( sortBy, groupBy, sort, nub, intersperse, maximumBy, partition ) import Data.Maybe ( listToMaybe, fromJust, fromMaybe, isJust ) import qualified Data.Map as Map import Data.Tree as Tree import Control.Monad ( MonadPlus(mplus), join ) import Control.Exception ( assert ) import Text.PrettyPrint as Disp import System.Directory ( doesDirectoryExist ) -- | Return a list of packages matching given search strings. 
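-- (Illustrative sketch for the index cache serialised by
-- 'showIndexCacheEntry' above; not part of the original sources.)  The
-- 00-index.cache file is plain text with one entry per line; the
-- package names, versions and block numbers below are made up:
--
-- > pkg: HTTP 4000.2.19 b# 2050
-- > pkg: zlib 0.5.4.2 b# 4075
-- > pref-ver: HTTP >= 4000.2
--
-- Build tree references are stored as a 'build-tree-ref:' line carrying
-- the single type-code character and the block number.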
getPkgList :: Verbosity -> PackageDBStack -> [Repo] -> Compiler -> ProgramConfiguration -> ListFlags -> [String] -> IO [PackageDisplayInfo] getPkgList verbosity packageDBs repos comp conf listFlags pats = do installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf sourcePkgDb <- getSourcePackages verbosity repos let sourcePkgIndex = packageIndex sourcePkgDb prefs name = fromMaybe anyVersion (Map.lookup name (packagePreferences sourcePkgDb)) pkgsInfo :: [(PackageName, [Installed.InstalledPackageInfo], [SourcePackage])] pkgsInfo -- gather info for all packages | null pats = mergePackages (InstalledPackageIndex.allPackages installedPkgIndex) ( PackageIndex.allPackages sourcePkgIndex) -- gather info for packages matching search term | otherwise = pkgsInfoMatching pkgsInfoMatching :: [(PackageName, [Installed.InstalledPackageInfo], [SourcePackage])] pkgsInfoMatching = let matchingInstalled = matchingPackages InstalledPackageIndex.searchByNameSubstring installedPkgIndex matchingSource = matchingPackages (\ idx n -> concatMap snd (PackageIndex.searchByNameSubstring idx n)) sourcePkgIndex in mergePackages matchingInstalled matchingSource matches :: [PackageDisplayInfo] matches = [ mergePackageInfo pref installedPkgs sourcePkgs selectedPkg False | (pkgname, installedPkgs, sourcePkgs) <- pkgsInfo , not onlyInstalled || not (null installedPkgs) , let pref = prefs pkgname selectedPkg = latestWithPref pref sourcePkgs ] return matches where onlyInstalled = fromFlag (listInstalled listFlags) matchingPackages search index = [ pkg | pat <- pats , pkg <- search index pat ] -- | Show information about packages. list :: Verbosity -> PackageDBStack -> [Repo] -> Compiler -> ProgramConfiguration -> ListFlags -> [String] -> IO () list verbosity packageDBs repos comp conf listFlags pats = do matches <- getPkgList verbosity packageDBs repos comp conf listFlags pats if simpleOutput then putStr $ unlines [ display (pkgName pkg) ++ " " ++ display version | pkg <- matches , version <- if onlyInstalled then installedVersions pkg else nub . sort $ installedVersions pkg ++ sourceVersions pkg ] -- Note: this only works because for 'list', one cannot currently -- specify any version constraints, so listing all installed -- and source ones works. else if null matches then notice verbosity "No matches found." else putStr $ unlines (map showPackageSummaryInfo matches) where onlyInstalled = fromFlag (listInstalled listFlags) simpleOutput = fromFlag (listSimpleOutput listFlags) info :: Verbosity -> PackageDBStack -> [Repo] -> Compiler -> ProgramConfiguration -> GlobalFlags -> InfoFlags -> [UserTarget] -> IO () info verbosity _ _ _ _ _ _ [] = notice verbosity "No packages requested. Nothing to do." info verbosity packageDBs repos comp conf globalFlags _listFlags userTargets = do installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf sourcePkgDb <- getSourcePackages verbosity repos let sourcePkgIndex = packageIndex sourcePkgDb prefs name = fromMaybe anyVersion (Map.lookup name (packagePreferences sourcePkgDb)) -- Users may specify names of packages that are only installed, not -- just available source packages, so we must resolve targets using -- the combination of installed and source packages. 
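-- (Illustrative note, not in the original source.)  A user may ask for
-- a package that is installed but not available from any configured
-- repository, so the index used for target resolution is built from the
-- package ids of both indexes.  A minimal sketch of that combination,
-- with the ids supplied by the caller:
--
-- > import Distribution.Package (PackageId)
-- > import qualified Distribution.Client.PackageIndex as PackageIndex
-- >
-- > combinedIds :: [PackageId] -> [PackageId]
-- >             -> PackageIndex.PackageIndex PackageId
-- > combinedIds installedIds sourceIds =
-- >   PackageIndex.fromList (installedIds ++ sourceIds)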
let sourcePkgs' = PackageIndex.fromList $ map packageId (InstalledPackageIndex.allPackages installedPkgIndex) ++ map packageId (PackageIndex.allPackages sourcePkgIndex) pkgSpecifiers <- resolveUserTargets verbosity (fromFlag $ globalWorldFile globalFlags) sourcePkgs' userTargets pkgsinfo <- sequence [ do pkginfo <- either die return $ gatherPkgInfo prefs installedPkgIndex sourcePkgIndex pkgSpecifier updateFileSystemPackageDetails pkginfo | pkgSpecifier <- pkgSpecifiers ] putStr $ unlines (map showPackageDetailedInfo pkgsinfo) where gatherPkgInfo :: (PackageName -> VersionRange) -> InstalledPackageIndex -> PackageIndex.PackageIndex SourcePackage -> PackageSpecifier SourcePackage -> Either String PackageDisplayInfo gatherPkgInfo prefs installedPkgIndex sourcePkgIndex (NamedPackage name constraints) | null (selectedInstalledPkgs) && null (selectedSourcePkgs) = Left $ "There is no available version of " ++ display name ++ " that satisfies " ++ display (simplifyVersionRange verConstraint) | otherwise = Right $ mergePackageInfo pref installedPkgs sourcePkgs selectedSourcePkg' showPkgVersion where (pref, installedPkgs, sourcePkgs) = sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex selectedInstalledPkgs = InstalledPackageIndex.lookupDependency installedPkgIndex (Dependency name verConstraint) selectedSourcePkgs = PackageIndex.lookupDependency sourcePkgIndex (Dependency name verConstraint) selectedSourcePkg' = latestWithPref pref selectedSourcePkgs -- display a specific package version if the user -- supplied a non-trivial version constraint showPkgVersion = not (null verConstraints) verConstraint = foldr intersectVersionRanges anyVersion verConstraints verConstraints = [ vr | PackageConstraintVersion _ vr <- constraints ] gatherPkgInfo prefs installedPkgIndex sourcePkgIndex (SpecificSourcePackage pkg) = Right $ mergePackageInfo pref installedPkgs sourcePkgs selectedPkg True where name = packageName pkg selectedPkg = Just pkg (pref, installedPkgs, sourcePkgs) = sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex sourcePkgsInfo :: (PackageName -> VersionRange) -> PackageName -> InstalledPackageIndex -> PackageIndex.PackageIndex SourcePackage -> (VersionRange, [Installed.InstalledPackageInfo], [SourcePackage]) sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex = (pref, installedPkgs, sourcePkgs) where pref = prefs name installedPkgs = concatMap snd (InstalledPackageIndex.lookupPackageName installedPkgIndex name) sourcePkgs = PackageIndex.lookupPackageName sourcePkgIndex name -- | The info that we can display for each package. It is information per -- package name and covers all installed and available versions. 
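-- (Illustrative sketch for 'gatherPkgInfo' above; not part of the
-- original sources.)  The user-supplied version constraints collapse
-- into one range with 'foldr intersectVersionRanges anyVersion', and
-- 'anyVersion' is what remains when no constraint was given at all.
-- The ranges below are made up:
--
-- > import Distribution.Version
-- >
-- > combined :: VersionRange
-- > combined = foldr intersectVersionRanges anyVersion
-- >              [ orLaterVersion (Version [1,2] [])
-- >              , earlierVersion (Version [1,5] []) ]
-- >
-- > -- 'combined' admits exactly the versions >= 1.2 && < 1.5.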
-- data PackageDisplayInfo = PackageDisplayInfo { pkgName :: PackageName, selectedVersion :: Maybe Version, selectedSourcePkg :: Maybe SourcePackage, installedVersions :: [Version], sourceVersions :: [Version], preferredVersions :: VersionRange, homepage :: String, bugReports :: String, sourceRepo :: String, synopsis :: String, description :: String, category :: String, license :: License, author :: String, maintainer :: String, dependencies :: [ExtDependency], flags :: [Flag], hasLib :: Bool, hasExe :: Bool, executables :: [String], modules :: [ModuleName], haddockHtml :: FilePath, haveTarball :: Bool } showPackageSummaryInfo :: PackageDisplayInfo -> String showPackageSummaryInfo pkginfo = renderStyle (style {lineLength = 80, ribbonsPerLine = 1}) $ char '*' <+> disp (pkgName pkginfo) $+$ (nest 4 $ vcat [ maybeShow (synopsis pkginfo) "Synopsis:" reflowParagraphs , text "Default available version:" <+> case selectedSourcePkg pkginfo of Nothing -> text "[ Not available from any configured repository ]" Just pkg -> disp (packageVersion pkg) , text "Installed versions:" <+> case installedVersions pkginfo of [] | hasLib pkginfo -> text "[ Not installed ]" | otherwise -> text "[ Unknown ]" versions -> dispTopVersions 4 (preferredVersions pkginfo) versions , maybeShow (homepage pkginfo) "Homepage:" text , text "License: " <+> text (display (license pkginfo)) ]) $+$ text "" where maybeShow [] _ _ = empty maybeShow l s f = text s <+> (f l) showPackageDetailedInfo :: PackageDisplayInfo -> String showPackageDetailedInfo pkginfo = renderStyle (style {lineLength = 80, ribbonsPerLine = 1}) $ char '*' <+> disp (pkgName pkginfo) <> maybe empty (\v -> char '-' <> disp v) (selectedVersion pkginfo) <+> text (replicate (16 - length (display (pkgName pkginfo))) ' ') <> parens pkgkind $+$ (nest 4 $ vcat [ entry "Synopsis" synopsis hideIfNull reflowParagraphs , entry "Versions available" sourceVersions (altText null "[ Not available from server ]") (dispTopVersions 9 (preferredVersions pkginfo)) , entry "Versions installed" installedVersions (altText null (if hasLib pkginfo then "[ Not installed ]" else "[ Unknown ]")) (dispTopVersions 4 (preferredVersions pkginfo)) , entry "Homepage" homepage orNotSpecified text , entry "Bug reports" bugReports orNotSpecified text , entry "Description" description hideIfNull reflowParagraphs , entry "Category" category hideIfNull text , entry "License" license alwaysShow disp , entry "Author" author hideIfNull reflowLines , entry "Maintainer" maintainer hideIfNull reflowLines , entry "Source repo" sourceRepo orNotSpecified text , entry "Executables" executables hideIfNull (commaSep text) , entry "Flags" flags hideIfNull (commaSep dispFlag) , entry "Dependencies" dependencies hideIfNull (commaSep disp) , entry "Documentation" haddockHtml showIfInstalled text , entry "Cached" haveTarball alwaysShow dispYesNo , if not (hasLib pkginfo) then empty else text "Modules:" $+$ nest 4 (vcat (map disp . sort . 
modules $ pkginfo)) ]) $+$ text "" where entry fname field cond format = case cond (field pkginfo) of Nothing -> label <+> format (field pkginfo) Just Nothing -> empty Just (Just other) -> label <+> text other where label = text fname <> char ':' <> padding padding = text (replicate (13 - length fname ) ' ') normal = Nothing hide = Just Nothing replace msg = Just (Just msg) alwaysShow = const normal hideIfNull v = if null v then hide else normal showIfInstalled v | not isInstalled = hide | null v = replace "[ Not installed ]" | otherwise = normal altText nul msg v = if nul v then replace msg else normal orNotSpecified = altText null "[ Not specified ]" commaSep f = Disp.fsep . Disp.punctuate (Disp.char ',') . map f dispFlag f = case flagName f of FlagName n -> text n dispYesNo True = text "Yes" dispYesNo False = text "No" isInstalled = not (null (installedVersions pkginfo)) hasExes = length (executables pkginfo) >= 2 --TODO: exclude non-buildable exes pkgkind | hasLib pkginfo && hasExes = text "programs and library" | hasLib pkginfo && hasExe pkginfo = text "program and library" | hasLib pkginfo = text "library" | hasExes = text "programs" | hasExe pkginfo = text "program" | otherwise = empty reflowParagraphs :: String -> Doc reflowParagraphs = vcat . intersperse (text "") -- re-insert blank lines . map (fsep . map text . concatMap words) -- reflow paragraphs . filter (/= [""]) . groupBy (\x y -> "" `notElem` [x,y]) -- break on blank lines . lines reflowLines :: String -> Doc reflowLines = vcat . map text . lines -- | We get the 'PackageDisplayInfo' by combining the info for the installed -- and available versions of a package. -- -- * We're building info about a various versions of a single named package so -- the input package info records are all supposed to refer to the same -- package name. -- mergePackageInfo :: VersionRange -> [Installed.InstalledPackageInfo] -> [SourcePackage] -> Maybe SourcePackage -> Bool -> PackageDisplayInfo mergePackageInfo versionPref installedPkgs sourcePkgs selectedPkg showVer = assert (length installedPkgs + length sourcePkgs > 0) $ PackageDisplayInfo { pkgName = combine packageName source packageName installed, selectedVersion = if showVer then fmap packageVersion selectedPkg else Nothing, selectedSourcePkg = sourceSelected, installedVersions = map packageVersion installedPkgs, sourceVersions = map packageVersion sourcePkgs, preferredVersions = versionPref, license = combine Source.license source Installed.license installed, maintainer = combine Source.maintainer source Installed.maintainer installed, author = combine Source.author source Installed.author installed, homepage = combine Source.homepage source Installed.homepage installed, bugReports = maybe "" Source.bugReports source, sourceRepo = fromMaybe "" . join . fmap (uncons Nothing Source.repoLocation . sortBy (comparing Source.repoKind) . Source.sourceRepos) $ source, --TODO: installed package info is missing synopsis synopsis = maybe "" Source.synopsis source, description = combine Source.description source Installed.description installed, category = combine Source.category source Installed.category installed, flags = maybe [] Source.genPackageFlags sourceGeneric, hasLib = isJust installed || fromMaybe False (fmap (isJust . Source.condLibrary) sourceGeneric), hasExe = fromMaybe False (fmap (not . null . Source.condExecutables) sourceGeneric), executables = map fst (maybe [] Source.condExecutables sourceGeneric), modules = combine (map Installed.exposedName . 
Installed.exposedModules) installed (maybe [] getListOfExposedModules . Source.library) source, dependencies = combine (map (SourceDependency . simplifyDependency) . Source.buildDepends) source (map InstalledDependency . Installed.depends) installed, haddockHtml = fromMaybe "" . join . fmap (listToMaybe . Installed.haddockHTMLs) $ installed, haveTarball = False } where combine f x g y = fromJust (fmap f x `mplus` fmap g y) installed :: Maybe Installed.InstalledPackageInfo installed = latestWithPref versionPref installedPkgs getListOfExposedModules lib = Source.exposedModules lib ++ map Source.moduleReexportName (Source.reexportedModules lib) sourceSelected | isJust selectedPkg = selectedPkg | otherwise = latestWithPref versionPref sourcePkgs sourceGeneric = fmap packageDescription sourceSelected source = fmap flattenPackageDescription sourceGeneric uncons :: b -> (a -> b) -> [a] -> b uncons z _ [] = z uncons _ f (x:_) = f x -- | Not all the info is pure. We have to check if the docs really are -- installed, because the registered package info lies. Similarly we have to -- check if the tarball has indeed been fetched. -- updateFileSystemPackageDetails :: PackageDisplayInfo -> IO PackageDisplayInfo updateFileSystemPackageDetails pkginfo = do fetched <- maybe (return False) (isFetched . packageSource) (selectedSourcePkg pkginfo) docsExist <- doesDirectoryExist (haddockHtml pkginfo) return pkginfo { haveTarball = fetched, haddockHtml = if docsExist then haddockHtml pkginfo else "" } latestWithPref :: Package pkg => VersionRange -> [pkg] -> Maybe pkg latestWithPref _ [] = Nothing latestWithPref pref pkgs = Just (maximumBy (comparing prefThenVersion) pkgs) where prefThenVersion pkg = let ver = packageVersion pkg in (withinRange ver pref, ver) -- | Rearrange installed and source packages into groups referring to the -- same package by name. In the result pairs, the lists are guaranteed to not -- both be empty. -- mergePackages :: [Installed.InstalledPackageInfo] -> [SourcePackage] -> [( PackageName , [Installed.InstalledPackageInfo] , [SourcePackage] )] mergePackages installedPkgs sourcePkgs = map collect $ mergeBy (\i a -> fst i `compare` fst a) (groupOn packageName installedPkgs) (groupOn packageName sourcePkgs) where collect (OnlyInLeft (name,is) ) = (name, is, []) collect ( InBoth (_,is) (name,as)) = (name, is, as) collect (OnlyInRight (name,as)) = (name, [], as) groupOn :: Ord key => (a -> key) -> [a] -> [(key,[a])] groupOn key = map (\xs -> (key (head xs), xs)) . groupBy (equating key) . sortBy (comparing key) dispTopVersions :: Int -> VersionRange -> [Version] -> Doc dispTopVersions n pref vs = (Disp.fsep . Disp.punctuate (Disp.char ',') . map (\ver -> if ispref ver then disp ver else parens (disp ver)) . sort . take n . interestingVersions ispref $ vs) <+> trailingMessage where ispref ver = withinRange ver pref extra = length vs - n trailingMessage | extra <= 0 = Disp.empty | otherwise = Disp.parens $ Disp.text "and" <+> Disp.int (length vs - n) <+> if extra == 1 then Disp.text "other" else Disp.text "others" -- | Reorder a bunch of versions to put the most interesting / significant -- versions first. A preferred version range is taken into account. -- -- This may be used in a user interface to select a small number of versions -- to present to the user, e.g. -- -- > let selectVersions = sort . take 5 . interestingVersions pref -- interestingVersions :: (Version -> Bool) -> [Version] -> [Version] interestingVersions pref = map ((\ns -> Version ns []) . fst) . filter snd . concat . 
Tree.levels . swizzleTree . reorderTree (\(Node (v,_) _) -> pref (Version v [])) . reverseTree . mkTree . map versionBranch where swizzleTree = unfoldTree (spine []) where spine ts' (Node x []) = (x, ts') spine ts' (Node x (t:ts)) = spine (Node x ts:ts') t reorderTree _ (Node x []) = Node x [] reorderTree p (Node x ts) = Node x (ts' ++ ts'') where (ts',ts'') = partition p (map (reorderTree p) ts) reverseTree (Node x cs) = Node x (reverse (map reverseTree cs)) mkTree xs = unfoldTree step (False, [], xs) where step (node,ns,vs) = ( (reverse ns, node) , [ (any null vs', n:ns, filter (not . null) vs') | (n, vs') <- groups vs ] ) groups = map (\g -> (head (head g), map tail g)) . groupBy (equating head) cabal-install-1.22.6.0/Distribution/Client/Sandbox/0000755000000000000000000000000012540225656020136 5ustar0000000000000000cabal-install-1.22.6.0/Distribution/Client/Sandbox/Index.hs0000644000000000000000000002431612540225656021547 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Sandbox.Index -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Querying and modifying local build tree references in the package index. ----------------------------------------------------------------------------- module Distribution.Client.Sandbox.Index ( createEmpty, addBuildTreeRefs, removeBuildTreeRefs, ListIgnoredBuildTreeRefs(..), RefTypesToList(..), listBuildTreeRefs, validateIndexPath, defaultIndexFileName ) where import qualified Distribution.Client.Tar as Tar import Distribution.Client.IndexUtils ( BuildTreeRefType(..) , refTypeFromTypeCode , typeCodeFromRefType , updatePackageIndexCacheFile , getSourcePackagesStrict ) import Distribution.Client.PackageIndex ( allPackages ) import Distribution.Client.Types ( Repo(..), LocalRepo(..) , SourcePackageDb(..) , SourcePackage(..), PackageLocation(..) ) import Distribution.Client.Utils ( byteStringToFilePath, filePathToByteString , makeAbsoluteToCwd, tryCanonicalizePath , canonicalizePathNoThrow , tryFindAddSourcePackageDesc ) import Distribution.Simple.Utils ( die, debug ) import Distribution.Verbosity ( Verbosity ) import qualified Data.ByteString.Lazy as BS import Control.Exception ( evaluate ) import Control.Monad ( liftM, unless ) import Data.List ( (\\), intersect, nub ) import Data.Maybe ( catMaybes ) import System.Directory ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist, renameFile ) import System.FilePath ( (), (<.>), takeDirectory, takeExtension , replaceExtension ) import System.IO ( IOMode(..), SeekMode(..) , hSeek, withBinaryFile ) -- | A reference to a local build tree. data BuildTreeRef = BuildTreeRef { buildTreeRefType :: !BuildTreeRefType, buildTreePath :: !FilePath } defaultIndexFileName :: FilePath defaultIndexFileName = "00-index.tar" -- | Given a path, ensure that it refers to a local build tree. buildTreeRefFromPath :: BuildTreeRefType -> FilePath -> IO (Maybe BuildTreeRef) buildTreeRefFromPath refType dir = do dirExists <- doesDirectoryExist dir unless dirExists $ die $ "directory '" ++ dir ++ "' does not exist" _ <- tryFindAddSourcePackageDesc dir "Error adding source reference." return . Just $ BuildTreeRef refType dir -- | Given a tar archive entry, try to parse it as a local build tree reference. readBuildTreeRef :: Tar.Entry -> Maybe BuildTreeRef readBuildTreeRef entry = case Tar.entryContent entry of (Tar.OtherEntryType typeCode bs size) | (Tar.isBuildTreeRefTypeCode typeCode) && (size == BS.length bs) -> Just $! 
BuildTreeRef (refTypeFromTypeCode typeCode) (byteStringToFilePath bs) | otherwise -> Nothing _ -> Nothing -- | Given a sequence of tar archive entries, extract all references to local -- build trees. readBuildTreeRefs :: Tar.Entries -> [BuildTreeRef] readBuildTreeRefs = catMaybes . Tar.foldrEntries (\e r -> readBuildTreeRef e : r) [] error -- | Given a path to a tar archive, extract all references to local build trees. readBuildTreeRefsFromFile :: FilePath -> IO [BuildTreeRef] readBuildTreeRefsFromFile = liftM (readBuildTreeRefs . Tar.read) . BS.readFile -- | Given a local build tree ref, serialise it to a tar archive entry. writeBuildTreeRef :: BuildTreeRef -> Tar.Entry writeBuildTreeRef (BuildTreeRef refType path) = Tar.simpleEntry tarPath content where bs = filePathToByteString path -- Provide a filename for tools that treat custom entries as ordinary files. tarPath' = "local-build-tree-reference" -- fromRight can't fail because the path is shorter than 255 characters. tarPath = fromRight $ Tar.toTarPath True tarPath' content = Tar.OtherEntryType (typeCodeFromRefType refType) bs (BS.length bs) -- TODO: Move this to D.C.Utils? fromRight (Left err) = error err fromRight (Right a) = a -- | Check that the provided path is either an existing directory, or a tar -- archive in an existing directory. validateIndexPath :: FilePath -> IO FilePath validateIndexPath path' = do path <- makeAbsoluteToCwd path' if (== ".tar") . takeExtension $ path then return path else do dirExists <- doesDirectoryExist path unless dirExists $ die $ "directory does not exist: '" ++ path ++ "'" return $ path defaultIndexFileName -- | Create an empty index file. createEmpty :: Verbosity -> FilePath -> IO () createEmpty verbosity path = do indexExists <- doesFileExist path if indexExists then debug verbosity $ "Package index already exists: " ++ path else do debug verbosity $ "Creating the index file '" ++ path ++ "'" createDirectoryIfMissing True (takeDirectory path) -- Equivalent to 'tar cvf empty.tar --files-from /dev/null'. let zeros = BS.replicate (512*20) 0 BS.writeFile path zeros -- | Add given local build tree references to the index. addBuildTreeRefs :: Verbosity -> FilePath -> [FilePath] -> BuildTreeRefType -> IO () addBuildTreeRefs _ _ [] _ = error "Distribution.Client.Sandbox.Index.addBuildTreeRefs: unexpected" addBuildTreeRefs verbosity path l' refType = do checkIndexExists path l <- liftM nub . mapM tryCanonicalizePath $ l' treesInIndex <- fmap (map buildTreePath) (readBuildTreeRefsFromFile path) -- Add only those paths that aren't already in the index. treesToAdd <- mapM (buildTreeRefFromPath refType) (l \\ treesInIndex) let entries = map writeBuildTreeRef (catMaybes treesToAdd) unless (null entries) $ do offset <- fmap (Tar.foldrEntries (\e acc -> Tar.entrySizeInBytes e + acc) 0 error . Tar.read) $ BS.readFile path _ <- evaluate offset debug verbosity $ "Writing at offset: " ++ show offset withBinaryFile path ReadWriteMode $ \h -> do hSeek h AbsoluteSeek (fromIntegral offset) BS.hPut h (Tar.write entries) debug verbosity $ "Successfully appended to '" ++ path ++ "'" updatePackageIndexCacheFile verbosity path (path `replaceExtension` "cache") -- | Remove given local build tree references from the index. 
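-- (Illustrative sketch for 'createEmpty' and 'addBuildTreeRefs' above;
-- not part of the original sources.)  A fresh index is just zero-filled
-- 512-byte blocks (the same 10240 bytes that
-- 'tar cvf empty.tar --files-from /dev/null' produces), and new entries
-- are appended by seeking to the offset where the real entries end,
-- overwriting the zero trailer:
--
-- > import qualified Data.ByteString.Lazy as BS
-- >
-- > emptyIndex :: BS.ByteString
-- > emptyIndex = BS.replicate (512 * 20) 0    -- 20 blocks of zeros
-- >
-- > -- For an empty index the end-of-data offset is 0, so the first
-- > -- entries written there simply replace the zero padding.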
removeBuildTreeRefs :: Verbosity -> FilePath -> [FilePath] -> IO [FilePath] removeBuildTreeRefs _ _ [] = error "Distribution.Client.Sandbox.Index.removeBuildTreeRefs: unexpected" removeBuildTreeRefs verbosity path l' = do checkIndexExists path l <- mapM canonicalizePathNoThrow l' let tmpFile = path <.> "tmp" -- Performance note: on my system, it takes 'index --remove-source' -- approx. 3,5s to filter a 65M file. Real-life indices are expected to be -- much smaller. BS.writeFile tmpFile . Tar.writeEntries . Tar.filterEntries (p l) . Tar.read =<< BS.readFile path renameFile tmpFile path debug verbosity $ "Successfully renamed '" ++ tmpFile ++ "' to '" ++ path ++ "'" updatePackageIndexCacheFile verbosity path (path `replaceExtension` "cache") -- FIXME: return only the refs that vere actually removed. return l where p l entry = case readBuildTreeRef entry of Nothing -> True -- FIXME: removing snapshot deps is done with `delete-source -- .cabal-sandbox/snapshots/$SNAPSHOT_NAME`. Perhaps we also want to -- support removing snapshots by providing the original path. (Just (BuildTreeRef _ pth)) -> pth `notElem` l -- | A build tree ref can become ignored if the user later adds a build tree ref -- with the same package ID. We display ignored build tree refs when the user -- runs 'cabal sandbox list-sources', but do not look at their timestamps in -- 'reinstallAddSourceDeps'. data ListIgnoredBuildTreeRefs = ListIgnored | DontListIgnored -- | Which types of build tree refs should be listed? data RefTypesToList = OnlySnapshots | OnlyLinks | LinksAndSnapshots -- | List the local build trees that are referred to from the index. listBuildTreeRefs :: Verbosity -> ListIgnoredBuildTreeRefs -> RefTypesToList -> FilePath -> IO [FilePath] listBuildTreeRefs verbosity listIgnored refTypesToList path = do checkIndexExists path buildTreeRefs <- case listIgnored of DontListIgnored -> do paths <- listWithoutIgnored case refTypesToList of LinksAndSnapshots -> return paths _ -> do allPathsFiltered <- fmap (map buildTreePath . filter predicate) listWithIgnored _ <- evaluate (length allPathsFiltered) return (paths `intersect` allPathsFiltered) ListIgnored -> fmap (map buildTreePath . filter predicate) listWithIgnored _ <- evaluate (length buildTreeRefs) return buildTreeRefs where predicate :: BuildTreeRef -> Bool predicate = case refTypesToList of OnlySnapshots -> (==) SnapshotRef . buildTreeRefType OnlyLinks -> (==) LinkRef . buildTreeRefType LinksAndSnapshots -> const True listWithIgnored :: IO [BuildTreeRef] listWithIgnored = readBuildTreeRefsFromFile $ path listWithoutIgnored :: IO [FilePath] listWithoutIgnored = do let repo = Repo { repoKind = Right LocalRepo , repoLocalDir = takeDirectory path } pkgIndex <- fmap packageIndex . getSourcePackagesStrict verbosity $ [repo] return [ pkgPath | (LocalUnpackedPackage pkgPath) <- map packageSource . allPackages $ pkgIndex ] -- | Check that the package index file exists and exit with error if it does not. checkIndexExists :: FilePath -> IO () checkIndexExists path = do indexExists <- doesFileExist path unless indexExists $ die $ "index does not exist: '" ++ path ++ "'" cabal-install-1.22.6.0/Distribution/Client/Sandbox/Timestamp.hs0000644000000000000000000003265612540225656022451 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Sandbox.Timestamp -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Timestamp file handling (for add-source dependencies). 
----------------------------------------------------------------------------- module Distribution.Client.Sandbox.Timestamp ( AddSourceTimestamp, withAddTimestamps, withRemoveTimestamps, withUpdateTimestamps, maybeAddCompilerTimestampRecord, listModifiedDeps, ) where import Control.Exception (IOException) import Control.Monad (filterM, forM, when) import Data.Char (isSpace) import Data.List (partition) import System.Directory (renameFile) import System.FilePath ((<.>), ()) import qualified Data.Map as M import Distribution.Compiler (CompilerId) import Distribution.Package (packageName) import Distribution.PackageDescription.Configuration (flattenPackageDescription) import Distribution.PackageDescription.Parse (readPackageDescription) import Distribution.Simple.Setup (Flag (..), SDistFlags (..), defaultSDistFlags, sdistCommand) import Distribution.Simple.Utils (debug, die, warn) import Distribution.System (Platform) import Distribution.Text (display) import Distribution.Verbosity (Verbosity, lessVerbose, normal) import Distribution.Version (Version (..), orLaterVersion) import Distribution.Client.Sandbox.Index (ListIgnoredBuildTreeRefs (ListIgnored), RefTypesToList(OnlyLinks) ,listBuildTreeRefs) import Distribution.Client.SetupWrapper (SetupScriptOptions (..), defaultSetupScriptOptions, setupWrapper) import Distribution.Client.Utils (inDir, removeExistingFile, tryCanonicalizePath, tryFindAddSourcePackageDesc) import Distribution.Compat.Exception (catchIO) import Distribution.Client.Compat.Time (EpochTime, getCurTime, getModTime) -- | Timestamp of an add-source dependency. type AddSourceTimestamp = (FilePath, EpochTime) -- | Timestamp file record - a string identifying the compiler & platform plus a -- list of add-source timestamps. type TimestampFileRecord = (String, [AddSourceTimestamp]) timestampRecordKey :: CompilerId -> Platform -> String timestampRecordKey compId platform = display platform ++ "-" ++ display compId -- | The 'add-source-timestamps' file keeps the timestamps of all add-source -- dependencies. It is initially populated by 'sandbox add-source' and kept -- current by 'reinstallAddSourceDeps' and 'configure -w'. The user can install -- add-source deps manually with 'cabal install' after having edited them, so we -- can err on the side of caution sometimes. -- FIXME: We should keep this info in the index file, together with build tree -- refs. timestampFileName :: FilePath timestampFileName = "add-source-timestamps" -- | Read the timestamp file. Exits with error if the timestamp file is -- corrupted. Returns an empty list if the file doesn't exist. readTimestampFile :: FilePath -> IO [TimestampFileRecord] readTimestampFile timestampFile = do timestampString <- readFile timestampFile `catchIO` \_ -> return "[]" case reads timestampString of [(timestamps, s)] | all isSpace s -> return timestamps _ -> die $ "The timestamps file is corrupted. " ++ "Please delete & recreate the sandbox." -- | Write the timestamp file, atomically. writeTimestampFile :: FilePath -> [TimestampFileRecord] -> IO () writeTimestampFile timestampFile timestamps = do writeFile timestampTmpFile (show timestamps) renameFile timestampTmpFile timestampFile where timestampTmpFile = timestampFile <.> "tmp" -- | Read, process and write the timestamp file in one go. 
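-- (Illustrative sketch of the 'add-source-timestamps' file; not part of
-- the original sources.)  The file holds the 'show'n value of a
-- '[TimestampFileRecord]' and is read back with 'reads'.  The platform,
-- compiler version, path and timestamp below are invented:
--
-- > example :: [TimestampFileRecord]
-- > example =
-- >   [ ( "x86_64-linux-ghc-7.8.4"   -- a timestampRecordKey
-- >     , [ ("/home/user/src/my-add-source-dep", 1432216882) ] ) ]
-- >
-- > -- 'writeTimestampFile' stores @show example@ (atomically, via a
-- > -- temporary file) and 'readTimestampFile' recovers it.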
withTimestampFile :: FilePath -> ([TimestampFileRecord] -> IO [TimestampFileRecord]) -> IO () withTimestampFile sandboxDir process = do let timestampFile = sandboxDir timestampFileName timestampRecords <- readTimestampFile timestampFile >>= process writeTimestampFile timestampFile timestampRecords -- | Given a list of 'AddSourceTimestamp's, a list of paths to add-source deps -- we've added and an initial timestamp, add an 'AddSourceTimestamp' to the list -- for each path. If a timestamp for a given path already exists in the list, -- update it. addTimestamps :: EpochTime -> [AddSourceTimestamp] -> [FilePath] -> [AddSourceTimestamp] addTimestamps initial timestamps newPaths = [ (p, initial) | p <- newPaths ] ++ oldTimestamps where (oldTimestamps, _toBeUpdated) = partition (\(path, _) -> path `notElem` newPaths) timestamps -- | Given a list of 'AddSourceTimestamp's, a list of paths to add-source deps -- we've reinstalled and a new timestamp value, update the timestamp value for -- the deps in the list. If there are new paths in the list, ignore them. updateTimestamps :: [AddSourceTimestamp] -> [FilePath] -> EpochTime -> [AddSourceTimestamp] updateTimestamps timestamps pathsToUpdate newTimestamp = foldr updateTimestamp [] timestamps where updateTimestamp t@(path, _oldTimestamp) rest | path `elem` pathsToUpdate = (path, newTimestamp) : rest | otherwise = t : rest -- | Given a list of 'TimestampFileRecord's and a list of paths to add-source -- deps we've removed, remove those deps from the list. removeTimestamps :: [AddSourceTimestamp] -> [FilePath] -> [AddSourceTimestamp] removeTimestamps l pathsToRemove = foldr removeTimestamp [] l where removeTimestamp t@(path, _oldTimestamp) rest = if path `elem` pathsToRemove then rest else t : rest -- | If a timestamp record for this compiler doesn't exist, add a new one. maybeAddCompilerTimestampRecord :: Verbosity -> FilePath -> FilePath -> CompilerId -> Platform -> IO () maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile compId platform = do let key = timestampRecordKey compId platform withTimestampFile sandboxDir $ \timestampRecords -> do case lookup key timestampRecords of Just _ -> return timestampRecords Nothing -> do buildTreeRefs <- listBuildTreeRefs verbosity ListIgnored OnlyLinks indexFile now <- getCurTime let timestamps = map (\p -> (p, now)) buildTreeRefs return $ (key, timestamps):timestampRecords -- | Given an IO action that returns a list of build tree refs, add those -- build tree refs to the timestamps file (for all compilers). withAddTimestamps :: FilePath -> IO [FilePath] -> IO () withAddTimestamps sandboxDir act = do let initialTimestamp = 0 withActionOnAllTimestamps (addTimestamps initialTimestamp) sandboxDir act -- | Given an IO action that returns a list of build tree refs, remove those -- build tree refs from the timestamps file (for all compilers). withRemoveTimestamps :: FilePath -> IO [FilePath] -> IO () withRemoveTimestamps = withActionOnAllTimestamps removeTimestamps -- | Given an IO action that returns a list of build tree refs, update the -- timestamps of the returned build tree refs to the current time (only for the -- given compiler & platform). withUpdateTimestamps :: FilePath -> CompilerId -> Platform ->([AddSourceTimestamp] -> IO [FilePath]) -> IO () withUpdateTimestamps = withActionOnCompilerTimestamps updateTimestamps -- | Helper for implementing 'withAddTimestamps' and -- 'withRemoveTimestamps'. 
Runs a given action on the list of -- 'AddSourceTimestamp's for all compilers, applies 'f' to the result and then -- updates the timestamp file. The IO action is run only once. withActionOnAllTimestamps :: ([AddSourceTimestamp] -> [FilePath] -> [AddSourceTimestamp]) -> FilePath -> IO [FilePath] -> IO () withActionOnAllTimestamps f sandboxDir act = withTimestampFile sandboxDir $ \timestampRecords -> do paths <- act return [(key, f timestamps paths) | (key, timestamps) <- timestampRecords] -- | Helper for implementing 'withUpdateTimestamps'. Runs a given action on the -- list of 'AddSourceTimestamp's for this compiler, applies 'f' to the result -- and then updates the timestamp file record. The IO action is run only once. withActionOnCompilerTimestamps :: ([AddSourceTimestamp] -> [FilePath] -> EpochTime -> [AddSourceTimestamp]) -> FilePath -> CompilerId -> Platform -> ([AddSourceTimestamp] -> IO [FilePath]) -> IO () withActionOnCompilerTimestamps f sandboxDir compId platform act = do let needle = timestampRecordKey compId platform withTimestampFile sandboxDir $ \timestampRecords -> do timestampRecords' <- forM timestampRecords $ \r@(key, timestamps) -> if key == needle then do paths <- act timestamps now <- getCurTime return (key, f timestamps paths now) else return r return timestampRecords' -- | List all source files of a given add-source dependency. Exits with error if -- something is wrong (e.g. there is no .cabal file in the given directory). -- FIXME: This function is not thread-safe because of 'inDir'. allPackageSourceFiles :: Verbosity -> FilePath -> IO [FilePath] allPackageSourceFiles verbosity packageDir = inDir (Just packageDir) $ do pkg <- do let err = "Error reading source files of add-source dependency." desc <- tryFindAddSourcePackageDesc packageDir err flattenPackageDescription `fmap` readPackageDescription verbosity desc let file = "cabal-sdist-list-sources" flags = defaultSDistFlags { sDistVerbosity = Flag $ if verbosity == normal then lessVerbose verbosity else verbosity, sDistListSources = Flag file } setupOpts = defaultSetupScriptOptions { -- 'sdist --list-sources' was introduced in Cabal 1.18. useCabalVersion = orLaterVersion $ Version [1,18,0] [] } doListSources :: IO [FilePath] doListSources = do setupWrapper verbosity setupOpts (Just pkg) sdistCommand (const flags) [] srcs <- fmap lines . readFile $ file mapM tryCanonicalizePath srcs onFailedListSources :: IOException -> IO () onFailedListSources e = do warn verbosity $ "Could not list sources of the add-source dependency '" ++ display (packageName pkg) ++ "'. Skipping the timestamp check." debug verbosity $ "Exception was: " ++ show e -- Run setup sdist --list-sources=TMPFILE ret <- doListSources `catchIO` (\e -> onFailedListSources e >> return []) removeExistingFile file return ret -- | Has this dependency been modified since we have last looked at it? isDepModified :: Verbosity -> EpochTime -> AddSourceTimestamp -> IO Bool isDepModified verbosity now (packageDir, timestamp) = do debug verbosity ("Checking whether the dependency is modified: " ++ packageDir) depSources <- allPackageSourceFiles verbosity packageDir go depSources where go [] = return False go (dep:rest) = do -- FIXME: What if the clock jumps backwards at any point? For now we only -- print a warning. modTime <- getModTime dep when (modTime > now) $ warn verbosity $ "File '" ++ dep ++ "' has a modification time that is in the future." 
if modTime >= timestamp then do debug verbosity ("Dependency has a modified source file: " ++ dep) return True else go rest -- | List all modified dependencies. listModifiedDeps :: Verbosity -> FilePath -> CompilerId -> Platform -> M.Map FilePath a -- ^ The set of all installed add-source deps. -> IO [FilePath] listModifiedDeps verbosity sandboxDir compId platform installedDepsMap = do timestampRecords <- readTimestampFile (sandboxDir timestampFileName) let needle = timestampRecordKey compId platform timestamps <- maybe noTimestampRecord return (lookup needle timestampRecords) now <- getCurTime fmap (map fst) . filterM (isDepModified verbosity now) . filter (\ts -> fst ts `M.member` installedDepsMap) $ timestamps where noTimestampRecord = die $ "Сouldn't find a timestamp record for the given " ++ "compiler/platform pair. " ++ "Please report this on the Cabal bug tracker: " ++ "https://github.com/haskell/cabal/issues/new ." cabal-install-1.22.6.0/Distribution/Client/Sandbox/PackageEnvironment.hs0000644000000000000000000006045612540225656024265 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Sandbox.PackageEnvironment -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Utilities for working with the package environment file. Patterned after -- Distribution.Client.Config. ----------------------------------------------------------------------------- module Distribution.Client.Sandbox.PackageEnvironment ( PackageEnvironment(..) , IncludeComments(..) , PackageEnvironmentType(..) , classifyPackageEnvironment , createPackageEnvironmentFile , tryLoadSandboxPackageEnvironmentFile , readPackageEnvironmentFile , showPackageEnvironment , showPackageEnvironmentWithComments , setPackageDB , sandboxPackageDBPath , loadUserConfig , basePackageEnvironment , initialPackageEnvironment , commentPackageEnvironment , sandboxPackageEnvironmentFile , userPackageEnvironmentFile ) where import Distribution.Client.Config ( SavedConfig(..), commentSavedConfig , loadConfig, configFieldDescriptions , haddockFlagsFields , installDirsFields, withProgramsFields , withProgramOptionsFields , defaultCompiler ) import Distribution.Client.ParseUtils ( parseFields, ppFields, ppSection ) import Distribution.Client.Setup ( GlobalFlags(..), ConfigExFlags(..) , InstallFlags(..) , defaultSandboxLocation ) import Distribution.Utils.NubList ( toNubList ) import Distribution.Simple.Compiler ( Compiler, PackageDB(..) , compilerFlavor, showCompilerIdWithAbi ) import Distribution.Simple.InstallDirs ( InstallDirs(..), PathTemplate , defaultInstallDirs, combineInstallDirs , fromPathTemplate, toPathTemplate ) import Distribution.Simple.Setup ( Flag(..) , ConfigFlags(..), HaddockFlags(..) , fromFlagOrDefault, toFlag, flagToMaybe ) import Distribution.Simple.Utils ( die, info, notice, warn ) import Distribution.ParseUtils ( FieldDescr(..), ParseResult(..) , commaListField, commaNewLineListField , liftField, lineNo, locatedErrorMsg , parseFilePathQ, readFields , showPWarning, simpleField , syntaxError, warning ) import Distribution.System ( Platform ) import Distribution.Verbosity ( Verbosity, normal ) import Control.Monad ( foldM, liftM2, when, unless ) import Data.List ( partition ) import Data.Maybe ( isJust ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( Monoid(..) 
) #endif import Distribution.Compat.Exception ( catchIO ) import System.Directory ( doesDirectoryExist, doesFileExist , renameFile ) import System.FilePath ( (<.>), (</>), takeDirectory ) import System.IO.Error ( isDoesNotExistError ) import Text.PrettyPrint ( ($+$) ) import qualified Text.PrettyPrint as Disp import qualified Distribution.Compat.ReadP as Parse import qualified Distribution.ParseUtils as ParseUtils ( Field(..) ) import qualified Distribution.Text as Text -- -- * Configuration saved in the package environment file -- -- TODO: would be nice to remove duplication between -- D.C.Sandbox.PackageEnvironment and D.C.Config. data PackageEnvironment = PackageEnvironment { -- The 'inherit' feature is not used ATM, but could be useful in the future -- for constructing nested sandboxes (see discussion in #1196). pkgEnvInherit :: Flag FilePath, pkgEnvSavedConfig :: SavedConfig } instance Monoid PackageEnvironment where mempty = PackageEnvironment { pkgEnvInherit = mempty, pkgEnvSavedConfig = mempty } mappend a b = PackageEnvironment { pkgEnvInherit = combine pkgEnvInherit, pkgEnvSavedConfig = combine pkgEnvSavedConfig } where combine f = f a `mappend` f b -- | The automatically-created package environment file that should not be -- touched by the user. sandboxPackageEnvironmentFile :: FilePath sandboxPackageEnvironmentFile = "cabal.sandbox.config" -- | Optional package environment file that can be used to customize the default -- settings. Created by the user. userPackageEnvironmentFile :: FilePath userPackageEnvironmentFile = "cabal.config" -- | Type of the current package environment. data PackageEnvironmentType = SandboxPackageEnvironment -- ^ './cabal.sandbox.config' | UserPackageEnvironment -- ^ './cabal.config' | AmbientPackageEnvironment -- ^ '~/.cabal/config' -- | Is there a 'cabal.sandbox.config' or 'cabal.config' in this -- directory? classifyPackageEnvironment :: FilePath -> Flag FilePath -> Flag Bool -> IO PackageEnvironmentType classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag ignoreSandboxFlag = do isSandbox <- liftM2 (||) (return forceSandboxConfig) (configExists sandboxPackageEnvironmentFile) isUser <- configExists userPackageEnvironmentFile return (classify isSandbox isUser) where configExists fname = doesFileExist (pkgEnvDir </> fname) ignoreSandbox = fromFlagOrDefault False ignoreSandboxFlag forceSandboxConfig = isJust . flagToMaybe $ sandboxConfigFileFlag classify :: Bool -> Bool -> PackageEnvironmentType classify True _ | not ignoreSandbox = SandboxPackageEnvironment classify _ True = UserPackageEnvironment classify _ False = AmbientPackageEnvironment -- | Defaults common to 'initialPackageEnvironment' and -- 'commentPackageEnvironment'. commonPackageEnvironmentConfig :: FilePath -> SavedConfig commonPackageEnvironmentConfig sandboxDir = mempty { savedConfigureFlags = mempty { -- TODO: Currently, we follow cabal-dev and set 'user-install: False' in -- the config file. In the future we may want to distinguish between -- global, sandbox and user install types. configUserInstall = toFlag False, configInstallDirs = installDirs }, savedUserInstallDirs = installDirs, savedGlobalInstallDirs = installDirs, savedGlobalFlags = mempty { globalLogsDir = toFlag $ sandboxDir </> "logs", -- Is this right? cabal-dev uses the global world file. globalWorldFile = toFlag $ sandboxDir </> "world" } } where installDirs = sandboxInstallDirs sandboxDir -- | 'commonPackageEnvironmentConfig' wrapped inside a 'PackageEnvironment'.
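-- A small usage sketch added here for illustration only (the sandbox path is
-- hypothetical, not taken from this module):
--
-- > let pkgEnv = commonPackageEnvironment "/home/user/proj/.cabal-sandbox"
-- > -- 'pkgEnvSavedConfig pkgEnv' then carries the sandbox install dirs,
-- > -- 'user-install: False' and the sandbox logs/world paths set above.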
commonPackageEnvironment :: FilePath -> PackageEnvironment commonPackageEnvironment sandboxDir = mempty { pkgEnvSavedConfig = commonPackageEnvironmentConfig sandboxDir } -- | Given a path to a sandbox, return the corresponding InstallDirs record. sandboxInstallDirs :: FilePath -> InstallDirs (Flag PathTemplate) sandboxInstallDirs sandboxDir = mempty { prefix = toFlag (toPathTemplate sandboxDir) } -- | These are the absolute basic defaults, the fields that must be -- initialised. When we load the package environment from the file we layer the -- loaded values over these ones. basePackageEnvironment :: PackageEnvironment basePackageEnvironment = mempty { pkgEnvSavedConfig = mempty { savedConfigureFlags = mempty { configHcFlavor = toFlag defaultCompiler, configVerbosity = toFlag normal } } } -- | Initial configuration that we write out to the package environment file if -- it does not exist. When the package environment gets loaded this -- configuration gets layered on top of 'basePackageEnvironment'. initialPackageEnvironment :: FilePath -> Compiler -> Platform -> IO PackageEnvironment initialPackageEnvironment sandboxDir compiler platform = do defInstallDirs <- defaultInstallDirs (compilerFlavor compiler) {- userInstall= -} False {- _hasLibs= -} False let initialConfig = commonPackageEnvironmentConfig sandboxDir installDirs = combineInstallDirs (\d f -> Flag $ fromFlagOrDefault d f) defInstallDirs (savedUserInstallDirs initialConfig) return $ mempty { pkgEnvSavedConfig = initialConfig { savedUserInstallDirs = installDirs, savedGlobalInstallDirs = installDirs, savedGlobalFlags = (savedGlobalFlags initialConfig) { globalLocalRepos = toNubList [sandboxDir </> "packages"] }, savedConfigureFlags = setPackageDB sandboxDir compiler platform (savedConfigureFlags initialConfig), savedInstallFlags = (savedInstallFlags initialConfig) { installSummaryFile = toNubList [toPathTemplate (sandboxDir </> "logs" </> "build.log")] } } } -- | Return the path to the sandbox package database. sandboxPackageDBPath :: FilePath -> Compiler -> Platform -> String sandboxPackageDBPath sandboxDir compiler platform = sandboxDir </> (Text.display platform ++ "-" ++ showCompilerIdWithAbi compiler ++ "-packages.conf.d") -- The path in sandboxPackageDBPath should be kept in sync with the -- path in the bootstrap.sh which is used to bootstrap cabal-install -- into a sandbox. -- | Use the package DB location specific for this compiler. setPackageDB :: FilePath -> Compiler -> Platform -> ConfigFlags -> ConfigFlags setPackageDB sandboxDir compiler platform configFlags = configFlags { configPackageDBs = [Just (SpecificPackageDB $ sandboxPackageDBPath sandboxDir compiler platform)] } -- | Almost the same as 'savedConf `mappend` pkgEnv', but some settings are -- overridden instead of mappend'ed. overrideSandboxSettings :: PackageEnvironment -> PackageEnvironment -> PackageEnvironment overrideSandboxSettings pkgEnv0 pkgEnv = pkgEnv { pkgEnvSavedConfig = mappendedConf { savedConfigureFlags = (savedConfigureFlags mappendedConf) { configPackageDBs = configPackageDBs pkgEnvConfigureFlags } , savedInstallFlags = (savedInstallFlags mappendedConf) { installSummaryFile = installSummaryFile pkgEnvInstallFlags } }, pkgEnvInherit = pkgEnvInherit pkgEnv0 } where pkgEnvConf = pkgEnvSavedConfig pkgEnv mappendedConf = (pkgEnvSavedConfig pkgEnv0) `mappend` pkgEnvConf pkgEnvConfigureFlags = savedConfigureFlags pkgEnvConf pkgEnvInstallFlags = savedInstallFlags pkgEnvConf -- | Default values that get used if no value is given.
Used here to include in -- comments when we write out the initial package environment. commentPackageEnvironment :: FilePath -> IO PackageEnvironment commentPackageEnvironment sandboxDir = do commentConf <- commentSavedConfig let baseConf = commonPackageEnvironmentConfig sandboxDir return $ mempty { pkgEnvSavedConfig = commentConf `mappend` baseConf } -- | If this package environment inherits from some other package environment, -- return that package environment; otherwise return mempty. inheritedPackageEnvironment :: Verbosity -> PackageEnvironment -> IO PackageEnvironment inheritedPackageEnvironment verbosity pkgEnv = do case (pkgEnvInherit pkgEnv) of NoFlag -> return mempty confPathFlag@(Flag _) -> do conf <- loadConfig verbosity confPathFlag NoFlag return $ mempty { pkgEnvSavedConfig = conf } -- | Load the user package environment if it exists (the optional "cabal.config" -- file). userPackageEnvironment :: Verbosity -> FilePath -> IO PackageEnvironment userPackageEnvironment verbosity pkgEnvDir = do let path = pkgEnvDir </> userPackageEnvironmentFile minp <- readPackageEnvironmentFile mempty path case minp of Nothing -> return mempty Just (ParseOk warns parseResult) -> do when (not $ null warns) $ warn verbosity $ unlines (map (showPWarning path) warns) return parseResult Just (ParseFailed err) -> do let (line, msg) = locatedErrorMsg err warn verbosity $ "Error parsing user package environment file " ++ path ++ maybe "" (\n -> ":" ++ show n) line ++ ":\n" ++ msg return mempty -- | Same as @userPackageEnvironment@, but returns only the 'SavedConfig' part. loadUserConfig :: Verbosity -> FilePath -> IO SavedConfig loadUserConfig verbosity pkgEnvDir = fmap pkgEnvSavedConfig $ userPackageEnvironment verbosity pkgEnvDir -- | Common error handling code used by 'tryLoadSandboxPackageEnvironmentFile' and -- 'updatePackageEnvironment'. handleParseResult :: Verbosity -> FilePath -> Maybe (ParseResult PackageEnvironment) -> IO PackageEnvironment handleParseResult verbosity path minp = case minp of Nothing -> die $ "The package environment file '" ++ path ++ "' doesn't exist" Just (ParseOk warns parseResult) -> do when (not $ null warns) $ warn verbosity $ unlines (map (showPWarning path) warns) return parseResult Just (ParseFailed err) -> do let (line, msg) = locatedErrorMsg err die $ "Error parsing package environment file " ++ path ++ maybe "" (\n -> ":" ++ show n) line ++ ":\n" ++ msg -- | Try to load the given package environment file, exiting with error if it -- doesn't exist. Also returns the path to the sandbox directory. The path -- parameter should refer to an existing file. tryLoadSandboxPackageEnvironmentFile :: Verbosity -> FilePath -> (Flag FilePath) -> IO (FilePath, PackageEnvironment) tryLoadSandboxPackageEnvironmentFile verbosity pkgEnvFile configFileFlag = do let pkgEnvDir = takeDirectory pkgEnvFile minp <- readPackageEnvironmentFile mempty pkgEnvFile pkgEnv <- handleParseResult verbosity pkgEnvFile minp -- Get the saved sandbox directory. -- TODO: Use substPathTemplate with -- compilerTemplateEnv ++ platformTemplateEnv ++ abiTemplateEnv. let sandboxDir = fromFlagOrDefault defaultSandboxLocation . fmap fromPathTemplate . prefix . savedUserInstallDirs . pkgEnvSavedConfig $ pkgEnv -- Do some sanity checks dirExists <- doesDirectoryExist sandboxDir -- TODO: Also check for an initialised package DB?
unless dirExists $ die ("No sandbox exists at " ++ sandboxDir) info verbosity $ "Using a sandbox located at " ++ sandboxDir let base = basePackageEnvironment let common = commonPackageEnvironment sandboxDir user <- userPackageEnvironment verbosity pkgEnvDir inherited <- inheritedPackageEnvironment verbosity user -- Layer the package environment settings over settings from ~/.cabal/config. cabalConfig <- fmap unsetSymlinkBinDir $ loadConfig verbosity configFileFlag NoFlag return (sandboxDir, updateInstallDirs $ (base `mappend` (toPkgEnv cabalConfig) `mappend` common `mappend` inherited `mappend` user) `overrideSandboxSettings` pkgEnv) where toPkgEnv config = mempty { pkgEnvSavedConfig = config } updateInstallDirs pkgEnv = let config = pkgEnvSavedConfig pkgEnv configureFlags = savedConfigureFlags config installDirs = savedUserInstallDirs config in pkgEnv { pkgEnvSavedConfig = config { savedConfigureFlags = configureFlags { configInstallDirs = installDirs } } } -- We don't want to inherit the value of 'symlink-bindir' from -- '~/.cabal/config'. See #1514. unsetSymlinkBinDir config = let installFlags = savedInstallFlags config in config { savedInstallFlags = installFlags { installSymlinkBinDir = NoFlag } } -- | Should the generated package environment file include comments? data IncludeComments = IncludeComments | NoComments -- | Create a new package environment file, replacing the existing one if it -- exists. Note that the path parameters should point to existing directories. createPackageEnvironmentFile :: Verbosity -> FilePath -> FilePath -> IncludeComments -> Compiler -> Platform -> IO () createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile incComments compiler platform = do notice verbosity $ "Writing a default package environment file to " ++ pkgEnvFile commentPkgEnv <- commentPackageEnvironment sandboxDir initialPkgEnv <- initialPackageEnvironment sandboxDir compiler platform writePackageEnvironmentFile pkgEnvFile incComments commentPkgEnv initialPkgEnv -- | Descriptions of all fields in the package environment file. pkgEnvFieldDescrs :: [FieldDescr PackageEnvironment] pkgEnvFieldDescrs = [ simpleField "inherit" (fromFlagOrDefault Disp.empty . fmap Disp.text) (optional parseFilePathQ) pkgEnvInherit (\v pkgEnv -> pkgEnv { pkgEnvInherit = v }) -- FIXME: Should we make these fields part of ~/.cabal/config ? , commaNewLineListField "constraints" Text.disp Text.parse (configExConstraints . savedConfigureExFlags . pkgEnvSavedConfig) (\v pkgEnv -> updateConfigureExFlags pkgEnv (\flags -> flags { configExConstraints = v })) , commaListField "preferences" Text.disp Text.parse (configPreferences . savedConfigureExFlags . pkgEnvSavedConfig) (\v pkgEnv -> updateConfigureExFlags pkgEnv (\flags -> flags { configPreferences = v })) ] ++ map toPkgEnv configFieldDescriptions' where optional = Parse.option mempty . fmap toFlag configFieldDescriptions' :: [FieldDescr SavedConfig] configFieldDescriptions' = filter (\(FieldDescr name _ _) -> name /= "preference" && name /= "constraint") configFieldDescriptions toPkgEnv :: FieldDescr SavedConfig -> FieldDescr PackageEnvironment toPkgEnv fieldDescr = liftField pkgEnvSavedConfig (\savedConfig pkgEnv -> pkgEnv { pkgEnvSavedConfig = savedConfig}) fieldDescr updateConfigureExFlags :: PackageEnvironment -> (ConfigExFlags -> ConfigExFlags) -> PackageEnvironment updateConfigureExFlags pkgEnv f = pkgEnv { pkgEnvSavedConfig = (pkgEnvSavedConfig pkgEnv) { savedConfigureExFlags = f . savedConfigureExFlags . 
pkgEnvSavedConfig $ pkgEnv } } -- | Read the package environment file. readPackageEnvironmentFile :: PackageEnvironment -> FilePath -> IO (Maybe (ParseResult PackageEnvironment)) readPackageEnvironmentFile initial file = handleNotExists $ fmap (Just . parsePackageEnvironment initial) (readFile file) where handleNotExists action = catchIO action $ \ioe -> if isDoesNotExistError ioe then return Nothing else ioError ioe -- | Parse the package environment file. parsePackageEnvironment :: PackageEnvironment -> String -> ParseResult PackageEnvironment parsePackageEnvironment initial str = do fields <- readFields str let (knownSections, others) = partition isKnownSection fields pkgEnv <- parse others let config = pkgEnvSavedConfig pkgEnv installDirs0 = savedUserInstallDirs config (haddockFlags, installDirs, paths, args) <- foldM parseSections (savedHaddockFlags config, installDirs0, [], []) knownSections return pkgEnv { pkgEnvSavedConfig = config { savedConfigureFlags = (savedConfigureFlags config) { configProgramPaths = paths, configProgramArgs = args }, savedHaddockFlags = haddockFlags, savedUserInstallDirs = installDirs, savedGlobalInstallDirs = installDirs } } where isKnownSection :: ParseUtils.Field -> Bool isKnownSection (ParseUtils.Section _ "haddock" _ _) = True isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True isKnownSection _ = False parse :: [ParseUtils.Field] -> ParseResult PackageEnvironment parse = parseFields pkgEnvFieldDescrs initial parseSections :: SectionsAccum -> ParseUtils.Field -> ParseResult SectionsAccum parseSections accum@(h,d,p,a) (ParseUtils.Section _ "haddock" name fs) | name == "" = do h' <- parseFields haddockFlagsFields h fs return (h', d, p, a) | otherwise = do warning "The 'haddock' section should be unnamed" return accum parseSections (h,d,p,a) (ParseUtils.Section line "install-dirs" name fs) | name == "" = do d' <- parseFields installDirsFields d fs return (h, d',p,a) | otherwise = syntaxError line $ "Named 'install-dirs' section: '" ++ name ++ "'. Note that named 'install-dirs' sections are not allowed in the '" ++ userPackageEnvironmentFile ++ "' file." parseSections accum@(h, d,p,a) (ParseUtils.Section _ "program-locations" name fs) | name == "" = do p' <- parseFields withProgramsFields p fs return (h, d, p', a) | otherwise = do warning "The 'program-locations' section should be unnamed" return accum parseSections accum@(h, d, p, a) (ParseUtils.Section _ "program-default-options" name fs) | name == "" = do a' <- parseFields withProgramOptionsFields a fs return (h, d, p, a') | otherwise = do warning "The 'program-default-options' section should be unnamed" return accum parseSections accum f = do warning $ "Unrecognized stanza on line " ++ show (lineNo f) return accum -- | Accumulator type for 'parseSections'. type SectionsAccum = (HaddockFlags, InstallDirs (Flag PathTemplate) , [(String, FilePath)], [(String, [String])]) -- | Write out the package environment file. 
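-- An illustrative call, mirroring how 'createPackageEnvironmentFile' above uses
-- this function (the two environment values are placeholders for the comment
-- defaults and the initial environment):
--
-- > writePackageEnvironmentFile "cabal.sandbox.config" IncludeComments
-- >                             commentPkgEnv initialPkgEnv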
writePackageEnvironmentFile :: FilePath -> IncludeComments -> PackageEnvironment -> PackageEnvironment -> IO () writePackageEnvironmentFile path incComments comments pkgEnv = do let tmpPath = (path <.> "tmp") writeFile tmpPath $ explanation ++ pkgEnvStr ++ "\n" renameFile tmpPath path where pkgEnvStr = case incComments of IncludeComments -> showPackageEnvironmentWithComments (Just comments) pkgEnv NoComments -> showPackageEnvironment pkgEnv explanation = unlines ["-- This is a Cabal package environment file." ,"-- THIS FILE IS AUTO-GENERATED. DO NOT EDIT DIRECTLY." ,"-- Please create a 'cabal.config' file in the same directory" ,"-- if you want to change the default settings for this sandbox." ,"","" ] -- | Pretty-print the package environment. showPackageEnvironment :: PackageEnvironment -> String showPackageEnvironment pkgEnv = showPackageEnvironmentWithComments Nothing pkgEnv -- | Pretty-print the package environment with default values for empty fields -- commented out (just like the default ~/.cabal/config). showPackageEnvironmentWithComments :: (Maybe PackageEnvironment) -> PackageEnvironment -> String showPackageEnvironmentWithComments mdefPkgEnv pkgEnv = Disp.render $ ppFields pkgEnvFieldDescrs mdefPkgEnv pkgEnv $+$ Disp.text "" $+$ ppSection "install-dirs" "" installDirsFields (fmap installDirsSection mdefPkgEnv) (installDirsSection pkgEnv) where installDirsSection = savedUserInstallDirs . pkgEnvSavedConfig cabal-install-1.22.6.0/Distribution/Client/Sandbox/Types.hs0000644000000000000000000000446012540225656021602 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Sandbox.Types -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Helpers for writing code that works both inside and outside a sandbox. ----------------------------------------------------------------------------- module Distribution.Client.Sandbox.Types ( UseSandbox(..), isUseSandbox, whenUsingSandbox, SandboxPackageInfo(..) ) where import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex import Distribution.Client.Types (SourcePackage) #if !MIN_VERSION_base(4,8,0) import Data.Monoid #endif import qualified Data.Set as S -- | Are we using a sandbox? data UseSandbox = UseSandbox FilePath | NoSandbox instance Monoid UseSandbox where mempty = NoSandbox NoSandbox `mappend` s = s u0@(UseSandbox _) `mappend` NoSandbox = u0 (UseSandbox _) `mappend` u1@(UseSandbox _) = u1 -- | Convert a @UseSandbox@ value to a boolean. Useful in conjunction with -- @when@. isUseSandbox :: UseSandbox -> Bool isUseSandbox (UseSandbox _) = True isUseSandbox NoSandbox = False -- | Execute an action only if we're in a sandbox, feeding to it the path to the -- sandbox directory. whenUsingSandbox :: UseSandbox -> (FilePath -> IO ()) -> IO () whenUsingSandbox NoSandbox _ = return () whenUsingSandbox (UseSandbox sandboxDir) act = act sandboxDir -- | Data about the packages installed in the sandbox that is passed from -- 'reinstallAddSourceDeps' to the solver. data SandboxPackageInfo = SandboxPackageInfo { modifiedAddSourceDependencies :: ![SourcePackage], -- ^ Modified add-source deps that we want to reinstall. These are guaranteed -- to be already installed in the sandbox. otherAddSourceDependencies :: ![SourcePackage], -- ^ Remaining add-source deps. Some of these may be not installed in the -- sandbox. 
otherInstalledSandboxPackages :: !InstalledPackageIndex.InstalledPackageIndex, -- ^ All packages installed in the sandbox. Intersection with -- 'modifiedAddSourceDependencies' and/or 'otherAddSourceDependencies' can be -- non-empty. allAddSourceDependencies :: !(S.Set FilePath) -- ^ A set of paths to all add-source dependencies, for convenience. } cabal-install-1.22.6.0/Distribution/Client/Compat/0000755000000000000000000000000012540225656017763 5ustar0000000000000000cabal-install-1.22.6.0/Distribution/Client/Compat/FilePerms.hs0000644000000000000000000000205012540225656022202 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# OPTIONS_HADDOCK hide #-} module Distribution.Client.Compat.FilePerms ( setFileOrdinary, setFileExecutable, setFileHidden, ) where #ifndef mingw32_HOST_OS import System.Posix.Types ( FileMode ) import System.Posix.Internals ( c_chmod ) import Foreign.C ( withCString ) import Foreign.C ( throwErrnoPathIfMinus1_ ) #else import System.Win32.File (setFileAttributes, fILE_ATTRIBUTE_HIDDEN) #endif /* mingw32_HOST_OS */ setFileHidden, setFileOrdinary, setFileExecutable :: FilePath -> IO () #ifndef mingw32_HOST_OS setFileOrdinary path = setFileMode path 0o644 -- file perms -rw-r--r-- setFileExecutable path = setFileMode path 0o755 -- file perms -rwxr-xr-x setFileHidden _ = return () setFileMode :: FilePath -> FileMode -> IO () setFileMode name m = withCString name $ \s -> throwErrnoPathIfMinus1_ "setFileMode" name (c_chmod s m) #else setFileOrdinary _ = return () setFileExecutable _ = return () setFileHidden path = setFileAttributes path fILE_ATTRIBUTE_HIDDEN #endif cabal-install-1.22.6.0/Distribution/Client/Compat/Semaphore.hs0000644000000000000000000000562512540225656022252 0ustar0000000000000000{-# LANGUAGE DeriveDataTypeable #-} {-# OPTIONS_GHC -funbox-strict-fields #-} module Distribution.Client.Compat.Semaphore ( QSem , newQSem , waitQSem , signalQSem ) where import Control.Concurrent.STM (TVar, atomically, newTVar, readTVar, retry, writeTVar) import Control.Exception (mask_, onException) import Control.Monad (join, when) import Data.Typeable (Typeable) -- | 'QSem' is a quantity semaphore in which the resource is aqcuired -- and released in units of one. It provides guaranteed FIFO ordering -- for satisfying blocked `waitQSem` calls. -- data QSem = QSem !(TVar Int) !(TVar [TVar Bool]) !(TVar [TVar Bool]) deriving (Eq, Typeable) newQSem :: Int -> IO QSem newQSem i = atomically $ do q <- newTVar i b1 <- newTVar [] b2 <- newTVar [] return (QSem q b1 b2) waitQSem :: QSem -> IO () waitQSem s@(QSem q _b1 b2) = mask_ $ join $ atomically $ do -- join, because if we need to block, we have to add a TVar to -- the block queue. -- mask_, because we need a chance to set up an exception handler -- after the join returns. v <- readTVar q if v == 0 then do b <- newTVar False ys <- readTVar b2 writeTVar b2 (b:ys) return (wait b) else do writeTVar q $! v - 1 return (return ()) where -- -- very careful here: if we receive an exception, then we need to -- (a) write True into the TVar, so that another signalQSem doesn't -- try to wake up this thread, and -- (b) if the TVar is *already* True, then we need to do another -- signalQSem to avoid losing a unit of the resource. -- -- The 'wake' function does both (a) and (b), so we can just call -- it here. 
-- wait t = flip onException (wake s t) $ atomically $ do b <- readTVar t when (not b) retry wake :: QSem -> TVar Bool -> IO () wake s x = join $ atomically $ do b <- readTVar x if b then return (signalQSem s) else do writeTVar x True return (return ()) {- property we want: bracket waitQSem (\_ -> signalQSem) (\_ -> ...) never loses a unit of the resource. -} signalQSem :: QSem -> IO () signalQSem s@(QSem q b1 b2) = mask_ $ join $ atomically $ do -- join, so we don't force the reverse inside the txn -- mask_ is needed so we don't lose a wakeup v <- readTVar q if v /= 0 then do writeTVar q $! v + 1 return (return ()) else do xs <- readTVar b1 checkwake1 xs where checkwake1 [] = do ys <- readTVar b2 checkwake2 ys checkwake1 (x:xs) = do writeTVar b1 xs return (wake s x) checkwake2 [] = do writeTVar q 1 return (return ()) checkwake2 ys = do let (z:zs) = reverse ys writeTVar b1 zs writeTVar b2 [] return (wake s z) cabal-install-1.22.6.0/Distribution/Client/Compat/Environment.hs0000644000000000000000000000533012540225656022624 0ustar0000000000000000{-# LANGUAGE CPP, ForeignFunctionInterface #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Compat.Environment -- Copyright : (c) Simon Hengel 2012 -- License : BSD-style (see the file LICENSE) -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- A cross-platform library for setting environment variables. -- ----------------------------------------------------------------------------- module Distribution.Client.Compat.Environment ( lookupEnv, setEnv ) where #ifdef mingw32_HOST_OS import GHC.Windows import Foreign.C import Control.Monad #else import Foreign.C.Types import Foreign.C.String import Foreign.C.Error (throwErrnoIfMinus1_) import System.Posix.Internals ( withFilePath ) #endif /* mingw32_HOST_OS */ #if MIN_VERSION_base(4,6,0) import System.Environment (lookupEnv) #else import System.Environment (getEnv) import Distribution.Compat.Exception (catchIO) #endif #if !MIN_VERSION_base(4,6,0) -- | @lookupEnv var@ returns the value of the environment variable @var@, or -- @Nothing@ if there is no such value. lookupEnv :: String -> IO (Maybe String) lookupEnv name = (Just `fmap` getEnv name) `catchIO` const (return Nothing) #endif /* !MIN_VERSION_base(4,6,0) */ -- | @setEnv name value@ sets the specified environment variable to @value@. -- -- Throws `Control.Exception.IOException` if either @name@ or @value@ is the -- empty string or contains an equals sign. setEnv :: String -> String -> IO () setEnv key value_ | null value = error "Distribuiton.Compat.setEnv: empty string" | otherwise = setEnv_ key value where -- NOTE: Anything that follows NUL is ignored on both POSIX and Windows. We -- still strip it manually so that the null check above succeeds if a value -- starts with NUL. 
value = takeWhile (/= '\NUL') value_ setEnv_ :: String -> String -> IO () #ifdef mingw32_HOST_OS setEnv_ key value = withCWString key $ \k -> withCWString value $ \v -> do success <- c_SetEnvironmentVariable k v unless success (throwGetLastError "setEnv") # if defined(i386_HOST_ARCH) # define WINDOWS_CCONV stdcall # elif defined(x86_64_HOST_ARCH) # define WINDOWS_CCONV ccall # else # error Unknown mingw32 arch # endif /* i386_HOST_ARCH */ foreign import WINDOWS_CCONV unsafe "windows.h SetEnvironmentVariableW" c_SetEnvironmentVariable :: LPTSTR -> LPTSTR -> IO Bool #else setEnv_ key value = do withFilePath key $ \ keyP -> withFilePath value $ \ valueP -> throwErrnoIfMinus1_ "setenv" $ c_setenv keyP valueP (fromIntegral (fromEnum True)) foreign import ccall unsafe "setenv" c_setenv :: CString -> CString -> CInt -> IO CInt #endif /* mingw32_HOST_OS */ cabal-install-1.22.6.0/Distribution/Client/Compat/ExecutablePath.hs0000644000000000000000000001277712540225656023233 0ustar0000000000000000{-# LANGUAGE ForeignFunctionInterface #-} {-# LANGUAGE CPP #-} -- Copied verbatim from base-4.6.0.0. We can't simply import -- System.Environment.getExecutablePath because we need compatibility with older -- GHCs. module Distribution.Client.Compat.ExecutablePath ( getExecutablePath ) where -- The imports are purposely kept completely disjoint to prevent edits -- to one OS implementation from breaking another. #if defined(darwin_HOST_OS) import Data.Word import Foreign.C import Foreign.Marshal.Alloc import Foreign.Ptr import Foreign.Storable import System.Posix.Internals #elif defined(linux_HOST_OS) import Foreign.C import Foreign.Marshal.Array import System.Posix.Internals #elif defined(mingw32_HOST_OS) import Data.Word import Foreign.C import Foreign.Marshal.Array import Foreign.Ptr import System.Posix.Internals #else import Foreign.C import Foreign.Marshal.Alloc import Foreign.Ptr import Foreign.Storable import System.Posix.Internals #endif -- GHC 7.0.* compatibility. 'System.Posix.Internals' in base-4.3.* doesn't -- provide 'peekFilePath' and 'peekFilePathLen'. #if !MIN_VERSION_base(4,4,0) #ifdef mingw32_HOST_OS peekFilePath :: CWString -> IO FilePath peekFilePath = peekCWString #else peekFilePath :: CString -> IO FilePath peekFilePath = peekCString peekFilePathLen :: CStringLen -> IO FilePath peekFilePathLen = peekCStringLen #endif #endif -- The exported function is defined outside any if-guard to make sure -- every OS implements it with the same type. -- | Returns the absolute pathname of the current executable. -- -- Note that for scripts and interactive sessions, this is the path to -- the interpreter (e.g. ghci.) -- -- /Since: 4.6.0.0/ getExecutablePath :: IO FilePath -------------------------------------------------------------------------------- -- Mac OS X #if defined(darwin_HOST_OS) type UInt32 = Word32 foreign import ccall unsafe "mach-o/dyld.h _NSGetExecutablePath" c__NSGetExecutablePath :: CString -> Ptr UInt32 -> IO CInt -- | Returns the path of the main executable. The path may be a -- symbolic link and not the real file. 
-- -- See dyld(3) _NSGetExecutablePath :: IO FilePath _NSGetExecutablePath = allocaBytes 1024 $ \ buf -> -- PATH_MAX is 1024 on OS X alloca $ \ bufsize -> do poke bufsize 1024 status <- c__NSGetExecutablePath buf bufsize if status == 0 then peekFilePath buf else do reqBufsize <- fromIntegral `fmap` peek bufsize allocaBytes reqBufsize $ \ newBuf -> do status2 <- c__NSGetExecutablePath newBuf bufsize if status2 == 0 then peekFilePath newBuf else error "_NSGetExecutablePath: buffer too small" foreign import ccall unsafe "stdlib.h realpath" c_realpath :: CString -> CString -> IO CString -- | Resolves all symbolic links, extra \/ characters, and references -- to \/.\/ and \/..\/. Returns an absolute pathname. -- -- See realpath(3) realpath :: FilePath -> IO FilePath realpath path = withFilePath path $ \ fileName -> allocaBytes 1024 $ \ resolvedName -> do _ <- throwErrnoIfNull "realpath" $ c_realpath fileName resolvedName peekFilePath resolvedName getExecutablePath = _NSGetExecutablePath >>= realpath -------------------------------------------------------------------------------- -- Linux #elif defined(linux_HOST_OS) foreign import ccall unsafe "readlink" c_readlink :: CString -> CString -> CSize -> IO CInt -- | Reads the @FilePath@ pointed to by the symbolic link and returns -- it. -- -- See readlink(2) readSymbolicLink :: FilePath -> IO FilePath readSymbolicLink file = allocaArray0 4096 $ \buf -> do withFilePath file $ \s -> do len <- throwErrnoPathIfMinus1 "readSymbolicLink" file $ c_readlink s buf 4096 peekFilePathLen (buf,fromIntegral len) getExecutablePath = readSymbolicLink $ "/proc/self/exe" -------------------------------------------------------------------------------- -- Windows #elif defined(mingw32_HOST_OS) # if defined(i386_HOST_ARCH) # define WINDOWS_CCONV stdcall # elif defined(x86_64_HOST_ARCH) # define WINDOWS_CCONV ccall # else # error Unknown mingw32 arch # endif foreign import WINDOWS_CCONV unsafe "windows.h GetModuleFileNameW" c_GetModuleFileName :: Ptr () -> CWString -> Word32 -> IO Word32 getExecutablePath = go 2048 -- plenty, PATH_MAX is 512 under Win32 where go size = allocaArray (fromIntegral size) $ \ buf -> do ret <- c_GetModuleFileName nullPtr buf size case ret of 0 -> error "getExecutablePath: GetModuleFileNameW returned an error" _ | ret < size -> peekFilePath buf | otherwise -> go (size * 2) -------------------------------------------------------------------------------- -- Fallback to argv[0] #else foreign import ccall unsafe "getFullProgArgv" c_getFullProgArgv :: Ptr CInt -> Ptr (Ptr CString) -> IO () getExecutablePath = alloca $ \ p_argc -> alloca $ \ p_argv -> do c_getFullProgArgv p_argc p_argv argc <- peek p_argc if argc > 0 -- If argc > 0 then argv[0] is guaranteed by the standard -- to be a pointer to a null-terminated string. 
then peek p_argv >>= peek >>= peekFilePath else error $ "getExecutablePath: " ++ msg where msg = "no OS specific implementation and program name couldn't be " ++ "found in argv" -------------------------------------------------------------------------------- #endif cabal-install-1.22.6.0/Distribution/Client/Compat/Time.hs0000644000000000000000000001111312540225656021212 0ustar0000000000000000{-# LANGUAGE CPP, ForeignFunctionInterface #-} module Distribution.Client.Compat.Time (EpochTime, getModTime, getFileAge, getCurTime) where import Data.Int (Int64) import System.Directory (getModificationTime) #if MIN_VERSION_directory(1,2,0) import Data.Time.Clock.POSIX (utcTimeToPOSIXSeconds, posixDayLength) import Data.Time (getCurrentTime, diffUTCTime) #else import System.Time (ClockTime(..), getClockTime ,diffClockTimes, normalizeTimeDiff, tdDay, tdHour) #endif #if defined mingw32_HOST_OS #if MIN_VERSION_base(4,7,0) import Data.Bits ((.|.), finiteBitSize, unsafeShiftL) #else import Data.Bits ((.|.), bitSize, unsafeShiftL) #endif import Data.Int (Int32) import Data.Word (Word64) import Foreign (allocaBytes, peekByteOff) import System.IO.Error (mkIOError, doesNotExistErrorType) import System.Win32.Types (BOOL, DWORD, LPCTSTR, LPVOID, withTString) #ifdef x86_64_HOST_ARCH #define CALLCONV ccall #else #define CALLCONV stdcall #endif foreign import CALLCONV "windows.h GetFileAttributesExW" c_getFileAttributesEx :: LPCTSTR -> Int32 -> LPVOID -> IO BOOL getFileAttributesEx :: String -> LPVOID -> IO BOOL getFileAttributesEx path lpFileInformation = withTString path $ \c_path -> c_getFileAttributesEx c_path getFileExInfoStandard lpFileInformation getFileExInfoStandard :: Int32 getFileExInfoStandard = 0 size_WIN32_FILE_ATTRIBUTE_DATA :: Int size_WIN32_FILE_ATTRIBUTE_DATA = 36 index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime :: Int index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime = 20 index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime :: Int index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime = 24 #else import Foreign.C.Types (CTime(..)) import System.Posix.Files (getFileStatus, modificationTime) #endif -- | The number of seconds since the UNIX epoch. type EpochTime = Int64 -- | Return modification time of given file. Works around the low clock -- resolution problem that 'getModificationTime' has on GHC < 7.8. -- -- This is a modified version of the code originally written for OpenShake by -- Neil Mitchell. See module Development.Shake.FileTime. getModTime :: FilePath -> IO EpochTime #if defined mingw32_HOST_OS -- Directly against the Win32 API. getModTime path = allocaBytes size_WIN32_FILE_ATTRIBUTE_DATA $ \info -> do res <- getFileAttributesEx path info if not res then do let err = mkIOError doesNotExistErrorType "Distribution.Client.Compat.Time.getModTime" Nothing (Just path) ioError err else do dwLow <- peekByteOff info index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime dwHigh <- peekByteOff info index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime return $! windowsTimeToPOSIXSeconds dwLow dwHigh where windowsTimeToPOSIXSeconds :: DWORD -> DWORD -> EpochTime windowsTimeToPOSIXSeconds dwLow dwHigh = let wINDOWS_TICK = 10000000 sEC_TO_UNIX_EPOCH = 11644473600 #if MIN_VERSION_base(4,7,0) qwTime = (fromIntegral dwHigh `unsafeShiftL` finiteBitSize dwHigh) .|. (fromIntegral dwLow) #else qwTime = (fromIntegral dwHigh `unsafeShiftL` bitSize dwHigh) .|. 
(fromIntegral dwLow) #endif res = ((qwTime :: Word64) `div` wINDOWS_TICK) - sEC_TO_UNIX_EPOCH -- TODO: What if the result is not representable as POSIX seconds? -- Probably fine to return garbage. in fromIntegral res #else -- Directly against the unix library. getModTime path = do -- CTime is Int32 in base 4.5, Int64 in base >= 4.6, and an abstract type in -- base < 4.5. t <- fmap modificationTime $ getFileStatus path #if MIN_VERSION_base(4,5,0) let CTime i = t return (fromIntegral i) #else return (read . show $ t) #endif #endif -- | Return age of given file in days. getFileAge :: FilePath -> IO Double getFileAge file = do t0 <- getModificationTime file #if MIN_VERSION_directory(1,2,0) t1 <- getCurrentTime return $ realToFrac (t1 `diffUTCTime` t0) / realToFrac posixDayLength #else t1 <- getClockTime let dt = normalizeTimeDiff (t1 `diffClockTimes` t0) return $ fromIntegral ((24 * tdDay dt) + tdHour dt) / 24.0 #endif getCurTime :: IO EpochTime getCurTime = do #if MIN_VERSION_directory(1,2,0) (truncate . utcTimeToPOSIXSeconds) `fmap` getCurrentTime #else (TOD s _) <- getClockTime return $! fromIntegral s #endif cabal-install-1.22.6.0/Distribution/Client/Compat/Process.hs0000644000000000000000000000347512540225656021746 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Compat.Process -- Copyright : (c) 2013 Liu Hao, Brent Yorgey -- License : BSD-style (see the file LICENSE) -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Cross-platform utilities for invoking processes. -- ----------------------------------------------------------------------------- module Distribution.Client.Compat.Process ( readProcessWithExitCode ) where #if !MIN_VERSION_base(4,6,0) import Prelude hiding (catch) #endif import Control.Exception (catch, throw) import System.Exit (ExitCode (ExitFailure)) import System.IO.Error (isDoesNotExistError) import qualified System.Process as P -- | @readProcessWithExitCode@ creates an external process, reads its -- standard output and standard error strictly, waits until the -- process terminates, and then returns the @ExitCode@ of the -- process, the standard output, and the standard error. -- -- See the documentation of the version from @System.Process@ for -- more information. -- -- The version from @System.Process@ behaves inconsistently across -- platforms when an executable with the given name is not found: in -- some cases it returns an @ExitFailure@, in others it throws an -- exception. This variant catches \"does not exist\" exceptions and -- turns them into @ExitFailure@s. 
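-- A usage sketch (the invoked program and flag are arbitrary examples, not part
-- of the original module):
--
-- > (exitCode, out, _err) <- readProcessWithExitCode "ghc" ["--numeric-version"] ""
-- > case exitCode of
-- >   ExitFailure 127 -> putStrLn "ghc was not found on the PATH"
-- >   ExitFailure n   -> putStrLn ("ghc failed with exit code " ++ show n)
-- >   _               -> putStrLn ("Found GHC " ++ out)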
readProcessWithExitCode :: FilePath -> [String] -> String -> IO (ExitCode, String, String) readProcessWithExitCode cmd args input = P.readProcessWithExitCode cmd args input `catch` \e -> if isDoesNotExistError e then return (ExitFailure 127, "", "") else throw e cabal-install-1.22.6.0/Distribution/Client/Dependency/0000755000000000000000000000000012540225656020616 5ustar0000000000000000cabal-install-1.22.6.0/Distribution/Client/Dependency/Types.hs0000644000000000000000000002145012540225656022260 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE DeriveFunctor #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Dependency.Types -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Common types for dependency resolution. ----------------------------------------------------------------------------- module Distribution.Client.Dependency.Types ( ExtDependency(..), PreSolver(..), Solver(..), DependencyResolver, AllowNewer(..), isAllowNewer, PackageConstraint(..), debugPackageConstraint, PackagePreferences(..), InstalledPreference(..), PackagesPreferenceDefault(..), Progress(..), foldProgress, ) where #if !MIN_VERSION_base(4,8,0) import Control.Applicative ( Applicative(..) ) #endif import Control.Applicative ( Alternative(..) ) import Data.Char ( isAlpha, toLower ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( Monoid(..) ) #endif import Distribution.Client.Types ( OptionalStanza(..), SourcePackage(..) ) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Compat.ReadP ( (<++) ) import qualified Distribution.Compat.ReadP as Parse ( pfail, munch1 ) import Distribution.PackageDescription ( FlagAssignment, FlagName(..) ) import qualified Distribution.Client.PackageIndex as PackageIndex ( PackageIndex ) import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) import Distribution.Package ( Dependency, PackageName, InstalledPackageId ) import Distribution.Version ( VersionRange, simplifyVersionRange ) import Distribution.Compiler ( CompilerInfo ) import Distribution.System ( Platform ) import Distribution.Text ( Text(..), display ) import Text.PrettyPrint ( text ) import Prelude hiding (fail) -- | Covers source dependencies and installed dependencies in -- one type. data ExtDependency = SourceDependency Dependency | InstalledDependency InstalledPackageId instance Text ExtDependency where disp (SourceDependency dep) = disp dep disp (InstalledDependency dep) = disp dep parse = (SourceDependency `fmap` parse) <++ (InstalledDependency `fmap` parse) -- | All the solvers that can be selected. data PreSolver = AlwaysTopDown | AlwaysModular | Choose deriving (Eq, Ord, Show, Bounded, Enum) -- | All the solvers that can be used. data Solver = TopDown | Modular deriving (Eq, Ord, Show, Bounded, Enum) instance Text PreSolver where disp AlwaysTopDown = text "topdown" disp AlwaysModular = text "modular" disp Choose = text "choose" parse = do name <- Parse.munch1 isAlpha case map toLower name of "topdown" -> return AlwaysTopDown "modular" -> return AlwaysModular "choose" -> return Choose _ -> Parse.pfail -- | A dependency resolver is a function that works out an installation plan -- given the set of installed and available packages and a set of deps to -- solve for. 
-- -- The reason for this interface is because there are dozens of approaches to -- solving the package dependency problem and we want to make it easy to swap -- in alternatives. -- type DependencyResolver = Platform -> CompilerInfo -> InstalledPackageIndex -> PackageIndex.PackageIndex SourcePackage -> (PackageName -> PackagePreferences) -> [PackageConstraint] -> [PackageName] -> Progress String String [InstallPlan.PlanPackage] -- | Per-package constraints. Package constraints must be respected by the -- solver. Multiple constraints for each package can be given, though obviously -- it is possible to construct conflicting constraints (eg impossible version -- range or inconsistent flag assignment). -- data PackageConstraint = PackageConstraintVersion PackageName VersionRange | PackageConstraintInstalled PackageName | PackageConstraintSource PackageName | PackageConstraintFlags PackageName FlagAssignment | PackageConstraintStanzas PackageName [OptionalStanza] deriving (Show,Eq) -- | Provide a textual representation of a package constraint -- for debugging purposes. -- debugPackageConstraint :: PackageConstraint -> String debugPackageConstraint (PackageConstraintVersion pn vr) = display pn ++ " " ++ display (simplifyVersionRange vr) debugPackageConstraint (PackageConstraintInstalled pn) = display pn ++ " installed" debugPackageConstraint (PackageConstraintSource pn) = display pn ++ " source" debugPackageConstraint (PackageConstraintFlags pn fs) = "flags " ++ display pn ++ " " ++ unwords (map (uncurry showFlag) fs) where showFlag (FlagName f) True = "+" ++ f showFlag (FlagName f) False = "-" ++ f debugPackageConstraint (PackageConstraintStanzas pn ss) = "stanzas " ++ display pn ++ " " ++ unwords (map showStanza ss) where showStanza TestStanzas = "test" showStanza BenchStanzas = "bench" -- | A per-package preference on the version. It is a soft constraint that the -- 'DependencyResolver' should try to respect where possible. It consists of -- a 'InstalledPreference' which says if we prefer versions of packages -- that are already installed. It also has a 'PackageVersionPreference' which -- is a suggested constraint on the version number. The resolver should try to -- use package versions that satisfy the suggested version constraint. -- -- It is not specified if preferences on some packages are more important than -- others. -- data PackagePreferences = PackagePreferences VersionRange InstalledPreference -- | Whether we prefer an installed version of a package or simply the latest -- version. -- data InstalledPreference = PreferInstalled | PreferLatest deriving Show -- | Global policy for all packages to say if we prefer package versions that -- are already installed locally or if we just prefer the latest available. -- data PackagesPreferenceDefault = -- | Always prefer the latest version irrespective of any existing -- installed version. -- -- * This is the standard policy for upgrade. -- PreferAllLatest -- | Always prefer the installed versions over ones that would need to be -- installed. Secondarily, prefer latest versions (eg the latest installed -- version or if there are none then the latest source version). | PreferAllInstalled -- | Prefer the latest version for packages that are explicitly requested -- but prefers the installed version for any other packages. -- -- * This is the standard policy for install. -- | PreferLatestForSelected deriving Show -- | Policy for relaxing upper bounds in dependencies. 
For example, given -- 'build-depends: array >= 0.3 && < 0.5', are we allowed to relax the upper -- bound and choose a version of 'array' that is greater or equal to 0.5? By -- default the upper bounds are always strictly honored. data AllowNewer = -- | Default: honor the upper bounds in all dependencies, never choose -- versions newer than allowed. AllowNewerNone -- | Ignore upper bounds in dependencies on the given packages. | AllowNewerSome [PackageName] -- | Ignore upper bounds in dependencies on all packages. | AllowNewerAll -- | Convert 'AllowNewer' to a boolean. isAllowNewer :: AllowNewer -> Bool isAllowNewer AllowNewerNone = False isAllowNewer (AllowNewerSome _) = True isAllowNewer AllowNewerAll = True -- | A type to represent the unfolding of an expensive long running -- calculation that may fail. We may get intermediate steps before the final -- result which may be used to indicate progress and\/or logging messages. -- data Progress step fail done = Step step (Progress step fail done) | Fail fail | Done done deriving Functor -- | Consume a 'Progress' calculation. Much like 'foldr' for lists but with two -- base cases, one for a final result and one for failure. -- -- Eg to convert into a simple 'Either' result use: -- -- > foldProgress (flip const) Left Right -- foldProgress :: (step -> a -> a) -> (fail -> a) -> (done -> a) -> Progress step fail done -> a foldProgress step fail done = fold where fold (Step s p) = step s (fold p) fold (Fail f) = fail f fold (Done r) = done r instance Monad (Progress step fail) where return a = Done a p >>= f = foldProgress Step Fail f p instance Applicative (Progress step fail) where pure a = Done a p <*> x = foldProgress Step Fail (flip fmap x) p instance Monoid fail => Alternative (Progress step fail) where empty = Fail mempty p <|> q = foldProgress Step (const q) Done p cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular.hs0000644000000000000000000000510712540225656022560 0ustar0000000000000000module Distribution.Client.Dependency.Modular ( modularResolver, SolverConfig(..)) where -- Here, we try to map between the external cabal-install solver -- interface and the internal interface that the solver actually -- expects. There are a number of type conversions to perform: we -- have to convert the package indices to the uniform index used -- by the solver; we also have to convert the initial constraints; -- and finally, we have to convert back the resulting install -- plan. import Data.Map as M ( fromListWith ) import Distribution.Client.Dependency.Modular.Assignment ( Assignment, toCPs ) import Distribution.Client.Dependency.Modular.Dependency ( RevDepMap ) import Distribution.Client.Dependency.Modular.ConfiguredConversion ( convCP ) import Distribution.Client.Dependency.Modular.IndexConversion ( convPIs ) import Distribution.Client.Dependency.Modular.Log ( logToProgress ) import Distribution.Client.Dependency.Modular.Package ( PN ) import Distribution.Client.Dependency.Modular.Solver ( SolverConfig(..), solve ) import Distribution.Client.Dependency.Types ( DependencyResolver, PackageConstraint(..) ) import Distribution.Client.InstallPlan ( PlanPackage ) import Distribution.System ( Platform(..) ) -- | Ties the two worlds together: classic cabal-install vs. the modular -- solver. Performs the necessary translations before and after. 
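-- Schematically (the argument names below are placeholders for the values the
-- caller, e.g. the dependency-planning code, supplies when it picks a solver):
--
-- > modularResolver cfg platform cinfo installedIdx sourceIdx prefs constraints targets
--
-- yields a 'Progress' value that either fails with a message or produces the
-- final list of plan packages.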
modularResolver :: SolverConfig -> DependencyResolver modularResolver sc (Platform arch os) cinfo iidx sidx pprefs pcs pns = fmap (uncurry postprocess) $ -- convert install plan logToProgress (maxBackjumps sc) $ -- convert log format into progress format solve sc idx pprefs gcs pns where -- Indices have to be converted into solver-specific uniform index. idx = convPIs os arch cinfo (shadowPkgs sc) (strongFlags sc) iidx sidx -- Constraints have to be converted into a finite map indexed by PN. gcs = M.fromListWith (++) (map (\ pc -> (pcName pc, [pc])) pcs) -- Results have to be converted into an install plan. postprocess :: Assignment -> RevDepMap -> [PlanPackage] postprocess a rdm = map (convCP iidx sidx) (toCPs a rdm) -- Helper function to extract the PN from a constraint. pcName :: PackageConstraint -> PN pcName (PackageConstraintVersion pn _) = pn pcName (PackageConstraintInstalled pn ) = pn pcName (PackageConstraintSource pn ) = pn pcName (PackageConstraintFlags pn _) = pn pcName (PackageConstraintStanzas pn _) = pn cabal-install-1.22.6.0/Distribution/Client/Dependency/TopDown.hs0000644000000000000000000012221312540225656022545 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Dependency.Types -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Common types for dependency resolution. ----------------------------------------------------------------------------- module Distribution.Client.Dependency.TopDown ( topDownResolver ) where import Distribution.Client.Dependency.TopDown.Types import qualified Distribution.Client.Dependency.TopDown.Constraints as Constraints import Distribution.Client.Dependency.TopDown.Constraints ( Satisfiable(..) ) import Distribution.Client.IndexUtils ( convert ) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.InstallPlan ( PlanPackage(..) ) import Distribution.Client.Types ( SourcePackage(..), ConfiguredPackage(..), InstalledPackage(..) , enableStanzas ) import Distribution.Client.Dependency.Types ( DependencyResolver, PackageConstraint(..) , PackagePreferences(..), InstalledPreference(..) 
, Progress(..), foldProgress ) import qualified Distribution.Client.PackageIndex as PackageIndex import Distribution.Client.PackageIndex (PackageIndex) import Distribution.Package ( PackageName(..), PackageId, Package(..), packageVersion, packageName , Dependency(Dependency), thisPackageVersion , simplifyDependency, PackageFixedDeps(depends) ) import Distribution.PackageDescription ( PackageDescription(buildDepends) ) import Distribution.Client.PackageUtils ( externalBuildDepends ) import Distribution.PackageDescription.Configuration ( finalizePackageDescription, flattenPackageDescription ) import Distribution.Version ( VersionRange, withinRange, simplifyVersionRange , UpperBound(..), asVersionIntervals ) import Distribution.Compiler ( CompilerInfo ) import Distribution.System ( Platform ) import Distribution.Simple.Utils ( equating, comparing ) import Distribution.Text ( display ) import Data.List ( foldl', maximumBy, minimumBy, nub, sort, sortBy, groupBy ) import Data.Maybe ( fromJust, fromMaybe, catMaybes ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( Monoid(mempty) ) #endif import Control.Monad ( guard ) import qualified Data.Set as Set import Data.Set (Set) import qualified Data.Map as Map import qualified Data.Graph as Graph import qualified Data.Array as Array import Control.Exception ( assert ) -- ------------------------------------------------------------ -- * Search state types -- ------------------------------------------------------------ type Constraints = Constraints.Constraints InstalledPackageEx UnconfiguredPackage ExclusionReason type SelectedPackages = PackageIndex SelectedPackage -- ------------------------------------------------------------ -- * The search tree type -- ------------------------------------------------------------ data SearchSpace inherited pkg = ChoiceNode inherited [[(pkg, SearchSpace inherited pkg)]] | Failure Failure -- ------------------------------------------------------------ -- * Traverse a search tree -- ------------------------------------------------------------ explore :: (PackageName -> PackagePreferences) -> SearchSpace (SelectedPackages, Constraints, SelectionChanges) SelectablePackage -> Progress Log Failure (SelectedPackages, Constraints) explore _ (Failure failure) = Fail failure explore _ (ChoiceNode (s,c,_) []) = Done (s,c) explore pref (ChoiceNode _ choices) = case [ choice | [choice] <- choices ] of ((_, node'):_) -> Step (logInfo node') (explore pref node') [] -> Step (logInfo node') (explore pref node') where choice = minimumBy (comparing topSortNumber) choices pkgname = packageName . fst . 
head $ choice (_, node') = maximumBy (bestByPref pkgname) choice where topSortNumber choice = case fst (head choice) of InstalledOnly (InstalledPackageEx _ i _) -> i SourceOnly (UnconfiguredPackage _ i _ _) -> i InstalledAndSource _ (UnconfiguredPackage _ i _ _) -> i bestByPref pkgname = case packageInstalledPreference of PreferLatest -> comparing (\(p,_) -> ( isPreferred p, packageId p)) PreferInstalled -> comparing (\(p,_) -> (isInstalled p, isPreferred p, packageId p)) where isInstalled (SourceOnly _) = False isInstalled _ = True isPreferred p = packageVersion p `withinRange` preferredVersions (PackagePreferences preferredVersions packageInstalledPreference) = pref pkgname logInfo node = Select selected discarded where (selected, discarded) = case node of Failure _ -> ([], []) ChoiceNode (_,_,changes) _ -> changes -- ------------------------------------------------------------ -- * Generate a search tree -- ------------------------------------------------------------ type ConfigurePackage = PackageIndex SelectablePackage -> SelectablePackage -> Either [Dependency] SelectedPackage -- | (packages selected, packages discarded) type SelectionChanges = ([SelectedPackage], [PackageId]) searchSpace :: ConfigurePackage -> Constraints -> SelectedPackages -> SelectionChanges -> Set PackageName -> SearchSpace (SelectedPackages, Constraints, SelectionChanges) SelectablePackage searchSpace configure constraints selected changes next = assert (Set.null (selectedSet `Set.intersection` next)) $ assert (selectedSet `Set.isSubsetOf` Constraints.packages constraints) $ assert (next `Set.isSubsetOf` Constraints.packages constraints) $ ChoiceNode (selected, constraints, changes) [ [ (pkg, select name pkg) | pkg <- PackageIndex.lookupPackageName available name ] | name <- Set.elems next ] where available = Constraints.choices constraints selectedSet = Set.fromList (map packageName (PackageIndex.allPackages selected)) select name pkg = case configure available pkg of Left missing -> Failure $ ConfigureFailed pkg [ (dep, Constraints.conflicting constraints dep) | dep <- missing ] Right pkg' -> case constrainDeps pkg' newDeps (addDeps constraints newPkgs) [] of Left failure -> Failure failure Right (constraints', newDiscarded) -> searchSpace configure constraints' selected' (newSelected, newDiscarded) next' where selected' = foldl' (flip PackageIndex.insert) selected newSelected newSelected = case Constraints.isPaired constraints (packageId pkg) of Nothing -> [pkg'] Just pkgid' -> [pkg', pkg''] where Just pkg'' = fmap (\(InstalledOnly p) -> InstalledOnly p) (PackageIndex.lookupPackageId available pkgid') newPkgs = [ name' | (Dependency name' _, _) <- newDeps , null (PackageIndex.lookupPackageName selected' name') ] newDeps = concatMap packageConstraints newSelected next' = Set.delete name $ foldl' (flip Set.insert) next newPkgs packageConstraints :: SelectedPackage -> [(Dependency, Bool)] packageConstraints = either installedConstraints availableConstraints . 
preferSource where preferSource (InstalledOnly pkg) = Left pkg preferSource (SourceOnly pkg) = Right pkg preferSource (InstalledAndSource _ pkg) = Right pkg installedConstraints (InstalledPackageEx _ _ deps) = [ (thisPackageVersion dep, True) | dep <- deps ] availableConstraints (SemiConfiguredPackage _ _ _ deps) = [ (dep, False) | dep <- deps ] addDeps :: Constraints -> [PackageName] -> Constraints addDeps = foldr $ \pkgname cs -> case Constraints.addTarget pkgname cs of Satisfiable cs' () -> cs' _ -> impossible "addDeps unsatisfiable" constrainDeps :: SelectedPackage -> [(Dependency, Bool)] -> Constraints -> [PackageId] -> Either Failure (Constraints, [PackageId]) constrainDeps pkg [] cs discard = case addPackageSelectConstraint (packageId pkg) cs of Satisfiable cs' discard' -> Right (cs', discard' ++ discard) _ -> impossible "constrainDeps unsatisfiable(1)" constrainDeps pkg ((dep, installedConstraint):deps) cs discard = case addPackageDependencyConstraint (packageId pkg) dep installedConstraint cs of Satisfiable cs' discard' -> constrainDeps pkg deps cs' (discard' ++ discard) Unsatisfiable -> impossible "constrainDeps unsatisfiable(2)" ConflictsWith conflicts -> Left (DependencyConflict pkg dep installedConstraint conflicts) -- ------------------------------------------------------------ -- * The main algorithm -- ------------------------------------------------------------ search :: ConfigurePackage -> (PackageName -> PackagePreferences) -> Constraints -> Set PackageName -> Progress Log Failure (SelectedPackages, Constraints) search configure pref constraints = explore pref . searchSpace configure constraints mempty ([], []) -- ------------------------------------------------------------ -- * The top level resolver -- ------------------------------------------------------------ -- | The main exported resolver, with string logging and failure types to fit -- the standard 'DependencyResolver' interface. -- topDownResolver :: DependencyResolver topDownResolver platform cinfo installedPkgIndex sourcePkgIndex preferences constraints targets = mapMessages (topDownResolver' platform cinfo (convert installedPkgIndex) sourcePkgIndex preferences constraints targets) where mapMessages :: Progress Log Failure a -> Progress String String a mapMessages = foldProgress (Step . showLog) (Fail . showFailure) Done -- | The native resolver with detailed structured logging and failure types. -- topDownResolver' :: Platform -> CompilerInfo -> PackageIndex InstalledPackage -> PackageIndex SourcePackage -> (PackageName -> PackagePreferences) -> [PackageConstraint] -> [PackageName] -> Progress Log Failure [PlanPackage] topDownResolver' platform cinfo installedPkgIndex sourcePkgIndex preferences constraints targets = fmap (uncurry finalise) . (\cs -> search configure preferences cs initialPkgNames) =<< pruneBottomUp platform cinfo =<< addTopLevelConstraints constraints =<< addTopLevelTargets targets emptyConstraintSet where configure = configurePackage platform cinfo emptyConstraintSet :: Constraints emptyConstraintSet = Constraints.empty (annotateInstalledPackages topSortNumber installedPkgIndex') (annotateSourcePackages constraints topSortNumber sourcePkgIndex') (installedPkgIndex', sourcePkgIndex') = selectNeededSubset installedPkgIndex sourcePkgIndex initialPkgNames topSortNumber = topologicalSortNumbering installedPkgIndex' sourcePkgIndex' initialPkgNames = Set.fromList targets finalise selected' constraints' = PackageIndex.allPackages . fst . improvePlan installedPkgIndex' constraints' . 
PackageIndex.fromList $ finaliseSelectedPackages preferences selected' constraints' addTopLevelTargets :: [PackageName] -> Constraints -> Progress a Failure Constraints addTopLevelTargets [] cs = Done cs addTopLevelTargets (pkg:pkgs) cs = case Constraints.addTarget pkg cs of Satisfiable cs' () -> addTopLevelTargets pkgs cs' Unsatisfiable -> Fail (NoSuchPackage pkg) ConflictsWith _conflicts -> impossible "addTopLevelTargets conflicts" addTopLevelConstraints :: [PackageConstraint] -> Constraints -> Progress Log Failure Constraints addTopLevelConstraints [] cs = Done cs addTopLevelConstraints (PackageConstraintFlags _ _ :deps) cs = addTopLevelConstraints deps cs addTopLevelConstraints (PackageConstraintVersion pkg ver:deps) cs = case addTopLevelVersionConstraint pkg ver cs of Satisfiable cs' pkgids -> Step (AppliedVersionConstraint pkg ver pkgids) (addTopLevelConstraints deps cs') Unsatisfiable -> Fail (TopLevelVersionConstraintUnsatisfiable pkg ver) ConflictsWith conflicts -> Fail (TopLevelVersionConstraintConflict pkg ver conflicts) addTopLevelConstraints (PackageConstraintInstalled pkg:deps) cs = case addTopLevelInstalledConstraint pkg cs of Satisfiable cs' pkgids -> Step (AppliedInstalledConstraint pkg InstalledConstraint pkgids) (addTopLevelConstraints deps cs') Unsatisfiable -> Fail (TopLevelInstallConstraintUnsatisfiable pkg InstalledConstraint) ConflictsWith conflicts -> Fail (TopLevelInstallConstraintConflict pkg InstalledConstraint conflicts) addTopLevelConstraints (PackageConstraintSource pkg:deps) cs = case addTopLevelSourceConstraint pkg cs of Satisfiable cs' pkgids -> Step (AppliedInstalledConstraint pkg SourceConstraint pkgids) (addTopLevelConstraints deps cs') Unsatisfiable -> Fail (TopLevelInstallConstraintUnsatisfiable pkg SourceConstraint) ConflictsWith conflicts -> Fail (TopLevelInstallConstraintConflict pkg SourceConstraint conflicts) addTopLevelConstraints (PackageConstraintStanzas _ _ : deps) cs = addTopLevelConstraints deps cs -- | Add exclusion on available packages that cannot be configured. -- pruneBottomUp :: Platform -> CompilerInfo -> Constraints -> Progress Log Failure Constraints pruneBottomUp platform comp constraints = foldr prune Done (initialPackages constraints) constraints where prune pkgs rest cs = foldr addExcludeConstraint rest unconfigurable cs where unconfigurable = [ (pkg, missing) -- if necessary we could look up missing reasons | (Just pkg', pkg) <- zip (map getSourcePkg pkgs) pkgs , Left missing <- [configure cs pkg'] ] addExcludeConstraint (pkg, missing) rest cs = let reason = ExcludedByConfigureFail missing in case addPackageExcludeConstraint (packageId pkg) reason cs of Satisfiable cs' [pkgid]| packageId pkg == pkgid -> Step (ExcludeUnconfigurable pkgid) (rest cs') Satisfiable _ _ -> impossible "pruneBottomUp satisfiable" _ -> Fail $ ConfigureFailed pkg [ (dep, Constraints.conflicting cs dep) | dep <- missing ] configure cs (UnconfiguredPackage (SourcePackage _ pkg _ _) _ flags stanzas) = finalizePackageDescription flags (dependencySatisfiable cs) platform comp [] (enableStanzas stanzas pkg) dependencySatisfiable cs = not . null . PackageIndex.lookupDependency (Constraints.choices cs) -- collect each group of packages (by name) in reverse topsort order initialPackages = reverse . sortBy (comparing (topSortNumber . head)) . PackageIndex.allPackagesByName . 
Constraints.choices topSortNumber (InstalledOnly (InstalledPackageEx _ i _)) = i topSortNumber (SourceOnly (UnconfiguredPackage _ i _ _)) = i topSortNumber (InstalledAndSource _ (UnconfiguredPackage _ i _ _)) = i getSourcePkg (InstalledOnly _ ) = Nothing getSourcePkg (SourceOnly spkg) = Just spkg getSourcePkg (InstalledAndSource _ spkg) = Just spkg configurePackage :: Platform -> CompilerInfo -> ConfigurePackage configurePackage platform cinfo available spkg = case spkg of InstalledOnly ipkg -> Right (InstalledOnly ipkg) SourceOnly apkg -> fmap SourceOnly (configure apkg) InstalledAndSource ipkg apkg -> fmap (InstalledAndSource ipkg) (configure apkg) where configure (UnconfiguredPackage apkg@(SourcePackage _ p _ _) _ flags stanzas) = case finalizePackageDescription flags dependencySatisfiable platform cinfo [] (enableStanzas stanzas p) of Left missing -> Left missing Right (pkg, flags') -> Right $ SemiConfiguredPackage apkg flags' stanzas (externalBuildDepends pkg) dependencySatisfiable = not . null . PackageIndex.lookupDependency available -- | Annotate each installed package with its set of transitive dependencies -- and its topological sort number. -- annotateInstalledPackages :: (PackageName -> TopologicalSortNumber) -> PackageIndex InstalledPackage -> PackageIndex InstalledPackageEx annotateInstalledPackages dfsNumber installed = PackageIndex.fromList [ InstalledPackageEx pkg (dfsNumber (packageName pkg)) (transitiveDepends pkg) | pkg <- PackageIndex.allPackages installed ] where transitiveDepends :: InstalledPackage -> [PackageId] transitiveDepends = map (packageId . toPkg) . tail . Graph.reachable graph . fromJust . toVertex . packageId (graph, toPkg, toVertex) = PackageIndex.dependencyGraph installed -- | Annotate each available package with its topological sort number and any -- user-supplied partial flag assignment. -- annotateSourcePackages :: [PackageConstraint] -> (PackageName -> TopologicalSortNumber) -> PackageIndex SourcePackage -> PackageIndex UnconfiguredPackage annotateSourcePackages constraints dfsNumber sourcePkgIndex = PackageIndex.fromList [ UnconfiguredPackage pkg (dfsNumber name) (flagsFor name) (stanzasFor name) | pkg <- PackageIndex.allPackages sourcePkgIndex , let name = packageName pkg ] where flagsFor = fromMaybe [] . flip Map.lookup flagsMap flagsMap = Map.fromList [ (name, flags) | PackageConstraintFlags name flags <- constraints ] stanzasFor = fromMaybe [] . flip Map.lookup stanzasMap stanzasMap = Map.fromListWith (++) [ (name, stanzas) | PackageConstraintStanzas name stanzas <- constraints ] -- | One of the heuristics we use when guessing which path to take in the -- search space is an ordering on the choices we make. It's generally better -- to make decisions about packages higher in the dep graph first since they -- place constraints on packages lower in the dep graph. -- -- To pick them in that order we annotate each package with its topological -- sort number. So if package A depends on package B then package A will have -- a lower topological sort number than B and we'll make a choice about which -- version of A to pick before we make a choice about B (unless there is only -- one possible choice for B in which case we pick that immediately). -- -- To construct these topological sort numbers we combine and flatten the -- installed and source package sets.
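-- For intuition, a hypothetical example: if the targets need foo, foo depends
-- on bar, and bar depends on base, then the numbering below assigns
--
-- >   foo ~ 0,  bar ~ 1,  base ~ 2
--
-- so we decide about foo first, then bar, then base.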
We consider only dependencies between -- named packages, not including versions and for not-yet-configured packages -- we look at all the possible dependencies, not just those under any single -- flag assignment. This means we can actually get impossible combinations of -- edges and even cycles, but that doesn't really matter here, it's only a -- heuristic. -- topologicalSortNumbering :: PackageIndex InstalledPackage -> PackageIndex SourcePackage -> (PackageName -> TopologicalSortNumber) topologicalSortNumbering installedPkgIndex sourcePkgIndex = \pkgname -> let Just vertex = toVertex pkgname in topologicalSortNumbers Array.! vertex where topologicalSortNumbers = Array.array (Array.bounds graph) (zip (Graph.topSort graph) [0..]) (graph, _, toVertex) = Graph.graphFromEdges $ [ ((), packageName pkg, nub deps) | pkgs@(pkg:_) <- PackageIndex.allPackagesByName installedPkgIndex , let deps = [ packageName dep | pkg' <- pkgs , dep <- depends pkg' ] ] ++ [ ((), packageName pkg, nub deps) | pkgs@(pkg:_) <- PackageIndex.allPackagesByName sourcePkgIndex , let deps = [ depName | SourcePackage _ pkg' _ _ <- pkgs , Dependency depName _ <- buildDepends (flattenPackageDescription pkg') ] ] -- | We don't need the entire index (which is rather large and costly if we -- force it by examining the whole thing). So trace out the maximul subset of -- each index that we could possibly ever need. Do this by flattening packages -- and looking at the names of all possible dependencies. -- selectNeededSubset :: PackageIndex InstalledPackage -> PackageIndex SourcePackage -> Set PackageName -> (PackageIndex InstalledPackage ,PackageIndex SourcePackage) selectNeededSubset installedPkgIndex sourcePkgIndex = select mempty mempty where select :: PackageIndex InstalledPackage -> PackageIndex SourcePackage -> Set PackageName -> (PackageIndex InstalledPackage ,PackageIndex SourcePackage) select installedPkgIndex' sourcePkgIndex' remaining | Set.null remaining = (installedPkgIndex', sourcePkgIndex') | otherwise = select installedPkgIndex'' sourcePkgIndex'' remaining'' where (next, remaining') = Set.deleteFindMin remaining moreInstalled = PackageIndex.lookupPackageName installedPkgIndex next moreSource = PackageIndex.lookupPackageName sourcePkgIndex next moreRemaining = -- we filter out packages already included in the indexes -- this avoids an infinite loop if a package depends on itself -- like base-3.0.3.0 with base-4.0.0.0 filter notAlreadyIncluded $ [ packageName dep | pkg <- moreInstalled , dep <- depends pkg ] ++ [ name | SourcePackage _ pkg _ _ <- moreSource , Dependency name _ <- buildDepends (flattenPackageDescription pkg) ] installedPkgIndex'' = foldl' (flip PackageIndex.insert) installedPkgIndex' moreInstalled sourcePkgIndex'' = foldl' (flip PackageIndex.insert) sourcePkgIndex' moreSource remaining'' = foldl' (flip Set.insert) remaining' moreRemaining notAlreadyIncluded name = null (PackageIndex.lookupPackageName installedPkgIndex' name) && null (PackageIndex.lookupPackageName sourcePkgIndex' name) -- ------------------------------------------------------------ -- * Post processing the solution -- ------------------------------------------------------------ finaliseSelectedPackages :: (PackageName -> PackagePreferences) -> SelectedPackages -> Constraints -> [PlanPackage] finaliseSelectedPackages pref selected constraints = map finaliseSelected (PackageIndex.allPackages selected) where remainingChoices = Constraints.choices constraints finaliseSelected (InstalledOnly ipkg ) = finaliseInstalled ipkg finaliseSelected 
(SourceOnly apkg) = finaliseSource Nothing apkg finaliseSelected (InstalledAndSource ipkg apkg) = case PackageIndex.lookupPackageId remainingChoices (packageId ipkg) of --picked package not in constraints Nothing -> impossible "finaliseSelected no pkg" -- to constrain to avail only: Just (SourceOnly _) -> impossible "finaliseSelected src only" Just (InstalledOnly _) -> finaliseInstalled ipkg Just (InstalledAndSource _ _) -> finaliseSource (Just ipkg) apkg finaliseInstalled (InstalledPackageEx pkg _ _) = InstallPlan.PreExisting pkg finaliseSource mipkg (SemiConfiguredPackage pkg flags stanzas deps) = InstallPlan.Configured (ConfiguredPackage pkg flags stanzas deps') where deps' = map (packageId . pickRemaining mipkg) deps pickRemaining mipkg dep@(Dependency _name versionRange) = case PackageIndex.lookupDependency remainingChoices dep of [] -> impossible "pickRemaining no pkg" [pkg'] -> pkg' remaining -> assert (checkIsPaired remaining) $ maximumBy bestByPref remaining where -- We order candidate packages to pick for a dependency by these -- three factors. The last factor is just highest version wins. bestByPref = comparing (\p -> (isCurrent p, isPreferred p, packageVersion p)) -- Is the package already used by the installed version of this -- package? If so we should pick that first. This stops us from doing -- silly things like deciding to rebuild haskell98 against base 3. isCurrent = case mipkg :: Maybe InstalledPackageEx of Nothing -> \_ -> False Just ipkg -> \p -> packageId p `elem` depends ipkg -- If there is no upper bound on the version range then we apply a -- preferred version according to the hackage or user's suggested -- version constraints. TODO: distinguish hacks from prefs bounded = boundedAbove versionRange isPreferred p | bounded = True -- any constant will do | otherwise = packageVersion p `withinRange` preferredVersions where (PackagePreferences preferredVersions _) = pref (packageName p) boundedAbove :: VersionRange -> Bool boundedAbove vr = case asVersionIntervals vr of [] -> True -- this is the inconsistent version range. intervals -> case last intervals of (_, UpperBound _ _) -> True (_, NoUpperBound ) -> False -- We really only expect to find more than one choice remaining when -- we're finalising a dependency on a paired package. checkIsPaired [p1, p2] = case Constraints.isPaired constraints (packageId p1) of Just p2' -> packageId p2' == packageId p2 Nothing -> False checkIsPaired _ = False -- | Improve an existing installation plan by, where possible, swapping -- packages we plan to install with ones that are already installed. -- This may add additional constraints due to the dependencies of installed -- packages on other installed packages. -- improvePlan :: PackageIndex InstalledPackage -> Constraints -> PackageIndex PlanPackage -> (PackageIndex PlanPackage, Constraints) improvePlan installed constraints0 selected0 = foldl' improve (selected0, constraints0) (reverseTopologicalOrder selected0) where improve (selected, constraints) = fromMaybe (selected, constraints) . improvePkg selected constraints -- The idea is to improve the plan by swapping a configured package for -- an equivalent installed one. 
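-- As a rough, hypothetical example: if the plan says to build text-1.1 from
-- source, an identical text-1.1 is already installed, and every dependency of
-- the planned text-1.1 is already a pre-existing (installed) entry in the
-- plan, then the configured entry can be swapped for the installed instance,
-- saving a rebuild.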
For a particular package the condition is -- that the package be in a configured state, that a the same version be -- already installed with the exact same dependencies and all the packages -- in the plan that it depends on are in the installed state improvePkg selected constraints pkgid = do Configured pkg <- PackageIndex.lookupPackageId selected pkgid ipkg <- PackageIndex.lookupPackageId installed pkgid guard $ all (isInstalled selected) (depends pkg) tryInstalled selected constraints [ipkg] isInstalled selected pkgid = case PackageIndex.lookupPackageId selected pkgid of Just (PreExisting _) -> True _ -> False tryInstalled :: PackageIndex PlanPackage -> Constraints -> [InstalledPackage] -> Maybe (PackageIndex PlanPackage, Constraints) tryInstalled selected constraints [] = Just (selected, constraints) tryInstalled selected constraints (pkg:pkgs) = case constraintsOk (packageId pkg) (depends pkg) constraints of Nothing -> Nothing Just constraints' -> tryInstalled selected' constraints' pkgs' where selected' = PackageIndex.insert (PreExisting pkg) selected pkgs' = catMaybes (map notSelected (depends pkg)) ++ pkgs notSelected pkgid = case (PackageIndex.lookupPackageId installed pkgid ,PackageIndex.lookupPackageId selected pkgid) of (Just pkg', Nothing) -> Just pkg' _ -> Nothing constraintsOk _ [] constraints = Just constraints constraintsOk pkgid (pkgid':pkgids) constraints = case addPackageDependencyConstraint pkgid dep True constraints of Satisfiable constraints' _ -> constraintsOk pkgid pkgids constraints' _ -> Nothing where dep = thisPackageVersion pkgid' reverseTopologicalOrder :: PackageFixedDeps pkg => PackageIndex pkg -> [PackageId] reverseTopologicalOrder index = map (packageId . toPkg) . Graph.topSort . Graph.transposeG $ graph where (graph, toPkg, _) = PackageIndex.dependencyGraph index -- ------------------------------------------------------------ -- * Adding and recording constraints -- ------------------------------------------------------------ addPackageSelectConstraint :: PackageId -> Constraints -> Satisfiable Constraints [PackageId] ExclusionReason addPackageSelectConstraint pkgid = Constraints.constrain pkgname constraint reason where pkgname = packageName pkgid constraint ver _ = ver == packageVersion pkgid reason = SelectedOther pkgid addPackageExcludeConstraint :: PackageId -> ExclusionReason -> Constraints -> Satisfiable Constraints [PackageId] ExclusionReason addPackageExcludeConstraint pkgid reason = Constraints.constrain pkgname constraint reason where pkgname = packageName pkgid constraint ver installed | ver == packageVersion pkgid = installed | otherwise = True addPackageDependencyConstraint :: PackageId -> Dependency -> Bool -> Constraints -> Satisfiable Constraints [PackageId] ExclusionReason addPackageDependencyConstraint pkgid dep@(Dependency pkgname verrange) installedConstraint = Constraints.constrain pkgname constraint reason where constraint ver installed = ver `withinRange` verrange && if installedConstraint then installed else True reason = ExcludedByPackageDependency pkgid dep installedConstraint addTopLevelVersionConstraint :: PackageName -> VersionRange -> Constraints -> Satisfiable Constraints [PackageId] ExclusionReason addTopLevelVersionConstraint pkgname verrange = Constraints.constrain pkgname constraint reason where constraint ver _installed = ver `withinRange` verrange reason = ExcludedByTopLevelConstraintVersion pkgname verrange addTopLevelInstalledConstraint, addTopLevelSourceConstraint :: PackageName -> Constraints -> Satisfiable 
Constraints [PackageId] ExclusionReason addTopLevelInstalledConstraint pkgname = Constraints.constrain pkgname constraint reason where constraint _ver installed = installed reason = ExcludedByTopLevelConstraintInstalled pkgname addTopLevelSourceConstraint pkgname = Constraints.constrain pkgname constraint reason where constraint _ver installed = not installed reason = ExcludedByTopLevelConstraintSource pkgname -- ------------------------------------------------------------ -- * Reasons for constraints -- ------------------------------------------------------------ -- | For every constraint we record we also record the reason that constraint -- is needed. So if we end up failing due to conflicting constraints then we -- can give an explanation as to what was conflicting and why. -- data ExclusionReason = -- | We selected this other version of the package. That means we exclude -- all the other versions. SelectedOther PackageId -- | We excluded this version of the package because it failed to -- configure, probably because of unsatisfiable deps. | ExcludedByConfigureFail [Dependency] -- | We excluded this version of the package because another package that -- we selected imposed a dependency which this package did not satisfy. | ExcludedByPackageDependency PackageId Dependency Bool -- | We excluded this version of the package because it did not satisfy -- a dependency given as an original top level input. -- | ExcludedByTopLevelConstraintVersion PackageName VersionRange | ExcludedByTopLevelConstraintInstalled PackageName | ExcludedByTopLevelConstraintSource PackageName deriving Eq -- | Given an excluded package and the reason it was excluded, produce a human -- readable explanation. -- showExclusionReason :: PackageId -> ExclusionReason -> String showExclusionReason pkgid (SelectedOther pkgid') = display pkgid ++ " was excluded because " ++ display pkgid' ++ " was selected instead" showExclusionReason pkgid (ExcludedByConfigureFail missingDeps) = display pkgid ++ " was excluded because it could not be configured. " ++ "It requires " ++ listOf displayDep missingDeps showExclusionReason pkgid (ExcludedByPackageDependency pkgid' dep installedConstraint) = display pkgid ++ " was excluded because " ++ display pkgid' ++ " requires " ++ (if installedConstraint then "an installed instance of " else "") ++ displayDep dep showExclusionReason pkgid (ExcludedByTopLevelConstraintVersion pkgname verRange) = display pkgid ++ " was excluded because of the top level constraint " ++ displayDep (Dependency pkgname verRange) showExclusionReason pkgid (ExcludedByTopLevelConstraintInstalled pkgname) = display pkgid ++ " was excluded because of the top level constraint '" ++ display pkgname ++ " installed' which means that only installed instances " ++ "of the package may be selected." showExclusionReason pkgid (ExcludedByTopLevelConstraintSource pkgname) = display pkgid ++ " was excluded because of the top level constraint '" ++ display pkgname ++ " source' which means that only source versions " ++ "of the package may be selected."
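-- A quick sketch of the rendering above, writing package ids informally
-- (the package 'foo' is hypothetical):
--
-- >   showExclusionReason foo-1.0 (SelectedOther foo-1.1)
-- >     == "foo-1.0 was excluded because foo-1.1 was selected instead"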
-- ------------------------------------------------------------ -- * Logging progress and failures -- ------------------------------------------------------------ data Log = Select [SelectedPackage] [PackageId] | AppliedVersionConstraint PackageName VersionRange [PackageId] | AppliedInstalledConstraint PackageName InstalledConstraint [PackageId] | ExcludeUnconfigurable PackageId data Failure = NoSuchPackage PackageName | ConfigureFailed SelectablePackage [(Dependency, [(PackageId, [ExclusionReason])])] | DependencyConflict SelectedPackage Dependency Bool [(PackageId, [ExclusionReason])] | TopLevelVersionConstraintConflict PackageName VersionRange [(PackageId, [ExclusionReason])] | TopLevelVersionConstraintUnsatisfiable PackageName VersionRange | TopLevelInstallConstraintConflict PackageName InstalledConstraint [(PackageId, [ExclusionReason])] | TopLevelInstallConstraintUnsatisfiable PackageName InstalledConstraint showLog :: Log -> String showLog (Select selected discarded) = case (selectedMsg, discardedMsg) of ("", y) -> y (x, "") -> x (x, y) -> x ++ " and " ++ y where selectedMsg = "selecting " ++ case selected of [] -> "" [s] -> display (packageId s) ++ " " ++ kind s (s:ss) -> listOf id $ (display (packageId s) ++ " " ++ kind s) : [ display (packageVersion s') ++ " " ++ kind s' | s' <- ss ] kind (InstalledOnly _) = "(installed)" kind (SourceOnly _) = "(source)" kind (InstalledAndSource _ _) = "(installed or source)" discardedMsg = case discarded of [] -> "" _ -> "discarding " ++ listOf id [ element | (pkgid:pkgids) <- groupBy (equating packageName) (sort discarded) , element <- display pkgid : map (display . packageVersion) pkgids ] showLog (AppliedVersionConstraint pkgname ver pkgids) = "applying constraint " ++ display (Dependency pkgname ver) ++ if null pkgids then "" else " which excludes " ++ listOf display pkgids showLog (AppliedInstalledConstraint pkgname inst pkgids) = "applying constraint " ++ display pkgname ++ " '" ++ (case inst of InstalledConstraint -> "installed"; _ -> "source") ++ "' " ++ if null pkgids then "" else "which excludes " ++ listOf display pkgids showLog (ExcludeUnconfigurable pkgid) = "excluding " ++ display pkgid ++ " (it cannot be configured)" showFailure :: Failure -> String showFailure (NoSuchPackage pkgname) = "The package " ++ display pkgname ++ " is unknown." showFailure (ConfigureFailed pkg missingDeps) = "cannot configure " ++ displayPkg pkg ++ ". It requires " ++ listOf (displayDep . fst) missingDeps ++ '\n' : unlines (map (uncurry whyNot) missingDeps) where whyNot (Dependency name ver) [] = "There is no available version of " ++ display name ++ " that satisfies " ++ displayVer ver whyNot dep conflicts = "For the dependency on " ++ displayDep dep ++ " there are these packages: " ++ listOf display pkgs ++ ". 
However none of them are available.\n" ++ unlines [ showExclusionReason (packageId pkg') reason | (pkg', reasons) <- conflicts, reason <- reasons ] where pkgs = map fst conflicts showFailure (DependencyConflict pkg dep installedConstraint conflicts) = "dependencies conflict: " ++ displayPkg pkg ++ " requires " ++ (if installedConstraint then "an installed instance of " else "") ++ displayDep dep ++ " however:\n" ++ unlines [ showExclusionReason (packageId pkg') reason | (pkg', reasons) <- conflicts, reason <- reasons ] showFailure (TopLevelVersionConstraintConflict name ver conflicts) = "constraints conflict: we have the top level constraint " ++ displayDep (Dependency name ver) ++ ", but\n" ++ unlines [ showExclusionReason (packageId pkg') reason | (pkg', reasons) <- conflicts, reason <- reasons ] showFailure (TopLevelVersionConstraintUnsatisfiable name ver) = "There is no available version of " ++ display name ++ " that satisfies " ++ displayVer ver showFailure (TopLevelInstallConstraintConflict name InstalledConstraint conflicts) = "constraints conflict: " ++ "top level constraint '" ++ display name ++ " installed' however\n" ++ unlines [ showExclusionReason (packageId pkg') reason | (pkg', reasons) <- conflicts, reason <- reasons ] showFailure (TopLevelInstallConstraintUnsatisfiable name InstalledConstraint) = "There is no installed version of " ++ display name showFailure (TopLevelInstallConstraintConflict name SourceConstraint conflicts) = "constraints conflict: " ++ "top level constraint '" ++ display name ++ " source' however\n" ++ unlines [ showExclusionReason (packageId pkg') reason | (pkg', reasons) <- conflicts, reason <- reasons ] showFailure (TopLevelInstallConstraintUnsatisfiable name SourceConstraint) = "There is no available source version of " ++ display name displayVer :: VersionRange -> String displayVer = display . simplifyVersionRange displayDep :: Dependency -> String displayDep = display . simplifyDependency -- ------------------------------------------------------------ -- * Utils -- ------------------------------------------------------------ impossible :: String -> a impossible msg = internalError $ "assertion failure: " ++ msg internalError :: String -> a internalError msg = error $ "internal error: " ++ msg displayPkg :: Package pkg => pkg -> String displayPkg = display . packageId listOf :: (a -> String) -> [a] -> String listOf _ [] = [] listOf disp [x0] = disp x0 listOf disp (x0:x1:xs) = disp x0 ++ go x1 xs where go x [] = " and " ++ disp x go x (x':xs') = ", " ++ disp x ++ go x' xs' cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/0000755000000000000000000000000012540225656022221 5ustar0000000000000000cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/PSQ.hs0000644000000000000000000000536212540225656023226 0ustar0000000000000000{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveFoldable, DeriveTraversable #-} module Distribution.Client.Dependency.Modular.PSQ where -- Priority search queues. -- -- I am not yet sure what exactly is needed. But we need a data structure with -- key-based lookup that can be sorted. We're using a sequence right now with -- (inefficiently implemented) lookup, because I think that queue-based -- operations and sorting turn out to be more efficiency-critical in practice. 
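-- A small usage sketch, as seen from a client module that imports this one
-- as P (illustrative keys and values only):
--
-- >   P.toList (P.cons "b" 2 (P.fromList [("a", 1)]))   ==  [("b", 2), ("a", 1)]
-- >   P.lookup "a" (P.fromList [("b", 2), ("a", 1)])    ==  Just 1
--
-- i.e. 'cons' pushes onto the front of the underlying association list and
-- 'lookup' is a linear scan over it.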
import Data.Foldable import Data.Function import Data.List as S hiding (foldr) import Data.Traversable import Prelude hiding (foldr) newtype PSQ k v = PSQ [(k, v)] deriving (Eq, Show, Functor, Foldable, Traversable) keys :: PSQ k v -> [k] keys (PSQ xs) = fmap fst xs lookup :: Eq k => k -> PSQ k v -> Maybe v lookup k (PSQ xs) = S.lookup k xs map :: (v1 -> v2) -> PSQ k v1 -> PSQ k v2 map f (PSQ xs) = PSQ (fmap (\ (k, v) -> (k, f v)) xs) mapKeys :: (k1 -> k2) -> PSQ k1 v -> PSQ k2 v mapKeys f (PSQ xs) = PSQ (fmap (\ (k, v) -> (f k, v)) xs) mapWithKey :: (k -> a -> b) -> PSQ k a -> PSQ k b mapWithKey f (PSQ xs) = PSQ (fmap (\ (k, v) -> (k, f k v)) xs) mapWithKeyState :: (s -> k -> a -> (b, s)) -> PSQ k a -> s -> PSQ k b mapWithKeyState p (PSQ xs) s0 = PSQ (foldr (\ (k, v) r s -> case p s k v of (w, n) -> (k, w) : (r n)) (const []) xs s0) delete :: Eq k => k -> PSQ k a -> PSQ k a delete k (PSQ xs) = PSQ (snd (partition ((== k) . fst) xs)) fromList :: [(k, a)] -> PSQ k a fromList = PSQ cons :: k -> a -> PSQ k a -> PSQ k a cons k x (PSQ xs) = PSQ ((k, x) : xs) snoc :: PSQ k a -> k -> a -> PSQ k a snoc (PSQ xs) k x = PSQ (xs ++ [(k, x)]) casePSQ :: PSQ k a -> r -> (k -> a -> PSQ k a -> r) -> r casePSQ (PSQ xs) n c = case xs of [] -> n (k, v) : ys -> c k v (PSQ ys) splits :: PSQ k a -> PSQ k (a, PSQ k a) splits = go id where go f xs = casePSQ xs (PSQ []) (\ k v ys -> cons k (v, f ys) (go (f . cons k v) ys)) sortBy :: (a -> a -> Ordering) -> PSQ k a -> PSQ k a sortBy cmp (PSQ xs) = PSQ (S.sortBy (cmp `on` snd) xs) sortByKeys :: (k -> k -> Ordering) -> PSQ k a -> PSQ k a sortByKeys cmp (PSQ xs) = PSQ (S.sortBy (cmp `on` fst) xs) filterKeys :: (k -> Bool) -> PSQ k a -> PSQ k a filterKeys p (PSQ xs) = PSQ (S.filter (p . fst) xs) filter :: (a -> Bool) -> PSQ k a -> PSQ k a filter p (PSQ xs) = PSQ (S.filter (p . snd) xs) length :: PSQ k a -> Int length (PSQ xs) = S.length xs -- | "Lazy length". -- -- Only approximates the length, but doesn't force the list. llength :: PSQ k a -> Int llength (PSQ []) = 0 llength (PSQ (_:[])) = 1 llength (PSQ (_:_:[])) = 2 llength (PSQ _) = 3 null :: PSQ k a -> Bool null (PSQ xs) = S.null xs toList :: PSQ k a -> [(k, a)] toList (PSQ xs) = xs cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Index.hs0000644000000000000000000000243012540225656023623 0ustar0000000000000000module Distribution.Client.Dependency.Modular.Index where import Data.List as L import Data.Map as M import Prelude hiding (pi) import Distribution.Client.Dependency.Modular.Dependency import Distribution.Client.Dependency.Modular.Flag import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Dependency.Modular.Tree -- | An index contains information about package instances. This is a nested -- dictionary. Package names are mapped to instances, which in turn is mapped -- to info. type Index = Map PN (Map I PInfo) -- | Info associated with a package instance. -- Currently, dependencies, flags, encapsulations and failure reasons. -- Packages that have a failure reason recorded for them are disabled -- globally, for reasons external to the solver. We currently use this -- for shadowing which essentially is a GHC limitation, and for -- installed packages that are broken. data PInfo = PInfo (FlaggedDeps PN) FlagInfo Encaps (Maybe FailReason) deriving (Show) -- | Encapsulations. A list of package names. 
type Encaps = [PN] mkIndex :: [(PN, I, PInfo)] -> Index mkIndex xs = M.map M.fromList (groupMap (L.map (\ (pn, i, pi) -> (pn, (i, pi))) xs)) groupMap :: Ord a => [(a, b)] -> Map a [b] groupMap xs = M.fromListWith (flip (++)) (L.map (\ (x, y) -> (x, [y])) xs) cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Configured.hs0000644000000000000000000000064112540225656024643 0ustar0000000000000000module Distribution.Client.Dependency.Modular.Configured where import Distribution.PackageDescription (FlagAssignment) -- from Cabal import Distribution.Client.Types (OptionalStanza) import Distribution.Client.Dependency.Modular.Package -- | A configured package is a package instance together with -- a flag assignment and complete dependencies. data CP qpn = CP (PI qpn) FlagAssignment [OptionalStanza] [PI qpn] cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Tree.hs0000644000000000000000000001141612540225656023457 0ustar0000000000000000{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} module Distribution.Client.Dependency.Modular.Tree where import Control.Monad hiding (mapM) import Data.Foldable import Data.Traversable import Prelude hiding (foldr, mapM) import Distribution.Client.Dependency.Modular.Dependency import Distribution.Client.Dependency.Modular.Flag import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Dependency.Modular.PSQ as P import Distribution.Client.Dependency.Modular.Version -- | Type of the search tree. Inlining the choice nodes for now. data Tree a = PChoice QPN a (PSQ I (Tree a)) | FChoice QFN a Bool Bool (PSQ Bool (Tree a)) -- Bool indicates whether it's weak/trivial, second Bool whether it's manual | SChoice QSN a Bool (PSQ Bool (Tree a)) -- Bool indicates whether it's trivial | GoalChoice (PSQ OpenGoal (Tree a)) -- PSQ should never be empty | Done RevDepMap | Fail (ConflictSet QPN) FailReason deriving (Eq, Show, Functor) -- Above, a choice is called trivial if it clearly does not matter. The -- special case of triviality we actually consider is if there are no new -- dependencies introduced by this node. -- -- A (flag) choice is called weak if we do want to defer it. This is the -- case for flags that should be implied by what's currently installed on -- the system, as opposed to flags that are used to explicitly enable or -- disable some functionality. data FailReason = InconsistentInitialConstraints | Conflicting [Dep QPN] | CannotInstall | CannotReinstall | Shadowed | Broken | GlobalConstraintVersion VR | GlobalConstraintInstalled | GlobalConstraintSource | GlobalConstraintFlag | ManualFlag | BuildFailureNotInIndex PN | MalformedFlagChoice QFN | MalformedStanzaChoice QSN | EmptyGoalChoice | Backjump deriving (Eq, Show) -- | Functor for the tree type. 
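-- The pattern functor below, together with its derived 'Functor', 'Foldable'
-- and 'Traversable' instances, is what drives the generic folds further down
-- ('cata', 'para', 'ana'). A minimal, hypothetical use of 'cata' (not part of
-- this module) that counts failure leaves:
--
-- > countFails :: Tree a -> Int
-- > countFails = cata go
-- >   where
-- >     go (FailF _ _) = 1
-- >     go t           = foldr (+) 0 t   -- sum the counts of the children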
data TreeF a b = PChoiceF QPN a (PSQ I b) | FChoiceF QFN a Bool Bool (PSQ Bool b) | SChoiceF QSN a Bool (PSQ Bool b) | GoalChoiceF (PSQ OpenGoal b) | DoneF RevDepMap | FailF (ConflictSet QPN) FailReason deriving (Functor, Foldable, Traversable) out :: Tree a -> TreeF a (Tree a) out (PChoice p i ts) = PChoiceF p i ts out (FChoice p i b m ts) = FChoiceF p i b m ts out (SChoice p i b ts) = SChoiceF p i b ts out (GoalChoice ts) = GoalChoiceF ts out (Done x ) = DoneF x out (Fail c x ) = FailF c x inn :: TreeF a (Tree a) -> Tree a inn (PChoiceF p i ts) = PChoice p i ts inn (FChoiceF p i b m ts) = FChoice p i b m ts inn (SChoiceF p i b ts) = SChoice p i b ts inn (GoalChoiceF ts) = GoalChoice ts inn (DoneF x ) = Done x inn (FailF c x ) = Fail c x -- | Determines whether a tree is active, i.e., isn't a failure node. active :: Tree a -> Bool active (Fail _ _) = False active _ = True -- | Determines how many active choices are available in a node. Note that we -- count goal choices as having one choice, always. choices :: Tree a -> Int choices (PChoice _ _ ts) = P.length (P.filter active ts) choices (FChoice _ _ _ _ ts) = P.length (P.filter active ts) choices (SChoice _ _ _ ts) = P.length (P.filter active ts) choices (GoalChoice _ ) = 1 choices (Done _ ) = 1 choices (Fail _ _ ) = 0 -- | Variant of 'choices' that only approximates the number of choices, -- using 'llength'. lchoices :: Tree a -> Int lchoices (PChoice _ _ ts) = P.llength (P.filter active ts) lchoices (FChoice _ _ _ _ ts) = P.llength (P.filter active ts) lchoices (SChoice _ _ _ ts) = P.llength (P.filter active ts) lchoices (GoalChoice _ ) = 1 lchoices (Done _ ) = 1 lchoices (Fail _ _ ) = 0 -- | Catamorphism on trees. cata :: (TreeF a b -> b) -> Tree a -> b cata phi x = (phi . fmap (cata phi) . out) x trav :: (TreeF a (Tree b) -> TreeF b (Tree b)) -> Tree a -> Tree b trav psi x = cata (inn . psi) x -- | Paramorphism on trees. para :: (TreeF a (b, Tree a) -> b) -> Tree a -> b para phi = phi . fmap (\ x -> (para phi x, x)) . out cataM :: Monad m => (TreeF a b -> m b) -> Tree a -> m b cataM phi = phi <=< mapM (cataM phi) <=< return . out -- | Anamorphism on trees. ana :: (b -> TreeF a b) -> b -> Tree a ana psi = inn . fmap (ana psi) . psi anaM :: Monad m => (b -> m (TreeF a b)) -> b -> m (Tree a) anaM psi = return . 
inn <=< mapM (anaM psi) <=< psi cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/ConfiguredConversion.hs0000644000000000000000000000266312540225656026717 0ustar0000000000000000module Distribution.Client.Dependency.Modular.ConfiguredConversion where import Data.Maybe import Prelude hiding (pi) import Distribution.Client.InstallPlan import Distribution.Client.Types import Distribution.Compiler import qualified Distribution.Client.PackageIndex as CI import qualified Distribution.Simple.PackageIndex as SI import Distribution.System import Distribution.Client.Dependency.Modular.Configured import Distribution.Client.Dependency.Modular.Package mkPlan :: Platform -> CompilerInfo -> SI.InstalledPackageIndex -> CI.PackageIndex SourcePackage -> [CP QPN] -> Either [PlanProblem] InstallPlan mkPlan plat comp iidx sidx cps = new plat comp (SI.fromList (map (convCP iidx sidx) cps)) convCP :: SI.InstalledPackageIndex -> CI.PackageIndex SourcePackage -> CP QPN -> PlanPackage convCP iidx sidx (CP qpi fa es ds) = case convPI qpi of Left pi -> PreExisting $ InstalledPackage (fromJust $ SI.lookupInstalledPackageId iidx pi) (map convPI' ds) Right pi -> Configured $ ConfiguredPackage (fromJust $ CI.lookupPackageId sidx pi) fa es (map convPI' ds) convPI :: PI QPN -> Either InstalledPackageId PackageId convPI (PI _ (I _ (Inst pi))) = Left pi convPI qpi = Right $ convPI' qpi convPI' :: PI QPN -> PackageId convPI' (PI (Q _ pn) (I v _)) = PackageIdentifier pn v cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Assignment.hs0000644000000000000000000001466012540225656024674 0ustar0000000000000000module Distribution.Client.Dependency.Modular.Assignment where import Control.Applicative import Control.Monad import Data.Array as A import Data.List as L import Data.Map as M import Data.Maybe import Data.Graph import Prelude hiding (pi) import Distribution.PackageDescription (FlagAssignment) -- from Cabal import Distribution.Client.Types (OptionalStanza) import Distribution.Client.Dependency.Modular.Configured import Distribution.Client.Dependency.Modular.Dependency import Distribution.Client.Dependency.Modular.Flag import Distribution.Client.Dependency.Modular.Index import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Dependency.Modular.Version -- | A (partial) package assignment. Qualified package names -- are associated with instances. type PAssignment = Map QPN I -- | A (partial) package preassignment. Qualified package names -- are associated with constrained instances. Constrained instances -- record constraints about the instances that can still be chosen, -- and in the extreme case fix a concrete instance. type PPreAssignment = Map QPN (CI QPN) type FAssignment = Map QFN Bool type SAssignment = Map QSN Bool -- | A (partial) assignment of variables. data Assignment = A PAssignment FAssignment SAssignment deriving (Show, Eq) -- | A preassignment comprises knowledge about variables, but not -- necessarily fixed values. data PreAssignment = PA PPreAssignment FAssignment SAssignment -- | Extend a package preassignment. -- -- Takes the variable that causes the new constraints, a current preassignment -- and a set of new dependency constraints. -- -- We're trying to extend the preassignment with each dependency one by one. -- Each dependency is for a particular variable. We check if we already have -- constraints for that variable in the current preassignment. If so, we're -- trying to merge the constraints. 
-- -- Either returns a witness of the conflict that would arise during the merge, -- or the successfully extended assignment. extend :: Var QPN -> PPreAssignment -> [Dep QPN] -> Either (ConflictSet QPN, [Dep QPN]) PPreAssignment extend var pa qa = foldM (\ a (Dep qpn ci) -> let ci' = M.findWithDefault (Constrained []) qpn a in case (\ x -> M.insert qpn x a) <$> merge ci' ci of Left (c, (d, d')) -> Left (c, L.map (Dep qpn) (simplify (P qpn) d d')) Right x -> Right x) pa qa where -- We're trying to remove trivial elements of the conflict. If we're just -- making a choice pkg == instance, and pkg => pkg == instance is a part -- of the conflict, then this info is clear from the context and does not -- have to be repeated. simplify v (Fixed _ (Goal var' _)) c | v == var && var' == var = [c] simplify v c (Fixed _ (Goal var' _)) | v == var && var' == var = [c] simplify _ c d = [c, d] -- | Delivers an ordered list of fully configured packages. -- -- TODO: This function is (sort of) ok. However, there's an open bug -- w.r.t. unqualification. There might be several different instances -- of one package version chosen by the solver, which will lead to -- clashes. toCPs :: Assignment -> RevDepMap -> [CP QPN] toCPs (A pa fa sa) rdm = let -- get hold of the graph g :: Graph vm :: Vertex -> ((), QPN, [QPN]) cvm :: QPN -> Maybe Vertex -- Note that the RevDepMap contains duplicate dependencies. Therefore the nub. (g, vm, cvm) = graphFromEdges (L.map (\ (x, xs) -> ((), x, nub xs)) (M.toList rdm)) tg :: Graph tg = transposeG g -- Topsort the dependency graph, yielding a list of pkgs in the right order. -- The graph will still contain all the installed packages, and it might -- contain duplicates, because several variables might actually resolve to -- the same package in the presence of qualified package names. ps :: [PI QPN] ps = L.map ((\ (_, x, _) -> PI x (pa M.! x)) . vm) $ topSort g -- Determine the flags per package, by walking over and regrouping the -- complete flag assignment by package. fapp :: Map QPN FlagAssignment fapp = M.fromListWith (++) $ L.map (\ ((FN (PI qpn _) fn), b) -> (qpn, [(fn, b)])) $ M.toList $ fa -- Stanzas per package. sapp :: Map QPN [OptionalStanza] sapp = M.fromListWith (++) $ L.map (\ ((SN (PI qpn _) sn), b) -> (qpn, if b then [sn] else [])) $ M.toList $ sa -- Dependencies per package. depp :: QPN -> [PI QPN] depp qpn = let v :: Vertex v = fromJust (cvm qpn) dvs :: [Vertex] dvs = tg A.! v in L.map (\ dv -> case vm dv of (_, x, _) -> PI x (pa M.! x)) dvs in L.map (\ pi@(PI qpn _) -> CP pi (M.findWithDefault [] qpn fapp) (M.findWithDefault [] qpn sapp) (depp qpn)) ps -- | Finalize an assignment and a reverse dependency map. -- -- This is preliminary, and geared towards output right now. finalize :: Index -> Assignment -> RevDepMap -> IO () finalize idx (A pa fa _) rdm = let -- get hold of the graph g :: Graph vm :: Vertex -> ((), QPN, [QPN]) (g, vm) = graphFromEdges' (L.map (\ (x, xs) -> ((), x, xs)) (M.toList rdm)) -- topsort the dependency graph, yielding a list of pkgs in the right order f :: [PI QPN] f = L.filter (not . instPI) (L.map ((\ (_, x, _) -> PI x (pa M.! x)) . 
vm) (topSort g)) fapp :: Map QPN [(QFN, Bool)] -- flags per package fapp = M.fromListWith (++) $ L.map (\ (qfn@(FN (PI qpn _) _), b) -> (qpn, [(qfn, b)])) $ M.toList $ fa -- print one instance ppi pi@(PI qpn _) = showPI pi ++ status pi ++ " " ++ pflags (M.findWithDefault [] qpn fapp) -- print install status status :: PI QPN -> String status (PI (Q _ pn) _) = case insts of [] -> " (new)" vs -> " (" ++ intercalate ", " (L.map showVer vs) ++ ")" where insts = L.map (\ (I v _) -> v) $ L.filter isInstalled $ M.keys (M.findWithDefault M.empty pn idx) isInstalled (I _ (Inst _ )) = True isInstalled _ = False -- print flag assignment pflags = unwords . L.map (uncurry showFBool) in -- show packages with associated flag assignments putStr (unlines (L.map ppi f)) cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Preference.hs0000644000000000000000000003152412540225656024640 0ustar0000000000000000{-# LANGUAGE CPP #-} module Distribution.Client.Dependency.Modular.Preference where -- Reordering or pruning the tree in order to prefer or make certain choices. import qualified Data.List as L import qualified Data.Map as M #if !MIN_VERSION_base(4,8,0) import Data.Monoid #endif import Data.Ord import Distribution.Client.Dependency.Types ( PackageConstraint(..), PackagePreferences(..), InstalledPreference(..) ) import Distribution.Client.Types ( OptionalStanza(..) ) import Distribution.Client.Dependency.Modular.Dependency import Distribution.Client.Dependency.Modular.Flag import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Dependency.Modular.PSQ as P import Distribution.Client.Dependency.Modular.Tree import Distribution.Client.Dependency.Modular.Version -- | Generic abstraction for strategies that just rearrange the package order. -- Only packages that match the given predicate are reordered. packageOrderFor :: (PN -> Bool) -> (PN -> I -> I -> Ordering) -> Tree a -> Tree a packageOrderFor p cmp = trav go where go (PChoiceF v@(Q _ pn) r cs) | p pn = PChoiceF v r (P.sortByKeys (flip (cmp pn)) cs) | otherwise = PChoiceF v r cs go x = x -- | Ordering that treats preferred versions as greater than non-preferred -- versions. preferredVersionsOrdering :: VR -> Ver -> Ver -> Ordering preferredVersionsOrdering vr v1 v2 = compare (checkVR vr v1) (checkVR vr v2) -- | Traversal that tries to establish package preferences (not constraints). -- Works by reordering choice nodes. preferPackagePreferences :: (PN -> PackagePreferences) -> Tree a -> Tree a preferPackagePreferences pcs = packageOrderFor (const True) preference where preference pn i1@(I v1 _) i2@(I v2 _) = let PackagePreferences vr ipref = pcs pn in preferredVersionsOrdering vr v1 v2 `mappend` -- combines lexically locationsOrdering ipref i1 i2 -- Note that we always rank installed before uninstalled, and later -- versions before earlier, but we can change the priority of the -- two orderings. locationsOrdering PreferInstalled v1 v2 = preferInstalledOrdering v1 v2 `mappend` preferLatestOrdering v1 v2 locationsOrdering PreferLatest v1 v2 = preferLatestOrdering v1 v2 `mappend` preferInstalledOrdering v1 v2 -- | Ordering that treats installed instances as greater than uninstalled ones. preferInstalledOrdering :: I -> I -> Ordering preferInstalledOrdering (I _ (Inst _)) (I _ (Inst _)) = EQ preferInstalledOrdering (I _ (Inst _)) _ = GT preferInstalledOrdering _ (I _ (Inst _)) = LT preferInstalledOrdering _ _ = EQ -- | Compare instances by their version numbers. 
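-- These primitive orderings are chained together with 'mappend' in
-- 'preferPackagePreferences' above; as a reminder, the 'Ordering' monoid only
-- falls through to the second comparison when the first one yields EQ:
--
-- >   GT `mappend` LT  ==  GT
-- >   EQ `mappend` LT  ==  LT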
preferLatestOrdering :: I -> I -> Ordering preferLatestOrdering (I v1 _) (I v2 _) = compare v1 v2 -- | Helper function that tries to enforce a single package constraint on a -- given instance for a P-node. Translates the constraint into a -- tree-transformer that either leaves the subtree untouched, or replaces it -- with an appropriate failure node. processPackageConstraintP :: ConflictSet QPN -> I -> PackageConstraint -> Tree a -> Tree a processPackageConstraintP c (I v _) (PackageConstraintVersion _ vr) r | checkVR vr v = r | otherwise = Fail c (GlobalConstraintVersion vr) processPackageConstraintP c i (PackageConstraintInstalled _) r | instI i = r | otherwise = Fail c GlobalConstraintInstalled processPackageConstraintP c i (PackageConstraintSource _) r | not (instI i) = r | otherwise = Fail c GlobalConstraintSource processPackageConstraintP _ _ _ r = r -- | Helper function that tries to enforce a single package constraint on a -- given flag setting for an F-node. Translates the constraint into a -- tree-transformer that either leaves the subtree untouched, or replaces it -- with an appropriate failure node. processPackageConstraintF :: Flag -> ConflictSet QPN -> Bool -> PackageConstraint -> Tree a -> Tree a processPackageConstraintF f c b' (PackageConstraintFlags _ fa) r = case L.lookup f fa of Nothing -> r Just b | b == b' -> r | otherwise -> Fail c GlobalConstraintFlag processPackageConstraintF _ _ _ _ r = r -- | Helper function that tries to enforce a single package constraint on a -- given flag setting for an F-node. Translates the constraint into a -- tree-transformer that either leaves the subtree untouched, or replaces it -- with an appropriate failure node. processPackageConstraintS :: OptionalStanza -> ConflictSet QPN -> Bool -> PackageConstraint -> Tree a -> Tree a processPackageConstraintS s c b' (PackageConstraintStanzas _ ss) r = if not b' && s `elem` ss then Fail c GlobalConstraintFlag else r processPackageConstraintS _ _ _ _ r = r -- | Traversal that tries to establish various kinds of user constraints. Works -- by selectively disabling choices that have been ruled out by global user -- constraints. enforcePackageConstraints :: M.Map PN [PackageConstraint] -> Tree QGoalReasonChain -> Tree QGoalReasonChain enforcePackageConstraints pcs = trav go where go (PChoiceF qpn@(Q _ pn) gr ts) = let c = toConflictSet (Goal (P qpn) gr) -- compose the transformation functions for each of the relevant constraint g = \ i -> foldl (\ h pc -> h . processPackageConstraintP c i pc) id (M.findWithDefault [] pn pcs) in PChoiceF qpn gr (P.mapWithKey g ts) go (FChoiceF qfn@(FN (PI (Q _ pn) _) f) gr tr m ts) = let c = toConflictSet (Goal (F qfn) gr) -- compose the transformation functions for each of the relevant constraint g = \ b -> foldl (\ h pc -> h . processPackageConstraintF f c b pc) id (M.findWithDefault [] pn pcs) in FChoiceF qfn gr tr m (P.mapWithKey g ts) go (SChoiceF qsn@(SN (PI (Q _ pn) _) f) gr tr ts) = let c = toConflictSet (Goal (S qsn) gr) -- compose the transformation functions for each of the relevant constraint g = \ b -> foldl (\ h pc -> h . processPackageConstraintS f c b pc) id (M.findWithDefault [] pn pcs) in SChoiceF qsn gr tr (P.mapWithKey g ts) go x = x -- | Transformation that tries to enforce manual flags. Manual flags -- can only be re-set explicitly by the user. This transformation should -- be run after user preferences have been enforced. For manual flags, -- it checks if a user choice has been made. If not, it disables all but -- the first choice. 
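-- Schematically, for a hypothetical manual flag whose branches arrive as
-- [(False, t1), (True, t2)] with no user choice recorded:
--
-- >   [(False, t1), (True, t2)]  ~>  [(False, t1), (True, Fail c ManualFlag)]
--
-- whereas if the first branch has already been disabled by a user constraint
-- (a 'Fail _ GlobalConstraintFlag' node), the choices are left untouched.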
enforceManualFlags :: Tree QGoalReasonChain -> Tree QGoalReasonChain enforceManualFlags = trav go where go (FChoiceF qfn gr tr True ts) = FChoiceF qfn gr tr True $ let c = toConflictSet (Goal (F qfn) gr) in case span isDisabled (P.toList ts) of ([], y : ys) -> P.fromList (y : L.map (\ (b, _) -> (b, Fail c ManualFlag)) ys) _ -> ts -- something has been manually selected, leave things alone where isDisabled (_, Fail _ GlobalConstraintFlag) = True isDisabled _ = False go x = x -- | Prefer installed packages over non-installed packages, generally. -- All installed packages or non-installed packages are treated as -- equivalent. preferInstalled :: Tree a -> Tree a preferInstalled = packageOrderFor (const True) (const preferInstalledOrdering) -- | Prefer packages with higher version numbers over packages with -- lower version numbers, for certain packages. preferLatestFor :: (PN -> Bool) -> Tree a -> Tree a preferLatestFor p = packageOrderFor p (const preferLatestOrdering) -- | Prefer packages with higher version numbers over packages with -- lower version numbers, for all packages. preferLatest :: Tree a -> Tree a preferLatest = preferLatestFor (const True) -- | Require installed packages. requireInstalled :: (PN -> Bool) -> Tree (QGoalReasonChain, a) -> Tree (QGoalReasonChain, a) requireInstalled p = trav go where go (PChoiceF v@(Q _ pn) i@(gr, _) cs) | p pn = PChoiceF v i (P.mapWithKey installed cs) | otherwise = PChoiceF v i cs where installed (I _ (Inst _)) x = x installed _ _ = Fail (toConflictSet (Goal (P v) gr)) CannotInstall go x = x -- | Avoid reinstalls. -- -- This is a tricky strategy. If a package version is installed already and the -- same version is available from a repo, the repo version will never be chosen. -- This would result in a reinstall (either destructively, or potentially, -- shadowing). The old instance won't be visible or even present anymore, but -- other packages might have depended on it. -- -- TODO: It would be better to actually check the reverse dependencies of installed -- packages. If they're not depended on, then reinstalling should be fine. Even if -- they are, perhaps this should just result in trying to reinstall those other -- packages as well. However, doing this all neatly in one pass would require to -- change the builder, or at least to change the goal set after building. avoidReinstalls :: (PN -> Bool) -> Tree (QGoalReasonChain, a) -> Tree (QGoalReasonChain, a) avoidReinstalls p = trav go where go (PChoiceF qpn@(Q _ pn) i@(gr, _) cs) | p pn = PChoiceF qpn i disableReinstalls | otherwise = PChoiceF qpn i cs where disableReinstalls = let installed = [ v | (I v (Inst _), _) <- toList cs ] in P.mapWithKey (notReinstall installed) cs notReinstall vs (I v InRepo) _ | v `elem` vs = Fail (toConflictSet (Goal (P qpn) gr)) CannotReinstall notReinstall _ _ x = x go x = x -- | Always choose the first goal in the list next, abandoning all -- other choices. -- -- This is unnecessary for the default search strategy, because -- it descends only into the first goal choice anyway, -- but may still make sense to just reduce the tree size a bit. firstGoal :: Tree a -> Tree a firstGoal = trav go where go (GoalChoiceF xs) = -- casePSQ xs (GoalChoiceF xs) (\ _ t _ -> out t) -- more space efficient, but removes valuable debug info casePSQ xs (GoalChoiceF (fromList [])) (\ g t _ -> GoalChoiceF (fromList [(g, t)])) go x = x -- Note that we keep empty choice nodes, because they mean success. -- | Transformation that tries to make a decision on base as early as -- possible. 
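-- (Concretely, 'preferBase' below ranks a top-level dependency on "base"
-- before every other open goal, and since the underlying sort is stable the
-- relative order of the remaining goals is preserved.)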
In nearly all cases, there's a single choice for the base -- package. Also, fixing base early should lead to better error messages. preferBaseGoalChoice :: Tree a -> Tree a preferBaseGoalChoice = trav go where go (GoalChoiceF xs) = GoalChoiceF (P.sortByKeys preferBase xs) go x = x preferBase :: OpenGoal -> OpenGoal -> Ordering preferBase (OpenGoal (Simple (Dep (Q [] pn) _)) _) _ | unPN pn == "base" = LT preferBase _ (OpenGoal (Simple (Dep (Q [] pn) _)) _) | unPN pn == "base" = GT preferBase _ _ = EQ -- | Transformation that sorts choice nodes so that -- child nodes with a small branching degree are preferred. As a -- special case, choices with 0 branches will be preferred (as they -- are immediately considered inconsistent), and choices with 1 -- branch will also be preferred (as they don't involve choice). preferEasyGoalChoices :: Tree a -> Tree a preferEasyGoalChoices = trav go where go (GoalChoiceF xs) = GoalChoiceF (P.sortBy (comparing choices) xs) go x = x -- | Transformation that tries to avoid making weak flag choices early. -- Weak flags are trivial flags (not influencing dependencies) or such -- flags that are explicitly declared to be weak in the index. deferWeakFlagChoices :: Tree a -> Tree a deferWeakFlagChoices = trav go where go (GoalChoiceF xs) = GoalChoiceF (P.sortBy defer xs) go x = x defer :: Tree a -> Tree a -> Ordering defer (FChoice _ _ True _ _) _ = GT defer _ (FChoice _ _ True _ _) = LT defer _ _ = EQ -- | Variant of 'preferEasyGoalChoices'. -- -- Only approximates the number of choices in the branches. Less accurate, -- more efficient. lpreferEasyGoalChoices :: Tree a -> Tree a lpreferEasyGoalChoices = trav go where go (GoalChoiceF xs) = GoalChoiceF (P.sortBy (comparing lchoices) xs) go x = x -- | Variant of 'preferEasyGoalChoices'. -- -- I first thought that using a paramorphism might be faster here, -- but it doesn't seem to make any difference. preferEasyGoalChoices' :: Tree a -> Tree a preferEasyGoalChoices' = para (inn . go) where go (GoalChoiceF xs) = GoalChoiceF (P.map fst (P.sortBy (comparing (choices . snd)) xs)) go x = fmap fst x cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Version.hs0000644000000000000000000000175012540225656024205 0ustar0000000000000000module Distribution.Client.Dependency.Modular.Version where import qualified Distribution.Version as CV -- from Cabal import Distribution.Text -- from Cabal -- | Preliminary type for versions. type Ver = CV.Version -- | String representation of a version. showVer :: Ver -> String showVer = display -- | Version range. Consists of a lower and upper bound. type VR = CV.VersionRange -- | String representation of a version range. showVR :: VR -> String showVR = display -- | Unconstrained version range. anyVR :: VR anyVR = CV.anyVersion -- | Version range fixing a single version. eqVR :: Ver -> VR eqVR = CV.thisVersion -- | Intersect two version ranges. (.&&.) :: VR -> VR -> VR (.&&.) = CV.intersectVersionRanges -- | Simplify a version range. simplifyVR :: VR -> VR simplifyVR = CV.simplifyVersionRange -- | Checking a version against a version range. checkVR :: VR -> Ver -> Bool checkVR = flip CV.withinRange -- | Make a version number. mkV :: [Int] -> Ver mkV xs = CV.Version xs [] cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Builder.hs0000644000000000000000000002140512540225656024145 0ustar0000000000000000module Distribution.Client.Dependency.Modular.Builder where -- Building the search tree. 
-- -- In this phase, we build a search tree that is too large, i.e, it contains -- invalid solutions. We keep track of the open goals at each point. We -- nondeterministically pick an open goal (via a goal choice node), create -- subtrees according to the index and the available solutions, and extend the -- set of open goals by superficially looking at the dependencies recorded in -- the index. -- -- For each goal, we keep track of all the *reasons* why it is being -- introduced. These are for debugging and error messages, mainly. A little bit -- of care has to be taken due to the way we treat flags. If a package has -- flag-guarded dependencies, we cannot introduce them immediately. Instead, we -- store the entire dependency. import Control.Monad.Reader hiding (sequence, mapM) import Data.List as L import Data.Map as M import Prelude hiding (sequence, mapM) import Distribution.Client.Dependency.Modular.Dependency import Distribution.Client.Dependency.Modular.Flag import Distribution.Client.Dependency.Modular.Index import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Dependency.Modular.PSQ as P import Distribution.Client.Dependency.Modular.Tree -- | The state needed during the build phase of the search tree. data BuildState = BS { index :: Index, -- ^ information about packages and their dependencies scope :: Scope, -- ^ information about encapsulations rdeps :: RevDepMap, -- ^ set of all package goals, completed and open, with reverse dependencies open :: PSQ OpenGoal (), -- ^ set of still open goals (flag and package goals) next :: BuildType -- ^ kind of node to generate next } -- | Extend the set of open goals with the new goals listed. -- -- We also adjust the map of overall goals, and keep track of the -- reverse dependencies of each of the goals. extendOpen :: QPN -> [OpenGoal] -> BuildState -> BuildState extendOpen qpn' gs s@(BS { rdeps = gs', open = o' }) = go gs' o' gs where go :: RevDepMap -> PSQ OpenGoal () -> [OpenGoal] -> BuildState go g o [] = s { rdeps = g, open = o } go g o (ng@(OpenGoal (Flagged _ _ _ _) _gr) : ngs) = go g (cons ng () o) ngs -- Note: for 'Flagged' goals, we always insert, so later additions win. -- This is important, because in general, if a goal is inserted twice, -- the later addition will have better dependency information. go g o (ng@(OpenGoal (Stanza _ _ ) _gr) : ngs) = go g (cons ng () o) ngs go g o (ng@(OpenGoal (Simple (Dep qpn _)) _gr) : ngs) | qpn == qpn' = go g o ngs -- we ignore self-dependencies at this point; TODO: more care may be needed | qpn `M.member` g = go (M.adjust (qpn':) qpn g) o ngs | otherwise = go (M.insert qpn [qpn'] g) (cons ng () o) ngs -- code above is correct; insert/adjust have different arg order -- | Update the current scope by taking into account the encapsulations that -- are defined for the current package. establishScope :: QPN -> Encaps -> BuildState -> BuildState establishScope (Q pp pn) ecs s = s { scope = L.foldl (\ m e -> M.insert e pp' m) (scope s) ecs } where pp' = pn : pp -- new path -- | Given the current scope, qualify all the package names in the given set of -- dependencies and then extend the set of open goals accordingly. 
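-- A hypothetical walk-through of the bookkeeping in 'extendOpen' above: if
-- package p introduces dependency goals on q and r, where q is already a
-- known goal but r is not, then q merely gains p in its reverse-dependency
-- list, while r is inserted into the map as (r, [p]) and is also pushed onto
-- the queue of open goals (flag and stanza goals are always enqueued).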
scopedExtendOpen :: QPN -> I -> QGoalReasonChain -> FlaggedDeps PN -> FlagInfo -> BuildState -> BuildState scopedExtendOpen qpn i gr fdeps fdefs s = extendOpen qpn gs s where sc = scope s -- Qualify all package names qfdeps = L.map (fmap (qualify sc)) fdeps -- qualify all the package names -- Introduce all package flags qfdefs = L.map (\ (fn, b) -> Flagged (FN (PI qpn i) fn) b [] []) $ M.toList fdefs -- Combine new package and flag goals gs = L.map (flip OpenGoal gr) (qfdefs ++ qfdeps) -- NOTE: -- -- In the expression @qfdefs ++ qfdeps@ above, flags occur potentially -- multiple times, both via the flag declaration and via dependencies. -- The order is potentially important, because the occurrences via -- dependencies may record flag-dependency information. After a number -- of bugs involving computing this information incorrectly, however, -- we're currently not using carefully computed inter-flag dependencies -- anymore, but instead use 'simplifyVar' when computing conflict sets -- to map all flags of one package to a single flag for conflict set -- purposes, thereby treating them all as interdependent. -- -- If we ever move to a more clever algorithm again, then the line above -- needs to be looked at very carefully, and probably be replaced by -- more systematically computed flag dependency information. -- | Datatype that encodes what to build next data BuildType = Goals -- ^ build a goal choice node | OneGoal OpenGoal -- ^ build a node for this goal | Instance QPN I PInfo QGoalReasonChain -- ^ build a tree for a concrete instance deriving Show build :: BuildState -> Tree (QGoalReasonChain, Scope) build = ana go where go :: BuildState -> TreeF (QGoalReasonChain, Scope) BuildState -- If we have a choice between many goals, we just record the choice in -- the tree. We select each open goal in turn, and before we descend, remove -- it from the queue of open goals. go bs@(BS { rdeps = rds, open = gs, next = Goals }) | P.null gs = DoneF rds | otherwise = GoalChoiceF (P.mapWithKey (\ g (_sc, gs') -> bs { next = OneGoal g, open = gs' }) (P.splits gs)) -- If we have already picked a goal, then the choice depends on the kind -- of goal. -- -- For a package, we look up the instances available in the global info, -- and then handle each instance in turn. go bs@(BS { index = idx, scope = sc, next = OneGoal (OpenGoal (Simple (Dep qpn@(Q _ pn) _)) gr) }) = case M.lookup pn idx of Nothing -> FailF (toConflictSet (Goal (P qpn) gr)) (BuildFailureNotInIndex pn) Just pis -> PChoiceF qpn (gr, sc) (P.fromList (L.map (\ (i, info) -> (i, bs { next = Instance qpn i info gr })) (M.toList pis))) -- TODO: data structure conversion is rather ugly here -- For a flag, we create only two subtrees, and we create them in the order -- that is indicated by the flag default. -- -- TODO: Should we include the flag default in the tree? 
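--
-- (Illustrative note: the list below is built as [(True, ...), (False, ...)];
-- 'reorder' keeps that order when the flag default is True and reverses it
-- when the default is False, so the branch matching the default value is
-- always tried first.)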
go bs@(BS { scope = sc, next = OneGoal (OpenGoal (Flagged qfn@(FN (PI qpn _) _) (FInfo b m w) t f) gr) }) = FChoiceF qfn (gr, sc) (w || trivial) m (P.fromList (reorder b [(True, (extendOpen qpn (L.map (flip OpenGoal (FDependency qfn True : gr)) t) bs) { next = Goals }), (False, (extendOpen qpn (L.map (flip OpenGoal (FDependency qfn False : gr)) f) bs) { next = Goals })])) where reorder True = id reorder False = reverse trivial = L.null t && L.null f go bs@(BS { scope = sc, next = OneGoal (OpenGoal (Stanza qsn@(SN (PI qpn _) _) t) gr) }) = SChoiceF qsn (gr, sc) trivial (P.fromList [(False, bs { next = Goals }), (True, (extendOpen qpn (L.map (flip OpenGoal (SDependency qsn : gr)) t) bs) { next = Goals })]) where trivial = L.null t -- For a particular instance, we change the state: we update the scope, -- and furthermore we update the set of goals. -- -- TODO: We could inline this above. go bs@(BS { next = Instance qpn i (PInfo fdeps fdefs ecs _) gr }) = go ((establishScope qpn ecs (scopedExtendOpen qpn i (PDependency (PI qpn i) : gr) fdeps fdefs bs)) { next = Goals }) -- | Interface to the tree builder. Just takes an index and a list of package names, -- and computes the initial state and then the tree from there. buildTree :: Index -> Bool -> [PN] -> Tree (QGoalReasonChain, Scope) buildTree idx ind igs = build (BS idx sc (M.fromList (L.map (\ qpn -> (qpn, [])) qpns)) (P.fromList (L.map (\ qpn -> (OpenGoal (Simple (Dep qpn (Constrained []))) [UserGoal], ())) qpns)) Goals) where sc | ind = makeIndependent igs | otherwise = emptyScope qpns = L.map (qualify sc) igs cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Explore.hs0000644000000000000000000001655212540225656024204 0ustar0000000000000000module Distribution.Client.Dependency.Modular.Explore where import Control.Applicative as A import Data.Foldable import Data.List as L import Data.Map as M import Data.Set as S import Distribution.Client.Dependency.Modular.Assignment import Distribution.Client.Dependency.Modular.Dependency import Distribution.Client.Dependency.Modular.Log import Distribution.Client.Dependency.Modular.Message import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Dependency.Modular.PSQ as P import Distribution.Client.Dependency.Modular.Tree -- | Backjumping. -- -- A tree traversal that tries to propagate conflict sets -- up the tree from the leaves, and thereby cut branches. -- All the tricky things are done in the function 'combine'. backjump :: Tree a -> Tree (Maybe (ConflictSet QPN)) backjump = snd . cata go where go (FailF c fr) = (Just c, Fail c fr) go (DoneF rdm ) = (Nothing, Done rdm) go (PChoiceF qpn _ ts) = (c, PChoice qpn c (P.fromList ts')) where ~(c, ts') = combine (P qpn) (P.toList ts) S.empty go (FChoiceF qfn _ b m ts) = (c, FChoice qfn c b m (P.fromList ts')) where ~(c, ts') = combine (F qfn) (P.toList ts) S.empty go (SChoiceF qsn _ b ts) = (c, SChoice qsn c b (P.fromList ts')) where ~(c, ts') = combine (S qsn) (P.toList ts) S.empty go (GoalChoiceF ts) = (c, GoalChoice (P.fromList ts')) where ~(cs, ts') = unzip $ L.map (\ (k, (x, v)) -> (x, (k, v))) $ P.toList ts c = case cs of [] -> Nothing d : _ -> d -- | The 'combine' function is at the heart of backjumping. It takes -- the variable we're currently considering, and a list of children -- annotated with their respective conflict sets, and an accumulator -- for the result conflict set. 
It returns a combined conflict set -- for the parent node, and a (potentially shortened) list of children -- with the annotations removed. -- -- It is *essential* that we produce the results as early as possible. -- In particular, we have to produce the list of children prior to -- traversing the entire list -- otherwise we lose the desired behaviour -- of being able to traverse the tree from left to right incrementally. -- -- We can shorten the list of children if we find an individual conflict -- set that does not contain the current variable. In this case, we can -- just lift the conflict set to the current level, because the current -- level cannot possibly have contributed to this conflict, so no other -- choice at the current level would avoid the conflict. -- -- If any of the children might contain a successful solution -- (indicated by Nothing), then Nothing will be the combined -- conflict set. If all children contain conflict sets, we can -- take the union as the combined conflict set. combine :: Var QPN -> [(a, (Maybe (ConflictSet QPN), b))] -> ConflictSet QPN -> (Maybe (ConflictSet QPN), [(a, b)]) combine _ [] c = (Just c, []) combine var ((k, ( d, v)) : xs) c = (\ ~(e, ys) -> (e, (k, v) : ys)) $ case d of Just e | not (simplifyVar var `S.member` e) -> (Just e, []) | otherwise -> combine var xs (e `S.union` c) Nothing -> (Nothing, snd $ combine var xs S.empty) -- | Naive backtracking exploration of the search tree. This will yield correct -- assignments only once the tree itself is validated. explore :: Alternative m => Tree a -> (Assignment -> m (Assignment, RevDepMap)) explore = cata go where go (FailF _ _) _ = A.empty go (DoneF rdm) a = pure (a, rdm) go (PChoiceF qpn _ ts) (A pa fa sa) = asum $ -- try children in order, P.mapWithKey -- when descending ... (\ k r -> r (A (M.insert qpn k pa) fa sa)) -- record the pkg choice ts go (FChoiceF qfn _ _ _ ts) (A pa fa sa) = asum $ -- try children in order, P.mapWithKey -- when descending ... (\ k r -> r (A pa (M.insert qfn k fa) sa)) -- record the flag choice ts go (SChoiceF qsn _ _ ts) (A pa fa sa) = asum $ -- try children in order, P.mapWithKey -- when descending ... (\ k r -> r (A pa fa (M.insert qsn k sa))) -- record the flag choice ts go (GoalChoiceF ts) a = casePSQ ts A.empty -- empty goal choice is an internal error (\ _k v _xs -> v a) -- commit to the first goal choice -- | Version of 'explore' that returns a 'Log'. exploreLog :: Tree (Maybe (ConflictSet QPN)) -> (Assignment -> Log Message (Assignment, RevDepMap)) exploreLog = cata go where go (FailF c fr) _ = failWith (Failure c fr) go (DoneF rdm) a = succeedWith Success (a, rdm) go (PChoiceF qpn c ts) (A pa fa sa) = backjumpInfo c $ asum $ -- try children in order, P.mapWithKey -- when descending ... (\ k r -> tryWith (TryP (PI qpn k)) $ -- log and ... r (A (M.insert qpn k pa) fa sa)) -- record the pkg choice ts go (FChoiceF qfn c _ _ ts) (A pa fa sa) = backjumpInfo c $ asum $ -- try children in order, P.mapWithKey -- when descending ... (\ k r -> tryWith (TryF qfn k) $ -- log and ... r (A pa (M.insert qfn k fa) sa)) -- record the pkg choice ts go (SChoiceF qsn c _ ts) (A pa fa sa) = backjumpInfo c $ asum $ -- try children in order, P.mapWithKey -- when descending ... (\ k r -> tryWith (TryS qsn k) $ -- log and ... 
r (A pa fa (M.insert qsn k sa))) -- record the pkg choice ts go (GoalChoiceF ts) a = casePSQ ts (failWith (Failure S.empty EmptyGoalChoice)) -- empty goal choice is an internal error (\ k v _xs -> continueWith (Next (close k)) (v a)) -- commit to the first goal choice -- | Add in information about pruned trees. -- -- TODO: This isn't quite optimal, because we do not merely report the shape of the -- tree, but rather make assumptions about where that shape originated from. It'd be -- better if the pruning itself would leave information that we could pick up at this -- point. backjumpInfo :: Maybe (ConflictSet QPN) -> Log Message a -> Log Message a backjumpInfo c m = m <|> case c of -- important to produce 'm' before matching on 'c'! Nothing -> A.empty Just cs -> failWith (Failure cs Backjump) -- | Interface. exploreTree :: Alternative m => Tree a -> m (Assignment, RevDepMap) exploreTree t = explore t (A M.empty M.empty M.empty) -- | Interface. exploreTreeLog :: Tree (Maybe (ConflictSet QPN)) -> Log Message (Assignment, RevDepMap) exploreTreeLog t = exploreLog t (A M.empty M.empty M.empty) cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Solver.hs0000644000000000000000000000524112540225656024031 0ustar0000000000000000module Distribution.Client.Dependency.Modular.Solver where import Data.Map as M import Distribution.Client.Dependency.Types import Distribution.Client.Dependency.Modular.Assignment import Distribution.Client.Dependency.Modular.Builder import Distribution.Client.Dependency.Modular.Dependency import Distribution.Client.Dependency.Modular.Explore import Distribution.Client.Dependency.Modular.Index import Distribution.Client.Dependency.Modular.Log import Distribution.Client.Dependency.Modular.Message import Distribution.Client.Dependency.Modular.Package import qualified Distribution.Client.Dependency.Modular.Preference as P import Distribution.Client.Dependency.Modular.Validate -- | Various options for the modular solver. data SolverConfig = SolverConfig { preferEasyGoalChoices :: Bool, independentGoals :: Bool, avoidReinstalls :: Bool, shadowPkgs :: Bool, strongFlags :: Bool, maxBackjumps :: Maybe Int } solve :: SolverConfig -> -- solver parameters Index -> -- all available packages as an index (PN -> PackagePreferences) -> -- preferences Map PN [PackageConstraint] -> -- global constraints [PN] -> -- global goals Log Message (Assignment, RevDepMap) solve sc idx userPrefs userConstraints userGoals = explorePhase $ heuristicsPhase $ preferencesPhase $ validationPhase $ prunePhase $ buildPhase where explorePhase = exploreTreeLog . backjump heuristicsPhase = P.firstGoal . -- after doing goal-choice heuristics, commit to the first choice (saves space) P.deferWeakFlagChoices . P.preferBaseGoalChoice . if preferEasyGoalChoices sc then P.lpreferEasyGoalChoices else id preferencesPhase = P.preferPackagePreferences userPrefs validationPhase = P.enforceManualFlags . -- can only be done after user constraints P.enforcePackageConstraints userConstraints . validateTree idx prunePhase = (if avoidReinstalls sc then P.avoidReinstalls (const True) else id) . 
-- packages that can never be "upgraded": P.requireInstalled (`elem` [ PackageName "base" , PackageName "ghc-prim" , PackageName "integer-gmp" , PackageName "integer-simple" ]) buildPhase = buildTree idx (independentGoals sc) userGoals cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Package.hs0000644000000000000000000000640512540225656024115 0ustar0000000000000000{-# LANGUAGE DeriveFunctor #-} module Distribution.Client.Dependency.Modular.Package (module Distribution.Client.Dependency.Modular.Package, module Distribution.Package) where import Data.List as L import Data.Map as M import Distribution.Package -- from Cabal import Distribution.Text -- from Cabal import Distribution.Client.Dependency.Modular.Version -- | A package name. type PN = PackageName -- | Unpacking a package name. unPN :: PN -> String unPN (PackageName pn) = pn -- | Package version. A package name plus a version number. type PV = PackageId -- | Qualified package version. type QPV = Q PV -- | Package id. Currently just a black-box string. type PId = InstalledPackageId -- | Location. Info about whether a package is installed or not, and where -- exactly it is located. For installed packages, uniquely identifies the -- package instance via its 'PId'. -- -- TODO: More information is needed about the repo. data Loc = Inst PId | InRepo deriving (Eq, Ord, Show) -- | Instance. A version number and a location. data I = I Ver Loc deriving (Eq, Ord, Show) -- | String representation of an instance. showI :: I -> String showI (I v InRepo) = showVer v showI (I v (Inst (InstalledPackageId i))) = showVer v ++ "/installed" ++ shortId i where -- A hack to extract the beginning of the package ABI hash shortId = snip (splitAt 4) (++ "...") . snip ((\ (x, y) -> (reverse x, y)) . break (=='-') . reverse) ('-':) snip p f xs = case p xs of (ys, zs) -> (if L.null zs then id else f) ys -- | Package instance. A package name and an instance. data PI qpn = PI qpn I deriving (Eq, Ord, Show, Functor) -- | String representation of a package instance. showPI :: PI QPN -> String showPI (PI qpn i) = showQPN qpn ++ "-" ++ showI i -- | Checks if a package instance corresponds to an installed package. instPI :: PI qpn -> Bool instPI (PI _ (I _ (Inst _))) = True instPI _ = False instI :: I -> Bool instI (I _ (Inst _)) = True instI _ = False -- | Package path. (Stored in "reverse" order.) type PP = [PN] -- | String representation of a package path. showPP :: PP -> String showPP = intercalate "." . L.map display . reverse -- | A qualified entity. Pairs a package path with the entity. data Q a = Q PP a deriving (Eq, Ord, Show) -- | Standard string representation of a qualified entity. showQ :: (a -> String) -> (Q a -> String) showQ showa (Q [] x) = showa x showQ showa (Q pp x) = showPP pp ++ "." ++ showa x -- | Qualified package name. type QPN = Q PN -- | String representation of a qualified package path. showQPN :: QPN -> String showQPN = showQ display -- | The scope associates every package with a path. The convention is that packages -- not in the data structure have an empty path associated with them. type Scope = Map PN PP -- | An empty scope structure, for initialization. emptyScope :: Scope emptyScope = M.empty -- | Create artificial parents for each of the package names, making -- them all independent. makeIndependent :: [PN] -> Scope makeIndependent ps = L.foldl (\ sc (n, p) -> M.insert p [PackageName (show n)] sc) emptyScope (zip ([0..] 
:: [Int]) ps) qualify :: Scope -> PN -> QPN qualify sc pn = Q (findWithDefault [] pn sc) pn unQualify :: Q a -> a unQualify (Q _ x) = x cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Message.hs0000644000000000000000000001377312540225656024154 0ustar0000000000000000module Distribution.Client.Dependency.Modular.Message where import qualified Data.List as L import Prelude hiding (pi) import Distribution.Text -- from Cabal import Distribution.Client.Dependency.Modular.Dependency import Distribution.Client.Dependency.Modular.Flag import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Dependency.Modular.Tree data Message = Enter -- ^ increase indentation level | Leave -- ^ decrease indentation level | TryP (PI QPN) | TryF QFN Bool | TryS QSN Bool | Next (Goal QPN) | Success | Failure (ConflictSet QPN) FailReason -- | Transforms the structured message type to actual messages (strings). -- -- Takes an additional relevance predicate. The predicate gets a stack of goal -- variables and can decide whether messages regarding these goals are relevant. -- You can plug in 'const True' if you're interested in a full trace. If you -- want a slice of the trace concerning a particular conflict set, then plug in -- a predicate returning 'True' on the empty stack and if the head is in the -- conflict set. -- -- The second argument indicates if the level numbers should be shown. This is -- recommended for any trace that involves backtracking, because only the level -- numbers will allow to keep track of backjumps. showMessages :: ([Var QPN] -> Bool) -> Bool -> [Message] -> [String] showMessages p sl = go [] 0 where go :: [Var QPN] -> Int -> [Message] -> [String] go _ _ [] = [] -- complex patterns go v l (TryP (PI qpn i) : Enter : Failure c fr : Leave : ms) = goPReject v l qpn [i] c fr ms go v l (TryF qfn b : Enter : Failure c fr : Leave : ms) = (atLevel (add (F qfn) v) l $ "rejecting: " ++ showQFNBool qfn b ++ showFR c fr) (go v l ms) go v l (TryS qsn b : Enter : Failure c fr : Leave : ms) = (atLevel (add (S qsn) v) l $ "rejecting: " ++ showQSNBool qsn b ++ showFR c fr) (go v l ms) go v l (Next (Goal (P qpn) gr) : TryP pi : ms@(Enter : Next _ : _)) = (atLevel (add (P qpn) v) l $ "trying: " ++ showPI pi ++ showGRs gr) (go (add (P qpn) v) l ms) go v l (Failure c Backjump : ms@(Leave : Failure c' Backjump : _)) | c == c' = go v l ms -- standard display go v l (Enter : ms) = go v (l+1) ms go v l (Leave : ms) = go (drop 1 v) (l-1) ms go v l (TryP pi@(PI qpn _) : ms) = (atLevel (add (P qpn) v) l $ "trying: " ++ showPI pi) (go (add (P qpn) v) l ms) go v l (TryF qfn b : ms) = (atLevel (add (F qfn) v) l $ "trying: " ++ showQFNBool qfn b) (go (add (F qfn) v) l ms) go v l (TryS qsn b : ms) = (atLevel (add (S qsn) v) l $ "trying: " ++ showQSNBool qsn b) (go (add (S qsn) v) l ms) go v l (Next (Goal (P qpn) gr) : ms) = (atLevel (add (P qpn) v) l $ "next goal: " ++ showQPN qpn ++ showGRs gr) (go v l ms) go v l (Next _ : ms) = go v l ms -- ignore flag goals in the log go v l (Success : ms) = (atLevel v l $ "done") (go v l ms) go v l (Failure c fr : ms) = (atLevel v l $ "fail" ++ showFR c fr) (go v l ms) add :: Var QPN -> [Var QPN] -> [Var QPN] add v vs = simplifyVar v : vs -- special handler for many subsequent package rejections goPReject :: [Var QPN] -> Int -> QPN -> [I] -> ConflictSet QPN -> FailReason -> [Message] -> [String] goPReject v l qpn is c fr (TryP (PI qpn' i) : Enter : Failure _ fr' : Leave : ms) | qpn == qpn' && fr == fr' = goPReject v l qpn (i : is) c fr ms goPReject v l 
qpn is c fr ms = (atLevel (P qpn : v) l $ "rejecting: " ++ showQPN qpn ++ "-" ++ L.intercalate ", " (map showI (reverse is)) ++ showFR c fr) (go v l ms) -- write a message, but only if it's relevant; we can also enable or disable the display of the current level atLevel v l x xs | sl && p v = let s = show l in ("[" ++ replicate (3 - length s) '_' ++ s ++ "] " ++ x) : xs | p v = x : xs | otherwise = xs showGRs :: QGoalReasonChain -> String showGRs (gr : _) = showGR gr showGRs [] = "" showGR :: GoalReason QPN -> String showGR UserGoal = " (user goal)" showGR (PDependency pi) = " (dependency of " ++ showPI pi ++ ")" showGR (FDependency qfn b) = " (dependency of " ++ showQFNBool qfn b ++ ")" showGR (SDependency qsn) = " (dependency of " ++ showQSNBool qsn True ++ ")" showFR :: ConflictSet QPN -> FailReason -> String showFR _ InconsistentInitialConstraints = " (inconsistent initial constraints)" showFR _ (Conflicting ds) = " (conflict: " ++ L.intercalate ", " (map showDep ds) ++ ")" showFR _ CannotInstall = " (only already installed instances can be used)" showFR _ CannotReinstall = " (avoiding to reinstall a package with same version but new dependencies)" showFR _ Shadowed = " (shadowed by another installed package with same version)" showFR _ Broken = " (package is broken)" showFR _ (GlobalConstraintVersion vr) = " (global constraint requires " ++ display vr ++ ")" showFR _ GlobalConstraintInstalled = " (global constraint requires installed instance)" showFR _ GlobalConstraintSource = " (global constraint requires source instance)" showFR _ GlobalConstraintFlag = " (global constraint requires opposite flag selection)" showFR _ ManualFlag = " (manual flag can only be changed explicitly)" showFR _ (BuildFailureNotInIndex pn) = " (unknown package: " ++ display pn ++ ")" showFR c Backjump = " (backjumping, conflict set: " ++ showCS c ++ ")" -- The following are internal failures. They should not occur. In the -- interest of not crashing unnecessarily, we still just print an error -- message though. showFR _ (MalformedFlagChoice qfn) = " (INTERNAL ERROR: MALFORMED FLAG CHOICE: " ++ showQFN qfn ++ ")" showFR _ (MalformedStanzaChoice qsn) = " (INTERNAL ERROR: MALFORMED STANZA CHOICE: " ++ showQSN qsn ++ ")" showFR _ EmptyGoalChoice = " (INTERNAL ERROR: EMPTY GOAL CHOICE)" cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Log.hs0000644000000000000000000001375112540225656023305 0ustar0000000000000000module Distribution.Client.Dependency.Modular.Log where import Control.Applicative import Data.List as L import Data.Set as S import Distribution.Client.Dependency.Types -- from Cabal import Distribution.Client.Dependency.Modular.Dependency import Distribution.Client.Dependency.Modular.Message import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Dependency.Modular.Tree (FailReason(..)) -- | The 'Log' datatype. -- -- Represents the progress of a computation lazily. -- -- Parameterized over the type of actual messages and the final result. type Log m a = Progress m () a -- | Turns a log into a list of messages paired with a final result. A final result -- of 'Nothing' indicates failure. A final result of 'Just' indicates success. -- Keep in mind that forcing the second component of the returned pair will force the -- entire log. runLog :: Log m a -> ([m], Maybe a) runLog (Done x) = ([], Just x) runLog (Fail _) = ([], Nothing) runLog (Step m p) = let (ms, r) = runLog p in (m : ms, r) -- | Postprocesses a log file. Takes as an argument a limit on allowed backjumps. 
-- If the limit is 'Nothing', then infinitely many backjumps are allowed. If the -- limit is 'Just 0', backtracking is completely disabled. logToProgress :: Maybe Int -> Log Message a -> Progress String String a logToProgress mbj l = let (ms, s) = runLog l -- 'Nothing' for 's' means search tree exhaustively searched and failed (es, e) = proc 0 ms -- catch first error (always) -- 'Nothing' in 'e' means no backjump found (ns, t) = case mbj of Nothing -> (ms, Nothing) Just n -> proc n ms -- 'Nothing' in 't' means backjump limit not reached -- prefer first error over later error (exh, r) = case t of -- backjump limit not reached Nothing -> case s of Nothing -> (True, e) -- failed after exhaustive search Just _ -> (True, Nothing) -- success -- backjump limit reached; prefer first error Just _ -> (False, e) -- failed after backjump limit was reached in go es es -- trace for first error (showMessages (const True) True ns) -- shortened run r s exh where -- Proc takes the allowed number of backjumps and a list of messages and explores the -- message list until the maximum number of backjumps has been reached. The log until -- that point as well as whether we have encountered an error or not are returned. proc :: Int -> [Message] -> ([Message], Maybe (ConflictSet QPN)) proc _ [] = ([], Nothing) proc n ( Failure cs Backjump : xs@(Leave : Failure cs' Backjump : _)) | cs == cs' = proc n xs -- repeated backjumps count as one proc 0 ( Failure cs Backjump : _ ) = ([], Just cs) proc n (x@(Failure _ Backjump) : xs) = (\ ~(ys, r) -> (x : ys, r)) (proc (n - 1) xs) proc n (x : xs) = (\ ~(ys, r) -> (x : ys, r)) (proc n xs) -- This function takes a lot of arguments. The first two are both supposed to be -- the log up to the first error. That's the error that will always be printed in -- case we do not find a solution. We pass this log twice, because we evaluate it -- in parallel with the full log, but we also want to retain the reference to its -- beginning for when we print it. This trick prevents a space leak! -- -- The third argument is the full log, the fifth and six error conditions. -- The seventh argument indicates whether the search was exhaustive. -- -- The order of arguments is important! In particular 's' must not be evaluated -- unless absolutely necessary. It contains the final result, and if we shortcut -- with an error due to backjumping, evaluating 's' would still require traversing -- the entire tree. 
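--
-- (As the code below stands, 'go' takes six arguments: the two copies of the
-- log up to the first error, the rendered trace, possibly shortened by the
-- backjump limit, the conflict set of the first error if there was one, the
-- final result if there was one, and the exhaustiveness flag.)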
go ms (_ : ns) (x : xs) r s exh = Step x (go ms ns xs r s exh) go ms [] (x : xs) r s exh = Step x (go ms [] xs r s exh) go ms _ [] (Just cs) _ exh = Fail $ "Could not resolve dependencies:\n" ++ unlines (showMessages (L.foldr (\ v _ -> v `S.member` cs) True) False ms) ++ (if exh then "Dependency tree exhaustively searched.\n" else "Backjump limit reached (change with --max-backjumps).\n") go _ _ [] _ (Just s) _ = Done s go _ _ [] _ _ _ = Fail ("Could not resolve dependencies; something strange happened.") -- should not happen logToProgress' :: Log Message a -> Progress String String a logToProgress' l = let (ms, r) = runLog l xs = showMessages (const True) True ms in go xs r where go [x] Nothing = Fail x go [] Nothing = Fail "" go [] (Just r) = Done r go (x:xs) r = Step x (go xs r) runLogIO :: Log Message a -> IO (Maybe a) runLogIO x = do let (ms, r) = runLog x putStr (unlines $ showMessages (const True) True ms) return r failWith :: m -> Log m a failWith m = Step m (Fail ()) succeedWith :: m -> a -> Log m a succeedWith m x = Step m (Done x) continueWith :: m -> Log m a -> Log m a continueWith = Step tryWith :: Message -> Log Message a -> Log Message a tryWith m x = Step m (Step Enter x) <|> failWith Leave cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/IndexConversion.hs0000644000000000000000000002245312540225656025700 0ustar0000000000000000module Distribution.Client.Dependency.Modular.IndexConversion where import Data.List as L import Data.Map as M import Data.Maybe import Prelude hiding (pi) import qualified Distribution.Client.PackageIndex as CI import Distribution.Client.Types import Distribution.Compiler import Distribution.InstalledPackageInfo as IPI import Distribution.Package -- from Cabal import Distribution.PackageDescription as PD -- from Cabal import qualified Distribution.Simple.PackageIndex as SI import Distribution.System import Distribution.Client.Dependency.Modular.Dependency as D import Distribution.Client.Dependency.Modular.Flag as F import Distribution.Client.Dependency.Modular.Index import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Dependency.Modular.Tree import Distribution.Client.Dependency.Modular.Version -- | Convert both the installed package index and the source package -- index into one uniform solver index. -- -- We use 'allPackagesBySourcePackageId' for the installed package index -- because that returns us several instances of the same package and version -- in order of preference. This allows us in principle to \"shadow\" -- packages if there are several installed packages of the same version. -- There are currently some shortcomings in both GHC and Cabal in -- resolving these situations. However, the right thing to do is to -- fix the problem there, so for now, shadowing is only activated if -- explicitly requested. convPIs :: OS -> Arch -> CompilerInfo -> Bool -> Bool -> SI.InstalledPackageIndex -> CI.PackageIndex SourcePackage -> Index convPIs os arch comp sip strfl iidx sidx = mkIndex (convIPI' sip iidx ++ convSPI' os arch comp strfl sidx) -- | Convert a Cabal installed package index to the simpler, -- more uniform index format of the solver. 
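--
-- (Illustrative note: when 'allPackagesBySourcePackageId' returns several
-- installed instances for the same source package id, the first, preferred
-- instance is kept as-is and every later one is marked 'Shadowed', but only
-- if shadowing was requested via the first argument.)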
convIPI' :: Bool -> SI.InstalledPackageIndex -> [(PN, I, PInfo)] convIPI' sip idx = -- apply shadowing whenever there are multiple installed packages with -- the same version [ maybeShadow (convIP idx pkg) | (_pkgid, pkgs) <- SI.allPackagesBySourcePackageId idx , (maybeShadow, pkg) <- zip (id : repeat shadow) pkgs ] where -- shadowing is recorded in the package info shadow (pn, i, PInfo fdeps fds encs _) | sip = (pn, i, PInfo fdeps fds encs (Just Shadowed)) shadow x = x convIPI :: Bool -> SI.InstalledPackageIndex -> Index convIPI sip = mkIndex . convIPI' sip -- | Convert a single installed package into the solver-specific format. convIP :: SI.InstalledPackageIndex -> InstalledPackageInfo -> (PN, I, PInfo) convIP idx ipi = let ipid = IPI.installedPackageId ipi i = I (pkgVersion (sourcePackageId ipi)) (Inst ipid) pn = pkgName (sourcePackageId ipi) in case mapM (convIPId pn idx) (IPI.depends ipi) of Nothing -> (pn, i, PInfo [] M.empty [] (Just Broken)) Just fds -> (pn, i, PInfo fds M.empty [] Nothing) -- TODO: Installed packages should also store their encapsulations! -- | Convert dependencies specified by an installed package id into -- flagged dependencies of the solver. -- -- May return Nothing if the package can't be found in the index. That -- indicates that the original package having this dependency is broken -- and should be ignored. convIPId :: PN -> SI.InstalledPackageIndex -> InstalledPackageId -> Maybe (FlaggedDep PN) convIPId pn' idx ipid = case SI.lookupInstalledPackageId idx ipid of Nothing -> Nothing Just ipi -> let i = I (pkgVersion (sourcePackageId ipi)) (Inst ipid) pn = pkgName (sourcePackageId ipi) in Just (D.Simple (Dep pn (Fixed i (Goal (P pn') [])))) -- | Convert a cabal-install source package index to the simpler, -- more uniform index format of the solver. convSPI' :: OS -> Arch -> CompilerInfo -> Bool -> CI.PackageIndex SourcePackage -> [(PN, I, PInfo)] convSPI' os arch cinfo strfl = L.map (convSP os arch cinfo strfl) . CI.allPackages convSPI :: OS -> Arch -> CompilerInfo -> Bool -> CI.PackageIndex SourcePackage -> Index convSPI os arch cinfo strfl = mkIndex . convSPI' os arch cinfo strfl -- | Convert a single source package into the solver-specific format. convSP :: OS -> Arch -> CompilerInfo -> Bool -> SourcePackage -> (PN, I, PInfo) convSP os arch cinfo strfl (SourcePackage (PackageIdentifier pn pv) gpd _ _pl) = let i = I pv InRepo in (pn, i, convGPD os arch cinfo strfl (PI pn i) gpd) -- We do not use 'flattenPackageDescription' or 'finalizePackageDescription' -- from 'Distribution.PackageDescription.Configuration' here, because we -- want to keep the condition tree, but simplify much of the test. -- | Convert a generic package description to a solver-specific 'PInfo'. -- -- TODO: We currently just take all dependencies from all specified library, -- executable and test components. This does not quite seem fair. convGPD :: OS -> Arch -> CompilerInfo -> Bool -> PI PN -> GenericPackageDescription -> PInfo convGPD os arch comp strfl pi (GenericPackageDescription _ flags libs exes tests benchs) = let fds = flagInfo strfl flags in PInfo (maybe [] (convCondTree os arch comp pi fds (const True) ) libs ++ concatMap (convCondTree os arch comp pi fds (const True) . snd) exes ++ prefix (Stanza (SN pi TestStanzas)) (L.map (convCondTree os arch comp pi fds (const True) . snd) tests) ++ prefix (Stanza (SN pi BenchStanzas)) (L.map (convCondTree os arch comp pi fds (const True) . 
snd) benchs)) fds [] -- TODO: add encaps Nothing prefix :: (FlaggedDeps qpn -> FlaggedDep qpn) -> [FlaggedDeps qpn] -> FlaggedDeps qpn prefix _ [] = [] prefix f fds = [f (concat fds)] -- | Convert flag information. Automatic flags are now considered weak -- unless strong flags have been selected explicitly. flagInfo :: Bool -> [PD.Flag] -> FlagInfo flagInfo strfl = M.fromList . L.map (\ (MkFlag fn _ b m) -> (fn, FInfo b m (not (strfl || m)))) -- | Convert condition trees to flagged dependencies. convCondTree :: OS -> Arch -> CompilerInfo -> PI PN -> FlagInfo -> (a -> Bool) -> -- how to detect if a branch is active CondTree ConfVar [Dependency] a -> FlaggedDeps PN convCondTree os arch comp pi@(PI pn _) fds p (CondNode info ds branches) | p info = L.map (D.Simple . convDep pn) ds -- unconditional dependencies ++ concatMap (convBranch os arch comp pi fds p) branches | otherwise = [] -- | Branch interpreter. -- -- Here, we try to simplify one of Cabal's condition tree branches into the -- solver's flagged dependency format, which is weaker. Condition trees can -- contain complex logical expression composed from flag choices and special -- flags (such as architecture, or compiler flavour). We try to evaluate the -- special flags and subsequently simplify to a tree that only depends on -- simple flag choices. convBranch :: OS -> Arch -> CompilerInfo -> PI PN -> FlagInfo -> (a -> Bool) -> -- how to detect if a branch is active (Condition ConfVar, CondTree ConfVar [Dependency] a, Maybe (CondTree ConfVar [Dependency] a)) -> FlaggedDeps PN convBranch os arch cinfo pi fds p (c', t', mf') = go c' ( convCondTree os arch cinfo pi fds p t') (maybe [] (convCondTree os arch cinfo pi fds p) mf') where go :: Condition ConfVar -> FlaggedDeps PN -> FlaggedDeps PN -> FlaggedDeps PN go (Lit True) t _ = t go (Lit False) _ f = f go (CNot c) t f = go c f t go (CAnd c d) t f = go c (go d t f) f go (COr c d) t f = go c t (go d t f) go (Var (Flag fn)) t f = extractCommon t f ++ [Flagged (FN pi fn) (fds ! fn) t f] go (Var (OS os')) t f | os == os' = t | otherwise = f go (Var (Arch arch')) t f | arch == arch' = t | otherwise = f go (Var (Impl cf cvr)) t f | matchImpl (compilerInfoId cinfo) || -- fixme: Nothing should be treated as unknown, rather than empty -- list. This code should eventually be changed to either -- support partial resolution of compiler flags or to -- complain about incompletely configured compilers. any matchImpl (fromMaybe [] $ compilerInfoCompat cinfo) = t | otherwise = f where matchImpl (CompilerId cf' cv) = cf == cf' && checkVR cvr cv -- If both branches contain the same package as a simple dep, we lift it to -- the next higher-level, but without constraints. This heuristic together -- with deferring flag choices will then usually first resolve this package, -- and try an already installed version before imposing a default flag choice -- that might not be what we want. extractCommon :: FlaggedDeps PN -> FlaggedDeps PN -> FlaggedDeps PN extractCommon ps ps' = [ D.Simple (Dep pn (Constrained [])) | D.Simple (Dep pn _) <- ps, D.Simple (Dep pn' _) <- ps', pn == pn' ] -- | Convert a Cabal dependency to a solver-specific dependency. convDep :: PN -> Dependency -> Dep PN convDep pn' (Dependency pn vr) = Dep pn (Constrained [(vr, Goal (P pn') [])]) -- | Convert a Cabal package identifier to a solver-specific dependency. 
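--
-- A rough illustration (the package names are hypothetical): a dependency of
-- package @foo@ on the exact package id @bar-1.0@ becomes a dependency on
-- @bar@ constrained to exactly that version, with @foo@ recorded as the
-- origin of the constraint:
--
-- > convPI (PackageName "foo") (PackageIdentifier (PackageName "bar") (mkV [1,0]))
-- >   == Dep (PackageName "bar")
-- >          (Constrained [(eqVR (mkV [1,0]), Goal (P (PackageName "foo")) [])])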
convPI :: PN -> PackageIdentifier -> Dep PN convPI pn' (PackageIdentifier pn v) = Dep pn (Constrained [(eqVR v, Goal (P pn') [])]) cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Dependency.hs0000644000000000000000000001573012540225656024641 0ustar0000000000000000{-# LANGUAGE DeriveFunctor #-} module Distribution.Client.Dependency.Modular.Dependency where import Prelude hiding (pi) import Data.List as L import Data.Map as M import Data.Set as S import Distribution.Client.Dependency.Modular.Flag import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Dependency.Modular.Version -- | The type of variables that play a role in the solver. -- Note that the tree currently does not use this type directly, -- and rather has separate tree nodes for the different types of -- variables. This fits better with the fact that in most cases, -- these have to be treated differently. -- -- TODO: This isn't the ideal location to declare the type, -- but we need them for constrained instances. data Var qpn = P qpn | F (FN qpn) | S (SN qpn) deriving (Eq, Ord, Show, Functor) -- | For computing conflict sets, we map flag choice vars to a -- single flag choice. This means that all flag choices are treated -- as interdependent. So if one flag of a package ends up in a -- conflict set, then all flags are being treated as being part of -- the conflict set. simplifyVar :: Var qpn -> Var qpn simplifyVar (P qpn) = P qpn simplifyVar (F (FN pi _)) = F (FN pi (mkFlag "flag")) simplifyVar (S qsn) = S qsn showVar :: Var QPN -> String showVar (P qpn) = showQPN qpn showVar (F qfn) = showQFN qfn showVar (S qsn) = showQSN qsn type ConflictSet qpn = Set (Var qpn) showCS :: ConflictSet QPN -> String showCS = intercalate ", " . L.map showVar . S.toList -- | Constrained instance. If the choice has already been made, this is -- a fixed instance, and we record the package name for which the choice -- is for convenience. Otherwise, it is a list of version ranges paired with -- the goals / variables that introduced them. data CI qpn = Fixed I (Goal qpn) | Constrained [VROrigin qpn] deriving (Eq, Show, Functor) instance ResetGoal CI where resetGoal g (Fixed i _) = Fixed i g resetGoal g (Constrained vrs) = Constrained (L.map (\ (x, y) -> (x, resetGoal g y)) vrs) type VROrigin qpn = (VR, Goal qpn) -- | Helper function to collapse a list of version ranges with origins into -- a single, simplified, version range. collapse :: [VROrigin qpn] -> VR collapse = simplifyVR . L.foldr (.&&.) anyVR . L.map fst showCI :: CI QPN -> String showCI (Fixed i _) = "==" ++ showI i showCI (Constrained vr) = showVR (collapse vr) -- | Merge constrained instances. We currently adopt a lazy strategy for -- merging, i.e., we only perform actual checking if one of the two choices -- is fixed. If the merge fails, we return a conflict set indicating the -- variables responsible for the failure, as well as the two conflicting -- fragments. -- -- Note that while there may be more than one conflicting pair of version -- ranges, we only return the first we find. -- -- TODO: Different pairs might have different conflict sets. We're -- obviously interested to return a conflict that has a "better" conflict -- set in the sense the it contains variables that allow us to backjump -- further. We might apply some heuristics here, such as to change the -- order in which we check the constraints. 
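--
-- A rough illustration of the behaviour (not an exhaustive specification):
-- merging a 'Fixed' choice with a 'Constrained' one checks the fixed version
-- against each range and fails with the union of the two goals' conflict
-- sets as soon as a range excludes it, whereas merging two 'Constrained'
-- values never fails and simply concatenates the ranges:
--
-- > merge (Constrained rs) (Constrained ss) == Right (Constrained (rs ++ ss))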
merge :: Ord qpn => CI qpn -> CI qpn -> Either (ConflictSet qpn, (CI qpn, CI qpn)) (CI qpn) merge c@(Fixed i g1) d@(Fixed j g2) | i == j = Right c | otherwise = Left (S.union (toConflictSet g1) (toConflictSet g2), (c, d)) merge c@(Fixed (I v _) g1) (Constrained rs) = go rs -- I tried "reverse rs" here, but it seems to slow things down ... where go [] = Right c go (d@(vr, g2) : vrs) | checkVR vr v = go vrs | otherwise = Left (S.union (toConflictSet g1) (toConflictSet g2), (c, Constrained [d])) merge c@(Constrained _) d@(Fixed _ _) = merge d c merge (Constrained rs) (Constrained ss) = Right (Constrained (rs ++ ss)) type FlaggedDeps qpn = [FlaggedDep qpn] -- | Flagged dependencies can either be plain dependency constraints, -- or flag-dependent dependency trees. data FlaggedDep qpn = Flagged (FN qpn) FInfo (TrueFlaggedDeps qpn) (FalseFlaggedDeps qpn) | Stanza (SN qpn) (TrueFlaggedDeps qpn) | Simple (Dep qpn) deriving (Eq, Show, Functor) type TrueFlaggedDeps qpn = FlaggedDeps qpn type FalseFlaggedDeps qpn = FlaggedDeps qpn -- | A dependency (constraint) associates a package name with a -- constrained instance. data Dep qpn = Dep qpn (CI qpn) deriving (Eq, Show, Functor) showDep :: Dep QPN -> String showDep (Dep qpn (Fixed i (Goal v _)) ) = (if P qpn /= v then showVar v ++ " => " else "") ++ showQPN qpn ++ "==" ++ showI i showDep (Dep qpn (Constrained [(vr, Goal v _)])) = showVar v ++ " => " ++ showQPN qpn ++ showVR vr showDep (Dep qpn ci ) = showQPN qpn ++ showCI ci instance ResetGoal Dep where resetGoal g (Dep qpn ci) = Dep qpn (resetGoal g ci) -- | A map containing reverse dependencies between qualified -- package names. type RevDepMap = Map QPN [QPN] -- | Goals are solver variables paired with information about -- why they have been introduced. data Goal qpn = Goal (Var qpn) (GoalReasonChain qpn) deriving (Eq, Show, Functor) class ResetGoal f where resetGoal :: Goal qpn -> f qpn -> f qpn instance ResetGoal Goal where resetGoal = const -- | For open goals as they occur during the build phase, we need to store -- additional information about flags. data OpenGoal = OpenGoal (FlaggedDep QPN) QGoalReasonChain deriving (Eq, Show) -- | Reasons why a goal can be added to a goal set. data GoalReason qpn = UserGoal | PDependency (PI qpn) | FDependency (FN qpn) Bool | SDependency (SN qpn) deriving (Eq, Show, Functor) -- | The first element is the immediate reason. The rest are the reasons -- for the reasons ... type GoalReasonChain qpn = [GoalReason qpn] type QGoalReasonChain = GoalReasonChain QPN goalReasonToVars :: GoalReason qpn -> ConflictSet qpn goalReasonToVars UserGoal = S.empty goalReasonToVars (PDependency (PI qpn _)) = S.singleton (P qpn) goalReasonToVars (FDependency qfn _) = S.singleton (simplifyVar (F qfn)) goalReasonToVars (SDependency qsn) = S.singleton (S qsn) goalReasonChainToVars :: Ord qpn => GoalReasonChain qpn -> ConflictSet qpn goalReasonChainToVars = S.unions . L.map goalReasonToVars goalReasonChainsToVars :: Ord qpn => [GoalReasonChain qpn] -> ConflictSet qpn goalReasonChainsToVars = S.unions . L.map goalReasonChainToVars -- | Closes a goal, i.e., removes all the extraneous information that we -- need only during the build phase. close :: OpenGoal -> Goal QPN close (OpenGoal (Simple (Dep qpn _)) gr) = Goal (P qpn) gr close (OpenGoal (Flagged qfn _ _ _ ) gr) = Goal (F qfn) gr close (OpenGoal (Stanza qsn _) gr) = Goal (S qsn) gr -- | Compute a conflic set from a goal. The conflict set contains the -- closure of goal reasons as well as the variable of the goal itself. 
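--
-- (Note that the goal's own variable is passed through 'simplifyVar', so a
-- flag goal contributes the package's single collapsed flag variable to the
-- conflict set rather than the concrete flag.)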
toConflictSet :: Ord qpn => Goal qpn -> ConflictSet qpn toConflictSet (Goal g grs) = S.insert (simplifyVar g) (goalReasonChainToVars grs) cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Validate.hs0000644000000000000000000002713212540225656024313 0ustar0000000000000000module Distribution.Client.Dependency.Modular.Validate where -- Validation of the tree. -- -- The task here is to make sure all constraints hold. After validation, any -- assignment returned by exploration of the tree should be a complete valid -- assignment, i.e., actually constitute a solution. import Control.Applicative import Control.Monad.Reader hiding (sequence) import Data.List as L import Data.Map as M import Data.Traversable import Prelude hiding (sequence) import Distribution.Client.Dependency.Modular.Assignment import Distribution.Client.Dependency.Modular.Dependency import Distribution.Client.Dependency.Modular.Flag import Distribution.Client.Dependency.Modular.Index import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Dependency.Modular.PSQ as P import Distribution.Client.Dependency.Modular.Tree -- In practice, most constraints are implication constraints (IF we have made -- a number of choices, THEN we also have to ensure that). We call constraints -- that for which the preconditions are fulfilled ACTIVE. We maintain a set -- of currently active constraints that we pass down the node. -- -- We aim at detecting inconsistent states as early as possible. -- -- Whenever we make a choice, there are two things that need to happen: -- -- (1) We must check that the choice is consistent with the currently -- active constraints. -- -- (2) The choice increases the set of active constraints. For the new -- active constraints, we must check that they are consistent with -- the current state. -- -- We can actually merge (1) and (2) by saying the the current choice is -- a new active constraint, fixing the choice. -- -- If a test fails, we have detected an inconsistent state. We can -- disable the current subtree and do not have to traverse it any further. -- -- We need a good way to represent the current state, i.e., the current -- set of active constraints. Since the main situation where we have to -- search in it is (1), it seems best to store the state by package: for -- every package, we store which versions are still allowed. If for any -- package, we have inconsistent active constraints, we can also stop. -- This is a particular way to read task (2): -- -- (2, weak) We only check if the new constraints are consistent with -- the choices we've already made, and add them to the active set. -- -- (2, strong) We check if the new constraints are consistent with the -- choices we've already made, and the constraints we already have. -- -- It currently seems as if we're implementing the weak variant. However, -- when used together with 'preferEasyGoalChoices', we will find an -- inconsistent state in the very next step. -- -- What do we do about flags? -- -- Like for packages, we store the flag choices we have already made. -- Now, regarding (1), we only have to test whether we've decided the -- current flag before. Regarding (2), the interesting bit is in discovering -- the new active constraints. To this end, we look up the constraints for -- the package the flag belongs to, and traverse its flagged dependencies. -- Wherever we find the flag in question, we start recording dependencies -- underneath as new active dependencies. 
If we encounter other flags, we -- check if we've chosen them already and either proceed or stop. -- | The state needed during validation. data ValidateState = VS { index :: Index, saved :: Map QPN (FlaggedDeps QPN), -- saved, scoped, dependencies pa :: PreAssignment } type Validate = Reader ValidateState validate :: Tree (QGoalReasonChain, Scope) -> Validate (Tree QGoalReasonChain) validate = cata go where go :: TreeF (QGoalReasonChain, Scope) (Validate (Tree QGoalReasonChain)) -> Validate (Tree QGoalReasonChain) go (PChoiceF qpn (gr, sc) ts) = PChoice qpn gr <$> sequence (P.mapWithKey (goP qpn gr sc) ts) go (FChoiceF qfn (gr, _sc) b m ts) = do -- Flag choices may occur repeatedly (because they can introduce new constraints -- in various places). However, subsequent choices must be consistent. We thereby -- collapse repeated flag choice nodes. PA _ pfa _ <- asks pa -- obtain current flag-preassignment case M.lookup qfn pfa of Just rb -> -- flag has already been assigned; collapse choice to the correct branch case P.lookup rb ts of Just t -> goF qfn gr rb t Nothing -> return $ Fail (toConflictSet (Goal (F qfn) gr)) (MalformedFlagChoice qfn) Nothing -> -- flag choice is new, follow both branches FChoice qfn gr b m <$> sequence (P.mapWithKey (goF qfn gr) ts) go (SChoiceF qsn (gr, _sc) b ts) = do -- Optional stanza choices are very similar to flag choices. PA _ _ psa <- asks pa -- obtain current stanza-preassignment case M.lookup qsn psa of Just rb -> -- stanza choice has already been made; collapse choice to the correct branch case P.lookup rb ts of Just t -> goS qsn gr rb t Nothing -> return $ Fail (toConflictSet (Goal (S qsn) gr)) (MalformedStanzaChoice qsn) Nothing -> -- stanza choice is new, follow both branches SChoice qsn gr b <$> sequence (P.mapWithKey (goS qsn gr) ts) -- We don't need to do anything for goal choices or failure nodes. go (GoalChoiceF ts) = GoalChoice <$> sequence ts go (DoneF rdm ) = pure (Done rdm) go (FailF c fr ) = pure (Fail c fr) -- What to do for package nodes ... goP :: QPN -> QGoalReasonChain -> Scope -> I -> Validate (Tree QGoalReasonChain) -> Validate (Tree QGoalReasonChain) goP qpn@(Q _pp pn) gr sc i r = do PA ppa pfa psa <- asks pa -- obtain current preassignment idx <- asks index -- obtain the index svd <- asks saved -- obtain saved dependencies let (PInfo deps _ _ mfr) = idx ! pn ! i -- obtain dependencies and index-dictated exclusions introduced by the choice let qdeps = L.map (fmap (qualify sc)) deps -- qualify the deps in the current scope -- the new active constraints are given by the instance we have chosen, -- plus the dependency information we have for that instance let goal = Goal (P qpn) gr let newactives = Dep qpn (Fixed i goal) : L.map (resetGoal goal) (extractDeps pfa psa qdeps) -- We now try to extend the partial assignment with the new active constraints. let mnppa = extend (P qpn) ppa newactives -- In case we continue, we save the scoped dependencies let nsvd = M.insert qpn qdeps svd case mfr of Just fr -> -- The index marks this as an invalid choice. We can stop. return (Fail (toConflictSet goal) fr) _ -> case mnppa of Left (c, d) -> -- We have an inconsistency. We can stop. return (Fail c (Conflicting d)) Right nppa -> -- We have an updated partial assignment for the recursive validation. local (\ s -> s { pa = PA nppa pfa psa, saved = nsvd }) r -- What to do for flag nodes ... 
goF :: QFN -> QGoalReasonChain -> Bool -> Validate (Tree QGoalReasonChain) -> Validate (Tree QGoalReasonChain) goF qfn@(FN (PI qpn _i) _f) gr b r = do PA ppa pfa psa <- asks pa -- obtain current preassignment svd <- asks saved -- obtain saved dependencies -- Note that there should be saved dependencies for the package in question, -- because while building, we do not choose flags before we see the packages -- that define them. let qdeps = svd ! qpn -- We take the *saved* dependencies, because these have been qualified in the -- correct scope. -- -- Extend the flag assignment let npfa = M.insert qfn b pfa -- We now try to get the new active dependencies we might learn about because -- we have chosen a new flag. let newactives = extractNewDeps (F qfn) gr b npfa psa qdeps -- As in the package case, we try to extend the partial assignment. case extend (F qfn) ppa newactives of Left (c, d) -> return (Fail c (Conflicting d)) -- inconsistency found Right nppa -> local (\ s -> s { pa = PA nppa npfa psa }) r -- What to do for stanza nodes (similar to flag nodes) ... goS :: QSN -> QGoalReasonChain -> Bool -> Validate (Tree QGoalReasonChain) -> Validate (Tree QGoalReasonChain) goS qsn@(SN (PI qpn _i) _f) gr b r = do PA ppa pfa psa <- asks pa -- obtain current preassignment svd <- asks saved -- obtain saved dependencies -- Note that there should be saved dependencies for the package in question, -- because while building, we do not choose flags before we see the packages -- that define them. let qdeps = svd ! qpn -- We take the *saved* dependencies, because these have been qualified in the -- correct scope. -- -- Extend the flag assignment let npsa = M.insert qsn b psa -- We now try to get the new active dependencies we might learn about because -- we have chosen a new flag. let newactives = extractNewDeps (S qsn) gr b pfa npsa qdeps -- As in the package case, we try to extend the partial assignment. case extend (S qsn) ppa newactives of Left (c, d) -> return (Fail c (Conflicting d)) -- inconsistency found Right nppa -> local (\ s -> s { pa = PA nppa pfa npsa }) r -- | We try to extract as many concrete dependencies from the given flagged -- dependencies as possible. We make use of all the flag knowledge we have -- already acquired. extractDeps :: FAssignment -> SAssignment -> FlaggedDeps QPN -> [Dep QPN] extractDeps fa sa deps = do d <- deps case d of Simple sd -> return sd Flagged qfn _ td fd -> case M.lookup qfn fa of Nothing -> mzero Just True -> extractDeps fa sa td Just False -> extractDeps fa sa fd Stanza qsn td -> case M.lookup qsn sa of Nothing -> mzero Just True -> extractDeps fa sa td Just False -> [] -- | We try to find new dependencies that become available due to the given -- flag or stanza choice. We therefore look for the choice in question, and then call -- 'extractDeps' for everything underneath. extractNewDeps :: Var QPN -> QGoalReasonChain -> Bool -> FAssignment -> SAssignment -> FlaggedDeps QPN -> [Dep QPN] extractNewDeps v gr b fa sa = go where go deps = do d <- deps case d of Simple _ -> mzero Flagged qfn' _ td fd | v == F qfn' -> L.map (resetGoal (Goal v gr)) $ if b then extractDeps fa sa td else extractDeps fa sa fd | otherwise -> case M.lookup qfn' fa of Nothing -> mzero Just True -> go td Just False -> go fd Stanza qsn' td | v == S qsn' -> L.map (resetGoal (Goal v gr)) $ if b then extractDeps fa sa td else [] | otherwise -> case M.lookup qsn' sa of Nothing -> mzero Just True -> go td Just False -> [] -- | Interface. 
validateTree :: Index -> Tree (QGoalReasonChain, Scope) -> Tree QGoalReasonChain validateTree idx t = runReader (validate t) (VS idx M.empty (PA M.empty M.empty M.empty)) cabal-install-1.22.6.0/Distribution/Client/Dependency/Modular/Flag.hs0000644000000000000000000000373412540225656023435 0ustar0000000000000000{-# LANGUAGE DeriveFunctor #-} module Distribution.Client.Dependency.Modular.Flag where import Data.Map as M import Prelude hiding (pi) import Distribution.PackageDescription hiding (Flag) -- from Cabal import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Types (OptionalStanza(..)) -- | Flag name. Consists of a package instance and the flag identifier itself. data FN qpn = FN (PI qpn) Flag deriving (Eq, Ord, Show, Functor) -- | Extract the package name from a flag name. getPN :: FN qpn -> qpn getPN (FN (PI qpn _) _) = qpn -- | Flag identifier. Just a string. type Flag = FlagName unFlag :: Flag -> String unFlag (FlagName fn) = fn mkFlag :: String -> Flag mkFlag fn = FlagName fn -- | Flag info. Default value, whether the flag is manual, and -- whether the flag is weak. Manual flags can only be set explicitly. -- Weak flags are typically deferred by the solver. data FInfo = FInfo { fdefault :: Bool, fmanual :: Bool, fweak :: Bool } deriving (Eq, Ord, Show) -- | Flag defaults. type FlagInfo = Map Flag FInfo -- | Qualified flag name. type QFN = FN QPN -- | Stanza name. Paired with a package name, much like a flag. data SN qpn = SN (PI qpn) OptionalStanza deriving (Eq, Ord, Show, Functor) -- | Qualified stanza name. type QSN = SN QPN unStanza :: OptionalStanza -> String unStanza TestStanzas = "test" unStanza BenchStanzas = "bench" showQFNBool :: QFN -> Bool -> String showQFNBool qfn@(FN pi _f) b = showPI pi ++ ":" ++ showFBool qfn b showQSNBool :: QSN -> Bool -> String showQSNBool qsn@(SN pi _f) b = showPI pi ++ ":" ++ showSBool qsn b showFBool :: FN qpn -> Bool -> String showFBool (FN _ f) True = "+" ++ unFlag f showFBool (FN _ f) False = "-" ++ unFlag f showSBool :: SN qpn -> Bool -> String showSBool (SN _ s) True = "*" ++ unStanza s showSBool (SN _ s) False = "!" ++ unStanza s showQFN :: QFN -> String showQFN (FN pi f) = showPI pi ++ ":" ++ unFlag f showQSN :: QSN -> String showQSN (SN pi f) = showPI pi ++ ":" ++ unStanza f cabal-install-1.22.6.0/Distribution/Client/Dependency/TopDown/0000755000000000000000000000000012540225656022210 5ustar0000000000000000cabal-install-1.22.6.0/Distribution/Client/Dependency/TopDown/Constraints.hs0000644000000000000000000005734712540225656025073 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Dependency.TopDown.Constraints -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : duncan@community.haskell.org -- Stability : provisional -- Portability : portable -- -- A set of satisfiable constraints on a set of packages. ----------------------------------------------------------------------------- module Distribution.Client.Dependency.TopDown.Constraints ( Constraints, empty, packages, choices, isPaired, addTarget, constrain, Satisfiable(..), conflicting, ) where import Distribution.Client.Dependency.TopDown.Types import qualified Distribution.Client.PackageIndex as PackageIndex import Distribution.Client.PackageIndex (PackageIndex) import Distribution.Package ( PackageName, PackageId, PackageIdentifier(..) 
, Package(packageId), packageName, packageVersion , Dependency, PackageFixedDeps(depends) ) import Distribution.Version ( Version ) import Distribution.Client.Utils ( mergeBy, MergeResult(..) ) #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( Monoid(mempty) ) #endif import Data.Either ( partitionEithers ) import qualified Data.Map as Map import Data.Map (Map) import qualified Data.Set as Set import Data.Set (Set) import Control.Exception ( assert ) -- | A set of satisfiable constraints on a set of packages. -- -- The 'Constraints' type keeps track of a set of targets (identified by -- package name) that we know that we need. It also keeps track of a set of -- constraints over all packages in the environment. -- -- It maintains the guarantee that, for the target set, the constraints are -- satisfiable, meaning that there is at least one instance available for each -- package name that satisfies the constraints on that package name. -- -- Note that it is possible to over-constrain a package in the environment that -- is not in the target set -- the satisfiability guarantee is only maintained -- for the target set. This is useful because it allows us to exclude packages -- without needing to know if it would ever be needed or not (e.g. allows -- excluding broken installed packages). -- -- Adding a constraint for a target package can fail if it would mean that -- there are no remaining choices. -- -- Adding a constraint for package that is not a target never fails. -- -- Adding a new target package can fail if that package already has conflicting -- constraints. -- data Constraints installed source reason = Constraints -- | Targets that we know we need. This is the set for which we -- guarantee the constraints are satisfiable. !(Set PackageName) -- | The available/remaining set. These are packages that have available -- choices remaining. This is guaranteed to cover the target packages, -- but can also cover other packages in the environment. New targets can -- only be added if there are available choices remaining for them. !(PackageIndex (InstalledOrSource installed source)) -- | The excluded set. Choices that we have excluded by applying -- constraints. Excluded choices are tagged with the reason. !(PackageIndex (ExcludedPkg (InstalledOrSource installed source) reason)) -- | Paired choices, this is an ugly hack. !(Map PackageName (Version, Version)) -- | Purely for the invariant, we keep a copy of the original index !(PackageIndex (InstalledOrSource installed source)) -- | Reasons for excluding all, or some choices for a package version. -- -- Each package version can have a source instance, an installed instance or -- both. We distinguish reasons for constraints that excluded both instances, -- from reasons for constraints that excluded just one instance. -- data ExcludedPkg pkg reason = ExcludedPkg pkg [reason] -- ^ reasons for excluding both source and installed instances [reason] -- ^ reasons for excluding the installed instance [reason] -- ^ reasons for excluding the source instance instance Package pkg => Package (ExcludedPkg pkg reason) where packageId (ExcludedPkg p _ _ _) = packageId p -- | There is a conservation of packages property. Packages are never gained or -- lost, they just transfer from the remaining set to the excluded set. 
-- invariant :: (Package installed, Package source) => Constraints installed source a -> Bool invariant (Constraints targets available excluded _ original) = -- Relationship between available, excluded and original all check merged -- targets is a subset of available && all (PackageIndex.elemByPackageName available) (Set.elems targets) where merged = mergeBy (\a b -> packageId a `compare` mergedPackageId b) (PackageIndex.allPackages original) (mergeBy (\a b -> packageId a `compare` packageId b) (PackageIndex.allPackages available) (PackageIndex.allPackages excluded)) where mergedPackageId (OnlyInLeft p ) = packageId p mergedPackageId (OnlyInRight p) = packageId p mergedPackageId (InBoth p _) = packageId p -- If the package was originally installed only, then check (InBoth (InstalledOnly _) cur) = case cur of -- now it's either still remaining as installed only OnlyInLeft (InstalledOnly _) -> True -- or it has been excluded OnlyInRight (ExcludedPkg (InstalledOnly _) [] (_:_) []) -> True _ -> False -- If the package was originally available only, then check (InBoth (SourceOnly _) cur) = case cur of -- now it's either still remaining as source only OnlyInLeft (SourceOnly _) -> True -- or it has been excluded OnlyInRight (ExcludedPkg (SourceOnly _) [] [] (_:_)) -> True _ -> False -- If the package was originally installed and source, then check (InBoth (InstalledAndSource _ _) cur) = case cur of -- We can have both remaining: OnlyInLeft (InstalledAndSource _ _) -> True -- both excluded, in particular it can have had the just source or -- installed excluded and later had both excluded so we do not mind if -- the source or installed excluded is empty or non-empty. OnlyInRight (ExcludedPkg (InstalledAndSource _ _) _ _ _) -> True -- the installed remaining and the source excluded: InBoth (InstalledOnly _) (ExcludedPkg (SourceOnly _) [] [] (_:_)) -> True -- the source remaining and the installed excluded: InBoth (SourceOnly _) (ExcludedPkg (InstalledOnly _) [] (_:_) []) -> True _ -> False check _ = False -- | An update to the constraints can move packages between the two piles -- but not gain or loose packages. 
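-- A hedged sketch of how that conservation property is used as a sanity
-- check: every update is guarded by 'transitionsTo' (see the 'assert' calls
-- in 'addTarget' and 'constrain' below). 'checkedUpdate' is a hypothetical
-- helper, not part of this module:
--
-- > checkedUpdate :: (Package installed, Package source)
-- >               => Constraints installed source reason
-- >               -> Constraints installed source reason
-- >               -> Constraints installed source reason
-- > checkedUpdate old new = assert (old `transitionsTo` new) new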
transitionsTo :: (Package installed, Package source) => Constraints installed source a -> Constraints installed source a -> Bool transitionsTo constraints @(Constraints _ available excluded _ _) constraints'@(Constraints _ available' excluded' _ _) = invariant constraints && invariant constraints' && null availableGained && null excludedLost && map (mapInstalledOrSource packageId packageId) availableLost == map (mapInstalledOrSource packageId packageId) excludedGained where (availableLost, availableGained) = partitionEithers (foldr lostAndGained [] availableChange) (excludedLost, excludedGained) = partitionEithers (foldr lostAndGained [] excludedChange) availableChange = mergeBy (\a b -> packageId a `compare` packageId b) (PackageIndex.allPackages available) (PackageIndex.allPackages available') excludedChange = mergeBy (\a b -> packageId a `compare` packageId b) [ pkg | ExcludedPkg pkg _ _ _ <- PackageIndex.allPackages excluded ] [ pkg | ExcludedPkg pkg _ _ _ <- PackageIndex.allPackages excluded' ] lostAndGained mr rest = case mr of OnlyInLeft pkg -> Left pkg : rest InBoth (InstalledAndSource pkg _) (SourceOnly _) -> Left (InstalledOnly pkg) : rest InBoth (InstalledAndSource _ pkg) (InstalledOnly _) -> Left (SourceOnly pkg) : rest InBoth (SourceOnly _) (InstalledAndSource pkg _) -> Right (InstalledOnly pkg) : rest InBoth (InstalledOnly _) (InstalledAndSource _ pkg) -> Right (SourceOnly pkg) : rest OnlyInRight pkg -> Right pkg : rest _ -> rest mapInstalledOrSource f g pkg = case pkg of InstalledOnly a -> InstalledOnly (f a) SourceOnly b -> SourceOnly (g b) InstalledAndSource a b -> InstalledAndSource (f a) (g b) -- | We construct 'Constraints' with an initial 'PackageIndex' of all the -- packages available. -- empty :: (PackageFixedDeps installed, Package source) => PackageIndex installed -> PackageIndex source -> Constraints installed source reason empty installed source = Constraints targets pkgs excluded pairs pkgs where targets = mempty excluded = mempty pkgs = PackageIndex.fromList . map toInstalledOrSource $ mergeBy (\a b -> packageId a `compare` packageId b) (PackageIndex.allPackages installed) (PackageIndex.allPackages source) toInstalledOrSource (OnlyInLeft i ) = InstalledOnly i toInstalledOrSource (OnlyInRight a) = SourceOnly a toInstalledOrSource (InBoth i a) = InstalledAndSource i a -- pick up cases like base-3 and 4 where one version depends on the other: pairs = Map.fromList [ (name, (packageVersion pkgid1, packageVersion pkgid2)) | [pkg1, pkg2] <- PackageIndex.allPackagesByName installed , let name = packageName pkg1 pkgid1 = packageId pkg1 pkgid2 = packageId pkg2 , any ((pkgid1==) . packageId) (depends pkg2) || any ((pkgid2==) . packageId) (depends pkg1) ] -- | The package targets. -- packages :: (Package installed, Package source) => Constraints installed source reason -> Set PackageName packages (Constraints ts _ _ _ _) = ts -- | The package choices that are still available. 
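-- As a hedged illustration of the pairing hack above: if the installed index
-- contains a compatibility shim such as base-3.x that depends on base-4.x,
-- 'empty' records the two versions as a pair and 'isPaired' (below) maps each
-- member of the pair to the other. With a hypothetical constraints value 'cs'
-- built by 'empty' over such an index:
--
-- > isPaired cs (PackageIdentifier (PackageName "base") (Version [3,0,3,2] []))
-- >   == Just (PackageIdentifier (PackageName "base") (Version [4,1,0,0] []))
--
-- (The concrete version numbers here are illustrative only.)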
-- choices :: (Package installed, Package source) => Constraints installed source reason -> PackageIndex (InstalledOrSource installed source) choices (Constraints _ available _ _ _) = available isPaired :: (Package installed, Package source) => Constraints installed source reason -> PackageId -> Maybe PackageId isPaired (Constraints _ _ _ pairs _) (PackageIdentifier name version) = case Map.lookup name pairs of Just (v1, v2) | version == v1 -> Just (PackageIdentifier name v2) | version == v2 -> Just (PackageIdentifier name v1) _ -> Nothing data Satisfiable constraints discarded reason = Satisfiable constraints discarded | Unsatisfiable | ConflictsWith [(PackageId, [reason])] addTarget :: (Package installed, Package source) => PackageName -> Constraints installed source reason -> Satisfiable (Constraints installed source reason) () reason addTarget pkgname constraints@(Constraints targets available excluded paired original) -- If it's already a target then there's no change | pkgname `Set.member` targets = Satisfiable constraints () -- If there is some possible choice available for this target then we're ok | PackageIndex.elemByPackageName available pkgname = let targets' = Set.insert pkgname targets constraints' = Constraints targets' available excluded paired original in assert (constraints `transitionsTo` constraints') $ Satisfiable constraints' () -- If it's not available and it is excluded then we return the conflicts | PackageIndex.elemByPackageName excluded pkgname = ConflictsWith conflicts -- Otherwise, it's not available and it has not been excluded so the -- package is simply completely unknown. | otherwise = Unsatisfiable where conflicts = [ (packageId pkg, reasons) | let excludedChoices = PackageIndex.lookupPackageName excluded pkgname , ExcludedPkg pkg isReasons iReasons sReasons <- excludedChoices , let reasons = isReasons ++ iReasons ++ sReasons ] constrain :: (Package installed, Package source) => PackageName -- ^ which package to constrain -> (Version -> Bool -> Bool) -- ^ the constraint test -> reason -- ^ the reason for the constraint -> Constraints installed source reason -> Satisfiable (Constraints installed source reason) [PackageId] reason constrain pkgname constraint reason constraints@(Constraints targets available excluded paired original) | pkgname `Set.member` targets && not anyRemaining = if null conflicts then Unsatisfiable else ConflictsWith conflicts | otherwise = let constraints' = Constraints targets available' excluded' paired original in assert (constraints `transitionsTo` constraints') $ Satisfiable constraints' (map packageId newExcluded) where -- This tells us if any packages would remain at all for this package name if -- we applied this constraint. This amounts to checking if any package -- satisfies the given constraint, including version range and installation -- status. 
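-- As a hedged aside on the constraint test's second argument: 'testConstraint'
-- below applies it with True for the installed instance and False for the
-- source instance, so callers can express instance-specific constraints.
-- Two hypothetical example tests (not definitions from this module):
--
-- > inRange :: VersionRange -> Version -> Bool -> Bool
-- > inRange vr v _isInstalled = v `withinRange` vr
-- >
-- > installedOnly :: Version -> Bool -> Bool
-- > installedOnly _v isInstalled = isInstalled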
-- (available', excluded', newExcluded, anyRemaining, conflicts) = updatePkgsStatus available excluded [] False [] (mergeBy (\pkg pkg' -> packageVersion pkg `compare` packageVersion pkg') (PackageIndex.lookupPackageName available pkgname) (PackageIndex.lookupPackageName excluded pkgname)) testConstraint pkg = let ver = packageVersion pkg in case Map.lookup (packageName pkg) paired of Just (v1, v2) | ver == v1 || ver == v2 -> case pkg of InstalledOnly ipkg -> InstalledOnly (ipkg, iOk) SourceOnly spkg -> SourceOnly (spkg, sOk) InstalledAndSource ipkg spkg -> InstalledAndSource (ipkg, iOk) (spkg, sOk) where iOk = constraint v1 True || constraint v2 True sOk = constraint v1 False || constraint v2 False _ -> case pkg of InstalledOnly ipkg -> InstalledOnly (ipkg, iOk) SourceOnly spkg -> SourceOnly (spkg, sOk) InstalledAndSource ipkg spkg -> InstalledAndSource (ipkg, iOk) (spkg, sOk) where iOk = constraint ver True sOk = constraint ver False -- For the info about available and excluded versions of the package in -- question, update the info given the current constraint -- -- We update the available package map and the excluded package map -- we also collect: -- * the change in available packages (for logging) -- * whether there are any remaining choices -- * any constraints that conflict with the current constraint updatePkgsStatus _ _ nePkgs ok cs _ | seq nePkgs $ seq ok $ seq cs False = undefined updatePkgsStatus aPkgs ePkgs nePkgs ok cs [] = (aPkgs, ePkgs, reverse nePkgs, ok, reverse cs) updatePkgsStatus aPkgs ePkgs nePkgs ok cs (pkg:pkgs) = let (aPkgs', ePkgs', mnePkg, ok', mc) = updatePkgStatus aPkgs ePkgs pkg nePkgs' = maybeCons mnePkg nePkgs cs' = maybeCons mc cs in updatePkgsStatus aPkgs' ePkgs' nePkgs' (ok' || ok) cs' pkgs maybeCons Nothing xs = xs maybeCons (Just x) xs = x:xs -- For the info about an available or excluded version of the package in -- question, update the info given the current constraint. 
-- updatePkgStatus aPkgs ePkgs pkg = case viewPackageStatus pkg of AllAvailable (InstalledOnly (aiPkg, False)) -> removeAvailable False (InstalledOnly aiPkg) (PackageIndex.deletePackageId pkgid) (ExcludedPkg (InstalledOnly aiPkg) [] [reason] []) Nothing AllAvailable (SourceOnly (asPkg, False)) -> removeAvailable False (SourceOnly asPkg) (PackageIndex.deletePackageId pkgid) (ExcludedPkg (SourceOnly asPkg) [] [] [reason]) Nothing AllAvailable (InstalledAndSource (aiPkg, False) (asPkg, False)) -> removeAvailable False (InstalledAndSource aiPkg asPkg) (PackageIndex.deletePackageId pkgid) (ExcludedPkg (InstalledAndSource aiPkg asPkg) [reason] [] []) Nothing AllAvailable (InstalledAndSource (aiPkg, True) (asPkg, False)) -> removeAvailable True (SourceOnly asPkg) (PackageIndex.insert (InstalledOnly aiPkg)) (ExcludedPkg (SourceOnly asPkg) [] [] [reason]) Nothing AllAvailable (InstalledAndSource (aiPkg, False) (asPkg, True)) -> removeAvailable True (InstalledOnly aiPkg) (PackageIndex.insert (SourceOnly asPkg)) (ExcludedPkg (InstalledOnly aiPkg) [] [reason] []) Nothing AllAvailable _ -> noChange True Nothing AvailableExcluded (aiPkg, False) (ExcludedPkg (esPkg, False) _ _ srs) -> removeAvailable False (InstalledOnly aiPkg) (PackageIndex.deletePackageId pkgid) (ExcludedPkg (InstalledAndSource aiPkg esPkg) [reason] [] srs) Nothing AvailableExcluded (_aiPkg, True) (ExcludedPkg (esPkg, False) _ _ srs) -> addExtraExclusion True (ExcludedPkg (SourceOnly esPkg) [] [] (reason:srs)) Nothing AvailableExcluded (aiPkg, False) (ExcludedPkg (esPkg, True) _ _ srs) -> removeAvailable True (InstalledOnly aiPkg) (PackageIndex.deletePackageId pkgid) (ExcludedPkg (InstalledAndSource aiPkg esPkg) [] [reason] srs) (Just (pkgid, srs)) AvailableExcluded (_aiPkg, True) (ExcludedPkg (_esPkg, True) _ _ srs) -> noChange True (Just (pkgid, srs)) ExcludedAvailable (ExcludedPkg (eiPkg, False) _ irs _) (asPkg, False) -> removeAvailable False (SourceOnly asPkg) (PackageIndex.deletePackageId pkgid) (ExcludedPkg (InstalledAndSource eiPkg asPkg) [reason] irs []) Nothing ExcludedAvailable (ExcludedPkg (eiPkg, True) _ irs _) (asPkg, False) -> removeAvailable False (SourceOnly asPkg) (PackageIndex.deletePackageId pkgid) (ExcludedPkg (InstalledAndSource eiPkg asPkg) [] irs [reason]) (Just (pkgid, irs)) ExcludedAvailable (ExcludedPkg (eiPkg, False) _ irs _) (_asPkg, True) -> addExtraExclusion True (ExcludedPkg (InstalledOnly eiPkg) [] (reason:irs) []) Nothing ExcludedAvailable (ExcludedPkg (_eiPkg, True) _ irs _) (_asPkg, True) -> noChange True (Just (pkgid, irs)) AllExcluded (ExcludedPkg (InstalledOnly (eiPkg, False)) _ irs _) -> addExtraExclusion False (ExcludedPkg (InstalledOnly eiPkg) [] (reason:irs) []) Nothing AllExcluded (ExcludedPkg (InstalledOnly (_eiPkg, True)) _ irs _) -> noChange False (Just (pkgid, irs)) AllExcluded (ExcludedPkg (SourceOnly (esPkg, False)) _ _ srs) -> addExtraExclusion False (ExcludedPkg (SourceOnly esPkg) [] [] (reason:srs)) Nothing AllExcluded (ExcludedPkg (SourceOnly (_esPkg, True)) _ _ srs) -> noChange False (Just (pkgid, srs)) AllExcluded (ExcludedPkg (InstalledAndSource (eiPkg, False) (esPkg, False)) isrs irs srs) -> addExtraExclusion False (ExcludedPkg (InstalledAndSource eiPkg esPkg) (reason:isrs) irs srs) Nothing AllExcluded (ExcludedPkg (InstalledAndSource (eiPkg, True) (esPkg, False)) isrs irs srs) -> addExtraExclusion False (ExcludedPkg (InstalledAndSource eiPkg esPkg) isrs irs (reason:srs)) (Just (pkgid, irs)) AllExcluded (ExcludedPkg (InstalledAndSource (eiPkg, False) (esPkg, True)) isrs irs srs) 
-> addExtraExclusion False (ExcludedPkg (InstalledAndSource eiPkg esPkg) isrs (reason:irs) srs) (Just (pkgid, srs)) AllExcluded (ExcludedPkg (InstalledAndSource (_eiPkg, True) (_esPkg, True)) isrs irs srs) -> noChange False (Just (pkgid, isrs ++ irs ++ srs)) where removeAvailable ok nePkg adjustAvailable ePkg c = let aPkgs' = adjustAvailable aPkgs ePkgs' = PackageIndex.insert ePkg ePkgs in aPkgs' `seq` ePkgs' `seq` (aPkgs', ePkgs', Just nePkg, ok, c) addExtraExclusion ok ePkg c = let ePkgs' = PackageIndex.insert ePkg ePkgs in ePkgs' `seq` (aPkgs, ePkgs', Nothing, ok, c) noChange ok c = (aPkgs, ePkgs, Nothing, ok, c) pkgid = case pkg of OnlyInLeft p -> packageId p OnlyInRight p -> packageId p InBoth p _ -> packageId p viewPackageStatus :: (Package installed, Package source) => MergeResult (InstalledOrSource installed source) (ExcludedPkg (InstalledOrSource installed source) reason) -> PackageStatus (installed, Bool) (source, Bool) reason viewPackageStatus merged = case merged of OnlyInLeft aPkg -> AllAvailable (testConstraint aPkg) OnlyInRight (ExcludedPkg ePkg isrs irs srs) -> AllExcluded (ExcludedPkg (testConstraint ePkg) isrs irs srs) InBoth (InstalledOnly aiPkg) (ExcludedPkg (SourceOnly esPkg) [] [] srs) -> case testConstraint (InstalledAndSource aiPkg esPkg) of InstalledAndSource (aiPkg', iOk) (esPkg', sOk) -> AvailableExcluded (aiPkg', iOk) (ExcludedPkg (esPkg', sOk) [] [] srs) _ -> impossible InBoth (SourceOnly asPkg) (ExcludedPkg (InstalledOnly eiPkg) [] irs []) -> case testConstraint (InstalledAndSource eiPkg asPkg) of InstalledAndSource (eiPkg', iOk) (asPkg', sOk) -> ExcludedAvailable (ExcludedPkg (eiPkg', iOk) [] irs []) (asPkg', sOk) _ -> impossible _ -> impossible where impossible = error "impossible: viewPackageStatus invariant violation" -- A intermediate structure that enumerates all the possible cases given the -- invariant. This helps us to get simpler and complete pattern matching in -- updatePkg above -- data PackageStatus installed source reason = AllAvailable (InstalledOrSource installed source) | AllExcluded (ExcludedPkg (InstalledOrSource installed source) reason) | AvailableExcluded installed (ExcludedPkg source reason) | ExcludedAvailable (ExcludedPkg installed reason) source conflicting :: (Package installed, Package source) => Constraints installed source reason -> Dependency -> [(PackageId, [reason])] conflicting (Constraints _ _ excluded _ _) dep = [ (packageId pkg, reasonsAll ++ reasonsAvail ++ reasonsInstalled) --TODO | ExcludedPkg pkg reasonsAll reasonsAvail reasonsInstalled <- PackageIndex.lookupDependency excluded dep ] cabal-install-1.22.6.0/Distribution/Client/Dependency/TopDown/Types.hs0000644000000000000000000000600312540225656023647 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Dependency.TopDown.Types -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Types for the top-down dependency resolver. 
----------------------------------------------------------------------------- module Distribution.Client.Dependency.TopDown.Types where import Distribution.Client.Types ( SourcePackage(..), InstalledPackage, OptionalStanza ) import Distribution.Package ( PackageIdentifier, Dependency , Package(packageId), PackageFixedDeps(depends) ) import Distribution.PackageDescription ( FlagAssignment ) -- ------------------------------------------------------------ -- * The various kinds of packages -- ------------------------------------------------------------ type SelectablePackage = InstalledOrSource InstalledPackageEx UnconfiguredPackage type SelectedPackage = InstalledOrSource InstalledPackageEx SemiConfiguredPackage data InstalledOrSource installed source = InstalledOnly installed | SourceOnly source | InstalledAndSource installed source deriving Eq type TopologicalSortNumber = Int data InstalledPackageEx = InstalledPackageEx InstalledPackage !TopologicalSortNumber [PackageIdentifier] -- transitive closure of installed deps data UnconfiguredPackage = UnconfiguredPackage SourcePackage !TopologicalSortNumber FlagAssignment [OptionalStanza] data SemiConfiguredPackage = SemiConfiguredPackage SourcePackage -- package info FlagAssignment -- total flag assignment for the package [OptionalStanza] -- enabled optional stanzas [Dependency] -- dependencies we end up with when we apply -- the flag assignment instance Package InstalledPackageEx where packageId (InstalledPackageEx p _ _) = packageId p instance PackageFixedDeps InstalledPackageEx where depends (InstalledPackageEx _ _ deps) = deps instance Package UnconfiguredPackage where packageId (UnconfiguredPackage p _ _ _) = packageId p instance Package SemiConfiguredPackage where packageId (SemiConfiguredPackage p _ _ _) = packageId p instance (Package installed, Package source) => Package (InstalledOrSource installed source) where packageId (InstalledOnly p ) = packageId p packageId (SourceOnly p ) = packageId p packageId (InstalledAndSource p _) = packageId p -- | We can have constraints on selecting just installed or just source -- packages. -- -- In particular, installed packages can only depend on other installed -- packages while packages that are not yet installed but which we plan to -- install can depend on installed or other not-yet-installed packages. 
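-- A hedged sketch of how such a constraint could be expressed as the
-- (Version -> Bool -> Bool) test taken by
-- 'Distribution.Client.Dependency.TopDown.Constraints.constrain', where the
-- Bool argument is True for the installed instance ('toConstraintTest' is a
-- hypothetical helper, not part of this module):
--
-- > toConstraintTest :: InstalledConstraint -> Version -> Bool -> Bool
-- > toConstraintTest InstalledConstraint _ver isInstalled = isInstalled
-- > toConstraintTest SourceConstraint    _ver isInstalled = not isInstalled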
-- data InstalledConstraint = InstalledConstraint | SourceConstraint deriving (Eq, Show) cabal-install-1.22.6.0/Distribution/Client/BuildReports/0000755000000000000000000000000012540225656021156 5ustar0000000000000000cabal-install-1.22.6.0/Distribution/Client/BuildReports/Storage.hs0000644000000000000000000001335312540225656023123 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Reporting -- Copyright : (c) David Waern 2008 -- License : BSD-like -- -- Maintainer : david.waern@gmail.com -- Stability : experimental -- Portability : portable -- -- Anonymous build report data structure, printing and parsing -- ----------------------------------------------------------------------------- module Distribution.Client.BuildReports.Storage ( -- * Storing and retrieving build reports storeAnonymous, storeLocal, -- retrieve, -- * 'InstallPlan' support fromInstallPlan, fromPlanningFailure, ) where import qualified Distribution.Client.BuildReports.Anonymous as BuildReport import Distribution.Client.BuildReports.Anonymous (BuildReport) import Distribution.Client.Types import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.InstallPlan ( InstallPlan ) import Distribution.Package ( PackageId, packageId ) import Distribution.PackageDescription ( FlagAssignment ) import Distribution.Simple.InstallDirs ( PathTemplate, fromPathTemplate , initialPathTemplateEnv, substPathTemplate ) import Distribution.System ( Platform(Platform) ) import Distribution.Compiler ( CompilerId(..), CompilerInfo(..) ) import Distribution.Simple.Utils ( comparing, equating ) import Data.List ( groupBy, sortBy ) import Data.Maybe ( catMaybes ) import System.FilePath ( (), takeDirectory ) import System.Directory ( createDirectoryIfMissing ) storeAnonymous :: [(BuildReport, Maybe Repo)] -> IO () storeAnonymous reports = sequence_ [ appendFile file (concatMap format reports') | (repo, reports') <- separate reports , let file = repoLocalDir repo "build-reports.log" ] --TODO: make this concurrency safe, either lock the report file or make sure -- the writes for each report are atomic (under 4k and flush at boundaries) where format r = '\n' : BuildReport.show r ++ "\n" separate :: [(BuildReport, Maybe Repo)] -> [(Repo, [BuildReport])] separate = map (\rs@((_,repo,_):_) -> (repo, [ r | (r,_,_) <- rs ])) . map concat . groupBy (equating (repoName . head)) . sortBy (comparing (repoName . head)) . groupBy (equating repoName) . 
onlyRemote repoName (_,_,rrepo) = remoteRepoName rrepo onlyRemote :: [(BuildReport, Maybe Repo)] -> [(BuildReport, Repo, RemoteRepo)] onlyRemote rs = [ (report, repo, remoteRepo) | (report, Just repo@Repo { repoKind = Left remoteRepo }) <- rs ] storeLocal :: CompilerInfo -> [PathTemplate] -> [(BuildReport, Maybe Repo)] -> Platform -> IO () storeLocal cinfo templates reports platform = sequence_ [ do createDirectoryIfMissing True (takeDirectory file) appendFile file output --TODO: make this concurrency safe, either lock the report file or make -- sure the writes for each report are atomic | (file, reports') <- groupByFileName [ (reportFileName template report, report) | template <- templates , (report, _repo) <- reports ] , let output = concatMap format reports' ] where format r = '\n' : BuildReport.show r ++ "\n" reportFileName template report = fromPathTemplate (substPathTemplate env template) where env = initialPathTemplateEnv (BuildReport.package report) -- ToDo: In principle, we can support $pkgkey, but only -- if the configure step succeeds. So add a Maybe field -- to the build report, and either use that or make up -- a fake identifier if it's not available. (error "storeLocal: package key not available") cinfo platform groupByFileName = map (\grp@((filename,_):_) -> (filename, map snd grp)) . groupBy (equating fst) . sortBy (comparing fst) -- ------------------------------------------------------------ -- * InstallPlan support -- ------------------------------------------------------------ fromInstallPlan :: InstallPlan -> [(BuildReport, Maybe Repo)] fromInstallPlan plan = catMaybes . map (fromPlanPackage platform comp) . InstallPlan.toList $ plan where platform = InstallPlan.planPlatform plan comp = compilerInfoId (InstallPlan.planCompiler plan) fromPlanPackage :: Platform -> CompilerId -> InstallPlan.PlanPackage -> Maybe (BuildReport, Maybe Repo) fromPlanPackage (Platform arch os) comp planPackage = case planPackage of InstallPlan.Installed (ReadyPackage srcPkg flags _ deps) result -> Just $ ( BuildReport.new os arch comp (packageId srcPkg) flags (map packageId deps) (Right result) , extractRepo srcPkg) InstallPlan.Failed (ConfiguredPackage srcPkg flags _ deps) result -> Just $ ( BuildReport.new os arch comp (packageId srcPkg) flags deps (Left result) , extractRepo srcPkg ) _ -> Nothing where extractRepo (SourcePackage { packageSource = RepoTarballPackage repo _ _ }) = Just repo extractRepo _ = Nothing fromPlanningFailure :: Platform -> CompilerId -> [PackageId] -> FlagAssignment -> [(BuildReport, Maybe Repo)] fromPlanningFailure (Platform arch os) comp pkgids flags = [ (BuildReport.new os arch comp pkgid flags [] (Left PlanningFailed), Nothing) | pkgid <- pkgids ] cabal-install-1.22.6.0/Distribution/Client/BuildReports/Upload.hs0000644000000000000000000000555412540225656022747 0ustar0000000000000000{-# LANGUAGE CPP, PatternGuards #-} -- This is a quick hack for uploading build reports to Hackage. module Distribution.Client.BuildReports.Upload ( BuildLog , BuildReportId , uploadReports , postBuildReport , putBuildLog ) where import Network.Browser ( BrowserAction, request, setAllowRedirects ) import Network.HTTP ( Header(..), HeaderName(..) , Request(..), RequestMethod(..), Response(..) 
) import Network.TCP (HandleStream) import Network.URI (URI, uriPath, parseRelativeReference, relativeTo) import Control.Monad ( forM_ ) import System.FilePath.Posix ( () ) import qualified Distribution.Client.BuildReports.Anonymous as BuildReport import Distribution.Client.BuildReports.Anonymous (BuildReport) import Distribution.Text (display) type BuildReportId = URI type BuildLog = String uploadReports :: URI -> [(BuildReport, Maybe BuildLog)] -> BrowserAction (HandleStream BuildLog) () uploadReports uri reports = do forM_ reports $ \(report, mbBuildLog) -> do buildId <- postBuildReport uri report case mbBuildLog of Just buildLog -> putBuildLog buildId buildLog Nothing -> return () postBuildReport :: URI -> BuildReport -> BrowserAction (HandleStream BuildLog) BuildReportId postBuildReport uri buildReport = do setAllowRedirects False (_, response) <- request Request { rqURI = uri { uriPath = "/package" display (BuildReport.package buildReport) "reports" }, rqMethod = POST, rqHeaders = [Header HdrContentType ("text/plain"), Header HdrContentLength (show (length body)), Header HdrAccept ("text/plain")], rqBody = body } case rspCode response of (3,0,3) | [Just buildId] <- [ do rel <- parseRelativeReference location #if defined(VERSION_network_uri) return $ relativeTo rel uri #elif defined(VERSION_network) #if MIN_VERSION_network(2,4,0) return $ relativeTo rel uri #else relativeTo rel uri #endif #endif | Header HdrLocation location <- rspHeaders response ] -> return $ buildId _ -> error "Unrecognised response from server." where body = BuildReport.show buildReport putBuildLog :: BuildReportId -> BuildLog -> BrowserAction (HandleStream BuildLog) () putBuildLog reportId buildLog = do --FIXME: do something if the request fails (_, _response) <- request Request { rqURI = reportId{uriPath = uriPath reportId "log"}, rqMethod = PUT, rqHeaders = [Header HdrContentType ("text/plain"), Header HdrContentLength (show (length buildLog)), Header HdrAccept ("text/plain")], rqBody = buildLog } return () cabal-install-1.22.6.0/Distribution/Client/BuildReports/Anonymous.hs0000644000000000000000000002641412540225656023511 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Reporting -- Copyright : (c) David Waern 2008 -- License : BSD-like -- -- Maintainer : david.waern@gmail.com -- Stability : experimental -- Portability : portable -- -- Anonymous build report data structure, printing and parsing -- ----------------------------------------------------------------------------- module Distribution.Client.BuildReports.Anonymous ( BuildReport(..), InstallOutcome(..), Outcome(..), -- * Constructing and writing reports new, -- * parsing and pretty printing parse, parseList, show, -- showList, ) where import qualified Distribution.Client.Types as BR ( BuildResult, BuildFailure(..), BuildSuccess(..) , DocsResult(..), TestsResult(..) ) import Distribution.Client.Utils ( mergeBy, MergeResult(..) ) import qualified Paths_cabal_install (version) import Distribution.Package ( PackageIdentifier(..), PackageName(..) ) import Distribution.PackageDescription ( FlagName(..), FlagAssignment ) --import Distribution.Version -- ( Version ) import Distribution.System ( OS, Arch ) import Distribution.Compiler ( CompilerId(..) ) import qualified Distribution.Text as Text ( Text(disp, parse) ) import Distribution.ParseUtils ( FieldDescr(..), ParseResult(..), Field(..) 
, simpleField, listField, ppFields, readFields , syntaxError, locatedErrorMsg ) import Distribution.Simple.Utils ( comparing ) import qualified Distribution.Compat.ReadP as Parse ( ReadP, pfail, munch1, skipSpaces ) import qualified Text.PrettyPrint as Disp ( Doc, render, char, text ) import Text.PrettyPrint ( (<+>), (<>) ) import Data.List ( unfoldr, sortBy ) import Data.Char as Char ( isAlpha, isAlphaNum ) import Prelude hiding (show) data BuildReport = BuildReport { -- | The package this build report is about package :: PackageIdentifier, -- | The OS and Arch the package was built on os :: OS, arch :: Arch, -- | The Haskell compiler (and hopefully version) used compiler :: CompilerId, -- | The uploading client, ie cabal-install-x.y.z client :: PackageIdentifier, -- | Which configurations flags we used flagAssignment :: FlagAssignment, -- | Which dependent packages we were using exactly dependencies :: [PackageIdentifier], -- | Did installing work ok? installOutcome :: InstallOutcome, -- Which version of the Cabal library was used to compile the Setup.hs -- cabalVersion :: Version, -- Which build tools we were using (with versions) -- tools :: [PackageIdentifier], -- | Configure outcome, did configure work ok? docsOutcome :: Outcome, -- | Configure outcome, did configure work ok? testsOutcome :: Outcome } data InstallOutcome = PlanningFailed | DependencyFailed PackageIdentifier | DownloadFailed | UnpackFailed | SetupFailed | ConfigureFailed | BuildFailed | TestsFailed | InstallFailed | InstallOk deriving Eq data Outcome = NotTried | Failed | Ok deriving Eq new :: OS -> Arch -> CompilerId -> PackageIdentifier -> FlagAssignment -> [PackageIdentifier] -> BR.BuildResult -> BuildReport new os' arch' comp pkgid flags deps result = BuildReport { package = pkgid, os = os', arch = arch', compiler = comp, client = cabalInstallID, flagAssignment = flags, dependencies = deps, installOutcome = convertInstallOutcome, -- cabalVersion = undefined docsOutcome = convertDocsOutcome, testsOutcome = convertTestsOutcome } where convertInstallOutcome = case result of Left BR.PlanningFailed -> PlanningFailed Left (BR.DependentFailed p) -> DependencyFailed p Left (BR.DownloadFailed _) -> DownloadFailed Left (BR.UnpackFailed _) -> UnpackFailed Left (BR.ConfigureFailed _) -> ConfigureFailed Left (BR.BuildFailed _) -> BuildFailed Left (BR.TestsFailed _) -> TestsFailed Left (BR.InstallFailed _) -> InstallFailed Right (BR.BuildOk _ _ _) -> InstallOk convertDocsOutcome = case result of Left _ -> NotTried Right (BR.BuildOk BR.DocsNotTried _ _) -> NotTried Right (BR.BuildOk BR.DocsFailed _ _) -> Failed Right (BR.BuildOk BR.DocsOk _ _) -> Ok convertTestsOutcome = case result of Left (BR.TestsFailed _) -> Failed Left _ -> NotTried Right (BR.BuildOk _ BR.TestsNotTried _) -> NotTried Right (BR.BuildOk _ BR.TestsOk _) -> Ok cabalInstallID :: PackageIdentifier cabalInstallID = PackageIdentifier (PackageName "cabal-install") Paths_cabal_install.version -- ------------------------------------------------------------ -- * External format -- ------------------------------------------------------------ initialBuildReport :: BuildReport initialBuildReport = BuildReport { package = requiredField "package", os = requiredField "os", arch = requiredField "arch", compiler = requiredField "compiler", client = requiredField "client", flagAssignment = [], dependencies = [], installOutcome = requiredField "install-outcome", -- cabalVersion = Nothing, -- tools = [], docsOutcome = NotTried, testsOutcome = NotTried } where requiredField fname 
= error ("required field: " ++ fname) -- ----------------------------------------------------------------------------- -- Parsing parse :: String -> Either String BuildReport parse s = case parseFields s of ParseFailed perror -> Left msg where (_, msg) = locatedErrorMsg perror ParseOk _ report -> Right report --FIXME: this does not allow for optional or repeated fields parseFields :: String -> ParseResult BuildReport parseFields input = do fields <- mapM extractField =<< readFields input let merged = mergeBy (\desc (_,name,_) -> compare (fieldName desc) name) sortedFieldDescrs (sortBy (comparing (\(_,name,_) -> name)) fields) checkMerged initialBuildReport merged where extractField :: Field -> ParseResult (Int, String, String) extractField (F line name value) = return (line, name, value) extractField (Section line _ _ _) = syntaxError line "Unrecognized stanza" extractField (IfBlock line _ _ _) = syntaxError line "Unrecognized stanza" checkMerged report [] = return report checkMerged report (merged:remaining) = case merged of InBoth fieldDescr (line, _name, value) -> do report' <- fieldSet fieldDescr line value report checkMerged report' remaining OnlyInRight (line, name, _) -> syntaxError line ("Unrecognized field " ++ name) OnlyInLeft fieldDescr -> fail ("Missing field " ++ fieldName fieldDescr) parseList :: String -> [BuildReport] parseList str = [ report | Right report <- map parse (split str) ] where split :: String -> [String] split = filter (not . null) . unfoldr chunk . lines chunk [] = Nothing chunk ls = case break null ls of (r, rs) -> Just (unlines r, dropWhile null rs) -- ----------------------------------------------------------------------------- -- Pretty-printing show :: BuildReport -> String show = Disp.render . ppFields fieldDescrs -- ----------------------------------------------------------------------------- -- Description of the fields, for parsing/printing fieldDescrs :: [FieldDescr BuildReport] fieldDescrs = [ simpleField "package" Text.disp Text.parse package (\v r -> r { package = v }) , simpleField "os" Text.disp Text.parse os (\v r -> r { os = v }) , simpleField "arch" Text.disp Text.parse arch (\v r -> r { arch = v }) , simpleField "compiler" Text.disp Text.parse compiler (\v r -> r { compiler = v }) , simpleField "client" Text.disp Text.parse client (\v r -> r { client = v }) , listField "flags" dispFlag parseFlag flagAssignment (\v r -> r { flagAssignment = v }) , listField "dependencies" Text.disp Text.parse dependencies (\v r -> r { dependencies = v }) , simpleField "install-outcome" Text.disp Text.parse installOutcome (\v r -> r { installOutcome = v }) , simpleField "docs-outcome" Text.disp Text.parse docsOutcome (\v r -> r { docsOutcome = v }) , simpleField "tests-outcome" Text.disp Text.parse testsOutcome (\v r -> r { testsOutcome = v }) ] sortedFieldDescrs :: [FieldDescr BuildReport] sortedFieldDescrs = sortBy (comparing fieldName) fieldDescrs dispFlag :: (FlagName, Bool) -> Disp.Doc dispFlag (FlagName name, True) = Disp.text name dispFlag (FlagName name, False) = Disp.char '-' <> Disp.text name parseFlag :: Parse.ReadP r (FlagName, Bool) parseFlag = do name <- Parse.munch1 (\c -> Char.isAlphaNum c || c == '_' || c == '-') case name of ('-':flag) -> return (FlagName flag, False) flag -> return (FlagName flag, True) instance Text.Text InstallOutcome where disp PlanningFailed = Disp.text "PlanningFailed" disp (DependencyFailed pkgid) = Disp.text "DependencyFailed" <+> Text.disp pkgid disp DownloadFailed = Disp.text "DownloadFailed" disp UnpackFailed = 
Disp.text "UnpackFailed" disp SetupFailed = Disp.text "SetupFailed" disp ConfigureFailed = Disp.text "ConfigureFailed" disp BuildFailed = Disp.text "BuildFailed" disp TestsFailed = Disp.text "TestsFailed" disp InstallFailed = Disp.text "InstallFailed" disp InstallOk = Disp.text "InstallOk" parse = do name <- Parse.munch1 Char.isAlphaNum case name of "PlanningFailed" -> return PlanningFailed "DependencyFailed" -> do Parse.skipSpaces pkgid <- Text.parse return (DependencyFailed pkgid) "DownloadFailed" -> return DownloadFailed "UnpackFailed" -> return UnpackFailed "SetupFailed" -> return SetupFailed "ConfigureFailed" -> return ConfigureFailed "BuildFailed" -> return BuildFailed "TestsFailed" -> return TestsFailed "InstallFailed" -> return InstallFailed "InstallOk" -> return InstallOk _ -> Parse.pfail instance Text.Text Outcome where disp NotTried = Disp.text "NotTried" disp Failed = Disp.text "Failed" disp Ok = Disp.text "Ok" parse = do name <- Parse.munch1 Char.isAlpha case name of "NotTried" -> return NotTried "Failed" -> return Failed "Ok" -> return Ok _ -> Parse.pfail cabal-install-1.22.6.0/Distribution/Client/BuildReports/Types.hs0000644000000000000000000000244112540225656022617 0ustar0000000000000000----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.BuildReports.Types -- Copyright : (c) Duncan Coutts 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Portability : portable -- -- Types related to build reporting -- ----------------------------------------------------------------------------- module Distribution.Client.BuildReports.Types ( ReportLevel(..), ) where import qualified Distribution.Text as Text ( Text(..) ) import qualified Distribution.Compat.ReadP as Parse ( pfail, munch1 ) import qualified Text.PrettyPrint as Disp ( text ) import Data.Char as Char ( isAlpha, toLower ) data ReportLevel = NoReports | AnonymousReports | DetailedReports deriving (Eq, Ord, Show) instance Text.Text ReportLevel where disp NoReports = Disp.text "none" disp AnonymousReports = Disp.text "anonymous" disp DetailedReports = Disp.text "detailed" parse = do name <- Parse.munch1 Char.isAlpha case lowercase name of "none" -> return NoReports "anonymous" -> return AnonymousReports "detailed" -> return DetailedReports _ -> Parse.pfail lowercase :: String -> String lowercase = map Char.toLower cabal-install-1.22.6.0/Distribution/Client/Init/0000755000000000000000000000000012540225656017443 5ustar0000000000000000cabal-install-1.22.6.0/Distribution/Client/Init/Licenses.hs0000644000000000000000000053677512540225656021573 0ustar0000000000000000module Distribution.Client.Init.Licenses ( License , bsd2 , bsd3 , gplv2 , gplv3 , lgpl21 , lgpl3 , agplv3 , apache20 , mit , mpl20 , isc ) where type License = String bsd2 :: String -> String -> License bsd2 authors year = unlines [ "Copyright (c) " ++ year ++ ", " ++ authors , "All rights reserved." , "" , "Redistribution and use in source and binary forms, with or without" , "modification, are permitted provided that the following conditions are" , "met:" , "" , "1. Redistributions of source code must retain the above copyright" , " notice, this list of conditions and the following disclaimer." , "" , "2. Redistributions in binary form must reproduce the above copyright" , " notice, this list of conditions and the following disclaimer in the" , " documentation and/or other materials provided with the" , " distribution." 
, "" , "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS" , "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT" , "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR" , "A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT" , "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL," , "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT" , "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE," , "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY" , "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT" , "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE" , "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." ] bsd3 :: String -> String -> License bsd3 authors year = unlines [ "Copyright (c) " ++ year ++ ", " ++ authors , "" , "All rights reserved." , "" , "Redistribution and use in source and binary forms, with or without" , "modification, are permitted provided that the following conditions are met:" , "" , " * Redistributions of source code must retain the above copyright" , " notice, this list of conditions and the following disclaimer." , "" , " * Redistributions in binary form must reproduce the above" , " copyright notice, this list of conditions and the following" , " disclaimer in the documentation and/or other materials provided" , " with the distribution." , "" , " * Neither the name of " ++ authors ++ " nor the names of other" , " contributors may be used to endorse or promote products derived" , " from this software without specific prior written permission." , "" , "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS" , "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT" , "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR" , "A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT" , "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL," , "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT" , "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE," , "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY" , "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT" , "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE" , "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." ] gplv2 :: License gplv2 = unlines [ " GNU GENERAL PUBLIC LICENSE" , " Version 2, June 1991" , "" , " Copyright (C) 1989, 1991 Free Software Foundation, Inc.," , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" , " Everyone is permitted to copy and distribute verbatim copies" , " of this license document, but changing it is not allowed." , "" , " Preamble" , "" , " The licenses for most software are designed to take away your" , "freedom to share and change it. By contrast, the GNU General Public" , "License is intended to guarantee your freedom to share and change free" , "software--to make sure the software is free for all its users. This" , "General Public License applies to most of the Free Software" , "Foundation's software and to any other program whose authors commit to" , "using it. (Some other Free Software Foundation software is covered by" , "the GNU Lesser General Public License instead.) You can apply it to" , "your programs, too." , "" , " When we speak of free software, we are referring to freedom, not" , "price. 
Our General Public Licenses are designed to make sure that you" , "have the freedom to distribute copies of free software (and charge for" , "this service if you wish), that you receive source code or can get it" , "if you want it, that you can change the software or use pieces of it" , "in new free programs; and that you know you can do these things." , "" , " To protect your rights, we need to make restrictions that forbid" , "anyone to deny you these rights or to ask you to surrender the rights." , "These restrictions translate to certain responsibilities for you if you" , "distribute copies of the software, or if you modify it." , "" , " For example, if you distribute copies of such a program, whether" , "gratis or for a fee, you must give the recipients all the rights that" , "you have. You must make sure that they, too, receive or can get the" , "source code. And you must show them these terms so they know their" , "rights." , "" , " We protect your rights with two steps: (1) copyright the software, and" , "(2) offer you this license which gives you legal permission to copy," , "distribute and/or modify the software." , "" , " Also, for each author's protection and ours, we want to make certain" , "that everyone understands that there is no warranty for this free" , "software. If the software is modified by someone else and passed on, we" , "want its recipients to know that what they have is not the original, so" , "that any problems introduced by others will not reflect on the original" , "authors' reputations." , "" , " Finally, any free program is threatened constantly by software" , "patents. We wish to avoid the danger that redistributors of a free" , "program will individually obtain patent licenses, in effect making the" , "program proprietary. To prevent this, we have made it clear that any" , "patent must be licensed for everyone's free use or not licensed at all." , "" , " The precise terms and conditions for copying, distribution and" , "modification follow." , "" , " GNU GENERAL PUBLIC LICENSE" , " TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION" , "" , " 0. This License applies to any program or other work which contains" , "a notice placed by the copyright holder saying it may be distributed" , "under the terms of this General Public License. The \"Program\", below," , "refers to any such program or work, and a \"work based on the Program\"" , "means either the Program or any derivative work under copyright law:" , "that is to say, a work containing the Program or a portion of it," , "either verbatim or with modifications and/or translated into another" , "language. (Hereinafter, translation is included without limitation in" , "the term \"modification\".) Each licensee is addressed as \"you\"." , "" , "Activities other than copying, distribution and modification are not" , "covered by this License; they are outside its scope. The act of" , "running the Program is not restricted, and the output from the Program" , "is covered only if its contents constitute a work based on the" , "Program (independent of having been made by running the Program)." , "Whether that is true depends on what the Program does." , "" , " 1. 
You may copy and distribute verbatim copies of the Program's" , "source code as you receive it, in any medium, provided that you" , "conspicuously and appropriately publish on each copy an appropriate" , "copyright notice and disclaimer of warranty; keep intact all the" , "notices that refer to this License and to the absence of any warranty;" , "and give any other recipients of the Program a copy of this License" , "along with the Program." , "" , "You may charge a fee for the physical act of transferring a copy, and" , "you may at your option offer warranty protection in exchange for a fee." , "" , " 2. You may modify your copy or copies of the Program or any portion" , "of it, thus forming a work based on the Program, and copy and" , "distribute such modifications or work under the terms of Section 1" , "above, provided that you also meet all of these conditions:" , "" , " a) You must cause the modified files to carry prominent notices" , " stating that you changed the files and the date of any change." , "" , " b) You must cause any work that you distribute or publish, that in" , " whole or in part contains or is derived from the Program or any" , " part thereof, to be licensed as a whole at no charge to all third" , " parties under the terms of this License." , "" , " c) If the modified program normally reads commands interactively" , " when run, you must cause it, when started running for such" , " interactive use in the most ordinary way, to print or display an" , " announcement including an appropriate copyright notice and a" , " notice that there is no warranty (or else, saying that you provide" , " a warranty) and that users may redistribute the program under" , " these conditions, and telling the user how to view a copy of this" , " License. (Exception: if the Program itself is interactive but" , " does not normally print such an announcement, your work based on" , " the Program is not required to print an announcement.)" , "" , "These requirements apply to the modified work as a whole. If" , "identifiable sections of that work are not derived from the Program," , "and can be reasonably considered independent and separate works in" , "themselves, then this License, and its terms, do not apply to those" , "sections when you distribute them as separate works. But when you" , "distribute the same sections as part of a whole which is a work based" , "on the Program, the distribution of the whole must be on the terms of" , "this License, whose permissions for other licensees extend to the" , "entire whole, and thus to each and every part regardless of who wrote it." , "" , "Thus, it is not the intent of this section to claim rights or contest" , "your rights to work written entirely by you; rather, the intent is to" , "exercise the right to control the distribution of derivative or" , "collective works based on the Program." , "" , "In addition, mere aggregation of another work not based on the Program" , "with the Program (or with a work based on the Program) on a volume of" , "a storage or distribution medium does not bring the other work under" , "the scope of this License." , "" , " 3. 
You may copy and distribute the Program (or a work based on it," , "under Section 2) in object code or executable form under the terms of" , "Sections 1 and 2 above provided that you also do one of the following:" , "" , " a) Accompany it with the complete corresponding machine-readable" , " source code, which must be distributed under the terms of Sections" , " 1 and 2 above on a medium customarily used for software interchange; or," , "" , " b) Accompany it with a written offer, valid for at least three" , " years, to give any third party, for a charge no more than your" , " cost of physically performing source distribution, a complete" , " machine-readable copy of the corresponding source code, to be" , " distributed under the terms of Sections 1 and 2 above on a medium" , " customarily used for software interchange; or," , "" , " c) Accompany it with the information you received as to the offer" , " to distribute corresponding source code. (This alternative is" , " allowed only for noncommercial distribution and only if you" , " received the program in object code or executable form with such" , " an offer, in accord with Subsection b above.)" , "" , "The source code for a work means the preferred form of the work for" , "making modifications to it. For an executable work, complete source" , "code means all the source code for all modules it contains, plus any" , "associated interface definition files, plus the scripts used to" , "control compilation and installation of the executable. However, as a" , "special exception, the source code distributed need not include" , "anything that is normally distributed (in either source or binary" , "form) with the major components (compiler, kernel, and so on) of the" , "operating system on which the executable runs, unless that component" , "itself accompanies the executable." , "" , "If distribution of executable or object code is made by offering" , "access to copy from a designated place, then offering equivalent" , "access to copy the source code from the same place counts as" , "distribution of the source code, even though third parties are not" , "compelled to copy the source along with the object code." , "" , " 4. You may not copy, modify, sublicense, or distribute the Program" , "except as expressly provided under this License. Any attempt" , "otherwise to copy, modify, sublicense or distribute the Program is" , "void, and will automatically terminate your rights under this License." , "However, parties who have received copies, or rights, from you under" , "this License will not have their licenses terminated so long as such" , "parties remain in full compliance." , "" , " 5. You are not required to accept this License, since you have not" , "signed it. However, nothing else grants you permission to modify or" , "distribute the Program or its derivative works. These actions are" , "prohibited by law if you do not accept this License. Therefore, by" , "modifying or distributing the Program (or any work based on the" , "Program), you indicate your acceptance of this License to do so, and" , "all its terms and conditions for copying, distributing or modifying" , "the Program or works based on it." , "" , " 6. Each time you redistribute the Program (or any work based on the" , "Program), the recipient automatically receives a license from the" , "original licensor to copy, distribute or modify the Program subject to" , "these terms and conditions. 
You may not impose any further" , "restrictions on the recipients' exercise of the rights granted herein." , "You are not responsible for enforcing compliance by third parties to" , "this License." , "" , " 7. If, as a consequence of a court judgment or allegation of patent" , "infringement or for any other reason (not limited to patent issues)," , "conditions are imposed on you (whether by court order, agreement or" , "otherwise) that contradict the conditions of this License, they do not" , "excuse you from the conditions of this License. If you cannot" , "distribute so as to satisfy simultaneously your obligations under this" , "License and any other pertinent obligations, then as a consequence you" , "may not distribute the Program at all. For example, if a patent" , "license would not permit royalty-free redistribution of the Program by" , "all those who receive copies directly or indirectly through you, then" , "the only way you could satisfy both it and this License would be to" , "refrain entirely from distribution of the Program." , "" , "If any portion of this section is held invalid or unenforceable under" , "any particular circumstance, the balance of the section is intended to" , "apply and the section as a whole is intended to apply in other" , "circumstances." , "" , "It is not the purpose of this section to induce you to infringe any" , "patents or other property right claims or to contest validity of any" , "such claims; this section has the sole purpose of protecting the" , "integrity of the free software distribution system, which is" , "implemented by public license practices. Many people have made" , "generous contributions to the wide range of software distributed" , "through that system in reliance on consistent application of that" , "system; it is up to the author/donor to decide if he or she is willing" , "to distribute software through any other system and a licensee cannot" , "impose that choice." , "" , "This section is intended to make thoroughly clear what is believed to" , "be a consequence of the rest of this License." , "" , " 8. If the distribution and/or use of the Program is restricted in" , "certain countries either by patents or by copyrighted interfaces, the" , "original copyright holder who places the Program under this License" , "may add an explicit geographical distribution limitation excluding" , "those countries, so that distribution is permitted only in or among" , "countries not thus excluded. In such case, this License incorporates" , "the limitation as if written in the body of this License." , "" , " 9. The Free Software Foundation may publish revised and/or new versions" , "of the General Public License from time to time. Such new versions will" , "be similar in spirit to the present version, but may differ in detail to" , "address new problems or concerns." , "" , "Each version is given a distinguishing version number. If the Program" , "specifies a version number of this License which applies to it and \"any" , "later version\", you have the option of following the terms and conditions" , "either of that version or of any later version published by the Free" , "Software Foundation. If the Program does not specify a version number of" , "this License, you may choose any version ever published by the Free Software" , "Foundation." , "" , " 10. If you wish to incorporate parts of the Program into other free" , "programs whose distribution conditions are different, write to the author" , "to ask for permission. 
For software which is copyrighted by the Free" , "Software Foundation, write to the Free Software Foundation; we sometimes" , "make exceptions for this. Our decision will be guided by the two goals" , "of preserving the free status of all derivatives of our free software and" , "of promoting the sharing and reuse of software generally." , "" , " NO WARRANTY" , "" , " 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY" , "FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN" , "OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES" , "PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED" , "OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF" , "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS" , "TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE" , "PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING," , "REPAIR OR CORRECTION." , "" , " 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR" , "REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES," , "INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING" , "OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED" , "TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY" , "YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER" , "PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE" , "POSSIBILITY OF SUCH DAMAGES." , "" , " END OF TERMS AND CONDITIONS" , "" , " How to Apply These Terms to Your New Programs" , "" , " If you develop a new program, and you want it to be of the greatest" , "possible use to the public, the best way to achieve this is to make it" , "free software which everyone can redistribute and change under these terms." , "" , " To do so, attach the following notices to the program. It is safest" , "to attach them to the start of each source file to most effectively" , "convey the exclusion of warranty; and each file should have at least" , "the \"copyright\" line and a pointer to where the full notice is found." , "" , " " , " Copyright (C) " , "" , " This program is free software; you can redistribute it and/or modify" , " it under the terms of the GNU General Public License as published by" , " the Free Software Foundation; either version 2 of the License, or" , " (at your option) any later version." , "" , " This program is distributed in the hope that it will be useful," , " but WITHOUT ANY WARRANTY; without even the implied warranty of" , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" , " GNU General Public License for more details." , "" , " You should have received a copy of the GNU General Public License along" , " with this program; if not, write to the Free Software Foundation, Inc.," , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA." , "" , "Also add information on how to contact you by electronic and paper mail." , "" , "If the program is interactive, make it output a short notice like this" , "when it starts in an interactive mode:" , "" , " Gnomovision version 69, Copyright (C) year name of author" , " Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'." , " This is free software, and you are welcome to redistribute it" , " under certain conditions; type `show c' for details." 
, "" , "The hypothetical commands `show w' and `show c' should show the appropriate" , "parts of the General Public License. Of course, the commands you use may" , "be called something other than `show w' and `show c'; they could even be" , "mouse-clicks or menu items--whatever suits your program." , "" , "You should also get your employer (if you work as a programmer) or your" , "school, if any, to sign a \"copyright disclaimer\" for the program, if" , "necessary. Here is a sample; alter the names:" , "" , " Yoyodyne, Inc., hereby disclaims all copyright interest in the program" , " `Gnomovision' (which makes passes at compilers) written by James Hacker." , "" , " , 1 April 1989" , " Ty Coon, President of Vice" , "" , "This General Public License does not permit incorporating your program into" , "proprietary programs. If your program is a subroutine library, you may" , "consider it more useful to permit linking proprietary applications with the" , "library. If this is what you want to do, use the GNU Lesser General" , "Public License instead of this License." ] gplv3 :: License gplv3 = unlines [ " GNU GENERAL PUBLIC LICENSE" , " Version 3, 29 June 2007" , "" , " Copyright (C) 2007 Free Software Foundation, Inc. " , " Everyone is permitted to copy and distribute verbatim copies" , " of this license document, but changing it is not allowed." , "" , " Preamble" , "" , " The GNU General Public License is a free, copyleft license for" , "software and other kinds of works." , "" , " The licenses for most software and other practical works are designed" , "to take away your freedom to share and change the works. By contrast," , "the GNU General Public License is intended to guarantee your freedom to" , "share and change all versions of a program--to make sure it remains free" , "software for all its users. We, the Free Software Foundation, use the" , "GNU General Public License for most of our software; it applies also to" , "any other work released this way by its authors. You can apply it to" , "your programs, too." , "" , " When we speak of free software, we are referring to freedom, not" , "price. Our General Public Licenses are designed to make sure that you" , "have the freedom to distribute copies of free software (and charge for" , "them if you wish), that you receive source code or can get it if you" , "want it, that you can change the software or use pieces of it in new" , "free programs, and that you know you can do these things." , "" , " To protect your rights, we need to prevent others from denying you" , "these rights or asking you to surrender the rights. Therefore, you have" , "certain responsibilities if you distribute copies of the software, or if" , "you modify it: responsibilities to respect the freedom of others." , "" , " For example, if you distribute copies of such a program, whether" , "gratis or for a fee, you must pass on to the recipients the same" , "freedoms that you received. You must make sure that they, too, receive" , "or can get the source code. And you must show them these terms so they" , "know their rights." , "" , " Developers that use the GNU GPL protect your rights with two steps:" , "(1) assert copyright on the software, and (2) offer you this License" , "giving you legal permission to copy, distribute and/or modify it." , "" , " For the developers' and authors' protection, the GPL clearly explains" , "that there is no warranty for this free software. 
For both users' and" , "authors' sake, the GPL requires that modified versions be marked as" , "changed, so that their problems will not be attributed erroneously to" , "authors of previous versions." , "" , " Some devices are designed to deny users access to install or run" , "modified versions of the software inside them, although the manufacturer" , "can do so. This is fundamentally incompatible with the aim of" , "protecting users' freedom to change the software. The systematic" , "pattern of such abuse occurs in the area of products for individuals to" , "use, which is precisely where it is most unacceptable. Therefore, we" , "have designed this version of the GPL to prohibit the practice for those" , "products. If such problems arise substantially in other domains, we" , "stand ready to extend this provision to those domains in future versions" , "of the GPL, as needed to protect the freedom of users." , "" , " Finally, every program is threatened constantly by software patents." , "States should not allow patents to restrict development and use of" , "software on general-purpose computers, but in those that do, we wish to" , "avoid the special danger that patents applied to a free program could" , "make it effectively proprietary. To prevent this, the GPL assures that" , "patents cannot be used to render the program non-free." , "" , " The precise terms and conditions for copying, distribution and" , "modification follow." , "" , " TERMS AND CONDITIONS" , "" , " 0. Definitions." , "" , " \"This License\" refers to version 3 of the GNU General Public License." , "" , " \"Copyright\" also means copyright-like laws that apply to other kinds of" , "works, such as semiconductor masks." , "" , " \"The Program\" refers to any copyrightable work licensed under this" , "License. Each licensee is addressed as \"you\". \"Licensees\" and" , "\"recipients\" may be individuals or organizations." , "" , " To \"modify\" a work means to copy from or adapt all or part of the work" , "in a fashion requiring copyright permission, other than the making of an" , "exact copy. The resulting work is called a \"modified version\" of the" , "earlier work or a work \"based on\" the earlier work." , "" , " A \"covered work\" means either the unmodified Program or a work based" , "on the Program." , "" , " To \"propagate\" a work means to do anything with it that, without" , "permission, would make you directly or secondarily liable for" , "infringement under applicable copyright law, except executing it on a" , "computer or modifying a private copy. Propagation includes copying," , "distribution (with or without modification), making available to the" , "public, and in some countries other activities as well." , "" , " To \"convey\" a work means any kind of propagation that enables other" , "parties to make or receive copies. Mere interaction with a user through" , "a computer network, with no transfer of a copy, is not conveying." , "" , " An interactive user interface displays \"Appropriate Legal Notices\"" , "to the extent that it includes a convenient and prominently visible" , "feature that (1) displays an appropriate copyright notice, and (2)" , "tells the user that there is no warranty for the work (except to the" , "extent that warranties are provided), that licensees may convey the" , "work under this License, and how to view a copy of this License. If" , "the interface presents a list of user commands or options, such as a" , "menu, a prominent item in the list meets this criterion." , "" , " 1. 
Source Code." , "" , " The \"source code\" for a work means the preferred form of the work" , "for making modifications to it. \"Object code\" means any non-source" , "form of a work." , "" , " A \"Standard Interface\" means an interface that either is an official" , "standard defined by a recognized standards body, or, in the case of" , "interfaces specified for a particular programming language, one that" , "is widely used among developers working in that language." , "" , " The \"System Libraries\" of an executable work include anything, other" , "than the work as a whole, that (a) is included in the normal form of" , "packaging a Major Component, but which is not part of that Major" , "Component, and (b) serves only to enable use of the work with that" , "Major Component, or to implement a Standard Interface for which an" , "implementation is available to the public in source code form. A" , "\"Major Component\", in this context, means a major essential component" , "(kernel, window system, and so on) of the specific operating system" , "(if any) on which the executable work runs, or a compiler used to" , "produce the work, or an object code interpreter used to run it." , "" , " The \"Corresponding Source\" for a work in object code form means all" , "the source code needed to generate, install, and (for an executable" , "work) run the object code and to modify the work, including scripts to" , "control those activities. However, it does not include the work's" , "System Libraries, or general-purpose tools or generally available free" , "programs which are used unmodified in performing those activities but" , "which are not part of the work. For example, Corresponding Source" , "includes interface definition files associated with source files for" , "the work, and the source code for shared libraries and dynamically" , "linked subprograms that the work is specifically designed to require," , "such as by intimate data communication or control flow between those" , "subprograms and other parts of the work." , "" , " The Corresponding Source need not include anything that users" , "can regenerate automatically from other parts of the Corresponding" , "Source." , "" , " The Corresponding Source for a work in source code form is that" , "same work." , "" , " 2. Basic Permissions." , "" , " All rights granted under this License are granted for the term of" , "copyright on the Program, and are irrevocable provided the stated" , "conditions are met. This License explicitly affirms your unlimited" , "permission to run the unmodified Program. The output from running a" , "covered work is covered by this License only if the output, given its" , "content, constitutes a covered work. This License acknowledges your" , "rights of fair use or other equivalent, as provided by copyright law." , "" , " You may make, run and propagate covered works that you do not" , "convey, without conditions so long as your license otherwise remains" , "in force. You may convey covered works to others for the sole purpose" , "of having them make modifications exclusively for you, or provide you" , "with facilities for running those works, provided that you comply with" , "the terms of this License in conveying all material for which you do" , "not control copyright. Those thus making or running the covered works" , "for you must do so exclusively on your behalf, under your direction" , "and control, on terms that prohibit them from making any copies of" , "your copyrighted material outside their relationship with you." 
, "" , " Conveying under any other circumstances is permitted solely under" , "the conditions stated below. Sublicensing is not allowed; section 10" , "makes it unnecessary." , "" , " 3. Protecting Users' Legal Rights From Anti-Circumvention Law." , "" , " No covered work shall be deemed part of an effective technological" , "measure under any applicable law fulfilling obligations under article" , "11 of the WIPO copyright treaty adopted on 20 December 1996, or" , "similar laws prohibiting or restricting circumvention of such" , "measures." , "" , " When you convey a covered work, you waive any legal power to forbid" , "circumvention of technological measures to the extent such circumvention" , "is effected by exercising rights under this License with respect to" , "the covered work, and you disclaim any intention to limit operation or" , "modification of the work as a means of enforcing, against the work's" , "users, your or third parties' legal rights to forbid circumvention of" , "technological measures." , "" , " 4. Conveying Verbatim Copies." , "" , " You may convey verbatim copies of the Program's source code as you" , "receive it, in any medium, provided that you conspicuously and" , "appropriately publish on each copy an appropriate copyright notice;" , "keep intact all notices stating that this License and any" , "non-permissive terms added in accord with section 7 apply to the code;" , "keep intact all notices of the absence of any warranty; and give all" , "recipients a copy of this License along with the Program." , "" , " You may charge any price or no price for each copy that you convey," , "and you may offer support or warranty protection for a fee." , "" , " 5. Conveying Modified Source Versions." , "" , " You may convey a work based on the Program, or the modifications to" , "produce it from the Program, in the form of source code under the" , "terms of section 4, provided that you also meet all of these conditions:" , "" , " a) The work must carry prominent notices stating that you modified" , " it, and giving a relevant date." , "" , " b) The work must carry prominent notices stating that it is" , " released under this License and any conditions added under section" , " 7. This requirement modifies the requirement in section 4 to" , " \"keep intact all notices\"." , "" , " c) You must license the entire work, as a whole, under this" , " License to anyone who comes into possession of a copy. This" , " License will therefore apply, along with any applicable section 7" , " additional terms, to the whole of the work, and all its parts," , " regardless of how they are packaged. This License gives no" , " permission to license the work in any other way, but it does not" , " invalidate such permission if you have separately received it." , "" , " d) If the work has interactive user interfaces, each must display" , " Appropriate Legal Notices; however, if the Program has interactive" , " interfaces that do not display Appropriate Legal Notices, your" , " work need not make them do so." , "" , " A compilation of a covered work with other separate and independent" , "works, which are not by their nature extensions of the covered work," , "and which are not combined with it such as to form a larger program," , "in or on a volume of a storage or distribution medium, is called an" , "\"aggregate\" if the compilation and its resulting copyright are not" , "used to limit the access or legal rights of the compilation's users" , "beyond what the individual works permit. 
Inclusion of a covered work" , "in an aggregate does not cause this License to apply to the other" , "parts of the aggregate." , "" , " 6. Conveying Non-Source Forms." , "" , " You may convey a covered work in object code form under the terms" , "of sections 4 and 5, provided that you also convey the" , "machine-readable Corresponding Source under the terms of this License," , "in one of these ways:" , "" , " a) Convey the object code in, or embodied in, a physical product" , " (including a physical distribution medium), accompanied by the" , " Corresponding Source fixed on a durable physical medium" , " customarily used for software interchange." , "" , " b) Convey the object code in, or embodied in, a physical product" , " (including a physical distribution medium), accompanied by a" , " written offer, valid for at least three years and valid for as" , " long as you offer spare parts or customer support for that product" , " model, to give anyone who possesses the object code either (1) a" , " copy of the Corresponding Source for all the software in the" , " product that is covered by this License, on a durable physical" , " medium customarily used for software interchange, for a price no" , " more than your reasonable cost of physically performing this" , " conveying of source, or (2) access to copy the" , " Corresponding Source from a network server at no charge." , "" , " c) Convey individual copies of the object code with a copy of the" , " written offer to provide the Corresponding Source. This" , " alternative is allowed only occasionally and noncommercially, and" , " only if you received the object code with such an offer, in accord" , " with subsection 6b." , "" , " d) Convey the object code by offering access from a designated" , " place (gratis or for a charge), and offer equivalent access to the" , " Corresponding Source in the same way through the same place at no" , " further charge. You need not require recipients to copy the" , " Corresponding Source along with the object code. If the place to" , " copy the object code is a network server, the Corresponding Source" , " may be on a different server (operated by you or a third party)" , " that supports equivalent copying facilities, provided you maintain" , " clear directions next to the object code saying where to find the" , " Corresponding Source. Regardless of what server hosts the" , " Corresponding Source, you remain obligated to ensure that it is" , " available for as long as needed to satisfy these requirements." , "" , " e) Convey the object code using peer-to-peer transmission, provided" , " you inform other peers where the object code and Corresponding" , " Source of the work are being offered to the general public at no" , " charge under subsection 6d." , "" , " A separable portion of the object code, whose source code is excluded" , "from the Corresponding Source as a System Library, need not be" , "included in conveying the object code work." , "" , " A \"User Product\" is either (1) a \"consumer product\", which means any" , "tangible personal property which is normally used for personal, family," , "or household purposes, or (2) anything designed or sold for incorporation" , "into a dwelling. In determining whether a product is a consumer product," , "doubtful cases shall be resolved in favor of coverage. 
For a particular" , "product received by a particular user, \"normally used\" refers to a" , "typical or common use of that class of product, regardless of the status" , "of the particular user or of the way in which the particular user" , "actually uses, or expects or is expected to use, the product. A product" , "is a consumer product regardless of whether the product has substantial" , "commercial, industrial or non-consumer uses, unless such uses represent" , "the only significant mode of use of the product." , "" , " \"Installation Information\" for a User Product means any methods," , "procedures, authorization keys, or other information required to install" , "and execute modified versions of a covered work in that User Product from" , "a modified version of its Corresponding Source. The information must" , "suffice to ensure that the continued functioning of the modified object" , "code is in no case prevented or interfered with solely because" , "modification has been made." , "" , " If you convey an object code work under this section in, or with, or" , "specifically for use in, a User Product, and the conveying occurs as" , "part of a transaction in which the right of possession and use of the" , "User Product is transferred to the recipient in perpetuity or for a" , "fixed term (regardless of how the transaction is characterized), the" , "Corresponding Source conveyed under this section must be accompanied" , "by the Installation Information. But this requirement does not apply" , "if neither you nor any third party retains the ability to install" , "modified object code on the User Product (for example, the work has" , "been installed in ROM)." , "" , " The requirement to provide Installation Information does not include a" , "requirement to continue to provide support service, warranty, or updates" , "for a work that has been modified or installed by the recipient, or for" , "the User Product in which it has been modified or installed. Access to a" , "network may be denied when the modification itself materially and" , "adversely affects the operation of the network or violates the rules and" , "protocols for communication across the network." , "" , " Corresponding Source conveyed, and Installation Information provided," , "in accord with this section must be in a format that is publicly" , "documented (and with an implementation available to the public in" , "source code form), and must require no special password or key for" , "unpacking, reading or copying." , "" , " 7. Additional Terms." , "" , " \"Additional permissions\" are terms that supplement the terms of this" , "License by making exceptions from one or more of its conditions." , "Additional permissions that are applicable to the entire Program shall" , "be treated as though they were included in this License, to the extent" , "that they are valid under applicable law. If additional permissions" , "apply only to part of the Program, that part may be used separately" , "under those permissions, but the entire Program remains governed by" , "this License without regard to the additional permissions." , "" , " When you convey a copy of a covered work, you may at your option" , "remove any additional permissions from that copy, or from any part of" , "it. (Additional permissions may be written to require their own" , "removal in certain cases when you modify the work.) 
You may place" , "additional permissions on material, added by you to a covered work," , "for which you have or can give appropriate copyright permission." , "" , " Notwithstanding any other provision of this License, for material you" , "add to a covered work, you may (if authorized by the copyright holders of" , "that material) supplement the terms of this License with terms:" , "" , " a) Disclaiming warranty or limiting liability differently from the" , " terms of sections 15 and 16 of this License; or" , "" , " b) Requiring preservation of specified reasonable legal notices or" , " author attributions in that material or in the Appropriate Legal" , " Notices displayed by works containing it; or" , "" , " c) Prohibiting misrepresentation of the origin of that material, or" , " requiring that modified versions of such material be marked in" , " reasonable ways as different from the original version; or" , "" , " d) Limiting the use for publicity purposes of names of licensors or" , " authors of the material; or" , "" , " e) Declining to grant rights under trademark law for use of some" , " trade names, trademarks, or service marks; or" , "" , " f) Requiring indemnification of licensors and authors of that" , " material by anyone who conveys the material (or modified versions of" , " it) with contractual assumptions of liability to the recipient, for" , " any liability that these contractual assumptions directly impose on" , " those licensors and authors." , "" , " All other non-permissive additional terms are considered \"further" , "restrictions\" within the meaning of section 10. If the Program as you" , "received it, or any part of it, contains a notice stating that it is" , "governed by this License along with a term that is a further" , "restriction, you may remove that term. If a license document contains" , "a further restriction but permits relicensing or conveying under this" , "License, you may add to a covered work material governed by the terms" , "of that license document, provided that the further restriction does" , "not survive such relicensing or conveying." , "" , " If you add terms to a covered work in accord with this section, you" , "must place, in the relevant source files, a statement of the" , "additional terms that apply to those files, or a notice indicating" , "where to find the applicable terms." , "" , " Additional terms, permissive or non-permissive, may be stated in the" , "form of a separately written license, or stated as exceptions;" , "the above requirements apply either way." , "" , " 8. Termination." , "" , " You may not propagate or modify a covered work except as expressly" , "provided under this License. Any attempt otherwise to propagate or" , "modify it is void, and will automatically terminate your rights under" , "this License (including any patent licenses granted under the third" , "paragraph of section 11)." , "" , " However, if you cease all violation of this License, then your" , "license from a particular copyright holder is reinstated (a)" , "provisionally, unless and until the copyright holder explicitly and" , "finally terminates your license, and (b) permanently, if the copyright" , "holder fails to notify you of the violation by some reasonable means" , "prior to 60 days after the cessation." 
, "" , " Moreover, your license from a particular copyright holder is" , "reinstated permanently if the copyright holder notifies you of the" , "violation by some reasonable means, this is the first time you have" , "received notice of violation of this License (for any work) from that" , "copyright holder, and you cure the violation prior to 30 days after" , "your receipt of the notice." , "" , " Termination of your rights under this section does not terminate the" , "licenses of parties who have received copies or rights from you under" , "this License. If your rights have been terminated and not permanently" , "reinstated, you do not qualify to receive new licenses for the same" , "material under section 10." , "" , " 9. Acceptance Not Required for Having Copies." , "" , " You are not required to accept this License in order to receive or" , "run a copy of the Program. Ancillary propagation of a covered work" , "occurring solely as a consequence of using peer-to-peer transmission" , "to receive a copy likewise does not require acceptance. However," , "nothing other than this License grants you permission to propagate or" , "modify any covered work. These actions infringe copyright if you do" , "not accept this License. Therefore, by modifying or propagating a" , "covered work, you indicate your acceptance of this License to do so." , "" , " 10. Automatic Licensing of Downstream Recipients." , "" , " Each time you convey a covered work, the recipient automatically" , "receives a license from the original licensors, to run, modify and" , "propagate that work, subject to this License. You are not responsible" , "for enforcing compliance by third parties with this License." , "" , " An \"entity transaction\" is a transaction transferring control of an" , "organization, or substantially all assets of one, or subdividing an" , "organization, or merging organizations. If propagation of a covered" , "work results from an entity transaction, each party to that" , "transaction who receives a copy of the work also receives whatever" , "licenses to the work the party's predecessor in interest had or could" , "give under the previous paragraph, plus a right to possession of the" , "Corresponding Source of the work from the predecessor in interest, if" , "the predecessor has it or can get it with reasonable efforts." , "" , " You may not impose any further restrictions on the exercise of the" , "rights granted or affirmed under this License. For example, you may" , "not impose a license fee, royalty, or other charge for exercise of" , "rights granted under this License, and you may not initiate litigation" , "(including a cross-claim or counterclaim in a lawsuit) alleging that" , "any patent claim is infringed by making, using, selling, offering for" , "sale, or importing the Program or any portion of it." , "" , " 11. Patents." , "" , " A \"contributor\" is a copyright holder who authorizes use under this" , "License of the Program or a work on which the Program is based. The" , "work thus licensed is called the contributor's \"contributor version\"." , "" , " A contributor's \"essential patent claims\" are all patent claims" , "owned or controlled by the contributor, whether already acquired or" , "hereafter acquired, that would be infringed by some manner, permitted" , "by this License, of making, using, or selling its contributor version," , "but do not include claims that would be infringed only as a" , "consequence of further modification of the contributor version. 
For" , "purposes of this definition, \"control\" includes the right to grant" , "patent sublicenses in a manner consistent with the requirements of" , "this License." , "" , " Each contributor grants you a non-exclusive, worldwide, royalty-free" , "patent license under the contributor's essential patent claims, to" , "make, use, sell, offer for sale, import and otherwise run, modify and" , "propagate the contents of its contributor version." , "" , " In the following three paragraphs, a \"patent license\" is any express" , "agreement or commitment, however denominated, not to enforce a patent" , "(such as an express permission to practice a patent or covenant not to" , "sue for patent infringement). To \"grant\" such a patent license to a" , "party means to make such an agreement or commitment not to enforce a" , "patent against the party." , "" , " If you convey a covered work, knowingly relying on a patent license," , "and the Corresponding Source of the work is not available for anyone" , "to copy, free of charge and under the terms of this License, through a" , "publicly available network server or other readily accessible means," , "then you must either (1) cause the Corresponding Source to be so" , "available, or (2) arrange to deprive yourself of the benefit of the" , "patent license for this particular work, or (3) arrange, in a manner" , "consistent with the requirements of this License, to extend the patent" , "license to downstream recipients. \"Knowingly relying\" means you have" , "actual knowledge that, but for the patent license, your conveying the" , "covered work in a country, or your recipient's use of the covered work" , "in a country, would infringe one or more identifiable patents in that" , "country that you have reason to believe are valid." , "" , " If, pursuant to or in connection with a single transaction or" , "arrangement, you convey, or propagate by procuring conveyance of, a" , "covered work, and grant a patent license to some of the parties" , "receiving the covered work authorizing them to use, propagate, modify" , "or convey a specific copy of the covered work, then the patent license" , "you grant is automatically extended to all recipients of the covered" , "work and works based on it." , "" , " A patent license is \"discriminatory\" if it does not include within" , "the scope of its coverage, prohibits the exercise of, or is" , "conditioned on the non-exercise of one or more of the rights that are" , "specifically granted under this License. You may not convey a covered" , "work if you are a party to an arrangement with a third party that is" , "in the business of distributing software, under which you make payment" , "to the third party based on the extent of your activity of conveying" , "the work, and under which the third party grants, to any of the" , "parties who would receive the covered work from you, a discriminatory" , "patent license (a) in connection with copies of the covered work" , "conveyed by you (or copies made from those copies), or (b) primarily" , "for and in connection with specific products or compilations that" , "contain the covered work, unless you entered into that arrangement," , "or that patent license was granted, prior to 28 March 2007." , "" , " Nothing in this License shall be construed as excluding or limiting" , "any implied license or other defenses to infringement that may" , "otherwise be available to you under applicable patent law." , "" , " 12. No Surrender of Others' Freedom." 
, "" , " If conditions are imposed on you (whether by court order, agreement or" , "otherwise) that contradict the conditions of this License, they do not" , "excuse you from the conditions of this License. If you cannot convey a" , "covered work so as to satisfy simultaneously your obligations under this" , "License and any other pertinent obligations, then as a consequence you may" , "not convey it at all. For example, if you agree to terms that obligate you" , "to collect a royalty for further conveying from those to whom you convey" , "the Program, the only way you could satisfy both those terms and this" , "License would be to refrain entirely from conveying the Program." , "" , " 13. Use with the GNU Affero General Public License." , "" , " Notwithstanding any other provision of this License, you have" , "permission to link or combine any covered work with a work licensed" , "under version 3 of the GNU Affero General Public License into a single" , "combined work, and to convey the resulting work. The terms of this" , "License will continue to apply to the part which is the covered work," , "but the special requirements of the GNU Affero General Public License," , "section 13, concerning interaction through a network will apply to the" , "combination as such." , "" , " 14. Revised Versions of this License." , "" , " The Free Software Foundation may publish revised and/or new versions of" , "the GNU General Public License from time to time. Such new versions will" , "be similar in spirit to the present version, but may differ in detail to" , "address new problems or concerns." , "" , " Each version is given a distinguishing version number. If the" , "Program specifies that a certain numbered version of the GNU General" , "Public License \"or any later version\" applies to it, you have the" , "option of following the terms and conditions either of that numbered" , "version or of any later version published by the Free Software" , "Foundation. If the Program does not specify a version number of the" , "GNU General Public License, you may choose any version ever published" , "by the Free Software Foundation." , "" , " If the Program specifies that a proxy can decide which future" , "versions of the GNU General Public License can be used, that proxy's" , "public statement of acceptance of a version permanently authorizes you" , "to choose that version for the Program." , "" , " Later license versions may give you additional or different" , "permissions. However, no additional obligations are imposed on any" , "author or copyright holder as a result of your choosing to follow a" , "later version." , "" , " 15. Disclaimer of Warranty." , "" , " THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY" , "APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT" , "HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY" , "OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO," , "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM" , "IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF" , "ALL NECESSARY SERVICING, REPAIR OR CORRECTION." , "" , " 16. Limitation of Liability." 
, "" , " IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS" , "THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY" , "GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE" , "USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF" , "DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD" , "PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS)," , "EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF" , "SUCH DAMAGES." , "" , " 17. Interpretation of Sections 15 and 16." , "" , " If the disclaimer of warranty and limitation of liability provided" , "above cannot be given local legal effect according to their terms," , "reviewing courts shall apply local law that most closely approximates" , "an absolute waiver of all civil liability in connection with the" , "Program, unless a warranty or assumption of liability accompanies a" , "copy of the Program in return for a fee." , "" , " END OF TERMS AND CONDITIONS" , "" , " How to Apply These Terms to Your New Programs" , "" , " If you develop a new program, and you want it to be of the greatest" , "possible use to the public, the best way to achieve this is to make it" , "free software which everyone can redistribute and change under these terms." , "" , " To do so, attach the following notices to the program. It is safest" , "to attach them to the start of each source file to most effectively" , "state the exclusion of warranty; and each file should have at least" , "the \"copyright\" line and a pointer to where the full notice is found." , "" , " " , " Copyright (C) " , "" , " This program is free software: you can redistribute it and/or modify" , " it under the terms of the GNU General Public License as published by" , " the Free Software Foundation, either version 3 of the License, or" , " (at your option) any later version." , "" , " This program is distributed in the hope that it will be useful," , " but WITHOUT ANY WARRANTY; without even the implied warranty of" , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" , " GNU General Public License for more details." , "" , " You should have received a copy of the GNU General Public License" , " along with this program. If not, see ." , "" , "Also add information on how to contact you by electronic and paper mail." , "" , " If the program does terminal interaction, make it output a short" , "notice like this when it starts in an interactive mode:" , "" , " Copyright (C) " , " This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'." , " This is free software, and you are welcome to redistribute it" , " under certain conditions; type `show c' for details." , "" , "The hypothetical commands `show w' and `show c' should show the appropriate" , "parts of the General Public License. Of course, your program's commands" , "might be different; for a GUI interface, you would use an \"about box\"." , "" , " You should also get your employer (if you work as a programmer) or school," , "if any, to sign a \"copyright disclaimer\" for the program, if necessary." , "For more information on this, and how to apply and follow the GNU GPL, see" , "." , "" , " The GNU General Public License does not permit incorporating your program" , "into proprietary programs. 
If your program is a subroutine library, you" , "may consider it more useful to permit linking proprietary applications with" , "the library. If this is what you want to do, use the GNU Lesser General" , "Public License instead of this License. But first, please read" , "." ] agplv3 :: License agplv3 = unlines [ " GNU AFFERO GENERAL PUBLIC LICENSE" , " Version 3, 19 November 2007" , "" , " Copyright (C) 2007 Free Software Foundation, Inc. " , " Everyone is permitted to copy and distribute verbatim copies" , " of this license document, but changing it is not allowed." , "" , " Preamble" , "" , " The GNU Affero General Public License is a free, copyleft license for" , "software and other kinds of works, specifically designed to ensure" , "cooperation with the community in the case of network server software." , "" , " The licenses for most software and other practical works are designed" , "to take away your freedom to share and change the works. By contrast," , "our General Public Licenses are intended to guarantee your freedom to" , "share and change all versions of a program--to make sure it remains free" , "software for all its users." , "" , " When we speak of free software, we are referring to freedom, not" , "price. Our General Public Licenses are designed to make sure that you" , "have the freedom to distribute copies of free software (and charge for" , "them if you wish), that you receive source code or can get it if you" , "want it, that you can change the software or use pieces of it in new" , "free programs, and that you know you can do these things." , "" , " Developers that use our General Public Licenses protect your rights" , "with two steps: (1) assert copyright on the software, and (2) offer" , "you this License which gives you legal permission to copy, distribute" , "and/or modify the software." , "" , " A secondary benefit of defending all users' freedom is that" , "improvements made in alternate versions of the program, if they" , "receive widespread use, become available for other developers to" , "incorporate. Many developers of free software are heartened and" , "encouraged by the resulting cooperation. However, in the case of" , "software used on network servers, this result may fail to come about." , "The GNU General Public License permits making a modified version and" , "letting the public access it on a server without ever releasing its" , "source code to the public." , "" , " The GNU Affero General Public License is designed specifically to" , "ensure that, in such cases, the modified source code becomes available" , "to the community. It requires the operator of a network server to" , "provide the source code of the modified version running there to the" , "users of that server. Therefore, public use of a modified version, on" , "a publicly accessible server, gives the public access to the source" , "code of the modified version." , "" , " An older license, called the Affero General Public License and" , "published by Affero, was designed to accomplish similar goals. This is" , "a different license, not a version of the Affero GPL, but Affero has" , "released a new version of the Affero GPL which permits relicensing under" , "this license." , "" , " The precise terms and conditions for copying, distribution and" , "modification follow." , "" , " TERMS AND CONDITIONS" , "" , " 0. Definitions." , "" , " \"This License\" refers to version 3 of the GNU Affero General Public License." 
, "" , " \"Copyright\" also means copyright-like laws that apply to other kinds of" , "works, such as semiconductor masks." , "" , " \"The Program\" refers to any copyrightable work licensed under this" , "License. Each licensee is addressed as \"you\". \"Licensees\" and" , "\"recipients\" may be individuals or organizations." , "" , " To \"modify\" a work means to copy from or adapt all or part of the work" , "in a fashion requiring copyright permission, other than the making of an" , "exact copy. The resulting work is called a \"modified version\" of the" , "earlier work or a work \"based on\" the earlier work." , "" , " A \"covered work\" means either the unmodified Program or a work based" , "on the Program." , "" , " To \"propagate\" a work means to do anything with it that, without" , "permission, would make you directly or secondarily liable for" , "infringement under applicable copyright law, except executing it on a" , "computer or modifying a private copy. Propagation includes copying," , "distribution (with or without modification), making available to the" , "public, and in some countries other activities as well." , "" , " To \"convey\" a work means any kind of propagation that enables other" , "parties to make or receive copies. Mere interaction with a user through" , "a computer network, with no transfer of a copy, is not conveying." , "" , " An interactive user interface displays \"Appropriate Legal Notices\"" , "to the extent that it includes a convenient and prominently visible" , "feature that (1) displays an appropriate copyright notice, and (2)" , "tells the user that there is no warranty for the work (except to the" , "extent that warranties are provided), that licensees may convey the" , "work under this License, and how to view a copy of this License. If" , "the interface presents a list of user commands or options, such as a" , "menu, a prominent item in the list meets this criterion." , "" , " 1. Source Code." , "" , " The \"source code\" for a work means the preferred form of the work" , "for making modifications to it. \"Object code\" means any non-source" , "form of a work." , "" , " A \"Standard Interface\" means an interface that either is an official" , "standard defined by a recognized standards body, or, in the case of" , "interfaces specified for a particular programming language, one that" , "is widely used among developers working in that language." , "" , " The \"System Libraries\" of an executable work include anything, other" , "than the work as a whole, that (a) is included in the normal form of" , "packaging a Major Component, but which is not part of that Major" , "Component, and (b) serves only to enable use of the work with that" , "Major Component, or to implement a Standard Interface for which an" , "implementation is available to the public in source code form. A" , "\"Major Component\", in this context, means a major essential component" , "(kernel, window system, and so on) of the specific operating system" , "(if any) on which the executable work runs, or a compiler used to" , "produce the work, or an object code interpreter used to run it." , "" , " The \"Corresponding Source\" for a work in object code form means all" , "the source code needed to generate, install, and (for an executable" , "work) run the object code and to modify the work, including scripts to" , "control those activities. 
However, it does not include the work's" , "System Libraries, or general-purpose tools or generally available free" , "programs which are used unmodified in performing those activities but" , "which are not part of the work. For example, Corresponding Source" , "includes interface definition files associated with source files for" , "the work, and the source code for shared libraries and dynamically" , "linked subprograms that the work is specifically designed to require," , "such as by intimate data communication or control flow between those" , "subprograms and other parts of the work." , "" , " The Corresponding Source need not include anything that users" , "can regenerate automatically from other parts of the Corresponding" , "Source." , "" , " The Corresponding Source for a work in source code form is that" , "same work." , "" , " 2. Basic Permissions." , "" , " All rights granted under this License are granted for the term of" , "copyright on the Program, and are irrevocable provided the stated" , "conditions are met. This License explicitly affirms your unlimited" , "permission to run the unmodified Program. The output from running a" , "covered work is covered by this License only if the output, given its" , "content, constitutes a covered work. This License acknowledges your" , "rights of fair use or other equivalent, as provided by copyright law." , "" , " You may make, run and propagate covered works that you do not" , "convey, without conditions so long as your license otherwise remains" , "in force. You may convey covered works to others for the sole purpose" , "of having them make modifications exclusively for you, or provide you" , "with facilities for running those works, provided that you comply with" , "the terms of this License in conveying all material for which you do" , "not control copyright. Those thus making or running the covered works" , "for you must do so exclusively on your behalf, under your direction" , "and control, on terms that prohibit them from making any copies of" , "your copyrighted material outside their relationship with you." , "" , " Conveying under any other circumstances is permitted solely under" , "the conditions stated below. Sublicensing is not allowed; section 10" , "makes it unnecessary." , "" , " 3. Protecting Users' Legal Rights From Anti-Circumvention Law." , "" , " No covered work shall be deemed part of an effective technological" , "measure under any applicable law fulfilling obligations under article" , "11 of the WIPO copyright treaty adopted on 20 December 1996, or" , "similar laws prohibiting or restricting circumvention of such" , "measures." , "" , " When you convey a covered work, you waive any legal power to forbid" , "circumvention of technological measures to the extent such circumvention" , "is effected by exercising rights under this License with respect to" , "the covered work, and you disclaim any intention to limit operation or" , "modification of the work as a means of enforcing, against the work's" , "users, your or third parties' legal rights to forbid circumvention of" , "technological measures." , "" , " 4. Conveying Verbatim Copies." 
, "" , " You may convey verbatim copies of the Program's source code as you" , "receive it, in any medium, provided that you conspicuously and" , "appropriately publish on each copy an appropriate copyright notice;" , "keep intact all notices stating that this License and any" , "non-permissive terms added in accord with section 7 apply to the code;" , "keep intact all notices of the absence of any warranty; and give all" , "recipients a copy of this License along with the Program." , "" , " You may charge any price or no price for each copy that you convey," , "and you may offer support or warranty protection for a fee." , "" , " 5. Conveying Modified Source Versions." , "" , " You may convey a work based on the Program, or the modifications to" , "produce it from the Program, in the form of source code under the" , "terms of section 4, provided that you also meet all of these conditions:" , "" , " a) The work must carry prominent notices stating that you modified" , " it, and giving a relevant date." , "" , " b) The work must carry prominent notices stating that it is" , " released under this License and any conditions added under section" , " 7. This requirement modifies the requirement in section 4 to" , " \"keep intact all notices\"." , "" , " c) You must license the entire work, as a whole, under this" , " License to anyone who comes into possession of a copy. This" , " License will therefore apply, along with any applicable section 7" , " additional terms, to the whole of the work, and all its parts," , " regardless of how they are packaged. This License gives no" , " permission to license the work in any other way, but it does not" , " invalidate such permission if you have separately received it." , "" , " d) If the work has interactive user interfaces, each must display" , " Appropriate Legal Notices; however, if the Program has interactive" , " interfaces that do not display Appropriate Legal Notices, your" , " work need not make them do so." , "" , " A compilation of a covered work with other separate and independent" , "works, which are not by their nature extensions of the covered work," , "and which are not combined with it such as to form a larger program," , "in or on a volume of a storage or distribution medium, is called an" , "\"aggregate\" if the compilation and its resulting copyright are not" , "used to limit the access or legal rights of the compilation's users" , "beyond what the individual works permit. Inclusion of a covered work" , "in an aggregate does not cause this License to apply to the other" , "parts of the aggregate." , "" , " 6. Conveying Non-Source Forms." , "" , " You may convey a covered work in object code form under the terms" , "of sections 4 and 5, provided that you also convey the" , "machine-readable Corresponding Source under the terms of this License," , "in one of these ways:" , "" , " a) Convey the object code in, or embodied in, a physical product" , " (including a physical distribution medium), accompanied by the" , " Corresponding Source fixed on a durable physical medium" , " customarily used for software interchange." 
, "" , " b) Convey the object code in, or embodied in, a physical product" , " (including a physical distribution medium), accompanied by a" , " written offer, valid for at least three years and valid for as" , " long as you offer spare parts or customer support for that product" , " model, to give anyone who possesses the object code either (1) a" , " copy of the Corresponding Source for all the software in the" , " product that is covered by this License, on a durable physical" , " medium customarily used for software interchange, for a price no" , " more than your reasonable cost of physically performing this" , " conveying of source, or (2) access to copy the" , " Corresponding Source from a network server at no charge." , "" , " c) Convey individual copies of the object code with a copy of the" , " written offer to provide the Corresponding Source. This" , " alternative is allowed only occasionally and noncommercially, and" , " only if you received the object code with such an offer, in accord" , " with subsection 6b." , "" , " d) Convey the object code by offering access from a designated" , " place (gratis or for a charge), and offer equivalent access to the" , " Corresponding Source in the same way through the same place at no" , " further charge. You need not require recipients to copy the" , " Corresponding Source along with the object code. If the place to" , " copy the object code is a network server, the Corresponding Source" , " may be on a different server (operated by you or a third party)" , " that supports equivalent copying facilities, provided you maintain" , " clear directions next to the object code saying where to find the" , " Corresponding Source. Regardless of what server hosts the" , " Corresponding Source, you remain obligated to ensure that it is" , " available for as long as needed to satisfy these requirements." , "" , " e) Convey the object code using peer-to-peer transmission, provided" , " you inform other peers where the object code and Corresponding" , " Source of the work are being offered to the general public at no" , " charge under subsection 6d." , "" , " A separable portion of the object code, whose source code is excluded" , "from the Corresponding Source as a System Library, need not be" , "included in conveying the object code work." , "" , " A \"User Product\" is either (1) a \"consumer product\", which means any" , "tangible personal property which is normally used for personal, family," , "or household purposes, or (2) anything designed or sold for incorporation" , "into a dwelling. In determining whether a product is a consumer product," , "doubtful cases shall be resolved in favor of coverage. For a particular" , "product received by a particular user, \"normally used\" refers to a" , "typical or common use of that class of product, regardless of the status" , "of the particular user or of the way in which the particular user" , "actually uses, or expects or is expected to use, the product. A product" , "is a consumer product regardless of whether the product has substantial" , "commercial, industrial or non-consumer uses, unless such uses represent" , "the only significant mode of use of the product." , "" , " \"Installation Information\" for a User Product means any methods," , "procedures, authorization keys, or other information required to install" , "and execute modified versions of a covered work in that User Product from" , "a modified version of its Corresponding Source. 
The information must" , "suffice to ensure that the continued functioning of the modified object" , "code is in no case prevented or interfered with solely because" , "modification has been made." , "" , " If you convey an object code work under this section in, or with, or" , "specifically for use in, a User Product, and the conveying occurs as" , "part of a transaction in which the right of possession and use of the" , "User Product is transferred to the recipient in perpetuity or for a" , "fixed term (regardless of how the transaction is characterized), the" , "Corresponding Source conveyed under this section must be accompanied" , "by the Installation Information. But this requirement does not apply" , "if neither you nor any third party retains the ability to install" , "modified object code on the User Product (for example, the work has" , "been installed in ROM)." , "" , " The requirement to provide Installation Information does not include a" , "requirement to continue to provide support service, warranty, or updates" , "for a work that has been modified or installed by the recipient, or for" , "the User Product in which it has been modified or installed. Access to a" , "network may be denied when the modification itself materially and" , "adversely affects the operation of the network or violates the rules and" , "protocols for communication across the network." , "" , " Corresponding Source conveyed, and Installation Information provided," , "in accord with this section must be in a format that is publicly" , "documented (and with an implementation available to the public in" , "source code form), and must require no special password or key for" , "unpacking, reading or copying." , "" , " 7. Additional Terms." , "" , " \"Additional permissions\" are terms that supplement the terms of this" , "License by making exceptions from one or more of its conditions." , "Additional permissions that are applicable to the entire Program shall" , "be treated as though they were included in this License, to the extent" , "that they are valid under applicable law. If additional permissions" , "apply only to part of the Program, that part may be used separately" , "under those permissions, but the entire Program remains governed by" , "this License without regard to the additional permissions." , "" , " When you convey a copy of a covered work, you may at your option" , "remove any additional permissions from that copy, or from any part of" , "it. (Additional permissions may be written to require their own" , "removal in certain cases when you modify the work.) You may place" , "additional permissions on material, added by you to a covered work," , "for which you have or can give appropriate copyright permission." 
, "" , " Notwithstanding any other provision of this License, for material you" , "add to a covered work, you may (if authorized by the copyright holders of" , "that material) supplement the terms of this License with terms:" , "" , " a) Disclaiming warranty or limiting liability differently from the" , " terms of sections 15 and 16 of this License; or" , "" , " b) Requiring preservation of specified reasonable legal notices or" , " author attributions in that material or in the Appropriate Legal" , " Notices displayed by works containing it; or" , "" , " c) Prohibiting misrepresentation of the origin of that material, or" , " requiring that modified versions of such material be marked in" , " reasonable ways as different from the original version; or" , "" , " d) Limiting the use for publicity purposes of names of licensors or" , " authors of the material; or" , "" , " e) Declining to grant rights under trademark law for use of some" , " trade names, trademarks, or service marks; or" , "" , " f) Requiring indemnification of licensors and authors of that" , " material by anyone who conveys the material (or modified versions of" , " it) with contractual assumptions of liability to the recipient, for" , " any liability that these contractual assumptions directly impose on" , " those licensors and authors." , "" , " All other non-permissive additional terms are considered \"further" , "restrictions\" within the meaning of section 10. If the Program as you" , "received it, or any part of it, contains a notice stating that it is" , "governed by this License along with a term that is a further" , "restriction, you may remove that term. If a license document contains" , "a further restriction but permits relicensing or conveying under this" , "License, you may add to a covered work material governed by the terms" , "of that license document, provided that the further restriction does" , "not survive such relicensing or conveying." , "" , " If you add terms to a covered work in accord with this section, you" , "must place, in the relevant source files, a statement of the" , "additional terms that apply to those files, or a notice indicating" , "where to find the applicable terms." , "" , " Additional terms, permissive or non-permissive, may be stated in the" , "form of a separately written license, or stated as exceptions;" , "the above requirements apply either way." , "" , " 8. Termination." , "" , " You may not propagate or modify a covered work except as expressly" , "provided under this License. Any attempt otherwise to propagate or" , "modify it is void, and will automatically terminate your rights under" , "this License (including any patent licenses granted under the third" , "paragraph of section 11)." , "" , " However, if you cease all violation of this License, then your" , "license from a particular copyright holder is reinstated (a)" , "provisionally, unless and until the copyright holder explicitly and" , "finally terminates your license, and (b) permanently, if the copyright" , "holder fails to notify you of the violation by some reasonable means" , "prior to 60 days after the cessation." , "" , " Moreover, your license from a particular copyright holder is" , "reinstated permanently if the copyright holder notifies you of the" , "violation by some reasonable means, this is the first time you have" , "received notice of violation of this License (for any work) from that" , "copyright holder, and you cure the violation prior to 30 days after" , "your receipt of the notice." 
, "" , " Termination of your rights under this section does not terminate the" , "licenses of parties who have received copies or rights from you under" , "this License. If your rights have been terminated and not permanently" , "reinstated, you do not qualify to receive new licenses for the same" , "material under section 10." , "" , " 9. Acceptance Not Required for Having Copies." , "" , " You are not required to accept this License in order to receive or" , "run a copy of the Program. Ancillary propagation of a covered work" , "occurring solely as a consequence of using peer-to-peer transmission" , "to receive a copy likewise does not require acceptance. However," , "nothing other than this License grants you permission to propagate or" , "modify any covered work. These actions infringe copyright if you do" , "not accept this License. Therefore, by modifying or propagating a" , "covered work, you indicate your acceptance of this License to do so." , "" , " 10. Automatic Licensing of Downstream Recipients." , "" , " Each time you convey a covered work, the recipient automatically" , "receives a license from the original licensors, to run, modify and" , "propagate that work, subject to this License. You are not responsible" , "for enforcing compliance by third parties with this License." , "" , " An \"entity transaction\" is a transaction transferring control of an" , "organization, or substantially all assets of one, or subdividing an" , "organization, or merging organizations. If propagation of a covered" , "work results from an entity transaction, each party to that" , "transaction who receives a copy of the work also receives whatever" , "licenses to the work the party's predecessor in interest had or could" , "give under the previous paragraph, plus a right to possession of the" , "Corresponding Source of the work from the predecessor in interest, if" , "the predecessor has it or can get it with reasonable efforts." , "" , " You may not impose any further restrictions on the exercise of the" , "rights granted or affirmed under this License. For example, you may" , "not impose a license fee, royalty, or other charge for exercise of" , "rights granted under this License, and you may not initiate litigation" , "(including a cross-claim or counterclaim in a lawsuit) alleging that" , "any patent claim is infringed by making, using, selling, offering for" , "sale, or importing the Program or any portion of it." , "" , " 11. Patents." , "" , " A \"contributor\" is a copyright holder who authorizes use under this" , "License of the Program or a work on which the Program is based. The" , "work thus licensed is called the contributor's \"contributor version\"." , "" , " A contributor's \"essential patent claims\" are all patent claims" , "owned or controlled by the contributor, whether already acquired or" , "hereafter acquired, that would be infringed by some manner, permitted" , "by this License, of making, using, or selling its contributor version," , "but do not include claims that would be infringed only as a" , "consequence of further modification of the contributor version. For" , "purposes of this definition, \"control\" includes the right to grant" , "patent sublicenses in a manner consistent with the requirements of" , "this License." 
, "" , " Each contributor grants you a non-exclusive, worldwide, royalty-free" , "patent license under the contributor's essential patent claims, to" , "make, use, sell, offer for sale, import and otherwise run, modify and" , "propagate the contents of its contributor version." , "" , " In the following three paragraphs, a \"patent license\" is any express" , "agreement or commitment, however denominated, not to enforce a patent" , "(such as an express permission to practice a patent or covenant not to" , "sue for patent infringement). To \"grant\" such a patent license to a" , "party means to make such an agreement or commitment not to enforce a" , "patent against the party." , "" , " If you convey a covered work, knowingly relying on a patent license," , "and the Corresponding Source of the work is not available for anyone" , "to copy, free of charge and under the terms of this License, through a" , "publicly available network server or other readily accessible means," , "then you must either (1) cause the Corresponding Source to be so" , "available, or (2) arrange to deprive yourself of the benefit of the" , "patent license for this particular work, or (3) arrange, in a manner" , "consistent with the requirements of this License, to extend the patent" , "license to downstream recipients. \"Knowingly relying\" means you have" , "actual knowledge that, but for the patent license, your conveying the" , "covered work in a country, or your recipient's use of the covered work" , "in a country, would infringe one or more identifiable patents in that" , "country that you have reason to believe are valid." , "" , " If, pursuant to or in connection with a single transaction or" , "arrangement, you convey, or propagate by procuring conveyance of, a" , "covered work, and grant a patent license to some of the parties" , "receiving the covered work authorizing them to use, propagate, modify" , "or convey a specific copy of the covered work, then the patent license" , "you grant is automatically extended to all recipients of the covered" , "work and works based on it." , "" , " A patent license is \"discriminatory\" if it does not include within" , "the scope of its coverage, prohibits the exercise of, or is" , "conditioned on the non-exercise of one or more of the rights that are" , "specifically granted under this License. You may not convey a covered" , "work if you are a party to an arrangement with a third party that is" , "in the business of distributing software, under which you make payment" , "to the third party based on the extent of your activity of conveying" , "the work, and under which the third party grants, to any of the" , "parties who would receive the covered work from you, a discriminatory" , "patent license (a) in connection with copies of the covered work" , "conveyed by you (or copies made from those copies), or (b) primarily" , "for and in connection with specific products or compilations that" , "contain the covered work, unless you entered into that arrangement," , "or that patent license was granted, prior to 28 March 2007." , "" , " Nothing in this License shall be construed as excluding or limiting" , "any implied license or other defenses to infringement that may" , "otherwise be available to you under applicable patent law." , "" , " 12. No Surrender of Others' Freedom." 
, "" , " If conditions are imposed on you (whether by court order, agreement or" , "otherwise) that contradict the conditions of this License, they do not" , "excuse you from the conditions of this License. If you cannot convey a" , "covered work so as to satisfy simultaneously your obligations under this" , "License and any other pertinent obligations, then as a consequence you may" , "not convey it at all. For example, if you agree to terms that obligate you" , "to collect a royalty for further conveying from those to whom you convey" , "the Program, the only way you could satisfy both those terms and this" , "License would be to refrain entirely from conveying the Program." , "" , " 13. Remote Network Interaction; Use with the GNU General Public License." , "" , " Notwithstanding any other provision of this License, if you modify the" , "Program, your modified version must prominently offer all users" , "interacting with it remotely through a computer network (if your version" , "supports such interaction) an opportunity to receive the Corresponding" , "Source of your version by providing access to the Corresponding Source" , "from a network server at no charge, through some standard or customary" , "means of facilitating copying of software. This Corresponding Source" , "shall include the Corresponding Source for any work covered by version 3" , "of the GNU General Public License that is incorporated pursuant to the" , "following paragraph." , "" , " Notwithstanding any other provision of this License, you have" , "permission to link or combine any covered work with a work licensed" , "under version 3 of the GNU General Public License into a single" , "combined work, and to convey the resulting work. The terms of this" , "License will continue to apply to the part which is the covered work," , "but the work with which it is combined will remain governed by version" , "3 of the GNU General Public License." , "" , " 14. Revised Versions of this License." , "" , " The Free Software Foundation may publish revised and/or new versions of" , "the GNU Affero General Public License from time to time. Such new versions" , "will be similar in spirit to the present version, but may differ in detail to" , "address new problems or concerns." , "" , " Each version is given a distinguishing version number. If the" , "Program specifies that a certain numbered version of the GNU Affero General" , "Public License \"or any later version\" applies to it, you have the" , "option of following the terms and conditions either of that numbered" , "version or of any later version published by the Free Software" , "Foundation. If the Program does not specify a version number of the" , "GNU Affero General Public License, you may choose any version ever published" , "by the Free Software Foundation." , "" , " If the Program specifies that a proxy can decide which future" , "versions of the GNU Affero General Public License can be used, that proxy's" , "public statement of acceptance of a version permanently authorizes you" , "to choose that version for the Program." , "" , " Later license versions may give you additional or different" , "permissions. However, no additional obligations are imposed on any" , "author or copyright holder as a result of your choosing to follow a" , "later version." , "" , " 15. Disclaimer of Warranty." , "" , " THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY" , "APPLICABLE LAW. 
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT" , "HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY" , "OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO," , "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM" , "IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF" , "ALL NECESSARY SERVICING, REPAIR OR CORRECTION." , "" , " 16. Limitation of Liability." , "" , " IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS" , "THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY" , "GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE" , "USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF" , "DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD" , "PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS)," , "EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF" , "SUCH DAMAGES." , "" , " 17. Interpretation of Sections 15 and 16." , "" , " If the disclaimer of warranty and limitation of liability provided" , "above cannot be given local legal effect according to their terms," , "reviewing courts shall apply local law that most closely approximates" , "an absolute waiver of all civil liability in connection with the" , "Program, unless a warranty or assumption of liability accompanies a" , "copy of the Program in return for a fee." , "" , " END OF TERMS AND CONDITIONS" , "" , " How to Apply These Terms to Your New Programs" , "" , " If you develop a new program, and you want it to be of the greatest" , "possible use to the public, the best way to achieve this is to make it" , "free software which everyone can redistribute and change under these terms." , "" , " To do so, attach the following notices to the program. It is safest" , "to attach them to the start of each source file to most effectively" , "state the exclusion of warranty; and each file should have at least" , "the \"copyright\" line and a pointer to where the full notice is found." , "" , " " , " Copyright (C) " , "" , " This program is free software: you can redistribute it and/or modify" , " it under the terms of the GNU Affero General Public License as published by" , " the Free Software Foundation, either version 3 of the License, or" , " (at your option) any later version." , "" , " This program is distributed in the hope that it will be useful," , " but WITHOUT ANY WARRANTY; without even the implied warranty of" , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" , " GNU Affero General Public License for more details." , "" , " You should have received a copy of the GNU Affero General Public License" , " along with this program. If not, see ." , "" , "Also add information on how to contact you by electronic and paper mail." , "" , " If your software can interact with users remotely through a computer" , "network, you should also make sure that it provides a way for users to" , "get its source. For example, if your program is a web application, its" , "interface could display a \"Source\" link that leads users to an archive" , "of the code. There are many ways you could offer source, and different" , "solutions will be better for different programs; see section 13 for the" , "specific requirements." 
, "" , " You should also get your employer (if you work as a programmer) or school," , "if any, to sign a \"copyright disclaimer\" for the program, if necessary." , "For more information on this, and how to apply and follow the GNU AGPL, see" , "." ] lgpl21 :: License lgpl21 = unlines [ " GNU LESSER GENERAL PUBLIC LICENSE" , " Version 2.1, February 1999" , "" , " Copyright (C) 1991, 1999 Free Software Foundation, Inc." , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" , " Everyone is permitted to copy and distribute verbatim copies" , " of this license document, but changing it is not allowed." , "" , "[This is the first released version of the Lesser GPL. It also counts" , " as the successor of the GNU Library Public License, version 2, hence" , " the version number 2.1.]" , "" , " Preamble" , "" , " The licenses for most software are designed to take away your" , "freedom to share and change it. By contrast, the GNU General Public" , "Licenses are intended to guarantee your freedom to share and change" , "free software--to make sure the software is free for all its users." , "" , " This license, the Lesser General Public License, applies to some" , "specially designated software packages--typically libraries--of the" , "Free Software Foundation and other authors who decide to use it. You" , "can use it too, but we suggest you first think carefully about whether" , "this license or the ordinary General Public License is the better" , "strategy to use in any particular case, based on the explanations below." , "" , " When we speak of free software, we are referring to freedom of use," , "not price. Our General Public Licenses are designed to make sure that" , "you have the freedom to distribute copies of free software (and charge" , "for this service if you wish); that you receive source code or can get" , "it if you want it; that you can change the software and use pieces of" , "it in new free programs; and that you are informed that you can do" , "these things." , "" , " To protect your rights, we need to make restrictions that forbid" , "distributors to deny you these rights or to ask you to surrender these" , "rights. These restrictions translate to certain responsibilities for" , "you if you distribute copies of the library or if you modify it." , "" , " For example, if you distribute copies of the library, whether gratis" , "or for a fee, you must give the recipients all the rights that we gave" , "you. You must make sure that they, too, receive or can get the source" , "code. If you link other code with the library, you must provide" , "complete object files to the recipients, so that they can relink them" , "with the library after making changes to the library and recompiling" , "it. And you must show them these terms so they know their rights." , "" , " We protect your rights with a two-step method: (1) we copyright the" , "library, and (2) we offer you this license, which gives you legal" , "permission to copy, distribute and/or modify the library." , "" , " To protect each distributor, we want to make it very clear that" , "there is no warranty for the free library. Also, if the library is" , "modified by someone else and passed on, the recipients should know" , "that what they have is not the original version, so that the original" , "author's reputation will not be affected by problems that might be" , "introduced by others." , "" , " Finally, software patents pose a constant threat to the existence of" , "any free program. 
We wish to make sure that a company cannot" , "effectively restrict the users of a free program by obtaining a" , "restrictive license from a patent holder. Therefore, we insist that" , "any patent license obtained for a version of the library must be" , "consistent with the full freedom of use specified in this license." , "" , " Most GNU software, including some libraries, is covered by the" , "ordinary GNU General Public License. This license, the GNU Lesser" , "General Public License, applies to certain designated libraries, and" , "is quite different from the ordinary General Public License. We use" , "this license for certain libraries in order to permit linking those" , "libraries into non-free programs." , "" , " When a program is linked with a library, whether statically or using" , "a shared library, the combination of the two is legally speaking a" , "combined work, a derivative of the original library. The ordinary" , "General Public License therefore permits such linking only if the" , "entire combination fits its criteria of freedom. The Lesser General" , "Public License permits more lax criteria for linking other code with" , "the library." , "" , " We call this license the \"Lesser\" General Public License because it" , "does Less to protect the user's freedom than the ordinary General" , "Public License. It also provides other free software developers Less" , "of an advantage over competing non-free programs. These disadvantages" , "are the reason we use the ordinary General Public License for many" , "libraries. However, the Lesser license provides advantages in certain" , "special circumstances." , "" , " For example, on rare occasions, there may be a special need to" , "encourage the widest possible use of a certain library, so that it becomes" , "a de-facto standard. To achieve this, non-free programs must be" , "allowed to use the library. A more frequent case is that a free" , "library does the same job as widely used non-free libraries. In this" , "case, there is little to gain by limiting the free library to free" , "software only, so we use the Lesser General Public License." , "" , " In other cases, permission to use a particular library in non-free" , "programs enables a greater number of people to use a large body of" , "free software. For example, permission to use the GNU C Library in" , "non-free programs enables many more people to use the whole GNU" , "operating system, as well as its variant, the GNU/Linux operating" , "system." , "" , " Although the Lesser General Public License is Less protective of the" , "users' freedom, it does ensure that the user of a program that is" , "linked with the Library has the freedom and the wherewithal to run" , "that program using a modified version of the Library." , "" , " The precise terms and conditions for copying, distribution and" , "modification follow. Pay close attention to the difference between a" , "\"work based on the library\" and a \"work that uses the library\". The" , "former contains code derived from the library, whereas the latter must" , "be combined with the library in order to run." , "" , " GNU LESSER GENERAL PUBLIC LICENSE" , " TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION" , "" , " 0. This License Agreement applies to any software library or other" , "program which contains a notice placed by the copyright holder or" , "other authorized party saying it may be distributed under the terms of" , "this Lesser General Public License (also called \"this License\")." 
, "Each licensee is addressed as \"you\"." , "" , " A \"library\" means a collection of software functions and/or data" , "prepared so as to be conveniently linked with application programs" , "(which use some of those functions and data) to form executables." , "" , " The \"Library\", below, refers to any such software library or work" , "which has been distributed under these terms. A \"work based on the" , "Library\" means either the Library or any derivative work under" , "copyright law: that is to say, a work containing the Library or a" , "portion of it, either verbatim or with modifications and/or translated" , "straightforwardly into another language. (Hereinafter, translation is" , "included without limitation in the term \"modification\".)" , "" , " \"Source code\" for a work means the preferred form of the work for" , "making modifications to it. For a library, complete source code means" , "all the source code for all modules it contains, plus any associated" , "interface definition files, plus the scripts used to control compilation" , "and installation of the library." , "" , " Activities other than copying, distribution and modification are not" , "covered by this License; they are outside its scope. The act of" , "running a program using the Library is not restricted, and output from" , "such a program is covered only if its contents constitute a work based" , "on the Library (independent of the use of the Library in a tool for" , "writing it). Whether that is true depends on what the Library does" , "and what the program that uses the Library does." , "" , " 1. You may copy and distribute verbatim copies of the Library's" , "complete source code as you receive it, in any medium, provided that" , "you conspicuously and appropriately publish on each copy an" , "appropriate copyright notice and disclaimer of warranty; keep intact" , "all the notices that refer to this License and to the absence of any" , "warranty; and distribute a copy of this License along with the" , "Library." , "" , " You may charge a fee for the physical act of transferring a copy," , "and you may at your option offer warranty protection in exchange for a" , "fee." , "" , " 2. You may modify your copy or copies of the Library or any portion" , "of it, thus forming a work based on the Library, and copy and" , "distribute such modifications or work under the terms of Section 1" , "above, provided that you also meet all of these conditions:" , "" , " a) The modified work must itself be a software library." , "" , " b) You must cause the files modified to carry prominent notices" , " stating that you changed the files and the date of any change." , "" , " c) You must cause the whole of the work to be licensed at no" , " charge to all third parties under the terms of this License." , "" , " d) If a facility in the modified Library refers to a function or a" , " table of data to be supplied by an application program that uses" , " the facility, other than as an argument passed when the facility" , " is invoked, then you must make a good faith effort to ensure that," , " in the event an application does not supply such function or" , " table, the facility still operates, and performs whatever part of" , " its purpose remains meaningful." , "" , " (For example, a function in a library to compute square roots has" , " a purpose that is entirely well-defined independent of the" , " application. 
Therefore, Subsection 2d requires that any" , " application-supplied function or table used by this function must" , " be optional: if the application does not supply it, the square" , " root function must still compute square roots.)" , "" , "These requirements apply to the modified work as a whole. If" , "identifiable sections of that work are not derived from the Library," , "and can be reasonably considered independent and separate works in" , "themselves, then this License, and its terms, do not apply to those" , "sections when you distribute them as separate works. But when you" , "distribute the same sections as part of a whole which is a work based" , "on the Library, the distribution of the whole must be on the terms of" , "this License, whose permissions for other licensees extend to the" , "entire whole, and thus to each and every part regardless of who wrote" , "it." , "" , "Thus, it is not the intent of this section to claim rights or contest" , "your rights to work written entirely by you; rather, the intent is to" , "exercise the right to control the distribution of derivative or" , "collective works based on the Library." , "" , "In addition, mere aggregation of another work not based on the Library" , "with the Library (or with a work based on the Library) on a volume of" , "a storage or distribution medium does not bring the other work under" , "the scope of this License." , "" , " 3. You may opt to apply the terms of the ordinary GNU General Public" , "License instead of this License to a given copy of the Library. To do" , "this, you must alter all the notices that refer to this License, so" , "that they refer to the ordinary GNU General Public License, version 2," , "instead of to this License. (If a newer version than version 2 of the" , "ordinary GNU General Public License has appeared, then you can specify" , "that version instead if you wish.) Do not make any other change in" , "these notices." , "" , " Once this change is made in a given copy, it is irreversible for" , "that copy, so the ordinary GNU General Public License applies to all" , "subsequent copies and derivative works made from that copy." , "" , " This option is useful when you wish to copy part of the code of" , "the Library into a program that is not a library." , "" , " 4. You may copy and distribute the Library (or a portion or" , "derivative of it, under Section 2) in object code or executable form" , "under the terms of Sections 1 and 2 above provided that you accompany" , "it with the complete corresponding machine-readable source code, which" , "must be distributed under the terms of Sections 1 and 2 above on a" , "medium customarily used for software interchange." , "" , " If distribution of object code is made by offering access to copy" , "from a designated place, then offering equivalent access to copy the" , "source code from the same place satisfies the requirement to" , "distribute the source code, even though third parties are not" , "compelled to copy the source along with the object code." , "" , " 5. A program that contains no derivative of any portion of the" , "Library, but is designed to work with the Library by being compiled or" , "linked with it, is called a \"work that uses the Library\". Such a" , "work, in isolation, is not a derivative work of the Library, and" , "therefore falls outside the scope of this License." 
, "" , " However, linking a \"work that uses the Library\" with the Library" , "creates an executable that is a derivative of the Library (because it" , "contains portions of the Library), rather than a \"work that uses the" , "library\". The executable is therefore covered by this License." , "Section 6 states terms for distribution of such executables." , "" , " When a \"work that uses the Library\" uses material from a header file" , "that is part of the Library, the object code for the work may be a" , "derivative work of the Library even though the source code is not." , "Whether this is true is especially significant if the work can be" , "linked without the Library, or if the work is itself a library. The" , "threshold for this to be true is not precisely defined by law." , "" , " If such an object file uses only numerical parameters, data" , "structure layouts and accessors, and small macros and small inline" , "functions (ten lines or less in length), then the use of the object" , "file is unrestricted, regardless of whether it is legally a derivative" , "work. (Executables containing this object code plus portions of the" , "Library will still fall under Section 6.)" , "" , " Otherwise, if the work is a derivative of the Library, you may" , "distribute the object code for the work under the terms of Section 6." , "Any executables containing that work also fall under Section 6," , "whether or not they are linked directly with the Library itself." , "" , " 6. As an exception to the Sections above, you may also combine or" , "link a \"work that uses the Library\" with the Library to produce a" , "work containing portions of the Library, and distribute that work" , "under terms of your choice, provided that the terms permit" , "modification of the work for the customer's own use and reverse" , "engineering for debugging such modifications." , "" , " You must give prominent notice with each copy of the work that the" , "Library is used in it and that the Library and its use are covered by" , "this License. You must supply a copy of this License. If the work" , "during execution displays copyright notices, you must include the" , "copyright notice for the Library among them, as well as a reference" , "directing the user to the copy of this License. Also, you must do one" , "of these things:" , "" , " a) Accompany the work with the complete corresponding" , " machine-readable source code for the Library including whatever" , " changes were used in the work (which must be distributed under" , " Sections 1 and 2 above); and, if the work is an executable linked" , " with the Library, with the complete machine-readable \"work that" , " uses the Library\", as object code and/or source code, so that the" , " user can modify the Library and then relink to produce a modified" , " executable containing the modified Library. (It is understood" , " that the user who changes the contents of definitions files in the" , " Library will not necessarily be able to recompile the application" , " to use the modified definitions.)" , "" , " b) Use a suitable shared library mechanism for linking with the" , " Library. 
A suitable mechanism is one that (1) uses at run time a" , " copy of the library already present on the user's computer system," , " rather than copying library functions into the executable, and (2)" , " will operate properly with a modified version of the library, if" , " the user installs one, as long as the modified version is" , " interface-compatible with the version that the work was made with." , "" , " c) Accompany the work with a written offer, valid for at" , " least three years, to give the same user the materials" , " specified in Subsection 6a, above, for a charge no more" , " than the cost of performing this distribution." , "" , " d) If distribution of the work is made by offering access to copy" , " from a designated place, offer equivalent access to copy the above" , " specified materials from the same place." , "" , " e) Verify that the user has already received a copy of these" , " materials or that you have already sent this user a copy." , "" , " For an executable, the required form of the \"work that uses the" , "Library\" must include any data and utility programs needed for" , "reproducing the executable from it. However, as a special exception," , "the materials to be distributed need not include anything that is" , "normally distributed (in either source or binary form) with the major" , "components (compiler, kernel, and so on) of the operating system on" , "which the executable runs, unless that component itself accompanies" , "the executable." , "" , " It may happen that this requirement contradicts the license" , "restrictions of other proprietary libraries that do not normally" , "accompany the operating system. Such a contradiction means you cannot" , "use both them and the Library together in an executable that you" , "distribute." , "" , " 7. You may place library facilities that are a work based on the" , "Library side-by-side in a single library together with other library" , "facilities not covered by this License, and distribute such a combined" , "library, provided that the separate distribution of the work based on" , "the Library and of the other library facilities is otherwise" , "permitted, and provided that you do these two things:" , "" , " a) Accompany the combined library with a copy of the same work" , " based on the Library, uncombined with any other library" , " facilities. This must be distributed under the terms of the" , " Sections above." , "" , " b) Give prominent notice with the combined library of the fact" , " that part of it is a work based on the Library, and explaining" , " where to find the accompanying uncombined form of the same work." , "" , " 8. You may not copy, modify, sublicense, link with, or distribute" , "the Library except as expressly provided under this License. Any" , "attempt otherwise to copy, modify, sublicense, link with, or" , "distribute the Library is void, and will automatically terminate your" , "rights under this License. However, parties who have received copies," , "or rights, from you under this License will not have their licenses" , "terminated so long as such parties remain in full compliance." , "" , " 9. You are not required to accept this License, since you have not" , "signed it. However, nothing else grants you permission to modify or" , "distribute the Library or its derivative works. These actions are" , "prohibited by law if you do not accept this License. 
Therefore, by" , "modifying or distributing the Library (or any work based on the" , "Library), you indicate your acceptance of this License to do so, and" , "all its terms and conditions for copying, distributing or modifying" , "the Library or works based on it." , "" , " 10. Each time you redistribute the Library (or any work based on the" , "Library), the recipient automatically receives a license from the" , "original licensor to copy, distribute, link with or modify the Library" , "subject to these terms and conditions. You may not impose any further" , "restrictions on the recipients' exercise of the rights granted herein." , "You are not responsible for enforcing compliance by third parties with" , "this License." , "" , " 11. If, as a consequence of a court judgment or allegation of patent" , "infringement or for any other reason (not limited to patent issues)," , "conditions are imposed on you (whether by court order, agreement or" , "otherwise) that contradict the conditions of this License, they do not" , "excuse you from the conditions of this License. If you cannot" , "distribute so as to satisfy simultaneously your obligations under this" , "License and any other pertinent obligations, then as a consequence you" , "may not distribute the Library at all. For example, if a patent" , "license would not permit royalty-free redistribution of the Library by" , "all those who receive copies directly or indirectly through you, then" , "the only way you could satisfy both it and this License would be to" , "refrain entirely from distribution of the Library." , "" , "If any portion of this section is held invalid or unenforceable under any" , "particular circumstance, the balance of the section is intended to apply," , "and the section as a whole is intended to apply in other circumstances." , "" , "It is not the purpose of this section to induce you to infringe any" , "patents or other property right claims or to contest validity of any" , "such claims; this section has the sole purpose of protecting the" , "integrity of the free software distribution system which is" , "implemented by public license practices. Many people have made" , "generous contributions to the wide range of software distributed" , "through that system in reliance on consistent application of that" , "system; it is up to the author/donor to decide if he or she is willing" , "to distribute software through any other system and a licensee cannot" , "impose that choice." , "" , "This section is intended to make thoroughly clear what is believed to" , "be a consequence of the rest of this License." , "" , " 12. If the distribution and/or use of the Library is restricted in" , "certain countries either by patents or by copyrighted interfaces, the" , "original copyright holder who places the Library under this License may add" , "an explicit geographical distribution limitation excluding those countries," , "so that distribution is permitted only in or among countries not thus" , "excluded. In such case, this License incorporates the limitation as if" , "written in the body of this License." , "" , " 13. The Free Software Foundation may publish revised and/or new" , "versions of the Lesser General Public License from time to time." , "Such new versions will be similar in spirit to the present version," , "but may differ in detail to address new problems or concerns." , "" , "Each version is given a distinguishing version number. 
If the Library" , "specifies a version number of this License which applies to it and" , "\"any later version\", you have the option of following the terms and" , "conditions either of that version or of any later version published by" , "the Free Software Foundation. If the Library does not specify a" , "license version number, you may choose any version ever published by" , "the Free Software Foundation." , "" , " 14. If you wish to incorporate parts of the Library into other free" , "programs whose distribution conditions are incompatible with these," , "write to the author to ask for permission. For software which is" , "copyrighted by the Free Software Foundation, write to the Free" , "Software Foundation; we sometimes make exceptions for this. Our" , "decision will be guided by the two goals of preserving the free status" , "of all derivatives of our free software and of promoting the sharing" , "and reuse of software generally." , "" , " NO WARRANTY" , "" , " 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO" , "WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW." , "EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR" , "OTHER PARTIES PROVIDE THE LIBRARY \"AS IS\" WITHOUT WARRANTY OF ANY" , "KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE" , "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE" , "LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME" , "THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION." , "" , " 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN" , "WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY" , "AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU" , "FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR" , "CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE" , "LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING" , "RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A" , "FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF" , "SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH" , "DAMAGES." , "" , " END OF TERMS AND CONDITIONS" , "" , " How to Apply These Terms to Your New Libraries" , "" , " If you develop a new library, and you want it to be of the greatest" , "possible use to the public, we recommend making it free software that" , "everyone can redistribute and change. You can do so by permitting" , "redistribution under these terms (or, alternatively, under the terms of the" , "ordinary General Public License)." , "" , " To apply these terms, attach the following notices to the library. It is" , "safest to attach them to the start of each source file to most effectively" , "convey the exclusion of warranty; and each file should have at least the" , "\"copyright\" line and a pointer to where the full notice is found." , "" , " " , " Copyright (C) " , "" , " This library is free software; you can redistribute it and/or" , " modify it under the terms of the GNU Lesser General Public" , " License as published by the Free Software Foundation; either" , " version 2.1 of the License, or (at your option) any later version." , "" , " This library is distributed in the hope that it will be useful," , " but WITHOUT ANY WARRANTY; without even the implied warranty of" , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU" , " Lesser General Public License for more details." , "" , " You should have received a copy of the GNU Lesser General Public" , " License along with this library; if not, write to the Free Software" , " Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" , "" , "Also add information on how to contact you by electronic and paper mail." , "" , "You should also get your employer (if you work as a programmer) or your" , "school, if any, to sign a \"copyright disclaimer\" for the library, if" , "necessary. Here is a sample; alter the names:" , "" , " Yoyodyne, Inc., hereby disclaims all copyright interest in the" , " library `Frob' (a library for tweaking knobs) written by James Random Hacker." , "" , " , 1 April 1990" , " Ty Coon, President of Vice" , "" , "That's all there is to it!" ] lgpl3 :: License lgpl3 = unlines [ " GNU LESSER GENERAL PUBLIC LICENSE" , " Version 3, 29 June 2007" , "" , " Copyright (C) 2007 Free Software Foundation, Inc. " , " Everyone is permitted to copy and distribute verbatim copies" , " of this license document, but changing it is not allowed." , "" , "" , " This version of the GNU Lesser General Public License incorporates" , "the terms and conditions of version 3 of the GNU General Public" , "License, supplemented by the additional permissions listed below." , "" , " 0. Additional Definitions." , "" , " As used herein, \"this License\" refers to version 3 of the GNU Lesser" , "General Public License, and the \"GNU GPL\" refers to version 3 of the GNU" , "General Public License." , "" , " \"The Library\" refers to a covered work governed by this License," , "other than an Application or a Combined Work as defined below." , "" , " An \"Application\" is any work that makes use of an interface provided" , "by the Library, but which is not otherwise based on the Library." , "Defining a subclass of a class defined by the Library is deemed a mode" , "of using an interface provided by the Library." , "" , " A \"Combined Work\" is a work produced by combining or linking an" , "Application with the Library. The particular version of the Library" , "with which the Combined Work was made is also called the \"Linked" , "Version\"." , "" , " The \"Minimal Corresponding Source\" for a Combined Work means the" , "Corresponding Source for the Combined Work, excluding any source code" , "for portions of the Combined Work that, considered in isolation, are" , "based on the Application, and not on the Linked Version." , "" , " The \"Corresponding Application Code\" for a Combined Work means the" , "object code and/or source code for the Application, including any data" , "and utility programs needed for reproducing the Combined Work from the" , "Application, but excluding the System Libraries of the Combined Work." , "" , " 1. Exception to Section 3 of the GNU GPL." , "" , " You may convey a covered work under sections 3 and 4 of this License" , "without being bound by section 3 of the GNU GPL." , "" , " 2. Conveying Modified Versions." 
, "" , " If you modify a copy of the Library, and, in your modifications, a" , "facility refers to a function or data to be supplied by an Application" , "that uses the facility (other than as an argument passed when the" , "facility is invoked), then you may convey a copy of the modified" , "version:" , "" , " a) under this License, provided that you make a good faith effort to" , " ensure that, in the event an Application does not supply the" , " function or data, the facility still operates, and performs" , " whatever part of its purpose remains meaningful, or" , "" , " b) under the GNU GPL, with none of the additional permissions of" , " this License applicable to that copy." , "" , " 3. Object Code Incorporating Material from Library Header Files." , "" , " The object code form of an Application may incorporate material from" , "a header file that is part of the Library. You may convey such object" , "code under terms of your choice, provided that, if the incorporated" , "material is not limited to numerical parameters, data structure" , "layouts and accessors, or small macros, inline functions and templates" , "(ten or fewer lines in length), you do both of the following:" , "" , " a) Give prominent notice with each copy of the object code that the" , " Library is used in it and that the Library and its use are" , " covered by this License." , "" , " b) Accompany the object code with a copy of the GNU GPL and this license" , " document." , "" , " 4. Combined Works." , "" , " You may convey a Combined Work under terms of your choice that," , "taken together, effectively do not restrict modification of the" , "portions of the Library contained in the Combined Work and reverse" , "engineering for debugging such modifications, if you also do each of" , "the following:" , "" , " a) Give prominent notice with each copy of the Combined Work that" , " the Library is used in it and that the Library and its use are" , " covered by this License." , "" , " b) Accompany the Combined Work with a copy of the GNU GPL and this license" , " document." , "" , " c) For a Combined Work that displays copyright notices during" , " execution, include the copyright notice for the Library among" , " these notices, as well as a reference directing the user to the" , " copies of the GNU GPL and this license document." , "" , " d) Do one of the following:" , "" , " 0) Convey the Minimal Corresponding Source under the terms of this" , " License, and the Corresponding Application Code in a form" , " suitable for, and under terms that permit, the user to" , " recombine or relink the Application with a modified version of" , " the Linked Version to produce a modified Combined Work, in the" , " manner specified by section 6 of the GNU GPL for conveying" , " Corresponding Source." , "" , " 1) Use a suitable shared library mechanism for linking with the" , " Library. A suitable mechanism is one that (a) uses at run time" , " a copy of the Library already present on the user's computer" , " system, and (b) will operate properly with a modified version" , " of the Library that is interface-compatible with the Linked" , " Version." , "" , " e) Provide Installation Information, but only if you would otherwise" , " be required to provide such information under section 6 of the" , " GNU GPL, and only to the extent that such information is" , " necessary to install and execute a modified version of the" , " Combined Work produced by recombining or relinking the" , " Application with a modified version of the Linked Version. 
(If" , " you use option 4d0, the Installation Information must accompany" , " the Minimal Corresponding Source and Corresponding Application" , " Code. If you use option 4d1, you must provide the Installation" , " Information in the manner specified by section 6 of the GNU GPL" , " for conveying Corresponding Source.)" , "" , " 5. Combined Libraries." , "" , " You may place library facilities that are a work based on the" , "Library side by side in a single library together with other library" , "facilities that are not Applications and are not covered by this" , "License, and convey such a combined library under terms of your" , "choice, if you do both of the following:" , "" , " a) Accompany the combined library with a copy of the same work based" , " on the Library, uncombined with any other library facilities," , " conveyed under the terms of this License." , "" , " b) Give prominent notice with the combined library that part of it" , " is a work based on the Library, and explaining where to find the" , " accompanying uncombined form of the same work." , "" , " 6. Revised Versions of the GNU Lesser General Public License." , "" , " The Free Software Foundation may publish revised and/or new versions" , "of the GNU Lesser General Public License from time to time. Such new" , "versions will be similar in spirit to the present version, but may" , "differ in detail to address new problems or concerns." , "" , " Each version is given a distinguishing version number. If the" , "Library as you received it specifies that a certain numbered version" , "of the GNU Lesser General Public License \"or any later version\"" , "applies to it, you have the option of following the terms and" , "conditions either of that published version or of any later version" , "published by the Free Software Foundation. If the Library as you" , "received it does not specify a version number of the GNU Lesser" , "General Public License, you may choose any version of the GNU Lesser" , "General Public License ever published by the Free Software Foundation." , "" , " If the Library as you received it specifies that a proxy can decide" , "whether future versions of the GNU Lesser General Public License shall" , "apply, that proxy's public statement of acceptance of any version is" , "permanent authorization for you to choose that version for the" , "Library." ] apache20 :: License apache20 = unlines [ "" , " Apache License" , " Version 2.0, January 2004" , " http://www.apache.org/licenses/" , "" , " TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION" , "" , " 1. Definitions." , "" , " \"License\" shall mean the terms and conditions for use, reproduction," , " and distribution as defined by Sections 1 through 9 of this document." , "" , " \"Licensor\" shall mean the copyright owner or entity authorized by" , " the copyright owner that is granting the License." , "" , " \"Legal Entity\" shall mean the union of the acting entity and all" , " other entities that control, are controlled by, or are under common" , " control with that entity. For the purposes of this definition," , " \"control\" means (i) the power, direct or indirect, to cause the" , " direction or management of such entity, whether by contract or" , " otherwise, or (ii) ownership of fifty percent (50%) or more of the" , " outstanding shares, or (iii) beneficial ownership of such entity." , "" , " \"You\" (or \"Your\") shall mean an individual or Legal Entity" , " exercising permissions granted by this License." 
, "" , " \"Source\" form shall mean the preferred form for making modifications," , " including but not limited to software source code, documentation" , " source, and configuration files." , "" , " \"Object\" form shall mean any form resulting from mechanical" , " transformation or translation of a Source form, including but" , " not limited to compiled object code, generated documentation," , " and conversions to other media types." , "" , " \"Work\" shall mean the work of authorship, whether in Source or" , " Object form, made available under the License, as indicated by a" , " copyright notice that is included in or attached to the work" , " (an example is provided in the Appendix below)." , "" , " \"Derivative Works\" shall mean any work, whether in Source or Object" , " form, that is based on (or derived from) the Work and for which the" , " editorial revisions, annotations, elaborations, or other modifications" , " represent, as a whole, an original work of authorship. For the purposes" , " of this License, Derivative Works shall not include works that remain" , " separable from, or merely link (or bind by name) to the interfaces of," , " the Work and Derivative Works thereof." , "" , " \"Contribution\" shall mean any work of authorship, including" , " the original version of the Work and any modifications or additions" , " to that Work or Derivative Works thereof, that is intentionally" , " submitted to Licensor for inclusion in the Work by the copyright owner" , " or by an individual or Legal Entity authorized to submit on behalf of" , " the copyright owner. For the purposes of this definition, \"submitted\"" , " means any form of electronic, verbal, or written communication sent" , " to the Licensor or its representatives, including but not limited to" , " communication on electronic mailing lists, source code control systems," , " and issue tracking systems that are managed by, or on behalf of, the" , " Licensor for the purpose of discussing and improving the Work, but" , " excluding communication that is conspicuously marked or otherwise" , " designated in writing by the copyright owner as \"Not a Contribution.\"" , "" , " \"Contributor\" shall mean Licensor and any individual or Legal Entity" , " on behalf of whom a Contribution has been received by Licensor and" , " subsequently incorporated within the Work." , "" , " 2. Grant of Copyright License. Subject to the terms and conditions of" , " this License, each Contributor hereby grants to You a perpetual," , " worldwide, non-exclusive, no-charge, royalty-free, irrevocable" , " copyright license to reproduce, prepare Derivative Works of," , " publicly display, publicly perform, sublicense, and distribute the" , " Work and such Derivative Works in Source or Object form." , "" , " 3. Grant of Patent License. Subject to the terms and conditions of" , " this License, each Contributor hereby grants to You a perpetual," , " worldwide, non-exclusive, no-charge, royalty-free, irrevocable" , " (except as stated in this section) patent license to make, have made," , " use, offer to sell, sell, import, and otherwise transfer the Work," , " where such license applies only to those patent claims licensable" , " by such Contributor that are necessarily infringed by their" , " Contribution(s) alone or by combination of their Contribution(s)" , " with the Work to which such Contribution(s) was submitted. 
If You" , " institute patent litigation against any entity (including a" , " cross-claim or counterclaim in a lawsuit) alleging that the Work" , " or a Contribution incorporated within the Work constitutes direct" , " or contributory patent infringement, then any patent licenses" , " granted to You under this License for that Work shall terminate" , " as of the date such litigation is filed." , "" , " 4. Redistribution. You may reproduce and distribute copies of the" , " Work or Derivative Works thereof in any medium, with or without" , " modifications, and in Source or Object form, provided that You" , " meet the following conditions:" , "" , " (a) You must give any other recipients of the Work or" , " Derivative Works a copy of this License; and" , "" , " (b) You must cause any modified files to carry prominent notices" , " stating that You changed the files; and" , "" , " (c) You must retain, in the Source form of any Derivative Works" , " that You distribute, all copyright, patent, trademark, and" , " attribution notices from the Source form of the Work," , " excluding those notices that do not pertain to any part of" , " the Derivative Works; and" , "" , " (d) If the Work includes a \"NOTICE\" text file as part of its" , " distribution, then any Derivative Works that You distribute must" , " include a readable copy of the attribution notices contained" , " within such NOTICE file, excluding those notices that do not" , " pertain to any part of the Derivative Works, in at least one" , " of the following places: within a NOTICE text file distributed" , " as part of the Derivative Works; within the Source form or" , " documentation, if provided along with the Derivative Works; or," , " within a display generated by the Derivative Works, if and" , " wherever such third-party notices normally appear. The contents" , " of the NOTICE file are for informational purposes only and" , " do not modify the License. You may add Your own attribution" , " notices within Derivative Works that You distribute, alongside" , " or as an addendum to the NOTICE text from the Work, provided" , " that such additional attribution notices cannot be construed" , " as modifying the License." , "" , " You may add Your own copyright statement to Your modifications and" , " may provide additional or different license terms and conditions" , " for use, reproduction, or distribution of Your modifications, or" , " for any such Derivative Works as a whole, provided Your use," , " reproduction, and distribution of the Work otherwise complies with" , " the conditions stated in this License." , "" , " 5. Submission of Contributions. Unless You explicitly state otherwise," , " any Contribution intentionally submitted for inclusion in the Work" , " by You to the Licensor shall be under the terms and conditions of" , " this License, without any additional terms or conditions." , " Notwithstanding the above, nothing herein shall supersede or modify" , " the terms of any separate license agreement you may have executed" , " with Licensor regarding such Contributions." , "" , " 6. Trademarks. This License does not grant permission to use the trade" , " names, trademarks, service marks, or product names of the Licensor," , " except as required for reasonable and customary use in describing the" , " origin of the Work and reproducing the content of the NOTICE file." , "" , " 7. Disclaimer of Warranty. 
Unless required by applicable law or" , " agreed to in writing, Licensor provides the Work (and each" , " Contributor provides its Contributions) on an \"AS IS\" BASIS," , " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or" , " implied, including, without limitation, any warranties or conditions" , " of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A" , " PARTICULAR PURPOSE. You are solely responsible for determining the" , " appropriateness of using or redistributing the Work and assume any" , " risks associated with Your exercise of permissions under this License." , "" , " 8. Limitation of Liability. In no event and under no legal theory," , " whether in tort (including negligence), contract, or otherwise," , " unless required by applicable law (such as deliberate and grossly" , " negligent acts) or agreed to in writing, shall any Contributor be" , " liable to You for damages, including any direct, indirect, special," , " incidental, or consequential damages of any character arising as a" , " result of this License or out of the use or inability to use the" , " Work (including but not limited to damages for loss of goodwill," , " work stoppage, computer failure or malfunction, or any and all" , " other commercial damages or losses), even if such Contributor" , " has been advised of the possibility of such damages." , "" , " 9. Accepting Warranty or Additional Liability. While redistributing" , " the Work or Derivative Works thereof, You may choose to offer," , " and charge a fee for, acceptance of support, warranty, indemnity," , " or other liability obligations and/or rights consistent with this" , " License. However, in accepting such obligations, You may act only" , " on Your own behalf and on Your sole responsibility, not on behalf" , " of any other Contributor, and only if You agree to indemnify," , " defend, and hold each Contributor harmless for any liability" , " incurred by, or claims asserted against, such Contributor by reason" , " of your accepting any such warranty or additional liability." , "" , " END OF TERMS AND CONDITIONS" , "" , " APPENDIX: How to apply the Apache License to your work." , "" , " To apply the Apache License to your work, attach the following" , " boilerplate notice, with the fields enclosed by brackets \"[]\"" , " replaced with your own identifying information. (Don't include" , " the brackets!) The text should be enclosed in the appropriate" , " comment syntax for the file format. We also recommend that a" , " file or class name and description of purpose be included on the" , " same \"printed page\" as the copyright notice for easier" , " identification within third-party archives." , "" , " Copyright [yyyy] [name of copyright owner]" , "" , " Licensed under the Apache License, Version 2.0 (the \"License\");" , " you may not use this file except in compliance with the License." , " You may obtain a copy of the License at" , "" , " http://www.apache.org/licenses/LICENSE-2.0" , "" , " Unless required by applicable law or agreed to in writing, software" , " distributed under the License is distributed on an \"AS IS\" BASIS," , " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied." , " See the License for the specific language governing permissions and" , " limitations under the License." 
] mit :: String -> String -> License mit authors year = unlines [ "Copyright (c) " ++ year ++ " " ++ authors , "" , "Permission is hereby granted, free of charge, to any person obtaining" , "a copy of this software and associated documentation files (the" , "\"Software\"), to deal in the Software without restriction, including" , "without limitation the rights to use, copy, modify, merge, publish," , "distribute, sublicense, and/or sell copies of the Software, and to" , "permit persons to whom the Software is furnished to do so, subject to" , "the following conditions:" , "" , "The above copyright notice and this permission notice shall be included" , "in all copies or substantial portions of the Software." , "" , "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND," , "EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF" , "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT." , "IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY" , "CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT," , "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE" , "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE." ] mpl20 :: License mpl20 = unlines [ "Mozilla Public License Version 2.0" , "==================================" , "" , "1. Definitions" , "--------------" , "" , "1.1. \"Contributor\"" , " means each individual or legal entity that creates, contributes to" , " the creation of, or owns Covered Software." , "" , "1.2. \"Contributor Version\"" , " means the combination of the Contributions of others (if any) used" , " by a Contributor and that particular Contributor's Contribution." , "" , "1.3. \"Contribution\"" , " means Covered Software of a particular Contributor." , "" , "1.4. \"Covered Software\"" , " means Source Code Form to which the initial Contributor has attached" , " the notice in Exhibit A, the Executable Form of such Source Code" , " Form, and Modifications of such Source Code Form, in each case" , " including portions thereof." , "" , "1.5. \"Incompatible With Secondary Licenses\"" , " means" , "" , " (a) that the initial Contributor has attached the notice described" , " in Exhibit B to the Covered Software; or" , "" , " (b) that the Covered Software was made available under the terms of" , " version 1.1 or earlier of the License, but not also under the" , " terms of a Secondary License." , "" , "1.6. \"Executable Form\"" , " means any form of the work other than Source Code Form." , "" , "1.7. \"Larger Work\"" , " means a work that combines Covered Software with other material, in" , " a separate file or files, that is not Covered Software." , "" , "1.8. \"License\"" , " means this document." , "" , "1.9. \"Licensable\"" , " means having the right to grant, to the maximum extent possible," , " whether at the time of the initial grant or subsequently, any and" , " all of the rights conveyed by this License." , "" , "1.10. \"Modifications\"" , " means any of the following:" , "" , " (a) any file in Source Code Form that results from an addition to," , " deletion from, or modification of the contents of Covered" , " Software; or" , "" , " (b) any new file in Source Code Form that contains any Covered" , " Software." , "" , "1.11. 
\"Patent Claims\" of a Contributor" , " means any patent claim(s), including without limitation, method," , " process, and apparatus claims, in any patent Licensable by such" , " Contributor that would be infringed, but for the grant of the" , " License, by the making, using, selling, offering for sale, having" , " made, import, or transfer of either its Contributions or its" , " Contributor Version." , "" , "1.12. \"Secondary License\"" , " means either the GNU General Public License, Version 2.0, the GNU" , " Lesser General Public License, Version 2.1, the GNU Affero General" , " Public License, Version 3.0, or any later versions of those" , " licenses." , "" , "1.13. \"Source Code Form\"" , " means the form of the work preferred for making modifications." , "" , "1.14. \"You\" (or \"Your\")" , " means an individual or a legal entity exercising rights under this" , " License. For legal entities, \"You\" includes any entity that" , " controls, is controlled by, or is under common control with You. For" , " purposes of this definition, \"control\" means (a) the power, direct" , " or indirect, to cause the direction or management of such entity," , " whether by contract or otherwise, or (b) ownership of more than" , " fifty percent (50%) of the outstanding shares or beneficial" , " ownership of such entity." , "" , "2. License Grants and Conditions" , "--------------------------------" , "" , "2.1. Grants" , "" , "Each Contributor hereby grants You a world-wide, royalty-free," , "non-exclusive license:" , "" , "(a) under intellectual property rights (other than patent or trademark)" , " Licensable by such Contributor to use, reproduce, make available," , " modify, display, perform, distribute, and otherwise exploit its" , " Contributions, either on an unmodified basis, with Modifications, or" , " as part of a Larger Work; and" , "" , "(b) under Patent Claims of such Contributor to make, use, sell, offer" , " for sale, have made, import, and otherwise transfer either its" , " Contributions or its Contributor Version." , "" , "2.2. Effective Date" , "" , "The licenses granted in Section 2.1 with respect to any Contribution" , "become effective for each Contribution on the date the Contributor first" , "distributes such Contribution." , "" , "2.3. Limitations on Grant Scope" , "" , "The licenses granted in this Section 2 are the only rights granted under" , "this License. No additional rights or licenses will be implied from the" , "distribution or licensing of Covered Software under this License." , "Notwithstanding Section 2.1(b) above, no patent license is granted by a" , "Contributor:" , "" , "(a) for any code that a Contributor has removed from Covered Software;" , " or" , "" , "(b) for infringements caused by: (i) Your and any other third party's" , " modifications of Covered Software, or (ii) the combination of its" , " Contributions with other software (except as part of its Contributor" , " Version); or" , "" , "(c) under Patent Claims infringed by Covered Software in the absence of" , " its Contributions." , "" , "This License does not grant any rights in the trademarks, service marks," , "or logos of any Contributor (except as may be necessary to comply with" , "the notice requirements in Section 3.4)." , "" , "2.4. 
Subsequent Licenses" , "" , "No Contributor makes additional grants as a result of Your choice to" , "distribute the Covered Software under a subsequent version of this" , "License (see Section 10.2) or under the terms of a Secondary License (if" , "permitted under the terms of Section 3.3)." , "" , "2.5. Representation" , "" , "Each Contributor represents that the Contributor believes its" , "Contributions are its original creation(s) or it has sufficient rights" , "to grant the rights to its Contributions conveyed by this License." , "" , "2.6. Fair Use" , "" , "This License is not intended to limit any rights You have under" , "applicable copyright doctrines of fair use, fair dealing, or other" , "equivalents." , "" , "2.7. Conditions" , "" , "Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted" , "in Section 2.1." , "" , "3. Responsibilities" , "-------------------" , "" , "3.1. Distribution of Source Form" , "" , "All distribution of Covered Software in Source Code Form, including any" , "Modifications that You create or to which You contribute, must be under" , "the terms of this License. You must inform recipients that the Source" , "Code Form of the Covered Software is governed by the terms of this" , "License, and how they can obtain a copy of this License. You may not" , "attempt to alter or restrict the recipients' rights in the Source Code" , "Form." , "" , "3.2. Distribution of Executable Form" , "" , "If You distribute Covered Software in Executable Form then:" , "" , "(a) such Covered Software must also be made available in Source Code" , " Form, as described in Section 3.1, and You must inform recipients of" , " the Executable Form how they can obtain a copy of such Source Code" , " Form by reasonable means in a timely manner, at a charge no more" , " than the cost of distribution to the recipient; and" , "" , "(b) You may distribute such Executable Form under the terms of this" , " License, or sublicense it under different terms, provided that the" , " license for the Executable Form does not attempt to limit or alter" , " the recipients' rights in the Source Code Form under this License." , "" , "3.3. Distribution of a Larger Work" , "" , "You may create and distribute a Larger Work under terms of Your choice," , "provided that You also comply with the requirements of this License for" , "the Covered Software. If the Larger Work is a combination of Covered" , "Software with a work governed by one or more Secondary Licenses, and the" , "Covered Software is not Incompatible With Secondary Licenses, this" , "License permits You to additionally distribute such Covered Software" , "under the terms of such Secondary License(s), so that the recipient of" , "the Larger Work may, at their option, further distribute the Covered" , "Software under the terms of either this License or such Secondary" , "License(s)." , "" , "3.4. Notices" , "" , "You may not remove or alter the substance of any license notices" , "(including copyright notices, patent notices, disclaimers of warranty," , "or limitations of liability) contained within the Source Code Form of" , "the Covered Software, except that You may alter any license notices to" , "the extent required to remedy known factual inaccuracies." , "" , "3.5. Application of Additional Terms" , "" , "You may choose to offer, and to charge a fee for, warranty, support," , "indemnity or liability obligations to one or more recipients of Covered" , "Software. 
However, You may do so only on Your own behalf, and not on" , "behalf of any Contributor. You must make it absolutely clear that any" , "such warranty, support, indemnity, or liability obligation is offered by" , "You alone, and You hereby agree to indemnify every Contributor for any" , "liability incurred by such Contributor as a result of warranty, support," , "indemnity or liability terms You offer. You may include additional" , "disclaimers of warranty and limitations of liability specific to any" , "jurisdiction." , "" , "4. Inability to Comply Due to Statute or Regulation" , "---------------------------------------------------" , "" , "If it is impossible for You to comply with any of the terms of this" , "License with respect to some or all of the Covered Software due to" , "statute, judicial order, or regulation then You must: (a) comply with" , "the terms of this License to the maximum extent possible; and (b)" , "describe the limitations and the code they affect. Such description must" , "be placed in a text file included with all distributions of the Covered" , "Software under this License. Except to the extent prohibited by statute" , "or regulation, such description must be sufficiently detailed for a" , "recipient of ordinary skill to be able to understand it." , "" , "5. Termination" , "--------------" , "" , "5.1. The rights granted under this License will terminate automatically" , "if You fail to comply with any of its terms. However, if You become" , "compliant, then the rights granted under this License from a particular" , "Contributor are reinstated (a) provisionally, unless and until such" , "Contributor explicitly and finally terminates Your grants, and (b) on an" , "ongoing basis, if such Contributor fails to notify You of the" , "non-compliance by some reasonable means prior to 60 days after You have" , "come back into compliance. Moreover, Your grants from a particular" , "Contributor are reinstated on an ongoing basis if such Contributor" , "notifies You of the non-compliance by some reasonable means, this is the" , "first time You have received notice of non-compliance with this License" , "from such Contributor, and You become compliant prior to 30 days after" , "Your receipt of the notice." , "" , "5.2. If You initiate litigation against any entity by asserting a patent" , "infringement claim (excluding declaratory judgment actions," , "counter-claims, and cross-claims) alleging that a Contributor Version" , "directly or indirectly infringes any patent, then the rights granted to" , "You by any and all Contributors for the Covered Software under Section" , "2.1 of this License shall terminate." , "" , "5.3. In the event of termination under Sections 5.1 or 5.2 above, all" , "end user license agreements (excluding distributors and resellers) which" , "have been validly granted by You or Your distributors under this License" , "prior to termination shall survive termination." , "" , "************************************************************************" , "* *" , "* 6. Disclaimer of Warranty *" , "* ------------------------- *" , "* *" , "* Covered Software is provided under this License on an \"as is\" *" , "* basis, without warranty of any kind, either expressed, implied, or *" , "* statutory, including, without limitation, warranties that the *" , "* Covered Software is free of defects, merchantable, fit for a *" , "* particular purpose or non-infringing. The entire risk as to the *" , "* quality and performance of the Covered Software is with You. 
*" , "* Should any Covered Software prove defective in any respect, You *" , "* (not any Contributor) assume the cost of any necessary servicing, *" , "* repair, or correction. This disclaimer of warranty constitutes an *" , "* essential part of this License. No use of any Covered Software is *" , "* authorized under this License except under this disclaimer. *" , "* *" , "************************************************************************" , "" , "************************************************************************" , "* *" , "* 7. Limitation of Liability *" , "* -------------------------- *" , "* *" , "* Under no circumstances and under no legal theory, whether tort *" , "* (including negligence), contract, or otherwise, shall any *" , "* Contributor, or anyone who distributes Covered Software as *" , "* permitted above, be liable to You for any direct, indirect, *" , "* special, incidental, or consequential damages of any character *" , "* including, without limitation, damages for lost profits, loss of *" , "* goodwill, work stoppage, computer failure or malfunction, or any *" , "* and all other commercial damages or losses, even if such party *" , "* shall have been informed of the possibility of such damages. This *" , "* limitation of liability shall not apply to liability for death or *" , "* personal injury resulting from such party's negligence to the *" , "* extent applicable law prohibits such limitation. Some *" , "* jurisdictions do not allow the exclusion or limitation of *" , "* incidental or consequential damages, so this exclusion and *" , "* limitation may not apply to You. *" , "* *" , "************************************************************************" , "" , "8. Litigation" , "-------------" , "" , "Any litigation relating to this License may be brought only in the" , "courts of a jurisdiction where the defendant maintains its principal" , "place of business and such litigation shall be governed by laws of that" , "jurisdiction, without reference to its conflict-of-law provisions." , "Nothing in this Section shall prevent a party's ability to bring" , "cross-claims or counter-claims." , "" , "9. Miscellaneous" , "----------------" , "" , "This License represents the complete agreement concerning the subject" , "matter hereof. If any provision of this License is held to be" , "unenforceable, such provision shall be reformed only to the extent" , "necessary to make it enforceable. Any law or regulation which provides" , "that the language of a contract shall be construed against the drafter" , "shall not be used to construe this License against a Contributor." , "" , "10. Versions of the License" , "---------------------------" , "" , "10.1. New Versions" , "" , "Mozilla Foundation is the license steward. Except as provided in Section" , "10.3, no one other than the license steward has the right to modify or" , "publish new versions of this License. Each version will be given a" , "distinguishing version number." , "" , "10.2. Effect of New Versions" , "" , "You may distribute the Covered Software under the terms of the version" , "of the License under which You originally received the Covered Software," , "or under the terms of any subsequent version published by the license" , "steward." , "" , "10.3. 
Modified Versions" , "" , "If you create software not governed by this License, and you want to" , "create a new license for such software, you may create and use a" , "modified version of this License if you rename the license and remove" , "any references to the name of the license steward (except to note that" , "such modified license differs from this License)." , "" , "10.4. Distributing Source Code Form that is Incompatible With Secondary" , "Licenses" , "" , "If You choose to distribute Source Code Form that is Incompatible With" , "Secondary Licenses under the terms of this version of the License, the" , "notice described in Exhibit B of this License must be attached." , "" , "Exhibit A - Source Code Form License Notice" , "-------------------------------------------" , "" , " This Source Code Form is subject to the terms of the Mozilla Public" , " License, v. 2.0. If a copy of the MPL was not distributed with this" , " file, You can obtain one at http://mozilla.org/MPL/2.0/." , "" , "If it is not possible or desirable to put the notice in a particular" , "file, then You may include the notice in a location (such as a LICENSE" , "file in a relevant directory) where a recipient would be likely to look" , "for such a notice." , "" , "You may add additional accurate notices of copyright ownership." , "" , "Exhibit B - \"Incompatible With Secondary Licenses\" Notice" , "---------------------------------------------------------" , "" , " This Source Code Form is \"Incompatible With Secondary Licenses\", as" , " defined by the Mozilla Public License, v. 2.0." ] isc :: String -> String -> License isc authors year = unlines [ "Copyright (c) " ++ year ++ " " ++ authors , "" , "Permission to use, copy, modify, and/or distribute this software for any purpose" , "with or without fee is hereby granted, provided that the above copyright notice" , "and this permission notice appear in all copies." , "" , "THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH" , "REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND" , "FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT," , "INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS" , "OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER" , "TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF" , "THIS SOFTWARE." ] cabal-install-1.22.6.0/Distribution/Client/Init/Types.hs0000644000000000000000000001241012540225656021101 0ustar0000000000000000{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Init.Types -- Copyright : (c) Brent Yorgey, Benedikt Huber 2009 -- License : BSD-like -- -- Maintainer : cabal-devel@haskell.org -- Stability : provisional -- Portability : portable -- -- Some types used by the 'cabal init' command. -- ----------------------------------------------------------------------------- module Distribution.Client.Init.Types where import Distribution.Simple.Setup ( Flag(..) 
) import Distribution.Version import Distribution.Verbosity import qualified Distribution.Package as P import Distribution.License import Distribution.ModuleName import Language.Haskell.Extension ( Language(..), Extension ) import qualified Text.PrettyPrint as Disp import qualified Distribution.Compat.ReadP as Parse import Distribution.Text #if !MIN_VERSION_base(4,8,0) import Data.Monoid #endif -- | InitFlags is really just a simple type to represent certain -- portions of a .cabal file. Rather than have a flag for EVERY -- possible field, we just have one for each field that the user is -- likely to want and/or that we are likely to be able to -- intelligently guess. data InitFlags = InitFlags { nonInteractive :: Flag Bool , quiet :: Flag Bool , packageDir :: Flag FilePath , noComments :: Flag Bool , minimal :: Flag Bool , packageName :: Flag P.PackageName , version :: Flag Version , cabalVersion :: Flag VersionRange , license :: Flag License , author :: Flag String , email :: Flag String , homepage :: Flag String , synopsis :: Flag String , category :: Flag (Either String Category) , extraSrc :: Maybe [String] , packageType :: Flag PackageType , mainIs :: Flag FilePath , language :: Flag Language , exposedModules :: Maybe [ModuleName] , otherModules :: Maybe [ModuleName] , otherExts :: Maybe [Extension] , dependencies :: Maybe [P.Dependency] , sourceDirs :: Maybe [String] , buildTools :: Maybe [String] , initVerbosity :: Flag Verbosity , overwrite :: Flag Bool } deriving (Show) -- the Monoid instance for Flag has later values override earlier -- ones, which is why we want Maybe [foo] for collecting foo values, -- not Flag [foo]. data PackageType = Library | Executable deriving (Show, Read, Eq) instance Text PackageType where disp = Disp.text . show parse = Parse.choice $ map (fmap read . Parse.string . show) [Library, Executable] instance Monoid InitFlags where mempty = InitFlags { nonInteractive = mempty , quiet = mempty , packageDir = mempty , noComments = mempty , minimal = mempty , packageName = mempty , version = mempty , cabalVersion = mempty , license = mempty , author = mempty , email = mempty , homepage = mempty , synopsis = mempty , category = mempty , extraSrc = mempty , packageType = mempty , mainIs = mempty , language = mempty , exposedModules = mempty , otherModules = mempty , otherExts = mempty , dependencies = mempty , sourceDirs = mempty , buildTools = mempty , initVerbosity = mempty , overwrite = mempty } mappend a b = InitFlags { nonInteractive = combine nonInteractive , quiet = combine quiet , packageDir = combine packageDir , noComments = combine noComments , minimal = combine minimal , packageName = combine packageName , version = combine version , cabalVersion = combine cabalVersion , license = combine license , author = combine author , email = combine email , homepage = combine homepage , synopsis = combine synopsis , category = combine category , extraSrc = combine extraSrc , packageType = combine packageType , mainIs = combine mainIs , language = combine language , exposedModules = combine exposedModules , otherModules = combine otherModules , otherExts = combine otherExts , dependencies = combine dependencies , sourceDirs = combine sourceDirs , buildTools = combine buildTools , initVerbosity = combine initVerbosity , overwrite = combine overwrite } where combine field = field a `mappend` field b -- | Some common package categories. 
data Category
    = Codec
    | Concurrency
    | Control
    | Data
    | Database
    | Development
    | Distribution
    | Game
    | Graphics
    | Language
    | Math
    | Network
    | Sound
    | System
    | Testing
    | Text
    | Web
    deriving (Read, Show, Eq, Ord, Bounded, Enum)

instance Text Category where
  disp  = Disp.text . show
  parse = Parse.choice $ map (fmap read . Parse.string . show) [Codec .. ]
cabal-install-1.22.6.0/Distribution/Client/Init/Heuristics.hs0000644000000000000000000003343512540225656022131 0ustar0000000000000000{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module      :  Distribution.Client.Init.Heuristics
-- Copyright   :  (c) Benedikt Huber 2009
-- License     :  BSD-like
--
-- Maintainer  :  cabal-devel@haskell.org
-- Stability   :  provisional
-- Portability :  portable
--
-- Heuristics for creating initial cabal files.
--
-----------------------------------------------------------------------------
module Distribution.Client.Init.Heuristics (
    guessPackageName,
    scanForModules, SourceFileEntry(..),
    neededBuildPrograms,
    guessMainFileCandidates,
    guessAuthorNameMail,
    knownCategories,
) where

import Distribution.Text         (simpleParse)
import Distribution.Simple.Setup (Flag(..), flagToMaybe)
import Distribution.ModuleName   ( ModuleName, toFilePath )
import Distribution.Client.PackageIndex ( allPackagesByName )
import qualified Distribution.Package as P
import qualified Distribution.PackageDescription as PD
    ( category, packageDescription )
import Distribution.Simple.Utils ( intercalate )
import Distribution.Client.Utils ( tryCanonicalizePath )
import Language.Haskell.Extension ( Extension )

import Distribution.Client.Types ( packageDescription, SourcePackageDb(..) )
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ( pure, (<$>), (<*>) )
#endif
import Control.Arrow ( first )
import Control.Monad ( liftM )
import Data.Char   ( isAlphaNum, isNumber, isUpper, isLower, isSpace )
import Data.Either ( partitionEithers )
import Data.List   ( isInfixOf, isPrefixOf, isSuffixOf, sortBy )
import Data.Maybe  ( mapMaybe, catMaybes, maybeToList )
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid ( mempty, mappend, mconcat, )
#endif
import Data.Ord ( comparing )
import qualified Data.Set as Set ( fromList, toList )
import System.Directory ( getCurrentDirectory, getDirectoryContents,
                          doesDirectoryExist, doesFileExist, getHomeDirectory, )
import Distribution.Compat.Environment ( getEnvironment )
import System.FilePath ( takeExtension, takeBaseName, dropExtension, (</>),
                         (<.>), splitDirectories, makeRelative )

import Distribution.Client.Init.Types     ( InitFlags(..) )
import Distribution.Client.Compat.Process ( readProcessWithExitCode )
import System.Exit ( ExitCode(..) )

-- | Return a list of candidate main files for this executable: top-level
-- modules including the word 'Main' in the file name. The list is sorted in
-- order of preference, shorter file names are preferred. 'Right's are existing
-- candidates and 'Left's are those that do not yet exist.
guessMainFileCandidates :: InitFlags -> IO [Either FilePath FilePath]
guessMainFileCandidates flags = do
  dir <- maybe getCurrentDirectory return (flagToMaybe $ packageDir flags)
  files <- getDirectoryContents dir
  let existingCandidates = filter isMain files
      -- We always want to give the user at least one default choice. If either
      -- Main.hs or Main.lhs has already been created, then we don't want to
      -- suggest the other; however, if neither has been created, then we
      -- suggest both.
      newCandidates = if any (`elem` existingCandidates) ["Main.hs", "Main.lhs"]
                      then []
                      else ["Main.hs", "Main.lhs"]
      candidates =
        sortBy (\x y -> comparing (length . either id id) x y
                        `mappend` compare x y)
               (map Left newCandidates ++ map Right existingCandidates)
  return candidates

 where
  isMain f = (isInfixOf "Main" f || isInfixOf "main" f)
          && (isSuffixOf ".hs" f || isSuffixOf ".lhs" f)

-- | Guess the package name based on the given root directory.
guessPackageName :: FilePath -> IO P.PackageName
guessPackageName = liftM (P.PackageName . repair . last . splitDirectories)
                 . tryCanonicalizePath
  where
    -- Treat each span of non-alphanumeric characters as a hyphen. Each
    -- hyphenated component of a package name must contain at least one
    -- alphabetic character. An arbitrary character ('x') will be prepended if
    -- this is not the case for the first component, and subsequent components
    -- will simply be run together. For example, "1+2_foo-3" will become
    -- "x12-foo3".
    repair = repair' ('x' :) id
    repair' invalid valid x = case dropWhile (not . isAlphaNum) x of
        "" -> repairComponent ""
        x' -> let (c, r) = first repairComponent $ break (not . isAlphaNum) x'
              in c ++ repairRest r
      where
        repairComponent c | all isNumber c = invalid c
                          | otherwise      = valid c
        repairRest = repair' id ('-' :)

-- |Data type of source files found in the working directory
data SourceFileEntry = SourceFileEntry
    { relativeSourcePath :: FilePath
    , moduleName         :: ModuleName
    , fileExtension      :: String
    , imports            :: [ModuleName]
    , extensions         :: [Extension]
    } deriving Show

sfToFileName :: FilePath -> SourceFileEntry -> FilePath
sfToFileName projectRoot (SourceFileEntry relPath m ext _ _)
  = projectRoot </> relPath </> toFilePath m <.> ext

-- |Search for source files in the given directory
-- and return pairs of guessed Haskell source path and
-- module names.
scanForModules :: FilePath -> IO [SourceFileEntry]
scanForModules rootDir = scanForModulesIn rootDir rootDir

scanForModulesIn :: FilePath -> FilePath -> IO [SourceFileEntry]
scanForModulesIn projectRoot srcRoot = scan srcRoot []
  where
    scan dir hierarchy = do
        entries <- getDirectoryContents (projectRoot </> dir)
        (files, dirs) <- liftM partitionEithers (mapM (tagIsDir dir) entries)
        let modules = catMaybes [ guessModuleName hierarchy file
                                | file <- files
                                , isUpper (head file) ]
        modules' <- mapM (findImportsAndExts projectRoot) modules
        recMods <- mapM (scanRecursive dir hierarchy) dirs
        return $ concat (modules' : recMods)
    tagIsDir parent entry = do
        isDir <- doesDirectoryExist (parent </> entry)
        return $ (if isDir then Right else Left) entry
    guessModuleName hierarchy entry
        | takeBaseName entry == "Setup" = Nothing
        | ext `elem` sourceExtensions   =
              SourceFileEntry <$> pure relRoot <*> modName <*> pure ext
                              <*> pure [] <*> pure []
        | otherwise = Nothing
      where
        relRoot       = makeRelative projectRoot srcRoot
        unqualModName = dropExtension entry
        modName       = simpleParse
                      $ intercalate "." . reverse $ (unqualModName : hierarchy)
        ext           = case takeExtension entry of '.':e -> e; e -> e
    scanRecursive parent hierarchy entry
      | isUpper (head entry) = scan (parent </> entry) (entry : hierarchy)
      | isLower (head entry) && not (ignoreDir entry) =
          scanForModulesIn projectRoot
            $ foldl (</>) srcRoot (reverse (entry : hierarchy))
      | otherwise = return []
    ignoreDir ('.':_)  = True
    ignoreDir dir      = dir `elem` ["dist", "_darcs"]

findImportsAndExts :: FilePath -> SourceFileEntry -> IO SourceFileEntry
findImportsAndExts projectRoot sf = do
    s <- readFile (sfToFileName projectRoot sf)

    let modules = mapMaybe
                  ( getModName
                  . drop 1
                  . filter (not . null)
                  . dropWhile (/= "import")
                  . words
                  )
                . filter (not . ("--" `isPrefixOf`)) -- poor man's comment filtering
                . lines
                $ s

        -- XXX we should probably make a better attempt at parsing
        -- comments above. Unfortunately we can't use a full-fledged
        -- Haskell parser since cabal's dependencies must be kept at a
        -- minimum.

        -- A poor man's LANGUAGE pragma parser.
        exts = mapMaybe simpleParse
             . concatMap getPragmas
             . filter isLANGUAGEPragma
             . map fst
             . drop 1
             . takeWhile (not . null . snd)
             . iterate (takeBraces . snd)
             $ ("",s)

        takeBraces = break (== '}') . dropWhile (/= '{')

        isLANGUAGEPragma = ("{-# LANGUAGE " `isPrefixOf`)

        getPragmas = map trim . splitCommas . takeWhile (/= '#') . drop 13

        splitCommas "" = []
        splitCommas xs = x : splitCommas (drop 1 y)
          where (x,y) = break (==',') xs

    return sf { imports    = modules
              , extensions = exts }

  where getModName :: [String] -> Maybe ModuleName
        getModName []               = Nothing
        getModName ("qualified":ws) = getModName ws
        getModName (ms:_)           = simpleParse ms

-- Unfortunately we cannot use the version exported by Distribution.Simple.Program
knownSuffixHandlers :: [(String,String)]
knownSuffixHandlers =
  [ ("gc",    "greencard")
  , ("chs",   "chs")
  , ("hsc",   "hsc2hs")
  , ("x",     "alex")
  , ("y",     "happy")
  , ("ly",    "happy")
  , ("cpphs", "cpp")
  ]

sourceExtensions :: [String]
sourceExtensions = "hs" : "lhs" : map fst knownSuffixHandlers

neededBuildPrograms :: [SourceFileEntry] -> [String]
neededBuildPrograms entries =
    [ handler
    | ext <- nubSet (map fileExtension entries)
    , handler <- maybeToList (lookup ext knownSuffixHandlers)
    ]

-- | Guess author and email using darcs and git configuration options. Use
-- the following in decreasing order of preference:
--
-- 1. vcs env vars ($DARCS_EMAIL, $GIT_AUTHOR_*)
-- 2. Local repo configs
-- 3. Global vcs configs
-- 4. The generic $EMAIL
--
-- Name and email are processed separately, so the guess might end up being
-- a name from DARCS_EMAIL and an email from git config.
--
-- Darcs has preference, for tradition's sake.
guessAuthorNameMail :: IO (Flag String, Flag String)
guessAuthorNameMail = fmap authorGuessPure authorGuessIO

-- Ordered in increasing preference, since Flag-as-monoid is identical to
-- Last.
authorGuessPure :: AuthorGuessIO -> AuthorGuess
authorGuessPure (AuthorGuessIO env darcsLocalF darcsGlobalF gitLocal gitGlobal)
    = mconcat
        [ emailEnv env
        , gitGlobal
        , darcsCfg darcsGlobalF
        , gitLocal
        , darcsCfg darcsLocalF
        , gitEnv env
        , darcsEnv env
        ]

authorGuessIO :: IO AuthorGuessIO
authorGuessIO = AuthorGuessIO
    <$> getEnvironment
    <*> (maybeReadFile $ "_darcs" </> "prefs" </> "author")
    <*> (maybeReadFile =<< liftM (</> (".darcs" </> "author")) getHomeDirectory)
    <*> gitCfg Local
    <*> gitCfg Global

-- Types and functions used for guessing the author are now defined:

type AuthorGuess = (Flag String, Flag String)
type Enviro      = [(String, String)]
data GitLoc      = Local | Global
data AuthorGuessIO = AuthorGuessIO
    Enviro         -- ^ Environment lookup table
    (Maybe String) -- ^ Contents of local darcs author info
    (Maybe String) -- ^ Contents of global darcs author info
    AuthorGuess    -- ^ Git config --local
    AuthorGuess    -- ^ Git config --global

darcsEnv :: Enviro -> AuthorGuess
darcsEnv = maybe mempty nameAndMail .
lookup "DARCS_EMAIL" gitEnv :: Enviro -> AuthorGuess gitEnv env = (name, mail) where name = maybeFlag "GIT_AUTHOR_NAME" env mail = maybeFlag "GIT_AUTHOR_EMAIL" env darcsCfg :: Maybe String -> AuthorGuess darcsCfg = maybe mempty nameAndMail emailEnv :: Enviro -> AuthorGuess emailEnv env = (mempty, mail) where mail = maybeFlag "EMAIL" env gitCfg :: GitLoc -> IO AuthorGuess gitCfg which = do name <- gitVar which "user.name" mail <- gitVar which "user.email" return (name, mail) gitVar :: GitLoc -> String -> IO (Flag String) gitVar which = fmap happyOutput . gitConfigQuery which happyOutput :: (ExitCode, a, t) -> Flag a happyOutput v = case v of (ExitSuccess, s, _) -> Flag s _ -> mempty gitConfigQuery :: GitLoc -> String -> IO (ExitCode, String, String) gitConfigQuery which key = fmap trim' $ readProcessWithExitCode "git" ["config", w, key] "" where w = case which of Local -> "--local" Global -> "--global" trim' (a, b, c) = (a, trim b, c) maybeFlag :: String -> Enviro -> Flag String maybeFlag k = maybe mempty Flag . lookup k maybeReadFile :: String -> IO (Maybe String) maybeReadFile f = do exists <- doesFileExist f if exists then fmap Just $ readFile f else return Nothing -- |Get list of categories used in Hackage. NOTE: Very slow, needs to be cached knownCategories :: SourcePackageDb -> [String] knownCategories (SourcePackageDb sourcePkgIndex _) = nubSet [ cat | pkg <- map head (allPackagesByName sourcePkgIndex) , let catList = (PD.category . PD.packageDescription . packageDescription) pkg , cat <- splitString ',' catList ] -- Parse name and email, from darcs pref files or environment variable nameAndMail :: String -> (Flag String, Flag String) nameAndMail str | all isSpace nameOrEmail = mempty | null erest = (mempty, Flag $ trim nameOrEmail) | otherwise = (Flag $ trim nameOrEmail, Flag mail) where (nameOrEmail,erest) = break (== '<') str (mail,_) = break (== '>') (tail erest) trim :: String -> String trim = removeLeadingSpace . reverse . removeLeadingSpace . reverse where removeLeadingSpace = dropWhile isSpace -- split string at given character, and remove whitespace splitString :: Char -> String -> [String] splitString sep str = go str where go s = if null s' then [] else tok : go rest where s' = dropWhile (\c -> c == sep || isSpace c) s (tok,rest) = break (==sep) s' nubSet :: (Ord a) => [a] -> [a] nubSet = Set.toList . Set.fromList {- test db testProjectRoot = do putStrLn "Guessed package name" (guessPackageName >=> print) testProjectRoot putStrLn "Guessed name and email" guessAuthorNameMail >>= print mods <- scanForModules testProjectRoot putStrLn "Guessed modules" mapM_ print mods putStrLn "Needed build programs" print (neededBuildPrograms mods) putStrLn "List of known categories" print $ knownCategories db -}
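{- Editor's addition: a hedged, illustrative usage sketch, kept as a comment
   in the style of the test block above; it is not part of the original
   module. It only calls functions exported by this module. The project
   directory "./my-project" and the helper name 'demoHeuristics' are
   assumptions made for the example.

demoHeuristics :: IO ()
demoHeuristics = do
  -- Guess a package name from the (assumed to exist) project directory.
  -- Per the heuristic documented at 'guessPackageName', a directory named
  -- "1+2_foo-3" would yield the package name "x12-foo3".
  pkgName <- guessPackageName "./my-project"
  print pkgName

  -- Guess author name and email from $DARCS_EMAIL/$GIT_AUTHOR_*, repo and
  -- global VCS configuration, and $EMAIL, in the preference order described
  -- at 'guessAuthorNameMail'.
  (authorName, authorMail) <- guessAuthorNameMail
  print (authorName, authorMail)

  -- Scan the source tree for modules, then work out which preprocessors
  -- (alex, happy, hsc2hs, ...) the guessed sources would require.
  mods <- scanForModules "./my-project"
  mapM_ print mods
  print (neededBuildPrograms mods)
-}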