stack-2.15.7/app/0000755000000000000000000000000014502056212011656 5ustar0000000000000000stack-2.15.7/doc/0000755000000000000000000000000014620153600011643 5ustar0000000000000000stack-2.15.7/src/0000755000000000000000000000000014620153445011674 5ustar0000000000000000stack-2.15.7/src/Codec/0000755000000000000000000000000014620153445012711 5ustar0000000000000000stack-2.15.7/src/Codec/Archive/0000755000000000000000000000000014620153445014272 5ustar0000000000000000stack-2.15.7/src/Codec/Archive/Tar/0000755000000000000000000000000014620153445015020 5ustar0000000000000000stack-2.15.7/src/Control/0000755000000000000000000000000014266741654013327 5ustar0000000000000000stack-2.15.7/src/Control/Concurrent/0000755000000000000000000000000014604306200015425 5ustar0000000000000000stack-2.15.7/src/Data/0000755000000000000000000000000014331513215012537 5ustar0000000000000000stack-2.15.7/src/Data/Attoparsec/0000755000000000000000000000000014620153445014652 5ustar0000000000000000stack-2.15.7/src/Data/Monoid/0000755000000000000000000000000014445120722013767 5ustar0000000000000000stack-2.15.7/src/GHC/0000755000000000000000000000000014502056213012267 5ustar0000000000000000stack-2.15.7/src/GHC/Utils/0000755000000000000000000000000014502056213013367 5ustar0000000000000000stack-2.15.7/src/GHC/Utils/GhcPkg/0000755000000000000000000000000014502056213014532 5ustar0000000000000000stack-2.15.7/src/GHC/Utils/GhcPkg/Main/0000755000000000000000000000000014604306200015413 5ustar0000000000000000stack-2.15.7/src/Network/0000755000000000000000000000000014331513215013317 5ustar0000000000000000stack-2.15.7/src/Network/HTTP/0000755000000000000000000000000014604306200014073 5ustar0000000000000000stack-2.15.7/src/Options/0000755000000000000000000000000014266741654013342 5ustar0000000000000000stack-2.15.7/src/Options/Applicative/0000755000000000000000000000000014604306200015557 5ustar0000000000000000stack-2.15.7/src/Options/Applicative/Builder/0000755000000000000000000000000014620153445017156 
5ustar0000000000000000stack-2.15.7/src/Path/0000755000000000000000000000000014604306200012557 5ustar0000000000000000stack-2.15.7/src/Stack/0000755000000000000000000000000014620153474012743 5ustar0000000000000000stack-2.15.7/src/Stack/Build/0000755000000000000000000000000014620153474014002 5ustar0000000000000000stack-2.15.7/src/Stack/Config/0000755000000000000000000000000014620153445014146 5ustar0000000000000000stack-2.15.7/src/Stack/Constants/0000755000000000000000000000000014502056213014707 5ustar0000000000000000stack-2.15.7/src/Stack/Ghci/0000755000000000000000000000000014604306201013603 5ustar0000000000000000stack-2.15.7/src/Stack/Options/0000755000000000000000000000000014620153474014376 5ustar0000000000000000stack-2.15.7/src/Stack/Setup/0000755000000000000000000000000014620153446014042 5ustar0000000000000000stack-2.15.7/src/Stack/Storage/0000755000000000000000000000000014620153446014346 5ustar0000000000000000stack-2.15.7/src/Stack/Types/0000755000000000000000000000000014620153446014046 5ustar0000000000000000stack-2.15.7/src/Stack/Types/Build/0000755000000000000000000000000014620153446015105 5ustar0000000000000000stack-2.15.7/src/Stack/Types/Config/0000755000000000000000000000000014620153446015253 5ustar0000000000000000stack-2.15.7/src/System/0000755000000000000000000000000014331513215013152 5ustar0000000000000000stack-2.15.7/src/System/Process/0000755000000000000000000000000014620153446014577 5ustar0000000000000000stack-2.15.7/src/setup-shim/0000755000000000000000000000000014620153474013774 5ustar0000000000000000stack-2.15.7/src/unix/0000755000000000000000000000000014445120723012655 5ustar0000000000000000stack-2.15.7/src/unix/Stack/0000755000000000000000000000000014445120723013722 5ustar0000000000000000stack-2.15.7/src/unix/Stack/Constants/0000755000000000000000000000000014502056214015673 5ustar0000000000000000stack-2.15.7/src/unix/Stack/Docker/0000755000000000000000000000000014604306201015123 
5ustar0000000000000000stack-2.15.7/src/unix/System/0000755000000000000000000000000014502056214014136 5ustar0000000000000000stack-2.15.7/src/unix/System/Info/0000755000000000000000000000000014445120723015034 5ustar0000000000000000stack-2.15.7/src/unix/cbits/0000755000000000000000000000000014331513215013755 5ustar0000000000000000stack-2.15.7/src/windows/0000755000000000000000000000000014445120723013364 5ustar0000000000000000stack-2.15.7/src/windows/Stack/0000755000000000000000000000000014445120723014431 5ustar0000000000000000stack-2.15.7/src/windows/Stack/Constants/0000755000000000000000000000000014502056214016402 5ustar0000000000000000stack-2.15.7/src/windows/Stack/Docker/0000755000000000000000000000000014604306201015632 5ustar0000000000000000stack-2.15.7/src/windows/System/0000755000000000000000000000000014604306201014642 5ustar0000000000000000stack-2.15.7/src/windows/System/Info/0000755000000000000000000000000014445120723015543 5ustar0000000000000000stack-2.15.7/src/windows/System/Posix/0000755000000000000000000000000014604306201015744 5ustar0000000000000000stack-2.15.7/tests/0000755000000000000000000000000014502056215012243 5ustar0000000000000000stack-2.15.7/tests/integration/0000755000000000000000000000000014502056214014565 5ustar0000000000000000stack-2.15.7/tests/integration/lib/0000755000000000000000000000000014620153474015342 5ustar0000000000000000stack-2.15.7/tests/unit/0000755000000000000000000000000014502056216013223 5ustar0000000000000000stack-2.15.7/tests/unit/Stack/0000755000000000000000000000000014620153474014275 5ustar0000000000000000stack-2.15.7/tests/unit/Stack/Build/0000755000000000000000000000000014502056215015326 5ustar0000000000000000stack-2.15.7/tests/unit/Stack/Config/0000755000000000000000000000000014620153446015501 5ustar0000000000000000stack-2.15.7/tests/unit/Stack/Ghci/0000755000000000000000000000000014604306201015135 5ustar0000000000000000stack-2.15.7/tests/unit/Stack/Types/0000755000000000000000000000000014502056216015374 
5ustar0000000000000000stack-2.15.7/tests/unit/Stack/Untar/0000755000000000000000000000000014502056216015361 5ustar0000000000000000stack-2.15.7/tests/unit/package-dump/0000755000000000000000000000000014502056216015561 5ustar0000000000000000stack-2.15.7/tests/unit/unix/0000755000000000000000000000000014502056216014206 5ustar0000000000000000stack-2.15.7/tests/unit/unix/Stack/0000755000000000000000000000000014502056216015253 5ustar0000000000000000stack-2.15.7/tests/unit/unix/Stack/Ghci/0000755000000000000000000000000014502056216016125 5ustar0000000000000000stack-2.15.7/tests/unit/windows/0000755000000000000000000000000014502056216014715 5ustar0000000000000000stack-2.15.7/tests/unit/windows/Stack/0000755000000000000000000000000014502056216015762 5ustar0000000000000000stack-2.15.7/tests/unit/windows/Stack/Ghci/0000755000000000000000000000000014502056216016634 5ustar0000000000000000stack-2.15.7/src/Codec/Archive/Tar/Utf8.hs0000644000000000000000000001634414620153445016212 0ustar0000000000000000module Codec.Archive.Tar.Utf8 ( module Codec.Archive.Tar , entryPath , unpack ) where -- | A module that is equivalent to "Codec.Archive.Tar" from the @tar@ package, -- except that @unpack@ assumes that the file paths in an archive are UTF8 -- encoded. 
import Codec.Archive.Tar hiding ( entryPath, unpack ) import Codec.Archive.Tar.Check ( checkSecurity ) import Codec.Archive.Tar.Entry ( Entry (..), TarPath, fromLinkTarget ) import qualified Codec.Archive.Tar.Entry as Tar import Control.Exception ( Exception, catch, throwIO ) import Data.Bits ( (.|.), (.&.), shiftL ) import qualified Data.ByteString.Lazy as LBS import Data.Char ( chr, ord ) import Data.Int ( Int64 ) import Data.Maybe ( fromMaybe ) import Data.Time.Clock.POSIX ( posixSecondsToUTCTime ) import System.Directory ( copyFile, createDirectoryIfMissing, setModificationTime ) import System.FilePath ( () ) import qualified System.FilePath as FP import System.IO.Error ( isPermissionError ) type EpochTime = Int64 -- | Native 'FilePath' of the file or directory within the archive. -- -- Assumes that the 'TarPath' of an 'Entry' is UTF8 encoded. entryPath :: Entry -> FilePath entryPath = fromTarPath . entryTarPath -- | Convert a 'TarPath' to a native 'FilePath'. -- -- The native 'FilePath' will use the native directory separator but it is not -- otherwise checked for validity or sanity. In particular: -- -- * The tar path may be invalid as a native path, eg the file name @\"nul\"@ -- is not valid on Windows. -- -- * The tar path may be an absolute path or may contain @\"..\"@ components. -- For security reasons this should not usually be allowed, but it is your -- responsibility to check for these conditions (eg using 'checkSecurity'). -- -- Assumes that the 'TarPath' is UTF8 encoded. fromTarPath :: TarPath -> FilePath fromTarPath tp = decodeIfUtf8Encoded $ Tar.fromTarPath tp -- | Create local files and directories based on the entries of a tar archive. -- -- This is a portable implementation of unpacking suitable for portable -- archives. It handles 'NormalFile' and 'Directory' entries and has simulated -- support for 'SymbolicLink' and 'HardLink' entries. Links are implemented by -- copying the target file. This therefore works on Windows as well as Unix. 
-- All other entry types are ignored, that is they are not unpacked and no -- exception is raised. -- -- If the 'Entries' ends in an error then it is raised an an exception. Any -- files or directories that have been unpacked before the error was -- encountered will not be deleted. For this reason you may want to unpack -- into an empty directory so that you can easily clean up if unpacking fails -- part-way. -- -- On its own, this function only checks for security (using 'checkSecurity'). -- You can do other checks by applying checking functions to the 'Entries' that -- you pass to this function. For example: -- -- > unpack dir (checkTarbomb expectedDir entries) -- -- If you care about the priority of the reported errors then you may want to -- use 'checkSecurity' before 'checkTarbomb' or other checks. -- -- Assumes that the 'TarPath' of an `Entry` is UTF8 encoded. unpack :: Exception e => FilePath -> Entries e -> IO () unpack baseDir entries = unpackEntries [] (checkSecurity entries) >>= emulateLinks where -- We're relying here on 'checkSecurity' to make sure we're not scribbling -- files all over the place. unpackEntries _ (Fail err) = either throwIO throwIO err unpackEntries links Done = return links unpackEntries links (Next entry es) = case entryContent entry of NormalFile file _ -> extractFile path file mtime >> unpackEntries links es Directory -> extractDir path mtime >> unpackEntries links es HardLink link -> (unpackEntries $! saveLink path link links) es SymbolicLink link -> (unpackEntries $! saveLink path link links) es _ -> unpackEntries links es --ignore other file types where path = entryPath entry mtime = entryTime entry extractFile path content mtime = do -- Note that tar archives do not make sure each directory is created -- before files they contain, indeed we may have to create several -- levels of directory. 
createDirectoryIfMissing True absDir LBS.writeFile absPath content setModTime absPath mtime where absDir = baseDir FP.takeDirectory path absPath = baseDir path extractDir path mtime = do createDirectoryIfMissing True absPath setModTime absPath mtime where absPath = baseDir path saveLink path link links = seq (length path) $ seq (length link') $ (path, link'):links where link' = fromLinkTarget link emulateLinks = mapM_ $ \(relPath, relLinkTarget) -> let absPath = baseDir relPath absTarget = FP.takeDirectory absPath relLinkTarget in copyFile absTarget absPath setModTime :: FilePath -> EpochTime -> IO () setModTime path t = setModificationTime path (posixSecondsToUTCTime (fromIntegral t)) `catch` \e -> if isPermissionError e then return () else throwIO e -- | If the given 'String' can be interpreted as a string of bytes that encodes -- a string using UTF8, then yields the string decoded, otherwise yields the -- given 'String'. -- Inspired by the utf8-string package. decodeIfUtf8Encoded :: String -> String decodeIfUtf8Encoded s = fromMaybe s $ decode s where decode :: String -> Maybe String decode [] = Just "" decode (c:cs) | c' < 0x80 = decode' c cs | c' < 0xc0 = Nothing | c' < 0xe0 = multi1 | c' < 0xf0 = multiByte 2 0b1111 0x00000800 | c' < 0xf8 = multiByte 3 0b0111 0x00010000 | c' < 0xfc = multiByte 4 0b0011 0x00200000 | c' < 0xfe = multiByte 5 0b0001 0x04000000 | otherwise = Nothing where c' = ord c isValidByte b = b <= 0xff && b .&. 0b11000000 == 0b10000000 combine b1 b2 = (b1 `shiftL` 6) .|. (b2 .&. 0b00111111) multi1 = case cs of c1:ds | isValidByte c1' -> let d = combine (c' .&. 0b00011111) c1' in if d >= 0x80 then decode' (chr d) ds else Nothing where c1' = ord c1 _ -> Nothing multiByte :: Int -> Int -> Int -> Maybe String multiByte i mask overlong = aux i cs (c' .&. 
mask) where aux 0 rs acc | isValidAcc = decode' (chr acc) rs | otherwise = Nothing where isValidAcc = overlong <= acc && acc <= 0x10ffff && (acc < 0xd800 || 0xdfff < acc) && (acc < 0xfffe || 0xffff < acc) aux n (r : rs) acc | isValidByte r' = aux (n - 1) rs $ combine acc r' where r' = ord r aux _ _ _ = Nothing decode' :: Char -> String -> Maybe String decode' x xs = do xs' <- decode xs pure $ x : xs' stack-2.15.7/src/Control/Concurrent/Execute.hs0000644000000000000000000001515314604306200017370 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} -- Concurrent execution with dependencies. Types currently hard-coded for needs -- of stack, but could be generalized easily. module Control.Concurrent.Execute ( ActionType (..) , ActionId (..) , ActionContext (..) , Action (..) , Concurrency (..) , runActions ) where import Control.Concurrent.STM ( check ) import Stack.Prelude import Data.List ( sortBy ) import qualified Data.Set as Set -- | Type representing exceptions thrown by functions exported by the -- "Control.Concurrent.Execute" module. data ExecuteException = InconsistentDependenciesBug deriving (Show, Typeable) instance Exception ExecuteException where displayException InconsistentDependenciesBug = bugReport "[S-2816]" "Inconsistent dependencies were discovered while executing your build \ \plan." -- | Type representing types of Stack build actions. data ActionType = ATBuild -- ^ Action for building a package's library and executables. If -- 'taskAllInOne' is 'True', then this will also build benchmarks and tests. -- It is 'False' when the library's benchmarks or test-suites have cyclic -- dependencies. | ATBuildFinal -- ^ Task for building the package's benchmarks and test-suites. Requires -- that the library was already built. | ATRunTests -- ^ Task for running the package's test-suites. 
| ATRunBenchmarks -- ^ Task for running the package's benchmarks. deriving (Show, Eq, Ord) -- | Types representing the unique ids of Stack build actions. data ActionId = ActionId !PackageIdentifier !ActionType deriving (Eq, Ord, Show) -- | Type representing Stack build actions. data Action = Action { actionId :: !ActionId -- ^ The action's unique id. , actionDeps :: !(Set ActionId) -- ^ Actions on which this action depends. , action :: !(ActionContext -> IO ()) -- ^ The action's 'IO' action, given a context. , concurrency :: !Concurrency -- ^ Whether this action may be run concurrently with others. } -- | Type representing permissions for actions to be run concurrently with -- others. data Concurrency = ConcurrencyAllowed | ConcurrencyDisallowed deriving Eq data ActionContext = ActionContext { remaining :: !(Set ActionId) -- ^ Does not include the current action. , downstream :: [Action] -- ^ Actions which depend on the current action. , concurrency :: !Concurrency -- ^ Whether this action may be run concurrently with others. } data ExecuteState = ExecuteState { actions :: TVar [Action] , exceptions :: TVar [SomeException] , inAction :: TVar (Set ActionId) , completed :: TVar Int , keepGoing :: Bool } runActions :: Int -- ^ threads -> Bool -- ^ keep going after one task has failed -> [Action] -> (TVar Int -> TVar (Set ActionId) -> IO ()) -- ^ progress updated -> IO [SomeException] runActions threads keepGoing actions withProgress = do es <- ExecuteState <$> newTVarIO (sortActions actions) -- esActions <*> newTVarIO [] -- esExceptions <*> newTVarIO Set.empty -- esInAction <*> newTVarIO 0 -- esCompleted <*> pure keepGoing -- esKeepGoing _ <- async $ withProgress es.completed es.inAction if threads <= 1 then runActions' es else replicateConcurrently_ threads $ runActions' es readTVarIO es.exceptions -- | Sort actions such that those that can't be run concurrently are at -- the end. 
sortActions :: [Action] -> [Action] sortActions = sortBy (compareConcurrency `on` (.concurrency)) where -- NOTE: Could derive Ord. However, I like to make this explicit so -- that changes to the datatype must consider how it's affecting -- this. compareConcurrency ConcurrencyAllowed ConcurrencyDisallowed = LT compareConcurrency ConcurrencyDisallowed ConcurrencyAllowed = GT compareConcurrency _ _ = EQ runActions' :: ExecuteState -> IO () runActions' es = loop where loop :: IO () loop = join $ atomically $ breakOnErrs $ withActions processActions breakOnErrs :: STM (IO ()) -> STM (IO ()) breakOnErrs inner = do errs <- readTVar es.exceptions if null errs || es.keepGoing then inner else doNothing withActions :: ([Action] -> STM (IO ())) -> STM (IO ()) withActions inner = do actions <- readTVar es.actions if null actions then doNothing else inner actions processActions :: [Action] -> STM (IO ()) processActions actions = do inAction <- readTVar es.inAction case break (Set.null . (.actionDeps)) actions of (_, []) -> do check (Set.null inAction) unless es.keepGoing $ modifyTVar es.exceptions (toException InconsistentDependenciesBug:) doNothing (xs, action:ys) -> processAction inAction (xs ++ ys) action processAction :: Set ActionId -> [Action] -> Action -> STM (IO ()) processAction inAction otherActions action = do let concurrency = action.concurrency unless (concurrency == ConcurrencyAllowed) $ check (Set.null inAction) let action' = action.actionId otherActions' = Set.fromList $ map (.actionId) otherActions remaining = Set.union otherActions' inAction downstream = downstreamActions action' otherActions actionContext = ActionContext { remaining , downstream , concurrency } writeTVar es.actions otherActions modifyTVar es.inAction (Set.insert action') pure $ do mask $ \restore -> do eres <- try $ restore $ action.action actionContext atomically $ do modifyTVar es.inAction (Set.delete action') modifyTVar es.completed (+1) case eres of Left err -> modifyTVar es.exceptions 
(err:) Right () -> modifyTVar es.actions $ map (dropDep action') loop -- | Filter a list of actions to include only those that depend on the given -- action. downstreamActions :: ActionId -> [Action] -> [Action] downstreamActions aid = filter (\a -> aid `Set.member` a.actionDeps) -- | Given two actions (the first specified by its id) yield an action -- equivalent to the second but excluding any dependency on the first action. dropDep :: ActionId -> Action -> Action dropDep action' action = action { actionDeps = Set.delete action' action.actionDeps } -- | @IO ()@ lifted into 'STM'. doNothing :: STM (IO ()) doNothing = pure $ pure () stack-2.15.7/src/Data/Attoparsec/Args.hs0000644000000000000000000000276014604306200016076 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -- | Parsing of Stack command line arguments module Data.Attoparsec.Args ( EscapingMode (..) , argsParser , parseArgs , parseArgsFromString ) where import Data.Attoparsec.Text ( () ) import qualified Data.Attoparsec.Text as P import qualified Data.Text as T import Stack.Prelude -- | Mode for parsing escape characters. data EscapingMode = Escaping | NoEscaping deriving (Enum, Eq, Show) -- | Parse arguments using 'argsParser'. parseArgs :: EscapingMode -> Text -> Either String [String] parseArgs mode = P.parseOnly (argsParser mode) -- | Parse using 'argsParser' from a string. parseArgsFromString :: EscapingMode -> String -> Either String [String] parseArgsFromString mode = P.parseOnly (argsParser mode) . T.pack -- | A basic argument parser. It supports space-separated text, and string -- quotation with identity escaping: \x -> x. 
argsParser :: EscapingMode -> P.Parser [String] argsParser mode = many ( P.skipSpace *> (quoted <|> unquoted) ) <* P.skipSpace <* (P.endOfInput "unterminated string") where quoted = P.char '"' *> str <* P.char '"' unquoted = P.many1 naked str = many ( case mode of Escaping -> escaped <|> nonquote NoEscaping -> nonquote ) escaped = P.char '\\' *> P.anyChar nonquote = P.satisfy (/= '"') naked = P.satisfy (not . flip elem ("\" " :: String)) stack-2.15.7/src/Data/Attoparsec/Combinators.hs0000644000000000000000000000130614445120722017463 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | More readable combinators for writing parsers. module Data.Attoparsec.Combinators ( alternating , appending , concating , pured ) where import Stack.Prelude -- | Concatenate two parsers. appending :: (Applicative f, Semigroup a) => f a -> f a -> f a appending a b = (<>) <$> a <*> b -- | Alternative parsers. alternating :: Alternative f => f a -> f a -> f a alternating a b = a <|> b -- | Pure something. pured :: (Applicative g, Applicative f) => g a -> g (f a) pured = fmap pure -- | Concating the result of an action. concating :: (Monoid m, Applicative f) => f [m] -> f m concating = fmap mconcat stack-2.15.7/src/Data/Attoparsec/Interpreter.hs0000644000000000000000000001375714620153445017526 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {- | This module implements parsing of additional arguments embedded in a comment when Stack is invoked as a script interpreter ===Specifying arguments in script interpreter mode @/stack/@ can execute a Haskell source file using @/runghc/@ and if required it can also install and setup the compiler and any package dependencies automatically. For using a Haskell source file as an executable script on a Unix like OS, the first line of the file must specify @stack@ as the interpreter using a shebang directive e.g. 
> #!/usr/bin/env stack Additional arguments can be specified in a haskell comment following the @#!@ line. The contents inside the comment must be a single valid stack command line, starting with @stack@ as the command and followed by the options to use for executing this file. The comment must be on the line immediately following the @#!@ line. The comment must start in the first column of the line. When using a block style comment the command can be split on multiple lines. Here is an example of a single line comment: > #!/usr/bin/env stack > -- stack --resolver lts-3.14 --install-ghc runghc --package random Here is an example of a multi line block comment: @ #!\/usr\/bin\/env stack {\- stack --resolver lts-3.14 --install-ghc runghc --package random -\} @ When the @#!@ line is not present, the file can still be executed using @stack \@ command if the file starts with a valid stack interpreter comment. This can be used to execute the file on Windows for example. Nested block comments are not supported. -} module Data.Attoparsec.Interpreter ( interpreterArgsParser -- for unit tests , getInterpreterArgs ) where import Data.Attoparsec.Args ( EscapingMode (..), argsParser ) import Data.Attoparsec.Text ( () ) import qualified Data.Attoparsec.Text as P import Data.Char ( isSpace ) import Conduit ( decodeUtf8C, withSourceFile ) import Data.Conduit.Attoparsec ( ParseError (..), Position (..), sinkParserEither ) import Data.List ( intercalate ) import Data.List.NonEmpty ( singleton ) import Data.Text ( pack ) import RIO.NonEmpty ( nonEmpty ) import Stack.Constants ( stackProgName ) import Stack.Prelude import System.FilePath ( takeExtension ) import System.IO ( hPutStrLn ) -- | Parser to extract the Stack command line embedded inside a comment -- after validating the placement and formatting rules for a valid -- interpreter specification. 
interpreterArgsParser :: Bool -> String -> P.Parser String interpreterArgsParser isLiterate progName = P.option "" sheBangLine *> interpreterComment where sheBangLine = P.string "#!" *> P.manyTill P.anyChar P.endOfLine commentStart psr = (psr (progName ++ " options comment")) *> P.skipSpace *> (P.string (pack progName) show progName) -- Treat newlines as spaces inside the block comment anyCharNormalizeSpace = let normalizeSpace c = if isSpace c then ' ' else c in P.satisfyWith normalizeSpace $ const True comment start end = commentStart start *> ((end >> pure "") <|> (P.space *> (P.manyTill anyCharNormalizeSpace end "-}"))) horizontalSpace = P.satisfy P.isHorizontalSpace lineComment = comment "--" (P.endOfLine <|> P.endOfInput) literateLineComment = comment (">" *> horizontalSpace *> "--") (P.endOfLine <|> P.endOfInput) blockComment = comment "{-" (P.string "-}") literateBlockComment = (">" *> horizontalSpace *> "{-") *> P.skipMany (("" <$ horizontalSpace) <|> (P.endOfLine *> ">")) *> (P.string (pack progName) progName) *> P.manyTill' (P.satisfy (not . P.isEndOfLine) <|> (' ' <$ (P.endOfLine *> ">" ">"))) "-}" interpreterComment = if isLiterate then literateLineComment <|> literateBlockComment else lineComment <|> blockComment -- | Extract Stack arguments from a correctly placed and correctly formatted -- comment when it is being used as an interpreter getInterpreterArgs :: String -> IO (NonEmpty String) getInterpreterArgs file = do eArgStr <- withSourceFile file parseFile case eArgStr of Left err -> handleFailure $ decodeError err Right str -> parseArgStr str where parseFile src = runConduit $ src .| decodeUtf8C .| sinkParserEither (interpreterArgsParser isLiterate stackProgName) isLiterate = takeExtension file == ".lhs" -- FIXME We should print anything only when explicit verbose mode is -- specified by the user on command line. But currently the -- implementation does not accept or parse any command line flags in -- interpreter mode. 
We can only invoke the interpreter as -- "stack " strictly without any options. stackWarn s = hPutStrLn stderr $ stackProgName ++ ": WARNING! " ++ s handleFailure err = do mapM_ stackWarn (lines err) stackWarn "Missing or unusable Stack options specification" stackWarn "Using runghc without any additional Stack options" pure $ singleton "runghc" parseArgStr str = case P.parseOnly (argsParser Escaping) (pack str) of Left err -> handleFailure ("Error parsing command specified in the " ++ "Stack options comment: " ++ err) Right args -> maybe (handleFailure "Empty argument list in Stack options comment") pure (nonEmpty args) decodeError e = case e of ParseError ctxs _ (Position l col _) -> if null ctxs then "Parse error" else ("Expecting " ++ intercalate " or " ctxs) ++ " at line " ++ show l ++ ", column " ++ show col DivergentParser -> "Divergent parser" stack-2.15.7/src/Data/Monoid/Map.hs0000644000000000000000000000106214445120722015037 0ustar0000000000000000module Data.Monoid.Map ( MonoidMap (..) ) where import qualified Data.Map as M import Stack.Prelude -- | Utility newtype wrapper to make Map's Monoid also use the -- element's Monoid. 
newtype MonoidMap k a = MonoidMap (Map k a) deriving (Eq, Functor, Generic, Ord, Read, Show) instance (Ord k, Semigroup a) => Semigroup (MonoidMap k a) where MonoidMap mp1 <> MonoidMap mp2 = MonoidMap (M.unionWith (<>) mp1 mp2) instance (Ord k, Semigroup a) => Monoid (MonoidMap k a) where mappend = (<>) mempty = MonoidMap mempty stack-2.15.7/src/GHC/Utils/GhcPkg/Main/Compat.hs0000644000000000000000000006072614604306200017205 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TupleSections #-} -- This module is based on GHC's utils\ghc-pkg\Main.hs at -- commit f66fc15f2e6849125074bcfeb44334a663323ca6 (see GHC merge request -- !11142), with: -- * changeDBDir' does not perform an effective @ghc-pkg recache@, -- * the cache is not used, -- * consistency checks are not performed, -- * use Stack program name, -- * use "Stack.Prelude" rather than "Prelude", -- * use 'RIO' @env@ monad, -- * use well-typed representations of paths from the @path@ package, -- * add pretty messages and exceptions, -- * redundant code deleted, -- * Hlint applied, and -- * explicit import lists. -- -- The version of the ghc-pkg executable supplied with GHCs published before -- 28 August 2023 does not efficiently bulk unregister. This module exports a -- function that does efficiently bulk unregister. module GHC.Utils.GhcPkg.Main.Compat ( ghcPkgUnregisterForce ) where ----------------------------------------------------------------------------- -- -- (c) The University of Glasgow 2004-2009. 
-- -- Package management tool -- ----------------------------------------------------------------------------- import qualified Data.Foldable as F import qualified Data.Traversable as F import Distribution.InstalledPackageInfo as Cabal import Distribution.Package ( UnitId, mungedId ) import qualified Distribution.Parsec as Cabal import Distribution.Text ( display ) import Distribution.Version ( nullVersion ) import GHC.IO.Exception (IOErrorType(InappropriateType)) import qualified GHC.Unit.Database as GhcPkg import Path ( SomeBase (..), fileExtension, mapSomeBase, parseRelFile , parseSomeDir, prjSomeBase ) import qualified Path as P import Path.IO ( createDirIfMissing, doesDirExist, listDir, removeFile ) import qualified RIO.ByteString as BS import RIO.List ( isPrefixOf, stripSuffix ) import RIO.NonEmpty ( nonEmpty ) import qualified RIO.NonEmpty as NE import Stack.Constants ( relFilePackageCache ) import Stack.Prelude hiding ( display ) import System.Environment ( getEnv ) import System.FilePath as FilePath import System.IO ( readFile ) import System.IO.Error ( ioeGetErrorType, ioError, isDoesNotExistError ) -- | Function equivalent to: -- -- > ghc-pkg --no-user-package-db --package-db= unregister [--ipid]

--
ghcPkgUnregisterForce ::
     HasTerm env
  => Path Abs Dir -- ^ Path to the global package database
  -> Path Abs Dir -- ^ Path to the package database
  -> Bool -- ^ Apply ghc-pkg's --ipid, --unit-id flag?
  -> [String] -- ^ Packages to unregister
  -> RIO env ()
ghcPkgUnregisterForce globalDb pkgDb hasIpid pkgarg_strs = do
  -- Interpret each requested package either as a unit id or as a (possibly
  -- globbed) package identifier, depending on the --ipid flag.
  pkgargs <- forM pkgarg_strs (readPackageArg asArg)
  prettyDebugL $
      flow "Unregistering from"
    : (pretty pkgDb <> ":")
    : mkNarrativeList (Just Current) False
        (map (fromString . show) pkgargs :: [StyleDoc])
  unregisterPackages globalDb pkgargs pkgDb
 where
  asArg = if hasIpid then AsUnitId else AsDefault

-- | Type representing \'pretty\' exceptions thrown by functions exported by
-- the "GHC.Utils.GhcPkg.Main.Compat" module.
data GhcPkgPrettyException
  = CannotParse !String !String !String
  | CannotOpenDBForModification !(SomeBase Dir) !IOException
  | SingleFileDBUnsupported !(SomeBase Dir)
  | ParsePackageInfoExceptions !String
  | CannotFindPackage !PackageArg !(Maybe (SomeBase Dir))
  | CannotParseRelFileBug !String
  | CannotParseDirectoryWithDBug !String
  deriving (Show, Typeable)

instance Pretty GhcPkgPrettyException where
  pretty (CannotParse input what err) =
    "[S-6512]"
    <> line
    <> fillSep
         [ flow "cannot parse"
         , style Current (fromString input)
         , flow "as a"
         , fromString what <> ":"
         ]
    <> blankLine
    <> fromString err
  pretty (CannotOpenDBForModification dbPath err) =
    "[S-3384]"
    <> line
    <> fillSep
         [ flow "Couldn't open database"
         , pretty dbPath
         , flow "for modification:"
         ]
    <> blankLine
    <> string (displayException err)
  pretty (SingleFileDBUnsupported dbPath) =
    "[S-1430]"
    <> line
    <> fillSep
         [ flow "ghc no longer supports single-file style package databases"
         , parens (pretty dbPath)
         , "use"
         , style Shell (flow "ghc-pkg init")
         , flow "to create the database with the correct format."
         ]
  pretty (ParsePackageInfoExceptions errs) =
    "[S-5996]"
    <> line
    <> flow errs
  pretty (CannotFindPackage pkgarg mdbPath) =
    "[S-3189]"
    <> line
    <> fillSep
         [ flow "cannot find package"
         , style Current (pkgMsg pkgarg)
         , maybe "" (\dbPath -> fillSep ["in", pretty dbPath]) mdbPath
         ]
   where
    pkgMsg (Substring pkgpat _) = fillSep ["matching", fromString pkgpat]
    pkgMsg other = fromString $ show other
  pretty (CannotParseRelFileBug relFileName) =
    bugPrettyReport "[S-9323]" $
      fillSep
        [ flow "changeDBDir': Could not parse"
        , style File (fromString relFileName)
        , flow "as a relative path to a file."
        ]
  pretty (CannotParseDirectoryWithDBug dirName) =
    bugPrettyReport "[S-7651]" $
      fillSep
        [ flow "adjustOldDatabasePath: Could not parse"
        , style Dir (fromString dirName)
        , flow "as a directory."
        ]

instance Exception GhcPkgPrettyException

-- -----------------------------------------------------------------------------
-- Do the business

-- | Enum flag representing argument type
data AsPackageArg
  = AsUnitId
  | AsDefault

-- | Represents how a package may be specified by a user on the command line.
data PackageArg
  -- | A package identifier foo-0.1, or a glob foo-*
  = Id GlobPackageIdentifier
  -- | An installed package ID foo-0.1-HASH. This is guaranteed to uniquely
  -- match a single entry in the package database.
  | IUId UnitId
  -- | A glob against the package name. The first string is the literal
  -- glob, the second is a function which returns @True@ if the argument
  -- matches.
  | Substring String (String -> Bool)

instance Show PackageArg where
  show (Id pkgid) = displayGlobPkgId pkgid
  show (IUId ipid) = display ipid
  show (Substring pkgpat _) = pkgpat

-- | Parse a string with Cabal's parser, throwing a pretty 'CannotParse' on
-- failure. The second argument names what is being parsed, for the message.
parseCheck :: Cabal.Parsec a => String -> String -> RIO env a
parseCheck str what =
  either (prettyThrowIO . CannotParse str what) pure (Cabal.eitherParsec str)

-- | Either an exact 'PackageIdentifier', or a glob for all packages
-- matching 'PackageName'.
data GlobPackageIdentifier
  = ExactPackageIdentifier MungedPackageId
  | GlobPackageIdentifier MungedPackageName

displayGlobPkgId :: GlobPackageIdentifier -> String
displayGlobPkgId (ExactPackageIdentifier pid) = display pid
displayGlobPkgId (GlobPackageIdentifier pn) = display pn ++ "-*"

-- | Parse a package identifier, treating a trailing @-*@ as a glob over all
-- versions of the named package.
readGlobPkgId :: String -> RIO env GlobPackageIdentifier
readGlobPkgId str = case stripSuffix "-*" str of
  Nothing ->
    ExactPackageIdentifier <$> parseCheck str "package identifier (exact)"
  Just str' ->
    GlobPackageIdentifier <$> parseCheck str' "package identifier (glob)"

readPackageArg :: AsPackageArg -> String -> RIO env PackageArg
readPackageArg AsUnitId str = IUId <$> parseCheck str "installed package id"
readPackageArg AsDefault str = Id <$> readGlobPkgId str

-- -----------------------------------------------------------------------------
-- Package databases

data PackageDB (mode :: GhcPkg.DbMode) = PackageDB
  { location :: !(SomeBase Dir)
    -- We only need possibly-relative package db location. The relative
    -- location is used as an identifier for the db, so it is important we do
    -- not modify it.
  , packageDbLock :: !(GhcPkg.DbOpenMode mode GhcPkg.PackageDbLock)
    -- If package db is open in read write mode, we keep its lock around for
    -- transactional updates.
  , packages :: [InstalledPackageInfo]
  }

-- | A stack of package databases. Convention: head is the topmost in the
-- stack.
type PackageDBStack = [PackageDB 'GhcPkg.DbReadOnly]

-- | Selector for picking the right package DB to modify as 'modify' changes
-- the first database that contains a specific package.
newtype DbModifySelector = ContainsPkg PackageArg

getPkgDatabases ::
     forall env. HasTerm env
  => Path Abs Dir
     -- ^ Path to the global package database.
  -> PackageArg
  -> Path Abs Dir
     -- ^ Path to the package database.
  -> RIO env
       ( PackageDBStack
         -- the real package DB stack: [global,user] ++ DBs specified on the
         -- command line with -f.
       , GhcPkg.DbOpenMode GhcPkg.DbReadWrite (PackageDB GhcPkg.DbReadWrite)
         -- which one to modify, if any
       , PackageDBStack
         -- the package DBs specified on the command line, or [global,user]
         -- otherwise. This is used as the list of package DBs for commands
         -- that just read the DB, such as 'list'.
       )
getPkgDatabases globalDb pkgarg pkgDb = do
  -- Second we determine the location of the global package config. On
  -- Windows, this is found relative to the ghc-pkg.exe binary, whereas on
  -- Unix the location is passed to the binary using the --global-package-db
  -- flag by the wrapper script.
  let sys_databases = [Abs globalDb]
  e_pkg_path <- tryIO (liftIO $ System.Environment.getEnv "GHC_PACKAGE_PATH")
  let env_stack = case nonEmpty <$> e_pkg_path of
        Left _ -> sys_databases
        Right Nothing -> []
        Right (Just path)
          | isSearchPathSeparator (NE.last path) ->
                 mapMaybe parseSomeDir (splitSearchPath (NE.init path))
              <> sys_databases
          | otherwise ->
              mapMaybe parseSomeDir (splitSearchPath $ NE.toList path)

  -- -f flags on the command line add to the database stack, unless any of
  -- them are present in the stack already.
  let final_stack = [Abs pkgDb | Abs pkgDb `notElem` env_stack] <> env_stack

  (db_stack, db_to_operate_on) <- getDatabases pkgDb final_stack

  let flag_db_stack = [ db | db <- db_stack, db.location == Abs pkgDb ]

  prettyDebugL $ flow "Db stack:" : map (pretty . (.location)) db_stack
  F.forM_ db_to_operate_on $ \db -> prettyDebugL
    [ "Modifying:"
    , pretty db.location
    ]
  prettyDebugL $
    flow "Flag db stack:" : map (pretty . (.location)) flag_db_stack

  pure (db_stack, db_to_operate_on, flag_db_stack)
 where
  getDatabases flag_db_name final_stack = do
    -- The package db we open in read write mode is the first one included in
    -- flag_db_names that contains specified package. Therefore we need to
    -- open each one in read/write mode first and decide whether it's for
    -- modification based on its contents.
    (db_stack, mto_modify) <- stateSequence Nothing
      [ \case
          to_modify@(Just _) -> (, to_modify) <$> readDatabase db_path
          Nothing -> if db_path /= Abs flag_db_name
            then (, Nothing) <$> readDatabase db_path
            else do
              let hasPkg :: PackageDB mode -> Bool
                  hasPkg = not . null . findPackage pkgarg . (.packages)

                  openRo (e::IOException) = do
                    db <- readDatabase db_path
                    if hasPkg db
                      then
                        prettyThrowIO $ CannotOpenDBForModification db_path e
                      else pure (db, Nothing)

              -- If we fail to open the database in read/write mode, we need
              -- to check if it's for modification first before throwing an
              -- error, so we attempt to open it in read only mode.
              handle openRo $ do
                db <- readParseDatabase
                  (GhcPkg.DbOpenReadWrite $ ContainsPkg pkgarg) db_path
                let ro_db = db { packageDbLock = GhcPkg.DbOpenReadOnly }
                if hasPkg db
                  then pure (ro_db, Just db)
                  else do
                    -- If the database is not for modification after all,
                    -- drop the write lock as we are already finished with
                    -- the database.
                    case db.packageDbLock of
                      GhcPkg.DbOpenReadWrite lock ->
                        liftIO $ GhcPkg.unlockPackageDb lock
                    pure (ro_db, Nothing)
      | db_path <- final_stack
      ]

    to_modify <- case mto_modify of
      Just db -> pure db
      Nothing -> cannotFindPackage pkgarg Nothing

    pure (db_stack, GhcPkg.DbOpenReadWrite to_modify)
   where
    -- Parse package db in read-only mode.
    readDatabase :: SomeBase Dir -> RIO env (PackageDB 'GhcPkg.DbReadOnly)
    readDatabase = readParseDatabase GhcPkg.DbOpenReadOnly

-- | Thread a state through a list of stateful monadic actions, collecting
-- the results. Like 'mapM' but each action also transforms the state.
stateSequence :: Monad m => s -> [s -> m (a, s)] -> m ([a], s)
stateSequence s [] = pure ([], s)
stateSequence s (m:ms) = do
  (a, s') <- m s
  (as, s'') <- stateSequence s' ms
  pure (a : as, s'')

readParseDatabase ::
     forall mode t env. HasTerm env
  => GhcPkg.DbOpenMode mode t
  -> SomeBase Dir
  -> RIO env (PackageDB mode)
readParseDatabase mode path = do
  e <- tryIO $ prjSomeBase listDir path
  case e of
    Left err
      | ioeGetErrorType err == InappropriateType -> do
         -- We provide a limited degree of backwards compatibility for
         -- old single-file style db:
         mdb <- tryReadParseOldFileStyleDatabase mode path
         case mdb of
           Just db -> pure db
           Nothing -> prettyThrowIO $ SingleFileDBUnsupported path
      | otherwise -> liftIO $ ioError err
    Right (_, fs) -> ignore_cache
     where
      confs = filter isConf fs

      isConf :: Path Abs File -> Bool
      isConf f = case fileExtension f of
        Nothing -> False
        Just ext -> ext == ".conf"

      ignore_cache :: RIO env (PackageDB mode)
      ignore_cache = do
        -- If we're opening for modification, we need to acquire a lock even
        -- if we don't open the cache now, because we are going to modify it
        -- later.
        lock <- liftIO $
          F.mapM (const $ GhcPkg.lockPackageDb (prjSomeBase toFilePath cache)) mode
        pkgs <- mapM parseSingletonPackageConf confs
        mkPackageDB pkgs lock
       where
        -- NOTE(review): the extracted text had lost the (</>) operator here
        -- (rendered as "P. relFilePackageCache"); restored as P.</>.
        cache = mapSomeBase (P.</> relFilePackageCache) path

        mkPackageDB ::
             [InstalledPackageInfo]
          -> GhcPkg.DbOpenMode mode GhcPkg.PackageDbLock
          -> RIO env (PackageDB mode)
        mkPackageDB pkgs lock = pure PackageDB
          { location = path
          , packageDbLock = lock
          , packages = pkgs
          }

parseSingletonPackageConf ::
     HasTerm env
  => Path Abs File
  -> RIO env InstalledPackageInfo
parseSingletonPackageConf file = do
  prettyDebugL
    [ flow "Reading package config:"
    , pretty file
    ]
  BS.readFile (toFilePath file) >>= fmap fst . parsePackageInfo

-- -----------------------------------------------------------------------------
-- Workaround for old single-file style package dbs

-- Single-file style package dbs have been deprecated for some time, but
-- it turns out that Cabal was using them in one place. So this code is for a
-- workaround to allow older Cabal versions to use this newer ghc.
-- We check if the file db contains just "[]" and if so, we look for a new
-- dir-style db in path.d/, ie in a dir next to the given file.
-- We cannot just replace the file with a new dir style since Cabal still
-- assumes it's a file and tries to overwrite with 'writeFile'.
-- ghc itself also cooperates in this workaround

tryReadParseOldFileStyleDatabase ::
     HasTerm env
  => GhcPkg.DbOpenMode mode t
  -> SomeBase Dir
  -> RIO env (Maybe (PackageDB mode))
tryReadParseOldFileStyleDatabase mode path = do
  -- assumes we've already established that path exists and is not a dir
  content <- liftIO $
    readFile (prjSomeBase toFilePath path) `catchIO` \_ -> pure ""
  if take 2 content == "[]"
    then do
      path_dir <- adjustOldDatabasePath path
      prettyWarnL
        [ flow "Ignoring old file-style db and trying"
        , pretty path_dir
        ]
      direxists <- prjSomeBase doesDirExist path_dir
      if direxists
        then do
          db <- readParseDatabase mode path_dir
          -- but pretend it was at the original location
          pure $ Just db { location = path }
        else do
          lock <- F.forM mode $ \_ -> do
            prjSomeBase (createDirIfMissing True) path_dir
            -- NOTE(review): the extracted text had lost the (</>) operator
            -- here; restored as P.</>.
            liftIO $ GhcPkg.lockPackageDb $
              prjSomeBase (toFilePath . (P.</> relFilePackageCache)) path_dir
          pure $ Just PackageDB
            { location = path
            , packageDbLock = lock
            , packages = []
            }
    -- if the path is not a file, or is not an empty db then we fail
    else pure Nothing

adjustOldFileStylePackageDB :: PackageDB mode -> RIO env (PackageDB mode)
adjustOldFileStylePackageDB db = do
  -- assumes we have not yet established if it's an old style or not
  mcontent <- liftIO $
    fmap Just (readFile (prjSomeBase toFilePath db.location))
      `catchIO` \_ -> pure Nothing
  case fmap (take 2) mcontent of
    -- it is an old style and empty db, so look for a dir kind in location.d/
    Just "[]" -> do
      adjustedDatabasePath <- adjustOldDatabasePath db.location
      pure db { location = adjustedDatabasePath }
    -- it is old style but not empty, we have to bail
    Just _ -> prettyThrowIO $ SingleFileDBUnsupported db.location
    -- probably not old style, carry on as normal
    Nothing -> pure db

adjustOldDatabasePath :: SomeBase Dir -> RIO env (SomeBase Dir)
adjustOldDatabasePath = prjSomeBase addDToDirName
 where
  addDToDirName dir = do
    let dirNameWithD = toFilePath dir <> ".d"
    maybe
      (prettyThrowIO $ CannotParseDirectoryWithDBug dirNameWithD)
      pure
      (parseSomeDir dirNameWithD)

parsePackageInfo :: BS.ByteString -> RIO env (InstalledPackageInfo, [String])
parsePackageInfo str = case parseInstalledPackageInfo str of
  Right (warnings, ok) -> pure (mungePackageInfo ok, ws)
   where
    ws = [ msg
         | msg <- warnings
         , not ("Unrecognized field pkgroot" `isPrefixOf` msg)
         ]
  Left err ->
    prettyThrowIO $ ParsePackageInfoExceptions (unlines (F.toList err))

mungePackageInfo :: InstalledPackageInfo -> InstalledPackageInfo
mungePackageInfo ipi = ipi

-- -----------------------------------------------------------------------------
-- Making changes to a package database

newtype DBOp = RemovePackage InstalledPackageInfo

changeNewDB ::
     HasTerm env
  => [DBOp]
  -> PackageDB 'GhcPkg.DbReadWrite
  -> RIO env ()
changeNewDB cmds new_db = do
  new_db' <- adjustOldFileStylePackageDB new_db
  prjSomeBase (createDirIfMissing True) new_db'.location
  changeDBDir' cmds new_db'

changeDBDir' ::
     HasTerm env
  => [DBOp]
  -> PackageDB 'GhcPkg.DbReadWrite
  -> RIO env ()
changeDBDir' cmds db = do
  mapM_ do_cmd cmds
  case db.packageDbLock of
    GhcPkg.DbOpenReadWrite lock -> liftIO $ GhcPkg.unlockPackageDb lock
 where
  do_cmd (RemovePackage p) = do
    let relFileConfName = display (installedUnitId p) <> ".conf"
    relFileConf <- maybe
      (prettyThrowIO $ CannotParseRelFileBug relFileConfName)
      pure
      (parseRelFile relFileConfName)
    -- NOTE(review): the extracted text had lost the (</>) operator here;
    -- restored as P.</>.
    let file = mapSomeBase (P.</> relFileConf) db.location
    prettyDebugL
      [ "Removing"
      , pretty file
      ]
    removeFileSafe file

unregisterPackages ::
     forall env. HasTerm env
  => Path Abs Dir
     -- ^ Path to the global package database.
  -> [PackageArg]
  -> Path Abs Dir
     -- ^ Path to the package database.
  -> RIO env ()
unregisterPackages globalDb pkgargs pkgDb = do
  pkgsByPkgDBs <- F.foldlM (getPkgsByPkgDBs []) [] pkgargs
  forM_ pkgsByPkgDBs unregisterPackages'
 where
  -- Update a list of 'packages by package database' for a package. Assumes
  -- that a package to be unregistered is in no more than one database.
  getPkgsByPkgDBs ::
       [(PackageDB GhcPkg.DbReadWrite, [UnitId])]
       -- ^ List of considered 'packages by package database'
    -> [(PackageDB GhcPkg.DbReadWrite, [UnitId])]
       -- ^ List of to be considered 'packages by package database'
    -> PackageArg
       -- Package to update
    -> RIO env [(PackageDB GhcPkg.DbReadWrite, [UnitId])]
  -- No more 'packages by package database' to consider? We need to try to
  -- get another package database.
  getPkgsByPkgDBs pkgsByPkgDBs [] pkgarg =
    getPkgDatabases globalDb pkgarg pkgDb >>= \case
      (_, GhcPkg.DbOpenReadWrite (db :: PackageDB GhcPkg.DbReadWrite), _) -> do
        pks <- do
          let pkgs = db.packages
              ps = findPackage pkgarg pkgs
          -- This shouldn't happen if getPkgsByPkgDBs picks the DB correctly.
          when (null ps) $ cannotFindPackage pkgarg $ Just db
          pure (map installedUnitId ps)
        let pkgsByPkgDB = (db, pks)
        pure (pkgsByPkgDB : pkgsByPkgDBs)
  -- Consider the next 'packages by package database' in the list of ones to
  -- consider.
  getPkgsByPkgDBs pkgsByPkgDBs (pkgsByPkgDB : pkgsByPkgDBs') pkgarg = do
    let (db, pks') = pkgsByPkgDB
        pkgs = db.packages
        ps = findPackage pkgarg pkgs
        pks = map installedUnitId ps
        pkgByPkgDB' = (db, pks <> pks')
    if null ps
      then
        -- Not found in the package database? Add the package database to
        -- those considered and try with the remaining package databases to
        -- consider.
        getPkgsByPkgDBs (pkgsByPkgDB : pkgsByPkgDBs) pkgsByPkgDBs' pkgarg
      else
        -- Found in the package database? Add to the list of packages to be
        -- unregistered from that package database. TO DO: Perhaps check not
        -- already in that list for better error messages when there are
        -- duplicated requests to unregister.
        pure (pkgsByPkgDBs <> (pkgByPkgDB' : pkgsByPkgDBs'))

  unregisterPackages' :: (PackageDB GhcPkg.DbReadWrite, [UnitId]) -> RIO env ()
  unregisterPackages' (db, pks) = do
    let pkgs = db.packages
        cmds = [RemovePackage pkg | pkg <- pkgs, installedUnitId pkg `elem` pks]
        new_db = db{ packages = pkgs' }
         where
          deleteFirstsBy' :: (a -> b -> Bool) -> [a] -> [b] -> [a]
          deleteFirstsBy' eq = foldl' (deleteBy' eq)

          deleteBy' :: (a -> b -> Bool) -> [a] -> b -> [a]
          deleteBy' _ [] _ = []
          deleteBy' eq (y:ys) x = if y `eq` x then ys else y : deleteBy' eq ys x

          pkgs' = deleteFirstsBy' (\p1 p2 -> installedUnitId p1 == p2) pkgs pks
    -- Use changeNewDB, rather than changeDB, to avoid duplicating
    -- updateInternalDB db cmds
    changeNewDB cmds new_db

findPackage :: PackageArg -> [InstalledPackageInfo] -> [InstalledPackageInfo]
findPackage pkgarg = filter (pkgarg `matchesPkg`)

cannotFindPackage :: PackageArg -> Maybe (PackageDB mode) -> RIO env a
cannotFindPackage pkgarg mdb =
  prettyThrowIO $ CannotFindPackage pkgarg ((.location) <$> mdb)

matches :: GlobPackageIdentifier -> MungedPackageId -> Bool
GlobPackageIdentifier pn `matches` pid' = pn == mungedName pid' ExactPackageIdentifier pid `matches` pid' = mungedName pid == mungedName pid' && ( mungedVersion pid == mungedVersion pid' || mungedVersion pid == nullVersion ) matchesPkg :: PackageArg -> InstalledPackageInfo -> Bool (Id pid) `matchesPkg` pkg = pid `matches` mungedId pkg (IUId ipid) `matchesPkg` pkg = ipid == installedUnitId pkg (Substring _ m) `matchesPkg` pkg = m (display (mungedId pkg)) -- removeFileSave doesn't throw an exceptions, if the file is already deleted removeFileSafe :: SomeBase File -> RIO env () removeFileSafe fn = do prjSomeBase removeFile fn `catchIO` \ e -> unless (isDoesNotExistError e) $ liftIO $ ioError e stack-2.15.7/src/Network/HTTP/StackClient.hs0000644000000000000000000002371714604306200016645 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} -- | -- Wrapper functions of 'Network.HTTP.Simple' and 'Network.HTTP.Client' to -- add the 'User-Agent' HTTP request header to each request. module Network.HTTP.StackClient ( httpJSON , httpLbs , httpNoBody , httpSink , withResponse , setRequestCheckStatus , setRequestMethod , setRequestHeader , setRequestHeaders , addRequestHeader , setRequestBody , getResponseHeaders , getResponseBody , getResponseStatusCode , parseRequest , getUri , path , checkResponse , parseUrlThrow , requestHeaders , getGlobalManager , applyDigestAuth , displayDigestAuthException , Request , RequestBody (RequestBodyBS, RequestBodyLBS) , Response (..) , HttpException (..) , HttpExceptionContent (..) , notFound404 , hAccept , hContentLength , hContentMD5 , method , methodPost , methodPut , formDataBody , partFileRequestBody , partBS , partLBS , setGitHubHeaders , download , redownload , requestBody , verifiedDownload , verifiedDownloadWithProgress , CheckHexDigest (..) , DownloadRequest , drRetryPolicyDefault , VerifiedDownloadException (..) , HashCheck (..) 
, mkDownloadRequest , setHashChecks , setLengthCheck , setRetryPolicy , setForceDownload ) where import Control.Monad.State ( get, put, modify ) import Data.Aeson ( FromJSON ) import qualified Data.ByteString as Strict import Data.Conduit ( ConduitM, ConduitT, awaitForever, (.|), yield, await ) import Data.Conduit.Lift ( evalStateC ) import qualified Data.Conduit.List as CL import Data.List.Extra ( (!?) ) import Data.Monoid ( Sum (..) ) import qualified Data.Text as T import Data.Time.Clock ( NominalDiffTime, diffUTCTime, getCurrentTime ) import Network.HTTP.Client ( HttpException (..), HttpExceptionContent (..), Request , RequestBody (..), Response (..), checkResponse, getUri , method, parseRequest, parseUrlThrow, path, requestBody ) import Network.HTTP.Client.MultipartFormData ( formDataBody, partBS, partFileRequestBody, partLBS ) import Network.HTTP.Client.TLS ( applyDigestAuth, displayDigestAuthException , getGlobalManager ) import Network.HTTP.Conduit ( requestHeaders ) import Network.HTTP.Download ( CheckHexDigest (..), DownloadRequest, HashCheck (..) 
, VerifiedDownloadException (..), drRetryPolicyDefault , mkDownloadRequest, modifyRequest, setForceDownload , setHashChecks, setLengthCheck, setRetryPolicy ) import qualified Network.HTTP.Download as Download import Network.HTTP.Simple ( addRequestHeader, getResponseBody, getResponseHeaders , getResponseStatusCode, setRequestBody , setRequestCheckStatus, setRequestHeader, setRequestHeaders , setRequestMethod ) import qualified Network.HTTP.Simple ( httpJSON, httpLbs, httpNoBody, httpSink, withResponse ) import Network.HTTP.Types ( hAccept, hContentLength, hContentMD5, methodPost, methodPut , notFound404 ) import Path ( Abs, File, Path ) import Prelude ( until ) import RIO import RIO.PrettyPrint ( HasTerm ) import Text.Printf ( printf ) setUserAgent :: Request -> Request setUserAgent = setRequestHeader "User-Agent" ["The Haskell Stack"] httpJSON :: (MonadIO m, FromJSON a) => Request -> m (Response a) httpJSON = Network.HTTP.Simple.httpJSON . setUserAgent httpLbs :: MonadIO m => Request -> m (Response LByteString) httpLbs = Network.HTTP.Simple.httpLbs . setUserAgent httpNoBody :: MonadIO m => Request -> m (Response ()) httpNoBody = Network.HTTP.Simple.httpNoBody . setUserAgent httpSink :: MonadUnliftIO m => Request -> (Response () -> ConduitM Strict.ByteString Void m a) -> m a httpSink = Network.HTTP.Simple.httpSink . setUserAgent withResponse :: (MonadUnliftIO m, MonadIO n) => Request -> (Response (ConduitM i Strict.ByteString n ()) -> m a) -> m a withResponse = Network.HTTP.Simple.withResponse . setUserAgent -- | Set the user-agent request header setGitHubHeaders :: Request -> Request setGitHubHeaders = setRequestHeader "Accept" ["application/vnd.github.v3+json"] -- | Download the given URL to the given location. If the file already exists, -- no download is performed. Otherwise, creates the parent directory, downloads -- to a temporary file, and on file download completion moves to the -- appropriate destination. 
--
-- Throws an exception if things go wrong
download ::
     HasTerm env
  => Request
  -> Path Abs File -- ^ destination
  -> RIO env Bool
     -- ^ Was a downloaded performed (True) or did the file already exist
     -- (False)?
download req = Download.download (setUserAgent req)

-- | Same as 'download', but will download a file a second time if it is
-- already present.
--
-- Returns 'True' if the file was downloaded, 'False' otherwise
redownload ::
     HasTerm env
  => Request
  -> Path Abs File -- ^ destination
  -> RIO env Bool
redownload req = Download.redownload (setUserAgent req)

-- | Copied and extended version of Network.HTTP.Download.download.
--
-- Has the following additional features:
-- * Verifies that response content-length header (if present)
--   matches expected length
-- * Limits the download to (close to) the expected # of bytes
-- * Verifies that the expected # bytes were downloaded (not too few)
-- * Verifies md5 if response includes content-md5 header
-- * Verifies the expected hashes
--
-- Throws VerifiedDownloadException.
-- Throws IOExceptions related to file system operations.
-- Throws HttpException.
verifiedDownload ::
     HasTerm env
  => DownloadRequest
  -> Path Abs File -- ^ destination
  -> (Maybe Integer -> ConduitM ByteString Void (RIO env) ())
     -- ^ custom hook to observe progress
  -> RIO env Bool -- ^ Whether a download was performed
verifiedDownload dr = Download.verifiedDownload withUA
 where
  withUA = modifyRequest setUserAgent dr

-- | 'verifiedDownload' with a chatty progress reporter built from the given
-- label and optional expected size.
verifiedDownloadWithProgress ::
     HasTerm env
  => DownloadRequest
  -> Path Abs File
  -> Text
  -> Maybe Int
  -> RIO env Bool
verifiedDownloadWithProgress req destpath lbl msize =
  verifiedDownload req destpath (chattyDownloadProgress lbl msize)

-- | Conduit sink that logs a sticky, periodically-updated progress line for
-- the bytes flowing through it.
chattyDownloadProgress ::
     ( HasLogFunc env
     , MonadIO m
     , MonadReader env m
     )
  => Text
  -> Maybe Int
  -> f
  -> ConduitT ByteString c m ()
chattyDownloadProgress label mtotalSize _ = do
  _ <- logSticky $ RIO.display label <> ": download has begun"
  CL.map (Sum . Strict.length)
    .| chunksOverTime 1
    .| go
 where
  go = evalStateC 0 $ awaitForever $ \(Sum chunkLen) -> do
    modify (+ chunkLen)
    soFar <- get
    logSticky $ fromString $
      case mtotalSize of
        Nothing -> progressNoTotal soFar
        Just 0 -> progressNoTotal soFar
        Just totalSize -> progressWithTotal soFar totalSize

  -- Example: ghc: 42.13 KiB downloaded...
  progressNoTotal soFar =
    printf ("%s: " <> bytesfmt "%7.2f" soFar <> " downloaded...")
           (T.unpack label)

  -- Example: ghc: 50.00 MiB / 100.00 MiB (50.00%) downloaded...
  progressWithTotal soFar total =
    printf
      ( "%s: "
     <> bytesfmt "%7.2f" soFar
     <> " / "
     <> bytesfmt "%.2f" total
     <> " (%6.2f%%) downloaded..."
      )
      (T.unpack label)
      percentage
   where
    percentage :: Double
    percentage = fromIntegral soFar / fromIntegral total * 100

-- | Given a printf format string for the decimal part and a number of
-- bytes, formats the bytes using an appropriate unit and returns the
-- formatted string.
--
-- >>> bytesfmt "%.2f" 512368
-- "500.36 KiB"
bytesfmt :: Integral a => String -> a -> String
bytesfmt formatter bs =
  printf
    (formatter <> " %s")
    (fromIntegral (signum bs) * dec :: Double)
    bytesSuffix
 where
  (dec, i) = getSuffix (abs bs)
  -- Repeatedly divide by 1024 until the value fits its unit (or we run out
  -- of suffixes), counting how many divisions were applied.
  getSuffix n = until p (\(x, y) -> (x / 1024, y + 1)) (fromIntegral n, 0)
   where
    p (n', numDivs) = n' < 1024 || numDivs == length bytesSuffixes - 1
  bytesSuffixes :: [String]
  bytesSuffixes = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]
  bytesSuffix = fromMaybe
    (error "bytesfmt: the impossible happened! Index out of range.")
    (bytesSuffixes !? i)

-- Await eagerly (collect with monoidal append),
-- but space out yields by at least the given amount of time.
-- The final yield may come sooner, and may be a superfluous mempty.
-- Note that Integer and Float literals can be turned into NominalDiffTime -- (these literals are interpreted as "seconds") chunksOverTime :: (Monoid a, Semigroup a, MonadIO m) => NominalDiffTime -> ConduitM a a m () chunksOverTime diff = do currentTime <- liftIO getCurrentTime evalStateC (currentTime, mempty) go where -- State is a tuple of: -- * the last time a yield happened (or the beginning of the sink) -- * the accumulated awaits since the last yield go = await >>= \case Nothing -> do (_, acc) <- get yield acc Just a -> do (lastTime, acc) <- get let acc' = acc <> a currentTime <- liftIO getCurrentTime if diff < diffUTCTime currentTime lastTime then put (currentTime, mempty) >> yield acc' else put (lastTime, acc') go stack-2.15.7/src/Options/Applicative/Args.hs0000644000000000000000000000255214353310533017020 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -- | Accepting arguments to be passed through to a sub-process. module Options.Applicative.Args ( argsArgument , argsOption , cmdOption ) where import Data.Attoparsec.Args ( EscapingMode (..), parseArgsFromString ) import qualified Options.Applicative as O import Stack.Prelude -- | An argument which accepts a list of arguments -- e.g. @--ghc-options="-X P.hs \"this\""@. argsArgument :: O.Mod O.ArgumentFields [String] -> O.Parser [String] argsArgument = O.argument (do s <- O.str either O.readerError pure (parseArgsFromString Escaping s)) -- | An option which accepts a list of arguments -- e.g. @--ghc-options="-X P.hs \"this\""@. argsOption :: O.Mod O.OptionFields [String] -> O.Parser [String] argsOption = O.option (do s <- O.str either O.readerError pure (parseArgsFromString Escaping s)) -- | An option which accepts a command and a list of arguments -- e.g. 
@--exec "echo hello world"@ cmdOption :: O.Mod O.OptionFields (String, [String]) -> O.Parser (String, [String]) cmdOption = O.option (do s <- O.str xs <- either O.readerError pure (parseArgsFromString Escaping s) case xs of [] -> O.readerError "Must provide a command" x:xs' -> pure (x, xs')) stack-2.15.7/src/Options/Applicative/Builder/Extra.hs0000644000000000000000000003067114620153445020604 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} -- | Extra functions for optparse-applicative. module Options.Applicative.Builder.Extra ( boolFlags , boolFlagsNoDefault , firstBoolFlagsNoDefault , firstBoolFlagsTrue , firstBoolFlagsFalse , enableDisableFlags , enableDisableFlagsNoDefault , extraHelpOption , execExtraHelp , textOption , textArgument , optionalFirst , optionalFirstTrue , optionalFirstFalse , absFileOption , relFileOption , absDirOption , relDirOption , eitherReader' , fileCompleter , fileExtCompleter , dirCompleter , PathCompleterOpts (..) , defaultPathCompleterOpts , pathCompleterWith , unescapeBashArg , showHelpText ) where import Data.List ( isPrefixOf ) import qualified Data.Text as T import Options.Applicative ( ArgumentFields, Completer, FlagFields, Mod, OptionFields , ParseError (..), Parser, ReadM, abortOption, argument , completer, eitherReader, execParser, flag', fullDesc, help , hidden, idm, info, infoOption, internal, long, metavar , mkCompleter, option, progDesc, strArgument ) import Options.Applicative.Types ( readerAsk ) import Path ( parseAbsDir, parseAbsFile, parseRelDir, parseRelFile ) import Stack.Prelude import System.Directory ( doesDirectoryExist, getCurrentDirectory , getDirectoryContents ) import System.Environment ( withArgs ) import System.FilePath ( (), isRelative, splitFileName, takeBaseName , takeExtension ) -- | Type representing exceptions thrown by functions exported by the -- "Options.Applicative.Builder.Extra" module. 
data OptionsApplicativeExtraException = FlagNotFoundBug deriving (Show, Typeable) instance Exception OptionsApplicativeExtraException where displayException FlagNotFoundBug = "Error: [S-2797]\n" ++ "The impossible happened! No valid flags found in \ \enableDisableFlagsNoDefault. Please report this bug at Stack's \ \repository." -- | Enable/disable flags for a 'Bool'. boolFlags :: Bool -- ^ Default value -> String -- ^ Flag name -> String -- ^ Help suffix -> Mod FlagFields Bool -> Parser Bool boolFlags defaultValue name helpSuffix = enableDisableFlags defaultValue True False name $ concat [ helpSuffix , " (default: " , if defaultValue then "enabled" else "disabled" , ")" ] -- | Enable/disable flags for a 'Bool', without a default case (to allow -- chaining with '<|>'). boolFlagsNoDefault :: String -- ^ Flag name -> String -- ^ Help suffix -> Mod FlagFields Bool -> Parser Bool boolFlagsNoDefault = enableDisableFlagsNoDefault True False -- | Flag with no default of True or False firstBoolFlagsNoDefault :: String -> String -> Mod FlagFields (Maybe Bool) -> Parser (First Bool) firstBoolFlagsNoDefault name helpSuffix mod' = First <$> enableDisableFlags Nothing (Just True) (Just False) name helpSuffix mod' -- | Flag with a Semigroup instance and a default of True firstBoolFlagsTrue :: String -> String -> Mod FlagFields FirstTrue -> Parser FirstTrue firstBoolFlagsTrue name helpSuffix = enableDisableFlags mempty (FirstTrue (Just True)) (FirstTrue (Just False)) name $ helpSuffix ++ " (default: enabled)" -- | Flag with a Semigroup instance and a default of False firstBoolFlagsFalse :: String -> String -> Mod FlagFields FirstFalse -> Parser FirstFalse firstBoolFlagsFalse name helpSuffix = enableDisableFlags mempty (FirstFalse (Just True)) (FirstFalse (Just False)) name $ helpSuffix ++ " (default: disabled)" -- | Enable/disable flags for any type. 
enableDisableFlags :: a -- ^ Default value -> a -- ^ Enabled value -> a -- ^ Disabled value -> String -- ^ Name -> String -- ^ Help suffix -> Mod FlagFields a -> Parser a enableDisableFlags defaultValue enabledValue disabledValue name helpSuffix mods = enableDisableFlagsNoDefault enabledValue disabledValue name helpSuffix mods <|> pure defaultValue -- | Enable/disable flags for any type, without a default (to allow chaining with '<|>') enableDisableFlagsNoDefault :: a -- ^ Enabled value -> a -- ^ Disabled value -> String -- ^ Name -> String -- ^ Help suffix -> Mod FlagFields a -> Parser a enableDisableFlagsNoDefault enabledValue disabledValue name helpSuffix mods = last <$> some ( flag' enabledValue ( hidden <> internal <> long name <> help helpSuffix <> mods ) <|> flag' disabledValue ( hidden <> internal <> long ("no-" ++ name) <> help helpSuffix <> mods ) <|> flag' disabledValue ( long ("[no-]" ++ name) <> help ("Enable/disable " ++ helpSuffix) <> mods ) ) where last xs = case reverse xs of [] -> impureThrow FlagNotFoundBug x:_ -> x -- | Show an extra help option (e.g. @--docker-help@ shows help for all -- @--docker*@ args). -- -- To actually have that help appear, use 'execExtraHelp' before executing the -- main parser. extraHelpOption :: Bool -- ^ Hide from the brief description? -> String -- ^ Program name, e.g. @"stack"@ -> String -- ^ Option glob expression, e.g. @"docker*"@ -> String -- ^ Help option name, e.g. @"docker-help"@ -> Parser (a -> a) extraHelpOption hide progName fakeName helpName = infoOption (optDesc' ++ ".") (long helpName <> hidden <> internal) <*> infoOption (optDesc' ++ ".") ( long fakeName <> help optDesc' <> (if hide then hidden <> internal else idm) ) where optDesc' = concat [ "Run '" , takeBaseName progName , " --" , helpName , "' for details." ] -- | Display extra help if extra help option passed in arguments. 
-- -- Since optparse-applicative doesn't allow an arbitrary IO action for an -- 'abortOption', this was the best way I found that doesn't require manually -- formatting the help. execExtraHelp :: [String] -- ^ Command line arguments -> String -- ^ Extra help option name, e.g. @"docker-help"@ -> Parser a -- ^ Option parser for the relevant command -> String -- ^ Option description -> IO () execExtraHelp args helpOpt parser pd = when (args == ["--" ++ helpOpt]) $ withArgs ["--help"] $ do _ <- execParser (info ( hiddenHelper <*> ( (,) <$> parser <*> some (strArgument (metavar "OTHER ARGUMENTS") :: Parser String) ) ) (fullDesc <> progDesc pd)) pure () where hiddenHelper = abortOption showHelpText (long "help" <> hidden <> internal) -- | 'option', specialized to 'Text'. textOption :: Mod OptionFields Text -> Parser Text textOption = option (T.pack <$> readerAsk) -- | 'argument', specialized to 'Text'. textArgument :: Mod ArgumentFields Text -> Parser Text textArgument = argument (T.pack <$> readerAsk) -- | Like 'optional', but returning a 'First'. optionalFirst :: Alternative f => f a -> f (First a) optionalFirst = fmap First . optional -- | Like 'optional', but returning a 'FirstTrue'. optionalFirstTrue :: Alternative f => f Bool -> f FirstTrue optionalFirstTrue = fmap FirstTrue . optional -- | Like 'optional', but returning a 'FirstFalse'. optionalFirstFalse :: Alternative f => f Bool -> f FirstFalse optionalFirstFalse = fmap FirstFalse . 
optional absFileOption :: Mod OptionFields (Path Abs File) -> Parser (Path Abs File) absFileOption mods = option (eitherReader' parseAbsFile) $ completer (pathCompleterWith defaultPathCompleterOpts { relative = False }) <> mods relFileOption :: Mod OptionFields (Path Rel File) -> Parser (Path Rel File) relFileOption mods = option (eitherReader' parseRelFile) $ completer (pathCompleterWith defaultPathCompleterOpts { absolute = False }) <> mods absDirOption :: Mod OptionFields (Path Abs Dir) -> Parser (Path Abs Dir) absDirOption mods = option (eitherReader' parseAbsDir) $ completer ( pathCompleterWith defaultPathCompleterOpts { relative = False , fileFilter = const False } ) <> mods relDirOption :: Mod OptionFields (Path Rel Dir) -> Parser (Path Rel Dir) relDirOption mods = option (eitherReader' parseRelDir) $ completer ( pathCompleterWith defaultPathCompleterOpts { absolute = False , fileFilter = const False } ) <> mods -- | Like 'eitherReader', but accepting any @'Show' e@ on the 'Left'. eitherReader' :: Show e => (String -> Either e a) -> ReadM a eitherReader' f = eitherReader (mapLeft show . f) data PathCompleterOpts = PathCompleterOpts { absolute :: Bool , relative :: Bool , rootDir :: Maybe FilePath , fileFilter :: FilePath -> Bool , dirFilter :: FilePath -> Bool } defaultPathCompleterOpts :: PathCompleterOpts defaultPathCompleterOpts = PathCompleterOpts { absolute = True , relative = True , rootDir = Nothing , fileFilter = const True , dirFilter = const True } fileCompleter :: Completer fileCompleter = pathCompleterWith defaultPathCompleterOpts fileExtCompleter :: [String] -> Completer fileExtCompleter exts = pathCompleterWith defaultPathCompleterOpts { fileFilter = (`elem` exts) . 
takeExtension } dirCompleter :: Completer dirCompleter = pathCompleterWith defaultPathCompleterOpts { fileFilter = const False } pathCompleterWith :: PathCompleterOpts -> Completer pathCompleterWith pco = mkCompleter $ \inputRaw -> do -- Unescape input, to handle single and double quotes. Note that the -- results do not need to be re-escaped, due to some fiddly bash -- magic. let input = unescapeBashArg inputRaw let (inputSearchDir0, searchPrefix) = splitFileName input inputSearchDir = if inputSearchDir0 == "./" then "" else inputSearchDir0 msearchDir <- case (isRelative inputSearchDir, pco.absolute, pco.relative) of (True, _, True) -> do rootDir <- maybe getCurrentDirectory pure pco.rootDir pure $ Just (rootDir inputSearchDir) (False, True, _) -> pure $ Just inputSearchDir _ -> pure Nothing case msearchDir of Nothing | input == "" && pco.absolute -> pure ["/"] | otherwise -> pure [] Just searchDir -> do entries <- getDirectoryContents searchDir `catch` \(_ :: IOException) -> pure [] fmap catMaybes $ forM entries $ \entry -> -- Skip . and .. unless user is typing . or .. 
if entry `elem` ["..", "."] && searchPrefix `notElem` ["..", "."] then pure Nothing else if searchPrefix `isPrefixOf` entry then do let path = searchDir entry case (pco.fileFilter path, pco.dirFilter path) of (True, True) -> pure $ Just (inputSearchDir entry) (fileAllowed, dirAllowed) -> do isDir <- doesDirectoryExist path if (if isDir then dirAllowed else fileAllowed) then pure $ Just (inputSearchDir entry) else pure Nothing else pure Nothing unescapeBashArg :: String -> String unescapeBashArg ('\'' : rest) = rest unescapeBashArg ('\"' : rest) = go rest where special = "$`\"\\\n" :: String go [] = [] go ('\\' : x : xs) | x `elem` special = x : xs | otherwise = '\\' : x : go xs go (x : xs) = x : go xs unescapeBashArg input = go input where go [] = [] go ('\\' : x : xs) = x : go xs go (x : xs) = x : go xs showHelpText :: ParseError showHelpText = ShowHelpText Nothing stack-2.15.7/src/Options/Applicative/Complicated.hs0000644000000000000000000001416314604306200020344 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Simple interface to complicated program arguments. -- -- This is a "fork" of the @optparse-simple@ package that has some workarounds -- for optparse-applicative issues that become problematic with programs that -- have many options and subcommands. Because it makes the interface more -- complex, these workarounds are not suitable for pushing upstream to -- optparse-applicative. module Options.Applicative.Complicated ( addCommand , addSubCommands , complicatedOptions , complicatedParser ) where import Control.Monad.Trans.Except ( runExceptT ) import Control.Monad.Trans.Writer ( runWriter, tell ) import Options.Applicative ( Parser, ParserFailure, ParserHelp, ParserResult (..) 
, abortOption, command, execParserPure, footer, fullDesc , handleParseResult, header, help, info, infoOption, long , metavar, noBacktrack, prefs, progDesc, showHelpOnEmpty , hsubparser ) import Options.Applicative.Builder.Extra ( showHelpText ) import Stack.Prelude import Stack.Types.AddCommand ( AddCommand ) import Stack.Types.GlobalOptsMonoid ( GlobalOptsMonoid ) import Stack.Types.Runner ( Runner ) import System.Environment ( getArgs ) -- | Generate and execute a complicated options parser. complicatedOptions :: Version -- ^ numeric version -> Maybe String -- ^ version string -> String -- ^ Hpack numeric version, as string -> String -- ^ header -> String -- ^ program description (displayed between usage and options listing in -- the help output) -> String -- ^ footer -> Parser GlobalOptsMonoid -- ^ common settings -> Maybe ( ParserFailure ParserHelp -> [String] -> IO (GlobalOptsMonoid, (RIO Runner (), GlobalOptsMonoid)) ) -- ^ optional handler for parser failure; 'handleParseResult' is called by -- default -> AddCommand -- ^ commands (use 'addCommand') -> IO (GlobalOptsMonoid, RIO Runner ()) complicatedOptions numericVersion stringVersion numericHpackVersion h pd footerStr commonParser mOnFailure commandParser = do args <- getArgs (a, (b, c)) <- let parserPrefs = prefs $ noBacktrack <> showHelpOnEmpty in case execParserPure parserPrefs parser args of -- call onFailure handler if it's present and parsing options failed Failure f | Just onFailure <- mOnFailure -> onFailure f args parseResult -> handleParseResult parseResult pure (mappend c a, b) where parser = info ( helpOption <*> versionOptions <*> complicatedParser "COMMAND|FILE" commonParser commandParser ) desc desc = fullDesc <> header h <> progDesc pd <> footer footerStr versionOptions = case stringVersion of Nothing -> versionOption (versionString numericVersion) Just s -> versionOption s <*> numericVersionOption <*> numericHpackVersionOption versionOption s = infoOption s ( long "version" <> help "Show 
version." ) numericVersionOption = infoOption (versionString numericVersion) ( long "numeric-version" <> help "Show only version number." ) numericHpackVersionOption = infoOption numericHpackVersion ( long "hpack-numeric-version" <> help "Show only Hpack's version number." ) -- | Add a command to the options dispatcher. addCommand :: String -- ^ command string -> String -- ^ title of command -> String -- ^ footer of command help -> (opts -> RIO Runner ()) -- ^ constructor to wrap up command in common data type -> (opts -> GlobalOptsMonoid -> GlobalOptsMonoid) -- ^ extend common settings from local settings -> Parser GlobalOptsMonoid -- ^ common parser -> Parser opts -- ^ command parser -> AddCommand addCommand cmd title footerStr constr extendCommon = addCommand' cmd title footerStr (\a c -> (constr a, extendCommon a c)) -- | Add a command that takes sub-commands to the options dispatcher. addSubCommands :: String -- ^ command string -> String -- ^ title of command -> String -- ^ footer of command help -> Parser GlobalOptsMonoid -- ^ common parser -> AddCommand -- ^ sub-commands (use 'addCommand') -> AddCommand addSubCommands cmd title footerStr commonParser commandParser = addCommand' cmd title footerStr (\(c1, (a, c2)) c3 -> (a, mconcat [c3, c2, c1])) commonParser (complicatedParser "COMMAND" commonParser commandParser) -- | Add a command to the options dispatcher. addCommand' :: String -- ^ command string -> String -- ^ title of command -> String -- ^ footer of command help -> (opts -> GlobalOptsMonoid -> (RIO Runner (),GlobalOptsMonoid)) -- ^ constructor to wrap up command in common data type -> Parser GlobalOptsMonoid -- ^ common parser -> Parser opts -- ^ command parser -> AddCommand addCommand' cmd title footerStr constr commonParser inner = lift $ tell $ command cmd ( info (constr <$> inner <*> commonParser) (progDesc title <> footer footerStr) ) -- | Generate a complicated options parser. 
complicatedParser :: String -- ^ metavar for the sub-command -> Parser GlobalOptsMonoid -- ^ common settings -> AddCommand -- ^ commands (use 'addCommand') -> Parser (GlobalOptsMonoid, (RIO Runner (), GlobalOptsMonoid)) complicatedParser commandMetavar commonParser commandParser = (,) <$> commonParser <*> case runWriter (runExceptT commandParser) of (Right (), m) -> hsubparser (m <> metavar commandMetavar) (Left b, _) -> pure (b, mempty) -- | Non-hidden help option. helpOption :: Parser (a -> a) helpOption = abortOption showHelpText $ long "help" <> help "Show this help text." stack-2.15.7/src/Path/CheckInstall.hs0000644000000000000000000000442714445120723015475 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Path.CheckInstall ( warnInstallSearchPathIssues ) where import Control.Monad.Extra ( (&&^), anyM ) import qualified Data.Text as T import Stack.Prelude import Stack.Types.Config ( HasConfig ) import qualified System.Directory as D import qualified System.FilePath as FP -- | Checks if the installed executable will be available on the user's PATH. -- This doesn't use @envSearchPath menv@ because it includes paths only visible -- when running in the Stack environment. warnInstallSearchPathIssues :: HasConfig env => FilePath -> [Text] -> RIO env () warnInstallSearchPathIssues destDir installed = do searchPath <- liftIO FP.getSearchPath destDirIsInPATH <- liftIO $ anyM ( \dir -> D.doesDirectoryExist dir &&^ fmap (FP.equalFilePath destDir) (D.canonicalizePath dir) ) searchPath if destDirIsInPATH then forM_ installed $ \exe -> do mexePath <- (liftIO . D.findExecutable . T.unpack) exe case mexePath of Just exePath -> do exeDir <- (liftIO . fmap FP.takeDirectory . D.canonicalizePath) exePath unless (exeDir `FP.equalFilePath` destDir) $ prettyWarnL [ flow "The" , style File . fromString . T.unpack $ exe , flow "executable found on the PATH environment variable is" , style File . 
fromString $ exePath , flow "and not the version that was just installed." , flow "This means that" , style File . fromString . T.unpack $ exe , "calls on the command line will not use this version." ] Nothing -> prettyWarnL [ flow "Installation path" , style Dir . fromString $ destDir , flow "is on the PATH but the" , style File . fromString . T.unpack $ exe , flow "executable that was just installed could not be found on \ \the PATH." ] else prettyWarnL [ flow "Installation path " , style Dir . fromString $ destDir , "not found on the PATH environment variable." ] stack-2.15.7/src/Path/Extended.hs0000644000000000000000000000117014445120723014661 0ustar0000000000000000module Path.Extended ( fileExtension , addExtension , replaceExtension ) where import Control.Monad.Catch ( MonadThrow ) import qualified Path ( addExtension, fileExtension, replaceExtension ) import Path ( File, Path ) fileExtension :: MonadThrow m => Path b File -> m String fileExtension = Path.fileExtension addExtension :: MonadThrow m => String -> Path b File -> m (Path b File) addExtension = Path.addExtension replaceExtension :: MonadThrow m => String -> Path b File -> m (Path b File) replaceExtension ext = Path.replaceExtension ('.' : ext) stack-2.15.7/src/Path/Extra.hs0000644000000000000000000001443214604306200014202 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE ViewPatterns #-} -- | Extra Path utilities. 
module Path.Extra ( toFilePathNoTrailingSep , parseCollapsedAbsDir , parseCollapsedAbsFile , concatAndCollapseAbsDir , rejectMissingFile , rejectMissingDir , pathToByteString , pathToLazyByteString , pathToText , tryGetModificationTime , forgivingResolveDir , forgivingResolveFile , forgivingResolveFile' ) where import Data.Time ( UTCTime ) import Path ( Abs, Dir, File, Path, PathException (..), parseAbsDir , parseAbsFile, toFilePath ) import Path.IO ( doesDirExist, doesFileExist, getCurrentDir , getModificationTime ) import RIO import System.IO.Error ( isDoesNotExistError ) import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.Lazy.Char8 as BSL import qualified Data.Text as T import qualified Data.Text.Encoding as T import qualified System.Directory as D import qualified System.FilePath as FP -- | Convert to FilePath but don't add a trailing slash. toFilePathNoTrailingSep :: Path loc Dir -> FilePath toFilePathNoTrailingSep = FP.dropTrailingPathSeparator . toFilePath -- | Collapse intermediate "." and ".." directories from path, then parse -- it with 'parseAbsDir'. -- (probably should be moved to the Path module) parseCollapsedAbsDir :: MonadThrow m => FilePath -> m (Path Abs Dir) parseCollapsedAbsDir = parseAbsDir . collapseFilePath -- | Collapse intermediate "." and ".." directories from path, then parse -- it with 'parseAbsFile'. -- (probably should be moved to the Path module) parseCollapsedAbsFile :: MonadThrow m => FilePath -> m (Path Abs File) parseCollapsedAbsFile = parseAbsFile . collapseFilePath -- | Add a relative FilePath to the end of a Path -- We can't parse the FilePath first because we need to account for ".." -- in the FilePath (#2895) concatAndCollapseAbsDir :: MonadThrow m => Path Abs Dir -> FilePath -> m (Path Abs Dir) concatAndCollapseAbsDir base rel = parseCollapsedAbsDir (toFilePath base FP. rel) -- | Collapse intermediate "." and ".." directories from a path. 
-- -- > collapseFilePath "./foo" == "foo" -- > collapseFilePath "/bar/../baz" == "/baz" -- > collapseFilePath "/../baz" == "/../baz" -- > collapseFilePath "parent/foo/baz/../bar" == "parent/foo/bar" -- > collapseFilePath "parent/foo/baz/../../bar" == "parent/bar" -- > collapseFilePath "parent/foo/.." == "parent" -- > collapseFilePath "/parent/foo/../../bar" == "/bar" -- -- (adapted from @Text.Pandoc.Shared@) collapseFilePath :: FilePath -> FilePath collapseFilePath = FP.joinPath . reverse . foldl' go [] . FP.splitDirectories where go rs "." = rs go r@(p:rs) ".." = case p of ".." -> "..":r (checkPathSeparator -> True) -> "..":r _ -> rs go _ (checkPathSeparator -> True) = [[FP.pathSeparator]] go rs x = x:rs checkPathSeparator [x] = FP.isPathSeparator x checkPathSeparator _ = False -- | If given file in 'Maybe' does not exist, ensure we have 'Nothing'. This -- is to be used in conjunction with 'forgivingAbsence' and -- 'resolveFile'. -- -- Previously the idiom @forgivingAbsence (resolveFile …)@ alone was used, -- which relied on 'canonicalizePath' throwing 'isDoesNotExistError' when -- path does not exist. As it turns out, this behavior is actually not -- intentional and unreliable, see -- . This was “fixed” in -- version @1.2.3.0@ of @directory@ package (now it never throws). To make -- it work with all versions, we need to use the following idiom: -- -- > forgivingAbsence (resolveFile …) >>= rejectMissingFile rejectMissingFile :: MonadIO m => Maybe (Path Abs File) -> m (Maybe (Path Abs File)) rejectMissingFile Nothing = pure Nothing rejectMissingFile (Just p) = bool Nothing (Just p) <$> doesFileExist p -- | See 'rejectMissingFile'. rejectMissingDir :: MonadIO m => Maybe (Path Abs Dir) -> m (Maybe (Path Abs Dir)) rejectMissingDir Nothing = pure Nothing rejectMissingDir (Just p) = bool Nothing (Just p) <$> doesDirExist p -- | Convert to a lazy ByteString using toFilePath and UTF8. 
pathToLazyByteString :: Path b t -> BSL.ByteString pathToLazyByteString = BSL.fromStrict . pathToByteString -- | Convert to a ByteString using toFilePath and UTF8. pathToByteString :: Path b t -> BS.ByteString pathToByteString = T.encodeUtf8 . pathToText pathToText :: Path b t -> T.Text pathToText = T.pack . toFilePath tryGetModificationTime :: MonadIO m => Path Abs File -> m (Either () UTCTime) tryGetModificationTime = liftIO . tryJust (guard . isDoesNotExistError) . getModificationTime -- | 'Path.IO.resolveDir' (@path-io@ package) throws 'InvalidAbsDir' (@path@ -- package) if the directory does not exist; this function yields 'Nothing'. forgivingResolveDir :: MonadIO m => Path Abs Dir -- ^ Base directory -> FilePath -- ^ Path to resolve -> m (Maybe (Path Abs Dir)) forgivingResolveDir b p = liftIO $ D.canonicalizePath (toFilePath b FP. p) >>= \cp -> catch (Just <$> parseAbsDir cp) ( \e -> case e of InvalidAbsDir _ -> pure Nothing _ -> throwIO e ) -- | 'Path.IO.resolveFile' (@path-io@ package) throws 'InvalidAbsFile' (@path@ -- package) if the file does not exist; this function yields 'Nothing'. forgivingResolveFile :: MonadIO m => Path Abs Dir -- ^ Base directory -> FilePath -- ^ Path to resolve -> m (Maybe (Path Abs File)) forgivingResolveFile b p = liftIO $ D.canonicalizePath (toFilePath b FP. p) >>= \cp -> catch (Just <$> parseAbsFile cp) ( \e -> case e of InvalidAbsFile _ -> pure Nothing _ -> throwIO e ) -- | 'Path.IO.resolveFile'' (@path-io@ package) throws 'InvalidAbsFile' (@path@ -- package) if the file does not exist; this function yields 'Nothing'. forgivingResolveFile' :: MonadIO m => FilePath -- ^ Path to resolve -> m (Maybe (Path Abs File)) forgivingResolveFile' p = getCurrentDir >>= flip forgivingResolveFile p stack-2.15.7/src/Path/Find.hs0000644000000000000000000000766514445120723014020 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} -- | Finding files. 
module Path.Find ( findFileUp , findDirUp , findFiles , findInParents ) where import qualified Data.List as L import Path ( Abs, Dir, File, Path, parent, toFilePath ) import Path.IO ( listDir ) import RIO import System.IO.Error ( isPermissionError ) import System.PosixCompat.Files ( getSymbolicLinkStatus, isSymbolicLink ) -- | Find the location of a file matching the given predicate. findFileUp :: (MonadIO m, MonadThrow m) => Path Abs Dir -- ^ Start here. -> (Path Abs File -> Bool) -- ^ Predicate to match the file. -> Maybe (Path Abs Dir) -- ^ Do not ascend above this directory. -> m (Maybe (Path Abs File)) -- ^ Absolute file path. findFileUp = findPathUp snd -- | Find the location of a directory matching the given predicate. findDirUp :: (MonadIO m,MonadThrow m) => Path Abs Dir -- ^ Start here. -> (Path Abs Dir -> Bool) -- ^ Predicate to match the directory. -> Maybe (Path Abs Dir) -- ^ Do not ascend above this directory. -> m (Maybe (Path Abs Dir)) -- ^ Absolute directory path. findDirUp = findPathUp fst -- | Find the location of a path matching the given predicate. findPathUp :: (MonadIO m,MonadThrow m) => (([Path Abs Dir],[Path Abs File]) -> [Path Abs t]) -- ^ Choose path type from pair. -> Path Abs Dir -- ^ Start here. -> (Path Abs t -> Bool) -- ^ Predicate to match the path. -> Maybe (Path Abs Dir) -- ^ Do not ascend above this directory. -> m (Maybe (Path Abs t)) -- ^ Absolute path. findPathUp pathType dir p upperBound = do entries <- listDir dir case L.find p (pathType entries) of Just path -> pure (Just path) Nothing | Just dir == upperBound -> pure Nothing | parent dir == dir -> pure Nothing | otherwise -> findPathUp pathType (parent dir) p upperBound -- | Find files matching predicate below a root directory. -- -- NOTE: this skips symbolic directory links, to avoid loops. This may -- not make sense for all uses of file finding. -- -- TODO: write one of these that traverses symbolic links but -- efficiently ignores loops. 
findFiles :: Path Abs Dir -- ^ Root directory to begin with. -> (Path Abs File -> Bool) -- ^ Predicate to match files. -> (Path Abs Dir -> Bool) -- ^ Predicate for which directories to traverse. -> IO [Path Abs File] -- ^ List of matching files. findFiles dir p traversep = do (dirs,files) <- catchJust (\ e -> if isPermissionError e then Just () else Nothing) (listDir dir) (\ _ -> pure ([], [])) filteredFiles <- evaluate $ force (filter p files) filteredDirs <- filterM (fmap not . isSymLink) dirs subResults <- forM filteredDirs (\entry -> if traversep entry then findFiles entry p traversep else pure []) pure (concat (filteredFiles : subResults)) isSymLink :: Path Abs t -> IO Bool isSymLink = fmap isSymbolicLink . getSymbolicLinkStatus . toFilePath -- | @findInParents f path@ applies @f@ to @path@ and its 'parent's until -- it finds a 'Just' or reaches the root directory. findInParents :: MonadIO m => (Path Abs Dir -> m (Maybe a)) -> Path Abs Dir -> m (Maybe a) findInParents f path = do mres <- f path case mres of Just res -> pure (Just res) Nothing -> do let next = parent path if next == path then pure Nothing else findInParents f next stack-2.15.7/src/Stack.hs0000644000000000000000000001225014620153445013275 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Main Stack tool entry point. 
module Stack ( main ) where import GHC.IO.Encoding ( mkTextEncoding, textEncodingName ) import Options.Applicative.Builder.Extra ( execExtraHelp ) import Stack.BuildInfo ( versionString' ) import Stack.CLI ( commandLineHandler ) import Stack.Constants ( stackProgName ) import Stack.Docker ( dockerCmdName, dockerHelpOptName ) import Stack.Nix ( nixCmdName, nixHelpOptName ) import Stack.Options.DockerParser ( dockerOptsParser ) import Stack.Options.GlobalParser ( globalOptsFromMonoid ) import Stack.Options.NixParser ( nixOptsParser ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig, withRunnerGlobal ) import Stack.Types.GlobalOpts ( GlobalOpts (..) ) import Stack.Types.Runner ( Runner ) import Stack.Types.Version ( VersionCheck (..), checkVersion, showStackVersion , stackVersion ) import System.Directory ( getCurrentDirectory ) import System.Environment ( getArgs, getProgName ) import System.IO ( hGetEncoding, hPutStrLn, hSetEncoding ) import System.Terminal ( hIsTerminalDeviceOrMinTTY ) -- | Type representing exceptions thrown by functions in the "Stack" module. data StackException = InvalidReExecVersion String String deriving (Show, Typeable) instance Exception StackException where displayException (InvalidReExecVersion expected actual) = concat [ "Error: [S-2186]\n" , "When re-executing '" , stackProgName , "' in a container, the incorrect version was found\nExpected: " , expected , "; found: " , actual ] main :: IO () main = do -- Line buffer the output by default, particularly for non-terminal runs. 
-- See https://github.com/commercialhaskell/stack/pull/360 hSetBuffering stdout LineBuffering hSetBuffering stdin LineBuffering hSetBuffering stderr LineBuffering hSetTranslit stdout hSetTranslit stderr args <- getArgs progName <- getProgName isTerminal <- hIsTerminalDeviceOrMinTTY stdout execExtraHelp args dockerHelpOptName (dockerOptsParser False) ("Only showing --" ++ dockerCmdName ++ "* options.") execExtraHelp args nixHelpOptName (nixOptsParser False) ("Only showing --" ++ nixCmdName ++ "* options.") currentDir <- getCurrentDirectory eGlobalRun <- try $ commandLineHandler currentDir progName False case eGlobalRun of Left (exitCode :: ExitCode) -> throwIO exitCode Right (globalMonoid, run) -> do global <- globalOptsFromMonoid isTerminal globalMonoid when (global.logLevel == LevelDebug) $ hPutStrLn stderr versionString' case global.reExecVersion of Just expectVersion -> do expectVersion' <- parseVersionThrowing expectVersion unless (checkVersion MatchMinor expectVersion' stackVersion) $ throwIO $ InvalidReExecVersion expectVersion showStackVersion _ -> pure () withRunnerGlobal global $ run `catches` [ Handler handleExitCode , Handler handlePrettyException , Handler handlePantryException , Handler handleSomeException ] -- | Change the character encoding of the given Handle to transliterate on -- unsupported characters instead of throwing an exception hSetTranslit :: Handle -> IO () hSetTranslit h = do menc <- hGetEncoding h case fmap textEncodingName menc of Just name | '/' `notElem` name -> do enc' <- mkTextEncoding $ name ++ "//TRANSLIT" hSetEncoding h enc' _ -> pure () -- | Handle ExitCode exceptions. handleExitCode :: ExitCode -> RIO Runner a handleExitCode = exitWith -- | Handle PrettyException exceptions. handlePrettyException :: PrettyException -> RIO Runner a handlePrettyException = handleAnyPrettyException -- | Handle (pretty) PantryException exceptions. 
handlePantryException :: PantryException -> RIO Runner a handlePantryException = handleAnyPrettyException -- | Handle any pretty exception. handleAnyPrettyException :: (Exception e, Pretty e) => e -> RIO Runner a handleAnyPrettyException e = do -- The code below loads the entire Stack configuration, when all that is -- needed are the Stack colours. A tailored approach may be better. result <- tryAny $ withConfig NoReexec $ prettyError $ pretty e case result of -- Falls back to the command line's Stack colours if there is any error in -- loading the entire Stack configuration. Left _ -> prettyError $ pretty e Right _ -> pure () exitFailure -- | Handle SomeException exceptions. This special handler stops "stack: " from -- being printed before the exception. handleSomeException :: SomeException -> RIO Runner a handleSomeException (SomeException e) = do logError $ fromString $ displayException e exitFailure stack-2.15.7/src/Stack/Build.hs0000644000000000000000000003372214620153445014343 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Build the project. 
module Stack.Build
  ( buildCmd
  , build
  , buildLocalTargets
  , loadPackage
  , mkBaseConfigOpts
  , splitObjsWarning
  ) where

import           Data.Attoparsec.Args ( EscapingMode (Escaping), parseArgs )
import           Data.List ( (\\) )
import           Data.List.Extra ( groupSort )
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Text as T
-- import qualified Distribution.PackageDescription as C
-- import Distribution.Types.Dependency ( Dependency (..), depLibraries )
import           Distribution.Version ( mkVersion )
import           RIO.NonEmpty ( nonEmpty )
import qualified RIO.NonEmpty as NE
import           Stack.Build.ConstructPlan ( constructPlan )
import           Stack.Build.Execute ( executePlan, preFetch, printPlan )
import           Stack.Build.Installed ( getInstalled, toInstallMap )
import           Stack.Build.Source ( localDependencies, projectLocalPackages )
import           Stack.Build.Target ( NeedTargets (..) )
import           Stack.FileWatch ( fileWatch, fileWatchPoll )
import           Stack.Package ( buildableExes, resolvePackage )
import           Stack.Prelude hiding ( loadPackage )
import           Stack.Runners ( ShouldReexec (..), withConfig, withEnvConfig )
import           Stack.Setup ( withNewLocalBuildTargets )
import           Stack.Types.Build
                   ( Plan (..), Task (..), TaskType (..), taskLocation
                   , taskProvides
                   )
import           Stack.Types.Build.Exception
                   ( BuildException (..), BuildPrettyException (..) )
import           Stack.Types.BuildConfig ( HasBuildConfig, stackYamlL )
import           Stack.Types.BuildOpts ( BuildOpts (..) )
import           Stack.Types.BuildOptsCLI
                   ( BuildCommand (..), BuildOptsCLI (..), FileWatchOpts (..) )
import           Stack.Types.BuildOptsMonoid
                   ( buildOptsMonoidBenchmarksL, buildOptsMonoidHaddockL
                   , buildOptsMonoidInstallExesL, buildOptsMonoidTestsL
                   )
import           Stack.Types.Compiler ( getGhcVersion )
import           Stack.Types.CompilerPaths ( cabalVersionL )
import           Stack.Types.Config ( Config (..), HasConfig (..), buildOptsL )
import           Stack.Types.ConfigureOpts ( BaseConfigOpts (..)
)
import           Stack.Types.EnvConfig
                   ( EnvConfig (..), HasEnvConfig (..), HasSourceMap
                   , actualCompilerVersionL, installationRootDeps
                   , installationRootLocal, packageDatabaseDeps
                   , packageDatabaseExtra, packageDatabaseLocal
                   )
import           Stack.Types.GlobalOpts ( globalOptsBuildOptsMonoidL )
import           Stack.Types.NamedComponent ( exeComponents )
import           Stack.Types.Package
                   ( InstallLocation (..), LocalPackage (..), Package (..)
                   , PackageConfig (..), lpFiles, lpFilesForComponents
                   )
import           Stack.Types.Platform ( HasPlatform (..) )
import           Stack.Types.Runner ( Runner, globalOptsL )
import           Stack.Types.SourceMap
                   ( SMTargets (..), SourceMap (..), Target (..) )
import           System.Terminal ( fixCodePage )

-- | Pretty exception thrown when the snapshot's Cabal version is too old for
-- Stack to work with.
newtype CabalVersionPrettyException
  = CabalVersionNotSupported Version
  deriving (Show, Typeable)

instance Pretty CabalVersionPrettyException where
  pretty (CabalVersionNotSupported cabalVer) =
    "[S-5973]"
    <> line
    <> fillSep
         [ flow "Stack does not support Cabal versions before 1.24, but \
                \version"
         , fromString $ versionString cabalVer
         , flow "was found. To fix this, consider updating the snapshot to"
         , style Shell "lts-7.0"
         , flow "or later or to"
         , style Shell "nightly-2016-05-26"
         , flow "or later."
         ]

instance Exception CabalVersionPrettyException

-- | Helper for build and install commands
buildCmd :: BuildOptsCLI -> RIO Runner ()
buildCmd opts = do
  -- -prof must be given via --profile, not as a raw GHC option; reject early.
  when (any (("-prof" `elem`) . fromRight [] . parseArgs Escaping) opts.ghcOptions) $
    prettyThrowIO GHCProfOptionInvalid
  local (over globalOptsL modifyGO) $
    case opts.fileWatch of
      FileWatchPoll -> fileWatchPoll (inner . Just)
      FileWatch -> fileWatch (inner . Just)
      NoFileWatch -> inner Nothing
 where
  inner :: Maybe (Set (Path Abs File) -> IO ()) -> RIO Runner ()
  inner setLocalFiles = withConfig YesReexec $ withEnvConfig NeedTargets opts $
    Stack.Build.build setLocalFiles
  -- Read the build command from the CLI and enable it to run
  modifyGO =
    case opts.command of
      Test -> set
        (globalOptsBuildOptsMonoidL . buildOptsMonoidTestsL)
        (Just True)
      Haddock -> set
        (globalOptsBuildOptsMonoidL . buildOptsMonoidHaddockL)
        (Just True)
      Bench -> set
        (globalOptsBuildOptsMonoidL . buildOptsMonoidBenchmarksL)
        (Just True)
      Install -> set
        (globalOptsBuildOptsMonoidL . buildOptsMonoidInstallExesL)
        (Just True)
      Build -> id -- Default case is just Build

-- | Build.
--
-- If a buildLock is passed there is an important contract here. That lock must
-- protect the snapshot, and it must be safe to unlock it if there are no
-- further modifications to the snapshot to be performed by this build.
build ::
     HasEnvConfig env
  => Maybe (Set (Path Abs File) -> IO ())
     -- ^ callback after discovering all local files
  -> RIO env ()
build msetLocalFiles = do
  mcp <- view $ configL . to (.modifyCodePage)
  ghcVersion <- view $ actualCompilerVersionL . to getGhcVersion
  fixCodePage mcp ghcVersion $ do
    bopts <- view buildOptsL
    sourceMap <- view $ envConfigL . to (.sourceMap)
    locals <- projectLocalPackages
    depsLocals <- localDependencies
    let allLocals = locals <> depsLocals
    boptsCli <- view $ envConfigL . to (.buildOptsCLI)
    -- Set local files, necessary for file watching
    stackYaml <- view stackYamlL
    for_ msetLocalFiles $ \setLocalFiles -> do
      files <-
        if boptsCli.watchAll
          then sequence [lpFiles lp | lp <- allLocals]
          else forM allLocals $ \lp -> do
            let pn = lp.package.name
            case Map.lookup pn sourceMap.targets.targets of
              Nothing -> pure Set.empty
              Just (TargetAll _) -> lpFiles lp
              Just (TargetComps components) ->
                lpFilesForComponents components lp
      liftIO $ setLocalFiles $ Set.insert stackYaml $ Set.unions files
    checkComponentsBuildable allLocals
    installMap <- toInstallMap sourceMap
    (installedMap, globalDumpPkgs, snapshotDumpPkgs, localDumpPkgs) <-
      getInstalled installMap
    baseConfigOpts <- mkBaseConfigOpts boptsCli
    plan <- constructPlan
              baseConfigOpts localDumpPkgs loadPackage sourceMap installedMap
              boptsCli.initialBuildSteps
    allowLocals <- view $ configL . to (.allowLocals)
    unless allowLocals $ case justLocals plan of
      [] -> pure ()
      localsIdents -> throwM $ LocalPackagesPresent localsIdents
    checkCabalVersion
    warnAboutSplitObjs bopts
    warnIfExecutablesWithSameNameCouldBeOverwritten locals plan
    when bopts.preFetch $ preFetch plan
    if boptsCli.dryrun
      then printPlan plan
      else executePlan
             boptsCli baseConfigOpts locals globalDumpPkgs snapshotDumpPkgs
             localDumpPkgs installedMap sourceMap.targets.targets plan

-- | Build the given local targets, reporting any exception as a 'Left' value.
buildLocalTargets ::
     HasEnvConfig env
  => NonEmpty Text
  -> RIO env (Either SomeException ())
buildLocalTargets targets =
  tryAny $ withNewLocalBuildTargets (NE.toList targets) $ build Nothing

-- | Identifiers of the tasks in the plan that install to the local database.
justLocals :: Plan -> [PackageIdentifier]
justLocals =
  map taskProvides . filter ((== Local) . taskLocation) . Map.elems . (.tasks)

-- | Throw 'CabalVersionNotSupported' if the Cabal version is older than 1.24.
checkCabalVersion :: HasEnvConfig env => RIO env ()
checkCabalVersion = do
  cabalVer <- view cabalVersionL
  when (cabalVer < mkVersion [1, 24]) $
    prettyThrowM $ CabalVersionNotSupported cabalVer

-- | See https://github.com/commercialhaskell/stack/issues/1198.
warnIfExecutablesWithSameNameCouldBeOverwritten ::
     HasTerm env
  => [LocalPackage]
  -> Plan
  -> RIO env ()
warnIfExecutablesWithSameNameCouldBeOverwritten locals plan = do
  logDebug "Checking if we are going to build multiple executables with the same name"
  forM_ (Map.toList warnings) $ \(exe, (toBuild, otherLocals)) -> do
    let exe_s
          | length toBuild > 1 = flow "several executables with the same name:"
          | otherwise = "executable"
        exesText pkgs = fillSep $ punctuate ","
          [ style
              PkgComponent
              (fromString $ packageNameString p <> ":" <> T.unpack exe)
          | p <- pkgs
          ]
    prettyWarnL $
         [ "Building"
         , exe_s
         , exesText toBuild <> "."
         ]
      <> [ fillSep
             [ flow "Only one of them will be available via"
             , style Shell "stack exec"
             , flow "or locally installed."
             ]
         | length toBuild > 1
         ]
      <> [ fillSep
             [ flow "Other executables with the same name might be overwritten:"
             , exesText otherLocals <> "."
             ]
         | not (null otherLocals)
         ]
 where
  -- Cases of several local packages having executables with the same name.
  -- The Map entries have the following form:
  --
  -- executable name: ( package names for executables that are being built
  --                  , package names for other local packages that have an
  --                    executable with the same name
  --                  )
  warnings :: Map Text ([PackageName],[PackageName])
  warnings =
    Map.mapMaybe
      (\(pkgsToBuild, localPkgs) ->
        case (pkgsToBuild, NE.toList localPkgs \\ NE.toList pkgsToBuild) of
          (_ :| [], []) ->
            -- We want to build the executable of single local package
            -- and there are no other local packages with an executable of
            -- the same name. Nothing to warn about, ignore.
            Nothing
          (_, otherLocals) ->
            -- We could be here for two reasons (or their combination):
            -- 1) We are building two or more executables with the same
            --    name that will end up overwriting each other.
            -- 2) In addition to the executable(s) that we want to build
            --    there are other local packages with an executable of the
            --    same name that might get overwritten.
            -- Both cases warrant a warning.
            Just (NE.toList pkgsToBuild, otherLocals))
      (Map.intersectionWith (,) exesToBuild localExes)
  exesToBuild :: Map Text (NonEmpty PackageName)
  exesToBuild =
    collect
      [ (exe, pkgName')
      | (pkgName', task) <- Map.toList plan.tasks
      , TTLocalMutable lp <- [task.taskType]
      , exe <- (Set.toList . exeComponents . (.components)) lp
      ]
  localExes :: Map Text (NonEmpty PackageName)
  localExes =
    collect
      [ (exe, pkg.name)
      | pkg <- map (.package) locals
      , exe <- Set.toList (buildableExes pkg)
      ]
  collect :: Ord k => [(k, v)] -> Map k (NonEmpty v)
  collect = Map.mapMaybe nonEmpty . Map.fromDistinctAscList . groupSort

-- | Warn if the experimental --split-objs flag is enabled.
warnAboutSplitObjs :: HasTerm env => BuildOpts -> RIO env ()
warnAboutSplitObjs bopts | bopts.splitObjs = prettyWarnL
  [ flow "Building with"
  , style Shell "--split-objs"
  , flow "is enabled."
  , flow splitObjsWarning
  ]
warnAboutSplitObjs _ = pure ()

splitObjsWarning :: String
splitObjsWarning =
  "Note that this feature is EXPERIMENTAL, and its behavior may be changed and \
  \improved. You will need to clean your workdirs before use. If you want to \
  \compile all dependencies with split-objs, you will need to delete the \
  \snapshot (and all snapshots that could reference that snapshot)."

-- | Get the @BaseConfigOpts@ necessary for constructing configure options
mkBaseConfigOpts ::
     (HasEnvConfig env)
  => BuildOptsCLI
  -> RIO env BaseConfigOpts
mkBaseConfigOpts buildOptsCLI = do
  buildOpts <- view buildOptsL
  snapDB <- packageDatabaseDeps
  localDB <- packageDatabaseLocal
  snapInstallRoot <- installationRootDeps
  localInstallRoot <- installationRootLocal
  extraDBs <- packageDatabaseExtra
  pure BaseConfigOpts
    { snapDB
    , localDB
    , snapInstallRoot
    , localInstallRoot
    , buildOpts
    , buildOptsCLI
    , extraDBs
    }

-- | Provide a function for loading package information from the package index
loadPackage ::
     (HasBuildConfig env, HasSourceMap env)
  => PackageLocationImmutable
  -> Map FlagName Bool
  -> [Text] -- ^ GHC options
  -> [Text] -- ^ Cabal configure options
  -> RIO env Package
loadPackage loc flags ghcOptions cabalConfigOpts = do
  compilerVersion <- view actualCompilerVersionL
  platform <- view platformL
  let pkgConfig = PackageConfig
        { enableTests = False
        , enableBenchmarks = False
        , flags
        , ghcOptions
        , cabalConfigOpts
        , compilerVersion
        , platform
        }
  resolvePackage pkgConfig <$> loadCabalFileImmutable loc

-- | Throw 'SomeTargetsNotBuildable' if any requested component is unbuildable.
checkComponentsBuildable :: MonadThrow m => [LocalPackage] -> m ()
checkComponentsBuildable lps =
  unless (null unbuildable) $
    prettyThrowM $ SomeTargetsNotBuildable unbuildable
 where
  unbuildable =
    [ (lp.package.name, c)
    | lp <- lps
    , c <- Set.toList lp.unbuildable
    ]
stack-2.15.7/src/Stack/Build/Cache.hs0000644000000000000000000004031014604306200015324 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE
OverloadedStrings #-}

-- | Cache information about previous builds
module Stack.Build.Cache
  ( tryGetBuildCache
  , tryGetConfigCache
  , tryGetCabalMod
  , tryGetSetupConfigMod
  , tryGetPackageProjectRoot
  , getInstalledExes
  , tryGetFlagCache
  , deleteCaches
  , markExeInstalled
  , markExeNotInstalled
  , writeFlagCache
  , writeBuildCache
  , writeConfigCache
  , writeCabalMod
  , writeSetupConfigMod
  , writePackageProjectRoot
  , TestStatus (..)
  , setTestStatus
  , getTestStatus
  , writePrecompiledCache
  , readPrecompiledCache
    -- Exported for testing
  , BuildCache (..)
  ) where

import           Crypto.Hash ( hashWith, SHA256 (..) )
import qualified Data.ByteArray as Mem ( convert )
import           Data.ByteString.Builder ( byteString )
import qualified Data.Map as M
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Yaml as Yaml
import           Foreign.C.Types ( CTime )
-- NOTE(review): the archive-to-text conversion stripped the angle-bracket
-- operator here; restored as (</>), which this module uses throughout.
import           Path ( (</>), filename, parent, parseRelFile )
import           Path.IO
                   ( ensureDir, ignoringAbsence, listDir, makeRelative
                   , removeFile
                   )
import           Stack.Constants ( bindirSuffix, relDirInstalledPackages )
import           Stack.Constants.Config
                   ( buildCachesDir, configCabalMod, configPackageProjectRoot
                   , configSetupConfigMod, testSuccessFile
                   )
import           Stack.Prelude
import           Stack.Storage.Project
                   ( ConfigCacheKey, configCacheKey, deactiveConfigCache
                   , loadConfigCache, saveConfigCache
                   )
import           Stack.Storage.User
                   ( PrecompiledCacheKey, loadPrecompiledCache
                   , precompiledCacheKey, savePrecompiledCache
                   )
import           Stack.Types.Build
                   ( BuildCache (..), ConfigCache, FileCacheInfo
                   , InstallLocation (..), Installed (..), PrecompiledCache (..)
                   )
import           Stack.Types.Cache ( ConfigCacheType (..) )
import           Stack.Types.CompilerPaths ( cabalVersionL )
import           Stack.Types.Config ( stackRootL )
import           Stack.Types.ConfigureOpts ( BaseConfigOpts (..), ConfigureOpts (..)
)
import           Stack.Types.EnvConfig
                   ( EnvConfig (..), HasEnvConfig (..), actualCompilerVersionL
                   , installationRootDeps, installationRootLocal
                   , platformGhcRelDir
                   )
import           Stack.Types.GhcPkgId ( ghcPkgIdString )
import           Stack.Types.Installed
                   ( InstalledLibraryInfo (..), foldOnGhcPkgId' )
import           Stack.Types.NamedComponent ( NamedComponent (..) )
import           Stack.Types.SourceMap ( smRelDir )
import           System.PosixCompat.Files
                   ( modificationTime, getFileStatus, setFileTimes )

-- | Directory containing files to mark an executable as installed
-- NOTE(review): the stripped path-append operator (</>) has been restored in
-- this section; the mangled text read e.g. "( relDirInstalledPackages)".
exeInstalledDir ::
     (HasEnvConfig env)
  => InstallLocation
  -> RIO env (Path Abs Dir)
exeInstalledDir Snap = (</> relDirInstalledPackages) <$> installationRootDeps
exeInstalledDir Local = (</> relDirInstalledPackages) <$> installationRootLocal

-- | Get all of the installed executables
getInstalledExes ::
     (HasEnvConfig env)
  => InstallLocation
  -> RIO env [PackageIdentifier]
getInstalledExes loc = do
  dir <- exeInstalledDir loc
  (_, files) <- liftIO $ handleIO (const $ pure ([], [])) $ listDir dir
  pure $
    concat $
    M.elems $
    -- If there are multiple install records (from a Stack version before
    -- https://github.com/commercialhaskell/stack/issues/2373 was fixed), then
    -- we don't know which is correct - ignore them.
    M.fromListWith (\_ _ -> []) $
    map (\x -> (pkgName x, [x])) $
    mapMaybe (parsePackageIdentifier . toFilePath . filename) files

-- | Mark the given executable as installed
markExeInstalled ::
     (HasEnvConfig env)
  => InstallLocation
  -> PackageIdentifier
  -> RIO env ()
markExeInstalled loc ident = do
  dir <- exeInstalledDir loc
  ensureDir dir
  ident' <- parseRelFile $ packageIdentifierString ident
  let fp = dir </> ident'
  -- Remove old install records for this package.
  -- TODO: This is a bit in-efficient. Put all this metadata into one file?
  installed <- getInstalledExes loc
  forM_ (filter (\x -> pkgName ident == pkgName x) installed)
        (markExeNotInstalled loc)
  -- TODO consideration for the future: list all of the executables installed,
  -- and invalidate this file in getInstalledExes if they no longer exist
  writeBinaryFileAtomic fp "Installed"

-- | Mark the given executable as not installed
markExeNotInstalled ::
     (HasEnvConfig env)
  => InstallLocation
  -> PackageIdentifier
  -> RIO env ()
markExeNotInstalled loc ident = do
  dir <- exeInstalledDir loc
  ident' <- parseRelFile $ packageIdentifierString ident
  liftIO $ ignoringAbsence (removeFile $ dir </> ident')

-- | Path of the dirtiness-cache file for the given package directory and
-- component.
buildCacheFile ::
     (HasEnvConfig env, MonadReader env m, MonadThrow m)
  => Path Abs Dir
  -> NamedComponent
  -> m (Path Abs File)
buildCacheFile dir component = do
  cachesDir <- buildCachesDir dir
  smh <- view $ envConfigL . to (.sourceMapHash)
  smDirName <- smRelDir smh
  let nonLibComponent prefix name = prefix <> "-" <> T.unpack name
  cacheFileName <- parseRelFile $ case component of
    CLib -> "lib"
    CSubLib name -> nonLibComponent "sub-lib" name
    CFlib name -> nonLibComponent "flib" name
    CExe name -> nonLibComponent "exe" name
    CTest name -> nonLibComponent "test" name
    CBench name -> nonLibComponent "bench" name
  pure $ cachesDir </> smDirName </> cacheFileName

-- | Try to read the dirtiness cache for the given package directory.
tryGetBuildCache ::
     HasEnvConfig env
  => Path Abs Dir
  -> NamedComponent
  -> RIO env (Maybe (Map FilePath FileCacheInfo))
tryGetBuildCache dir component = do
  fp <- buildCacheFile dir component
  ensureDir $ parent fp
  let decode :: MonadIO m => m BuildCache
      decode = Yaml.decodeFileThrow (toFilePath fp)
  either (const Nothing) (Just . (.times)) <$> liftIO (tryAny decode)

-- | Try to read the dirtiness cache for the given package directory.
tryGetConfigCache ::
     HasEnvConfig env
  => Path Abs Dir
  -> RIO env (Maybe ConfigCache)
tryGetConfigCache dir =
  loadConfigCache $ configCacheKey dir ConfigCacheTypeConfig

-- | Try to read the mod time of the Cabal file from the last build
tryGetCabalMod :: HasEnvConfig env => Path Abs Dir -> RIO env (Maybe CTime)
tryGetCabalMod dir = do
  fp <- toFilePath <$> configCabalMod dir
  tryGetFileMod fp

-- | Try to read the mod time of setup-config file from the last build
tryGetSetupConfigMod ::
     HasEnvConfig env
  => Path Abs Dir
  -> RIO env (Maybe CTime)
tryGetSetupConfigMod dir = do
  fp <- toFilePath <$> configSetupConfigMod dir
  tryGetFileMod fp

-- | Helper: the modification time of the given file, or Nothing if it cannot
-- be read.
tryGetFileMod :: MonadIO m => FilePath -> m (Maybe CTime)
tryGetFileMod fp = liftIO $
  either (const Nothing) (Just . modificationTime) <$>
  tryIO (getFileStatus fp)

-- | Try to read the project root from the last build of a package
tryGetPackageProjectRoot ::
     HasEnvConfig env
  => Path Abs Dir
  -> RIO env (Maybe ByteString)
tryGetPackageProjectRoot dir = do
  fp <- toFilePath <$> configPackageProjectRoot dir
  tryReadFileBinary fp

-- | Helper: the binary contents of the given file, or Nothing if it cannot be
-- read.
tryReadFileBinary :: MonadIO m => FilePath -> m (Maybe ByteString)
tryReadFileBinary fp = liftIO $
  either (const Nothing) Just <$>
  tryIO (readFileBinary fp)

-- | Write the dirtiness cache for this package's files.
writeBuildCache ::
     HasEnvConfig env
  => Path Abs Dir
  -> NamedComponent
  -> Map FilePath FileCacheInfo
  -> RIO env ()
writeBuildCache dir component times = do
  fp <- toFilePath <$> buildCacheFile dir component
  liftIO $ Yaml.encodeFile fp BuildCache { times = times }

-- | Write the dirtiness cache for this package's configuration.
writeConfigCache ::
     HasEnvConfig env
  => Path Abs Dir
  -> ConfigCache
  -> RIO env ()
writeConfigCache dir =
  saveConfigCache (configCacheKey dir ConfigCacheTypeConfig)

-- | See 'tryGetCabalMod'
writeCabalMod :: HasEnvConfig env => Path Abs Dir -> CTime -> RIO env ()
writeCabalMod dir x = do
  fp <- configCabalMod dir
  -- The file contents are irrelevant; only its modification time is read back.
  writeBinaryFileAtomic fp "Just used for its modification time"
  liftIO $ setFileTimes (toFilePath fp) x x

-- | See 'tryGetSetupConfigMod'
writeSetupConfigMod ::
     HasEnvConfig env
  => Path Abs Dir
  -> Maybe CTime
  -> RIO env ()
writeSetupConfigMod dir Nothing = do
  -- Nothing: remove any previous record (ignoring it if already absent).
  fp <- configSetupConfigMod dir
  ignoringAbsence $ removeFile fp
writeSetupConfigMod dir (Just x) = do
  fp <- configSetupConfigMod dir
  writeBinaryFileAtomic fp "Just used for its modification time"
  liftIO $ setFileTimes (toFilePath fp) x x

-- | See 'tryGetPackageProjectRoot'
writePackageProjectRoot ::
     HasEnvConfig env
  => Path Abs Dir
  -> ByteString
  -> RIO env ()
writePackageProjectRoot dir projectRoot = do
  fp <- configPackageProjectRoot dir
  writeBinaryFileAtomic fp (byteString projectRoot)

-- | Delete the caches for the project.
deleteCaches :: HasEnvConfig env => Path Abs Dir -> RIO env ()
deleteCaches dir =
  {- FIXME confirm that this is acceptable to remove
  bfp <- buildCacheFile dir
  removeFileIfExists bfp
  -}
  deactiveConfigCache $ configCacheKey dir ConfigCacheTypeConfig

-- | Cache key for the flag cache of the given installed package (library or
-- executable), rooted at the local installation root.
flagCacheKey :: (HasEnvConfig env) => Installed -> RIO env ConfigCacheKey
flagCacheKey installed = do
  installationRoot <- installationRootLocal
  case installed of
    Library _ installedInfo -> do
      let gid = installedInfo.ghcPkgId
      pure $ configCacheKey installationRoot (ConfigCacheTypeFlagLibrary gid)
    Executable ident ->
      pure $
        configCacheKey installationRoot (ConfigCacheTypeFlagExecutable ident)

-- | Loads the flag cache for the given installed extra-deps
tryGetFlagCache ::
     HasEnvConfig env
  => Installed
  -> RIO env (Maybe ConfigCache)
tryGetFlagCache gid = do
  key <- flagCacheKey gid
  loadConfigCache key

-- | Saves the flag cache for the given installed extra-deps
writeFlagCache ::
     HasEnvConfig env
  => Installed
  -> ConfigCache
  -> RIO env ()
writeFlagCache gid cache = do
  key <- flagCacheKey gid
  saveConfigCache key cache

-- Sentinel file contents recording a test suite's outcome.
successBS, failureBS, unknownBS :: IsString s => s
successBS = "success"
failureBS = "failure"
unknownBS = "unknown"

-- | Status of a test suite
data TestStatus
  = TSSuccess
  | TSFailure
  | TSUnknown

-- | Mark test suite status
setTestStatus ::
     HasEnvConfig env
  => Path Abs Dir
  -> TestStatus
  -> RIO env ()
setTestStatus dir status = do
  fp <- testSuccessFile dir
  writeBinaryFileAtomic fp $
    case status of
      TSSuccess -> successBS
      TSFailure -> failureBS
      TSUnknown -> unknownBS

-- | Check if the test suite already passed
getTestStatus ::
     HasEnvConfig env
  => Path Abs Dir
  -> RIO env TestStatus
getTestStatus dir = do
  fp <- testSuccessFile dir
  -- we could ensure the file is the right size first, but we're not expecting
  -- an attack from the user's filesystem
  eres <- tryIO (readFileBinary $ toFilePath fp)
  pure $
    case eres of
      Right bs
        | bs == successBS -> TSSuccess
        | bs == failureBS -> TSFailure
      -- Unreadable file or unrecognised contents: status unknown.
      _ -> TSUnknown

--------------------------------------
-- Precompiled Cache
--
-- Idea is simple:
-- cache information about packages built in other snapshots,
-- and then for identical matches (same flags, config options, dependencies)
-- just copy over the executables and reregister the libraries.
--------------------------------------

-- | The key containing information on the given package/configuration
-- combination. The key contains a hash of the non-directory configure
-- options for quick lookup if there's a match.
--
-- We only pay attention to non-directory options. We don't want to avoid a
-- cache hit just because it was installed in a different directory.
getPrecompiledCacheKey ::
     HasEnvConfig env
  => PackageLocationImmutable
  -> ConfigureOpts
  -> Bool
     -- ^ build haddocks
  -> RIO env PrecompiledCacheKey
getPrecompiledCacheKey loc configureOpts buildHaddocks = do
  compiler <- view actualCompilerVersionL
  cabalVersion <- view cabalVersionL
  -- The goal here is to come up with a string representing the package
  -- location which is unique. Luckily @TreeKey@s are exactly that!
  treeKey <- getPackageLocationTreeKey loc
  let packageKey = utf8BuilderToText $ display treeKey
  platformGhcDir <- platformGhcRelDir
  -- In Cabal versions 1.22 and later, the configure options contain the
  -- installed package IDs, which is what we need for a unique hash. See also
  -- issue: https://github.com/commercialhaskell/stack/issues/1103
  let optionsToHash = configureOpts.nonPathRelated
      optionsHash =
        Mem.convert $ hashWith SHA256 $ encodeUtf8 $ tshow optionsToHash
  pure $ precompiledCacheKey
    platformGhcDir compiler cabalVersion packageKey optionsHash buildHaddocks

-- | Write out information about a newly built package
-- NOTE(review): the stripped path-append operator (</>) has been restored in
-- this section (snapInstallRoot </> bindirSuffix </> name, snapDB </> ipid',
-- stackRoot </>).
writePrecompiledCache ::
     HasEnvConfig env
  => BaseConfigOpts
  -> PackageLocationImmutable
  -> ConfigureOpts
  -> Bool
     -- ^ build haddocks
  -> Installed
     -- ^ library
  -> Set Text
     -- ^ executables
  -> RIO env ()
writePrecompiledCache baseConfigOpts loc copts buildHaddocks mghcPkgId exes = do
  key <- getPrecompiledCacheKey loc copts buildHaddocks
  ec <- view envConfigL
  let stackRootRelative = makeRelative (view stackRootL ec)
  exes' <- forM (Set.toList exes) $ \exe -> do
    name <- parseRelFile $ T.unpack exe
    stackRootRelative $ baseConfigOpts.snapInstallRoot </> bindirSuffix </> name
  let installedLibToPath libName ghcPkgId pcAction = do
        libPath <- pathFromPkgId stackRootRelative ghcPkgId
        pc <- pcAction
        pure $ case libName of
          Nothing -> pc { library = Just libPath }
          _ -> pc { subLibs = libPath : pc.subLibs }
  precompiled <- foldOnGhcPkgId'
    installedLibToPath
    mghcPkgId
    ( pure PrecompiledCache
        { library = Nothing
        , subLibs = []
        , exes = exes'
        }
    )
  savePrecompiledCache key precompiled
  -- reuse precompiled cache with haddocks also in case when haddocks are
  -- not required
  when buildHaddocks $ do
    key' <- getPrecompiledCacheKey loc copts False
    savePrecompiledCache key' precompiled
 where
  pathFromPkgId stackRootRelative ipid = do
    ipid' <- parseRelFile $ ghcPkgIdString ipid ++ ".conf"
    stackRootRelative $ baseConfigOpts.snapDB </> ipid'

-- | Check the cache for a precompiled package matching the given
-- configuration.
readPrecompiledCache ::
     forall env. HasEnvConfig env
  => PackageLocationImmutable
     -- ^ target package
  -> ConfigureOpts
  -> Bool
     -- ^ build haddocks
  -> RIO env (Maybe (PrecompiledCache Abs))
readPrecompiledCache loc copts buildHaddocks = do
  key <- getPrecompiledCacheKey loc copts buildHaddocks
  mcache <- loadPrecompiledCache key
  maybe (pure Nothing) (fmap Just . mkAbs) mcache
 where
  -- Since commit ed9ccc08f327bad68dd2d09a1851ce0d055c0422, pcLibrary paths
  -- are stored as relative to the Stack root. Therefore, we need to prepend
  -- the Stack root when checking that the file exists. For the older cached
  -- paths, the file will contain an absolute path, which will make
  -- `stackRoot </>` a no-op.
  mkAbs :: PrecompiledCache Rel -> RIO env (PrecompiledCache Abs)
  mkAbs pc0 = do
    stackRoot <- view stackRootL
    let mkAbs' = (stackRoot </>)
    pure PrecompiledCache
      { library = mkAbs' <$> pc0.library
      , subLibs = mkAbs' <$> pc0.subLibs
      , exes = mkAbs' <$> pc0.exes
      }
stack-2.15.7/src/Stack/Build/ConstructPlan.hs0000644000000000000000000014135514620153474017142 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}

-- | Construct a @Plan@ for how to build
module Stack.Build.ConstructPlan
  ( constructPlan
  ) where

import           Control.Monad.Trans.Maybe ( MaybeT (..) )
import qualified Data.Map.Merge.Strict as Map
import qualified Data.Map.Strict as Map
import           Data.Monoid.Map ( MonoidMap(..)
) import qualified Data.Set as Set import qualified Data.Text as T import Distribution.Types.BuildType ( BuildType (Configure) ) import Distribution.Types.PackageName ( mkPackageName ) import Path ( parent ) import qualified RIO.NonEmpty as NE import RIO.Process ( findExecutable ) import RIO.State ( State, StateT (..), execState, get, modify, modify', put ) import RIO.Writer ( WriterT (..), pass, tell ) import Stack.Build.Cache ( tryGetFlagCache ) import Stack.Build.Haddock ( shouldHaddockDeps ) import Stack.Build.Source ( loadLocalPackage ) import Stack.Constants ( compilerOptionsCabalFlag ) import Stack.Package ( applyForceCustomBuild, buildableExes, packageUnknownTools , processPackageDepsToList ) import Stack.Prelude hiding ( loadPackage ) import Stack.SourceMap ( getPLIVersion, mkProjectPackage ) import Stack.Types.Build ( CachePkgSrc (..), ConfigCache (..), Plan (..), Task (..) , TaskConfigOpts (..), TaskType (..) , installLocationIsMutable, taskIsTarget, taskLocation , taskProvides, taskTargetIsMutable, toCachePkgSrc ) import Stack.Types.Build.ConstructPlan ( AddDepRes (..), CombinedMap, Ctx (..), M, PackageInfo (..) , ToolWarning(..), UnregisterState (..), W (..) , adrHasLibrary, adrVersion, toTask ) import Stack.Types.Build.Exception ( BadDependency (..), BuildException (..) , BuildPrettyException (..), ConstructPlanException (..) ) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..), stackYamlL ) import Stack.Types.BuildOpts ( BuildOpts (..) ) import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..), BuildSubset (..) ) import Stack.Types.CompCollection ( collectionMember ) import Stack.Types.Compiler ( WhichCompiler (..) ) import Stack.Types.CompilerPaths ( CompilerPaths (..), HasCompiler (..) ) import Stack.Types.Config ( Config (..), HasConfig (..), stackRootL ) import Stack.Types.ConfigureOpts ( BaseConfigOpts (..), ConfigureOpts (..) ) import qualified Stack.Types.ConfigureOpts as ConfigureOpts import Stack.Types.Curator ( Curator (..) 
) import Stack.Types.Dependency ( DepValue (..), isDepTypeLibrary ) import Stack.Types.DumpPackage ( DumpPackage (..), dpParentLibIdent ) import Stack.Types.EnvConfig ( EnvConfig (..), HasEnvConfig (..) ) import Stack.Types.EnvSettings ( EnvSettings (..), minimalEnvSettings ) import Stack.Types.GhcPkgId ( GhcPkgId ) import Stack.Types.GlobalOpts ( GlobalOpts (..) ) import Stack.Types.Installed ( InstallLocation (..), Installed (..), InstalledMap , installedVersion ) import Stack.Types.IsMutable ( IsMutable (..) ) import Stack.Types.NamedComponent ( exeComponents, renderComponent ) import Stack.Types.Package ( ExeName (..), LocalPackage (..), Package (..) , PackageSource (..), installedMapGhcPkgId , packageIdentifier, psVersion, runMemoizedWith ) import Stack.Types.ProjectConfig ( isPCGlobalProject ) import Stack.Types.Runner ( HasRunner (..), globalOptsL ) import Stack.Types.SourceMap ( CommonPackage (..), DepPackage (..), FromSnapshot (..) , GlobalPackage (..), SMTargets (..), SourceMap (..) ) import Stack.Types.Version ( VersionRange, latestApplicableVersion, versionRangeText , withinRange ) import System.Environment ( lookupEnv ) -- | Computes a build plan. This means figuring out which build 'Task's to take, -- and the interdependencies among the build 'Task's. In particular: -- -- 1) It determines which packages need to be built, based on the transitive -- deps of the current targets. For local packages, this is indicated by the -- 'lpWanted' boolean. For extra packages to build, this comes from the -- @extraToBuild0@ argument of type @Set PackageName@. These are usually -- packages that have been specified on the command line. -- -- 2) It will only rebuild an upstream package if it isn't present in the -- 'InstalledMap', or if some of its dependencies have changed. -- -- 3) It will only rebuild a local package if its files are dirty or some of its -- dependencies have changed. constructPlan :: forall env. 
HasEnvConfig env => BaseConfigOpts -> [DumpPackage] -- ^ locally registered -> ( PackageLocationImmutable -> Map FlagName Bool -> [Text] -- ^ GHC options -> [Text] -- ^ Cabal configure options -> RIO EnvConfig Package ) -- ^ load upstream package -> SourceMap -> InstalledMap -> Bool -- ^ Only include initial build steps required for GHCi? -> RIO env Plan constructPlan baseConfigOpts0 localDumpPkgs loadPackage0 sourceMap installedMap initialBuildSteps = do logDebug "Constructing the build plan" when hasBaseInDeps $ prettyWarn $ fillSep [ flow "You are trying to upgrade or downgrade the" , style Current "base" , flow "package, which is almost certainly not what you really \ \want. Please, consider using another GHC version if you \ \need a certain version of" , style Current "base" <> "," , flow "or removing" , style Current "base" , flow "as an" , style Shell "extra-deps" <> "." , flow "For further information, see" , style Url "https://github.com/commercialhaskell/stack/issues/3940" <> "." ] <> line econfig <- view envConfigL globalCabalVersion <- view $ compilerPathsL . to (.cabalVersion) sources <- getSources globalCabalVersion curator <- view $ buildConfigL . to (.curator) pathEnvVar <- liftIO $ maybe mempty T.pack <$> lookupEnv "PATH" let ctx = mkCtx econfig globalCabalVersion sources curator pathEnvVar targetPackageNames = Map.keys sourceMap.targets.targets -- Ignore the result of 'getCachedDepOrAddDep'. onTarget = void . getCachedDepOrAddDep inner = mapM_ onTarget targetPackageNames (((), W efinals installExes dirtyReason warnings parents), m) <- liftIO $ runRIO ctx (runStateT (runWriterT inner) Map.empty) -- Report any warnings mapM_ prettyWarn (warnings []) -- Separate out errors let (errlibs, adrs) = partitionEithers $ map toEither $ Map.toList m (errfinals, finals) = partitionEithers $ map toEither $ Map.toList efinals errs = errlibs ++ errfinals if null errs then do let tasks = Map.fromList $ mapMaybe (toMaybe . 
second toTask) adrs takeSubset Plan { tasks = tasks , finals = Map.fromList finals , unregisterLocal = mkUnregisterLocal tasks dirtyReason localDumpPkgs initialBuildSteps , installExes = if baseConfigOpts0.buildOpts.installExes || baseConfigOpts0.buildOpts.installCompilerTool then installExes else Map.empty } else do stackYaml <- view stackYamlL stackRoot <- view stackRootL isImplicitGlobal <- view $ configL . to (isPCGlobalProject . (.project)) prettyThrowM $ ConstructPlanFailed errs stackYaml stackRoot isImplicitGlobal parents ctx.wanted prunedGlobalDeps where sourceProject = sourceMap.project sourceDeps = sourceMap.deps hasBaseInDeps = Map.member (mkPackageName "base") sourceDeps mkCtx ctxEnvConfig globalCabalVersion sources curator pathEnvVar = Ctx { baseConfigOpts = baseConfigOpts0 , loadPackage = \w x y z -> runRIO ctxEnvConfig $ applyForceCustomBuild globalCabalVersion <$> loadPackage0 w x y z , combinedMap = combineMap sources installedMap , ctxEnvConfig , callStack = [] , wanted = Map.keysSet sourceMap.targets.targets , localNames = Map.keysSet sourceProject , curator , pathEnvVar } toEither :: (k, Either e v) -> Either e (k, v) toEither (_, Left e) = Left e toEither (k, Right v) = Right (k, v) toMaybe :: (k, Maybe v) -> Maybe (k, v) toMaybe (_, Nothing) = Nothing toMaybe (k, Just v) = Just (k, v) takeSubset :: Plan -> RIO env Plan takeSubset = case baseConfigOpts0.buildOptsCLI.buildSubset of BSAll -> pure BSOnlySnapshot -> stripLocals BSOnlyDependencies -> stripNonDeps BSOnlyLocals -> errorOnSnapshot -- | Strip out anything from the 'Plan' intended for the local database. 
stripLocals :: Plan -> RIO env Plan stripLocals plan = pure plan { tasks = Map.filter checkTask plan.tasks , finals = Map.empty , unregisterLocal = Map.empty , installExes = Map.filter (/= Local) plan.installExes } where checkTask task = taskLocation task == Snap stripNonDeps :: Plan -> RIO env Plan stripNonDeps plan = pure plan { tasks = Map.filter checkTask plan.tasks , finals = Map.empty , installExes = Map.empty -- TODO maybe don't disable this? } where deps = Map.keysSet sourceDeps checkTask task = taskProvides task `Set.member` missingForDeps providesDep task = pkgName (taskProvides task) `Set.member` deps tasks = Map.elems plan.tasks missing = Map.fromList $ map (taskProvides &&& (.configOpts.missing)) tasks missingForDeps = flip execState mempty $ for_ tasks $ \task -> when (providesDep task) $ collectMissing mempty (taskProvides task) collectMissing dependents pid = do when (pid `elem` dependents) $ impureThrow $ TaskCycleBug pid modify' (<> Set.singleton pid) mapM_ (collectMissing (pid:dependents)) (fromMaybe mempty $ Map.lookup pid missing) -- | Throw an exception if there are any snapshot packages in the plan. errorOnSnapshot :: Plan -> RIO env Plan errorOnSnapshot plan@(Plan tasks _finals _unregister installExes) = do let snapTasks = Map.keys $ Map.filter (\t -> taskLocation t == Snap) tasks snapExes = Map.keys $ Map.filter (== Snap) installExes unless (null snapTasks && null snapExes) $ prettyThrowIO $ NotOnlyLocal snapTasks snapExes pure plan prunedGlobalDeps :: Map PackageName [PackageName] prunedGlobalDeps = flip Map.mapMaybe sourceMap.globalPkgs $ \case ReplacedGlobalPackage deps -> let pruned = filter (not . 
inSourceMap) deps in if null pruned then Nothing else Just pruned GlobalPackage _ -> Nothing where inSourceMap pname = pname `Map.member` sourceDeps || pname `Map.member` sourceProject getSources :: Version -> RIO env (Map PackageName PackageSource) getSources globalCabalVersion = do let loadLocalPackage' pp = do lp <- loadLocalPackage pp let lpPackage' = applyForceCustomBuild globalCabalVersion lp.package pure lp { package = lpPackage' } pPackages <- for sourceProject $ \pp -> do lp <- loadLocalPackage' pp pure $ PSFilePath lp bopts <- view $ configL . to (.build) deps <- for sourceDeps $ \dp -> case dp.location of PLImmutable loc -> pure $ PSRemote loc (getPLIVersion loc) dp.fromSnapshot dp.depCommon PLMutable dir -> do pp <- mkProjectPackage YesPrintWarnings dir (shouldHaddockDeps bopts) lp <- loadLocalPackage' pp pure $ PSFilePath lp pure $ pPackages <> deps -- | Determine which packages to unregister based on the given tasks and -- already registered local packages. mkUnregisterLocal :: Map PackageName Task -- ^ Tasks -> Map PackageName Text -- ^ Reasons why packages are dirty and must be rebuilt -> [DumpPackage] -- ^ Local package database dump -> Bool -- ^ If true, we're doing a special initialBuildSteps build - don't -- unregister target packages. -> Map GhcPkgId (PackageIdentifier, Text) mkUnregisterLocal tasks dirtyReason localDumpPkgs initialBuildSteps = -- We'll take multiple passes through the local packages. This will allow us -- to detect that a package should be unregistered, as well as all packages -- directly or transitively depending on it. loop Map.empty localDumpPkgs where loop :: Map GhcPkgId (PackageIdentifier, Text) -- ^ Current local packages to unregister. -> [DumpPackage] -- ^ Current local packages to keep. -> Map GhcPkgId (PackageIdentifier, Text) -- ^ Revised local packages to unregister. 
loop toUnregister keep -- If any new packages were added to the unregister Map, we need to loop -- through the remaining packages again to detect if a transitive dependency -- is being unregistered. | us.anyAdded = loop us.toUnregister us.toKeep -- Nothing added, so we've already caught them all. Return the Map we've -- already calculated. | otherwise = us.toUnregister where -- Run the unregister checking function on all packages we currently think -- we'll be keeping. us = execState (mapM_ go keep) initialUnregisterState initialUnregisterState = UnregisterState { toUnregister , toKeep = [] , anyAdded = False } go :: DumpPackage -> State UnregisterState () go dp = do us <- get case maybeUnregisterReason us.toUnregister ident mParentLibId deps of -- Not unregistering, add it to the keep list. Nothing -> put us { toKeep = dp : us.toKeep } -- Unregistering, add it to the unregister Map; and indicate that a -- package was in fact added to the unregister Map, so we loop again. Just reason -> put us { toUnregister = Map.insert gid (ident, reason) us.toUnregister , anyAdded = True } where gid = dp.ghcPkgId ident = dp.packageIdent mParentLibId = dpParentLibIdent dp deps = dp.depends maybeUnregisterReason :: Map GhcPkgId (PackageIdentifier, Text) -- ^ Current local packages to unregister. -> PackageIdentifier -- ^ Package identifier. -> Maybe PackageIdentifier -- ^ If package for sub library, package identifier of the parent. -> [GhcPkgId] -- ^ Dependencies of the package. -> Maybe Text -- ^ If to be unregistered, the reason for doing so. maybeUnregisterReason toUnregister ident mParentLibId deps -- If the package is not for a sub library, then it is directly relevant. If -- it is, then the relevant package is the parent. If we are planning on -- running a task on the relevant package, then the package must be -- unregistered, unless it is a target and an initial-build-steps build is -- being done. 
| Just task <- Map.lookup relevantPkgName tasks = if initialBuildSteps && taskIsTarget task && taskProvides task == relevantPkgId then Nothing else Just $ fromMaybe "" $ Map.lookup relevantPkgName dirtyReason -- Check if a dependency is going to be unregistered | (dep, _):_ <- mapMaybe (`Map.lookup` toUnregister) deps = Just $ "Dependency being unregistered: " <> T.pack (packageIdentifierString dep) -- None of the above, keep it! | otherwise = Nothing where -- If the package is not for a sub library, then the relevant package -- identifier is that of the package. If it is, then the relevant package -- identifier is that of the parent. relevantPkgId :: PackageIdentifier relevantPkgId = fromMaybe ident mParentLibId -- If the package is not for a sub library, then the relevant package name -- is that of the package. If it is, then the relevant package name is -- that of the parent. relevantPkgName :: PackageName relevantPkgName = maybe (pkgName ident) pkgName mParentLibId -- | Given a 'LocalPackage' and its 'lpTestBench', adds a 'Task' for running its -- tests and benchmarks. -- -- If @isAllInOne@ is 'True', then this means that the build step will also -- build the tests. Otherwise, this indicates that there's a cyclic dependency -- and an additional build step needs to be done. -- -- This will also add all the deps needed to build the tests / benchmarks. If -- @isAllInOne@ is 'True' (the common case), then all of these should have -- already been taken care of as part of the build step. addFinal :: LocalPackage -> Package -> Bool -- ^ Will the build step also build the tests? -> Bool -- ^ Should Haddock documentation be built? 
-> M () addFinal lp package isAllInOne buildHaddocks = do depsRes <- addPackageDeps package res <- case depsRes of Left e -> pure $ Left e Right (missing, present, _minLoc) -> do ctx <- ask pure $ Right Task { configOpts = TaskConfigOpts missing $ \missing' -> let allDeps = Map.union present missing' in ConfigureOpts.configureOpts (view envConfigL ctx) ctx.baseConfigOpts allDeps True -- local Mutable package , buildHaddocks , present , taskType = TTLocalMutable lp , allInOne = isAllInOne , cachePkgSrc = CacheSrcLocal (toFilePath (parent lp.cabalFP)) , buildTypeConfig = packageBuildTypeConfig package } tell mempty { wFinals = Map.singleton package.name res } -- | Given a 'PackageName', adds all of the build tasks to build the package, if -- needed. First checks if the package name is in the library map. -- -- 'constructPlan' invokes this on all the target packages, setting -- @treatAsDep'@ to False, because those packages are direct build targets. -- 'addPackageDeps' invokes this while recursing into the dependencies of a -- package. As such, it sets @treatAsDep'@ to True, forcing this package to be -- marked as a dependency, even if it is directly wanted. This makes sense - if -- we left out packages that are deps, it would break the --only-dependencies -- build plan. getCachedDepOrAddDep :: PackageName -> M (Either ConstructPlanException AddDepRes) getCachedDepOrAddDep name = do libMap <- get case Map.lookup name libMap of Just res -> do logDebugPlanS "getCachedDepOrAddDep" $ "Using cached result for " <> fromPackageName name <> ": " <> fromString (show res) pure res Nothing -> checkCallStackAndAddDep name -- | Given a 'PackageName', known not to be in the library map, adds all of the -- build tasks to build the package. First checks that the package name is not -- already in the call stack. 
checkCallStackAndAddDep :: PackageName -> M (Either ConstructPlanException AddDepRes) checkCallStackAndAddDep name = do ctx <- ask res <- if name `elem` ctx.callStack then do logDebugPlanS "checkCallStackAndAddDep" $ "Detected cycle " <> fromPackageName name <> ": " <> fromString (show $ map packageNameString ctx.callStack) pure $ Left $ DependencyCycleDetected $ name : ctx.callStack else case Map.lookup name ctx.combinedMap of -- TODO look up in the package index and see if there's a -- recommendation available Nothing -> do logDebugPlanS "checkCallStackAndAddDep" $ "No package info for " <> fromPackageName name <> "." pure $ Left $ UnknownPackage name Just packageInfo -> -- Add the current package name to the head of the call stack. local (\ctx' -> ctx' { callStack = name : ctx'.callStack }) $ addDep name packageInfo updateLibMap name res pure res -- | Given a 'PackageName' and its 'PackageInfo' from the combined map, adds all -- of the build tasks to build the package. Assumes that the head of the call -- stack is the current package name. addDep :: PackageName -> PackageInfo -> M (Either ConstructPlanException AddDepRes) addDep name packageInfo = do logDebugPlanS "addDep" $ "Package info for " <> fromPackageName name <> ": " <> fromString (show packageInfo) case packageInfo of PIOnlyInstalled loc installed -> do -- FIXME Slightly hacky, no flags since they likely won't affect -- executable names. This code does not feel right. let version = installedVersion installed askPkgLoc = liftRIO $ do mrev <- getLatestHackageRevision YesRequireHackageIndex name version case mrev of Nothing -> do -- This could happen for GHC boot libraries missing from -- Hackage. cs <- asks (NE.nonEmpty . (.callStack)) cs' <- maybe (throwIO CallStackEmptyBug) (pure . 
NE.tail) cs prettyWarnL $ flow "No latest package revision found for" : style Current (fromPackageName name) <> "," : flow "dependency callstack:" : mkNarrativeList Nothing False (map fromPackageName cs' :: [StyleDoc]) pure Nothing Just (_rev, cfKey, treeKey) -> pure $ Just $ PLIHackage (PackageIdentifier name version) cfKey treeKey tellExecutablesUpstream name askPkgLoc loc Map.empty pure $ Right $ ADRFound loc installed PIOnlySource ps -> do tellExecutables name ps installPackage name ps Nothing PIBoth ps installed -> do tellExecutables name ps installPackage name ps (Just installed) -- | For given 'PackageName' and 'PackageSource' values, adds relevant -- executables to the collected output. tellExecutables :: PackageName -> PackageSource -> M () tellExecutables _name (PSFilePath lp) | lp.wanted = tellExecutablesPackage Local lp.package | otherwise = pure () -- Ignores ghcOptions because they don't matter for enumerating executables. tellExecutables name (PSRemote pkgloc _version _fromSnapshot cp) = tellExecutablesUpstream name (pure $ Just pkgloc) Snap cp.flags -- | For a given 'PackageName' value, known to be immutable, adds relevant -- executables to the collected output. tellExecutablesUpstream :: PackageName -> M (Maybe PackageLocationImmutable) -> InstallLocation -> Map FlagName Bool -> M () tellExecutablesUpstream name retrievePkgLoc loc flags = do ctx <- ask when (name `Set.member` ctx.wanted) $ do mPkgLoc <- retrievePkgLoc forM_ mPkgLoc $ \pkgLoc -> do p <- ctx.loadPackage pkgLoc flags [] [] tellExecutablesPackage loc p -- | For given 'InstallLocation' and 'Package' values, adds relevant executables -- to the collected output. In most cases, the relevant executables are all the -- executables of the package. If the package is a wanted local one, the -- executables are those executables that are wanted executables. 
tellExecutablesPackage :: InstallLocation -> Package -> M () tellExecutablesPackage loc p = do cm <- asks (.combinedMap) -- Determine which components are enabled so we know which ones to copy let myComps = case Map.lookup p.name cm of Nothing -> assert False Set.empty Just (PIOnlyInstalled _ _) -> Set.empty Just (PIOnlySource ps) -> goSource ps Just (PIBoth ps _) -> goSource ps goSource (PSFilePath lp) | lp.wanted = exeComponents lp.components | otherwise = Set.empty goSource PSRemote{} = Set.empty tell mempty { wInstall = Map.fromList $ map (, loc) $ Set.toList $ filterComps myComps $ buildableExes p } where filterComps myComps x | Set.null myComps = x | otherwise = Set.intersection x myComps -- | Given a 'PackageSource' and perhaps an 'Installed' value, adds build -- 'Task's for the package and its dependencies. installPackage :: PackageName -> PackageSource -> Maybe Installed -> M (Either ConstructPlanException AddDepRes) installPackage name ps minstalled = do ctx <- ask case ps of PSRemote pkgLoc _version _fromSnapshot cp -> do logDebugPlanS "installPackage" $ "Doing all-in-one build for upstream package " <> fromPackageName name <> "." package <- ctx.loadPackage pkgLoc cp.flags cp.ghcOptions cp.cabalConfigOpts resolveDepsAndInstall True cp.buildHaddocks ps package minstalled PSFilePath lp -> do case lp.testBench of Nothing -> do logDebugPlanS "installPackage" $ "No test or bench component for " <> fromPackageName name <> " so doing an all-in-one build." resolveDepsAndInstall True lp.buildHaddocks ps lp.package minstalled Just tb -> do -- Attempt to find a plan which performs an all-in-one build. Ignore -- the writer action + reset the state if it fails. libMap <- get res <- pass $ do res <- addPackageDeps tb let writerFunc w = case res of Left _ -> mempty _ -> w pure (res, writerFunc) case res of Right deps -> do logDebugPlanS "installPackage" $ "For " <> fromPackageName name <> ", successfully added package deps." 
-- in curator builds we can't do all-in-one build as -- test/benchmark failure could prevent library from being -- available to its dependencies but when it's already available -- it's OK to do that splitRequired <- expectedTestOrBenchFailures <$> asks (.curator) let isAllInOne = not splitRequired adr <- installPackageGivenDeps isAllInOne lp.buildHaddocks ps tb minstalled deps let finalAllInOne = case adr of ADRToInstall _ | splitRequired -> False _ -> True -- FIXME: this redundantly adds the deps (but they'll all just -- get looked up in the map) addFinal lp tb finalAllInOne False pure $ Right adr Left _ -> do -- Reset the state to how it was before attempting to find an -- all-in-one build plan. logDebugPlanS "installPackage" $ "Before trying cyclic plan, resetting lib result map to: " <> fromString (show libMap) put libMap -- Otherwise, fall back on building the tests / benchmarks in a -- separate step. res' <- resolveDepsAndInstall False lp.buildHaddocks ps lp.package minstalled when (isRight res') $ do -- Insert it into the map so that it's available for addFinal. updateLibMap name res' addFinal lp tb False False pure res' where expectedTestOrBenchFailures maybeCurator = fromMaybe False $ do curator <- maybeCurator pure $ Set.member name curator.expectTestFailure || Set.member name curator.expectBenchmarkFailure resolveDepsAndInstall :: Bool -- ^ will the build step also build any tests? -> Bool -- ^ Should Haddock documentation be built? -> PackageSource -> Package -> Maybe Installed -> M (Either ConstructPlanException AddDepRes) resolveDepsAndInstall isAllInOne buildHaddocks ps package minstalled = do res <- addPackageDeps package case res of Left err -> pure $ Left err Right deps -> Right <$> installPackageGivenDeps isAllInOne buildHaddocks ps package minstalled deps -- | Checks if we need to install the given 'Package', given the results -- of 'addPackageDeps'. 
If dependencies are missing, the package is dirty, or -- it's not installed, then it needs to be installed. installPackageGivenDeps :: Bool -- ^ will the build step also build any tests? -> Bool -- ^ Should Haddock documentation be built? -> PackageSource -> Package -> Maybe Installed -> ( Set PackageIdentifier , Map PackageIdentifier GhcPkgId , IsMutable ) -> M AddDepRes installPackageGivenDeps isAllInOne buildHaddocks ps package minstalled (missing, present, minMutable) = do let name = package.name ctx <- ask mRightVersionInstalled <- case (minstalled, Set.null missing) of (Just installed, True) -> do shouldInstall <- checkDirtiness ps installed package present buildHaddocks pure $ if shouldInstall then Nothing else Just installed (Just _, False) -> do let t = T.intercalate ", " $ map (T.pack . packageNameString . pkgName) (Set.toList missing) tell mempty { wDirty = Map.singleton name $ "missing dependencies: " <> addEllipsis t } pure Nothing (Nothing, _) -> pure Nothing let loc = psLocation ps mutable = installLocationIsMutable loc <> minMutable pure $ case mRightVersionInstalled of Just installed -> ADRFound loc installed Nothing -> ADRToInstall Task { configOpts = TaskConfigOpts missing $ \missing' -> let allDeps = Map.union present missing' in ConfigureOpts.configureOpts (view envConfigL ctx) ctx.baseConfigOpts allDeps (psLocal ps) mutable package , buildHaddocks , present , taskType = case ps of PSFilePath lp -> TTLocalMutable lp PSRemote pkgLoc _version _fromSnapshot _cp -> TTRemotePackage mutable package pkgLoc , allInOne = isAllInOne , cachePkgSrc = toCachePkgSrc ps , buildTypeConfig = packageBuildTypeConfig package } -- | Is the build type of the package Configure packageBuildTypeConfig :: Package -> Bool packageBuildTypeConfig pkg = pkg.buildType == Configure -- Update response in the library map. If it is an error, and there's already an -- error about cyclic dependencies, prefer the cyclic error. 
updateLibMap :: PackageName -> Either ConstructPlanException AddDepRes -> M () updateLibMap name val = modify $ \mp -> case (Map.lookup name mp, val) of (Just (Left DependencyCycleDetected{}), Left _) -> mp _ -> Map.insert name val mp addEllipsis :: Text -> Text addEllipsis t | T.length t < 100 = t | otherwise = T.take 97 t <> "..." -- | Given a package, recurses into all of its dependencies. The resulting -- triple indicates: (1) which packages are missing. This means that their -- 'GhcPkgId's will be figured out during the build, after they've been built; -- (2) the packages that are already installed and which will be used; and -- (3) whether the package itself is mutable or immutable. addPackageDeps :: Package -> M ( Either ConstructPlanException ( Set PackageIdentifier , Map PackageIdentifier GhcPkgId , IsMutable ) ) addPackageDeps package = do checkAndWarnForUnknownTools package let pkgId = packageIdentifier package deps <- processPackageDepsToList package (processDep pkgId) pure $ case partitionEithers deps of -- Note that the Monoid for 'IsMutable' means that if any is 'Mutable', -- the result is 'Mutable'. Otherwise the result is 'Immutable'. ([], pairs) -> Right $ mconcat pairs (errs, _) -> Left $ DependencyPlanFailures package (Map.fromList errs) -- | Given a dependency, yields either information for an error message or a -- triple indicating: (1) if the dependency is to be installed, its package -- identifier; (2) if the dependency is installed and a library, its package -- identifier and 'GhcPkgId'; and (3) if the dependency is, or will be when -- installed, mutable or immutable. processDep :: PackageIdentifier -- ^ The package which has the dependency being processed. -> PackageName -- ^ The name of the dependency. -> DepValue -- ^ The version range and dependency type of the dependency. 
-> M ( Either ( PackageName , (VersionRange, Maybe (Version, BlobKey), BadDependency) ) (Set PackageIdentifier, Map PackageIdentifier GhcPkgId, IsMutable) ) processDep pkgId name value = do eRes <- getCachedDepOrAddDep name case eRes of Left e -> do addParent let bd = case e of UnknownPackage name' -> assert (name' == name) NotInBuildPlan DependencyCycleDetected names -> BDDependencyCycleDetected names -- ultimately we won't show any information on this to the user, -- we'll allow the dependency failures alone to display to avoid -- spamming the user too much DependencyPlanFailures _ _ -> Couldn'tResolveItsDependencies version mLatestApplicable <- getLatestApplicableVersionAndRev name range pure $ Left (name, (range, mLatestApplicable, bd)) Right adr | isDepTypeLibrary value.depType && not (adrHasLibrary adr) -> pure $ Left (name, (range, Nothing, HasNoLibrary)) Right adr -> do addParent inRange <- adrInRange pkgId name range adr if inRange then pure $ Right $ processAdr adr else do mLatestApplicable <- getLatestApplicableVersionAndRev name range pure $ Left ( name , ( range , mLatestApplicable , DependencyMismatch $ adrVersion adr ) ) where range = value.versionRange version = pkgVersion pkgId -- Update the parents map, for later use in plan construction errors -- - see 'getShortestDepsPath'. 
addParent = let parentMap = Map.singleton name [(pkgId, range)] in tell mempty { wParents = MonoidMap parentMap } getLatestApplicableVersionAndRev :: PackageName -> VersionRange -> M (Maybe (Version, BlobKey)) getLatestApplicableVersionAndRev name range = do ctx <- ask vsAndRevs <- runRIO ctx $ getHackagePackageVersions YesRequireHackageIndex UsePreferredVersions name pure $ do lappVer <- latestApplicableVersion range $ Map.keysSet vsAndRevs revs <- Map.lookup lappVer vsAndRevs (cabalHash, _) <- Map.maxView revs Just (lappVer, cabalHash) -- | Function to determine whether the result of 'addDep' is within range, given -- the version range of the dependency and taking into account Stack's -- @allow-newer@ configuration. adrInRange :: PackageIdentifier -- ^ The package which has the dependency. -> PackageName -- ^ The name of the dependency. -> VersionRange -- ^ The version range of the dependency. -> AddDepRes -- ^ The result of 'addDep'. -> M Bool adrInRange pkgId name range adr = if adrVersion adr `withinRange` range then pure True else do allowNewer <- view $ configL . to (.allowNewer) allowNewerDeps <- view $ configL . 
to (.allowNewerDeps) if allowNewer then case allowNewerDeps of Nothing -> do warn_ True $ fillSep [ style Shell "allow-newer" , "enabled" ] pure True Just boundsIgnoredDeps -> do let pkgName' = fromPackageName pkgName isBoundsIgnoreDep = pkgName `elem` boundsIgnoredDeps reason = if isBoundsIgnoreDep then fillSep [ style Current pkgName' , flow "is an" , style Shell "allow-newer-dep" , flow "and" , style Shell "allow-newer" , "enabled" ] else fillSep [ style Current pkgName' , flow "is not an" , style Shell "allow-newer-dep" , flow "although" , style Shell "allow-newer" , "enabled" ] warn_ isBoundsIgnoreDep reason pure isBoundsIgnoreDep else do when (isJust allowNewerDeps) $ warn_ False $ fillSep [ "although" , style Shell "allow-newer-deps" , flow "are specified," , style Shell "allow-newer" , "is" , style Shell "false" ] -- We ignore dependency information for packages in a snapshot pkgInSnapshot <- inSnapshot pkgName version adrInSnapshot <- inSnapshot name (adrVersion adr) if pkgInSnapshot && adrInSnapshot then do warn_ True ( flow "trusting snapshot over Cabal file dependency \ \information" ) pure True else pure False where PackageIdentifier pkgName version = pkgId warn_ isIgnoring reason = tell mempty { wWarnings = (msg:) } where msg = fillSep [ if isIgnoring then "Ignoring" else flow "Not ignoring" , style Current (fromPackageName pkgName) <> "'s" , flow "bounds on" , style Current (fromPackageName name) , parens (fromString . T.unpack $ versionRangeText range) , flow "and using" , style Current (fromPackageId $ PackageIdentifier name (adrVersion adr)) <> "." ] <> line <> fillSep [ "Reason:" , reason <> "." ] -- | Given a result of 'addDep', yields a triple indicating: (1) if the -- dependency is to be installed, its package identifier; (2) if the dependency -- is installed and a library, its package identifier and 'GhcPkgId'; and (3) if -- the dependency is, or will be when installed, mutable or immutable. 
processAdr :: AddDepRes -> (Set PackageIdentifier, Map PackageIdentifier GhcPkgId, IsMutable) processAdr adr = case adr of ADRToInstall task -> (Set.singleton $ taskProvides task, Map.empty, taskTargetIsMutable task) ADRFound loc (Executable _) -> (Set.empty, Map.empty, installLocationIsMutable loc) ADRFound loc (Library ident installedInfo) -> ( Set.empty , installedMapGhcPkgId ident installedInfo , installLocationIsMutable loc ) checkDirtiness :: PackageSource -> Installed -> Package -> Map PackageIdentifier GhcPkgId -> Bool -- ^ Is Haddock documentation being built? -> M Bool checkDirtiness ps installed package present buildHaddocks = do ctx <- ask moldOpts <- runRIO ctx $ tryGetFlagCache installed let configureOpts = ConfigureOpts.configureOpts (view envConfigL ctx) ctx.baseConfigOpts present (psLocal ps) (installLocationIsMutable $ psLocation ps) -- should be Local i.e. mutable always package components = case ps of PSFilePath lp -> Set.map (encodeUtf8 . renderComponent) lp.components PSRemote{} -> Set.empty wantConfigCache = ConfigCache { configureOpts , deps = Set.fromList $ Map.elems present , components , buildHaddocks , pkgSrc = toCachePkgSrc ps , pathEnvVar = ctx.pathEnvVar } config = view configL ctx mreason <- case moldOpts of Nothing -> pure $ Just "old configure information not found" Just oldOpts | Just reason <- describeConfigDiff config oldOpts wantConfigCache -> pure $ Just reason | True <- psForceDirty ps -> pure $ Just "--force-dirty specified" | otherwise -> do dirty <- psDirty ps pure $ case dirty of Just files -> Just $ "local file changes: " <> addEllipsis (T.pack $ unwords $ Set.toList files) Nothing -> Nothing case mreason of Nothing -> pure False Just reason -> do tell mempty { wDirty = Map.singleton package.name reason } pure True describeConfigDiff :: Config -> ConfigCache -> ConfigCache -> Maybe Text describeConfigDiff config old new | old.pkgSrc /= new.pkgSrc = Just $ "switching from " <> pkgSrcName old.pkgSrc <> " to " <> pkgSrcName 
new.pkgSrc | not (new.deps `Set.isSubsetOf` old.deps) = Just "dependencies changed" | not $ Set.null newComponents = Just $ "components added: " `T.append` T.intercalate ", " (map (decodeUtf8With lenientDecode) (Set.toList newComponents)) | not old.buildHaddocks && new.buildHaddocks = Just "rebuilding with haddocks" | oldOpts /= newOpts = Just $ T.pack $ concat [ "flags changed from " , show oldOpts , " to " , show newOpts ] | otherwise = Nothing where stripGhcOptions = go where go [] = [] go ("--ghc-option":x:xs) = go' Ghc x xs go ("--ghc-options":x:xs) = go' Ghc x xs go ((T.stripPrefix "--ghc-option=" -> Just x):xs) = go' Ghc x xs go ((T.stripPrefix "--ghc-options=" -> Just x):xs) = go' Ghc x xs go (x:xs) = x : go xs go' wc x xs = checkKeepers wc x $ go xs checkKeepers wc x xs = case filter isKeeper $ T.words x of [] -> xs keepers -> T.pack (compilerOptionsCabalFlag wc) : T.unwords keepers : xs -- GHC options which affect build results and therefore should always force -- a rebuild -- -- For the most part, we only care about options generated by Stack itself isKeeper = (== "-fhpc") -- more to be added later userOpts = filter (not . isStackOpt) . (if config.rebuildGhcOptions then id else stripGhcOptions) . map T.pack . (\(ConfigureOpts x y) -> x ++ y) . 
(.configureOpts) where -- options set by Stack isStackOpt :: Text -> Bool isStackOpt t = any (`T.isPrefixOf` t) [ "--dependency=" , "--constraint=" , "--package-db=" , "--libdir=" , "--bindir=" , "--datadir=" , "--libexecdir=" , "--sysconfdir" , "--docdir=" , "--htmldir=" , "--haddockdir=" , "--enable-tests" , "--enable-benchmarks" , "--exact-configuration" -- Treat these as causing dirtiness, to resolve -- https://github.com/commercialhaskell/stack/issues/2984 -- -- , "--enable-library-profiling" -- , "--enable-executable-profiling" -- , "--enable-profiling" ] || t == "--user" (oldOpts, newOpts) = removeMatching (userOpts old) (userOpts new) removeMatching (x:xs) (y:ys) | x == y = removeMatching xs ys removeMatching xs ys = (xs, ys) newComponents = new.components `Set.difference` old.components pkgSrcName (CacheSrcLocal fp) = T.pack fp pkgSrcName CacheSrcUpstream = "upstream source" psForceDirty :: PackageSource -> Bool psForceDirty (PSFilePath lp) = lp.forceDirty psForceDirty PSRemote{} = False psDirty :: (MonadIO m, HasEnvConfig env, MonadReader env m) => PackageSource -> m (Maybe (Set FilePath)) psDirty (PSFilePath lp) = runMemoizedWith lp.dirtyFiles psDirty PSRemote {} = pure Nothing -- files never change in a remote package psLocal :: PackageSource -> Bool psLocal (PSFilePath _ ) = True psLocal PSRemote{} = False psLocation :: PackageSource -> InstallLocation psLocation (PSFilePath _) = Local psLocation PSRemote{} = Snap -- | For the given package, warn about any unknown tools that are not on the -- PATH and not one of the executables of the package. checkAndWarnForUnknownTools :: Package -> M () checkAndWarnForUnknownTools p = do let unknownTools = Set.toList $ packageUnknownTools p -- Check whether the tool is on the PATH or a package executable before -- warning about it. 
warnings <- fmap catMaybes $ forM unknownTools $ \toolName -> runMaybeT $ notOnPath toolName *> notPackageExe toolName *> warn toolName tell mempty { wWarnings = (map toolWarningText warnings ++) } pure () where -- From Cabal 2.0, build-tools can specify a pre-built executable that should -- already be on the PATH. notOnPath toolName = MaybeT $ do let settings = minimalEnvSettings { includeLocals = True } config <- view configL menv <- liftIO $ config.processContextSettings settings eFound <- runRIO menv $ findExecutable $ T.unpack toolName skipIf $ isRight eFound -- From Cabal 1.12, build-tools can specify another executable in the same -- package. notPackageExe toolName = MaybeT $ skipIf $ collectionMember toolName p.executables warn name = MaybeT . pure . Just $ ToolWarning (ExeName name) p.name skipIf p' = pure $ if p' then Nothing else Just () toolWarningText :: ToolWarning -> StyleDoc toolWarningText (ToolWarning (ExeName toolName) pkgName') = fillSep [ flow "No packages found in snapshot which provide a" , style PkgComponent (fromString $ show toolName) , flow "executable, which is a build-tool dependency of" , style Current (fromPackageName pkgName') ] -- | Is the given package/version combo defined in the snapshot or in the global -- database? inSnapshot :: PackageName -> Version -> M Bool inSnapshot name version = do ctx <- ask pure $ fromMaybe False $ do ps <- Map.lookup name ctx.combinedMap case ps of PIOnlySource (PSRemote _ srcVersion FromSnapshot _) -> pure $ srcVersion == version PIBoth (PSRemote _ srcVersion FromSnapshot _) _ -> pure $ srcVersion == version -- OnlyInstalled occurs for global database PIOnlyInstalled loc (Library pid _) -> assert (loc == Snap) $ assert (pkgVersion pid == version) $ Just True _ -> pure False -- TODO: Consider intersecting version ranges for multiple deps on a -- package. This is why VersionRange is in the parent map. 
-- | Yield a 'CombinedMap' from: (1) a dictionary mapping package names to the
-- location of the named package's source code; and (2) an 'InstalledMap'
-- value.
combineMap :: Map PackageName PackageSource -> InstalledMap -> CombinedMap
combineMap sources installed =
  Map.merge sourceOnly installedOnly sourceAndInstalled sources installed
 where
  -- The package has source code but is not installed.
  sourceOnly = Map.mapMissing (const PIOnlySource)
  -- The package is installed but has no source code; the value is a pair of
  -- an 'InstallLocation' and an 'Installed', consumed by 'PIOnlyInstalled'.
  installedOnly = Map.mapMissing (const (uncurry PIOnlyInstalled))
  -- The package is both present as source and installed.
  sourceAndInstalled = Map.zipWithMatched (const combineSourceInstalled)
, Concurrency (..), runActions ) import Control.Concurrent.STM ( check ) import qualified Data.List as L import Data.List.Split ( chunksOf ) import qualified Data.Map.Merge.Strict as Map import qualified Data.Map.Strict as Map import qualified Data.Set as Set import qualified Data.Text as T import Data.Tuple ( swap ) import Distribution.System ( OS (..), Platform (..) ) import Distribution.Version ( mkVersion ) import Path ( (), parent ) import Path.CheckInstall ( warnInstallSearchPathIssues ) import Path.Extra ( forgivingResolveFile, rejectMissingFile ) import Path.IO ( ensureDir ) import RIO.NonEmpty ( nonEmpty ) import qualified RIO.NonEmpty as NE import RIO.Process ( HasProcessContext (..), proc, runProcess_ ) import Stack.Build.ExecuteEnv ( ExecuteEnv (..), withExecuteEnv ) import Stack.Build.ExecutePackage ( singleBench, singleBuild, singleTest ) import Stack.Build.Haddock ( generateDepsHaddockIndex , generateLocalHaddockForHackageArchives , generateLocalHaddockIndex, generateSnapHaddockIndex , openHaddocksInBrowser ) import Stack.Constants ( bindirSuffix ) import Stack.Coverage ( deleteHpcReports, generateHpcMarkupIndex , generateHpcUnifiedReport ) import Stack.GhcPkg ( unregisterGhcPkgIds ) import Stack.Prelude import Stack.Types.Build ( ExcludeTHLoading (..), KeepOutputOpen (..), Plan (..) , Task (..), TaskConfigOpts (..), TaskType (..), taskLocation , taskProvides ) import Stack.Types.Build.Exception ( BuildPrettyException (..) ) import Stack.Types.BuildOpts ( BuildOpts (..), TestOpts (..) ) import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..) ) import Stack.Types.BuildOptsMonoid ( ProgressBarFormat (..) ) import Stack.Types.Compiler ( ActualCompiler (..) ) import Stack.Types.CompilerPaths ( HasCompiler (..), getGhcPkgExe ) import Stack.Types.Config ( Config (..), HasConfig (..), buildOptsL ) import Stack.Types.ConfigureOpts ( BaseConfigOpts (..) ) import Stack.Types.DumpPackage ( DumpPackage (..) 
-- | Fetch the packages necessary for a build, for example in combination with
-- a dry run.
preFetch :: HasEnvConfig env => Plan -> RIO env ()
preFetch plan =
  if Set.null remotePkgLocs
    then logDebug "Nothing to fetch"
    else do
      logDebug $
           "Prefetching: "
        <> mconcat (L.intersperse ", " (map display (Set.toList remotePkgLocs)))
      fetchPackages remotePkgLocs
 where
  -- Only remote packages need fetching; local mutable packages are already on
  -- disk.
  remotePkgLocs = Set.unions $ map (locsOf . (.taskType)) $ Map.elems plan.tasks
  locsOf (TTRemotePackage _ _ pkgloc) = Set.singleton pkgloc
  locsOf TTLocalMutable{} = Set.empty
-- | For a dry run
displayTask :: Task -> StyleDoc
displayTask task = fillSep $
  [ fromString (packageIdentifierString (taskProvides task)) <> ":"
    -- Which package database the task installs into.
  , "database=" <>
      ( case taskLocation task of
          Snap -> "snapshot" :: StyleDoc
          Local -> "local" :: StyleDoc
      ) <> ","
    -- Where the source comes from: the directory holding the Cabal file for a
    -- local mutable package, or the displayed package location for a remote
    -- one.
  , "source=" <>
      ( case task.taskType of
          TTLocalMutable lp -> pretty $ parent lp.cabalFP
          TTRemotePackage _ _ pl -> fromString $ T.unpack $ textDisplay pl
      ) <>
        -- A trailing comma is emitted only if an "after:" clause follows.
        if Set.null missing then mempty else ","
  ] <>
  -- List comprehension over a Bool: one element when there are missing
  -- dependencies, none otherwise.
  [ fillSep $ "after:" : mkNarrativeList Nothing False
      (map fromPackageId (Set.toList missing) :: [StyleDoc])
  | not $ Set.null missing
  ]
 where
  -- Identifiers of the dependencies this task must come after.
  missing = task.configOpts.missing
plan = do logDebug "Executing the build plan" bopts <- view buildOptsL withExecuteEnv bopts boptsCli baseConfigOpts locals globalPackages snapshotPackages localPackages mlargestPackageName (executePlan' installedMap targets plan) copyExecutables plan.installExes config <- view configL menv' <- liftIO $ config.processContextSettings EnvSettings { includeLocals = True , includeGhcPackagePath = True , stackExe = True , localeUtf8 = False , keepGhcRts = False } withProcessContext menv' $ forM_ boptsCli.exec $ \(cmd, args) -> proc cmd args runProcess_ where mlargestPackageName = Set.lookupMax $ Set.map (length . packageNameString) $ Map.keysSet plan.tasks <> Map.keysSet plan.finals copyExecutables :: HasEnvConfig env => Map Text InstallLocation -> RIO env () copyExecutables exes | Map.null exes = pure () copyExecutables exes = do snapBin <- ( bindirSuffix) <$> installationRootDeps localBin <- ( bindirSuffix) <$> installationRootLocal compilerSpecific <- (.installCompilerTool) <$> view buildOptsL destDir <- if compilerSpecific then bindirCompilerTools else view $ configL . to (.localBin) ensureDir destDir destDir' <- liftIO . D.canonicalizePath . toFilePath $ destDir platform <- view platformL let ext = case platform of Platform _ Windows -> ".exe" _ -> "" currExe <- liftIO getExecutablePath -- needed for windows, see below installed <- forMaybeM (Map.toList exes) $ \(name, loc) -> do let bindir = case loc of Snap -> snapBin Local -> localBin mfp <- forgivingResolveFile bindir (T.unpack name ++ ext) >>= rejectMissingFile case mfp of Nothing -> do prettyWarnL [ flow "Couldn't find executable" , style Current (fromString $ T.unpack name) , flow "in directory" , pretty bindir <> "." ] pure Nothing Just file -> do let destFile = destDir' FP. T.unpack name ++ ext prettyInfoL [ flow "Copying from" , pretty file , "to" , style File (fromString destFile) <> "." 
-- | Windows can't write over the current executable. Instead, we rename the
-- current executable to something else and then do the copy.
windowsRenameCopy :: FilePath -> FilePath -> IO ()
windowsRenameCopy src dest = do
  let stagedCopy = dest ++ ".new"
      displaced = dest ++ ".old"
  -- Stage a copy of the source next to the destination, move the existing
  -- destination out of the way, then move the staged copy into place.
  D.copyFile src stagedCopy
  D.renameFile dest displaced
  D.renameFile stagedCopy dest
to (.jobs) let keepGoing = fromMaybe (not (Map.null plan.finals)) buildOpts.keepGoing terminal <- view terminalL terminalWidth <- view termWidthL errs <- liftIO $ runActions threads keepGoing actions $ \doneVar actionsVar -> do let total = length actions loop prev | prev == total = run $ logStickyDone ( "Completed " <> display total <> " action(s).") | otherwise = do inProgress <- readTVarIO actionsVar let packageNames = map (\(ActionId pkgID _) -> pkgName pkgID) (toList inProgress) nowBuilding :: [PackageName] -> Utf8Builder nowBuilding [] = "" nowBuilding names = mconcat $ ": " : L.intersperse ", " (map fromPackageName names) progressFormat = buildOpts.progressBar progressLine prev' total' = "Progress " <> display prev' <> "/" <> display total' <> if progressFormat == CountOnlyBar then mempty else nowBuilding packageNames ellipsize n text = if T.length text <= n || progressFormat /= CappedBar then text else T.take (n - 1) text <> "…" when (terminal && progressFormat /= NoBar) $ run $ logSticky $ display $ ellipsize terminalWidth $ utf8BuilderToText $ progressLine prev total done <- atomically $ do done <- readTVar doneVar check $ done /= prev pure done loop done when (total > 1) $ loop 0 when testOpts.coverage $ do generateHpcUnifiedReport generateHpcMarkupIndex unless (null errs) $ prettyThrowM $ ExecutionFailure errs when buildOpts.buildHaddocks $ do if buildOpts.haddockForHackage then generateLocalHaddockForHackageArchives ee.locals else do snapshotDumpPkgs <- liftIO (readTVarIO ee.snapshotDumpPkgs) localDumpPkgs <- liftIO (readTVarIO ee.localDumpPkgs) generateLocalHaddockIndex ee.baseConfigOpts localDumpPkgs ee.locals generateDepsHaddockIndex ee.baseConfigOpts ee.globalDumpPkgs snapshotDumpPkgs localDumpPkgs ee.locals generateSnapHaddockIndex ee.baseConfigOpts ee.globalDumpPkgs snapshotDumpPkgs when buildOpts.openHaddocks $ do let planPkgs, localPkgs, installedPkgs, availablePkgs :: Map PackageName (PackageIdentifier, InstallLocation) planPkgs = Map.map 
-- | Unregister the given packages from the given local package database,
-- using batched invocations of ghc-pkg where the compiler supports them.
unregisterPackages ::
     (HasCompiler env, HasPlatform env, HasProcessContext env, HasTerm env)
  => ActualCompiler
  -> Path Abs Dir
  -- ^ The local package database.
  -> NonEmpty (GhcPkgId, (PackageIdentifier, Text))
  -- ^ Packages to unregister, each with the (possibly empty) reason for
  -- unregistering it.
  -> RIO env ()
unregisterPackages cv localDB ids = do
  -- Announce each unregistration, appending the reason in parentheses when
  -- one was given.
  let logReason ident reason =
        prettyInfoL
          (  [ fromString (packageIdentifierString ident) <> ":"
             , "unregistering"
             ]
          <> [ parens (flow $ T.unpack reason) | not $ T.null reason ]
          )
  -- Unregister one package. 'select' chooses how to identify the package to
  -- ghc-pkg: by name ('Left') or by its 'GhcPkgId' ('Right').
  let unregisterSinglePkg select (gid, (ident, reason)) = do
        logReason ident reason
        pkg <- getGhcPkgExe
        unregisterGhcPkgIds True pkg localDB $ select ident gid :| []
  case cv of
    -- GHC versions >= 8.2.1 support batch unregistering of packages. See
    -- https://gitlab.haskell.org/ghc/ghc/issues/12637
    ACGhc v | v >= mkVersion [8, 2, 1] -> do
      platform <- view platformL
      -- According to
      -- https://support.microsoft.com/en-us/help/830473/command-prompt-cmd-exe-command-line-string-limitation
      -- the maximum command line length on Windows since XP is 8191 characters.
      -- We use conservative batch size of 100 ids on this OS thus argument name
      -- '-ipid', package name, its version and a hash should fit well into this
      -- limit. On Unix-like systems we're limited by ARG_MAX which is normally
      -- hundreds of kilobytes so batch size of 500 should work fine.
      let batchSize = case platform of
            Platform _ Windows -> 100
            _ -> 500
      -- Split into non-empty chunks of at most 'size' elements.
      let chunksOfNE size = mapMaybe nonEmpty . chunksOf size . NE.toList
      for_ (chunksOfNE batchSize ids) $ \batch -> do
        for_ batch $ \(_, (ident, reason)) -> logReason ident reason
        pkg <- getGhcPkgExe
        -- Batched path always identifies packages by 'GhcPkgId' ('Right').
        unregisterGhcPkgIds True pkg localDB $ fmap (Right . fst) batch
    -- GHC versions >= 7.9 support unregistering of packages via their GhcPkgId.
    ACGhc v | v >= mkVersion [7, 9] ->
      for_ ids . unregisterSinglePkg $ \_ident gid -> Right gid
    -- Older compilers: fall back to unregistering by package identifier.
    _ ->
      for_ ids . unregisterSinglePkg $ \ident _gid -> Left ident
-- | The named components involved in a task: those of the local package for a
-- local mutable task, and none for a remote package.
taskComponents :: Task -> Set NamedComponent
taskComponents task =
  case task.taskType of
    TTLocalMutable lp -> lp.components -- FIXME probably just want lpWanted
    TTRemotePackage{} -> Set.empty
) import Crypto.Hash ( SHA256 (..), hashWith ) import Data.Attoparsec.Text ( char, choice, digit, parseOnly ) import qualified Data.Attoparsec.Text as P ( string ) import qualified Data.ByteArray as Mem ( convert ) import qualified Data.ByteString as S import qualified Data.ByteString.Base64.URL as B64URL import qualified Data.ByteString.Builder ( toLazyByteString ) import qualified Data.ByteString.Char8 as S8 import Data.Char ( isSpace ) import Conduit ( ConduitT, awaitForever, sinkHandle, withSinkFile , withSourceFile, yield ) import qualified Data.Conduit.Binary as CB import qualified Data.Conduit.List as CL import qualified Data.Conduit.Text as CT import qualified Data.List as L import qualified Data.Map.Strict as Map import qualified Data.Set as Set import qualified Data.Text as T import Data.Text.Encoding ( decodeUtf8 ) import Data.Time ( ZonedTime, defaultTimeLocale, formatTime, getZonedTime ) import qualified Distribution.PackageDescription as C import qualified Distribution.Simple.Build.Macros as C import Distribution.System ( OS (..), Platform (..) 
) import Distribution.Types.PackageName ( mkPackageName ) import Distribution.Verbosity ( showForCabal ) import Distribution.Version ( mkVersion ) import Path ( PathException, (), parent, parseRelDir, parseRelFile ) import Path.Extra ( forgivingResolveFile, toFilePathNoTrailingSep ) import Path.IO ( doesDirExist, doesFileExist, ensureDir, ignoringAbsence , removeFile, renameDir, renameFile ) import RIO.Process ( eceExitCode, proc, runProcess_, setStdout, useHandleOpen , withWorkingDir ) import Stack.Config ( checkOwnership ) import Stack.Constants ( cabalPackageName, relDirDist, relDirSetup , relDirSetupExeCache, relDirSetupExeSrc, relFileBuildLock , relFileSetupHs, relFileSetupLhs, relFileSetupLower , relFileSetupMacrosH, setupGhciShimCode, stackProgName ) import Stack.Constants.Config ( distDirFromDir, distRelativeDir ) import Stack.Package ( buildLogPath ) import Stack.Prelude import Stack.Types.ApplyGhcOptions ( ApplyGhcOptions (..) ) import Stack.Types.Build ( ConvertPathsToAbsolute (..), ExcludeTHLoading (..) , KeepOutputOpen (..), TaskType (..), taskTypeLocation , taskTypePackageIdentifier ) import Stack.Types.Build.Exception ( BuildException (..), BuildPrettyException (..) ) import Stack.Types.BuildOpts ( BuildOpts (..) ) import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..) ) import Stack.Types.BuildOptsMonoid ( CabalVerbosity (..) ) import Stack.Types.Compiler ( ActualCompiler (..), WhichCompiler (..) , compilerVersionString, getGhcVersion, whichCompilerL ) import Stack.Types.CompilerPaths ( CompilerPaths (..), HasCompiler (..), cabalVersionL , getCompilerPath ) import Stack.Types.Config ( Config (..), HasConfig (..), stackRootL ) import Stack.Types.ConfigureOpts ( BaseConfigOpts (..) ) import Stack.Types.Dependency ( DepValue(..) ) import Stack.Types.DumpLogs ( DumpLogs (..) ) import Stack.Types.DumpPackage ( DumpPackage (..) 
-- | Shared environment for executing build steps. Constructed once per build
-- by 'withExecuteEnv' and threaded through the per-package actions.
data ExecuteEnv = ExecuteEnv
  { installLock :: !(MVar ())
    -- ^ Lock used to serialise install steps; see the note in
    -- 'withExecuteEnv' about configures apparently not being runnable in
    -- parallel.
  , buildOpts :: !BuildOpts
    -- ^ Build options in effect for this build.
  , buildOptsCLI :: !BuildOptsCLI
    -- ^ Build options supplied on the command line.
  , baseConfigOpts :: !BaseConfigOpts
    -- ^ Base options passed to Cabal's configure step.
  , ghcPkgIds :: !(TVar (Map PackageIdentifier Installed))
    -- ^ Mutable map from package identifier to its 'Installed' state.
  , tempDir :: !(Path Abs Dir)
    -- ^ Temporary directory that lives for the duration of the build.
  , setupHs :: !(Path Abs File)
    -- ^ Temporary Setup.hs for simple builds
  , setupShimHs :: !(Path Abs File)
    -- ^ Temporary SetupShim.hs, to provide access to initial-build-steps
  , setupExe :: !(Maybe (Path Abs File))
    -- ^ Compiled version of eeSetupHs
  , cabalPkgVer :: !Version
    -- ^ The version of the compiler's Cabal boot package.
  , totalWanted :: !Int
    -- ^ Number of local packages that are wanted targets.
  , locals :: ![LocalPackage]
    -- ^ The local packages involved in the build.
  , globalDB :: !(Path Abs Dir)
    -- ^ Path to the compiler's global package database.
  , globalDumpPkgs :: !(Map GhcPkgId DumpPackage)
    -- ^ Dump information for global packages, keyed by 'GhcPkgId'.
  , snapshotDumpPkgs :: !(TVar (Map GhcPkgId DumpPackage))
    -- ^ Mutable dump information for snapshot packages, keyed by 'GhcPkgId'.
  , localDumpPkgs :: !(TVar (Map GhcPkgId DumpPackage))
    -- ^ Mutable dump information for local packages, keyed by 'GhcPkgId';
    -- updated as packages are registered and unregistered during the build.
  , logFiles :: !(TChan (Path Abs Dir, Path Abs File))
    -- ^ Channel of (package directory, log file) pairs, drained and dumped
    -- when the build finishes.
  , customBuilt :: !(IORef (Set PackageName))
    -- ^ Stores which packages with custom-setup have already had their
    -- Setup.hs built.
  , largestPackageName :: !(Maybe Int)
    -- ^ For nicer interleaved output: track the largest package name size
  , pathEnvVar :: !Text
    -- ^ Value of the PATH environment variable
  }
-- | Get a compiled Setup exe. The executable is cached under the Stack root,
-- keyed by a name that embeds 'simpleSetupHash', the Cabal boot package
-- version and the compiler version, so a cached executable is reused only
-- when all of those match. 'Nothing' is never returned on the current code
-- path (the GHC case always yields 'Just'); the 'Maybe' is part of the
-- interface.
getSetupExe ::
     HasEnvConfig env
  => Path Abs File -- ^ Setup.hs input file
  -> Path Abs File -- ^ SetupShim.hs input file
  -> Path Abs Dir -- ^ temporary directory
  -> RIO env (Maybe (Path Abs File))
getSetupExe setupHs setupShimHs tmpdir = do
  wc <- view $ actualCompilerVersionL . whichCompilerL
  platformDir <- platformGhcRelDir
  config <- view configL
  cabalVersionString <- view $ cabalVersionL . to versionString
  actualCompilerVersionString <-
    view $ actualCompilerVersionL . to compilerVersionString
  platform <- view platformL
  -- Cache key: setup hash, Cabal version and compiler version.
  let baseNameS = concat
        [ "Cabal-simple_"
        , simpleSetupHash
        , "_"
        , cabalVersionString
        , "_"
        , actualCompilerVersionString
        ]
      exeNameS = baseNameS ++
        case platform of
          Platform _ Windows -> ".exe"
          _ -> ""
      outputNameS =
        case wc of
          Ghc -> exeNameS
      setupDir =
        view stackRootL config </>
        relDirSetupExeCache </>
        platformDir
  exePath <- (setupDir </>) <$> parseRelFile exeNameS
  exists <- liftIO $ D.doesFileExist $ toFilePath exePath
  if exists
    then pure $ Just exePath
    else do
      -- Compile to a "tmp-" name first and rename into place afterwards, so
      -- a partially-written executable is never seen at the cached path.
      tmpExePath <- fmap (setupDir </>) $ parseRelFile $ "tmp-" ++ exeNameS
      tmpOutputPath <- fmap (setupDir </>) $ parseRelFile $ "tmp-" ++ outputNameS
      ensureDir setupDir
      let args = buildSetupArgs ++
            [ "-package"
            , "Cabal-" ++ cabalVersionString
            , toFilePath setupHs
            , toFilePath setupShimHs
            , "-o"
            , toFilePath tmpOutputPath
            ]
      compilerPath <- getCompilerPath
      -- Run the compiler in the temporary directory, routing its stdout to
      -- our stderr; a non-zero exit is rethrown as 'SetupHsBuildFailure'.
      withWorkingDir (toFilePath tmpdir) $
        proc (toFilePath compilerPath) args (\pc0 -> do
          let pc = setStdout (useHandleOpen stderr) pc0
          runProcess_ pc)
            `catch` \ece ->
              prettyThrowM $ SetupHsBuildFailure
                (eceExitCode ece) Nothing compilerPath args Nothing []
      renameFile tmpExePath exePath
      pure $ Just exePath
HasEnvConfig env => BuildOpts -> BuildOptsCLI -> BaseConfigOpts -> [LocalPackage] -> [DumpPackage] -- ^ global packages -> [DumpPackage] -- ^ snapshot packages -> [DumpPackage] -- ^ local packages -> Maybe Int -- ^ largest package name, for nicer interleaved output -> (ExecuteEnv -> RIO env a) -> RIO env a withExecuteEnv buildOpts buildOptsCLI baseConfigOpts locals globalPackages snapshotPackages localPackages largestPackageName inner = createTempDirFunction stackProgName $ \tempDir -> do installLock <- liftIO $ newMVar () ghcPkgIds <- liftIO $ newTVarIO Map.empty config <- view configL customBuilt <- newIORef Set.empty -- Create files for simple setup and setup shim, if necessary let setupSrcDir = view stackRootL config relDirSetupExeSrc ensureDir setupSrcDir let setupStub = "setup-" ++ simpleSetupHash setupFileName <- parseRelFile (setupStub ++ ".hs") setupHiName <- parseRelFile (setupStub ++ ".hi") setupOName <- parseRelFile (setupStub ++ ".o") let setupHs = setupSrcDir setupFileName setupHi = setupSrcDir setupHiName setupO = setupSrcDir setupOName setupHsExists <- doesFileExist setupHs unless setupHsExists $ writeBinaryFileAtomic setupHs simpleSetupCode -- See https://github.com/commercialhaskell/stack/issues/6267. Remove any -- historical *.hi or *.o files. This can be dropped when Stack drops -- support for the problematic versions of GHC. ignoringAbsence (removeFile setupHi) ignoringAbsence (removeFile setupO) let setupShimStub = "setup-shim-" ++ simpleSetupHash setupShimFileName <- parseRelFile (setupShimStub ++ ".hs") setupShimHiName <- parseRelFile (setupShimStub ++ ".hi") setupShimOName <- parseRelFile (setupShimStub ++ ".o") let setupShimHs = setupSrcDir setupShimFileName setupShimHi = setupSrcDir setupShimHiName setupShimO = setupSrcDir setupShimOName setupShimHsExists <- doesFileExist setupShimHs unless setupShimHsExists $ writeBinaryFileAtomic setupShimHs setupGhciShimCode -- See https://github.com/commercialhaskell/stack/issues/6267. 
Remove any -- historical *.hi or *.o files. This can be dropped when Stack drops -- support for the problematic versions of GHC. ignoringAbsence (removeFile setupShimHi) ignoringAbsence (removeFile setupShimO) setupExe <- getSetupExe setupHs setupShimHs tempDir cabalPkgVer <- view cabalVersionL globalDB <- view $ compilerPathsL . to (.globalDB) let globalDumpPkgs = toDumpPackagesByGhcPkgId globalPackages snapshotDumpPkgs <- liftIO $ newTVarIO (toDumpPackagesByGhcPkgId snapshotPackages) localDumpPkgs <- liftIO $ newTVarIO (toDumpPackagesByGhcPkgId localPackages) logFiles <- liftIO $ atomically newTChan let totalWanted = length $ filter (.wanted) locals pathEnvVar <- liftIO $ maybe mempty T.pack <$> lookupEnv "PATH" inner ExecuteEnv { buildOpts , buildOptsCLI -- Uncertain as to why we cannot run configures in parallel. This -- appears to be a Cabal library bug. Original issue: -- https://github.com/commercialhaskell/stack/issues/84. Ideally -- we'd be able to remove this. , installLock , baseConfigOpts , ghcPkgIds , tempDir , setupHs , setupShimHs , setupExe , cabalPkgVer , totalWanted , locals , globalDB , globalDumpPkgs , snapshotDumpPkgs , localDumpPkgs , logFiles , customBuilt , largestPackageName , pathEnvVar } `finally` dumpLogs logFiles totalWanted where toDumpPackagesByGhcPkgId = Map.fromList . map (\dp -> (dp.ghcPkgId, dp)) createTempDirFunction | buildOpts.keepTmpFiles = withKeepSystemTempDir | otherwise = withSystemTempDir dumpLogs :: TChan (Path Abs Dir, Path Abs File) -> Int -> RIO env () dumpLogs chan totalWanted = do allLogs <- fmap reverse $ liftIO $ atomically drainChan case allLogs of -- No log files generated, nothing to dump [] -> pure () firstLog:_ -> do toDump <- view $ configL . 
to (.dumpLogs) case toDump of DumpAllLogs -> mapM_ (dumpLog "") allLogs DumpWarningLogs -> mapM_ dumpLogIfWarning allLogs DumpNoLogs | totalWanted > 1 -> prettyInfoL [ flow "Build output has been captured to log files, use" , style Shell "--dump-logs" , flow "to see it on the console." ] | otherwise -> pure () prettyInfoL [ flow "Log files have been written to:" , pretty (parent (snd firstLog)) ] -- We only strip the colors /after/ we've dumped logs, so that we get pretty -- colors in our dump output on the terminal. colors <- shouldForceGhcColorFlag when colors $ liftIO $ mapM_ (stripColors . snd) allLogs where drainChan :: STM [(Path Abs Dir, Path Abs File)] drainChan = do mx <- tryReadTChan chan case mx of Nothing -> pure [] Just x -> do xs <- drainChan pure $ x:xs dumpLogIfWarning :: (Path Abs Dir, Path Abs File) -> RIO env () dumpLogIfWarning (pkgDir, filepath) = do firstWarning <- withSourceFile (toFilePath filepath) $ \src -> runConduit $ src .| CT.decodeUtf8Lenient .| CT.lines .| CL.map stripCR .| CL.filter isWarning .| CL.take 1 unless (null firstWarning) $ dumpLog " due to warnings" (pkgDir, filepath) isWarning :: Text -> Bool isWarning t = ": Warning:" `T.isSuffixOf` t -- prior to GHC 8 || ": warning:" `T.isInfixOf` t -- GHC 8 is slightly different || "mwarning:" `T.isInfixOf` t -- colorized output dumpLog :: String -> (Path Abs Dir, Path Abs File) -> RIO env () dumpLog msgSuffix (pkgDir, filepath) = do prettyNote $ fillSep ( ( fillSep ( flow "Dumping log file" : [ flow msgSuffix | not (L.null msgSuffix) ] ) <> ":" ) : [ pretty filepath <> "." ] ) <> line compilerVer <- view actualCompilerVersionL withSourceFile (toFilePath filepath) $ \src -> runConduit $ src .| CT.decodeUtf8Lenient .| mungeBuildOutput ExcludeTHLoading ConvertPathsToAbsolute pkgDir compilerVer .| CL.mapM_ (logInfo . display) prettyNote $ fillSep [ flow "End of log file:" , pretty filepath <> "." 
] <> line

  -- Strip ANSI colour escape sequences from the given log file, in place.
  -- The file is first copied verbatim to a sibling "-color" file, then
  -- streamed back with every ESC..m sequence removed.
  stripColors :: Path Abs File -> IO ()
  stripColors fp = do
    let colorfp = toFilePath fp ++ "-color"
    withSourceFile (toFilePath fp) $ \src -> withSinkFile colorfp $ \sink ->
      runConduit $ src .| sink
    withSourceFile colorfp $ \src -> withSinkFile (toFilePath fp) $ \sink ->
      runConduit $ src .| noColors .| sink
   where
    noColors = do
      CB.takeWhile (/= 27) -- ESC
      mnext <- CB.head
      case mnext of
        Nothing -> pure ()
        Just x -> assert (x == 27) $ do
          -- Color sequences always end with an m
          CB.dropWhile (/= 109) -- m
          CB.drop 1 -- drop the m itself
          noColors

-- | Make a padded prefix for log messages
packageNamePrefix :: ExecuteEnv -> PackageName -> String
packageNamePrefix ee name' =
  let name = packageNameString name'
      paddedName = case ee.largestPackageName of
        Nothing -> name
        -- Right-pad with spaces so all package prefixes line up;
        -- largestPackageName is expected to be >= any individual name length.
        Just len ->
          assert (len >= length name) $ take len $ name ++ L.repeat ' '
  in  paddedName <> "> "

-- | Log a plain progress message for a task, prefixed with the padded
-- package name.
announceTask ::
     HasLogFunc env
  => ExecuteEnv
  -> TaskType
  -> Utf8Builder
  -> RIO env ()
announceTask ee taskType action = logInfo $
     fromString
       (packageNamePrefix ee (pkgName (taskTypePackageIdentifier taskType)))
  <> action

-- | Like 'announceTask', but for pretty (styled) output.
prettyAnnounceTask ::
     HasTerm env
  => ExecuteEnv
  -> TaskType
  -> StyleDoc
  -> RIO env ()
prettyAnnounceTask ee taskType action = prettyInfo $
     fromString
       (packageNamePrefix ee (pkgName (taskTypePackageIdentifier taskType)))
  <> action

-- | Ensure we're the only action using the directory, by taking an exclusive
-- file lock under the package's dist directory.
withLockedDistDir ::
     forall env a.
HasEnvConfig env
  => (StyleDoc -> RIO env ()) -- ^ A pretty announce function
  -> Path Abs Dir -- ^ root directory for package
  -> RIO env a
  -> RIO env a
withLockedDistDir announce root inner = do
  distDir <- distRelativeDir
  let lockFP = root distDir relFileBuildLock
  ensureDir $ parent lockFP
  -- First attempt the lock without blocking, so the common uncontended case
  -- proceeds immediately.
  mres <-
    withRunInIO $ \run ->
      withTryFileLock (toFilePath lockFP) Exclusive $ \_lock ->
        run inner
  case mres of
    Just res -> pure res
    Nothing -> do
      -- The lock is held elsewhere: block on it, while a companion thread
      -- periodically tells the user why nothing appears to be happening.
      let complainer :: Companion (RIO env)
          complainer delay = do
            delay 5000000 -- 5 seconds
            announce $ fillSep
              [ flow "blocking for directory lock on"
              , pretty lockFP
              ]
            forever $ do
              delay 30000000 -- 30 seconds
              announce $ fillSep
                [ flow "still blocking for directory lock on"
                , pretty lockFP <> ";"
                , flow "maybe another Stack process is running?"
                ]
      withCompanion complainer $ \stopComplaining ->
        withRunInIO $ \run ->
          withFileLock (toFilePath lockFP) Exclusive $ \_ ->
            run $ stopComplaining *> inner

-- | How we deal with output from GHC, either dumping to a log file or the
-- console (with some prefix).
data OutputType
  = OTLogFile !(Path Abs File) !Handle
  | OTConsole !(Maybe Utf8Builder)

-- | This sets up a context for executing build steps which need to run
-- Cabal (via a compiled Setup.hs). In particular it does the following:
--
-- * Ensures the package exists in the file system, downloading if necessary.
--
-- * Opens a log file if the built output shouldn't go to stderr.
--
-- * Ensures that either a simple Setup.hs is built, or the package's
--   custom setup is built.
--
-- * Provides the user a function with which run the Cabal process.
withSingleContext ::
     forall env a.
     HasEnvConfig env
  => ActionContext
  -> ExecuteEnv
  -> TaskType
  -> Map PackageIdentifier GhcPkgId
     -- ^ All dependencies' package ids to provide to Setup.hs.
-> Maybe String -> ( Package -- Package info -> Path Abs File -- Cabal file path -> Path Abs Dir -- Package root directory file path -- Note that the `Path Abs Dir` argument is redundant with the -- `Path Abs File` argument, but we provide both to avoid recalculating -- `parent` of the `File`. -> (KeepOutputOpen -> ExcludeTHLoading -> [String] -> RIO env ()) -- Function to run Cabal with args -> (Utf8Builder -> RIO env ()) -- An plain 'announce' function, for different build phases -> OutputType -> RIO env a) -> RIO env a withSingleContext ac ee taskType allDeps msuffix inner0 = withPackage $ \package cabalFP pkgDir -> withOutputType pkgDir package $ \outputType -> withCabal package pkgDir outputType $ \cabal -> inner0 package cabalFP pkgDir cabal announce outputType where pkgId = taskTypePackageIdentifier taskType announce = announceTask ee taskType prettyAnnounce = prettyAnnounceTask ee taskType wanted = case taskType of TTLocalMutable lp -> lp.wanted TTRemotePackage{} -> False -- Output to the console if this is the last task, and the user asked to build -- it specifically. When the action is a 'ConcurrencyDisallowed' action -- (benchmarks), then we can also be sure to have exclusive access to the -- console, so output is also sent to the console in this case. -- -- See the discussion on #426 for thoughts on sending output to the console --from concurrent tasks. 
console = ( wanted && all (\(ActionId ident _) -> ident == pkgId) (Set.toList ac.remaining) && ee.totalWanted == 1 ) || ac.concurrency == ConcurrencyDisallowed withPackage inner = case taskType of TTLocalMutable lp -> do let root = parent lp.cabalFP withLockedDistDir prettyAnnounce root $ inner lp.package lp.cabalFP root TTRemotePackage _ package pkgloc -> do suffix <- parseRelDir $ packageIdentifierString $ packageIdentifier package let dir = ee.tempDir suffix unpackPackageLocation dir pkgloc -- See: https://github.com/commercialhaskell/stack/issues/157 distDir <- distRelativeDir let oldDist = dir relDirDist newDist = dir distDir exists <- doesDirExist oldDist when exists $ do -- Previously used takeDirectory, but that got confused -- by trailing slashes, see: -- https://github.com/commercialhaskell/stack/issues/216 -- -- Instead, use Path which is a bit more resilient ensureDir $ parent newDist renameDir oldDist newDist let name = pkgName pkgId cabalfpRel <- parseRelFile $ packageNameString name ++ ".cabal" let cabalFP = dir cabalfpRel inner package cabalFP dir withOutputType pkgDir package inner -- Not in interleaved mode. When building a single wanted package, dump -- to the console with no prefix. | console = inner $ OTConsole Nothing -- If the user requested interleaved output, dump to the console with a -- prefix. | ee.buildOpts.interleavedOutput = inner $ OTConsole $ Just $ fromString (packageNamePrefix ee package.name) -- Neither condition applies, dump to a file. 
| otherwise = do logPath <- buildLogPath package msuffix ensureDir (parent logPath) let fp = toFilePath logPath -- We only want to dump logs for local non-dependency packages case taskType of TTLocalMutable lp | lp.wanted -> liftIO $ atomically $ writeTChan ee.logFiles (pkgDir, logPath) _ -> pure () withBinaryFile fp WriteMode $ \h -> inner $ OTLogFile logPath h withCabal :: Package -> Path Abs Dir -> OutputType -> ( (KeepOutputOpen -> ExcludeTHLoading -> [String] -> RIO env ()) -> RIO env a ) -> RIO env a withCabal package pkgDir outputType inner = do config <- view configL unless config.allowDifferentUser $ checkOwnership (pkgDir config.workDir) let envSettings = EnvSettings { includeLocals = taskTypeLocation taskType == Local , includeGhcPackagePath = False , stackExe = False , localeUtf8 = True , keepGhcRts = False } menv <- liftIO $ config.processContextSettings envSettings distRelativeDir' <- distRelativeDir setupexehs <- -- Avoid broken Setup.hs files causing problems for simple build -- types, see: -- https://github.com/commercialhaskell/stack/issues/370 case (package.buildType, ee.setupExe) of (C.Simple, Just setupExe) -> pure $ SimpleSetupExe setupExe _ -> liftIO $ OtherSetupHs <$> getSetupHs pkgDir inner $ \keepOutputOpen stripTHLoading args -> do let cabalPackageArg -- Omit cabal package dependency when building -- Cabal. See -- https://github.com/commercialhaskell/stack/issues/1356 | package.name == mkPackageName "Cabal" = [] | otherwise = ["-package=" ++ packageIdentifierString (PackageIdentifier cabalPackageName ee.cabalPkgVer)] packageDBArgs = ( "-clear-package-db" : "-global-package-db" : map (("-package-db=" ++) . 
toFilePathNoTrailingSep) ee.baseConfigOpts.extraDBs ) ++ ( ( "-package-db=" ++ toFilePathNoTrailingSep ee.baseConfigOpts.snapDB ) : ( "-package-db=" ++ toFilePathNoTrailingSep ee.baseConfigOpts.localDB ) : ["-hide-all-packages"] ) warnCustomNoDeps :: RIO env () warnCustomNoDeps = case (taskType, package.buildType) of (TTLocalMutable lp, C.Custom) | lp.wanted -> prettyWarnL [ flow "Package" , fromPackageName package.name , flow "uses a custom Cabal build, but does not use a \ \custom-setup stanza" ] _ -> pure () getPackageArgs :: Path Abs Dir -> RIO env [String] getPackageArgs setupDir = case package.setupDeps of -- The package is using the Cabal custom-setup configuration -- introduced in Cabal 1.24. In this case, the package is -- providing an explicit list of dependencies, and we should -- simply use all of them. Just customSetupDeps -> do unless (Map.member (mkPackageName "Cabal") customSetupDeps) $ prettyWarnL [ fromPackageName package.name , flow "has a setup-depends field, but it does not mention \ \a Cabal dependency. This is likely to cause build \ \errors." ] matchedDeps <- forM (Map.toList customSetupDeps) $ \(name, depValue) -> do let matches (PackageIdentifier name' version) = name == name' && version `withinRange` depValue.versionRange case filter (matches . fst) (Map.toList allDeps) of x:xs -> do unless (null xs) $ prettyWarnL [ flow "Found multiple installed packages for \ \custom-setup dep:" , style Current (fromPackageName name) <> "." ] pure ("-package-id=" ++ ghcPkgIdString (snd x), Just (fst x)) [] -> do prettyWarnL [ flow "Could not find custom-setup dep:" , style Current (fromPackageName name) <> "." 
] pure ("-package=" ++ packageNameString name, Nothing) let depsArgs = map fst matchedDeps -- Generate setup_macros.h and provide it to ghc let macroDeps = mapMaybe snd matchedDeps cppMacrosFile = setupDir relFileSetupMacrosH cppArgs = ["-optP-include", "-optP" ++ toFilePath cppMacrosFile] writeBinaryFileAtomic cppMacrosFile ( encodeUtf8Builder ( T.pack ( C.generatePackageVersionMacros package.version macroDeps ) ) ) pure (packageDBArgs ++ depsArgs ++ cppArgs) -- This branch is usually taken for builds, and is always taken -- for `stack sdist`. -- -- This approach is debatable. It adds access to the snapshot -- package database for Cabal. There are two possible objections: -- -- 1. This doesn't isolate the build enough; arbitrary other -- packages available could cause the build to succeed or fail. -- -- 2. This doesn't provide enough packages: we should also -- include the local database when building local packages. -- -- Currently, this branch is only taken via `stack sdist` or when -- explicitly requested in the stack.yaml file. Nothing -> do warnCustomNoDeps let packageDBArgs' = case package.buildType of -- The Configure build type is very similar to Simple. As -- such, Stack builds the setup executable in much the -- same way as it would in the case of Simple. C.Configure -> [ "-hide-all-packages" , "-package base" ] -- NOTE: This is different from packageDBArgs above in -- that it does not include the local database and does -- not pass in the -hide-all-packages argument _ -> map (("-package-db=" ++) . 
toFilePathNoTrailingSep) ee.baseConfigOpts.extraDBs <> [ "-package-db=" <> toFilePathNoTrailingSep ee.baseConfigOpts.snapDB ] pure $ [ "-clear-package-db" , "-global-package-db" ] <> packageDBArgs' <> cabalPackageArg setupArgs = ("--builddir=" ++ toFilePathNoTrailingSep distRelativeDir') : args runExe :: Path Abs File -> [String] -> RIO env () runExe exeName fullArgs = do compilerVer <- view actualCompilerVersionL runAndOutput compilerVer `catch` \ece -> do (mlogFile, bss) <- case outputType of OTConsole _ -> pure (Nothing, []) OTLogFile logFile h -> if keepOutputOpen == KeepOpen then pure (Nothing, []) -- expected failure build continues further else do liftIO $ hClose h fmap (Just logFile,) $ withSourceFile (toFilePath logFile) $ \src -> runConduit $ src .| CT.decodeUtf8Lenient .| mungeBuildOutput stripTHLoading makeAbsolute pkgDir compilerVer .| CL.consume prettyThrowM $ CabalExitedUnsuccessfully (eceExitCode ece) pkgId exeName fullArgs mlogFile bss where runAndOutput :: ActualCompiler -> RIO env () runAndOutput compilerVer = withWorkingDir (toFilePath pkgDir) $ withProcessContext menv $ case outputType of OTLogFile _ h -> do let prefixWithTimestamps = if config.prefixTimestamps then PrefixWithTimestamps else WithoutTimestamps void $ sinkProcessStderrStdout (toFilePath exeName) fullArgs (sinkWithTimestamps prefixWithTimestamps h) (sinkWithTimestamps prefixWithTimestamps h) OTConsole mprefix -> let prefix = fromMaybe mempty mprefix in void $ sinkProcessStderrStdout (toFilePath exeName) fullArgs (outputSink KeepTHLoading LevelWarn compilerVer prefix) (outputSink stripTHLoading LevelInfo compilerVer prefix) outputSink :: HasCallStack => ExcludeTHLoading -> LogLevel -> ActualCompiler -> Utf8Builder -> ConduitM S.ByteString Void (RIO env) () outputSink excludeTH level compilerVer prefix = CT.decodeUtf8Lenient .| mungeBuildOutput excludeTH makeAbsolute pkgDir compilerVer .| CL.mapM_ (logGeneric "" level . (prefix <>) . 
display)

          -- If users want control, we should add a config option for this
          makeAbsolute :: ConvertPathsToAbsolute
          makeAbsolute = case stripTHLoading of
            ExcludeTHLoading -> ConvertPathsToAbsolute
            KeepTHLoading -> KeepPathsAsIs

      -- Determine the setup executable: either the pre-built simple Setup
      -- shim, or compile the package's own Setup.hs (once per package name,
      -- tracked in ee.customBuilt).
      exeName <- case setupexehs of
        SimpleSetupExe setupExe -> pure setupExe
        OtherSetupHs setuphs -> do
          distDir <- distDirFromDir pkgDir
          let setupDir = distDir relDirSetup
              outputFile = setupDir relFileSetupLower
          customBuilt <- liftIO $ readIORef ee.customBuilt
          if Set.member package.name customBuilt
            then pure outputFile
            else do
              ensureDir setupDir
              compilerPath <- view $ compilerPathsL . to (.compiler)
              packageArgs <- getPackageArgs setupDir
              -- Compile Setup.hs together with Stack's setup shim, with the
              -- shim's mainOverride as the entry point.
              runExe compilerPath $
                [ "--make"
                , "-odir", toFilePathNoTrailingSep setupDir
                , "-hidir", toFilePathNoTrailingSep setupDir
                , "-i", "-i."
                ] <> packageArgs <>
                [ toFilePath setuphs
                , toFilePath ee.setupShimHs
                , "-main-is"
                , "StackSetupShim.mainOverride"
                , "-o", toFilePath outputFile
                , "-threaded"
                ]
                -- Apply GHC options
                -- https://github.com/commercialhaskell/stack/issues/4526
                <> map T.unpack
                     ( Map.findWithDefault [] AGOEverything config.ghcOptionsByCat
                     <> case config.applyGhcOptions of
                          AGOEverything -> ee.buildOptsCLI.ghcOptions
                          AGOTargets -> []
                          AGOLocals -> []
                     )
              -- Record that this package's custom setup is now built, so it
              -- is not recompiled on a later call.
              liftIO $ atomicModifyIORef' ee.customBuilt $ \oldCustomBuilt ->
                (Set.insert package.name oldCustomBuilt, ())
              pure outputFile
      let cabalVerboseArg =
            let CabalVerbosity cv = ee.buildOpts.cabalVerbose
            in  "--verbose=" <> showForCabal cv
      runExe exeName $ cabalVerboseArg:setupArgs

-- | Strip Template Haskell "Loading package" lines and making paths absolute.
mungeBuildOutput ::
     forall m. (MonadIO m, MonadUnliftIO m)
  => ExcludeTHLoading -- ^ exclude TH loading?
  -> ConvertPathsToAbsolute -- ^ convert paths to absolute?
  -> Path Abs Dir -- ^ package's root directory
  -> ActualCompiler -- ^ compiler we're building with
  -> ConduitM Text Text m ()
mungeBuildOutput excludeTHLoading makeAbsolute pkgDir compilerVer = void $
  CT.lines
  .| CL.map stripCR
  .| CL.filter (not .
isTHLoading)
  .| filterLinkerWarnings
  .| toAbsolute
 where
  -- | Is this line a Template Haskell "Loading package" line?
  isTHLoading :: Text -> Bool
  isTHLoading = case excludeTHLoading of
    KeepTHLoading -> const False
    ExcludeTHLoading -> \bs ->
         "Loading package " `T.isPrefixOf` bs
      && ("done." `T.isSuffixOf` bs || "done.\r" `T.isSuffixOf` bs)

  filterLinkerWarnings :: ConduitM Text Text m ()
  filterLinkerWarnings
    -- Check for ghc 7.8 since it's the only one prone to producing
    -- linker warnings on Windows x64
    -- NOTE(review): the guard looks inverted relative to the comment above:
    -- as written, linker warnings are filtered only for GHC *older* than
    -- 7.8, and passed through unchanged for 7.8 and later. Confirm intent.
    | getGhcVersion compilerVer >= mkVersion [7, 8] = doNothing
    | otherwise = CL.filter (not . isLinkerWarning)

  isLinkerWarning :: Text -> Bool
  isLinkerWarning str =
       (  "ghc.exe: warning:" `T.isPrefixOf` str
       || "ghc.EXE: warning:" `T.isPrefixOf` str
       )
    && "is linked instead of __imp_" `T.isInfixOf` str

  -- | Convert GHC error lines with file paths to have absolute file paths
  toAbsolute :: ConduitM Text Text m ()
  toAbsolute = case makeAbsolute of
    KeepPathsAsIs -> doNothing
    ConvertPathsToAbsolute -> CL.mapM toAbsolutePath

  toAbsolutePath :: Text -> m Text
  toAbsolutePath bs = do
    -- Split at the first colon: x is the candidate (relative) file path, y is
    -- the remainder, expected to carry a GHC line/column location suffix.
    let (x, y) = T.break (== ':') bs
    mabs <-
      if isValidSuffix y
        then
          fmap (fmap ((T.takeWhile isSpace x <>) . T.pack . toFilePath)) $
            forgivingResolveFile pkgDir (T.unpack $ T.dropWhile isSpace x)
              `catch` \(_ :: PathException) -> pure Nothing
        else pure Nothing
    case mabs of
      Nothing -> pure bs
      Just fp -> pure $ fp `T.append` y

  -- Pass every line through unchanged.
  doNothing :: ConduitM Text Text m ()
  doNothing = awaitForever yield

  -- | Match the error location format at the end of lines
  isValidSuffix = isRight . parseOnly lineCol
  -- Accepts @:LINE:COL[-COL]:@ or @:(LINE,COL)-(LINE,COL):@ suffixes.
  lineCol = char ':'
    >> choice
         [ num >> char ':' >> num >> optional (char '-' >> num) >> pure ()
         , char '(' >> num >> char ',' >> num >> P.string ")-(" >> num
             >> char ',' >> num >> char ')' >> pure ()
         ]
    >> char ':'
    >> pure ()
   where
    num = some digit

-- | Whether to prefix log lines with timestamps.
data PrefixWithTimestamps
  = PrefixWithTimestamps -- ^ Prepend a formatted timestamp to each line.
  | WithoutTimestamps -- ^ Pass lines through unchanged.

-- | Write stream of lines to handle, but adding timestamps.
sinkWithTimestamps ::
     MonadIO m
  => PrefixWithTimestamps
  -> Handle
  -> ConduitT ByteString Void m ()
sinkWithTimestamps prefixWithTimestamps h =
  case prefixWithTimestamps of
    PrefixWithTimestamps ->
      -- Split into lines, stamp each one, then restore the newline that
      -- CB.lines stripped before writing to the handle.
      CB.lines .| CL.mapM addTimestamp .| CL.map (<> "\n") .| sinkHandle h
    WithoutTimestamps -> sinkHandle h
 where
  addTimestamp theLine = do
    now <- liftIO getZonedTime
    pure (formatZonedTimeForLog now <> " " <> theLine)

-- | Format a time in ISO8601 format. We choose ZonedTime over UTCTime
-- because a user expects to see logs in their local time, and would
-- be confused to see UTC time. Stack's debug logs also use the local
-- time zone.
formatZonedTimeForLog :: ZonedTime -> ByteString
formatZonedTimeForLog =
  S8.pack . formatTime defaultTimeLocale "%Y-%m-%dT%H:%M:%S%6Q"

-- | Find the Setup.hs or Setup.lhs in the given directory. If none exists,
-- throw an exception.
getSetupHs ::
     Path Abs Dir -- ^ project directory
  -> IO (Path Abs File)
getSetupHs dir = do
  exists1 <- doesFileExist fp1
  if exists1
    then pure fp1
    else do
      exists2 <- doesFileExist fp2
      if exists2
        then pure fp2
        else throwM $ NoSetupHsFound dir
 where
  -- Setup.hs takes precedence over Setup.lhs when both exist.
  fp1 = dir relFileSetupHs
  fp2 = dir relFileSetupLhs
stack-2.15.7/src/Stack/Build/ExecutePackage.hs0000644000000000000000000016406414620153445017225 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}

-- | Perform a build
module Stack.Build.ExecutePackage
  ( singleBuild
  , singleTest
  , singleBench
  ) where

import Control.Concurrent.Execute ( ActionContext (..), ActionId (..)
) import Control.Monad.Extra ( whenJust ) import qualified Data.ByteString as S import qualified Data.ByteString.Char8 as S8 import qualified Data.ByteString.Lazy as BL import Conduit ( runConduitRes ) import qualified Data.Conduit.Filesystem as CF import qualified Data.Conduit.List as CL import Data.Conduit.Process.Typed ( createSource ) import qualified Data.Conduit.Text as CT import qualified Data.List as L import qualified Data.Map.Strict as Map import qualified Data.Set as Set import qualified Data.Text as T import qualified Distribution.PackageDescription as C import Distribution.System ( OS (..), Platform (..) ) import qualified Distribution.Text as C import Distribution.Types.MungedPackageName ( encodeCompatPackageName ) import Distribution.Types.UnqualComponentName ( mkUnqualComponentName ) import Distribution.Version ( mkVersion ) import Path ( (), addExtension, filename, isProperPrefixOf, parent , parseRelDir, parseRelFile, stripProperPrefix ) import Path.Extra ( toFilePathNoTrailingSep ) import Path.IO ( copyFile, doesFileExist, ensureDir, ignoringAbsence , removeDirRecur, removeFile ) import RIO.NonEmpty ( nonEmpty ) import RIO.Process ( byteStringInput, findExecutable, getStderr, getStdout , inherit, modifyEnvVars, proc, setStderr, setStdin , setStdout, showProcessArgDebug, useHandleOpen, waitExitCode , withProcessWait, withWorkingDir, HasProcessContext ) import Stack.Build.Cache ( TestStatus (..), deleteCaches, getTestStatus , markExeInstalled, markExeNotInstalled, readPrecompiledCache , setTestStatus, tryGetCabalMod, tryGetConfigCache , tryGetPackageProjectRoot, tryGetSetupConfigMod , writeBuildCache, writeCabalMod, writeConfigCache , writeFlagCache, writePrecompiledCache , writePackageProjectRoot, writeSetupConfigMod ) import Stack.Build.ExecuteEnv ( ExcludeTHLoading (..), ExecutableBuildStatus (..) , ExecuteEnv (..), KeepOutputOpen (..), OutputType (..) 
, withSingleContext ) import Stack.Build.Source ( addUnlistedToBuildCache ) import Stack.Config.ConfigureScript ( ensureConfigureScript ) import Stack.Constants ( bindirSuffix, compilerOptionsCabalFlag, relDirBuild , testGhcEnvRelFile ) import Stack.Constants.Config ( distDirFromDir, distRelativeDir, hpcDirFromDir , hpcRelativeDir, setupConfigFromDir ) import Stack.Coverage ( generateHpcReport, updateTixFile ) import Stack.GhcPkg ( ghcPkg, unregisterGhcPkgIds ) import Stack.Package ( buildLogPath, buildableExes, buildableSubLibs , hasBuildableMainLibrary, mainLibraryHasExposedModules ) import Stack.PackageDump ( conduitDumpPackage, ghcPkgDescribe ) import Stack.Prelude import Stack.Types.Build ( ConfigCache (..), PrecompiledCache (..), Task (..) , TaskConfigOpts (..), TaskType (..), taskAnyMissing , taskIsTarget, taskLocation, taskProvides , taskTargetIsMutable, taskTypePackageIdentifier ) import qualified Stack.Types.Build as ConfigCache ( ConfigCache (..) ) import Stack.Types.Build.Exception ( BuildException (..), BuildPrettyException (..) ) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..), projectRootL ) import Stack.Types.BuildOpts ( BenchmarkOpts (..), BuildOpts (..), HaddockOpts (..) , TestOpts (..) ) import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..) ) import Stack.Types.CompCollection ( collectionKeyValueList, collectionLookup , foldComponentToAnotherCollection, getBuildableListText ) import Stack.Types.Compiler ( ActualCompiler (..), WhichCompiler (..), getGhcVersion , whichCompilerL ) import Stack.Types.CompilerPaths ( CompilerPaths (..), GhcPkgExe (..), HasCompiler (..) , cpWhich, getGhcPkgExe ) import qualified Stack.Types.Component as Component import Stack.Types.Config ( Config (..), HasConfig (..) ) import Stack.Types.ConfigureOpts ( BaseConfigOpts (..), ConfigureOpts (..) ) import Stack.Types.Curator ( Curator (..) ) import Stack.Types.DumpPackage ( DumpPackage (..) 
) import Stack.Types.EnvConfig ( EnvConfig (..), HasEnvConfig (..), actualCompilerVersionL , appropriateGhcColorFlag ) import Stack.Types.EnvSettings ( EnvSettings (..) ) import Stack.Types.GhcPkgId ( GhcPkgId, unGhcPkgId ) import Stack.Types.GlobalOpts ( GlobalOpts (..) ) import Stack.Types.Installed ( InstallLocation (..), Installed (..), InstalledMap , InstalledLibraryInfo (..) ) import Stack.Types.IsMutable ( IsMutable (..) ) import Stack.Types.NamedComponent ( NamedComponent, exeComponents, isCBench, isCTest , renderComponent ) import Stack.Types.Package ( LocalPackage (..), Package (..), installedMapGhcPkgId , runMemoizedWith, simpleInstalledLib , toCabalMungedPackageName ) import Stack.Types.PackageFile ( PackageWarning (..) ) import Stack.Types.Platform ( HasPlatform (..) ) import Stack.Types.Runner ( HasRunner, globalOptsL ) import System.IO.Error ( isDoesNotExistError ) import System.PosixCompat.Files ( createLink, getFileStatus, modificationTime ) import System.Random ( randomIO ) -- | Generate the ConfigCache getConfigCache :: HasEnvConfig env => ExecuteEnv -> Task -> InstalledMap -> Bool -> Bool -> RIO env (Map PackageIdentifier GhcPkgId, ConfigCache) getConfigCache ee task installedMap enableTest enableBench = do let extra = -- We enable tests if the test suite dependencies are already -- installed, so that we avoid unnecessary recompilation based on -- cabal_macros.h changes when switching between 'stack build' and -- 'stack test'. See: -- https://github.com/commercialhaskell/stack/issues/805 case task.taskType of TTLocalMutable _ -> -- FIXME: make this work with exact-configuration. -- Not sure how to plumb the info atm. 
See -- https://github.com/commercialhaskell/stack/issues/2049 [ "--enable-tests" | enableTest] ++ [ "--enable-benchmarks" | enableBench] TTRemotePackage{} -> [] idMap <- liftIO $ readTVarIO ee.ghcPkgIds let getMissing ident = case Map.lookup ident idMap of Nothing -- Expect to instead find it in installedMap if it's -- an initialBuildSteps target. | ee.buildOptsCLI.initialBuildSteps && taskIsTarget task , Just (_, installed) <- Map.lookup (pkgName ident) installedMap -> pure $ installedToGhcPkgId ident installed Just installed -> pure $ installedToGhcPkgId ident installed _ -> throwM $ PackageIdMissingBug ident installedToGhcPkgId ident (Library ident' libInfo) = assert (ident == ident') (installedMapGhcPkgId ident libInfo) installedToGhcPkgId _ (Executable _) = mempty TaskConfigOpts missing mkOpts = task.configOpts missingMapList <- traverse getMissing $ toList missing let missing' = Map.unions missingMapList configureOpts' = mkOpts missing' configureOpts = configureOpts' { nonPathRelated = configureOpts'.nonPathRelated ++ map T.unpack extra } deps = Set.fromList $ Map.elems missing' ++ Map.elems task.present components = case task.taskType of TTLocalMutable lp -> Set.map (encodeUtf8 . 
renderComponent) lp.components TTRemotePackage{} -> Set.empty cache = ConfigCache { configureOpts , deps , components , buildHaddocks = task.buildHaddocks , pkgSrc = task.cachePkgSrc , pathEnvVar = ee.pathEnvVar } allDepsMap = Map.union missing' task.present pure (allDepsMap, cache) -- | Ensure that the configuration for the package matches what is given ensureConfig :: HasEnvConfig env => ConfigCache -- ^ newConfigCache -> Path Abs Dir -- ^ package directory -> BuildOpts -> RIO env () -- ^ announce -> (ExcludeTHLoading -> [String] -> RIO env ()) -- ^ cabal -> Path Abs File -- ^ Cabal file -> Task -> RIO env Bool ensureConfig newConfigCache pkgDir buildOpts announce cabal cabalFP task = do newCabalMod <- liftIO $ modificationTime <$> getFileStatus (toFilePath cabalFP) setupConfigfp <- setupConfigFromDir pkgDir let getNewSetupConfigMod = liftIO $ either (const Nothing) (Just . modificationTime) <$> tryJust (guard . isDoesNotExistError) (getFileStatus (toFilePath setupConfigfp)) newSetupConfigMod <- getNewSetupConfigMod newProjectRoot <- S8.pack . toFilePath <$> view projectRootL -- See https://github.com/commercialhaskell/stack/issues/3554. This can be -- dropped when Stack drops support for GHC < 8.4. taskAnyMissingHackEnabled <- view $ actualCompilerVersionL . to getGhcVersion . to (< mkVersion [8, 4]) needConfig <- if buildOpts.reconfigure -- The reason 'taskAnyMissing' is necessary is a bug in Cabal. See: -- . -- The problem is that Cabal may end up generating the same package ID -- for a dependency, even if the ABI has changed. As a result, without -- check, Stack would think that a reconfigure is unnecessary, when in -- fact we _do_ need to reconfigure. The details here suck. We really -- need proper hashes for package identifiers. 
|| (taskAnyMissingHackEnabled && taskAnyMissing task) then pure True else do -- We can ignore the components portion of the config -- cache, because it's just used to inform 'construct -- plan that we need to plan to build additional -- components. These components don't affect the actual -- package configuration. let ignoreComponents :: ConfigCache -> ConfigCache ignoreComponents cc = cc { ConfigCache.components = Set.empty } -- Determine the old and new configuration in the local directory, to -- determine if we need to reconfigure. mOldConfigCache <- tryGetConfigCache pkgDir mOldCabalMod <- tryGetCabalMod pkgDir -- Cabal's setup-config is created per OS/Cabal version, multiple -- projects using the same package could get a conflict because of this mOldSetupConfigMod <- tryGetSetupConfigMod pkgDir mOldProjectRoot <- tryGetPackageProjectRoot pkgDir pure $ fmap ignoreComponents mOldConfigCache /= Just (ignoreComponents newConfigCache) || mOldCabalMod /= Just newCabalMod || mOldSetupConfigMod /= newSetupConfigMod || mOldProjectRoot /= Just newProjectRoot when task.buildTypeConfig $ -- When build-type is Configure, we need to have a configure script in the -- local directory. If it doesn't exist, build it with autoreconf -i. See: -- https://github.com/commercialhaskell/stack/issues/3534 ensureConfigureScript pkgDir when needConfig $ do deleteCaches pkgDir announce cp <- view compilerPathsL let (GhcPkgExe pkgPath) = cp.pkg let programNames = case cpWhich cp of Ghc -> [ ("ghc", toFilePath cp.compiler) , ("ghc-pkg", toFilePath pkgPath) ] exes <- forM programNames $ \(name, file) -> do mpath <- findExecutable file pure $ case mpath of Left _ -> [] Right x -> pure $ concat ["--with-", name, "=", x] -- Configure cabal with arguments determined by -- Stack.Types.Build.ureOpts cabal KeepTHLoading $ "configure" : concat [ concat exes , newConfigCache.configureOpts.pathRelated , newConfigCache.configureOpts.nonPathRelated ] -- Only write the cache for local packages. 
-- Remote packages are built in a
    -- temporary directory so the cache would never be used anyway.
    case task.taskType of
      TTLocalMutable{} -> writeConfigCache pkgDir newConfigCache
      TTRemotePackage{} -> pure ()
    writeCabalMod pkgDir newCabalMod
    -- This file gets updated one more time by the configure step, so get the
    -- most recent value. We could instead change our logic above to check if
    -- our config mod file is newer than the file above, but this seems
    -- reasonable too.
    getNewSetupConfigMod >>= writeSetupConfigMod pkgDir
    writePackageProjectRoot pkgDir newProjectRoot
  pure needConfig

-- | Make a padded prefix for log messages
packageNamePrefix :: ExecuteEnv -> PackageName -> String
packageNamePrefix ee name' =
  let name = packageNameString name'
      paddedName = case ee.largestPackageName of
        Nothing -> name
        -- Right-pad with spaces so all package prefixes line up;
        -- largestPackageName is expected to be >= any individual name length.
        Just len ->
          assert (len >= length name) $ take len $ name ++ L.repeat ' '
  in  paddedName <> "> "

-- | Log a plain progress message for a task, prefixed with the padded
-- package name.
announceTask ::
     HasLogFunc env
  => ExecuteEnv
  -> TaskType
  -> Utf8Builder
  -> RIO env ()
announceTask ee taskType action = logInfo $
     fromString
       (packageNamePrefix ee (pkgName (taskTypePackageIdentifier taskType)))
  <> action

-- Implements running a package's build, used to implement 'ATBuild' and
-- 'ATBuildFinal' tasks. In particular this does the following:
--
-- * Checks if the package exists in the precompiled cache, and if so,
--   add it to the database instead of performing the build.
--
-- * Runs the configure step if needed ('ensureConfig')
--
-- * Runs the build step
--
-- * Generates haddocks
--
-- * Registers the library and copies the built executables into the
--   local install directory. Note that this is literally invoking Cabal
--   with @copy@, and not the copying done by @stack install@ - that is
--   handled by 'copyExecutables'.
singleBuild ::
     forall env. (HasEnvConfig env, HasRunner env)
  => ActionContext
  -> ExecuteEnv
  -> Task
  -> InstalledMap
  -> Bool -- ^ Is this a final build?
-> RIO env () singleBuild ac ee task installedMap isFinalBuild = do cabalVersion <- view $ envConfigL . to (.compilerPaths.cabalVersion) -- The old version of Cabal (the library) copy did not allow the components -- to be copied to be specified. let isOldCabalCopy = cabalVersion < mkVersion [2, 0] (allDepsMap, cache) <- getConfigCache ee task installedMap enableTests enableBenchmarks let bcoSnapInstallRoot = ee.baseConfigOpts.snapInstallRoot mprecompiled <- getPrecompiled cache task.taskType bcoSnapInstallRoot minstalled <- case mprecompiled of Just precompiled -> copyPreCompiled ee task pkgId precompiled Nothing -> do curator <- view $ buildConfigL . to (.curator) realConfigAndBuild isOldCabalCopy cache curator allDepsMap case minstalled of Nothing -> pure () Just installed -> do writeFlagCache installed cache liftIO $ atomically $ modifyTVar ee.ghcPkgIds $ Map.insert pkgId installed where pkgId = taskProvides task PackageIdentifier pname _ = pkgId doHaddock curator package = task.buildHaddocks && not isFinalBuild -- Works around haddock failing on bytestring-builder since it has no -- modules when bytestring is new enough. && mainLibraryHasExposedModules package -- Special help for the curator tool to avoid haddocks that are known -- to fail && maybe True (Set.notMember pname . 
(.skipHaddock)) curator buildingFinals = isFinalBuild || task.allInOne enableTests = buildingFinals && any isCTest (taskComponents task) enableBenchmarks = buildingFinals && any isCBench (taskComponents task) annSuffix isOldCabalCopy executableBuildStatuses = if result == "" then "" else " (" <> result <> ")" where result = T.intercalate " + " $ concat [ ["lib" | task.allInOne && hasLib] , ["sub-lib" | task.allInOne && hasSubLib] , ["exe" | task.allInOne && hasExe] , ["test" | enableTests] , ["bench" | enableBenchmarks] ] (hasLib, hasSubLib, hasExe) = case task.taskType of TTLocalMutable lp -> let package = lp.package hasLibrary = hasBuildableMainLibrary package hasSubLibraries = not $ null package.subLibraries hasExecutables = not . Set.null $ exesToBuild isOldCabalCopy executableBuildStatuses lp in (hasLibrary, hasSubLibraries, hasExecutables) -- This isn't true, but we don't want to have this info for upstream deps. _ -> (False, False, False) realConfigAndBuild isOldCabalCopy cache mcurator allDepsMap = withSingleContext ac ee task.taskType allDepsMap Nothing $ \package cabalFP pkgDir cabal0 announce _outputType -> do let cabal = cabal0 CloseOnException executableBuildStatuses <- getExecutableBuildStatuses package pkgDir when ( not (cabalIsSatisfied isOldCabalCopy executableBuildStatuses) && taskIsTarget task ) $ prettyInfoL [ flow "Building all executables for" , style Current (fromPackageName package.name) , flow "once. After a successful build of all of them, only \ \specified executables will be rebuilt." 
] _neededConfig <- ensureConfig cache pkgDir ee.buildOpts ( announce ( "configure" <> display (annSuffix isOldCabalCopy executableBuildStatuses) ) ) cabal cabalFP task let installedMapHasThisPkg :: Bool installedMapHasThisPkg = case Map.lookup package.name installedMap of Just (_, Library ident _) -> ident == pkgId Just (_, Executable _) -> True _ -> False case ( ee.buildOptsCLI.onlyConfigure , ee.buildOptsCLI.initialBuildSteps && taskIsTarget task ) of -- A full build is done if there are downstream actions, -- because their configure step will require that this -- package is built. See -- https://github.com/commercialhaskell/stack/issues/2787 (True, _) | null ac.downstream -> pure Nothing (_, True) | null ac.downstream || installedMapHasThisPkg -> do initialBuildSteps isOldCabalCopy executableBuildStatuses cabal announce pure Nothing _ -> fulfillCuratorBuildExpectations pname mcurator enableTests enableBenchmarks Nothing (Just <$> realBuild isOldCabalCopy cache package pkgDir cabal0 announce executableBuildStatuses) initialBuildSteps isOldCabalCopy executableBuildStatuses cabal announce = do announce ( "initial-build-steps" <> display (annSuffix isOldCabalCopy executableBuildStatuses) ) cabal KeepTHLoading ["repl", "stack-initial-build-steps"] realBuild :: Bool -- ^ Is Cabal copy limited to all libraries and executables? -> ConfigCache -> Package -> Path Abs Dir -> (KeepOutputOpen -> ExcludeTHLoading -> [String] -> RIO env ()) -> (Utf8Builder -> RIO env ()) -- ^ A plain 'announce' function -> Map Text ExecutableBuildStatus -> RIO env Installed realBuild isOldCabalCopy cache package pkgDir cabal0 announce executableBuildStatuses = do let cabal = cabal0 CloseOnException wc <- view $ actualCompilerVersionL . 
whichCompilerL markExeNotInstalled (taskLocation task) pkgId case task.taskType of TTLocalMutable lp -> do when enableTests $ setTestStatus pkgDir TSUnknown caches <- runMemoizedWith lp.newBuildCaches mapM_ (uncurry (writeBuildCache pkgDir)) (Map.toList caches) TTRemotePackage{} -> pure () -- FIXME: only output these if they're in the build plan. let postBuildCheck _succeeded = do mlocalWarnings <- case task.taskType of TTLocalMutable lp -> do warnings <- checkForUnlistedFiles task.taskType pkgDir -- TODO: Perhaps only emit these warnings for non extra-dep? pure (Just (lp.cabalFP, warnings)) _ -> pure Nothing -- NOTE: once -- https://github.com/commercialhaskell/stack/issues/2649 -- is resolved, we will want to partition the warnings -- based on variety, and output in different lists. let showModuleWarning (UnlistedModulesWarning comp modules) = "- In" <+> fromString (T.unpack (renderComponent comp)) <> ":" <> line <> indent 4 ( mconcat $ L.intersperse line $ map (style Good . fromString . C.display) modules ) forM_ mlocalWarnings $ \(cabalFP, warnings) -> unless (null warnings) $ prettyWarn $ flow "The following modules should be added to \ \exposed-modules or other-modules in" <+> pretty cabalFP <> ":" <> line <> indent 4 ( mconcat $ L.intersperse line $ map showModuleWarning warnings ) <> blankLine <> flow "Missing modules in the Cabal file are likely to cause \ \undefined reference errors from the linker, along with \ \other problems." 
actualCompiler <- view actualCompilerVersionL () <- announce ( "build" <> display (annSuffix isOldCabalCopy executableBuildStatuses) <> " with " <> display actualCompiler ) config <- view configL extraOpts <- extraBuildOptions wc ee.buildOpts let stripTHLoading | config.hideTHLoading = ExcludeTHLoading | otherwise = KeepTHLoading (buildOpts, copyOpts) <- case (task.taskType, task.allInOne, isFinalBuild) of (_, True, True) -> throwM AllInOneBuildBug (TTLocalMutable lp, False, False) -> let componentOpts = primaryComponentOptions isOldCabalCopy executableBuildStatuses lp in pure (componentOpts, componentOpts) (TTLocalMutable lp, False, True) -> pure (finalComponentOptions lp, []) (TTLocalMutable lp, True, False) -> let componentOpts = primaryComponentOptions isOldCabalCopy executableBuildStatuses lp in pure (componentOpts <> finalComponentOptions lp, componentOpts) (TTRemotePackage{}, _, _) -> pure ([], []) cabal stripTHLoading ("build" : buildOpts <> extraOpts) `catch` \ex -> case ex of CabalExitedUnsuccessfully{} -> postBuildCheck False >> prettyThrowM ex _ -> throwM ex postBuildCheck True mcurator <- view $ buildConfigL . to (.curator) when (doHaddock mcurator package) $ do let isTaskTargetMutable = taskTargetIsMutable task == Mutable isHaddockForHackage = ee.buildOpts.haddockForHackage && isTaskTargetMutable announce $ if isHaddockForHackage then "haddock for Hackage" else "haddock" -- For GHC 8.4 and later, provide the --quickjump option. 
let quickjump = case actualCompiler of ACGhc ghcVer | ghcVer >= mkVersion [8, 4] -> ["--haddock-option=--quickjump"] _ -> [] fulfillHaddockExpectations pname mcurator $ \keep -> do let args = concat ( ( if isHaddockForHackage then [ [ "--for-hackage" ] ] else [ [ "--html" , "--hoogle" , "--html-location=../$pkg-$version/" ] , [ "--haddock-option=--hyperlinked-source" | ee.buildOpts.haddockHyperlinkSource ] , [ "--internal" | ee.buildOpts.haddockInternal ] , quickjump ] ) <> [ [ "--haddock-option=" <> opt | opt <- ee.buildOpts.haddockOpts.additionalArgs ] ] ) cabal0 keep KeepTHLoading $ "haddock" : args let hasLibrary = hasBuildableMainLibrary package hasSubLibraries = not $ null package.subLibraries hasExecutables = not $ null package.executables shouldCopy = not isFinalBuild && (hasLibrary || hasSubLibraries || hasExecutables) when shouldCopy $ withMVar ee.installLock $ \() -> do announce "copy/register" let copyArgs = "copy" : if isOldCabalCopy then [] else copyOpts eres <- try $ cabal KeepTHLoading copyArgs case eres of Left err@CabalExitedUnsuccessfully{} -> throwM $ CabalCopyFailed (package.buildType == C.Simple) (displayException err) _ -> pure () when (hasLibrary || hasSubLibraries) $ cabal KeepTHLoading ["register"] copyDdumpFilesIfNeeded buildingFinals ee.buildOpts.ddumpDir installedPkg <- fetchAndMarkInstalledPackage ee (taskLocation task) package pkgId postProcessRemotePackage task.taskType ac cache ee installedPkg package pkgId pkgDir pure installedPkg -- | Action in the case that the task relates to a remote package. 
postProcessRemotePackage ::
     (HasEnvConfig env)
  => TaskType
  -> ActionContext
  -> ConfigCache
  -> ExecuteEnv
  -> Installed
  -> Package
  -> PackageIdentifier
  -> Path b Dir
  -> RIO env ()
postProcessRemotePackage
    taskType
    ac
    cache
    ee
    installedPackage
    package
    pkgId
    pkgDir
  = case taskType of
      TTRemotePackage isMutable _ loc -> do
        -- Only an immutable remote package is recorded in the precompiled
        -- cache; the entry is keyed on the configure options and whether
        -- haddocks were built.
        when (isMutable == Immutable) $
          writePrecompiledCache
            ee.baseConfigOpts
            loc
            cache.configureOpts
            cache.buildHaddocks
            installedPackage
            (buildableExes package)
        -- For packages from a package index, pkgDir is in the tmp directory. We
        -- eagerly delete it if no other tasks require it, to reduce space usage
        -- in tmp (#3018).
        let remaining = Set.filter (\(ActionId x _) -> x == pkgId) ac.remaining
        when (null remaining) $ removeDirRecur pkgDir
      -- Nothing to do for a local mutable package.
      _ -> pure ()

-- | Once all the Cabal-related tasks have run for a package, we should be able
-- to gather the information needed to create an 'Installed' package value. For
-- now, either there's a main library (in which case we consider the 'GhcPkgId'
-- values of the package's libraries) or we just consider it's an executable
-- (and mark all the executables as installed, if any).
--
-- Note that this also modifies the installedDumpPkgsTVar which is used for
-- generating Haddocks.
--
fetchAndMarkInstalledPackage ::
     (HasTerm env, HasEnvConfig env)
  => ExecuteEnv
  -> InstallLocation
  -> Package
  -> PackageIdentifier
  -> RIO env Installed
fetchAndMarkInstalledPackage ee taskInstallLocation package pkgId = do
  -- Select the package database and dump-package TVar matching the install
  -- location (snapshot vs local).
  let baseConfigOpts = ee.baseConfigOpts
      (installedPkgDb, installedDumpPkgsTVar) =
        case taskInstallLocation of
          Snap ->
            ( baseConfigOpts.snapDB
            , ee.snapshotDumpPkgs )
          Local ->
            ( baseConfigOpts.localDB
            , ee.localDumpPkgs )
  -- Only pure the sub-libraries to cache them if we also cache the main
  -- library (that is, if it exists)
  if hasBuildableMainLibrary package
    then do
      let getAndStoreGhcPkgId =
            loadInstalledPkg [installedPkgDb] installedDumpPkgsTVar
          -- Fold one sub-library into the accumulated name-to-GhcPkgId map;
          -- a sub-library whose id cannot be looked up is left out of the map.
          foldSubLibToMap subLib mapInMonad = do
            let mungedName = toCabalMungedPackageName package.name subLib.name
            maybeGhcpkgId <-
              getAndStoreGhcPkgId (encodeCompatPackageName mungedName)
            mapInMonad <&> case maybeGhcpkgId of
              Just v -> Map.insert subLib.name v
              _ -> id
      subLibsPkgIds <- foldComponentToAnotherCollection
        package.subLibraries
        foldSubLibToMap
        mempty
      mGhcPkgId <- getAndStoreGhcPkgId package.name
      case mGhcPkgId of
        -- The main library is buildable, so failing to find its GhcPkgId in
        -- the database is treated as an error.
        Nothing -> throwM $ Couldn'tFindPkgId package.name
        Just ghcPkgId -> pure $ simpleInstalledLib pkgId ghcPkgId subLibsPkgIds
    else do
      -- No main library: record the package's executables as installed and
      -- represent the whole package as an 'Executable'.
      markExeInstalled taskInstallLocation pkgId -- TODO unify somehow
                                                 -- with writeFlagCache?
      pure $ Executable pkgId

-- | Copy ddump-* files, if we are building finals and a non-empty ddump-dir
-- has been specified.
-- The (</>) path-append operators in this block were lost in extraction
-- (rendered as empty text); they are restored below.
copyDdumpFilesIfNeeded :: HasEnvConfig env => Bool -> Maybe Text -> RIO env ()
copyDdumpFilesIfNeeded buildingFinals mDdumpPath = when buildingFinals $
  whenJust mDdumpPath $ \ddumpPath -> unless (T.null ddumpPath) $ do
    distDir <- distRelativeDir
    ddumpRelDir <- parseRelDir $ T.unpack ddumpPath
    prettyDebugL
      [ "ddump-dir:"
      , pretty ddumpRelDir
      ]
    prettyDebugL
      [ "dist-dir:"
      , pretty distDir
      ]
    -- Walk the dist directory, picking out every file whose path mentions
    -- ".dump-", and mirror it under the requested ddump directory.
    runConduitRes
      $ CF.sourceDirectoryDeep False (toFilePath distDir)
      .| CL.filter (L.isInfixOf ".dump-")
      .| CL.mapM_ (\src -> liftIO $ do
          parentDir <- parent <$> parseRelDir src
          destBaseDir <-
            (ddumpRelDir </>) <$> stripProperPrefix distDir parentDir
          -- exclude .stack-work dir
          unless (".stack-work" `L.isInfixOf` toFilePath destBaseDir) $ do
            ensureDir destBaseDir
            src' <- parseRelFile src
            copyFile src' (destBaseDir </> filename src'))

-- | Get the build status of all the package executables. Do so by
-- testing whether their expected output file exists, e.g.
--
-- .stack-work/dist/x86_64-osx/Cabal-1.22.4.0/build/alpha/alpha
-- .stack-work/dist/x86_64-osx/Cabal-1.22.4.0/build/alpha/alpha.exe
-- .stack-work/dist/x86_64-osx/Cabal-1.22.4.0/build/alpha/alpha.jsexe/ (NOTE: a dir)
getExecutableBuildStatuses ::
     HasEnvConfig env
  => Package
  -> Path Abs Dir
  -> RIO env (Map Text ExecutableBuildStatus)
getExecutableBuildStatuses package pkgDir = do
  distDir <- distDirFromDir pkgDir
  platform <- view platformL
  -- Check each buildable executable of the package and collect the results
  -- into a map keyed by executable name.
  fmap
    Map.fromList
    (mapM (checkExeStatus platform distDir) (Set.toList (buildableExes package)))

-- | Check whether the given executable is defined in the given dist directory.
checkExeStatus :: HasLogFunc env => Platform -> Path b Dir -> Text -> RIO env (Text, ExecutableBuildStatus) checkExeStatus platform distDir name = do exename <- parseRelDir (T.unpack name) exists <- checkPath (distDir relDirBuild exename) pure ( name , if exists then ExecutableBuilt else ExecutableNotBuilt) where checkPath base = case platform of Platform _ Windows -> do fileandext <- parseRelFile (file ++ ".exe") doesFileExist (base fileandext) _ -> do fileandext <- parseRelFile file doesFileExist (base fileandext) where file = T.unpack name getPrecompiled :: (HasEnvConfig env) => ConfigCache -> TaskType -> Path Abs Dir -> RIO env (Maybe (PrecompiledCache Abs)) getPrecompiled cache taskType bcoSnapInstallRoot = case taskType of TTRemotePackage Immutable _ loc -> do mpc <- readPrecompiledCache loc cache.configureOpts cache.buildHaddocks case mpc of Nothing -> pure Nothing -- Only pay attention to precompiled caches that refer to packages -- within the snapshot. Just pc | maybe False (bcoSnapInstallRoot `isProperPrefixOf`) pc.library -> pure Nothing -- If old precompiled cache files are left around but snapshots are -- deleted, it is possible for the precompiled file to refer to the -- very library we're building, and if flags are changed it may try to -- copy the library to itself. This check prevents that from -- happening. 
Just pc -> do let allM _ [] = pure True allM f (x:xs) = do b <- f x if b then allM f xs else pure False b <- liftIO $ allM doesFileExist $ maybe id (:) pc.library pc.exes pure $ if b then Just pc else Nothing _ -> pure Nothing copyPreCompiled :: (HasLogFunc env, HasCompiler env, HasTerm env, HasProcessContext env, HasEnvConfig env) => ExecuteEnv -> Task -> PackageIdentifier -> PrecompiledCache b0 -> RIO env (Maybe Installed) copyPreCompiled ee task pkgId (PrecompiledCache mlib subLibs exes) = do let PackageIdentifier pname pversion = pkgId announceTask ee task.taskType "using precompiled package" -- We need to copy .conf files for the main library and all sub-libraries -- which exist in the cache, from their old snapshot to the new one. -- However, we must unregister any such library in the new snapshot, in case -- it was built with different flags. let subLibNames = Set.toList $ buildableSubLibs $ case task.taskType of TTLocalMutable lp -> lp.package TTRemotePackage _ p _ -> p toMungedPackageId :: Text -> MungedPackageId toMungedPackageId subLib = let subLibName = LSubLibName $ mkUnqualComponentName $ T.unpack subLib in MungedPackageId (MungedPackageName pname subLibName) pversion toPackageId :: MungedPackageId -> PackageIdentifier toPackageId (MungedPackageId n v) = PackageIdentifier (encodeCompatPackageName n) v allToUnregister :: [Either PackageIdentifier GhcPkgId] allToUnregister = mcons (Left pkgId <$ mlib) (map (Left . toPackageId . toMungedPackageId) subLibNames) allToRegister = mcons mlib subLibs unless (null allToRegister) $ withMVar ee.installLock $ \() -> do -- We want to ignore the global and user package databases. ghc-pkg -- allows us to specify --no-user-package-db and --package-db= on -- the command line. let pkgDb = ee.baseConfigOpts.snapDB ghcPkgExe <- getGhcPkgExe -- First unregister, silently, everything that needs to be unregistered. 
case nonEmpty allToUnregister of Nothing -> pure () Just allToUnregister' -> catchAny (unregisterGhcPkgIds False ghcPkgExe pkgDb allToUnregister') (const (pure ())) -- Now, register the cached conf files. forM_ allToRegister $ \libpath -> ghcPkg ghcPkgExe [pkgDb] ["register", "--force", toFilePath libpath] liftIO $ forM_ exes $ \exe -> do ensureDir bindir let dst = bindir filename exe createLink (toFilePath exe) (toFilePath dst) `catchIO` \_ -> copyFile exe dst case (mlib, exes) of (Nothing, _:_) -> markExeInstalled (taskLocation task) pkgId _ -> pure () -- Find the package in the database let pkgDbs = [ee.baseConfigOpts.snapDB] case mlib of Nothing -> pure $ Just $ Executable pkgId Just _ -> do mpkgid <- loadInstalledPkg pkgDbs ee.snapshotDumpPkgs pname pure $ Just $ case mpkgid of Nothing -> assert False $ Executable pkgId Just pkgid -> simpleInstalledLib pkgId pkgid mempty where bindir = ee.baseConfigOpts.snapInstallRoot bindirSuffix loadInstalledPkg :: ( HasCompiler env, HasProcessContext env, HasTerm env ) => [Path Abs Dir] -> TVar (Map GhcPkgId DumpPackage) -> PackageName -> RIO env (Maybe GhcPkgId) loadInstalledPkg pkgDbs tvar name = do pkgexe <- getGhcPkgExe dps <- ghcPkgDescribe pkgexe name pkgDbs $ conduitDumpPackage .| CL.consume case dps of [] -> pure Nothing [dp] -> do liftIO $ atomically $ modifyTVar' tvar (Map.insert dp.ghcPkgId dp) pure $ Just dp.ghcPkgId _ -> throwM $ MultipleResultsBug name dps fulfillHaddockExpectations :: (MonadUnliftIO m, HasTerm env, MonadReader env m) => PackageName -> Maybe Curator -> (KeepOutputOpen -> m ()) -> m () fulfillHaddockExpectations pname mcurator action | expectHaddockFailure mcurator = do eres <- tryAny $ action KeepOpen case eres of Right () -> prettyWarnL [ style Current (fromPackageName pname) <> ":" , flow "unexpected Haddock success." ] Left _ -> pure () where expectHaddockFailure = maybe False (Set.member pname . 
(.expectHaddockFailure)) fulfillHaddockExpectations _ _ action = action CloseOnException -- | Check if any unlisted files have been found, and add them to the build cache. checkForUnlistedFiles :: HasEnvConfig env => TaskType -> Path Abs Dir -> RIO env [PackageWarning] checkForUnlistedFiles (TTLocalMutable lp) pkgDir = do caches <- runMemoizedWith lp.newBuildCaches (addBuildCache,warnings) <- addUnlistedToBuildCache lp.package lp.cabalFP lp.components caches forM_ (Map.toList addBuildCache) $ \(component, newToCache) -> do let cache = Map.findWithDefault Map.empty component caches writeBuildCache pkgDir component $ Map.unions (cache : newToCache) pure warnings checkForUnlistedFiles TTRemotePackage{} _ = pure [] -- | Implements running a package's tests. Also handles producing -- coverage reports if coverage is enabled. singleTest :: HasEnvConfig env => TestOpts -> [Text] -> ActionContext -> ExecuteEnv -> Task -> InstalledMap -> RIO env () singleTest topts testsToRun ac ee task installedMap = do -- FIXME: Since this doesn't use cabal, we should be able to avoid using a -- full blown 'withSingleContext'. (allDepsMap, _cache) <- getConfigCache ee task installedMap True False mcurator <- view $ buildConfigL . to (.curator) let pname = pkgName $ taskProvides task expectFailure = expectTestFailure pname mcurator withSingleContext ac ee task.taskType allDepsMap (Just "test") $ \package _cabalfp pkgDir _cabal announce outputType -> do config <- view configL let needHpc = topts.coverage toRun <- if topts.disableRun then do announce "Test running disabled by --no-run-tests flag." 
pure False else if topts.rerunTests then pure True else do status <- getTestStatus pkgDir case status of TSSuccess -> do unless (null testsToRun) $ announce "skipping already passed test" pure False TSFailure | expectFailure -> do announce "skipping already failed test that's expected to fail" pure False | otherwise -> do announce "rerunning previously failed test" pure True TSUnknown -> pure True when toRun $ do buildDir <- distDirFromDir pkgDir hpcDir <- hpcDirFromDir pkgDir when needHpc (ensureDir hpcDir) let suitesToRun = [ testSuitePair | testSuitePair <- ((fmap . fmap) (.interface) <$> collectionKeyValueList) package.testSuites , let testName = fst testSuitePair , testName `elem` testsToRun ] errs <- fmap Map.unions $ forM suitesToRun $ \(testName, suiteInterface) -> do let stestName = T.unpack testName (testName', isTestTypeLib) <- case suiteInterface of C.TestSuiteLibV09{} -> pure (stestName ++ "Stub", True) C.TestSuiteExeV10{} -> pure (stestName, False) interface -> throwM (TestSuiteTypeUnsupported interface) let exeName = testName' ++ case config.platform of Platform _ Windows -> ".exe" _ -> "" tixPath <- fmap (pkgDir ) $ parseRelFile $ exeName ++ ".tix" exePath <- fmap (buildDir ) $ parseRelFile $ "build/" ++ testName' ++ "/" ++ exeName exists <- doesFileExist exePath -- in Stack.Package.packageFromPackageDescription we filter out -- package itself of any dependencies so any tests requiring loading -- of their own package library will fail so to prevent this we return -- it back here but unfortunately unconditionally installed <- case Map.lookup pname installedMap of Just (_, installed) -> pure $ Just installed Nothing -> do idMap <- liftIO $ readTVarIO ee.ghcPkgIds pure $ Map.lookup (taskProvides task) idMap let pkgGhcIdList = case installed of Just (Library _ libInfo) -> [libInfo.ghcPkgId] _ -> [] -- doctest relies on template-haskell in QuickCheck-based tests thGhcId <- case L.find ((== "template-haskell") . pkgName . (.packageIdent) . 
snd) (Map.toList ee.globalDumpPkgs) of Just (ghcId, _) -> pure ghcId Nothing -> throwIO TemplateHaskellNotFoundBug -- env variable GHC_ENVIRONMENT is set for doctest so module names for -- packages with proper dependencies should no longer get ambiguous -- see e.g. https://github.com/doctest/issues/119 -- also we set HASKELL_DIST_DIR to a package dist directory so -- doctest will be able to load modules autogenerated by Cabal let setEnv f pc = modifyEnvVars pc $ \envVars -> Map.insert "HASKELL_DIST_DIR" (T.pack $ toFilePath buildDir) $ Map.insert "GHC_ENVIRONMENT" (T.pack f) envVars fp' = ee.tempDir testGhcEnvRelFile -- Add a random suffix to avoid conflicts between parallel jobs -- See https://github.com/commercialhaskell/stack/issues/5024 randomInt <- liftIO (randomIO :: IO Int) let randomSuffix = "." <> show (abs randomInt) fp <- toFilePath <$> addExtension randomSuffix fp' let snapDBPath = toFilePathNoTrailingSep ee.baseConfigOpts.snapDB localDBPath = toFilePathNoTrailingSep ee.baseConfigOpts.localDB ghcEnv = "clear-package-db\n" <> "global-package-db\n" <> "package-db " <> fromString snapDBPath <> "\n" <> "package-db " <> fromString localDBPath <> "\n" <> foldMap ( \ghcId -> "package-id " <> display (unGhcPkgId ghcId) <> "\n" ) (pkgGhcIdList ++ thGhcId:Map.elems allDepsMap) writeFileUtf8Builder fp ghcEnv menv <- liftIO $ setEnv fp =<< config.processContextSettings EnvSettings { includeLocals = taskLocation task == Local , includeGhcPackagePath = True , stackExe = True , localeUtf8 = False , keepGhcRts = False } let emptyResult = Map.singleton testName Nothing withProcessContext menv $ if exists then do -- We clear out the .tix files before doing a run. when needHpc $ do tixexists <- doesFileExist tixPath when tixexists $ prettyWarnL [ flow "Removing HPC file" , pretty tixPath <> "." 
] liftIO $ ignoringAbsence (removeFile tixPath) let args = topts.additionalArgs argsDisplay = case args of [] -> "" _ -> ", args: " <> T.intercalate " " (map showProcessArgDebug args) announce $ "test (suite: " <> display testName <> display argsDisplay <> ")" -- Clear "Progress: ..." message before -- redirecting output. case outputType of OTConsole _ -> do logStickyDone "" liftIO $ hFlush stdout liftIO $ hFlush stderr OTLogFile _ _ -> pure () let output = case outputType of OTConsole Nothing -> Nothing <$ inherit OTConsole (Just prefix) -> fmap ( \src -> Just $ runConduit $ src .| CT.decodeUtf8Lenient .| CT.lines .| CL.map stripCR .| CL.mapM_ (\t -> logInfo $ prefix <> display t) ) createSource OTLogFile _ h -> Nothing <$ useHandleOpen h optionalTimeout action | Just maxSecs <- topts.maximumTimeSeconds, maxSecs > 0 = timeout (maxSecs * 1000000) action | otherwise = Just <$> action mec <- withWorkingDir (toFilePath pkgDir) $ optionalTimeout $ proc (toFilePath exePath) args $ \pc0 -> do changeStdin <- if isTestTypeLib then do logPath <- buildLogPath package (Just stestName) ensureDir (parent logPath) pure $ setStdin $ byteStringInput $ BL.fromStrict $ encodeUtf8 $ fromString $ show ( logPath , mkUnqualComponentName (T.unpack testName) ) else do isTerminal <- view $ globalOptsL . to (.terminal) if topts.allowStdin && isTerminal then pure id else pure $ setStdin $ byteStringInput mempty let pc = changeStdin $ setStdout output $ setStderr output pc0 withProcessWait pc $ \p -> do case (getStdout p, getStderr p) of (Nothing, Nothing) -> pure () (Just x, Just y) -> concurrently_ x y (x, y) -> assert False $ concurrently_ (fromMaybe (pure ()) x) (fromMaybe (pure ()) y) waitExitCode p -- Add a trailing newline, incase the test -- output didn't finish with a newline. case outputType of OTConsole Nothing -> prettyInfo blankLine _ -> pure () -- Move the .tix file out of the package -- directory into the hpc work dir, for -- tidiness. 
when needHpc $ updateTixFile package.name tixPath testName' let announceResult result = announce $ "Test suite " <> display testName <> " " <> result case mec of Just ExitSuccess -> do announceResult "passed" pure Map.empty Nothing -> do announceResult "timed out" if expectFailure then pure Map.empty else pure $ Map.singleton testName Nothing Just ec -> do announceResult "failed" if expectFailure then pure Map.empty else pure $ Map.singleton testName (Just ec) else do unless expectFailure $ logError $ displayShow $ TestSuiteExeMissing (package.buildType == C.Simple) exeName (packageNameString package.name) (T.unpack testName) pure emptyResult when needHpc $ do let testsToRun' = map f testsToRun f tName = case (.interface) <$> mComponent of Just C.TestSuiteLibV09{} -> tName <> "Stub" _ -> tName where mComponent = collectionLookup tName package.testSuites generateHpcReport pkgDir package testsToRun' bs <- liftIO $ case outputType of OTConsole _ -> pure "" OTLogFile logFile h -> do hClose h S.readFile $ toFilePath logFile let succeeded = Map.null errs unless (succeeded || expectFailure) $ throwM $ TestSuiteFailure (taskProvides task) errs (case outputType of OTLogFile fp _ -> Just fp OTConsole _ -> Nothing) bs setTestStatus pkgDir $ if succeeded then TSSuccess else TSFailure -- | Implements running a package's benchmarks. singleBench :: HasEnvConfig env => BenchmarkOpts -> [Text] -> ActionContext -> ExecuteEnv -> Task -> InstalledMap -> RIO env () singleBench beopts benchesToRun ac ee task installedMap = do (allDepsMap, _cache) <- getConfigCache ee task installedMap False True withSingleContext ac ee task.taskType allDepsMap (Just "bench") $ \_package _cabalfp _pkgDir cabal announce _outputType -> do let args = map T.unpack benchesToRun <> maybe [] ((:[]) . ("--benchmark-options=" <>)) beopts.additionalArgs toRun <- if beopts.disableRun then do announce "Benchmark running disabled by --no-run-benchmarks flag." 
pure False else pure True when toRun $ do announce "benchmarks" cabal CloseOnException KeepTHLoading ("bench" : args) -- Do not pass `-hpcdir` as GHC option if the coverage is not enabled. -- This helps running stack-compiled programs with dynamic interpreters like -- `hint`. Cfr: https://github.com/commercialhaskell/stack/issues/997 extraBuildOptions :: (HasEnvConfig env, HasRunner env) => WhichCompiler -> BuildOpts -> RIO env [String] extraBuildOptions wc bopts = do colorOpt <- appropriateGhcColorFlag let optsFlag = compilerOptionsCabalFlag wc baseOpts = maybe "" (" " ++) colorOpt if bopts.testOpts.coverage then do hpcIndexDir <- toFilePathNoTrailingSep <$> hpcRelativeDir pure [optsFlag, "-hpcdir " ++ hpcIndexDir ++ baseOpts] else pure [optsFlag, baseOpts] -- Library, sub-library, foreign library and executable build components. primaryComponentOptions :: Bool -- ^ Is Cabal copy limited to all libraries and executables? -> Map Text ExecutableBuildStatus -> LocalPackage -> [String] primaryComponentOptions isOldCabalCopy executableBuildStatuses lp = -- TODO: get this information from target parsing instead, which will allow -- users to turn off library building if desired ( if hasBuildableMainLibrary package then map T.unpack $ T.append "lib:" (T.pack (packageNameString package.name)) : map (T.append "flib:") (getBuildableListText package.foreignLibraries) else [] ) ++ map (T.unpack . T.append "lib:") (getBuildableListText package.subLibraries) ++ map (T.unpack . T.append "exe:") (Set.toList $ exesToBuild isOldCabalCopy executableBuildStatuses lp) where package = lp.package -- | History of this function: -- -- * Normally it would do either all executables or if the user specified -- requested components, just build them. Afterwards, due to this Cabal bug -- , we had to make Stack build -- all executables every time. 
-- -- * In this was -- flagged up as very undesirable behavior on a large project, hence the -- behavior below that we build all executables once (modulo success), and -- thereafter pay attention to user-wanted components. -- -- * The Cabal bug was fixed, in that the copy command of later Cabal versions -- allowed components to be specified. Consequently, Cabal may be satisified, -- even if all of a package's executables have not yet been built. exesToBuild :: Bool -- ^ Is Cabal copy limited to all libraries and executables? -> Map Text ExecutableBuildStatus -> LocalPackage -> Set Text exesToBuild isOldCabalCopy executableBuildStatuses lp = if cabalIsSatisfied isOldCabalCopy executableBuildStatuses && lp.wanted then exeComponents lp.components else buildableExes lp.package -- | Do the current executables satisfy Cabal's requirements? cabalIsSatisfied :: Bool -- ^ Is Cabal copy limited to all libraries and executables? -> Map k ExecutableBuildStatus -> Bool cabalIsSatisfied False _ = True cabalIsSatisfied True executableBuildStatuses = all (== ExecutableBuilt) $ Map.elems executableBuildStatuses -- Test-suite and benchmark build components. finalComponentOptions :: LocalPackage -> [String] finalComponentOptions lp = map (T.unpack . renderComponent) $ Set.toList $ Set.filter (\c -> isCTest c || isCBench c) lp.components taskComponents :: Task -> Set NamedComponent taskComponents task = case task.taskType of TTLocalMutable lp -> lp.components -- FIXME probably just want lpWanted TTRemotePackage{} -> Set.empty expectTestFailure :: PackageName -> Maybe Curator -> Bool expectTestFailure pname = maybe False (Set.member pname . (.expectTestFailure)) expectBenchmarkFailure :: PackageName -> Maybe Curator -> Bool expectBenchmarkFailure pname = maybe False (Set.member pname . 
(.expectBenchmarkFailure)) fulfillCuratorBuildExpectations :: (HasCallStack, HasTerm env) => PackageName -> Maybe Curator -> Bool -> Bool -> b -> RIO env b -> RIO env b fulfillCuratorBuildExpectations pname mcurator enableTests _ defValue action | enableTests && expectTestFailure pname mcurator = do eres <- tryAny action case eres of Right res -> do prettyWarnL [ style Current (fromPackageName pname) <> ":" , flow "unexpected test build success." ] pure res Left _ -> pure defValue fulfillCuratorBuildExpectations pname mcurator _ enableBench defValue action | enableBench && expectBenchmarkFailure pname mcurator = do eres <- tryAny action case eres of Right res -> do prettyWarnL [ style Current (fromPackageName pname) <> ":" , flow "unexpected benchmark build success." ] pure res Left _ -> pure defValue fulfillCuratorBuildExpectations _ _ _ _ _ action = action stack-2.15.7/src/Stack/Build/Haddock.hs0000644000000000000000000003552314620153445015701 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Generate haddocks module Stack.Build.Haddock ( generateDepsHaddockIndex , generateLocalHaddockIndex , generateSnapHaddockIndex , openHaddocksInBrowser , shouldHaddockDeps , shouldHaddockPackage , generateLocalHaddockForHackageArchives ) where import qualified Codec.Archive.Tar as Tar import qualified Codec.Compression.GZip as GZip import qualified Data.Foldable as F import qualified Data.HashSet as HS import qualified Data.Map.Strict as Map import qualified Data.Set as Set import qualified Data.Text as T import Data.Time ( UTCTime ) import Distribution.Text ( display ) import Path ( (), addExtension, fromAbsDir, fromAbsFile, fromRelDir , parent, parseRelDir, parseRelFile ) import Path.Extra ( parseCollapsedAbsFile, toFilePathNoTrailingSep , tryGetModificationTime ) import Path.IO ( copyDirRecur', doesFileExist, ensureDir, ignoringAbsence , removeDirRecur ) import qualified RIO.ByteString.Lazy 
as BL import RIO.List ( intercalate ) import RIO.Process ( HasProcessContext, withWorkingDir ) import Stack.Constants ( docDirSuffix, htmlDirSuffix, relDirAll, relFileIndexHtml ) import Stack.Constants.Config ( distDirFromDir ) import Stack.Prelude hiding ( Display (..) ) import Stack.Types.Build.Exception ( BuildException (..) ) import Stack.Types.CompilerPaths ( CompilerPaths (..), HasCompiler (..) ) import Stack.Types.ConfigureOpts ( BaseConfigOpts (..) ) import Stack.Types.BuildOpts ( BuildOpts (..), HaddockOpts (..) ) import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..) ) import Stack.Types.DumpPackage ( DumpPackage (..) ) import Stack.Types.EnvConfig ( HasEnvConfig (..) ) import Stack.Types.GhcPkgId ( GhcPkgId ) import Stack.Types.Package ( InstallLocation (..), LocalPackage (..), Package (..) ) import qualified System.FilePath as FP import Web.Browser ( openBrowser ) openHaddocksInBrowser :: HasTerm env => BaseConfigOpts -> Map PackageName (PackageIdentifier, InstallLocation) -- ^ Available packages and their locations for the current project -> Set PackageName -- ^ Build targets as determined by 'Stack.Build.Source.loadSourceMap' -> RIO env () openHaddocksInBrowser bco pkgLocations buildTargets = do let cliTargets = bco.buildOptsCLI.targetsCLI getDocIndex = do let localDocs = haddockIndexFile (localDepsDocDir bco) localExists <- doesFileExist localDocs if localExists then pure localDocs else do let snapDocs = haddockIndexFile (snapDocDir bco) snapExists <- doesFileExist snapDocs if snapExists then pure snapDocs else throwIO HaddockIndexNotFound docFile <- case (cliTargets, map (`Map.lookup` pkgLocations) (Set.toList buildTargets)) of ([_], [Just (pkgId, iloc)]) -> do pkgRelDir <- (parseRelDir . 
packageIdentifierString) pkgId let docLocation = case iloc of Snap -> snapDocDir bco Local -> localDocDir bco let docFile = haddockIndexFile (docLocation pkgRelDir) exists <- doesFileExist docFile if exists then pure docFile else do prettyWarnL [ flow "Expected to find documentation at" , pretty docFile <> "," , flow "but that file is missing. Opening doc index instead." ] getDocIndex _ -> getDocIndex prettyInfo $ "Opening" <+> pretty docFile <+> "in the browser." _ <- liftIO $ openBrowser (toFilePath docFile) pure () -- | Determine whether we should haddock for a package. shouldHaddockPackage :: BuildOpts -> Set PackageName -- ^ Packages that we want to generate haddocks for in any case (whether or -- not we are going to generate haddocks for dependencies) -> PackageName -> Bool shouldHaddockPackage bopts wanted name = if Set.member name wanted then bopts.buildHaddocks else shouldHaddockDeps bopts -- | Determine whether to build haddocks for dependencies. shouldHaddockDeps :: BuildOpts -> Bool shouldHaddockDeps bopts = fromMaybe bopts.buildHaddocks bopts.haddockDeps -- | Generate Haddock index and contents for local packages. generateLocalHaddockIndex :: (HasCompiler env, HasProcessContext env, HasTerm env) => BaseConfigOpts -> Map GhcPkgId DumpPackage -- ^ Local package dump -> [LocalPackage] -> RIO env () generateLocalHaddockIndex bco localDumpPkgs locals = do let dumpPackages = mapMaybe ( \LocalPackage {package = Package {name, version}} -> F.find ( \dp -> dp.packageIdent == PackageIdentifier name version ) localDumpPkgs ) locals generateHaddockIndex "local packages" bco dumpPackages "." (localDocDir bco) -- | Generate Haddock index and contents for local packages and their -- dependencies. 
-- | Generate Haddock index and contents for local packages and their
-- dependencies.
generateDepsHaddockIndex ::
     (HasCompiler env, HasProcessContext env, HasTerm env)
  => BaseConfigOpts
  -> Map GhcPkgId DumpPackage -- ^ Global dump information
  -> Map GhcPkgId DumpPackage -- ^ Snapshot dump information
  -> Map GhcPkgId DumpPackage -- ^ Local dump information
  -> [LocalPackage]
  -> RIO env ()
generateDepsHaddockIndex bco globalDumpPkgs snapshotDumpPkgs localDumpPkgs locals =
  generateHaddockIndex
    "local packages and dependencies"
    bco
    depDumpPkgs
    ".."
    (localDepsDocDir bco)
 where
  -- Dump information for every package reachable from the local packages,
  -- looked up across the local, snapshot and global dumps (first match wins,
  -- in that order).
  depDumpPkgs =
    mapMaybe (`lookupDumpPackage` allDumpPkgs) $
      nubOrd $ findTransitiveDepends $ mapMaybe localGhcPkgId locals
  -- Find the installed-package id of a local package by matching its
  -- identifier against the local package dump.
  localGhcPkgId :: LocalPackage -> Maybe GhcPkgId
  localGhcPkgId LocalPackage {package = Package {name, version}} =
    (.ghcPkgId) <$>
      F.find
        (\dp -> dp.packageIdent == PackageIdentifier name version)
        localDumpPkgs
  -- Set-based reachability: repeatedly take one id from the frontier, add
  -- its direct dependencies (those not yet visited), and recurse until the
  -- frontier is empty. Packages absent from every dump contribute no edges.
  findTransitiveDepends :: [GhcPkgId] -> [GhcPkgId]
  findTransitiveDepends roots = go (HS.fromList roots) HS.empty
   where
    go frontier visited =
      case HS.toList frontier of
        [] -> HS.toList visited
        (pkgId:_) ->
          let directDeps =
                maybe HS.empty (HS.fromList . (.depends)) $
                  lookupDumpPackage pkgId allDumpPkgs
              unseen = directDeps `HS.difference` visited
          in  go
                (HS.delete pkgId (unseen `HS.union` frontier))
                (HS.insert pkgId visited)
  allDumpPkgs = [localDumpPkgs, snapshotDumpPkgs, globalDumpPkgs]

-- | Generate Haddock index and contents for all snapshot packages.
generateSnapHaddockIndex ::
     (HasCompiler env, HasProcessContext env, HasTerm env)
  => BaseConfigOpts
  -> Map GhcPkgId DumpPackage -- ^ Global package dump
  -> Map GhcPkgId DumpPackage -- ^ Snapshot package dump
  -> RIO env ()
generateSnapHaddockIndex bco globalDumpPkgs snapshotDumpPkgs =
  let pkgs = Map.elems snapshotDumpPkgs ++ Map.elems globalDumpPkgs
  in  generateHaddockIndex "snapshot packages" bco pkgs "." (snapDocDir bco)

-- | Generate Haddock index and contents for specified packages.
generateHaddockIndex ::
     (HasCompiler env, HasProcessContext env, HasTerm env)
  => Text
     -- ^ Description of the package set, used only in log messages.
  -> BaseConfigOpts
  -> [DumpPackage]
     -- ^ Packages whose Haddock interfaces should feed the index.
  -> FilePath
     -- ^ Relative path from the destination directory back to the directory
     -- holding the per-package doc directories (e.g. @"."@ or @".."@).
  -> Path Abs Dir
     -- ^ Destination directory in which to generate the index.
  -> RIO env ()
generateHaddockIndex descr bco dumpPackages docRelFP destDir = do
  ensureDir destDir
  -- One entry per package that has at least one Haddock interface file that
  -- still exists on disk; nubOrd drops duplicate entries.
  interfaceOpts <-
    (liftIO . fmap nubOrd . mapMaybeM toInterfaceOpt) dumpPackages
  unless (null interfaceOpts) $ do
    let destIndexFile = haddockIndexFile destDir
    eindexModTime <- liftIO (tryGetModificationTime destIndexFile)
    let
      -- Regenerate when the index is missing (Left) or any source interface
      -- file is newer than the existing index.
      needUpdate =
        case eindexModTime of
          Left _ -> True
          Right indexModTime ->
            or [mt > indexModTime | (_, mt, _, _) <- interfaceOpts]
      prettyDescr = style Current (fromString $ T.unpack descr)
    if needUpdate
      then do
        prettyInfo $
          fillSep
            [ flow "Updating Haddock index for"
            , prettyDescr
            , "in:"
            ]
          <> line
          <> pretty destIndexFile
        -- Refresh stale per-package doc copies before invoking haddock.
        liftIO (mapM_ copyPkgDocs interfaceOpts)
        haddockExeName <- view $ compilerPathsL . to (toFilePath . (.haddock))
        -- Run haddock in the destination directory so the relative @-i@
        -- paths in interfaceOpts resolve correctly.
        withWorkingDir (toFilePath destDir) $ readProcessNull haddockExeName
          ( map
              (("--optghc=-package-db=" ++ ) . toFilePathNoTrailingSep)
              [bco.snapDB, bco.localDB]
            ++ bco.buildOpts.haddockOpts.additionalArgs
            ++ ["--gen-contents", "--gen-index"]
            ++ [x | (xs, _, _, _) <- interfaceOpts, x <- xs]
          )
      else
        prettyInfo $
          fillSep
            [ flow "Haddock index for"
            , prettyDescr
            , flow "already up to date at:"
            ]
          <> line
          <> pretty destIndexFile
 where
  -- Build, for one package, the @-i@ option passed to haddock together with
  -- the source interface's modification time and the absolute source and
  -- destination interface files. Yields Nothing when the package lists no
  -- interfaces or the source interface file cannot be stat'ed.
  toInterfaceOpt ::
       DumpPackage
    -> IO (Maybe ([String], UTCTime, Path Abs File, Path Abs File))
  toInterfaceOpt DumpPackage {haddockInterfaces, packageIdent, haddockHtml} =
    case haddockInterfaces of
      [] -> pure Nothing
      -- Only the first listed interface file is considered.
      srcInterfaceFP:_ -> do
        srcInterfaceAbsFile <- parseCollapsedAbsFile srcInterfaceFP
        let (PackageIdentifier name _) = packageIdent
            -- Destination interface lives at
            -- @<docRelFP>/<pkg-id>/<pkg-name>.haddock@ relative to destDir.
            destInterfaceRelFP =
              docRelFP FP.</>
              packageIdentifierString packageIdent FP.</>
              (packageNameString name FP.<.> "haddock")
            -- Relative path to the package's HTML docs, if recorded.
            docPathRelFP =
              fmap ((docRelFP FP.</>) . FP.takeFileName) haddockHtml
            -- Haddock's @-i@ argument: "htmlpath,interfacefile" when the
            -- HTML path is known, otherwise just the interface file
            -- (mcons presumably prepends the Maybe value — TODO confirm
            -- against Stack.Prelude).
            interfaces = intercalate "," $
              mcons docPathRelFP [srcInterfaceFP]
        destInterfaceAbsFile <- parseCollapsedAbsFile
          (toFilePath destDir FP.</> destInterfaceRelFP)
        esrcInterfaceModTime <- tryGetModificationTime srcInterfaceAbsFile
        pure $
          case esrcInterfaceModTime of
            Left _ -> Nothing
            Right srcInterfaceModTime ->
              Just
                ( [ "-i", interfaces ]
                , srcInterfaceModTime
                , srcInterfaceAbsFile
                , destInterfaceAbsFile
                )
  copyPkgDocs :: (a, UTCTime, Path Abs File, Path Abs File) -> IO ()
  copyPkgDocs
      (_, srcInterfaceModTime, srcInterfaceAbsFile, destInterfaceAbsFile) = do
    -- Copy dependencies' haddocks to documentation directory. This way,
    -- relative @../$pkg-$ver@ links work and it's easy to upload docs to a web
    -- server or otherwise view them in a non-local-filesystem context. We copy
    -- instead of symlink for two reasons: (1) symlinks aren't reliably supported
    -- on Windows, and (2) the filesystem containing dependencies' docs may not be
    -- available where viewing the docs (e.g. if building in a Docker container).
    edestInterfaceModTime <- tryGetModificationTime destInterfaceAbsFile
    -- Copy when the destination interface is missing or older than the
    -- source; otherwise leave the existing copy in place.
    case edestInterfaceModTime of
      Left _ -> doCopy
      Right destInterfaceModTime
        | destInterfaceModTime < srcInterfaceModTime -> doCopy
        | otherwise -> pure ()
   where
    -- Replace the destination HTML directory wholesale; on failure, remove
    -- the partial copy so a later run starts from a clean slate.
    doCopy = do
      ignoringAbsence (removeDirRecur destHtmlAbsDir)
      ensureDir destHtmlAbsDir
      onException
        (copyDirRecur' (parent srcInterfaceAbsFile) destHtmlAbsDir)
        (ignoringAbsence (removeDirRecur destHtmlAbsDir))
    destHtmlAbsDir = parent destInterfaceAbsFile

-- | Find first DumpPackage matching the GhcPkgId
lookupDumpPackage ::
     GhcPkgId
  -> [Map GhcPkgId DumpPackage]
  -> Maybe DumpPackage
lookupDumpPackage ghcPkgId dumpPkgs =
  -- Earlier maps in the list take precedence over later ones.
  listToMaybe $ mapMaybe (Map.lookup ghcPkgId) dumpPkgs

-- | Path of haddock index file.
haddockIndexFile :: Path Abs Dir -> Path Abs File
haddockIndexFile destDir = destDir </> relFileIndexHtml

-- | Path of local packages documentation directory.
localDocDir :: BaseConfigOpts -> Path Abs Dir localDocDir bco = bco.localInstallRoot docDirSuffix -- | Path of documentation directory for the dependencies of local packages localDepsDocDir :: BaseConfigOpts -> Path Abs Dir localDepsDocDir bco = localDocDir bco relDirAll -- | Path of snapshot packages documentation directory. snapDocDir :: BaseConfigOpts -> Path Abs Dir snapDocDir bco = bco.snapInstallRoot docDirSuffix generateLocalHaddockForHackageArchives :: (HasEnvConfig env, HasTerm env) => [LocalPackage] -> RIO env () generateLocalHaddockForHackageArchives = mapM_ ( \lp -> let pkg = lp.package pkgId = PackageIdentifier pkg.name pkg.version pkgDir = parent lp.cabalFP in generateLocalHaddockForHackageArchive pkgDir pkgId ) -- | Generate an archive file containing local Haddock documentation for -- Hackage, in a form accepted by Hackage. generateLocalHaddockForHackageArchive :: (HasEnvConfig env, HasTerm env) => Path Abs Dir -- ^ The package directory. -> PackageIdentifier -- ^ The package name and version. 
-> RIO env () generateLocalHaddockForHackageArchive pkgDir pkgId = do distDir <- distDirFromDir pkgDir let pkgIdName = display pkgId name = pkgIdName <> "-docs" (nameRelDir, tarGzFileName) = fromMaybe (error "impossible") ( do relDir <- parseRelDir name nameRelFile <- parseRelFile name tarGz <- addExtension ".gz" =<< addExtension ".tar" nameRelFile pure (relDir, tarGz) ) tarGzFile = distDir tarGzFileName docDir = distDir docDirSuffix htmlDirSuffix createTarGzFile tarGzFile docDir nameRelDir prettyInfo $ fillSep [ flow "Archive of Haddock documentation for Hackage for" , style Current (fromString pkgIdName) , flow "created at:" ] <> line <> pretty tarGzFile createTarGzFile :: Path Abs File -- ^ Full path to archive file -> Path Abs Dir -- ^ Base directory -> Path Rel Dir -- ^ Directory to archive, relative to base directory -> RIO env () createTarGzFile tar base dir = do entries <- liftIO $ Tar.pack base' [dir'] BL.writeFile tar' $ GZip.compress $ Tar.write entries where base' = fromAbsDir base dir' = fromRelDir dir tar' = fromAbsFile tar stack-2.15.7/src/Stack/Build/Installed.hs0000644000000000000000000003131714620153445016260 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} -- Determine which packages are already installed module Stack.Build.Installed ( getInstalled , toInstallMap ) where import Data.Conduit ( ZipSink (..), getZipSink ) import qualified Data.Conduit.List as CL import qualified Data.Set as Set import qualified Data.Map.Strict as Map import Stack.Build.Cache ( getInstalledExes ) import Stack.Constants ( wiredInPackages ) import Stack.PackageDump ( conduitDumpPackage, ghcPkgDump, pruneDeps ) import Stack.Prelude import Stack.SourceMap ( getPLIVersion, loadVersion ) import Stack.Types.CompilerPaths ( getGhcPkgExe ) import Stack.Types.DumpPackage ( DumpPackage (..), SublibDump (..), dpParentLibIdent 
) import Stack.Types.EnvConfig ( HasEnvConfig, packageDatabaseDeps, packageDatabaseExtra , packageDatabaseLocal ) import Stack.Types.GhcPkgId ( GhcPkgId ) import Stack.Types.Installed ( InstallLocation (..), InstallMap, Installed (..) , InstalledLibraryInfo (..), InstalledMap , InstalledPackageLocation (..), PackageDatabase (..) , PackageDbVariety (..), toPackageDbVariety ) import Stack.Types.SourceMap ( DepPackage (..), ProjectPackage (..), SourceMap (..) ) toInstallMap :: MonadIO m => SourceMap -> m InstallMap toInstallMap sourceMap = do projectInstalls <- for sourceMap.project $ \pp -> do version <- loadVersion pp.projectCommon pure (Local, version) depInstalls <- for sourceMap.deps $ \dp -> case dp.location of PLImmutable pli -> pure (Snap, getPLIVersion pli) PLMutable _ -> do version <- loadVersion dp.depCommon pure (Local, version) pure $ projectInstalls <> depInstalls -- | Returns the new InstalledMap and all of the locally registered packages. getInstalled :: HasEnvConfig env => InstallMap -- ^ does not contain any installed information -> RIO env ( InstalledMap , [DumpPackage] -- globally installed , [DumpPackage] -- snapshot installed , [DumpPackage] -- locally installed ) getInstalled {-opts-} installMap = do logDebug "Finding out which packages are already installed" snapDBPath <- packageDatabaseDeps localDBPath <- packageDatabaseLocal extraDBPaths <- packageDatabaseExtra let loadDatabase' = loadDatabase {-opts mcache-} installMap (installedLibs0, globalDumpPkgs) <- loadDatabase' GlobalPkgDb [] (installedLibs1, _extraInstalled) <- foldM (\lhs' pkgdb -> loadDatabase' (UserPkgDb ExtraPkgDb pkgdb) (fst lhs') ) (installedLibs0, globalDumpPkgs) extraDBPaths (installedLibs2, snapshotDumpPkgs) <- loadDatabase' (UserPkgDb (InstalledTo Snap) snapDBPath) installedLibs1 (installedLibs3, localDumpPkgs) <- loadDatabase' (UserPkgDb (InstalledTo Local) localDBPath) installedLibs2 let installedLibs = foldr' gatherAndTransformSubLoadHelper mempty installedLibs3 -- Add 
in the executables that are installed, making sure to only trust a -- listed installation under the right circumstances (see below) let exesToSM loc = Map.unions . map (exeToSM loc) exeToSM loc (PackageIdentifier name version) = case Map.lookup name installMap of -- Doesn't conflict with anything, so that's OK Nothing -> m Just (iLoc, iVersion) -- Not the version we want, ignore it | version /= iVersion || mismatchingLoc loc iLoc -> Map.empty | otherwise -> m where m = Map.singleton name (loc, Executable $ PackageIdentifier name version) mismatchingLoc installed target | target == installed = False | installed == Local = False -- snapshot dependency could end up -- in a local install as being mutable | otherwise = True exesSnap <- getInstalledExes Snap exesLocal <- getInstalledExes Local let installedMap = Map.unions [ exesToSM Local exesLocal , exesToSM Snap exesSnap , installedLibs ] pure ( installedMap , globalDumpPkgs , snapshotDumpPkgs , localDumpPkgs ) -- | Outputs both the modified InstalledMap and the Set of all installed -- packages in this database -- -- The goal is to ascertain that the dependencies for a package are present, -- that it has profiling if necessary, and that it matches the version and -- location needed by the SourceMap. loadDatabase :: forall env. HasEnvConfig env => InstallMap -- ^ to determine which installed things we should include -> PackageDatabase -- ^ package database. 
-> [LoadHelper] -- ^ from parent databases -> RIO env ([LoadHelper], [DumpPackage]) loadDatabase installMap db lhs0 = do pkgexe <- getGhcPkgExe (lhs1', dps) <- ghcPkgDump pkgexe pkgDb $ conduitDumpPackage .| sink lhs1 <- mapMaybeM processLoadResult lhs1' let lhs = pruneDeps id (.ghcPkgId) (.depsGhcPkgId) const (lhs0 ++ lhs1) pure (map (\lh -> lh { depsGhcPkgId = [] }) $ Map.elems lhs, dps) where pkgDb = case db of GlobalPkgDb -> [] UserPkgDb _ fp -> [fp] sinkDP = CL.map (isAllowed installMap db' &&& toLoadHelper db') .| CL.consume where db' = toPackageDbVariety db sink = getZipSink $ (,) <$> ZipSink sinkDP <*> ZipSink CL.consume processLoadResult :: (Allowed, LoadHelper) -> RIO env (Maybe LoadHelper) processLoadResult (Allowed, lh) = pure (Just lh) processLoadResult (reason, lh) = do logDebug $ "Ignoring package " <> fromPackageName (fst lh.pair) <> case db of GlobalPkgDb -> mempty UserPkgDb loc fp -> ", from " <> displayShow (loc, fp) <> "," <> " due to" <> case reason of UnknownPkg -> " it being unknown to the resolver / extra-deps." WrongLocation db' loc -> " wrong location: " <> displayShow (db', loc) WrongVersion actual wanted -> " wanting version " <> fromString (versionString wanted) <> " instead of " <> fromString (versionString actual) pure Nothing -- | Type representing results of 'isAllowed'. data Allowed = Allowed -- ^ The installed package can be included in the set of relevant installed -- packages. | UnknownPkg -- ^ The installed package cannot be included in the set of relevant -- installed packages because the package is unknown. | WrongLocation PackageDbVariety InstallLocation -- ^ The installed package cannot be included in the set of relevant -- installed packages because the package is in the wrong package database. | WrongVersion Version Version -- ^ The installed package cannot be included in the set of relevant -- installed packages because the package has the wrong version. 
deriving (Eq, Show) -- | Check if an installed package can be included in the set of relevant -- installed packages or not, based on the package selections made by the user. -- This does not perform any dirtiness or flag change checks. isAllowed :: InstallMap -> PackageDbVariety -- ^ The package database providing the installed package. -> DumpPackage -- ^ The installed package to check. -> Allowed isAllowed installMap pkgDb dp = case Map.lookup name installMap of Nothing -> -- If the sourceMap has nothing to say about this package, -- check if it represents a sub-library first -- See: https://github.com/commercialhaskell/stack/issues/3899 case dpParentLibIdent dp of Just (PackageIdentifier parentLibName version') -> case Map.lookup parentLibName installMap of Nothing -> checkNotFound Just instInfo | version' == version -> checkFound instInfo | otherwise -> checkNotFound -- different versions Nothing -> checkNotFound Just pii -> checkFound pii where PackageIdentifier name version = dp.packageIdent -- Ensure that the installed location matches where the sourceMap says it -- should be installed. checkLocation Snap = -- snapshot deps could become mutable after getting any mutable dependency. True checkLocation Local = case pkgDb of GlobalDb -> False -- 'locally' installed snapshot packages can come from 'extra' package -- databases. ExtraDb -> True WriteOnlyDb -> False MutableDb -> True -- Check if an installed package is allowed if it is found in the sourceMap. checkFound (installLoc, installVer) | not (checkLocation installLoc) = WrongLocation pkgDb installLoc | version /= installVer = WrongVersion version installVer | otherwise = Allowed -- Check if an installed package is allowed if it is not found in the -- sourceMap. checkNotFound = case pkgDb of -- The sourceMap has nothing to say about this global package, so we can use -- it. GlobalDb -> Allowed ExtraDb -> Allowed -- For non-global packages, don't include unknown packages. 
-- See: https://github.com/commercialhaskell/stack/issues/292 WriteOnlyDb -> UnknownPkg MutableDb -> UnknownPkg -- | Type representing certain information about an installed package. data LoadHelper = LoadHelper { ghcPkgId :: !GhcPkgId -- ^ The package's id. , subLibDump :: !(Maybe SublibDump) , depsGhcPkgId :: ![GhcPkgId] -- ^ Unless the package's name is that of a 'wired-in' package, a list of -- the ids of the installed packages that are the package's dependencies. , pair :: !(PackageName, (InstallLocation, Installed)) -- ^ A pair of (a) the package's name and (b) a pair of the relevant -- database (write-only or mutable) and information about the library -- installed. } deriving Show toLoadHelper :: PackageDbVariety -> DumpPackage -> LoadHelper toLoadHelper pkgDb dp = LoadHelper { ghcPkgId , depsGhcPkgId , subLibDump = dp.sublib , pair } where ghcPkgId = dp.ghcPkgId ident@(PackageIdentifier name _) = dp.packageIdent depsGhcPkgId = -- We always want to consider the wired in packages as having all of their -- dependencies installed, since we have no ability to reinstall them. This -- is especially important for using different minor versions of GHC, where -- the dependencies of wired-in packages may change slightly and therefore -- not match the snapshot. if name `Set.member` wiredInPackages then [] else dp.depends installedLibInfo = InstalledLibraryInfo ghcPkgId (Right <$> dp.license) mempty toInstallLocation :: PackageDbVariety -> InstallLocation toInstallLocation GlobalDb = Snap toInstallLocation ExtraDb = Snap toInstallLocation WriteOnlyDb = Snap toInstallLocation MutableDb = Local pair = (name, (toInstallLocation pkgDb, Library ident installedLibInfo)) -- | This is where sublibraries and main libraries are assembled into a single -- entity Installed package, where all ghcPkgId live. 
gatherAndTransformSubLoadHelper :: LoadHelper -> Map PackageName (InstallLocation, Installed) -> Map PackageName (InstallLocation, Installed) gatherAndTransformSubLoadHelper lh = Map.insertWith onPreviousLoadHelper key value where -- Here we assume that both have the same location which already was a prior -- assumption in Stack. onPreviousLoadHelper (pLoc, Library pn incomingLibInfo) (_, Library _ existingLibInfo) = ( pLoc , Library pn existingLibInfo { subLib = Map.union incomingLibInfo.subLib existingLibInfo.subLib , ghcPkgId = if isJust lh.subLibDump then existingLibInfo.ghcPkgId else incomingLibInfo.ghcPkgId } ) onPreviousLoadHelper newVal _oldVal = newVal (key, value) = case lh.subLibDump of Nothing -> (rawPackageName, rawValue) Just sd -> (sd.packageName, updateAsSublib sd <$> rawValue) (rawPackageName, rawValue) = lh.pair updateAsSublib sd (Library (PackageIdentifier _sublibMungedPackageName version) libInfo) = Library (PackageIdentifier key version) libInfo { subLib = Map.singleton sd.libraryName libInfo.ghcPkgId } updateAsSublib _ v = v stack-2.15.7/src/Stack/Build/Source.hs0000644000000000000000000005175514620153474015613 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- Load information on package sources module Stack.Build.Source ( projectLocalPackages , localDependencies , loadCommonPackage , loadLocalPackage , loadSourceMap , getLocalFlags , addUnlistedToBuildCache , hashSourceMapData ) where import Data.ByteString.Builder ( toLazyByteString ) import qualified Data.List as L import qualified Data.Map as Map import qualified Data.Map.Merge.Lazy as Map import qualified Data.Map.Strict as M import qualified Data.Set as Set import qualified Distribution.PackageDescription as C import qualified Pantry.SHA256 as SHA256 import Stack.Build.Cache ( tryGetBuildCache ) import Stack.Build.Haddock ( shouldHaddockDeps ) import Stack.Package ( 
buildableBenchmarks, buildableExes, buildableTestSuites , hasBuildableMainLibrary, resolvePackage ) import Stack.PackageFile ( getPackageFile ) import Stack.Prelude import Stack.SourceMap ( DumpedGlobalPackage, checkFlagsUsedThrowing , getCompilerInfo, immutableLocSha, mkProjectPackage , pruneGlobals ) import Stack.Types.ApplyGhcOptions ( ApplyGhcOptions (..) ) import Stack.Types.ApplyProgOptions ( ApplyProgOptions (..) ) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..) ) import Stack.Types.BuildOpts ( BuildOpts (..), TestOpts (..) ) import Stack.Types.BuildOptsCLI ( ApplyCLIFlag (..), BuildOptsCLI (..) , boptsCLIAllProgOptions ) import Stack.Types.CabalConfigKey ( CabalConfigKey (..) ) import Stack.Types.CompilerPaths ( HasCompiler, getCompilerPath ) import Stack.Types.Config ( Config (..), HasConfig (..), buildOptsL ) import Stack.Types.Curator ( Curator (..) ) import Stack.Types.EnvConfig ( EnvConfig (..), HasEnvConfig (..), HasSourceMap (..) , actualCompilerVersionL ) import Stack.Types.FileDigestCache ( readFileDigest ) import Stack.Types.NamedComponent ( NamedComponent (..), isCSubLib, splitComponents ) import Stack.Types.Package ( FileCacheInfo (..), LocalPackage (..), Package (..) , PackageConfig (..), dotCabalGetPath, memoizeRefWith , runMemoizedWith ) import Stack.Types.PackageFile ( PackageComponentFile (..), PackageWarning ) import Stack.Types.Platform ( HasPlatform (..) ) import Stack.Types.SourceMap ( CommonPackage (..), DepPackage (..), ProjectPackage (..) , SMActual (..), SMTargets (..), SourceMap (..) , SourceMapHash (..), Target (..), ppGPD, ppRoot ) import Stack.Types.UnusedFlags ( FlagSource (..) ) import System.FilePath ( takeFileName ) import System.IO.Error ( isDoesNotExistError ) -- | loads and returns project packages projectLocalPackages :: HasEnvConfig env => RIO env [LocalPackage] projectLocalPackages = do sm <- view $ envConfigL . 
to (.sourceMap) for (toList sm.project) loadLocalPackage -- | loads all local dependencies - project packages and local extra-deps localDependencies :: HasEnvConfig env => RIO env [LocalPackage] localDependencies = do bopts <- view $ configL . to (.build) sourceMap <- view $ envConfigL . to (.sourceMap) forMaybeM (Map.elems sourceMap.deps) $ \dp -> case dp.location of PLMutable dir -> do pp <- mkProjectPackage YesPrintWarnings dir (shouldHaddockDeps bopts) Just <$> loadLocalPackage pp _ -> pure Nothing -- | Given the parsed targets and build command line options constructs a source -- map loadSourceMap :: HasBuildConfig env => SMTargets -> BuildOptsCLI -> SMActual DumpedGlobalPackage -> RIO env SourceMap loadSourceMap targets boptsCli sma = do bconfig <- view buildConfigL let compiler = sma.compiler project = M.map applyOptsFlagsPP sma.project bopts = bconfig.config.build applyOptsFlagsPP p@ProjectPackage{ projectCommon = c } = p { projectCommon = applyOptsFlags (M.member c.name targets.targets) True c } deps0 = targets.deps <> sma.deps deps = M.map applyOptsFlagsDep deps0 applyOptsFlagsDep d@DepPackage{ depCommon = c } = d { depCommon = applyOptsFlags (M.member c.name targets.deps) False c } applyOptsFlags isTarget isProjectPackage common = let name = common.name flags = getLocalFlags boptsCli name ghcOptions = generalGhcOptions bconfig boptsCli isTarget isProjectPackage cabalConfigOpts = generalCabalConfigOpts bconfig boptsCli common.name isTarget isProjectPackage in common { flags = if M.null flags then common.flags else flags , ghcOptions = ghcOptions ++ common.ghcOptions , cabalConfigOpts = cabalConfigOpts ++ common.cabalConfigOpts , buildHaddocks = if isTarget then bopts.buildHaddocks else shouldHaddockDeps bopts } packageCliFlags = Map.fromList $ mapMaybe maybeProjectFlags $ Map.toList boptsCli.flags maybeProjectFlags (ACFByName name, fs) = Just (name, fs) maybeProjectFlags _ = Nothing globalPkgs = pruneGlobals sma.globals (Map.keysSet deps) logDebug 
"Checking flags" checkFlagsUsedThrowing packageCliFlags FSCommandLine project deps logDebug "SourceMap constructed" pure SourceMap { targets , compiler , project , deps , globalPkgs } -- | Get a 'SourceMapHash' for a given 'SourceMap' -- -- Basic rules: -- -- * If someone modifies a GHC installation in any way after Stack looks at it, -- they voided the warranty. This includes installing a brand new build to the -- same directory, or registering new packages to the global database. -- -- * We should include everything in the hash that would relate to immutable -- packages and identifying the compiler itself. Mutable packages (both -- project packages and dependencies) will never make it into the snapshot -- database, and can be ignored. -- -- * Target information is only relevant insofar as it effects the dependency -- map. The actual current targets for this build are irrelevant to the cache -- mechanism, and can be ignored. -- -- * Make sure things like profiling and haddocks are included in the hash -- hashSourceMapData :: (HasBuildConfig env, HasCompiler env) => BuildOptsCLI -> SourceMap -> RIO env SourceMapHash hashSourceMapData boptsCli sm = do compilerPath <- getUtf8Builder . fromString . toFilePath <$> getCompilerPath compilerInfo <- getCompilerInfo immDeps <- forM (Map.elems sm.deps) depPackageHashableContent bc <- view buildConfigL let -- extra bytestring specifying GHC options supposed to be applied to GHC -- boot packages so we'll have different hashes when bare resolver -- 'ghc-X.Y.Z' is used, no extra-deps and e.g. 
user wants builds with -- profiling or without bootGhcOpts = map display (generalGhcOptions bc boptsCli False False) hashedContent = toLazyByteString $ compilerPath <> compilerInfo <> getUtf8Builder (mconcat bootGhcOpts) <> mconcat immDeps pure $ SourceMapHash (SHA256.hashLazyBytes hashedContent) depPackageHashableContent :: (HasConfig env) => DepPackage -> RIO env Builder depPackageHashableContent dp = case dp.location of PLMutable _ -> pure "" PLImmutable pli -> do let flagToBs (f, enabled) = (if enabled then "" else "-") <> fromString (C.unFlagName f) flags = map flagToBs $ Map.toList dp.depCommon.flags ghcOptions = map display dp.depCommon.ghcOptions cabalConfigOpts = map display dp.depCommon.cabalConfigOpts haddocks = if dp.depCommon.buildHaddocks then "haddocks" else "" hash = immutableLocSha pli pure $ hash <> haddocks <> getUtf8Builder (mconcat flags) <> getUtf8Builder (mconcat ghcOptions) <> getUtf8Builder (mconcat cabalConfigOpts) -- | All flags for a local package. getLocalFlags :: BuildOptsCLI -> PackageName -> Map FlagName Bool getLocalFlags boptsCli name = Map.unions [ Map.findWithDefault Map.empty (ACFByName name) cliFlags , Map.findWithDefault Map.empty ACFAllProjectPackages cliFlags ] where cliFlags = boptsCli.flags -- | Get the options to pass to @./Setup.hs configure@ generalCabalConfigOpts :: BuildConfig -> BuildOptsCLI -> PackageName -> Bool -> Bool -> [Text] generalCabalConfigOpts bconfig boptsCli name isTarget isLocal = concat [ Map.findWithDefault [] CCKEverything config.cabalConfigOpts , if isLocal then Map.findWithDefault [] CCKLocals config.cabalConfigOpts else [] , if isTarget then Map.findWithDefault [] CCKTargets config.cabalConfigOpts else [] , Map.findWithDefault [] (CCKPackage name) config.cabalConfigOpts , if includeExtraOptions then boptsCLIAllProgOptions boptsCli else [] ] where config = view configL bconfig includeExtraOptions = case config.applyProgOptions of APOTargets -> isTarget APOLocals -> isLocal APOEverything -> True -- 
| Get the configured options to pass from GHC, based on the build -- configuration and commandline. generalGhcOptions :: BuildConfig -> BuildOptsCLI -> Bool -> Bool -> [Text] generalGhcOptions bconfig boptsCli isTarget isLocal = concat [ Map.findWithDefault [] AGOEverything config.ghcOptionsByCat , if isLocal then Map.findWithDefault [] AGOLocals config.ghcOptionsByCat else [] , if isTarget then Map.findWithDefault [] AGOTargets config.ghcOptionsByCat else [] , concat [["-fhpc"] | isLocal && bopts.testOpts.coverage] , if bopts.libProfile || bopts.exeProfile then ["-fprof-auto", "-fprof-cafs"] else [] , [ "-g" | not $ bopts.libStrip || bopts.exeStrip ] , if includeExtraOptions then boptsCli.ghcOptions else [] ] where bopts = config.build config = view configL bconfig includeExtraOptions = case config.applyGhcOptions of AGOTargets -> isTarget AGOLocals -> isLocal AGOEverything -> True loadCommonPackage :: forall env. (HasBuildConfig env, HasSourceMap env) => CommonPackage -> RIO env Package loadCommonPackage common = do config <- getPackageConfig common.flags common.ghcOptions common.cabalConfigOpts gpkg <- liftIO common.gpd pure $ resolvePackage config gpkg -- | Upgrade the initial project package info to a full-blown @LocalPackage@ -- based on the selected components loadLocalPackage :: forall env. (HasBuildConfig env, HasSourceMap env) => ProjectPackage -> RIO env LocalPackage loadLocalPackage pp = do sm <- view sourceMapL let common = pp.projectCommon bopts <- view buildOptsL mcurator <- view $ buildConfigL . to (.curator) config <- getPackageConfig common.flags common.ghcOptions common.cabalConfigOpts gpkg <- ppGPD pp let name = common.name mtarget = M.lookup name sm.targets.targets (exeCandidates, testCandidates, benchCandidates) = case mtarget of Just (TargetComps comps) -> -- Currently, a named library component (a sub-library) cannot be -- specified as a build target. 
let (_s, e, t, b) = splitComponents $ Set.toList comps in (e, t, b) Just (TargetAll _packageType) -> ( buildableExes pkg , if bopts.tests && maybe True (Set.notMember name . (.skipTest)) mcurator then buildableTestSuites pkg else Set.empty , if bopts.benchmarks && maybe True (Set.notMember name . (.skipBenchmark)) mcurator then buildableBenchmarks pkg else Set.empty ) Nothing -> mempty -- See https://github.com/commercialhaskell/stack/issues/2862 isWanted = case mtarget of Nothing -> False -- FIXME: When issue #1406 ("stack 0.1.8 lost ability to build -- individual executables or library") is resolved, 'hasLibrary' is only -- relevant if the library is part of the target spec. Just _ -> hasBuildableMainLibrary pkg || not (Set.null nonLibComponents) || not (null pkg.subLibraries) filterSkippedComponents = Set.filter (not . (`elem` bopts.skipComponents)) (exes, tests, benches) = ( filterSkippedComponents exeCandidates , filterSkippedComponents testCandidates , filterSkippedComponents benchCandidates ) nonLibComponents = toComponents exes tests benches toComponents e t b = Set.unions [ Set.map CExe e , Set.map CTest t , Set.map CBench b ] btconfig = config { enableTests = not $ Set.null tests , enableBenchmarks = not $ Set.null benches } -- We resolve the package in 2 different configurations: -- -- - pkg doesn't have tests or benchmarks enabled. -- -- - btpkg has them enabled if they are present. -- -- The latter two configurations are used to compute the deps when -- --enable-benchmarks or --enable-tests are configured. This allows us to -- do an optimization where these are passed if the deps are present. This -- can avoid doing later unnecessary reconfigures. 
pkg = resolvePackage config gpkg btpkg | Set.null tests && Set.null benches = Nothing | otherwise = Just (resolvePackage btconfig gpkg) componentFiles <- memoizeRefWith $ fst <$> getPackageFilesForTargets pkg pp.cabalFP nonLibComponents checkCacheResults <- memoizeRefWith $ do componentFiles' <- runMemoizedWith componentFiles forM (Map.toList componentFiles') $ \(component, files) -> do mbuildCache <- tryGetBuildCache (ppRoot pp) component checkCacheResult <- checkBuildCache (fromMaybe Map.empty mbuildCache) (Set.toList files) pure (component, checkCacheResult) let dirtyFiles = do checkCacheResults' <- checkCacheResults let allDirtyFiles = Set.unions $ map (\(_, (x, _)) -> x) checkCacheResults' pure $ if not (Set.null allDirtyFiles) then let tryStripPrefix y = fromMaybe y (L.stripPrefix (toFilePath $ ppRoot pp) y) in Just $ Set.map tryStripPrefix allDirtyFiles else Nothing newBuildCaches = M.fromList . map (\(c, (_, cache)) -> (c, cache)) <$> checkCacheResults pure LocalPackage { package = pkg , testBench = btpkg , componentFiles , buildHaddocks = pp.projectCommon.buildHaddocks , forceDirty = bopts.forceDirty , dirtyFiles , newBuildCaches , cabalFP = pp.cabalFP , wanted = isWanted , components = nonLibComponents -- TODO: refactor this so that it's easier to be sure that these -- components are indeed unbuildable. -- -- The reasoning here is that if the STLocalComps specification made it -- through component parsing, but the components aren't present, then they -- must not be buildable. , unbuildable = toComponents (exes `Set.difference` buildableExes pkg) (tests `Set.difference` buildableTestSuites pkg) (benches `Set.difference` buildableBenchmarks pkg) } -- | Compare the current filesystem state to the cached information, and -- determine (1) if the files are dirty, and (2) the new cache values. 
checkBuildCache :: HasEnvConfig env
                => Map FilePath FileCacheInfo -- ^ old cache
                -> [Path Abs File] -- ^ files in package
                -> RIO env (Set FilePath, Map FilePath FileCacheInfo)
checkBuildCache oldCache files = do
  -- Digest every file currently in the package. A 'Nothing' digest means the
  -- file is missing on disk (see 'getFileDigestMaybe').
  fileDigests <- fmap Map.fromList $ forM files $ \fp -> do
    mdigest <- getFileDigestMaybe (toFilePath fp)
    pure (toFilePath fp, mdigest)
  -- Merge the fresh digests with the old cache. The three merge tactics
  -- correspond to: on disk only, in old cache only, and present in both.
  -- Each call to 'go' yields (dirty files, new cache entries); the results
  -- are folded together with 'mconcat'.
  fmap (mconcat . Map.elems) $ sequence $
    Map.merge
      (Map.mapMissing (\fp mdigest -> go fp mdigest Nothing))
      (Map.mapMissing (\fp fci -> go fp Nothing (Just fci)))
      (Map.zipWithMatched (\fp mdigest fci -> go fp mdigest (Just fci)))
      fileDigests
      oldCache
 where
  go :: FilePath
     -> Maybe SHA256
     -> Maybe FileCacheInfo
     -> RIO env (Set FilePath, Map FilePath FileCacheInfo)
  -- Filter out the cabal_macros file to avoid spurious recompilations
  go fp _ _ | takeFileName fp == "cabal_macros.h" = pure (Set.empty, Map.empty)
  -- Common case where it's in the cache and on the filesystem.
  go fp (Just digest') (Just fci)
    | fci.hash == digest' = pure (Set.empty, Map.singleton fp fci)
    | otherwise =
        pure (Set.singleton fp, Map.singleton fp $ FileCacheInfo digest')
  -- Missing file. Add it to dirty files, but no FileCacheInfo.
  go fp Nothing _ = pure (Set.singleton fp, Map.empty)
  -- Missing cache. Add it to dirty files and compute FileCacheInfo.
  go fp (Just digest') Nothing =
    pure (Set.singleton fp, Map.singleton fp $ FileCacheInfo digest')

-- | Returns entries to add to the build cache for any newly found unlisted
-- modules
addUnlistedToBuildCache ::
     HasEnvConfig env
  => Package
  -> Path Abs File
  -> Set NamedComponent
  -> Map NamedComponent (Map FilePath a)
  -> RIO env (Map NamedComponent [Map FilePath FileCacheInfo], [PackageWarning])
addUnlistedToBuildCache pkg cabalFP nonLibComponents buildCaches = do
  (componentFiles, warnings) <-
    getPackageFilesForTargets pkg cabalFP nonLibComponents
  results <- forM (M.toList componentFiles) $ \(component, files) -> do
    let buildCache = M.findWithDefault M.empty component buildCaches
        -- Only files not already listed in the component's existing cache
        -- are considered new.
        newFiles =
          Set.toList $
          Set.map toFilePath files `Set.difference` Map.keysSet buildCache
    addBuildCache <- mapM addFileToCache newFiles
    pure (component, addBuildCache)
  -- Fix: the warnings from 'getPackageFilesForTargets' were previously paired
  -- with every component and then concatenated, so each warning was reported
  -- once per component (and dropped entirely when there were no components).
  -- Report them exactly once instead.
  pure (M.fromList results, warnings)
 where
  -- One cache entry per file; a file that disappears between listing and
  -- digesting yields no entry (no digest available).
  addFileToCache fp = do
    mdigest <- getFileDigestMaybe fp
    case mdigest of
      Nothing -> pure Map.empty
      Just digest' -> pure $ Map.singleton fp $ FileCacheInfo digest'

-- | Gets list of Paths for files relevant to a set of components in a package.
-- Note that the library component, if any, is always automatically added to the
-- set of components.
getPackageFilesForTargets ::
     HasEnvConfig env
  => Package
  -> Path Abs File
  -> Set NamedComponent
  -> RIO env (Map NamedComponent (Set (Path Abs File)), [PackageWarning])
getPackageFilesForTargets pkg cabalFP nonLibComponents = do
  PackageComponentFile components' compFiles otherFiles warnings <-
    getPackageFile pkg cabalFP
  let -- The main library and any sub-libraries are always included, on top of
      -- the requested non-library components.
      necessaryComponents =
        Set.insert CLib $ Set.filter isCSubLib (M.keysSet components')
      components = necessaryComponents `Set.union` nonLibComponents
      -- Restrict to the selected components and attach the package-wide
      -- "other" files to each component's file set.
      componentsFiles =
        M.map
          ( \files ->
              Set.union otherFiles (Set.map dotCabalGetPath $ Set.fromList files)
          )
          -- Use Set.member (O(log n)) rather than Foldable's elem (O(n)) for
          -- the membership test on the Set of components.
          $ M.filterWithKey
              (\component _ -> component `Set.member` components)
              compFiles
  pure (componentsFiles, warnings)

-- | Get file digest, if it exists
getFileDigestMaybe :: HasEnvConfig env => FilePath -> RIO env (Maybe SHA256)
getFileDigestMaybe fp = do
  cache <- view $ envConfigL . to (.fileDigestCache)
  -- Treat a missing file as "no digest"; any other IO error is re-thrown.
  catch
    (Just <$> readFileDigest cache fp)
    (\e -> if isDoesNotExistError e then pure Nothing else throwM e)

-- | Get 'PackageConfig' for package given its name.
getPackageConfig :: (HasBuildConfig env, HasSourceMap env) => Map FlagName Bool -> [Text] -- ^ GHC options -> [Text] -- ^ cabal config opts -> RIO env PackageConfig getPackageConfig flags ghcOptions cabalConfigOpts = do platform <- view platformL compilerVersion <- view actualCompilerVersionL pure PackageConfig { enableTests = False , enableBenchmarks = False , flags = flags , ghcOptions = ghcOptions , cabalConfigOpts = cabalConfigOpts , compilerVersion = compilerVersion , platform = platform } stack-2.15.7/src/Stack/Build/Target.hs0000644000000000000000000005500514620153445015567 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE MultiWayIf #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ViewPatterns #-} -- | Parsing command line targets -- -- There are two relevant data sources for performing this parsing: the project -- configuration, and command line arguments. Project configurations includes -- the resolver (defining a LoadedSnapshot of global and snapshot packages), -- local dependencies, and project packages. It also defines local flag -- overrides. -- -- The command line arguments specify both additional local flag overrides and -- targets in their raw form. -- -- Flags are simple: we just combine CLI flags with config flags and make one -- big map of flags, preferring CLI flags when present. -- -- Raw targets can be a package name, a package name with component, just a -- component, or a package name and version number. We first must resolve these -- raw targets into both simple targets and additional dependencies. This works -- as follows: -- -- * If a component is specified, find a unique project package which defines -- that component, and convert it into a name+component target. 
-- -- * Ensure that all name+component values refer to valid components in the -- given project package. -- -- * For names, check if the name is present in the snapshot, local deps, or -- project packages. If it is not, then look up the most recent version in the -- package index and convert to a name+version. -- -- * For name+version, first ensure that the name is not used by a project -- package. Next, if that name+version is present in the snapshot or local -- deps _and_ its location is PLIndex, we have the package. Otherwise, add to -- local deps with the appropriate PLIndex. -- -- If in either of the last two bullets we added a package to local deps, print -- a warning to the user recommending modifying the extra-deps. -- -- Combine the various 'ResolveResults's together into 'Target' values, by -- combining various components for a single package and ensuring that no -- conflicting statements were made about targets. -- -- At this point, we now have a Map from package name to SimpleTarget, and an -- updated Map of local dependencies. We still have the aggregated flags, and -- the snapshot and project packages. -- -- Finally, we upgrade the snapshot by using calculatePackagePromotion. module Stack.Build.Target ( -- * Types Target (..) , NeedTargets (..) , PackageType (..) , parseTargets -- * Convenience helpers , gpdVersion -- * Test suite exports , parseRawTarget , RawTarget (..) , UnresolvedComponent (..) ) where import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Text as T import Path ( isProperPrefixOf ) import Path.Extra ( forgivingResolveDir, rejectMissingDir ) import Path.IO ( getCurrentDir ) import RIO.Process ( HasProcessContext ) import Stack.SourceMap ( additionalDepPackage ) import Stack.Prelude import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..) ) import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..) ) import Stack.Types.Config ( Config (..) 
) import Stack.Types.NamedComponent ( NamedComponent (..), renderComponent ) import Stack.Types.Build.Exception ( BuildPrettyException (..) ) import Stack.Types.ProjectConfig ( ProjectConfig (..) ) import Stack.Types.SourceMap ( DepPackage (..), GlobalPackage (..), PackageType (..) , ProjectPackage, SMActual (..), SMTargets (..) , SMWanted (..), Target (..), ppComponents, ppRoot ) -- | Do we need any targets? For example, `stack build` will fail if -- no targets are provided. data NeedTargets = NeedTargets | AllowNoTargets -------------------------------------------------------------------------------- -- Get the RawInput -------------------------------------------------------------------------------- -- | Raw target information passed on the command line. newtype RawInput = RawInput { rawInput :: Text } getRawInput :: BuildOptsCLI -> Map PackageName ProjectPackage -> ([Text], [RawInput]) getRawInput boptscli locals = let textTargets' = boptscli.targetsCLI textTargets = -- Handle the no targets case, which means we pass in the names of all -- project packages if null textTargets' then map (T.pack . packageNameString) (Map.keys locals) else textTargets' in (textTargets', map RawInput textTargets) -------------------------------------------------------------------------------- -- Turn RawInput into RawTarget -------------------------------------------------------------------------------- -- | The name of a component, which applies to executables, test -- suites, and benchmarks type ComponentName = Text -- | Either a fully resolved component, or a component name that could be -- either an executable, test, or benchmark data UnresolvedComponent = ResolvedComponent !NamedComponent | UnresolvedComponent !ComponentName deriving (Eq, Ord, Show) -- | Raw command line input, without checking against any databases or list of -- locals. 
Does not deal with directories data RawTarget = RTPackageComponent !PackageName !UnresolvedComponent | RTComponent !ComponentName | RTPackage !PackageName -- Explicitly _not_ supporting revisions on the command line. If you want -- that, you should be modifying your stack.yaml! (In fact, you should -- probably do that anyway, we're just letting people be lazy, since we're -- Haskeletors.) | RTPackageIdentifier !PackageIdentifier deriving (Eq, Show) -- | Same as @parseRawTarget@, but also takes directories into account. parseRawTargetDirs :: MonadIO m => Path Abs Dir -- ^ current directory -> Map PackageName ProjectPackage -> RawInput -- ^ raw target information from the commandline -> m (Either StyleDoc [(RawInput, RawTarget)]) parseRawTargetDirs root locals ri = case parseRawTarget t of Just rt -> pure $ Right [(ri, rt)] Nothing -> do mdir <- forgivingResolveDir root (T.unpack t) >>= rejectMissingDir case mdir of Nothing -> pure $ Left $ if | T.isPrefixOf "stack-yaml=" t -> projectOptionTypo | T.isSuffixOf ".yaml" t -> projectYamlExtTypo | otherwise -> fillSep [ flow "Directory not found:" , style Dir (fromString $ T.unpack t) <> "." ] Just dir -> case mapMaybe (childOf dir) $ Map.toList locals of [] -> pure $ Left $ fillSep [ style Dir (fromString $ T.unpack t) , flow "is not a local package directory and it is not a \ \parent directory of any local package directory." ] names -> pure $ Right $ map ((ri, ) . RTPackage) names where childOf dir (name, pp) = if dir == ppRoot pp || isProperPrefixOf dir (ppRoot pp) then Just name else Nothing RawInput t = ri projectOptionTypo :: StyleDoc projectOptionTypo = let o = "stack-yaml=" in projectTypo 2 (length o) o projectYamlExtTypo :: StyleDoc projectYamlExtTypo = let o = "stack-yaml " in projectTypo (2 + length o) 0 o projectTypo :: Int -> Int -> String -> StyleDoc projectTypo padLength dropLength option = vsep [ style Dir (fromString (replicate padLength ' ') <> fromString (T.unpack t)) <> " is not a directory." 
, style Highlight (fromString $ "--" <> option) <> style Dir (fromString . drop dropLength $ T.unpack t) <> " might work as a project option." ] -- | If this function returns @Nothing@, the input should be treated as a -- directory. parseRawTarget :: Text -> Maybe RawTarget parseRawTarget t = (RTPackageIdentifier <$> parsePackageIdentifier s) <|> (RTPackage <$> parsePackageName s) <|> (RTComponent <$> T.stripPrefix ":" t) <|> parsePackageComponent where s = T.unpack t parsePackageComponent = case T.splitOn ":" t of [pname, "lib"] | Just pname' <- parsePackageName (T.unpack pname) -> Just $ RTPackageComponent pname' $ ResolvedComponent CLib [pname, cname] | Just pname' <- parsePackageName (T.unpack pname) -> Just $ RTPackageComponent pname' $ UnresolvedComponent cname [pname, typ, cname] | Just pname' <- parsePackageName (T.unpack pname) , Just wrapper <- parseCompType typ -> Just $ RTPackageComponent pname' $ ResolvedComponent $ wrapper cname _ -> Nothing parseCompType t' = case t' of "exe" -> Just CExe "test" -> Just CTest "bench" -> Just CBench _ -> Nothing -------------------------------------------------------------------------------- -- Resolve the raw targets -------------------------------------------------------------------------------- data ResolveResult = ResolveResult { name :: !PackageName , rawInput :: !RawInput , component :: !(Maybe NamedComponent) -- ^ Was a concrete component specified? , addedDep :: !(Maybe PackageLocationImmutable) -- ^ Only if we're adding this as a dependency , packageType :: !PackageType } -- | Convert a 'RawTarget' into a 'ResolveResult' (see description on the -- module). 
resolveRawTarget :: (HasLogFunc env, HasPantryConfig env, HasProcessContext env) => SMActual GlobalPackage -> Map PackageName PackageLocation -> (RawInput, RawTarget) -> RIO env (Either StyleDoc ResolveResult) resolveRawTarget sma allLocs (rawInput, rt) = go rt where locals = sma.project deps = sma.deps globals = sma.globals -- Helper function: check if a 'NamedComponent' matches the given -- 'ComponentName' isCompNamed :: ComponentName -> NamedComponent -> Bool isCompNamed _ CLib = False isCompNamed t1 (CSubLib t2) = t1 == t2 isCompNamed t1 (CExe t2) = t1 == t2 isCompNamed t1 (CFlib t2) = t1 == t2 isCompNamed t1 (CTest t2) = t1 == t2 isCompNamed t1 (CBench t2) = t1 == t2 go (RTComponent cname) = do -- Associated list from component name to package that defines it. We use an -- assoc list and not a Map so we can detect duplicates. allPairs <- fmap concat $ flip Map.traverseWithKey locals $ \name pp -> do comps <- ppComponents pp pure $ map (name, ) $ Set.toList comps pure $ case filter (isCompNamed cname . snd) allPairs of [] -> Left $ fillSep [ style Target . fromString . T.unpack $ cname , flow "doesn't seem to be a local target. Run" , style Shell $ flow "stack ide targets" , flow "for a list of available targets." ] [(name, component)] -> Right ResolveResult { name , rawInput , component = Just component , addedDep = Nothing , packageType = PTProject } matches -> Left $ fillSep [ flow "Ambiguous component name" , style Target (fromString $ T.unpack cname) <> "," , "matches:" ] <> line <> bulletedList ( map ( \(pn, nc) -> fillSep [ "component" , style PkgComponent (fromString $ T.unpack $ renderComponent nc) , flow "of package" , style PkgComponent (fromPackageName pn) ] ) matches ) go (RTPackageComponent name ucomp) = case Map.lookup name locals of Nothing -> pure $ Left $ fillSep [ flow "Unknown local package:" , style Target (fromPackageName name) <> "." 
] Just pp -> do comps <- ppComponents pp pure $ case ucomp of ResolvedComponent component | component `Set.member` comps -> Right ResolveResult { name , rawInput , component = Just component , addedDep = Nothing , packageType = PTProject } | otherwise -> Left $ fillSep [ "Component" , style Target (fromString $ T.unpack $ renderComponent component) , flow "does not exist in package" , style Target (fromPackageName name) <> "." ] UnresolvedComponent comp' -> case filter (isCompNamed comp') $ Set.toList comps of [] -> Left $ fillSep [ "Component" , style Target (fromString $ T.unpack comp') , flow "does not exist in package" , style Target (fromPackageName name) <> "." ] [component] -> Right ResolveResult { name , rawInput , component = Just component , addedDep = Nothing , packageType = PTProject } matches -> Left $ fillSep [ flow "Ambiguous component name" , style Target (fromString $ T.unpack comp') , flow "for package" , style Target (fromPackageName name) , flow "matches components:" , fillSep $ mkNarrativeList (Just PkgComponent) False (map ncToStyleDoc matches) ] where ncToStyleDoc :: NamedComponent -> StyleDoc ncToStyleDoc = fromString . T.unpack . renderComponent go (RTPackage name) | Map.member name locals = pure $ Right ResolveResult { name , rawInput , component = Nothing , addedDep = Nothing , packageType = PTProject } | Map.member name deps = pure $ deferToConstructPlan name | Just gp <- Map.lookup name globals = case gp of GlobalPackage _ -> pure $ deferToConstructPlan name ReplacedGlobalPackage _ -> hackageLatest name | otherwise = hackageLatest name -- Note that we use getLatestHackageRevision below, even though it's -- non-reproducible, to avoid user confusion. In any event, reproducible -- builds should be done by updating your config files! 
go (RTPackageIdentifier ident@(PackageIdentifier name version)) | Map.member name locals = pure $ Left $ fillSep [ style Target (fromPackageName name) , flow "target has a specific version number, but it is a local \ \package. To avoid confusion, we will not install the \ \specified version or build the local one. To build the \ \local package, specify the target without an explicit \ \version." ] | otherwise = case Map.lookup name allLocs of -- Installing it from the package index, so we're cool with overriding -- it if necessary Just ( PLImmutable ( PLIHackage (PackageIdentifier _name versionLoc) _cfKey _treeKey ) ) -> if version == versionLoc then pure $ deferToConstructPlan name else hackageLatestRevision name version -- The package was coming from something besides the index, so refuse -- to do the override Just loc' -> pure $ Left $ fillSep [ flow "Package with identifier was targeted on the command \ \line:" , style Target (fromPackageId ident) <> "," , flow "but it was specified from a non-index location:" , flow $ T.unpack $ textDisplay loc' <> "." , flow "Recommendation: add the correctly desired version to \ \extra-deps." 
] -- Not present at all, add it from Hackage Nothing -> do mrev <- getLatestHackageRevision YesRequireHackageIndex name version pure $ case mrev of Nothing -> deferToConstructPlan name Just (_rev, cfKey, treeKey) -> Right ResolveResult { name , rawInput , component = Nothing , addedDep = Just $ PLIHackage (PackageIdentifier name version) cfKey treeKey , packageType = PTDependency } hackageLatest name = do mloc <- getLatestHackageLocation YesRequireHackageIndex name UsePreferredVersions pure $ case mloc of Nothing -> deferToConstructPlan name Just loc -> Right ResolveResult { name , rawInput , component = Nothing , addedDep = Just loc , packageType = PTDependency } hackageLatestRevision name version = do mrev <- getLatestHackageRevision YesRequireHackageIndex name version pure $ case mrev of Nothing -> deferToConstructPlan name Just (_rev, cfKey, treeKey) -> Right ResolveResult { name , rawInput , component = Nothing , addedDep = Just $ PLIHackage (PackageIdentifier name version) cfKey treeKey , packageType = PTDependency } -- This is actually an error case. We _could_ pure a Left value here, but it -- turns out to be better to defer this until the ConstructPlan phase, and let -- it complain about the missing package so that we get more errors together, -- plus the fancy colored output from that module. deferToConstructPlan name = Right ResolveResult { name , rawInput , component = Nothing , addedDep = Nothing , packageType = PTDependency } -------------------------------------------------------------------------------- -- Combine the ResolveResults -------------------------------------------------------------------------------- combineResolveResults :: forall env. 
HasLogFunc env => [ResolveResult] -> RIO env ( [StyleDoc] , Map PackageName Target , Map PackageName PackageLocationImmutable ) combineResolveResults results = do addedDeps <- fmap Map.unions $ forM results $ \result -> case result.addedDep of Nothing -> pure Map.empty Just pl -> pure $ Map.singleton result.name pl let m0 = Map.unionsWith (++) $ map (\rr -> Map.singleton rr.name [rr]) results (errs, ms) = partitionEithers $ flip map (Map.toList m0) $ \(name, rrs) -> let mcomps = map (.component) rrs in -- Confirm that there is either exactly 1 with no component, or that -- all rrs are components case rrs of [] -> assert False $ Left $ flow "Somehow got no rrComponent values, that can't happen." [rr] | isNothing rr.component -> Right $ Map.singleton name $ TargetAll rr.packageType _ | all isJust mcomps -> Right $ Map.singleton name $ TargetComps $ Set.fromList $ catMaybes mcomps | otherwise -> Left $ fillSep [ flow "The package" , style Target $ fromPackageName name , flow "was specified in multiple, incompatible ways:" , fillSep $ mkNarrativeList (Just Target) False (map rrToStyleDoc rrs) ] pure (errs, Map.unions ms, addedDeps) where rrToStyleDoc :: ResolveResult -> StyleDoc rrToStyleDoc = fromString . T.unpack . (.rawInput.rawInput) -------------------------------------------------------------------------------- -- OK, let's do it! -------------------------------------------------------------------------------- parseTargets :: HasBuildConfig env => NeedTargets -> Bool -> BuildOptsCLI -> SMActual GlobalPackage -> RIO env SMTargets parseTargets needTargets haddockDeps boptscli smActual = do logDebug "Parsing the targets" bconfig <- view buildConfigL workingDir <- getCurrentDir locals <- view $ buildConfigL . 
to (.smWanted.project) let (textTargets', rawInput) = getRawInput boptscli locals (errs1, concat -> rawTargets) <- fmap partitionEithers $ forM rawInput $ parseRawTargetDirs workingDir locals let depLocs = Map.map (.location) smActual.deps (errs2, resolveResults) <- fmap partitionEithers $ forM rawTargets $ resolveRawTarget smActual depLocs (errs3, targets, addedDeps) <- combineResolveResults resolveResults case concat [errs1, errs2, errs3] of [] -> pure () errs -> prettyThrowIO $ TargetParseException errs case (Map.null targets, needTargets) of (False, _) -> pure () (True, AllowNoTargets) -> pure () (True, NeedTargets) | null textTargets' && bcImplicitGlobal bconfig -> prettyThrowIO $ TargetParseException [ fillSep [ flow "The specified targets matched no packages. Perhaps you \ \need to run" , style Shell (flow "stack init") <> "?" ] ] | null textTargets' && Map.null locals -> prettyThrowIO $ TargetParseException [ flow "The project contains no local packages (packages not \ \marked with 'extra-dep')." ] | otherwise -> prettyThrowIO $ TargetParseException [ flow "The specified targets matched no packages." ] addedDeps' <- mapM (additionalDepPackage haddockDeps . PLImmutable) addedDeps pure SMTargets { targets = targets , deps = addedDeps' } where bcImplicitGlobal bconfig = case bconfig.config.project of PCProject _ -> False PCGlobalProject -> True PCNoProject _ -> False stack-2.15.7/src/Stack/BuildInfo.hs0000644000000000000000000000602414604306201015142 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE CPP #-} {-# LANGUAGE OverloadedStrings #-} #ifdef USE_GIT_INFO {-# LANGUAGE TemplateHaskell #-} #endif -- Extracted from "Stack" so that module does not use CPP or Template Haskell, -- and therefore doesn't need to be recompiled as often. 
module Stack.BuildInfo ( versionString' , hpackVersion , maybeGitHash ) where #ifndef HIDE_DEP_VERSIONS import qualified Build_stack #endif import Data.Version ( versionBranch ) import Distribution.System ( buildArch ) import qualified Distribution.Text as Cabal ( display ) #ifdef USE_GIT_INFO import GitHash ( giCommitCount, giHash, tGitInfoCwdTry ) import Options.Applicative.Simple ( simpleVersion ) #endif import qualified Paths_stack as Meta import Stack.Constants ( isStackUploadDisabled ) import Stack.Prelude #ifndef USE_GIT_INFO import Stack.Types.Version ( showStackVersion ) #endif versionString' :: String #ifdef USE_GIT_INFO versionString' = concat $ concat [ [$(simpleVersion Meta.version)] -- Leave out number of commits for --depth=1 clone -- See https://github.com/commercialhaskell/stack/issues/792 , case giCommitCount <$> $$tGitInfoCwdTry of Left _ -> [] Right 1 -> [] Right count -> [" (", show count, " commits)"] , [afterVersion] ] #else versionString' = showStackVersion ++ afterVersion #endif where afterVersion = concat [ preReleaseString , ' ' : Cabal.display buildArch , depsString , warningString , stackUploadDisabledWarningString ] preReleaseString = case versionBranch Meta.version of (_:y:_) | even y -> " PRE-RELEASE" (_:_:z:_) | even z -> " RELEASE-CANDIDATE" _ -> "" #ifdef HIDE_DEP_VERSIONS depsString = " hpack-" ++ VERSION_hpack #else depsString = "\nCompiled with:\n" ++ unlines (map ("- " ++) Build_stack.deps) #endif #ifdef SUPPORTED_BUILD warningString = "" #else warningString = unlines [ "" , "Warning: this is an unsupported build that may use different versions of" , "dependencies and GHC than the officially released binaries, and therefore may" , "not behave identically. If you encounter problems, please try the latest" , "official build by running 'stack upgrade --force-download'." ] #endif stackUploadDisabledWarningString = if isStackUploadDisabled then unlines [ "" , "Warning: 'stack upload' is disabled and will not make HTTP request(s). 
It will" , "output information about the HTTP request(s) that would have been made if it" , "was enabled." ] else "" -- | Hpack version we're compiled against hpackVersion :: String hpackVersion = VERSION_hpack -- | If USE_GIT_INFO is enabled, the Git hash in the build directory, otherwise -- Nothing. maybeGitHash :: Maybe String maybeGitHash = #ifdef USE_GIT_INFO (either (const Nothing) (Just . giHash) $$tGitInfoCwdTry) #else Nothing #endif stack-2.15.7/src/Stack/BuildOpts.hs0000644000000000000000000000557414620153445015215 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} -- | Default configuration options for building. module Stack.BuildOpts ( defaultBuildOpts , defaultTestOpts , defaultHaddockOpts , defaultBenchmarkOpts ) where import Distribution.Verbosity ( normal ) import Stack.Prelude import Stack.Types.BuildOpts ( BenchmarkOpts (..), BuildOpts (..), HaddockOpts (..) , TestOpts (..) ) import Stack.Types.BuildOptsMonoid ( BuildOptsMonoid (..), CabalVerbosity (..) , ProgressBarFormat (..), TestOptsMonoid (..) 
) defaultBuildOpts :: BuildOpts defaultBuildOpts = BuildOpts { libProfile = defaultFirstFalse buildMonoid.libProfile , exeProfile = defaultFirstFalse buildMonoid.exeProfile , libStrip = defaultFirstTrue buildMonoid.libStrip , exeStrip = defaultFirstTrue buildMonoid.exeStrip , buildHaddocks = False , haddockOpts = defaultHaddockOpts , openHaddocks = defaultFirstFalse buildMonoid.openHaddocks , haddockDeps = Nothing , haddockInternal = defaultFirstFalse buildMonoid.haddockInternal , haddockHyperlinkSource = defaultFirstTrue buildMonoid.haddockHyperlinkSource , haddockForHackage = defaultFirstFalse buildMonoid.haddockForHackage , installExes = defaultFirstFalse buildMonoid.installExes , installCompilerTool = defaultFirstFalse buildMonoid.installCompilerTool , preFetch = defaultFirstFalse buildMonoid.preFetch , keepGoing = Nothing , keepTmpFiles = defaultFirstFalse buildMonoid.keepTmpFiles , forceDirty = defaultFirstFalse buildMonoid.forceDirty , tests = defaultFirstFalse buildMonoid.tests , testOpts = defaultTestOpts , benchmarks = defaultFirstFalse buildMonoid.benchmarks , benchmarkOpts = defaultBenchmarkOpts , reconfigure = defaultFirstFalse buildMonoid.reconfigure , cabalVerbose = CabalVerbosity normal , splitObjs = defaultFirstFalse buildMonoid.splitObjs , skipComponents = [] , interleavedOutput = defaultFirstTrue buildMonoid.interleavedOutput , progressBar = CappedBar , ddumpDir = Nothing } where buildMonoid = undefined :: BuildOptsMonoid defaultTestOpts :: TestOpts defaultTestOpts = TestOpts { rerunTests = defaultFirstTrue toMonoid.rerunTests , additionalArgs = [] , coverage = defaultFirstFalse toMonoid.coverage , disableRun = defaultFirstFalse toMonoid.disableRun , maximumTimeSeconds = Nothing , allowStdin = defaultFirstTrue toMonoid.allowStdin } where toMonoid = undefined :: TestOptsMonoid defaultHaddockOpts :: HaddockOpts defaultHaddockOpts = HaddockOpts { additionalArgs = [] } defaultBenchmarkOpts :: BenchmarkOpts defaultBenchmarkOpts = BenchmarkOpts { 
additionalArgs = Nothing , disableRun = False } stack-2.15.7/src/Stack/BuildPlan.hs0000644000000000000000000004634314620153445015161 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Resolving a build plan for a set of packages in a given Stackage snapshot. module Stack.BuildPlan ( BuildPlanException (..) , BuildPlanCheck (..) , checkSnapBuildPlan , DepError (..) , DepErrors , removeSrcPkgDefaultFlags , selectBestSnapshot , showItems ) where import qualified Data.Foldable as F import Data.List (intercalate) import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Text as T import qualified Distribution.Package as C import Distribution.PackageDescription ( GenericPackageDescription, flagDefault, flagName , flagManual, genPackageFlags ) import qualified Distribution.PackageDescription as C import Distribution.System ( Platform ) import Distribution.Text ( display ) import Distribution.Types.UnqualComponentName ( unUnqualComponentName ) import qualified Distribution.Version as C import qualified RIO.NonEmpty as NE import Stack.Constants ( wiredInPackages ) import Stack.Package ( PackageConfig (..), packageDependencies , resolvePackageDescription ) import Stack.Prelude hiding ( Display (..) ) import Stack.SourceMap ( SnapshotCandidate, loadProjectSnapshotCandidate ) import Stack.Types.Compiler ( ActualCompiler, WhichCompiler (..), compilerVersionText , whichCompiler ) import Stack.Types.Config ( HasConfig ) import Stack.Types.GHCVariant ( HasGHCVariant ) import Stack.Types.Platform ( HasPlatform (..) ) import Stack.Types.SourceMap ( CommonPackage (..), DepPackage (..) , GlobalPackageVersion (..), ProjectPackage (..) , SMActual (..) 
) import Stack.Types.Version ( VersionRange, withinRange ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.BuildPlan" module. data BuildPlanException = UnknownPackages (Path Abs File) -- stack.yaml file (Map PackageName (Maybe Version, Set PackageName)) -- truly unknown (Map PackageName (Set PackageIdentifier)) -- shadowed | SnapshotNotFound SnapName | NeitherCompilerOrResolverSpecified T.Text | DuplicatePackagesBug deriving (Show, Typeable) instance Exception BuildPlanException where displayException (SnapshotNotFound snapName) = unlines [ "Error: [S-2045]" , "SnapshotNotFound " ++ snapName' , "Non existing resolver: " ++ snapName' ++ "." , "For a complete list of available snapshots see https://www.stackage.org/snapshots" ] where snapName' = show snapName displayException (UnknownPackages stackYaml unknown shadowed) = "Error: [S-7571]\n" ++ unlines (unknown' ++ shadowed') where unknown' :: [String] unknown' | Map.null unknown = [] | otherwise = concat [ ["The following packages do not exist in the build plan:"] , map go (Map.toList unknown) , case mapMaybe goRecommend $ Map.toList unknown of [] -> [] rec -> ("Recommended action: modify the extra-deps field of " ++ toFilePath stackYaml ++ " to include the following:") : (rec ++ ["Note: further dependencies may need to be added"]) , case mapMaybe getNoKnown $ Map.toList unknown of [] -> [] noKnown -> [ "There are no known versions of the following packages:" , intercalate ", " $ map packageNameString noKnown ] ] where go (dep, (_, users)) | Set.null users = packageNameString dep go (dep, (_, users)) = concat [ packageNameString dep , " (used by " , intercalate ", " $ map packageNameString $ Set.toList users , ")" ] goRecommend (name, (Just version, _)) = Just $ "- " ++ packageIdentifierString (PackageIdentifier name version) goRecommend (_, (Nothing, _)) = Nothing getNoKnown (name, (Nothing, _)) = Just name getNoKnown (_, (Just _, _)) = Nothing shadowed' :: [String] shadowed' | 
Map.null shadowed = [] | otherwise = concat [ ["The following packages are shadowed by local packages:"] , map go (Map.toList shadowed) , ["Recommended action: modify the extra-deps field of " ++ toFilePath stackYaml ++ " to include the following:"] , extraDeps , ["Note: further dependencies may need to be added"] ] where go (dep, users) | Set.null users = packageNameString dep ++ " (internal Stack error: this should never be null)" go (dep, users) = concat [ packageNameString dep , " (used by " , intercalate ", " $ map (packageNameString . pkgName) $ Set.toList users , ")" ] extraDeps = map (\ident -> "- " ++ packageIdentifierString ident) $ Set.toList $ Set.unions $ Map.elems shadowed displayException (NeitherCompilerOrResolverSpecified url) = concat [ "Error: [S-8559]\n" , "Failed to load custom snapshot at " , T.unpack url , ", because no 'compiler' or 'resolver' is specified." ] displayException DuplicatePackagesBug = bugReport "[S-5743]" "Duplicate packages are not expected here." gpdPackages :: [GenericPackageDescription] -> Map PackageName Version gpdPackages = Map.fromList . map (toPair . C.package . C.packageDescription) where toPair (C.PackageIdentifier name version) = (name, version) gpdPackageDeps :: GenericPackageDescription -> ActualCompiler -> Platform -> Map FlagName Bool -> Map PackageName VersionRange gpdPackageDeps gpd compilerVersion platform flags = Map.filterWithKey (const . not . isLocalLibrary) (packageDependencies pkgDesc) where isLocalLibrary name' = name' == name || name' `Set.member` subs name = gpdPackageName gpd subs = Set.fromList $ map (C.mkPackageName . unUnqualComponentName . 
fst) $ C.condSubLibraries gpd -- Since tests and benchmarks are both enabled, doesn't matter if we choose -- modified or unmodified pkgDesc = resolvePackageDescription pkgConfig gpd pkgConfig = PackageConfig { enableTests = True , enableBenchmarks = True , flags , ghcOptions = [] , cabalConfigOpts = [] , compilerVersion , platform } -- Remove any src package flags having default values -- Remove any package entries with no flags set removeSrcPkgDefaultFlags :: [C.GenericPackageDescription] -> Map PackageName (Map FlagName Bool) -> Map PackageName (Map FlagName Bool) removeSrcPkgDefaultFlags gpds flags = let defaults = Map.unions (map gpdDefaultFlags gpds) flags' = Map.differenceWith removeSame flags defaults in Map.filter (not . Map.null) flags' where removeSame f1 f2 = let diff v v' = if v == v' then Nothing else Just v in Just $ Map.differenceWith diff f1 f2 gpdDefaultFlags gpd = let tuples = map getDefault (C.genPackageFlags gpd) in Map.singleton (gpdPackageName gpd) (Map.fromList tuples) getDefault f | C.flagDefault f = (C.flagName f, True) | otherwise = (C.flagName f, False) -- | Find the set of @FlagName@s necessary to get the given -- @GenericPackageDescription@ to compile against the given @BuildPlan@. Will -- only modify non-manual flags, and will prefer default values for flags. -- Returns the plan which produces least number of dep errors selectPackageBuildPlan :: Platform -> ActualCompiler -> Map PackageName Version -> GenericPackageDescription -> (Map PackageName (Map FlagName Bool), DepErrors) selectPackageBuildPlan platform compiler pool gpd = (selectPlan . limitSearchSpace . NE.map makePlan) flagCombinations where selectPlan :: NonEmpty (a, DepErrors) -> (a, DepErrors) selectPlan = F.foldr1 fewerErrors where fewerErrors p1 p2 | nErrors p1 == 0 = p1 | nErrors p1 <= nErrors p2 = p1 | otherwise = p2 where nErrors = Map.size . snd -- Avoid exponential complexity in flag combinations making us sad pandas. 
-- See: https://github.com/commercialhaskell/stack/issues/543 limitSearchSpace :: NonEmpty a -> NonEmpty a limitSearchSpace (x :| xs) = x :| take (maxFlagCombinations - 1) xs where maxFlagCombinations = 128 makePlan :: [(FlagName, Bool)] -> (Map PackageName (Map FlagName Bool), DepErrors) makePlan flags = checkPackageBuildPlan platform compiler pool (Map.fromList flags) gpd flagCombinations :: NonEmpty [(FlagName, Bool)] flagCombinations = mapM getOptions (genPackageFlags gpd) where getOptions :: C.PackageFlag -> NonEmpty (FlagName, Bool) getOptions f | flagManual f = (fname, flagDefault f) :| [] | flagDefault f = (fname, True) :| [(fname, False)] | otherwise = (fname, False) :| [(fname, True)] where fname = flagName f -- | Check whether with the given set of flags a package's dependency -- constraints can be satisfied against a given build plan or pool of packages. checkPackageBuildPlan :: Platform -> ActualCompiler -> Map PackageName Version -> Map FlagName Bool -> GenericPackageDescription -> (Map PackageName (Map FlagName Bool), DepErrors) checkPackageBuildPlan platform compiler pool flags gpd = (Map.singleton pkg flags, errs) where pkg = gpdPackageName gpd errs = checkPackageDeps pkg constraints pool constraints = gpdPackageDeps gpd compiler platform flags -- | Checks if the given package dependencies can be satisfied by the given set -- of packages. Will fail if a package is either missing or has a version -- outside of the version range. 
checkPackageDeps ::
     PackageName
     -- ^ package using dependencies, for constructing DepErrors
  -> Map PackageName VersionRange
     -- ^ dependency constraints
  -> Map PackageName Version
     -- ^ Available package pool or index
  -> DepErrors
checkPackageDeps myName deps packages =
  Map.unionsWith combineDepError $ checkDep <$> Map.toList deps
 where
  -- Check one dependency against the pool. An empty map means the
  -- constraint is satisfied; otherwise a single-entry error map is
  -- produced, attributed to 'myName'.
  checkDep :: (PackageName, VersionRange) -> DepErrors
  checkDep (depName, range) =
    case Map.lookup depName packages of
      Nothing -> mkError depName Nothing range
      Just v
        | v `withinRange` range -> Map.empty
        | otherwise -> mkError depName (Just v) range
  -- Record a missing (Nothing) or out-of-range (Just v) dependency.
  mkError :: PackageName -> Maybe Version -> VersionRange -> DepErrors
  mkError depName mv range = Map.singleton depName DepError
    { version = mv
    , neededBy = Map.singleton myName range
    }

type DepErrors = Map PackageName DepError

data DepError = DepError
  { version :: !(Maybe Version)
  , neededBy :: !(Map PackageName VersionRange)
  }
  deriving Show

-- | Combine two 'DepError's for the same 'Version'.
combineDepError :: DepError -> DepError -> DepError
combineDepError (DepError a x) (DepError b y) =
  assert (a == b) $ DepError a (Map.unionWith C.intersectVersionRanges x y)

-- | Given a bundle of packages (a list of @GenericPackageDescriptions@'s) to
-- build and an available package pool (snapshot) check whether the bundle's
-- dependencies can be satisfied. If flags is passed as Nothing flag settings
-- will be chosen automatically.
checkBundleBuildPlan :: Platform -> ActualCompiler -> Map PackageName Version -> Maybe (Map PackageName (Map FlagName Bool)) -> [GenericPackageDescription] -> (Map PackageName (Map FlagName Bool), DepErrors) checkBundleBuildPlan platform compiler pool flags gpds = ( Map.unionsWith dupError (map fst plans) , Map.unionsWith combineDepError (map snd plans) ) where plans = map (pkgPlan flags) gpds pkgPlan Nothing gpd = selectPackageBuildPlan platform compiler pool' gpd pkgPlan (Just f) gpd = checkPackageBuildPlan platform compiler pool' (flags' f gpd) gpd flags' f gpd = fromMaybe Map.empty (Map.lookup (gpdPackageName gpd) f) pool' = Map.union (gpdPackages gpds) pool dupError _ _ = impureThrow DuplicatePackagesBug data BuildPlanCheck = BuildPlanCheckOk (Map PackageName (Map FlagName Bool)) | BuildPlanCheckPartial (Map PackageName (Map FlagName Bool)) DepErrors | BuildPlanCheckFail (Map PackageName (Map FlagName Bool)) DepErrors ActualCompiler -- | Compare 'BuildPlanCheck', where GT means a better plan. compareBuildPlanCheck :: BuildPlanCheck -> BuildPlanCheck -> Ordering compareBuildPlanCheck (BuildPlanCheckPartial _ e1) (BuildPlanCheckPartial _ e2) = -- Note: order of comparison flipped, since it's better to have fewer errors. 
compare (Map.size e2) (Map.size e1) compareBuildPlanCheck (BuildPlanCheckFail _ e1 _) (BuildPlanCheckFail _ e2 _) = let numUserPkgs e = Map.size $ Map.unions (Map.elems (fmap (.neededBy) e)) in compare (numUserPkgs e2) (numUserPkgs e1) compareBuildPlanCheck BuildPlanCheckOk{} BuildPlanCheckOk{} = EQ compareBuildPlanCheck BuildPlanCheckOk{} BuildPlanCheckPartial{} = GT compareBuildPlanCheck BuildPlanCheckOk{} BuildPlanCheckFail{} = GT compareBuildPlanCheck BuildPlanCheckPartial{} BuildPlanCheckFail{} = GT compareBuildPlanCheck _ _ = LT instance Show BuildPlanCheck where show BuildPlanCheckOk {} = "" show (BuildPlanCheckPartial f e) = T.unpack $ showDepErrors f e show (BuildPlanCheckFail f e c) = T.unpack $ showCompilerErrors f e c -- | Check a set of 'GenericPackageDescription's and a set of flags against a -- given snapshot. Returns how well the snapshot satisfies the dependencies of -- the packages. checkSnapBuildPlan :: (HasConfig env, HasGHCVariant env) => [ResolvedPath Dir] -> Maybe (Map PackageName (Map FlagName Bool)) -> SnapshotCandidate env -> RIO env BuildPlanCheck checkSnapBuildPlan pkgDirs flags snapCandidate = do platform <- view platformL sma <- snapCandidate pkgDirs gpds <- liftIO $ forM (Map.elems sma.project) (.projectCommon.gpd) let compiler = sma.compiler globalVersion (GlobalPackageVersion v) = v depVersion dep | PLImmutable loc <- dep.location = Just $ packageLocationVersion loc | otherwise = Nothing snapPkgs = Map.union (Map.mapMaybe depVersion sma.deps) (Map.map globalVersion sma.globals) (f, errs) = checkBundleBuildPlan platform compiler snapPkgs flags gpds cerrs = compilerErrors compiler errs if Map.null errs then pure $ BuildPlanCheckOk f else if Map.null cerrs then pure $ BuildPlanCheckPartial f errs else pure $ BuildPlanCheckFail f cerrs compiler where compilerErrors compiler errs | whichCompiler compiler == Ghc = ghcErrors errs | otherwise = Map.empty isGhcWiredIn p _ = p `Set.member` wiredInPackages ghcErrors = Map.filterWithKey 
isGhcWiredIn -- | Find a snapshot and set of flags that is compatible with and matches as -- best as possible with the given 'GenericPackageDescription's. selectBestSnapshot :: (HasConfig env, HasGHCVariant env) => [ResolvedPath Dir] -> NonEmpty SnapName -> RIO env (SnapshotCandidate env, RawSnapshotLocation, BuildPlanCheck) selectBestSnapshot pkgDirs snaps = do prettyInfo $ fillSep [ flow "Selecting the best among" , fromString $ show (NE.length snaps) , "snapshots..." ] <> line F.foldr1 go (NE.map (getResult <=< snapshotLocation) snaps) where go mold mnew = do old@(_snap, _loc, bpc) <- mold case bpc of BuildPlanCheckOk {} -> pure old _ -> fmap (betterSnap old) mnew getResult loc = do candidate <- loadProjectSnapshotCandidate loc NoPrintWarnings False result <- checkSnapBuildPlan pkgDirs Nothing candidate reportResult result loc pure (candidate, loc, result) betterSnap (s1, l1, r1) (s2, l2, r2) | compareBuildPlanCheck r1 r2 /= LT = (s1, l1, r1) | otherwise = (s2, l2, r2) reportResult BuildPlanCheckOk {} loc = prettyNote $ fillSep [ flow "Matches" , pretty $ PrettyRawSnapshotLocation loc ] <> line reportResult r@BuildPlanCheckPartial {} loc = prettyWarn $ fillSep [ flow "Partially matches" , pretty $ PrettyRawSnapshotLocation loc ] <> blankLine <> indent 4 (string (show r)) reportResult r@BuildPlanCheckFail {} loc = prettyWarn $ fillSep [ flow "Rejected" , pretty $ PrettyRawSnapshotLocation loc ] <> blankLine <> indent 4 (string (show r)) showItems :: [String] -> Text showItems items = T.concat (map formatItem items) where formatItem item = T.concat [ " - " , T.pack item , "\n" ] showPackageFlags :: PackageName -> Map FlagName Bool -> Text showPackageFlags pkg fl = if not $ Map.null fl then T.concat [ " - " , T.pack $ packageNameString pkg , ": " , T.pack $ intercalate ", " $ map formatFlags (Map.toList fl) , "\n" ] else "" where formatFlags (f, v) = show f ++ " = " ++ show v showMapPackages :: Map PackageName a -> Text showMapPackages mp = showItems $ map 
packageNameString $ Map.keys mp showCompilerErrors :: Map PackageName (Map FlagName Bool) -> DepErrors -> ActualCompiler -> Text showCompilerErrors flags errs compiler = T.concat [ compilerVersionText compiler , " cannot be used for these packages:\n" , showMapPackages $ Map.unions (Map.elems (fmap (.neededBy) errs)) , showDepErrors flags errs -- TODO only in debug mode ] showDepErrors :: Map PackageName (Map FlagName Bool) -> DepErrors -> Text showDepErrors flags errs = T.concat [ T.concat $ map formatError (Map.toList errs) , if T.null flagVals then "" else "Using package flags:\n" <> flagVals ] where formatError (depName, DepError mversion neededBy) = T.concat [ showDepVersion depName mversion , T.concat (map showRequirement (Map.toList neededBy)) ] showDepVersion depName mversion = T.concat [ T.pack $ packageNameString depName , case mversion of Nothing -> " not found" Just version -> T.concat [ " version " , T.pack $ versionString version , " found" ] , "\n" ] showRequirement (user, range) = T.concat [ " - " , T.pack $ packageNameString user , " requires " , T.pack $ display range , "\n" ] flagVals = T.concat (map showFlags userPkgs) userPkgs = Map.keys $ Map.unions (Map.elems (fmap (.neededBy) errs)) showFlags pkg = maybe "" (showPackageFlags pkg) (Map.lookup pkg flags) stack-2.15.7/src/Stack/CLI.hs0000644000000000000000000005432614620153445013716 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} module Stack.CLI ( commandLineHandler ) where import Data.Attoparsec.Interpreter ( getInterpreterArgs ) import Data.Char ( toLower ) import qualified Data.List as L import Data.List.NonEmpty ( prependList ) import Options.Applicative ( Parser, ParserFailure, ParserHelp, ParserResult (..), flag, switch , handleParseResult, help, helpError, idm, long, metavar , overFailure, renderFailure, strArgument, switch ) import Options.Applicative.Help ( errorHelp, stringChunk, vcatChunks ) import Options.Applicative.Builder.Extra ( 
boolFlags, extraHelpOption ) import Options.Applicative.Complicated ( addCommand, addSubCommands, complicatedOptions ) import RIO.NonEmpty ( (<|) ) import qualified RIO.NonEmpty as NE import qualified RIO.Process ( exec ) import RIO.Process ( withProcessContextNoLogging ) import Stack.Build ( buildCmd ) import Stack.BuildInfo ( hpackVersion, versionString' ) import Stack.Clean ( CleanCommand (..), cleanCmd ) import Stack.ConfigCmd as ConfigCmd import Stack.Constants ( globalFooter, osIsWindows, stackProgName ) import Stack.Coverage ( hpcReportCmd ) import Stack.Docker ( dockerCmdName, dockerHelpOptName, dockerPullCmdName ) import Stack.DockerCmd ( dockerPullCmd, dockerResetCmd ) import Stack.Dot ( dotCmd ) import Stack.Exec ( SpecialExecCmd (..), execCmd ) import Stack.Eval ( evalCmd ) import Stack.Ghci ( ghciCmd ) import Stack.Hoogle ( hoogleCmd ) import Stack.IDE ( ListPackagesCmd (..), OutputStream (..), idePackagesCmd , ideTargetsCmd ) import Stack.Init ( initCmd ) import Stack.List ( listCmd ) import Stack.Ls ( lsCmd ) import Stack.New ( newCmd ) import qualified Stack.Nix as Nix import Stack.Options.BuildParser ( buildOptsParser ) import Stack.Options.CleanParser ( cleanOptsParser ) import Stack.Options.DotParser ( dotOptsParser ) import Stack.Options.EvalParser ( evalOptsParser ) import Stack.Options.ExecParser ( execOptsParser ) import Stack.Options.GhciParser ( ghciOptsParser ) import Stack.Options.GlobalParser ( globalOptsParser ) import Stack.Options.HpcReportParser ( hpcReportOptsParser ) import Stack.Options.InitParser ( initOptsParser ) import Stack.Options.LsParser ( lsOptsParser ) import Stack.Options.NewParser ( newOptsParser ) import Stack.Options.PathParser ( pathParser ) import Stack.Options.SDistParser ( sdistOptsParser ) import Stack.Options.ScriptParser ( scriptOptsParser ) import Stack.Options.SetupParser ( setupOptsParser ) import Stack.Options.UnpackParser ( unpackOptsParser ) import Stack.Options.UpgradeParser ( upgradeOptsParser ) import 
Stack.Options.UploadParser ( uploadOptsParser ) import Stack.Options.Utils ( GlobalOptsContext (..) ) import qualified Stack.Path ( path ) import Stack.Prelude import Stack.Query ( queryCmd ) import Stack.Runners ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) import Stack.SDist ( sdistCmd ) import Stack.Script ( ScriptOpts (..), scriptCmd ) import Stack.SetupCmd ( setupCmd ) import Stack.Templates ( templatesCmd ) import Stack.Types.AddCommand ( AddCommand ) import Stack.Types.BuildOptsCLI ( BuildCommand (..) ) import Stack.Types.GlobalOptsMonoid ( GlobalOptsMonoid (..) ) import Stack.Types.Runner ( Runner ) import Stack.Types.Version ( stackVersion ) import Stack.Uninstall ( uninstallCmd ) import Stack.Unpack ( unpackCmd ) import Stack.Update ( updateCmd ) import Stack.Upgrade ( upgradeCmd ) import Stack.Upload ( uploadCmd ) import qualified System.Directory as D import System.Environment ( getProgName, withArgs ) import System.FilePath ( pathSeparator, takeDirectory ) -- | Type representing \'pretty\' exceptions thrown by functions in the -- "Stack.CLI" module. data CliPrettyException = NoArgumentsBug deriving (Show, Typeable) instance Pretty CliPrettyException where pretty NoArgumentsBug = bugPrettyReport "[S-4639]" $ flow "commandLineHandler: no command line arguments on event of failure." instance Exception CliPrettyException -- | Stack's command line handler. commandLineHandler :: FilePath -> String -> Bool -> IO (GlobalOptsMonoid, RIO Runner ()) commandLineHandler currentDir progName isInterpreter = -- Append the relevant default (potentially affecting the LogLevel) *after* -- appending the global options of the `stack` command to the global options -- of the subcommand - see #5326. first (<> defaultGlobalOpts) <$> complicatedOptions stackVersion (Just versionString') hpackVersion "stack - The Haskell Tool Stack" "" ("Stack's documentation is available at https://docs.haskellstack.org/. 
\ \Command '" <> progName <> " COMMAND --help' for help about a Stack command. Stack also \ \supports the Haskell Error Index at https://errors.haskell.org/.") (globalOpts OuterGlobalOpts) (Just failureCallback) addCommands where defaultGlobalOpts = if isInterpreter then -- Silent except when errors occur - see #2879 mempty { logLevel = First (Just LevelError) } else mempty failureCallback f args = case L.stripPrefix "Invalid argument" (fst (renderFailure f "")) of Just _ -> maybe (prettyThrowIO NoArgumentsBug) ( \args' -> if isInterpreter then parseResultHandler (NE.toList args') f else secondaryCommandHandler args' f >>= interpreterHandler currentDir args' ) (NE.nonEmpty args) Nothing -> parseResultHandler args f parseResultHandler args f = if isInterpreter then do let hlp = errorHelp $ stringChunk (unwords ["Error executing interpreter command:" , progName , unwords args]) handleParseResult (overFailure (vcatErrorHelp hlp) (Failure f)) else handleParseResult (Failure f) -- The order of commands below determines the order in which they are listed -- in `stack --help`. addCommands = do unless isInterpreter $ do build install uninstall test bench haddock new templates init setup path ls unpack update upgrade upload sdist dot ghc hoogle -- These are the only commands allowed in interpreter mode as well exec run ghci repl runghc runhaskell script unless isInterpreter $ do eval clean purge query list ide docker config hpc -- Stack's subcommands are listed below in alphabetical order bench = addBuildCommand' "bench" "Shortcut for 'build --bench'." buildCmd (buildOptsParser Bench) build = addBuildCommand' "build" "Build the package(s) in this directory/configuration." buildCmd (buildOptsParser Build) clean = addCommand' "clean" "Delete build artefacts for the project packages." cleanCmd (cleanOptsParser Clean) config = addSubCommands' ConfigCmd.cfgCmdName "Subcommands for accessing and modifying configuration values." 
( do addCommand' ConfigCmd.cfgCmdSetName "Sets a key in YAML configuration file to value." (withConfig NoReexec . cfgCmdSet) configCmdSetParser addCommand' ConfigCmd.cfgCmdEnvName "Print environment variables for use in a shell." (withConfig YesReexec . withDefaultEnvConfig . cfgCmdEnv) configCmdEnvParser ) docker = addSubCommands' dockerCmdName "Subcommands specific to Docker use." ( do addCommand' dockerPullCmdName "Pull latest version of Docker image from registry." dockerPullCmd (pure ()) addCommand' "reset" "Reset the Docker sandbox." dockerResetCmd ( switch ( long "keep-home" <> help "Do not delete sandbox's home directory." ) ) ) dot = addCommand' "dot" "Visualize your project's dependency graph using Graphviz dot." dotCmd (dotOptsParser False) -- Default for --external is False. eval = addCommand' "eval" "Evaluate some Haskell code inline. Shortcut for \ \'stack exec ghc -- -e CODE'." evalCmd (evalOptsParser "CODE") exec = addCommand' "exec" "Execute a command. If the command is absent, the first of any arguments \ \is taken as the command." execCmd (execOptsParser Nothing) ghc = addCommand' "ghc" "Run ghc." execCmd (execOptsParser $ Just ExecGhc) ghci = addGhciCommand' "ghci" "Run ghci in the context of package(s)." ghciCmd ghciOptsParser haddock = addBuildCommand' "haddock" "Shortcut for 'build --haddock'." buildCmd (buildOptsParser Haddock) hoogle = addCommand' "hoogle" "Run hoogle, the Haskell API search engine. Use the '-- ARGUMENT(S)' \ \syntax to pass Hoogle arguments, e.g. 'stack hoogle -- --count=20', \ \or 'stack hoogle -- server --local'." hoogleCmd ( (,,,) <$> many (strArgument ( metavar "-- ARGUMENT(S) (e.g. 'stack hoogle -- server --local')" )) <*> boolFlags True "setup" "If needed: install Hoogle, build Haddock documentation and \ \generate a Hoogle database." idm <*> switch ( long "rebuild" <> help "Rebuild the Hoogle database." ) <*> switch ( long "server" <> help "Start local Hoogle server." 
) ) hpc = addSubCommands' "hpc" "Subcommands specific to Haskell Program Coverage." ( addCommand' "report" "Generate unified HPC coverage report from tix files and project \ \targets." hpcReportCmd hpcReportOptsParser ) ide = addSubCommands' "ide" "IDE-specific commands." ( let outputFlag = flag OutputLogInfo OutputStdout ( long "stdout" <> help "Send output to the standard output stream instead of the \ \default, the standard error stream." ) cabalFileFlag = flag ListPackageNames ListPackageCabalFiles ( long "cabal-files" <> help "Print paths to package Cabal files instead of package \ \names." ) exeFlag = switch ( long "exes" <> help "Include executables." ) testFlag = switch ( long "tests" <> help "Include test suites." ) benchFlag = switch ( long "benchmarks" <> help "Include benchmarks." ) in do addCommand' "packages" "List all available local loadable packages." idePackagesCmd ((,) <$> outputFlag <*> cabalFileFlag) addCommand' "targets" "List all targets or pick component types to list." ideTargetsCmd ( (,) <$> ((,,) <$> exeFlag <*> testFlag <*> benchFlag) <*> outputFlag ) ) init = addCommand' "init" "Create Stack project configuration from Cabal or Hpack package \ \specifications." initCmd initOptsParser install = addBuildCommand' "install" "Shortcut for 'build --copy-bins'." buildCmd (buildOptsParser Install) list = addCommand' "list" "List package id's in snapshot (experimental)." listCmd (many $ strArgument $ metavar "PACKAGE") ls = addCommand' "ls" "List command. (Supports snapshots, dependencies, Stack's styles and \ \installed tools.)" lsCmd lsOptsParser new = addCommand' "new" "Create a new project from a template. Run 'stack templates' to see \ \available templates. Will also initialise if there is no stack.yaml \ \file. Note: you can also specify a local file or a remote URL as a \ \template; or force an initialisation." newCmd newOptsParser path = addCommand' "path" "Print out handy path information." 
Stack.Path.path pathParser purge = addCommand' "purge" "Delete the project Stack working directories (.stack-work by \ \default). Shortcut for 'stack clean --full'." cleanCmd (cleanOptsParser Purge) query = addCommand' "query" "Query general build information (experimental)." queryCmd (many $ strArgument $ metavar "SELECTOR...") repl = addGhciCommand' "repl" "Run ghci in the context of package(s) (alias for 'ghci')." ghciCmd ghciOptsParser run = addCommand' "run" "Build and run an executable. Defaults to the first available \ \executable if none is provided as the first argument." execCmd (execOptsParser $ Just ExecRun) runghc = addCommand' "runghc" "Run runghc." execCmd (execOptsParser $ Just ExecRunGhc) runhaskell = addCommand' "runhaskell" "Run runghc (alias for 'runghc')." execCmd (execOptsParser $ Just ExecRunGhc) script = addCommand "script" "Run a Stack script." globalFooter scriptCmd (\so gom -> gom { resolverRoot = First $ Just $ takeDirectory so.file }) (globalOpts OtherCmdGlobalOpts) scriptOptsParser sdist = addCommand' "sdist" "Create source distribution tarballs." sdistCmd sdistOptsParser setup = addCommand' "setup" "Get the appropriate GHC for your project." setupCmd setupOptsParser templates = addCommand' "templates" "Show how to find templates available for 'stack new'. 'stack new' \ \can accept a template from a remote repository (default: github), \ \local file or remote URL. Note: this downloads the help file." templatesCmd (pure ()) test = addBuildCommand' "test" "Shortcut for 'build --test'." buildCmd (buildOptsParser Test) uninstall = addCommand' "uninstall" "Show how to uninstall Stack or a Stack-supplied tool. This command does \ \not itself uninstall Stack or a Stack-supplied tool." uninstallCmd (pure ()) unpack = addCommand' "unpack" "Unpack one or more packages, or one or more package candidates, locally." unpackCmd unpackOptsParser update = addCommand' "update" "Update the package index." 
updateCmd (pure ()) upgrade = addCommand'' "upgrade" "Upgrade Stack, installing to Stack's local-bin directory and, if \ \different and permitted, the directory of the current Stack \ \executable." upgradeCmd "Warning: if you use GHCup to install Stack, use only GHCup to \ \upgrade Stack." (upgradeOptsParser onlyLocalBins) where onlyLocalBins = (lowercase progName /= lowercase stackProgName) && not ( osIsWindows && lowercase progName == lowercase (stackProgName <> ".EXE") ) lowercase = map toLower upload = addCommand' "upload" "Upload one or more packages, or documentation for one or more packages, \ \to Hackage." uploadCmd uploadOptsParser -- addCommand hiding global options addCommand' :: String -> String -> (a -> RIO Runner ()) -> Parser a -> AddCommand addCommand' cmd title constr = addCommand cmd title globalFooter constr (\_ gom -> gom) (globalOpts OtherCmdGlobalOpts) -- addCommand with custom footer hiding global options addCommand'' :: String -> String -> (a -> RIO Runner ()) -> String -> Parser a -> AddCommand addCommand'' cmd title constr cmdFooter = addCommand cmd title (globalFooter <> " " <> cmdFooter) constr (\_ gom -> gom) (globalOpts OtherCmdGlobalOpts) addSubCommands' :: String -> String -> AddCommand -> AddCommand addSubCommands' cmd title = addSubCommands cmd title globalFooter (globalOpts OtherCmdGlobalOpts) -- Additional helper that hides global options and shows build options addBuildCommand' :: String -> String -> (a -> RIO Runner ()) -> Parser a -> AddCommand addBuildCommand' cmd title constr = addCommand cmd title globalFooter constr (\_ gom -> gom) (globalOpts BuildCmdGlobalOpts) -- Additional helper that hides global options and shows some ghci options addGhciCommand' :: String -> String -> (a -> RIO Runner ()) -> Parser a -> AddCommand addGhciCommand' cmd title constr = addCommand cmd title globalFooter constr (\_ gom -> gom) (globalOpts GhciCmdGlobalOpts) globalOpts :: GlobalOptsContext -> Parser GlobalOptsMonoid globalOpts kind = 
extraHelpOption hide progName (dockerCmdName ++ "*") dockerHelpOptName <*> extraHelpOption hide progName (Nix.nixCmdName ++ "*") Nix.nixHelpOptName <*> globalOptsParser currentDir kind where hide = kind /= OuterGlobalOpts -- | fall-through to external executables in `git` style if they exist -- (i.e. `stack something` looks for `stack-something` before -- failing with "Invalid argument `something'") secondaryCommandHandler :: NonEmpty String -> ParserFailure ParserHelp -> IO (ParserFailure ParserHelp) secondaryCommandHandler args f = -- don't even try when the argument looks like a path or flag if elem pathSeparator cmd || "-" `L.isPrefixOf` NE.head args then pure f else do mExternalExec <- D.findExecutable cmd case mExternalExec of Just ex -> withProcessContextNoLogging $ do -- TODO show the command in verbose mode -- hPutStrLn stderr $ unwords $ -- ["Running", "[" ++ ex, unwords (tail args) ++ "]"] _ <- RIO.Process.exec ex (NE.tail args) pure f Nothing -> pure $ fmap (vcatErrorHelp (noSuchCmd cmd)) f where -- FIXME this is broken when any options are specified before the command -- e.g. stack --verbosity silent cmd cmd = stackProgName <> "-" <> NE.head args noSuchCmd name = errorHelp $ stringChunk ("Auxiliary command not found in path '" ++ name ++ "'.") interpreterHandler :: Monoid t => FilePath -> NonEmpty String -> ParserFailure ParserHelp -> IO (GlobalOptsMonoid, (RIO Runner (), t)) interpreterHandler currentDir args f = do -- args can include top-level config such as --extra-lib-dirs=... (set by -- nix-shell) - we need to find the first argument which is a file, everything -- afterwards is an argument to the script, everything before is an argument -- to Stack (stackArgs, fileArgs) <- spanM (fmap not . 
D.doesFileExist) args case fileArgs of (file:fileArgs') -> runInterpreterCommand file stackArgs fileArgs' [] -> parseResultHandler (errorCombine (noSuchFile firstArg)) where firstArg = NE.head args spanM p xs@(x :| rest) = do r <- p x if r then case rest of [] -> pure ([x], []) (x': rest') -> do (ys, zs) <- spanM p (x' :| rest') pure (x : ys, zs) else pure ([], NE.toList xs) -- if the first argument contains a path separator then it might be a file, -- or a Stack option referencing a file. In that case we only show the -- interpreter error message and exclude the command related error messages. errorCombine = if pathSeparator `elem` firstArg then overrideErrorHelp else vcatErrorHelp overrideErrorHelp h1 h2 = h2 { helpError = helpError h1 } parseResultHandler fn = handleParseResult (overFailure fn (Failure f)) noSuchFile name = errorHelp $ stringChunk ("File does not exist or is not a regular file '" ++ name ++ "'.") runInterpreterCommand path stackArgs fileArgs = do progName <- getProgName iargs <- getInterpreterArgs path let parseCmdLine = commandLineHandler currentDir progName True -- Implicit file arguments are put before other arguments that -- occur after "--". See #3658 cmdArgs = prependList stackArgs $ case NE.break (== "--") iargs of (beforeSep, []) -> prependList beforeSep $ "--" <| path :| fileArgs (beforeSep, optSep : afterSep) -> prependList beforeSep $ optSep <| path :| fileArgs <> afterSep -- TODO show the command in verbose mode -- hPutStrLn stderr $ unwords $ -- ["Running", "[" ++ progName, unwords cmdArgs ++ "]"] (a,b) <- withArgs (NE.toList cmdArgs) parseCmdLine pure (a,(b,mempty)) -- Vertically combine only the error component of the first argument with the -- error component of the second. 
vcatErrorHelp :: ParserHelp -> ParserHelp -> ParserHelp vcatErrorHelp h1 h2 = h2 { helpError = vcatChunks [helpError h2, helpError h1] } stack-2.15.7/src/Stack/Clean.hs0000644000000000000000000000742714620153445014331 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Types and functions related to Stack's @clean@ and @purge@ commands. module Stack.Clean ( CleanOpts (..) , CleanCommand (..) , cleanCmd , clean ) where import Data.List ( (\\), intercalate ) import qualified Data.Map.Strict as Map import Path.IO ( ignoringAbsence, removeDirRecur ) import Stack.Config ( withBuildConfig ) import Stack.Constants.Config ( rootDistDirFromDir, workDirFromDir ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig ) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..), getProjectWorkDir ) import Stack.Types.Config ( Config ) import Stack.Types.Runner ( Runner ) import Stack.Types.SourceMap ( SMWanted (..), ppRoot ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Clean" module. data CleanException = NonLocalPackages [PackageName] | DeletionFailures [(Path Abs Dir, SomeException)] deriving (Show, Typeable) instance Exception CleanException where displayException (NonLocalPackages pkgs) = concat [ "Error: [S-9463]\n" , "The following packages are not part of this project: " , intercalate ", " (map show pkgs) ] displayException (DeletionFailures failures) = concat [ "Error: [S-6321]\n" , "Exception while recursively deleting:\n" , concatMap (\(dir, e) -> toFilePath dir <> "\n" <> displayException e <> "\n") failures , "Perhaps you do not have permission to delete these files or they are in \ \use?" ] -- | Type representing command line options for the @stack clean@ command. 
data CleanOpts = CleanShallow [PackageName] -- ^ Delete the "dist directories" as defined in -- 'Stack.Constants.Config.distRelativeDir' for the given local packages. If -- no packages are given, all project packages should be cleaned. | CleanFull -- ^ Delete all work directories in the project. -- | Type representing Stack's cleaning commands. data CleanCommand = Clean | Purge -- | Function underlying the @stack clean@ command. cleanCmd :: CleanOpts -> RIO Runner () cleanCmd = withConfig NoReexec . clean -- | Deletes build artifacts in the current project. clean :: CleanOpts -> RIO Config () clean cleanOpts = do toDelete <- withBuildConfig $ dirsToDelete cleanOpts logDebug $ "Need to delete: " <> fromString (show (map toFilePath toDelete)) failures <- catMaybes <$> mapM cleanDir toDelete case failures of [] -> pure () _ -> throwIO $ DeletionFailures failures cleanDir :: Path Abs Dir -> RIO Config (Maybe (Path Abs Dir, SomeException)) cleanDir dir = do logDebug $ "Deleting directory: " <> fromString (toFilePath dir) liftIO (ignoringAbsence (removeDirRecur dir) >> pure Nothing) `catchAny` \ex -> pure $ Just (dir, ex) dirsToDelete :: CleanOpts -> RIO BuildConfig [Path Abs Dir] dirsToDelete cleanOpts = do packages <- view $ buildConfigL . to (.smWanted.project) case cleanOpts of CleanShallow [] -> -- Filter out packages listed as extra-deps mapM (rootDistDirFromDir . ppRoot) $ Map.elems packages CleanShallow targets -> do let localPkgNames = Map.keys packages getPkgDir pkgName' = fmap ppRoot (Map.lookup pkgName' packages) case targets \\ localPkgNames of [] -> mapM rootDistDirFromDir (mapMaybe getPkgDir targets) xs -> throwM (NonLocalPackages xs) CleanFull -> do pkgWorkDirs <- mapM (workDirFromDir . 
ppRoot) $ Map.elems packages projectWorkDir <- getProjectWorkDir pure (projectWorkDir : pkgWorkDirs) stack-2.15.7/src/Stack/Component.hs0000644000000000000000000002124014604306201015226 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DisambiguateRecordFields #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} -- | All utility functions for Components in Stack (library, internal library, -- foreign library, executable, tests, benchmarks). In particular, this module -- gathers all the Cabal-to-Stack component translations, which previously -- occurred in the "Stack.Package" module. See "Stack.Types.Component" for more -- details about the design choices. module Stack.Component ( isComponentBuildable , stackLibraryFromCabal , stackExecutableFromCabal , stackForeignLibraryFromCabal , stackBenchmarkFromCabal , stackTestFromCabal , foldOnNameAndBuildInfo , stackUnqualToQual , componentDependencyMap , fromCabalName ) where import qualified Data.Map as Map import qualified Data.Set as Set import Data.Text ( pack ) import Distribution.PackageDescription ( Benchmark (..), Executable, ForeignLib, Library (..) , TestSuite (..) ) import Distribution.Types.BuildInfo ( BuildInfo ) import Distribution.Package ( mkPackageName ) import qualified Distribution.PackageDescription as Cabal import GHC.Records ( HasField ) import Stack.Prelude import Stack.Types.Component ( HasBuildInfo, StackBenchmark (..), StackBuildInfo (..) , StackExecutable (..), StackForeignLibrary (..) , StackLibrary (..), StackTestSuite (..) , StackUnqualCompName (..) 
) import Stack.Types.ComponentUtils ( fromCabalName ) import Stack.Types.Dependency ( cabalExeToStackDep, cabalToStackDep ) import Stack.Types.NamedComponent ( NamedComponent ) stackUnqualToQual :: (Text -> NamedComponent) -> StackUnqualCompName -> NamedComponent stackUnqualToQual c (StackUnqualCompName n) = c n foldOnNameAndBuildInfo :: ( HasField "buildInfo" a StackBuildInfo , HasField "name" a StackUnqualCompName , Foldable c ) => c a -> (StackUnqualCompName -> StackBuildInfo -> t -> t) -> t -> t foldOnNameAndBuildInfo initialCollection accumulator input = foldr' iterator input initialCollection where iterator comp = accumulator comp.name comp.buildInfo stackLibraryFromCabal :: Library -> StackLibrary stackLibraryFromCabal cabalLib = StackLibrary { name = case cabalLib.libName of LMainLibName -> StackUnqualCompName mempty LSubLibName v -> fromCabalName v , buildInfo = stackBuildInfoFromCabal cabalLib.libBuildInfo , exposedModules = cabalLib.exposedModules } stackExecutableFromCabal :: Executable -> StackExecutable stackExecutableFromCabal cabalExecutable = StackExecutable { name = fromCabalName cabalExecutable.exeName , buildInfo = stackBuildInfoFromCabal cabalExecutable.buildInfo , modulePath = cabalExecutable.modulePath } stackForeignLibraryFromCabal :: ForeignLib -> StackForeignLibrary stackForeignLibraryFromCabal cabalForeignLib = StackForeignLibrary { name = fromCabalName cabalForeignLib.foreignLibName , buildInfo=stackBuildInfoFromCabal cabalForeignLib.foreignLibBuildInfo } stackBenchmarkFromCabal :: Benchmark -> StackBenchmark stackBenchmarkFromCabal cabalBenchmark = StackBenchmark { name = fromCabalName cabalBenchmark.benchmarkName , interface = cabalBenchmark.benchmarkInterface , buildInfo = stackBuildInfoFromCabal cabalBenchmark.benchmarkBuildInfo } stackTestFromCabal :: TestSuite -> StackTestSuite stackTestFromCabal cabalTest = StackTestSuite { name = fromCabalName cabalTest.testName , interface = cabalTest.testInterface , buildInfo = 
stackBuildInfoFromCabal cabalTest.testBuildInfo } isComponentBuildable :: HasBuildInfo component => component -> Bool isComponentBuildable componentRec = componentRec.buildInfo.buildable stackBuildInfoFromCabal :: BuildInfo -> StackBuildInfo stackBuildInfoFromCabal buildInfoV = gatherComponentToolsAndDepsFromCabal buildInfoV.buildTools buildInfoV.buildToolDepends buildInfoV.targetBuildDepends StackBuildInfo { buildable = buildInfoV.buildable , otherModules = buildInfoV.otherModules , jsSources = buildInfoV.jsSources , hsSourceDirs = buildInfoV.hsSourceDirs , cSources = buildInfoV.cSources , dependency = mempty , unknownTools = mempty , cppOptions = buildInfoV.cppOptions , targetBuildDepends = buildInfoV.targetBuildDepends , options = buildInfoV.options , allLanguages = Cabal.allLanguages buildInfoV , usedExtensions = Cabal.usedExtensions buildInfoV , includeDirs = buildInfoV.includeDirs , extraLibs = buildInfoV.extraLibs , extraLibDirs = buildInfoV.extraLibDirs , frameworks = buildInfoV.frameworks } -- | Iterate on all three dependency list given, and transform and sort them -- between 'sbiUnknownTools' and legitimate 'DepValue' sbiDependency. Bear in -- mind that this only gathers the component level dependencies. gatherComponentToolsAndDepsFromCabal :: [Cabal.LegacyExeDependency] -- ^ Legacy build tools dependency from -- 'Distribution.Types.BuildInfo.buildTools'. -> [Cabal.ExeDependency] -- ^ Build tools dependency from -- `Distribution.Types.BuildInfo.buildToolDepends'. -> [Cabal.Dependency] -- ^ Cabal-syntax defines -- 'Distribution.Types.BuildInfo.targetBuildDepends'. These are the -- simplest dependencies for a component extracted from the Cabal file such -- as: -- @ -- build-depends: -- foo ^>= 1.2.3.4, -- bar ^>= 1 -- @ -> StackBuildInfo -> StackBuildInfo gatherComponentToolsAndDepsFromCabal legacyBuildTools buildTools targetDeps = gatherTargetDependency . gatherToolsDependency . 
gatherUnknownTools where gatherUnknownTools sbi = foldl' processLegacyExeDepency sbi legacyBuildTools gatherToolsDependency sbi = foldl' processExeDependency sbi buildTools gatherTargetDependency sbi = foldl' processDependency sbi targetDeps -- This is similar to Cabal's -- 'Distribution.Simple.BuildToolDepends.desugarBuildTool', however it uses -- our own hard-coded map which drops tools shipped with GHC (like hsc2hs), -- and includes some tools from Stackage. processLegacyExeDepency sbi (Cabal.LegacyExeDependency exeName range) = case isKnownLegacyExe exeName of Just pName -> processExeDependency sbi (Cabal.ExeDependency pName (Cabal.mkUnqualComponentName exeName) range) Nothing -> sbi { unknownTools = Set.insert (pack exeName) sbi.unknownTools } processExeDependency sbi exeDep@(Cabal.ExeDependency pName _ _) | isPreInstalledPackages pName = sbi | otherwise = sbi { dependency = Map.insert pName (cabalExeToStackDep exeDep) sbi.dependency } processDependency sbi dep@(Cabal.Dependency pName _ _) = sbi { dependency = Map.insert pName (cabalToStackDep dep) sbi.dependency } componentDependencyMap :: (HasField "buildInfo" r1 r2, HasField "dependency" r2 a) => r1 -> a componentDependencyMap component = component.buildInfo.dependency -- | A hard-coded map for tool dependencies. If a dependency is within this map -- it's considered "known" (the exe will be found at the execution stage). The -- corresponding Cabal function is -- 'Distribution.Simple.BuildToolDepends.desugarBuildTool'. 
isKnownLegacyExe :: String -> Maybe PackageName isKnownLegacyExe input = case input of "alex" -> justPck "alex" "happy" -> justPck "happy" "cpphs" -> justPck "cpphs" "greencard" -> justPck "greencard" "c2hs" -> justPck "c2hs" "hscolour" -> justPck "hscolour" "hspec-discover" -> justPck "hspec-discover" "hsx2hs" -> justPck "hsx2hs" "gtk2hsC2hs" -> justPck "gtk2hs-buildtools" "gtk2hsHookGenerator" -> justPck "gtk2hs-buildtools" "gtk2hsTypeGen" -> justPck "gtk2hs-buildtools" _ -> Nothing where justPck = Just . mkPackageName -- | Executable-only packages which come pre-installed with GHC and do not need -- to be built. Without this exception, we would either end up unnecessarily -- rebuilding these packages, or failing because the packages do not appear in -- the Stackage snapshot. isPreInstalledPackages :: PackageName -> Bool isPreInstalledPackages input = case input of "hsc2hs" -> True "haddock" -> True _ -> False stack-2.15.7/src/Stack/ComponentFile.hs0000644000000000000000000005524514620153445016052 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | A module which exports all component-level file-gathering logic. It also -- includes utility functions for handling paths and directories. module Stack.ComponentFile ( resolveOrWarn , componentOutputDir , componentBuildDir , packageAutogenDir , buildDir , componentAutogenDir , ComponentFile (..) , stackLibraryFiles , stackExecutableFiles , stackTestSuiteFiles , stackBenchmarkFiles ) where import Control.Exception ( throw ) import Data.Foldable ( foldrM ) import Data.List ( find, isPrefixOf ) import qualified Data.Map.Strict as M import qualified Data.Set as S import qualified Data.Text as T import Distribution.ModuleName ( ModuleName ) import qualified Distribution.ModuleName as Cabal import Distribution.PackageDescription ( BenchmarkInterface (..), TestSuiteInterface (..) 
) import Distribution.Text ( display ) import Distribution.Utils.Path ( PackageDir, SourceDir, SymbolicPath, getSymbolicPath ) import Distribution.Version ( mkVersion ) import GHC.Records ( HasField ) import qualified HiFileParser as Iface import Path ( (), filename, isProperPrefixOf, parent, parseRelDir , stripProperPrefix ) import Path.Extra ( forgivingResolveDir, forgivingResolveFile , parseCollapsedAbsFile, rejectMissingDir, rejectMissingFile ) import Path.IO ( doesDirExist, doesFileExist, getCurrentDir, listDir ) import Stack.Constants ( haskellDefaultPreprocessorExts, haskellFileExts , relDirAutogen, relDirBuild, relDirGlobalAutogen ) import Stack.Prelude hiding ( Display (..) ) import Stack.Types.Component ( StackBenchmark (..), StackBuildInfo (..) , StackExecutable (..), StackLibrary (..) , StackTestSuite (..), StackUnqualCompName (..) ) import Stack.Types.Config ( Config (..), HasConfig (..), prettyStackDevL ) import Stack.Types.NamedComponent ( NamedComponent (..) ) import Stack.Types.Package ( PackageException (..), dotCabalModule ) import Stack.Types.PackageFile ( GetPackageFileContext (..), DotCabalDescriptor (..) , DotCabalPath (..), PackageWarning (..) ) import qualified System.Directory as D ( doesFileExist ) import qualified System.FilePath as FilePath data ComponentFile = ComponentFile { moduleFileMap :: !(Map ModuleName (Path Abs File)) , otherFile :: ![DotCabalPath] , packageWarning :: ![PackageWarning] } -- | Get all files referenced by the benchmark. stackBenchmarkFiles :: StackBenchmark -> RIO GetPackageFileContext (NamedComponent, ComponentFile) stackBenchmarkFiles bench = resolveComponentFiles (CBench bench.name.unqualCompToText) build names where names = bnames <> exposed exposed = case bench.interface of BenchmarkExeV10 _ fp -> [DotCabalMain fp] BenchmarkUnsupported _ -> [] bnames = map DotCabalModule build.otherModules build = bench.buildInfo -- | Get all files referenced by the test. 
stackTestSuiteFiles :: StackTestSuite -> RIO GetPackageFileContext (NamedComponent, ComponentFile) stackTestSuiteFiles test = resolveComponentFiles (CTest test.name.unqualCompToText) build names where names = bnames <> exposed exposed = case test.interface of TestSuiteExeV10 _ fp -> [DotCabalMain fp] TestSuiteLibV09 _ mn -> [DotCabalModule mn] TestSuiteUnsupported _ -> [] bnames = map DotCabalModule build.otherModules build = test.buildInfo -- | Get all files referenced by the executable. stackExecutableFiles :: StackExecutable -> RIO GetPackageFileContext (NamedComponent, ComponentFile) stackExecutableFiles exe = resolveComponentFiles (CExe exe.name.unqualCompToText) build names where build = exe.buildInfo names = map DotCabalModule build.otherModules ++ [DotCabalMain exe.modulePath] -- | Get all files referenced by the library. Handle all libraries (CLib and -- SubLib), based on empty name or not. stackLibraryFiles :: StackLibrary -> RIO GetPackageFileContext (NamedComponent, ComponentFile) stackLibraryFiles lib = resolveComponentFiles componentName build names where componentRawName = lib.name.unqualCompToText componentName | componentRawName == mempty = CLib | otherwise = CSubLib componentRawName build = lib.buildInfo names = bnames ++ exposed exposed = map DotCabalModule lib.exposedModules bnames = map DotCabalModule build.otherModules -- | Get all files referenced by the component. resolveComponentFiles :: ( CAndJsSources rec , HasField "hsSourceDirs" rec [SymbolicPath PackageDir SourceDir] ) => NamedComponent -> rec -> [DotCabalDescriptor] -> RIO GetPackageFileContext (NamedComponent, ComponentFile) resolveComponentFiles component build names = do dirs <- mapMaybeM (resolveDirOrWarn . getSymbolicPath) build.hsSourceDirs dir <- asks (parent . 
(.file)) agdirs <- autogenDirs (modules,files,warnings) <- resolveFilesAndDeps component ((if null dirs then [dir] else dirs) ++ agdirs) names cfiles <- buildOtherSources build pure (component, ComponentFile modules (files <> cfiles) warnings) where autogenDirs = do cabalVer <- asks (.cabalVer) distDir <- asks (.distDir) let compDir = componentAutogenDir cabalVer component distDir pkgDir = maybeToList $ packageAutogenDir cabalVer distDir filterM doesDirExist $ compDir : pkgDir -- | Try to resolve the list of base names in the given directory by looking for -- unique instances of base names applied with the given extensions, plus find -- any of their module and TemplateHaskell dependencies. resolveFilesAndDeps :: NamedComponent -- ^ Package component name -> [Path Abs Dir] -- ^ Directories to look in. -> [DotCabalDescriptor] -- ^ Base names. -> RIO GetPackageFileContext (Map ModuleName (Path Abs File), [DotCabalPath], [PackageWarning]) resolveFilesAndDeps component dirs names0 = do (dotCabalPaths, foundModules, missingModules, _) <- loop names0 S.empty M.empty warnings <- liftM2 (++) (warnUnlisted foundModules) (warnMissing missingModules) pure (foundModules, dotCabalPaths, warnings) where loop :: [DotCabalDescriptor] -> Set ModuleName -> Map FilePath (Path Abs File) -- ^ Known file usages, where the file path has already been resolved. 
-> RIO GetPackageFileContext ( [DotCabalPath] , Map ModuleName (Path Abs File) , [ModuleName] , Map k a ) loop [] _ _ = pure ([], M.empty, [], M.empty) loop names doneModules0 knownUsages = do resolved <- resolveFiles dirs names let foundFiles = mapMaybe snd resolved foundModules = mapMaybe toResolvedModule resolved missingModules = mapMaybe toMissingModule resolved getDependenciesFold c (ps, ku) = do p <- getDependencies ku component dirs c pure (p : ps, ku <> snd p) (pairs, foundUsages) <- foldrM getDependenciesFold ([], knownUsages) foundFiles let doneModules = S.union doneModules0 (S.fromList (mapMaybe dotCabalModule names)) moduleDeps = S.unions (map fst pairs) thDepFiles = concatMap (M.elems . snd) pairs modulesRemaining = S.difference moduleDeps doneModules -- Ignore missing modules discovered as dependencies - they may -- have been deleted. (resolvedFiles, resolvedModules, _, foundUsages') <- loop (map DotCabalModule (S.toList modulesRemaining)) doneModules foundUsages pure ( nubOrd $ foundFiles <> map DotCabalFilePath thDepFiles <> resolvedFiles , M.union (M.fromList foundModules) resolvedModules , missingModules , foundUsages' ) warnUnlisted foundModules = do let unlistedModules = foundModules `M.difference` M.fromList (mapMaybe (fmap (, ()) . dotCabalModule) names0) pure $ [ UnlistedModulesWarning component (map fst (M.toList unlistedModules)) | not (M.null unlistedModules) ] warnMissing _missingModules = pure [] -- TODO: bring this back - see -- https://github.com/commercialhaskell/stack/issues/2649 {- cabalfp <- asks ctxFile pure $ if null missingModules then [] else [ MissingModulesWarning cabalfp component missingModules] -} -- TODO: In usages of toResolvedModule / toMissingModule, some sort -- of map + partition would probably be better. 
toResolvedModule :: (DotCabalDescriptor, Maybe DotCabalPath) -> Maybe (ModuleName, Path Abs File) toResolvedModule (DotCabalModule mn, Just (DotCabalModulePath fp)) = Just (mn, fp) toResolvedModule _ = Nothing toMissingModule :: (DotCabalDescriptor, Maybe DotCabalPath) -> Maybe ModuleName toMissingModule (DotCabalModule mn, Nothing) = Just mn toMissingModule _ = Nothing -- | Get the dependencies of a Haskell module file. getDependencies :: Map FilePath (Path Abs File) -- ^ Known file usages, where the file path has already been resolved. -> NamedComponent -> [Path Abs Dir] -> DotCabalPath -> RIO GetPackageFileContext (Set ModuleName, Map FilePath (Path Abs File)) getDependencies knownUsages component dirs dotCabalPath = case dotCabalPath of DotCabalModulePath resolvedFile -> readResolvedHi resolvedFile DotCabalMainPath resolvedFile -> readResolvedHi resolvedFile DotCabalFilePath{} -> pure (S.empty, M.empty) DotCabalCFilePath{} -> pure (S.empty, M.empty) where readResolvedHi resolvedFile = do dumpHIDir <- componentOutputDir component <$> asks (.distDir) dir <- asks (parent . (.file)) let sourceDir = fromMaybe dir $ find (`isProperPrefixOf` resolvedFile) dirs stripSourceDir d = stripProperPrefix d resolvedFile case stripSourceDir sourceDir of Nothing -> pure (S.empty, M.empty) Just fileRel -> do let hiPath = FilePath.replaceExtension (toFilePath (dumpHIDir fileRel)) ".hi" dumpHIExists <- liftIO $ D.doesFileExist hiPath if dumpHIExists then parseHI knownUsages hiPath else pure (S.empty, M.empty) -- | Parse a .hi file into a set of modules and files (a map from a given path -- to a file to the resolved absolute path to the file). parseHI :: Map FilePath (Path Abs File) -- ^ Known file usages, where the file path has already been resolved. -> FilePath -- ^ The path to the *.hi file to be parsed -> RIO GetPackageFileContext (Set ModuleName, Map FilePath (Path Abs File)) parseHI knownUsages hiPath = do dir <- asks (parent . 
(.file)) result <- liftIO $ catchAnyDeep (Iface.fromFile hiPath) (pure . Left . displayException) case result of Left msg -> do prettyStackDevL [ flow "Failed to decode module interface:" , style File $ fromString hiPath , flow "Decoding failure:" , style Error $ fromString msg ] pure (S.empty, M.empty) Right iface -> do let moduleNames = fmap (fromString . T.unpack . decodeUtf8Lenient . fst) . Iface.unList . Iface.dmods . Iface.deps resolveFileDependency file = case M.lookup file knownUsages of Just p -> pure $ Just (file, p) Nothing -> do resolved <- forgivingResolveFile dir file >>= rejectMissingFile when (isNothing resolved) $ prettyWarnL [ flow "Dependent file listed in:" , style File $ fromString hiPath , flow "does not exist:" , style File $ fromString file ] pure $ (file,) <$> resolved resolveUsages = traverse (resolveFileDependency . Iface.unUsage) . Iface.unList . Iface.usage resolvedUsages <- catMaybes <$> resolveUsages iface pure (S.fromList $ moduleNames iface, M.fromList resolvedUsages) -- | The directory where generated files are put like .o or .hs (from .x files). componentOutputDir :: NamedComponent -> Path Abs Dir -> Path Abs Dir componentOutputDir namedComponent distDir = case namedComponent of CLib -> buildDir distDir CSubLib name -> makeTmp name CFlib name -> makeTmp name CExe name -> makeTmp name CTest name -> makeTmp name CBench name -> makeTmp name where makeTmp name = buildDir distDir componentNameToDir (name <> "/" <> name <> "-tmp") -- | Try to resolve the list of base names in the given directory by -- looking for unique instances of base names applied with the given -- extensions. resolveFiles :: [Path Abs Dir] -- ^ Directories to look in. -> [DotCabalDescriptor] -- ^ Base names. 
-> RIO GetPackageFileContext [(DotCabalDescriptor, Maybe DotCabalPath)] resolveFiles dirs names = forM names (\name -> fmap (name, ) (findCandidate dirs name)) -- | Find a candidate for the given module-or-filename from the list -- of directories and given extensions. findCandidate :: [Path Abs Dir] -> DotCabalDescriptor -> RIO GetPackageFileContext (Maybe DotCabalPath) findCandidate dirs name = do pkg <- asks (.file) >>= parsePackageNameFromFilePath customPreprocessorExts <- view $ configL . to (.customPreprocessorExts) let haskellPreprocessorExts = haskellDefaultPreprocessorExts ++ customPreprocessorExts candidates <- liftIO $ makeNameCandidates haskellPreprocessorExts case candidates of [candidate] -> pure (Just (cons candidate)) [] -> do case name of DotCabalModule mn | display mn /= paths_pkg pkg -> logPossibilities dirs mn _ -> pure () pure Nothing (candidate:rest) -> do warnMultiple name candidate rest pure (Just (cons candidate)) where cons = case name of DotCabalModule{} -> DotCabalModulePath DotCabalMain{} -> DotCabalMainPath DotCabalFile{} -> DotCabalFilePath DotCabalCFile{} -> DotCabalCFilePath paths_pkg pkg = "Paths_" ++ packageNameString pkg makeNameCandidates haskellPreprocessorExts = fmap (nubOrd . concat) (mapM (makeDirCandidates haskellPreprocessorExts) dirs) makeDirCandidates :: [Text] -> Path Abs Dir -> IO [Path Abs File] makeDirCandidates haskellPreprocessorExts dir = case name of DotCabalMain fp -> resolveCandidate dir fp DotCabalFile fp -> resolveCandidate dir fp DotCabalCFile fp -> resolveCandidate dir fp DotCabalModule mn -> do let perExt ext = resolveCandidate dir (Cabal.toFilePath mn ++ "." 
++ T.unpack ext) withHaskellExts <- mapM perExt haskellFileExts withPPExts <- mapM perExt haskellPreprocessorExts pure $ case (concat withHaskellExts, concat withPPExts) of -- If we have exactly 1 Haskell extension and exactly -- 1 preprocessor extension, assume the former file is -- generated from the latter -- -- See https://github.com/commercialhaskell/stack/issues/4076 ([_], [y]) -> [y] -- Otherwise, return everything (xs, ys) -> xs ++ ys resolveCandidate dir = fmap maybeToList . resolveDirFile dir -- | Log that we couldn't find a candidate, but there are -- possibilities for custom preprocessor extensions. -- -- For example: .erb for a Ruby file might exist in one of the -- directories. logPossibilities :: HasTerm env => [Path Abs Dir] -> ModuleName -> RIO env () logPossibilities dirs mn = do possibilities <- fmap concat (makePossibilities mn) unless (null possibilities) $ prettyWarnL [ flow "Unable to find a known candidate for the Cabal entry" , (style Module . fromString $ display mn) <> "," , flow "but did find:" , line <> bulletedList (map pretty possibilities) , flow "If you are using a custom preprocessor for this module" , flow "with its own file extension, consider adding the extension" , flow "to the 'custom-preprocessor-extensions' field in stack.yaml." ] where makePossibilities name = mapM ( \dir -> do (_,files) <- listDir dir pure ( map filename ( filter (isPrefixOf (display name) . toFilePath . filename) files ) ) ) dirs type CAndJsSources rec = (HasField "cSources" rec [FilePath], HasField "jsSources" rec [FilePath]) -- | Get all C sources and extra source files in a build. buildOtherSources :: CAndJsSources rec => rec -> RIO GetPackageFileContext [DotCabalPath] buildOtherSources build = do cwd <- liftIO getCurrentDir dir <- asks (parent . 
(.file)) file <- asks (.file) let resolveDirFiles files toCabalPath = forMaybeM files $ \fp -> do result <- resolveDirFile dir fp case result of Nothing -> do warnMissingFile "File" cwd fp file pure Nothing Just p -> pure $ Just (toCabalPath p) csources <- resolveDirFiles build.cSources DotCabalCFilePath jsources <- resolveDirFiles build.jsSources DotCabalFilePath pure (csources <> jsources) -- | Resolve file as a child of a specified directory, symlinks -- don't get followed. resolveDirFile :: (MonadIO m, MonadThrow m) => Path Abs Dir -> FilePath.FilePath -> m (Maybe (Path Abs File)) resolveDirFile x y = do -- The standard canonicalizePath does not work for this case p <- parseCollapsedAbsFile (toFilePath x FilePath. y) exists <- doesFileExist p pure $ if exists then Just p else Nothing -- | Warn the user that multiple candidates are available for an -- entry, but that we picked one anyway and continued. warnMultiple :: DotCabalDescriptor -> Path b t -> [Path b t] -> RIO GetPackageFileContext () warnMultiple name candidate rest = -- TODO: figure out how to style 'name' and the dispOne stuff prettyWarnL [ flow "There were multiple candidates for the Cabal entry" , fromString . showName $ name , line <> bulletedList (map dispOne (candidate:rest)) , line <> flow "picking:" , dispOne candidate ] where showName (DotCabalModule name') = display name' showName (DotCabalMain fp) = fp showName (DotCabalFile fp) = fp showName (DotCabalCFile fp) = fp dispOne = fromString . toFilePath -- TODO: figure out why dispOne can't be just `display` -- (remove the .hlint.yaml exception if it can be) -- | Parse a package name from a file path. parsePackageNameFromFilePath :: MonadThrow m => Path a File -> m PackageName parsePackageNameFromFilePath fp = do base <- clean $ toFilePath $ filename fp case parsePackageName base of Nothing -> throwM $ CabalFileNameInvalidPackageName $ toFilePath fp Just x -> pure x where clean = fmap reverse . strip . 
reverse strip ('l':'a':'b':'a':'c':'.':xs) = pure xs strip _ = throwM (CabalFileNameParseFail (toFilePath fp)) -- | Resolve the directory, if it can't be resolved, warn for the user -- (purely to be helpful). resolveDirOrWarn :: FilePath.FilePath -> RIO GetPackageFileContext (Maybe (Path Abs Dir)) resolveDirOrWarn = resolveOrWarn "Directory" f where f p x = forgivingResolveDir p x >>= rejectMissingDir -- | Make the global autogen dir if Cabal version is new enough. packageAutogenDir :: Version -> Path Abs Dir -> Maybe (Path Abs Dir) packageAutogenDir cabalVer distDir | cabalVer < mkVersion [2, 0] = Nothing | otherwise = Just $ buildDir distDir relDirGlobalAutogen -- | Make the autogen dir. componentAutogenDir :: Version -> NamedComponent -> Path Abs Dir -> Path Abs Dir componentAutogenDir cabalVer component distDir = componentBuildDir cabalVer component distDir relDirAutogen -- | Make the build dir. Note that Cabal >= 2.0 uses the -- 'componentBuildDir' above for some things. buildDir :: Path Abs Dir -> Path Abs Dir buildDir distDir = distDir relDirBuild -- NOTE: don't export this, only use it for valid paths based on -- component names. 
componentNameToDir :: Text -> Path Rel Dir componentNameToDir name = fromMaybe (throw $ ComponentNotParsedBug sName) (parseRelDir sName) where sName = T.unpack name -- | See 'Distribution.Simple.LocalBuildInfo.componentBuildDir' componentBuildDir :: Version -> NamedComponent -> Path Abs Dir -> Path Abs Dir componentBuildDir cabalVer component distDir | cabalVer < mkVersion [2, 0] = buildDir distDir | otherwise = case component of CLib -> buildDir distDir CSubLib name -> buildDir distDir componentNameToDir name CFlib name -> buildDir distDir componentNameToDir name CExe name -> buildDir distDir componentNameToDir name CTest name -> buildDir distDir componentNameToDir name CBench name -> buildDir distDir componentNameToDir name -- Internal helper to define resolveFileOrWarn and resolveDirOrWarn resolveOrWarn :: Text -> (Path Abs Dir -> String -> RIO GetPackageFileContext (Maybe a)) -> FilePath.FilePath -> RIO GetPackageFileContext (Maybe a) resolveOrWarn subject resolver path = do cwd <- liftIO getCurrentDir file <- asks (.file) dir <- asks (parent . (.file)) result <- resolver dir path when (isNothing result) $ warnMissingFile subject cwd path file pure result warnMissingFile :: Text -> Path Abs Dir -> FilePath -> Path Abs File -> RIO GetPackageFileContext () warnMissingFile subject cwd path fromFile = prettyWarnL [ fromString . T.unpack $ subject -- TODO: needs style? , flow "listed in" , maybe (pretty fromFile) pretty (stripProperPrefix cwd fromFile) , flow "file does not exist:" , style Dir . fromString $ path ] stack-2.15.7/src/Stack/Config.hs0000644000000000000000000014325114620153445014510 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeFamilies #-} -- | The general Stack configuration that starts everything off. 
-- This should be smart to fallback if there is no stack.yaml, instead relying
-- on whatever files are available.
--
-- If there is no stack.yaml, and there is a cabal.config, we
-- read in those constraints, and if there's a cabal.sandbox.config,
-- we read any constraints from there and also find the package
-- database from there, etc. And if there's nothing, we should
-- probably default to behaving like cabal, possibly with spitting out
-- a warning that "you should run `stack init` to make things better".
module Stack.Config
  ( loadConfig
  , loadConfigYaml
  , packagesParser
  , getImplicitGlobalProjectDir
  , getSnapshots
  , makeConcreteResolver
  , checkOwnership
  , getInContainer
  , getInNixShell
  , defaultConfigYaml
  , getProjectConfig
  , withBuildConfig
  , withNewLogFunc
  , determineStackRootAndOwnership
  ) where

import Control.Monad.Extra ( firstJustM )
import Data.Aeson.Types ( Value )
import Data.Aeson.WarningParser ( WithJSONWarnings (..), logJSONWarnings )
import Data.Array.IArray ( (!), (//) )
import qualified Data.ByteString as S
import Data.ByteString.Builder ( byteString )
import Data.Coerce ( coerce )
import qualified Data.IntMap as IntMap
import qualified Data.Map as Map
import qualified Data.Map.Merge.Strict as MS
import qualified Data.Monoid
import Data.Monoid.Map ( MonoidMap (..) )
import qualified Data.Text as T
import qualified Data.Yaml as Yaml
import Distribution.System ( Arch (..), OS (..), Platform (..), buildPlatform )
import qualified Distribution.Text ( simpleParse )
import Distribution.Version ( simplifyVersionRange )
import GHC.Conc ( getNumProcessors )
import Network.HTTP.StackClient ( httpJSON, parseUrlThrow, getResponseBody )
import Options.Applicative ( Parser, help, long, metavar, strOption )
import Path
  ( PathException (..), (</>), parent, parseAbsDir, parseAbsFile, parseRelDir
  , stripProperPrefix
  )
import Path.Extra ( toFilePathNoTrailingSep )
import Path.Find ( findInParents )
import Path.IO
  ( XdgDirectory (..), canonicalizePath, doesDirExist, doesFileExist
  , ensureDir, forgivingAbsence, getAppUserDataDir, getCurrentDir, getXdgDir
  , resolveDir, resolveDir', resolveFile'
  )
import RIO.List ( unzip )
import RIO.Process
  ( HasProcessContext (..), ProcessContext, augmentPathMap, envVarsL
  , mkProcessContext
  )
import RIO.Time ( toGregorian )
import Stack.Build.Haddock ( shouldHaddockDeps )
import Stack.Config.Build ( buildOptsFromMonoid )
import Stack.Config.Docker ( dockerOptsFromMonoid )
import Stack.Config.Nix ( nixOptsFromMonoid )
import Stack.Constants
  ( defaultGlobalConfigPath, defaultGlobalConfigPathDeprecated
  , defaultUserConfigPath, defaultUserConfigPathDeprecated
  , implicitGlobalProjectDir, implicitGlobalProjectDirDeprecated
  , inContainerEnvVar, inNixShellEnvVar, osIsWindows, pantryRootEnvVar
  , platformVariantEnvVar, relDirBin, relDirStackWork, relFileReadmeTxt
  , relFileStorage, relDirPantry, relDirPrograms, relDirStackProgName
  , relDirUpperPrograms, stackDeveloperModeDefault, stackDotYaml, stackProgName
  , stackRootEnvVar, stackWorkEnvVar, stackXdgEnvVar
  )
import qualified Stack.Constants as Constants
import Stack.Lock ( lockCachedWanted )
import Stack.Prelude
import Stack.SourceMap
  ( additionalDepPackage, checkFlagsUsedThrowing, mkProjectPackage )
import Stack.Storage.Project ( initProjectStorage )
import Stack.Storage.User ( initUserStorage )
import Stack.Storage.Util ( handleMigrationException )
import Stack.Types.AllowNewerDeps ( AllowNewerDeps (..) )
import Stack.Types.ApplyGhcOptions ( ApplyGhcOptions (..) )
import Stack.Types.ApplyProgOptions ( ApplyProgOptions (..) )
import Stack.Types.Build.Exception ( BuildException (..) )
import Stack.Types.BuildConfig ( BuildConfig (..) )
import Stack.Types.BuildOpts ( BuildOpts (..) )
import Stack.Types.ColorWhen ( ColorWhen (..) )
import Stack.Types.Compiler ( defaultCompilerRepository )
import Stack.Types.Config
  ( Config (..), HasConfig (..), askLatestSnapshotUrl, configProjectRoot
  , stackRootL, workDirL
  )
import Stack.Types.Config.Exception
  ( ConfigException (..), ConfigPrettyException (..)
  , ParseAbsolutePathException (..), packageIndicesWarning
  )
import Stack.Types.ConfigMonoid ( ConfigMonoid (..), parseConfigMonoid )
import Stack.Types.Casa ( CasaOptsMonoid (..) )
import Stack.Types.Docker ( DockerOpts (..), DockerOptsMonoid (..) )
import Stack.Types.DumpLogs ( DumpLogs (..) )
import Stack.Types.GlobalOpts ( GlobalOpts (..) )
import Stack.Types.Nix ( NixOpts (..) )
import Stack.Types.Platform ( PlatformVariant (..), platformOnlyRelDir )
import Stack.Types.Project ( Project (..) )
import qualified Stack.Types.Project as Project ( Project (..) )
import Stack.Types.ProjectAndConfigMonoid
  ( ProjectAndConfigMonoid (..), parseProjectAndConfigMonoid )
import Stack.Types.ProjectConfig ( ProjectConfig (..) )
import Stack.Types.PvpBounds ( PvpBounds (..), PvpBoundsType (..) )
import Stack.Types.Resolver ( AbstractResolver (..), Snapshots (..) )
import Stack.Types.Runner
  ( HasRunner (..), Runner (..), globalOptsL, terminalL )
import Stack.Types.SourceMap
  ( CommonPackage (..), DepPackage (..), ProjectPackage (..), SMWanted (..) )
import Stack.Types.StackYamlLoc ( StackYamlLoc (..) )
import Stack.Types.UnusedFlags ( FlagSource (..) )
import Stack.Types.Version
  ( IntersectingVersionRange (..), VersionCheck (..)
  , stackVersion, withinRange )
import System.Console.ANSI ( hNowSupportsANSI, setSGRCode )
import System.Environment ( getEnvironment, lookupEnv )
import System.Info.ShortPathName ( getShortPathName )
import System.PosixCompat.Files ( fileOwner, getFileStatus )
import System.Posix.User ( getEffectiveUserID )

-- | If deprecated path exists, use it and print a warning. Otherwise, return
-- the new path.
tryDeprecatedPath ::
     HasTerm env
  => Maybe T.Text
     -- ^ Description of file for warning (if Nothing, no deprecation warning
     -- is displayed)
  -> (Path Abs a -> RIO env Bool)
     -- ^ Test for existence
  -> Path Abs a
     -- ^ New path
  -> Path Abs a
     -- ^ Deprecated path
  -> RIO env (Path Abs a, Bool)
     -- ^ (Path to use, whether it already exists)
tryDeprecatedPath mWarningDesc exists new old = do
  -- The new location always wins when something already exists there.
  newExists <- exists new
  if newExists
    then pure (new, True)
    else do
      oldExists <- exists old
      if oldExists
        then do
          -- Only the deprecated location exists: use it, optionally warning.
          case mWarningDesc of
            Nothing -> pure ()
            Just desc ->
              prettyWarnL
                [ flow "Location of"
                , flow (T.unpack desc)
                , "at"
                , style Dir (fromString $ toFilePath old)
                , flow "is deprecated; rename it to"
                , style Dir (fromString $ toFilePath new)
                , "instead."
                ]
          pure (old, True)
        -- Neither exists: report the new location as the one to create.
        else pure (new, False)

-- | Get the location of the implicit global project directory. If the
-- directory already exists at the deprecated location, its location is
-- returned. Otherwise, the new location is returned.
getImplicitGlobalProjectDir :: HasTerm env => Config -> RIO env (Path Abs Dir)
getImplicitGlobalProjectDir config =
  --TEST no warning printed
  fst <$> tryDeprecatedPath
    Nothing
    doesDirExist
    (implicitGlobalProjectDir stackRoot)
    (implicitGlobalProjectDirDeprecated stackRoot)
 where
  stackRoot = view stackRootL config

-- | Download the 'Snapshots' value from stackage.org.
getSnapshots :: HasConfig env => RIO env Snapshots
getSnapshots = do
  latestUrlText <- askLatestSnapshotUrl
  latestUrl <- parseUrlThrow (T.unpack latestUrlText)
  logDebug $ "Downloading snapshot versions file from " <> display latestUrlText
  result <- httpJSON latestUrl
  logDebug "Done downloading and parsing snapshot versions file"
  pure $ getResponseBody result

-- | Turn an 'AbstractResolver' into a 'Resolver'.
makeConcreteResolver ::
     HasConfig env
  => AbstractResolver
  -> RIO env RawSnapshotLocation
makeConcreteResolver (ARResolver r) = pure r
makeConcreteResolver ar = do
  r <- case ar of
    ARGlobal -> do
      -- Use the resolver of the implicit global project.
      config <- view configL
      implicitGlobalDir <- getImplicitGlobalProjectDir config
      let fp = implicitGlobalDir </> stackDotYaml
      iopc <- loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp
      ProjectAndConfigMonoid project _ <- liftIO iopc
      pure project.resolver
    ARLatestNightly ->
      RSLSynonym . Nightly . (.nightly) <$> getSnapshots
    ARLatestLTSMajor x -> do
      snapshots <- getSnapshots
      case IntMap.lookup x snapshots.lts of
        Nothing -> throwIO $ NoLTSWithMajorVersion x
        Just y -> pure $ RSLSynonym $ LTS x y
    ARLatestLTS -> do
      snapshots <- getSnapshots
      if IntMap.null snapshots.lts
        then throwIO NoLTSFound
        else let (x, y) = IntMap.findMax snapshots.lts
             in  pure $ RSLSynonym $ LTS x y
  prettyInfoL
    [ flow "Selected resolver:"
    , style Current (fromString $ T.unpack $ textDisplay r) <> "."
    ]
  pure r

-- | Get the latest snapshot resolver available.
getLatestResolver :: HasConfig env => RIO env RawSnapshotLocation
getLatestResolver = do
  snapshots <- getSnapshots
  -- Prefer the highest LTS; fall back to the latest Nightly.
  let mlts = uncurry LTS <$>
        listToMaybe (reverse (IntMap.toList snapshots.lts))
  pure $ RSLSynonym $ fromMaybe (Nightly snapshots.nightly) mlts

-- Interprets ConfigMonoid options.
configFromConfigMonoid ::
     (HasRunner env, HasTerm env)
  => Path Abs Dir -- ^ Stack root, e.g. ~/.stack
  -> Path Abs File -- ^ user config file path, e.g. ~/.stack/config.yaml
  -> Maybe AbstractResolver
  -> ProjectConfig (Project, Path Abs File)
  -> ConfigMonoid
  -> (Config -> RIO env a)
  -> RIO env a
configFromConfigMonoid
    stackRoot userConfigPath resolver project configMonoid inner = do
  -- If --stack-work is passed, prefer it. Otherwise, if STACK_WORK
  -- is set, use that. If neither, use the default ".stack-work"
  mstackWorkEnv <- liftIO $ lookupEnv stackWorkEnvVar
  let mproject =
        case project of
          PCProject pair -> Just pair
          PCGlobalProject -> Nothing
          PCNoProject _deps -> Nothing
      allowLocals =
        case project of
          PCProject _ -> True
          PCGlobalProject -> True
          PCNoProject _ -> False
  configWorkDir0 <-
    let parseStackWorkEnv x =
          catch
            (parseRelDir x)
            ( \e -> case e of
                InvalidRelDir _ -> prettyThrowIO $ StackWorkEnvNotRelativeDir x
                _ -> throwIO e
            )
    in  maybe (pure relDirStackWork) (liftIO . parseStackWorkEnv) mstackWorkEnv
  let workDir = fromFirst configWorkDir0 configMonoid.workDir
      -- The history of the URL below is as follows:
      --
      -- * Before Stack 1.3.0 it was
      --   https://www.stackage.org/download/snapshots.json.
      -- * From Stack 1.3.0 to 2.15.3 it was
      --   https://s3.amazonaws.com/haddock.stackage.org/snapshots.json. The
      --   change was made because S3 was expected to have greater uptime than
      --   stackage.org.
      -- * In early 2024, the Stackage project was handed over to the Haskell
      --   Foundation. Following that handover, the URL below was considered
      --   the most reliable source of the file in question.
      latestSnapshot = fromFirst
        "https://stackage-haddock.haskell.org/snapshots.json"
        configMonoid.latestSnapshot
      clConnectionCount = fromFirst 8 configMonoid.connectionCount
      hideTHLoading = fromFirstTrue configMonoid.hideTHLoading
      prefixTimestamps = fromFirst False configMonoid.prefixTimestamps
      ghcVariant = getFirst configMonoid.ghcVariant
      compilerRepository = fromFirst
        defaultCompilerRepository
        configMonoid.compilerRepository
      ghcBuild = getFirst configMonoid.ghcBuild
      installGHC = fromFirstTrue configMonoid.installGHC
      skipGHCCheck = fromFirstFalse configMonoid.skipGHCCheck
      skipMsys = fromFirstFalse configMonoid.skipMsys
      extraIncludeDirs = configMonoid.extraIncludeDirs
      extraLibDirs = configMonoid.extraLibDirs
      customPreprocessorExts = configMonoid.customPreprocessorExts
      overrideGccPath = getFirst configMonoid.overrideGccPath
      -- Only place in the codebase where platform is hard-coded. In theory in
      -- the future, allow it to be configured.
      (Platform defArch defOS) = buildPlatform
      arch = fromMaybe defArch
        $ getFirst configMonoid.arch >>= Distribution.Text.simpleParse
      os = defOS
      platform = Platform arch os
      requireStackVersion = simplifyVersionRange
        configMonoid.requireStackVersion.intersectingVersionRange
      compilerCheck = fromFirst MatchMinor configMonoid.compilerCheck
  platformVariant <- liftIO $
    maybe PlatformVariantNone PlatformVariant
      <$> lookupEnv platformVariantEnvVar
  let build = buildOptsFromMonoid configMonoid.buildOpts
  docker <-
    dockerOptsFromMonoid (fmap fst mproject) resolver configMonoid.dockerOpts
  nix <- nixOptsFromMonoid configMonoid.nixOpts os
  systemGHC <-
    case (getFirst configMonoid.systemGHC, nix.enable) of
      (Just False, True) ->
        throwM NixRequiresSystemGhc
      _ ->
        pure
          (fromFirst (docker.enable || nix.enable) configMonoid.systemGHC)
  -- A manual GHC variant only makes sense for Stack-installed GHCs.
  when (isJust ghcVariant && systemGHC) $
    throwM ManualGHCVariantSettingsAreIncompatibleWithSystemGHC
  rawEnv <- liftIO getEnvironment
  pathsEnv <- either throwM pure
    $ augmentPathMap
        (map toFilePath configMonoid.extraPath)
        (Map.fromList (map (T.pack *** T.pack) rawEnv))
  origEnv <- mkProcessContext pathsEnv
  let processContextSettings _ = pure origEnv
  localProgramsBase <- case getFirst configMonoid.localProgramsBase of
    Nothing -> getDefaultLocalProgramsBase stackRoot platform origEnv
    Just path -> pure path
  let localProgramsFilePath = toFilePath localProgramsBase
  when (osIsWindows && ' ' `elem` localProgramsFilePath) $ do
    ensureDir localProgramsBase
    -- getShortPathName returns the long path name when a short name does not
    -- exist.
    shortLocalProgramsFilePath <-
      liftIO $ getShortPathName localProgramsFilePath
    when (' ' `elem` shortLocalProgramsFilePath) $
      prettyError $
        "[S-8432]"
        <> line
        <> fillSep
             [ flow "Stack's 'programs' path contains a space character and \
                    \has no alternative short ('8 dot 3') name. This will \
                    \cause problems with packages that use the GNU project's \
                    \'configure' shell script. Use the"
             , style Shell "local-programs-path"
             , flow "configuration option to specify an alternative path. \
                    \The current path is:"
             , style File (fromString localProgramsFilePath) <> "."
             ]
  platformOnlyDir <-
    runReaderT platformOnlyRelDir (platform, platformVariant)
  let localPrograms = localProgramsBase </> platformOnlyDir
  localBin <-
    case getFirst configMonoid.localBinPath of
      Nothing -> do
        localDir <- getAppUserDataDir "local"
        pure $ localDir </> relDirBin
      Just userPath ->
        ( case mproject of
            -- Not in a project
            Nothing -> resolveDir' userPath
            -- Resolves to the project dir and appends the user path if it is
            -- relative
            Just (_, configYaml) -> resolveDir (parent configYaml) userPath
        )
        -- TODO: Either catch specific exceptions or add a
        -- parseRelAsAbsDirMaybe utility and use it along with
        -- resolveDirMaybe.
        `catchAny`
        const (throwIO (NoSuchDirectory userPath))
  jobs <-
    case getFirst configMonoid.jobs of
      Nothing -> liftIO getNumProcessors
      Just i -> pure i
  let concurrentTests =
        fromFirst True configMonoid.concurrentTests
      templateParams = configMonoid.templateParameters
      scmInit = getFirst configMonoid.scmInit
      cabalConfigOpts = coerce configMonoid.cabalConfigOpts
      ghcOptionsByName = coerce configMonoid.ghcOptionsByName
      ghcOptionsByCat = coerce configMonoid.ghcOptionsByCat
      setupInfoLocations = configMonoid.setupInfoLocations
      setupInfoInline = configMonoid.setupInfoInline
      pvpBounds =
        fromFirst (PvpBounds PvpBoundsNone False) configMonoid.pvpBounds
      modifyCodePage = fromFirstTrue configMonoid.modifyCodePage
      rebuildGhcOptions = fromFirstFalse configMonoid.rebuildGhcOptions
      applyGhcOptions = fromFirst AGOLocals configMonoid.applyGhcOptions
      applyProgOptions = fromFirst APOLocals configMonoid.applyProgOptions
      allowNewer = fromFirst False configMonoid.allowNewer
      allowNewerDeps = coerce configMonoid.allowNewerDeps
      defaultTemplate = getFirst configMonoid.defaultTemplate
      dumpLogs = fromFirst DumpWarningLogs configMonoid.dumpLogs
      saveHackageCreds = fromFirst True configMonoid.saveHackageCreds
      hackageBaseUrl =
        fromFirst Constants.hackageBaseUrl configMonoid.hackageBaseUrl
      hideSourcePaths = fromFirstTrue configMonoid.hideSourcePaths
      recommendUpgrade = fromFirstTrue configMonoid.recommendUpgrade
      notifyIfNixOnPath = fromFirstTrue configMonoid.notifyIfNixOnPath
      notifyIfGhcUntested = fromFirstTrue configMonoid.notifyIfGhcUntested
      notifyIfCabalUntested = fromFirstTrue configMonoid.notifyIfCabalUntested
      notifyIfArchUnknown = fromFirstTrue configMonoid.notifyIfArchUnknown
      noRunCompile = fromFirstFalse configMonoid.noRunCompile
  allowDifferentUser <-
    case getFirst configMonoid.allowDifferentUser of
      Just True -> pure True
      _ -> getInContainer
  configRunner' <- view runnerL
  useAnsi <- liftIO $ hNowSupportsANSI stderr
  let stylesUpdate' = (configRunner' ^. stylesUpdateL) <> configMonoid.styles
      useColor' = configRunner'.useColor
      mUseColor = do
        colorWhen <- getFirst configMonoid.colorWhen
        pure $ case colorWhen of
          ColorNever -> False
          ColorAlways -> True
          ColorAuto -> useAnsi
      useColor'' = fromMaybe useColor' mUseColor
      configRunner'' = configRunner'
        & processContextL .~ origEnv
        & stylesUpdateL .~ stylesUpdate'
        & useColorL .~ useColor''
      go = configRunner'.globalOpts
  pic <-
    case getFirst configMonoid.packageIndex of
      Nothing ->
        case getFirst configMonoid.packageIndices of
          Nothing -> pure defaultPackageIndexConfig
          Just [pic] -> do
            prettyWarn packageIndicesWarning
            pure pic
          Just x -> prettyThrowIO $ MultiplePackageIndices x
      Just pic -> pure pic
  mpantryRoot <- liftIO $ lookupEnv pantryRootEnvVar
  pantryRoot <-
    case mpantryRoot of
      Just dir ->
        case parseAbsDir dir of
          Nothing -> throwIO $ ParseAbsolutePathException pantryRootEnvVar dir
          Just x -> pure x
      Nothing -> pure $ stackRoot </> relDirPantry
  let snapLoc =
        case getFirst configMonoid.snapshotLocation of
          Nothing -> defaultSnapshotLocation
          Just addr ->
            customSnapshotLocation
           where
            customSnapshotLocation (LTS x y) =
              mkRSLUrl $ addr'
                <> "/lts/" <> display x
                <> "/" <> display y <> ".yaml"
            customSnapshotLocation (Nightly date) =
              let (year, month, day) = toGregorian date
              in  mkRSLUrl $ addr'
                    <> "/nightly/"
                    <> display year
                    <> "/" <> display month
                    <> "/" <> display day <> ".yaml"
            mkRSLUrl builder = RSLUrl (utf8BuilderToText builder) Nothing
            addr' = display $ T.dropWhileEnd (=='/') addr
  let stackDeveloperMode = fromFirst
        stackDeveloperModeDefault
        configMonoid.stackDeveloperMode
      casa =
        if fromFirstTrue configMonoid.casaOpts.enable
          then
            let casaRepoPrefix = fromFirst
                  (fromFirst defaultCasaRepoPrefix configMonoid.casaRepoPrefix)
                  configMonoid.casaOpts.repoPrefix
                casaMaxKeysPerRequest = fromFirst
                  defaultCasaMaxPerRequest
                  configMonoid.casaOpts.maxKeysPerRequest
            in  Just (casaRepoPrefix, casaMaxKeysPerRequest)
          else Nothing
  withNewLogFunc go useColor'' stylesUpdate' $ \logFunc -> do
    let runner = configRunner'' & logFuncL .~ logFunc
    withLocalLogFunc logFunc $ handleMigrationException $ do
      logDebug $ case casa of
        Nothing -> "Use of Casa server disabled."
        Just (repoPrefix, maxKeys) ->
          "Use of Casa server enabled: ("
          <> fromString (show repoPrefix)
          <> ", "
          <> fromString (show maxKeys)
          <> ")."
      withPantryConfig'
        pantryRoot
        pic
        (maybe HpackBundled HpackCommand $ getFirst configMonoid.overrideHpack)
        clConnectionCount
        casa
        snapLoc
        (\pantryConfig -> initUserStorage
          (stackRoot </> relFileStorage)
          ( \userStorage -> inner Config
              { workDir
              , userConfigPath
              , build
              , docker
              , nix
              , processContextSettings
              , localProgramsBase
              , localPrograms
              , hideTHLoading
              , prefixTimestamps
              , platform
              , platformVariant
              , ghcVariant
              , ghcBuild
              , latestSnapshot
              , systemGHC
              , installGHC
              , skipGHCCheck
              , skipMsys
              , compilerCheck
              , compilerRepository
              , localBin
              , requireStackVersion
              , jobs
              , overrideGccPath
              , extraIncludeDirs
              , extraLibDirs
              , customPreprocessorExts
              , concurrentTests
              , templateParams
              , scmInit
              , ghcOptionsByName
              , ghcOptionsByCat
              , cabalConfigOpts
              , setupInfoLocations
              , setupInfoInline
              , pvpBounds
              , modifyCodePage
              , rebuildGhcOptions
              , applyGhcOptions
              , applyProgOptions
              , allowNewer
              , allowNewerDeps
              , defaultTemplate
              , allowDifferentUser
              , dumpLogs
              , project
              , allowLocals
              , saveHackageCreds
              , hackageBaseUrl
              , runner
              , pantryConfig
              , stackRoot
              , resolver
              , userStorage
              , hideSourcePaths
              , recommendUpgrade
              , notifyIfNixOnPath
              , notifyIfGhcUntested
              , notifyIfCabalUntested
              , notifyIfArchUnknown
              , noRunCompile
              , stackDeveloperMode
              , casa
              }
          )
        )

-- | Runs the provided action with the given 'LogFunc' in the environment
withLocalLogFunc :: HasLogFunc env => LogFunc -> RIO env a -> RIO env a
withLocalLogFunc logFunc = local (set logFuncL logFunc)

-- | Runs the provided action with a new 'LogFunc', given a 'StylesUpdate'.
withNewLogFunc ::
     MonadUnliftIO m
  => GlobalOpts
  -> Bool
     -- ^ Use color
  -> StylesUpdate
  -> (LogFunc -> m a)
  -> m a
withNewLogFunc go useColor (StylesUpdate update) inner = do
  logOptions0 <- logOptionsHandle stderr False
  let logOptions
        = setLogUseColor useColor
        $ setLogLevelColors logLevelColors
        $ setLogSecondaryColor secondaryColor
        $ setLogAccentColors (const highlightColor)
        $ setLogUseTime go.timeInLog
        $ setLogMinLevel go.logLevel
        $ setLogVerboseFormat (go.logLevel <= LevelDebug)
        $ setLogTerminal go.terminal
          logOptions0
  withLogFunc logOptions inner
 where
  -- Start from the default styles, overlaid with the user's updates.
  styles = defaultStyles // update
  logLevelColors :: LogLevel -> Utf8Builder
  logLevelColors level =
    fromString $ setSGRCode $ snd $ styles ! logLevelToStyle level
  secondaryColor = fromString $ setSGRCode $ snd $ styles ! Secondary
  highlightColor = fromString $ setSGRCode $ snd $ styles ! Highlight

-- | Get the default location of the local programs directory.
getDefaultLocalProgramsBase ::
     MonadThrow m
  => Path Abs Dir
  -> Platform
  -> ProcessContext
  -> m (Path Abs Dir)
getDefaultLocalProgramsBase configStackRoot configPlatform override =
  case configPlatform of
    -- For historical reasons, on Windows a subdirectory of LOCALAPPDATA is
    -- used instead of a subdirectory of STACK_ROOT. Unifying the defaults
    -- would mean that Windows users would manually have to move data from the
    -- old location to the new one, which is undesirable.
    Platform _ Windows -> do
      let envVars = view envVarsL override
      case T.unpack <$> Map.lookup "LOCALAPPDATA" envVars of
        Just t ->
          case parseAbsDir t of
            Nothing -> throwM $ ParseAbsolutePathException "LOCALAPPDATA" t
            Just lad ->
              pure $ lad </> relDirUpperPrograms </> relDirStackProgName
        Nothing -> pure defaultBase
    _ -> pure defaultBase
 where
  defaultBase = configStackRoot </> relDirPrograms

-- | Load the configuration, using current directory, environment variables,
-- and defaults as necessary.
loadConfig ::
     (HasRunner env, HasTerm env)
  => (Config -> RIO env a)
  -> RIO env a
loadConfig inner = do
  mstackYaml <- view $ globalOptsL . to (.stackYaml)
  mproject <- loadProjectConfig mstackYaml
  mresolver <- view $ globalOptsL . to (.resolver)
  configArgs <- view $ globalOptsL . to (.configMonoid)
  (configRoot, stackRoot, userOwnsStackRoot) <-
    determineStackRootAndOwnership configArgs
  let (mproject', addConfigMonoid) =
        case mproject of
          PCProject (proj, fp, cm) -> (PCProject (proj, fp), (cm:))
          PCGlobalProject -> (PCGlobalProject, id)
          PCNoProject deps -> (PCNoProject deps, id)
  userConfigPath <- getDefaultUserConfigPath configRoot
  extraConfigs0 <- getExtraConfigs userConfigPath >>=
    mapM (\file -> loadConfigYaml (parseConfigMonoid (parent file)) file)
  let extraConfigs =
        -- non-project config files' existence of a docker section should never
        -- default docker to enabled, so make it look like they didn't exist
        map
          (\c -> c {dockerOpts = c.dockerOpts { defaultEnable = Any False }})
          extraConfigs0
  let withConfig = configFromConfigMonoid
        stackRoot
        userConfigPath
        mresolver
        mproject'
        (mconcat $ configArgs : addConfigMonoid extraConfigs)
  withConfig $ \config -> do
    let Platform arch _ = config.platform
    case arch of
      OtherArch unknownArch | config.notifyIfArchUnknown ->
        prettyWarnL
          [ flow "Unknown value for architecture setting:"
          , style Shell (fromString unknownArch) <> "."
          , flow "To mute this message in future, set"
          , style Shell (flow "notify-if-arch-unknown: false")
          , flow "in Stack's configuration."
          ]
      _ -> pure ()
    unless (stackVersion `withinRange` config.requireStackVersion)
      (throwM (BadStackVersionException config.requireStackVersion))
    unless config.allowDifferentUser $ do
      unless userOwnsStackRoot $
        throwM (UserDoesn'tOwnDirectory stackRoot)
      forM_ (configProjectRoot config) $ \dir ->
        checkOwnership (dir </> config.workDir)
    inner config

-- | Load the build configuration, adds build-specific values to config loaded
-- by @loadConfig@.
withBuildConfig :: RIO BuildConfig a -> RIO Config a
withBuildConfig inner = do
  config <- ask
  -- If provided, turn the AbstractResolver from the command line into a
  -- Resolver that can be used below.

  -- The configResolver and mcompiler are provided on the command line. In
  -- order to properly deal with an AbstractResolver, we need a base directory
  -- (to deal with custom snapshot relative paths). We consider the current
  -- working directory to be the correct base. Let's calculate the mresolver
  -- first.
  mresolver <- forM config.resolver $ \aresolver -> do
    logDebug
      (  "Using resolver: "
      <> display aresolver
      <> " specified on command line"
      )
    makeConcreteResolver aresolver
  (project', stackYaml) <- case config.project of
    PCProject (project, fp) -> do
      forM_ project.userMsg prettyWarnS
      pure (project, fp)
    PCNoProject extraDeps -> do
      p <-
        case mresolver of
          Nothing -> throwIO NoResolverWhenUsingNoProject
          Just _ -> getEmptyProject mresolver extraDeps
      pure (p, config.userConfigPath)
    PCGlobalProject -> do
      logDebug
        "Run from outside a project, using implicit global project config"
      destDir <- getImplicitGlobalProjectDir config
      let dest :: Path Abs File
          dest = destDir </> stackDotYaml
          dest' :: FilePath
          dest' = toFilePath dest
      ensureDir destDir
      exists <- doesFileExist dest
      if exists
        then do
          iopc <- loadConfigYaml (parseProjectAndConfigMonoid destDir) dest
          ProjectAndConfigMonoid project _ <- liftIO iopc
          when (view terminalL config) $
            case config.resolver of
              Nothing ->
                logDebug $
                  "Using resolver: "
                  <> display project.resolver
                  <> " from implicit global project's config file: "
                  <> fromString dest'
              Just _ -> pure ()
          pure (project, dest)
        else do
          prettyInfoL
            [ flow "Writing the configuration file for the implicit \
                   \global project to:"
            , pretty dest <> "."
            , flow "Note: You can change the snapshot via the"
            , style Shell "resolver"
            , flow "field there."
            ]
          p <- getEmptyProject mresolver []
          liftIO $ do
            writeBinaryFileAtomic dest $ byteString $ S.concat
              [ "# This is the implicit global project's config file, which is only used when\n"
              , "# 'stack' is run outside of a real project. Settings here do _not_ act as\n"
              , "# defaults for all projects. To change Stack's default settings, edit\n"
              , "# '"
              , encodeUtf8 (T.pack $ toFilePath config.userConfigPath)
              , "' instead.\n"
              , "#\n"
              , "# For more information about Stack's configuration, see\n"
              , "# http://docs.haskellstack.org/en/stable/yaml_configuration/\n"
              , "#\n"
              , Yaml.encode p
              ]
            writeBinaryFileAtomic (parent dest </> relFileReadmeTxt) $
              "This is the implicit global project, which is "
              <> "used only when 'stack' is run\noutside of a "
              <> "real project.\n"
          pure (p, dest)
  mcompiler <- view $ globalOptsL . to (.compiler)
  let project :: Project
      project = project'
        { Project.compiler = mcompiler <|> project'.compiler
        , Project.resolver = fromMaybe project'.resolver mresolver
        }
  extraPackageDBs <- mapM resolveDir' project.extraPackageDBs
  smWanted <- lockCachedWanted stackYaml project.resolver $
    fillProjectWanted stackYaml config project
  -- Unfortunately redoes getProjectWorkDir, since we don't have a BuildConfig
  -- yet
  workDir <- view workDirL
  let projectStorageFile = parent stackYaml </> workDir </> relFileStorage
  initProjectStorage projectStorageFile $ \projectStorage -> do
    let bc = BuildConfig
          { config
          , smWanted
          , extraPackageDBs
          , stackYaml
          , curator = project.curator
          , projectStorage
          }
    runRIO bc inner
 where
  -- Synthesise a project when no stack.yaml is in play, using the given
  -- resolver (or the latest snapshot) and the given extra-deps.
  getEmptyProject ::
       Maybe RawSnapshotLocation
    -> [PackageIdentifierRevision]
    -> RIO Config Project
  getEmptyProject mresolver extraDeps = do
    r <- case mresolver of
      Just resolver -> do
        prettyInfoL
          [ flow "Using the snapshot"
          , style Current (fromString $ T.unpack $ textDisplay resolver)
          , flow "specified on the command line."
          ]
        pure resolver
      Nothing -> do
        r'' <- getLatestResolver
        prettyInfoL
          [ flow "Using the latest snapshot"
          , style Current (fromString $ T.unpack $ textDisplay r'') <> "."
          ]
        pure r''
    pure Project
      { userMsg = Nothing
      , packages = []
      , extraDeps = map (RPLImmutable . flip RPLIHackage Nothing) extraDeps
      , flagsByPkg = mempty
      , resolver = r
      , compiler = Nothing
      , extraPackageDBs = []
      , curator = Nothing
      , dropPackages = mempty
      }

fillProjectWanted ::
     (HasLogFunc env, HasPantryConfig env, HasProcessContext env)
  => Path Abs t
  -> Config
  -> Project
  -> Map RawPackageLocationImmutable PackageLocationImmutable
  -> WantedCompiler
  -> Map PackageName (Bool -> RIO env DepPackage)
  -> RIO env (SMWanted, [CompletedPLI])
fillProjectWanted stackYamlFP config project locCache snapCompiler snapPackages = do
  let bopts = config.build
  packages0 <- for project.packages $ \fp@(RelFilePath t) -> do
    abs' <- resolveDir (parent stackYamlFP) (T.unpack t)
    let resolved = ResolvedPath fp abs'
    pp <- mkProjectPackage YesPrintWarnings resolved bopts.buildHaddocks
    pure (pp.projectCommon.name, pp)
  -- prefetch git repos to avoid cloning per subdirectory
  -- see https://github.com/commercialhaskell/stack/issues/5411
  let gitRepos = mapMaybe
        ( \case
            (RPLImmutable (RPLIRepo repo rpm)) -> Just (repo, rpm)
            _ -> Nothing
        )
        project.extraDeps
  logDebug ("Prefetching git repos: " <> display (T.pack (show gitRepos)))
  fetchReposRaw gitRepos
  (deps0, mcompleted) <- fmap unzip . forM project.extraDeps $ \rpl -> do
    (pl, mCompleted) <- case rpl of
      RPLImmutable rpli -> do
        (compl, mcompl) <-
          case Map.lookup rpli locCache of
            Just compl -> pure (compl, Just compl)
            Nothing -> do
              cpl <- completePackageLocation rpli
              if cplHasCabalFile cpl
                then pure (cplComplete cpl, Just $ cplComplete cpl)
                else do
                  warnMissingCabalFile rpli
                  pure (cplComplete cpl, Nothing)
        pure (PLImmutable compl, CompletedPLI rpli <$> mcompl)
      RPLMutable p ->
        pure (PLMutable p, Nothing)
    dp <- additionalDepPackage (shouldHaddockDeps bopts) pl
    pure ((dp.depCommon.name, dp), mCompleted)
  checkDuplicateNames $
    map (second (PLMutable . (.resolvedDir))) packages0
    ++ map (second (.location)) deps0
  let packages1 = Map.fromList packages0
      snPackages = snapPackages
        `Map.difference` packages1
        `Map.difference` Map.fromList deps0
        `Map.withoutKeys` project.dropPackages
  snDeps <- for snPackages $ \getDep -> getDep (shouldHaddockDeps bopts)
  let deps1 = Map.fromList deps0 `Map.union` snDeps
  let mergeApply m1 m2 f =
        MS.merge MS.preserveMissing MS.dropMissing (MS.zipWithMatched f) m1 m2
      pFlags = project.flagsByPkg
      packages2 = mergeApply packages1 pFlags $
        \_ p flags -> p { projectCommon = p.projectCommon { flags = flags } }
      deps2 = mergeApply deps1 pFlags $
        \_ d flags -> d { depCommon = d.depCommon { flags = flags } }
  checkFlagsUsedThrowing pFlags FSStackYaml packages1 deps1
  let pkgGhcOptions = config.ghcOptionsByName
      deps = mergeApply deps2 pkgGhcOptions $
        \_ d options -> d { depCommon = d.depCommon { ghcOptions = options } }
      packages = mergeApply packages2 pkgGhcOptions $
        \_ p options ->
          p { projectCommon = p.projectCommon { ghcOptions = options } }
      -- NOTE(review): the double restrictKeys below keeps only options whose
      -- package is in BOTH packages2 and deps2, which looks like it should be
      -- withoutKeys (options for packages in NEITHER map) given the error
      -- raised when non-empty — confirm the intended semantics.
      unusedPkgGhcOptions = pkgGhcOptions
        `Map.restrictKeys` Map.keysSet packages2
        `Map.restrictKeys` Map.keysSet deps2
  unless (Map.null unusedPkgGhcOptions) $
    throwM $ InvalidGhcOptionsSpecification (Map.keys unusedPkgGhcOptions)
  let wanted = SMWanted
        { compiler = fromMaybe snapCompiler project.compiler
        , project = packages
        , deps = deps
        , snapshotLocation = project.resolver
        }
  pure (wanted, catMaybes mcompleted)

-- | Check if there are any duplicate package names and, if so, throw an
-- exception.
checkDuplicateNames :: MonadThrow m => [(PackageName, PackageLocation)] -> m ()
checkDuplicateNames locals =
  case filter hasMultiples $ Map.toList $
         Map.fromListWith (++) $ map (second pure) locals of
    [] -> pure ()
    x -> prettyThrowM $ DuplicateLocalPackageNames x
 where
  hasMultiples (_, _:_:_) = True
  hasMultiples _ = False

-- | Get the Stack root, e.g. @~/.stack@, and determine whether the user owns
-- it.
--
-- On Windows, the second value is always 'True'.
determineStackRootAndOwnership ::
     MonadIO m
  => ConfigMonoid
  -- ^ Parsed command-line arguments
  -> m (Path Abs Dir, Path Abs Dir, Bool)
determineStackRootAndOwnership clArgs = liftIO $ do
  (configRoot, stackRoot) <- do
    case getFirst clArgs.stackRoot of
      Just x -> pure (x, x)
      Nothing -> do
        mstackRoot <- lookupEnv stackRootEnvVar
        case mstackRoot of
          Nothing -> do
            wantXdg <- fromMaybe "" <$> lookupEnv stackXdgEnvVar
            if not (null wantXdg)
              then do
                xdgRelDir <- parseRelDir stackProgName
                (,)
                  <$> getXdgDir XdgConfig (Just xdgRelDir)
                  <*> getXdgDir XdgData (Just xdgRelDir)
              else do
                oldStyleRoot <- getAppUserDataDir stackProgName
                pure (oldStyleRoot, oldStyleRoot)
          Just x -> case parseAbsDir x of
            Nothing ->
              throwIO $ ParseAbsolutePathException stackRootEnvVar x
            Just parsed -> pure (parsed, parsed)
  (existingStackRootOrParentDir, userOwnsIt) <- do
    mdirAndOwnership <- findInParents getDirAndOwnership stackRoot
    case mdirAndOwnership of
      Just x -> pure x
      Nothing -> throwIO (BadStackRoot stackRoot)
  when (existingStackRootOrParentDir /= stackRoot) $
    if userOwnsIt
      then ensureDir stackRoot
      else throwIO $
        Won'tCreateStackRootInDirectoryOwnedByDifferentUser
          stackRoot
          existingStackRootOrParentDir
  configRoot' <- canonicalizePath configRoot
  stackRoot' <- canonicalizePath stackRoot
  pure (configRoot', stackRoot', userOwnsIt)

-- | @'checkOwnership' dir@ throws 'UserDoesn'tOwnDirectory' if @dir@ isn't
-- owned by the current user.
--
-- If @dir@ doesn't exist, its parent directory is checked instead.
-- If the parent directory doesn't exist either,
-- @'NoSuchDirectory' ('parent' dir)@ is thrown.
checkOwnership :: MonadIO m => Path Abs Dir -> m ()
checkOwnership dir = do
  mdirAndOwnership <- firstJustM getDirAndOwnership [dir, parent dir]
  case mdirAndOwnership of
    Just (_, True) -> pure ()
    Just (dir', False) -> throwIO (UserDoesn'tOwnDirectory dir')
    Nothing ->
      throwIO . NoSuchDirectory $ (toFilePathNoTrailingSep . parent) dir

-- | @'getDirAndOwnership' dir@ returns @'Just' (dir, 'True')@ when @dir@
-- exists and the current user owns it in the sense of 'isOwnedByUser'.
getDirAndOwnership ::
     MonadIO m
  => Path Abs Dir
  -> m (Maybe (Path Abs Dir, Bool))
getDirAndOwnership dir = liftIO $ forgivingAbsence $ do
    ownership <- isOwnedByUser dir
    pure (dir, ownership)

-- | Check whether the current user (determined with 'getEffectiveUserId') is
-- the owner for the given path.
--
-- Will always pure 'True' on Windows.
isOwnedByUser :: MonadIO m => Path Abs t -> m Bool
isOwnedByUser path = liftIO $
  if osIsWindows
    then pure True
    else do
      fileStatus <- getFileStatus (toFilePath path)
      user <- getEffectiveUserID
      pure (user == fileOwner fileStatus)

-- | 'True' if we are currently running inside a Docker container.
getInContainer :: MonadIO m => m Bool
getInContainer = liftIO (isJust <$> lookupEnv inContainerEnvVar)

-- | 'True' if we are currently running inside a Nix.
getInNixShell :: MonadIO m => m Bool
getInNixShell = liftIO (isJust <$> lookupEnv inNixShellEnvVar)

-- | Determine the extra config file locations which exist.
--
-- Returns most local first
getExtraConfigs ::
     HasTerm env
  => Path Abs File -- ^ use config path
  -> RIO env [Path Abs File]
getExtraConfigs userConfigPath = do
  defaultStackGlobalConfigPath <- getDefaultGlobalConfigPath
  liftIO $ do
    env <- getEnvironment
    mstackConfig <-
      maybe (pure Nothing) (fmap Just . parseAbsFile)
      $ lookup "STACK_CONFIG" env
    mstackGlobalConfig <-
      maybe (pure Nothing) (fmap Just . parseAbsFile)
      $ lookup "STACK_GLOBAL_CONFIG" env
    filterM doesFileExist
      $ fromMaybe userConfigPath mstackConfig
      : maybe [] pure (mstackGlobalConfig <|> defaultStackGlobalConfigPath)

-- | Load and parse YAML from the given config file. Throws
-- 'ParseConfigFileException' when there's a decoding error.
loadConfigYaml ::
     HasLogFunc env
  => (Value -> Yaml.Parser (WithJSONWarnings a))
  -> Path Abs File
  -> RIO env a
loadConfigYaml parser path = do
  eres <- loadYaml parser path
  case eres of
    Left err -> prettyThrowM (ParseConfigFileException path err)
    Right res -> pure res

-- | Load and parse YAML from the given file.
loadYaml ::
     HasLogFunc env
  => (Value -> Yaml.Parser (WithJSONWarnings a))
  -> Path Abs File
  -> RIO env (Either Yaml.ParseException a)
loadYaml parser path = do
  eres <- liftIO $ Yaml.decodeFileEither (toFilePath path)
  case eres of
    Left err -> pure (Left err)
    Right val ->
      case Yaml.parseEither parser val of
        Left err -> pure (Left (Yaml.AesonException err))
        Right (WithJSONWarnings res warnings) -> do
          logJSONWarnings (toFilePath path) warnings
          pure (Right res)

-- | Get the location of the project config file, if it exists.
getProjectConfig ::
     HasTerm env
  => StackYamlLoc
     -- ^ Override stack.yaml
  -> RIO env (ProjectConfig (Path Abs File))
getProjectConfig (SYLOverride stackYaml) = pure $ PCProject stackYaml
getProjectConfig SYLGlobalProject = pure PCGlobalProject
getProjectConfig SYLDefault = do
  env <- liftIO getEnvironment
  case lookup "STACK_YAML" env of
    Just fp -> do
      prettyInfoS
        "Getting the project-level configuration file from the \
        \STACK_YAML environment variable."
      PCProject <$> resolveFile' fp
    Nothing -> do
      currDir <- getCurrentDir
      maybe PCGlobalProject PCProject <$> findInParents getStackDotYaml currDir
 where
  getStackDotYaml dir = do
    let fp = dir </> stackDotYaml
        fp' = toFilePath fp
    logDebug $ "Checking for project config at: " <> fromString fp'
    exists <- doesFileExist fp
    if exists
      then pure $ Just fp
      else pure Nothing
getProjectConfig (SYLNoProject extraDeps) = pure $ PCNoProject extraDeps

-- | Find the project config file location, respecting environment variables
-- and otherwise traversing parents. If no config is found, we supply a default
-- based on current directory.
loadProjectConfig ::
     HasTerm env
  => StackYamlLoc
     -- ^ Override stack.yaml
  -> RIO env (ProjectConfig (Project, Path Abs File, ConfigMonoid))
loadProjectConfig mstackYaml = do
  mfp <- getProjectConfig mstackYaml
  case mfp of
    PCProject fp -> do
      currDir <- getCurrentDir
      logDebug $
        "Loading project config file "
        <> fromString
             (maybe (toFilePath fp) toFilePath (stripProperPrefix currDir fp))
      PCProject <$> load fp
    PCGlobalProject -> do
      logDebug "No project config file found, using defaults."
      pure PCGlobalProject
    PCNoProject extraDeps -> do
      logDebug "Ignoring config files"
      pure $ PCNoProject extraDeps
 where
  load fp = do
    iopc <- loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp
    ProjectAndConfigMonoid project config <- liftIO iopc
    pure (project, fp, config)

-- | Get the location of the default Stack configuration file. If a file
-- already exists at the deprecated location, its location is returned.
-- Otherwise, the new location is returned.
getDefaultGlobalConfigPath :: HasTerm env => RIO env (Maybe (Path Abs File))
getDefaultGlobalConfigPath =
  case (defaultGlobalConfigPath, defaultGlobalConfigPathDeprecated) of
    (Just new, Just old) ->
      Just . fst <$>
        tryDeprecatedPath
          (Just "non-project global configuration file")
          doesFileExist
          new
          old
    (Just new, Nothing) -> pure (Just new)
    _ -> pure Nothing

-- | Get the location of the default user configuration file.
If a file already -- exists at the deprecated location, its location is returned. Otherwise, the -- new location is returned. getDefaultUserConfigPath :: HasTerm env => Path Abs Dir -> RIO env (Path Abs File) getDefaultUserConfigPath stackRoot = do (path, exists) <- tryDeprecatedPath (Just "non-project configuration file") doesFileExist (defaultUserConfigPath stackRoot) (defaultUserConfigPathDeprecated stackRoot) unless exists $ do ensureDir (parent path) liftIO $ writeBinaryFileAtomic path defaultConfigYaml pure path packagesParser :: Parser [String] packagesParser = many (strOption (long "package" <> metavar "PACKAGE" <> help "Add a package (can be specified multiple times)")) defaultConfigYaml :: (IsString s, Semigroup s) => s defaultConfigYaml = "# This file contains default non-project-specific settings for Stack, used\n" <> "# in all projects. For more information about Stack's configuration, see\n" <> "# http://docs.haskellstack.org/en/stable/yaml_configuration/\n" <> "\n" <> "# The following parameters are used by 'stack new' to automatically fill fields\n" <> "# in the Cabal file. We recommend uncommenting them and filling them out if\n" <> "# you intend to use 'stack new'.\n" <> "# See https://docs.haskellstack.org/en/stable/yaml_configuration/#templates\n" <> "templates:\n" <> " params:\n" <> "# author-name:\n" <> "# author-email:\n" <> "# copyright:\n" <> "# github-username:\n" <> "\n" <> "# The following parameter specifies Stack's output styles; STYLES is a\n" <> "# colon-delimited sequence of key=value, where 'key' is a style name and\n" <> "# 'value' is a semicolon-delimited list of 'ANSI' SGR (Select Graphic\n" <> "# Rendition) control codes (in decimal). 
Use 'stack ls stack-colors --basic'\n" <> "# to see the current sequence.\n" <> "# stack-colors: STYLES\n" stack-2.15.7/src/Stack/Config/Build.hs0000644000000000000000000001257614620153445015554 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} -- | Build configuration module Stack.Config.Build ( buildOptsFromMonoid , haddockOptsFromMonoid , testOptsFromMonoid , benchmarkOptsFromMonoid ) where import Distribution.Verbosity ( normal ) import Stack.BuildOpts ( defaultBenchmarkOpts, defaultHaddockOpts, defaultTestOpts ) import Stack.Prelude import Stack.Types.BuildOpts ( BenchmarkOpts (..), BuildOpts (..), HaddockOpts (..) , TestOpts (..) ) import qualified Stack.Types.BuildOpts as BenchmarkOpts ( BenchmarkOpts (..) ) import qualified Stack.Types.BuildOpts as HaddockOpts ( HaddockOpts (..) ) import qualified Stack.Types.BuildOpts as TestOpts ( TestOpts (..) ) import Stack.Types.BuildOptsMonoid ( BenchmarkOptsMonoid (..), BuildOptsMonoid (..) , CabalVerbosity (..), HaddockOptsMonoid (..) , ProgressBarFormat (..), TestOptsMonoid (..) ) -- | Interprets BuildOptsMonoid options. 
buildOptsFromMonoid :: BuildOptsMonoid -> BuildOpts
buildOptsFromMonoid buildMonoid = BuildOpts
  { libProfile = fromFirstFalse
      ( buildMonoid.libProfile <>
        -- Tracing or profiling forces library profiling on.
        FirstFalse (if tracing || profiling then Just True else Nothing)
      )
  , exeProfile = fromFirstFalse
      ( buildMonoid.exeProfile <>
        -- Tracing or profiling forces executable profiling on.
        FirstFalse (if tracing || profiling then Just True else Nothing)
      )
  , libStrip = fromFirstTrue
      ( buildMonoid.libStrip <>
        -- A no-strip request disables library stripping.
        FirstTrue (if noStripping then Just False else Nothing)
      )
  , exeStrip = fromFirstTrue
      ( buildMonoid.exeStrip <>
        -- A no-strip request disables executable stripping.
        FirstTrue (if noStripping then Just False else Nothing)
      )
  , buildHaddocks = fromFirstFalse buildMonoid.buildHaddocks
  , haddockOpts = haddockOptsFromMonoid buildMonoid.haddockOpts
    -- Building Haddocks for Hackage overrides several other Haddock-related
    -- settings below (open, deps, internal, hyperlink-source, force-dirty).
  , openHaddocks =
      not isHaddockFromHackage && fromFirstFalse buildMonoid.openHaddocks
  , haddockDeps = if isHaddockFromHackage
      then Nothing
      else getFirst buildMonoid.haddockDeps
  , haddockInternal =
      not isHaddockFromHackage && fromFirstFalse buildMonoid.haddockInternal
  , haddockHyperlinkSource =
      isHaddockFromHackage || fromFirstTrue buildMonoid.haddockHyperlinkSource
  , haddockForHackage = isHaddockFromHackage
  , installExes = fromFirstFalse buildMonoid.installExes
  , installCompilerTool = fromFirstFalse buildMonoid.installCompilerTool
  , preFetch = fromFirstFalse buildMonoid.preFetch
  , keepGoing = getFirst buildMonoid.keepGoing
  , keepTmpFiles = fromFirstFalse buildMonoid.keepTmpFiles
  , forceDirty =
      isHaddockFromHackage || fromFirstFalse buildMonoid.forceDirty
  , tests = fromFirstFalse buildMonoid.tests
  , testOpts = testOptsFromMonoid buildMonoid.testOpts additionalArgs
  , benchmarks = fromFirstFalse buildMonoid.benchmarks
  , benchmarkOpts =
      benchmarkOptsFromMonoid buildMonoid.benchmarkOpts additionalArgs
  , reconfigure = fromFirstFalse buildMonoid.reconfigure
  , cabalVerbose = fromFirst (CabalVerbosity normal) buildMonoid.cabalVerbose
  , splitObjs = fromFirstFalse buildMonoid.splitObjs
  , skipComponents = buildMonoid.skipComponents
  , interleavedOutput = fromFirstTrue buildMonoid.interleavedOutput
  , progressBar = fromFirst CappedBar buildMonoid.progressBar
  , ddumpDir = getFirst buildMonoid.ddumpDir
  }
 where
  isHaddockFromHackage = fromFirstFalse buildMonoid.haddockForHackage
  -- These options are not directly used in bopts, instead they
  -- transform other options.
  tracing = getAny buildMonoid.trace
  profiling = getAny buildMonoid.profile
  noStripping = getAny buildMonoid.noStrip
  -- Additional args for tracing / profiling
  additionalArgs = if tracing || profiling
    then Just $ "+RTS" : catMaybes [trac, prof, Just "-RTS"]
    else Nothing
  trac = if tracing then Just "-xc" else Nothing
  prof = if profiling then Just "-p" else Nothing

-- | Interprets HaddockOptsMonoid options.
haddockOptsFromMonoid :: HaddockOptsMonoid -> HaddockOpts
haddockOptsFromMonoid hoMonoid = defaultHaddockOpts
  { HaddockOpts.additionalArgs = hoMonoid.additionalArgs }

-- | Interprets TestOptsMonoid options. The second argument carries any extra
-- arguments (here, the RTS arguments built in 'buildOptsFromMonoid' when
-- tracing or profiling is enabled) to prepend to the configured ones.
testOptsFromMonoid :: TestOptsMonoid -> Maybe [String] -> TestOpts
testOptsFromMonoid toMonoid madditional = defaultTestOpts
  { TestOpts.rerunTests = fromFirstTrue toMonoid.rerunTests
    -- Extra arguments come before the ones configured in the monoid.
  , TestOpts.additionalArgs =
      fromMaybe [] madditional <> toMonoid.additionalArgs
  , TestOpts.coverage = fromFirstFalse toMonoid.coverage
  , TestOpts.disableRun = fromFirstFalse toMonoid.disableRun
  , TestOpts.maximumTimeSeconds =
      fromFirst defaultTestOpts.maximumTimeSeconds toMonoid.maximumTimeSeconds
  , TestOpts.allowStdin = fromFirstTrue toMonoid.allowStdin
  }

-- | Interprets BenchmarkOptsMonoid options.
benchmarkOptsFromMonoid :: BenchmarkOptsMonoid -> Maybe [String] -> BenchmarkOpts benchmarkOptsFromMonoid beoMonoid madditional = defaultBenchmarkOpts { BenchmarkOpts.additionalArgs = fmap (\args -> unwords args <> " ") madditional <> getFirst beoMonoid.additionalArgs , BenchmarkOpts.disableRun = fromFirst defaultBenchmarkOpts.disableRun beoMonoid.disableRun } stack-2.15.7/src/Stack/Config/ConfigureScript.hs0000644000000000000000000001011114502056213017574 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Config.ConfigureScript ( ensureConfigureScript ) where import Path ( () ) import Path.IO ( doesFileExist ) import Stack.Constants ( osIsWindows, relFileConfigure ) import Stack.DefaultColorWhen ( defaultColorWhen ) import Stack.Prelude import RIO.Process ( HasProcessContext, withWorkingDir ) ensureConfigureScript :: (HasProcessContext env, HasTerm env) => Path b Dir -> RIO env () ensureConfigureScript dir = do let fp = dir relFileConfigure exists <- doesFileExist fp unless exists $ do prettyInfoL [ flow "Trying to generate" , style Shell "configure" , "with" , style Shell "autoreconf" , "in" , pretty dir <> "." ] let autoreconf = if osIsWindows then readProcessNull "sh" ["autoreconf", "-i"] else readProcessNull "autoreconf" ["-i"] -- On Windows 10, an upstream issue with the `sh autoreconf -i` -- command means that command clears, but does not then restore, the -- ENABLE_VIRTUAL_TERMINAL_PROCESSING flag for native terminals. The -- following hack re-enables the lost ANSI-capability. fixupOnWindows = when osIsWindows (void $ liftIO defaultColorWhen) withWorkingDir (toFilePath dir) $ autoreconf `catchAny` \ex -> do fixupOnWindows prettyWarn $ fillSep [ flow "Stack failed to run" , style Shell "autoreconf" <> "." 
] <> blankLine <> flow "Stack encountered the following error:" <> blankLine <> string (displayException ex) when osIsWindows $ do prettyInfo $ fillSep [ flow "Check that executable" , style File "perl" , flow "is on the path in Stack's MSYS2" , style Dir "\\usr\\bin" , flow "folder, and working, and that script files" , style File "autoreconf" , "and" , style File "aclocal" , flow "are on the path in that location. To check that" , style File "perl" <> "," , style File "autoreconf" , "or" , style File "aclocal" , flow "are on the path in the required location, run commands:" ] <> blankLine <> indent 4 (style Shell $ flow "stack exec where.exe -- perl") <> line <> indent 4 (style Shell $ flow "stack exec where.exe -- autoreconf") <> line <> indent 4 (style Shell $ flow "stack exec where.exe -- aclocal") <> blankLine <> fillSep [ "If" , style File "perl" <> "," , style File "autoreconf" , "or" , style File "aclocal" , flow "is not on the path in the required location, add them \ \with command (note that the relevant package name is" , style File "autotools" , "not" , style File "autoreconf" <> "):" ] <> blankLine <> indent 4 (style Shell $ flow "stack exec pacman -- --sync --refresh mingw-w64-x86_64-autotools") <> blankLine <> fillSep [ flow "Some versions of" , style File "perl" , flow "from MSYS2 are broken. See" , style Url "https://github.com/msys2/MSYS2-packages/issues/1611" , "and" , style Url "https://github.com/commercialhaskell/stack/pull/4781" <> "." 
, "To test if" , style File "perl" , flow "in the required location is working, try command:" ] <> blankLine <> indent 4 (style Shell $ flow "stack exec perl -- --version") <> blankLine fixupOnWindows stack-2.15.7/src/Stack/Config/Docker.hs0000644000000000000000000001117314620153445015714 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Docker configuration module Stack.Config.Docker ( ConfigDockerException (..) , addDefaultTag , dockerOptsFromMonoid ) where import Data.List ( find ) import qualified Data.Text as T import Distribution.Version ( simplifyVersionRange ) import Stack.Prelude import Stack.Types.Project ( Project (..) ) import Stack.Types.Docker ( DockerOpts (..), DockerMonoidRepoOrImage (..) , DockerOptsMonoid (..), dockerImageArgName ) import Stack.Types.Resolver ( AbstractResolver (..) ) import Stack.Types.Version ( IntersectingVersionRange (..) ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Config.Docker" module. data ConfigDockerException = ResolverNotSupportedException !(Maybe Project) !(Maybe AbstractResolver) -- ^ Only LTS resolvers are supported for default image tag. deriving (Show, Typeable) instance Exception ConfigDockerException where displayException (ResolverNotSupportedException mproject maresolver) = concat [ "Error: [S-8575]\n" , "Resolver not supported for Docker images:\n " , case (mproject, maresolver) of (Nothing, Nothing) -> "no resolver specified" (_, Just aresolver) -> T.unpack $ utf8BuilderToText $ display aresolver (Just project, Nothing) -> T.unpack $ utf8BuilderToText $ display project.resolver , "\nUse an LTS resolver, or set the '" , T.unpack dockerImageArgName , "' explicitly, in your configuration file."] -- | Add a default Docker tag name to a given base image. 
addDefaultTag ::
     MonadThrow m
  => String -- ^ base
  -> Maybe Project
  -> Maybe AbstractResolver
  -> m String
addDefaultTag base mproject maresolver = do
  -- Only LTS snapshots yield a well-defined default image tag; anything else
  -- is rejected with 'ResolverNotSupportedException'.
  let exc = throwM $ ResolverNotSupportedException mproject maresolver
  lts <- case maresolver of
    Just (ARResolver (RSLSynonym lts@(LTS _ _))) -> pure lts
    Just _aresolver -> exc
    Nothing ->
      case (.resolver) <$> mproject of
        Just (RSLSynonym lts@(LTS _ _)) -> pure lts
        _ -> exc
  pure $ base ++ ":" ++ show lts

-- | Interprets DockerOptsMonoid options.
dockerOptsFromMonoid ::
     MonadThrow m
  => Maybe Project
  -> Maybe AbstractResolver
  -> DockerOptsMonoid
  -> m DockerOpts
dockerOptsFromMonoid mproject maresolver dockerMonoid = do
  let image = case getFirst dockerMonoid.repoOrImage of
        -- No repository or image configured: fall back to the default
        -- repository, tagged for the LTS snapshot (see 'addDefaultTag').
        Nothing -> addDefaultTag "fpco/stack-build" mproject maresolver
        Just (DockerMonoidImage image') -> pure image'
        Just (DockerMonoidRepo repo) ->
          case find (`elem` (":@" :: String)) repo of
            Nothing -> addDefaultTag repo mproject maresolver
            -- Repo already specified a tag or digest, so don't append default
            Just _ -> pure repo
  let enable =
        fromFirst (getAny dockerMonoid.defaultEnable) dockerMonoid.enable
      -- Registry login defaults to True exactly when a non-empty registry
      -- username has been supplied.
      registryLogin =
        fromFirst
          (isJust (emptyToNothing (getFirst dockerMonoid.registryUsername)))
          dockerMonoid.registryLogin
      registryUsername =
        emptyToNothing (getFirst dockerMonoid.registryUsername)
      registryPassword =
        emptyToNothing (getFirst dockerMonoid.registryPassword)
      autoPull = fromFirstTrue dockerMonoid.autoPull
      detach = fromFirstFalse dockerMonoid.detach
      persist = fromFirstFalse dockerMonoid.persist
      containerName = emptyToNothing (getFirst dockerMonoid.containerName)
      network = emptyToNothing (getFirst dockerMonoid.network)
      runArgs = dockerMonoid.runArgs
      mount = dockerMonoid.mount
      mountMode = emptyToNothing (getFirst dockerMonoid.mountMode)
      env = dockerMonoid.env
      setUser = getFirst dockerMonoid.setUser
      requireDockerVersion =
        simplifyVersionRange
          dockerMonoid.requireDockerVersion.intersectingVersionRange
      stackExe = getFirst dockerMonoid.stackExe
  pure DockerOpts
    { enable
    , image
    , registryLogin
    , registryUsername
    , registryPassword
    , autoPull
    , detach
    , persist
    , containerName
    , network
    , runArgs
    , mount
    , mountMode
    , env
    , stackExe
    , setUser
    , requireDockerVersion
    }
 where
  -- Normalise: an empty string counts as "not specified".
  emptyToNothing Nothing = Nothing
  emptyToNothing (Just s)
    | null s = Nothing
    | otherwise = Just s
stack-2.15.7/src/Stack/Config/Nix.hs0000644000000000000000000001076214604306201015236 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE OverloadedStrings #-}

-- | Nix configuration
module Stack.Config.Nix
  ( nixCompiler
  , nixCompilerVersion
  , nixOptsFromMonoid
  ) where

import Control.Monad.Extra ( ifM )
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Distribution.System ( OS (..) )
import Stack.Constants ( osIsWindows )
import Stack.Prelude
import Stack.Types.Runner ( HasRunner )
import Stack.Types.Nix ( NixOpts (..), NixOptsMonoid (..) )
import System.Directory ( doesFileExist )

-- | Type representing exceptions thrown by functions exported by the
-- "Stack.Config.Nix" module.
data ConfigNixException
  = NixCannotUseShellFileAndPackagesException
    -- ^ Nix can't be given packages and a shell file at the same time
  | GHCMajorVersionUnspecified
    -- ^ The requested GHC version lacks a major version component.
  | OnlyGHCSupported
    -- ^ Nix integration supports only the GHC compiler.
  deriving (Show, Typeable)

instance Exception ConfigNixException where
  displayException NixCannotUseShellFileAndPackagesException =
    "Error: [S-2726]\n"
    ++ "You cannot have packages and a shell-file filled at the same time \
       \in your nix-shell configuration."
  displayException GHCMajorVersionUnspecified =
    "Error: [S-9317]\n"
    ++ "GHC major version not specified."
  displayException OnlyGHCSupported =
    "Error: [S-8605]\n"
    ++ "Only GHC is supported by 'stack --nix'."

-- | Interprets NixOptsMonoid options.
nixOptsFromMonoid :: (HasRunner env, HasTerm env) => NixOptsMonoid -> OS -> RIO env NixOpts nixOptsFromMonoid nixMonoid os = do let defaultPure = case os of OSX -> False _ -> True pureShell = fromFirst defaultPure nixMonoid.pureShell packages = fromFirst [] nixMonoid.packages initFile = getFirst nixMonoid.initFile shellOptions = fromFirst [] nixMonoid.shellOptions ++ prefixAll (T.pack "-I") (fromFirst [] nixMonoid.path) addGCRoots = fromFirstFalse nixMonoid.addGCRoots -- Enable Nix-mode by default on NixOS, unless Docker-mode was specified osIsNixOS <- isNixOS let nixEnable0 = fromFirst osIsNixOS nixMonoid.enable enable <- if nixEnable0 && osIsWindows then do prettyNoteS "Disabling Nix integration, since this is being run in Windows." pure False else pure nixEnable0 when (not (null packages) && isJust initFile) $ throwIO NixCannotUseShellFileAndPackagesException pure NixOpts { enable , pureShell , packages , initFile , shellOptions , addGCRoots } where prefixAll p (x:xs) = p : x : prefixAll p xs prefixAll _ _ = [] nixCompiler :: WantedCompiler -> Either ConfigNixException T.Text nixCompiler compilerVersion = case compilerVersion of WCGhc version -> case T.split (== '.') (fromString $ versionString version) of x : y : minor -> Right $ case minor of [] -> -- The minor version is not specified. Select the latest minor -- version in Nixpkgs corresponding to the requested major -- version. 
let major = T.concat [x, y] in "(let compilers = builtins.filter \ \(name: builtins.match \ \\"ghc" <> major <> "[[:digit:]]*\" name != null) \ \(lib.attrNames haskell.compiler); in \ \if compilers == [] \ \then abort \"No compiler found for GHC " <> T.pack (versionString version) <> "\"\ \else haskell.compiler.${builtins.head compilers})" _ -> "haskell.compiler.ghc" <> T.concat (x : y : minor) _ -> Left GHCMajorVersionUnspecified WCGhcjs{} -> Left OnlyGHCSupported WCGhcGit{} -> Left OnlyGHCSupported nixCompilerVersion :: WantedCompiler -> Either ConfigNixException T.Text nixCompilerVersion compilerVersion = case compilerVersion of WCGhc version -> case T.split (== '.') (fromString $ versionString version) of x : y : minor -> Right $ "ghc" <> T.concat (x : y : minor) _ -> Left GHCMajorVersionUnspecified WCGhcjs{} -> Left OnlyGHCSupported WCGhcGit{} -> Left OnlyGHCSupported isNixOS :: MonadIO m => m Bool isNixOS = liftIO $ do let fp = "/etc/os-release" ifM (doesFileExist fp) (T.isInfixOf "ID=nixos" <$> TIO.readFile fp) (pure False) stack-2.15.7/src/Stack/ConfigCmd.hs0000644000000000000000000003440214620153445015131 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE OverloadedLists #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Make changes to project or global configuration. module Stack.ConfigCmd ( ConfigCmdSet (..) 
, configCmdSetParser , cfgCmdSet , cfgCmdSetName , configCmdEnvParser , cfgCmdEnv , cfgCmdEnvName , cfgCmdName ) where import qualified Data.Aeson.Key as Key import qualified Data.Aeson.KeyMap as KeyMap import Data.Attoparsec.Text as P ( Parser, parseOnly, skip, skipWhile, string, takeText , takeWhile ) import qualified Data.Map.Merge.Strict as Map import qualified Data.Text as T import qualified Data.Yaml as Yaml import qualified Options.Applicative as OA import Options.Applicative.Builder.Extra import qualified Options.Applicative.Types as OA import Pantry ( loadSnapshot ) import Path ( (), parent ) import qualified RIO.Map as Map import RIO.NonEmpty ( nonEmpty ) import qualified RIO.NonEmpty as NE import RIO.Process ( envVarsL ) import Stack.Config ( makeConcreteResolver, getProjectConfig , getImplicitGlobalProjectDir ) import Stack.Constants ( stackDotYaml ) import Stack.Prelude import Stack.Types.Config ( Config (..), HasConfig (..) ) import Stack.Types.ConfigMonoid ( configMonoidInstallGHCName, configMonoidSystemGHCName ) import Stack.Types.EnvConfig ( EnvConfig ) import Stack.Types.EnvSettings ( EnvSettings (..) ) import Stack.Types.GHCVariant ( HasGHCVariant ) import Stack.Types.GlobalOpts ( GlobalOpts (..) ) import Stack.Types.ProjectConfig ( ProjectConfig (..) ) import Stack.Types.Resolver ( AbstractResolver, readAbstractResolver ) import Stack.Types.Runner ( globalOptsL ) import System.Environment ( getEnvironment ) -- | Type repesenting exceptions thrown by functions exported by the -- "Stack.ConfigCmd" module. data ConfigCmdException = NoProjectConfigAvailable deriving (Show, Typeable) instance Exception ConfigCmdException where displayException NoProjectConfigAvailable = "Error: [S-3136]\n" ++ "'config' command used when no project configuration available." 
data ConfigCmdSet = ConfigCmdSetSnapshot !(Unresolved AbstractResolver) | ConfigCmdSetResolver !(Unresolved AbstractResolver) | ConfigCmdSetSystemGhc !CommandScope !Bool | ConfigCmdSetInstallGhc !CommandScope !Bool | ConfigCmdSetDownloadPrefix !CommandScope !Text data CommandScope = CommandScopeGlobal -- ^ Apply changes to the global configuration, -- typically at @~/.stack/config.yaml@. | CommandScopeProject -- ^ Apply changes to the project @stack.yaml@. configCmdSetScope :: ConfigCmdSet -> CommandScope configCmdSetScope (ConfigCmdSetSnapshot _) = CommandScopeProject configCmdSetScope (ConfigCmdSetResolver _) = CommandScopeProject configCmdSetScope (ConfigCmdSetSystemGhc scope _) = scope configCmdSetScope (ConfigCmdSetInstallGhc scope _) = scope configCmdSetScope (ConfigCmdSetDownloadPrefix scope _) = scope cfgCmdSet :: (HasConfig env, HasGHCVariant env) => ConfigCmdSet -> RIO env () cfgCmdSet cmd = do conf <- view configL configFilePath <- case configCmdSetScope cmd of CommandScopeProject -> do mstackYamlOption <- view $ globalOptsL . to (.stackYaml) mstackYaml <- getProjectConfig mstackYamlOption case mstackYaml of PCProject stackYaml -> pure stackYaml PCGlobalProject -> fmap ( stackDotYaml) (getImplicitGlobalProjectDir conf) PCNoProject _extraDeps -> throwIO NoProjectConfigAvailable -- maybe modify the ~/.stack/config.yaml file instead? CommandScopeGlobal -> pure conf.userConfigPath rawConfig <- liftIO (readFileUtf8 (toFilePath configFilePath)) config <- either throwM pure (Yaml.decodeEither' $ encodeUtf8 rawConfig) newValue <- cfgCmdSetValue (parent configFilePath) cmd let yamlLines = T.lines rawConfig cmdKeys = cfgCmdSetKeys cmd -- Text newValue' = T.stripEnd $ decodeUtf8With lenientDecode $ Yaml.encode newValue -- Text file = toFilePath configFilePath -- String newYamlLines <- case inConfig config cmdKeys of Nothing -> do prettyInfoL [ pretty configFilePath , flow "has been extended." 
] pure $ writeLines yamlLines "" cmdKeys newValue' Just oldValue -> if oldValue == newValue then do prettyInfoL [ pretty configFilePath , flow "already contained the intended configuration and remains \ \unchanged." ] pure yamlLines else switchLine configFilePath (NE.last cmdKeys) newValue' [] yamlLines liftIO $ writeFileUtf8 file (T.unlines newYamlLines) where -- This assumes that if the key does not exist, the lines that can be -- appended to include it are of a form like: -- -- key1: -- key2: -- key3: value -- writeLines yamlLines spaces cmdKeys value = case nonEmpty $ NE.tail cmdKeys of Nothing -> yamlLines <> [spaces <> NE.head cmdKeys <> ": " <> value] Just ks -> writeLines (yamlLines <> [spaces <> NE.head cmdKeys <> ":"]) (spaces <> " ") ks value inConfig v cmdKeys = case v of Yaml.Object obj -> case KeyMap.lookup (Key.fromText (NE.head cmdKeys)) obj of Nothing -> Nothing Just v' -> case nonEmpty $ NE.tail cmdKeys of Nothing -> Just v' Just ks -> inConfig v' ks _ -> Nothing switchLine file cmdKey _ searched [] = do prettyWarnL [ style Current (fromString $ T.unpack cmdKey) , flow "not found in YAML file" , pretty file , flow "as a single line. Multi-line key:value formats are not \ \supported." ] pure $ reverse searched switchLine file cmdKey newValue searched (oldLine:rest) = case parseOnly (parseLine cmdKey) oldLine of Left _ -> switchLine file cmdKey newValue (oldLine:searched) rest Right (kt, spaces1, spaces2, spaces3, comment) -> do let newLine = spaces1 <> renderKey cmdKey kt <> spaces2 <> ":" <> spaces3 <> newValue <> comment prettyInfoL [ pretty file , flow "has been updated." 
] pure $ reverse searched <> (newLine:rest) parseLine :: Text -> Parser (KeyType, Text, Text, Text, Text) parseLine key = do spaces1 <- P.takeWhile (== ' ') kt <- parseKey key spaces2 <- P.takeWhile (== ' ') skip (== ':') spaces3 <- P.takeWhile (== ' ') skipWhile (/= ' ') comment <- takeText pure (kt, spaces1, spaces2, spaces3, comment) -- If the key is, for example, install-ghc, this recognises install-ghc, -- 'install-ghc' or "install-ghc". parseKey :: Text -> Parser KeyType parseKey k = parsePlainKey k <|> parseSingleQuotedKey k <|> parseDoubleQuotedKey k parsePlainKey :: Text -> Parser KeyType parsePlainKey key = do _ <- P.string key pure PlainKey parseSingleQuotedKey :: Text -> Parser KeyType parseSingleQuotedKey = parseQuotedKey SingleQuotedKey '\'' parseDoubleQuotedKey :: Text -> Parser KeyType parseDoubleQuotedKey = parseQuotedKey DoubleQuotedKey '"' parseQuotedKey :: KeyType -> Char -> Text -> Parser KeyType parseQuotedKey kt c key = do skip (==c) _ <- P.string key skip (==c) pure kt renderKey :: Text -> KeyType -> Text renderKey key kt = case kt of PlainKey -> key SingleQuotedKey -> '\'' `T.cons` key `T.snoc` '\'' DoubleQuotedKey -> '"' `T.cons` key `T.snoc` '"' -- |Type representing types of representations of keys in YAML files. 
data KeyType = PlainKey -- ^ For example: install-ghc | SingleQuotedKey -- ^ For example: 'install-ghc' | DoubleQuotedKey -- ^ For example: "install-ghc" deriving (Eq, Show) cfgCmdSetValue :: (HasConfig env, HasGHCVariant env) => Path Abs Dir -- ^ root directory of project -> ConfigCmdSet -> RIO env Yaml.Value cfgCmdSetValue root (ConfigCmdSetSnapshot newSnapshot) = snapshotValue root newSnapshot cfgCmdSetValue root (ConfigCmdSetResolver newSnapshot) = snapshotValue root newSnapshot cfgCmdSetValue _ (ConfigCmdSetSystemGhc _ bool') = pure $ Yaml.Bool bool' cfgCmdSetValue _ (ConfigCmdSetInstallGhc _ bool') = pure $ Yaml.Bool bool' cfgCmdSetValue _ (ConfigCmdSetDownloadPrefix _ url) = pure $ Yaml.String url snapshotValue :: HasConfig env => Path Abs Dir -- ^ root directory of project -> Unresolved AbstractResolver -> RIO env Yaml.Value snapshotValue root snapshot = do snapshot' <- resolvePaths (Just root) snapshot concreteSnapshot <- makeConcreteResolver snapshot' -- Check that the snapshot actually exists void $ loadSnapshot =<< completeSnapshotLocation concreteSnapshot pure (Yaml.toJSON concreteSnapshot) cfgCmdSetKeys :: ConfigCmdSet -> NonEmpty Text cfgCmdSetKeys (ConfigCmdSetSnapshot _) = ["snapshot"] cfgCmdSetKeys (ConfigCmdSetResolver _) = ["resolver"] cfgCmdSetKeys (ConfigCmdSetSystemGhc _ _) = [configMonoidSystemGHCName] cfgCmdSetKeys (ConfigCmdSetInstallGhc _ _) = [configMonoidInstallGHCName] cfgCmdSetKeys (ConfigCmdSetDownloadPrefix _ _) = ["package-index", "download-prefix"] cfgCmdName :: String cfgCmdName = "config" cfgCmdSetName :: String cfgCmdSetName = "set" cfgCmdEnvName :: String cfgCmdEnvName = "env" configCmdSetParser :: OA.Parser ConfigCmdSet configCmdSetParser = OA.hsubparser $ mconcat [ OA.command "snapshot" ( OA.info ( ConfigCmdSetSnapshot <$> OA.argument readAbstractResolver ( OA.metavar "SNAPSHOT" <> OA.help "E.g. \"nightly\" or \"lts-22.8\"" )) ( OA.progDesc "Change the snapshot of the current project." 
)) , OA.command "resolver" ( OA.info ( ConfigCmdSetResolver <$> OA.argument readAbstractResolver ( OA.metavar "SNAPSHOT" <> OA.help "E.g. \"nightly\" or \"lts-22.8\"" )) ( OA.progDesc "Change the resolver key of the current project." )) , OA.command (T.unpack configMonoidSystemGHCName) ( OA.info ( ConfigCmdSetSystemGhc <$> scopeFlag <*> boolArgument ) ( OA.progDesc "Configure whether Stack should use a system GHC \ \installation or not." )) , OA.command (T.unpack configMonoidInstallGHCName) ( OA.info ( ConfigCmdSetInstallGhc <$> scopeFlag <*> boolArgument ) ( OA.progDesc "Configure whether Stack should automatically install \ \GHC when necessary." )) , OA.command "package-index" ( OA.info ( OA.hsubparser $ OA.command "download-prefix" ( OA.info ( ConfigCmdSetDownloadPrefix <$> scopeFlag <*> urlArgument ) ( OA.progDesc "Configure download prefix for Stack's package \ \index." ))) ( OA.progDesc "Configure Stack's package index" )) ] scopeFlag :: OA.Parser CommandScope scopeFlag = OA.flag CommandScopeProject CommandScopeGlobal ( OA.long "global" <> OA.help "Modify the user-specific global configuration file ('config.yaml') \ \instead of the project-level configuration file ('stack.yaml')." ) readBool :: OA.ReadM Bool readBool = do s <- OA.readerAsk case s of "true" -> pure True "false" -> pure False _ -> OA.readerError ("Invalid value " ++ show s ++ ": Expected \"true\" or \"false\"") boolArgument :: OA.Parser Bool boolArgument = OA.argument readBool ( OA.metavar "true|false" <> OA.completeWith ["true", "false"] ) urlArgument :: OA.Parser Text urlArgument = OA.strArgument ( OA.metavar "URL" <> OA.value defaultDownloadPrefix <> OA.showDefault <> OA.help "Location of package index. It is highly recommended to use only the \ \official Hackage server or a mirror." 
) configCmdEnvParser :: OA.Parser EnvSettings configCmdEnvParser = EnvSettings <$> boolFlags True "locals" "include local package information" mempty <*> boolFlags True "ghc-package-path" "set GHC_PACKAGE_PATH environment variable" mempty <*> boolFlags True "stack-exe" "set STACK_EXE environment variable" mempty <*> boolFlags False "locale-utf8" "set the GHC_CHARENC environment variable to UTF-8" mempty <*> boolFlags False "keep-ghc-rts" "keep any GHCRTS environment variable" mempty data EnvVarAction = EVASet !Text | EVAUnset deriving Show cfgCmdEnv :: EnvSettings -> RIO EnvConfig () cfgCmdEnv es = do origEnv <- liftIO $ Map.fromList . map (first fromString) <$> getEnvironment mkPC <- view $ configL . to (.processContextSettings) pc <- liftIO $ mkPC es let newEnv = pc ^. envVarsL actions = Map.merge (pure EVAUnset) (Map.traverseMissing $ \_k new -> pure (EVASet new)) (Map.zipWithMaybeAMatched $ \_k old new -> pure $ if fromString old == new then Nothing else Just (EVASet new)) origEnv newEnv toLine key EVAUnset = "unset " <> encodeUtf8Builder key <> ";\n" toLine key (EVASet value) = encodeUtf8Builder key <> "='" <> encodeUtf8Builder (T.concatMap escape value) <> -- TODO more efficient to use encodeUtf8BuilderEscaped "'; export " <> encodeUtf8Builder key <> ";\n" escape '\'' = "'\"'\"'" escape c = T.singleton c putBuilder $ Map.foldMapWithKey toLine actions stack-2.15.7/src/Stack/Constants.hs0000644000000000000000000005313614620153474015263 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE CPP #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TemplateHaskell #-} -- keep TH usage here -- | Constants used throughout the project. 
module Stack.Constants ( buildPlanDir , buildPlanCacheDir , haskellFileExts , haskellDefaultPreprocessorExts , stackProgName , stackProgName' , nixProgName , stackDotYaml , stackWorkEnvVar , stackRootEnvVar , stackXdgEnvVar , stackRootOptionName , stackGlobalConfigOptionName , pantryRootEnvVar , inContainerEnvVar , inNixShellEnvVar , stackProgNameUpper , wiredInPackages , cabalPackageName , implicitGlobalProjectDirDeprecated , implicitGlobalProjectDir , defaultUserConfigPathDeprecated , defaultUserConfigPath , defaultGlobalConfigPathDeprecated , defaultGlobalConfigPath , platformVariantEnvVar , compilerOptionsCabalFlag , ghcColorForceFlag , minTerminalWidth , maxTerminalWidth , defaultTerminalWidth , osIsMacOS , osIsWindows , relFileSetupHs , relFileSetupLhs , relFileHpackPackageConfig , relDirGlobalAutogen , relDirAutogen , relDirLogs , relFileCabalMacrosH , relDirBuild , relDirBin , relDirGhci , relDirGhciScript , relDirPantry , relDirPrograms , relDirRoot , relDirUpperPrograms , relDirStackProgName , relDirStackWork , relFileReadmeTxt , relDirScript , relDirScripts , relFileConfigYaml , relDirSnapshots , relDirGlobalHints , relFileGlobalHintsYaml , relDirInstall , relDirCompilerTools , relDirHoogle , relFileDatabaseHoo , relDirPkgdb , relFileStorage , relDirLoadedSnapshotCache , bindirSuffix , docDirSuffix , htmlDirSuffix , relDirHpc , relDirLib , relDirShare , relDirLibexec , relDirEtc , setupGhciShimCode , relDirSetupExeCache , relDirSetupExeSrc , relFileConfigure , relDirDist , relFileSetupMacrosH , relDirSetup , relFileSetupLower , relDirMingw , relDirMingw32 , relDirMingw64 , relDirLocal , relDirUsr , relDirInclude , relFileIndexHtml , relDirAll , relFilePackageCache , relFileDockerfile , relFileGhciScript , relDirCombined , relFileHpcIndexHtml , relDirCustom , relDirPackageConfInplace , relDirExtraTixFiles , relDirInstalledPackages , backupUrlRelPath , relDirDotLocal , relDirDotSsh , relDirDotStackProgName , relDirUnderHome , relDirSrc , 
relFileLibcMuslx86_64So1 , relFileLibtinfoSo5 , relFileLibtinfoSo6 , relFileLibncurseswSo6 , relFileLibgmpSo10 , relFileLibgmpSo3 , relDirNewCabal , relFileSetupExe , relFileSetupUpper , relFile7zexe , relFile7zdll , relFileMainHs , relFileStack , relFileStackDotExe , relFileStackDotTmpDotExe , relFileStackDotTmp , ghcShowOptionsOutput , ghcBootScript , ghcConfigureScript , ghcConfigureWindows , ghcConfigureMacOS , ghcConfigurePosix , relDirHadrian , relFileHadrianStackDotYaml , hadrianScriptsWindows , hadrianScriptsPosix , libDirs , usrLibDirs , testGhcEnvRelFile , relFileBuildLock , stackDeveloperModeDefault , isStackUploadDisabled , globalFooter , gitHubBasicAuthType , gitHubTokenEnvVar , altGitHubTokenEnvVar , hackageBaseUrl ) where import Data.ByteString.Builder ( byteString ) import Data.Char ( toUpper ) import Data.FileEmbed ( embedFile, makeRelativeToProject ) import qualified Data.Set as Set import qualified Data.Text as T import Distribution.Package ( mkPackageName ) import Hpack.Config ( packageConfig ) import qualified Language.Haskell.TH.Syntax as TH ( runIO, lift ) import Path ( (), mkRelDir, mkRelFile, parseAbsFile ) import Stack.Constants.StackProgName ( stackProgName ) import Stack.Constants.UsrLibDirs ( libDirs, usrLibDirs ) import Stack.Prelude import Stack.Types.Compiler ( WhichCompiler (..) ) import System.Permissions ( osIsMacOS, osIsWindows ) import System.Process ( readProcess ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Constants" module. data ConstantsException = WiredInPackagesNotParsedBug deriving (Show, Typeable) instance Exception ConstantsException where displayException WiredInPackagesNotParsedBug = bugReport "[S-6057]" "Parse error in wiredInPackages." -- | Name of the Stack program. stackProgName' :: Text stackProgName' = T.pack stackProgName -- | Name of the Nix package manager command nixProgName :: String nixProgName = "nix" -- | Extensions used for Haskell modules. 
Excludes preprocessor ones. haskellFileExts :: [Text] haskellFileExts = ["hs", "hsc", "lhs"] -- | Extensions for modules that are preprocessed by common preprocessors. haskellDefaultPreprocessorExts :: [Text] haskellDefaultPreprocessorExts = ["gc", "chs", "hsc", "x", "y", "ly", "cpphs"] -- | Name of the 'stack' program, uppercased stackProgNameUpper :: String stackProgNameUpper = map toUpper stackProgName -- | The filename used for the Stack project-level configuration file. stackDotYaml :: Path Rel File stackDotYaml = $(mkRelFile "stack.yaml") -- | Environment variable used to override the '.stack-work' relative dir. stackWorkEnvVar :: String stackWorkEnvVar = "STACK_WORK" -- | Environment variable used to override the '~/.stack' location. stackRootEnvVar :: String stackRootEnvVar = "STACK_ROOT" -- | Environment variable used to indicate XDG directories should be used. stackXdgEnvVar :: String stackXdgEnvVar = "STACK_XDG" -- | Option name for the global Stack root. stackRootOptionName :: String stackRootOptionName = "stack-root" -- | Option name for the global Stack configuration file. stackGlobalConfigOptionName :: String stackGlobalConfigOptionName = "global-config" -- | Environment variable used to override the location of the Pantry store pantryRootEnvVar :: String pantryRootEnvVar = "PANTRY_ROOT" -- | Environment variable used to indicate Stack is running in container. inContainerEnvVar :: String inContainerEnvVar = stackProgNameUpper ++ "_IN_CONTAINER" -- | Environment variable used to indicate Stack is running in container. -- although we already have STACK_IN_NIX_EXTRA_ARGS that is set in the same conditions, -- it can happen that STACK_IN_NIX_EXTRA_ARGS is set to empty. inNixShellEnvVar :: String inNixShellEnvVar = map toUpper stackProgName ++ "_IN_NIX_SHELL" -- | The comment to \'see -- https://downloads.haskell.org/~ghc/7.10.1/docs/html/libraries/ghc/src/Module.html#integerPackageKey\' -- appears to be out of date. 
-- -- See \'Note [About units]\' and \'Wired-in units\' at -- https://gitlab.haskell.org/ghc/ghc/-/blob/master/compiler/GHC/Unit.hs. -- -- The \'wired-in packages\' appear to have been replaced by those that have (e.g) -- -- > ghc-options: -this-unit-id ghc-prim -- -- in their Cabal file because they are \'magic\'. wiredInPackages :: Set PackageName wiredInPackages = case mparsed of Just parsed -> Set.fromList parsed Nothing -> impureThrow WiredInPackagesNotParsedBug where mparsed = mapM parsePackageName [ "ghc-prim" -- A magic package , "integer-gmp" -- No longer magic > 1.0.3.0 (GHC >= 9.0) and deprecated in favour of -- ghc-bignum. With GHC 9.6.5 at least, there seems to be no problem in -- using it. , "integer-simple" -- A magic package , "base" -- A magic package , "rts" -- Said to be not a \'real\' package , "template-haskell" -- A magic package , "dph-seq" -- Deprecated in favour of dph-prim-seq, which does not appear to be -- magic. With GHC 9.6.5 at least, there seems to be no problem in using -- it. , "dph-par" -- Deprecated in favour of dph-prim-par, which does not appear to be -- magic. With GHC 9.6.5 at least, there seems to be no problem in using -- it. , "ghc" -- A magic package , "interactive" -- Type and class declarations at the GHCi command prompt are treated as -- if they were defined in modules all sharing a common package -- interactive. See 'Note [The interactive package]' at -- https://gitlab.haskell.org/ghc/ghc/-/blob/master/compiler/GHC/Runtime/Context.hs -- With GHC 9.6.5 at least, there seems to be no problem in using it. , "ghc-bignum" -- A magic package ] -- | Just to avoid repetition and magic strings. cabalPackageName :: PackageName cabalPackageName = mkPackageName "Cabal" -- | Deprecated implicit global project directory used when outside of a project. implicitGlobalProjectDirDeprecated :: Path Abs Dir -- ^ Stack root. 
-> Path Abs Dir implicitGlobalProjectDirDeprecated p = p $(mkRelDir "global") -- | Implicit global project directory used when outside of a project. -- Normally, @getImplicitGlobalProjectDir@ should be used instead. implicitGlobalProjectDir :: Path Abs Dir -- ^ Stack root. -> Path Abs Dir implicitGlobalProjectDir p = p $(mkRelDir "global-project") -- | Deprecated default global config path. defaultUserConfigPathDeprecated :: Path Abs Dir -> Path Abs File defaultUserConfigPathDeprecated = ( $(mkRelFile "stack.yaml")) -- | Default global config path. -- Normally, @getDefaultUserConfigPath@ should be used instead. defaultUserConfigPath :: Path Abs Dir -> Path Abs File defaultUserConfigPath = ( $(mkRelFile "config.yaml")) -- | Deprecated default global config path. -- Note that this will be @Nothing@ on Windows, which is by design. defaultGlobalConfigPathDeprecated :: Maybe (Path Abs File) defaultGlobalConfigPathDeprecated = parseAbsFile "/etc/stack/config" -- | Default global config path. -- Normally, @getDefaultGlobalConfigPath@ should be used instead. -- Note that this will be @Nothing@ on Windows, which is by design. defaultGlobalConfigPath :: Maybe (Path Abs File) defaultGlobalConfigPath = parseAbsFile "/etc/stack/config.yaml" -- | Path where build plans are stored. buildPlanDir :: Path Abs Dir -- ^ Stack root -> Path Abs Dir buildPlanDir = ( $(mkRelDir "build-plan")) -- | Path where binary caches of the build plans are stored. buildPlanCacheDir :: Path Abs Dir -- ^ Stack root -> Path Abs Dir buildPlanCacheDir = ( $(mkRelDir "build-plan-cache")) -- | Environment variable that stores a variant to append to platform-specific directory -- names. 
Used to ensure incompatible binaries aren't shared between Docker builds and host platformVariantEnvVar :: String platformVariantEnvVar = stackProgNameUpper ++ "_PLATFORM_VARIANT" -- | Provides --ghc-options for 'Ghc' compilerOptionsCabalFlag :: WhichCompiler -> String compilerOptionsCabalFlag Ghc = "--ghc-options" -- | The flag to pass to GHC when we want to force its output to be -- colorized. ghcColorForceFlag :: String ghcColorForceFlag = "-fdiagnostics-color=always" -- | The minimum allowed terminal width. Used for pretty-printing. minTerminalWidth :: Int minTerminalWidth = 40 -- | The maximum allowed terminal width. Used for pretty-printing. maxTerminalWidth :: Int maxTerminalWidth = 200 -- | The default terminal width. Used for pretty-printing when we can't -- automatically detect it and when the user doesn't supply one. defaultTerminalWidth :: Int defaultTerminalWidth = 100 relFileSetupHs :: Path Rel File relFileSetupHs = $(mkRelFile "Setup.hs") relFileSetupLhs :: Path Rel File relFileSetupLhs = $(mkRelFile "Setup.lhs") relFileHpackPackageConfig :: Path Rel File relFileHpackPackageConfig = $(mkRelFile packageConfig) relDirGlobalAutogen :: Path Rel Dir relDirGlobalAutogen = $(mkRelDir "global-autogen") relDirAutogen :: Path Rel Dir relDirAutogen = $(mkRelDir "autogen") relDirLogs :: Path Rel Dir relDirLogs = $(mkRelDir "logs") relFileCabalMacrosH :: Path Rel File relFileCabalMacrosH = $(mkRelFile "cabal_macros.h") relDirBuild :: Path Rel Dir relDirBuild = $(mkRelDir "build") relDirBin :: Path Rel Dir relDirBin = $(mkRelDir "bin") relDirGhci :: Path Rel Dir relDirGhci = $(mkRelDir "ghci") relDirGhciScript :: Path Rel Dir relDirGhciScript = $(mkRelDir "ghci-script") relDirPantry :: Path Rel Dir relDirPantry = $(mkRelDir "pantry") relDirPrograms :: Path Rel Dir relDirPrograms = $(mkRelDir "programs") relDirRoot :: Path Rel Dir relDirRoot = $(mkRelDir ".") relDirUpperPrograms :: Path Rel Dir relDirUpperPrograms = $(mkRelDir "Programs") relDirStackProgName :: 
Path Rel Dir relDirStackProgName = $(mkRelDir stackProgName) relDirStackWork :: Path Rel Dir relDirStackWork = $(mkRelDir ".stack-work") relFileReadmeTxt :: Path Rel File relFileReadmeTxt = $(mkRelFile "README.txt") relDirScript :: Path Rel Dir relDirScript = $(mkRelDir "script") relDirScripts :: Path Rel Dir relDirScripts = $(mkRelDir "scripts") relFileConfigYaml :: Path Rel File relFileConfigYaml = $(mkRelFile "config.yaml") relDirSnapshots :: Path Rel Dir relDirSnapshots = $(mkRelDir "snapshots") relDirGlobalHints :: Path Rel Dir relDirGlobalHints = $(mkRelDir "global-hints") relFileGlobalHintsYaml :: Path Rel File relFileGlobalHintsYaml = $(mkRelFile "global-hints.yaml") relDirInstall :: Path Rel Dir relDirInstall = $(mkRelDir "install") relDirCompilerTools :: Path Rel Dir relDirCompilerTools = $(mkRelDir "compiler-tools") relDirHoogle :: Path Rel Dir relDirHoogle = $(mkRelDir "hoogle") relFileDatabaseHoo :: Path Rel File relFileDatabaseHoo = $(mkRelFile "database.hoo") relDirPkgdb :: Path Rel Dir relDirPkgdb = $(mkRelDir "pkgdb") relFileStorage :: Path Rel File relFileStorage = $(mkRelFile "stack.sqlite3") relDirLoadedSnapshotCache :: Path Rel Dir relDirLoadedSnapshotCache = $(mkRelDir "loaded-snapshot-cached") -- | Suffix applied to an installation root to get the bin dir bindirSuffix :: Path Rel Dir bindirSuffix = relDirBin -- | Suffix applied to an installation root to get the doc dir docDirSuffix :: Path Rel Dir docDirSuffix = $(mkRelDir "doc") -- | Suffix applied to a path to get the @html@ directory. 
htmlDirSuffix :: Path Rel Dir htmlDirSuffix = $(mkRelDir "html") relDirHpc :: Path Rel Dir relDirHpc = $(mkRelDir "hpc") relDirLib :: Path Rel Dir relDirLib = $(mkRelDir "lib") relDirShare :: Path Rel Dir relDirShare = $(mkRelDir "share") relDirLibexec :: Path Rel Dir relDirLibexec = $(mkRelDir "libexec") relDirEtc :: Path Rel Dir relDirEtc = $(mkRelDir "etc") setupGhciShimCode :: Builder setupGhciShimCode = byteString $(do path <- makeRelativeToProject "src/setup-shim/StackSetupShim.hs" embedFile path) relDirSetupExeCache :: Path Rel Dir relDirSetupExeCache = $(mkRelDir "setup-exe-cache") relDirSetupExeSrc :: Path Rel Dir relDirSetupExeSrc = $(mkRelDir "setup-exe-src") relFileConfigure :: Path Rel File relFileConfigure = $(mkRelFile "configure") relDirDist :: Path Rel Dir relDirDist = $(mkRelDir "dist") relFileSetupMacrosH :: Path Rel File relFileSetupMacrosH = $(mkRelFile "setup_macros.h") relDirSetup :: Path Rel Dir relDirSetup = $(mkRelDir "setup") relFileSetupLower :: Path Rel File relFileSetupLower = $(mkRelFile "setup") relDirMingw :: Path Rel Dir relDirMingw = $(mkRelDir "mingw") relDirMingw32 :: Path Rel Dir relDirMingw32 = $(mkRelDir "mingw32") relDirMingw64 :: Path Rel Dir relDirMingw64 = $(mkRelDir "mingw64") relDirLocal :: Path Rel Dir relDirLocal = $(mkRelDir "local") relDirUsr :: Path Rel Dir relDirUsr = $(mkRelDir "usr") relDirInclude :: Path Rel Dir relDirInclude = $(mkRelDir "include") relFileIndexHtml :: Path Rel File relFileIndexHtml = $(mkRelFile "index.html") relDirAll :: Path Rel Dir relDirAll = $(mkRelDir "all") relFilePackageCache :: Path Rel File relFilePackageCache = $(mkRelFile "package.cache") relFileDockerfile :: Path Rel File relFileDockerfile = $(mkRelFile "Dockerfile") relFileGhciScript :: Path Rel File relFileGhciScript = $(mkRelFile "ghci-script") relDirCombined :: Path Rel Dir relDirCombined = $(mkRelDir "combined") relFileHpcIndexHtml :: Path Rel File relFileHpcIndexHtml = $(mkRelFile "hpc_index.html") relDirCustom :: Path Rel 
Dir relDirCustom = $(mkRelDir "custom") relDirPackageConfInplace :: Path Rel Dir relDirPackageConfInplace = $(mkRelDir "package.conf.inplace") relDirExtraTixFiles :: Path Rel Dir relDirExtraTixFiles = $(mkRelDir "extra-tix-files") relDirInstalledPackages :: Path Rel Dir relDirInstalledPackages = $(mkRelDir "installed-packages") backupUrlRelPath :: Path Rel File backupUrlRelPath = $(mkRelFile "downloaded.template.file.hsfiles") relDirDotLocal :: Path Rel Dir relDirDotLocal = $(mkRelDir ".local") relDirDotSsh :: Path Rel Dir relDirDotSsh = $(mkRelDir ".ssh") relDirDotStackProgName :: Path Rel Dir relDirDotStackProgName = $(mkRelDir ('.' : stackProgName)) relDirUnderHome :: Path Rel Dir relDirUnderHome = $(mkRelDir "_home") relDirSrc :: Path Rel Dir relDirSrc = $(mkRelDir "src") relFileLibcMuslx86_64So1 :: Path Rel File relFileLibcMuslx86_64So1 = $(mkRelFile "libc.musl-x86_64.so.1") relFileLibtinfoSo5 :: Path Rel File relFileLibtinfoSo5 = $(mkRelFile "libtinfo.so.5") relFileLibtinfoSo6 :: Path Rel File relFileLibtinfoSo6 = $(mkRelFile "libtinfo.so.6") relFileLibncurseswSo6 :: Path Rel File relFileLibncurseswSo6 = $(mkRelFile "libncursesw.so.6") relFileLibgmpSo10 :: Path Rel File relFileLibgmpSo10 = $(mkRelFile "libgmp.so.10") relFileLibgmpSo3 :: Path Rel File relFileLibgmpSo3 = $(mkRelFile "libgmp.so.3") relDirNewCabal :: Path Rel Dir relDirNewCabal = $(mkRelDir "new-cabal") relFileSetupExe :: Path Rel File relFileSetupExe = $(mkRelFile "Setup.exe") relFileSetupUpper :: Path Rel File relFileSetupUpper = $(mkRelFile "Setup") relFile7zexe :: Path Rel File relFile7zexe = $(mkRelFile "7z.exe") relFile7zdll :: Path Rel File relFile7zdll = $(mkRelFile "7z.dll") relFileMainHs :: Path Rel File relFileMainHs = $(mkRelFile "Main.hs") relFileStackDotExe :: Path Rel File relFileStackDotExe = $(mkRelFile "stack.exe") relFileStackDotTmpDotExe :: Path Rel File relFileStackDotTmpDotExe = $(mkRelFile "stack.tmp.exe") relFileStackDotTmp :: Path Rel File relFileStackDotTmp = $(mkRelFile 
"stack.tmp") relFileStack :: Path Rel File relFileStack = $(mkRelFile "stack") -- Technically, we should be consulting the user's current ghc, -- but that would require loading up a BuildConfig. ghcShowOptionsOutput :: [String] ghcShowOptionsOutput = $(TH.runIO (readProcess "ghc" ["--show-options"] "") >>= TH.lift . lines) -- | Relative paths inside a GHC repo to the boot script. ghcBootScript :: Path Rel File ghcBootScript = $(mkRelFile "boot") -- | Relative paths inside a GHC repo to the configure script. ghcConfigureScript :: Path Rel File ghcConfigureScript = $(mkRelFile "configure") -- | Command applicable to GHC's configure script on Windows. See: -- https://gitlab.haskell.org/ghc/ghc/-/blob/master/hadrian/README.md ghcConfigureWindows :: [String] ghcConfigureWindows = ["sh", "configure", "--enable-tarballs-autodownload"] -- | Command applicable to GHC's configure script on macOS. See: -- https://gitlab.haskell.org/ghc/ghc/-/blob/master/hadrian/README.md ghcConfigureMacOS :: [String] ghcConfigureMacOS = ["./configure", "--with-intree-gmp"] -- | Command applicable to GHC's configure script on non-Windows, non-macOS. -- See: https://gitlab.haskell.org/ghc/ghc/-/blob/master/hadrian/README.md ghcConfigurePosix :: [String] ghcConfigurePosix = ["./configure"] relDirHadrian :: Path Rel Dir relDirHadrian = $(mkRelDir "hadrian") relFileHadrianStackDotYaml :: Path Rel File relFileHadrianStackDotYaml = relDirHadrian stackDotYaml -- | Relative paths inside a GHC repo to the Hadrian build batch script. -- The second path is maintained for compatibility with older GHC versions. hadrianScriptsWindows :: [Path Rel File] hadrianScriptsWindows = [ $(mkRelFile "hadrian/build-stack.bat") , $(mkRelFile "hadrian/build.stack.bat") ] -- | Relative paths inside a GHC repo to the Hadrian build shell script -- The second path is maintained for compatibility with older GHC versions. 
hadrianScriptsPosix :: [Path Rel File]
hadrianScriptsPosix =
  [$(mkRelFile "hadrian/build-stack"), $(mkRelFile "hadrian/build.stack.sh")]

-- | Relative file path for a temporary GHC environment file for tests
testGhcEnvRelFile :: Path Rel File
testGhcEnvRelFile = $(mkRelFile "test-ghc-env")

-- | File inside a dist directory to use for locking
relFileBuildLock :: Path Rel File
relFileBuildLock = $(mkRelFile "build-lock")

-- | What should the default be for stack-developer-mode
-- (Value supplied at compile time via the STACK_DEVELOPER_MODE_DEFAULT CPP
-- macro.)
stackDeveloperModeDefault :: Bool
stackDeveloperModeDefault = STACK_DEVELOPER_MODE_DEFAULT

-- | Whether Stack's upload functionality is disabled. (Value supplied at
-- compile time via the STACK_DISABLE_STACK_UPLOAD CPP macro.)
isStackUploadDisabled :: Bool
isStackUploadDisabled = STACK_DISABLE_STACK_UPLOAD

-- | The footer to the help for Stack's subcommands
globalFooter :: String
globalFooter =
  "Command 'stack --help' for global options that apply to all subcommands."

-- | The type for GitHub REST API HTTP \'Basic\' authentication.
-- NOTE(review): the value is \"Bearer\" although the name and comment say
-- \'Basic\' — GitHub accepts both schemes for token authentication, but
-- confirm this mismatch is intentional.
gitHubBasicAuthType :: ByteString
gitHubBasicAuthType = "Bearer"

-- | Environment variable to hold credentials for GitHub REST API HTTP \'Basic\'
-- authentication.
gitHubTokenEnvVar :: String
gitHubTokenEnvVar = "GH_TOKEN"

-- | Alternate environment variable to hold credentials for GitHub REST API HTTP
-- \'Basic\' authentication.
altGitHubTokenEnvVar :: String altGitHubTokenEnvVar = "GITHUB_TOKEN" hackageBaseUrl :: Text hackageBaseUrl = "https://hackage.haskell.org/" stack-2.15.7/src/Stack/Constants/Config.hs0000644000000000000000000001527714502056213016464 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TemplateHaskell #-} module Stack.Constants.Config ( buildCachesDir , configCabalMod , configPackageProjectRoot , configSetupConfigMod , distDirFromDir , distRelativeDir , ghciDirL , hpcDirFromDir , hpcRelativeDir , imageStagingDir , objectInterfaceDirL , projectDockerSandboxDir , rootDistDirFromDir , setupConfigFromDir , templatesDir , testBuiltFile , testSuccessFile , workDirFromDir ) where import Path ( (), mkRelDir, mkRelFile, parseRelDir ) import Stack.Constants ( relDirDist, relDirGhci, relDirHpc ) import Stack.Prelude import Stack.Types.BuildConfig ( HasBuildConfig, projectRootL ) import Stack.Types.Compiler ( compilerVersionString ) import Stack.Types.CompilerPaths ( compilerVersionL ) import Stack.Types.Config ( Config, HasConfig, stackRootL, workDirL ) import Stack.Types.EnvConfig ( HasEnvConfig, platformGhcRelDir, useShaPathOnWindows ) -- | Output .o/.hi directory. objectInterfaceDirL :: HasBuildConfig env => Getting r env (Path Abs Dir) objectInterfaceDirL = to $ \env -> -- FIXME is this idiomatic lens code? let workDir = view workDirL env root = view projectRootL env in root workDir $(mkRelDir "odir/") -- | GHCi files directory. ghciDirL :: HasBuildConfig env => Getting r env (Path Abs Dir) ghciDirL = to $ \env -> -- FIXME is this idiomatic lens code? let workDir = view workDirL env root = view projectRootL env in root workDir relDirGhci -- | The directory containing the files used for dirtiness check of source -- files. buildCachesDir :: (HasEnvConfig env, MonadReader env m, MonadThrow m) => Path Abs Dir -- ^ Package directory. 
  -> m (Path Abs Dir)
-- Caches live under the package's dist dir so they are invalidated together
-- with the build artifacts for a given platform/compiler combination.
buildCachesDir dir =
  fmap (</> $(mkRelDir "stack-build-caches")) (distDirFromDir dir)

-- | The filename used to mark tests as having succeeded.
testSuccessFile ::
     (HasEnvConfig env, MonadReader env m, MonadThrow m)
  => Path Abs Dir -- ^ Package directory
  -> m (Path Abs File)
testSuccessFile dir =
  fmap (</> $(mkRelFile "stack-test-success")) (distDirFromDir dir)

-- | The filename used to mark tests as having built.
testBuiltFile ::
     (HasEnvConfig env, MonadReader env m, MonadThrow m)
  => Path Abs Dir -- ^ Package directory
  -> m (Path Abs File)
testBuiltFile dir =
  fmap (</> $(mkRelFile "stack-test-built")) (distDirFromDir dir)

-- | The filename used for modification check of a Cabal file.
configCabalMod ::
     (HasEnvConfig env, MonadReader env m, MonadThrow m)
  => Path Abs Dir -- ^ Package directory.
  -> m (Path Abs File)
configCabalMod dir =
  fmap (</> $(mkRelFile "stack-cabal-mod")) (distDirFromDir dir)

-- | The filename used for modification check of setup-config.
configSetupConfigMod ::
     (HasEnvConfig env, MonadReader env m, MonadThrow m)
  => Path Abs Dir -- ^ Package directory.
  -> m (Path Abs File)
configSetupConfigMod dir =
  fmap (</> $(mkRelFile "stack-setup-config-mod")) (distDirFromDir dir)

-- | The filename used for the project root from the last build of a package.
configPackageProjectRoot ::
     (HasEnvConfig env, MonadReader env m, MonadThrow m)
  => Path Abs Dir -- ^ Package directory.
  -> m (Path Abs File)
configPackageProjectRoot dir =
  fmap (</> $(mkRelFile "stack-project-root")) (distDirFromDir dir)

-- | Directory for HPC work.
hpcDirFromDir ::
     (HasEnvConfig env, MonadReader env m, MonadThrow m)
  => Path Abs Dir -- ^ Package directory.
  -> m (Path Abs Dir)
hpcDirFromDir fp = fmap (fp </>) hpcRelativeDir

-- | Relative location of directory for HPC work.
hpcRelativeDir ::
     (HasEnvConfig env, MonadReader env m, MonadThrow m)
  => m (Path Rel Dir)
hpcRelativeDir = fmap (</> relDirHpc) distRelativeDir

-- | Package's setup-config storing Cabal configuration.
setupConfigFromDir ::
     (HasEnvConfig env, MonadReader env m, MonadThrow m)
  => Path Abs Dir
  -> m (Path Abs File)
-- The setup-config file sits directly inside the package's dist directory.
setupConfigFromDir pkgDir =
  fmap (</> $(mkRelFile "setup-config")) (distDirFromDir pkgDir)

-- | Package's build artifacts directory.
distDirFromDir ::
     (HasEnvConfig env, MonadReader env m, MonadThrow m)
  => Path Abs Dir
  -> m (Path Abs Dir)
distDirFromDir pkgDir = (pkgDir </>) <$> distRelativeDir

-- | The directory containing all dist directories, including all
-- different platform/compiler combinations.
rootDistDirFromDir ::
     (HasConfig env, MonadReader env m)
  => Path Abs Dir
  -> m (Path Abs Dir)
rootDistDirFromDir pkgDir = (pkgDir </>) <$> rootDistRelativeDir

-- | Relative directory to the top dist directory, containing
-- individual platform/compiler combinations as subdirs.
rootDistRelativeDir ::
     (HasConfig env, MonadReader env m)
  => m (Path Rel Dir)
rootDistRelativeDir = (</> relDirDist) <$> view workDirL

-- | Package's working directory.
workDirFromDir ::
     (HasConfig env, MonadReader env m)
  => Path Abs Dir
  -> m (Path Abs Dir)
workDirFromDir pkgDir = view $ workDirL . to (pkgDir </>)

-- | Directory for project templates.
templatesDir :: Config -> Path Abs Dir
templatesDir config = view stackRootL config </> $(mkRelDir "templates")

-- | Relative location of build artifacts.
distRelativeDir ::
     (HasEnvConfig env, MonadReader env m, MonadThrow m)
  => m (Path Rel Dir)
distRelativeDir = do
  compilerVersion <- view compilerVersionL
  platformRelDir <- platformGhcRelDir
  -- Including the compiler version in the path distinguishes build artefacts
  -- by compiler version, which also distinguishes one Cabal version from
  -- another.
  compilerRelDir <- parseRelDir $ compilerVersionString compilerVersion
  -- On Windows the combined path is shortened via a SHA to stay within path
  -- length limits.
  envRelDir <- useShaPathOnWindows (platformRelDir </> compilerRelDir)
  topDistRelDir <- rootDistRelativeDir
  pure $ topDistRelDir </> envRelDir

-- | Docker sandbox from project root.
projectDockerSandboxDir :: (HasConfig env, MonadReader env m) => Path Abs Dir -- ^ Project root -> m (Path Abs Dir) -- ^ Docker sandbox projectDockerSandboxDir projectRoot = do workDir <- view workDirL pure $ projectRoot workDir $(mkRelDir "docker/") -- | Image staging dir from project root. imageStagingDir :: (HasConfig env, MonadReader env m, MonadThrow m) => Path Abs Dir -- ^ Project root -> Int -- ^ Index of image -> m (Path Abs Dir) -- ^ Docker sandbox imageStagingDir projectRoot imageIdx = do workDir <- view workDirL idxRelDir <- parseRelDir (show imageIdx) pure $ projectRoot workDir $(mkRelDir "image") idxRelDir stack-2.15.7/src/Stack/Constants/StackProgName.hs0000644000000000000000000000056414445120723017752 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Constants.StackProgName ( stackProgName ) where import Stack.Prelude ( String ) -- | Name of the Stack program. -- NOTE: Defined in this module rather than in "Stack.Constants", due to -- GHC stage restrictions and the use of Template Haskell. stackProgName :: String stackProgName = "stack" stack-2.15.7/src/Stack/Coverage.hs0000644000000000000000000006600514620153445015037 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Generate HPC (Haskell Program Coverage) reports module Stack.Coverage ( HpcReportOpts (..) 
, hpcReportCmd , deleteHpcReports , updateTixFile , generateHpcReport , generateHpcReportForTargets , generateHpcUnifiedReport , generateHpcMarkupIndex ) where import qualified Data.ByteString.Lazy.Char8 as L8 import qualified Data.List as L import qualified Data.Map.Strict as Map import qualified Data.Set as Set import qualified Data.Text as T import qualified Data.Text.Lazy as LT import Distribution.Version ( mkVersion ) import Path ( (), dirname, filename, parent, parseAbsFile, parseRelDir , parseRelFile, stripProperPrefix ) import Path.Extra ( toFilePathNoTrailingSep ) import Path.IO ( copyFile, doesDirExist, doesFileExist, ensureDir , ignoringAbsence, listDir, removeDirRecur, removeFile , resolveDir', resolveFile' ) import RIO.ByteString.Lazy ( putStrLn ) import RIO.Process ( ProcessException, proc, readProcess_ ) import Stack.Build.Target ( NeedTargets (..) ) import Stack.Constants ( relDirAll, relDirCombined, relDirCustom , relDirExtraTixFiles, relDirPackageConfInplace , relFileHpcIndexHtml, relFileIndexHtml ) import Stack.Constants.Config ( distDirFromDir, hpcRelativeDir ) import Stack.Package ( hasBuildableMainLibrary ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig, withEnvConfig ) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..) ) import Stack.Types.Compiler ( getGhcVersion ) import Stack.Types.CompCollection ( getBuildableSetText ) import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..), defaultBuildOptsCLI ) import Stack.Types.EnvConfig ( EnvConfig (..), HasEnvConfig (..), actualCompilerVersionL , hpcReportDir ) import Stack.Types.NamedComponent ( NamedComponent (..) ) import Stack.Types.Package ( Package (..), packageIdentifier ) import Stack.Types.Runner ( Runner ) import Stack.Types.SourceMap ( PackageType (..), SMTargets (..), SMWanted (..) 
, SourceMap (..), Target (..), ppRoot ) import System.FilePath ( isPathSeparator ) import Trace.Hpc.Tix ( Tix (..), TixModule (..), readTix, writeTix ) import Web.Browser ( openBrowser ) -- | Type representing \'pretty\' exceptions thrown by functions exported by the -- "Stack.Coverage" module. data CoveragePrettyException = NonTestSuiteTarget PackageName | NoTargetsOrTixSpecified | NotLocalPackage PackageName deriving (Show, Typeable) instance Pretty CoveragePrettyException where pretty (NonTestSuiteTarget name) = "[S-6361]" <> line <> fillSep [ flow "Can't specify anything except test-suites as hpc report \ \targets" , parens (style Target . fromPackageName $ name) , flow "is used with a non test-suite target." ] pretty NoTargetsOrTixSpecified = "[S-2321]" <> line <> flow "Not generating combined report, because no targets or tix files \ \are specified." pretty (NotLocalPackage name) = "[S-9975]" <> line <> fillSep [ flow "Expected a local package, but" , style Target . fromPackageName $ name , flow "is either an extra-dep or in the snapshot." ] instance Exception CoveragePrettyException -- | Type representing command line options for the @stack hpc report@ command. data HpcReportOpts = HpcReportOpts { inputs :: [Text] , all :: Bool , destDir :: Maybe String , openBrowser :: Bool } deriving Show -- | Function underlying the @stack hpc report@ command. 
hpcReportCmd :: HpcReportOpts -> RIO Runner ()
-- Splits the command-line inputs into tix files and target names, then builds
-- (if needed) and generates the report for those targets.
hpcReportCmd opts =
  withConfig YesReexec $
    withEnvConfig AllowNoTargets boptsCLI $
      generateHpcReportForTargets opts tixArgs targetArgs
 where
  (tixArgs, targetArgs) = L.partition (".tix" `T.isSuffixOf`) opts.inputs
  boptsCLI = defaultBuildOptsCLI
    { targetsCLI = if opts.all then [] else targetArgs }

-- | Invoked at the beginning of running with "--coverage"
deleteHpcReports :: HasEnvConfig env => RIO env ()
-- Silently succeeds if the report directory does not exist.
deleteHpcReports =
  hpcReportDir >>= liftIO . ignoringAbsence . removeDirRecur

-- | Move a tix file into a sub-directory of the hpc report directory. Deletes
-- the old one if one is present.
updateTixFile ::
     HasEnvConfig env
  => PackageName
  -> Path Abs File
  -> String
  -> RIO env ()
updateTixFile pkgName' tixSrc testName = do
  srcExists <- doesFileExist tixSrc
  when srcExists $ do
    destination <- tixFilePath pkgName' testName
    liftIO $ ignoringAbsence (removeFile destination)
    ensureDir (parent destination)
    -- Remove exe modules because they are problematic. This could be
    -- revisited if there's a GHC version that fixes
    -- https://ghc.haskell.org/trac/ghc/ticket/1853
    readTixOrLog tixSrc >>= \case
      Nothing -> prettyError $
           "[S-2887]"
        <> line
        <> fillSep
             [ flow "Failed to read"
             , pretty tixSrc <> "."
             ]
      Just tix -> do
        liftIO $ writeTix (toFilePath destination) (removeExeModules tix)
        -- TODO: ideally we'd do a file move, but IIRC this can
        -- have problems. Something about moving between drives
        -- on windows?
        copyFile tixSrc =<< parseAbsFile (toFilePath destination ++ ".premunging")
        liftIO $ ignoringAbsence (removeFile tixSrc)

-- | Get the directory used for hpc reports for the given pkgId.
hpcPkgPath :: HasEnvConfig env => PackageName -> RIO env (Path Abs Dir)
-- Report directory plus a sub-directory named after the package.
hpcPkgPath pkgName' =
  (</>) <$> hpcReportDir <*> parseRelDir (packageNameString pkgName')

-- | Get the tix file location, given the name of the file (without extension),
-- and the package identifier string.
tixFilePath ::
     HasEnvConfig env
  => PackageName
  -> String
     -- ^ Test-suite name; the tix lives at @<pkg>/<test>/<test>.tix@.
  -> RIO env (Path Abs File)
tixFilePath pkgName' testName = do
  pkgPath <- hpcPkgPath pkgName'
  tixRel <- parseRelFile (testName ++ "/" ++ testName ++ ".tix")
  -- NOTE(review): the </> path-append operator was missing here in the
  -- extracted text (archive mangling); restored.
  pure (pkgPath </> tixRel)

-- | Generates the HTML coverage report and shows a textual coverage summary for a package.
generateHpcReport ::
     HasEnvConfig env
  => Path Abs Dir
  -> Package
  -> [Text]
     -- ^ Names of the test-suites for which tix files are expected.
  -> RIO env ()
generateHpcReport pkgDir package tests = do
  compilerVersion <- view actualCompilerVersionL
  -- If we're using > GHC 7.10, the hpc 'include' parameter must specify a ghc package key. See
  -- https://github.com/commercialhaskell/stack/issues/785
  let pkgId = packageIdentifierString $ packageIdentifier package
      pkgName' = packageNameString package.name
      ghcVersion = getGhcVersion compilerVersion
      hasLibrary = hasBuildableMainLibrary package
      subLibs = package.subLibraries
  eincludeName <-
    -- Pre-7.8 uses plain PKG-version in tix files.
    if ghcVersion < mkVersion [7, 10]
      then pure $ Right $ Just [pkgId]
      -- We don't expect to find a package key if there is no library.
      else if not hasLibrary && null subLibs
        then pure $ Right Nothing
        -- Look in the inplace DB for the package key.
        -- See https://github.com/commercialhaskell/stack/issues/1181#issuecomment-148968986
        else do
          -- GHC 8.0 uses package id instead of package key.
-- See https://github.com/commercialhaskell/stack/issues/2424 let hpcNameField = if ghcVersion >= mkVersion [8, 0] then "id" else "key" eincludeName <- findPackageFieldForBuiltPackage pkgDir (packageIdentifier package) (getBuildableSetText subLibs) hpcNameField case eincludeName of Left err -> do logError $ display err pure $ Left err Right includeNames -> pure $ Right $ Just $ map T.unpack includeNames forM_ tests $ \testName -> do tixSrc <- tixFilePath package.name (T.unpack testName) let report = fillSep [ flow "coverage report for" , style Current (fromString pkgName') <> "'s" , "test-suite" , style PkgComponent (fromString $ T.unpack testName) ] reportHtml = "coverage report for" <> T.pack pkgName' <> "'s test-suite \"" <> testName <> "\"" reportDir = parent tixSrc case eincludeName of Left err -> generateHpcErrorReport reportDir (display (sanitize (T.unpack err))) -- Restrict to just the current library code, if there is a library in the package (see -- #634 - this will likely be customizable in the future) Right mincludeName -> do let extraArgs = case mincludeName of Nothing -> [] Just includeNames -> "--include" : L.intersperse "--include" (map (++ ":") includeNames) mreportPath <- generateHpcReportInternal tixSrc reportDir report reportHtml extraArgs extraArgs forM_ mreportPath (displayReportPath "The" report . pretty) generateHpcReportInternal :: HasEnvConfig env => Path Abs File -> Path Abs Dir -> StyleDoc -- ^ The pretty name for the report -> Text -- ^ The plain name for the report, used in HTML output -> [String] -> [String] -> RIO env (Maybe (Path Abs File)) generateHpcReportInternal tixSrc reportDir report reportHtml extraMarkupArgs extraReportArgs = do -- If a .tix file exists, move it to the HPC output directory and generate -- a report for it. 
tixFileExists <- doesFileExist tixSrc if not tixFileExists then do prettyError $ "[S-4634]" <> line <> flow "Didn't find" <> style File ".tix" <> "for" <> report <> flow "- expected to find it at" <> pretty tixSrc <> "." pure Nothing else (`catch` \(err :: ProcessException) -> do logError $ displayShow err generateHpcErrorReport reportDir $ display $ sanitize $ displayException err pure Nothing) $ (`onException` prettyError ( "[S-8215]" <> line <> flow "Error occurred while producing" <> report <> "." )) $ do -- Directories for .mix files. hpcRelDir <- hpcRelativeDir -- Compute arguments used for both "hpc markup" and "hpc report". pkgDirs <- view $ buildConfigL . to (map ppRoot . Map.elems . (.smWanted.project)) let args = -- Use index files from all packages (allows cross-package -- coverage results). concatMap (\x -> ["--srcdir", toFilePathNoTrailingSep x]) pkgDirs ++ -- Look for index files in the correct dir (relative to each pkgdir). ["--hpcdir", toFilePathNoTrailingSep hpcRelDir, "--reset-hpcdirs"] prettyInfoL [ "Generating" , report <> "." ] -- Strip @\r@ characters because Windows. outputLines <- map (L8.filter (/= '\r')) . L8.lines . fst <$> proc "hpc" ( "report" : toFilePath tixSrc : (args ++ extraReportArgs) ) readProcess_ if all ("(0/0)" `L8.isSuffixOf`) outputLines then do let msgHtml = "Error: [S-6829]\n\ \The " <> display reportHtml <> " did not consider any code. One possible cause of this is \ \if your test-suite builds the library code (see Stack \ \\ \issue #1008\ \\ \). It may also indicate a bug in Stack or the hpc program. \ \Please report this issue if you think your coverage report \ \should have meaningful results." prettyError $ "[S-6829]" <> line <> fillSep [ "The" , report , flow "did not consider any code. One possible cause of this \ \is if your test-suite builds the library code (see \ \Stack issue #1008). It may also indicate a bug in \ \Stack or the hpc program. 
Please report this issue if \ \you think your coverage report should have meaningful \ \results." ] generateHpcErrorReport reportDir msgHtml pure Nothing else do let reportPath = reportDir relFileHpcIndexHtml -- Print the summary report to the standard output stream. putUtf8Builder =<< displayWithColor ( fillSep [ "Summary" , report <> ":" ] <> line ) forM_ outputLines putStrLn -- Generate the HTML markup. void $ proc "hpc" ( "markup" : toFilePath tixSrc : ("--destdir=" ++ toFilePathNoTrailingSep reportDir) : (args ++ extraMarkupArgs) ) readProcess_ pure (Just reportPath) generateHpcReportForTargets :: HasEnvConfig env => HpcReportOpts -> [Text] -> [Text] -> RIO env () generateHpcReportForTargets opts tixFiles targetNames = do targetTixFiles <- -- When there aren't any package component arguments, and --all -- isn't passed, default to not considering any targets. if not opts.all && null targetNames then pure [] else do when (opts.all && not (null targetNames)) $ prettyWarnL $ "Since" : style Shell "--all" : flow "is used, it is redundant to specify these targets:" : mkNarrativeList (Just Target) False (map (fromString . T.unpack) targetNames :: [StyleDoc]) targets <- view $ envConfigL . to (.sourceMap.targets.targets) fmap concat $ forM (Map.toList targets) $ \(name, target) -> case target of TargetAll PTDependency -> prettyThrowIO $ NotLocalPackage name TargetComps comps -> do pkgPath <- hpcPkgPath name forM (toList comps) $ \case CTest testName -> (pkgPath ) <$> parseRelFile ( T.unpack testName ++ "/" ++ T.unpack testName ++ ".tix" ) _ -> prettyThrowIO $ NonTestSuiteTarget name TargetAll PTProject -> do pkgPath <- hpcPkgPath name exists <- doesDirExist pkgPath if exists then do (dirs, _) <- listDir pkgPath fmap concat $ forM dirs $ \dir -> do (_, files) <- listDir dir pure (filter ((".tix" `L.isSuffixOf`) . toFilePath) files) else pure [] tixPaths <- (++ targetTixFiles) <$> mapM (resolveFile' . 
T.unpack) tixFiles when (null tixPaths) $ prettyThrowIO NoTargetsOrTixSpecified outputDir <- hpcReportDir reportDir <- case opts.destDir of Nothing -> pure (outputDir relDirCombined relDirCustom) Just destDir -> do dest <- resolveDir' destDir ensureDir dest pure dest let report = flow "combined coverage report" reportHtml = "combined coverage report" mreportPath <- generateUnionReport report reportHtml reportDir tixPaths forM_ mreportPath $ \reportPath -> if opts.openBrowser then do prettyInfo $ "Opening" <+> pretty reportPath <+> "in the browser." void $ liftIO $ openBrowser (toFilePath reportPath) else displayReportPath "The" report (pretty reportPath) generateHpcUnifiedReport :: HasEnvConfig env => RIO env () generateHpcUnifiedReport = do outputDir <- hpcReportDir ensureDir outputDir (dirs, _) <- listDir outputDir tixFiles0 <- fmap (concat . concat) $ forM (filter (("combined" /=) . dirnameString) dirs) $ \dir -> do (dirs', _) <- listDir dir forM dirs' $ \dir' -> do (_, files) <- listDir dir' pure (filter ((".tix" `L.isSuffixOf`) . toFilePath) files) extraTixFiles <- findExtraTixFiles let tixFiles = tixFiles0 ++ extraTixFiles reportDir = outputDir relDirCombined relDirAll -- A single *.tix file does not necessarily mean that a unified coverage report -- is redundant. For example, one package may test the library of another -- package that does not test its own library. See -- https://github.com/commercialhaskell/stack/issues/5713 -- -- As an interim solution, a unified coverage report will always be produced -- even if may be redundant in some circumstances. if null tixFiles then prettyInfoL [ flow "No tix files found in" , pretty outputDir <> "," , flow "so not generating a unified coverage report." ] else do let report = flow "unified coverage report" reportHtml = "unified coverage report" mreportPath <- generateUnionReport report reportHtml reportDir tixFiles forM_ mreportPath (displayReportPath "The" report . 
pretty)

-- | Produce a single report that is the union of the given tix files,
-- writing the merged tix next to the report before invoking hpc.
generateUnionReport ::
     HasEnvConfig env
  => StyleDoc
     -- ^ Pretty description of the report.
  -> Text
     -- ^ Plain description of the report, used in HTML reporting.
  -> Path Abs Dir
  -> [Path Abs File]
  -> RIO env (Maybe (Path Abs File))
generateUnionReport report reportHtml reportDir tixFiles = do
  -- Tix files that fail to parse are logged and dropped; exe modules are
  -- stripped before union (see removeExeModules).
  (errs, tix) <- fmap (unionTixes . map removeExeModules)
                      (mapMaybeM readTixOrLog tixFiles)
  logDebug $ "Using the following tix files: " <> fromString (show tixFiles)
  unless (null errs) $ prettyWarn $
       fillSep
         [ flow "The following modules are left out of the"
         , report
         , flow "due to version mismatches:"
         ]
    <> line
    <> bulletedList (map fromString errs :: [StyleDoc])
  -- NOTE(review): the (reportDir </>) section was missing its </> operator in
  -- the extracted text (archive mangling); restored.
  tixDest <-
    (reportDir </>) <$> parseRelFile (dirnameString reportDir ++ ".tix")
  ensureDir (parent tixDest)
  liftIO $ writeTix (toFilePath tixDest) tix
  generateHpcReportInternal tixDest reportDir report reportHtml [] []

-- | Read a tix file, logging a pretty error and yielding 'Nothing' if the
-- file cannot be read or parsed.
readTixOrLog :: HasTerm env => Path b File -> RIO env (Maybe Tix)
readTixOrLog path = do
  mtix <- liftIO (readTix (toFilePath path)) `catchAny` \errorCall -> do
    prettyError $
         "[S-3521]"
      <> line
      <> flow "Error while reading tix:"
      <> line
      <> string (displayException errorCall)
    pure Nothing
  when (isNothing mtix) $
    prettyError $
         "[S-7786]"
      <> line
      <> fillSep
           [ flow "Failed to read tix file"
           , pretty path <> "."
           ]
  pure mtix

-- | Module names which contain '/' have a package name, and so they weren't
-- built into the executable.
removeExeModules :: Tix -> Tix removeExeModules (Tix ms) = Tix (filter (\(TixModule name _ _ _) -> '/' `elem` name) ms) unionTixes :: [Tix] -> ([String], Tix) unionTixes tixes = (Map.keys errs, Tix (Map.elems outputs)) where (errs, outputs) = Map.mapEither id $ Map.unionsWith merge $ map toMap tixes toMap (Tix ms) = Map.fromList (map (\x@(TixModule k _ _ _) -> (k, Right x)) ms) merge (Right (TixModule k hash1 len1 tix1)) (Right (TixModule _ hash2 len2 tix2)) | hash1 == hash2 && len1 == len2 = Right (TixModule k hash1 len1 (zipWith (+) tix1 tix2)) merge _ _ = Left () generateHpcMarkupIndex :: HasEnvConfig env => RIO env () generateHpcMarkupIndex = do outputDir <- hpcReportDir let outputFile = outputDir relFileIndexHtml ensureDir outputDir (dirs, _) <- listDir outputDir rows <- fmap (concatMap catMaybes) $ forM dirs $ \dir -> do (subdirs, _) <- listDir dir forM subdirs $ \subdir -> do let indexPath = subdir relFileHpcIndexHtml exists' <- doesFileExist indexPath if not exists' then pure Nothing else do relPath <- stripProperPrefix outputDir indexPath let package = dirname dir testsuite = dirname subdir pure $ Just $ T.concat [ "" , pathToHtml package , "" , pathToHtml testsuite , "" ] writeBinaryFileAtomic outputFile $ "" <> -- Part of the css from HPC's output HTML "" <> "" <> "" <> ( if null rows then "No hpc_index.html files found in \"" <> encodeUtf8Builder (pathToHtml outputDir) <> "\"." else "" <> "

NOTE: This is merely a listing of the html files found in the coverage reports directory. Some of these reports may be old.

" <> "" <> foldMap encodeUtf8Builder rows <> "
PackageTestSuiteModification Time
" ) <> "" unless (null rows) $ displayReportPath "\nAn" "index of the generated HTML coverage reports" (pretty outputFile) generateHpcErrorReport :: MonadIO m => Path Abs Dir -> Utf8Builder -> m () generateHpcErrorReport dir err = do ensureDir dir let fp = toFilePath (dir relFileHpcIndexHtml) writeFileUtf8Builder fp $ "" <> "

HPC Report Generation Error

" <> "

" <> err <> "

" <> "" pathToHtml :: Path b t -> Text pathToHtml = T.dropWhileEnd (=='/') . sanitize . toFilePath -- | Escape HTML symbols (copied from Text.Hastache) htmlEscape :: LT.Text -> LT.Text htmlEscape = LT.concatMap proc_ where proc_ '&' = "&" proc_ '\\' = "\" proc_ '"' = """ proc_ '\'' = "'" proc_ '<' = "<" proc_ '>' = ">" proc_ h = LT.singleton h sanitize :: String -> Text sanitize = LT.toStrict . htmlEscape . LT.pack dirnameString :: Path r Dir -> String dirnameString = L.dropWhileEnd isPathSeparator . toFilePath . dirname findPackageFieldForBuiltPackage :: HasEnvConfig env => Path Abs Dir -> PackageIdentifier -> Set.Set Text -> Text -> RIO env (Either Text [Text]) findPackageFieldForBuiltPackage pkgDir pkgId subLibs field = do distDir <- distDirFromDir pkgDir let inplaceDir = distDir relDirPackageConfInplace pkgIdStr = packageIdentifierString pkgId notFoundErr = pure $ Left $ "Failed to find package key for " <> T.pack pkgIdStr extractField path = do contents <- readFileUtf8 (toFilePath path) case asum (map (T.stripPrefix (field <> ": ")) (T.lines contents)) of Just result -> pure $ Right $ T.strip result Nothing -> notFoundErr logDebug $ "Scanning " <> fromString (toFilePath inplaceDir) <> " for files matching " <> fromString pkgIdStr (_, files) <- handleIO (const $ pure ([], [])) $ listDir inplaceDir logDebug $ displayShow files -- From all the files obtained from the scanning process above, we need to -- identify which are .conf files and then ensure that there is at most one -- .conf file for each library and sub-library (some might be missing if that -- component has not been built yet). We should error if there are more than -- one .conf file for a component or if there are no .conf files at all in the -- searched location. let toFilename = T.pack . toFilePath . 
filename -- strip known prefix and suffix from the found files to determine only -- the .conf files stripKnown = T.stripSuffix ".conf" <=< T.stripPrefix (T.pack (pkgIdStr ++ "-")) stripped = mapMaybe (\file -> fmap (,file) . stripKnown . toFilename $ file) files -- which component could have generated each of these conf files stripHash n = let z = T.dropWhile (/= '-') n in if T.null z then "" else T.tail z matchedComponents = map (\(n, f) -> (stripHash n, [f])) stripped byComponents = Map.restrictKeys (Map.fromListWith (++) matchedComponents) $ Set.insert "" subLibs logDebug $ displayShow byComponents if Map.null $ Map.filter (\fs -> length fs > 1) byComponents then case concat $ Map.elems byComponents of [] -> notFoundErr -- for each of these files, we need to extract the requested field paths -> do (errors, keys) <- partitionEithers <$> traverse extractField paths case errors of (a:_) -> pure $ Left a -- the first error only, since they're repeated anyway [] -> pure $ Right keys else pure $ Left $ "Multiple files matching " <> T.pack (pkgIdStr ++ "-*.conf") <> " found in " <> T.pack (toFilePath inplaceDir) <> ". Maybe try 'stack clean' on this package?" displayReportPath :: HasTerm env => StyleDoc -> StyleDoc -> StyleDoc -> RIO env () displayReportPath prefix report reportPath = prettyInfoL [ prefix , report , flow "is available at" , reportPath <> "." ] findExtraTixFiles :: HasEnvConfig env => RIO env [Path Abs File] findExtraTixFiles = do outputDir <- hpcReportDir let dir = outputDir relDirExtraTixFiles dirExists <- doesDirExist dir if dirExists then do (_, files) <- listDir dir pure $ filter ((".tix" `L.isSuffixOf`) . toFilePath) files else pure [] stack-2.15.7/src/Stack/DefaultColorWhen.hs0000644000000000000000000000126714604306201016500 0ustar0000000000000000{-# LANGUAGE LambdaCase #-} module Stack.DefaultColorWhen ( defaultColorWhen ) where import Stack.Prelude ( stdout ) import Stack.Types.ColorWhen ( ColorWhen (..) 
) import System.Console.ANSI ( hNowSupportsANSI ) import System.Environment ( lookupEnv ) -- | The default adopts the standard proposed at http://no-color.org/, that -- color should not be added by default if the @NO_COLOR@ environment variable -- is present. defaultColorWhen :: IO ColorWhen defaultColorWhen = lookupEnv "NO_COLOR" >>= \case Just _ -> pure ColorNever _ -> hNowSupportsANSI stdout >>= \case False -> pure ColorNever _ -> pure ColorAuto stack-2.15.7/src/Stack/DependencyGraph.hs0000644000000000000000000003374214604306201016336 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Module exporting a function to create a pruned dependency graph given a -- 'DotOpts' value. module Stack.DependencyGraph ( createPrunedDependencyGraph , resolveDependencies , pruneGraph ) where import qualified Data.Foldable as F import qualified Data.Set as Set import qualified Data.Map as Map import qualified Data.Traversable as T import Distribution.License ( License (..) ) import qualified Distribution.PackageDescription as PD import Distribution.Types.PackageName ( mkPackageName ) import Path ( parent ) import Stack.Build ( loadPackage ) import Stack.Build.Installed ( getInstalled, toInstallMap ) import Stack.Build.Source ( loadCommonPackage, loadLocalPackage, loadSourceMap ) import Stack.Build.Target( NeedTargets (..), parseTargets ) import Stack.Package ( Package (..), setOfPackageDeps ) import Stack.Prelude hiding ( Display (..), pkgName, loadPackage ) import qualified Stack.Prelude ( pkgName ) import Stack.Runners ( ShouldReexec (..), withBuildConfig, withConfig , withEnvConfig ) import Stack.SourceMap ( globalsFromHints, mkProjectPackage, pruneGlobals ) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..) 
) import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..), defaultBuildOptsCLI ) import Stack.Types.BuildOptsMonoid ( buildOptsMonoidBenchmarksL, buildOptsMonoidTestsL ) import Stack.Types.Compiler ( wantedToActual ) import Stack.Types.DependencyTree ( DotPayload (..) ) import Stack.Types.DotConfig ( DotConfig (..) ) import Stack.Types.DotOpts ( DotOpts (..) ) import Stack.Types.DumpPackage ( DumpPackage (..) ) import Stack.Types.EnvConfig ( EnvConfig (..), HasSourceMap (..) ) import Stack.Types.GhcPkgId ( GhcPkgId, ghcPkgIdString, parseGhcPkgId ) import Stack.Types.GlobalOpts ( globalOptsBuildOptsMonoidL ) import Stack.Types.Package ( LocalPackage (..) ) import Stack.Types.Runner ( Runner, globalOptsL ) import Stack.Types.SourceMap ( CommonPackage (..), DepPackage (..), ProjectPackage (..) , SMActual (..), SMWanted (..), SourceMap (..) ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.DependencyGraph" module. data DependencyGraphException = DependencyNotFoundBug GhcPkgId | PackageNotFoundBug PackageName deriving (Show, Typeable) instance Exception DependencyGraphException where displayException (DependencyNotFoundBug depId) = bugReport "[S-7071]" $ concat [ "Expected to find " , ghcPkgIdString depId , " in global DB." ] displayException (PackageNotFoundBug pkgName) = bugReport "[S-7151]" $ concat [ "The '" , packageNameString pkgName , "' package was not found in any of the dependency sources." ] -- | Create the dependency graph and also prune it as specified in the dot -- options. Returns a set of local names and a map from package names to -- dependencies. createPrunedDependencyGraph :: DotOpts -> RIO Runner (Set PackageName, Map PackageName (Set PackageName, DotPayload)) createPrunedDependencyGraph dotOpts = withDotConfig dotOpts $ do localNames <- view $ buildConfigL . to (Map.keysSet . 
(.smWanted.project)) logDebug "Creating dependency graph" resultGraph <- createDependencyGraph dotOpts let pkgsToPrune = if dotOpts.includeBase then dotOpts.prune else Set.insert "base" dotOpts.prune prunedGraph = pruneGraph localNames pkgsToPrune resultGraph logDebug "Returning pruned dependency graph" pure (localNames, prunedGraph) -- Plumbing for --test and --bench flags withDotConfig :: DotOpts -> RIO DotConfig a -> RIO Runner a withDotConfig opts inner = local (over globalOptsL modifyGO) $ if opts.globalHints then withConfig NoReexec $ withBuildConfig withGlobalHints else withConfig YesReexec withReal where withGlobalHints = do buildConfig <- view buildConfigL globals <- globalsFromHints buildConfig.smWanted.compiler fakeGhcPkgId <- parseGhcPkgId "ignored" actual <- either throwIO pure $ wantedToActual buildConfig.smWanted.compiler let smActual = SMActual { compiler = actual , project = buildConfig.smWanted.project , deps = buildConfig.smWanted.deps , globals = Map.mapWithKey toDump globals } toDump :: PackageName -> Version -> DumpPackage toDump name version = DumpPackage { ghcPkgId = fakeGhcPkgId , packageIdent = PackageIdentifier name version , sublib = Nothing , license = Nothing , libDirs = [] , libraries = [] , hasExposedModules = True , exposedModules = mempty , depends = [] , haddockInterfaces = [] , haddockHtml = Nothing , isExposed = True } actualPkgs = Map.keysSet smActual.deps <> Map.keysSet smActual.project prunedActual = smActual { globals = pruneGlobals smActual.globals actualPkgs } targets <- parseTargets NeedTargets False boptsCLI prunedActual logDebug "Loading source map" sourceMap <- loadSourceMap targets boptsCLI smActual let dc = DotConfig { buildConfig , sourceMap , globalDump = toList smActual.globals } logDebug "DotConfig fully loaded" runRIO dc inner withReal = withEnvConfig NeedTargets boptsCLI $ do envConfig <- ask let sourceMap = envConfig.sourceMap installMap <- toInstallMap sourceMap (_, globalDump, _, _) <- getInstalled 
installMap let dc = DotConfig { buildConfig = envConfig.buildConfig , sourceMap , globalDump } runRIO dc inner boptsCLI = defaultBuildOptsCLI { targetsCLI = opts.dotTargets , flags = opts.flags } modifyGO = (if opts.testTargets then set (globalOptsBuildOptsMonoidL . buildOptsMonoidTestsL) (Just True) else id) . (if opts.benchTargets then set (globalOptsBuildOptsMonoidL . buildOptsMonoidBenchmarksL) (Just True) else id) -- | Create the dependency graph, the result is a map from a package -- name to a tuple of dependencies and payload if available. This -- function mainly gathers the required arguments for -- @resolveDependencies@. createDependencyGraph :: DotOpts -> RIO DotConfig (Map PackageName (Set PackageName, DotPayload)) createDependencyGraph dotOpts = do sourceMap <- view sourceMapL locals <- for (toList sourceMap.project) loadLocalPackage let graph = Map.fromList $ projectPackageDependencies dotOpts (filter (.wanted) locals) globalDump <- view $ to (.globalDump) -- TODO: Can there be multiple entries for wired-in-packages? If so, -- this will choose one arbitrarily.. 
let globalDumpMap = Map.fromList $ map (\dp -> (Stack.Prelude.pkgName dp.packageIdent, dp)) globalDump globalIdMap = Map.fromList $ map ((.ghcPkgId) &&& (.packageIdent)) globalDump let depLoader = createDepLoader sourceMap globalDumpMap globalIdMap loadPackageDeps loadPackageDeps name version loc flags ghcOptions cabalConfigOpts -- Skip packages that can't be loaded - see -- https://github.com/commercialhaskell/stack/issues/2967 | name `elem` [mkPackageName "rts", mkPackageName "ghc"] = pure ( Set.empty , DotPayload (Just version) (Just $ Right BSD3) Nothing ) | otherwise = fmap (setOfPackageDeps &&& makePayload loc) (loadPackage loc flags ghcOptions cabalConfigOpts) resolveDependencies dotOpts.dependencyDepth graph depLoader where makePayload loc pkg = DotPayload (Just pkg.version) (Just pkg.license) (Just $ PLImmutable loc) -- | Resolve the direct (depth 0) external dependencies of the given local -- packages (assumed to come from project packages) projectPackageDependencies :: DotOpts -> [LocalPackage] -> [(PackageName, (Set PackageName, DotPayload))] projectPackageDependencies dotOpts locals = map (\lp -> let pkg = localPackageToPackage lp pkgDir = parent lp.cabalFP packageDepsSet = setOfPackageDeps pkg loc = PLMutable $ ResolvedPath (RelFilePath "N/A") pkgDir in (pkg.name, (deps pkg packageDepsSet, lpPayload pkg loc))) locals where deps pkg packageDepsSet = if dotOpts.includeExternal then Set.delete pkg.name packageDepsSet else Set.intersection localNames packageDepsSet localNames = Set.fromList $ map (.package.name) locals lpPayload pkg loc = DotPayload (Just pkg.version) (Just pkg.license) (Just loc) -- | Given a SourceMap and a dependency loader, load the set of dependencies for -- a package createDepLoader :: SourceMap -> Map PackageName DumpPackage -> Map GhcPkgId PackageIdentifier -> ( PackageName -> Version -> PackageLocationImmutable -> Map FlagName Bool -> [Text] -> [Text] -> RIO DotConfig (Set PackageName, DotPayload) ) -> PackageName -> RIO 
DotConfig (Set PackageName, DotPayload) createDepLoader sourceMap globalDumpMap globalIdMap loadPackageDeps pkgName = fromMaybe (throwIO $ PackageNotFoundBug pkgName) (projectPackageDeps <|> dependencyDeps <|> globalDeps) where projectPackageDeps = loadDeps <$> Map.lookup pkgName sourceMap.project where loadDeps pp = do pkg <- loadCommonPackage pp.projectCommon pure (setOfPackageDeps pkg, payloadFromLocal pkg Nothing) dependencyDeps = loadDeps <$> Map.lookup pkgName sourceMap.deps where loadDeps DepPackage{ location = PLMutable dir } = do pp <- mkProjectPackage YesPrintWarnings dir False pkg <- loadCommonPackage pp.projectCommon pure (setOfPackageDeps pkg, payloadFromLocal pkg (Just $ PLMutable dir)) loadDeps dp@DepPackage{ location = PLImmutable loc } = do let common = dp.depCommon gpd <- liftIO common.gpd let PackageIdentifier name version = PD.package $ PD.packageDescription gpd flags = common.flags ghcOptions = common.ghcOptions cabalConfigOpts = common.cabalConfigOpts assert (pkgName == name) (loadPackageDeps pkgName version loc flags ghcOptions cabalConfigOpts) -- If package is a global package, use info from ghc-pkg (#4324, #3084) globalDeps = pure . 
getDepsFromDump <$> Map.lookup pkgName globalDumpMap where getDepsFromDump dump = (Set.fromList deps, payloadFromDump dump) where deps = map ghcIdToPackageName dump.depends ghcIdToPackageName depId = maybe (impureThrow $ DependencyNotFoundBug depId) Stack.Prelude.pkgName (Map.lookup depId globalIdMap) payloadFromLocal pkg = DotPayload (Just pkg.version) (Just pkg.license) payloadFromDump dp = DotPayload (Just $ pkgVersion dp.packageIdent) (Right <$> dp.license) Nothing -- | Resolve the dependency graph up to (Just depth) or until fixpoint is reached resolveDependencies :: (Applicative m, Monad m) => Maybe Int -> Map PackageName (Set PackageName, DotPayload) -> (PackageName -> m (Set PackageName, DotPayload)) -> m (Map PackageName (Set PackageName, DotPayload)) resolveDependencies (Just 0) graph _ = pure graph resolveDependencies limit graph loadPackageDeps = do let values = Set.unions (fst <$> Map.elems graph) keys = Map.keysSet graph next = Set.difference values keys if Set.null next then pure graph else do x <- T.traverse (\name -> (name,) <$> loadPackageDeps name) (F.toList next) resolveDependencies (subtract 1 <$> limit) (Map.unionWith unifier graph (Map.fromList x)) loadPackageDeps where unifier (pkgs1,v1) (pkgs2,_) = (Set.union pkgs1 pkgs2, v1) -- | @pruneGraph dontPrune toPrune graph@ prunes all packages in -- @graph@ with a name in @toPrune@ and removes resulting orphans -- unless they are in @dontPrune@ pruneGraph :: (F.Foldable f, F.Foldable g, Eq a) => f PackageName -> g PackageName -> Map PackageName (Set PackageName, a) -> Map PackageName (Set PackageName, a) pruneGraph dontPrune names = pruneUnreachable dontPrune . 
Map.mapMaybeWithKey (\pkg (pkgDeps,x) -> if pkg `F.elem` names then Nothing else let filtered = Set.filter (`F.notElem` names) pkgDeps in if Set.null filtered && not (Set.null pkgDeps) then Nothing else Just (filtered,x)) -- | Make sure that all unreachable nodes (orphans) are pruned pruneUnreachable :: (Eq a, F.Foldable f) => f PackageName -> Map PackageName (Set PackageName, a) -> Map PackageName (Set PackageName, a) pruneUnreachable dontPrune = fixpoint prune where fixpoint :: Eq a => (a -> a) -> a -> a fixpoint f v = if f v == v then v else fixpoint f (f v) prune graph' = Map.filterWithKey (\k _ -> reachable k) graph' where reachable k = k `F.elem` dontPrune || k `Set.member` reachables reachables = F.fold (fst <$> graph') localPackageToPackage :: LocalPackage -> Package localPackageToPackage lp = fromMaybe lp.package lp.testBench stack-2.15.7/src/Stack/Docker.hs0000644000000000000000000006456714620153445014526 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Run commands in Docker containers module Stack.Docker ( dockerCmdName , dockerHelpOptName , dockerPullCmdName , entrypoint , preventInContainer , pull , reset , reExecArgName , DockerException (..) , getProjectRoot , runContainerAndExit ) where import qualified Crypto.Hash as Hash ( Digest, MD5, hash ) import Data.Aeson ( eitherDecode ) import Data.Aeson.Types ( FromJSON (..), (.!=) ) import Data.Aeson.WarningParser ( (.:), (.:?) 
) import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.Lazy as BL import qualified Data.ByteString.Lazy.Char8 as LBS import Data.Char ( isAscii, isDigit ) import Data.Conduit.List ( sinkNull ) import Data.List ( dropWhileEnd, isInfixOf, isPrefixOf ) import Data.List.Extra ( trim ) import qualified Data.Map.Strict as Map import qualified Data.Text as T import qualified Data.Text.Encoding as T import Data.Time ( UTCTime ) import qualified Data.Version ( parseVersion ) import Distribution.Version ( mkVersion, mkVersion' ) import Path ( (), dirname, filename, parent, parseAbsDir , splitExtension ) import Path.Extra ( toFilePathNoTrailingSep ) import Path.IO ( copyFile, doesDirExist, doesFileExist, ensureDir , getCurrentDir, getHomeDir, getModificationTime, listDir , removeDirRecur, removeFile, resolveFile' ) import qualified RIO.Directory ( makeAbsolute ) import RIO.Process ( ExitCodeException (..), HasProcessContext, augmentPath , closed, doesExecutableExist, proc, processContextL , readProcessStdout_, readProcess_, runProcess, runProcess_ , setStderr, setStdin, setStdout, useHandleOpen , withWorkingDir ) import Stack.Config ( getInContainer ) import Stack.Constants ( buildPlanDir, inContainerEnvVar, platformVariantEnvVar , relDirBin, relDirDotLocal, relDirDotSsh , relDirDotStackProgName, relDirUnderHome, stackRootEnvVar ) import Stack.Constants.Config ( projectDockerSandboxDir ) import Stack.Docker.Handlers ( handleSetGroups, handleSignals ) import Stack.Prelude import Stack.Setup ( ensureDockerStackExe ) import Stack.Storage.User ( loadDockerImageExeCache, saveDockerImageExeCache ) import Stack.Types.Config ( Config (..), HasConfig (..), configProjectRoot, stackRootL ) import Stack.Types.Docker ( DockerException (..), DockerOpts (..), DockerStackExe (..) 
, Mount (..), dockerCmdName, dockerContainerPlatform , dockerEntrypointArgName, dockerHelpOptName , dockerPullCmdName, reExecArgName ) import Stack.Types.DockerEntrypoint ( DockerEntrypoint (..), DockerUser (..) ) import Stack.Types.Runner ( HasDockerEntrypointMVar (..), terminalL ) import Stack.Types.Version ( showStackVersion, withinRange ) import System.Environment ( getArgs, getEnv, getEnvironment, getExecutablePath , getProgName ) import qualified System.FilePath as FP import System.IO.Error ( isDoesNotExistError ) import qualified System.Posix.User as User import qualified System.PosixCompat.Files as Files import System.Terminal ( hIsTerminalDeviceOrMinTTY ) import Text.ParserCombinators.ReadP ( readP_to_S ) -- | Function to get command and arguments to run in Docker container getCmdArgs :: HasConfig env => DockerOpts -> Inspect -> Bool -> RIO env (FilePath,[String],[(String,String)],[Mount]) getCmdArgs docker imageInfo isRemoteDocker = do config <- view configL user <- if fromMaybe (not isRemoteDocker) docker.setUser then liftIO $ do uid <- User.getEffectiveUserID gid <- User.getEffectiveGroupID groups <- nubOrd <$> User.getGroups umask <- Files.setFileCreationMask 0o022 -- Only way to get old umask seems to be to change it, so set it back afterward _ <- Files.setFileCreationMask umask pure $ Just DockerUser { uid , gid , groups , umask } else pure Nothing args <- fmap ( [ "--" ++ reExecArgName ++ "=" ++ showStackVersion , "--" ++ dockerEntrypointArgName , show DockerEntrypoint { user } ] ++ ) (liftIO getArgs) case config.docker.stackExe of Just DockerStackExeHost | config.platform == dockerContainerPlatform -> do exePath <- resolveFile' =<< liftIO getExecutablePath cmdArgs args exePath | otherwise -> throwIO UnsupportedStackExeHostPlatformException Just DockerStackExeImage -> do progName <- liftIO getProgName pure (FP.takeBaseName progName, args, [], []) Just (DockerStackExePath path) -> cmdArgs args path Just DockerStackExeDownload -> exeDownload args 
Nothing | config.platform == dockerContainerPlatform -> do (exePath, exeTimestamp, misCompatible) <- do exePath <- resolveFile' =<< liftIO getExecutablePath exeTimestamp <- getModificationTime exePath isKnown <- loadDockerImageExeCache imageInfo.iiId exePath exeTimestamp pure (exePath, exeTimestamp, isKnown) case misCompatible of Just True -> cmdArgs args exePath Just False -> exeDownload args Nothing -> do e <- try $ sinkProcessStderrStdout "docker" [ "run" , "-v" , toFilePath exePath ++ ":" ++ "/tmp/stack" , T.unpack imageInfo.iiId , "/tmp/stack" , "--version"] sinkNull sinkNull let compatible = case e of Left ExitCodeException{} -> False Right _ -> True saveDockerImageExeCache imageInfo.iiId exePath exeTimestamp compatible if compatible then cmdArgs args exePath else exeDownload args Nothing -> exeDownload args where exeDownload args = do exePath <- ensureDockerStackExe dockerContainerPlatform cmdArgs args exePath cmdArgs args exePath = do -- MSS 2020-04-21 previously used replaceExtension, but semantics changed in path 0.7 -- In any event, I'm not even sure _why_ we need to drop a file extension here -- Originally introduced here: https://github.com/commercialhaskell/stack/commit/6218dadaf5fd7bf312bb1bd0db63b4784ba78cb2 let exeBase = case splitExtension exePath of Left _ -> exePath Right (x, _) -> x let mountPath = hostBinDir FP. toFilePath (filename exeBase) pure (mountPath, args, [], [Mount (toFilePath exePath) mountPath]) -- | Error if running in a container. preventInContainer :: MonadIO m => m () -> m () preventInContainer inner = do inContainer <- getInContainer if inContainer then throwIO OnlyOnHostException else inner -- | Run a command in a new Docker container, then exit the process. 
runContainerAndExit :: HasConfig env => RIO env void runContainerAndExit = do config <- view configL let docker = config.docker checkDockerVersion docker (env, isStdinTerminal, isStderrTerminal, homeDir) <- liftIO $ (,,,) <$> getEnvironment <*> hIsTerminalDeviceOrMinTTY stdin <*> hIsTerminalDeviceOrMinTTY stderr <*> getHomeDir isStdoutTerminal <- view terminalL let dockerHost = lookup "DOCKER_HOST" env dockerCertPath = lookup "DOCKER_CERT_PATH" env bamboo = lookup "bamboo_buildKey" env jenkins = lookup "JENKINS_HOME" env msshAuthSock = lookup "SSH_AUTH_SOCK" env muserEnv = lookup "USER" env isRemoteDocker = maybe False (isPrefixOf "tcp://") dockerHost mstackYaml <- for (lookup "STACK_YAML" env) RIO.Directory.makeAbsolute image <- either throwIO pure docker.image when ( isRemoteDocker && maybe False (isInfixOf "boot2docker") dockerCertPath ) ( prettyWarnS "Using boot2docker is NOT supported, and not likely to perform well." ) maybeImageInfo <- inspect image imageInfo <- case maybeImageInfo of Just ii -> pure ii Nothing | docker.autoPull -> do pullImage docker image mii2 <- inspect image case mii2 of Just ii2 -> pure ii2 Nothing -> throwM (InspectFailedException image) | otherwise -> throwM (NotPulledException image) projectRoot <- getProjectRoot sandboxDir <- projectDockerSandboxDir projectRoot let ic = imageInfo.config imageEnvVars = map (break (== '=')) ic.env platformVariant = show $ hashRepoName image stackRoot = view stackRootL config sandboxHomeDir = sandboxDir homeDirName isTerm = not docker.detach && isStdinTerminal && isStdoutTerminal && isStderrTerminal keepStdinOpen = not docker.detach && -- Workaround for https://github.com/docker/docker/issues/12319 -- This is fixed in Docker 1.9.1, but will leave the workaround -- in place for now, for users who haven't upgraded yet. 
(isTerm || (isNothing bamboo && isNothing jenkins)) let mpath = T.pack <$> lookupImageEnv "PATH" imageEnvVars when (isNothing mpath) $ do prettyWarnL [ flow "The Docker image does not set the PATH environment variable. \ \This will likely fail. For further information, see" , style Url "https://github.com/commercialhaskell/stack/issues/2742" <> "." ] newPathEnv <- either throwM pure $ augmentPath [ hostBinDir , toFilePath (sandboxHomeDir relDirDotLocal relDirBin) ] mpath (cmnd,args,envVars,extraMount) <- getCmdArgs docker imageInfo isRemoteDocker pwd <- getCurrentDir liftIO $ mapM_ ensureDir [sandboxHomeDir, stackRoot] -- Since $HOME is now mounted in the same place in the container we can -- just symlink $HOME/.ssh to the right place for the stack docker user let sshDir = homeDir sshRelDir sshDirExists <- doesDirExist sshDir sshSandboxDirExists <- liftIO (Files.fileExist (toFilePathNoTrailingSep (sandboxHomeDir sshRelDir))) when (sshDirExists && not sshSandboxDirExists) (liftIO (Files.createSymbolicLink (toFilePathNoTrailingSep sshDir) (toFilePathNoTrailingSep (sandboxHomeDir sshRelDir)))) let mountSuffix = maybe "" (":" ++) docker.mountMode containerID <- withWorkingDir (toFilePath projectRoot) $ trim . 
decodeUtf8 <$> readDockerProcess ( concat [ [ "create" , "-e", inContainerEnvVar ++ "=1" , "-e", stackRootEnvVar ++ "=" ++ toFilePathNoTrailingSep stackRoot , "-e", platformVariantEnvVar ++ "=dk" ++ platformVariant , "-e", "HOME=" ++ toFilePathNoTrailingSep sandboxHomeDir , "-e", "PATH=" ++ T.unpack newPathEnv , "-e", "PWD=" ++ toFilePathNoTrailingSep pwd , "-v" , toFilePathNoTrailingSep homeDir ++ ":" ++ toFilePathNoTrailingSep homeDir ++ mountSuffix , "-v" , toFilePathNoTrailingSep stackRoot ++ ":" ++ toFilePathNoTrailingSep stackRoot ++ mountSuffix , "-v" , toFilePathNoTrailingSep projectRoot ++ ":" ++ toFilePathNoTrailingSep projectRoot ++ mountSuffix , "-v" , toFilePathNoTrailingSep sandboxHomeDir ++ ":" ++ toFilePathNoTrailingSep sandboxHomeDir ++ mountSuffix , "-w", toFilePathNoTrailingSep pwd ] , case docker.network of Nothing -> ["--net=host"] Just name -> ["--net=" ++ name] , case muserEnv of Nothing -> [] Just userEnv -> ["-e","USER=" ++ userEnv] , case msshAuthSock of Nothing -> [] Just sshAuthSock -> [ "-e","SSH_AUTH_SOCK=" ++ sshAuthSock , "-v",sshAuthSock ++ ":" ++ sshAuthSock ] , case mstackYaml of Nothing -> [] Just stackYaml -> [ "-e","STACK_YAML=" ++ stackYaml , "-v",stackYaml++ ":" ++ stackYaml ++ ":ro" ] -- Disable the deprecated entrypoint in FP Complete-generated images , [ "--entrypoint=/usr/bin/env" | isJust (lookupImageEnv oldSandboxIdEnvVar imageEnvVars) && ( ic.entrypoint == ["/usr/local/sbin/docker-entrypoint"] || ic.entrypoint == ["/root/entrypoint.sh"] ) ] , concatMap (\(k,v) -> ["-e", k ++ "=" ++ v]) envVars , concatMap (mountArg mountSuffix) (extraMount ++ docker.mount) , concatMap (\nv -> ["-e", nv]) docker.env , case docker.containerName of Just name -> ["--name=" ++ name] Nothing -> [] , ["-t" | isTerm] , ["-i" | keepStdinOpen] , docker.runArgs , [image] , [cmnd] , args ] ) e <- handleSignals docker keepStdinOpen containerID case e of Left ExitCodeException{eceExitCode} -> exitWith eceExitCode Right () -> exitSuccess where -- 
This is using a hash of the Docker repository (without tag or digest) to -- ensure binaries/libraries aren't shared between Docker and host (or -- incompatible Docker images) hashRepoName :: String -> Hash.Digest Hash.MD5 hashRepoName = Hash.hash . BS.pack . takeWhile (\c -> c /= ':' && c /= '@') lookupImageEnv name vars = case lookup name vars of Just ('=':val) -> Just val _ -> Nothing mountArg mountSuffix (Mount host container) = ["-v",host ++ ":" ++ container ++ mountSuffix] sshRelDir = relDirDotSsh -- | Inspect Docker image or container. inspect :: (HasProcessContext env, HasLogFunc env) => String -> RIO env (Maybe Inspect) inspect image = do results <- inspects [image] case Map.toList results of [] -> pure Nothing [(_,i)] -> pure (Just i) _ -> throwIO (InvalidInspectOutputException "expect a single result") -- | Inspect multiple Docker images and/or containers. inspects :: (HasProcessContext env, HasLogFunc env) => [String] -> RIO env (Map Text Inspect) inspects [] = pure Map.empty inspects images = do maybeInspectOut <- -- not using 'readDockerProcess' as the error from a missing image -- needs to be recovered. try (BL.toStrict . fst <$> proc "docker" ("inspect" : images) readProcess_) case maybeInspectOut of Right inspectOut -> -- filtering with 'isAscii' to workaround @docker inspect@ output -- containing invalid UTF-8 case eitherDecode (LBS.pack (filter isAscii (decodeUtf8 inspectOut))) of Left msg -> throwIO (InvalidInspectOutputException msg) Right results -> pure (Map.fromList (map (\r -> (r.iiId, r)) results)) Left ece | any (`LBS.isPrefixOf` eceStderr ece) missingImagePrefixes -> pure Map.empty Left e -> throwIO e where missingImagePrefixes = ["Error: No such image", "Error: No such object:"] -- | Pull latest version of configured Docker image from registry. 
pull :: HasConfig env => RIO env () pull = do config <- view configL let docker = config.docker checkDockerVersion docker either throwIO (pullImage docker) docker.image -- | Pull Docker image from registry. pullImage :: (HasProcessContext env, HasTerm env) => DockerOpts -> String -> RIO env () pullImage docker image = do prettyInfoL [ flow "Pulling image from registry:" , style Current (fromString image) <> "." ] when docker.registryLogin $ do prettyInfoS "You may need to log in." proc "docker" ( concat [ ["login"] , maybe [] (\n -> ["--username=" ++ n]) docker.registryUsername , maybe [] (\p -> ["--password=" ++ p]) docker.registryPassword , [takeWhile (/= '/') image] ] ) runProcess_ -- We redirect the stdout of the process to stderr so that the output -- of @docker pull@ will not interfere with the output of other -- commands when using --auto-docker-pull. See issue #2733. ec <- proc "docker" ["pull", image] $ \pc0 -> do let pc = setStdout (useHandleOpen stderr) $ setStderr (useHandleOpen stderr) $ setStdin closed pc0 runProcess pc case ec of ExitSuccess -> pure () ExitFailure _ -> throwIO (PullFailedException image) -- | Check docker version (throws exception if incorrect) checkDockerVersion :: (HasProcessContext env, HasLogFunc env) => DockerOpts -> RIO env () checkDockerVersion docker = do dockerExists <- doesExecutableExist "docker" unless dockerExists (throwIO DockerNotInstalledException) dockerVersionOut <- readDockerProcess ["--version"] case words (decodeUtf8 dockerVersionOut) of (_:_:v:_) -> case fmap mkVersion' $ parseVersion' $ stripVersion v of Just v' | v' < minimumDockerVersion -> throwIO (DockerTooOldException minimumDockerVersion v') | v' `elem` prohibitedDockerVersions -> throwIO (DockerVersionProhibitedException prohibitedDockerVersions v') | not (v' `withinRange` docker.requireDockerVersion) -> throwIO (BadDockerVersionException docker.requireDockerVersion v') | otherwise -> pure () _ -> throwIO InvalidVersionOutputException _ -> throwIO 
InvalidVersionOutputException where minimumDockerVersion = mkVersion [1, 6, 0] prohibitedDockerVersions = [] stripVersion v = takeWhile (/= '-') (dropWhileEnd (not . isDigit) v) -- version is parsed by Data.Version provided code to avoid -- Cabal's Distribution.Version lack of support for leading zeros in version parseVersion' = fmap fst . listToMaybe . reverse . readP_to_S Data.Version.parseVersion -- | Remove the project's Docker sandbox. reset :: HasConfig env => Bool -> RIO env () reset keepHome = do projectRoot <- getProjectRoot dockerSandboxDir <- projectDockerSandboxDir projectRoot liftIO (removeDirectoryContents dockerSandboxDir [homeDirName | keepHome] []) -- | The Docker container "entrypoint": special actions performed when first -- entering a container, such as switching the UID/GID to the "outside-Docker" -- user's. entrypoint :: (HasDockerEntrypointMVar env, HasProcessContext env, HasLogFunc env) => Config -> DockerEntrypoint -> RIO env () entrypoint config@Config{} de = do entrypointMVar <- view dockerEntrypointMVarL modifyMVar_ entrypointMVar $ \alreadyRan -> do -- Only run the entrypoint once unless alreadyRan $ do envOverride <- view processContextL homeDir <- liftIO $ parseAbsDir =<< getEnv "HOME" -- Get the UserEntry for the 'stack' user in the image, if it exists estackUserEntry0 <- liftIO $ tryJust (guard . 
isDoesNotExistError) $ User.getUserEntryForName stackUserName -- Switch UID/GID if needed, and update user's home directory case de.user of Nothing -> pure () Just (DockerUser 0 _ _ _) -> pure () Just du -> withProcessContext envOverride $ updateOrCreateStackUser estackUserEntry0 homeDir du case estackUserEntry0 of Left _ -> pure () Right ue -> do -- If the 'stack' user exists in the image, copy any build plans and -- package indices from its original home directory to the host's -- Stack root, to avoid needing to download them origStackHomeDir <- liftIO $ parseAbsDir (User.homeDirectory ue) let origStackRoot = origStackHomeDir relDirDotStackProgName buildPlanDirExists <- doesDirExist (buildPlanDir origStackRoot) when buildPlanDirExists $ do (_, buildPlans) <- listDir (buildPlanDir origStackRoot) forM_ buildPlans $ \srcBuildPlan -> do let destBuildPlan = buildPlanDir (view stackRootL config) filename srcBuildPlan exists <- doesFileExist destBuildPlan unless exists $ do ensureDir (parent destBuildPlan) copyFile srcBuildPlan destBuildPlan pure True where updateOrCreateStackUser estackUserEntry homeDir du = do case estackUserEntry of Left _ -> do -- If no 'stack' user in image, create one with correct UID/GID and home -- directory readProcessNull "groupadd" [ "-o" , "--gid",show du.gid , stackUserName ] readProcessNull "useradd" [ "-oN" , "--uid", show du.uid , "--gid", show du.gid , "--home", toFilePathNoTrailingSep homeDir , stackUserName ] Right _ -> do -- If there is already a 'stack' user in the image, adjust its UID/GID -- and home directory readProcessNull "usermod" [ "-o" , "--uid", show du.uid , "--home", toFilePathNoTrailingSep homeDir , stackUserName ] readProcessNull "groupmod" [ "-o" , "--gid", show du.gid , stackUserName ] forM_ du.groups $ \gid -> readProcessNull "groupadd" [ "-o" , "--gid", show gid , "group" ++ show gid ] -- 'setuid' to the wanted UID and GID liftIO $ do User.setGroupID du.gid handleSetGroups du.groups User.setUserID du.uid _ <- 
Files.setFileCreationMask du.umask pure () stackUserName = "stack" :: String -- | Remove the contents of a directory, without removing the directory itself. -- This is used instead of 'FS.removeTree' to clear bind-mounted directories, -- since removing the root of the bind-mount won't work. removeDirectoryContents :: Path Abs Dir -- ^ Directory to remove contents of -> [Path Rel Dir] -- ^ Top-level directory names to exclude from removal -> [Path Rel File] -- ^ Top-level file names to exclude from removal -> IO () removeDirectoryContents path excludeDirs excludeFiles = do isRootDir <- doesDirExist path when isRootDir $ do (lsd,lsf) <- listDir path forM_ lsd (\d -> unless (dirname d `elem` excludeDirs) (removeDirRecur d)) forM_ lsf (\f -> unless (filename f `elem` excludeFiles) (removeFile f)) -- | Produce a strict 'S.ByteString' from the stdout of a process. Throws a -- 'ReadProcessException' exception if the process fails. -- -- The stderr output is passed straight through, which is desirable for some -- cases e.g. docker pull, in which docker uses stderr for progress output. -- -- Use 'readProcess_' directly to customize this. readDockerProcess :: (HasProcessContext env, HasLogFunc env) => [String] -> RIO env BS.ByteString readDockerProcess args = BL.toStrict <$> proc "docker" args readProcessStdout_ -- | Name of home directory within docker sandbox. homeDirName :: Path Rel Dir homeDirName = relDirUnderHome -- | Directory where 'stack' executable is bind-mounted in Docker container -- This refers to a path in the Linux *container*, and so should remain a -- 'FilePath' (not 'Path Abs Dir') so that it works when the host runs Windows. hostBinDir :: FilePath hostBinDir = "/opt/host/bin" -- | Convenience function to decode ByteString to String. decodeUtf8 :: BS.ByteString -> String decodeUtf8 bs = T.unpack (T.decodeUtf8 bs) -- | Fail with friendly error if project root not set. 
getProjectRoot :: HasConfig env => RIO env (Path Abs Dir) getProjectRoot = do mroot <- view $ configL . to configProjectRoot maybe (throwIO CannotDetermineProjectRootException) pure mroot -- | Environment variable that contained the old sandbox ID. -- | Use of this variable is deprecated, and only used to detect old images. oldSandboxIdEnvVar :: String oldSandboxIdEnvVar = "DOCKER_SANDBOX_ID" -- | Parsed result of @docker inspect@. data Inspect = Inspect { config :: ImageConfig , created :: UTCTime , iiId :: Text , virtualSize :: Maybe Integer } deriving Show -- | Parse @docker inspect@ output. instance FromJSON Inspect where parseJSON v = do o <- parseJSON v Inspect <$> o .: "Config" <*> o .: "Created" <*> o .: "Id" <*> o .:? "VirtualSize" -- | Parsed @Config@ section of @docker inspect@ output. data ImageConfig = ImageConfig { env :: [String] , entrypoint :: [String] } deriving Show -- | Parse @Config@ section of @docker inspect@ output. instance FromJSON ImageConfig where parseJSON v = do o <- parseJSON v ImageConfig <$> fmap join (o .:? "Env") .!= [] <*> fmap join (o .:? "Entrypoint") .!= [] stack-2.15.7/src/Stack/DockerCmd.hs0000644000000000000000000000153614445120723015133 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Functions related to Stack's @docker pull@ and @docker reset@ commands. module Stack.DockerCmd ( dockerPullCmd , dockerResetCmd ) where import Stack.Docker ( preventInContainer, pull, reset ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig ) import Stack.Types.Runner ( Runner ) -- | Function underlying the @stack docker pull@ command. Pull the current -- Docker image. dockerPullCmd :: () -> RIO Runner () dockerPullCmd () = withConfig NoReexec $ preventInContainer pull -- | Function underlying the @stack docker reset@ command. Reset the Docker -- sandbox. dockerResetCmd :: Bool -- ^ Delete the sandbox's home directory? -> RIO Runner () dockerResetCmd = withConfig NoReexec . preventInContainer . 
reset stack-2.15.7/src/Stack/Dot.hs0000644000000000000000000000652714620153445014035 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Functions related to Stack's @dot@ command. module Stack.Dot ( dotCmd , printGraph ) where import qualified Data.Foldable as F import qualified Data.Set as Set import qualified Data.Map as Map import qualified Data.Text as Text import qualified Data.Text.IO as Text import Stack.Constants ( wiredInPackages ) import Stack.DependencyGraph ( createPrunedDependencyGraph ) import Stack.Prelude import Stack.Types.DependencyTree ( DotPayload (..) ) import Stack.Types.DotOpts ( DotOpts (..) ) import Stack.Types.Runner ( Runner ) -- | Visualize the project's dependencies as a graphviz graph dotCmd :: DotOpts -> RIO Runner () dotCmd dotOpts = do (localNames, prunedGraph) <- createPrunedDependencyGraph dotOpts printGraph dotOpts localNames prunedGraph -- | Print a graphviz graph of the edges in the Map and highlight the given -- local packages printGraph :: (Applicative m, MonadIO m) => DotOpts -> Set PackageName -- ^ all locals -> Map PackageName (Set PackageName, DotPayload) -> m () printGraph dotOpts locals graph = do liftIO $ Text.putStrLn "strict digraph deps {" printLocalNodes dotOpts filteredLocals printLeaves graph void (Map.traverseWithKey printEdges (fst <$> graph)) liftIO $ Text.putStrLn "}" where filteredLocals = Set.filter (\local' -> local' `Set.notMember` dotOpts.prune) locals -- | Print the local nodes with a different style depending on options printLocalNodes :: (F.Foldable t, MonadIO m) => DotOpts -> t PackageName -> m () printLocalNodes dotOpts locals = liftIO $ Text.putStrLn (Text.intercalate "\n" lpNodes) where applyStyle :: Text -> Text applyStyle n = if dotOpts.includeExternal then n <> " [style=dashed];" else n <> " [style=solid];" lpNodes :: [Text] lpNodes = map (applyStyle . 
nodeName) (F.toList locals) -- | Print nodes without dependencies printLeaves :: MonadIO m => Map PackageName (Set PackageName, DotPayload) -> m () printLeaves = F.mapM_ printLeaf . Map.keysSet . Map.filter Set.null . fmap fst -- | @printDedges p ps@ prints an edge from p to every ps printEdges :: MonadIO m => PackageName -> Set PackageName -> m () printEdges package deps = F.forM_ deps (printEdge package) -- | Print an edge between the two package names printEdge :: MonadIO m => PackageName -> PackageName -> m () printEdge from to' = liftIO $ Text.putStrLn (Text.concat [ nodeName from , " -> " , nodeName to' , ";" ]) -- | Convert a package name to a graph node name. nodeName :: PackageName -> Text nodeName name = "\"" <> Text.pack (packageNameString name) <> "\"" -- | Print a node with no dependencies printLeaf :: MonadIO m => PackageName -> m () printLeaf package = liftIO . Text.putStrLn . Text.concat $ if isWiredIn package then ["{rank=max; ", nodeName package, " [shape=box]; };"] else ["{rank=max; ", nodeName package, "; };"] -- | Check if the package is wired in (shipped with) ghc isWiredIn :: PackageName -> Bool isWiredIn = (`Set.member` wiredInPackages) stack-2.15.7/src/Stack/Eval.hs0000644000000000000000000000171414604306201014157 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} -- | Types and functions related to Stack's @eval@ command. module Stack.Eval ( EvalOpts (..) , evalCmd ) where import Stack.Exec ( ExecOpts (..), ExecOptsExtra, SpecialExecCmd (..) , execCmd ) import Stack.Prelude import Stack.Types.Runner ( Runner ) -- Type representing command line options for the @stack eval@ command. data EvalOpts = EvalOpts { arg :: !String , extra :: !ExecOptsExtra } deriving Show -- | Function underlying the @stack eval@ command. Evaluate some Haskell code -- inline. 
evalCmd :: EvalOpts -> RIO Runner () evalCmd eval = execCmd execOpts where execOpts = ExecOpts { cmd = ExecGhc , args = ["-e", eval.arg] , extra = eval.extra } stack-2.15.7/src/Stack/Exec.hs0000644000000000000000000001713714604306201014162 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Types and function related to Stack's @exec@, @ghc@, @run@, @runghc@ and -- @runhaskell@ commands. module Stack.Exec ( ExecOpts (..) , SpecialExecCmd (..) , ExecOptsExtra (..) , execCmd ) where import qualified Data.List as L import qualified Data.Map.Strict as Map import qualified Data.Set as Set import qualified Data.Text as T import Distribution.Types.PackageName ( unPackageName ) import RIO.NonEmpty ( head, nonEmpty ) import RIO.Process ( exec ) import Stack.Build ( build ) import Stack.Build.Target ( NeedTargets (..), RawTarget (..), parseRawTarget ) import Stack.GhcPkg ( findGhcPkgField ) import Stack.Setup ( withNewLocalBuildTargets ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig, withEnvConfig ) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..) ) import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..), defaultBuildOptsCLI ) import Stack.Types.CompilerPaths ( CompilerPaths (..), HasCompiler (..), getGhcPkgExe ) import Stack.Types.Config ( Config (..), HasConfig (..) ) import Stack.Types.EnvConfig ( EnvConfig ) import Stack.Types.EnvSettings ( EnvSettings (..) ) import Stack.Types.NamedComponent ( NamedComponent (..), isCExe ) import Stack.Types.Runner ( Runner ) import Stack.Types.SourceMap ( SMWanted (..), ppComponents ) import System.Directory ( withCurrentDirectory ) import System.FilePath ( isValid ) -- | Type representing exceptions thrown by functions in the "Stack.Exec" -- module. 
newtype ExecException = InvalidPathForExec FilePath deriving (Show, Typeable) instance Exception ExecException where displayException (InvalidPathForExec path) = concat [ "Error: [S-1541]\n" , "Got an invalid '--cwd' argument for 'stack exec' (" , path , ")." ] -- | Type representing \'pretty\' exceptions thrown by functions in the -- "Stack.Exec" module. data ExecPrettyException = PackageIdNotFoundBug !String | ExecutableToRunNotFound | NoPackageIdReportedBug | InvalidExecTargets ![Text] deriving (Show, Typeable) instance Pretty ExecPrettyException where pretty (PackageIdNotFoundBug name) = bugPrettyReport "[S-8251]" $ fillSep [ flow "Could not find the package id of the package" , style Target (fromString name) <> "." ] pretty ExecutableToRunNotFound = "[S-2483]" <> line <> flow "No executables found." pretty NoPackageIdReportedBug = bugPrettyReport "S-8600" $ flow "execCmd: findGhcPkgField returned Just \"\"." pretty (InvalidExecTargets targets) = "[S-7371]" <> line <> fillSep [ flow "The following are invalid" , style Shell "--package" , "values for" , style Shell (flow "stack ghc") <> "," , style Shell (flow "stack runghc") <> "," , "or" , style Shell (flow "stack runhaskell") <> ":" ] <> line <> bulletedList (map (style Target . string . T.unpack) targets ) instance Exception ExecPrettyException -- Type representing Stack's execution commands. data SpecialExecCmd = ExecCmd String | ExecRun | ExecGhc | ExecRunGhc deriving (Eq, Show) data ExecOptsExtra = ExecOptsExtra { envSettings :: !EnvSettings , packages :: ![String] , rtsOptions :: ![String] , cwd :: !(Maybe FilePath) } deriving Show -- Type representing options for Stack's execution commands. data ExecOpts = ExecOpts { cmd :: !SpecialExecCmd , args :: ![String] , extra :: !ExecOptsExtra } deriving Show -- Type representing valid targets for --package option. 
data ExecTarget = ExecTarget PackageName (Maybe Version) -- | The function underlying Stack's @exec@, @ghc@, @run@, @runghc@ and -- @runhaskell@ commands. Execute a command. execCmd :: ExecOpts -> RIO Runner () execCmd opts = withConfig YesReexec $ withEnvConfig AllowNoTargets boptsCLI $ do let (errs, execTargets) = partitionEithers $ map fromTarget targets unless (null errs) $ prettyThrowM $ InvalidExecTargets errs unless (null execTargets) $ build Nothing config <- view configL menv <- liftIO $ config.processContextSettings eo.envSettings withProcessContext menv $ do -- Add RTS options to arguments let argsWithRts args = if null eo.rtsOptions then args :: [String] else args ++ ["+RTS"] ++ eo.rtsOptions ++ ["-RTS"] (cmd, args) <- case (opts.cmd, argsWithRts opts.args) of (ExecCmd cmd, args) -> pure (cmd, args) (ExecRun, args) -> getRunCmd args (ExecGhc, args) -> getGhcCmd execTargets args (ExecRunGhc, args) -> getRunGhcCmd execTargets args runWithPath eo.cwd $ exec cmd args where eo = opts.extra targets = concatMap (T.words . T.pack) eo.packages boptsCLI = defaultBuildOptsCLI { targetsCLI = targets } fromTarget :: Text -> Either Text ExecTarget fromTarget target = case parseRawTarget target >>= toExecTarget of Nothing -> Left target Just execTarget -> Right execTarget toExecTarget :: RawTarget -> Maybe ExecTarget toExecTarget (RTPackageComponent _ _) = Nothing toExecTarget (RTComponent _) = Nothing toExecTarget (RTPackage name) = Just $ ExecTarget name Nothing toExecTarget (RTPackageIdentifier (PackageIdentifier name pkgId)) = Just $ ExecTarget name (Just pkgId) -- return the package-id of the first package in GHC_PACKAGE_PATH getPkgId (ExecTarget pkgName _) = do let name = unPackageName pkgName pkg <- getGhcPkgExe mId <- findGhcPkgField pkg [] name "id" case mId of Just i -> maybe (prettyThrowIO NoPackageIdReportedBug) (pure . 
head) (nonEmpty $ words $ T.unpack i) -- should never happen as we have already installed the packages _ -> prettyThrowIO (PackageIdNotFoundBug name) getPkgOpts pkgs = map ("-package-id=" ++) <$> mapM getPkgId pkgs getRunCmd args = do packages <- view $ buildConfigL . to (.smWanted.project) pkgComponents <- for (Map.elems packages) ppComponents let executables = concatMap (filter isCExe . Set.toList) pkgComponents let (exe, args') = case args of [] -> (firstExe, args) x:xs -> case L.find (\y -> y == CExe (T.pack x)) executables of Nothing -> (firstExe, args) argExe -> (argExe, xs) where firstExe = listToMaybe executables case exe of Just (CExe exe') -> do withNewLocalBuildTargets [T.cons ':' exe'] $ build Nothing pure (T.unpack exe', args') _ -> prettyThrowIO ExecutableToRunNotFound getGhcCmd pkgs args = do pkgopts <- getPkgOpts pkgs compiler <- view $ compilerPathsL . to (.compiler) pure (toFilePath compiler, pkgopts ++ args) getRunGhcCmd pkgs args = do pkgopts <- getPkgOpts pkgs interpret <- view $ compilerPathsL . to (.interpreter) pure (toFilePath interpret, pkgopts ++ args) runWithPath :: Maybe FilePath -> RIO EnvConfig () -> RIO EnvConfig () runWithPath path callback = case path of Nothing -> callback Just p | not (isValid p) -> throwIO $ InvalidPathForExec p Just p -> withUnliftIO $ \ul -> withCurrentDirectory p $ unliftIO ul callback stack-2.15.7/src/Stack/FileWatch.hs0000644000000000000000000001337314445120723015150 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.FileWatch ( WatchMode (WatchModePoll) , fileWatch , fileWatchPoll ) where import Control.Concurrent.STM ( check ) import qualified Data.Map.Merge.Strict as Map import qualified Data.Map.Strict as Map import qualified Data.Set as Set import GHC.IO.Exception ( IOErrorType (InvalidArgument), IOException (..) 
) import Path ( parent ) import Stack.Prelude import System.FSNotify ( WatchConfig, WatchMode (..), confWatchMode, defaultConfig , eventPath, watchDir, withManagerConf ) import System.IO ( getLine ) fileWatch :: HasTerm env => ((Set (Path Abs File) -> IO ()) -> RIO env ()) -> RIO env () fileWatch = fileWatchConf defaultConfig fileWatchPoll :: HasTerm env => ((Set (Path Abs File) -> IO ()) -> RIO env ()) -> RIO env () fileWatchPoll = fileWatchConf $ defaultConfig { confWatchMode = WatchModePoll 1000000 } -- | Run an action, watching for file changes -- -- The action provided takes a callback that is used to set the files to be -- watched. When any of those files are changed, we rerun the action again. fileWatchConf :: HasTerm env => WatchConfig -> ((Set (Path Abs File) -> IO ()) -> RIO env ()) -> RIO env () fileWatchConf cfg inner = withRunInIO $ \run -> withManagerConf cfg $ \manager -> do allFiles <- newTVarIO Set.empty dirtyVar <- newTVarIO True watchVar <- newTVarIO Map.empty let onChange event = atomically $ do files <- readTVar allFiles when (eventPath event `Set.member` files) (writeTVar dirtyVar True) setWatched :: Set (Path Abs File) -> IO () setWatched files = do atomically $ writeTVar allFiles $ Set.map toFilePath files watch0 <- readTVarIO watchVar let actions = Map.merge (Map.mapMissing stopListening) (Map.mapMissing startListening) (Map.zipWithMatched keepListening) watch0 newDirs watch1 <- forM (Map.toList actions) $ \(k, mmv) -> do mv <- mmv pure $ case mv of Nothing -> Map.empty Just v -> Map.singleton k v atomically $ writeTVar watchVar $ Map.unions watch1 where newDirs = Map.fromList $ map (, ()) $ Set.toList $ Set.map parent files keepListening _dir listen () = pure $ Just listen stopListening _ f = do () <- f `catch` \ioe -> -- Ignore invalid argument error - it can happen if -- the directory is removed. 
case ioe_type ioe of InvalidArgument -> pure () _ -> throwIO ioe pure Nothing startListening dir () = do let dir' = fromString $ toFilePath dir listen <- watchDir manager dir' (const True) onChange pure $ Just listen let watchInput = do l <- getLine unless (l == "quit") $ do run $ case l of "help" -> do prettyInfo $ line <> fillSep [ style Shell "help" <> ":" , flow "display this help." ] <> line <> fillSep [ style Shell "quit" <> ":" , "exit." ] <> line <> fillSep [ style Shell "build" <> ":" , flow "force a rebuild." ] <> line <> fillSep [ style Shell "watched" <> ":" , flow "display watched files." ] "build" -> atomically $ writeTVar dirtyVar True "watched" -> do watch <- readTVarIO allFiles mapM_ (prettyInfo . style File . fromString) (Set.toList watch) "" -> atomically $ writeTVar dirtyVar True _ -> prettyInfoL [ flow "Unknown command:" , style Shell (fromString l) <> "." , "Try" , style Shell "help" <> "." ] watchInput race_ watchInput $ run $ forever $ do atomically $ do dirty <- readTVar dirtyVar check dirty eres <- tryAny $ inner setWatched -- Clear dirtiness flag after the build to avoid an infinite loop caused -- by the build itself triggering dirtiness. This could be viewed as a -- bug, since files changed during the build will not trigger an extra -- rebuild, but overall seems like better behavior. See -- https://github.com/commercialhaskell/stack/issues/822 atomically $ writeTVar dirtyVar False case eres of Left e -> case fromException e of Just ExitSuccess -> prettyInfo $ style Good $ fromString $ displayException e _ -> case fromException e :: Maybe PrettyException of Just pe -> prettyError $ pretty pe _ -> prettyInfo $ style Error $ fromString $ displayException e _ -> prettyInfo $ style Good (flow "Success! Waiting for next file change.") prettyInfoL [ "Type" , style Shell "help" , flow "for the available commands. Press enter to force a rebuild." 
] stack-2.15.7/src/Stack/GhcPkg.hs0000644000000000000000000001626214604306201014437 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Functions for the GHC package database. module Stack.GhcPkg ( createDatabase , findGhcPkgField , getGlobalDB , ghcPkg , ghcPkgPathEnvVar , mkGhcPackagePath , unregisterGhcPkgIds ) where import qualified Data.ByteString.Char8 as S8 import qualified Data.ByteString.Lazy as BL import qualified Data.List as L import qualified Data.Text as T import qualified Data.Text.Encoding as T import GHC.Utils.GhcPkg.Main.Compat ( ghcPkgUnregisterForce ) import Path ( (), parent ) import Path.Extra ( toFilePathNoTrailingSep ) import Path.IO ( doesDirExist, doesFileExist, ensureDir, resolveDir' ) import RIO.Process ( HasProcessContext, proc, readProcess_ ) import Stack.Constants ( relFilePackageCache ) import Stack.Prelude import Stack.Types.Compiler ( WhichCompiler (..) 
) import Stack.Types.CompilerPaths ( CompilerPaths (..), GhcPkgExe (..), HasCompiler , compilerPathsL ) import Stack.Types.GhcPkgId ( GhcPkgId, ghcPkgIdString ) import System.FilePath ( searchPathSeparator ) -- | Get the global package database getGlobalDB :: (HasProcessContext env, HasTerm env) => GhcPkgExe -> RIO env (Path Abs Dir) getGlobalDB pkgexe = do logDebug "Getting global package database location" -- This seems like a strange way to get the global package database -- location, but I don't know of a better one bs <- ghcPkg pkgexe [] ["list", "--global"] >>= either throwIO pure let fp = S8.unpack $ stripTrailingColon $ firstLine bs liftIO $ resolveDir' fp where stripTrailingColon bs | S8.null bs = bs | S8.last bs == ':' = S8.init bs | otherwise = bs firstLine = S8.takeWhile (\c -> c /= '\r' && c /= '\n') -- | Run the ghc-pkg executable ghcPkg :: (HasProcessContext env, HasTerm env) => GhcPkgExe -> [Path Abs Dir] -> [String] -> RIO env (Either SomeException S8.ByteString) ghcPkg pkgexe@(GhcPkgExe pkgPath) pkgDbs args = do eres <- go case eres of Left _ -> do mapM_ (createDatabase pkgexe) pkgDbs go Right _ -> pure eres where pkg = toFilePath pkgPath go = tryAny $ BL.toStrict . fst <$> proc pkg args' readProcess_ args' = packageDbFlags pkgDbs ++ args -- | Create a package database in the given directory, if it doesn't exist. createDatabase :: (HasProcessContext env, HasTerm env) => GhcPkgExe -> Path Abs Dir -> RIO env () createDatabase (GhcPkgExe pkgPath) db = do exists <- doesFileExist (db relFilePackageCache) unless exists $ do -- ghc-pkg requires that the database directory does not exist -- yet. If the directory exists but the package.cache file -- does, we're in a corrupted state. Check for that state. dirExists <- doesDirExist db args <- if dirExists then do prettyWarnL [ flow "The package database located at" , pretty db , flow "is corrupted. It is missing its" , style File "package.cache" , flow "file. Stack is proceeding with a recache." 
] pure ["--package-db", toFilePath db, "recache"] else do -- Creating the parent doesn't seem necessary, as ghc-pkg -- seems to be sufficiently smart. But I don't feel like -- finding out it isn't the hard way ensureDir (parent db) pure ["init", toFilePath db] void $ proc (toFilePath pkgPath) args $ \pc -> onException (readProcess_ pc) $ logError $ "Error: [S-9735]\n" <> "Unable to create package database at " <> fromString (toFilePath db) -- | Get the environment variable to use for the package DB paths. ghcPkgPathEnvVar :: WhichCompiler -> Text ghcPkgPathEnvVar Ghc = "GHC_PACKAGE_PATH" -- | Get the necessary ghc-pkg flags for setting up the given package database packageDbFlags :: [Path Abs Dir] -> [String] packageDbFlags pkgDbs = "--no-user-package-db" : map (\x -> "--package-db=" ++ toFilePath x) pkgDbs -- | Get the value of a field of the package. findGhcPkgField :: (HasProcessContext env, HasTerm env) => GhcPkgExe -> [Path Abs Dir] -- ^ package databases -> String -- ^ package identifier, or GhcPkgId -> Text -> RIO env (Maybe Text) findGhcPkgField pkgexe pkgDbs name field = do result <- ghcPkg pkgexe pkgDbs ["field", "--simple-output", name, T.unpack field] pure $ case result of Left{} -> Nothing Right bs -> fmap (stripCR . T.decodeUtf8) $ listToMaybe $ S8.lines bs -- | unregister list of package ghcids, batching available from GHC 8.2.1, -- see https://github.com/commercialhaskell/stack/issues/2662#issuecomment-460342402 -- using GHC package id where available (from GHC 7.9) -- -- The version of the ghc-pkg executable supplied with GHCs published before -- 28 August 2023 does not efficiently bulk unregister. 
Until an 'efficient' -- ghc-pkg is available, this function no longer uses: -- -- > eres <- ghcPkg pkgexe [pkgDb] args -- > where -- > args = "unregister" : "--user" : "--force" : -- > map packageIdentifierString idents ++ -- > if null gids then [] else "--ipid" : map ghcPkgIdString gids -- -- but uses: -- -- > globalDb <- view $ compilerPathsL.to cpGlobalDB -- > eres <- tryAny $ liftIO $ -- > ghcPkgUnregisterUserForce globalDb pkgDb hasIpid pkgarg_strs -- unregisterGhcPkgIds :: (HasCompiler env, HasProcessContext env, HasTerm env) => Bool -- ^ Report pretty exceptions as warnings? -> GhcPkgExe -> Path Abs Dir -- ^ package database -> NonEmpty (Either PackageIdentifier GhcPkgId) -> RIO env () unregisterGhcPkgIds isWarn pkgexe pkgDb epgids = do globalDb <- view $ compilerPathsL . to (.globalDB) eres <- try $ do ghcPkgUnregisterForce globalDb pkgDb hasIpid pkgarg_strs -- ghcPkgUnregisterForce does not perform an effective -- 'ghc-pkg recache', as that depends on a specific version of the Cabal -- package. 
ghcPkg pkgexe [pkgDb] ["recache"] case eres of Left (PrettyException e) -> when isWarn $ prettyWarn $ "[S-8729]" <> line <> flow "While unregistering packages, Stack encountered the following \ \error:" <> blankLine <> pretty e Right _ -> pure () where (idents, gids) = partitionEithers $ toList epgids hasIpid = not (null gids) pkgarg_strs = map packageIdentifierString idents <> map ghcPkgIdString gids -- | Get the value for GHC_PACKAGE_PATH mkGhcPackagePath :: Bool -> Path Abs Dir -> Path Abs Dir -> [Path Abs Dir] -> Path Abs Dir -> Text mkGhcPackagePath locals localdb deps extras globaldb = T.pack $ L.intercalate [searchPathSeparator] $ concat [ [toFilePathNoTrailingSep localdb | locals] , [toFilePathNoTrailingSep deps] , [toFilePathNoTrailingSep db | db <- reverse extras] , [toFilePathNoTrailingSep globaldb] ] stack-2.15.7/src/Stack/Ghci.hs0000644000000000000000000014012214620153445014147 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Types and functions related to Stack's @ghci@ and @repl@ commands. module Stack.Ghci ( GhciOpts (..) , GhciPkgInfo (..) , GhciException (..) , GhciPrettyException (..) , ghciCmd , ghci ) where import Control.Monad.State.Strict ( State, execState, get, modify ) import Data.ByteString.Builder ( byteString ) import qualified Data.ByteString.Char8 as S8 import qualified Data.ByteString.Lazy as LBS import qualified Data.List as L import Data.List.Extra ( (!?) 
) import qualified Data.Map.Strict as M import qualified Data.Set as S import qualified Data.Text as T import qualified Distribution.PackageDescription as C import Path ((), parent, parseRelFile ) import Path.Extra ( forgivingResolveFile', toFilePathNoTrailingSep ) import Path.IO ( XdgDirectory (..), doesFileExist, ensureDir, getXdgDir ) import RIO.NonEmpty ( nonEmpty ) import RIO.Process ( exec, withWorkingDir ) import Stack.Build ( buildLocalTargets ) import Stack.Build.Installed ( getInstalled, toInstallMap ) import Stack.Build.Source ( getLocalFlags, localDependencies, projectLocalPackages ) import Stack.Build.Target ( NeedTargets (..), parseTargets ) import Stack.Constants ( relDirGhciScript, relDirStackProgName, relFileCabalMacrosH , relFileGhciScript, stackProgName' ) import Stack.Constants.Config ( ghciDirL, objectInterfaceDirL ) import Stack.Ghci.Script ( GhciScript, ModuleName, cmdAdd, cmdModule , scriptToLazyByteString ) import Stack.Package ( buildableExes, buildableForeignLibs, getPackageOpts , hasBuildableMainLibrary, listOfPackageDeps , packageFromPackageDescription, readDotBuildinfo , resolvePackageDescription, topSortPackageComponent ) import Stack.PackageFile ( getPackageFile ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig, withEnvConfig ) import Stack.Types.Build.Exception ( BuildPrettyException (..), pprintTargetParseErrors ) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..), stackYamlL ) import Stack.Types.BuildOpts ( BuildOpts (..) ) import qualified Stack.Types.BuildOpts as BenchmarkOpts ( BenchmarkOpts (..) ) import qualified Stack.Types.BuildOpts as TestOpts ( TestOpts (..) ) import Stack.Types.BuildOptsCLI ( ApplyCLIFlag, BuildOptsCLI (..), defaultBuildOptsCLI ) import Stack.Types.CompCollection ( getBuildableListText ) import Stack.Types.CompilerPaths ( CompilerPaths (..), HasCompiler (..) 
) import Stack.Types.Config ( Config (..), HasConfig (..), buildOptsL ) import Stack.Types.EnvConfig ( EnvConfig (..), HasEnvConfig (..), actualCompilerVersionL , shaPathForBytes ) import Stack.Types.EnvSettings ( defaultEnvSettings ) import Stack.Types.Installed ( InstallMap, InstalledMap ) import Stack.Types.NamedComponent ( NamedComponent (..), isCLib, isCSubLib, renderComponentTo , renderPkgComponent ) import Stack.Types.Package ( BuildInfoOpts (..), LocalPackage (..), Package (..) , PackageConfig (..), dotCabalCFilePath, dotCabalGetPath , dotCabalMainPath ) import Stack.Types.PackageFile ( PackageComponentFile (..) ) import Stack.Types.Platform ( HasPlatform (..) ) import Stack.Types.Runner ( HasRunner, Runner ) import Stack.Types.SourceMap ( CommonPackage (..), DepPackage (..), GlobalPackage , PackageType (..), ProjectPackage (..), SMActual (..) , SMTargets (..), SMWanted (..), SourceMap (..), Target (..) ) import System.IO ( putStrLn ) import System.Permissions ( setScriptPerms ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Ghci" module. data GhciException = InvalidPackageOption !String | LoadingDuplicateModules | MissingFileTarget !String | Can'tSpecifyFilesAndTargets | Can'tSpecifyFilesAndMainIs deriving (Show, Typeable) instance Exception GhciException where displayException (InvalidPackageOption name) = "Error: [S-6716]\n" ++ "Failed to parse '--package' option " ++ name ++ "." displayException LoadingDuplicateModules = unlines [ "Error: [S-9632]" , "Not attempting to start ghci due to these duplicate modules." , "Use '--no-load' to try to start it anyway, without loading any \ \modules (but these are still likely to cause errors)." ] displayException (MissingFileTarget name) = "Error: [S-3600]\n" ++ "Cannot find file target " ++ name ++ "." displayException Can'tSpecifyFilesAndTargets = "Error: [S-9906]\n" ++ "Cannot use 'stack ghci' with both file targets and package targets." 
displayException Can'tSpecifyFilesAndMainIs = "Error: [S-5188]\n" ++ "Cannot use 'stack ghci' with both file targets and '--main-is' \ \flag." -- | Type representing \'pretty\' exceptions thrown by functions exported by the -- "Stack.Ghci" module. data GhciPrettyException = GhciTargetParseException ![StyleDoc] | CandidatesIndexOutOfRangeBug deriving (Show, Typeable) instance Pretty GhciPrettyException where pretty (GhciTargetParseException errs) = "[S-6948]" <> pprintTargetParseErrors errs <> blankLine <> fillSep [ flow "Note that to specify options to be passed to GHCi, use the" , style Shell "--ghci-options" , "option." ] pretty CandidatesIndexOutOfRangeBug = bugPrettyReport "[S-1939]" $ flow "figureOutMainFile: index out of range." instance Exception GhciPrettyException -- | Typre respresenting command line options for the @stack ghci@ and -- @stack repl@ commands. data GhciOpts = GhciOpts { targets :: ![Text] , args :: ![String] , ghcOptions :: ![String] , flags :: !(Map ApplyCLIFlag (Map FlagName Bool)) , ghcCommand :: !(Maybe FilePath) , noLoadModules :: !Bool , additionalPackages :: ![String] , mainIs :: !(Maybe Text) , loadLocalDeps :: !Bool , skipIntermediate :: !Bool , hidePackages :: !(Maybe Bool) , noBuild :: !Bool , onlyMain :: !Bool } deriving Show -- | Type representing information required to load a package or its components. -- -- NOTE: GhciPkgInfo has paths as list instead of a Set to preserve files order -- as a workaround for bug https://ghc.haskell.org/trac/ghc/ticket/13786 data GhciPkgInfo = GhciPkgInfo { name :: !PackageName , opts :: ![(NamedComponent, BuildInfoOpts)] , dir :: !(Path Abs Dir) , modules :: !ModuleMap , cFiles :: ![Path Abs File] -- ^ C files. , mainIs :: !(Map NamedComponent [Path Abs File]) , targetFiles :: !(Maybe [Path Abs File]) , package :: !Package } deriving Show -- | Type representing loaded package description and related information. 
data GhciPkgDesc = GhciPkgDesc { package :: !Package , cabalFP :: !(Path Abs File) , target :: !Target } -- Mapping from a module name to a map with all of the paths that use that name. -- Each of those paths is associated with a set of components that contain it. -- The purpose of this complex structure is for use in -- 'checkForDuplicateModules'. type ModuleMap = Map ModuleName (Map (Path Abs File) (Set (PackageName, NamedComponent))) unionModuleMaps :: [ModuleMap] -> ModuleMap unionModuleMaps = M.unionsWith (M.unionWith S.union) -- | Function underlying the @stack ghci@ and @stack repl@ commands. Run GHCi in -- the context of a project. ghciCmd :: GhciOpts -> RIO Runner () ghciCmd ghciOpts = let boptsCLI = defaultBuildOptsCLI -- using only additional packages, targets then get overridden in `ghci` { targetsCLI = map T.pack ghciOpts.additionalPackages , initialBuildSteps = True , flags = ghciOpts.flags , ghcOptions = map T.pack ghciOpts.ghcOptions } in withConfig YesReexec $ withEnvConfig AllowNoTargets boptsCLI $ do bopts <- view buildOptsL -- override env so running of tests and benchmarks is disabled let boptsLocal = bopts { testOpts = bopts.testOpts { TestOpts.disableRun = True } , benchmarkOpts = bopts.benchmarkOpts { BenchmarkOpts.disableRun = True } } local (set buildOptsL boptsLocal) (ghci ghciOpts) -- | Launch a GHCi session for the given local package targets with the -- given options and configure it with the load paths and extensions -- of those targets. ghci :: HasEnvConfig env => GhciOpts -> RIO env () ghci opts = do let buildOptsCLI = defaultBuildOptsCLI { targetsCLI = [] , flags = opts.flags } sourceMap <- view $ envConfigL . 
to (.sourceMap) installMap <- toInstallMap sourceMap locals <- projectLocalPackages depLocals <- localDependencies let localMap = M.fromList [(lp.package.name, lp) | lp <- locals ++ depLocals] -- FIXME:qrilka this looks wrong to go back to SMActual sma = SMActual { compiler = sourceMap.compiler , project = sourceMap.project , deps = sourceMap.deps , globals = sourceMap.globalPkgs } -- Parse --main-is argument. mainIsTargets <- parseMainIsTargets buildOptsCLI sma opts.mainIs -- Parse to either file targets or build targets etargets <- preprocessTargets buildOptsCLI sma opts.targets (inputTargets, mfileTargets) <- case etargets of Right packageTargets -> pure (packageTargets, Nothing) Left rawFileTargets -> do case mainIsTargets of Nothing -> pure () Just _ -> throwM Can'tSpecifyFilesAndMainIs -- Figure out targets based on filepath targets (targetMap, fileInfo, extraFiles) <- findFileTargets locals rawFileTargets pure (targetMap, Just (fileInfo, extraFiles)) -- Get a list of all the local target packages. localTargets <- getAllLocalTargets opts inputTargets mainIsTargets localMap -- Get a list of all the non-local target packages. nonLocalTargets <- getAllNonLocalTargets inputTargets let getInternalDependencies target localPackage = topSortPackageComponent localPackage.package target False internalDependencies = M.intersectionWith getInternalDependencies inputTargets localMap relevantDependencies = M.filter (any isCSubLib) internalDependencies -- Check if additional package arguments are sensible. addPkgs <- checkAdditionalPackages opts.additionalPackages -- Load package descriptions. pkgDescs <- loadGhciPkgDescs buildOptsCLI localTargets -- If necessary, ask user about which main module to load. bopts <- view buildOptsL mainFile <- if opts.noLoadModules then pure Nothing else do -- Figure out package files, in order to ask the user about which main -- module to load. See the note below for why this is done again after the -- build. 
This could potentially be done more efficiently, because all we -- need is the location of main modules, not the rest. pkgs0 <- getGhciPkgInfos installMap addPkgs (fmap fst mfileTargets) pkgDescs figureOutMainFile bopts mainIsTargets localTargets pkgs0 let pkgTargets pn targets = case targets of TargetAll _ -> [T.pack (packageNameString pn)] TargetComps comps -> [renderPkgComponent (pn, c) | c <- toList comps] -- Build required dependencies and setup local packages. buildDepsAndInitialSteps opts $ concatMap (\(pn, (_, t)) -> pkgTargets pn t) localTargets targetWarnings localTargets nonLocalTargets mfileTargets -- Load the list of modules _after_ building, to catch changes in -- unlisted dependencies (#1180) pkgs <- getGhciPkgInfos installMap addPkgs (fmap fst mfileTargets) pkgDescs checkForIssues pkgs -- Finally, do the invocation of ghci runGhci opts localTargets mainFile pkgs (maybe [] snd mfileTargets) (nonLocalTargets ++ addPkgs) relevantDependencies preprocessTargets :: HasEnvConfig env => BuildOptsCLI -> SMActual GlobalPackage -> [Text] -> RIO env (Either [Path Abs File] (Map PackageName Target)) preprocessTargets buildOptsCLI sma rawTargets = do let (fileTargetsRaw, normalTargetsRaw) = L.partition (\t -> ".hs" `T.isSuffixOf` t || ".lhs" `T.isSuffixOf` t) rawTargets -- Only use file targets if we have no normal targets. if not (null fileTargetsRaw) && null normalTargetsRaw then do fileTargets <- forM fileTargetsRaw $ \fp0 -> do let fp = T.unpack fp0 mpath <- forgivingResolveFile' fp case mpath of Nothing -> throwM (MissingFileTarget fp) Just path -> pure path pure (Left fileTargets) else do -- Try parsing targets before checking if both file and -- module targets are specified (see issue#3342). 
let boptsCLI = buildOptsCLI { targetsCLI = normalTargetsRaw } normalTargets <- parseTargets AllowNoTargets False boptsCLI sma `catch` \pex@(PrettyException ex) -> case fromException $ toException ex of Just (TargetParseException xs) -> prettyThrowM $ GhciTargetParseException xs _ -> throwM pex unless (null fileTargetsRaw) $ throwM Can'tSpecifyFilesAndTargets pure (Right normalTargets.targets) parseMainIsTargets :: HasEnvConfig env => BuildOptsCLI -> SMActual GlobalPackage -> Maybe Text -> RIO env (Maybe (Map PackageName Target)) parseMainIsTargets buildOptsCLI sma mtarget = forM mtarget $ \target -> do let boptsCLI = buildOptsCLI { targetsCLI = [target] } targets <- parseTargets AllowNoTargets False boptsCLI sma pure targets.targets -- | Display PackageName + NamedComponent displayPkgComponent :: (PackageName, NamedComponent) -> StyleDoc displayPkgComponent = style PkgComponent . fromString . T.unpack . renderPkgComponent findFileTargets :: HasEnvConfig env => [LocalPackage] -> [Path Abs File] -> RIO env (Map PackageName Target, Map PackageName [Path Abs File], [Path Abs File]) findFileTargets locals fileTargets = do filePackages <- forM locals $ \lp -> do PackageComponentFile _ compFiles _ _ <- getPackageFile lp.package lp.cabalFP pure (lp, M.map (map dotCabalGetPath) compFiles) let foundFileTargetComponents :: [(Path Abs File, [(PackageName, NamedComponent)])] foundFileTargetComponents = map (\fp -> (fp, ) $ L.sort $ concatMap (\(lp, files) -> map ((lp.package.name,) . fst) (filter (elem fp . snd) (M.toList files)) ) filePackages ) fileTargets results <- forM foundFileTargetComponents $ \(fp, xs) -> case xs of [] -> do prettyWarnL [ flow "Couldn't find a component for file target" , pretty fp <> "." , flow "This means that the correct GHC options might not be used. \ \Attempting to load the file anyway." 
] pure $ Left fp [x] -> do prettyInfoL [ flow "Using configuration for" , displayPkgComponent x , flow "to load" , pretty fp ] pure $ Right (fp, x) (x:_) -> do prettyWarn $ fillSep [ flow "Multiple components contain file target" , pretty fp <> ":" , fillSep $ punctuate "," (map displayPkgComponent xs) ] <> line <> fillSep [ flow "Guessing the first one," , displayPkgComponent x <> "." ] pure $ Right (fp, x) let (extraFiles, associatedFiles) = partitionEithers results targetMap = foldl' unionTargets M.empty $ map (\(_, (name, comp)) -> M.singleton name (TargetComps (S.singleton comp))) associatedFiles infoMap = foldl' (M.unionWith (<>)) M.empty $ map (\(fp, (name, _)) -> M.singleton name [fp]) associatedFiles pure (targetMap, infoMap, extraFiles) getAllLocalTargets :: HasEnvConfig env => GhciOpts -> Map PackageName Target -> Maybe (Map PackageName Target) -> Map PackageName LocalPackage -> RIO env [(PackageName, (Path Abs File, Target))] getAllLocalTargets ghciOpts targets0 mainIsTargets localMap = do -- Use the 'mainIsTargets' as normal targets, for CLI concision. See -- #1845. This is a little subtle - we need to do the target parsing -- independently in order to handle the case where no targets are -- specified. let targets = maybe targets0 (unionTargets targets0) mainIsTargets packages <- view $ envConfigL . to (.sourceMap.project) -- Find all of the packages that are directly demanded by the -- targets. let directlyWanted = flip mapMaybe (M.toList packages) $ \(name, pp) -> case M.lookup name targets of Just simpleTargets -> Just (name, (pp.cabalFP, simpleTargets)) Nothing -> Nothing -- Figure out let extraLoadDeps = getExtraLoadDeps ghciOpts.loadLocalDeps localMap directlyWanted if (ghciOpts.skipIntermediate && not ghciOpts.loadLocalDeps) || null extraLoadDeps then pure directlyWanted else do let extraList' = map (fromPackageName . 
fst) extraLoadDeps :: [StyleDoc] extraList = mkNarrativeList (Just Current) False extraList' if ghciOpts.loadLocalDeps then prettyInfo $ fillSep $ [ flow "The following libraries will also be loaded into \ \GHCi because they are local dependencies of your \ \targets, and you specified" , style Shell "--load-local-deps" <> ":" ] <> extraList else prettyInfo $ fillSep ( flow "The following libraries will also be loaded into \ \GHCi because they are intermediate dependencies of \ \your targets:" : extraList ) <> line <> fillSep [ "(Use" , style Shell "--skip-intermediate-deps" , flow "to omit these.)" ] pure (directlyWanted ++ extraLoadDeps) getAllNonLocalTargets :: Map PackageName Target -> RIO env [PackageName] getAllNonLocalTargets targets = do let isNonLocal (TargetAll PTDependency) = True isNonLocal _ = False pure $ map fst $ filter (isNonLocal . snd) (M.toList targets) buildDepsAndInitialSteps :: HasEnvConfig env => GhciOpts -> [Text] -> RIO env () buildDepsAndInitialSteps ghciOpts localTargets = do let targets = localTargets ++ map T.pack ghciOpts.additionalPackages -- If necessary, do the build, for local packagee targets, only do -- 'initialBuildSteps'. 
case nonEmpty targets of -- only new local targets could appear here Just nonEmptyTargets | not ghciOpts.noBuild -> do eres <- buildLocalTargets nonEmptyTargets case eres of Right () -> pure () Left err -> do case fromException err of Just (PrettyException prettyErr) -> prettyError $ pretty prettyErr Nothing -> prettyError $ fromString (displayException err) prettyWarn "Build failed, but trying to launch GHCi anyway" _ -> pure () checkAdditionalPackages :: MonadThrow m => [String] -> m [PackageName] checkAdditionalPackages pkgs = forM pkgs $ \name -> do let mres = (pkgName <$> parsePackageIdentifier name) <|> parsePackageNameThrowing name maybe (throwM $ InvalidPackageOption name) pure mres runGhci :: HasEnvConfig env => GhciOpts -> [(PackageName, (Path Abs File, Target))] -> Maybe (Path Abs File) -> [GhciPkgInfo] -> [Path Abs File] -> [PackageName] -> Map PackageName (Seq NamedComponent) -> RIO env () runGhci ghciOpts targets mainFile pkgs extraFiles exposePackages exposeInternalDep = do config <- view configL let subDepsPackageUnhide pName deps = if null deps then [] else ["-package", fromPackageName pName] pkgopts = hidePkgOpts ++ genOpts ++ ghcOpts shouldHidePackages = fromMaybe (not (null pkgs && null exposePackages)) ghciOpts.hidePackages hidePkgOpts = if shouldHidePackages then ["-hide-all-packages"] -- This is necessary, because current versions of ghci will -- entirely fail to start if base isn't visible. This is because -- it tries to use the interpreter to set buffering options on -- standard IO. ++ (if null targets then ["-package", "base"] else []) ++ concatMap (\n -> ["-package", packageNameString n]) exposePackages ++ M.foldMapWithKey subDepsPackageUnhide exposeInternalDep else [] oneWordOpts bio | shouldHidePackages = bio.oneWordOpts ++ bio.packageFlags | otherwise = bio.oneWordOpts genOpts = nubOrd (concatMap (concatMap (oneWordOpts . snd) . (.opts)) pkgs) (omittedOpts, ghcOpts) = L.partition badForGhci $ concatMap (concatMap ((.opts) . snd) . 
(.opts)) pkgs ++ map T.unpack ( fold config.ghcOptionsByCat -- ^ include everything, locals, and targets ++ concatMap (getUserOptions . (.name)) pkgs ) getUserOptions pkg = M.findWithDefault [] pkg config.ghcOptionsByName badForGhci x = L.isPrefixOf "-O" x || elem x (words "-debug -threaded -ticky -static -Werror") unless (null omittedOpts) $ prettyWarn $ fillSep ( flow "The following GHC options are incompatible with GHCi \ \and have not been passed to it:" : mkNarrativeList (Just Current) False (map fromString (nubOrd omittedOpts) :: [StyleDoc]) ) <> line oiDir <- view objectInterfaceDirL let odir = [ "-odir=" <> toFilePathNoTrailingSep oiDir , "-hidir=" <> toFilePathNoTrailingSep oiDir ] prettyInfoL ( flow "Configuring GHCi with the following packages:" : mkNarrativeList (Just Current) False (map (fromPackageName . (.name)) pkgs :: [StyleDoc]) ) compilerExeName <- view $ compilerPathsL . to (.compiler) . to toFilePath let execGhci extras = do menv <- liftIO $ config.processContextSettings defaultEnvSettings withPackageWorkingDir $ withProcessContext menv $ exec (fromMaybe compilerExeName ghciOpts.ghcCommand) ( ("--interactive" : ) $ -- This initial "-i" resets the include directories to not -- include CWD. If there aren't any packages, CWD is included. (if null pkgs then id else ("-i" : )) $ odir <> pkgopts <> extras <> ghciOpts.ghcOptions <> ghciOpts.args ) withPackageWorkingDir = case pkgs of [pkg] -> withWorkingDir (toFilePath pkg.dir) _ -> id -- Since usage of 'exec' does not pure, we cannot do any cleanup on ghci -- exit. So, instead leave the generated files. To make this more -- efficient and avoid gratuitous generation of garbage, the file names -- are determined by hashing. This also has the nice side effect of making -- it possible to copy the ghci invocation out of the log and have it -- still work. 
tmpDirectory <- getXdgDir XdgCache $ Just (relDirStackProgName relDirGhciScript) ghciDir <- view ghciDirL ensureDir ghciDir ensureDir tmpDirectory macrosOptions <- writeMacrosFile ghciDir pkgs if ghciOpts.noLoadModules then execGhci macrosOptions else do checkForDuplicateModules pkgs scriptOptions <- writeGhciScript tmpDirectory (renderScript pkgs mainFile ghciOpts.onlyMain extraFiles) execGhci (macrosOptions ++ scriptOptions) writeMacrosFile :: HasTerm env => Path Abs Dir -> [GhciPkgInfo] -> RIO env [String] writeMacrosFile outputDirectory pkgs = do fps <- fmap (nubOrd . concatMap catMaybes) $ forM pkgs $ \pkg -> forM pkg.opts $ \(_, bio) -> do let cabalMacros = bio.cabalMacros exists <- liftIO $ doesFileExist cabalMacros if exists then pure $ Just cabalMacros else do prettyWarnL ["Didn't find expected autogen file:", pretty cabalMacros] pure Nothing files <- liftIO $ mapM (S8.readFile . toFilePath) fps if null files then pure [] else do out <- liftIO $ writeHashedFile outputDirectory relFileCabalMacrosH $ S8.concat $ map (<> "\n#undef CURRENT_PACKAGE_KEY\n#undef CURRENT_COMPONENT_ID\n") files pure ["-optP-include", "-optP" <> toFilePath out] writeGhciScript :: (MonadIO m) => Path Abs Dir -> GhciScript -> m [String] writeGhciScript outputDirectory script = do scriptPath <- liftIO $ writeHashedFile outputDirectory relFileGhciScript $ LBS.toStrict $ scriptToLazyByteString script let scriptFilePath = toFilePath scriptPath setScriptPerms scriptFilePath pure ["-ghci-script=" <> scriptFilePath] writeHashedFile :: Path Abs Dir -> Path Rel File -> ByteString -> IO (Path Abs File) writeHashedFile outputDirectory relFile contents = do relSha <- shaPathForBytes contents let outDir = outputDirectory relSha outFile = outDir relFile alreadyExists <- doesFileExist outFile unless alreadyExists $ do ensureDir outDir writeBinaryFileAtomic outFile $ byteString contents pure outFile renderScript :: [GhciPkgInfo] -> Maybe (Path Abs File) -> Bool -> [Path Abs File] -> GhciScript 
renderScript pkgs mainFile onlyMain extraFiles = do let addPhase = cmdAdd $ S.fromList (map Left allModules ++ addMain) addMain = maybe [] (L.singleton . Right) mainFile modulePhase = cmdModule $ S.fromList allModules allModules = nubOrd $ concatMap (M.keys . (.modules)) pkgs case getFileTargets pkgs <> extraFiles of [] -> if onlyMain then if isJust mainFile then cmdAdd (S.fromList addMain) else mempty else addPhase <> modulePhase fileTargets -> cmdAdd (S.fromList (map Right fileTargets)) -- Hacky check if module / main phase should be omitted. This should be -- improved if / when we have a better per-component load. getFileTargets :: [GhciPkgInfo] -> [Path Abs File] getFileTargets = concatMap (concat . maybeToList . (.targetFiles)) -- | Figure out the main-is file to load based on the targets. Asks the user for -- input if there is more than one candidate main-is file. figureOutMainFile :: (HasRunner env, HasTerm env) => BuildOpts -> Maybe (Map PackageName Target) -> [(PackageName, (Path Abs File, Target))] -> [GhciPkgInfo] -> RIO env (Maybe (Path Abs File)) figureOutMainFile bopts mainIsTargets targets0 packages = case candidates of [] -> pure Nothing [c@(_,_,fp)] -> do prettyInfo $ fillSep [ "Using" , style Current "main" , "module:" ] <> line <> renderCandidate c <> line pure (Just fp) candidate:_ -> do prettyWarn $ fillSep [ "The" , style Current "main" , flow "module to load is ambiguous. Candidates are:" ] <> line <> mconcat (L.intersperse line (map renderCandidate candidates)) <> blankLine <> flow "You can specify which one to pick by:" <> line <> bulletedList [ fillSep [ flow "Specifying targets to" , style Shell (flow "stack ghci") , "e.g." , style Shell ( fillSep [ flow "stack ghci" , sampleTargetArg candidate ] ) <> "." ] , fillSep [ flow "Specifying what the" , style Current "main" , flow "is e.g." , style Shell ( fillSep [ flow "stack ghci" , sampleMainIsArg candidate ] ) <> "." ] , flow $ "Choosing from the candidate above [1.." 
<> show (length candidates) <> "]." ] <> line liftIO userOption where targets = fromMaybe (M.fromList $ map (\(k, (_, x)) -> (k, x)) targets0) mainIsTargets candidates = do pkg <- packages case M.lookup pkg.name targets of Nothing -> [] Just target -> do (component,mains) <- M.toList $ M.filterWithKey (\k _ -> k `S.member` wantedComponents) pkg.mainIs main <- mains pure (pkg.name, component, main) where wantedComponents = wantedPackageComponents bopts target pkg.package renderCandidate c@(pkgName, namedComponent, mainIs) = let candidateIndex = fromString . show . (+1) . fromMaybe 0 . L.elemIndex c pkgNameText = fromPackageName pkgName in hang 4 $ fill 4 ( candidateIndex candidates <> ".") <> fillSep [ "Package" , style Current pkgNameText <> "," , "component" -- This is the format that can be directly copy-pasted as an -- argument to `stack ghci`. , style PkgComponent ( pkgNameText <> ":" <> renderComponentTo namedComponent ) <> "," , "with" , style Shell "main-is" , "file:" , pretty mainIs <> "." ] candidateIndices = take (length candidates) [1 :: Int ..] userOption = do option <- prompt "Specify main module to use (press enter to load none): " let selected = fromMaybe ((+1) $ length candidateIndices) (readMaybe (T.unpack option) :: Maybe Int) case L.elemIndex selected candidateIndices of Nothing -> do putStrLn "Not loading any main modules, as no valid module selected" putStrLn "" pure Nothing Just op -> do (_, _, fp) <- maybe (prettyThrowIO CandidatesIndexOutOfRangeBug) pure (candidates !? 
op) putStrLn ("Loading main module from candidate " <> show (op + 1) <> ", --main-is " <> toFilePath fp) putStrLn "" pure $ Just fp sampleTargetArg (pkg, comp, _) = fromPackageName pkg <> ":" <> renderComponentTo comp sampleMainIsArg (pkg, comp, _) = fillSep [ "--main-is" , fromPackageName pkg <> ":" <> renderComponentTo comp ] loadGhciPkgDescs :: HasEnvConfig env => BuildOptsCLI -> [(PackageName, (Path Abs File, Target))] -> RIO env [GhciPkgDesc] loadGhciPkgDescs buildOptsCLI localTargets = forM localTargets $ \(name, (cabalFP, target)) -> loadGhciPkgDesc buildOptsCLI name cabalFP target -- | Load package description information for a ghci target. loadGhciPkgDesc :: HasEnvConfig env => BuildOptsCLI -> PackageName -> Path Abs File -> Target -> RIO env GhciPkgDesc loadGhciPkgDesc buildOptsCLI name cabalFP target = do econfig <- view envConfigL compilerVersion <- view actualCompilerVersionL let sm = econfig.sourceMap -- Currently this source map is being build with -- the default targets sourceMapGhcOptions = fromMaybe [] $ ((.projectCommon.ghcOptions) <$> M.lookup name sm.project) <|> ((.depCommon.ghcOptions) <$> M.lookup name sm.deps) sourceMapCabalConfigOpts = fromMaybe [] $ ( (.projectCommon.cabalConfigOpts) <$> M.lookup name sm.project) <|> ((.depCommon.cabalConfigOpts) <$> M.lookup name sm.deps) sourceMapFlags = maybe mempty (.projectCommon.flags) $ M.lookup name sm.project config = PackageConfig { enableTests = True , enableBenchmarks = True , flags = getLocalFlags buildOptsCLI name `M.union` sourceMapFlags , ghcOptions = sourceMapGhcOptions , cabalConfigOpts = sourceMapCabalConfigOpts , compilerVersion = compilerVersion , platform = view platformL econfig } -- TODO we've already parsed this information, otherwise we wouldn't have -- figured out the cabalFP already. In the future: retain that -- GenericPackageDescription in the relevant data structures to avoid -- reparsing. 
(gpdio, _name, _cabalFP) <- loadCabalFilePath (Just stackProgName') (parent cabalFP) gpkgdesc <- liftIO $ gpdio YesPrintWarnings -- Source the package's *.buildinfo file created by configure if any. See -- https://www.haskell.org/cabal/users-guide/developing-packages.html#system-dependent-parameters buildinfofp <- parseRelFile (packageNameString name ++ ".buildinfo") hasDotBuildinfo <- doesFileExist (parent cabalFP buildinfofp) let mbuildinfofp | hasDotBuildinfo = Just (parent cabalFP buildinfofp) | otherwise = Nothing mbuildinfo <- forM mbuildinfofp readDotBuildinfo let pdp = resolvePackageDescription config gpkgdesc package = packageFromPackageDescription config (C.genPackageFlags gpkgdesc) $ maybe pdp (`C.updatePackageDescription` pdp) mbuildinfo pure GhciPkgDesc { package , cabalFP , target } getGhciPkgInfos :: HasEnvConfig env => InstallMap -> [PackageName] -> Maybe (Map PackageName [Path Abs File]) -> [GhciPkgDesc] -> RIO env [GhciPkgInfo] getGhciPkgInfos installMap addPkgs mfileTargets localTargets = do (installedMap, _, _, _) <- getInstalled installMap let localLibs = [ desc.package.name | desc <- localTargets , hasLocalComp isCLib desc.target ] forM localTargets $ \pkgDesc -> makeGhciPkgInfo installMap installedMap localLibs addPkgs mfileTargets pkgDesc -- | Make information necessary to load the given package in GHCi. 
makeGhciPkgInfo :: HasEnvConfig env => InstallMap -> InstalledMap -> [PackageName] -> [PackageName] -> Maybe (Map PackageName [Path Abs File]) -> GhciPkgDesc -> RIO env GhciPkgInfo makeGhciPkgInfo installMap installedMap locals addPkgs mfileTargets pkgDesc = do bopts <- view buildOptsL let pkg = pkgDesc.package cabalFP = pkgDesc.cabalFP target = pkgDesc.target name = pkg.name (mods, files, opts) <- getPackageOpts pkg installMap installedMap locals addPkgs cabalFP let filteredOpts = filterWanted opts filterWanted = M.filterWithKey (\k _ -> k `S.member` allWanted) allWanted = wantedPackageComponents bopts target pkg pure GhciPkgInfo { name , opts = M.toList filteredOpts , dir = parent cabalFP , modules = unionModuleMaps $ map ( \(comp, mp) -> M.map (\fp -> M.singleton fp (S.singleton (pkg.name, comp))) mp ) (M.toList (filterWanted mods)) , mainIs = M.map (mapMaybe dotCabalMainPath) files , cFiles = mconcat (M.elems (filterWanted (M.map (mapMaybe dotCabalCFilePath) files))) , targetFiles = mfileTargets >>= M.lookup name , package = pkg } -- NOTE: this should make the same choices as the components code in -- 'loadLocalPackage'. Unfortunately for now we reiterate this logic -- (differently). 
wantedPackageComponents :: BuildOpts -> Target -> Package -> Set NamedComponent wantedPackageComponents _ (TargetComps cs) _ = cs wantedPackageComponents bopts (TargetAll PTProject) pkg = S.fromList $ ( if hasBuildableMainLibrary pkg then CLib : map CSubLib buildableForeignLibs' else [] ) <> map CExe buildableExes' <> map CSubLib buildableSubLibs <> (if bopts.tests then map CTest buildableTestSuites else []) <> (if bopts.benchmarks then map CBench buildableBenchmarks else []) where buildableForeignLibs' = S.toList $ buildableForeignLibs pkg buildableSubLibs = getBuildableListText pkg.subLibraries buildableExes' = S.toList $ buildableExes pkg buildableTestSuites = getBuildableListText pkg.testSuites buildableBenchmarks = getBuildableListText pkg.benchmarks wantedPackageComponents _ _ _ = S.empty checkForIssues :: HasTerm env => [GhciPkgInfo] -> RIO env () checkForIssues pkgs = when (length pkgs > 1) $ do -- Cabal flag issues could arise only when there are at least 2 packages unless (null cabalFlagIssues) $ do prettyWarn $ flow "There are Cabal flags for this project which may prevent \ \GHCi from loading your code properly. In some cases it \ \can also load some projects which would otherwise fail to \ \build." <> blankLine <> mconcat (L.intersperse blankLine cabalFlagIssues) <> blankLine <> flow "To resolve, remove the flag(s) from the Cabal file(s) and \ \instead put them at the top of the Haskell files." <> blankLine prettyWarnL [ flow "It isn't yet possible to load multiple packages into GHCi in \ \all cases. For further information, see" , style Url "https://ghc.haskell.org/trac/ghc/ticket/10827" <> "." ] where cabalFlagIssues = concatMap mixedFlag [ ( "-XNoImplicitPrelude" , [ flow "-XNoImplicitPrelude will be used, but GHCi will likely fail to \ \build things which depend on the implicit prelude." ] ) , ( "-XCPP" , [ flow "-XCPP will be used, but it can cause issues with multiline \ \strings. 
For further information, see" , style Url "https://downloads.haskell.org/~ghc/7.10.2/docs/html/users_guide/options-phases.html#cpp-string-gaps" <> "." ] ) , ( "-XNoTraditionalRecordSyntax" , [ flow "-XNoTraditionalRecordSyntax will be used, but it break modules \ \which use record syntax." ] ) , ( "-XTemplateHaskell" , [ flow "-XTemplateHaskell will be used, but it may cause compilation \ \issues due to different parsing of '$' when there's no space \ \after it." ] ) , ( "-XQuasiQuotes" , [ flow "-XQuasiQuotes will be used, but it may cause parse failures \ \due to a different meaning for list comprehension syntax like \ \[x| ... ]" ] ) , ( "-XSafe" , [ flow "-XSafe will be used, but it will fail to compile unsafe \ \modules." ] ) , ( "-XArrows" , [ flow "-XArrows will be used, but it will cause non-arrow usages of \ \proc, (-<), (-<<) to fail" ] ) , ( "-XOverloadedStrings" , [ flow "-XOverloadedStrings will be used, but it can cause type \ \ambiguity in code not usually compiled with it." ] ) , ( "-XOverloadedLists" , [ flow "-XOverloadedLists will be used, but it can cause type \ \ambiguity in code not usually compiled with it." ] ) , ( "-XMonoLocalBinds" , [ flow "-XMonoLocalBinds will be used, but it can cause type errors in \ \code which expects generalized local bindings." ] ) , ( "-XTypeFamilies" , [ flow "-XTypeFamilies will be used, but it implies -XMonoLocalBinds, \ \and so can cause type errors in code which expects generalized \ \local bindings." ] ) , ( "-XGADTs" , [ flow "-XGADTs will be used, but it implies -XMonoLocalBinds, and so \ \can cause type errors in code which expects generalized local \ \bindings." ] ) , ( "-XNewQualifiedOperators" , [ flow "-XNewQualifiedOperators will be used, but this will break \ \usages of the old qualified operator syntax." 
] ) ] mixedFlag (flag, msgs) = let x = partitionComps (== flag) in [ fillSep $ msgs ++ showWhich x | mixedSettings x ] mixedSettings (xs, ys) = xs /= [] && ys /= [] showWhich (haveIt, don'tHaveIt) = [ flow "It is specified for:" ] <> mkNarrativeList (Just PkgComponent) False (map (fromString . T.unpack . renderPkgComponent) haveIt :: [StyleDoc]) <> [ flow "But not for:" ] <> mkNarrativeList (Just PkgComponent) False (map (fromString . T.unpack . renderPkgComponent) don'tHaveIt :: [StyleDoc]) partitionComps f = (map fst xs, map fst ys) where (xs, ys) = L.partition (any f . snd) compsWithOpts compsWithOpts = map (\(k, bio) -> (k, bio.oneWordOpts ++ bio.opts)) compsWithBios compsWithBios = [ ((pkg.name, c), bio) | pkg <- pkgs , (c, bio) <- pkg.opts ] -- TODO: Should this also tell the user the filepaths, not just the -- module name? checkForDuplicateModules :: HasTerm env => [GhciPkgInfo] -> RIO env () checkForDuplicateModules pkgs = unless (null duplicates) $ prettyWarn $ flow "Multiple files use the same module name:" <> line <> bulletedList (map prettyDuplicate duplicates) <> line -- MSS 2020-10-13 Disabling, may remove entirely in the future -- See: https://github.com/commercialhaskell/stack/issues/5407#issuecomment-707339928 -- throwM LoadingDuplicateModules where duplicates :: [(ModuleName, Map (Path Abs File) (Set (PackageName, NamedComponent)))] duplicates = filter (\(_, mp) -> M.size mp > 1) $ M.toList $ unionModuleMaps (map (.modules) pkgs) prettyDuplicate :: (ModuleName, Map (Path Abs File) (Set (PackageName, NamedComponent))) -> StyleDoc prettyDuplicate (mn, mp) = fillSep [ style Error (pretty mn) , flow "found at the following paths" ] <> line <> bulletedList (map fileDuplicate (M.toList mp)) fileDuplicate :: (Path Abs File, Set (PackageName, NamedComponent)) -> StyleDoc fileDuplicate (fp, comps) = fillSep [ pretty fp , parens $ fillSep $ punctuate "," (map displayPkgComponent (S.toList comps)) ] targetWarnings :: HasBuildConfig env => [(PackageName, 
(Path Abs File, Target))] -> [PackageName] -> Maybe (Map PackageName [Path Abs File], [Path Abs File]) -> RIO env () targetWarnings localTargets nonLocalTargets mfileTargets = do unless (null nonLocalTargets) $ prettyWarnL [ flow "Some targets" , parens $ fillSep $ punctuate "," $ map (style Good . fromPackageName) nonLocalTargets , flow "are not local packages, and so cannot be directly loaded. In \ \future versions of Stack, this might be supported - see" , style Url "https://github.com/commercialhaskell/stack/issues/1441" , "." , flow "It can still be useful to specify these, as they will be passed \ \to ghci via -package flags." ] when (null localTargets && isNothing mfileTargets) $ do smWanted <- view $ buildConfigL . to (.smWanted) stackYaml <- view stackYamlL prettyNote $ vsep [ flow "No local targets specified, so a plain ghci will be started with \ \no package hiding or package options." , "" , flow $ T.unpack $ utf8BuilderToText $ "You are using snapshot: " <> display smWanted.snapshotLocation , "" , flow "If you want to use package hiding and options, then you can try \ \one of the following:" , "" , bulletedList [ fillSep [ flow "If you want to start a different project configuration \ \than" , pretty stackYaml <> "," , flow "then you can use" , style Shell "stack init" , flow "to create a new stack.yaml for the packages in the \ \current directory." , line ] , flow "If you want to use the project configuration at" , pretty stackYaml <> "," , flow "then you can add to its 'packages' field." ] , "" ] -- Adds in intermediate dependencies between ghci targets. Note that it will -- return a Lib component for these intermediate dependencies even if they don't -- have a library (but that's fine for the usage within this module). -- -- If 'True' is passed for loadAllDeps, this loads all local deps, even if they -- aren't intermediate. 
getExtraLoadDeps :: Bool -> Map PackageName LocalPackage -> [(PackageName, (Path Abs File, Target))] -> [(PackageName, (Path Abs File, Target))] getExtraLoadDeps loadAllDeps localMap targets = M.toList $ (\mp -> foldl' (flip M.delete) mp (map fst targets)) $ M.mapMaybe id $ execState (mapM_ (mapM_ go . getDeps . fst) targets) (M.fromList (map (second Just) targets)) where getDeps :: PackageName -> [PackageName] getDeps name = case M.lookup name localMap of Just lp -> listOfPackageDeps lp.package -- FIXME just Local? _ -> [] go :: PackageName -> State (Map PackageName (Maybe (Path Abs File, Target))) Bool go name = do cache <- get case (M.lookup name cache, M.lookup name localMap) of (Just (Just _), _) -> pure True (Just Nothing, _) | not loadAllDeps -> pure False (_, Just lp) -> do let deps = listOfPackageDeps lp.package shouldLoad <- or <$> mapM go deps if shouldLoad then do modify (M.insert name (Just (lp.cabalFP, TargetComps (S.singleton CLib)))) pure True else do modify (M.insert name Nothing) pure False (_, _) -> pure False unionTargets :: Ord k => Map k Target -> Map k Target -> Map k Target unionTargets = M.unionWith $ \l r -> case (l, r) of (TargetAll PTDependency, _) -> r (TargetComps sl, TargetComps sr) -> TargetComps (S.union sl sr) (TargetComps _, TargetAll PTProject) -> TargetAll PTProject (TargetComps _, _) -> l (TargetAll PTProject, _) -> TargetAll PTProject hasLocalComp :: (NamedComponent -> Bool) -> Target -> Bool hasLocalComp p t = case t of TargetComps s -> any p (S.toList s) TargetAll PTProject -> True _ -> False stack-2.15.7/src/Stack/Ghci/Script.hs0000644000000000000000000000523214604306201015405 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Ghci.Script ( GhciScript , ModuleName , cmdAdd , cmdModule , scriptToLazyByteString , scriptToBuilder , scriptToFile ) where import Data.ByteString.Builder ( toLazyByteString ) import qualified Data.List as L 
import qualified Data.Set as S import Distribution.ModuleName ( ModuleName, components ) import Stack.Prelude import System.IO ( hSetBinaryMode ) newtype GhciScript = GhciScript { ghciScript :: [GhciCommand] } instance Semigroup GhciScript where GhciScript xs <> GhciScript ys = GhciScript (ys <> xs) instance Monoid GhciScript where mempty = GhciScript [] mappend = (<>) data GhciCommand = AddCmd (Set (Either ModuleName (Path Abs File))) | ModuleCmd (Set ModuleName) deriving Show cmdAdd :: Set (Either ModuleName (Path Abs File)) -> GhciScript cmdAdd = GhciScript . (:[]) . AddCmd cmdModule :: Set ModuleName -> GhciScript cmdModule = GhciScript . (:[]) . ModuleCmd scriptToLazyByteString :: GhciScript -> LByteString scriptToLazyByteString = toLazyByteString . scriptToBuilder scriptToBuilder :: GhciScript -> Builder scriptToBuilder backwardScript = mconcat $ fmap commandToBuilder script where script = reverse backwardScript.ghciScript scriptToFile :: Path Abs File -> GhciScript -> IO () scriptToFile path script = withFile filepath WriteMode $ \hdl -> do hSetBuffering hdl (BlockBuffering Nothing) hSetBinaryMode hdl True hPutBuilder hdl (scriptToBuilder script) where filepath = toFilePath path -- Command conversion commandToBuilder :: GhciCommand -> Builder commandToBuilder (AddCmd modules) | S.null modules = mempty | otherwise = ":add " <> mconcat ( L.intersperse " " $ fmap ( fromString . quoteFileName . either (mconcat . L.intersperse "." . components) toFilePath ) (S.toAscList modules) ) <> "\n" commandToBuilder (ModuleCmd modules) | S.null modules = ":module +\n" | otherwise = ":module + " <> mconcat ( L.intersperse " " $ fromString . quoteFileName . mconcat . L.intersperse "." . components <$> S.toAscList modules ) <> "\n" -- | Make sure that a filename with spaces in it gets the proper quotes. 
quoteFileName :: String -> String quoteFileName x = if ' ' `elem` x then show x else x stack-2.15.7/src/Stack/Hoogle.hs0000644000000000000000000002505014620153445014514 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | A wrapper around hoogle. module Stack.Hoogle ( hoogleCmd ) where import qualified Data.ByteString.Lazy.Char8 as BL8 import Data.Char ( isSpace ) import Data.Either.Extra ( eitherToMaybe ) import qualified Data.Text as T import Distribution.PackageDescription ( packageDescription, package ) import Distribution.Types.PackageName ( mkPackageName ) import Distribution.Version ( mkVersion ) import Lens.Micro ( (?~) ) import Path ( parseAbsFile ) import Path.IO ( createDirIfMissing, doesFileExist ) import qualified RIO.Map as Map import RIO.Process ( findExecutable, proc, readProcess_, runProcess_) import qualified Stack.Build ( build ) import Stack.Build.Target ( NeedTargets (..) ) import Stack.Constants ( stackProgName' ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig, withDefaultEnvConfig , withEnvConfig ) import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..), defaultBuildOptsCLI ) import Stack.Types.BuildOptsMonoid ( buildOptsMonoidHaddockL ) import Stack.Types.Config ( Config (..), HasConfig (..) ) import Stack.Types.EnvConfig ( EnvConfig, HasSourceMap (..), hoogleDatabasePath , hoogleRoot ) import Stack.Types.EnvSettings ( EnvSettings (..) ) import Stack.Types.GlobalOpts ( GlobalOpts (..), globalOptsBuildOptsMonoidL ) import Stack.Types.Runner ( Runner, globalOptsL ) import Stack.Types.SourceMap ( DepPackage (..), SourceMap (..) ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Hoogle" module. 
data HoogleException = HoogleOnPathNotFoundBug deriving (Show, Typeable) instance Exception HoogleException where displayException HoogleOnPathNotFoundBug = bugReport "[S-9669]" "Cannot find Hoogle executable on PATH, after installing." -- | Type representing \'pretty\' exceptions thrown by functions exported by the -- "Stack.Hoogle" module. data HooglePrettyException = HoogleNotFound StyleDoc | HoogleDatabaseNotFound deriving (Show, Typeable) instance Pretty HooglePrettyException where pretty (HoogleNotFound e) = "[S-1329]" <> line <> e <> line <> fillSep [ flow "Not installing Hoogle due to" , style Shell "--no-setup" <> "." ] pretty HoogleDatabaseNotFound = "[S-3025]" <> line <> fillSep [ flow "No Hoogle database. Not building one due to" , style Shell "--no-setup" <> "." ] instance Exception HooglePrettyException -- | Helper type to duplicate log messages data Muted = Muted | NotMuted -- | Hoogle command. hoogleCmd :: ([String], Bool, Bool, Bool) -> RIO Runner () hoogleCmd (args, setup, rebuild, startServer) = local (over globalOptsL modifyGO) $ withConfig YesReexec $ withDefaultEnvConfig $ do hooglePath <- ensureHoogleInPath generateDbIfNeeded hooglePath runHoogle hooglePath args' where modifyGO :: GlobalOpts -> GlobalOpts modifyGO = globalOptsBuildOptsMonoidL . buildOptsMonoidHaddockL ?~ True args' :: [String] args' = if startServer then ["server", "--local", "--port", "8080"] ++ args else args generateDbIfNeeded :: Path Abs File -> RIO EnvConfig () generateDbIfNeeded hooglePath = do databaseExists <- checkDatabaseExists if databaseExists && not rebuild then pure () else if setup || rebuild then do prettyWarn $ if rebuild then flow "Rebuilding database ..." else fillSep [ flow "No Hoogle database yet. Automatically building \ \Haddock documentation and Hoogle database (use" , style Shell "--no-setup" , flow "to disable) ..." ] buildHaddocks prettyInfoS "Built Haddock documentation." generateDb hooglePath prettyInfoS "Generated Hoogle database." 
else prettyThrowIO HoogleDatabaseNotFound generateDb :: Path Abs File -> RIO EnvConfig () generateDb hooglePath = do dir <- hoogleRoot createDirIfMissing True dir runHoogle hooglePath ["generate", "--local"] buildHaddocks :: RIO EnvConfig () buildHaddocks = do config <- view configL runRIO config $ -- a bit weird that we have to drop down like this catch (withDefaultEnvConfig $ Stack.Build.build Nothing) (\(_ :: ExitCode) -> pure ()) hooglePackageName = mkPackageName "hoogle" hoogleMinVersion = mkVersion [5, 0] hoogleMinIdent = PackageIdentifier hooglePackageName hoogleMinVersion installHoogle :: RIO EnvConfig (Path Abs File) installHoogle = requiringHoogle Muted $ do Stack.Build.build Nothing mhooglePath' <- findExecutable "hoogle" case mhooglePath' of Right hooglePath -> parseAbsFile hooglePath Left _ -> throwIO HoogleOnPathNotFoundBug requiringHoogle :: Muted -> RIO EnvConfig x -> RIO EnvConfig x requiringHoogle muted f = do hoogleTarget <- do sourceMap <- view $ sourceMapL . to (.deps) case Map.lookup hooglePackageName sourceMap of Just hoogleDep -> case hoogleDep.location of PLImmutable pli -> T.pack . packageIdentifierString <$> restrictMinHoogleVersion muted (packageLocationIdent pli) plm@(PLMutable _) -> T.pack . packageIdentifierString . package . packageDescription <$> loadCabalFile (Just stackProgName') plm Nothing -> do -- not muted because this should happen only once prettyWarnS "No hoogle version was found, trying to install the latest version" mpir <- getLatestHackageVersion YesRequireHackageIndex hooglePackageName UsePreferredVersions let hoogleIdent = case mpir of Nothing -> hoogleMinIdent Just (PackageIdentifierRevision _ ver _) -> PackageIdentifier hooglePackageName ver T.pack . 
packageIdentifierString <$> restrictMinHoogleVersion muted hoogleIdent config <- view configL let boptsCLI = defaultBuildOptsCLI { targetsCLI = [hoogleTarget] } runRIO config $ withEnvConfig NeedTargets boptsCLI f restrictMinHoogleVersion :: HasLogFunc env => Muted -> PackageIdentifier -> RIO env PackageIdentifier restrictMinHoogleVersion muted ident = if ident < hoogleMinIdent then do muteableLog LevelWarn muted $ "Minimum " <> fromString (packageIdentifierString hoogleMinIdent) <> " is not in your index. Installing the minimum version." pure hoogleMinIdent else do muteableLog LevelInfo muted $ "Minimum version is " <> fromString (packageIdentifierString hoogleMinIdent) <> ". Found acceptable " <> fromString (packageIdentifierString ident) <> " in your index, requiring its installation." pure ident muteableLog :: HasLogFunc env => LogLevel -> Muted -> Utf8Builder -> RIO env () muteableLog logLevel muted msg = case muted of Muted -> pure () NotMuted -> logGeneric "" logLevel msg runHoogle :: Path Abs File -> [String] -> RIO EnvConfig () runHoogle hooglePath hoogleArgs = do config <- view configL menv <- liftIO $ config.processContextSettings envSettings dbpath <- hoogleDatabasePath let databaseArg = ["--database=" ++ toFilePath dbpath] withProcessContext menv $ proc (toFilePath hooglePath) (hoogleArgs ++ databaseArg) runProcess_ checkDatabaseExists = do path <- hoogleDatabasePath liftIO (doesFileExist path) ensureHoogleInPath :: RIO EnvConfig (Path Abs File) ensureHoogleInPath = do config <- view configL menv <- liftIO $ config.processContextSettings envSettings mHooglePath' <- eitherToMaybe <$> runRIO menv (findExecutable "hoogle") let mHooglePath'' = eitherToMaybe <$> requiringHoogle NotMuted (findExecutable "hoogle") mHooglePath <- maybe mHooglePath'' (pure . 
Just) mHooglePath' eres <- case mHooglePath of Nothing -> pure $ Left (flow "Hoogle isn't installed.") Just hooglePath -> do result <- withProcessContext menv $ proc hooglePath ["--numeric-version"] $ tryAny . fmap fst . readProcess_ let unexpectedResult got = Left $ fillSep [ style Shell (fromString hooglePath) , style Shell "--numeric-version" , flow "did not respond with expected value. Got:" ] <> blankLine <> got pure $ case result of Left err -> unexpectedResult $ string (displayException err) Right bs -> case parseVersion (takeWhile (not . isSpace) (BL8.unpack bs)) of Nothing -> unexpectedResult $ fromString (BL8.unpack bs) Just ver | ver >= hoogleMinVersion -> Right hooglePath | otherwise -> Left $ fillSep [ flow "Installed Hoogle is too old, " , style Shell (fromString hooglePath) , flow "is version" , fromString (versionString ver) , flow "but >= 5.0 is required." ] case eres of Right hooglePath -> parseAbsFile hooglePath Left err | setup -> do prettyWarnL [ err , flow "Automatically installing (use" , style Shell "--no-setup" , flow "to disable) ..." ] installHoogle | otherwise -> prettyThrowIO $ HoogleNotFound err envSettings = EnvSettings { includeLocals = True , includeGhcPackagePath = True , stackExe = True , localeUtf8 = False , keepGhcRts = False } stack-2.15.7/src/Stack/IDE.hs0000644000000000000000000000707314604306201013675 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Types and functions related to Stack's @ide@ command. module Stack.IDE ( OutputStream (..) , ListPackagesCmd (..) , idePackagesCmd , ideTargetsCmd , listPackages , listTargets ) where import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Text as T import Data.Tuple ( swap ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withBuildConfig, withConfig ) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..) 
) import Stack.Types.NamedComponent ( NamedComponent, isCBench, isCExe, isCTest , renderPkgComponent ) import Stack.Types.Runner ( Runner ) import Stack.Types.SourceMap ( ProjectPackage (..), SMWanted (..), ppComponentsMaybe ) import System.IO ( putStrLn ) -- Type representing output stream choices for the @stack ide packages@ and -- @stack ide targets@ commands. data OutputStream = OutputLogInfo -- ^ To the same output stream as other log information. | OutputStdout -- ^ To the standard output stream. -- Type representing output choices for the @stack ide packages@ command. data ListPackagesCmd = ListPackageNames -- ^ Package names. | ListPackageCabalFiles -- ^ Paths to Cabal files. -- | Function underlying the @stack ide packages@ command. List packages in the -- project. idePackagesCmd :: (OutputStream, ListPackagesCmd) -> RIO Runner () idePackagesCmd = withConfig NoReexec . withBuildConfig . uncurry listPackages compTypes :: (Bool, Bool, Bool) -> NamedComponent -> Bool compTypes (False, False, False) = const True compTypes (exe, test, bench) = \x -> (exe && isCExe x) || (test && isCTest x) || (bench && isCBench x) -- | Function underlying the @stack ide targets@ command. List targets in the -- project. ideTargetsCmd :: ((Bool, Bool, Bool), OutputStream) -> RIO Runner () ideTargetsCmd = withConfig NoReexec . withBuildConfig . uncurry listTargets . fmap compTypes . swap outputFunc :: HasTerm env => OutputStream -> String -> RIO env () outputFunc OutputLogInfo = prettyInfo . fromString outputFunc OutputStdout = liftIO . putStrLn -- | List the packages inside the current project. listPackages :: HasBuildConfig env => OutputStream -> ListPackagesCmd -> RIO env () listPackages stream flag = do packages <- view $ buildConfigL . to (.smWanted.project) let strs = case flag of ListPackageNames -> map packageNameString (Map.keys packages) ListPackageCabalFiles -> map (toFilePath . 
(.cabalFP)) (Map.elems packages) mapM_ (outputFunc stream) strs -- | List the targets in the current project. listTargets :: forall env. HasBuildConfig env => OutputStream -> (NamedComponent -> Bool) -> RIO env () listTargets stream isCompType = do packages <- view $ buildConfigL . to (.smWanted.project) pairs <- concat <$> Map.traverseWithKey toNameAndComponent packages outputFunc stream $ T.unpack $ T.intercalate "\n" $ map renderPkgComponent pairs where toNameAndComponent :: PackageName -> ProjectPackage -> RIO env [(PackageName, NamedComponent)] toNameAndComponent pkgName' = fmap (map (pkgName',) . Set.toList) . ppComponentsMaybe (\x -> if isCompType x then Just x else Nothing) stack-2.15.7/src/Stack/Init.hs0000644000000000000000000006722214620153474014213 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeFamilies #-} -- | Types and functions related to Stack's @init@ command. module Stack.Init ( InitOpts (..) 
, initCmd , initProject ) where import qualified Data.Aeson.KeyMap as KeyMap import qualified Data.ByteString.Builder as B import qualified Data.ByteString.Char8 as BC import qualified Data.Foldable as F import qualified Data.IntMap as IntMap import Data.List.Extra ( groupSortOn ) import Data.List.NonEmpty.Extra ( minimumBy1 ) import qualified Data.Map.Strict as Map import qualified Data.Set as Set import qualified Data.Text as T import qualified Data.Yaml as Yaml import qualified Distribution.PackageDescription as C import qualified Distribution.Text as C import qualified Distribution.Version as C import Path ( PathException, (), dirname, filename, parent , stripProperPrefix ) import Path.Extra ( toFilePathNoTrailingSep ) import Path.Find ( findFiles ) import Path.IO ( AnyPath, RelPath, doesFileExist, getCurrentDir , makeRelativeToCurrentDir, resolveDir' ) import qualified RIO.FilePath as FP import RIO.List ( (\\), intercalate, isSuffixOf, isPrefixOf ) import RIO.NonEmpty ( nonEmpty ) import qualified RIO.NonEmpty as NE import Stack.BuildPlan ( BuildPlanCheck (..), DepError (..), checkSnapBuildPlan , removeSrcPkgDefaultFlags, selectBestSnapshot ) import Stack.Config ( getSnapshots, makeConcreteResolver ) import Stack.Constants ( stackDotYaml, stackProgName' ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig, withGlobalProject ) import Stack.SourceMap ( SnapshotCandidate, loadProjectSnapshotCandidate ) import Stack.Types.Config ( HasConfig ) import Stack.Types.GHCVariant ( HasGHCVariant ) import Stack.Types.GlobalOpts ( GlobalOpts (..) ) import Stack.Types.Project ( Project (..) ) import Stack.Types.Runner (Runner, globalOptsL ) import Stack.Types.Resolver ( AbstractResolver, Snapshots (..) ) import Stack.Types.Version ( stackMajorVersion ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Init" module. 
data InitException = NoPackagesToIgnoreBug deriving (Show, Typeable) instance Exception InitException where displayException NoPackagesToIgnoreBug = bugReport "[S-2747]" "No packages to ignore." -- | Type representing \'pretty\' exceptions thrown by functions exported by the -- "Stack.Init" module. data InitPrettyException = SnapshotDownloadFailure SomeException | ConfigFileAlreadyExists FilePath | PackageNameInvalid [(Path Abs File, PackageName)] | NoMatchingSnapshot !(NonEmpty SnapName) | ResolverMismatch !RawSnapshotLocation String | ResolverPartial !RawSnapshotLocation !String deriving (Show, Typeable) instance Pretty InitPrettyException where pretty (ConfigFileAlreadyExists reldest) = "[S-8009]" <> line <> flow "Stack declined to create a project-level YAML configuration file." <> blankLine <> fillSep [ flow "The file" , style File (fromString reldest) , "already exists. To overwrite it, pass the flag" , style Shell "--force" <> "." ] pretty (PackageNameInvalid rels) = "[S-5267]" <> line <> flow "Stack did not create project-level YAML configuration, as (like \ \Hackage) it requires a Cabal file name to match the package it \ \defines." <> blankLine <> flow "Please rename the following Cabal files:" <> line <> bulletedList ( map ( \(fp, name) -> fillSep [ pretty fp , "as" , style File (fromPackageName name <> ".cabal") ] ) rels ) pretty (SnapshotDownloadFailure e) = "[S-8332]" <> line <> flow "Stack failed to create project-level YAML configuration, as it \ \was unable to download the index of available snapshots." <> blankLine <> fillSep [ flow "This sometimes happens because Certificate Authorities are \ \missing on your system. You can try the Stack command again \ \or manually create the configuration file. For help about the \ \content of Stack's YAML configuration files, see (for the \ \most recent release of Stack)" , style Url "http://docs.haskellstack.org/en/stable/yaml_configuration/" <> "." 
] <> blankLine <> flow "While downloading the snapshot index, Stack encountered the \ \following error:" <> blankLine <> string (displayException e) pretty (NoMatchingSnapshot names) = "[S-1833]" <> line <> flow "None of the following snapshots provides a compiler matching \ \your package(s):" <> line <> bulletedList (map (fromString . show) (NE.toList names)) <> blankLine <> resolveOptions pretty (ResolverMismatch resolver errDesc) = "[S-6395]" <> line <> fillSep [ "Snapshot" , style Url (pretty $ PrettyRawSnapshotLocation resolver) , flow "does not have a matching compiler to build some or all of \ \your package(s)." ] <> blankLine <> indent 4 (string errDesc) <> line <> resolveOptions pretty (ResolverPartial resolver errDesc) = "[S-2422]" <> line <> fillSep [ "Snapshot" , style Url (pretty $ PrettyRawSnapshotLocation resolver) , flow "does not have all the packages to match your requirements." ] <> blankLine <> indent 4 (string errDesc) <> line <> resolveOptions resolveOptions :: StyleDoc resolveOptions = flow "This may be resolved by:" <> line <> bulletedList [ fillSep [ "Using" , style Shell "--omit-packages" , "to exclude mismatching package(s)." ] , fillSep [ "Using" , style Shell "--snapshot" , "to specify a matching snapshot." ] ] instance Exception InitPrettyException -- | Type representing command line options for the @stack init@ command. data InitOpts = InitOpts { searchDirs :: ![T.Text] -- ^ List of sub directories to search for .cabal files , omitPackages :: Bool -- ^ Exclude conflicting or incompatible user packages , forceOverwrite :: Bool -- ^ Overwrite existing stack.yaml , includeSubDirs :: Bool -- ^ If True, include all .cabal files found in any sub directories } -- | Function underlying the @stack init@ command. Project initialization. 
initCmd :: InitOpts -> RIO Runner () initCmd initOpts = do pwd <- getCurrentDir go <- view globalOptsL withGlobalProject $ withConfig YesReexec (initProject pwd initOpts go.resolver) -- | Generate a @stack.yaml@ file. initProject :: (HasConfig env, HasGHCVariant env) => Path Abs Dir -> InitOpts -> Maybe AbstractResolver -> RIO env () initProject currDir initOpts mresolver = do let dest = currDir stackDotYaml reldest <- toFilePath <$> makeRelativeToCurrentDir dest exists <- doesFileExist dest when (not initOpts.forceOverwrite && exists) $ prettyThrowIO $ ConfigFileAlreadyExists reldest dirs <- mapM (resolveDir' . T.unpack) initOpts.searchDirs let find = findCabalDirs initOpts.includeSubDirs dirs' = if null dirs then [currDir] else dirs prettyInfo $ fillSep [ flow "Looking for Cabal or" , style File "package.yaml" , flow "files to use to initialise Stack's project-level YAML \ \configuration file." ] <> line cabaldirs <- Set.toList . Set.unions <$> mapM find dirs' (bundle, dupPkgs) <- cabalPackagesCheck cabaldirs let makeRelDir dir = case stripProperPrefix currDir dir of Nothing | currDir == dir -> "." | otherwise -> assert False $ toFilePathNoTrailingSep dir Just rel -> toFilePathNoTrailingSep rel fpToPkgDir fp = let absDir = parent fp in ResolvedPath (RelFilePath $ T.pack $ makeRelDir absDir) absDir pkgDirs = Map.map (fpToPkgDir . 
fst) bundle (snapshotLoc, flags, extraDeps, rbundle) <- getDefaultResolver initOpts mresolver pkgDirs let ignored = Map.difference bundle rbundle dupPkgMsg | dupPkgs /= [] = "Warning (added by new or init): Some packages were found to have \ \names conflicting with others and have been commented out in the \ \packages section.\n" | otherwise = "" missingPkgMsg | Map.size ignored > 0 = "Warning (added by new or init): Some packages were found to be \ \incompatible with the resolver and have been left commented out \ \in the packages section.\n" | otherwise = "" extraDepMsg | Map.size extraDeps > 0 = "Warning (added by new or init): Specified resolver could not \ \satisfy all dependencies. Some external packages have been added \ \as dependencies.\n" | otherwise = "" makeUserMsg msgs = let msg = concat msgs in if msg /= "" then msg <> "You can omit this message by removing it from stack.yaml\n" else "" userMsg = makeUserMsg [dupPkgMsg, missingPkgMsg, extraDepMsg] gpdByDir = Map.fromList [ (parent fp, gpd) | (fp, gpd) <- Map.elems bundle] gpds = Map.elems $ Map.mapMaybe (flip Map.lookup gpdByDir . resolvedAbsolute) rbundle deps <- for (Map.toList extraDeps) $ \(n, v) -> PLImmutable . cplComplete <$> completePackageLocation (RPLIHackage (PackageIdentifierRevision n v CFILatest) Nothing) let project = Project { userMsg = if userMsg == "" then Nothing else Just userMsg , packages = resolvedRelative <$> Map.elems rbundle , extraDeps = map toRawPL deps , flagsByPkg = removeSrcPkgDefaultFlags gpds flags , resolver = snapshotLoc , compiler = Nothing , extraPackageDBs = [] , curator = Nothing , dropPackages = mempty } makeRel = fmap toFilePath . makeRelativeToCurrentDir prettyInfoL [ flow "Initialising Stack's project-level YAML configuration file \ \using snapshot" , pretty (PrettyRawSnapshotLocation snapshotLoc) <> "." ] prettyInfoL $ let n = Map.size bundle + length dupPkgs in [ "Considered" , fromString $ show n , "user" , if n == 1 then "package." else "packages." 
] when (dupPkgs /= []) $ do rels <- mapM makeRel dupPkgs prettyWarn $ fillSep [ flow "Ignoring these" , fromString $ show (length dupPkgs) , flow "duplicate packages:" ] <> line <> bulletedList (map (style File . fromString) rels) when (Map.size ignored > 0) $ do rels <- mapM makeRel (Map.elems (fmap fst ignored)) prettyWarn $ fillSep [ flow "Ignoring these" , fromString $ show (Map.size ignored) , flow "packages due to dependency conflicts:" ] <> line <> bulletedList (map (style File . fromString) rels) when (Map.size extraDeps > 0) $ prettyWarnL [ fromString $ show (Map.size extraDeps) , flow "external dependencies were added." ] prettyInfoL [ flow $ if exists then "Overwriting existing configuration file" else "Writing configuration to" , style File (fromString reldest) <> "." ] writeBinaryFileAtomic dest $ renderStackYaml project (Map.elems $ fmap (makeRelDir . parent . fst) ignored) (map (makeRelDir . parent) dupPkgs) prettyInfoS "Stack's project-level YAML configuration file has been initialised." -- | Render a stack.yaml file with comments, see: -- https://github.com/commercialhaskell/stack/issues/226 renderStackYaml :: Project -> [FilePath] -> [FilePath] -> B.Builder renderStackYaml p ignoredPackages dupPackages = case Yaml.toJSON p of Yaml.Object o -> renderObject o _ -> assert False $ B.byteString $ Yaml.encode p where renderObject o = B.byteString headerHelp <> B.byteString "\n\n" <> F.foldMap (goComment o) comments <> goOthers (o `KeyMap.difference` KeyMap.fromList comments) <> B.byteString footerHelp <> "\n" goComment o (name, comment) = case (convert <$> KeyMap.lookup name o) <|> nonPresentValue name of Nothing -> assert (name == "user-message") mempty Just v -> B.byteString comment <> B.byteString "\n" <> v <> if name == "packages" then commentedPackages else "" <> B.byteString "\n" where convert v = B.byteString (Yaml.encode $ Yaml.object [(name, v)]) -- Some fields in stack.yaml are optional and may not be -- generated. 
For these, we provided commented out dummy -- values to go along with the comments. nonPresentValue "extra-deps" = Just "# extra-deps: []\n" nonPresentValue "flags" = Just "# flags: {}\n" nonPresentValue "extra-package-dbs" = Just "# extra-package-dbs: []\n" nonPresentValue _ = Nothing commentLine l | null l = "#" | otherwise = "# " ++ l commentHelp = BC.pack . intercalate "\n" . map commentLine commentedPackages = let ignoredComment = commentHelp [ "The following packages have been ignored due to incompatibility with the" , "snapshot compiler, dependency conflicts with other packages" , "or unsatisfied dependencies." ] dupComment = commentHelp [ "The following packages have been ignored due to package name conflict " , "with other packages." ] in commentPackages ignoredComment ignoredPackages <> commentPackages dupComment dupPackages commentPackages comment pkgs | pkgs /= [] = B.byteString comment <> B.byteString "\n" <> B.byteString (BC.pack $ concat $ map (\x -> "#- " ++ x ++ "\n") pkgs ++ ["\n"]) | otherwise = "" goOthers o | KeyMap.null o = mempty | otherwise = assert False $ B.byteString $ Yaml.encode o -- Per Section Help comments = [ ("user-message" , userMsgHelp) , ("resolver" , resolverHelp) , ("packages" , packageHelp) , ("extra-deps" , extraDepsHelp) , ("flags" , "# Override default flag values for local packages and extra-deps") , ("extra-package-dbs", "# Extra package databases containing global packages") ] -- Help strings headerHelp = commentHelp [ "This file was automatically generated by 'stack init'" , "" , "Some commonly used options have been documented as comments in this file." , "For advanced use and comprehensive documentation of the format, please see:" , "https://docs.haskellstack.org/en/stable/yaml_configuration/" ] resolverHelp = commentHelp [ "Resolver to choose a 'specific' stackage snapshot or a compiler version." , "A snapshot resolver dictates the compiler version and the set of packages" , "to be used for project dependencies. 
For example:" , "" , "resolver: lts-22.21" , "resolver: nightly-2024-05-06" , "resolver: ghc-9.6.5" , "" , "The location of a snapshot can be provided as a file or url. Stack assumes" , "a snapshot provided as a file might change, whereas a url resource does not." , "" , "resolver: ./custom-snapshot.yaml" , "resolver: https://example.com/snapshots/2023-01-01.yaml" ] userMsgHelp = commentHelp [ "A warning or info to be displayed to the user on config load." ] packageHelp = commentHelp [ "User packages to be built." , "Various formats can be used as shown in the example below." , "" , "packages:" , "- some-directory" , "- https://example.com/foo/bar/baz-0.0.2.tar.gz" , " subdirs:" , " - auto-update" , " - wai" ] extraDepsHelp = commentHelp [ "Dependency packages to be pulled from upstream that are not in the resolver." , "These entries can reference officially published versions as well as" , "forks / in-progress versions pinned to a git hash. For example:" , "" , "extra-deps:" , "- acme-missiles-0.3" , "- git: https://github.com/commercialhaskell/stack.git" , " commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a" , "" ] footerHelp = commentHelp [ "Control whether we use the GHC we find on the path" , "system-ghc: true" , "" , "Require a specific version of Stack, using version ranges" , "require-stack-version: -any # Default" , "require-stack-version: \"" ++ C.display (C.orLaterVersion stackMajorVersion) ++ "\"" , "" , "Override the architecture used by Stack, especially useful on Windows" , "arch: i386" , "arch: x86_64" , "" , "Extra directories used by Stack for building" , "extra-include-dirs: [/path/to/dir]" , "extra-lib-dirs: [/path/to/dir]" , "" , "Allow a newer minor version of GHC than the snapshot specifies" , "compiler-check: newer-minor" ] getSnapshots' :: HasConfig env => RIO env Snapshots getSnapshots' = catchAny getSnapshots (prettyThrowIO . 
SnapshotDownloadFailure) -- | Get the default resolver value getDefaultResolver :: (HasConfig env, HasGHCVariant env) => InitOpts -> Maybe AbstractResolver -> Map PackageName (ResolvedPath Dir) -- ^ Src package name: cabal dir -> RIO env ( RawSnapshotLocation , Map PackageName (Map FlagName Bool) , Map PackageName Version , Map PackageName (ResolvedPath Dir)) -- ^ ( Resolver -- , Flags for src packages and extra deps -- , Extra dependencies -- , Src packages actually considered) getDefaultResolver initOpts mresolver pkgDirs = do (candidate, loc) <- case mresolver of Nothing -> selectSnapResolver Just ar -> do sl <- makeConcreteResolver ar c <- loadProjectSnapshotCandidate sl NoPrintWarnings False pure (c, sl) getWorkingResolverPlan initOpts pkgDirs candidate loc where -- TODO support selecting best across regular and custom snapshots selectSnapResolver = do snaps <- fmap getRecommendedSnapshots getSnapshots' (c, l, r) <- selectBestSnapshot (Map.elems pkgDirs) snaps case r of BuildPlanCheckFail {} | not initOpts.omitPackages -> prettyThrowM $ NoMatchingSnapshot snaps _ -> pure (c, l) getWorkingResolverPlan :: (HasConfig env, HasGHCVariant env) => InitOpts -> Map PackageName (ResolvedPath Dir) -- ^ Src packages: cabal dir -> SnapshotCandidate env -> RawSnapshotLocation -> RIO env ( RawSnapshotLocation , Map PackageName (Map FlagName Bool) , Map PackageName Version , Map PackageName (ResolvedPath Dir)) -- ^ ( SnapshotDef -- , Flags for src packages and extra deps -- , Extra dependencies -- , Src packages actually considered) getWorkingResolverPlan initOpts pkgDirs0 snapCandidate snapLoc = do prettyInfoL [ flow "Selected the snapshot" , pretty (PrettyRawSnapshotLocation snapLoc) <> "." 
] go pkgDirs0 where go pkgDirs = do eres <- checkBundleResolver initOpts snapLoc snapCandidate (Map.elems pkgDirs) -- if some packages failed try again using the rest case eres of Right (f, edeps)-> pure (snapLoc, f, edeps, pkgDirs) Left ignored | Map.null available -> do prettyWarnS "Could not find a working plan for any of the user packages. \ \Proceeding to create a YAML configuration file anyway." pure (snapLoc, Map.empty, Map.empty, Map.empty) | otherwise -> do when (Map.size available == Map.size pkgDirs) $ throwM NoPackagesToIgnoreBug if length ignored > 1 then prettyWarn ( flow "Ignoring the following packages:" <> line <> bulletedList (map fromPackageName ignored) ) else prettyWarnL [ flow "Ignoring package:" , fromString ( case ignored of [] -> throwM NoPackagesToIgnoreBug x:_ -> packageNameString x ) ] go available where isAvailable k _ = k `notElem` ignored available = Map.filterWithKey isAvailable pkgDirs checkBundleResolver :: (HasConfig env, HasGHCVariant env) => InitOpts -> RawSnapshotLocation -> SnapshotCandidate env -> [ResolvedPath Dir] -- ^ Src package dirs -> RIO env (Either [PackageName] ( Map PackageName (Map FlagName Bool) , Map PackageName Version)) checkBundleResolver initOpts snapshotLoc snapCandidate pkgDirs = do result <- checkSnapBuildPlan pkgDirs Nothing snapCandidate case result of BuildPlanCheckOk f -> pure $ Right (f, Map.empty) BuildPlanCheckPartial _f e -> do -- FIXME:qrilka unused f if initOpts.omitPackages then do warnPartial result prettyWarnS "Omitting packages with unsatisfied dependencies" pure $ Left $ failedUserPkgs e else prettyThrowM $ ResolverPartial snapshotLoc (show result) BuildPlanCheckFail _ e _ | initOpts.omitPackages -> do prettyWarn $ fillSep [ "Resolver compiler mismatch:" , style Current (fromString . 
T.unpack $ textDisplay snapshotLoc) ] <> line <> indent 4 (string $ show result) pure $ Left $ failedUserPkgs e | otherwise -> prettyThrowM $ ResolverMismatch snapshotLoc (show result) where warnPartial res = do prettyWarn $ fillSep [ "Resolver" , style Current (fromString . T.unpack $ textDisplay snapshotLoc) , flow "will need external packages:" ] <> line <> indent 4 (string $ show res) failedUserPkgs e = Map.keys $ Map.unions (Map.elems (fmap (.neededBy) e)) getRecommendedSnapshots :: Snapshots -> NonEmpty SnapName getRecommendedSnapshots snapshots = -- in order - Latest LTS, Latest Nightly, all LTS most recent first case nonEmpty supportedLtss of Just (mostRecent :| older) -> mostRecent :| (nightly : older) Nothing -> nightly :| [] where ltss = map (uncurry LTS) (IntMap.toDescList snapshots.lts ) supportedLtss = filter (>= minSupportedLts) ltss nightly = Nightly snapshots.nightly -- |Yields the minimum LTS supported by Stack. minSupportedLts :: SnapName -- See https://github.com/commercialhaskell/stack/blob/master/ChangeLog.md -- under Stack version 2.1.1. minSupportedLts = LTS 3 0 findCabalDirs :: HasConfig env => Bool -> Path Abs Dir -> RIO env (Set (Path Abs Dir)) findCabalDirs recurse dir = Set.fromList . map parent <$> liftIO (findFiles dir isHpackOrCabal subdirFilter) where subdirFilter subdir = recurse && not (isIgnored subdir) isHpack = (== "package.yaml") . toFilePath . filename isCabal = (".cabal" `isSuffixOf`) . toFilePath isHpackOrCabal x = isHpack x || isCabal x isIgnored path = "." 
`isPrefixOf` dirName || dirName `Set.member` ignoredDirs where dirName = FP.dropTrailingPathSeparator (toFilePath (dirname path)) -- | Special directories that we don't want to traverse for .cabal files ignoredDirs :: Set FilePath ignoredDirs = Set.fromList ["dist"] cabalPackagesCheck :: (HasConfig env, HasGHCVariant env) => [Path Abs Dir] -> RIO env ( Map PackageName (Path Abs File, C.GenericPackageDescription) , [Path Abs File] ) cabalPackagesCheck cabaldirs = do when (null cabaldirs) $ prettyWarn $ fillSep [ flow "Stack did not find any local package directories. You may \ \want to create a package with" , style Shell (flow "stack new") , flow "instead." ] <> blankLine <> fillSep [ flow "Stack will create an empty project. If this is not what \ \you want, please delete the generated" , style File "stack.yaml" , "file." ] relpaths <- mapM prettyPath cabaldirs unless (null relpaths) $ prettyInfo $ flow "Using the Cabal packages:" <> line <> bulletedList (map (style File . fromString) relpaths) <> line -- A package name cannot be empty or missing otherwise it will result in -- Cabal solver failure. Stack requires packages name to match the Cabal -- file name. Just the latter check is enough to cover both the cases. ePackages <- for cabaldirs $ \dir -> do -- Pantry's 'loadCabalFilePath' throws 'MismatchedCabalName' (error -- [S-910]) if the Cabal file name does not match the package it -- defines. (gpdio, _name, cabalFP) <- loadCabalFilePath (Just stackProgName') dir eres <- liftIO $ try (gpdio YesPrintWarnings) case eres :: Either PantryException C.GenericPackageDescription of Right gpd -> pure $ Right (cabalFP, gpd) Left (MismatchedCabalName fp name) -> pure $ Left (fp, name) Left e -> throwIO e let (nameMismatchPkgs, packages) = partitionEithers ePackages when (nameMismatchPkgs /= []) $ prettyThrowIO $ PackageNameInvalid nameMismatchPkgs let dupGroups = mapMaybe nonEmpty . groupSortOn (gpdPackageName . 
snd) dupAll = concatMap NE.toList $ dupGroups packages -- Among duplicates prefer to include the ones in upper level dirs pathlen = length . FP.splitPath . toFilePath . fst getmin = minimumBy1 (compare `on` pathlen) dupSelected = map getmin (dupGroups packages) dupIgnored = dupAll \\ dupSelected unique = packages \\ dupIgnored when (dupIgnored /= []) $ do dups <- mapM (mapM (prettyPath . fst)) (dupGroups packages) prettyWarn $ flow "The following packages have duplicate package names:" <> line <> foldMap ( \dup -> bulletedList (map fromString (NE.toList dup)) <> line ) dups <> line <> flow "Packages with duplicate names will be ignored. Packages \ \in upper level directories will be preferred." <> line pure (Map.fromList $ map (\(file, gpd) -> (gpdPackageName gpd,(file, gpd))) unique , map fst dupIgnored) prettyPath :: (MonadIO m, RelPath (Path r t) ~ Path Rel t, AnyPath (Path r t)) => Path r t -> m FilePath prettyPath path = do eres <- liftIO $ try $ makeRelativeToCurrentDir path pure $ case eres of Left (_ :: PathException) -> toFilePath path Right res -> toFilePath res stack-2.15.7/src/Stack/List.hs0000644000000000000000000001037614620153445014217 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Types and functions related to Stack's @list@ command. module Stack.List ( listCmd , listPackages ) where import Pantry ( loadSnapshot ) import qualified RIO.ByteString.Lazy as Lazy import qualified RIO.Map as Map import RIO.Process ( HasProcessContext ) import Stack.Config ( makeConcreteResolver ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig ) import Stack.Types.GlobalOpts ( GlobalOpts (..) ) import Stack.Types.Runner ( Runner, globalOptsL ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.List" module. 
newtype ListPrettyException = CouldNotParsePackageSelectors [StyleDoc] deriving (Show, Typeable) instance Pretty ListPrettyException where pretty (CouldNotParsePackageSelectors errs) = "[S-4926]" <> line <> bulletedList errs instance Exception ListPrettyException -- | Function underlying the @stack list@ command. List packages. listCmd :: [String] -> RIO Runner () listCmd names = withConfig NoReexec $ do mresolver <- view $ globalOptsL . to (.resolver) mSnapshot <- forM mresolver $ \resolver -> do concrete <- makeConcreteResolver resolver loc <- completeSnapshotLocation concrete loadSnapshot loc listPackages mSnapshot names -- | Intended to work for the command line command. listPackages :: forall env. (HasPantryConfig env, HasProcessContext env, HasTerm env) => Maybe RawSnapshot -- ^ When looking up by name, take from this build plan. -> [String] -- ^ Names or identifiers. -> RIO env () listPackages mSnapshot input = do let (errs1, names) = case mSnapshot of Just snapshot | null input -> ([], Map.keys (rsPackages snapshot)) _ -> partitionEithers $ map parse input (errs2, locs) <- partitionEithers <$> traverse toLoc names case errs1 ++ errs2 of [] -> pure () errs -> prettyThrowM $ CouldNotParsePackageSelectors errs mapM_ (Lazy.putStrLn . 
fromPackageId) locs where toLoc | Just snapshot <- mSnapshot = toLocSnapshot snapshot | otherwise = toLocNoSnapshot toLocNoSnapshot :: PackageName -> RIO env (Either StyleDoc PackageIdentifier) toLocNoSnapshot name = do mloc1 <- getLatestHackageLocation YesRequireHackageIndex name UsePreferredVersions mloc <- case mloc1 of Just _ -> pure mloc1 Nothing -> do updated <- updateHackageIndex $ Just $ "Could not find package " <> fromPackageName name <> ", updating" case updated of UpdateOccurred -> getLatestHackageLocation YesRequireHackageIndex name UsePreferredVersions NoUpdateOccurred -> pure Nothing case mloc of Nothing -> do candidates <- getHackageTypoCorrections name pure $ Left $ fillSep [ flow "Could not find package" , style Current (fromPackageName name) , flow "on Hackage." , if null candidates then mempty else fillSep $ flow "Perhaps you meant one of:" : mkNarrativeList (Just Good) False (map fromPackageName candidates :: [StyleDoc]) ] Just loc -> pure $ Right (packageLocationIdent loc) toLocSnapshot :: RawSnapshot -> PackageName -> RIO env (Either StyleDoc PackageIdentifier) toLocSnapshot snapshot name = case Map.lookup name (rsPackages snapshot) of Nothing -> pure $ Left $ fillSep [ flow "Package does not appear in snapshot:" , style Current (fromPackageName name) <> "." ] Just sp -> do loc <- cplComplete <$> completePackageLocation (rspLocation sp) pure $ Right (packageLocationIdent loc) parse s = case parsePackageName s of Just x -> Right x Nothing -> Left $ fillSep [ flow "Could not parse as package name or identifier:" , style Current (fromString s) <> "." ] stack-2.15.7/src/Stack/Ls.hs0000644000000000000000000003573114620153445013664 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Types and functions related to Stack's @ls@ command. module Stack.Ls ( LsCmdOpts (..) , LsCmds (..) , SnapshotOpts (..) , LsView (..) , ListDepsOpts (..) 
, ListDepsFormat (..) , ListDepsFormatOpts (..) , ListDepsTextFilter (..) , ListStylesOpts (..) , ListToolsOpts (..) , lsCmd ) where import Data.Aeson ( FromJSON, Value (..), (.:), encode ) import Data.Array.IArray ( (//), elems ) import qualified Data.ByteString.Lazy.Char8 as LBC8 import Distribution.Package ( mkPackageName ) import qualified Data.Aeson.Types as A import qualified Data.Foldable as F import qualified Data.List as L import qualified Data.Map as Map import qualified Data.Sequence as Seq import qualified Data.Set as Set import Data.Text ( isPrefixOf ) import qualified Data.Text as T import qualified Data.Text.IO as T import qualified Data.Vector as V import Network.HTTP.StackClient ( addRequestHeader, hAccept, httpJSON, getResponseBody , parseRequest ) import Path ( parent ) import RIO.List ( sort ) import Stack.Constants ( osIsWindows ) import Stack.DependencyGraph ( createPrunedDependencyGraph ) import Stack.Prelude hiding ( Nightly, Snapshot ) import Stack.Runners ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) import Stack.Setup.Installed ( Tool (..), filterTools, listInstalled, toolString ) import Stack.Types.Config ( Config (..), HasConfig (..) ) import Stack.Types.DependencyTree ( DependencyTree (..), DotPayload (..), licenseText , versionText ) import Stack.Types.DotOpts ( DotOpts (..) ) import Stack.Types.EnvConfig ( installationRootDeps ) import Stack.Types.Runner ( HasRunner, Runner, terminalL ) import System.Console.ANSI.Codes ( SGR (Reset), setSGRCode, sgrToCode ) import System.Process.Pager ( pageText ) import System.Directory ( listDirectory ) import System.IO ( putStrLn ) -- | Type representing exceptions thrown by functions exported by the "Stack.Ls" -- module. 
newtype LsException = ParseFailure [Value] deriving (Show, Typeable) instance Exception LsException where displayException (ParseFailure val) = "Error: [S-3421]\n" ++ "Failure to parse values as a snapshot: " ++ show val -- | Type representing command line options for the @stack ls@ command. newtype LsCmdOpts = LsCmdOpts { lsCmds :: LsCmds } -- | Type representing subcommands for the @stack ls@ command. data LsCmds = LsSnapshot SnapshotOpts | LsDependencies ListDepsOpts | LsStyles ListStylesOpts | LsTools ListToolsOpts -- | Type representing command line options for the @stack ls snapshots@ -- command. data SnapshotOpts = SnapshotOpts { viewType :: LsView , ltsSnapView :: Bool , nightlySnapView :: Bool } deriving (Eq, Ord, Show) -- | Type representing subcommands for the @stack ls snapshots@ command. data LsView = Local | Remote deriving (Eq, Ord, Show) -- | Type representing Stackage snapshot types. data SnapshotType = Lts -- ^ Stackage LTS Haskell | Nightly -- ^ Stackage Nightly deriving (Eq, Ord, Show) data ListDepsOpts = ListDepsOpts { format :: !ListDepsFormat -- ^ Format of printing dependencies , dotOpts :: !DotOpts -- ^ The normal dot options. } data ListDepsFormat = ListDepsText ListDepsFormatOpts [ListDepsTextFilter] | ListDepsTree ListDepsFormatOpts | ListDepsJSON | ListDepsConstraints data ListDepsFormatOpts = ListDepsFormatOpts { sep :: !Text -- ^ Separator between the package name and details. , license :: !Bool -- ^ Print dependency licenses instead of versions. } -- | Type representing items to filter the results of @stack ls dependencies@. data ListDepsTextFilter = FilterPackage PackageName -- ^ Item is a package name. | FilterLocals -- ^ Item represents all local packages. -- | Type representing command line options for the @stack ls stack-colors@ and -- @stack ls stack-colours@ commands. 
data ListStylesOpts = ListStylesOpts { basic :: Bool , sgr :: Bool , example :: Bool } deriving (Eq, Ord, Show) -- | Type representing command line options for the @stack ls tools@ command. newtype ListToolsOpts = ListToolsOpts { filter :: String } data Snapshot = Snapshot { snapId :: Text , title :: Text , time :: Text } deriving (Eq, Ord, Show) instance FromJSON Snapshot where parseJSON o@(Array _) = parseSnapshot o parseJSON _ = mempty data SnapshotData = SnapshotData { _snapTotalCounts :: Integer , snaps :: [[Snapshot]] } deriving (Eq, Ord, Show) instance FromJSON SnapshotData where parseJSON (Object s) = SnapshotData <$> s .: "totalCount" <*> s .: "snapshots" parseJSON _ = mempty toSnapshot :: [Value] -> Snapshot toSnapshot [String snapId, String title, String time] = Snapshot { snapId , title , time } toSnapshot val = impureThrow $ ParseFailure val parseSnapshot :: Value -> A.Parser Snapshot parseSnapshot = A.withArray "array of snapshot" (pure . toSnapshot . V.toList) displayTime :: Snapshot -> [Text] displayTime snap = [snap.time] displaySnap :: Snapshot -> [Text] displaySnap snap = ["Resolver name: " <> snap.snapId, "\n" <> snap.title <> "\n\n"] displaySingleSnap :: [Snapshot] -> Text displaySingleSnap snapshots = case snapshots of [] -> mempty (x:xs) -> let snaps = displayTime x <> ["\n\n"] <> displaySnap x <> L.concatMap displaySnap xs in T.concat snaps renderData :: Bool -> Text -> IO () renderData True content = pageText content renderData False content = T.putStr content displaySnapshotData :: Bool -> SnapshotData -> IO () displaySnapshotData term sdata = case L.reverse sdata.snaps of [] -> pure () xs -> let snaps = T.concat $ L.map displaySingleSnap xs in renderData term snaps filterSnapshotData :: SnapshotData -> SnapshotType -> SnapshotData filterSnapshotData sdata stype = sdata { snaps = filterSnapData } where snapdata = sdata.snaps filterSnapData = case stype of Lts -> L.map (L.filter (\x -> "lts" `isPrefixOf` x.snapId)) snapdata Nightly -> L.map 
(L.filter (\x -> "nightly" `isPrefixOf` x.snapId)) snapdata displayLocalSnapshot :: Bool -> [String] -> IO () displayLocalSnapshot term xs = renderData term (localSnaptoText xs) localSnaptoText :: [String] -> Text localSnaptoText xs = T.intercalate "\n" $ L.map T.pack xs handleLocal :: LsCmdOpts -> RIO Runner () handleLocal lsOpts = do (instRoot :: Path Abs Dir) <- withConfig YesReexec $ withDefaultEnvConfig installationRootDeps isStdoutTerminal <- view terminalL let parentInstRoot = parent instRoot snapRootDir | osIsWindows = parentInstRoot | otherwise = parent parentInstRoot snapData' <- liftIO $ listDirectory $ toFilePath snapRootDir let snapData = L.sort snapData' case lsOpts.lsCmds of LsSnapshot sopt -> case (sopt.ltsSnapView, sopt.nightlySnapView) of (True, False) -> liftIO $ displayLocalSnapshot isStdoutTerminal $ L.filter (L.isPrefixOf "lts") snapData (False, True) -> liftIO $ displayLocalSnapshot isStdoutTerminal $ L.filter (L.isPrefixOf "night") snapData _ -> liftIO $ displayLocalSnapshot isStdoutTerminal snapData LsDependencies _ -> pure () LsStyles _ -> pure () LsTools _ -> pure () handleRemote :: HasRunner env => LsCmdOpts -> RIO env () handleRemote lsOpts = do req <- liftIO $ parseRequest urlInfo isStdoutTerminal <- view terminalL let req' = addRequestHeader hAccept "application/json" req result <- httpJSON req' let snapData = getResponseBody result case lsOpts.lsCmds of LsSnapshot sopt -> case (sopt.ltsSnapView, sopt.nightlySnapView) of (True, False) -> liftIO $ displaySnapshotData isStdoutTerminal $ filterSnapshotData snapData Lts (False, True) -> liftIO $ displaySnapshotData isStdoutTerminal $ filterSnapshotData snapData Nightly _ -> liftIO $ displaySnapshotData isStdoutTerminal snapData LsDependencies _ -> pure () LsStyles _ -> pure () LsTools _ -> pure () where urlInfo = "https://www.stackage.org/snapshots" lsCmd :: LsCmdOpts -> RIO Runner () lsCmd lsOpts = case lsOpts.lsCmds of LsSnapshot sopt -> case sopt.viewType of Local -> handleLocal lsOpts 
Remote -> handleRemote lsOpts LsDependencies depOpts -> listDependencies depOpts LsStyles stylesOpts -> withConfig NoReexec $ listStylesCmd stylesOpts LsTools toolsOpts -> withConfig NoReexec $ listToolsCmd toolsOpts -- | List Stack's output styles listStylesCmd :: ListStylesOpts -> RIO Config () listStylesCmd opts = do lc <- ask -- This is the same test as is used in Stack.Types.Runner.withRunner let useColor = view useColorL lc styles = elems $ defaultStyles // stylesUpdate (view stylesUpdateL lc) isComplex = not opts.basic showSGR = isComplex && opts.sgr showExample = isComplex && opts.example && useColor styleReports = L.map (styleReport showSGR showExample) styles liftIO $ T.putStrLn $ T.intercalate (if isComplex then "\n" else ":") styleReports where styleReport :: Bool -> Bool -> StyleSpec -> Text styleReport showSGR showExample (k, sgrs) = k <> "=" <> codes <> (if showSGR then sgrsList else mempty) <> (if showExample then example else mempty) where codes = T.intercalate ";" (L.map (fromString . show) $ L.concatMap sgrToCode sgrs) sgrsList = " [" <> T.intercalate ", " (L.map (fromString . show) sgrs) <> "]" example = " " <> ansi <> "Example" <> reset ansi = fromString $ setSGRCode sgrs reset = fromString $ setSGRCode [Reset] -- | List Stack's installed tools, sorted (see instance of 'Ord' for 'Tool'). listToolsCmd :: ListToolsOpts -> RIO Config () listToolsCmd opts = do localPrograms <- view $ configL . to (.localPrograms) installed <- sort <$> listInstalled localPrograms let wanted = case opts.filter of [] -> installed "ghc-git" -> [t | t@(ToolGhcGit _ _) <- installed] pkgName -> filtered pkgName installed liftIO $ mapM_ (putStrLn . 
toolString) wanted where filtered pkgName installed = Tool <$> filterTools (mkPackageName pkgName) (const True) installed listDependencies :: ListDepsOpts -> RIO Runner () listDependencies opts = do let dotOpts = opts.dotOpts (pkgs, resultGraph) <- createPrunedDependencyGraph dotOpts liftIO $ case opts.format of ListDepsTree treeOpts -> T.putStrLn "Packages" >> printTree treeOpts dotOpts 0 [] (treeRoots opts pkgs) resultGraph ListDepsJSON -> printJSON pkgs resultGraph ListDepsText textOpts listDepsTextFilters -> do let resultGraph' = Map.filterWithKey p resultGraph p k _ = Set.notMember k (exclude (Set.toList pkgs) listDepsTextFilters) void $ Map.traverseWithKey (go "" textOpts) (snd <$> resultGraph') where exclude :: [PackageName] -> [ListDepsTextFilter] -> Set PackageName exclude locals = Set.fromList . exclude' locals exclude' :: [PackageName] -> [ListDepsTextFilter] -> [PackageName] exclude' _ [] = [] exclude' locals (f:fs) = case f of FilterPackage pkgName -> pkgName : exclude' locals fs FilterLocals -> locals <> exclude' locals fs ListDepsConstraints -> do let constraintOpts = ListDepsFormatOpts " ==" False T.putStrLn "constraints:" void $ Map.traverseWithKey (go " , " constraintOpts) (snd <$> resultGraph) where go prefix lineOpts name payload = T.putStrLn $ prefix <> listDepsLine lineOpts name payload treeRoots :: ListDepsOpts -> Set PackageName -> Set PackageName treeRoots opts projectPackages' = let targets = opts.dotOpts.dotTargets in if null targets then projectPackages' else Set.fromList $ map (mkPackageName . T.unpack) targets printTree :: ListDepsFormatOpts -> DotOpts -> Int -> [Int] -> Set PackageName -> Map PackageName (Set PackageName, DotPayload) -> IO () printTree opts dotOpts depth remainingDepsCounts packages dependencyMap = F.sequence_ $ Seq.mapWithIndex go (toSeq packages) where toSeq = Seq.fromList . 
Set.toList go index name = let newDepsCounts = remainingDepsCounts ++ [Set.size packages - index - 1] in case Map.lookup name dependencyMap of Just (deps, payload) -> do printTreeNode opts dotOpts depth newDepsCounts deps payload name if Just depth == dotOpts.dependencyDepth then pure () else printTree opts dotOpts (depth + 1) newDepsCounts deps dependencyMap -- TODO: Define this behaviour, maybe pure an error? Nothing -> pure () printTreeNode :: ListDepsFormatOpts -> DotOpts -> Int -> [Int] -> Set PackageName -> DotPayload -> PackageName -> IO () printTreeNode opts dotOpts depth remainingDepsCounts deps payload name = let remainingDepth = fromMaybe 999 dotOpts.dependencyDepth - depth hasDeps = not $ null deps in T.putStrLn $ treeNodePrefix "" remainingDepsCounts hasDeps remainingDepth <> " " <> listDepsLine opts name payload treeNodePrefix :: Text -> [Int] -> Bool -> Int -> Text treeNodePrefix t [] _ _ = t treeNodePrefix t [0] True 0 = t <> "└──" treeNodePrefix t [_] True 0 = t <> "├──" treeNodePrefix t [0] True _ = t <> "└─┬" treeNodePrefix t [_] True _ = t <> "├─┬" treeNodePrefix t [0] False _ = t <> "└──" treeNodePrefix t [_] False _ = t <> "├──" treeNodePrefix t (0:ns) d remainingDepth = treeNodePrefix (t <> " ") ns d remainingDepth treeNodePrefix t (_:ns) d remainingDepth = treeNodePrefix (t <> "│ ") ns d remainingDepth listDepsLine :: ListDepsFormatOpts -> PackageName -> DotPayload -> Text listDepsLine opts name payload = T.pack (packageNameString name) <> opts.sep <> payloadText opts payload payloadText :: ListDepsFormatOpts -> DotPayload -> Text payloadText opts payload = if opts.license then licenseText payload else versionText payload printJSON :: Set PackageName -> Map PackageName (Set PackageName, DotPayload) -> IO () printJSON pkgs dependencyMap = LBC8.putStrLn $ encode $ DependencyTree pkgs dependencyMap stack-2.15.7/src/Stack/Lock.hs0000644000000000000000000001623014620153445014167 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# 
LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Lock ( lockCachedWanted , LockedLocation (..) , Locked (..) ) where import Data.Aeson.Types ( FromJSON (..), ToJSON, Value, (.=), object ) import Data.Aeson.WarningParser ( WithJSONWarnings (..), (..:), jsonSubWarnings , jsonSubWarningsT, logJSONWarnings, withObjectWarnings ) import Data.ByteString.Builder ( byteString ) import qualified Data.Map as Map import qualified Data.Text as T import qualified Data.Yaml as Yaml import qualified RIO.NonEmpty as NE import Path ( parent ) import Path.Extended ( addExtension ) import Path.IO ( doesFileExist ) import Stack.Prelude import Stack.SourceMap ( snapToDepPackage ) import Stack.Types.Config.Exception ( ConfigPrettyException (..) ) import Stack.Types.LockFileBehavior ( LockFileBehavior (..) ) import Stack.Types.Runner ( HasRunner, lockFileBehaviorL, rslInLogL ) import Stack.Types.SourceMap ( DepPackage, SMWanted ) -- | Type representing \'pretty\' exceptions thrown by functions exported by the -- "Stack.Lock" module. data LockPrettyException = WritingLockFileError (Path Abs File) Locked deriving (Show, Typeable) instance Pretty LockPrettyException where pretty (WritingLockFileError lockFile newLocked) = "[S-1353]" <> line <> flow "Stack is configured to report an error on writing a lock file." <> blankLine <> fillSep [ flow "Stack just tried to write the following lock file content to" , pretty lockFile <> ":" ] <> blankLine <> string newLocked' where newLocked' = T.unpack . 
decodeUtf8With lenientDecode $ Yaml.encode newLocked instance Exception LockPrettyException data LockedLocation a b = LockedLocation { original :: a , completed :: b } deriving (Eq, Show) instance (ToJSON a, ToJSON b) => ToJSON (LockedLocation a b) where toJSON ll = object [ "original" .= ll.original, "completed" .= ll.completed ] instance ( FromJSON (WithJSONWarnings (Unresolved a)) , FromJSON (WithJSONWarnings (Unresolved b)) ) => FromJSON (WithJSONWarnings (Unresolved (LockedLocation a b))) where parseJSON = withObjectWarnings "LockedLocation" $ \o -> do original <- jsonSubWarnings $ o ..: "original" completed <- jsonSubWarnings $ o ..: "completed" pure $ LockedLocation <$> original <*> completed -- Special wrapper extracting only 1 RawPackageLocationImmutable -- serialization should not produce locations with multiple subdirs -- so we should be OK using just a head element newtype SingleRPLI = SingleRPLI { singleRPLI :: RawPackageLocationImmutable} instance FromJSON (WithJSONWarnings (Unresolved SingleRPLI)) where parseJSON v = do WithJSONWarnings unresolvedRPLIs ws <- parseJSON v let withWarnings x = WithJSONWarnings x ws pure $ withWarnings $ SingleRPLI . 
NE.head <$> unresolvedRPLIs data Locked = Locked { snapshotLocations :: [LockedLocation RawSnapshotLocation SnapshotLocation] , pkgImmutableLocations :: [LockedLocation RawPackageLocationImmutable PackageLocationImmutable] } deriving (Eq, Show) instance ToJSON Locked where toJSON lck = object [ "snapshots" .= lck.snapshotLocations , "packages" .= lck.pkgImmutableLocations ] instance FromJSON (WithJSONWarnings (Unresolved Locked)) where parseJSON = withObjectWarnings "Locked" $ \o -> do snapshots <- jsonSubWarningsT $ o ..: "snapshots" packages <- jsonSubWarningsT $ o ..: "packages" let unwrap :: LockedLocation SingleRPLI b -> LockedLocation RawPackageLocationImmutable b unwrap ll = ll { original = ll.original.singleRPLI } pure $ Locked <$> sequenceA snapshots <*> (map unwrap <$> sequenceA packages) loadYamlThrow :: HasLogFunc env => (Value -> Yaml.Parser (WithJSONWarnings a)) -> Path Abs File -> RIO env a loadYamlThrow parser path = do eVal <- liftIO $ Yaml.decodeFileEither (toFilePath path) case eVal of Left parseException -> throwIO $ ParseConfigFileException path parseException Right val -> case Yaml.parseEither parser val of Left err -> throwIO $ Yaml.AesonException err Right (WithJSONWarnings res warnings) -> do logJSONWarnings (toFilePath path) warnings pure res lockCachedWanted :: (HasPantryConfig env, HasRunner env) => Path Abs File -> RawSnapshotLocation -> ( Map RawPackageLocationImmutable PackageLocationImmutable -> WantedCompiler -> Map PackageName (Bool -> RIO env DepPackage) -> RIO env ( SMWanted, [CompletedPLI]) ) -> RIO env SMWanted lockCachedWanted stackFile resolver fillWanted = do lockFile <- liftIO $ addExtension ".lock" stackFile let getLockExists = doesFileExist lockFile lfb <- view lockFileBehaviorL readLockFile <- case lfb of LFBIgnore -> pure False LFBReadWrite -> getLockExists LFBReadOnly -> getLockExists LFBErrorOnWrite -> getLockExists locked <- if readLockFile then do logDebug "Using package location completions from a lock file" 
unresolvedLocked <- loadYamlThrow parseJSON lockFile resolvePaths (Just $ parent stackFile) unresolvedLocked else do logDebug "Not reading lock file" pure $ Locked [] [] let toMap :: Ord a => [LockedLocation a b] -> Map a b toMap = Map.fromList . map ((.original) &&& (.completed)) slocCache = toMap locked.snapshotLocations pkgLocCache = toMap locked.pkgImmutableLocations debugRSL <- view rslInLogL (snap, slocCompleted, pliCompleted) <- loadAndCompleteSnapshotRaw' debugRSL resolver slocCache pkgLocCache let compiler = snapshotCompiler snap snPkgs = Map.mapWithKey (\n p h -> snapToDepPackage h n p) (snapshotPackages snap) (wanted, prjCompleted) <- fillWanted pkgLocCache compiler snPkgs let lockLocations = map (\(CompletedPLI r c) -> LockedLocation r c) differentSnapLocs (CompletedSL raw complete) | raw == toRawSL complete = Nothing | otherwise = Just $ LockedLocation raw complete newLocked = Locked { snapshotLocations = mapMaybe differentSnapLocs slocCompleted , pkgImmutableLocations = lockLocations $ pliCompleted <> prjCompleted } when (newLocked /= locked) $ case lfb of LFBReadWrite -> writeBinaryFileAtomic lockFile $ header <> byteString (Yaml.encode newLocked) LFBErrorOnWrite -> prettyThrowIO $ WritingLockFileError lockFile newLocked LFBIgnore -> pure () LFBReadOnly -> pure () pure wanted where header = "# This file was autogenerated by Stack.\n\ \# You should not edit this file by hand.\n\ \# For more information, please see the documentation at:\n\ \# https://docs.haskellstack.org/en/stable/lock_files\n\n" stack-2.15.7/src/Stack/New.hs0000644000000000000000000006155214620153445014037 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Types and functions related to Stack's @new@ command. module Stack.New ( NewOpts (..) 
, TemplateName , newCmd , new ) where import Control.Monad.Trans.Writer.Strict ( execWriterT ) import Data.Aeson as A import qualified Data.Aeson.KeyMap as KeyMap import qualified Data.ByteString.Base64 as B64 import Data.ByteString.Builder ( lazyByteString ) import qualified Data.ByteString.Lazy as LB import Data.Conduit ( yield ) import qualified Data.List as L import qualified Data.Map.Strict as M import qualified Data.Set as S import qualified Data.Text as T import qualified Data.Text.Encoding as T import qualified Data.Text.Lazy as TL import qualified Data.Text.Lazy.Encoding as TLE import Data.Time.Calendar ( toGregorian ) import Data.Time.Clock ( getCurrentTime, utctDay ) import Network.HTTP.Client ( applyBasicAuth ) import Network.HTTP.StackClient ( HttpException (..), HttpExceptionContent (..) , Response (..), VerifiedDownloadException (..) , mkDownloadRequest, notFound404, parseRequest , setForceDownload, setRequestCheckStatus , verifiedDownloadWithProgress ) import Path ( (), dirname, parent, parseRelDir, parseRelFile ) import Path.IO ( doesDirExist, doesFileExist, ensureDir, getCurrentDir ) import RIO.Process ( proc, runProcess_, withWorkingDir ) import Stack.Constants ( altGitHubTokenEnvVar, backupUrlRelPath, gitHubBasicAuthType , gitHubTokenEnvVar, stackDotYaml, wiredInPackages ) import Stack.Constants.Config ( templatesDir ) import Stack.Init ( InitOpts (..), initProject ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig, withGlobalProject ) import Stack.Types.Config ( Config (..), HasConfig (..) ) import Stack.Types.GlobalOpts ( GlobalOpts (..) ) import Stack.Types.Runner ( Runner, globalOptsL ) import Stack.Types.SCM ( SCM (..) 
) import Stack.Types.TemplateName ( RepoService (..), RepoTemplatePath (..), TemplateName , TemplatePath (..), defaultTemplateName , parseRepoPathWithService, templateName, templatePath ) import System.Environment ( lookupEnv ) import qualified Text.Mustache as Mustache import qualified Text.Mustache.Render as Mustache import Text.ProjectTemplate ( ProjectTemplateException, receiveMem, unpackTemplate ) -------------------------------------------------------------------------------- -- Exceptions -- | Type representing \'pretty\' exceptions thrown by functions exported by the -- "Stack.New" module. data NewPrettyException = ProjectDirAlreadyExists !String !(Path Abs Dir) | DownloadTemplateFailed !Text !String !VerifiedDownloadException | forall b. LoadTemplateFailed !TemplateName !(Path b File) | forall b. ExtractTemplateFailed !TemplateName !(Path b File) !String | TemplateInvalid !TemplateName !StyleDoc | MagicPackageNameInvalid !String | AttemptedOverwrites !Text ![Path Abs File] deriving Typeable deriving instance Show NewPrettyException instance Pretty NewPrettyException where pretty (ProjectDirAlreadyExists name path) = "[S-2135]" <> line <> fillSep [ flow "Stack failed to create a new directory for project" , style Current (fromString name) <> "," , flow "as the directory" , pretty path , flow "already exists." ] pretty (DownloadTemplateFailed name url err) = "[S-1688]" <> line <> fillSep [ flow "Stack failed to download the template" , style Current (fromString . T.unpack $ name) , "from" , style Url (fromString url) <> "." ] <> blankLine <> ( if isNotFound then flow "Please check that the template exists at that \ \location." <> blankLine else mempty ) <> fillSep [ flow "While downloading, Stack encountered" , msg ] where (msg, isNotFound) = case err of DownloadHttpError (HttpExceptionRequest req content) -> let msg' = flow "an HTTP error. 
Stack made the request:" <> blankLine <> string (show req) <> blankLine <> flow "and the content of the error was:" <> blankLine <> string (show content) isNotFound404 = case content of StatusCodeException res _ -> responseStatus res == notFound404 _ -> False in (msg', isNotFound404) DownloadHttpError (InvalidUrlException url' reason) -> let msg' = fillSep [ flow "an HTTP error. The URL" , style Url (fromString url') , flow "was considered invalid because" , fromString reason <> "." ] in (msg', False) _ -> let msg' = flow "the following error:" <> blankLine <> fromString (displayException err) in (msg', False) pretty (LoadTemplateFailed name path) = "[S-3650]" <> line <> fillSep [ flow "Stack failed to load the downloaded template" , style Current (fromString $ T.unpack $ templateName name) , "from" , pretty path <> "." ] pretty (ExtractTemplateFailed name path err) = "[S-9582]" <> line <> fillSep [ flow "Stack failed to extract the loaded template" , style Current (fromString $ T.unpack $ templateName name) , "at" , pretty path <> "." ] <> blankLine <> flow "While extracting, Stack encountered the following error:" <> blankLine <> string err pretty (TemplateInvalid name why) = "[S-9490]" <> line <> fillSep [ flow "Stack failed to use the template" , style Current (fromString $ T.unpack $ templateName name) <> "," , "as" , why ] pretty (MagicPackageNameInvalid name) = "[S-5682]" <> line <> fillSep [ flow "Stack declined to create a new directory for project" , style Current (fromString name) <> "," , flow "as package" , fromString name , flow "is 'wired-in' to a version of GHC. That can cause build \ \errors." 
] <> blankLine <> fillSep ( flow "The names blocked by Stack are:" : mkNarrativeList Nothing False (map fromPackageName sortedWiredInPackages :: [StyleDoc]) ) where sortedWiredInPackages = L.sort $ S.toList wiredInPackages pretty (AttemptedOverwrites name fps) = "[S-3113]" <> line <> fillSep [ flow "Stack declined to apply the template" , style Current (fromString . T.unpack $ name) <> "," , flow "as it would create files that already exist." ] <> blankLine <> flow "The template would create the following existing files:" <> line <> bulletedList (map (style File . pretty) fps) <> blankLine <> fillSep [ "Use the" , style Shell "--force" , "flag to ignore this and overwrite those files." ] instance Exception NewPrettyException -------------------------------------------------------------------------------- -- Main project creation -- | Type representing command line options for the @stack new@ command (other -- than those applicable also to the @stack init@ command). data NewOpts = NewOpts { projectName :: PackageName -- ^ Name of the project to create. , createBare :: Bool -- ^ Whether to create the project without a directory. , init :: Bool -- ^ Whether to initialise the project for use with Stack. , template :: Maybe TemplateName -- ^ Name of the template to use. , nonceParams :: Map Text Text -- ^ Nonce parameters specified just for this invocation. } -- | Function underlying the @stack new@ command. Create a project directory -- structure and initialize the Stack config. newCmd :: (NewOpts, InitOpts) -> RIO Runner () newCmd (newOpts, initOpts) = withGlobalProject $ withConfig YesReexec $ do dir <- new newOpts initOpts.forceOverwrite exists <- doesFileExist $ dir stackDotYaml when (newOpts.init && (initOpts.forceOverwrite || not exists)) $ do go <- view globalOptsL initProject dir initOpts go.resolver -- | Create a new project with the given options. 
new :: HasConfig env => NewOpts -> Bool -> RIO env (Path Abs Dir) new opts forceOverwrite = do when (project `elem` wiredInPackages) $ prettyThrowM $ MagicPackageNameInvalid projectName pwd <- getCurrentDir absDir <- if bare then pure pwd else do relDir <- parseRelDir (packageNameString project) pure (pwd relDir) exists <- doesDirExist absDir configTemplate <- view $ configL . to (.defaultTemplate) let template = fromMaybe defaultTemplateName $ asum [ cliOptionTemplate , configTemplate ] if exists && not bare then prettyThrowM $ ProjectDirAlreadyExists projectName absDir else do templateText <- loadTemplate template (logUsing absDir template) files <- applyTemplate project template opts.nonceParams absDir templateText when (not forceOverwrite && bare) $ checkForOverwrite (templateName template) (M.keys files) writeTemplateFiles files runTemplateInits absDir pure absDir where cliOptionTemplate = opts.template project = opts.projectName projectName = packageNameString project bare = opts.createBare logUsing absDir template templateFrom = let loading = case templateFrom of LocalTemp -> flow "Loading local" RemoteTemp -> "Downloading" in prettyInfo ( fillSep [ loading , "template" , style Current (fromString $ T.unpack $ templateName template) , flow "to create project" , style Current (fromString projectName) , "in" , ( if bare then flow "the current directory" else fillSep [ "directory" , pretty $ dirname absDir ] ) <> "..." ] ) data TemplateFrom = LocalTemp | RemoteTemp -- | Download and read in a template's text content. loadTemplate :: forall env. HasConfig env => TemplateName -> (TemplateFrom -> RIO env ()) -> RIO env Text loadTemplate name logIt = do templateDir <- view $ configL . 
to templatesDir case templatePath name of AbsPath absFile -> logIt LocalTemp >> loadLocalFile absFile eitherByteStringToText UrlPath s -> do let settings = asIsFromUrl s downloadFromUrl settings templateDir RelPath rawParam relFile -> catch (do f <- loadLocalFile relFile eitherByteStringToText logIt LocalTemp pure f) ( \(e :: PrettyException) -> do settings <- fromMaybe (throwM e) (relSettings rawParam) let url = settings.downloadUrl mBasicAuth = settings.basicAuth extract = settings.extract downloadTemplate url mBasicAuth extract (templateDir relFile) ) RepoPath rtp -> do settings <- settingsFromRepoTemplatePath rtp downloadFromUrl settings templateDir where loadLocalFile :: Path b File -> (ByteString -> Either String Text) -> RIO env Text loadLocalFile path extract = do logDebug $ "Opening local template: \"" <> fromString (toFilePath path) <> "\"" exists <- doesFileExist path if exists then do bs <- readFileBinary (toFilePath path) --readFileUtf8 (toFilePath path) case extract bs of Left err -> prettyThrowM $ ExtractTemplateFailed name path err Right template -> pure template else prettyThrowM $ LoadTemplateFailed name path relSettings :: String -> Maybe (RIO env TemplateDownloadSettings) relSettings req = do rtp <- parseRepoPathWithService defaultRepoService (T.pack req) pure (settingsFromRepoTemplatePath rtp) downloadFromUrl :: TemplateDownloadSettings -> Path Abs Dir -> RIO env Text downloadFromUrl settings templateDir = do let url = settings.downloadUrl mBasicAuth = settings.basicAuth rel = fromMaybe backupUrlRelPath (parseRelFile url) downloadTemplate url mBasicAuth settings.extract (templateDir rel) downloadTemplate :: String -> Maybe (ByteString, ByteString) -- ^ Optional HTTP \'Basic\' authentication (type, credentials) -> (ByteString -> Either String Text) -> Path Abs File -> RIO env Text downloadTemplate url mBasicAuth extract path = do req <- parseRequest url let authReq = maybe id (uncurry applyBasicAuth) mBasicAuth req dReq = setForceDownload True $ 
mkDownloadRequest (setRequestCheckStatus authReq) logIt RemoteTemp catch ( do let label = T.pack $ toFilePath path res <- verifiedDownloadWithProgress dReq path label Nothing if res then logStickyDone ("Downloaded " <> display label <> ".") else logStickyDone "Already downloaded." ) (useCachedVersionOrThrow url path) loadLocalFile path extract useCachedVersionOrThrow :: String -> Path Abs File -> VerifiedDownloadException -> RIO env () useCachedVersionOrThrow url path exception = do exists <- doesFileExist path if exists then prettyWarn ( flow "Tried to download the template but an error was \ \found. Using cached local version. It may not be the \ \most recent version though." ) else prettyThrowM $ DownloadTemplateFailed (templateName name) url exception -- | Type representing settings for the download of Stack project templates. data TemplateDownloadSettings = TemplateDownloadSettings { downloadUrl :: String , basicAuth :: Maybe (ByteString, ByteString) -- ^ Optional HTTP 'Basic' authentication (type, credentials) , extract :: ByteString -> Either String Text } eitherByteStringToText :: ByteString -> Either String Text eitherByteStringToText = mapLeft show . decodeUtf8' asIsFromUrl :: String -> TemplateDownloadSettings asIsFromUrl url = TemplateDownloadSettings { downloadUrl = url , basicAuth = Nothing , extract = eitherByteStringToText } -- | Construct settings for downloading a Stack project template from a -- repository. settingsFromRepoTemplatePath :: HasTerm env => RepoTemplatePath -> RIO env TemplateDownloadSettings settingsFromRepoTemplatePath (RepoTemplatePath GitHub user name) = do let basicAuthMsg token = prettyInfoL [ flow "Using content of" , fromString token , flow " environment variable to authenticate GitHub REST API." 
] mBasicAuth <- do wantGitHubToken <- liftIO $ fromMaybe "" <$> lookupEnv gitHubTokenEnvVar if not (L.null wantGitHubToken) then do basicAuthMsg gitHubTokenEnvVar pure $ Just (gitHubBasicAuthType, fromString wantGitHubToken) else do wantAltGitHubToken <- liftIO $ fromMaybe "" <$> lookupEnv altGitHubTokenEnvVar if not (L.null wantAltGitHubToken) then do basicAuthMsg altGitHubTokenEnvVar pure $ Just (gitHubBasicAuthType, fromString wantAltGitHubToken) else pure Nothing pure TemplateDownloadSettings { downloadUrl = concat [ "https://api.github.com/repos/" , T.unpack user , "/stack-templates/contents/" , T.unpack name ] , basicAuth = mBasicAuth , extract = \bs -> do decodedJson <- eitherDecode (LB.fromStrict bs) case decodedJson of Object o | Just (String content) <- KeyMap.lookup "content" o -> do let noNewlines = T.filter (/= '\n') bsContent <- B64.decode $ T.encodeUtf8 (noNewlines content) mapLeft show $ decodeUtf8' bsContent _ -> Left "Couldn't parse GitHub response as a JSON object with a \ \\"content\" field" } settingsFromRepoTemplatePath (RepoTemplatePath GitLab user name) = pure $ asIsFromUrl $ concat [ "https://gitlab.com" , "/" , T.unpack user , "/stack-templates/raw/master/" , T.unpack name ] settingsFromRepoTemplatePath (RepoTemplatePath Bitbucket user name) = pure $ asIsFromUrl $ concat [ "https://bitbucket.org" , "/" , T.unpack user , "/stack-templates/raw/master/" , T.unpack name ] -- | Apply and unpack a template into a directory. applyTemplate :: HasConfig env => PackageName -> TemplateName -> Map Text Text -> Path Abs Dir -> Text -> RIO env (Map (Path Abs File) LB.ByteString) applyTemplate project template nonceParams dir templateText = do config <- view configL currentYear <- do now <- liftIO getCurrentTime let (year, _, _) = toGregorian (utctDay now) pure $ T.pack . 
show $ year let context = M.unions [nonceParams, nameParams, configParams, yearParam] where nameAsVarId = T.replace "-" "_" $ T.pack $ packageNameString project nameAsModule = T.filter (/= ' ') $ T.toTitle $ T.replace "-" " " $ T.pack $ packageNameString project nameParams = M.fromList [ ("name", T.pack $ packageNameString project) , ("name-as-varid", nameAsVarId) , ("name-as-module", nameAsModule) ] configParams = config.templateParams yearParam = M.singleton "year" currentYear files :: Map FilePath LB.ByteString <- catch ( execWriterT $ runConduit $ yield (T.encodeUtf8 templateText) .| unpackTemplate receiveMem id ) ( \(e :: ProjectTemplateException) -> prettyThrowM $ TemplateInvalid template (string $ displayException e) ) when (M.null files) $ prettyThrowM $ TemplateInvalid template (flow "the template does not contain any files.") let isPkgSpec f = ".cabal" `L.isSuffixOf` f || "package.yaml" `L.isSuffixOf` f unless (any isPkgSpec . M.keys $ files) $ prettyThrowM $ TemplateInvalid template (flow "the template does not contain a Cabal or package.yaml file.") -- Apply Mustache templating to a single file within the project template. let applyMustache bytes -- Workaround for performance problems with mustache and -- large files, applies to Yesod templates with large -- bootstrap CSS files. See -- https://github.com/commercialhaskell/stack/issues/4133. | LB.length bytes < 50000 , Right text <- TLE.decodeUtf8' bytes = do let etemplateCompiled = Mustache.compileTemplate (T.unpack (templateName template)) $ TL.toStrict text templateCompiled <- case etemplateCompiled of Left e -> prettyThrowM $ TemplateInvalid template ( flow "Stack encountered the following error:" <> blankLine -- Text.Parsec.Error.ParseError is not an instance -- of Control.Exception. 
<> string (show e) ) Right t -> pure t let (substitutionErrors, applied) = Mustache.checkedSubstitute templateCompiled context missingKeys = S.fromList $ concatMap onlyMissingKeys substitutionErrors pure (LB.fromStrict $ encodeUtf8 applied, missingKeys) -- Too large or too binary | otherwise = pure (bytes, S.empty) -- Accumulate any missing keys as the file is processed processFile mks (fpOrig, bytes) = do -- Apply the mustache template to the filenames as well, so that we -- can have file names depend on the project name. (fp, mks1) <- applyMustache $ TLE.encodeUtf8 $ TL.pack fpOrig path <- parseRelFile $ TL.unpack $ TLE.decodeUtf8 fp (bytes', mks2) <- applyMustache bytes pure (mks <> mks1 <> mks2, (dir path, bytes')) (missingKeys, results) <- mapAccumLM processFile S.empty (M.toList files) unless (S.null missingKeys) $ prettyNote $ missingParameters missingKeys config.userConfigPath pure $ M.fromList results where onlyMissingKeys (Mustache.VariableNotFound ks) = map T.unpack ks onlyMissingKeys _ = [] mapAccumLM :: Monad m => (a -> b -> m(a, c)) -> a -> [b] -> m(a, [c]) mapAccumLM _ a [] = pure (a, []) mapAccumLM f a (x:xs) = do (a', c) <- f a x (a'', cs) <- mapAccumLM f a' xs pure (a'', c:cs) missingParameters :: Set String -> Path Abs File -> StyleDoc missingParameters missingKeys userConfigPath = fillSep ( flow "The following parameters were needed by the template but \ \not provided:" : mkNarrativeList Nothing False (map toStyleDoc (S.toList missingKeys)) ) <> blankLine <> fillSep [ flow "You can provide them in Stack's global YAML configuration \ \file" , "(" <> pretty userConfigPath <> ")" , "like this:" ] <> blankLine <> "templates:" <> line <> " params:" <> line <> vsep ( map (\key -> " " <> fromString key <> ": value") (S.toList missingKeys) ) <> blankLine <> flow "Or you can pass each one on the command line as parameters \ \like this:" <> blankLine <> style Shell ( fillSep [ flow "stack new" , fromPackageName project , fromString $ T.unpack 
(templateName template) , hsep $ map ( \key -> fillSep [ "-p" , "\"" <> fromString key <> ":value\"" ] ) (S.toList missingKeys) ] ) <> line where toStyleDoc :: String -> StyleDoc toStyleDoc = fromString -- | Check if we're going to overwrite any existing files. checkForOverwrite :: (MonadIO m, MonadThrow m) => Text -> [Path Abs File] -> m () checkForOverwrite name files = do overwrites <- filterM doesFileExist files unless (null overwrites) $ prettyThrowM $ AttemptedOverwrites name overwrites -- | Write files to the new project directory. writeTemplateFiles :: MonadIO m => Map (Path Abs File) LB.ByteString -> m () writeTemplateFiles files = liftIO $ forM_ (M.toList files) (\(fp,bytes) -> do ensureDir (parent fp) writeBinaryFileAtomic fp $ lazyByteString bytes) -- | Run any initialization functions, such as Git. runTemplateInits :: HasConfig env => Path Abs Dir -> RIO env () runTemplateInits dir = do config <- view configL case config.scmInit of Nothing -> pure () Just Git -> withWorkingDir (toFilePath dir) $ catchAny (proc "git" ["init"] runProcess_) ( \_ -> prettyWarn $ fillSep [ flow "Stack failed to run a" , style Shell (flow "git init") , flow "command. Ignoring..." ] ) -------------------------------------------------------------------------------- -- Defaults -- | The default service to use to download templates. 
defaultRepoService :: RepoService defaultRepoService = GitHub stack-2.15.7/src/Stack/Nix.hs0000644000000000000000000001454514604306201014034 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Run commands in a nix-shell module Stack.Nix ( nixCmdName , nixHelpOptName , runShellAndExit ) where import qualified Data.Text as T import Path.IO ( resolveFile ) import RIO.Process ( exec, processContextL ) import Stack.Config ( getInContainer, withBuildConfig ) import Stack.Config.Nix ( nixCompiler, nixCompilerVersion ) import Stack.Constants ( inContainerEnvVar, inNixShellEnvVar , platformVariantEnvVar ) import Stack.Prelude import Stack.Types.BuildConfig ( wantedCompilerVersionL ) import Stack.Types.Config ( Config (..), HasConfig (..), configProjectRoot ) import Stack.Types.Docker ( reExecArgName ) import Stack.Types.Nix ( NixOpts (..) ) import Stack.Types.Version ( showStackVersion ) import System.Environment ( getArgs, getExecutablePath, lookupEnv ) import qualified System.FilePath as F -- | Type representing exceptions thrown by functions exported by the -- "Stack.Nix" module. data NixException = CannotDetermineProjectRoot -- ^ Can't determine the project root (location of the shell file if any). deriving (Show, Typeable) instance Exception NixException where displayException CannotDetermineProjectRoot = "Error: [S-7384]\n" ++ "Cannot determine project root directory." 
runShellAndExit :: RIO Config void runShellAndExit = do inContainer <- getInContainer -- TODO we can probably assert that this is False based on Stack.Runners now origArgs <- liftIO getArgs let args | inContainer = origArgs -- internal-re-exec version already passed -- first stack when restarting in the container | otherwise = ("--" ++ reExecArgName ++ "=" ++ showStackVersion) : origArgs exePath <- liftIO getExecutablePath config <- view configL envOverride <- view processContextL local (set processContextL envOverride) $ do let cmnd = escape exePath args' = map escape args mshellFile <- case configProjectRoot config of Just projectRoot -> traverse (resolveFile projectRoot) config.nix.initFile Nothing -> pure Nothing -- This will never result in double loading the build config, since: -- -- 1. This function explicitly takes a Config, not a HasConfig -- -- 2. This function ends up exiting before running other code -- (thus the void return type) compilerVersion <- withBuildConfig $ view wantedCompilerVersionL ghc <- either throwIO pure $ nixCompiler compilerVersion ghcVersion <- either throwIO pure $ nixCompilerVersion compilerVersion let pkgsInConfig = config.nix.packages pkgs = pkgsInConfig ++ [ghc, "git", "gcc", "gmp"] pkgsStr = "[" <> T.intercalate " " pkgs <> "]" pureShell = config.nix.pureShell addGCRoots = config.nix.addGCRoots nixopts = case mshellFile of Just fp -> [ toFilePath fp , "--arg" , "ghc" , "with (import {}); " ++ T.unpack ghc , "--argstr", "ghcVersion", T.unpack ghcVersion ] Nothing -> [ "-E" , T.unpack $ T.concat [ "with (import {}); " , "let inputs = ",pkgsStr,"; " , "libPath = lib.makeLibraryPath inputs; " , "stackExtraArgs = lib.concatMap (pkg: " , "[ ''--extra-lib-dirs=${lib.getLib pkg}/lib'' " , " ''--extra-include-dirs=${lib.getDev pkg}/include'' ]" , ") inputs; in " , "runCommand ''myEnv'' { " , "buildInputs = lib.optional stdenv.isLinux glibcLocales ++ inputs; " -- glibcLocales is necessary on Linux to avoid warnings about -- GHC being 
incapable to set the locale. , T.pack platformVariantEnvVar <> "=''nix''; " , T.pack inNixShellEnvVar <> "=1; " , if inContainer -- If shell is pure, this env var would not -- be seen by stack inside nix then T.pack inContainerEnvVar <> "=1; " else "" , "LD_LIBRARY_PATH = libPath;" -- LD_LIBRARY_PATH is set because for now it's needed by -- builds using Template Haskell , "STACK_IN_NIX_EXTRA_ARGS = stackExtraArgs; " -- overriding default locale so Unicode output using base -- won't be broken , "LANG=\"en_US.UTF-8\";" , "} \"\"" ] ] fullArgs = concat [ [ "--pure" | pureShell ] , if addGCRoots then [ "--indirect" , "--add-root" , toFilePath config.workDir F. "nix-gc-symlinks" F. "gc-root" ] else [] , map T.unpack config.nix.shellOptions , nixopts , ["--run", unwords (cmnd:"$STACK_IN_NIX_EXTRA_ARGS":args')] -- Using --run instead of --command so we cannot end up in the -- nix-shell if stack build is Ctrl-C'd ] pathVar <- liftIO $ lookupEnv "PATH" logDebug $ "PATH is: " <> displayShow pathVar logDebug $ "Using a nix-shell environment " <> ( case mshellFile of Just path -> "from file: " <> fromString (toFilePath path) Nothing -> "with nix packages: " <> display (T.intercalate ", " pkgs) ) exec "nix-shell" fullArgs -- | Shell-escape quotes inside the string and enclose it in quotes. 
escape :: String -> String escape str = "'" ++ foldr (\c -> if c == '\'' then ("'\"'\"'"++) else (c:)) "" str ++ "'" -- | Command-line argument for "nix" nixCmdName :: String nixCmdName = "nix" nixHelpOptName :: String nixHelpOptName = nixCmdName ++ "-help" stack-2.15.7/src/Stack/Options/BenchParser.hs0000644000000000000000000000217514604306201017121 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Options.BenchParser ( benchOptsParser ) where import Options.Applicative ( Parser, flag', help, long, metavar, strOption ) import Options.Applicative.Builder.Extra ( optionalFirst ) import Stack.Prelude import Stack.Options.Utils ( hideMods ) import Stack.Types.BuildOptsMonoid ( BenchmarkOptsMonoid (..) ) -- | Parser for bench arguments. -- FIXME hiding options benchOptsParser :: Bool -> Parser BenchmarkOptsMonoid benchOptsParser hide0 = BenchmarkOptsMonoid <$> optionalFirst (strOption ( long "benchmark-arguments" <> long "ba" <> metavar "BENCH_ARGS" <> help "Forward BENCH_ARGS to the benchmark suite. Supports templates \ \from 'cabal bench'." <> hide )) <*> optionalFirst (flag' True ( long "no-run-benchmarks" <> help "Disable running of benchmarks. 
(Benchmarks will still be \ \built.)" <> hide )) where hide = hideMods hide0 stack-2.15.7/src/Stack/Options/BuildMonoidParser.hs0000644000000000000000000002005514620153445020314 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Options.BuildMonoidParser ( buildOptsMonoidParser , cabalVerboseParser , cabalVerbosityOptsParser , cabalVerbosityParser ) where import qualified Data.Text as T import Distribution.Parsec ( eitherParsec ) import Options.Applicative ( Parser, eitherReader, flag, help, long, metavar, option , strOption ) import Options.Applicative.Builder.Extra ( firstBoolFlagsFalse, firstBoolFlagsNoDefault , firstBoolFlagsTrue, optionalFirst ) import Stack.Build ( splitObjsWarning ) import Stack.Prelude import Stack.Options.BenchParser ( benchOptsParser ) import Stack.Options.TestParser ( testOptsParser ) import Stack.Options.HaddockParser ( haddockOptsParser ) import Stack.Options.Utils ( GlobalOptsContext (..), hideMods ) import Stack.Types.BuildOptsMonoid ( BuildOptsMonoid (..), CabalVerbosity, readProgressBarFormat , toFirstCabalVerbosity ) buildOptsMonoidParser :: GlobalOptsContext -> Parser BuildOptsMonoid buildOptsMonoidParser hide0 = BuildOptsMonoid <$> trace' <*> profile <*> noStrip <*> libProfiling <*> exeProfiling <*> libStripping <*> exeStripping <*> haddock <*> haddockOptsParser hideBool <*> openHaddocks <*> haddockDeps <*> haddockInternal <*> haddockHyperlinkSource <*> haddockForHackage <*> copyBins <*> copyCompilerTool <*> preFetch <*> keepGoing <*> keepTmpFiles <*> forceDirty <*> tests <*> testOptsParser hideBool <*> benches <*> benchOptsParser hideBool <*> reconfigure <*> cabalVerbose <*> splitObjs <*> skipComponents <*> interleavedOutput <*> progressBar <*> ddumpDir where hideBool = hide0 /= BuildCmdGlobalOpts hide = hideMods hideBool hideExceptGhci = hideMods (hide0 `notElem` [BuildCmdGlobalOpts, GhciCmdGlobalOpts]) -- These use 'Any' because they are not settable in stack.yaml, so -- there is no need for options like 
--no-profile. trace' = Any <$> flag False True ( long "trace" <> help "Enable profiling in libraries, executables, etc. for all \ \expressions and generate a backtrace on exception." <> hideExceptGhci ) profile = Any <$> flag False True ( long "profile" <> help "Enable profiling in libraries, executables, etc. for all \ \expressions and generate a profiling report in tests or \ \benchmarks." <> hideExceptGhci ) noStrip = Any <$> flag False True ( long "no-strip" <> help "Disable DWARF debugging symbol stripping in libraries, \ \executables, etc. for all expressions, producing larger \ \executables but allowing the use of standard \ \debuggers/profiling tools/other utilities that use \ \debugging symbols." <> hideExceptGhci ) libProfiling = firstBoolFlagsFalse "library-profiling" "library profiling for TARGETs and all its dependencies." hide exeProfiling = firstBoolFlagsFalse "executable-profiling" "executable profiling for TARGETs and all its dependencies." hide libStripping = firstBoolFlagsTrue "library-stripping" "library stripping for TARGETs and all its dependencies." hide exeStripping = firstBoolFlagsTrue "executable-stripping" "executable stripping for TARGETs and all its dependencies." hide haddock = firstBoolFlagsFalse "haddock" "generating Haddock documentation for the package(s) in this \ \directory/configuration." hide openHaddocks = firstBoolFlagsFalse "open" "opening the local Haddock documentation in the browser." hide haddockDeps = firstBoolFlagsNoDefault "haddock-deps" "building Haddock documentation for dependencies. (default: if building \ \Haddock documentation, true; otherwise, false)" hide haddockInternal = firstBoolFlagsFalse "haddock-internal" "building Haddock documentation for internal modules (like \ \'cabal haddock --internal')." hide haddockHyperlinkSource = firstBoolFlagsTrue "haddock-hyperlink-source" "building hyperlinked source for Haddock documentation (like \ \'haddock --hyperlinked-source')." 
hide haddockForHackage = firstBoolFlagsFalse "haddock-for-hackage" "building with flags to generate Haddock documentation suitable for upload \ \to Hackage." hide copyBins = firstBoolFlagsFalse "copy-bins" "copying binaries to local-bin (see 'stack path')." hide copyCompilerTool = firstBoolFlagsFalse "copy-compiler-tool" "copying binaries of targets to compiler-tools-bin (see 'stack path')." hide keepGoing = firstBoolFlagsNoDefault "keep-going" "continue running after a step fails. (default: for 'build', false; for \ \'test' or 'bench', true)" hide keepTmpFiles = firstBoolFlagsFalse "keep-tmp-files" "keep intermediate files and build directories." hide preFetch = firstBoolFlagsFalse "prefetch" "fetching packages necessary for the build immediately. Useful with \ \--dry-run." hide forceDirty = firstBoolFlagsFalse "force-dirty" "forcing the treatment of all local packages as having dirty files. \ \Useful for cases where Stack can't detect a file change." hide tests = firstBoolFlagsFalse "test" "testing the package(s) in this directory/configuration." hideExceptGhci benches = firstBoolFlagsFalse "bench" "benchmarking the package(s) in this directory/configuration." hideExceptGhci reconfigure = firstBoolFlagsFalse "reconfigure" "performing the configure step, even if unnecessary. Useful in some \ \corner cases with custom Setup.hs files." hide cabalVerbose = cabalVerbosityOptsParser hideBool splitObjs = firstBoolFlagsFalse "split-objs" ( "split-objs, to reduce output size (at the cost of build time). " ++ splitObjsWarning ) hide skipComponents = many (fmap T.pack (strOption ( long "skip" <> help "Skip given component (can be specified multiple times)." <> hide ))) interleavedOutput = firstBoolFlagsTrue "interleaved-output" "printing concurrent GHC output to the console with a prefix for the \ \package name." 
hide progressBar = First <$> optional (option (eitherReader readProgressBarFormat) ( long "progress-bar" <> metavar "FORMAT" <> help "Progress bar format (accepts none, count-only, capped and full). \ \(default: capped)" <> hide )) ddumpDir = optionalFirst (strOption ( long "ddump-dir" <> help "Specify output directory for ddump-files." <> hide )) -- | Parser for Cabal verbosity options cabalVerbosityOptsParser :: Bool -> Parser (First CabalVerbosity) cabalVerbosityOptsParser hide = cabalVerbosityParser hide <|> cabalVerboseParser hide -- | Parser for Cabal verbosity option cabalVerbosityParser :: Bool -> Parser (First CabalVerbosity) cabalVerbosityParser hide = let pCabalVerbosity = option (eitherReader eitherParsec) ( long "cabal-verbosity" <> metavar "VERBOSITY" <> help "Cabal verbosity (accepts Cabal's numerical and extended \ \syntax)." <> hideMods hide) in First . Just <$> pCabalVerbosity -- | Parser for the Cabal verbose flag, retained for backward compatibility cabalVerboseParser :: Bool -> Parser (First CabalVerbosity) cabalVerboseParser hide = let pVerboseFlag = firstBoolFlagsFalse "cabal-verbose" "asking Cabal to be verbose in its output." 
(hideMods hide) in toFirstCabalVerbosity <$> pVerboseFlag stack-2.15.7/src/Stack/Options/BuildParser.hs0000644000000000000000000001452114620153445017147 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Options.BuildParser ( buildOptsParser , flagsParser , targetsParser ) where import qualified Data.List as L import qualified Data.Map as Map import qualified Data.Text as T import Options.Applicative ( Parser, completer, flag, flag', help, internal, long , metavar, option, strOption, switch, value ) import Options.Applicative.Args ( cmdOption ) import Options.Applicative.Builder.Extra ( textArgument, textOption ) import Stack.Options.Completion ( flagCompleter, ghcOptsCompleter, targetCompleter ) import Stack.Options.PackageParser ( readFlag ) import Stack.Prelude import Stack.Types.BuildOptsCLI ( ApplyCLIFlag, BuildCommand, BuildOptsCLI (..) , BuildSubset (..), FileWatchOpts (..) ) -- | Parser for CLI-only build arguments buildOptsParser :: BuildCommand -> Parser BuildOptsCLI buildOptsParser cmd = BuildOptsCLI <$> targetsParser <*> switch ( long "dry-run" <> help "Don't build anything, just prepare to." ) <*> ( (\x y z -> concat [x, y, z]) <$> flag [] ["-Wall", "-Werror"] ( long "pedantic" <> help "Pass the -Wall and -Werror flags to GHC, turning on all \ \warnings that indicate potentially suspicious code and \ \making all warnings into fatal errors. Can be overridden \ \using Stack's --ghc-options option." ) <*> flag [] ["-O0"] ( long "fast" <> help "Pass a -O0 flag to GHC, turning off any GHC \ \optimsations that have been set. Can be overridden using \ \Stack's --ghc-options option." ) <*> many (textOption ( long "ghc-options" <> metavar "OPTIONS" <> completer ghcOptsCompleter <> help "Additional options to be passed to GHC (can be specified \ \multiple times)." )) ) <*> progsOptionsParser <*> flagsParser <*> ( flag' BSOnlyDependencies ( long "dependencies-only" <> help "A synonym for --only-dependencies." 
) <|> flag' BSOnlySnapshot ( long "only-snapshot" <> help "Only build packages for the snapshot database, not the \ \local database." ) <|> flag' BSOnlyDependencies ( long "only-dependencies" <> help "Only build packages that are dependencies of targets on \ \the command line." ) <|> flag' BSOnlyLocals ( long "only-locals" <> help "Only build packages in the local database. Fail if the \ \build plan includes the snapshot database." ) <|> pure BSAll ) <*> ( flag' FileWatch ( long "file-watch" <> help "Watch for changes in local files and automatically \ \rebuild." ) <|> flag' FileWatchPoll ( long "file-watch-poll" <> help "Like --file-watch, but polling the filesystem instead of \ \using events." ) <|> pure NoFileWatch ) <*> switch ( long "watch-all" <> help "Watch all local files not taking targets into account." ) <*> many (cmdOption ( long "exec" <> metavar "COMMAND [ARGUMENT(S)]" <> help "Command and argument(s) to run after a successful build." )) <*> switch ( long "only-configure" <> help "Only perform the configure step, not any builds. Intended for \ \tool usage. May break when used on multiple packages at once!" ) <*> pure cmd <*> switch ( long "initial-build-steps" <> help "For target packages, only run initial build steps needed for \ \GHCi." <> internal ) targetsParser :: Parser [Text] targetsParser = many (textArgument ( metavar "TARGET" <> completer targetCompleter <> help "If none specified, use all local packages. See \ \https://docs.haskellstack.org/en/stable/build_command/#target-syntax \ \for details." )) flagsParser :: Parser (Map.Map ApplyCLIFlag (Map.Map FlagName Bool)) flagsParser = Map.unionsWith Map.union <$> many (option readFlag ( long "flag" <> completer flagCompleter <> metavar "PACKAGE:[-]FLAG" <> help "Override flags set in stack.yaml (applies to local packages \ \and extra-deps)." )) progsOptionsParser :: Parser [(Text, [Text])] progsOptionsParser = dummyProgOptionsParser *> (filter (not . L.null . 
snd) <$> progsOptionsParser') where -- The purpose of this parser is only to generate the desired help text. The -- actual --PROG-options parsers are all internal. dummyProgOptionsParser :: Parser String dummyProgOptionsParser = strOption ( long "PROG-option" <> help ( "Pass an argument to PROG (can be specified multiple times). PROG \ \must be a program recognised by the Cabal library and one of " <> T.unpack (T.intercalate " " progs) <> "." ) <> metavar "ARG" <> value "" ) progs :: [Text] progs = L.sort [ -- configuration "pkg-config" -- preprocessors , "alex" , "c2hs" , "cpphs" , "doctest" , "greencard" , "happy" , "hsc2hs" , "hscolour" -- platform toolchain (GNU) , "ar" -- create, modify, and extract from archives , "gcc" -- C/C++ compiler , "ld" -- linker , "strip" -- discards symbols and other data from object files , "tar" ] progsOptionsParser' :: Parser [(Text, [Text])] progsOptionsParser' = traverse mkProgOptionsParser progs mkProgOptionsParser :: Text -> Parser (Text, [Text]) mkProgOptionsParser prog = fmap (prog,) $ many $ textOption ( long (T.unpack prog <> "-option") <> internal ) stack-2.15.7/src/Stack/Options/CleanParser.hs0000644000000000000000000000155414445120723017132 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Options.CleanParser ( cleanOptsParser ) where import Options.Applicative ( Parser, flag', help, long, metavar ) import Stack.Clean ( CleanCommand (..), CleanOpts (..) ) import Stack.Prelude import Stack.Types.PackageName ( packageNameArgument ) -- | Command-line parser for the clean command. cleanOptsParser :: CleanCommand -> Parser CleanOpts cleanOptsParser Clean = CleanShallow <$> packages <|> doFullClean where packages = many (packageNameArgument ( metavar "PACKAGE" <> help "If none specified, clean all project packages." )) doFullClean = flag' CleanFull ( long "full" <> help "Delete the project's Stack working directories (.stack-work by \ \default)." 
) cleanOptsParser Purge = pure CleanFull stack-2.15.7/src/Stack/Options/ConfigParser.hs0000644000000000000000000002066314620153445017321 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} module Stack.Options.ConfigParser ( configOptsParser ) where import Data.Char ( toUpper ) import Options.Applicative ( Parser, auto, completer, completeWith, eitherReader, help , long, metavar, option, short, strOption ) import Options.Applicative.Builder.Extra ( PathCompleterOpts (..), absDirOption, absFileOption , defaultPathCompleterOpts, dirCompleter, firstBoolFlagsFalse , firstBoolFlagsNoDefault, firstBoolFlagsTrue, optionalFirst , pathCompleterWith ) import Path ( PathException (..), parseRelDir ) import Stack.Constants ( stackRootOptionName ) import Stack.Options.BuildMonoidParser ( buildOptsMonoidParser ) import Stack.Options.DockerParser ( dockerOptsParser ) import Stack.Options.GhcBuildParser ( ghcBuildParser ) import Stack.Options.GhcVariantParser ( ghcVariantParser ) import Stack.Options.NixParser ( nixOptsParser ) import Stack.Options.Utils ( GlobalOptsContext (..), hideMods ) import Stack.Prelude hiding ( snapshotLocation ) import Stack.Types.ColorWhen ( readColorWhen ) import Stack.Types.ConfigMonoid ( ConfigMonoid (..) ) import Stack.Types.DumpLogs ( DumpLogs (..) ) import qualified System.FilePath as FilePath -- | Command-line arguments parser for configuration. 
configOptsParser :: FilePath -> GlobalOptsContext -> Parser ConfigMonoid configOptsParser currentDir hide0 = ( \stackRoot workDir buildOpts dockerOpts nixOpts systemGHC installGHC arch ghcVariant ghcBuild jobs extraIncludeDirs extraLibDirs customPreprocessorExts overrideGccPath overrideHpack skipGHCCheck skipMsys localBinPath setupInfoLocations modifyCodePage allowDifferentUser dumpLogs colorWhen snapshotLocation noRunCompile -> mempty { stackRoot , workDir , buildOpts , dockerOpts , nixOpts , systemGHC , installGHC , skipGHCCheck , arch , ghcVariant , ghcBuild , jobs , extraIncludeDirs , extraLibDirs , customPreprocessorExts , overrideGccPath , overrideHpack , skipMsys , localBinPath , setupInfoLocations , modifyCodePage , allowDifferentUser , dumpLogs , colorWhen , snapshotLocation , noRunCompile } ) <$> optionalFirst (absDirOption ( long stackRootOptionName <> metavar (map toUpper stackRootOptionName) <> help "Absolute path to the global Stack root directory. Overrides \ \any STACK_ROOT environment variable." <> hide )) <*> optionalFirst (option (eitherReader (mapLeft showWorkDirError . parseRelDir)) ( long "work-dir" <> metavar "WORK-DIR" <> completer ( pathCompleterWith ( defaultPathCompleterOpts { absolute = False, fileFilter = const False } ) ) <> help "Relative path to Stack's work directory. Overrides any \ \STACK_WORK environment variable. (default: '.stack-work')" <> hide )) <*> buildOptsMonoidParser hide0 <*> dockerOptsParser True <*> nixOptsParser True <*> firstBoolFlagsNoDefault "system-ghc" "using the system installed GHC (on the PATH) if it is available and \ \its version matches. (default: disabled)" hide <*> firstBoolFlagsTrue "install-ghc" "downloading and installing GHC if necessary. (Can be done manually \ \with 'stack setup'.)" hide <*> optionalFirst (strOption ( long "arch" <> metavar "ARCH" <> help "System architecture, e.g. i386, x86_64, aarch64." 
<> hide )) <*> optionalFirst (ghcVariantParser (hide0 /= OuterGlobalOpts)) <*> optionalFirst (ghcBuildParser (hide0 /= OuterGlobalOpts)) <*> optionalFirst (option auto ( long "jobs" <> short 'j' <> metavar "JOBS" <> help "Number of concurrent jobs to run." <> hide )) <*> many ((currentDir FilePath.) <$> strOption ( long "extra-include-dirs" <> metavar "DIR" <> completer dirCompleter <> help "Extra directories to check for C header files." <> hide )) <*> many ((currentDir FilePath.) <$> strOption ( long "extra-lib-dirs" <> metavar "DIR" <> completer dirCompleter <> help "Extra directories to check for libraries." <> hide )) <*> many (strOption ( long "custom-preprocessor-extensions" <> metavar "EXT" <> help "Extensions used for custom preprocessors." <> hide )) <*> optionalFirst (absFileOption ( long "with-gcc" <> metavar "PATH-TO-GCC" <> help "Use gcc found at PATH-TO-GCC." <> hide )) <*> optionalFirst (strOption ( long "with-hpack" <> metavar "HPACK" <> help "Use HPACK executable (overrides bundled Hpack)." <> hide )) <*> firstBoolFlagsFalse "skip-ghc-check" "skipping the GHC version and architecture check." hide <*> firstBoolFlagsFalse "skip-msys" "skipping the local MSYS installation (Windows only)." hide <*> optionalFirst ((currentDir FilePath.) <$> strOption ( long "local-bin-path" <> metavar "DIR" <> completer dirCompleter <> help "Override the target directory for 'stack build --copy-bins' \ \and 'stack install'. DIR can be an absolute path or one \ \relative to the current directory." <> hide )) <*> many (strOption ( long "setup-info-yaml" <> help "Alternate URL or path (relative or absolute) for Stack \ \dependencies." <> metavar "URL" )) <*> firstBoolFlagsTrue "modify-code-page" "setting the codepage to support UTF-8 (Windows only)." hide <*> firstBoolFlagsNoDefault "allow-different-user" "permission for users other than the owner of the Stack root directory \ \to use a Stack installation (POSIX only). 
(default: inside Docker, \ \ true; otherwise, false)" hide <*> fmap toDumpLogs (firstBoolFlagsNoDefault "dump-logs" "dump the build output logs for local packages to the console. \ \(default: dump warning logs)" hide) <*> optionalFirst (option readColorWhen ( long "color" <> long "colour" <> metavar "WHEN" <> completeWith ["always", "never", "auto"] <> help "Specify when to use color in output; WHEN is 'always', \ \'never', or 'auto'. On Windows versions before Windows \ \10, for terminals that do not support color codes, the \ \default is 'never'; color may work on terminals that \ \support color codes." <> hide )) <*> optionalFirst (strOption ( long "snapshot-location-base" <> help "The base location of LTS/Nightly snapshots." <> metavar "URL" )) <*> firstBoolFlagsFalse "script-no-run-compile" "the use of options `--no-run --compile` with `stack script`." hide where hide = hideMods (hide0 /= OuterGlobalOpts) toDumpLogs (First (Just True)) = First (Just DumpAllLogs) toDumpLogs (First (Just False)) = First (Just DumpNoLogs) toDumpLogs (First Nothing) = First Nothing showWorkDirError err = case fromException err of Just (InvalidRelDir x) -> "Stack failed to interpret the value of the option as a valid\n\ \relative path to a directory. Stack will not accept an absolute path. A \ \path\n\ \containing a .. (parent directory) component is not valid.\n\n\ \If set, Stack expects the value to identify the location of Stack's \ \work\n\ \directory, relative to the root directory of the project or package. 
\ \Stack\n\ \encountered the value:\n" ++ x _ -> displayException err stack-2.15.7/src/Stack/Options/Completion.hs0000644000000000000000000001056214604306201017035 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Options.Completion ( ghcOptsCompleter , targetCompleter , flagCompleter , projectExeCompleter ) where import Data.Char ( isSpace ) import Data.List ( isPrefixOf ) import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Text as T import qualified Distribution.PackageDescription as C import Options.Applicative ( Completer, mkCompleter ) import Options.Applicative.Builder.Extra ( unescapeBashArg ) import Stack.Constants ( ghcShowOptionsOutput ) import Stack.Options.GlobalParser ( globalOptsFromMonoid ) import Stack.Runners ( ShouldReexec (..), withConfig, withDefaultEnvConfig , withRunnerGlobal ) import Stack.Prelude import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..) ) import Stack.Types.Config ( Config (..) ) import Stack.Types.EnvConfig ( EnvConfig ) import Stack.Types.GlobalOpts ( GlobalOpts (..) ) import Stack.Types.Project ( Project (..) ) import Stack.Types.ProjectConfig ( ProjectConfig (..) ) import Stack.Types.NamedComponent ( renderPkgComponent ) import Stack.Types.SourceMap ( SMWanted (..), ppComponents, ppGPD ) ghcOptsCompleter :: Completer ghcOptsCompleter = mkCompleter $ \inputRaw -> pure $ let input = unescapeBashArg inputRaw (curArgReversed, otherArgsReversed) = break isSpace (reverse input) curArg = reverse curArgReversed otherArgs = reverse otherArgsReversed in if null curArg then [] else map (otherArgs ++) $ filter (curArg `isPrefixOf`) ghcShowOptionsOutput -- TODO: Ideally this would pay attention to --stack-yaml, may require -- changes to optparse-applicative. 
buildConfigCompleter :: (String -> RIO EnvConfig [String]) -> Completer buildConfigCompleter inner = mkCompleter $ \inputRaw -> do let input = unescapeBashArg inputRaw case input of -- If it looks like a flag, skip this more costly completion. ('-': _) -> pure [] _ -> do go' <- globalOptsFromMonoid False mempty let go = go' { logLevel = LevelOther "silent" } withRunnerGlobal go $ withConfig NoReexec $ withDefaultEnvConfig $ inner input targetCompleter :: Completer targetCompleter = buildConfigCompleter $ \input -> do packages <- view $ buildConfigL . to (.smWanted.project) comps <- for packages ppComponents pure $ concatMap (filter (input `isPrefixOf`) . allComponentNames) (Map.toList comps) where allComponentNames (name, comps) = map (T.unpack . renderPkgComponent . (name,)) (Set.toList comps) flagCompleter :: Completer flagCompleter = buildConfigCompleter $ \input -> do bconfig <- view buildConfigL gpds <- for bconfig.smWanted.project ppGPD let wildcardFlags = nubOrd $ concatMap (\(name, gpd) -> map (\fl -> "*:" ++ flagString name fl) (C.genPackageFlags gpd)) $ Map.toList gpds normalFlags = concatMap (\(name, gpd) -> map (\fl -> packageNameString name ++ ":" ++ flagString name fl) (C.genPackageFlags gpd)) $ Map.toList gpds flagString name fl = let flname = C.unFlagName $ C.flagName fl in (if flagEnabled name fl then "-" else "") ++ flname prjFlags = case bconfig.config.project of PCProject (p, _) -> p.flagsByPkg PCGlobalProject -> mempty PCNoProject _ -> mempty flagEnabled name fl = fromMaybe (C.flagDefault fl) $ Map.lookup (C.flagName fl) $ Map.findWithDefault Map.empty name prjFlags pure $ filter (input `isPrefixOf`) $ case input of ('*' : ':' : _) -> wildcardFlags ('*' : _) -> wildcardFlags _ -> normalFlags projectExeCompleter :: Completer projectExeCompleter = buildConfigCompleter $ \input -> do packages <- view $ buildConfigL . 
to (.smWanted.project) gpds <- Map.traverseWithKey (const ppGPD) packages pure $ filter (input `isPrefixOf`) $ nubOrd $ concatMap (map (C.unUnqualComponentName . fst) . C.condExecutables) gpds stack-2.15.7/src/Stack/Options/DockerParser.hs0000644000000000000000000001347414445120723017323 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Options.DockerParser ( dockerOptsParser ) where import Data.List ( intercalate ) import qualified Data.Text as T import Distribution.Version ( anyVersion ) import Options.Applicative ( Parser, auto, completer, help, listCompleter, long, metavar , option, str, value ) import Options.Applicative.Args ( argsOption ) import Options.Applicative.Builder.Extra ( dirCompleter, eitherReader', fileCompleter , firstBoolFlagsFalse, firstBoolFlagsNoDefault , firstBoolFlagsTrue, optionalFirst ) import Stack.Constants ( stackProgName ) import Stack.Docker ( dockerCmdName ) import Stack.Prelude import Stack.Options.Utils ( hideMods ) import Stack.Types.Version ( IntersectingVersionRange (..) ) import Stack.Types.Docker ( DockerMonoidRepoOrImage (..), DockerOptsMonoid (..) , dockerAutoPullArgName, dockerImageArgName , dockerContainerNameArgName, dockerDetachArgName , dockerEnvArgName, dockerPersistArgName , dockerRegistryLoginArgName, dockerRegistryPasswordArgName , dockerRegistryUsernameArgName, dockerRepoArgName , dockerRunArgsArgName, dockerMountArgName , dockerMountModeArgName, dockerNetworkArgName , dockerSetUserArgName, dockerStackExeArgName , dockerStackExeDownloadVal, dockerStackExeHostVal , dockerStackExeImageVal, parseDockerStackExe ) -- | Options parser configuration for Docker. dockerOptsParser :: Bool -> Parser DockerOptsMonoid dockerOptsParser hide0 = DockerOptsMonoid (Any False) <$> firstBoolFlagsNoDefault dockerCmdName "using a Docker container. --docker implies 'system-ghc: true'." hide <*> fmap First ( Just . 
DockerMonoidRepo <$> option str ( long (dockerOptName dockerRepoArgName) <> hide <> metavar "NAME" <> help "Docker repository name." ) <|> Just . DockerMonoidImage <$> option str ( long (dockerOptName dockerImageArgName) <> hide <> metavar "IMAGE" <> help "Exact Docker image ID (overrides docker-repo)." ) <|> pure Nothing ) <*> firstBoolFlagsNoDefault (dockerOptName dockerRegistryLoginArgName) "registry requires login." hide <*> firstStrOption ( long (dockerOptName dockerRegistryUsernameArgName) <> hide <> metavar "USERNAME" <> help "Docker registry username." ) <*> firstStrOption ( long (dockerOptName dockerRegistryPasswordArgName) <> hide <> metavar "PASSWORD" <> help "Docker registry password." ) <*> firstBoolFlagsTrue (dockerOptName dockerAutoPullArgName) "automatic pulling latest version of image." hide <*> firstBoolFlagsFalse (dockerOptName dockerDetachArgName) "running a detached Docker container." hide <*> firstBoolFlagsFalse (dockerOptName dockerPersistArgName) "not deleting container after it exits." hide <*> firstStrOption ( long (dockerOptName dockerContainerNameArgName) <> hide <> metavar "NAME" <> help "Docker container name." ) <*> firstStrOption ( long (dockerOptName dockerNetworkArgName) <> hide <> metavar "NETWORK" <> help "Docker network." ) <*> argsOption ( long (dockerOptName dockerRunArgsArgName) <> hide <> value [] <> metavar "'ARG1 [ARG2 ...]'" <> help "Additional options to pass to 'docker run'.") <*> many (option auto ( long (dockerOptName dockerMountArgName) <> hide <> metavar "(PATH | HOST-PATH:CONTAINER-PATH)" <> completer dirCompleter <> help "Mount volumes from host in container (can be specified \ \multiple times)." )) <*> firstStrOption ( long (dockerOptName dockerMountModeArgName) <> hide <> metavar "SUFFIX" <> help "Volume mount mode suffix." ) <*> many (option str ( long (dockerOptName dockerEnvArgName) <> hide <> metavar "NAME=VALUE" <> help "Set environment variable in container (can be specified \ \multiple times)." 
)) <*> optionalFirst (option (eitherReader' parseDockerStackExe) ( let specialOpts = [ dockerStackExeDownloadVal , dockerStackExeHostVal , dockerStackExeImageVal ] in long (dockerOptName dockerStackExeArgName) <> hide <> metavar (intercalate "|" (specialOpts ++ ["PATH"])) <> completer (listCompleter specialOpts <> fileCompleter) <> help ( concat [ "Location of " , stackProgName , " executable used in container." ] ) )) <*> firstBoolFlagsNoDefault (dockerOptName dockerSetUserArgName) "setting user in container to match host." hide <*> pure (IntersectingVersionRange anyVersion) where dockerOptName optName = dockerCmdName ++ "-" ++ T.unpack optName firstStrOption = optionalFirst . option str hide = hideMods hide0 stack-2.15.7/src/Stack/Options/DotParser.hs0000644000000000000000000000453114620153445016636 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -- | Function to parse command line arguments for Stack's @dot@ command and -- certain command line arguments for Stack's @ls dependencies@ command. module Stack.Options.DotParser ( dotOptsParser ) where import Data.Char ( isSpace ) import Data.List.Split ( splitOn ) import qualified Data.Set as Set import Distribution.Types.PackageName ( mkPackageName ) import Options.Applicative ( Parser, auto, help, idm, long, metavar, option, strOption , switch ) import Options.Applicative.Builder.Extra ( boolFlags ) import Stack.Options.BuildParser ( flagsParser, targetsParser ) import Stack.Prelude import Stack.Types.DotOpts ( DotOpts (..) ) -- | Parser for arguments to `stack dot` dotOptsParser :: Bool -> Parser DotOpts dotOptsParser externalDefault = DotOpts <$> includeExternal <*> includeBase <*> depthLimit <*> fmap (maybe Set.empty $ Set.fromList . splitNames) prunedPkgs <*> targetsParser <*> flagsParser <*> testTargets <*> benchTargets <*> globalHints where includeExternal = boolFlags externalDefault "external" "inclusion of external dependencies." 
idm includeBase = boolFlags True "include-base" "inclusion of dependencies on base." idm depthLimit = optional (option auto ( long "depth" <> metavar "DEPTH" <> help "Limit the depth of dependency resolution. (default: no limit)" )) prunedPkgs = optional (strOption ( long "prune" <> metavar "PACKAGES" <> help "Prune specified package(s). PACKAGES is a comma-separated list of \ \package names." )) testTargets = switch ( long "test" <> help "Consider dependencies of test components." ) benchTargets = switch ( long "bench" <> help "Consider dependencies of benchmark components." ) splitNames :: String -> [PackageName] splitNames = map ( mkPackageName . takeWhile (not . isSpace) . dropWhile isSpace ) . splitOn "," globalHints = switch ( long "global-hints" <> help "Do not require an install GHC; instead, use a hints file for \ \global packages." ) stack-2.15.7/src/Stack/Options/EvalParser.hs0000644000000000000000000000126614445120723016777 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Functions to parse command line arguments for Stack's @eval@ command. module Stack.Options.EvalParser ( evalOptsParser ) where import Options.Applicative ( Parser, metavar, strArgument ) import Stack.Eval ( EvalOpts (..) ) import Stack.Options.ExecParser ( execOptsExtraParser ) import Stack.Prelude -- | Parse command line arguments for Stack's @eval@ command. evalOptsParser :: String -- ^ metavar -> Parser EvalOpts evalOptsParser meta = EvalOpts <$> eoArgsParser <*> execOptsExtraParser where eoArgsParser :: Parser String eoArgsParser = strArgument (metavar meta) stack-2.15.7/src/Stack/Options/ExecParser.hs0000644000000000000000000000606314604306201016766 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Functions to parse command line arguments for Stack's @exec@, @ghc@, @run@, -- @runghc@ and @runhaskell@ commands. 
module Stack.Options.ExecParser ( execOptsParser , execOptsExtraParser ) where import Options.Applicative ( Parser, completer, help, idm, long, metavar, strArgument , strOption ) import Options.Applicative.Builder.Extra ( boolFlags, dirCompleter ) import Options.Applicative.Args ( argsOption ) import Stack.Exec ( ExecOpts (..), ExecOptsExtra (..), SpecialExecCmd (..) ) import Stack.Options.Completion ( projectExeCompleter ) import Stack.Prelude import Stack.Types.EnvSettings ( EnvSettings (..) ) -- | Parse command line arguments for Stack's @exec@, @ghc@, @run@, -- @runghc@ and @runhaskell@ commands. execOptsParser :: Maybe SpecialExecCmd -> Parser ExecOpts execOptsParser mcmd = ExecOpts <$> maybe eoCmdParser pure mcmd <*> eoArgsParser <*> execOptsExtraParser where eoCmdParser = ExecCmd <$> strArgument ( metavar "COMMAND" <> completer projectExeCompleter ) eoArgsParser = many (strArgument (metavar txt)) where txt = case mcmd of Nothing -> normalTxt Just ExecCmd{} -> normalTxt Just ExecRun -> "-- ARGUMENT(S) (e.g. stack run -- file.txt)" Just ExecGhc -> "-- ARGUMENT(S) (e.g. stack ghc -- X.hs -o x)" Just ExecRunGhc -> "-- ARGUMENT(S) (e.g. stack runghc -- X.hs)" normalTxt = "-- ARGUMENT(S) (e.g. stack exec ghc-pkg -- describe base)" -- | Parser for extra options to exec command execOptsExtraParser :: Parser ExecOptsExtra execOptsExtraParser = ExecOptsExtra <$> eoEnvSettingsParser <*> eoPackagesParser <*> eoRtsOptionsParser <*> eoCwdParser where eoEnvSettingsParser :: Parser EnvSettings eoEnvSettingsParser = EnvSettings True <$> boolFlags True "ghc-package-path" "setting the GHC_PACKAGE_PATH variable for the subprocess." idm <*> boolFlags True "stack-exe" "setting the STACK_EXE environment variable to the path for the \ \stack executable." 
idm <*> pure False <*> pure True eoPackagesParser :: Parser [String] eoPackagesParser = many (strOption ( long "package" <> metavar "PACKAGE(S)" <> help "Add package(s) as a list of names or identifiers separated by \ \spaces (can be specified multiple times)." )) eoRtsOptionsParser :: Parser [String] eoRtsOptionsParser = concat <$> many (argsOption ( long "rts-options" <> help "Explicit RTS options to pass to application (can be specified \ \multiple times)." <> metavar "RTSFLAG" )) eoCwdParser :: Parser (Maybe FilePath) eoCwdParser = optional (strOption ( long "cwd" <> help "Sets the working directory before executing." <> metavar "DIR" <> completer dirCompleter )) stack-2.15.7/src/Stack/Options/GhcBuildParser.hs0000644000000000000000000000215014445120723017562 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Options.GhcBuildParser ( ghcBuildParser ) where import Options.Applicative ( Parser, completeWith, help, long, metavar, option ) import Options.Applicative.Types ( readerAsk, readerError ) import Stack.Options.Utils ( hideMods ) import Stack.Prelude import Stack.Types.CompilerBuild ( CompilerBuild, parseCompilerBuild ) -- | GHC build parser ghcBuildParser :: Bool -> Parser CompilerBuild ghcBuildParser hide = option readGHCBuild ( long "ghc-build" <> metavar "BUILD" <> completeWith [ "standard" , "gmp4" , "nopie" , "tinfo6" , "tinfo6-libc6-pre232" , "tinfo6-nopie" , "ncurses6" , "int-native" , "integersimple" ] <> help "Specialized GHC build, e.g. 'gmp4' or 'standard' (usually \ \auto-detected)." 
<> hideMods hide ) where readGHCBuild = do s <- readerAsk case parseCompilerBuild s of Left e -> readerError (displayException e) Right v -> pure v stack-2.15.7/src/Stack/Options/GhciParser.hs0000644000000000000000000000673514620153445016772 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Options.GhciParser ( ghciOptsParser ) where import Options.Applicative ( Parser, completer, flag, help, idm, internal, long, metavar , strOption, switch ) import Options.Applicative.Args ( argsOption ) import Options.Applicative.Builder.Extra ( boolFlags, boolFlagsNoDefault, fileExtCompleter , textArgument, textOption ) import Stack.Config ( packagesParser ) import Stack.Ghci ( GhciOpts (..) ) import Stack.Options.BuildParser ( flagsParser ) import Stack.Options.Completion ( ghcOptsCompleter, targetCompleter ) import Stack.Prelude -- | Parser for GHCI options ghciOptsParser :: Parser GhciOpts ghciOptsParser = GhciOpts <$> many (textArgument ( metavar "TARGET/FILE" <> completer (targetCompleter <> fileExtCompleter [".hs", ".lhs"]) <> help "If none specified, use all local packages. See \ \https://docs.haskellstack.org/en/stable/build_command/#target-syntax \ \for details. If a path to a .hs or .lhs file is specified, it \ \will be loaded." )) <*> ( (\x y -> x ++ concat y) <$> flag [] ["-Wall", "-Werror"] ( long "pedantic" <> help "Turn on -Wall and -Werror." ) <*> many (argsOption ( long "ghci-options" <> metavar "OPTIONS" <> completer ghcOptsCompleter <> help "Additional options passed to GHCi (can be specified \ \multiple times)." )) ) <*> ( concat <$> many (argsOption ( long "ghc-options" <> metavar "OPTIONS" <> completer ghcOptsCompleter <> help "Additional options passed to both GHC and GHCi (can be \ \specified multiple times)." )) ) <*> flagsParser <*> optional (strOption ( long "with-ghc" <> metavar "GHC" <> help "Use this GHC to run GHCi." )) <*> ( not <$> boolFlags True "load" "load modules on start-up." 
idm ) <*> packagesParser <*> optional (textOption ( long "main-is" <> metavar "TARGET" <> completer targetCompleter <> help "Specify which target should contain the main module to load, \ \such as for an executable for test suite or benchmark." )) <*> switch ( long "load-local-deps" <> help "Load all local dependencies of your targets." ) -- TODO: deprecate this? probably useless. <*> switch ( long "skip-intermediate-deps" <> help "Skip loading intermediate target dependencies." <> internal ) <*> optional (boolFlagsNoDefault "package-hiding" "package hiding" idm) <*> switch ( long "no-build" <> help "Don't build before launching GHCi." <> internal ) <*> switch ( long "only-main" <> help "Only load and import the main module. If no main module, no \ \modules will be loaded." ) stack-2.15.7/src/Stack/Options/GhcVariantParser.hs0000644000000000000000000000161414445120723020133 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Options.GhcVariantParser ( ghcVariantParser ) where import Options.Applicative ( Parser, help, long, metavar, option, readerError ) import Options.Applicative.Types ( readerAsk ) import Stack.Prelude import Stack.Options.Utils ( hideMods ) import Stack.Types.GHCVariant ( GHCVariant, parseGHCVariant ) -- | GHC variant parser ghcVariantParser :: Bool -> Parser GHCVariant ghcVariantParser hide = option readGHCVariant ( long "ghc-variant" <> metavar "VARIANT" <> help "Specialized GHC variant, e.g. int-native or integersimple \ \(incompatible with --system-ghc)." <> hideMods hide ) where readGHCVariant = do s <- readerAsk case parseGHCVariant s of Left e -> readerError (displayException e) Right v -> pure v stack-2.15.7/src/Stack/Options/GlobalParser.hs0000644000000000000000000001403614620153445017311 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} -- | Functions to parse Stack's \'global\' command line arguments. 
module Stack.Options.GlobalParser ( globalOptsFromMonoid , globalOptsParser ) where import Options.Applicative ( Parser, ReadM, auto, completer, help, hidden, internal , long, metavar, option, strOption, value ) import Options.Applicative.Builder.Extra ( fileExtCompleter, firstBoolFlagsFalse , firstBoolFlagsNoDefault, firstBoolFlagsTrue, optionalFirst ) import Options.Applicative.Types ( readerAsk ) import Path.IO ( getCurrentDir, resolveDir', resolveFile' ) import qualified Stack.Docker as Docker import Stack.Prelude import Stack.Options.ConfigParser ( configOptsParser ) import Stack.Options.LogLevelParser ( logLevelOptsParser ) import Stack.Options.ResolverParser ( abstractResolverOptsParser, compilerOptsParser ) import Stack.Options.Utils ( GlobalOptsContext (..), hideMods ) import Stack.Types.GlobalOpts ( GlobalOpts (..) ) import Stack.Types.GlobalOptsMonoid ( GlobalOptsMonoid (..) ) import Stack.Types.LockFileBehavior ( LockFileBehavior (..), readLockFileBehavior ) import Stack.Types.StackYamlLoc ( StackYamlLoc (..) ) import Stack.Types.Docker ( dockerEntrypointArgName ) -- | Parser for global command-line options. globalOptsParser :: FilePath -> GlobalOptsContext -> Parser GlobalOptsMonoid globalOptsParser currentDir kind = GlobalOptsMonoid <$> optionalFirst (strOption ( long Docker.reExecArgName <> hidden <> internal )) <*> optionalFirst (option auto ( long dockerEntrypointArgName <> hidden <> internal )) <*> (First <$> logLevelOptsParser hide0) <*> firstBoolFlagsTrue "time-in-log" "inclusion of timings in logs, for the purposes of using diff with \ \logs." hide <*> firstBoolFlagsFalse "rsl-in-log" "inclusion of raw snapshot layer (rsl) in logs." hide <*> firstBoolFlagsFalse "plan-in-log" "inclusion of information about build plan construction in logs." 
hide <*> configOptsParser currentDir kind <*> optionalFirst (abstractResolverOptsParser hide0) <*> pure (First Nothing) <*> optionalFirst (compilerOptsParser hide0) -- resolver root is only set via the script command <*> firstBoolFlagsNoDefault "terminal" "overriding terminal detection in the case of running in a false \ \terminal." hide <*> option readStyles ( long "stack-colors" <> long "stack-colours" <> metavar "STYLES" <> value mempty <> help "Specify Stack's output styles; STYLES is a colon-delimited \ \sequence of key=value, where 'key' is a style name and 'value' \ \is a semicolon-delimited list of 'ANSI' SGR (Select Graphic \ \Rendition) control codes (in decimal). Use 'stack ls \ \stack-colors --basic' to see the current sequence. In shells \ \where a semicolon is a command separator, enclose STYLES in \ \quotes." <> hide ) <*> optionalFirst (option auto ( long "terminal-width" <> metavar "INT" <> help "Specify the width of the terminal, used for pretty-print \ \messages." <> hide )) <*> optionalFirst (strOption ( long "stack-yaml" <> metavar "STACK-YAML" <> completer (fileExtCompleter [".yaml"]) <> help "Override project stack.yaml file (overrides any STACK_YAML \ \environment variable)." <> hide )) <*> optionalFirst (option readLockFileBehavior ( long "lock-file" <> help "Specify how to interact with lock files. (default: if \ \resolver is overridden: read-only; otherwise: read/write)" <> hide )) where hide = hideMods hide0 hide0 = kind /= OuterGlobalOpts -- | Create GlobalOpts from GlobalOptsMonoid. 
globalOptsFromMonoid :: MonadIO m => Bool -> GlobalOptsMonoid -> m GlobalOpts globalOptsFromMonoid defaultTerminal globalMonoid = do resolver <- for (getFirst globalMonoid.resolver) $ \ur -> do root <- case globalMonoid.resolverRoot of First Nothing -> getCurrentDir First (Just dir) -> resolveDir' dir resolvePaths (Just root) ur stackYaml <- case getFirst globalMonoid.stackYaml of Nothing -> pure SYLDefault Just fp -> SYLOverride <$> resolveFile' fp let lockFileBehavior = let defLFB = case getFirst globalMonoid.resolver of Nothing -> LFBReadWrite _ -> LFBReadOnly in fromFirst defLFB globalMonoid.lockFileBehavior pure GlobalOpts { reExecVersion = getFirst globalMonoid.reExecVersion , dockerEntrypoint = getFirst globalMonoid.dockerEntrypoint , logLevel = fromFirst defaultLogLevel globalMonoid.logLevel , timeInLog = fromFirstTrue globalMonoid.timeInLog , rslInLog = fromFirstFalse globalMonoid.rslInLog , planInLog = fromFirstFalse globalMonoid.planInLog , configMonoid = globalMonoid.configMonoid , resolver , compiler = getFirst globalMonoid.compiler , terminal = fromFirst defaultTerminal globalMonoid.terminal , stylesUpdate = globalMonoid.styles , termWidthOpt = getFirst globalMonoid.termWidthOpt , stackYaml , lockFileBehavior } -- | Default logging level should be something useful but not crazy. defaultLogLevel :: LogLevel defaultLogLevel = LevelInfo readStyles :: ReadM StylesUpdate readStyles = parseStylesUpdateFromString <$> readerAsk stack-2.15.7/src/Stack/Options/HaddockParser.hs0000644000000000000000000000147114604306201017435 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Options.HaddockParser ( haddockOptsParser ) where import Options.Applicative ( Parser, help, long, metavar ) import Options.Applicative.Args ( argsOption ) import Stack.Options.Utils ( hideMods ) import Stack.Prelude import Stack.Types.BuildOptsMonoid ( HaddockOptsMonoid (..) ) -- | Parser for haddock arguments. 
haddockOptsParser :: Bool -> Parser HaddockOptsMonoid haddockOptsParser hide0 = HaddockOptsMonoid <$> fmap (fromMaybe []) ( optional (argsOption ( long "haddock-arguments" <> metavar "HADDOCK_ARGS" <> help "Arguments passed to the Haddock program." <> hide )) ) where hide = hideMods hide0 stack-2.15.7/src/Stack/Options/HpcReportParser.hs0000644000000000000000000000357614445120723020024 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Options.HpcReportParser ( hpcReportOptsParser , pvpBoundsOption ) where import qualified Data.Text as T import Options.Applicative ( Parser, completer, completeWith, help, long, metavar , option, readerError, strOption, switch ) import Options.Applicative.Builder.Extra ( dirCompleter, fileExtCompleter, textArgument ) import Options.Applicative.Types ( readerAsk ) import Stack.Coverage ( HpcReportOpts (..) ) import Stack.Options.Completion ( targetCompleter ) import Stack.Prelude import Stack.Types.PvpBounds ( PvpBounds, parsePvpBounds ) -- | Parser for @stack hpc report@. hpcReportOptsParser :: Parser HpcReportOpts hpcReportOptsParser = HpcReportOpts <$> many (textArgument ( metavar "TARGET_OR_TIX" <> completer (targetCompleter <> fileExtCompleter [".tix"]) )) <*> switch ( long "all" <> help "Use results from all packages and components involved in \ \previous --coverage run." ) <*> optional (strOption ( long "destdir" <> metavar "DIR" <> completer dirCompleter <> help "Output directory for HTML report." )) <*> switch ( long "open" <> help "Open the report in the browser." ) pvpBoundsOption :: Parser PvpBounds pvpBoundsOption = option readPvpBounds ( long "pvp-bounds" <> metavar "PVP-BOUNDS" <> completeWith ["none", "lower", "upper", "both"] <> help "How PVP version bounds should be added to Cabal file: none, lower, \ \upper, both." 
) where readPvpBounds = do s <- readerAsk case parsePvpBounds $ T.pack s of Left e -> readerError e Right v -> pure v stack-2.15.7/src/Stack/Options/InitParser.hs0000644000000000000000000000265014445120723017011 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Functions to parse command line arguments for Stack's @init@ and @new@ -- commands. module Stack.Options.InitParser ( initOptsParser ) where import Options.Applicative ( Parser, completer, help, long, metavar, switch ) import Options.Applicative.Builder.Extra ( dirCompleter, textArgument ) import Stack.Init ( InitOpts (..) ) import Stack.Prelude -- | Parse command line arguments for Stack's @init@ and @new@ commands. initOptsParser :: Parser InitOpts initOptsParser = InitOpts <$> searchDirs <*> omitPackages <*> overwrite <*> fmap not ignoreSubDirs where searchDirs = many (textArgument ( metavar "DIR(S)" <> completer dirCompleter <> help "Directory, or directories, to include in the search for Cabal \ \files, when initialising. The default is the current directory." )) ignoreSubDirs = switch ( long "ignore-subdirs" <> help "Do not search for Cabal files in subdirectories, when \ \initialising." ) overwrite = switch ( long "force" <> help "Force an initialisation that overwrites any existing stack.yaml \ \file." ) omitPackages = switch ( long "omit-packages" <> help "Exclude conflicting or incompatible user packages, when \ \initialising." ) stack-2.15.7/src/Stack/Options/LogLevelParser.hs0000644000000000000000000000335014445120723017615 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Options.LogLevelParser ( logLevelOptsParser ) where import qualified Data.Text as T import Options.Applicative ( Parser, completeWith, flag', help, long, metavar, short , strOption ) import Stack.Options.Utils ( hideMods ) import Stack.Prelude -- | Parser for a logging level. 
logLevelOptsParser :: Bool -> Parser (Maybe LogLevel) logLevelOptsParser hide = fmap (Just . parse) (strOption ( long "verbosity" <> metavar "VERBOSITY" <> completeWith ["silent", "error", "warn", "info", "debug"] <> help "Set verbosity level: silent, error, warn, info or debug." <> hideMods hide )) <|> flag' (Just verboseLevel) ( short 'v' <> long "verbose" <> help ( "Enable verbose mode: verbosity level \"" <> showLevel verboseLevel <> "\"." ) <> hideMods hide ) <|> flag' (Just silentLevel) ( long "silent" <> help ( "Enable silent mode: verbosity level \"" <> showLevel silentLevel <> "\"." ) <> hideMods hide ) <|> pure Nothing where verboseLevel = LevelDebug silentLevel = LevelOther "silent" showLevel l = case l of LevelDebug -> "debug" LevelInfo -> "info" LevelWarn -> "warn" LevelError -> "error" LevelOther x -> T.unpack x parse s = case s of "debug" -> LevelDebug "info" -> LevelInfo "warn" -> LevelWarn "error" -> LevelError _ -> LevelOther (T.pack s) stack-2.15.7/src/Stack/Options/LsParser.hs0000644000000000000000000001636014620153445016471 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -- | Function to parse command line arguments for Stack's @ls@ command. module Stack.Options.LsParser ( lsOptsParser ) where import qualified Data.Text as T import qualified Options.Applicative as OA import Options.Applicative ( idm ) import Options.Applicative.Builder.Extra ( boolFlags, textOption ) import Stack.Constants ( globalFooter ) import Stack.Ls ( ListDepsFormat (..), ListDepsFormatOpts (..) , ListDepsOpts (..), ListDepsTextFilter (..) , ListStylesOpts (..), ListToolsOpts (..), LsCmdOpts (..) , LsCmds (..), LsView (..), SnapshotOpts (..) ) import Stack.Options.DotParser ( dotOptsParser ) import Stack.Prelude -- | Parse command line arguments for Stack's @ls@ command. 
lsOptsParser :: OA.Parser LsCmdOpts lsOptsParser = LsCmdOpts <$> OA.hsubparser (lsSnapCmd <> lsDepsCmd <> lsStylesCmd <> lsToolsCmd) lsSnapCmd :: OA.Mod OA.CommandFields LsCmds lsSnapCmd = OA.command "snapshots" $ OA.info lsCmdOptsParser $ OA.progDesc "View snapshots. (default: local)" <> OA.footer localSnapshotMsg lsDepsCmd :: OA.Mod OA.CommandFields LsCmds lsDepsCmd = OA.command "dependencies" $ OA.info lsDepOptsParser $ OA.progDesc "View the dependencies." <> OA.footer globalFooter lsStylesCmd :: OA.Mod OA.CommandFields LsCmds lsStylesCmd = OA.command "stack-colors" (OA.info lsStylesOptsParser (OA.progDesc "View Stack's output styles.")) <> OA.command "stack-colours" (OA.info lsStylesOptsParser (OA.progDesc "View Stack's output styles (alias for \ \'stack-colors').")) lsToolsCmd :: OA.Mod OA.CommandFields LsCmds lsToolsCmd = OA.command "tools" (OA.info lsToolsOptsParser (OA.progDesc "View Stack's installed tools.")) lsCmdOptsParser :: OA.Parser LsCmds lsCmdOptsParser = LsSnapshot <$> lsViewSnapCmd lsDepOptsParser :: OA.Parser LsCmds lsDepOptsParser = LsDependencies <$> listDepsOptsParser lsStylesOptsParser :: OA.Parser LsCmds lsStylesOptsParser = LsStyles <$> listStylesOptsParser lsToolsOptsParser :: OA.Parser LsCmds lsToolsOptsParser = LsTools <$> listToolsOptsParser lsViewSnapCmd :: OA.Parser SnapshotOpts lsViewSnapCmd = SnapshotOpts <$> ( OA.hsubparser (lsViewRemoteCmd <> lsViewLocalCmd) <|> pure Local) <*> OA.switch ( OA.long "lts" <> OA.short 'l' <> OA.help "Only show LTS Haskell snapshots." ) <*> OA.switch ( OA.long "nightly" <> OA.short 'n' <> OA.help "Only show Nightly snapshots." ) lsViewRemoteCmd :: OA.Mod OA.CommandFields LsView lsViewRemoteCmd = OA.command "remote" $ OA.info (pure Remote) $ OA.progDesc "View remote snapshots." <> OA.footer pagerMsg pagerMsg :: String pagerMsg = "On a terminal, uses a pager, if one is available. Respects the PAGER \ \environment variable (subject to that, prefers pager 'less' to 'more')." 
lsViewLocalCmd :: OA.Mod OA.CommandFields LsView lsViewLocalCmd = OA.command "local" $ OA.info (pure Local) $ OA.progDesc "View local snapshots." <> OA.footer localSnapshotMsg localSnapshotMsg :: String localSnapshotMsg = "A local snapshot is identified by a hash code. " <> pagerMsg -- | Parser for arguments to `stack ls dependencies`. listDepsOptsParser :: OA.Parser ListDepsOpts listDepsOptsParser = OA.subparser ( formatSubCommand "text" "Print dependencies as text (default)." listDepsTextParser <> formatSubCommand "cabal" "Print dependencies as exact Cabal constraints." listDepsConstraintsParser <> formatSubCommand "tree" "Print dependencies as tree." listDepsTreeParser <> formatSubCommand "json" "Print dependencies as JSON." listDepsJsonParser ) <|> toListDepsOptsParser listDepsTextParser formatSubCommand :: String -> String -> OA.Parser ListDepsFormat -> OA.Mod OA.CommandFields ListDepsOpts formatSubCommand cmd desc formatParser = OA.command cmd (OA.info (toListDepsOptsParser formatParser) (OA.progDesc desc)) listDepsTextParser :: OA.Parser ListDepsFormat listDepsTextParser = ListDepsText <$> listDepsFormatOptsParser <*> textFilterParser textFilterParser :: OA.Parser [ListDepsTextFilter] textFilterParser = many (OA.option parseListDepsTextFilter ( OA.long "filter" <> OA.metavar "ITEM" <> OA.help "Item to be filtered out of the results, if present, being either \ \$locals (for all local packages) or a package name (can be \ \specified multiple times)." )) parseListDepsTextFilter :: OA.ReadM ListDepsTextFilter parseListDepsTextFilter = OA.eitherReader $ \s -> if s == "$locals" then Right FilterLocals else case parsePackageName s of Just pkgName -> Right $ FilterPackage pkgName Nothing -> Left $ s <> " is not a valid package name." 
listDepsConstraintsParser :: OA.Parser ListDepsFormat listDepsConstraintsParser = pure ListDepsConstraints listDepsTreeParser :: OA.Parser ListDepsFormat listDepsTreeParser = ListDepsTree <$> listDepsFormatOptsParser listDepsJsonParser :: OA.Parser ListDepsFormat listDepsJsonParser = pure ListDepsJSON listDepsFormatOptsParser :: OA.Parser ListDepsFormatOpts listDepsFormatOptsParser = ListDepsFormatOpts <$> separatorParser <*> licenseParser separatorParser :: OA.Parser Text separatorParser = fmap escapeSep ( textOption ( OA.long "separator" <> OA.metavar "SEP" <> OA.help "Separator between package name and package version." <> OA.value " " <> OA.showDefault ) ) where escapeSep s = T.replace "\\t" "\t" (T.replace "\\n" "\n" s) licenseParser :: OA.Parser Bool licenseParser = boolFlags False "license" "printing of dependency licenses instead of versions." idm toListDepsOptsParser :: OA.Parser ListDepsFormat -> OA.Parser ListDepsOpts toListDepsOptsParser formatParser = ListDepsOpts <$> formatParser <*> dotOptsParser True listStylesOptsParser :: OA.Parser ListStylesOpts listStylesOptsParser = ListStylesOpts <$> boolFlags False "basic" "a basic report of the styles used. The default is a fuller one." idm <*> boolFlags True "sgr" "the provision of the equivalent SGR instructions (provided by \ \default). Flag ignored for a basic report." idm <*> boolFlags True "example" "the provision of an example of the applied style (provided by default \ \for colored output). Flag ignored for a basic report." idm listToolsOptsParser :: OA.Parser ListToolsOpts listToolsOptsParser = ListToolsOpts <$> OA.strOption ( OA.long "filter" <> OA.metavar "TOOL_NAME" <> OA.value "" <> OA.help "Filter by a tool name (eg 'ghc', 'ghc-git' or 'msys2') \ \- case sensitive. 
(default: no filter)" ) stack-2.15.7/src/Stack/Options/NewParser.hs0000644000000000000000000000354514604306201016635 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Options.NewParser ( newOptsParser ) where import qualified Data.Map.Strict as M import Options.Applicative ( Parser, help, idm, long, metavar, short, switch ) import Options.Applicative.Builder.Extra ( boolFlags ) import Stack.Init ( InitOpts ) import Stack.New ( NewOpts (..) ) import Stack.Options.InitParser ( initOptsParser ) import Stack.Prelude import Stack.Types.PackageName ( packageNameArgument ) import Stack.Types.TemplateName ( templateNameArgument, templateParamArgument ) -- | Parser for @stack new@. newOptsParser :: Parser (NewOpts, InitOpts) newOptsParser = (,) <$> newOpts <*> initOptsParser where newOpts = NewOpts <$> packageNameArgument ( metavar "PACKAGE_NAME" <> help "A valid package name." ) <*> switch ( long "bare" <> help "Do not create a subdirectory for the project." ) <*> boolFlags True "init" "the initialisation of the project for use with Stack." idm <*> optional (templateNameArgument ( metavar "TEMPLATE_NAME" <> help "Name of a template - can take the form\ \ [[service:]username/]template with optional service name\ \ (github, gitlab, or bitbucket) and username for the \ \service; or, a local filename such as foo.hsfiles or ~/foo; \ \or, a full URL such as https://example.com/foo.hsfiles." )) <*> fmap M.fromList (many (templateParamArgument ( short 'p' <> long "param" <> metavar "KEY:VALUE" <> help "Parameter for the template in the format key:value." 
))) stack-2.15.7/src/Stack/Options/NixParser.hs0000644000000000000000000000464214604306201016641 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} module Stack.Options.NixParser ( nixOptsParser ) where import qualified Data.Text as T import Options.Applicative ( Parser, completer, help, long, metavar, option, str ) import Options.Applicative.Args ( argsOption ) import Options.Applicative.Builder.Extra ( fileExtCompleter, firstBoolFlagsFalse , firstBoolFlagsNoDefault, optionalFirst ) import Stack.Nix ( nixCmdName ) import Stack.Options.Utils ( hideMods ) import Stack.Prelude import Stack.Types.Nix ( NixOptsMonoid (..) ) nixOptsParser :: Bool -> Parser NixOptsMonoid nixOptsParser hide0 = overrideActivation <$> ( NixOptsMonoid <$> firstBoolFlagsNoDefault nixCmdName "use of a Nix-shell. Implies 'system-ghc: true'." hide <*> firstBoolFlagsNoDefault "nix-pure" "use of a pure Nix-shell. Implies '--nix' and 'system-ghc: true'." hide <*> optionalFirst (textArgsOption ( long "nix-packages" <> metavar "NAMES" <> help "List of packages that should be available in the nix-shell \ \(space separated)." <> hide )) <*> optionalFirst (option str ( long "nix-shell-file" <> metavar "FILE" <> completer (fileExtCompleter [".nix"]) <> help "Nix file to be used to launch a nix-shell (for regular Nix \ \users)." <> hide )) <*> optionalFirst (textArgsOption ( long "nix-shell-options" <> metavar "OPTIONS" <> help "Additional options passed to nix-shell." <> hide )) <*> optionalFirst (textArgsOption ( long "nix-path" <> metavar "PATH_OPTIONS" <> help "Additional options to override NIX_PATH parts (notably \ \'nixpkgs')." <> hide )) <*> firstBoolFlagsFalse "nix-add-gc-roots" "addition of packages to the nix GC roots so nix-collect-garbage does \ \not remove them." hide ) where hide = hideMods hide0 overrideActivation m = if fromFirst False m.pureShell then m { enable = (First . Just . 
fromFirst True) m.enable } else m textArgsOption = fmap (map T.pack) . argsOption stack-2.15.7/src/Stack/Options/PackageParser.hs0000644000000000000000000000221214604306201017425 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Options.PackageParser ( readFlag ) where import qualified Data.Map as Map import Options.Applicative ( ReadM, readerError ) import Options.Applicative.Types ( readerAsk ) import Stack.Prelude import Stack.Types.BuildOptsCLI ( ApplyCLIFlag (..) ) -- | Parser for package:[-]flag readFlag :: ReadM (Map ApplyCLIFlag (Map FlagName Bool)) readFlag = do s <- readerAsk case break (== ':') s of (pn, ':':mflag) -> do pn' <- case parsePackageName pn of Nothing | pn == "*" -> pure ACFAllProjectPackages | otherwise -> readerError $ "Invalid package name: " ++ pn Just x -> pure $ ACFByName x let (b, flagS) = case mflag of '-':x -> (False, x) _ -> (True, mflag) flagN <- case parseFlagName flagS of Nothing -> readerError $ "Invalid flag name: " ++ flagS Just x -> pure x pure $ Map.singleton pn' $ Map.singleton flagN b _ -> readerError "Must have a colon." stack-2.15.7/src/Stack/Options/PathParser.hs0000644000000000000000000000146614604306201017000 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Functions to parse command line arguments for Stack's @path@ command. module Stack.Options.PathParser ( pathParser ) where import qualified Data.Text as T import Options.Applicative ( Parser, flag, help, long ) import Stack.Path ( pathsFromConfig, pathsFromEnvConfig, pathsFromRunner ) import Stack.Prelude -- | Parse command line arguments for Stack's @path@ command. 
pathParser :: Parser [Text] pathParser = mapMaybeA ( \(desc, name) -> flag Nothing (Just name) ( long (T.unpack name) <> help desc ) ) paths where toDescName (desc, name, _) = (desc, name) paths = pathsFromRunner : map toDescName pathsFromConfig <> map toDescName pathsFromEnvConfig stack-2.15.7/src/Stack/Options/ResolverParser.hs0000644000000000000000000000250714620153445017712 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} module Stack.Options.ResolverParser ( abstractResolverOptsParser , compilerOptsParser , readCompilerVersion ) where import qualified Data.Text as T import Options.Applicative ( Parser, ReadM, help, long, metavar, option, readerError ) import Options.Applicative.Types ( readerAsk ) import Stack.Options.Utils ( hideMods ) import Stack.Prelude import Stack.Types.Resolver ( AbstractResolver, readAbstractResolver ) -- | Parser for the snapshot abstractResolverOptsParser :: Bool -> Parser (Unresolved AbstractResolver) abstractResolverOptsParser hide = option readAbstractResolver ( long "snapshot" <> long "resolver" <> metavar "SNAPSHOT" <> help "Override snapshot in the project configuration file." <> hideMods hide ) compilerOptsParser :: Bool -> Parser WantedCompiler compilerOptsParser hide = option readCompilerVersion ( long "compiler" <> metavar "COMPILER" <> help "Use the specified compiler." <> hideMods hide ) readCompilerVersion :: ReadM WantedCompiler readCompilerVersion = do s <- readerAsk case parseWantedCompiler (T.pack s) of Left{} -> readerError $ "Failed to parse compiler: " ++ s Right x -> pure x stack-2.15.7/src/Stack/Options/SDistParser.hs0000644000000000000000000000243614445120723017136 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Functions to parse command line arguments for Stack's @sdist@ and @upload@ -- commands. 
module Stack.Options.SDistParser ( sdistOptsParser ) where import Options.Applicative ( Parser, completer, help, idm, long, metavar, strArgument , strOption, switch ) import Options.Applicative.Builder.Extra ( boolFlags, dirCompleter ) import Stack.Prelude import Stack.SDist ( SDistOpts (..) ) import Stack.Options.HpcReportParser ( pvpBoundsOption ) -- | Parse command line arguments for Stack's @sdist@ and @upload@ commands. sdistOptsParser :: Parser SDistOpts sdistOptsParser = SDistOpts <$> many (strArgument ( metavar "DIR" <> completer dirCompleter )) <*> optional pvpBoundsOption <*> ignoreCheckSwitch <*> buildPackageOption <*> optional (strOption ( long "tar-dir" <> help "If specified, copy all the tar to this directory." )) where ignoreCheckSwitch = switch ( long "ignore-check" <> help "Do not check package for common mistakes." ) buildPackageOption = boolFlags False "test-tarball" "building of the resulting tarball." idm stack-2.15.7/src/Stack/Options/ScriptParser.hs0000644000000000000000000000474514603065443017364 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Functions to parse command line arguments for Stack's @script@ command. module Stack.Options.ScriptParser ( scriptOptsParser ) where import Options.Applicative ( Parser, completer, eitherReader, flag', help, long, metavar , option, strArgument, strOption ) import Options.Applicative.Builder.Extra ( boolFlags, fileExtCompleter ) import Stack.Options.Completion ( ghcOptsCompleter ) import Stack.Prelude import Stack.Script ( ScriptExecute (..), ScriptOpts (..), ShouldRun (..) ) -- | Parse command line arguments for Stack's @script@ command. scriptOptsParser :: Parser ScriptOpts scriptOptsParser = ScriptOpts <$> many (strOption ( long "package" <> metavar "PACKAGE" <> help "Add a package (can be specified multiple times)." )) <*> strArgument ( metavar "FILE" <> completer (fileExtCompleter [".hs", ".lhs"]) ) <*> many (strArgument ( metavar "-- ARGUMENT(S) (e.g. 
stack script X.hs -- argument(s) to \ \program)." )) <*> ( flag' SECompile ( long "compile" <> help "Compile the script without optimization and run the \ \executable." ) <|> flag' SEOptimize ( long "optimize" <> help "Compile the script with optimization and run the \ \executable." ) <|> pure SEInterpret ) <*> boolFlags False "use-root" "writing of all compilation outputs to a script-specific location in \ \the scripts directory of the Stack root." mempty <*> many (strOption ( long "ghc-options" <> metavar "OPTIONS" <> completer ghcOptsCompleter <> help "Additional options passed to GHC (can be specified multiple \ \times)." )) <*> many (option extraDepRead ( long "extra-dep" <> metavar "PACKAGE-VERSION" <> help "Extra dependencies to be added to the snapshot." )) <*> ( flag' NoRun ( long "no-run" <> help "Do not run, just compile." ) <|> pure YesRun ) where extraDepRead = eitherReader $ mapLeft show . parsePackageIdentifierRevision . fromString stack-2.15.7/src/Stack/Options/SetupParser.hs0000644000000000000000000000350414620153474017211 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE OverloadedStrings #-} -- | Functions to parse command line arguments for Stack's @setup@ command. module Stack.Options.SetupParser ( setupOptsParser ) where import qualified Data.Text as T import qualified Options.Applicative as OA import qualified Options.Applicative.Builder.Extra as OA import qualified Options.Applicative.Types as OA import Stack.Prelude import Stack.SetupCmd ( SetupCmdOpts (..) ) -- | Parse command line arguments for Stack's @setup@ command. setupOptsParser :: OA.Parser SetupCmdOpts setupOptsParser = SetupCmdOpts <$> OA.optional (OA.argument readVersion ( OA.metavar "GHC_VERSION" <> OA.help "Version of GHC to install, e.g. 9.6.5. (default: install \ \the version implied by the resolver)" )) <*> OA.boolFlags False "reinstall" "reinstalling GHC, even if available (incompatible with --system-ghc)." 
OA.idm <*> OA.optional (OA.strOption ( OA.long "ghc-bindist" <> OA.metavar "URL" <> OA.help "Alternate GHC binary distribution (requires custom \ \--ghc-variant)." )) <*> OA.many (OA.strOption ( OA.long "ghcjs-boot-options" <> OA.metavar "GHCJS_BOOT" <> OA.help "Additional ghcjs-boot options." )) <*> OA.boolFlags True "ghcjs-boot-clean" "Control if ghcjs-boot should have --clean option present." OA.idm where readVersion = do s <- OA.readerAsk case parseWantedCompiler ("ghc-" <> T.pack s) of Left _ -> case parseWantedCompiler (T.pack s) of Left _ -> OA.readerError $ "Invalid version: " ++ s Right x -> pure x Right x -> pure x stack-2.15.7/src/Stack/Options/TestParser.hs0000644000000000000000000000321314604306201017013 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Options.TestParser ( testOptsParser ) where import Options.Applicative ( Parser, auto, flag', help, long, metavar, option ) import Options.Applicative.Args ( argsOption ) import Options.Applicative.Builder.Extra ( firstBoolFlagsTrue, optionalFirst, optionalFirstFalse ) import Stack.Options.Utils ( hideMods ) import Stack.Prelude import Stack.Types.BuildOptsMonoid ( TestOptsMonoid (..) ) -- | Parser for test arguments. -- FIXME hide args testOptsParser :: Bool -> Parser TestOptsMonoid testOptsParser hide0 = TestOptsMonoid <$> firstBoolFlagsTrue "rerun-tests" "running already successful tests." hide <*> fmap concat (many (argsOption ( long "test-arguments" <> long "ta" <> metavar "TEST_ARGS" <> help "Arguments passed in to the test suite program." <> hide ))) <*> optionalFirstFalse (flag' True ( long "coverage" <> help "Generate a code coverage report." <> hide )) <*> optionalFirstFalse (flag' True ( long "no-run-tests" <> help "Disable running of tests. (Tests will still be built.)" <> hide )) <*> optionalFirst (option (fmap Just auto) ( long "test-suite-timeout" <> help "Maximum test suite run time in seconds." 
<> hide )) <*> firstBoolFlagsTrue "tests-allow-stdin" "allow standard input in test executables." hide where hide = hideMods hide0 stack-2.15.7/src/Stack/Options/UnpackParser.hs0000644000000000000000000000421014604306201017313 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Functions to parse command line arguments for Stack's @unpack@ command. module Stack.Options.UnpackParser ( unpackOptsParser ) where import qualified Data.Text as T import Options.Applicative ( Parser, ReadM, argument, eitherReader, help, long, metavar , option, switch ) import Path ( SomeBase (..), parseSomeDir ) import Stack.Prelude import Stack.Unpack ( UnpackOpts (..), UnpackTarget) -- | Parse command line arguments for Stack's @unpack@ command. unpackOptsParser :: Parser UnpackOpts unpackOptsParser = UnpackOpts <$> some unpackTargetParser <*> areCandidatesParser <*> optional dirParser unpackTargetParser :: Parser UnpackTarget unpackTargetParser = argument unpackTargetReader ( metavar "TARGET" <> help "A package or package candidate (can be specified multiple times). A \ \package can be referred to by name only or by identifier \ \(including, optionally, a revision as '@rev:' or \ \'@sha256:'). A package candidate is referred to by its \ \identifier." ) unpackTargetReader :: ReadM UnpackTarget unpackTargetReader = eitherReader $ \s -> case parsePackageIdentifierRevision $ T.pack s of Right pir -> Right (Right pir) Left _ -> case parsePackageName s of Just pn -> Right (Left pn) Nothing -> Left $ s <> " is an invalid way to refer to a package or package \ \candidate to be unpacked." areCandidatesParser :: Parser Bool areCandidatesParser = switch ( long "candidate" <> help "Each target is a package candidate." ) dirParser :: Parser (SomeBase Dir) dirParser = option dirReader ( long "to" <> metavar "DIR" <> help "Optionally, a directory to unpack into. A target will be unpacked \ \ into a subdirectory." 
) dirReader :: ReadM (SomeBase Dir) dirReader = eitherReader $ \s -> case parseSomeDir s of Just dir -> Right dir Nothing -> Left $ s <> " is an invalid way to refer to a directory." stack-2.15.7/src/Stack/Options/UpgradeParser.hs0000644000000000000000000000571214445120723017477 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Functions to parse command line arguments for Stack's @upgrade@ command. module Stack.Options.UpgradeParser ( upgradeOptsParser ) where import Options.Applicative ( Parser, flag', help, idm, long, metavar, showDefault , strOption, switch, value ) import Options.Applicative.Builder.Extra ( boolFlags ) import Stack.Prelude import Stack.Upgrade ( BinaryOpts (..), SourceOpts (..), UpgradeOpts (..) ) -- | Parse command line arguments for Stack's @upgrade@ command. upgradeOptsParser :: Bool -- ^ The default for --[no]-only-local-bin -> Parser UpgradeOpts upgradeOptsParser onlyLocalBin = UpgradeOpts <$> (sourceOnly <|> optional binaryOpts) <*> (binaryOnly <|> optional sourceOpts) where binaryOnly = flag' Nothing ( long "binary-only" <> help "Do not use a source upgrade path." ) sourceOnly = flag' Nothing ( long "source-only" <> help "Do not use a binary upgrade path." ) binaryOpts = BinaryOpts <$> optional (strOption ( long "binary-platform" <> help "Platform type for archive to download." <> metavar "PLATFORM" )) <*> switch ( long "force-download" <> help "Download the latest available Stack executable, even if not \ \newer." ) <*> boolFlags onlyLocalBin "only-local-bin" "downloading only to Stack's local binary directory" idm <*> optional (strOption ( long "binary-version" <> help "Download a specific Stack version, even if already \ \installed." <> metavar "VERSION" )) <*> optional (strOption ( long "github-org" <> help "GitHub organization name." <> metavar "USER" )) <*> optional (strOption ( long "github-repo" <> help "GitHub repository name." 
<> metavar "REPO" )) sourceOpts = SourceOpts <$> ( ( \fromGit repo branch -> if fromGit then Just (repo, branch) else Nothing ) <$> switch ( long "git" <> help "Clone from Git instead of downloading from Hackage \ \(more dangerous)." ) <*> strOption ( long "git-repo" <> help "Clone from specified Git repository." <> metavar "URL" <> value "https://github.com/commercialhaskell/stack" <> showDefault ) <*> strOption ( long "git-branch" <> help "Clone from specified Git branch." <> metavar "BRANCH" <> value "master" <> showDefault ) ) stack-2.15.7/src/Stack/Options/UploadParser.hs0000644000000000000000000000503414620153445017333 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Functions to parse command line arguments for Stack's @upload@ command. module Stack.Options.UploadParser ( uploadOptsParser ) where import qualified Data.Text as T import Options.Applicative ( Parser, completeWith, completer, flag, help, idm, long , metavar, option, readerError, short, strArgument, strOption , switch ) import Options.Applicative.Builder.Extra ( boolFlags, dirCompleter ) import Options.Applicative.Types ( readerAsk ) import Stack.Prelude import Stack.Upload ( UploadOpts (..), UploadVariant (..) ) import Stack.Types.PvpBounds ( PvpBounds (..), parsePvpBounds ) -- | Parse command line arguments for Stack's @upload@ command. uploadOptsParser :: Parser UploadOpts uploadOptsParser = UploadOpts <$> itemsToWorkWithParser <*> documentationParser <*> optional pvpBoundsOption <*> ignoreCheckSwitch <*> buildPackageOption <*> tarDirParser <*> uploadVariantParser where itemsToWorkWithParser = many (strArgument ( metavar "ITEM" <> completer dirCompleter <> help "A relative path to a package directory or, for package upload \ \only, an sdist tarball." )) documentationParser = flag False True ( long "documentation" <> short 'd' <> help "Upload documentation for packages (not packages)." 
) pvpBoundsOption :: Parser PvpBounds pvpBoundsOption = option readPvpBounds ( long "pvp-bounds" <> metavar "PVP-BOUNDS" <> completeWith ["none", "lower", "upper", "both"] <> help "For package upload, how PVP version bounds should be added to \ \Cabal file: none, lower, upper, both." ) where readPvpBounds = do s <- readerAsk case parsePvpBounds $ T.pack s of Left e -> readerError e Right v -> pure v ignoreCheckSwitch = switch ( long "ignore-check" <> help "For package upload, do not check packages for common mistakes." ) buildPackageOption = boolFlags False "test-tarball" "building of the resulting sdist tarball(s), for package upload." idm tarDirParser = optional (strOption ( long "tar-dir" <> help "For package upload, if specified, copy all the tar to this \ \directory." )) uploadVariantParser = flag Publishing Candidate ( long "candidate" <> help "Upload as, or for, a package candidate." ) stack-2.15.7/src/Stack/Options/Utils.hs0000644000000000000000000000165514620153446016040 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Options.Utils ( GlobalOptsContext (..) , hideMods ) where import Options.Applicative ( Mod, hidden, idm, internal ) import Stack.Prelude -- | If argument is True, hides the option from usage and help hideMods :: Bool -> Mod f a hideMods hide = if hide then internal <> hidden else idm -- | Allows adjust global options depending on their context -- Note: This was being used to remove ambiguity between the local and global -- implementation of stack init --resolver option. Now that stack init has no -- local --resolver this is not being used anymore but the code is kept for any -- similar future use cases. 
data GlobalOptsContext = OuterGlobalOpts -- ^ Global options before subcommand name | OtherCmdGlobalOpts -- ^ Global options following any other subcommand | BuildCmdGlobalOpts | GhciCmdGlobalOpts deriving (Eq, Show) stack-2.15.7/src/Stack/Package.hs0000644000000000000000000010174714620153446014643 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} -- | Dealing with Cabal. module Stack.Package ( readDotBuildinfo , resolvePackage , packageFromPackageDescription , Package (..) , PackageConfig (..) , buildLogPath , PackageException (..) , resolvePackageDescription , packageDependencies , applyForceCustomBuild , hasBuildableMainLibrary , mainLibraryHasExposedModules , packageUnknownTools , buildableForeignLibs , buildableSubLibs , buildableExes , buildableTestSuites , buildableBenchmarks , getPackageOpts , processPackageDepsToList , listOfPackageDeps , setOfPackageDeps , topSortPackageComponent ) where import qualified Data.Map.Strict as M import qualified Data.Set as S import Data.STRef ( STRef, modifySTRef', readSTRef, newSTRef ) import qualified Data.Text as T import Distribution.CabalSpecVersion ( cabalSpecToVersionDigits ) import Distribution.Compiler ( CompilerFlavor (..), PerCompilerFlavor (..) ) import Distribution.ModuleName ( ModuleName ) import Distribution.Package ( mkPackageName ) import Distribution.PackageDescription ( Benchmark (..), BuildInfo (..), BuildType (..) , CondTree (..), Condition (..), ConfVar (..) , Dependency (..), Executable (..), ForeignLib (..) , GenericPackageDescription (..), HookedBuildInfo , Library (..), PackageDescription (..), PackageFlag (..) , SetupBuildInfo (..), TestSuite (..), allLibraries , buildType, depPkgName, depVerRange ) import qualified Distribution.PackageDescription as Executable ( Executable (..) 
) import Distribution.Simple.PackageDescription ( readHookedBuildInfo ) import Distribution.System ( OS (..), Arch, Platform (..) ) import Distribution.Text ( display ) import qualified Distribution.Types.CondTree as Cabal import Distribution.Utils.Path ( getSymbolicPath ) import Distribution.Verbosity ( silent ) import Distribution.Version ( anyVersion, mkVersion, orLaterVersion ) import Path ( (), parent, parseAbsDir, parseRelDir, parseRelFile , stripProperPrefix ) import Path.Extra ( concatAndCollapseAbsDir, toFilePathNoTrailingSep ) import Stack.Component ( componentDependencyMap, foldOnNameAndBuildInfo , isComponentBuildable, stackBenchmarkFromCabal , stackExecutableFromCabal, stackForeignLibraryFromCabal , stackLibraryFromCabal, stackTestFromCabal , stackUnqualToQual ) import Stack.ComponentFile ( buildDir, componentAutogenDir, componentBuildDir , componentOutputDir, packageAutogenDir ) import Stack.Constants (relFileCabalMacrosH, relDirLogs) import Stack.Constants.Config ( distDirFromDir ) import Stack.PackageFile ( getPackageFile, stackPackageFileFromCabal ) import Stack.Prelude hiding ( Display (..) ) import Stack.Types.BuildConfig ( HasBuildConfig (..), getProjectWorkDir ) import Stack.Types.CompCollection ( CompCollection, collectionLookup, foldAndMakeCollection , foldComponentToAnotherCollection, getBuildableSetText ) import Stack.Types.Compiler ( ActualCompiler (..) ) import Stack.Types.CompilerPaths ( cabalVersionL ) import Stack.Types.Component ( HasBuildInfo, HasComponentInfo, StackUnqualCompName (..) ) import qualified Stack.Types.Component as Component import Stack.Types.Config ( Config (..), HasConfig (..) ) import Stack.Types.Dependency ( DepLibrary (..), DepType (..), DepValue (..) 
, cabalSetupDepsToStackDep, libraryDepFromVersionRange ) import Stack.Types.EnvConfig ( HasEnvConfig ) import Stack.Types.Installed ( InstallMap, Installed (..), InstalledMap , installedToPackageIdOpt ) import Stack.Types.NamedComponent ( NamedComponent (..), isPotentialDependency , subLibComponents ) import Stack.Types.Package ( BioInput(..), BuildInfoOpts (..), Package (..) , PackageConfig (..), PackageException (..) , dotCabalCFilePath, packageIdentifier ) import Stack.Types.PackageFile ( DotCabalPath, PackageComponentFile (..) ) import Stack.Types.SourceMap (Target(..)) import Stack.Types.Version ( VersionRange, intersectVersionRanges, withinRange ) import System.FilePath ( replaceExtension ) import RIO.Seq ((|>)) -- | Read @.buildinfo@ ancillary files produced by some Setup.hs hooks. -- The file includes Cabal file syntax to be merged into the package description -- derived from the package's Cabal file. -- -- NOTE: not to be confused with BuildInfo, an Stack-internal datatype. readDotBuildinfo :: MonadIO m => Path Abs File -> m HookedBuildInfo readDotBuildinfo buildinfofp = liftIO $ readHookedBuildInfo silent (toFilePath buildinfofp) -- | Resolve a parsed Cabal file into a 'Package', which contains all of the -- info needed for Stack to build the 'Package' given the current configuration. 
resolvePackage :: PackageConfig -> GenericPackageDescription -> Package resolvePackage packageConfig gpkg = packageFromPackageDescription packageConfig (genPackageFlags gpkg) (resolvePackageDescription packageConfig gpkg) packageFromPackageDescription :: PackageConfig -> [PackageFlag] -> PackageDescription -> Package packageFromPackageDescription packageConfig pkgFlags pkg = Package { name = name , version = pkgVersion pkgId , license = licenseRaw pkg , ghcOptions = packageConfig.ghcOptions , cabalConfigOpts = packageConfig.cabalConfigOpts , flags = packageConfig.flags , defaultFlags = M.fromList [(flagName flag, flagDefault flag) | flag <- pkgFlags] , library = stackLibraryFromCabal <$> library pkg , subLibraries = foldAndMakeCollection stackLibraryFromCabal $ subLibraries pkg , foreignLibraries = foldAndMakeCollection stackForeignLibraryFromCabal $ foreignLibs pkg , testSuites = foldAndMakeCollection stackTestFromCabal $ testSuites pkg , benchmarks = foldAndMakeCollection stackBenchmarkFromCabal $ benchmarks pkg , executables = foldAndMakeCollection stackExecutableFromCabal $ executables pkg , buildType = buildType pkg , setupDeps = fmap cabalSetupDepsToStackDep (setupBuildInfo pkg) , cabalSpec = specVersion pkg , file = stackPackageFileFromCabal pkg , testEnabled = packageConfig.enableTests , benchmarkEnabled = packageConfig.enableBenchmarks } where -- Gets all of the modules, files, build files, and data files that constitute -- the package. This is primarily used for dirtiness checking during build, as -- well as use by "stack ghci" pkgId = package pkg name = pkgName pkgId -- | This is an action used to collect info needed for "stack ghci". This info -- isn't usually needed, so computation of it is deferred. 
getPackageOpts :: (HasEnvConfig env, MonadReader env m, MonadThrow m, MonadUnliftIO m ) => Package -> InstallMap -> InstalledMap -> [PackageName] -> [PackageName] -> Path Abs File -> m ( Map NamedComponent (Map ModuleName (Path Abs File)) , Map NamedComponent [DotCabalPath] , Map NamedComponent BuildInfoOpts ) getPackageOpts stackPackage installMap installedMap omitPkgs addPkgs cabalFP = do PackageComponentFile !componentsModules componentFiles _ _ <- getPackageFile stackPackage cabalFP let subLibs = S.toList $ subLibComponents $ M.keysSet componentsModules excludedSubLibs <- mapM (parsePackageNameThrowing . T.unpack) subLibs componentsOpts <- generatePkgDescOpts installMap installedMap (excludedSubLibs ++ omitPkgs) addPkgs cabalFP stackPackage componentFiles pure (componentsModules, componentFiles, componentsOpts) -- | Generate GHC options for the package's components, and a list of options -- which apply generally to the package, not one specific component. generatePkgDescOpts :: (HasEnvConfig env, MonadThrow m, MonadReader env m, MonadIO m) => InstallMap -> InstalledMap -> [PackageName] -- ^ Packages to omit from the "-package" / "-package-id" flags -> [PackageName] -- ^ Packages to add to the "-package" flags -> Path Abs File -> Package -> Map NamedComponent [DotCabalPath] -> m (Map NamedComponent BuildInfoOpts) generatePkgDescOpts installMap installedMap omitPackages addPackages cabalFP pkg componentPaths = do config <- view configL cabalVersion <- view cabalVersionL distDir <- distDirFromDir cabalDir let generate componentName buildInfo = generateBuildInfoOpts BioInput { installMap , installedMap , cabalDir , distDir , omitPackages , addPackages , buildInfo , dotCabalPaths = fromMaybe [] (M.lookup componentName componentPaths) , configLibDirs = config.extraLibDirs , configIncludeDirs = config.extraIncludeDirs , componentName , cabalVersion } let insertInMap name compVal = M.insert name (generate name compVal) let translatedInsertInMap constructor name = 
insertInMap (stackUnqualToQual constructor name) let makeBuildInfoOpts selector constructor = foldOnNameAndBuildInfo (selector pkg) (translatedInsertInMap constructor) let aggregateAllBuildInfoOpts = makeBuildInfoOpts (.library) (const CLib) . makeBuildInfoOpts (.subLibraries) CSubLib . makeBuildInfoOpts (.executables) CExe . makeBuildInfoOpts (.benchmarks) CBench . makeBuildInfoOpts (.testSuites) CTest pure $ aggregateAllBuildInfoOpts mempty where cabalDir = parent cabalFP -- | Generate GHC options for the target. Since Cabal also figures out these -- options, currently this is only used for invoking GHCI (via stack ghci). generateBuildInfoOpts :: BioInput -> BuildInfoOpts generateBuildInfoOpts bi = BuildInfoOpts { opts = ghcOpts ++ fmap ("-optP" <>) bi.buildInfo.cppOptions -- NOTE for future changes: Due to this use of nubOrd (and other uses -- downstream), these generated options must not rely on multiple -- argument sequences. For example, ["--main-is", "Foo.hs", "--main- -- is", "Bar.hs"] would potentially break due to the duplicate -- "--main-is" being removed. -- -- See https://github.com/commercialhaskell/stack/issues/1255 , oneWordOpts = nubOrd $ concat [extOpts, srcOpts, includeOpts, libOpts, fworks, cObjectFiles] , packageFlags = deps , cabalMacros = componentAutogen relFileCabalMacrosH } where cObjectFiles = mapMaybe ( fmap toFilePath . makeObjectFilePathFromC bi.cabalDir bi.componentName bi.distDir ) cfiles cfiles = mapMaybe dotCabalCFilePath bi.dotCabalPaths installVersion = snd -- Generates: -package=base -package=base16-bytestring-0.1.1.6 ... deps = concat [ case M.lookup name bi.installedMap of Just (_, Stack.Types.Installed.Library _ident installedInfo) -> installedToPackageIdOpt installedInfo _ -> ["-package=" <> packageNameString name <> maybe "" -- This empty case applies to e.g. base. ((("-" <>) . versionString) . 
installVersion) (M.lookup name bi.installMap)] | name <- pkgs ] pkgs = bi.addPackages ++ [ name | Dependency name _ _ <- bi.buildInfo.targetBuildDepends -- TODO: Cabal 3.0 introduced multiple public libraries in a single -- dependency , name `notElem` bi.omitPackages ] PerCompilerFlavor ghcOpts _ = bi.buildInfo.options extOpts = map (("-X" ++) . display) bi.buildInfo.allLanguages <> map (("-X" ++) . display) bi.buildInfo.usedExtensions srcOpts = map (("-i" <>) . toFilePathNoTrailingSep) (concat [ [ componentBuildDir bi.cabalVersion bi.componentName bi.distDir ] , [ bi.cabalDir | null bi.buildInfo.hsSourceDirs ] , mapMaybe (toIncludeDir . getSymbolicPath) bi.buildInfo.hsSourceDirs , [ componentAutogen ] , maybeToList (packageAutogenDir bi.cabalVersion bi.distDir) , [ componentOutputDir bi.componentName bi.distDir ] ]) ++ [ "-stubdir=" ++ toFilePathNoTrailingSep (buildDir bi.distDir) ] componentAutogen = componentAutogenDir bi.cabalVersion bi.componentName bi.distDir toIncludeDir "." = Just bi.cabalDir toIncludeDir relDir = concatAndCollapseAbsDir bi.cabalDir relDir includeOpts = map ("-I" <>) (bi.configIncludeDirs <> pkgIncludeOpts) pkgIncludeOpts = [ toFilePathNoTrailingSep absDir | dir <- bi.buildInfo.includeDirs , absDir <- handleDir dir ] libOpts = map ("-l" <>) bi.buildInfo.extraLibs <> map ("-L" <>) (bi.configLibDirs <> pkgLibDirs) pkgLibDirs = [ toFilePathNoTrailingSep absDir | dir <- bi.buildInfo.extraLibDirs , absDir <- handleDir dir ] handleDir dir = case (parseAbsDir dir, parseRelDir dir) of (Just ab, _ ) -> [ab] (_ , Just rel) -> [bi.cabalDir rel] (Nothing, Nothing ) -> [] fworks = map ("-framework=" <>) bi.buildInfo.frameworks -- | Make the .o path from the .c file path for a component. 
Example: -- -- @ -- executable FOO -- c-sources: cbits/text_search.c -- @ -- -- Produces -- -- /build/FOO/FOO-tmp/cbits/text_search.o -- -- Example: -- -- λ> makeObjectFilePathFromC -- $(mkAbsDir "/Users/chris/Repos/hoogle") -- CLib -- $(mkAbsDir "/Users/chris/Repos/hoogle/.stack-work/Cabal-x.x.x/dist") -- $(mkAbsFile "/Users/chris/Repos/hoogle/cbits/text_search.c") -- Just "/Users/chris/Repos/hoogle/.stack-work/Cabal-x.x.x/dist/build/cbits/text_search.o" -- λ> makeObjectFilePathFromC -- $(mkAbsDir "/Users/chris/Repos/hoogle") -- (CExe "hoogle") -- $(mkAbsDir "/Users/chris/Repos/hoogle/.stack-work/Cabal-x.x.x/dist") -- $(mkAbsFile "/Users/chris/Repos/hoogle/cbits/text_search.c") -- Just "/Users/chris/Repos/hoogle/.stack-work/Cabal-x.x.x/dist/build/hoogle/hoogle-tmp/cbits/text_search.o" -- λ> makeObjectFilePathFromC :: MonadThrow m => Path Abs Dir -- ^ The cabal directory. -> NamedComponent -- ^ The name of the component. -> Path Abs Dir -- ^ Dist directory. -> Path Abs File -- ^ The path to the .c file. -> m (Path Abs File) -- ^ The path to the .o file for the component. makeObjectFilePathFromC cabalDir namedComponent distDir cFilePath = do relCFilePath <- stripProperPrefix cabalDir cFilePath relOFilePath <- parseRelFile (replaceExtension (toFilePath relCFilePath) "o") pure (componentOutputDir namedComponent distDir relOFilePath) -- | Get all dependencies of the package (buildable targets only). packageDependencies :: PackageDescription -> Map PackageName VersionRange packageDependencies pkg = M.fromListWith intersectVersionRanges $ map (depPkgName &&& depVerRange) $ concatMap targetBuildDepends (allBuildInfo' pkg) <> maybe [] setupDepends (setupBuildInfo pkg) -- | Variant of 'allBuildInfo' from Cabal that, like versions before Cabal 2.2 -- only includes buildable components. 
allBuildInfo' :: PackageDescription -> [BuildInfo] allBuildInfo' pkg_descr = [ bi | lib <- allLibraries pkg_descr , let bi = libBuildInfo lib , buildable bi ] ++ [ bi | flib <- foreignLibs pkg_descr , let bi = foreignLibBuildInfo flib , buildable bi ] ++ [ bi | exe <- executables pkg_descr , let bi = buildInfo exe , buildable bi ] ++ [ bi | tst <- testSuites pkg_descr , let bi = testBuildInfo tst , buildable bi ] ++ [ bi | tst <- benchmarks pkg_descr , let bi = benchmarkBuildInfo tst , buildable bi ] -- | Evaluates the conditions of a 'GenericPackageDescription', yielding -- a resolved 'PackageDescription'. resolvePackageDescription :: PackageConfig -> GenericPackageDescription -> PackageDescription resolvePackageDescription packageConfig ( GenericPackageDescription desc _ defaultFlags mlib subLibs foreignLibs' exes tests benches ) = desc { library = fmap (resolveConditions rc updateLibDeps) mlib , subLibraries = map ( \(n, v) -> (resolveConditions rc updateLibDeps v){libName = LSubLibName n} ) subLibs , foreignLibs = map ( \(n, v) -> (resolveConditions rc updateForeignLibDeps v){foreignLibName = n} ) foreignLibs' , executables = map ( \(n, v) -> (resolveConditions rc updateExeDeps v){exeName = n} ) exes , testSuites = map ( \(n, v) -> (resolveConditions rc updateTestDeps v){testName = n} ) tests , benchmarks = map ( \(n, v) -> (resolveConditions rc updateBenchmarkDeps v){benchmarkName = n} ) benches } where flags = M.union packageConfig.flags (flagMap defaultFlags) rc = mkResolveConditions packageConfig.compilerVersion packageConfig.platform flags updateLibDeps lib deps = lib { libBuildInfo = (libBuildInfo lib) {targetBuildDepends = deps} } updateForeignLibDeps lib deps = lib { foreignLibBuildInfo = (foreignLibBuildInfo lib) {targetBuildDepends = deps} } updateExeDeps exe deps = exe { Executable.buildInfo = (buildInfo exe) {targetBuildDepends = deps} } updateTestDeps test deps = test { testBuildInfo = (testBuildInfo test) {targetBuildDepends = deps} } 
updateBenchmarkDeps bench deps = bench { benchmarkBuildInfo = (benchmarkBuildInfo bench) {targetBuildDepends = deps} } -- | Make a map from a list of flag specifications. -- -- What is @flagManual@ for? flagMap :: [PackageFlag] -> Map FlagName Bool flagMap = M.fromList . map pair where pair :: PackageFlag -> (FlagName, Bool) pair = flagName &&& flagDefault data ResolveConditions = ResolveConditions { flags :: Map FlagName Bool , compilerVersion :: ActualCompiler , os :: OS , arch :: Arch } -- | Generic a @ResolveConditions@ using sensible defaults. mkResolveConditions :: ActualCompiler -- ^ Compiler version -> Platform -- ^ installation target platform -> Map FlagName Bool -- ^ enabled flags -> ResolveConditions mkResolveConditions compilerVersion (Platform arch os) flags = ResolveConditions { flags , compilerVersion , os , arch } -- | Resolve the condition tree for the library. resolveConditions :: (Semigroup target, Monoid target, Show target) => ResolveConditions -> (target -> cs -> target) -> CondTree ConfVar cs target -> target resolveConditions rc addDeps (CondNode lib deps cs) = basic <> children where basic = addDeps lib deps children = mconcat (map apply cs) where apply (Cabal.CondBranch cond node mcs) = if condSatisfied cond then resolveConditions rc addDeps node else maybe mempty (resolveConditions rc addDeps) mcs condSatisfied c = case c of Var v -> varSatisfied v Lit b -> b CNot c' -> not (condSatisfied c') COr cx cy -> condSatisfied cx || condSatisfied cy CAnd cx cy -> condSatisfied cx && condSatisfied cy varSatisfied v = case v of OS os -> os == rc.os Arch arch -> arch == rc.arch PackageFlag flag -> fromMaybe False $ M.lookup flag rc.flags -- NOTE: ^^^^^ This should never happen, as all flags which are used -- must be declared. Defaulting to False. Impl flavor range -> case (flavor, rc.compilerVersion) of (GHC, ACGhc vghc) -> vghc `withinRange` range _ -> False -- | Path for the package's build log. 
buildLogPath :: (MonadReader env m, HasBuildConfig env, MonadThrow m) => Package -> Maybe String -> m (Path Abs File) buildLogPath package' msuffix = do env <- ask let stack = getProjectWorkDir env fp <- parseRelFile $ concat $ packageIdentifierString (packageIdentifier package') : maybe id (\suffix -> ("-" :) . (suffix :)) msuffix [".log"] pure $ stack relDirLogs fp {- FIXME -- | Create a 'ProjectPackage' from a directory containing a package. mkProjectPackage :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) => PrintWarnings -> ResolvedPath Dir -> RIO env ProjectPackage mkProjectPackage printWarnings dir = do (gpd, name, cabalfp) <- loadCabalFilePath (resolvedAbsolute dir) pure ProjectPackage { ppCabalFP = cabalfp , ppGPD' = gpd printWarnings , ppResolvedDir = dir , ppName = name } -- | Create a 'DepPackage' from a 'PackageLocation' mkDepPackage :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) => PackageLocation -> RIO env DepPackage mkDepPackage pl = do (name, gpdio) <- case pl of PLMutable dir -> do (gpdio, name, _cabalfp) <- loadCabalFilePath (resolvedAbsolute dir) pure (name, gpdio NoPrintWarnings) PLImmutable pli -> do PackageIdentifier name _ <- getPackageLocationIdent pli run <- askRunInIO pure (name, run $ loadCabalFileImmutable pli) pure DepPackage { dpGPD' = gpdio , dpLocation = pl , dpName = name } -} -- | Force a package to be treated as a custom build type, see -- applyForceCustomBuild :: Version -- ^ global Cabal version -> Package -> Package applyForceCustomBuild cabalVersion package | forceCustomBuild = package { buildType = Custom , setupDeps = Just $ M.fromList [ ("Cabal", libraryDepFromVersionRange cabalVersionRange) , ("base", libraryDepFromVersionRange anyVersion) ] } | otherwise = package where cabalVersionRange = orLaterVersion $ mkVersion $ cabalSpecToVersionDigits package.cabalSpec forceCustomBuild = package.buildType == Simple && not (cabalVersion `withinRange` cabalVersionRange) -- | 
Check if the package has a main library that is buildable. hasBuildableMainLibrary :: Package -> Bool hasBuildableMainLibrary package = maybe False isComponentBuildable package.library -- | Check if the main library has any exposed modules. -- -- This should become irrelevant at some point since there's nothing inherently -- wrong or different with packages exposing only modules in internal libraries -- (for instance). mainLibraryHasExposedModules :: Package -> Bool mainLibraryHasExposedModules package = maybe False (not . null . (.exposedModules)) package.library -- | Aggregate all unknown tools from all components. Mostly meant for -- build tools specified in the legacy manner (build-tools:) that failed the -- hard-coded lookup. See 'Stack.Types.Component.unknownTools' for more -- information. packageUnknownTools :: Package -> Set Text packageUnknownTools pkg = lib (bench <> tests <> flib <> sublib <> exe) where lib setT = case pkg.library of Just libV -> addUnknownTools libV setT Nothing -> setT bench = gatherUnknownTools pkg.benchmarks tests = gatherUnknownTools pkg.testSuites flib = gatherUnknownTools pkg.foreignLibraries sublib = gatherUnknownTools pkg.subLibraries exe = gatherUnknownTools pkg.executables addUnknownTools :: HasBuildInfo x => x -> Set Text -> Set Text addUnknownTools = (<>) . 
(.buildInfo.unknownTools) gatherUnknownTools :: HasBuildInfo x => CompCollection x -> Set Text gatherUnknownTools = foldr' addUnknownTools mempty buildableForeignLibs :: Package -> Set Text buildableForeignLibs pkg = getBuildableSetText pkg.foreignLibraries buildableSubLibs :: Package -> Set Text buildableSubLibs pkg = getBuildableSetText pkg.subLibraries buildableExes :: Package -> Set Text buildableExes pkg = getBuildableSetText pkg.executables buildableTestSuites :: Package -> Set Text buildableTestSuites pkg = getBuildableSetText pkg.testSuites buildableBenchmarks :: Package -> Set Text buildableBenchmarks pkg = getBuildableSetText pkg.benchmarks -- | Apply a generic processing function in a Monad over all of the Package's -- components. processPackageComponent :: forall m a. (Monad m) => Package -> (forall component. HasComponentInfo component => component -> m a -> m a) -- ^ Processing function with all the component's info. -> m a -- ^ Initial value. -> m a processPackageComponent pkg componentFn = do let componentKindProcessor :: forall component. HasComponentInfo component => (Package -> CompCollection component) -> m a -> m a componentKindProcessor target = foldComponentToAnotherCollection (target pkg) componentFn processMainLib = maybe id componentFn pkg.library processAllComp = ( if pkg.benchmarkEnabled then componentKindProcessor (.benchmarks) else id ) . ( if pkg.testEnabled then componentKindProcessor (.testSuites) else id ) . componentKindProcessor (.foreignLibraries) . componentKindProcessor (.executables) . componentKindProcessor (.subLibraries) . processMainLib processAllComp -- | This is a function to iterate in a monad over all of a package's -- dependencies, and yield a collection of results (used with list and set). 
processPackageMapDeps :: (Monad m) => Package -> (Map PackageName DepValue -> m a -> m a) -> m a -> m a processPackageMapDeps pkg fn = do let packageSetupDepsProcessor resAction = case pkg.setupDeps of Nothing -> resAction Just v -> fn v resAction processAllComp = processPackageComponent pkg (fn . componentDependencyMap) . packageSetupDepsProcessor processAllComp -- | This is a function to iterate in a monad over all of a package component's -- dependencies, and yield a collection of results. processPackageDeps :: (Monad m, Monoid (targetedCollection resT)) => Package -> (resT -> targetedCollection resT -> targetedCollection resT) -> (PackageName -> DepValue -> m resT) -> m (targetedCollection resT) -> m (targetedCollection resT) processPackageDeps pkg combineResults fn = do let asPackageNameSet accessor = S.map (mkPackageName . T.unpack) $ getBuildableSetText $ accessor pkg (!subLibNames, !foreignLibNames) = ( asPackageNameSet (.subLibraries) , asPackageNameSet (.foreignLibraries) ) shouldIgnoreDep (packageNameV :: PackageName) | packageNameV == pkg.name = True | packageNameV `S.member` subLibNames = True | packageNameV `S.member` foreignLibNames = True | otherwise = False innerIterator packageName depValue resListInMonad | shouldIgnoreDep packageName = resListInMonad | otherwise = do resList <- resListInMonad newResElement <- fn packageName depValue pure $ combineResults newResElement resList processPackageMapDeps pkg (flip (M.foldrWithKey' innerIterator)) -- | Iterate/fold on all the package dependencies, components, setup deps and -- all. processPackageDepsToList :: Monad m => Package -> (PackageName -> DepValue -> m resT) -> m [resT] processPackageDepsToList pkg fn = processPackageDeps pkg (:) fn (pure []) -- | List all package's dependencies in a "free" context through the identity -- monad. 
listOfPackageDeps :: Package -> [PackageName] listOfPackageDeps pkg = runIdentity $ processPackageDepsToList pkg (\pn _ -> pure pn) -- | The set of package's dependencies. setOfPackageDeps :: Package -> Set PackageName setOfPackageDeps pkg = runIdentity $ processPackageDeps pkg S.insert (\pn _ -> pure pn) (pure mempty) -- | This implements a topological sort on all targeted components for the build -- and their dependencies. It's only targeting internal dependencies, so it's doing -- a topological sort on a subset of a package's components. -- -- Note that in Cabal they use the Data.Graph struct to pursue the same goal. But dong this here -- would require a large number intermediate data structure. -- This is needed because we need to get the right GhcPkgId of the relevant internal dependencies -- of a component before building it as a component. topSortPackageComponent :: Package -> Target -> Bool -- ^ Include directTarget or not. False here means we won't -- include the actual targets in the result, only their deps. -- Using it with False here only in GHCi -> Seq NamedComponent topSortPackageComponent package target includeDirectTarget = runST $ do alreadyProcessedRef <- newSTRef (mempty :: Set NamedComponent) let processInitialComponents c = case target of TargetAll{} -> processComponent includeDirectTarget alreadyProcessedRef c TargetComps targetSet -> if S.member c.qualifiedName targetSet then processComponent includeDirectTarget alreadyProcessedRef c else id processPackageComponent package processInitialComponents (pure mempty) where processComponent :: forall s component. 
HasComponentInfo component => Bool -- ^ Finally add this component in the seq -> STRef s (Set NamedComponent) -> component -> ST s (Seq NamedComponent) -> ST s (Seq NamedComponent) processComponent finallyAddComponent alreadyProcessedRef component res = do let depMap = componentDependencyMap component internalDep = M.lookup package.name depMap processSubDep = processOneDep alreadyProcessedRef internalDep res qualName = component.qualifiedName processSubDepSaveName | finallyAddComponent = (|> qualName) <$> processSubDep | otherwise = processSubDep -- This is an optimization, the only components we are likely to process -- multiple times are the ones we can find in dependencies, otherwise we -- only fold on a single version of each component by design. if isPotentialDependency qualName then do alreadyProcessed <- readSTRef alreadyProcessedRef if S.member qualName alreadyProcessed then res else modifySTRef' alreadyProcessedRef (S.insert qualName) >> processSubDepSaveName else processSubDepSaveName lookupLibName isMain name = if isMain then package.library else collectionLookup name package.subLibraries processOneDep alreadyProcessed mDependency res = case (.depType) <$> mDependency of Just (AsLibrary (DepLibrary mainLibDep subLibDeps)) -> do let processMainLibDep = case (mainLibDep, lookupLibName True mempty) of (True, Just mainLib) -> processComponent True alreadyProcessed mainLib _ -> id processSingleSubLib name = case lookupLibName False name.unqualCompToText of Just lib -> processComponent True alreadyProcessed lib Nothing -> id processSubLibDep r = foldr' processSingleSubLib r subLibDeps processSubLibDep (processMainLibDep res) _ -> res stack-2.15.7/src/Stack/PackageDump.hs0000644000000000000000000003036314620153446015464 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} module Stack.PackageDump ( Line , eachSection , eachPair , DumpPackage (..) 
, conduitDumpPackage , ghcPkgDump , ghcPkgDescribe , sinkMatching , pruneDeps ) where import Data.Attoparsec.Args ( EscapingMode (..), argsParser ) import Data.Attoparsec.Text as P import Data.Conduit ( await, leftover, toConsumer, yield ) import qualified Data.Conduit.List as CL import qualified Data.Conduit.Text as CT import qualified Data.Map as Map import qualified Data.Set as Set import qualified Distribution.Text as C import Distribution.Types.MungedPackageName ( decodeCompatPackageName ) import Path.Extra ( toFilePathNoTrailingSep ) import RIO.Process ( HasProcessContext ) import qualified RIO.Text as T import Stack.Component ( fromCabalName ) import Stack.GhcPkg ( createDatabase ) import Stack.Prelude import Stack.Types.CompilerPaths ( GhcPkgExe (..), HasCompiler (..) ) import Stack.Types.Component ( StackUnqualCompName(..) ) import Stack.Types.DumpPackage ( DumpPackage (..), SublibDump (..) ) import Stack.Types.GhcPkgId ( GhcPkgId, parseGhcPkgId ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.PackageDump" module. data PackageDumpException = MissingSingleField Text (Map Text [Line]) | Couldn'tParseField Text [Line] deriving (Show, Typeable) instance Exception PackageDumpException where displayException (MissingSingleField name values) = unlines $ concat [ "Error: [S-4257]\n" , "Expected single value for field name " , show name , " when parsing ghc-pkg dump output:" ] : map (\(k, v) -> " " ++ show (k, v)) (Map.toList values) displayException (Couldn'tParseField name ls) = concat [ "Error: [S-2016]\n" , "Couldn't parse the field " , show name , " from lines: " , show ls , "." ] -- | Call @ghc-pkg dump@ with appropriate flags and stream to the given sink, -- using either the global package database or the given package databases. ghcPkgDump :: (HasProcessContext env, HasTerm env) => GhcPkgExe -> [Path Abs Dir] -- ^ A list of package databases. If empty, use the global package -- database. 
-> ConduitM Text Void (RIO env) a -- ^ Sink. -> RIO env a ghcPkgDump pkgexe = ghcPkgCmdArgs pkgexe ["dump"] -- | Call @ghc-pkg describe@ with appropriate flags and stream to the given -- sink, using either the global package database or the given package -- databases. ghcPkgDescribe :: (HasCompiler env, HasProcessContext env, HasTerm env) => GhcPkgExe -> PackageName -> [Path Abs Dir] -- ^ A list of package databases. If empty, use the global package -- database. -> ConduitM Text Void (RIO env) a -- ^ Sink. -> RIO env a ghcPkgDescribe pkgexe pkgName' = ghcPkgCmdArgs pkgexe ["describe", "--simple-output", packageNameString pkgName'] -- | Call @ghc-pkg@ and stream to the given sink, using the either the global -- package database or the given package databases. ghcPkgCmdArgs :: (HasProcessContext env, HasTerm env) => GhcPkgExe -> [String] -- ^ A list of commands. -> [Path Abs Dir] -- ^ A list of package databases. If empty, use the global package -- database. -> ConduitM Text Void (RIO env) a -- ^ Sink. -> RIO env a ghcPkgCmdArgs pkgexe@(GhcPkgExe pkgPath) cmd mpkgDbs sink = do case reverse mpkgDbs of (pkgDb:_) -> createDatabase pkgexe pkgDb -- TODO maybe use some retry logic instead? _ -> pure () -- https://github.com/haskell/process/issues/251 snd <$> sinkProcessStderrStdout (toFilePath pkgPath) args CL.sinkNull sink' where args = concat [ case mpkgDbs of [] -> ["--global", "--no-user-package-db"] _ -> "--user" : "--no-user-package-db" : concatMap (\pkgDb -> ["--package-db", toFilePathNoTrailingSep pkgDb]) mpkgDbs , cmd , ["--expand-pkgroot"] ] sink' = CT.decodeUtf8 .| sink -- | Prune a list of possible packages down to those whose dependencies are met. 
-- -- * id uniquely identifies an item -- -- * There can be multiple items per name pruneDeps :: (Ord name, Ord id) => (id -> name) -- ^ extract the name from an id -> (item -> id) -- ^ the id of an item -> (item -> [id]) -- ^ get the dependencies of an item -> (item -> item -> item) -- ^ choose the desired of two possible items -> [item] -- ^ input items -> Map name item pruneDeps getName getId getDepends chooseBest = Map.fromList . fmap (getName . getId &&& id) . loop Set.empty Set.empty [] where loop foundIds usedNames foundItems dps = case partitionEithers $ map depsMet dps of ([], _) -> foundItems (s', dps') -> let foundIds' = Map.fromListWith chooseBest s' foundIds'' = Set.fromList $ map getId $ Map.elems foundIds' usedNames' = Map.keysSet foundIds' foundItems' = Map.elems foundIds' in loop (Set.union foundIds foundIds'') (Set.union usedNames usedNames') (foundItems ++ foundItems') (catMaybes dps') where depsMet dp | name `Set.member` usedNames = Right Nothing | all (`Set.member` foundIds) (getDepends dp) = Left (name, dp) | otherwise = Right $ Just dp where id' = getId dp name = getName id' -- | Find the package IDs matching the given constraints with all dependencies installed. -- Packages not mentioned in the provided @Map@ are allowed to be present too. sinkMatching :: Monad m => Map PackageName Version -- ^ allowed versions -> ConduitM DumpPackage o m (Map PackageName DumpPackage) sinkMatching allowed = Map.fromList . map (pkgName . (.packageIdent) &&& id) . Map.elems . pruneDeps id (.ghcPkgId) (.depends) const -- Could consider a better comparison in the future <$> (CL.filter (isAllowed . 
(.packageIdent)) .| CL.consume) where isAllowed (PackageIdentifier name version) = case Map.lookup name allowed of Just version' | version /= version' -> False _ -> True -- | Convert a stream of bytes into a stream of @DumpPackage@s conduitDumpPackage :: MonadThrow m => ConduitM Text DumpPackage m () conduitDumpPackage = (.| CL.catMaybes) $ eachSection $ do pairs <- eachPair (\k -> (k, ) <$> CL.consume) .| CL.consume let m = Map.fromList pairs let parseS k = case Map.lookup k m of Just [v] -> pure v _ -> throwM $ MissingSingleField k m -- Can't fail: if not found, same as an empty list. See: -- https://github.com/commercialhaskell/stack/issues/182 parseM k = Map.findWithDefault [] k m parseDepend :: MonadThrow m => Text -> m (Maybe GhcPkgId) parseDepend "builtin_rts" = pure Nothing parseDepend bs = Just <$> parseGhcPkgId bs' where (bs', _builtinRts) = case stripSuffixText " builtin_rts" bs of Nothing -> case stripPrefixText "builtin_rts " bs of Nothing -> (bs, False) Just x -> (x, True) Just x -> (x, True) case Map.lookup "id" m of Just ["builtin_rts"] -> pure Nothing _ -> do name <- parseS "name" >>= parsePackageNameThrowing . T.unpack version <- parseS "version" >>= parseVersionThrowing . T.unpack ghcPkgId <- parseS "id" >>= parseGhcPkgId -- if a package has no modules, these won't exist let libDirKey = "library-dirs" libraries = parseM "hs-libraries" exposedModules = parseM "exposed-modules" exposed = parseM "exposed" license = case parseM "license" of [licenseText] -> C.simpleParse (T.unpack licenseText) _ -> Nothing depends <- mapMaybeM parseDepend $ concatMap T.words $ parseM "depends" -- Handle sub-libraries by recording the name of the parent library -- If name of parent library is missing, this is not a sub-library. let maybePackageName :: Maybe PackageName = parseS "package-name" >>= parsePackageNameThrowing . 
T.unpack maybeLibName = parseS "lib-name" getLibNameFromLegacyName = case decodeCompatPackageName name of MungedPackageName _parentPackageName (LSubLibName libName') -> fromCabalName libName' MungedPackageName _parentPackageName _ -> "" libName = maybe getLibNameFromLegacyName StackUnqualCompName maybeLibName sublib = flip SublibDump libName <$> maybePackageName parseQuoted key = case mapM (P.parseOnly (argsParser NoEscaping)) val of Left{} -> throwM (Couldn'tParseField key val) Right dirs -> pure (concat dirs) where val = parseM key libDirs <- parseQuoted libDirKey haddockInterfaces <- parseQuoted "haddock-interfaces" haddockHtml <- listToMaybe <$> parseQuoted "haddock-html" pure $ Just DumpPackage { ghcPkgId , packageIdent = PackageIdentifier name version , sublib , license , libDirs , libraries = T.words $ T.unwords libraries , hasExposedModules = not (null libraries || null exposedModules) -- Strip trailing commas from ghc package exposed-modules (looks buggy -- to me...). Then try to parse the module names. , exposedModules = Set.fromList $ mapMaybe (C.simpleParse . T.unpack . 
T.dropSuffix ",") $ T.words $ T.unwords exposedModules , depends , haddockInterfaces , haddockHtml , isExposed = exposed == ["True"] } stripPrefixText :: Text -> Text -> Maybe Text stripPrefixText x y | x `T.isPrefixOf` y = Just $ T.drop (T.length x) y | otherwise = Nothing stripSuffixText :: Text -> Text -> Maybe Text stripSuffixText x y | x `T.isSuffixOf` y = Just $ T.take (T.length y - T.length x) y | otherwise = Nothing -- | A single line of input, not including line endings type Line = Text -- | Apply the given Sink to each section of output, broken by a single line containing --- eachSection :: Monad m => ConduitM Line Void m a -> ConduitM Text a m () eachSection inner = CL.map (T.filter (/= '\r')) .| CT.lines .| start where peekText = await >>= maybe (pure Nothing) (\bs -> if T.null bs then peekText else leftover bs >> pure (Just bs)) start = peekText >>= maybe (pure ()) (const go) go = do x <- toConsumer $ takeWhileC (/= "---") .| inner yield x CL.drop 1 start -- | Grab each key/value pair eachPair :: Monad m => (Text -> ConduitM Line Void m a) -> ConduitM Line a m () eachPair inner = start where start = await >>= maybe (pure ()) start' start' bs1 = toConsumer (valSrc .| inner key) >>= yield >> start where (key, bs2) = T.break (== ':') bs1 (spaces, bs3) = T.span (== ' ') $ T.drop 1 bs2 ind = T.length key + 1 + T.length spaces valSrc | T.null bs3 = noIndent | otherwise = yield bs3 >> loopIndent ind noIndent = do mx <- await case mx of Nothing -> pure () Just bs -> do let (spaces, val) = T.span (== ' ') bs if T.length spaces == 0 then leftover val else do yield val loopIndent (T.length spaces) loopIndent i = loop where loop = await >>= maybe (pure ()) go go bs | T.length spaces == i && T.all (== ' ') spaces = yield val >> loop | otherwise = leftover bs where (spaces, val) = T.splitAt i bs -- | General purpose utility takeWhileC :: Monad m => (a -> Bool) -> ConduitM a a m () takeWhileC f = loop where loop = await >>= maybe (pure ()) go go x | f x = yield x >> 
loop | otherwise = leftover x stack-2.15.7/src/Stack/PackageFile.hs0000644000000000000000000001527714604306201015434 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | A module which exports all package-level file-gathering logic. module Stack.PackageFile ( getPackageFile , stackPackageFileFromCabal ) where import qualified Data.Map.Strict as M import qualified Data.Set as S import Distribution.CabalSpecVersion ( CabalSpecVersion ) import qualified Distribution.PackageDescription as Cabal import Distribution.Simple.Glob ( matchDirFileGlob ) import Path ( parent, () ) import Path.Extra ( forgivingResolveFile, rejectMissingFile ) import Path.IO ( doesFileExist ) import Stack.ComponentFile ( ComponentFile (..), resolveOrWarn, stackBenchmarkFiles , stackExecutableFiles, stackLibraryFiles , stackTestSuiteFiles ) import Stack.Constants ( relFileHpackPackageConfig, relFileSetupHs, relFileSetupLhs ) import Stack.Constants.Config ( distDirFromDir ) import Stack.Prelude import Stack.Types.BuildConfig ( HasBuildConfig (..) ) import Stack.Types.CompilerPaths ( cabalVersionL ) import Stack.Types.EnvConfig ( HasEnvConfig (..) ) import Stack.Types.NamedComponent ( NamedComponent (..) ) import Stack.Types.Package ( Package(..) ) import Stack.Types.PackageFile ( GetPackageFileContext (..), PackageComponentFile (..) , StackPackageFile (..) ) import qualified System.FilePath as FilePath import System.IO.Error ( isUserError ) -- | Resolve the file, if it can't be resolved, warn for the user -- (purely to be helpful). resolveFileOrWarn :: FilePath.FilePath -> RIO GetPackageFileContext (Maybe (Path Abs File)) resolveFileOrWarn = resolveOrWarn "File" f where f p x = forgivingResolveFile p x >>= rejectMissingFile -- | Get all files referenced by the package. 
packageDescModulesAndFiles :: Package -> RIO GetPackageFileContext PackageComponentFile packageDescModulesAndFiles pkg = do packageExtraFile <- resolveGlobFilesFromStackPackageFile pkg.cabalSpec pkg.file let initialValue = mempty{packageExtraFile=packageExtraFile} let accumulator f comp st = (insertComponentFile <$> st) <*> f comp let gatherCompFileCollection createCompFileFn getCompFn res = foldr' (accumulator createCompFileFn) res (getCompFn pkg) gatherCompFileCollection stackLibraryFiles (.library) . gatherCompFileCollection stackLibraryFiles (.subLibraries) . gatherCompFileCollection stackExecutableFiles (.executables) . gatherCompFileCollection stackTestSuiteFiles (.testSuites) . gatherCompFileCollection stackBenchmarkFiles (.benchmarks) $ pure initialValue resolveGlobFilesFromStackPackageFile :: CabalSpecVersion -> StackPackageFile -> RIO GetPackageFileContext (Set (Path Abs File)) resolveGlobFilesFromStackPackageFile csvV (StackPackageFile extraSrcFilesV dataDirV dataFilesV) = resolveGlobFiles csvV (extraSrcFilesV ++ map (dataDirV FilePath.) dataFilesV) -- | Resolve globbing of files (e.g. data files) to absolute paths. resolveGlobFiles :: CabalSpecVersion -- ^ Cabal file version -> [String] -> RIO GetPackageFileContext (Set (Path Abs File)) resolveGlobFiles cabalFileVersion = fmap (S.fromList . concatMap catMaybes) . mapM resolve where resolve name = if '*' `elem` name then explode name else fmap pure (resolveFileOrWarn name) explode name = do dir <- asks (parent . 
(.file)) names <- matchDirFileGlob' (toFilePath dir) name mapM resolveFileOrWarn names matchDirFileGlob' dir glob = catch (liftIO (matchDirFileGlob minBound cabalFileVersion dir glob)) ( \(e :: IOException) -> if isUserError e then do prettyWarnL [ flow "Wildcard does not match any files:" , style File $ fromString glob , line <> flow "in directory:" , style Dir $ fromString dir ] pure [] else throwIO e ) -- | Gets all of the modules, files, build files, and data files that constitute -- the package. This is primarily used for dirtiness checking during build, as -- well as use by "stack ghci" getPackageFile :: ( HasEnvConfig s, MonadReader s m, MonadThrow m, MonadUnliftIO m ) => Package -> Path Abs File -> m PackageComponentFile getPackageFile pkg cabalFP = debugBracket ("getPackageFiles" <+> pretty cabalFP) $ do let pkgDir = parent cabalFP distDir <- distDirFromDir pkgDir bc <- view buildConfigL cabalVer <- view cabalVersionL packageComponentFile <- runRIO (GetPackageFileContext cabalFP distDir bc cabalVer) (packageDescModulesAndFiles pkg) setupFiles <- if pkg.buildType == Cabal.Custom then do let setupHsPath = pkgDir relFileSetupHs setupLhsPath = pkgDir relFileSetupLhs setupHsExists <- doesFileExist setupHsPath if setupHsExists then pure (S.singleton setupHsPath) else do setupLhsExists <- doesFileExist setupLhsPath if setupLhsExists then pure (S.singleton setupLhsPath) else pure S.empty else pure S.empty moreBuildFiles <- fmap (S.insert cabalFP . 
S.union setupFiles) $ do let hpackPath = pkgDir relFileHpackPackageConfig hpackExists <- doesFileExist hpackPath pure $ if hpackExists then S.singleton hpackPath else S.empty pure packageComponentFile { packageExtraFile = moreBuildFiles <> packageComponentFile.packageExtraFile } stackPackageFileFromCabal :: Cabal.PackageDescription -> StackPackageFile stackPackageFileFromCabal cabalPkg = StackPackageFile (Cabal.extraSrcFiles cabalPkg) (Cabal.dataDir cabalPkg) (Cabal.dataFiles cabalPkg) insertComponentFile :: PackageComponentFile -> (NamedComponent, ComponentFile) -> PackageComponentFile insertComponentFile packageCompFile (name, compFile) = PackageComponentFile nCompFile nDotCollec packageExtraFile nWarnings where (ComponentFile moduleFileMap dotCabalFileList warningsCollec) = compFile (PackageComponentFile modules files packageExtraFile warnings) = packageCompFile nCompFile = M.insert name moduleFileMap modules nDotCollec = M.insert name dotCabalFileList files nWarnings = warningsCollec ++ warnings stack-2.15.7/src/Stack/Path.hs0000644000000000000000000003253514604306201014171 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Types and functions related to Stack's @path@ command. 
module Stack.Path ( EnvConfigPathInfo , path , pathsFromRunner , pathsFromConfig , pathsFromEnvConfig ) where import Data.List ( intercalate ) import qualified Data.Text as T import qualified Data.Text.IO as T import Path ( (), parent ) import Path.Extra ( toFilePathNoTrailingSep ) import RIO.Process ( HasProcessContext (..), exeSearchPathL ) import Stack.Config ( determineStackRootAndOwnership ) import Stack.Constants ( docDirSuffix, stackGlobalConfigOptionName , stackRootOptionName ) import Stack.Constants.Config ( distRelativeDir ) import Stack.GhcPkg as GhcPkg import Stack.Prelude hiding ( pi ) import Stack.Runners ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..), projectRootL , stackYamlL ) import Stack.Types.BuildOptsMonoid ( buildOptsMonoidHaddockL ) import Stack.Types.CompilerPaths ( CompilerPaths (..), HasCompiler (..), getCompilerPath ) import Stack.Types.Config ( Config (..), HasConfig (..), stackGlobalConfigL ) import Stack.Types.EnvConfig ( EnvConfig, HasEnvConfig (..), bindirCompilerTools , hpcReportDir, installationRootDeps, installationRootLocal , packageDatabaseDeps, packageDatabaseExtra , packageDatabaseLocal ) import qualified Stack.Types.EnvConfig as EnvConfig import Stack.Types.GHCVariant ( HasGHCVariant (..) ) import Stack.Types.GlobalOpts ( GlobalOpts (..), globalOptsBuildOptsMonoidL ) import Stack.Types.Platform ( HasPlatform (..) ) import Stack.Types.Runner ( HasRunner (..), Runner, globalOptsL ) import qualified System.FilePath as FP -- | Print out useful path information in a human-readable format (and support -- others later). path :: [Text] -> RIO Runner () path keys = do let -- filter the chosen paths in flags (keys), or show all of them if no -- specific paths chosen. 
filterKeys (_, key, _) = null keys || elem key keys goodPathsFromRunner = null keys || elem stackRootOptionName' keys goodPathsFromConfig = filter filterKeys pathsFromConfig goodPathsFromEnvConfig = filter filterKeys pathsFromEnvConfig toKeyPath (_, key, p) = (key, p) goodPathsFromConfig' = map toKeyPath goodPathsFromConfig singlePath = (if goodPathsFromRunner then 1 else 0) + length goodPathsFromConfig + length goodPathsFromEnvConfig == 1 toEither (_, k, UseHaddocks a) = Left (k, a) toEither (_, k, WithoutHaddocks a) = Right (k, a) (with, without) = partitionEithers $ map toEither goodPathsFromEnvConfig when goodPathsFromRunner $ printKeysWithRunner singlePath unless (null goodPathsFromConfig') $ runHaddockWithConfig $ printKeysWithConfig goodPathsFromConfig' singlePath unless (null without) $ runHaddockWithEnvConfig False $ printKeysWithEnvConfig without singlePath unless (null with) $ runHaddockWithEnvConfig True $ printKeysWithEnvConfig with singlePath printKeysWithRunner :: Bool -> RIO Runner () printKeysWithRunner single = do clArgs <- view $ globalOptsL . 
to (.configMonoid) liftIO $ do (_, stackRoot, _) <- determineStackRootAndOwnership clArgs let prefix = if single then "" else stackRootOptionName' <> ": " T.putStrLn $ prefix <> T.pack (toFilePathNoTrailingSep stackRoot) printKeysWithConfig :: HasConfig env => [(Text, Config -> Text)] -> Bool -> RIO env () printKeysWithConfig extractors single = view configL >>= printKeys extractors single printKeysWithEnvConfig :: HasEnvConfig env => [(Text, EnvConfigPathInfo -> Text)] -> Bool -> RIO env () printKeysWithEnvConfig extractors single = fillEnvConfigPathInfo >>= printKeys extractors single printKeys :: [(Text, info -> Text)] -> Bool -> info -> RIO env () printKeys extractors single info = do liftIO $ forM_ extractors $ \(key, extractPath) -> do let prefix = if single then "" else key <> ": " T.putStrLn $ prefix <> extractPath info runHaddockWithEnvConfig :: Bool -> RIO EnvConfig () -> RIO Runner () runHaddockWithEnvConfig x action = runHaddock x (withDefaultEnvConfig action) runHaddockWithConfig :: RIO Config () -> RIO Runner () runHaddockWithConfig = runHaddock False runHaddock :: Bool -> RIO Config () -> RIO Runner () runHaddock x action = local modifyConfig $ withConfig YesReexec action where modifyConfig = set (globalOptsL . globalOptsBuildOptsMonoidL . buildOptsMonoidHaddockL) (Just x) fillEnvConfigPathInfo :: HasEnvConfig env => RIO env EnvConfigPathInfo fillEnvConfigPathInfo = do -- We must use a BuildConfig from an EnvConfig to ensure that it contains the -- full environment info including GHC paths etc. buildConfig <- view $ envConfigL . buildConfigL -- This is the modified 'bin-path', -- including the local GHC or MSYS if not configured to operate on -- global GHC. -- It was set up in 'withBuildConfigAndLock -> withBuildConfigExt -> setupEnv'. -- So it's not the *minimal* override path. snapDb <- packageDatabaseDeps localDb <- packageDatabaseLocal extraDbs <- packageDatabaseExtra globalDb <- view $ compilerPathsL . 
to (.globalDB) snapRoot <- installationRootDeps localRoot <- installationRootLocal toolsDir <- bindirCompilerTools hoogleRoot <- EnvConfig.hoogleRoot distDir <- distRelativeDir hpcDir <- hpcReportDir compiler <- getCompilerPath pure EnvConfigPathInfo { buildConfig , snapDb , localDb , globalDb , snapRoot , localRoot , toolsDir , hoogleRoot , distDir , hpcDir , extraDbs , compiler } data EnvConfigPathInfo = EnvConfigPathInfo { buildConfig :: !BuildConfig , snapDb :: !(Path Abs Dir) , localDb :: !(Path Abs Dir) , globalDb :: !(Path Abs Dir) , snapRoot :: !(Path Abs Dir) , localRoot :: !(Path Abs Dir) , toolsDir :: !(Path Abs Dir) , hoogleRoot :: !(Path Abs Dir) , distDir :: Path Rel Dir , hpcDir :: !(Path Abs Dir) , extraDbs :: ![Path Abs Dir] , compiler :: !(Path Abs File) } instance HasPlatform EnvConfigPathInfo where platformL = configL . platformL {-# INLINE platformL #-} platformVariantL = configL . platformVariantL {-# INLINE platformVariantL #-} instance HasLogFunc EnvConfigPathInfo where logFuncL = configL . logFuncL instance HasRunner EnvConfigPathInfo where runnerL = configL . runnerL instance HasStylesUpdate EnvConfigPathInfo where stylesUpdateL = runnerL . stylesUpdateL instance HasTerm EnvConfigPathInfo where useColorL = runnerL . useColorL termWidthL = runnerL . termWidthL instance HasGHCVariant EnvConfigPathInfo where ghcVariantL = configL . ghcVariantL {-# INLINE ghcVariantL #-} instance HasConfig EnvConfigPathInfo where configL = buildConfigL . lens (.config) (\x y -> x { config = y }) {-# INLINE configL #-} instance HasPantryConfig EnvConfigPathInfo where pantryConfigL = configL . pantryConfigL instance HasProcessContext EnvConfigPathInfo where processContextL = configL . processContextL instance HasBuildConfig EnvConfigPathInfo where buildConfigL = lens (.buildConfig) (\x y -> x { buildConfig = y }) . buildConfigL data UseHaddocks a = UseHaddocks a | WithoutHaddocks a -- | The paths of interest to a user which do require a 'Config' or 'EnvConfig'. 
-- The first tuple string is used for a description that the optparse flag uses, -- and the second string as a machine-readable key and also for @--foo@ flags. -- The user can choose a specific path to list like @--stack-root@. But really -- it's mainly for the documentation aspect. pathsFromRunner :: (String, Text) pathsFromRunner = ("Global Stack root directory", stackRootOptionName') -- | The paths of interest to a user which do require an 'EnvConfig'. The first -- tuple string is used for a description that the optparse flag uses, and the -- second string as a machine-readable key and also for @--foo@ flags. The user -- can choose a specific path to list like @--stack-root@. But really it's -- mainly for the documentation aspect. -- -- When printing output we generate @Config@ and pass it to the function -- to generate an appropriate string. Trailing slashes are removed, see #506. pathsFromConfig :: [(String, Text, Config -> Text)] pathsFromConfig = [ ( "Global Stack configuration file" , T.pack stackGlobalConfigOptionName , view (stackGlobalConfigL . to toFilePath . to T.pack) ) , ( "Install location for GHC and other core tools (see 'stack ls tools' command)" , "programs" , view (configL . to (.localPrograms) . to toFilePathNoTrailingSep . to T.pack) ) , ( "Directory where Stack installs executables (e.g. ~/.local/bin (Unix-like OSs) or %APPDATA%\\local\\bin (Windows))" , "local-bin" , view $ configL . to (.localBin) . to toFilePathNoTrailingSep . to T.pack ) ] -- | The paths of interest to a user which require a 'EnvConfig'. The first -- tuple string is used for a description that the optparse flag uses, and the -- second string as a machine-readable key and also for @--foo@ flags. The user -- can choose a specific path to list like @--project-root@. But really it's -- mainly for the documentation aspect. -- -- When printing output we generate @EnvConfigPathInfo@ and pass it to the -- function to generate an appropriate string. 
Trailing slashes are removed, see -- #506. pathsFromEnvConfig :: [(String, Text, UseHaddocks (EnvConfigPathInfo -> Text))] pathsFromEnvConfig = [ ( "Project root (derived from stack.yaml file)" , "project-root" , WithoutHaddocks $ view (projectRootL . to toFilePathNoTrailingSep . to T.pack) ) , ( "Configuration location (where the stack.yaml file is)" , "config-location" , WithoutHaddocks $ view (stackYamlL . to toFilePath . to T.pack) ) , ( "PATH environment variable" , "bin-path" , WithoutHaddocks $ T.pack . intercalate [FP.searchPathSeparator] . view exeSearchPathL ) , ( "Compiler binary (e.g. ghc)" , "compiler-exe" , WithoutHaddocks $ T.pack . toFilePath . (.compiler) ) , ( "Directory containing the compiler binary (e.g. ghc)" , "compiler-bin" , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . parent . (.compiler) ) , ( "Directory containing binaries specific to a particular compiler" , "compiler-tools-bin" , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . (.toolsDir) ) , ( "Extra include directories" , "extra-include-dirs" , WithoutHaddocks $ T.intercalate ", " . map T.pack . (.extraIncludeDirs) . view configL ) , ( "Extra library directories" , "extra-library-dirs" , WithoutHaddocks $ T.intercalate ", " . map T.pack . (.extraLibDirs) . view configL ) , ( "Snapshot package database" , "snapshot-pkg-db" , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . (.snapDb) ) , ( "Local project package database" , "local-pkg-db" , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . (.localDb) ) , ( "Global package database" , "global-pkg-db" , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . (.globalDb) ) , ( "GHC_PACKAGE_PATH environment variable" , "ghc-package-path" , WithoutHaddocks $ \pi -> mkGhcPackagePath True pi.localDb pi.snapDb pi.extraDbs pi.globalDb ) , ( "Snapshot installation root" , "snapshot-install-root" , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . 
(.snapRoot) ) , ( "Local project installation root" , "local-install-root" , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . (.localRoot) ) , ( "Snapshot documentation root" , "snapshot-doc-root" , UseHaddocks $ \pi -> T.pack (toFilePathNoTrailingSep (pi.snapRoot docDirSuffix)) ) , ( "Local project documentation root" , "local-doc-root" , UseHaddocks $ \pi -> T.pack (toFilePathNoTrailingSep (pi.localRoot docDirSuffix)) ) , ( "Local project documentation root" , "local-hoogle-root" , UseHaddocks $ T.pack . toFilePathNoTrailingSep . (.hoogleRoot) ) , ( "Dist work directory, relative to package directory" , "dist-dir" , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . (.distDir) ) , ( "Where HPC reports and tix files are stored" , "local-hpc-root" , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . (.hpcDir) ) ] -- | 'Text' equivalent of 'stackRootOptionName'. stackRootOptionName' :: Text stackRootOptionName' = T.pack stackRootOptionName stack-2.15.7/src/Stack/Prelude.hs0000644000000000000000000003161514604306201014673 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Prelude ( withSystemTempDir , withKeepSystemTempDir , sinkProcessStderrStdout , sinkProcessStdout , logProcessStderrStdout , readProcessNull , withProcessContext , stripCR , prompt , promptPassword , promptBool , FirstTrue (..) , fromFirstTrue , defaultFirstTrue , FirstFalse (..) , fromFirstFalse , defaultFirstFalse , writeBinaryFileAtomic , bugReport , bugPrettyReport , blankLine , putUtf8Builder , putBuilder , ppException , prettyThrowIO , prettyThrowM , mcons , MungedPackageId (..) , MungedPackageName (..) , LibraryName (..) , module X -- * Re-exports from the rio-pretty print package , HasStylesUpdate (..) , HasTerm (..) , Pretty (..) , PrettyException (..) , PrettyRawSnapshotLocation (..) , StyleDoc , Style (..) , StyleSpec , StylesUpdate (..) 
, (<+>) , align , bulletedList , debugBracket , defaultStyles , displayWithColor , encloseSep , fill , fillSep , foldr' , fromPackageId , fromPackageName , flow , hang , hcat , hsep , indent , line , logLevelToStyle , mkNarrativeList , parens , parseStylesUpdateFromString , prettyDebug , prettyDebugL , prettyError , prettyErrorL , prettyGeneric , prettyInfo , prettyInfoL , prettyInfoS , prettyNote , prettyNoteL , prettyNoteS , prettyWarn , prettyWarnL , prettyWarnNoIndent , prettyWarnS , punctuate , sep , softbreak , softline , spacedBulletedList , string , style , vsep ) where import Data.Monoid as X ( Any (..), Endo (..), First (..), Sum (..) ) import Data.Conduit as X ( ConduitM, runConduit, (.|) ) import qualified Data.Conduit.Binary as CB import qualified Data.Conduit.List as CL import Data.Conduit.Process.Typed ( byteStringInput, createSource, withLoggedProcess_ ) import Data.Foldable ( Foldable(foldr') ) import qualified Data.Text.IO as T import Distribution.Types.LibraryName ( LibraryName (..) ) import Distribution.Types.MungedPackageId ( MungedPackageId (..) ) import Distribution.Types.MungedPackageName ( MungedPackageName (..) ) import Pantry as X hiding ( Package (..), loadSnapshot ) import Path as X ( Abs, Dir, File, Path, Rel, toFilePath ) import qualified Path.IO import RIO as X import RIO.File as X hiding ( writeBinaryFileAtomic ) import RIO.PrettyPrint ( HasStylesUpdate (..), HasTerm (..), Pretty (..), Style (..) 
, StyleDoc, (<+>), align, blankLine, bulletedList , debugBracket, displayWithColor, encloseSep, fill, fillSep , flow, hang, hcat, hsep, indent, line, logLevelToStyle , mkNarrativeList, parens, prettyDebug, prettyDebugL , prettyError, prettyErrorL, prettyGeneric, prettyInfo , prettyInfoL, prettyInfoS, prettyNote, prettyNoteL , prettyNoteS, prettyWarn, prettyWarnL, prettyWarnNoIndent , prettyWarnS, punctuate, sep, softbreak, softline , spacedBulletedList, string, style, stylesUpdateL, useColorL , vsep ) import RIO.PrettyPrint.DefaultStyles (defaultStyles) import RIO.PrettyPrint.PrettyException ( PrettyException (..), ppException, prettyThrowIO , prettyThrowM ) import RIO.PrettyPrint.StylesUpdate ( StylesUpdate (..), parseStylesUpdateFromString ) import RIO.PrettyPrint.Types ( StyleSpec ) import RIO.Process ( HasProcessContext (..), ProcessConfig, ProcessContext , closed, getStderr, getStdout, proc, readProcess_, setStderr , setStdin, setStdout, waitExitCode, withProcessWait_ , workingDirL ) import qualified RIO.Text as T import System.IO.Echo ( withoutInputEcho ) -- | Path version withSystemTempDir :: MonadUnliftIO m => String -> (Path Abs Dir -> m a) -> m a withSystemTempDir str inner = withRunInIO $ \run -> Path.IO.withSystemTempDir str $ run . inner -- | Like `withSystemTempDir`, but the temporary directory is not deleted. withKeepSystemTempDir :: MonadUnliftIO m => String -> (Path Abs Dir -> m a) -> m a withKeepSystemTempDir str inner = withRunInIO $ \run -> do path <- Path.IO.getTempDir dir <- Path.IO.createTempDir path str run $ inner dir -- | Consume the stdout and stderr of a process feeding strict 'ByteString's to -- the consumers. -- -- Throws a 'ReadProcessException' if unsuccessful in launching, or -- 'ExitCodeException' if the process itself fails. sinkProcessStderrStdout :: forall e o env. 
(HasProcessContext env, HasLogFunc env, HasCallStack) => String -- ^ Command -> [String] -- ^ Command line arguments -> ConduitM ByteString Void (RIO env) e -- ^ Sink for stderr -> ConduitM ByteString Void (RIO env) o -- ^ Sink for stdout -> RIO env (e,o) sinkProcessStderrStdout name args sinkStderr sinkStdout = proc name args $ \pc0 -> do let pc = setStdout createSource $ setStderr createSource -- Don't use closed, since that can break ./configure scripts -- See https://github.com/commercialhaskell/stack/pull/4722 $ setStdin (byteStringInput "") pc0 withProcessWait_ pc $ \p -> (runConduit (getStderr p .| sinkStderr) `concurrently` runConduit (getStdout p .| sinkStdout)) <* waitExitCode p -- | Consume the stdout of a process feeding strict 'ByteString's to a consumer. -- If the process fails, spits out stdout and stderr as error log -- level. Should not be used for long-running processes or ones with -- lots of output; for that use 'sinkProcessStderrStdout'. -- -- Throws a 'ReadProcessException' if unsuccessful. sinkProcessStdout :: (HasProcessContext env, HasLogFunc env, HasCallStack) => String -- ^ Command -> [String] -- ^ Command line arguments -> ConduitM ByteString Void (RIO env) a -- ^ Sink for stdout -> RIO env a sinkProcessStdout name args sinkStdout = proc name args $ \pc -> withLoggedProcess_ (setStdin closed pc) $ \p -> runConcurrently $ Concurrently (runConduit $ getStderr p .| CL.sinkNull) *> Concurrently (runConduit $ getStdout p .| sinkStdout) logProcessStderrStdout :: (HasCallStack, HasProcessContext env, HasLogFunc env) => ProcessConfig stdin stdoutIgnored stderrIgnored -> RIO env () logProcessStderrStdout pc = withLoggedProcess_ pc $ \p -> let logLines = CB.lines .| CL.mapM_ (logInfo . displayBytesUtf8) in runConcurrently $ Concurrently (runConduit $ getStdout p .| logLines) *> Concurrently (runConduit $ getStderr p .| logLines) -- | Read from the process, ignoring any output. -- -- Throws a 'ReadProcessException' exception if the process fails. 
readProcessNull :: (HasProcessContext env, HasLogFunc env, HasCallStack) => String -- ^ Command -> [String] -- ^ Command line arguments -> RIO env () readProcessNull name args = -- We want the output to appear in any exceptions, so we capture and drop it void $ proc name args readProcess_ -- | Use the new 'ProcessContext', but retain the working directory -- from the parent environment. withProcessContext :: HasProcessContext env => ProcessContext -> RIO env a -> RIO env a withProcessContext pcNew inner = do pcOld <- view processContextL let pcNew' = set workingDirL (view workingDirL pcOld) pcNew local (set processContextL pcNew') inner -- | Remove a trailing carriage pure if present stripCR :: Text -> Text stripCR = T.dropSuffix "\r" -- | Prompt the user by sending text to stdout, and taking a line of -- input from stdin. prompt :: MonadIO m => Text -> m Text prompt txt = liftIO $ do T.putStr txt hFlush stdout T.getLine -- | Prompt the user by sending text to stdout, and collecting a line -- of input from stdin. While taking input from stdin, input echoing is -- disabled, to hide passwords. -- -- Based on code from cabal-install, Distribution.Client.Upload promptPassword :: MonadIO m => Text -> m Text promptPassword txt = liftIO $ do T.putStr txt hFlush stdout -- Save/restore the terminal echoing status (no echoing for entering -- the password). password <- withoutInputEcho T.getLine -- Since the user's newline is not echoed, one needs to be inserted. T.putStrLn "" pure password -- | Prompt the user by sending text to stdout, and collecting a line of -- input from stdin. If something other than "y" or "n" is entered, then -- print a message indicating that "y" or "n" is expected, and ask -- again. promptBool :: MonadIO m => Text -> m Bool promptBool txt = liftIO $ do input <- prompt txt case input of "y" -> pure True "n" -> pure False _ -> do T.putStrLn "Please press either 'y' or 'n', and then enter." 
promptBool txt -- | Like @First Bool@, but the default is @True@. newtype FirstTrue = FirstTrue { firstTrue :: Maybe Bool } deriving (Eq, Ord, Show) instance Semigroup FirstTrue where FirstTrue (Just x) <> _ = FirstTrue (Just x) FirstTrue Nothing <> x = x instance Monoid FirstTrue where mempty = FirstTrue Nothing mappend = (<>) -- | Get the 'Bool', defaulting to 'True' fromFirstTrue :: FirstTrue -> Bool fromFirstTrue = fromMaybe True . (.firstTrue) -- | Helper for filling in default values defaultFirstTrue :: FirstTrue -> Bool defaultFirstTrue _ = True -- | Like @First Bool@, but the default is @False@. newtype FirstFalse = FirstFalse { firstFalse :: Maybe Bool } deriving (Eq, Ord, Show) instance Semigroup FirstFalse where FirstFalse (Just x) <> _ = FirstFalse (Just x) FirstFalse Nothing <> x = x instance Monoid FirstFalse where mempty = FirstFalse Nothing mappend = (<>) -- | Get the 'Bool', defaulting to 'False' fromFirstFalse :: FirstFalse -> Bool fromFirstFalse = fromMaybe False . (.firstFalse) -- | Helper for filling in default values defaultFirstFalse :: FirstFalse -> Bool defaultFirstFalse _ = False -- | Write a @Builder@ to a file and atomically rename. 
writeBinaryFileAtomic :: MonadIO m => Path absrel File -> Builder -> m () writeBinaryFileAtomic fp builder = liftIO $ withBinaryFileAtomic (toFilePath fp) WriteMode (`hPutBuilder` builder) newtype PrettyRawSnapshotLocation = PrettyRawSnapshotLocation RawSnapshotLocation instance Pretty PrettyRawSnapshotLocation where pretty (PrettyRawSnapshotLocation (RSLCompiler compiler)) = fromString $ T.unpack $ utf8BuilderToText $ display compiler pretty (PrettyRawSnapshotLocation (RSLUrl url Nothing)) = style Url (fromString $ T.unpack url) pretty (PrettyRawSnapshotLocation (RSLUrl url (Just blob))) = fillSep [ style Url (fromString $ T.unpack url) , parens $ fromString $ T.unpack $ utf8BuilderToText $ display blob ] pretty (PrettyRawSnapshotLocation (RSLFilePath resolved)) = style File (fromString $ show $ resolvedRelative resolved) pretty (PrettyRawSnapshotLocation (RSLSynonym syn)) = fromString $ show syn -- | Report a bug in Stack. bugReport :: String -> String -> String bugReport code msg = "Error: " ++ code ++ "\n" ++ bugDeclaration ++ " " ++ msg ++ " " ++ bugRequest -- | Report a pretty bug in Stack. bugPrettyReport :: String -> StyleDoc -> StyleDoc bugPrettyReport code msg = "Error:" <+> fromString code <> line <> flow bugDeclaration <+> msg <+> flow bugRequest -- | Bug declaration message. bugDeclaration :: String bugDeclaration = "The impossible happened!" -- | Bug report message. bugRequest :: String bugRequest = "Please report this bug at Stack's repository." -- | Maybe cons. mcons :: Maybe a -> [a] -> [a] mcons ma as = maybe as (:as) ma -- | Write a 'Utf8Builder' to the standard output stream. putUtf8Builder :: MonadIO m => Utf8Builder -> m () putUtf8Builder = putBuilder . getUtf8Builder -- | Write a 'Builder' to the standard output stream. putBuilder :: MonadIO m => Builder -> m () putBuilder = hPutBuilder stdout -- | Convert a package identifier to a value of a string-like type. fromPackageId :: IsString a => PackageIdentifier -> a fromPackageId = fromString . 
packageIdentifierString -- | Convert a package name to a value of a string-like type. fromPackageName :: IsString a => PackageName -> a fromPackageName = fromString . packageNameString stack-2.15.7/src/Stack/Query.hs0000644000000000000000000001163214604306201014375 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Types and functions related to Stack's @query@ command. module Stack.Query ( queryCmd , queryBuildInfo ) where import Data.Aeson ( Value (Object, Array), (.=), object ) import qualified Data.Aeson.Key as Key import qualified Data.Aeson.KeyMap as KeyMap import Data.List ( isPrefixOf ) import qualified Data.Text as T import Data.Text.Encoding ( decodeUtf8 ) import qualified Data.Text.IO as TIO import Data.Text.Read ( decimal ) import qualified Data.Vector as V import qualified Data.Yaml as Yaml import Path ( parent ) import Stack.Build.Source ( projectLocalPackages ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) import Stack.Types.BuildConfig ( wantedCompilerVersionL ) import Stack.Types.Compiler ( compilerVersionText ) import Stack.Types.EnvConfig ( HasEnvConfig, actualCompilerVersionL ) import Stack.Types.Runner ( Runner ) import Stack.Types.Package ( LocalPackage (..), Package (..) ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Query"module. 
data QueryException = SelectorNotFound ![Text] | IndexOutOfRange ![Text] | NoNumericSelector ![Text] | CannotApplySelector !Value ![Text] deriving (Show, Typeable) instance Exception QueryException where displayException (SelectorNotFound sels) = err "[S-4419]" "Selector not found" sels displayException (IndexOutOfRange sels) = err "[S-8422]" "Index out of range" sels displayException (NoNumericSelector sels) = err "[S-4360]" "Encountered array and needed numeric selector" sels displayException (CannotApplySelector value sels) = err "[S-1711]" ("Cannot apply selector to " ++ show value) sels -- | Helper function for 'QueryException' instance of 'Show' err :: String -> String -> [Text] -> String err msg code sels = "Error: " ++ code ++ "\n" ++ msg ++ ": " ++ show sels -- | Function underlying the @stack query@ command. queryCmd :: [String] -- ^ Selectors. -> RIO Runner () queryCmd selectors = withConfig YesReexec $ withDefaultEnvConfig $ queryBuildInfo $ map T.pack selectors -- | Query information about the build and print the result to stdout in YAML -- format. queryBuildInfo :: HasEnvConfig env => [Text] -- ^ Selectors. -> RIO env () queryBuildInfo selectors0 = rawBuildInfo >>= select id selectors0 >>= liftIO . TIO.putStrLn . addGlobalHintsComment . decodeUtf8 . Yaml.encode where select _ [] value = pure value select front (sel:sels) value = case value of Object o -> case KeyMap.lookup (Key.fromText sel) o of Nothing -> throwIO $ SelectorNotFound sels' Just value' -> cont value' Array v -> case decimal sel of Right (i, "") | i >= 0 && i < V.length v -> cont $ v V.! i | otherwise -> throwIO $ IndexOutOfRange sels' _ -> throwIO $ NoNumericSelector sels' _ -> throwIO $ CannotApplySelector value sels' where cont = select (front . (sel:)) sels sels' = front [sel] -- Include comments to indicate that this portion of the "stack -- query" API is not necessarily stable. 
addGlobalHintsComment | null selectors0 = T.replace globalHintsLine ("\n" <> globalHintsComment <> globalHintsLine) -- Append comment instead of pre-pending. The reasoning here is -- that something *could* expect that the result of 'stack query -- global-hints ghc-boot' is just a string literal. Seems easier -- for to expect the first line of the output to be the literal. | ["global-hints"] `isPrefixOf` selectors0 = (<> ("\n" <> globalHintsComment)) | otherwise = id globalHintsLine = "\nglobal-hints:\n" globalHintsComment = T.concat [ "# Note: global-hints is experimental and may be renamed / removed in the future.\n" , "# See https://github.com/commercialhaskell/stack/issues/3796" ] -- | Get the raw build information object rawBuildInfo :: HasEnvConfig env => RIO env Value rawBuildInfo = do locals <- projectLocalPackages wantedCompiler <- view $ wantedCompilerVersionL . to (utf8BuilderToText . display) actualCompiler <- view $ actualCompilerVersionL . to compilerVersionText pure $ object [ "locals" .= Object (KeyMap.fromList $ map localToPair locals) , "compiler" .= object [ "wanted" .= wantedCompiler , "actual" .= actualCompiler ] ] where localToPair lp = (Key.fromText $ T.pack $ packageNameString p.name, value) where p = lp.package value = object [ "version" .= CabalString p.version , "path" .= toFilePath (parent lp.cabalFP) ] stack-2.15.7/src/Stack/Runners.hs0000644000000000000000000002604014620153446014734 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Utilities for running stack commands. -- -- Instead of using Has-style classes below, the type signatures use -- concrete environments to try and avoid accidentally rerunning -- configuration parsing. For example, we want @withConfig $ -- withConfig $ ...@ to fail. 
module Stack.Runners ( withBuildConfig , withEnvConfig , withDefaultEnvConfig , withConfig , withGlobalProject , withRunnerGlobal , ShouldReexec (..) ) where import qualified Data.ByteString.Lazy.Char8 as L8 import RIO.Process ( findExecutable, mkDefaultProcessContext, proc , readProcess ) import RIO.Time ( addUTCTime, getCurrentTime ) import Stack.Build.Target ( NeedTargets (..) ) import Stack.Config ( getInContainer, getInNixShell, loadConfig, withBuildConfig , withNewLogFunc ) import Stack.Constants ( defaultTerminalWidth, maxTerminalWidth, minTerminalWidth , nixProgName ) import Stack.DefaultColorWhen ( defaultColorWhen ) import qualified Stack.Docker as Docker import qualified Stack.Nix as Nix import Stack.Prelude import Stack.Setup ( setupEnv ) import Stack.Storage.User ( logUpgradeCheck, upgradeChecksSince ) import Stack.Types.BuildOptsCLI ( BuildOptsCLI, defaultBuildOptsCLI ) import Stack.Types.ColorWhen ( ColorWhen (..) ) import Stack.Types.Config ( Config (..) ) import Stack.Types.ConfigMonoid ( ConfigMonoid (..) ) import Stack.Types.Docker ( DockerOpts (..) ) import Stack.Types.EnvConfig ( EnvConfig ) import Stack.Types.GlobalOpts ( GlobalOpts (..) ) import Stack.Types.Nix ( NixOpts (..) ) import Stack.Types.Runner ( Runner (..), globalOptsL, reExecL, stackYamlLocL ) import Stack.Types.StackYamlLoc ( StackYamlLoc (..) ) import Stack.Types.Version ( minorVersion, stackMinorVersion, stackVersion ) import System.Console.ANSI ( hNowSupportsANSI ) import System.Terminal ( getTerminalWidth ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Runners" module. data RunnersException = CommandInvalid | DockerAndNixInvalid | NixWithinDockerInvalid | DockerWithinNixInvalid deriving (Show, Typeable) instance Exception RunnersException where displayException CommandInvalid = "Error: [S-7144]\n" ++ "Cannot use this command with options which override the stack.yaml \ \location." 
displayException DockerAndNixInvalid = "Error: [S-8314]\n" ++ "Cannot use both Docker and Nix at the same time." displayException NixWithinDockerInvalid = "Error: [S-8641]\n" ++ "Cannot use Nix from within a Docker container." displayException DockerWithinNixInvalid = "Error: [S-5107]\n" ++ "Cannot use Docker from within a Nix shell." -- | Ensure that no project settings are used when running 'withConfig'. withGlobalProject :: RIO Runner a -> RIO Runner a withGlobalProject inner = do oldSYL <- view stackYamlLocL case oldSYL of SYLDefault -> local (set stackYamlLocL SYLGlobalProject) inner _ -> throwIO CommandInvalid -- | Helper for 'withEnvConfig' which passes in some default arguments: -- -- * No targets are requested -- -- * Default command line build options are assumed withDefaultEnvConfig :: RIO EnvConfig a -> RIO Config a withDefaultEnvConfig = withEnvConfig AllowNoTargets defaultBuildOptsCLI -- | Upgrade a 'Config' environment to an 'EnvConfig' environment by -- performing further parsing of project-specific configuration (like -- 'withBuildConfig') and then setting up a build environment -- toolchain. This is intended to be run inside a call to -- 'withConfig'. withEnvConfig :: NeedTargets -> BuildOptsCLI -> RIO EnvConfig a -- ^ Action that uses the build config. If Docker is enabled for builds, -- this will be run in a Docker container. -> RIO Config a withEnvConfig needTargets boptsCLI inner = withBuildConfig $ do envConfig <- setupEnv needTargets boptsCLI Nothing logDebug "Starting to execute command inside EnvConfig" runRIO envConfig inner -- | If the settings justify it, should we reexec inside Docker or Nix? data ShouldReexec = YesReexec | NoReexec -- | Load the configuration. Convenience function used -- throughout this module. 
withConfig :: ShouldReexec -> RIO Config a -> RIO Runner a withConfig shouldReexec inner = loadConfig $ \config -> do -- If we have been relaunched in a Docker container, perform in-container -- initialization (switch UID, etc.). We do this after first loading the -- configuration since it must happen ASAP but needs a configuration. view (globalOptsL . to (.dockerEntrypoint)) >>= traverse_ (Docker.entrypoint config) runRIO config $ do -- Catching all exceptions here, since we don't want this -- check to ever cause Stack to stop working shouldUpgradeCheck `catchAny` \e -> logError $ "Error: [S-7353]\n" <> "Error when running shouldUpgradeCheck: " <> displayShow e case shouldReexec of YesReexec -> reexec inner NoReexec -> inner -- | Perform a Docker or Nix reexec, if warranted. Otherwise run the inner -- action. reexec :: RIO Config a -> RIO Config a reexec inner = do nixEnable' <- asks $ (.nix.enable) notifyIfNixOnPath <- asks (.notifyIfNixOnPath) when (not nixEnable' && notifyIfNixOnPath) $ do eNix <- findExecutable nixProgName case eNix of Left _ -> pure () Right nix -> proc nix ["--version"] $ \pc -> do let nixProgName' = style Shell (fromString nixProgName) muteMsg = fillSep [ flow "To mute this message in future, set" , style Shell (flow "notify-if-nix-on-path: false") , flow "in Stack's configuration." 
] reportErr errMsg = prettyWarn $ fillSep [ nixProgName' , flow "is on the PATH" , parens (fillSep ["at", style File (fromString nix)]) , flow "but Stack encountered the following error with" , nixProgName' , style Shell "--version" <> ":" ] <> blankLine <> errMsg <> blankLine <> muteMsg <> line res <- tryAny (readProcess pc) case res of Left e -> reportErr (ppException e) Right (ec, out, err) -> case ec of ExitFailure _ -> reportErr $ string (L8.unpack err) ExitSuccess -> do let trimFinalNewline str = case reverse str of '\n' : rest -> reverse rest _ -> str prettyWarn $ fillSep [ fromString (trimFinalNewline $ L8.unpack out) , flow "is on the PATH" , parens (fillSep ["at", style File (fromString nix)]) , flow "but Stack's Nix integration is disabled." , muteMsg ] <> line dockerEnable' <- asks (.docker.enable) case (nixEnable', dockerEnable') of (True, True) -> throwIO DockerAndNixInvalid (False, False) -> inner -- Want to use Nix (True, False) -> do whenM getInContainer $ throwIO NixWithinDockerInvalid isReexec <- view reExecL if isReexec then inner else Nix.runShellAndExit -- Want to use Docker (False, True) -> do whenM getInNixShell $ throwIO DockerWithinNixInvalid inContainer <- getInContainer if inContainer then do isReexec <- view reExecL if isReexec then inner else throwIO Docker.OnlyOnHostException else Docker.runContainerAndExit -- | Use the 'GlobalOpts' to create a 'Runner' and run the provided -- action. withRunnerGlobal :: GlobalOpts -> RIO Runner a -> IO a withRunnerGlobal go inner = do colorWhen <- maybe defaultColorWhen pure $ getFirst go.configMonoid.colorWhen useColor <- case colorWhen of ColorNever -> pure False ColorAlways -> pure True ColorAuto -> hNowSupportsANSI stderr termWidth <- clipWidth <$> maybe (fromMaybe defaultTerminalWidth <$> getTerminalWidth) pure go.termWidthOpt menv <- mkDefaultProcessContext -- MVar used to ensure the Docker entrypoint is performed exactly once. 
dockerEntrypointMVar <- newMVar False let update = go.stylesUpdate withNewLogFunc go useColor update $ \logFunc -> do runRIO Runner { globalOpts = go , useColor = useColor , logFunc = logFunc , termWidth = termWidth , processContext = menv , dockerEntrypointMVar = dockerEntrypointMVar } inner where clipWidth w | w < minTerminalWidth = minTerminalWidth | w > maxTerminalWidth = maxTerminalWidth | otherwise = w -- | Check if we should recommend upgrading Stack and, if so, recommend it. shouldUpgradeCheck :: RIO Config () shouldUpgradeCheck = do config <- ask when config.recommendUpgrade $ do now <- getCurrentTime let yesterday = addUTCTime (-(24 * 60 * 60)) now checks <- upgradeChecksSince yesterday when (checks == 0) $ do mversion <- getLatestHackageVersion NoRequireHackageIndex "stack" UsePreferredVersions case mversion of -- Compare the minor version so we avoid patch-level, Hackage-only releases. -- See: https://github.com/commercialhaskell/stack/pull/4729#pullrequestreview-227176315 Just (PackageIdentifierRevision _ version _) | minorVersion version > stackMinorVersion -> do prettyWarn $ fillSep [ flow "You are currently using Stack version" , fromString (versionString stackVersion) , flow "but version" , fromString (versionString version) , flow "is available." ] <> blankLine <> fillSep [ "You can try to upgrade by running" , style Shell (flow "stack upgrade") ] <> blankLine <> fillSep [ flow "Tired of seeing this? Add" , style Shell (flow "recommend-stack-upgrade: false") , "to" , pretty config.userConfigPath <> "." ] <> blankLine _ -> pure () logUpgradeCheck now stack-2.15.7/src/Stack/Script.hs0000644000000000000000000004622614604306201014543 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- Types and functions related to Stack's @script@ command. 
module Stack.Script ( ScriptOpts (..) , ScriptExecute (..) , ShouldRun (..) , scriptCmd ) where import Data.ByteString.Builder ( toLazyByteString ) import qualified Data.ByteString.Char8 as S8 import qualified Data.Conduit.List as CL import Data.List.Split ( splitWhen ) import qualified Data.Map.Strict as Map import qualified Data.Set as Set import Distribution.Compiler ( CompilerFlavor (..) ) import Distribution.ModuleName ( ModuleName ) import qualified Distribution.PackageDescription as PD import qualified Distribution.Types.CondTree as C import Distribution.Types.ModuleReexport ( moduleReexportName ) import Distribution.Types.PackageName ( mkPackageName ) import Distribution.Types.VersionRange ( withinRange ) import Distribution.System ( Platform (..) ) import qualified Pantry.SHA256 as SHA256 import Path ( (), filename, fromAbsDir, fromAbsFile, fromRelFile , parent, parseRelDir, replaceExtension, splitExtension ) import Path.IO ( getModificationTime, resolveFile' ) import qualified RIO.Directory as Dir import RIO.Process ( exec, proc, readProcessStdout_, withWorkingDir ) import qualified RIO.Text as T import Stack.Build ( build ) import Stack.Build.Installed ( getInstalled, toInstallMap ) import Stack.Constants ( osIsWindows, relDirScripts ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) import Stack.Setup ( withNewLocalBuildTargets ) import Stack.SourceMap ( getCompilerInfo, immutableLocSha ) import Stack.Types.Compiler ( ActualCompiler (..) ) import Stack.Types.CompilerPaths ( CompilerPaths (..), GhcPkgExe (..), HasCompiler (..) ) import Stack.Types.Config ( Config (..), HasConfig (..), stackRootL ) import Stack.Types.ConfigMonoid ( ConfigMonoid (..) ) import qualified Stack.Types.ConfigMonoid as ConfigMonoid ( ConfigMonoid (..) ) import Stack.Types.DumpPackage ( DumpPackage (..) 
) import Stack.Types.EnvConfig ( EnvConfig (..), HasEnvConfig (..), actualCompilerVersionL , appropriateGhcColorFlag ) import Stack.Types.EnvSettings ( defaultEnvSettings ) import Stack.Types.GlobalOpts ( GlobalOpts (..) ) import Stack.Types.Platform ( HasPlatform (..) ) import Stack.Types.Runner ( Runner, globalOptsL ) import Stack.Types.SourceMap ( CommonPackage (..), DepPackage (..), SourceMap (..) ) import Stack.Types.StackYamlLoc ( StackYamlLoc (..) ) import System.FilePath ( splitDrive ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Script" module. data ScriptException = MutableDependenciesForScript [PackageName] | AmbiguousModuleName ModuleName [PackageName] | ArgumentsWithNoRunInvalid | NoRunWithoutCompilationInvalid | FailedToParseScriptFileAsDirBug (Path Rel File) | FailedToParseFileAsDirBug (Path Abs Dir) deriving (Show, Typeable) instance Exception ScriptException where displayException (MutableDependenciesForScript names) = unlines $ "Error: [S-4994]" : "No mutable packages are allowed in the 'script' command. Mutable \ \packages found:" : map (\name -> "- " ++ packageNameString name) names displayException (AmbiguousModuleName mname pkgs) = unlines $ "Error: [S-1691]" : ( "Module " ++ moduleNameString mname ++ " appears in multiple packages: " ) : [ unwords $ map packageNameString pkgs ] displayException ArgumentsWithNoRunInvalid = "Error: [S-5067]\n" ++ "'--no-run' incompatible with arguments." displayException NoRunWithoutCompilationInvalid = "Error: [S-9469]\n" ++ "'--no-run' requires either '--compile' or '--optimize'." 
displayException (FailedToParseScriptFileAsDirBug fp) = bugReport "[S-5055]" $ "Failed to parse script file name as directory:\n" <> fromRelFile fp <> "\n" displayException (FailedToParseFileAsDirBug p) = bugReport "[S-9464]" $ "Failed to parse path to script file as directory:\n" <> fromAbsDir p <> "\n" -- | Type representing choices of interpreting, compiling (without optimisation) -- and compiling (with optimisation). data ScriptExecute = SEInterpret | SECompile -- ^ Without optimisation. | SEOptimize -- ^ Compile with optimisation. deriving Show -- | Type representing choices of whether to run or not. data ShouldRun = YesRun -- ^ Run. | NoRun -- ^ Do not run. deriving Show -- | Type representing command line options for the @stack script@ command. data ScriptOpts = ScriptOpts { packages :: ![String] , file :: !FilePath , args :: ![String] , compile :: !ScriptExecute , useRoot :: !Bool , ghcOptions :: ![String] , scriptExtraDeps :: ![PackageIdentifierRevision] , shouldRun :: !ShouldRun } deriving Show -- | Run a Stack Script scriptCmd :: ScriptOpts -> RIO Runner () scriptCmd opts = do -- Some warnings in case the user somehow tries to set a stack.yaml location. -- Note that in this functions we use logError instead of logWarn because, -- when using the interpreter mode, only error messages are shown. See: -- https://github.com/commercialhaskell/stack/issues/3007 view (globalOptsL . to (.stackYaml)) >>= \case SYLOverride fp -> logError $ "Ignoring override stack.yaml file for script command: " <> fromString (toFilePath fp) SYLGlobalProject -> logError "Ignoring SYLGlobalProject for script command" SYLDefault -> pure () SYLNoProject _ -> assert False (pure ()) file <- resolveFile' opts.file let scriptFile = filename file isNoRunCompile <- fromFirstFalse . (.noRunCompile) <$> view (globalOptsL . 
to (.configMonoid)) let scriptDir = parent file modifyGO go = go { configMonoid = go.configMonoid { ConfigMonoid.installGHC = FirstTrue $ Just True } , stackYaml = SYLNoProject opts.scriptExtraDeps } (shouldRun, shouldCompile) = if isNoRunCompile then (NoRun, SECompile) else (opts.shouldRun, opts.compile) root <- withConfig NoReexec $ view stackRootL outputDir <- if opts.useRoot then do scriptFileAsDir <- maybe (throwIO $ FailedToParseScriptFileAsDirBug scriptFile) pure (parseRelDir $ fromRelFile scriptFile) let fileAsDir = scriptDir scriptFileAsDir -- We drop the information about the drive. On Windows, in principle, -- the drive could distinguish between two otherwise identical -- fileAsDir (eg C:\MyScript.hs\ D:\MyScript.hs\). In pactice, we -- tolerate that possibility as being unlikely. (_, escaped) = splitDrive (fromAbsDir fileAsDir) escapedRelDir <- maybe (throwIO $ FailedToParseFileAsDirBug fileAsDir) pure (parseRelDir escaped) pure $ root relDirScripts escapedRelDir else pure scriptDir -- path does not necessarily end with an extension. let dropExtension path = pure $ maybe path fst $ splitExtension path exe <- if osIsWindows then replaceExtension ".exe" (outputDir scriptFile) else dropExtension (outputDir scriptFile) case shouldRun of YesRun -> pure () NoRun -> do unless (null opts.args) $ throwIO ArgumentsWithNoRunInvalid case shouldCompile of SEInterpret -> throwIO NoRunWithoutCompilationInvalid SECompile -> pure () SEOptimize -> pure () -- Optimization: if we're compiling, and the executable is newer than the -- source file, run it immediately. 
local (over globalOptsL modifyGO) $ case shouldCompile of SEInterpret -> longWay shouldRun shouldCompile file exe SECompile -> shortCut shouldRun shouldCompile file exe SEOptimize -> shortCut shouldRun shouldCompile file exe where runCompiled shouldRun exe = do case shouldRun of YesRun -> exec (fromAbsFile exe) opts.args NoRun -> prettyInfoL [ flow "Compilation finished, executable available at" , style File (fromString (fromAbsFile exe)) <> "." ] shortCut shouldRun shouldCompile file exe = handleIO (const $ longWay shouldRun shouldCompile file exe) $ do srcMod <- getModificationTime file exeMod <- Dir.getModificationTime (fromAbsFile exe) if srcMod < exeMod then runCompiled shouldRun exe else longWay shouldRun shouldCompile file exe longWay shouldRun shouldCompile file exe = withConfig YesReexec $ withDefaultEnvConfig $ do config <- view configL menv <- liftIO $ config.processContextSettings defaultEnvSettings withProcessContext menv $ do colorFlag <- appropriateGhcColorFlag targetsSet <- case opts.packages of [] -> getPackagesFromImports opts.file -- Using the import parser packages -> do let targets = concatMap wordsComma packages targets' <- mapM parsePackageNameThrowing targets pure $ Set.fromList targets' unless (Set.null targetsSet) $ do -- Optimization: use the relatively cheap ghc-pkg list --simple-output -- to check which packages are installed already. If all needed -- packages are available, we can skip the (rather expensive) build -- call below. GhcPkgExe pkg <- view $ compilerPathsL . to (.pkg) -- https://github.com/haskell/process/issues/251 bss <- snd <$> sinkProcessStderrStdout (toFilePath pkg) ["list", "--simple-output"] CL.sinkNull CL.consume -- FIXME use the package info from envConfigPackages, or is that crazy? 
let installed = Set.fromList $ map toPackageName $ words $ S8.unpack $ S8.concat bss if Set.null $ Set.difference (Set.map packageNameString targetsSet) installed then logDebug "All packages already installed" else do logDebug "Missing packages, performing installation" let targets = map (T.pack . packageNameString) $ Set.toList targetsSet withNewLocalBuildTargets targets $ build Nothing let ghcArgs = concat [ ["-i", "-i" ++ fromAbsDir (parent file)] , ["-hide-all-packages"] , maybeToList colorFlag , map ("-package" ++) $ Set.toList $ Set.insert "base" $ Set.map packageNameString targetsSet , case shouldCompile of SEInterpret -> [] SECompile -> [] SEOptimize -> ["-O2"] , opts.ghcOptions , if opts.useRoot then [ "-outputdir=" ++ fromAbsDir (parent exe) , "-o", fromAbsFile exe ] else [] ] case shouldCompile of SEInterpret -> do interpret <- view $ compilerPathsL . to (.interpreter) exec (toFilePath interpret) (ghcArgs ++ toFilePath file : opts.args) _ -> do -- Use readProcessStdout_ so that (1) if GHC does send any output -- to stdout, we capture it and stop it from being sent to our -- stdout, which could break scripts, and (2) if there's an -- exception, the standard output we did capture will be reported -- to the user. liftIO $ Dir.createDirectoryIfMissing True (fromAbsDir (parent exe)) compilerExeName <- view $ compilerPathsL . to (.compiler) . to toFilePath withWorkingDir (fromAbsDir (parent file)) $ proc compilerExeName (ghcArgs ++ [toFilePath file]) (void . readProcessStdout_) runCompiled shouldRun exe toPackageName = reverse . drop 1 . dropWhile (/= '-') . 
reverse -- Like words, but splits on both commas and spaces wordsComma = splitWhen (\c -> c == ' ' || c == ',') getPackagesFromImports :: FilePath -- ^ script filename -> RIO EnvConfig (Set PackageName) getPackagesFromImports scriptFP = do (pns, mns) <- liftIO $ parseImports <$> S8.readFile scriptFP if Set.null mns then pure pns else Set.union pns <$> getPackagesFromModuleNames mns getPackagesFromModuleNames :: Set ModuleName -> RIO EnvConfig (Set PackageName) getPackagesFromModuleNames mns = do hash <- hashSnapshot withSnapshotCache hash mapSnapshotPackageModules $ \getModulePackages -> do pns <- forM (Set.toList mns) $ \mn -> do pkgs <- getModulePackages mn case pkgs of [] -> pure Set.empty [pn] -> pure $ Set.singleton pn _ -> throwM $ AmbiguousModuleName mn pkgs pure $ Set.unions pns `Set.difference` blacklist hashSnapshot :: RIO EnvConfig SnapshotCacheHash hashSnapshot = do sourceMap <- view $ envConfigL . to (.sourceMap) compilerInfo <- getCompilerInfo let eitherPliHash (pn, dep) | PLImmutable pli <- dep.location = Right $ immutableLocSha pli | otherwise = Left pn deps = Map.toList sourceMap.deps case partitionEithers (map eitherPliHash deps) of ([], pliHashes) -> do let hashedContent = mconcat $ compilerInfo : pliHashes pure $ SnapshotCacheHash (SHA256.hashLazyBytes $ toLazyByteString hashedContent) (mutables, _) -> throwM $ MutableDependenciesForScript mutables mapSnapshotPackageModules :: RIO EnvConfig (Map PackageName (Set ModuleName)) mapSnapshotPackageModules = do sourceMap <- view $ envConfigL . to (.sourceMap) installMap <- toInstallMap sourceMap (_installedMap, globalDumpPkgs, snapshotDumpPkgs, _localDumpPkgs) <- getInstalled installMap let globals = dumpedPackageModules sourceMap.globalPkgs globalDumpPkgs notHidden = Map.filter (not . (.hidden)) notHiddenDeps = notHidden sourceMap.deps installedDeps = dumpedPackageModules notHiddenDeps snapshotDumpPkgs dumpPkgs = Set.fromList $ map (pkgName . 
(.packageIdent)) snapshotDumpPkgs notInstalledDeps = Map.withoutKeys notHiddenDeps dumpPkgs otherDeps <- for notInstalledDeps $ \dep -> do gpd <- liftIO dep.depCommon.gpd Set.fromList <$> allExposedModules gpd -- source map construction process should guarantee unique package names in -- these maps pure $ globals <> installedDeps <> otherDeps dumpedPackageModules :: Map PackageName a -> [DumpPackage] -> Map PackageName (Set ModuleName) dumpedPackageModules pkgs dumpPkgs = let pnames = Map.keysSet pkgs `Set.difference` blacklist in Map.fromList [ (pn, dp.exposedModules) | dp <- dumpPkgs , let PackageIdentifier pn _ = dp.packageIdent , pn `Set.member` pnames ] allExposedModules :: PD.GenericPackageDescription -> RIO EnvConfig [ModuleName] allExposedModules gpd = do Platform curArch curOs <- view platformL curCompiler <- view actualCompilerVersionL let checkCond (PD.OS os) = pure $ os == curOs checkCond (PD.Arch arch) = pure $ arch == curArch checkCond (PD.Impl compiler range) = case curCompiler of ACGhc version -> pure $ compiler == GHC && version `withinRange` range ACGhcGit {} -> pure $ compiler == GHC -- currently we don't do flag checking here checkCond other = Left other mlibrary = snd . C.simplifyCondTree checkCond <$> PD.condLibrary gpd pure $ case mlibrary of Just lib -> PD.exposedModules lib ++ map moduleReexportName (PD.reexportedModules lib) Nothing -> mempty -- | The Stackage project introduced the concept of hidden packages, to deal -- with conflicting module names. However, this is a relatively recent addition -- (at time of writing). See: -- http://www.snoyman.com/blog/2017/01/conflicting-module-names. To kick this -- thing off a bit better, we're included a blacklist of packages that should -- never be auto-parsed in. 
blacklist :: Set PackageName blacklist = Set.fromList [ mkPackageName "Glob" , mkPackageName "HTF" , mkPackageName "async-dejafu" , mkPackageName "binary-ieee754" , mkPackageName "cipher-aes" , mkPackageName "cipher-blowfish" , mkPackageName "cipher-camellia" , mkPackageName "cipher-des" , mkPackageName "cipher-rc4" , mkPackageName "control-monad-free" , mkPackageName "courier" , mkPackageName "crypto-api" , mkPackageName "crypto-cipher-types" , mkPackageName "crypto-numbers" , mkPackageName "crypto-pubkey" , mkPackageName "crypto-random" , mkPackageName "cryptohash" , mkPackageName "cryptohash-conduit" , mkPackageName "cryptohash-md5" , mkPackageName "cryptohash-sha1" , mkPackageName "cryptohash-sha256" , mkPackageName "fay-base" , mkPackageName "gl" , mkPackageName "gtk3" , mkPackageName "hashmap" , mkPackageName "hledger-web" , mkPackageName "hxt-unicode" , mkPackageName "kawhi" , mkPackageName "language-c" , mkPackageName "log" , mkPackageName "monad-extras" , mkPackageName "monads-tf" , mkPackageName "nanospec" , mkPackageName "newtype-generics" , mkPackageName "objective" , mkPackageName "plot-gtk3" , mkPackageName "prompt" , mkPackageName "regex-compat-tdfa" , mkPackageName "regex-pcre-builtin" , mkPackageName "rerebase" , mkPackageName "svg-tree" , mkPackageName "zip" ] parseImports :: ByteString -> (Set PackageName, Set ModuleName) parseImports = fold . mapMaybe (parseLine . stripCR') . 
S8.lines where -- Remove any carriage pure character present at the end, to support -- Windows-style line endings (CRLF) stripCR' bs | S8.null bs = bs | S8.last bs == '\r' = S8.init bs | otherwise = bs stripPrefix x y | x `S8.isPrefixOf` y = Just $ S8.drop (S8.length x) y | otherwise = Nothing parseLine bs0 = do bs1 <- stripPrefix "import " bs0 let bs2 = S8.dropWhile (== ' ') bs1 bs3 = fromMaybe bs2 $ stripPrefix "qualified " bs2 case stripPrefix "\"" bs3 of Just bs4 -> do pn <- parsePackageNameThrowing $ S8.unpack $ S8.takeWhile (/= '"') bs4 Just (Set.singleton pn, Set.empty) Nothing -> Just ( Set.empty , Set.singleton $ fromString $ T.unpack $ decodeUtf8With lenientDecode $ S8.takeWhile (\c -> c /= ' ' && c /= '(') bs3 ) stack-2.15.7/src/Stack/SDist.hs0000644000000000000000000007024214620153446014331 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeFamilies #-} -- Types and functions related to Stack's @sdist@ command. module Stack.SDist ( SDistOpts (..) , sdistCmd , getSDistTarball , checkSDistTarball , checkSDistTarball' , readLocalPackage ) where import qualified Codec.Archive.Tar.Utf8 as Tar import qualified Codec.Archive.Tar.Entry as Tar import qualified Codec.Compression.GZip as GZip import Conduit ( runConduitRes, sourceLazy, sinkFileCautious ) import Control.Concurrent.Execute ( ActionContext (..), Concurrency (..) 
) import qualified Data.ByteString as S import qualified Data.ByteString.Char8 as S8 import qualified Data.ByteString.Lazy as L import Data.Char ( toLower ) import Data.Data ( cast ) import qualified Data.List as List import qualified Data.Map.Strict as Map import qualified Data.Set as Set import qualified Data.Text as T import qualified Data.Text.Encoding as T import qualified Data.Text.Encoding.Error as T import qualified Data.Text.Lazy as TL import qualified Data.Text.Lazy.Encoding as TLE import Data.Time.Clock.POSIX ( getPOSIXTime, utcTimeToPOSIXSeconds ) import Distribution.Package ( Dependency (..) ) import qualified Distribution.PackageDescription as Cabal import qualified Distribution.PackageDescription.Check as Check import qualified Distribution.PackageDescription.Parsec as Cabal import Distribution.PackageDescription.PrettyPrint ( showGenericPackageDescription ) import Distribution.Version ( earlierVersion, hasLowerBound, hasUpperBound, isAnyVersion , orLaterVersion, simplifyVersionRange ) import Path ( (), parent, parseRelDir, parseRelFile ) import Path.IO ( ensureDir, resolveDir' ) import RIO.NonEmpty ( nonEmpty ) import qualified RIO.NonEmpty as NE import Stack.Build ( mkBaseConfigOpts, build, buildLocalTargets ) import Stack.Build.Execute ( ExcludeTHLoading (..), KeepOutputOpen (..) ) import Stack.Build.ExecuteEnv ( withExecuteEnv, withSingleContext ) import Stack.Build.Installed ( getInstalled, toInstallMap ) import Stack.Build.Source ( projectLocalPackages ) import Stack.BuildOpts ( defaultBuildOpts ) import Stack.Constants ( stackProgName, stackProgName' ) import Stack.Constants.Config ( distDirFromDir ) import Stack.Package ( resolvePackage, resolvePackageDescription ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) import Stack.SourceMap ( mkProjectPackage ) import Stack.Types.Build ( TaskType (..) 
) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..), stackYamlL ) import Stack.Types.BuildOpts ( BuildOpts (..) ) import Stack.Types.BuildOptsCLI ( defaultBuildOptsCLI ) import Stack.Types.Config ( Config (..), HasConfig (..) ) import Stack.Types.EnvConfig ( EnvConfig (..), HasEnvConfig (..), actualCompilerVersionL ) import Stack.Types.GhcPkgId ( GhcPkgId ) import Stack.Types.Installed ( InstallMap, Installed (..), InstalledMap , InstalledLibraryInfo (..), installedVersion ) import Stack.Types.Package ( LocalPackage (..), Package (..), PackageConfig (..) , packageIdentifier ) import Stack.Types.Platform ( HasPlatform (..) ) import Stack.Types.PvpBounds ( PvpBounds (..), PvpBoundsType (..) ) import Stack.Types.Runner ( HasRunner, Runner ) import Stack.Types.SourceMap ( CommonPackage (..), ProjectPackage (..), SMWanted (..) , SourceMap (..), ppRoot ) import qualified Stack.Types.SourceMap as SourceMap ( SourceMap (..) ) import Stack.Types.Version ( intersectVersionRanges, nextMajorVersion ) import System.Directory ( copyFile, createDirectoryIfMissing, executable , getModificationTime, getPermissions ) import qualified System.FilePath as FP -- | Type representing \'pretty\' exceptions thrown by functions exported by the -- "Stack.SDist" module. data SDistPrettyException = CheckException (NonEmpty Check.PackageCheck) | CabalFilePathsInconsistentBug (Path Abs File) (Path Abs File) | ToTarPathException String deriving (Show, Typeable) instance Pretty SDistPrettyException where pretty (CheckException xs) = "[S-6439]" <> line <> flow "Package check reported the following errors:" <> line <> bulletedList (map (string . show) (NE.toList xs) :: [StyleDoc]) pretty (CabalFilePathsInconsistentBug cabalFP cabalFP') = "[S-9595]" <> line <> fillSep [ flow "The impossible happened! Two Cabal file paths are \ \inconsistent:" , pretty cabalFP , "and" , pretty cabalFP' <> "." 
] pretty (ToTarPathException e) = "[S-7875]" <> line <> string e instance Exception SDistPrettyException -- | Type representing command line options for @stack sdist@ command. data SDistOpts = SDistOpts { dirsToWorkWith :: [String] -- ^ Directories to package , pvpBounds :: Maybe PvpBounds -- ^ PVP Bounds overrides , ignoreCheck :: Bool -- ^ Whether to ignore check of the package for common errors , buildTarball :: Bool -- ^ Whether to build the tarball , tarPath :: Maybe FilePath -- ^ Where to copy the tarball } -- | Function underlying the @stack sdist@ command. sdistCmd :: SDistOpts -> RIO Runner () sdistCmd sdistOpts = withConfig YesReexec $ withDefaultEnvConfig $ do -- If no directories are specified, build all sdist tarballs. dirs' <- if null sdistOpts.dirsToWorkWith then do dirs <- view $ buildConfigL . to (map ppRoot . Map.elems . (.smWanted.project)) when (null dirs) $ do stackYaml <- view stackYamlL prettyErrorL [ style Shell "stack sdist" , flow "expects a list of targets, and otherwise defaults to all \ \of the project's packages. However, the configuration at" , pretty stackYaml , flow "contains no packages, so no sdist tarballs will be \ \generated." ] exitFailure pure dirs else mapM resolveDir' sdistOpts.dirsToWorkWith forM_ dirs' $ \dir -> do (tarName, tarBytes, _mcabalRevision) <- getSDistTarball sdistOpts.pvpBounds dir distDir <- distDirFromDir dir tarPath <- (distDir ) <$> parseRelFile tarName ensureDir (parent tarPath) runConduitRes $ sourceLazy tarBytes .| sinkFileCautious (toFilePath tarPath) prettyInfoL [flow "Wrote sdist-format compressed archive to" , pretty tarPath <> "." ] checkSDistTarball sdistOpts tarPath forM_ sdistOpts.tarPath $ copyTarToTarPath tarPath tarName where copyTarToTarPath tarPath tarName targetDir = liftIO $ do let targetTarPath = targetDir FP. 
tarName createDirectoryIfMissing True $ FP.takeDirectory targetTarPath copyFile (toFilePath tarPath) targetTarPath -- | Given the path to a local package, creates its source distribution tarball. -- -- While this yields a 'FilePath', the name of the tarball, this tarball is not -- written to the disk and instead yielded as a lazy bytestring. getSDistTarball :: HasEnvConfig env => Maybe PvpBounds -- ^ Override Config value -> Path Abs Dir -- ^ Path to local package -> RIO env ( FilePath , L.ByteString , Maybe (PackageIdentifier, L.ByteString) ) -- ^ Filename, tarball contents, and option Cabal file revision to upload getSDistTarball mpvpBounds pkgDir = do config <- view configL let PvpBounds pvpBounds asRevision = fromMaybe config.pvpBounds mpvpBounds tweakCabal = pvpBounds /= PvpBoundsNone pkgFp = toFilePath pkgDir lp <- readLocalPackage pkgDir forM_ lp.package.setupDeps $ \customSetupDeps -> case nonEmpty (map (T.pack . packageNameString) (Map.keys customSetupDeps)) of Just nonEmptyDepTargets -> do eres <- buildLocalTargets nonEmptyDepTargets case eres of Left err -> logError $ "Error: [S-8399]\n" <> "Error building custom-setup dependencies: " <> displayShow err Right _ -> pure () Nothing -> prettyWarnS "unexpected empty custom-setup dependencies." sourceMap <- view $ envConfigL . to (.sourceMap) installMap <- toInstallMap sourceMap (installedMap, _globalDumpPkgs, _snapshotDumpPkgs, _localDumpPkgs) <- getInstalled installMap let deps = Map.fromList [ (pid, libInfo.ghcPkgId) | (_, Library pid libInfo) <- Map.elems installedMap] prettyInfoL [ flow "Getting the file list for" , style File (fromString pkgFp) <> "." ] (fileList, cabalFP) <- getSDistFileList lp deps prettyInfoL [ flow "Building a compressed archive file in the sdist format for" , style File (fromString pkgFp) <> "." ] files <- normalizeTarballPaths (map (T.unpack . stripCR . T.pack) (lines fileList)) -- We're going to loop below and eventually find the Cabal file. 
When we do, -- we'll upload this reference, if the mpvpBounds value indicates that we -- should be uploading a Cabal file revision. cabalFileRevisionRef <- liftIO (newIORef Nothing) -- NOTE: Could make this use lazy I/O to only read files as needed for upload -- (both GZip.compress and Tar.write are lazy). However, it seems less error -- prone and more predictable to read everything in at once, so that's what -- we're doing for now: let tarPath isDir fp = case Tar.toTarPath isDir (forceUtf8Enc (pkgIdName FP. fp)) of Left e -> prettyThrowIO $ ToTarPathException e Right tp -> pure tp -- convert a String of proper characters to a String of bytes in UTF8 -- encoding masquerading as characters. This is necessary for tricking the -- tar package into proper character encoding. forceUtf8Enc = S8.unpack . T.encodeUtf8 . T.pack packWith f isDir fp = liftIO $ f (pkgFp FP. fp) =<< tarPath isDir fp packDir = packWith Tar.packDirectoryEntry True packFile fp -- This is a Cabal file, we're going to tweak it, but only tweak it as a -- revision. | tweakCabal && isCabalFp fp && asRevision = do lbsIdent <- getCabalLbs pvpBounds (Just 1) cabalFP sourceMap liftIO (writeIORef cabalFileRevisionRef (Just lbsIdent)) packWith packFileEntry False fp -- Same, except we'll include the Cabal file in the original tarball -- upload. | tweakCabal && isCabalFp fp = do (_ident, lbs) <- getCabalLbs pvpBounds Nothing cabalFP sourceMap currTime <- liftIO getPOSIXTime -- Seconds from UNIX epoch tp <- liftIO $ tarPath False fp pure $ (Tar.fileEntry tp lbs) { Tar.entryTime = floor currTime } | otherwise = packWith packFileEntry False fp isCabalFp fp = toFilePath pkgDir FP. 
fp == toFilePath cabalFP tarName = pkgIdName FP.<.> "tar.gz" pkgIdName = packageIdentifierString pkgId pkgId = packageIdentifier lp.package dirEntries <- mapM packDir (dirsFromFiles files) fileEntries <- mapM packFile files mcabalFileRevision <- liftIO (readIORef cabalFileRevisionRef) pure ( tarName , GZip.compress (Tar.write (dirEntries ++ fileEntries)) , mcabalFileRevision ) -- | Get the PVP bounds-enabled version of the given Cabal file getCabalLbs :: HasEnvConfig env => PvpBoundsType -> Maybe Int -- ^ optional revision -> Path Abs File -- ^ Cabal file -> SourceMap -> RIO env (PackageIdentifier, L.ByteString) getCabalLbs pvpBounds mrev cabalFP sourceMap = do (gpdio, _name, cabalFP') <- loadCabalFilePath (Just stackProgName') (parent cabalFP) gpd <- liftIO $ gpdio NoPrintWarnings unless (cabalFP == cabalFP') $ prettyThrowIO $ CabalFilePathsInconsistentBug cabalFP cabalFP' installMap <- toInstallMap sourceMap (installedMap, _, _, _) <- getInstalled installMap let subLibPackages = Set.fromList $ gpdPackageName gpd : map (Cabal.unqualComponentNameToPackageName . fst) (Cabal.condSubLibraries gpd) gpd' = gtraverseT (addBounds subLibPackages installMap installedMap) gpd gpd'' = case mrev of Nothing -> gpd' Just rev -> gpd' { Cabal.packageDescription = (Cabal.packageDescription gpd') { Cabal.customFieldsPD = (("x-revision", show rev):) $ filter (\(x, _) -> map toLower x /= "x-revision") $ Cabal.customFieldsPD $ Cabal.packageDescription gpd' } } ident = Cabal.package $ Cabal.packageDescription gpd'' -- Sanity rendering and reparsing the input, to ensure there are no Cabal -- bugs, since there have been bugs here before, and currently are at the time -- of writing: -- -- https://github.com/haskell/cabal/issues/1202 -- https://github.com/haskell/cabal/issues/2353 -- https://github.com/haskell/cabal/issues/4863 (current issue) let roundtripErrs = fillSep [ flow "Bug detected in Cabal library. ((parse . render . 
parse) \ \=== id) does not hold for the Cabal file at" , pretty cabalFP ] <> blankLine (_warnings, eres) = Cabal.runParseResult $ Cabal.parseGenericPackageDescription $ T.encodeUtf8 $ T.pack $ showGenericPackageDescription gpd case eres of Right roundtripped | roundtripped == gpd -> pure () | otherwise -> prettyWarn $ roundtripErrs <> flow "This seems to be fixed in development versions of Cabal, \ \but at time of writing, the fix is not in any released \ \versions." <> blankLine <> fillSep [ flow "Please see this GitHub issue for status:" , style Url "https://github.com/commercialhaskell/stack/issues/3549" ] <> blankLine <> fillSep [ flow "If the issue is closed as resolved, then you may be \ \able to fix this by upgrading to a newer version of \ \Stack via" , style Shell "stack upgrade" , flow "for latest stable version or" , style Shell "stack upgrade --git" , flow "for the latest development version." ] <> blankLine <> fillSep [ flow "If the issue is fixed, but updating doesn't solve the \ \problem, please check if there are similar open \ \issues, and if not, report a new issue to the Stack \ \issue tracker, at" , style Url "https://github.com/commercialhaskell/stack/issues/new" ] <> blankLine <> flow "If the issue is not fixed, feel free to leave a comment \ \on it indicating that you would like it to be fixed." <> blankLine Left (_version, errs) -> prettyWarn $ roundtripErrs <> flow "In particular, parsing the rendered Cabal file is yielding a \ \parse error. 
Please check if there are already issues \ \tracking this, and if not, please report new issues to the \ \Stack and Cabal issue trackers, via" <> line <> bulletedList [ style Url "https://github.com/commercialhaskell/stack/issues/new" , style Url "https://github.com/haskell/cabal/issues/new" ] <> line <> flow ("The parse error is: " <> unlines (map show (toList errs))) <> blankLine pure ( ident , TLE.encodeUtf8 $ TL.pack $ showGenericPackageDescription gpd'' ) where addBounds :: Set PackageName -> InstallMap -> InstalledMap -> Dependency -> Dependency addBounds subLibPackages installMap installedMap dep = if name `Set.member` subLibPackages then dep else case foundVersion of Nothing -> dep Just version -> Dependency name ( simplifyVersionRange $ ( if toAddUpper && not (hasUpperBound range) then addUpper version else id ) -- From Cabal-3.4.0.0, 'hasLowerBound isAnyVersion' is 'True'. $ ( if toAddLower && (isAnyVersion range || not (hasLowerBound range)) then addLower version else id ) range ) s where Dependency name range s = dep foundVersion = case Map.lookup name installMap of Just (_, version) -> Just version Nothing -> case Map.lookup name installedMap of Just (_, installed) -> Just (installedVersion installed) Nothing -> Nothing addUpper version = intersectVersionRanges (earlierVersion $ nextMajorVersion version) addLower version = intersectVersionRanges (orLaterVersion version) (toAddLower, toAddUpper) = case pvpBounds of PvpBoundsNone -> (False, False) PvpBoundsUpper -> (False, True) PvpBoundsLower -> (True, False) PvpBoundsBoth -> (True, True) -- | Traverse a data type. gtraverseT :: (Data a,Typeable b) => (Typeable b => b -> b) -> a -> a gtraverseT f = gmapT (\x -> case cast x of Nothing -> gtraverseT f x Just b -> fromMaybe x (cast (f b))) -- | Read in a 'LocalPackage' config. This makes some default decisions about -- 'LocalPackage' fields that might not be appropriate for other use-cases. 
readLocalPackage :: HasEnvConfig env => Path Abs Dir -> RIO env LocalPackage readLocalPackage pkgDir = do config <- getDefaultPackageConfig (gpdio, _, cabalFP) <- loadCabalFilePath (Just stackProgName') pkgDir gpd <- liftIO $ gpdio YesPrintWarnings let package = resolvePackage config gpd pure LocalPackage { package , wanted = False -- HACK: makes it so that sdist output goes to a log -- instead of a file. , cabalFP -- NOTE: these aren't the 'correct' values, but aren't used in the usage of -- this function in this module. , testBench = Nothing , buildHaddocks = False , forceDirty = False , dirtyFiles = pure Nothing , newBuildCaches = pure Map.empty , componentFiles = pure Map.empty , components = Set.empty , unbuildable = Set.empty } -- | Returns a newline-separate list of paths, and the absolute path to the -- Cabal file. getSDistFileList :: HasEnvConfig env => LocalPackage -> Map PackageIdentifier GhcPkgId -> RIO env (String, Path Abs File) getSDistFileList lp deps = withSystemTempDir (stackProgName <> "-sdist") $ \tmpdir -> do let bopts = defaultBuildOpts let boptsCli = defaultBuildOptsCLI baseConfigOpts <- mkBaseConfigOpts boptsCli locals <- projectLocalPackages withExecuteEnv bopts boptsCli baseConfigOpts locals [] [] [] Nothing -- provide empty list of globals. This is a hack around -- custom Setup.hs files $ \ee -> withSingleContext ac ee taskType deps (Just "sdist") $ \_package cabalFP _pkgDir cabal _announce _outputType -> do let outFile = toFilePath tmpdir FP. 
"source-files-list" cabal CloseOnException KeepTHLoading ["sdist", "--list-sources", outFile] contents <- liftIO (S.readFile outFile) pure (T.unpack $ T.decodeUtf8With T.lenientDecode contents, cabalFP) where ac = ActionContext Set.empty [] ConcurrencyAllowed taskType = TTLocalMutable lp normalizeTarballPaths :: (HasRunner env, HasTerm env) => [FilePath] -> RIO env [FilePath] normalizeTarballPaths fps = do -- TODO: consider whether erroring out is better - otherwise the user might -- upload an incomplete tar? unless (null outsideDir) $ prettyWarn $ flow "These files are outside of the package directory, and will be \ \omitted from the tarball:" <> line <> bulletedList (map (style File . fromString) outsideDir) pure (nubOrd files) where (outsideDir, files) = partitionEithers (map pathToEither fps) pathToEither fp = maybe (Left fp) Right (normalizePath fp) normalizePath :: FilePath -> Maybe FilePath normalizePath = fmap FP.joinPath . go . FP.splitDirectories . FP.normalise where go [] = Just [] go ("..":_) = Nothing go (_:"..":xs) = go xs go (x:xs) = (x :) <$> go xs dirsFromFiles :: [FilePath] -> [FilePath] dirsFromFiles dirs = Set.toAscList (Set.delete "." results) where results = foldl' (\s -> go s . FP.takeDirectory) Set.empty dirs go s x | Set.member x s = s | otherwise = go (Set.insert x s) (FP.takeDirectory x) -- | Check package in given tarball. This will log all warnings and will throw -- an exception in case of critical errors. -- -- Note that we temporarily decompress the archive to analyze it. checkSDistTarball :: HasEnvConfig env => SDistOpts -- ^ The configuration of what to check -> Path Abs File -- ^ Absolute path to tarball -> RIO env () checkSDistTarball opts tarball = withTempTarGzContents tarball $ \pkgDir' -> do pkgDir <- (pkgDir' ) <$> (parseRelDir . FP.takeBaseName . FP.takeBaseName . 
toFilePath $ tarball) -- ^ drop ".tar" ^ drop ".gz" when opts.buildTarball ( buildExtractedTarball ResolvedPath { resolvedRelative = RelFilePath "this-is-not-used" -- ugly hack , resolvedAbsolute = pkgDir } ) unless opts.ignoreCheck (checkPackageInExtractedTarball pkgDir) checkPackageInExtractedTarball :: HasEnvConfig env => Path Abs Dir -- ^ Absolute path to tarball -> RIO env () checkPackageInExtractedTarball pkgDir = do (gpdio, name, _cabalfp) <- loadCabalFilePath (Just stackProgName') pkgDir gpd <- liftIO $ gpdio YesPrintWarnings config <- getDefaultPackageConfig let pkgDesc = resolvePackageDescription config gpd prettyInfoL [ flow "Checking package" , style Current (fromPackageName name) , flow "for common mistakes." ] let pkgChecks = -- MSS 2017-12-12: Try out a few different variants of pkgDesc to try -- and provoke an error or warning. I don't know why, but when using -- `Just pkgDesc`, it appears that Cabal does not detect that `^>=` is -- used with `cabal-version: 1.24` or earlier. It seems like pkgDesc -- (the one we create) does not populate the `buildDepends` field, -- whereas flattenPackageDescription from Cabal does. In any event, -- using `Nothing` seems more logical for this check anyway, and the -- fallback to `Just pkgDesc` is just a crazy sanity check. case Check.checkPackage gpd Nothing of [] -> Check.checkPackage gpd (Just pkgDesc) x -> x fileChecks <- liftIO $ Check.checkPackageFiles minBound pkgDesc (toFilePath pkgDir) let checks = pkgChecks ++ fileChecks (errors, warnings) = let criticalIssue (Check.PackageBuildImpossible _) = True criticalIssue (Check.PackageDistInexcusable _) = True criticalIssue _ = False in List.partition criticalIssue checks unless (null warnings) $ prettyWarn $ flow "Package check reported the following warnings:" <> line <> bulletedList (map (fromString . 
show) warnings) case nonEmpty errors of Nothing -> pure () Just ne -> prettyThrowM $ CheckException ne buildExtractedTarball :: HasEnvConfig env => ResolvedPath Dir -> RIO env () buildExtractedTarball pkgDir = do envConfig <- view envConfigL localPackageToBuild <- readLocalPackage $ resolvedAbsolute pkgDir -- We remove the path based on the name of the package let isPathToRemove path = do localPackage <- readLocalPackage path pure $ localPackage.package.name == localPackageToBuild.package.name pathsToKeep <- Map.fromList <$> filterM (fmap not . isPathToRemove . resolvedAbsolute . (.resolvedDir) . snd) (Map.toList envConfig.buildConfig.smWanted.project) pp <- mkProjectPackage YesPrintWarnings pkgDir False let adjustEnvForBuild env = let updatedEnvConfig = envConfig { sourceMap = updatePackagesInSourceMap envConfig.sourceMap , buildConfig = updateBuildConfig envConfig.buildConfig } updateBuildConfig bc = bc { config = bc.config { build = defaultBuildOpts { tests = True } } } in set envConfigL updatedEnvConfig env updatePackagesInSourceMap sm = sm { SourceMap.project = Map.insert pp.projectCommon.name pp pathsToKeep } local adjustEnvForBuild $ build Nothing -- | Version of 'checkSDistTarball' that first saves lazy bytestring to -- temporary directory and then calls 'checkSDistTarball' on it. checkSDistTarball' :: HasEnvConfig env => SDistOpts -> String -- ^ Tarball name -> L.ByteString -- ^ Tarball contents as a byte string -> RIO env () checkSDistTarball' opts name bytes = withSystemTempDir "stack" $ \tpath -> do npath <- (tpath ) <$> parseRelFile name liftIO $ L.writeFile (toFilePath npath) bytes checkSDistTarball opts npath withTempTarGzContents :: Path Abs File -- ^ Location of tarball -> (Path Abs Dir -> RIO env a) -- ^ Perform actions given dir with tarball contents -> RIO env a withTempTarGzContents apath f = withSystemTempDir "stack" $ \tpath -> do archive <- liftIO $ L.readFile (toFilePath apath) liftIO . Tar.unpack (toFilePath tpath) . Tar.read . 
GZip.decompress $ archive f tpath -------------------------------------------------------------------------------- -- Copy+modified from the tar package to avoid issues with lazy IO ( see -- https://github.com/commercialhaskell/stack/issues/1344 ) packFileEntry :: FilePath -- ^ Full path to find the file on the local disk -> Tar.TarPath -- ^ Path to use for the tar Entry in the archive -> IO Tar.Entry packFileEntry filepath tarpath = do mtime <- getModTime filepath perms <- getPermissions filepath content <- S.readFile filepath let size = fromIntegral (S.length content) entryContent = Tar.NormalFile (L.fromStrict content) size entry = Tar.simpleEntry tarpath entryContent pure entry { Tar.entryPermissions = if executable perms then Tar.executableFilePermissions else Tar.ordinaryFilePermissions , Tar.entryTime = mtime } getModTime :: FilePath -> IO Tar.EpochTime getModTime path = do t <- getModificationTime path pure $ floor . utcTimeToPOSIXSeconds $ t getDefaultPackageConfig :: (MonadIO m, MonadReader env m, HasEnvConfig env) => m PackageConfig getDefaultPackageConfig = do platform <- view platformL compilerVersion <- view actualCompilerVersionL pure PackageConfig { enableTests = False , enableBenchmarks = False , flags = mempty , ghcOptions = [] , cabalConfigOpts = [] , compilerVersion , platform } stack-2.15.7/src/Stack/Setup.hs0000644000000000000000000034574314620153446014416 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE MultiWayIf #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE ViewPatterns #-} module Stack.Setup ( setupEnv , ensureCompilerAndMsys , ensureDockerStackExe , SetupOpts (..) 
, defaultSetupInfoYaml , withNewLocalBuildTargets -- * Stack binary download , StackReleaseInfo , getDownloadVersion , stackVersion , preferredPlatforms , downloadStackReleaseInfo , downloadStackExe ) where import qualified Codec.Archive.Tar as Tar import Conduit ( ConduitT, await, concatMapMC, filterCE, foldMC, yield ) import Control.Applicative ( empty ) import Crypto.Hash ( SHA1 (..), SHA256 (..) ) import qualified Data.Aeson.KeyMap as KeyMap import Data.Aeson.Types ( Value (..) ) import Data.Aeson.WarningParser ( WithJSONWarnings (..), logJSONWarnings ) import qualified Data.Attoparsec.Text as P import qualified Data.ByteString as S import qualified Data.ByteString.Lazy as LBS import Data.Char ( isDigit ) import qualified Data.Conduit.Binary as CB import Data.Conduit.Lazy ( lazyConsume ) import qualified Data.Conduit.List as CL import Data.Conduit.Process.Typed ( createSource ) import Data.Conduit.Zlib ( ungzip ) import Data.List.Split ( splitOn ) import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Text as T import qualified Data.Text.Lazy as TL import qualified Data.Text.Encoding as T import qualified Data.Text.Lazy.Encoding as TL import qualified Data.Text.Encoding.Error as T import qualified Data.Yaml as Yaml import Distribution.System ( Arch (..), OS, Platform (..) 
) import qualified Distribution.System as Cabal import Distribution.Text ( simpleParse ) import Distribution.Types.PackageName ( mkPackageName ) import Distribution.Version ( mkVersion ) import Network.HTTP.Client ( redirectCount ) import Network.HTTP.StackClient ( CheckHexDigest (..), HashCheck (..), getResponseBody , getResponseStatusCode, httpLbs, httpJSON, mkDownloadRequest , parseRequest, parseUrlThrow, setGitHubHeaders , setHashChecks, setLengthCheck, setRequestMethod , verifiedDownloadWithProgress, withResponse ) import Network.HTTP.Simple ( getResponseHeader ) import Path ( (), addExtension, filename, parent, parseAbsDir , parseAbsFile, parseRelDir, parseRelFile, takeDrive , toFilePath ) import Path.CheckInstall ( warnInstallSearchPathIssues ) import Path.Extended ( fileExtension ) import Path.Extra ( toFilePathNoTrailingSep ) import Path.IO ( canonicalizePath, doesFileExist, ensureDir, executable , getPermissions, ignoringAbsence, listDir, removeDirRecur , renameDir, renameFile, resolveFile', withTempDir ) import RIO.List ( headMaybe, intercalate, intersperse, isPrefixOf , maximumByMaybe, sort, sortOn, stripPrefix ) import RIO.Process ( EnvVars, HasProcessContext (..), ProcessContext , augmentPath, augmentPathMap, doesExecutableExist, envVarsL , exeSearchPathL, getStdout, mkProcessContext, modifyEnvVars , proc, readProcess_, readProcessStdout, runProcess , runProcess_, setStdout, waitExitCode, withModifyEnvVars , withProcessWait, withWorkingDir, workingDirL ) import Stack.Build.Haddock ( shouldHaddockDeps ) import Stack.Build.Source ( hashSourceMapData, loadSourceMap ) import Stack.Build.Target ( NeedTargets (..), parseTargets ) import Stack.Config.ConfigureScript ( ensureConfigureScript ) import Stack.Constants ( cabalPackageName, ghcBootScript,ghcConfigureMacOS , ghcConfigurePosix, ghcConfigureWindows, hadrianScriptsPosix , hadrianScriptsWindows, libDirs, osIsMacOS, osIsWindows , relDirBin, relDirUsr, relFile7zdll, relFile7zexe , relFileConfigure, 
relFileHadrianStackDotYaml , relFileLibcMuslx86_64So1, relFileLibgmpSo10 , relFileLibgmpSo3, relFileLibncurseswSo6, relFileLibtinfoSo5 , relFileLibtinfoSo6, relFileMainHs, relFileStack , relFileStackDotExe, relFileStackDotTmp , relFileStackDotTmpDotExe, stackProgName, usrLibDirs ) import Stack.Constants.Config ( distRelativeDir ) import Stack.GhcPkg ( createDatabase, getGlobalDB, ghcPkgPathEnvVar , mkGhcPackagePath ) import Stack.Prelude import Stack.Setup.Installed ( Tool (..), filterTools, getCompilerVersion, installDir , listInstalled, markInstalled, tempInstallDir,toolExtraDirs , toolString, unmarkInstalled ) import Stack.SourceMap ( actualFromGhc, globalsFromDump, pruneGlobals ) import Stack.Storage.User ( loadCompilerPaths, saveCompilerPaths ) import Stack.Types.Build.Exception ( BuildPrettyException (..) ) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..), projectRootL , wantedCompilerVersionL ) import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..) ) import Stack.Types.Compiler ( ActualCompiler (..), CompilerException (..) , CompilerRepository (..), WhichCompiler (..) , compilerVersionText, getGhcVersion, isWantedCompiler , wantedToActual, whichCompiler, whichCompilerL ) import Stack.Types.CompilerBuild ( CompilerBuild (..), compilerBuildName, compilerBuildSuffix ) import Stack.Types.CompilerPaths ( CompilerPaths (..), GhcPkgExe (..), HasCompiler (..) ) import Stack.Types.Config ( Config (..), HasConfig (..), envOverrideSettingsL , ghcInstallHook ) import Stack.Types.DownloadInfo ( DownloadInfo (..) ) import Stack.Types.DumpPackage ( DumpPackage (..) ) import Stack.Types.EnvConfig ( EnvConfig (..), HasEnvConfig (..), extraBinDirs , packageDatabaseDeps, packageDatabaseExtra , packageDatabaseLocal ) import Stack.Types.EnvSettings ( EnvSettings (..), minimalEnvSettings ) import Stack.Types.ExtraDirs ( ExtraDirs (..) ) import Stack.Types.FileDigestCache ( newFileDigestCache ) import Stack.Types.GHCDownloadInfo ( GHCDownloadInfo (..) 
) import Stack.Types.GHCVariant ( GHCVariant (..), HasGHCVariant (..), ghcVariantName , ghcVariantSuffix ) import Stack.Types.Platform ( HasPlatform (..), PlatformVariant (..) , platformOnlyRelDir ) import Stack.Types.Runner ( HasRunner (..) ) import Stack.Types.SetupInfo ( SetupInfo (..) ) import Stack.Types.SourceMap ( SMActual (..), SMWanted (..), SourceMap (..) ) import Stack.Types.Version ( VersionCheck, stackMinorVersion, stackVersion ) import Stack.Types.VersionedDownloadInfo ( VersionedDownloadInfo (..) ) import qualified System.Directory as D import System.Environment ( getExecutablePath, lookupEnv ) import System.IO.Error ( isPermissionError ) import System.FilePath ( searchPathSeparator ) import qualified System.FilePath as FP import System.Permissions ( setFileExecutable ) import System.Uname ( getRelease ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Setup" module data SetupException = WorkingDirectoryInvalidBug | StackBinaryArchiveZipUnsupportedBug deriving (Show, Typeable) instance Exception SetupException where displayException WorkingDirectoryInvalidBug = bugReport "[S-2076]" "Invalid working directory." displayException StackBinaryArchiveZipUnsupportedBug = bugReport "[S-3967]" "FIXME: Handle zip files." 
-- | Type representing \'pretty\' exceptions thrown by functions exported by the -- "Stack.Setup" module data SetupPrettyException = GHCInstallFailed !SomeException !String !String ![String] !(Path Abs Dir) !(Path Abs Dir) !(Path Abs Dir) | InvalidGhcAt !(Path Abs File) !SomeException | ExecutableNotFound ![Path Abs File] | SandboxedCompilerNotFound ![String] ![Path Abs Dir] | UnsupportedSetupCombo !OS !Arch !StyleDoc !StyleDoc !(Path Abs Dir) | MissingDependencies ![String] | UnknownCompilerVersion !(Set.Set Text) !WantedCompiler !(Set.Set ActualCompiler) | UnknownOSKey !Text | GHCSanityCheckCompileFailed !SomeException !(Path Abs File) | RequireCustomGHCVariant | ProblemWhileDecompressing !(Path Abs File) | SetupInfoMissingSevenz | UnsupportedSetupConfiguration | MSYS2NotFound !Text | UnwantedCompilerVersion | UnwantedArchitecture | GHCInfoNotValidUTF8 !UnicodeException | GHCInfoNotListOfPairs | GHCInfoMissingGlobalPackageDB | GHCInfoMissingTargetPlatform | GHCInfoTargetPlatformInvalid !String | CabalNotFound !(Path Abs File) | GhcBootScriptNotFound | HadrianScriptNotFound | URLInvalid !String | UnknownArchiveExtension !String | Unsupported7z | TarballInvalid !String | TarballFileInvalid !String !(Path Abs File) | UnknownArchiveStructure !(Path Abs File) | StackReleaseInfoNotFound !String | StackBinaryArchiveNotFound ![String] | HadrianBindistNotFound | DownloadAndInstallCompilerError | StackBinaryArchiveUnsupported !Text | StackBinaryNotInArchive !String !Text | FileTypeInArchiveInvalid !Tar.Entry !Text | BinaryUpgradeOnOSUnsupported !Cabal.OS | BinaryUpgradeOnArchUnsupported !Cabal.Arch | ExistingMSYS2NotDeleted !(Path Abs Dir) !IOException deriving (Show, Typeable) instance Pretty SetupPrettyException where pretty (GHCInstallFailed ex step cmd args wd tempDir destDir) = "[S-7441]" <> line <> string (displayException ex) <> line <> hang 2 ( fillSep [ flow "Error encountered while" , fromString step , flow "GHC with" ] <> line <> style Shell (fromString (unwords 
(cmd : args))) <> line -- TODO: Figure out how to insert \ in the appropriate spots -- hang 2 (shellColor (fillSep (fromString cmd : map fromString args))) <> line <> <> fillSep [ flow "run in" , pretty wd ] ) <> blankLine <> flow "The following directories may now contain files, but won't be \ \used by Stack:" <> line <> bulletedList [pretty tempDir, pretty destDir] <> blankLine <> fillSep [ flow "For more information consider rerunning with" , style Shell "--verbose" , "flag." ] <> line pretty (InvalidGhcAt compiler e) = "[S-2476]" <> line <> fillSep [ flow "Stack considers the compiler at" , pretty compiler , flow "to be invalid." ] <> blankLine <> flow "While assessing that compiler, Stack encountered the error:" <> blankLine <> ppException e pretty (ExecutableNotFound toTry) = "[S-4764]" <> line <> flow "Stack could not find any of the following executables:" <> line <> bulletedList (map pretty toTry) pretty (SandboxedCompilerNotFound names fps) = "[S-9953]" <> line <> fillSep ( ( flow "Stack could not find the sandboxed compiler. It looked for \ \one named one of:" : mkNarrativeList Nothing False ( map fromString names :: [StyleDoc] ) ) <> ( flow "However, it could not find any on one of the paths:" : mkNarrativeList Nothing False fps ) ) <> blankLine <> fillSep [ flow "Perhaps a previously-installed compiler was not completely \ \uninstalled. For further information about uninstalling \ \tools, see the output of" , style Shell (flow "stack uninstall") <> "." ] pretty (UnsupportedSetupCombo os arch tool toolDirAdvice programsDir) = "[S-1852]" <> line <> fillSep [ flow "Stack does not know how to install" , tool , flow "for the combination of operating system" , style Shell (pretty os) , "and architecture" , style Shell (pretty arch) <> "." , flow "Please install manually." 
] <> blankLine <> fillSep [ flow "To install manually the version of" , tool <> "," , flow "its root directory should be named" , toolDirAdvice , flow "and the directory should be accompanied by a file with the \ \same name and extension" , style File ".installed" , flow "(which marks the" , tool , flow "version as installed). Both items should be located in the \ \subdirectory for the specified platform in Stack's directory \ \for local tools" , parens (pretty programsDir) <> "." ] pretty (MissingDependencies tools) = "[S-2126]" <> line <> fillSep ( flow "The following executables are missing and must be installed:" : mkNarrativeList Nothing False (map fromString tools :: [StyleDoc]) ) pretty (UnknownCompilerVersion oskeys wanted known) = "[S-9443]" <> line <> fillSep ( ( flow "No setup information found for" : style Current wanted' : flow "on your platform. This probably means a GHC binary \ \distribution has not yet been added for OS key" : mkNarrativeList (Just Shell) False (map (fromString . T.unpack) (sort $ Set.toList oskeys) :: [StyleDoc]) ) <> ( flow "Supported versions:" : mkNarrativeList Nothing False ( map (fromString . T.unpack . compilerVersionText) (sort $ Set.toList known) :: [StyleDoc] ) ) ) where wanted' = fromString . T.unpack . utf8BuilderToText $ display wanted pretty (UnknownOSKey oskey) = "[S-6810]" <> line <> fillSep [ flow "Unable to find installation URLs for OS key:" , fromString $ T.unpack oskey <> "." ] pretty (GHCSanityCheckCompileFailed e ghc) = "[S-5159]" <> line <> fillSep [ flow "The GHC located at" , pretty ghc , flow "failed to compile a sanity check. Please see:" , style Url "http://docs.haskellstack.org/en/stable/install_and_upgrade/" , flow "for more information. 
Stack encountered the following \ \error:" ] <> blankLine <> string (displayException e) pretty RequireCustomGHCVariant = "[S-8948]" <> line <> fillSep [ flow "A custom" , style Shell "--ghc-variant" , flow "must be specified to use" , style Shell "--ghc-bindist" <> "." ] pretty (ProblemWhileDecompressing archive) = "[S-2905]" <> line <> fillSep [ flow "Problem while decompressing" , pretty archive <> "." ] pretty SetupInfoMissingSevenz = "[S-9561]" <> line <> flow "SetupInfo missing Sevenz EXE/DLL." pretty UnsupportedSetupConfiguration = "[S-7748]" <> line <> flow "Stack does not know how to install GHC on your system \ \configuration. Please install manually." pretty (MSYS2NotFound osKey) = "[S-5308]" <> line <> fillSep [ flow "MSYS2 not found for" , fromString $ T.unpack osKey <> "." ] pretty UnwantedCompilerVersion = "[S-5127]" <> line <> flow "Not the compiler version we want." pretty UnwantedArchitecture = "[S-1540]" <> line <> flow "Not the architecture we want." pretty (GHCInfoNotValidUTF8 e) = "[S-8668]" <> line <> flow "GHC info is not valid UTF-8. Stack encountered the following \ \error:" <> blankLine <> string (displayException e) pretty GHCInfoNotListOfPairs = "[S-4878]" <> line <> flow "GHC info does not parse as a list of pairs." pretty GHCInfoMissingGlobalPackageDB = "[S-2965]" <> line <> flow "Key 'Global Package DB' not found in GHC info." pretty GHCInfoMissingTargetPlatform = "[S-5219]" <> line <> flow "Key 'Target platform' not found in GHC info." pretty (GHCInfoTargetPlatformInvalid targetPlatform) = "[S-8299]" <> line <> fillSep [ flow "Invalid target platform in GHC info:" , fromString targetPlatform <> "." ] pretty (CabalNotFound compiler) = "[S-2574]" <> line <> fillSep [ flow "Cabal library not found in global package database for" , pretty compiler <> "." ] pretty GhcBootScriptNotFound = "[S-8488]" <> line <> flow "No GHC boot script found." pretty HadrianScriptNotFound = "[S-1128]" <> line <> flow "No Hadrian build script found." 
pretty (URLInvalid url) = "[S-1906]" <> line <> fillSep [ flow "`url` must be either an HTTP URL or a file path:" , fromString url <> "." ] pretty (UnknownArchiveExtension url) = "[S-1648]" <> line <> fillSep [ flow "Unknown extension for url:" , style Url (fromString url) <> "." ] pretty Unsupported7z = "[S-4509]" <> line <> fillSep [ flow "Stack does not know how to deal with" , style File ".7z" , flow "files on non-Windows operating systems." ] pretty (TarballInvalid name) = "[S-3158]" <> line <> fillSep [ style File (fromString name) , flow "must be a tarball file." ] pretty (TarballFileInvalid name archiveFile) = "[S-5252]" <> line <> fillSep [ "Invalid" , style File (fromString name) , "filename:" , pretty archiveFile <> "." ] pretty (UnknownArchiveStructure archiveFile) = "[S-1827]" <> line <> fillSep [ flow "Expected a single directory within unpacked" , pretty archiveFile <> "." ] pretty (StackReleaseInfoNotFound url) = "[S-9476]" <> line <> fillSep [ flow "Could not get release information for Stack from:" , style Url (fromString url) <> "." ] pretty (StackBinaryArchiveNotFound platforms) = "[S-4461]" <> line <> fillSep ( flow "Unable to find binary Stack archive for platforms:" : mkNarrativeList Nothing False (map fromString platforms :: [StyleDoc]) ) pretty HadrianBindistNotFound = "[S-6617]" <> line <> flow "Can't find Hadrian-generated binary distribution." pretty DownloadAndInstallCompilerError = "[S-7227]" <> line <> flow "'downloadAndInstallCompiler' should not be reached with ghc-git." pretty (StackBinaryArchiveUnsupported archiveURL) = "[S-6636]" <> line <> fillSep [ flow "Unknown archive format for Stack archive:" , style Url (fromString $ T.unpack archiveURL) <> "." ] pretty (StackBinaryNotInArchive exeName url) = "[S-7871]" <> line <> fillSep [ flow "Stack executable" , style File (fromString exeName) , flow "not found in archive from" , style Url (fromString $ T.unpack url) <> "." 
] pretty (FileTypeInArchiveInvalid e url) = "[S-5046]" <> line <> fillSep [ flow "Invalid file type for tar entry named" , fromString (Tar.entryPath e) , flow "downloaded from" , style Url (fromString $ T.unpack url) <> "." ] pretty (BinaryUpgradeOnOSUnsupported os) = "[S-4132]" <> line <> fillSep [ flow "Binary upgrade not yet supported on OS:" , pretty os <> "." ] pretty (BinaryUpgradeOnArchUnsupported arch) = "[S-3249]" <> line <> fillSep [ flow "Binary upgrade not yet supported on architecture:" , pretty arch <> "." ] pretty (ExistingMSYS2NotDeleted destDir e) = "[S-4230]" <> line <> fillSep [ flow "Could not delete existing MSYS2 directory:" , pretty destDir <> "." , flow "Stack encountered the following error:" ] <> blankLine <> string (displayException e) instance Exception SetupPrettyException -- | Type representing exceptions thrown by 'performPathChecking' data PerformPathCheckingException = ProcessExited ExitCode String [String] deriving (Show, Typeable) instance Exception PerformPathCheckingException where displayException (ProcessExited ec cmd args) = concat [ "Error: [S-1991]\n" , "Process exited with " , displayException ec , ": " , unwords (cmd:args) ] -- | Default location of the stack-setup.yaml file defaultSetupInfoYaml :: String defaultSetupInfoYaml = "https://raw.githubusercontent.com/commercialhaskell/stackage-content/master/stack/stack-setup-2.yaml" data SetupOpts = SetupOpts { installIfMissing :: !Bool , useSystem :: !Bool -- ^ Should we use a system compiler installation, if available? 
, wantedCompiler :: !WantedCompiler , compilerCheck :: !VersionCheck , stackYaml :: !(Maybe (Path Abs File)) -- ^ If we got the desired GHC version from that file , forceReinstall :: !Bool , sanityCheck :: !Bool -- ^ Run a sanity check on the selected GHC , skipGhcCheck :: !Bool -- ^ Don't check for a compatible GHC version/architecture , skipMsys :: !Bool -- ^ Do not use a custom msys installation on Windows , resolveMissingGHC :: !(Maybe StyleDoc) -- ^ Message shown to user for how to resolve the missing GHC , ghcBindistURL :: !(Maybe String) -- ^ Alternate GHC binary distribution (requires custom GHCVariant) } deriving Show -- | Modify the environment variables (like PATH) appropriately, possibly doing -- installation too setupEnv :: NeedTargets -> BuildOptsCLI -> Maybe StyleDoc -- ^ Message to give user when necessary GHC is not available. -> RIO BuildConfig EnvConfig setupEnv needTargets buildOptsCLI mResolveMissingGHC = do config <- view configL bc <- view buildConfigL let stackYaml = bc.stackYaml platform <- view platformL wcVersion <- view wantedCompilerVersionL wanted <- view wantedCompilerVersionL actual <- either throwIO pure $ wantedToActual wanted let wc = actual^.whichCompilerL let sopts = SetupOpts { installIfMissing = config.installGHC , useSystem = config.systemGHC , wantedCompiler = wcVersion , compilerCheck = config.compilerCheck , stackYaml = Just stackYaml , forceReinstall = False , sanityCheck = False , skipGhcCheck = config.skipGHCCheck , skipMsys = config.skipMsys , resolveMissingGHC = mResolveMissingGHC , ghcBindistURL = Nothing } (compilerPaths, ghcBin) <- ensureCompilerAndMsys sopts let compilerVer = compilerPaths.compilerVersion -- Modify the initial environment to include the GHC path, if a local GHC -- is being used menv0 <- view processContextL env <- either throwM (pure . 
removeHaskellEnvVars) $ augmentPathMap (map toFilePath ghcBin.bins) (view envVarsL menv0) menv <- mkProcessContext env logDebug "Resolving package entries" (sourceMap, sourceMapHash) <- runWithGHC menv compilerPaths $ do smActual <- actualFromGhc bc.smWanted compilerVer let actualPkgs = Map.keysSet smActual.deps <> Map.keysSet smActual.project prunedActual = smActual { globals = pruneGlobals smActual.globals actualPkgs } haddockDeps = shouldHaddockDeps config.build targets <- parseTargets needTargets haddockDeps buildOptsCLI prunedActual sourceMap <- loadSourceMap targets buildOptsCLI smActual sourceMapHash <- hashSourceMapData buildOptsCLI sourceMap pure (sourceMap, sourceMapHash) fileDigestCache <- newFileDigestCache let envConfig0 = EnvConfig { buildConfig = bc , buildOptsCLI , fileDigestCache , sourceMap , sourceMapHash , compilerPaths } -- extra installation bin directories mkDirs <- runRIO envConfig0 extraBinDirs let mpath = Map.lookup "PATH" env depsPath <- either throwM pure $ augmentPath (toFilePath <$> mkDirs False) mpath localsPath <- either throwM pure $ augmentPath (toFilePath <$> mkDirs True) mpath deps <- runRIO envConfig0 packageDatabaseDeps runWithGHC menv compilerPaths $ createDatabase compilerPaths.pkg deps localdb <- runRIO envConfig0 packageDatabaseLocal runWithGHC menv compilerPaths $ createDatabase compilerPaths.pkg localdb extras <- runReaderT packageDatabaseExtra envConfig0 let mkGPP locals = mkGhcPackagePath locals localdb deps extras compilerPaths.globalDB distDir <- runReaderT distRelativeDir envConfig0 >>= canonicalizePath executablePath <- liftIO getExecutablePath utf8EnvVars <- withProcessContext menv $ getUtf8EnvVars compilerVer mGhcRtsEnvVar <- liftIO $ lookupEnv "GHCRTS" envRef <- liftIO $ newIORef Map.empty let getProcessContext' es = do m <- readIORef envRef case Map.lookup es m of Just eo -> pure eo Nothing -> do eo <- mkProcessContext $ Map.insert "PATH" (if es.includeLocals then localsPath else depsPath) $ (if 
es.includeGhcPackagePath then Map.insert (ghcPkgPathEnvVar wc) (mkGPP es.includeLocals) else id) $ (if es.stackExe then Map.insert "STACK_EXE" (T.pack executablePath) else id) $ (if es.localeUtf8 then Map.union utf8EnvVars else id) $ case (sopts.skipMsys, platform) of (False, Platform Cabal.I386 Cabal.Windows) -> Map.insert "MSYSTEM" "MINGW32" (False, Platform Cabal.X86_64 Cabal.Windows) -> Map.insert "MSYSTEM" "MINGW64" _ -> id -- See https://github.com/commercialhaskell/stack/issues/3444 $ case (es.keepGhcRts, mGhcRtsEnvVar) of (True, Just ghcRts) -> Map.insert "GHCRTS" (T.pack ghcRts) _ -> id -- For reasoning and duplication, see: -- https://github.com/commercialhaskell/stack/issues/70 $ Map.insert "HASKELL_PACKAGE_SANDBOX" (T.pack $ toFilePathNoTrailingSep deps) $ Map.insert "HASKELL_PACKAGE_SANDBOXES" (T.pack $ if es.includeLocals then intercalate [searchPathSeparator] [ toFilePathNoTrailingSep localdb , toFilePathNoTrailingSep deps , "" ] else intercalate [searchPathSeparator] [ toFilePathNoTrailingSep deps , "" ]) $ Map.insert "HASKELL_DIST_DIR" (T.pack $ toFilePathNoTrailingSep distDir) -- Make sure that any .ghc.environment files -- are ignored, since we're setting up our -- own package databases. See -- https://github.com/commercialhaskell/stack/issues/4706 $ (case compilerPaths.compilerVersion of ACGhc version | version >= mkVersion [8, 4, 4] -> Map.insert "GHC_ENVIRONMENT" "-" _ -> id) env () <- atomicModifyIORef envRef $ \m' -> (Map.insert es eo m', ()) pure eo envOverride <- liftIO $ getProcessContext' minimalEnvSettings pure EnvConfig { buildConfig = bc { config = addIncludeLib ghcBin $ set processContextL envOverride (view configL bc) { processContextSettings = getProcessContext' } } , buildOptsCLI , fileDigestCache , sourceMap , sourceMapHash , compilerPaths } -- | A modified env which we know has an installed compiler on the PATH. 
data WithGHC env = WithGHC !CompilerPaths !env insideL :: Lens' (WithGHC env) env insideL = lens (\(WithGHC _ x) -> x) (\(WithGHC cp _) -> WithGHC cp) instance HasLogFunc env => HasLogFunc (WithGHC env) where logFuncL = insideL . logFuncL instance HasRunner env => HasRunner (WithGHC env) where runnerL = insideL . runnerL instance HasProcessContext env => HasProcessContext (WithGHC env) where processContextL = insideL . processContextL instance HasStylesUpdate env => HasStylesUpdate (WithGHC env) where stylesUpdateL = insideL . stylesUpdateL instance HasTerm env => HasTerm (WithGHC env) where useColorL = insideL . useColorL termWidthL = insideL . termWidthL instance HasPantryConfig env => HasPantryConfig (WithGHC env) where pantryConfigL = insideL . pantryConfigL instance HasConfig env => HasPlatform (WithGHC env) where platformL = configL . platformL {-# INLINE platformL #-} platformVariantL = configL . platformVariantL {-# INLINE platformVariantL #-} instance HasConfig env => HasGHCVariant (WithGHC env) where ghcVariantL = configL . ghcVariantL {-# INLINE ghcVariantL #-} instance HasConfig env => HasConfig (WithGHC env) where configL = insideL . configL instance HasBuildConfig env => HasBuildConfig (WithGHC env) where buildConfigL = insideL . buildConfigL instance HasCompiler (WithGHC env) where compilerPathsL = to (\(WithGHC cp _) -> cp) -- | Set up a modified environment which includes the modified PATH that GHC can -- be found on. This is needed for looking up global package information and ghc -- fingerprint (result from 'ghc --info'). runWithGHC :: HasConfig env => ProcessContext -> CompilerPaths -> RIO (WithGHC env) a -> RIO env a runWithGHC pc cp inner = do env <- ask let envg = WithGHC cp $ set envOverrideSettingsL (\_ -> pure pc) $ set processContextL pc env runRIO envg inner -- | A modified environment which we know has MSYS2 on the PATH. 
newtype WithMSYS env = WithMSYS env insideMSYSL :: Lens' (WithMSYS env) env insideMSYSL = lens (\(WithMSYS x) -> x) (\(WithMSYS _) -> WithMSYS) instance HasLogFunc env => HasLogFunc (WithMSYS env) where logFuncL = insideMSYSL . logFuncL instance HasRunner env => HasRunner (WithMSYS env) where runnerL = insideMSYSL . runnerL instance HasProcessContext env => HasProcessContext (WithMSYS env) where processContextL = insideMSYSL . processContextL instance HasStylesUpdate env => HasStylesUpdate (WithMSYS env) where stylesUpdateL = insideMSYSL . stylesUpdateL instance HasTerm env => HasTerm (WithMSYS env) where useColorL = insideMSYSL . useColorL termWidthL = insideMSYSL . termWidthL instance HasPantryConfig env => HasPantryConfig (WithMSYS env) where pantryConfigL = insideMSYSL . pantryConfigL instance HasConfig env => HasPlatform (WithMSYS env) where platformL = configL . platformL {-# INLINE platformL #-} platformVariantL = configL . platformVariantL {-# INLINE platformVariantL #-} instance HasConfig env => HasGHCVariant (WithMSYS env) where ghcVariantL = configL . ghcVariantL {-# INLINE ghcVariantL #-} instance HasConfig env => HasConfig (WithMSYS env) where configL = insideMSYSL . configL instance HasBuildConfig env => HasBuildConfig (WithMSYS env) where buildConfigL = insideMSYSL . buildConfigL -- | Set up a modified environment which includes the modified PATH that MSYS2 -- can be found on. 
-- | Run an inner action with a process context that has the MSYS2 binary
-- directories prepended to the PATH, when MSYS2 directories are supplied.
-- With 'Nothing', the inner action runs with the unmodified process context.
runWithMSYS ::
     HasConfig env
  => Maybe ExtraDirs
     -- ^ MSYS2 directories whose @bins@ are prepended to the PATH, if any.
  -> RIO (WithMSYS env) a
  -> RIO env a
runWithMSYS mmsysPaths inner = do
  env <- ask
  pc0 <- view processContextL
  pc <- case mmsysPaths of
    Nothing -> pure pc0
    Just msysPaths -> do
      envars <- either throwM pure $
        augmentPathMap (map toFilePath msysPaths.bins) (view envVarsL pc0)
      mkProcessContext envars
  -- Pin both the process context and the override settings, so that child
  -- process contexts derived later also see the MSYS2-augmented PATH.
  let envMsys =
          WithMSYS
        $ set envOverrideSettingsL (\_ -> pure pc)
        $ set processContextL pc env
  runRIO envMsys inner

-- | special helper for GHCJS which needs an updated source map
-- only project dependencies should get included otherwise source map hash will
-- get changed and EnvConfig will become inconsistent
rebuildEnv ::
     EnvConfig
  -> NeedTargets
  -> Bool
  -> BuildOptsCLI
  -> RIO env EnvConfig
rebuildEnv envConfig needTargets haddockDeps boptsCLI = do
  let bc = envConfig.buildConfig
      cp = envConfig.compilerPaths
      compilerVer = envConfig.sourceMap.compiler
  runRIO (WithGHC cp bc) $ do
    smActual <- actualFromGhc bc.smWanted compilerVer
    let actualPkgs = Map.keysSet smActual.deps <> Map.keysSet smActual.project
        -- Drop globals shadowed by actual project/dependency packages.
        prunedActual =
          smActual { globals = pruneGlobals smActual.globals actualPkgs }
    targets <- parseTargets needTargets haddockDeps boptsCLI prunedActual
    sourceMap <- loadSourceMap targets boptsCLI smActual
    pure $ envConfig
      { sourceMap = sourceMap
      , buildOptsCLI = boptsCLI
      }

-- | Some commands (script, ghci and exec) set targets dynamically
-- see also the note about only local targets for rebuildEnv
withNewLocalBuildTargets :: HasEnvConfig env => [Text] -> RIO env a -> RIO env a
withNewLocalBuildTargets targets f = do
  envConfig <- view envConfigL
  haddockDeps <- view $ configL . to (.build) . to shouldHaddockDeps
  let boptsCLI = envConfig.buildOptsCLI
  envConfig' <-
    rebuildEnv envConfig NeedTargets haddockDeps $
      boptsCLI { targetsCLI = targets}
  local (set envConfigL envConfig') f

-- | Add the include and lib paths to the given Config
addIncludeLib :: ExtraDirs -> Config -> Config
addIncludeLib extraDirs config = config
  { extraIncludeDirs =
         config.extraIncludeDirs
      ++ map toFilePathNoTrailingSep extraDirs.includes
  , extraLibDirs =
         config.extraLibDirs
      ++ map toFilePathNoTrailingSep extraDirs.libs
  }

-- | Ensure both the compiler and the msys toolchain are installed and
-- provide the PATHs to add if necessary
ensureCompilerAndMsys ::
     (HasBuildConfig env, HasGHCVariant env)
  => SetupOpts
  -> RIO env (CompilerPaths, ExtraDirs)
ensureCompilerAndMsys sopts = do
  getSetupInfo' <- memoizeRef getSetupInfo
  mmsys2Tool <- ensureMsys sopts getSetupInfo'
  mmsysPaths <- maybe (pure Nothing) (fmap Just . toolExtraDirs) mmsys2Tool
  actual <- either throwIO pure $ wantedToActual sopts.wantedCompiler
  didWarn <- warnUnsupportedCompiler $ getGhcVersion actual
  -- Modify the initial environment to include the MSYS2 path, if MSYS2 is
  -- being used
  (cp, ghcPaths) <- runWithMSYS mmsysPaths $ ensureCompiler sopts getSetupInfo'
  warnUnsupportedCompilerCabal cp didWarn
  let paths = maybe ghcPaths (ghcPaths <>) mmsysPaths
  pure (cp, paths)

-- | Warn about GHC versions that Stack does not support (below 7.8) or has not
-- been tested with (9.10 and above, subject to the user's notification
-- settings). Returns 'True' if a warning was emitted.
warnUnsupportedCompiler :: (HasConfig env, HasTerm env) => Version -> RIO env Bool
warnUnsupportedCompiler ghcVersion = do
  notifyIfGhcUntested <- view $ configL . to (.notifyIfGhcUntested)
  if
    | ghcVersion < mkVersion [7, 8] -> do
        prettyWarnL
          [ flow "Stack will almost certainly fail with GHC below version 7.8, \
                 \requested"
          , fromString (versionString ghcVersion) <> "."
          , flow "Valiantly attempting to run anyway, but I know this is \
                 \doomed."
          , flow "For more information, see:"
          , style Url "https://github.com/commercialhaskell/stack/issues/648" <> "."
          ]
        pure True
    | ghcVersion >= mkVersion [9, 9] && notifyIfGhcUntested -> do
        prettyWarnL
          [ flow "Stack has not been tested with GHC versions 9.10 and above, \
                 \and using"
          , fromString (versionString ghcVersion) <> ","
          , flow "this may fail."
          ]
        pure True
    | otherwise -> do
        logDebug "Asking for a supported GHC version"
        pure False

-- | Warn about Cabal versions that Stack no longer supports (below 1.24),
-- deprecates (below 2.2) or has not been tested with (3.12 and above, subject
-- to the user's notification settings). Also warns about the GHC version
-- first, unless that warning was already emitted.
warnUnsupportedCompilerCabal ::
     (HasConfig env, HasTerm env)
  => CompilerPaths
  -> Bool -- ^ already warned about GHC?
  -> RIO env ()
warnUnsupportedCompilerCabal cp didWarn = do
  unless didWarn $
    void $ warnUnsupportedCompiler $ getGhcVersion cp.compilerVersion
  let cabalVersion = cp.cabalVersion
  notifyIfCabalUntested <- view $ configL . to (.notifyIfCabalUntested)
  if
    | cabalVersion < mkVersion [1, 24, 0] -> do
        prettyWarnL
          [ flow "Stack no longer supports Cabal versions below 1.24.0.0, but \
                 \version"
          , fromString (versionString cabalVersion)
          , flow "was found. This invocation will most likely fail. To fix \
                 \this, either use an older version of Stack or a newer \
                 \resolver. Acceptable resolvers: lts-7.0/nightly-2016-05-26 \
                 \or later."
          ]
    | cabalVersion < mkVersion [2, 2, 0] -> do
        prettyWarnL
          [ flow "Stack's support of Cabal versions below 2.2.0.0 is \
                 \deprecated and may be removed from the next version of \
                 \ Stack. Cabal version"
          , fromString (versionString cabalVersion)
          , flow "was found. Consider using a resolver that is \
                 \lts-12.0 or later or nightly-2018-03-13 or later."
          ]
    | cabalVersion >= mkVersion [3, 11] && notifyIfCabalUntested ->
        prettyWarnL
          [ flow "Stack has not been tested with Cabal versions 3.12 and \
                 \above, but version"
          , fromString (versionString cabalVersion)
          , flow "was found, this may fail."
          ]
    | otherwise -> pure ()

-- | Ensure that the msys toolchain is installed if necessary and provide the
-- PATHs to add if necessary
ensureMsys ::
     HasBuildConfig env
  => SetupOpts
  -> Memoized SetupInfo
  -> RIO env (Maybe Tool)
ensureMsys sopts getSetupInfo' = do
  platform <- view platformL
  localPrograms <- view $ configL . to (.localPrograms)
  installed <- listInstalled localPrograms
  case platform of
    -- MSYS2 is only relevant on Windows, and only when not explicitly skipped.
    Platform _ Cabal.Windows | not sopts.skipMsys ->
      case getInstalledTool installed (mkPackageName "msys2") (const True) of
        Just tool -> pure (Just tool)
        Nothing
          | sopts.installIfMissing -> do
              si <- runMemoized getSetupInfo'
              let msysDir = fillSep
                    [ style Dir "msys2-yyyymmdd"
                    , flow "(where yyyymmdd is the date-based version)"
                    ]
              osKey <- getOSKey "MSYS2" msysDir
              config <- view configL
              VersionedDownloadInfo version info <-
                case Map.lookup osKey si.msys2 of
                  Just x -> pure x
                  Nothing -> prettyThrowIO $ MSYS2NotFound osKey
              let tool =
                    Tool (PackageIdentifier (mkPackageName "msys2") version)
              Just <$> downloadAndInstallTool
                config.localPrograms info tool (installMsys2Windows si)
          | otherwise -> do
              prettyWarnS "Continuing despite missing tool: MSYS2."
              pure Nothing
    _ -> pure Nothing

-- | Install a GHC binary distribution, or reuse an already-installed build
-- that satisfies the wanted compiler version. Throws if nothing suitable is
-- installed and installing is not permitted.
installGhcBindist ::
     HasBuildConfig env
  => SetupOpts
  -> Memoized SetupInfo
  -> [Tool]
  -> RIO env (Tool, CompilerBuild)
installGhcBindist sopts getSetupInfo' installed = do
  Platform expectedArch _ <- view platformL
  let wanted = sopts.wantedCompiler
      isWanted = isWantedCompiler sopts.compilerCheck sopts.wantedCompiler
  config <- view configL
  ghcVariant <- view ghcVariantL
  wc <- either throwIO (pure . whichCompiler) $ wantedToActual wanted
  possibleCompilers <-
    case wc of
      Ghc -> do
        ghcBuilds <- getGhcBuilds
        forM ghcBuilds $ \ghcBuild -> do
          ghcPkgName <- parsePackageNameThrowing
            (  "ghc"
            ++ ghcVariantSuffix ghcVariant
            ++ compilerBuildSuffix ghcBuild
            )
          pure
            ( getInstalledTool installed ghcPkgName (isWanted . ACGhc)
            , ghcBuild
            )
  -- Keep only builds that are already installed, unless a reinstall is forced.
  let existingCompilers = concatMap
        (\(installedCompiler, compilerBuild) ->
          case (installedCompiler, sopts.forceReinstall) of
            (Just tool, False) -> [(tool, compilerBuild)]
            _ -> [])
        possibleCompilers
  logDebug $
       "Found already installed GHC builds: "
    <> mconcat
         ( intersperse
             ", "
             (map (fromString . compilerBuildName . snd) existingCompilers)
         )
  case existingCompilers of
    (tool, build_):_ -> pure (tool, build_)
    []
      | sopts.installIfMissing -> do
          si <- runMemoized getSetupInfo'
          downloadAndInstallPossibleCompilers
            (map snd possibleCompilers)
            si
            sopts.wantedCompiler
            sopts.compilerCheck
            sopts.ghcBindistURL
      | otherwise -> do
          let suggestion = fromMaybe defaultSuggestion sopts.resolveMissingGHC
              defaultSuggestion = fillSep
                [ flow "To install the correct version of GHC into the \
                       \subdirectory for the specified platform in Stack's \
                       \directory for local tools"
                , parens (pretty config.localPrograms) <> ","
                , flow "try running"
                , style Shell (flow "stack setup")
                , flow "or use the"
                , style Shell "--install-ghc"
                , flow "flag. To use your system GHC installation, run"
                , style
                    Shell
                    (flow "stack config set system-ghc --global true") <> ","
                , flow "or use the"
                , style Shell "--system-ghc"
                , "flag."
                ]
          prettyThrowM $ CompilerVersionMismatch
            Nothing -- FIXME ((\(x, y, _) -> (x, y)) <$> msystem)
            (sopts.wantedCompiler, expectedArch)
            ghcVariant
            ( case possibleCompilers of
                [] -> CompilerBuildStandard
                (_, compilerBuild):_ -> compilerBuild
            )
            sopts.compilerCheck
            sopts.stackYaml
            suggestion

-- | Ensure compiler is installed.
ensureCompiler ::
     forall env. (HasConfig env, HasBuildConfig env, HasGHCVariant env)
  => SetupOpts
  -> Memoized SetupInfo
  -> RIO (WithMSYS env) (CompilerPaths, ExtraDirs)
ensureCompiler sopts getSetupInfo' = do
  let wanted = sopts.wantedCompiler
  wc <- either throwIO (pure . whichCompiler) $ wantedToActual wanted
  hook <- ghcInstallHook
  hookIsExecutable <- handleIO (\_ -> pure False) $
    if osIsWindows
      then doesFileExist hook
        -- can't really detect executable on windows, only
        -- file extension
      else executable <$> getPermissions hook
  Platform expectedArch _ <- view platformL
  let canUseCompiler cp
        | sopts.skipGhcCheck = pure cp
        | not $ isWanted cp.compilerVersion =
            prettyThrowIO UnwantedCompilerVersion
        | cp.arch /= expectedArch = prettyThrowIO UnwantedArchitecture
        | otherwise = pure cp
      isWanted = isWantedCompiler sopts.compilerCheck sopts.wantedCompiler
  let checkCompiler ::
           Path Abs File
        -> RIO (WithMSYS env) (Maybe CompilerPaths)
      checkCompiler compiler = do
        eres <- tryAny $
          pathsFromCompiler wc CompilerBuildStandard False compiler
            >>= canUseCompiler
        case eres of
          Left e -> do
            logDebug $
                 "Not using compiler at "
              <> displayShow (toFilePath compiler)
              <> ": "
              <> displayShow e
            pure Nothing
          Right cp -> pure $ Just cp
  mcp <-
    if
      | sopts.useSystem -> do
          logDebug "Getting system compiler version"
          runConduit $
               sourceSystemCompilers wanted
            .| concatMapMC checkCompiler
            .| await
      | hookIsExecutable -> do
          -- if the hook fails, we fall through to stacks sandboxed
          -- installation
          hookGHC <- runGHCInstallHook sopts hook
          maybe (pure Nothing) checkCompiler hookGHC
      | otherwise -> pure Nothing
  case mcp of
    Nothing -> ensureSandboxedCompiler sopts getSetupInfo'
    Just cp -> do
      let paths = ExtraDirs
            { bins = [parent cp.compiler]
            , includes = []
            , libs = []
            }
      pure (cp, paths)

-- | Runs @STACK_ROOT\/hooks\/ghc-install.sh@.
--
-- Reads and possibly validates the output of the process as the GHC binary and
-- returns it.
runGHCInstallHook :: HasBuildConfig env => SetupOpts -> Path Abs File -> RIO env (Maybe (Path Abs File)) runGHCInstallHook sopts hook = do logDebug "Getting hook installed compiler version" let wanted = sopts.wantedCompiler menv0 <- view processContextL menv <- mkProcessContext (Map.union (wantedCompilerToEnv wanted) $ removeHaskellEnvVars (view envVarsL menv0)) (exit, out) <- withProcessContext menv $ proc "sh" [toFilePath hook] readProcessStdout case exit of ExitSuccess -> do let ghcPath = stripNewline . TL.unpack . TL.decodeUtf8With T.lenientDecode $ out case parseAbsFile ghcPath of Just compiler -> do when sopts.sanityCheck $ sanityCheck compiler logDebug ("Using GHC compiler at: " <> fromString (toFilePath compiler)) pure (Just compiler) Nothing -> do prettyWarnL [ flow "Path to GHC binary is not a valid path:" , style Dir (fromString ghcPath) <> "." ] pure Nothing ExitFailure i -> do prettyWarnL [ flow "GHC install hook exited with code:" , style Error (fromString $ show i) <> "." ] pure Nothing where wantedCompilerToEnv :: WantedCompiler -> EnvVars wantedCompilerToEnv (WCGhc ver) = Map.fromList [ ("HOOK_GHC_TYPE", "bindist") , ("HOOK_GHC_VERSION", T.pack (versionString ver)) ] wantedCompilerToEnv (WCGhcGit commit flavor) = Map.fromList [ ("HOOK_GHC_TYPE", "git") , ("HOOK_GHC_COMMIT", commit) , ("HOOK_GHC_FLAVOR", flavor) , ("HOOK_GHC_FLAVOUR", flavor) ] wantedCompilerToEnv (WCGhcjs ghcjs_ver ghc_ver) = Map.fromList [ ("HOOK_GHC_TYPE", "ghcjs") , ("HOOK_GHC_VERSION", T.pack (versionString ghc_ver)) , ("HOOK_GHCJS_VERSION", T.pack (versionString ghcjs_ver)) ] newlines :: [Char] newlines = ['\n', '\r'] stripNewline :: String -> String stripNewline = filter (`notElem` newlines) ensureSandboxedCompiler :: HasBuildConfig env => SetupOpts -> Memoized SetupInfo -> RIO (WithMSYS env) (CompilerPaths, ExtraDirs) ensureSandboxedCompiler sopts getSetupInfo' = do let wanted = sopts.wantedCompiler -- List installed tools config <- view configL let localPrograms = 
config.localPrograms installed <- listInstalled localPrograms logDebug $ "Installed tools: \n - " <> mconcat (intersperse "\n - " (map (fromString . toolString) installed)) (compilerTool, compilerBuild) <- case sopts.wantedCompiler of -- shall we build GHC from source? WCGhcGit commitId flavour -> buildGhcFromSource getSetupInfo' installed config.compilerRepository commitId flavour _ -> installGhcBindist sopts getSetupInfo' installed paths <- toolExtraDirs compilerTool wc <- either throwIO (pure . whichCompiler) $ wantedToActual wanted menv0 <- view processContextL m <- either throwM pure $ augmentPathMap (toFilePath <$> paths.bins) (view envVarsL menv0) menv <- mkProcessContext (removeHaskellEnvVars m) names <- case wanted of WCGhc version -> pure ["ghc-" ++ versionString version, "ghc"] WCGhcGit{} -> pure ["ghc"] WCGhcjs{} -> throwIO GhcjsNotSupported -- Previously, we used findExecutable to locate these executables. This was -- actually somewhat sloppy, as it could discover executables outside of the -- sandbox. This led to a specific issue on Windows with GHC 9.0.1. See -- https://gitlab.haskell.org/ghc/ghc/-/issues/20074. Instead, now, we look -- on the paths specified only. let loop [] = prettyThrowIO $ SandboxedCompilerNotFound names paths.bins loop (x:xs) = do res <- liftIO $ D.findExecutablesInDirectories (map toFilePath paths.bins) x case res of [] -> loop xs compiler:rest -> do unless (null rest) $ do prettyWarn $ flow "Found multiple candidate compilers:" <> line <> bulletedList (map fromString res) <> blankLine <> fillSep [ flow "This usually indicates a failed installation. \ \Trying anyway with" , fromString compiler ] parseAbsFile compiler compiler <- withProcessContext menv $ do compiler <- loop names -- Run this here to ensure that the sanity check uses the modified -- environment, otherwise we may infect GHC_PACKAGE_PATH and break sanity -- checks. 
when sopts.sanityCheck $ sanityCheck compiler pure compiler cp <- pathsFromCompiler wc compilerBuild True compiler pure (cp, paths) pathsFromCompiler :: forall env. HasConfig env => WhichCompiler -> CompilerBuild -> Bool -> Path Abs File -- ^ executable filepath -> RIO env CompilerPaths pathsFromCompiler wc build sandboxed compiler = withCache $ handleAny onErr $ do let dir = toFilePath $ parent compiler suffixNoVersion | osIsWindows = ".exe" | otherwise = "" msuffixWithVersion = do let prefix = case wc of Ghc -> "ghc-" fmap ("-" ++) $ stripPrefix prefix $ toFilePath $ filename compiler suffixes = maybe id (:) msuffixWithVersion [suffixNoVersion] findHelper :: (WhichCompiler -> [String]) -> RIO env (Path Abs File) findHelper getNames = do toTry <- mapM parseAbsFile [ dir ++ name ++ suffix | suffix <- suffixes, name <- getNames wc ] let loop [] = throwIO $ PrettyException $ ExecutableNotFound toTry loop (guessedPath:rest) = do exists <- doesFileExist guessedPath if exists then pure guessedPath else loop rest prettyDebug $ flow "Looking for executable(s):" <> line <> bulletedList (map pretty toTry) loop toTry pkg <- fmap GhcPkgExe $ findHelper $ \case Ghc -> ["ghc-pkg"] menv0 <- view processContextL menv <- mkProcessContext (removeHaskellEnvVars (view envVarsL menv0)) interpreter <- findHelper $ \case Ghc -> ["runghc"] haddock <- findHelper $ \case Ghc -> ["haddock", "haddock-ghc"] ghcInfo <- proc (toFilePath compiler) ["--info"] $ fmap (toStrictBytes . fst) . 
readProcess_ infotext <- case decodeUtf8' ghcInfo of Left e -> prettyThrowIO $ GHCInfoNotValidUTF8 e Right info -> pure info infoPairs :: [(String, String)] <- case readMaybe $ T.unpack infotext of Nothing -> prettyThrowIO GHCInfoNotListOfPairs Just infoPairs -> pure infoPairs let infoMap = Map.fromList infoPairs eglobaldb <- tryAny $ case Map.lookup "Global Package DB" infoMap of Nothing -> prettyThrowIO GHCInfoMissingGlobalPackageDB Just db -> parseAbsDir db arch <- case Map.lookup "Target platform" infoMap of Nothing -> prettyThrowIO GHCInfoMissingTargetPlatform Just targetPlatform -> case simpleParse $ takeWhile (/= '-') targetPlatform of Nothing -> prettyThrowIO $ GHCInfoTargetPlatformInvalid targetPlatform Just arch -> pure arch compilerVersion <- case wc of Ghc -> case Map.lookup "Project version" infoMap of Nothing -> do prettyWarnS "Key 'Project version' not found in GHC info." getCompilerVersion wc compiler Just versionString' -> ACGhc <$> parseVersionThrowing versionString' globalDB <- case eglobaldb of Left e -> do prettyWarn $ flow "Stack failed to parse the global DB from GHC info." <> blankLine <> flow "While parsing, Stack encountered the error:" <> blankLine <> string (show e) <> blankLine <> flow "Asking ghc-pkg directly." withProcessContext menv $ getGlobalDB pkg Right x -> pure x globalDump <- withProcessContext menv $ globalsFromDump pkg cabalVersion <- case Map.lookup cabalPackageName globalDump of Nothing -> prettyThrowIO $ CabalNotFound compiler Just dp -> pure $ pkgVersion dp.packageIdent pure CompilerPaths { build , arch , sandboxed , compilerVersion , compiler , pkg , interpreter , haddock , cabalVersion , globalDB , ghcInfo , globalDump } where onErr = throwIO . PrettyException . 
InvalidGhcAt compiler withCache inner = do eres <- tryAny $ loadCompilerPaths compiler build sandboxed mres <- case eres of Left e -> do prettyWarn $ flow "Trouble loading CompilerPaths cache:" <> blankLine <> string (displayException e) pure Nothing Right x -> pure x case mres of Just cp -> cp <$ logDebug "Loaded compiler information from cache" Nothing -> do cp <- inner saveCompilerPaths cp `catchAny` \e -> prettyWarn $ flow "Unable to save CompilerPaths cache:" <> blankLine <> string (displayException e) pure cp buildGhcFromSource :: forall env. ( HasTerm env, HasProcessContext env, HasBuildConfig env) => Memoized SetupInfo -> [Tool] -> CompilerRepository -> Text -- ^ Commit ID. -> Text -- ^ Hadrain flavour. -> RIO (WithMSYS env) (Tool, CompilerBuild) buildGhcFromSource getSetupInfo' installed (CompilerRepository url) commitId flavour = do config <- view configL let compilerTool = ToolGhcGit commitId flavour -- detect when the correct GHC is already installed if compilerTool `elem` installed then pure (compilerTool, CompilerBuildStandard) else -- clone the repository and execute the given commands withRepo (SimpleRepo url commitId RepoGit) $ do -- withRepo is guaranteed to set workingDirL, so let's get it mcwd <- traverse parseAbsDir =<< view workingDirL cwd <- maybe (throwIO WorkingDirectoryInvalidBug) pure mcwd let threads = config.jobs relFileHadrianStackDotYaml' = toFilePath relFileHadrianStackDotYaml ghcBootScriptPath = cwd ghcBootScript boot = if osIsWindows then proc "python3" ["boot"] runProcess_ else proc (toFilePath ghcBootScriptPath) [] runProcess_ stack args = proc "stack" args'' runProcess_ where args'' = "--stack-yaml=" <> relFileHadrianStackDotYaml' : args' -- If a resolver is specified on the command line, Stack will -- apply it. This allows the resolver specified in Hadrian's -- stack.yaml file to be overridden. 
args' = maybe args addResolver config.resolver addResolver resolver = "--snapshot=" <> show resolver : args happy = stack ["install", "happy"] alex = stack ["install", "alex"] -- Executed in the Stack environment, because GHC is required. configure = stack ("exec" : "--" : ghcConfigure) ghcConfigure | osIsWindows = ghcConfigureWindows | osIsMacOS = ghcConfigureMacOS | otherwise = ghcConfigurePosix hadrianScripts | osIsWindows = hadrianScriptsWindows | otherwise = hadrianScriptsPosix hadrianArgs = fmap T.unpack [ "-j" <> tshow threads -- parallel build , "--flavour=" <> flavour -- selected flavour , "binary-dist" ] foundHadrianPaths <- filterM doesFileExist $ (cwd ) <$> hadrianScripts hadrianPath <- maybe (prettyThrowIO HadrianScriptNotFound) pure $ listToMaybe foundHadrianPaths exists <- doesFileExist ghcBootScriptPath unless exists $ prettyThrowIO GhcBootScriptNotFound ensureConfigureScript cwd logInfo "Running GHC boot script..." boot doesExecutableExist "happy" >>= \case True -> logInfo "happy executable installed on the PATH." False -> do logInfo "Installing happy executable..." happy doesExecutableExist "alex" >>= \case True -> logInfo "alex executable installed on the PATH." False -> do logInfo "Installing alex executable..." alex logInfo "Running GHC configure script..." configure logSticky $ "Building GHC from source with `" <> display flavour <> "` flavour. It can take a long time (more than one hour)..." -- We need to provide an absolute path to the script since the process -- package only sets working directory _after_ discovering the -- executable. 
proc (toFilePath hadrianPath) hadrianArgs runProcess_ -- find the bindist and install it bindistPath <- parseRelDir "_build/bindist" (_,files) <- listDir (cwd bindistPath) let isBindist p = do extension <- fileExtension (filename p) pure $ "ghc-" `isPrefixOf` toFilePath (filename p) && extension == ".xz" mbindist <- filterM isBindist files case mbindist of [bindist] -> do let bindist' = T.pack (toFilePath bindist) dlinfo = DownloadInfo { url = bindist' -- we can specify a filepath instead of a URL , contentLength = Nothing , sha1 = Nothing , sha256 = Nothing } ghcdlinfo = GHCDownloadInfo mempty mempty dlinfo installer | osIsWindows = installGHCWindows | otherwise = installGHCPosix ghcdlinfo si <- runMemoized getSetupInfo' _ <- downloadAndInstallTool config.localPrograms dlinfo compilerTool (installer si) pure (compilerTool, CompilerBuildStandard) _ -> do forM_ files (logDebug . fromString . (" - " ++) . toFilePath) prettyThrowIO HadrianBindistNotFound -- | Determine which GHC builds to use depending on which shared libraries are -- available on the system. getGhcBuilds :: HasConfig env => RIO env [CompilerBuild] getGhcBuilds = do config <- view configL case config.ghcBuild of Just ghcBuild -> pure [ghcBuild] Nothing -> determineGhcBuild where determineGhcBuild = do -- TODO: a more reliable, flexible, and data driven approach would be to -- actually download small "test" executables (from setup-info) that link to -- the same gmp/tinfo versions that GHC does (i.e. built in same environment -- as the GHC bindist). The algorithm would go something like this: -- -- check for previous 'uname -a'/`ldconfig -p` plus compiler version/variant -- in cache. 
-- if cached, then use that as suffix -- otherwise: -- download setup-info -- go through all with right prefix for os/version/variant -- first try "standard" (no extra suffix), then the rest -- download "compatibility check" exe if not already downloaded -- try running it -- if successful, then choose that -- cache compiler suffix with the uname -a and -- ldconfig -p output hash plus compiler version -- -- Of course, could also try to make a static GHC bindist instead of all -- this rigamarole. platform <- view platformL case platform of Platform _ Cabal.Linux -> do -- Some systems don't have ldconfig in the PATH, so make sure to look in -- /sbin and /usr/sbin as well let sbinEnv m = Map.insert "PATH" ("/sbin:/usr/sbin" <> maybe "" (":" <>) (Map.lookup "PATH" m)) m eldconfigOut <- withModifyEnvVars sbinEnv $ proc "ldconfig" ["-p"] $ tryAny . fmap fst . readProcess_ let firstWords = case eldconfigOut of Right ldconfigOut -> mapMaybe (listToMaybe . T.words) $ T.lines $ T.decodeUtf8With T.lenientDecode $ LBS.toStrict ldconfigOut Left _ -> [] checkLib lib | libT `elem` firstWords = do logDebug $ "Found shared library " <> libD <> " in 'ldconfig -p' output" pure True | osIsWindows = -- Cannot parse /usr/lib on Windows pure False | otherwise = hasMatches lib usrLibDirs -- This is a workaround for the fact that libtinfo.so.x doesn't -- appear in the 'ldconfig -p' output on Arch or Slackware even -- when it exists. There doesn't seem to be an easy way to get the -- true list of directories to scan for shared libs, but this -- works for our particular cases. where libD = fromString (toFilePath lib) libT = T.pack (toFilePath lib) hasMatches lib dirs = do matches <- filterM (doesFileExist . 
( lib)) dirs case matches of [] -> logDebug ( "Did not find shared library " <> libD ) >> pure False (path:_) -> logDebug ( "Found shared library " <> libD <> " in " <> fromString (Path.toFilePath path) ) >> pure True where libD = fromString (toFilePath lib) getLibc6Version = do elddOut <- -- On Alpine Linux, 'ldd --version' will send output to stderr, -- which we wish to smother. proc "ldd" ["--version"] $ tryAny . readProcess_ pure $ case elddOut of Right (lddOut, _) -> let lddOut' = decodeUtf8Lenient (LBS.toStrict lddOut) in case P.parse lddVersion lddOut' of P.Done _ result -> Just result _ -> Nothing Left _ -> Nothing -- Assumes the first line of ldd has the format: -- -- ldd (...) nn.nn -- -- where nn.nn corresponds to the version of libc6. lddVersion :: P.Parser Version lddVersion = do P.skipWhile (/= ')') P.skip (== ')') P.skipSpace lddMajorVersion <- P.decimal P.skip (== '.') lddMinorVersion <- P.decimal P.skip (not . isDigit) pure $ mkVersion [ lddMajorVersion, lddMinorVersion ] hasMusl <- hasMatches relFileLibcMuslx86_64So1 libDirs mLibc6Version <- getLibc6Version case mLibc6Version of Just libc6Version -> logDebug $ "Found shared library libc6 in version: " <> fromString (versionString libc6Version) Nothing -> logDebug "Did not find a version of shared library libc6." 
let hasLibc6_2_32 = maybe False (>= mkVersion [2 , 32]) mLibc6Version hastinfo5 <- checkLib relFileLibtinfoSo5 hastinfo6 <- checkLib relFileLibtinfoSo6 hasncurses6 <- checkLib relFileLibncurseswSo6 hasgmp5 <- checkLib relFileLibgmpSo10 hasgmp4 <- checkLib relFileLibgmpSo3 let libComponents = if hasMusl then [ ["musl"] ] else concat [ if hastinfo6 && hasgmp5 then if hasLibc6_2_32 then [["tinfo6"]] else [["tinfo6-libc6-pre232"]] else [[]] , [ [] | hastinfo5 && hasgmp5 ] , [ ["ncurses6"] | hasncurses6 && hasgmp5 ] , [ ["gmp4"] | hasgmp4 ] ] useBuilds $ map (\c -> case c of [] -> CompilerBuildStandard _ -> CompilerBuildSpecialized (intercalate "-" c)) libComponents Platform _ Cabal.FreeBSD -> do let getMajorVer = readMaybe <=< headMaybe . splitOn "." majorVer <- getMajorVer <$> sysRelease if majorVer >= Just (12 :: Int) then useBuilds [CompilerBuildSpecialized "ino64"] else useBuilds [CompilerBuildStandard] Platform _ Cabal.OpenBSD -> do releaseStr <- mungeRelease <$> sysRelease useBuilds [CompilerBuildSpecialized releaseStr] _ -> useBuilds [CompilerBuildStandard] useBuilds builds = do logDebug $ "Potential GHC builds: " <> mconcat (intersperse ", " (map (fromString . compilerBuildName) builds)) pure builds -- | Encode an OpenBSD version (like "6.1") into a valid argument for -- CompilerBuildSpecialized, so "maj6-min1". Later version numbers are prefixed -- with "r". -- The result r must be such that "ghc-" ++ r is a valid package name, -- as recognized by parsePackageNameFromString. mungeRelease :: String -> String mungeRelease = intercalate "-" . prefixMaj . splitOn "." 
where prefixFst pfx k (rev : revs) = (pfx ++ rev) : k revs prefixFst _ _ [] = [] prefixMaj = prefixFst "maj" prefixMin prefixMin = prefixFst "min" (map ('r':)) sysRelease :: HasTerm env => RIO env String sysRelease = handleIO ( \e -> do prettyWarn $ flow "Could not query OS version:" <> blankLine <> string (displayException e) pure "" ) (liftIO getRelease) -- | Ensure Docker container-compatible 'stack' executable is downloaded ensureDockerStackExe :: HasConfig env => Platform -> RIO env (Path Abs File) ensureDockerStackExe containerPlatform = do config <- view configL containerPlatformDir <- runReaderT platformOnlyRelDir (containerPlatform,PlatformVariantNone) let programsPath = config.localProgramsBase containerPlatformDir tool = Tool (PackageIdentifier (mkPackageName "stack") stackVersion) stackExeDir <- installDir programsPath tool let stackExePath = stackExeDir relFileStack stackExeExists <- doesFileExist stackExePath unless stackExeExists $ do prettyInfoL [ flow "Downloading Docker-compatible" , fromString stackProgName , "executable." ] sri <- downloadStackReleaseInfo Nothing Nothing (Just (versionString stackMinorVersion)) platforms <- runReaderT preferredPlatforms (containerPlatform, PlatformVariantNone) downloadStackExe platforms sri stackExeDir False (const $ pure ()) pure stackExePath -- | Get all executables on the path that might match the wanted compiler sourceSystemCompilers :: (HasLogFunc env, HasProcessContext env) => WantedCompiler -> ConduitT i (Path Abs File) (RIO env) () sourceSystemCompilers wanted = do searchPath <- view exeSearchPathL names <- case wanted of WCGhc version -> pure [ "ghc-" ++ versionString version , "ghc" ] WCGhcjs{} -> throwIO GhcjsNotSupported WCGhcGit{} -> pure [] -- only use sandboxed versions for_ names $ \name -> for_ searchPath $ \dir -> do fp <- resolveFile' $ addExe $ dir FP. 
name exists <- doesFileExist fp when exists $ yield fp where addExe | osIsWindows = (++ ".exe") | otherwise = id -- | Download the most recent SetupInfo getSetupInfo :: HasConfig env => RIO env SetupInfo getSetupInfo = do config <- view configL let inlineSetupInfo = config.setupInfoInline locations' = config.setupInfoLocations locations = if null locations' then [defaultSetupInfoYaml] else locations' resolvedSetupInfos <- mapM loadSetupInfo locations pure (inlineSetupInfo <> mconcat resolvedSetupInfos) where loadSetupInfo urlOrFile = do bs <- case parseUrlThrow urlOrFile of Just req -> LBS.toStrict . getResponseBody <$> httpLbs req Nothing -> liftIO $ S.readFile urlOrFile WithJSONWarnings si warnings <- either throwM pure (Yaml.decodeEither' bs) when (urlOrFile /= defaultSetupInfoYaml) $ logJSONWarnings urlOrFile warnings pure si getInstalledTool :: [Tool] -- ^ already installed -> PackageName -- ^ package to find -> (Version -> Bool) -- ^ which versions are acceptable -> Maybe Tool getInstalledTool installed name goodVersion = Tool <$> maximumByMaybe (comparing pkgVersion) (filterTools name goodVersion installed) downloadAndInstallTool :: (HasTerm env, HasBuildConfig env) => Path Abs Dir -> DownloadInfo -> Tool -> ( Path Abs File -> ArchiveType -> Path Abs Dir -> Path Abs Dir -> RIO env () ) -> RIO env Tool downloadAndInstallTool programsDir downloadInfo tool installer = do ensureDir programsDir (file, at) <- downloadFromInfo programsDir downloadInfo tool dir <- installDir programsDir tool tempDir <- tempInstallDir programsDir tool liftIO $ ignoringAbsence (removeDirRecur tempDir) ensureDir tempDir unmarkInstalled programsDir tool installer file at tempDir dir markInstalled programsDir tool liftIO $ ignoringAbsence (removeDirRecur tempDir) pure tool -- Exceptions thrown by this function are caught by -- 'downloadAndInstallPossibleCompilers'. 
-- | Download and install the wanted GHC compiler, either from an explicitly
-- given binary distribution URL (which requires a custom GHC variant) or from
-- the setup information for the platform's OS key. Only GHC is supported:
-- GHCJS and GHC-from-Git wanted compilers are rejected with an exception.
downloadAndInstallCompiler ::
     (HasBuildConfig env, HasGHCVariant env)
  => CompilerBuild
  -> SetupInfo
  -> WantedCompiler
  -> VersionCheck
  -> Maybe String
  -> RIO env Tool
downloadAndInstallCompiler ghcBuild si wanted@(WCGhc version) versionCheck mbindistURL = do
  ghcVariant <- view ghcVariantL
  (selectedVersion, downloadInfo) <- case mbindistURL of
    Just bindistURL -> do
      -- An explicit bindist URL is only honoured for custom GHC variants.
      case ghcVariant of
        GHCCustom _ -> pure ()
        _ -> throwM RequireCustomGHCVariant
      pure
        ( version
        , GHCDownloadInfo mempty mempty DownloadInfo
            { url = T.pack bindistURL
            , contentLength = Nothing
            , sha1 = Nothing
            , sha256 = Nothing
            }
        )
    _ -> do
      ghcKey <- getGhcKey ghcBuild
      case Map.lookup ghcKey si.ghcByVersion of
        Nothing -> throwM $ UnknownOSKey ghcKey
        Just pairs_ ->
          getWantedCompilerInfo ghcKey versionCheck wanted ACGhc pairs_
  config <- view configL
  let installer = case config.platform of
        Platform _ Cabal.Windows -> installGHCWindows
        _ -> installGHCPosix downloadInfo
  prettyInfo $
    fillSep
      $  flow "Preparing to install GHC"
      :  case ghcVariant of
           GHCStandard -> []
           v -> ["(" <> fromString (ghcVariantName v) <> ")"]
      <> case ghcBuild of
           CompilerBuildStandard -> []
           b -> ["(" <> fromString (compilerBuildName b) <> ")"]
      <> [ flow "to an isolated location. This will not interfere with any \
                \system-level installation."
         ]
  ghcPkgName <- parsePackageNameThrowing
    ("ghc" ++ ghcVariantSuffix ghcVariant ++ compilerBuildSuffix ghcBuild)
  let tool = Tool $ PackageIdentifier ghcPkgName selectedVersion
  downloadAndInstallTool
    config.localPrograms
    downloadInfo.downloadInfo
    tool
    (installer si)
downloadAndInstallCompiler _ _ WCGhcjs{} _ _ = throwIO GhcjsNotSupported
downloadAndInstallCompiler _ _ WCGhcGit{} _ _ =
  throwIO DownloadAndInstallCompilerError

-- Exceptions thrown by this function are caught by
-- 'downloadAndInstallPossibleCompilers'.
-- | Pick, from the available pairs, the newest entry whose key (converted to
-- an actual compiler) satisfies the wanted compiler under the given version
-- check. Throws 'UnknownCompilerVersion' if no entry matches.
getWantedCompilerInfo ::
     (Ord k, MonadThrow m)
  => Text
  -> VersionCheck
  -> WantedCompiler
  -> (k -> ActualCompiler)
  -> Map k a
  -> m (k, a)
getWantedCompilerInfo key versionCheck wanted toCV pairs_ =
  case mpair of
    Just pair -> pure pair
    Nothing -> throwM $ UnknownCompilerVersion
      (Set.singleton key)
      wanted
      (Set.fromList $ map toCV (Map.keys pairs_))
 where
  -- Sort descending on the key so the newest acceptable version wins.
  mpair =
    listToMaybe $
      sortOn (Down . fst) $
        filter
          (isWantedCompiler versionCheck wanted . toCV . fst)
          (Map.toList pairs_)

-- | Download and install the first available compiler build.
downloadAndInstallPossibleCompilers ::
     (HasGHCVariant env, HasBuildConfig env)
  => [CompilerBuild]
  -> SetupInfo
  -> WantedCompiler
  -> VersionCheck
  -> Maybe String
  -> RIO env (Tool, CompilerBuild)
downloadAndInstallPossibleCompilers possibleCompilers si wanted versionCheck mbindistURL =
  go possibleCompilers Nothing
 where
  -- This will stop as soon as one of the builds doesn't throw an @UnknownOSKey@
  -- or @UnknownCompilerVersion@ exception (so it will only try subsequent
  -- builds if one is nonexistent, not if the download or install fails for some
  -- other reason). The @Unknown*@ exceptions thrown by each attempt are
  -- combined into a single exception (if only @UnknownOSKey@ is thrown, then
  -- the first of those is rethrown, but if any @UnknownCompilerVersion@s are
  -- thrown then the attempted OS keys and available versions are unioned).
go [] Nothing = prettyThrowM UnsupportedSetupConfiguration go [] (Just e) = prettyThrowM e go (b:bs) e = do logDebug $ "Trying to setup GHC build: " <> fromString (compilerBuildName b) er <- try $ downloadAndInstallCompiler b si wanted versionCheck mbindistURL case er of Left e'@(UnknownCompilerVersion ks' w' vs') -> case e of Nothing -> go bs (Just e') Just (UnknownOSKey k) -> go bs $ Just $ UnknownCompilerVersion (Set.insert k ks') w' vs' Just (UnknownCompilerVersion ks _ vs) -> go bs $ Just $ UnknownCompilerVersion (Set.union ks' ks) w' (Set.union vs' vs) Just x -> prettyThrowM x Left e'@(UnknownOSKey k') -> case e of Nothing -> go bs (Just e') Just (UnknownOSKey _) -> go bs e Just (UnknownCompilerVersion ks w vs) -> go bs $ Just $ UnknownCompilerVersion (Set.insert k' ks) w vs Just x -> prettyThrowM x Left e' -> prettyThrowM e' Right r -> pure (r, b) getGhcKey :: (HasBuildConfig env, HasGHCVariant env) => CompilerBuild -> RIO env Text getGhcKey ghcBuild = do ghcVariant <- view ghcVariantL wantedComiler <- view $ buildConfigL . to (.smWanted.compiler) ghcVersion <- case wantedComiler of WCGhc version -> pure version WCGhcjs _ _ -> throwIO GhcjsNotSupported WCGhcGit _ _ -> throwIO DownloadAndInstallCompilerError let variantSuffix = ghcVariantSuffix ghcVariant buildSuffix = compilerBuildSuffix ghcBuild ghcDir = style Dir $ mconcat [ "ghc" , fromString variantSuffix , fromString buildSuffix , "-" , fromString $ versionString ghcVersion ] osKey <- getOSKey "GHC" ghcDir pure $ osKey <> T.pack variantSuffix <> T.pack buildSuffix getOSKey :: (HasConfig env, HasPlatform env) => StyleDoc -- ^ Description of the tool that is being set up. -> StyleDoc -- ^ Description of the root directory of the tool. -> RIO env Text getOSKey tool toolDir = do programsDir <- view $ configL . 
to (.localPrograms) platform <- view platformL case platform of Platform I386 Cabal.Linux -> pure "linux32" Platform X86_64 Cabal.Linux -> pure "linux64" Platform I386 Cabal.OSX -> pure "macosx" Platform X86_64 Cabal.OSX -> pure "macosx" Platform I386 Cabal.FreeBSD -> pure "freebsd32" Platform X86_64 Cabal.FreeBSD -> pure "freebsd64" Platform I386 Cabal.OpenBSD -> pure "openbsd32" Platform X86_64 Cabal.OpenBSD -> pure "openbsd64" Platform I386 Cabal.Windows -> pure "windows32" Platform X86_64 Cabal.Windows -> pure "windows64" Platform Arm Cabal.Linux -> pure "linux-armv7" Platform AArch64 Cabal.Linux -> pure "linux-aarch64" Platform Sparc Cabal.Linux -> pure "linux-sparc" Platform AArch64 Cabal.OSX -> pure "macosx-aarch64" Platform AArch64 Cabal.FreeBSD -> pure "freebsd-aarch64" Platform arch os -> prettyThrowM $ UnsupportedSetupCombo os arch tool toolDir programsDir downloadOrUseLocal :: (HasTerm env, HasBuildConfig env) => Text -> DownloadInfo -> Path Abs File -> RIO env (Path Abs File) downloadOrUseLocal downloadLabel downloadInfo destination = case url of (parseUrlThrow -> Just _) -> do ensureDir (parent destination) chattyDownload downloadLabel downloadInfo destination pure destination (parseAbsFile -> Just path) -> do warnOnIgnoredChecks pure path (parseRelFile -> Just path) -> do warnOnIgnoredChecks root <- view projectRootL pure (root path) _ -> prettyThrowIO $ URLInvalid url where url = T.unpack downloadInfo.url warnOnIgnoredChecks = do let DownloadInfo { contentLength , sha1 , sha256 } = downloadInfo when (isJust contentLength) $ prettyWarnS "`content-length` is not checked and should not be specified when \ \`url` is a file path." when (isJust sha1) $ prettyWarnS "`sha1` is not checked and should not be specified when `url` is a \ \file path." 
when (isJust sha256) $ prettyWarn "`sha256` is not checked and should not be specified when `url` is a \ \file path" downloadFromInfo :: (HasTerm env, HasBuildConfig env) => Path Abs Dir -> DownloadInfo -> Tool -> RIO env (Path Abs File, ArchiveType) downloadFromInfo programsDir downloadInfo tool = do archiveType <- case extension of ".tar.xz" -> pure TarXz ".tar.bz2" -> pure TarBz2 ".tar.gz" -> pure TarGz ".7z.exe" -> pure SevenZ _ -> prettyThrowIO $ UnknownArchiveExtension url relativeFile <- parseRelFile $ toolString tool ++ extension let destinationPath = programsDir relativeFile localPath <- downloadOrUseLocal (T.pack (toolString tool)) downloadInfo destinationPath pure (localPath, archiveType) where url = T.unpack downloadInfo.url extension = loop url where loop fp | ext `elem` [".tar", ".bz2", ".xz", ".exe", ".7z", ".gz"] = loop fp' ++ ext | otherwise = "" where (fp', ext) = FP.splitExtension fp data ArchiveType = TarBz2 | TarXz | TarGz | SevenZ installGHCPosix :: HasConfig env => GHCDownloadInfo -> SetupInfo -> Path Abs File -> ArchiveType -> Path Abs Dir -> Path Abs Dir -> RIO env () installGHCPosix downloadInfo _ archiveFile archiveType tempDir destDir = do platform <- view platformL menv0 <- view processContextL menv <- mkProcessContext (removeHaskellEnvVars (view envVarsL menv0)) logDebug $ "menv = " <> displayShow (view envVarsL menv) (zipTool', compOpt) <- case archiveType of TarXz -> pure ("xz", 'J') TarBz2 -> pure ("bzip2", 'j') TarGz -> pure ("gzip", 'z') SevenZ -> prettyThrowIO Unsupported7z -- Slight hack: OpenBSD's tar doesn't support xz. 
-- https://github.com/commercialhaskell/stack/issues/2283#issuecomment-237980986 let tarDep = case (platform, archiveType) of (Platform _ Cabal.OpenBSD, TarXz) -> checkDependency "gtar" _ -> checkDependency "tar" (zipTool, makeTool, tarTool) <- checkDependencies $ (,,) <$> checkDependency zipTool' <*> (checkDependency "gmake" <|> checkDependency "make") <*> tarDep logDebug $ "ziptool: " <> fromString zipTool logDebug $ "make: " <> fromString makeTool logDebug $ "tar: " <> fromString tarTool let runStep step wd env cmd args = do menv' <- modifyEnvVars menv (Map.union env) let logLines lvl = CB.lines .| CL.mapM_ (lvl . displayBytesUtf8) logStdout = logLines logDebug logStderr = logLines logError void $ withWorkingDir (toFilePath wd) $ withProcessContext menv' $ sinkProcessStderrStdout cmd args logStderr logStdout `catchAny` \ex -> prettyThrowIO (GHCInstallFailed ex step cmd args wd tempDir destDir) logSticky $ "Unpacking GHC into " <> fromString (toFilePath tempDir) <> " ..." logDebug $ "Unpacking " <> fromString (toFilePath archiveFile) runStep "unpacking" tempDir mempty tarTool [compOpt : "xf", toFilePath archiveFile] dir <- expectSingleUnpackedDir archiveFile tempDir mOverrideGccPath <- view $ configL . to (.overrideGccPath) -- The make application uses the CC environment variable to configure the -- program for compiling C programs let mGccEnv = let gccEnvFromPath p = Map.singleton "CC" $ T.pack (toFilePath p) in gccEnvFromPath <$> mOverrideGccPath -- Data.Map.union provides a left-biased union, so mGccEnv will prevail let ghcConfigureEnv = fromMaybe Map.empty mGccEnv `Map.union` downloadInfo.configureEnv logSticky "Configuring GHC ..." runStep "configuring" dir ghcConfigureEnv (toFilePath $ dir relFileConfigure) ( ("--prefix=" ++ toFilePath destDir) : map T.unpack downloadInfo.configureOpts ) logSticky "Installing GHC ..." runStep "installing" dir mempty makeTool ["install"] logStickyDone "Installed GHC." 
logDebug $ "GHC installed to " <> fromString (toFilePath destDir) -- | Check if given processes appear to be present, throwing an exception if -- missing. checkDependencies :: CheckDependency env a -> RIO env a checkDependencies (CheckDependency f) = f >>= either (prettyThrowIO . MissingDependencies) pure checkDependency :: HasProcessContext env => String -> CheckDependency env String checkDependency tool = CheckDependency $ do exists <- doesExecutableExist tool pure $ if exists then Right tool else Left [tool] newtype CheckDependency env a = CheckDependency (RIO env (Either [String] a)) deriving Functor instance Applicative (CheckDependency env) where pure x = CheckDependency $ pure (Right x) CheckDependency f <*> CheckDependency x = CheckDependency $ do f' <- f x' <- x pure $ case (f', x') of (Left e1, Left e2) -> Left $ e1 ++ e2 (Left e, Right _) -> Left e (Right _, Left e) -> Left e (Right f'', Right x'') -> Right $ f'' x'' instance Alternative (CheckDependency env) where empty = CheckDependency $ pure $ Left [] CheckDependency x <|> CheckDependency y = CheckDependency $ do res1 <- x case res1 of Left _ -> y Right x' -> pure $ Right x' installGHCWindows :: HasBuildConfig env => SetupInfo -> Path Abs File -> ArchiveType -> Path Abs Dir -> Path Abs Dir -> RIO env () installGHCWindows si archiveFile archiveType _tempDir destDir = do withUnpackedTarball7z "GHC" si archiveFile archiveType destDir prettyInfoL [ flow "GHC installed to" , pretty destDir <> "." 
] installMsys2Windows :: HasBuildConfig env => SetupInfo -> Path Abs File -> ArchiveType -> Path Abs Dir -> Path Abs Dir -> RIO env () installMsys2Windows si archiveFile archiveType _tempDir destDir = do exists <- liftIO $ D.doesDirectoryExist $ toFilePath destDir when exists $ liftIO (D.removeDirectoryRecursive $ toFilePath destDir) `catchIO` \e -> prettyThrowM $ ExistingMSYS2NotDeleted destDir e withUnpackedTarball7z "MSYS2" si archiveFile archiveType destDir -- I couldn't find this officially documented anywhere, but you need to run -- the MSYS shell once in order to initialize some pacman stuff. Once that run -- happens, you can just run commands as usual. menv0 <- view processContextL newEnv0 <- modifyEnvVars menv0 $ Map.insert "MSYSTEM" "MSYS" newEnv <- either throwM pure $ augmentPathMap [toFilePath $ destDir relDirUsr relDirBin] (view envVarsL newEnv0) menv <- mkProcessContext newEnv withWorkingDir (toFilePath destDir) $ withProcessContext menv $ proc "sh" ["--login", "-c", "true"] runProcess_ -- No longer installing git, it's unreliable -- (https://github.com/commercialhaskell/stack/issues/1046) and the -- MSYS2-installed version has bad CRLF defaults. -- -- Install git. We could install other useful things in the future too. -- runCmd (Cmd (Just destDir) "pacman" menv ["-Sy", "--noconfirm", "git"]) Nothing -- | Unpack a compressed tarball using 7zip. Expects a single directory in the -- unpacked results, which is renamed to the destination directory. withUnpackedTarball7z :: HasBuildConfig env => String -- ^ Name of tool, used in error messages -> SetupInfo -> Path Abs File -- ^ Path to archive file -> ArchiveType -> Path Abs Dir -- ^ Destination directory. 
-> RIO env () withUnpackedTarball7z name si archiveFile archiveType destDir = do suffix <- case archiveType of TarXz -> pure ".xz" TarBz2 -> pure ".bz2" TarGz -> pure ".gz" _ -> prettyThrowIO $ TarballInvalid name tarFile <- case T.stripSuffix suffix $ T.pack $ toFilePath (filename archiveFile) of Nothing -> prettyThrowIO $ TarballFileInvalid name archiveFile Just x -> parseRelFile $ T.unpack x run7z <- setup7z si -- We use a short name for the temporary directory to reduce the risk of a -- filepath length of more than 260 characters, which can be problematic for -- 7-Zip even if Long Filepaths are enabled on Windows. let tmpName = "stack-tmp" destDrive = takeDrive destDir ensureDir (parent destDir) withRunInIO $ \run -> -- We use a temporary directory in the same drive as that of 'destDir' to -- reduce the risk of a filepath length of more than 260 characters, which can -- be problematic for 7-Zip even if Long Filepaths are enabled on Windows. We -- do not use the system temporary directory as it may be on a different -- drive. withTempDir destDrive tmpName $ \tmpDir -> run $ do liftIO $ ignoringAbsence (removeDirRecur destDir) run7z tmpDir archiveFile run7z tmpDir (tmpDir tarFile) absSrcDir <- expectSingleUnpackedDir archiveFile tmpDir -- On Windows, 'renameDir' does not work across drives. However, we have -- ensured that 'tmpDir' has the same drive as 'destDir'. renameDir absSrcDir destDir expectSingleUnpackedDir :: (MonadIO m, MonadThrow m) => Path Abs File -> Path Abs Dir -> m (Path Abs Dir) expectSingleUnpackedDir archiveFile unpackDir = do contents <- listDir unpackDir case contents of ([dir], _ ) -> pure dir _ -> prettyThrowIO $ UnknownArchiveStructure archiveFile -- | Download 7z as necessary, and get a function for unpacking things. -- -- Returned function takes an unpack directory and archive. setup7z :: (HasBuildConfig env, MonadIO m) => SetupInfo -> RIO env (Path Abs Dir -> Path Abs File -> m ()) setup7z si = do dir <- view $ configL . 
to (.localPrograms) ensureDir dir let exeDestination = dir relFile7zexe dllDestination = dir relFile7zdll case (si.sevenzDll, si.sevenzExe) of (Just sevenzDll, Just sevenzExe) -> do _ <- downloadOrUseLocal "7z.dll" sevenzDll dllDestination exePath <- downloadOrUseLocal "7z.exe" sevenzExe exeDestination withRunInIO $ \run -> pure $ \outdir archive -> liftIO $ run $ do let cmd = toFilePath exePath args = [ "x" , "-o" ++ toFilePath outdir , "-y" , archiveFP ] archiveFP = toFilePath archive archiveFileName = filename archive archiveDisplay = fromString $ toFilePath archiveFileName isExtract = FP.takeExtension archiveFP == ".tar" prettyInfoL [ if isExtract then "Extracting" else "Decompressing" , pretty archiveFileName <> "..." ] ec <- proc cmd args $ \pc -> if isExtract then withProcessWait (setStdout createSource pc) $ \p -> do total <- runConduit $ getStdout p .| filterCE (== 10) -- newline characters .| foldMC (\count bs -> do let count' = count + S.length bs logSticky $ "Extracted " <> display count' <> " files" pure count' ) 0 logStickyDone $ "Extracted total of " <> display total <> " files from " <> archiveDisplay waitExitCode p else runProcess pc when (ec /= ExitSuccess) $ liftIO $ prettyThrowM (ProblemWhileDecompressing archive) _ -> prettyThrowM SetupInfoMissingSevenz chattyDownload :: HasTerm env => Text -- ^ label -> DownloadInfo -- ^ URL, content-length, sha1, and sha256 -> Path Abs File -- ^ destination -> RIO env () chattyDownload label downloadInfo path = do let url = downloadInfo.url req <- parseUrlThrow $ T.unpack url logSticky $ "Preparing to download " <> display label <> " ..." logDebug $ "Downloading from " <> display url <> " to " <> fromString (toFilePath path) <> " ..." 
hashChecks <- fmap catMaybes $ forM [ ("sha1", HashCheck SHA1, (.sha1)) , ("sha256", HashCheck SHA256, (.sha256)) ] $ \(name, constr, getter) -> case getter downloadInfo of Just bs -> do logDebug $ "Will check against " <> name <> " hash: " <> displayBytesUtf8 bs pure $ Just $ constr $ CheckHexDigestByteString bs Nothing -> pure Nothing when (null hashChecks) $ prettyWarnS "No sha1 or sha256 found in metadata, download hash won't be checked." let dReq = setHashChecks hashChecks $ setLengthCheck mtotalSize $ mkDownloadRequest req x <- verifiedDownloadWithProgress dReq path label mtotalSize if x then logStickyDone ("Downloaded " <> display label <> ".") else logStickyDone ("Already downloaded " <> display label <> ".") where mtotalSize = downloadInfo.contentLength -- | Perform a basic sanity check of GHC sanityCheck :: (HasLogFunc env, HasProcessContext env) => Path Abs File -> RIO env () sanityCheck ghc = withSystemTempDir "stack-sanity-check" $ \dir -> do let fp = toFilePath $ dir relFileMainHs liftIO $ S.writeFile fp $ T.encodeUtf8 $ T.pack $ unlines [ "import Distribution.Simple" -- ensure Cabal library is present , "main = putStrLn \"Hello World\"" ] logDebug $ "Performing a sanity check on: " <> fromString (toFilePath ghc) eres <- withWorkingDir (toFilePath dir) $ proc (toFilePath ghc) [ fp , "-no-user-package-db" -- Required to stop GHC looking for a package environment in default -- locations. , "-hide-all-packages" -- Required because GHC flag -hide-all-packages is passed. , "-package base" , "-package Cabal" -- required for "import Distribution.Simple" ] $ try . readProcess_ case eres of Left e -> prettyThrowIO $ GHCSanityCheckCompileFailed e ghc Right _ -> pure () -- TODO check that the output of running the command is -- correct -- Remove potentially confusing environment variables removeHaskellEnvVars :: Map Text Text -> Map Text Text removeHaskellEnvVars = Map.delete "GHC_PACKAGE_PATH" . Map.delete "GHC_ENVIRONMENT" . 
Map.delete "HASKELL_PACKAGE_SANDBOX" . Map.delete "HASKELL_PACKAGE_SANDBOXES" . Map.delete "HASKELL_DIST_DIR" . -- https://github.com/commercialhaskell/stack/issues/1460 Map.delete "DESTDIR" . -- https://github.com/commercialhaskell/stack/issues/3444 Map.delete "GHCRTS" -- | Get map of environment variables to set to change the GHC's encoding to -- UTF-8. getUtf8EnvVars :: (HasPlatform env, HasProcessContext env, HasTerm env) => ActualCompiler -> RIO env (Map Text Text) getUtf8EnvVars compilerVer = if getGhcVersion compilerVer >= mkVersion [7, 10, 3] -- GHC_CHARENC supported by GHC >=7.10.3 then pure $ Map.singleton "GHC_CHARENC" "UTF-8" else legacyLocale where legacyLocale = do menv <- view processContextL Platform _ os <- view platformL if os == Cabal.Windows then -- On Windows, locale is controlled by the code page, so we don't set -- any environment variables. pure Map.empty else do let checkedVars = map checkVar (Map.toList $ view envVarsL menv) -- List of environment variables that will need to be updated to set -- UTF-8 (because they currently do not specify UTF-8). needChangeVars = concatMap fst checkedVars -- Set of locale-related environment variables that have already -- have a value. existingVarNames = Set.unions (map snd checkedVars) -- True if a locale is already specified by one of the "global" -- locale variables. hasAnyExisting = any (`Set.member` existingVarNames) ["LANG", "LANGUAGE", "LC_ALL"] if null needChangeVars && hasAnyExisting then -- If no variables need changes and at least one "global" variable -- is set, no changes to environment need to be made. pure Map.empty else do -- Get a list of known locales by running @locale -a@. elocales <- tryAny (fst <$> proc "locale" ["-a"] readProcess_) let -- Filter the list to only include locales with UTF-8 encoding. 
utf8Locales = case elocales of Left _ -> [] Right locales -> filter isUtf8Locale ( T.lines $ T.decodeUtf8With T.lenientDecode $ LBS.toStrict locales ) mfallback = getFallbackLocale utf8Locales when (isNothing mfallback) ( prettyWarnS "Unable to set locale to UTF-8 encoding; GHC may \ \fail with 'invalid character'" ) let -- Get the new values of variables to adjust. changes = Map.unions $ map (adjustedVarValue menv utf8Locales mfallback) needChangeVars -- Get the values of variables to add. adds | hasAnyExisting = -- If we already have a "global" variable, then nothing -- needs to be added. Map.empty | otherwise = -- If we don't already have a "global" variable, then set -- LANG to the fallback. case mfallback of Nothing -> Map.empty Just fallback -> Map.singleton "LANG" fallback pure (Map.union changes adds) -- Determines whether an environment variable is locale-related and, if so, -- whether it needs to be adjusted. checkVar :: (Text, Text) -> ([Text], Set Text) checkVar (k,v) = if k `elem` ["LANG", "LANGUAGE"] || "LC_" `T.isPrefixOf` k then if isUtf8Locale v then ([], Set.singleton k) else ([k], Set.singleton k) else ([], Set.empty) -- Adjusted value of an existing locale variable. Looks for valid UTF-8 -- encodings with same language /and/ territory, then with same language, and -- finally the first UTF-8 locale returned by @locale -a@. adjustedVarValue :: ProcessContext -> [Text] -> Maybe Text -> Text -> Map Text Text adjustedVarValue menv utf8Locales mfallback k = case Map.lookup k (view envVarsL menv) of Nothing -> Map.empty Just v -> case concatMap (matchingLocales utf8Locales) [ T.takeWhile (/= '.') v <> "." , T.takeWhile (/= '_') v <> "_"] of (v':_) -> Map.singleton k v' [] -> case mfallback of Just fallback -> Map.singleton k fallback Nothing -> Map.empty -- Determine the fallback locale, by looking for any UTF-8 locale prefixed -- with the list in @fallbackPrefixes@, and if not found, picking the first -- UTF-8 encoding returned by @locale -a@. 
getFallbackLocale :: [Text] -> Maybe Text getFallbackLocale utf8Locales = case concatMap (matchingLocales utf8Locales) fallbackPrefixes of (v:_) -> Just v [] -> case utf8Locales of [] -> Nothing (v:_) -> Just v -- Filter the list of locales for any with the given prefixes -- (case-insensitive). matchingLocales :: [Text] -> Text -> [Text] matchingLocales utf8Locales prefix = filter (\v -> T.toLower prefix `T.isPrefixOf` T.toLower v) utf8Locales -- Does the locale have one of the encodings in @utf8Suffixes@ -- (case-insensitive)? isUtf8Locale locale = any (\ v -> T.toLower v `T.isSuffixOf` T.toLower locale) utf8Suffixes -- Prefixes of fallback locales (case-insensitive) fallbackPrefixes = ["C.", "en_US.", "en_"] -- Suffixes of UTF-8 locales (case-insensitive) utf8Suffixes = [".UTF-8", ".utf8"] -- Binary Stack upgrades -- | Information on a binary release of Stack data StackReleaseInfo = SRIGitHub !Value -- ^ Metadata downloaded from GitHub releases about available binaries. | SRIHaskellStackOrg !HaskellStackOrg -- ^ Information on the latest available binary for the current platforms. data HaskellStackOrg = HaskellStackOrg { url :: !Text , version :: !Version } deriving Show downloadStackReleaseInfo :: (HasLogFunc env, HasPlatform env) => Maybe String -- GitHub org -> Maybe String -- GitHub repo -> Maybe String -- ^ optional version -> RIO env StackReleaseInfo downloadStackReleaseInfo Nothing Nothing Nothing = do platform <- view platformL -- Fallback list of URLs to try for upgrading. let urls0 = case platform of Platform X86_64 Cabal.Linux -> [ "https://get.haskellstack.org/upgrade/linux-x86_64-static.tar.gz" , "https://get.haskellstack.org/upgrade/linux-x86_64.tar.gz" ] Platform X86_64 Cabal.OSX -> [ "https://get.haskellstack.org/upgrade/osx-x86_64.tar.gz" ] Platform X86_64 Cabal.Windows -> [ "https://get.haskellstack.org/upgrade/windows-x86_64.tar.gz" ] _ -> [] -- Helper function: extract the version from a GitHub releases URL. 
let extractVersion loc = do version0 <- case reverse $ splitOn "/" $ T.unpack loc of _final:version0:_ -> Right version0 _ -> Left $ "Insufficient pieces in location: " ++ show loc version1 <- maybe (Left "no leading v on version") Right $ stripPrefix "v" version0 maybe (Left $ "Invalid version: " ++ show version1) Right $ parseVersion version1 -- Try out different URLs. If we've exhausted all of them, fall back to GitHub. loop [] = do logDebug "Could not get binary from haskellstack.org, trying GitHub" downloadStackReleaseInfoGitHub Nothing Nothing Nothing -- Try the next URL loop (url:urls) = do -- Make a HEAD request without any redirects req <- setRequestMethod "HEAD" <$> parseRequest (T.unpack url) res <- httpLbs req { redirectCount = 0 } -- Look for a redirect. We're looking for a standard GitHub releases -- URL where we can extract version information from. case getResponseHeader "location" res of [] -> logDebug "No location header found, continuing" *> loop urls -- Exactly one location header. [locBS] -> case decodeUtf8' locBS of Left e -> logDebug ( "Invalid UTF8: " <> displayShow (locBS, e) ) *> loop urls Right loc -> case extractVersion loc of Left s -> logDebug ( "No version found: " <> displayShow (url, loc, s) ) *> loop (loc:urls) -- We found a valid URL, let's use it! 
Right version -> do let hso = HaskellStackOrg { url = loc , version } logDebug $ "Downloading from haskellstack.org: " <> displayShow hso pure $ SRIHaskellStackOrg hso locs -> logDebug ( "Multiple location headers found: " <> displayShow locs ) *> loop urls loop urls0 downloadStackReleaseInfo morg mrepo mver = downloadStackReleaseInfoGitHub morg mrepo mver -- | Same as above, but always uses GitHub downloadStackReleaseInfoGitHub :: (MonadIO m, MonadThrow m) => Maybe String -- GitHub org -> Maybe String -- GitHub repo -> Maybe String -- ^ optional version -> m StackReleaseInfo downloadStackReleaseInfoGitHub morg mrepo mver = liftIO $ do let org = fromMaybe "commercialhaskell" morg repo = fromMaybe "stack" mrepo let url = concat [ "https://api.github.com/repos/" , org , "/" , repo , "/releases/" , case mver of Nothing -> "latest" Just ver -> "tags/v" ++ ver ] req <- parseRequest url res <- httpJSON $ setGitHubHeaders req let code = getResponseStatusCode res if code >= 200 && code < 300 then pure $ SRIGitHub $ getResponseBody res else prettyThrowIO $ StackReleaseInfoNotFound url preferredPlatforms :: (MonadReader env m, HasPlatform env, MonadThrow m) => m [(Bool, String)] preferredPlatforms = do Platform arch' os' <- view platformL (isWindows, os) <- case os' of Cabal.Linux -> pure (False, "linux") Cabal.Windows -> pure (True, "windows") Cabal.OSX -> pure (False, "osx") Cabal.FreeBSD -> pure (False, "freebsd") _ -> prettyThrowM $ BinaryUpgradeOnOSUnsupported os' arch <- case arch' of I386 -> pure "i386" X86_64 -> pure "x86_64" Arm -> pure "arm" AArch64 -> pure "aarch64" _ -> prettyThrowM $ BinaryUpgradeOnArchUnsupported arch' let hasgmp4 = False -- FIXME import relevant code from Stack.Setup? -- checkLib $(mkRelFile "libgmp.so.3") suffixes -- 'gmp4' ceased to be relevant after Stack 1.9.3 (December 2018). | hasgmp4 = ["-static", "-gmp4", ""] -- 'static' will cease to be relevant after Stack 2.11.1 (May 2023). 
| otherwise = ["-static", ""] pure $ map (\suffix -> (isWindows, concat [os, "-", arch, suffix])) suffixes downloadStackExe :: HasConfig env => [(Bool, String)] -- ^ acceptable platforms -> StackReleaseInfo -> Path Abs Dir -- ^ destination directory -> Bool -- ^ perform PATH-aware checking, see #3232 -> (Path Abs File -> IO ()) -- ^ test the temp exe before renaming -> RIO env () downloadStackExe platforms0 archiveInfo destDir checkPath testExe = do (isWindows, archiveURL) <- let loop [] = prettyThrowIO $ StackBinaryArchiveNotFound (map snd platforms0) loop ((isWindows, p'):ps) = do let p = T.pack p' prettyInfoL [ flow "Querying for archive location for platform:" , style Current (fromString p') <> "." ] case findArchive archiveInfo p of Just x -> pure (isWindows, x) Nothing -> loop ps in loop platforms0 let (destFile, tmpFile) | isWindows = ( destDir relFileStackDotExe , destDir relFileStackDotTmpDotExe ) | otherwise = ( destDir relFileStack , destDir relFileStackDotTmp ) prettyInfoL [ flow "Downloading from:" , style Url (fromString $ T.unpack archiveURL) <> "." ] liftIO $ if | ".tar.gz" `T.isSuffixOf` archiveURL -> handleTarball tmpFile isWindows archiveURL | ".zip" `T.isSuffixOf` archiveURL -> throwIO StackBinaryArchiveZipUnsupportedBug | otherwise -> prettyThrowIO $ StackBinaryArchiveUnsupported archiveURL prettyInfoS "Download complete, testing executable." -- We need to call getExecutablePath before we overwrite the -- currently running binary: after that, Linux will append -- (deleted) to the filename. currExe <- liftIO getExecutablePath >>= parseAbsFile liftIO $ do setFileExecutable (toFilePath tmpFile) testExe tmpFile relocateStackExeFile currExe tmpFile destFile prettyInfoL [ flow "New Stack executable available at:" , pretty destFile <> "." ] destDir' <- liftIO . D.canonicalizePath . toFilePath $ destDir warnInstallSearchPathIssues destDir' ["stack"] when checkPath $ performPathChecking destFile currExe `catchAny` (logError . 
displayShow) where findArchive (SRIGitHub val) platformPattern = do Object top <- pure val Array assets <- KeyMap.lookup "assets" top getFirst $ foldMap (First . findMatch pattern') assets where pattern' = mconcat ["-", platformPattern, "."] findMatch pattern'' (Object o) = do String name <- KeyMap.lookup "name" o guard $ not $ ".asc" `T.isSuffixOf` name guard $ pattern'' `T.isInfixOf` name String url <- KeyMap.lookup "browser_download_url" o Just url findMatch _ _ = Nothing findArchive (SRIHaskellStackOrg hso) _ = pure hso.url handleTarball :: Path Abs File -> Bool -> T.Text -> IO () handleTarball tmpFile isWindows url = do req <- fmap setGitHubHeaders $ parseUrlThrow $ T.unpack url withResponse req $ \res -> do entries <- fmap (Tar.read . LBS.fromChunks) $ lazyConsume $ getResponseBody res .| ungzip let loop Tar.Done = prettyThrowIO $ StackBinaryNotInArchive exeName url loop (Tar.Fail e) = throwM e loop (Tar.Next e es) = case FP.splitPath (Tar.entryPath e) of -- Ignore the first component, see: -- https://github.com/commercialhaskell/stack/issues/5288 [_ignored, name] | name == exeName -> do case Tar.entryContent e of Tar.NormalFile lbs _ -> do ensureDir destDir LBS.writeFile (toFilePath tmpFile) lbs _ -> prettyThrowIO $ FileTypeInArchiveInvalid e url _ -> loop es loop entries where exeName | isWindows = "stack.exe" | otherwise = "stack" relocateStackExeFile :: HasTerm env => Path Abs File -- ^ Path to the currently running executable -> Path Abs File -- ^ Path to the executable file to be relocated -> Path Abs File -- ^ Path to the new location for the excutable file -> RIO env () relocateStackExeFile currExeFile newExeFile destExeFile = do when (osIsWindows && destExeFile == currExeFile) $ do -- Windows allows a running executable's file to be renamed, but not to be -- overwritten. old <- addExtension ".old" currExeFile prettyInfoL [ flow "Renaming existing:" , pretty currExeFile , "as:" , pretty old <> "." 
] renameFile currExeFile old renameFile newExeFile destExeFile -- | Ensure that the Stack executable download is in the same location as the -- currently running executable. See: -- https://github.com/commercialhaskell/stack/issues/3232 performPathChecking :: HasConfig env => Path Abs File -- ^ Path to the newly downloaded file -> Path Abs File -- ^ Path to the currently running executable -> RIO env () performPathChecking newExeFile currExeFile = do unless (newExeFile == currExeFile) $ do prettyInfoL [ flow "Also copying Stack executable to:" , pretty currExeFile <> "." ] tmpFile <- toFilePath <$> addExtension ".tmp" currExeFile eres <- tryIO $ relocateStackExeFile currExeFile newExeFile currExeFile case eres of Right () -> prettyInfoS "Stack executable copied successfully!" Left e | isPermissionError e -> if osIsWindows then do prettyWarn $ flow "Permission error when trying to copy:" <> blankLine <> string (displayException e) else do prettyWarn $ flow "Permission error when trying to copy:" <> blankLine <> string (displayException e) <> blankLine <> fillSep [ flow "Should I try to perform the file copy using" , style Shell "sudo" <> "?" , flow "This may fail." ] toSudo <- promptBool "Try using sudo? (y/n) " when toSudo $ do let run cmd args = do ec <- proc cmd args runProcess when (ec /= ExitSuccess) $ throwIO $ ProcessExited ec cmd args commands = [ ("sudo", [ "cp" , toFilePath newExeFile , tmpFile ]) , ("sudo", [ "mv" , tmpFile , toFilePath currExeFile ]) ] prettyInfo $ flow "Going to run the following commands:" <> blankLine <> bulletedList ( map ( \(cmd, args) -> style Shell $ fillSep $ fromString cmd : map fromString args ) commands ) mapM_ (uncurry run) commands prettyInfo $ line <> flow "sudo file copy worked!" 
| otherwise -> throwM e getDownloadVersion :: StackReleaseInfo -> Maybe Version getDownloadVersion (SRIGitHub val) = do Object o <- Just val String rawName <- KeyMap.lookup "name" o -- drop the "v" at the beginning of the name parseVersion $ T.unpack (T.drop 1 rawName) getDownloadVersion (SRIHaskellStackOrg hso) = Just hso.version stack-2.15.7/src/Stack/Setup/Installed.hs0000644000000000000000000001625314620153446016324 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ViewPatterns #-} module Stack.Setup.Installed ( getCompilerVersion , markInstalled , unmarkInstalled , listInstalled , Tool (..) , toolString , toolNameString , parseToolText , filterTools , toolExtraDirs , installDir , tempInstallDir ) where import qualified Data.ByteString.Char8 as S8 import qualified Data.ByteString.Lazy as BL import Data.Char ( isDigit ) import qualified Data.List as L import qualified Data.Text as T import qualified Data.Text.Encoding as T import Distribution.System ( Platform (..) ) import qualified Distribution.System as Cabal import Path ( (), filename, parseRelDir, parseRelFile ) import Path.IO ( doesDirExist, ignoringAbsence, listDir, removeFile ) import RIO.Process ( HasProcessContext, proc, readProcess_ ) import Stack.Constants ( relDirBin, relDirInclude, relDirLib, relDirLocal, relDirMingw , relDirMingw32, relDirMingw64, relDirUsr ) import Stack.Prelude import Stack.Types.Compiler ( ActualCompiler (..), WhichCompiler (..) ) import Stack.Types.Config ( Config (..), HasConfig (..) ) import Stack.Types.ExtraDirs ( ExtraDirs (..) ) data Tool = Tool PackageIdentifier -- ^ e.g. ghc-7.8.4, msys2-20150512 | ToolGhcGit !Text !Text -- ^ e.g. 
ghc-git-COMMIT_ID-FLAVOUR deriving Eq -- | 'Tool' values are ordered by name (being @ghc-git@, for @ToolGhcGit _ _@) -- alphabetically and then by version (later versions are ordered before -- earlier versions, where applicable). instance Ord Tool where compare (Tool pkgId1) (Tool pkgId2) = if pkgName1 == pkgName2 then compare pkgVersion2 pkgVersion1 -- Later versions ordered first else compare pkgName1 pkgName2 where PackageIdentifier pkgName1 pkgVersion1 = pkgId1 PackageIdentifier pkgName2 pkgVersion2 = pkgId2 compare (Tool pkgId) (ToolGhcGit _ _) = compare (pkgName pkgId) "ghc-git" compare (ToolGhcGit _ _) (Tool pkgId) = compare "ghc-git" (pkgName pkgId) compare (ToolGhcGit c1 f1) (ToolGhcGit c2 f2) = if f1 == f2 then compare c1 c2 else compare f1 f2 toolString :: Tool -> String toolString (Tool ident) = packageIdentifierString ident toolString (ToolGhcGit commit flavour) = "ghc-git-" ++ T.unpack commit ++ "-" ++ T.unpack flavour toolNameString :: Tool -> String toolNameString (Tool ident) = packageNameString $ pkgName ident toolNameString ToolGhcGit{} = "ghc-git" parseToolText :: Text -> Maybe Tool parseToolText (parseWantedCompiler -> Right WCGhcjs{}) = Nothing parseToolText (parseWantedCompiler -> Right (WCGhcGit c f)) = Just (ToolGhcGit c f) parseToolText (parsePackageIdentifier . 
T.unpack -> Just pkgId) = Just (Tool pkgId) parseToolText _ = Nothing markInstalled :: (MonadIO m, MonadThrow m) => Path Abs Dir -> Tool -> m () markInstalled programsPath tool = do fpRel <- parseRelFile $ toolString tool ++ ".installed" writeBinaryFileAtomic (programsPath fpRel) "installed" unmarkInstalled :: MonadIO m => Path Abs Dir -> Tool -> m () unmarkInstalled programsPath tool = liftIO $ do fpRel <- parseRelFile $ toolString tool ++ ".installed" ignoringAbsence (removeFile $ programsPath fpRel) listInstalled :: (MonadIO m, MonadThrow m) => Path Abs Dir -> m [Tool] listInstalled programsPath = doesDirExist programsPath >>= \case False -> pure [] True -> do (_, files) <- listDir programsPath pure $ mapMaybe toTool files where toTool fp = do x <- T.stripSuffix ".installed" $ T.pack $ toFilePath $ filename fp parseToolText x filterTools :: PackageName -- ^ package to find -> (Version -> Bool) -- ^ which versions are acceptable -> [Tool] -- ^ tools to filter -> [PackageIdentifier] filterTools name goodVersion installed = [ pkgId | Tool pkgId <- installed , pkgName pkgId == name , goodVersion (pkgVersion pkgId) ] getCompilerVersion :: (HasProcessContext env, HasLogFunc env) => WhichCompiler -> Path Abs File -- ^ executable -> RIO env ActualCompiler getCompilerVersion wc exe = case wc of Ghc -> do logDebug "Asking GHC for its version" bs <- fst <$> proc (toFilePath exe) ["--numeric-version"] readProcess_ let (_, ghcVersion) = versionFromEnd $ BL.toStrict bs x <- ACGhc <$> parseVersionThrowing (T.unpack $ T.decodeUtf8 ghcVersion) logDebug $ "GHC version is: " <> display x pure x where versionFromEnd = S8.spanEnd isValid . fst . S8.breakEnd isValid isValid c = c == '.' 
|| isDigit c -- | Binary directories for the given installed package toolExtraDirs :: HasConfig env => Tool -> RIO env ExtraDirs toolExtraDirs tool = do config <- view configL dir <- installDir config.localPrograms tool case (config.platform, toolNameString tool) of (Platform _ Cabal.Windows, isGHC -> True) -> pure mempty { bins = [ dir relDirBin , dir relDirMingw relDirBin ] } (Platform Cabal.I386 Cabal.Windows, "msys2") -> pure mempty { bins = [ dir relDirMingw32 relDirBin , dir relDirUsr relDirBin , dir relDirUsr relDirLocal relDirBin ] , includes = [ dir relDirMingw32 relDirInclude ] , libs = [ dir relDirMingw32 relDirLib , dir relDirMingw32 relDirBin ] } (Platform Cabal.X86_64 Cabal.Windows, "msys2") -> pure mempty { bins = [ dir relDirMingw64 relDirBin , dir relDirUsr relDirBin , dir relDirUsr relDirLocal relDirBin ] , includes = [ dir relDirMingw64 relDirInclude ] , libs = [ dir relDirMingw64 relDirLib , dir relDirMingw64 relDirBin ] } (_, isGHC -> True) -> pure mempty { bins = [ dir relDirBin ] } (Platform _ x, toolName) -> do prettyWarnL [ flow "binDirs: unexpected OS/tool combo:" , flow (show (x, toolName) <> ".") ] pure mempty where isGHC n = "ghc" == n || "ghc-" `L.isPrefixOf` n installDir :: (MonadReader env m, MonadThrow m) => Path Abs Dir -> Tool -> m (Path Abs Dir) installDir programsDir tool = do relativeDir <- parseRelDir $ toolString tool pure $ programsDir relativeDir tempInstallDir :: (MonadReader env m, MonadThrow m) => Path Abs Dir -> Tool -> m (Path Abs Dir) tempInstallDir programsDir tool = do relativeDir <- parseRelDir $ toolString tool ++ ".temp" pure $ programsDir relativeDir stack-2.15.7/src/Stack/SetupCmd.hs0000644000000000000000000000735214604306201015020 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Types and functions related to Stack's @setup@ 
command. module Stack.SetupCmd ( SetupCmdOpts (..) , setupCmd , setup ) where import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withBuildConfig, withConfig ) import Stack.Setup ( SetupOpts (..), ensureCompilerAndMsys ) import Stack.Types.BuildConfig ( HasBuildConfig, stackYamlL, wantedCompilerVersionL ) import Stack.Types.CompilerPaths ( CompilerPaths (..) ) import Stack.Types.Config ( Config (..), HasConfig (..) ) import Stack.Types.GHCVariant ( HasGHCVariant ) import Stack.Types.Runner ( Runner ) import Stack.Types.Version ( VersionCheck (..) ) -- | Type representing command line options for the @stack setup@ command. data SetupCmdOpts = SetupCmdOpts { compilerVersion :: !(Maybe WantedCompiler) , forceReinstall :: !Bool , ghcBindistUrl :: !(Maybe String) , ghcjsBootOpts :: ![String] , ghcjsBootClean :: !Bool } -- | Function underlying the @stack setup@ command. setupCmd :: SetupCmdOpts -> RIO Runner () setupCmd sco = withConfig YesReexec $ do installGHC <- view $ configL . to (.installGHC) if installGHC then withBuildConfig $ do (wantedCompiler, compilerCheck, mstack) <- case sco.compilerVersion of Just v -> pure (v, MatchMinor, Nothing) Nothing -> (,,) <$> view wantedCompilerVersionL <*> view (configL . to (.compilerCheck)) <*> (Just <$> view stackYamlL) setup sco wantedCompiler compilerCheck mstack else prettyWarnL [ "The" , style Shell "--no-install-ghc" , flow "flag is inconsistent with" , style Shell (flow "stack setup") <> "." , flow "No action taken." ] setup :: (HasBuildConfig env, HasGHCVariant env) => SetupCmdOpts -> WantedCompiler -> VersionCheck -> Maybe (Path Abs File) -> RIO env () setup sco wantedCompiler compilerCheck stackYaml = do config <- view configL sandboxedGhc <- (.sandboxed) . 
fst <$> ensureCompilerAndMsys SetupOpts { installIfMissing = True , useSystem = config.systemGHC && not sco.forceReinstall , wantedCompiler , compilerCheck , stackYaml , forceReinstall = sco.forceReinstall , sanityCheck = True , skipGhcCheck = False , skipMsys = config.skipMsys , resolveMissingGHC = Nothing , ghcBindistURL = sco.ghcBindistUrl } let compiler = case wantedCompiler of WCGhc _ -> "GHC" WCGhcGit{} -> "GHC (built from source)" WCGhcjs {} -> "GHCJS" compilerHelpMsg = fillSep [ flow "To use this" , compiler , flow "and packages outside of a project, consider using:" , style Shell (flow "stack ghc") <> "," , style Shell (flow "stack ghci") <> "," , style Shell (flow "stack runghc") <> "," , "or" , style Shell (flow "stack exec") <> "." ] if sandboxedGhc then prettyInfoL [ flow "Stack will use a sandboxed" , compiler , flow "it installed." , compilerHelpMsg ] else prettyInfoL [ flow "Stack will use the" , compiler , flow "on your PATH. For more information on paths, see" , style Shell (flow "stack path") , "and" , style Shell (flow "stack exec env") <> "." , compilerHelpMsg ] stack-2.15.7/src/Stack/SourceMap.hs0000644000000000000000000002507314620153446015203 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} module Stack.SourceMap ( mkProjectPackage , snapToDepPackage , additionalDepPackage , loadVersion , getPLIVersion , loadGlobalHints , DumpedGlobalPackage , actualFromGhc , actualFromHints , checkFlagsUsedThrowing , globalCondCheck , pruneGlobals , globalsFromHints , getCompilerInfo , immutableLocSha , loadProjectSnapshotCandidate , SnapshotCandidate , globalsFromDump ) where import Data.ByteString.Builder ( byteString ) import qualified Data.Conduit.List as CL import qualified Data.Text as T import qualified Distribution.PackageDescription as PD import Distribution.System ( Platform (..) 
) import qualified Pantry.SHA256 as SHA256 import qualified RIO.Map as Map import RIO.Process ( HasProcessContext ) import qualified RIO.Set as Set import Stack.Constants ( stackProgName' ) import Stack.PackageDump ( conduitDumpPackage, ghcPkgDump ) import Stack.Prelude import Stack.Types.Build.Exception ( BuildPrettyException (..) ) import Stack.Types.Compiler ( ActualCompiler, actualToWanted, wantedToActual ) import Stack.Types.CompilerPaths ( CompilerPaths (..), GhcPkgExe, HasCompiler (..) ) import Stack.Types.Config ( HasConfig ) import Stack.Types.DumpPackage ( DumpPackage (..) ) import Stack.Types.UnusedFlags ( FlagSource, UnusedFlags (..) ) import Stack.Types.Platform ( HasPlatform (..) ) import Stack.Types.Runner ( rslInLogL ) import Stack.Types.SourceMap ( CommonPackage (..), DepPackage (..), FromSnapshot (..) , GlobalPackage (..), GlobalPackageVersion (..) , ProjectPackage (..), SMActual (..), SMWanted (..) ) -- | Create a 'ProjectPackage' from a directory containing a package. mkProjectPackage :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) => PrintWarnings -> ResolvedPath Dir -> Bool -- ^ Should Haddock documentation be built for the package? -> RIO env ProjectPackage mkProjectPackage printWarnings resolvedDir buildHaddocks = do (gpd, name, cabalFP) <- loadCabalFilePath (Just stackProgName') (resolvedAbsolute resolvedDir) pure ProjectPackage { cabalFP , resolvedDir , projectCommon = CommonPackage { gpd = gpd printWarnings , name , flags = mempty , ghcOptions = mempty , cabalConfigOpts = mempty , buildHaddocks } } -- | Create a 'DepPackage' from a 'PackageLocation', from some additional -- to a snapshot setting (extra-deps or command line) additionalDepPackage :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) => Bool -- ^ Should Haddock documentation be built for the package? 
-> PackageLocation -> RIO env DepPackage additionalDepPackage buildHaddocks location = do (name, gpd) <- case location of PLMutable dir -> do (gpd, name, _cabalfp) <- loadCabalFilePath (Just stackProgName') (resolvedAbsolute dir) pure (name, gpd NoPrintWarnings) PLImmutable pli -> do let PackageIdentifier name _ = packageLocationIdent pli run <- askRunInIO pure (name, run $ loadCabalFileImmutable pli) pure DepPackage { location , hidden = False , fromSnapshot = NotFromSnapshot , depCommon = CommonPackage { gpd , name , flags = mempty , ghcOptions = mempty , cabalConfigOpts = mempty , buildHaddocks } } snapToDepPackage :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) => Bool -- ^ Should Haddock documentation be built for the package? -> PackageName -> SnapshotPackage -> RIO env DepPackage snapToDepPackage buildHaddocks name sp = do run <- askRunInIO pure DepPackage { location = PLImmutable sp.spLocation , hidden = sp.spHidden , fromSnapshot = FromSnapshot , depCommon = CommonPackage { gpd = run $ loadCabalFileImmutable sp.spLocation , name , flags = sp.spFlags , ghcOptions = sp.spGhcOptions , cabalConfigOpts = [] -- No spCabalConfigOpts, not present in snapshots , buildHaddocks } } loadVersion :: MonadIO m => CommonPackage -> m Version loadVersion common = do gpd <- liftIO common.gpd pure (pkgVersion $ PD.package $ PD.packageDescription gpd) getPLIVersion :: PackageLocationImmutable -> Version getPLIVersion (PLIHackage (PackageIdentifier _ v) _ _) = v getPLIVersion (PLIArchive _ pm) = pkgVersion $ pmIdent pm getPLIVersion (PLIRepo _ pm) = pkgVersion $ pmIdent pm globalsFromDump :: (HasProcessContext env, HasTerm env) => GhcPkgExe -> RIO env (Map PackageName DumpedGlobalPackage) globalsFromDump pkgexe = do let pkgConduit = conduitDumpPackage .| CL.foldMap (\dp -> Map.singleton dp.ghcPkgId dp) toGlobals ds = Map.fromList $ map (pkgName . 
(.packageIdent) &&& id) $ Map.elems ds toGlobals <$> ghcPkgDump pkgexe [] pkgConduit globalsFromHints :: HasConfig env => WantedCompiler -> RIO env (Map PackageName Version) globalsFromHints compiler = do mglobalHints <- loadGlobalHints compiler case mglobalHints of Just hints -> pure hints Nothing -> do prettyWarnL [ flow "Unable to load global hints for" , fromString $ T.unpack $ textDisplay compiler ] pure mempty type DumpedGlobalPackage = DumpPackage actualFromGhc :: (HasConfig env, HasCompiler env) => SMWanted -> ActualCompiler -> RIO env (SMActual DumpedGlobalPackage) actualFromGhc smw compiler = do globals <- view $ compilerPathsL . to (.globalDump) pure SMActual { compiler , project = smw.project , deps = smw.deps , globals } actualFromHints :: (HasConfig env) => SMWanted -> ActualCompiler -> RIO env (SMActual GlobalPackageVersion) actualFromHints smw compiler = do globals <- globalsFromHints (actualToWanted compiler) pure SMActual { compiler , project = smw.project , deps = smw.deps , globals = Map.map GlobalPackageVersion globals } -- | Simple cond check for boot packages - checks only OS and Arch globalCondCheck :: (HasConfig env) => RIO env (PD.ConfVar -> Either PD.ConfVar Bool) globalCondCheck = do Platform arch os <- view platformL let condCheck (PD.OS os') = pure $ os' == os condCheck (PD.Arch arch') = pure $ arch' == arch condCheck c = Left c pure condCheck checkFlagsUsedThrowing :: (MonadIO m, MonadThrow m) => Map PackageName (Map FlagName Bool) -> FlagSource -> Map PackageName ProjectPackage -> Map PackageName DepPackage -> m () checkFlagsUsedThrowing packageFlags source prjPackages deps = do unusedFlags <- forMaybeM (Map.toList packageFlags) $ \(pname, flags) -> getUnusedPackageFlags (pname, flags) source prjPackages deps unless (null unusedFlags) $ prettyThrowM $ InvalidFlagSpecification $ Set.fromList unusedFlags getUnusedPackageFlags :: MonadIO m => (PackageName, Map FlagName Bool) -> FlagSource -> Map PackageName ProjectPackage -> Map 
PackageName DepPackage -> m (Maybe UnusedFlags) getUnusedPackageFlags (name, userFlags) source prj deps = let maybeCommon = fmap (.projectCommon) (Map.lookup name prj) <|> fmap (.depCommon) (Map.lookup name deps) in case maybeCommon of -- Package is not available as project or dependency Nothing -> pure $ Just $ UFNoPackage source name -- Package exists, let's check if the flags are defined Just common -> do gpd <- liftIO common.gpd let pname = pkgName $ PD.package $ PD.packageDescription gpd pkgFlags = Set.fromList $ map PD.flagName $ PD.genPackageFlags gpd unused = Map.keysSet $ Map.withoutKeys userFlags pkgFlags if Set.null unused -- All flags are defined, nothing to do then pure Nothing -- Error about the undefined flags else pure $ Just $ UFFlagsNotDefined source pname pkgFlags unused pruneGlobals :: Map PackageName DumpedGlobalPackage -> Set PackageName -> Map PackageName GlobalPackage pruneGlobals globals deps = let (prunedGlobals, keptGlobals) = partitionReplacedDependencies globals (pkgName . (.packageIdent)) (.ghcPkgId) (.depends) deps in Map.map (GlobalPackage . pkgVersion . (.packageIdent)) keptGlobals <> Map.map ReplacedGlobalPackage prunedGlobals getCompilerInfo :: (HasConfig env, HasCompiler env) => RIO env Builder getCompilerInfo = view $ compilerPathsL . to (byteString . (.ghcInfo)) immutableLocSha :: PackageLocationImmutable -> Builder immutableLocSha = byteString . treeKeyToBs . locationTreeKey where locationTreeKey (PLIHackage _ _ tk) = tk locationTreeKey (PLIArchive _ pm) = pmTreeKey pm locationTreeKey (PLIRepo _ pm) = pmTreeKey pm treeKeyToBs (TreeKey (BlobKey sha _)) = SHA256.toHexBytes sha type SnapshotCandidate env = [ResolvedPath Dir] -> RIO env (SMActual GlobalPackageVersion) loadProjectSnapshotCandidate :: (HasConfig env) => RawSnapshotLocation -> PrintWarnings -> Bool -- ^ Should Haddock documentation be build for the package? 
-> RIO env (SnapshotCandidate env) loadProjectSnapshotCandidate loc printWarnings buildHaddocks = do debugRSL <- view rslInLogL (snapshot, _, _) <- loadAndCompleteSnapshotRaw' debugRSL loc Map.empty Map.empty deps <- Map.traverseWithKey (snapToDepPackage False) (snapshotPackages snapshot) let wc = snapshotCompiler snapshot globals <- Map.map GlobalPackageVersion <$> globalsFromHints wc pure $ \projectPackages -> do project <- fmap Map.fromList . for projectPackages $ \resolved -> do pp <- mkProjectPackage printWarnings resolved buildHaddocks pure (pp.projectCommon.name, pp) compiler <- either throwIO pure $ wantedToActual $ snapshotCompiler snapshot pure SMActual { compiler , project , deps = Map.difference deps project , globals } stack-2.15.7/src/Stack/Storage/Project.hs0000644000000000000000000001761514620153446016322 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} {-# OPTIONS_GHC -Wno-unused-top-binds -Wno-identities #-} -- | Work with SQLite database used for caches across a single project. module Stack.Storage.Project ( initProjectStorage , ConfigCacheKey , configCacheKey , loadConfigCache , saveConfigCache , deactiveConfigCache ) where import qualified Data.ByteString as S import qualified Data.Set as Set import Database.Persist.Sqlite ( Entity (..), SelectOpt (..), SqlBackend, Unique, (=.) , (==.), getBy, insert, selectList, update, updateWhere ) import Database.Persist.TH ( mkMigrate, mkPersist, persistLowerCase, share , sqlSettings ) import Pantry.SQLite ( initStorage, withStorage_ ) import Stack.Prelude import Stack.Storage.Util ( handleMigrationException, updateList, updateSet ) import Stack.Types.Build ( CachePkgSrc, ConfigCache (..) 
) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..) ) import Stack.Types.Cache ( ConfigCacheType ) import Stack.Types.ConfigureOpts ( ConfigureOpts (..) ) import Stack.Types.GhcPkgId ( GhcPkgId ) import Stack.Types.Storage ( ProjectStorage (..) ) share [ mkPersist sqlSettings , mkMigrate "migrateAll" ] [persistLowerCase| ConfigCacheParent sql="config_cache" directory FilePath "default=(hex(randomblob(16)))" type ConfigCacheType pkgSrc CachePkgSrc active Bool pathEnvVar Text haddock Bool default=0 UniqueConfigCacheParent directory type sql="unique_config_cache" deriving Show ConfigCacheDirOption parent ConfigCacheParentId sql="config_cache_id" OnDeleteCascade index Int value String sql="option" UniqueConfigCacheDirOption parent index deriving Show ConfigCacheNoDirOption parent ConfigCacheParentId sql="config_cache_id" OnDeleteCascade index Int value String sql="option" UniqueConfigCacheNoDirOption parent index deriving Show ConfigCacheDep parent ConfigCacheParentId sql="config_cache_id" OnDeleteCascade value GhcPkgId sql="ghc_pkg_id" UniqueConfigCacheDep parent value deriving Show ConfigCacheComponent parent ConfigCacheParentId sql="config_cache_id" OnDeleteCascade value S.ByteString sql="component" UniqueConfigCacheComponent parent value deriving Show |] -- | Initialize the database. initProjectStorage :: HasLogFunc env => Path Abs File -- ^ storage file -> (ProjectStorage -> RIO env a) -> RIO env a initProjectStorage fp f = handleMigrationException $ initStorage "Stack" migrateAll fp $ f . ProjectStorage -- | Run an action in a database transaction withProjectStorage :: (HasBuildConfig env, HasLogFunc env) => ReaderT SqlBackend (RIO env) a -> RIO env a withProjectStorage inner = do storage <- view (buildConfigL . 
to (.projectStorage.projectStorage)) withStorage_ storage inner -- | Key used to retrieve configuration or flag cache type ConfigCacheKey = Unique ConfigCacheParent -- | Build key used to retrieve configuration or flag cache configCacheKey :: Path Abs Dir -> ConfigCacheType -> ConfigCacheKey configCacheKey dir = UniqueConfigCacheParent (toFilePath dir) -- | Internal helper to read the 'ConfigCache' readConfigCache :: (HasBuildConfig env, HasLogFunc env) => Entity ConfigCacheParent -> ReaderT SqlBackend (RIO env) ConfigCache readConfigCache (Entity parentId configCacheParent) = do let pkgSrc = configCacheParent.configCacheParentPkgSrc pathRelated <- map ((.configCacheDirOptionValue) . entityVal) <$> selectList [ConfigCacheDirOptionParent ==. parentId] [Asc ConfigCacheDirOptionIndex] nonPathRelated <- map ((.configCacheNoDirOptionValue) . entityVal) <$> selectList [ConfigCacheNoDirOptionParent ==. parentId] [Asc ConfigCacheNoDirOptionIndex] let configureOpts = ConfigureOpts { pathRelated , nonPathRelated } deps <- Set.fromList . map ((.configCacheDepValue) . entityVal) <$> selectList [ConfigCacheDepParent ==. parentId] [] components <- Set.fromList . map ((.configCacheComponentValue) . entityVal) <$> selectList [ConfigCacheComponentParent ==. parentId] [] let pathEnvVar = configCacheParent.configCacheParentPathEnvVar let buildHaddocks = configCacheParent.configCacheParentHaddock pure ConfigCache { configureOpts , deps , components , buildHaddocks , pkgSrc , pathEnvVar } -- | Load 'ConfigCache' from the database. loadConfigCache :: (HasBuildConfig env, HasLogFunc env) => ConfigCacheKey -> RIO env (Maybe ConfigCache) loadConfigCache key = withProjectStorage $ do mparent <- getBy key case mparent of Nothing -> pure Nothing Just parentEntity@(Entity _ configCacheParent) | configCacheParent.configCacheParentActive -> Just <$> readConfigCache parentEntity | otherwise -> pure Nothing -- | Insert or update 'ConfigCache' to the database. 
saveConfigCache :: (HasBuildConfig env, HasLogFunc env) => ConfigCacheKey -> ConfigCache -> RIO env () saveConfigCache key@(UniqueConfigCacheParent dir type_) new = withProjectStorage $ do mparent <- getBy key (parentId, mold) <- case mparent of Nothing -> (, Nothing) <$> insert ConfigCacheParent { configCacheParentDirectory = dir , configCacheParentType = type_ , configCacheParentPkgSrc = new.pkgSrc , configCacheParentActive = True , configCacheParentPathEnvVar = new.pathEnvVar , configCacheParentHaddock = new.buildHaddocks } Just parentEntity@(Entity parentId _) -> do old <- readConfigCache parentEntity update parentId [ ConfigCacheParentPkgSrc =. new.pkgSrc , ConfigCacheParentActive =. True , ConfigCacheParentPathEnvVar =. new.pathEnvVar ] pure (parentId, Just old) updateList ConfigCacheDirOption ConfigCacheDirOptionParent parentId ConfigCacheDirOptionIndex (maybe [] (.configureOpts.pathRelated) mold) new.configureOpts.pathRelated updateList ConfigCacheNoDirOption ConfigCacheNoDirOptionParent parentId ConfigCacheNoDirOptionIndex (maybe [] (.configureOpts.nonPathRelated) mold) new.configureOpts.nonPathRelated updateSet ConfigCacheDep ConfigCacheDepParent parentId ConfigCacheDepValue (maybe Set.empty (.deps) mold) new.deps updateSet ConfigCacheComponent ConfigCacheComponentParent parentId ConfigCacheComponentValue (maybe Set.empty (.components) mold) new.components -- | Mark 'ConfigCache' as inactive in the database. -- We use a flag instead of deleting the records since, in most cases, the same -- cache will be written again within in a few seconds (after -- `cabal configure`), so this avoids unnecessary database churn. deactiveConfigCache :: HasBuildConfig env => ConfigCacheKey -> RIO env () deactiveConfigCache (UniqueConfigCacheParent dir type_) = withProjectStorage $ updateWhere [ConfigCacheParentDirectory ==. dir, ConfigCacheParentType ==. type_] [ConfigCacheParentActive =. 
False] stack-2.15.7/src/Stack/Storage/User.hs0000644000000000000000000003605014620153446015624 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} {-# OPTIONS_GHC -Wno-unused-top-binds -Wno-identities #-} -- | Work with SQLite database used for caches across an entire user account. module Stack.Storage.User ( initUserStorage , PrecompiledCacheKey , precompiledCacheKey , loadPrecompiledCache , savePrecompiledCache , loadDockerImageExeCache , saveDockerImageExeCache , loadCompilerPaths , saveCompilerPaths , upgradeChecksSince , logUpgradeCheck ) where import qualified Data.Set as Set import qualified Data.Text as T import Data.Time.Clock ( UTCTime ) import Database.Persist.Sqlite ( Entity (..), SqlBackend, Unique, (=.), (==.), (>=.), count , deleteBy, getBy, insert, insert_, selectList, update , upsert ) import Database.Persist.TH ( mkMigrate, mkPersist, persistLowerCase, share , sqlSettings ) import Distribution.Text ( simpleParse, display ) import Foreign.C.Types ( CTime (..) ) import Pantry.SQLite ( initStorage, withStorage_ ) import Path ( (), mkRelFile, parseRelFile ) import Path.IO ( resolveFile', resolveDir' ) import qualified RIO.FilePath as FP import Stack.Prelude import Stack.Storage.Util ( handleMigrationException, updateSet ) import Stack.Types.Build ( PrecompiledCache (..) ) import Stack.Types.Cache ( Action (..) ) import Stack.Types.Compiler ( ActualCompiler, compilerVersionText ) import Stack.Types.CompilerBuild ( CompilerBuild ) import Stack.Types.CompilerPaths ( CompilerPaths (..), GhcPkgExe (..) ) import Stack.Types.Config ( Config (..), HasConfig (..) ) import Stack.Types.Storage ( UserStorage (..) 
) import System.Posix.Types ( COff (..) ) import System.PosixCompat.Files ( fileSize, getFileStatus, modificationTime ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Storage.User" module. data StorageUserException = CompilerFileMetadataMismatch | GlobalPackageCacheFileMetadataMismatch | GlobalDumpParseFailure | CompilerCacheArchitectureInvalid Text deriving (Show, Typeable) instance Exception StorageUserException where displayException CompilerFileMetadataMismatch = "Error: [S-8196]\n" ++ "Compiler file metadata mismatch, ignoring cache." displayException GlobalPackageCacheFileMetadataMismatch = "Error: [S-5378]\n" ++ "Global package cache file metadata mismatch, ignoring cache." displayException GlobalDumpParseFailure = "Error: [S-2673]\n" ++ "Global dump did not parse correctly." displayException (CompilerCacheArchitectureInvalid compilerCacheArch) = concat [ "Error: [S-8441]\n" , "Invalid arch: " , show compilerCacheArch ] share [ mkPersist sqlSettings , mkMigrate "migrateAll" ] [persistLowerCase| PrecompiledCacheParent sql="precompiled_cache" platformGhcDir FilePath "default=(hex(randomblob(16)))" compiler Text cabalVersion Text packageKey Text optionsHash ByteString haddock Bool default=0 library FilePath Maybe UniquePrecompiledCacheParent platformGhcDir compiler cabalVersion packageKey optionsHash haddock sql="unique_precompiled_cache" deriving Show PrecompiledCacheSubLib parent PrecompiledCacheParentId sql="precompiled_cache_id" OnDeleteCascade value FilePath sql="sub_lib" UniquePrecompiledCacheSubLib parent value deriving Show PrecompiledCacheExe parent PrecompiledCacheParentId sql="precompiled_cache_id" OnDeleteCaseCascade value FilePath sql="exe" UniquePrecompiledCacheExe parent value deriving Show DockerImageExeCache imageHash Text exePath FilePath exeTimestamp UTCTime compatible Bool DockerImageExeCacheUnique imageHash exePath exeTimestamp deriving Show CompilerCache actualVersion ActualCompiler arch Text -- Include ghc 
executable size and modified time for sanity checking entries ghcPath FilePath ghcSize Int64 ghcModified Int64 ghcPkgPath FilePath runghcPath FilePath haddockPath FilePath cabalVersion Text globalDb FilePath globalDbCacheSize Int64 globalDbCacheModified Int64 info ByteString -- This is the ugliest part of this table, simply storing a Show/Read version of the -- data. We could do a better job with normalized data and proper table structure. -- However, recomputing this value in the future if the data representation changes -- is very cheap, so we'll take the easy way out for now. globalDump Text UniqueCompilerInfo ghcPath -- Last time certain actions were performed LastPerformed action Action timestamp UTCTime UniqueAction action |] -- | Initialize the database. initUserStorage :: HasLogFunc env => Path Abs File -- ^ storage file -> (UserStorage -> RIO env a) -> RIO env a initUserStorage fp f = handleMigrationException $ initStorage "Stack" migrateAll fp $ f . UserStorage -- | Run an action in a database transaction withUserStorage :: (HasConfig env, HasLogFunc env) => ReaderT SqlBackend (RIO env) a -> RIO env a withUserStorage inner = do storage <- view (configL . 
to (.userStorage.userStorage)) withStorage_ storage inner -- | Key used to retrieve the precompiled cache type PrecompiledCacheKey = Unique PrecompiledCacheParent -- | Build key used to retrieve the precompiled cache precompiledCacheKey :: Path Rel Dir -> ActualCompiler -> Version -> Text -> ByteString -> Bool -> PrecompiledCacheKey precompiledCacheKey platformGhcDir compiler cabalVersion = UniquePrecompiledCacheParent (toFilePath platformGhcDir) (compilerVersionText compiler) (T.pack $ versionString cabalVersion) -- | Internal helper to read the 'PrecompiledCache' from the database readPrecompiledCache :: (HasConfig env, HasLogFunc env) => PrecompiledCacheKey -> ReaderT SqlBackend (RIO env) (Maybe ( PrecompiledCacheParentId , PrecompiledCache Rel)) readPrecompiledCache key = do mparent <- getBy key forM mparent $ \(Entity parentId precompiledCacheParent) -> do library <- mapM parseRelFile precompiledCacheParent.precompiledCacheParentLibrary subLibs <- mapM (parseRelFile . (.precompiledCacheSubLibValue) . entityVal) =<< selectList [PrecompiledCacheSubLibParent ==. parentId] [] exes <- mapM (parseRelFile . (.precompiledCacheExeValue) . entityVal) =<< selectList [PrecompiledCacheExeParent ==. parentId] [] pure ( parentId , PrecompiledCache { library , subLibs , exes } ) -- | Load 'PrecompiledCache' from the database. loadPrecompiledCache :: (HasConfig env, HasLogFunc env) => PrecompiledCacheKey -> RIO env (Maybe (PrecompiledCache Rel)) loadPrecompiledCache key = withUserStorage $ fmap snd <$> readPrecompiledCache key -- | Insert or update 'PrecompiledCache' to the database. 
savePrecompiledCache :: (HasConfig env, HasLogFunc env) => PrecompiledCacheKey -> PrecompiledCache Rel -> RIO env () savePrecompiledCache key@( UniquePrecompiledCacheParent precompiledCacheParentPlatformGhcDir precompiledCacheParentCompiler precompiledCacheParentCabalVersion precompiledCacheParentPackageKey precompiledCacheParentOptionsHash precompiledCacheParentHaddock ) new = withUserStorage $ do let precompiledCacheParentLibrary = fmap toFilePath new.library mIdOld <- readPrecompiledCache key (parentId, mold) <- case mIdOld of Nothing -> (, Nothing) <$> insert PrecompiledCacheParent { precompiledCacheParentPlatformGhcDir , precompiledCacheParentCompiler , precompiledCacheParentCabalVersion , precompiledCacheParentPackageKey , precompiledCacheParentOptionsHash , precompiledCacheParentHaddock , precompiledCacheParentLibrary } Just (parentId, old) -> do update parentId [ PrecompiledCacheParentLibrary =. precompiledCacheParentLibrary ] pure (parentId, Just old) updateSet PrecompiledCacheSubLib PrecompiledCacheSubLibParent parentId PrecompiledCacheSubLibValue (maybe Set.empty (toFilePathSet . (.subLibs)) mold) (toFilePathSet new.subLibs) updateSet PrecompiledCacheExe PrecompiledCacheExeParent parentId PrecompiledCacheExeValue (maybe Set.empty (toFilePathSet . (.exes)) mold) (toFilePathSet new.exes) where toFilePathSet = Set.fromList . map toFilePath -- | Get the record of whether an executable is compatible with a Docker image loadDockerImageExeCache :: (HasConfig env, HasLogFunc env) => Text -> Path Abs File -> UTCTime -> RIO env (Maybe Bool) loadDockerImageExeCache imageId exePath exeTimestamp = withUserStorage $ fmap ((.dockerImageExeCacheCompatible) . 
entityVal) <$> getBy (DockerImageExeCacheUnique imageId (toFilePath exePath) exeTimestamp) -- | Sets the record of whether an executable is compatible with a Docker image saveDockerImageExeCache :: (HasConfig env, HasLogFunc env) => Text -> Path Abs File -> UTCTime -> Bool -> RIO env () saveDockerImageExeCache imageId exePath exeTimestamp compatible = void $ withUserStorage $ upsert ( DockerImageExeCache imageId (toFilePath exePath) exeTimestamp compatible ) [] -- | Type-restricted version of 'fromIntegral' to ensure we're making the value -- bigger, not smaller. sizeToInt64 :: COff -> Int64 sizeToInt64 (COff i) = fromIntegral i -- fromIntegral added for 32-bit systems -- | Type-restricted version of 'fromIntegral' to ensure we're making the value -- bigger, not smaller. timeToInt64 :: CTime -> Int64 timeToInt64 (CTime i) = fromIntegral i -- fromIntegral added for 32-bit systems -- | Load compiler information, if available, and confirm that the referenced -- files are unchanged. May throw exceptions! loadCompilerPaths :: HasConfig env => Path Abs File -- ^ compiler executable -> CompilerBuild -> Bool -- ^ sandboxed? -> RIO env (Maybe CompilerPaths) loadCompilerPaths compiler build sandboxed = do mres <- withUserStorage $ getBy $ UniqueCompilerInfo $ toFilePath compiler for mres $ \(Entity _ compilerCache) -> do compilerStatus <- liftIO $ getFileStatus $ toFilePath compiler when ( compilerCache.compilerCacheGhcSize /= sizeToInt64 (fileSize compilerStatus) || compilerCache.compilerCacheGhcModified /= timeToInt64 (modificationTime compilerStatus) ) (throwIO CompilerFileMetadataMismatch) globalDbStatus <- liftIO $ getFileStatus $ compilerCache.compilerCacheGlobalDb FP. 
"package.cache" when ( compilerCache.compilerCacheGlobalDbCacheSize /= sizeToInt64 (fileSize globalDbStatus) || compilerCache.compilerCacheGlobalDbCacheModified /= timeToInt64 (modificationTime globalDbStatus) ) (throwIO GlobalPackageCacheFileMetadataMismatch) -- We could use parseAbsFile instead of resolveFile' below to bypass some -- system calls, at the cost of some really wonky error messages in case -- someone screws up their GHC installation pkg <- GhcPkgExe <$> resolveFile' compilerCache.compilerCacheGhcPkgPath interpreter <- resolveFile' compilerCache.compilerCacheRunghcPath haddock <- resolveFile' compilerCache.compilerCacheHaddockPath globalDB <- resolveDir' compilerCache.compilerCacheGlobalDb cabalVersion <- parseVersionThrowing $ T.unpack compilerCache.compilerCacheCabalVersion globalDump <- case readMaybe $ T.unpack compilerCache.compilerCacheGlobalDump of Nothing -> throwIO GlobalDumpParseFailure Just globalDump -> pure globalDump arch <- case simpleParse $ T.unpack compilerCache.compilerCacheArch of Nothing -> throwIO $ CompilerCacheArchitectureInvalid compilerCache.compilerCacheArch Just arch -> pure arch pure CompilerPaths { compiler , compilerVersion = compilerCache.compilerCacheActualVersion , arch , build , pkg , interpreter , haddock , sandboxed , cabalVersion , globalDB , ghcInfo = compilerCache.compilerCacheInfo , globalDump } -- | Save compiler information. May throw exceptions! 
saveCompilerPaths :: HasConfig env => CompilerPaths -> RIO env () saveCompilerPaths cp = withUserStorage $ do deleteBy $ UniqueCompilerInfo $ toFilePath cp.compiler compilerStatus <- liftIO $ getFileStatus $ toFilePath cp.compiler globalDbStatus <- liftIO $ getFileStatus $ toFilePath $ cp.globalDB $(mkRelFile "package.cache") let GhcPkgExe pkgexe = cp.pkg insert_ CompilerCache { compilerCacheActualVersion = cp.compilerVersion , compilerCacheGhcPath = toFilePath cp.compiler , compilerCacheGhcSize = sizeToInt64 $ fileSize compilerStatus , compilerCacheGhcModified = timeToInt64 $ modificationTime compilerStatus , compilerCacheGhcPkgPath = toFilePath pkgexe , compilerCacheRunghcPath = toFilePath cp.interpreter , compilerCacheHaddockPath = toFilePath cp.haddock , compilerCacheCabalVersion = T.pack $ versionString cp.cabalVersion , compilerCacheGlobalDb = toFilePath cp.globalDB , compilerCacheGlobalDbCacheSize = sizeToInt64 $ fileSize globalDbStatus , compilerCacheGlobalDbCacheModified = timeToInt64 $ modificationTime globalDbStatus , compilerCacheInfo = cp.ghcInfo , compilerCacheGlobalDump = tshow cp.globalDump , compilerCacheArch = T.pack $ Distribution.Text.display cp.arch } -- | How many upgrade checks have occurred since the given timestamp? upgradeChecksSince :: HasConfig env => UTCTime -> RIO env Int upgradeChecksSince since = withUserStorage $ count [ LastPerformedAction ==. UpgradeCheck , LastPerformedTimestamp >=. since ] -- | Log in the database that an upgrade check occurred at the given time. logUpgradeCheck :: HasConfig env => UTCTime -> RIO env () logUpgradeCheck time = withUserStorage $ void $ upsert (LastPerformed UpgradeCheck time) [LastPerformedTimestamp =. 
time] stack-2.15.7/src/Stack/Storage/Util.hs0000644000000000000000000000536214620153446015625 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE TypeFamilies #-} -- | Utils for the other Stack.Storage modules module Stack.Storage.Util ( handleMigrationException , updateList , updateSet ) where import qualified Data.Set as Set import Database.Persist ( BaseBackend, EntityField, PersistEntity , PersistEntityBackend, PersistField, PersistQueryWrite , SafeToInsert, (<-.), (==.), deleteWhere, insertMany_ ) import Stack.Prelude import Stack.Types.Storage ( StoragePrettyException (..) ) -- | Efficiently update a set of values stored in a database table updateSet :: ( PersistEntityBackend record ~ BaseBackend backend , PersistField parentid , PersistField value , Ord value , PersistEntity record , MonadIO m , PersistQueryWrite backend , SafeToInsert record ) => (parentid -> value -> record) -> EntityField record parentid -> parentid -> EntityField record value -> Set value -> Set value -> ReaderT backend m () updateSet recordCons parentFieldCons parentId valueFieldCons old new = when (old /= new) $ do deleteWhere [ parentFieldCons ==. parentId , valueFieldCons <-. Set.toList (Set.difference old new) ] insertMany_ $ map (recordCons parentId) $ Set.toList (Set.difference new old) -- | Efficiently update a list of values stored in a database table. updateList :: ( PersistEntityBackend record ~ BaseBackend backend , PersistField parentid , Ord value , PersistEntity record , MonadIO m , PersistQueryWrite backend , SafeToInsert record ) => (parentid -> Int -> value -> record) -> EntityField record parentid -> parentid -> EntityField record Int -> [value] -> [value] -> ReaderT backend m () updateList recordCons parentFieldCons parentId indexFieldCons old new = when (old /= new) $ do let oldSet = Set.fromList (zip [0 ..] old) newSet = Set.fromList (zip [0 ..] new) deleteWhere [ parentFieldCons ==. parentId , indexFieldCons <-. 
map fst (Set.toList $ Set.difference oldSet newSet) ] insertMany_ $ map (uncurry $ recordCons parentId) $ Set.toList (Set.difference newSet oldSet) handleMigrationException :: HasLogFunc env => RIO env a -> RIO env a handleMigrationException inner = do eres <- try inner either ( \e -> case e :: PantryException of MigrationFailure desc fp ex -> prettyThrowIO $ StorageMigrationFailure desc fp ex _ -> throwIO e ) pure eres stack-2.15.7/src/Stack/Templates.hs0000644000000000000000000000522114505617134015235 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -- | Functions related to Stack's @templates@ command. module Stack.Templates ( templatesCmd , templatesHelp ) where import qualified Data.ByteString.Lazy as LB import qualified Data.Text.IO as T import Network.HTTP.StackClient ( HttpException (..), getResponseBody, httpLbs, parseUrlThrow , setGitHubHeaders ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig ) import Stack.Types.Runner ( Runner ) -- | Type representing \'pretty\' exceptions thrown by functions exported by the -- "Stack.Templates" module. data TemplatesPrettyException = DownloadTemplatesHelpFailed !HttpException | TemplatesHelpEncodingInvalid !String !UnicodeException deriving Typeable deriving instance Show TemplatesPrettyException instance Pretty TemplatesPrettyException where pretty (DownloadTemplatesHelpFailed err) = "[S-8143]" <> line <> fillSep [ flow "Stack failed to download the help for" , style Shell "stack templates" <> "." ] <> blankLine <> flow "While downloading, Stack encountered the following error:" <> blankLine <> string (displayException err) pretty (TemplatesHelpEncodingInvalid url err) = "[S-6670]" <> line <> fillSep [ flow "Stack failed to decode the help for" , style Shell "stack templates" , flow "downloaded from" , style Url (fromString url) <> "." 
] <> blankLine <> flow "While decoding, Stack encountered the following error:" <> blankLine <> string (displayException err) instance Exception TemplatesPrettyException -- | Function underlying the @stack templates@ command. Display instructions for -- how to use templates. templatesCmd :: () -> RIO Runner () templatesCmd () = withConfig NoReexec templatesHelp -- | Display help for the templates command. templatesHelp :: HasTerm env => RIO env () templatesHelp = do let url = defaultTemplatesHelpUrl req <- fmap setGitHubHeaders (parseUrlThrow url) resp <- catch (httpLbs req) (prettyThrowM . DownloadTemplatesHelpFailed) case decodeUtf8' $ LB.toStrict $ getResponseBody resp of Left err -> prettyThrowM $ TemplatesHelpEncodingInvalid url err Right txt -> liftIO $ T.putStrLn txt -- | Default web URL to get the `stack templates` help output. defaultTemplatesHelpUrl :: String defaultTemplatesHelpUrl = "https://raw.githubusercontent.com/commercialhaskell/stack-templates/master/STACK_HELP.md" stack-2.15.7/src/Stack/Types/AddCommand.hs0000644000000000000000000000103314445120723016363 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Types.AddCommand ( AddCommand ) where import Control.Monad.Trans.Except ( ExceptT ) import Control.Monad.Writer ( Writer ) import qualified Options.Applicative as OA import Stack.Prelude import Stack.Types.GlobalOptsMonoid ( GlobalOptsMonoid ) import Stack.Types.Runner ( Runner ) type AddCommand = ExceptT (RIO Runner ()) (Writer (OA.Mod OA.CommandFields (RIO Runner (), GlobalOptsMonoid))) () stack-2.15.7/src/Stack/Types/AllowNewerDeps.hs0000644000000000000000000000127114502056214017270 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Types.AllowNewerDeps ( AllowNewerDeps (..) ) where import Data.Aeson.Types ( FromJSON (..) 
) import qualified Distribution.PackageDescription as C import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) import Stack.Prelude newtype AllowNewerDeps = AllowNewerDeps [PackageName] deriving (Eq, Generic, Ord, Read, Show) instance Semigroup AllowNewerDeps where (<>) = mappenddefault instance Monoid AllowNewerDeps where mappend = (<>) mempty = memptydefault instance FromJSON AllowNewerDeps where parseJSON = fmap (AllowNewerDeps . fmap C.mkPackageName) . parseJSON stack-2.15.7/src/Stack/Types/ApplyGhcOptions.hs0000644000000000000000000000143214620153446017465 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.ApplyGhcOptions ( ApplyGhcOptions (..) ) where import Data.Aeson.Types ( FromJSON (..), withText ) import Stack.Prelude -- | Which packages do ghc-options on the command line apply to? data ApplyGhcOptions = AGOTargets -- ^ all local targets | AGOLocals -- ^ all local packages, even non-targets | AGOEverything -- ^ every package deriving (Bounded, Enum, Eq, Ord, Read, Show) instance FromJSON ApplyGhcOptions where parseJSON = withText "ApplyGhcOptions" $ \t -> case t of "targets" -> pure AGOTargets "locals" -> pure AGOLocals "everything" -> pure AGOEverything _ -> fail $ "Invalid ApplyGhcOptions: " ++ show t stack-2.15.7/src/Stack/Types/ApplyProgOptions.hs0000644000000000000000000000155014620153446017674 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.ApplyProgOptions ( ApplyProgOptions (..) ) where import Data.Aeson.Types ( FromJSON (..), withText ) import Stack.Prelude -- | Which packages do all and any --PROG-option options on the command line -- apply to? data ApplyProgOptions = APOTargets -- ^ All local packages that are targets. | APOLocals -- ^ All local packages (targets or otherwise). | APOEverything -- ^ All packages (local or otherwise). 
deriving (Bounded, Enum, Eq, Ord, Read, Show) instance FromJSON ApplyProgOptions where parseJSON = withText "ApplyProgOptions" $ \t -> case t of "targets" -> pure APOTargets "locals" -> pure APOLocals "everything" -> pure APOEverything _ -> fail $ "Invalid ApplyProgOptions: " ++ show t stack-2.15.7/src/Stack/Types/Build.hs0000644000000000000000000002233014620153446015441 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Build-specific types. module Stack.Types.Build ( InstallLocation (..) , Installed (..) , psVersion , Task (..) , taskAnyMissing , taskIsTarget , taskLocation , taskProvides , taskTargetIsMutable , taskTypeLocation , taskTypePackageIdentifier , LocalPackage (..) , Plan (..) , TestOpts (..) , BenchmarkOpts (..) , FileWatchOpts (..) , BuildOpts (..) , BuildSubset (..) , defaultBuildOpts , TaskType (..) , installLocationIsMutable , TaskConfigOpts (..) , BuildCache (..) , ConfigCache (..) , configureOpts , CachePkgSrc (..) , toCachePkgSrc , FileCacheInfo (..) , PrecompiledCache (..) , ExcludeTHLoading (..) , ConvertPathsToAbsolute (..) , KeepOutputOpen (..) ) where import Data.Aeson ( ToJSON, FromJSON ) import qualified Data.ByteString as S import Data.List as L import qualified Data.Map as Map import qualified Data.Text as T import Database.Persist.Sql ( PersistField (..), PersistFieldSql (..) , PersistValue (PersistText), SqlType (SqlString) ) import Path ( parent ) import qualified RIO.Set as Set import Stack.BuildOpts ( defaultBuildOpts ) import Stack.Prelude import Stack.Types.BuildOpts ( BenchmarkOpts (..), BuildOpts (..), TestOpts (..) ) import Stack.Types.BuildOptsCLI ( BuildSubset (..), FileWatchOpts (..) ) import Stack.Types.ConfigureOpts ( ConfigureOpts, configureOpts ) import Stack.Types.GhcPkgId ( GhcPkgId ) import Stack.Types.IsMutable ( IsMutable (..) 
) import Stack.Types.Package ( FileCacheInfo (..), InstallLocation (..), Installed (..) , LocalPackage (..), Package (..), PackageSource (..) , packageIdentifier, psVersion ) -- | Package dependency oracle. newtype PkgDepsOracle = PkgDeps PackageName deriving (Eq, NFData, Show, Typeable) -- | Stored on disk to know whether the files have changed. newtype BuildCache = BuildCache { times :: Map FilePath FileCacheInfo -- ^ Modification times of files. } deriving (Eq, FromJSON, Generic, Show, ToJSON, Typeable) instance NFData BuildCache -- | Stored on disk to know whether the flags have changed. data ConfigCache = ConfigCache { configureOpts :: !ConfigureOpts -- ^ All Cabal configure options used for this package. , deps :: !(Set GhcPkgId) -- ^ The GhcPkgIds of all of the dependencies. Since Cabal doesn't take -- the complete GhcPkgId (only a PackageIdentifier) in the configure -- options, just using the previous value is insufficient to know if -- dependencies have changed. , components :: !(Set S.ByteString) -- ^ The components to be built. It's a bit of a hack to include this in -- here, as it's not a configure option (just a build option), but this -- is a convenient way to force compilation when the components change. , buildHaddocks :: !Bool -- ^ Are haddocks to be built? 
, pkgSrc :: !CachePkgSrc , pathEnvVar :: !Text -- ^ Value of the PATH env var, see -- } deriving (Data, Eq, Generic, Show, Typeable) instance NFData ConfigCache data CachePkgSrc = CacheSrcUpstream | CacheSrcLocal FilePath deriving (Data, Eq, Generic, Read, Show, Typeable) instance NFData CachePkgSrc instance PersistField CachePkgSrc where toPersistValue CacheSrcUpstream = PersistText "upstream" toPersistValue (CacheSrcLocal fp) = PersistText ("local:" <> T.pack fp) fromPersistValue (PersistText t) = if t == "upstream" then Right CacheSrcUpstream else case T.stripPrefix "local:" t of Just fp -> Right $ CacheSrcLocal (T.unpack fp) Nothing -> Left $ "Unexpected CachePkgSrc value: " <> t fromPersistValue _ = Left "Unexpected CachePkgSrc type" instance PersistFieldSql CachePkgSrc where sqlType _ = SqlString toCachePkgSrc :: PackageSource -> CachePkgSrc toCachePkgSrc (PSFilePath lp) = CacheSrcLocal (toFilePath (parent lp.cabalFP)) toCachePkgSrc PSRemote{} = CacheSrcUpstream -- | A type representing tasks to perform when building. data Task = Task { taskType :: !TaskType -- ^ The task type, telling us how to build this , configOpts :: !TaskConfigOpts -- ^ A set of the package identifiers of dependencies for which 'GhcPkgId' -- are missing and a function which yields configure options, given a -- dictionary of those identifiers and their 'GhcPkgId'. , buildHaddocks :: !Bool , present :: !(Map PackageIdentifier GhcPkgId) -- ^ A dictionary of the package identifiers of already-installed -- dependencies, and their 'GhcPkgId'. , allInOne :: !Bool -- ^ indicates that the package can be built in one step , cachePkgSrc :: !CachePkgSrc , buildTypeConfig :: !Bool -- ^ Is the build type of this package Configure. 
Check out -- ensureConfigureScript in Stack.Build.Execute for the motivation } deriving Show -- | Given the IDs of any missing packages, produce the configure options data TaskConfigOpts = TaskConfigOpts { missing :: !(Set PackageIdentifier) -- ^ Dependencies for which we don't yet have an GhcPkgId , opts :: !(Map PackageIdentifier GhcPkgId -> ConfigureOpts) -- ^ Produce the list of options given the missing @GhcPkgId@s } instance Show TaskConfigOpts where show (TaskConfigOpts missing f) = concat [ "Missing: " , show missing , ". Without those: " , show $ f Map.empty ] -- | Type representing different types of task, depending on what is to be -- built. data TaskType = TTLocalMutable LocalPackage -- ^ Building local source code. | TTRemotePackage IsMutable Package PackageLocationImmutable -- ^ Building something from the package index (upstream). deriving Show -- | Were any of the dependencies missing? taskAnyMissing :: Task -> Bool taskAnyMissing task = not $ Set.null task.configOpts.missing -- | A function to yield the package name and version of a given 'TaskType' -- value. taskTypePackageIdentifier :: TaskType -> PackageIdentifier taskTypePackageIdentifier (TTLocalMutable lp) = packageIdentifier lp.package taskTypePackageIdentifier (TTRemotePackage _ p _) = packageIdentifier p taskIsTarget :: Task -> Bool taskIsTarget t = case t.taskType of TTLocalMutable lp -> lp.wanted _ -> False -- | A function to yield the relevant database (write-only or mutable) of a -- given 'TaskType' value. taskTypeLocation :: TaskType -> InstallLocation taskTypeLocation (TTLocalMutable _) = Local taskTypeLocation (TTRemotePackage Mutable _ _) = Local taskTypeLocation (TTRemotePackage Immutable _ _) = Snap -- | A function to yield the relevant database (write-only or mutable) of the -- given task. taskLocation :: Task -> InstallLocation taskLocation = taskTypeLocation . (.taskType) -- | A function to yield the package name and version to be built by the given -- task. 
taskProvides :: Task -> PackageIdentifier taskProvides = taskTypePackageIdentifier . (.taskType) taskTargetIsMutable :: Task -> IsMutable taskTargetIsMutable task = case task.taskType of TTLocalMutable _ -> Mutable TTRemotePackage mutable _ _ -> mutable installLocationIsMutable :: InstallLocation -> IsMutable installLocationIsMutable Snap = Immutable installLocationIsMutable Local = Mutable -- | A complete plan of what needs to be built and how to do it data Plan = Plan { tasks :: !(Map PackageName Task) , finals :: !(Map PackageName Task) -- ^ Final actions to be taken (test, benchmark, etc) , unregisterLocal :: !(Map GhcPkgId (PackageIdentifier, Text)) -- ^ Text is reason we're unregistering, for display only , installExes :: !(Map Text InstallLocation) -- ^ Executables that should be installed after successful building } deriving Show -- | Information on a compiled package: the library .conf file (if relevant), -- the sub-libraries (if present) and all of the executable paths. data PrecompiledCache base = PrecompiledCache { library :: !(Maybe (Path base File)) -- ^ .conf file inside the package database , subLibs :: ![Path base File] -- ^ .conf file inside the package database, for each of the sub-libraries , exes :: ![Path base File] -- ^ Full paths to executables } deriving (Eq, Generic, Show, Typeable) instance NFData (PrecompiledCache Abs) instance NFData (PrecompiledCache Rel) data ExcludeTHLoading = ExcludeTHLoading | KeepTHLoading data ConvertPathsToAbsolute = ConvertPathsToAbsolute | KeepPathsAsIs -- | special marker for expected failures in curator builds, using those we need -- to keep log handle open as build continues further even after a failure data KeepOutputOpen = KeepOpen | CloseOnException deriving Eq stack-2.15.7/src/Stack/Types/Build/ConstructPlan.hs0000644000000000000000000002053414620153446020244 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} -- | A module providing types and related helper 
functions used in module -- "Stack.Build.ConstructPlan". module Stack.Types.Build.ConstructPlan ( PackageInfo (..) , CombinedMap , M , W (..) , AddDepRes (..) , toTask , adrVersion , adrHasLibrary , Ctx (..) , UnregisterState (..) , ToolWarning (..) ) where import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) import RIO.Process ( HasProcessContext (..) ) import RIO.State ( StateT ) import RIO.Writer ( WriterT (..) ) import Stack.Package ( hasBuildableMainLibrary ) import Stack.Prelude hiding ( loadPackage ) import Stack.Types.Build ( Task (..), TaskType (..), taskProvides ) import Stack.Types.Build.Exception ( ConstructPlanException ) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig(..) ) import Stack.Types.CompilerPaths ( HasCompiler (..) ) import Stack.Types.Config ( HasConfig (..) ) import Stack.Types.ConfigureOpts ( BaseConfigOpts ) import Stack.Types.Curator ( Curator ) import Stack.Types.DumpPackage ( DumpPackage ) import Stack.Types.EnvConfig ( EnvConfig (..), HasEnvConfig (..), HasSourceMap (..) ) import Stack.Types.GhcPkgId ( GhcPkgId ) import Stack.Types.GHCVariant ( HasGHCVariant (..) ) import Stack.Types.Installed ( InstallLocation, Installed (..), installedVersion ) import Stack.Types.Package ( ExeName (..), LocalPackage (..), Package (..) , PackageSource (..) ) import Stack.Types.ParentMap ( ParentMap ) import Stack.Types.Platform ( HasPlatform (..) ) import Stack.Types.Runner ( HasRunner (..) ) -- | Type representing information about packages, namely information about -- whether or not a package is already installed and, unless the package is not -- to be built (global packages), where its source code is located. data PackageInfo = PIOnlyInstalled InstallLocation Installed -- ^ This indicates that the package is already installed, and that we -- shouldn't build it from source. This is only the case for global -- packages. 
| PIOnlySource PackageSource -- ^ This indicates that the package isn't installed, and we know where to -- find its source. | PIBoth PackageSource Installed -- ^ This indicates that the package is installed and we know where to find -- its source. We may want to reinstall from source. deriving Show -- | A type synonym representing dictionaries of package names, and combined -- information about the package in respect of whether or not it is already -- installed and, unless the package is not to be built (global packages), where -- its source code is located. type CombinedMap = Map PackageName PackageInfo -- | Type synonym representing values used during the construction of a build -- plan. The type is an instance of 'Monad', hence its name. type M = WriterT W -- ^ The output to be collected ( StateT (Map PackageName (Either ConstructPlanException AddDepRes)) -- ^ Library map (RIO Ctx) ) -- | Type representing values used as the output to be collected during the -- construction of a build plan. data W = W { wFinals :: !(Map PackageName (Either ConstructPlanException Task)) -- ^ A dictionary of package names, and either a final task to perform when -- building the package or an exception. , wInstall :: !(Map Text InstallLocation) -- ^ A dictionary of executables to be installed, and location where the -- executable's binary is placed. , wDirty :: !(Map PackageName Text) -- ^ A dictionary of local packages, and the reason why the local package is -- considered dirty. , wWarnings :: !([StyleDoc] -> [StyleDoc]) -- ^ Warnings. , wParents :: !ParentMap -- ^ A dictionary of package names, and a list of pairs of the identifier -- of a package depending on the package and the version range specified for -- the dependency by that package. Used in the reporting of failure to -- construct a build plan. } deriving Generic instance Semigroup W where (<>) = mappenddefault instance Monoid W where mempty = memptydefault mappend = (<>) -- | Type representing results of 'addDep'. 
data AddDepRes = ADRToInstall Task -- ^ A task must be performed to provide the package name. | ADRFound InstallLocation Installed -- ^ An existing installation provides the package name. deriving Show toTask :: AddDepRes -> Maybe Task toTask (ADRToInstall task) = Just task toTask (ADRFound _ _) = Nothing adrVersion :: AddDepRes -> Version adrVersion (ADRToInstall task) = pkgVersion $ taskProvides task adrVersion (ADRFound _ installed) = installedVersion installed adrHasLibrary :: AddDepRes -> Bool adrHasLibrary (ADRToInstall task) = case task.taskType of TTLocalMutable lp -> packageHasLibrary lp.package TTRemotePackage _ p _ -> packageHasLibrary p where -- make sure we consider sub-libraries as libraries too packageHasLibrary :: Package -> Bool packageHasLibrary p = hasBuildableMainLibrary p || not (null p.subLibraries) adrHasLibrary (ADRFound _ Library{}) = True adrHasLibrary (ADRFound _ Executable{}) = False -- | Type representing values used as the environment to be read from during the -- construction of a build plan (the \'context\'). data Ctx = Ctx { baseConfigOpts :: !BaseConfigOpts -- ^ Basic information used to determine configure options , loadPackage :: !( PackageLocationImmutable -> Map FlagName Bool -> [Text] -- ^ GHC options. -> [Text] -- ^ Cabal configure options. -> M Package ) , combinedMap :: !CombinedMap -- ^ A dictionary of package names, and combined information about the -- package in respect of whether or not it is already installed and, unless -- the package is not to be built (global packages), where its source code -- is located. , ctxEnvConfig :: !EnvConfig -- ^ Configuration after the environment has been setup. , callStack :: ![PackageName] , wanted :: !(Set PackageName) , localNames :: !(Set PackageName) , curator :: !(Maybe Curator) , pathEnvVar :: !Text } instance HasPlatform Ctx where platformL = configL . platformL {-# INLINE platformL #-} platformVariantL = configL . 
platformVariantL {-# INLINE platformVariantL #-} instance HasGHCVariant Ctx where ghcVariantL = configL . ghcVariantL {-# INLINE ghcVariantL #-} instance HasLogFunc Ctx where logFuncL = configL . logFuncL instance HasRunner Ctx where runnerL = configL . runnerL instance HasStylesUpdate Ctx where stylesUpdateL = runnerL . stylesUpdateL instance HasTerm Ctx where useColorL = runnerL . useColorL termWidthL = runnerL . termWidthL instance HasConfig Ctx where configL = buildConfigL . lens (.config) (\x y -> x { config = y }) {-# INLINE configL #-} instance HasPantryConfig Ctx where pantryConfigL = configL . pantryConfigL instance HasProcessContext Ctx where processContextL = configL . processContextL instance HasBuildConfig Ctx where buildConfigL = envConfigL . lens (.buildConfig) (\x y -> x { buildConfig = y }) instance HasSourceMap Ctx where sourceMapL = envConfigL . sourceMapL instance HasCompiler Ctx where compilerPathsL = envConfigL . compilerPathsL instance HasEnvConfig Ctx where envConfigL = lens (.ctxEnvConfig) (\x y -> x { ctxEnvConfig = y }) -- | State to be maintained during the calculation of local packages to -- unregister. data UnregisterState = UnregisterState { toUnregister :: !(Map GhcPkgId (PackageIdentifier, Text)) , toKeep :: ![DumpPackage] , anyAdded :: !Bool } -- | Warn about tools in the snapshot definition. States the tool name -- expected and the package name using it. data ToolWarning = ToolWarning ExeName PackageName deriving Show stack-2.15.7/src/Stack/Types/Build/Exception.hs0000644000000000000000000010555614620153446017413 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.Build.Exception ( BuildException (..) , BuildPrettyException (..) , pprintTargetParseErrors , ConstructPlanException (..) , LatestApplicableVersion , BadDependency (..) 
) where import qualified Data.ByteString as S import Data.Char ( isSpace ) import Data.List as L import qualified Data.Map as Map import qualified Data.Map.Strict as M import Data.Monoid.Map ( MonoidMap (..) ) import qualified Data.Set as Set import qualified Data.Text as T import Distribution.System ( Arch ) import qualified Distribution.Text as C import Distribution.Types.PackageName ( mkPackageName ) import Distribution.Types.TestSuiteInterface ( TestSuiteInterface ) import qualified Distribution.Version as C import RIO.NonEmpty ( nonEmpty ) import RIO.Process ( showProcessArgDebug ) import Stack.Constants ( defaultUserConfigPath, wiredInPackages ) import Stack.Prelude import Stack.Types.Compiler ( ActualCompiler, compilerVersionString ) import Stack.Types.CompilerBuild ( CompilerBuild, compilerBuildSuffix ) import Stack.Types.DumpPackage ( DumpPackage ) import Stack.Types.UnusedFlags ( FlagSource (..), UnusedFlags (..) ) import Stack.Types.GHCVariant ( GHCVariant, ghcVariantSuffix ) import Stack.Types.NamedComponent ( NamedComponent, renderPkgComponent ) import Stack.Types.Package ( Package (..), packageIdentifier ) import Stack.Types.ParentMap ( ParentMap ) import Stack.Types.Version ( VersionCheck (..), VersionRange ) -- | Type representing exceptions thrown by functions exported by modules with -- names beginning @Stack.Build@. 
data BuildException = Couldn'tFindPkgId PackageName | Couldn'tParseTargets [Text] | UnknownTargets (Set PackageName) -- no known version (Map PackageName Version) -- not in snapshot, here's the most recent -- version in the index (Path Abs File) -- stack.yaml | TestSuiteFailure PackageIdentifier (Map Text (Maybe ExitCode)) (Maybe (Path Abs File)) S.ByteString | TestSuiteTypeUnsupported TestSuiteInterface | LocalPackageDoesn'tMatchTarget PackageName Version -- local version Version -- version specified on command line | NoSetupHsFound (Path Abs Dir) | InvalidGhcOptionsSpecification [PackageName] | TestSuiteExeMissing Bool String String String | CabalCopyFailed Bool String | LocalPackagesPresent [PackageIdentifier] | CouldNotLockDistDir !(Path Abs File) | TaskCycleBug PackageIdentifier | PackageIdMissingBug PackageIdentifier | AllInOneBuildBug | MultipleResultsBug PackageName [DumpPackage] | TemplateHaskellNotFoundBug | HaddockIndexNotFound | ShowBuildErrorBug | CallStackEmptyBug deriving (Show, Typeable) instance Exception BuildException where displayException (Couldn'tFindPkgId name) = bugReport "[S-7178]" $ concat [ "After installing " , packageNameString name ,", the package id couldn't be found (via ghc-pkg describe " , packageNameString name , ")." ] displayException (Couldn'tParseTargets targets) = unlines $ "Error: [S-3127]" : "The following targets could not be parsed as package names or \ \directories:" : map T.unpack targets displayException (UnknownTargets noKnown notInSnapshot stackYaml) = unlines $ "Error: [S-2154]" : (noKnown' ++ notInSnapshot') where noKnown' | Set.null noKnown = [] | otherwise = pure $ "The following target packages were not found: " ++ intercalate ", " (map packageNameString $ Set.toList noKnown) ++ "\nSee https://docs.haskellstack.org/en/stable/build_command/#target-syntax for details." notInSnapshot' | Map.null notInSnapshot = [] | otherwise = "The following packages are not in your snapshot, but exist" : "in your package index. 
Recommended action: add them to your" : ("extra-deps in " ++ toFilePath stackYaml) : "(Note: these are the most recent versions," : "but there's no guarantee that they'll build together)." : "" : map (\(name, version') -> "- " ++ packageIdentifierString (PackageIdentifier name version')) (Map.toList notInSnapshot) displayException (TestSuiteFailure ident codes mlogFile bs) = unlines $ "Error: [S-1995]" : concat [ ["Test suite failure for package " ++ packageIdentifierString ident] , flip map (Map.toList codes) $ \(name, mcode) -> concat [ " " , T.unpack name , ": " , case mcode of Nothing -> " executable not found" Just ec -> " exited with: " ++ displayException ec ] , pure $ case mlogFile of Nothing -> "Logs printed to console" -- TODO Should we load up the full error output and print it here? Just logFile -> "Full log available at " ++ toFilePath logFile , if S.null bs then [] else [ "" , "" , doubleIndent $ T.unpack $ decodeUtf8With lenientDecode bs ] ] where indent' = dropWhileEnd isSpace . unlines . fmap (" " ++) . lines doubleIndent = indent' . 
indent' displayException (TestSuiteTypeUnsupported interface) = concat [ "Error: [S-3819]\n" , "Unsupported test suite type: " , show interface ] -- Suppressing duplicate output displayException (LocalPackageDoesn'tMatchTarget name localV requestedV) = concat [ "Error: [S-5797]\n" , "Version for local package " , packageNameString name , " is " , versionString localV , ", but you asked for " , versionString requestedV , " on the command line" ] displayException (NoSetupHsFound dir) = concat [ "Error: [S-3118]\n" , "No Setup.hs or Setup.lhs file found in " , toFilePath dir ] displayException (InvalidGhcOptionsSpecification unused) = unlines $ "Error: [S-4925]" : "Invalid GHC options specification:" : map showGhcOptionSrc unused where showGhcOptionSrc name = concat [ "- Package '" , packageNameString name , "' not found" ] displayException (TestSuiteExeMissing isSimpleBuildType exeName pkgName' testName) = missingExeError "[S-7987]" isSimpleBuildType $ concat [ "Test suite executable \"" , exeName , " not found for " , pkgName' , ":test:" , testName ] displayException (CabalCopyFailed isSimpleBuildType innerMsg) = missingExeError "[S-8027]" isSimpleBuildType $ concat [ "'cabal copy' failed. Error message:\n" , innerMsg , "\n" ] displayException (LocalPackagesPresent locals) = unlines $ "Error: [S-5510]" : "Local packages are not allowed when using the 'script' command. \ \Packages found:" : map (\ident -> "- " ++ packageIdentifierString ident) locals displayException (CouldNotLockDistDir lockFile) = unlines [ "Error: [S-7168]" , "Locking the dist directory failed, try to lock file:" , " " ++ toFilePath lockFile , "Maybe you're running another copy of Stack?" 
] displayException (TaskCycleBug pid) = bugReport "[S-7868]" $ "Unexpected task cycle for " ++ packageNameString (pkgName pid) displayException (PackageIdMissingBug ident) = bugReport "[S-8923]" $ "singleBuild: missing package ID missing: " ++ show ident displayException AllInOneBuildBug = bugReport "[S-7371]" "Cannot have an all-in-one build that also has a final build step." displayException (MultipleResultsBug name dps) = bugReport "[S-6739]" $ "singleBuild: multiple results when describing installed package " ++ show (name, dps) displayException TemplateHaskellNotFoundBug = bugReport "[S-3121]" "template-haskell is a wired-in GHC boot library but it wasn't found." displayException HaddockIndexNotFound = "Error: [S-6901]\n" ++ "No local or snapshot doc index found to open." displayException ShowBuildErrorBug = bugReport "[S-5452]" "Unexpected case in showBuildError." displayException CallStackEmptyBug = bugReport "[S-2696]" "addDep: call stack is empty." data BuildPrettyException = ConstructPlanFailed [ConstructPlanException] (Path Abs File) (Path Abs Dir) Bool -- Is the project the implicit global project? 
ParentMap (Set PackageName) (Map PackageName [PackageName]) | ExecutionFailure [SomeException] | CabalExitedUnsuccessfully ExitCode PackageIdentifier (Path Abs File) -- cabal Executable [String] -- cabal arguments (Maybe (Path Abs File)) -- logfiles location [Text] -- log contents | SetupHsBuildFailure ExitCode (Maybe PackageIdentifier) -- which package's custom setup, is simple setup -- if Nothing (Path Abs File) -- ghc Executable [String] -- ghc arguments (Maybe (Path Abs File)) -- logfiles location [Text] -- log contents | TargetParseException [StyleDoc] | SomeTargetsNotBuildable [(PackageName, NamedComponent)] | InvalidFlagSpecification (Set UnusedFlags) | GHCProfOptionInvalid | NotOnlyLocal [PackageName] [Text] | CompilerVersionMismatch (Maybe (ActualCompiler, Arch)) -- found (WantedCompiler, Arch) -- expected GHCVariant -- expected CompilerBuild -- expected VersionCheck (Maybe (Path Abs File)) -- Path to the stack.yaml file StyleDoc -- recommended resolution deriving (Show, Typeable) instance Pretty BuildPrettyException where pretty ( ConstructPlanFailed errs stackYaml stackRoot isImplicitGlobal parents wanted prunedGlobalDeps ) = "[S-4804]" <> line <> flow "Stack failed to construct a build plan." <> blankLine <> pprintExceptions errs stackYaml stackRoot isImplicitGlobal parents wanted prunedGlobalDeps pretty (ExecutionFailure es) = "[S-7282]" <> line <> flow "Stack failed to execute the build plan." 
<> blankLine <> fillSep [ flow "While executing the build plan, Stack encountered the" , case es of [_] -> "error:" _ -> flow "following errors:" ] <> blankLine <> hcat (L.intersperse blankLine (map ppException es)) pretty (CabalExitedUnsuccessfully exitCode taskProvides' execName fullArgs logFiles bss) = showBuildError "[S-7011]" False exitCode (Just taskProvides') execName fullArgs logFiles bss pretty (SetupHsBuildFailure exitCode mtaskProvides execName fullArgs logFiles bss) = showBuildError "[S-6374]" True exitCode mtaskProvides execName fullArgs logFiles bss pretty (TargetParseException errs) = "[S-8506]" <> pprintTargetParseErrors errs pretty (SomeTargetsNotBuildable xs) = "[S-7086]" <> line <> fillSep ( [ flow "The following components have" , style Shell (flow "buildable: False") , flow "set in the Cabal configuration, and so cannot be targets:" ] <> mkNarrativeList (Just Target) False (map (fromString . T.unpack . renderPkgComponent) xs :: [StyleDoc]) ) <> blankLine <> flow "To resolve this, either provide flags such that these components \ \are buildable, or only specify buildable targets." pretty (InvalidFlagSpecification unused) = "[S-8664]" <> line <> flow "Invalid flag specification:" <> line <> bulletedList (map go (Set.toList unused)) where showFlagSrc :: FlagSource -> StyleDoc showFlagSrc FSCommandLine = flow "(specified on the command line)" showFlagSrc FSStackYaml = flow "(specified in the project-level configuration (e.g. stack.yaml))" go :: UnusedFlags -> StyleDoc go (UFNoPackage src name) = fillSep [ "Package" , style Error (fromPackageName name) , flow "not found" , showFlagSrc src ] go (UFFlagsNotDefined src pname pkgFlags flags) = fillSep ( "Package" : style Current (fromString name) : flow "does not define the following flags" : showFlagSrc src <> ":" : mkNarrativeList (Just Error) False (map (fromString . 
flagNameString) (Set.toList flags) :: [StyleDoc]) ) <> line <> if Set.null pkgFlags then fillSep [ flow "No flags are defined by package" , style Current (fromString name) <> "." ] else fillSep ( flow "Flags defined by package" : style Current (fromString name) : "are:" : mkNarrativeList (Just Good) False (map (fromString . flagNameString) (Set.toList pkgFlags) :: [StyleDoc]) ) where name = packageNameString pname go (UFSnapshot name) = fillSep [ flow "Attempted to set flag on snapshot package" , style Current (fromPackageName name) <> "," , flow "please add the package to" , style Shell "extra-deps" <> "." ] pretty GHCProfOptionInvalid = "[S-8100]" <> line <> fillSep [ flow "When building with Stack, you should not use GHC's" , style Shell "-prof" , flow "option. Instead, please use Stack's" , style Shell "--library-profiling" , "and" , style Shell "--executable-profiling" , flow "flags. See:" , style Url "https://github.com/commercialhaskell/stack/issues/1015" <> "." ] pretty (NotOnlyLocal packages exes) = "[S-1727]" <> line <> flow "Specified only-locals, but Stack needs to build snapshot contents:" <> line <> if null packages then mempty else fillSep ( "Packages:" : mkNarrativeList Nothing False (map fromPackageName packages :: [StyleDoc]) ) <> line <> if null exes then mempty else fillSep ( "Executables:" : mkNarrativeList Nothing False (map (fromString . 
T.unpack) exes :: [StyleDoc]) ) <> line pretty (CompilerVersionMismatch mactual (expected, eArch) ghcVariant ghcBuild check mstack resolution) = "[S-6362]" <> line <> fillSep [ case mactual of Nothing -> flow "No compiler found, expected" Just (actual, arch) -> fillSep [ flow "Compiler version mismatched, found" , fromString $ compilerVersionString actual , parens (pretty arch) <> "," , flow "but expected" ] , case check of MatchMinor -> flow "minor version match with" MatchExact -> flow "exact version" NewerMinor -> flow "minor version match or newer with" , fromString $ T.unpack $ utf8BuilderToText $ display expected , parens $ mconcat [ pretty eArch , fromString $ ghcVariantSuffix ghcVariant , fromString $ compilerBuildSuffix ghcBuild ] , parens ( fillSep [ flow "based on" , case mstack of Nothing -> flow "command line arguments" Just stack -> fillSep [ flow "resolver setting in" , pretty stack ] ] ) <> "." ] <> blankLine <> resolution instance Exception BuildPrettyException -- | Helper function to pretty print an error message for target parse errors. pprintTargetParseErrors :: [StyleDoc] -> StyleDoc pprintTargetParseErrors errs = line <> flow "Stack failed to parse the target(s)." <> blankLine <> fillSep [ flow "While parsing, Stack encountered the" , case errs of [err] -> "error:" <> blankLine <> indent 4 err _ -> flow "following errors:" <> blankLine <> bulletedList errs ] <> blankLine <> fillSep [ flow "Stack expects a target to be a package name (e.g." , style Shell "my-package" <> ")," , flow "a package identifier (e.g." , style Shell "my-package-0.1.2.3" <> ")," , flow "a package component (e.g." , style Shell "my-package:test:my-test-suite" <> ")," , flow "or, failing that, a relative path to a directory that is a \ \local package directory or a parent directory of one or more \ \local package directories." 
] pprintExceptions :: [ConstructPlanException] -> Path Abs File -> Path Abs Dir -> Bool -> ParentMap -> Set PackageName -> Map PackageName [PackageName] -> StyleDoc pprintExceptions exceptions stackYaml stackRoot isImplicitGlobal parentMap wanted' prunedGlobalDeps = fillSep [ flow ( "While constructing the build plan, Stack encountered the \ \following errors" <> if hasConfigurationRefs then "." else ":" ) , if hasConfigurationRefs then flow "The 'Stack configuration' refers to the set of package versions \ \specified by the snapshot (after any dropped packages, or pruned \ \GHC boot packages; if a boot package is replaced, Stack prunes \ \all other such packages that depend on it) and any extra-deps:" else mempty ] <> blankLine <> mconcat (L.intersperse blankLine (mapMaybe pprintException exceptions')) <> if L.null recommendations then mempty else blankLine <> flow "Some different approaches to resolving some or all of this:" <> blankLine <> indent 2 (spacedBulletedList recommendations) where exceptions' = {- should we dedupe these somehow? nubOrd -} exceptions recommendations = [ allowNewerMsg True False | onlyHasDependencyMismatches ] <> [ fillSep $ allowNewerMsg False onlyHasDependencyMismatches : flow "add these package names under" : style Shell "allow-newer-deps" <> ":" : mkNarrativeList (Just Shell) False (map fromPackageName (Set.elems pkgsWithMismatches) :: [StyleDoc]) | not $ Set.null pkgsWithMismatches ] <> addExtraDepsRecommendations where allowNewerMsg isAll isRepetitive = fillSep $ flow "To ignore" : (if isAll then "all" else "certain") : flow "version constraints and build anyway," : if isRepetitive then ["also"] else [ fillSep $ [ "in" , pretty (defaultUserConfigPath stackRoot) , flow ( "(global configuration)" <> if isImplicitGlobal then "," else mempty ) ] <> ( if isImplicitGlobal then [] else [ "or" , pretty stackYaml , flow "(project-level configuration)," ] ) <> [ "set" , style Shell (flow "allow-newer: true") <> if isAll then "." 
else mempty ] <> [ "and" | not isAll ] ] addExtraDepsRecommendations | Map.null extras = [] | (Just _) <- Map.lookup (mkPackageName "base") extras = [ fillSep [ flow "Build requires unattainable version of the" , style Current "base" , flow "package. Since" , style Current "base" , flow "is a part of GHC, you most likely need to use a \ \different GHC version with the matching" , style Current "base"<> "." ] ] | otherwise = [ fillSep [ style Recommendation (flow "Recommended action:") , flow "try adding the following to your" , style Shell "extra-deps" , "in" , pretty stackYaml , "(project-level configuration):" ] <> blankLine <> vsep (map pprintExtra (Map.toList extras)) ] pprintExtra (name, (version, BlobKey cabalHash cabalSize)) = let cfInfo = CFIHash cabalHash (Just cabalSize) packageIdRev = PackageIdentifierRevision name version cfInfo in fromString ("- " ++ T.unpack (utf8BuilderToText (display packageIdRev))) allNotInBuildPlan = Set.fromList $ concatMap toNotInBuildPlan exceptions' toNotInBuildPlan (DependencyPlanFailures _ pDeps) = map fst $ filter (\(_, (_, _, badDep)) -> badDep == NotInBuildPlan) (Map.toList pDeps) toNotInBuildPlan _ = [] (onlyHasDependencyMismatches, hasConfigurationRefs, extras, pkgsWithMismatches) = filterExceptions filterExceptions :: ( Bool -- ^ All the errors are DependencyMismatch. This checks if -- 'allow-newer: true' could resolve all reported issues. , Bool -- ^ One or more messages refer to 'the Stack configuration'. This -- triggers a message to explain what that phrase means. , Map PackageName (Version, BlobKey) -- ^ Recommended extras. TO DO: Likely a good idea to distinguish these to -- the user. In particular, those recommended for DependencyMismatch. , Set.Set PackageName -- ^ Set of names of packages with one or more DependencyMismatch errors. 
) filterExceptions = L.foldl' go acc0 exceptions' where acc0 = (True, False, Map.empty, Set.empty) go acc (DependencyPlanFailures pkg m) = Map.foldrWithKey go' acc m where pkgName = pkg.name go' name (_, Just extra, NotInBuildPlan) (_, _, m', s) = (False, True, Map.insert name extra m', s) go' _ (_, _, NotInBuildPlan) (_, _, m', s) = (False, True, m', s) go' name (_, Just extra, DependencyMismatch _) (p1, _, m', s) = (p1, True, Map.insert name extra m', Set.insert pkgName s) go' _ (_, _, DependencyMismatch _) (p1, _, m', s) = (p1, True, m', Set.insert pkgName s) go' _ (_, _, Couldn'tResolveItsDependencies _) acc' = acc' go' _ _ (_, p2, m', s) = (False, p2, m', s) go (_, p2, m, s) _ = (False, p2, m, s) pprintException (DependencyCycleDetected pNames) = Just $ flow "Dependency cycle detected in packages:" <> line <> indent 4 (encloseSep "[" "]" "," (map (style Error . fromPackageName) pNames)) pprintException (DependencyPlanFailures pkg pDeps) = case mapMaybe pprintDep (Map.toList pDeps) of [] -> Nothing depErrors -> Just $ fillSep [ flow "In the dependencies for" , pkgIdent <> pprintFlags pkg.flags <> ":" ] <> line <> indent 2 (bulletedList depErrors) <> line <> fillSep ( flow "The above is/are needed" : case getShortestDepsPath parentMap wanted' pkg.name of Nothing -> [flow "for unknown reason - Stack invariant violated."] Just [] -> [ "since" , pkgName' , flow "is a build target." ] Just (target:path) -> [ flow "due to" , encloseSep "" "" " -> " pathElems ] where pathElems = [style Target . fromPackageId $ target] <> map fromPackageId path <> [pkgIdent] ) where pkgName' = style Current (fromPackageName pkg.name) pkgIdent = style Current (fromPackageId $ packageIdentifier pkg) -- Skip these when they are redundant with 'NotInBuildPlan' info. pprintException (UnknownPackage name) | name `Set.member` allNotInBuildPlan = Nothing | name `Set.member` wiredInPackages = Just $ fillSep [ flow "Can't build a package with same name as a wired-in-package:" , style Current . 
fromPackageName $ name ] | Just pruned <- Map.lookup name prunedGlobalDeps = let prunedDeps = map (style Current . fromPackageName) pruned in Just $ fillSep [ flow "Can't use GHC boot package" , style Current . fromPackageName $ name , flow "when it depends on a replaced boot package. You need to \ \add the following as explicit dependencies to the \ \project:" , line , encloseSep "" "" ", " prunedDeps ] | otherwise = Just $ fillSep [ flow "Unknown package:" , style Current . fromPackageName $ name ] pprintFlags flags | Map.null flags = "" | otherwise = parens $ sep $ map pprintFlag $ Map.toList flags pprintFlag (name, True) = "+" <> fromString (flagNameString name) pprintFlag (name, False) = "-" <> fromString (flagNameString name) pprintDep (name, (range, mlatestApplicable, badDep)) = case badDep of NotInBuildPlan | name `elem` fold prunedGlobalDeps -> butMsg $ fillSep [ flow "this GHC boot package has been pruned from the Stack \ \configuration. You need to add the package explicitly to" , style Shell "extra-deps" <> "." ] | otherwise -> butMsg $ inconsistentMsg Nothing -- TODO: For local packages, suggest editing constraints DependencyMismatch version -> butMsg $ inconsistentMsg $ Just version -- I think the main useful info is these explain why missing packages are -- needed. Instead lets give the user the shortest path from a target to the -- package. Couldn'tResolveItsDependencies _version -> Nothing HasNoLibrary -> Just $ fillSep [ errorName , flow "is a library dependency, but the package provides no library." ] BDDependencyCycleDetected names -> Just $ fillSep [ errorName , flow $ "dependency cycle detected: " ++ L.intercalate ", " (map packageNameString names) ] where errorName = style Error . 
fromPackageName $ name goodRange = style Good (fromString (C.display range)) rangeMsg = if range == C.anyVersion then "needed," else fillSep [ flow "must match" , goodRange <> "," ] butMsg msg = Just $ fillSep [ errorName , rangeMsg , "but" , msg , latestApplicable Nothing ] inconsistentMsg mVersion = fillSep [ style Error $ maybe ( flow "no version" ) ( fromPackageId . PackageIdentifier name ) mVersion , flow "is in the Stack configuration" ] latestApplicable mversion = case mlatestApplicable of Nothing | isNothing mversion -> fillSep [ flow "(no matching package and version found. Perhaps there is \ \an error in the specification of a package's" , style Shell "dependencies" , "or" , style Shell "build-tools" , flow "(Hpack) or" , style Shell "build-depends" <> "," , style Shell "build-tools" , "or" , style Shell "build-tool-depends" , flow "(Cabal file)" , flow "or an omission from the" , style Shell "packages" , flow "list in" , pretty stackYaml , flow "(project-level configuration).)" ] | otherwise -> "" Just (laVer, _) | Just laVer == mversion -> flow "(latest matching version is specified)." | otherwise -> fillSep [ flow "(latest matching version is" , style Good (fromString $ versionString laVer) <> ")." ] data ConstructPlanException = DependencyCycleDetected [PackageName] | DependencyPlanFailures Package (Map PackageName (VersionRange, LatestApplicableVersion, BadDependency)) | UnknownPackage PackageName -- TODO perhaps this constructor will be removed, -- and BadDependency will handle it all -- ^ Recommend adding to extra-deps, give a helpful version number? deriving (Eq, Show, Typeable) -- | The latest applicable version and it's latest Cabal file revision. 
-- For display purposes only, Nothing if package not found
type LatestApplicableVersion = Maybe (Version, BlobKey)

-- | Reason why a dependency was not used
data BadDependency
  = NotInBuildPlan
  | Couldn'tResolveItsDependencies Version
  | DependencyMismatch Version
  | HasNoLibrary
  -- ^ See description of 'DepType'
  | BDDependencyCycleDetected ![PackageName]
  deriving (Eq, Ord, Show, Typeable)

-- | Render a plain-text error message for a missing executable, listing
-- possible causes. The Setup.hs cause is only listed for non-simple build
-- types.
missingExeError :: String -> Bool -> String -> String
missingExeError errorCode isSimpleBuildType msg = unlines
  $ "Error: " <> errorCode
  : msg
  : "Possible causes of this issue:"
  : map ("* " <>) possibleCauses
 where
  possibleCauses =
    "No module named \"Main\". The 'main-is' source file should usually \
    \have a header indicating that it's a 'Main' module."
    : "A Cabal file that refers to nonexistent other files (e.g. a \
      \license-file that doesn't exist). Running 'cabal check' may point \
      \out these issues."
    : [ "The Setup.hs file is changing the installation target dir."
      | not isSimpleBuildType
      ]

-- | Pretty-print a build failure for either a package build (Cabal) or a
-- Setup.hs build, including the exact command run, the exit code, any log
-- file locations and captured log contents.
showBuildError ::
     String
  -> Bool
  -> ExitCode
  -> Maybe PackageIdentifier
  -> Path Abs File
  -> [String]
  -> Maybe (Path Abs File)
  -> [Text]
  -> StyleDoc
showBuildError
    errorCode isBuildingSetup exitCode mtaskProvides execName fullArgs
    logFiles bss
  =
  let fullCmd = unwords
        $ dropQuotes (toFilePath execName)
        : map (T.unpack . showProcessArgDebug) fullArgs
      logLocations =
        maybe
          mempty
          (\fp -> line <> flow "Logs have been written to:" <+> pretty fp)
          logFiles
  in     fromString errorCode
      <> line
      <> flow "While building"
      <+> ( case (isBuildingSetup, mtaskProvides) of
              -- A package build with no package identifier is an internal
              -- invariant violation; reported as a bug.
              (False, Nothing) -> impureThrow ShowBuildErrorBug
              (False, Just taskProvides') ->
                   "package"
               <+> style
                     Target
                     (fromString $ dropQuotes (packageIdentifierString taskProvides'))
              (True, Nothing) -> "simple" <+> style File "Setup.hs"
              (True, Just taskProvides') ->
                   "custom"
               <+> style File "Setup.hs"
               <+> flow "for package"
               <+> style
                     Target
                     (fromString $ dropQuotes (packageIdentifierString taskProvides'))
          )
      <+> flow "(scroll up to its section to see the error) using:"
      <> line
      <> style Shell (fromString fullCmd)
      <> line
      <> flow "Process exited with code:" <+> (fromString . show) exitCode
      <+> ( if exitCode == ExitFailure (-9)
              then flow "(THIS MAY INDICATE OUT OF MEMORY)"
              else mempty
          )
      <> logLocations
      <> if null bss
           then mempty
           else blankLine <> string (removeTrailingSpaces (map T.unpack bss))
 where
  removeTrailingSpaces = dropWhileEnd isSpace . unlines
  dropQuotes = filter ('\"' /=)

-- | Get the shortest reason for the package to be in the build plan. In
-- other words, trace the parent dependencies back to a 'wanted'
-- package.
getShortestDepsPath ::
     ParentMap
  -> Set PackageName
  -> PackageName
  -> Maybe [PackageIdentifier]
getShortestDepsPath (MonoidMap parentsMap) wanted' name =
  if Set.member name wanted'
    then Just []
    else case M.lookup name parentsMap of
      Nothing -> Nothing
      Just parents -> Just $ findShortest 256 paths0
       where
        paths0 = M.fromList
          $ map (\(ident, _) -> (pkgName ident, startDepsPath ident)) parents
 where
  -- The 'paths' map is a map from PackageName to the shortest path
  -- found to get there. It is the frontier of our breadth-first
  -- search of dependencies.
  findShortest :: Int -> Map PackageName DepsPath -> [PackageIdentifier]
  -- The fuel counter bounds the search; when exhausted a sentinel package
  -- identifier is returned instead of looping forever.
  findShortest fuel _ | fuel <= 0 =
    [ PackageIdentifier
        (mkPackageName "stack-ran-out-of-jet-fuel")
        (C.mkVersion [0])
    ]
  findShortest _ paths | M.null paths = []
  findShortest fuel paths =
    case nonEmpty targets of
      Nothing -> findShortest (fuel - 1)
        $ M.fromListWith chooseBest
        $ concatMap extendPath recurses
      Just targets' ->
        let (DepsPath _ _ path) = minimum (snd <$> targets')
        in  path
   where
    (targets, recurses) =
      L.partition (\(n, _) -> n `Set.member` wanted') (M.toList paths)
  -- 'max' via the derived Ord on DepsPath: compares dpLength first, then
  -- dpNameLength, then dpPath.
  chooseBest :: DepsPath -> DepsPath -> DepsPath
  chooseBest = max
  -- Extend a path to all its parents.
  extendPath :: (PackageName, DepsPath) -> [(PackageName, DepsPath)]
  extendPath (n, dp) =
    case M.lookup n parentsMap of
      Nothing -> []
      Just parents ->
        map (\(pkgId, _) -> (pkgName pkgId, extendDepsPath pkgId dp)) parents
  -- A length-1 path consisting of just the given package.
  startDepsPath :: PackageIdentifier -> DepsPath
  startDepsPath ident = DepsPath
    { dpLength = 1
    , dpNameLength = length (packageNameString (pkgName ident))
    , dpPath = [ident]
    }
  -- NOTE(review): dpPath is set to [ident] rather than ident : dp.dpPath,
  -- while dpLength/dpNameLength accumulate — confirm whether the full path
  -- is intended to be kept here.
  extendDepsPath :: PackageIdentifier -> DepsPath -> DepsPath
  extendDepsPath ident dp = DepsPath
    { dpLength = dp.dpLength + 1
    , dpNameLength =
        dp.dpNameLength + length (packageNameString (pkgName ident))
    , dpPath = [ident]
    }

data DepsPath = DepsPath
  { dpLength :: Int
    -- ^ Length of dpPath
  , dpNameLength :: Int
    -- ^ Length of package names combined
  , dpPath :: [PackageIdentifier]
    -- ^ A path where the packages later in the list depend on those that come
    -- earlier
  }
  deriving (Eq, Ord, Show)
stack-2.15.7/src/Stack/Types/BuildConfig.hs0000644000000000000000000000715414604306201016565 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoFieldSelectors #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE TypeFamilies #-}

module Stack.Types.BuildConfig
  ( BuildConfig (..)
  , HasBuildConfig (..)
, stackYamlL , projectRootL , getProjectWorkDir , wantedCompilerVersionL ) where import Path ( (), parent ) import RIO.Process ( HasProcessContext (..) ) import Stack.Prelude import Stack.Types.Config ( Config, HasConfig (..), workDirL ) import Stack.Types.Curator ( Curator ) import Stack.Types.GHCVariant ( HasGHCVariant (..) ) import Stack.Types.Platform ( HasPlatform (..) ) import Stack.Types.Runner ( HasRunner (..) ) import Stack.Types.SourceMap ( SMWanted (..) ) import Stack.Types.Storage ( ProjectStorage ) -- | A superset of 'Config' adding information on how to build code. The reason -- for this breakdown is because we will need some of the information from -- 'Config' in order to determine the values here. -- -- These are the components which know nothing about local configuration. data BuildConfig = BuildConfig { config :: !Config , smWanted :: !SMWanted , extraPackageDBs :: ![Path Abs Dir] -- ^ Extra package databases , stackYaml :: !(Path Abs File) -- ^ Location of the stack.yaml file. -- -- Note: if the STACK_YAML environment variable is used, this may be -- different from projectRootL "stack.yaml" if a different file -- name is used. , projectStorage :: !ProjectStorage -- ^ Database connection pool for project Stack database , curator :: !(Maybe Curator) } instance HasPlatform BuildConfig where platformL = configL . platformL {-# INLINE platformL #-} platformVariantL = configL . platformVariantL {-# INLINE platformVariantL #-} instance HasGHCVariant BuildConfig where ghcVariantL = configL . ghcVariantL {-# INLINE ghcVariantL #-} instance HasProcessContext BuildConfig where processContextL = configL . processContextL instance HasPantryConfig BuildConfig where pantryConfigL = configL . pantryConfigL instance HasConfig BuildConfig where configL = lens (.config) (\x y -> x { config = y }) instance HasRunner BuildConfig where runnerL = configL . runnerL instance HasLogFunc BuildConfig where logFuncL = runnerL . 
logFuncL instance HasStylesUpdate BuildConfig where stylesUpdateL = runnerL . stylesUpdateL instance HasTerm BuildConfig where useColorL = runnerL . useColorL termWidthL = runnerL . termWidthL class HasConfig env => HasBuildConfig env where buildConfigL :: Lens' env BuildConfig instance HasBuildConfig BuildConfig where buildConfigL = id {-# INLINE buildConfigL #-} stackYamlL :: HasBuildConfig env => Lens' env (Path Abs File) stackYamlL = buildConfigL . lens (.stackYaml) (\x y -> x { stackYaml = y }) -- | Directory containing the project's stack.yaml file projectRootL :: HasBuildConfig env => Getting r env (Path Abs Dir) projectRootL = stackYamlL . to parent -- | Per-project work dir getProjectWorkDir :: (HasBuildConfig env, MonadReader env m) => m (Path Abs Dir) getProjectWorkDir = do root <- view projectRootL workDir <- view workDirL pure (root workDir) -- | The compiler specified by the @SnapshotDef@. This may be different from the -- actual compiler used! wantedCompilerVersionL :: HasBuildConfig s => Getting r s WantedCompiler wantedCompilerVersionL = buildConfigL . to (.smWanted.compiler) stack-2.15.7/src/Stack/Types/BuildOpts.hs0000644000000000000000000001005514620153446016310 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} -- | Configuration options for building. module Stack.Types.BuildOpts ( BuildOpts (..) , HaddockOpts (..) , TestOpts (..) , BenchmarkOpts (..) , buildOptsHaddockL , buildOptsInstallExesL ) where import Stack.Prelude import Stack.Types.BuildOptsMonoid ( CabalVerbosity (..), ProgressBarFormat (..) ) -- | Build options that is interpreted by the build command. This is built up -- from BuildOptsCLI and BuildOptsMonoid data BuildOpts = BuildOpts { libProfile :: !Bool , exeProfile :: !Bool , libStrip :: !Bool , exeStrip :: !Bool , buildHaddocks :: !Bool -- ^ Build Haddock documentation? 
  , haddockOpts :: !HaddockOpts
    -- ^ Options to pass to haddock
  , openHaddocks :: !Bool
    -- ^ Open haddocks in the browser?
  , haddockDeps :: !(Maybe Bool)
    -- ^ Build haddocks for dependencies?
  , haddockInternal :: !Bool
    -- ^ Build haddocks for all symbols and packages, like
    -- @cabal haddock --internal@
  , haddockHyperlinkSource :: !Bool
    -- ^ Build hyperlinked source. Disable for no sources.
  , haddockForHackage :: !Bool
    -- ^ Build with flags to generate Haddock documentation suitable to upload
    -- to Hackage.
  , installExes :: !Bool
    -- ^ Install executables to user path after building?
  , installCompilerTool :: !Bool
    -- ^ Install executables to compiler tools path after building?
  , preFetch :: !Bool
    -- ^ Fetch all packages immediately
    -- ^ Watch files for changes and automatically rebuild
    -- NOTE(review): the comment above appears to be a leftover from a removed
    -- field; confirm against version history before deleting.
  , keepGoing :: !(Maybe Bool)
    -- ^ Keep building/running after failure
  , keepTmpFiles :: !Bool
    -- ^ Keep intermediate files and build directories
  , forceDirty :: !Bool
    -- ^ Force treating all local packages as having dirty files
  , tests :: !Bool
    -- ^ Turn on tests for local targets
  , testOpts :: !TestOpts
    -- ^ Additional test arguments
  , benchmarks :: !Bool
    -- ^ Turn on benchmarks for local targets
  , benchmarkOpts :: !BenchmarkOpts
    -- ^ Additional test arguments
    -- ^ Commands (with arguments) to run after a successful build
    -- ^ Only perform the configure step when building
    -- NOTE(review): the two comments above appear to be leftovers from
    -- removed fields; confirm against version history before deleting.
  , reconfigure :: !Bool
    -- ^ Perform the configure step even if already configured
  , cabalVerbose :: !CabalVerbosity
    -- ^ Ask Cabal to be verbose in its builds
  , splitObjs :: !Bool
    -- ^ Whether to enable split-objs.
  , skipComponents :: ![Text]
    -- ^ Which components to skip when building
  , interleavedOutput :: !Bool
    -- ^ Should we use the interleaved GHC output when building
    -- multiple packages?
  , progressBar :: !ProgressBarFormat
    -- ^ Format of the progress bar
  , ddumpDir :: !(Maybe Text)
    -- ^ Directory for GHC -ddump-* output, if any.
  }
  deriving Show

-- | Haddock Options
newtype HaddockOpts = HaddockOpts
  { additionalArgs :: [String]
    -- ^ Arguments passed to haddock program
  }
  deriving (Eq, Show)

-- | Options for the 'FinalAction' 'DoTests'
data TestOpts = TestOpts
  { rerunTests :: !Bool
    -- ^ Whether successful tests will be run again
  , additionalArgs :: ![String]
    -- ^ Arguments passed to the test program
  , coverage :: !Bool
    -- ^ Generate a code coverage report
  , disableRun :: !Bool
    -- ^ Disable running of tests
  , maximumTimeSeconds :: !(Maybe Int)
    -- ^ test suite timeout in seconds
  , allowStdin :: !Bool
    -- ^ Whether to allow standard input
  }
  deriving (Eq, Show)

-- | Options for the 'FinalAction' 'DoBenchmarks'
data BenchmarkOpts = BenchmarkOpts
  { additionalArgs :: !(Maybe String)
    -- ^ Arguments passed to the benchmark program
  , disableRun :: !Bool
    -- ^ Disable running of benchmarks
  }
  deriving (Eq, Show)

-- | Lens into the installExes field of BuildOpts.
buildOptsInstallExesL :: Lens' BuildOpts Bool
buildOptsInstallExesL =
  lens (.installExes) (\bopts t -> bopts {installExes = t})

-- | Lens into the buildHaddocks field of BuildOpts.
buildOptsHaddockL :: Lens' BuildOpts Bool
buildOptsHaddockL =
  lens (.buildHaddocks) (\bopts t -> bopts {buildHaddocks = t})
stack-2.15.7/src/Stack/Types/BuildOptsCLI.hs0000644000000000000000000000631214604306201016630 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE NoFieldSelectors #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE OverloadedStrings #-}

-- | Configuration options for building from the command line only.
module Stack.Types.BuildOptsCLI
  ( BuildOptsCLI (..)
  , defaultBuildOptsCLI
  , ApplyCLIFlag (..)
  , BuildSubset (..)
  , FileWatchOpts (..)
  , BuildCommand (..)
  , boptsCLIAllProgOptions
  , boptsCLIFlagsByName
  ) where

import qualified Data.Map.Strict as Map
import qualified Data.Text as T
import Stack.Prelude

-- | Build options that may only be specified from the CLI
data BuildOptsCLI = BuildOptsCLI
  { targetsCLI :: ![Text]
    -- ^ Raw build targets as given on the command line.
  , dryrun :: !Bool
  , ghcOptions :: ![Text]
  , progsOptions :: ![(Text, [Text])]
    -- ^ Per-program (PROG, options) pairs; see 'boptsCLIAllProgOptions'.
  , flags :: !(Map ApplyCLIFlag (Map FlagName Bool))
  , buildSubset :: !BuildSubset
  , fileWatch :: !FileWatchOpts
  , watchAll :: !Bool
  , exec :: ![(String, [String])]
  , onlyConfigure :: !Bool
  , command :: !BuildCommand
  , initialBuildSteps :: !Bool
  }
  deriving Show

-- | Default CLI build options: no targets, no flags, build everything,
-- no file watching, 'Build' command.
defaultBuildOptsCLI :: BuildOptsCLI
defaultBuildOptsCLI = BuildOptsCLI
  { targetsCLI = []
  , dryrun = False
  , flags = Map.empty
  , ghcOptions = []
  , progsOptions = []
  , buildSubset = BSAll
  , fileWatch = NoFileWatch
  , watchAll = False
  , exec = []
  , onlyConfigure = False
  , command = Build
  , initialBuildSteps = False
  }

-- | How to apply a CLI flag
data ApplyCLIFlag
  = ACFAllProjectPackages
    -- ^ Apply to all project packages which have such a flag name available.
  | ACFByName !PackageName
    -- ^ Apply to the specified package only.
  deriving (Eq, Ord, Show)

-- | Which subset of packages to build
data BuildSubset
  = BSAll
  | BSOnlySnapshot
    -- ^ Only install packages in the snapshot database, skipping
    -- packages intended for the local database.
  | BSOnlyDependencies
  | BSOnlyLocals
    -- ^ Refuse to build anything in the snapshot database, see
    -- https://github.com/commercialhaskell/stack/issues/5272
  deriving (Show, Eq)

data FileWatchOpts
  = NoFileWatch
  | FileWatch
  | FileWatchPoll
  deriving (Show, Eq)

-- | Command sum type for conditional arguments.
data BuildCommand
  = Build
  | Test
  | Haddock
  | Bench
  | Install
  deriving (Eq, Show)

-- | Generate a list of --PROG-option="<argument>" arguments for all PROGs.
boptsCLIAllProgOptions :: BuildOptsCLI -> [Text]
boptsCLIAllProgOptions boptsCLI =
  concatMap progOptionArgs boptsCLI.progsOptions
 where
  -- Generate a list of --PROG-option="<argument>" arguments for a PROG.
  progOptionArgs :: (Text, [Text]) -> [Text]
  progOptionArgs (prog, opts) = map progOptionArg opts
   where
    -- Generate a --PROG-option="<argument>" argument for a PROG and option.
    -- The option value is wrapped in literal double quotes.
    progOptionArg :: Text -> Text
    progOptionArg opt = T.concat
      [ "--"
      , prog
      , "-option=\""
      , opt
      , "\""
      ]

-- | Only flags set via 'ACFByName'
boptsCLIFlagsByName :: BuildOptsCLI -> Map PackageName (Map FlagName Bool)
boptsCLIFlagsByName = Map.fromList . mapMaybe go . Map.toList . (.flags)
 where
  -- Flags applied to all project packages are dropped here.
  go (ACFAllProjectPackages, _) = Nothing
  go (ACFByName name, flags) = Just (name, flags)
stack-2.15.7/src/Stack/Types/BuildOptsMonoid.hs0000644000000000000000000003044114620153446017457 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE NoFieldSelectors #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE OverloadedStrings #-}

-- | Configuration options for building from the command line and/or a
-- configuration file.
module Stack.Types.BuildOptsMonoid
  ( BuildOptsMonoid (..)
  , HaddockOptsMonoid (..)
  , TestOptsMonoid (..)
  , BenchmarkOptsMonoid (..)
  , CabalVerbosity (..)
  , ProgressBarFormat (..)
, buildOptsMonoidHaddockL , buildOptsMonoidTestsL , buildOptsMonoidBenchmarksL , buildOptsMonoidInstallExesL , toFirstCabalVerbosity , readProgressBarFormat ) where import Data.Aeson.Types ( FromJSON (..), withText ) import Data.Aeson.WarningParser ( WithJSONWarnings, (..:?), (..!=), jsonSubWarnings , withObjectWarnings ) import qualified Data.Text as T import Distribution.Parsec ( Parsec (..), simpleParsec ) import Distribution.Verbosity ( Verbosity, normal, verbose ) import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) import Stack.Prelude hiding ( trace ) -- | Build options that may be specified in the stack.yaml or from the CLI data BuildOptsMonoid = BuildOptsMonoid { trace :: !Any , profile :: !Any , noStrip :: !Any , libProfile :: !FirstFalse , exeProfile :: !FirstFalse , libStrip :: !FirstTrue , exeStrip :: !FirstTrue , buildHaddocks :: !FirstFalse , haddockOpts :: !HaddockOptsMonoid , openHaddocks :: !FirstFalse , haddockDeps :: !(First Bool) , haddockInternal :: !FirstFalse , haddockHyperlinkSource :: !FirstTrue , haddockForHackage :: !FirstFalse , installExes :: !FirstFalse , installCompilerTool :: !FirstFalse , preFetch :: !FirstFalse , keepGoing :: !(First Bool) , keepTmpFiles :: !FirstFalse , forceDirty :: !FirstFalse , tests :: !FirstFalse , testOpts :: !TestOptsMonoid , benchmarks :: !FirstFalse , benchmarkOpts :: !BenchmarkOptsMonoid , reconfigure :: !FirstFalse , cabalVerbose :: !(First CabalVerbosity) , splitObjs :: !FirstFalse , skipComponents :: ![Text] , interleavedOutput :: !FirstTrue , progressBar :: !(First ProgressBarFormat) , ddumpDir :: !(First Text) } deriving (Generic, Show) instance FromJSON (WithJSONWarnings BuildOptsMonoid) where parseJSON = withObjectWarnings "BuildOptsMonoid" $ \o -> do let trace = Any False profile = Any False noStrip = Any False libProfile <- FirstFalse <$> o ..:? libProfileArgName exeProfile <-FirstFalse <$> o ..:? exeProfileArgName libStrip <- FirstTrue <$> o ..:? 
libStripArgName exeStrip <-FirstTrue <$> o ..:? exeStripArgName buildHaddocks <- FirstFalse <$> o ..:? haddockArgName haddockOpts <- jsonSubWarnings (o ..:? haddockOptsArgName ..!= mempty) openHaddocks <- FirstFalse <$> o ..:? openHaddocksArgName haddockDeps <- First <$> o ..:? haddockDepsArgName haddockInternal <- FirstFalse <$> o ..:? haddockInternalArgName haddockHyperlinkSource <- FirstTrue <$> o ..:? haddockHyperlinkSourceArgName haddockForHackage <- FirstFalse <$> o ..:? haddockForHackageArgName installExes <- FirstFalse <$> o ..:? installExesArgName installCompilerTool <- FirstFalse <$> o ..:? installCompilerToolArgName preFetch <- FirstFalse <$> o ..:? preFetchArgName keepGoing <- First <$> o ..:? keepGoingArgName keepTmpFiles <- FirstFalse <$> o ..:? keepTmpFilesArgName forceDirty <- FirstFalse <$> o ..:? forceDirtyArgName tests <- FirstFalse <$> o ..:? testsArgName testOpts <- jsonSubWarnings (o ..:? testOptsArgName ..!= mempty) benchmarks <- FirstFalse <$> o ..:? benchmarksArgName benchmarkOpts <- jsonSubWarnings (o ..:? benchmarkOptsArgName ..!= mempty) reconfigure <- FirstFalse <$> o ..:? reconfigureArgName cabalVerbosity <- First <$> o ..:? cabalVerbosityArgName cabalVerbose' <- FirstFalse <$> o ..:? cabalVerboseArgName let cabalVerbose = cabalVerbosity <> toFirstCabalVerbosity cabalVerbose' splitObjs <- FirstFalse <$> o ..:? splitObjsName skipComponents <- o ..:? skipComponentsName ..!= mempty interleavedOutput <- FirstTrue <$> o ..:? interleavedOutputName progressBar <- First <$> o ..:? progressBarName ddumpDir <- o ..:? 
ddumpDirName ..!= mempty pure BuildOptsMonoid { trace , profile , noStrip , libProfile , exeProfile , libStrip , exeStrip , buildHaddocks , haddockOpts , openHaddocks , haddockDeps , haddockInternal , haddockHyperlinkSource , haddockForHackage , installExes , installCompilerTool , preFetch , keepGoing , keepTmpFiles , forceDirty , tests , testOpts , benchmarks , benchmarkOpts , reconfigure , cabalVerbose , splitObjs , skipComponents , interleavedOutput , progressBar , ddumpDir } libProfileArgName :: Text libProfileArgName = "library-profiling" exeProfileArgName :: Text exeProfileArgName = "executable-profiling" libStripArgName :: Text libStripArgName = "library-stripping" exeStripArgName :: Text exeStripArgName = "executable-stripping" haddockArgName :: Text haddockArgName = "haddock" haddockOptsArgName :: Text haddockOptsArgName = "haddock-arguments" openHaddocksArgName :: Text openHaddocksArgName = "open-haddocks" haddockDepsArgName :: Text haddockDepsArgName = "haddock-deps" haddockInternalArgName :: Text haddockInternalArgName = "haddock-internal" haddockHyperlinkSourceArgName :: Text haddockHyperlinkSourceArgName = "haddock-hyperlink-source" haddockForHackageArgName :: Text haddockForHackageArgName = "haddock-for-hackage" installExesArgName :: Text installExesArgName = "copy-bins" installCompilerToolArgName :: Text installCompilerToolArgName = "copy-compiler-tool" preFetchArgName :: Text preFetchArgName = "prefetch" keepGoingArgName :: Text keepGoingArgName = "keep-going" keepTmpFilesArgName :: Text keepTmpFilesArgName = "keep-tmp-files" forceDirtyArgName :: Text forceDirtyArgName = "force-dirty" testsArgName :: Text testsArgName = "test" testOptsArgName :: Text testOptsArgName = "test-arguments" benchmarksArgName :: Text benchmarksArgName = "bench" benchmarkOptsArgName :: Text benchmarkOptsArgName = "benchmark-opts" reconfigureArgName :: Text reconfigureArgName = "reconfigure" cabalVerbosityArgName :: Text cabalVerbosityArgName = "cabal-verbosity" 
cabalVerboseArgName :: Text cabalVerboseArgName = "cabal-verbose" splitObjsName :: Text splitObjsName = "split-objs" skipComponentsName :: Text skipComponentsName = "skip-components" interleavedOutputName :: Text interleavedOutputName = "interleaved-output" progressBarName :: Text progressBarName = "progress-bar" ddumpDirName :: Text ddumpDirName = "ddump-dir" instance Semigroup BuildOptsMonoid where (<>) = mappenddefault instance Monoid BuildOptsMonoid where mempty = memptydefault mappend = (<>) data TestOptsMonoid = TestOptsMonoid { rerunTests :: !FirstTrue , additionalArgs :: ![String] , coverage :: !FirstFalse , disableRun :: !FirstFalse , maximumTimeSeconds :: !(First (Maybe Int)) , allowStdin :: !FirstTrue } deriving (Show, Generic) instance FromJSON (WithJSONWarnings TestOptsMonoid) where parseJSON = withObjectWarnings "TestOptsMonoid" $ \o -> do rerunTests <- FirstTrue <$> o ..:? rerunTestsArgName additionalArgs <- o ..:? testAdditionalArgsName ..!= [] coverage <- FirstFalse <$> o ..:? coverageArgName disableRun <- FirstFalse <$> o ..:? testDisableRunArgName maximumTimeSeconds <- First <$> o ..:? maximumTimeSecondsArgName allowStdin <- FirstTrue <$> o ..:? 
testsAllowStdinName pure TestOptsMonoid { rerunTests , additionalArgs , coverage , disableRun , maximumTimeSeconds , allowStdin } rerunTestsArgName :: Text rerunTestsArgName = "rerun-tests" testAdditionalArgsName :: Text testAdditionalArgsName = "additional-args" coverageArgName :: Text coverageArgName = "coverage" testDisableRunArgName :: Text testDisableRunArgName = "no-run-tests" maximumTimeSecondsArgName :: Text maximumTimeSecondsArgName = "test-suite-timeout" testsAllowStdinName :: Text testsAllowStdinName = "tests-allow-stdin" instance Semigroup TestOptsMonoid where (<>) = mappenddefault instance Monoid TestOptsMonoid where mempty = memptydefault mappend = (<>) newtype HaddockOptsMonoid = HaddockOptsMonoid { additionalArgs :: [String] } deriving (Generic, Show) instance FromJSON (WithJSONWarnings HaddockOptsMonoid) where parseJSON = withObjectWarnings "HaddockOptsMonoid" $ \o -> do additionalArgs <- o ..:? haddockAdditionalArgsName ..!= [] pure HaddockOptsMonoid { additionalArgs } instance Semigroup HaddockOptsMonoid where (<>) = mappenddefault instance Monoid HaddockOptsMonoid where mempty = memptydefault mappend = (<>) haddockAdditionalArgsName :: Text haddockAdditionalArgsName = "haddock-args" data BenchmarkOptsMonoid = BenchmarkOptsMonoid { additionalArgs :: !(First String) , disableRun :: !(First Bool) } deriving (Generic, Show) instance FromJSON (WithJSONWarnings BenchmarkOptsMonoid) where parseJSON = withObjectWarnings "BenchmarkOptsMonoid" $ \o -> do additionalArgs <- First <$> o ..:? benchmarkAdditionalArgsName disableRun <- First <$> o ..:? 
benchmarkDisableRunArgName pure BenchmarkOptsMonoid { additionalArgs , disableRun } benchmarkAdditionalArgsName :: Text benchmarkAdditionalArgsName = "benchmark-arguments" benchmarkDisableRunArgName :: Text benchmarkDisableRunArgName = "no-run-benchmarks" instance Semigroup BenchmarkOptsMonoid where (<>) = mappenddefault instance Monoid BenchmarkOptsMonoid where mempty = memptydefault mappend :: BenchmarkOptsMonoid -> BenchmarkOptsMonoid -> BenchmarkOptsMonoid mappend = (<>) newtype CabalVerbosity = CabalVerbosity Verbosity deriving (Eq, Show) toFirstCabalVerbosity :: FirstFalse -> First CabalVerbosity toFirstCabalVerbosity vf = First $ vf.firstFalse <&> \p -> if p then verboseLevel else normalLevel where verboseLevel = CabalVerbosity verbose normalLevel = CabalVerbosity normal instance FromJSON CabalVerbosity where parseJSON = withText "CabalVerbosity" $ \t -> let s = T.unpack t errMsg = fail $ "Unrecognised Cabal verbosity: " ++ s in maybe errMsg pure (simpleParsec s) instance Parsec CabalVerbosity where parsec = CabalVerbosity <$> parsec buildOptsMonoidHaddockL :: Lens' BuildOptsMonoid (Maybe Bool) buildOptsMonoidHaddockL = lens (.buildHaddocks.firstFalse) (\buildMonoid t -> buildMonoid {buildHaddocks = FirstFalse t}) buildOptsMonoidTestsL :: Lens' BuildOptsMonoid (Maybe Bool) buildOptsMonoidTestsL = lens (.tests.firstFalse) (\buildMonoid t -> buildMonoid {tests = FirstFalse t}) buildOptsMonoidBenchmarksL :: Lens' BuildOptsMonoid (Maybe Bool) buildOptsMonoidBenchmarksL = lens (.benchmarks.firstFalse) (\buildMonoid t -> buildMonoid {benchmarks = FirstFalse t}) buildOptsMonoidInstallExesL :: Lens' BuildOptsMonoid (Maybe Bool) buildOptsMonoidInstallExesL = lens (.installExes.firstFalse) (\buildMonoid t -> buildMonoid {installExes = FirstFalse t}) -- Type representing formats of Stack's progress bar when building. data ProgressBarFormat = NoBar -- No progress bar at all. | CountOnlyBar -- A bar that only counts packages. 
| CappedBar -- A bar capped at a length equivalent to the terminal's width. | FullBar -- A full progress bar. deriving (Eq, Show) instance FromJSON ProgressBarFormat where parseJSON = withText "ProgressBarFormat" $ \t -> either fail pure (readProgressBarFormat $ T.unpack t) -- | Parse ProgressBarFormat from a String. readProgressBarFormat :: String -> Either String ProgressBarFormat readProgressBarFormat s | s == "none" = pure NoBar | s == "count-only" = pure CountOnlyBar | s == "capped" = pure CappedBar | s == "full" = pure FullBar | otherwise = Left $ "Invalid progress bar format: " ++ s stack-2.15.7/src/Stack/Types/CabalConfigKey.hs0000644000000000000000000000240414502056214017175 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.CabalConfigKey ( CabalConfigKey (..) , parseCabalConfigKey ) where import Data.Aeson.Types ( FromJSON (..), FromJSONKey (..), FromJSONKeyFunction (..) , withText ) import qualified Data.Text as T import Stack.Prelude -- | Which packages do configure opts apply to? 
data CabalConfigKey = CCKTargets -- ^ See AGOTargets | CCKLocals -- ^ See AGOLocals | CCKEverything -- ^ See AGOEverything | CCKPackage !PackageName -- ^ A specific package deriving (Show, Read, Eq, Ord) instance FromJSON CabalConfigKey where parseJSON = withText "CabalConfigKey" parseCabalConfigKey instance FromJSONKey CabalConfigKey where fromJSONKey = FromJSONKeyTextParser parseCabalConfigKey parseCabalConfigKey :: (Monad m, MonadFail m) => Text -> m CabalConfigKey parseCabalConfigKey "$targets" = pure CCKTargets parseCabalConfigKey "$locals" = pure CCKLocals parseCabalConfigKey "$everything" = pure CCKEverything parseCabalConfigKey name = case parsePackageName $ T.unpack name of Nothing -> fail $ "Invalid CabalConfigKey: " ++ show name Just x -> pure $ CCKPackage x stack-2.15.7/src/Stack/Types/Cache.hs0000644000000000000000000000406414445120723015406 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.Cache ( ConfigCacheType (..) , Action (..) ) where import qualified Data.Text as T import Database.Persist.Sql ( PersistField (..), PersistFieldSql (..), PersistValue (..) , SqlType (..) 
) import Stack.Prelude import Stack.Types.GhcPkgId ( GhcPkgId, parseGhcPkgId, unGhcPkgId ) -- | Type of config cache data ConfigCacheType = ConfigCacheTypeConfig | ConfigCacheTypeFlagLibrary GhcPkgId | ConfigCacheTypeFlagExecutable PackageIdentifier deriving (Eq, Show) instance PersistField ConfigCacheType where toPersistValue ConfigCacheTypeConfig = PersistText "config" toPersistValue (ConfigCacheTypeFlagLibrary v) = PersistText $ "lib:" <> unGhcPkgId v toPersistValue (ConfigCacheTypeFlagExecutable v) = PersistText $ "exe:" <> T.pack (packageIdentifierString v) fromPersistValue (PersistText t) = fromMaybe (Left $ "Unexpected ConfigCacheType value: " <> t) $ config <|> fmap lib (T.stripPrefix "lib:" t) <|> fmap exe (T.stripPrefix "exe:" t) where config | t == "config" = Just (Right ConfigCacheTypeConfig) | otherwise = Nothing lib v = do ghcPkgId <- mapLeft tshow (parseGhcPkgId v) Right $ ConfigCacheTypeFlagLibrary ghcPkgId exe v = do pkgId <- maybe (Left $ "Unexpected ConfigCacheType value: " <> t) Right $ parsePackageIdentifier (T.unpack v) Right $ ConfigCacheTypeFlagExecutable pkgId fromPersistValue _ = Left "Unexpected ConfigCacheType type" instance PersistFieldSql ConfigCacheType where sqlType _ = SqlString data Action = UpgradeCheck deriving (Eq, Ord, Show) instance PersistField Action where toPersistValue UpgradeCheck = PersistInt64 1 fromPersistValue (PersistInt64 1) = Right UpgradeCheck fromPersistValue x = Left $ T.pack $ "Invalid Action: " ++ show x instance PersistFieldSql Action where sqlType _ = SqlInt64 stack-2.15.7/src/Stack/Types/Casa.hs0000644000000000000000000000363414604306201015246 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedStrings #-} -- | Casa configuration types. module Stack.Types.Casa ( CasaOptsMonoid (..) ) where import Data.Aeson.Types ( FromJSON (..) 
) import Data.Aeson.WarningParser ( WithJSONWarnings, (..:?), withObjectWarnings ) import Casa.Client ( CasaRepoPrefix ) import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) import Stack.Prelude -- | An uninterpreted representation of Casa configuration options. -- Configurations may be "cascaded" using mappend (left-biased). data CasaOptsMonoid = CasaOptsMonoid { enable :: !FirstTrue , repoPrefix :: !(First CasaRepoPrefix) , maxKeysPerRequest :: !(First Int) } deriving (Generic, Show) -- | Decode uninterpreted Casa configuration options from JSON/YAML. instance FromJSON (WithJSONWarnings CasaOptsMonoid) where parseJSON = withObjectWarnings "CasaOptsMonoid" $ \o -> do enable <- FirstTrue <$> o ..:? casaEnableName repoPrefix <- First <$> o ..:? casaRepoPrefixName maxKeysPerRequest <- First <$> o ..:? casaMaxKeysPerRequestName pure CasaOptsMonoid { enable , repoPrefix , maxKeysPerRequest } -- | Left-biased combine Casa configuration options instance Semigroup CasaOptsMonoid where (<>) = mappenddefault -- | Left-biased combine Casa configurations options instance Monoid CasaOptsMonoid where mempty = memptydefault mappend = (<>) -- | Casa configuration enable setting name. casaEnableName :: Text casaEnableName = "enable" -- | Casa configuration repository prefix setting name. casaRepoPrefixName :: Text casaRepoPrefixName = "repo-prefix" -- | Casa configuration maximum keys per request setting name. casaMaxKeysPerRequestName :: Text casaMaxKeysPerRequestName = "max-keys-per-request" stack-2.15.7/src/Stack/Types/ColorWhen.hs0000644000000000000000000000207014502056214016273 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.ColorWhen ( ColorWhen (..) , readColorWhen ) where import Data.Aeson.Types ( FromJSON (..) 
) import Options.Applicative ( ReadM ) import qualified Options.Applicative.Types as OA import Stack.Prelude data ColorWhen = ColorNever | ColorAlways | ColorAuto deriving (Eq, Generic, Show) instance FromJSON ColorWhen where parseJSON v = do s <- parseJSON v case s of "never" -> pure ColorNever "always" -> pure ColorAlways "auto" -> pure ColorAuto _ -> fail ("Unknown color use: " <> s <> ". Expected values of " <> "option are 'never', 'always', or 'auto'.") readColorWhen :: ReadM ColorWhen readColorWhen = do s <- OA.readerAsk case s of "never" -> pure ColorNever "always" -> pure ColorAlways "auto" -> pure ColorAuto _ -> OA.readerError "Expected values of color option are 'never', \ \'always', or 'auto'." stack-2.15.7/src/Stack/Types/CompCollection.hs0000644000000000000000000001632314604306201017310 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE ScopedTypeVariables #-} -- | A module providing the type 'CompCollection' and associated helper -- functions. -- -- The corresponding Cabal approach uses lists. See, for example, the -- 'Distribution.Types.PackageDescription.sublibraries', -- 'Distribution.Types.PackageDescription.foreignLibs', -- 'Distribution.Types.PackageDescription.executables', -- 'Distribution.Types.PackageDescription.testSuites', and -- 'Distribution.Types.PackageDescription.benchmarks' fields. -- -- Cabal removes all the unbuildable components very early (at the cost of -- slightly worse error messages). 
module Stack.Types.CompCollection ( CompCollection , getBuildableSet , getBuildableSetText , getBuildableListText , getBuildableListAs , foldAndMakeCollection , hasBuildableComponent , collectionLookup , collectionKeyValueList , collectionMember , foldComponentToAnotherCollection ) where import qualified Data.Map as M import qualified Data.Set as Set import Stack.Prelude import Stack.Types.Component ( HasBuildInfo, HasName, StackBuildInfo (..) , StackUnqualCompName (..) ) -- | A type representing collections of components, distinguishing buildable -- components and non-buildable components. data CompCollection component = CompCollection { buildableOnes :: {-# UNPACK #-} !(InnerCollection component) , unbuildableOnes :: Set StackUnqualCompName -- ^ The field is lazy beacause it should only serve when users explicitely -- require unbuildable components to be built. The field allows for -- intelligible error messages. } deriving (Show) instance Semigroup (CompCollection component) where a <> b = CompCollection { buildableOnes = a.buildableOnes <> b.buildableOnes , unbuildableOnes = a.unbuildableOnes <> b.unbuildableOnes } instance Monoid (CompCollection component) where mempty = CompCollection { buildableOnes = mempty , unbuildableOnes = mempty } instance Foldable CompCollection where foldMap fn collection = foldMap fn collection.buildableOnes foldr' fn c collection = M.foldr' fn c collection.buildableOnes null = M.null . (.buildableOnes) -- | The 'Data.HashMap.Strict.HashMap' type is a more suitable choice than 'Map' -- for 'Data.Text.Text' based keys in general (it scales better). However, -- constant factors are largely dominant for maps with less than 1000 keys. -- Packages with more than 100 components are extremely unlikely, so we use a -- 'Map'. type InnerCollection component = Map StackUnqualCompName component -- | A function to add a component to a collection of components. Ensures that -- both 'asNameMap' and 'asNameSet' are updated consistently. 
addComponent :: HasName component => component -- ^ Component to add. -> InnerCollection component -- ^ Existing collection of components. -> InnerCollection component addComponent component = M.insert component.name component -- | For the given function and foldable data structure of components of type -- @compA@, iterates on the elements of that structure and maps each element to -- a component of type @compB@ while building a 'CompCollection'. foldAndMakeCollection :: (HasBuildInfo compB, HasName compB, Foldable sourceCollection) => (compA -> compB) -- ^ Function to apply to each element in the data struture. -> sourceCollection compA -- ^ Given foldable data structure of components of type @compA@. -> CompCollection compB foldAndMakeCollection mapFn = foldl' compIterator mempty where compIterator existingCollection component = compCreator existingCollection (mapFn component) compCreator existingCollection component | component.buildInfo.buildable = existingCollection { buildableOnes = addComponent component existingCollection.buildableOnes } | otherwise = existingCollection { unbuildableOnes = Set.insert component.name existingCollection.unbuildableOnes } -- | Get the names of the buildable components in the given collection, as a -- 'Set' of 'StackUnqualCompName'. getBuildableSet :: CompCollection component -> Set StackUnqualCompName getBuildableSet = M.keysSet . (.buildableOnes) -- | Get the names of the buildable components in the given collection, as a -- 'Set' of 'Text'. getBuildableSetText :: CompCollection component -> Set Text getBuildableSetText = Set.mapMonotonic (.unqualCompToText) . getBuildableSet -- | Get the names of the buildable components in the given collection, as a -- list of 'Text. getBuildableListText :: CompCollection component -> [Text] getBuildableListText = getBuildableListAs (.unqualCompToText) -- | Apply the given function to the names of the buildable components in the -- given collection, yielding a list. 
getBuildableListAs :: (StackUnqualCompName -> something) -- ^ Function to apply to buildable components. -> CompCollection component -- ^ Collection of components. -> [something] getBuildableListAs fn = Set.foldr' (\v l -> fn v:l) [] . getBuildableSet -- | Yields 'True' if, and only if, the given collection includes at least one -- buildable component. hasBuildableComponent :: CompCollection component -> Bool hasBuildableComponent = not . null . getBuildableSet -- | For the given name of a buildable component and the given collection of -- components, yields 'Just' @component@ if the collection includes a buildable -- component of that name, and 'Nothing' otherwise. collectionLookup :: Text -- ^ Name of the buildable component. -> CompCollection component -- ^ Collection of components. -> Maybe component collectionLookup needle haystack = M.lookup (StackUnqualCompName needle) haystack.buildableOnes -- | For a given collection of components, yields a list of pairs for buildable -- components of the name of the component and the component. collectionKeyValueList :: CompCollection component -> [(Text, component)] collectionKeyValueList haystack = (\(StackUnqualCompName k, !v) -> (k, v)) <$> M.toList haystack.buildableOnes -- | Yields 'True' if, and only if, the given collection of components includes -- a buildable component with the given name. collectionMember :: Text -- ^ Name of the buildable component. -> CompCollection component -- ^ Collection of components. -> Bool collectionMember needle haystack = isJust $ collectionLookup needle haystack -- | Reduce the buildable components of the given collection of components by -- applying the given binary operator to all buildable components, using the -- given starting value (typically the right-identity of the operator). foldComponentToAnotherCollection :: (Monad m) => CompCollection component -- ^ Collection of components. -> (component -> m a -> m a) -- ^ Binary operator. -> m a -- ^ Starting value. 
-> m a foldComponentToAnotherCollection collection fn initialValue = M.foldr' fn initialValue collection.buildableOnes stack-2.15.7/src/Stack/Types/CompilerBuild.hs0000644000000000000000000000252014620153446017133 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Types.CompilerBuild ( CompilerBuild (..) , compilerBuildName , compilerBuildSuffix , parseCompilerBuild ) where import Data.Aeson.Types ( FromJSON, parseJSON, withText ) import Data.Text as T import Stack.Prelude data CompilerBuild = CompilerBuildStandard | CompilerBuildSpecialized String deriving Show instance FromJSON CompilerBuild where -- Strange structuring is to give consistent error messages parseJSON = withText "CompilerBuild" (either (fail . show) pure . parseCompilerBuild . T.unpack) -- | Descriptive name for compiler build compilerBuildName :: CompilerBuild -> String compilerBuildName CompilerBuildStandard = "standard" compilerBuildName (CompilerBuildSpecialized s) = s -- | Suffix to use for filenames/directories constructed with compiler build compilerBuildSuffix :: CompilerBuild -> String compilerBuildSuffix CompilerBuildStandard = "" compilerBuildSuffix (CompilerBuildSpecialized s) = '-' : s -- | Parse compiler build from a String. parseCompilerBuild :: (MonadThrow m) => String -> m CompilerBuild parseCompilerBuild "" = pure CompilerBuildStandard parseCompilerBuild "standard" = pure CompilerBuildStandard parseCompilerBuild name = pure (CompilerBuildSpecialized name) stack-2.15.7/src/Stack/Types/CompilerPaths.hs0000644000000000000000000000544714604306201017155 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} module Stack.Types.CompilerPaths ( CompilerPaths (..) , GhcPkgExe (..) , HasCompiler (..) 
, cabalVersionL , compilerVersionL , cpWhich , getCompilerPath , getGhcPkgExe ) where import Distribution.System ( Arch ) import Stack.Prelude import Stack.Types.Compiler ( ActualCompiler, WhichCompiler, whichCompiler ) import Stack.Types.CompilerBuild ( CompilerBuild ) import Stack.Types.DumpPackage ( DumpPackage ) -- | Paths on the filesystem for the compiler we're using data CompilerPaths = CompilerPaths { compilerVersion :: !ActualCompiler , arch :: !Arch , build :: !CompilerBuild , compiler :: !(Path Abs File) , pkg :: !GhcPkgExe -- ^ ghc-pkg or equivalent , interpreter :: !(Path Abs File) -- ^ runghc , haddock :: !(Path Abs File) -- ^ haddock, in 'IO' to allow deferring the lookup , sandboxed :: !Bool -- ^ Is this a Stack-sandboxed installation? , cabalVersion :: !Version -- ^ This is the version of Cabal that Stack will use to compile Setup.hs -- files in the build process. -- -- Note that this is not necessarily the same version as the one that Stack -- depends on as a library and which is displayed when running -- @stack ls dependencies | grep Cabal@ in the Stack project. , globalDB :: !(Path Abs Dir) -- ^ Global package database , ghcInfo :: !ByteString -- ^ Output of @ghc --info@ , globalDump :: !(Map PackageName DumpPackage) } deriving Show -- | An environment which ensures that the given compiler is available on the -- PATH class HasCompiler env where compilerPathsL :: SimpleGetter env CompilerPaths instance HasCompiler CompilerPaths where compilerPathsL = id -- | Location of the ghc-pkg executable newtype GhcPkgExe = GhcPkgExe (Path Abs File) deriving Show cabalVersionL :: HasCompiler env => SimpleGetter env Version cabalVersionL = compilerPathsL . to (.cabalVersion) compilerVersionL :: HasCompiler env => SimpleGetter env ActualCompiler compilerVersionL = compilerPathsL . to (.compilerVersion) cpWhich :: (MonadReader env m, HasCompiler env) => m WhichCompiler cpWhich = view $ compilerPathsL . to (whichCompiler . 
(.compilerVersion)) -- | Get the path for the given compiler ignoring any local binaries. -- -- https://github.com/commercialhaskell/stack/issues/1052 getCompilerPath :: HasCompiler env => RIO env (Path Abs File) getCompilerPath = view $ compilerPathsL . to (.compiler) -- | Get the 'GhcPkgExe' from a 'HasCompiler' environment getGhcPkgExe :: HasCompiler env => RIO env GhcPkgExe getGhcPkgExe = view $ compilerPathsL . to (.pkg) stack-2.15.7/src/Stack/Types/Compiler.hs0000644000000000000000000001156514445120723016161 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeFamilies #-} module Stack.Types.Compiler ( ActualCompiler (..) , WhichCompiler (..) , CompilerRepository (..) , CompilerException (..) , defaultCompilerRepository , getGhcVersion , whichCompiler , compilerVersionText , compilerVersionString , isWantedCompiler , wantedToActual , actualToWanted , parseActualCompiler , whichCompilerL ) where import Data.Aeson ( FromJSON (..), FromJSONKey (..), FromJSONKeyFunction (..) , ToJSON (..), Value (..), withText ) import Database.Persist.Sql ( PersistField (..), PersistFieldSql (..), SqlType (..) ) import qualified Data.Text as T import Stack.Prelude import Stack.Types.Version ( VersionCheck, checkVersion ) import Distribution.Version ( mkVersion ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Types.Compiler" module. data CompilerException = GhcjsNotSupported | PantryException PantryException deriving (Show, Typeable) instance Exception CompilerException where displayException GhcjsNotSupported = "Error: [S-7903]\n" ++ "GHCJS is no longer supported by Stack." displayException (PantryException p) = "Error: [S-7972]\n" ++ displayException p -- | Variety of compiler to use. data WhichCompiler = Ghc deriving (Eq, Ord, Show) -- | Specifies a compiler and its version number(s). 
-- -- Note that despite having this datatype, Stack isn't in a hurry to -- support compilers other than GHC. data ActualCompiler = ACGhc !Version | ACGhcGit !Text !Text deriving (Data, Eq, Generic, Ord, Show, Typeable) instance NFData ActualCompiler instance Display ActualCompiler where display (ACGhc x) = display (WCGhc x) display (ACGhcGit x y) = display (WCGhcGit x y) instance ToJSON ActualCompiler where toJSON = toJSON . compilerVersionText instance FromJSON ActualCompiler where parseJSON (String t) = either (const $ fail "Failed to parse compiler version") pure (parseActualCompiler t) parseJSON _ = fail "Invalid CompilerVersion, must be String" instance FromJSONKey ActualCompiler where fromJSONKey = FromJSONKeyTextParser $ \k -> case parseActualCompiler k of Left _ -> fail $ "Failed to parse CompilerVersion " ++ T.unpack k Right parsed -> pure parsed instance PersistField ActualCompiler where toPersistValue = toPersistValue . compilerVersionText fromPersistValue = (mapLeft tshow . parseActualCompiler) <=< fromPersistValue instance PersistFieldSql ActualCompiler where sqlType _ = SqlString wantedToActual :: WantedCompiler -> Either CompilerException ActualCompiler wantedToActual (WCGhc x) = Right $ ACGhc x wantedToActual (WCGhcjs _ _) = Left GhcjsNotSupported wantedToActual (WCGhcGit x y) = Right $ ACGhcGit x y actualToWanted :: ActualCompiler -> WantedCompiler actualToWanted (ACGhc x) = WCGhc x actualToWanted (ACGhcGit x y) = WCGhcGit x y parseActualCompiler :: T.Text -> Either CompilerException ActualCompiler parseActualCompiler = either (Left . PantryException) wantedToActual . parseWantedCompiler compilerVersionText :: ActualCompiler -> T.Text compilerVersionText = utf8BuilderToText . display compilerVersionString :: ActualCompiler -> String compilerVersionString = T.unpack . 
compilerVersionText whichCompiler :: ActualCompiler -> WhichCompiler whichCompiler ACGhc{} = Ghc whichCompiler ACGhcGit{} = Ghc isWantedCompiler :: VersionCheck -> WantedCompiler -> ActualCompiler -> Bool isWantedCompiler check (WCGhc wanted) (ACGhc actual) = checkVersion check wanted actual isWantedCompiler _check (WCGhcGit wCommit wFlavour) (ACGhcGit aCommit aFlavour) = wCommit == aCommit && wFlavour == aFlavour isWantedCompiler _ _ _ = False getGhcVersion :: ActualCompiler -> Version getGhcVersion (ACGhc v) = v getGhcVersion (ACGhcGit _ _) = -- We can't return the actual version without running the installed ghc. -- For now we assume that users of ghc-git use it with a recent commit so we -- return a version far in the future. This disables our hacks for older -- versions and passes version checking when we use newer features. mkVersion [999, 0, 0] -- | Repository containing the compiler sources newtype CompilerRepository = CompilerRepository Text deriving Show instance FromJSON CompilerRepository where parseJSON = withText "CompilerRepository" (pure . CompilerRepository) defaultCompilerRepository :: CompilerRepository defaultCompilerRepository = CompilerRepository "https://gitlab.haskell.org/ghc/ghc.git" whichCompilerL :: Getting r ActualCompiler WhichCompiler whichCompilerL = to whichCompiler stack-2.15.7/src/Stack/Types/Component.hs0000644000000000000000000001713314620153446016351 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE ScopedTypeVariables #-} -- | A module providing the types that represent different sorts of components -- of a package (library and sub-library, foreign library, executable, test -- suite and benchmark). 
module Stack.Types.Component ( StackLibrary (..) , StackForeignLibrary (..) , StackExecutable (..) , StackTestSuite (..) , StackBenchmark (..) , StackUnqualCompName (..) , StackBuildInfo (..) , HasName , HasBuildInfo , HasComponentInfo ) where import Distribution.Compiler ( PerCompilerFlavor ) import Distribution.ModuleName ( ModuleName ) import Distribution.PackageDescription ( BenchmarkInterface, Dependency, TestSuiteInterface ) import Distribution.Simple ( Extension, Language ) import Distribution.Utils.Path ( PackageDir, SourceDir, SymbolicPath ) import GHC.Records ( HasField (..) ) import Stack.Prelude import Stack.Types.ComponentUtils ( StackUnqualCompName (..) ) import Stack.Types.Dependency ( DepValue ) import Stack.Types.NamedComponent ( NamedComponent (..) ) -- | A type representing (unnamed) main library or sub-library components of a -- package. -- -- Cabal-syntax uses data constructors -- 'Distribution.Types.LibraryName.LMainLibName' and -- 'Distribution.Types.LibraryName.LSubLibName' to distinguish main libraries -- and sub-libraries. We do not do so, as the \'missing\' name in the case of a -- main library can be represented by the empty string. -- -- The corresponding Cabal-syntax type is 'Distribution.Types.Library.Library'. data StackLibrary = StackLibrary { name :: StackUnqualCompName , buildInfo :: !StackBuildInfo , exposedModules :: [ModuleName] -- |^ This is only used for gathering the files related to this component. } deriving (Show, Typeable) -- | A type representing foreign library components of a package. -- -- The corresponding Cabal-syntax type is -- 'Distribution.Types.Foreign.Libraries.ForeignLib'. data StackForeignLibrary = StackForeignLibrary { name :: StackUnqualCompName , buildInfo :: !StackBuildInfo } deriving (Show, Typeable) -- | A type representing executable components of a package. -- -- The corresponding Cabal-syntax type is -- 'Distribution.Types.Executable.Executable'. 
data StackExecutable = StackExecutable { name :: StackUnqualCompName , buildInfo :: !StackBuildInfo , modulePath :: FilePath } deriving (Show, Typeable) -- | A type representing test suite components of a package. -- -- The corresponding Cabal-syntax type is -- 'Distribution.Types.TestSuite.TestSuite'. data StackTestSuite = StackTestSuite { name :: StackUnqualCompName , buildInfo :: !StackBuildInfo , interface :: !TestSuiteInterface } deriving (Show, Typeable) -- | A type representing benchmark components of a package. -- -- The corresponding Cabal-syntax type is -- 'Distribution.Types.Benchmark.Benchmark'. data StackBenchmark = StackBenchmark { name :: StackUnqualCompName , buildInfo :: StackBuildInfo , interface :: BenchmarkInterface -- ^ This is only used for gathering the files related to this component. } deriving (Show, Typeable) -- | Type representing the name of an executable. newtype ExeName = ExeName Text deriving (Data, Eq, Hashable, IsString, Generic, NFData, Ord, Show, Typeable) -- | Type representing information needed to build. The file gathering-related -- fields are lazy because they are not always needed. -- -- The corresponding Cabal-syntax type is -- 'Distribution.Types.BuildInfo.BuildInfo'. -- We don't use the Cabal-syntax type because Cabal provides a list of -- dependencies, and Stack needs a Map and only a small subset of all the -- information in Cabal-syntax type. data StackBuildInfo = StackBuildInfo { buildable :: !Bool -- ^ Corresponding to Cabal-syntax's -- 'Distribution.Types.BuildInfo.buildable'. The component is buildable -- here. , dependency :: !(Map PackageName DepValue) -- ^ Corresponding to Cabal-syntax's -- 'Distribution.Types.BuildInfo.targetBuildDepends'. Dependencies specific -- to a library or executable target. , unknownTools :: Set Text -- ^ From Cabal-syntax's 'Distribution.Types.BuildInfo.buildTools'. We only -- keep the legacy build tool depends that we know (from a hardcoded list). 
-- We only use the deduplication aspect of the Set here, as this field is -- only used for error reporting in the end. This is lazy because it's an -- error reporting field only. , otherModules :: [ModuleName] -- ^ Only used in file gathering. See usage in "Stack.ComponentFile" module. , jsSources :: [FilePath] -- ^ Only used in file gathering. See usage in "Stack.ComponentFile" module. , hsSourceDirs :: [SymbolicPath PackageDir SourceDir] -- ^ Only used in file & opts gathering. See usage in "Stack.ComponentFile" -- module for fle gathering. , cSources :: [FilePath] -- ^ Only used in file gathering. See usage in "Stack.ComponentFile" module. , cppOptions :: [String] -- ^ Only used in opts gathering. See usage in "Stack.Package" module. , targetBuildDepends :: [Dependency] -- ^ Only used in opts gathering. , options :: PerCompilerFlavor [String] -- ^ Only used in opts gathering. , allLanguages :: [Language] -- ^ Only used in opts gathering. , usedExtensions :: [Extension] -- ^ Only used in opts gathering. , includeDirs :: [FilePath] -- ^ Only used in opts gathering. , extraLibs :: [String] -- ^ Only used in opts gathering. , extraLibDirs :: [String] -- ^ Only used in opts gathering. , frameworks :: [String] -- ^ Only used in opts gathering. } deriving (Show) -- | Type synonym for a 'HasField' constraint. type HasName component = HasField "name" component StackUnqualCompName -- | Type synonym for a 'HasField' constraint. type HasBuildInfo component = HasField "buildInfo" component StackBuildInfo instance HasField "qualifiedName" StackLibrary NamedComponent where getField v | rawName == mempty = CLib | otherwise = CSubLib rawName where rawName = v.name.unqualCompToText instance HasField "qualifiedName" StackForeignLibrary NamedComponent where getField = CFlib . (.name.unqualCompToText) instance HasField "qualifiedName" StackExecutable NamedComponent where getField = CExe . 
(.name.unqualCompToText) instance HasField "qualifiedName" StackTestSuite NamedComponent where getField = CTest . (.name.unqualCompToText) instance HasField "qualifiedName" StackBenchmark NamedComponent where getField = CTest . (.name.unqualCompToText) -- | Type synonym for a 'HasField' constraint which represent a virtual field, -- computed from the type, the NamedComponent constructor and the name. type HasQualiName component = HasField "qualifiedName" component NamedComponent -- | Type synonym for a 'HasField' constraint for all the common component -- fields i.e. @name@, @buildInfo@ and @qualifiedName@. type HasComponentInfo component = (HasName component, HasBuildInfo component, HasQualiName component) stack-2.15.7/src/Stack/Types/ComponentUtils.hs0000644000000000000000000000353614604306201017363 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE ScopedTypeVariables #-} -- | A module providing a type representing the name of an \'unqualified\' -- component and related helper functions. module Stack.Types.ComponentUtils ( StackUnqualCompName (..) , fromCabalName , toCabalName ) where import Distribution.PackageDescription ( UnqualComponentName, mkUnqualComponentName , unUnqualComponentName ) import RIO.Text (pack, unpack) import Stack.Prelude -- | Type representing the name of an \'unqualified\' component (that is, the -- component can be any sort - a (unnamed) main library or sub-library, -- an executable, etc. ). -- -- The corresponding The Cabal-syntax type is -- 'Distribution.Types.UnqualComponentName.UnqualComponentName'. 
-- Ideally, we would use the Cabal-syntax type and not 'Text', to avoid -- unnecessary work, but there is no 'Hashable' instance for -- 'Distribution.Types.UnqualComponentName.UnqualComponentName' yet. newtype StackUnqualCompName = StackUnqualCompName { unqualCompToText :: Text } deriving (Data, Eq, Generic, Hashable, IsString, NFData, Ord, Read, Show, Typeable) fromCabalName :: UnqualComponentName -> StackUnqualCompName fromCabalName unqualName = StackUnqualCompName $ pack . unUnqualComponentName $ unqualName toCabalName :: StackUnqualCompName -> UnqualComponentName toCabalName (StackUnqualCompName unqualName) = mkUnqualComponentName (unpack unqualName) stack-2.15.7/src/Stack/Types/Config.hs0000644000000000000000000003076514620153446015622 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DefaultSignatures #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE MultiWayIf #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE ViewPatterns #-} module Stack.Types.Config ( Config (..) , HasConfig (..) , askLatestSnapshotUrl , configProjectRoot , ghcInstallHook -- * Lens helpers , buildOptsL , envOverrideSettingsL , globalOptsL , stackGlobalConfigL , stackRootL , workDirL -- * Helper logging functions , prettyStackDevL ) where import Casa.Client ( CasaRepoPrefix ) import Distribution.System ( Platform ) import Path ( (), parent, reldir, relfile ) import RIO.Process ( HasProcessContext (..), ProcessContext ) import Stack.Prelude import Stack.Types.ApplyGhcOptions ( ApplyGhcOptions (..) ) import Stack.Types.ApplyProgOptions ( ApplyProgOptions (..) 
) import Stack.Types.BuildOpts ( BuildOpts ) import Stack.Types.CabalConfigKey ( CabalConfigKey ) import Stack.Types.Compiler ( CompilerRepository ) import Stack.Types.CompilerBuild ( CompilerBuild ) import Stack.Types.Docker ( DockerOpts ) import Stack.Types.DumpLogs ( DumpLogs ) import Stack.Types.EnvSettings ( EnvSettings ) import Stack.Types.GHCVariant ( GHCVariant (..), HasGHCVariant (..) ) import Stack.Types.Nix ( NixOpts ) import Stack.Types.Platform ( HasPlatform (..), PlatformVariant ) import Stack.Types.Project ( Project (..) ) import Stack.Types.ProjectConfig ( ProjectConfig (..) ) import Stack.Types.PvpBounds ( PvpBounds ) import Stack.Types.Resolver ( AbstractResolver ) import Stack.Types.Runner ( HasRunner (..), Runner, globalOptsL ) import Stack.Types.SCM ( SCM ) import Stack.Types.SetupInfo ( SetupInfo ) import Stack.Types.Storage ( UserStorage ) import Stack.Types.TemplateName ( TemplateName ) import Stack.Types.Version ( VersionCheck (..), VersionRange ) -- | The top-level Stackage configuration. data Config = Config { workDir :: !(Path Rel Dir) -- ^ this allows to override .stack-work directory , userConfigPath :: !(Path Abs File) -- ^ Path to user configuration file (usually ~/.stack/config.yaml) , build :: !BuildOpts -- ^ Build configuration , docker :: !DockerOpts -- ^ Docker configuration , nix :: !NixOpts -- ^ Execution environment (e.g nix-shell) configuration , processContextSettings :: !(EnvSettings -> IO ProcessContext) -- ^ Environment variables to be passed to external tools , localProgramsBase :: !(Path Abs Dir) -- ^ Non-platform-specific path containing local installations , localPrograms :: !(Path Abs Dir) -- ^ Path containing local installations (mainly GHC) , hideTHLoading :: !Bool -- ^ Hide the Template Haskell "Loading package ..." messages from the -- console , prefixTimestamps :: !Bool -- ^ Prefix build output with timestamps for each line. 
, platform :: !Platform -- ^ The platform we're building for, used in many directory names , platformVariant :: !PlatformVariant -- ^ Variant of the platform, also used in directory names , ghcVariant :: !(Maybe GHCVariant) -- ^ The variant of GHC requested by the user. , ghcBuild :: !(Maybe CompilerBuild) -- ^ Override build of the compiler distribution (e.g. standard, gmp4, -- tinfo6) , latestSnapshot :: !Text -- ^ URL of a JSON file providing the latest LTS and Nightly snapshots. , systemGHC :: !Bool -- ^ Should we use the system-installed GHC (on the PATH) if -- available? Can be overridden by command line options. , installGHC :: !Bool -- ^ Should we automatically install GHC if missing or the wrong -- version is available? Can be overridden by command line options. , skipGHCCheck :: !Bool -- ^ Don't bother checking the GHC version or architecture. , skipMsys :: !Bool -- ^ On Windows: don't use a sandboxed MSYS , compilerCheck :: !VersionCheck -- ^ Specifies which versions of the compiler are acceptable. , compilerRepository :: !CompilerRepository -- ^ Specifies the repository containing the compiler sources , localBin :: !(Path Abs Dir) -- ^ Directory we should install executables into , requireStackVersion :: !VersionRange -- ^ Require a version of Stack within this range. , jobs :: !Int -- ^ How many concurrent jobs to run, defaults to number of capabilities , overrideGccPath :: !(Maybe (Path Abs File)) -- ^ Optional gcc override path , extraIncludeDirs :: ![FilePath] -- ^ --extra-include-dirs arguments , extraLibDirs :: ![FilePath] -- ^ --extra-lib-dirs arguments , customPreprocessorExts :: ![Text] -- ^ List of custom preprocessors to complete the hard coded ones , concurrentTests :: !Bool -- ^ Run test suites concurrently , templateParams :: !(Map Text Text) -- ^ Parameters for templates. , scmInit :: !(Maybe SCM) -- ^ Initialize SCM (e.g. git) when creating new projects. 
, ghcOptionsByName :: !(Map PackageName [Text]) -- ^ Additional GHC options to apply to specific packages. , ghcOptionsByCat :: !(Map ApplyGhcOptions [Text]) -- ^ Additional GHC options to apply to categories of packages , cabalConfigOpts :: !(Map CabalConfigKey [Text]) -- ^ Additional options to be passed to ./Setup.hs configure , setupInfoLocations :: ![String] -- ^ URLs or paths to stack-setup.yaml files, for finding tools. -- If none present, the default setup-info is used. , setupInfoInline :: !SetupInfo -- ^ Additional SetupInfo to use to find tools. , pvpBounds :: !PvpBounds -- ^ How PVP upper bounds should be added to packages , modifyCodePage :: !Bool -- ^ Force the code page to UTF-8 on Windows , rebuildGhcOptions :: !Bool -- ^ Rebuild on GHC options changes , applyGhcOptions :: !ApplyGhcOptions -- ^ Which packages do --ghc-options on the command line apply to? , applyProgOptions :: !ApplyProgOptions -- ^ Which packages do all and any --PROG-option options on the command line -- apply to? , allowNewer :: !Bool -- ^ Ignore version ranges in .cabal files. Funny naming chosen to -- match cabal. , allowNewerDeps :: !(Maybe [PackageName]) -- ^ Ignore dependency upper and lower bounds only for specified -- packages. No effect unless allow-newer is enabled. , defaultTemplate :: !(Maybe TemplateName) -- ^ The default template to use when none is specified. -- (If Nothing, the 'default' default template is used.) , allowDifferentUser :: !Bool -- ^ Allow users other than the Stack root owner to use the Stack -- installation. , dumpLogs :: !DumpLogs -- ^ Dump logs of local non-dependencies when doing a build. , project :: !(ProjectConfig (Project, Path Abs File)) -- ^ Project information and stack.yaml file location , allowLocals :: !Bool -- ^ Are we allowed to build local packages? The script -- command disallows this. , saveHackageCreds :: !Bool -- ^ Should we save Hackage credentials to a file? 
, hackageBaseUrl :: !Text -- ^ Hackage base URL used when uploading packages , runner :: !Runner , pantryConfig :: !PantryConfig , stackRoot :: !(Path Abs Dir) , resolver :: !(Maybe AbstractResolver) -- ^ Any resolver override from the command line , userStorage :: !UserStorage -- ^ Database connection pool for user Stack database , hideSourcePaths :: !Bool -- ^ Enable GHC hiding source paths? , recommendUpgrade :: !Bool -- ^ Recommend a Stack upgrade? , notifyIfNixOnPath :: !Bool -- ^ Notify if the Nix package manager (nix) is on the PATH, but -- Stack's Nix integration is not enabled? , notifyIfGhcUntested :: !Bool -- ^ Notify if Stack has not been tested with the GHC version? , notifyIfCabalUntested :: !Bool -- ^ Notify if Stack has not been tested with the Cabal version? , notifyIfArchUnknown :: !Bool -- ^ Notify if the specified machine architecture is unknown to Cabal (the -- library)? , noRunCompile :: !Bool -- ^ Use --no-run and --compile options when using `stack script` , stackDeveloperMode :: !Bool -- ^ Turn on Stack developer mode for additional messages? , casa :: !(Maybe (CasaRepoPrefix, Int)) -- ^ Optional Casa configuration } -- | The project root directory, if in a project. configProjectRoot :: Config -> Maybe (Path Abs Dir) configProjectRoot c = case c.project of PCProject (_, fp) -> Just $ parent fp PCGlobalProject -> Nothing PCNoProject _deps -> Nothing -- | Get the URL to request the information on the latest snapshots askLatestSnapshotUrl :: (MonadReader env m, HasConfig env) => m Text askLatestSnapshotUrl = view $ configL . to (.latestSnapshot) -- | @STACK_ROOT\/hooks\/@ hooksDir :: HasConfig env => RIO env (Path Abs Dir) hooksDir = do sr <- view $ configL . 
to (.stackRoot) pure (sr [reldir|hooks|]) -- | @STACK_ROOT\/hooks\/ghc-install.sh@ ghcInstallHook :: HasConfig env => RIO env (Path Abs File) ghcInstallHook = do hd <- hooksDir pure (hd [relfile|ghc-install.sh|]) ----------------------------------- -- Lens classes ----------------------------------- -- | Class for environment values that can provide a 'Config'. class ( HasPlatform env , HasGHCVariant env , HasProcessContext env , HasPantryConfig env , HasTerm env , HasRunner env ) => HasConfig env where configL :: Lens' env Config ----------------------------------- -- Lens instances ----------------------------------- instance HasPlatform Config where platformL = lens (.platform) (\x y -> x { platform = y }) platformVariantL = lens (.platformVariant) (\x y -> x { platformVariant = y }) instance HasGHCVariant Config where ghcVariantL = to $ fromMaybe GHCStandard . (.ghcVariant) instance HasProcessContext Config where processContextL = runnerL . processContextL instance HasPantryConfig Config where pantryConfigL = lens (.pantryConfig) (\x y -> x { pantryConfig = y }) instance HasConfig Config where configL = id {-# INLINE configL #-} instance HasRunner Config where runnerL = lens (.runner) (\x y -> x { runner = y }) instance HasLogFunc Config where logFuncL = runnerL . logFuncL instance HasStylesUpdate Config where stylesUpdateL = runnerL . stylesUpdateL instance HasTerm Config where useColorL = runnerL . useColorL termWidthL = runnerL . termWidthL ----------------------------------- -- Helper lenses ----------------------------------- stackRootL :: HasConfig s => Lens' s (Path Abs Dir) stackRootL = configL . lens (.stackRoot) (\x y -> x { stackRoot = y }) stackGlobalConfigL :: HasConfig s => Lens' s (Path Abs File) stackGlobalConfigL = configL . lens (.userConfigPath) (\x y -> x { userConfigPath = y }) buildOptsL :: HasConfig s => Lens' s BuildOpts buildOptsL = configL . 
lens (.build) (\x y -> x { build = y }) envOverrideSettingsL :: HasConfig env => Lens' env (EnvSettings -> IO ProcessContext) envOverrideSettingsL = configL . lens (.processContextSettings) (\x y -> x { processContextSettings = y }) -- | @".stack-work"@ workDirL :: HasConfig env => Lens' env (Path Rel Dir) workDirL = configL . lens (.workDir) (\x y -> x { workDir = y }) -- | In dev mode, print as a warning, otherwise as debug prettyStackDevL :: HasConfig env => [StyleDoc] -> RIO env () prettyStackDevL docs = do config <- view configL if config.stackDeveloperMode then prettyWarnL docs else prettyDebugL docs stack-2.15.7/src/Stack/Types/Config/Exception.hs0000644000000000000000000002144514620153446017553 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeFamilies #-} module Stack.Types.Config.Exception ( ConfigException (..) , ConfigPrettyException (..) , ParseAbsolutePathException (..) , packageIndicesWarning ) where import qualified Data.Text as T import Data.Yaml ( ParseException ) import qualified Data.Yaml as Yaml import Path( dirname, filename ) import Stack.Prelude import Stack.Types.ConfigMonoid ( configMonoidAllowDifferentUserName , configMonoidGHCVariantName, configMonoidSystemGHCName ) import Stack.Types.Version ( VersionRange, stackVersion, versionRangeText ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Config" module. 
data ConfigException = ParseCustomSnapshotException Text ParseException | NoProjectConfigFound (Path Abs Dir) (Maybe Text) | UnexpectedArchiveContents [Path Abs Dir] [Path Abs File] | UnableToExtractArchive Text (Path Abs File) | BadStackVersionException VersionRange | NoSuchDirectory FilePath | ParseGHCVariantException String | BadStackRoot (Path Abs Dir) | Won'tCreateStackRootInDirectoryOwnedByDifferentUser (Path Abs Dir) (Path Abs Dir) -- ^ @$STACK_ROOT@, parent dir | UserDoesn'tOwnDirectory (Path Abs Dir) | ManualGHCVariantSettingsAreIncompatibleWithSystemGHC | NixRequiresSystemGhc | NoResolverWhenUsingNoProject | NoLTSWithMajorVersion Int | NoLTSFound deriving (Show, Typeable) instance Exception ConfigException where displayException (ParseCustomSnapshotException url exception) = concat [ "Error: [S-8981]\n" , "Could not parse '" , T.unpack url , "':\n" , Yaml.prettyPrintParseException exception , "\nSee https://docs.haskellstack.org/en/stable/custom_snapshot/" ] displayException (NoProjectConfigFound dir mcmd) = concat [ "Error: [S-2206]\n" , "Unable to find a stack.yaml file in the current directory (" , toFilePath dir , ") or its ancestors" , case mcmd of Nothing -> "" Just cmd -> "\nRecommended action: stack " ++ T.unpack cmd ] displayException (UnexpectedArchiveContents dirs files) = concat [ "Error: [S-4964]\n" , "When unpacking an archive specified in your stack.yaml file, " , "did not find expected contents. Expected: a single directory. Found: " , show ( map (toFilePath . dirname) dirs , map (toFilePath . filename) files ) ] displayException (UnableToExtractArchive url file) = concat [ "Error: [S-2040]\n" , "Archive extraction failed. 
Tarballs and zip archives are supported, \ \couldn't handle the following URL, " , T.unpack url , " downloaded to the file " , toFilePath $ filename file ] displayException (BadStackVersionException requiredRange) = concat [ "Error: [S-1641]\n" , "The version of Stack you are using (" , show stackVersion , ") is outside the required\n" ,"version range specified in stack.yaml (" , T.unpack (versionRangeText requiredRange) , ").\n" , "You can upgrade Stack by running:\n\n" , "stack upgrade" ] displayException (NoSuchDirectory dir) = concat [ "Error: [S-8773]\n" , "No directory could be located matching the supplied path: " , dir ] displayException (ParseGHCVariantException v) = concat [ "Error: [S-3938]\n" , "Invalid ghc-variant value: " , v ] displayException (BadStackRoot stackRoot) = concat [ "Error: [S-8530]\n" , "Invalid Stack root: '" , toFilePath stackRoot , "'. Please provide a valid absolute path." ] displayException (Won'tCreateStackRootInDirectoryOwnedByDifferentUser envStackRoot parentDir) = concat [ "Error: [S-7613]\n" , "Preventing creation of Stack root '" , toFilePath envStackRoot , "'. Parent directory '" , toFilePath parentDir , "' is owned by someone else." ] displayException (UserDoesn'tOwnDirectory dir) = concat [ "Error: [S-8707]\n" , "You are not the owner of '" , toFilePath dir , "'. Aborting to protect file permissions." , "\nRetry with '--" , T.unpack configMonoidAllowDifferentUserName , "' to disable this precaution." ] displayException ManualGHCVariantSettingsAreIncompatibleWithSystemGHC = T.unpack $ T.concat [ "Error: [S-3605]\n" , "Stack can only control the " , configMonoidGHCVariantName , " of its own GHC installations. Please use '--no-" , configMonoidSystemGHCName , "'." ] displayException NixRequiresSystemGhc = T.unpack $ T.concat [ "Error: [S-6816]\n" , "Stack's Nix integration is incompatible with '--no-system-ghc'. " , "Please use '--" , configMonoidSystemGHCName , "' or disable the Nix integration." 
] displayException NoResolverWhenUsingNoProject = "Error: [S-5027]\n" ++ "When using the script command, you must provide a resolver argument" displayException (NoLTSWithMajorVersion n) = concat [ "Error: [S-3803]\n" , "No LTS release found with major version " , show n , "." ] displayException NoLTSFound = "Error: [S-5472]\n" ++ "No LTS releases found." -- | Type representing \'pretty\' exceptions thrown by functions exported by the -- "Stack.Config" module. data ConfigPrettyException = ParseConfigFileException !(Path Abs File) !ParseException | StackWorkEnvNotRelativeDir !String | MultiplePackageIndices [PackageIndexConfig] | DuplicateLocalPackageNames ![(PackageName, [PackageLocation])] deriving (Show, Typeable) instance Pretty ConfigPrettyException where pretty (ParseConfigFileException configFile exception) = "[S-6602]" <> line <> fillSep [ flow "Stack could not load and parse" , pretty configFile , flow "as a YAML configuraton file." ] <> blankLine <> flow "While loading and parsing, Stack encountered the following \ \error:" <> blankLine <> string (Yaml.prettyPrintParseException exception) <> blankLine <> fillSep [ flow "For help about the content of Stack's YAML configuration \ \files, see (for the most recent release of Stack)" , style Url "http://docs.haskellstack.org/en/stable/yaml_configuration/" <> "." ] pretty (StackWorkEnvNotRelativeDir x) = "[S-7462]" <> line <> flow "Stack failed to interpret the value of the STACK_WORK \ \environment variable as a valid relative path to a directory. \ \Stack will not accept an absolute path. A path containing a \ \.. (parent directory) component is not valid." <> blankLine <> fillSep [ flow "If set, Stack expects the value to identify the location \ \of Stack's work directory, relative to the root directory \ \of the project or package. Stack encountered the value:" , style Error (fromString x) <> "." 
] pretty (MultiplePackageIndices pics) = "[S-3251]" <> line <> fillSep [ flow "When using the" , style Shell "package-indices" , flow "key to override the default package index, you must \ \provide exactly one value, received:" , bulletedList (map (string . show) pics) ] <> blankLine <> packageIndicesWarning pretty (DuplicateLocalPackageNames pairs) = "[S-5470]" <> line <> fillSep [ flow "The same package name is used in more than one local package or" , style Shell "extra-deps" <> "." ] <> mconcat (map go pairs) where go (name, dirs) = blankLine <> fillSep [ style Error (fromPackageName name) , flow "used in:" ] <> line <> bulletedList (map (fromString . T.unpack . textDisplay) dirs) instance Exception ConfigPrettyException data ParseAbsolutePathException = ParseAbsolutePathException String String deriving (Show, Typeable) instance Exception ParseAbsolutePathException where displayException (ParseAbsolutePathException envVar dir) = concat [ "Error: [S-9437]\n" , "Failed to parse " , envVar , " environment variable (expected absolute directory): " , dir ] packageIndicesWarning :: StyleDoc packageIndicesWarning = fillSep [ "The" , style Shell "package-indices" , flow "key is deprecated in favour of" , style Shell "package-index" <> "." ] stack-2.15.7/src/Stack/Types/ConfigMonoid.hs0000644000000000000000000005452314620153446016766 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.ConfigMonoid ( ConfigMonoid (..) 
, parseConfigMonoid , parseConfigMonoidObject , configMonoidAllowDifferentUserName , configMonoidGHCVariantName , configMonoidInstallGHCName , configMonoidSystemGHCName ) where import Data.Aeson.Types ( Object, Value ) import Data.Aeson.WarningParser ( WarningParser, WithJSONWarnings, (..:?), (..!=) , jsonSubWarnings, jsonSubWarningsT, jsonSubWarningsTT , withObjectWarnings ) import Casa.Client ( CasaRepoPrefix ) import Control.Monad.Writer ( tell ) import Data.Coerce ( coerce ) import qualified Data.Map as Map import qualified Data.Map.Strict as M import qualified Data.Monoid as Monoid import Data.Monoid.Map ( MonoidMap (..) ) import qualified Data.Yaml as Yaml import Distribution.Version ( anyVersion ) import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) import Stack.Prelude hiding ( snapshotLocation ) import Stack.Types.AllowNewerDeps ( AllowNewerDeps ) import Stack.Types.ApplyGhcOptions ( ApplyGhcOptions (..) ) import Stack.Types.ApplyProgOptions ( ApplyProgOptions (..) ) import Stack.Types.BuildOptsMonoid ( BuildOptsMonoid ) import Stack.Types.Casa ( CasaOptsMonoid ) import Stack.Types.CabalConfigKey ( CabalConfigKey ) import Stack.Types.ColorWhen ( ColorWhen ) import Stack.Types.Compiler ( CompilerRepository ) import Stack.Types.CompilerBuild ( CompilerBuild ) import Stack.Types.Docker ( DockerOptsMonoid, VersionRangeJSON (..) ) import Stack.Types.DumpLogs ( DumpLogs ) import Stack.Types.GhcOptionKey ( GhcOptionKey (..) ) import Stack.Types.GhcOptions ( GhcOptions (..) ) import Stack.Types.GHCVariant ( GHCVariant ) import Stack.Types.Nix ( NixOptsMonoid ) import Stack.Types.PvpBounds ( PvpBounds ) import Stack.Types.SCM ( SCM ) import Stack.Types.SetupInfo ( SetupInfo ) import Stack.Types.TemplateName ( TemplateName ) import Stack.Types.Version ( IntersectingVersionRange (..), VersionCheck ) import qualified System.FilePath as FilePath -- | An uninterpreted representation of configuration options. 
-- Configurations may be "cascaded" using mappend (left-biased). data ConfigMonoid = ConfigMonoid { stackRoot :: !(First (Path Abs Dir)) -- ^ See: 'clStackRoot' , workDir :: !(First (Path Rel Dir)) -- ^ See: 'configWorkDir'. , buildOpts :: !BuildOptsMonoid -- ^ build options. , dockerOpts :: !DockerOptsMonoid -- ^ Docker options. , nixOpts :: !NixOptsMonoid -- ^ Options for the execution environment (nix-shell or container) , connectionCount :: !(First Int) -- ^ See: 'configConnectionCount' , hideTHLoading :: !FirstTrue -- ^ See: 'configHideTHLoading' , prefixTimestamps :: !(First Bool) -- ^ See: 'configPrefixTimestamps' , latestSnapshot :: !(First Text) -- ^ See: 'configLatestSnapshot' , packageIndex :: !(First PackageIndexConfig) -- ^ See: 'withPantryConfig' , packageIndices :: !(First [PackageIndexConfig]) -- ^ Deprecated in favour of package-index , systemGHC :: !(First Bool) -- ^ See: 'configSystemGHC' , installGHC :: !FirstTrue -- ^ See: 'configInstallGHC' , skipGHCCheck :: !FirstFalse -- ^ See: 'configSkipGHCCheck' , skipMsys :: !FirstFalse -- ^ See: 'configSkipMsys' , compilerCheck :: !(First VersionCheck) -- ^ See: 'configCompilerCheck' , compilerRepository :: !(First CompilerRepository) -- ^ See: 'configCompilerRepository' , requireStackVersion :: !IntersectingVersionRange -- ^ See: 'configRequireStackVersion' , arch :: !(First String) -- ^ Used for overriding the platform , ghcVariant :: !(First GHCVariant) -- ^ Used for overriding the platform , ghcBuild :: !(First CompilerBuild) -- ^ Used for overriding the GHC build , jobs :: !(First Int) -- ^ See: 'configJobs' , extraIncludeDirs :: ![FilePath] -- ^ See: 'configExtraIncludeDirs' , extraLibDirs :: ![FilePath] -- ^ See: 'configExtraLibDirs' , customPreprocessorExts :: ![Text] -- ^ See: 'configCustomPreprocessorExts' , overrideGccPath :: !(First (Path Abs File)) -- ^ Allow users to override the path to gcc , overrideHpack :: !(First FilePath) -- ^ Use Hpack executable (overrides bundled Hpack) , 
concurrentTests :: !(First Bool) -- ^ See: 'configConcurrentTests' , localBinPath :: !(First FilePath) -- ^ Used to override the binary installation dir , templateParameters :: !(Map Text Text) -- ^ Template parameters. , scmInit :: !(First SCM) -- ^ Initialize SCM (e.g. git init) when making new projects? , ghcOptionsByName :: !(MonoidMap PackageName (Monoid.Dual [Text])) -- ^ See 'configGhcOptionsByName'. Uses 'Monoid.Dual' so that -- options from the configs on the right come first, so that they -- can be overridden. , ghcOptionsByCat :: !(MonoidMap ApplyGhcOptions (Monoid.Dual [Text])) -- ^ See 'configGhcOptionsAll'. Uses 'Monoid.Dual' so that options -- from the configs on the right come first, so that they can be -- overridden. , cabalConfigOpts :: !(MonoidMap CabalConfigKey (Monoid.Dual [Text])) -- ^ See 'configCabalConfigOpts'. , extraPath :: ![Path Abs Dir] -- ^ Additional paths to search for executables in , setupInfoLocations :: ![String] -- ^ See 'configSetupInfoLocations' , setupInfoInline :: !SetupInfo -- ^ See 'configSetupInfoInline' , localProgramsBase :: !(First (Path Abs Dir)) -- ^ Override the default local programs dir, where e.g. GHC is installed. , pvpBounds :: !(First PvpBounds) -- ^ See 'configPvpBounds' , modifyCodePage :: !FirstTrue -- ^ See 'configModifyCodePage' , rebuildGhcOptions :: !FirstFalse -- ^ See 'configMonoidRebuildGhcOptions' , applyGhcOptions :: !(First ApplyGhcOptions) -- ^ See 'configApplyGhcOptions' , applyProgOptions :: !(First ApplyProgOptions) -- ^ See 'configApplyProgOptions' , allowNewer :: !(First Bool) -- ^ See 'configMonoidAllowNewer' , allowNewerDeps :: !(Maybe AllowNewerDeps) -- ^ See 'configMonoidAllowNewerDeps' , defaultTemplate :: !(First TemplateName) -- ^ The default template to use when none is specified. -- (If Nothing, the 'default' default template is used.) , allowDifferentUser :: !(First Bool) -- ^ Allow users other than the Stack root owner to use the Stack -- installation. 
, dumpLogs :: !(First DumpLogs) -- ^ See 'configDumpLogs' , saveHackageCreds :: !(First Bool) -- ^ See 'configSaveHackageCreds' , hackageBaseUrl :: !(First Text) -- ^ See 'configHackageBaseUrl' , colorWhen :: !(First ColorWhen) -- ^ When to use 'ANSI' colors , styles :: !StylesUpdate , hideSourcePaths :: !FirstTrue -- ^ See 'configHideSourcePaths' , recommendUpgrade :: !FirstTrue -- ^ See 'configRecommendUpgrade' , notifyIfNixOnPath :: !FirstTrue -- ^ See 'configNotifyIfNixOnPath' , notifyIfGhcUntested :: !FirstTrue -- ^ See 'configNotifyIfGhcUntested' , notifyIfCabalUntested :: !FirstTrue -- ^ See 'configNotifyIfCabalUntested' , notifyIfArchUnknown :: !FirstTrue -- ^ See 'configNotifyIfArchUnknown' , casaOpts :: !CasaOptsMonoid -- ^ Casa configuration options. , casaRepoPrefix :: !(First CasaRepoPrefix) -- ^ Casa repository prefix (deprecated). , snapshotLocation :: !(First Text) -- ^ Custom location of LTS/Nightly snapshots , noRunCompile :: !FirstFalse -- ^ See: 'configNoRunCompile' , stackDeveloperMode :: !(First Bool) -- ^ See 'configStackDeveloperMode' } deriving (Generic, Show) instance Semigroup ConfigMonoid where (<>) = mappenddefault instance Monoid ConfigMonoid where mempty = memptydefault mappend = (<>) parseConfigMonoid :: Path Abs Dir -> Value -> Yaml.Parser (WithJSONWarnings ConfigMonoid) parseConfigMonoid = withObjectWarnings "ConfigMonoid" . parseConfigMonoidObject -- | Parse a partial configuration. Used both to parse both a standalone config -- file and a project file, so that a sub-parser is not required, which would -- interfere with warnings for missing fields. parseConfigMonoidObject :: Path Abs Dir -> Object -> WarningParser ConfigMonoid parseConfigMonoidObject rootDir obj = do -- Parsing 'stackRoot' from 'stackRoot'/config.yaml would be nonsensical let stackRoot = First Nothing workDir <- First <$> obj ..:? configMonoidWorkDirName buildOpts <- jsonSubWarnings (obj ..:? 
configMonoidBuildOptsName ..!= mempty) dockerOpts <- jsonSubWarnings (obj ..:? configMonoidDockerOptsName ..!= mempty) nixOpts <- jsonSubWarnings (obj ..:? configMonoidNixOptsName ..!= mempty) connectionCount <- First <$> obj ..:? configMonoidConnectionCountName hideTHLoading <- FirstTrue <$> obj ..:? configMonoidHideTHLoadingName prefixTimestamps <- First <$> obj ..:? configMonoidPrefixTimestampsName murls :: Maybe Value <- obj ..:? configMonoidUrlsName latestSnapshot <- case murls of Nothing -> pure $ First Nothing Just urls -> jsonSubWarnings $ lift $ withObjectWarnings "urls" (\o -> First <$> o ..:? "latest-snapshot" :: WarningParser (First Text)) urls packageIndex <- First <$> jsonSubWarningsT (obj ..:? configMonoidPackageIndexName) packageIndices <- First <$> jsonSubWarningsTT (obj ..:? configMonoidPackageIndicesName) systemGHC <- First <$> obj ..:? configMonoidSystemGHCName installGHC <- FirstTrue <$> obj ..:? configMonoidInstallGHCName skipGHCCheck <- FirstFalse <$> obj ..:? configMonoidSkipGHCCheckName skipMsys <- FirstFalse <$> obj ..:? configMonoidSkipMsysName requireStackVersion <- IntersectingVersionRange . (.versionRangeJSON) <$> ( obj ..:? configMonoidRequireStackVersionName ..!= VersionRangeJSON anyVersion ) arch <- First <$> obj ..:? configMonoidArchName ghcVariant <- First <$> obj ..:? configMonoidGHCVariantName ghcBuild <- First <$> obj ..:? configMonoidGHCBuildName jobs <- First <$> obj ..:? configMonoidJobsName extraIncludeDirs <- map (toFilePath rootDir FilePath.) <$> obj ..:? configMonoidExtraIncludeDirsName ..!= [] extraLibDirs <- map (toFilePath rootDir FilePath.) <$> obj ..:? configMonoidExtraLibDirsName ..!= [] customPreprocessorExts <- obj ..:? configMonoidCustomPreprocessorExtsName ..!= [] overrideGccPath <- First <$> obj ..:? configMonoidOverrideGccPathName overrideHpack <- First <$> obj ..:? configMonoidOverrideHpackName concurrentTests <- First <$> obj ..:? configMonoidConcurrentTestsName localBinPath <- First <$> obj ..:? 
configMonoidLocalBinPathName templates <- obj ..:? "templates" (scmInit, templateParameters) <- case templates of Nothing -> pure (First Nothing,M.empty) Just tobj -> do scmInit <- tobj ..:? configMonoidScmInitName params <- tobj ..:? configMonoidTemplateParametersName pure (First scmInit,fromMaybe M.empty params) compilerCheck <- First <$> obj ..:? configMonoidCompilerCheckName compilerRepository <- First <$> (obj ..:? configMonoidCompilerRepositoryName) options <- Map.map (.ghcOptions) <$> obj ..:? configMonoidGhcOptionsName ..!= (mempty :: Map GhcOptionKey GhcOptions) optionsEverything <- case (Map.lookup GOKOldEverything options, Map.lookup GOKEverything options) of (Just _, Just _) -> fail "Cannot specify both `*` and `$everything` GHC options" (Nothing, Just x) -> pure x (Just x, Nothing) -> do tell "The `*` ghc-options key is not recommended. Consider using \ \$locals, or if really needed, $everything" pure x (Nothing, Nothing) -> pure [] let ghcOptionsByCat = coerce $ Map.fromList [ (AGOEverything, optionsEverything) , (AGOLocals, Map.findWithDefault [] GOKLocals options) , (AGOTargets, Map.findWithDefault [] GOKTargets options) ] ghcOptionsByName = coerce $ Map.fromList [(name, opts) | (GOKPackage name, opts) <- Map.toList options] cabalConfigOpts' <- obj ..:? configMonoidConfigureOptionsName ..!= mempty let cabalConfigOpts = coerce (cabalConfigOpts' :: Map CabalConfigKey [Text]) extraPath <- obj ..:? configMonoidExtraPathName ..!= [] setupInfoLocations <- obj ..:? configMonoidSetupInfoLocationsName ..!= [] setupInfoInline <- jsonSubWarningsT (obj ..:? configMonoidSetupInfoInlineName) ..!= mempty localProgramsBase <- First <$> obj ..:? configMonoidLocalProgramsBaseName pvpBounds <- First <$> obj ..:? configMonoidPvpBoundsName modifyCodePage <- FirstTrue <$> obj ..:? configMonoidModifyCodePageName rebuildGhcOptions <- FirstFalse <$> obj ..:? configMonoidRebuildGhcOptionsName applyGhcOptions <- First <$> obj ..:? 
configMonoidApplyGhcOptionsName applyProgOptions <- First <$> obj ..:? configMonoidApplyProgOptionsName allowNewer <- First <$> obj ..:? configMonoidAllowNewerName allowNewerDeps <- obj ..:? configMonoidAllowNewerDepsName defaultTemplate <- First <$> obj ..:? configMonoidDefaultTemplateName allowDifferentUser <- First <$> obj ..:? configMonoidAllowDifferentUserName dumpLogs <- First <$> obj ..:? configMonoidDumpLogsName saveHackageCreds <- First <$> obj ..:? configMonoidSaveHackageCredsName hackageBaseUrl <- First <$> obj ..:? configMonoidHackageBaseUrlName configMonoidColorWhenUS <- obj ..:? configMonoidColorWhenUSName configMonoidColorWhenGB <- obj ..:? configMonoidColorWhenGBName let colorWhen = First $ configMonoidColorWhenUS <|> configMonoidColorWhenGB configMonoidStylesUS <- obj ..:? configMonoidStylesUSName configMonoidStylesGB <- obj ..:? configMonoidStylesGBName let styles = fromMaybe mempty $ configMonoidStylesUS <|> configMonoidStylesGB hideSourcePaths <- FirstTrue <$> obj ..:? configMonoidHideSourcePathsName recommendUpgrade <- FirstTrue <$> obj ..:? configMonoidRecommendUpgradeName notifyIfNixOnPath <- FirstTrue <$> obj ..:? configMonoidNotifyIfNixOnPathName notifyIfGhcUntested <- FirstTrue <$> obj ..:? configMonoidNotifyIfGhcUntestedName notifyIfCabalUntested <- FirstTrue <$> obj ..:? configMonoidNotifyIfCabalUntestedName notifyIfArchUnknown <- FirstTrue <$> obj ..:? configMonoidNotifyIfArchUnknownName casaOpts <- jsonSubWarnings (obj ..:? configMonoidCasaOptsName ..!= mempty) casaRepoPrefix <- First <$> obj ..:? configMonoidCasaRepoPrefixName snapshotLocation <- First <$> obj ..:? configMonoidSnapshotLocationName noRunCompile <- FirstFalse <$> obj ..:? configMonoidNoRunCompileName stackDeveloperMode <- First <$> obj ..:? 
configMonoidStackDeveloperModeName pure ConfigMonoid { stackRoot , workDir , buildOpts , dockerOpts , nixOpts , connectionCount , hideTHLoading , prefixTimestamps , latestSnapshot , packageIndex , packageIndices , systemGHC , installGHC , skipGHCCheck , skipMsys , compilerCheck , compilerRepository , requireStackVersion , arch , ghcVariant , ghcBuild , jobs , extraIncludeDirs , extraLibDirs , customPreprocessorExts , overrideGccPath , overrideHpack , concurrentTests , localBinPath , templateParameters , scmInit , ghcOptionsByName , ghcOptionsByCat , cabalConfigOpts , extraPath , setupInfoLocations , setupInfoInline , localProgramsBase , pvpBounds , modifyCodePage , rebuildGhcOptions , applyGhcOptions , applyProgOptions , allowNewer , allowNewerDeps , defaultTemplate , allowDifferentUser , dumpLogs , saveHackageCreds , hackageBaseUrl , colorWhen , styles , hideSourcePaths , recommendUpgrade , notifyIfNixOnPath , notifyIfGhcUntested , notifyIfCabalUntested , notifyIfArchUnknown , casaOpts , casaRepoPrefix , snapshotLocation , noRunCompile , stackDeveloperMode } configMonoidWorkDirName :: Text configMonoidWorkDirName = "work-dir" configMonoidBuildOptsName :: Text configMonoidBuildOptsName = "build" configMonoidDockerOptsName :: Text configMonoidDockerOptsName = "docker" configMonoidNixOptsName :: Text configMonoidNixOptsName = "nix" configMonoidConfigureOptionsName :: Text configMonoidConfigureOptionsName = "configure-options" configMonoidConnectionCountName :: Text configMonoidConnectionCountName = "connection-count" configMonoidHideTHLoadingName :: Text configMonoidHideTHLoadingName = "hide-th-loading" configMonoidPrefixTimestampsName :: Text configMonoidPrefixTimestampsName = "build-output-timestamps" configMonoidUrlsName :: Text configMonoidUrlsName = "urls" configMonoidPackageIndexName :: Text configMonoidPackageIndexName = "package-index" -- Deprecated in favour of package-index configMonoidPackageIndicesName :: Text configMonoidPackageIndicesName = 
"package-indices" configMonoidSystemGHCName :: Text configMonoidSystemGHCName = "system-ghc" configMonoidInstallGHCName :: Text configMonoidInstallGHCName = "install-ghc" configMonoidSkipGHCCheckName :: Text configMonoidSkipGHCCheckName = "skip-ghc-check" configMonoidSkipMsysName :: Text configMonoidSkipMsysName = "skip-msys" configMonoidRequireStackVersionName :: Text configMonoidRequireStackVersionName = "require-stack-version" configMonoidArchName :: Text configMonoidArchName = "arch" configMonoidGHCVariantName :: Text configMonoidGHCVariantName = "ghc-variant" configMonoidGHCBuildName :: Text configMonoidGHCBuildName = "ghc-build" configMonoidJobsName :: Text configMonoidJobsName = "jobs" configMonoidExtraIncludeDirsName :: Text configMonoidExtraIncludeDirsName = "extra-include-dirs" configMonoidExtraLibDirsName :: Text configMonoidExtraLibDirsName = "extra-lib-dirs" configMonoidCustomPreprocessorExtsName :: Text configMonoidCustomPreprocessorExtsName = "custom-preprocessor-extensions" configMonoidOverrideGccPathName :: Text configMonoidOverrideGccPathName = "with-gcc" configMonoidOverrideHpackName :: Text configMonoidOverrideHpackName = "with-hpack" configMonoidConcurrentTestsName :: Text configMonoidConcurrentTestsName = "concurrent-tests" configMonoidLocalBinPathName :: Text configMonoidLocalBinPathName = "local-bin-path" configMonoidScmInitName :: Text configMonoidScmInitName = "scm-init" configMonoidTemplateParametersName :: Text configMonoidTemplateParametersName = "params" configMonoidCompilerCheckName :: Text configMonoidCompilerCheckName = "compiler-check" configMonoidCompilerRepositoryName :: Text configMonoidCompilerRepositoryName = "compiler-repository" configMonoidGhcOptionsName :: Text configMonoidGhcOptionsName = "ghc-options" configMonoidExtraPathName :: Text configMonoidExtraPathName = "extra-path" configMonoidSetupInfoLocationsName :: Text configMonoidSetupInfoLocationsName = "setup-info-locations" configMonoidSetupInfoInlineName :: Text 
configMonoidSetupInfoInlineName = "setup-info" configMonoidLocalProgramsBaseName :: Text configMonoidLocalProgramsBaseName = "local-programs-path" configMonoidPvpBoundsName :: Text configMonoidPvpBoundsName = "pvp-bounds" configMonoidModifyCodePageName :: Text configMonoidModifyCodePageName = "modify-code-page" configMonoidRebuildGhcOptionsName :: Text configMonoidRebuildGhcOptionsName = "rebuild-ghc-options" configMonoidApplyGhcOptionsName :: Text configMonoidApplyGhcOptionsName = "apply-ghc-options" configMonoidApplyProgOptionsName :: Text configMonoidApplyProgOptionsName = "apply-prog-options" configMonoidAllowNewerName :: Text configMonoidAllowNewerName = "allow-newer" configMonoidAllowNewerDepsName :: Text configMonoidAllowNewerDepsName = "allow-newer-deps" configMonoidDefaultTemplateName :: Text configMonoidDefaultTemplateName = "default-template" configMonoidAllowDifferentUserName :: Text configMonoidAllowDifferentUserName = "allow-different-user" configMonoidDumpLogsName :: Text configMonoidDumpLogsName = "dump-logs" configMonoidSaveHackageCredsName :: Text configMonoidSaveHackageCredsName = "save-hackage-creds" configMonoidHackageBaseUrlName :: Text configMonoidHackageBaseUrlName = "hackage-base-url" configMonoidColorWhenUSName :: Text configMonoidColorWhenUSName = "color" configMonoidColorWhenGBName :: Text configMonoidColorWhenGBName = "colour" configMonoidStylesUSName :: Text configMonoidStylesUSName = "stack-colors" configMonoidStylesGBName :: Text configMonoidStylesGBName = "stack-colours" configMonoidHideSourcePathsName :: Text configMonoidHideSourcePathsName = "hide-source-paths" configMonoidRecommendUpgradeName :: Text configMonoidRecommendUpgradeName = "recommend-stack-upgrade" configMonoidNotifyIfNixOnPathName :: Text configMonoidNotifyIfNixOnPathName = "notify-if-nix-on-path" configMonoidNotifyIfGhcUntestedName :: Text configMonoidNotifyIfGhcUntestedName = "notify-if-ghc-untested" configMonoidNotifyIfCabalUntestedName :: Text 
configMonoidNotifyIfCabalUntestedName = "notify-if-cabal-untested" configMonoidNotifyIfArchUnknownName :: Text configMonoidNotifyIfArchUnknownName = "notify-if-arch-unknown" configMonoidCasaOptsName :: Text configMonoidCasaOptsName = "casa" configMonoidCasaRepoPrefixName :: Text configMonoidCasaRepoPrefixName = "casa-repo-prefix" configMonoidSnapshotLocationName :: Text configMonoidSnapshotLocationName = "snapshot-location-base" configMonoidNoRunCompileName :: Text configMonoidNoRunCompileName = "script-no-run-compile" configMonoidStackDeveloperModeName :: Text configMonoidStackDeveloperModeName = "stack-developer-mode" stack-2.15.7/src/Stack/Types/ConfigureOpts.hs0000644000000000000000000002010614620153446017170 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.ConfigureOpts ( ConfigureOpts (..) , BaseConfigOpts (..) , configureOpts , configureOptsPathRelated , configureOptsNonPathRelated ) where import qualified Data.Map as Map import qualified Data.Text as T import Distribution.Types.MungedPackageName ( decodeCompatPackageName ) import Distribution.Types.PackageName ( unPackageName ) import Distribution.Types.UnqualComponentName ( unUnqualComponentName ) import qualified Distribution.Version as C import Path ( (), parseRelDir ) import Path.Extra ( toFilePathNoTrailingSep ) import Stack.Constants ( bindirSuffix, compilerOptionsCabalFlag, docDirSuffix , relDirEtc, relDirLib, relDirLibexec, relDirShare ) import Stack.Prelude import Stack.Types.BuildOpts ( BuildOpts (..) ) import Stack.Types.BuildOptsCLI ( BuildOptsCLI ) import Stack.Types.Compiler ( getGhcVersion, whichCompiler ) import Stack.Types.Config ( Config (..), HasConfig (..) ) import Stack.Types.EnvConfig ( EnvConfig, actualCompilerVersionL ) import Stack.Types.GhcPkgId ( GhcPkgId, ghcPkgIdString ) import Stack.Types.IsMutable ( IsMutable (..) 
) import Stack.Types.Package ( Package (..) ) import System.FilePath ( pathSeparator ) -- | Basic information used to calculate what the configure options are data BaseConfigOpts = BaseConfigOpts { snapDB :: !(Path Abs Dir) , localDB :: !(Path Abs Dir) , snapInstallRoot :: !(Path Abs Dir) , localInstallRoot :: !(Path Abs Dir) , buildOpts :: !BuildOpts , buildOptsCLI :: !BuildOptsCLI , extraDBs :: ![Path Abs Dir] } deriving Show -- | Render a @BaseConfigOpts@ to an actual list of options configureOpts :: EnvConfig -> BaseConfigOpts -> Map PackageIdentifier GhcPkgId -- ^ dependencies -> Bool -- ^ local non-extra-dep? -> IsMutable -> Package -> ConfigureOpts configureOpts econfig bco deps isLocal isMutable package = ConfigureOpts { pathRelated = configureOptsPathRelated bco isMutable package , nonPathRelated = configureOptsNonPathRelated econfig bco deps isLocal package } configureOptsPathRelated :: BaseConfigOpts -> IsMutable -> Package -> [String] configureOptsPathRelated bco isMutable package = concat [ ["--user", "--package-db=clear", "--package-db=global"] , map (("--package-db=" ++) . 
toFilePathNoTrailingSep) $ case isMutable of Immutable -> bco.extraDBs ++ [bco.snapDB] Mutable -> bco.extraDBs ++ [bco.snapDB] ++ [bco.localDB] , [ "--libdir=" ++ toFilePathNoTrailingSep (installRoot relDirLib) , "--bindir=" ++ toFilePathNoTrailingSep (installRoot bindirSuffix) , "--datadir=" ++ toFilePathNoTrailingSep (installRoot relDirShare) , "--libexecdir=" ++ toFilePathNoTrailingSep (installRoot relDirLibexec) , "--sysconfdir=" ++ toFilePathNoTrailingSep (installRoot relDirEtc) , "--docdir=" ++ toFilePathNoTrailingSep docDir , "--htmldir=" ++ toFilePathNoTrailingSep docDir , "--haddockdir=" ++ toFilePathNoTrailingSep docDir] ] where installRoot = case isMutable of Immutable -> bco.snapInstallRoot Mutable -> bco.localInstallRoot docDir = case pkgVerDir of Nothing -> installRoot docDirSuffix Just dir -> installRoot docDirSuffix dir pkgVerDir = parseRelDir ( packageIdentifierString (PackageIdentifier package.name package.version) ++ [pathSeparator] ) -- | Same as 'configureOpts', but does not include directory path options configureOptsNonPathRelated :: EnvConfig -> BaseConfigOpts -> Map PackageIdentifier GhcPkgId -- ^ Dependencies. -> Bool -- ^ Is this a local, non-extra-dep? 
-> Package -> [String] configureOptsNonPathRelated econfig bco deps isLocal package = concat [ depOptions , [ "--enable-library-profiling" | bopts.libProfile || bopts.exeProfile ] , ["--enable-profiling" | bopts.exeProfile && isLocal] , ["--enable-split-objs" | bopts.splitObjs] , [ "--disable-library-stripping" | not $ bopts.libStrip || bopts.exeStrip ] , ["--disable-executable-stripping" | not bopts.exeStrip && isLocal] , map (\(name,enabled) -> "-f" <> (if enabled then "" else "-") <> flagNameString name) (Map.toList flags) , map T.unpack package.cabalConfigOpts , processGhcOptions package.ghcOptions , map ("--extra-include-dirs=" ++) config.extraIncludeDirs , map ("--extra-lib-dirs=" ++) config.extraLibDirs , maybe [] (\customGcc -> ["--with-gcc=" ++ toFilePath customGcc]) config.overrideGccPath , ["--exact-configuration"] , ["--ghc-option=-fhide-source-paths" | hideSourcePaths cv] ] where -- This function parses the GHC options that are providing in the -- stack.yaml file. In order to handle RTS arguments correctly, we need -- to provide the RTS arguments as a single argument. processGhcOptions :: [Text] -> [String] processGhcOptions args = let (preRtsArgs, mid) = break ("+RTS" ==) args (rtsArgs, end) = break ("-RTS" ==) mid fullRtsArgs = case rtsArgs of [] -> -- This means that we didn't have any RTS args - no `+RTS` - and -- therefore no need for a `-RTS`. [] _ -> -- In this case, we have some RTS args. `break` puts the `"-RTS"` -- string in the `snd` list, so we want to append it on the end of -- `rtsArgs` here. -- -- We're not checking that `-RTS` is the first element of `end`. -- This is because the GHC RTS allows you to omit a trailing -RTS -- if that's the last of the arguments. This permits a GHC options -- in stack.yaml that matches what you might pass directly to GHC. 
[T.unwords $ rtsArgs ++ ["-RTS"]] -- We drop the first element from `end`, because it is always either -- `"-RTS"` (and we don't want that as a separate argument) or the list -- is empty (and `drop _ [] = []`). postRtsArgs = drop 1 end newArgs = concat [preRtsArgs, fullRtsArgs, postRtsArgs] in concatMap (\x -> [compilerOptionsCabalFlag wc, T.unpack x]) newArgs wc = view (actualCompilerVersionL . to whichCompiler) econfig cv = view (actualCompilerVersionL . to getGhcVersion) econfig hideSourcePaths ghcVersion = ghcVersion >= C.mkVersion [8, 2] && config.hideSourcePaths config = view configL econfig bopts = bco.buildOpts -- Unioning atop defaults is needed so that all flags are specified with -- --exact-configuration. flags = package.flags `Map.union` package.defaultFlags depOptions = map toDepOption $ Map.toList deps toDepOption (PackageIdentifier name _, gid) = concat [ "--dependency=" , depOptionKey , "=" , ghcPkgIdString gid ] where MungedPackageName subPkgName lib = decodeCompatPackageName name depOptionKey = case lib of LMainLibName -> unPackageName name LSubLibName cn -> unPackageName subPkgName <> ":" <> unUnqualComponentName cn -- | Configure options to be sent to Setup.hs configure. data ConfigureOpts = ConfigureOpts { pathRelated :: ![String] -- ^ Options related to various paths. We separate these out since they do -- not have an effect on the contents of the compiled binary for checking -- if we can use an existing precompiled cache. , nonPathRelated :: ![String] -- ^ Options other than path-related options. 
} deriving (Data, Eq, Generic, Show, Typeable) instance NFData ConfigureOpts stack-2.15.7/src/Stack/Types/Curator.hs0000644000000000000000000000527314604306201016017 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Module exporting the 'Curator' type, used to represent Stack's -- project-specific @curator@ option, which supports the needs of the -- [@curator@ tool](https://github.com/commercialhaskell/curator). module Stack.Types.Curator ( Curator (..) ) where import Data.Aeson.Types ( FromJSON (..), ToJSON (..), (.=), object ) import Data.Aeson.WarningParser ( WithJSONWarnings (..), (..:?), (..!=), withObjectWarnings ) import qualified Data.Set as Set import Stack.Prelude -- | Type representing configuration options which support the needs of the -- [@curator@ tool](https://github.com/commercialhaskell/curator). data Curator = Curator { skipTest :: !(Set PackageName) -- ^ Packages for which Stack should ignore test suites. , expectTestFailure :: !(Set PackageName) -- ^ Packages for which Stack should expect building test suites to fail. , skipBenchmark :: !(Set PackageName) -- ^ Packages for which Stack should ignore benchmarks. , expectBenchmarkFailure :: !(Set PackageName) -- ^ Packages for which Stack should expect building benchmarks to fail. , skipHaddock :: !(Set PackageName) -- ^ Packages for which Stack should ignore creating Haddock documentation. , expectHaddockFailure :: !(Set PackageName) -- ^ Packages for which Stack should expect creating Haddock documentation -- to fail. 
} deriving Show

-- | Serialise a 'Curator' to JSON, using the same keys that the 'FromJSON'
-- instance below parses.
instance ToJSON Curator where
  toJSON curator = object
    [ "skip-test" .= Set.map CabalString curator.skipTest
    , "expect-test-failure" .= Set.map CabalString curator.expectTestFailure
    , "skip-bench" .= Set.map CabalString curator.skipBenchmark
      -- Fixed: this key previously serialised 'expectTestFailure', so the
      -- 'expectBenchmarkFailure' set was silently dropped on a round-trip
      -- through JSON.
    , "expect-benchmark-failure" .=
        Set.map CabalString curator.expectBenchmarkFailure
    , "skip-haddock" .= Set.map CabalString curator.skipHaddock
    , "expect-haddock-failure" .=
        Set.map CabalString curator.expectHaddockFailure
    ]

-- | Parse a 'Curator' from a JSON object. Every key is optional and defaults
-- to the empty set.
instance FromJSON (WithJSONWarnings Curator) where
  parseJSON = withObjectWarnings "Curator" $ \o -> Curator
    <$> fmap (Set.map unCabalString) (o ..:? "skip-test" ..!= mempty)
    <*> fmap (Set.map unCabalString) (o ..:? "expect-test-failure" ..!= mempty)
    <*> fmap (Set.map unCabalString) (o ..:? "skip-bench" ..!= mempty)
    <*> fmap
          (Set.map unCabalString)
          (o ..:? "expect-benchmark-failure" ..!= mempty)
    <*> fmap (Set.map unCabalString) (o ..:? "skip-haddock" ..!= mempty)
    <*> fmap
          (Set.map unCabalString)
          (o ..:? "expect-haddock-failure" ..!= mempty)
stack-2.15.7/src/Stack/Types/Dependency.hs0000644000000000000000000000545114620153446016465 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE NoFieldSelectors #-}
{-# LANGUAGE OverloadedRecordDot #-}

module Stack.Types.Dependency
  ( DepValue (..)
  , DepType (..)
  , DepLibrary (..)
  , cabalToStackDep
  , cabalExeToStackDep
  , cabalSetupDepsToStackDep
  , libraryDepFromVersionRange
  , isDepTypeLibrary
  , getDepSublib
  ) where

import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Distribution.PackageDescription as Cabal
import Distribution.Types.VersionRange ( VersionRange )
import Stack.Prelude
import Stack.Types.ComponentUtils ( StackUnqualCompName (..), fromCabalName )

-- | The value for a map from dependency name. This contains both the version
-- range and the type of dependency.
data DepValue = DepValue
  { versionRange :: !VersionRange
  , depType :: !DepType
  }
  deriving (Show, Typeable)

-- | Is this package being used as a library, or just as a build tool? If the
-- former, we need to ensure that a library actually exists. See
data DepType
  = AsLibrary !DepLibrary
  | AsBuildTool
  deriving (Eq, Show)

data DepLibrary = DepLibrary
  { main :: !Bool
  , subLib :: Set StackUnqualCompName
  }
  deriving (Eq, Show)

-- | The sub-library names of a dependency, if it is used as a library;
-- 'Nothing' for a build-tool dependency.
getDepSublib :: DepValue -> Maybe (Set StackUnqualCompName)
getDepSublib dep = case dep.depType of
  AsLibrary lib -> Just lib.subLib
  AsBuildTool -> Nothing

-- | A library dependency on the main (public) library only.
defaultDepLibrary :: DepLibrary
defaultDepLibrary = DepLibrary True mempty

-- | Is the dependency used as a library (as opposed to as a build tool)?
isDepTypeLibrary :: DepType -> Bool
isDepTypeLibrary depType = case depType of
  AsLibrary _ -> True
  AsBuildTool -> False

-- | Convert a Cabal library dependency to Stack's representation, splitting
-- the requested library names into a main-library flag and a set of
-- sub-library names.
cabalToStackDep :: Cabal.Dependency -> DepValue
cabalToStackDep (Cabal.Dependency _ range libNames) = DepValue
  { versionRange = range
  , depType = AsLibrary (uncurry DepLibrary collected)
  }
 where
  collected = foldr' step (False, Set.empty) libNames
  step LMainLibName (_, subs) = (True, subs)
  step (LSubLibName n) (hasMain, subs) =
    (hasMain, Set.insert (fromCabalName n) subs)

-- | Convert a Cabal executable (build-tool) dependency to Stack's
-- representation.
cabalExeToStackDep :: Cabal.ExeDependency -> DepValue
cabalExeToStackDep (Cabal.ExeDependency _ _name range) =
  DepValue { versionRange = range, depType = AsBuildTool }

-- | Convert the @setup-depends@ of a Cabal custom-setup stanza to a map of
-- Stack dependencies, keyed by package name.
cabalSetupDepsToStackDep :: Cabal.SetupBuildInfo -> Map PackageName DepValue
cabalSetupDepsToStackDep setupInfo =
  foldr' addDep Map.empty (Cabal.setupDepends setupInfo)
 where
  addDep dep@(Cabal.Dependency name _ _) =
    Map.insert name (cabalToStackDep dep)

-- | A library dependency on the main library only, with the given version
-- range.
libraryDepFromVersionRange :: VersionRange -> DepValue
libraryDepFromVersionRange range = DepValue
  { versionRange = range
  , depType = AsLibrary defaultDepLibrary
  }
stack-2.15.7/src/Stack/Types/DependencyTree.hs0000644000000000000000000000642414604306201017275 0ustar0000000000000000{-# LANGUAGE 
NoImplicitPrelude #-}
{-# LANGUAGE NoFieldSelectors #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE OverloadedStrings #-}

module Stack.Types.DependencyTree
  ( DependencyTree (..)
  , DotPayload (..)
  , licenseText
  , versionText
  ) where

import Data.Aeson ( ToJSON (..), Value, (.=), object )
import qualified Data.Set as Set
import qualified Data.Map as Map
import qualified Data.Text as Text
import Distribution.License ( License (..), licenseFromSPDX )
import qualified Distribution.SPDX.License as SPDX
import Distribution.Text ( display )
import Stack.Prelude hiding ( Display (..), pkgName, loadPackage )

-- | Information about a package in the dependency graph, when available.
data DotPayload = DotPayload
  { version :: Maybe Version
    -- ^ The package version.
  , license :: Maybe (Either SPDX.License License)
    -- ^ The license the package was released under.
  , location :: Maybe PackageLocation
    -- ^ The location of the package.
  }
  deriving (Eq, Show)

-- | A dependency graph: a set of root packages, and, for each package, its
-- direct dependencies together with its payload.
data DependencyTree =
  DependencyTree
    (Set PackageName)
    (Map PackageName (Set PackageName, DotPayload))

instance ToJSON DependencyTree where
  toJSON (DependencyTree _ dependencyMap) =
    toJSON $ foldToList dependencyToJSON dependencyMap

-- | Fold a map down to a list, visiting keys in descending order.
--
-- The previous implementation appended each element to the end of the
-- accumulator inside a right fold (@bs ++ [f k a]@), which is quadratic in
-- the size of the map. A left fold that conses yields the same
-- (descending-key) order in linear time.
foldToList :: (k -> a -> b) -> Map k a -> [b]
foldToList f = Map.foldlWithKey (\bs k a -> f k a : bs) []

-- | Render one package, its direct dependencies and its payload as a JSON
-- object. The @location@ field is omitted when the payload has no location.
dependencyToJSON :: PackageName -> (Set PackageName, DotPayload) -> Value
dependencyToJSON pkg (deps, payload) =
  let fieldsAlwaysPresent =
        [ "name" .= packageNameString pkg
        , "license" .= licenseText payload
        , "version" .= versionText payload
        , "dependencies" .= Set.map packageNameString deps
        ]
      loc = catMaybes [("location" .=) . pkgLocToJSON <$> payload.location]
  in  object $ fieldsAlwaysPresent ++ loc

-- | Render a package location as a JSON object, tagged with a @type@ field
-- identifying the kind of location.
pkgLocToJSON :: PackageLocation -> Value
pkgLocToJSON (PLMutable (ResolvedPath _ dir)) = object
  [ "type" .= ("project package" :: Text)
  , "url" .= ("file://" ++ toFilePath dir)
  ]
pkgLocToJSON (PLImmutable (PLIHackage pkgid _ _)) = object
  [ "type" .= ("hackage" :: Text)
  , "url" .= ("https://hackage.haskell.org/package/" ++ display pkgid)
  ]
pkgLocToJSON (PLImmutable (PLIArchive archive _)) =
  let url = case archiveLocation archive of
        ALUrl u -> u
        ALFilePath (ResolvedPath _ path) ->
          Text.pack $ "file://" ++ toFilePath path
  in  object
        [ "type" .= ("archive" :: Text)
        , "url" .= url
        , "sha256" .= archiveHash archive
        , "size" .= archiveSize archive
        ]
pkgLocToJSON (PLImmutable (PLIRepo repo _)) = object
  [ "type" .= case repoType repo of
      RepoGit -> "git" :: Text
      RepoHg -> "hg" :: Text
  , "url" .= repoUrl repo
  , "commit" .= repoCommit repo
  , "subdir" .= repoSubdir repo
  ]

-- | The payload's license rendered as text; empty when unknown.
licenseText :: DotPayload -> Text
licenseText payload =
  maybe "" (Text.pack . display . either licenseFromSPDX id) payload.license

-- | The payload's version rendered as text; empty when unknown.
versionText :: DotPayload -> Text
versionText payload = maybe "" (Text.pack . display) payload.version
stack-2.15.7/src/Stack/Types/Docker.hs0000644000000000000000000004577414620153446015622 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE NoFieldSelectors #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE OverloadedStrings #-}

-- | Docker types.
module Stack.Types.Docker
  ( DockerException (..)
  , DockerMonoidRepoOrImage (..)
  , DockerOpts (..)
  , DockerOptsMonoid (..)
  , DockerStackExe (..)
  , Mount (..)
  , VersionRangeJSON (..)
, dockerAutoPullArgName , dockerCmdName , dockerContainerNameArgName , dockerContainerPlatform , dockerDetachArgName , dockerEnableArgName , dockerEntrypointArgName , dockerEnvArgName , dockerHelpOptName , dockerImageArgName , dockerMountArgName , dockerMountModeArgName , dockerNetworkArgName , dockerPersistArgName , dockerPullCmdName , dockerRegistryLoginArgName , dockerRegistryPasswordArgName , dockerRegistryUsernameArgName , dockerRepoArgName , dockerRequireDockerVersionArgName , dockerRunArgsArgName , dockerSetUserArgName , dockerStackExeArgName , dockerStackExeDownloadVal , dockerStackExeHostVal , dockerStackExeImageVal , parseDockerStackExe , reExecArgName ) where import Data.Aeson.Types ( FromJSON (..), withText ) import Data.Aeson.WarningParser ( WithJSONWarnings, (..:), (..:?), (..!=), withObjectWarnings ) import Data.List ( intercalate ) import qualified Data.Text as T import Distribution.System ( Arch (..), OS (..), Platform (..) ) import Distribution.Text ( display, simpleParse ) import Distribution.Version ( anyVersion ) import Generics.Deriving.Monoid ( mappenddefault, memptydefault ) import Path ( parseAbsFile ) import Stack.Constants ( stackProgName ) import Stack.Prelude hiding ( Display (..) ) import Stack.Types.Version ( IntersectingVersionRange (..), VersionRange , versionRangeText ) import Text.Read ( Read (..) ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Docker" module. data DockerException = DockerMustBeEnabledException -- ^ Docker must be enabled to use the command. | OnlyOnHostException -- ^ Command must be run on host OS (not in a container). | InspectFailedException String -- ^ @docker inspect@ failed. | NotPulledException String -- ^ Image does not exist. | InvalidImagesOutputException String -- ^ Invalid output from @docker images@. | InvalidPSOutputException String -- ^ Invalid output from @docker ps@. | InvalidInspectOutputException String -- ^ Invalid output from @docker inspect@. 
| PullFailedException String -- ^ Could not pull a Docker image. | DockerTooOldException Version Version -- ^ Installed version of @docker@ below minimum version. | DockerVersionProhibitedException [Version] Version -- ^ Installed version of @docker@ is prohibited. | BadDockerVersionException VersionRange Version -- ^ Installed version of @docker@ is out of range specified in config file. | InvalidVersionOutputException -- ^ Invalid output from @docker --version@. | HostStackTooOldException Version (Maybe Version) -- ^ Version of @stack@ on host is too old for version in image. | ContainerStackTooOldException Version Version -- ^ Version of @stack@ in container/image is too old for version on host. | CannotDetermineProjectRootException -- ^ Can't determine the project root (where to put docker sandbox). | DockerNotInstalledException -- ^ @docker --version@ failed. | UnsupportedStackExeHostPlatformException -- ^ Using host stack-exe on unsupported platform. | DockerStackExeParseException String -- ^ @stack-exe@ option fails to parse. deriving (Show, Typeable) instance Exception DockerException where displayException DockerMustBeEnabledException = "Error: [S-3223]\n" ++ "Docker must be enabled in your configuration file to use this \ \command." displayException OnlyOnHostException = "Error: [S-9779]\n" ++ "This command must be run on host OS (not in a Docker container)." displayException (InspectFailedException image) = concat [ "Error: [S-9105]\n" , "'docker inspect' failed for image after pull: " , image , "." ] displayException (NotPulledException image) = concat [ "Error: [S-6626]\n" , "The Docker image referenced by your configuration file" , " has not\nbeen downloaded:\n " , image , "\n\nRun '" , unwords [stackProgName, dockerCmdName, dockerPullCmdName] , "' to download it, then try again." ] displayException (InvalidImagesOutputException l) = concat [ "Error: [S-5841]\n" , "Invalid 'docker images' output line: '" , l , "'." 
] displayException (InvalidPSOutputException l) = concat [ "Error: [S-9608]\n" , "Invalid 'docker ps' output line: '" , l ,"'." ] displayException (InvalidInspectOutputException msg) = concat [ "Error: [S-2240]\n" , "Invalid 'docker inspect' output: " , msg , "." ] displayException (PullFailedException image) = concat [ "Error: [S-6092]\n" , "Could not pull Docker image:\n " , image , "\nThere may not be an image on the registry for your resolver's LTS \ \version in\n" , "your configuration file." ] displayException (DockerTooOldException minVersion haveVersion) = concat [ "Error: [S-6281]\n" , "Minimum docker version '" , versionString minVersion , "' is required by " , stackProgName , " (you have '" , versionString haveVersion , "')." ] displayException (DockerVersionProhibitedException prohibitedVersions haveVersion) = concat [ "Error: [S-8252]\n" , "These Docker versions are incompatible with " , stackProgName , " (you have '" , versionString haveVersion , "'): " , intercalate ", " (map versionString prohibitedVersions) , "." ] displayException (BadDockerVersionException requiredRange haveVersion) = concat [ "Error: [S-6170]\n" , "The version of 'docker' you are using (" , show haveVersion , ") is outside the required\n" , "version range specified in stack.yaml (" , T.unpack (versionRangeText requiredRange) , ")." ] displayException InvalidVersionOutputException = "Error: [S-5827]\n" ++ "Cannot get Docker version (invalid 'docker --version' output)." displayException (HostStackTooOldException minVersion (Just hostVersion)) = concat [ "Error: [S-7112]\n" , "The host's version of '" , stackProgName , "' is too old for this Docker image.\nVersion " , versionString minVersion , " is required; you have " , versionString hostVersion , "." ] displayException (HostStackTooOldException minVersion Nothing) = concat [ "Error: [S-7112]\n" , "The host's version of '" , stackProgName , "' is too old.\nVersion " , versionString minVersion , " is required." 
] displayException (ContainerStackTooOldException requiredVersion containerVersion) = concat [ "Error: [S-5832]\n" , "The Docker container's version of '" , stackProgName , "' is too old.\nVersion " , versionString requiredVersion , " is required; the container has " , versionString containerVersion , "." ] displayException CannotDetermineProjectRootException = "Error: [S-4078]\n" ++ "Cannot determine project root directory for Docker sandbox." displayException DockerNotInstalledException = "Error: [S-7058]\n" ++ "Cannot find 'docker' in PATH. Is Docker installed?" displayException UnsupportedStackExeHostPlatformException = concat [ "Error: [S-6894]\n" , "Using host's " , stackProgName , " executable in Docker container is only supported on " , display dockerContainerPlatform , " platform." ] displayException (DockerStackExeParseException s) = concat [ "Error: [S-1512]\n" , "Failed to parse " , show s , ". Expected " , show dockerStackExeDownloadVal , ", " , show dockerStackExeHostVal , ", " , show dockerStackExeImageVal , " or absolute path to executable." ] -- | Docker configuration. data DockerOpts = DockerOpts { enable :: !Bool -- ^ Is using Docker enabled? , image :: !(Either SomeException String) -- ^ Exact Docker image tag or ID. Overrides docker-repo-*/tag. , registryLogin :: !Bool -- ^ Does registry require login for pulls? , registryUsername :: !(Maybe String) -- ^ Optional username for Docker registry. , registryPassword :: !(Maybe String) -- ^ Optional password for Docker registry. , autoPull :: !Bool -- ^ Automatically pull new images. , detach :: !Bool -- ^ Whether to run a detached container , persist :: !Bool -- ^ Create a persistent container (don't remove it when finished). Implied -- by `dockerDetach`. , containerName :: !(Maybe String) -- ^ Container name to use, only makes sense from command-line with -- `dockerPersist` or `dockerDetach`. , network :: !(Maybe String) -- ^ The network docker uses. 
, runArgs :: ![String] -- ^ Arguments to pass directly to @docker run@. , mount :: ![Mount] -- ^ Volumes to mount in the container. , mountMode :: !(Maybe String) -- ^ Volume mount mode , env :: ![String] -- ^ Environment variables to set in the container. , stackExe :: !(Maybe DockerStackExe) -- ^ Location of container-compatible Stack executable , setUser :: !(Maybe Bool) -- ^ Set in-container user to match host's , requireDockerVersion :: !VersionRange -- ^ Require a version of Docker within this range. } deriving Show -- | An uninterpreted representation of docker options. Configurations may be -- "cascaded" using mappend (left-biased). data DockerOptsMonoid = DockerOptsMonoid { defaultEnable :: !Any -- ^ Should Docker be defaulted to enabled (does @docker:@ section exist in -- the config)? , enable :: !(First Bool) -- ^ Is using Docker enabled? , repoOrImage :: !(First DockerMonoidRepoOrImage) -- ^ Docker repository name (e.g. @fpco/stack-build@ or -- @fpco/stack-full:lts-2.8@) , registryLogin :: !(First Bool) -- ^ Does registry require login for pulls? , registryUsername :: !(First String) -- ^ Optional username for Docker registry. , registryPassword :: !(First String) -- ^ Optional password for Docker registry. , autoPull :: !FirstTrue -- ^ Automatically pull new images. , detach :: !FirstFalse -- ^ Whether to run a detached container , persist :: !FirstFalse -- ^ Create a persistent container (don't remove it when finished). Implied -- by -- `dockerDetach`. , containerName :: !(First String) -- ^ Container name to use, only makes sense from command-line with -- `dockerPersist` or `dockerDetach`. 
, network :: !(First String) -- ^ See: 'dockerNetwork' , runArgs :: ![String] -- ^ Arguments to pass directly to @docker run@ , mount :: ![Mount] -- ^ Volumes to mount in the container , mountMode :: !(First String) -- ^ Volume mount mode , env :: ![String] -- ^ Environment variables to set in the container , stackExe :: !(First DockerStackExe) -- ^ Location of container-compatible Stack executable , setUser :: !(First Bool) -- ^ Set in-container user to match host's , requireDockerVersion :: !IntersectingVersionRange -- ^ See: 'dockerRequireDockerVersion' } deriving (Show, Generic) -- | Decode uninterpreted docker options from JSON/YAML. instance FromJSON (WithJSONWarnings DockerOptsMonoid) where parseJSON = withObjectWarnings "DockerOptsMonoid" $ \o -> do let defaultEnable = Any True enable <- First <$> o ..:? dockerEnableArgName repoOrImage <- First <$> ( (Just . DockerMonoidImage <$> o ..: dockerImageArgName) <|> (Just . DockerMonoidRepo <$> o ..: dockerRepoArgName) <|> pure Nothing ) registryLogin <- First <$> o ..:? dockerRegistryLoginArgName registryUsername <- First <$> o ..:? dockerRegistryUsernameArgName registryPassword <- First <$> o ..:? dockerRegistryPasswordArgName autoPull <- FirstTrue <$> o ..:? dockerAutoPullArgName detach <- FirstFalse <$> o ..:? dockerDetachArgName persist <- FirstFalse <$> o ..:? dockerPersistArgName containerName <- First <$> o ..:? dockerContainerNameArgName network <- First <$> o ..:? dockerNetworkArgName runArgs <- o ..:? dockerRunArgsArgName ..!= [] mount <- o ..:? dockerMountArgName ..!= [] mountMode <- First <$> o ..:? dockerMountModeArgName env <- o ..:? dockerEnvArgName ..!= [] stackExe <- First <$> o ..:? dockerStackExeArgName setUser <- First <$> o ..:? dockerSetUserArgName requireDockerVersion <- IntersectingVersionRange . (.versionRangeJSON) <$> ( o ..:? 
dockerRequireDockerVersionArgName ..!= VersionRangeJSON anyVersion ) pure DockerOptsMonoid { defaultEnable , enable , repoOrImage , registryLogin , registryUsername , registryPassword , autoPull , detach , persist , containerName , network , runArgs , mount , mountMode , env , stackExe , setUser , requireDockerVersion } -- | Left-biased combine Docker options instance Semigroup DockerOptsMonoid where (<>) = mappenddefault -- | Left-biased combine Docker options instance Monoid DockerOptsMonoid where mempty = memptydefault mappend = (<>) -- | Where to get the `stack` executable to run in Docker containers data DockerStackExe = DockerStackExeDownload -- ^ Download from official bindist | DockerStackExeHost -- ^ Host's `stack` (linux-x86_64 only) | DockerStackExeImage -- ^ Docker image's `stack` (versions must match) | DockerStackExePath (Path Abs File) -- ^ Executable at given path deriving Show instance FromJSON DockerStackExe where parseJSON a = do s <- parseJSON a case parseDockerStackExe s of Right dse -> pure dse Left e -> fail (displayException e) -- | Parse 'DockerStackExe'. parseDockerStackExe :: (MonadThrow m) => String -> m DockerStackExe parseDockerStackExe t | t == dockerStackExeDownloadVal = pure DockerStackExeDownload | t == dockerStackExeHostVal = pure DockerStackExeHost | t == dockerStackExeImageVal = pure DockerStackExeImage | otherwise = case parseAbsFile t of Just p -> pure (DockerStackExePath p) Nothing -> throwM (DockerStackExeParseException t) -- | Docker volume mount. data Mount = Mount String String -- | For optparse-applicative. instance Read Mount where readsPrec _ s = case break (== ':') s of (a, ':':b) -> [(Mount a b, "")] (a, []) -> [(Mount a a, "")] _ -> fail "Invalid value for Docker mount (expect '/host/path:/container/path')" -- | Show instance. instance Show Mount where show (Mount a b) = if a == b then a else concat [a, ":", b] -- | For YAML. 
instance FromJSON Mount where parseJSON v = do s <- parseJSON v case readMaybe s of Nothing -> fail $ "Mount read failed: " ++ s Just x -> pure x -- | Options for Docker repository or image. data DockerMonoidRepoOrImage = DockerMonoidRepo String | DockerMonoidImage String deriving Show -- | Newtype for non-orphan FromJSON instance. newtype VersionRangeJSON = VersionRangeJSON { versionRangeJSON :: VersionRange } -- | Parse VersionRange. instance FromJSON VersionRangeJSON where parseJSON = withText "VersionRange" (\s -> maybe (fail ("Invalid cabal-style VersionRange: " ++ T.unpack s)) (pure . VersionRangeJSON) (Distribution.Text.simpleParse (T.unpack s))) -- | Docker enable argument name. dockerEnableArgName :: Text dockerEnableArgName = "enable" -- | Docker repo arg argument name. dockerRepoArgName :: Text dockerRepoArgName = "repo" -- | Docker image argument name. dockerImageArgName :: Text dockerImageArgName = "image" -- | Docker registry login argument name. dockerRegistryLoginArgName :: Text dockerRegistryLoginArgName = "registry-login" -- | Docker registry username argument name. dockerRegistryUsernameArgName :: Text dockerRegistryUsernameArgName = "registry-username" -- | Docker registry password argument name. dockerRegistryPasswordArgName :: Text dockerRegistryPasswordArgName = "registry-password" -- | Docker auto-pull argument name. dockerAutoPullArgName :: Text dockerAutoPullArgName = "auto-pull" -- | Docker detach argument name. dockerDetachArgName :: Text dockerDetachArgName = "detach" -- | Docker run args argument name. dockerRunArgsArgName :: Text dockerRunArgsArgName = "run-args" -- | Docker mount argument name. dockerMountArgName :: Text dockerMountArgName = "mount" -- | Docker mount mode argument name. dockerMountModeArgName :: Text dockerMountModeArgName = "mount-mode" -- | Docker environment variable argument name. dockerEnvArgName :: Text dockerEnvArgName = "env" -- | Docker container name argument name. 
dockerContainerNameArgName :: Text dockerContainerNameArgName = "container-name" -- -- | Docker container name argument name. dockerNetworkArgName :: Text dockerNetworkArgName = "network" -- | Docker persist argument name. dockerPersistArgName :: Text dockerPersistArgName = "persist" -- | Docker Stack executable argument name. dockerStackExeArgName :: Text dockerStackExeArgName = "stack-exe" -- | Value for @--docker-stack-exe=download@ dockerStackExeDownloadVal :: String dockerStackExeDownloadVal = "download" -- | Value for @--docker-stack-exe=host@ dockerStackExeHostVal :: String dockerStackExeHostVal = "host" -- | Value for @--docker-stack-exe=image@ dockerStackExeImageVal :: String dockerStackExeImageVal = "image" -- | Docker @set-user@ argument name dockerSetUserArgName :: Text dockerSetUserArgName = "set-user" -- | Docker @require-version@ argument name dockerRequireDockerVersionArgName :: Text dockerRequireDockerVersionArgName = "require-docker-version" -- | Argument name used to pass docker entrypoint data (only used internally) dockerEntrypointArgName :: String dockerEntrypointArgName = "internal-docker-entrypoint" -- | Command-line argument for "docker" dockerCmdName :: String dockerCmdName = "docker" dockerHelpOptName :: String dockerHelpOptName = dockerCmdName ++ "-help" -- | Command-line argument for @docker pull@. dockerPullCmdName :: String dockerPullCmdName = "pull" -- | Command-line option for @--internal-re-exec-version@. reExecArgName :: String reExecArgName = "internal-re-exec-version" -- | Platform that Docker containers run dockerContainerPlatform :: Platform dockerContainerPlatform = Platform X86_64 Linux stack-2.15.7/src/Stack/Types/DockerEntrypoint.hs0000644000000000000000000000143414604306201017676 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} module Stack.Types.DockerEntrypoint ( DockerEntrypoint (..) , DockerUser (..) 
) where import Stack.Prelude import System.PosixCompat.Types ( FileMode, GroupID, UserID ) -- | Data passed into Docker container for the Docker entrypoint's use newtype DockerEntrypoint = DockerEntrypoint { user :: Maybe DockerUser -- ^ UID/GID/etc of host user, if we wish to perform UID/GID switch in -- container } deriving (Read, Show) -- | Docker host user info data DockerUser = DockerUser { uid :: UserID -- ^ uid , gid :: GroupID -- ^ gid , groups :: [GroupID] -- ^ Supplemental groups , umask :: FileMode -- ^ File creation mask } } deriving (Read, Show) stack-2.15.7/src/Stack/Types/DotConfig.hs0000644000000000000000000000420514604306201016246 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} module Stack.Types.DotConfig ( DotConfig (..) ) where import RIO.Process ( HasProcessContext (..) ) import Stack.Prelude hiding ( Display (..), pkgName, loadPackage ) import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..) ) import Stack.Types.Config ( HasConfig (..) ) import Stack.Types.DumpPackage ( DumpPackage (..) ) import Stack.Types.EnvConfig ( HasSourceMap (..) ) import Stack.Types.GHCVariant ( HasGHCVariant (..) ) import Stack.Types.Platform ( HasPlatform (..) ) import Stack.Types.Runner ( HasRunner (..) ) import Stack.Types.SourceMap ( SourceMap (..) ) data DotConfig = DotConfig { buildConfig :: !BuildConfig , sourceMap :: !SourceMap , globalDump :: ![DumpPackage] } instance HasLogFunc DotConfig where logFuncL = runnerL . logFuncL instance HasPantryConfig DotConfig where pantryConfigL = configL . pantryConfigL instance HasTerm DotConfig where useColorL = runnerL . useColorL termWidthL = runnerL . termWidthL instance HasStylesUpdate DotConfig where stylesUpdateL = runnerL . stylesUpdateL instance HasGHCVariant DotConfig where ghcVariantL = configL . 
ghcVariantL {-# INLINE ghcVariantL #-} instance HasPlatform DotConfig where platformL = configL . platformL {-# INLINE platformL #-} platformVariantL = configL . platformVariantL {-# INLINE platformVariantL #-} instance HasRunner DotConfig where runnerL = configL . runnerL instance HasProcessContext DotConfig where processContextL = runnerL . processContextL instance HasConfig DotConfig where configL = buildConfigL . lens (.config) (\x y -> x { config = y }) {-# INLINE configL #-} instance HasBuildConfig DotConfig where buildConfigL = lens (.buildConfig) (\x y -> x { buildConfig = y }) instance HasSourceMap DotConfig where sourceMapL = lens (.sourceMap) (\x y -> x { sourceMap = y }) stack-2.15.7/src/Stack/Types/DotOpts.hs0000644000000000000000000000237114604306201015770 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} -- | Module exporting the `DotOpts` type used by Stack's @dot@ and -- @ls dependencies@ commands. module Stack.Types.DotOpts ( DotOpts (..) ) where import Stack.Prelude import Stack.Types.BuildOptsCLI ( ApplyCLIFlag ) -- | Options record for @stack dot@ and @stack ls dependencies@ data DotOpts = DotOpts { includeExternal :: !Bool -- ^ Include external dependencies , includeBase :: !Bool -- ^ Include dependencies on base , dependencyDepth :: !(Maybe Int) -- ^ Limit the depth of dependency resolution to (Just n) or continue until -- fixpoint , prune :: !(Set PackageName) -- ^ Package names to prune from the graph , dotTargets :: [Text] -- ^ Stack TARGETs to trace dependencies for , flags :: !(Map ApplyCLIFlag (Map FlagName Bool)) -- ^ Flags to apply when calculating dependencies , testTargets :: Bool -- ^ Like the "--test" flag for build, affects the meaning of 'dotTargets'. , benchTargets :: Bool -- ^ Like the "--bench" flag for build, affects the meaning of 'dotTargets'. , globalHints :: Bool -- ^ Use global hints instead of relying on an actual GHC installation. 
} stack-2.15.7/src/Stack/Types/DownloadInfo.hs0000644000000000000000000000260614620153446016771 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.DownloadInfo ( DownloadInfo (..) , parseDownloadInfoFromObject ) where import Data.Aeson.Types ( FromJSON (..), Object ) import Data.Aeson.WarningParser ( WarningParser, WithJSONWarnings (..), (..:), (..:?) , withObjectWarnings ) import Stack.Prelude -- | Build of the compiler distribution (e.g. standard, gmp4, tinfo6) -- | Information for a file to download. data DownloadInfo = DownloadInfo { url :: Text -- ^ URL or absolute file path , contentLength :: Maybe Int , sha1 :: Maybe ByteString , sha256 :: Maybe ByteString } deriving Show instance FromJSON (WithJSONWarnings DownloadInfo) where parseJSON = withObjectWarnings "DownloadInfo" parseDownloadInfoFromObject -- | Parse JSON in existing object for 'DownloadInfo' parseDownloadInfoFromObject :: Object -> WarningParser DownloadInfo parseDownloadInfoFromObject o = do url <- o ..: "url" contentLength <- o ..:? "content-length" sha1TextMay <- o ..:? "sha1" sha256TextMay <- o ..:? "sha256" let sha1 = fmap encodeUtf8 sha1TextMay sha256 = fmap encodeUtf8 sha256TextMay pure DownloadInfo { url , contentLength , sha1 , sha256 } stack-2.15.7/src/Stack/Types/DumpLogs.hs0000644000000000000000000000165214502056214016132 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE MultiWayIf #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.DumpLogs ( DumpLogs (..) 
) where import Data.Aeson.Types ( FromJSON (..), Value (..), withText ) import Stack.Prelude -- | Which build log files to dump data DumpLogs = DumpNoLogs -- ^ don't dump any logfiles | DumpWarningLogs -- ^ dump logfiles containing warnings | DumpAllLogs -- ^ dump all logfiles deriving (Bounded, Enum, Eq, Ord, Read, Show) instance FromJSON DumpLogs where parseJSON (Bool True) = pure DumpAllLogs parseJSON (Bool False) = pure DumpNoLogs parseJSON v = withText "DumpLogs" (\t -> if | t == "none" -> pure DumpNoLogs | t == "warning" -> pure DumpWarningLogs | t == "all" -> pure DumpAllLogs | otherwise -> fail ("Invalid DumpLogs: " ++ show t)) v stack-2.15.7/src/Stack/Types/DumpPackage.hs0000644000000000000000000000412614604306201016555 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} module Stack.Types.DumpPackage ( DumpPackage (..) , SublibDump (..) , dpParentLibIdent ) where import qualified Distribution.License as C import Distribution.ModuleName ( ModuleName ) import Stack.Prelude import Stack.Types.Component ( StackUnqualCompName ) import Stack.Types.GhcPkgId ( GhcPkgId ) -- | Type representing dump information for a single package, as output by the -- @ghc-pkg describe@ command. data DumpPackage = DumpPackage { ghcPkgId :: !GhcPkgId -- ^ The @id@ field. , packageIdent :: !PackageIdentifier -- ^ The @name@ and @version@ fields. The @name@ field is the munged package -- name. If the package is not for a sub library, its munged name is its -- name. , sublib :: !(Maybe SublibDump) -- ^ The sub library information if it's a sub-library. , license :: !(Maybe C.License) , libDirs :: ![FilePath] -- ^ The @library-dirs@ field. , libraries :: ![Text] -- ^ The @hs-libraries@ field. , hasExposedModules :: !Bool , exposedModules :: !(Set ModuleName) , depends :: ![GhcPkgId] -- ^ The @depends@ field (packages on which this package depends). 
, haddockInterfaces :: ![FilePath] , haddockHtml :: !(Maybe FilePath) , isExposed :: !Bool } deriving (Eq, Read, Show) -- | ghc-pkg has a notion of sublibraries when using ghc-pkg dump. We can only -- know it's different through the fields it shows. data SublibDump = SublibDump { packageName :: PackageName -- ^ "package-name" field from ghc-pkg , libraryName :: StackUnqualCompName -- ^ "lib-name" field from ghc-pkg } deriving (Eq, Read, Show) dpParentLibIdent :: DumpPackage -> Maybe PackageIdentifier dpParentLibIdent dp = case (dp.sublib, dp.packageIdent) of (Nothing, _) -> Nothing (Just sublibDump, PackageIdentifier _ v) -> Just $ PackageIdentifier libParentPackageName v where SublibDump { packageName = libParentPackageName } = sublibDump stack-2.15.7/src/Stack/Types/EnvConfig.hs0000644000000000000000000002577514620153446016300 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} module Stack.Types.EnvConfig ( EnvConfig (..) , HasEnvConfig (..) , HasSourceMap (..) , actualCompilerVersionL , appropriateGhcColorFlag , bindirCompilerTools , compilerVersionDir , extraBinDirs , hoogleDatabasePath , hoogleRoot , hpcReportDir , installationRootDeps , installationRootLocal , packageDatabaseDeps , packageDatabaseExtra , packageDatabaseLocal , platformGhcRelDir , platformGhcVerOnlyRelDir , platformSnapAndCompilerRel , shouldForceGhcColorFlag , snapshotsDir , useShaPathOnWindows , shaPathForBytes ) where import Crypto.Hash ( SHA1 (..), hashWith ) import qualified Data.ByteArray.Encoding as Mem ( Base(Base16), convertToBase ) import qualified Data.ByteString.Char8 as S8 import qualified Data.Text as T import qualified Distribution.Text ( display ) import Distribution.Version ( mkVersion ) import Path ( (), parseAbsDir, parseAbsFile, parseRelDir , parseRelFile ) import RIO.Process ( HasProcessContext (..) 
) import Stack.Constants ( bindirSuffix, ghcColorForceFlag, osIsWindows, relDirCompilerTools , relDirHoogle, relDirHpc, relDirInstall, relDirPkgdb , relDirSnapshots, relFileDatabaseHoo ) import Stack.Prelude import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..), getProjectWorkDir ) import Stack.Types.BuildOptsCLI ( BuildOptsCLI ) import Stack.Types.Compiler ( ActualCompiler (..), compilerVersionString, getGhcVersion ) import Stack.Types.CompilerBuild ( compilerBuildSuffix ) import Stack.Types.CompilerPaths ( CompilerPaths (..), HasCompiler (..) ) import Stack.Types.Config ( HasConfig (..), stackRootL ) import Stack.Types.FileDigestCache ( FileDigestCache ) import Stack.Types.GHCVariant ( HasGHCVariant (..), ghcVariantSuffix ) import Stack.Types.Platform ( HasPlatform (..), platformVariantSuffix ) import Stack.Types.Runner ( HasRunner (..) ) import Stack.Types.SourceMap ( SourceMap (..), SourceMapHash, smRelDir ) -- | Configuration after the environment has been setup. data EnvConfig = EnvConfig { buildConfig :: !BuildConfig , buildOptsCLI :: !BuildOptsCLI , fileDigestCache :: !FileDigestCache , sourceMap :: !SourceMap , sourceMapHash :: !SourceMapHash , compilerPaths :: !CompilerPaths } instance HasConfig EnvConfig where configL = buildConfigL . lens (.config) (\x y -> x { config = y }) {-# INLINE configL #-} instance HasBuildConfig EnvConfig where buildConfigL = envConfigL . lens (.buildConfig) (\x y -> x { buildConfig = y }) instance HasPlatform EnvConfig where platformL = configL . platformL {-# INLINE platformL #-} platformVariantL = configL . platformVariantL {-# INLINE platformVariantL #-} instance HasGHCVariant EnvConfig where ghcVariantL = configL . ghcVariantL {-# INLINE ghcVariantL #-} instance HasProcessContext EnvConfig where processContextL = configL . processContextL instance HasPantryConfig EnvConfig where pantryConfigL = configL . 
pantryConfigL instance HasCompiler EnvConfig where compilerPathsL = to (.compilerPaths) instance HasRunner EnvConfig where runnerL = configL . runnerL instance HasLogFunc EnvConfig where logFuncL = runnerL . logFuncL instance HasStylesUpdate EnvConfig where stylesUpdateL = runnerL . stylesUpdateL instance HasTerm EnvConfig where useColorL = runnerL . useColorL termWidthL = runnerL . termWidthL class (HasBuildConfig env, HasSourceMap env, HasCompiler env) => HasEnvConfig env where envConfigL :: Lens' env EnvConfig instance HasEnvConfig EnvConfig where envConfigL = id {-# INLINE envConfigL #-} class HasSourceMap env where sourceMapL :: Lens' env SourceMap instance HasSourceMap EnvConfig where sourceMapL = lens (.sourceMap) (\x y -> x { sourceMap = y }) shouldForceGhcColorFlag :: (HasEnvConfig env, HasRunner env) => RIO env Bool shouldForceGhcColorFlag = do canDoColor <- (>= mkVersion [8, 2, 1]) . getGhcVersion <$> view actualCompilerVersionL shouldDoColor <- view useColorL pure $ canDoColor && shouldDoColor appropriateGhcColorFlag :: (HasEnvConfig env, HasRunner env) => RIO env (Maybe String) appropriateGhcColorFlag = f <$> shouldForceGhcColorFlag where f True = Just ghcColorForceFlag f False = Nothing -- | Directory containing snapshots snapshotsDir :: (HasEnvConfig env, MonadReader env m, MonadThrow m) => m (Path Abs Dir) snapshotsDir = do root <- view stackRootL platform <- platformGhcRelDir pure $ root relDirSnapshots platform -- | Installation root for dependencies installationRootDeps :: HasEnvConfig env => RIO env (Path Abs Dir) installationRootDeps = do root <- view stackRootL -- TODO: also useShaPathOnWindows here, once #1173 is resolved. 
psc <- platformSnapAndCompilerRel pure $ root relDirSnapshots psc -- | Installation root for locals installationRootLocal :: HasEnvConfig env => RIO env (Path Abs Dir) installationRootLocal = do workDir <- getProjectWorkDir psc <- useShaPathOnWindows =<< platformSnapAndCompilerRel pure $ workDir relDirInstall psc -- | Get the hoogle database path. hoogleDatabasePath :: HasEnvConfig env => RIO env (Path Abs File) hoogleDatabasePath = do dir <- hoogleRoot pure (dir relFileDatabaseHoo) -- | Path for platform followed by snapshot name followed by compiler -- name. platformSnapAndCompilerRel :: HasEnvConfig env => RIO env (Path Rel Dir) platformSnapAndCompilerRel = do platform <- platformGhcRelDir smh <- view $ envConfigL . to (.sourceMapHash) name <- smRelDir smh ghc <- compilerVersionDir useShaPathOnWindows (platform name ghc) -- | Relative directory for the platform and GHC identifier platformGhcRelDir :: (HasEnvConfig env, MonadReader env m, MonadThrow m) => m (Path Rel Dir) platformGhcRelDir = do cp <- view compilerPathsL let cbSuffix = compilerBuildSuffix cp.build verOnly <- platformGhcVerOnlyRelDirStr parseRelDir (mconcat [ verOnly, cbSuffix ]) -- | Installation root for compiler tools bindirCompilerTools :: (HasEnvConfig env, MonadReader env m, MonadThrow m) => m (Path Abs Dir) bindirCompilerTools = do config <- view configL platform <- platformGhcRelDir compilerVersion <- view actualCompilerVersionL compiler <- parseRelDir $ compilerVersionString compilerVersion pure $ view stackRootL config relDirCompilerTools platform compiler bindirSuffix -- | Hoogle directory. 
hoogleRoot :: HasEnvConfig env => RIO env (Path Abs Dir) hoogleRoot = do workDir <- getProjectWorkDir psc <- useShaPathOnWindows =<< platformSnapAndCompilerRel pure $ workDir relDirHoogle psc compilerVersionDir :: (HasEnvConfig env, MonadReader env m, MonadThrow m) => m (Path Rel Dir) compilerVersionDir = do compilerVersion <- view actualCompilerVersionL parseRelDir $ case compilerVersion of ACGhc version -> versionString version ACGhcGit {} -> compilerVersionString compilerVersion -- | Package database for installing dependencies into packageDatabaseDeps :: HasEnvConfig env => RIO env (Path Abs Dir) packageDatabaseDeps = do root <- installationRootDeps pure $ root relDirPkgdb -- | Package database for installing local packages into packageDatabaseLocal :: HasEnvConfig env => RIO env (Path Abs Dir) packageDatabaseLocal = do root <- installationRootLocal pure $ root relDirPkgdb -- | Extra package databases packageDatabaseExtra :: (HasEnvConfig env, MonadReader env m) => m [Path Abs Dir] packageDatabaseExtra = view $ buildConfigL . to (.extraPackageDBs) -- | Where HPC reports and tix files get stored. hpcReportDir :: HasEnvConfig env => RIO env (Path Abs Dir) hpcReportDir = do root <- installationRootLocal pure $ root relDirHpc -- | Get the extra bin directories (for the PATH). Puts more local first -- -- Bool indicates whether or not to include the locals extraBinDirs :: HasEnvConfig env => RIO env (Bool -> [Path Abs Dir]) extraBinDirs = do deps <- installationRootDeps local' <- installationRootLocal tools <- bindirCompilerTools pure $ \locals -> if locals then [local' bindirSuffix, deps bindirSuffix, tools] else [deps bindirSuffix, tools] -- | The version of the compiler which will actually be used. May be different -- than that specified in the 'SnapshotDef' and returned by -- 'wantedCompilerVersionL'. actualCompilerVersionL :: HasSourceMap env => SimpleGetter env ActualCompiler actualCompilerVersionL = sourceMapL . 
to (.compiler) -- | Relative directory for the platform and GHC identifier without GHC bindist -- build platformGhcVerOnlyRelDir :: (HasGHCVariant env, HasPlatform env, MonadReader env m, MonadThrow m) => m (Path Rel Dir) platformGhcVerOnlyRelDir = parseRelDir =<< platformGhcVerOnlyRelDirStr -- | Relative directory for the platform and GHC identifier without GHC bindist -- build (before parsing into a Path) platformGhcVerOnlyRelDirStr :: (HasGHCVariant env, HasPlatform env, MonadReader env m) => m FilePath platformGhcVerOnlyRelDirStr = do platform <- view platformL platformVariant <- view platformVariantL ghcVariant <- view ghcVariantL pure $ mconcat [ Distribution.Text.display platform , platformVariantSuffix platformVariant , ghcVariantSuffix ghcVariant ] -- | This is an attempt to shorten Stack paths on Windows to decrease our -- chances of hitting 260 symbol path limit. The idea is to calculate -- SHA1 hash of the path used on other architectures, encode with base -- 16 and take first 8 symbols of it. useShaPathOnWindows :: MonadThrow m => Path Rel Dir -> m (Path Rel Dir) useShaPathOnWindows | osIsWindows = shaPath | otherwise = pure shaPath :: (IsPath Rel t, MonadThrow m) => Path Rel t -> m (Path Rel t) shaPath = shaPathForBytes . encodeUtf8 . T.pack . toFilePath shaPathForBytes :: (IsPath Rel t, MonadThrow m) => ByteString -> m (Path Rel t) shaPathForBytes = parsePath . S8.unpack . S8.take 8 . Mem.convertToBase Mem.Base16 . hashWith SHA1 -- TODO: Move something like this into the path package. Consider -- subsuming path-io's 'AnyPath'? 
class IsPath b t where parsePath :: MonadThrow m => FilePath -> m (Path b t) instance IsPath Abs Dir where parsePath = parseAbsDir instance IsPath Rel Dir where parsePath = parseRelDir instance IsPath Abs File where parsePath = parseAbsFile instance IsPath Rel File where parsePath = parseRelFile stack-2.15.7/src/Stack/Types/EnvSettings.hs0000644000000000000000000000341314620153446016654 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} module Stack.Types.EnvSettings ( EnvSettings (..) , minimalEnvSettings , defaultEnvSettings , plainEnvSettings ) where import Stack.Prelude -- | Controls which version of the environment is used data EnvSettings = EnvSettings { includeLocals :: !Bool -- ^ include local project bin directory, GHC_PACKAGE_PATH, etc , includeGhcPackagePath :: !Bool -- ^ include the GHC_PACKAGE_PATH variable , stackExe :: !Bool -- ^ set the STACK_EXE variable to the current executable name , localeUtf8 :: !Bool -- ^ set the locale to C.UTF-8 , keepGhcRts :: !Bool -- ^ if True, keep GHCRTS variable in environment } deriving (Eq, Ord, Show) minimalEnvSettings :: EnvSettings minimalEnvSettings = EnvSettings { includeLocals = False , includeGhcPackagePath = False , stackExe = False , localeUtf8 = False , keepGhcRts = False } -- | Default @EnvSettings@ which includes locals and GHC_PACKAGE_PATH. -- -- Note that this also passes through the GHCRTS environment variable. -- See https://github.com/commercialhaskell/stack/issues/3444 defaultEnvSettings :: EnvSettings defaultEnvSettings = EnvSettings { includeLocals = True , includeGhcPackagePath = True , stackExe = True , localeUtf8 = False , keepGhcRts = True } -- | Environment settings which do not embellish the environment -- -- Note that this also passes through the GHCRTS environment variable. 
-- See https://github.com/commercialhaskell/stack/issues/3444
plainEnvSettings :: EnvSettings
plainEnvSettings = EnvSettings
  { includeLocals = False
  , includeGhcPackagePath = False
  , stackExe = False
  , localeUtf8 = False
  , keepGhcRts = True
  }
stack-2.15.7/src/Stack/Types/ExtraDirs.hs0000644000000000000000000000103614604306201016276 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE NoFieldSelectors #-}

module Stack.Types.ExtraDirs
  ( ExtraDirs (..)
  ) where

import Generics.Deriving.Monoid ( mappenddefault, memptydefault )
import Stack.Prelude

-- Lists of extra directories; presumably bin/include/lib search paths —
-- TODO(review) confirm against callers.
data ExtraDirs = ExtraDirs
  { bins :: ![Path Abs Dir]
  , includes :: ![Path Abs Dir]
  , libs :: ![Path Abs Dir]
  }
  deriving (Show, Generic)

-- Combined field-wise via generic list concatenation.
instance Semigroup ExtraDirs where
  (<>) = mappenddefault

instance Monoid ExtraDirs where
  mempty = memptydefault
  mappend = (<>)
stack-2.15.7/src/Stack/Types/FileDigestCache.hs0000644000000000000000000000147614502056214017347 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}

module Stack.Types.FileDigestCache
  ( FileDigestCache
  , newFileDigestCache
  , readFileDigest
  ) where

import qualified Data.Map.Strict as Map
import Stack.Prelude
import qualified Pantry.SHA256 as SHA256

-- | Type synonym representing caches of digests of files.
type FileDigestCache = IORef (Map FilePath SHA256)

-- | Create an empty digest cache.
newFileDigestCache :: MonadIO m => m FileDigestCache
newFileDigestCache = newIORef Map.empty

-- | Yield the cached digest for a file, hashing the file and memoising the
-- result on a cache miss.
readFileDigest :: MonadIO m => FileDigestCache -> FilePath -> m SHA256
readFileDigest cache filePath = do
  digests <- readIORef cache
  case Map.lookup filePath digests of
    Just digest -> pure digest
    Nothing -> do
      sha256 <- SHA256.hashFile filePath
      writeIORef cache $ Map.insert filePath sha256 digests
      pure sha256
stack-2.15.7/src/Stack/Types/GHCDownloadInfo.hs0000644000000000000000000000204214604306201017304 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE NoFieldSelectors #-}
{-# LANGUAGE OverloadedStrings #-}

module Stack.Types.GHCDownloadInfo
  ( GHCDownloadInfo (..)
  ) where

import Data.Aeson.Types ( FromJSON (..) )
import Data.Aeson.WarningParser
  ( WithJSONWarnings (..), (..:?), (..!=), withObjectWarnings )
import Stack.Prelude
import Stack.Types.DownloadInfo
  ( DownloadInfo, parseDownloadInfoFromObject )

-- Download metadata for a GHC bindist, plus configure-time options and
-- environment overrides.
data GHCDownloadInfo = GHCDownloadInfo
  { configureOpts :: [Text]
  , configureEnv :: Map Text Text
  , downloadInfo :: DownloadInfo
  }
  deriving Show

instance FromJSON (WithJSONWarnings GHCDownloadInfo) where
  parseJSON = withObjectWarnings "GHCDownloadInfo" $ \o -> do
    -- Both fields are optional and default to mempty.
    configureOpts <- o ..:? "configure-opts" ..!= mempty
    configureEnv <- o ..:? "configure-env" ..!= mempty
    downloadInfo <- parseDownloadInfoFromObject o
    pure GHCDownloadInfo
      { configureOpts
      , configureEnv
      , downloadInfo
      }
stack-2.15.7/src/Stack/Types/GHCVariant.hs0000644000000000000000000000376514502056214016335 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}

module Stack.Types.GHCVariant
  ( GHCVariant (..)
  , HasGHCVariant (..)
  , ghcVariantName
  , ghcVariantSuffix
  , parseGHCVariant
  ) where

import Data.Aeson.Types ( FromJSON, parseJSON, withText )
import Data.List ( stripPrefix )
import qualified Data.Text as T
import Stack.Prelude

-- | Specialized variant of GHC (e.g. libgmp4 or integer-simple)
data GHCVariant
  = GHCStandard
    -- ^ Standard bindist
  | GHCIntegerSimple
    -- ^ Bindist that uses integer-simple
  | GHCNativeBignum
    -- ^ Bindist that uses the Haskell-native big-integer backend
  | GHCCustom String
    -- ^ Other bindists
  deriving Show

instance FromJSON GHCVariant where
  -- Strange structuring is to give consistent error messages
  parseJSON =
    withText
      "GHCVariant"
      (either (fail . show) pure . parseGHCVariant . T.unpack)

-- | Class for environment values which have a GHCVariant
class HasGHCVariant env where
  ghcVariantL :: SimpleGetter env GHCVariant

instance HasGHCVariant GHCVariant where
  ghcVariantL = id
  {-# INLINE ghcVariantL #-}

-- | Render a GHC variant to a String.
-- NOTE(restoration): in the GhcPkgId module below, the extraction stripped the
-- attoparsec (<?>) label operator (the import list read "( Parser, (), ..."
-- and alphaNum applied a parser to a string); both occurrences are restored.
ghcVariantName :: GHCVariant -> String
ghcVariantName GHCStandard = "standard"
ghcVariantName GHCIntegerSimple = "integersimple"
ghcVariantName GHCNativeBignum = "int-native"
ghcVariantName (GHCCustom name) = "custom-" ++ name

-- | Render a GHC variant to a String suffix.
ghcVariantSuffix :: GHCVariant -> String
ghcVariantSuffix GHCStandard = ""
ghcVariantSuffix v = "-" ++ ghcVariantName v

-- | Parse GHC variant from a String.
parseGHCVariant :: (MonadThrow m) => String -> m GHCVariant
parseGHCVariant s =
  case stripPrefix "custom-" s of
    Just name -> pure (GHCCustom name)
    Nothing
      | s == "" -> pure GHCStandard
      | s == "standard" -> pure GHCStandard
      | s == "integersimple" -> pure GHCIntegerSimple
      | s == "int-native" -> pure GHCNativeBignum
      | otherwise -> pure (GHCCustom s)
stack-2.15.7/src/Stack/Types/GhcOptionKey.hs0000644000000000000000000000166214502056214016744 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}

module Stack.Types.GhcOptionKey
  ( GhcOptionKey (..)
  ) where

import Data.Aeson.Types ( FromJSONKey (..), FromJSONKeyFunction (..) )
import qualified Data.Text as T
import Stack.Prelude

-- | Keys of the @ghc-options@ configuration map.
data GhcOptionKey
  = GOKOldEverything
  | GOKEverything
  | GOKLocals
  | GOKTargets
  | GOKPackage !PackageName
  deriving (Eq, Ord)

instance FromJSONKey GhcOptionKey where
  fromJSONKey = FromJSONKeyTextParser $ \t ->
    case t of
      "*" -> pure GOKOldEverything
      "$everything" -> pure GOKEverything
      "$locals" -> pure GOKLocals
      "$targets" -> pure GOKTargets
      -- Any other key must be a valid package name.
      _ ->
        case parsePackageName $ T.unpack t of
          Nothing -> fail $ "Invalid package name: " ++ show t
          Just x -> pure $ GOKPackage x
  fromJSONKeyList =
    FromJSONKeyTextParser $ \_ -> fail "GhcOptionKey.fromJSONKeyList"
stack-2.15.7/src/Stack/Types/GhcOptions.hs0000644000000000000000000000113314604306201016444 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE NoFieldSelectors #-}

module Stack.Types.GhcOptions
  ( GhcOptions (..)
  ) where

import Data.Aeson.Types ( FromJSON (..), withText )
import Data.Attoparsec.Args ( EscapingMode (Escaping), parseArgs )
import qualified Data.Text as T
import Stack.Prelude

-- | A list of GHC options, parsed from a single shell-style string.
newtype GhcOptions = GhcOptions { ghcOptions :: [Text] }

instance FromJSON GhcOptions where
  parseJSON = withText "GhcOptions" $ \t ->
    case parseArgs Escaping t of
      Left e -> fail e
      Right opts -> pure $ GhcOptions $ map T.pack opts
stack-2.15.7/src/Stack/Types/GhcPkgId.hs0000644000000000000000000000464214604306201016017 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}

-- | A ghc-pkg id.
module Stack.Types.GhcPkgId
  ( GhcPkgId
  , unGhcPkgId
  , ghcPkgIdParser
  , parseGhcPkgId
  , ghcPkgIdString
  ) where

import Data.Aeson.Types ( FromJSON (..), ToJSON (..), withText )
import Data.Attoparsec.Text
  ( Parser, (<?>), choice, endOfInput, many1, parseOnly, satisfy )
import Data.Char ( isAlphaNum )
import qualified Data.Text as T
import Database.Persist.Sql ( PersistField, PersistFieldSql )
import Stack.Prelude
import Text.Read ( Read (..) )

-- | A parse fail.
newtype GhcPkgIdParseFail
  = GhcPkgIdParseFail Text
  deriving (Show, Typeable)

instance Exception GhcPkgIdParseFail where
  displayException (GhcPkgIdParseFail bs) = concat
    [ "Error: [S-5359]\n"
    , "Invalid package ID: "
    , show bs
    ]

-- | A ghc-pkg package identifier.
newtype GhcPkgId = GhcPkgId Text
  deriving (Data, Eq, Generic, Ord, PersistField, PersistFieldSql, Typeable)

instance Hashable GhcPkgId
instance NFData GhcPkgId

instance Show GhcPkgId where
  show = show . ghcPkgIdString

instance Read GhcPkgId where
  readsPrec i = map (first (GhcPkgId . T.pack)) . readsPrec i

instance FromJSON GhcPkgId where
  parseJSON = withText "GhcPkgId" $ \t ->
    case parseGhcPkgId t of
      Left e -> fail $ show (e, t)
      Right x -> pure x

instance ToJSON GhcPkgId where
  toJSON g = toJSON (ghcPkgIdString g)

-- | Convenient way to parse a package name from a 'Text'.
parseGhcPkgId :: MonadThrow m => Text -> m GhcPkgId
parseGhcPkgId x = go x
 where
  go =
    either (const (throwM (GhcPkgIdParseFail x))) pure
    . parseOnly (ghcPkgIdParser <* endOfInput)

-- | A parser for a package-version-hash pair.
ghcPkgIdParser :: Parser GhcPkgId
ghcPkgIdParser =
  let elements = "_.-" :: String
  in  GhcPkgId . T.pack <$>
        many1 (choice [alphaNum, satisfy (`elem` elements)])

-- | Parse an alphanumerical character, as recognised by `isAlphaNum`.
alphaNum :: Parser Char
alphaNum = satisfy isAlphaNum <?> "alphanumeric"
{-# INLINE alphaNum #-}

-- | Get a string representation of GHC package id.
ghcPkgIdString :: GhcPkgId -> String
ghcPkgIdString (GhcPkgId x) = T.unpack x

-- | Get a text value of GHC package id
unGhcPkgId :: GhcPkgId -> Text
unGhcPkgId (GhcPkgId v) = v
stack-2.15.7/src/Stack/Types/GlobalOpts.hs0000644000000000000000000000373714620153446016456 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE NoFieldSelectors #-}
{-# LANGUAGE OverloadedRecordDot #-}

module Stack.Types.GlobalOpts
  ( GlobalOpts (..)
  , globalOptsBuildOptsMonoidL
  ) where

import Stack.Prelude
import Stack.Types.BuildOptsMonoid ( BuildOptsMonoid )
import Stack.Types.ConfigMonoid ( ConfigMonoid (..) )
import Stack.Types.DockerEntrypoint ( DockerEntrypoint )
import Stack.Types.LockFileBehavior ( LockFileBehavior )
import Stack.Types.Resolver ( AbstractResolver )
import Stack.Types.StackYamlLoc ( StackYamlLoc )

-- | Parsed global command-line options.
data GlobalOpts = GlobalOpts
  { reExecVersion :: !(Maybe String)
    -- ^ Expected re-exec in container version
  , dockerEntrypoint :: !(Maybe DockerEntrypoint)
    -- ^ Data used when Stack is acting as a Docker entrypoint (internal use
    -- only)
  , logLevel :: !LogLevel
    -- ^ Log level
  , timeInLog :: !Bool
    -- ^ Whether to include timings in logs.
  , rslInLog :: !Bool
    -- ^ Whether to include raw snapshot layer (RSL) in logs.
  , planInLog :: !Bool
    -- ^ Whether to include debug information about the construction of the
    -- build plan in logs.
-- (continuation of the 'GlobalOpts' record fields)
  , configMonoid :: !ConfigMonoid
    -- ^ Config monoid, for passing into 'loadConfig'
  , resolver :: !(Maybe AbstractResolver)
    -- ^ Resolver override
  , compiler :: !(Maybe WantedCompiler)
    -- ^ Compiler override
  , terminal :: !Bool
    -- ^ We're in a terminal?
  , stylesUpdate :: !StylesUpdate
    -- ^ SGR (Ansi) codes for styles
  , termWidthOpt :: !(Maybe Int)
    -- ^ Terminal width override
  , stackYaml :: !StackYamlLoc
    -- ^ Override project stack.yaml
  , lockFileBehavior :: !LockFileBehavior
  }
  deriving Show

-- | Lens from 'GlobalOpts' to the 'BuildOptsMonoid' nested in its
-- 'ConfigMonoid', composed from two record lenses.
globalOptsBuildOptsMonoidL :: Lens' GlobalOpts BuildOptsMonoid
globalOptsBuildOptsMonoidL =
  lens (.configMonoid) (\x y -> x { configMonoid = y })
  . lens (.buildOpts) (\x y -> x { buildOpts = y })
stack-2.15.7/src/Stack/Types/GlobalOptsMonoid.hs0000644000000000000000000000414714620153446017624 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE NoFieldSelectors #-}

module Stack.Types.GlobalOptsMonoid
  ( GlobalOptsMonoid (..)
  ) where

import Generics.Deriving.Monoid ( mappenddefault, memptydefault )
import Stack.Prelude
import Stack.Types.ConfigMonoid ( ConfigMonoid )
import Stack.Types.DockerEntrypoint ( DockerEntrypoint )
import Stack.Types.LockFileBehavior ( LockFileBehavior )
import Stack.Types.Resolver ( AbstractResolver )

-- | Parsed global command-line options monoid.
data GlobalOptsMonoid = GlobalOptsMonoid
  { reExecVersion :: !(First String)
    -- ^ Expected re-exec in container version
  , dockerEntrypoint :: !(First DockerEntrypoint)
    -- ^ Data used when Stack is acting as a Docker entrypoint (internal use
    -- only)
  , logLevel :: !(First LogLevel)
    -- ^ Log level
  , timeInLog :: !FirstTrue
    -- ^ Whether to include timings in logs.
  , rslInLog :: !FirstFalse
    -- ^ Whether to include raw snapshot layer (RSL) in logs.
  , planInLog :: !FirstFalse
    -- ^ Whether to include debug information about the construction of the
    -- build plan in logs.
  , configMonoid :: !ConfigMonoid
    -- ^ Config monoid, for passing into 'loadConfig'
  , resolver :: !(First (Unresolved AbstractResolver))
    -- ^ Resolver override
  , resolverRoot :: !(First FilePath)
    -- ^ root directory for resolver relative path
  , compiler :: !(First WantedCompiler)
    -- ^ Compiler override
  , terminal :: !(First Bool)
    -- ^ We're in a terminal?
  , styles :: !StylesUpdate
    -- ^ Stack's output styles
  , termWidthOpt :: !(First Int)
    -- ^ Terminal width override
  , stackYaml :: !(First FilePath)
    -- ^ Override project stack.yaml
  , lockFileBehavior :: !(First LockFileBehavior)
    -- ^ See 'globalLockFileBehavior'
  }
  deriving Generic

-- Left-biased: the first 'First' value wins, field by field.
instance Semigroup GlobalOptsMonoid where
  (<>) = mappenddefault

instance Monoid GlobalOptsMonoid where
  mempty = memptydefault
  mappend = (<>)
stack-2.15.7/src/Stack/Types/Installed.hs0000644000000000000000000001253314604306201016314 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedRecordDot #-}

-- | This module contains all the types related to the idea of installing a
-- package in the pkg-db or an executable on the file system.
module Stack.Types.Installed
  ( InstallLocation (..)
  , InstalledPackageLocation (..)
  , PackageDatabase (..)
  , PackageDbVariety (..)
  , InstallMap
  , Installed (..)
  , InstalledMap
  , InstalledLibraryInfo (..)
  , toPackageDbVariety
  , installedLibraryInfoFromGhcPkgId
  , simpleInstalledLib
  , installedToPackageIdOpt
  , installedPackageIdentifier
  , installedVersion
  , foldOnGhcPkgId'
  ) where

import qualified Data.Map as M
import qualified Distribution.SPDX.License as SPDX
import Distribution.License ( License )
import Stack.Prelude
import Stack.Types.ComponentUtils ( StackUnqualCompName )
import Stack.Types.GhcPkgId ( GhcPkgId, ghcPkgIdString )

-- | Type representing user package databases that packages can be installed
-- into.
data InstallLocation
  = Snap
    -- ^ The write-only package database, formerly known as the snapshot
    -- database.
| Local -- ^ The mutable package database, formerly known as the local database. deriving (Eq, Show) instance Semigroup InstallLocation where Local <> _ = Local _ <> Local = Local Snap <> Snap = Snap instance Monoid InstallLocation where mempty = Snap mappend = (<>) -- | Type representing user (non-global) package databases that can provide -- installed packages. data InstalledPackageLocation = InstalledTo InstallLocation -- ^ A package database that a package can be installed into. | ExtraPkgDb -- ^ An \'extra\' package database, specified by @extra-package-dbs@. deriving (Eq, Show) -- | Type representing package databases that can provide installed packages. data PackageDatabase = GlobalPkgDb -- ^ GHC's global package database. | UserPkgDb InstalledPackageLocation (Path Abs Dir) -- ^ A user package database. deriving (Eq, Show) -- | A function to yield the variety of package database for a given -- package database that can provide installed packages. toPackageDbVariety :: PackageDatabase -> PackageDbVariety toPackageDbVariety GlobalPkgDb = GlobalDb toPackageDbVariety (UserPkgDb ExtraPkgDb _) = ExtraDb toPackageDbVariety (UserPkgDb (InstalledTo Snap) _) = WriteOnlyDb toPackageDbVariety (UserPkgDb (InstalledTo Local) _) = MutableDb -- | Type representing varieties of package databases that can provide -- installed packages. data PackageDbVariety = GlobalDb -- ^ GHC's global package database. | ExtraDb -- ^ An \'extra\' package database, specified by @extra-package-dbs@. | WriteOnlyDb -- ^ The write-only package database, for immutable packages. | MutableDb -- ^ The mutable package database. deriving (Eq, Show) -- | Type synonym representing dictionaries of package names for a project's -- packages and dependencies, and pairs of their relevant database (write-only -- or mutable) and package versions. 
type InstallMap = Map PackageName (InstallLocation, Version)

-- | Type synonym representing dictionaries of package names, and a pair of in
-- which package database the package is installed (write-only or mutable) and
-- information about what is installed.
type InstalledMap = Map PackageName (InstallLocation, Installed)

-- | The installed id of a library, its license (if any) and the ids of its
-- sub-libraries.
data InstalledLibraryInfo = InstalledLibraryInfo
  { ghcPkgId :: GhcPkgId
  , license :: Maybe (Either SPDX.License License)
  , subLib :: Map StackUnqualCompName GhcPkgId
  }
  deriving (Eq, Show)

-- | Type representing information about what is installed.
data Installed
  = Library PackageIdentifier InstalledLibraryInfo
    -- ^ A library, including its installed package id and, optionally, its
    -- license.
  | Executable PackageIdentifier
    -- ^ An executable.
  deriving (Eq, Show)

-- | Library info with no license and no sub-libraries.
installedLibraryInfoFromGhcPkgId :: GhcPkgId -> InstalledLibraryInfo
installedLibraryInfoFromGhcPkgId ghcPkgId =
  InstalledLibraryInfo ghcPkgId Nothing mempty

simpleInstalledLib ::
     PackageIdentifier
  -> GhcPkgId
  -> Map StackUnqualCompName GhcPkgId
  -> Installed
simpleInstalledLib pkgIdentifier ghcPkgId =
  Library pkgIdentifier . InstalledLibraryInfo ghcPkgId Nothing

-- | Render @-package-id=...@ GHC options for the main library and every
-- sub-library.
installedToPackageIdOpt :: InstalledLibraryInfo -> [String]
installedToPackageIdOpt libInfo =
  M.foldr' (iterator (++)) (pure $ toStr libInfo.ghcPkgId) libInfo.subLib
 where
  toStr ghcPkgId = "-package-id=" <> ghcPkgIdString ghcPkgId
  iterator op ghcPkgId acc = pure (toStr ghcPkgId) `op` acc

installedPackageIdentifier :: Installed -> PackageIdentifier
installedPackageIdentifier (Library pid _) = pid
installedPackageIdentifier (Executable pid) = pid

-- | A strict fold over the 'GhcPkgId' of the given installed package. This will
-- iterate on both sub and main libraries, if any.
foldOnGhcPkgId' ::
     (Maybe StackUnqualCompName -> GhcPkgId -> resT -> resT)
  -> Installed
  -> resT
  -> resT
foldOnGhcPkgId' _ Executable{} res = res
foldOnGhcPkgId' fn (Library _ libInfo) res =
  -- The main library is folded in first (as 'Nothing'), then sub-libraries.
  M.foldrWithKey' (fn . Just) (base res) libInfo.subLib
 where
  base = fn Nothing libInfo.ghcPkgId

-- | Get the installed Version.
installedVersion :: Installed -> Version
installedVersion i =
  let PackageIdentifier _ version = installedPackageIdentifier i
  in  version
stack-2.15.7/src/Stack/Types/IsMutable.hs0000644000000000000000000000062514445120723016267 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}

module Stack.Types.IsMutable
  ( IsMutable (..)
  ) where

import Stack.Prelude

data IsMutable
  = Mutable
  | Immutable
  deriving (Eq, Show)

-- 'Mutable' dominates: combining with 'Mutable' on either side is 'Mutable'.
instance Semigroup IsMutable where
  Mutable <> _ = Mutable
  _ <> Mutable = Mutable
  Immutable <> Immutable = Immutable

instance Monoid IsMutable where
  mempty = Immutable
  mappend = (<>)
stack-2.15.7/src/Stack/Types/LockFileBehavior.hs0000644000000000000000000000251014445120723017545 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}

module Stack.Types.LockFileBehavior
  ( LockFileBehavior (..)
  , readLockFileBehavior
  ) where

import qualified Data.Map as Map
import Options.Applicative ( ReadM )
import qualified Options.Applicative.Types as OA
import qualified RIO.List as List
import Stack.Prelude

-- | How to interact with lock files
data LockFileBehavior
  = LFBReadWrite
    -- ^ Read and write lock files
  | LFBReadOnly
    -- ^ Read lock files, but do not write them
  | LFBIgnore
    -- ^ Entirely ignore lock files
  | LFBErrorOnWrite
    -- ^ Error out on trying to write a lock file. This can be used to
    -- ensure that lock files in a repository already ensure
    -- reproducible builds.
-- (deriving clause of the 'LockFileBehavior' data declaration)
  deriving (Bounded, Enum, Show)

-- | Parser for 'LockFileBehavior'
readLockFileBehavior :: ReadM LockFileBehavior
readLockFileBehavior = do
  s <- OA.readerAsk
  case Map.lookup s m of
    Just x -> pure x
    Nothing -> OA.readerError $
      "Invalid lock file behavior, valid options: "
      ++ List.intercalate ", " (Map.keys m)
 where
  -- Table built from every constructor via Bounded/Enum.
  m = Map.fromList $ map (\x -> (render x, x)) [minBound..maxBound]
  render LFBReadWrite = "read-write"
  render LFBReadOnly = "read-only"
  render LFBIgnore = "ignore"
  render LFBErrorOnWrite = "error-on-write"
stack-2.15.7/src/Stack/Types/NamedComponent.hs0000644000000000000000000001025314604306201017301 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}

-- | Module exporting the 'NamedComponent' type and related functions.
module Stack.Types.NamedComponent
  ( NamedComponent (..)
  , renderComponent
  , renderComponentTo
  , renderPkgComponents
  , renderPkgComponent
  , exeComponents
  , testComponents
  , benchComponents
  , subLibComponents
  , isCLib
  , isCSubLib
  , isCExe
  , isCTest
  , isCBench
  , isPotentialDependency
  , splitComponents
  ) where

import qualified Data.Set as Set
import qualified Data.Text as T
import Stack.Prelude

-- | Type representing components of a fully-resolved Cabal package.
data NamedComponent
  = CLib
    -- The \'main\' unnamed library component.
  | CSubLib !Text
    -- A named \'subsidiary\' or \'ancillary\` library component (sub-library).
  | CFlib !Text
    -- A foreign library.
  | CExe !Text
    -- A named executable component.
  | CTest !Text
    -- A named test-suite component.
  | CBench !Text
    -- A named benchmark component.
  deriving (Eq, Ord, Show)

-- | Render a component to anything with an "IsString" instance. For 'Text'
-- prefer 'renderComponent'.
renderComponentTo :: IsString a => NamedComponent -> a
renderComponentTo = fromString . T.unpack . renderComponent

renderComponent :: NamedComponent -> Text
renderComponent CLib = "lib"
renderComponent (CSubLib x) = "sub-lib:" <> x
renderComponent (CFlib x) = "flib:" <> x
renderComponent (CExe x) = "exe:" <> x
renderComponent (CTest x) = "test:" <> x
renderComponent (CBench x) = "bench:" <> x

renderPkgComponents :: [(PackageName, NamedComponent)] -> Text
renderPkgComponents = T.intercalate " " . map renderPkgComponent

renderPkgComponent :: (PackageName, NamedComponent) -> Text
renderPkgComponent (pkg, comp) =
  fromPackageName pkg <> ":" <> renderComponent comp

-- | Names of the executable components in the set.
exeComponents :: Set NamedComponent -> Set Text
exeComponents = Set.fromList . mapMaybe mExeName . Set.toList
 where
  mExeName (CExe name) = Just name
  mExeName _ = Nothing

-- | Names of the test-suite components in the set.
testComponents :: Set NamedComponent -> Set Text
testComponents = Set.fromList . mapMaybe mTestName . Set.toList
 where
  mTestName (CTest name) = Just name
  mTestName _ = Nothing

-- | Names of the benchmark components in the set.
benchComponents :: Set NamedComponent -> Set Text
benchComponents = Set.fromList . mapMaybe mBenchName . Set.toList
 where
  mBenchName (CBench name) = Just name
  mBenchName _ = Nothing

-- | Names of the sub-library components in the set.
subLibComponents :: Set NamedComponent -> Set Text
subLibComponents = Set.fromList . mapMaybe mSubLibName . Set.toList
 where
  mSubLibName (CSubLib name) = Just name
  mSubLibName _ = Nothing

isCLib :: NamedComponent -> Bool
isCLib CLib{} = True
isCLib _ = False

isCSubLib :: NamedComponent -> Bool
isCSubLib CSubLib{} = True
isCSubLib _ = False

isCExe :: NamedComponent -> Bool
isCExe CExe{} = True
isCExe _ = False

isCTest :: NamedComponent -> Bool
isCTest CTest{} = True
isCTest _ = False

isCBench :: NamedComponent -> Bool
isCBench CBench{} = True
isCBench _ = False

isPotentialDependency :: NamedComponent -> Bool
isPotentialDependency v = isCLib v || isCSubLib v || isCExe v

-- | A function to split the given list of components into sets of the names of
-- the named components by the type of component (sub-libraries, executables,
-- test-suites, benchmarks), ignoring any 'main' unnamed library component or
-- foreign library component. This function should be used very sparingly; more
-- often than not, you can keep/parse the components split from the start.
splitComponents ::
     [NamedComponent]
  -> ( Set Text
       -- ^ Sub-libraries.
     , Set Text
       -- ^ Executables.
     , Set Text
       -- ^ Test-suites.
     , Set Text
       -- ^ Benchmarks.
     )
splitComponents = go id id id id
 where
  -- Accumulators are difference lists; 'run' materialises one into a set.
  run c = Set.fromList $ c []
  go s e t b [] = (run s, run e, run t, run b)
  go s e t b (CLib : xs) = go s e t b xs
  go s e t b (CSubLib x : xs) = go (s . (x:)) e t b xs
  -- Ignore foreign libraries, for now.
  go s e t b (CFlib _ : xs) = go s e t b xs
  go s e t b (CExe x : xs) = go s (e . (x:)) t b xs
  go s e t b (CTest x : xs) = go s e (t . (x:)) b xs
  go s e t b (CBench x : xs) = go s e t (b . (x:)) xs
stack-2.15.7/src/Stack/Types/Nix.hs0000644000000000000000000000752414620153446015144 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE NoFieldSelectors #-}
{-# LANGUAGE OverloadedStrings #-}

-- | Nix types.
module Stack.Types.Nix
  ( NixOpts (..)
  , NixOptsMonoid (..)
-- (export list of Stack.Types.Nix, continued)
  , nixAddGCRootsArgName
  , nixEnableArgName
  , nixInitFileArgName
  , nixPackagesArgName
  , nixPathArgName
  , nixPureShellArgName
  , nixShellOptsArgName
  ) where

import Data.Aeson.Types ( FromJSON (..) )
import Data.Aeson.WarningParser
  ( WithJSONWarnings, (..:?), withObjectWarnings )
import Generics.Deriving.Monoid ( mappenddefault, memptydefault )
import Stack.Prelude

-- | Nix configuration. Parameterize by resolver type to avoid cyclic
-- dependency.
data NixOpts = NixOpts
  { enable :: !Bool
  , pureShell :: !Bool
  , packages :: ![Text]
    -- ^ The system packages to be installed in the environment before it runs
  , initFile :: !(Maybe FilePath)
    -- ^ The path of a file containing preconfiguration of the environment
    -- (e.g shell.nix)
  , shellOptions :: ![Text]
    -- ^ Options to be given to the nix-shell command line
  , addGCRoots :: !Bool
    -- ^ Should we register gc roots so running nix-collect-garbage doesn't
    -- remove nix dependencies
  }
  deriving Show

-- | An uninterpreted representation of nix options.
-- Configurations may be "cascaded" using mappend (left-biased).
data NixOptsMonoid = NixOptsMonoid
  { enable :: !(First Bool)
    -- ^ Is using nix-shell enabled?
  , pureShell :: !(First Bool)
    -- ^ Should the nix-shell be pure
  , packages :: !(First [Text])
    -- ^ System packages to use (given to nix-shell)
  , initFile :: !(First FilePath)
    -- ^ The path of a file containing preconfiguration of the environment (e.g
    -- shell.nix)
  , shellOptions :: !(First [Text])
    -- ^ Options to be given to the nix-shell command line
  , path :: !(First [Text])
    -- ^ Override parts of NIX_PATH (notably 'nixpkgs')
  , addGCRoots :: !FirstFalse
    -- ^ Should we register gc roots so running nix-collect-garbage doesn't
    -- remove nix dependencies
  }
  deriving (Eq, Generic, Show)

-- | Decode uninterpreted nix options from JSON/YAML.
instance FromJSON (WithJSONWarnings NixOptsMonoid) where
  parseJSON = withObjectWarnings "NixOptsMonoid" $ \o -> do
    enable <- First <$> o ..:? nixEnableArgName
    pureShell <- First <$> o ..:? nixPureShellArgName
    packages <- First <$> o ..:? nixPackagesArgName
    initFile <- First <$> o ..:? nixInitFileArgName
    shellOptions <- First <$> o ..:? nixShellOptsArgName
    path <- First <$> o ..:? nixPathArgName
    addGCRoots <- FirstFalse <$> o ..:? nixAddGCRootsArgName
    pure NixOptsMonoid
      { enable
      , pureShell
      , packages
      , initFile
      , shellOptions
      , path
      , addGCRoots
      }

-- | Left-biased combine Nix options
instance Semigroup NixOptsMonoid where
  (<>) = mappenddefault

-- | Left-biased combine Nix options
instance Monoid NixOptsMonoid where
  mempty = memptydefault
  mappend = (<>)

-- | Nix enable argument name.
nixEnableArgName :: Text
nixEnableArgName = "enable"

-- | Nix run in pure shell argument name.
nixPureShellArgName :: Text
nixPureShellArgName = "pure"

-- | Nix packages (build inputs) argument name.
nixPackagesArgName :: Text
nixPackagesArgName = "packages"

-- | shell.nix file path argument name.
nixInitFileArgName :: Text
nixInitFileArgName = "shell-file"

-- | Extra options for the nix-shell command argument name.
nixShellOptsArgName :: Text
nixShellOptsArgName = "nix-shell-options"

-- | NIX_PATH override argument name
nixPathArgName :: Text
nixPathArgName = "path"

-- | Add GC roots arg name
nixAddGCRootsArgName :: Text
nixAddGCRootsArgName = "add-gc-roots"
stack-2.15.7/src/Stack/Types/Package.hs0000644000000000000000000003723014620153446015742 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE NoFieldSelectors #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE OverloadedStrings #-}

module Stack.Types.Package
  ( BioInput (..)
  , BuildInfoOpts (..)
  , ExeName (..)
  , FileCacheInfo (..)
  , InstallLocation (..)
  , Installed (..)
  , InstalledLibraryInfo (..)
  , InstalledPackageLocation (..)
  , LocalPackage (..)
  , MemoizedWith (..)
  , Package (..)
  , PackageConfig (..)
  , PackageDatabase (..)
  , PackageDbVariety (..)
  , PackageException (..)
  , PackageSource (..)
, dotCabalCFilePath , dotCabalGetPath , dotCabalMain , dotCabalMainPath , dotCabalModule , dotCabalModulePath , installedMapGhcPkgId , lpFiles , lpFilesForComponents , memoizeRefWith , packageDefinedFlags , packageIdentifier , psVersion , runMemoizedWith , simpleInstalledLib , toCabalMungedPackageName , toPackageDbVariety ) where import Data.Aeson ( ToJSON (..), FromJSON (..), (.=), (.:), object, withObject ) import qualified Data.Map as M import qualified Data.Set as Set import Distribution.CabalSpecVersion import Distribution.Parsec ( PError (..), PWarning (..), showPos ) import qualified Distribution.SPDX.License as SPDX import Distribution.License ( License ) import Distribution.ModuleName ( ModuleName ) import Distribution.PackageDescription ( BuildType ) import Distribution.System ( Platform (..) ) import Distribution.Types.MungedPackageName ( encodeCompatPackageName ) import qualified RIO.Text as T import Stack.Prelude import Stack.Types.CompCollection ( CompCollection ) import Stack.Types.Compiler ( ActualCompiler ) import Stack.Types.Component ( StackBenchmark, StackBuildInfo, StackExecutable , StackForeignLibrary, StackLibrary, StackTestSuite , StackUnqualCompName ) import Stack.Types.ComponentUtils (toCabalName) import Stack.Types.Dependency ( DepValue ) import Stack.Types.EnvConfig ( EnvConfig, HasEnvConfig (..) ) import Stack.Types.GhcPkgId ( GhcPkgId ) import Stack.Types.Installed ( InstallLocation (..), InstallMap, Installed (..) , InstalledLibraryInfo (..), InstalledMap , InstalledPackageLocation (..), PackageDatabase (..) , PackageDbVariety(..), simpleInstalledLib , toPackageDbVariety ) import Stack.Types.NamedComponent ( NamedComponent ) import Stack.Types.PackageFile ( DotCabalDescriptor (..), DotCabalPath (..) , StackPackageFile ) import Stack.Types.SourceMap ( CommonPackage, FromSnapshot ) -- | Type representing exceptions thrown by functions exported by the -- "Stack.Package" module. 
data PackageException = PackageInvalidCabalFile !(Either PackageIdentifierRevision (Path Abs File)) !(Maybe Version) ![PError] ![PWarning] | MismatchedCabalIdentifier !PackageIdentifierRevision !PackageIdentifier | CabalFileNameParseFail FilePath | CabalFileNameInvalidPackageName FilePath | ComponentNotParsedBug String deriving (Show, Typeable) instance Exception PackageException where displayException (PackageInvalidCabalFile loc _mversion errs warnings) = concat [ "Error: [S-8072]\n" , "Unable to parse Cabal file " , case loc of Left pir -> "for " ++ T.unpack (utf8BuilderToText (display pir)) Right fp -> toFilePath fp {- Not actually needed, the errors will indicate if a newer version exists. Also, it seems that this is set to Just the version even if we support it. , case mversion of Nothing -> "" Just version -> "\nRequires newer Cabal file parser version: " ++ versionString version -} , "\n\n" , unlines $ map (\(PError pos msg) -> concat [ "- " , showPos pos , ": " , msg ]) errs , unlines $ map (\(PWarning _ pos msg) -> concat [ "- " , showPos pos , ": " , msg ]) warnings ] displayException (MismatchedCabalIdentifier pir ident) = concat [ "Error: [S-5394]\n" , "Mismatched package identifier." , "\nFound: " , packageIdentifierString ident , "\nExpected: " , T.unpack $ utf8BuilderToText $ display pir ] displayException (CabalFileNameParseFail fp) = concat [ "Error: [S-2203]\n" , "Invalid file path for Cabal file, must have a .cabal extension: " , fp ] displayException (CabalFileNameInvalidPackageName fp) = concat [ "Error: [S-8854]\n" , "Cabal file names must use valid package names followed by a .cabal \ \extension, the following is invalid: " , fp ] displayException (ComponentNotParsedBug name) = bugReport "[S-4623]" ( "Component names should always parse as directory names. The component \ \name without a directory is '" <> name <> "'." ) -- | Name of an executable. 
newtype ExeName = ExeName { exeName :: Text } deriving (Data, Eq, Generic, Hashable, IsString, NFData, Ord, Show, Typeable) -- | Some package info. data Package = Package { name :: !PackageName -- ^ Name of the package. , version :: !Version -- ^ Version of the package , license :: !(Either SPDX.License License) -- ^ The license the package was released under. , ghcOptions :: ![Text] -- ^ Ghc options used on package. , cabalConfigOpts :: ![Text] -- ^ Additional options passed to ./Setup.hs configure , flags :: !(Map FlagName Bool) -- ^ Flags used on package. , defaultFlags :: !(Map FlagName Bool) -- ^ Defaults for unspecified flags. , library :: !(Maybe StackLibrary) -- ^ Does the package have a buildable main library stanza? , subLibraries :: !(CompCollection StackLibrary) -- ^ The sub-libraries of the package. , foreignLibraries :: !(CompCollection StackForeignLibrary) -- ^ The foreign libraries of the package. , testSuites :: !(CompCollection StackTestSuite) -- ^ The test suites of the package. , benchmarks :: !(CompCollection StackBenchmark) -- ^ The benchmarks of the package. , executables :: !(CompCollection StackExecutable) -- ^ The executables of the package. , buildType :: !BuildType -- ^ Package build-type. , setupDeps :: !(Maybe (Map PackageName DepValue)) -- ^ If present: custom-setup dependencies , cabalSpec :: !CabalSpecVersion -- ^ Cabal spec range , file :: StackPackageFile -- ^ The Cabal sourced files related to the package at the package level -- The components may have file information in their own types , testEnabled :: Bool -- ^ This is a requirement because when tests are not enabled, Stack's -- package dependencies should ignore test dependencies. Directly set from -- 'packageConfigEnableTests'. , benchmarkEnabled :: Bool -- ^ This is a requirement because when benchmark are not enabled, Stack's -- package dependencies should ignore benchmark dependencies. Directly set -- from 'packageConfigEnableBenchmarks'. 
} deriving (Show, Typeable) packageIdentifier :: Package -> PackageIdentifier packageIdentifier p = PackageIdentifier p.name p.version packageDefinedFlags :: Package -> Set FlagName packageDefinedFlags = M.keysSet . (.defaultFlags) -- | GHC options based on cabal information and ghc-options. data BuildInfoOpts = BuildInfoOpts { opts :: [String] , oneWordOpts :: [String] , packageFlags :: [String] -- ^ These options can safely have 'nubOrd' applied to them, as there are no -- multi-word options (see -- https://github.com/commercialhaskell/stack/issues/1255) , cabalMacros :: Path Abs File } deriving Show -- | Package build configuration data PackageConfig = PackageConfig { enableTests :: !Bool -- ^ Are tests enabled? , enableBenchmarks :: !Bool -- ^ Are benchmarks enabled? , flags :: !(Map FlagName Bool) -- ^ Configured flags. , ghcOptions :: ![Text] -- ^ Configured ghc options. , cabalConfigOpts :: ![Text] -- ^ ./Setup.hs configure options , compilerVersion :: ActualCompiler -- ^ GHC version , platform :: !Platform -- ^ host platform } deriving (Show, Typeable) -- | Compares the package name. instance Ord Package where compare = on compare (.name) -- | Compares the package name. instance Eq Package where (==) = on (==) (.name) -- | Where the package's source is located: local directory or package index data PackageSource = PSFilePath LocalPackage -- ^ Package which exist on the filesystem | PSRemote PackageLocationImmutable Version FromSnapshot CommonPackage -- ^ Package which is downloaded remotely. instance Show PackageSource where show (PSFilePath lp) = concat ["PSFilePath (", show lp, ")"] show (PSRemote pli v fromSnapshot _) = concat [ "PSRemote" , "(", show pli, ")" , "(", show v, ")" , show fromSnapshot , "" ] psVersion :: PackageSource -> Version psVersion (PSFilePath lp) = lp.package.version psVersion (PSRemote _ v _ _) = v -- | Information on a locally available package of source code. 
data LocalPackage = LocalPackage { package :: !Package -- ^ The @Package@ info itself, after resolution with package flags, with -- tests and benchmarks disabled , components :: !(Set NamedComponent) -- ^ Components to build, not including the library component. , unbuildable :: !(Set NamedComponent) -- ^ Components explicitly requested for build, that are marked -- "buildable: false". , wanted :: !Bool -- FIXME Should completely drop this "wanted" -- terminology, it's unclear -- ^ Whether this package is wanted as a target. , testBench :: !(Maybe Package) -- ^ This stores the 'Package' with tests and benchmarks enabled, if either -- is asked for by the user. , cabalFP :: !(Path Abs File) -- ^ Absolute path to the Cabal file. , buildHaddocks :: !Bool -- ^ Is Haddock documentation being built for this package? , forceDirty :: !Bool , dirtyFiles :: !(MemoizedWith EnvConfig (Maybe (Set FilePath))) -- ^ Nothing == not dirty, Just == dirty. Note that the Set may be empty if -- we forced the build to treat packages as dirty. Also, the Set may not -- include all modified files. 
, newBuildCaches :: !( MemoizedWith EnvConfig (Map NamedComponent (Map FilePath FileCacheInfo)) ) -- ^ current state of the files , componentFiles :: !( MemoizedWith EnvConfig (Map NamedComponent (Set (Path Abs File))) ) -- ^ all files used by this package } deriving Show newtype MemoizedWith env a = MemoizedWith { memoizedWith :: RIO env a } deriving (Applicative, Functor, Monad) memoizeRefWith :: MonadIO m => RIO env a -> m (MemoizedWith env a) memoizeRefWith action = do ref <- newIORef Nothing pure $ MemoizedWith $ do mres <- readIORef ref res <- case mres of Just res -> pure res Nothing -> do res <- tryAny action writeIORef ref $ Just res pure res either throwIO pure res runMemoizedWith :: (HasEnvConfig env, MonadReader env m, MonadIO m) => MemoizedWith EnvConfig a -> m a runMemoizedWith (MemoizedWith action) = do envConfig <- view envConfigL runRIO envConfig action instance Show (MemoizedWith env a) where show _ = "<>" lpFiles :: HasEnvConfig env => LocalPackage -> RIO env (Set.Set (Path Abs File)) lpFiles = runMemoizedWith . fmap (Set.unions . M.elems) . (.componentFiles) lpFilesForComponents :: HasEnvConfig env => Set NamedComponent -> LocalPackage -> RIO env (Set.Set (Path Abs File)) lpFilesForComponents components lp = runMemoizedWith $ do componentFiles <- lp.componentFiles pure $ mconcat (M.elems (M.restrictKeys componentFiles components)) newtype FileCacheInfo = FileCacheInfo { hash :: SHA256 } deriving (Eq, Generic, Show, Typeable) instance NFData FileCacheInfo -- Provided for storing the BuildCache values in a file. But maybe JSON/YAML -- isn't the right choice here, worth considering. instance ToJSON FileCacheInfo where toJSON (FileCacheInfo hash') = object [ "hash" .= hash' ] instance FromJSON FileCacheInfo where parseJSON = withObject "FileCacheInfo" $ \o -> FileCacheInfo <$> o .: "hash" -- | Maybe get the module name from the .cabal descriptor. 
dotCabalModule :: DotCabalDescriptor -> Maybe ModuleName dotCabalModule (DotCabalModule m) = Just m dotCabalModule _ = Nothing -- | Maybe get the main name from the .cabal descriptor. dotCabalMain :: DotCabalDescriptor -> Maybe FilePath dotCabalMain (DotCabalMain m) = Just m dotCabalMain _ = Nothing -- | Get the module path. dotCabalModulePath :: DotCabalPath -> Maybe (Path Abs File) dotCabalModulePath (DotCabalModulePath fp) = Just fp dotCabalModulePath _ = Nothing -- | Get the main path. dotCabalMainPath :: DotCabalPath -> Maybe (Path Abs File) dotCabalMainPath (DotCabalMainPath fp) = Just fp dotCabalMainPath _ = Nothing -- | Get the c file path. dotCabalCFilePath :: DotCabalPath -> Maybe (Path Abs File) dotCabalCFilePath (DotCabalCFilePath fp) = Just fp dotCabalCFilePath _ = Nothing -- | Get the path. dotCabalGetPath :: DotCabalPath -> Path Abs File dotCabalGetPath dcp = case dcp of DotCabalModulePath fp -> fp DotCabalMainPath fp -> fp DotCabalFilePath fp -> fp DotCabalCFilePath fp -> fp -- | Gathers all the GhcPkgId provided by a library into a map installedMapGhcPkgId :: PackageIdentifier -> InstalledLibraryInfo -> Map PackageIdentifier GhcPkgId installedMapGhcPkgId pkgId@(PackageIdentifier pkgName version) installedLib = finalMap where finalMap = M.insert pkgId installedLib.ghcPkgId baseMap baseMap = M.mapKeysMonotonic (toCabalMungedPackageIdentifier pkgName version) installedLib.subLib -- | Creates a 'MungedPackageName' identifier. toCabalMungedPackageIdentifier :: PackageName -> Version -> StackUnqualCompName -> PackageIdentifier toCabalMungedPackageIdentifier pkgName version = flip PackageIdentifier version . encodeCompatPackageName . toCabalMungedPackageName pkgName toCabalMungedPackageName :: PackageName -> StackUnqualCompName -> MungedPackageName toCabalMungedPackageName pkgName = MungedPackageName pkgName . LSubLibName . toCabalName -- | Type representing inputs to 'Stack.Package.generateBuildInfoOpts'. 
data BioInput = BioInput { installMap :: !InstallMap , installedMap :: !InstalledMap , cabalDir :: !(Path Abs Dir) , distDir :: !(Path Abs Dir) , omitPackages :: ![PackageName] , addPackages :: ![PackageName] , buildInfo :: !StackBuildInfo , dotCabalPaths :: ![DotCabalPath] , configLibDirs :: ![FilePath] , configIncludeDirs :: ![FilePath] , componentName :: !NamedComponent , cabalVersion :: !Version } stack-2.15.7/src/Stack/Types/PackageFile.hs0000644000000000000000000001060414604306201016525 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} -- | The facility for retrieving all files from the main Stack -- 'Stack.Types.Package' type. This was moved into its own module to allow -- component-level file-gathering without circular dependency at the Package -- level. module Stack.Types.PackageFile ( GetPackageFileContext (..) , DotCabalPath (..) , DotCabalDescriptor (..) , PackageWarning (..) , StackPackageFile (..) , PackageComponentFile (..) ) where import Distribution.ModuleName ( ModuleName ) import RIO.Process ( HasProcessContext (..) ) import Stack.Prelude import Stack.Types.BuildConfig ( BuildConfig (..), HasBuildConfig (..) ) import Stack.Types.Config ( HasConfig (..) ) import Stack.Types.GHCVariant ( HasGHCVariant (..) ) import Stack.Types.NamedComponent ( NamedComponent ) import Stack.Types.Platform ( HasPlatform (..) ) import Stack.Types.Runner ( HasRunner (..) ) data GetPackageFileContext = GetPackageFileContext { file :: !(Path Abs File) , distDir :: !(Path Abs Dir) , buildConfig :: !BuildConfig , cabalVer :: !Version } instance HasPlatform GetPackageFileContext where platformL = configL . platformL {-# INLINE platformL #-} platformVariantL = configL . platformVariantL {-# INLINE platformVariantL #-} instance HasGHCVariant GetPackageFileContext where ghcVariantL = configL . 
ghcVariantL {-# INLINE ghcVariantL #-} instance HasLogFunc GetPackageFileContext where logFuncL = configL . logFuncL instance HasRunner GetPackageFileContext where runnerL = configL . runnerL instance HasStylesUpdate GetPackageFileContext where stylesUpdateL = runnerL . stylesUpdateL instance HasTerm GetPackageFileContext where useColorL = runnerL . useColorL termWidthL = runnerL . termWidthL instance HasConfig GetPackageFileContext where configL = buildConfigL . lens (.config) (\x y -> x { config = y }) {-# INLINE configL #-} instance HasBuildConfig GetPackageFileContext where buildConfigL = lens (.buildConfig) (\x y -> x { buildConfig = y }) instance HasPantryConfig GetPackageFileContext where pantryConfigL = configL . pantryConfigL instance HasProcessContext GetPackageFileContext where processContextL = configL . processContextL -- | A path resolved from the Cabal file, which is either main-is or -- an exposed/internal/referenced module. data DotCabalPath = DotCabalModulePath !(Path Abs File) | DotCabalMainPath !(Path Abs File) | DotCabalFilePath !(Path Abs File) | DotCabalCFilePath !(Path Abs File) deriving (Eq, Ord, Show) -- | A descriptor from a Cabal file indicating one of the following: -- -- exposed-modules: Foo -- other-modules: Foo -- or -- main-is: Foo.hs -- data DotCabalDescriptor = DotCabalModule !ModuleName | DotCabalMain !FilePath | DotCabalFile !FilePath | DotCabalCFile !FilePath deriving (Eq, Ord, Show) -- | Warning generated when reading a package data PackageWarning = UnlistedModulesWarning NamedComponent [ModuleName] -- ^ Modules found that are not listed in Cabal file -- TODO: bring this back - see -- https://github.com/commercialhaskell/stack/issues/2649 {- | MissingModulesWarning (Path Abs File) (Maybe String) [ModuleName] -- ^ Modules not found in file system, which are listed in Cabal file -} -- | This is the information from Cabal we need at the package level to track -- files. 
data StackPackageFile = StackPackageFile { extraSrcFiles :: [FilePath] , dataDir :: FilePath , dataFiles :: [FilePath] } deriving (Show, Typeable) -- | Files that the package depends on, relative to package directory. data PackageComponentFile = PackageComponentFile { modulePathMap :: Map NamedComponent (Map ModuleName (Path Abs File)) , cabalFileMap :: !(Map NamedComponent [DotCabalPath]) , packageExtraFile :: Set (Path Abs File) , warnings :: [PackageWarning] } instance Semigroup PackageComponentFile where PackageComponentFile x1 x2 x3 x4 <> PackageComponentFile y1 y2 y3 y4 = PackageComponentFile (x1 <> y1) (x2 <> y2) (x3 <> y3) (x4 <> y4) instance Monoid PackageComponentFile where mempty = PackageComponentFile mempty mempty mempty mempty stack-2.15.7/src/Stack/Types/PackageName.hs0000644000000000000000000000235414604306201016531 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Names for packages. module Stack.Types.PackageName ( packageNameArgument ) where import qualified Options.Applicative as O import Stack.Prelude -- | An argument which accepts a template name of the format -- @foo.hsfiles@. packageNameArgument :: O.Mod O.ArgumentFields PackageName -> O.Parser PackageName packageNameArgument = O.argument (do s <- O.str either O.readerError pure (p s)) where p s = case parsePackageName s of Just x -> Right x Nothing -> Left $ unlines [ "Expected a package name acceptable to Cabal, but got: " ++ s ++ "\n" , "An acceptable package name comprises an alphanumeric 'word'; or \ \two or more" , "such words, with the words separated by a hyphen/minus character ('-'). A \ \word" , "cannot be comprised only of the characters '0' to '9'. \n" , "An alphanumeric character is one in one of the Unicode Letter \ \categories" , "(Lu (uppercase), Ll (lowercase), Lt (titlecase), Lm (modifier), or \ \Lo (other))" , "or Number categories (Nd (decimal), Nl (letter), or No (other))." 
] stack-2.15.7/src/Stack/Types/ParentMap.hs0000644000000000000000000000111114604306201016252 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Module exporting the 'ParentMap' type synonym. module Stack.Types.ParentMap ( ParentMap ) where import Data.Monoid.Map ( MonoidMap (..) ) import Stack.Prelude import Stack.Types.Version ( VersionRange ) -- | Type synonym representing dictionaries of package names, and a list of -- pairs of the identifier of a package depending on the package and the -- version range specified for the dependency by that package. type ParentMap = MonoidMap PackageName [(PackageIdentifier, VersionRange)] stack-2.15.7/src/Stack/Types/Platform.hs0000644000000000000000000000256214445120723016170 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Types.Platform ( PlatformVariant (..) , HasPlatform (..) , platformVariantSuffix , platformOnlyRelDir ) where import Distribution.System ( Platform ) import Distribution.Text ( display ) import Lens.Micro ( _1, _2 ) import Path ( parseRelDir ) import Stack.Prelude -- | A variant of the platform, used to differentiate Docker builds from host data PlatformVariant = PlatformVariantNone | PlatformVariant String -- | Class for environment values which have a Platform class HasPlatform env where platformL :: Lens' env Platform platformVariantL :: Lens' env PlatformVariant instance HasPlatform (Platform, PlatformVariant) where platformL = _1 platformVariantL = _2 -- | Render a platform variant to a String suffix. 
platformVariantSuffix :: PlatformVariant -> String platformVariantSuffix PlatformVariantNone = "" platformVariantSuffix (PlatformVariant v) = "-" ++ v -- | Relative directory for the platform identifier platformOnlyRelDir :: (MonadReader env m, HasPlatform env, MonadThrow m) => m (Path Rel Dir) platformOnlyRelDir = do platform <- view platformL platformVariant <- view platformVariantL parseRelDir ( Distribution.Text.display platform ++ platformVariantSuffix platformVariant ) stack-2.15.7/src/Stack/Types/Project.hs0000644000000000000000000000475414620153446016022 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.Project ( Project (..) ) where import Data.Aeson.Types ( ToJSON (..), (.=), object ) import qualified Data.Map as Map import qualified Data.Set as Set import Stack.Prelude import Stack.Types.Curator ( Curator ) -- | A project is a collection of packages. We can have multiple stack.yaml -- files, but only one of them may contain project information. data Project = Project { userMsg :: !(Maybe String) -- ^ A warning message to display to the user when the auto generated -- config may have issues. , packages :: ![RelFilePath] -- ^ Packages which are actually part of the project (as opposed -- to dependencies). , extraDeps :: ![RawPackageLocation] -- ^ Dependencies defined within the stack.yaml file, to be applied on top -- of the snapshot. , flagsByPkg :: !(Map PackageName (Map FlagName Bool)) -- ^ Flags to be applied on top of the snapshot flags. , resolver :: !RawSnapshotLocation -- ^ How we resolve which @Snapshot@ to use , compiler :: !(Maybe WantedCompiler) -- ^ Override the compiler in 'projectResolver' , extraPackageDBs :: ![FilePath] , curator :: !(Maybe Curator) -- ^ Extra configuration intended exclusively for usage by the curator tool. -- In other words, this is /not/ part of the documented and exposed Stack -- API. 
SUBJECT TO CHANGE. , dropPackages :: !(Set PackageName) -- ^ Packages to drop from the 'projectResolver'. } deriving Show instance ToJSON Project where -- Expanding the constructor fully to ensure we don't miss any fields. toJSON project = object $ concat [ maybe [] (\cv -> ["compiler" .= cv]) project.compiler , maybe [] (\msg -> ["user-message" .= msg]) project.userMsg , [ "extra-package-dbs" .= project.extraPackageDBs | not (null project.extraPackageDBs) ] , [ "extra-deps" .= project.extraDeps | not (null project.extraDeps) ] , [ "flags" .= fmap toCabalStringMap (toCabalStringMap project.flagsByPkg) | not (Map.null project.flagsByPkg) ] , ["packages" .= project.packages] , ["resolver" .= project.resolver] , maybe [] (\c -> ["curator" .= c]) project.curator , [ "drop-packages" .= Set.map CabalString project.dropPackages | not (Set.null project.dropPackages) ] ] stack-2.15.7/src/Stack/Types/ProjectAndConfigMonoid.hs0000644000000000000000000000445114620153446020733 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.ProjectAndConfigMonoid ( ProjectAndConfigMonoid (..) , parseProjectAndConfigMonoid ) where import Data.Aeson.Types ( Value ) import Data.Aeson.WarningParser ( WithJSONWarnings, (...:), (..:?), (..!=), jsonSubWarnings , jsonSubWarningsT, jsonSubWarningsTT, withObjectWarnings ) import qualified Data.Set as Set import qualified Data.Yaml as Yaml import Stack.Prelude import Stack.Types.ConfigMonoid ( ConfigMonoid, parseConfigMonoidObject ) import Stack.Types.Project ( Project (..) ) data ProjectAndConfigMonoid = ProjectAndConfigMonoid !Project !ConfigMonoid parseProjectAndConfigMonoid :: Path Abs Dir -> Value -> Yaml.Parser (WithJSONWarnings (IO ProjectAndConfigMonoid)) parseProjectAndConfigMonoid rootDir = withObjectWarnings "ProjectAndConfigMonoid" $ \o -> do packages <- o ..:? "packages" ..!= [RelFilePath "."] deps <- jsonSubWarningsTT (o ..:? "extra-deps") ..!= [] flags' <- o ..:? 
"flags" ..!= mempty let flagsByPkg = unCabalStringMap <$> unCabalStringMap (flags' :: Map (CabalString PackageName) (Map (CabalString FlagName) Bool)) resolver' <- jsonSubWarnings $ o ...: ["snapshot", "resolver"] compiler <- o ..:? "compiler" userMsg <- o ..:? "user-message" config <- parseConfigMonoidObject rootDir o extraPackageDBs <- o ..:? "extra-package-dbs" ..!= [] curator <- jsonSubWarningsT (o ..:? "curator") drops <- o ..:? "drop-packages" ..!= mempty let dropPackages = Set.map unCabalString drops pure $ do deps' <- mapM (resolvePaths (Just rootDir)) deps let extraDeps = concatMap toList (deps' :: [NonEmpty RawPackageLocation]) resolver <- resolvePaths (Just rootDir) resolver' let project = Project { userMsg , resolver , compiler -- FIXME make sure resolver' isn't SLCompiler , extraPackageDBs , packages , extraDeps , flagsByPkg , curator , dropPackages } pure $ ProjectAndConfigMonoid project config stack-2.15.7/src/Stack/Types/ProjectConfig.hs0000644000000000000000000000157614502056214017141 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Types.ProjectConfig ( ProjectConfig (..) , isPCGlobalProject ) where import Stack.Prelude -- | Project configuration information. Not every run of Stack has a -- true local project; see constructors below. data ProjectConfig a = PCProject a -- ^ Normal run: we want a project, and have one. This comes from -- either 'SYLDefault' or 'SYLOverride'. | PCGlobalProject -- ^ No project was found when using 'SYLDefault'. Instead, use -- the implicit global. | PCNoProject ![PackageIdentifierRevision] -- ^ Use a no project run. This comes from 'SYLNoProject'. -- | Yields 'True' only if the project configuration information is for the -- implicit global project. 
isPCGlobalProject :: ProjectConfig a -> Bool isPCGlobalProject PCGlobalProject = True isPCGlobalProject _ = False stack-2.15.7/src/Stack/Types/PvpBounds.hs0000644000000000000000000000317014502056214016315 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.PvpBounds ( PvpBounds (..) , PvpBoundsType (..) , pvpBoundsText , parsePvpBounds ) where import Data.Aeson.Types ( FromJSON (..), ToJSON (..), withText ) import qualified Data.Map as Map import qualified Data.Text as T import Stack.Prelude -- | How PVP bounds should be added to .cabal files data PvpBoundsType = PvpBoundsNone | PvpBoundsUpper | PvpBoundsLower | PvpBoundsBoth deriving (Bounded, Enum, Eq, Ord, Read, Show, Typeable) data PvpBounds = PvpBounds { pbType :: !PvpBoundsType , pbAsRevision :: !Bool } deriving (Eq, Ord, Read, Show, Typeable) pvpBoundsText :: PvpBoundsType -> Text pvpBoundsText PvpBoundsNone = "none" pvpBoundsText PvpBoundsUpper = "upper" pvpBoundsText PvpBoundsLower = "lower" pvpBoundsText PvpBoundsBoth = "both" parsePvpBounds :: Text -> Either String PvpBounds parsePvpBounds t = maybe err Right $ do (t', asRevision) <- case T.break (== '-') t of (x, "") -> Just (x, False) (x, "-revision") -> Just (x, True) _ -> Nothing x <- Map.lookup t' m Just PvpBounds { pbType = x , pbAsRevision = asRevision } where m = Map.fromList $ map (pvpBoundsText &&& id) [minBound..maxBound] err = Left $ "Invalid PVP bounds: " ++ T.unpack t instance ToJSON PvpBounds where toJSON (PvpBounds typ asRevision) = toJSON (pvpBoundsText typ <> (if asRevision then "-revision" else "")) instance FromJSON PvpBounds where parseJSON = withText "PvpBounds" (either fail pure . 
parsePvpBounds) stack-2.15.7/src/Stack/Types/Resolver.hs0000644000000000000000000000723414620153446016211 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} module Stack.Types.Resolver ( AbstractResolver (..) , readAbstractResolver , Snapshots (..) ) where import qualified Data.Aeson.Key as Key import qualified Data.Aeson.KeyMap as KeyMap import Data.Aeson.Types ( FromJSON, parseJSON, withObject, withText ) import Data.Aeson.WarningParser ( (.:) ) import qualified Data.IntMap.Strict as IntMap import qualified Data.Text as T import Data.Text.Read ( decimal ) import Data.Time ( Day ) import Options.Applicative ( ReadM ) import qualified Options.Applicative.Types as OA import Stack.Prelude -- | Type representing exceptions thrown by functions exported by the -- "Stack.Types.Resolver" module. data TypesResolverException = ParseResolverException !Text | FilepathInDownloadedSnapshot !Text deriving (Show, Typeable) instance Exception TypesResolverException where displayException (ParseResolverException t) = concat [ "Error: [S-8787]\n" , "Invalid resolver value: " , T.unpack t , ". Possible valid values include lts-2.12, nightly-YYYY-MM-DD, \ \ghc-7.10.2, and ghcjs-0.1.0_ghc-7.10.2. See \ \https://www.stackage.org/snapshots for a complete list." ] displayException (FilepathInDownloadedSnapshot url) = unlines [ "Error: [S-4865]" , "Downloaded snapshot specified a 'resolver: { location: filepath }' " , "field, but filepaths are not allowed in downloaded snapshots.\n" , "Filepath specified: " ++ T.unpack url ] -- | Either an actual resolver value, or an abstract description of one (e.g., -- latest nightly). data AbstractResolver = ARLatestNightly | ARLatestLTS | ARLatestLTSMajor !Int | ARResolver !RawSnapshotLocation | ARGlobal instance Show AbstractResolver where show = T.unpack . 
utf8BuilderToText . display instance Display AbstractResolver where display ARLatestNightly = "nightly" display ARLatestLTS = "lts" display (ARLatestLTSMajor x) = "lts-" <> display x display (ARResolver usl) = display usl display ARGlobal = "global" readAbstractResolver :: ReadM (Unresolved AbstractResolver) readAbstractResolver = do s <- OA.readerAsk case s of "global" -> pure $ pure ARGlobal "nightly" -> pure $ pure ARLatestNightly "lts" -> pure $ pure ARLatestLTS 'l':'t':'s':'-':x | Right (x', "") <- decimal $ T.pack x -> pure $ pure $ ARLatestLTSMajor x' _ -> pure $ ARResolver <$> parseRawSnapshotLocation (T.pack s) -- | Most recent Nightly and newest LTS version per major release. data Snapshots = Snapshots { nightly :: !Day , lts :: !(IntMap Int) } deriving Show instance FromJSON Snapshots where parseJSON = withObject "Snapshots" $ \o -> Snapshots <$> (o .: "nightly" >>= parseNightly) <*> fmap IntMap.unions (mapM (parseLTS . snd) $ filter (isLTS . Key.toText . fst) $ KeyMap.toList o) where parseNightly t = case parseSnapName t of Left e -> fail $ displayException e Right (LTS _ _) -> fail "Unexpected LTS value" Right (Nightly d) -> pure d isLTS = ("lts-" `T.isPrefixOf`) parseLTS = withText "LTS" $ \t -> case parseSnapName t of Left e -> fail $ displayException e Right (LTS x y) -> pure $ IntMap.singleton x y Right (Nightly _) -> fail "Unexpected nightly value" stack-2.15.7/src/Stack/Types/Runner.hs0000644000000000000000000000606114604306201015645 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} module Stack.Types.Runner ( Runner (..) , HasRunner (..) , HasDockerEntrypointMVar (..) , globalOptsL , stackYamlLocL , lockFileBehaviorL , terminalL , reExecL , rslInLogL ) where import RIO.Process ( HasProcessContext (..), ProcessContext ) import Stack.Prelude hiding ( stylesUpdate ) import Stack.Types.GlobalOpts ( GlobalOpts (..) 
) import Stack.Types.LockFileBehavior ( LockFileBehavior ) import Stack.Types.StackYamlLoc ( StackYamlLoc ) -- | The base environment that almost everything in Stack runs in, based off of -- parsing command line options in 'GlobalOpts'. Provides logging, process -- execution, and the MVar used to ensure that the Docker entrypoint is -- performed exactly once. data Runner = Runner { globalOpts :: !GlobalOpts , useColor :: !Bool , logFunc :: !LogFunc , termWidth :: !Int , processContext :: !ProcessContext , dockerEntrypointMVar :: !(MVar Bool) } instance HasLogFunc Runner where logFuncL = lens (.logFunc) (\x y -> x { logFunc = y }) instance HasProcessContext Runner where processContextL = lens (.processContext) (\x y -> x { processContext = y }) instance HasRunner Runner where runnerL = id instance HasStylesUpdate Runner where stylesUpdateL :: Lens' Runner StylesUpdate stylesUpdateL = globalOptsL . lens (.stylesUpdate) (\x y -> x { stylesUpdate = y }) instance HasTerm Runner where useColorL = lens (.useColor) (\x y -> x { useColor = y }) termWidthL = lens (.termWidth) (\x y -> x { termWidth = y }) instance HasDockerEntrypointMVar Runner where dockerEntrypointMVarL = lens (.dockerEntrypointMVar) (\x y -> x { dockerEntrypointMVar = y }) -- | Class for environment values which have a 'Runner'. class (HasProcessContext env, HasLogFunc env) => HasRunner env where runnerL :: Lens' env Runner -- | Class for environment values which have a Docker entrypoint 'MVar'. class HasRunner env => HasDockerEntrypointMVar env where dockerEntrypointMVarL :: Lens' env (MVar Bool) stackYamlLocL :: HasRunner env => Lens' env StackYamlLoc stackYamlLocL = globalOptsL . lens (.stackYaml) (\x y -> x { stackYaml = y }) lockFileBehaviorL :: HasRunner env => SimpleGetter env LockFileBehavior lockFileBehaviorL = globalOptsL . to (.lockFileBehavior) globalOptsL :: HasRunner env => Lens' env GlobalOpts globalOptsL = runnerL . 
lens (.globalOpts) (\x y -> x { globalOpts = y }) -- | See 'globalTerminal' terminalL :: HasRunner env => Lens' env Bool terminalL = globalOptsL . lens (.terminal) (\x y -> x { terminal = y }) -- | See 'globalReExecVersion' reExecL :: HasRunner env => SimpleGetter env Bool reExecL = globalOptsL . to (isJust . (.reExecVersion)) rslInLogL :: HasRunner env => SimpleGetter env Bool rslInLogL = globalOptsL . to (.rslInLog) stack-2.15.7/src/Stack/Types/SCM.hs0000644000000000000000000000102214502056214015011 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.SCM ( SCM (..) ) where import Data.Aeson.Types ( FromJSON (..), ToJSON (..) ) import Stack.Prelude -- | A software control system. data SCM = Git deriving Show instance FromJSON SCM where parseJSON v = do s <- parseJSON v case s of "git" -> pure Git _ -> fail ("Unknown or unsupported SCM: " <> s) instance ToJSON SCM where toJSON Git = toJSON ("git" :: Text) stack-2.15.7/src/Stack/Types/SetupInfo.hs0000644000000000000000000000455614604306201016317 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ViewPatterns #-} module Stack.Types.SetupInfo ( SetupInfo (..) ) where import Data.Aeson.Types ( FromJSON (..) 
) import Data.Aeson.WarningParser ( WithJSONWarnings, (..:?), (..!=), jsonSubWarningsT , jsonSubWarningsTT, withObjectWarnings ) import qualified Data.Map as Map import Stack.Prelude import Stack.Types.DownloadInfo ( DownloadInfo ) import Stack.Types.VersionedDownloadInfo ( VersionedDownloadInfo ) import Stack.Types.GHCDownloadInfo ( GHCDownloadInfo ) data SetupInfo = SetupInfo { sevenzExe :: Maybe DownloadInfo , sevenzDll :: Maybe DownloadInfo , msys2 :: Map Text VersionedDownloadInfo , ghcByVersion :: Map Text (Map Version GHCDownloadInfo) , stackByVersion :: Map Text (Map Version DownloadInfo) } deriving Show instance FromJSON (WithJSONWarnings SetupInfo) where parseJSON = withObjectWarnings "SetupInfo" $ \o -> do sevenzExe <- jsonSubWarningsT (o ..:? "sevenzexe-info") sevenzDll <- jsonSubWarningsT (o ..:? "sevenzdll-info") msys2 <- jsonSubWarningsT (o ..:? "msys2" ..!= mempty) (fmap unCabalStringMap -> ghcByVersion) <- jsonSubWarningsTT (o ..:? "ghc" ..!= mempty) (fmap unCabalStringMap -> stackByVersion) <- jsonSubWarningsTT (o ..:? "stack" ..!= mempty) pure SetupInfo { sevenzExe , sevenzDll , msys2 , ghcByVersion , stackByVersion } -- | For the @siGHCs@ field maps are deeply merged. For all fields the values -- from the first @SetupInfo@ win. 
instance Semigroup SetupInfo where
  l <> r = SetupInfo
    { -- Simple fields: left-biased choice; the left value wins when both
      -- are present.
      sevenzExe = l.sevenzExe <|> r.sevenzExe
    , sevenzDll = l.sevenzDll <|> r.sevenzDll
    , msys2 = l.msys2 <> r.msys2
      -- Version maps are deeply merged: union per key, and the per-key
      -- values are themselves combined with their own '(<>)'.
    , ghcByVersion = Map.unionWith (<>) l.ghcByVersion r.ghcByVersion
    , stackByVersion = Map.unionWith (<>) l.stackByVersion r.stackByVersion
    }

instance Monoid SetupInfo where
  -- 'mempty' is the all-empty 'SetupInfo'; it is the identity for the
  -- 'Semigroup' merge above.
  mempty = SetupInfo
    { sevenzExe = Nothing
    , sevenzDll = Nothing
    , msys2 = Map.empty
    , ghcByVersion = Map.empty
    , stackByVersion = Map.empty
    }
  mappend = (<>)
stack-2.15.7/src/Stack/Types/SourceMap.hs0000644000000000000000000001666514615747055016317 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE NoFieldSelectors #-}
{-# LANGUAGE OverloadedRecordDot #-}

-- | A sourcemap maps a package name to how it should be built, including source
-- code, flags, options, etc. This module contains various stages of source map
-- construction. See the @build_overview.md@ doc for details on these stages.
module Stack.Types.SourceMap
  ( -- * Different source map types
    SMWanted (..)
  , SMActual (..)
  , Target (..)
  , PackageType (..)
  , SMTargets (..)
  , SourceMap (..)
    -- * Helper types
  , FromSnapshot (..)
  , DepPackage (..)
  , ProjectPackage (..)
  , ppComponents
  , ppComponentsMaybe
  , ppGPD
  , ppRoot
  , ppVersion
  , CommonPackage (..)
  , GlobalPackageVersion (..)
  , GlobalPackage (..)
  , isReplacedGlobal
  , SourceMapHash (..)
  , smRelDir
  ) where

import qualified Data.Set as Set
import qualified Data.Text as T
import Distribution.PackageDescription ( GenericPackageDescription )
import qualified Distribution.PackageDescription as C
import qualified Pantry.SHA256 as SHA256
import Path ( parent, parseRelDir )
import Stack.Prelude
import Stack.Types.Compiler ( ActualCompiler )
import Stack.Types.NamedComponent ( NamedComponent (..) )

-- | Settings common to dependency packages ('Stack.Types.SourceMap.DepPackage')
-- and project packages ('Stack.Types.SourceMap.ProjectPackage').
data CommonPackage = CommonPackage
  { gpd :: !(IO GenericPackageDescription)
    -- ^ Deferred action that loads the package's generic package description.
  , name :: !PackageName
    -- ^ Name of the package.
  , flags :: !(Map FlagName Bool)
    -- ^ overrides default flags
  , ghcOptions :: ![Text]
    -- also lets us know if we're doing profiling
  , cabalConfigOpts :: ![Text]
    -- ^ Extra options passed through to Cabal configuration.
  , buildHaddocks :: !Bool
    -- ^ Should Haddock documentation be built for this package?
  }

-- | Flag showing if package comes from a snapshot needed to ignore dependency
-- bounds between such packages
data FromSnapshot
  = FromSnapshot
  | NotFromSnapshot
  deriving Show

-- | A view of a dependency package, specified in stack.yaml
data DepPackage = DepPackage
  { depCommon :: !CommonPackage
    -- ^ Settings shared with project packages.
  , location :: !PackageLocation
    -- ^ Where the dependency's source comes from.
  , hidden :: !Bool
    -- ^ Should the package be hidden after registering? Affects the script
    -- interpreter's module name import parser.
  , fromSnapshot :: !FromSnapshot
    -- ^ Needed to ignore bounds between snapshot packages
    -- See https://github.com/commercialhaskell/stackage/issues/3185
  }

-- | A view of a project package needed for resolving components
data ProjectPackage = ProjectPackage
  { projectCommon :: !CommonPackage
    -- ^ Settings shared with dependency packages.
  , cabalFP :: !(Path Abs File)
    -- ^ Absolute path to the package's Cabal file.
  , resolvedDir :: !(ResolvedPath Dir)
    -- ^ The package's resolved directory.
  }

-- | A view of a package installed in the global package database also could
-- include marker for a replaced global package (could be replaced because of a
-- replaced dependency)
data GlobalPackage
  = GlobalPackage !Version
  | ReplacedGlobalPackage ![PackageName]
  deriving Eq

-- | Has the global package been replaced (directly, or because one of its
-- dependencies was replaced)?
isReplacedGlobal :: GlobalPackage -> Bool
isReplacedGlobal (ReplacedGlobalPackage _) = True
isReplacedGlobal (GlobalPackage _) = False

-- | A source map with information on the wanted (but not actual) compiler. This
-- is derived by parsing the @stack.yaml@ file for @packages@, @extra-deps@,
-- their configuration (e.g., flags and options), and parsing the snapshot it
-- refers to. It does not include global packages or any information from the
-- command line.
--
-- Invariant: a @PackageName@ appears in either 'smwProject' or 'smwDeps', but
-- not both.
data SMWanted = SMWanted
  { compiler :: !WantedCompiler
  , project :: !(Map PackageName ProjectPackage)
  , deps :: !(Map PackageName DepPackage)
  , snapshotLocation :: !RawSnapshotLocation
    -- ^ Where this snapshot is loaded from.
  }

-- | Adds in actual compiler information to 'SMWanted', in particular the
-- contents of the global package database.
--
-- Invariant: a @PackageName@ appears in only one of the @Map@s.
data SMActual global = SMActual
  { compiler :: !ActualCompiler
  , project :: !(Map PackageName ProjectPackage)
  , deps :: !(Map PackageName DepPackage)
  , globals :: !(Map PackageName global)
  }

newtype GlobalPackageVersion
  = GlobalPackageVersion Version

-- | How a package is intended to be built
data Target
  = TargetAll !PackageType
    -- ^ Build all of the default components.
  | TargetComps !(Set NamedComponent)
    -- ^ Only build specific components

data PackageType = PTProject | PTDependency
  deriving (Eq, Show)

-- | Builds on an 'SMActual' by resolving the targets specified on the command
-- line, potentially adding in new dependency packages in the process.
data SMTargets = SMTargets
  { targets :: !(Map PackageName Target)
  , deps :: !(Map PackageName DepPackage)
  }

-- | The final source map, taking an 'SMTargets' and applying all command line
-- flags and GHC options.
data SourceMap = SourceMap
  { targets :: !SMTargets
    -- ^ Doesn't need to be included in the hash, does not affect the source
    -- map.
  , compiler :: !ActualCompiler
    -- ^ Need to hash the compiler version _and_ its installation path. Ideally
    -- there would be some kind of output from GHC telling us some unique ID for
    -- the compiler itself.
  , project :: !(Map PackageName ProjectPackage)
    -- ^ Doesn't need to be included in hash, doesn't affect any of the packages
    -- that get stored in the snapshot database.
  , deps :: !(Map PackageName DepPackage)
    -- ^ Need to hash all of the immutable dependencies, can ignore the mutable
    -- dependencies.
  , globalPkgs :: !(Map PackageName GlobalPackage)
    -- ^ Doesn't actually need to be hashed, implicitly captured by smCompiler.
    -- Can be broken if someone installs new global packages. We can document
    -- that as not supported, _or_ we could actually include all of this in the
    -- hash and make Stack more resilient.
  }

-- | A unique hash for the immutable portions of a 'SourceMap'.
newtype SourceMapHash = SourceMapHash SHA256

-- | Returns relative directory name with source map's hash
smRelDir :: (MonadThrow m) => SourceMapHash -> m (Path Rel Dir)
smRelDir (SourceMapHash smh) = parseRelDir $ T.unpack $ SHA256.toHexText smh

-- | Load the generic package description for the given 'ProjectPackage'.
ppGPD :: MonadIO m => ProjectPackage -> m GenericPackageDescription
ppGPD = liftIO . (.projectCommon.gpd)

-- | Root directory for the given 'ProjectPackage'
ppRoot :: ProjectPackage -> Path Abs Dir
ppRoot = parent . (.cabalFP)

-- | All components available in the given 'ProjectPackage'
ppComponents :: MonadIO m => ProjectPackage -> m (Set NamedComponent)
ppComponents = ppComponentsMaybe Just

-- | Components of the given 'ProjectPackage', filtered and possibly renamed by
-- the given selector function.
ppComponentsMaybe ::
     MonadIO m
  => (NamedComponent -> Maybe NamedComponent)
  -> ProjectPackage
  -> m (Set NamedComponent)
ppComponentsMaybe compType pp = do
  gpd <- ppGPD pp
  pure $ Set.fromList $ concat
    [ maybe [] (const $ catMaybes [compType CLib]) (C.condLibrary gpd)
    , go (compType . CExe) (fst <$> C.condExecutables gpd)
    , go (compType . CTest) (fst <$> C.condTestSuites gpd)
    , go (compType . CBench) (fst <$> C.condBenchmarks gpd)
    ]
 where
  go ::
       (T.Text -> Maybe NamedComponent)
    -> [C.UnqualComponentName]
    -> [NamedComponent]
  go wrapper = mapMaybe (wrapper . T.pack . C.unUnqualComponentName)

-- | Version for the given 'ProjectPackage'
-- (FIX: closed the unbalanced Haddock identifier quote.)
ppVersion :: MonadIO m => ProjectPackage -> m Version
ppVersion = fmap gpdVersion . ppGPD
stack-2.15.7/src/Stack/Types/StackYamlLoc.hs0000644000000000000000000000122614620153446016731 0ustar0000000000000000
{-# LANGUAGE NoImplicitPrelude #-}

module Stack.Types.StackYamlLoc
  ( StackYamlLoc (..)
  ) where

import Stack.Prelude

-- | Location for the project's stack.yaml file.
data StackYamlLoc
  = SYLDefault
    -- ^ Use the standard parent-directory-checking logic
  | SYLOverride !(Path Abs File)
    -- ^ Use a specific stack.yaml file provided
  | SYLNoProject ![PackageIdentifierRevision]
    -- ^ Do not load up a project, just user configuration. Include
    -- the given extra dependencies with the resolver.
  | SYLGlobalProject
    -- ^ Do not look for a project configuration, and use the implicit global.
  deriving Show
stack-2.15.7/src/Stack/Types/Storage.hs0000644000000000000000000000442614604306201016003 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}

-- | Types used by @Stack.Storage@ modules.
module Stack.Types.Storage
  ( StoragePrettyException (..)
  , ProjectStorage (..)
  , UserStorage (..)
  ) where

import Pantry.SQLite ( Storage )
import Stack.Prelude

-- | Type representing \'pretty\' exceptions thrown by functions exported by
-- modules beginning @Stack.Storage@.
data StoragePrettyException
  = StorageMigrationFailure !Text !(Path Abs File) !SomeException
  deriving (Show, Typeable)

instance Pretty StoragePrettyException where
  pretty (StorageMigrationFailure desc fp ex) =
    "[S-8835]"
    <> line
    -- FIX: the message previously read "migrate the the database"
    -- (duplicated word); corrected to "migrate the database".
    <> fillSep
         [ flow "Stack could not migrate the database"
           -- NOTE(review): 'show' on the Text description adds surrounding
           -- quotes to the styled output — presumably intentional; confirm.
         , style File (fromString $ show desc)
         , flow "located at"
         , pretty fp
         ]
    <> "."
    <> blankLine
    <> flow "While migrating the database, Stack encountered the error:"
    <> blankLine
    <> string exMsg
    <> blankLine
    <> fillSep
         [ flow "Please report this as an issue at"
         , style Url "https://github.com/commercialhaskell/stack/issues"
         ]
    <> "."
    <> blankLine
    -- See https://github.com/commercialhaskell/stack/issues/5851
    <> if exMsg == winIOGHCRTSMsg
         then
           flow "This error can be caused by a bug that arises if GHC's \
                \'--io-manager=native' RTS option is set using the GHCRTS \
                \environment variable. As a workaround try setting the option \
                \in the project's Cabal file, Stack's YAML configuration file \
                \or at the command line."
         else
           flow "As a workaround you may delete the database. This \
                \will cause the database to be recreated."
   where
    exMsg = displayException ex
    -- The characteristic error emitted on Windows when the native IO
    -- manager is active; compared against above to pick the advice shown.
    winIOGHCRTSMsg =
      "\\\\.\\NUL: hDuplicateTo: illegal operation (handles are incompatible)"

instance Exception StoragePrettyException

-- | A bit of type safety to ensure we're talking to the right database.
newtype UserStorage = UserStorage
  { userStorage :: Storage
  }

-- | A bit of type safety to ensure we're talking to the right database.
newtype ProjectStorage = ProjectStorage
  { projectStorage :: Storage
  }
stack-2.15.7/src/Stack/Types/TemplateName.hs0000644000000000000000000001271014604306201016746 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}

-- | Template name handling.
module Stack.Types.TemplateName
  ( TemplateName
  , RepoTemplatePath (..)
  , RepoService (..)
  , TemplatePath (..)
  , templateName
  , templatePath
  , parseTemplateNameFromString
  , parseRepoPathWithService
  , templateNameArgument
  , templateParamArgument
  , defaultTemplateName
  ) where

import Data.Aeson ( FromJSON (..), withText )
import qualified Data.Text as T
import Network.HTTP.StackClient ( parseRequest )
import qualified Options.Applicative as O
import Path ( parseAbsFile, parseRelFile )
import Stack.Prelude

-- | Type representing exceptions thrown by functions exported by the
-- "Stack.Types.TemplateName" module.
newtype TypeTemplateNameException
  = DefaultTemplateNameNotParsedBug String
  deriving (Show, Typeable)

instance Exception TypeTemplateNameException where
  displayException (DefaultTemplateNameNotParsedBug s) = bugReport "[S-7410]" $
    "Cannot parse default template name: " ++ s

-- | A template name.
data TemplateName
  = TemplateName !Text !TemplatePath
  deriving (Eq, Ord, Show)

-- | How a template is located: on the filesystem, at a URL, or in a remote
-- repository.
data TemplatePath
  = AbsPath (Path Abs File)
    -- ^ an absolute path on the filesystem
  | RelPath String (Path Rel File)
    -- ^ a relative path on the filesystem, or relative to the template
    -- repository. To avoid path separator conversion on Windows, the raw
    -- command-line parameter passed is also given as the first field (possibly
    -- with @.hsfiles@ appended).
  | UrlPath String
    -- ^ a full URL
  | RepoPath RepoTemplatePath
    -- ^ a path within a remote repository (see 'RepoTemplatePath')
  deriving (Eq, Ord, Show)

-- | Details for how to access a template from a remote repo.
data RepoTemplatePath = RepoTemplatePath
  { service :: RepoService
    -- ^ The hosting service (GitHub, GitLab or Bitbucket).
  , user :: Text
    -- ^ The user or organisation on the service.
  , template :: Text
    -- ^ The template's name within the user's repository.
  }
  deriving (Eq, Ord, Show)

-- | Services from which templates can be retrieved from a repository.
data RepoService = GitHub | GitLab | Bitbucket
  deriving (Eq, Ord, Show)

instance FromJSON TemplateName where
  -- A JSON string is parsed with 'parseTemplateNameFromString'; a parse
  -- failure becomes a JSON parse failure via 'fail'.
  parseJSON = withText "TemplateName" $
    either fail pure . parseTemplateNameFromString . T.unpack

-- | An argument which accepts a template name of the format @foo.hsfiles@ or
-- @foo@, ultimately normalized to @foo@.
templateNameArgument ::
     O.Mod O.ArgumentFields TemplateName
  -> O.Parser TemplateName
templateNameArgument =
  O.argument
    (do s <- O.str
        either O.readerError pure (parseTemplateNameFromString s))

-- | An argument which accepts a @key:value@ pair for specifying parameters.
templateParamArgument ::
     O.Mod O.OptionFields (Text, Text)
  -> O.Parser (Text, Text)
templateParamArgument =
  O.option
    (do raw <- O.str
        either O.readerError pure (parsePair raw))
 where
  -- Split on the first ':'. The value after the colon must be non-empty;
  -- anything else is reported as a reader error.
  parsePair :: String -> Either String (Text, Text)
  parsePair input =
    case break (== ':') input of
      (key, ':' : value@(_:_)) -> Right (T.pack key, T.pack value)
      _ -> Left ("Expected key:value format for argument: " <> input)

-- | Parse a template name from a string.
parseTemplateNameFromString :: String -> Either String TemplateName
parseTemplateNameFromString rawName =
  case T.stripSuffix ".hsfiles" (T.pack rawName) of
    -- The input already carried the @.hsfiles@ suffix.
    Just base -> attempt base rawName rawName
    -- Otherwise, try again with the suffix appended.
    Nothing -> attempt (T.pack rawName) (rawName <> ".hsfiles") rawName
 where
  attempt base withExt asGiven =
    maybe (Left expected) Right . asum $ candidates base withExt asGiven
  -- NOTE: order is important
  candidates base withExt asGiven =
    [ TemplateName base . RepoPath <$> parseRepoPath withExt
    , TemplateName (T.pack asGiven) . UrlPath
        <$> (parseRequest asGiven *> Just asGiven)
    , TemplateName base . AbsPath <$> parseAbsFile withExt
    , TemplateName base . RelPath withExt <$> parseRelFile withExt
    ]
  expected = "Expected a template like: foo or foo.hsfiles or \
             \https://example.com/foo.hsfiles or github:user/foo"

-- | The default template name you can use if you don't have one.
defaultTemplateName :: TemplateName
defaultTemplateName =
  -- Failure here is a bug: the literal below should always parse.
  either (impureThrow . DefaultTemplateNameNotParsedBug) id $
    parseTemplateNameFromString "new-template"

-- | Get a text representation of the template name.
templateName :: TemplateName -> Text
templateName (TemplateName t _) = t

-- | Get the path of the template.
templatePath :: TemplateName -> TemplatePath
templatePath (TemplateName _ p) = p

-- | The repository user assumed for a service when none is given explicitly.
defaultRepoUserForService :: RepoService -> Maybe Text
defaultRepoUserForService svc =
  case svc of
    GitHub -> Just "commercialhaskell"
    _ -> Nothing

-- | Parses a template path of the form @github:user/template@.
parseRepoPath :: String -> Maybe RepoTemplatePath parseRepoPath s = case T.splitOn ":" (T.pack s) of ["github" , rest] -> parseRepoPathWithService GitHub rest ["gitlab" , rest] -> parseRepoPathWithService GitLab rest ["bitbucket" , rest] -> parseRepoPathWithService Bitbucket rest _ -> Nothing -- | Parses a template path of the form @user/template@, given a service parseRepoPathWithService :: RepoService -> Text -> Maybe RepoTemplatePath parseRepoPathWithService service path = case T.splitOn "/" path of [user, name] -> Just $ RepoTemplatePath service user name [name] -> do repoUser <- defaultRepoUserForService service Just $ RepoTemplatePath service repoUser name _ -> Nothing stack-2.15.7/src/Stack/Types/UnusedFlags.hs0000644000000000000000000000076214445120723016624 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Types.UnusedFlags ( UnusedFlags (..) , FlagSource (..) ) where import Stack.Prelude data FlagSource = FSCommandLine | FSStackYaml deriving (Eq, Ord, Show) data UnusedFlags = UFNoPackage FlagSource PackageName | UFFlagsNotDefined FlagSource PackageName (Set FlagName) -- defined in package (Set FlagName) -- not defined | UFSnapshot PackageName deriving (Eq, Ord, Show) stack-2.15.7/src/Stack/Types/Version.hs0000644000000000000000000001125014604306201016015 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ViewPatterns #-} -- | Versions for packages. module Stack.Types.Version ( Cabal.VersionRange -- TODO in the future should have a newtype wrapper , IntersectingVersionRange (..) , VersionCheck (..) 
, versionRangeText , Cabal.withinRange , Stack.Types.Version.intersectVersionRanges , toMajorVersion , latestApplicableVersion , checkVersion , nextMajorVersion , minorVersion , stackVersion , showStackVersion , stackMajorVersion , stackMinorVersion ) where import Data.Aeson.Types ( FromJSON (..), ToJSON (..), Value (..), withText ) import Data.List ( find ) import qualified Data.Set as Set import qualified Data.Text as T import Data.Version ( showVersion ) import Distribution.Pretty ( pretty ) import qualified Distribution.Version as Cabal import qualified Paths_stack as Meta import Stack.Prelude hiding ( Vector, pretty ) import Text.PrettyPrint ( render ) newtype IntersectingVersionRange = IntersectingVersionRange { intersectingVersionRange :: Cabal.VersionRange } deriving Show instance Semigroup IntersectingVersionRange where IntersectingVersionRange l <> IntersectingVersionRange r = IntersectingVersionRange (l `Cabal.intersectVersionRanges` r) instance Monoid IntersectingVersionRange where mempty = IntersectingVersionRange Cabal.anyVersion mappend = (<>) -- | Display a version range versionRangeText :: Cabal.VersionRange -> Text versionRangeText = T.pack . render . pretty -- | A modified intersection which also simplifies, for better display. intersectVersionRanges :: Cabal.VersionRange -> Cabal.VersionRange -> Cabal.VersionRange intersectVersionRanges x y = Cabal.simplifyVersionRange $ Cabal.intersectVersionRanges x y -- | Returns the first two components, defaulting to 0 if not present toMajorVersion :: Version -> Version toMajorVersion v = case Cabal.versionNumbers v of [] -> Cabal.mkVersion [0, 0] [a] -> Cabal.mkVersion [a, 0] a:b:_ -> Cabal.mkVersion [a, b] -- | Given a version range and a set of versions, find the latest version from -- the set that is within the range. latestApplicableVersion :: Cabal.VersionRange -> Set Version -> Maybe Version latestApplicableVersion r = find (`Cabal.withinRange` r) . 
Set.toDescList -- | Get the next major version number for the given version nextMajorVersion :: Version -> Version nextMajorVersion v = case Cabal.versionNumbers v of [] -> Cabal.mkVersion [0, 1] [a] -> Cabal.mkVersion [a, 1] a:b:_ -> Cabal.mkVersion [a, b + 1] data VersionCheck = MatchMinor | MatchExact | NewerMinor deriving (Eq, Ord, Show) instance ToJSON VersionCheck where toJSON MatchMinor = String "match-minor" toJSON MatchExact = String "match-exact" toJSON NewerMinor = String "newer-minor" instance FromJSON VersionCheck where parseJSON = withText expected $ \t -> case t of "match-minor" -> pure MatchMinor "match-exact" -> pure MatchExact "newer-minor" -> pure NewerMinor _ -> fail ("Expected " ++ expected ++ ", but got " ++ show t) where expected = "VersionCheck value (match-minor, match-exact, or newer-minor)" checkVersion :: VersionCheck -> Version -> Version -> Bool checkVersion check (Cabal.versionNumbers -> wanted) (Cabal.versionNumbers -> actual) = case check of MatchMinor -> and (take 3 matching) MatchExact -> length wanted == length actual && and matching NewerMinor -> and (take 2 matching) && newerMinor where matching = zipWith (==) wanted actual getMinor (_a:_b:c:_) = Just c getMinor _ = Nothing newerMinor = case (getMinor wanted, getMinor actual) of (Nothing, _) -> True (Just _, Nothing) -> False (Just w, Just a) -> a >= w -- | Get minor version (excludes any patchlevel) minorVersion :: Version -> Version minorVersion = Cabal.mkVersion . take 3 . Cabal.versionNumbers -- | Current Stack version stackVersion :: Version stackVersion = Cabal.mkVersion' Meta.version -- | Current Stack version in the same format as yielded by -- 'Data.Version.showVersion'. showStackVersion :: String showStackVersion = showVersion Meta.version -- | Current Stack minor version (excludes patchlevel) stackMinorVersion :: Version stackMinorVersion = minorVersion stackVersion -- | Current Stack major version. 
Returns the first two components, defaulting -- to 0 if not present stackMajorVersion :: Version stackMajorVersion = toMajorVersion stackVersion stack-2.15.7/src/Stack/Types/VersionedDownloadInfo.hs0000644000000000000000000000167114604306201020640 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Types.VersionedDownloadInfo ( VersionedDownloadInfo (..) ) where import Data.Aeson.Types ( FromJSON (..) ) import Data.Aeson.WarningParser ( WithJSONWarnings (..), (..:), withObjectWarnings ) import Stack.Prelude import Stack.Types.DownloadInfo ( DownloadInfo, parseDownloadInfoFromObject ) data VersionedDownloadInfo = VersionedDownloadInfo { version :: Version , downloadInfo :: DownloadInfo } deriving Show instance FromJSON (WithJSONWarnings VersionedDownloadInfo) where parseJSON = withObjectWarnings "VersionedDownloadInfo" $ \o -> do CabalString version <- o ..: "version" downloadInfo <- parseDownloadInfoFromObject o pure VersionedDownloadInfo { version , downloadInfo } stack-2.15.7/src/Stack/Uninstall.hs0000644000000000000000000000634314604306201015244 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Function related to Stack's @uninstall@ command. module Stack.Uninstall ( uninstallCmd ) where import Stack.Constants ( osIsWindows ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig ) import Stack.Types.Config ( Config (..), configL, stackGlobalConfigL, stackRootL ) import Stack.Types.Runner ( Runner ) -- | Function underlying the @stack uninstall@ command. Display help for the -- command. uninstallCmd :: () -> RIO Runner () uninstallCmd () = withConfig NoReexec $ do stackRoot <- view stackRootL globalConfig <- view stackGlobalConfigL programsDir <- view $ configL . to (.localProgramsBase) localBinDir <- view $ configL . to (.localBin) let toStyleDoc = style Dir . fromString . 
toFilePath stackRoot' = toStyleDoc stackRoot globalConfig' = toStyleDoc globalConfig programsDir' = toStyleDoc programsDir localBinDir' = toStyleDoc localBinDir putUtf8Builder =<< displayWithColor ( vsep [ flow "To uninstall Stack, it should be sufficient to delete:" , hang 4 $ fillSep [ flow "(1) the directory containing Stack's tools" , "(" <> softbreak <> programsDir' <> softbreak <> ");" ] , hang 4 $ fillSep [ flow "(2) the Stack root directory" , "(" <> softbreak <> stackRoot' <> softbreak <> ");" ] , hang 4 $ fillSep [ flow "(3) if different, the directory containing " , flow "Stack's global YAML configuration file" , parens globalConfig' <> ";" , "and" ] , hang 4 $ fillSep [ flow "(4) the 'stack' executable file (see the output" , flow "of command" , howToFindStack <> "," , flow "if Stack is on the PATH;" , flow "Stack is often installed in" , localBinDir' <> softbreak <> ")." ] , fillSep [flow "You may also want to delete" , style File ".stack-work" , flow "directories in any Haskell projects that you have built." ] ] <> blankLine <> vsep [ fillSep [ flow "To uninstall completely a Stack-supplied tool (such as \ \GHC or, on Windows, MSYS2), delete from Stack's tools \ \directory" , parens programsDir' <> ":" ] , hang 4 $ fillSep [ flow "(1) the tool's subdirectory;" ] , hang 4 $ fillSep [ flow "(2) the tool's archive file" , parens (style File ".tar.xz") <> "; and" ] , hang 4 $ fillSep [ flow "(3) the file marking that the tool is installed" , parens (style File ".installed") <> "." ] ] ) where styleShell = style Shell howToFindStack | osIsWindows = styleShell "where.exe stack" | otherwise = styleShell "which stack" stack-2.15.7/src/Stack/Unpack.hs0000644000000000000000000002056414620153446014526 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Functions related to Stack's @unpack@ command. module Stack.Unpack ( UnpackOpts (..) 
, UnpackTarget , unpackCmd , unpackPackages ) where import Data.List.Extra ( notNull ) import Path ( SomeBase (..), (), parseRelDir ) import Path.IO ( doesDirExist, getCurrentDir ) import Pantry ( loadSnapshot ) import qualified RIO.Map as Map import RIO.Process ( HasProcessContext ) import qualified RIO.Set as Set import qualified RIO.Text as T import Stack.Config ( makeConcreteResolver ) import Stack.Constants ( relDirRoot ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig ) import Stack.Types.Config ( Config (..), HasConfig, configL ) import Stack.Types.GlobalOpts ( GlobalOpts (..) ) import Stack.Types.Runner ( Runner, globalOptsL ) -- | Type representing \'pretty\' exceptions thrown by functions exported by the -- "Stack.Unpack" module. data UnpackPrettyException = UnpackDirectoryAlreadyExists (Set (Path Abs Dir)) | CouldNotParsePackageSelectors [StyleDoc] | PackageCandidatesRequireVersions [PackageName] | PackageLocationInvalid PackageIdentifierRevision deriving (Show, Typeable) instance Pretty UnpackPrettyException where pretty (UnpackDirectoryAlreadyExists dirs) = "[S-3515]" <> line <> flow "Stack was unable to unpack due to directories already being \ \present:" <> line <> bulletedList (map pretty $ Set.toList dirs) pretty (CouldNotParsePackageSelectors errs) = "[S-2628]" <> line <> flow "The following package selectors are not valid package names or \ \identifiers:" <> line <> bulletedList errs pretty (PackageCandidatesRequireVersions names) = "[S-6114]" <> line <> flow "Package candidates to unpack cannot be identified by name only. \ \The following do not specify a version:" <> line <> bulletedList (map fromPackageName names) pretty (PackageLocationInvalid pir) = "[S-5170]" <> line <> fillSep [ flow "While trying to unpack" , style Target (fromString $ T.unpack $ textDisplay pir) <> "," , flow "Stack encountered an error." 
] instance Exception UnpackPrettyException -- | Type synonymn representing packages to be unpacked by the @stack unpack@ -- command, identified either by name only or by an identifier (including -- Hackage revision). type UnpackTarget = Either PackageName PackageIdentifierRevision -- | Type representing options for the @stack unpack@ command. data UnpackOpts = UnpackOpts { targets :: [UnpackTarget] -- ^ The packages or package candidates to be unpacked. , areCandidates :: Bool -- ^ Whether the targets are Hackage package candidates. , dest :: Maybe (SomeBase Dir) -- ^ The optional directory into which a target will be unpacked into a -- subdirectory. } -- | Function underlying the @stack unpack@ command. Unpack packages or package -- candidates to the filesystem. unpackCmd :: UnpackOpts -> RIO Runner () unpackCmd (UnpackOpts targets areCandidates Nothing) = unpackCmd (UnpackOpts targets areCandidates (Just $ Rel relDirRoot)) unpackCmd (UnpackOpts targets areCandidates (Just dstPath)) = withConfig NoReexec $ do mresolver <- view $ globalOptsL . to (.resolver) mSnapshot <- forM mresolver $ \resolver -> do concrete <- makeConcreteResolver resolver loc <- completeSnapshotLocation concrete loadSnapshot loc dstPath' <- case dstPath of Abs path -> pure path Rel path -> do wd <- getCurrentDir pure $ wd path unpackPackages mSnapshot dstPath' targets areCandidates -- | Intended to work for the command line command. unpackPackages :: forall env. (HasConfig env, HasPantryConfig env, HasProcessContext env, HasTerm env) => Maybe RawSnapshot -- ^ When looking up by name, take from this build plan. -> Path Abs Dir -- ^ Destination. -> [UnpackTarget] -> Bool -- ^ Whether the targets are package candidates. 
-> RIO env () unpackPackages mSnapshot dest targets areCandidates = do let (names, pirs) = partitionEithers targets pisWithRevisions = any hasRevision pirs hasRevision (PackageIdentifierRevision _ _ CFILatest) = False hasRevision _ = True when (areCandidates && notNull names) $ prettyThrowIO $ PackageCandidatesRequireVersions names when (areCandidates && pisWithRevisions) $ prettyWarn $ flow "Package revisions are not meaningful for package candidates and \ \will be ignored." <> line locs1 <- forM pirs $ \pir -> do hackageBaseUrl <- view $ configL . to (.hackageBaseUrl) let rpli = if areCandidates then let -- Ignoring revisions for package candidates. PackageIdentifierRevision candidateName candidateVersion _ = pir candidatePkgId = PackageIdentifier candidateName candidateVersion candidatePkgIdText = T.pack $ packageIdentifierString candidatePkgId candidateUrl = hackageBaseUrl <> "package/" <> candidatePkgIdText <> "/candidate/" <> candidatePkgIdText <> ".tar.gz" candidateLoc = ALUrl candidateUrl candidateArchive = RawArchive candidateLoc Nothing Nothing "" candidateMetadata = RawPackageMetadata Nothing Nothing Nothing in RPLIArchive candidateArchive candidateMetadata else RPLIHackage pir Nothing loc <- cplComplete <$> completePackageLocation rpli `catch` \(_ :: SomeException) -> prettyThrowIO $ PackageLocationInvalid pir pure (loc, packageLocationIdent loc) (errs, locs2) <- partitionEithers <$> traverse toLoc names unless (null errs) $ prettyThrowM $ CouldNotParsePackageSelectors errs locs <- Map.fromList <$> mapM (\(pir, ident) -> do suffix <- parseRelDir $ packageIdentifierString ident pure (pir, dest suffix) ) (locs1 ++ locs2) alreadyUnpacked <- filterM doesDirExist $ Map.elems locs unless (null alreadyUnpacked) $ prettyThrowM $ UnpackDirectoryAlreadyExists $ Set.fromList alreadyUnpacked forM_ (Map.toList locs) $ \(loc, dest') -> do unpackPackageLocation dest' loc prettyInfoL [ "Unpacked" , fromString $ T.unpack $ textDisplay loc , "to" , pretty dest' <> "." 
] where toLoc name | Just snapshot <- mSnapshot = toLocSnapshot snapshot name | otherwise = do void $ updateHackageIndex $ Just "Updating the package index." toLocNoSnapshot name toLocNoSnapshot :: PackageName -> RIO env (Either StyleDoc (PackageLocationImmutable, PackageIdentifier)) toLocNoSnapshot name = do mLoc <- getLatestHackageLocation YesRequireHackageIndex name UsePreferredVersions case mLoc of Nothing -> do candidates <- getHackageTypoCorrections name pure $ Left $ fillSep [ flow "Could not find package" , style Current (fromPackageName name) , flow "on Hackage." , if null candidates then mempty else fillSep $ flow "Perhaps you meant one of:" : mkNarrativeList (Just Good) False (map fromPackageName candidates :: [StyleDoc]) ] Just loc -> pure $ Right (loc, packageLocationIdent loc) toLocSnapshot :: RawSnapshot -> PackageName -> RIO env (Either StyleDoc (PackageLocationImmutable, PackageIdentifier)) toLocSnapshot snapshot name = case Map.lookup name (rsPackages snapshot) of Nothing -> pure $ Left $ fillSep [ flow "Package does not appear in snapshot:" , style Current (fromPackageName name) <> "." ] Just sp -> do loc <- cplComplete <$> completePackageLocation (rspLocation sp) pure $ Right (loc, packageLocationIdent loc) stack-2.15.7/src/Stack/Update.hs0000644000000000000000000000072714445120723014523 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Functions related to Stack's @update@ command. module Stack.Update ( updateCmd ) where import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig ) import Stack.Types.Runner ( Runner ) -- | Function underlying the @stack update@ command. Update the package index. 
updateCmd :: () -> RIO Runner ()
-- 'Nothing' means no progress message is supplied to the index update
-- (cf. "Stack.Unpack", which passes @Just "Updating the package index."@).
updateCmd () = withConfig NoReexec (void (updateHackageIndex Nothing))
stack-2.15.7/src/Stack/Upgrade.hs0000644000000000000000000002600314620153446014666 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE OverloadedStrings #-}

-- | Types and functions related to Stack's @upgrade@ command.
module Stack.Upgrade
  ( UpgradeOpts (..)
  , BinaryOpts (..)
  , SourceOpts (..)
  , upgradeCmd
  , upgrade
  ) where

import qualified Data.Text as T
import Path ( (</>), parseRelDir )
import RIO.Process ( proc, runProcess_, withWorkingDir )
import Stack.Build ( build )
import Stack.Build.Target ( NeedTargets (..) )
import Stack.BuildInfo ( maybeGitHash )
import Stack.Constants ( relDirStackProgName, stackDotYaml )
import Stack.Prelude hiding ( force, Display (..) )
import Stack.Runners
         ( ShouldReexec (..), withConfig, withEnvConfig, withGlobalProject )
import Stack.Setup
         ( downloadStackExe, downloadStackReleaseInfo, getDownloadVersion
         , preferredPlatforms, stackVersion
         )
import Stack.Types.BuildOpts ( buildOptsInstallExesL )
import Stack.Types.BuildOptsCLI ( BuildOptsCLI (..), defaultBuildOptsCLI )
import Stack.Types.Config ( Config (..), HasConfig (..), buildOptsL )
import Stack.Types.GlobalOpts ( GlobalOpts (..) )
import Stack.Types.Runner ( Runner, globalOptsL )
import Stack.Types.StackYamlLoc ( StackYamlLoc (..) )
import System.Process ( rawSystem, readProcess )

-- | Type representing \'pretty\' exceptions thrown by functions in the
-- "Stack.Upgrade" module.
data UpgradePrettyException = ResolverOptionInvalid | NeitherBinaryOrSourceSpecified | ExecutableFailure | CommitsNotFound String String | StackInPackageIndexNotFound | VersionWithNoRevision deriving (Show, Typeable) instance Pretty UpgradePrettyException where pretty ResolverOptionInvalid = "[S-8761]" <> line <> fillSep [ "The" , style Shell "--resolver" , flow "option cannot be used with Stack's" , style Shell "upgrade" , "command." ] pretty NeitherBinaryOrSourceSpecified = "[S-3642]" <> line <> flow "You must allow either binary or source upgrade paths." pretty ExecutableFailure = "[S-8716]" <> line <> flow "Non-success exit code from running newly downloaded executable." pretty (CommitsNotFound branch repo) = "[S-7114]" <> line <> fillSep [ flow "No commits found for branch" , style Current (fromString branch) , flow "on repo" , style Url (fromString repo) <> "." ] pretty StackInPackageIndexNotFound = "[S-9668]" <> line <> flow "No Stack version found in package indices." pretty VersionWithNoRevision = "[S-6648]" <> line <> flow "Latest version with no revision." instance Exception UpgradePrettyException -- | Type representing options for upgrading Stack with a binary executable -- file. data BinaryOpts = BinaryOpts { platform :: !(Maybe String) , force :: !Bool -- ^ Force a download, even if the downloaded version is older than what we -- are. , onlyLocalBin :: !Bool -- ^ Only download to Stack's local binary directory. , version :: !(Maybe String) -- ^ Specific version to download , gitHubOrg :: !(Maybe String) , gitHubRepo :: !(Maybe String) } deriving Show -- | Type representing options for upgrading Stack from source code. newtype SourceOpts = SourceOpts (Maybe (String, String)) -- repo and branch deriving Show -- | Type representing command line options for the @stack upgrade@ command. data UpgradeOpts = UpgradeOpts { binary :: !(Maybe BinaryOpts) , source :: !(Maybe SourceOpts) } deriving Show -- | Function underlying the @stack upgrade@ command. 
upgradeCmd :: UpgradeOpts -> RIO Runner () upgradeCmd upgradeOpts = do go <- view globalOptsL case go.resolver of Just _ -> prettyThrowIO ResolverOptionInvalid Nothing -> withGlobalProject $ upgrade maybeGitHash upgradeOpts upgrade :: Maybe String -- ^ git hash at time of building, if known -> UpgradeOpts -> RIO Runner () upgrade builtHash (UpgradeOpts mbo mso) = case (mbo, mso) of -- FIXME It would be far nicer to capture this case in the options parser -- itself so we get better error messages, but I can't think of a way to -- make it happen. (Nothing, Nothing) -> prettyThrowIO NeitherBinaryOrSourceSpecified (Just bo, Nothing) -> binary bo (Nothing, Just so) -> source so -- See #2977 - if --git or --git-repo is specified, do source upgrade. (_, Just so@(SourceOpts (Just _))) -> source so (Just bo, Just so) -> binary bo `catchAny` \e -> do prettyWarn $ flow "When trying to perform binary upgrade, Stack encountered the \ \following error:" <> blankLine <> ppException e <> blankLine <> flow "Falling back to source upgrade." source so where binary = binaryUpgrade source = sourceUpgrade builtHash binaryUpgrade :: BinaryOpts -> RIO Runner () binaryUpgrade (BinaryOpts mplatform force' onlyLocalBin mver morg mrepo) = withConfig NoReexec $ do platforms0 <- case mplatform of Nothing -> preferredPlatforms Just p -> pure [("windows" `T.isInfixOf` T.pack p, p)] archiveInfo <- downloadStackReleaseInfo morg mrepo mver let mdownloadVersion = getDownloadVersion archiveInfo force = case mver of Nothing -> force' Just _ -> True -- specifying a version implies we're forcing things isNewer <- case mdownloadVersion of Nothing -> do prettyError $ flow "Unable to determine upstream version from GitHub metadata." <> if force then mempty else line <> fillSep [ flow "Rerun with" , style Shell "--force-download" , flow "to force an upgrade." 
] pure False Just downloadVersion -> do prettyInfoL [ flow "Current Stack version:" , fromString (versionString stackVersion) <> ";" , flow "available download version:" , fromString (versionString downloadVersion) <> "." ] pure $ downloadVersion > stackVersion toUpgrade <- case (force, isNewer) of (False, False) -> do prettyInfoS "Skipping binary upgrade, you are already running the most \ \recent version." pure False (True, False) -> do prettyInfoS "Forcing binary upgrade." pure True (_, True) -> do prettyInfoS "Newer version detected, downloading." pure True when toUpgrade $ do config <- view configL downloadStackExe platforms0 archiveInfo config.localBin (not onlyLocalBin) $ \tmpFile -> do -- Sanity check! ec <- rawSystem (toFilePath tmpFile) ["--version"] unless (ec == ExitSuccess) (prettyThrowIO ExecutableFailure) sourceUpgrade :: Maybe String -> SourceOpts -> RIO Runner () sourceUpgrade builtHash (SourceOpts gitRepo) = withSystemTempDir "stack-upgrade" $ \tmp -> do mdir <- case gitRepo of Just (repo, branch) -> do remote <- liftIO $ System.Process.readProcess "git" ["ls-remote", repo, branch] [] latestCommit <- case words remote of [] -> prettyThrowIO $ CommitsNotFound branch repo x:_ -> pure x when (isNothing builtHash) $ prettyWarnS "Information about the commit this version of Stack was built from \ \is not available due to how it was built. Will continue by \ \assuming an upgrade is needed because we have no information to \ \the contrary." if builtHash == Just latestCommit then do prettyInfoS "Already up-to-date, no upgrade required." pure Nothing else do prettyInfoS "Cloning stack." -- NOTE: "--recursive" was added after v1.0.0 (and before the next -- release). This means that we can't use submodules in the Stack -- repo until we're comfortable with "stack upgrade --git" not -- working for earlier versions. 
let args = [ "clone" , repo , "stack" , "--depth" , "1" , "--recursive" , "--branch" , branch ] withWorkingDir (toFilePath tmp) $ proc "git" args runProcess_ pure $ Just $ tmp relDirStackProgName -- We need to access the Pantry database to find out about the latest -- Stack available on Hackage. We first use a standard Config to do this, -- and once we have the source load up the stack.yaml from inside that -- source. Nothing -> withConfig NoReexec $ do void $ updateHackageIndex $ Just "Updating index to make sure we find the latest Stack version." mversion <- getLatestHackageVersion YesRequireHackageIndex "stack" UsePreferredVersions (PackageIdentifierRevision _ version _) <- case mversion of Nothing -> prettyThrowIO StackInPackageIndexNotFound Just version -> pure version if version <= stackVersion then do prettyInfoS "Already at latest version, no upgrade required." pure Nothing else do suffix <- parseRelDir $ "stack-" ++ versionString version let dir = tmp suffix mrev <- getLatestHackageRevision YesRequireHackageIndex "stack" version case mrev of Nothing -> prettyThrowIO VersionWithNoRevision Just (_rev, cfKey, treeKey) -> do let ident = PackageIdentifier "stack" version unpackPackageLocation dir $ PLIHackage ident cfKey treeKey pure $ Just dir let modifyGO dir go = go { resolver = Nothing -- always use the resolver settings in the -- stack.yaml file , stackYaml = SYLOverride $ dir stackDotYaml } boptsCLI = defaultBuildOptsCLI { targetsCLI = ["stack"] } forM_ mdir $ \dir -> local (over globalOptsL (modifyGO dir)) $ withConfig NoReexec $ withEnvConfig AllowNoTargets boptsCLI $ local (set (buildOptsL . 
buildOptsInstallExesL) True) $ build Nothing stack-2.15.7/src/Stack/Upload.hs0000644000000000000000000005057214620153446014533 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} -- | Types and functions related to Stack's @upload@ command. module Stack.Upload ( -- * Upload UploadOpts (..) , SDistOpts (..) , UploadContent (..) , UploadVariant (..) , uploadCmd , upload , uploadBytes , uploadRevision -- * Credentials , HackageCreds , HackageAuth (..) , HackageKey (..) , loadAuth , writeFilePrivate -- * Internal , maybeGetHackageKey ) where import Conduit ( mapOutput, sinkList ) import Data.Aeson ( FromJSON (..), ToJSON (..), (.:), (.=), decode' , fromEncoding, object, toEncoding, withObject ) import Data.ByteString.Builder ( lazyByteString ) import qualified Data.ByteString.Char8 as S import qualified Data.ByteString.Lazy as L import qualified Data.Conduit.Binary as CB import qualified Data.Text as T import Network.HTTP.StackClient ( Request, RequestBody (RequestBodyLBS), Response , applyDigestAuth, displayDigestAuthException, formDataBody , getGlobalManager, getResponseBody, getResponseStatusCode , httpNoBody, method, methodPost, methodPut, parseRequest , partBS, partFileRequestBody, partLBS, requestBody , setRequestHeader, setRequestHeaders, withResponse ) import Path ( (), addExtension, parseRelFile ) import Path.IO ( resolveDir', resolveFile' ) import qualified Path.IO as Path import Stack.Constants ( isStackUploadDisabled ) import Stack.Constants.Config ( distDirFromDir ) import Stack.Prelude import Stack.Runners ( ShouldReexec (..), withConfig, withDefaultEnvConfig ) import Stack.SDist ( SDistOpts (..), checkSDistTarball, checkSDistTarball' , getSDistTarball, readLocalPackage ) import Stack.Types.Config ( Config (..), configL, stackRootL ) import Stack.Types.EnvConfig ( HasEnvConfig ) import Stack.Types.Package ( 
LocalPackage (..), packageIdentifier ) import Stack.Types.PvpBounds (PvpBounds) import Stack.Types.Runner ( Runner ) import System.Directory ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist , removeFile, renameFile ) import System.Environment ( lookupEnv ) import qualified System.FilePath as FP import System.PosixCompat.Files ( setFileMode ) -- | Type representing \'pretty\' exceptions thrown by functions exported by the -- "Stack.Upload" module. data UploadPrettyException = AuthenticationFailure | ArchiveUploadFailure !Int ![String] !String | DocsTarballInvalid ![(String, Path Abs File)] | ItemsInvalid ![FilePath] | NoItemSpecified !String | PackageDirectoryInvalid ![FilePath] | PackageIdNotSpecifiedForDocsUploadBug | PackageIdSpecifiedForPackageUploadBug | TarGzFileNameInvalidBug !String deriving (Show, Typeable) instance Pretty UploadPrettyException where pretty AuthenticationFailure = "[S-2256]" <> line <> flow "authentification failure" <> line <> flow "Authentication failure uploading to server" pretty (ArchiveUploadFailure code res tarName) = "[S-6108]" <> line <> flow "unhandled status code:" <+> fromString (show code) <> line <> flow "Upload failed on" <+> style File (fromString tarName) <> line <> vsep (map string res) pretty (DocsTarballInvalid invalidItems) = "[S-2837]" <> line <> flow "Stack can't find:" <> line <> invalidList where invalidItem (pkgIdName, tarGzFile) = fillSep [ pretty tarGzFile , "for" , style Current (fromString pkgIdName) <> "." ] invalidList = bulletedList $ map invalidItem invalidItems pretty (ItemsInvalid invalidItems) = "[S-3179]" <> line <> flow "For package upload, Stack expects a list of relative paths to \ \tosdist tarballs or package directories. Stack can't find:" <> line <> invalidList where invalidList = bulletedList $ map (style File . fromString) invalidItems pretty (NoItemSpecified subject) = "[S-3030]" <> line <> fillSep [ flow "An item must be specified. 
To upload" , flow subject , flow "please run" , style Shell "stack upload ." , flow "(with the period at the end)." ] pretty (PackageDirectoryInvalid invalidItems) = "[S-5908]" <> line <> flow "For documentation upload, Stack expects a list of relative paths \ \to package directories. Stack can't find:" <> line <> invalidList where invalidList = bulletedList $ map (style Current . fromString) invalidItems pretty PackageIdNotSpecifiedForDocsUploadBug = bugPrettyReport "[S-7274]" $ flow "uploadBytes: Documentation upload but package identifier not \ \specified." pretty PackageIdSpecifiedForPackageUploadBug = bugPrettyReport "[S-5860]" $ flow "uploadBytes: Package upload but package identifier specified." pretty (TarGzFileNameInvalidBug name) = bugPrettyReport "[S-5955]" $ fillSep [ flow "uploadCmd: the name of the" , fromString name <> ".tar.gz" , flow "file could not be parsed." ] instance Exception UploadPrettyException -- | Type representing forms of content for upload to Hackage. data UploadContent = SDist -- ^ Content in the form of an sdist tarball. | DocArchive -- ^ Content in the form of an archive file of package documentation. -- | Type representing variants for uploading to Hackage. data UploadVariant = Publishing -- ^ Publish the package/a published package. | Candidate -- ^ Create a package candidate/a package candidate. -- | Type representing command line options for the @stack upload@ command. data UploadOpts = UploadOpts { itemsToWorkWith :: ![String] -- ^ The items to work with. , documentation :: !Bool -- ^ Uploading documentation for packages? , pvpBounds :: !(Maybe PvpBounds) , check :: !Bool , buildPackage :: !Bool , tarPath :: !(Maybe FilePath) , uploadVariant :: !UploadVariant } -- | Function underlying the @stack upload@ command. Upload to Hackage. 
uploadCmd :: UploadOpts -> RIO Runner () uploadCmd (UploadOpts [] uoDocumentation _ _ _ _ _) = do let subject = if uoDocumentation then "documentation for the current package," else "the current package," prettyThrowIO $ NoItemSpecified subject uploadCmd uo = withConfig YesReexec $ withDefaultEnvConfig $ do config <- view configL let hackageUrl = T.unpack config.hackageBaseUrl if uo.documentation then do (dirs, invalid) <- liftIO $ partitionM doesDirectoryExist uo.itemsToWorkWith unless (null invalid) $ prettyThrowIO $ PackageDirectoryInvalid invalid (failed, items) <- partitionEithers <$> forM dirs checkDocsTarball unless (null failed) $ do prettyThrowIO $ DocsTarballInvalid failed getCreds <- memoizeRef $ loadAuth config forM_ items $ \(pkgIdName, tarGzFile) -> do creds <- runMemoized getCreds upload hackageUrl creds DocArchive (Just pkgIdName) (toFilePath tarGzFile) uo.uploadVariant else do (files, nonFiles) <- liftIO $ partitionM doesFileExist uo.itemsToWorkWith (dirs, invalid) <- liftIO $ partitionM doesDirectoryExist nonFiles unless (null invalid) $ do prettyThrowIO $ ItemsInvalid invalid let sdistOpts = SDistOpts uo.itemsToWorkWith uo.pvpBounds uo.check uo.buildPackage uo.tarPath getCreds <- memoizeRef $ loadAuth config mapM_ (resolveFile' >=> checkSDistTarball sdistOpts) files forM_ files $ \file -> do tarFile <- resolveFile' file creds <- runMemoized getCreds upload hackageUrl creds SDist Nothing (toFilePath tarFile) uo.uploadVariant forM_ dirs $ \dir -> do pkgDir <- resolveDir' dir (tarName, tarBytes, mcabalRevision) <- getSDistTarball uo.pvpBounds pkgDir checkSDistTarball' sdistOpts tarName tarBytes creds <- runMemoized getCreds uploadBytes hackageUrl creds SDist Nothing tarName uo.uploadVariant tarBytes forM_ mcabalRevision $ uncurry $ uploadRevision hackageUrl creds where checkDocsTarball :: HasEnvConfig env => FilePath -> RIO env (Either (String, Path Abs File) (String, Path Abs File)) checkDocsTarball dir = do pkgDir <- resolveDir' dir distDir <- 
distDirFromDir pkgDir lp <- readLocalPackage pkgDir let pkgId = packageIdentifier lp.package pkgIdName = packageIdentifierString pkgId name = pkgIdName <> "-docs" tarGzFileName <- maybe (prettyThrowIO $ TarGzFileNameInvalidBug name) pure ( do nameRelFile <- parseRelFile name addExtension ".gz" =<< addExtension ".tar" nameRelFile ) let tarGzFile = distDir Path. tarGzFileName isFile <- Path.doesFileExist tarGzFile pure $ (if isFile then Right else Left) (pkgIdName, tarGzFile) partitionM _ [] = pure ([], []) partitionM f (x:xs) = do r <- f x (as, bs) <- partitionM f xs pure $ if r then (x:as, bs) else (as, x:bs) newtype HackageKey = HackageKey Text deriving (Eq, Show) -- | Username and password to log into Hackage. -- -- Since 0.1.0.0 data HackageCreds = HackageCreds { username :: !Text , password :: !Text , credsFile :: !FilePath } deriving (Eq, Show) data HackageAuth = HAKey HackageKey | HACreds HackageCreds deriving (Eq, Show) instance ToJSON HackageCreds where toJSON (HackageCreds u p _) = object [ "username" .= u , "password" .= p ] instance FromJSON (FilePath -> HackageCreds) where parseJSON = withObject "HackageCreds" $ \o -> HackageCreds <$> o .: "username" <*> o .: "password" withEnvVariable :: Text -> IO Text -> IO Text withEnvVariable varName fromPrompt = lookupEnv (T.unpack varName) >>= maybe fromPrompt (pure . T.pack) maybeGetHackageKey :: RIO m (Maybe HackageKey) maybeGetHackageKey = liftIO $ fmap (HackageKey . T.pack) <$> lookupEnv "HACKAGE_KEY" loadAuth :: (HasLogFunc m, HasTerm m) => Config -> RIO m HackageAuth loadAuth config = do maybeHackageKey <- maybeGetHackageKey case maybeHackageKey of Just key -> do prettyInfoS "HACKAGE_KEY environment variable found, using that for credentials." pure $ HAKey key Nothing -> HACreds <$> loadUserAndPassword config -- | Load Hackage credentials, either from a save file or the command -- line. 
-- -- Since 0.1.0.0 loadUserAndPassword :: HasTerm m => Config -> RIO m HackageCreds loadUserAndPassword config = do fp <- liftIO $ credsFile config elbs <- liftIO $ tryIO $ L.readFile fp case either (const Nothing) Just elbs >>= \lbs -> (lbs, ) <$> decode' lbs of Nothing -> fromPrompt fp Just (lbs, mkCreds) -> do -- Ensure privacy, for cleaning up old versions of Stack that -- didn't do this writeFilePrivate fp $ lazyByteString lbs unless config.saveHackageCreds $ do prettyWarnL [ flow "You've set" , style Shell "save-hackage-creds" , "to" , style Shell "false" <> "." , flow "However, credentials were found at:" , style File (fromString fp) <> "." ] pure $ mkCreds fp where fromPrompt :: HasTerm m => FilePath -> RIO m HackageCreds fromPrompt fp = do username <- liftIO $ withEnvVariable "HACKAGE_USERNAME" (prompt "Hackage username: ") password <- liftIO $ withEnvVariable "HACKAGE_PASSWORD" (promptPassword "Hackage password: ") let hc = HackageCreds { username , password , credsFile = fp } when config.saveHackageCreds $ do shouldSave <- promptBool $ T.pack $ "Save Hackage credentials to file at " ++ fp ++ " [y/n]? " prettyNoteL [ flow "Avoid this prompt in the future by using the configuration \ \file option" , style Shell (flow "save-hackage-creds: false") <> "." ] when shouldSave $ do writeFilePrivate fp $ fromEncoding $ toEncoding hc prettyInfoS "Saved!" hFlush stdout pure hc -- | Write contents to a file which is always private. -- -- For history of this function, see: -- -- * https://github.com/commercialhaskell/stack/issues/2159#issuecomment-477948928 -- -- * https://github.com/commercialhaskell/stack/pull/4665 writeFilePrivate :: MonadIO m => FilePath -> Builder -> m () writeFilePrivate fp builder = liftIO $ withTempFile (FP.takeDirectory fp) (FP.takeFileName fp) $ \fpTmp h -> do -- Temp file is created such that only current user can read and write it. 
-- See docs for openTempFile: -- https://www.stackage.org/haddock/lts-13.14/base-4.12.0.0/System-IO.html#v:openTempFile -- Write to the file and close the handle. hPutBuilder h builder hClose h -- Make sure the destination file, if present, is writeable void $ tryIO $ setFileMode fp 0o600 -- And atomically move renameFile fpTmp fp credsFile :: Config -> IO FilePath credsFile config = do let dir = toFilePath (view stackRootL config) FP. "upload" createDirectoryIfMissing True dir pure $ dir FP. "credentials.json" addAPIKey :: HackageKey -> Request -> Request addAPIKey (HackageKey key) = setRequestHeader "Authorization" [fromString $ "X-ApiKey" ++ " " ++ T.unpack key] applyAuth :: (HasLogFunc m, HasTerm m) => HackageAuth -> Request -> RIO m Request applyAuth haAuth req0 = case haAuth of HAKey key -> pure (addAPIKey key req0) HACreds creds -> applyCreds creds req0 applyCreds :: (HasLogFunc m, HasTerm m) => HackageCreds -> Request -> RIO m Request applyCreds creds req0 = do manager <- liftIO getGlobalManager ereq <- if isStackUploadDisabled then do debugRequest "applyCreds" req0 pure (Left $ toException ExitSuccess ) else liftIO $ applyDigestAuth (encodeUtf8 creds.username) (encodeUtf8 creds.password) req0 manager case ereq of Left e -> do prettyWarn $ flow "No HTTP digest prompt found, this will probably fail." <> blankLine <> string ( case fromException e of Just e' -> displayDigestAuthException e' Nothing -> displayException e ) pure req0 Right req -> pure req -- | Upload a single tarball with the given @Uploader@. Instead of sending a -- file like 'upload', this sends a lazy bytestring. -- -- Since 0.1.2.1 uploadBytes :: HasTerm m => String -- ^ Hackage base URL -> HackageAuth -> UploadContent -- ^ Form of the content to be uploaded. -> Maybe String -- ^ Optional package identifier name, applies only to the upload of -- documentation. 
-> String -- ^ tar file name -> UploadVariant -> L.ByteString -- ^ tar file contents -> RIO m () uploadBytes baseUrl auth contentForm mPkgIdName tarName uploadVariant bytes = do (url, headers, uploadMethod) <- case contentForm of SDist -> do unless (isNothing mPkgIdName) $ prettyThrowIO PackageIdSpecifiedForPackageUploadBug let variant = case uploadVariant of Publishing -> "" Candidate -> "candidates/" pure ( baseUrl <> "packages/" <> variant , [("Accept", "text/plain")] , methodPost ) DocArchive -> case mPkgIdName of Nothing -> prettyThrowIO PackageIdNotSpecifiedForDocsUploadBug Just pkgIdName -> do let variant = case uploadVariant of Publishing -> "" Candidate -> "candidate/" pure ( baseUrl <> "package/" <> pkgIdName <> "/" <> variant <> "docs" , [ ("Content-Type", "application/x-tar") , ("Content-Encoding", "gzip") ] , methodPut ) let req1 = setRequestHeaders headers (fromString url) reqData = RequestBodyLBS bytes formData = [partFileRequestBody "package" tarName reqData] req2 <- case contentForm of SDist -> liftIO $ formDataBody formData req1 DocArchive -> pure $ req1 { requestBody = reqData } let req3 = req2 { method = uploadMethod } req4 <- applyAuth auth req3 prettyInfoL [ "Uploading" , style Current (fromString tarName) <> "..." ] hFlush stdout if isStackUploadDisabled then debugRequest "uploadBytes" req4 else withRunInIO $ \runInIO -> withResponse req4 (runInIO . inner) where inner :: HasTerm m => Response (ConduitM () S.ByteString IO ()) -> RIO m () inner res = case getResponseStatusCode res of 200 -> prettyInfoS "done!" 401 -> do case auth of HACreds creds -> handleIO (const $ pure ()) (liftIO $ removeFile creds.credsFile) _ -> pure () prettyThrowIO AuthenticationFailure 403 -> do prettyError $ "[S-2804]" <> line <> flow "forbidden upload" <> line <> flow "Usually means: you've already uploaded this package/version \ \combination. Ignoring error and continuing. 
The full \ \message from Hackage is below:" <> blankLine liftIO $ printBody res 503 -> do prettyError $ "[S-4444]" <> line <> flow "service unavailable" <> line <> flow "This error some times gets sent even though the upload \ \succeeded. Check on Hackage to see if your package is \ \present. The full message form Hackage is below:" <> blankLine liftIO $ printBody res code -> do let resBody = mapOutput show (getResponseBody res) resBody' <- liftIO $ runConduit $ resBody .| sinkList prettyThrowIO (ArchiveUploadFailure code resBody' tarName) printBody :: Response (ConduitM () S.ByteString IO ()) -> IO () printBody res = runConduit $ getResponseBody res .| CB.sinkHandle stdout -- | Upload a single tarball with the given @Uploader@. -- -- Since 0.1.0.0 upload :: (HasLogFunc m, HasTerm m) => String -- ^ Hackage base URL -> HackageAuth -> UploadContent -> Maybe String -- ^ Optional package identifier name, applies only to the upload of -- documentation. -> FilePath -- ^ Path to archive file. -> UploadVariant -> RIO m () upload baseUrl auth contentForm mPkgIdName fp uploadVariant = uploadBytes baseUrl auth contentForm mPkgIdName (FP.takeFileName fp) uploadVariant =<< liftIO (L.readFile fp) uploadRevision :: (HasLogFunc m, HasTerm m) => String -- ^ Hackage base URL -> HackageAuth -> PackageIdentifier -> L.ByteString -> RIO m () uploadRevision baseUrl auth ident@(PackageIdentifier name _) cabalFile = do req0 <- parseRequest $ concat [ baseUrl , "package/" , packageIdentifierString ident , "/" , packageNameString name , ".cabal/edit" ] req1 <- formDataBody [ partLBS "cabalfile" cabalFile , partBS "publish" "on" ] req0 req2 <- applyAuth auth req1 if isStackUploadDisabled then debugRequest "uploadRevision" req2 else void $ httpNoBody req2 debugRequest :: HasTerm env => String -> Request -> RIO env () debugRequest callSite req = prettyInfo $ fillSep [ fromString callSite <> ":" , flow "When enabled, would apply the following request:" ] <> line <> fromString (show req) 
stack-2.15.7/src/windows/System/Info/ShortPathName.hs0000644000000000000000000000036014445120723020613 0ustar0000000000000000-- | The module of this name differs as between Windows and non-Windows builds. -- This is the Windows version. module System.Info.ShortPathName ( getShortPathName ) where import System.Win32.Info ( getShortPathName ) stack-2.15.7/src/unix/System/Info/ShortPathName.hs0000644000000000000000000000064714445120723020114 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | The module of this name differs as between Windows and non-Windows builds. -- This is the non-Windows version. module System.Info.ShortPathName ( getShortPathName ) where import RIO.FilePath ( FilePath ) import RIO.Prelude ( pure ) import RIO.Prelude.Types ( IO ) getShortPathName :: FilePath -> IO FilePath getShortPathName = pure stack-2.15.7/src/windows/System/Permissions.hs0000644000000000000000000000102114502056214017506 0ustar0000000000000000-- | The module of this name differs as between Windows and non-Windows builds. -- This is the Windows version. module System.Permissions ( osIsMacOS , osIsWindows , setFileExecutable , setScriptPerms ) where -- | False if using Windows. osIsMacOS :: Bool osIsMacOS = False -- | True if using Windows. osIsWindows :: Bool osIsWindows = True setFileExecutable :: Monad m => FilePath -> m () setFileExecutable _ = pure () setScriptPerms :: Monad m => FilePath -> m () setScriptPerms _ = pure () stack-2.15.7/src/unix/System/Permissions.hs0000644000000000000000000000163114502056214017006 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | The module of this name differs as between Windows and non-Windows builds. -- This is the non-Windows version. module System.Permissions ( osIsMacOS , osIsWindows , setFileExecutable , setScriptPerms ) where import RIO import qualified System.Posix.Files as Posix import System.Info ( os ) -- | True if using macOS. osIsMacOS :: Bool osIsMacOS = os == "darwin" -- | False if not using Windows. 
osIsWindows :: Bool osIsWindows = False setFileExecutable :: MonadIO m => FilePath -> m () setFileExecutable fp = liftIO $ Posix.setFileMode fp 0o755 setScriptPerms :: MonadIO m => FilePath -> m () setScriptPerms fp = liftIO $ Posix.setFileMode fp $ Posix.ownerReadMode `Posix.unionFileModes` Posix.ownerWriteMode `Posix.unionFileModes` Posix.groupReadMode `Posix.unionFileModes` Posix.otherReadMode stack-2.15.7/src/System/Process/Pager.hs0000644000000000000000000000467614620153446016206 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Run external pagers (@$PAGER@, @less@, @more@). module System.Process.Pager ( pageWriter , pageText , PagerException (..) ) where import Control.Monad.Trans.Maybe ( MaybeT (runMaybeT, MaybeT) ) import qualified Data.Text.IO as T import Stack.Prelude import System.Directory ( findExecutable ) import System.Environment ( lookupEnv ) import System.Process ( createProcess, cmdspec, shell, proc, waitForProcess , CmdSpec (ShellCommand, RawCommand) , StdStream (CreatePipe) , CreateProcess (std_in, close_fds, delegate_ctlc) ) -- | Type representing exceptions thrown by functions exported by the -- "System.Process.Pager" module. data PagerException = PagerExitFailure CmdSpec Int deriving (Show, Typeable) instance Exception PagerException where displayException (PagerExitFailure cmd n) = let getStr (ShellCommand c) = c getStr (RawCommand exePath _) = exePath in concat [ "Error: [S-9392]\n" , "Pager (`" , getStr cmd , "') exited with non-zero status: " , show n ] -- | Run pager, providing a function that writes to the pager's input. 
pageWriter :: (Handle -> IO ()) -> IO () pageWriter writer = do mpager <- runMaybeT $ cmdspecFromEnvVar <|> cmdspecFromExeName "less" <|> cmdspecFromExeName "more" case mpager of Just pager -> do (Just h,_,_,procHandle) <- createProcess pager { std_in = CreatePipe , close_fds = True , delegate_ctlc = True } (_ :: Either IOException ()) <- try (do writer h hClose h) exit <- waitForProcess procHandle case exit of ExitSuccess -> pure () ExitFailure n -> throwIO (PagerExitFailure (cmdspec pager) n) pure () Nothing -> writer stdout where cmdspecFromEnvVar = shell <$> MaybeT (lookupEnv "PAGER") cmdspecFromExeName = fmap (\command -> proc command []) . MaybeT . findExecutable -- | Run pager to display a 'Text' pageText :: Text -> IO () pageText = pageWriter . flip T.hPutStr stack-2.15.7/src/windows/System/Terminal.hs0000644000000000000000000001065414604306201016757 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -- | The module of this name differs as between Windows and non-Windows builds. -- This is the Windows version. 
module System.Terminal ( fixCodePage , getTerminalWidth , hIsTerminalDeviceOrMinTTY ) where import Distribution.Types.Version ( mkVersion ) import Foreign.Marshal.Alloc ( allocaBytes ) import Foreign.Ptr ( Ptr ) import Foreign.Storable ( peekByteOff ) import Stack.Prelude import System.IO ( hGetContents ) import System.Process ( StdStream (..), createProcess, shell, std_err, std_in , std_out, waitForProcess ) import System.Win32 ( isMinTTYHandle, withHandleToHANDLE ) import System.Win32.Console ( setConsoleCP, setConsoleOutputCP, getConsoleCP , getConsoleOutputCP ) type HANDLE = Ptr () data CONSOLE_SCREEN_BUFFER_INFO sizeCONSOLE_SCREEN_BUFFER_INFO :: Int sizeCONSOLE_SCREEN_BUFFER_INFO = 22 posCONSOLE_SCREEN_BUFFER_INFO_srWindow :: Int posCONSOLE_SCREEN_BUFFER_INFO_srWindow = 10 -- 4 x Word16 Left,Top,Right,Bottom c_STD_OUTPUT_HANDLE :: Int c_STD_OUTPUT_HANDLE = -11 foreign import ccall unsafe "windows.h GetConsoleScreenBufferInfo" c_GetConsoleScreenBufferInfo :: HANDLE -> Ptr CONSOLE_SCREEN_BUFFER_INFO -> IO Bool foreign import ccall unsafe "windows.h GetStdHandle" c_GetStdHandle :: Int -> IO HANDLE getTerminalWidth :: IO (Maybe Int) getTerminalWidth = do hdl <- c_GetStdHandle c_STD_OUTPUT_HANDLE allocaBytes sizeCONSOLE_SCREEN_BUFFER_INFO $ \p -> do b <- c_GetConsoleScreenBufferInfo hdl p if not b then do -- This could happen on Cygwin or MSYS let stty = (shell "stty size") { std_in = UseHandle stdin , std_out = CreatePipe , std_err = CreatePipe } (_, mbStdout, _, rStty) <- createProcess stty exStty <- waitForProcess rStty case exStty of ExitFailure _ -> pure Nothing ExitSuccess -> maybe (pure Nothing) (\hSize -> do sizeStr <- hGetContents hSize case map readMaybe $ words sizeStr :: [Maybe Int] of [Just _r, Just c] -> pure $ Just c _ -> pure Nothing ) mbStdout else do [left,_top,right,_bottom] <- forM [0..3] $ \i -> do v <- peekByteOff p (i * 2 + posCONSOLE_SCREEN_BUFFER_INFO_srWindow) pure $ fromIntegral (v :: Word16) pure $ Just (1 + right - left) -- | Set the 
code page for this process as necessary. Only applies to Windows. -- See: https://github.com/commercialhaskell/stack/issues/738 fixCodePage :: HasTerm env => Bool -- ^ modify code page? -> Version -- ^ GHC version -> RIO env a -> RIO env a fixCodePage mcp ghcVersion inner = if mcp && ghcVersion < mkVersion [7, 10, 3] then fixCodePage' -- GHC >=7.10.3 doesn't need this code page hack. else inner where fixCodePage' = do origCPI <- liftIO getConsoleCP origCPO <- liftIO getConsoleOutputCP let setInput = origCPI /= expected setOutput = origCPO /= expected fixInput | setInput = bracket_ (liftIO $ setConsoleCP expected) (liftIO $ setConsoleCP origCPI) | otherwise = id fixOutput | setOutput = bracket_ (liftIO $ setConsoleOutputCP expected) (liftIO $ setConsoleOutputCP origCPO) | otherwise = id case (setInput, setOutput) of (False, False) -> pure () (True, True) -> warn [] (True, False) -> warn ["input"] (False, True) -> warn ["output"] fixInput $ fixOutput inner expected = 65001 -- UTF-8 warn typ = prettyInfoL $ "Setting" : typ <> [ flow "codepage to UTF-8 (65001) to ensure correct output from GHC." ] -- | hIsTerminaDevice does not recognise handles to mintty terminals as terminal -- devices, but isMinTTYHandle does. hIsTerminalDeviceOrMinTTY :: MonadIO m => Handle -> m Bool hIsTerminalDeviceOrMinTTY h = do isTD <- hIsTerminalDevice h if isTD then pure True else liftIO $ withHandleToHANDLE h isMinTTYHandle stack-2.15.7/src/unix/System/Terminal.hsc0000644000000000000000000000277014445120723016421 0ustar0000000000000000{-# LANGUAGE CApiFFI #-} -- | The module of this name differs as between Windows and non-Windows builds. -- This is the non-Windows version. 
module System.Terminal ( fixCodePage , getTerminalWidth , hIsTerminalDeviceOrMinTTY ) where import Foreign import Foreign.C.Types import RIO ( Handle, MonadIO, hIsTerminalDevice ) #include #include newtype WindowWidth = WindowWidth CUShort deriving (Eq, Ord, Show) instance Storable WindowWidth where sizeOf _ = (#size struct winsize) alignment _ = (#alignment struct winsize) peek p = WindowWidth <$> (#peek struct winsize, ws_col) p poke p (WindowWidth w) = do (#poke struct winsize, ws_col) p w -- `ioctl` is variadic, so `capi` is needed, see: -- https://www.haskell.org/ghc/blog/20210709-capi-usage.html foreign import capi "sys/ioctl.h ioctl" ioctl :: CInt -> CInt -> Ptr WindowWidth -> IO CInt getTerminalWidth :: IO (Maybe Int) getTerminalWidth = alloca $ \p -> do errno <- ioctl (#const STDOUT_FILENO) (#const TIOCGWINSZ) p if errno < 0 then return Nothing else do WindowWidth w <- peek p return . Just . fromIntegral $ w fixCodePage :: x -> y -> a -> a fixCodePage _ _ = id -- | hIsTerminaDevice does not recognise handles to mintty terminals as terminal -- devices, but isMinTTYHandle does. hIsTerminalDeviceOrMinTTY :: MonadIO m => Handle -> m Bool hIsTerminalDeviceOrMinTTY = hIsTerminalDevice stack-2.15.7/src/windows/Stack/Constants/UsrLibDirs.hs0000644000000000000000000000100714502056214020756 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | The module of this name differs as between Windows and non-Windows builds. -- This is the Windows version. 
module Stack.Constants.UsrLibDirs ( libDirs , usrLibDirs ) where import Stack.Prelude -- | Used in Stack.Setup for detecting libc.musl-x86_64.so.1, see comments at -- use site libDirs :: [Path Abs Dir] libDirs = [] -- | Used in Stack.Setup for detecting libtinfo, see comments at use site usrLibDirs :: [Path Abs Dir] usrLibDirs = [] stack-2.15.7/src/unix/Stack/Constants/UsrLibDirs.hs0000644000000000000000000000125214502056214020251 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE TemplateHaskell #-} -- | The module of this name differs as between Windows and non-Windows builds. -- This is the non-Windows version. module Stack.Constants.UsrLibDirs ( libDirs , usrLibDirs ) where import Path ( mkAbsDir ) import Stack.Prelude -- | Used in Stack.Setup for detecting libc.musl-x86_64.so.1, see comments at -- use site libDirs :: [Path Abs Dir] libDirs = [$(mkAbsDir "/lib"), $(mkAbsDir "/lib64")] -- | Used in Stack.Setup for detecting libtinfo, see comments at use site usrLibDirs :: [Path Abs Dir] usrLibDirs = [$(mkAbsDir "/usr/lib"), $(mkAbsDir "/usr/lib64")] stack-2.15.7/src/windows/Stack/Docker/Handlers.hs0000644000000000000000000000232114604306201017724 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} -- | The module of this name differs as between Windows and non-Windows builds. -- This is the Windows version. 
module Stack.Docker.Handlers ( handleSetGroups , handleSignals ) where import RIO.Process ( ExitCodeException, proc , runProcess_, setDelegateCtlc ) import Stack.Types.Config ( HasConfig ) import Stack.Types.Docker ( DockerOpts (..)) import Stack.Prelude import System.PosixCompat.Types ( GroupID ) handleSetGroups :: [GroupID] -> IO () handleSetGroups _ = pure () handleSignals :: (Exception e, HasConfig env) => DockerOpts -> Bool -> String -> RIO env (Either e ()) handleSignals docker keepStdinOpen containerID = do let args' = concat [ ["start"] , ["-a" | not docker.detach] , ["-i" | keepStdinOpen] , [containerID] ] finally (try $ proc "docker" args' $ runProcess_ . setDelegateCtlc False) ( unless (docker.persist || docker.detach) $ readProcessNull "docker" ["rm", "-f", containerID] `catch` (\(_ :: ExitCodeException) -> pure ()) ) stack-2.15.7/src/unix/Stack/Docker/Handlers.hs0000644000000000000000000000454514604306201017227 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedRecordDot #-} -- | The module of this name differs as between Windows and non-Windows builds. -- This is the non-Windows version. module Stack.Docker.Handlers ( handleSetGroups , handleSignals ) where import RIO.Process ( ExitCodeException, proc, runProcess_, setDelegateCtlc ) import Stack.Prelude import Stack.Types.Config ( HasConfig ) import Stack.Types.Docker ( DockerOpts (..) ) import System.Posix.Signals ( Handler (..), installHandler, sigABRT, sigHUP, sigINT , sigPIPE, sigTERM, sigUSR1, sigUSR2 ) import qualified System.Posix.User as PosixUser import System.PosixCompat.Types ( GroupID ) handleSetGroups :: [GroupID] -> IO () handleSetGroups = PosixUser.setGroups -- MSS 2018-08-30 can the CPP below be removed entirely, and instead exec the -- `docker` process so that it can handle the signals directly? 
handleSignals :: (Exception e, HasConfig env) => DockerOpts -> Bool -> String -> RIO env (Either e ()) handleSignals docker keepStdinOpen containerID = do run <- askRunInIO oldHandlers <- forM signals $ \sig -> do let sigHandler = run $ do readProcessNull "docker" ["kill", "--signal=" ++ show sig, containerID] when (sig `elem` [sigTERM, sigABRT]) $ do -- Give the container 30 seconds to exit gracefully, then send a -- sigKILL to force it threadDelay 30000000 readProcessNull "docker" ["kill", containerID] oldHandler <- liftIO $ installHandler sig (Catch sigHandler) Nothing pure (sig, oldHandler) let args' = concat [ ["start"] , ["-a" | not docker.detach] , ["-i" | keepStdinOpen] , [containerID] ] finally (try $ proc "docker" args' $ runProcess_ . setDelegateCtlc False) ( do unless (docker.persist || docker.detach) $ readProcessNull "docker" ["rm", "-f", containerID] `catch` (\(_ :: ExitCodeException) -> pure ()) forM_ oldHandlers $ \(sig, oldHandler) -> liftIO $ installHandler sig oldHandler Nothing ) where signals = [sigINT, sigABRT, sigHUP, sigPIPE, sigTERM, sigUSR1, sigUSR2] stack-2.15.7/src/windows/System/Posix/User.hs0000644000000000000000000000310714604306201017217 0ustar0000000000000000{-# LANGUAGE NoFieldSelectors #-} {-# LANGUAGE OverloadedRecordDot #-} -- | The module of this name differs as between Windows and non-Windows builds. -- This is the Windows version. Non-Windows builds rely on the unix package, -- which exposes a module of the same name. module System.Posix.User ( getEffectiveUserID , getEffectiveGroupID , getGroups , getUserEntryForName , homeDirectory , setGroupID , setUserID ) where import System.IO.Error ( illegalOperationErrorType, mkIOError ) import System.PosixCompat.Types ( GroupID, UserID ) unsupported :: String -> IO a unsupported f = ioError $ mkIOError illegalOperationErrorType x Nothing Nothing where x = "System.Posix.User." ++ f ++ ": not supported on Windows." 
getEffectiveUserID :: IO UserID getEffectiveUserID = unsupported "getEffectiveUserID" getEffectiveGroupID :: IO GroupID getEffectiveGroupID = unsupported "getEffectiveGroupID" getGroups :: IO [GroupID] getGroups = return [] getUserEntryForName :: String -> IO UserEntry getUserEntryForName _ = unsupported "getUserEntryForName" setGroupID :: GroupID -> IO () setGroupID _ = return () setUserID :: UserID -> IO () setUserID _ = return () data UserEntry = UserEntry { userName :: String , userPassword :: String , userID :: UserID , userGroupID :: GroupID , userGecos :: String , homeDirectory :: String , userShell :: String } deriving (Eq, Read, Show) homeDirectory :: UserEntry -> String homeDirectory ue = ue.homeDirectory stack-2.15.7/src/windows/System/Uname.hs0000644000000000000000000000036714445120723016257 0ustar0000000000000000-- | The module of this name differs as between Windows and non-Windows builds. -- This is the Windows version. module System.Uname ( getRelease ) where getRelease :: IO String getRelease = error "getRelease not supported on Windows" stack-2.15.7/src/unix/System/Uname.hsc0000644000000000000000000000235714445120723015714 0ustar0000000000000000-- | The module of this name differs as between Windows and non-Windows builds. -- This is the non-Windows version. module System.Uname ( getRelease ) where #include import Foreign import Foreign.C getRelease :: IO String getRelease = do alloca $ \ ptr -> do throwErrnoIfMinus1_ "uname" $ uname ptr peekCString $ release ptr -- | @'uname' name@ stores nul-terminated strings of information -- identifying the current system info to the structure referenced -- by name. 
-- -- > import Foreign.C -- > import Foreign.Marshal -- > -- > sysName :: IO String -- > sysName = alloca $ \ ptr -> -- > do throwErrnoIfMinus1_ "uname" $ uname ptr -- > peekCString $ sysname ptr -- foreign import ccall unsafe "haskell_uname" uname :: Ptr Utsname -> IO CInt data Utsname instance Storable Utsname where sizeOf = const #size struct utsname alignment = const #alignment struct utsname poke = error "Storable Utsname: peek: unsupported operation" peek = error "Storable Utsname: poke: unsupported operation" release :: Ptr Utsname -> CString release = (#ptr struct utsname, release) stack-2.15.7/src/unix/cbits/uname.c0000644000000000000000000000014414331513215015225 0ustar0000000000000000#include int haskell_uname(struct utsname *name) { return uname(name); } stack-2.15.7/tests/integration/IntegrationSpec.hs0000644000000000000000000002311514502056214020221 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} import Conduit ( (.|), connect, filterC, filterMC, foldMapC, mapM_C , runConduit, runConduitRes, runResourceT, sourceDirectory , sourceDirectoryDeep, stderrC, withSourceFile ) import Data.List ( stripPrefix ) import Options.Generic ( ParseField, ParseRecord (..), defaultModifiers , fieldNameModifier, firstLetter, getRecord , parseRecordWithModifiers, shortNameModifier ) import RIO import RIO.Char ( toLower ) import RIO.Directory ( canonicalizePath, copyFile, createDirectoryIfMissing , doesFileExist, getAppUserDataDirectory ) import RIO.FilePath ( (), (<.>), isPathSeparator, takeDirectory , takeExtensions, takeFileName ) import RIO.List ( isInfixOf, partition ) import qualified RIO.Map as Map import RIO.Process ( HasProcessContext (..), closed, findExecutable, proc , runProcess, runProcess_, setStderr, setStdin, setStdout , useHandleOpen, withModifyEnvVars, withWorkingDir ) import qualified RIO.Set as Set import qualified RIO.Text as T import System.Environment ( getExecutablePath, lookupEnv ) import System.Info ( os 
) import System.PosixCompat.Files ( createSymbolicLink ) -- This code does not use a test framework so that we get direct -- control of how the output is displayed. main :: IO () main = runSimpleApp $ do logInfo "Initiating Stack integration test running" options <- getRecord "Stack integration tests" results <- runApp options $ do logInfo "Running with the following environment" proc "env" [] runProcess_ tests <- asks appTestDirs let count = Set.size tests loop !idx rest !accum = case rest of [] -> pure accum next:rest' -> do logInfo $ "Running integration test " <> display idx <> "/" <> display count <> ": " <> fromString (takeFileName next) res <- test next loop (idx + 1) rest' (res <> accum) loop (1 :: Int) (Set.toList tests) mempty let (successes, failures) = partition ((== ExitSuccess) . snd) $ Map.toList results unless (null successes) $ do logInfo "Successful tests:" for_ successes $ \(x, _) -> logInfo $ "- " <> display x logInfo "" if null failures then logInfo "No failures!" else do logInfo "Failed tests:" for_ failures $ \(x, ec) -> logInfo $ "- " <> display x <> " - " <> displayShow ec exitFailure data Options = Options { optSpeed :: Maybe Speed , optMatch :: Maybe String , optNot :: [String] } deriving Generic instance ParseRecord Options where parseRecord = parseRecordWithModifiers modifiers where optName = map toLower . drop 3 modifiers = defaultModifiers { fieldNameModifier = optName , shortNameModifier = firstLetter . 
optName } data Speed = Fast | Normal | Superslow deriving (Read, Generic) instance ParseField Speed exeExt :: String exeExt = if isWindows then ".exe" else "" isWindows :: Bool isWindows = os == "mingw32" runApp :: Options -> RIO App a -> RIO SimpleApp a runApp options inner = do let speed = fromMaybe Normal $ optSpeed options simpleApp <- ask runghc <- findExecutable "runghc" >>= either throwIO pure srcDir <- canonicalizePath "" testsRoot <- canonicalizePath $ srcDir "tests/integration" libdir <- canonicalizePath $ testsRoot "lib" myPath <- liftIO getExecutablePath stack <- canonicalizePath $ takeDirectory myPath "stack" ++ exeExt logInfo $ "Using Stack located at " <> fromString stack proc stack ["--version"] runProcess_ logInfo $ "Using runghc located at " <> fromString runghc proc runghc ["--version"] runProcess_ let matchTest = case (optMatch options, optNot options) of (Just str, _) -> (str `isInfixOf`) (_, []) -> const True (_, nl) -> \a -> all (\b -> not $ b `isInfixOf` a) nl testDirs <- runConduitRes $ sourceDirectory (testsRoot "tests") .| filterMC (liftIO . hasTest) .| filterC matchTest .| foldMapC Set.singleton let modifyEnvCommon = Map.insert "SRC_DIR" (fromString srcDir) . Map.insert "STACK_EXE" (fromString stack) . Map.delete "GHC_PACKAGE_PATH" . Map.insert "STACK_TEST_SPEED" (case speed of Superslow -> "SUPERSLOW" _ -> "NORMAL") . Map.fromList . map (first T.toUpper) . Map.toList case speed of Fast -> do let app = App { appSimpleApp = simpleApp , appRunghc = runghc , appLibDir = libdir , appSetupHome = id , appTestDirs = testDirs } runRIO app $ withModifyEnvVars modifyEnvCommon inner _ -> do morigStackRoot <- liftIO $ lookupEnv "STACK_ROOT" origStackRoot <- case morigStackRoot of Nothing -> getAppUserDataDirectory "stack" Just x -> pure x logInfo "Initializing/updating the original Pantry store" proc stack ["update"] runProcess_ pantryRoot <- canonicalizePath $ origStackRoot "pantry" let modifyEnv = Map.insert "PANTRY_ROOT" (fromString pantryRoot) . 
modifyEnvCommon app = App { appSimpleApp = simpleApp , appRunghc = runghc , appLibDir = libdir , appSetupHome = \inner' -> withSystemTempDirectory "home" $ \newHome -> do let newStackRoot = newHome ".stack" createDirectoryIfMissing True newStackRoot let modifyEnv' = Map.insert "HOME" (fromString newHome) . Map.insert "APPDATA" (fromString newHome) . Map.insert "STACK_ROOT" (fromString newStackRoot) writeFileBinary (newStackRoot "config.yaml") "system-ghc: true\ninstall-ghc: false\n" withModifyEnvVars modifyEnv' inner' , appTestDirs = testDirs } runRIO app $ withModifyEnvVars modifyEnv inner hasTest :: FilePath -> IO Bool hasTest dir = doesFileExist $ dir "Main.hs" data App = App { appRunghc :: !FilePath , appLibDir :: !FilePath , appSetupHome :: !(forall a. RIO App a -> RIO App a) , appSimpleApp :: !SimpleApp , appTestDirs :: !(Set FilePath) } simpleAppL :: Lens' App SimpleApp simpleAppL = lens appSimpleApp (\x y -> x { appSimpleApp = y }) instance HasLogFunc App where logFuncL = simpleAppL.logFuncL instance HasProcessContext App where processContextL = simpleAppL.processContextL -- | Call 'appSetupHome' on the inner action withHome :: RIO App a -> RIO App a withHome inner = do app <- ask appSetupHome app inner test :: FilePath -- ^ test dir -> RIO App (Map Text ExitCode) test testDir = withDir $ \dir -> withHome $ do runghc <- asks appRunghc libDir <- asks appLibDir let mainFile = testDir "Main.hs" copyTree (testDir "files") dir withSystemTempFile (name <.> "log") $ \logfp logh -> do ec <- withWorkingDir dir $ withModifyEnvVars (Map.insert "TEST_DIR" $ fromString testDir) $ proc runghc [ "-clear-package-db" , "-global-package-db" , "-i" ++ libDir , mainFile ] $ runProcess . setStdin closed . setStdout (useHandleOpen logh) . setStderr (useHandleOpen logh) hClose logh case ec of ExitSuccess -> logInfo "Success!" 
_ -> do logError "Failure, dumping log\n\n" withSourceFile logfp $ \src -> runConduit $ src .| stderrC logError $ "\n\nEnd of log for " <> fromString name pure $ Map.singleton (fromString name) ec where name = takeFileName testDir withDir = withSystemTempDirectory ("stack-integration-" ++ name) copyTree :: MonadIO m => FilePath -> FilePath -> m () copyTree src dst = liftIO $ runResourceT (sourceDirectoryDeep False src `connect` mapM_C go) `catch` \(_ :: IOException) -> pure () where go srcfp = liftIO $ do Just suffix <- pure $ stripPrefix src srcfp let dstfp = dst stripHeadSeparator suffix createDirectoryIfMissing True $ takeDirectory dstfp -- copying yaml files so lock files won't get created in -- the source directory if takeFileName srcfp /= "package.yaml" && (takeExtensions srcfp == ".yaml" || takeExtensions srcfp == ".yml") then copyFile srcfp dstfp else createSymbolicLink srcfp dstfp `catch` \(_ :: IOException) -> copyFile srcfp dstfp -- for Windows stripHeadSeparator :: FilePath -> FilePath stripHeadSeparator [] = [] stripHeadSeparator fp@(x:xs) = if isPathSeparator x then xs else fp stack-2.15.7/tests/integration/lib/StackTest.hs0000644000000000000000000003127314620153474017611 0ustar0000000000000000{-# LANGUAGE ScopedTypeVariables #-} module StackTest ( run' , run , runShell , runWithCwd , stackExe , stackSrc , testDir , stack' , stack , stackCleanFull , stackIgnoreException , stackErr , Repl , ReplConnection (..) 
, nextPrompt , replCommand , replGetChar , replGetLine , runRepl , repl , stackStderr , stackCheckStderr , stackErrStderr , runEx , runEx' , stackCheckStdout , doesNotExist , doesExist , doesFileOrDirExist , copy , fileContentsMatch , logInfo , showProcessArgDebug , exeExt , isWindows , isLinux , getIsAlpine , isARM , isAarch64 , isMacOSX , defaultResolverArg , removeFileIgnore , removeDirIgnore , withCwd , withSourceDirectory , superslow ) where import Control.Monad ( forever, unless, void, when ) import Control.Monad.IO.Class ( liftIO ) import Control.Monad.Trans.Reader ( ReaderT, ask, runReaderT ) import Control.Concurrent ( forkIO ) import Control.Exception ( Exception (..), IOException, bracket_, catch, throw , throwIO ) import Data.Maybe ( fromMaybe ) import GHC.Stack ( HasCallStack ) import System.Environment ( getEnv, lookupEnv ) import System.Directory ( copyFile, doesDirectoryExist, doesFileExist , getCurrentDirectory, removeDirectoryRecursive, removeFile , setCurrentDirectory ) import System.IO ( BufferMode (..), Handle, IOMode (..), hGetChar, hGetLine , hPutChar, hPutStr, hPutStrLn, hSetBuffering, stderr , withFile ) import System.IO.Error ( isDoesNotExistError, isEOFError ) import System.Process ( CreateProcess (..), StdStream (..), createProcess, proc , readCreateProcessWithExitCode, readProcessWithExitCode , shell, waitForProcess ) import System.Exit ( ExitCode (..) 
) import System.Info ( arch, os ) run' :: HasCallStack => FilePath -> [String] -> IO ExitCode run' cmd args = do logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args) (Nothing, Nothing, Nothing, ph) <- createProcess (proc cmd args) waitForProcess ph run :: HasCallStack => FilePath -> [String] -> IO () run cmd args = do ec <- run' cmd args unless (ec == ExitSuccess) $ error $ "Exited with exit code: " ++ displayException ec runShell :: HasCallStack => String -> IO () runShell cmd = do logInfo $ "Running: " ++ cmd (Nothing, Nothing, Nothing, ph) <- createProcess (shell cmd) ec <- waitForProcess ph unless (ec == ExitSuccess) $ error $ "Exited with exit code: " ++ displayException ec runWithCwd :: HasCallStack => FilePath -> String -> [String] -> IO String runWithCwd cwdPath cmd args = do logInfo $ "Running: " ++ cmd let cp = proc cmd args (ec, stdoutStr, _) <- readCreateProcessWithExitCode (cp { cwd = Just cwdPath }) "" unless (ec == ExitSuccess) $ error $ "Exited with exit code: " ++ displayException ec pure stdoutStr stackExe :: IO String stackExe = getEnv "STACK_EXE" stackSrc :: IO String stackSrc = getEnv "SRC_DIR" testDir :: IO String testDir = getEnv "TEST_DIR" stack' :: HasCallStack => [String] -> IO ExitCode stack' args = do stackEnv <- stackExe run' stackEnv args stack :: HasCallStack => [String] -> IO () stack args = do ec <- stack' args unless (ec == ExitSuccess) $ error $ "Exited with exit code: " ++ displayException ec -- Temporary workaround for Windows to ignore exceptions arising out of Windows -- when we do stack clean. More info here: -- https://github.com/commercialhaskell/stack/issues/4936 stackCleanFull :: HasCallStack => IO () stackCleanFull = stackIgnoreException ["clean", "--full"] -- Temporary workaround for Windows to ignore exceptions arising out of Windows -- when we do stack clean. 
More info here: -- https://github.com/commercialhaskell/stack/issues/4936 stackIgnoreException :: HasCallStack => [String] -> IO () stackIgnoreException args = if isWindows then void (stack' args) `catch` (\(_e :: IOException) -> pure ()) else stack args stackErr :: HasCallStack => [String] -> IO () stackErr args = do ec <- stack' args when (ec == ExitSuccess) $ error "stack was supposed to fail, but didn't" type Repl = ReaderT ReplConnection IO data ReplConnection = ReplConnection { replStdin :: Handle , replStdout :: Handle } nextPrompt :: Repl () nextPrompt = do (ReplConnection _ replStdoutHandle) <- ask c <- liftIO $ hGetChar replStdoutHandle if c == '>' then do -- Skip next character _ <- liftIO $ hGetChar replStdoutHandle pure () else nextPrompt replCommand :: String -> Repl () replCommand cmd = do (ReplConnection replStdinHandle _) <- ask liftIO $ hPutStrLn replStdinHandle cmd replGetLine :: Repl String replGetLine = ask >>= liftIO . hGetLine . replStdout replGetChar :: Repl Char replGetChar = ask >>= liftIO . hGetChar . 
replStdout runRepl :: HasCallStack => FilePath -> [String] -> ReaderT ReplConnection IO () -> IO ExitCode runRepl cmd args actions = do logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args) (Just rStdin, Just rStdout, Just rStderr, ph) <- createProcess (proc cmd args) { std_in = CreatePipe , std_out = CreatePipe , std_err = CreatePipe } hSetBuffering rStdin NoBuffering hSetBuffering rStdout NoBuffering hSetBuffering rStderr NoBuffering -- Log stack repl's standard error output tempDir <- if isWindows then fromMaybe "" <$> lookupEnv "TEMP" else pure "/tmp" let tempLogFile = tempDir ++ "/stderr" _ <- forkIO $ withFile tempLogFile WriteMode $ \logFileHandle -> do hSetBuffering logFileHandle NoBuffering forever $ catch (hGetChar rStderr >>= hPutChar logFileHandle) (\e -> unless (isEOFError e) $ throw e) runReaderT actions (ReplConnection rStdin rStdout) waitForProcess ph repl :: HasCallStack => [String] -> Repl () -> IO () repl args action = do stackExe' <- stackExe ec <- runRepl stackExe' ("repl":args) action unless (ec == ExitSuccess) $ pure () -- TODO: Understand why the exit code is 1 despite running GHCi tests -- successfully. -- else error $ "Exited with exit code: " ++ show ec stackStderr :: HasCallStack => [String] -> IO (ExitCode, String) stackStderr args = do stackExe' <- stackExe logInfo $ "Running: " ++ stackExe' ++ " " ++ unwords (map showProcessArgDebug args) (ec, _, err) <- readProcessWithExitCode stackExe' args "" hPutStr stderr err pure (ec, err) -- | Run stack with arguments and apply a check to the resulting stderr output -- if the process succeeded. stackCheckStderr :: HasCallStack => [String] -> (String -> IO ()) -> IO () stackCheckStderr args check = do (ec, err) <- stackStderr args if ec /= ExitSuccess then error $ "Exited with exit code: " ++ displayException ec else check err -- | Same as 'stackCheckStderr', but ensures that the Stack process -- fails. 
stackErrStderr :: HasCallStack => [String] -> (String -> IO ()) -> IO () stackErrStderr args check = do (ec, err) <- stackStderr args if ec == ExitSuccess then error "Stack process succeeded, but it shouldn't" else check err runEx :: HasCallStack => FilePath -> String -> IO (ExitCode, String, String) runEx cmd args = runEx' cmd $ words args runEx' :: HasCallStack => FilePath -> [String] -> IO (ExitCode, String, String) runEx' cmd args = do logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args) (ec, out, err) <- readProcessWithExitCode cmd args "" putStr out hPutStr stderr err pure (ec, out, err) -- | Run stack with arguments and apply a check to the resulting stdout output -- if the process succeeded. -- -- Take care with newlines; if the output includes a newline character that -- should not be there, use 'Data.List.Extra.trimEnd' to remove it. stackCheckStdout :: HasCallStack => [String] -> (String -> IO ()) -> IO () stackCheckStdout args check = do stackExe' <- stackExe (ec, out, _) <- runEx' stackExe' args if ec /= ExitSuccess then error $ "Exited with exit code: " ++ displayException ec else check out doesNotExist :: HasCallStack => FilePath -> IO () doesNotExist fp = do logInfo $ "doesNotExist " ++ fp exists <- doesFileOrDirExist fp case exists of (Right msg) -> error msg (Left _) -> pure () doesExist :: HasCallStack => FilePath -> IO () doesExist fp = do logInfo $ "doesExist " ++ fp exists <- doesFileOrDirExist fp case exists of (Right _) -> pure () (Left _) -> error "No file or directory exists" doesFileOrDirExist :: HasCallStack => FilePath -> IO (Either () String) doesFileOrDirExist fp = do isFile <- doesFileExist fp if isFile then pure (Right ("File exists: " ++ fp)) else do isDir <- doesDirectoryExist fp if isDir then pure (Right ("Directory exists: " ++ fp)) else pure (Left ()) copy :: HasCallStack => FilePath -> FilePath -> IO () copy src dest = do logInfo ("Copy " ++ show src ++ " to " ++ show dest) System.Directory.copyFile 
src dest fileContentsMatch :: HasCallStack => FilePath -> FilePath -> IO () fileContentsMatch f1 f2 = do doesExist f1 doesExist f2 f1Contents <- readFile f1 f2Contents <- readFile f2 unless (f1Contents == f2Contents) $ error ("contents do not match for " ++ show f1 ++ " " ++ show f2) logInfo :: String -> IO () logInfo = hPutStrLn stderr -- TODO: use Stack's process running utilities? (better logging) -- for now just copy+modifying this one from System.Process.Log -- | Show a process arg including speechmarks when necessary. Just for -- debugging purposes, not functionally important. showProcessArgDebug :: String -> String showProcessArgDebug x | any special x = show x | otherwise = x where special '"' = True special ' ' = True special _ = False -- | Extension of executables exeExt :: String exeExt = if isWindows then ".exe" else "" -- | Is the OS Windows? isWindows :: Bool isWindows = os == "mingw32" isLinux :: Bool isLinux = os == "linux" -- | Is the OS Alpine Linux? getIsAlpine :: IO Bool getIsAlpine = doesFileExist "/etc/alpine-release" -- | Is the architecture ARM? isARM :: Bool isARM = arch == "arm" -- | Is the architecture Aarch64? isAarch64 :: Bool isAarch64 = arch == "aarch64" -- | Is the OS Mac OS X? isMacOSX :: Bool isMacOSX = os == "darwin" -- | To avoid problems with GHC version mismatch when a new LTS major -- version is released, pass this argument to @stack@ when running in -- a global context. The LTS major version here should match that of -- the main @stack.yaml@. -- defaultResolverArg :: String defaultResolverArg = "--snapshot=lts-22.21" -- | Remove a file and ignore any warnings about missing files. removeFileIgnore :: HasCallStack => FilePath -> IO () removeFileIgnore fp = removeFile fp `catch` \e -> if isDoesNotExistError e then pure () else throwIO e -- | Remove a directory and ignore any warnings about missing files. 
removeDirIgnore :: HasCallStack => FilePath -> IO () removeDirIgnore fp = removeDirectoryRecursive fp `catch` \e -> if isDoesNotExistError e then pure () else throwIO e -- | Changes to the specified working directory. withCwd :: HasCallStack => FilePath -> IO () -> IO () withCwd dir action = do currentDirectory <- getCurrentDirectory let enterDir = setCurrentDirectory dir exitDir = setCurrentDirectory currentDirectory bracket_ enterDir exitDir action -- | Changes working directory to Stack source directory. withSourceDirectory :: HasCallStack => IO () -> IO () withSourceDirectory action = do dir <- stackSrc withCwd dir action -- | Mark a test as superslow, only to be run when explicitly requested. superslow :: HasCallStack => IO () -> IO () superslow inner = do mres <- lookupEnv "STACK_TEST_SPEED" case mres of Just "NORMAL" -> logInfo "Skipping superslow test" Just "SUPERSLOW" -> do logInfo "Running superslow test, hold on to your butts" inner Nothing -> do logInfo "No STACK_TEST_SPEED specified. Executing superslow test, hold \ \on to your butts" inner Just x -> error $ "Invalid value for STACK_TEST_SPEED env var: " ++ show x stack-2.15.7/app/Main.hs0000644000000000000000000000032314502056212013074 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Main ( main ) where import RIO ( IO ) import qualified Stack -- | The entry point for the Stack executable. main :: IO () main = Stack.main stack-2.15.7/tests/unit/Spec.hs0000644000000000000000000000013614502056215014450 0ustar0000000000000000{-# OPTIONS_GHC -Wno-missing-export-lists #-} {-# OPTIONS_GHC -F -pgmF hspec-discover #-} stack-2.15.7/tests/unit/Stack/ArgsSpec.hs0000644000000000000000000001441414502056215016336 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Args parser test suite. 
module Stack.ArgsSpec ( spec , argsSpec , argsInputOutput , interpreterArgsSpec ) where import Data.Attoparsec.Args ( EscapingMode (..), parseArgsFromString ) import Data.Attoparsec.Interpreter ( interpreterArgsParser ) import qualified Data.Attoparsec.Text as P import Data.Text ( pack ) import Prelude ( head ) import Stack.Constants ( stackProgName ) import Stack.Prelude import Test.Hspec ( Spec, describe, it ) -- | Test spec. spec :: Spec spec = do argsSpec interpreterArgsSpec argsSpec :: Spec argsSpec = forM_ argsInputOutput (\(input,output) -> it input (parseArgsFromString Escaping input == output)) -- | Fairly comprehensive checks. argsInputOutput :: [(String, Either String [String])] argsInputOutput = [ ("x", Right ["x"]) , ("x y z", Right ["x", "y", "z"]) , ("aaa bbb ccc", Right ["aaa", "bbb", "ccc"]) , (" aaa bbb ccc ", Right ["aaa", "bbb", "ccc"]) , ("aaa\"", Left "unterminated string: endOfInput") , ("\"", Left "unterminated string: endOfInput") , ("\"\"", Right [""]) , ("\"aaa", Left "unterminated string: endOfInput") , ("\"aaa\" bbb ccc \"ddd\"", Right ["aaa", "bbb", "ccc", "ddd"]) , ("\"aa\\\"a\" bbb ccc \"ddd\"", Right ["aa\"a", "bbb", "ccc", "ddd"]) , ("\"aa\\\"a\" bb\\b ccc \"ddd\"", Right ["aa\"a", "bb\\b", "ccc", "ddd"]) , ("\"\" \"\" c", Right ["","","c"])] interpreterArgsSpec :: Spec interpreterArgsSpec = describe "Script interpreter parser" $ do describe "Success cases" $ do describe "Line comments" $ do checkLines "" checkLines " --x" checkLines " --x --y" describe "Literate line comments" $ do checkLiterateLines "" checkLiterateLines " --x" checkLiterateLines " --x --y" describe "Block comments" $ do checkBlocks "" checkBlocks "\n" checkBlocks " --x" checkBlocks "\n--x" checkBlocks " --x --y" checkBlocks "\n--x\n--y" checkBlocks "\n\t--x\n\t--y" describe "Literate block comments" $ do checkLiterateBlocks "" "" checkLiterateBlocks "\n>" "" checkLiterateBlocks " --x" " --x" checkLiterateBlocks "\n>--x" "--x" checkLiterateBlocks " --x --y " "--x 
--y" checkLiterateBlocks "\n>--x\n>--y" "--x --y" checkLiterateBlocks "\n>\t--x\n>\t--y" "--x --y" describe "Failure cases" $ do checkFailures describe "Bare directives in literate files" $ do forM_ (interpreterGenValid lineComment []) $ testAndCheck (acceptFailure True) [] forM_ (interpreterGenValid blockComment []) $ testAndCheck (acceptFailure True) [] where parse isLiterate s = P.parseOnly (interpreterArgsParser isLiterate stackProgName) (pack s) acceptSuccess :: Bool -> String -> String -> Bool acceptSuccess isLiterate args s = case parse isLiterate s of Right x | words x == words args -> True _ -> False acceptFailure isLiterate _ s = case parse isLiterate s of Left _ -> True Right _ -> False testAndCheck checker out inp = it (show inp) $ checker out inp checkLines args = forM_ (interpreterGenValid lineComment args) (testAndCheck (acceptSuccess False) args) checkLiterateLines args = forM_ (interpreterGenValid literateLineComment args) (testAndCheck (acceptSuccess True) args) checkBlocks args = forM_ (interpreterGenValid blockComment args) (testAndCheck (acceptSuccess False) args) checkLiterateBlocks inp args = forM_ (interpreterGenValid literateBlockComment inp) (testAndCheck (acceptSuccess True) args) checkFailures = forM_ interpreterGenInvalid (testAndCheck (acceptFailure False) "unused") -- Generate a set of acceptable inputs for given format and args interpreterGenValid fmt args = shebang <++> newLine <++> fmt args interpreterGenInvalid :: [String] -- Generate a set of Invalid inputs interpreterGenInvalid = ["-stack\n"] -- random input -- just the shebang <|> shebang <++> ["\n"] -- invalid shebang <|> blockSpace <++> [head (interpreterGenValid lineComment args)] -- something between shebang and Stack comment <|> shebang <++> newLine <++> blockSpace <++> ([head (lineComment args)] <|> [head (blockComment args)]) -- unterminated block comment -- just chop the closing chars from a valid block comment <|> shebang <++> ["\n"] <++> let c = head (blockComment 
args) l = length c - 2 in [assert (drop l c == "-}") (take l c)] -- nested block comment <|> shebang <++> ["\n"] <++> [head (blockComment "--x {- nested -} --y")] where args = " --x --y" (<++>) = liftA2 (++) -- Generative grammar for the interpreter comments shebang = ["#!/usr/bin/env stack"] newLine = ["\n"] <|> ["\r\n"] -- A comment may be the last line or followed by something else postComment = [""] <|> newLine -- A command starts with zero or more whitespace followed by "stack" makeComment maker space args = let makePrefix s = (s <|> [""]) <++> [stackProgName] in (maker <$> (makePrefix space <++> [args])) <++> postComment lineSpace = [" "] <|> ["\t"] lineComment = makeComment makeLine lineSpace where makeLine s = "--" ++ s literateLineComment = makeComment ("> --" ++) lineSpace blockSpace = lineSpace <|> newLine blockComment = makeComment makeBlock blockSpace where makeBlock s = "{-" ++ s ++ "-}" literateBlockComment = makeComment (\s -> "> {-" ++ s ++ "-}") (lineSpace <|> map (++ ">") newLine) stack-2.15.7/tests/unit/Stack/Build/ExecuteSpec.hs0000644000000000000000000000032414502056215020076 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module Stack.Build.ExecuteSpec ( main , spec ) where import Stack.Prelude import Test.Hspec main :: IO () main = hspec spec spec :: Spec spec = pure () stack-2.15.7/tests/unit/Stack/Build/TargetSpec.hs0000644000000000000000000000231314502056215017722 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Build.TargetSpec ( main , spec ) where import qualified Data.Text as T import Distribution.Types.PackageName (mkPackageName) import Distribution.Version (mkVersion) import Stack.Build.Target import Stack.Prelude import Stack.Types.NamedComponent import Test.Hspec main :: IO () main = hspec spec spec :: Spec spec = do describe "parseRawTarget" $ do let test s e = it s $ parseRawTarget (T.pack s) `shouldBe` e test "foobar" $ Just $ RTPackage (mkPackageName "foobar") 
test "foobar-1.2.3" $ Just $ RTPackageIdentifier $ PackageIdentifier (mkPackageName "foobar") (mkVersion [1, 2, 3]) test "./foobar" Nothing test "foobar/" Nothing test "/foobar" Nothing test ":some-exe" $ Just $ RTComponent "some-exe" test "foobar:some-exe" $ Just $ RTPackageComponent (mkPackageName "foobar") $ UnresolvedComponent "some-exe" test "foobar:exe:some-exe" $ Just $ RTPackageComponent (mkPackageName "foobar") $ ResolvedComponent $ CExe "some-exe" stack-2.15.7/tests/unit/Stack/Config/DockerSpec.hs0000644000000000000000000000145214620153446020061 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.Config.DockerSpec ( spec ) where import Test.Hspec import Stack.Prelude import Stack.Types.Resolver import RIO.Time (fromGregorian) import Stack.Config.Docker (addDefaultTag) spec :: Spec spec = do describe "addDefaultTag" $ do it "succeeds fails no snapshot resolver" $ addDefaultTag "foo/bar" Nothing Nothing `shouldBe` Nothing it "succeeds on LTS" $ addDefaultTag "foo/bar" Nothing (Just $ ARResolver $ RSLSynonym $ LTS 1 2) `shouldBe` Just "foo/bar:lts-1.2" it "fails on nightly" $ addDefaultTag "foo/bar" Nothing (Just $ ARResolver $ RSLSynonym $ Nightly $ fromGregorian 2018 1 1) `shouldBe` Nothing stack-2.15.7/tests/unit/Stack/ConfigSpec.hs0000644000000000000000000003054014620153474016653 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} module Stack.ConfigSpec ( sampleConfig , buildOptsConfig , hpackConfig , resolverConfig , snapshotConfig , resolverSnapshotConfig , stackDotYaml , setup , noException , spec ) where import Control.Arrow ( left ) import Data.Aeson.WarningParser ( WithJSONWarnings ) import Data.Yaml ( decodeEither', parseEither ) import Distribution.Verbosity ( verbose ) import Pantry.Internal.Stackage ( pcHpackExecutable ) import Path ( (), parent, parseAbsDir, parseRelDir, 
parseRelFile ) import Path.IO ( getCurrentDir ) import Stack.Config (defaultConfigYaml, loadConfig, loadConfigYaml ) import Stack.Options.GlobalParser ( globalOptsFromMonoid ) import Stack.Prelude import Stack.Runners ( withBuildConfig, withRunnerGlobal ) import Stack.Types.BuildConfig ( BuildConfig (..), projectRootL ) import Stack.Types.BuildOpts ( BenchmarkOpts (..), BuildOpts (..), HaddockOpts (..) , TestOpts (..) ) import Stack.Types.BuildOptsMonoid ( CabalVerbosity (..), ProgressBarFormat (NoBar) ) import Stack.Types.Config ( Config (..) ) import Stack.Types.ConfigMonoid ( ConfigMonoid (..), parseConfigMonoid ) import Stack.Types.GlobalOpts ( GlobalOpts (..) ) import Stack.Types.Project ( Project (..) ) import Stack.Types.ProjectAndConfigMonoid ( ProjectAndConfigMonoid (..), parseProjectAndConfigMonoid ) import System.Directory ( createDirectory, createDirectoryIfMissing , getCurrentDirectory, setCurrentDirectory ) import System.Environment ( lookupEnv, setEnv, unsetEnv ) import System.IO ( writeFile ) import Test.Hspec ( Selector, Spec, anyException, beforeAll, describe, example , it, shouldBe, shouldThrow ) sampleConfig :: String sampleConfig = "snapshot: lts-22.21\n" ++ "packages: ['.']\n" buildOptsConfig :: String buildOptsConfig = "snapshot: lts-22.21\n" ++ "packages: ['.']\n" ++ "build:\n" ++ " library-profiling: true\n" ++ " executable-profiling: true\n" ++ " library-stripping: false\n" ++ " executable-stripping: false\n" ++ " haddock: true\n" ++ " haddock-arguments:\n" ++ " haddock-args:\n" ++ " - \"--css=/home/user/my-css\"\n" ++ " open-haddocks: true\n" ++ " haddock-deps: true\n" ++ " haddock-internal: true\n" ++ " haddock-hyperlink-source: false\n" ++ " haddock-for-hackage: false\n" ++ " copy-bins: true\n" ++ " copy-compiler-tool: true\n" ++ " prefetch: true\n" ++ " keep-going: true\n" ++ " keep-tmp-files: true\n" ++ " force-dirty: true\n" ++ " test: true\n" ++ " test-arguments:\n" ++ " rerun-tests: true\n" ++ " additional-args: ['-fprof']\n" ++ " 
coverage: true\n" ++ " no-run-tests: true\n" ++ " bench: true\n" ++ " benchmark-opts:\n" ++ " benchmark-arguments: -O2\n" ++ " no-run-benchmarks: true\n" ++ " reconfigure: true\n" ++ " cabal-verbosity: verbose\n" ++ " cabal-verbose: true\n" ++ " split-objs: true\n" ++ " skip-components: ['my-test']\n" ++ " interleaved-output: false\n" ++ " progress-bar: none\n" ++ " ddump-dir: my-ddump-dir\n" buildOptsHaddockForHackageConfig :: String buildOptsHaddockForHackageConfig = "snapshot: lts-22.21\n" ++ "packages: ['.']\n" ++ "build:\n" ++ " haddock: true\n" ++ " open-haddocks: true\n" ++ " haddock-deps: true\n" ++ " haddock-internal: true\n" ++ " haddock-hyperlink-source: false\n" ++ " haddock-for-hackage: true\n" ++ " force-dirty: false\n" hpackConfig :: String hpackConfig = "snapshot: lts-22.21\n" ++ "with-hpack: /usr/local/bin/hpack\n" ++ "packages: ['.']\n" resolverConfig :: String resolverConfig = "resolver: lts-22.21\n" ++ "packages: ['.']\n" snapshotConfig :: String snapshotConfig = "snapshot: lts-22.21\n" ++ "packages: ['.']\n" resolverSnapshotConfig :: String resolverSnapshotConfig = "resolver: lts-22.21\n" ++ "snapshot: lts-22.21\n" ++ "packages: ['.']\n" stackDotYaml :: Path Rel File stackDotYaml = either impureThrow id (parseRelFile "stack.yaml") setup :: IO () setup = unsetEnv "STACK_YAML" noException :: Selector SomeException noException = const False spec :: Spec spec = beforeAll setup $ do let logLevel = LevelOther "silent" -- TODO(danburton): not use inTempDir let inTempDir action = do currentDirectory <- getCurrentDirectory withSystemTempDirectory "Stack_ConfigSpec" $ \tempDir -> do let enterDir = setCurrentDirectory tempDir let exitDir = setCurrentDirectory currentDirectory bracket_ enterDir exitDir action -- TODO(danburton): a safer version of this? 
let withEnvVar name newValue action = do originalValue <- fromMaybe "" <$> lookupEnv name let setVar = setEnv name newValue let resetVar = setEnv name originalValue bracket_ setVar resetVar action describe "parseProjectAndConfigMonoid" $ do let loadProject' fp inner = do globalOpts <- globalOptsFromMonoid False mempty withRunnerGlobal globalOpts { logLevel = logLevel } $ do iopc <- loadConfigYaml ( parseProjectAndConfigMonoid (parent fp) ) fp ProjectAndConfigMonoid project _ <- liftIO iopc liftIO $ inner project toAbsPath path = do parentDir <- getCurrentDirectory >>= parseAbsDir pure (parentDir path) loadProject config inner = do yamlAbs <- toAbsPath stackDotYaml writeFile (toFilePath yamlAbs) config loadProject' yamlAbs inner it "parses snapshot using 'resolver'" $ inTempDir $ do loadProject resolverConfig $ \project -> project.resolver `shouldBe` RSLSynonym (LTS 22 21) it "parses snapshot using 'snapshot'" $ inTempDir $ do loadProject snapshotConfig $ \project -> project.resolver `shouldBe` RSLSynonym (LTS 22 21) it "throws if both 'resolver' and 'snapshot' are present" $ inTempDir $ do loadProject resolverSnapshotConfig (const (pure ())) `shouldThrow` anyException describe "loadConfig" $ do let loadConfig' inner = do globalOpts <- globalOptsFromMonoid False mempty withRunnerGlobal globalOpts { logLevel = logLevel } $ loadConfig inner -- TODO(danburton): make sure parent dirs also don't have config file it "works even if no config file exists" $ example $ loadConfig' $ const $ pure () it "works with a blank config file" $ inTempDir $ do writeFile (toFilePath stackDotYaml) "" -- TODO(danburton): more specific test for exception loadConfig' (const (pure ())) `shouldThrow` anyException let configOverrideHpack = pcHpackExecutable . 
view pantryConfigL it "parses config option with-hpack" $ inTempDir $ do writeFile (toFilePath stackDotYaml) hpackConfig loadConfig' $ \config -> liftIO $ configOverrideHpack config `shouldBe` HpackCommand "/usr/local/bin/hpack" it "parses config bundled Hpack" $ inTempDir $ do writeFile (toFilePath stackDotYaml) sampleConfig loadConfig' $ \config -> liftIO $ configOverrideHpack config `shouldBe` HpackBundled it "parses build config options" $ inTempDir $ do writeFile (toFilePath stackDotYaml) buildOptsConfig loadConfig' $ \config -> liftIO $ do let bopts = config.build bopts.libProfile `shouldBe` True bopts.exeProfile `shouldBe` True bopts.libStrip `shouldBe` False bopts.exeStrip `shouldBe` False bopts.buildHaddocks `shouldBe` True bopts.haddockOpts `shouldBe` HaddockOpts { additionalArgs = ["--css=/home/user/my-css"] } bopts.openHaddocks `shouldBe` True bopts.haddockDeps `shouldBe` Just True bopts.haddockInternal `shouldBe` True bopts.haddockHyperlinkSource `shouldBe` False bopts.haddockForHackage `shouldBe` False bopts.installExes `shouldBe` True bopts.installCompilerTool `shouldBe` True bopts.preFetch `shouldBe` True bopts.keepGoing `shouldBe` Just True bopts.keepTmpFiles `shouldBe` True bopts.forceDirty `shouldBe` True bopts.tests `shouldBe` True bopts.testOpts `shouldBe` TestOpts { rerunTests = True , additionalArgs = ["-fprof"] , coverage = True , disableRun = True , maximumTimeSeconds = Nothing , allowStdin = True } bopts.benchmarks `shouldBe` True bopts.benchmarkOpts `shouldBe` BenchmarkOpts { additionalArgs = Just "-O2" , disableRun = True } bopts.reconfigure `shouldBe` True bopts.cabalVerbose `shouldBe` CabalVerbosity verbose bopts.splitObjs `shouldBe` True bopts.skipComponents `shouldBe` ["my-test"] bopts.interleavedOutput `shouldBe` False bopts.progressBar `shouldBe` NoBar bopts.ddumpDir `shouldBe` Just "my-ddump-dir" it "parses build config options with haddock-for-hackage" $ inTempDir $ do writeFile (toFilePath stackDotYaml) 
buildOptsHaddockForHackageConfig loadConfig' $ \config -> liftIO $ do let bopts = config.build bopts.buildHaddocks `shouldBe` True bopts.openHaddocks `shouldBe` False bopts.haddockDeps `shouldBe` Nothing bopts.haddockInternal `shouldBe` False bopts.haddockHyperlinkSource `shouldBe` True bopts.haddockForHackage `shouldBe` True bopts.forceDirty `shouldBe` True it "finds the config file in a parent directory" $ inTempDir $ do writeFile "package.yaml" "name: foo" writeFile (toFilePath stackDotYaml) sampleConfig parentDir <- getCurrentDirectory >>= parseAbsDir let childDir = "child" createDirectory childDir setCurrentDirectory childDir loadConfig' $ \config -> liftIO $ do bc <- runRIO config $ withBuildConfig ask view projectRootL bc `shouldBe` parentDir it "respects the STACK_YAML env variable" $ inTempDir $ do withSystemTempDir "config-is-here" $ \dir -> do let stackYamlFp = toFilePath (dir stackDotYaml) writeFile stackYamlFp sampleConfig writeFile (toFilePath dir ++ "/package.yaml") "name: foo" withEnvVar "STACK_YAML" stackYamlFp $ loadConfig' $ \config -> liftIO $ do bc <- runRIO config $ withBuildConfig ask bc.stackYaml `shouldBe` dir stackDotYaml parent bc.stackYaml `shouldBe` dir it "STACK_YAML can be relative" $ inTempDir $ do parentDir <- getCurrentDirectory >>= parseAbsDir let childRel = either impureThrow id (parseRelDir "child") yamlRel = childRel either impureThrow id (parseRelFile "some-other-name.config") yamlAbs = parentDir yamlRel packageYaml = childRel either impureThrow id (parseRelFile "package.yaml") createDirectoryIfMissing True $ toFilePath $ parent yamlAbs writeFile (toFilePath yamlAbs) "snapshot: ghc-9.6.5" writeFile (toFilePath packageYaml) "name: foo" withEnvVar "STACK_YAML" (toFilePath yamlRel) $ loadConfig' $ \config -> liftIO $ do bc <- runRIO config $ withBuildConfig ask bc.stackYaml `shouldBe` yamlAbs describe "defaultConfigYaml" $ it "is parseable" $ \_ -> do curDir <- getCurrentDir let parsed :: Either String (Either String 
(WithJSONWarnings ConfigMonoid)) parsed = parseEither (parseConfigMonoid curDir) <$> left show (decodeEither' defaultConfigYaml) case parsed of Right (Right _) -> pure () :: IO () _ -> fail "Failed to parse default config yaml" stack-2.15.7/tests/unit/Stack/DotSpec.hs0000644000000000000000000001347414604306201016171 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -- | Test suite for Stack.Dot module Stack.DotSpec ( dummyPayload , spec , sublistOf , pkgName , stubLoader ) where import Data.List ((\\)) import qualified Data.Map as Map import qualified Data.Set as Set import Distribution.License ( License (BSD3) ) import qualified RIO.Text as T import Stack.DependencyGraph ( pruneGraph, resolveDependencies ) import Stack.Prelude hiding ( pkgName ) import Stack.Types.DependencyTree ( DotPayload (..) ) import Test.Hspec ( Spec, describe, it, shouldBe ) import Test.Hspec.QuickCheck ( prop ) import Test.QuickCheck ( Gen, choose, forAll ) dummyPayload :: DotPayload dummyPayload = DotPayload (parseVersion "0.0.0.0") (Just (Right BSD3)) Nothing spec :: Spec spec = do let graph = Map.mapKeys pkgName . fmap (\p -> (Set.map pkgName p, dummyPayload)) . 
Map.fromList $ [("one",Set.fromList ["base","free"]) ,("two",Set.fromList ["base","free","mtl","transformers","one"]) ] describe "Stack.Dot" $ do it "does nothing if depth is 0" $ resolveDependencies (Just 0) graph stubLoader `shouldBe` pure graph it "with depth 1, more dependencies are resolved" $ do let graph' = Map.insert (pkgName "cycle") (Set.singleton (pkgName "cycle"), dummyPayload) graph resultGraph = runIdentity (resolveDependencies (Just 0) graph stubLoader) resultGraph' = runIdentity (resolveDependencies (Just 1) graph' stubLoader) Map.size resultGraph < Map.size resultGraph' `shouldBe` True it "cycles are ignored" $ do let graph' = Map.insert (pkgName "cycle") (Set.singleton (pkgName "cycle"), dummyPayload) graph resultGraph = resolveDependencies Nothing graph stubLoader resultGraph' = resolveDependencies Nothing graph' stubLoader fmap Map.size resultGraph' `shouldBe` fmap ((+1) . Map.size) resultGraph let graphElem e = Set.member e . Set.unions . Map.elems prop "requested packages are pruned" $ do let resolvedGraph = runIdentity (resolveDependencies Nothing graph stubLoader) allPackages g = Map.keysSet g `Set.union` foldMap fst g forAll (sublistOf (Set.toList (allPackages resolvedGraph))) $ \toPrune -> let pruned = pruneGraph [pkgName "one", pkgName "two"] toPrune resolvedGraph in Set.null (allPackages pruned `Set.intersection` Set.fromList toPrune) prop "pruning removes orphans" $ do let resolvedGraph = runIdentity (resolveDependencies Nothing graph stubLoader) allPackages g = Map.keysSet g `Set.union` foldMap fst g orphans g = Map.filterWithKey (\k _ -> not (graphElem k g)) g forAll (sublistOf (Set.toList (allPackages resolvedGraph))) $ \toPrune -> let pruned = pruneGraph [pkgName "one", pkgName "two"] toPrune resolvedGraph in null (Map.keys (orphans (fmap fst pruned)) \\ [pkgName "one", pkgName "two"]) {- Helper functions below -} -- Backport from QuickCheck 2.8 to 2.7.6 sublistOf :: [a] -> Gen [a] sublistOf = filterM (\_ -> choose (False, True)) -- 
Unsafe internal helper to create a package name pkgName :: Text -> PackageName pkgName = fromMaybe failure . parsePackageName . T.unpack where failure = error "Internal error during package name creation in DotSpec.pkgName" -- Stub, simulates the function to load package dependencies stubLoader :: PackageName -> Identity (Set PackageName, DotPayload) stubLoader name = pure $ (, dummyPayload) . Set.fromList . map pkgName $ case show name of "StateVar" -> ["stm", "transformers"] "array" -> [] "bifunctors" -> ["semigroupoids", "semigroups", "tagged"] "binary" -> ["array", "bytestring", "containers"] "bytestring" -> ["deepseq", "ghc-prim", "integer-gmp"] "comonad" -> [ "containers", "contravariant", "distributive", "semigroups" , "tagged","transformers","transformers-compat" ] "cont" -> [ "StateVar", "semigroups", "transformers", "transformers-compat" , "void" ] "containers" -> ["array", "deepseq", "ghc-prim"] "deepseq" -> ["array"] "distributive" -> [ "ghc-prim", "tagged", "transformers" , "transformers-compat" ] "free" -> [ "bifunctors", "comonad", "distributive", "mtl", "prelude-extras" , "profunctors", "semigroupoids", "semigroups", "template-haskell" , "transformers" ] "ghc" -> [] "hashable" -> ["bytestring", "ghc-prim", "integer-gmp", "text"] "integer" -> [] "mtl" -> ["transformers"] "nats" -> [] "one" -> ["free"] "prelude" -> [] "profunctors" -> [ "comonad", "distributive", "semigroupoids", "tagged" , "transformers" ] "semigroupoids" -> [ "comonad", "containers", "contravariant" , "distributive", "semigroups", "transformers" , "transformers-compat" ] "semigroups" -> [ "bytestring", "containers", "deepseq", "hashable", "nats" , "text", "unordered-containers" ] "stm" -> ["array"] "tagged" -> ["template-haskell"] "template" -> [] "text" -> [ "array", "binary", "bytestring", "deepseq", "ghc-prim" , "integer-gmp" ] "transformers" -> [] "two" -> ["free", "mtl", "one", "transformers"] "unordered" -> ["deepseq", "hashable"] "void" -> ["ghc-prim", "hashable", 
"semigroups"] _ -> [] stack-2.15.7/tests/unit/Stack/Ghci/ScriptSpec.hs0000644000000000000000000000435614604306201017560 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE OverloadedLists #-} {-# LANGUAGE TemplateHaskell #-} -- | Test suite for the GhciScript DSL module Stack.Ghci.ScriptSpec ( spec ) where import qualified Data.Set as S import Distribution.ModuleName import Path import Path.Extra ( pathToLazyByteString ) import Stack.Ghci.FakePaths import Stack.Ghci.Script import Stack.Prelude hiding ( fromString ) import qualified System.FilePath as FP import Test.Hspec spec :: Spec spec = do describe "GHCi" $ do describe "Script DSL" $ do describe "script" $ do it "should separate commands with a newline" $ do let script = cmdAdd [Left (fromString "Lib.A")] <> cmdAdd [Left (fromString "Lib.B")] scriptToLazyByteString script `shouldBe` ":add Lib.A\n:add Lib.B\n" describe ":add" $ do it "should not render empty add commands" $ do let script = cmdAdd S.empty scriptToLazyByteString script `shouldBe` "" it "should ensure that a space exists between each module in an add command" $ do let script = cmdAdd (S.fromList [Left (fromString "Lib.A"), Left (fromString "Lib.B")]) scriptToLazyByteString script `shouldBe` ":add Lib.A Lib.B\n" describe ":add (by file)" $ do it "should render a full file path" $ do let file = $(mkAbsFile $ defaultDrive FP. "Users" FP. "someone" FP. "src" FP. "project" FP. "package-a" FP. "src" FP. 
"Main.hs") script = cmdAdd (S.fromList [Right file]) scriptToLazyByteString script `shouldBe` ":add " <> pathToLazyByteString file <> "\n" describe ":module" $ do it "should render empty module as ':module +'" $ do let script = cmdModule [] scriptToLazyByteString script `shouldBe` ":module +\n" it "should ensure that a space exists between each module in a module command" $ do let script = cmdModule [fromString "Lib.A", fromString "Lib.B"] scriptToLazyByteString script `shouldBe` ":module + Lib.A Lib.B\n" stack-2.15.7/tests/unit/Stack/GhciSpec.hs0000644000000000000000000002300714604306201016306 0ustar0000000000000000-- {-# LANGUAGE NoImplicitPrelude #-} -- {-# LANGUAGE QuasiQuotes #-} -- {-# LANGUAGE TemplateHaskell #-} -- | Test suite for GHCi-like applications including GHCi. module Stack.GhciSpec ( spec ) where import Test.Hspec ( Spec ) spec :: Spec spec = pure () {- Commented out as part of the fix for https://github.com/commercialhaskell/stack/issues/3309 Not sure if maintaining this test is worth the effort. import qualified Data.ByteString.Lazy as LBS import qualified Data.Map as M import qualified Data.Set as S import qualified Data.Text as T import qualified Data.Text.Encoding as T import Distribution.License (License (BSD3)) import qualified Distribution.ModuleName as ModuleName import Distribution.PackageDescription (BuildType(..)) import Stack.Prelude import Stack.Types.Package import Stack.Types.PackageName import Stack.Types.Version import Test.Hspec import NeatInterpolation import Path import Path.Extra (pathToText) import qualified System.FilePath as FP import Stack.Ghci import Stack.Ghci.Script (scriptToLazyByteString) import Stack.Ghci.PortableFakePaths textToLazy :: Text -> LBS.ByteString textToLazy = LBS.fromStrict . T.encodeUtf8 -- | Matches two strings, after converting line-ends in the second to Unix ones -- (in a hacky way) and converting both to the same type. Workaround for -- https://github.com/nikita-volkov/neat-interpolation/issues/14. 
shouldBeLE :: LBS.ByteString -> Text -> Expectation shouldBeLE actual expected = shouldBe actual (textToLazy $ T.filter (/= '\r') expected) baseProjDir, projDirA, projDirB :: Path Abs Dir baseProjDir = $(mkAbsDir $ defaultDrive FP. "Users" FP. "someone" FP. "src") projDirA = baseProjDir either impureThrow id (parseRelDir "project-a") projDirB = baseProjDir either impureThrow id (parseRelDir "project-b") relFile :: Path Rel File relFile = either impureThrow id (parseRelFile $ "exe" FP. "Main.hs") absFile :: Path Abs File absFile = projDirA relFile projDirAT, projDirBT, relFileT, absFileT :: Text projDirAT = pathToText projDirA projDirBT = pathToText projDirB relFileT = pathToText relFile absFileT = pathToText absFile spec :: Spec spec = do describe "GHCi" $ do describe "Script rendering" $ do describe "should render GHCi scripts" $ do it "with one library package" $ do let res = scriptToLazyByteString $ renderScriptGhci packages_singlePackage Nothing [] res `shouldBeLE` ghciScript_projectWithLib it "with one main package" $ do let res = scriptToLazyByteString $ renderScriptGhci [] (Just absFile) [] res `shouldBeLE` ghciScript_projectWithMain it "with one library and main package" $ do let res = scriptToLazyByteString $ renderScriptGhci packages_singlePackage (Just absFile) [] res `shouldBeLE` ghciScript_projectWithLibAndMain it "with multiple library packages" $ do let res = scriptToLazyByteString $ renderScriptGhci packages_multiplePackages Nothing [] res `shouldBeLE` ghciScript_multipleProjectsWithLib describe "should render intero scripts" $ do it "with one library package" $ do let res = scriptToLazyByteString $ renderScriptIntero packages_singlePackage Nothing [] res `shouldBeLE` interoScript_projectWithLib it "with one main package" $ do let res = scriptToLazyByteString $ renderScriptIntero packages_singlePackage (Just absFile) [] res `shouldBeLE` interoScript_projectWithMain it "with one library and main package" $ do let res = scriptToLazyByteString $ 
renderScriptIntero packages_singlePackage (Just absFile) [] res `shouldBeLE` interoScript_projectWithLibAndMain it "with multiple library packages" $ do let res = scriptToLazyByteString $ renderScriptIntero packages_multiplePackages Nothing [] res `shouldBeLE` interoScript_multipleProjectsWithLib -- Expected Intero scripts interoScript_projectWithLib :: Text interoScript_projectWithLib = [text| :cd-ghc $projDirAT :add Lib.A :module + Lib.A |] interoScript_projectWithMain :: Text interoScript_projectWithMain = [text| :cd-ghc $projDirAT :add Lib.A :cd-ghc $projDirAT :add $absFileT :module + Lib.A |] interoScript_projectWithLibAndMain :: Text interoScript_projectWithLibAndMain = [text| :cd-ghc $projDirAT :add Lib.A :cd-ghc $projDirAT :add $absFileT :module + Lib.A |] interoScript_multipleProjectsWithLib :: Text interoScript_multipleProjectsWithLib = [text| :cd-ghc $projDirAT :add Lib.A :cd-ghc $projDirBT :add Lib.B :module + Lib.A Lib.B |] -- Expected GHCi Scripts ghciScript_projectWithLib :: Text ghciScript_projectWithLib = [text| :add Lib.A :module + Lib.A |] ghciScript_projectWithMain :: Text ghciScript_projectWithMain = [text| :add $absFileT :module + |] ghciScript_projectWithLibAndMain :: Text ghciScript_projectWithLibAndMain = [text| :add Lib.A :add $absFileT :module + Lib.A |] ghciScript_multipleProjectsWithLib :: Text ghciScript_multipleProjectsWithLib = [text| :add Lib.A :add Lib.B :module + Lib.A Lib.B |] -- Expected Legacy GHCi scripts ghciLegacyScript_projectWithMain :: Text ghciLegacyScript_projectWithMain = [text| :add :add $absFileT :module + |] ghciLegacyScript_projectWithLibAndMain :: Text ghciLegacyScript_projectWithLibAndMain = [text| :add Lib.A :add $absFileT :module + Lib.A |] ghciLegacyScript_multipleProjectsWithLib :: Text ghciLegacyScript_multipleProjectsWithLib = [text| :add Lib.A Lib.B :module + Lib.A Lib.B |] -- Sample GHCi load configs packages_singlePackage :: [GhciPkgInfo] packages_singlePackage = [ GhciPkgInfo { ghciPkgModules = 
S.fromList [ModuleName.fromString "Lib.A"] , ghciPkgDir = projDirA , ghciPkgName = $(mkPackageName "package-a") , ghciPkgOpts = [] , ghciPkgModFiles = S.empty , ghciPkgCFiles = S.empty , ghciPkgMainIs = M.empty , ghciPkgTargetFiles = Nothing , ghciPkgPackage = Package { packageName = $(mkPackageName "package-a") , packageVersion = $(mkVersion "0.1.0.0") , packageLicense = BSD3 , packageFiles = GetPackageFiles undefined , packageDeps = M.empty , packageTools = [] , packageAllDeps = S.empty , packageGhcOptions = [] , packageFlags = M.empty , packageDefaultFlags = M.empty , packageHasLibrary = True , packageTests = M.empty , packageBenchmarks = S.empty , packageExes = S.empty , packageOpts = GetPackageOpts undefined , packageHasExposedModules = True , packageBuildType = Just Simple , packageSetupDeps = Nothing } } ] packages_multiplePackages :: [GhciPkgInfo] packages_multiplePackages = [ GhciPkgInfo { ghciPkgModules = S.fromList [ModuleName.fromString "Lib.A"] , ghciPkgDir = projDirA , ghciPkgName = $(mkPackageName "package-a") , ghciPkgOpts = [] , ghciPkgModFiles = S.empty , ghciPkgCFiles = S.empty , ghciPkgMainIs = M.empty , ghciPkgTargetFiles = Nothing , ghciPkgPackage = Package { packageName = $(mkPackageName "package-a") , packageVersion = $(mkVersion "0.1.0.0") , packageLicense = BSD3 , packageFiles = GetPackageFiles undefined , packageDeps = M.empty , packageTools = [] , packageAllDeps = S.empty , packageGhcOptions = [] , packageFlags = M.empty , packageDefaultFlags = M.empty , packageHasLibrary = True , packageTests = M.empty , packageBenchmarks = S.empty , packageExes = S.empty , packageOpts = GetPackageOpts undefined , packageHasExposedModules = True , packageBuildType = Just Simple , packageSetupDeps = Nothing } } , GhciPkgInfo { ghciPkgModules = S.fromList [ModuleName.fromString "Lib.B"] , ghciPkgDir = projDirB , ghciPkgName = $(mkPackageName "package-b") , ghciPkgOpts = [] , ghciPkgModFiles = S.empty , ghciPkgCFiles = S.empty , ghciPkgMainIs = M.empty , 
ghciPkgTargetFiles = Nothing , ghciPkgPackage = Package { packageName = $(mkPackageName "package-b") , packageVersion = $(mkVersion "0.1.0.0") , packageLicense = BSD3 , packageFiles = GetPackageFiles undefined , packageDeps = M.empty , packageTools = [] , packageAllDeps = S.empty , packageGhcOptions = [] , packageFlags = M.empty , packageDefaultFlags = M.empty , packageHasLibrary = True , packageTests = M.empty , packageBenchmarks = S.empty , packageExes = S.empty , packageOpts = GetPackageOpts undefined , packageHasExposedModules = True , packageBuildType = Just Simple , packageSetupDeps = Nothing } } ] -} stack-2.15.7/tests/unit/Stack/LockSpec.hs0000644000000000000000000001452414604306201016330 0ustar0000000000000000{-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} module Stack.LockSpec ( toBlobKey , decodeSHA , decodeLocked , spec ) where import Data.Aeson.WarningParser ( WithJSONWarnings (..) ) import qualified Data.Yaml as Yaml import Distribution.Types.PackageName ( mkPackageName ) import Distribution.Types.Version ( mkVersion ) import Pantry ( BlobKey (..), FileSize (..), PackageIdentifier (..) , PackageLocationImmutable (..), PackageMetadata (..) , RawPackageLocationImmutable (..), RawPackageMetadata (..) , Repo (..), RepoType (..), SHA256, TreeKey (..) , resolvePaths ) import qualified Pantry.SHA256 as SHA256 import RIO ( ByteString, displayException, throwIO, unless ) import Stack.Lock ( Locked (..), LockedLocation (..) ) import Test.Hspec ( Spec, it, shouldBe ) import Text.RawString.QQ ( r ) toBlobKey :: ByteString -> Word -> BlobKey toBlobKey string size = BlobKey (decodeSHA string) (FileSize size) decodeSHA :: ByteString -> SHA256 decodeSHA string = case SHA256.fromHexBytes string of Right csha -> csha Left err -> error $ "Failed decoding. 
Error: " <> displayException err decodeLocked :: ByteString -> IO Locked decodeLocked bs = do val <- Yaml.decodeThrow bs case Yaml.parseEither Yaml.parseJSON val of Left err -> throwIO $ Yaml.AesonException err Right (WithJSONWarnings res warnings) -> do unless (null warnings) $ throwIO $ Yaml.AesonException $ "Unexpected warnings: " ++ show warnings -- we just assume no file references resolvePaths Nothing res spec :: Spec spec = do it "parses lock file (empty with GHC snapshot)" $ do let lockFile :: ByteString lockFile = [r|#some snapshots: - completed: compiler: ghc-8.6.5 original: compiler: ghc-8.6.5 packages: [] |] pkgImm <- (.pkgImmutableLocations) <$> decodeLocked lockFile pkgImm `shouldBe` [] it "parses lock file (empty with LTS snapshot)" $ do let lockFile :: ByteString lockFile = [r|#some snapshots: - completed: size: 527801 url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/11/22.yaml sha256: 7c8b1853da784bd7beb8728168bf4e879d8a2f6daf408ca0fa7933451864a96a original: lts-14.27 - completed: compiler: ghc-8.6.5 original: compiler: ghc-8.6.5 packages: [] |] pkgImm <- (.pkgImmutableLocations) <$> decodeLocked lockFile pkgImm `shouldBe` [] it "parses lock file (LTS, wai + warp)" $ do let lockFile :: ByteString lockFile = [r|#some snapshots: - completed: size: 527801 url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/11/22.yaml sha256: 7c8b1853da784bd7beb8728168bf4e879d8a2f6daf408ca0fa7933451864a96a original: lts-14.27 - completed: compiler: ghc-8.6.5 original: compiler: ghc-8.6.5 packages: - original: subdir: wai git: https://github.com/yesodweb/wai.git commit: d11d63f1a6a92db8c637a8d33e7953ce6194a3e0 completed: subdir: wai name: wai version: 3.2.1.2 git: https://github.com/yesodweb/wai.git pantry-tree: size: 714 sha256: ecfd0b4b75f435a3f362394807b35e5ef0647b1a25005d44a3632c49db4833d2 commit: d11d63f1a6a92db8c637a8d33e7953ce6194a3e0 - original: subdir: warp git: 
https://github.com/yesodweb/wai.git commit: d11d63f1a6a92db8c637a8d33e7953ce6194a3e0 completed: subdir: warp name: warp version: 3.2.25 git: https://github.com/yesodweb/wai.git pantry-tree: size: 5103 sha256: f808e075811b002563d24c393ce115be826bb66a317d38da22c513ee42b7443a commit: d11d63f1a6a92db8c637a8d33e7953ce6194a3e0 |] pkgImm <- (.pkgImmutableLocations) <$> decodeLocked lockFile let waiSubdirRepo subdir = Repo { repoType = RepoGit , repoUrl = "https://github.com/yesodweb/wai.git" , repoCommit = "d11d63f1a6a92db8c637a8d33e7953ce6194a3e0" , repoSubdir = subdir } emptyRPM = RawPackageMetadata { rpmName = Nothing , rpmVersion = Nothing , rpmTreeKey = Nothing } pkgImm `shouldBe` [ LockedLocation (RPLIRepo (waiSubdirRepo "wai") emptyRPM) (PLIRepo (waiSubdirRepo "wai") (PackageMetadata { pmIdent = PackageIdentifier { pkgName = mkPackageName "wai" , pkgVersion = mkVersion [3, 2, 1, 2] } , pmTreeKey = TreeKey (BlobKey (decodeSHA "ecfd0b4b75f435a3f362394807b35e5ef0647b1a25005d44a3632c49db4833d2") (FileSize 714)) })) , LockedLocation (RPLIRepo (waiSubdirRepo "warp") emptyRPM) (PLIRepo (waiSubdirRepo "warp") (PackageMetadata { pmIdent = PackageIdentifier { pkgName = mkPackageName "warp" , pkgVersion = mkVersion [3, 2, 25] } , pmTreeKey = TreeKey (BlobKey (decodeSHA "f808e075811b002563d24c393ce115be826bb66a317d38da22c513ee42b7443a") (FileSize 5103)) })) ] stack-2.15.7/tests/unit/Stack/NixSpec.hs0000644000000000000000000001214614604306201016174 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} module Stack.NixSpec ( sampleConfigNixEnabled , sampleConfigNixDisabled , setup , spec ) where import Data.Maybe ( fromJust ) import Options.Applicative ( defaultPrefs, execParserPure, getParseResult, info ) import Prelude ( writeFile ) import Stack.Config ( loadConfig ) import Stack.Config.Nix ( nixCompiler ) import Stack.Constants ( osIsWindows, stackDotYaml ) import 
Stack.Options.GlobalParser ( globalOptsFromMonoid ) import Stack.Options.NixParser ( nixOptsParser ) import Stack.Prelude import Stack.Runners ( withRunnerGlobal ) import Stack.Types.Config ( Config (..) ) import Stack.Types.ConfigMonoid ( ConfigMonoid (..) ) import qualified Stack.Types.GlobalOpts as GlobalOpts ( GlobalOpts (..) ) import Stack.Types.GlobalOptsMonoid ( GlobalOptsMonoid (..) ) import Stack.Types.Nix ( NixOpts (..) ) import System.Directory ( getCurrentDirectory, setCurrentDirectory ) import System.Environment ( unsetEnv ) import Test.Hspec ( Spec, around_, beforeAll, describe, it, shouldBe ) sampleConfigNixEnabled :: String sampleConfigNixEnabled = "snapshot: lts-19.22\n" ++ "packages: ['.']\n" ++ "system-ghc: true\n" ++ "nix:\n" ++ " enable: True\n" ++ " packages: [glpk]" sampleConfigNixDisabled :: String sampleConfigNixDisabled = "snapshot: lts-19.22\n" ++ "packages: ['.']\n" ++ "nix:\n" ++ " enable: False" setup :: IO () setup = unsetEnv "STACK_YAML" spec :: Spec spec = beforeAll setup $ do let loadConfig' :: ConfigMonoid -> (Config -> IO ()) -> IO () loadConfig' cmdLineArgs inner = do globalOpts <- globalOptsFromMonoid False mempty { configMonoid = cmdLineArgs } withRunnerGlobal globalOpts { GlobalOpts.logLevel = LevelOther "silent" } $ loadConfig (liftIO . 
inner) inTempDir test = do currentDirectory <- getCurrentDirectory withSystemTempDirectory "Stack_ConfigSpec" $ \tempDir -> do let enterDir = setCurrentDirectory tempDir exitDir = setCurrentDirectory currentDirectory bracket_ enterDir exitDir test withStackDotYaml config test = inTempDir $ do writeFile (toFilePath stackDotYaml) config test parseNixOpts cmdLineOpts = fromJust $ getParseResult $ execParserPure defaultPrefs (info (nixOptsParser False) mempty) cmdLineOpts parseOpts cmdLineOpts = mempty { nixOpts = parseNixOpts cmdLineOpts } let trueOnNonWindows = not osIsWindows describe "nix disabled in config file" $ around_ (withStackDotYaml sampleConfigNixDisabled) $ do it "sees that the nix shell is not enabled" $ loadConfig' mempty $ \config -> config.nix.enable `shouldBe` False describe "--nix given on command line" $ it "sees that the nix shell is enabled" $ loadConfig' (parseOpts ["--nix"]) $ \config -> config.nix.enable `shouldBe` trueOnNonWindows describe "--nix-pure given on command line" $ it "sees that the nix shell is enabled" $ loadConfig' (parseOpts ["--nix-pure"]) $ \config -> config.nix.enable `shouldBe` trueOnNonWindows describe "--no-nix given on command line" $ it "sees that the nix shell is not enabled" $ loadConfig' (parseOpts ["--no-nix"]) $ \config -> config.nix.enable `shouldBe` False describe "--no-nix-pure given on command line" $ it "sees that the nix shell is not enabled" $ loadConfig' (parseOpts ["--no-nix-pure"]) $ \config -> config.nix.enable `shouldBe` False describe "nix enabled in config file" $ around_ (withStackDotYaml sampleConfigNixEnabled) $ do it "sees that the nix shell is enabled" $ loadConfig' mempty $ \config -> config.nix.enable `shouldBe` trueOnNonWindows describe "--no-nix given on command line" $ it "sees that the nix shell is not enabled" $ loadConfig' (parseOpts ["--no-nix"]) $ \config -> config.nix.enable `shouldBe` False describe "--nix-pure given on command line" $ it "sees that the nix shell is enabled" $ 
loadConfig' (parseOpts ["--nix-pure"]) $ \config -> config.nix.enable `shouldBe` trueOnNonWindows describe "--no-nix-pure given on command line" $ it "sees that the nix shell is enabled" $ loadConfig' (parseOpts ["--no-nix-pure"]) $ \config -> config.nix.enable `shouldBe` trueOnNonWindows it "sees that the only package asked for is glpk and asks for the correct GHC derivation" $ loadConfig' mempty $ \config -> do config.nix.packages `shouldBe` ["glpk"] v <- parseVersionThrowing "9.0.2" ghc <- either throwIO pure $ nixCompiler (WCGhc v) ghc `shouldBe` "haskell.compiler.ghc902" stack-2.15.7/tests/unit/Stack/PackageDumpSpec.hs0000644000000000000000000003117714604306201017624 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.PackageDumpSpec ( main , spec , bestPrune , checkDepsPresent , runEnvNoLogging ) where import Conduit ( withSourceFile, yield ) import qualified Data.Conduit.List as CL import Data.Conduit.Text ( decodeUtf8 ) import qualified Data.Map as Map import qualified Data.Set as Set import Distribution.License ( License (..) ) import Distribution.Types.PackageName ( mkPackageName ) import Distribution.Version ( mkVersion ) import Path ( parseAbsFile ) import RIO.PrettyPrint.Simple ( SimplePrettyApp, mkSimplePrettyApp ) import RIO.Process ( envVarsL, findExecutable, mkDefaultProcessContext , mkProcessContext ) import Stack.PackageDump ( DumpPackage (..), conduitDumpPackage, eachPair , eachSection, ghcPkgDump, pruneDeps, sinkMatching ) import Stack.Prelude import Stack.Types.CompilerPaths ( GhcPkgExe (..) 
) import Stack.Types.GhcPkgId ( parseGhcPkgId ) import Test.Hspec ( Spec, describe, hspec, it, shouldBe ) import Test.Hspec.QuickCheck ( prop ) main :: IO () main = hspec spec spec :: Spec spec = do describe "eachSection" $ do let test name content expected = it name $ do actual <- runConduit $ yield content .| eachSection CL.consume .| CL.consume actual `shouldBe` expected test "unix line endings" "foo\nbar\n---\nbaz---\nbin\n---\n" [ ["foo", "bar"] , ["baz---", "bin"] ] test "windows line endings" "foo\r\nbar\r\n---\r\nbaz---\r\nbin\r\n---\r\n" [ ["foo", "bar"] , ["baz---", "bin"] ] it "eachPair" $ do let bss = [ "key1: val1" , "key2: val2a" , " val2b" , "key3:" , "key4:" , " val4a" , " val4b" ] sink k = fmap (k, ) CL.consume actual <- runConduit $ mapM_ yield bss .| eachPair sink .| CL.consume actual `shouldBe` [ ("key1", ["val1"]) , ("key2", ["val2a", "val2b"]) , ("key3", []) , ("key4", ["val4a", "val4b"]) ] describe "conduitDumpPackage" $ do it "ghc 7.8" $ do haskell2010:_ <- withSourceFile "tests/unit/package-dump/ghc-7.8.txt" $ \src -> runConduit $ src .| decodeUtf8 .| conduitDumpPackage .| CL.consume ghcPkgId <- parseGhcPkgId "haskell2010-1.1.2.0-05c8dd51009e08c6371c82972d40f55a" packageIdent <- maybe (fail "Not parsable package id") pure $ parsePackageIdentifier "haskell2010-1.1.2.0" depends <- mapM parseGhcPkgId [ "array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b" , "base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1" , "ghc-prim-0.3.1.0-a24f9c14c632d75b683d0f93283aea37" ] haskell2010 { exposedModules = mempty } `shouldBe` DumpPackage { ghcPkgId = ghcPkgId , packageIdent = packageIdent , sublib = Nothing , license = Just BSD3 , libDirs = ["/opt/ghc/7.8.4/lib/ghc-7.8.4/haskell2010-1.1.2.0"] , depends = depends , libraries = ["HShaskell2010-1.1.2.0"] , hasExposedModules = True , haddockInterfaces = ["/opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskell2010-1.1.2.0/haskell2010.haddock"] , haddockHtml = Just 
"/opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskell2010-1.1.2.0" , isExposed = False , exposedModules = mempty } it "ghc 7.10" $ do haskell2010:_ <- withSourceFile "tests/unit/package-dump/ghc-7.10.txt" $ \src -> runConduit $ src .| decodeUtf8 .| conduitDumpPackage .| CL.consume ghcPkgId <- parseGhcPkgId "ghc-7.10.1-325809317787a897b7a97d646ceaa3a3" pkgIdent <- maybe (fail "Not parsable package id") pure $ parsePackageIdentifier "ghc-7.10.1" depends <- mapM parseGhcPkgId [ "array-0.5.1.0-e29cdbe82692341ebb7ce6e2798294f9" , "base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a" , "bin-package-db-0.0.0.0-708fc7d634a370b311371a5bcde40b62" , "bytestring-0.10.6.0-0909f8f31271f3d75749190bf2ee35db" , "containers-0.5.6.2-2114032c163425cc264e6e1169dc2f6d" , "directory-1.2.2.0-b4959b472d9eee380c6b32291ade29e0" , "filepath-1.4.0.0-40d643aa87258c186441a1f8f3e13ca6" , "hoopl-3.10.0.2-8c8dfc4c3140e5f7c982da224c3cb1f0" , "hpc-0.6.0.2-ac9064885aa8cb08a93314222939ead4" , "process-1.2.3.0-3b1e9bca6ac38225806ff7bbf3f845b1" , "template-haskell-2.10.0.0-e895139a0ffff267d412e3d0191ce93b" , "time-1.5.0.1-e17a9220d438435579d2914e90774246" , "transformers-0.4.2.0-c1a7bb855a176fe475d7b665301cd48f" , "unix-2.7.1.0-e5915eb989e568b732bc7286b0d0817f" ] haskell2010 { exposedModules = mempty } `shouldBe` DumpPackage { ghcPkgId = ghcPkgId , packageIdent = pkgIdent , sublib = Nothing , license = Just BSD3 , libDirs = ["/opt/ghc/7.10.1/lib/ghc-7.10.1/ghc_EMlWrQ42XY0BNVbSrKixqY"] , haddockInterfaces = ["/opt/ghc/7.10.1/share/doc/ghc/html/libraries/ghc-7.10.1/ghc.haddock"] , haddockHtml = Just "/opt/ghc/7.10.1/share/doc/ghc/html/libraries/ghc-7.10.1" , depends = depends , libraries = ["HSghc-7.10.1-EMlWrQ42XY0BNVbSrKixqY"] , hasExposedModules = True , isExposed = False , exposedModules = mempty } it "ghc 7.8.4 (osx)" $ do hmatrix:_ <- withSourceFile "tests/unit/package-dump/ghc-7.8.4-osx.txt" $ \src -> runConduit $ src .| decodeUtf8 .| conduitDumpPackage .| CL.consume ghcPkgId <- parseGhcPkgId 
"hmatrix-0.16.1.5-12d5d21f26aa98774cdd8edbc343fbfe" pkgId <- maybe (fail "Not parsable package id") pure $ parsePackageIdentifier "hmatrix-0.16.1.5" depends <- mapM parseGhcPkgId [ "array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b" , "base-4.7.0.2-918c7ac27f65a87103264a9f51652d63" , "binary-0.7.1.0-108d06eea2ef05e517f9c1facf10f63c" , "bytestring-0.10.4.0-78bc8f2c724c765c78c004a84acf6cc3" , "deepseq-1.3.0.2-0ddc77716bd2515426e1ba39f6788a4f" , "random-1.1-822c19b7507b6ac1aaa4c66731e775ae" , "split-0.2.2-34cfb851cc3784e22bfae7a7bddda9c5" , "storable-complex-0.2.2-e962c368d58acc1f5b41d41edc93da72" , "vector-0.10.12.3-f4222db607fd5fdd7545d3e82419b307"] hmatrix `shouldBe` DumpPackage { ghcPkgId = ghcPkgId , packageIdent = pkgId , sublib = Nothing , license = Just BSD3 , libDirs = [ "/Users/alexbiehl/.stack/snapshots/x86_64-osx/lts-2.13/7.8.4/lib/x86_64-osx-ghc-7.8.4/hmatrix-0.16.1.5" , "/opt/local/lib/" , "/usr/local/lib/" , "C:/Program Files/Example/"] , haddockInterfaces = ["/Users/alexbiehl/.stack/snapshots/x86_64-osx/lts-2.13/7.8.4/doc/html/hmatrix.haddock"] , haddockHtml = Just "/Users/alexbiehl/.stack/snapshots/x86_64-osx/lts-2.13/7.8.4/doc/html" , depends = depends , libraries = ["HShmatrix-0.16.1.5"] , hasExposedModules = True , isExposed = True , exposedModules = Set.fromList ["Data.Packed","Data.Packed.Vector","Data.Packed.Matrix","Data.Packed.Foreign","Data.Packed.ST","Data.Packed.Development","Numeric.LinearAlgebra","Numeric.LinearAlgebra.LAPACK","Numeric.LinearAlgebra.Algorithms","Numeric.Container","Numeric.LinearAlgebra.Util","Numeric.LinearAlgebra.Devel","Numeric.LinearAlgebra.Data","Numeric.LinearAlgebra.HMatrix","Numeric.LinearAlgebra.Static"] } it "ghc HEAD" $ do ghcBoot:_ <- withSourceFile "tests/unit/package-dump/ghc-head.txt" $ \src -> runConduit $ src .| decodeUtf8 .| conduitDumpPackage .| CL.consume ghcPkgId <- parseGhcPkgId "ghc-boot-0.0.0.0" pkgId <- maybe (fail "Not parsable package id") pure $ parsePackageIdentifier "ghc-boot-0.0.0.0" depends <- 
mapM parseGhcPkgId [ "base-4.9.0.0" , "binary-0.7.5.0" , "bytestring-0.10.7.0" , "directory-1.2.5.0" , "filepath-1.4.1.0" ] ghcBoot `shouldBe` DumpPackage { ghcPkgId = ghcPkgId , packageIdent = pkgId , sublib = Nothing , license = Just BSD3 , libDirs = ["/opt/ghc/head/lib/ghc-7.11.20151213/ghc-boot-0.0.0.0"] , haddockInterfaces = ["/opt/ghc/head/share/doc/ghc/html/libraries/ghc-boot-0.0.0.0/ghc-boot.haddock"] , haddockHtml = Just "/opt/ghc/head/share/doc/ghc/html/libraries/ghc-boot-0.0.0.0" , depends = depends , libraries = ["HSghc-boot-0.0.0.0"] , hasExposedModules = True , isExposed = True , exposedModules = Set.fromList ["GHC.Lexeme", "GHC.PackageDb"] } it "sinkMatching" $ runEnvNoLogging $ \pkgexe -> do m <- ghcPkgDump pkgexe [] $ conduitDumpPackage .| sinkMatching (Map.singleton (mkPackageName "transformers") (mkVersion [0, 0, 0, 0, 0, 0, 1])) case Map.lookup (mkPackageName "base") m of Nothing -> error "base not present" Just _ -> pure () liftIO $ do Map.lookup (mkPackageName "transformers") m `shouldBe` Nothing Map.lookup (mkPackageName "ghc") m `shouldBe` Nothing describe "pruneDeps" $ do it "sanity check" $ do let prunes = [ ((1, 'a'), []) , ((1, 'b'), []) , ((2, 'a'), [(1, 'b')]) , ((2, 'b'), [(1, 'a')]) , ((3, 'a'), [(1, 'c')]) , ((4, 'a'), [(2, 'a')]) ] actual = fst <$> pruneDeps fst fst snd bestPrune prunes actual `shouldBe` Map.fromList [ (1, (1, 'b')) , (2, (2, 'a')) , (4, (4, 'a')) ] prop "invariant holds" $ \prunes' -> -- Force uniqueness let prunes = Map.toList $ Map.fromList prunes' in checkDepsPresent prunes $ fst <$> pruneDeps fst fst snd bestPrune prunes type PruneCheck = ((Int, Char), [(Int, Char)]) bestPrune :: PruneCheck -> PruneCheck -> PruneCheck bestPrune x y | fst x > fst y = x | otherwise = y checkDepsPresent :: [PruneCheck] -> Map Int (Int, Char) -> Bool checkDepsPresent prunes selected = all hasDeps $ Set.toList allIds where depMap = Map.fromList prunes allIds = Set.fromList $ Map.elems selected hasDeps ident = case Map.lookup ident 
depMap of Nothing -> error "checkDepsPresent: missing in depMap" Just deps -> Set.null $ Set.difference (Set.fromList deps) allIds runEnvNoLogging :: (GhcPkgExe -> RIO SimplePrettyApp a) -> IO a runEnvNoLogging inner = do envVars <- view envVarsL <$> mkDefaultProcessContext menv <- mkProcessContext $ Map.delete "GHC_PACKAGE_PATH" envVars let find name = runRIO menv (findExecutable name) >>= either throwIO parseAbsFile pkg <- GhcPkgExe <$> find "ghc-pkg" app <- mkSimplePrettyApp mempty (Just menv) True 80 mempty runRIO app (inner pkg) stack-2.15.7/tests/unit/Stack/Types/TemplateNameSpec.hs0000644000000000000000000000521114502056216021116 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} module Stack.Types.TemplateNameSpec ( spec ) where import Stack.Types.TemplateName import Path.Internal import System.Info (os) import Test.Hspec spec :: Spec spec = describe "TemplateName" $ do describe "parseTemplateNameFromString" $ do let pathOf s = either error templatePath (parseTemplateNameFromString s) it "parses out the TemplatePath" $ do pathOf "github:user/name" `shouldBe` RepoPath (RepoTemplatePath GitHub "user" "name.hsfiles") pathOf "bitbucket:user/name" `shouldBe` RepoPath (RepoTemplatePath Bitbucket "user" "name.hsfiles") pathOf "gitlab:user/name" `shouldBe` RepoPath (RepoTemplatePath GitLab "user" "name.hsfiles") pathOf "http://www.com/file" `shouldBe` UrlPath "http://www.com/file" pathOf "https://www.com/file" `shouldBe` UrlPath "https://www.com/file" pathOf "name" `shouldBe` RelPath "name.hsfiles" (Path "name.hsfiles") pathOf "name.hsfile" `shouldBe` RelPath "name.hsfile.hsfiles" (Path "name.hsfile.hsfiles") pathOf "name.hsfiles" `shouldBe` RelPath "name.hsfiles" (Path "name.hsfiles") pathOf "" `shouldBe` RelPath ".hsfiles" (Path ".hsfiles") if os == "mingw32" then do pathOf "//home/file" `shouldBe` AbsPath (Path "\\\\home\\file.hsfiles") pathOf "/home/file" `shouldBe` RelPath "/home/file.hsfiles" (Path "\\home\\file.hsfiles") pathOf "/home/file.hsfiles" 
`shouldBe` RelPath "/home/file.hsfiles" (Path "\\home\\file.hsfiles") pathOf "c:\\home\\file" `shouldBe` AbsPath (Path "C:\\home\\file.hsfiles") pathOf "with/slash" `shouldBe` RelPath "with/slash.hsfiles" (Path "with\\slash.hsfiles") let colonAction = do pure $! pathOf "with:colon" colonAction `shouldThrow` anyErrorCall else do pathOf "//home/file" `shouldBe` AbsPath (Path "/home/file.hsfiles") pathOf "/home/file" `shouldBe` AbsPath (Path "/home/file.hsfiles") pathOf "/home/file.hsfiles" `shouldBe` AbsPath (Path "/home/file.hsfiles") pathOf "c:\\home\\file" `shouldBe` RelPath "c:\\home\\file.hsfiles" (Path "c:\\home\\file.hsfiles") pathOf "with/slash" `shouldBe` RelPath "with/slash.hsfiles" (Path "with/slash.hsfiles") pathOf "with:colon" `shouldBe` RelPath "with:colon.hsfiles" (Path "with:colon.hsfiles") stack-2.15.7/tests/unit/Stack/UploadSpec.hs0000644000000000000000000000353314502056216016667 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Stack.UploadSpec ( spec ) where import Data.Bits ((.&.)) import RIO ( Bool (..), IO, IsString, Maybe (..), String, ($), finally , readFileBinary, replicateM_, runRIO, unless , withSystemTempDirectory ) import RIO.Directory ( emptyPermissions, getPermissions, setOwnerReadable , setOwnerWritable ) import RIO.FilePath ( () ) import Stack.Upload ( HackageKey (..), maybeGetHackageKey, writeFilePrivate ) import System.Environment ( setEnv, unsetEnv ) import System.Permissions ( osIsWindows ) import System.PosixCompat.Files ( getFileStatus, fileMode ) import Test.Hspec ( Spec, example, it, shouldBe, shouldReturn ) spec :: Spec spec = do it "writeFilePrivate" $ example $ withSystemTempDirectory "writeFilePrivate" $ \dir -> replicateM_ 2 $ do let fp = dir "filename" contents :: IsString s => s contents = "These are the contents" writeFilePrivate fp contents actual <- readFileBinary fp actual `shouldBe` contents perms <- getPermissions fp perms `shouldBe` setOwnerWritable True 
(setOwnerReadable True emptyPermissions) unless osIsWindows $ do status <- getFileStatus fp (fileMode status .&. 0o777) `shouldBe` 0o600 it "finds a HACKAGE_KEY env variable" $ do runRIO () maybeGetHackageKey `shouldReturn` Nothing withEnv "HACKAGE_KEY" "api_key" $ runRIO () maybeGetHackageKey `shouldReturn` Just (HackageKey "api_key") withEnv :: String -> String -> IO a -> IO a withEnv k v f = do setEnv k v f `finally` unsetEnv k stack-2.15.7/tests/unit/windows/Stack/Ghci/FakePaths.hs0000644000000000000000000000041414502056216021035 0ustar0000000000000000module Stack.Ghci.FakePaths ( defaultDrive ) where -- | Helpers for writing fake paths for test suite for the GhciScript DSL. This -- must be a separate module because it is used in Template Haskell splices. defaultDrive :: FilePath defaultDrive = "C:\\" stack-2.15.7/tests/unit/unix/Stack/Ghci/FakePaths.hs0000644000000000000000000000041114502056216020323 0ustar0000000000000000module Stack.Ghci.FakePaths ( defaultDrive ) where -- | Helpers for writing fake paths for test suite for the GhciScript DSL. This -- must be a separate module because it is used in Template Haskell splices. defaultDrive :: FilePath defaultDrive = "/" stack-2.15.7/CONTRIBUTING.md0000644000000000000000000007724214620153473013353 0ustar0000000000000000# Contributors Guide Thank you for considering contributing to the maintenance or development of Stack, or otherwise supporting users of Stack! We hope that the following information will encourage and assist you. We start with some advice about Stack's goals and governance, and approach to supporting users. 
## Stack's goals Stack's current goals are: * To provide easy to use tooling for Haskell development * To provide complete support for at least the following three development environments: Linux, macOS, and Windows * To address the needs of industrial users, open source maintainers, and other people * To focus on the 'curated package set' use case * To prioritize reproducible build plans The goals above are not set in stone. However, any major changes to them should involve significant public discussion and a public vote by the Stack maintainer team. ## Stack's governance People involved in maintaining or developing Stack with rights to make commits to the repository can be classified into two groups: 'committers' and 'maintainers'. ### Stack's committers We encourages a wide range of people to be granted rights to make commits to the repository. People are encouraged to take initiative to make non-controversial changes, such as documentation improvements, bug fixes, performance improvements, and feature enhancements. Maintainers should be included in discussions of controversial changes and tricky code changes. Our general approach is **"it's easier to ask forgiveness than permission"**. If there is ever a bad change, it can always be rolled back. ### Stack's maintainers Stack's maintainers are long-term contributors to the project. Michael Snoyman (@snoyberg) was the founder of Stack, and its initial maintainer - and he has added others. Michael's current interests and priorities mean that he is no longer actively involved in adding new features to Stack. 
Maintainers are recognized for their contributions including: * Direct code contribution * Review of pull requests * Interactions on the GitHub issue tracker * Documentation management * External support - for example, hosting or training The maintainer team make certain decisions when that is necessary, specifically: * How to proceed, if there is disagreement on how to do so on a specific topic * Whether to add or remove (see further below) a maintainer Generally, maintainers are only removed due to non-participation or actions unhealthy to the project. Removal due to non-participation is not a punishment, simply a recognition that maintainership is for active participants only. We hope that removal due to unhealthy actions will never be necessary, but would include protection for cases of: * Disruptive behavior in public channels related to Stack * Impairing the codebase through bad commits/merges Like committers, maintainers are broadly encouraged to make autonomous decisions. Each maintainer is empowered to make a unilateral decision. However, maintainers should favor getting consensus first if: * They are uncertain what is the best course of action * They anticipate that other maintainers or users of Stack will disagree on the decision ## Stack's support A large part of the general discussion around Stack is on support-related topics, and that is reflected in the current issue tracker content. Assistance in responding to such matters is greatly appreciated. While support-related matters can be posted here as an 'issue', we encourage the use of other forums, in particular [Haskell's Discourse](https://discourse.haskell.org/). We also recommend Haskell's Discourse for general discussions about Stack's current or desired features. Stack is also discussed on Reddit's [Haskell community](https://www.reddit.com/r/haskell/). 
We encourage use of those other forums because support-related discussions can clog up the issue tracker and make it more difficult to maintain the project. People needing support may also get a faster and fuller response on other forums. Additions to the issue tracker are better suited to concrete feature proposals, bug reports, and other code base discussions (for example, refactorings). ## Bug Reports Please [open an issue](https://github.com/commercialhaskell/stack/issues/new) and use the provided template to include all necessary details. The more detailed your report, the faster it can be resolved and will ensure it is resolved in the right way. Once your bug has been resolved, the responsible person will tag the issue as _Needs confirmation_ and assign the issue back to you. Once you have tested and confirmed that the issue is resolved, close the issue. If you are not a member of the project, you will be asked for confirmation and we will close it. ## Documentation Consistent with its goal of being easy to use, Stack aims to maintain a high quality of in-tool and online documentation. The in-tool documentation includes the output when the `--help` flag is specified and the content of Stack's warning and error messages. When drafting documentation it is helpful to have in mind the intended reader and what they are assumed to know, and not know, already. In that regard, documentation should aim to meet, at least, the needs of a person who is about to begin to study computing as an undergraduate but who has not previously coded using Haskell. That person may be familiar with one popular operating system but may not be familiar with others. The files which make up Stack's online documentation are located in the `doc` directory of the repository. They are formatted in the [Markdown syntax](https://daringfireball.net/projects/markdown/), with some extensions. 
Those files are rendered on [haskellstack.org](http://haskellstack.org) by [Read the Docs](https://readthedocs.org/) using [MkDocs](https://www.mkdocs.org/) and the [Material for MkDocs](https://squidfunk.github.io/mkdocs-material/) theme. The `stable` branch of the repository provides the 'stable' version of the online documentation. The `master` branch provides the 'latest' version of the documentation. The 'stable' version of the online documentation is intended to be applicable to the latest released version of Stack. If you would like to help with that documentation, please submit a [pull request](https://help.github.com/articles/using-pull-requests/) with your changes/additions based off the [stable branch](https://github.com/commercialhaskell/stack/tree/stable). The Markdown files are organised into the navigation menu (the table of contents) in the file `mkdocs.yml`, the configuration file for MkDocs. The description of a file in the menu can differ from the file's name. The navigation menu allows files to be organised in a hierarchy. Currently, up to three levels are used. The top level is: * **Welcome!:** The introduction to Stack. This page aims to be no longer than necessary but also to not assume much existing knowledge on the part of the reader. It provides a 'quick start' guide to getting and using Stack. * **How to get & use Stack:** This includes Stack's user's guide, answers to frequently asked questions, and more thorough explanations of aspects of Stack. The user's guide is divided into two parts. The first part is 'introductory', and has the style of a tutorial. The second part is 'advanced', and has more of a reference style. * **How Stack works (advanced):** Many users will not need to consult this advanced documentation. * **Stack's code (advanced):** Other information useful to people contributing to, or maintaining, Stack's code, documentation, and other files. * **Signing key:** How Stack's released executables are signed. 
* **Glossary:** A glossary of terms used throughout Stack's in-tool and online documentation. We aim to describe the same things in the same way in different places. * **Version history:** The log of changes to Stack between versions. The specific versions of the online documentation (eg `v: v2.9.1`) are generated from the content of files at the point in the repository's history specified by the corresponding release tag. Consequently, that content is fixed once released. If the names of Markdown files do not change between versions, then people can use the flyout on the online documentation to move between different versions of the same page. For that reason, the names of new Markdown files should be chosen with care and existing Markdown files should not be deleted or renamed without due consideration of the consequences. The Markdown syntax supported by MkDocs and the Material for MkDocs theme can differ from the GitHub Flavored Markdown ([GFM](https://github.github.com/gfm/)) supported for content on GitHub.com. Please refer to the [MkDocs documentation](https://www.mkdocs.org/user-guide/writing-your-docs/#writing-with-markdown) and the [Material for MkDocs reference](https://squidfunk.github.io/mkdocs-material/reference/) to ensure your pull request will achieve the desired rendering. The extensions to the basic Markdown syntax used are set out in `mkdocs.yml` and include: * admonitions * code blocks, with syntax highlighting provided by [Pygments](https://pygments.org/) * content tabs, which can be nested * icons and emojis The files in the `doc` directory of the repository include two symbolic links (symlinks), `ChangeLog.md` and `CONTRIBUTING.md`. Users of Git on Windows should be aware of its approach to symbolic links. See the [Git for Windows Wiki](https://github.com/git-for-windows/git/wiki/Symbolic-Links). 
If `git config --show-scope --show-origin core.symlinks` is `false` in a local repository on Windows, then the files will be checked out as small plain files that contain the link text See the [Git documentation](https://git-scm.com/docs/git-config#Documentation/git-config.txt-coresymlinks). ## Error messages Stack catches exceptions thrown by its dependencies or by Stack itself in `Stack.main`. In addition to exceptions that halt Stack's execution, Stack logs certain other matters as 'errors'. To support the Haskell Foundation's [Haskell Error Index](https://errors.haskell.org/) initiative, all Stack error messages generated by Stack itself should have a unique initial line: ~~~text Error: [S-nnnn] ~~~ where `nnnn` is a four-digit number in the range 1000 to 9999. If you create a new Stack error, select a number using a random number generator (see, for example, [RANDOM.ORG](https://www.random.org/)) and check that number is not already in use in Stack's code. If it is, pick another until the number is unique. All exceptions generated by Stack itself are implemented using data constructors of closed sum types. Typically, there is one such type for each module that exports functions that throw exceptions. This type and the related `instance` definitions are usually located at the top of the relevant module. Stack supports two types of exceptions: 'pretty' exceptions that are instances of class `RIO.PrettyPrint.Pretty`, which provides `pretty :: e -> StyleDoc`, and thrown as expressions of type `RIO.PrettyPrint.PrettyException.PrettyException`; and other 'plain' exceptions that are simply instances of class `Control.Exception.Exception` and, hence, instances of class `Show`. These types and classes are re-exported by `Stack.Prelude`. Stack throws exceptions in parts of the code that should, in principle, be unreachable. The functions `Stack.Prelude.bugReport` and `Stack.Prelude.bugPrettyReport` are used to give the messages a consistent format. 
The names of the data constructors for those exceptions usually end in `Bug`. In a few cases, Stack may throw an exception in 'pure' code. The function `RIO.impureThrow :: Exception e => e -> a`, re-exported by `Stack.Prelude`, is used for that purpose. ## Code If you would like to contribute code to fix a bug, add a new feature, or otherwise improve `stack`, pull requests are most welcome. It's a good idea to [submit an issue](https://github.com/commercialhaskell/stack/issues/new) to discuss the change before plowing into writing code. If you'd like to help out but aren't sure what to work on, look for issues with the [awaiting pull request](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pull+request%22) label. Issues that are suitable for newcomers to the codebase have the [newcomer friendly](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pull+request%22+label%3a%22newcomer+friendly%22) label. Best to post a comment to the issue before you start work, in case anyone has already started. Please include a [ChangeLog](https://github.com/commercialhaskell/stack/blob/master/ChangeLog.md) entry and [documentation](https://github.com/commercialhaskell/stack/tree/master/doc/) updates with your pull request. ## Backwards Compatability The Stack package provides a library and an executable (`stack`) that depends on the library. The library is intended for use only by the executable. Consequently, the Stack package does not need to, and does not, strive for the compatibility with a range of versions of GHC that a library package (such as `pantry`) would seek. Stack aims to depend on well-known packages. The specific versions on which it depends at any time are specified by `package.yaml` and `stack.yaml`. It does not aim to be compatible with more than one version of the `Cabal` package at any time. 
At the time of writing (May 2024) the package versions are primarily ones in Stackage snapshot LTS Haskell 22.21 (for GHC 9.6.5). A Stack executable makes use of Cabal (the library) through a small 'Setup' executable that it compiles from Haskell source code. The executable compiles that code with a dependency on the version of Cabal that ships with the specified GHC compiler. Each release of Stack will normally aim to support all versions of GHC and the Cabal package in Stackage snapshots published within seven years of the release. For example, snapshot LTS Haskell 10.0, published on 19 December 2017, was the first LTS Haskell snapshot to provide GHC 8.2.2 which comes with `base-4.10.1.0` and `Cabal-2.0.1.1`. Normally, until, at least, 19 December 2024, Stack releases would aim to support the immediate predecessor, GHC 8.0.2 and `base-4.9.1.0`, `Cabal-1.24.2.0` and Haddock 2.17.4. However, the next version of Stack will drop support for versions of Cabal before 2.2. `Cabal-2.2.0.0` was released with GHC 8.4.1 on 8 March 2018. When a version of the Stack executable actually ceases to support a version of GHC and `Cabal`, that should be recorded in Stack's [ChangeLog](https://github.com/commercialhaskell/stack/blob/master/ChangeLog.md). ## Code Quality The Stack project uses [yamllint](https://github.com/adrienverge/yamllint) as a YAML file quality tool and [HLint](https://github.com/ndmitchell/hlint) as a code quality tool. ### Linting of YAML files The yamllint configuration extends the tools default and is set out in `.yamllint.yaml`. In particular, indentation is set at 2 spaces and `- ` in sequences is treated as part of the indentation. ### Linting of Haskell source code The HLint configurations is set out in `.hlint.yaml`. Stack contributors need not follow dogmatically the suggested HLint hints but are encouraged to debate their usefulness. 
If you find a HLint hint is not useful and detracts from readability of code, consider marking it in the [configuration file](https://github.com/commercialhaskell/stack/blob/master/.hlint.yaml) to be ignored. Please refer to the [HLint manual](https://github.com/ndmitchell/hlint#readme) for configuration syntax. Quoting [@mgsloan](https://github.com/commercialhaskell/stack/pulls?utf8=%E2%9C%93&q=is%3Apr%20author%3Amgsloan): > We are optimizing for code clarity, not code concision or what HLint thinks. You can install HLint with Stack. You might want to install it in the global project in case you run into dependency conflicts. HLint can report hints in your favourite text editor. Refer to the HLint repository for more details. To install, command: ~~~text stack install hlint ~~~ Once installed, you can check your changes with command: ~~~text stack exec -- sh ./etc/scripts/hlint.sh ~~~ ## Code syntax Stack makes use of GHC's `GHC2021` collection of language extensions, which is set using the `language` key in the `package.yaml` file. Stack makes use of single-constructor types where the constructor has a large number of fields. Some of those fields have similar types, and so on. Given that, Stack makes use of `OverloadedRecordDot`, introduced in GHC 9.2.1. It also makes use of `NoFieldSelectors`, also introduced in GHC 9.2.1, and, where necessary, `DuplicateRecordFields`. Together, these language extensions enable the removal from the names of fields of the prefixes that were used historically to indicate the type and make field names unique. This is because the names of fields no longer need to be unique in situations where the intended field is unambiguous. This allows for a terser syntax without loss of expressiveness. For example: ~~~haskell let cliTargets = (boptsCLITargets . bcoBuildOptsCLI) bco ~~~ can become: ~~~haskell let cliTargets = bco.buildOptsCLI.targets ~~~ The intended field is unambiguous in almost all cases. 
In the case of a few record updates it is ambiguous. The name of the field needs to be qualified in those cases. For example: ~~~haskell import qualified Stack.Types.Build as ConfigCache ( ConfigCache (..) ) ... let ignoreComponents :: ConfigCache -> ConfigCache ignoreComponents cc = cc { ConfigCache.components = Set.empty } ~~~ ## Code Style A single code style is not applied consistently to Stack's code and Stack is not Procrustean about matters of style. Rules of thumb, however, are: * keep pull requests that simply reformat code separate from those that make other changes to code; and * when making changes to code other than reformatting, follow the existing style of the function(s) or module(s) in question. That said, the following may help: * Stack's code generally avoids the use of C preprocessor (CPP) directives. Windows and non-Windows code is separated in separate source code directories and distinguished in Stack's Cabal file. `Stack.Constants.osIsWindows :: Bool` is provided. Multi-line strings are generally formatted on the assumption that GHC's `CPP` language pragma is not being used. * Language pragmas usually start with `NoImplictPrelude`, where applicable, and then all others are listed alphabetically. The closing `#-}` are aligned, for purely aesthetic reasons. * Stack is compiled with GHC's `-Wall` enabled, which includes `-Wtabs` (no tabs in source code). Most modules are based on two spaces (with one space for a `where`) for indentation but older and larger modules are still based on four spaces. * Stack's code and documentation tends to be based on lines of no more than 80 characters or, if longer, no longer than necessary. * Stack uses export lists. * Stack's imports are listed alphabetically, including `Stack.Prelude`, where applicable. The module names are left aligned, with space left for `qualified` where it is absent. * Stack's code is sufficiently stable that explict import lists can sensibly be used. 
The exception is the import of `Stack.Prelude`. Not all modules have comprehensive explicit import lists. * Short explicit import lists follow the module name. Longer lists start on the line below the module name. Spaces are used to separate listed items from their enclosing parentheses. * As noted above, the types used to implement Stack's exceptions and the related `instance` definitions are usually located at the top of the relevant module. * In function type signatures, the `::` is kept on the same line as the function's name. This format is Haskell syntax highlighter-friendly. * If `where` is used, the declarations follow on a separate line. ## Testing The Stack code has both unit tests and integration tests. ### Working with Unit Tests Unit tests can be found in the [tests/unit](https://github.com/commercialhaskell/stack/tree/master/tests/unit) directory. Tests are written using the [Hspec](https://hspec.github.io/) framework. In order to run the full test suite, you can simply command: ~~~text stack test ~~~ The `--file-watch` is a very useful option to get quick feedback. However, running the entire test suite after each file change will slow you down. You'll need to specify which test suite (unit test or integration) and pass arguments to specify which module you'd specifically like to run to get quick feedback. A description of this follows below. If you would like to run the unit tests on their own, you can command: ~~~text stack test stack:stack-unit-test ~~~ Running an individual module works with a command like this: ~~~text stack test stack:stack-unit-test --ta "-m " ~~~ Where `` is the name of the module without `Spec.hs`. You may also load tests into GHCi and run them with these command: ~~~text stack ghci stack:stack-unit-test --only-main # GHCi starting up output ... > :main -m "" ~~~ Where again, `` is the name of the module without `Spec.hs`. 
### Working with Integration Tests Integration tests can be found in the [tests/integration](https://github.com/commercialhaskell/stack/tree/master/tests/integration) folder. Running the integration tests is a little involved, you'll need to command: ~~~text stack build --flag stack:integration-tests stack --exec stack-integration-test ~~~ Running an individual module works with a command like this: ~~~text stack build --flag stack:integration-tests stack --exec "stack-integration-test -m " ~~~ Where `` is the name of the folder listed in the [test/integration/tests/](https://github.com/commercialhaskell/stack/tree/master/test/integration/tests) directory. You may also achieve this through GHCi with this command: ~~~text stack ghci stack:stack-integration-test # GHCi starting up output ... > :main -m "" ~~~ Where again, `` is the name of the folder listed in the [test/integration/tests/](https://github.com/commercialhaskell/stack/tree/master/test/integration/tests) directory. You can disable a few integration tests through the -n option : ~~~text stack build --flag stack:integration-tests stack --exec "stack-integration-test -n -n " ~~~ To disable folders named after `` and `` It's especially useful when some tests are taking a while to complete. ## Continuous integration (CI) We use [GitHub Actions](https://docs.github.com/en/actions) to do CI on Stack. The configuration of the workflows is in the YAML files in `.github/workflows`. The current active workflows are: ### Linting - `lint.yml` This workflow will run if: * there is a pull request * commits are pushed to these branches: `master`, `stable` and `rc/**` The workflow has one job (`style`). It runs on `ubuntu` only and applies yamllint and Hlint. ### Test suite - `unit-tests.yml` This workflow will run if: * there is a pull request * commits are pushed to these branches: `master`, `stable` and `rc/**`. * requested The workflow has two jobs: `pedantic` and `unit-tests`. 
The `pedantic` job runs on `ubuntu` only and builds Stack with the `--pedantic` flag. The `unit-tests` job runs on a matrix of operating systems and Stack project-level YAML configuration files (`stack.yaml`, by default). It builds and tests Stack with the following flags: `--haddock --no-haddock-deps`. Its approach to creating a cache depends on the operating system. Its 'Cache dependencies on Unix-like OS' step caches the Stack root on Unix-like operating systems. Its 'Cache dependencies on Windows' step caches the same information on Windows, but takes into account that a relevant directory is located outside of the Stack root. ### Integration-based - `integration-tests.yml` This workflow will run if: * there is a pull request * commits are pushed to these branches: `master`, `stable` and `rc/**` * any tag is created * requested The workflow has three jobs: `integration-tests`, `linux-arm64` and `github-release`. The `integration-tests` job runs on a matrix of operating systems (`ubuntu`, `windows` and `macos`) and makes use of the `release.hs` script at `etc/scripts`. Its approach to creating a cache is the same as for `unit-tests.yml`, described above. Its 'Install deps and run checks' step uses `release.hs check`. Its 'Build bindist' step uses `release.hs build`. Its 'Upload bindist' step uploads artifacts using the name of the runner's operating system (`Linux`, `Windows` or `macOS`) as the name for the artifacts. The `linux-arm64` job runs on a self-hosted runner for Linux and ARM64. It makes use of Docker and a Docker file at `etc/dockerfiles/arm64.Dockerfile`. Its 'Build bindist' step makes use of a compiled version of `release.hs` script at `etc/scripts` to command `release build`. Its 'Upload bindist' step uploads artifacts using `Linux-ARM64` as the name for the artifacts. The `github-release` job needs `integration-tests` and `linux-arm64`. It only takes effect if the trigger for the workflow was the creation of a tag. 
Its four steps `Download Linux/Windows/macOS/Linux-ARM64 artifact` download the named artifacts to path `_release`. Its step 'Hash and sign assets' makes use of a 'secret' environment variable `RELEASE_SIGNING_KEY` established by the owner of the Stack repository. The variable contains the private key for the GPG key with ID 0x575159689BEFB442. That key is imported into GPG and then used by GPG to create a detached signature for each file. ### Stan tool - `stan.yml` [Stan](https://hackage.haskell.org/package/stan) is a Haskell static analysis tool. As of `stan-0.1.0.1`, it supports GHC >= 9.6.3 and Stack is built with GHC 9.6.5. The tool is configured by the contents of the `.stan.toml` file. This workflow will run if: * there is a pull request * requested ## Haskell Language Server You may be using [Visual Studio Code](https://code.visualstudio.com/) (VS Code) with its [Haskell extension](https://marketplace.visualstudio.com/items?itemName=haskell.haskell), which is powered by the [Haskell Language Server](https://github.com/haskell/haskell-language-server) (HLS). Stack can be built with Stack (which is recommended) or with Cabal (the tool). === "Stack" If you use Stack to build Stack, command `stack ghci` in the root directory of the Stack project should work as expected, if you have first commanded `stack build` once. `stack build` causes Cabal (the library) to create the automatically generated module `Stack_build`. If `ghc` is not on your PATH, then Haskell Language Server may report the following error about `Stack.Constants.ghcShowOptionsOutput`: ~~~text • Exception when trying to run compile-time code: ghc: readCreateProcess: does not exist (No such file or directory) Code: (TH.runIO (readProcess "ghc" ["--show-options"] "") >>= TH.lift . lines) • In the untyped splice: $(TH.runIO (readProcess "ghc" ["--show-options"] "") >>= TH.lift . lines) ~~~ `ghc` should be on the PATH if you run VS Code itself in the Stack environment: ~~~text stack exec -- code . 
~~~ The following [cradle (`hie.yaml`)](https://github.com/haskell/hie-bios) should suffice to configure Haskell Language Server (HLS) explicitly for `./Setup.hs` and each of the buildable components in Stack's Cabal file: ~~~yaml cradle: multi: - path: "./Setup.hs" config: cradle: direct: arguments: [] - path: "./" config: cradle: stack: - path: "./src" component: "stack:lib" - path: "./app" component: "stack:exe:stack" - path: "./tests/integration" component: "stack:exe:stack-integration-test" - path: "./tests/unit" component: "stack:test:stack-unit-test" ~~~ === "Cabal (the tool)" If you use Cabal (the tool) to build Stack, command `cabal repl` in the root directory of the Stack project should work as expected, if you have GHC and (on Windows) MSYS2 on the PATH. Stack's custom `./Setup.hs` causes `cabal repl` to cause Cabal (the library) to create the automatically generated module `Stack_build`. If `ghc` is not on your PATH, then Haskell Language Server may report the following error about `Stack.Constants.ghcShowOptionsOutput`: ~~~text • Exception when trying to run compile-time code: ghc: readCreateProcess: does not exist (No such file or directory) Code: (TH.runIO (readProcess "ghc" ["--show-options"] "") >>= TH.lift . lines) • In the untyped splice: $(TH.runIO (readProcess "ghc" ["--show-options"] "") >>= TH.lift . lines) ~~~ `ghc` and (on Windows) MSYS2 should be on the PATH if you run commands (including `cabal`) in the Stack environment: ~~~text stack exec --no-ghc-package-path -- cabal repl ~~~ or ~~~text stack exec --no-ghc-package-path -- code . ~~~ Use of GHC's environment variable `GHC_PACKAGE_PATH` is not compatible with Cabal (the tool). That is why the `--no-ghc-package-path` flag must be specified with `stack exec` when relying on Cabal (the tool). 
The following [cradle (`hie.yaml`)](https://github.com/haskell/hie-bios) should suffice to configure Haskell Language Server (HLS) explicitly for `./Setup.hs` and each of the buildable components in Stack's Cabal file: ~~~yaml cradle: multi: - path: "./Setup.hs" config: cradle: direct: arguments: [] - path: "./" config: cradle: cabal: - path: "./src" component: "lib:stack" - path: "./app" component: "exe:stack" - path: "./tests/integration" component: "exe:stack-integration-test" - path: "./tests/unit" component: "test:stack-unit-test" ~~~ A cradle is not committed to Stack's repository because it imposes a choice of build tool. ## Dev Containers A [Development Container](https://containers.dev) (or Dev Container for short) allows you to use a container as a full‑featured development environment. You can run Dev Containers locally/remotely (with VS Code), or create a [Codespace](https://github.com/features/codespaces) for a branch in a repository to develop online. Stack's default Dev Container is intended for use with its default project‑level configuration (`stack.yaml`). But there are also Dev Containers for the experimental project‑level configurations. For further information, see the documentation for [Dev Containers](dev_containers.md). ## Slack channel If you're making deep changes and real-time communication with the Stack team would be helpful, we have a `#stack-collaborators` Slack channel in the Haskell Foundation workspace. To join the workspace, follow this [link](https://haskell-foundation.slack.com/join/shared_invite/zt-z45o9x38-8L55P27r12YO0YeEufcO2w#/shared-invite/email). ## Matrix room There is also a [Haskell Stack room](https://matrix.to/#/#haskell-stack:matrix.org) at address `#haskell-stack:matrix.org` on [Matrix](https://matrix.org/). stack-2.15.7/ChangeLog.md0000644000000000000000000051346114620154151013263 0ustar0000000000000000# Changelog ## v2.15.7 - 2024-05-12 Release notes: * This release fixes potential bugs. 
* The hash that Stack uses to distinguish one build plan from another has changed for plans that set (as opposed to unset) manually Cabal flags for immutable dependencies. This will cause Stack to rebuild dependencies for such plans. **Changes since v2.15.5:** Major changes: * Stack 2.15.5 and earlier cannot build with Cabal (the library) version `3.12.0.0`. Stack can now build with that Cabal version. Behavior changes: * Stack's `StackSetupShim` executable, when called with `repl` and `stack-initial-build-steps`, no longer uses Cabal's `replHook` to apply `initialBuildSteps` but takes a more direct approach. Bug fixes: * Fix a regression introduced in Stack 2.15.1 that caused a 'no operation' `stack build` to be slower than previously. * The hashes that Stack uses to distinguish one build plan from another now include the Cabal flags for immutable dependencies set manually. Previously, in error, only such flags that were unset manually were included. ## v2.15.5 - 2024-03-28 Release notes: * This release fixes potential bugs. **Changes since v2.15.3:** Behavior changes: * Following the handover of the Stackage project to the Haskell Foundation, the default value of the `urls` key is `latest-snapshot: https://stackage-haddock.haskell.org/snapshots.json`. * Stack no longer includes the snapshot package database when compiling the setup executable for a package with `build-type: Configure`. ## v2.15.3 - 2024-03-07 Release notes: * With one exception, this release fixes bugs. **Changes since v2.15.1:** Behavior changes: * `stack path --global-config`, `--programs`, and `--local-bin` no longer set up Stack's environment. Bug fixes: * Due to a bug, Stack 2.15.1 did not support versions of GHC before 8.2. Stack now supports GHC versions from 8.0. * `--haddock-for-hackage` does not ignore `--haddock-arguments`. * On Windows, package locations that are Git repositories with submodules now work as intended. 
* The `ghc`, `runghc` and `runhaskell` commands accept `--package` values that are a list of package names or package identifiers separated by spaces and, in the case of package identifiers, in the same way as if they were specified as targets to `stack build`. ## v2.15.1 - 2024-02-09 Release notes: * After an upgrade from an earlier version of Stack, on first use only, Stack 2.15.1 may warn that it had trouble loading the CompilerPaths cache. * The hash used as a key for Stack's pre-compiled package cache has changed, following the dropping of support for Cabal versions older than `1.24.0.0`. **Changes since v2.13.1:** Behavior changes: * Stack does not leave `*.hi` or `*.o` files in the `setup-exe-src` directory of the Stack root, and deletes any corresponding to a `setup-.hs` or `setup-shim-.hs` file, to avoid GHC issue [#21250](https://gitlab.haskell.org/ghc/ghc/-/issues/21250). * If Stack's Nix integration is not enabled, Stack will notify the user if a `nix` executable is on the PATH. This usually indicates the Nix package manager is available. In YAML configuration files, the `notify-if-nix-on-path` key is introduced, to allow the notification to be muted if unwanted. * Drop support for Intero (end of life in November 2019). * `stack path --stack-root` no longer sets up Stack's environment and does not load Stack's configuration. * Stack no longer locks on configuration, so packages (remote and local) can be configured in parallel. This increases the effective concurrency of builds that before would use fewer threads. Reconsider your `--jobs` setting accordingly. See [#84](https://github.com/commercialhaskell/stack/issues/84). * Stack warns that its support for Cabal versions before `2.2.0.0` is deprecated and may be removed in the next version of Stack. Removal would mean that projects using snapshots earlier than `lts-12.0` or `nightly-2018-03-18` (GHC 8.4.1) might no longer build. See [#6377](https://github.com/commercialhaskell/stack/issues/6377). 
* If Stack's `--resolver` option is not specified, Stack's `unpack` command with a package name will seek to update the package index before seeking to download the most recent version of the package in the index. * If the version of Cabal (the library) provided with the specified GHC can copy specific components, Stack will copy only the components built and will not build all executable components at least once. Other enhancements: * Consider GHC 9.8 to be a tested compiler and remove warnings. * Stack can build packages with dependencies on public sub-libraries of other packages. * Add flag `--no-init` to Stack's `new` command to skip the initialisation of the newly-created project for use with Stack. * The HTML file paths produced at the end of `stack haddock` are printed on separate lines and without a trailing dot. * Add option of the form `--doctest-option=` to `stack build`, where `doctest` is a program recognised by versions of the Cabal library from `1.24.0.0`. * Experimental: Add flag `--haddock-for-hackage` to Stack's `build` command (including the `haddock` synonym for `build --haddock`) to enable building project packages with flags to generate Haddock documentation, and an archive file, suitable for upload to Hackage. The form of the Haddock documentation generated for other packages is unaffected. * Experimental: Add flag `--documentation` (`-d` for short) to Stack's `upload` command to allow uploading of documentation for packages to Hackage. * `stack new` no longer rejects project templates that specify a `package.yaml` in a subdirectory of the project directory. * Stack will notify the user if Stack has not been tested with the version of GHC that is being user or a version of Cabal (the library) that has been found. In YAML configuration files, the `notify-if-ghc-untested` and `notify-if-cabal-untested` keys are introduced, to allow the notification to be muted if unwanted. * The compiler version is included in Stack's build message (e.g. 
`stack> build (lib + exe + test) with ghc-9.6.4`). * Add flag `--candidate` to Stack's `unpack` command, to allow package candidates to be unpacked locally. * Stack will notify the user if a specified architecture value is unknown to Cabal (the library). In YAML configuration files, the `notify-if-arch-unknown` key is introduced, to allow the notification to be muted if unwanted. * Add option `--filter ` to Stack's `ls dependencies text` command to filter out an item from the results, if present. The item can be `$locals` for all project packages. * Add option `--snapshot` as synonym for `--resolver`. * Add the `config set snapshot` command, corresponding to the `config set resolver` command. Bug fixes: * Fix the `Curator` instance of `ToJSON`, as regards `expect-haddock-failure`. * Better error message if a `resolver:` or `snapshot:` value is, in error, a YAML number. * Stack accepts all package names that are, in fact, acceptable to Cabal. * Stack's `sdist` command can check packages with names that include non-ASCII characters. ## v2.13.1 - 2023-09-29 Release notes: * Further to the release notes for Stack 2.3.1, the `-static` suffix has been removed from the statically linked Linux/x86_64 binaries. * The binaries for Linux/Aarch64 are now statically linked. * Binaries are now provided for macOS/AArch64. **Changes since v2.11.1:** Behavior changes: * Build artefacts are placed in `.stack-work/dist//` (hashed to a shorter path on Windows), rather than `.stack-work/dist//`. This allows build artifacts to be distinguished by GHC version. * By default, the `stack build` progress bar is capped to a length equal to the terminal width. * When building GHC from source, Stack no longer uses Hadrian's deprecated `--configure`\\`-c` flag and, instead, seeks to run GHC's Python `boot` and sh `configure` scripts, and ensure that the `happy` and `alex` executables are on the PATH. 
* When auto-detecting `--ghc-build` on Linux, the `musl` GHC build only is considered a possible GHC build if `libc.musl-x86_64.so.1` is found in `\lib` or `\lib64`. * No longer supports Cabal versions older than `1.24.0.0`. This means projects using snapshots earlier than `lts-7.0` or `nightly-2016-05-26` (GHC 8.0.1) will no longer build. GHC 8.0.1 comes with Haddock 2.17.2. * When unregistering many packages in a single step, Stack can now do that efficiently. Stack no longer uses GHC-supplied `ghc-pkg unregister` (which is, currently, slower). * `stack hpc report`, `stack list`, `stack templates` and `stack uninstall` output their information to the standard output stream rather than to the standard error stream. Logging is still to the standard error stream. * `stack upgrade` no longer assumes that binary upgrade is not supported on a AArch64 machine architecture. Other enhancements: * Consider GHC 9.6 to be a tested compiler and remove warnings. * Consider Cabal 3.10 to be a tested library and remove warnings. * Bump to Hpack 0.36.0. * Depend on `pantry-0.9.2`, for support for long filenames and directory names in archives created by `git archive`. * Avoid the duplicate resolving of usage files when parsing `*.hi` files into a set of modules and a collection of resolved usage files. See [#6123](https://github.com/commercialhaskell/stack/pull/6123). * Add composable component type flags `--exes`, `--tests` and `--benchmarks` to Stack's `ide targets` command, to list only those components. * `stack --verbose` excludes lengthy information about build plan construction in the debug output by default. The new `stack --[no-]plan-in-log` flag enables or disables the inclusion of the information in the debug output. * In YAML configuration files, the `casa` key is introduced, which takes precedence over the existing `casa-repo-prefix` key. The latter is deprecated. 
The new key also allows Stack's use of a Casa (content-addressable storage archive) server to be disabled and the maximum number of keys per request to be configured. The default Casa prefix references https://casa.stackage.org, instead of https://casa.fpcomplete.com. * Add option `--progress-bar=` to Stack's `build` command to configure the format of the progress bar, where `` is one of `none`, `count-only` (only the package count), `capped` (capped to a length equal to the terminal width) and `full` (the previous format). Bug fixes: * Restore `stack sdist --pvp-bounds lower` (broken with Stack 2.9.1). * Restore building of Stack with Cabal flag `disable-git-info` (broken with Stack 2.11.1). * With `stack hoogle`, avoid the message `Minimum version is hoogle-5.0. Found acceptable hoogle- in your index, requiring its installation.` when a `hoogle` executable has already been found on the `PATH`. * Stack's sanity check on a selected GHC now passes GHC flag `-hide-all-packages`, stopping GHC from looking for a package environment in default locations. * Restore Stack script files without extensions (broken with Stack 2.11.1). * Restore message suffix `due to warnings` with `dump-logs: warning` (broken with Stack 2.11.1). * On Windows, the `local-programs-path` directory can now be on a different drive to the system temporary directory and MSYS2 will still be installed. ## v2.11.1 - 2023-05-18 **Changes since v2.9.3:** Behavior changes: * Add flag `--[no-]-only-local-bin` to Stack's `upgrade` command for a binary upgrade. If the Stack executable is `my-stack`, the default is `my-stack upgrade --only-local-bin` where previously it was, effectively, `my-stack upgrade --no-only-local-bin`. If the Stack executable is `stack`, the default is `stack upgrade --no-only-local-bin`, the same behaviour as previously. 
* Use `$XDG_CACHE_HOME/stack/ghci-script`, rather than `/haskell-stack-ghci` (where `` is the directory yielded by the `temporary` package's `System.IO.Temp.getCanonicalTemporaryDirectory`), as the base location for GHCi script files generated by `stack ghci` or `stack repl`. See [#5203](https://github.com/commercialhaskell/stack/issues/5203) * Drop support for `Cabal` versions before 1.22 and, consequently, GHC versions before 7.10. * `stack ghci` and `stack repl` now take into account the values of `default-language` keys in Cabal files, like they take into account the values of `default-extensions` keys. * Removed `--ghc-paths`, `--global-stack-root` and `--local-bin-path` flags for `stack path`, deprecated in Stack 1.1.0 in favour of `--programs`, `--stack-root` and `--local-bin` respectively. * On Windows, `stack upgrade` always renames the file of the running Stack executable (adding extension `.old`) before attempting to write to the original file name. * On Windows, `stack upgrade` does not offer `sudo` command alternatives if attempting to write to the original file name of the running Stack executable results in a 'Permission' error. Other enhancements: * Add options of the form `--PROG-option=` to `stack build`, where `PROG` is a program recognised by the Cabal library and one of `alex`, `ar`, `c2hs`, `cpphs`, `gcc`, `greencard`, `happy`, `hsc2hs`, `hscolour`, `ld`, `pkg-config`, `strip` and `tar`. If Cabal uses the program during the configuration step, the argument is passed to it. * By default all `--PROG-option` options are applied to all project packages. This behaviour can be changed with new configuration option `apply-prog-options`. * Add flag `--[no-]use-root` to `stack script` (default disabled). 
Used with `--compile` or `--optimize`, when enabled all compilation outputs (including the executable) are written to a script-specific location in the `scripts` directory of the Stack root rather than the script's directory, avoiding clutter of the latter directory. * Better error message if the value of the `STACK_WORK` environment variable or `--work-dir` option is not a valid relative path. * Stack will use the value of the `GH_TOKEN`, or `GITHUB_TOKEN`, environment variable as credentials to authenticate its GitHub REST API requests. * `stack uninstall` also shows how to uninstall Stack-supplied tools. Bug fixes: * Fix incorrect warning if `allow-newer-deps` are specified but `allow-newer` is `false`. See [#6086](https://github.com/commercialhaskell/stack/issues/6086). * `stack build` with `--file-watch` or `--file-watch-poll` outputs 'pretty' error messages, as intended. See [#5978](https://github.com/commercialhaskell/stack/issues/5978). * `stack build` unregisters any project packages for the sub libraries of a project package that is to be unregistered. See [#6046](https://github.com/commercialhaskell/stack/issues/6046). * The warning that sublibrary dependency is not supported is no longer triggered by internal libraries. ## v2.9.3.1 - 2023-06-22 Hackage-only release of the `stack` package: * Supports building against snapshot Stackage LTS Haskell 21.0 (GHC 9.4.5), without extra-deps. * Supports build with `persistent-2.14.5.0`, using CPP directives. * Supports build with `unix-compat-0.7`, by removing reliance on the module `System.PosixCompat.User` removed in that package. * Includes `cabal.project` and `cabal.config` files in the package. ## v2.9.3 - 2022-12-16 **Changes since v2.9.1:** Behavior changes: * In YAML configuration files, the `package-index` key is introduced which takes precedence over the existing `package-indices` key. The latter is deprecated. 
* In YAML configuration files, the `hackage-security` key of the `package-index` key or the `package-indices` item can be omitted, and the Hackage Security configuration for the item will default to that for the official Hackage server. See [#5870](https://github.com/commercialhaskell/stack/issues/5870). * Add the `stack config set package-index download-prefix` command to set the location of Stack's package index in YAML configuration files. * `stack setup` with the `--no-install-ghc` flag warns that the flag and the command are inconsistent and now takes no action. Previously the flag was silently ignored. * To support the Haskell Foundation's [Haskell Error Index](https://errors.haskell.org/) initiative, all Stack error messages generated by Stack itself begin with a unique code in the form `[S-nnnn]`, where `nnnn` is a four-digit number. * Test suite executables that seek input on the standard input stream (`stdin`) will not throw an exception. Previously, they would throw an exception, consistent with Cabal's 'exitcode-stdio-1.0' test suite interface specification. Pass the flag `--no-tests-allow-stdin` to `stack build` to enforce Cabal's specification. See [#5897](https://github.com/commercialhaskell/stack/issues/5897) Other enhancements: * Help documentation for `stack upgrade` warns that if GHCup is used to install Stack, only GHCup should be used to upgrade Stack. That is because GHCup uses an executable named `stack` to manage versions of Stack, that Stack will likely overwrite on upgrade. * Add `stack ls dependencies cabal` command, which lists dependencies in the format of exact Cabal constraints. * Add `STACK_XDG` environment variable to use the XDG Base Directory Specification for the Stack root and Stack's global YAML configuration file, if the Stack root location is not set on the command line or by using the `STACK_ROOT` environment variable. 
* Add `stack path --global-config`, to yield the full path of Stack's user-specific global YAML configuration file (`config.yaml`). * Experimental: Add option `allow-newer-deps`, which allows users to specify a subset of dependencies for which version bounds should be ignored (`allow-newer-deps: ['foo', 'bar']`). This field has no effect unless `allow-newer` is enabled. Bug fixes: * Fix ambiguous module name `Distribution.PackageDescription`, if compiling `StackSetupShim` with `Cabal-syntax-3.8.1.0` in package database. See [#5886](https://github.com/commercialhaskell/stack/pull/5886). * In YAML configuration files, if the `package-indices` key (or the `hackage-security` key of its item) is omitted, the expiration of timestamps is now ignored, as intended. See Pantry [#63](https://github.com/commercialhaskell/pantry/pull/63) ## v2.9.1 - 2022-09-19 **Changes since v2.7.5:** Release notes: * After an upgrade from an earlier version of Stack, on first use only, Stack 2.9.1 may warn that it had trouble loading the CompilerPaths cache. * The support from the Stack team for binary releases now includes Linux/AArch64 and is limited to: * Linux 64-bit/x86_64 (statically linked) * Linux AArch64 (dynamically linked) * macOS x86_64 * Windows 64-bit/x86_64 Behavior changes: * `stack build --coverage` will generate a unified coverage report, even if there is only one `*.tix` file, in case a package has tested the library of another package that has not tested its own library. See [#5713](https://github.com/commercialhaskell/stack/issues/5713) * `stack --verbose` no longer includes the lengthy raw snapshot layer (rsl) in the debug output by default. The new `stack --[no-]rsl-in-log` flag enables or disables the inclusion of the rsl in the debug output. Other enhancements: * Consider GHC 9.2 and 9.4 to be tested compilers and remove warnings. * Consider Cabal 3.6 and 3.8 to be a tested libraries and remove warnings. * Bump to Hpack 0.35.0. 
* On Windows, the installer now sets `DisplayVersion` in the registry, enabling tools like `winget` to properly read the version number. * Adds flag `--script-no-run-compile` (disabled by default) that uses the `--no-run` option with `stack script` (and forces the `--compile` option). This enables a command like `stack --script-no-run-compile Script.hs` to behave like `stack script --no-run --compile -- Script.hs` but without having to list all the `` in the Stack interpreter options comment in `Script.hs` on the command line. That may help test that scripts compile in CI (continuous integration). See [#5755](https://github.com/commercialhaskell/stack/issues/5755) * Fuller help is provided at the command line if a subcommand is missing (for example, `stack ls` now yields the equivalent of `stack ls --help`). See [#809](https://github.com/commercialhaskell/stack/issues/809) * Add build option `--cabal-verbosity=VERBOSITY` to specify the Cabal verbosity level (the option accepts Cabal's numerical and extended syntax). See [#1369](https://github.com/commercialhaskell/stack/issues/1369) * Add the possibility of a `sh` script to customise fully GHC installation. See [#5585](https://github.com/commercialhaskell/stack/pull/5585) * `tools` subcommand added to `stack ls`, to list stack's installed tools. * `stack uninstall` shows how to uninstall Stack. * `--ghc-variant` accepts `int-native` as a variant. Bug fixes: * Fix `stack clean --full`, so that the files to be deleted are not in use. See [#5714](https://github.com/commercialhaskell/stack/issues/5714) * Fix an inconsistency in the pretty formatting of the output of `stack build --coverage` * Fix repeated warning about missing parameters when using `stack new` * Include `pantry-0.5.6`: Remove operational and mirror keys from bootstrap key set [#53](https://github.com/commercialhaskell/pantry/pull/53) * Pass any CPP options specified via `cpp-options:` in the Cabal file to GHCi using GHC's `-optP` flag. 
See [#5608](https://github.com/commercialhaskell/stack/pull/5608) * On Unix-like operating systems, respect the `with-gcc` option when installing GHC. See [#5609](https://github.com/commercialhaskell/stack/pull/5609) * Fixed logic in `get_isa()` in `get-stack.sh` to exclude systems that don't have x86 in their `uname -m` output. See [#5792](https://github.com/commercialhaskell/stack/issues/5792). * Fixed output of `stack ls snapshots local` on Windows, to behave like that on Unix-like operating systems. * Fix non-deterministic test failures when executing a test suite for a multi-project repository with parallelism enabled. See [#5024](https://github.com/commercialhaskell/stack/issues/5024) ## v2.7.5 - 2022-03-06 **Changes since v2.7.3:** Behavior changes: * Cloning git repositories isn't per sub-directory anymore, see [#5411](https://github.com/commercialhaskell/stack/issues/5411) Other enhancements: * `stack setup` supports installing GHC for macOS aarch64 (M1) * `stack upload` supports authentication with a Hackage API key (via `HACKAGE_KEY` environment variable). Bug fixes: * Ensure that `extra-path` works for case-insensitive `PATH`s on Windows. See [rio#237](https://github.com/commercialhaskell/rio/pull/237) * Fix handling of overwritten `ghc` and `ghc-pkg` locations. [#5597](https://github.com/commercialhaskell/stack/pull/5597) * Fix failure to find package when a dependency is shared between projects. [#5680](https://github.com/commercialhaskell/stack/issues/5680) * `stack ghci` now uses package flags in `stack.yaml` [#5434](https://github.com/commercialhaskell/stack/issues/5434) ## v2.7.3 - 2021-07-20 **Changes since v2.7.1:** Other enhancements: * `stack upgrade` will download from `haskellstack.org` before trying `github.com`. See [#5288](https://github.com/commercialhaskell/stack/issues/5288) * `stack upgrade` makes fewer assumptions about archive format. 
See [#5288](https://github.com/commercialhaskell/stack/issues/5288) * Add a `--no-run` flag to the `script` command when compiling. Bug fixes: * GHC source builds work properly for recent GHC versions again. See [#5528](https://github.com/commercialhaskell/stack/issues/5528) * `stack setup` always looks for the unpacked directory name to support different tar file naming conventions. See [#5545](https://github.com/commercialhaskell/stack/issues/5545) * Bump `pantry` version for better OS support. See [pantry#33](https://github.com/commercialhaskell/pantry/issues/33) * When building the sanity check for a new GHC install, make sure to clear `GHC_PACKAGE_PATH`. * Specifying GHC RTS flags in the `stack.yaml` no longer fails with an error. [#5568](https://github.com/commercialhaskell/stack/pull/5568) * `stack setup` will look in sandboxed directories for executables, not relying on `findExecutables`. See [GHC issue 20074](https://gitlab.haskell.org/ghc/ghc/-/issues/20074) * Track changes to `setup-config` properly to avoid reconfiguring on every change. See [#5578](https://github.com/commercialhaskell/stack/issues/5578) ## v2.7.1 - 2021-05-07 **Changes since v2.5.1.1:** Behavior changes: * `stack repl` now always warns about GHCi problems with loading multiple packages. It also now sets the proper working directory when invoked with one package. See [#5421](https://github.com/commercialhaskell/stack/issues/5421) * `custom-setup` dependencies are now properly initialized for `stack dist`. This makes `explicit-setup-deps` no longer required and that option was removed. See [#4006](https://github.com/commercialhaskell/stack/issues/4006) Other enhancements: * Consider GHC 9.0 to be a tested compiler and remove warnings. * Consider Cabal 3.6 to be a tested library and remove warnings. * Nix integration now passes `ghcVersion` (in addition to existing `ghc`) to `shell-file` as an identifier that can be looked up in a compiler attribute set. 
* Nix integration now allows Nix integration if the user is already in a nix-shell. This gets rid of "In Nix shell but reExecL is False" error. * `stack list` is a new command to list package versions in a snapshot. See [#5431](https://github.com/commercialhaskell/stack/pull/5431) * `custom-preprocessor-extensions` is a new configuration option for allowing Stack to be aware of any custom preprocessors you have added to `Setup.hs`. See [#3491](https://github.com/commercialhaskell/stack/issues/3491) * Added `--candidate` flag to `upload` command to upload a package candidate rather than publishing the package. * Error output using `--no-interleaved-output` no longer prepends indenting whitespace. This allows emacs compilation-mode and vim quickfix to locate and track errors. See [#5523](https://github.com/commercialhaskell/stack/pull/5523) Bug fixes: * `stack new` now supports branches other than `master` as default for GitHub repositories. See [#5422](https://github.com/commercialhaskell/stack/issues/5422) * Ignore all errors from `hi-file-parser`. See [#5445](https://github.com/commercialhaskell/stack/issues/5445) and [#5486](https://github.com/commercialhaskell/stack/issues/5486). * Support basic auth in package-indices. See [#5509](https://github.com/commercialhaskell/stack/issues/5509). * Add support for parsing `.hi` files from GHC 8.10 and 9.0. See [hi-file-parser#2](https://github.com/commercialhaskell/hi-file-parser/pull/2). ## v2.5.1.1 - 2020-12-09 Hackage-only release: * Support build with persistent-2.11.x and optparse-applicative-0.16.x ## v2.5.1 - 2020-10-15 **Changes since v2.3.3** Major changes: * Add the `snapshot-location-base` yaml configuration option, which allows overriding the default location of snapshot configuration files. This option affects how snapshot synonyms (LTS/Nightly) are expanded to URLs by the `pantry` library. 
* `docker-network` configuration key added to override docker `--net` arg Behavior changes: * File watching now takes into account specified targets, old behavior could be restored using the new flag `--watch-all` [#5310](https://github.com/commercialhaskell/stack/issues/5310) Other enhancements: * `stack ls dependencies json` now includes fields `sha256` and `size` for dependencies of `type` `archive` in `location`. [#5280](https://github.com/commercialhaskell/stack/issues/5280) * Build failures now show a hint to scroll up to the corresponding section [#5279](https://github.com/commercialhaskell/stack/issues/5279) * Customisable output styles (see `stack --help` and the `--stack-colors` option, and `stack ls stack-colors --help`) now include `info`, `debug`, `other-level`, `secondary` and `highlight`, used with verbose output. Bug fixes: * Fix `stack test --coverage` when using Cabal 3 * `stack new` now generates PascalCase'd module name correctly. [#5376](https://github.com/commercialhaskell/stack/issues/5376) * Connection issues to Casa server no longer cause builds to fail. Casa acts only as an optimizing cache layer, not a critical piece of infrastructure. * Fix modified time busting caches by always calculating sha256 digest during the build process. [#5125](https://github.com/commercialhaskell/stack/issues/5125) ## v2.3.3 - 2020-08-06 **Changes since v2.3.1** Other enhancements: * Add the `stack-developer-mode` flag Bug fixes: * When using the `STACK_YAML` env var with Docker, make the path absolute. * Fix the problem of `stack repl foo:test:bar` failing without a project build before that. See [#5213](https://github.com/commercialhaskell/stack/issues/5213) * Fix `stack sdist` introducing unnecessary sublibrary syntax when using pvp-bounds. See [#5289](https://github.com/commercialhaskell/stack/issues/5289) ## v2.3.1 - 2020-04-29 Release notes: * We have reduced the number of platforms that we support with binary releases. 
The reason behind this is that we've been slowed down in our release process until now with issues trying to build binaries for less common platforms. In order to make sure we can address issues more quickly (like supporting new GHC versions), we're limiting support from the Stack team to: * Linux 64-bit/x86_64 (statically linked) * macOS x86_64 * Windows 64-bit/x86_64 If others want to provide additional binaries, we will definitely be happy for the support. But since our CI system is currently able to produce these three bindists only, that's what we will be providing with the next release. * Since we no longer have dynamically linked Linux binaries, we are removing the `-static` suffix from the static Linux/x86_64 binaries. If you have scripts to download the latest stable Linux/x86_64 binary, update them to use `linux-x86_64` instead of `linux-x86_64-static` (if you are already using the former, nothing needs to change). For this release, both are supported, but the next release will no longer have the `-static` variant. * We are also deprecating the download links at https://stackage.org/stack. See this page for the current installation instructions: https://docs.haskellstack.org/en/stable/install_and_upgrade/. * These are the canonical locations to download the latest stable binaries from, and will continue to be supported going forward: * Linux 64-bit/x86_64 (static): https://get.haskellstack.org/stable/linux-x86_64.tar.gz * macOS x86_64: https://get.haskellstack.org/stable/osx-x86_64.tar.gz * Windows 64-bit/x86_64: https://get.haskellstack.org/stable/windows-x86_64.zip As always, binaries for specific versions are available from the GitHub releases: https://github.com/commercialhaskell/stack/releases. **Changes since v2.1.3.1** Major changes: * `setup-info-locations` yaml configuration now allows overwriting the default locations of `stack-setup-2.yaml`. 
[#5031](https://github.com/commercialhaskell/stack/pull/5031) [#2983](https://github.com/commercialhaskell/stack/issues/2983) [#2913](https://github.com/commercialhaskell/stack/issues/2913) * The `setup-info` configuration key now allows overwriting parts of the default `setup-info` * The `--setup-info-yaml` command line flag now may be used in all Stack commands such as `stack build`, and not only in `stack setup` * The `--setup-info-yaml` may specify multiple locations for `stack-setup.yaml` files. * The `stack upload` command now first reads the environment variables `$HACKAGE_USERNAME` and `$HACKAGE_PASSWORD`; only if they are missing does it ask for `username` or `password` * Fully remove GHCJS support. * Remove the `freeze` command. It has been replaced by lock files. Behavior changes: * Remove the deprecated `--stack-setup-yaml` command line argument in favor of `--setup-info-yaml`. See [#2647](https://github.com/commercialhaskell/stack/issues/2647) * We now recommend checking in generated Cabal files for repos. When generating lock files for extra-deps that only include `package.yaml` files, a deprecation warning will be generated. Also, those packages will no longer be included in the generated lock files. See [#5210](https://github.com/commercialhaskell/stack/issues/5210). Other enhancements: * Add `build-output-timestamps` flag in yaml. Setting it to true prefixes each build log output line with a timestamp. * Show warning about `local-programs-path` with spaces on windows when running scripts. See [#5013](https://github.com/commercialhaskell/stack/pull/5013) * Add `ls dependencies json` which will print dependencies as JSON. `ls dependencies --tree` is now `ls dependencies tree`. See [#4424](https://github.com/commercialhaskell/stack/pull/4424) * Remove warning for using Stack with GHC 8.8-8.10, and Cabal 3.0-3.2. 
* Allow relative paths in `--setup-info-yaml` and tool paths [#3394](https://github.com/commercialhaskell/stack/issues/3394) * Added the `--only-locals` flag. See [#5272](https://github.com/commercialhaskell/stack/issues/5272) Bug fixes: * Upgrade `pantry`: module mapping insertions into the database are now atomic. Previously, if you SIGTERMed at the wrong time while running a script, you could end up with an inconsistent database state. * `--resolver global` doesn't retrieve snapshots list from the internet because doesn't need it. See [#5103](https://github.com/commercialhaskell/stack/issues/5103) * Fix using relative links in haddocks output. See [#4971](https://github.com/commercialhaskell/stack/issues/4971). * Do not include generated Cabal file information in lock files. See [#5045](https://github.com/commercialhaskell/stack/issues/5045). * Use proper Hoogle executable path when installed automatically. See [#4905](https://github.com/commercialhaskell/stack/issues/4905) * Fix GHC version for batched package unregistration. See [#4951](https://github.com/commercialhaskell/stack/issues/4951) * Use Hoogle from the snapshot used and not the latest version. See [#4905](https://github.com/commercialhaskell/stack/issues/4905) * Resolve "'stty' is not recognized". See [#4901](https://github.com/commercialhaskell/stack/issues/4901) * Fix missing reconfigure check causing errors when a package gets used in multiple projects. See [#5147](https://github.com/commercialhaskell/stack/issues/5147) ## v2.1.3.1 - 2019-07-16 Hackage-only release: * Support persistent-template-2.7.x * Support rio-0.1.11.0 * Add `stack.yaml` back to hackage sdist, and add `snapshot.yaml` ## v2.1.3 - 2019-07-13 **Changes since v2.1.1** Behavior changes: * Disable WAL mode for SQLite3 databases, to improve compatibility with some platforms and filesystems. See [#4876](https://github.com/commercialhaskell/stack/issues/4876). * By default, do not perform expiry checks in Hackage Security. 
See [#4928](https://github.com/commercialhaskell/stack/issues/4928). Other enhancements: * Do not rerun expected test failures. This is mostly a change that will only affect the Stackage Curator use case, but there is now an additional message letting the user know when a previously-failed test case is being rerun. * Move configure information for project packages back to .stack-work to improve caching. See [#4893](https://github.com/commercialhaskell/stack/issues/4893). Bug fixes: * Fix to allow dependencies on specific versions of local git repositories. See [#4862](https://github.com/commercialhaskell/stack/pull/4862) * Allow Stack commands to be run in Nix mode without having a project file available. See [#4854](https://github.com/commercialhaskell/stack/issues/4864). * Removes dependency on gnu-tar for OSX and Linux environment. The `--force-local` option was required only for windows environment. * Properly wait for the `tar` subprocess to complete before returning, thereby avoiding a SIGTERM screwing up GHC installation. See [#4888](https://github.com/commercialhaskell/stack/issues/4888). * Use package complete locations from lock files when resolving dependencies in `extra-deps`. See [#4887](https://github.com/commercialhaskell/stack/issues/4887). * Set the `HASKELL_DIST_DIR` environment to a proper package dist directory so `doctest` is able to load modules autogenerated by Cabal. * Expose package library when running tests. * Fix support for non-ASCII module names. See [#4938](https://github.com/commercialhaskell/stack/issues/4938) Other changes: * Rename `pantry-tmp` package back to `pantry`, now that we have gained maintainership (which had been used by someone else for a candidate-only test that made it look like the name was free but prevented uploading a real package). ## v2.1.1.1 - 2019-06-14 Hackage-only release that removes `stack.yaml` from the sdist. 
This is because `stack.yaml` now defines a multi-package project, whereas Hackage works on the basis on individual packages (see [#4860](https://github.com/commercialhaskell/stack/issues/4860)) If building a `stack` executable for distribution, please download the source code from https://github.com/commercialhaskell/stack/releases/tag/v2.1.1 and build it using Stack itself in order to ensure identical behaviour to official binaries. This package on Hackage is provided for convenience and bootstrapping purposes. ## v2.1.1 - 2019-06-13 The Stack 2 release represents a series of significant changes to how Stack works internally. For the vast majority of cases, these changes are backwards compatible, in that existing projects will continue to build in the same way with Stack 2 as they did with Stack 1. The large version bump is due to the fundamental internal changes to cache handling, database storage (using SQLite in place of binary files), implicit snapshots (which greatly improve the precompiled cache), and moving to Pantry. We have also removed some less used features, as listed below. **Changes since v1.9.3** Major changes: * Switch over to pantry for managing packages. This is a major change to Stack's internals, and affects user-visible behavior in a few places. Some highlights: * Drop support for multiple package indices and legacy `00-index.tar` style indices. See [#4137](https://github.com/commercialhaskell/stack/issues/4137). * Support for archives and repos in the `packages` section has been removed. Instead, you must use `extra-deps` for such dependencies. `packages` now only supports local filepaths. * Add support for Git repositories containing (recursive) submodules. * Addition of new configuration options for specifying a "pantry tree" key, which provides more reproducibility around builds, and (in the future) will be used for more efficient package content downloads. You can also specify package name and version for more efficient config parsing. 
* __NOTE__ The new `stack freeze` command provides support for automatically generating this additional information. * Package contents and metadata are stored in an SQLite database in place of files on the filesystem. The `pantry` library can be used for interacting with these contents. * Internally, Stack has changed many datatypes, including moving to Cabal's definition of many data types. As a result of such changes, existing cache files will in general be invalidated, resulting in Stack needing to rebuild many previously cached builds in the new version. Sorry :(. * A new command, `stack freeze` has been added which outputs project and snapshot definitions with dependencies pinned to their exact versions. * The `ignore-revision-mismatch` setting is no longer needed, and has been removed. * Overriding GHC boot packages results in any other GHC boot packages depending on it being no longer available as a dependency, such packages need to be added explicitly when needed. See [#4510] (https://github.com/commercialhaskell/stack/issues/4510). * Cabal solver integration was not updated to support newer `cabal-install` versions so `stack solver` command was removed as well as a related option `--solver` from `stack new` and `stack init`. * Upgrade to Cabal 2.4 * Note that, in this process, the behavior of file globbing has been modified to match that of Cabal. In particular, this means that for Cabal spec versions less than 2.4, `*.txt` will match `foo.txt`, but not `foo.2.txt`. * Remove the `stack image` command. With the advent of Docker multistage builds, this functionality is no longer useful. For an example, please see [Building Haskell Apps with Docker](https://www.fpcomplete.com/blog/2017/12/building-haskell-apps-with-docker). * Experimental: Support building GHC from source * Stack now supports building and installing GHC from source. 
The built GHC is uniquely identified by a commit id and an Hadrian "flavour" (Hadrian is the newer GHC build system), hence `compiler` can be set to use a GHC built from source with `ghc-git-COMMIT-FLAVOUR` * `stack.yaml` now supports a `configure-options`, which are passed directly to the `configure` step in the Cabal build process. See [#1438](https://github.com/commercialhaskell/stack/issues/1438) * Remove support for building GHCJS itself. Future releases of Stack may remove GHCJS support entirely. * Support for lock files for pinning exact project dependency versions Behavior changes: * `stack.yaml` now supports `snapshot`: a synonym for `resolver`. See [#4256](https://github.com/commercialhaskell/stack/issues/4256) * `stack script` now passes `-i -idir` in to the `ghc` invocation. This makes it so that the script can import local modules, and fixes an issue where `.hs` files in the current directory could affect interpretation of the script. See [#4538](https://github.com/commercialhaskell/stack/pull/4538) * When using `stack script`, custom snapshot files will be resolved relative to the directory containing the script. * Remove the deprecated `--upgrade-cabal` flag to `stack setup`. * Support the `drop-packages` field in `stack.yaml` * Remove the GPG signing code during uploads. The GPG signatures have never been used yet, and there are no plans to implement signature verification. * Remove the `--plain` option for the `exec` family of commands * Always use the `--exact-configuration` Cabal configuration option when building (should mostly be a non-user-visible enhancement). * No longer supports Cabal versions older than `1.19.2`. This means projects using snapshots earlier than `lts-3.0` or `nightly-2015-05-05` will no longer build. * Remove the `stack docker cleanup` command. 
Docker itself now has [`docker image prune`](https://docs.docker.com/engine/reference/commandline/image_prune/) and [`docker container prune`](https://docs.docker.com/engine/reference/commandline/container_prune/), which you can use instead. * Interleaved output is now turned on by default, see [#4702](https://github.com/commercialhaskell/stack/issues/4702). In addition, the `packagename> ` prefix is no longer included in interleaved mode when only building a single target. * The `-fhide-source-paths` GHC option is now enabled by default and can be disabled via the `hide-source-paths` configuration option in `stack.yaml`. See [#3784](https://github.com/commercialhaskell/stack/issues/3784) * Stack will reconfigure a package if you modify your `PATH` environment variable. See [#3138](https://github.com/commercialhaskell/stack/issues/3138). * For GHC 8.4 and later, disable the "shadowed dependencies" workaround. This means that Stack will no longer have to force reconfigures as often. See [#3554](https://github.com/commercialhaskell/stack/issues/3554). * When building a package, Stack takes a lock on the dist directory in use to avoid multiple runs of Stack from trampling each others' files. See [#2730](https://github.com/commercialhaskell/stack/issues/2730). * Stack will check occasionally if there is a new version available and prompt the user to upgrade. This will not incur any additional network traffic, as it will piggy-back on the existing Hackage index updates. You can set `recommend-stack-upgrade: false` to bypass this. See [#1681](https://github.com/commercialhaskell/stack/issues/1681). * `stack list-dependencies` has been removed in favour of `stack ls dependencies`. * The new default for `--docker-auto-pull` is enabled. See [#3332](https://github.com/commercialhaskell/stack/issues/3332). Other enhancements: * Support MX Linux in get-stack.sh. Fixes [#4769](https://github.com/commercialhaskell/stack/issues/4769). 
* Defer loading up of files for project packages. This allows us to get plan construction errors much faster, and avoid some unnecessary work when only building a subset of packages. This is especially useful for the curator use case. * Existing global option `--color=WHEN` is now also available as a non-project-specific yaml configuration parameter `color:`. * Adopt the standard proposed at http://no-color.org/, that color should not be added by default if the `NO_COLOR` environment variable is present. * New command `stack ls stack-colors` lists the styles and the associated 'ANSI' control character sequences that Stack uses to color some of its output. See `stack ls stack-colors --help` for more information. * New global option `--stack-colors=STYLES`, also available as a non-project-specific yaml configuration parameter, allows a Stack user to redefine the default styles that Stack uses to color some of its output. See `stack --help` for more information. * British English spelling of 'color' (colour) accepted as an alias for `--color`, `--stack-colors`, `stack ls stack-colors` at the command line and for `color:` and `stack-colors:` in yaml configuration files. * New build option `--ddump-dir`. (See [#4225](https://github.com/commercialhaskell/stack/issues/4225)) * Stack parses and respects the `preferred-versions` information from Hackage for choosing latest version of a package in some cases, e.g. `stack unpack packagename`. * The components output in the `The main module to load is ambiguous` message now include package names so they can be more easily copy-pasted. * Git repos are shared across multiple projects. See [#3551](https://github.com/commercialhaskell/stack/issues/3551) * Use en_US.UTF-8 locale by default in pure Nix mode so programs won't crash because of Unicode in their output [#4095](https://github.com/commercialhaskell/stack/issues/4095) * Add `--tree` to `ls dependencies` to list dependencies as tree. 
[#4101](https://github.com/commercialhaskell/stack/issues/4101) * Add `--pedantic` to `ghci` to run with `-Wall` and `-Werror` [#4463](https://github.com/commercialhaskell/stack/issues/4463) * Add `--cabal-files` flag to `stack ide targets` command. * Add `--stdout` flag to all `stack ide` subcommands. * Use batches when unregistering packages with `ghc-pkg`. (See [#2662](https://github.com/commercialhaskell/stack/issues/2662)) * `get-stack` script now works on Windows CI machines of Appveyor, Travis and Azure Pipelines. See [#4535](https://github.com/commercialhaskell/stack/issues/4535)/ * Show snapshot being used when `stack ghci` is invoked outside of a project directory. See [#3651](https://github.com/commercialhaskell/stack/issues/3651) * The script interpreter now accepts a `--extra-dep` flag for adding packages not present in the snapshot. Currently, this only works with packages from Hackage, not Git repos or archives. * When using the script interpreter with `--optimize` or `--compile`, Stack will perform an optimization of checking whether a newer executable exists, making reruns significantly faster. There's a downside to this, however: if you have a multifile script, and change one of the dependency modules, Stack will not automatically detect and recompile. * `stack clean` will delete the entire `.stack-work/dist` directory, not just the relevant subdirectory for the current GHC version. See [#4480](https://github.com/commercialhaskell/stack/issues/4480). * Add `stack purge` as a shortcut for `stack clean --full`. See [#3863](https://github.com/commercialhaskell/stack/issues/3863). * Both `stack dot` and `stack ls dependencies` accept a `--global-hints` flag to bypass the need for an installed GHC. See [#4390](https://github.com/commercialhaskell/stack/issues/4390). * Add the `stack config env` command for getting shell script environment variables. See [#620](https://github.com/commercialhaskell/stack/issues/620). 
* Less verbose output from `stack setup` on Windows. See [#1212](https://github.com/commercialhaskell/stack/issues/1212). * Add an optional `ignore-expiry` flag to the `hackage-security` section of the `~/.stack/config.yaml`. It allows to disable timestamp expiration verification just like `cabal --ignore-expiry` does. The flag is not enabled by default so that the default functionality is not changed. * Include default values for most command line flags in the `--help` output. See [#893](https://github.com/commercialhaskell/stack/issues/893). * Set the `GHC_ENVIRONMENT` environment variable to specify dependency packages explicitly when running test. This is done to prevent ambiguous module name errors in `doctest` tests. * `get-stack` script now works on Windows CI machines of Appveyor, Travis and Azure Pipelines. See [#4535](https://github.com/commercialhaskell/stack/issues/4535) * Warn when a Docker image does not include a `PATH` environment variable. See [#2472](https://github.com/commercialhaskell/stack/issues/2742) * When using `system-ghc: true`, Stack will now find the appropriate GHC installation based on the version suffix, allowing you to more easily switch between various system-installed GHCs. See [#2433](https://github.com/commercialhaskell/stack/issues/2433). * `stack init` will now support create a `stack.yaml` file without any local packages. See [#2465](https://github.com/commercialhaskell/stack/issues/2465) * Store caches in SQLite database instead of files. * No longer use "global" Docker image database (`docker.db`). * User config files are respected for the script command. See [#3705](https://github.com/commercialhaskell/stack/issues/3705), [#3887](https://github.com/commercialhaskell/stack/issues/3887). * Set the `GHC_ENVIRONMENT` environment variable to `-` to tell GHC to ignore any such files when GHC is new enough (>= 8.4.4), otherwise simply unset the variable. 
This allows Stack to have control of package databases when running commands like `stack exec ghci`, even in the presence of implicit environment files created by `cabal new-build`. See [#4706](https://github.com/commercialhaskell/stack/issues/4706). * Use a database cache table to speed up discovery of installed GHCs * You can specify multiple `--test-arguments` options. See [#2226](https://github.com/commercialhaskell/stack/issues/2226) * Windows terminal width detection is now done. See [#3588](https://github.com/commercialhaskell/stack/issues/3588) * On Windows, informs users if the 'programs' path contains a space character and further warns users if that path does not have an alternative short ('8 dot 3') name, referencing the `local-programs-path` configuration option. See [#4726](https://github.com/commercialhaskell/stack/issues/4726) * Add `--docker-mount-mode` option to set the Docker volume mount mode for performance tuning on macOS. Bug fixes: * Ignore duplicate files for a single module when a Haskell module was generated from a preprocessor file. See [#4076](https://github.com/commercialhaskell/stack/issues/4076). * Only track down components in current directory if there are no hs-source-dirs found. This eliminates a number of false-positive warnings, similar to [#4076](https://github.com/commercialhaskell/stack/issues/4076). * Handle a change in GHC's hi-dump format around `addDependentFile`, which now includes a hash. See [yesodweb/yesod#1551](https://github.com/yesodweb/yesod/issues/1551) * Fix `subdirs` for git repos in `extra-deps` to match whole directory names. Also fixes for `subdirs: .`. See [#4292](https://github.com/commercialhaskell/stack/issues/4292) * Fix for git packages to update submodules to the correct state. See [#4314](https://github.com/commercialhaskell/stack/pull/4314) * Add `--cabal-files` flag to `stack ide targets` command. * Don't download ghc when using `stack clean`. 
* Support loading in GHCi definitions from symlinked C files. Without this patch, Stack will try to find object files in the directory pointed to by symlinks, while GCC will produce the object files in the original directory. See [#4402](https://github.com/commercialhaskell/stack/pull/4402) * Fix handling of GitHub and URL templates on Windows. See [#4394](https://github.com/commercialhaskell/stack/issues/4394) * Fix `--file-watch` not responding to file modifications when running inside docker on Mac. See [#4506](https://github.com/commercialhaskell/stack/issues/4506) * Using `--ghc-options` with `stack script --compile` now works. * Ensure the detailed-0.9 type tests work. See [#4453](https://github.com/commercialhaskell/stack/issues/4453). * Extra include and lib dirs are now order-dependent. See [#4527](https://github.com/commercialhaskell/stack/issues/4527). * Apply GHC options when building a `Setup.hs` file. See [#4526](https://github.com/commercialhaskell/stack/issues/4526). * Stack handles ABI changes in FreeBSD 12 by differentiating that version from previous. * Help text for the `templates` subcommand now reflects behaviour in Stack 1.9 — that it downloads and shows a help file, rather than listing available templates. * Fix detection of aarch64 platform (this broke when we upgraded to a newer Cabal version). * Docker: fix detecting and pulling missing images with `--docker-auto-pull`. See [#4598](https://github.com/commercialhaskell/stack/issues/4598) * Hackage credentials are not world-readable. See [#2159](https://github.com/commercialhaskell/stack/issues/2159). * Warnings are dumped from logs even when color is enabled. See [#2997](https://github.com/commercialhaskell/stack/issues/2997) * `stack init` will now work for Cabal files with sublibraries. See [#4408](https://github.com/commercialhaskell/stack/issues/4408) * When the Cabal spec version is newer than the global Cabal version, build against the snapshot's Cabal library. 
See [#4488](https://github.com/commercialhaskell/stack/issues/4488) * Docker: fix detection of expected subprocess failures. This fixes downloading a compatible `stack` executable when the host `stack` is not compatible with the Docker image (on Linux), and doesn't show an unnecessary extra error when the in-container re-exec'ed `stack` exits with failure. * The `stack ghci` command's `--ghc-options` flag now parses multiple options. See [#3315](https://github.com/commercialhaskell/stack/issues/3315). ## v1.9.3.1 - 2019-04-18 Hackage-only release with no user facing changes (added compatibility with `rio-0.1.9.2`). ## v1.9.3 - 2018-12-02 Bug fixes: * Stack can now be compiled again inside a directory that does not contain a `.git` directory. See [#4364](https://github.com/commercialhaskell/stack/issues/4364#issuecomment-431600841) * Handle a change in GHC's hi-dump format around `addDependentFile`, which now includes a hash. See [yesodweb/yesod#1551](https://github.com/yesodweb/yesod/issues/1551) * Allow variables to appear in template file names. ## v1.9.1.1 - 2018-11-14 Hackage-only release with no user facing changes. * Stack can now be compiled again inside a directory that does not contain a `.git` directory. See [#4364](https://github.com/commercialhaskell/stack/issues/4364#issuecomment-431600841) ## v1.9.1 - 2018-10-17 Release notes: * Statically linked Linux bindists are back again, thanks to [@nh2](https://github.com/nh2). * We will be deleting the Ubuntu, Debian, CentOS, Fedora, and Arch package repos from `download.fpcomplete.com` soon. These have been deprecated for over a year and have not received new releases, but were left in place for compatibility with older scripts. Major changes: * Upgrade to Cabal 2.4 * Note that, in this process, the behavior of file globbing has been modified to match that of Cabal. In particular, this means that for Cabal spec versions less than 2.4, `*.txt` will match `foo.txt`, but not `foo.2.txt`. 
* `GHCJS` support is being downgraded to 'experimental'. A warning notifying the user of the experimental status of `GHCJS` will be displayed. Behavior changes: * `ghc-options` from `stack.yaml` are now appended to `ghc-options` from `config.yaml`, whereas before they would be replaced. * `stack build` will now announce when sublibraries of a package are being build, in the same way executables, tests, benchmarks and libraries are announced * `stack sdist` will now announce the destination of the generated tarball, regardless of whether or not it passed the sanity checks * The `--upgrade-cabal` option to `stack setup` has been deprecated. This feature no longer works with GHC 8.2 and later. Furthermore, the reason for this flag originally being implemented was drastically lessened once Stack started using the snapshot's `Cabal` library for custom setups. See: [#4070](https://github.com/commercialhaskell/stack/issues/4070). * With the new namespaced template feature, `stack templates` is no longer able to meaningfully display a list of all templates available. Instead, the command will download and display a [help file](https://github.com/commercialhaskell/stack-templates/blob/master/STACK_HELP.md) with more information on how to discover templates. See: [#4039](https://github.com/commercialhaskell/stack/issues/4039) * Build tools are now handled in a similar way to `cabal-install`. In particular, for legacy `build-tools` fields, we use a hard-coded list of build tools in place of looking up build tool packages in a tool map. This both brings Stack's behavior closer into line with `cabal-install`, avoids some bugs, and opens up some possible optimizations/laziness. See: [#4125](https://github.com/commercialhaskell/stack/issues/4125). * Mustache templating is not applied to large files (over 50kb) to avoid performance degradation. See: [#4133](https://github.com/commercialhaskell/stack/issues/4133). * `stack upload` signs the package by default, as documented. 
`--no-signature` turns the signing off. [#3739](https://github.com/commercialhaskell/stack/issues/3739) * In case there is a network connectivity issue while trying to download a template, Stack will check whether that template had been downloaded before. In that case, the cached version will be used. See [#3850](https://github.com/commercialhaskell/stack/issues/3850). * In Stack's script interpreter, `-- stack --verbosity=info script` and `-- stack script --verbosity=info` now have the same effect and both override the `--verbosity=error` default in the interpreter. Previously the default meant the former was equivalent to `-- stack --verbosity=info script --verbosity=error` and the latter was equivalent to `-- stack --verbosity=error script --verbosity=info`, with the subcommand's global option having precedence over the Stack command's global option in each case. See [#5326](https://github.com/commercialhaskell/stack/issues/5326). Other enhancements: * On Windows before Windows 10, --color=never is the default on terminals that can support ANSI color codes in output only by emulation * On Windows, recognise a 'mintty' (false) terminal as a terminal, by default * `stack build` issues a warning when `base` is explicitly listed in `extra-deps` of `stack.yaml` * `stack build` suggests trying another GHC version should the build plan end up requiring unattainable `base` version. * A new sub command `run` has been introduced to build and run a specified executable similar to `cabal run`. If no executable is provided as the first argument, it defaults to the first available executable in the project. * `stack build` missing dependency suggestions (on failure to construct a valid build plan because of missing deps) are now printed with their latest Cabal file revision hash. See [#4068](https://github.com/commercialhaskell/stack/pull/4068). * Added new `--tar-dir` option to `stack sdist`, that allows to copy the resulting tarball to the specified directory. 
* Introduced the `--interleaved-output` command line option and `build.interleaved-output` config value which causes multiple concurrent builds to dump to stderr at the same time with a `packagename> ` prefix. See [#3225](https://github.com/commercialhaskell/stack/issues/3225). * The default retry strategy has changed to exponential backoff. This should help with [#3510](https://github.com/commercialhaskell/stack/issues/3510). * `stack new` now allows template names of the form `username/foo` to download from a user other than `commercialstack` on GitHub, and can be prefixed with the service `github:`, `gitlab:`, or `bitbucket:`. See [#4039](https://github.com/commercialhaskell/stack/issues/4039) * Switch to `githash` to include some unmerged bugfixes in `gitrev` Suggestion to add `'allow-newer': true` now shows path to user config file where this flag should be put into [#3685](https://github.com/commercialhaskell/stack/issues/3685) * `stack ghci` now asks which main target to load before doing the build, rather than after * Bump to Hpack 0.29.0 * With GHC 8.4 and later, Haddock is given the `--quickjump` flag. * It is possible to specify the Hackage base URL to upload packages to, instead of the default of `https://hackage.haskell.org/`, by using `hackage-base-url` configuration option. * When using Nix, if a specific minor version of GHC is not requested, the latest minor version in the given major branch will be used automatically. Bug fixes: * `stack ghci` now does not invalidate `.o` files on repeated runs, meaning any modules compiled with `-fobject-code` will be cached between ghci runs. See [#4038](https://github.com/commercialhaskell/stack/pull/4038). * `~/.stack/config.yaml` and `stack.yaml` terminating by newline * The previous released caused a regression where some `stderr` from the `ghc-pkg` command showed up in the terminal. This output is now silenced. * A regression in recompilation checking introduced in v1.7.1 has been fixed. 
See [#4001](https://github.com/commercialhaskell/stack/issues/4001) * `stack ghci` on a package with internal libraries was erroneously looking for a wrong package corresponding to the internal library and failing to load any module. This has been fixed now and changes to the code in the library and the sublibrary are properly tracked. See [#3926](https://github.com/commercialhaskell/stack/issues/3926). * For packages with internal libraries not depended upon, `stack build` used to fail the build process since the internal library was not built but it was tried to be registered. This is now fixed by always building internal libraries. See [#3996](https://github.com/commercialhaskell/stack/issues/3996). * `--no-nix` was not respected under NixOS * Fix a regression which might use a lot of RAM. See [#4027](https://github.com/commercialhaskell/stack/issues/4027). * Order of commandline arguments does not matter anymore. See [#3959](https://github.com/commercialhaskell/stack/issues/3959) * When prompting users about saving their Hackage credentials on upload, flush to stdout before waiting for the response so the prompt actually displays. Also fixes a similar issue with ghci target selection prompt. * If `cabal` is not on PATH, running `stack solver` now prompts the user to run `stack install cabal-install` * `stack build` now succeeds in building packages which contain sublibraries which are dependencies of executables, tests or benchmarks but not of the main library. See [#3787](https://github.com/commercialhaskell/stack/issues/3787). * Sublibraries are now properly considered for coverage reports when the test suite depends on the internal library. Before, Stack was erroring when trying to generate the coverage report, see [#4105](https://github.com/commercialhaskell/stack/issues/4105). * Sublibraries are now added to the precompiled cache and recovered from there when the snapshot gets updated. 
Previously, updating the snapshot when there was a package with a sublibrary in the snapshot resulted in broken builds. This is now fixed, see [#4071](https://github.com/commercialhaskell/stack/issues/4071). * [#4114](https://github.com/commercialhaskell/stack/issues/4114) Stack pretty prints error messages with proper `error` logging level instead of `warning` now. This also fixes self-executing scripts not piping plan construction errors from runhaskell to terminal (issue [#3942](https://github.com/commercialhaskell/stack/issues/3942)). * Fix invalid "While building Setup.hs" when Cabal calls fail. See: [#3934](https://github.com/commercialhaskell/stack/issues/3934) * `stack upload` signs the package by default, as documented. `--no-signature` turns the signing off. [#3739](https://github.com/commercialhaskell/stack/issues/3739) ## v1.7.1 - 2018-04-27 Release notes: * aarch64 (64-bit ARM) bindists are now available for the first time. * Statically linked Linux bindists are no longer available, due to difficulty with GHC 8.2.2 on Alpine Linux. * 32-bit Linux GMP4 bindists for CentOS 6 are no longer available, since GHC 8.2.2 is no longer being built for that platform. Major changes: * Upgrade from Cabal 2.0 to Cabal 2.2 Behavior changes: * `stack setup` no longer uses different GHC configure options on Linux distributions that use GCC with PIE enabled by default. GHC detects this itself since ghc-8.0.2, and Stack's attempted workaround for older versions caused more problems than it solved. * `stack new` no longer initializes a project if the project template contains a `stack.yaml` file. Other enhancements: * A new sub command `ls` has been introduced to Stack to view local and remote snapshots present in the system. Use `stack ls snapshots --help` to get more details about it. * `list-dependencies` has been deprecated. The functionality has to accessed through the new `ls dependencies` interface. 
See [#3669](https://github.com/commercialhaskell/stack/issues/3669) for details. * Specify User-Agent HTTP request header on every HTTP request. See [#3628](https://github.com/commercialhaskell/stack/issues/3628) for details. * `stack setup` looks for GHC bindists and installations by any OS key that is compatible (rather than only checking a single one). This is relevant on Linux where different distributions may have different combinations of libtinfo 5/6, ncurses 5/6, and gmp 4/5, and will allow simplifying the setup-info metadata YAML for future GHC releases. * The build progress bar reports names of packages currently building. * `stack setup --verbose` causes verbose output of GHC configure process. See [#3716](https://github.com/commercialhaskell/stack/issues/3716) * Improve the error message when an `extra-dep` from a path or git reference can't be found. See [#3808](https://github.com/commercialhaskell/stack/pull/3808) * Nix integration is now disabled on windows even if explicitly enabled, since it isn't supported. See [#3600](https://github.com/commercialhaskell/stack/issues/3600) * `stack build` now supports a new flag `--keep-tmp-files` to retain intermediate files and directories for the purpose of debugging. It is best used with ghc's equivalent flag, i.e. `stack build --keep-tmp-files --ghc-options=-keep-tmp-files`. See [#3857](https://github.com/commercialhaskell/stack/issues/3857) * Improved error messages for snapshot parse exceptions * `stack unpack` now supports a `--to /target/directory` option to specify where to unpack the package into * `stack hoogle` now supports a new flag `--server` that launches local Hoogle server on port 8080. See [#2310](https://github.com/commercialhaskell/stack/issues/2310) Bug fixes: * The script interpreter's implicit file arguments are now passed before other arguments. See [#3658](https://github.com/commercialhaskell/stack/issues/3658). In particular, this makes it possible to pass `-- +RTS ... 
-RTS` to specify RTS arguments used when running the script. * Don't ignore the template `year` parameter in config files, and clarify the surrounding documentation. See [#2275](https://github.com/commercialhaskell/stack/issues/2275). * Benchmarks used to be run concurrently with other benchmarks and build steps. This is non-ideal because CPU usage of other processes may interfere with benchmarks. It also prevented benchmark output from being displayed by default. This is now fixed. See [#3663](https://github.com/commercialhaskell/stack/issues/3663). * `stack ghci` now allows loading multiple packages with the same module name, as long as they have the same filepath. See [#3776](https://github.com/commercialhaskell/stack/pull/3776). * `stack ghci` no longer always adds a dependency on `base`. It is now only added when there are no local targets. This allows it to be to load code that uses replacements for `base`. See [#3589](https://github.com/commercialhaskell/stack/issues/3589#issuecomment) * `stack ghci` now uses correct paths for autogen files with [#3791](https://github.com/commercialhaskell/stack/issues/3791) * When a package contained sublibraries, Stack was always recompiling the package. This has been fixed now, no recompilation is being done because of sublibraries. See [#3899](https://github.com/commercialhaskell/stack/issues/3899). * The `get-stack.sh` install script now matches manual instructions when it comes to Debian/Fedora/CentOS install dependencies. * Compile Cabal-simple with gmp when using Nix. See [#2944](https://github.com/commercialhaskell/stack/issues/2944) * `stack ghci` now replaces the Stack process with ghci. This improves signal handling behavior. In particular, handling of Ctrl-C. To make this possible, the generated files are now left behind after exit. The paths are based on hashing file contents, and it's stored in the system temporary directory, so this shouldn't result in too much garbage. 
See [#3821](https://github.com/commercialhaskell/stack/issues/3821). ## v1.6.5 - 2018-02-19 Bug fixes: * Some unnecessary rebuilds when no files were changed are now avoided, by having a separate build cache for each component of a package. See [#3732](https://github.com/commercialhaskell/stack/issues/3732). * Correct the behavior of promoting a package from snapshot to local package. This would get triggered when version bounds conflicted in a snapshot, which could be triggered via Hackage revisions for old packages. This also should allow custom snapshots to define conflicting versions of packages without issue. See [Stackage issue #3185](https://github.com/fpco/stackage/issues/3185). * When promoting packages from snapshot to local, we were occasionally discarding the actual package location content and instead defaulting to pulling the package from the index. We now correctly retain this information. Note that if you were affected by this bug, you will likely need to delete the binary build cache associated with the relevant custom snapshot. See [#3714](https://github.com/commercialhaskell/stack/issues/3714). * `--no-rerun-tests` has been fixed. Previously, after running a test we were forgetting to record the result, which meant that all tests always ran even if they had already passed before. See [#3770](https://github.com/commercialhaskell/stack/pull/3770). * Includes a patched version of `hackage-security` which fixes both some issues around asynchronous exception handling, and moves from directory locking to file locking, making the update mechanism resilient against SIGKILL and machine failure. See `hackage-security` issue [#187](https://github.com/haskell/hackage-security/issues/187) and Stack issue [#3073](https://github.com/commercialhaskell/stack/issues/3073). ## v1.6.3.1 - 2018-02-16 Hackage-only release with no user facing changes (updated to build with newer version of Hpack dependency). 
## v1.6.3 - 2017-12-23 Enhancements: * In addition to supporting `.tar.gz` and `.zip` files as remote archives, plain `.tar` files are now accepted too. This will additionally help with cases where HTTP servers mistakenly set the transfer encoding to `gzip`. See [#3647](https://github.com/commercialhaskell/stack/issues/3647). * Links to docs.haskellstack.org ignore Stack version patchlevel. * Downloading Docker-compatible `stack` binary ignores Stack version patchlevel. Bug fixes: * For versions of Cabal before 1.24, ensure that the dependencies of non-buildable components are part of the build plan to work around an old Cabal bug. See [#3631](https://github.com/commercialhaskell/stack/issues/3631). * Run the Cabal file checking in the `sdist` command more reliably by allowing the Cabal library to flatten the `GenericPackageDescription` itself. ## v1.6.1.1 - 2017-12-20 Hackage-only release with no user facing changes (updated to build with newer dependency versions). ## v1.6.1 - 2017-12-07 Major changes: * Complete overhaul of how snapshots are defined, the `packages` and `extra-deps` fields, and a number of related items. For full details, please see the [writeup](https://www.fpcomplete.com/blog/2017/07/stacks-new-extensible-snapshots) on these changes. [PR #3249](https://github.com/commercialhaskell/stack/pull/3249), see the PR description for a number of related issues. * Upgraded to version 2.0 of the Cabal library. Behavior changes: * The `--install-ghc` flag is now on by default. For example, if you run `stack build` in a directory requiring a GHC that you do not currently have, Stack will automatically download and install that GHC. You can explicitly set `install-ghc: false` or pass the flag `--no-install-ghc` to regain the previous behavior. * `stack ghci` no longer loads modules grouped by package. This is always an improvement for plain ghci - it makes loading faster and less noisy. 
For intero, this has the side-effect that it will no longer load multiple packages that depend on TH loading relative paths. TH relative paths will still work when loading a single package into intero. See [#3309](https://github.com/commercialhaskell/stack/issues/3309) * Setting GHC options for a package via `ghc-options:` in your `stack.yaml` will promote it to a local package, providing for more consistency with flags and better reproducibility. See: [#849](https://github.com/commercialhaskell/stack/issues/849) * The `package-indices` setting with Hackage no longer works with the `00-index.tar.gz` tarball, but must use the `01-index.tar.gz` file to allow revised packages to be found. * Options passed via `--ghci-options` are now passed to the end of the invocation of ghci, instead of the middle. This allows using `+RTS` without an accompanying `-RTS`. * When auto-detecting `--ghc-build`, `tinfo6` is now preferred over `standard` if both versions of libtinfo are installed * Addition of `stack build --copy-compiler-tool`, to allow tools like intero to be installed globally for a particular compiler. [#2643](https://github.com/commercialhaskell/stack/issues/2643) * Stack will ask before saving hackage credentials to file. This new prompt can be avoided by using the `save-hackage-creds` setting. Please see [#2159](https://github.com/commercialhaskell/stack/issues/2159). * The `GHCRTS` environment variable will no longer be passed through to every program Stack runs. Instead, it will only be passed through commands like `exec`, `runghc`, `script`, `ghci`, etc. See [#3444](https://github.com/commercialhaskell/stack/issues/3444). * `ghc-options:` for specific packages will now come after the options specified for all packages / particular sets of packages. See [#3573](https://github.com/commercialhaskell/stack/issues/3573). * The `pvp-bounds` feature is no longer fully functional, due to some issues with the Cabal library's printer. 
See [#3550](https://github.com/commercialhaskell/stack/issues/3550). Other enhancements: * The `with-hpack` configuration option specifies an Hpack executable to use instead of the Hpack bundled with Stack. Please see [#3179](https://github.com/commercialhaskell/stack/issues/3179). * It's now possible to skip tests and benchmarks using `--skip` flag * `GitSHA1` is now `StaticSHA256` and is implemented using the `StaticSize 64 ByteString` for improved performance. See [#3006](https://github.com/commercialhaskell/stack/issues/3006) * Dependencies via HTTP(S) archives have been generalized to allow local file path archives, as well as to support setting a cryptographic hash (SHA256) of the contents for better reproducibility. * Allow specifying `--git-branch` when upgrading * When running `stack upgrade` from a file which is different from the default executable path (e.g., on POSIX systems, `~/.local/bin/stack`), it will now additionally copy the new executable over the currently running `stack` executable. If permission is denied (such as in `/usr/local/bin/stack`), the user will be prompted to try again using `sudo`. This is intended to assist with the user experience when the `PATH` environment variable has not been properly configured, see [#3232](https://github.com/commercialhaskell/stack/issues/3232). * `stack setup` for ghcjs will now install `alex` and `happy` if they are not present. See [#3109](https://github.com/commercialhaskell/stack/issues/3232). * Added `stack ghci --only-main` flag, to skip loading / importing all but main modules. See the ghci documentation page for further info. * Allow GHC's colored output to show through. GHC colors output starting with version 8.2.1, for older GHC this does nothing. Sometimes GHC's heuristics would work fine even before this change, for example in `stack ghci`, but this override's GHC's heuristics when they're broken by our collecting and processing GHC's output. 
* Extended the `ghc-options` field to support `$locals`, `$targets`, and `$everything`. See: [#3329](https://github.com/commercialhaskell/stack/issues/3329) * Better error message for case that `stack ghci` file targets are combined with invalid package targets. See: [#3342](https://github.com/commercialhaskell/stack/issues/3342) * For profiling now uses `-fprof-auto -fprof-cafs` instead of the deprecated `-auto-all -caf-all`. See: [#3360](https://github.com/commercialhaskell/stack/issues/3360) * Better descriptions are now available for `stack upgrade --help`. See: [#3070](https://github.com/commercialhaskell/stack/issues/3070) * When using Nix, nix-shell now depends always on gcc to prevent build errors when using the FFI. As ghc depends on gcc anyway, this doesn't increase the dependency footprint. * `--cwd DIR` can now be passed to `stack exec` in order to execute the program in a different directory. See: [#3264](https://github.com/commercialhaskell/stack/issues/3264) * Plan construction will detect if you add an executable-only package as a library dependency, resulting in much clearer error messages. See: [#2195](https://github.com/commercialhaskell/stack/issues/2195). * Addition of `--ghc-options` to `stack script` to pass options directly to GHC. See: [#3454](https://github.com/commercialhaskell/stack/issues/3454) * Add Hpack `package.yaml` to build Stack itself * Add `ignore-revision-mismatch` setting. See: [#3520](https://github.com/commercialhaskell/stack/issues/3520). * Log when each individual test suite finishes. See: [#3552](https://github.com/commercialhaskell/stack/issues/3552). * Avoid spurious rebuilds when using `--file-watch` by not watching files for executable, test and benchmark components that aren't a target. See: [#3483](https://github.com/commercialhaskell/stack/issues/3483). * Stack will now try to detect the width of the running terminal (only on POSIX for the moment) and use that to better display output messages. 
Work is ongoing, so some messages will not be optimal yet. The terminal width can be overridden with the new `--terminal-width` command-line option (this works even on non-POSIX). * Passing non local packages as targets to `stack ghci` will now cause them to be used as `-package` args along with package hiding. * Detect when user changed Cabal file instead of `package.yaml`. This was implemented upstream in Hpack. See [#3383](https://github.com/commercialhaskell/stack/issues/3383). * Automatically run `autoreconf -i` as necessary when a `configure` script is missing. See [#3534](https://github.com/commercialhaskell/stack/issues/3534) * GHC bindists can now be identified by their SHA256 checksum in addition to their SHA1 checksum, allowing for more security in download. * For filesystem setup-info paths, it's no longer assumed that the directory is writable, instead a temp dir is used. See [#3188](https://github.com/commercialhaskell/stack/issues/3188). Bug fixes: * `stack hoogle` correctly generates Hoogle databases. See: [#3362](https://github.com/commercialhaskell/stack/issues/3362) * `stack --docker-help` is now clearer about --docker implying system-ghc: true, rather than both --docker and --no-docker. * `stack haddock` now includes package names for all modules in the Haddock index page. See: [#2886](https://github.com/commercialhaskell/stack/issues/2886) * Fixed an issue where Stack wouldn't detect missing Docker images properly with newer Docker versions. [#3171](https://github.com/commercialhaskell/stack/pull/3171) * Previously, Cabal files with just test-suite could cause build to fail ([#2862](https://github.com/commercialhaskell/stack/issues/2862)) * If an invalid snapshot file has been detected (usually due to mismatched hashes), Stack will delete the downloaded file and recommend either retrying or filing an issue upstream. See [#3319](https://github.com/commercialhaskell/stack/issues/3319). 
* Modified the flag parser within Stack to match the behavior of Cabal's flag parser, which allows multiple sequential dashes. See [#3345](https://github.com/commercialhaskell/stack/issues/3345) * Now clears the hackage index cache if it is older than the downloaded index. Fixes potential issue if Stack was interrupted when updating index. See [#3033](https://github.com/commercialhaskell/stack/issues/3033) * The Stack install script now respects the `-d` option. See [#3366](https://github.com/commercialhaskell/stack/pull/3366). * `stack script` can now handle relative paths to source files. See [#3372](https://github.com/commercialhaskell/stack/issues/3372). * Fixes explanation of why a target is needed by the build plan, when the target is an extra-dep from the commandline. See [#3378](https://github.com/commercialhaskell/stack/issues/3378). * Previously, if you delete a yaml file from ~/.stack/build-plan, it would trust the etag and not re-download. Fixed in this version. * Invoking `stack --docker` in parallel now correctly locks the sqlite database. See [#3400](https://github.com/commercialhaskell/stack/issues/3400). * docs.haskellstack.org RTD documentation search is replaced by the mkdocs search. Please see [#3376](https://github.com/commercialhaskell/stack/issues/3376). * `stack clean` now works with nix. See [#3468](https://github.com/commercialhaskell/stack/issues/3376). * `stack build --only-dependencies` no longer builds local project packages that are depended on. See [#3476](https://github.com/commercialhaskell/stack/issues/3476). * Properly handle relative paths stored in the precompiled cache files. See [#3431](https://github.com/commercialhaskell/stack/issues/3431). * In some cases, Cabal does not realize that it needs to reconfigure, and must be told to do so automatically. This would manifest as a "shadowed dependency" error message. We now force a reconfigure whenever a dependency is built, even if the package ID remained the same. 
See [#2781](https://github.com/commercialhaskell/stack/issues/2781). * When `--pvp-bounds` is enabled for sdist or upload, internal dependencies could cause errors when uploaded to hackage. This is fixed, see [#3290](https://github.com/commercialhaskell/stack/issues/3290) * Fixes a bug where nonexistent hackage versions would cause Stack to suggest the same package name, without giving version info. See [#3562](https://github.com/commercialhaskell/stack/issues/3562) * Fixes a bug that has existed since 1.5.0, where `stack setup --upgrade-cabal` would say that Cabal is already the latest version, when it wasn't. * Ensure that an `extra-dep` from a local directory is not treated as a `$locals` for GHC options purposes. See [#3574](https://github.com/commercialhaskell/stack/issues/3574). * Building all executables only happens once instead of every time. See [#3229](https://github.com/commercialhaskell/stack/issues/3229) for more info. ## 1.5.1 - 2017-08-05 Bug fixes: * Stack eagerly tries to parse all Cabal files related to a snapshot. Starting with Stackage Nightly 2017-07-31, snapshots are using GHC 8.2.1, and the `ghc.cabal` file implicitly referenced uses the (not yet supported) Cabal 2.0 file format. Future releases of Stack will both be less eager about Cabal file parsing and support Cabal 2.0. This patch simply bypasses the error for invalid parsing. ## 1.5.0 - 2017-07-25 Behavior changes: * `stack profile` and `stack trace` now add their extra RTS arguments for benchmarks and tests to the beginning of the args, instead of the end. See [#2399](https://github.com/commercialhaskell/stack/issues/2399) * Support for Git-based indices has been removed. Other enhancements: * `stack setup` allow to control options passed to ghcjs-boot with `--ghcjs-boot-options` (one word at a time) and `--[no-]ghcjs-boot-clean` * `stack setup` now accepts a `--install-cabal VERSION` option which will install a specific version of the Cabal library globally. 
* Updates to store-0.4.1, which has improved performance and better error reporting for version tags. A side-effect of this is that all of stack's binary caches will be invalidated. * `stack solver` will now warn about unexpected `cabal-install` versions. See [#3044](https://github.com/commercialhaskell/stack/issues/3044) * Upstream packages unpacked to a temp dir are now deleted as soon as possible to avoid running out of space in `/tmp`. See [#3018](https://github.com/commercialhaskell/stack/issues/3018) * Add short synonyms for `test-arguments` and `benchmark-arguments` options. * Adds `STACK_WORK` environment variable, to specify work dir. See [#3063](https://github.com/commercialhaskell/stack/issues/3063) * Can now use relative paths for `extra-include-dirs` and `extra-lib-dirs`. See [#2830](https://github.com/commercialhaskell/stack/issues/2830) * Improved bash completion for many options, including `--ghc-options`, `--flag`, targets, and project executables for `exec`. * `--haddock-arguments` is actually used now when `haddock` is invoked during documentation generation. * `--[no-]haddock-hyperlink-source` flag added which allows toggling of sources being included in Haddock output. See [#3099](https://github.com/commercialhaskell/stack/issues/3099) * `stack ghci` will now skip building all local targets, even if they have downstream deps, as long as it's registered in the DB. * The `pvp-bounds` feature now supports adding `-revision` to the end of each value, e.g. `pvp-bounds: both-revision`. This means that, when uploading to Hackage, Stack will first upload your tarball with an unmodified Cabal file, and then upload a Cabal file revision with the PVP bounds added. This can be useful - especially combined with the [Stackage no-revisions feature](http://www.snoyman.com/blog/2017/04/stackages-no-revisions-field) - as a method to ensure PVP compliance without having to proactively fix bounds issues for Stackage maintenance. 
* Expose a `save-hackage-creds` configuration option * On GHC <= 7.8, filters out spurious linker warnings on windows See [#3127](https://github.com/commercialhaskell/stack/pull/3127) * Better error messages when creating or building packages which alias wired-in packages. See [#3172](https://github.com/commercialhaskell/stack/issues/3172). * MinGW bin folder now is searched for dynamic libraries. See [#3126](https://github.com/commercialhaskell/stack/issues/3126) * When using Nix, nix-shell now depends always on git to prevent runtime errors while fetching metadata * Experimental: The `stack unpack` command now accepts a form where an explicit Hackage revision hash is specified, e.g. `stack unpack foo-1.2.3@gitsha1:deadbeef`. Note that Stack will likely move towards a different hash format in the future. * Binary "stack upgrade" will now warn if the installed executable is not on the PATH or shadowed by another entry. * Allow running tests on tarball created by sdist and upload [#717](https://github.com/commercialhaskell/stack/issues/717). Bug fixes: * Fixes case where `stack build --profile` might not cause executables / tests / benchmarks to be rebuilt. See [#2984](https://github.com/commercialhaskell/stack/issues/2984) * `stack ghci file.hs` now loads the file even if it isn't part of your project. * `stack clean --full` now works when docker is enabled. See [#2010](https://github.com/commercialhaskell/stack/issues/2010) * Fixes an issue where cyclic deps can cause benchmarks or tests to be run before they are built. See [#2153](https://github.com/commercialhaskell/stack/issues/2153) * Fixes `stack build --file-watch` in cases where a directory is removed See [#1838](https://github.com/commercialhaskell/stack/issues/1838) * Fixes `stack dot` and `stack list-dependencies` to use info from the package database for wired-in-packages (ghc, base, etc). 
See [#3084](https://github.com/commercialhaskell/stack/issues/3084) * Fixes `stack --docker build` when user is part of libvirt/libvirtd groups on Ubuntu Yakkety (16.10). See [#3092](https://github.com/commercialhaskell/stack/issues/3092) * Switching a package between extra-dep and local package now forces rebuild (previously it wouldn't if versions were the same). See [#2147](https://github.com/commercialhaskell/stack/issues/2147) * `stack upload` no longer reveals your password when you type it on MinTTY-based Windows shells, such as Cygwin and MSYS2. See [#3142](https://github.com/commercialhaskell/stack/issues/3142) * `stack script`'s import parser will now properly parse files that have Windows-style line endings (CRLF) ## 1.4.0 - 2017-03-15 Release notes: * Docker images: [fpco/stack-full](https://hub.docker.com/r/fpco/stack-full/) and [fpco/stack-run](https://hub.docker.com/r/fpco/stack-run/) are no longer being built for LTS 8.0 and above. [fpco/stack-build](https://hub.docker.com/r/fpco/stack-build/) images continue to be built with a [simplified process](https://github.com/commercialhaskell/stack/tree/master/etc/dockerfiles/stack-build). [#624](https://github.com/commercialhaskell/stack/issues/624) Major changes: * A new command, `script`, has been added, intended to make the script interpreter workflow more reliable, easier to use, and more efficient. This command forces the user to provide a `--resolver` value, ignores all config files for more reproducible results, and optimizes the existing package check to make the common case of all packages already being present much faster. This mode does require that all packages be present in a snapshot, however. [#2805](https://github.com/commercialhaskell/stack/issues/2805) Behavior changes: * The default package metadata backend has been changed from Git to the 01-index.tar.gz file, from the hackage-security project. 
This is intended to address some download speed issues from GitHub for people in certain geographic regions. There is now full support for checking out specific Cabal file revisions from downloaded tarballs as well. If you manually specify a package index with only a Git URL, Git will still be used. See [#2780](https://github.com/commercialhaskell/stack/issues/2780) * When you provide the `--resolver` argument to the `stack unpack` command, any packages passed in by name only will be looked up in the given snapshot instead of taking the latest version. For example, `stack --resolver lts-7.14 unpack mtl` will get version 2.2.1 of `mtl`, regardless of the latest version available in the package indices. This will also force the same Cabal file revision to be used as is specified in the snapshot. Unpacking via a package identifier (e.g. `stack --resolver lts-7.14 unpack mtl-2.2.1`) will ignore any settings in the snapshot and take the most recent revision. For backwards compatibility with tools relying on the presence of a `00-index.tar`, Stack will copy the `01-index.tar` file to `00-index.tar`. Note, however, that these files are different; most importantly, 00-index contains only the newest revisions of Cabal files, while 01-index contains all versions. You may still need to update your tooling. * Passing `--(no-)nix-*` options now no longer implies `--nix`, except for `--nix-pure`, so that the user preference whether or not to use Nix is honored even in the presence of options that change the Nix behavior. Other enhancements: * Internal cleanup: configuration types are now based much more on lenses * `stack build` and related commands now allow the user to disable debug symbol stripping with new `--no-strip`, `--no-library-stripping`, and `--no-executable-shipping` flags, closing [#877](https://github.com/commercialhaskell/stack/issues/877). 
Also turned error message for missing targets more readable ([#2384](https://github.com/commercialhaskell/stack/issues/2384)) * `stack haddock` now shows index.html paths when documentation is already up to date. Resolved [#781](https://github.com/commercialhaskell/stack/issues/781) * Respects the `custom-setup` field introduced in Cabal 1.24. This supercedes any `explicit-setup-deps` settings in your `stack.yaml` and trusts the package's Cabal file to explicitly state all its dependencies. * If system package installation fails, `get-stack.sh` will fail as well. Also shows warning suggesting to run `apt-get update` or similar, depending on the OS. ([#2898](https://github.com/commercialhaskell/stack/issues/2898)) * When `stack ghci` is run with a config with no packages (e.g. global project), it will now look for source files in the current work dir. ([#2878](https://github.com/commercialhaskell/stack/issues/2878)) * Bump to Hpack 0.17.0 to allow `custom-setup` and `!include "..."` in `package.yaml`. * The script interpreter will now output error logging. In particular, this means it will output info about plan construction errors. ([#2879](https://github.com/commercialhaskell/stack/issues/2879)) * `stack ghci` now takes `--flag` and `--ghc-options` again (inadvertently removed in 1.3.0). ([#2986](https://github.com/commercialhaskell/stack/issues/2986)) * `stack exec` now takes `--rts-options` which passes the given arguments inside of `+RTS ... args .. -RTS` to the executable. This works around Stack itself consuming the RTS flags on Windows. ([#2640](https://github.com/commercialhaskell/stack/issues/2640)) * Upgraded `http-client-tls` version, which now offers support for the `socks5://` and `socks5h://` values in the `http_proxy` and `https_proxy` environment variables. Bug fixes: * Bump to Hpack 0.16.0 to avoid character encoding issues when reading and writing on non-UTF8 systems. * `stack ghci` will no longer ignore hsSourceDirs that contain `..`. 
([#2895](https://github.com/commercialhaskell/stack/issues/2895)) * `stack list-dependencies --license` now works for wired-in-packages, like base. ([#2871](https://github.com/commercialhaskell/stack/issues/2871)) * `stack setup` now correctly indicates when it uses system ghc ([#2963](https://github.com/commercialhaskell/stack/issues/2963)) * Fix to `stack config set`, in 1.3.2 it always applied to the global project. ([#2709](https://github.com/commercialhaskell/stack/issues/2709)) * Previously, Cabal files without exe or lib would fail on the "copy" step. ([#2862](https://github.com/commercialhaskell/stack/issues/2862)) * `stack upgrade --git` now works properly. Workaround for affected versions (>= 1.3.0) is to instead run `stack upgrade --git --source-only`. ([#2977](https://github.com/commercialhaskell/stack/issues/2977)) * Added support for GHC 8's slightly different warning format for dumping warnings from logs. * Work around a bug in Cabal/GHC in which package IDs are not unique for different source code, leading to Stack not always rebuilding packages depending on local packages which have changed. ([#2904](https://github.com/commercialhaskell/stack/issues/2904)) ## 1.3.2 - 2016-12-27 Bug fixes: * `stack config set` can now be used without a compiler installed [#2852](https://github.com/commercialhaskell/stack/issues/2852). * `get-stack.sh` now installs correct binary on ARM for generic linux and raspbian, closing [#2856](https://github.com/commercialhaskell/stack/issues/2856). * Correct the testing of whether a package database exists by checking for the `package.cache` file itself instead of the containing directory. * Revert a change in the previous release which made it impossible to set local extra-dep packages as targets. This was overkill; we really only wanted to disable their test suites, which was already handled by a later patch. 
[#2849](https://github.com/commercialhaskell/stack/issues/2849) * `stack new` always treats templates as being UTF-8 encoding, ignoring locale settings on a local machine. See [Yesod mailing list discussion](https://groups.google.com/d/msg/yesodweb/ZyWLsJOtY0c/aejf9E7rCAAJ) ## 1.3.0 - 2016-12-12 Release notes: * For the _next_ Stack release after this one, we are planning changes to our Linux releases, including dropping our Ubuntu, Debian, CentOS, and Fedora package repositories and switching to statically linked binaries. See [#2534](https://github.com/commercialhaskell/stack/issues/2534). Note that upgrading without a package manager has gotten easier with new binary upgrade support in `stack upgrade` (see the Major Changes section below for more information). In addition, the get.haskellstack.org script no longer installs from Ubuntu, Debian, CentOS, or Fedora package repositories. Instead it places a generic binary in /usr/local/bin. Major changes: * Stack will now always use its own GHC installation, even when a suitable GHC installation is available on the PATH. To get the old behaviour, use the `--system-ghc` flag or run `stack config set system-ghc --global true`. Docker- and Nix-enabled projects continue to use the GHC installations in their environment by default. NB: Scripts that previously used Stack in combination with a system GHC installation should now include a `stack setup` line or use the `--install-ghc` flag. [#2221](https://github.com/commercialhaskell/stack/issues/2221) * `stack ghci` now defaults to skipping the build of target packages, because support has been added for invoking "initial build steps", which create autogen files and run preprocessors. The `--no-build` flag is now deprecated because it should no longer be necessary. See [#1364](https://github.com/commercialhaskell/stack/issues/1364) * Stack is now capable of doing binary upgrades instead of always recompiling a new version from source. 
Running `stack upgrade` will now default to downloading a binary version of Stack from the most recent release, if one is available. See `stack upgrade --help` for more options. [#1238](https://github.com/commercialhaskell/stack/issues/1238) Behavior changes: * Passing `--resolver X` with a Stack command which forces creation of a global project config, will pass resolver X into the initial config. See [#2579](https://github.com/commercialhaskell/stack/issues/2229). * Switch the "Run from outside project" messages to debug-level, to avoid spamming users in the normal case of non-project usage * If a remote package is specified (such as a Git repo) without an explicit `extra-dep` setting, a warning is given to the user to provide one explicitly. Other enhancements: * `stack haddock` now supports `--haddock-internal`. See [#2229](https://github.com/commercialhaskell/stack/issues/2229) * Add support for `system-ghc` and `install-ghc` fields to `stack config set` command. * Add `ghc-build` option to override autodetected GHC build to use (e.g. gmp4, tinfo6, nopie) on Linux. * `stack setup` detects systems where gcc enables PIE by default (such as Ubuntu 16.10 and Hardened Gentoo) and adjusts the GHC `configure` options accordingly. [#2542](https://github.com/commercialhaskell/stack/issues/2542) * Upload to Hackage with HTTP digest instead of HTTP basic. * Make `stack list-dependencies` understand all of the `stack dot` options too. * Add the ability for `stack list-dependencies` to list dependency licenses by passing the `--license` flag. * Dump logs that contain warnings for any local non-dependency packages [#2545](https://github.com/commercialhaskell/stack/issues/2545) * Add the `dump-logs` config option and `--dump-logs` command line option to get full build output on the console. [#426](https://github.com/commercialhaskell/stack/issues/426) * Add the `--open` option to "stack hpc report" command, causing the report to be opened in the browser. 
* The `stack config set` command now accepts a `--global` flag for suitable fields which causes it to modify the global user configuration (`~/.stack/config.yaml`) instead of the project configuration. [#2675](https://github.com/commercialhaskell/stack/pull/2675) * Information on the latest available snapshots is now downloaded from S3 instead of stackage.org, increasing reliability in case of stackage.org outages. [#2653](https://github.com/commercialhaskell/stack/pull/2653) * `stack dot` and `stack list-dependencies` now take targets and flags. [#1919](https://github.com/commercialhaskell/stack/issues/1919) * Deprecate `stack setup --stack-setup-yaml` for `--setup-info-yaml` based on discussion in [#2647](https://github.com/commercialhaskell/stack/issues/2647). * The `--main-is` flag for GHCI now implies the TARGET, fixing [#1845](https://github.com/commercialhaskell/stack/issues/1845). * `stack ghci` no longer takes all build options, as many weren't useful [#2199](https://github.com/commercialhaskell/stack/issues/2199) * `--no-time-in-log` option, to make verbose logs more diffable [#2727](https://github.com/commercialhaskell/stack/issues/2727) * `--color` option added to override auto-detection of ANSI support [#2725](https://github.com/commercialhaskell/stack/issues/2725) * Missing extra-deps are now warned about, adding a degree of typo detection [#1521](https://github.com/commercialhaskell/stack/issues/1521) * No longer warns about missing build-tools if they are on the PATH. [#2235](https://github.com/commercialhaskell/stack/issues/2235) * Replace enclosed-exceptions with safe-exceptions. [#2768](https://github.com/commercialhaskell/stack/issues/2768) * The install location for GHC and other programs can now be configured with the `local-programs-path` option in `config.yaml`. 
[#1644](https://github.com/commercialhaskell/stack/issues/1644) * Added option to add nix dependencies as nix GC roots * Proper pid 1 (init) process for `stack exec` with Docker * Dump build logs if they contain warnings. [#2545](https://github.com/commercialhaskell/stack/issues/2545) * Docker: redirect stdout of `docker pull` to stderr so that it will not interfere with output of other commands. * Nix & docker can be activated at the same time, in order to run Stack in a nix-shell in a container, preferably from an image already containing the nix dependencies in its /nix/store * Stack/nix: Dependencies can be added as nix GC roots, so they are not removed when running `nix-collect-garbage` Bug fixes: * Fixed a gnarly bug where programs and package tarballs sometimes have corrupted downloads. See [#2657](https://github.com/commercialhaskell/stack/issues/2568). * Add proper support for non-ASCII characters in file paths for the `sdist` command. See [#2549](https://github.com/commercialhaskell/stack/issues/2549) * Never treat `extra-dep` local packages as targets. This ensures things like test suites are not run for these packages, and that build output is not hidden due to their presence. * Fix a resource leak in `sinkProcessStderrStdout` which could affect much of the codebase, in particular copying precompiled packages. [#1979](https://github.com/commercialhaskell/stack/issues/1979) * Docker: ensure that interrupted extraction process does not cause corrupt file when downloading a Docker-compatible Stack executable [#2568](https://github.com/commercialhaskell/stack/issues/2568) * Fixed running `stack hpc report` on package targets. [#2664](https://github.com/commercialhaskell/stack/issues/2664) * Fix a long-standing performance regression where Stack would parse the `.dump-hi` files of the library components of local packages twice. 
[#2658](https://github.com/commercialhaskell/stack/pull/2658) * Fixed a regression in "stack ghci --no-load", where it would prompt for a main module to load. [#2603](https://github.com/commercialhaskell/stack/pull/2603) * Build Setup.hs files with the threaded RTS, mirroring the behavior of `cabal-install` and enabling more complex build systems in those files. * Fixed a bug in passing along `--ghc-options` to ghcjs. They were being provided as `--ghc-options` to Cabal, when it needs to be `--ghcjs-options`. [#2714](https://github.com/commercialhaskell/stack/issues/2714) * Launch Docker from the project root regardless of the working directory Stack is invoked from. This means paths relative to the project root (e.g. environment files) can be specified in `stack.yaml`'s docker `run-args`. * `stack setup --reinstall` now behaves as expected. [#2554](https://github.com/commercialhaskell/stack/issues/2554) ## 1.2.0 - 2016-09-16 Release notes: * On many Un*x systems, Stack can now be installed with a simple one-liner: wget -qO- https://get.haskellstack.org/ | sh * The fix for [#2175](https://github.com/commercialhaskell/stack/issues/2175) entails that Stack must perform a full clone of a large Git repo of Hackage meta-information. The total download size is about 200 MB. Please be aware of this when upgrading your Stack installation. * If you use Mac OS X, you may want to delay upgrading to macOS Sierra as there are reports of GHC panics when building some packages (including Stack itself). See [#2577](https://github.com/commercialhaskell/stack/issues/2577) * This version of Stack does not build on ARM or PowerPC systems (see [store#37](https://github.com/fpco/store/issues/37)). Please stay with version 1.1.2 for now on those architectures. This will be rectified soon! * We are now releasing a statically linked Stack binary for [64-bit Linux](https://get.haskellstack.org/stable/linux-x86_64-static.tar.gz). 
Please try it and let us know if you run into any trouble on your platform. * We are planning some changes to our Linux releases, including dropping our Ubuntu, Debian, CentOS, and Fedora package repositories and switching to statically linked binaries. We would value your feedback in [#2534](https://github.com/commercialhaskell/stack/issues/2534). Major changes: * Add `stack hoogle` command. [#55](https://github.com/commercialhaskell/stack/issues/55) * Support for absolute file path in `url` field of `setup-info` or `--ghc-bindist` * Add support for rendering GHCi scripts targeting different GHCi like applications [#2457](https://github.com/commercialhaskell/stack/pull/2457) Behavior changes: * Remove `stack ide start` and `stack ide load-targets` commands. [#2178](https://github.com/commercialhaskell/stack/issues/2178) * Support .buildinfo files in `stack ghci`. [#2242](https://github.com/commercialhaskell/stack/pull/2242) * Support -ferror-spans syntax in GHC error messages. * Avoid unpacking ghc to `/tmp` [#996](https://github.com/commercialhaskell/stack/issues/996) * The Linux `gmp4` GHC bindist is no longer considered a full-fledged GHC variant and can no longer be specified using the `ghc-variant` option, and instead is treated more like a slightly different platform. Other enhancements: * Use the `store` package for binary serialization of most caches. * Only require minor version match for Docker Stack exe. This way, we can make patch releases for version bounds and similar build issues without needing to upload new binaries for Docker. * Stack/Nix: Passes the right ghc derivation as an argument to the `shell.nix` when a custom `shell.nix` is used. 
See [#2243](https://github.com/commercialhaskell/stack/issues/2243) * Stack/Nix: Sets `LD_LIBRARY_PATH` so packages using C libs for Template Haskell can work (See _e.g._ [this HaskellR issue](https://github.com/tweag/HaskellR/issues/253)) * Parse CLI arguments and configuration files into less permissive types, improving error messages for bad inputs. [#2267](https://github.com/commercialhaskell/stack/issues/2267) * Add the ability to explicitly specify a gcc executable. [#593](https://github.com/commercialhaskell/stack/issues/593) * Nix: No longer uses LTS mirroring in nixpkgs. Gives to nix-shell a derivation like `haskell.compiler.ghc801` See [#2259](https://github.com/commercialhaskell/stack/issues/2259) * Perform some subprocesses during setup concurrently, slightly speeding up most commands. [#2346](https://github.com/commercialhaskell/stack/pull/2346) * `stack setup` no longer unpacks to the system temp dir on posix systems. [#996](https://github.com/commercialhaskell/stack/issues/996) * `stack setup` detects libtinfo6 and ncurses6 and can download alternate GHC bindists [#257](https://github.com/commercialhaskell/stack/issues/257) [#2302](https://github.com/commercialhaskell/stack/issues/2302). * `stack setup` detects Linux ARMv7 downloads appropriate GHC bindist [#2103](https://github.com/commercialhaskell/stack/issues/2103) * Custom `stack` binaries list dependency versions in output for `--version`. See [#2222](https://github.com/commercialhaskell/stack/issues/2222) and [#2450](https://github.com/commercialhaskell/stack/issues/2450). * Use a pretty printer to output dependency resolution errors. [#1912](https://github.com/commercialhaskell/stack/issues/1912) * Remove the `--os` flag [#2227](https://github.com/commercialhaskell/stack/issues/2227) * Add 'netbase' and 'ca-certificates' as dependency for .deb packages. [#2293](https://github.com/commercialhaskell/stack/issues/2293). * Add `stack ide targets` command. 
* Enhance debug logging with subprocess timings. * Pretty-print YAML parse errors [#2374](https://github.com/commercialhaskell/stack/issues/2374) * Clarify confusing `stack setup` output [#2314](https://github.com/commercialhaskell/stack/issues/2314) * Delete `Stack.Types` multimodule to improve build times [#2405](https://github.com/commercialhaskell/stack/issues/2405) * Remove spurious newlines in build logs [#2418](https://github.com/commercialhaskell/stack/issues/2418) * Interpreter: Provide a way to hide implicit packages [#1208](https://github.com/commercialhaskell/stack/issues/1208) * Check executability in exec lookup [#2489](https://github.com/commercialhaskell/stack/issues/2489) Bug fixes: * Fix Cabal warning about use of a deprecated Cabal flag [#2350](https://github.com/commercialhaskell/stack/issues/2350) * Support most executable extensions on Windows [#2225](https://github.com/commercialhaskell/stack/issues/2225) * Detect resolver change in `stack solver` [#2252](https://github.com/commercialhaskell/stack/issues/2252) * Fix a bug in docker image creation where the wrong base image was selected [#2376](https://github.com/commercialhaskell/stack/issues/2376) * Ignore special entries when unpacking tarballs [#2361](https://github.com/commercialhaskell/stack/issues/2361) * Fixes src directory pollution of `style.css` and `highlight.js` with GHC 8's haddock [#2429](https://github.com/commercialhaskell/stack/issues/2429) * Handle filepaths with spaces in `stack ghci` [#2266](https://github.com/commercialhaskell/stack/issues/2266) * Apply ghc-options to snapshot packages [#2289](https://github.com/commercialhaskell/stack/issues/2289) * `stack sdist`: Fix timestamp in tarball [#2394](https://github.com/commercialhaskell/stack/pull/2394) * Allow global Stack arguments with a script [#2316](https://github.com/commercialhaskell/stack/issues/2316) * Inconsistency between ToJSON and FromJSON instances of PackageLocation 
[#2412](https://github.com/commercialhaskell/stack/pull/2412) * Perform Unicode normalization on filepaths [#1810](https://github.com/commercialhaskell/stack/issues/1810) * Solver: always keep ghc wired-in as hard constraints [#2453](https://github.com/commercialhaskell/stack/issues/2453) * Support OpenBSD's tar where possible, require GNU tar for xz support [#2283](https://github.com/commercialhaskell/stack/issues/2283) * Fix using --coverage with Cabal-1.24 [#2424](https://github.com/commercialhaskell/stack/issues/2424) * When marking exe installed, remove old version [#2373](https://github.com/commercialhaskell/stack/issues/2373) * Stop truncating `all-cabal-hashes` git repo [#2175](https://github.com/commercialhaskell/stack/issues/2175) * Handle non-ASCII filenames on Windows [#2491](https://github.com/commercialhaskell/stack/issues/2491) * Avoid using multiple versions of a package in script interpreter by passing package-id to ghc/runghc [#1957](https://github.com/commercialhaskell/stack/issues/1957) * Only pre-load compiler version when using nix integration [#2459](https://github.com/commercialhaskell/stack/issues/2459) * Solver: parse Cabal errors also on Windows [#2502](https://github.com/commercialhaskell/stack/issues/2502) * Allow exec and ghci commands in interpreter mode. Scripts can now automatically open in the repl by using `exec ghci` instead of `runghc` in the shebang command. [#2510](https://github.com/commercialhaskell/stack/issues/2510) * Now consider a package to be dirty when an extra-source-file is changed. See [#2040](https://github.com/commercialhaskell/stack/issues/2040) ## 1.1.2 - 2016-05-20 Release notes: * Official FreeBSD binaries are [now available](http://docs.haskellstack.org/en/stable/install_and_upgrade/#freebsd) [#1253](https://github.com/commercialhaskell/stack/issues/1253). Major changes: * Extensible custom snapshots implemented. These allow you to define snapshots which extend other snapshots. 
See [#863](https://github.com/commercialhaskell/stack/issues/863). Local file custom snapshots can now be safely updated without changing their name. Remote custom snapshots should still be treated as immutable. Behavior changes: * `stack path --compiler` was added in the last release, to yield a path to the compiler. Unfortunately, `--compiler` is a global option that is useful to use with `stack path`. The same functionality is now provided by `stack path --compiler-exe`. See [#2123](https://github.com/commercialhaskell/stack/issues/2123) * For packages specified in terms of a git or hg repo, the hash used in the location has changed. This means that existing downloads from older stack versions won't be used. This is a side-effect of the fix to [#2133](https://github.com/commercialhaskell/stack/issues/2133) * `stack upgrade` no longer pays attention to local `stack.yaml` files, just the global config and CLI options. [#1392](https://github.com/commercialhaskell/stack/issues/1392) * `stack ghci` now uses `:add` instead of `:load`, making it potentially work better with user scripts. See [#1888](https://github.com/commercialhaskell/stack/issues/1888) Other enhancements: * Grab Cabal files via Git SHA to avoid regressions from Hackage revisions [#2070](https://github.com/commercialhaskell/stack/pull/2070) * Custom snapshots now support `ghc-options`. * Package git repos are now re-used rather than re-cloned. See [#1620](https://github.com/commercialhaskell/stack/issues/1620) * `DESTDIR` is filtered from environment when installing GHC. See [#1460](https://github.com/commercialhaskell/stack/issues/1460) * `stack haddock` now supports `--haddock-arguments`. See [#2144](https://github.com/commercialhaskell/stack/issues/2144) * Signing: warn if GPG_TTY is not set as per `man gpg-agent` Bug fixes: * Now ignore project config when doing `stack init` or `stack new`. 
See [#2110](https://github.com/commercialhaskell/stack/issues/2110) * Packages specified by git repo can now have submodules. See [#2133](https://github.com/commercialhaskell/stack/issues/2133) * Fix of hackage index fetch retry. See re-opening of [#1418](https://github.com/commercialhaskell/stack/issues/1418#issuecomment-217633843) * HPack now picks up changes to filesystem other than package.yaml. See [#2051](https://github.com/commercialhaskell/stack/issues/2051) * "stack solver" no longer suggests --omit-packages. See [#2031](https://github.com/commercialhaskell/stack/issues/2031) * Fixed an issue with building Cabal's Setup.hs. See [#1356](https://github.com/commercialhaskell/stack/issues/1356) * Package dirtiness now pays attention to deleted files. See [#1841](https://github.com/commercialhaskell/stack/issues/1841) * `stack ghci` now uses `extra-lib-dirs` and `extra-include-dirs`. See [#1656](https://github.com/commercialhaskell/stack/issues/1656) * Relative paths outside of source dir added via `qAddDependentFile` are now checked for dirtiness. See [#1982](https://github.com/commercialhaskell/stack/issues/1982) * Signing: always use `--with-fingerprints` ## 1.1.0 - 2016-05-04 Release notes: * Added Ubuntu 16.04 LTS (xenial) Apt repo. * No longer uploading new versions to Fedora 21 repo. Behavior changes: * Snapshot packages are no longer built with executable profiling. See [#1179](https://github.com/commercialhaskell/stack/issues/1179). * `stack init` now ignores symlinks when searching for Cabal files. It also now ignores any directory that begins with `.` (as well as `dist` dirs) - before it would only ignore `.git`, `.stack-work`, and `dist`. * The Stack executable is no longer built with `-rtsopts`. Before, when `-rtsopts` was enabled, Stack would process `+RTS` options even when intended for some other program, such as when used with `stack exec -- prog +RTS`. See [#2022](https://github.com/commercialhaskell/stack/issues/2022). 
* The `stack path --ghc-paths` option is deprecated and renamed to `--programs`. `--compiler` is added, which points directly at the compiler used in the current project. `--compiler-bin` points to the compiler's bin dir. * For consistency with the `$STACK_ROOT` environment variable, the `stack path --global-stack-root` flag and the `global-stack-root` field in the output of `stack path` are being deprecated and replaced with the `stack-root` flag and output field. Additionally, the Stack root can now be specified via the `--stack-root` command-line flag. See [#1148](https://github.com/commercialhaskell/stack/issues/1148). * `stack sig` GPG-related sub-commands were removed (folded into `upload` and `sdist`) * GPG signing of packages while uploading to Hackage is now the default. Use `upload --no-signature` if you would rather not contribute your package signature. If you don't yet have a GPG keyset, read this [blog post on GPG keys](https://fpcomplete.com/blog/2016/05/stack-security-gnupg-keys). We can add a `stack.yaml` config setting to disable signing if some people desire it. We hope that people will sign. Later we will be adding GPG signature verification options. * `stack build pkg-1.2.3` will now build even if the snapshot has a different package version - it is treated as an extra-dep. `stack build local-pkg-1.2.3` is an error even if the version number matches the local package [#2028](https://github.com/commercialhaskell/stack/issues/2028). * Having a `nix:` section no longer implies enabling nix build. This allows the user to globally configure whether nix is used (unless the project overrides the default explicitly). See [#1924](https://github.com/commercialhaskell/stack/issues/1924). * Remove deprecated valid-wanted field. * Docker: mount home directory in container [#1949](https://github.com/commercialhaskell/stack/issues/1949). * Deprecate `stack path --local-bin-path`; instead use `--local-bin`. 
* `stack image`: allow absolute source paths for `add`. Other enhancements: * `stack haddock --open [PACKAGE]` opens the local haddocks in the browser. * Fix too much rebuilding when enabling/disabling profiling flags. * `stack build pkg-1.0` will now build `pkg-1.0` even if the snapshot specifies a different version (it introduces a temporary extra-dep) * Experimental: Support for `--split-objs` added [#1284](https://github.com/commercialhaskell/stack/issues/1284). * `git` packages with submodules are supported by passing the `--recursive` flag to `git clone`. * When using [Hpack](https://github.com/sol/hpack), only regenerate Cabal files when Hpack files change. * Hpack files can now be used in templates * `stack ghci` now runs ghci as a separate process [#1306](https://github.com/commercialhaskell/stack/issues/1306) * Retry when downloading snapshots and package indices * Many build options are configurable now in `stack.yaml`: ~~~yaml build: library-profiling: true executable-profiling: true haddock: true haddock-deps: true copy-bins: true prefetch: true force-dirty: true keep-going: true test: true test-arguments: rerun-tests: true additional-args: ['-fprof'] coverage: true no-run-tests: true bench: true benchmark-opts: benchmark-arguments: -O2 no-run-benchmarks: true reconfigure: true cabal-verbose: true ~~~ * A number of URLs are now configurable, useful for firewalls. See [#1794](https://github.com/commercialhaskell/stack/issues/1794). * Suggest causes when executables are missing. * Allow `--omit-packages` even without `--solver`. * Improve the generated `stack.yaml`. * Improve ghci results after :load Main module collision with main file path. * Only load the hackage index if necessary [#1883](https://github.com/commercialhaskell/stack/issues/1883), [#1892](https://github.com/commercialhaskell/stack/issues/1892). * init: allow local packages to be deps of deps [#1965](https://github.com/commercialhaskell/stack/issues/1965). 
* Always use full fingerprints from GPG [#1952](https://github.com/commercialhaskell/stack/issues/1952). * Default to using `gpg2` and fall back to `gpg` [#1976](https://github.com/commercialhaskell/stack/issues/1976). * Add a flag for --verbosity silent. * Add `haddock --open` flag [#1396](https://github.com/commercialhaskell/stack/issues/1396). Bug fixes: * Package tarballs would fail to unpack. [#1884](https://github.com/commercialhaskell/stack/issues/1884). * Fixed errant warnings about missing modules, after being deleted and removed from the Cabal file [#921](https://github.com/commercialhaskell/stack/issues/921) [#1805](https://github.com/commercialhaskell/stack/issues/1805). * Now considers a package to be dirty when the Hpack file is changed [#1819](https://github.com/commercialhaskell/stack/issues/1819). * Nix: cancelling a Stack build now exits properly rather than dropping into a nix-shell [#1778](https://github.com/commercialhaskell/stack/issues/1778). * `allow-newer: true` now causes `--exact-configuration` to be passed to Cabal. See [#1579](https://github.com/commercialhaskell/stack/issues/1579). * `stack solver` no longer fails with `InvalidRelFile` for relative package paths including `..`. See [#1954](https://github.com/commercialhaskell/stack/issues/1954). * Ignore emacs lock files when finding Cabal files [#1897](https://github.com/commercialhaskell/stack/issues/1897). * Use lenient UTF-8 decode for build output [#1945](https://github.com/commercialhaskell/stack/issues/1945). * Clear index cache whenever the index is updated [#1962](https://github.com/commercialhaskell/stack/issues/1962). * Fix: Building a container image drops a .stack-work dir in the current working (sub)directory [#1975](https://github.com/commercialhaskell/stack/issues/1975). * Fix: Rebuilding when disabling profiling [#2023](https://github.com/commercialhaskell/stack/issues/2023). 
## 1.0.4.3 - 2016-04-07 Bug fixes: * Don't delete contents of ~/.ssh when using `stack clean --full` with Docker enabled [#2000](https://github.com/commercialhaskell/stack/issues/2000) ## 1.0.4.2 - 2016-03-09 Build with `path-io-1.0.0`. There are no changes in behaviour from 1.0.4, so no binaries are released for this version. ## 1.0.4.1 - 2016-02-21 Fixes build with `aeson-0.11.0.0`. There are no changes in behaviour from 1.0.4, so no binaries are released for this version. ## 1.0.4 - 2016-02-20 Major changes: * Some notable changes in `stack init`: * Overall it should now be able to initialize almost all existing Cabal packages out of the box as long as the package itself is consistently defined. * Choose the best possible snapshot and add extra-deps on top of a snapshot other than a compiler snapshot - [#1583](https://github.com/commercialhaskell/stack/pull/1583) * Automatically omit a package (`--omit-packages`) when it is compiler incompatible or when there are packages with conflicting dependency requirements. See [#1674](https://github.com/commercialhaskell/stack/pull/1674). * Some more changes for a better user experience. Please refer to the doc guide for details. * Add support for Hpack, alternative package description format [#1679](https://github.com/commercialhaskell/stack/issues/1679) Other enhancements: * Docker: pass ~/.ssh and SSH auth socket into container, so that git repos work [#1358](https://github.com/commercialhaskell/stack/issues/1358). * Docker: strip suffix from docker --version. [#1653](https://github.com/commercialhaskell/stack/issues/1653) * Docker: pass USER and PWD environment variables into container. * On each run, Stack will test the Stack root directory (~/.stack), and the project and package work directories (.stack-work) for whether they are owned by the current user and abort if they are not. 
This precaution can be disabled with the `--allow-different-user` flag or `allow-different-user` option in the global config (~/.stack/config.yaml). [#471](https://github.com/commercialhaskell/stack/issues/471) * Added `stack clean --full` option for full working dir cleanup. * YAML config: support Zip archives. * Redownload build plan if parsing fails [#1702](https://github.com/commercialhaskell/stack/issues/1702). * Give mustache templates access to a 'year' tag [#1716](https://github.com/commercialhaskell/stack/pull/1716). * Have "stack ghci" warn about module name aliasing. * Add "stack ghci --load-local-deps". * Build Setup.hs with -rtsopts [#1687](https://github.com/commercialhaskell/stack/issues/1687). * `stack init` accepts a list of directories. * Add flag infos to DependencyPlanFailures (for better error output in case of flags) [#713](https://github.com/commercialhaskell/stack/issues/713) * `stack new --bare` complains for overwrites, and add `--force` option [#1597](https://github.com/commercialhaskell/stack/issues/1597). Bug fixes: * Previously, `stack ghci` would fail with `cannot satisfy -package-id` when the implicit build step changes the package key of some dependency. * Fix: Building with ghcjs: "ghc-pkg: Prelude.chr: bad argument: 2980338" [#1665](https://github.com/commercialhaskell/stack/issues/1665). * Fix running test / bench with `--profile` / `--trace`. * Fix: build progress counter is no longer visible [#1685](https://github.com/commercialhaskell/stack/issues/1685). * Use "-RTS" w/ profiling to allow extra args [#1772](https://github.com/commercialhaskell/stack/issues/1772). * Fix withUnpackedTarball7z to find name of srcDir after unpacking (fixes `stack setup` fails for ghcjs project on windows) [#1774](https://github.com/commercialhaskell/stack/issues/1774). 
* Add space before auto-generated bench opts (makes profiling options work uniformly for applications and benchmark suites) [#1771](https://github.com/commercialhaskell/stack/issues/1771). * Don't try to find plugin if it resembles flag. * Setup.hs changes cause package dirtiness [#1711](https://github.com/commercialhaskell/stack/issues/1711). * Send "stack templates" output to stdout [#1792](https://github.com/commercialhaskell/stack/issues/1792). ## 1.0.2 - 2016-01-18 Release notes: - Arch Linux: Stack has been adopted into the [official community repository](https://www.archlinux.org/packages/community/x86_64/stack/), so we will no longer be updating the AUR with new versions. See the [install/upgrade guide](http://docs.haskellstack.org/en/stable/install_and_upgrade/#arch-linux) for current download instructions. Major changes: - `stack init` and `solver` overhaul [#1583](https://github.com/commercialhaskell/stack/pull/1583) Other enhancements: - Disable locale/codepage hacks when GHC >=7.10.3 [#1552](https://github.com/commercialhaskell/stack/issues/1552) - Specify multiple images to build for `stack image container` [docs](http://docs.haskellstack.org/en/stable/yaml_configuration/#image) - Specify which executables to include in images for `stack image container` [docs](http://docs.haskellstack.org/en/stable/yaml_configuration/#image) - Docker: pass supplementary groups and umask into container - If git fetch fails wipe the directory and try again from scratch [#1418](https://github.com/commercialhaskell/stack/issues/1418) - Warn if newly installed executables won't be available on the PATH [#1362](https://github.com/commercialhaskell/stack/issues/1362) - `stack.yaml`: for `stack image container`, specify multiple images to generate, and which executables should be added to those images - GHCI: add interactive Main selection [#1068](https://github.com/commercialhaskell/stack/issues/1068) - Care less about the particular name of a GHCJS sdist folder 
[#1622](https://github.com/commercialhaskell/stack/issues/1622) - Unified Enable/disable help messaging [#1613](https://github.com/commercialhaskell/stack/issues/1613) Bug fixes: - Don't share precompiled packages between GHC/platform variants and Docker [#1551](https://github.com/commercialhaskell/stack/issues/1551) - Properly redownload corrupted downloads with the correct file size. [Mailing list discussion](https://groups.google.com/d/msg/haskell-stack/iVGDG5OHYxs/FjUrR5JsDQAJ) - Gracefully handle invalid paths in error/warning messages [#1561](https://github.com/commercialhaskell/stack/issues/1561) - Nix: select the correct GHC version corresponding to the snapshot even when an abstract resolver is passed via `--resolver` on the command-line. [#1641](https://github.com/commercialhaskell/stack/issues/1641) - Fix: Stack does not allow using an external package from ghci [#1557](https://github.com/commercialhaskell/stack/issues/1557) - Disable ambiguous global '--resolver' option for 'stack init' [#1531](https://github.com/commercialhaskell/stack/issues/1531) - Obey `--no-nix` flag - Fix: GHCJS Execute.hs: Non-exhaustive patterns in lambda [#1591](https://github.com/commercialhaskell/stack/issues/1591) - Send file-watch and sticky logger messages to stderr [#1302](https://github.com/commercialhaskell/stack/issues/1302) [#1635](https://github.com/commercialhaskell/stack/issues/1635) - Use globaldb path for querying Cabal version [#1647](https://github.com/commercialhaskell/stack/issues/1647) ## 1.0.0 - 2015-12-24 Release notes: * We're calling this version 1.0.0 in preparation for Stackage LTS 4. Note, however, that this does not mean the code's API will be stable as this is primarily an end-user tool. Enhancements: * Added flag `--profile` flag: passed with `stack build`, it will enable profiling, and for `--bench` and `--test` it will generate a profiling report by passing `+RTS -p` to the executable(s). 
Great for using like `stack build --bench --profile` (remember that enabling profile will slow down your benchmarks by >4x). Run `stack build --bench` again to disable the profiling and get proper speeds * Added flag `--trace` flag: just like `--profile`, it enables profiling, but instead of generating a report for `--bench` and `--test`, prints out a stack trace on exception. Great for using like `stack build --test --trace` * Nix: all options can be overridden on command line [#1483](https://github.com/commercialhaskell/stack/issues/1483) * Nix: build environments (shells) are now pure by default. * Make verbosity silent by default in script interpreter mode [#1472](https://github.com/commercialhaskell/stack/issues/1472) * Show a message when resetting git commit fails [#1453](https://github.com/commercialhaskell/stack/issues/1453) * Improve Unicode handling in project/package names [#1337](https://github.com/commercialhaskell/stack/issues/1337) * Fix ambiguity between a Stack command and a filename to execute (prefer `stack` subcommands) [#1471](https://github.com/commercialhaskell/stack/issues/1471) * Support multi line interpreter directive comments [#1394](https://github.com/commercialhaskell/stack/issues/1394) * Handle space separated pids in ghc-pkg dump (for GHC HEAD) [#1509](https://github.com/commercialhaskell/stack/issues/1509) * Add ghci --no-package-hiding option [#1517](https://github.com/commercialhaskell/stack/issues/1517) * `stack new` can download templates from URL [#1466](https://github.com/commercialhaskell/stack/issues/1466) Bug fixes: * Nix: `stack exec` options are passed properly to the Stack sub process [#1538](https://github.com/commercialhaskell/stack/issues/1538) * Nix: specifying a shell-file works in any current working directory [#1547](https://github.com/commercialhaskell/stack/issues/1547) * Nix: use `--resolver` argument * Docker: fix missing image message and '--docker-auto-pull' * No HTML escaping for "stack new" template 
params [#1475](https://github.com/commercialhaskell/stack/issues/1475) * Set permissions for generated .ghci script [#1480](https://github.com/commercialhaskell/stack/issues/1480) * Restrict commands allowed in interpreter mode [#1504](https://github.com/commercialhaskell/stack/issues/1504) * `stack ghci` doesn't see preprocessed files for executables [#1347](https://github.com/commercialhaskell/stack/issues/1347) * All test suites run even when only one is requested [#1550](https://github.com/commercialhaskell/stack/pull/1550) * Edge cases in broken templates give odd errors [#1535](https://github.com/commercialhaskell/stack/issues/1535) * Fix test coverage bug on windows ## 0.1.10.1 - 2015-12-13 Bug fixes: * `stack image container` did not actually build an image [#1473](https://github.com/commercialhaskell/stack/issues/1473) ## 0.1.10.0 - 2015-12-04 Release notes: * The Stack home page is now at [haskellstack.org](http://haskellstack.org), which shows the documentation rendered by readthedocs.org. Note: this has necessitated some changes to the links in the documentation's markdown source code, so please check the links on the website before submitting a PR to fix them. * The locations of the [Ubuntu](http://docs.haskellstack.org/en/stable/install_and_upgrade/#ubuntu) and [Debian](http://docs.haskellstack.org/en/stable/install_and_upgrade/#debian) package repositories have changed to have correct URL semantics according to Debian's guidelines [#1378](https://github.com/commercialhaskell/stack/issues/1378). The old locations will continue to work for some months, but we suggest that you adjust your `/etc/apt/sources.list.d/fpco.list` to the new location to avoid future disruption. * [openSUSE and SUSE Linux Enterprise](http://docs.haskellstack.org/en/stable/install_and_upgrade/#suse) packages are now available, thanks to [@mimi1vx](https://github.com/mimi1vx). Note: there will be some lag before these pick up new versions, as they are based on Stackage LTS. 
Major changes: * Support for building inside a Nix-shell providing system dependencies [#1285](https://github.com/commercialhaskell/stack/pull/1285) * Add optional GPG signing on `stack upload --sign` or with `stack sig sign ...` Other enhancements: * Print latest applicable version of packages on conflicts [#508](https://github.com/commercialhaskell/stack/issues/508) * Support for packages located in Mercurial repositories [#1397](https://github.com/commercialhaskell/stack/issues/1397) * Only run benchmarks specified as build targets [#1412](https://github.com/commercialhaskell/stack/issues/1412) * Support git-style executable fall-through (`stack something` executes `stack-something` if present) [#1433](https://github.com/commercialhaskell/stack/issues/1433) * GHCi now loads intermediate dependencies [#584](https://github.com/commercialhaskell/stack/issues/584) * `--work-dir` option for overriding `.stack-work` [#1178](https://github.com/commercialhaskell/stack/issues/1178) * Support `detailed-0.9` tests [#1429](https://github.com/commercialhaskell/stack/issues/1429) * Docker: improved POSIX signal proxying to containers [#547](https://github.com/commercialhaskell/stack/issues/547) Bug fixes: * Show absolute paths in error messages in multi-package builds [#1348](https://github.com/commercialhaskell/stack/issues/1348) * Docker-built binaries and libraries in a different path [#911](https://github.com/commercialhaskell/stack/issues/911) [#1367](https://github.com/commercialhaskell/stack/issues/1367) * Docker: `--resolver` argument didn't affect the selected image tag * GHCi: Spaces in filepaths caused module loading issues [#1401](https://github.com/commercialhaskell/stack/issues/1401) * GHCi: cpp-options in Cabal files weren't used [#1419](https://github.com/commercialhaskell/stack/issues/1419) * Benchmarks couldn't be run independently of each other [#1412](https://github.com/commercialhaskell/stack/issues/1412) * Send output of building setup to stderr 
[#1410](https://github.com/commercialhaskell/stack/issues/1410) ## 0.1.8.0 - 2015-11-20 Major changes: * GHCJS can now be used with stackage snapshots via the new `compiler` field. * Windows installers are now available: [download them here](http://docs.haskellstack.org/en/stable/install_and_upgrade/#windows) [#613](https://github.com/commercialhaskell/stack/issues/613) * Docker integration works with non-FPComplete generated images [#531](https://github.com/commercialhaskell/stack/issues/531) Other enhancements: * Added an `allow-newer` config option [#922](https://github.com/commercialhaskell/stack/issues/922) [#770](https://github.com/commercialhaskell/stack/issues/770) * When a Hackage revision invalidates a build plan in a snapshot, trust the snapshot [#770](https://github.com/commercialhaskell/stack/issues/770) * Added a `stack config set resolver RESOLVER` command. Part of work on [#115](https://github.com/commercialhaskell/stack/issues/115) * `stack setup` can now install GHCJS on windows. See [#1145](https://github.com/commercialhaskell/stack/issues/1145) and [#749](https://github.com/commercialhaskell/stack/issues/749) * `stack hpc report` command added, which generates reports for HPC tix files * `stack ghci` now accepts all the flags accepted by `stack build`. See [#1186](https://github.com/commercialhaskell/stack/issues/1186) * `stack ghci` builds the project before launching GHCi. If the build fails, try to launch GHCi anyway. Use `stack ghci --no-build` option to disable [#1065](https://github.com/commercialhaskell/stack/issues/1065) * `stack ghci` now detects and warns about various circumstances where it is liable to fail. See [#1270](https://github.com/commercialhaskell/stack/issues/1270) * Added `require-docker-version` configuration option * Packages will now usually be built along with their tests and benchmarks. 
See [#1166](https://github.com/commercialhaskell/stack/issues/1166) * Relative `local-bin-path` paths will be relative to the project's root directory, not the current working directory. [#1340](https://github.com/commercialhaskell/stack/issues/1340) * `stack clean` now takes an optional `[PACKAGE]` argument for use in multi-package projects. See [#583](https://github.com/commercialhaskell/stack/issues/583) * Ignore cabal_macros.h as a dependency [#1195](https://github.com/commercialhaskell/stack/issues/1195) * Pad timestamps and show local time in --verbose output [#1226](https://github.com/commercialhaskell/stack/issues/1226) * GHCi: Import all modules after loading them [#995](https://github.com/commercialhaskell/stack/issues/995) * Add subcommand aliases: `repl` for `ghci`, and `runhaskell` for `runghc` [#1241](https://github.com/commercialhaskell/stack/issues/1241) * Add typo recommendations for unknown package identifiers [#158](https://github.com/commercialhaskell/stack/issues/158) * Add `stack path --local-hpc-root` option * Overhaul dependencies' haddocks copying [#1231](https://github.com/commercialhaskell/stack/issues/1231) * Support for extra-package-dbs in 'stack ghci' [#1229](https://github.com/commercialhaskell/stack/pull/1229) * `stack new` disallows package names with "words" consisting solely of numbers [#1336](https://github.com/commercialhaskell/stack/issues/1336) * `stack build --fast` turns off optimizations * Show progress while downloading package index [#1223](https://github.com/commercialhaskell/stack/issues/1223). 
Bug fixes: * Fix: Haddocks not copied for dependencies [#1105](https://github.com/commercialhaskell/stack/issues/1105) * Fix: Global options did not work consistently after subcommand [#519](https://github.com/commercialhaskell/stack/issues/519) * Fix: 'stack ghci' doesn't notice that a module got deleted [#1180](https://github.com/commercialhaskell/stack/issues/1180) * Rebuild when Cabal file is changed * Fix: Paths in GHC warnings not canonicalized, nor those for packages in subdirectories or outside the project root [#1259](https://github.com/commercialhaskell/stack/issues/1259) * Fix: unlisted files in tests and benchmarks trigger extraneous second build [#838](https://github.com/commercialhaskell/stack/issues/838) ## 0.1.6.0 - 2015-10-15 Major changes: * `stack setup` now supports building and booting GHCJS from source tarball. * On Windows, build directories no longer display "pretty" information (like x86_64-windows/Cabal-1.22.4.0), but rather a hash of that content. The reason is to avoid the 260 character path limitation on Windows. See [#1027](https://github.com/commercialhaskell/stack/pull/1027) * Rename config files and clarify their purposes [#969](https://github.com/commercialhaskell/stack/issues/969) * `~/.stack/stack.yaml` --> `~/.stack/config.yaml` * `~/.stack/global` --> `~/.stack/global-project` * `/etc/stack/config` --> `/etc/stack/config.yaml` * Old locations still supported, with deprecation warnings * New command "stack eval CODE", which evaluates to "stack exec ghc -- -e CODE". Other enhancements: * No longer install `git` on Windows [#1046](https://github.com/commercialhaskell/stack/issues/1046). You can still get this behavior by running the following yourself: `stack exec -- pacman -Sy --noconfirm git`. 
* Typing enter during --file-watch triggers a rebuild [#1023](https://github.com/commercialhaskell/stack/pull/1023) * Use Haddock's `--hyperlinked-source` (crosslinked source), if available [#1070](https://github.com/commercialhaskell/stack/pull/1070) * Use Stack-installed GHCs for `stack init --solver` [#1072](https://github.com/commercialhaskell/stack/issues/1072) * Experimental: Add `stack query` command [#1087](https://github.com/commercialhaskell/stack/issues/1087) * By default, Stack no longer rebuilds a package due to GHC options changes. This behavior can be tweaked with the `rebuild-ghc-options` setting. [#1089](https://github.com/commercialhaskell/stack/issues/1089) * By default, ghc-options are applied to all local packages, not just targets. This behavior can be tweaked with the `apply-ghc-options` setting. [#1089](https://github.com/commercialhaskell/stack/issues/1089) * Docker: download or override location of Stack executable to re-run in container [#974](https://github.com/commercialhaskell/stack/issues/974) * Docker: when Docker Engine is remote, don't run containerized processes as host's UID/GID [#194](https://github.com/commercialhaskell/stack/issues/194) * Docker: `set-user` option to enable/disable running containerized processes as host's UID/GID [#194](https://github.com/commercialhaskell/stack/issues/194) * Custom Setup.hs files are now precompiled instead of interpreted. This should be a major performance win for certain edge cases (biggest example: [building Cabal itself](https://github.com/commercialhaskell/stack/issues/1041)) while being either neutral or a minor slowdown for more common cases. * `stack test --coverage` now also generates a unified coverage report for multiple test-suites / packages. In the unified report, test-suites can contribute to the coverage of other packages. 
Bug fixes: * Ignore stack-built executables named `ghc` [#1052](https://github.com/commercialhaskell/stack/issues/1052) * Fix quoting of output failed command line arguments * Mark executable-only packages as installed when copied from cache [#1043](https://github.com/commercialhaskell/stack/pull/1043) * Canonicalize temporary directory paths [#1047](https://github.com/commercialhaskell/stack/pull/1047) * Put code page fix inside the build function itself [#1066](https://github.com/commercialhaskell/stack/issues/1066) * Add `explicit-setup-deps` option [#1110](https://github.com/commercialhaskell/stack/issues/1110), and change the default to the old behavior of using any package in the global and snapshot database [#1025](https://github.com/commercialhaskell/stack/issues/1025) * Precompiled cache checks full package IDs on Cabal < 1.22 [#1103](https://github.com/commercialhaskell/stack/issues/1103) * Pass -package-id to ghci [#867](https://github.com/commercialhaskell/stack/issues/867) * Ignore global packages when copying precompiled packages [#1146](https://github.com/commercialhaskell/stack/issues/1146) ## 0.1.5.0 - 2015-09-24 Major changes: * On Windows, we now use a full MSYS2 installation in place of the previous PortableGit. This gives you access to the pacman package manager for more easily installing libraries. 
* Support for custom GHC binary distributions [#530](https://github.com/commercialhaskell/stack/issues/530) * `ghc-variant` option in `stack.yaml` to specify the variant (also `--ghc-variant` command-line option) * `setup-info` in `stack.yaml`, to specify where to download custom binary distributions (also `--ghc-bindist` command-line option) * Note: On systems with libgmp4 (aka `libgmp.so.3`), such as CentOS 6, you may need to re-run `stack setup` due to the centos6 GHC bindist being treated like a variant * A new `--pvp-bounds` flag to the sdist and upload commands allows automatic adding of PVP upper and/or lower bounds to your dependencies Other enhancements: * Adapt to upcoming Cabal installed package identifier format change [#851](https://github.com/commercialhaskell/stack/issues/851) * `stack setup` takes a `--stack-setup-yaml` argument * `--file-watch` is more discerning about which files to rebuild for [#912](https://github.com/commercialhaskell/stack/issues/912) * `stack path` now supports `--global-pkg-db` and `--ghc-package-path` * `--reconfigure` flag [#914](https://github.com/commercialhaskell/stack/issues/914) [#946](https://github.com/commercialhaskell/stack/issues/946) * Cached data is written with a checksum of its structure [#889](https://github.com/commercialhaskell/stack/issues/889) * Fully removed `--optimizations` flag * Added `--cabal-verbose` flag * Added `--file-watch-poll` flag for polling instead of using filesystem events (useful for running tests in a Docker container while modifying code in the host environment. When code is injected into the container via a volume, the container won't propagate filesystem events). 
* Give a preemptive error message when `-prof` is given as a GHC option [#1015](https://github.com/commercialhaskell/stack/issues/1015) * Locking is now optional, and will be turned on by setting the `STACK_LOCK` environment variable to `true` [#950](https://github.com/commercialhaskell/stack/issues/950) * Create default `stack.yaml` with documentation comments and commented out options [#226](https://github.com/commercialhaskell/stack/issues/226) * Out of memory warning if Cabal exits with -9 [#947](https://github.com/commercialhaskell/stack/issues/947) Bug fixes: * Hacky workaround for optparse-applicative issue with `stack exec --help` [#806](https://github.com/commercialhaskell/stack/issues/806) * Build executables for local extra-deps [#920](https://github.com/commercialhaskell/stack/issues/920) * copyFile can't handle directories [#942](https://github.com/commercialhaskell/stack/pull/942) * Support for spaces in Haddock interface files [fpco/minghc#85](https://github.com/fpco/minghc/issues/85) * Temporarily building against a "shadowing" local package? [#992](https://github.com/commercialhaskell/stack/issues/992) * Fix `Setup.exe` name for `--upgrade-cabal` on Windows [#1002](https://github.com/commercialhaskell/stack/issues/1002) * Unlisted dependencies no longer trigger extraneous second build [#838](https://github.com/commercialhaskell/stack/issues/838) ## 0.1.4.1 - 2015-09-04 Fix stack's own Haddocks. No changes to functionality (only comments updated). ## 0.1.4.0 - 2015-09-04 Major changes: * You now have more control over how GHC versions are matched, e.g. "use exactly this version," "use the specified minor version, but allow patches," or "use the given minor version or any later minor in the given major release." The default has switched from allowing newer later minor versions to a specific minor version allowing patches. 
For more information, see [#736](https://github.com/commercialhaskell/stack/issues/736) and [#784](https://github.com/commercialhaskell/stack/pull/784). * Support added for compiling with GHCJS * Stack can now reuse prebuilt binaries between snapshots. That means that, if you build package foo in LTS-3.1, that binary version can be reused in LTS-3.2, assuming it uses the same dependencies and flags. [#878](https://github.com/commercialhaskell/stack/issues/878) Other enhancements: * Added the `--docker-env` argument, to set environment variables in Docker container. * Set locale environment variables to UTF-8 encoding for builds to avoid "commitBuffer: invalid argument" errors from GHC [#793](https://github.com/commercialhaskell/stack/issues/793) * Enable transliteration for encoding on stdout and stderr [#824](https://github.com/commercialhaskell/stack/issues/824) * By default, `stack upgrade` automatically installs GHC as necessary [#797](https://github.com/commercialhaskell/stack/issues/797) * Added the `ghc-options` field to `stack.yaml` [#796](https://github.com/commercialhaskell/stack/issues/796) * Added the `extra-path` field to `stack.yaml` * Code page changes on Windows only apply to the build command (and its synonyms), and can be controlled via a command line flag (still defaults to on) [#757](https://github.com/commercialhaskell/stack/issues/757) * Implicitly add packages to extra-deps when a flag for them is set [#807](https://github.com/commercialhaskell/stack/issues/807) * Use a precompiled Setup.hs for simple build types [#801](https://github.com/commercialhaskell/stack/issues/801) * Set --enable-tests and --enable-benchmarks optimistically [#805](https://github.com/commercialhaskell/stack/issues/805) * `--only-configure` option added [#820](https://github.com/commercialhaskell/stack/issues/820) * Check for duplicate local package names * Stop nagging people that call `stack test` [#845](https://github.com/commercialhaskell/stack/issues/845) * 
`--file-watch` will ignore files that are in your VCS boring/ignore files [#703](https://github.com/commercialhaskell/stack/issues/703) * Add `--numeric-version` option Bug fixes: * `stack init --solver` fails if `GHC_PACKAGE_PATH` is present [#860](https://github.com/commercialhaskell/stack/issues/860) * `stack solver` and `stack init --solver` check for test suite and benchmark dependencies [#862](https://github.com/commercialhaskell/stack/issues/862) * More intelligent logic for setting UTF-8 locale environment variables [#856](https://github.com/commercialhaskell/stack/issues/856) * Create missing directories for `stack sdist` * Don't ignore Cabal files with extra periods [#895](https://github.com/commercialhaskell/stack/issues/895) * Deprecate unused `--optimizations` flag * Truncated output on slow terminals [#413](https://github.com/commercialhaskell/stack/issues/413) ## 0.1.3.1 - 2015-08-12 Bug fixes: * Ignore disabled executables [#763](https://github.com/commercialhaskell/stack/issues/763) ## 0.1.3.0 - 2015-08-12 Major changes: * Detect when a module is compiled but not listed in the Cabal file ([#32](https://github.com/commercialhaskell/stack/issues/32)) * A warning is displayed for any modules that should be added to `other-modules` in the Cabal file * These modules are taken into account when determining whether a package needs to be built * Respect TemplateHaskell addDependentFile dependency changes ([#105](https://github.com/commercialhaskell/stack/issues/105)) * TH dependent files are taken into account when determining whether a package needs to be built. 
* Overhauled target parsing, added `--test` and `--bench` options [#651](https://github.com/commercialhaskell/stack/issues/651) * For details, see [Build commands documentation](http://docs.haskellstack.org/en/stable/build_command/) Other enhancements: * Set the `HASKELL_DIST_DIR` environment variable [#524](https://github.com/commercialhaskell/stack/pull/524) * Track build status of tests and benchmarks [#525](https://github.com/commercialhaskell/stack/issues/525) * `--no-run-tests` [#517](https://github.com/commercialhaskell/stack/pull/517) * Targets outside of root dir don't build [#366](https://github.com/commercialhaskell/stack/issues/366) * Upper limit on number of flag combinations to test [#543](https://github.com/commercialhaskell/stack/issues/543) * Fuzzy matching support to give better error messages for close version numbers [#504](https://github.com/commercialhaskell/stack/issues/504) * `--local-bin-path` global option. Use to change where binaries get placed on a `--copy-bins` [#342](https://github.com/commercialhaskell/stack/issues/342) * Custom snapshots [#111](https://github.com/commercialhaskell/stack/issues/111) * --force-dirty flag: Force treating all local packages as having dirty files (useful for cases where Stack can't detect a file change) * GHC error messages: display file paths as absolute instead of relative for better editor integration * Add the `--copy-bins` option [#569](https://github.com/commercialhaskell/stack/issues/569) * Give warnings on unexpected config keys [#48](https://github.com/commercialhaskell/stack/issues/48) * Remove Docker `pass-host` option * Don't require `cabal-install` to upload [#313](https://github.com/commercialhaskell/stack/issues/313) * Generate indexes for all deps and all installed snapshot packages [#143](https://github.com/commercialhaskell/stack/issues/143) * Provide `--resolver global` option [#645](https://github.com/commercialhaskell/stack/issues/645) * Also supports `--resolver nightly`, 
`--resolver lts`, and `--resolver lts-X` * Make `stack build --flag` error when flag or package is unknown [#617](https://github.com/commercialhaskell/stack/issues/617) * Preserve file permissions when unpacking sources [#666](https://github.com/commercialhaskell/stack/pull/666) * `stack build` etc work outside of a project * `list-dependencies` command [#638](https://github.com/commercialhaskell/stack/issues/638) * `--upgrade-cabal` option to `stack setup` [#174](https://github.com/commercialhaskell/stack/issues/174) * `--exec` option [#651](https://github.com/commercialhaskell/stack/issues/651) * `--only-dependencies` implemented correctly [#387](https://github.com/commercialhaskell/stack/issues/387) Bug fixes: * Extensions from the `other-extensions` field no longer enabled by default [#449](https://github.com/commercialhaskell/stack/issues/449) * Fix: haddock forces rebuild of empty packages [#452](https://github.com/commercialhaskell/stack/issues/452) * Don't copy over executables excluded by component selection [#605](https://github.com/commercialhaskell/stack/issues/605) * Fix: Stack fails on Windows with git package in `stack.yaml` and no git binary on path [#712](https://github.com/commercialhaskell/stack/issues/712) * Fixed GHCi issue: Specifying explicit package versions (#678) * Fixed GHCi issue: Specifying -odir and -hidir as .stack-work/odir (#529) * Fixed GHCi issue: Specifying A instead of A.ext for modules (#498) ## 0.1.2.0 - 2015-07-05 * Add `--prune` flag to `stack dot` [#487](https://github.com/commercialhaskell/stack/issues/487) * Add `--[no-]external`,`--[no-]include-base` flags to `stack dot` [#437](https://github.com/commercialhaskell/stack/issues/437) * Add `--ignore-subdirs` flag to init command [#435](https://github.com/commercialhaskell/stack/pull/435) * Handle attempt to use non-existing resolver [#436](https://github.com/commercialhaskell/stack/pull/436) * Add `--force` flag to `init` command * exec style commands accept the 
`--package` option (see [Reddit discussion](http://www.reddit.com/r/haskell/comments/3bd66h/stack_runghc_turtle_as_haskell_script_solution/)) * `stack upload` without arguments doesn't do anything [#439](https://github.com/commercialhaskell/stack/issues/439) * Print latest version of packages on conflicts [#450](https://github.com/commercialhaskell/stack/issues/450) * Flag to avoid rerunning tests that haven't changed [#451](https://github.com/commercialhaskell/stack/issues/451) * Stack can act as a script interpreter (see [Script interpreter] (https://github.com/commercialhaskell/stack/wiki/Script-interpreter) and [Reddit discussion](http://www.reddit.com/r/haskell/comments/3bd66h/stack_runghc_turtle_as_haskell_script_solution/)) * Add the __`--file-watch`__ flag to auto-rebuild on file changes [#113](https://github.com/commercialhaskell/stack/issues/113) * Rename `stack docker exec` to `stack exec --plain` * Add the `--skip-msys` flag [#377](https://github.com/commercialhaskell/stack/issues/377) * `--keep-going`, turned on by default for tests and benchmarks [#478](https://github.com/commercialhaskell/stack/issues/478) * `concurrent-tests: BOOL` [#492](https://github.com/commercialhaskell/stack/issues/492) * Use hashes to check file dirtiness [#502](https://github.com/commercialhaskell/stack/issues/502) * Install correct GHC build on systems with libgmp.so.3 [#465](https://github.com/commercialhaskell/stack/issues/465) * `stack upgrade` checks version before upgrading [#447](https://github.com/commercialhaskell/stack/issues/447) ## 0.1.1.0 - 2015-06-26 * Remove GHC uncompressed tar file after installation [#376](https://github.com/commercialhaskell/stack/issues/376) * Put stackage snapshots JSON on S3 [#380](https://github.com/commercialhaskell/stack/issues/380) * Specifying flags for multiple packages [#335](https://github.com/commercialhaskell/stack/issues/335) * single test suite failure should show entire log 
[#388](https://github.com/commercialhaskell/stack/issues/388) * valid-wanted is a confusing option name [#386](https://github.com/commercialhaskell/stack/issues/386) * `stack init` in multi-package project should use local packages for dependency checking [#384](https://github.com/commercialhaskell/stack/issues/384) * Display information on why a snapshot was rejected [#381](https://github.com/commercialhaskell/stack/issues/381) * Give a reason for unregistering packages [#389](https://github.com/commercialhaskell/stack/issues/389) * `stack exec` accepts the `--no-ghc-package-path` parameter * Don't require build plan to upload [#400](https://github.com/commercialhaskell/stack/issues/400) * Specifying test components only builds/runs those tests [#398](https://github.com/commercialhaskell/stack/issues/398) * `STACK_EXE` environment variable * Add the `stack dot` command * `stack upgrade` added [#237](https://github.com/commercialhaskell/stack/issues/237) * `--stack-yaml` command line flag [#378](https://github.com/commercialhaskell/stack/issues/378) * `--skip-ghc-check` command line flag [#423](https://github.com/commercialhaskell/stack/issues/423) Bug fixes: * Haddock links to global packages no longer broken on Windows [#375](https://github.com/commercialhaskell/stack/issues/375) * Make flags case-insensitive [#397](https://github.com/commercialhaskell/stack/issues/397) * Mark packages uninstalled before rebuilding [#365](https://github.com/commercialhaskell/stack/issues/365) ## 0.1.0.0 - 2015-06-23 * Fall back to Cabal dependency solver when a snapshot can't be found * Basic implementation of `stack new` [#137](https://github.com/commercialhaskell/stack/issues/137) * `stack solver` command [#364](https://github.com/commercialhaskell/stack/issues/364) * `stack path` command [#95](https://github.com/commercialhaskell/stack/issues/95) * Haddocks [#143](https://github.com/commercialhaskell/stack/issues/143): * Build for dependencies * Use relative links * Generate 
module contents and index for all packages in project ## 0.0.3 - 2015-06-17 * `--prefetch` [#297](https://github.com/commercialhaskell/stack/issues/297) * `upload` command ported from stackage-upload [#225](https://github.com/commercialhaskell/stack/issues/225) * `--only-snapshot` [#310](https://github.com/commercialhaskell/stack/issues/310) * `--resolver` [#224](https://github.com/commercialhaskell/stack/issues/224) * `stack init` [#253](https://github.com/commercialhaskell/stack/issues/253) * `--extra-include-dirs` and `--extra-lib-dirs` [#333](https://github.com/commercialhaskell/stack/issues/333) * Specify intra-package target [#201](https://github.com/commercialhaskell/stack/issues/201) ## 0.0.2 - 2015-06-14 * Fix some Windows specific bugs [#216](https://github.com/commercialhaskell/stack/issues/216) * Improve output for package index updates [#227](https://github.com/commercialhaskell/stack/issues/227) * Automatically update indices as necessary [#227](https://github.com/commercialhaskell/stack/issues/227) * --verbose flag [#217](https://github.com/commercialhaskell/stack/issues/217) * Remove packages (HTTPS and Git) [#199](https://github.com/commercialhaskell/stack/issues/199) * Config values for system-ghc and install-ghc * Merge `stack deps` functionality into `stack build` * `install` command [#153](https://github.com/commercialhaskell/stack/issues/153) and [#272](https://github.com/commercialhaskell/stack/issues/272) * overriding architecture value (useful to force 64-bit GHC on Windows, for example) * Overhauled test running (allows cycles, avoids unnecessary recompilation, etc) ## 0.0.1 - 2015-06-09 * First public release, beta quality stack-2.15.7/README.md0000644000000000000000000000313214604306200012352 0ustar0000000000000000## The Haskell Tool Stack [![Unit tests](https://github.com/commercialhaskell/stack/workflows/Unit%20tests/badge.svg)](https://github.com/commercialhaskell/stack/actions/workflows/unit-tests.yml) [![Integration 
tests](https://github.com/commercialhaskell/stack/workflows/Integration%20tests/badge.svg)](https://github.com/commercialhaskell/stack/actions/workflows/integration-tests.yml) [![Release](https://img.shields.io/github/release/commercialhaskell/stack.svg)](https://github.com/commercialhaskell/stack/releases) Stack is a cross-platform program for developing Haskell projects. It is intended for Haskellers both new and experienced. See [haskellstack.org](http://haskellstack.org), or the [doc](./doc) directory of this repository, for more information. ### Learning to use Stack If are learning to use Stack and have questions, a discussion at the [Haskell Community](https://discourse.haskell.org/) forum may help. See its 'Learn' category. ### Community You can participate with the Stack community in the following areas: * the [Haskell Community](https://discourse.haskell.org/) forum * the Haskell Foundation's [Slack workspace](https://haskell-foundation.slack.com/join/shared_invite/zt-z45o9x38-8L55P27r12YO0YeEufcO2w#/shared-invite/email) * `#stack-users` channel, for general Stack discussion * `#stack-collaborators` channel, for working on Stack's code base * the [Haskell Stack room](https://matrix.to/#/#haskell-stack:matrix.org) on [Matrix](https://matrix.org/) * the [Google Group mailing list](https://groups.google.com/g/haskell-stack) for Stack stack-2.15.7/stack.yaml0000644000000000000000000000034714620153474013104 0ustar0000000000000000snapshot: lts-22.21 # GHC 9.6.5 docker: enable: false repo: quay.io/benz0li/ghc-musl:9.6.5 nix: # --nix on the command-line to enable. packages: - zlib - unzip flags: stack: developer-mode: true stack-2.15.7/doc/azure_ci.md0000644000000000000000000001717214604306200013774 0ustar0000000000000000
# Azure CI This page documents how to use Stack on [Azure CI](http://dev.azure.com/). ## Quick Start Note that you have to create [azure pipelines](#creating-azure-pipelines) for your project and then you need to put the relevant configuration files: * For simple Azure configuration, copy-paste the [azure-simple](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-simple.yml) file into `azure-pipelines.yml`. * For complex Azure configuration, you need to take the below linked four files and put all of them into the `.azure` directory. For a more detailed explanation, you can read further. ## Simple and Complex configuration We provide two fully baked configuration ready to be used on your projects: * [The simple Azure configuration](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-simple.yml) is intended for applications that do not require multiple GHC support or cross-platform support. It builds and tests your project with just the settings present in your `stack.yaml` file. * The complex Azure configuration is intended for projects that need to support multiple GHC versions and multiple operating systems, such as open source libraries to be released to Hackage. It tests against Stack for different snapshots on Linux, macOS and Windows. These are the files for the complex configuration: - [azure-pipelines.yml](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-pipelines.yml) : This is the starter file used by the Azure CI. 
- [azure-linux-template.yml](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-linux-template.yml) : Template for Azure Linux build - [azure-osx-template.yml](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-osx-template.yml) : Template for Azure macOS build - [azure-windows-template.yml](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/azure/azure-windows-template.yml) : Template for Azure Windows build !!! note It is likely going to be necessary to modify this configuration to match the needs of your project, such as tweaking the build matrix to alter which GHC versions you test against, or to specify GHC-version-specific `stack.yaml` files if necessary. Don't be surprised if it doesn't work the first time around. See the multiple GHC section below for more information. ## Creating Azure Pipelines Each of these configurations is ready to be used immediately. But before we go into where to put them, we have to create pipeline for your project in Azure CI platform: * Go to [dev.azure.com](https://dev.azure.com). You have to initially sign-in to your microsoft account there. * Once you have logged in to your Microsoft account, you have to sign in to [Azure devops](https://user-images.githubusercontent.com/737477/52465678-70963080-2ba5-11e9-83d8-84112b140236.png) from there. * You will be greeted with a [dashboard](https://user-images.githubusercontent.com/737477/52465677-70963080-2ba5-11e9-904a-c15c7c0524ef.png) where you can create your projects. * Click the "Create Project" button and fill the relevant information in the [dialog](https://user-images.githubusercontent.com/737477/52465676-70963080-2ba5-11e9-82a4-093ee58f11c9.png) and then click the "Create" button. * This will lead you to the project [dashboard](https://user-images.githubusercontent.com/737477/52465675-6ffd9a00-2ba5-11e9-917e-3dec251fcc87.png) page where you can create pipelines. 
* Click on "Pipelines" in the left menu. This will load the [pipelines page](https://user-images.githubusercontent.com/737477/52465673-6ffd9a00-2ba5-11e9-97a4-04e703ae1fbc.png) on the right. * Click on the button "New Pipeline" and you have to follow through the wizard there. You need to choose your github repository (or Azure repos) and follow the wizard. Note that in the [Configure step](https://user-images.githubusercontent.com/737477/52465670-6ffd9a00-2ba5-11e9-83a3-9fffdacbf249.png) you have to select the "Starter Pipeline". This will open up an [editor window](https://user-images.githubusercontent.com/737477/52465669-6f650380-2ba5-11e9-9662-e9c6fc2682b5.png). You can leave the existing YAML configuration there as it is and click the "Save and run" button. That will popup a [dialog](https://user-images.githubusercontent.com/737477/52465668-6f650380-2ba5-11e9-9203-6347a609e3c4.png). Select the relevant option and click "Save and run" button. (Note that this step would have created `azure-pipelines.yml` in your repository. You have to replace that with the appropriate configuration file.) The rest of this document explains the details of common Azure configurations for those of you who want to tweak the above configuration files or write your own. *Note:* both Azure and Stack infrastructures are actively developed. We try to document best practices at the moment. ## Infrastructure Note that you need at least one agent to build your code. You can specify which virtual image you want to choose using this configuration: ~~~yaml pool: vmImage: ubuntu-latest ~~~ The other popular options are `macOS-latest`, `windows-latest` for macOS and Windows respectively. You can find the [complete list](https://docs.microsoft.com/en-us/azure/devops/pipelines/agents/hosted?view=vsts&tabs=yaml) here. You also have the option to select a specific supported ubuntu version like `ubuntu-18.08`. 
## Installing Stack Currently there is only one reasonable way to install Stack: fetch a precompiled binary from GitHub. ~~~yaml - script: | mkdir -p ~/.local/bin curl -L https://get.haskellstack.org/stable/linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack' displayName: Install Stack ~~~ ## Installing GHC There are two ways to install GHC: - Let Stack download GHC - Install GHC using apt package manger. This method is only applicable for Debian based images. But we only use the first method of using Stack to download GHC. ### Multiple GHC - parametrised builds For different GHC versions, you probably want to use different project-level configuration files (`stack.yaml`, by default). If you don't want to put a specific `stack.yaml` for a particular snapshot and still want to test it, you have specify your snapshot argument in `ARGS` environment variable (you will see an example below). ~~~yaml strategy: matrix: GHC 8.0: ARGS: "--snapshot lts-9" GHC 8.2: ARGS: "--snapshot lts-11" GHC 8.4: ARGS: "--snapshot lts-12" GHC 8.6: ARGS: "--snapshot lts-14" GHC 8.8: ARGS: "--snapsht lts-15" nightly: ARGS: "--snapshot nightly" ~~~ ## Running tests After the environment setup, actual test running is simple. Command: ~~~text stack $ARGS test --bench --no-run-benchmarks --haddock --no-haddock-deps ~~~ ## Other details Some Stack commands will run for long time. To avoid timeouts, use the [timeoutInMinutes](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/phases?tabs=yaml&view=azdevops#timeouts) for jobs. ## Examples - [commercialhaskell/stack](https://github.com/commercialhaskell/stack/blob/master/azure-pipelines.yml) - [psibi/tldr-hs](http://github.com/psibi/tldr-hs) - [psibi/wai-slack-middleware](https://github.com/psibi/wai-slack-middleware) stack-2.15.7/doc/build_command.md0000644000000000000000000011557114620153473015004 0ustar0000000000000000
# The `stack build` command and its synonyms ~~~text stack build [TARGET] [--dry-run] [--pedantic] [--fast] [--ghc-options OPTIONS] [--flag PACKAGE:[-]FLAG] [--dependencies-only | --only-snapshot | --only-dependencies | --only-locals] [--file-watch | --file-watch-poll] [--watch-all] [--exec COMMAND [ARGUMENT(S)]] [--only-configure] [--trace] [--profile] [--no-strip] [--[no-]library-profiling] [--[no-]executable-profiling] [--[no-]library-stripping] [--[no-]executable-stripping] [--[no-]haddock] [--haddock-arguments HADDOCK_ARGS] [--[no-]open] [--[no-]haddock-deps] [--[no-]haddock-internal] [--[no-]haddock-hyperlink-source] [--[no-]haddock-for-hackage] [--[no-]copy-bins] [--[no-]copy-compiler-tool] [--[no-]prefetch] [--[no-]keep-going] [--[no-]keep-tmp-files] [--[no-]force-dirty] [--[no-]test] [--[no-]rerun-tests] [--ta|--test-arguments TEST_ARGS] [--coverage] [--no-run-tests] [--test-suite-timeout ARG] [--[no-]tests-allow-stdin] [--[no-]bench] [--ba|--benchmark-arguments BENCH_ARGS] [--no-run-benchmarks] [--[no-]reconfigure] [--cabal-verbosity VERBOSITY | --[no-]cabal-verbose] [--[no-]split-objs] [--skip ARG] [--[no-]interleaved-output] [--ddump-dir ARG] ~~~ `stack build` and its synonyms (`stack test`, `stack bench`, `stack haddock` and `stack install`) are Stack's primany command. The command provides a simple interface for simple tasks and flexibility for more complicated goals. See the introductory part of Stack's [user's guide](GUIDE.md#the-stack-build-command) for an introduction to the command. ## Synonyms The synonym commands for `stack build` are: |Synonym command|Equivalent `stack build` command flag| |---------------|-------------------------------------| |`stack test` |`stack build --test` | |`stack bench` |`stack build --bench` | |`stack haddock`|`stack build --haddock` | |`stack install`|`stack build --copy-bins` | The advantage of the synonym commands is that they are convenient and short. The advantage of the flags is that they compose. 
See the examples below. ## Components Every Cabal package is made up of one or more components. It can have an optional public library component, one or more optional executable components, one or more optional test suite components, and one or more optional benchmark components. Stack allows you to identify a specific component to be built. For example, `stack build mypackage:test:mytests` will build (and run - see further below) the `mytests` component of the `mypackage` package. `mytests` must be a test suite component. By default, if a test suite component is targeted, the component is built and run. The running behaviour can be disabled with the `--no-run-tests` flag. Similarly, if a benchmark component is targeted, it is built and run unless the running behaviour is disabled with the `--no-run-benchmarks` flag. This ability to specify a component applies only to a project package. With dependencies, Stack will *always* build the library (if present) and all executables (if any), and ignore test suites and benchmarks. If you want more control over a package, you must add it to your `packages` setting in your project-level configuration file (`stack.yaml`, by default). ## Target syntax `stack build` takes a list of one or more optional *targets* to be built. The supported syntaxes for targets are as follows: * no targets specified * *package* * *package identifier* * project package *component* * *local directory* ### No targets specified Example: `stack build` `stack build` with no targets specified will build all project packages. ### Target: *package* Example: `stack build foobar` Stack will try to find the package in the following locations: * project packages, * extra-deps, * the snapshot, and * the package index (e.g. Hackage). If the package is found in the package index, then the latest version of that package from the index is implicitly added as an extra-dep. 
If the package is a project package, the library and executable components are selected to be built. If the `--test` and `--bench` flags are set, then all of the test suite and benchmark components, respectively, are selected to be built. If *package* is a GHC boot package (packages that come with GHC and are included in GHC's global package database), the behaviour can be complex: * If the boot package has not been 'replaced', then `stack build` will, effectively, do nothing. * If the boot package has been 'replaced' then `stack build` will specify the latest version of that package in the package index, which may differ from the version provided by the version of GHC specified by the snapshot. A boot package will be treated as 'replaced' if the package is included directly in the Stackage snapshot or it depends on a package included directly in the snapshot. !!! note Stackage snapshots do not include directly most boot packages but some snapshots may include directly some boot packages. In particular, some snapshots include directly `Win32` (which is a boot package on Windows) while others do not. For example, if `Cabal` (a boot package) is not a project package or an extra-dep, then `stack build Cabal` with Stackage snapshot LTS Haskell 20.25 will: * on Windows, try to build the latest version of `Cabal` in the package index (because that snapshot includes `Win32` directly, and `Cabal` depends on `Win32` and so is treated as 'replaced'); and * on non-Windows, effectively, do nothing (because `Cabal` is not 'replaced'). ### Target: *package identifier* Example: `stack build foobar-1.2.3` If the package name is that of a project package, then Stack fails with an error. If the package version is in the package index (e.g. Hackage) then Stack will use the latest revision of that version from the package index. 
If the package is an extra-dep or in the snapshot, Stack will behave as if only the package name had been specified as the target (that is, ignoring the specified version). Otherwise, Stack will fail with an error, reporting that the package name is unknown. ### Target: project package *component* Examples: * `stack build my-package:lib` * `stack build my-package:exe:my-executable` * `stack build my-package:test:my-test-suite` * `stack build my-package:bench:my-benchmark` * `stack build my-package:my-test-suite` * `stack build :my-test-suite` You can select individual components from inside a project package to be built. This can be done for more fine-grained control over which test suites to run, or to have a faster compilation cycle. There are multiple ways to refer to a specific component: * `<package-name>:lib` or `<package-name>:<component-type>:<component-name>` (where the component type, `<component-type>`, is one of `exe`, `test`, or `bench`) is the most explicit. The library component type (`lib`) does not have an associated component name, `<component-name>`. * `<package-name>:<component-name>` allows you to leave out the component type, as that will often be unique for a given component name. * `:<component-name>` is a useful shortcut, saying "find the component `<component-name>` in all of the project packages". This will result in an error if more than one package has a component with the specified name. For further information about available targets, see the [`stack ide targets` command](ide_command.md). ### Target: *local directory* Examples: * `stack build foo/bar` * `stack build ./foo` * `stack build .` Stack will find all project packages that exist in the given directory hierarchy and then follow the same procedure as passing in package names as mentioned above. `stack build .` will target project packages in the current working directory or its subdirectories. !!! note If the directory name is parsed as one of the other target types, it will be treated as that. Explicitly starting the target with `./` can avoid that. For example, `stack build ./foo`.
## Controlling what gets built Stack will automatically build the necessary dependencies. See the introductory part of Stack's [user's guide](GUIDE.md#the-stack-build-command) for information about how these dependencies get specified. In addition to specifying targets, you can also control what gets built, or retained, with the flags and options listed below. You can also affect what gets built by specifying Cabal (the library) options for the configure step of the Cabal build process (for further information, see the documentation for the [configure-options](yaml_configuration.md#configure-options) configuration option). ### `--bench` flag Pass the flag to add benchmark components to the targets, if specific components are not identified. The `stack bench` synonym sets this flag. ### `--dependencies-only` flag Pass the flag to skip building the targets. The flag `--only-dependencies` has the same effect. ### `--[no-]dry-run` flag Default: Disabled Set the flag to build nothing and output information about the build plan. ### `--flag` option The option can be specified multiple times. It has two forms: * `--flag <package-name>:[-]<flag-name>`; and * `--flag *:[-]<flag-name>`. `stack build --flag <package-name>:[-]<flag-name>` sets (or unsets) the specified Cabal flag for the specified package. Stack will report an error if: * a package of that name is not known to Stack; or * a flag of that name is not a flag of that package. This overrides: * any Cabal flag specifications for the package in the snapshot; * any Cabal flag specifications for the package in Stack's project-level configuration file (`stack.yaml`); and * any use of `--flag *` (see below). `stack build --flag *:[-]<flag-name>` sets (or unsets) the specified Cabal flag for all packages (project packages and dependencies) (whether or not a flag of that name is a flag of the package). This overrides: * any Cabal flag specifications for packages in the snapshot; and * any Cabal flag specifications for packages in Stack's project-level configuration file (`stack.yaml`). !!!
note For a package included directly in the snapshot, if the Cabal flag specifications differ from the Cabal flag specifications (if any) in the snapshot, then the package will automatically be promoted to be an [extra-dep](#extra-deps). !!! note In order to set a Cabal flag for a GHC boot package, the package must be specified as an [extra-dep](yaml_configuration.md#extra-deps). ### `--[no-]force-dirty` flag Default: Disabled Set the flag to force rebuild of packages even when it doesn't seem necessary based on file dirtiness. ### `--[no-]haddock` flag Default: Disabled Set the flag to build Haddock documentation. This may cause a lot of packages to get re-built, so that the documentation links work. The `stack haddock` synonym sets this flag. Stack applies Haddock's `--gen-contents` and `--gen-index` flags to generate a single HTML contents and index for multiple sets of Haddock documentation. !!! warning On Windows, the values for the `haddock-interfaces` and `haddock-html` keys in the `*.conf` files for boot packages provided with certain versions of GHC (in its `lib\package.conf.d` directory) can be corrupt and refer to non-existent files and directories. For example, in the case of GHC 9.0.1 to GHC 9.8.1 the references are to `${pkgroot}/../../docs/html/libraries/...` or `${pkgroot}/../../doc/html/libraries/...` instead of `${pkgroot}/../docs/html/libraries/...` or `${pkgroot}/../doc/html/libraries/...`. Until those values are corrected, Haddock documentation will be missing links to what those packages expose. ### `--haddock-arguments` option `stack haddock --haddock-arguments ` passes the specified arguments to the Haddock tool. Specified arguments are separated by spaces. Arguments can be unquoted (if they do not contain space or `"` characters) or quoted (`""`). Quoted arguments can include 'escaped' characters, escaped with an initial `\` character. !!! 
note Haddock's `--latex` flag is incompatible with the Haddock flags used by Stack to generate a single HTML contents and index. ### `--[no-]haddock-deps` flag Default: Enabled (if building Haddock documentation) Unset the flag to disable building Haddock documentation for dependencies. ### `--[no-]haddock-for-hackage` flag :octicons-beaker-24: Experimental [:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) Default: Disabled Set the flag to build project packages with flags to generate Haddock documentation suitable for upload to Hackage. The form of the Haddock documentation generated for other packages is unaffected. For each project package: * the generated Haddock documentation files are in directory `doc\html\<package-id>-docs\`, relative to Stack's dist work directory (see [`stack path --dist-dir`](path_command.md)); and * an archive of the `<package-id>-docs` directory and its contents is in Stack's dist work directory. If the flag is set: * the [`--[no-]haddock-hyperlink-source`](#-no-haddock-hyperlink-source-flag) flag is ignored and `--haddock-hyperlink-source` is implied; * the [`--[no-]haddock-deps`](#-no-haddock-deps-flag) flag is ignored and the default value for the flag is implied; * the [`--[no-]haddock-internal`](#-no-haddock-internal-flag) flag is ignored and `--no-haddock-internal` is implied; * the [`--[no-]open`](#-no-open-flag) flag is ignored and `--no-open` is implied; and * the [`--[no-]force-dirty`](#-no-force-dirty-flag) flag is ignored and `--force-dirty` is implied. !!! info Stack does not distinguish the building of Haddock documentation for Hackage from the building of Haddock documentation generally, which is why the `--force-dirty` flag is implied. !!! note If set, Haddock will warn that `-source-*` options are ignored when `--hyperlinked-source` is enabled. That is due to a known bug in Cabal (the library). !!!
note If set, Cabal (the library) will report that documentation has been created in `index.html` and `.txt` files. Those files do not exist. That false report is due to a known bug in Cabal (the library). ### `--[no-]haddock-hyperlink-source` flag Default: Enabled Unset the flag to disable building hyperlinked source for Haddock. If the [`--haddock-for-hackage`](#-no-haddock-for-hackage-flag) flag is passed, this flag is ignored. ### `--[no-]haddock-internal` flag Default: Disabled Set the flag to enable building Haddock documentation for internal modules. If the [`--haddock-for-hackage`](#-no-haddock-for-hackage-flag) flag is passed, this flag is ignored. ### `--[no-]keep-going` flag Default (`stack build`): Disabled Default (`stack test` or `stack bench`): Enabled Set the flag to continue building packages even after some build step fails. The packages which depend upon the failed build won't get built. ### `--[no-]keep-tmp-files` flag Default: Disabled Set the flag to keep intermediate files and build directories that would otherwise be considered temporary and deleted. It may be useful to inspect these, if a build fails. By default, they are not kept. ### `--only-configure` flag [:octicons-tag-24: 0.1.4.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.4.0) Pass the flag to perform only the configure step, not any builds. This is intended for tool usage. It may break when used on multiple packages at once. !!! note If there are downstream actions that require a package to be built then a full build will occur, even if the flag is passed. ### `--only-dependencies` flag Pass the flag to skip building the targets. The flag `--dependencies-only` has the same effect. ### `--only-locals` flag Pass the flag to build only packages in the local database. Fails if the build plan includes packages in the snapshot database. ### `--only-snapshot` flag Pass the flag to build only snapshot dependencies, which are cached and shared with other projects.
### `--[no-]reconfigure` flag Default: Disabled Set the flag to force reconfiguration even when it doesn't seem necessary based on file dirtiness. This is sometimes useful with custom `Setup.hs` files, in particular when they depend on external data files. ### `--skip` option `stack build --skip ` skips building the specified components of a project package. It allows you to skip test suites and benchmark without specifying other components (e.g. `stack test --skip long-test-suite` will run the tests without the `long-test-suite` test suite). Be aware that skipping executables won't work the first time the package is built due to an issue in [Cabal](https://github.com/commercialhaskell/stack/issues/3229). This option can be specified multiple times to skip multiple components. ### `--test` flag Pass the flag to add test suite components to the targets, if specific components are not identified. The `stack test` synonym sets this flag. ## Controlling when building occurs ### `--file-watch` flag Pass the flag to rebuild your project every time a file changes. By default it will take into account all files belonging to the targets you specify. See also the `--watch-all` flag. ### `--file-watch-poll` flag Like the `--file-watch` flag, but based on polling the file system instead of using events to determine if a file has changed. ### `--watch-all` flag [:octicons-tag-24: 2.5.1](https://github.com/commercialhaskell/stack/releases/tag/v2.5.1) Pass the flag to rebuild your project every time any local file changes (from project packages or from dependencies located locally). See also the `--file-watch` flag. ## Controlling what happens after building ### `--benchmark-arguments`, `--ba` option `stack build --bench --benchmark-arguments=` will pass the specified argument, or arguments, to each benchmark when it is run. Specified arguments are separated by spaces. Arguments can be unquoted (if they do not contain space or `"` characters) or quoted (`""`). 
Quoted arguments can include 'escaped' characters, escaped with an initial `\` character. Account may need to be taken of the shell's approach to the processing of command line arguments. For example, to pass `'a single quoted string'`: === "Unix-like (Bash or Zsh)" In Bash, or Zsh (if `RC_QUOTES` option not set): `stack bench --benchmark-arguments \"\''a single quoted string'\'\"` Outside of single quotes, `\"` escapes a double quote and `\'` escapes a single quote. The content of single quotes is taken literally, but cannot contain a single quote. In Zsh (if `RC_QUOTES` option set): `stack bench --benchmark-arguments '"''a single quoted string''"'` The content of single quotes is taken literally. Within single quotes, `''` escapes a single quote. === "Windows (PowerShell)" `stack bench --benchmark-arguments '"''a single quoted string''"'` The content of single quotes is taken literally. Within single quotes, `''` escapes a single quote. ### `--exec` option `stack build --exec ' []'` will run the specified command after a successful build. Specified arguments are separated by spaces. Arguments can be unquoted (if they do not contain space or `"` characters) or quoted (`""`). Quoted arguments can include 'escaped' characters, escaped with an initial `\` character. Account may need to be taken of the shell's approach to the processing of command line arguments. For example, to pass `'a single quoted string'`: === "Unix-like (Bash or Zsh)" In Bash, or Zsh (if `RC_QUOTES` option not set): `stack build --exec ' '\"\''a single quoted string'\'\"` Outside of single quotes, `\"` escapes a double quote and `\'` escapes a single quote. The content of single quotes is taken literally, but cannot contain a single quote. In Zsh (if `RC_QUOTES` option set): `stack build --exec ' "''a single quoted string''"'` The content of single quotes is taken literally. Within single quotes, `''` escapes a single quote. 
=== "Windows (PowerShell)" `stack build --exec ' "''a single quoted string''"'` The content of single quotes is taken literally. Within single quotes, `''` escapes a single quote. ### `--test-arguments`, `--ta` option `stack build --test --test-arguments=` will pass the specified argument, or arguments, to each test when it is run. This option can be specified multiple times. Specified arguments are separated by spaces. Arguments can be unquoted (if they do not contain space or `"` characters) or quoted (`""`). Quoted arguments can include 'escaped' characters, escaped with an initial `\` character. Account may need to be taken of the shell's approach to the processing of command line arguments. For example, to pass `'a single quoted string'`: === "Unix-like (Bash or Zsh)" In Bash, or Zsh (if `RC_QUOTES` option not set): `stack test --test-arguments \"\''a single quoted string'\'\"` Outside of single quotes, `\"` escapes a double quote and `\'` escapes a single quote. The content of single quotes is taken literally, but cannot contain a single quote. In Zsh (if `RC_QUOTES` option set): `stack bench --benchmark-arguments '"''a single quoted string''"'` The content of single quotes is taken literally. Within single quotes, `''` escapes a single quote. === "Windows (PowerShell)" `stack test --test-arguments '"''a single quoted string''"'` The content of single quotes is taken literally. Within single quotes, `''` escapes a single quote. ## Flags affecting GHC's behaviour ### `--[no-]executable-profiling` flag Default: Disabled Set the flag to enable executable profiling for TARGETs and all its dependencies. The flag affects the location of the local project installation directory. See the [`stack path --local-install-root`](path_command.md) command. ### `--[no-]executable-stripping` flag Default: Enabled Unset the flag to disable executable stripping for TARGETs and all its dependencies. The flag may affect the location of the local project installation directory. 
See the [`stack path --local-install-root`](path_command.md) command. ### `--fast` flag GHC has many flags that specify individual optimisations of the compiler. GHC also uses its `-O*` flags to specify convenient 'packages' of GHC optimisation flags. GHC's flags are evaluated from left to right and later flags can override the effect of earlier ones. If no GHC `-O*` type flag is specified, GHC takes that to mean "Please compile quickly; I'm not over-bothered about compiled-code quality." GHC's `-O0` flag reverts to the same settings as if no `-O*` flags had been specified. Pass Stack's `--fast` flag to add `-O0` to the flags and options passed to GHC. The effect of `--fast` can be overriden with Stack's [`--ghc-options`](#-ghc-options-option) command line options. !!! note With one exception, GHC's `-O` flag is always passed to GHC first (being Cabal's default behaviour). The exception is if Cabal's `--disable-optimization` flag or `--enable-optimization[=n]`, `-O[n]` options are used during the configure step of the Cabal build process; see Stack's [`configure-options`](yaml_configuration.md#configure-options) YAML configuration option. ### `--ghc-options` option GHC command line options can be specified for a package in its Cabal file (including one created from a `package.yaml` file). This option augments and, if applicable (see below), overrides any such GHC command line options and those specified in Stack's YAML configuration files - see the [`ghc-options`](yaml_configuration.md#ghc-options) configuration option. `stack build --ghc-options ` passes the specified command line options to GHC, depending on Stack's [`apply-ghc-options`](yaml_configuration.md#apply-ghc-options) YAML configuration option. This option can be specified multiple times. GHC's command line options are _order-dependent_ and evaluated from left to right. Later options can override the effect of earlier ones. 
Any GHC command line options for a package specified at Stack's command line are applied after those specified in Stack's YAML configuration files. ### `--[no-]library-profiling` flag Default: Disabled Set the flag to enable library profiling for TARGETs and all its dependencies. The flag affects the location of the local project installation directory. See the [`stack path --local-install-root`](path_command.md) command. ### `--[no-]library-stripping` flag Default: Enabled Unset the flag to disable library stripping for TARGETs and all its dependencies. The flag may affect the location of the local project installation directory. See the [`stack path --local-install-root`](path_command.md) command. ### `--pedantic` flag Pass the flag to build your project with the GHC options `-Wall` and `-Werror`. `-Wall` turns on all warning options that indicate potentially suspicious code. `-Werror` makes any warning into a fatal error. ### `--profile` flag Pass the flag to enable profiling in libraries, executables, etc. for all expressions, and generate a profiling report in tests or benchmarks. The flag affects the location of the local project installation directory. See the [`stack path --local-install-root`](path_command.md) command. ### `--[no-]split-objs` flag :octicons-beaker-24: Experimental Default: Disabled Set the flag to enable the GHC option `-split-objs`. This will reduce output size (at the cost of build time). !!! note The behaviour of this feature may be changed and improved. You will need to clean your project's Stack working directory before use. If you want to compile all dependencies with split-objs, you will need to delete the snapshot (and all snapshots that could reference that snapshot). !!! note GHC's `-split-objs` flag was deprecated in favour of `-split-sections` in GHC 8.2.1 and was not supported by GHC on any platform from GHC 8.10.1. ### `--no-strip` flag Pass the flag to disable DWARF debugging symbol stripping in libraries, executables, etc. 
for all expressions, producing larger executables but allowing the use of standard debuggers/profiling tools/other utilities that use debugging symbols. The flag affects the location of the local project installation directory. See the [`stack path --local-install-root`](path_command.md) command. ### `--trace` flag Pass the flag to enable profiling in libraries, executables, etc. for all expressions, and generate a backtrace on exception. The flag affects the location of the local project installation directory. See the [`stack path --local-install-root`](path_command.md) command. ## Flags affecting other tools' behaviour ### `--PROG-option` options [:octicons-tag-24: 2.11.1](https://github.com/commercialhaskell/stack/releases/tag/v2.11.1) `PROG` is a program recognised by Cabal (the library) and one of `alex`, `ar`, `c2hs`, `cpphs`, `gcc`, `greencard`, `happy`, `hsc2hs`, `hscolour`, `ld`, `pkg-config`, `strip` and `tar`. `stack build --PROG-option ` passes the specified command line argument to `PROG`, if it used by Cabal during the configuration step. This option can be specified multiple times. For example, if the program `happy` is used by Cabal during the configuration step, you could command `stack build --happy-option=--ghc` or `stack build --happy-option --ghc` to pass to `happy` its `--ghc` flag. By default, all and any `--PROG-option` options on Stack's command line are applied to all project packages (targets or otherwise). This behaviour can be changed. See the [`apply-prog-options`](yaml_configuration.md#apply-prog-options) configuration option. Stack can also be configured to pass Cabal's `--PROG-option`, `--PROG-options` or other options to Cabal during the configuration step. For further information, see the documentation for the [configure-options](yaml_configuration.md#configure-options) configuration option. 
## Flags relating to build outputs ### `--[no-]cabal-verbose` flag Default: Disabled Set the flag to enable verbose output from Cabal (the library). This flag is an alternative to the `--cabal-verbosity` option. ### `--cabal-verbosity` option `stack build --cabal-verbosity <verbosity-level>` sets the specified verbosity level for output from Cabal (the library). It accepts Cabal's numerical and extended syntax. This option is an alternative to setting the `--cabal-verbose` flag. ### `--[no-]copy-bins` flag [:octicons-tag-24: 0.1.3.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.3.0) Default: Disabled Set the flag to enable copying of built executable files (binaries) of targets to Stack's local binary directory (see `stack path --local-bin`). The `stack install` synonym sets this flag. ### `--[no-]copy-compiler-tool` flag [:octicons-tag-24: 1.6.1](https://github.com/commercialhaskell/stack/releases/tag/v1.6.1) Default: Disabled Set the flag to enable copying of built executable files (binaries) of targets to Stack's compiler tools binary directory (see `stack path --compiler-tools-bin`). ### `--coverage` flag Pass the flag to generate a code coverage report. For further information, see the [code coverage](hpc_command.md) documentation. ### `--ddump-dir` option GHC has a number of `ddump-*` flags and options to allow dumping out of intermediate structures produced by the compiler. They include the `-ddump-to-file` flag that causes the output from other flags to be dumped to a file or files. `stack build --ddump-dir <directory>` causes Stack to copy `*.dump-*` files to subdirectories of the specified directory, which is relative to Stack's working directory for the project. For example: ~~~text stack build --ghc-options "-ddump-to-file -ddump-timings" --ddump-dir my-ddump-dir ~~~ ### `--[no-]interleaved-output` flag [:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) Default: Enabled Set the flag for interleaved output.
With interleaved output, each line of output from each package being built (targets and dependencies) is sent to the console as it happens and output relating to different packages can be interleaved. Each line will be prefixed with the name of the relevant package. The spacing between the prefix and the output will be set based on the longest relevant package name, so that the start of the output itself aligns. For example (extract): ~~~text hpack > build mustache > configure hpack > Preprocessing library for hpack-0.35.0.. hpack > Building library for hpack-0.35.0.. mustache > Configuring mustache-2.4.1... hpack > [ 1 of 29] Compiling Data.Aeson.Config.Key hpack > [ 2 of 29] Compiling Data.Aeson.Config.KeyMap mustache > build hpack > [ 3 of 29] Compiling Data.Aeson.Config.Util mustache > Preprocessing library for mustache-2.4.1.. mustache > Building library for mustache-2.4.1.. hpack > [ 4 of 29] Compiling Hpack.Haskell hpack > [ 5 of 29] Compiling Hpack.Utf8 mustache > [1 of 8] Compiling Paths_mustache hpack > [ 6 of 29] Compiling Imports hpack > [ 7 of 29] Compiling Hpack.Util mustache > [2 of 8] Compiling Text.Mustache.Internal ~~~ Unset the flag for non-interleaved output. With non-interleaved output, the build output from GHC (as opposed to from Stack) in respect of dependencies is ignored. The behaviour then depends whether there is one target package or more than one. There can be one target if the project has a single package or if one package is targetted in a multi-package project (for example, using `stack build `). * **One target package:** The build output for the target package is sent to the standard error stream of the console as it happens. * **More than one target package:** The build output from GHC (as opposed to from Stack) for each target package is sent to a log file for that package, unless an error occurs that prevents that. 
If color in output is in use, there will be two files, one with extension `.log` without color codes and one with extension `.log-color` with color codes. At the end of the build, the location of the directory containing the log files is reported. To also output the contents of the log files to the standard error output stream of the console at the end of the build, use Stack's `dump-logs` option. For further information about that option, see the [YAML configuration](yaml_configuration.md#dump-logs) documentation. The default `dump-logs` mode is to output the contents of any log files that include GHC warnings. ### `--[no]-open` flag Default: Disabled Set the flag to enable opening the local Haddock documentation in the browser. ## Other flags and options ### `--[no]-prefetch` flag Default: Disabled Set the flag to enable fetching packages necessary for the build immediately. This can be useful with `stack build --dry-run`. ### `--progress-bar` option [:octicons-tag-24: 2.13.1](https://github.com/commercialhaskell/stack/releases/tag/v2.13.1) Default: `capped` `stack build --progress-bar ` sets the format of the progress bar, where `` is one of `none` (no bar), `count-only` (only the package count is displayed), `capped` (the bar showing package builds in progress is capped to a length equal to the terminal width), and `full` (the bar is uncapped). On terminals where 'backspace' has no effect if the cursor is in the first column, bars longer than the terminal width will not be 'sticky' at the bottom of the screen. ### `--tests-allow-stdin` flag [:octicons-tag-24: 2.9.3](https://github.com/commercialhaskell/stack/releases/tag/v2.9.3) Default: Enabled Cabal defines a test suite interface ['exitcode-stdio-1.0'](https://hackage.haskell.org/package/Cabal-syntax-3.8.1.0/docs/Distribution-Types-TestSuiteInterface.html#v:TestSuiteExeV1.0) where the test suite takes the form of an executable and the executable takes nothing on the standard input stream (`stdin`). 
Pass this flag to override that specification and allow the executable to receive input on that stream. If you pass `--no-tests-allow-stdin` and the executable seeks input on the standard input stream, an exception will be thrown. ## Examples All the following examples assume that: * if `stack build` is commanded outside of a project directory, there is no `stack.yaml` file in the current directory or ancestor directory and, consequently, the project-level configuration will be determined by a `stack.yaml` file in the `global-project` directory in the [Stack root](stack_root.md) (for further information, see the [YAML configuration](yaml_configuration.md) documentation); and * if `stack build` is commanded in a project directory, there is a `stack.yaml` file in that directory. Examples: * In the project directory, `stack build --test --copy-bins` or, equivalently, `stack test --copy-bins` or `stack install --test`, will build libraries, executables, and test suites, run the test suites, and then copy the executables to Stack's local binary directory (see `stack path --local-bin`). This is an example of the flags composing. * The following example uses a clone of the `wai` [repository](https://github.com/yesodweb/wai/). The `wai` project comprises a number of packages, including `wai-extra` and `warp`. In the `wai` project directory, the command: ~~~text stack build --file-watch --test --copy-bins --haddock wai-extra :warp warp:doctest --exec 'echo Yay, it worked!' ~~~ will start Stack up in file watch mode, waiting for files in your project to change. When first starting, and each time a file changes, it will do all of the following.
* Build the `wai-extra` package and its test suites * Build the `warp` executable * Build the `warp` package's `doctest` component (which is a test suite) * Run all of the `wai-extra` package's test suite components and the `doctest` test suite component * If all of that succeeds: * Copy generated executables to Stack's local binary directory (see `stack path --local-bin`) * Run the command `echo Yay, it worked!` * The following example uses the `Agda` package and assumes that `Agda-2.6.3` is the latest version in the package index (e.g. Hackage) and is not a version in the snapshot specified by the `stack.yaml` in the `global-project` directory in the Stack root. Outside a project directory, `stack build Agda-2.6.3 --copy-bins` or, equivalently, `stack install Agda-2.6.3`, will attempt to build the libraries and executables of the identified version of the package in the package index (using the `stack.yaml` file in the `global-project` directory in the Stack root), and then copy the executables to Stack's local binary directory (see `stack path --local-bin`). If a different snapshot is required to build the identified version of the package, then that can be specified at the command line. For example, to use the most recent Stackage Nightly snapshot: ~~~text stack --snapshot nightly install Agda-2.6.3 ~~~ Alternatively, Stack can be used to unpack the package from the package index into a local project directory named after the package identifier (for further information, see the [`stack unpack` command](unpack_command.md) documentation) and, if the package does not provide its own Stack configuration file (`stack.yaml`, by default), to attempt to initialise that configuration (for further information, see the [`stack init` command](init_command.md) documentation).
For example: ~~~text stack unpack Agda-2.6.3 cd Agda-2.6.3 # Change to the project directory stack init # Attempt to create a project stack.yaml file stack install # Equivalent to stack build --copy-bins ~~~ stack-2.15.7/doc/build_overview.md0000644000000000000000000002772314604306200015223 0ustar0000000000000000
# Build overview !!! warning This document should not be considered accurate until this warning is removed. This is a work-in-progress document covering the build process used by Stack. It was started following the Pantry rewrite work in Stack 2.1.1, and contains some significant changes/simplifications from how things used to work. This document will likely not fully be reflected in the behavior of Stack itself until late in the Stack 2.0 development cycle. ## Terminology * Project package: anything listed in `packages` in stack.yaml * Dependency: anything listed in extra-deps or a snapshot * Target: package and/or component listed on the command line to be built. Can be either project package or dependency. If none specified, automatically targets all project packages * Immutable package: a package which comes from Hackage, an archive, or a repository. In contrast to... * Mutable package: a package which comes from a local file path. The contents of such a package are assumed to mutate over time. * Write only database: a package database and set of executables for a given set of _immutable_ packages. Only packages from immutable sources and which depend exclusively on other immutable packages can be in this database. *NOTE* formerly this was the _snapshot database_. * Mutable database: a package database and set of executables for packages which are either mutable or depend on such mutable packages. Importantly, packages in this database can be unregistered, replaced, etc, depending on what happens with the source packages. *NOTE* formerly this was the *local database*. 
Outdated terminology to be purged: * Wanted * Local * Snapshot package ## Inputs Stack pays attention to the following inputs: * Current working directory, used for finding the default `stack.yaml` file and resolving relative paths * The `STACK_YAML` environment variable * Command line arguments (CLI args), as will be referenced below Given these inputs, Stack attempts the following process when performing a build. ## Find the `stack.yaml` file * Check for a `--stack-yaml` CLI arg, and use that * Check for a `STACK_YAML` env var * Look for a `stack.yaml` in this directory or ancestor directories * Fall back to the default global project This file is parsed to provide the following config values: * `snapshot` (or, alternatively, `resolver`) (required field) * `compiler` (optional field) * `packages` (optional field, defaults to `["."]`) * `extra-deps` (optional field, defaults to `[]`) * `flags` (optional field, defaults to `{}`) * `ghc-options` (optional field, defaults to `{}`) `flags` and `ghc-options` break down into both _by name_ (applied to a specific package) and _general_ (general option `*` for flags is only available in CLI). ## Wanted compiler, dependencies, and project packages * If the `--snapshot` CLI is present, ignore the `snapshot` (or `resolver`) and `compiler` config values * Load up the indicated snapshot (either config value or CLI arg). This will provide: * A map from package name to package location, flags, GHC options, and if a package should be hidden. All package locations here are immutable. * A wanted compiler version, e.g. `ghc-8.6.5` * If the `--compiler` CLI arg is set, or the `compiler` config value is set (and `--snapshot` CLI arg is not set), ignore the wanted compiler from the snapshot and use the specified wanted compiler * Parse `extra-deps` into a `Map PackageName PackageLocation`, containing both mutable and immutable package locations. Parse `packages` into a `Map PackageName ProjectPackage`. 
* Ensure there are no duplicates between these two sets of packages * Delete any packages from the snapshot packages that appear in `packages` or `extra-deps` * Perform a left biased union between the immutable `extra-deps` values and the snapshot packages. Ignore any settings in the snapshot packages that have been replaced. * Apply the `flags` and `ghc-options` by name to these packages overwriting any previous values coming from a snapshot. If any values are specified but no matching package is found, it's an error. If a flag is not defined in the corresponding package cabal file, it's an error. * We are now left with the following: * A wanted compiler version * A map from package name to immutable packages with package config (flags, GHC options, hidden) * A map from package name to mutable packages as dependencies with package config * A map from package name to mutable packages as project packages with package config ## Get actual compiler Use the wanted compiler and various other Stack config values (not all listed here) to find the actual compiler, potentially installing it in the process. ## Global package sources With the actual compiler discovered, list out the packages available in its database and create a map from package name to version/GhcPkgId. Remove any packages from this map which are present in one of the other three maps mentioned above. ## Resolve targets Take the CLI args for targets as raw text values and turn them into actual targets. 
* Do a basic parse of the values into one of the following: * Package name * Package identifier * Package name + component * Directory * An empty target list is equivalent to listing the package names of all project packages * For any directories specified, find all project packages in that directory or subdirectories thereof and convert to those package names * For all package identifiers, ensure that either the package name does not exist in any of the three parsed maps from the "wanted compiler" step above, or that the package is present as an immutable dependency from Hackage. If so, create an immutable dependency entry with default flags, GHC options, and hidden status, and add this package to the set of immutable package dependencies. * For all package names, ensure the package is in one of the four maps we have, and if so add to either the dependency or project package target set. * For all package name + component, ensure that the package is a project package, and add that package + component to the set of project targets. * Ensure that no target has been specified multiple times. (*FIXME* Mihai states: I think we will need an extra consistency step for internal libraries. Sometimes stack needs to use the mangled name (`z-package-internallibname-z..`), sometimes the `package:internallibname` one. But I think this will become obvious when doing the code changes.) We now have an updated four package maps, a new set of dependency targets, and a new set of project package targets (potentially with specific components). ## Apply named CLI flags Named CLI flags are applied to specific packages by updating the config in one of the four maps. If a flag is specified and no package is found, it's an error. Note that flag settings are added _on top of_ previous settings in this case, and do not replace them. That is, if previously we have `singleton (FlagName "foo") True` and now add `singleton (FlagName "bar") True`, both `foo` and `bar` will now be true. 
If any flags are specified but no matching package is found, it's an error. If a flag is not defined in the corresponding package cabal file, it's an error. ## Apply CLI GHC options CLI GHC options are applied as general GHC options according to `apply-ghc-options` setting. ## Apply general flags from CLI `--flag *:flagname[:bool]` specified on the CLI are applied to any project package which uses that flag name. ## Apply general GHC options General options are divided into the following categories: * `$locals` is deprecated, it's now a synonym for `$project` * `$project` applies to all project packages, not to any dependencies * `$targets` applies to all project packages that are targets, not to any dependencies or non-target project packages. This is the default option for `apply-ghc-options` * `$everything` applies to all packages in the source map excluding global packages These options get applied to any corresponding packages in the source map. If some GHC options already exist for such a package then they get prepended otherwise they get used as is. ## Determine snapshot hash Use some deterministic binary serialization and SHA256 thereof to get a hash of the following information: * Actual compiler (GHC version, path, *FIXME* probably some other unique info from GHC, I've heard that `ghc --info` gives you something) * Global database map * Immutable dependency map Motivation: Any package built from the immutable dependency map and installed in this database will never need to be rebuilt. !!! bug "To do" Caveat: do we need to take profiling settings into account here? How about Haddock status? 
## Determine actual target components * Dependencies: "default" components (all libraries and executables) * Project packages: * If specific components named: only those, plus any libraries present * If no specific components, include the following: * All libraries, always * All executables, always * All test suites, _if_ `--test` specified on command line * All benchmarks, _if_ `--bench` specified on command line ## Construct build plan * Applied to every target (project package or dependency) * Apply flags, platform, and actual GHC version to resolve dependencies in any package analyzed * Include all library dependencies for all enabled components * Include all build tool dependencies for all enabled components (using the fun backwards compat logic for `build-tools`) * Apply the logic recursively to come up with a full build plan * If a task depends exclusively on immutable packages, mark it as immutable. Otherwise, it's mutable. The former go into the snapshot database, the latter into the local database. We now have a set of tasks of packages/components to build, with full config information for each package, and dependencies that must be built first. !!! bug "To do" There's some logic to deal with cyclic dependencies between test suites and benchmarks, where a task can be broken up into individual components versus be kept as a single task. Need to document this better. Currently it's the "all in one" logic. 
## Unregister local modified packages * For all mutable packages in the set of tasks, see if any files have changed since last successful build and, if so, unregister + delete their executables * For anything which depends on them directly or transitively, unregister + delete their executables ## Perform the tasks * Topological sort, find things which have no dependencies remaining * Check if already installed in the relevant database * Check package database * Check Stack specific "is installed" flags, necessary for non-library packages * For project packages, need to also check which components were built, if tests were run, if we need to rerun tests, etc * If all good: do nothing * Otherwise, for immutable tasks: check the precompiled cache for an identical package installation (same GHC, dependencies, etc). If present: copy that over, and we're done. * Otherwise, perform the build, register, write to the Stack specific "is installed" stuff, and (for immutable tasks) register to the precompiled cache "Perform the build" consists of: * Do a cabal configure, if needed * Build the desired components * For all test suites built, unless "no rerun tests" logic is on and we already ran the test, _or_ "no run tests" is on, run the test * For all benchmarks built, unless "no run benchmarks" is on, run the benchmark stack-2.15.7/doc/ChangeLog.md0000644000000000000000000051346114620154151014030 0ustar0000000000000000# Changelog ## v2.15.7 - 2024-05-12 Release notes: * This release fixes potential bugs. * The hash that Stack uses to distinguish one build plan from another has changed for plans that set (as opposed to unset) manually Cabal flags for immutable dependencies. This will cause Stack to rebuild dependencies for such plans. **Changes since v2.15.5:** Major changes: * Stack 2.15.5 and earlier cannot build with Cabal (the library) version `3.12.0.0`. Stack can now build with that Cabal version. 
Behavior changes: * Stack's `StackSetupShim` executable, when called with `repl` and `stack-initial-build-steps`, no longer uses Cabal's `replHook` to apply `initialBuildSteps` but takes a more direct approach. Bug fixes: * Fix a regression introduced in Stack 2.15.1 that caused a 'no operation' `stack build` to be slower than previously. * The hashes that Stack uses to distinguish one build plan from another now include the Cabal flags for immutable dependencies set manually. Previously, in error, only such flags that were unset manually were included. ## v2.15.5 - 2024-03-28 Release notes: * This release fixes potential bugs. **Changes since v2.15.3:** Behavior changes: * Following the handover of the Stackage project to the Haskell Foundation, the default value of the `urls` key is `latest-snapshot: https://stackage-haddock.haskell.org/snapshots.json`. * Stack no longer includes the snapshot package database when compiling the setup executable for a package with `build-type: Configure`. ## v2.15.3 - 2024-03-07 Release notes: * With one exception, this release fixes bugs. **Changes since v2.15.1:** Behavior changes: * `stack path --global-config`, `--programs`, and `--local-bin` no longer set up Stack's environment. Bug fixes: * Due to a bug, Stack 2.15.1 did not support versions of GHC before 8.2. Stack now supports GHC versions from 8.0. * `--haddock-for-hackage` does not ignore `--haddock-arguments`. * On Windows, package locations that are Git repositories with submodules now work as intended. * The `ghc`, `runghc` and `runhaskell` commands accept `--package` values that are a list of package names or package identifiers separated by spaces and, in the case of package identifiers, in the same way as if they were specified as targets to `stack build`. ## v2.15.1 - 2024-02-09 Release notes: * After an upgrade from an earlier version of Stack, on first use only, Stack 2.15.1 may warn that it had trouble loading the CompilerPaths cache. 
* The hash used as a key for Stack's pre-compiled package cache has changed, following the dropping of support for Cabal versions older than `1.24.0.0`. **Changes since v2.13.1:** Behavior changes: * Stack does not leave `*.hi` or `*.o` files in the `setup-exe-src` directory of the Stack root, and deletes any corresponding to a `setup-.hs` or `setup-shim-.hs` file, to avoid GHC issue [#21250](https://gitlab.haskell.org/ghc/ghc/-/issues/21250). * If Stack's Nix integration is not enabled, Stack will notify the user if a `nix` executable is on the PATH. This usually indicates the Nix package manager is available. In YAML configuration files, the `notify-if-nix-on-path` key is introduced, to allow the notification to be muted if unwanted. * Drop support for Intero (end of life in November 2019). * `stack path --stack-root` no longer sets up Stack's environment and does not load Stack's configuration. * Stack no longer locks on configuration, so packages (remote and local) can be configured in parallel. This increases the effective concurrency of builds that before would use fewer threads. Reconsider your `--jobs` setting accordingly. See [#84](https://github.com/commercialhaskell/stack/issues/84). * Stack warns that its support for Cabal versions before `2.2.0.0` is deprecated and may be removed in the next version of Stack. Removal would mean that projects using snapshots earlier than `lts-12.0` or `nightly-2018-03-18` (GHC 8.4.1) might no longer build. See [#6377](https://github.com/commercialhaskell/stack/issues/6377). * If Stack's `--resolver` option is not specified, Stack's `unpack` command with a package name will seek to update the package index before seeking to download the most recent version of the package in the index. * If the version of Cabal (the library) provided with the specified GHC can copy specific components, Stack will copy only the components built and will not build all executable components at least once. 
Other enhancements: * Consider GHC 9.8 to be a tested compiler and remove warnings. * Stack can build packages with dependencies on public sub-libraries of other packages. * Add flag `--no-init` to Stack's `new` command to skip the initialisation of the newly-created project for use with Stack. * The HTML file paths produced at the end of `stack haddock` are printed on separate lines and without a trailing dot. * Add option of the form `--doctest-option=` to `stack build`, where `doctest` is a program recognised by versions of the Cabal library from `1.24.0.0`. * Experimental: Add flag `--haddock-for-hackage` to Stack's `build` command (including the `haddock` synonym for `build --haddock`) to enable building project packages with flags to generate Haddock documentation, and an archive file, suitable for upload to Hackage. The form of the Haddock documentation generated for other packages is unaffected. * Experimental: Add flag `--documentation` (`-d` for short) to Stack's `upload` command to allow uploading of documentation for packages to Hackage. * `stack new` no longer rejects project templates that specify a `package.yaml` in a subdirectory of the project directory. * Stack will notify the user if Stack has not been tested with the version of GHC that is being used or a version of Cabal (the library) that has been found. In YAML configuration files, the `notify-if-ghc-untested` and `notify-if-cabal-untested` keys are introduced, to allow the notification to be muted if unwanted. * The compiler version is included in Stack's build message (e.g. `stack> build (lib + exe + test) with ghc-9.6.4`). * Add flag `--candidate` to Stack's `unpack` command, to allow package candidates to be unpacked locally. * Stack will notify the user if a specified architecture value is unknown to Cabal (the library). In YAML configuration files, the `notify-if-arch-unknown` key is introduced, to allow the notification to be muted if unwanted. 
* Add option `--filter ` to Stack's `ls dependencies text` command to filter out an item from the results, if present. The item can be `$locals` for all project packages. * Add option `--snapshot` as synonym for `--resolver`. * Add the `config set snapshot` command, corresponding to the `config set resolver` command. Bug fixes: * Fix the `Curator` instance of `ToJSON`, as regards `expect-haddock-failure`. * Better error message if a `resolver:` or `snapshot:` value is, in error, a YAML number. * Stack accepts all package names that are, in fact, acceptable to Cabal. * Stack's `sdist` command can check packages with names that include non-ASCII characters. ## v2.13.1 - 2023-09-29 Release notes: * Further to the release notes for Stack 2.3.1, the `-static` suffix has been removed from the statically linked Linux/x86_64 binaries. * The binaries for Linux/Aarch64 are now statically linked. * Binaries are now provided for macOS/AArch64. **Changes since v2.11.1:** Behavior changes: * Build artefacts are placed in `.stack-work/dist//` (hashed to a shorter path on Windows), rather than `.stack-work/dist//`. This allows build artifacts to be distinguished by GHC version. * By default, the `stack build` progress bar is capped to a length equal to the terminal width. * When building GHC from source, Stack no longer uses Hadrian's deprecated `--configure`\\`-c` flag and, instead, seeks to run GHC's Python `boot` and sh `configure` scripts, and ensure that the `happy` and `alex` executables are on the PATH. * When auto-detecting `--ghc-build` on Linux, the `musl` GHC build only is considered a possible GHC build if `libc.musl-x86_64.so.1` is found in `\lib` or `\lib64`. * No longer supports Cabal versions older than `1.24.0.0`. This means projects using snapshots earlier than `lts-7.0` or `nightly-2016-05-26` (GHC 8.0.1) will no longer build. GHC 8.0.1 comes with Haddock 2.17.2. * When unregistering many packages in a single step, Stack can now do that efficiently. 
Stack no longer uses GHC-supplied `ghc-pkg unregister` (which is, currently, slower). * `stack hpc report`, `stack list`, `stack templates` and `stack uninstall` output their information to the standard output stream rather than to the standard error stream. Logging is still to the standard error stream. * `stack upgrade` no longer assumes that binary upgrade is not supported on a AArch64 machine architecture. Other enhancements: * Consider GHC 9.6 to be a tested compiler and remove warnings. * Consider Cabal 3.10 to be a tested library and remove warnings. * Bump to Hpack 0.36.0. * Depend on `pantry-0.9.2`, for support for long filenames and directory names in archives created by `git archive`. * Avoid the duplicate resolving of usage files when parsing `*.hi` files into a set of modules and a collection of resolved usage files. See [#6123](https://github.com/commercialhaskell/stack/pull/6123). * Add composable component type flags `--exes`, `--tests` and `--benchmarks` to Stack's `ide targets` command, to list only those components. * `stack --verbose` excludes lengthy information about build plan construction in the debug output by default. The new `stack --[no-]plan-in-log` flag enables or disables the inclusion of the information in the debug output. * In YAML configuration files, the `casa` key is introduced, which takes precedence over the existing `casa-repo-prefix` key. The latter is deprecated. The new key also allows Stack's use of a Casa (content-addressable storage archive) server to be disabled and the maximum number of keys per request to be configured. The default Casa prefix references https://casa.stackage.org, instead of https://casa.fpcomplete.com. * Add option `--progress-bar=` to Stack's `build` command to configure the format of the progress bar, where `` is one of `none`, `count-only` (only the package count), `capped` (capped to a length equal to the terminal width) and `full` (the previous format). 
Bug fixes: * Restore `stack sdist --pvp-bounds lower` (broken with Stack 2.9.1). * Restore building of Stack with Cabal flag `disable-git-info` (broken with Stack 2.11.1). * With `stack hoogle`, avoid the message `Minimum version is hoogle-5.0. Found acceptable hoogle- in your index, requiring its installation.` when a `hoogle` executable has already been found on the `PATH`. * Stack's sanity check on a selected GHC now passes GHC flag `-hide-all-packages`, stopping GHC from looking for a package environment in default locations. * Restore Stack script files without extensions (broken with Stack 2.11.1). * Restore message suffix `due to warnings` with `dump-logs: warning` (broken with Stack 2.11.1). * On Windows, the `local-programs-path` directory can now be on a different drive to the system temporary directory and MSYS2 will still be installed. ## v2.11.1 - 2023-05-18 **Changes since v2.9.3:** Behavior changes: * Add flag `--[no-]-only-local-bin` to Stack's `upgrade` command for a binary upgrade. If the Stack executable is `my-stack`, the default is `my-stack upgrade --only-local-bin` where previously it was, effectively, `my-stack upgrade --no-only-local-bin`. If the Stack executable is `stack`, the default is `stack upgrade --no-only-local-bin`, the same behaviour as previously. * Use `$XDG_CACHE_HOME/stack/ghci-script`, rather than `/haskell-stack-ghci` (where `` is the directory yielded by the `temporary` package's `System.IO.Temp.getCanonicalTemporaryDirectory`), as the base location for GHCi script files generated by `stack ghci` or `stack repl`. See [#5203](https://github.com/commercialhaskell/stack/issues/5203) * Drop support for `Cabal` versions before 1.22 and, consequently, GHC versions before 7.10. * `stack ghci` and `stack repl` now take into account the values of `default-language` keys in Cabal files, like they take into account the values of `default-extensions` keys. 
* Removed `--ghc-paths`, `--global-stack-root` and `--local-bin-path` flags for `stack path`, deprecated in Stack 1.1.0 in favour of `--programs`, `--stack-root` and `--local-bin` respectively. * On Windows, `stack upgrade` always renames the file of the running Stack executable (adding extension `.old`) before attempting to write to the original file name. * On Windows, `stack upgrade` does not offer `sudo` command alternatives if attempting to write to the original file name of the running Stack executable results in a 'Permission' error. Other enhancements: * Add options of the form `--PROG-option=` to `stack build`, where `PROG` is a program recognised by the Cabal library and one of `alex`, `ar`, `c2hs`, `cpphs`, `gcc`, `greencard`, `happy`, `hsc2hs`, `hscolour`, `ld`, `pkg-config`, `strip` and `tar`. If Cabal uses the program during the configuration step, the argument is passed to it. * By default all `--PROG-option` options are applied to all project packages. This behaviour can be changed with new configuration option `apply-prog-options`. * Add flag `--[no-]use-root` to `stack script` (default disabled). Used with `--compile` or `--optimize`, when enabled all compilation outputs (including the executable) are written to a script-specific location in the `scripts` directory of the Stack root rather than the script's directory, avoiding clutter of the latter directory. * Better error message if the value of the `STACK_WORK` environment variable or `--work-dir` option is not a valid relative path. * Stack will use the value of the `GH_TOKEN`, or `GITHUB_TOKEN`, environment variable as credentials to authenticate its GitHub REST API requests. * `stack uninstall` also shows how to uninstall Stack-supplied tools. Bug fixes: * Fix incorrect warning if `allow-newer-deps` are specified but `allow-newer` is `false`. See [#6086](https://github.com/commercialhaskell/stack/issues/6086). 
* `stack build` with `--file-watch` or `--file-watch-poll` outputs 'pretty' error messages, as intended. See [#5978](https://github.com/commercialhaskell/stack/issues/5978). * `stack build` unregisters any project packages for the sub libraries of a project package that is to be unregistered. See [#6046](https://github.com/commercialhaskell/stack/issues/6046). * The warning that sublibrary dependency is not supported is no longer triggered by internal libraries. ## v2.9.3.1 - 2023-06-22 Hackage-only release of the `stack` package: * Supports building against snapshot Stackage LTS Haskell 21.0 (GHC 9.4.5), without extra-deps. * Supports build with `persistent-2.14.5.0`, using CPP directives. * Supports build with `unix-compat-0.7`, by removing reliance on the module `System.PosixCompat.User` removed in that package. * Includes `cabal.project` and `cabal.config` files in the package. ## v2.9.3 - 2022-12-16 **Changes since v2.9.1:** Behavior changes: * In YAML configuration files, the `package-index` key is introduced which takes precedence over the existing `package-indices` key. The latter is deprecated. * In YAML configuration files, the `hackage-security` key of the `package-index` key or the `package-indices` item can be omitted, and the Hackage Security configuration for the item will default to that for the official Hackage server. See [#5870](https://github.com/commercialhaskell/stack/issues/5870). * Add the `stack config set package-index download-prefix` command to set the location of Stack's package index in YAML configuration files. * `stack setup` with the `--no-install-ghc` flag warns that the flag and the command are inconsistent and now takes no action. Previously the flag was silently ignored. * To support the Haskell Foundation's [Haskell Error Index](https://errors.haskell.org/) initiative, all Stack error messages generated by Stack itself begin with an unique code in the form `[S-nnnn]`, where `nnnn` is a four-digit number. 
* Test suite executables that seek input on the standard input stream (`stdin`) will not throw an exception. Previously, they would throw an exception, consistent with Cabal's 'exitcode-stdio-1.0' test suite interface specification. Pass the flag `--no-tests-allow-stdin` to `stack build` to enforce Cabal's specification. See [#5897](https://github.com/commercialhaskell/stack/issues/5897) Other enhancements: * Help documentation for `stack upgrade` warns that if GHCup is used to install Stack, only GHCup should be used to upgrade Stack. That is because GHCup uses an executable named `stack` to manage versions of Stack, that Stack will likely overwrite on upgrade. * Add `stack ls dependencies cabal` command, which lists dependencies in the format of exact Cabal constraints. * Add `STACK_XDG` environment variable to use the XDG Base Directory Specification for the Stack root and Stack's global YAML configuration file, if the Stack root location is not set on the command line or by using the `STACK_ROOT` environment variable. * Add `stack path --global-config`, to yield the full path of Stack's user-specific global YAML configuration file (`config.yaml`). * Experimental: Add option `allow-newer-deps`, which allows users to specify a subset of dependencies for which version bounds should be ignored (`allow-newer-deps: ['foo', 'bar']`). This field has no effect unless `allow-newer` is enabled. Bug fixes: * Fix ambiguous module name `Distribution.PackageDescription`, if compiling `StackSetupShim` with `Cabal-syntax-3.8.1.0` in package database. See [#5886](https://github.com/commercialhaskell/stack/pull/5886). * In YAML configuration files, if the `package-indices` key (or the `hackage-security` key of its item) is omitted, the expiration of timestamps is now ignored, as intended. 
See Pantry [#63](https://github.com/commercialhaskell/pantry/pull/63) ## v2.9.1 - 2022-09-19 **Changes since v2.7.5:** Release notes: * After an upgrade from an earlier version of Stack, on first use only, Stack 2.9.1 may warn that it had trouble loading the CompilerPaths cache. * The support from the Stack team for binary releases now includes Linux/AArch64 and is limited to: * Linux 64-bit/x86_64 (statically linked) * Linux AArch64 (dynamically linked) * macOS x86_64 * Windows 64-bit/x86_64 Behavior changes: * `stack build --coverage` will generate a unified coverage report, even if there is only one `*.tix` file, in case a package has tested the library of another package that has not tested its own library. See [#5713](https://github.com/commercialhaskell/stack/issues/5713) * `stack --verbose` no longer includes the lengthy raw snapshot layer (rsl) in the debug output by default. The new `stack --[no-]rsl-in-log` flag enables or disables the inclusion of the rsl in the debug output. Other enhancements: * Consider GHC 9.2 and 9.4 to be tested compilers and remove warnings. * Consider Cabal 3.6 and 3.8 to be tested libraries and remove warnings. * Bump to Hpack 0.35.0. * On Windows, the installer now sets `DisplayVersion` in the registry, enabling tools like `winget` to properly read the version number. * Adds flag `--script-no-run-compile` (disabled by default) that uses the `--no-run` option with `stack script` (and forces the `--compile` option). This enables a command like `stack --script-no-run-compile Script.hs` to behave like `stack script --no-run --compile -- Script.hs` but without having to list all the `` in the Stack interpreter options comment in `Script.hs` on the command line. That may help test that scripts compile in CI (continuous integration). 
See [#5755](https://github.com/commercialhaskell/stack/issues/5755) * Fuller help is provided at the command line if a subcommand is missing (for example, `stack ls` now yields the equivalent of `stack ls --help`). See [#809](https://github.com/commercialhaskell/stack/issues/809) * Add build option `--cabal-verbosity=VERBOSITY` to specify the Cabal verbosity level (the option accepts Cabal's numerical and extended syntax). See [#1369](https://github.com/commercialhaskell/stack/issues/1369) * Add the possibility of a `sh` script to customise fully GHC installation. See [#5585](https://github.com/commercialhaskell/stack/pull/5585) * `tools` subcommand added to `stack ls`, to list stack's installed tools. * `stack uninstall` shows how to uninstall Stack. * `--ghc-variant` accepts `int-native` as a variant. Bug fixes: * Fix `stack clean --full`, so that the files to be deleted are not in use. See [#5714](https://github.com/commercialhaskell/stack/issues/5714) * Fix an inconsistency in the pretty formatting of the output of `stack build --coverage` * Fix repeated warning about missing parameters when using `stack new` * Include `pantry-0.5.6`: Remove operational and mirror keys from bootstrap key set [#53](https://github.com/commercialhaskell/pantry/pull/53) * Pass any CPP options specified via `cpp-options:` in the Cabal file to GHCi using GHC's `-optP` flag. See [#5608](https://github.com/commercialhaskell/stack/pull/5608) * On Unix-like operating systems, respect the `with-gcc` option when installing GHC. See [#5609](https://github.com/commercialhaskell/stack/pull/5609) * Fixed logic in `get_isa()` in `get-stack.sh` to exclude systems that don't have x86 in their `uname -m` output. See [5792](https://github.com/commercialhaskell/stack/issues/5792). * Fixed output of `stack ls snapshots local` on Windows, to behave like that on Unix-like operating systems. 
* Fix non-deterministic test failures when executing a test suite for a multi-project repository with parallelism enabled. See [#5024](https://github.com/commercialhaskell/stack/issues/5024) ## v2.7.5 - 2022-03-06 **Changes since v2.7.3:** Behavior changes: * Cloning git repositories isn't per sub-directory anymore, see [#5411](https://github.com/commercialhaskell/stack/issues/5411) Other enhancements: * `stack setup` supports installing GHC for macOS aarch64 (M1) * `stack upload` supports authentication with a Hackage API key (via `HACKAGE_KEY` environment variable). Bug fixes: * Ensure that `extra-path` works for case-insensitive `PATH`s on Windows. See [rio#237](https://github.com/commercialhaskell/rio/pull/237) * Fix handling of overwritten `ghc` and `ghc-pkg` locations. [#5597](https://github.com/commercialhaskell/stack/pull/5597) * Fix failure to find package when a dependency is shared between projects. [#5680](https://github.com/commercialhaskell/stack/issues/5680) * `stack ghci` now uses package flags in `stack.yaml` [#5434](https://github.com/commercialhaskell/stack/issues/5434) ## v2.7.3 - 2021-07-20 **Changes since v2.7.1:** Other enhancements: * `stack upgrade` will download from `haskellstack.org` before trying `github.com`. See [#5288](https://github.com/commercialhaskell/stack/issues/5288) * `stack upgrade` makes less assumptions about archive format. See [#5288](https://github.com/commercialhaskell/stack/issues/5288) * Add a `--no-run` flag to the `script` command when compiling. Bug fixes: * GHC source builds work properly for recent GHC versions again. See [#5528](https://github.com/commercialhaskell/stack/issues/5528) * `stack setup` always looks for the unpacked directory name to support different tar file naming conventions. See [#5545](https://github.com/commercialhaskell/stack/issues/5545) * Bump `pantry` version for better OS support. 
See [pantry#33](https://github.com/commercialhaskell/pantry/issues/33) * When building the sanity check for a new GHC install, make sure to clear `GHC_PACKAGE_PATH`. * Specifying GHC RTS flags in the `stack.yaml` no longer fails with an error. [#5568](https://github.com/commercialhaskell/stack/pull/5568) * `stack setup` will look in sandboxed directories for executables, not relying on `findExecutables. See [GHC issue 20074](https://gitlab.haskell.org/ghc/ghc/-/issues/20074) * Track changes to `setup-config` properly to avoid reconfiguring on every change. See [#5578](https://github.com/commercialhaskell/stack/issues/5578) ## v2.7.1 - 2021-05-07 **Changes since v2.5.1.1:** Behavior changes: * `stack repl` now always warns about GHCi problems with loading multiple packages. It also sets now proper working directory when invoked with one package. See [#5421](https://github.com/commercialhaskell/stack/issues/5421) * `custom-setup` dependencies are now properly initialized for `stack dist`. This makes `explicit-setup-deps` no longer required and that option was removed. See [#4006](https://github.com/commercialhaskell/stack/issues/4006) Other enhancements: * Consider GHC 9.0 to be a tested compiler and remove warnings. * Consider Cabal 3.6 to be a tested library and remove warnings. * Nix integration now passes `ghcVersion` (in addition to existing `ghc`) to `shell-file` as an identifier that can be looked up in a compiler attribute set. * Nix integration now allows Nix integration if the user is ready in nix-shell. This gets rid of "In Nix shell but reExecL is False" error. * `stack list` is a new command to list package versions in a snapshot. See [#5431](https://github.com/commercialhaskell/stack/pull/5431) * `custom-preprocessor-extensions` is a new configuration option for allowing Stack to be aware of any custom preprocessors you have added to `Setup.hs`. 
See [#3491](https://github.com/commercialhaskell/stack/issues/3491) * Added `--candidate` flag to `upload` command to upload a package candidate rather than publishing the package. * Error output using `--no-interleaved-output` no longer prepends indenting whitespace. This allows emacs compilation-mode and vim quickfix to locate and track errors. See [#5523](https://github.com/commercialhaskell/stack/pull/5523) Bug fixes: * `stack new` now supports branches other than `master` as default for GitHub repositories. See [#5422](https://github.com/commercialhaskell/stack/issues/5422) * Ignore all errors from `hi-file-parser`. See [#5445](https://github.com/commercialhaskell/stack/issues/5445) and [#5486](https://github.com/commercialhaskell/stack/issues/5486). * Support basic auth in package-indices. See [#5509](https://github.com/commercialhaskell/stack/issues/5509). * Add support for parsing `.hi`. files from GHC 8.10 and 9.0. See [hi-file-parser#2](https://github.com/commercialhaskell/hi-file-parser/pull/2). ## v2.5.1.1 - 2020-12-09 Hackage-only release: * Support build with persistent-2.11.x and optparse-applicative-0.16.x ## v2.5.1 - 2020-10-15 **Changes since v2.3.3** Major changes: * Add the `snapshot-location-base` yaml configuration option, which allows to override the default location of snapshot configuration files. This option affects how snapshot synonyms (LTS/Nightly) are expanded to URLs by the `pantry` library. * `docker-network` configuration key added to override docker `--net` arg Behavior changes: * File watching now takes into account specified targets, old behavior could be restored using the new flag `--watch-all` [#5310](https://github.com/commercialhaskell/stack/issues/5310) Other enhancements: * `stack ls dependencies json` now includes fields `sha256` and `size` for dependencies of `type` `archive` in `location`. 
[#5280](https://github.com/commercialhaskell/stack/issues/5280) * Build failures now show a hint to scroll up to the corresponding section [#5279](https://github.com/commercialhaskell/stack/issues/5279) * Customisable output styles (see `stack --help` and the `--stack-colors` option, and `stack ls stack-colors --help`) now include `info`, `debug`, `other-level`, `secondary` and `highlight`, used with verbose output. Bug fixes: * Fix `stack test --coverage` when using Cabal 3 * `stack new` now generates PascalCase'd module name correctly. [#5376](https://github.com/commercialhaskell/stack/issues/5376) * Connection issues to Casa server no longer cause builds to failure. Casa acts only as an optimizing cache layer, not a critical piece of infrastructure. * Fix modified time busting caches by always calculating sha256 digest during the build process. [#5125](https://github.com/commercialhaskell/stack/issues/5125) ## v2.3.3 - 2020-08-06 **Changes since v2.3.1** Other enhancements: * Add the `stack-developer-mode` flag Bug fixes: * When using the `STACK_YAML` env var with Docker, make the path absolute. * Fix the problem of `stack repl foo:test:bar` failing without a project build before that. See [#5213](https://github.com/commercialhaskell/stack/issues/5213) * Fix `stack sdist` introducing unnecessary sublibrary syntax when using pvp-bounds. See [#5289](https://github.com/commercialhaskell/stack/issues/5289) ## v2.3.1 - 2020-04-29 Release notes: * We have reduced the number of platforms that we support with binary releases. The reason behind this is that we've been slowed down in our release process until now with issues trying to build binaries for less common platforms. 
In order to make sure we can address issues more quickly (like supporting new GHC versions), we're limiting support from the Stack team to: * Linux 64-bit/x86_64 (statically linked) * macOS x86_64 * Windows 64-bit/x86_64 If others want to provide additional binaries, we will definitely be happy for the support. But since our CI system is currently able to produce these three bindists only, that's what we will be providing with the next release. * Since we no longer have dynamically linked Linux binaries, we are removing the `-static` suffix from the static Linux/x86_64 binaries. If you have scripts to download the latest stable Linux/x86_64 binary, update them to use `linux-x86_64` instead of `linux-x86_64-static` (if you are already using the former, nothing needs to change). For this release, both are supported, but the next release will no longer have the `-static` variant. * We are also deprecating the download links at https://stackage.org/stack. See this page for the current installation instructions: https://docs.haskellstack.org/en/stable/install_and_upgrade/. * These are the canonical locations to download the latest stable binaries from, and will continue to be supported going forward: * Linux 64-bit/x86_64 (static): https://get.haskellstack.org/stable/linux-x86_64.tar.gz * macOS x86_64: https://get.haskellstack.org/stable/osx-x86_64.tar.gz * Windows 64-bit/x86_64: https://get.haskellstack.org/stable/windows-x86_64.zip As always, binaries for specific versions are available from the GitHub releases: https://github.com/commercialhaskell/stack/releases. **Changes since v2.1.3.1** Major changes: * `setup-info-locations` yaml configuration now allows overwriting the default locations of `stack-setup-2.yaml`. 
[#5031](https://github.com/commercialhaskell/stack/pull/5031) [#2983](https://github.com/commercialhaskell/stack/issues/2983) [#2913](https://github.com/commercialhaskell/stack/issues/2913) * The `setup-info` configuration key now allows overwriting parts of the default `setup-info` * The `--setup-info-yaml` command line flag now may be used in all Stack commands such as `stack build`, and not only in `stack setup` * The `--setup-info-yaml` may specify multiple locations for `stack-setup.yaml` files. * The `stack upload` can read first reads environment Variable `$HACKAGE_USERNAME` and `$HACKAGE_PASSWORD` if they are missing only then asks for `username` or `password` * Fully remove GHCJS support. * Remove the `freeze` command. It has been replaced by lock files. Behavior changes: * Remove the deprecated `--stack-setup-yaml` command line argument in favor of `--setup-info-yaml`. See [#2647](https://github.com/commercialhaskell/stack/issues/2647) * We now recommend checking in generated Cabal files for repos. When generating lock files for extra-deps that only include `package.yaml` files, a deprecation warning will be generated. Also, those packages will no longer be included in the generated lock files. See [#5210](https://github.com/commercialhaskell/stack/issues/5210). Other enhancements: * Add `build-output-timestamps` flag in yaml. Setting it to true prefixes each build log output line with a timestamp. * Show warning about `local-programs-path` with spaces on windows when running scripts. See [#5013](https://github.com/commercialhaskell/stack/pull/5013) * Add `ls dependencies json` which will print dependencies as JSON. `ls dependencies --tree` is now `ls dependencies tree`. See [#4424](https://github.com/commercialhaskell/stack/pull/4424) * Remove warning for using Stack with GHC 8.8-8.10, and Cabal 3.0-3.2. 
* Allow relative paths in `--setup-info-yaml` and tool paths [#3394](https://github.com/commercialhaskell/stack/issues/3394) * Added the `--only-locals` flag. See [#5272](https://github.com/commercialhaskell/stack/issues/5272) Bug fixes: * Upgrade `pantry`: module mapping insertions into the database are now atomic. Previously, if you SIGTERMed at the wrong time while running a script, you could end up with an inconsistent database state. * `--resolver global` doesn't retrieve snapshots list from the internet because doesn't need it. See [#5103](https://github.com/commercialhaskell/stack/issues/5103) * Fix using relative links in haddocks output. See [#4971](https://github.com/commercialhaskell/stack/issues/4971). * Do not include generated Cabal file information in lock files. See [#5045](https://github.com/commercialhaskell/stack/issues/5045). * Use proper Hoogle executable path when installed automatically. See [#4905](https://github.com/commercialhaskell/stack/issues/4905) * Fix GHC version for batched package unregistration. See [#4951](https://github.com/commercialhaskell/stack/issues/4951) * Use Hoogle from the snapshot used and not the latest version. See [#4905](https://github.com/commercialhaskell/stack/issues/4905) * Resolve "'stty' is not recognized". See [#4901](https://github.com/commercialhaskell/stack/issues/4901) * Fix missing reconfigure check causing errors when a package gets used in multiple projects. See [#5147](https://github.com/commercialhaskell/stack/issues/5147) ## v2.1.3.1 - 2019-07-16 Hackage-only release: * Support persistent-template-2.7.x * Support rio-0.1.11.0 * Add `stack.yaml` back to hackage sdist, and add `snapshot.yaml` ## v2.1.3 - 2019-07-13 **Changes since v2.1.1** Behavior changes: * Disable WAL mode for SQLite3 databases, to improve compatibility with some platforms and filesystems. See [#4876](https://github.com/commercialhaskell/stack/issues/4876). * By default, do not perform expiry checks in Hackage Security. 
See [#4928](https://github.com/commercialhaskell/stack/issues/4928). Other enhancements: * Do not rerun expected test failures. This is mostly a change that will only affect the Stackage Curator use case, but there is now an additional message letting the user know when a previously-failed test case is being rerun. * Move configure information for project packages back to .stack-work to improve caching. See [#4893](https://github.com/commercialhaskell/stack/issues/4893). Bug fixes: * Fix to allow dependencies on specific versions of local git repositories. See [#4862](https://github.com/commercialhaskell/stack/pull/4862) * Allow Stack commands to be run in Nix mode without having a project file available. See [#4854](https://github.com/commercialhaskell/stack/issues/4864). * Removes dependency on gnu-tar for OSX and Linux environment. The `--force-local` option was required only for windows environment. * Properly wait for the `tar` subprocess to complete before returning, thereby avoiding a SIGTERM screwing up GHC installation. See [#4888](https://github.com/commercialhaskell/stack/issues/4888). * Use package complete locations from lock files when resolving dependencies in `extra-deps`. See [#4887](https://github.com/commercialhaskell/stack/issues/4887). * Set the `HASKELL_DIST_DIR` environment to a proper package dist directory so `doctest` is able to load modules autogenerated by Cabal. * Expose package library when running tests. * Fix support for non-ASCII module names. See [#4938](https://github.com/commercialhaskell/stack/issues/4938) Other changes: * Rename `pantry-tmp` package back to `pantry`, now that we have gained maintainership (which had been used by someone else for a candidate-only test that made it look like the name was free but prevented uploading a real package). ## v2.1.1.1 - 2019-06-14 Hackage-only release that removes `stack.yaml` from the sdist. 
This is because `stack.yaml` now defines a multi-package project, whereas Hackage works on the basis on individual packages (see [#4860](https://github.com/commercialhaskell/stack/issues/4860)) If building a `stack` executable for distribution, please download the source code from https://github.com/commercialhaskell/stack/releases/tag/v2.1.1 and build it using Stack itself in order to ensure identical behaviour to official binaries. This package on Hackage is provided for convenience and bootstrapping purposes. ## v2.1.1 - 2019-06-13 The Stack 2 release represents a series of significant changes to how Stack works internally. For the vast majority of cases, these changes are backwards compatible, in that existing projects will continue to build in the same way with Stack 2 as they did with Stack 1. The large version bump is due to the fundamental internal changes to cache handling, database storage (using SQLite in place of binary files), implicit snapshots (which greatly improve the precompiled cache), and moving to Pantry. We have also removed some less used features, as listed below. **Changes since v1.9.3** Major changes: * Switch over to pantry for managing packages. This is a major change to Stack's internals, and affects user-visible behavior in a few places. Some highlights: * Drop support for multiple package indices and legacy `00-index.tar` style indices. See [#4137](https://github.com/commercialhaskell/stack/issues/4137). * Support for archives and repos in the `packages` section has been removed. Instead, you must use `extra-deps` for such dependencies. `packages` now only supports local filepaths. * Add support for Git repositories containing (recursive) submodules. * Addition of new configuration options for specifying a "pantry tree" key, which provides more reproducibility around builds, and (in the future) will be used for more efficient package content downloads. You can also specify package name and version for more efficient config parsing. 
* __NOTE__ The new `stack freeze` command provides support for automatically generating this additional information. * Package contents and metadata are stored in an SQLite database in place of files on the filesystem. The `pantry` library can be used for interacting with these contents. * Internally, Stack has changed many datatypes, including moving to Cabal's definition of many data types. As a result of such changes, existing cache files will in general be invalidated, resulting in Stack needing to rebuild many previously cached builds in the new version. Sorry :(. * A new command, `stack freeze` has been added which outputs project and snapshot definitions with dependencies pinned to their exact versions. * The `ignore-revision-mismatch` setting is no longer needed, and has been removed. * Overriding GHC boot packages results in any other GHC boot packages depending on it being no longer available as a dependency, such packages need to be added explicitly when needed. See [#4510] (https://github.com/commercialhaskell/stack/issues/4510). * Cabal solver integration was not updated to support newer `cabal-install` versions so `stack solver` command was removed as well as a related option `--solver` from `stack new` and `stack init`. * Upgrade to Cabal 2.4 * Note that, in this process, the behavior of file globbing has been modified to match that of Cabal. In particular, this means that for Cabal spec versions less than 2.4, `*.txt` will match `foo.txt`, but not `foo.2.txt`. * Remove the `stack image` command. With the advent of Docker multistage builds, this functionality is no longer useful. For an example, please see [Building Haskell Apps with Docker](https://www.fpcomplete.com/blog/2017/12/building-haskell-apps-with-docker). * Experimental: Support building GHC from source * Stack now supports building and installing GHC from source. 
The built GHC is uniquely identified by a commit id and an Hadrian "flavour" (Hadrian is the newer GHC build system), hence `compiler` can be set to use a GHC built from source with `ghc-git-COMMIT-FLAVOUR` * `stack.yaml` now supports a `configure-options`, which are passed directly to the `configure` step in the Cabal build process. See [#1438](https://github.com/commercialhaskell/stack/issues/1438) * Remove support for building GHCJS itself. Future releases of Stack may remove GHCJS support entirely. * Support for lock files for pinning exact project dependency versions Behavior changes: * `stack.yaml` now supports `snapshot`: a synonym for `resolver`. See [#4256](https://github.com/commercialhaskell/stack/issues/4256) * `stack script` now passes `-i -idir` in to the `ghc` invocation. This makes it so that the script can import local modules, and fixes an issue where `.hs` files in the current directory could affect interpretation of the script. See [#4538](https://github.com/commercialhaskell/stack/pull/4538) * When using `stack script`, custom snapshot files will be resolved relative to the directory containing the script. * Remove the deprecated `--upgrade-cabal` flag to `stack setup`. * Support the `drop-packages` field in `stack.yaml` * Remove the GPG signing code during uploads. The GPG signatures have never been used yet, and there are no plans to implement signature verification. * Remove the `--plain` option for the `exec` family of commands * Always use the `--exact-configuration` Cabal configuration option when building (should mostly be a non-user-visible enhancement). * No longer supports Cabal versions older than `1.19.2`. This means projects using snapshots earlier than `lts-3.0` or `nightly-2015-05-05` will no longer build. * Remove the `stack docker cleanup` command. 
Docker itself now has [`docker image prune`](https://docs.docker.com/engine/reference/commandline/image_prune/) and [`docker container prune`](https://docs.docker.com/engine/reference/commandline/container_prune/), which you can use instead. * Interleaved output is now turned on by default, see [#4702](https://github.com/commercialhaskell/stack/issues/4702). In addition, the `packagename> ` prefix is no longer included in interleaved mode when only building a single target. * The `-fhide-source-paths` GHC option is now enabled by default and can be disabled via the `hide-source-paths` configuration option in `stack.yaml`. See [#3784](https://github.com/commercialhaskell/stack/issues/3784) * Stack will reconfigure a package if you modify your `PATH` environment variable. See [#3138](https://github.com/commercialhaskell/stack/issues/3138). * For GHC 8.4 and later, disable the "shadowed dependencies" workaround. This means that Stack will no longer have to force reconfigures as often. See [#3554](https://github.com/commercialhaskell/stack/issues/3554). * When building a package, Stack takes a lock on the dist directory in use to avoid multiple runs of Stack from trampling each others' files. See [#2730](https://github.com/commercialhaskell/stack/issues/2730). * Stack will check occasionally if there is a new version available and prompt the user to upgrade. This will not incur any additional network traffic, as it will piggy-back on the existing Hackage index updates. You can set `recommend-stack-upgrade: false` to bypass this. See [#1681](https://github.com/commercialhaskell/stack/issues/1681). * `stack list-dependencies` has been removed in favour of `stack ls dependencies`. * The new default for `--docker-auto-pull` is enabled. See [#3332](https://github.com/commercialhaskell/stack/issues/3332). Other enhancements: * Support MX Linux in get-stack.sh. Fixes [#4769](https://github.com/commercialhaskell/stack/issues/4769). 
* Defer loading up of files for project packages. This allows us to get plan construction errors much faster, and avoid some unnecessary work when only building a subset of packages. This is especially useful for the curator use case. * Existing global option `--color=WHEN` is now also available as a non-project-specific yaml configuration parameter `color:`. * Adopt the standard proposed at http://no-color.org/, that color should not be added by default if the `NO_COLOR` environment variable is present. * New command `stack ls stack-colors` lists the styles and the associated 'ANSI' control character sequences that Stack uses to color some of its output. See `stack ls stack-colors --help` for more information. * New global option `--stack-colors=STYLES`, also available as a non-project-specific yaml configuration parameter, allows a Stack user to redefine the default styles that Stack uses to color some of its output. See `stack --help` for more information. * British English spelling of 'color' (colour) accepted as an alias for `--color`, `--stack-colors`, `stack ls stack-colors` at the command line and for `color:` and `stack-colors:` in yaml configuration files. * New build option `--ddump-dir`. (See [#4225](https://github.com/commercialhaskell/stack/issues/4225)) * Stack parses and respects the `preferred-versions` information from Hackage for choosing latest version of a package in some cases, e.g. `stack unpack packagename`. * The components output in the `The main module to load is ambiguous` message now include package names so they can be more easily copy-pasted. * Git repos are shared across multiple projects. See [#3551](https://github.com/commercialhaskell/stack/issues/3551) * Use en_US.UTF-8 locale by default in pure Nix mode so programs won't crash because of Unicode in their output [#4095](https://github.com/commercialhaskell/stack/issues/4095) * Add `--tree` to `ls dependencies` to list dependencies as tree. 
[#4101](https://github.com/commercialhaskell/stack/issues/4101) * Add `--pedantic` to `ghci` to run with `-Wall` and `-Werror` [#4463](https://github.com/commercialhaskell/stack/issues/4463) * Add `--cabal-files` flag to `stack ide targets` command. * Add `--stdout` flag to all `stack ide` subcommands. * Use batches when unregistering packages with `ghc-pkg`. (See [#2662](https://github.com/commercialhaskell/stack/issues/2662)) * `get-stack` script now works on Windows CI machines of Appveyor, Travis and Azure Pipelines. See [#4535](https://github.com/commercialhaskell/stack/issues/4535)/ * Show snapshot being used when `stack ghci` is invoked outside of a project directory. See [#3651](https://github.com/commercialhaskell/stack/issues/3651) * The script interpreter now accepts a `--extra-dep` flag for adding packages not present in the snapshot. Currently, this only works with packages from Hackage, not Git repos or archives. * When using the script interpreter with `--optimize` or `--compile`, Stack will perform an optimization of checking whether a newer executable exists, making reruns significantly faster. There's a downside to this, however: if you have a multifile script, and change one of the dependency modules, Stack will not automatically detect and recompile. * `stack clean` will delete the entire `.stack-work/dist` directory, not just the relevant subdirectory for the current GHC version. See [#4480](https://github.com/commercialhaskell/stack/issues/4480). * Add `stack purge` as a shortcut for `stack clean --full`. See [#3863](https://github.com/commercialhaskell/stack/issues/3863). * Both `stack dot` and `stack ls dependencies` accept a `--global-hints` flag to bypass the need for an installed GHC. See [#4390](https://github.com/commercialhaskell/stack/issues/4390). * Add the `stack config env` command for getting shell script environment variables. See [#620](https://github.com/commercialhaskell/stack/issues/620). 
* Less verbose output from `stack setup` on Windows. See [#1212](https://github.com/commercialhaskell/stack/issues/1212). * Add an optional `ignore-expiry` flag to the `hackage-security` section of the `~/.stack/config.yaml`. It allows to disable timestamp expiration verification just like `cabal --ignore-expiry` does. The flag is not enabled by default so that the default functionality is not changed. * Include default values for most command line flags in the `--help` output. See [#893](https://github.com/commercialhaskell/stack/issues/893). * Set the `GHC_ENVIRONMENT` environment variable to specify dependency packages explicitly when running test. This is done to prevent ambiguous module name errors in `doctest` tests. * `get-stack` script now works on Windows CI machines of Appveyor, Travis and Azure Pipelines. See [#4535](https://github.com/commercialhaskell/stack/issues/4535) * Warn when a Docker image does not include a `PATH` environment variable. See [#2472](https://github.com/commercialhaskell/stack/issues/2742) * When using `system-ghc: true`, Stack will now find the appropriate GHC installation based on the version suffix, allowing you to more easily switch between various system-installed GHCs. See [#2433](https://github.com/commercialhaskell/stack/issues/2433). * `stack init` will now support create a `stack.yaml` file without any local packages. See [#2465](https://github.com/commercialhaskell/stack/issues/2465) * Store caches in SQLite database instead of files. * No longer use "global" Docker image database (`docker.db`). * User config files are respected for the script command. See [#3705](https://github.com/commercialhaskell/stack/issues/3705), [#3887](https://github.com/commercialhaskell/stack/issues/3887). * Set the `GHC_ENVIRONMENT` environment variable to `-` to tell GHC to ignore any such files when GHC is new enough (>= 8.4.4), otherwise simply unset the variable. 
This allows Stack to have control of package databases when running commands like `stack exec ghci`, even in the presence of implicit environment files created by `cabal new-build`. See [#4706](https://github.com/commercialhaskell/stack/issues/4706). * Use a database cache table to speed up discovery of installed GHCs * You can specify multiple `--test-arguments` options. See [#2226](https://github.com/commercialhaskell/stack/issues/2226) * Windows terminal width detection is now done. See [#3588](https://github.com/commercialhaskell/stack/issues/3588) * On Windows, informs users if the 'programs' path contains a space character and further warns users if that path does not have an alternative short ('8 dot 3') name, referencing the `local-programs-path` configuration option. See [#4726](https://github.com/commercialhaskell/stack/issues/4726) * Add `--docker-mount-mode` option to set the Docker volume mount mode for performance tuning on macOS. Bug fixes: * Ignore duplicate files for a single module when a Haskell module was generated from a preprocessor file. See [#4076](https://github.com/commercialhaskell/stack/issues/4076). * Only track down components in current directory if there are no hs-source-dirs found. This eliminates a number of false-positive warnings, similar to [#4076](https://github.com/commercialhaskell/stack/issues/4076). * Handle a change in GHC's hi-dump format around `addDependentFile`, which now includes a hash. See [yesodweb/yesod#1551](https://github.com/yesodweb/yesod/issues/1551) * Fix `subdirs` for git repos in `extra-deps` to match whole directory names. Also fixes for `subdirs: .`. See [#4292](https://github.com/commercialhaskell/stack/issues/4292) * Fix for git packages to update submodules to the correct state. See [#4314](https://github.com/commercialhaskell/stack/pull/4314) * Add `--cabal-files` flag to `stack ide targets` command. * Don't download ghc when using `stack clean`. 
* Support loading in GHCi definitions from symlinked C files. Without this patch, Stack will try to find object files in the directory pointed to by symlinks, while GCC will produce the object files in the original directory. See [#4402](https://github.com/commercialhaskell/stack/pull/4402) * Fix handling of GitHub and URL templates on Windows. See [#4394](https://github.com/commercialhaskell/stack/issues/4394) * Fix `--file-watch` not responding to file modifications when running inside docker on Mac. See [#4506](https://github.com/commercialhaskell/stack/issues/4506) * Using `--ghc-options` with `stack script --compile` now works. * Ensure the detailed-0.9 type tests work. See [#4453](https://github.com/commercialhaskell/stack/issues/4453). * Extra include and lib dirs are now order-dependent. See [#4527](https://github.com/commercialhaskell/stack/issues/4527). * Apply GHC options when building a `Setup.hs` file. See [#4526](https://github.com/commercialhaskell/stack/issues/4526). * Stack handles ABI changes in FreeBSD 12 by differentiating that version from previous. * Help text for the `templates` subcommand now reflects behaviour in Stack 1.9 — that it downloads and shows a help file, rather than listing available templates. * Fix detection of aarch64 platform (this broke when we upgraded to a newer Cabal version). * Docker: fix detecting and pulling missing images with `--docker-auto-pull`. See [#4598](https://github.com/commercialhaskell/stack/issues/4598) * Hackage credentials are not world-readable. See [#2159](https://github.com/commercialhaskell/stack/issues/2159). * Warnings are dumped from logs even when color is enabled. See [#2997](https://github.com/commercialhaskell/stack/issues/2997) * `stack init` will now work for Cabal files with sublibraries. See [#4408](https://github.com/commercialhaskell/stack/issues/4408) * When the Cabal spec version is newer than the global Cabal version, build against the snapshot's Cabal library. 
See [#4488](https://github.com/commercialhaskell/stack/issues/4488) * Docker: fix detection of expected subprocess failures. This fixes downloading a compatible `stack` executable when the host `stack` is not compatible with the Docker image (on Linux), and doesn't show an unnecessary extra error when the in-container re-exec'ed `stack` exits with failure. * The `stack ghci` command's `--ghc-options` flag now parses multiple options. See [#3315](https://github.com/commercialhaskell/stack/issues/3315). ## v1.9.3.1 - 2019-04-18 Hackage-only release with no user facing changes (added compatibility with `rio-0.1.9.2`). ## v1.9.3 - 2018-12-02 Bug fixes: * Stack can now be compiled again inside a directory that does not contain a `.git` directory. See [#4364](https://github.com/commercialhaskell/stack/issues/4364#issuecomment-431600841) * Handle a change in GHC's hi-dump format around `addDependentFile`, which now includes a hash. See [yesodweb/yesod#1551](https://github.com/yesodweb/yesod/issues/1551) * Allow variables to appear in template file names. ## v1.9.1.1 - 2018-11-14 Hackage-only release with no user facing changes. * Stack can now be compiled again inside a directory that does not contain a `.git` directory. See [#4364](https://github.com/commercialhaskell/stack/issues/4364#issuecomment-431600841) ## v1.9.1 - 2018-10-17 Release notes: * Statically linked Linux bindists are back again, thanks to [@nh2](https://github.com/nh2). * We will be deleting the Ubuntu, Debian, CentOS, Fedora, and Arch package repos from `download.fpcomplete.com` soon. These have been deprecated for over a year and have not received new releases, but were left in place for compatibility with older scripts. Major changes: * Upgrade to Cabal 2.4 * Note that, in this process, the behavior of file globbing has been modified to match that of Cabal. In particular, this means that for Cabal spec versions less than 2.4, `*.txt` will match `foo.txt`, but not `foo.2.txt`. 
* `GHCJS` support is being downgraded to 'experimental'. A warning notifying the user of the experimental status of `GHCJS` will be displayed. Behavior changes: * `ghc-options` from `stack.yaml` are now appended to `ghc-options` from `config.yaml`, whereas before they would be replaced. * `stack build` will now announce when sublibraries of a package are being built, in the same way executables, tests, benchmarks and libraries are announced * `stack sdist` will now announce the destination of the generated tarball, regardless of whether or not it passed the sanity checks * The `--upgrade-cabal` option to `stack setup` has been deprecated. This feature no longer works with GHC 8.2 and later. Furthermore, the reason for this flag originally being implemented was drastically lessened once Stack started using the snapshot's `Cabal` library for custom setups. See: [#4070](https://github.com/commercialhaskell/stack/issues/4070). * With the new namespaced template feature, `stack templates` is no longer able to meaningfully display a list of all templates available. Instead, the command will download and display a [help file](https://github.com/commercialhaskell/stack-templates/blob/master/STACK_HELP.md) with more information on how to discover templates. See: [#4039](https://github.com/commercialhaskell/stack/issues/4039) * Build tools are now handled in a similar way to `cabal-install`. In particular, for legacy `build-tools` fields, we use a hard-coded list of build tools in place of looking up build tool packages in a tool map. This both brings Stack's behavior closer into line with `cabal-install`, avoids some bugs, and opens up some possible optimizations/laziness. See: [#4125](https://github.com/commercialhaskell/stack/issues/4125). * Mustache templating is not applied to large files (over 50kb) to avoid performance degradation. See: [#4133](https://github.com/commercialhaskell/stack/issues/4133). * `stack upload` signs the package by default, as documented. 
`--no-signature` turns the signing off. [#3739](https://github.com/commercialhaskell/stack/issues/3739) * In case there is a network connectivity issue while trying to download a template, Stack will check whether that template had been downloaded before. In that case, the cached version will be used. See [#3850](https://github.com/commercialhaskell/stack/issues/3850). * In Stack's script interpreter, `-- stack --verbosity=info script` and `-- stack script --verbosity=info` now have the same effect and both override the `--verbosity=error` default in the interpreter. Previously the default meant the former was equivalent to `-- stack --verbosity=info script --verbosity=error` and the latter was equivalent to `-- stack --verbosity=error script --verbosity=info`, with the subcommand's global option having precedence over the Stack command's global option in each case. See [#5326](https://github.com/commercialhaskell/stack/issues/5326). Other enhancements: * On Windows before Windows 10, --color=never is the default on terminals that can support ANSI color codes in output only by emulation * On Windows, recognise a 'mintty' (false) terminal as a terminal, by default * `stack build` issues a warning when `base` is explicitly listed in `extra-deps` of `stack.yaml` * `stack build` suggests trying another GHC version should the build plan end up requiring unattainable `base` version. * A new sub command `run` has been introduced to build and run a specified executable similar to `cabal run`. If no executable is provided as the first argument, it defaults to the first available executable in the project. * `stack build` missing dependency suggestions (on failure to construct a valid build plan because of missing deps) are now printed with their latest Cabal file revision hash. See [#4068](https://github.com/commercialhaskell/stack/pull/4068). * Added new `--tar-dir` option to `stack sdist`, that allows to copy the resulting tarball to the specified directory. 
* Introduced the `--interleaved-output` command line option and `build.interleaved-output` config value which causes multiple concurrent builds to dump to stderr at the same time with a `packagename> ` prefix. See [#3225](https://github.com/commercialhaskell/stack/issues/3225). * The default retry strategy has changed to exponential backoff. This should help with [#3510](https://github.com/commercialhaskell/stack/issues/3510). * `stack new` now allows template names of the form `username/foo` to download from a user other than `commercialstack` on GitHub, and can be prefixed with the service `github:`, `gitlab:`, or `bitbucket:`. See [#4039](https://github.com/commercialhaskell/stack/issues/4039) * Switch to `githash` to include some unmerged bugfixes in `gitrev`. * Suggestion to add `'allow-newer': true` now shows path to user config file where this flag should be put into [#3685](https://github.com/commercialhaskell/stack/issues/3685) * `stack ghci` now asks which main target to load before doing the build, rather than after * Bump to Hpack 0.29.0 * With GHC 8.4 and later, Haddock is given the `--quickjump` flag. * It is possible to specify the Hackage base URL to upload packages to, instead of the default of `https://hackage.haskell.org/`, by using `hackage-base-url` configuration option. * When using Nix, if a specific minor version of GHC is not requested, the latest minor version in the given major branch will be used automatically. Bug fixes: * `stack ghci` now does not invalidate `.o` files on repeated runs, meaning any modules compiled with `-fobject-code` will be cached between ghci runs. See [#4038](https://github.com/commercialhaskell/stack/pull/4038). * `~/.stack/config.yaml` and `stack.yaml` are now terminated by a newline * The previous release caused a regression where some `stderr` from the `ghc-pkg` command showed up in the terminal. This output is now silenced. * A regression in recompilation checking introduced in v1.7.1 has been fixed. 
See [#4001](https://github.com/commercialhaskell/stack/issues/4001) * `stack ghci` on a package with internal libraries was erroneously looking for a wrong package corresponding to the internal library and failing to load any module. This has been fixed now and changes to the code in the library and the sublibrary are properly tracked. See [#3926](https://github.com/commercialhaskell/stack/issues/3926). * For packages with internal libraries not depended upon, `stack build` used to fail the build process since the internal library was not built but it was tried to be registered. This is now fixed by always building internal libraries. See [#3996](https://github.com/commercialhaskell/stack/issues/3996). * `--no-nix` was not respected under NixOS * Fix a regression which might use a lot of RAM. See [#4027](https://github.com/commercialhaskell/stack/issues/4027). * Order of commandline arguments does not matter anymore. See [#3959](https://github.com/commercialhaskell/stack/issues/3959) * When prompting users about saving their Hackage credentials on upload, flush to stdout before waiting for the response so the prompt actually displays. Also fixes a similar issue with ghci target selection prompt. * If `cabal` is not on PATH, running `stack solver` now prompts the user to run `stack install cabal-install` * `stack build` now succeeds in building packages which contain sublibraries which are dependencies of executables, tests or benchmarks but not of the main library. See [#3787](https://github.com/commercialhaskell/stack/issues/3787). * Sublibraries are now properly considered for coverage reports when the test suite depends on the internal library. Before, Stack was erroring when trying to generate the coverage report, see [#4105](https://github.com/commercialhaskell/stack/issues/4105). * Sublibraries are now added to the precompiled cache and recovered from there when the snapshot gets updated. 
Previously, updating the snapshot when there was a package with a sublibrary in the snapshot resulted in broken builds. This is now fixed, see [#4071](https://github.com/commercialhaskell/stack/issues/4071). * [#4114](https://github.com/commercialhaskell/stack/issues/4114) Stack pretty prints error messages with proper `error` logging level instead of `warning` now. This also fixes self-executing scripts not piping plan construction errors from runhaskell to terminal (issue [#3942](https://github.com/commercialhaskell/stack/issues/3942)). * Fix invalid "While building Setup.hs" when Cabal calls fail. See: [#3934](https://github.com/commercialhaskell/stack/issues/3934) * `stack upload` signs the package by default, as documented. `--no-signature` turns the signing off. [#3739](https://github.com/commercialhaskell/stack/issues/3739) ## v1.7.1 - 2018-04-27 Release notes: * aarch64 (64-bit ARM) bindists are now available for the first time. * Statically linked Linux bindists are no longer available, due to difficulty with GHC 8.2.2 on Alpine Linux. * 32-bit Linux GMP4 bindists for CentOS 6 are no longer available, since GHC 8.2.2 is no longer being built for that platform. Major changes: * Upgrade from Cabal 2.0 to Cabal 2.2 Behavior changes: * `stack setup` no longer uses different GHC configure options on Linux distributions that use GCC with PIE enabled by default. GHC detects this itself since ghc-8.0.2, and Stack's attempted workaround for older versions caused more problems than it solved. * `stack new` no longer initializes a project if the project template contains a `stack.yaml` file. Other enhancements: * A new sub command `ls` has been introduced to Stack to view local and remote snapshots present in the system. Use `stack ls snapshots --help` to get more details about it. * `list-dependencies` has been deprecated. The functionality has to be accessed through the new `ls dependencies` interface. 
See [#3669](https://github.com/commercialhaskell/stack/issues/3669) for details. * Specify User-Agent HTTP request header on every HTTP request. See [#3628](https://github.com/commercialhaskell/stack/issues/3628) for details. * `stack setup` looks for GHC bindists and installations by any OS key that is compatible (rather than only checking a single one). This is relevant on Linux where different distributions may have different combinations of libtinfo 5/6, ncurses 5/6, and gmp 4/5, and will allow simplifying the setup-info metadata YAML for future GHC releases. * The build progress bar reports names of packages currently building. * `stack setup --verbose` causes verbose output of GHC configure process. See [#3716](https://github.com/commercialhaskell/stack/issues/3716) * Improve the error message when an `extra-dep` from a path or git reference can't be found. See [#3808](https://github.com/commercialhaskell/stack/pull/3808) * Nix integration is now disabled on windows even if explicitly enabled, since it isn't supported. See [#3600](https://github.com/commercialhaskell/stack/issues/3600) * `stack build` now supports a new flag `--keep-tmp-files` to retain intermediate files and directories for the purpose of debugging. It is best used with ghc's equivalent flag, i.e. `stack build --keep-tmp-files --ghc-options=-keep-tmp-files`. See [#3857](https://github.com/commercialhaskell/stack/issues/3857) * Improved error messages for snapshot parse exceptions * `stack unpack` now supports a `--to /target/directory` option to specify where to unpack the package into * `stack hoogle` now supports a new flag `--server` that launches local Hoogle server on port 8080. See [#2310](https://github.com/commercialhaskell/stack/issues/2310) Bug fixes: * The script interpreter's implicit file arguments are now passed before other arguments. See [#3658](https://github.com/commercialhaskell/stack/issues/3658). In particular, this makes it possible to pass `-- +RTS ... 
-RTS` to specify RTS arguments used when running the script. * Don't ignore the template `year` parameter in config files, and clarify the surrounding documentation. See [#2275](https://github.com/commercialhaskell/stack/issues/2275). * Benchmarks used to be run concurrently with other benchmarks and build steps. This is non-ideal because CPU usage of other processes may interfere with benchmarks. It also prevented benchmark output from being displayed by default. This is now fixed. See [#3663](https://github.com/commercialhaskell/stack/issues/3663). * `stack ghci` now allows loading multiple packages with the same module name, as long as they have the same filepath. See [#3776](https://github.com/commercialhaskell/stack/pull/3776). * `stack ghci` no longer always adds a dependency on `base`. It is now only added when there are no local targets. This allows it to be used to load code that uses replacements for `base`. See [#3589](https://github.com/commercialhaskell/stack/issues/3589#issuecomment) * `stack ghci` now uses correct paths for autogen files with [#3791](https://github.com/commercialhaskell/stack/issues/3791) * When a package contained sublibraries, Stack was always recompiling the package. This has been fixed now, no recompilation is being done because of sublibraries. See [#3899](https://github.com/commercialhaskell/stack/issues/3899). * The `get-stack.sh` install script now matches manual instructions when it comes to Debian/Fedora/CentOS install dependencies. * Compile Cabal-simple with gmp when using Nix. See [#2944](https://github.com/commercialhaskell/stack/issues/2944) * `stack ghci` now replaces the Stack process with ghci. This improves signal handling behavior. In particular, handling of Ctrl-C. To make this possible, the generated files are now left behind after exit. The paths are based on hashing file contents, and it's stored in the system temporary directory, so this shouldn't result in too much garbage. 
See [#3821](https://github.com/commercialhaskell/stack/issues/3821). ## v1.6.5 - 2018-02-19 Bug fixes: * Some unnecessary rebuilds when no files were changed are now avoided, by having a separate build cache for each component of a package. See [#3732](https://github.com/commercialhaskell/stack/issues/3732). * Correct the behavior of promoting a package from snapshot to local package. This would get triggered when version bounds conflicted in a snapshot, which could be triggered via Hackage revisions for old packages. This also should allow custom snapshots to define conflicting versions of packages without issue. See [Stackage issue #3185](https://github.com/fpco/stackage/issues/3185). * When promoting packages from snapshot to local, we were occasionally discarding the actual package location content and instead defaulting to pulling the package from the index. We now correctly retain this information. Note that if you were affected by this bug, you will likely need to delete the binary build cache associated with the relevant custom snapshot. See [#3714](https://github.com/commercialhaskell/stack/issues/3714). * `--no-rerun-tests` has been fixed. Previously, after running a test we were forgetting to record the result, which meant that all tests always ran even if they had already passed before. See [#3770](https://github.com/commercialhaskell/stack/pull/3770). * Includes a patched version of `hackage-security` which fixes both some issues around asynchronous exception handling, and moves from directory locking to file locking, making the update mechanism resilient against SIGKILL and machine failure. See `hackage-security` issue [#187](https://github.com/haskell/hackage-security/issues/187) and Stack issue [#3073](https://github.com/commercialhaskell/stack/issues/3073). ## v1.6.3.1 - 2018-02-16 Hackage-only release with no user facing changes (updated to build with newer version of Hpack dependency). 
## v1.6.3 - 2017-12-23 Enhancements: * In addition to supporting `.tar.gz` and `.zip` files as remote archives, plain `.tar` files are now accepted too. This will additionally help with cases where HTTP servers mistakenly set the transfer encoding to `gzip`. See [#3647](https://github.com/commercialhaskell/stack/issues/3647). * Links to docs.haskellstack.org ignore Stack version patchlevel. * Downloading Docker-compatible `stack` binary ignores Stack version patchlevel. Bug fixes: * For versions of Cabal before 1.24, ensure that the dependencies of non-buildable components are part of the build plan to work around an old Cabal bug. See [#3631](https://github.com/commercialhaskell/stack/issues/3631). * Run the Cabal file checking in the `sdist` command more reliably by allowing the Cabal library to flatten the `GenericPackageDescription` itself. ## v1.6.1.1 - 2017-12-20 Hackage-only release with no user facing changes (updated to build with newer dependency versions). ## v1.6.1 - 2017-12-07 Major changes: * Complete overhaul of how snapshots are defined, the `packages` and `extra-deps` fields, and a number of related items. For full details, please see the [writeup](https://www.fpcomplete.com/blog/2017/07/stacks-new-extensible-snapshots) on these changes. [PR #3249](https://github.com/commercialhaskell/stack/pull/3249), see the PR description for a number of related issues. * Upgraded to version 2.0 of the Cabal library. Behavior changes: * The `--install-ghc` flag is now on by default. For example, if you run `stack build` in a directory requiring a GHC that you do not currently have, Stack will automatically download and install that GHC. You can explicitly set `install-ghc: false` or pass the flag `--no-install-ghc` to regain the previous behavior. * `stack ghci` no longer loads modules grouped by package. This is always an improvement for plain ghci - it makes loading faster and less noisy. 
For intero, this has the side-effect that it will no longer load multiple packages that depend on TH loading relative paths. TH relative paths will still work when loading a single package into intero. See [#3309](https://github.com/commercialhaskell/stack/issues/3309) * Setting GHC options for a package via `ghc-options:` in your `stack.yaml` will promote it to a local package, providing for more consistency with flags and better reproducibility. See: [#849](https://github.com/commercialhaskell/stack/issues/849) * The `package-indices` setting with Hackage no longer works with the `00-index.tar.gz` tarball, but must use the `01-index.tar.gz` file to allow revised packages to be found. * Options passed via `--ghci-options` are now passed to the end of the invocation of ghci, instead of the middle. This allows using `+RTS` without an accompanying `-RTS`. * When auto-detecting `--ghc-build`, `tinfo6` is now preferred over `standard` if both versions of libtinfo are installed * Addition of `stack build --copy-compiler-tool`, to allow tools like intero to be installed globally for a particular compiler. [#2643](https://github.com/commercialhaskell/stack/issues/2643) * Stack will ask before saving hackage credentials to file. This new prompt can be avoided by using the `save-hackage-creds` setting. Please see [#2159](https://github.com/commercialhaskell/stack/issues/2159). * The `GHCRTS` environment variable will no longer be passed through to every program Stack runs. Instead, it will only be passed through commands like `exec`, `runghc`, `script`, `ghci`, etc. See [#3444](https://github.com/commercialhaskell/stack/issues/3444). * `ghc-options:` for specific packages will now come after the options specified for all packages / particular sets of packages. See [#3573](https://github.com/commercialhaskell/stack/issues/3573). * The `pvp-bounds` feature is no longer fully functional, due to some issues with the Cabal library's printer. 
See [#3550](https://github.com/commercialhaskell/stack/issues/3550). Other enhancements: * The `with-hpack` configuration option specifies an Hpack executable to use instead of the Hpack bundled with Stack. Please see [#3179](https://github.com/commercialhaskell/stack/issues/3179). * It's now possible to skip tests and benchmarks using `--skip` flag * `GitSHA1` is now `StaticSHA256` and is implemented using the `StaticSize 64 ByteString` for improved performance. See [#3006](https://github.com/commercialhaskell/stack/issues/3006) * Dependencies via HTTP(S) archives have been generalized to allow local file path archives, as well as to support setting a cryptographic hash (SHA256) of the contents for better reproducibility. * Allow specifying `--git-branch` when upgrading * When running `stack upgrade` from a file which is different from the default executable path (e.g., on POSIX systems, `~/.local/bin/stack`), it will now additionally copy the new executable over the currently running `stack` executable. If permission is denied (such as in `/usr/local/bin/stack`), the user will be prompted to try again using `sudo`. This is intended to assist with the user experience when the `PATH` environment variable has not been properly configured, see [#3232](https://github.com/commercialhaskell/stack/issues/3232). * `stack setup` for ghcjs will now install `alex` and `happy` if they are not present. See [#3109](https://github.com/commercialhaskell/stack/issues/3109). * Added `stack ghci --only-main` flag, to skip loading / importing all but main modules. See the ghci documentation page for further info. * Allow GHC's colored output to show through. GHC colors output starting with version 8.2.1, for older GHC this does nothing. Sometimes GHC's heuristics would work fine even before this change, for example in `stack ghci`, but this overrides GHC's heuristics when they're broken by our collecting and processing GHC's output. 
* Extended the `ghc-options` field to support `$locals`, `$targets`, and `$everything`. See: [#3329](https://github.com/commercialhaskell/stack/issues/3329) * Better error message for case that `stack ghci` file targets are combined with invalid package targets. See: [#3342](https://github.com/commercialhaskell/stack/issues/3342) * For profiling now uses `-fprof-auto -fprof-cafs` instead of the deprecated `-auto-all -caf-all`. See: [#3360](https://github.com/commercialhaskell/stack/issues/3360) * Better descriptions are now available for `stack upgrade --help`. See: [#3070](https://github.com/commercialhaskell/stack/issues/3070) * When using Nix, nix-shell now depends always on gcc to prevent build errors when using the FFI. As ghc depends on gcc anyway, this doesn't increase the dependency footprint. * `--cwd DIR` can now be passed to `stack exec` in order to execute the program in a different directory. See: [#3264](https://github.com/commercialhaskell/stack/issues/3264) * Plan construction will detect if you add an executable-only package as a library dependency, resulting in much clearer error messages. See: [#2195](https://github.com/commercialhaskell/stack/issues/2195). * Addition of `--ghc-options` to `stack script` to pass options directly to GHC. See: [#3454](https://github.com/commercialhaskell/stack/issues/3454) * Add Hpack `package.yaml` to build Stack itself * Add `ignore-revision-mismatch` setting. See: [#3520](https://github.com/commercialhaskell/stack/issues/3520). * Log when each individual test suite finishes. See: [#3552](https://github.com/commercialhaskell/stack/issues/3552). * Avoid spurious rebuilds when using `--file-watch` by not watching files for executable, test and benchmark components that aren't a target. See: [#3483](https://github.com/commercialhaskell/stack/issues/3483). * Stack will now try to detect the width of the running terminal (only on POSIX for the moment) and use that to better display output messages. 
Work is ongoing, so some messages will not be optimal yet. The terminal width can be overridden with the new `--terminal-width` command-line option (this works even on non-POSIX). * Passing non local packages as targets to `stack ghci` will now cause them to be used as `-package` args along with package hiding. * Detect when user changed Cabal file instead of `package.yaml`. This was implemented upstream in Hpack. See [#3383](https://github.com/commercialhaskell/stack/issues/3383). * Automatically run `autoreconf -i` as necessary when a `configure` script is missing. See [#3534](https://github.com/commercialhaskell/stack/issues/3534) * GHC bindists can now be identified by their SHA256 checksum in addition to their SHA1 checksum, allowing for more security in download. * For filesystem setup-info paths, it's no longer assumed that the directory is writable, instead a temp dir is used. See [#3188](https://github.com/commercialhaskell/stack/issues/3188). Bug fixes: * `stack hoogle` correctly generates Hoogle databases. See: [#3362](https://github.com/commercialhaskell/stack/issues/3362) * `stack --docker-help` is now clearer about --docker implying system-ghc: true, rather than both --docker and --no-docker. * `stack haddock` now includes package names for all modules in the Haddock index page. See: [#2886](https://github.com/commercialhaskell/stack/issues/2886) * Fixed an issue where Stack wouldn't detect missing Docker images properly with newer Docker versions. [#3171](https://github.com/commercialhaskell/stack/pull/3171) * Previously, Cabal files with just test-suite could cause build to fail ([#2862](https://github.com/commercialhaskell/stack/issues/2862)) * If an invalid snapshot file has been detected (usually due to mismatched hashes), Stack will delete the downloaded file and recommend either retrying or filing an issue upstream. See [#3319](https://github.com/commercialhaskell/stack/issues/3319). 
* Modified the flag parser within Stack to match the behavior of Cabal's flag parser, which allows multiple sequential dashes. See [#3345](https://github.com/commercialhaskell/stack/issues/3345) * Now clears the hackage index cache if it is older than the downloaded index. Fixes potential issue if Stack was interrupted when updating index. See [#3033](https://github.com/commercialhaskell/stack/issues/3033) * The Stack install script now respects the `-d` option. See [#3366](https://github.com/commercialhaskell/stack/pull/3366). * `stack script` can now handle relative paths to source files. See [#3372](https://github.com/commercialhaskell/stack/issues/3372). * Fixes explanation of why a target is needed by the build plan, when the target is an extra-dep from the commandline. See [#3378](https://github.com/commercialhaskell/stack/issues/3378). * Previously, if you delete a yaml file from ~/.stack/build-plan, it would trust the etag and not re-download. Fixed in this version. * Invoking `stack --docker` in parallel now correctly locks the sqlite database. See [#3400](https://github.com/commercialhaskell/stack/issues/3400). * docs.haskellstack.org RTD documentation search is replaced by the mkdocs search. Please see [#3376](https://github.com/commercialhaskell/stack/issues/3376). * `stack clean` now works with nix. See [#3468](https://github.com/commercialhaskell/stack/issues/3468). * `stack build --only-dependencies` no longer builds local project packages that are depended on. See [#3476](https://github.com/commercialhaskell/stack/issues/3476). * Properly handle relative paths stored in the precompiled cache files. See [#3431](https://github.com/commercialhaskell/stack/issues/3431). * In some cases, Cabal does not realize that it needs to reconfigure, and must be told to do so automatically. This would manifest as a "shadowed dependency" error message. We now force a reconfigure whenever a dependency is built, even if the package ID remained the same. 
See [#2781](https://github.com/commercialhaskell/stack/issues/2781). * When `--pvp-bounds` is enabled for sdist or upload, internal dependencies could cause errors when uploaded to hackage. This is fixed, see [#3290](https://github.com/commercialhaskell/stack/issues/3290) * Fixes a bug where nonexistent hackage versions would cause Stack to suggest the same package name, without giving version info. See [#3562](https://github.com/commercialhaskell/stack/issues/3562) * Fixes a bug that has existed since 1.5.0, where `stack setup --upgrade-cabal` would say that Cabal is already the latest version, when it wasn't. * Ensure that an `extra-dep` from a local directory is not treated as a `$locals` for GHC options purposes. See [#3574](https://github.com/commercialhaskell/stack/issues/3574). * Building all executables only happens once instead of every time. See [#3229](https://github.com/commercialhaskell/stack/issues/3229) for more info. ## 1.5.1 - 2017-08-05 Bug fixes: * Stack eagerly tries to parse all Cabal files related to a snapshot. Starting with Stackage Nightly 2017-07-31, snapshots are using GHC 8.2.1, and the `ghc.cabal` file implicitly referenced uses the (not yet supported) Cabal 2.0 file format. Future releases of Stack will both be less eager about Cabal file parsing and support Cabal 2.0. This patch simply bypasses the error for invalid parsing. ## 1.5.0 - 2017-07-25 Behavior changes: * `stack profile` and `stack trace` now add their extra RTS arguments for benchmarks and tests to the beginning of the args, instead of the end. See [#2399](https://github.com/commercialhaskell/stack/issues/2399) * Support for Git-based indices has been removed. Other enhancements: * `stack setup` allows controlling the options passed to ghcjs-boot with `--ghcjs-boot-options` (one word at a time) and `--[no-]ghcjs-boot-clean` * `stack setup` now accepts a `--install-cabal VERSION` option which will install a specific version of the Cabal library globally. 
* Updates to store-0.4.1, which has improved performance and better error reporting for version tags. A side-effect of this is that all of stack's binary caches will be invalidated. * `stack solver` will now warn about unexpected `cabal-install` versions. See [#3044](https://github.com/commercialhaskell/stack/issues/3044) * Upstream packages unpacked to a temp dir are now deleted as soon as possible to avoid running out of space in `/tmp`. See [#3018](https://github.com/commercialhaskell/stack/issues/3018) * Add short synonyms for `test-arguments` and `benchmark-arguments` options. * Adds `STACK_WORK` environment variable, to specify work dir. See [#3063](https://github.com/commercialhaskell/stack/issues/3063) * Can now use relative paths for `extra-include-dirs` and `extra-lib-dirs`. See [#2830](https://github.com/commercialhaskell/stack/issues/2830) * Improved bash completion for many options, including `--ghc-options`, `--flag`, targets, and project executables for `exec`. * `--haddock-arguments` is actually used now when `haddock` is invoked during documentation generation. * `--[no-]haddock-hyperlink-source` flag added which allows toggling of sources being included in Haddock output. See [#3099](https://github.com/commercialhaskell/stack/issues/3099) * `stack ghci` will now skip building all local targets, even if they have downstream deps, as long as it's registered in the DB. * The `pvp-bounds` feature now supports adding `-revision` to the end of each value, e.g. `pvp-bounds: both-revision`. This means that, when uploading to Hackage, Stack will first upload your tarball with an unmodified Cabal file, and then upload a Cabal file revision with the PVP bounds added. This can be useful - especially combined with the [Stackage no-revisions feature](http://www.snoyman.com/blog/2017/04/stackages-no-revisions-field) - as a method to ensure PVP compliance without having to proactively fix bounds issues for Stackage maintenance. 
* Expose a `save-hackage-creds` configuration option * On GHC <= 7.8, filters out spurious linker warnings on Windows. See [#3127](https://github.com/commercialhaskell/stack/pull/3127) * Better error messages when creating or building packages which alias wired-in packages. See [#3172](https://github.com/commercialhaskell/stack/issues/3172). * MinGW bin folder now is searched for dynamic libraries. See [#3126](https://github.com/commercialhaskell/stack/issues/3126) * When using Nix, nix-shell now always depends on Git to prevent runtime errors while fetching metadata * Experimental: The `stack unpack` command now accepts a form where an explicit Hackage revision hash is specified, e.g. `stack unpack foo-1.2.3@gitsha1:deadbeef`. Note that Stack will likely move towards a different hash format in the future. * Binary "stack upgrade" will now warn if the installed executable is not on the PATH or shadowed by another entry. * Allow running tests on tarball created by sdist and upload [#717](https://github.com/commercialhaskell/stack/issues/717). Bug fixes: * Fixes case where `stack build --profile` might not cause executables / tests / benchmarks to be rebuilt. See [#2984](https://github.com/commercialhaskell/stack/issues/2984) * `stack ghci file.hs` now loads the file even if it isn't part of your project. * `stack clean --full` now works when docker is enabled. See [#2010](https://github.com/commercialhaskell/stack/issues/2010) * Fixes an issue where cyclic deps can cause benchmarks or tests to be run before they are built. See [#2153](https://github.com/commercialhaskell/stack/issues/2153) * Fixes `stack build --file-watch` in cases where a directory is removed. See [#1838](https://github.com/commercialhaskell/stack/issues/1838) * Fixes `stack dot` and `stack list-dependencies` to use info from the package database for wired-in-packages (ghc, base, etc). 
See [#3084](https://github.com/commercialhaskell/stack/issues/3084) * Fixes `stack --docker build` when user is part of libvirt/libvirtd groups on Ubuntu Yakkety (16.10). See [#3092](https://github.com/commercialhaskell/stack/issues/3092) * Switching a package between extra-dep and local package now forces rebuild (previously it wouldn't if versions were the same). See [#2147](https://github.com/commercialhaskell/stack/issues/2147) * `stack upload` no longer reveals your password when you type it on MinTTY-based Windows shells, such as Cygwin and MSYS2. See [#3142](https://github.com/commercialhaskell/stack/issues/3142) * `stack script`'s import parser will now properly parse files that have Windows-style line endings (CRLF) ## 1.4.0 - 2017-03-15 Release notes: * Docker images: [fpco/stack-full](https://hub.docker.com/r/fpco/stack-full/) and [fpco/stack-run](https://hub.docker.com/r/fpco/stack-run/) are no longer being built for LTS 8.0 and above. [fpco/stack-build](https://hub.docker.com/r/fpco/stack-build/) images continue to be built with a [simplified process](https://github.com/commercialhaskell/stack/tree/master/etc/dockerfiles/stack-build). [#624](https://github.com/commercialhaskell/stack/issues/624) Major changes: * A new command, `script`, has been added, intended to make the script interpreter workflow more reliable, easier to use, and more efficient. This command forces the user to provide a `--resolver` value, ignores all config files for more reproducible results, and optimizes the existing package check to make the common case of all packages already being present much faster. This mode does require that all packages be present in a snapshot, however. [#2805](https://github.com/commercialhaskell/stack/issues/2805) Behavior changes: * The default package metadata backend has been changed from Git to the 01-index.tar.gz file, from the hackage-security project. 
This is intended to address some download speed issues from GitHub for people in certain geographic regions. There is now full support for checking out specific Cabal file revisions from downloaded tarballs as well. If you manually specify a package index with only a Git URL, Git will still be used. See [#2780](https://github.com/commercialhaskell/stack/issues/2780) * When you provide the `--resolver` argument to the `stack unpack` command, any packages passed in by name only will be looked up in the given snapshot instead of taking the latest version. For example, `stack --resolver lts-7.14 unpack mtl` will get version 2.2.1 of `mtl`, regardless of the latest version available in the package indices. This will also force the same Cabal file revision to be used as is specified in the snapshot. Unpacking via a package identifier (e.g. `stack --resolver lts-7.14 unpack mtl-2.2.1`) will ignore any settings in the snapshot and take the most recent revision. For backwards compatibility with tools relying on the presence of a `00-index.tar`, Stack will copy the `01-index.tar` file to `00-index.tar`. Note, however, that these files are different; most importantly, 00-index contains only the newest revisions of Cabal files, while 01-index contains all versions. You may still need to update your tooling. * Passing `--(no-)nix-*` options now no longer implies `--nix`, except for `--nix-pure`, so that the user preference whether or not to use Nix is honored even in the presence of options that change the Nix behavior. Other enhancements: * Internal cleanup: configuration types are now based much more on lenses * `stack build` and related commands now allow the user to disable debug symbol stripping with new `--no-strip`, `--no-library-stripping`, and `--no-executable-stripping` flags, closing [#877](https://github.com/commercialhaskell/stack/issues/877). 
Also made the error message for missing targets more readable ([#2384](https://github.com/commercialhaskell/stack/issues/2384)) * `stack haddock` now shows index.html paths when documentation is already up to date. Resolved [#781](https://github.com/commercialhaskell/stack/issues/781) * Respects the `custom-setup` field introduced in Cabal 1.24. This supersedes any `explicit-setup-deps` settings in your `stack.yaml` and trusts the package's Cabal file to explicitly state all its dependencies. * If system package installation fails, `get-stack.sh` will fail as well. Also shows warning suggesting to run `apt-get update` or similar, depending on the OS. ([#2898](https://github.com/commercialhaskell/stack/issues/2898)) * When `stack ghci` is run with a config with no packages (e.g. global project), it will now look for source files in the current work dir. ([#2878](https://github.com/commercialhaskell/stack/issues/2878)) * Bump to Hpack 0.17.0 to allow `custom-setup` and `!include "..."` in `package.yaml`. * The script interpreter will now output error logging. In particular, this means it will output info about plan construction errors. ([#2879](https://github.com/commercialhaskell/stack/issues/2879)) * `stack ghci` now takes `--flag` and `--ghc-options` again (inadvertently removed in 1.3.0). ([#2986](https://github.com/commercialhaskell/stack/issues/2986)) * `stack exec` now takes `--rts-options` which passes the given arguments inside of `+RTS ... args .. -RTS` to the executable. This works around Stack itself consuming the RTS flags on Windows. ([#2640](https://github.com/commercialhaskell/stack/issues/2640)) * Upgraded `http-client-tls` version, which now offers support for the `socks5://` and `socks5h://` values in the `http_proxy` and `https_proxy` environment variables. Bug fixes: * Bump to Hpack 0.16.0 to avoid character encoding issues when reading and writing on non-UTF8 systems. * `stack ghci` will no longer ignore hsSourceDirs that contain `..`. 
([#2895](https://github.com/commercialhaskell/stack/issues/2895)) * `stack list-dependencies --license` now works for wired-in-packages, like base. ([#2871](https://github.com/commercialhaskell/stack/issues/2871)) * `stack setup` now correctly indicates when it uses system ghc ([#2963](https://github.com/commercialhaskell/stack/issues/2963)) * Fix to `stack config set`, in 1.3.2 it always applied to the global project. ([#2709](https://github.com/commercialhaskell/stack/issues/2709)) * Previously, Cabal files without exe or lib would fail on the "copy" step. ([#2862](https://github.com/commercialhaskell/stack/issues/2862)) * `stack upgrade --git` now works properly. Workaround for affected versions (>= 1.3.0) is to instead run `stack upgrade --git --source-only`. ([#2977](https://github.com/commercialhaskell/stack/issues/2977)) * Added support for GHC 8's slightly different warning format for dumping warnings from logs. * Work around a bug in Cabal/GHC in which package IDs are not unique for different source code, leading to Stack not always rebuilding packages depending on local packages which have changed. ([#2904](https://github.com/commercialhaskell/stack/issues/2904)) ## 1.3.2 - 2016-12-27 Bug fixes: * `stack config set` can now be used without a compiler installed [#2852](https://github.com/commercialhaskell/stack/issues/2852). * `get-stack.sh` now installs correct binary on ARM for generic linux and raspbian, closing [#2856](https://github.com/commercialhaskell/stack/issues/2856). * Correct the testing of whether a package database exists by checking for the `package.cache` file itself instead of the containing directory. * Revert a change in the previous release which made it impossible to set local extra-dep packages as targets. This was overkill; we really only wanted to disable their test suites, which was already handled by a later patch. 
[#2849](https://github.com/commercialhaskell/stack/issues/2849) * `stack new` always treats templates as being UTF-8 encoding, ignoring locale settings on a local machine. See [Yesod mailing list discussion](https://groups.google.com/d/msg/yesodweb/ZyWLsJOtY0c/aejf9E7rCAAJ) ## 1.3.0 - 2016-12-12 Release notes: * For the _next_ Stack release after this one, we are planning changes to our Linux releases, including dropping our Ubuntu, Debian, CentOS, and Fedora package repositories and switching to statically linked binaries. See [#2534](https://github.com/commercialhaskell/stack/issues/2534). Note that upgrading without a package manager has gotten easier with new binary upgrade support in `stack upgrade` (see the Major Changes section below for more information). In addition, the get.haskellstack.org script no longer installs from Ubuntu, Debian, CentOS, or Fedora package repositories. Instead it places a generic binary in /usr/local/bin. Major changes: * Stack will now always use its own GHC installation, even when a suitable GHC installation is available on the PATH. To get the old behaviour, use the `--system-ghc` flag or run `stack config set system-ghc --global true`. Docker- and Nix-enabled projects continue to use the GHC installations in their environment by default. NB: Scripts that previously used Stack in combination with a system GHC installation should now include a `stack setup` line or use the `--install-ghc` flag. [#2221](https://github.com/commercialhaskell/stack/issues/2221) * `stack ghci` now defaults to skipping the build of target packages, because support has been added for invoking "initial build steps", which create autogen files and run preprocessors. The `--no-build` flag is now deprecated because it should no longer be necessary. See [#1364](https://github.com/commercialhaskell/stack/issues/1364) * Stack is now capable of doing binary upgrades instead of always recompiling a new version from source. 
Running `stack upgrade` will now default to downloading a binary version of Stack from the most recent release, if one is available. See `stack upgrade --help` for more options. [#1238](https://github.com/commercialhaskell/stack/issues/1238) Behavior changes: * Passing `--resolver X` with a Stack command which forces creation of a global project config, will pass resolver X into the initial config. See [#2579](https://github.com/commercialhaskell/stack/issues/2579). * Switch the "Run from outside project" messages to debug-level, to avoid spamming users in the normal case of non-project usage * If a remote package is specified (such as a Git repo) without an explicit `extra-dep` setting, a warning is given to the user to provide one explicitly. Other enhancements: * `stack haddock` now supports `--haddock-internal`. See [#2229](https://github.com/commercialhaskell/stack/issues/2229) * Add support for `system-ghc` and `install-ghc` fields to `stack config set` command. * Add `ghc-build` option to override autodetected GHC build to use (e.g. gmp4, tinfo6, nopie) on Linux. * `stack setup` detects systems where gcc enables PIE by default (such as Ubuntu 16.10 and Hardened Gentoo) and adjusts the GHC `configure` options accordingly. [#2542](https://github.com/commercialhaskell/stack/issues/2542) * Upload to Hackage with HTTP digest instead of HTTP basic. * Make `stack list-dependencies` understand all of the `stack dot` options too. * Add the ability for `stack list-dependencies` to list dependency licenses by passing the `--license` flag. * Dump logs that contain warnings for any local non-dependency packages [#2545](https://github.com/commercialhaskell/stack/issues/2545) * Add the `dump-logs` config option and `--dump-logs` command line option to get full build output on the console. [#426](https://github.com/commercialhaskell/stack/issues/426) * Add the `--open` option to "stack hpc report" command, causing the report to be opened in the browser. 
* The `stack config set` command now accepts a `--global` flag for suitable fields which causes it to modify the global user configuration (`~/.stack/config.yaml`) instead of the project configuration. [#2675](https://github.com/commercialhaskell/stack/pull/2675) * Information on the latest available snapshots is now downloaded from S3 instead of stackage.org, increasing reliability in case of stackage.org outages. [#2653](https://github.com/commercialhaskell/stack/pull/2653) * `stack dot` and `stack list-dependencies` now take targets and flags. [#1919](https://github.com/commercialhaskell/stack/issues/1919) * Deprecate `stack setup --stack-setup-yaml` for `--setup-info-yaml` based on discussion in [#2647](https://github.com/commercialhaskell/stack/issues/2647). * The `--main-is` flag for GHCI now implies the TARGET, fixing [#1845](https://github.com/commercialhaskell/stack/issues/1845). * `stack ghci` no longer takes all build options, as many weren't useful [#2199](https://github.com/commercialhaskell/stack/issues/2199) * `--no-time-in-log` option, to make verbose logs more diffable [#2727](https://github.com/commercialhaskell/stack/issues/2727) * `--color` option added to override auto-detection of ANSI support [#2725](https://github.com/commercialhaskell/stack/issues/2725) * Missing extra-deps are now warned about, adding a degree of typo detection [#1521](https://github.com/commercialhaskell/stack/issues/1521) * No longer warns about missing build-tools if they are on the PATH. [#2235](https://github.com/commercialhaskell/stack/issues/2235) * Replace enclosed-exceptions with safe-exceptions. [#2768](https://github.com/commercialhaskell/stack/issues/2768) * The install location for GHC and other programs can now be configured with the `local-programs-path` option in `config.yaml`. 
[#1644](https://github.com/commercialhaskell/stack/issues/1644) * Added option to add nix dependencies as nix GC roots * Proper pid 1 (init) process for `stack exec` with Docker * Dump build logs if they contain warnings. [#2545](https://github.com/commercialhaskell/stack/issues/2545) * Docker: redirect stdout of `docker pull` to stderr so that it will not interfere with output of other commands. * Nix & docker can be activated at the same time, in order to run Stack in a nix-shell in a container, preferably from an image already containing the nix dependencies in its /nix/store * Stack/nix: Dependencies can be added as nix GC roots, so they are not removed when running `nix-collect-garbage` Bug fixes: * Fixed a gnarly bug where programs and package tarballs sometimes have corrupted downloads. See [#2657](https://github.com/commercialhaskell/stack/issues/2657). * Add proper support for non-ASCII characters in file paths for the `sdist` command. See [#2549](https://github.com/commercialhaskell/stack/issues/2549) * Never treat `extra-dep` local packages as targets. This ensures things like test suites are not run for these packages, and that build output is not hidden due to their presence. * Fix a resource leak in `sinkProcessStderrStdout` which could affect much of the codebase, in particular copying precompiled packages. [#1979](https://github.com/commercialhaskell/stack/issues/1979) * Docker: ensure that interrupted extraction process does not cause corrupt file when downloading a Docker-compatible Stack executable [#2568](https://github.com/commercialhaskell/stack/issues/2568) * Fixed running `stack hpc report` on package targets. [#2664](https://github.com/commercialhaskell/stack/issues/2664) * Fix a long-standing performance regression where Stack would parse the `.dump-hi` files of the library components of local packages twice. 
[#2658](https://github.com/commercialhaskell/stack/pull/2658) * Fixed a regression in "stack ghci --no-load", where it would prompt for a main module to load. [#2603](https://github.com/commercialhaskell/stack/pull/2603) * Build Setup.hs files with the threaded RTS, mirroring the behavior of `cabal-install` and enabling more complex build systems in those files. * Fixed a bug in passing along `--ghc-options` to ghcjs. They were being provided as `--ghc-options` to Cabal, when it needs to be `--ghcjs-options`. [#2714](https://github.com/commercialhaskell/stack/issues/2714) * Launch Docker from the project root regardless of the working directory Stack is invoked from. This means paths relative to the project root (e.g. environment files) can be specified in `stack.yaml`'s docker `run-args`. * `stack setup --reinstall` now behaves as expected. [#2554](https://github.com/commercialhaskell/stack/issues/2554) ## 1.2.0 - 2016-09-16 Release notes: * On many Un*x systems, Stack can now be installed with a simple one-liner: wget -qO- https://get.haskellstack.org/ | sh * The fix for [#2175](https://github.com/commercialhaskell/stack/issues/2175) entails that Stack must perform a full clone of a large Git repo of Hackage meta-information. The total download size is about 200 MB. Please be aware of this when upgrading your Stack installation. * If you use Mac OS X, you may want to delay upgrading to macOS Sierra as there are reports of GHC panics when building some packages (including Stack itself). See [#2577](https://github.com/commercialhaskell/stack/issues/2577) * This version of Stack does not build on ARM or PowerPC systems (see [store#37](https://github.com/fpco/store/issues/37)). Please stay with version 1.1.2 for now on those architectures. This will be rectified soon! * We are now releasing a statically linked Stack binary for [64-bit Linux](https://get.haskellstack.org/stable/linux-x86_64-static.tar.gz). 
Please try it and let us know if you run into any trouble on your platform. * We are planning some changes to our Linux releases, including dropping our Ubuntu, Debian, CentOS, and Fedora package repositories and switching to statically linked binaries. We would value your feedback in [#2534](https://github.com/commercialhaskell/stack/issues/2534). Major changes: * Add `stack hoogle` command. [#55](https://github.com/commercialhaskell/stack/issues/55) * Support for absolute file path in `url` field of `setup-info` or `--ghc-bindist` * Add support for rendering GHCi scripts targeting different GHCi like applications [#2457](https://github.com/commercialhaskell/stack/pull/2457) Behavior changes: * Remove `stack ide start` and `stack ide load-targets` commands. [#2178](https://github.com/commercialhaskell/stack/issues/2178) * Support .buildinfo files in `stack ghci`. [#2242](https://github.com/commercialhaskell/stack/pull/2242) * Support -ferror-spans syntax in GHC error messages. * Avoid unpacking ghc to `/tmp` [#996](https://github.com/commercialhaskell/stack/issues/996) * The Linux `gmp4` GHC bindist is no longer considered a full-fledged GHC variant and can no longer be specified using the `ghc-variant` option, and instead is treated more like a slightly different platform. Other enhancements: * Use the `store` package for binary serialization of most caches. * Only require minor version match for Docker Stack exe. This way, we can make patch releases for version bounds and similar build issues without needing to upload new binaries for Docker. * Stack/Nix: Passes the right ghc derivation as an argument to the `shell.nix` when a custom `shell.nix` is used. 
See [#2243](https://github.com/commercialhaskell/stack/issues/2243) * Stack/Nix: Sets `LD_LIBRARY_PATH` so packages using C libs for Template Haskell can work (See _e.g._ [this HaskellR issue](https://github.com/tweag/HaskellR/issues/253)) * Parse CLI arguments and configuration files into less permissive types, improving error messages for bad inputs. [#2267](https://github.com/commercialhaskell/stack/issues/2267) * Add the ability to explicitly specify a gcc executable. [#593](https://github.com/commercialhaskell/stack/issues/593) * Nix: No longer uses LTS mirroring in nixpkgs. Gives to nix-shell a derivation like `haskell.compiler.ghc801` See [#2259](https://github.com/commercialhaskell/stack/issues/2259) * Perform some subprocesses during setup concurrently, slightly speeding up most commands. [#2346](https://github.com/commercialhaskell/stack/pull/2346) * `stack setup` no longer unpacks to the system temp dir on posix systems. [#996](https://github.com/commercialhaskell/stack/issues/996) * `stack setup` detects libtinfo6 and ncurses6 and can download alternate GHC bindists [#257](https://github.com/commercialhaskell/stack/issues/257) [#2302](https://github.com/commercialhaskell/stack/issues/2302). * `stack setup` detects Linux ARMv7 and downloads appropriate GHC bindist [#2103](https://github.com/commercialhaskell/stack/issues/2103) * Custom `stack` binaries list dependency versions in output for `--version`. See [#2222](https://github.com/commercialhaskell/stack/issues/2222) and [#2450](https://github.com/commercialhaskell/stack/issues/2450). * Use a pretty printer to output dependency resolution errors. [#1912](https://github.com/commercialhaskell/stack/issues/1912) * Remove the `--os` flag [#2227](https://github.com/commercialhaskell/stack/issues/2227) * Add 'netbase' and 'ca-certificates' as dependency for .deb packages. [#2293](https://github.com/commercialhaskell/stack/issues/2293). * Add `stack ide targets` command. 
* Enhance debug logging with subprocess timings. * Pretty-print YAML parse errors [#2374](https://github.com/commercialhaskell/stack/issues/2374) * Clarify confusing `stack setup` output [#2314](https://github.com/commercialhaskell/stack/issues/2314) * Delete `Stack.Types` multimodule to improve build times [#2405](https://github.com/commercialhaskell/stack/issues/2405) * Remove spurious newlines in build logs [#2418](https://github.com/commercialhaskell/stack/issues/2418) * Interpreter: Provide a way to hide implicit packages [#1208](https://github.com/commercialhaskell/stack/issues/1208) * Check executability in exec lookup [#2489](https://github.com/commercialhaskell/stack/issues/2489) Bug fixes: * Fix Cabal warning about use of a deprecated Cabal flag [#2350](https://github.com/commercialhaskell/stack/issues/2350) * Support most executable extensions on Windows [#2225](https://github.com/commercialhaskell/stack/issues/2225) * Detect resolver change in `stack solver` [#2252](https://github.com/commercialhaskell/stack/issues/2252) * Fix a bug in docker image creation where the wrong base image was selected [#2376](https://github.com/commercialhaskell/stack/issues/2376) * Ignore special entries when unpacking tarballs [#2361](https://github.com/commercialhaskell/stack/issues/2361) * Fixes src directory pollution of `style.css` and `highlight.js` with GHC 8's haddock [#2429](https://github.com/commercialhaskell/stack/issues/2429) * Handle filepaths with spaces in `stack ghci` [#2266](https://github.com/commercialhaskell/stack/issues/2266) * Apply ghc-options to snapshot packages [#2289](https://github.com/commercialhaskell/stack/issues/2289) * `stack sdist`: Fix timestamp in tarball [#2394](https://github.com/commercialhaskell/stack/pull/2394) * Allow global Stack arguments with a script [#2316](https://github.com/commercialhaskell/stack/issues/2316) * Inconsistency between ToJSON and FromJSON instances of PackageLocation 
[#2412](https://github.com/commercialhaskell/stack/pull/2412) * Perform Unicode normalization on filepaths [#1810](https://github.com/commercialhaskell/stack/issues/1810) * Solver: always keep ghc wired-in as hard constraints [#2453](https://github.com/commercialhaskell/stack/issues/2453) * Support OpenBSD's tar where possible, require GNU tar for xz support [#2283](https://github.com/commercialhaskell/stack/issues/2283) * Fix using --coverage with Cabal-1.24 [#2424](https://github.com/commercialhaskell/stack/issues/2424) * When marking exe installed, remove old version [#2373](https://github.com/commercialhaskell/stack/issues/2373) * Stop truncating `all-cabal-hashes` git repo [#2175](https://github.com/commercialhaskell/stack/issues/2175) * Handle non-ASCII filenames on Windows [#2491](https://github.com/commercialhaskell/stack/issues/2491) * Avoid using multiple versions of a package in script interpreter by passing package-id to ghc/runghc [#1957](https://github.com/commercialhaskell/stack/issues/1957) * Only pre-load compiler version when using nix integration [#2459](https://github.com/commercialhaskell/stack/issues/2459) * Solver: parse Cabal errors also on Windows [#2502](https://github.com/commercialhaskell/stack/issues/2502) * Allow exec and ghci commands in interpreter mode. Scripts can now automatically open in the repl by using `exec ghci` instead of `runghc` in the shebang command. [#2510](https://github.com/commercialhaskell/stack/issues/2510) * Now consider a package to be dirty when an extra-source-file is changed. See [#2040](https://github.com/commercialhaskell/stack/issues/2040) ## 1.1.2 - 2016-05-20 Release notes: * Official FreeBSD binaries are [now available](http://docs.haskellstack.org/en/stable/install_and_upgrade/#freebsd) [#1253](https://github.com/commercialhaskell/stack/issues/1253). Major changes: * Extensible custom snapshots implemented. These allow you to define snapshots which extend other snapshots. 
See [#863](https://github.com/commercialhaskell/stack/issues/863). Local file custom snapshots can now be safely updated without changing their name. Remote custom snapshots should still be treated as immutable. Behavior changes: * `stack path --compiler` was added in the last release, to yield a path to the compiler. Unfortunately, `--compiler` is a global option that is useful to use with `stack path`. The same functionality is now provided by `stack path --compiler-exe`. See [#2123](https://github.com/commercialhaskell/stack/issues/2123) * For packages specified in terms of a git or hg repo, the hash used in the location has changed. This means that existing downloads from older stack versions won't be used. This is a side-effect of the fix to [#2133](https://github.com/commercialhaskell/stack/issues/2133) * `stack upgrade` no longer pays attention to local `stack.yaml` files, just the global config and CLI options. [#1392](https://github.com/commercialhaskell/stack/issues/1392) * `stack ghci` now uses `:add` instead of `:load`, making it potentially work better with user scripts. See [#1888](https://github.com/commercialhaskell/stack/issues/1888) Other enhancements: * Grab Cabal files via Git SHA to avoid regressions from Hackage revisions [#2070](https://github.com/commercialhaskell/stack/pull/2070) * Custom snapshots now support `ghc-options`. * Package git repos are now re-used rather than re-cloned. See [#1620](https://github.com/commercialhaskell/stack/issues/1620) * `DESTDIR` is filtered from environment when installing GHC. See [#1460](https://github.com/commercialhaskell/stack/issues/1460) * `stack haddock` now supports `--haddock-arguments`. See [#2144](https://github.com/commercialhaskell/stack/issues/2144) * Signing: warn if GPG_TTY is not set as per `man gpg-agent` Bug fixes: * Now ignore project config when doing `stack init` or `stack new`. 
See [#2110](https://github.com/commercialhaskell/stack/issues/2110) * Packages specified by git repo can now have submodules. See [#2133](https://github.com/commercialhaskell/stack/issues/2133) * Fix of hackage index fetch retry. See re-opening of [#1418](https://github.com/commercialhaskell/stack/issues/1418#issuecomment-217633843) * HPack now picks up changes to filesystem other than package.yaml. See [#2051](https://github.com/commercialhaskell/stack/issues/2051) * "stack solver" no longer suggests --omit-packages. See [#2031](https://github.com/commercialhaskell/stack/issues/2031) * Fixed an issue with building Cabal's Setup.hs. See [#1356](https://github.com/commercialhaskell/stack/issues/1356) * Package dirtiness now pays attention to deleted files. See [#1841](https://github.com/commercialhaskell/stack/issues/1841) * `stack ghci` now uses `extra-lib-dirs` and `extra-include-dirs`. See [#1656](https://github.com/commercialhaskell/stack/issues/1656) * Relative paths outside of source dir added via `qAddDependentFile` are now checked for dirtiness. See [#1982](https://github.com/commercialhaskell/stack/issues/1982) * Signing: always use `--with-fingerprints` ## 1.1.0 - 2016-05-04 Release notes: * Added Ubuntu 16.04 LTS (xenial) Apt repo. * No longer uploading new versions to Fedora 21 repo. Behavior changes: * Snapshot packages are no longer built with executable profiling. See [#1179](https://github.com/commercialhaskell/stack/issues/1179). * `stack init` now ignores symlinks when searching for Cabal files. It also now ignores any directory that begins with `.` (as well as `dist` dirs) - before it would only ignore `.git`, `.stack-work`, and `dist`. * The Stack executable is no longer built with `-rtsopts`. Before, when `-rtsopts` was enabled, Stack would process `+RTS` options even when intended for some other program, such as when used with `stack exec -- prog +RTS`. See [#2022](https://github.com/commercialhaskell/stack/issues/2022). 
* The `stack path --ghc-paths` option is deprecated and renamed to `--programs`. `--compiler` is added, which points directly at the compiler used in the current project. `--compiler-bin` points to the compiler's bin dir. * For consistency with the `$STACK_ROOT` environment variable, the `stack path --global-stack-root` flag and the `global-stack-root` field in the output of `stack path` are being deprecated and replaced with the `stack-root` flag and output field. Additionally, the Stack root can now be specified via the `--stack-root` command-line flag. See [#1148](https://github.com/commercialhaskell/stack/issues/1148). * `stack sig` GPG-related sub-commands were removed (folded into `upload` and `sdist`) * GPG signing of packages while uploading to Hackage is now the default. Use `upload --no-signature` if you would rather not contribute your package signature. If you don't yet have a GPG keyset, read this [blog post on GPG keys](https://fpcomplete.com/blog/2016/05/stack-security-gnupg-keys). We can add a `stack.yaml` config setting to disable signing if some people desire it. We hope that people will sign. Later we will be adding GPG signature verification options. * `stack build pkg-1.2.3` will now build even if the snapshot has a different package version - it is treated as an extra-dep. `stack build local-pkg-1.2.3` is an error even if the version number matches the local package [#2028](https://github.com/commercialhaskell/stack/issues/2028). * Having a `nix:` section no longer implies enabling nix build. This allows the user to globally configure whether nix is used (unless the project overrides the default explicitly). See [#1924](https://github.com/commercialhaskell/stack/issues/1924). * Remove deprecated valid-wanted field. * Docker: mount home directory in container [#1949](https://github.com/commercialhaskell/stack/issues/1949). * Deprecate `stack path --local-bin-path`; instead use `--local-bin`. 
* `stack image`: allow absolute source paths for `add`. Other enhancements: * `stack haddock --open [PACKAGE]` opens the local haddocks in the browser. * Fix too much rebuilding when enabling/disabling profiling flags. * `stack build pkg-1.0` will now build `pkg-1.0` even if the snapshot specifies a different version (it introduces a temporary extra-dep) * Experimental: Support for `--split-objs` added [#1284](https://github.com/commercialhaskell/stack/issues/1284). * `git` packages with submodules are supported by passing the `--recursive` flag to `git clone`. * When using [Hpack](https://github.com/sol/hpack), only regenerate Cabal files when Hpack files change. * Hpack files can now be used in templates * `stack ghci` now runs ghci as a separate process [#1306](https://github.com/commercialhaskell/stack/issues/1306) * Retry when downloading snapshots and package indices * Many build options are configurable now in `stack.yaml`: ~~~yaml build: library-profiling: true executable-profiling: true haddock: true haddock-deps: true copy-bins: true prefetch: true force-dirty: true keep-going: true test: true test-arguments: rerun-tests: true additional-args: ['-fprof'] coverage: true no-run-tests: true bench: true benchmark-opts: benchmark-arguments: -O2 no-run-benchmarks: true reconfigure: true cabal-verbose: true ~~~ * A number of URLs are now configurable, useful for firewalls. See [#1794](https://github.com/commercialhaskell/stack/issues/1884). * Suggest causes when executables are missing. * Allow `--omit-packages` even without `--solver`. * Improve the generated `stack.yaml`. * Improve ghci results after :load Main module collision with main file path. * Only load the hackage index if necessary [#1883](https://github.com/commercialhaskell/stack/issues/1883), [#1892](https://github.com/commercialhaskell/stack/issues/1892). * init: allow local packages to be deps of deps [#1965](https://github.com/commercialhaskell/stack/issues/1965). 
* Always use full fingerprints from GPG [#1952](https://github.com/commercialhaskell/stack/issues/1952). * Default to using `gpg2` and fall back to `gpg` [#1976](https://github.com/commercialhaskell/stack/issues/1976). * Add a flag for --verbosity silent. * Add `haddock --open` flag [#1396](https://github.com/commercialhaskell/stack/issues/1396). Bug fixes: * Package tarballs would fail to unpack. [#1884](https://github.com/commercialhaskell/stack/issues/1884). * Fixed errant warnings about missing modules, after deleted and removed from Cabal file [#921](https://github.com/commercialhaskell/stack/issues/921) [#1805](https://github.com/commercialhaskell/stack/issues/1805). * Now considers a package to dirty when the Hpack file is changed [#1819](https://github.com/commercialhaskell/stack/issues/1819). * Nix: cancelling a Stack build now exits properly rather than dropping into a nix-shell [#1778](https://github.com/commercialhaskell/stack/issues/1778). * `allow-newer: true` now causes `--exact-configuration` to be passed to Cabal. See [#1579](https://github.com/commercialhaskell/stack/issues/1579). * `stack solver` no longer fails with `InvalidRelFile` for relative package paths including `..`. See [#1954](https://github.com/commercialhaskell/stack/issues/1954). * Ignore emacs lock files when finding Cabal files [#1897](https://github.com/commercialhaskell/stack/issues/1897). * Use lenient UTF-8 decode for build output [#1945](https://github.com/commercialhaskell/stack/issues/1945). * Clear index cache whenever index updated [#1962](https://github.com/commercialhaskell/stack/issues/1962). * Fix: Building a container image drops a .stack-work dir in the current working (sub)directory [#1975](https://github.com/commercialhaskell/stack/issues/1975). * Fix: Rebuilding when disabling profiling [#2023](https://github.com/commercialhaskell/stack/issues/2023). 
## 1.0.4.3 - 2016-04-07 Bug fixes: * Don't delete contents of ~/.ssh when using `stack clean --full` with Docker enabled [#2000](https://github.com/commercialhaskell/stack/issues/2000) ## 1.0.4.2 - 2016-03-09 Build with `path-io-1.0.0`. There are no changes in behaviour from 1.0.4, so no binaries are released for this version. ## 1.0.4.1 - 2016-02-21 Fixes build with `aeson-0.11.0.0`. There are no changes in behaviour from 1.0.4, so no binaries are released for this version. ## 1.0.4 - 2016-02-20 Major changes: * Some notable changes in `stack init`: * Overall it should now be able to initialize almost all existing Cabal packages out of the box as long as the package itself is consistently defined. * Choose the best possible snapshot and add extra-deps on top of a snapshot other than a compiler snapshot - [#1583](https://github.com/commercialhaskell/stack/pull/1583) * Automatically omit a package (`--omit-packages`) when it is compiler incompatible or when there are packages with conflicting dependency requirements. See [#1674](https://github.com/commercialhaskell/stack/pull/1674). * Some more changes for a better user experience. Please refer to the doc guide for details. * Add support for Hpack, alternative package description format [#1679](https://github.com/commercialhaskell/stack/issues/1679) Other enhancements: * Docker: pass ~/.ssh and SSH auth socket into container, so that git repos work [#1358](https://github.com/commercialhaskell/stack/issues/1358). * Docker: strip suffix from docker --version. [#1653](https://github.com/commercialhaskell/stack/issues/1653) * Docker: pass USER and PWD environment variables into container. * On each run, Stack will test the Stack root directory (~/.stack), and the project and package work directories (.stack-work) for whether they are owned by the current user and abort if they are not. 
This precaution can be disabled with the `--allow-different-user` flag or `allow-different-user` option in the global config (~/.stack/config.yaml). [#471](https://github.com/commercialhaskell/stack/issues/471) * Added `stack clean --full` option for full working dir cleanup. * YAML config: support Zip archives. * Redownload build plan if parsing fails [#1702](https://github.com/commercialhaskell/stack/issues/1702). * Give mustache templates access to a 'year' tag [#1716](https://github.com/commercialhaskell/stack/pull/1716). * Have "stack ghci" warn about module name aliasing. * Add "stack ghci --load-local-deps". * Build Setup.hs with -rtsopts [#1687](https://github.com/commercialhaskell/stack/issues/1687). * `stack init` accepts a list of directories. * Add flag infos to DependencyPlanFailures (for better error output in case of flags) [#713](https://github.com/commercialhaskell/stack/issues/713) * `stack new --bare` complains for overwrites, and add `--force` option [#1597](https://github.com/commercialhaskell/stack/issues/1597). Bug fixes: * Previously, `stack ghci` would fail with `cannot satisfy -package-id` when the implicit build step changes the package key of some dependency. * Fix: Building with ghcjs: "ghc-pkg: Prelude.chr: bad argument: 2980338" [#1665](https://github.com/commercialhaskell/stack/issues/1665). * Fix running test / bench with `--profile` / `--trace`. * Fix: build progress counter is no longer visible [#1685](https://github.com/commercialhaskell/stack/issues/1685). * Use "-RTS" w/ profiling to allow extra args [#1772](https://github.com/commercialhaskell/stack/issues/1772). * Fix withUnpackedTarball7z to find name of srcDir after unpacking (fixes `stack setup` fails for ghcjs project on windows) [#1774](https://github.com/commercialhaskell/stack/issues/1774). 
* Add space before auto-generated bench opts (makes profiling options work uniformly for applications and benchmark suites) [#1771](https://github.com/commercialhaskell/stack/issues/1771). * Don't try to find plugin if it resembles flag. * Setup.hs changes cause package dirtiness [#1711](https://github.com/commercialhaskell/stack/issues/1711). * Send "stack templates" output to stdout [#1792](https://github.com/commercialhaskell/stack/issues/1792). ## 1.0.2 - 2016-01-18 Release notes: - Arch Linux: Stack has been adopted into the [official community repository](https://www.archlinux.org/packages/community/x86_64/stack/), so we will no longer be updating the AUR with new versions. See the [install/upgrade guide](http://docs.haskellstack.org/en/stable/install_and_upgrade/#arch-linux) for current download instructions. Major changes: - `stack init` and `solver` overhaul [#1583](https://github.com/commercialhaskell/stack/pull/1583) Other enhancements: - Disable locale/codepage hacks when GHC >=7.10.3 [#1552](https://github.com/commercialhaskell/stack/issues/1552) - Specify multiple images to build for `stack image container` [docs](http://docs.haskellstack.org/en/stable/yaml_configuration/#image) - Specify which executables to include in images for `stack image container` [docs](http://docs.haskellstack.org/en/stable/yaml_configuration/#image) - Docker: pass supplementary groups and umask into container - If git fetch fails wipe the directory and try again from scratch [#1418](https://github.com/commercialhaskell/stack/issues/1418) - Warn if newly installed executables won't be available on the PATH [#1362](https://github.com/commercialhaskell/stack/issues/1362) - `stack.yaml`: for `stack image container`, specify multiple images to generate, and which executables should be added to those images - GHCI: add interactive Main selection [#1068](https://github.com/commercialhaskell/stack/issues/1068) - Care less about the particular name of a GHCJS sdist folder 
[#1622](https://github.com/commercialhaskell/stack/issues/1622) - Unified Enable/disable help messaging [#1613](https://github.com/commercialhaskell/stack/issues/1613) Bug fixes: - Don't share precompiled packages between GHC/platform variants and Docker [#1551](https://github.com/commercialhaskell/stack/issues/1551) - Properly redownload corrupted downloads with the correct file size. [Mailing list discussion](https://groups.google.com/d/msg/haskell-stack/iVGDG5OHYxs/FjUrR5JsDQAJ) - Gracefully handle invalid paths in error/warning messages [#1561](https://github.com/commercialhaskell/stack/issues/1561) - Nix: select the correct GHC version corresponding to the snapshot even when an abstract resolver is passed via `--resolver` on the command-line. [#1641](https://github.com/commercialhaskell/stack/issues/1641) - Fix: Stack does not allow using an external package from ghci [#1557](https://github.com/commercialhaskell/stack/issues/1557) - Disable ambiguous global '--resolver' option for 'stack init' [#1531](https://github.com/commercialhaskell/stack/issues/1531) - Obey `--no-nix` flag - Fix: GHCJS Execute.hs: Non-exhaustive patterns in lambda [#1591](https://github.com/commercialhaskell/stack/issues/1591) - Send file-watch and sticky logger messages to stderr [#1302](https://github.com/commercialhaskell/stack/issues/1302) [#1635](https://github.com/commercialhaskell/stack/issues/1635) - Use globaldb path for querying Cabal version [#1647](https://github.com/commercialhaskell/stack/issues/1647) ## 1.0.0 - 2015-12-24 Release notes: * We're calling this version 1.0.0 in preparation for Stackage LTS 4. Note, however, that this does not mean the code's API will be stable as this is primarily an end-user tool. Enhancements: * Added flag `--profile` flag: passed with `stack build`, it will enable profiling, and for `--bench` and `--test` it will generate a profiling report by passing `+RTS -p` to the executable(s). 
Great for using like `stack build --bench --profile` (remember that enabling profile will slow down your benchmarks by >4x). Run `stack build --bench` again to disable the profiling and get proper speeds * Added flag `--trace` flag: just like `--profile`, it enables profiling, but instead of generating a report for `--bench` and `--test`, prints out a stack trace on exception. Great for using like `stack build --test --trace` * Nix: all options can be overridden on command line [#1483](https://github.com/commercialhaskell/stack/issues/1483) * Nix: build environments (shells) are now pure by default. * Make verbosity silent by default in script interpreter mode [#1472](https://github.com/commercialhaskell/stack/issues/1472) * Show a message when resetting git commit fails [#1453](https://github.com/commercialhaskell/stack/issues/1453) * Improve Unicode handling in project/package names [#1337](https://github.com/commercialhaskell/stack/issues/1337) * Fix ambiguity between a Stack command and a filename to execute (prefer `stack` subcommands) [#1471](https://github.com/commercialhaskell/stack/issues/1471) * Support multi line interpreter directive comments [#1394](https://github.com/commercialhaskell/stack/issues/1394) * Handle space separated pids in ghc-pkg dump (for GHC HEAD) [#1509](https://github.com/commercialhaskell/stack/issues/1509) * Add ghci --no-package-hiding option [#1517](https://github.com/commercialhaskell/stack/issues/1517) * `stack new` can download templates from URL [#1466](https://github.com/commercialhaskell/stack/issues/1466) Bug fixes: * Nix: `stack exec` options are passed properly to the Stack sub process [#1538](https://github.com/commercialhaskell/stack/issues/1538) * Nix: specifying a shell-file works in any current working directory [#1547](https://github.com/commercialhaskell/stack/issues/1547) * Nix: use `--resolver` argument * Docker: fix missing image message and '--docker-auto-pull' * No HTML escaping for "stack new" template 
params [#1475](https://github.com/commercialhaskell/stack/issues/1475) * Set permissions for generated .ghci script [#1480](https://github.com/commercialhaskell/stack/issues/1480) * Restrict commands allowed in interpreter mode [#1504](https://github.com/commercialhaskell/stack/issues/1504) * `stack ghci` doesn't see preprocessed files for executables [#1347](https://github.com/commercialhaskell/stack/issues/1347) * All test suites run even when only one is requested [#1550](https://github.com/commercialhaskell/stack/pull/1550) * Edge cases in broken templates give odd errors [#1535](https://github.com/commercialhaskell/stack/issues/1535) * Fix test coverage bug on windows ## 0.1.10.1 - 2015-12-13 Bug fixes: * `stack image container` did not actually build an image [#1473](https://github.com/commercialhaskell/stack/issues/1473) ## 0.1.10.0 - 2015-12-04 Release notes: * The Stack home page is now at [haskellstack.org](http://haskellstack.org), which shows the documentation rendered by readthedocs.org. Note: this has necessitated some changes to the links in the documentation's markdown source code, so please check the links on the website before submitting a PR to fix them. * The locations of the [Ubuntu](http://docs.haskellstack.org/en/stable/install_and_upgrade/#ubuntu) and [Debian](http://docs.haskellstack.org/en/stable/install_and_upgrade/#debian) package repositories have changed to have correct URL semantics according to Debian's guidelines [#1378](https://github.com/commercialhaskell/stack/issues/1378). The old locations will continue to work for some months, but we suggest that you adjust your `/etc/apt/sources.list.d/fpco.list` to the new location to avoid future disruption. * [openSUSE and SUSE Linux Enterprise](http://docs.haskellstack.org/en/stable/install_and_upgrade/#suse) packages are now available, thanks to [@mimi1vx](https://github.com/mimi1vx). Note: there will be some lag before these pick up new versions, as they are based on Stackage LTS. 
Major changes: * Support for building inside a Nix-shell providing system dependencies [#1285](https://github.com/commercialhaskell/stack/pull/1285) * Add optional GPG signing on `stack upload --sign` or with `stack sig sign ...` Other enhancements: * Print latest applicable version of packages on conflicts [#508](https://github.com/commercialhaskell/stack/issues/508) * Support for packages located in Mercurial repositories [#1397](https://github.com/commercialhaskell/stack/issues/1397) * Only run benchmarks specified as build targets [#1412](https://github.com/commercialhaskell/stack/issues/1412) * Support git-style executable fall-through (`stack something` executes `stack-something` if present) [#1433](https://github.com/commercialhaskell/stack/issues/1433) * GHCi now loads intermediate dependencies [#584](https://github.com/commercialhaskell/stack/issues/584) * `--work-dir` option for overriding `.stack-work` [#1178](https://github.com/commercialhaskell/stack/issues/1178) * Support `detailed-0.9` tests [#1429](https://github.com/commercialhaskell/stack/issues/1429) * Docker: improved POSIX signal proxying to containers [#547](https://github.com/commercialhaskell/stack/issues/547) Bug fixes: * Show absolute paths in error messages in multi-package builds [#1348](https://github.com/commercialhaskell/stack/issues/1348) * Docker-built binaries and libraries in different path [#911](https://github.com/commercialhaskell/stack/issues/911) [#1367](https://github.com/commercialhaskell/stack/issues/1367) * Docker: `--resolver` argument didn't effect selected image tag * GHCi: Spaces in filepaths caused module loading issues [#1401](https://github.com/commercialhaskell/stack/issues/1401) * GHCi: cpp-options in Cabal files weren't used [#1419](https://github.com/commercialhaskell/stack/issues/1419) * Benchmarks couldn't be run independently of each other [#1412](https://github.com/commercialhaskell/stack/issues/1412) * Send output of building setup to stderr 
[#1410](https://github.com/commercialhaskell/stack/issues/1410) ## 0.1.8.0 - 2015-11-20 Major changes: * GHCJS can now be used with stackage snapshots via the new `compiler` field. * Windows installers are now available: [download them here](http://docs.haskellstack.org/en/stable/install_and_upgrade/#windows) [#613](https://github.com/commercialhaskell/stack/issues/613) * Docker integration works with non-FPComplete generated images [#531](https://github.com/commercialhaskell/stack/issues/531) Other enhancements: * Added an `allow-newer` config option [#922](https://github.com/commercialhaskell/stack/issues/922) [#770](https://github.com/commercialhaskell/stack/issues/770) * When a Hackage revision invalidates a build plan in a snapshot, trust the snapshot [#770](https://github.com/commercialhaskell/stack/issues/770) * Added a `stack config set resolver RESOLVER` command. Part of work on [#115](https://github.com/commercialhaskell/stack/issues/115) * `stack setup` can now install GHCJS on windows. See [#1145](https://github.com/commercialhaskell/stack/issues/1145) and [#749](https://github.com/commercialhaskell/stack/issues/749) * `stack hpc report` command added, which generates reports for HPC tix files * `stack ghci` now accepts all the flags accepted by `stack build`. See [#1186](https://github.com/commercialhaskell/stack/issues/1186) * `stack ghci` builds the project before launching GHCi. If the build fails, try to launch GHCi anyway. Use `stack ghci --no-build` option to disable [#1065](https://github.com/commercialhaskell/stack/issues/1065) * `stack ghci` now detects and warns about various circumstances where it is liable to fail. See [#1270](https://github.com/commercialhaskell/stack/issues/1270) * Added `require-docker-version` configuration option * Packages will now usually be built along with their tests and benchmarks. 
See [#1166](https://github.com/commercialhaskell/stack/issues/1166) * Relative `local-bin-path` paths will be relative to the project's root directory, not the current working directory. [#1340](https://github.com/commercialhaskell/stack/issues/1340) * `stack clean` now takes an optional `[PACKAGE]` argument for use in multi-package projects. See [#583](https://github.com/commercialhaskell/stack/issues/583) * Ignore cabal_macros.h as a dependency [#1195](https://github.com/commercialhaskell/stack/issues/1195) * Pad timestamps and show local time in --verbose output [#1226](https://github.com/commercialhaskell/stack/issues/1226) * GHCi: Import all modules after loading them [#995](https://github.com/commercialhaskell/stack/issues/995) * Add subcommand aliases: `repl` for `ghci`, and `runhaskell` for `runghc` [#1241](https://github.com/commercialhaskell/stack/issues/1241) * Add typo recommendations for unknown package identifiers [#158](https://github.com/commercialhaskell/stack/issues/158) * Add `stack path --local-hpc-root` option * Overhaul dependencies' haddocks copying [#1231](https://github.com/commercialhaskell/stack/issues/1231) * Support for extra-package-dbs in 'stack ghci' [#1229](https://github.com/commercialhaskell/stack/pull/1229) * `stack new` disallows package names with "words" consisting solely of numbers [#1336](https://github.com/commercialhaskell/stack/issues/1336) * `stack build --fast` turns off optimizations * Show progress while downloading package index [#1223](https://github.com/commercialhaskell/stack/issues/1223). 
Bug fixes: * Fix: Haddocks not copied for dependencies [#1105](https://github.com/commercialhaskell/stack/issues/1105) * Fix: Global options did not work consistently after subcommand [#519](https://github.com/commercialhaskell/stack/issues/519) * Fix: 'stack ghci' doesn't notice that a module got deleted [#1180](https://github.com/commercialhaskell/stack/issues/1180) * Rebuild when Cabal file is changed * Fix: Paths in GHC warnings not canonicalized, nor those for packages in subdirectories or outside the project root [#1259](https://github.com/commercialhaskell/stack/issues/1259) * Fix: unlisted files in tests and benchmarks trigger extraneous second build [#838](https://github.com/commercialhaskell/stack/issues/838) ## 0.1.6.0 - 2015-10-15 Major changes: * `stack setup` now supports building and booting GHCJS from source tarball. * On Windows, build directories no longer display "pretty" information (like x86_64-windows/Cabal-1.22.4.0), but rather a hash of that content. The reason is to avoid the 260 character path limitation on Windows. See [#1027](https://github.com/commercialhaskell/stack/pull/1027) * Rename config files and clarify their purposes [#969](https://github.com/commercialhaskell/stack/issues/969) * `~/.stack/stack.yaml` --> `~/.stack/config.yaml` * `~/.stack/global` --> `~/.stack/global-project` * `/etc/stack/config` --> `/etc/stack/config.yaml` * Old locations still supported, with deprecation warnings * New command "stack eval CODE", which evaluates to "stack exec ghc -- -e CODE". Other enhancements: * No longer install `git` on Windows [#1046](https://github.com/commercialhaskell/stack/issues/1046). You can still get this behavior by running the following yourself: `stack exec -- pacman -Sy --noconfirm git`. 
* Typing enter during --file-watch triggers a rebuild [#1023](https://github.com/commercialhaskell/stack/pull/1023) * Use Haddock's `--hyperlinked-source` (crosslinked source), if available [#1070](https://github.com/commercialhaskell/stack/pull/1070) * Use Stack-installed GHCs for `stack init --solver` [#1072](https://github.com/commercialhaskell/stack/issues/1072) * Experimental: Add `stack query` command [#1087](https://github.com/commercialhaskell/stack/issues/1087) * By default, Stack no longer rebuilds a package due to GHC options changes. This behavior can be tweaked with the `rebuild-ghc-options` setting. [#1089](https://github.com/commercialhaskell/stack/issues/1089) * By default, ghc-options are applied to all local packages, not just targets. This behavior can be tweaked with the `apply-ghc-options` setting. [#1089](https://github.com/commercialhaskell/stack/issues/1089) * Docker: download or override location of Stack executable to re-run in container [#974](https://github.com/commercialhaskell/stack/issues/974) * Docker: when Docker Engine is remote, don't run containerized processes as host's UID/GID [#194](https://github.com/commercialhaskell/stack/issues/194) * Docker: `set-user` option to enable/disable running containerized processes as host's UID/GID [#194](https://github.com/commercialhaskell/stack/issues/194) * Custom Setup.hs files are now precompiled instead of interpreted. This should be a major performance win for certain edge cases (biggest example: [building Cabal itself](https://github.com/commercialhaskell/stack/issues/1041)) while being either neutral or a minor slowdown for more common cases. * `stack test --coverage` now also generates a unified coverage report for multiple test-suites / packages. In the unified report, test-suites can contribute to the coverage of other packages. 
Bug fixes: * Ignore stack-built executables named `ghc` [#1052](https://github.com/commercialhaskell/stack/issues/1052) * Fix quoting of output failed command line arguments * Mark executable-only packages as installed when copied from cache [#1043](https://github.com/commercialhaskell/stack/pull/1043) * Canonicalize temporary directory paths [#1047](https://github.com/commercialhaskell/stack/pull/1047) * Put code page fix inside the build function itself [#1066](https://github.com/commercialhaskell/stack/issues/1066) * Add `explicit-setup-deps` option [#1110](https://github.com/commercialhaskell/stack/issues/1110), and change the default to the old behavior of using any package in the global and snapshot database [#1025](https://github.com/commercialhaskell/stack/issues/1025) * Precompiled cache checks full package IDs on Cabal < 1.22 [#1103](https://github.com/commercialhaskell/stack/issues/1103) * Pass -package-id to ghci [#867](https://github.com/commercialhaskell/stack/issues/867) * Ignore global packages when copying precompiled packages [#1146](https://github.com/commercialhaskell/stack/issues/1146) ## 0.1.5.0 - 2015-09-24 Major changes: * On Windows, we now use a full MSYS2 installation in place of the previous PortableGit. This gives you access to the pacman package manager for more easily installing libraries. 
* Support for custom GHC binary distributions [#530](https://github.com/commercialhaskell/stack/issues/530) * `ghc-variant` option in `stack.yaml` to specify the variant (also `--ghc-variant` command-line option) * `setup-info` in `stack.yaml`, to specify where to download custom binary distributions (also `--ghc-bindist` command-line option) * Note: On systems with libgmp4 (aka `libgmp.so.3`), such as CentOS 6, you may need to re-run `stack setup` due to the centos6 GHC bindist being treated like a variant * A new `--pvp-bounds` flag to the sdist and upload commands allows automatic adding of PVP upper and/or lower bounds to your dependencies Other enhancements: * Adapt to upcoming Cabal installed package identifier format change [#851](https://github.com/commercialhaskell/stack/issues/851) * `stack setup` takes a `--stack-setup-yaml` argument * `--file-watch` is more discerning about which files to rebuild for [#912](https://github.com/commercialhaskell/stack/issues/912) * `stack path` now supports `--global-pkg-db` and `--ghc-package-path` * `--reconfigure` flag [#914](https://github.com/commercialhaskell/stack/issues/914) [#946](https://github.com/commercialhaskell/stack/issues/946) * Cached data is written with a checksum of its structure [#889](https://github.com/commercialhaskell/stack/issues/889) * Fully removed `--optimizations` flag * Added `--cabal-verbose` flag * Added `--file-watch-poll` flag for polling instead of using filesystem events (useful for running tests in a Docker container while modifying code in the host environment. When code is injected into the container via a volume, the container won't propagate filesystem events). 
* Give a preemptive error message when `-prof` is given as a GHC option [#1015](https://github.com/commercialhaskell/stack/issues/1015) * Locking is now optional, and will be turned on by setting the `STACK_LOCK` environment variable to `true` [#950](https://github.com/commercialhaskell/stack/issues/950) * Create default `stack.yaml` with documentation comments and commented out options [#226](https://github.com/commercialhaskell/stack/issues/226) * Out of memory warning if Cabal exits with -9 [#947](https://github.com/commercialhaskell/stack/issues/947) Bug fixes: * Hacky workaround for optparse-applicative issue with `stack exec --help` [#806](https://github.com/commercialhaskell/stack/issues/806) * Build executables for local extra-deps [#920](https://github.com/commercialhaskell/stack/issues/920) * copyFile can't handle directories [#942](https://github.com/commercialhaskell/stack/pull/942) * Support for spaces in Haddock interface files [fpco/minghc#85](https://github.com/fpco/minghc/issues/85) * Temporarily building against a "shadowing" local package? [#992](https://github.com/commercialhaskell/stack/issues/992) * Fix `Setup.exe` name for `--upgrade-cabal` on Windows [#1002](https://github.com/commercialhaskell/stack/issues/1002) * Unlisted dependencies no longer trigger extraneous second build [#838](https://github.com/commercialhaskell/stack/issues/838) ## 0.1.4.1 - 2015-09-04 Fix stack's own Haddocks. No changes to functionality (only comments updated). ## 0.1.4.0 - 2015-09-04 Major changes: * You now have more control over how GHC versions are matched, e.g. "use exactly this version," "use the specified minor version, but allow patches," or "use the given minor version or any later minor in the given major release." The default has switched from allowing newer later minor versions to a specific minor version allowing patches. 
For more information, see [#736](https://github.com/commercialhaskell/stack/issues/736) and [#784](https://github.com/commercialhaskell/stack/pull/784). * Support added for compiling with GHCJS * Stack can now reuse prebuilt binaries between snapshots. That means that, if you build package foo in LTS-3.1, that binary version can be reused in LTS-3.2, assuming it uses the same dependencies and flags. [#878](https://github.com/commercialhaskell/stack/issues/878) Other enhancements: * Added the `--docker-env` argument, to set environment variables in Docker container. * Set locale environment variables to UTF-8 encoding for builds to avoid "commitBuffer: invalid argument" errors from GHC [#793](https://github.com/commercialhaskell/stack/issues/793) * Enable transliteration for encoding on stdout and stderr [#824](https://github.com/commercialhaskell/stack/issues/824) * By default, `stack upgrade` automatically installs GHC as necessary [#797](https://github.com/commercialhaskell/stack/issues/797) * Added the `ghc-options` field to `stack.yaml` [#796](https://github.com/commercialhaskell/stack/issues/796) * Added the `extra-path` field to `stack.yaml` * Code page changes on Windows only apply to the build command (and its synonyms), and can be controlled via a command line flag (still defaults to on) [#757](https://github.com/commercialhaskell/stack/issues/757) * Implicitly add packages to extra-deps when a flag for them is set [#807](https://github.com/commercialhaskell/stack/issues/807) * Use a precompiled Setup.hs for simple build types [#801](https://github.com/commercialhaskell/stack/issues/801) * Set --enable-tests and --enable-benchmarks optimistically [#805](https://github.com/commercialhaskell/stack/issues/805) * `--only-configure` option added [#820](https://github.com/commercialhaskell/stack/issues/820) * Check for duplicate local package names * Stop nagging people that call `stack test` [#845](https://github.com/commercialhaskell/stack/issues/845) * 
`--file-watch` will ignore files that are in your VCS boring/ignore files [#703](https://github.com/commercialhaskell/stack/issues/703) * Add `--numeric-version` option Bug fixes: * `stack init --solver` fails if `GHC_PACKAGE_PATH` is present [#860](https://github.com/commercialhaskell/stack/issues/860) * `stack solver` and `stack init --solver` check for test suite and benchmark dependencies [#862](https://github.com/commercialhaskell/stack/issues/862) * More intelligent logic for setting UTF-8 locale environment variables [#856](https://github.com/commercialhaskell/stack/issues/856) * Create missing directories for `stack sdist` * Don't ignore Cabal files with extra periods [#895](https://github.com/commercialhaskell/stack/issues/895) * Deprecate unused `--optimizations` flag * Truncated output on slow terminals [#413](https://github.com/commercialhaskell/stack/issues/413) ## 0.1.3.1 - 2015-08-12 Bug fixes: * Ignore disabled executables [#763](https://github.com/commercialhaskell/stack/issues/763) ## 0.1.3.0 - 2015-08-12 Major changes: * Detect when a module is compiled but not listed in the Cabal file ([#32](https://github.com/commercialhaskell/stack/issues/32)) * A warning is displayed for any modules that should be added to `other-modules` in the Cabal file * These modules are taken into account when determining whether a package needs to be built * Respect TemplateHaskell addDependentFile dependency changes ([#105](https://github.com/commercialhaskell/stack/issues/105)) * TH dependent files are taken into account when determining whether a package needs to be built. 
* Overhauled target parsing, added `--test` and `--bench` options [#651](https://github.com/commercialhaskell/stack/issues/651) * For details, see [Build commands documentation](http://docs.haskellstack.org/en/stable/build_command/) Other enhancements: * Set the `HASKELL_DIST_DIR` environment variable [#524](https://github.com/commercialhaskell/stack/pull/524) * Track build status of tests and benchmarks [#525](https://github.com/commercialhaskell/stack/issues/525) * `--no-run-tests` [#517](https://github.com/commercialhaskell/stack/pull/517) * Targets outside of root dir don't build [#366](https://github.com/commercialhaskell/stack/issues/366) * Upper limit on number of flag combinations to test [#543](https://github.com/commercialhaskell/stack/issues/543) * Fuzzy matching support to give better error messages for close version numbers [#504](https://github.com/commercialhaskell/stack/issues/504) * `--local-bin-path` global option. Use to change where binaries get placed on a `--copy-bins` [#342](https://github.com/commercialhaskell/stack/issues/342) * Custom snapshots [#111](https://github.com/commercialhaskell/stack/issues/111) * --force-dirty flag: Force treating all local packages as having dirty files (useful for cases where Stack can't detect a file change) * GHC error messages: display file paths as absolute instead of relative for better editor integration * Add the `--copy-bins` option [#569](https://github.com/commercialhaskell/stack/issues/569) * Give warnings on unexpected config keys [#48](https://github.com/commercialhaskell/stack/issues/48) * Remove Docker `pass-host` option * Don't require `cabal-install` to upload [#313](https://github.com/commercialhaskell/stack/issues/313) * Generate indexes for all deps and all installed snapshot packages [#143](https://github.com/commercialhaskell/stack/issues/143) * Provide `--resolver global` option [#645](https://github.com/commercialhaskell/stack/issues/645) * Also supports `--resolver nightly`, 
`--resolver lts`, and `--resolver lts-X` * Make `stack build --flag` error when flag or package is unknown [#617](https://github.com/commercialhaskell/stack/issues/617) * Preserve file permissions when unpacking sources [#666](https://github.com/commercialhaskell/stack/pull/666) * `stack build` etc work outside of a project * `list-dependencies` command [#638](https://github.com/commercialhaskell/stack/issues/638) * `--upgrade-cabal` option to `stack setup` [#174](https://github.com/commercialhaskell/stack/issues/174) * `--exec` option [#651](https://github.com/commercialhaskell/stack/issues/651) * `--only-dependencies` implemented correctly [#387](https://github.com/commercialhaskell/stack/issues/387) Bug fixes: * Extensions from the `other-extensions` field no longer enabled by default [#449](https://github.com/commercialhaskell/stack/issues/449) * Fix: haddock forces rebuild of empty packages [#452](https://github.com/commercialhaskell/stack/issues/452) * Don't copy over executables excluded by component selection [#605](https://github.com/commercialhaskell/stack/issues/605) * Fix: Stack fails on Windows with git package in `stack.yaml` and no git binary on path [#712](https://github.com/commercialhaskell/stack/issues/712) * Fixed GHCi issue: Specifying explicit package versions (#678) * Fixed GHCi issue: Specifying -odir and -hidir as .stack-work/odir (#529) * Fixed GHCi issue: Specifying A instead of A.ext for modules (#498) ## 0.1.2.0 - 2015-07-05 * Add `--prune` flag to `stack dot` [#487](https://github.com/commercialhaskell/stack/issues/487) * Add `--[no-]external`,`--[no-]include-base` flags to `stack dot` [#437](https://github.com/commercialhaskell/stack/issues/437) * Add `--ignore-subdirs` flag to init command [#435](https://github.com/commercialhaskell/stack/pull/435) * Handle attempt to use non-existing resolver [#436](https://github.com/commercialhaskell/stack/pull/436) * Add `--force` flag to `init` command * exec style commands accept the 
[Script interpreter](https://github.com/commercialhaskell/stack/wiki/Script-interpreter)
[#388](https://github.com/commercialhaskell/stack/issues/388) * valid-wanted is a confusing option name [#386](https://github.com/commercialhaskell/stack/issues/386) * `stack init` in multi-package project should use local packages for dependency checking [#384](https://github.com/commercialhaskell/stack/issues/384) * Display information on why a snapshot was rejected [#381](https://github.com/commercialhaskell/stack/issues/381) * Give a reason for unregistering packages [#389](https://github.com/commercialhaskell/stack/issues/389) * `stack exec` accepts the `--no-ghc-package-path` parameter * Don't require build plan to upload [#400](https://github.com/commercialhaskell/stack/issues/400) * Specifying test components only builds/runs those tests [#398](https://github.com/commercialhaskell/stack/issues/398) * `STACK_EXE` environment variable * Add the `stack dot` command * `stack upgrade` added [#237](https://github.com/commercialhaskell/stack/issues/237) * `--stack-yaml` command line flag [#378](https://github.com/commercialhaskell/stack/issues/378) * `--skip-ghc-check` command line flag [#423](https://github.com/commercialhaskell/stack/issues/423) Bug fixes: * Haddock links to global packages no longer broken on Windows [#375](https://github.com/commercialhaskell/stack/issues/375) * Make flags case-insensitive [#397](https://github.com/commercialhaskell/stack/issues/397) * Mark packages uninstalled before rebuilding [#365](https://github.com/commercialhaskell/stack/issues/365) ## 0.1.0.0 - 2015-06-23 * Fall back to Cabal dependency solver when a snapshot can't be found * Basic implementation of `stack new` [#137](https://github.com/commercialhaskell/stack/issues/137) * `stack solver` command [#364](https://github.com/commercialhaskell/stack/issues/364) * `stack path` command [#95](https://github.com/commercialhaskell/stack/issues/95) * Haddocks [#143](https://github.com/commercialhaskell/stack/issues/143): * Build for dependencies * Use relative links * Generate 
module contents and index for all packages in project ## 0.0.3 - 2015-06-17 * `--prefetch` [#297](https://github.com/commercialhaskell/stack/issues/297) * `upload` command ported from stackage-upload [#225](https://github.com/commercialhaskell/stack/issues/225) * `--only-snapshot` [#310](https://github.com/commercialhaskell/stack/issues/310) * `--resolver` [#224](https://github.com/commercialhaskell/stack/issues/224) * `stack init` [#253](https://github.com/commercialhaskell/stack/issues/253) * `--extra-include-dirs` and `--extra-lib-dirs` [#333](https://github.com/commercialhaskell/stack/issues/333) * Specify intra-package target [#201](https://github.com/commercialhaskell/stack/issues/201) ## 0.0.2 - 2015-06-14 * Fix some Windows specific bugs [#216](https://github.com/commercialhaskell/stack/issues/216) * Improve output for package index updates [#227](https://github.com/commercialhaskell/stack/issues/227) * Automatically update indices as necessary [#227](https://github.com/commercialhaskell/stack/issues/227) * --verbose flag [#217](https://github.com/commercialhaskell/stack/issues/217) * Remove packages (HTTPS and Git) [#199](https://github.com/commercialhaskell/stack/issues/199) * Config values for system-ghc and install-ghc * Merge `stack deps` functionality into `stack build` * `install` command [#153](https://github.com/commercialhaskell/stack/issues/153) and [#272](https://github.com/commercialhaskell/stack/issues/272) * overriding architecture value (useful to force 64-bit GHC on Windows, for example) * Overhauled test running (allows cycles, avoids unnecessary recompilation, etc) ## 0.0.1 - 2015-06-09 * First public release, beta quality stack-2.15.7/doc/CI.md0000644000000000000000000000106514353310533012465 0ustar0000000000000000
# Continuous integration (CI) ## GitHub Actions The Stack repository uses GitHub Actions for its own CI. For further information, see the guide to [contributing](CONTRIBUTING.md#continuous-integration-ci). ## Azure For further information, see the [Azure CI](azure_ci.md) documentation. ## Travis For further information, see the [Travis CI](travis_ci.md) documentation. stack-2.15.7/doc/clean_command.md0000644000000000000000000000103014353310533014742 0ustar0000000000000000
# The `stack clean` command Either ~~~text stack clean [PACKAGE] ~~~ or ~~~text stack clean --full ~~~ `stack clean` deletes build artefacts for one or more project packages specified as arguments. If no project packages are specified, all project packages are cleaned. `stack clean --full` deletes the project's Stack working directory. stack-2.15.7/doc/config_command.md0000644000000000000000000001177214620153445015147 0ustar0000000000000000
# The `stack config` commands ~~~text stack config COMMAND Available commands: env Print environment variables for use in a shell set Sets a key in YAML configuration file to value ~~~ The `stack config` commands provide assistance with accessing or modifying Stack's configuration. See `stack config` for the available commands. ## The `stack config env` command ~~~text stack config env [--[no-]locals] [--[no-]ghc-package-path] [--[no-]stack-exe] [--[no-]locale-utf8] [--[no-]keep-ghc-rts] ~~~ `stack config env` outputs a script that sets or unsets environment variables for a Stack environment. Flags modify the script that is output: * `--[no-]locals` (enabled by default) include/exclude project package information * `--[no-]ghc-package-path` (enabled by default) set `GHC_PACKAGE_PATH` environment variable or not * `--[no-]stack-exe` (enabled by default) set `STACK_EXE` environment variable or not * `--[no-]locale-utf8` (disabled by default) set the `GHC_CHARENC` environment variable to `UTF-8` or not * `--[no-]keep-ghc-rts` (disabled by default) keep/discard any `GHCRTS` environment variable The command also accepts flags and options of the [`stack build`](build_command.md#flags-affecting-ghcs-behaviour) command that affect the location of the local project installation directory, such as `--profile` and `--no-strip`. For further information, see the documentation of the [project Stack work directory](stack_work.md#project-stack-work-directory). ## The `stack config set` commands ~~~text stack config set COMMAND Available commands: install-ghc Configure whether Stack should automatically install GHC when necessary. package-index Configure Stack's package index resolver Change the resolver key of the current project. snapshot Change the snapshot of the current project. system-ghc Configure whether Stack should use a system GHC installation or not. ~~~ The `stack config set` commands allow the values of keys in YAML configuration files to be set. 
See `stack config set` for the available keys. !!! note The `config set` commands support an existing key only in the form `key: value` on a single line. ## The `stack config set install-ghc` command ~~~text stack config set install-ghc [--global] true|false ~~~ `stack config set install-ghc true` or `false` sets the `install-ghc` key in a YAML configuration file, accordingly. By default, the project-level configuration file (`stack.yaml`, by default) is altered. The `--global` flag specifies the user-specific global configuration file (`config.yaml`). ## The `stack config set package-index download-prefix` command [:octicons-tag-24: 2.9.3](https://github.com/commercialhaskell/stack/releases/tag/v2.9.3) ~~~text stack config set package-index download-prefix [--global] [URL] ~~~ `stack config set package-index download-prefix ` sets the `download-prefix` key of the `package-index` key in a YAML configuration file, accordingly. By default, the project-level configuration file (`stack.yaml`, by default) is altered. The `--global` flag specifies the user-specific global configuration file (`config.yaml`). ## The `stack config set resolver` command ~~~text stack config set resolver SNAPSHOT ~~~ `stack config set resolver ` sets the `resolver` key in the project-level configuration file (`stack.yaml`, by default). A snapshot of `lts` or `nightly` will be translated into the most recent available. A snapshot of `lts-22` will be translated into the most recent available in the `lts-22` sequence. Known bug: * The command does not respect the presence of a `snapshot` key. ## The `stack config set snapshot` command [:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) ~~~text stack config set snapshot SNAPSHOT ~~~ `stack config set snapshot ` sets the `snapshot` key in the project-level configuration file (`stack.yaml`, by default). A snapshot of `lts` or `nightly` will be translated into the most recent available. 
A snapshot of `lts-22` will be translated into the most recent available in the `lts-22` sequence. Known bug: * The command does not respect the presence of a `resolver` key. ## The `stack config set system-ghc` command ~~~text stack config set system-ghc [--global] true|false ~~~ `stack config set system-ghc true` or `false` sets the `system-ghc` key in a YAML configuration file, accordingly. By default, the project-level configuration file (`stack.yaml`, by default) is altered. The `--global` flag specifies the user-specific global configuration file (`config.yaml`). stack-2.15.7/doc/CONTRIBUTING.md0000644000000000000000000007724214620153473014120 0ustar0000000000000000# Contributors Guide Thank you for considering contributing to the maintenance or development of Stack, or otherwise supporting users of Stack! We hope that the following information will encourage and assist you. We start with some advice about Stack's goals and governance, and approach to supporting users. ## Stack's goals Stack's current goals are: * To provide easy to use tooling for Haskell development * To provide complete support for at least the following three development environments: Linux, macOS, and Windows * To address the needs of industrial users, open source maintainers, and other people * To focus on the 'curated package set' use case * To prioritize reproducible build plans The goals above are not set in stone. However, any major changes to them should involve significant public discussion and a public vote by the Stack maintainer team. ## Stack's governance People involved in maintaining or developing Stack with rights to make commits to the repository can be classified into two groups: 'committers' and 'maintainers'. ### Stack's committers We encourages a wide range of people to be granted rights to make commits to the repository. 
We encourage a wide range of people to be granted rights to make commits to the repository.
However, maintainers should favor getting consensus first if: * They are uncertain what is the best course of action * They anticipate that other maintainers or users of Stack will disagree on the decision ## Stack's support A large part of the general discussion around Stack is on support-related topics, and that is reflected in the current issue tracker content. Assistance in responding to such matters is greatly appreciated. While support-related matters can be posted here as an 'issue', we encourage the use of other forums, in particular [Haskell's Discourse](https://discourse.haskell.org/). We also recommend Haskell's Discourse for general discussions about Stack's current or desired features. Stack is also discussed on Reddit's [Haskell community](https://www.reddit.com/r/haskell/). We encourage use of those other forums because support-related discussions can clog up the issue tracker and make it more difficult to maintain the project. People needing support may also get a faster and fuller response on other forums. Additions to the issue tracker are better suited to concrete feature proposals, bug reports, and other code base discussions (for example, refactorings). ## Bug Reports Please [open an issue](https://github.com/commercialhaskell/stack/issues/new) and use the provided template to include all necessary details. The more detailed your report, the faster it can be resolved and will ensure it is resolved in the right way. Once your bug has been resolved, the responsible person will tag the issue as _Needs confirmation_ and assign the issue back to you. Once you have tested and confirmed that the issue is resolved, close the issue. If you are not a member of the project, you will be asked for confirmation and we will close it. ## Documentation Consistent with its goal of being easy to use, Stack aims to maintain a high quality of in-tool and online documentation. 
The in-tool documentation includes the output when the `--help` flag is specified and the content of Stack's warning and error messages. When drafting documentation it is helpful to have in mind the intended reader and what they are assumed to know, and not know, already. In that regard, documentation should aim to meet, at least, the needs of a person who is about to begin to study computing as an undergraduate but who has not previously coded using Haskell. That person may be familiar with one popular operating system but may not be familiar with others. The files which make up Stack's online documentation are located in the `doc` directory of the repository. They are formatted in the [Markdown syntax](https://daringfireball.net/projects/markdown/), with some extensions. Those files are rendered on [haskellstack.org](http://haskellstack.org) by [Read the Docs](https://readthedocs.org/) using [MkDocs](https://www.mkdocs.org/) and the [Material for MkDocs](https://squidfunk.github.io/mkdocs-material/) theme. The `stable` branch of the repository provides the 'stable' version of the online documentation. The `master` branch provides the 'latest' version of the documentation. The 'stable' version of the online documentation is intended to be applicable to the latest released version of Stack. If you would like to help with that documentation, please submit a [pull request](https://help.github.com/articles/using-pull-requests/) with your changes/additions based off the [stable branch](https://github.com/commercialhaskell/stack/tree/stable). The Markdown files are organised into the navigation menu (the table of contents) in the file `mkdocs.yml`, the configuration file for MkDocs. The description of a file in the menu can differ from the file's name. The navigation menu allows files to be organised in a hierarchy. Currently, up to three levels are used. The top level is: * **Welcome!:** The introduction to Stack. 
This page aims to be no longer than necessary but also to not assume much existing knowledge on the part of the reader. It provides a 'quick start' guide to getting and using Stack. * **How to get & use Stack:** This includes Stack's user's guide, answers to frequently asked questions, and more thorough explanations of aspects of Stack. The user's guide is divided into two parts. The first part is 'introductory', and has the style of a tutorial. The second part is 'advanced', and has more of a reference style. * **How Stack works (advanced):** Many users will not need to consult this advanced documentation. * **Stack's code (advanced):** Other information useful to people contributing to, or maintaining, Stack's code, documentation, and other files. * **Signing key:** How Stack's released executables are signed. * **Glossary:** A glossary of terms used throughout Stack's in-tool and online documentation. We aim to describe the same things in the same way in different places. * **Version history:** The log of changes to Stack between versions. The specific versions of the online documentation (eg `v: v2.9.1`) are generated from the content of files at the point in the repository's history specified by the corresponding release tag. Consequently, that content is fixed once released. If the names of Markdown files do not change between versions, then people can use the flyout on the online documentation to move between different versions of the same page. For that reason, the names of new Markdown files should be chosen with care and existing Markdown files should not be deleted or renamed without due consideration of the consequences. The Markdown syntax supported by MkDocs and the Material for MkDocs theme can differ from the GitHub Flavored Markdown ([GFM](https://github.github.com/gfm/)) supported for content on GitHub.com. 
then the files will be checked out as small plain files that contain the link text. See the [Git documentation](https://git-scm.com/docs/git-config#Documentation/git-config.txt-coresymlinks).
Typically, there is one such type for each module that exports functions that throw exceptions. This type and the related `instance` definitions are usually located at the top of the relevant module. Stack supports two types of exceptions: 'pretty' exceptions that are instances of class `RIO.PrettyPrint.Pretty`, which provides `pretty :: e -> StyleDoc`, and thrown as expressions of type `RIO.PrettyPrint.PrettyException.PrettyException`; and other 'plain' exceptions that are simply instances of class `Control.Exception.Exception` and, hence, instances of class `Show`. These types and classes are re-exported by `Stack.Prelude`. Stack throws exceptions in parts of the code that should, in principle, be unreachable. The functions `Stack.Prelude.bugReport` and `Stack.Prelude.bugPrettyReport` are used to give the messages a consistent format. The names of the data constructors for those exceptions usually end in `Bug`. In a few cases, Stack may throw an exception in 'pure' code. The function `RIO.impureThrow :: Exception e => e -> a`, re-exported by `Stack.Prelude`, is used for that purpose. ## Code If you would like to contribute code to fix a bug, add a new feature, or otherwise improve `stack`, pull requests are most welcome. It's a good idea to [submit an issue](https://github.com/commercialhaskell/stack/issues/new) to discuss the change before plowing into writing code. If you'd like to help out but aren't sure what to work on, look for issues with the [awaiting pull request](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pull+request%22) label. Issues that are suitable for newcomers to the codebase have the [newcomer friendly](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pull+request%22+label%3a%22newcomer+friendly%22) label. Best to post a comment to the issue before you start work, in case anyone has already started. 
## Backwards Compatibility
The yamllint configuration extends the tool's default and is set out in `.yamllint.yaml`. In particular, indentation is set at 2 spaces and `- ` in sequences is treated as part of the indentation. ### Linting of Haskell source code The HLint configuration is set out in `.hlint.yaml`.
Some of those fields have similar types, and so on. Given that, Stack makes use of `OverloadedRecordDot`, introduced in GHC 9.2.1. It also makes use of `NoFieldSelectors`, also introduced in GHC 9.2.1, and, where necessary, `DuplicateRecordFields`. Together, these language extensions enable the removal from the names of fields of the prefixes that were used historically to indicate the type and make field names unique. This is because the names of fields no longer need to be unique in situations where the intended field is unambiguous. This allows for a terser syntax without loss of expressiveness. For example: ~~~haskell let cliTargets = (boptsCLITargets . bcoBuildOptsCLI) bco ~~~ can become: ~~~haskell let cliTargets = bco.buildOptsCLI.targets ~~~ The intended field is unambiguous in almost all cases. In the case of a few record updates it is ambiguous. The name of the field needs to be qualified in those cases. For example: ~~~haskell import qualified Stack.Types.Build as ConfigCache ( ConfigCache (..) ) ... let ignoreComponents :: ConfigCache -> ConfigCache ignoreComponents cc = cc { ConfigCache.components = Set.empty } ~~~ ## Code Style A single code style is not applied consistently to Stack's code and Stack is not Procrustean about matters of style. Rules of thumb, however, are: * keep pull requests that simply reformat code separate from those that make other changes to code; and * when making changes to code other than reformatting, follow the existing style of the function(s) or module(s) in question. That said, the following may help: * Stack's code generally avoids the use of C preprocessor (CPP) directives. Windows and non-Windows code is separated in separate source code directories and distinguished in Stack's Cabal file. `Stack.Constants.osIsWindows :: Bool` is provided. Multi-line strings are generally formatted on the assumption that GHC's `CPP` language pragma is not being used. 
* Language pragmas usually start with `NoImplicitPrelude`, where applicable, and then all others are listed alphabetically. The closing `#-}` are aligned, for purely aesthetic reasons. * Stack is compiled with GHC's `-Wall` enabled, which includes `-Wtabs` (no tabs in source code). Most modules are based on two spaces (with one space for a `where`) for indentation but older and larger modules are still based on four spaces. * Stack's code and documentation tend to be based on lines of no more than 80 characters or, if longer, no longer than necessary. * Stack uses export lists. * Stack's imports are listed alphabetically, including `Stack.Prelude`, where applicable. The module names are left aligned, with space left for `qualified` where it is absent. * Stack's code is sufficiently stable that explicit import lists can sensibly be used. The exception is the import of `Stack.Prelude`. Not all modules have comprehensive explicit import lists. * Short explicit import lists follow the module name. Longer lists start on the line below the module name. Spaces are used to separate listed items from their enclosing parentheses. * As noted above, the types used to implement Stack's exceptions and the related `instance` definitions are usually located at the top of the relevant module. * In function type signatures, the `::` is kept on the same line as the function's name. This format is Haskell syntax highlighter-friendly. * If `where` is used, the declarations follow on a separate line. ## Testing The Stack code has both unit tests and integration tests. ### Working with Unit Tests Unit tests can be found in the [tests/unit](https://github.com/commercialhaskell/stack/tree/master/tests/unit) directory. Tests are written using the [Hspec](https://hspec.github.io/) framework. In order to run the full test suite, you can simply command: ~~~text stack test ~~~ The `--file-watch` is a very useful option to get quick feedback. 
However, running the entire test suite after each file change will slow you down. You'll need to specify which test suite (unit test or integration) and pass arguments to specify which module you'd specifically like to run to get quick feedback. A description of this follows below. If you would like to run the unit tests on their own, you can command: ~~~text stack test stack:stack-unit-test ~~~ Running an individual module works with a command like this: ~~~text stack test stack:stack-unit-test --ta "-m <pattern>" ~~~ Where `<pattern>` is the name of the module without `Spec.hs`. You may also load tests into GHCi and run them with this command: ~~~text stack ghci stack:stack-unit-test --only-main # GHCi starting up output ... > :main -m "<pattern>" ~~~ Where again, `<pattern>` is the name of the module without `Spec.hs`. ### Working with Integration Tests Integration tests can be found in the [tests/integration](https://github.com/commercialhaskell/stack/tree/master/tests/integration) folder. Running the integration tests is a little involved, you'll need to command: ~~~text stack build --flag stack:integration-tests stack --exec stack-integration-test ~~~ Running an individual module works with a command like this: ~~~text stack build --flag stack:integration-tests stack --exec "stack-integration-test -m <pattern>" ~~~ Where `<pattern>` is the name of the folder listed in the [test/integration/tests/](https://github.com/commercialhaskell/stack/tree/master/test/integration/tests) directory. You may also achieve this through GHCi with this command: ~~~text stack ghci stack:stack-integration-test # GHCi starting up output ... > :main -m "<pattern>" ~~~ Where again, `<pattern>` is the name of the folder listed in the [test/integration/tests/](https://github.com/commercialhaskell/stack/tree/master/test/integration/tests) directory. 
You can disable a few integration tests through the `-n` option: ~~~text stack build --flag stack:integration-tests stack --exec "stack-integration-test -n <pattern1> -n <pattern2>" ~~~ To disable folders named after `<pattern1>` and `<pattern2>`. It's especially useful when some tests are taking a while to complete. ## Continuous integration (CI) We use [GitHub Actions](https://docs.github.com/en/actions) to do CI on Stack. The configuration of the workflows is in the YAML files in `.github/workflows`. The current active workflows are: ### Linting - `lint.yml` This workflow will run if: * there is a pull request * commits are pushed to these branches: `master`, `stable` and `rc/**` The workflow has one job (`style`). It runs on `ubuntu` only and applies yamllint and HLint. ### Test suite - `unit-tests.yml` This workflow will run if: * there is a pull request * commits are pushed to these branches: `master`, `stable` and `rc/**`. * requested The workflow has two jobs: `pedantic` and `unit-tests`. The `pedantic` job runs on `ubuntu` only and builds Stack with the `--pedantic` flag. The `unit-tests` job runs on a matrix of operating systems and Stack project-level YAML configuration files (`stack.yaml`, by default). It builds and tests Stack with the following flags: `--haddock --no-haddock-deps`. Its approach to creating a cache depends on the operating system. Its 'Cache dependencies on Unix-like OS' step caches the Stack root on Unix-like operating systems. Its 'Cache dependencies on Windows' step caches the same information on Windows, but takes into account that a relevant directory is located outside of the Stack root. ### Integration-based - `integration-tests.yml` This workflow will run if: * there is a pull request * commits are pushed to these branches: `master`, `stable` and `rc/**` * any tag is created * requested The workflow has three jobs: `integration-tests`, `linux-arm64` and `github-release`. 
The `integration-tests` job runs on a matrix of operating systems (`ubuntu`, `windows` and `macos`) and makes use of the `release.hs` script at `etc/scripts`. Its approach to creating a cache is the same as for `unit-tests.yml`, described above. Its 'Install deps and run checks' step uses `release.hs check`. Its 'Build bindist' step uses `release.hs build`. Its 'Upload bindist' step uploads artifacts using the name of the runner's operating system (`Linux`, `Windows` or `macOS`) as the name for the artifacts. The `linux-arm64` job runs on a self-hosted runner for Linux and ARM64. It makes use of Docker and a Docker file at `etc/dockerfiles/arm64.Dockerfile`. Its 'Build bindist' step makes use of a compiled version of `release.hs` script at `etc/scripts` to command `release build`. Its 'Upload bindist' step uploads artifacts using `Linux-ARM64` as the name for the artifacts. The `github-release` job needs `integration-tests` and `linux-arm64`. It only takes effect if the trigger for the workflow was the creation of a tag. Its four steps `Download Linux/Windows/macOS/Linux-ARM64 artifact` download the named artifacts to path `_release`. Its step 'Hash and sign assets' makes use of a 'secret' environment variable `RELEASE_SIGNING_KEY` established by the owner of the Stack repository. The variable contains the private key for the GPG key with ID 0x575159689BEFB442. That key is imported into GPG and then used by GPG to create a detached signature for each file. ### Stan tool - `stan.yml` [Stan](https://hackage.haskell.org/package/stan) is a Haskell static analysis tool. As of `stan-0.1.0.1`, it supports GHC >= 9.6.3 and Stack is built with GHC 9.6.5. The tool is configured by the contents of the `.stan.toml` file. 
This workflow will run if: * there is a pull request * requested ## Haskell Language Server You may be using [Visual Studio Code](https://code.visualstudio.com/) (VS Code) with its [Haskell extension](https://marketplace.visualstudio.com/items?itemName=haskell.haskell), which is powered by the [Haskell Language Server](https://github.com/haskell/haskell-language-server) (HLS). Stack can be built with Stack (which is recommended) or with Cabal (the tool). === "Stack" If you use Stack to build Stack, command `stack ghci` in the root directory of the Stack project should work as expected, if you have first commanded `stack build` once. `stack build` causes Cabal (the library) to create the automatically generated module `Stack_build`. If `ghc` is not on your PATH, then Haskell Language Server may report the following error about `Stack.Constants.ghcShowOptionsOutput`: ~~~text • Exception when trying to run compile-time code: ghc: readCreateProcess: does not exist (No such file or directory) Code: (TH.runIO (readProcess "ghc" ["--show-options"] "") >>= TH.lift . lines) • In the untyped splice: $(TH.runIO (readProcess "ghc" ["--show-options"] "") >>= TH.lift . lines) ~~~ `ghc` should be on the PATH if you run VS Code itself in the Stack environment: ~~~text stack exec -- code . 
~~~ The following [cradle (`hie.yaml`)](https://github.com/haskell/hie-bios) should suffice to configure Haskell Language Server (HLS) explicitly for `./Setup.hs` and each of the buildable components in Stack's Cabal file: ~~~yaml cradle: multi: - path: "./Setup.hs" config: cradle: direct: arguments: [] - path: "./" config: cradle: stack: - path: "./src" component: "stack:lib" - path: "./app" component: "stack:exe:stack" - path: "./tests/integration" component: "stack:exe:stack-integration-test" - path: "./tests/unit" component: "stack:test:stack-unit-test" ~~~ === "Cabal (the tool)" If you use Cabal (the tool) to build Stack, command `cabal repl` in the root directory of the Stack project should work as expected, if you have GHC and (on Windows) MSYS2 on the PATH. Stack's custom `./Setup.hs` causes `cabal repl` to cause Cabal (the library) to create the automatically generated module `Stack_build`. If `ghc` is not on your PATH, then Haskell Language Server may report the following error about `Stack.Constants.ghcShowOptionsOutput`: ~~~text • Exception when trying to run compile-time code: ghc: readCreateProcess: does not exist (No such file or directory) Code: (TH.runIO (readProcess "ghc" ["--show-options"] "") >>= TH.lift . lines) • In the untyped splice: $(TH.runIO (readProcess "ghc" ["--show-options"] "") >>= TH.lift . lines) ~~~ `ghc` and (on Windows) MSYS2 should be on the PATH if you run commands (including `cabal`) in the Stack environment: ~~~text stack exec --no-ghc-package-path -- cabal repl ~~~ or ~~~text stack exec --no-ghc-package-path -- code . ~~~ Use of GHC's environment variable `GHC_PACKAGE_PATH` is not compatible with Cabal (the tool). That is why the `--no-ghc-package-path` flag must be specified with `stack exec` when relying on Cabal (the tool). 
The following [cradle (`hie.yaml`)](https://github.com/haskell/hie-bios) should suffice to configure Haskell Language Server (HLS) explicitly for `./Setup.hs` and each of the buildable components in Stack's Cabal file: ~~~yaml cradle: multi: - path: "./Setup.hs" config: cradle: direct: arguments: [] - path: "./" config: cradle: cabal: - path: "./src" component: "lib:stack" - path: "./app" component: "exe:stack" - path: "./tests/integration" component: "exe:stack-integration-test" - path: "./tests/unit" component: "test:stack-unit-test" ~~~ A cradle is not committed to Stack's repository because it imposes a choice of build tool. ## Dev Containers A [Development Container](https://containers.dev) (or Dev Container for short) allows you to use a container as a full‑featured development environment. You can run Dev Containers locally/remotely (with VS Code), or create a [Codespace](https://github.com/features/codespaces) for a branch in a repository to develop online. Stack's default Dev Container is intended for use with its default project‑level configuration (`stack.yaml`). But there are also Dev Containers for the experimental project‑level configurations. For further information, see the documentation for [Dev Containers](dev_containers.md). ## Slack channel If you're making deep changes and real-time communication with the Stack team would be helpful, we have a `#stack-collaborators` Slack channel in the Haskell Foundation workspace. To join the workspace, follow this [link](https://haskell-foundation.slack.com/join/shared_invite/zt-z45o9x38-8L55P27r12YO0YeEufcO2w#/shared-invite/email). ## Matrix room There is also a [Haskell Stack room](https://matrix.to/#/#haskell-stack:matrix.org) at address `#haskell-stack:matrix.org` on [Matrix](https://matrix.org/). stack-2.15.7/doc/custom_snapshot.md0000644000000000000000000001014114620153473015423 0ustar0000000000000000
## Snapshot specification [:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) Snapshots provide a list of packages to use, along with flags, GHC options, and a few other settings. Snapshots may extend any other snapshot that can be specified in a [`snapshot`](yaml_configuration.md#snapshot) or [`resolver`](yaml_configuration.md#resolver) key. The packages specified follow the same syntax for dependencies in Stack's project-level configuration files. Unlike the `extra-deps` key, however, no support for local directories is available in snapshots to ensure reproducibility. !!! info Stack uses the [Pantry](https://hackage.haskell.org/package/pantry) library for snapshot specification. ~~~yaml snapshot: lts-22.21 # Inherits GHC version and package set compiler: ghc-9.6.4 # Overwrites GHC version in the snapshot, optional # Additional packages, follows extra-deps syntax packages: - unordered-containers-0.2.7.1 - hashable-1.2.4.0 - text-1.2.2.1 # Packages from the parent snapshot to ignore drop-packages: - wai-extra # Packages which should be hidden hidden: wai: true warp: false # Set GHC options for specific packages ghc-options: warp: - -O2 # Override flags, can also override flags in the parent snapshot flags: unordered-containers: debug: true ~~~ If you put this in a `snapshot.yaml` file in the same directory as your project, you can now use the snapshot like this: ~~~yaml snapshot: snapshot.yaml ~~~ This is an example of a custom snapshot stored in the filesystem. They are assumed to be mutable, so you are free to modify it. We detect that the snapshot has changed by hashing the contents of the involved files, and using it to identify the snapshot internally. It is often reasonably efficient to modify a custom snapshot, due to Stack sharing snapshot packages whenever possible. 
### Overriding the compiler The following snapshot specification will be identical to `lts-22.21`, but use `ghc-9.6.4` instead of `ghc-9.6.5`: ~~~yaml snapshot: lts-22.21 compiler: ghc-9.6.4 ~~~ ### Dropping packages The following snapshot specification will be identical to `lts-22.21`, but without the `text` package in our snapshot. Removing this package will cause all the packages that depend on `text` to be unbuildable, but they will still be present in the snapshot. ~~~yaml snapshot: lts-22.21 drop-packages: - text ~~~ ### Hiding packages The following snapshot specification will be identical to `lts-22.21`, but the `text` package will be hidden when registering. This will affect, for example, the import parser in the script command. ~~~yaml snapshot: lts-22.21 hidden: - text ~~~ ### Specifying GHC options In order to specify GHC options for a package, you use the same syntax as the [ghc-options](yaml_configuration.md#ghc-options) key for build configuration. The following snapshot specification will be identical to `lts-22.21`, but provides `-O1` as a ghc-option for `text`: ~~~yaml snapshot: lts-22.21 packages: - text-2.0.2 ghc-options: text: -O1 ~~~ This works somewhat differently than the stack.yaml `ghc-options` field, in that options can only be specified for packages that are mentioned in the custom snapshot's `packages` list. It sets the ghc-options, rather than extending those specified in the snapshot being extended. Another difference is that the `*` entry for `ghc-options` applies to all packages in the `packages` list, rather than all packages in the snapshot. 
The following snapshot specification will be identical to `lts-22.21`, but it enables the `developer` Cabal flag: ~~~yaml snapshot: lts-22.21 packages: - text-2.0.2 flags: text: developer: true ~~~ stack-2.15.7/doc/debugging.md0000644000000000000000000001037414604306200014123 0ustar0000000000000000
# Debugging To profile a component of the current project, pass the [`--profile` flag](build_command.md#-profile-flag) to `stack build`. The flag: * for project packages, turns on the Cabal flag [`--enable-profiling`](https://cabal.readthedocs.io/en/stable/setup-commands.html#cmdoption-runhaskell-Setup.hs-configure-enable-profiling); * turns on the Cabal flag [`--enable-library-profiling`](https://cabal.readthedocs.io/en/stable/setup-commands.html#cmdoption-runhaskell-Setup.hs-configure-enable-library-profiling); and * passes GHC's [`+RTS -p` runtime options](https://downloads.haskell.org/ghc/latest/docs/users_guide/profiling.html#rts-flag--p) to any test suites and benchmarks. For example the following command will build the `my-tests` testsuite with profiling options and create a `my-tests.prof` file in the current directory as a result of the test run. ~~~text stack test --profile my-tests ~~~ The `my-tests.prof` file now contains time and allocation info for the test run. To create a profiling report for an executable, e.g. `my-exe`, you can command: ~~~text stack exec --profile -- my-exe +RTS -p ~~~ For more fine-grained control of compilation options there are the [`--library-profiling` flag](build_command.md#-no-library-profiling-flag) and [`--executable-profiling` flag](build_command.md#-no-executable-profiling-flag). The `--library-profiling` flag: * turns on the Cabal flag [`--enable-library-profiling`](https://cabal.readthedocs.io/en/stable/setup-commands.html#cmdoption-runhaskell-Setup.hs-configure-enable-library-profiling); and * passes GHC's [`+RTS -p` runtime options](https://downloads.haskell.org/ghc/latest/docs/users_guide/profiling.html#rts-flag--p) to any test suites and benchmarks. 
The `--executable-profiling` flag: * for project packages, turns on the Cabal flag [`--enable-profiling`](https://cabal.readthedocs.io/en/stable/setup-commands.html#cmdoption-runhaskell-Setup.hs-configure-enable-profiling); * turns on the Cabal flag [`--enable-library-profiling`](https://cabal.readthedocs.io/en/stable/setup-commands.html#cmdoption-runhaskell-Setup.hs-configure-enable-library-profiling); and * passes GHC's [`+RTS -p` runtime options](https://downloads.haskell.org/ghc/latest/docs/users_guide/profiling.html#rts-flag--p) to any test suites and benchmarks. To enable compilation with profiling options by default you can add the following to a project-level or global YAML configuration file: ~~~yaml build: library-profiling: true executable-profiling: true ~~~ ## Further reading For more commands and uses, see the [official GHC chapter on profiling](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/profiling.html), the [Haskell wiki](https://wiki.haskell.org/How_to_profile_a_Haskell_program), and the [chapter on profiling in Real World Haskell](http://book.realworldhaskell.org/read/profiling-and-optimization.html). ## Tracing To generate a backtrace in case of exceptions during a test or benchmarks run, use the `--trace` flag. Like `--profile` this compiles with profiling options, but adds the `+RTS -xc` runtime option. ## Debugging symbols Building with debugging symbols in the [DWARF information](https://ghc.haskell.org/trac/ghc/wiki/DWARF) is supported by Stack. This can be done by passing the flag `--ghc-options="-g"` and also to override the default behaviour of stripping executables of debugging symbols by passing either one of the following flags: `--no-strip`, `--no-library-stripping` or `--no-executable-stripping`. In Windows, GDB can be installed to debug an executable with `stack exec -- pacman -S gdb`. Windows' Visual Studio compiler's debugging format PDB is not supported at the moment. 
This might be possible by [separating](https://stackoverflow.com/questions/866721/how-to-generate-gcc-debug-symbol-outside-the-build-target) debugging symbols and [converting](https://github.com/rainers/cv2pdb) their format. Or as an option when [using the LLVM backend](http://blog.llvm.org/2017/08/llvm-on-windows-now-supports-pdb-debug.html). stack-2.15.7/doc/dev_containers.md0000644000000000000000000001502214604306200015166 0ustar0000000000000000
# Dev Containers A *container* refers to an isolated area of memory where application software and some drivers execute. A [Development Container](https://containers.dev) (or Dev Container for short) allows a container to be used as a full‑featured development environment. Stack provides the following Dev Containers: * a default Dev Container, intended for use with Stack's default project‑level configuration file (`stack.yaml`); and * alternative Dev Containers, intended for use with Stack's experimental project‑level configurations (in anticipation of building Stack with more recent versions of GHC). Stack's Dev Containers provide the following tools: 1. The [Haskell Toolchain](https://www.haskell.org/ghcup/install/#supported-tools) ([GHC](https://www.haskell.org/ghc), Stack, [Cabal (the tool)](https://cabal.readthedocs.io) and [HLS](https://haskell-language-server.readthedocs.io)) 2. [Git](https://git-scm.com) 3. [HLint](https://hackage.haskell.org/package/hlint) 4. [yamllint](https://yamllint.readthedocs.io) 5. [ShellCheck](https://www.shellcheck.net) 6. [hadolint](https://github.com/hadolint/hadolint) The tools in the Haskell Toolchain are installed at `/usr/local/bin`. HLS is provided in the default Dev Container only. !!! info The PATH is `$HOME/.cabal/bin:$HOME/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin`. Consequently, executables installed with Cabal (the tool) (at `$HOME/.cabal/bin` or `$HOME/.local/bin`) or Stack or Pip (at `$HOME/.local/bin`) take precedence over the same executable installed at `/usr/local/sbin`, `/usr/local/bin`, etc. 
[VS Code](https://code.visualstudio.com) is used as IDE, with the following extensions pre‑installed: * [Haskell](https://marketplace.visualstudio.com/items?itemName=haskell.haskell) (Default Dev Container only) * [GitHub Pull Requests and Issues](https://marketplace.visualstudio.com/items?itemName=GitHub.vscode-pull-request-github) * [GitLens — Git supercharged](https://marketplace.visualstudio.com/items?itemName=eamodio.gitlens) * Pinned to version 11.7.0 due to unsolicited AI content in recent versions * [Git Graph](https://marketplace.visualstudio.com/items?itemName=mhutchie.git-graph) * [ShellCheck](https://marketplace.visualstudio.com/items?itemName=timonwong.shellcheck) * [hadolint](https://marketplace.visualstudio.com/items?itemName=exiasr.hadolint) * [Resource Monitor](https://marketplace.visualstudio.com/items?itemName=mutantdino.resourcemonitor) ## Parent images Stack's Dev Containers are derived from Docker images that are used to build the *statically linked* Linux/x86_64 and Linux/AArch64 binary distributions of Stack. These Docker images are multi‑architecture (`linux/amd64`, `linux/arm64/v8`) *GHC musl* images. They are based on Alpine Linux (that is [musl libc](https://musl.libc.org) and [BusyBox](https://www.busybox.net)). The images contain *unofficial* binary distributions of GHC (that is, ones not released by the GHC developers). That is because: 1. the official GHC binary distributions for Alpine Linux/x86_64 have known bugs; and 2. there are no official binary distributions for Alpine Linux/AArch64. Stack's global configuration (`/etc/stack/config.yaml`) sets `system-ghc: true` and `install-ghc: false`. That ensures that only the GHC available in the Dev Containers is used. ## Usage You can run Dev Containers locally/remotely with VS Code, or create a [GitHub Codespace](https://github.com/features/codespaces) for a branch in a repository to develop online. 
=== "VS Code" Follow the instructions at [Developing inside a Container](https://code.visualstudio.com/docs/devcontainers/containers). === "GitHub Codespaces" For use with GitHub Codespaces, follow the instructions at [Creating a codespace for a repository](https://docs.github.com/en/codespaces/developing-in-codespaces/creating-a-codespace-for-a-repository#creating-a-codespace-for-a-repository). ## Build Stack Stack can be built with Stack (which is recommended) or with Cabal (the tool). === "Stack" Command `stack build` to build the `stack` executable. Append `--flag=stack:static` to build a *statically linked* `stack` executable that can run on any Linux machine of the same architecture. Append `--stack-yaml stack-ghc-$GHC_VERSION.yaml` if you want to use an experimental project‑level configuration with the appropriate Dev Container. === "Cabal (the tool)" !!! info Default Dev Container only. Command `cabal build` to build the `stack` executable. Append `--flag=static` to build a *statically linked* `stack` executable that can run on any Linux machine of the same architecture. ## Haskell Language Server (HLS) The [Haskell Language Server](https://github.com/haskell/haskell-language-server) and the [Haskell extension](https://marketplace.visualstudio.com/items?itemName=haskell.haskell) are only available in the default Dev Container. In order to use the Haskell extension, you must first configure the project for the build tool of your choice. See the documentation at [Contributing: Haskell Language Server](CONTRIBUTING.md#haskell-language-server) for cradles (`hie.yaml` files) that should suffice to configure the HLS explicitly for `./Setup.hs` and each of the buildable components in Stack's Cabal file. 
### Haskell extension Choose `Manually via PATH` when asked the following question: Manage HLS ## Issues If there is a problem with a Dev Container, please [open an issue](https://github.com/benz0li/ghc-musl/issues/new) at its [parent images](#parent-images)' repository at [https://github.com/benz0li/ghc-musl](https://github.com/benz0li/ghc-musl). stack-2.15.7/doc/developing_on_windows.md0000644000000000000000000001200414620153445016573 0ustar0000000000000000
# Developing on Windows # On Windows, Stack comes with an installation of [MSYS2](https://www.msys2.org/). The MINGW64 (MINGW32 on 32-bit Windows) environment of MSYS2 will be used by Stack to provide a Unix-like shell and environment for Stack. This may be necessary for installing some Haskell packages, such as those which use `configure` scripts, or if your project needs some additional tools during the build phase. No matter which terminal software you choose (Windows Terminal, Console Windows Host, Command Prompt, PowerShell, Git bash or any other) you can use this environment too by executing all programs through `stack exec -- `. Executables and libraries can be installed with the MSYS2 package manager `pacman`. All tools can be found in the [index](https://packages.msys2.org) to MSYS2 packages. A [guide](https://www.msys2.org/docs/package-management/) to package management with `pacman` is also available. `pacman` — like all other tools in the Stack environment — should be started with `stack exec -- pacman`. Help about `pacman` commands (operations) can be obtained by `stack exec -- pacman --help`. Help about a specific `pacman` operation can be obtained by using `--help` (or `-h`) with an operation. For example, help about the operation `--sync` (or `-S`) can be obtained with `stack exec -- pacman --sync --help` or, equivalently, `stack exec -- pacman -Sh`. Command `stack path --bin-path` to see the PATH in the Stack environment. On Windows, it includes the `\mingw64\bin` (`\mingw32\bin` on 32-bit Windows), `\usr\bin` and `\usr\local\bin` directories of the Stack-supplied MSYS2. If your executable depends on files (for example, dynamic-link libraries) in those directories and you want to run it outside of the Stack environment, you will need to ensure copies of those files are on the PATH. 
Command `stack path --extra-include-dirs` and `stack path --extra-library-dirs` to see the extra directories searched for C header files or system libraries files in the Stack environment. On Windows, it includes the `\mingw64\include` (`mingw32\include` on 32-bit Windows) (include) and the `\mingw64\lib` and `\mingw64\bin` directories (`mingw32\lib` and `mingw32\bin` on 32-bit Windows) (library) of the Stack-supplied MSYS2. ## Updating the Stack-supplied MSYS2 ## The Stack-supplied MSYS2 can itself be updated with the Stack-supplied `pacman`. See the MSYS2 guide [Updating MSYS2](https://www.msys2.org/docs/updating/). If the Stack-supplied `pacman` has a version that is 5.0.1.6403 or greater (see `stack exec -- pacman --version`) then the command to update is simply: stack exec -- pacman -Suy This command may need to be run more than once, until everything is reported by `pacman` as 'up to date' and 'nothing to do'. ## Setup.hs ## `Setup.hs` is automatically run inside the Stack environment. So when you need to launch another tool you don't need to prefix the command with `stack exec --` within the custom `Setup.hs` file. ## Pacman packages to install for common Haskell packages ## The following lists MSYS2 packages known to allow the installation of some common Haskell packages on Windows. Feel free to submit additional entries via a pull request. * For [text-icu](https://hackage.haskell.org/package/text-icu) install `mingw64/mingw-w64-x86_64-icu`. * For [zlib >= 0.7](https://hackage.haskell.org/package/zlib) the default Cabal flag `pkg-config` is `true` and requires executable `pkg-config` on the PATH. MSYS2 [defaults](https://www.msys2.org/docs/pkgconfig/) to [`pkgconf`](https://packages.msys2.org/package/pkgconf?repo=msys&variant=x86_64) as its `pkg-config` implementation. Installation: stack exec -- pacman -S pkgconf Alternatively, build with `--flag zlib:-pkg-config`. ## CMake ## CMake has trouble finding other tools even if they are available on the PATH. 
Likely this is not a CMake problem but one of the environment not fully integrating. For example GHC comes with a copy of GCC which is not installed by MSYS2 itself. If you want to use this GCC you can provide a full path to it, or find it first with `System.Directory.findExecutable` if you want to launch GCC from a Haskell file such as `Setup.hs`. Experience tells that the `mingw-w64` versions of Make and CMake are most likely to work. Though there are other versions available through `pacman`, so have a look to see what works for you. Both tools can be installed with the commands: stack exec -- pacman -S mingw-w64-x86_64-make stack exec -- pacman -S mingw-w64-x86_64-cmake Even though Make and CMake are then both installed into the same environment, CMake still seems to have trouble to find Make. To help CMake find GCC and Make supply the following flags: -DCMAKE_C_COMPILER=path -DCMAKE_MAKE_PROGRAM=path stack-2.15.7/doc/docker_command.md0000644000000000000000000000204514353310533015136 0ustar0000000000000000
# The `stack docker` commands ~~~text stack docker COMMAND Available commands: pull Pull latest version of Docker image from registry reset Reset the Docker sandbox ~~~ Stack is able to build your code inside a Docker image, which means even more reproducibility to your builds, since you and the rest of your team will always have the same system libraries. For further information, see the [Docker integration](docker_integration.md) documentation. ## The `stack docker pull` command ~~~text stack docker pull ~~~ `stack docker pull` pulls the latest version of the Docker image from the registry. ## The `stack docker reset` command ~~~text stack docker reset [--keep-home] ~~~ `stack docker reset` resets the Docker sandbox. Pass the flag `--keep-home` to preserve the sandbox's home directory. stack-2.15.7/doc/docker_integration.md0000644000000000000000000005275614613163672016072 0ustar0000000000000000
Docker integration =============================================================================== Stack has support for automatically performing builds inside a Docker container, using volume mounts and user ID switching to make it mostly seamless. FP Complete provides images for use with stack that include GHC, tools, and optionally have all of the Stackage LTS packages pre-installed in the global package database. The primary purpose for using stack/docker this way is for teams to ensure all developers are building in an exactly consistent environment without team members needing to deal with Docker themselves. See the [how stack can use Docker under the hood](https://www.fpcomplete.com/blog/2015/08/stack-docker) blog post for more information about the motivation and implementation of stack's Docker support. If you'd like to build Docker images that contain your Haskell executables, see [Building Haskell Apps with Docker](https://www.fpcomplete.com/blog/2017/12/building-haskell-apps-with-docker). Prerequisites ------------------------------------------------------------------------------- ### Supported operating systems **Linux 64-bit**: Docker use requires machine (virtual or metal) running a Linux distribution [that Docker supports](https://docs.docker.com/installation/#installation), with a 64-bit kernel. If you do not already have one, we suggest Ubuntu 14.04 ("trusty") since this is what we test with. **macOS**: [Docker for Mac](https://docs.docker.com/docker-for-mac/) is the supported way to use Docker integration on macOS (the older Docker Machine (boot2docker) approach to using Docker on macOS is not supported due to issues with host volume mounting that make Stack nearly unusable for anything but the most trivial projects). Other Un*xen are not officially supported but there are ways to get them working. See [#194](https://github.com/commercialhaskell/stack/issues/194) for details and workarounds. 
Note: you may want to set the `mount-mode` option to `delegated`, since this can dramatically improve performance on macOS (see [configuration](#configuration) for more information). **Windows does not work at all** (see [#2421](https://github.com/commercialhaskell/stack/issues/2421)). ### Docker Install the latest version of Docker by following the [instructions for your operating system](http://docs.docker.com/installation/). The Docker client should be able to connect to the Docker daemon as a non-root user. For example (from [here](http://docs.docker.com/installation/ubuntulinux/#ubuntu-raring-1304-and-saucy-1310-64-bit)): # Add the connected user "${USER}" to the docker group. # Change the user name to match your preferred user. sudo gpasswd -a ${USER} docker # Restart the Docker daemon. sudo service docker restart You will now need to log out and log in again for the group addition to take effect. Note the above has security implications. See [security](#security) for more. Usage ------------------------------------------------------------------------------- This section assumes that you already have Docker installed and working. If not, see the [prerequisites](#prerequisites) section. If you run into any trouble, see the [troubleshooting](#troubleshooting) section. ### Enable in stack.yaml The most basic configuration is to add this to your project's `stack.yaml`: docker: enable: true See [configuration](#configuration) for additional options. You can enable it on the command-line using `stack --docker`. Please note that in a docker-enabled configuration, stack uses the GHC installed in the Docker container by default. To use a compiler installed by stack, add system-ghc: false (see [`system-ghc`](yaml_configuration.md#system-ghc)). ### Use stack as normal With Docker enabled, most stack sub-commands will automatically launch themselves in an ephemeral Docker container (the container is deleted as soon as the command completes). 
The project directory and `~/.stack` are volume-mounted into the container, so any build artifacts are "permanent" (not deleted with the container). The first time you run a command with a new image, you will be prompted to run `stack docker pull` to pull the image first. This will pull a Docker image with a tag that matches your snapshot. Only LTS snapshots are supported (we do not generate images for nightly snapshots). Not every LTS version is guaranteed to have an image existing, and new LTS images tend to lag behind the LTS snapshot being published on stackage.org. Be warned: these images are rather large! Docker sub-commands ------------------------------------------------------------------------------- These `stack docker` sub-commands have Docker-specific functionality. Most other `stack` commands will also use a Docker container under the surface if Docker is enabled. ### pull - Pull latest version of image `stack docker pull` pulls an image from the Docker registry for the first time, or updates the image by pulling the latest version. ### reset - Reset the Docker "sandbox" In order to preserve the contents of the in-container home directory between runs, a special "sandbox" directory is volume-mounted into the container. `stack docker reset` will reset that sandbox to its defaults. Note: `~/.stack` is separately volume-mounted, and is left alone during reset. Command-line options ------------------------------------------------------------------------------- The default Docker configuration can be overridden on the command-line. See `stack --docker-help` for a list of all Docker options, and consult [configuration](#configuration) section below for more information about their meanings. These are global options, and apply to all commands (not just `stack docker` sub-commands). Configuration ------------------------------------------------------------------------------- `stack.yaml` contains a `docker:` section with Docker settings. 
If this section is omitted, Docker containers will not be used. These settings can be included in project, user, or global configuration. Here is an annotated configuration file. The default values are shown unless otherwise noted. docker: # Set to false to disable using Docker. In the project configuration, # the presence of a `docker:` section implies docker is enabled unless # `enable: false` is set. In user and global configuration, this is not # the case. enable: true # The name of the repository to pull the image from. See the "repositories" # section of this document for more information about available repositories. # If this includes a tag (e.g. "my/image:tag"), that tagged image will be # used. Without a tag specified, the LTS version slug is added automatically. # Either `repo` or `image` may be specified, but not both. repo: "fpco/stack-build" # Exact Docker image name or ID. Overrides `repo`. Either `repo` or `image` # may be specified, but not both. (default none) image: "5c624ec1d63f" # Registry requires login. A login will be requested before attempting to # pull. registry-login: false # Username to log into the registry. (default none) registry-username: "myuser" # Password to log into the registry. (default none) registry-password: "SETME" # If true, the image will be pulled from the registry automatically, without # needing to run `stack docker pull`. See the "security" section of this # document for implications of enabling this. auto-pull: true # If true, the container will be run "detached" (in the background). Refer # to the Docker users guide for information about how to manage containers. # This option would rarely make sense in the configuration file, but can be # useful on the command-line. When true, implies `persist`. detach: false # If true, the container will not be deleted after it terminates. Refer to # the Docker users guide for information about how to manage containers. 
This # option would rarely make sense in the configuration file, but can be # useful on the command-line. `detach` implies `persist`. persist: false # What to name the Docker container. Only useful with `detach` or # `persist` true. (default none) container-name: "example-name" # Sets the network used by docker. Gets directly passed to dockers `net` # argument (default: host) network: host # Additional arguments to pass to `docker run`. (default none) run-args: ["--net=bridge"] # Directories from the host to volume-mount into the container. If it # contains a `:`, the part before the `:` is the directory on the host and # the part after the `:` is where it should be mounted in the container. # (default none, aside from the project and stack root directories which are # always mounted) mount: - "/foo/bar" - "/baz:/tmp/quux" # Sets the volume mount mode, passed directly to `docker`. # The default mode (consistent) is safest, but may suffer poor performance # on non-Linux platforms such as macOS, where the `delegated` mode will # be significantly faster. # See https://docs.docker.com/docker-for-mac/osxfs-caching/ # for valid values and the implications of changing the default. mount-mode: delegated # Environment variables to set in the container. Environment variables # are not automatically inherited from the host, so if you need any specific # variables, use the `--docker-env` command-line argument version of this to # pass them in. (default none) env: - "FOO=BAR" - "BAR=BAZ QUUX" # Location of a Docker container-compatible 'stack' executable with the # matching version. This executable must be compatible with the Docker # image in terms of platform (linux-x86_64) and shared libraries # (statically linked is best, otherwise the image needs to have the # same shared libraries installed). # Valid values are: # host: use the host's executable. 
This is the default when the host's # executable is known to work (e.g., from official linux-x86_64 bindist) # download: download a compatible executable matching the host's version. # This is the default when the host's executable is not known to work # image: use the 'stack' executable baked into the image. The version # must match the host's version # /path/to/stack: path on the host's local filesystem stack-exe: host # If true (the default when using the local Docker Engine), run processes # in the Docker container as the same UID/GID as the host. This ensures # that files written by the container are owned by you on the host. # When the Docker Engine is remote (accessed by tcp), defaults to false. set-user: true # Require the version of the Docker client to be within the specified # Cabal-style version range (e.g., ">= 1.6.0 && < 1.9.0") require-docker-version: "any" Image Repositories ------------------------------------------------------------------------------- FP Complete provides the following public image repositories on Docker Hub: - [fpco/stack-build](https://registry.hub.docker.com/r/fpco/stack-build/) (the default) - GHC (patched), tools (Stack, Cabal (the tool), happy, alex, etc.), and system developer libraries required to build all Stackage packages. FP Complete also builds custom variants of these images for their clients. These images can also be used directly with `docker run` and provide a complete Haskell build environment. In addition, most Docker images that contain the basics for running GHC can be used with Stack's Docker integration. For example, the [official Haskell image repository](https://hub.docker.com/_/haskell/) works. See [Custom images](#custom-images) for more details. Security ------------------------------------------------------------------------------- Having `docker` usable as a non-root user is always a security risk, and will allow root access to your system. 
It is also possible to craft a `stack.yaml` that will run arbitrary commands in an arbitrary docker container through that vector, thus a `stack.yaml` could cause stack to run arbitrary commands as root. While this is a risk, it is not really a greater risk than is posed by the docker permissions in the first place (for example, if you ever run an unknown shell script or executable, or ever compile an unknown Haskell package that uses Template Haskell, you are at equal risk). Nevertheless, there are [plans to close the stack.yaml loophole](https://github.com/commercialhaskell/stack/issues/260). One way to mitigate this risk is, instead of allowing `docker` to run as non-root, replace `docker` with a wrapper script that uses `sudo` to run the real Docker client as root. This way you will at least be prompted for your root password. As [@gregwebs](https://github.com/gregwebs) pointed out, put this script named `docker` in your PATH (and make sure you remove your user from the `docker` group as well, if you added it earlier): #!/bin/bash -e # The goal of this script is to maintain the security privileges of sudo # Without having to constantly type "sudo" exec sudo /usr/bin/docker "$@" Additional notes ------------------------------------------------------------------------------- ### Volume-mounts and ephemeral containers Since filesystem changes outside of the volume-mounted project directory are not persisted across runs, this means that if you `stack exec sudo apt-get install some-ubuntu-package`, that package will be installed but then the container it's installed in will disappear, thus causing it to have no effect. If you wish to make this kind of change permanent, see later instructions for how to create a [derivative Docker image](#derivative-image). 
Inside the container, your home directory is a special location that is volume-mounted from within your project directory's `.stack-work` in such a way that installed GHC/cabal packages are not shared between different Stackage snapshots. In addition, `~/.stack` is volume-mounted from the host. ### Network stack containers use the host's network stack within the container by default, meaning a process running in the container can connect to services running on the host, and a server process run within the container can be accessed from the host without needing to explicitly publish its port. To run the container with an isolated network, use `--docker-run-args` to pass the `--net` argument to `docker run`. For example: stack --docker-run-args='--net=bridge --publish=3000:3000' \ exec some-server will run the container's network in "bridge" mode (which is Docker's default) and publish port 3000. ### Persistent container If you do want to do all your work, including editing, in the container, it might be better to use a persistent container in which you can install Ubuntu packages. You could get that by running something like `stack --docker-container-name=NAME --docker-persist exec bash`. This means when the container exits, it won't be deleted. You can then restart it using `docker start -a -i NAME`. It's also possible to detach from a container while it continues running in the background by pressing Ctrl-P Ctrl-Q, and then reattach to it using `docker attach NAME`. Note that each time you run `stack --docker-persist`, a _new_ persistent container is created (it will not automatically reuse the previous one). See the [Docker user guide](https://docs.docker.com/userguide/) for more information about managing Docker containers. ### Derivative image Creating your own custom derivative image can be useful if you need to install additional Ubuntu packages or make other changes to the operating system. 
Here is an example (replace `stack-build:custom` if you prefer a different name for your derived container, but it's best if the repo name matches what you're deriving from, only with a different tag, to avoid recompilation): ;;; On host $ sudo stack --docker-persist --docker-container-name=temp exec bash ;;; In container, make changes to OS # apt-get install r-cran-numderiv [...] # exit ;;; On host again $ docker commit temp stack-build:custom $ docker rm temp Now you have a new Docker image named `stack-build:custom`. To use the new image, run a command such as the following or update the corresponding values in your `stack.yaml`: stack --docker-image=stack-build:custom Note, however, that any time a new image is used, you will have to re-do this process. You could also use a Dockerfile to make this reusable. Consult the [Docker user guide](https://docs.docker.com/userguide/) for more on creating Docker images. ### Custom images The easiest way to create your own custom image is by extending FP Complete's images, but if you prefer to start from scratch, most images that include the basics for building code with GHC will work. The image doesn't even, strictly speaking, need to include GHC, but it does need to have libraries and tools that GHC requires (e.g., libgmp, gcc, etc.). There are also a few ways to set up images that tighten the integration: * Create a user and group named `stack`, and create a `~/.stack` directory for it. Any build plans and caches from it will be copied from the image by Stack, meaning they don't need to be downloaded separately. * Any packages in GHC's global package database will be available. This can be used to add private libraries to the image, or to make available a set of packages from an LTS release. 
Troubleshooting ------------------------------------------------------------------------------- ### "No Space Left on Device", but 'df' shows plenty of disk space This is likely due to the storage driver Docker is using, in combination with the large size and number of files in these images. Use `docker info|grep 'Storage Driver'` to determine the current storage driver. We recommend using either the `overlay` or `aufs` storage driver for stack, as they are least likely to give you trouble. On Ubuntu, `aufs` is the default for new installations, but older installations sometimes used `devicemapper`. The `devicemapper` storage driver doesn't work well with large filesystems, and we have experienced other instabilities with it as well. We recommend against its use. The `btrfs` storage driver has problems running out of metadata space long before running out of actual disk space, which requires rebalancing or adding more metadata space. See [CoreOS's btrfs troubleshooting page](https://coreos.com/docs/cluster-management/debugging/btrfs-troubleshooting/) for details about how to do this. Pass the `-s <storage-driver>` argument to the Docker daemon to set the storage driver (in `/etc/default/docker` on Ubuntu). See [Docker daemon storage-driver option](https://docs.docker.com/reference/commandline/cli/#daemon-storage-driver-option) for more details. You may also be running out of inodes on your filesystem. Use `df -i` to check for this condition. Unfortunately, the number of inodes is set when creating the filesystem, so fixing this requires reformatting and passing the `-N` argument to mkfs.ext4. ### Name resolution doesn't work from within container On Ubuntu 12.04, by default `NetworkManager` runs the `dnsmasq` service, which sets `127.0.0.1` as your DNS server. Since Docker containers cannot access this dnsmasq, Docker falls back to using Google DNS (8.8.8.8/8.8.4.4). This causes problems if you are forced to use an internal DNS server. 
This can be fixed by executing: sudo sed 's@dns=dnsmasq@#dns=dnsmasq@' -i \ /etc/NetworkManager/NetworkManager.conf && \ sudo service network-manager restart If you have already installed Docker, you must restart the daemon for this change to take effect: sudo service docker restart The above commands turn off `dnsmasq` usage in NetworkManager configuration and restart network manager. They can be reversed by executing `sudo sed 's@#dns=dnsmasq@dns=dnsmasq@' -i /etc/NetworkManager/NetworkManager.conf && sudo service network-manager restart`. These instructions are adapted from [the Shipyard Project's QuickStart guide](https://github.com/shipyard/shipyard/wiki/QuickStart#127011-dns-server-problem-on-ubuntu). ### Cannot pull images from behind firewall that blocks TLS/SSL If you are behind a firewall that blocks TLS/SSL and pulling images from a private Docker registry, you must edit the system configuration so that the `--insecure-registry <registry>` option is passed to the Docker daemon. For example, on Ubuntu: echo 'DOCKER_OPTS="--insecure-registry registry.example.com"' \ |sudo tee -a /etc/default/docker sudo service docker restart This does require the private registry to be available over plaintext HTTP. See [Docker daemon insecure registries documentation](https://docs.docker.com/reference/commandline/cli/#insecure-registries) for details. stack-2.15.7/doc/dot_command.md0000644000000000000000000001164214505617134014466 0ustar0000000000000000
# The `stack dot` command ~~~text stack dot [--[no-]external] [--[no-]include-base] [--depth DEPTH] [--prune PACKAGES] [TARGET] [--flag PACKAGE:[-]FLAG] [--test] [--bench] [--global-hints] ~~~ A package and its dependencies and the direct dependency relationships between them form a directed graph. [Graphviz](https://www.graphviz.org/) is open source software that visualises graphs. It provides the DOT language for defining graphs and the `dot` executable for drawing directed graphs. Graphviz is available to [download](https://www.graphviz.org/download/) for Linux, Windows, macOS and FreeBSD. `stack dot` produces output, to the standard output stream, in the DOT language to represent the relationships between your packages and their dependencies. By default: * external dependencies are excluded from the output. Pass the flag `--external` to include external dependencies; * the `base` package and its dependencies are included in the output. Pass the flag `--no-include-base` to exclude `base` and its dependencies; * there is no limit to the depth of the resolution of dependencies. Pass the `--depth <depth>` option to limit the depth; * all relevant packages are included in the output. Pass the `--prune <packages>` option to exclude the specified packages, where `<packages>` is a list of package names separated by commas; * all packages in the project are included in the output. However, the target for the command can be specified as an argument. It uses the same format as the [`stack build` command](build_command.md); * test components of the packages in the project are excluded from the output. Pass the flag `--test` to include test components; and * benchmark components of the packages in the project are excluded from the output. Pass the flag `--bench` to include benchmark components. Pass the option `--flag <package_name>:<flag_name>` or `--flag <package_name>:-<flag_name>` to set or unset a Cabal flag. This option can be specified multiple times. Pass the flag `--global-hints` to use a hint file for global packages. 
If a hint file is used, GHC does not need to be installed. ## Examples The following examples are based on a version of the [`wreq` package](https://hackage.haskell.org/package/wreq). In each case, the output from `stack dot` is piped as an input into Graphviz's `dot` executable, and `dot` produces output in the form of a PNG file named `wreq.png`. * A simple example: ~~~text stack dot | dot -Tpng -o wreq.png ~~~ [![wreq](https://cloud.githubusercontent.com/assets/591567/8478591/ae10a418-20d2-11e5-8945-55246dcfac62.png)](https://cloud.githubusercontent.com/assets/591567/8478591/ae10a418-20d2-11e5-8945-55246dcfac62.png) * Include external dependencies: ~~~text stack dot --external | dot -Tpng -o wreq.png ~~~ [![wreq_ext](https://cloud.githubusercontent.com/assets/591567/8478621/d247247e-20d2-11e5-993d-79096e382abd.png)](https://cloud.githubusercontent.com/assets/591567/8478621/d247247e-20d2-11e5-993d-79096e382abd.png) * Include external dependencies, limit the depth and save the output from `stack dot` as an intermediate file (`wreq.dot`). 
~~~text stack dot --external --depth 1 > wreq.dot dot -Tpng -o wreq.png wreq.dot ~~~ * Include external dependencies, exclude `base` and limit the depth: ~~~text stack dot --no-include-base --external --depth 1 | dot -Tpng -o wreq.png ~~~ [![wreq_depth](https://cloud.githubusercontent.com/assets/591567/8484310/45b399a0-20f7-11e5-8068-031c2b352961.png)](https://cloud.githubusercontent.com/assets/591567/8484310/45b399a0-20f7-11e5-8068-031c2b352961.png) * Include external dependencies and prune `base` and other packages: ~~~text stack dot --external --prune base,lens,wreq-examples,http-client,aeson,tls,http-client-tls,exceptions | dot -Tpng -o wreq.png ~~~ [![wreq_pruned](https://cloud.githubusercontent.com/assets/591567/8478768/adbad280-20d3-11e5-9992-914dc24fe569.png)](https://cloud.githubusercontent.com/assets/591567/8478768/adbad280-20d3-11e5-9992-914dc24fe569.png) * Include external dependencies, prune `base` and other packages, and use a different Graphviz executable to draw the graph: Graphviz's `twopi` executable draws graphs in a radial layout. ~~~text stack dot --external --prune base,lens,wreq-examples,http-client,aeson,tls,http-client-tls,exceptions | twopi -Groot=wreq -Goverlap=false -Tpng -o wreq.png ~~~ [![wreq_pruned](https://cloud.githubusercontent.com/assets/591567/8495538/9fae1184-216e-11e5-9931-99e6147f8aed.png)](https://cloud.githubusercontent.com/assets/591567/8495538/9fae1184-216e-11e5-9931-99e6147f8aed.png) stack-2.15.7/doc/editor_integration.md0000644000000000000000000000126014604306200016053 0ustar0000000000000000
# Editor integration ## Visual Studio Code For further information, see the [Stack and Visual Code](Stack_and_VS_Code.md) documentation. ## Shell auto-completion Love tab-completion of commands? You're not alone. If you're on bash, just run the following command (or add it to `.bashrc`): ~~~text eval "$(stack --bash-completion-script stack)" ~~~ For more information and other shells, see the [Shell auto-completion wiki page](https://docs.haskellstack.org/en/stable/shell_autocompletion) stack-2.15.7/doc/environment_variables.md0000644000000000000000000001072714445120722016574 0ustar0000000000000000
# Stack's environment variables The environment variables listed in alphabetical order below can affect how Stack behaves. ## `GH_TOKEN` or `GITHUB_TOKEN` [:octicons-tag-24: 2.11.1](https://github.com/commercialhaskell/stack/releases/tag/v2.11.1) Stack will use the value of the `GH_TOKEN` or, in the alternative, `GITHUB_TOKEN` environment variable (if not an empty string) as credentials to authenticate its requests of the GitHub REST API, using HTTP 'Basic' authentication. GitHub limits the rate of unauthenticated requests to its API, although most users of Stack will not experience that limit from the use of Stack alone. The limit for authenticated requests is significantly higher. For more information about authentication of requests of the GitHub REST API, see GitHub's REST API documentation. ## `HACKAGE_KEY` [:octicons-tag-24: 2.7.5](https://github.com/commercialhaskell/stack/releases/tag/v2.7.5) Related command: [`stack upload`](upload_command.md) Hackage allows its members to register an API authentication token and to authenticate using the token. A Hackage API authentication token can be used with `stack upload` instead of username and password, by setting the `HACKAGE_KEY` environment variable. For example: === "Unix-like" ~~~text HACKAGE_KEY=<api_key> stack upload . ~~~ === "Windows (with PowerShell)" ~~~text $Env:HACKAGE_KEY=<api_key> stack upload . ~~~ ## `HACKAGE_USERNAME` and `HACKAGE_PASSWORD` [:octicons-tag-24: 2.3.1](https://github.com/commercialhaskell/stack/releases/tag/v2.3.1) Related command: [`stack upload`](upload_command.md) `stack upload` will request a Hackage username and password to authenticate. This can be avoided by setting the `HACKAGE_USERNAME` and `HACKAGE_PASSWORD` environment variables. For example: === "Unix-like" ~~~text export HACKAGE_USERNAME="<username>" export HACKAGE_PASSWORD="<password>" stack upload . ~~~ === "Windows (with PowerShell)" ~~~text $Env:HACKAGE_USERNAME='<username>' $Env:HACKAGE_PASSWORD='<password>' stack upload . 
~~~ ## `NO_COLOR` Related command: all commands that can produce colored output using control character sequences. Stack follows the standard at http://no-color.org/. Stack checks for a `NO_COLOR` environment variable. When it is present and not an empty string (regardless of its value), Stack prevents the addition of control character sequences for color to its output. ## `STACK_ROOT` Related command: all commands that make use of Stack's global YAML configuration file (`config.yaml`). Overridden by: Stack's global [`--stack-root`](global_flags.md#-stack-root-option) option. The environment variable `STACK_ROOT` can be used to specify the [Stack root](stack_root.md) directory. ## `STACK_WORK` Related command: all commands that make use of Stack's work directories. Overridden by: Stack's [`work-dir`](yaml_configuration.md#work-dir) non-project specific configuration option, or global [`--work-dir`](global_flags.md#-work-dir-option) option. The environment variable `STACK_WORK` can be used to specify the path of Stack's work directory, within a local project or package directory, and override Stack's default of `.stack-work`. The path must be a relative one, relative to the root directory of the project or package. The relative path cannot include a `..` (parent directory) component. ## `STACK_XDG` Related command: all commands that make use of Stack's user-specific general YAML configuration file (`config.yaml`). Overridden by: the use of Stack's `STACK_ROOT` environment variable, or the use of Stack's global [`--stack-root`](global_flags.md#-stack-root-option) option. On Unix-like operating systems and Windows, Stack can be configured to follow the XDG Base Directory Specification if the environment variable `STACK_XDG` is set to any non-empty value. ## `STACK_YAML` Related command: all commands that make use of Stack's project-level YAML configuration file. Overridden by: Stack's global [`--stack-yaml`](global_flags.md#-stack-yaml-option) option. 
The environment variable `STACK_YAML` can be used to specify Stack's project-level YAML configuration file. stack-2.15.7/doc/eval_command.md0000644000000000000000000000132014353310533014611 0ustar0000000000000000
# The `stack eval` command ~~~text stack eval CODE [--[no-]ghc-package-path] [--[no-]stack-exe] [--package PACKAGE] [--rts-options RTSFLAG] [--cwd DIR] ~~~ GHC has an [expression-evaluation mode](https://downloads.haskell.org/ghc/latest/docs/users_guide/using.html#eval-mode), set by passing the GHC option `-e `. Commanding `stack eval ` is equivalent to commanding: ~~~text stack exec ghc -- -e ~~~ For further information, see the [`stack exec` command](exec_command.md) documentation. stack-2.15.7/doc/exec_command.md0000644000000000000000000000621314613163672014625 0ustar0000000000000000
# The `stack exec` command ~~~text stack exec COMMAND [-- ARGUMENT(S) (e.g. stack exec ghc-pkg -- describe base)] [--[no-]ghc-package-path] [--[no-]stack-exe] [--package PACKAGE] [--rts-options RTSFLAG] [--cwd DIR] ~~~ `stack exec` executes the specified executable as a command in the Stack environment. If an executable is not specified, the first argument after `--` is taken to be the executable. Otherwise, all arguments after `--` are taken to be command line arguments for the specified executable. By default: * the `GHC_PACKAGE_PATH` environment variable is set for the command's process. Pass the flag `--no-ghc-package-path` to not set the environment variable; * the `STACK_EXE` environment variable is set for the command's process. Pass the flag `--no-stack-exe` to not set the environment variable; and * the specified executable is executed in the current directory. Pass the option `--cwd ` to execute the executable in the specified directory. The option `--package ` has no effect for the `stack exec` command. For further information about its use, see the [`stack ghc` command](ghc_command.md) documentation or the [`stack runghc` command](runghc_command.md) documentation. Pass the option `--rts-option ` to specify a GHC RTS flag or option. The option can be specified multiple times. All specified GHC RTS flags and options are added to the arguments for the specified executable between arguments `+RTS` and `-RTS`. Specified GHC RTS flags and options are separated by spaces. Items can be unquoted (if they do not contain space or `"` characters) or quoted (`""`). Quoted items can include 'escaped' characters, escaped with an initial `\` character. Account may need to be taken of the shell's approach to the processing of command line arguments. 
For example, to pass `'a single quoted string'`: === "Unix-like (Bash or Zsh)" In Bash, or Zsh (if `RC_QUOTES` option not set): `stack exec -- \''a single quoted string'\'` Outside of single quotes, `\'` escapes a single quote. The content of single quotes is taken literally, but cannot contain a single quote. In Zsh (if `RC_QUOTES` option set): `stack exec -- '''a single quoted string'''` The content of single quotes is taken literally. Within single quotes, `''` escapes a single quote. === "Windows (PowerShell)" `stack exec -- '''a single quoted string'''` The content of single quotes is taken literally. Within single quotes, `''` escapes a single quote. The command also accepts flags and options of the [`stack build`](build_command.md#flags-affecting-ghcs-behaviour) command that affect the location of the local project installation directory, such as `--profile` and `--no-strip`. For further information, see the documentation of the [project Stack work directory](stack_work.md#project-stack-work-directory). stack-2.15.7/doc/faq.md0000644000000000000000000007105714620153473012756 0ustar0000000000000000
# FAQ So that this doesn't become repetitive: for the reasons behind the answers below, see the [Build overview](build_overview.md) page. The goal of the answers here is to be as helpful and concise as possible. ## What version of GHC is used when I run something like `stack ghci`? The version of GHC, as well as which packages can be installed, are specified by the _snapshot_. This may be something like `lts-22.21`, which is from [Stackage](https://www.stackage.org/). The [user's guide](GUIDE.md) discusses the snapshot in more detail. The snapshot is determined by finding the relevant project-level configuration file (`stack.yaml`, by default) for the directory you're running the command from. This essentially works by: 1. Check for a `STACK_YAML` environment variable or the `--stack-yaml` command line argument 2. If none present, check for a `stack.yaml` file in the current directory or any parents 3. If no `stack.yaml` file was found, use the _implicit global_ The implicit global is a shared project used whenever you're outside of another project. It's a sort of "mutable shared state" that you should be aware of when working with Stack. A frequent request when working with the implicit global is how to move to a more recent LTS snapshot. You can do this using the following command from outside of a project: ~~~text stack config set snapshot lts ~~~ ## Where is Stack installed and will it interfere with the GHC (etc) I already have installed? Stack itself is installed in normal system locations based on the mechanism you used (see the [Install and upgrade](install_and_upgrade.md) page). Stack installs files in the Stack root and other files in a `.stack-work` directory within each project's directory. None of this should affect any existing Haskell tools at all. ## What is the relationship between Stack and Cabal (the tool)? * 'Cabal' can refer to Cabal (the library) or to Cabal (the tool). Cabal (the library) is used by Stack to build your Haskell code. 
`build-type: Custom` and a `custom-setup` stanza in the Cabal file, and a `Setup.hs` file in the package directory, can be specified.
~~~yaml snapshot: lts-22.21 packages: - . extra-deps: - third-party/proprietary-dep - github-version-of/conduit - patched/diagrams ~~~ The above example specifies that the `proprietary-dep` package is found in the project's `third-party` directory, that the `conduit` package is found in the project's `github-version-of` directory, and that the `diagrams` package is found in the project's `patched` directory. This autodetects changes and reinstalls the package. To install packages directly from a Git repository, use e.g.: ~~~yaml extra-deps: - git: https://github.com/githubuser/reponame.git commit: somecommitID ~~~ ## What is the meaning of the arguments given to `stack build`, `test`, etc? Those are the targets of the build, and can have one of three formats: * A package name (e.g., `my-package`) will mean that the `my-package` package must be built * A package identifier (e.g., `my-package-1.2.3`), which includes a specific version. This is useful for passing to `stack install` for getting a specific version from upstream * A directory (e.g., `./my-package`) for including a local directory's package, including any packages in subdirectories ## I need to modify an upstream package, how should I do it? Typically, you will want to get the source for the package and then add it to your `packages` list in the `stack.yaml` file. (See the previous question.) `stack unpack` is one approach for getting the source. Another would be to add the upstream package as a submodule to your project. ## How do I use this with sandboxes? Explicit sandboxing on the part of the user is not required by Stack. All builds are automatically isolated into separate package databases without any user interaction. This ensures that you won't accidentally corrupt your installed packages with actions taken in other projects. ## Can I run `cabal` commands inside `stack exec`? With a recent enough version of Cabal (the tool) (1.22 or later), you can. 
And for backwards compatibility with older versions of Stack, also add the following line to your Cabal file:
If you are using the [`--system-ghc`](yaml_configuration.md#system-ghc) flag
For more information, see the snapshot auto-detection section in the architecture document. ## I'd like to use my installed packages in a different directory. How do I tell Stack where to find my packages? Set the `STACK_YAML` environment variable to point to the `stack.yaml` configuration file for your project. Then you can run `stack exec`, `stack ghc`, etc., from any directory and still use your packages. ## My tests are failing. What should I do? Like all other targets, `stack test` runs test suites in parallel by default. This can cause problems with test suites that depend on global resources such as a database or binding to a fixed port number. A quick hack is to force stack to run all test suites in sequence, using `stack test --jobs=1`. For test suites to run in parallel developers should ensure that their test suites do not depend on global resources (e.g. by asking the operating system for a random port to bind to) and where unavoidable, add a lock in order to serialize access to shared resources. ## Can I get bash autocompletion? Yes, see the [shell-autocompletion](shell_autocompletion.md) documentation. ## How do I update my package index? Users of Cabal (the tool) are used to running `cabal update` regularly. You can do the same with Stack by running `stack update`. But generally, it's not necessary: if the package index is missing, or if a snapshot refers to package/version that isn't available, Stack will automatically update and then try again. If you run into a situation where Stack doesn't automatically do the update for you, please report it as a bug. ## Isn't it dangerous to automatically update the index? Can't that corrupt build plans? No, Stack is very explicit about which packages it's going to build for you. There are three sources of information to tell it which packages to install: the selected snapshot, the `extra-deps` configuration value, and your local packages. 
STACK_YAML=stack-ghc-9.0.2.yaml stack build # builds using the given yaml file
## What are the licensing restrictions on Windows?
The default location of Stack's programs folder is `%LOCALAPPDATA%\Programs\stack`.
If this happens to you, please _manually_ set `TMPDIR` before launching Stack to some directory on the disk. ## Why doesn't Stack rebuild my project when I specify `--ghc-options` on the command line? Because GHC options often only affect optimization levels and warning behavior, Stack doesn't recompile when it detects an option change by default. This behavior can be changed though by setting the [`rebuild-ghc-options` option](yaml_configuration.md#rebuild-ghc-options) to `true`. To force recompilation manually, use the `--force-dirty` flag. If this still doesn't lead to a rebuild, add the `-fforce-recomp` flag to your `--ghc-options`. ## Why doesn't Stack apply my `--ghc-options` to my dependencies? By default, Stack applies command line GHC options only to [project packages](yaml_configuration.md#packages). For an explanation of this choice see this discussion on issue [#827](https://github.com/commercialhaskell/stack/issues/827#issuecomment-133263678). If you still want to set specific GHC options for a dependency, use the [`ghc-options`](yaml_configuration.md#ghc-options) option in your YAML configuration file. To change the set of packages that command line GHC options apply to, use the [`apply-ghc-options`](yaml_configuration.md#apply-ghc-options) option. ## `stack setup` on a Windows system only tells me to add certain paths to the PATH variable instead of doing it With PowerShell, it is easy to automate even that step. Command: ~~~ps $Env:Path = ( stack setup | %{ $_ -replace '[^ ]+ ', ''} ), $Env:Path -join ";" ~~~ ## How do I reset/remove Stack (such as to do a completely fresh build)? The first thing to remove is project-specific `.stack-work` directory within the project's directory. Next, remove the Stack root directory overall. You may have errors if you remove the latter but leave the former. Removing Stack itself will relate to how it was installed, and if you used GHC installed outside of Stack, that would need to be removed separately. 
If GHC doesn't recognize your C compiler as being able to use `-no-pie` (this can happen even with GCC and Clang), it might be necessary to enable this feature manually.
If you are still having trouble after trying the above, check the following for more possible workarounds: * Previous version of this [FAQ entry](https://docs.haskellstack.org/en/v1.6.3/faq/#i-get-strange-ld-errors-about-recompiling-with-fpic) * Related issues: [#3518](https://github.com/commercialhaskell/stack/issues/3518), [#2712](https://github.com/commercialhaskell/stack/issues/2712), [#3630](https://github.com/commercialhaskell/stack/issues/3630), [#3648](https://github.com/commercialhaskell/stack/issues/3648) ## Where does the output from `--ghc-options=-ddump-splices` (and other `-ddump*` options) go? These are written to `*.dump-*` files inside the package's `.stack-work` directory. Specifically, they will be available at `PKG-DIR/$(stack path --dist-dir)/build/SOURCE-PATH`, where `SOURCE-PATH` is the path to the source file, relative to the location of the Cabal file. When building named components such as test-suites, `SOURCE-PATH` will also include `COMPONENT/COMPONENT-tmp`, where `COMPONENT` is the name of the component. ## Why is DYLD_LIBRARY_PATH ignored? If you are on Mac OS X 10.11 ("El Capitan") or later, there is a GHC issue [#11617](https://ghc.haskell.org/trac/ghc/ticket/11617) which prevents the `DYLD_LIBRARY_PATH` environment variable from being passed to GHC (see issue [#1161](https://github.com/commercialhaskell/stack/issues/1161)) when System Integrity Protection (a.k.a. "rootless") is enabled. There are two known workarounds: 1. Known to work in all cases: [disable System Integrity Protection](http://osxdaily.com/2015/10/05/disable-rootless-system-integrity-protection-mac-os-x/). **WARNING: Disabling SIP will severely reduce the security of your system, so only do this if absolutely necessary!** 2. Experimental: modify GHC's shell script wrappers to use a shell outside the protected directories (see issue [#1161](https://github.com/commercialhaskell/stack/issues/1161#issuecomment-186690904)). 
## Why is the `--` argument separator ignored in Windows PowerShell?
Add `ghc-options: -vN` to the Cabal file or pass it via `stack build --ghc-options="-v"`. ## Does Stack support the Hpack specification? Yes: * If a package directory contains an [Hpack](https://github.com/sol/hpack) `package.yaml` file, then Stack will use it to generate a Cabal file when building the package. * You can run `stack init` to initialize a `stack.yaml` file regardless of whether your packages are declared with Cabal files or with Hpack `package.yaml` files. * You can use the `with-hpack` YAML configuration or command line option to specify an Hpack executable to use instead of Stack's in-built Hpack functionality. ## How do I resolve linker errors when running `stack setup` or `stack build` on macOS? This is likely to be caused by having both a LLVM installation and default Apple Clang compiler on the PATH. The symptom of this issue is a linker error "bad relocation (Invalid pointer diff)". The compiler picks up inconsistent versions of binaries and the mysterious error occurs. The workaround is to remove LLVM binaries from the PATH. ## How do I suppress `'-nopie'` warnings with `stack build` on macOS? ~~~bash clang: warning: argument unused during compilation: '-nopie' [-Wunused-command-line-argument] ~~~ This warning is shown when compiler support of `-no-pie` is expected but unavailable. It's possible to bypass the warning for a specific version of GHC by modifying a global setting: ~~~bash # ~/.stack/programs/x86_64-osx/ghc-8.2.2/lib/ghc-8.2.2/settings -- ("C compiler supports -no-pie", "YES"), ++ ("C compiler supports -no-pie", "NO"), ~~~ **Note that we're fixing `ghc-8.2.2` in this case; repeat for other versions as necessary.** You should apply this fix for the version of GHC that matches your snapshot. Issue [#4009](https://github.com/commercialhaskell/stack/issues/4009) goes into further detail. ## How do I install GHC in Stack when it fails with the error: Missing ghc bindist for "linux64-ncurses6"? 
Example Error: ~~~text No setup information found for ghc-8.6.4 on your platform. This probably means a GHC bindist has not yet been added for OS key 'linux64-ncurses6'. Supported versions: ghc-7.10.3, ghc-8.0.1, ghc-8.0.2, ghc-8.2.1, ghc-8.2.2 ~~~ Most Linux distributions have standardized on providing libtinfo.so.6 (either directly or as a symlink to libncursesw.so.6). As such, there aren't GHC 8.6.* bindists that link to libncursesw.so.6 available. So creating a symlink to libncursesw.so.6 as libtinfo.so.6 can prevent this error (root privileges might be required). Command: ~~~bash ln -s /usr/lib/libncursesw.so.6 /usr/lib/libtinfo.so.6 ~~~ stack-2.15.7/doc/ghc_command.md0000644000000000000000000000200614604306200014420 0ustar0000000000000000
Pass the option `--package <package>` to add the initial GHC argument `-package-id=<unit_id>`, where `<unit_id>` is the unit ID of the specified package in the installed package database.
Pass the option `--flag <package_name>:<flag_name>` or `--flag <package_name>:-<flag_name>` to set or unset a Cabal flag. This option can be specified multiple times. The same Cabal flag name can be set (or unset) for multiple packages with:

~~~text
--flag *:[-]<flag_name>
~~~
Stack performs an initial build step.
Such files are located in subdirectories of `<XDG_CACHE_HOME>/stack/ghci-script`, where `<XDG_CACHE_HOME>` refers to the [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html) for user-specific non-essential (cached) data. === "Unix-like" The default for `<XDG_CACHE_HOME>` is `$HOME/.cache`. === "Windows" On Windows, the default for `<XDG_CACHE_HOME>` is `%LOCALAPPDATA%`.
note Running `stack ghci` on a pristine copy of the code doesn't currently build libraries (issue [#2790](https://github.com/commercialhaskell/stack/issues/2790)) or internal libraries (issue [#4148](https://github.com/commercialhaskell/stack/issues/4148)). It is recommended to always use `stack build` before using `stack ghci`, until these two issues are closed. stack-2.15.7/doc/global_flags.md0000644000000000000000000003142414620153445014614 0ustar0000000000000000
# Stack's global flags and options Stack can also be configured by flags and options on the command line. Global flags and options apply to all of Stack's commands. In addition, all of Stack's commands accept the `--setup-info-yaml` and `--snapshot-location-base` options and the `--help` flag. ## `--allow-different-user` flag Restrictions: POSIX systems only Default: True, if inside Docker; false otherwise Enable/disable permitting users other than the owner of the [Stack root](stack_root.md) directory to use a Stack installation. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#allow-different-user). ## `--arch` option Pass the option `--arch ` to specify the relevant machine architecture. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#arch). ## `--color` or `--colour` options Pass the option `stack --color ` to specify when to use color in output. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#color). ## `--compiler` option Pass the option `--compiler ` to specify the compiler. For further information, see the [YAML configuration](yaml_configuration.md#compiler) documentation. ## `--custom-preprocessor-extensions` option Pass the option `--custom-preprocessor-extensions ` to specify an extension used for a custom preprocessor. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#custom-preprocessor-extensions). ## `--docker*` flags and options Stack supports automatically performing builds inside a Docker container. For further information see `stack --docker-help` or the [Docker integration](docker_integration.md) documentation. 
## `--[no-]dump-logs` flag Default: Dump warning logs Enables/disables the dumping of the build output logs for project packages to the console. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#dump-logs). ## `--extra-include-dirs` option Pass the option `--extra-include-dirs ` to specify an extra directory to check for C header files. The option can be specified multiple times. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#extra-include-dirs). ## `--extra-lib-dirs` option Pass the option `--extra-lib-dirs ` to specify an extra directory to check for libraries. The option can be specified multiple times. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#extra-lib-dirs). ## `--ghc-build` option Pass the option `--ghc-build ` to specify the relevant specialised GHC build. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#ghc-build). ## `--ghc-variant` option Pass the option `--ghc-variant ` to specify the relevant GHC variant. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#ghc-variant). ## `--hpack-numeric-version` flag Pass the flag `--hpack-numeric-version` to cause Stack to report the numeric version of its built-in Hpack library to the standard output stream (e.g. `0.35.0`) and quit. ## `--[no-]install-ghc` flag Default: Enabled Enables/disables the download and installation of GHC if necessary. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#install-ghc). 
## `--jobs` or `-j` option Pass the option `--jobs ` to specify the number of concurrent jobs (Stack actions during building) to run. When [building GHC from source](yaml_configuration.md#building-ghc-from-source), specifies the `-j[]` flag of GHC's Hadrian build system. By default, Stack specifies a number of concurrent jobs equal to the number of CPUs (cores) that the machine has. In some circumstances, that default can cause some machines to run out of memory during building. If those circumstances arise, specify `--jobs 1`. This configuration option is distinct from GHC's own `-j[]` flag, which relates to parallel compilation of modules within a package. For further information, see the documentation for the corresponding non-project specific configuration option: [`jobs`](yaml_configuration.md#jobs). ## `--local-bin-path` option Pass the option `--local-bin-path ` to set the target directory for [`stack build --copy-bins`](build_command.md#-no-copy-bins-flag) and `stack install`. An absolute or relative path can be specified. A relative path at the command line is always assumed to be relative to the current directory. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#local-bin-path). ## `--lock-file` option Default: `read-write`, if snapshot specified in YAML configuration file; `read-only`, if a different snapshot is specified on the command line. Pass the option `--lock-file ` to specify how Stack interacts with lock files. Valid modes are: * `error-on-write`: Stack reports an error, rather than write a lock file; * `ignore`: Stack ignores lock files; * `read-only`: Stack only reads lock files; and * `read-write`: Stack reads and writes lock files. ## `--[no-]modify-code-page` flag Restrictions: Windows systems only Default: Enabled Enables/disables setting the codepage to support UTF-8. 
For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#modify-code-page). ## `--nix*` flags and options Stack can be configured to integrate with Nix. For further information, see `stack --nix-help` or the [Nix integration](nix_integration.md) documentation. ## `--numeric-version` flag Pass the flag `--numeric-version` to cause Stack to report its numeric version to the standard output stream (e.g. `2.9.1`) and quit. ## `--[no-]plan-in-log` flag [:octicons-tag-24: 2.13.1](https://github.com/commercialhaskell/stack/releases/tag/v2.13.1) Default: Disabled Enables/disables the logging of build plan construction in debug output. Information about the build plan construction can be lengthy. If you do not need it, it is best omitted from the debug output. ## `--resolver` option A synonym for the [`--snapshot` option](#snapshot-option) to specify the snapshot resolver. ## `--[no-]rsl-in-log` flag [:octicons-tag-24: 2.9.1](https://github.com/commercialhaskell/stack/releases/tag/v2.9.1) Default: Disabled Enables/disables the logging of the raw snapshot layer (rsl) in debug output. Information about the raw snapshot layer can be lengthy. If you do not need it, it is best omitted from the debug output. ## `--[no-]script-no-run-compile` flag Default: Disabled Enables/disables the use of options `--no-run --compile` with the [`stack script` command](script_command.md). ## `--silent` flag Equivalent to the `--verbosity silent` option. ## `--[no-]skip-ghc-check` option Default: Disabled Enables/disables the skipping of checking the GHC version and architecture. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#skip-ghc-check). 
## `--[no-]skip-msys` option Restrictions: Windows systems only Default: Disabled Enables/disables the skipping of checking for the Stack-supplied MSYS2 (and installing that MSYS2, if it is not installed) when Stack is setting up the environment. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#skip-msys). ## `--snapshot` option [:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) Pass the option `--snapshot ` to specify the snapshot. For further information, see the [YAML configuration](yaml_configuration.md#snapshot) documentation. At the command line (only): * `--snapshot lts-` specifies the latest Stackage LTS Haskell snapshot with the specified major version; * `--snapshot lts` specifies, from those with the greatest major version, the latest Stackage LTS Haskell snapshot; * `--snapshot nightly` specifies the most recent Stackage Nightly snapshot; and * `--snapshot global` specifies the snapshot specified by the project-level configuration file in the `global-project` directory in the [Stack root](stack_root.md#global-project-directory). ## `--stack-colors` or `--stack-colours` options Pass the option `--stack-colors ` to specify Stack's output styles. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#stack-colors). ## `--stack-root` option Overrides: `STACK_ROOT` environment variable Pass the option `--stack-root ` to specify the path to the [Stack root](stack_root.md) directory. The path must be an absolute one. ## `--stack-yaml` option Default: `stack.yaml` Overrides: `STACK_YAML` environment variable Pass the option `--stack-yaml ` to specify Stack's project-level YAML configuration file. ## `--[no-]system-ghc` flag Default: Disabled Enables/disables the use of a GHC executable on the PATH, if one is available and its version matches. 
## `--[no-]terminal` flag Default: Stack is running in a terminal (as detected) Enables/disables whether Stack is running in a terminal. ## `--terminal-width` option Default: the terminal width (if detected); otherwise `100` Pass the option `--terminal-width ` to specify the width of the terminal, used by Stack's pretty printed messages. ## `--[no-]time-in-logs` flag Default: Enabled Enables/disables the inclusion of time stamps against logging entries when the verbosity level is 'debug'. ## `--verbose` or `-v` flags Equivalent to the `--verbosity debug` option. ## `--verbosity` option Default: `info` Pass the option `--verbosity ` to specify the level for logging. Possible levels are `silent`, `error`, `warn`, `info` and `debug`, in order of increasing amounts of information provided by logging. ## `--version` flag Pass the flag `--version` to cause Stack to report its version to standard output and quit. For versions that are release candidates, the report will list the dependencies that Stack has been compiled with. ## `--with-gcc` option Pass the option `--with-gcc ` to specify use of a GCC executable. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#with-gcc). ## `--with-hpack` option Pass the option `--with-hpack ` to specify use of an Hpack executable. For further information, see the documentation for the corresponding non-project specific configuration [option](yaml_configuration.md#with-hpack). ## `--work-dir` option Default: `.stack-work` Overrides: [`STACK_WORK`](environment_variables.md#stack_work) environment variable, and [`work-dir`](yaml_configuration.md) non-project specific configuration option. Pass the option `--work-dir ` to specify the path to Stack's work directory, within a local project or package directory. The path must be a relative one, relative to the root directory of the project or package. 
The relative path cannot include a `..` (parent directory) component. ## `--setup-info-yaml` command option Default: `https://raw.githubusercontent.com/commercialhaskell/stackage-content/master/stack/stack-setup-2.yaml` The `--setup-info-yaml ` command option specifies the location of a `setup-info` dictionary. The option can be specified multiple times. ## `--snapshot-location-base` command option Default: `https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master` The `--snapshot-location-base ` command option specifies the base location of snapshots. ## `--help` command flag If Stack is passed the `--help` command flag, it will output help for the command. stack-2.15.7/doc/glossary.md0000644000000000000000000001557614604306200014044 0ustar0000000000000000
# Glossary The following terms are used in Stack's documentation. |Term |Meaning | |-------------------|----------------------------------------------------------| |Cabal |The Haskell Common Architecture for Building Applications and Libraries, provided by the [`Cabal` package](https://hackage.haskell.org/package/Cabal). Also referred to as Cabal (the library) to distinguish it from Cabal (the tool).| |Cabal file|A file containing a [package description](https://cabal.readthedocs.io/en/stable/cabal-package.html) used by Cabal, named `.cabal`.| |Cabal (the tool)|The Haskell build tool provided by the [`cabal-install` package](https://hackage.haskell.org/package/cabal-install).| |CI |Continuous integration. | |CMake |A [system](https://cmake.org/) for managing build processes.| |`config.yaml` |A global and non-project-specific configuration file used by Stack.| |dependency |A Haskell package other than a project package and on which a project package depends (directly or indirectly), located locally or elsewhere.| |Docker |A [platform](https://www.docker.com/) for developing, shipping, and running applications. It can package and run an application in a loosely isolated environment called a _container_.| |Emacs |[GNU Emacs](https://www.gnu.org/software/emacs/), an extensible, customisable text editor.| |extra-deps |Extra dependencies (one version of each) that add to, or shadow, those specified in a snapshot.| |FreeBSD |A Unix-like operating system. | |GCC |The [GNU Compiler Collection](https://gcc.gnu.org/) or its executable `gcc`.| |GHC |The [Glasgow Haskell Compiler](https://www.haskell.org/ghc/).| |GHC boot package |A package that comes with GHC, is included in GHC's global package database, and is not included in a Stackage snapshot. 
See the output of command `stack exec -- ghc-pkg list --global`.| |GHCi |GHC's [interactive environment](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/ghci.html).| |GHCJS |A Haskell to JavaScript [compiler](https://github.com/ghcjs/ghcjs).| |GHCup |An [installer](https://www.haskell.org/ghcup/) for Haskell. |Git |A [distributed version control system](https://git-scm.com/).| |GPG |The [GNU Privacy Guard](https://gnupg.org/) or GnuPG, software that allows you to encrypt or sign your data and communications.| |Hackage |The [Haskell Package Repository](https://hackage.haskell.org/). |Haddock |The [document generation tool](https://hackage.haskell.org/package/haddock) for Haskell libraries.| |'Haskell' extension|The ['Haskell' extension]() for VS Code. | |HLS |[Haskell Language Server](https://haskell-language-server.readthedocs.io/en/latest/), an implementation of the Language Server Protocol for Haskell.| |Homebrew |A [package manager](https://brew.sh/) for macOS or Linux, or its executable `brew`.| |Hoogle |A Haskell API [search engine](https://hoogle.haskell.org/).| |Hpack |A [format](https://github.com/sol/hpack) for Haskell packages or the executable `hpack` that produces a Cabal file from `package.yaml`.| |Linux |A family of operating systems based on the Linux kernel. | |macOS |The primary operating system for Apple's Mac computers. Previously known as Mac OS X or OS X.| |Make |A [build automation tool](https://www.gnu.org/software/make/).| |MSYS2 |The [MSYS2](https://www.msys2.org/) software distribution and building platform for Windows.| |Nix |A purely functional [package manager](https://nixos.org/), available for Linux and macOS.| |package |A Haskell package is an organised collection of Haskell code and related files. It is described by a Cabal file or a `package.yaml` file, which is itself part of the package.| |`package.yaml` |A file that describes a package in the Hpack format. 
| |Pantry |A library for content-addressable Haskell package management, provided by the [`pantry` package](https://hackage.haskell.org/package/pantry). A dependency of Stack.| |PATH |The `PATH` environment variable, specifying a list of directories searched for executable files.| |project |A Stack project is a local directory that contains a project-level configuration file (`stack.yaml`, by default). A project may relate to more than one project package.| |project package |A Haskell package that is part of a project and located locally. Distinct from a dependency located locally.| |PVP |The Haskell [Package Versioning Policy](https://pvp.haskell.org/), which tells developers of libraries how to set their version numbers.| |REPL |An interactive (run-eval-print loop) programming environment.| |resolver |A synonym for snapshot. | |`Setup.hs` |A project-specific file used by Cabal to perform setup tasks.| |snapshot |A snapshot defines a GHC version, a set of packages (one version of each), and build flags or other settings.| |Stack |The Haskell Tool Stack project or its executable `stack`. | |`stack.yaml` |A project-level configuration file used by Stack, which may also contain non-project-specific options.| |Stackage |A [distribution](https://www.stackage.org/) of compatible Haskell packages.| |Stack root |A directory in which Stack stores important files. See `stack path --stack-root`. On Windows, or if Stack is configured to use the XDG Base Directory Specification, Stack also stores important files outside of the Stack root.| |Stack work directory|A directory within a local project or package directory in which Stack stores files created during the build process. Named `.stack-work`, by default.| |Unix-like operating systems|Linux, FreeBSD and macOS. | |VS Code |[Visual Studio Code](https://code.visualstudio.com/), a source code editor.| |Windows |A group of operating systems developed by Microsoft. 
| |WSL |[Windows Subsystem for Linux](https://docs.microsoft.com/en-us/windows/wsl/). Provides a Linux environment on Windows.| |XDG Base Directory Specification|A [specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html) of directories relative to which files should be located.| |YAML |A human-friendly [data serialization language](https://yaml.org/).| stack-2.15.7/doc/GUIDE.md0000644000000000000000000016263014620153473013042 0ustar0000000000000000
# User guide (introductory) Stack is a modern, cross-platform build tool for Haskell code. This introductory guide takes a new Stack user through the typical workflows. This guide will not teach Haskell or involve much code, and it requires no prior experience with the Haskell packaging system or other build tools. Terms used in the guide are defined in the [glossary](glossary.md). Some of Stack's features will not be needed regularly or by all users. See the [advanced user's guide](GUIDE_advanced.md) for information about those features. ## Stack's functions Stack handles the management of your toolchain (including GHC — the Glasgow Haskell Compiler — and, for Windows users, MSYS2), building and registering libraries, building build tool dependencies, and more. While it can use existing tools on your system, Stack has the capacity to be your one-stop shop for all Haskell tooling you need. This guide will follow that Stack-centric approach. ### What makes Stack special? The primary Stack design point is __reproducible builds__. If you run `stack build` today, you should get the same result running `stack build` tomorrow. There are some cases that can break that rule (changes in your operating system configuration, for example), but, overall, Stack follows this design philosophy closely. To make this a simple process, Stack uses curated package sets called __snapshots__. Stack has also been designed from the ground up to be user friendly, with an intuitive, discoverable command line interface. For many users, simply downloading Stack and reading `stack --help` will be enough to get up and running. This guide provides a more gradual tour for users who prefer that learning style. To build your project, Stack uses a project-level configuration file, named `stack.yaml`, in the root directory of your project as a sort of blueprint. That file contains a reference to the snapshot (also known as a __resolver__) which your package will be built against. 
Finally, Stack is __isolated__: it will not make changes outside of specific Stack directories. Stack-built files generally go in either the Stack root directory or `./.stack-work` directories local to each project. The [Stack root](stack_root.md) directory holds packages belonging to snapshots and any Stack-installed versions of GHC. Stack will not tamper with any system version of GHC or interfere with packages installed by other build tools, such as Cabal (the tool). ## Downloading and Installation The [documentation dedicated to downloading Stack](install_and_upgrade.md) has the most up-to-date information for a variety of operating systems. Instead of repeating that content here, please go check out that page and come back here when you can successfully run `stack --version`. We also assume that the directory reported by `stack path --local-bin` has been added to the PATH. ## Hello World Example With Stack installed, let's create a new project from a template and walk through the most common Stack commands. In this guide, an initial `$` represents the command line prompt. The prompt may differ in the terminal on your operating system. Unless stated otherwise, the working directory is the project's root directory. ### The `stack new` command We'll start off with the `stack new` command to create a new *project*, that will contain a Haskell *package* of the same name. So let's pick a valid package name first: > A package is identified by a globally-unique package name, which consists > of one or more alphanumeric words separated by hyphens. To avoid ambiguity, > each of these words should contain at least one letter. (From the [Cabal users guide](https://www.haskell.org/cabal/users-guide/developing-packages.html#developing-packages)) We'll call our project `helloworld`, and we'll use the `new-template` project template. This template is used by default, but in our example we will refer to it expressly. Other templates are available. 
For further information about templates, see the `stack templates` command [documentation](templates_command.md). From the root directory for all our Haskell projects, we command: ~~~text stack new helloworld new-template ~~~ For this first Stack command, there's quite a bit of initial setup it needs to do (such as downloading the list of packages available upstream), so you'll see a lot of output. Over the course of this guide a lot of the content will begin to make more sense. After creating the project directory, and obtaining and populating the project template, Stack will initialise its own project-level configuration. For further information about setting parameters to populate templates, see the YAML configuration [documentation](yaml_configuration.md#templates). For further information about initialisation, see the `stack init` command [documentation](#the-stack-init-command). The `stack new` and `stack init` commands have options and flags in common. !!! info Pass the `--bare` flag to cause Stack to create the project in the current working directory rather than in a new project directory. !!! info Parameters to populate project templates can be set at the command line with the `--param :` (or `-p`) option. We now have a project in the `helloworld` directory! We will change to that directory, with command: ~~~text cd helloworld ~~~ ### The `stack build` command Next, we'll run the most important Stack command, `stack build`: ~~~text stack build # installing ... building ... ~~~ Stack needs a version of GHC in order to build your project. Stack will discover that you are missing it and will install it for you. You'll get intermediate download percentage statistics while the download is occurring. This command may take some time, depending on download speeds. !!! note GHC will be installed to your Stack programs directory, so calling `ghc` on the command line won't work. 
See the `stack exec`, `stack ghc`, and `stack runghc` commands below for more information. Once a version of GHC is installed, Stack will then build your project. ### The `stack exec` command Looking closely at the output of the previous command, you can see that it built both a library called `helloworld` and an executable called `helloworld-exe` (on Windows, `helloworld-exe.exe`). We'll explain more in the next section, but, for now, just notice that the executables are installed in a location in our project's `.stack-work` directory. Now, let's use the `stack exec` command to run our executable (which just outputs "someFunc"): ~~~text stack exec helloworld-exe someFunc ~~~ `stack exec` works by providing the same reproducible environment that was used to build your project to the command that you are running. Thus, it knew where to find `helloworld-exe` even though it is hidden in the `.stack-work` directory. Command `stack path --bin-path` to see the PATH in the Stack environment. !!! info On Windows, the Stack environment includes the `\mingw64\bin`, `\usr\bin` and `\usr\local\bin` directories of the Stack-supplied MSYS2. If your executable depends on files (for example, dynamic-link libraries) in those directories and you want to run it outside of the Stack environment, you will need to ensure copies of those files are on the PATH. ### The `stack test` command Finally, like all good software, `helloworld` actually has a test suite. Let's run it with the `stack test` command: ~~~text stack test # build output ... ~~~ Reading the output, you'll see that Stack first builds the test suite and then automatically runs it for us. For both the `build` and `test` command, already built components are not built again. You can see this by using the `stack build` and `stack test` commands a second time: ~~~text stack build stack test # build output ... ~~~ ## Inner Workings of Stack In this subsection, we'll dissect the `helloworld` example in more detail. 
### Files in helloworld Before studying Stack more, let's understand our project a bit better. The files in the directory include: ~~~text app/Main.hs src/Lib.hs test/Spec.hs ChangeLog.md README.md LICENSE Setup.hs helloworld.cabal package.yaml stack.yaml .gitignore ~~~ The `app/Main.hs`, `src/Lib.hs`, and `test/Spec.hs` files are all Haskell source files that compose the actual functionality of our project (we won't dwell on them here). The `ChangeLog.md`, `README.md`, `LICENSE` and `.gitignore` files have no effect on the build. The `helloworld.cabal` file is updated automatically as part of the `stack build` process and should not be modified. The files of interest here are `Setup.hs`, `stack.yaml`, and `package.yaml`. The `Setup.hs` file is a component of the Cabal build system which Stack uses. It's technically not needed by Stack, but it is still considered good practice in the Haskell world to include it. The file we're using is straight boilerplate: ~~~haskell import Distribution.Simple main = defaultMain ~~~ Next, let's look at our `stack.yaml` file, which gives our project-level settings. Ignoring comments beginning `#`, the contents will look something like this: ~~~yaml resolver: url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/22/21.yaml packages: - . ~~~ The value of the [`resolver`](yaml_configuration.md#resolver) key tells Stack *how* to build your package: which GHC version to use, versions of package dependencies, and so on. Our value here says to use [LTS Haskell 22.21](https://www.stackage.org/lts-22.21), which implies GHC 9.6.5 (which is why `stack build` installs that version of GHC if it is not already available to Stack). There are a number of values you can use for `resolver`, which we'll cover later. The value of the `packages` key tells Stack which project packages, located locally, to build. 
In our simple example, we have only a single project package, located in the same directory, so '`.`' suffices. However, Stack has powerful support for multi-package projects, which we'll elaborate on as this guide progresses. Another file important to the build is `package.yaml`. The `package.yaml` file describes the package in the [Hpack](https://github.com/sol/hpack) format. Stack has in-built Hpack functionality and this is its preferred package format. The default behaviour is to generate the Cabal file (here named `helloworld.cabal`) from this `package.yaml` file, and accordingly you should **not** modify the Cabal file. It is also important to remember that Stack is built on top of the Cabal build system. Therefore, an understanding of the moving parts in Cabal are necessary. In Cabal, we have individual *packages*, each of which contains a single Cabal file, named `.cabal`. The Cabal file can define one or more *components*: a library, executables, test suites, and benchmarks. It also specifies additional information such as library dependencies, default language pragmas, and so on. In this guide, we'll discuss the bare minimum necessary to understand how to modify a `package.yaml` file. You can see a full list of the available options at the [Hpack documentation](https://github.com/sol/hpack#quick-reference). The Cabal User Guide is the definitive reference for the [Cabal file format](https://cabal.readthedocs.io/en/stable/cabal-package.html). ### The location of GHC As we saw above, the `build` command installed GHC for us. You can use the `stack path` command for quite a bit of path information (which we'll play with more later). 
We'll look at where GHC is installed: === "Unix-like" Command: ~~~text stack exec -- which ghc /home//.stack/programs/x86_64-linux/ghc-9.0.2/bin/ghc ~~~ === "Windows (with PowerShell)" Command: ~~~text stack exec -- where.exe ghc C:\Users\\AppData\Local\Programs\stack\x86_64-windows\ghc-9.0.2\bin\ghc.exe ~~~ As you can see from that path (and as emphasized earlier), the installation is placed to not interfere with any other GHC installation, whether system-wide or even different GHC versions installed by Stack. ## Cleaning your project You can clean up build artifacts for your project using the `stack clean` and `stack purge` commands. ### The `stack clean` command `stack clean` deletes the local working directories containing compiler output. By default, that means the contents of directories in `.stack-work/dist`, for all the `.stack-work` directories within a project. Use `stack clean ` to delete the output for the package _specific-package_ only. ### The `stack purge` command `stack purge` deletes the local stack working directories, including extra-deps, git dependencies and the compiler output (including logs). It does not delete any snapshot packages, compilers or programs installed using `stack install`. This essentially reverts the project to a completely fresh state, as if it had never been built. `stack purge` is just a shortcut for `stack clean --full` ### The `stack build` command The `build` command is the heart and soul of Stack. It is the engine that powers building your code, testing it, getting dependencies, and more. Quite a bit of the remainder of this guide will cover more advanced `build` functions and features, such as building test and Haddocks at the same time, or constantly rebuilding blocking on file changes. !!! note Using the `build` command twice with the same options and arguments should generally do nothing (besides things like rerunning test suites), and should, in general, produce a reproducible result between different runs. 
## Adding dependencies Let's say we decide to modify our `helloworld` source a bit to use a new library, perhaps the ubiquitous `text` package. In `src/Lib.hs`, we can, for example add: ~~~haskell {-# LANGUAGE OverloadedStrings #-} module Lib ( someFunc ) where import qualified Data.Text.IO as T someFunc :: IO () someFunc = T.putStrLn "someFunc" ~~~ When we try to build this, things don't go as expected: ~~~text stack build # build failure output (abridged for clarity) ... src\Lib.hs:6:1: error: Could not load module ‘Data.Text.IO’ It is a member of the hidden package ‘text-1.2.5.0’. Perhaps you need to add ‘text’ to the build-depends in your .cabal file. Use -v (or `:set -v` in ghci) to see a list of the files searched for. | 6 | import qualified Data.Text.IO as T | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ~~~ This means that the package containing the module in question is not available. To tell Stack to use [text](https://hackage.haskell.org/package/text), you need to add it to your `package.yaml` file — specifically in your `dependencies` section, like this: ~~~yaml dependencies: - base >= 4.7 && < 5 - text # added here ~~~ Now if we rerun `stack build`, we should get a successful result. Command: ~~~text stack build # build output ... ~~~ This output means that the `text` package was downloaded, configured, built, and locally installed. Once that was done, we moved on to building our project package (`helloworld`). At no point did we need to ask Stack to build dependencies — it does so automatically. ### Listing Dependencies Let's have Stack add a few more dependencies to our project. First, we'll include two new packages in the `dependencies` section for our library in our `package.yaml`: ~~~yaml dependencies: - base >= 4.7 && < 5 - text - filepath - containers ~~~ After adding these two dependencies, we can again run `stack build` to have them installed. Command: ~~~text stack build # build output ... 
~~~ Finally, to find out which versions of these libraries Stack installed, we can ask Stack to `ls dependencies`. Command: ~~~text stack ls dependencies # dependency output ... ~~~ ### extra-deps Let's try a more off-the-beaten-track package: the joke [acme-missiles](http://www.stackage.org/package/acme-missiles) package. Our source code is simple: ~~~haskell module Lib ( someFunc ) where import Acme.Missiles someFunc :: IO () someFunc = launchMissiles ~~~ Again, we add this new dependency to the `package.yaml` file like this: ~~~yaml dependencies: - base >= 4.7 && < 5 - text - filepath - containers - acme-missiles # added ~~~ However, rerunning `stack build` shows us the following error message. Command: ~~~text stack build # build failure output ... ~~~ It says that it was unable to construct the build plan. This brings us to the next major topic in using Stack. ## Curated package sets Remember above when `stack new` selected some [LTS snapshot](https://github.com/commercialhaskell/lts-haskell#readme) for us? That defined our build plan and available packages. When we tried using the `text` package, it just worked, because it was part of the LTS *package set*. We've specified the `acme-missiles` package in the `package.yaml` file (see above), but `acme-missiles` is not part of that LTS package set, so building failed. To add `acme-missiles` to the available packages, we'll use the `extra-deps` key in the `stack.yaml` file. That key defines extra packages, not present in the snapshot, that will be needed as dependencies. You can add this like so: ~~~yaml extra-deps: - acme-missiles-0.3 # not in the LTS snapshot ~~~ Now `stack build` will succeed. With that out of the way, let's dig a little bit more into these package sets, also known as *snapshots*. 
We mentioned the LTS snapshots, and you can get quite a bit of information about them at [https://www.stackage.org/lts](https://www.stackage.org/lts), including: * The appropriate value (`lts-22.13`, which is currently the latest LTS) * The GHC version used * A full list of all packages available in this snapshot * The ability to perform a Hoogle search on the packages in this snapshot * A [list of all modules](https://www.stackage.org/lts/docs) in a snapshot, which can be useful when trying to determine which package to add to your `package.yaml` file. You can also see a [list of all available snapshots](https://www.stackage.org/snapshots). You'll notice two flavors: LTS (for "Long Term Support") and Nightly. You can read more about them on the [LTS Haskell GitHub page](https://github.com/commercialhaskell/lts-haskell#readme). If you're not sure which to use, start with LTS Haskell (which Stack will lean towards by default as well). ## Snapshots and changing your compiler version Let's explore package sets a bit further. Instead of `lts-22.13`, let's change our `stack.yaml` file to use the [latest nightly](https://www.stackage.org/nightly). Right now, this is 2024-03-20 - please see the snapshot from the link above to get the latest. Then, commanding `stack build` again will produce: ~~~text stack build # Downloaded nightly-2024-03-20 build plan. # build output ... ~~~ We can also change snapshots on the command line, which can be useful in a Continuous Integration (CI) setting, like on Travis. For example, command: ~~~text stack --snapshot lts-21.25 build # Downloaded lts-21.25 build plan. # build output ... ~~~ When passed on the command line, you also get some additional "short-cut" versions of snapshots: `--snapshot nightly` will use the newest Nightly snapshot available, `--snapshot lts` will use the newest LTS, and `--snapshot lts-22` will use the newest LTS in the 22.x series. 
The reason these are only available on the command line and not in your `stack.yaml` file is that using them: 1. Will slow down your build (since Stack then needs to download information on the latest available LTS each time it builds) 2. Produces unreliable results (since a build run today may proceed differently tomorrow because of changes outside of your control) ### Changing GHC versions Finally, let's try using an older LTS snapshot. We'll use the newest 21.x snapshot with the command: ~~~text stack --snapshot lts-21 build # build output ... ~~~ This succeeds, automatically installing the necessary GHC along the way. So, we see that different LTS versions use different GHC versions and Stack can handle that. ### Other snapshot values We've mentioned `nightly-YYYY-MM-DD` and `lts-X.Y` values for the snapshot. There are actually other options available, and the list will grow over time. At the time of writing: * `ghc-X.Y.Z`, for requiring a specific GHC version but no additional packages * Experimental custom snapshot support The most up-to-date information can always be found in the [stack.yaml documentation](yaml_configuration.md#snapshot). ## Existing projects Alright, enough playing around with simple projects. Let's take an open source package and try to build it. We'll be ambitious and use [yackage](https://hackage.haskell.org/package/yackage), a local package server using [Yesod](http://www.yesodweb.com/). To get the code, we'll use the `stack unpack` command from the root directory for all our Haskell projects: ~~~text stack unpack yackage Unpacked yackage-0.8.1 to <current-directory>/yackage-0.8.1/ ~~~ You can also unpack to the directory of your liking instead of the current one by issuing the command: ~~~text stack unpack yackage --to <desired-directory> ~~~ This will create a `yackage-0.8.1` directory inside `<desired-directory>`. 
We will change to that directory, with the command: ~~~text cd yackage-0.8.1 ~~~ ### The `stack init` command This new directory does not have a `stack.yaml` file, so we need to make one first. We could do it by hand, but let's be lazy instead with the `stack init` command: ~~~text stack init # init output ... ~~~ `stack init` does quite a few things for you behind the scenes: * Finds all of the Cabal files in your current directory and subdirectories (unless you use `--ignore-subdirs`) and determines the packages and versions they require * Finds the best combination of snapshot and package flags that allows everything to compile with minimum external dependencies * It tries to look for the best matching snapshot from latest LTS, latest nightly, other LTS versions in that order Assuming it finds a match, it will write your `stack.yaml` file, and everything will work. !!! note The `yackage` package does not currently support Hpack, but you can also use `hpack-convert` should you need to generate a `package.yaml` file. #### Excluded Packages Sometimes multiple packages in your project may have conflicting requirements. In that case `stack init` will fail, so what do you do? You could manually create `stack.yaml` by omitting some packages to resolve the conflict. Alternatively you can ask `stack init` to do that for you by specifying `--omit-packages` flag on the command line. Let's see how that works. To simulate a conflict we will use `acme-missiles-0.3` in `yackage` and we will also copy `yackage.cabal` to another directory and change the name of the file and package to `yackage-test`. In this new package we will use `acme-missiles-0.2` instead. Let's see what happens when we command `stack init` again: ~~~text stack init --force --omit-packages # init failure output ... ~~~ Looking at `stack.yaml`, you will see that the excluded packages have been commented out under the `packages` field. 
In case wrong packages are excluded you can uncomment the right one and comment the other one. Packages may get excluded due to conflicting requirements among user packages or due to conflicting requirements between a user package and the snapshot compiler. If all of the packages have a conflict with the compiler then all of them may get commented out. When packages are commented out you will see a warning every time you run a command which needs the configuration file. The warning can be disabled by editing the configuration file and removing it. #### Using a specific snapshot Sometimes you may want to use a specific snapshot for your project instead of `stack init` picking one for you. You can do that by using `stack init --snapshot <snapshot>`. You can also init with a compiler snapshot if you do not want to use a Stackage snapshot. That will result in all of your project's dependencies being put under the `extra-deps` section. #### Installing the compiler Stack will automatically install the compiler when you run `stack build` but you can manually specify the compiler by running `stack setup <GHC-version>`. #### Miscellaneous and diagnostics _Add selected packages_: If you want to use only selected packages from your project directory you can do so by explicitly specifying the package directories on the command line. _Duplicate package names_: If multiple packages under the directory tree have the same name, `stack init` will report those and automatically ignore one of them. _Ignore subdirectories_: By default `stack init` searches all the subdirectories for Cabal files. If you do not want that then you can use the `--ignore-subdirs` command line switch. _Cabal warnings_: `stack init` will show warnings if there were issues in reading a Cabal file. You may want to pay attention to the warnings as sometimes they may result in incomprehensible errors later on during dependency solving. 
_Package naming_: If the `Name` field defined in a Cabal file does not match the Cabal file name then `stack init` will refuse to continue. _User warnings_: When packages are excluded or external dependencies are added Stack will show warnings every time the configuration file is loaded. You can suppress the warnings by editing the configuration file and removing the warnings from it. You may see something like this: ~~~text stack build Warning: Some packages were found to be incompatible with the resolver and have been left commented out in the packages section. Warning: Specified resolver could not satisfy all dependencies. Some external packages have been added as dependencies. You can suppress this message by removing it from stack.yaml ~~~ ## Different databases Time to take a short break from hands-on examples and discuss a little architecture. Stack has the concept of multiple *databases*. A database consists of a GHC package database (which contains the compiled version of a library), executables, and a few other things as well. To give you an idea, the contents of the parent directory of the `stack path --local-pkg-db` directory are the directories: ~~~text bin doc lib pkgdb ~~~ Databases in Stack are *layered*. For example, the database listing we just gave is called a *local* database (also known as a *mutable* database). That is layered on top of a *snapshot* database (also known as a *write-only* database). The snapshot database contains the libraries and executables that are considered to be *immutable*. Finally, GHC itself ships with a number of libraries and executables, also considered to be immutable, which form the *global* database. 
To get a quick idea of this, we can look at the output of the `stack exec -- ghc-pkg list` command in our `helloworld` project: ~~~text Cabal-3.6.3.0 Win32-2.12.0.1 array-0.5.4.0 base-4.16.2.0 binary-0.8.9.0 bytestring-0.11.3.1 containers-0.6.5.1 deepseq-1.4.6.1 directory-1.3.6.2 exceptions-0.10.4 filepath-1.4.2.2 (ghc-9.2.3) ghc-bignum-1.2 ghc-boot-9.2.3 ghc-boot-th-9.2.3 ghc-compact-0.1.0.0 ghc-heap-9.2.3 ghc-prim-0.8.0 ghci-9.2.3 haskeline-0.8.2 hpc-0.6.1.0 integer-gmp-1.1 libiserv-9.2.3 mtl-2.2.2 parsec-3.1.15.0 pretty-1.1.3.6 process-1.6.13.2 rts-1.0.2 stm-2.5.0.2 template-haskell-2.18.0.0 text-1.2.5.0 time-1.11.1.1 transformers-0.5.6.2 xhtml-3000.2.2.1 acme-missiles-0.3 helloworld-0.1.0.0 ~~~ where `` refers to the directory output by the command `stack path --global-pkg-db`, and so on. Notice that `acme-missiles` ends up in the *snapshot* database. Any package which comes from Hackage, an archive, or a repository is considered to be an *immutable* package. Anything which is considered *mutable*, or depends on something mutable, ends up in the *local* database. This includes your own code and any other packages located on a local file path. The reason we have this structure is that: * it lets multiple projects reuse the same binary builds of immutable packages, * but doesn't allow different projects to "contaminate" each other by putting non-standard content into the shared snapshot database. As you probably guessed, there can be multiple snapshot databases available. See the contents of the `snapshots` directory in the [Stack root](stack_root.md). * On Unix-like operating systems, each snapshot is in the last of a sequence of three subdirectories named after the platform, a 256-bit hash of the source map (how the package should be built -- including the compiler, options, and immutable dependencies), and the GHC version. 
* On Windows, each snapshot is in a subdirectory that is a shorter hash (eight characters) of the sequence of three directories used on Unix-like operating systems. This is done to avoid problems created by default limits on file path lengths on Windows systems. These snapshot databases don't get layered on top of each other; they are each used separately. In reality, you'll rarely — if ever — interact directly with these databases, but it's good to have a basic understanding of how they work so you can understand why rebuilding may occur at different points. ## The build synonyms Let's look at a subset of the `stack --help` output: ~~~text build Build the package(s) in this directory/configuration install Shortcut for 'build --copy-bins' test Shortcut for 'build --test' bench Shortcut for 'build --bench' haddock Shortcut for 'build --haddock' ~~~ Four of these commands are just synonyms for the `build` command. They are provided for convenience for common cases (e.g., `stack test` instead of `stack build --test`) and so that commonly expected commands just work. What's so special about these commands being synonyms? It allows us to make much more composable command lines. For example, we can have a command that builds executables, generates Haddock documentation (Haskell API-level docs), and builds and runs your test suites, with: ~~~text stack build --haddock --test ~~~ You can even get more inventive as you learn about other flags. 
For example, take the following command: ~~~text stack build --pedantic --haddock --test --exec "echo Yay, it succeeded" --file-watch ~~~ This command will: * turn on all warnings and errors (the `--pedantic` flag) * build your library and executables * generate Haddocks (the `--haddock` flag) * build and run your test suite (the `--test` flag) * run the command `echo Yay, it succeeded` when that completes (the `--exec` option) * after building, watch for changes in the files used to build the project, and kick off a new build when done (the `--file-watch` flag) ### The `stack install` command and `copy-bins` option It's worth calling out the behavior of the `install` command and `--copy-bins` option, since this has confused a number of users (especially when compared to behavior of other tools like Cabal (the tool)). The `install` command does precisely one thing in addition to the build command: it copies any generated executables to the local binary directory. You may recognize the default value for that path: On Unix-like operating systems, command: ~~~text stack path --local-bin /home//.local/bin ~~~ On Windows, command: ~~~text stack path --local-bin C:\Users\\AppData\Roaming\local\bin ~~~ That's why the download page recommends adding that directory to your PATH. This feature is convenient, because now you can simply run `executable-name` in your shell instead of having to run `stack exec executable-name` from inside your project directory. Since it's such a point of confusion, let me list a number of things Stack does *not* do specially for the `install` command: * Stack will always build any necessary dependencies for your code. The install command is not necessary to trigger this behavior. If you just want to build a project, run `stack build`. * Stack will *not* track which files it's copied to your local binary directory nor provide a way to automatically delete them. 
There are many great tools out there for managing installation of binaries, and Stack does not attempt to replace those. * Stack will not necessarily be creating a relocatable executable. If your executable hard-codes paths, copying the executable will not change those hard-coded paths. * At the time of writing, there's no way to change those kinds of paths with Stack, but see [issue #848 about --prefix](https://github.com/commercialhaskell/stack/issues/848) for future plans. That's really all there is to the `install` command: for the simplicity of what it does, it occupies a much larger mental space than is warranted. ## Targets, locals, and extra-deps We haven't discussed this too much yet, but, in addition to having a number of synonyms *and* taking a number of options on the command line, the `build` command *also* takes many arguments. These are parsed in different ways, and can be used to achieve a high level of flexibility in telling Stack exactly what you want to build. We're not going to cover the full generality of these arguments here; instead, there's documentation covering the full [build command syntax](build_command.md). Here, we'll just point out a few different types of arguments: * You can specify a *package name*, e.g. `stack build vector`. * This will attempt to build the `vector` package, whether it's a local package, in your extra-deps, in your snapshot, or just available upstream. If it's just available upstream but not included in your locals, extra-deps, or snapshot, the newest version is automatically promoted to an extra-dep. * You can also give a *package identifier*, which is a package name plus version, e.g. `stack build yesod-bin-1.4.14`. * This is almost identical to specifying a package name, except it will (1) choose the given version instead of latest, and (2) error out if the given version conflicts with the version of a project package. * The most flexibility comes from specifying individual *components*, e.g. 
`stack build helloworld:test:helloworld-test` says "build the test suite component named helloworld-test from the helloworld package." * In addition to this long form, you can also shorten it by skipping what type of component it is, e.g. `stack build helloworld:helloworld-test`, or even skip the package name entirely, e.g. `stack build :helloworld-test`. * Finally, you can specify individual *directories* to build to trigger building of any project packages included in those directories or subdirectories. When you give no specific arguments on the command line (e.g., `stack build`), it's the same as specifying the names of all of your project packages. If you just want to build the package for the directory you're currently in, you can use `stack build .`. ### Components, --test, and --bench Here's one final important yet subtle point. Consider our `helloworld` package: it has a library component, an executable `helloworld-exe`, and a test suite `helloworld-test`. When you run `stack build helloworld`, how does it know which ones to build? By default, it will build the library (if any) and all of the executables but ignore the test suites and benchmarks. This is where the `--test` and `--bench` flags come into play. If you use them, those components will also be included. So `stack build --test helloworld` will end up including the helloworld-test component as well. You can bypass this implicit adding of components by being much more explicit, and stating the components directly. For example, the following will not build the `helloworld-exe` executable: ~~~text stack purge stack build :helloworld-test helloworld> configure (lib + test) Configuring helloworld-0.1.0.0... helloworld> build (lib + test) with ghc-9.6.5 Preprocessing library for helloworld-0.1.0.0.. Building library for helloworld-0.1.0.0.. [1 of 2] Compiling Lib [2 of 2] Compiling Paths_helloworld Preprocessing test suite 'helloworld-test' for helloworld-0.1.0.0.. 
Building test suite 'helloworld-test' for helloworld-0.1.0.0.. [1 of 2] Compiling Main [2 of 2] Compiling Paths_helloworld [3 of 3] Linking .stack-work\dist\\build\helloworld-test\helloworld-test.exe helloworld> copy/register Installing library in ...\helloworld\.stack-work\install\... Registering library for helloworld-0.1.0.0.. helloworld> test (suite: helloworld-test) Test suite not yet implemented helloworld> Test suite helloworld-test passed Completed 2 action(s). ~~~ We first purged our project to clear old results so we know exactly what Stack is trying to do. The last line shows that our command also *runs* the test suite it just built. This may surprise some people who would expect tests to only be run when using `stack test`, but this design decision is what allows the `stack build` command to be as composable as it is (as described previously). The same rule applies to benchmarks. To spell it out completely: * The `--test` and `--bench` flags simply state which components of a package should be built, if no explicit set of components is given * The default behavior for any test suite or benchmark component which has been built is to also run it You can use the `--no-run-tests` and `--no-run-benchmarks` flags to disable running of these components. You can also use `--no-rerun-tests` to prevent running a test suite which has already passed and has not changed. !!! note Stack doesn't build or run test suites and benchmarks for non-local packages. This is done so that a command like `stack test` doesn't need to run 200 test suites! ## Multi-package projects Until now, everything we've done with Stack has used a single-package project. However, Stack's power truly shines when you're working on multi-package projects. All the functionality you'd expect to work just does: dependencies between packages are detected and respected, dependencies of all packages are managed as one cohesive whole, and if anything fails to build, the build command exits appropriately. 
Let's demonstrate this with the `wai-app-static` and `yackage` packages, starting in the root directory for all our Haskell projects. Command: ~~~text mkdir multi cd multi stack unpack wai-app-static yackage Unpacked wai-app-static (from Hackage) to .../multi/wai-app-static-3.1.7.4/ Unpacked yackage (from Hackage) to .../multi/yackage-0.8.1/ stack init Looking for .cabal or package.yaml files to use to init the project. Using cabal packages: - wai-app-static-3.1.7.4/ - yackage-0.8.1/ Cabal file warning in .../multi/yackage-0.8.1/yackage.cabal@47:40: version operators used. To use version operators the package needs to specify at least 'cabal-version: >= 1.8'. Cabal file warning in .../multi/yackage-0.8.1/yackage.cabal@21:36: version operators used. To use version operators the package needs to specify at least 'cabal-version: >= 1.8'. Selecting the best among 18 snapshots... * Matches ... Selected resolver: ... Initialising configuration using resolver: ... Total number of user packages considered: 2 Writing configuration to file: stack.yaml stack build --haddock --test # Goes off to build a whole bunch of packages ~~~ If you look at the `stack.yaml` file, you'll see exactly what you'd expect: ~~~yaml resolver: url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/19/17.yaml packages: - wai-app-static-3.1.7.4 - yackage-0.8.1 ~~~ Notice that multiple directories are listed in the `packages` key. In addition to local directories, you can also refer to packages available in a Git repository or in a tarball over HTTP/HTTPS. This can be useful for using a modified version of a dependency that hasn't yet been released upstream. !!! note When adding upstream packages directly to your project it is important to distinguish _project packages_ located locally from the upstream _dependency packages_. Otherwise you may have trouble running `stack ghci`. See [stack.yaml documentation](yaml_configuration.md#packages) for more details. 
## Flags and GHC options There are two common ways to alter how a package will install: with Cabal flags and with GHC options. ### Cabal flag management To change a Cabal flag setting, we can use the command line `--flag` option. The `yackage` package has an `upload` flag that is enabled by default. We can command: ~~~text stack build --flag yackage:-upload ~~~ This means: when compiling the `yackage` package, turn off the `upload` flag (thus the `-` in `-upload`). Unlike other tools, Stack is explicit about which package's flag you want to change. It does this for two reasons: 1. There's no global meaning for Cabal flags, and therefore two packages can use the same flag name for completely different things. 2. By following this approach, we can avoid unnecessarily recompiling snapshot packages that happen to use a flag that we're using. You can also change flag values on the command line for extra-dep and snapshot packages. If you do this, that package will automatically be promoted to an extra-dep, since the build plan is different than what the plan snapshot definition would entail. ### GHC options GHC options follow a similar logic as in managing Cabal flags, with a few nuances to adjust for common use cases. Let's consider the command: ~~~text stack build --ghc-options="-Wall -Werror" ~~~ This will set the `-Wall -Werror` options for all *local targets*. Note that this will not affect extra-dep and snapshot packages at all. This design provides us with reproducible and fast builds. (By the way: the above GHC options have a special convenience flag: `--pedantic`.) There's one extra nuance about command line GHC options: Since they only apply to local targets, if you change your local targets, they will no longer apply to other packages. Let's play around with an example from the `wai` repository, which includes the `wai` and `warp` packages, the latter depending on the former. 
If we command again: ~~~text stack build --ghc-options=-O0 wai ~~~ It will build all of the dependencies of `wai`, and then build `wai` with all optimizations disabled. Now let's add in `warp` as well. Command: ~~~text stack build --ghc-options=-O0 wai warp ~~~ This builds the additional dependencies for `warp`, and then builds `warp` with optimizations disabled. Importantly: it does not rebuild `wai`, since `wai`'s configuration has not been altered. Now the surprising case. Command: ~~~text stack build --ghc-options=-O0 warp wai-3.0.3.0-5a49351d03cba6cbaf906972d788e65d: unregistering (flags changed from ["--ghc-options","-O0"] to []) warp-3.1.3-a91c7c3108f63376877cb3cd5dbe8a7a: unregistering (missing dependencies: wai) wai-3.0.3.0: configure ~~~ You may expect this to be a no-op: neither `wai` nor `warp` has changed. However, Stack will instead recompile `wai` with optimizations enabled again, and then rebuild `warp` (with optimizations disabled) against this newly built `wai`. The reason: reproducible builds. If we'd never built `wai` or `warp` before, trying to build `warp` would necessitate building all of its dependencies, and it would do so with default GHC options (optimizations enabled). This dependency would include `wai`. So when we command: ~~~text stack build --ghc-options=-O0 warp ~~~ We want its behavior to be unaffected by any previous build steps we took. While this specific corner case does catch people by surprise, the overall goal of reproducible builds is - in the Stack maintainers' views - worth the confusion. Final point: if you have GHC options that you'll be regularly passing to your packages, you can add them to your `stack.yaml` file. See the [documentation section on ghc-options](yaml_configuration.md#ghc-options) for more information. !!! note That's it, the heavy content of this guide is done! Everything from here on out is simple explanations of commands. Congratulations! 
## The `stack path` command Generally, you don't need to worry about where Stack stores various files. But some people like to know this stuff. That's when the `stack path` command is useful. `stack path --help` explains the available options and, consequently, the output of the command: ~~~text --stack-root Global Stack root directory --global-config Global Stack configuration file --project-root Project root (derived from stack.yaml file) --config-location Configuration location (where the stack.yaml file is) --bin-path PATH environment variable --programs Install location for GHC and other core tools (see 'stack ls tools' command) --compiler-exe Compiler binary (e.g. ghc) --compiler-bin Directory containing the compiler binary (e.g. ghc) --compiler-tools-bin Directory containing binaries specific to a particular compiler --local-bin Directory where Stack installs executables (e.g. ~/.local/bin (Unix-like OSs) or %APPDATA%\local\bin (Windows)) --extra-include-dirs Extra include directories --extra-library-dirs Extra library directories --snapshot-pkg-db Snapshot package database --local-pkg-db Local project package database --global-pkg-db Global package database --ghc-package-path GHC_PACKAGE_PATH environment variable --snapshot-install-root Snapshot installation root --local-install-root Local project installation root --snapshot-doc-root Snapshot documentation root --local-doc-root Local project documentation root --local-hoogle-root Local project documentation root --dist-dir Dist work directory, relative to package directory --local-hpc-root Where HPC reports and tix files are stored ~~~ In addition, `stack path` accepts the flags above on the command line to state which keys you're interested in. This can be convenient for scripting. 
As a simple example, let's find out the sandboxed versions of GHC that Stack installed: === "Unix-like" Command: ~~~text ls $(stack path --programs)/*.installed /home//.stack/programs/x86_64-linux/ghc-9.0.2.installed ~~~ === "Windows (with PowerShell)" Command: ~~~text dir "$(stack path --programs)/*.installed" Directory: C:\Users\mikep\AppData\Local\Programs\stack\x86_64-windows Mode LastWriteTime Length Name ---- ------------- ------ ---- -a--- 27/07/2022 5:40 PM 9 ghc-9.0.2.installed -a--- 25/02/2022 11:39 PM 9 msys2-20210604.installed ~~~ While we're talking about paths, to wipe our Stack install completely, here's what typically needs to be removed: 1. the Stack root folder (see `stack path --stack-root`, before you uninstall); 2. if different, the folder containing Stack's global YAML configuration file (see `stack path --global-config`, before you uninstall); 3. on Windows, the folder containing Stack's tools (see `stack path --programs`, before you uninstall), which is located outside of the Stack root folder; and 4. the `stack` executable file (see `which stack`, on Unix-like operating systems, or `where.exe stack`, on Windows). You may also want to delete `.stack-work` folders in any Haskell projects that you have built using Stack. The `stack uninstall` command provides information about how to uninstall Stack. ## The `stack exec` command We've already used `stack exec` multiple times in this guide. As you've likely already guessed, it allows you to run executables, but with a slightly modified environment. In particular: `stack exec` looks for executables on Stack's bin paths, and sets a few additional environment variables (like adding those paths to the PATH, and setting `GHC_PACKAGE_PATH`, which tells GHC which package databases to use). If you want to see exactly what the modified environment looks like, try command: ~~~text stack exec env ~~~ The only issue is how to distinguish flags to be passed to Stack versus those for the underlying program. 
Thanks to the `optparse-applicative` library, Stack follows the Unix convention of `--` to separate these. For example, command: ~~~text stack exec --package stm -- echo I installed the stm package via --package stm ~~~ yields output like: ~~~text Run from outside a project, using implicit global project config Using latest snapshot resolver: lts-22.21 Writing global (non-project-specific) config file to: /home/michael/.stack/global/stack.yaml Note: You can change the snapshot via the resolver field there. I installed the stm package via --package stm ~~~ Flags worth mentioning: * `--package foo` can be used to force a package to be installed before running the given command. * `--no-ghc-package-path` can be used to stop the `GHC_PACKAGE_PATH` environment variable from being set. Some tools — notably Cabal (the tool) — do not behave well with that variable set. You may also find it convenient to use `stack exec` to launch a subshell (substitute `bash` with your preferred shell) where your compiled executable is available at the front of your PATH. Command: ~~~text stack exec bash ~~~ ## The `stack ghci` or `stack repl` command GHCi is the interactive GHC environment, a.k.a. the REPL. You *could* access it with command: ~~~text stack exec ghci ~~~ But that won't load up locally written modules for access. For that, use the `stack ghci` or `stack repl` commands, which are equivalent. To then load modules from your project in GHCi, use the `:module` command (`:m` for short) followed by the module name. !!! note If you have added packages to your project please make sure to mark them as extra-deps for faster and reliable usage of `stack ghci`. Otherwise GHCi may have trouble due to conflicts of compilation flags or having to unnecessarily interpret too many modules. See Stack's project-level [configuration](yaml_configuration.md#extra-deps) to learn how to configure a package as an extra-dep. For further information, see the [REPL environment](ghci.md) documentation. 
## The `stack ghc` and `stack runghc` commands You'll sometimes want to just compile (or run) a single Haskell source file, instead of creating an entire Cabal package for it. You can use `stack exec ghc` or `stack exec runghc` for that. As simple helpers, we also provide the `stack ghc` and `stack runghc` commands, for these common cases. ## Finding project configs, and the implicit global project Whenever you run something with Stack, it needs a project-level configuration file. The algorithm Stack uses to find such a file is: 1. Check for a `--stack-yaml` option on the command line 2. Check for a `STACK_YAML` environment variable 3. Check the current directory and all ancestor directories for a `stack.yaml` file The first two provide a convenient method for using an alternate configuration. For example: `stack build --stack-yaml stack-ghc-9.2.3.yaml` can be used by your CI system to check your code against GHC 9.2.3. Setting the `STACK_YAML` environment variable can be convenient if you're going to be running commands like `stack ghc` in other directories, but you want to use the configuration you defined in a specific project. If Stack does not find a project level configuration file in any of the three specified locations, the *implicit global* logic kicks in. You've probably noticed that phrase a few times in the output from commands above. Implicit global is essentially a hack to allow Stack to be useful in a non-project setting. When no implicit global configuration file exists, Stack creates one for you with the latest LTS snapshot. This allows you to do things like: * compile individual files easily with `stack ghc` * build executables without starting a project, e.g. `stack install pandoc` Keep in mind that there's nothing magical about this implicit global configuration. It has no effect on projects at all. Every package you install with it is put into isolated databases just like everywhere else. 
The only magic is that it's the catch-all project whenever you're running Stack somewhere else. ## `stack.yaml` versus Cabal files Now that we've covered a lot of Stack use cases, this quick summary of `stack.yaml` versus Cabal files will hopefully make sense and be a good reminder for future uses of Stack: * A project can have multiple packages. * Each project has a `stack.yaml`. * Each package has a Cabal file, named `<package-name>.cabal`. * The Cabal file specifies which packages are dependencies. * The `stack.yaml` file specifies which packages are available to be used. * The Cabal file specifies the components, modules, and build flags provided by a package * `stack.yaml` can override the flag settings for individual packages * `stack.yaml` specifies which packages to include ## Comparison to other tools Stack is not the only tool available for building Haskell code. Stack came into existence due to limitations at that time with some of the existing tools. If you are happily building Haskell code with other tools, you may not need Stack. If you're experiencing problems with other tools, give Stack a try instead. If you're a new user who has no experience with other tools, we recommend Stack. The defaults match modern best practices in Haskell development, and there are fewer corner cases you need to be aware of. You *can* develop Haskell code with other tools, but you probably want to spend your time writing code, not convincing a tool to do what you want. ### Underlying package format Before turning to differences, we clarify an important similarity: Stack, Cabal (the tool), and presumably all other tools share the same underlying package format of Cabal (the library). This is a Good Thing: we can share the same set of upstream libraries, and collaboratively work on the same project with Stack, Cabal (the tool), and NixOS. In that sense, we're sharing the same ecosystem.
### Curation vs dependency solving * Stack uses 'curation' (snapshots and Stack's project-level configuration file (`stack.yaml`, by default) define precisely the set of packages available for a project). The Stack team firmly believes that the majority of users want to simply ignore dependency resolution nightmares and get a valid build plan from day one. That's why we've made 'curation' the focus of Stack. * Cabal (the tool) can use 'curation' too but its origins are in dependency solving. ### Emphasis on reproducibility * Stack goes to great lengths to ensure that `stack build` today does the same thing tomorrow. With Stack, changing the build plan is always an explicit decision. * Cabal (the tool) does not go to the same lengths: build plans can be affected by the presence of pre-installed packages, and running `cabal update` can cause a previously successful build to fail. ### Automatic building of dependencies * Stack automatically builds dependencies. So for example, in Stack, `stack test` does the same job as: ~~~text cabal install --enable-tests --only-dependencies cabal configure --enable-tests cabal build cabal test ~~~ (newer versions of Cabal (the tool) may make this command sequence shorter). * With Cabal (the tool), you need to use `cabal install` to trigger dependency building. This is somewhat necessary as building dependencies can, in some cases, break existing installed packages. ### Isolation * Stack is isolated - provides 'sandboxed' behaviour - by default, via its databases. In other words: when you use Stack, there's __no need for sandboxes__, everything is (essentially) sandboxed by default. * With Cabal (the tool), the default behavior is a non-isolated build where working on two projects can cause the user package database to become corrupted. The Cabal solution to this is sandboxes.
### Tools other than Stack and Cabal (the tool) * [cabal-meta](https://hackage.haskell.org/package/cabal-meta) inspired a lot of the multi-package functionality of Stack. Still relevant for Cabal (the tool). * [cabal-src](https://hackage.haskell.org/package/cabal-src). Deprecated in favor of Stack in 2016. * [stackage-cli](https://hackage.haskell.org/package/stackage-cli). Deprecated in favor of Stack in 2015. * [cabal-dev](https://hackage.haskell.org/package/cabal-dev). Deprecated in favor of Cabal (the tool) in 2013. stack-2.15.7/doc/GUIDE_advanced.md0000644000000000000000000000740014604306200014646 0ustar0000000000000000
# User guide (advanced) Some of Stack's features will not be needed regularly or by all users. This part of the guide provides information about those features, organised as a reference guide. Some of the features are complex and separate pages are dedicated to them. ## Environment variables The existence or content of certain environment variables can affect how Stack behaves. For further information, see the [environment variables](environment_variables.md) documentation. ## YAML configuration files Stack is configured by the content of YAML files. A global YAML configuration file contains non-project specific options. A project-level YAML configuration file contains project-specific options and may contain non-project specific options. For further information, see the [YAML configuration](yaml_configuration.md) documentation. ## Global flags and options Stack can also be configured by flags and options on the command line. Global flags and options apply to all of Stack's commands. For further information, see the [global flags and options](global_flags.md) documentation. ## Stack commands Stack's commands are listed below, in alphabetical order. 
* [`bench`](build_command.md) - a synonym for `stack build --bench` * [`build`](build_command.md) - build packages * [`clean`](clean_command.md) - delete build artefacts for the project packages * [`config`](config_command.md) - access and modify Stack's configuration * [`docker`](docker_command.md) - use Stack with Docker * [`dot`](dot_command.md) - dependency visualization * [`eval`](eval_command.md) - evaluate some Haskell code inline * [`exec`](exec_command.md) - execute a command in the Stack environment * [`haddock`](build_command.md) - a synonym for `stack build --haddock` * [`hoogle`](hoogle_command.md) - run `hoogle` * [`hpc`](hpc_command.md) - generate Haskell Program Coverage (HPC) code coverage reports * [`ghc`](ghc_command.md) - run `ghc` * [`ghci`](ghci.md) - run GHCi, a REPL environment * [`ide`](ide_command.md) - information for an integrated development environment (IDE) * [`init`](init_command.md) - initialise Stack's project-level YAML configuration file for an existing project * [`install`](build_command.md) - a synonym for `stack build --copy-bins` * [`list`](list_command.md) - list packages on Hackage or in a snapshot * [`ls`](ls_command.md) - list information about Stack * [`new`](new_command.md) - create a new project with Stack * [`path`](path_command.md) - information about locations used by Stack * [`purge`](purge_command.md) - delete the Stack working directories * [`query`](query_command.md) - information about the build * [`repl`](ghci.md) - a synonym for `stack ghci` * [`run`](run_command.md) - build and run an executable * [`runghc`](runghc_command.md) - run `runghc` * [`runhaskell`](runghc_command.md) - a synonym for `stack runghc` * [`script`](script_command.md) - run a Haskell source file as a script * [`sdist`](sdist_command.md) - create an archive file for a package, in a form accepted by Hackage * [`setup`](setup_command.md) - get GHC for a Stack project * [`templates`](templates_command.md) - information about templates for
use with `stack new` * [`test`](build_command.md) - a synonym for `stack build --test` * [`uninstall`](uninstall_command.md) - information about how to uninstall Stack * [`unpack`](unpack_command.md) - unpack one or more packages locally * [`update`](update_command.md) - update the package index * [`upgrade`](upgrade_command.md) - upgrade Stack * [`upload`](upload_command.md) - upload a package to Hackage stack-2.15.7/doc/hoogle_command.md0000644000000000000000000000317314445120722015150 0ustar0000000000000000
# The `stack hoogle` command ~~~text stack hoogle [-- ARGUMENT(S) (e.g. 'stack hoogle -- server --local')] [--[no-]setup] [--rebuild] [--server] ~~~ Hoogle is a Haskell API search engine. `stack hoogle` runs Hoogle. Stack needs Hoogle version 5 or greater. Stack will use a Hoogle database (`database.hoo`) specific to the project's source map and the version of GHC, located in a subdirectory of subdirectory `hoogle` of Stack's work directory for the project. By default: * if a `hoogle` executable is found on the `PATH`, Stack will try to use it. Otherwise, Stack will try to identify an executable as a build target. If the Hoogle database does not exist, Stack will generate it with `hoogle generate --local`. `hoogle generate --local` queries `ghc-pkg` and generates links for all packages which have documentation and Hoogle input files (`*.txt`) generated. Pass the flag `--no-setup` to skip such setup; * the existing Hoogle database is used. Pass the flag `--rebuild` to trigger the generation of a new Hoogle database (generated as above); and * `hoogle` is passed the specified arguments (if any). The arguments are usually the subject of the search. Pass the flag `--server` to first pass `server --local --port 8080` before those arguments. `hoogle server --local --port 8080` starts a local Hoogle web server, using port 8080, that allows the following of `file://` links. stack-2.15.7/doc/hpc_command.md0000644000000000000000000001540514604306200014440 0ustar0000000000000000
# The `stack hpc` commands ~~~text stack hpc COMMAND Available commands: report Generate unified HPC coverage report from tix files and project targets ~~~ Code coverage is a measure of the degree to which the source code of a program is executed when a test suite is run. [Haskell Program Coverage (HPC)](https://ku-fpg.github.io/software/hpc/) is a code coverage tool for Haskell that is provided with GHC. Code coverage is enabled by passing the flag `--coverage` to `stack build`. `stack hpc` provides commands specific to HPC. Command `stack hpc` for the available commands. The following refers to the local HPC root directory. Its location can be obtained by command: ~~~text stack path --local-hpc-root ~~~ ## The `stack hpc report` command ~~~text stack hpc report [TARGET_OR_TIX] [--all] [--destdir DIR] [--open] ~~~ The `stack hpc report` command generates a report for a selection of targets and `.tix` files. Pass the flag `--all` for a report that uses all stored results. Pass the flag `--open` to open the HTML report in your browser. ## The `extra-tix-files` directory During the execution of the build, you can place additional tix files in the `extra-tix-files` subdirectory in the local HPC root directory, in order for them to be included in the unified report. A couple caveats: 1. These tix files must be generated by executables that are built against the exact same library versions. Also note that, on subsequent builds with coverage, the local HPC root directory will be recursively deleted. It just stores the most recent coverage data. 2. These tix files will not be considered by `stack hpc report` unless listed explicitly by file name. ## Examples If we have three different packages with test suites, packages `A`, `B`, and `C`, the default unified report will have coverage from all three. 
If we want a unified report with just two, we can instead command: ~~~text stack hpc report A B ~~~ This will output to the standard output stream a summary report for the combined coverage from `A` and `B`'s test suites. It will also log the path to the HTML for the corresponding full report. This command also supports taking extra `.tix` files. If you've also built an executable, against exactly the same library versions of `A`, `B`, and `C`, then you could command the following: ~~~text stack exec -- an-exe stack hpc report A B C an-exe.tix ~~~ or, equivalently: ~~~text stack exec -- an-exe stack hpc report --all an-exe.tix ~~~ This report will consider all test results as well as the newly generated `an-exe.tix` file. ## Usage `stack test --coverage` is quite streamlined for the following use-case: 1. You have test suites which exercise your project packages. 2. These test suites link against your library, rather than building the library directly. Coverage information is only given for libraries, ignoring the modules which get compiled directly into your executable. A common case where this doesn't happen is when your test suite and library both have something like `hs-source-dirs: src/`. In this case, when building your test suite you may also be compiling your library, instead of just linking against it. When your project has these properties, you will get the following: 1. Summary coverage reports, sent to the standard output stream in the build output, and a log of the paths to the HTML for the corresponding full reports. 2. A summary unified report, sent to the standard output stream, and a log of the path to the HTML for the corresponding full report. These reports consider the coverage on all local libraries, based on all of the tests that were run. 3. An index of all generated HTML reports, in `index.html` in the local HPC root directory, and a log of the path to the HTML for that index. 
## Implementation details Most users can get away with just understanding the above documentation. However, advanced users may want to understand exactly how `--coverage` works: 1. The GHC option `-fhpc` gets passed to all project packages. This tells GHC to output executables that track coverage information and output them to `.tix` files. `the-exe-name.tix` files will get written to the working directory of the executable. When switching on this flag, it will usually cause all project packages to be rebuilt (see issue [#1940](https://github.com/commercialhaskell/stack/issues/1940)). 2. Before the build runs with `--coverage`, the contents of the local HPC root directory gets deleted. This prevents old reports from getting mixed with new reports. If you want to preserve report information from multiple runs, copy the contents of this path to a new directory. 3. Before a test run, if a `test-name.tix` file exists in the package directory, it will be deleted. 4. After a test run, it will expect a `test-name.tix` file to exist. This file will then get loaded, modified, and outputted to `pkg-name/test-name/test-name.tix` in the local HPC root directory. The `.tix` file gets modified to remove coverage file that isn't associated with a library. So, this means that you won't get coverage information for the modules compiled in the `executable` or `test-suite` stanza of your Cabal file. This makes it possible to directly union multiple `*.tix` files from different executables (assuming they are using the exact same versions of the project packages). If there is enough popular demand, it may be possible in the future to give coverage information for modules that are compiled directly into the executable. See issue [#1359](https://github.com/commercialhaskell/stack/issues/1359). 5. Once we have a `.tix` file for a test, we also generate a summary report and a corresponding full report using HTML. The summary report is sent to the standard output stream. 
The index of the test-specific HTML report is available at `pkg-name/test-name/index.html` in the local HPC root directory. 6. After the build completes, if there are multiple output `*.tix` files, they get combined into a unified report. The index of this report will be available at `combined/all/index.html` in the local HPC root directory. 7. Finally, an index of the resulting coverage reports is generated. It links to the individual coverage reports (one for each test-suite), as well as the unified report. This index is available at `index.html` in the local HPC root directory. stack-2.15.7/doc/ide_command.md0000644000000000000000000000350414604306200014424 0ustar0000000000000000
# The `stack ide` commands ~~~text stack ide COMMAND Available commands: packages List all available local loadable packages targets List all available Stack targets ~~~ The `stack ide` commands provide information that may be of use in an integrated development environment (IDE). See `stack ide` for the available commands. ## The `stack ide packages` command ~~~text stack ide packages [--stdout] [--cabal-files] ~~~ `stack ide packages` lists all available project packages that are loadable. By default: * its output is sent to the standard error stream. Pass the flag `--stdout` to change to the standard output stream; and * the output is the package name (without its version). Pass the flag `--cabal-files` to change to the full path to the package's Cabal file. ## The `stack ide targets` command ~~~text stack ide targets [--exes] [--tests] [--benchmarks] [--stdout] ~~~ `stack ide targets` lists all available Stack targets. Alternatively, pass one or more of the flags `--exes`, `--tests` and `--benchmarks` to list only targets of those component types. By default, its output is sent to the standard error stream. Pass the flag `--stdout` to change to the standard output stream. For example, for the Stack project itself, command: ~~~text cd stack stack ide targets ~~~ and the output from the second command is: ~~~text stack:lib stack:exe:stack stack:exe:stack-integration-test stack:test:stack-unit-test ~~~ or command: ~~~text stack ide targets --exes ~~~ and the output is: ~~~text stack:exe:stack stack:exe:stack-integration-test ~~~ stack-2.15.7/doc/init_command.md0000644000000000000000000000340014613163672014637 0ustar0000000000000000
# The `stack init` command ~~~text stack init [DIR(S)] [--omit-packages] [--force] [--ignore-subdirs] ~~~ `stack init` initialises Stack's default project-level YAML configuration file (`stack.yaml`) for an existing project, based on the Cabal file or `package.yaml` file for each of its packages. By default: * Stack searches for Cabal and `package.yaml` files in the current directory. Specify one or more directories as arguments to cause Stack to search them; * Stack also searches for Cabal and `package.yaml` files in subdirectories. Pass the flag `--ignore-subdirs` to ignore subdirectories; * Stack will not overwrite an existing `stack.yaml` file. Pass the flag `--force` to allow overwriting; and * Stack will not initialise if there are conflicting or incompatible user packages. Pass the flag `--omit-packages` to cause Stack to ignore such matters while initialising. If a snapshot is specified at the command line, `stack init` will try to use it. For further information, see the documentation for the [`--snapshot`](global_flags.md#-snapshot-option) and [`--resolver`](global_flags.md#-resolver-option) options. Otherwise, `stack init` will try to use the following Stackage snapshots in order of preference, using the first that is compatible: the most recent LTS Haskell, the most recent Stackage Nightly, and other LTS Haskell (most recent first). !!! note If Cabal (the tool) has been used in the directory, consider commanding `cabal clean` before applying `stack init`, in case Cabal has created any unintended Cabal files. stack-2.15.7/doc/install_and_upgrade.md0000644000000000000000000006513414620153474016202 0ustar0000000000000000
# Install or upgrade ## Install Stack Stack can be installed on most Linux distributions, macOS and Windows. It will require at least about 5 GB of disk space, of which about 3 GB is for a single version of GHC and about 2 GB is for Stack's local copy of the Hackage package index. Stack is open to supporting more operating systems. To request support for an operating system, please submit an [issue](https://github.com/commercialhaskell/stack/issues/new) at Stack's GitHub repository. !!! info In addition to the methods described below, Stack can also be installed using the separate [GHCup](https://www.haskell.org/ghcup/) installer for Haskell-related tools. GHCup provides Stack for some combinations of machine architecture and operating system not provided elsewhere. Unlike Stack, other build tools do not automatically install GHC. GHCup can be used to install GHC for those other tools. By default, the script to install GHCup (which can be run more than once) also configures Stack so that if Stack needs a version of GHC, GHCup takes over obtaining and installing that version. !!! info "Releases on GitHub" Stack executables are also available on the [releases](https://github.com/commercialhaskell/stack/releases) page of Stack's GitHub repository. !!! info "`https://get.haskellstack.org/stable` URLs" URLs with the format `https://get.haskellstack.org/stable/<platform>.<extension>` point to the latest stable release. See the manual download links for examples. === "Linux" For most Linux distributions, the easiest way to install Stack directly (rather than use GHCup) is to command: ~~~text curl -sSL https://get.haskellstack.org/ | sh ~~~ or: ~~~text wget -qO- https://get.haskellstack.org/ | sh ~~~ !!! note The script at [get.haskellstack.org](https://get.haskellstack.org/) will ask for root access using `sudo`. It needs such access in order to use your platform's package manager to install dependencies and to install to `/usr/local/bin`.
If you prefer more control, follow the manual installation instructions for your platform below. ### Manual download Manual download for Linux distributions depends on your machine architecture, x86_64 or AArch64/ARM64. === "x86_64" * Click [:material-cloud-download-outline:](https://get.haskellstack.org/stable/linux-x86_64.tar.gz) to download an archive file with the latest release. * Extract the archive and place the `stack` executable somewhere on your PATH (see the [Path](#path) section below). * Ensure you have the required system dependencies installed. These include GCC, GNU Make, xz, perl, libgmp, libffi, and zlib. We also recommend Git and GPG. The installation of system dependencies will depend on the package manager for your Linux distribution. Notes are provided for Arch Linux, CentOS, Debian, Fedora, Gentoo and Ubuntu. === "Arch Linux" ~~~text sudo pacman -S make gcc ncurses git gnupg xz zlib gmp libffi zlib ~~~ === "CentOS" ~~~text sudo yum install perl make automake gcc gmp-devel libffi zlib zlib-devel xz tar git gnupg ~~~ === "Debian" ~~~text sudo apt-get install g++ gcc libc6-dev libffi-dev libgmp-dev make xz-utils zlib1g-dev git gnupg netbase ~~~ === "Fedora" ~~~text sudo dnf install perl make automake gcc gmp-devel libffi zlib zlib-devel xz tar git gnupg ~~~ === "Gentoo" Ensure you have the `ncurses` package with `USE=tinfo`. Without it, Stack will not be able to install GHC. === "Ubuntu" ~~~text sudo apt-get install g++ gcc libc6-dev libffi-dev libgmp-dev make xz-utils zlib1g-dev git gnupg netbase ~~~ === "AArch64" * Click [:material-cloud-download-outline:](https://get.haskellstack.org/stable/linux-aarch64.tar.gz) to download an archive file with the latest release. * Extract the archive and place the `stack` executable somewhere on your PATH (see the [Path](#path) section below). * Ensure you have the required system dependencies installed. These include GCC, GNU Make, xz, perl, libgmp, libffi, and zlib. We also recommend Git and GPG. 
The installation of system dependencies will depend on the package manager for your Linux distribution. Notes are provided for Arch Linux, CentOS, Debian, Fedora, Gentoo and Ubuntu. === "Arch Linux" ~~~text sudo pacman -S make gcc ncurses git gnupg xz zlib gmp libffi zlib ~~~ === "CentOS" ~~~text sudo yum install perl make automake gcc gmp-devel libffi zlib zlib-devel xz tar git gnupg ~~~ === "Debian" ~~~text sudo apt-get install g++ gcc libc6-dev libffi-dev libgmp-dev make xz-utils zlib1g-dev git gnupg netbase ~~~ === "Fedora" ~~~text sudo dnf install perl make automake gcc gmp-devel libffi zlib zlib-devel xz tar git gnupg ~~~ === "Gentoo" Ensure you have the `ncurses` package with `USE=tinfo`. Without it, Stack will not be able to install GHC. === "Ubuntu" ~~~text sudo apt-get install g++ gcc libc6-dev libffi-dev libgmp-dev make xz-utils zlib1g-dev git gnupg netbase ~~~ ### Linux packages Some Linux distributions have official or unofficial packages for Stack, including Arch Linux, Debian, Fedora, NixOS, openSUSE/SUSE Linux Enterprise, and Ubuntu. However, the Stack version available as a Linux package may lag behind Stack's current version and, in some cases, the lag may be significant. !!! info "Linux packages that lag behind Stack's current version" If the Stack version available as a Linux package lags behind Stack's current version, using `stack upgrade --binary-only` is recommended after installing it. === "Arch Linux" The Arch extra package repository provides an official x86_64 [package](https://www.archlinux.org/packages/extra/x86_64/stack/). You can install it with the command: ~~~text sudo pacman -S stack ~~~ The Arch User Repository (AUR) also provides: * a [`stack-bin` package](https://aur.archlinux.org/packages/stack-bin); and * a [`stack-static` package](https://aur.archlinux.org/packages/stack-static) === "Debian" There are Debian [packages](https://packages.debian.org/search?keywords=haskell-stack&searchon=names&suite=all&section=all) for Buster and up.
However, the distribution's Stack version lags behind. === "Fedora" Fedora includes Stack, but its Stack version may lag behind. === "NixOS" Users who follow the `nixos-unstable` channel or the Nixpkgs `master` branch can install the latest Stack release into their profile with the command: ~~~text nix-env -f "<nixpkgs>" -iA stack ~~~ Alternatively, the package can be built from source as follows. 1. Clone the git repo, with the command: ~~~text git clone https://github.com/commercialhaskell/stack.git ~~~ 2. Create a `shell.nix` file with the command: ~~~text cabal2nix --shell ./. --no-check --no-haddock > shell.nix ~~~ Note that the tests fail on NixOS, so disable them with `--no-check`. Also, Haddock currently doesn't work for Stack, so `--no-haddock` disables it. 3. Install Stack to your user profile with the command: ~~~text nix-env -i -f shell.nix ~~~ For more information on using Stack together with Nix, please see the [NixOS manual section on Stack](http://nixos.org/nixpkgs/manual/#how-to-build-a-haskell-project-using-stack). === "SUSE" There is also an unofficial package for openSUSE or SUSE Linux Enterprise. Its Stack version may lag behind. To install it: === "openSUSE Tumbleweed" ~~~text sudo zypper in stack ~~~ === "openSUSE Leap" ~~~text sudo zypper ar http://download.opensuse.org/repositories/devel:/languages:/haskell/openSUSE_Leap_42.1/devel:languages:haskell.repo sudo zypper in stack ~~~ === "SUSE Linux Enterprise 12" ~~~text sudo zypper ar http://download.opensuse.org/repositories/devel:/languages:/haskell/SLE_12/devel:languages:haskell.repo sudo zypper in stack ~~~ === "Ubuntu" There are Ubuntu [packages](http://packages.ubuntu.com/search?keywords=haskell-stack&searchon=names&suite=all&section=all) for Ubuntu 20.04 and up. It is possible to set up auto-completion of Stack commands. For further information, see the [shell auto-completion](shell_autocompletion.md) documentation.
=== "macOS" Most users of Stack on macOS will also have up to date tools for software development (see [Xcode Command Line Tools](#xcode-command-line-tools) below). From late 2020, Apple began a transition from Mac computers with Intel processors (Intel-based Mac) to [Mac computers with Apple silicon](https://support.apple.com/en-gb/HT211814). === "Intel-based" Intel-based Mac computers have processors with x86_64 architectures. For most Intel-based Mac computers, the easiest way to install Stack directly (rather than use GHCup) is to command: ~~~text curl -sSL https://get.haskellstack.org/ | sh ~~~ or: ~~~text wget -qO- https://get.haskellstack.org/ | sh ~~~ !!! note The script at [get.haskellstack.org](https://get.haskellstack.org/) will ask for root access using `sudo`. It needs such access in order to use your platform's package manager to install dependencies and to install to `/usr/local/bin`. If you prefer more control, follow the manual installation instructions below. !!! info We generally test on the current version of macOS and do our best to keep it compatible with the three most recent major versions. Stack may also work on older versions. ### Manual download * Click [:material-cloud-download-outline:](https://get.haskellstack.org/stable/osx-x86_64.tar.gz) to download an archive file with the latest release for x86_64 architectures. * Extract the archive and place `stack` somewhere on your PATH (see the [Path](#path) section below). * Now you can run Stack from the command line in a terminal. === "Apple silicon" Mac computers with Apple silicon have an M1, M1 Pro, M1 Max, M1 Ultra or M2 chip. These chips use an architecture known as ARM64 or AArch64. For Mac computers with Apple silicon, the easiest way to install Stack directly (rather than use GHCup) is to command: ~~~text curl -sSL https://get.haskellstack.org/ | sh ~~~ or: ~~~text wget -qO- https://get.haskellstack.org/ | sh ~~~ !!! 
note The script at [get.haskellstack.org](https://get.haskellstack.org/) will ask for root access using `sudo`. It needs such access in order to use your platform's package manager to install dependencies and to install to `/usr/local/bin`. If you prefer more control, follow the manual installation instructions below. The installation of Stack or some packages (e.g. `network`) requiring C source compilation might fail with `configure: error: C compiler cannot build executables`. In that case you should pass `-arch arm64` as part of the `CFLAGS` environment variable. This setting will be picked up by the C compiler of your choice. ~~~bash # Assuming BASH below # passing CFLAGS in-line with the command giving rise to the error CFLAGS="-arch arm64 ${CFLAGS:-}" some_command_to_install_stack CFLAGS="-arch arm64 ${CFLAGS:-}" stack [build|install] # -- OR -- # ~/.bash_profile # NOTE: only do this if you do not have to cross-compile, or remember to unset # CFLAGS when needed export CFLAGS="-arch arm64 ${CFLAGS:-}" ~~~ The setting instructs the C compiler to compile objects for ARM64. These can then be linked with libraries built for ARM64. Without the instruction, the C compiler, invoked by Cabal running in x86-64, would compile x86-64 objects and attempt to link them with existing ARM64 libraries, resulting in the error above. ### Manual download * Click [:material-cloud-download-outline:](https://get.haskellstack.org/stable/osx-aarch64.tar.gz) to download an archive file with the latest release for AArch64 architectures. * Extract the archive and place `stack` somewhere on your PATH (see the [Path](#path) section below). * Now you can run Stack from the command line in a terminal. ### LLVM The documentation for each version of GHC identifies the versions of LLVM that are supported. 
That is summarised in the table below for recent versions of GHC: |GHC version|LLVM versions| |-----------|-------------| |9.8.2 |11 to 15 | |9.6.5 |11 to 15 | |9.4.8 |10 to 14 | |9.2.8 |9 to 12 | |9.0.2 |9, 10 or 12 | |8.10.7 |9 to 12 | |8.8.4 |7 | |8.6.5 |6 | |8.4.4 |5 | ### Using Homebrew [Homebrew](https://brew.sh/) is a popular package manager for macOS. If you have its `brew` tool installed, you can just command: ~~~text brew install haskell-stack ~~~ * The Homebrew formula and bottles are **unofficial** and lag slightly behind new Stack releases, but tend to be updated within a day or two. * Normally, Homebrew will install from a pre-built binary (aka "pour from a bottle"), but if it starts trying to build everything from source (which will take hours), see [their FAQ on the topic](https://github.com/Homebrew/brew/blob/master/docs/FAQ.md#why-do-you-compile-everything). ### Xcode Command Line Tools macOS does not come with all the tools required for software development but a collection of useful tools, known as the Xcode Command Line Tools, is readily available. A version of that collection is provided with each version of Xcode (Apple’s integrated development environment) and can also be obtained from Apple separately from Xcode. The collection also includes the macOS SDK (software development kit). The macOS SDK provides header files for macOS APIs. If you use a command that refers to a common Xcode Command Line Tool and the Xcode Command Line Tools are not installed, macOS may prompt you to install the tools. macOS also comes with a command line tool, `xcode-select`, that can be used to obtain the Xcode Command Line Tools. Command `xcode-select --print-path` to print the path to the currently selected (active) developer directory. If the directory does not exist, or is empty, then the Xcode Command Line Tools are not installed. 
If the Xcode Command Line Tools are not installed, command `xcode-select --install` to open a user interface dialog to request automatic installation of the tools. An upgrade of macOS may sometimes require the existing Xcode Command Line Tools to be uninstalled and an updated version of the tools to be installed. The existing tools can be uninstalled by deleting the directory reported by `xcode-select --print-path`. If, after the installation of Stack, running `stack setup` fails with `configure: error: cannot run C compiled programs.` that indicates that the Xcode Command Line Tools are not installed. If building fails with messages that `*.h` files are not found, that may also indicate that Xcode Command Line Tools are not up to date. Xcode 10 provided an SDK for macOS 10.14 (Mojave) and [changed the location](https://developer.apple.com/documentation/xcode-release-notes/xcode-10-release-notes#Command-Line-Tools) of the macOS system headers. As a workaround, an extra package was provided by Apple which installed the headers to the base system under `/usr/include`. ### Auto-completion of Stack commands It is possible to set up auto-completion of Stack commands. For further information, see the [shell auto-completion](shell_autocompletion.md) documentation. === "Windows" On 64-bit Windows, the easiest way to install Stack directly (rather than use GHCup) is to download and use the [Windows installer](https://get.haskellstack.org/stable/windows-x86_64-installer.exe). !!! info "Stack root" By default, the Windows installer will set the Stack root by setting the `STACK_ROOT` environment variable to `C:\sr`. !!! warning "Long user PATH environment variable" The Windows installer for Stack 2.9.1, 2.9.3 and 2.11.1 (only) will replace the user `PATH` environment variable (rather than append to it) if a 1024 character limit is exceeded. If the content of your existing user `PATH` is long, preserve it before running the installer. !!! 
note "Anti-virus software" Systems with antivirus software may need to add Stack to the list of 'trusted' applications. You may see a "Windows Defender SmartScreen prevented an unrecognized app from starting" warning when you try to run the installer. If so, click on **More info**, and then click on the **Run anyway** button that appears. We recommend installing to the default location with the installer, as that will make `stack install` and `stack upgrade` work correctly out of the box. ### Manual download * Click [:material-cloud-download-outline:](https://get.haskellstack.org/stable/windows-x86_64.zip) to download an archive file with the latest release. * Unpack the archive and place `stack.exe` somewhere on your PATH (see the [Path](#path) section below). * Now you can run Stack from the command line in a terminal. ## Path You can install Stack by copying the executable file anywhere on your PATH. A good place to install is the same directory where Stack itself will install executables, which depends on the operating system: === "Unix-like" Stack installs executables to: ~~~text $HOME/.local/bin ~~~ If you don't have that directory in your PATH, you may need to update your PATH. That can be done by editing the `~/.bashrc` file. === "Windows" Stack installs executables to: ~~~text %APPDATA%\local\bin ~~~ For example: `C:\Users\\AppData\Roaming\local\bin`. If you don't have that directory in your PATH, you may need to update your PATH. That can be done by searching for 'Edit Environment variables for your account' under Start. !!! note If you used [GHCup](https://www.haskell.org/ghcup/) to install Stack, GHCup puts executable files in the `bin` directory in the GHCup root directory. ## China-based users If you're attempting to install Stack from within China: * As of 24 February 2020, the download link has limited connectivity from within mainland China. 
If this is the case, please proceed by manually downloading (ideally via a VPN) and installing Stack per the instructions found on this page pertinent to your operating system. * After installation, your `config.yaml` file will need to be configured before Stack can download large files consistently from within China (without reliance on a VPN). Please add the following to the bottom of the `config.yaml` file: ~~~yaml ###ADD THIS IF YOU LIVE IN CHINA setup-info-locations: - "http://mirrors.tuna.tsinghua.edu.cn/stackage/stack-setup.yaml" urls: latest-snapshot: http://mirrors.tuna.tsinghua.edu.cn/stackage/snapshots.json package-indices: - download-prefix: http://mirrors.tuna.tsinghua.edu.cn/hackage/ ~~~ ## Using an HTTP proxy To use Stack behind a HTTP proxy with IP address *IP* and port *PORT*, first set up an environment variable `http_proxy` and then run the Stack command. For example: === "Unix-like" ~~~text export http_proxy=IP:PORT stack install ~~~ On most operating systems, it is not mandatory for programs to follow the "system-wide" HTTP proxy. Some programs, such as browsers, do honor this "system-wide" HTTP proxy setting, while other programs, including Bash, do not. That means configuring "http proxy setting" in your System Preferences (macOS) would not result in Stack traffic going through the proxy. === "Windows" ~~~text $Env:http_proxy=IP:PORT stack install ~~~ It is not mandatory for programs to follow the "system-wide" HTTP proxy. Some programs, such as browsers, do honor this "system-wide" HTTP proxy setting, while other programs do not. That means configuring "http proxy setting" in your Control Panel would not result in Stack traffic going through the proxy. ## Upgrade Stack There are different approaches to upgrading Stack, which vary as between Unix-like operating systems (including macOS) and Windows. !!! note If you used [GHCup](https://www.haskell.org/ghcup/) to install Stack, you should also use GHCup to upgrade Stack. 
GHCup uses an executable named `stack` to manage versions of Stack, through a file `stack.shim`. Stack will likely overwrite the executable on upgrade. === "Unix-like" There are essentially four different approaches: 1. The `stack upgrade` command, which downloads a Stack executable, or builds it from source, and installs it to Stack's 'local-bin' directory (see `stack path --local-bin`). If different and permitted, it also installs a copy in the directory of the current Stack executable. (If copying is not permitted, copy `stack` from Stack's 'local-bin' directory to the system location afterward.) You can use `stack upgrade` to get the latest official release, and `stack upgrade --git` to install from GitHub and live on the bleeding edge. Make sure the location of the Stack executable is on the PATH. See the [Path](#Path) section above. 2. If you're using a package manager and are happy with sticking with the officially released binaries from the distribution (which may the lag behind the latest version of Stack significantly), simply follow your normal package manager strategies for upgrading. For example: ~~~text apt-get update apt-get upgrade ~~~ 3. The `get.haskellstack.org` script supports the `-f` argument to over-write the current Stack executable. For example, command: ~~~text curl -sSL https://get.haskellstack.org/ | sh -s - -f ~~~ or: ~~~text wget -qO- https://get.haskellstack.org/ | sh -s - -f ~~~ 4. Manually follow the steps above to download the newest executable from the GitHub releases page and replace the old executable. === "Windows" There are essentially two different approaches: 1. The `stack upgrade` command, which downloads a Stack executable, or builds it from source, and installs it to Stack's 'local-bin' directory (see `stack path --local-bin`). If different and permitted, it also installs a copy in the directory of the current Stack executable. 
(If copying is not permitted, copy `stack` from Stack's 'local-bin' directory to the system location afterward.) You can use `stack upgrade` to get the latest official release, and `stack upgrade --git` to install from GitHub and live on the bleeding edge. Make sure the location of the Stack executable is on the PATH. See the [Path](#Path) section above. 2. Manually follow the steps above to download the newest executable from the GitHub releases page and replace the old executable. ## Install earlier versions To install a specific version of Stack, navigate to the desired version on the [GitHub release page](https://github.com/commercialhaskell/stack/releases), and click the appropriate link under its "Assets" drop-down menu. Alternatively, use the URL `https://github.com/commercialhaskell/stack/releases/download/vVERSION/stack-VERSION-PLATFORM.EXTENSION`. For example, the tarball for Stack version 2.1.0.1, osx-x86_64 is at `https://github.com/commercialhaskell/stack/releases/download/v2.1.0.1/stack-2.1.0.1-osx-x86_64.tar.gz`. Here's a snippet for `appveyor.yml` files, borrowed from `dhall`'s [`appveyor.yml`](https://github.com/dhall-lang/dhall-haskell/blob/1079b7a3a7a6922f72a373e47daf6f1b74f128b1/appveyor.yml). Change the values of PATH and VERSION as needed. ~~~yaml install: - set PATH=C:\Program Files\Git\mingw64\bin;%PATH% - curl --silent --show-error --output stack.zip --location "https://github.com/commercialhaskell/stack/releases/download/v%STACK_VERSION%/stack-%STACK_VERSION%-windows-x86_64.zip" - 7z x stack.zip stack.exe - stack setup > nul - git submodule update --init --recursive ~~~ stack-2.15.7/doc/list_command.md0000644000000000000000000000425014620153474014650 0ustar0000000000000000
# The `stack list` command [:octicons-tag-24: 2.7.1](https://github.com/commercialhaskell/stack/releases/tag/v2.7.1) ~~~text stack list [PACKAGE] ~~~ `stack list ` will send to the standard output stream the latest version of the package from Hackage. If the package name cannot be found on Hackage, even after updating the package index, suggestions (not necessarily good ones) will be made about the intended package name. `stack --snapshot list ` will send to the standard output stream the version of the package in the specified snapshot, unless the package comes with GHC on Unix-like operating systems. If the package name cannot be found in the snapshot, the command will fail, identifying only the package(s) that did not appear in the snapshot. More than one package name can be specified. `stack --snapshot list` will send to the standard output stream a list of all the packages in the specified snapshot, except those which come with GHC on Unix-like operating systems. For example: ~~~text stack list base unix Win32 acme-missiles pantry base-4.19.0.0 unix-2.8.5.0 Win32-2.13.4.0 acme-missiles-0.3 pantry-0.9.3.1 stack list paltry Could not find package paltry, updating ... Package index cache populated Error: [S-4926] * Could not find package paltry on Hackage. Perhaps you meant one of: pantry, pretty, pasty, xattr, alloy, para, pappy, alure, polar and factory. stack --snapshot lts-22.21 list base unix Win32 acme-missiles pantry Error: [S-4926] * Package does not appear in snapshot: base. * Package does not appear in snapshot: unix. * Package does not appear in snapshot: Win32. * Package does not appear in snapshot: acme-missiles. stack --snapshot lts-22.21 list base unix Win32 pantry base-4.18.2.1 unix-2.8.4.0 Win32-2.13.3.0 pantry-0.9.3.2 stack --snapshot lts-22.21 list AC-Angle-1.0 ALUT-2.4.0.3 ... zstd-0.1.3.0 zxcvbn-hs-0.3.6 ~~~ stack-2.15.7/doc/lock_files.md0000644000000000000000000001550514604306200014303 0ustar0000000000000000
# Lock Files Stack attempts to provide reproducible build plans. This involves reproducibly getting the exact same contents of source packages and configuration options (like Cabal flags and GHC options) for a given set of input files. There are a few problems with making this work: * Entering all of the information to fully provide reproducibility is tedious. This would include things like Hackage revisions, hashes of remote tarballs, etc. Users don't want to enter this information. * Many operations in Stack rely upon a "snapshot hash," which transitively includes the completed information for all of these dependencies. If any of that information is missing when parsing the `stack.yaml` file or snapshot files, it could be expensive for Stack to calculate it. To address this, we follow the (fairly standard) approach of having a _lock file_. The goal of the lock file is to cache completed locations of project, snapshot packages and snapshots themselves so that: * These files can be stored in source control * Users on other machines can reuse these lock files and get identical build plans given that the used project packages and local snapshots are the same on those machines * Rerunning `stack build` in the future is deterministic in the build plan, not depending on mutable state in the world like Hackage revisions !!! note If, for example, a tarball available remotely is deleted or the hash changes, it will not be possible for Stack to perform the build. However, by deterministic, we mean it either performs the same build or fails, never accidentally doing something different. This document explains the contents of a lock file, how they are used, and how they are created and updated. 
## stack.yaml and snapshot files Relevant to this discussion, Stack's project-level configuration file (`stack.yaml`, by default) specifies: * the parent snapshot (the [`snapshot`](yaml_configuration.md#snapshot) or [`resolver`](yaml_configuration.md#resolver) key) * extra-deps Some of this information can be incomplete. Consider this `stack.yaml` file: ~~~yaml snapshot: lts-19.22 packages: - . extra-deps: - acme-missiles-0.3 ~~~ This information is _incomplete_. For example, the extra-deps may change in the future. Instead, you could specify enough information in the `stack.yaml` file to fully resolve that package. That looks like: ~~~yaml extra-deps: - hackage: acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1,613 pantry-tree: size: 226 sha256: 614bc0cca76937507ea0a5ccc17a504c997ce458d7f2f9e43b15a10c8eaeb033 ~~~ The `lts-19.22` information is also incomplete. While we assume in general that Haskell LTS snapshots never change, there's nothing that prohibits that from happening. Instead, the complete version of that key is: ~~~yaml snapshot: - url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/19/22.yaml size: 619399 sha256: 5098594e71bdefe0c13e9e6236f12e3414ef91a2b89b029fd30e8fc8087f3a07 ~~~ Users don't particularly feel like writing all of that. Therefore, it's common to see _incomplete_ information in a `stack.yaml` file. ## Recursive snapshot layers Snapshot files can be _recursive_, where `stack.yaml` refers to `foo.yaml`, which refers to `bar.yaml`, which refers to `baz.yaml`. A local snapshot file can refer to a remote snapshot file (available via an HTTP(S) URL). We need to encode information from _all_ of these snapshot layers and the `stack.yaml` file in the lock file, to ensure that we can detect if anything changes. ## Performance In addition to acting as a pure correctness mechanism, the design of a lock file given here also works as a performance improvement. 
Instead of requiring that all snapshot files be fully parsed on each Stack invocation, we can store information in the lock file and bypass parsing of the additional files in the common case of no changes. ## Lock file contents The lock file contains the following information: * Completed package locations for extra-deps and packages in snapshot files !!! note This only applies to _immutable_ packages. Mutable packages are not included in the lock file. * Completed information for the snapshot locations It looks like the following: ~~~yaml # Lock file, some message about the file being auto-generated snapshots: # Starts with the snapshot specified in stack.yaml, # then continues with the snapshot specified in each # subsequent snapshot file - original: foo.yaml # raw content specified in a snapshot file completed: file: foo.yaml sha256: XXXX size: XXXX - original: lts-13.9 completed: size: 496662 url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/13/9.yaml sha256: 83de9017d911cf7795f19353dba4d04bd24cd40622b7567ff61fc3f7223aa3ea packages: - original: https://hackage.haskell.org/package/acme-missiles-0.3.tar.gz completed: size: 1442 url: https://hackage.haskell.org/package/acme-missiles-0.3.tar.gz name: acme-missiles version: '0.3' sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b pantry-tree: size: 226 sha256: 614bc0cca76937507ea0a5ccc17a504c997ce458d7f2f9e43b15a10c8eaeb033 ~~~ ## Creation procedure Whenever a project-level configuration file (`stack.yaml`, by default) is loaded, Stack checks for a lock file in the same file path, with a `.lock` extension added. For example, if you command: ~~~text stack --stack-yaml my-stack.yaml build ~~~ or ~~~text stack --stack-yaml my-stack.yaml build --dry-run ~~~ then Stack will use a lock file in the location `my-stack.yaml.lock`. For the rest of this document, we'll assume that the files are simply `stack.yaml` and `stack.yaml.lock`. 
If the lock file does not exist, subject to Stack's [`--lock-file`](global_flags.md#-lock-file-option) option, it will be created by: * Loading the `stack.yaml` * Loading all snapshot files * Completing all missing information * Writing out the new `stack.yaml.lock` file to the disk ## Update procedure Whenever a project-level configuration file (`stack.yaml`, by default) is loaded, all completed package or snapshot locations (even those completed using information from a lock file) get collected to form a new lock file in memory. Subject to Stack's [`--lock-file`](global_flags.md#-lock-file-option) option, that new lock file is compared against the one on disk and, if there are any differences, written out to the disk. stack-2.15.7/doc/ls_command.md0000644000000000000000000001725714620153445014324 0ustar0000000000000000
# The `stack ls` commands ~~~text stack ls COMMAND Available commands: dependencies View the dependencies snapshots View snapshots (local by default) stack-colors View Stack's output styles stack-colours View Stack's output styles (alias for 'stack-colors') tools View Stack's installed tools ~~~ The `stack ls` commands list different types of information. Command `stack ls` for the available commands. ## The `stack ls dependencies` command Either ~~~text stack ls dependencies COMMAND Available commands: cabal Print dependencies as exact Cabal constraints json Print dependencies as JSON text Print dependencies as text (default) tree Print dependencies as tree ~~~ or ~~~text stack ls dependencies [--separator SEP] [--[no-]license] [--filter ITEM] [--[no-]external] [--[no-]include-base] [--depth DEPTH] [--prune PACKAGES] [TARGET] [--flag PACKAGE:[-]FLAG] [--test] [--bench] [--global-hints] ~~~ `stack ls dependencies` lists all of the packages and versions used for a project. All project packages are considered by default, but a target can be specified as an argument. For further information, see the [target syntax](build_command.md#target-syntax) documentation. Subcommands specify the format of the output, as follows: * `cabal` lists the packages in the format of exact Cabal constraints. ~~~text stack ls dependencies cabal [--[no-]external] [--[no-]include-base] [--depth DEPTH] [--prune PACKAGES] [TARGET] [--flag PACKAGE:[-]FLAG] [--test] [--bench] [--global-hints] ~~~ For example (extract): ~~~text constraints: , Cabal ==3.6.3.0 , Cabal-syntax ==3.6.0.0 , Glob ==0.10.2 ~~~ * `json` lists dependencies in JSON format (an array of objects). 
~~~text stack ls dependencies json [--[no-]external] [--[no-]include-base] [--depth DEPTH] [--prune PACKAGES] [TARGET] [--flag PACKAGE:[-]FLAG] [--test] [--bench] [--global-hints] ~~~ For example (extract): ~~~text [{"dependencies":["base","bytestring"],"license":"BSD3","location":{"type":"hackage","url":"https://hackage.haskell.org/package/zlib-0.6.3.0"},"name":"zlib","version":"0.6.3.0"}, ~~~ Each object has the following keys: ~~~json name: zlib version: 0.6.3.0 location: type: hackage url: https://hackage.haskell.org/package/zlib-0.6.3.0 licence: BSD3 dependencies: - base - bytestring ~~~ * `text` (the default) lists the packages, each on a separate line. ~~~text stack ls dependencies text [--separator SEP] [--[no-]license] [--filter ITEM] [--[no-]external] [--[no-]include-base] [--depth DEPTH] [--prune PACKAGES] [TARGET] [--flag PACKAGE:[-]FLAG] [--test] [--bench] [--global-hints] ~~~ For example (extract): ~~~text Cabal 3.6.3.0 Cabal-syntax 3.6.0.0 Glob 0.10.2 ~~~ * `tree` lists dependencies in the format of a tree. ~~~text stack ls dependencies tree [--separator SEP] [--[no-]license] [--[no-]external] [--[no-]include-base] [--depth DEPTH] [--prune PACKAGES] [TARGET] [--flag PACKAGE:[-]FLAG] [--test] [--bench] [--global-hints] ~~~ For example (extract): ~~~text Packages └─┬ stack 2.10.0 ├─┬ Cabal 3.6.3.0 │ ├─┬ Win32 2.12.0.1 │ │ ├─┬ base 4.16.3.0 │ │ │ ├─┬ ghc-bignum 1.2 │ │ │ │ └─┬ ghc-prim 0.8.0 │ │ │ │ └── rts 1.0.2 │ │ │ ├─┬ ghc-prim 0.8.0 ~~~ The `--separator` option, with the `text` or `tree` subcommand, specifies the separator between the package name and its version. The default is a space character. Set the `--license` flag, after the `text` or `tree` subcommand, to replace each package's version with its licence. (Consistent with the Cabal package description format specification, only the American English spelling (license) is accepted.) 
The `--filter` option, with the `text` subcommand, specifies an item to be filtered out from the results, if present. An item can be `$locals` (for all project packages) or a package name. It can be specified multiple times. !!! note The special value `$locals` will need to be enclosed with single quotes to distinguish it from a shell variable. Set the `--no-external` flag to exclude external dependencies. Set the `--no-include-base` flag to exclude dependencies on the `base` package. The `--depth` option limits the depth of dependency resolution. The `--prune ` option prunes the specified packages and their dependencies from the tree of packages used to generate the output, where `` is a comma separated list of package names. The `--flag` option allows Cabal flags to be specified. Pass the `--test` flag to consider the dependencies of test suite components. Pass the `--bench` flag to consider the dependencies of benchmark components. Pass the `--global-hints` flag to use a hints file for global packages. The command then does not require an installed GHC. ## The `stack ls snapshots` command ~~~text stack ls snapshots [COMMAND] [-l|--lts] [-n|--nightly] Available commands: local View local snapshots remote View remote snapshots ~~~ `stack ls snapshots` will list all the local snapshots by default. You can also view the remote snapshots using `stack ls snapshots remote`. It also supports options for viewing only lts (`-l`) and nightly (`-n`) snapshots. ## The `stack ls stack-colors` command ~~~text stack ls stack-colors [--[no-]basic] [--[no-]sgr] [--[no-]example] ~~~ The British English spelling is also accepted (`stack ls stack-colours`). `stack ls stack-colors` will list all of Stack's output styles. A number of different formats for the output are available, see `stack ls stack-colors --help`. The default is a full report, with the equivalent SGR instructions and an example of the applied style. 
The latter can be disabled with flags `--no-sgr` and `--no-example`. The flag `--basic` specifies a more basic report, in the format that is accepted by Stack's command line option `--stack-colors` and the YAML configuration key `stack-colors`. ## The `stack ls tools` command ~~~text stack ls tools [--filter TOOL_NAME] ~~~ `stack ls tools` will list Stack's installed tools. On Unix-like operating systems, they will be one or more versions of GHC. On Windows, they will include MSYS2. For example, on Windows the command: ~~~text stack ls tools ~~~ yields output like: ~~~text ghc-9.4.1 ghc-9.2.4 ghc-9.0.2 msys2-20210604 ~~~ The `--filter ` option will filter the output by a tool name (e.g. 'ghc', 'ghc-git' or 'msys2'). The tool name is case sensitive. For example the command: ~~~text stack ls tools --filter ghc ~~~ yields output like: ~~~text ghc-9.4.1 ghc-9.2.4 ghc-9.0.2 ~~~ stack-2.15.7/doc/new_command.md0000644000000000000000000001134414604306200014455 0ustar0000000000000000
# The `stack new` command ~~~text stack new PACKAGE_NAME [--bare] [--[no-]init] [TEMPLATE_NAME] [-p|--param KEY:VALUE] [DIR(S)] [--omit-packages] [--force] [--ignore-subdirs] ~~~ `stack new` creates a new project using a project template. By default: * the project is created in a new directory named after the package. Pass the `--bare` flag to create the project in the current directory; * the project is initialised for use with Stack. Pass the `--no-init` flag to skip such initialisation; and * the project template is the one specified by the [default-template](yaml_configuration.md#default-template) option. A package name acceptable to Cabal comprises an alphanumeric 'word'; or two or more such words, with the words separated by a hyphen/minus character (`-`). A word cannot be comprised only of the characters `0` to `9`. An alphanumeric character is one in one of the Unicode Letter categories (Lu (uppercase), Ll (lowercase), Lt (titlecase), Lm (modifier), or Lo (other)) or Number categories (Nd (decimal), Nl (letter), or No (other)). !!! note In the case of Hackage and acceptable package names, an alphanumeric character is limited to one of `A` to `Z`, `a` to `z`, and `0` to `9`. !!! note The name of a project is not constrained to be an acceptable package name. A single-package project can be renamed to differ from the name of its package. The `--param :` option specifies a key-value pair to populate a key in a template. The option can be specified multiple times. The arguments specifying directories and the `--ignore-subdirs`, `--force` and `--omit-packages` flags are as for the [`stack init` command](init_command.md). These arguments are ignored if the `--no-init` flag is passed. If a snapshot is specified at the command line and the project is initialised for use with Stack, `stack new` will try to use it. 
For further information, see the documentation for the [`--snapshot`](global_flags.md#-snapshot-option) and [`--resolver`](global_flags.md#-resolver-option) options. ## Project templates A project template file can be located in a repository named `stack-templates` on GitHub, GitLab or Bitbucket; at a URL; or on the local file system. Project template file names have the extension `.hsfiles`. The extension does not need to be specified with `stack new`. A project template file `my-template.hsfiles` in a repository `username/stack-templates` on GitHub, GitLab or Bitbucket can be specified with `stack new` as: ~~~test :username/my-template ~~~ where `` is one of `github` for [GitHub](https://github.com/), `gitlab` for [GitLab](https://gitlab.com), or `bitbucket` for [Bitbucket](https://bitbucket.com). The default service is GitHub, the default username is `commercialhaskell` and the default project template name is `new-template`. ## Examples Create a project for package `my-project` in new directory `my-project` with the default project template file and initialise it for use with Stack: ~~~text stack new my-project ~~~ Create a project for package `my-package` in the current directory with the default project template file and initialise it for use with Stack: ~~~text stack new my-package --bare ~~~ Create a project with the `rio` project template at the default repository and initialise it for use with Stack: ~~~text stack new my-project rio ~~~ Create a project with the `mysql` project template provided by the `yesodweb/stack-templates` repository on GitHub and initialise it for use with Stack: ~~~text stack new my-project yesodweb/mysql ~~~ Create a project with the `my-template` project template provided by the `username/stack-templates` repository on Bitbucket and initialise it for use with Stack: ~~~text stack new my-project bitbucket:username/my-template ~~~ Create a project with the `my-template.hsfiles` project template file at `https://example.com` and 
initialise it for use with Stack: ~~~text stack new my-project https://example.com/my-template ~~~ Create a project with the local project template file `/my-template.hsfiles` and initialise it for use with Stack: ~~~text stack new my-project /my-template ~~~ Create a project with the `simple` project template file at the default repository (which does not use Hpack and a `package.yaml` file) and do not initialise it for use with Stack (`stack init` could be used subsequently): ~~~text stack new my-project --no-init simple ~~~ stack-2.15.7/doc/nix_integration.md0000644000000000000000000004412114620153600015370 0ustar0000000000000000
# Nix integration [:octicons-tag-24: 0.1.10.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.10.0) [Nix](https://nixos.org/) is a purely functional package manager. Stack can be configured to integrate with Nix. Integration provides these benefits: * more reproducible builds. This is because fixed versions of any system libraries and commands required to build the project are automatically built using Nix and managed locally for each project. These system packages never conflict with any existing versions of these libraries on your system. That they are managed locally to the project means that you don't need to alter your system in any way to build any odd project pulled from the Internet; and * implicit sharing of system packages between projects. This means you don't have more copies on-disk than you need. The Nix package manager is a pre-requisite for integration. On Linux (including Windows Subsystem for Linux) and macOS, it can be downloaded and installed from the [Nix download page](https://nixos.org/download.html). When integrated with Nix, Stack handles Haskell dependencies as it usually does and the Nix package manager handles the _non-Haskell_ dependencies needed by the Haskell packages. Stack downloads Haskell packages from [Stackage](https://www.stackage.org/lts) and builds them locally. Stack uses Nix to download [Nix packages][nix-search-packages]. These provide the GHC compiler and external C libraries that you would normally install manually. Nix's `nix-shell` starts an interactive shell based on a Nix expression. Stack can automatically create a Nix build environment in the background using `nix-shell`. There are two alternative options to create such a build environment: 1. provide a list of [Nix packages][nix-search-packages] 2. provide a `shell.nix` file that gives you more control over the libraries and tools available inside the shell. A `shell.nix` file requires writing code in Nix's [custom language][nix-language]. 
Use this option only if you know Nix and have special requirements, such as using custom Nix packages that override the standard ones or using system libraries with special requirements. ### Checking the Nix installation Once Nix is installed, the Nix commands (`nix-shell` etc) should be available. If they are not, it could be because the file `$HOME/.nix-profile/etc/profile.d/nix.sh` is not sourced by your shell. You should either: 1. run `source ~/.nix-profile/etc/profile.d/nix.sh` each time you open a terminal and need Nix; or 2. add the command `source ~/.nix-profile/etc/profile.d/nix.sh` to your `~/.bashrc` or `~/.bash_profile` file. A Nix path can be specified between angle brackets, e.g. ``, and the directories listed in the `NIX_PATH` environment variable will be searched for the given file or directory name. Stack makes use of path ``. From Nix 2.4, `NIX_PATH` is not set by `nix.sh`. If `NIX_PATH` is not set, Nix will fall back to (first) `$HOME/.nix-defexpr/channels` in impure and unrestricted evaluation mode. However, Stack may use a pure Nix mode (see further [below](#pure-and-impure-nix-shells)). That directory can be appended to `NIX_PATH` with `export NIX_PATH=${NIX_PATH:+$NIX_PATH:}$HOME/.nix-defexpr/channels`. For information about how Stack itself can configure `NIX_PATH`, see further [below](#nix-package-sources). ### Enable Nix integration On NixOS, Nix integration is enabled by default; on other operating systems it is disabled. To enable Nix integration, add the following section to your Stack YAML configuration file (`stack.yaml` or `config.yaml`): ~~~yaml nix: enable: true # false by default, except on NixOS ~~~ The equivalent command line flag (which will prevail) is `--[no-]nix`. Passing any `--nix-*` option on the command line will imply the `--nix` option. If Nix integration is not enabled, Stack will notify the user if a `nix` executable is on the PATH. 
If that notification is unwanted, it can be muted by setting Stack's configuration option [`notify-if-nix-on-path`](yaml_configuration.md#notify-if-nix-on-path) to `false`. With Nix integration enabled, `stack build` and `stack exec` will automatically launch themselves in a local build environment (using `nix-shell` behind the scenes). It is not necessary to run `stack setup`, unless you want to cache a GHC installation before running a build. **Known limitation on macOS:** currently, `stack --nix ghci` fails on macOS, due to a bug in GHCi when working with external shared libraries. ### Supporting both Nix and non-Nix developers With Nix integration enabled in Stack's YAML configuration file, every developer of your project needs to have Nix installed, but the developer also gets all external libraries automatically. Julien Debon of Tweag has published a [blog post][tweag-blog-post] on *Smooth, non-invasive Haskell Stack and Nix shell integration* (2 June 2022). The post explains how to set things up so that both Nix and non-Nix developers can work together on the same project. The `tweag/haskell-stack-nix-example` [GitHub repository][tweag-example] provides an example of working Stack and Nix shell integration to accompany the post. Nix 2.4 (released 1 November 2021) introduced a new and experimental format to package Nix-based projects, known as 'flakes'. The example below adapts and extends the example accompanying the blog post above to use Nix flakes. 
The `flake.nix` file is: ~~~nix { description = "my project description"; inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; inputs.flake-utils.url = "github:numtide/flake-utils"; outputs = { self, nixpkgs, flake-utils }: flake-utils.lib.eachDefaultSystem (system: let pkgs = nixpkgs.legacyPackages.${system}; hPkgs = pkgs.haskell.packages."ghc8107"; # need to match Stackage LTS version # from stack.yaml snapshot myDevTools = [ hPkgs.ghc # GHC compiler in the desired version (will be available on PATH) hPkgs.ghcid # Continuous terminal Haskell compile checker hPkgs.ormolu # Haskell formatter hPkgs.hlint # Haskell codestyle checker hPkgs.hoogle # Lookup Haskell documentation hPkgs.haskell-language-server # LSP server for editor hPkgs.implicit-hie # auto generate LSP hie.yaml file from cabal hPkgs.retrie # Haskell refactoring tool # hPkgs.cabal-install stack-wrapped pkgs.zlib # External C library needed by some Haskell packages ]; # Wrap Stack to work with our Nix integration. We don't want to modify # stack.yaml so non-Nix users don't notice anything. # - no-nix: We don't want Stack's way of integrating Nix. 
# --system-ghc # Use the existing GHC on PATH (will come from this Nix file) # --no-install-ghc # Don't try to install GHC if no matching GHC found on PATH stack-wrapped = pkgs.symlinkJoin { name = "stack"; # will be available as the usual `stack` in terminal paths = [ pkgs.stack ]; buildInputs = [ pkgs.makeWrapper ]; postBuild = '' wrapProgram $out/bin/stack \ --add-flags "\ --no-nix \ --system-ghc \ --no-install-ghc \ " ''; }; in { devShells.default = pkgs.mkShell { buildInputs = myDevTools; # Make external Nix c libraries like zlib known to GHC, like # pkgs.haskell.lib.buildStackProject does # https://github.com/NixOS/nixpkgs/blob/d64780ea0e22b5f61cd6012a456869c702a72f20/pkgs/development/haskell-modules/generic-stack-builder.nix#L38 LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath myDevTools; }; }); } ~~~ Check-in this `flake.nix` to your project's repository. Run the `nix develop` command (it searches for `flake.nix` by default) and you'll find a new `flake.lock` file. That file that pins the precise nixpkgs package set. Check-in that `flake.lock` file as well, and every Nix developer of your project will use precisely the same package set. ### GHC through Nix packages Nix integration will instruct Stack to build inside a local build environment. That environment will also download and use a [GHC Nix package](https://search.nixos.org/packages?query=haskell.compiler.ghc) matching the required version of the configured Stack [snapshot](yaml_configuration.md#snapshot). Enabling Nix integration means that packages will always be built using the local GHC from Nix inside your shell, rather than your globally installed system GHC (if any). Stack can use only GHC versions that are in the Nix package repository. The [Nixpkgs master branch](https://github.com/NixOS/nixpkgs/tree/master/pkgs/development/haskell-modules) usually picks up new versions quickly, but it takes two or three days before those updates arrive in the `unstable` channel. 
Release channels, like `nixos-22.05`, receive those updates only occasionally -- say, every two or three months --, so you should not expect them to have the latest compiler available. Fresh NixOS installs use a release version by default. To identify whether a given compiler is available, you can use the following Nix command: ~~~sh nix-env -f "" -qaP -A haskell.compiler.ghc924 haskell.compiler.ghc924 ghc-9.2.4 ~~~ If Nix doesn't know that version of GHC, you'll see the following error message: ~~~sh nix-env -f "" -qaP -A haskell.compiler.ghc999 error: attribute ‘ghc999’ in selection path ‘haskell.compiler.ghc999’ not found ~~~ You can list all known Haskell compilers in Nix with the following: ~~~sh nix-instantiate --eval -E "with import {}; lib.attrNames haskell.compiler" ~~~ Alternatively, use `nix repl`, a convenient tool to explore nixpkgs: ~~~sh nix repl ~~~ In the REPL, load nixpkgs and get the same information through autocomplete: ~~~sh nix-repl> :l nix-repl> haskell.compiler.ghc ~~~ You can type and evaluate any Nix expression in the Nix REPL, such as the one we gave to `nix-instantiate` earlier. ### External C libraries through Nix packages To let Nix manage external C libraries, add (for example) the following section to your Stack YAML configuration file: ~~~yaml nix: enable: true packages: [zlib, glpk, pcre] ~~~ The equivalent command line option is `--nix-packages "zlib glpk pcre"`. The `packages` key and the `shell-file` key (see further below) are alternatives. Specifying both results in an error. The example above will instruct Stack to build inside a local build environment that will have the Nix packages [zlib](https://search.nixos.org/packages?query=zlib), [glpk](https://search.nixos.org/packages?query=glpk) and [pcre](https://search.nixos.org/packages?query=pcre) installed, which provide the C libraries of the same names. 
**Note:** currently, Stack only discovers dynamic and static libraries in the `lib/` folder of any Nix package, and likewise header files in the `include/` folder. If you're dealing with a package that doesn't follow this standard layout, you'll have to deal with that using a custom `shell.nix` file (see further below). ### External C libraries through a `shell.nix` file In Nix, a 'derivation' is a description of a build action and its result is a Nix store object. Nix's [custom language][nix-language] can provide a fully customized derivation as an environment to use. To specify such a `shell.nix` file, add the following section to your Stack YAML configuration file: ~~~yaml nix: enable: true shell-file: shell.nix ~~~ The equivalent command line option (which will prevail) is `--nix-shell-file shell.nix`. The `packages` and `shell-file` keys are alternatives. Specifying both results in an error. Defining a `shell.nix` file allow you to override some Nix derivations, for instance to change some build options of the libraries you use, or to set additional environment variables. For further information, see the [Nix manual][nix-manual-exprs]. The `shell.nix` file that is the equivalent of the `packages: [zlib, glpk, pcre]` example above is: ~~~nix {ghc}: with (import {}); haskell.lib.buildStackProject { inherit ghc; name = "myEnv"; buildInputs = [ zlib glpk pcre ]; } ~~~ The `buildStackProject` utility function is documented in the [Nixpkgs manual][nixpkgs-manual-haskell]. Stack expects the `shell.nix` file to define a function of with one argument called `ghc` (arguments are not positional), which you should give to function `buildStackProject`. This argument is a GHC Nix package in the version as defined in the snapshot you set in Stack's project-level configuration file (`stack.yaml`, by default). ### Pure and impure Nix shells By default, Stack will run the build in a *pure* Nix build environment (or *shell*), which means two important things: 1. 
basically **no environment variable will be forwarded** from your user session to the nix-shell (variables like `HTTP_PROXY` or `PATH` notably will not be available); and 2. the build should fail if you haven't specified all the dependencies in the `packages:` section of the Stack YAML configuration file, even if these dependencies are installed elsewhere on your system. This behaviour enforces a complete description of the build environment to facilitate reproducibility. To override this behaviour, add the following section to your Stack YAML configuration file: ~~~yaml nix: enable: true pure: false ~~~ The equivalent command line flag (which will prevail) is `--[no-]-nix-pure`. **Note:** On macOS, shells are non-pure by default currently. This is due soon to be resolved locale issues. So on macOS you'll need to be a bit more careful to check that you really have listed all dependencies. ### Nix package sources Nix organizes its packages in snapshots of packages (each snapshot being a "package set") similar to how Stackage organizes Haskell packages. By default, `nix-shell` will look for the "nixpkgs" package set located by your `NIX_PATH` environment variable. This package set can be different depending on when you installed Nix and which nixpkgs channel you're using (similar to the LTS channel for stable packages and the nightly channel for bleeding edge packages in [Stackage](https://www.stackage.org/)). This is bad for reproducibility so that nixpkgs should be pinned, i.e., set to the same package set for every developer of your project. To set or override the Nix package set, add the following section to your Stack YAML configuration file: ~~~yaml nix: path: [nixpkgs=] ~~~ The equivalent command line option is `--nix-path `. By this means, you can ask Nix to use your own local checkout of the nixpkgs repository. 
You could in this way use a bleeding edge nixpkgs, cloned from the `NixOS/nixpkgs` [repository](http://www.github.com/NixOS/nixpkgs) `master` branch, or edit the Nix descriptions of some packages. The Tweag example [repository][tweag-example] shows how you can pin a package set. ## Non-project specific configuration Below is a summary of the non-project specific configuration options and their default values. The options can be set in Stack's project-level configuration file (`stack.yaml`, by default) or its global configuration file (`config.yaml`). ~~~yaml nix: # false by default, except on NixOS. Is Nix integration enabled? enable: true # true by default. Should Nix run in a pure shell? pure: true # Empty by default. The list of packages you want to be available in the # nix-shell at build time (with `stack build`) and run time (with # `stack exec`). packages: [] # Unset by default. You cannot set this option if `packages:` # is already present and not empty. shell-file: shell.nix # A list of strings, empty by default. Additional options that will be passed # verbatim to the `nix-shell` command. nix-shell-options: [] # A list of strings, empty by default, such as # `[nixpkgs=/my/local/nixpkgs/clone]` that will be used to override # NIX_PATH. path: [] # false by default. Whether to add your Nix dependencies as Nix garbage # collection roots. This way, calling nix-collect-garbage will not remove # those packages from the Nix store, saving you some time when running # stack build again with Nix support activated. # # This creates a `nix-gc-symlinks` directory in the project `.stack-work`. # To revert that, just delete this `nix-gc-symlinks` directory. add-gc-roots: false ~~~ `stack --nix-help` will list the equivalent command line flags and options. ## Stack and developer tools on NixOS NixOS is a Linux distribution based on Nix, that is composed using modules and packages defined in the Nixpkgs project. 
When using Stack on NixOS, you must use Stack's Nix integration to install GHC. That is because external C libraries in NixOS are not installed in the usual distribution directories. GHC installed through Stack (without Nix) can't find those libraries and, therefore, can't build most projects. However, GHC provided through Nix can be modified to find the external C libraries provided through Nix. [nix-language]: https://wiki.nixos.org/wiki/Overview_of_the_Nix_Language [nix-manual-exprs]: http://nixos.org/manual/nix/stable/expressions/writing-nix-expressions.html [nix-search-packages]: https://search.nixos.org/packages [nixpkgs-manual-haskell]: https://haskell4nix.readthedocs.io/nixpkgs-users-guide.html?highlight=buildStackProject#how-to-build-a-haskell-project-using-stack [tweag-blog-post]: https://www.tweag.io/blog/2022-06-02-haskell-stack-nix-shell/ [tweag-example]: https://github.com/tweag/haskell-stack-nix-example/ stack-2.15.7/doc/nonstandard_project_init.md0000644000000000000000000000257214604306200017255 0ustar0000000000000000
# Non-standard project initialization You may need to configure Stack to work with an existing project that has one or more Cabal files but no Stack project-level configuration file (`stack.yaml`, by default). ## The `stack init` command The `stack init` command: * finds all of the Cabal files in your current directory and subdirectories (unless you use `--ignore-subdirs`) and determines the packages and versions they require * Finds the best combination of snapshot and package flags that allows everything to compile with minimum external dependencies * Tries to look for the best matching snapshot from latest Haskell LTS, latest Stackage Nightly, and other Haskell LTS, in that order If `stack init` finds a match, it will generate a `stack.yaml` file. You can specify the directory, or directories to include in the search for Cabal files. ### The `stack init --force` flag Set the flag to force the over-writing of any existing `stack.yaml` file. ### The `stack init --ignore-subdirs` flag Set the flag to not search for Cabal files in subdirectories. ### The `stack init --omit-packages` flag Set the flag to exclude any conflicting or incompatible user packages. stack-2.15.7/doc/other_resources.md0000644000000000000000000000217714353310533015412 0ustar0000000000000000
# Other resources There are lots of resources available for learning more about Stack: * `stack` or `stack --help` — lists Stack's commands, and flags and options common to those commands * `stack --help` — provides help on the particular Stack command, including flags and options specific to the command * `stack --version` — identify the version and Git hash of the Stack executable * `--verbose` (or `-v`) — much more info about internal operations (useful for bug reports) * The [home page](http://haskellstack.org) * The [Stack mailing list](https://groups.google.com/d/forum/haskell-stack) * The [FAQ](faq.md) * The [haskell-stack tag on Stack Overflow](http://stackoverflow.com/questions/tagged/haskell-stack) * [Another getting started with Stack tutorial](http://seanhess.github.io/2015/08/04/practical-haskell-getting-started.html) * [Why is Stack not Cabal?](https://www.fpcomplete.com/blog/2015/06/why-is-stack-not-cabal) stack-2.15.7/doc/pantry.md0000644000000000000000000002501214604306200013500 0ustar0000000000000000
# Snapshot and package location [:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) This document describes: * the specification of a snapshot location (in the [`snapshot`](yaml_configuration.md#snapshot) or [`resolver`](yaml_configuration.md#resolver) key) * the specification of a package location (in the `extra-deps` key and in a snapshot) !!! info Stack uses the [Pantry](https://hackage.haskell.org/package/pantry) to specify the location of snapshots and packages. Pantry is geared towards reproducible build plans with cryptographically secure specification of snapshots and packages. ## Snapshot location There are essentially four different ways of specifying a snapshot location: 1. Via a compiler version, which is a "compiler only" snapshot. This could be, for example: ~~~yaml snapshot: ghc-8.6.5 ~~~ 2. Via a URL pointing to a snapshot configuration file, for example: ~~~yaml snapshot: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/nightly/2018/8/21.yaml` ~~~ 3. Via a local file path pointing to a snapshot configuration file, for example: ~~~yaml snapshot: my-local-snapshot.yaml ~~~ 4. Via a _convenience synonym_, which provides a short form for some common URLs. These are: * GitHub: `github:user/repo:path` is treated as: ~~~text https://raw.githubusercontent.com/user/repo/master/path ~~~ * LTS Haskell: `lts-X.Y` is treated (by default) as: ~~~text github:commercialhaskell/stackage-snapshots:lts/X/Y.yaml ~~~ * Stackage Nightly: `nightly-YYYY-MM-DD` is treated (by default) as: ~~~text github:commercialhaskell/stackage-snapshots:nightly/YYYY/M/D.yaml ~~~ !!! info By default, LTS Haskell and Stackage Nightly snapshot configurations are retrieved from the `stackage-snapshots` GitHub repository of user `commercialhaskell`. The [snapshot-location-base](yaml_configuration.md#snapshot-location-base) option allows a custom location to be set. 
For safer, more reproducible builds, you can optionally specify a URL together with a cryptographic hash of its content. For example: ~~~yaml snapshot: url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/12/0.yaml size: 499143 sha256: 781ea577595dff08b9c8794761ba1321020e3e1ec3297fb833fe951cce1bee11 ~~~ `size` is the number of bytes in the file and `sha256` is the file's SHA256 hash. If not provided, the information will automatically be generated and stored in a [lock file](lock_files.md). ## Package location There are three types of package locations: 1. Hackage packages 2. Git and Mecurial repositories 3. Local or remote archives (such as GitHub archives) All three types support optional tree metadata to be added, which can be used for reproducibility and faster downloads. This information can automatically be generated in a [lock file](lock_files.md). ### Hackage packages A package can be identified by its name, version and Cabal file revision number, with revision `0` being the original Cabal file. For example: ~~~yaml extra-deps: - acme-missiles-0.3@rev:0 ~~~ A package name and version only can be stated. Using this syntax, the most recent Cabal file revision available in the package index will be used. For example: ~~~yaml extra-deps: - acme-missiles-0.3 ~~~ This syntax is often used in practice, but may result in one build differing from another, if a new or further Cabal file revision is added to the package index between the builds. As an alternative to specifying the Cabal file revision number, you can specify the package name and version with the SHA256 hash of the contents of its Cabal file. Doing so is slightly more resilient than using the Cabal file revision number, as it does not rely on the correct ordering in the package index. 
For example: ~~~yaml extra-deps: - acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1 ~~~ Optionally, you can specify also the size of the Cabal file in bytes. For example (where the file size is `631` bytes): ~~~yaml extra-deps: - acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1,631 ~~~ Optionally, you can specify also the Pantry tree information. For example: ~~~yaml - hackage: acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1,613 pantry-tree: size: 226 sha256: 614bc0cca76937507ea0a5ccc17a504c997ce458d7f2f9e43b15a10c8eaeb033 ~~~ A Pantry tree is a list of CAS (content-addressable storage) 'SHA256 hash'-'size in bytes' keys for each of the files in a package. The SHA256 hash of the contents of the Cabal file and its size in bytes is provided in Stack's lock file. For further information, see the [lock files](lock_files.md) documentation. The SHA256 hash and file size alternative is also what Stack uses when it makes suggestions about missing packages. ### Git and Mercurial repositories You can specify a Git or Mercurial repository at a specific commit, and Stack will clone that repository and, if it has submodules (Git), update the repository's submodules. For example: ~~~yaml extra-deps: - git: git@github.com:commercialhaskell/stack.git commit: '6a86ee32e5b869a877151f74064572225e1a0398' - git: git@github.com:snoyberg/http-client.git commit: 'a5f4f3' - hg: https://example.com/hg/repo commit: 'da39a3ee5e6b4b0d3255bfef95601890afd80709' ~~~ !!! note It is highly recommended that you only use SHA1 values for a Git or Mercurial commit. Other values may work, but they are not officially supported, and may result in unexpected behavior (namely, Stack will not automatically pull to update to new versions). 
Another problem with this is that your build will not be deterministic, because when someone else tries to build the project they can get a different checkout of the package. !!! note The `commit:` key expects a YAML string. A commit hash, or partial hash, comprised only of digits represents a YAML number, unless it is enclosed in quotation marks. !!! warning For the contents of a Git repository, Stack cannot handle filepaths or symbolic link names that are longer than those supported by the `ustar` (Unix Standard TAR) archive format defined by [POSIX.1-1988](https://nvlpubs.nist.gov/nistpubs/Legacy/FIPS/fipspub151-1.pdf). Stack uses `git archive` to convert the content of a Git repository to a TAR archive, which it then seeks to consume. Git produces `pax` format archives which use 'extended' headers for matters that the `ustar` format cannot handle. Unfortunately, Stack cannot consume an extended header and will silently discard the item. A common practice in the Haskell world is to use "megarepos", or repositories with multiple packages in various subdirectories. Some common examples include [wai](https://github.com/yesodweb/wai/) and [digestive-functors](https://github.com/jaspervdj/digestive-functors). To support this, you may also specify `subdirs` for repositories. For example: ~~~yaml extra-deps: - git: git@github.com:yesodweb/wai commit: '2f8a8e1b771829f4a8a77c0111352ce45a14c30f' subdirs: - auto-update - wai ~~~ If unspecified, `subdirs` defaults to `['.']` meaning looking for a package in the root of the repository. If you specify a value of `subdirs`, then `'.'` is _not_ included by default and needs to be explicitly specified if a required package is found in the top-level directory of the repository. #### git-annex [git-annex](https://git-annex.branchable.com) is not supported. This is because `git archive` does not handle symbolic links outside the work tree. 
It is still possible to use repositories which use git-annex but do not require the annex files for the package to be built. To do so, ensure that any files or directories stored by git-annex are marked [export-ignore](https://git-scm.com/docs/git-archive#Documentation/git-archive.txt-export-ignore) in the `.gitattributes` file in the repository. For further information, see issue [#4579](https://github.com/commercialhaskell/stack/issues/4579). For example, if the directory `fonts/` is controlled by git-annex, use the following line: ~~~gitattributes fonts export-ignore ~~~ ### Local or remote archives (such as GitHub archives) #### Filepaths or URLs to archive files You can use filepaths referring to local archive files or HTTP or HTTPS URLs referring to remote archive files, either tarballs or ZIP files. !!! note An example of a remote archive file is a Hackage package candidate, usually located at (for example) https://hackage.haskell.org/package/my-package-1.0.0/candidate/my-package-1.0.0.tar.gz. !!! warning Stack assumes that these archive files never change after downloading to avoid needing to make an HTTP request on each build. For safer, more reproducible builds, you can optionally specify a cryptographic hash of the archive file. For example: ~~~yaml extra-deps: - https://example.com/foo/bar/baz-0.0.2.tar.gz - archive: http://github.com/yesodweb/wai/archive/2f8a8e1b771829f4a8a77c0111352ce45a14c30f.zip subdirs: - wai - warp - archive: ../acme-missiles-0.3.tar.gz sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b ~~~ #### GitHub archive files [:octicons-tag-24: 1.7.1](https://github.com/commercialhaskell/stack/releases/tag/v1.7.1) You can specify a GitHub respository at a specific commit and Stack will obtain from GitHub an archive file of the files in the repository at that point in its history. For example: ~~~yaml extra-deps: - github: snoyberg/http-client commit: 'a5f4f30f01366738f913968163d856366d7e0342' ~~~ !!! 
note An archive file of the files in a GitHub repository at a point in its history is not the same as a clone of the repository (including its history) and the updating of any submodules. If you need the latter, use the syntax for a [Git repository](pantry.md#git-and-mercurial-repositories). If the package fails to build due to missing files, it may be that updated submodules are required. stack-2.15.7/doc/path_command.md0000644000000000000000000000641514613163672014641 0ustar0000000000000000
# The `stack path` command ~~~text stack path [--stack-root] [--global-config] [--programs] [--local-bin] [--project-root] [--config-location] [--bin-path] [--compiler-exe] [--compiler-bin] [--compiler-tools-bin] [--extra-include-dirs] [--extra-library-dirs] [--snapshot-pkg-db] [--local-pkg-db] [--global-pkg-db] [--ghc-package-path] [--snapshot-install-root] [--local-install-root] [--snapshot-doc-root] [--local-doc-root] [--local-hoogle-root] [--dist-dir] [--local-hpc-root] ~~~ `stack path` provides information about files and locations used by Stack. Pass the following flags for information about specific files or locations: |Flag |File or location | |-----------------------|------------------------------------------------------| |--bin-path |The PATH in the Stack environment. | |--compiler-bin |The directory containing the GHC executable. | |--compiler-exe |The GHC executable. | |--compiler-tools-bin |The directory containing binaries specific to a particular compiler.| |--config-location |Stack's project-level YAML configuration file (`stack.yaml`, by default).| |--dist-dir |The dist work directory, relative to the package directory.| |--extra-include-dirs |Extra include directories. | |--extra-library-dirs |Extra library directories. | |--ghc-package-path |The `GHC_PACKAGE_PATH` environment variable. | |--global-config |Stack's user-specific global YAML configuration file (`config.yaml`).| |--global-pkg-db |The global package database. | |--local-bin |The directory in which Stack installs executables. | |--local-doc-root |The root directory for local project documentation. | |--local-hoogle-root |The root directory for local project documentation. | |--local-hpc-root |The root directory for .tix files and HPC reports. | |--local-install-root |The root directory for local project installation. | |--local-pkg-db |The local package database. 
| |--programs |The root directory for GHC and other Stack-supplied tools.| |--project-root |The project root directory.| |--snapshot-doc-root |The root directory for snapshot documentation. | |--snapshot-install-root|The root directory for snapshot installation. | |--snapshot-pkg-db |The snapshot package database. | |--stack-root |The Stack root. | The command also accepts flags and options of the [`stack build`](build_command.md#flags-affecting-ghcs-behaviour) command that affect the location of the local project installation directory, such as `--profile` and `--no-strip`. For further information, see the documentation of the [project Stack work directory](stack_work.md#project-stack-work-directory). stack-2.15.7/doc/purge_command.md0000644000000000000000000000053714353310533015015 0ustar0000000000000000
# The `stack purge` command ~~~text stack purge ~~~ `stack purge` has the same effect as, and is provided as a shorthand for, [`stack clean --full`](clean_command.md). stack-2.15.7/doc/query_command.md0000644000000000000000000000244114604306200015027 0ustar0000000000000000
# The `stack query` command :octicons-beaker-24: Experimental [:octicons-tag-24: 0.1.6.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.6.0) ~~~text stack query [SELECTOR...] ~~~ `stack query` outputs certain build information. For example, for a multi-package project `multi` specifying snapshot `lts-19.25` (GHC 9.0.2) and with two project packages, `my-package-A` (version 0.1.0.0) and `my-package-B` (version 0.2.0.0), command `stack query` outputs: ~~~text compiler: actual: ghc-9.0.2 wanted: ghc-9.0.2 locals: my-package-A: path: \multi\my-package-A\ version: 0.1.0.0 my-package-B: path: \multi\my-package-B\ version: 0.2.0.0 ~~~ The component parts of the information can be specified using 'selectors' with the command. In the example above the selectors include `compiler`, `compiler actual`, `locals`, `locals my-package-A`, and `locals my-package-A version`. For example, commanding: ~~~text stack query locals my-package-B path ~~~ results in output: ~~~text \multi\my-package-B\ ~~~ stack-2.15.7/doc/README.md0000644000000000000000000003454614615404001013134 0ustar0000000000000000
# The Haskell Tool Stack Welcome to the [Haskell](https://www.haskell.org/) programming language and the Haskell Tool Stack (Stack)! Stack is a program for developing Haskell projects. It is aimed at Haskellers both new and experienced. It is cross-platform and aims to support fully users on Linux, macOS and Windows. Stack features: * Installing the [Glasgow Haskell Compiler (GHC)](https://www.haskell.org/ghc/) automatically, in an isolated location. * Installing packages needed for your project. * Building your project. * Testing your project. * Benchmarking your project. ## How to install Stack Stack can be installed on most Unix-like operating systems (including macOS) and Windows. It will require at least about 5 GB of disk space, for use with one version of GHC. !!! info In addition to the methods described below, Stack can also be installed using the separate [GHCup](https://www.haskell.org/ghcup/) installer for Haskell-related tools. GHCup provides Stack for some combinations of machine architecture and operating system not provided elsewhere. By default, the script to install GHCup (which can be run more than once) also configures Stack so that if Stack needs a version of GHC, GHCup takes over obtaining and installing that version. === "Linux" For most Linux distributions, the easiest way to install Stack directly (rather than use GHCup) is to command: ~~~text curl -sSL https://get.haskellstack.org/ | sh ~~~ or: ~~~text wget -qO- https://get.haskellstack.org/ | sh ~~~ !!! note The script at [get.haskellstack.org](https://get.haskellstack.org/) will ask for root access using `sudo`. It needs such access in order to use your platform's package manager to install dependencies and to install to `/usr/local/bin`. If you prefer more control, follow the manual installation instructions in the [install and upgrade guide](install_and_upgrade.md). 
=== "macOS" From late 2020, Apple began a transition from Mac computers with Intel processors (Intel-based Mac) to [Mac computers with Apple silicon](https://support.apple.com/en-gb/HT211814). === "Intel-based" For most Intel-based Mac computers, the easiest way to install Stack directly (rather than use GHCup) is to command: ~~~text curl -sSL https://get.haskellstack.org/ | sh ~~~ or: ~~~text wget -qO- https://get.haskellstack.org/ | sh ~~~ !!! note The script at [get.haskellstack.org](https://get.haskellstack.org/) will ask for root access using `sudo`. It needs such access in order to use your platform's package manager to install dependencies and to install to `/usr/local/bin`. If you prefer more control, follow the manual installation instructions in the [install and upgrade guide](install_and_upgrade.md). === "Apple silicon" Mac computers with Apple silicon have an M1, M1 Pro, M1 Max, M1 Ultra or M2 chip. These chips use an architecture known as ARM64 or AArch64. For Mac computers with Apple silicon, the easiest way to install Stack directly (rather than use GHCup) is to command: ~~~text curl -sSL https://get.haskellstack.org/ | sh ~~~ or: ~~~text wget -qO- https://get.haskellstack.org/ | sh ~~~ !!! note The script at [get.haskellstack.org](https://get.haskellstack.org/) will ask for root access using `sudo`. It needs such access in order to use your platform's package manager to install dependencies and to install to `/usr/local/bin`. If you prefer more control, follow the manual installation instructions in the [install and upgrade guide](install_and_upgrade.md). === "Windows" On 64-bit Windows, the easiest way to install Stack directly (rather than use GHCup) is to download and install the [Windows installer](https://get.haskellstack.org/stable/windows-x86_64-installer.exe). !!! info By default, the Windows installer will set the Stack root to `C:\sr`. !!! 
warning The Windows installer for Stack 2.9.1, 2.9.3 and 2.11.1 (only) will replace the user `PATH` environment variable (rather than append to it) if a 1024 character limit is exceeded. If the content of your existing user `PATH` is long, preserve it before running the installer. !!! note Systems with antivirus software may need to add Stack to the list of 'trusted' applications. === "Other/direct downloads" For other operating systems and direct downloads (rather than use GHCup), see the [install and upgrade guide](install_and_upgrade.md). ## How to upgrade Stack If Stack is already installed, you can upgrade it to the latest version by the command: ~~~text stack upgrade ~~~ !!! note If you used [GHCup](https://www.haskell.org/ghcup/) to install Stack, you should also use GHCup, and not Stack, to upgrade Stack. ## Quick Start guide For an immediate experience of using Stack to build an executable with Haskell, first you need to follow the [guide to install Stack](#how-to-install-Stack). ### Step 1: Start your new project To start a new project named `my-project`, issue these four commands in a terminal: ~~~text stack new my-project cd my-project stack build stack exec my-project-exe ~~~ - The `stack new my-project` command will create a new directory, named `my-project`. It contains all the files needed to start a project correctly, using a default template. - The `cd my-project` command will change the current working directory to that directory. - The `stack build` command will build the template project and create an executable named `my-project-exe` (on Windows, `my-project-exe.exe`). First, if necessary, Stack will download a version of GHC in an isolated location. That won't interfere with other GHC installations on your system. - The `stack exec my-project-exe` command will run (execute) the built executable, in Stack's environment. 
For a complete list of Stack's commands, and flags and options common to those commands, simply command: ~~~text stack ~~~ For help on a particular Stack command, including flags and options specific to that command, for example `stack build`, command: ~~~text stack build --help ~~~ If you want to launch a run-eval-print loop (REPL) environment, then command: ~~~text stack repl ~~~ !!! info `stack ghci` can be used instead of `stack repl`. GHCi is GHC's REPL tool. People organise Haskell code into packages. If you want to use Stack to install an executable provided by a Haskell package, then all you have to do is command: ~~~text stack install ~~~ ### Step 2: Next steps The `stack new my-project` command in step one should have created the following files and directories (among others): ~~~text . ├── app │   └── Main.hs ├── src │   └── Lib.hs ├── test │ └── Spec.hs ├── my-project.cabal ├── package.yaml └── stack.yaml ~~~ The Haskell source code for the executable (application) is in file `Main.hs`. The executable uses a library. Its source code is in file `Lib.hs`. The contents of `my-project.cabal` describes the project's package. That file is generated by the contents of `package.yaml`. !!! info If you want, you can delete the `package.yaml` file and update the `my-project.cabal` file directly. Stack will then use that file. The contents of `stack.yaml` describe Stack's own project-level configuration. You can edit the source files in the `src` directory (used for the library) or the `app` directory (used for the executable (application)). As your project develops, you may need to depend on a library provided by another Haskell package. If you do, then add the name of that new package to the file `package.yaml`, in its `dependencies:` section. !!! info When you use `stack build` again, Stack will use `package.yaml` to create an updated `my-project.cabal` for you. 
If Stack reports that the Stack configuration has no specified version for the new package, then follow Stack's likely recommended action to add a specific version of that package to your project's `stack.yaml` file, in its `extra-deps:` section. That was a really fast introduction on how to start to code in Haskell using Stack. If you want to go further, we highly recommend you read Stack's introductory [user's guide](GUIDE.md). ## Complete guide to Stack A complete [user's guide](GUIDE.md) to Stack is available, covering all of the most common ways to use Stack. Terms used in Stack's documentation are also explained in the [glossary](glossary.md). ## Why Stack? Stack is a build tool for Haskell designed to answer the needs of Haskell users, both new and experienced. It has a strong focus on reproducible build plans, multi-package projects, and a consistent, easy-to-learn set of Stack commands. It also aims to provide the customizability and power that experienced developers need. Stack does not stand alone. It is built on the great work provided by: * The __Glasgow Haskell Compiler__ ([GHC](https://www.haskell.org/ghc/)), the premier Haskell compiler. Stack will manage your GHC installations and automatically select the appropriate version of GHC for your project. * The __Cabal build system__. Cabal is a specification for defining Haskell packages and a [library](https://hackage.haskell.org/package/Cabal) for performing builds. !!! info Cabal is also the name of another build tool, provided by the `cabal-install` package. This guide distinguishes between them by Cabal (the library) and Cabal (the tool). * The __Hackage Haskell Package Repository__, a [repository](https://hackage.haskell.org/) of Haskell packages providing thousands of open source libraries and applications to help you get your work done. * The __Stackage package collection__, sets of packages from Hackage that are [curated](https://www.stackage.org/). 
That is, they are regularly tested for compatibility. Stack defaults to using Stackage package sets to avoid problems with incompatible dependencies. Stack is provided by a team of volunteers and companies under the auspices of the [Commercial Haskell](http://commercialhaskell.com/) group. The project was spearheaded by [FP Complete](https://www.fpcomplete.com/) to answer the needs of commercial Haskell users. It has since become a thriving open source project meeting the needs of Haskell users of all stripes. If you'd like to get involved with Stack, check out the [newcomer friendly](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3a%22newcomer+friendly%22) label on the GitHub issue tracker. ## Questions, feedback, and discussion * For answers to frequently asked questions about Stack, please see the [FAQ](faq.md). * For general questions, comments, feedback and support, please post to the [Haskell Community](https://discourse.haskell.org/about). * For bugs, issues, or requests, please [open an issue](https://github.com/commercialhaskell/stack/issues/new). * When using Stack Overflow, please use the [haskell-stack](http://stackoverflow.com/questions/tagged/haskell-stack) tag. ## How to contribute to the maintenance or development of Stack A [guide](CONTRIBUTING.md) is provided to help potential contributors to the Stack project. If you have already installed a version of Stack and the [Git application](https://git-scm.com/), the following steps should get you started with building Stack from source with Stack: 1. Clone the `stack` repository from GitHub with the command: ~~~text git clone https://github.com/commercialhaskell/stack.git ~~~ 2. Change the current working directory to the cloned `stack` directory with the command: ~~~text cd stack ~~~ 3. Build the `stack` executable using a preexisting installation of Stack with the command: ~~~text stack build ~~~ 4. 
Once the `stack` executable has been built, check its version with the command: ~~~text stack exec -- stack --version ~~~ Make sure the version is the latest one. 5. In the GitHub repository's issue tracker, look for issues tagged with [newcomer friendly](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3a%22newcomer+friendly%22) and [awaiting pull request](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pull+request%22) labels. If you need to check your changes quickly command: ~~~text stack repl ~~~ and then, at the REPL's prompt, command: ~~~text :main --stack-root= --stack-yaml= ~~~ This allows you to set a special Stack root (instead of the default Stack root) and to target your commands at a particular `stack.yaml` file instead of the one found in the current directory. ## How to uninstall To uninstall Stack, it should be sufficient to delete: 1. the Stack root directory (see `stack path --stack-root`, before you uninstall); 2. if different, the directory containing Stack's global YAML configuration file (see `stack path --global-config`, before you uninstall); 3. on Windows, the directory containing Stack's tools (see `stack path --programs`, before you uninstall), which is located outside of the Stack root directory; and 4. the `stack` executable file (see `which stack`, on Unix-like operating systems, or `where.exe stack`, on Windows). You may also want to delete ``.stack-work`` directories in any Haskell projects that you have built using Stack. The `stack uninstall` command provides information about how to uninstall Stack. stack-2.15.7/doc/run_command.md0000644000000000000000000000401614604306200014466 0ustar0000000000000000
# The `stack run` command ~~~text stack run [-- ARGUMENT(S) (e.g. stack run -- file.txt)] [--[no-]ghc-package-path] [--[no-]stack-exe] [--package PACKAGE] [--rts-options RTSFLAG] [--cwd DIR] ~~~ `stack run` builds a project executable and runs it. If the command has a first argument and it is recognised as the name of an executable component of a project package then that is built. Otherwise, the project's first executable is built. If the project has no executables Stack reports no executables found as an error. !!! note To identify a project's first executable, and search for the name of an executable component, Stack lists the executable components, in order, for each package, listed in order. For example: `packageA:a-exe` < `packageA:b-exe` < `packageB:a-exe` < `packageB:b-exe` Everything after `--` on the command line is interpreted as a command line argument to be passed to what is run, other than a first argument recognised as the name of an executable component of a project package. By default: * the `GHC_PACKAGE_PATH` environment variable is set for the subprocess. Pass the `--no-ghc-package-path` flag to not set the variable; and * the `STACK_EXE` environment variable is set with the path to Stack. Pass the `--no-stack-exe` flag to not set the variable. The `--cwd` option can be used to set the working directory before the executable is run. The `--package` option (which can be specified multiple times) can be used to add a package name to build targets. The `--rts-options` option (which can be specified multiple times) can be used to pass a list of GHC's [runtime system (RTS) options](https://downloads.haskell.org/~ghc/latest/docs/users_guide/runtime_control.html#) to the executable when it is run. (The `+RTS` and `-RTS` must not be included.) stack-2.15.7/doc/runghc_command.md0000644000000000000000000000215114604306200015146 0ustar0000000000000000
# The `stack runghc` and `stack runhaskell` commands ~~~text stack runghc [-- ARGUMENT(S) (e.g. stack runghc -- X.hs)] [--[no-]ghc-package-path] [--[no-]stack-exe] [--package PACKAGE] [--rts-options RTSFLAG] [--cwd DIR] ~~~ `stack runhaskell` has the same effect as `stack runghc`. `stack runghc` has the same effect as, and is provided as a shorthand for, [`stack exec runghc`](exec_command.md), with the exception of the `--package` option. Pass the option `--package ` to add the initial GHC argument `-package-id=`, where `` is the unit ID of the specified package in the installed package database. The option can be a list of package names or package identifiers separated by spaces. The option can also be specified multiple times. The approach taken to these packages is the same as if they were specified as targets to [`stack build`](build_command.md#target-syntax). stack-2.15.7/doc/script_command.md0000644000000000000000000001620514620153474015204 0ustar0000000000000000
# The `stack script` command ~~~text stack script [--package PACKAGE] FILE [-- ARGUMENT(S) (e.g. stack script X.hs -- argument(s) to program)] [--compile | --optimize] [--[no-]use-root] [--ghc-options OPTIONS] [--extra-dep PACKAGE-VERSION] [--no-run] ~~~ The `stack script` command either runs a specified Haskell source file (using GHC's `runghc`) or, optionally, compiles such a file (using GHC) and, by default, runs it. Unlike `stack ghc` and `stack runghc`, the command ignores all Stack YAML configuration files (global and project-level). A snapshot must be specified on the command line (with the `--snapshot` option). For example: ~~~text stack script --snapshot lts-22.21 MyScript.hs ~~~ The `stack script` command behaves as if the `--install-ghc` flag had been passed at the command line. Everything after `--` on the command line is interpreted as a command line argument to be passed to what is run. A package can be added to the snapshot on the command line with the `--extra-dep` option (which can be specified multiple times). Each required package can be specified by name on the command line with the `--package` option (which can be specified multiple times). A single `--package` option can also refer to a list of package names, separated by a space or comma character. If the package is not in the snapshot, the most recent version in the package index (e.g. Hackage) will be obtained. If no packages are specified in that way, all the required packages that are in the snapshot or are a GHC boot package (packages that come with GHC and are included in GHC's global package database) will be deduced by reference to the `import` statements in the source file. The `base` package associated with the version of GHC specified by the snapshot is always available. If a required package is a GHC boot package, the behaviour can be complex. If the boot package has not been 'replaced', then it will be used in Stack's build plan. 
However, if the boot package has been 'replaced', the latest version of that package in the package index will be used in Stack's build plan, which may differ from the version provided by the version of GHC specified by the snapshot. A boot package will be treated as 'replaced' if the package is included directly in the Stackage snapshot or it depends on a package included directly in the snapshot. Stackage snapshots do not include directly most boot packages but some snapshots may include directly some boot packages. In particular, some snapshots include directly `Win32` (which is a boot package on Windows) while others do not. For example, if `Cabal` (a boot package) is a required package then, with Stackage snapshot LTS Haskell 20.25, Stack will: * on Windows, try to construct a build plan based on the latest version of `Cabal` in the package index (because that snapshot includes `Win32` directly, and `Cabal` depends on `Win32` and so is treated as 'replaced'); and * on non-Windows, use the boot package in the build plan (because `Cabal` is not 'replaced'). Boot packages that have been 'replaced' can be specified as an `--extra-dep`. The source file can be compiled by passing either the `--compile` flag (no optimization) or the `--optimize` flag (compilation with optimization). If the file is compiled, passing the `--no-run` flag will mean the compiled code is not run. By default, all the compilation outputs (including the executable) are written to the directory of the source file. Pass the `--use-root` flag to write such outputs to a script-specific location in the `scripts` directory of the Stack root. The location reflects the absolute path to the source file, but ignoring the drive. This can avoid clutter in the source file directory. Additional options can be passed to GHC using the `--ghc-options` option. 
## Examples For example, Haskell source file `MyScript.hs` at location `Users/jane/my-project` (where `` could be `/` on Unix-like operating systems or `C:/` or similar on Windows): ~~~haskell module Main (main) where import Data.List (intercalate) import System.Environment (getArgs) import Acme.Missiles (launchMissiles) main :: IO () main = do advices <- getArgs launchMissiles putStrLn $ intercalate "\n" advices ~~~ can be compiled and run, with arguments, with: ~~~text stack --snapshot lts-22.21 script --package acme-missiles --compile MyScript.hs -- "Don't panic!" "Duck and cover!" ~~~ All the compilation outputs (like `Main.hi`, `Main.o`, and the executable `MyScript`) will be written to the `my-project` directory. If compiled and run with the additional flag `--use-root`, all the compilation outputs will be written to a directory named `MyScript.hs` at `Users/jane/my-project/` in the `scripts` directory of the Stack root. For example, consider the following script extract, based on snapshot Stackage LTS Haskell 20.25, where considerations on Windows differ from non-Windows. The `stack script` command is specified using Stack's [script interpreter](scripts.md). === "Windows" The snapshot includes `Win32` directly. As a consequence, GHC boot packages `directory`, `process` and `time` (which depend on `Win32`) are all treated as 'replaced'. ~~~haskell {- stack script --snapshot lts-20.25 --extra-dep acme-missiles-0.3 --extra-dep directory-1.3.6.2 --extra-dep process-1.6.16.0 --extra-dep time-1.11.1.1 -} import Acme.Missiles -- from acme-missiles import Data.Time.Clock.System -- from time import System.Time.Extra -- from extra ... ~~~ `acme-missiles` is not in the snapshot and so needs to be specified as an extra-dep. Stack can deduce that the module imports imply that the required packages are `acme-missiles`, `time` and `extra` (which is in the snapshot). `extra` depends on `directory` and `process`. 
If `directory` and `process` are not specified as extra-deps, Stack will complain that they have been 'pruned'. `directory-1.3.6.2` depends on `time < 1.12`. If `time` is not specified as an extra-dep, Stack will try to construct a build plan based on the latest version in the package index (which will fail, as the latest version is `>= 1.12`) === "Unix-like" ~~~haskell {- stack script --snapshot lts-20.25 --extra-dep acme-missiles-0.3 -} import Acme.Missiles -- from acme-missiles import Data.Time.Clock.System -- from time import System.Time.Extra -- from extra ... ~~~ `acme-missiles` is not in the snapshot and so needs to be specified as an extra-dep. Stack can deduce that the module imports imply that the required packages are `acme-missiles`, `time` and `extra` (which is in the snapshot). All the other dependencies required are either GHC boot packages (which have not been 'replaced') or in the snapshot. stack-2.15.7/doc/scripts.md0000644000000000000000000002242314620153474013670 0ustar0000000000000000
# Stack's script interpreter Stack offers a very useful feature for running files: a script interpreter. For too long have Haskellers felt shackled to bash or Python because it's just too hard to create reusable source-only Haskell scripts. Stack attempts to solve that. You can use `stack ` to execute a Haskell source file. Usually, the Stack command to be applied is specified using a special Haskell comment (the Stack interpreter options comment) at the start of the source file. That command is most often `stack script` but it can be, for example, `stack runghc`. If there is no Stack interpreter options comment, Stack will warn that one was expected. An example will be easiest to understand. Consider the Haskell source file `turtle-example.hs` with contents: ~~~haskell #!/usr/bin/env stack -- stack script --snapshot lts-22.21 --package turtle {-# LANGUAGE OverloadedStrings #-} import Turtle (echo) main = echo "Hello World!" ~~~ === "Unix-like" The first line beginning with the 'shebang' (`#!`) tells Unix to use Stack as a script interpreter, if the file's permissions mark it as executable. A shebang line is limited to a single argument, here `stack`. The file's permissions can be set with command `chmod` and then it can be run: ~~~text chmod +x turtle-example.hs ./turtle-example.hs ~~~ !!! note On macOS: - Avoid `{-# LANGUAGE CPP #-}` in Stack scripts; it breaks the shebang line ([GHC #6132](https://gitlab.haskell.org/ghc/ghc/issues/6132)) - Use a compiled executable, not another script, in the shebang line. Eg `#!/usr/bin/env runhaskell` will work but `#!/usr/local/bin/runhaskell` would not. Alternatively, the script can be run with command: ~~~text stack turtle-example.hs ~~~ === "Windows (with PowerShell)" The first line beginning with the 'shebang' (`#!`) has a meaning on Unix-like operating systems but will be ignored by PowerShell. It can be omitted on Windows. 
The script can be run with command: ~~~text stack turtle-example.hs ~~~ In both cases, the command yields: ~~~text Hello World! ~~~ the first time after a little delay (as GHC is downloaded, if necessary, and dependencies are built) and subsequent times more promptly (as the runs are able to reuse everything already built). The second line of the source code is the Stack interpreter options comment. In this example, it specifies the `stack script` command with the options of a LTS Haskell 22.21 snapshot (`--snapshot lts-22.21`) and ensuring the [`turtle` package](https://hackage.haskell.org/package/turtle) is available (`--package turtle`). The version of the package will be that in the specified snapshot (`lts-22.21` provides `turtle-1.6.2`). ## Arguments and interpreter options and arguments Arguments for the script can be specified on the command line after the file name: `stack ...`. The Stack interpreter options comment must specify what would be a single valid Stack command at the command line if the file name were included as an argument, starting with `stack`. It can include `--` followed by arguments. In particular, the Stack command `stack MyScript.hs ` with Stack interpreter options comment: ~~~haskell -- stack -- ~~~ is equivalent to the following command at the command line: ~~~text stack -- MyScript.hs ~~~ The Stack interpreter options comment must be the first line of the file, unless a shebang line is the first line, when the comment must be the second line. The comment must start in the first column of the line. When many options are needed, a block style comment that splits the command over more than one line may be more convenient and easier to read. 
For example, the command `stack MyScript.hs arg1 arg2` with `MyScript.hs`: ~~~haskell #!/usr/bin/env stack {- stack script --snapshot lts-22.21 -- +RTS -s -RTS -} import Data.List (intercalate) import System.Environment (getArgs) import Turtle (echo, fromString) main = do args <- getArgs echo $ fromString $ intercalate ", " args ~~~ is equivalent to the following command at the command line: ~~~text stack script --snapshot lts-22.21 -- MyScript.hs arg1 arg2 +RTS -s -RTS ~~~ where `+RTS -s -RTS` are some of GHC's [runtime system (RTS) options](https://downloads.haskell.org/~ghc/latest/docs/users_guide/runtime_control.html). ## Just-in-time compilation As with using `stack script` at the command line, you can pass the `--compile` flag to make Stack compile the script, and then run the compiled executable. Compilation is done quickly, without optimization. To compile with optimization, pass the `--optimize` flag instead. Compilation is done only if needed; if the executable already exists, and is newer than the script, Stack just runs the executable directly. This feature can be good for speed (your script runs faster) and also for durability (the executable remains runnable even if the script is disturbed, eg due to changes in your installed GHC/snapshots, changes to source files during git bisect, etc.) ## Using multiple packages As with using `stack script` at the command line, you can also specify multiple packages, either with multiple `--package` options, or by providing a comma or space separated list. For example: ~~~haskell #!/usr/bin/env stack {- stack script --snapshot lts-22.21 --package turtle --package "stm async" --package http-client,http-conduit -} ~~~ ## Stack configuration for scripts With the `stack script` command, all Stack YAML configuration files (global and project-level) are ignored. 
With the `stack runghc` command, if the current working directory is inside a project then that project's Stack project-level YAML configuration is effective when running the script. Otherwise the script uses the global project configuration specified in `/global-project/stack.yaml`. ## Testing scripts You can use the flag `--script-no-run-compile` on the command line to enable (it is disabled by default) the use of the `--no-run` option with `stack script` (and forcing the `--compile` option). The flag may help test that scripts compile in CI (continuous integration). For example, consider the following simple script, in a file named `Script.hs`, which makes use of the joke package [`acme-missiles`](https://hackage.haskell.org/package/acme-missiles): ~~~haskell {- stack script --snapshot lts-22.21 --package acme-missiles -} import Acme.Missiles (launchMissiles) main :: IO () main = launchMissiles ~~~ The command `stack --script-no-run-compile Script.hs` then behaves as if the command `stack script --snapshot lts-22.21 --package acme-missiles --no-run --compile -- Script.hs` had been given. `Script.hs` is compiled (without optimisation) and the resulting executable is not run: no missiles are launched in the process! ## Writing independent and reliable scripts The `stack script` command will automatically: * Install GHC and libraries, if missing. `stack script` behaves as if the `--install-ghc` flag had been passed at the command line. * Require that all packages used be explicitly stated on the command line. This ensures that your scripts are _independent_ of any prior deployment specific configuration, and are _reliable_ by using exactly the same version of all packages every time it runs so that the script does not break by accidentally using incompatible package versions. In earlier versions of Stack, the `runghc` command was used for scripts and can still be used in that way. 
In order to achieve the same effect with the `runghc` command, you can do the following: 1. Use the `--install-ghc` option to install the compiler automatically 2. Explicitly specify all packages required by the script using the `--package` option. Use `-hide-all-packages` GHC option to force explicit specification of all packages. 3. Use the `--snapshot` Stack option to ensure a specific GHC version and package set is used. It is possible for configuration files to affect `stack runghc`. For that reason, `stack script` is strongly recommended. For those curious, here is an example with `runghc`: ~~~haskell #!/usr/bin/env stack {- stack runghc --install-ghc --snapshot lts-22.21 --package base --package turtle -- -hide-all-packages -} ~~~ The `runghc` command is still very useful, especially when you're working on a project and want to access the package databases and configurations used by that project. See the next section for more information on configuration files. ## Loading scripts in GHCi Sometimes you want to load your script in GHCi to play around with your program. In those cases, you can use `exec ghci` option in the script to achieve it. Here is an example: ~~~haskell #!/usr/bin/env stack {- stack exec ghci --install-ghc --snapshot lts-22.21 --package turtle -} ~~~ stack-2.15.7/doc/sdist_command.md0000644000000000000000000000307614445120722015023 0ustar0000000000000000
# The `stack sdist` command ~~~text stack sdist [DIR] [--pvp-bounds PVP-BOUNDS] [--ignore-check] [--[no-]test-tarball] [--tar-dir ARG] ~~~ Hackage only accepts packages for uploading in a standard form, a compressed archive ('tarball') in the format produced by Cabal's `sdist` action. `stack sdist` generates a file for your package, in the format accepted by Hackage for uploads. The command will report the location of the generated file. ## `--ignore-check` flag Pass the flag to disable checks of the package for common mistakes. By default, the command will check the package for common mistakes. ## `--pvp-bounds` option The `--pvp-bounds ` option determines whether and, if so, how PVP version bounds should be added to the Cabal file of the package. The available modes for basic use are: `none`, `lower`, `upper`, and `both`. The available modes for use with Cabal file revisions are `lower-revision`, `upper-revision` and `both-revision`. For futher information, see the [YAML configuration](yaml_configuration.md#pvp-bounds) documentation. ## `--tar-dir` option The `--tar-dir ` option determines whether the package archive should be copied to the specified directory. ## `--[no-]test-tarball` flag Default: Disabled Set the flag to cause Stack to test the resulting package archive, by attempting to build it. stack-2.15.7/doc/setup_command.md0000644000000000000000000000762514604306200015033 0ustar0000000000000000
# The `stack setup` command ~~~text stack setup [GHC_VERSION] [--[no-]reinstall] [--ghc-bindist URL] [--ghcjs-boot-options GHCJS_BOOT] [--[no-]ghcjs-boot-clean] ~~~ `stack setup` attempts to install a version of GHC. By default: * the version of GHC is the one required by the project. Specify the version of GHC as an argument to attempt to install a different version of GHC. For example `stack setup 9.4.4` will attempt to install GHC 9.4.4; and * an attempt to install is made only if the version of GHC is not already available to Stack. Pass the flag `--reinstall` (disabled by default) to attempt to install the version of GHC regardless of whether it is already available to Stack. Pass the option `--ghc-bindist ` to specify the URL of the GHC to be downloaded and installed. This option requires the use of the `--ghc-variant` option specifying a custom GHC variant. For further information about the `--ghc-variant` option, see the see the [YAML configuration](yaml_configuration.md#ghc-variant) documentation. If Stack is configured not to install GHC (`install-ghc: false` or passing the `--no-install-ghc` flag) then `stack setup` will warn that the flag and the command are inconsistent and take no action. === "Linux" A particular binary distribution of GHC will depend on certain libraries, which need to be available. There are many different Linux distributions and different versions of a particular Linux distribution. One Linux distribution/version may make available different libraries to another Linux distribution/version. In attempting to identify the particular binary distribution of GHC that is required on Linux, Stack will refer to the presence or absence of certain libraries or the versions of those libraries. For example, Stack 2.15.1 considers: * If `libc.musl-x86_64.so.1` is present. This file is provided by the [musl libc](https://musl.libc.org/). 
* The version of `libc6` (if musl libc is not applicable), the [GNU C Library](https://www.gnu.org/software/libc/) (glibc), that is present. The GNU C Library is designed to be backwards compatible. * If `libgmp.so.3` or `libgmp.so.10` is present. These files are provided by different versions of the [GNU Multiple Precision Arithmetic Library](https://gmplib.org/). * If `libncursesw.so.6` is present. This file is provided by a shared library for terminal handling with wide character support. * If `libtinfo.so.5` or `libtinfo.so.6` is present. These files are provided by different versions of a shared low-level terminfo library for terminal handling. Stack 2.15.1 uses `ghc-build`: * `musl` to indicate `libc.musl-x86_64.so.1` is present and Stack should use the GHC binary distribution for Alpine Linux. * `tinfo6` to indicate `libgmp.so.10` and `libtinfo.so.6` are present and `libc6` is compatible with `libc6` 2.32. * `tinfo6-libc6-pre232` to indicate `libgmp.so.10` and `libtinfo.so.6` are present and `libc6` is not compatible with `libc6` 2.32. * `ncurses6` to indicate `libgmp.so.10` and `libncursesw.so.6` are present * `gmp4` to indicate `libgmp.so.3` is present By default, Stack associates: * the `tinfo6` build with the 'Fedora 33' binary distribution of GHC 9.4.1 to 9.4.4. Those binary distributions require versions of `libc6` that are compatible with `libc6` 2.32; and * the `tinfo6-libc6-pre232` build with the 'Debian 10' binary distribution of GHC 9.4.1 to 9.4.4. Those binary distributions require versions of `libc6` that are compatible with `libc6` 2.28. stack-2.15.7/doc/shell_autocompletion.md0000644000000000000000000000403214353310351016416 0ustar0000000000000000
# Shell auto-completion The following adds support for the tab completion of standard Stack arguments to the following shell programs: Bash, Zsh (the Z shell) and fish. Completion of file names and executables within Stack is still lacking. For further information, see issue [#823](https://github.com/commercialhaskell/stack/issues/832). !!! info Stack's completion library provides [hidden options](https://github.com/pcapriotti/optparse-applicative#bash-zsh-and-fish-completions) for Bash, Zsh, and fish which output commands used for shell auto-completion. For example: ~~~bash $ stack --bash-completion-script stack _stack() { local CMDLINE local IFS=$'\n' CMDLINE=(--bash-completion-index $COMP_CWORD) for arg in ${COMP_WORDS[@]}; do CMDLINE=(${CMDLINE[@]} --bash-completion-word $arg) done COMPREPLY=( $(stack "${CMDLINE[@]}") ) } complete -o filenames -F _stack stack ~~~ === "Bash" Add the output of the following command to your preferred completions file (e.g. `~/.config/bash_completions.d/stack`). ~~~bash stack --bash-completion-script $(which stack) ~~~ You may need to `source` this. === "Zsh" Add the output of the following command to your preferred completions file (e.g. `~/.config/zsh/completions/_stack`). ~~~zsh stack --zsh-completion-script $(which stack) ~~~ You won't need to `source` this, but do update your `fpath`: ~~~zsh fpath=($HOME/.config/zsh/completions $fpath) autoload -U compinit && compinit ~~~ === "fish" Add the output of the following command to your preferred completions file (e.g. `~/.config/fish/completions/stack.fish`). ~~~fish stack --fish-completion-script $(which stack) ~~~ stack-2.15.7/doc/SIGNING_KEY.md0000644000000000000000000000710514353310351013777 0ustar0000000000000000
# Signing key Each released Stack executable is signed with either: * the GPG key with ID 0x575159689BEFB442; or * the GPG key of a person that has been authorised by the GPG key with ID 0x575159689BEFB442. The signature is in an `*.asc` file. For example: ~~~text stack-2.7.5-linux-x86_64-bin stack-2.7.5-linux-x86_64-bin.asc ~~~ The signature can be verified with GPG, as follows: ~~~text # Receive the public key from a keyserver gpg --keyserver keyserver.ubuntu.com --recv-keys 0x575159689BEFB442 # Get information about the key gpg --keyid-format long --list-keys 0x575159689BEFB442 pub rsa2048/575159689BEFB442 2015-06-02 [SC] C5705533DA4F78D8664B5DC0575159689BEFB442 uid [ unknown] FPComplete sub rsa2048/85A738994664AB89 2015-06-02 [E] # Attempt to verify the file using the signature file. The public key has not # yet been certified with a trusted signature. gpg --verify stack-2.7.5-linux-x86_64-bin.asc stack-2.7.5-linux-x86_64-bin gpg: Signature made 06/03/2022 15:15:21 GMT Standard Time gpg: using RSA key C5705533DA4F78D8664B5DC0575159689BEFB442 gpg: Good signature from "FPComplete " [unknown] gpg: WARNING: This key is not certified with a trusted signature! gpg: There is no indication that the signature belongs to the owner. Primary key fingerprint: C570 5533 DA4F 78D8 664B 5DC0 5751 5968 9BEF B442 ~~~ The GPG key with ID 0x575159689BEFB442, and keys it has signed, have been uploaded to the [Ubuntu Keyserver](https://keyserver.ubuntu.com/pks/lookup?search=0x575159689BEFB442&fingerprint=on&op=index). 
This is the public key block for GPG key ID 0x575159689BEFB442: ~~~text -----BEGIN PGP PUBLIC KEY BLOCK----- Version: GnuPG v1 mQENBFVs+cMBCAC5IsLWTikd1V70Ur1FPJMn14Sc/C2fbXc0zRcPuWX+JaXgrIJQ 74A3UGBpa07wJDZiQLLz4AasDQj++9gXdiM9MlK/xWt8BQpgQqSMgkktFVajSWX2 rSXPjqLtsl5dLsc8ziBkd/AARXoeITmXX+n6oRTy6QfdMv2Tacnq7r9M9J6bAz6/ 7UsKkyZVwsbUPea4SuD/s7jkXAuly15APaYDmF5mMlpoRWp442lJFpA0h52mREX1 s5FDbuKRQW7OpZdLcmOgoknJBDSpKHuHEoUhdG7Y3WDUGYFZcTtta1qSVHrm3nYa 7q5yOzPW4/VpftkBs1KzIxx0nQ5INT5W5+oTABEBAAG0H0ZQQ29tcGxldGUgPGRl dkBmcGNvbXBsZXRlLmNvbT6JATcEEwEKACEFAlVs+cMCGwMFCwkIBwMFFQoJCAsF FgMCAQACHgECF4AACgkQV1FZaJvvtEIP8gf/S/k4C3lp/BFb0K9DHHSt6EaGQPwy g+O8d+JvL7ghkvMjlQ+UxDw+LfRKANTpl8a4vHtEQLHEy1tPJfrnMA8DNci8HLVx rK3lIqMfv5t85VST9rz3X8huSw7qwFyxsmIqFtJC/BBQfsOXC+Q5Z2nbResXHMeA 5ZvDopZnqKPdmMOngabPGZd89hOKn6r8k7+yvZ/mXmrGOB8q5ZGbOXUbCshst7lc yZWmoK3VJdErQjGHCdF4MC9KFBQsYYUy9b1q0OUv9QLtq/TeKxfpvYk9zMWAoafk M8QBE/qqOpqkBRoKbQHCDQgx7AXJMKnOA0jPx1At57hWl7PuEH4rK38UtLkBDQRV bPnDAQgAx1+4ENyaMk8XznQQ4l+nl8qw4UedZhnR5Xxr6z2kcMO/0VdwmIDCpxaM spurOF+yExfY/Chbex7fThWTwVgfsItUc/QLLv9jkvpveMUDuPyh/4QrAQBYoW09 jMJcOTFQU+f4CtKaN/1PNoTSU2YkVpbhvtV3Jn2LPFjUSPb7z2NZ9NKe10M0/yN+ l0CuPlqu6GZR5L3pA5i8PZ0Nh47j0Ux5KIjrjCGne4p+J8qqeRhUf04yHAYfDLgE aLAG4v4pYbb1jNPUm1Kbk0lo2c3dxx0IU201uAQ6LNLdF/WW/ZF7w3iHn7kbbzXO jhbq2rvZEn3K9xDr7homVnnj21/LSQARAQABiQEfBBgBCgAJBQJVbPnDAhsMAAoJ EFdRWWib77RC3ukH/R9jQ4q6LpXynQPJJ9QKwstglKfoKNpGeAYVTEn0e7NB0HV5 BC+Da5SzBowboxC2YCD1wTAjBjLLQfAYNyR+tHpJBaBmruafj87nBCDhSWwWDXwx OUDpNOwKUkrwZDRlM7n4byoMRl7Vh/7CXxaTqkyao1c5v3mHh/DremiTvOJ4OXgJ 77NHaPXezHkCFZC8/sX6aY0DJxF+LIE84CoLI1LYBatH+NKxoICKA+yeF3RIVw0/ F3mtEFEtmJ6ljSks5tECxfJFvQlkpILBbGvHfuljKMeaj+iN+bsHmV4em/ELB1ku N9Obs/bFDBMmQklIdLP7dOunDjY4FwwcFcXdNyg= =YUsC -----END PGP PUBLIC KEY BLOCK----- ~~~ stack-2.15.7/doc/Stack_and_VS_Code.md0000644000000000000000000001567114620153473015440 0ustar0000000000000000
# Stack and Visual Studio Code [Visual Studio Code](https://code.visualstudio.com/) (VS Code) is a popular source code editor, and ['Haskell'](https://marketplace.visualstudio.com/items?itemName=haskell.haskell) is an extension for VS Code that is popular with Haskell coders. The 'Haskell' extension can be used with Stack but there are some things to be aware of, set out below. ## Haskell Language Server The VS Code extension makes use of the Haskell Language Server (HLS). To work, HLS has to be built with the same version of GHC that it will support. That is, a version of HLS is required for each version of GHC in use. It is possible that the most recent versions of GHC are not supported by HLS. By default, the VS Code extension uses tools that are in the PATH. However, the extension's settings (under 'Haskell: Manage HLS') allow a user to specify that the extension should use a separate application, [GHCup](https://www.haskell.org/ghcup/), to download and install the versions of HLS that it needs. GHCup can download and install things other than HLS, including GHC, MSYS2 (on Windows), Cabal (a build tool), and Stack itself. GHCup can also update itself. On Windows, GHCup has the capability of using the Stack-supplied MSYS2 rather than installing a duplicate copy. Cabal (the build tool), like Stack, depends on the Cabal (the library). Cabal (the tool), unlike Stack, does not have the capability to automatically install necessary versions of GHC, and (as well as supporting the extension) GHCup fills a important gap for users of the Cabal tool. If the VS Code extension is set not to use GHCup, its user needs to ensure that each version of HLS that the extension needs is on the PATH. For the most part, the versions of HLS provided by GHCup are built with the same versions of GHC that Stack downloads from its default `setup-info` dictionary (see [YAML configuration: setup-info](yaml_configuration.md)). 
Stack's default is to mirror the 'official' binary distributions published by GHC. However, in some cases, it is possible that a GHCup-supplied and GHCup-selected HLS has been built with a different binary distribution of GHC than the one which Stack has installed. One example of that occurred with the release of GHC 9.0.2. For some Linux users (Debian 9 and Fedora 27), the version of GHC 9.0.2 linked on GHC’s download [web page](https://www.haskell.org/ghc/download_ghc_9_0_2.html) was broken. The GHC developers made alternative ‘9.0.2a’ versions available. For a while, Stack referred to the versions published by GHC on its download web page while the GHCup-supplied versions of HLS were built using alternative versions. This incompatibility led to problems. It was resolved by Stack's default also being changed to refer to the '9.0.2a' versions. (Where Stack has already installed GHC 9.0.2, it is necessary to delete GHC 9.0.2 from the `stack path --programs` directory. This will cause Stack to reinstall the alternative version, when it first needs GHC 9.0.2. Stack should distinguish what it builds with the alternative from what it has built, and cached, with the original GHC 9.0.2.) ### GHCup and Stack >= 2.9.1 From Stack 2.9.1, GHCup can configure Stack so that if Stack needs a version of GHC, GHCup takes over obtaining and installing that version. By default, the script to install GHCup (which can be run more than once) configures Stack in that way. For further information about how GHCup configures Stack, see the GHC installation customisation [documentation](yaml_configuration.md#ghc-installation-customisation). ### Workaround #1 If GHCup does not configure Stack in the way described above, one workaround is to allow GHCup to install versions of GHC on the PATH and to cause Stack to use those versions of GHC, by making use of Stack's `install-ghc` option (which needs to be disabled) and Stack's `system-ghc` option (which needs to be enabled). 
For further information about these options, see the `install-ghc` [documentation](yaml_configuration.md#install-ghc) and the `system-ghc` [documentation](yaml_configuration.md#system-ghc). For this workaround to work, each time that a snapshot is used that references a different version of GHC, then GHCup must be used to install it (if GHCup has not already installed that version). For example, to use `snapshot: lts-22.21` (GHC 9.6.5), the command `ghcup install ghc 9.6.5` must have been used to install GHC 9.6.5. That may be a minor inconvenience for some people, as one the primary benefits of Stack over other Haskell build tools has been that Stack automatically ensures that the necessary version of GHC is available. ### Workaround #2 If GHCup does not configure Stack, another partial workaround is to install GHCup so that it is 'empty' except for the current version of HLS, allow the VS Code extension to use GHCup to manage HLS requirements only, and to ignore any messages (if any) from the extension on start-up that installation of GHC, Cabal (the tool) and/or Stack are also necessary (they are not, if only Stack is being used). For this workaround to work, however, there can be no differences between the version of GHC that the GHCup-supplied HLS was built with and the version that Stack has installed. A slight inconvenience here is also the possibility of false messages from the start-up that need to be ignored. In principle, those messages can be disabled by [setting the following](https://github.com/haskell/vscode-haskell#setting-a-specific-toolchain) for the VS Code extension: ~~~yaml "haskell.toolchain": { "ghc": null, "cabal": null, "stack": null } ~~~ To install a version of GHCup that is 'empty' is a little more complicated than a default installation of GHCup. On Unix-like operating systems, the following environment variable must be set before GHCup's installation `sh` script is run: `BOOTSTRAP_HASKELL_MINIMAL`. 
On Windows, the second argument to the PowerShell script must be set to `$false`, namely: Set-ExecutionPolicy Bypass -Scope Process -Force;[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072;Invoke-Command -ScriptBlock ([ScriptBlock]::Create((Invoke-WebRequest https://www.haskell.org/ghcup/sh/bootstrap-haskell.ps1 -UseBasicParsing))) -ArgumentList $true,$false ### Cradle HLS may need a 'cradle' - an [`hie.yaml` file](https://hackage.haskell.org/package/hie-bios#stack) - in the project's root directory in order to work well. The [`gen-hie` tool](https://hackage.haskell.org/package/implicit-hie) can help generate such a cradle. ### Tips It has been suggested that a project must have been successfully built before the VS code extension (and HLS) is first activated on the project, for HLS to work reliably. stack-2.15.7/doc/stack_root.md0000644000000000000000000002473614620153474014362 0ustar0000000000000000
# Stack root The Stack root is a directory where Stack stores important files. On Unix-like operating systems and Windows, Stack can be configured to follow the XDG Base Directory Specification if the environment variable `STACK_XDG` is set to any non-empty value. However, Stack will ignore that configuration if the Stack root location has been set on the command line or the `STACK_ROOT` environment variable exists. ## Location The location of the Stack root depends on the operating system, whether Stack is configured to use the XDG Base Directory Specification, and/or whether an alternative location to Stack's default 'programs' directory has been specified. The location of the Stack root can be configured by setting the [`STACK_ROOT`](environment_variables.md#stack_root) environment variable or using Stack's [`--stack-root`](global_flags.md#stack-root-option) option on the command line. === "Unix-like" The Stack root contains snapshot packages; (by default) tools such as GHC, in a `programs` directory; Stack's global [YAML configuration](yaml_configuration.md#yaml-configuration) file (`config.yaml`); and Stack's [`global-projects`](yaml_configuration.md#yaml-configuration) directory. The default Stack root is `~/.stack`. === "Windows" The default Stack root is `%APPDIR%\stack`. If the `LOCALAPPDATA` environment variable exists, the default location of tools is `%LOCALAPPDATA%\Programs\stack`. Otherwise, it is the `programs` directory in the Stack root. !!! warning If there is a space character in the `%LOCALAPPDATA%` path (which may be the case if the relevant user account name and its corresponding user profile path have a space) this may cause problems with building packages that make use of the GNU project's `autoconf` package and `configure` shell script files. 
That may be the case particularly if there is no corresponding short name ('8 dot 3' name) for the directory in the path with the space (which may be the case if '8 dot 3' names have been stripped or their creation not enabled by default). If there are problems building, it will be necessary to override the default location of Stack's 'programs' directory to specify an alternative path that does not contain space characters. Examples of packages on Hackage that make use of `configure` are `network` and `process`. On Windows, the length of filepaths may be limited (to [MAX_PATH](https://docs.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=cmd)), and things can break when this limit is exceeded. Setting a Stack root with a short path to its location (for example, `C:\sr`) can help. === "XDG Base Directory Specification" The Stack root is `/stack`. If the `XDG_DATA_HOME` environment variable does not exist, the default is `~/.local/share/stack` on Unix-like operating systems and `%APPDIR%\stack` on Windows. The location of `config.yaml` is `/stack`. If the `XDG_CONFIG_HOME` environment variable does not exist, the default is `~/.config/stack` on Unix-like operating systems and `%APPDIR%\stack` on Windows. This approach treats: * the project-level YAML configuration file that is common to all projects without another such file in their project directory or its ancestor directories as _data_ rather than as part of Stack's own _configuration_; * the snapshots database as essential data rather than as non-essential data that would be part of a _cache_, notwithstanding that Stack will rebuild that database as its contents are needed; and * the Pantry store as essential data rather than as non-essential data that would be part of a _cache_, notwithstanding that Stack will download the package index and rebuild the store if it is absent. 
An alternative to the default location of tools such as GHC can be specified with the [`local-programs-path`](yaml_configuration.md#local-programs-path) configuration option. The location of the Stack root is reported by command: ~~~text stack path --stack-root ~~~ The full path of Stack's global YAML configuration file is reported by command: ~~~text stack path --global-config ~~~ The location of tools such as GHC for the current platform is reported by command: ~~~text stack path --programs ~~~ ## Contents The contents of the Stack root depend on the operating system, whether Stack is configured to use the XDG Base Directory Specification, and/or whether an alternative location to Stack's default 'programs' directory has been specified. === "Unix-like" The Stack root contains snapshot packages; (by default) tools such as GHC, in a `programs` directory; Stack's global [YAML configuration](yaml_configuration.md#yaml-configuration) file (`config.yaml`); and Stack's [`global-projects`](yaml_configuration.md#yaml-configuration) directory. === "Windows" The Stack root contains snapshot packages; Stack's global [YAML configuration](yaml_configuration.md#yaml-configuration) file (`config.yaml`); and Stack's [`global-projects`](yaml_configuration.md#yaml-configuration) directory. The default location of tools such as GHC and MSYS2 is outside of the Stack root. === "XDG Base Directory Specification" If Stack is following the XDG Base Directory Specification, the Stack root contains what it would otherwise contain for the operating system, but Stack's global YAML configuration file (`config.yaml`) may be located elsewhere. ### `config.yaml` This is Stack's global configuration file. For further information, see the documentation for non-project specific [configuration](yaml_configuration.md#non-project-specific-configuration). If the file is deleted, and Stack needs to consult it, Stack will create a file with default contents. 
### `stack.sqlite3` This is a 'user' database that Stack uses to cache certain information. The associated lock file is `stack.sqlite3.pantry-write-lock`. ### `global-project` directory This contains: * an explanation of the directory (`README.txt`); * the project-level configuration file (`stack.yaml`) for the global project and its associated lock file (`stack.yaml.lock`); and * if created, Stack's working directory (`.stack-work`) for the global project. If the project-level configuration file is deleted, and Stack needs to consult it, Stack will recreate the contents of the directory. ### `pantry\hackage` directory This contains a local cache of the package index. If the contents of the directory are deleted, and Stack needs to consult the package index, Stack will seek to download the latest package index. !!! info Stack depends on package `pantry` which, in turn, depends on package `hackage-security`. The latter handles the local cache of the package index. The type `CacheLayout` represents the location of the files that are cached. `pantry` uses `cabalCacheLayout :: CacheLayout`, the layout that Cabal (the tool) uses. That is what specifies the names of the files used to cache the package index, including `00-index.tar` and `00-index.tar.gz`. ### `pantry` directory This contains: * the Pantry database used by Stack (`pantry.sqlite3`) and its associated lock file (`pantry.sqlite2.pantry-write-lock`). If the database is deleted, and Stack needs to consult it, Stack will seek to create and initialise it. The database is initialised with information from the package index; and * a database of package versions that come with each version of GHC (`global-hints-cache.yaml`). ### `programs` directory This contains a directory for the platform. That directory contains for each installed Stack-supplied tool: * the archive file for the tool. This can be deleted; * a file indicating the tool is installed (`.installed`); and * a directory for the tool. 
To remove a Stack-supplied tool, delete all of the above. If Stack needs a Stack-supplied tool and it is unavailable, Stack will seek to obtain it. ### `setup-exe-cache` directory This contains a directory for the platform. That directory contains, for each version of GHC (an associated version of Cabal (the library)) that Stack has used, an executable that Stack uses to access Cabal (the library). If the contents of the directory are deleted, and Stack needs the executable, Stack will seek to rebuild it. ### `setup-exe-src` directory See the documentation for the [`setup-exe-cache` directory](#setup-exe-cache-directorysetup-exe-cache). This contains the two source files (`setup-.hs` and `setup-shim-.hs`) that Stack uses to build the executable. If the contents of the directory are deleted, and Stack needs the executable, Stack will recreate them. The hash in the names of the source files is a hash of arguments passed to GHC when building the executable and the contents of the two source files. The content of the `setup-.hs` file is the familiar: ~~~haskell import Distribution.Simple main = defaultMain ~~~ The content of the `setup-shim-.hs` file uses `main` except when the executable is called with arguments `repl` and `stack-initial-build-steps`. Then Stack uses Cabal (the library) to create the autogenerated files for every configured component. Stack's `stack ghci` or `stack repl` commands call the executable with those arguments. ### `snapshots` directory This contains a directory for each snapshot that Stack creates when building immutable dependencies of projects. If the contents of the directory are deleted, and the snapshot is not available to Stack when it builds, Stack will recreate the snapshot. ### `templates` directory This contains a `.hsfile` for each project template that Stack has used. For further information, see the [`stack templates`](templates_command.md) command documentation. 
If the contents of the directory are deleted, and Stack needs a project template
# Stack work directories Stack work directories are directories within a local project or package directory in which Stack stores files created during the build process. Stack can be used without an understanding of the content of those directories. In particular, the [`stack exec`](exec_command.md) command sets up an environment where relevant subdirectories of the project Stack work directory are on the PATH. ## Naming By default, Stack work directories are named `.stack-work`. The name can be overidden by: * the use of the [`STACK_WORK` environment variable](environment_variables.md#stack_work); * the [`work-dir`](yaml_configuration.md#work-dir) non-project specific configuration option; or * the [`--work-dir`](global_flags.md#-work-dir-option) command line option. Given the location of Stack work directories, the name of the work directories must be a relative path to a directory. ## Location If the work directory does not already exist, it will be created by the [`stack build`](build_command.md) command as a subdirectory of each project package directory and, if different, the project directory. ## Project package Stack work directory The Stack work directory for a project package will contain a `dist` directory. This directory will contain a path to a directory containing: * a `build` directory; * a `package.conf.inplace` directory; * a `stack-build-caches` directory; * a `build-lock` file; * a `setup-config` file; * a `stack-cabal-mod` file. This file is used by Stack only for its modification time; * a `stack-project-root` file. This file contains an absolute path to the project root directory; and * a `stack-setup-config-mod` file. This file is used by Stack only for its modification time. The directory, relative to the project package directory or the project directory, is the one reported by [`stack path --dist-dir`](path_command.md). 
=== "Unix-like" On Unix-like operating systems, the path to the directory is a directory named after the platform (including Stack's classification of variants of Linux distributions) followed by a directory named after the GHC version. === "Windows" On Windows, the path to the directory is an eight-character hash of the path that applies on Unix-like operating systems. ## Project Stack work directory The Stack work directory for a project will contain a `install` directory. This directory will contain a path to a directory containing: * a `bin` directory, containing built executable files; * a `doc` directory, containing a directory for each project package. This is the directory reported by [`stack path --local-doc-root`](path_command.md); * if the [`stack hpc`](hpc_command.md) command is used, a `hpc` directory. This is the directory reported by [`stack path --local-hpc-root`](path_command.md); * a `lib` directory, containing a directory named after the platform and the GHC version and, within that, a directory for each project package; * a `pkgdb` directory. This is the directory reported by [`stack path --local-pkg-db`](path_command.md); * a `stack.sqlite3` file; and * a `stack.sqlite3.pantry-write-lock` file. The directory is the one reported by [`stack path --local-install-root`](path_command.md). === "Unix-like" On Unix-like operating systems, the path to the directory is a directory named after the platform (including Stack's classification of variants of Linux distributions) followed by a directory named after a SHA 256 hash (see further below) followed by a directory named after the version number of GHC. 
The SHA 256 hash is a hash of the following information: * the path to the specified compiler; * the information about the compiler provided by `ghc --info`; * the options that Stack passes to GHC for package that is not a project package; and * information about the immutable dependencies: their location, whether or not Haddock documentation is to be built, their flags, their GHC options, and their Cabal configuration options. The options that Stack passes to GHC for a package that is not a project package depend on: * the specification of [profiling](https://docs.haskellstack.org/en/stable/build_command/#flags-affecting-ghcs-behaviour); * the specification of [stripping](https://docs.haskellstack.org/en/stable/build_command/#flags-affecting-ghcs-behaviour); and * if [`apply-ghc-options: everything`](yaml_configuration.md#apply-ghc-options) is specified, any GHC command line options specified on the command line. !!! note As a consequence, the path reported by the following commands will differ (and similarly for the paths established by the [`stack exec`](exec_command.md) command): ~~~text stack path --local-install-root stack --profile path --local-install-root stack --no-strip path --local-install-root stack --profile --no-strip path --local-install-root ~~~ === "Windows" On Windows, the path to the directory is an eight-character hash of the path that applies on Unix-like operating systems. Following a `stack ghci` or `stack repl` command, the Stack work directory for a project will contain a `ghci` directory. This directory will contain paths to `cabal_macos.h` files that are generated automatically by Cabal. !!! note Haskell Language Server makes use of the `stack ghci` command to obtain information. If the [`stack hoogle`](hoogle_command.md) command is used, the Stack work directory for a project will contain a `hoogle` directory. This directory will contain a directory being the one reported by [`stack path --local-hoogle-root`](path_command.md). 
The naming of the path to the directory is the same as for the path to the directory in the `install` directory. stack-2.15.7/doc/stack_yaml_vs_cabal_package_file.md0000644000000000000000000001673414620153474020644 0ustar0000000000000000
# stack.yaml versus package.yaml versus a Cabal file What is the difference between a `stack.yaml` file, a `package.yaml` file and a Cabal file (named `<package_name>.cabal`)? This page aims to make that clear. In short: * `stack.yaml` contains project-level configuration for Stack, and may contain project-specific options and non-project-specific options. * `package.yaml` contains a description of a package in the [Hpack](https://github.com/sol/hpack) format. Hpack, including Stack's built-in version, uses the file to create a Cabal file. * a Cabal file also contains a description of a package, but in the format used by Cabal. ## package.yaml versus a Cabal file Why two different formats to describe packages? Hpack is considered to have some advantages over the underlying Cabal format, which are explained in its project repository. They include that the Hpack format supports YAML syntax and the automatic generation of the lists of `exposed-modules` used in the Cabal format. The remainder of this page will focus on the difference between a `stack.yaml` file and a package description file. ## Package versus project Stack is a build tool and it uses Cabal, a build system. Cabal defines the concept of a _package_. A package has: * A name and version * optionally, one library * optionally, one or more executables * A Cabal file (or, as mentioned above, an [Hpack](https://github.com/sol/hpack) `package.yaml` file that generates a Cabal file) * And a bunch more There is a one-to-one correspondence between a package and a Cabal file. Stack defines a new concept called a _project_. A project has: * A snapshot _resolver_ (more on this later) * Extra dependencies on top of the snapshot * Optionally, one or more local Cabal packages * Flag and GHC options configurations * And a bunch more Stack configuration Often you will have a project that defines only one local Cabal package that you are working on. 
If you need to specify a dependency, a source of confusion can be why you need to specify it both in the `stack.yaml` file _and_ in the Cabal file. To explain, let's take a quick detour to talk about snapshots and how Stack resolves dependencies. ## Snapshots and resolvers Stack follows a rule that says, for any project, there is precisely one version of each package available. Obviously, for many packages there are _many_ versions available in the world. But when resolving a `stack.yaml` file, Stack requires that you have chosen a specific version for each package available. The most common means by which this set of packages is defined is via a snapshot provided by Stackage. For example, if you go to the page , you will see a list of 3,342 packages at specific version numbers. When you then specify `snapshot: lts-22.21` or, alternatively, `resolver: lts-22.21`, you're telling Stack to use those package versions in resolving dependencies down to specific versions of packages. Sometimes a snapshot doesn't have all of the packages that you want. Or you want a different version of a package. Or you want to work on a local modification of a package. In all of those cases, you can add more configuration data to your `stack.yaml` file to override the values it received from your [`snapshot`](yaml_configuration.md#snapshot) or [`resolver`](yaml_configuration.md#resolver) setting. At the end of the day, each of your projects will end up with some way of resolving a package name into a specific version of that package. ## Why specify dependencies twice? The package `acme-missiles` is not included in any Stackage snapshots. When you add something like this to your `stack.yaml` file: ~~~yaml extra-deps: - acme-missiles-0.3 ~~~ what you're saying to Stack is: "if at any point you find that you need to build the `acme-missiles` package, please use version `0.3`". You are _not_ saying "please build `acme-missiles` now." 
You are also not saying "my package depends on `acme-missiles`." You are simply making it available should the need arise. When you add to your `package.yaml` file: ~~~yaml dependencies: - acme-missiles ~~~ or, alternatively, you add directly to your Cabal file: ~~~yaml build-depends: acme-missiles ~~~ you're saying "this package requires that `acme-missiles` be available." Since `acme-missiles` doesn't appear in your snapshot, without also modifying your `stack.yaml` to mention it via `extra-deps`, Stack will complain about the dependency being unavailable. You may challenge: but why go through all of that annoyance? Stack knows what package I want, why not just go grab it? The answer is that, if Stack just grabbed `acme-missiles` for you without it being specified in the `stack.yaml` somehow, you'd lose reproducibility. How would Stack know which version to use? It may elect to use the newest version, but if a new version is available in the future, will it automatically switch to that? Stack's core philosophy is that build plans are always reproducible. The purpose of the `stack.yaml` file is to define an immutable set of packages. No matter when in time you use it, and no matter how many new release happen in the interim, the build plan generated should be the same. (There is, however, at least one hole in this theory today, which is Hackage revisions. When you specify `extra-deps: [acme-missiles-0.3]`, it doesn't specify which revision of the Cabal file to use, and Stack will just choose the latest. Stack has the ability to specify exact revisions of Cabal files, but this isn't enforced as a requirement, because it is so different from the way most people work with packages.) And now, how about the other side: why doesn't Stack automatically add `acme-missiles` to `build-depends` in your Cabal file if you add it as an extra-dep? 
There are a surprising number of reasons for this: * The Cabal specification doesn't support anything like that * There can be multiple packages in a project, and how do we know which package actually needs the dependency? * There can be multiple components (libraries, executables, etc) in a package, and how do we know which of those actually needs the dependency? * The dependency may only be conditionally needed, based on flags, operating system, or architecture. As an extreme example, we wouldn't want a Linux-only package to be built by force on Windows. While for simple use cases it seems like automatically adding dependencies from the Cabal file to the `stack.yaml` file or vice-versa would be a good thing, it breaks down immediately for any semi-difficult case. Therefore, Stack requires you to add it to both places. And a final note, in case it wasn't clear. The example above used `acme-missiles`, which is not in Stackage snapshots. If, however, you want to depend on a package already present in the snapshot you've selected, there's no need to add it explicitly to your `stack.yaml` file: it's already there implicitly via the `snapshot` setting. This is what you do the majority of the time, such as when you add `vector` or `mtl` as a `build-depends` value. ## Should I check-in automatically generated Cabal files? Yes, you should. This recommendation was changed in [issue #5210](https://github.com/commercialhaskell/stack/issues/5210). Please see the discussion there. stack-2.15.7/doc/templates_command.md0000644000000000000000000000322714505617134015676 0ustar0000000000000000
# The `stack templates` command ~~~text stack templates ~~~ `stack templates` provides information to the standard output stream about project templates used with the [`stack new` command](new_command.md). Project templates are specified in `.hsfiles` files. The format of those files is documented at the [`commercialhaskell/stack-templates`](https://github.com/commercialhaskell/stack-templates#project-template-format) repository on GitHub. Any GitHub, GitLab or Bitbucket repository named `stack-templates` can provide project template files. For example, a template file `username/stack-templates/my-template.hsfiles` on GitHub can be identified as `username/my-template` when using `stack new`. The relevant service can be specified by a prefix: `github:` for [GitHub](https://github.com/) (the default service), `gitlab:` for [GitLab](https://gitlab.com), or `bitbucket:` for [Bitbucket](https://bitbucket.com). [`commercialhaskell/stack-templates`](https://github.com/commercialhaskell/stack-templates#project-template-format) on GitHub is the default repository for project templates. Its username (`commercialhaskell`) does not need to be specified when using `stack new`. The project template that `stack new` uses by default is named `new-template` and provided at the default repository. The default repository provides 24 other project templates. Its Wiki provides a description of some of those templates and information about the location of other templates. stack-2.15.7/doc/travis_ci.md0000644000000000000000000001456214353310351014161 0ustar0000000000000000
# Travis CI This page documents how to use Stack on [Travis CI](https://travis-ci.org/). We assume you have basic familiarity with Travis. We provide two fully baked example files ready to be used on your projects: * [The simple Travis configuration](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/travis-simple.yml) is intended for applications that do not require multiple GHC support or cross-platform support. It builds and tests your project with just the settings present in your `stack.yaml` file. * [The complex Travis configuration](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/travis-complex.yml) is intended for projects that need to support multiple GHC versions and multiple OSes, such as open source libraries to be released to Hackage. It tests against cabal-install, as well as Stack on Linux and macOS. The configuration is significantly more involved to allow for all of this branching behavior. !!! note It is likely going to be necessary to modify this configuration to match the needs of your project, such as tweaking the build matrix to alter which GHC versions you test against, or to specify GHC-version-specific `stack.yaml` files if necessary. Don't be surprised if it doesn't work the first time around. See the multiple GHC section below for more information. Each of these configurations is ready to be used immediately, just copy-paste the content into the `.travis.yml` file in the root of your repo, enable Travis on the repo, and you're good to go. You may also be interested in using AppVeyor, which supports Windows builds, for more cross-platform testing. 
There's a [short blog post available on how to do this](http://www.snoyman.com/blog/2016/08/appveyor-haskell-windows-ci), or just copy in [the appveyor.yml file](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/appveyor.yml) The rest of this document explains the details of common Travis configurations for those of you who want to tweak the above configuration files or write your own. *Note:* both Travis and Stack infrastructures are actively developed. We try to document best practices at the moment. ## Container infrastructure For Stack on Travis to be practical, we must use caching. Otherwise build times will take an incredibly long time, about 30 minutes versus 3-5. Caching is currently available only for [container-based Travis infrastructure](http://docs.travis-ci.com/user/workers/container-based-infrastructure/). Shortly we have to add ~~~yaml sudo: false # Caching so the next build will be fast too. cache: directories: - $HOME/.stack ~~~ To the `.travis.yml`. This however restricts how we can install GHC and Stack on the Travis machines. ## Installing Stack Currently there is only one reasonable way to install Stack: fetch precompiled binary from the GitHub. ~~~yaml before_install: # Download and unpack the stack executable - mkdir -p ~/.local/bin - export PATH=$HOME/.local/bin:$PATH - travis_retry curl -L https://get.haskellstack.org/stable/linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack' ~~~ ## Installing GHC There are two ways to install GHC: - Let Stack download GHC - Install GHC using [apt plugin](http://docs.travis-ci.com/user/apt/) See the above scripts for an example of the first option (letting Stack download GHC). Here, we will explain the second option. With single GHC the situation is simple: ~~~yaml before_install: # Install stack as above # ... 
# Configure stack to use the system GHC installation - stack config set system-ghc --global true - export PATH=/opt/ghc/7.10.2/bin:$PATH addons: apt: sources: - hvr-ghc packages: - ghc-7.10.2 ~~~ ### Multiple GHC - parametrised builds Travis apt plugin doesn't yet support installing apt packages dynamically (https://github.com/travis-ci/travis-ci/issues/4291). That for we need to write a bit repetitive `.travis.yml`. Also for different GHC versions, you probably want to use different `stack.yaml` files. ~~~yaml # N.B. No top-level env: declaration! matrix: include: - env: GHCVER=7.8.4 STACK_YAML=stack.yaml addons: apt: sources: - hvr-ghc packages: - ghc-7.8.4 - env: GHCVER=7.10.1 STACK_YAML=stack-7.10.yaml addons: apt: sources: - hvr-ghc packages: - ghc-7.10.1 - env: GHCVER=head STACK_YAML=stack-head.yaml addons: apt: sources: - hvr-ghc packages: - ghc-head allow_failures: - env: GHCVER=head STACK_YAML=stack-head.yaml before_install: # ghc - export PATH=/opt/ghc/$GHCVER/bin:$PATH ~~~ Especially to use ghc `HEAD` you need to pass `--skip-ghc-check` option to Stack. ## Running tests After the environment setup, actual test running is simple: ~~~yaml script: - stack --no-terminal --skip-ghc-check test ~~~ In case you're wondering: we need `--no-terminal` because stack does some fancy sticky display on smart terminals to give nicer status and progress messages, and the terminal detection is broken on Travis. ## Other details Some Stack commands will run for long time (when cache is cold) without producing any output. To avoid timeouts, use the built in [travis_wait](https://docs.travis-ci.com/user/common-build-problems/#Build-times-out-because-no-output-was-received). 
~~~yaml install: - travis_wait stack --no-terminal --skip-ghc-check setup - travis_wait stack --no-terminal --skip-ghc-check test --only-snapshot ~~~ ## Examples - [futurice/fum2github](https://github.com/futurice/fum2github/blob/master/.travis.yml) - [haskell-distributed/cloud-haskell](https://github.com/haskell-distributed/cloud-haskell/blob/master/.travis.yml) - [simonmichael/hledger](https://github.com/simonmichael/hledger/blob/master/.travis.yml) - [fpco/wai-middleware-crowd](https://github.com/fpco/wai-middleware-crowd/blob/master/.travis.yml) - [commercialhaskell/all-cabal-hashes-tool](https://github.com/commercialhaskell/all-cabal-hashes-tool/blob/master/.travis.yml) stack-2.15.7/doc/uninstall_command.md0000644000000000000000000000072114505617134015705 0ustar0000000000000000
# The `stack uninstall` command ~~~text stack uninstall ~~~ `stack uninstall` provides information to the standard output stream about how to uninstall Stack or a Stack-supplied tool (such as GHC or, on Windows, MSYS2). It does not itself uninstall Stack or a Stack-supplied tool. stack-2.15.7/doc/unpack_command.md0000644000000000000000000000437414604306200015152 0ustar0000000000000000
# The `stack unpack` command ~~~text stack unpack TARGET [--candidate] [--to DIR] ~~~ `stack unpack` downloads an archive file for one or more specified target packages from the package index (e.g. Hackage), or one or more specified target package candidates, and unpacks each archive into a subdirectory named after the package version. In the case of packages from the package index, a target can be a package name only. In that case, by default: * if Stack's `--snapshot` option is not specified, the download is for the most recent version of the package in the package index. Stack will first seek to update the index; and * if Stack's `--snapshot` option is specified, the download is for the version of the package included directly in the specified snapshot. !!! note Stackage snapshots do not include directly most GHC boot packages (packages that come with GHC and are included in GHC's global package database) but some snapshots may include directly some boot packages. In particular, some snapshots include directly `Win32` (which is a boot package on Windows) while others do not. Otherwise, a target should specify a package name and version (for example, `acme-missiles-0.3`). In the case of package versions from the package index, optionally, a revision in the package index can be specified by appending `@rev:<number>` or `@sha256:<sha>` (for example, `acme-missiles-0.3@rev:0`). By default: * the download is from the package index. Pass the flag `--candidate` to specify package candidates; and !!! note Stack assumes that a package candidate archive is a `.tar.gz` file named after the package version and located at endpoint `package\<package version>\candidate\`. This is true of Hackage. * the target is unpacked into a subdirectory of the current directory. Pass the option `--to <directory>` to specify an alternative destination directory to the current directory. The destination directory can be an absolute one or relative to the current directory. 
stack-2.15.7/doc/update_command.md0000644000000000000000000000064014353310533015150 0ustar0000000000000000
# The `stack update` command ~~~text stack update ~~~ Generally, Stack automatically updates the package index when necessary. `stack update` will download the most recent set of packages from your package indices (e.g. Hackage). stack-2.15.7/doc/upgrade_command.md0000644000000000000000000001063114604306200015311 0ustar0000000000000000
# The `stack upgrade` command Either: ~~~text stack upgrade [--binary-only] [--binary-platform ARG] [--force-download] [--[no-]only-local-bin] [--binary-version ARG] [--github-org ARG] [--github-repo ARG] ~~~ or: ~~~text stack upgrade [--source-only] [--git] [--git-repo ARG] [--git-branch ARG] ~~~ `stack upgrade` will get a new version of Stack. It can also get a version before the current version (downgrade). !!! warning If you use GHCup to install Stack, use only GHCup to upgrade Stack. By default: * the new version will be from an existing binary distribution. Pass the `--source-only` flag to specify compiling from source code. The `--binary-only` and `--source-only` flags are alternatives; * the new version will not overwrite the existing version unless it is newer. Pass the `--force-download` flag to force a download; * when an existing binary distribution is applicable, it will be put in Stack's local binary directory (see `stack path --local-bin`) and named `stack` (replacing any existing executable named `stack` there); * if the current running Stack executable is named `stack` (or, on Windows, `stack.exe`) (this is case insensitive), an existing binary distribution will replace it. If the executable is located outside of Stack's local binary directory, pass the `--only-local-bin` flag to skip that step; * if the current running Stack executable is named other than `stack` (and, on Windows, `stack.exe`), an existing binary distribution will only be put in Stack's local binary directory and named `stack`. Pass the `--no-only-local-bin` flag to replace also the current running executable; * the new version will be the latest available. Pass the `--binary-version ` option to specify the version (this implies `--force-download`); * the binary distribution will be sought from the GitHub organisation/user `commercialhaskell`. 
Pass the `--github-org ` option to specify a different GitHub user; * the binary distribution will be sought from the GitHub repository `stack`. Pass the `--github-repo ` option to specify a different repository; and * the binary distribution will be sought for the current platform. Pass the `--binary-platform ` option to specify a different platform (`--`). When compiling from source code, by default: * Stack will obtain the source code for the most recent version in the package index (eg Hackage). Pass the flag `--git` to specify the most recent version from the `master` branch of Stack's repository (pass the option `--git-branch ` to specify a different branch and the option `--git-repo ` to specify a different repository). !!! note An earlier version of Stack could be inconsistent with some of the current contents of the Stack root. For further information about the contents of the Stack root and configuring its location, see the documentation about the [Stack root](stack_root.md). ## Examples * `stack upgrade` seeks an upgrade to the latest version of Stack available as a binary distribution for the platform, if newer. * `stack upgrade --force-download` seeks an upgrade to the latest version of Stack available as a binary distribution for the platform, even if not newer. * If the Stack executable is named `my-stack`, `my-stack upgrade` seeks only to put the latest version of Stack available as a binary distribution for the platform, if newer, in Stack's local binary directory and name it `stack`. `my-stack upgrade --no-only-local-bin` seeks also to upgrade `my-stack` to the latest version of Stack available. * `stack upgrade --binary-version 2.15.1` seeks an upgrade to Stack 2.15.1 if available as a binary distribution for the platform, even if not newer. * `stack upgrade --source-only` seeks an upgrade by building Stack with Stack from the latest version of the source code in the package index (i.e. Hackage). 
* `stack upgrade --source-only --git` seeks an upgrade by building Stack with Stack from the latest version of the source code in the `master` branch of Stack's repository. stack-2.15.7/doc/upload_command.md0000644000000000000000000001065214620153445015162 0ustar0000000000000000
# The `stack upload` command ~~~text stack upload [ITEM] [-d|--documentation] [--pvp-bounds PVP-BOUNDS] [--ignore-check] [--[no-]test-tarball] [--tar-dir ARG] [--candidate] [--setup-info-yaml URL] [--snapshot-location-base URL] ~~~ By default: * the command uploads one or more packages. Pass the flag `--documentation` (`-d` for short) to upload documentation for one or more packages; and * the upload is a package to be published or documentation for a published package. Pass the flag `--candidate` to upload a [package candidate](http://hackage.haskell.org/upload#candidates) or documentation for a package candidate. At least one `ITEM` must be specified. For example, if the current working directory is a package directory: ~~~text stack upload . ~~~ ## Upload one or more packages Hackage accepts packages for uploading in a standard form, a compressed archive ('tarball') in the format produced by Cabal's `sdist` action. If `ITEM` is a relative path to an sdist tarball, `stack upload` uploads the package to Hackage. If `ITEM` is a relative path to a package directory, `stack upload` generates a file for your package, in the format accepted by Hackage for uploads, and uploads the package to Hackage. By default: * the command will check each package for common mistakes. Pass the flag `--ignore-check` to disable such checks; * Stack will not test the resulting package archive. Pass the flag `--test-tarball` to cause Stack to test each resulting package archive, by attempting to build it. The `--pvp-bounds <pvp-bounds mode>` option determines whether and, if so, how PVP version bounds should be added to the Cabal file of the package. The available modes for basic use are: `none`, `lower`, `upper`, and `both`. The available modes for use with Cabal file revisions are `lower-revision`, `upper-revision` and `both-revision`. For further information, see the [YAML configuration](yaml_configuration.md#pvp-bounds) documentation. 
The `--tar-dir <path>` option determines whether the package archive should be copied to the specified directory. ## Upload documentation for a package :octicons-beaker-24: Experimental [:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) Hackage accepts documentation for a package for uploading in a standard form and in a compressed archive ('tarball') in the `.tar.gz` format. For further information about how to create such an archive file, see the documentation for the [`stack haddock --haddock-for-hackage`](build_command.md#-no-haddock-for-hackage-flag) command. If `ITEM` is a relative path to a package directory, `stack upload --documentation` uploads an existing archive file of documentation for the specified package to Hackage. If the `--documentation` flag is passed then flags specific to package upload are ignored. ## The `HACKAGE_USERNAME` and `HACKAGE_PASSWORD` environment variables [:octicons-tag-24: 2.3.1](https://github.com/commercialhaskell/stack/releases/tag/v2.3.1) `stack upload` will request a Hackage username and password to authenticate. This can be avoided by setting the `HACKAGE_USERNAME` and `HACKAGE_PASSWORD` environment variables. For example: === "Unix-like" ~~~text export HACKAGE_USERNAME="<username>" export HACKAGE_PASSWORD="<password>" stack upload . ~~~ === "Windows (with PowerShell)" ~~~text $Env:HACKAGE_USERNAME='<username>' $Env:HACKAGE_PASSWORD='<password>' stack upload . ~~~ ## The `HACKAGE_KEY` environment variable [:octicons-tag-24: 2.7.5](https://github.com/commercialhaskell/stack/releases/tag/v2.7.5) Hackage allows its members to register an API authentication token and to authenticate using the token. A Hackage API authentication token can be used with `stack upload` instead of username and password, by setting the `HACKAGE_KEY` environment variable. For example: === "Unix-like" ~~~text HACKAGE_KEY=<api_key> stack upload . ~~~ === "Windows (with PowerShell)" ~~~text $Env:HACKAGE_KEY=<api_key> stack upload . 
~~~ stack-2.15.7/doc/yaml_configuration.md0000644000000000000000000021744414620153474016103 0ustar0000000000000000
# Configuration and customisation Stack is configured by the content of YAML files. Some Stack operations can also be customised by the use of scripts. !!! info A Haskell package is an organised collection of Haskell code and related files. It is described by a Cabal file or a `package.yaml` file (which can be used to generate a Cabal file). The package description is itself part of the package. Its file is located in the root directory of a project package or dependency located locally. A Stack project is a local directory that contains a Stack project-level configuration file (`stack.yaml`, by default). A project may relate to more than one project package. A single-package project's directory will usually also be the project package's root directory. ## YAML configuration Stack's YAML configuration options break down into [project-specific](#project-specific-configuration) options and [non-project-specific](#non-project-specific-configuration) options. The former are configured at the project level. The latter are configured at the project level or globally. The **project-level** configuration file (`stack.yaml`, by default) contains project-specific options and may contain non-project-specific options. However, non-project-specific options in the project-level configuration file in the `global-project` directory (see below) are ignored by Stack. Stack obtains project-level configuration from one of the following (in order of preference): 1. A file specified by the `--stack-yaml` command line option. 2. A file specified by the `STACK_YAML` environment variable. 3. A file named `stack.yaml` in the current directory or an ancestor directory. 4. A file name `stack.yaml` in the `global-project` directory in the [Stack root](stack_root.md). The **global** configuration file (`config.yaml`) contains only non-project-specific options. The location of this file depends on the operating system and whether Stack is configured to use the XDG Base Directory Specification. 
=== "Unix-like" `config.yaml` is located in `/etc/stack` (for system-wide options); and/or in the [Stack root](stack_root.md) (for user-specific options). === "Windows" `config.yaml` is located in the [Stack root](stack_root.md). === "XDG Base Directory Specification" On Unix-like operating systems and Windows, Stack can be configured to follow the XDG Base Directory Specification if the environment variable `STACK_XDG` is set to any non-empty value. However, Stack will ignore that configuration if the [Stack root](stack_root.md) location has been set on the command line or the `STACK_ROOT` environment variable exists. If Stack is following the XDG Base Directory Specification, the location of `config.yaml` (for user-specific options) is `/stack`. If the `XDG_CONFIG_HOME` environment variable does not exist, the default is `~/.config/stack` on Unix-like operating systems and `%APPDIR%\stack` on Windows. This page is intended to document fully all YAML configuration options. If you identify any inaccuracies or incompleteness, please update the page, and if you're not sure how, open an issue labeled "question". If you wish to understand the difference between a `stack.yaml` files and a Cabal file (named `.cabal`), see the [stack.yaml vs a Cabal file](stack_yaml_vs_cabal_package_file.md) documentation. ## Project-specific configuration Project-specific configuration options are valid only in a project-level configuration file (`stack.yaml`, by default). Each of the Haskell packages to which a Stack project relates is either a **project package** that is part of the project and located locally or a package on which one or more of the project packages depends (directly or indirectly). The latter is referred to as a **dependency** and it may be located locally or elsewhere. !!! info Project packages are built by default. Dependencies are only built when needed. Building can target individual components of a project package. 
The individual components of dependencies cannot be targeted. Test suite and benchmark components of a project package can be built and run. The library and executable components of a dependency, and only those components, are built when the dependency is needed. In your project-specific options, you specify both **which project packages** to build and **which dependencies to use** when building these packages. A dependency specified as an [extra-dep](#extra-deps) will shadow a package of the same name specified in a [snapshot](#snapshot). A project package will shadow a dependency of the same name. ### snapshot Command line equivalent (takes precedence): [`--snapshot`](global_flags.md#snapshot-option) or [`--resolver`](global_flags.md#resolver-option) option The `snapshot` key specifies which snapshot is to be used for this project. A snapshot defines a GHC version, the package version of packages available for installation, and various settings like build flags. It is also called a resolver since a snapshot states how dependencies are resolved. There are currently four snapshot types: * LTS Haskell snapshots, e.g. `snapshot: lts-22.21` * Stackage Nightly snapshots, e.g. `snapshot: nightly-2024-05-06` * No snapshot, just use packages shipped with the compiler. For GHC this looks like `snapshot: ghc-9.6.5` * Custom snapshot, via a URL or relative file path. For further information, see the [snapshot and package location](pantry.md) documentation. Each of these snapshots will also determine what constraints are placed on the compiler version. See the [compiler-check](#compiler-check) option for some additional control over compiler version. A package version specified in a snapshot can be shadowed by an [extra-dep](#extra-deps) of the same name or a [project package](#packages) of the same name. ### resolver `resolver` and [`snapshot`](#snapshot) are synonyms. Only one of these keys is permitted, not both. ### packages Default: ~~~yaml packages: - . 
~~~ The `packages` key specifies a list of the project packages that are part of your project. These are specified via paths to local directories. A path is considered relative to the directory containing the project-level configuration file (`stack.yaml`, by default). For example, if the `stack.yaml` file is located at `/dir1/dir2/stack.yaml`, and has: ~~~yaml packages: - my-package - dir3/my-other-package ~~~ the configuration means "project packages in directories `/dir1/dir2/my-package` and `/dir1/dir2/dir3/my-other-package`". The `packages` key is optional. The default value, '`.`', means that the project has a single project package located in the current directory. A project package will shadow a dependency of the same name. A package version specified in a snapshot can be shadowed by an [extra-dep](#extra-deps) of the same name or a [project package](#packages) of the same name. Each specified project package directory must have a valid Cabal file or Hpack `package.yaml` file present. Any subdirectories of the directory are not searched for Cabal files. A subdirectory has to be specified as an independent item in the list of project packages. A project package is different from a dependency (located locally or elsewhere) specified as an [extra-dep](#extra-deps) or via a [snapshot](#snapshot). For example: * a project package will be built by default by commanding [`stack build`](build_command.md) without specific targets. A dependency will only be built if it is needed; and * test suites and benchmarks may be built and run for a project package. They are never run for a dependency. ### extra-deps Default: `[]` The `extra-deps` key specifies a list of extra dependencies on top of what is defined in the [snapshot](#snapshot). A dependency may come from either a Pantry package location or a local file path. 
A Pantry package location is one of three different kinds of sources:
note A specified extra-dep that does not have the format of a valid Pantry package location (for example, a reference to a package on Hackage that omits the package's version) will be interpreted as a local file path. An extra-dep will shadow a dependency specified in a [snapshot](#snapshot) of the same name. An extra-dep can be shadowed by a [project package](#packages) of the same name. !!! info Some Haskell packages published on Hackage, for example `base` and `ghc`, are referred to as 'wired-in' to one or more versions of GHC or as 'magic'. They can be distinguished from normal packages by the contents of their Cabal files: GHC's `-this-unit-id` option is set as the name of the package without a version. For example, the `base.cabal` for `base-4.19.1.0` includes: ~~~yaml -- We need to set the unit id to base (without a version number) -- as it's magic. ghc-options: -this-unit-id base ~~~ The GHC boot packages that are 'wired-in' cannot be shaddowed with different versions of the same package. Given their dependencies, the use of these boot packages in a build plan may limit what can be specified as an extra-dep. For example, GHC boot package `ghc-9.8.2` has a dependency on `process`. Its `*.conf` file identifies the dependency as `process-1.6.18.0-4fb7`. If package `ghc-9.8.2` is part of a build plan and a different version of `process` is specified as an extra-dep, during a build, Stack will identify that the build plan refers to two versions of `process` and warn that the build is likely to fail. Stack treats the following as the names of 'wired-in' packages: `base`, `dph-par`, `dph-seq`, `ghc-bignum`, `ghc-prim`, `ghc`, `integer-gmp`, `integer-simple`, `interactive`, `rts` and `template-haskell`. ### flags Default: `{}` Command line equivalent (takes precedence): [`stack build --flag`](build_command.md#-flag-option) option Cabal flags can be set for each package separately. 
For example: ~~~yaml flags: package-name: flag-name: true ~~~ This overrides all Cabal flag specifications (if any) for the specified packages in the snapshot. !!! note For a package included directly in the snapshot, if the Cabal flag specifications differ from the Cabal flag specifications (if any) in the snapshot, then the package will automatically be promoted to be an [extra-dep](#extra-deps). !!! note In order to set a Cabal flag for a GHC boot package, the package must be specified as an [extra-dep](#extra-deps). ### drop-packages [:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) Default: `[]` Packages which, when present in the snapshot specified in the [`snapshot`](#snapshot) or [`resolver`](#resolver) key, should not be included in our project. This can be used for a few different purposes, e.g.: * Ensure that packages you don't want used in your project cannot be used in a `package.yaml` file (e.g., for license reasons) * When using a custom GHC build, avoid incompatible packages (see this [comment](https://github.com/commercialhaskell/stack/pull/4655#issuecomment-477954429)). ~~~yaml drop-packages: - buggy-package - package-with-unacceptable-license ~~~ !!! info Stackage snapshots LTS Haskell 14.27 (GHC 8.6.5) and earlier, and Nightly 2022-02-08 (GHC 8.8.2) and earlier, included directly the `Cabal` package. Later snapshots do not include directly that package (which is a GHC boot package). For the older Stackage snapshots, it could be handy to drop the snapshot-specified `Cabal` package, to avoid building that version of the package. For the later snapshots, there is no package version to drop. ### user-message If present, specifies a message to be displayed every time the configuration is loaded by Stack. It can serve as a reminder for the user to review the configuration and make any changes if needed. The user can delete this message if the generated configuration is acceptable. 
For example, a user-message is inserted by `stack init` when it omits packages or adds external dependencies, namely: ~~~yaml user-message: ! 'Warning: Some packages were found to be incompatible with the resolver and have been left commented out in the packages section. Warning: Specified resolver could not satisfy all dependencies. Some external packages have been added as dependencies. You can omit this message by removing it from stack.yaml ' ~~~ ### custom-preprocessor-extensions Default: `[]` Command line equivalent: `--customer-preprocessor-extensions` option In order for Stack to be aware of any custom preprocessors you are using, add their extensions here ~~~yaml custom-preprocessor-extensions: - erb ~~~ TODO: Add a simple example of how to use custom preprocessors. ### extra-package-dbs [:octicons-tag-24: 0.1.6.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.6.0) Default: `[]` A list of relative or absolute paths to package databases. These databases will be added on top of GHC's global package database before the addition of other package databases. !!! warning Use of this feature may result in builds that are not reproducible, as Stack has no control over the contents of the extra package databases. ### curator :octicons-beaker-24: Experimental [:octicons-tag-24: 2.1.0.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.0.1) Default: `{}` Configuration intended for use only by the [`curator` tool](https://github.com/commercialhaskell/curator), which uses Stack to build packages. For given package names (which need not exist in the project), Stack can be configured to ignore (skip) silently building test suites, building benchmarks and/or creating Haddock documentation or to expect that building test suites, building benchmarks and/or creating Haddock documentation will fail. 
For example: ~~~yaml curator: skip-test: - my-package1 expect-test-failure: - my-package2 skip-bench: - my-package3 expect-benchmark-failure: - my-package4 skip-haddock: - my-package5 expect-haddock-failure: - my-package6 ~~~ ## Non-project-specific configuration Non-project configuration options can be included in a project-level configuration file (`stack.yaml`, by default) or in global configuration files (`config.yaml`). However, non-project-specific options in the project-level configuration file in the `global-project` directory are ignored by Stack. The options below are listed in alphabetic order. ### allow-different-user [:octicons-tag-24: 1.0.1.0](https://github.com/commercialhaskell/stack/releases/tag/v1.0.1.0) Restrictions: POSIX systems only. Default: `false` Command line equivalent (takes precedence): `--[no-]allow-different-user` flag Allow users other than the owner of the [Stack root](stack_root.md) to use the Stack installation. ~~~yaml allow-different-user: true ~~~ The intention of this option is to prevent file permission problems, for example as the result of a Stack command executed under `sudo`. The option is automatically enabled when Stack is re-spawned in a Docker process. ### allow-newer [:octicons-tag-24: 0.1.8.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.8.0) Default: `false` Whether to ignore version bounds in Cabal files. This also ignores lower bounds. The name `allow-newer` is chosen to match the commonly-used Cabal option. ~~~yaml allow-newer: true ~~~ ### allow-newer-deps :octicons-beaker-24: Experimental [:octicons-tag-24: 2.9.3](https://github.com/commercialhaskell/stack/releases/tag/v2.9.3) Default: `none` Determines a subset of packages to which `allow-newer` should apply. This option has no effect (but warns) if `allow-newer` is `false`. 
~~~yaml allow-newer-deps: - foo - bar ~~~ ### apply-ghc-options [:octicons-tag-24: 0.1.6.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.6.0) Default: `locals` Related command line: [`stack build --ghc-options`](build_command.md#-ghc-options-option) option Determines to which packages any GHC command line options specified on the command line are applied. Possible values are: `everything` (all packages, project packages or otherwise), `locals` (all project packages, targets or otherwise), and `targets` (all project packages that are targets). !!! note The use of `everything` can break invariants about your snapshot database. !!! info Before Stack 0.1.6.0, the default value was `targets`. ### apply-prog-options [:octicons-tag-24: 2.11.1](https://github.com/commercialhaskell/stack/releases/tag/v2.11.1) Default: `locals` Related command line: [`stack build --PROG-option`](build_command.md#-prog-option-options) options Determines to which packages all and any `--PROG-option` command line options specified on the command line are applied. Possible values are: `everything` (all packages, project packages or otherwise), `locals` (all project packages, targets or otherwise), and `targets` (all project packages that are targets). !!! note The use of `everything` can break invariants about your snapshot database. ### arch Default: The machine architecture on which Stack is running. Command line equivalent (takes precedence): [`--arch`](global_flags.md#-arch-option) option Stack identifies different GHC executables by platform (operating system and machine architecture), (optional) GHC variant and (optional) GHC build. See [`setup-info`](#setup-info). `arch` sets the machine architecture. Values can be those recognized by Cabal (the library) (which are case-insensitive and include `i386`, `x86_64`, and `aarch64` / `arm64`), or other values (which are case-sensitive and treated as an unknown 'other' architecture of the specified name). 
By default, Stack will warn the user if the specified machine architecture is an unknown 'other' architecture. The warning can be muted; see [`notify-if-arch-unknown`](#notify-if-arch-unknown) !!! note The machine architecture on which Stack is running is as classified by Cabal (the library). Cabal does not distinguish between certain architectures. Examples are `ppc64`/`powerpc64`/`powerpc64le` (classified as `ppc64`) and `arm`/`armel`/`armeb` (classified as `arm`). !!! note As Cabal (the library) does not distinguish between machine architectures `powerpc64` and `powerpc64le`, the latter can be specified in Stack's configuration as an 'other' architecture, such as `arch: ppc64le`. ### build [:octicons-tag-24: 1.1.0](https://github.com/commercialhaskell/stack/releases/tag/v1.1.0) Default: ~~~yaml build: library-profiling: false executable-profiling: false library-stripping: true executable-stripping: true # NOTE: global usage of haddock can cause build failures when documentation is # incorrectly formatted. This could also affect scripts which use Stack. haddock: false haddock-arguments: # Additional arguments passed to haddock. The corresponding command line # option is --haddock-arguments. Example of use: # # haddock-args: # - "--css=/home/user/my-css" haddock-args: [] # The corresponding command line flag is --[no-]open. open-haddocks: false # If Stack is configured to build Haddock documentation, defaults to true. haddock-deps: false # The configuration is ignored, if haddock-for-hackage: true. haddock-internal: false # The configuration is ignored, if haddock-for-hackage: true. haddock-hyperlink-source: true # If specified, implies haddock-internal: false and # haddock-hyperlink-source: true. Since Stack 2.15.1. 
haddock-for-hackage: false copy-bins: false copy-compiler-tool: false prefetch: false keep-going: false keep-tmp-files: false # These are inadvisable to use in your global configuration, as they make the # Stack build command line behave quite differently. force-dirty: false test: false test-arguments: rerun-tests: true # Rerun successful tests # The corresponding command line option is --test-arguments. Example of use: # # additional-args: # - "--fail-fast" additional-args: [] coverage: false no-run-tests: false bench: false benchmark-opts: # Example of use: # # benchmark-arguments: "--csv bench.csv" benchmark-arguments: "" no-run-benchmarks: false reconfigure: false cabal-verbosity: normal cabal-verbose: false split-objs: false skip-components: [] # --skip # Since Stack 1.8. Starting with Stack 2.0, the default is true interleaved-output: true # Since Stack 2.13.1. Available options are none, count-only, capped and full. progress-bar: capped # Since Stack 1.10. ddump-dir: "" ~~~ Command line equivalents (take precedence): Yes, see below. Allows setting build options which are usually specified on the command line. The meanings of these settings correspond directly with the command line flags of the same name. For further information, see the [`stack build` command](build_command.md) documentation and the [users guide](GUIDE.md#the-build-command). ### casa [:octicons-tag-24: 2.13.1](https://github.com/commercialhaskell/stack/releases/tag/v2.13.1) Default: ~~~yaml casa: enable: true # Use a Casa server? repo-prefix: https://casa.stackage.org # Unless casa-repo-prefix is set. max-keys-per-request: 1280 # Maximum number of keys per request. ~~~ This option specifies whether or not Stack should use a Casa (content-addressable storage archive) server to cache Cabal files and all other files in packages; and, if so, the prefix for the URL used to pull information from the server and the maximum number of keys per request. 
For further information, see this blog post about [Casa and Stack](https://www.fpcomplete.com/blog/casa-and-stack/). `repo-prefix` replaces [`casa-repo-prefix`](#casa-repo-prefix) (which is deprecated) and has precedence if both keys are set. ### casa-repo-prefix [:octicons-tag-24: 2.3.1](https://github.com/commercialhaskell/stack/releases/tag/v2.3.1) Deprecated in favour of [`casa`](#casa), which takes precedence if present. Default: `https://casa.stackage.org` This option specifies the prefix for the URL used to pull information from the Casa server. ### color Command line equivalent (takes precedence): `--color` option This option specifies when to use color in output. The option is used as `color: `, where `` is 'always', 'never', or 'auto'. On Windows versions before Windows 10, for terminals that do not support color codes, the default is 'never'; color may work on terminals that support color codes. (The British English spelling (colour) is also accepted. In yaml configuration files, the American spelling is the alternative that has priority.) ### compiler [:octicons-tag-24: 0.1.8.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.8.0) Command line equivalent (takes precedence): `--compiler` option Overrides the compiler version in the snapshot. Note that the `compiler-check` flag also applies to the version numbers. This uses the same syntax as compiler snapshots like `ghc-9.6.5`. This can be used to override the compiler for a Stackage snapshot, like this: ~~~yaml snapshot: lts-22.21 compiler: ghc-9.6.4 compiler-check: match-exact ~~~ #### Building GHC from source :octicons-beaker-24: Experimental [:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) Stack supports building the GHC compiler from source, using [Hadrian](https://gitlab.haskell.org/ghc/ghc/blob/master/hadrian/README.md) (the build system for GHC). 
The GHC version to build and to use is defined by a Git commit ID and a Hadrian "flavour", with the following syntax: ~~~yaml compiler: ghc-git-<commit-id>-<flavour> ~~~
Hadrian's `build-stack` script will refer to that environment variable
GHC uses a sh script named `configure` which is itself created # from a file named `configure.ac`. stack exec -- pacman --sync patch # A utility to apply patch files to original sources. stack exec -- pacman --sync texinfo # Utilities to work with and produce manuals, ASCII text, and on-line # documentation from a single source file, including `makeinfo`. stack exec -- pacman --sync mingw-w64-x86_64-ca-certificates # Common CA (certificate authority) certificates. stack exec -- pip install -U sphinx # Sphinx is the Python documentation generator. ~~~ Hadrian may require certain LaTeX packages and may prompt for these to be installed duing the build process. #### Global packages The GHC compiler you build from sources may depend on unreleased versions of some global packages (e.g. Cabal). It may be an issue if a package you try to build with this compiler depends on such global packages because Stack may not be able to find versions of those packages (on Hackage, etc.) that are compatible with the compiler. The easiest way to deal with this issue is to drop the offending packages as follows. Instead of using the packages specified in the snapshot, the global packages bundled with GHC will be used. ~~~yaml drop-packages: - Cabal - ... ~~~ Another way to deal with this issue is to add the relevant packages as `extra-deps` built from source. To avoid mismatching versions, you can use exactly the same commit id you used to build GHC as follows: ~~~ extra-deps: - git: https://gitlab.haskell.org/ghc/ghc.git commit: '5be7ad7861c8d39f60b7101fd8d8e816ff50353a' subdirs: - libraries/Cabal/Cabal - libraries/... ~~~ ### compiler-check [:octicons-tag-24: 0.1.4.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.4.0) Default: `match-minor` Specifies how the compiler version in the snapshot is matched against concrete versions. Valid values: * `match-minor`: make sure that the first three components match, but allow patch-level differences. 
For example, 7.8.4.1 and 7.8.4.2 would both match 7.8.4.
~~~yaml configure-options: $everything: - --with-gcc - /some/path $locals: - --happy-option=--ghc $targets: # Only works on platforms where GHC supports linking against shared Haskell # libraries: - --enable-executable-dynamic my-package: - --another-flag ~~~ On platforms where GHC supports linking against shared Haskell libraries (that currently excludes Windows), Cabal's `--enable-executable-dynamic` flag (which implies `--enable-shared`, unless `--disable-shared` is specified) links dependent Haskell libraries into executables dynamically. ### connection-count Default: `8` Integer indicating how many simultaneous downloads are allowed to happen. ### default-template Default: `new-template` in the [stack-templates](https://github.com/commercialhaskell/stack-templates/) repository. This option specifies which template to use with `stack new`, when none is specified. Other templates are listed in the [stack-templates](https://github.com/commercialhaskell/stack-templates/) repository. See the output of `stack templates`. ### docker Command line equivalents: `--docker-*` flags and options (see `stack --docker-help` for details). For further information, see the [Docker integration](docker_integration.md#configuration) documentation. ### dump-logs [:octicons-tag-24: 1.3.0](https://github.com/commercialhaskell/stack/releases/tag/v1.3.0) Default: `warning` Command line equivalent (takes precedence): `--[no-]dump-logs` flag In the case of *non-interleaved* output and *more than one* target package, Stack sends the build output from GHC for each target package to a log file, unless an error occurs that prevents that. For further information, see the [`stack build --[no-]interleaved-output` flag](build_command.md#-no-interleaved-output-flag) documentation. The value of the `dump-logs` key controls what, if any, log file content is sent ('dumped') to the standard error stream of the console at the end of the build. 
Possible values are: ~~~yaml dump-logs: none # don't dump the content of any log files dump-logs: warning # dump the content of any log files that include GHC warnings dump-logs: all # dump the content of all log files ~~~ At the command line, `--no-dump-logs` is equivalent to `dump-logs: none` and `--dump-logs` is equivalent to `dump-logs: all`. If GHC reports an error during the build and a log file is created, that build output will be included in the log file. Stack will also report errors during building to the standard error stream. That stream can be piped to a file. For example, for a file named `stderr.log`: ~~~text stack --no-dump-logs --color always build --no-interleaved-output 2> stderr.log ~~~ ### extra-include-dirs Default: `[]` Command line equivalent: `--extra-include-dirs` option (repeat for each directory) A list of extra paths to be searched for header files. Paths should be absolute ~~~yaml extra-include-dirs: - /opt/foo/include ~~~ Since these are system-dependent absolute paths, it is recommended that you specify these in your `config.yaml` file. If you control the build environment in your project's ``stack.yaml``, perhaps through docker or other means, then it may well make sense to include these there as well. ### extra-lib-dirs Default: `[]` Command line equivalent: `--extra-lib-dirs` option (repeat for each directory) A list of extra paths to be searched for libraries. Paths should be absolute ~~~yaml extra-lib-dirs: - /opt/foo/lib ~~~ Since these are system-dependent absolute paths, it is recommended that you specify these in your `config.yaml` file. If you control the build environment in your project's ``stack.yaml``, perhaps through Docker or other means, then it may well make sense to include these there as well. ### extra-path [:octicons-tag-24: 0.1.4.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.4.0) This option specifies additional directories to prepend to the PATH. 
These will be used when resolving the location of executables, and will also be visible in the PATH of processes run by Stack. For example, to prepend `/path-to-some-dep/bin` to your PATH: ~~~yaml extra-path: - /path-to-some-dep/bin ~~~ Other paths added by Stack - things like the project's binary directory and the compiler's binary directory - will take precedence over those specified here (the automatic paths get prepended). ### ghc-build [:octicons-tag-24: 1.3.0](https://github.com/commercialhaskell/stack/releases/tag/v1.3.0) Default: `standard` Command line equivalent (takes precedence): `--ghc-build` option Stack identifies different GHC executables by platform (operating system and machine architecture), (optional) GHC variant and (optional) GHC build. See [`setup-info`](#setup-info). `ghc-build` specifies a specialized architecture for the GHC executable. Normally this is determined automatically, but it can be overridden. Possible arguments include `standard`, `gmp4`, `nopie`, `tinfo6`, `tinfo6-libc6-pre232`, `tinfo6-nopie`, `ncurses6`, `int-native` and `integersimple`. ### ghc-options [:octicons-tag-24: 0.1.4.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.4.0) Default: `{}` Related command line (takes precedence): [`stack build --ghc-options`](build_command.md#-ghc-options-option) option GHC command line options can be specified for a package in its Cabal file (including one created from a `package.yaml` file). This option augments and, if applicable (see below), overrides any such GHC command line options. `ghc-options` can specify GHC command line options for a named package, all project packages that are targets (using the `$targets` key), all project packages (targets or otherwise) (using the `$locals` key), or all packages (project packages or otherwise) (using the `$everything` key). 
~~~yaml ghc-options: "$everything": -O2 "$locals": -Wall "$targets": -Werror some-package: -DSOME_CPP_FLAG ~~~ GHC's command line options are _order-dependent_ and evaluated from left to right. Later options can override the effect of earlier ones. Stack applies options (as applicable) in the order of `$everything`, `$locals`, `$targets`, and then those for the named package. Any GHC command line options for a package specified at Stack's command line are applied after those specified in Stack's YAML configuration files. Since Stack 1.6.1, setting a GHC options for a specific package will automatically promote it to a project package (much like setting a custom package flag). However, setting options via `$everything` on all flags will not do so (see [GitHub discussion](https://github.com/commercialhaskell/stack/issues/849#issuecomment-320892095) for reasoning). This can lead to unpredictable behavior by affecting your snapshot packages. !!! info Before Stack 1.6.1, the key `*` (then deprecated) had the same function as the key `$everything`. ### ghc-variant [:octicons-tag-24: 0.1.5.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.5.0) Default: `standard` Command line equivalent (takes precedence): `--ghc-variant` option Stack identifies different GHC executables by platform (operating system and machine architecture), (optional) GHC variant and (optional) GHC build. See [`setup-info`](#setup-info). `ghc-variant` specifies a variant of the GHC executable. Known values are: * `standard`: Use the standard GHC binary distribution * `int-native`: From GHC 9.4.1, use a GHC bindist that uses the Haskell-native big-integer [backend](https://downloads.haskell.org/~ghc/9.0.2/docs/html/users_guide/9.0.1-notes.html#highlights). For further information, see this [article](https://iohk.io/en/blog/posts/2020/07/28/improving-haskells-big-numbers-support/). 
* `integersimple`: Use a GHC bindist that uses [integer-simple instead of GMP](https://ghc.haskell.org/trac/ghc/wiki/ReplacingGMPNotes) * any other value: Use a custom GHC bindist. You should specify [setup-info](#setup-info) or [setup-info-locations](#setup-info-locations) so `stack setup` knows where to download it, or pass the `stack setup --ghc-bindist` argument on the command-line This option is incompatible with `system-ghc: true`. ### hackage-base-url [:octicons-tag-24: 1.9.1](https://github.com/commercialhaskell/stack/releases/tag/v1.9.1) Default: `https://hackage.haskell.org/` Sets the address of the Hackage server to upload the package to. ~~~yaml hackage-base-url: https://hackage.example.com/ ~~~ ### hide-source-paths Default: `true` ([:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1)) Whether to use the `-fhide-source-paths` option by default for GHC >= 8.2: ~~~yaml hide-source-paths: false ~~~ Build output when enabled: ~~~text ... [1 of 2] Compiling Lib [2 of 2] Compiling Paths_test_pr ... ~~~ Build output when disabled: ~~~text ... [1 of 2] Compiling Lib ( src/Lib.hs, .stack-work/dist/x86_64-linux-tinfo6/Cabal-2.4.0.1/build/Lib.o ) ... ~~~ ### hide-th-loading Default: `true` Strip out the "Loading ..." lines from GHC build output, produced when using Template Haskell. ### ignore-revision-mismatch (Removed 1.11) This flag was introduced in Stack 1.6, and removed on the move to Pantry. You will receive a warning if this configuration value is set. ### install-ghc Default: `true` ([:octicons-tag-24: 1.5.0](https://github.com/commercialhaskell/stack/releases/tag/v1.5.0)) Command line equivalent (takes precedence): `--[no-]install-ghc` flag Whether or not to automatically install GHC when necessary. ### jobs Default: the number of CPUs (cores) that the machine has. 
Command line equivalent (takes precedence): [`-j`, `--jobs` option](global_flags.md#-jobs-or-j-option) Specifies the number of concurrent jobs (principally, Stack actions during building - see further below) to run. When [building GHC from source](#building-ghc-from-source), specifies the `-j[<n>]` flag of GHC's Hadrian build system. In some circumstances, the default can cause some machines to run out of memory during building. If those circumstances arise, specify `jobs: 1`. This configuration option is distinct from GHC's own `-j[<n>]` flag, which relates to parallel compilation of modules within a package. ### local-bin-path Default (on Unix-like operating systems): `~/.local/bin` Default (on Windows): `%APPDATA%\local\bin` Command line equivalent (takes precedence): `--local-bin-path` option Specifies the target directory for [`stack build --copy-bins`](build_command.md#-no-copy-bins-flag) and `stack install`. An absolute or relative path can be specified. If the project-level configuration is provided in the `global-project` directory in the [Stack root](stack_root.md), a relative path is assumed to be relative to the current directory. Otherwise, it is assumed to be relative to the directory of the project-level configuration file. ### local-programs-path [:octicons-tag-24: 1.3.0](https://github.com/commercialhaskell/stack/releases/tag/v1.3.0) This overrides the location of the Stack 'programs' directory, where tools like GHC get installed. The path must be an absolute one. Stack's defaults differ between Unix-like operating systems and Windows. === "Unix-like" Default: `programs` directory in the [Stack root](stack_root.md). === "Windows" Default: `%LOCALAPPDATA%\Programs\stack`, if the `%LOCALAPPDATA%` environment variable exists. Otherwise, the `programs` directory in the [Stack root](stack_root.md). The MSYS2 tool is also installed in the Stack 'programs' directory. !!! 
warning If there is a space character in the path to Stack's 'programs' directory this may cause problems with building packages that make use of the GNU project's `autoconf` package and `configure` shell script files. That may be the case particularly if there is no corresponding short name ('8 dot 3' name) for the directory in the path with the space (which may be the case if '8 dot 3' names have been stripped or their creation not enabled by default). If there are problems building, it will be necessary to specify an alternative path that does not contain space characters. Examples of packages on Hackage that make use of `configure` are `network` and `process`. ### modify-code-page [:octicons-tag-24: 0.1.6.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.6.0) Restrictions: Windows systems only. Default: `true` Command line equivalent (takes precedence): `--[no-]modify-code-page` flag Whether to modify the code page for UTF-8 output. ~~~yaml modify-code-page: false ~~~ ### nix [:octicons-tag-24: 0.1.10.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.10.0) Default: ~~~yaml nix: enable: false # Except on NixOS, where `enable: true` pure: true packages: [] shell-file: nix-shell-options: [] path: [] add-gc-roots: false ~~~ Command line equivalents: `--nix-*` flags and options (see `stack --nix-help` for details). For further information, see the [Nix integration](nix_integration.md#configuration) documentation. ### notify-if-arch-unknown [:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) Default: `true` If the specified machine architecture value is unknown to Cabal (the library), should Stack notify the user of that? ### notify-if-cabal-untested [:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) Default: `true` If Stack has not been tested with the version of Cabal (the library) that has been found, should Stack notify the user of that? 
### notify-if-ghc-untested [:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) Default: `true` If Stack has not been tested with the version of GHC that is being used, should Stack notify the user of that? ### notify-if-nix-on-path [:octicons-tag-24: 2.15.1](https://github.com/commercialhaskell/stack/releases/tag/v2.15.1) Default: `true` If Stack's integration with the Nix package manager is not enabled, should Stack notify the user if a `nix` executable is on the PATH? ### package-index [:octicons-tag-24: 2.9.3](https://github.com/commercialhaskell/stack/releases/tag/v2.9.3) Default: ~~~yaml package-index: download-prefix: https://hackage.haskell.org/ hackage-security: keyids: - 0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d - 1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42 - 2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3 - 51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921 - fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0 key-threshold: 3 ignore-expiry: true ~~~ Takes precedence over the `package-indices` key, which is deprecated. Specify the package index. The index must use the [Hackage Security](https://hackage.haskell.org/package/hackage-security) format. This setting is most useful for providing a mirror of the official Hackage server for * bypassing a firewall; or * faster downloads. If the setting specifies an index that does not mirror Hackage, it is likely that will result in significant breakage, including most snapshots failing to work. In the case of Hackage, the keys of its root key holders are contained in the `haskell-infra/hackage-root-keys` [repository](https://github.com/haskell-infra/hackage-root-keys). The Hackage package index is signed. A signature is valid when three key holders have signed. The Hackage timestamp is also signed. A signature is valid when one key holder has signed. 
If the `hackage-security` key is absent, the Hackage Security configuration will default to that for the official Hackage server. `key-threshold` specifies the minimum number of keyholders that must have signed the package index for it to be considered valid. `ignore-expiry` specifies whether or not the expiration of timestamps should be ignored. ### package-indices [:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) Deprecated in favour of [`package-index`](#package-index), which takes precedence if present. Default: ~~~yaml package-indices: - download-prefix: https://hackage.haskell.org/ hackage-security: keyids: - 0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d - 1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42 - 2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3 - 51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921 - fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0 key-threshold: 3 ignore-expiry: true ~~~ !!! info Before Stack 2.1.3, the default for `ignore-expiry` was `false`. For more information, see [issue #4928](https://github.com/commercialhaskell/stack/issues/4928). !!! info Before Stack 2.1.1, Stack had a different approach to `package-indices`. For more information, see [issue #4137](https://github.com/commercialhaskell/stack/issues/4137). Specify the package index. For further information, see the `package-index` [documentation](#package-index). ### pvp-bounds [:octicons-tag-24: 0.1.5.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.5.0) Default: `none` Command line equivalent (takes precedence): `stack sdist --pvp-bounds` option or `stack upload --pvp-bounds` option !!! warning As of Stack 1.6.0, this feature does not reliably work, due to issues with the Cabal library's printer. Stack will generate a warning when a lossy conversion occurs, in which case you may need to disable this setting. 
For further information, see issue [#3550](https://github.com/commercialhaskell/stack/issues/3550). When using the `sdist` and `upload` commands, this setting determines whether the Cabal file's dependencies should be modified to reflect PVP lower and upper bounds. #### Basic use Values are `none` (unchanged), `upper` (add upper bounds), `lower` (add lower bounds), and `both` (add upper and lower bounds). The algorithm Stack follows is: * If an upper or lower bound (other than `>= 0` - 'any version') already exists on a dependency, it is left alone * When adding a lower bound, Stack looks at the current version specified by `stack.yaml`, and sets it as the lower bound (e.g., `foo >= 1.2.3`) * When adding an upper bound, Stack sets it as less than the next major version (e.g., `foo < 1.3`) ~~~yaml pvp-bounds: none ~~~ For further information, see the announcement [blog post](https://www.fpcomplete.com/blog/2015/09/stack-pvp). #### Use with Cabal file revisions [:octicons-tag-24: 1.5.0](https://github.com/commercialhaskell/stack/releases/tag/v1.5.0) Each of the values listed above supports adding `-revision` to the end of the value, e.g. `pvp-bounds: both-revision`. This means that, when uploading to Hackage, Stack will first upload your tarball with an unmodified Cabal file, and then upload a Cabal file revision with the PVP bounds added. This can be useful - especially combined with the [Stackage no-revisions feature](http://www.snoyman.com/blog/2017/04/stackages-no-revisions-field) - as a method to ensure PVP compliance without having to proactively fix bounds issues for Stackage maintenance. ### recommend-stack-upgrade [:octicons-tag-24: 2.1.1](https://github.com/commercialhaskell/stack/releases/tag/v2.1.1) Default: `true` When Stack notices that a new version of Stack is available, should it notify the user? 
### rebuild-ghc-options [:octicons-tag-24: 0.1.6.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.6.0) Default: `false` Should Stack rebuild a package when its GHC options change? The default value reflects that, in most cases, GHC options are used to affect optimization levels and warning behavior, for which GHC does not recompile the modules. !!! info Before Stack 0.1.6.0, Stack rebuilt a package when its GHC options changed. ### require-stack-version Default: `"-any"` Require a version of Stack within the specified range ([cabal-style](https://www.haskell.org/cabal/users-guide/developing-packages.html#build-information)) to be used for this project. Example: `require-stack-version: "== 0.1.*"` ### save-hackage-creds [:octicons-tag-24: 1.5.0](https://github.com/commercialhaskell/stack/releases/tag/v1.5.0) Default: `true` Controls whether, when using `stack upload`, the user's Hackage username and password are stored in a local file. ~~~yaml save-hackage-creds: true ~~~ ### setup-info [:octicons-tag-24: 0.1.5.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.5.0) The `setup-info` dictionary specifies download locations for tools to be installed during set-up, such as GHC or, on Windows, 7z and MSYS2. The dictionary maps `('Tool', 'Platform', 'Version')` to the location where it can be obtained. For example, mapping `(GHC, 64-bit Windows, 9.2.3)` to the URL hosting the archive file for GHC's installation. Possible usages of this configuration option are: 1. Using Stack offline or behind a firewall. 2. Extending the tools known to Stack, such as cutting-edge versions of GHC or builds for custom Linux distributions (for use with the [ghc-variant](#ghc-variant) option). By default, Stack obtains the dictionary from [stack-setup-2.yaml](https://github.com/commercialhaskell/stackage-content/raw/master/stack/stack-setup-2.yaml). The `setup-info` dictionary is constructed in the following order: 1. 
`setup-info` in the YAML configuration - inline configuration 2. `--setup-info-yaml` command line arguments - URLs or paths. Multiple locations may be specified. 3. `setup-info-locations` in the YAML configuration - URLs or paths. See further below. The format of this key is the same as in the default [stack-setup-2.yaml](https://github.com/commercialhaskell/stackage-content/raw/master/stack/stack-setup-2.yaml). For example, GHC 9.2.3 of custom variant `myvariant` (see further below) on 64-bit Windows: ~~~yaml setup-info: ghc: windows64-custom-myvariant: 9.2.3: url: "https://example.com/ghc-9.2.3-x86_64-unknown-mingw32-myvariant.tar.xz" ~~~ 'Platforms' are pairs of an operating system and a machine architecture (for example, 32-bit i386 or 64-bit x86-64) (represented by the `Cabal.Distribution.Systems.Platform` type). Stack currently (version 2.15.1) supports the following pairs in the format of the `setup-info` key: |Operating system|I386 arch|X86_64 arch|Other machine architectures | |----------------|---------|-----------|------------------------------------------------------------| |Linux |linux32 |linux64 |AArch64: linux-aarch64, Arm: linux-armv7, Sparc: linux-sparc| |OSX |macosx |macosx | | |Windows |windows32|windows64 | | |FreeBSD |freebsd32|freebsd64 |AArch64: freebsd-aarch64 | |OpenBSD |openbsd32|openbsd64 | | For GHC, the distinguishing 'Version' in the key format includes a 'tag' for any (optional) GHC variant (see [ghc-variant](#ghc-variant)) and a further 'tag' for any (optional) specialised GHC build (see [ghc-build](#ghc-build)). The optional variant 'tag' is either `-integersimple` or `-custom-`. 
For example, for GHC 9.0.2 of specialised GHC build `tinfo6` on x86_64 Linux: ~~~yaml setup-info: ghc: linux64-tinfo6: 9.0.2: url: "http://downloads.haskell.org/~ghc/9.0.2/ghc-9.0.2a-x86_64-fedora27-linux.tar.xz" content-length: 237286244 sha1: affc2aaa3e6a1c446698a884f56a0a13e57f00b4 sha256: b2670e9f278e10355b0475c2cc3b8842490f1bca3c70c306f104aa60caff37b0 ~~~ On Windows, the required 7z executable and DLL tools are represented in the format of the `setup-info` key simply by `sevenzexe-info` and `sevenzdll-info`. This configuration **adds** the specified setup information metadata to the default. Specifying this configuration **does not** prevent the default [stack-setup-2.yaml](https://github.com/commercialhaskell/stackage-content/raw/master/stack/stack-setup-2.yaml) from being consulted as a fallback. If, however, you need to **replace** the default `setup-info` dictionary, use the following: ~~~yaml setup-info-locations: [] ~~~ ### setup-info-locations [:octicons-tag-24: 2.3.1](https://github.com/commercialhaskell/stack/releases/tag/v2.3.1) Command line equivalent (takes precedence): `--setup-info-yaml` option By way of introduction, see the [`setup-info`](#setup-info) option. This option specifies the location(s) of `setup-info` dictionaries. The first location which provides a dictionary that specifies the location of a tool - `('Tool', 'Platform', 'Version')` - takes precedence. 
For example, you can extend the default tools, with a fallback to the default `setup-info` location, as follows: ~~~yaml setup-info-locations: - C:/stack-offline/my-stack-setup.yaml - relative/inside/my/project/setup-info.yaml - \\smbShare\stack\my-stack-setup.yaml - http://stack-mirror.com/stack-setup.yaml # Fallback to the default location - https://github.com/commercialhaskell/stackage-content/raw/master/stack/stack-setup-2.yaml ~~~ Stack only refers to the default `setup-info` location if no locations are specified in the `setup-info-locations` configuration or on the command line using the `--setup-info-yaml` option. For example, both of the following will cause `stack setup` not to consult the default `setup-info` location: ~~~yaml setup-info-locations: - C:/stack-offline/my-stack-setup.yaml ~~~ and ~~~yaml setup-info-locations: [] ~~~ Relative paths are resolved relative to the `stack.yaml` file (either the one in the local project or the global `stack.yaml`). Relative paths may also be used for the installation paths to tools (such as GHC or 7z). This allows vendoring the tools inside a monorepo (a single repository storing many projects). For example: Directory structure: ~~~text - src/ - installs/ - my-stack-setup.yaml - 7z.exe - 7z.dll - ghc-9.2.3.tar.xz - stack.yaml ~~~ In the project's `stack.yaml`: ~~~yaml setup-info-locations: - installs/my-stack-setup.yaml ~~~ In `installs/my-stack-setup.yaml`: ~~~yaml sevenzexe-info: url: "installs/7z.exe" sevenzdll-info: url: "installs/7z.dll" ghc: windows64: 9.2.3: url: "installs/ghc-9.2.3.tar.xz" ~~~ ### skip-ghc-check Default: `false` Command line equivalent (takes precedence): `--[no-]skip-ghc-check` flag Should we skip the check to confirm that your system GHC version (on the PATH) matches what your project expects? 
### skip-msys [:octicons-tag-24: 0.1.2.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.2.0) Restrictions: Windows systems only Default: `false` Command line equivalent (takes precedence): [`--[no-]skip-msys`](global_flags.md#-no-skip-msys-option) flag Skips checking for the Stack-supplied MSYS2 (and installing that MSYS2, if it is not installed) when Stack is setting up the environment. ~~~yaml skip-msys: true ~~~ !!! note Usually, the use of this option does not make sense in project-level configuration and it is used only in global configuration. ### snapshot-location-base [:octicons-tag-24: 2.5.1](https://github.com/commercialhaskell/stack/releases/tag/v2.5.1) Default: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/ (as set in the `pantry` library) Command line equivalent (takes precedence): `--snapshot-location-base` option Sets the base location of the LTS Haskell or Stackage Nightly snapshots. For example: ~~~yaml snapshot-location-base: https://example.com/snapshots/location/ ~~~ has the following effect: * `lts-X.Y` expands to `https://example.com/snapshots/location/lts/X/Y.yaml` * `nightly-YYYY-MM-DD` expands to `https://example.com/snapshots/location/nightly/YYYY/M/D.yaml` This key is convenient in setups that restrict access to GitHub, for instance closed corporate setups. In this setting, it is common for the development environment to have general access to the internet, but not for testing/building environments. To avoid the firewall, one can run a local snapshots mirror and then use a custom `snapshot-location-base` in the closed environments only. ### stack-colors Command line equivalent (takes precedence): `--stack-colors` option Stack uses styles to format some of its output. The default styles do not work well with every terminal theme. This option specifies Stack's output styles, allowing new styles to replace the defaults. 
The option is used as `stack-colors: <styles>`, where `<styles>` is a colon-delimited sequence of key=value, 'key' is a style name and 'value' is a semicolon-delimited list of 'ANSI' SGR (Select Graphic Rendition) control codes (in decimal). Use the command `stack ls stack-colors --basic` to see the current sequence. The 'ANSI' standards refer to (1) standard ECMA-48 'Control Functions for Coded Character Sets' (5th edition, 1991); (2) extensions in ITU-T Recommendation (previously CCITT Recommendation) T.416 (03/93) 'Information Technology – Open Document Architecture (ODA) and Interchange Format: Character Content Architectures' (also published as ISO/IEC International Standard 8613-6); and (3) further extensions used by 'XTerm', a terminal emulator for the X Window System. The 'ANSI' SGR codes are described in a [Wikipedia article](http://en.wikipedia.org/wiki/ANSI_escape_code) and those codes supported on current versions of Windows in [Microsoft's documentation](https://docs.microsoft.com/en-us/windows/console/console-virtual-terminal-sequences). For example, users of the popular [Solarized Dark](https://ethanschoonover.com/solarized/) terminal theme might wish to set the styles as follows: ~~~yaml stack-colors: error=31:good=32:shell=35:dir=34:recommendation=32:target=95:module=35:package-component=95:secondary=92:highlight=32 ~~~ In respect of styles used in verbose output, some of that output occurs before the configuration file is processed. (The British English spelling (colour) is also accepted. In YAML configuration files, the American spelling is the alternative that has priority.) ### stack-developer-mode [:octicons-tag-24: 2.3.3](https://github.com/commercialhaskell/stack/releases/tag/v2.3.3) Default (official distributed binaries): `false` Default (built from source): `true` Turns on a mode where some messages are printed at WARN level instead of DEBUG level, especially useful for developers of Stack itself. 
~~~yaml stack-developer-mode: false ~~~ ### system-ghc Default: `false`, unless the [Docker](docker_integration.md) or [Nix](nix_integration.md) integration is enabled. Command line equivalent (takes precedence): `--[no-]system-ghc` flag Enables or disables using the GHC available on the PATH. (Make sure PATH is explicit, i.e., don't use ~.) Useful to enable if you want to save the time, bandwidth or storage space needed to setup an isolated GHC. In a Nix-enabled configuration, Stack is incompatible with `system-ghc: false`. ~~~yaml # Turn on system GHC system-ghc: true ~~~ ### templates Command line equivalent (takes precedence): `stack new --param :` (or `-p`) option Templates used with `stack new` have a number of parameters that affect the generated code. These can be set for all new projects you create. The result of them can be observed in the generated LICENSE and Cabal files. The value for all of these parameters must be strings. The parameters are: `author-email`, `author-name`, `category`, `copyright`, `year` and `github-username`. * _author-email_ - sets the `maintainer` property in Cabal * _author-name_ - sets the `author` property in Cabal and the name used in LICENSE * _category_ - sets the `category` property in Cabal. This is used in Hackage. For examples of categories see [Packages by category](https://hackage.haskell.org/packages/). It makes sense for `category` to be set on a per project basis because it is uncommon for all projects a user creates to belong to the same category. The category can be set per project by passing `-p "category:value"` to the `stack new` command. * _copyright_ - sets the `copyright` property in Cabal. It is typically the name of the holder of the copyright on the package and the year(s) from which copyright is claimed. For example: `Copyright (c) 2023-2024 Joe Bloggs` * _year_ - if `copyright` is not specified, `year` and `author-name` are used to generate the copyright property in Cabal. 
If `year` is not specified, it defaults to the current year. * _github-username_ - used to generate `homepage` and `source-repository` in Cabal. For instance `github-username: myusername` and `stack new my-project new-template` would result: ~~~yaml homepage: http://github.com/myusername/my-project#readme source-repository head type: git location: https://github.com/myusername/my-project ~~~ These properties can be set in `config.yaml` as follows: ~~~yaml templates: params: author-name: Your Name author-email: youremail@example.com category: Your Projects Category copyright: 'Copyright (c) 2024 Your Name' github-username: yourusername ~~~ Additionally, `stack new` can automatically initialize source control repositories in the directories it creates. Source control tools can be specified with the `scm-init` option. At the moment, only `git` is supported. ~~~yaml templates: scm-init: git ~~~ ### urls Default: ~~~yaml urls: latest-snapshot: https://stackage-haddock.haskell.org/snapshots.json ~~~ Customize the URLs where Stack looks for snapshot build plans. !!! note The default for Stack 1.3.0 to 2.15.3 was https://s3.amazonaws.com/haddock.stackage.org/snapshots.json. Following the handover of the Stackage project to the Haskell Foundation in early 2024, the file at that URL may not be up to date. Users of those versions of Stack should configure the URL to be the default above. ### with-gcc Command line equivalent (takes precedence): `--with-gcc` option Specify a path to GCC explicitly, rather than relying on the normal path resolution. ~~~yaml with-gcc: /usr/local/bin/gcc-5 ~~~ ### with-hpack Command line equivalent (takes precedence): `--with-hpack` option Use an [Hpack](https://github.com/sol/hpack) executable, rather than Stack's in-built version of the Hpack functionality. 
~~~yaml with-hpack: /usr/local/bin/hpack ~~~ ### work-dir [:octicons-tag-24: 0.1.10.0](https://github.com/commercialhaskell/stack/releases/tag/v0.1.10.0) Default: `.stack-work` Command line equivalent (takes precedence): [`--work-dir`](global_flags.md#-work-dir-option) option Environment variable alternative (lowest precedence): [`STACK_WORK`](environment_variables.md#stack_work) `work-dir` specifies the path of Stack's work directory, within a local project or package directory. The path must be a relative one, relative to the root directory of the project or package. The relative path cannot include a `..` (parent directory) component. ## Customisation scripts ### GHC installation customisation [:octicons-tag-24: 2.9.1](https://github.com/commercialhaskell/stack/releases/tag/v2.9.1) On Unix-like operating systems and Windows, Stack's installation procedure can be fully customised by placing a `sh` shell script (a 'hook') in the [Stack root](stack_root.md) directory at `hooks/ghc-install.sh`. On Unix-like operating systems, the script file must be made executable. The script is run by the `sh` application (which is provided by MSYS2 on Windows). The script **must** return an exit code of `0` and the standard output **must** be the absolute path to the GHC binary that was installed. Otherwise Stack will ignore the script and possibly fall back to its own installation procedure. The script is not run when `system-ghc: true`. When `install-ghc: false`, the script is still run, which allows you to ensure that only your script will install GHC and Stack won't default to its own installation logic, even when the script fails. 
The following environment variables are always available to the script: * `HOOK_GHC_TYPE = "bindist" | "git" | "ghcjs"` For "bindist", additional variables are: * `HOOK_GHC_VERSION = ` For "git", additional variables are: * `HOOK_GHC_COMMIT = ` * `HOOK_GHC_FLAVOR = ` For "ghcjs", additional variables are: * `HOOK_GHC_VERSION = ` * `HOOK_GHCJS_VERSION = ` An example script is: ~~~sh #!/bin/sh set -eu case $HOOK_GHC_TYPE in bindist) # install GHC here, not printing to stdout, e.g.: # command install $HOOK_GHC_VERSION >/dev/null ;; git) >&2 echo "Hook doesn't support installing from source" exit 1 ;; *) >&2 echo "Unsupported GHC installation type: $HOOK_GHC_TYPE" exit 2 ;; esac echo "location/to/ghc/executable" ~~~ If the following script is installed by GHCup, GHCup makes use of it, so that if Stack needs a version of GHC, GHCup takes over obtaining and installing that version: ~~~sh #!/bin/sh set -eu case $HOOK_GHC_TYPE in bindist) ghcdir=$(ghcup whereis --directory ghc "$HOOK_GHC_VERSION" || ghcup run --ghc "$HOOK_GHC_VERSION" --install) || exit 3 printf "%s/ghc" "${ghcdir}" ;; git) # TODO: should be somewhat possible >&2 echo "Hook doesn't support installing from source" exit 1 ;; *) >&2 echo "Unsupported GHC installation type: $HOOK_GHC_TYPE" exit 2 ;; esac ~~~ stack-2.15.7/src/setup-shim/StackSetupShim.hs0000644000000000000000000001477014620153474017250 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE PackageImports #-} module StackSetupShim where -- | Stack no longer supports Cabal < 1.24 and, consequently, GHC versions -- before GHC 8.0 or base < 4.9.0.0. Consequently, we do not need to test for -- the existence of the MIN_VERSION_Cabal macro (provided from GHC 8.0). import Data.List ( stripPrefix ) import Distribution.ReadE ( ReadE (..) ) import Distribution.Simple.Configure ( getPersistBuildConfig ) -- | Temporary, can be removed if initialBuildSteps restored to Cabal's API. 
#if MIN_VERSION_Cabal(3,11,0) import Distribution.Simple.Build ( writeBuiltinAutogenFiles ) #else import Distribution.Simple.Build ( initialBuildSteps ) #endif #if MIN_VERSION_Cabal(3,11,0) import Distribution.Simple.Errors ( exceptionMessage ) #endif -- | Temporary, can be removed if initialBuildSteps restored to Cabal's API. #if MIN_VERSION_Cabal(3,11,0) import Distribution.Simple.LocalBuildInfo ( componentBuildDir, withAllComponentsInBuildOrder ) #endif #if MIN_VERSION_Cabal(3,8,1) import Distribution.Simple.PackageDescription ( readGenericPackageDescription ) #elif MIN_VERSION_Cabal(2,2,0) -- Avoid confusion with Cabal-syntax module of same name. -- readGenericPackageDescription was exported from module -- Distribution.PackageDescription.Parsec in Cabal-2.2.0.0. import "Cabal" Distribution.PackageDescription.Parsec ( readGenericPackageDescription ) #elif MIN_VERSION_Cabal(2,0,0) -- readPackageDescription was renamed readGenericPackageDescription in -- Cabal-2.0.0.2. import Distribution.PackageDescription.Parse ( readGenericPackageDescription ) #else import Distribution.PackageDescription.Parse ( readPackageDescription ) #endif import Distribution.Simple.Utils ( createDirectoryIfMissingVerbose, findPackageDesc ) #if MIN_VERSION_Cabal(3,8,1) import Distribution.Types.GenericPackageDescription ( GenericPackageDescription (..) ) #elif MIN_VERSION_Cabal(2,0,0) -- Avoid confusion with Cabal-syntax module of same name. -- GenericPackageDescription was exported from module -- Distribution.Types.GenericPackageDescription in Cabal-2.0.0.2. import "Cabal" Distribution.Types.GenericPackageDescription ( GenericPackageDescription (..) ) #else import Distribution.PackageDescription ( GenericPackageDescription (..) ) #endif -- | Temporary, can be removed if initialBuildSteps restored to Cabal's API. 
#if MIN_VERSION_Cabal(3,11,0) import Distribution.Types.ComponentLocalBuildInfo ( ComponentLocalBuildInfo ) import Distribution.Types.LocalBuildInfo ( LocalBuildInfo ) import Distribution.Types.PackageDescription ( PackageDescription ) import Distribution.Verbosity ( Verbosity ) #endif import Distribution.Verbosity ( flagToVerbosity ) import Main -- Before base-4.11.0.0 (GHC 8.4.1), <> was not exported by Prelude. #if !MIN_VERSION_base(4,11,0) import Data.Semigroup ( (<>) ) #endif import System.Environment ( getArgs ) mainOverride :: IO () mainOverride = do args <- getArgs case args of [arg1, arg2, "repl", "stack-initial-build-steps"] -> stackReplHook arg1 arg2 _ -> main -- | The name of the function is a mismomer, but is kept for historical reasons. -- This function relies on Stack calling the 'setup' executable with: -- -- --verbose= -- --builddir= -- repl -- stack-initial-build-steps stackReplHook :: String -> String -> IO () stackReplHook arg1 arg2 = do let mRawVerbosity = stripPrefix "--verbose=" arg1 mRawBuildDir = stripPrefix "--builddir=" arg2 case (mRawVerbosity, mRawBuildDir) of (Nothing, _) -> fail $ "Misuse of running Setup.hs with stack-initial-build-steps, expected " <> "first argument to start --verbose=" (_, Nothing) -> fail $ "Misuse of running Setup.hs with stack-initial-build-steps, expected" <> "second argument to start --builddir=" (Just rawVerbosity, Just rawBuildDir) -> do let eVerbosity = runReadE flagToVerbosity rawVerbosity case eVerbosity of Left msg1 -> fail $ "Unexpected happened running Setup.hs with " <> "stack-initial-build-steps, expected to parse Cabal verbosity: " <> msg1 Right verbosity -> do eFp <- findPackageDesc "" case eFp of Left err -> fail $ "Unexpected happened running Setup.hs with " <> "stack-initial-build-steps, expected to find a Cabal file: " <> msg2 where #if MIN_VERSION_Cabal(3,11,0) -- The type of findPackageDesc changed in Cabal-3.11.0.0. 
msg2 = exceptionMessage err #else msg2 = err #endif Right fp -> do gpd <- #if MIN_VERSION_Cabal(2,0,0) readGenericPackageDescription verbosity fp #else readPackageDescription verbosity fp #endif let pd = packageDescription gpd lbi <- getPersistBuildConfig rawBuildDir initialBuildSteps rawBuildDir pd lbi verbosity -- | Temporary, can be removed if initialBuildSteps restored to Cabal's API. -- Based on the functions of the same name provided by Cabal-3.10.3.0. #if MIN_VERSION_Cabal(3,11,0) -- | Runs 'componentInitialBuildSteps' on every configured component. initialBuildSteps :: FilePath -- ^"dist" prefix -> PackageDescription -- ^mostly information from the .cabal file -> LocalBuildInfo -- ^Configuration information -> Verbosity -- ^The verbosity to use -> IO () initialBuildSteps distPref pkg_descr lbi verbosity = withAllComponentsInBuildOrder pkg_descr lbi $ \_comp clbi -> componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity -- | Creates the autogenerated files for a particular configured component. componentInitialBuildSteps :: FilePath -- ^"dist" prefix -> PackageDescription -- ^mostly information from the .cabal file -> LocalBuildInfo -- ^Configuration information -> ComponentLocalBuildInfo -> Verbosity -- ^The verbosity to use -> IO () componentInitialBuildSteps _distPref pkg_descr lbi clbi verbosity = do createDirectoryIfMissingVerbose verbosity True (componentBuildDir lbi clbi) -- Cabal-3.10.3.0 used writeAutogenFiles, that generated and wrote out the -- Paths_.hs, PackageInfo_.hs, and cabal_macros.h files. This -- appears to be the equivalent function for Cabal-3.11.0.0. 
writeBuiltinAutogenFiles verbosity pkg_descr lbi clbi #endif stack-2.15.7/tests/unit/package-dump/ghc-7.10.txt0000644000000000000000000016144714502056216017463 0ustar0000000000000000name: ghc version: 7.10.1 id: ghc-7.10.1-325809317787a897b7a97d646ceaa3a3 key: ghc_EMlWrQ42XY0BNVbSrKixqY license: BSD3 maintainer: glasgow-haskell-users@haskell.org homepage: http://www.haskell.org/ghc/ synopsis: The GHC API description: GHC's functionality can be useful for more things than just compiling Haskell programs. Important use cases are programs that analyse (and perhaps transform) Haskell code. Others include loading Haskell code dynamically in a GHCi-like manner. For this reason, a lot of GHC's functionality is made available through this package. category: Development author: The GHC Team exposed: False exposed-modules: Avail BasicTypes ConLike DataCon PatSyn Demand Debug Exception GhcMonad Hooks Id IdInfo Lexeme Literal Llvm Llvm.AbsSyn Llvm.MetaData Llvm.PpLlvm Llvm.Types LlvmCodeGen LlvmCodeGen.Base LlvmCodeGen.CodeGen LlvmCodeGen.Data LlvmCodeGen.Ppr LlvmCodeGen.Regs LlvmMangler MkId Module Name NameEnv NameSet OccName RdrName SrcLoc UniqSupply Unique Var VarEnv VarSet UnVarGraph BlockId CLabel Cmm CmmBuildInfoTables CmmPipeline CmmCallConv CmmCommonBlockElim CmmContFlowOpt CmmExpr CmmInfo CmmLex CmmLint CmmLive CmmMachOp CmmNode CmmOpt CmmParse CmmProcPoint CmmSink CmmType CmmUtils CmmLayoutStack MkGraph PprBase PprC PprCmm PprCmmDecl PprCmmExpr Bitmap CodeGen.Platform CodeGen.Platform.ARM CodeGen.Platform.ARM64 CodeGen.Platform.NoRegs CodeGen.Platform.PPC CodeGen.Platform.PPC_Darwin CodeGen.Platform.SPARC CodeGen.Platform.X86 CodeGen.Platform.X86_64 CgUtils StgCmm StgCmmBind StgCmmClosure StgCmmCon StgCmmEnv StgCmmExpr StgCmmForeign StgCmmHeap StgCmmHpc StgCmmArgRep StgCmmLayout StgCmmMonad StgCmmPrim StgCmmProf StgCmmTicky StgCmmUtils StgCmmExtCode SMRep CoreArity CoreFVs CoreLint CorePrep CoreSubst CoreSyn TrieMap CoreTidy CoreUnfold CoreUtils MkCore PprCore Check 
Coverage Desugar DsArrows DsBinds DsCCall DsExpr DsForeign DsGRHSs DsListComp DsMonad DsUtils Match MatchCon MatchLit HsBinds HsDecls HsDoc HsExpr HsImpExp HsLit PlaceHolder HsPat HsSyn HsTypes HsUtils BinIface BuildTyCl IfaceEnv IfaceSyn IfaceType LoadIface MkIface TcIface FlagChecker Annotations BreakArray CmdLineParser CodeOutput Config Constants DriverMkDepend DriverPhases PipelineMonad DriverPipeline DynFlags ErrUtils Finder GHC GhcMake GhcPlugins DynamicLoading HeaderInfo HscMain HscStats HscTypes InteractiveEval InteractiveEvalTypes PackageConfig Packages PlatformConstants Plugins TcPluginM PprTyThing StaticFlags StaticPtrTable SysTools TidyPgm Ctype HaddockUtils Lexer OptCoercion Parser RdrHsSyn ApiAnnotation ForeignCall PrelInfo PrelNames PrelRules PrimOp TysPrim TysWiredIn CostCentre ProfInit SCCfinal RnBinds RnEnv RnExpr RnHsDoc RnNames RnPat RnSource RnSplice RnTypes CoreMonad CSE FloatIn FloatOut LiberateCase OccurAnal SAT SetLevels SimplCore SimplEnv SimplMonad SimplUtils Simplify SimplStg StgStats UnariseStg Rules SpecConstr Specialise CoreToStg StgLint StgSyn CallArity DmdAnal WorkWrap WwLib FamInst Inst TcAnnotations TcArrows TcBinds TcClassDcl TcDefaults TcDeriv TcEnv TcExpr TcForeign TcGenDeriv TcGenGenerics TcHsSyn TcHsType TcInstDcls TcMType TcValidity TcMatches TcPat TcPatSyn TcRnDriver TcRnMonad TcRnTypes TcRules TcSimplify TcErrors TcTyClsDecls TcTyDecls TcType TcEvidence TcUnify TcInteract TcCanonical TcFlatten TcSMonad TcTypeNats TcSplice Class Coercion FamInstEnv FunDeps InstEnv TyCon CoAxiom Kind Type TypeRep Unify Bag Binary BooleanFormula BufWrite Digraph Encoding FastBool FastFunctions FastMutInt FastString FastTypes Fingerprint FiniteMap GraphBase GraphColor GraphOps GraphPpr IOEnv ListSetOps Maybes MonadUtils OrdList Outputable Pair Panic Pretty Serialized State Stream StringBuffer UniqFM UniqSet Util ExtsCompat46 Vectorise.Builtins.Base Vectorise.Builtins.Initialise Vectorise.Builtins Vectorise.Monad.Base Vectorise.Monad.Naming 
Vectorise.Monad.Local Vectorise.Monad.Global Vectorise.Monad.InstEnv Vectorise.Monad Vectorise.Utils.Base Vectorise.Utils.Closure Vectorise.Utils.Hoisting Vectorise.Utils.PADict Vectorise.Utils.Poly Vectorise.Utils Vectorise.Generic.Description Vectorise.Generic.PAMethods Vectorise.Generic.PADict Vectorise.Generic.PData Vectorise.Type.Env Vectorise.Type.Type Vectorise.Type.TyConDecl Vectorise.Type.Classify Vectorise.Convert Vectorise.Vect Vectorise.Var Vectorise.Env Vectorise.Exp Vectorise Hoopl.Dataflow Hoopl AsmCodeGen TargetReg NCGMonad Instruction Size Reg RegClass PIC Platform CPrim X86.Regs X86.RegInfo X86.Instr X86.Cond X86.Ppr X86.CodeGen PPC.Regs PPC.RegInfo PPC.Instr PPC.Cond PPC.Ppr PPC.CodeGen SPARC.Base SPARC.Regs SPARC.Imm SPARC.AddrMode SPARC.Cond SPARC.Instr SPARC.Stack SPARC.ShortcutJump SPARC.Ppr SPARC.CodeGen SPARC.CodeGen.Amode SPARC.CodeGen.Base SPARC.CodeGen.CondCode SPARC.CodeGen.Gen32 SPARC.CodeGen.Gen64 SPARC.CodeGen.Sanity SPARC.CodeGen.Expand RegAlloc.Liveness RegAlloc.Graph.Main RegAlloc.Graph.Stats RegAlloc.Graph.ArchBase RegAlloc.Graph.ArchX86 RegAlloc.Graph.Coalesce RegAlloc.Graph.Spill RegAlloc.Graph.SpillClean RegAlloc.Graph.SpillCost RegAlloc.Graph.TrivColorable RegAlloc.Linear.Main RegAlloc.Linear.JoinToTargets RegAlloc.Linear.State RegAlloc.Linear.Stats RegAlloc.Linear.FreeRegs RegAlloc.Linear.StackMap RegAlloc.Linear.Base RegAlloc.Linear.X86.FreeRegs RegAlloc.Linear.X86_64.FreeRegs RegAlloc.Linear.PPC.FreeRegs RegAlloc.Linear.SPARC.FreeRegs Dwarf Dwarf.Types Dwarf.Constants DsMeta Convert ByteCodeAsm ByteCodeGen ByteCodeInstr ByteCodeItbls ByteCodeLink Debugger LibFFI Linker ObjLink RtClosureInspect DebuggerUtils trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/ghc_EMlWrQ42XY0BNVbSrKixqY library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/ghc_EMlWrQ42XY0BNVbSrKixqY data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/ghc-7.10.1 hs-libraries: HSghc-7.10.1-EMlWrQ42XY0BNVbSrKixqY include-dirs: 
/opt/ghc/7.10.1/lib/ghc-7.10.1/ghc_EMlWrQ42XY0BNVbSrKixqY/include depends: array-0.5.1.0-e29cdbe82692341ebb7ce6e2798294f9 base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a bin-package-db-0.0.0.0-708fc7d634a370b311371a5bcde40b62 bytestring-0.10.6.0-0909f8f31271f3d75749190bf2ee35db containers-0.5.6.2-2114032c163425cc264e6e1169dc2f6d directory-1.2.2.0-b4959b472d9eee380c6b32291ade29e0 filepath-1.4.0.0-40d643aa87258c186441a1f8f3e13ca6 hoopl-3.10.0.2-8c8dfc4c3140e5f7c982da224c3cb1f0 hpc-0.6.0.2-ac9064885aa8cb08a93314222939ead4 process-1.2.3.0-3b1e9bca6ac38225806ff7bbf3f845b1 template-haskell-2.10.0.0-e895139a0ffff267d412e3d0191ce93b time-1.5.0.1-e17a9220d438435579d2914e90774246 transformers-0.4.2.0-c1a7bb855a176fe475d7b665301cd48f unix-2.7.1.0-e5915eb989e568b732bc7286b0d0817f haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/ghc-7.10.1/ghc.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/ghc-7.10.1 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: haskeline version: 0.7.2.1 id: haskeline-0.7.2.1-a646e1ddf1a755ca5b5775dcb2ef8d8b key: haske_IlDhIe25uAn0WJY379Nu1M license: BSD3 copyright: (c) Judah Jacobson maintainer: Judah Jacobson stability: Experimental homepage: http://trac.haskell.org/haskeline synopsis: A command-line interface for user input, written in Haskell. description: Haskeline provides a user interface for line input in command-line programs. This library is similar in purpose to readline, but since it is written in Haskell it is (hopefully) more easily used in other Haskell programs. . Haskeline runs both on POSIX-compatible systems and on Windows. 
category: User Interfaces author: Judah Jacobson exposed: True exposed-modules: System.Console.Haskeline System.Console.Haskeline.Completion System.Console.Haskeline.MonadException System.Console.Haskeline.History System.Console.Haskeline.IO hidden-modules: System.Console.Haskeline.Backend System.Console.Haskeline.Backend.WCWidth System.Console.Haskeline.Command System.Console.Haskeline.Command.Completion System.Console.Haskeline.Command.History System.Console.Haskeline.Command.KillRing System.Console.Haskeline.Directory System.Console.Haskeline.Emacs System.Console.Haskeline.InputT System.Console.Haskeline.Key System.Console.Haskeline.LineState System.Console.Haskeline.Monads System.Console.Haskeline.Prefs System.Console.Haskeline.RunCommand System.Console.Haskeline.Term System.Console.Haskeline.Command.Undo System.Console.Haskeline.Vi System.Console.Haskeline.Recover System.Console.Haskeline.Backend.Posix System.Console.Haskeline.Backend.Posix.Encoder System.Console.Haskeline.Backend.DumbTerm System.Console.Haskeline.Backend.Terminfo trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/haske_IlDhIe25uAn0WJY379Nu1M library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/haske_IlDhIe25uAn0WJY379Nu1M data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/haskeline-0.7.2.1 hs-libraries: HShaskeline-0.7.2.1-IlDhIe25uAn0WJY379Nu1M depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a bytestring-0.10.6.0-0909f8f31271f3d75749190bf2ee35db containers-0.5.6.2-2114032c163425cc264e6e1169dc2f6d directory-1.2.2.0-b4959b472d9eee380c6b32291ade29e0 filepath-1.4.0.0-40d643aa87258c186441a1f8f3e13ca6 terminfo-0.4.0.1-75199801b414a3f4c9de438be2a4e967 transformers-0.4.2.0-c1a7bb855a176fe475d7b665301cd48f unix-2.7.1.0-e5915eb989e568b732bc7286b0d0817f haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/haskeline-0.7.2.1/haskeline.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/haskeline-0.7.2.1 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: terminfo 
version: 0.4.0.1 id: terminfo-0.4.0.1-75199801b414a3f4c9de438be2a4e967 key: termi_7qZwBlx3clR8sTBilJl253 license: BSD3 copyright: (c) Judah Jacobson maintainer: Judah Jacobson stability: Stable homepage: https://github.com/judah/terminfo synopsis: Haskell bindings to the terminfo library. description: This library provides an interface to the terminfo database (via bindings to the curses library). allows POSIX systems to interact with a variety of terminals using a standard set of capabilities. category: User Interfaces author: Judah Jacobson exposed: True exposed-modules: System.Console.Terminfo System.Console.Terminfo.Base System.Console.Terminfo.Cursor System.Console.Terminfo.Color System.Console.Terminfo.Edit System.Console.Terminfo.Effects System.Console.Terminfo.Keys trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/termi_7qZwBlx3clR8sTBilJl253 library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/termi_7qZwBlx3clR8sTBilJl253 data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/terminfo-0.4.0.1 hs-libraries: HSterminfo-0.4.0.1-7qZwBlx3clR8sTBilJl253 extra-libraries: tinfo includes: ncurses.h term.h depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/terminfo-0.4.0.1/terminfo.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/terminfo-0.4.0.1 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: xhtml version: 3000.2.1 id: xhtml-3000.2.1-7de0560ea74b173b7313fc2303cc6c58 key: xhtml_0mVDYvYGgNUBWShvlDofr1 license: BSD3 copyright: Bjorn Bringert 2004-2006, Andy Gill and the Oregon Graduate Institute of Science and Technology, 1999-2001 maintainer: Chris Dornan stability: Stable homepage: https://github.com/haskell/xhtml synopsis: An XHTML combinator library description: This package provides combinators for producing XHTML 1.0, including the Strict, Transitional and Frameset variants. 
category: Web, XML, Pretty Printer author: Bjorn Bringert exposed: True exposed-modules: Text.XHtml Text.XHtml.Frameset Text.XHtml.Strict Text.XHtml.Transitional Text.XHtml.Debug Text.XHtml.Table hidden-modules: Text.XHtml.Strict.Attributes Text.XHtml.Strict.Elements Text.XHtml.Frameset.Attributes Text.XHtml.Frameset.Elements Text.XHtml.Transitional.Attributes Text.XHtml.Transitional.Elements Text.XHtml.BlockTable Text.XHtml.Extras Text.XHtml.Internals trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/xhtml_0mVDYvYGgNUBWShvlDofr1 library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/xhtml_0mVDYvYGgNUBWShvlDofr1 data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/xhtml-3000.2.1 hs-libraries: HSxhtml-3000.2.1-0mVDYvYGgNUBWShvlDofr1 depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/xhtml-3000.2.1/xhtml.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/xhtml-3000.2.1 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: transformers version: 0.4.2.0 id: transformers-0.4.2.0-c1a7bb855a176fe475d7b665301cd48f key: trans_ALYlebOVzVI4kxbFX5SGhm license: BSD3 maintainer: Ross Paterson synopsis: Concrete functor and monad transformers description: A portable library of functor and monad transformers, inspired by the paper \"Functional Programming with Overloading and Higher-Order Polymorphism\", by Mark P Jones, in /Advanced School of Functional Programming/, 1995 (). . This package contains: . * the monad transformer class (in "Control.Monad.Trans.Class") and IO monad class (in "Control.Monad.IO.Class") . * concrete functor and monad transformers, each with associated operations and functions to lift operations associated with other transformers. . The package can be used on its own in portable Haskell code, in which case operations need to be manually lifted through transformer stacks (see "Control.Monad.Trans.Class" for some examples). 
Alternatively, it can be used with the non-portable monad classes in the @mtl@ or @monads-tf@ packages, which automatically lift operations introduced by monad transformers through other transformers. category: Control author: Andy Gill, Ross Paterson exposed: True exposed-modules: Control.Applicative.Backwards Control.Applicative.Lift Control.Monad.IO.Class Control.Monad.Signatures Control.Monad.Trans.Class Control.Monad.Trans.Cont Control.Monad.Trans.Except Control.Monad.Trans.Error Control.Monad.Trans.Identity Control.Monad.Trans.List Control.Monad.Trans.Maybe Control.Monad.Trans.Reader Control.Monad.Trans.RWS Control.Monad.Trans.RWS.Lazy Control.Monad.Trans.RWS.Strict Control.Monad.Trans.State Control.Monad.Trans.State.Lazy Control.Monad.Trans.State.Strict Control.Monad.Trans.Writer Control.Monad.Trans.Writer.Lazy Control.Monad.Trans.Writer.Strict Data.Functor.Classes Data.Functor.Compose Data.Functor.Constant Data.Functor.Product Data.Functor.Reverse Data.Functor.Sum trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/trans_ALYlebOVzVI4kxbFX5SGhm library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/trans_ALYlebOVzVI4kxbFX5SGhm data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/transformers-0.4.2.0 hs-libraries: HStransformers-0.4.2.0-ALYlebOVzVI4kxbFX5SGhm depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/transformers-0.4.2.0/transformers.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/transformers-0.4.2.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: hoopl version: 3.10.0.2 id: hoopl-3.10.0.2-8c8dfc4c3140e5f7c982da224c3cb1f0 key: hoopl_JxODiSRz1e84NbH6nnZuUk license: BSD3 maintainer: nr@cs.tufts.edu homepage: http://ghc.cs.tufts.edu/hoopl/ synopsis: A library to support dataflow analysis and optimization description: Higher-order optimization library . See /Norman Ramsey, Joao Dias, and Simon Peyton Jones./ /(2010)/ for more details. 
category: Compilers/Interpreters author: Norman Ramsey, Joao Dias, Simon Marlow and Simon Peyton Jones exposed: True exposed-modules: Compiler.Hoopl Compiler.Hoopl.Internals Compiler.Hoopl.Wrappers Compiler.Hoopl.Passes.Dominator Compiler.Hoopl.Passes.DList hidden-modules: Compiler.Hoopl.Checkpoint Compiler.Hoopl.Collections Compiler.Hoopl.Combinators Compiler.Hoopl.Dataflow Compiler.Hoopl.Debug Compiler.Hoopl.Block Compiler.Hoopl.Graph Compiler.Hoopl.Label Compiler.Hoopl.MkGraph Compiler.Hoopl.Fuel Compiler.Hoopl.Pointed Compiler.Hoopl.Shape Compiler.Hoopl.Show Compiler.Hoopl.Unique Compiler.Hoopl.XUtil trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/hoopl_JxODiSRz1e84NbH6nnZuUk library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/hoopl_JxODiSRz1e84NbH6nnZuUk data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/hoopl-3.10.0.2 hs-libraries: HShoopl-3.10.0.2-JxODiSRz1e84NbH6nnZuUk depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a containers-0.5.6.2-2114032c163425cc264e6e1169dc2f6d haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/hoopl-3.10.0.2/hoopl.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/hoopl-3.10.0.2 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: bin-package-db version: 0.0.0.0 id: bin-package-db-0.0.0.0-708fc7d634a370b311371a5bcde40b62 key: binpa_JNoexmBMuO8C771QaIy3YN license: BSD3 maintainer: ghc-devs@haskell.org synopsis: The GHC compiler's view of the GHC package database format description: This library is shared between GHC and ghc-pkg and is used by GHC to read package databases. . It only deals with the subset of the package database that the compiler cares about: modules paths etc and not package metadata like description, authors etc. It is thus not a library interface to ghc-pkg and is *not* suitable for modifying GHC package databases. . 
The package database format and this library are constructed in such a way that while ghc-pkg depends on Cabal, the GHC library and program do not have to depend on Cabal. exposed: True exposed-modules: GHC.PackageDb trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/binpa_JNoexmBMuO8C771QaIy3YN library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/binpa_JNoexmBMuO8C771QaIy3YN data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/bin-package-db-0.0.0.0 hs-libraries: HSbin-package-db-0.0.0.0-JNoexmBMuO8C771QaIy3YN depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a binary-0.7.3.0-0f543654a1ae447e0d4d0bbfc1bb704e bytestring-0.10.6.0-0909f8f31271f3d75749190bf2ee35db directory-1.2.2.0-b4959b472d9eee380c6b32291ade29e0 filepath-1.4.0.0-40d643aa87258c186441a1f8f3e13ca6 haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/bin-package-db-0.0.0.0/bin-package-db.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/bin-package-db-0.0.0.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: Cabal version: 1.22.2.0 id: Cabal-1.22.2.0-9f7cae2e98cca225e3d159c1e1bc773c key: Cabal_HWT8QvVfJLn2ubvobpycJY license: BSD3 copyright: 2003-2006, Isaac Jones 2005-2011, Duncan Coutts maintainer: cabal-devel@haskell.org homepage: http://www.haskell.org/cabal/ synopsis: A framework for packaging Haskell software description: The Haskell Common Architecture for Building Applications and Libraries: a framework defining a common interface for authors to more easily build their Haskell applications in a portable way. . The Haskell Cabal is part of a larger infrastructure for distributing, organizing, and cataloging Haskell libraries and tools. 
category: Distribution author: Isaac Jones Duncan Coutts exposed: True exposed-modules: Distribution.Compat.CreatePipe Distribution.Compat.Environment Distribution.Compat.Exception Distribution.Compat.ReadP Distribution.Compiler Distribution.InstalledPackageInfo Distribution.License Distribution.Make Distribution.ModuleName Distribution.Package Distribution.PackageDescription Distribution.PackageDescription.Check Distribution.PackageDescription.Configuration Distribution.PackageDescription.Parse Distribution.PackageDescription.PrettyPrint Distribution.PackageDescription.Utils Distribution.ParseUtils Distribution.ReadE Distribution.Simple Distribution.Simple.Bench Distribution.Simple.Build Distribution.Simple.Build.Macros Distribution.Simple.Build.PathsModule Distribution.Simple.BuildPaths Distribution.Simple.BuildTarget Distribution.Simple.CCompiler Distribution.Simple.Command Distribution.Simple.Compiler Distribution.Simple.Configure Distribution.Simple.GHC Distribution.Simple.GHCJS Distribution.Simple.Haddock Distribution.Simple.HaskellSuite Distribution.Simple.Hpc Distribution.Simple.Install Distribution.Simple.InstallDirs Distribution.Simple.JHC Distribution.Simple.LHC Distribution.Simple.LocalBuildInfo Distribution.Simple.PackageIndex Distribution.Simple.PreProcess Distribution.Simple.PreProcess.Unlit Distribution.Simple.Program Distribution.Simple.Program.Ar Distribution.Simple.Program.Builtin Distribution.Simple.Program.Db Distribution.Simple.Program.Find Distribution.Simple.Program.GHC Distribution.Simple.Program.HcPkg Distribution.Simple.Program.Hpc Distribution.Simple.Program.Ld Distribution.Simple.Program.Run Distribution.Simple.Program.Script Distribution.Simple.Program.Strip Distribution.Simple.Program.Types Distribution.Simple.Register Distribution.Simple.Setup Distribution.Simple.SrcDist Distribution.Simple.Test Distribution.Simple.Test.ExeV10 Distribution.Simple.Test.LibV09 Distribution.Simple.Test.Log Distribution.Simple.UHC 
Distribution.Simple.UserHooks Distribution.Simple.Utils Distribution.System Distribution.TestSuite Distribution.Text Distribution.Utils.NubList Distribution.Verbosity Distribution.Version Language.Haskell.Extension hidden-modules: Distribution.Compat.Binary Distribution.Compat.CopyFile Distribution.Compat.TempFile Distribution.GetOpt Distribution.Simple.GHC.Internal Distribution.Simple.GHC.IPI641 Distribution.Simple.GHC.IPI642 Distribution.Simple.GHC.ImplInfo Paths_Cabal trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/Cabal_HWT8QvVfJLn2ubvobpycJY library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/Cabal_HWT8QvVfJLn2ubvobpycJY data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/Cabal-1.22.2.0 hs-libraries: HSCabal-1.22.2.0-HWT8QvVfJLn2ubvobpycJY depends: array-0.5.1.0-e29cdbe82692341ebb7ce6e2798294f9 base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a binary-0.7.3.0-0f543654a1ae447e0d4d0bbfc1bb704e bytestring-0.10.6.0-0909f8f31271f3d75749190bf2ee35db containers-0.5.6.2-2114032c163425cc264e6e1169dc2f6d deepseq-1.4.1.1-c1376f846fa170f2cc2cb2e57b203339 directory-1.2.2.0-b4959b472d9eee380c6b32291ade29e0 filepath-1.4.0.0-40d643aa87258c186441a1f8f3e13ca6 pretty-1.1.2.0-0d4e1eca3b0cfcebe20b9405f7bdaca9 process-1.2.3.0-3b1e9bca6ac38225806ff7bbf3f845b1 time-1.5.0.1-e17a9220d438435579d2914e90774246 unix-2.7.1.0-e5915eb989e568b732bc7286b0d0817f haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/Cabal-1.22.2.0/Cabal.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/Cabal-1.22.2.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: binary version: 0.7.3.0 id: binary-0.7.3.0-0f543654a1ae447e0d4d0bbfc1bb704e key: binar_EKE3c9Lmxb3DQpU0fPtru6 license: BSD3 maintainer: Lennart Kolmodin, Don Stewart stability: provisional homepage: https://github.com/kolmodin/binary synopsis: Binary serialisation for Haskell values using lazy ByteStrings description: Efficient, pure binary serialisation using lazy ByteStrings. 
Haskell values may be encoded to and from binary formats, written to disk as binary, or sent over the network. The format used can be automatically generated, or you can choose to implement a custom format if needed. Serialisation speeds of over 1 G\/sec have been observed, so this library should be suitable for high performance scenarios. category: Data, Parsing author: Lennart Kolmodin exposed: True exposed-modules: Data.Binary Data.Binary.Put Data.Binary.Get Data.Binary.Get.Internal Data.Binary.Builder Data.Binary.Builder.Internal hidden-modules: Data.Binary.Builder.Base Data.Binary.Class Data.Binary.Generic trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/binar_EKE3c9Lmxb3DQpU0fPtru6 library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/binar_EKE3c9Lmxb3DQpU0fPtru6 data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/binary-0.7.3.0 hs-libraries: HSbinary-0.7.3.0-EKE3c9Lmxb3DQpU0fPtru6 depends: array-0.5.1.0-e29cdbe82692341ebb7ce6e2798294f9 base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a bytestring-0.10.6.0-0909f8f31271f3d75749190bf2ee35db containers-0.5.6.2-2114032c163425cc264e6e1169dc2f6d haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/binary-0.7.3.0/binary.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/binary-0.7.3.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: template-haskell version: 2.10.0.0 id: template-haskell-2.10.0.0-e895139a0ffff267d412e3d0191ce93b key: templ_BVMCZyLwIlfGfcqqzyUAI8 license: BSD3 maintainer: libraries@haskell.org synopsis: Support library for Template Haskell description: This package provides modules containing facilities for manipulating Haskell source code using Template Haskell. . See for more information. 
category: Template Haskell exposed: True exposed-modules: Language.Haskell.TH Language.Haskell.TH.Lib Language.Haskell.TH.Ppr Language.Haskell.TH.PprLib Language.Haskell.TH.Quote Language.Haskell.TH.Syntax hidden-modules: Language.Haskell.TH.Lib.Map trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/templ_BVMCZyLwIlfGfcqqzyUAI8 library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/templ_BVMCZyLwIlfGfcqqzyUAI8 data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/template-haskell-2.10.0.0 hs-libraries: HStemplate-haskell-2.10.0.0-BVMCZyLwIlfGfcqqzyUAI8 depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a pretty-1.1.2.0-0d4e1eca3b0cfcebe20b9405f7bdaca9 haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/template-haskell-2.10.0.0/template-haskell.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/template-haskell-2.10.0.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: pretty version: 1.1.2.0 id: pretty-1.1.2.0-0d4e1eca3b0cfcebe20b9405f7bdaca9 key: prett_7jIfj8VCGFf1WS0tIQ1XSZ license: BSD3 maintainer: David Terei stability: Stable homepage: http://github.com/haskell/pretty synopsis: Pretty-printing library description: This package contains a pretty-printing library, a set of API's that provides a way to easily print out text in a consistent format of your choosing. This is useful for compilers and related tools. . This library was originally designed by John Hughes's and has since been heavily modified by Simon Peyton Jones. 
category: Text exposed: True exposed-modules: Text.PrettyPrint Text.PrettyPrint.HughesPJ Text.PrettyPrint.HughesPJClass trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/prett_7jIfj8VCGFf1WS0tIQ1XSZ library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/prett_7jIfj8VCGFf1WS0tIQ1XSZ data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/pretty-1.1.2.0 hs-libraries: HSpretty-1.1.2.0-7jIfj8VCGFf1WS0tIQ1XSZ depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a deepseq-1.4.1.1-c1376f846fa170f2cc2cb2e57b203339 ghc-prim-0.4.0.0-7c945cc0c41d3b7b70f3edd125671166 haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/pretty-1.1.2.0/pretty.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/pretty-1.1.2.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: hpc version: 0.6.0.2 id: hpc-0.6.0.2-ac9064885aa8cb08a93314222939ead4 key: hpc_CmUUQl5bURfBueJrdYfNs3 license: BSD3 maintainer: ghc-devs@haskell.org synopsis: Code Coverage Library for Haskell description: This package provides the code coverage library for Haskell. . See for more information. 
category: Control author: Andy Gill exposed: True exposed-modules: Trace.Hpc.Util Trace.Hpc.Mix Trace.Hpc.Tix Trace.Hpc.Reflect trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/hpc_CmUUQl5bURfBueJrdYfNs3 library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/hpc_CmUUQl5bURfBueJrdYfNs3 data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/hpc-0.6.0.2 hs-libraries: HShpc-0.6.0.2-CmUUQl5bURfBueJrdYfNs3 depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a containers-0.5.6.2-2114032c163425cc264e6e1169dc2f6d directory-1.2.2.0-b4959b472d9eee380c6b32291ade29e0 filepath-1.4.0.0-40d643aa87258c186441a1f8f3e13ca6 time-1.5.0.1-e17a9220d438435579d2914e90774246 haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/hpc-0.6.0.2/hpc.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/hpc-0.6.0.2 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: process version: 1.2.3.0 id: process-1.2.3.0-3b1e9bca6ac38225806ff7bbf3f845b1 key: proce_0hwN3CTKynhHQqQkChnSdH license: BSD3 maintainer: libraries@haskell.org synopsis: Process libraries description: This package contains libraries for dealing with system processes. 
category: System exposed: True exposed-modules: System.Cmd System.Process System.Process.Internals trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/proce_0hwN3CTKynhHQqQkChnSdH library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/proce_0hwN3CTKynhHQqQkChnSdH data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/process-1.2.3.0 hs-libraries: HSprocess-1.2.3.0-0hwN3CTKynhHQqQkChnSdH include-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/proce_0hwN3CTKynhHQqQkChnSdH/include includes: runProcess.h depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a deepseq-1.4.1.1-c1376f846fa170f2cc2cb2e57b203339 directory-1.2.2.0-b4959b472d9eee380c6b32291ade29e0 filepath-1.4.0.0-40d643aa87258c186441a1f8f3e13ca6 unix-2.7.1.0-e5915eb989e568b732bc7286b0d0817f haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/process-1.2.3.0/process.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/process-1.2.3.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: directory version: 1.2.2.0 id: directory-1.2.2.0-b4959b472d9eee380c6b32291ade29e0 key: direc_3TcTyYedch32o1zTH2MR00 license: BSD3 maintainer: libraries@haskell.org synopsis: Platform-agnostic library for filesystem operations description: This library provides a basic set of operations for manipulating files and directories in a portable way. 
category: System exposed: True exposed-modules: System.Directory trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/direc_3TcTyYedch32o1zTH2MR00 library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/direc_3TcTyYedch32o1zTH2MR00 data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/directory-1.2.2.0 hs-libraries: HSdirectory-1.2.2.0-3TcTyYedch32o1zTH2MR00 include-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/direc_3TcTyYedch32o1zTH2MR00/include includes: HsDirectory.h depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a filepath-1.4.0.0-40d643aa87258c186441a1f8f3e13ca6 time-1.5.0.1-e17a9220d438435579d2914e90774246 unix-2.7.1.0-e5915eb989e568b732bc7286b0d0817f haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/directory-1.2.2.0/directory.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/directory-1.2.2.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: unix version: 2.7.1.0 id: unix-2.7.1.0-e5915eb989e568b732bc7286b0d0817f key: unix_G4Yo1pNtYrk8nCq1cx8P9d license: BSD3 maintainer: libraries@haskell.org homepage: https://github.com/haskell/unix synopsis: POSIX functionality description: This package gives you access to the set of operating system services standardised by POSIX 1003.1b (or the IEEE Portable Operating System Interface for Computing Environments - IEEE Std. 1003.1). . The package is not supported under Windows (except under Cygwin). 
category: System exposed: True exposed-modules: System.Posix System.Posix.ByteString System.Posix.Error System.Posix.Resource System.Posix.Time System.Posix.Unistd System.Posix.User System.Posix.Signals System.Posix.Signals.Exts System.Posix.Semaphore System.Posix.SharedMem System.Posix.ByteString.FilePath System.Posix.Directory System.Posix.Directory.ByteString System.Posix.DynamicLinker.Module System.Posix.DynamicLinker.Module.ByteString System.Posix.DynamicLinker.Prim System.Posix.DynamicLinker.ByteString System.Posix.DynamicLinker System.Posix.Files System.Posix.Files.ByteString System.Posix.IO System.Posix.IO.ByteString System.Posix.Env System.Posix.Env.ByteString System.Posix.Fcntl System.Posix.Process System.Posix.Process.Internals System.Posix.Process.ByteString System.Posix.Temp System.Posix.Temp.ByteString System.Posix.Terminal System.Posix.Terminal.ByteString hidden-modules: System.Posix.Directory.Common System.Posix.DynamicLinker.Common System.Posix.Files.Common System.Posix.IO.Common System.Posix.Process.Common System.Posix.Terminal.Common trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/unix_G4Yo1pNtYrk8nCq1cx8P9d library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/unix_G4Yo1pNtYrk8nCq1cx8P9d data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/unix-2.7.1.0 hs-libraries: HSunix-2.7.1.0-G4Yo1pNtYrk8nCq1cx8P9d extra-libraries: rt util dl pthread include-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/unix_G4Yo1pNtYrk8nCq1cx8P9d/include includes: HsUnix.h execvpe.h depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a bytestring-0.10.6.0-0909f8f31271f3d75749190bf2ee35db time-1.5.0.1-e17a9220d438435579d2914e90774246 haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/unix-2.7.1.0/unix.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/unix-2.7.1.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: time version: 1.5.0.1 id: time-1.5.0.1-e17a9220d438435579d2914e90774246 key: time_Hh2clZW6in4HpYHx5bLtb7 license: BSD3 maintainer: 
stability: stable homepage: https://github.com/haskell/time synopsis: A time library description: A time library category: System author: Ashley Yakeley exposed: True exposed-modules: Data.Time.Calendar Data.Time.Calendar.MonthDay Data.Time.Calendar.OrdinalDate Data.Time.Calendar.WeekDate Data.Time.Calendar.Julian Data.Time.Calendar.Easter Data.Time.Clock Data.Time.Clock.POSIX Data.Time.Clock.TAI Data.Time.LocalTime Data.Time.Format Data.Time hidden-modules: Data.Time.Calendar.Private Data.Time.Calendar.Days Data.Time.Calendar.Gregorian Data.Time.Calendar.JulianYearDay Data.Time.Clock.Scale Data.Time.Clock.UTC Data.Time.Clock.CTimeval Data.Time.Clock.UTCDiff Data.Time.LocalTime.TimeZone Data.Time.LocalTime.TimeOfDay Data.Time.LocalTime.LocalTime Data.Time.Format.Parse Data.Time.Format.Locale trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/time_Hh2clZW6in4HpYHx5bLtb7 library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/time_Hh2clZW6in4HpYHx5bLtb7 data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/time-1.5.0.1 hs-libraries: HStime-1.5.0.1-Hh2clZW6in4HpYHx5bLtb7 include-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/time_Hh2clZW6in4HpYHx5bLtb7/include depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a deepseq-1.4.1.1-c1376f846fa170f2cc2cb2e57b203339 haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/time-1.5.0.1/time.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/time-1.5.0.1 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: containers version: 0.5.6.2 id: containers-0.5.6.2-2114032c163425cc264e6e1169dc2f6d key: conta_47ajk3tbda43DFWyeF3oHQ license: BSD3 maintainer: fox@ucw.cz synopsis: Assorted concrete container types description: This package contains efficient general-purpose implementations of various basic immutable container types. The declared cost of each operation is either worst-case or amortized, but remains valid even if structures are shared. 
category: Data Structures exposed: True exposed-modules: Data.IntMap Data.IntMap.Lazy Data.IntMap.Strict Data.IntSet Data.Map Data.Map.Lazy Data.Map.Strict Data.Set Data.Graph Data.Sequence Data.Tree hidden-modules: Data.IntMap.Base Data.IntSet.Base Data.Map.Base Data.Set.Base Data.Utils.BitUtil Data.Utils.StrictFold Data.Utils.StrictPair trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/conta_47ajk3tbda43DFWyeF3oHQ library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/conta_47ajk3tbda43DFWyeF3oHQ data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/containers-0.5.6.2 hs-libraries: HScontainers-0.5.6.2-47ajk3tbda43DFWyeF3oHQ depends: array-0.5.1.0-e29cdbe82692341ebb7ce6e2798294f9 base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a deepseq-1.4.1.1-c1376f846fa170f2cc2cb2e57b203339 ghc-prim-0.4.0.0-7c945cc0c41d3b7b70f3edd125671166 haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/containers-0.5.6.2/containers.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/containers-0.5.6.2 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: bytestring version: 0.10.6.0 id: bytestring-0.10.6.0-0909f8f31271f3d75749190bf2ee35db key: bytes_6vj5EoliHgNHISHCVCb069 license: BSD3 copyright: Copyright (c) Don Stewart 2005-2009, (c) Duncan Coutts 2006-2015, (c) David Roundy 2003-2005, (c) Jasper Van der Jeugt 2010, (c) Simon Meier 2010-2013. maintainer: Duncan Coutts homepage: https://github.com/haskell/bytestring synopsis: Fast, compact, strict and lazy byte strings with a list interface description: An efficient compact, immutable byte string type (both strict and lazy) suitable for binary or 8-bit character data. . The 'ByteString' type represents sequences of bytes or 8-bit characters. It is suitable for high performance use, both in terms of large data quantities, or high speed requirements. The 'ByteString' functions follow the same style as Haskell\'s ordinary lists, so it is easy to convert code from using 'String' to 'ByteString'. . 
Two 'ByteString' variants are provided: . * Strict 'ByteString's keep the string as a single large array. This makes them convenient for passing data between C and Haskell. . * Lazy 'ByteString's use a lazy list of strict chunks which makes it suitable for I\/O streaming tasks. . The @Char8@ modules provide a character-based view of the same underlying 'ByteString' types. This makes it convenient to handle mixed binary and 8-bit character content (which is common in many file formats and network protocols). . The 'Builder' module provides an efficient way to build up 'ByteString's in an ad-hoc way by repeated concatenation. This is ideal for fast serialisation or pretty printing. . There is also a 'ShortByteString' type which has a lower memory overhead and can be converted to or from a 'ByteString', but supports very few other operations. It is suitable for keeping many short strings in memory. . 'ByteString's are not designed for Unicode. For Unicode strings you should use the 'Text' type from the @text@ package. . These modules are intended to be imported qualified, to avoid name clashes with "Prelude" functions, e.g. . 
> import qualified Data.ByteString as BS category: Data author: Don Stewart, Duncan Coutts exposed: True exposed-modules: Data.ByteString Data.ByteString.Char8 Data.ByteString.Unsafe Data.ByteString.Internal Data.ByteString.Lazy Data.ByteString.Lazy.Char8 Data.ByteString.Lazy.Internal Data.ByteString.Short Data.ByteString.Short.Internal Data.ByteString.Builder Data.ByteString.Builder.Extra Data.ByteString.Builder.Prim Data.ByteString.Builder.Internal Data.ByteString.Builder.Prim.Internal Data.ByteString.Lazy.Builder Data.ByteString.Lazy.Builder.Extras Data.ByteString.Lazy.Builder.ASCII hidden-modules: Data.ByteString.Builder.ASCII Data.ByteString.Builder.Prim.Binary Data.ByteString.Builder.Prim.ASCII Data.ByteString.Builder.Prim.Internal.Floating Data.ByteString.Builder.Prim.Internal.UncheckedShifts Data.ByteString.Builder.Prim.Internal.Base16 trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/bytes_6vj5EoliHgNHISHCVCb069 library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/bytes_6vj5EoliHgNHISHCVCb069 data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/bytestring-0.10.6.0 hs-libraries: HSbytestring-0.10.6.0-6vj5EoliHgNHISHCVCb069 include-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/bytes_6vj5EoliHgNHISHCVCb069/include includes: fpstring.h depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a deepseq-1.4.1.1-c1376f846fa170f2cc2cb2e57b203339 ghc-prim-0.4.0.0-7c945cc0c41d3b7b70f3edd125671166 integer-gmp-1.0.0.0-3c947e5fb6dca14804d9b2793c521b67 haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/bytestring-0.10.6.0/bytestring.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/bytestring-0.10.6.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: deepseq version: 1.4.1.1 id: deepseq-1.4.1.1-c1376f846fa170f2cc2cb2e57b203339 key: deeps_FpR4obOZALU1lutWnrBldi license: BSD3 maintainer: libraries@haskell.org synopsis: Deep evaluation of data structures description: This package provides methods for fully evaluating data structures (\"deep 
evaluation\"). Deep evaluation is often used for adding strictness to a program, e.g. in order to force pending exceptions, remove space leaks, or force lazy I/O to happen. It is also useful in parallel programs, to ensure pending work does not migrate to the wrong thread. . The primary use of this package is via the 'deepseq' function, a \"deep\" version of 'seq'. It is implemented on top of an 'NFData' typeclass (\"Normal Form Data\", data structures with no unevaluated components) which defines strategies for fully evaluating different data types. category: Control exposed: True exposed-modules: Control.DeepSeq trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/deeps_FpR4obOZALU1lutWnrBldi library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/deeps_FpR4obOZALU1lutWnrBldi data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/deepseq-1.4.1.1 hs-libraries: HSdeepseq-1.4.1.1-FpR4obOZALU1lutWnrBldi depends: array-0.5.1.0-e29cdbe82692341ebb7ce6e2798294f9 base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/deepseq-1.4.1.1/deepseq.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/deepseq-1.4.1.1 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: array version: 0.5.1.0 id: array-0.5.1.0-e29cdbe82692341ebb7ce6e2798294f9 key: array_FaHmcBFfuRM8kmZLEY8D5S license: BSD3 maintainer: libraries@haskell.org synopsis: Mutable and immutable arrays description: In addition to providing the "Data.Array" module , this package also defines the classes 'IArray' of immutable arrays and 'MArray' of arrays mutable within appropriate monads, as well as some instances of these classes. 
category: Data Structures exposed: True exposed-modules: Data.Array Data.Array.Base Data.Array.IArray Data.Array.IO Data.Array.IO.Safe Data.Array.IO.Internals Data.Array.MArray Data.Array.MArray.Safe Data.Array.ST Data.Array.ST.Safe Data.Array.Storable Data.Array.Storable.Safe Data.Array.Storable.Internals Data.Array.Unboxed Data.Array.Unsafe trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/array_FaHmcBFfuRM8kmZLEY8D5S library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/array_FaHmcBFfuRM8kmZLEY8D5S data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/array-0.5.1.0 hs-libraries: HSarray-0.5.1.0-FaHmcBFfuRM8kmZLEY8D5S depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/array-0.5.1.0/array.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/array-0.5.1.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: filepath version: 1.4.0.0 id: filepath-1.4.0.0-40d643aa87258c186441a1f8f3e13ca6 key: filep_5HhyRonfEZoDO205Wm9E4h license: BSD3 copyright: Neil Mitchell 2005-2015 maintainer: Neil Mitchell homepage: https://github.com/haskell/filepath#readme synopsis: Library for manipulating FilePaths in a cross platform way. description: This package provides functionality for manipulating @FilePath@ values, and is shipped with both and the . It provides three modules: . * "System.FilePath.Posix" manipulates POSIX\/Linux style @FilePath@ values (with @\/@ as the path separator). . * "System.FilePath.Windows" manipulates Windows style @FilePath@ values (with either @\\@ or @\/@ as the path separator, and deals with drives). . * "System.FilePath" is an alias for the module appropriate to your platform. . All three modules provide the same API, and the same documentation (calling out differences in the different variants). 
category: System author: Neil Mitchell exposed: True exposed-modules: System.FilePath System.FilePath.Posix System.FilePath.Windows trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/filep_5HhyRonfEZoDO205Wm9E4h library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/filep_5HhyRonfEZoDO205Wm9E4h data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/filepath-1.4.0.0 hs-libraries: HSfilepath-1.4.0.0-5HhyRonfEZoDO205Wm9E4h depends: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/filepath-1.4.0.0/filepath.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/filepath-1.4.0.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: base version: 4.8.0.0 id: base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a key: base_I5BErHzyOm07EBNpKBEeUv license: BSD3 maintainer: libraries@haskell.org synopsis: Basic libraries description: This package contains the "Prelude" and its support libraries, and a large collection of useful libraries ranging from data structures to parsing combinators and debugging utilities. 
category: Prelude exposed: True exposed-modules: Control.Applicative Control.Arrow Control.Category Control.Concurrent Control.Concurrent.Chan Control.Concurrent.MVar Control.Concurrent.QSem Control.Concurrent.QSemN Control.Exception Control.Exception.Base Control.Monad Control.Monad.Fix Control.Monad.Instances Control.Monad.ST Control.Monad.ST.Lazy Control.Monad.ST.Lazy.Safe Control.Monad.ST.Lazy.Unsafe Control.Monad.ST.Safe Control.Monad.ST.Strict Control.Monad.ST.Unsafe Control.Monad.Zip Data.Bifunctor Data.Bits Data.Bool Data.Char Data.Coerce Data.Complex Data.Data Data.Dynamic Data.Either Data.Eq Data.Fixed Data.Foldable Data.Function Data.Functor Data.Functor.Identity Data.IORef Data.Int Data.Ix Data.List Data.Maybe Data.Monoid Data.Ord Data.Proxy Data.Ratio Data.STRef Data.STRef.Lazy Data.STRef.Strict Data.String Data.Traversable Data.Tuple Data.Type.Bool Data.Type.Coercion Data.Type.Equality Data.Typeable Data.Typeable.Internal Data.Unique Data.Version Data.Void Data.Word Debug.Trace Foreign Foreign.C Foreign.C.Error Foreign.C.String Foreign.C.Types Foreign.Concurrent Foreign.ForeignPtr Foreign.ForeignPtr.Safe Foreign.ForeignPtr.Unsafe Foreign.Marshal Foreign.Marshal.Alloc Foreign.Marshal.Array Foreign.Marshal.Error Foreign.Marshal.Pool Foreign.Marshal.Safe Foreign.Marshal.Unsafe Foreign.Marshal.Utils Foreign.Ptr Foreign.Safe Foreign.StablePtr Foreign.Storable GHC.Arr GHC.Base GHC.Char GHC.Conc GHC.Conc.IO GHC.Conc.Signal GHC.Conc.Sync GHC.ConsoleHandler GHC.Constants GHC.Desugar GHC.Enum GHC.Environment GHC.Err GHC.Exception GHC.Exts GHC.Fingerprint GHC.Fingerprint.Type GHC.Float GHC.Float.ConversionUtils GHC.Float.RealFracMethods GHC.Foreign GHC.ForeignPtr GHC.GHCi GHC.Generics GHC.IO GHC.IO.Buffer GHC.IO.BufferedIO GHC.IO.Device GHC.IO.Encoding GHC.IO.Encoding.CodePage GHC.IO.Encoding.Failure GHC.IO.Encoding.Iconv GHC.IO.Encoding.Latin1 GHC.IO.Encoding.Types GHC.IO.Encoding.UTF16 GHC.IO.Encoding.UTF32 GHC.IO.Encoding.UTF8 GHC.IO.Exception GHC.IO.FD 
GHC.IO.Handle GHC.IO.Handle.FD GHC.IO.Handle.Internals GHC.IO.Handle.Text GHC.IO.Handle.Types GHC.IO.IOMode GHC.IOArray GHC.IORef GHC.IP GHC.Int GHC.List GHC.MVar GHC.Natural GHC.Num GHC.OldList GHC.PArr GHC.Pack GHC.Profiling GHC.Ptr GHC.Read GHC.Real GHC.RTS.Flags GHC.ST GHC.StaticPtr GHC.STRef GHC.Show GHC.Stable GHC.Stack GHC.Stats GHC.Storable GHC.TopHandler GHC.TypeLits GHC.Unicode GHC.Weak GHC.Word Numeric Numeric.Natural Prelude System.CPUTime System.Console.GetOpt System.Environment System.Exit System.IO System.IO.Error System.IO.Unsafe System.Info System.Mem System.Mem.StableName System.Mem.Weak System.Posix.Internals System.Posix.Types System.Timeout Text.ParserCombinators.ReadP Text.ParserCombinators.ReadPrec Text.Printf Text.Read Text.Read.Lex Text.Show Text.Show.Functions Unsafe.Coerce GHC.Event hidden-modules: Control.Monad.ST.Imp Control.Monad.ST.Lazy.Imp Data.OldList Foreign.ForeignPtr.Imp System.Environment.ExecutablePath GHC.Event.Arr GHC.Event.Array GHC.Event.Clock GHC.Event.Control GHC.Event.EPoll GHC.Event.IntTable GHC.Event.Internal GHC.Event.KQueue GHC.Event.Manager GHC.Event.PSQ GHC.Event.Poll GHC.Event.Thread GHC.Event.TimerManager GHC.Event.Unique trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/base_I5BErHzyOm07EBNpKBEeUv library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/base_I5BErHzyOm07EBNpKBEeUv data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/base-4.8.0.0 hs-libraries: HSbase-4.8.0.0-I5BErHzyOm07EBNpKBEeUv include-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/base_I5BErHzyOm07EBNpKBEeUv/include includes: HsBase.h depends: builtin_rts ghc-prim-0.4.0.0-7c945cc0c41d3b7b70f3edd125671166 integer-gmp-1.0.0.0-3c947e5fb6dca14804d9b2793c521b67 haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/base-4.8.0.0/base.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/base-4.8.0.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: integer-gmp version: 1.0.0.0 id: integer-gmp-1.0.0.0-3c947e5fb6dca14804d9b2793c521b67 
key: integ_2aU3IZNMF9a7mQ0OzsZ0dS license: BSD3 maintainer: hvr@gnu.org synopsis: Integer library based on GMP category: Numeric, Algebra author: Herbert Valerio Riedel exposed: True exposed-modules: GHC.Integer GHC.Integer.Logarithms GHC.Integer.Logarithms.Internals GHC.Integer.GMP.Internals hidden-modules: GHC.Integer.Type trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/integ_2aU3IZNMF9a7mQ0OzsZ0dS library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/integ_2aU3IZNMF9a7mQ0OzsZ0dS data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/integer-gmp-1.0.0.0 hs-libraries: HSinteger-gmp-1.0.0.0-2aU3IZNMF9a7mQ0OzsZ0dS extra-libraries: gmp include-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/integ_2aU3IZNMF9a7mQ0OzsZ0dS/include depends: ghc-prim-0.4.0.0-7c945cc0c41d3b7b70f3edd125671166 haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/integer-gmp-1.0.0.0/integer-gmp.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/integer-gmp-1.0.0.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: ghc-prim version: 0.4.0.0 id: ghc-prim-0.4.0.0-7c945cc0c41d3b7b70f3edd125671166 key: ghcpr_8TmvWUcS1U1IKHT0levwg3 license: BSD3 maintainer: libraries@haskell.org synopsis: GHC primitives description: GHC primitives. 
category: GHC exposed: True exposed-modules: GHC.CString GHC.Classes GHC.Debug GHC.IntWord64 GHC.Magic GHC.PrimopWrappers GHC.Tuple GHC.Types GHC.Prim trusted: False import-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/ghcpr_8TmvWUcS1U1IKHT0levwg3 library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/ghcpr_8TmvWUcS1U1IKHT0levwg3 data-dir: /opt/ghc/7.10.1/share/x86_64-linux-ghc-7.10.1/ghc-prim-0.4.0.0 hs-libraries: HSghc-prim-0.4.0.0-8TmvWUcS1U1IKHT0levwg3 depends: builtin_rts haddock-interfaces: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/ghc-prim-0.4.0.0/ghc-prim.haddock haddock-html: /opt/ghc/7.10.1/share/doc/ghc/html/libraries/ghc-prim-0.4.0.0 pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" --- name: rts version: 1.0 id: builtin_rts key: rts license: BSD3 maintainer: glasgow-haskell-users@haskell.org exposed: True trusted: False library-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/rts hs-libraries: HSrts Cffi extra-libraries: m rt dl include-dirs: /opt/ghc/7.10.1/lib/ghc-7.10.1/include includes: Stg.h ld-options: "-Wl,-u,ghczmprim_GHCziTypes_Izh_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Czh_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Fzh_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Dzh_static_info" "-Wl,-u,base_GHCziPtr_Ptr_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Wzh_static_info" "-Wl,-u,base_GHCziInt_I8zh_static_info" "-Wl,-u,base_GHCziInt_I16zh_static_info" "-Wl,-u,base_GHCziInt_I32zh_static_info" "-Wl,-u,base_GHCziInt_I64zh_static_info" "-Wl,-u,base_GHCziWord_W8zh_static_info" "-Wl,-u,base_GHCziWord_W16zh_static_info" "-Wl,-u,base_GHCziWord_W32zh_static_info" "-Wl,-u,base_GHCziWord_W64zh_static_info" "-Wl,-u,base_GHCziStable_StablePtr_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Izh_con_info" "-Wl,-u,ghczmprim_GHCziTypes_Czh_con_info" "-Wl,-u,ghczmprim_GHCziTypes_Fzh_con_info" "-Wl,-u,ghczmprim_GHCziTypes_Dzh_con_info" "-Wl,-u,base_GHCziPtr_Ptr_con_info" "-Wl,-u,base_GHCziPtr_FunPtr_con_info" "-Wl,-u,base_GHCziStable_StablePtr_con_info" "-Wl,-u,ghczmprim_GHCziTypes_False_closure" 
"-Wl,-u,ghczmprim_GHCziTypes_True_closure" "-Wl,-u,base_GHCziPack_unpackCString_closure" "-Wl,-u,base_GHCziIOziException_stackOverflow_closure" "-Wl,-u,base_GHCziIOziException_heapOverflow_closure" "-Wl,-u,base_ControlziExceptionziBase_nonTermination_closure" "-Wl,-u,base_GHCziIOziException_blockedIndefinitelyOnMVar_closure" "-Wl,-u,base_GHCziIOziException_blockedIndefinitelyOnSTM_closure" "-Wl,-u,base_GHCziIOziException_allocationLimitExceeded_closure" "-Wl,-u,base_ControlziExceptionziBase_nestedAtomically_closure" "-Wl,-u,base_GHCziEventziThread_blockedOnBadFD_closure" "-Wl,-u,base_GHCziWeak_runFinalizzerBatch_closure" "-Wl,-u,base_GHCziTopHandler_flushStdHandles_closure" "-Wl,-u,base_GHCziTopHandler_runIO_closure" "-Wl,-u,base_GHCziTopHandler_runNonIO_closure" "-Wl,-u,base_GHCziConcziIO_ensureIOManagerIsRunning_closure" "-Wl,-u,base_GHCziConcziIO_ioManagerCapabilitiesChanged_closure" "-Wl,-u,base_GHCziConcziSync_runSparks_closure" "-Wl,-u,base_GHCziConcziSignal_runHandlersPtr_closure" pkgroot: "/opt/ghc/7.10.1/lib/ghc-7.10.1" stack-2.15.7/tests/unit/package-dump/ghc-7.8.4-osx.txt0000644000000000000000000000636214502056216020355 0ustar0000000000000000name: hmatrix version: 0.16.1.5 id: hmatrix-0.16.1.5-12d5d21f26aa98774cdd8edbc343fbfe license: BSD3 copyright: maintainer: Alberto Ruiz stability: provisional homepage: https://github.com/albertoruiz/hmatrix package-url: synopsis: Numeric Linear Algebra description: Linear algebra based on BLAS and LAPACK. . The package is organized as follows: . ["Numeric.LinearAlgebra.HMatrix"] Starting point and recommended import module for most applications. . ["Numeric.LinearAlgebra.Static"] Experimental alternative interface. . ["Numeric.LinearAlgebra.Devel"] Tools for extending the library. . (Other modules are exposed with hidden documentation for backwards compatibility.) . 
Code examples: category: Math author: Alberto Ruiz exposed: True exposed-modules: Data.Packed Data.Packed.Vector Data.Packed.Matrix Data.Packed.Foreign Data.Packed.ST Data.Packed.Development Numeric.LinearAlgebra Numeric.LinearAlgebra.LAPACK Numeric.LinearAlgebra.Algorithms Numeric.Container Numeric.LinearAlgebra.Util Numeric.LinearAlgebra.Devel Numeric.LinearAlgebra.Data Numeric.LinearAlgebra.HMatrix Numeric.LinearAlgebra.Static hidden-modules: Data.Packed.Internal Data.Packed.Internal.Common Data.Packed.Internal.Signatures Data.Packed.Internal.Vector Data.Packed.Internal.Matrix Data.Packed.IO Numeric.Chain Numeric.Vectorized Numeric.Vector Numeric.Matrix Data.Packed.Internal.Numeric Data.Packed.Numeric Numeric.LinearAlgebra.Util.Convolution Numeric.LinearAlgebra.Util.CG Numeric.LinearAlgebra.Random Numeric.Conversion Numeric.Sparse Numeric.LinearAlgebra.Static.Internal trusted: False import-dirs: /Users/alexbiehl/.stack/snapshots/x86_64-osx/lts-2.13/7.8.4/lib/x86_64-osx-ghc-7.8.4/hmatrix-0.16.1.5 library-dirs: /Users/alexbiehl/.stack/snapshots/x86_64-osx/lts-2.13/7.8.4/lib/x86_64-osx-ghc-7.8.4/hmatrix-0.16.1.5 /opt/local/lib/ /usr/local/lib/ "C:/Program Files/Example/" hs-libraries: HShmatrix-0.16.1.5 extra-libraries: blas lapack extra-ghci-libraries: include-dirs: /opt/local/include/ /usr/local/include/ includes: depends: array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b base-4.7.0.2-918c7ac27f65a87103264a9f51652d63 binary-0.7.1.0-108d06eea2ef05e517f9c1facf10f63c bytestring-0.10.4.0-78bc8f2c724c765c78c004a84acf6cc3 deepseq-1.3.0.2-0ddc77716bd2515426e1ba39f6788a4f random-1.1-822c19b7507b6ac1aaa4c66731e775ae split-0.2.2-34cfb851cc3784e22bfae7a7bddda9c5 storable-complex-0.2.2-e962c368d58acc1f5b41d41edc93da72 vector-0.10.12.3-f4222db607fd5fdd7545d3e82419b307 hugs-options: cc-options: ld-options: framework-dirs: frameworks: Accelerate haddock-interfaces: /Users/alexbiehl/.stack/snapshots/x86_64-osx/lts-2.13/7.8.4/doc/html/hmatrix.haddock haddock-html: 
/Users/alexbiehl/.stack/snapshots/x86_64-osx/lts-2.13/7.8.4/doc/html stack-2.15.7/tests/unit/package-dump/ghc-7.8.txt0000644000000000000000000020303114502056216017374 0ustar0000000000000000name: haskell2010 version: 1.1.2.0 id: haskell2010-1.1.2.0-05c8dd51009e08c6371c82972d40f55a license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: http://www.haskell.org/onlinereport/haskell2010/ package-url: synopsis: Compatibility with Haskell 2010 description: This package provides exactly the library modules defined by the . category: Haskell2010, Prelude author: exposed: False exposed-modules: Prelude Control.Monad Data.Array Data.Bits Data.Char Data.Complex Data.Int Data.Ix Data.List Data.Maybe Data.Ratio Data.Word Foreign Foreign.C Foreign.C.Error Foreign.C.String Foreign.C.Types Foreign.ForeignPtr Foreign.Marshal Foreign.Marshal.Alloc Foreign.Marshal.Array Foreign.Marshal.Error Foreign.Marshal.Utils Foreign.Ptr Foreign.StablePtr Foreign.Storable Numeric System.Environment System.Exit System.IO System.IO.Error hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/haskell2010-1.1.2.0 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/haskell2010-1.1.2.0 hs-libraries: HShaskell2010-1.1.2.0 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 ghc-prim-0.3.1.0-a24f9c14c632d75b683d0f93283aea37 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskell2010-1.1.2.0/haskell2010.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskell2010-1.1.2.0 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: haskell98 version: 2.0.0.3 id: haskell98-2.0.0.3-045e8778b656db76e2c729405eee707b license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: http://www.haskell.org/definition/ package-url: synopsis: Compatibility with Haskell 98 
description: This package provides compatibility with the modules of Haskell 98 and the FFI addendum, by means of wrappers around modules from the base package (which in many cases have additional features). However "Prelude", "Numeric" and "Foreign" are provided directly by the @base@ package. category: Haskell98, Prelude author: exposed: False exposed-modules: Prelude Array CPUTime Char Complex Directory IO Ix List Locale Maybe Monad Numeric Random Ratio System Time Bits CError CForeign CString CTypes ForeignPtr Int MarshalAlloc MarshalArray MarshalError MarshalUtils Ptr StablePtr Storable Word hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/haskell98-2.0.0.3 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/haskell98-2.0.0.3 hs-libraries: HShaskell98-2.0.0.3 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 directory-1.2.1.0-07cd1f59e3c6cac5e3e180019c59a115 old-locale-1.0.0.6-50b0125c49f76af85dc7aa22975cdc34 old-time-1.1.0.2-e3f776e97c1a6ff1770b04943a7ef7c6 process-1.2.0.0-06c3215a79834ce4886ae686a0f81122 time-1.4.2-9b3076800c33f8382c38628f35717951 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskell98-2.0.0.3/haskell98.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskell98-2.0.0.3 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: old-time version: 1.1.0.2 id: old-time-1.1.0.2-e3f776e97c1a6ff1770b04943a7ef7c6 license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: package-url: synopsis: Time library description: This package provides the old time library. . For new projects, the newer is recommended. 
category: System author: exposed: True exposed-modules: System.Time hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/old-time-1.1.0.2 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/old-time-1.1.0.2 hs-libraries: HSold-time-1.1.0.2 extra-libraries: extra-ghci-libraries: include-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/old-time-1.1.0.2/include includes: HsTime.h depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 old-locale-1.0.0.6-50b0125c49f76af85dc7aa22975cdc34 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/old-time-1.1.0.2/old-time.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/old-time-1.1.0.2 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: ghc version: 7.8.4 id: ghc-7.8.4-6c4818bc66adb23509058069f781d99a license: BSD3 copyright: maintainer: glasgow-haskell-users@haskell.org stability: homepage: http://www.haskell.org/ghc/ package-url: synopsis: The GHC API description: GHC's functionality can be useful for more things than just compiling Haskell programs. Important use cases are programs that analyse (and perhaps transform) Haskell code. Others include loading Haskell code dynamically in a GHCi-like manner. For this reason, a lot of GHC's functionality is made available through this package. 
category: Development author: The GHC Team exposed: False exposed-modules: Avail BasicTypes ConLike DataCon PatSyn Demand Exception GhcMonad Hooks Id IdInfo Literal Llvm Llvm.AbsSyn Llvm.MetaData Llvm.PpLlvm Llvm.Types LlvmCodeGen LlvmCodeGen.Base LlvmCodeGen.CodeGen LlvmCodeGen.Data LlvmCodeGen.Ppr LlvmCodeGen.Regs LlvmMangler MkId Module Name NameEnv NameSet OccName RdrName SrcLoc UniqSupply Unique Var VarEnv VarSet BlockId CLabel Cmm CmmBuildInfoTables CmmPipeline CmmCallConv CmmCommonBlockElim CmmContFlowOpt CmmExpr CmmInfo CmmLex CmmLint CmmLive CmmMachOp CmmNode CmmOpt CmmParse CmmProcPoint CmmRewriteAssignments CmmSink CmmType CmmUtils CmmLayoutStack MkGraph PprBase PprC PprCmm PprCmmDecl PprCmmExpr Bitmap CodeGen.Platform CodeGen.Platform.ARM CodeGen.Platform.NoRegs CodeGen.Platform.PPC CodeGen.Platform.PPC_Darwin CodeGen.Platform.SPARC CodeGen.Platform.X86 CodeGen.Platform.X86_64 CgUtils StgCmm StgCmmBind StgCmmClosure StgCmmCon StgCmmEnv StgCmmExpr StgCmmForeign StgCmmHeap StgCmmHpc StgCmmArgRep StgCmmLayout StgCmmMonad StgCmmPrim StgCmmProf StgCmmTicky StgCmmUtils StgCmmExtCode SMRep CoreArity CoreFVs CoreLint CorePrep CoreSubst CoreSyn TrieMap CoreTidy CoreUnfold CoreUtils ExternalCore MkCore MkExternalCore PprCore PprExternalCore Check Coverage Desugar DsArrows DsBinds DsCCall DsExpr DsForeign DsGRHSs DsListComp DsMonad DsUtils Match MatchCon MatchLit HsBinds HsDecls HsDoc HsExpr HsImpExp HsLit HsPat HsSyn HsTypes HsUtils BinIface BuildTyCl IfaceEnv IfaceSyn IfaceType LoadIface MkIface TcIface FlagChecker Annotations BreakArray CmdLineParser CodeOutput Config Constants DriverMkDepend DriverPhases PipelineMonad DriverPipeline DynFlags ErrUtils Finder GHC GhcMake GhcPlugins DynamicLoading HeaderInfo HscMain HscStats HscTypes InteractiveEval InteractiveEvalTypes PackageConfig Packages PlatformConstants PprTyThing StaticFlags SysTools TidyPgm Ctype HaddockUtils LexCore Lexer OptCoercion Parser ParserCore ParserCoreUtils RdrHsSyn ForeignCall PrelInfo 
PrelNames PrelRules PrimOp TysPrim TysWiredIn CostCentre ProfInit SCCfinal RnBinds RnEnv RnExpr RnHsDoc RnNames RnPat RnSource RnSplice RnTypes CoreMonad CSE FloatIn FloatOut LiberateCase OccurAnal SAT SetLevels SimplCore SimplEnv SimplMonad SimplUtils Simplify SimplStg StgStats UnariseStg Rules SpecConstr Specialise CoreToStg StgLint StgSyn DmdAnal WorkWrap WwLib FamInst Inst TcAnnotations TcArrows TcBinds TcClassDcl TcDefaults TcDeriv TcEnv TcExpr TcForeign TcGenDeriv TcGenGenerics TcHsSyn TcHsType TcInstDcls TcMType TcValidity TcMatches TcPat TcPatSyn TcRnDriver TcRnMonad TcRnTypes TcRules TcSimplify TcErrors TcTyClsDecls TcTyDecls TcType TcEvidence TcUnify TcInteract TcCanonical TcSMonad TcTypeNats TcSplice Class Coercion FamInstEnv FunDeps InstEnv TyCon CoAxiom Kind Type TypeRep Unify Bag Binary BooleanFormula BufWrite Digraph Encoding FastBool FastFunctions FastMutInt FastString FastTypes Fingerprint FiniteMap GraphBase GraphColor GraphOps GraphPpr IOEnv ListSetOps Maybes MonadUtils OrdList Outputable Pair Panic Pretty Serialized State Stream StringBuffer UniqFM UniqSet Util ExtsCompat46 Vectorise.Builtins.Base Vectorise.Builtins.Initialise Vectorise.Builtins Vectorise.Monad.Base Vectorise.Monad.Naming Vectorise.Monad.Local Vectorise.Monad.Global Vectorise.Monad.InstEnv Vectorise.Monad Vectorise.Utils.Base Vectorise.Utils.Closure Vectorise.Utils.Hoisting Vectorise.Utils.PADict Vectorise.Utils.Poly Vectorise.Utils Vectorise.Generic.Description Vectorise.Generic.PAMethods Vectorise.Generic.PADict Vectorise.Generic.PData Vectorise.Type.Env Vectorise.Type.Type Vectorise.Type.TyConDecl Vectorise.Type.Classify Vectorise.Convert Vectorise.Vect Vectorise.Var Vectorise.Env Vectorise.Exp Vectorise Hoopl.Dataflow Hoopl AsmCodeGen TargetReg NCGMonad Instruction Size Reg RegClass PIC Platform CPrim X86.Regs X86.RegInfo X86.Instr X86.Cond X86.Ppr X86.CodeGen PPC.Regs PPC.RegInfo PPC.Instr PPC.Cond PPC.Ppr PPC.CodeGen SPARC.Base SPARC.Regs SPARC.Imm SPARC.AddrMode 
SPARC.Cond SPARC.Instr SPARC.Stack SPARC.ShortcutJump SPARC.Ppr SPARC.CodeGen SPARC.CodeGen.Amode SPARC.CodeGen.Base SPARC.CodeGen.CondCode SPARC.CodeGen.Gen32 SPARC.CodeGen.Gen64 SPARC.CodeGen.Sanity SPARC.CodeGen.Expand RegAlloc.Liveness RegAlloc.Graph.Main RegAlloc.Graph.Stats RegAlloc.Graph.ArchBase RegAlloc.Graph.ArchX86 RegAlloc.Graph.Coalesce RegAlloc.Graph.Spill RegAlloc.Graph.SpillClean RegAlloc.Graph.SpillCost RegAlloc.Graph.TrivColorable RegAlloc.Linear.Main RegAlloc.Linear.JoinToTargets RegAlloc.Linear.State RegAlloc.Linear.Stats RegAlloc.Linear.FreeRegs RegAlloc.Linear.StackMap RegAlloc.Linear.Base RegAlloc.Linear.X86.FreeRegs RegAlloc.Linear.X86_64.FreeRegs RegAlloc.Linear.PPC.FreeRegs RegAlloc.Linear.SPARC.FreeRegs DsMeta Convert ByteCodeAsm ByteCodeGen ByteCodeInstr ByteCodeItbls ByteCodeLink Debugger LibFFI Linker ObjLink RtClosureInspect DebuggerUtils hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/ghc-7.8.4 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/ghc-7.8.4 hs-libraries: HSghc-7.8.4 extra-libraries: extra-ghci-libraries: include-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/ghc-7.8.4/include includes: depends: Cabal-1.18.1.5-6478013104bde01737bfd67d34bbee0a array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 bin-package-db-0.0.0.0-0f3da03684207f2dc4dce793df1db62e bytestring-0.10.4.0-d6f1d17d717e8652498cab8269a0acd5 containers-0.5.5.1-d4bd887fb97aa3a46cbadc13709b7653 directory-1.2.1.0-07cd1f59e3c6cac5e3e180019c59a115 filepath-1.3.0.2-25a474a9272ae6260626ce0d70ad1cab hoopl-3.10.0.1-267659e4b5b51c3d2e02f2a6d6f46936 hpc-0.6.0.1-cca17f12dab542e09c423a74a4590c5d process-1.2.0.0-06c3215a79834ce4886ae686a0f81122 template-haskell-2.9.0.0-6d27c2b362b15abb1822f2f34b9ae7f9 time-1.4.2-9b3076800c33f8382c38628f35717951 transformers-0.3.0.0-6458c21515cab7c1cf21e53141557a1c unix-2.7.0.1-f8658ba9ec1c4fba8a371a8e0f42ec6c hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: 
/opt/ghc/7.8.4/share/doc/ghc/html/libraries/ghc-7.8.4/ghc.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/ghc-7.8.4 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: haskeline version: 0.7.1.2 id: haskeline-0.7.1.2-2dd2f2fb537352f5367ae77fe47ab211 license: BSD3 copyright: (c) Judah Jacobson maintainer: Judah Jacobson stability: Experimental homepage: http://trac.haskell.org/haskeline package-url: synopsis: A command-line interface for user input, written in Haskell. description: Haskeline provides a user interface for line input in command-line programs. This library is similar in purpose to readline, but since it is written in Haskell it is (hopefully) more easily used in other Haskell programs. . Haskeline runs both on POSIX-compatible systems and on Windows. category: User Interfaces author: Judah Jacobson exposed: True exposed-modules: System.Console.Haskeline System.Console.Haskeline.Completion System.Console.Haskeline.MonadException System.Console.Haskeline.History System.Console.Haskeline.IO hidden-modules: System.Console.Haskeline.Backend System.Console.Haskeline.Backend.WCWidth System.Console.Haskeline.Command System.Console.Haskeline.Command.Completion System.Console.Haskeline.Command.History System.Console.Haskeline.Command.KillRing System.Console.Haskeline.Directory System.Console.Haskeline.Emacs System.Console.Haskeline.InputT System.Console.Haskeline.Key System.Console.Haskeline.LineState System.Console.Haskeline.Monads System.Console.Haskeline.Prefs System.Console.Haskeline.RunCommand System.Console.Haskeline.Term System.Console.Haskeline.Command.Undo System.Console.Haskeline.Vi System.Console.Haskeline.Recover System.Console.Haskeline.Backend.Posix System.Console.Haskeline.Backend.Posix.Encoder System.Console.Haskeline.Backend.DumbTerm System.Console.Haskeline.Backend.Terminfo trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/haskeline-0.7.1.2 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/haskeline-0.7.1.2 hs-libraries: 
HShaskeline-0.7.1.2 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 bytestring-0.10.4.0-d6f1d17d717e8652498cab8269a0acd5 containers-0.5.5.1-d4bd887fb97aa3a46cbadc13709b7653 directory-1.2.1.0-07cd1f59e3c6cac5e3e180019c59a115 filepath-1.3.0.2-25a474a9272ae6260626ce0d70ad1cab terminfo-0.4.0.0-c1d02a7210b0d1bc250d87463b38b8d1 transformers-0.3.0.0-6458c21515cab7c1cf21e53141557a1c unix-2.7.0.1-f8658ba9ec1c4fba8a371a8e0f42ec6c hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskeline-0.7.1.2/haskeline.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskeline-0.7.1.2 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: terminfo version: 0.4.0.0 id: terminfo-0.4.0.0-c1d02a7210b0d1bc250d87463b38b8d1 license: BSD3 copyright: (c) Judah Jacobson maintainer: Judah Jacobson stability: Stable homepage: https://github.com/judah/terminfo package-url: synopsis: Haskell bindings to the terminfo library. description: This library provides an interface to the terminfo database (via bindings to the curses library). allows POSIX systems to interact with a variety of terminals using a standard set of capabilities. 
category: User Interfaces author: Judah Jacobson exposed: True exposed-modules: System.Console.Terminfo System.Console.Terminfo.Base System.Console.Terminfo.Cursor System.Console.Terminfo.Color System.Console.Terminfo.Edit System.Console.Terminfo.Effects System.Console.Terminfo.Keys hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/terminfo-0.4.0.0 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/terminfo-0.4.0.0 hs-libraries: HSterminfo-0.4.0.0 extra-libraries: tinfo extra-ghci-libraries: include-dirs: includes: ncurses.h term.h depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/terminfo-0.4.0.0/terminfo.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/terminfo-0.4.0.0 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: xhtml version: 3000.2.1 id: xhtml-3000.2.1-6a3ed472b07e58fe29db22a5bc2bdb06 license: BSD3 copyright: Bjorn Bringert 2004-2006, Andy Gill and the Oregon Graduate Institute of Science and Technology, 1999-2001 maintainer: Chris Dornan stability: Stable homepage: https://github.com/haskell/xhtml package-url: synopsis: An XHTML combinator library description: This package provides combinators for producing XHTML 1.0, including the Strict, Transitional and Frameset variants. 
category: Web, XML, Pretty Printer author: Bjorn Bringert exposed: True exposed-modules: Text.XHtml Text.XHtml.Frameset Text.XHtml.Strict Text.XHtml.Transitional Text.XHtml.Debug Text.XHtml.Table hidden-modules: Text.XHtml.Strict.Attributes Text.XHtml.Strict.Elements Text.XHtml.Frameset.Attributes Text.XHtml.Frameset.Elements Text.XHtml.Transitional.Attributes Text.XHtml.Transitional.Elements Text.XHtml.BlockTable Text.XHtml.Extras Text.XHtml.Internals trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/xhtml-3000.2.1 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/xhtml-3000.2.1 hs-libraries: HSxhtml-3000.2.1 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/xhtml-3000.2.1/xhtml.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/xhtml-3000.2.1 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: transformers version: 0.3.0.0 id: transformers-0.3.0.0-6458c21515cab7c1cf21e53141557a1c license: BSD3 copyright: maintainer: Ross Paterson stability: homepage: package-url: synopsis: Concrete functor and monad transformers description: A portable library of functor and monad transformers, inspired by the paper \"Functional Programming with Overloading and Higher-Order Polymorphism\", by Mark P Jones, in /Advanced School of Functional Programming/, 1995 (). . This package contains: . * the monad transformer class (in "Control.Monad.Trans.Class") . * concrete functor and monad transformers, each with associated operations and functions to lift operations associated with other transformers. . It can be used on its own in portable Haskell code, or with the monad classes in the @mtl@ or @monads-tf@ packages, which automatically lift operations introduced by monad transformers through other transformers. 
category: Control author: Andy Gill, Ross Paterson exposed: True exposed-modules: Control.Applicative.Backwards Control.Applicative.Lift Control.Monad.IO.Class Control.Monad.Trans.Class Control.Monad.Trans.Cont Control.Monad.Trans.Error Control.Monad.Trans.Identity Control.Monad.Trans.List Control.Monad.Trans.Maybe Control.Monad.Trans.Reader Control.Monad.Trans.RWS Control.Monad.Trans.RWS.Lazy Control.Monad.Trans.RWS.Strict Control.Monad.Trans.State Control.Monad.Trans.State.Lazy Control.Monad.Trans.State.Strict Control.Monad.Trans.Writer Control.Monad.Trans.Writer.Lazy Control.Monad.Trans.Writer.Strict Data.Functor.Compose Data.Functor.Constant Data.Functor.Identity Data.Functor.Product Data.Functor.Reverse hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/transformers-0.3.0.0 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/transformers-0.3.0.0 hs-libraries: HStransformers-0.3.0.0 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/transformers-0.3.0.0/transformers.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/transformers-0.3.0.0 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: hoopl version: 3.10.0.1 id: hoopl-3.10.0.1-267659e4b5b51c3d2e02f2a6d6f46936 license: BSD3 copyright: maintainer: nr@cs.tufts.edu stability: homepage: http://ghc.cs.tufts.edu/hoopl/ package-url: synopsis: A library to support dataflow analysis and optimization description: Higher-order optimization library . See /Norman Ramsey, Joao Dias, and Simon Peyton Jones./ /(2010)/ for more details. 
category: Compilers/Interpreters author: Norman Ramsey, Joao Dias, Simon Marlow and Simon Peyton Jones exposed: True exposed-modules: Compiler.Hoopl Compiler.Hoopl.Internals Compiler.Hoopl.Wrappers Compiler.Hoopl.Passes.Dominator Compiler.Hoopl.Passes.DList hidden-modules: Compiler.Hoopl.Checkpoint Compiler.Hoopl.Collections Compiler.Hoopl.Combinators Compiler.Hoopl.Dataflow Compiler.Hoopl.Debug Compiler.Hoopl.Block Compiler.Hoopl.Graph Compiler.Hoopl.Label Compiler.Hoopl.MkGraph Compiler.Hoopl.Fuel Compiler.Hoopl.Pointed Compiler.Hoopl.Shape Compiler.Hoopl.Show Compiler.Hoopl.Unique Compiler.Hoopl.XUtil trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/hoopl-3.10.0.1 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/hoopl-3.10.0.1 hs-libraries: HShoopl-3.10.0.1 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 containers-0.5.5.1-d4bd887fb97aa3a46cbadc13709b7653 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/hoopl-3.10.0.1/hoopl.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/hoopl-3.10.0.1 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: bin-package-db version: 0.0.0.0 id: bin-package-db-0.0.0.0-0f3da03684207f2dc4dce793df1db62e license: BSD3 copyright: maintainer: ghc-devs@haskell.org stability: homepage: package-url: synopsis: A binary format for the package database description: category: author: exposed: True exposed-modules: Distribution.InstalledPackageInfo.Binary hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/bin-package-db-0.0.0.0 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/bin-package-db-0.0.0.0 hs-libraries: HSbin-package-db-0.0.0.0 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: Cabal-1.18.1.5-6478013104bde01737bfd67d34bbee0a base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 binary-0.7.1.0-f867dbbb69966feb9f5c4ef7695a70a5 hugs-options: 
cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/bin-package-db-0.0.0.0/bin-package-db.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/bin-package-db-0.0.0.0 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: binary version: 0.7.1.0 id: binary-0.7.1.0-f867dbbb69966feb9f5c4ef7695a70a5 license: BSD3 copyright: maintainer: Lennart Kolmodin, Don Stewart stability: provisional homepage: https://github.com/kolmodin/binary package-url: synopsis: Binary serialisation for Haskell values using lazy ByteStrings description: Efficient, pure binary serialisation using lazy ByteStrings. Haskell values may be encoded to and from binary formats, written to disk as binary, or sent over the network. The format used can be automatically generated, or you can choose to implement a custom format if needed. Serialisation speeds of over 1 G\/sec have been observed, so this library should be suitable for high performance scenarios. 
category: Data, Parsing author: Lennart Kolmodin exposed: True exposed-modules: Data.Binary Data.Binary.Put Data.Binary.Get Data.Binary.Get.Internal Data.Binary.Builder Data.Binary.Builder.Internal hidden-modules: Data.Binary.Builder.Base Data.Binary.Class Data.Binary.Generic trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/binary-0.7.1.0 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/binary-0.7.1.0 hs-libraries: HSbinary-0.7.1.0 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 bytestring-0.10.4.0-d6f1d17d717e8652498cab8269a0acd5 containers-0.5.5.1-d4bd887fb97aa3a46cbadc13709b7653 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/binary-0.7.1.0/binary.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/binary-0.7.1.0 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: Cabal version: 1.18.1.5 id: Cabal-1.18.1.5-6478013104bde01737bfd67d34bbee0a license: BSD3 copyright: 2003-2006, Isaac Jones 2005-2011, Duncan Coutts maintainer: cabal-devel@haskell.org stability: homepage: http://www.haskell.org/cabal/ package-url: synopsis: A framework for packaging Haskell software description: The Haskell Common Architecture for Building Applications and Libraries: a framework defining a common interface for authors to more easily build their Haskell applications in a portable way. . The Haskell Cabal is part of a larger infrastructure for distributing, organizing, and cataloging Haskell libraries and tools. 
category: Distribution author: Isaac Jones Duncan Coutts exposed: True exposed-modules: Distribution.Compat.Environment Distribution.Compat.Exception Distribution.Compat.ReadP Distribution.Compiler Distribution.InstalledPackageInfo Distribution.License Distribution.Make Distribution.ModuleName Distribution.Package Distribution.PackageDescription Distribution.PackageDescription.Check Distribution.PackageDescription.Configuration Distribution.PackageDescription.Parse Distribution.PackageDescription.PrettyPrint Distribution.PackageDescription.Utils Distribution.ParseUtils Distribution.ReadE Distribution.Simple Distribution.Simple.Bench Distribution.Simple.Build Distribution.Simple.Build.Macros Distribution.Simple.Build.PathsModule Distribution.Simple.BuildPaths Distribution.Simple.BuildTarget Distribution.Simple.CCompiler Distribution.Simple.Command Distribution.Simple.Compiler Distribution.Simple.Configure Distribution.Simple.GHC Distribution.Simple.Haddock Distribution.Simple.Hpc Distribution.Simple.Hugs Distribution.Simple.Install Distribution.Simple.InstallDirs Distribution.Simple.JHC Distribution.Simple.LHC Distribution.Simple.LocalBuildInfo Distribution.Simple.NHC Distribution.Simple.PackageIndex Distribution.Simple.PreProcess Distribution.Simple.PreProcess.Unlit Distribution.Simple.Program Distribution.Simple.Program.Ar Distribution.Simple.Program.Builtin Distribution.Simple.Program.Db Distribution.Simple.Program.Find Distribution.Simple.Program.GHC Distribution.Simple.Program.HcPkg Distribution.Simple.Program.Hpc Distribution.Simple.Program.Ld Distribution.Simple.Program.Run Distribution.Simple.Program.Script Distribution.Simple.Program.Types Distribution.Simple.Register Distribution.Simple.Setup Distribution.Simple.SrcDist Distribution.Simple.Test Distribution.Simple.UHC Distribution.Simple.UserHooks Distribution.Simple.Utils Distribution.System Distribution.TestSuite Distribution.Text Distribution.Verbosity Distribution.Version Language.Haskell.Extension 
hidden-modules: Distribution.Compat.CopyFile Distribution.Compat.TempFile Distribution.GetOpt Distribution.Simple.GHC.IPI641 Distribution.Simple.GHC.IPI642 Paths_Cabal trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/Cabal-1.18.1.5 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/Cabal-1.18.1.5 hs-libraries: HSCabal-1.18.1.5 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 bytestring-0.10.4.0-d6f1d17d717e8652498cab8269a0acd5 containers-0.5.5.1-d4bd887fb97aa3a46cbadc13709b7653 deepseq-1.3.0.2-63a1ab91b7017a28bb5d04cb1b5d2d02 directory-1.2.1.0-07cd1f59e3c6cac5e3e180019c59a115 filepath-1.3.0.2-25a474a9272ae6260626ce0d70ad1cab pretty-1.1.1.1-0984f47ffe93ef3983c80b96280f1c3a process-1.2.0.0-06c3215a79834ce4886ae686a0f81122 time-1.4.2-9b3076800c33f8382c38628f35717951 unix-2.7.0.1-f8658ba9ec1c4fba8a371a8e0f42ec6c hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/Cabal-1.18.1.5/Cabal.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/Cabal-1.18.1.5 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: template-haskell version: 2.9.0.0 id: template-haskell-2.9.0.0-6d27c2b362b15abb1822f2f34b9ae7f9 license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: package-url: synopsis: Support library for Template Haskell description: This package provides modules containing facilities for manipulating Haskell source code using Template Haskell. . See for more information. 
category: Template Haskell author: exposed: True exposed-modules: Language.Haskell.TH Language.Haskell.TH.Lib Language.Haskell.TH.Ppr Language.Haskell.TH.PprLib Language.Haskell.TH.Quote Language.Haskell.TH.Syntax hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/template-haskell-2.9.0.0 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/template-haskell-2.9.0.0 hs-libraries: HStemplate-haskell-2.9.0.0 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 containers-0.5.5.1-d4bd887fb97aa3a46cbadc13709b7653 pretty-1.1.1.1-0984f47ffe93ef3983c80b96280f1c3a hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/template-haskell-2.9.0.0/template-haskell.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/template-haskell-2.9.0.0 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: pretty version: 1.1.1.1 id: pretty-1.1.1.1-0984f47ffe93ef3983c80b96280f1c3a license: BSD3 copyright: maintainer: David Terei stability: Stable homepage: http://github.com/haskell/pretty package-url: synopsis: Pretty-printing library description: This package contains a pretty-printing library, a set of API's that provides a way to easily print out text in a consistent format of your choosing. This is useful for compilers and related tools. . This library was originally designed by John Hughes's and has since been heavily modified by Simon Peyton Jones. 
category: Text author: exposed: True exposed-modules: Text.PrettyPrint Text.PrettyPrint.HughesPJ hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/pretty-1.1.1.1 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/pretty-1.1.1.1 hs-libraries: HSpretty-1.1.1.1 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/pretty-1.1.1.1/pretty.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/pretty-1.1.1.1 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: hpc version: 0.6.0.1 id: hpc-0.6.0.1-cca17f12dab542e09c423a74a4590c5d license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: package-url: synopsis: Code Coverage Library for Haskell description: This package provides the code coverage library for Haskell. . See for more information. category: Control author: Andy Gill exposed: True exposed-modules: Trace.Hpc.Util Trace.Hpc.Mix Trace.Hpc.Tix Trace.Hpc.Reflect hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/hpc-0.6.0.1 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/hpc-0.6.0.1 hs-libraries: HShpc-0.6.0.1 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 containers-0.5.5.1-d4bd887fb97aa3a46cbadc13709b7653 directory-1.2.1.0-07cd1f59e3c6cac5e3e180019c59a115 time-1.4.2-9b3076800c33f8382c38628f35717951 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/hpc-0.6.0.1/hpc.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/hpc-0.6.0.1 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: process version: 1.2.0.0 id: process-1.2.0.0-06c3215a79834ce4886ae686a0f81122 license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: package-url: synopsis: 
Process libraries description: This package contains libraries for dealing with system processes. category: System author: exposed: True exposed-modules: System.Cmd System.Process System.Process.Internals hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/process-1.2.0.0 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/process-1.2.0.0 hs-libraries: HSprocess-1.2.0.0 extra-libraries: extra-ghci-libraries: include-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/process-1.2.0.0/include includes: runProcess.h depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 deepseq-1.3.0.2-63a1ab91b7017a28bb5d04cb1b5d2d02 directory-1.2.1.0-07cd1f59e3c6cac5e3e180019c59a115 filepath-1.3.0.2-25a474a9272ae6260626ce0d70ad1cab unix-2.7.0.1-f8658ba9ec1c4fba8a371a8e0f42ec6c hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/process-1.2.0.0/process.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/process-1.2.0.0 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: directory version: 1.2.1.0 id: directory-1.2.1.0-07cd1f59e3c6cac5e3e180019c59a115 license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: package-url: synopsis: library for directory handling description: This package provides a library for handling directories. 
category: System author: exposed: True exposed-modules: System.Directory hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/directory-1.2.1.0 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/directory-1.2.1.0 hs-libraries: HSdirectory-1.2.1.0 extra-libraries: extra-ghci-libraries: include-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/directory-1.2.1.0/include includes: HsDirectory.h depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 filepath-1.3.0.2-25a474a9272ae6260626ce0d70ad1cab time-1.4.2-9b3076800c33f8382c38628f35717951 unix-2.7.0.1-f8658ba9ec1c4fba8a371a8e0f42ec6c hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/directory-1.2.1.0/directory.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/directory-1.2.1.0 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: unix version: 2.7.0.1 id: unix-2.7.0.1-f8658ba9ec1c4fba8a371a8e0f42ec6c license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: package-url: synopsis: POSIX functionality description: This package gives you access to the set of operating system services standardised by POSIX 1003.1b (or the IEEE Portable Operating System Interface for Computing Environments - IEEE Std. 1003.1). . The package is not supported under Windows (except under Cygwin). 
category: System author: exposed: True exposed-modules: System.Posix System.Posix.ByteString System.Posix.Error System.Posix.Resource System.Posix.Time System.Posix.Unistd System.Posix.User System.Posix.Signals System.Posix.Signals.Exts System.Posix.Semaphore System.Posix.SharedMem System.Posix.ByteString.FilePath System.Posix.Directory System.Posix.Directory.ByteString System.Posix.DynamicLinker.Module System.Posix.DynamicLinker.Module.ByteString System.Posix.DynamicLinker.Prim System.Posix.DynamicLinker.ByteString System.Posix.DynamicLinker System.Posix.Files System.Posix.Files.ByteString System.Posix.IO System.Posix.IO.ByteString System.Posix.Env System.Posix.Env.ByteString System.Posix.Process System.Posix.Process.Internals System.Posix.Process.ByteString System.Posix.Temp System.Posix.Temp.ByteString System.Posix.Terminal System.Posix.Terminal.ByteString hidden-modules: System.Posix.Directory.Common System.Posix.DynamicLinker.Common System.Posix.Files.Common System.Posix.IO.Common System.Posix.Process.Common System.Posix.Terminal.Common trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/unix-2.7.0.1 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/unix-2.7.0.1 hs-libraries: HSunix-2.7.0.1 extra-libraries: rt util dl pthread extra-ghci-libraries: include-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/unix-2.7.0.1/include includes: HsUnix.h execvpe.h depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 bytestring-0.10.4.0-d6f1d17d717e8652498cab8269a0acd5 time-1.4.2-9b3076800c33f8382c38628f35717951 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/unix-2.7.0.1/unix.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/unix-2.7.0.1 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: time version: 1.4.2 id: time-1.4.2-9b3076800c33f8382c38628f35717951 license: BSD3 copyright: maintainer: stability: stable homepage: http://semantic.org/TimeLib/ package-url: synopsis: A time library 
description: A time library category: System author: Ashley Yakeley exposed: True exposed-modules: Data.Time.Calendar Data.Time.Calendar.MonthDay Data.Time.Calendar.OrdinalDate Data.Time.Calendar.WeekDate Data.Time.Calendar.Julian Data.Time.Calendar.Easter Data.Time.Clock Data.Time.Clock.POSIX Data.Time.Clock.TAI Data.Time.LocalTime Data.Time.Format Data.Time hidden-modules: Data.Time.Calendar.Private Data.Time.Calendar.Days Data.Time.Calendar.Gregorian Data.Time.Calendar.JulianYearDay Data.Time.Clock.Scale Data.Time.Clock.UTC Data.Time.Clock.CTimeval Data.Time.Clock.UTCDiff Data.Time.LocalTime.TimeZone Data.Time.LocalTime.TimeOfDay Data.Time.LocalTime.LocalTime Data.Time.Format.Parse trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/time-1.4.2 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/time-1.4.2 hs-libraries: HStime-1.4.2 extra-libraries: extra-ghci-libraries: include-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/time-1.4.2/include includes: depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 deepseq-1.3.0.2-63a1ab91b7017a28bb5d04cb1b5d2d02 old-locale-1.0.0.6-50b0125c49f76af85dc7aa22975cdc34 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/time-1.4.2/time.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/time-1.4.2 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: old-locale version: 1.0.0.6 id: old-locale-1.0.0.6-50b0125c49f76af85dc7aa22975cdc34 license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: package-url: synopsis: locale library description: This package provides the ability to adapt to locale conventions such as date and time formats. 
category: System author: exposed: True exposed-modules: System.Locale hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/old-locale-1.0.0.6 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/old-locale-1.0.0.6 hs-libraries: HSold-locale-1.0.0.6 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/old-locale-1.0.0.6/old-locale.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/old-locale-1.0.0.6 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: containers version: 0.5.5.1 id: containers-0.5.5.1-d4bd887fb97aa3a46cbadc13709b7653 license: BSD3 copyright: maintainer: fox@ucw.cz stability: homepage: package-url: synopsis: Assorted concrete container types description: This package contains efficient general-purpose implementations of various basic immutable container types. The declared cost of each operation is either worst-case or amortized, but remains valid even if structures are shared. 
category: Data Structures author: exposed: True exposed-modules: Data.IntMap Data.IntMap.Lazy Data.IntMap.Strict Data.IntSet Data.Map Data.Map.Lazy Data.Map.Strict Data.Set Data.Graph Data.Sequence Data.Tree hidden-modules: Data.BitUtil Data.IntMap.Base Data.IntSet.Base Data.Map.Base Data.Set.Base Data.StrictPair trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/containers-0.5.5.1 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/containers-0.5.5.1 hs-libraries: HScontainers-0.5.5.1 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 deepseq-1.3.0.2-63a1ab91b7017a28bb5d04cb1b5d2d02 ghc-prim-0.3.1.0-a24f9c14c632d75b683d0f93283aea37 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/containers-0.5.5.1/containers.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/containers-0.5.5.1 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: bytestring version: 0.10.4.0 id: bytestring-0.10.4.0-d6f1d17d717e8652498cab8269a0acd5 license: BSD3 copyright: Copyright (c) Don Stewart 2005-2009, (c) Duncan Coutts 2006-2013, (c) David Roundy 2003-2005, (c) Jasper Van der Jeugt 2010, (c) Simon Meier 2010-2013. maintainer: Don Stewart , Duncan Coutts stability: homepage: https://github.com/haskell/bytestring package-url: synopsis: Fast, compact, strict and lazy byte strings with a list interface description: An efficient compact, immutable byte string type (both strict and lazy) suitable for binary or 8-bit character data. . The 'ByteString' type represents sequences of bytes or 8-bit characters. It is suitable for high performance use, both in terms of large data quantities, or high speed requirements. The 'ByteString' functions follow the same style as Haskell\'s ordinary lists, so it is easy to convert code from using 'String' to 'ByteString'. . 
Two 'ByteString' variants are provided: . * Strict 'ByteString's keep the string as a single large array. This makes them convenient for passing data between C and Haskell. . * Lazy 'ByteString's use a lazy list of strict chunks which makes it suitable for I\/O streaming tasks. . The @Char8@ modules provide a character-based view of the same underlying 'ByteString' types. This makes it convenient to handle mixed binary and 8-bit character content (which is common in many file formats and network protocols). . The 'Builder' module provides an efficient way to build up 'ByteString's in an ad-hoc way by repeated concatenation. This is ideal for fast serialisation or pretty printing. . There is also a 'ShortByteString' type which has a lower memory overhead and can be converted to or from a 'ByteString', but supports very few other operations. It is suitable for keeping many short strings in memory. . 'ByteString's are not designed for Unicode. For Unicode strings you should use the 'Text' type from the @text@ package. . These modules are intended to be imported qualified, to avoid name clashes with "Prelude" functions, e.g. . 
> import qualified Data.ByteString as BS category: Data author: Don Stewart, Duncan Coutts exposed: True exposed-modules: Data.ByteString Data.ByteString.Char8 Data.ByteString.Unsafe Data.ByteString.Internal Data.ByteString.Lazy Data.ByteString.Lazy.Char8 Data.ByteString.Lazy.Internal Data.ByteString.Short Data.ByteString.Short.Internal Data.ByteString.Builder Data.ByteString.Builder.Extra Data.ByteString.Builder.Prim Data.ByteString.Builder.Internal Data.ByteString.Builder.Prim.Internal Data.ByteString.Lazy.Builder Data.ByteString.Lazy.Builder.Extras Data.ByteString.Lazy.Builder.ASCII hidden-modules: Data.ByteString.Builder.ASCII Data.ByteString.Builder.Prim.Binary Data.ByteString.Builder.Prim.ASCII Data.ByteString.Builder.Prim.Internal.Floating Data.ByteString.Builder.Prim.Internal.UncheckedShifts Data.ByteString.Builder.Prim.Internal.Base16 trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/bytestring-0.10.4.0 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/bytestring-0.10.4.0 hs-libraries: HSbytestring-0.10.4.0 extra-libraries: extra-ghci-libraries: include-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/bytestring-0.10.4.0/include includes: fpstring.h depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 deepseq-1.3.0.2-63a1ab91b7017a28bb5d04cb1b5d2d02 ghc-prim-0.3.1.0-a24f9c14c632d75b683d0f93283aea37 integer-gmp-0.5.1.0-26579559b3647acf4f01d5edd9491a46 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/bytestring-0.10.4.0/bytestring.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/bytestring-0.10.4.0 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: deepseq version: 1.3.0.2 id: deepseq-1.3.0.2-63a1ab91b7017a28bb5d04cb1b5d2d02 license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: package-url: synopsis: Deep evaluation of data structures description: This package provides methods for fully evaluating data structures (\"deep evaluation\"). 
Deep evaluation is often used for adding strictness to a program, e.g. in order to force pending exceptions, remove space leaks, or force lazy I/O to happen. It is also useful in parallel programs, to ensure pending work does not migrate to the wrong thread. . The primary use of this package is via the 'deepseq' function, a \"deep\" version of 'seq'. It is implemented on top of an 'NFData' typeclass (\"Normal Form Data\", data structures with no unevaluated components) which defines strategies for fully evaluating different data types. . If you want to automatically derive 'NFData' instances via the "GHC.Generics" facility, there is a companion package which builds on top of this package. category: Control author: exposed: True exposed-modules: Control.DeepSeq hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/deepseq-1.3.0.2 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/deepseq-1.3.0.2 hs-libraries: HSdeepseq-1.3.0.2 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/deepseq-1.3.0.2/deepseq.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/deepseq-1.3.0.2 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: array version: 0.5.0.0 id: array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: package-url: synopsis: Mutable and immutable arrays description: In addition to providing the "Data.Array" module , this package also defines the classes 'IArray' of immutable arrays and 'MArray' of arrays mutable within appropriate monads, as well as some instances of these classes. 
category: Data Structures author: exposed: True exposed-modules: Data.Array Data.Array.Base Data.Array.IArray Data.Array.IO Data.Array.IO.Safe Data.Array.IO.Internals Data.Array.MArray Data.Array.MArray.Safe Data.Array.ST Data.Array.ST.Safe Data.Array.Storable Data.Array.Storable.Safe Data.Array.Storable.Internals Data.Array.Unboxed Data.Array.Unsafe hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/array-0.5.0.0 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/array-0.5.0.0 hs-libraries: HSarray-0.5.0.0 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/array-0.5.0.0/array.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/array-0.5.0.0 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: filepath version: 1.3.0.2 id: filepath-1.3.0.2-25a474a9272ae6260626ce0d70ad1cab license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: http://www-users.cs.york.ac.uk/~ndm/filepath/ package-url: synopsis: Library for manipulating FilePaths in a cross platform way. description: A library for 'FilePath' manipulations, using Posix or Windows filepaths depending on the platform. . Both "System.FilePath.Posix" and "System.FilePath.Windows" provide the same interface. See either for examples and a list of the available functions. 
category: System author: Neil Mitchell exposed: True exposed-modules: System.FilePath System.FilePath.Posix System.FilePath.Windows hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/filepath-1.3.0.2 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/filepath-1.3.0.2 hs-libraries: HSfilepath-1.3.0.2 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/filepath-1.3.0.2/filepath.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/filepath-1.3.0.2 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: base version: 4.7.0.2 id: base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1 license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: package-url: synopsis: Basic libraries description: This package contains the "Prelude" and its support libraries, and a large collection of useful libraries ranging from data structures to parsing combinators and debugging utilities. 
category: Prelude author: exposed: True exposed-modules: Control.Applicative Control.Arrow Control.Category Control.Concurrent Control.Concurrent.Chan Control.Concurrent.MVar Control.Concurrent.QSem Control.Concurrent.QSemN Control.Exception Control.Exception.Base Control.Monad Control.Monad.Fix Control.Monad.Instances Control.Monad.ST Control.Monad.ST.Lazy Control.Monad.ST.Lazy.Safe Control.Monad.ST.Lazy.Unsafe Control.Monad.ST.Safe Control.Monad.ST.Strict Control.Monad.ST.Unsafe Control.Monad.Zip Data.Bits Data.Bool Data.Char Data.Coerce Data.Complex Data.Data Data.Dynamic Data.Either Data.Eq Data.Fixed Data.Foldable Data.Function Data.Functor Data.IORef Data.Int Data.Ix Data.List Data.Maybe Data.Monoid Data.OldTypeable Data.OldTypeable.Internal Data.Ord Data.Proxy Data.Ratio Data.STRef Data.STRef.Lazy Data.STRef.Strict Data.String Data.Traversable Data.Tuple Data.Type.Bool Data.Type.Coercion Data.Type.Equality Data.Typeable Data.Typeable.Internal Data.Unique Data.Version Data.Word Debug.Trace Foreign Foreign.C Foreign.C.Error Foreign.C.String Foreign.C.Types Foreign.Concurrent Foreign.ForeignPtr Foreign.ForeignPtr.Safe Foreign.ForeignPtr.Unsafe Foreign.Marshal Foreign.Marshal.Alloc Foreign.Marshal.Array Foreign.Marshal.Error Foreign.Marshal.Pool Foreign.Marshal.Safe Foreign.Marshal.Unsafe Foreign.Marshal.Utils Foreign.Ptr Foreign.Safe Foreign.StablePtr Foreign.Storable GHC.Arr GHC.Base GHC.Char GHC.Conc GHC.Conc.IO GHC.Conc.Signal GHC.Conc.Sync GHC.ConsoleHandler GHC.Constants GHC.Desugar GHC.Enum GHC.Environment GHC.Err GHC.Exception GHC.Exts GHC.Fingerprint GHC.Fingerprint.Type GHC.Float GHC.Float.ConversionUtils GHC.Float.RealFracMethods GHC.Foreign GHC.ForeignPtr GHC.GHCi GHC.Generics GHC.IO GHC.IO.Buffer GHC.IO.BufferedIO GHC.IO.Device GHC.IO.Encoding GHC.IO.Encoding.CodePage GHC.IO.Encoding.Failure GHC.IO.Encoding.Iconv GHC.IO.Encoding.Latin1 GHC.IO.Encoding.Types GHC.IO.Encoding.UTF16 GHC.IO.Encoding.UTF32 GHC.IO.Encoding.UTF8 GHC.IO.Exception GHC.IO.FD 
GHC.IO.Handle GHC.IO.Handle.FD GHC.IO.Handle.Internals GHC.IO.Handle.Text GHC.IO.Handle.Types GHC.IO.IOMode GHC.IOArray GHC.IORef GHC.IP GHC.Int GHC.List GHC.MVar GHC.Num GHC.PArr GHC.Pack GHC.Profiling GHC.Ptr GHC.Read GHC.Real GHC.ST GHC.STRef GHC.Show GHC.Stable GHC.Stack GHC.Stats GHC.Storable GHC.TopHandler GHC.TypeLits GHC.Unicode GHC.Weak GHC.Word Numeric Prelude System.CPUTime System.Console.GetOpt System.Environment System.Exit System.IO System.IO.Error System.IO.Unsafe System.Info System.Mem System.Mem.StableName System.Mem.Weak System.Posix.Internals System.Posix.Types System.Timeout Text.ParserCombinators.ReadP Text.ParserCombinators.ReadPrec Text.Printf Text.Read Text.Read.Lex Text.Show Text.Show.Functions Unsafe.Coerce GHC.Event hidden-modules: Control.Monad.ST.Imp Control.Monad.ST.Lazy.Imp Foreign.ForeignPtr.Imp System.Environment.ExecutablePath GHC.Event.Arr GHC.Event.Array GHC.Event.Clock GHC.Event.Control GHC.Event.EPoll GHC.Event.IntTable GHC.Event.Internal GHC.Event.KQueue GHC.Event.Manager GHC.Event.PSQ GHC.Event.Poll GHC.Event.Thread GHC.Event.TimerManager GHC.Event.Unique trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/base-4.7.0.2 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/base-4.7.0.2 hs-libraries: HSbase-4.7.0.2 extra-libraries: extra-ghci-libraries: include-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/base-4.7.0.2/include includes: HsBase.h depends: ghc-prim-0.3.1.0-a24f9c14c632d75b683d0f93283aea37 integer-gmp-0.5.1.0-26579559b3647acf4f01d5edd9491a46 builtin_rts hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/base-4.7.0.2/base.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/base-4.7.0.2 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: integer-gmp version: 0.5.1.0 id: integer-gmp-0.5.1.0-26579559b3647acf4f01d5edd9491a46 license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: package-url: synopsis: Integer library 
based on GMP description: This package provides the low-level implementation of the standard 'Integer' type based on the . . This package provides access to the internal representation of 'Integer' as well as primitive operations with no proper error handling, and should only be used directly with the utmost care. . For more details about the design of @integer-gmp@, see . category: Numerical author: exposed: True exposed-modules: GHC.Integer GHC.Integer.GMP.Internals GHC.Integer.GMP.Prim GHC.Integer.Logarithms GHC.Integer.Logarithms.Internals hidden-modules: GHC.Integer.Type trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/integer-gmp-0.5.1.0 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/integer-gmp-0.5.1.0 hs-libraries: HSinteger-gmp-0.5.1.0 extra-libraries: gmp extra-ghci-libraries: include-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/integer-gmp-0.5.1.0/include includes: depends: ghc-prim-0.3.1.0-a24f9c14c632d75b683d0f93283aea37 hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/integer-gmp-0.5.1.0/integer-gmp.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/integer-gmp-0.5.1.0 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: ghc-prim version: 0.3.1.0 id: ghc-prim-0.3.1.0-a24f9c14c632d75b683d0f93283aea37 license: BSD3 copyright: maintainer: libraries@haskell.org stability: homepage: package-url: synopsis: GHC primitives description: GHC primitives. 
category: GHC author: exposed: True exposed-modules: GHC.CString GHC.Classes GHC.Debug GHC.IntWord64 GHC.Magic GHC.PrimopWrappers GHC.Tuple GHC.Types GHC.Prim hidden-modules: trusted: False import-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/ghc-prim-0.3.1.0 library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/ghc-prim-0.3.1.0 hs-libraries: HSghc-prim-0.3.1.0 extra-libraries: extra-ghci-libraries: include-dirs: includes: depends: builtin_rts hugs-options: cc-options: ld-options: framework-dirs: frameworks: haddock-interfaces: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/ghc-prim-0.3.1.0/ghc-prim.haddock haddock-html: /opt/ghc/7.8.4/share/doc/ghc/html/libraries/ghc-prim-0.3.1.0 pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" --- name: rts version: 1.0 id: builtin_rts license: BSD3 copyright: maintainer: glasgow-haskell-users@haskell.org stability: homepage: package-url: synopsis: description: category: author: exposed: True exposed-modules: hidden-modules: trusted: False import-dirs: library-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/rts-1.0 hs-libraries: HSrts Cffi extra-libraries: m rt dl extra-ghci-libraries: include-dirs: /opt/ghc/7.8.4/lib/ghc-7.8.4/include includes: Stg.h depends: hugs-options: cc-options: ld-options: "-Wl,-u,ghczmprim_GHCziTypes_Izh_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Czh_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Fzh_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Dzh_static_info" "-Wl,-u,base_GHCziPtr_Ptr_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Wzh_static_info" "-Wl,-u,base_GHCziInt_I8zh_static_info" "-Wl,-u,base_GHCziInt_I16zh_static_info" "-Wl,-u,base_GHCziInt_I32zh_static_info" "-Wl,-u,base_GHCziInt_I64zh_static_info" "-Wl,-u,base_GHCziWord_W8zh_static_info" "-Wl,-u,base_GHCziWord_W16zh_static_info" "-Wl,-u,base_GHCziWord_W32zh_static_info" "-Wl,-u,base_GHCziWord_W64zh_static_info" "-Wl,-u,base_GHCziStable_StablePtr_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Izh_con_info" "-Wl,-u,ghczmprim_GHCziTypes_Czh_con_info" "-Wl,-u,ghczmprim_GHCziTypes_Fzh_con_info" 
"-Wl,-u,ghczmprim_GHCziTypes_Dzh_con_info" "-Wl,-u,base_GHCziPtr_Ptr_con_info" "-Wl,-u,base_GHCziPtr_FunPtr_con_info" "-Wl,-u,base_GHCziStable_StablePtr_con_info" "-Wl,-u,ghczmprim_GHCziTypes_False_closure" "-Wl,-u,ghczmprim_GHCziTypes_True_closure" "-Wl,-u,base_GHCziPack_unpackCString_closure" "-Wl,-u,base_GHCziIOziException_stackOverflow_closure" "-Wl,-u,base_GHCziIOziException_heapOverflow_closure" "-Wl,-u,base_ControlziExceptionziBase_nonTermination_closure" "-Wl,-u,base_GHCziIOziException_blockedIndefinitelyOnMVar_closure" "-Wl,-u,base_GHCziIOziException_blockedIndefinitelyOnSTM_closure" "-Wl,-u,base_ControlziExceptionziBase_nestedAtomically_closure" "-Wl,-u,base_GHCziWeak_runFinalizzerBatch_closure" "-Wl,-u,base_GHCziTopHandler_flushStdHandles_closure" "-Wl,-u,base_GHCziTopHandler_runIO_closure" "-Wl,-u,base_GHCziTopHandler_runNonIO_closure" "-Wl,-u,base_GHCziConcziIO_ensureIOManagerIsRunning_closure" "-Wl,-u,base_GHCziConcziIO_ioManagerCapabilitiesChanged_closure" "-Wl,-u,base_GHCziConcziSync_runSparks_closure" "-Wl,-u,base_GHCziConcziSignal_runHandlersPtr_closure" framework-dirs: frameworks: haddock-interfaces: haddock-html: pkgroot: "/opt/ghc/7.8.4/lib/ghc-7.8.4" stack-2.15.7/tests/unit/package-dump/ghc-head.txt0000644000000000000000000015056114502056216017772 0ustar0000000000000000name: ghc-boot version: 0.0.0.0 id: ghc-boot-0.0.0.0 key: ghc-boot-0.0.0.0 license: BSD3 maintainer: ghc-devs@haskell.org synopsis: Shared functionality between GHC and its boot libraries description: This library is shared between GHC, ghc-pkg, and other boot libraries. . A note about "GHC.PackageDb": it only deals with the subset of the package database that the compiler cares about: modules paths etc and not package metadata like description, authors etc. It is thus not a library interface to ghc-pkg and is *not* suitable for modifying GHC package databases. . 
The package database format and this library are constructed in such a way that while ghc-pkg depends on Cabal, the GHC library and program do not have to depend on Cabal. exposed: True exposed-modules: GHC.Lexeme GHC.PackageDb abi: 7a24014b606b3e9dd8b7b8aa4cf35acc trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/ghc-boot-0.0.0.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/ghc-boot-0.0.0.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/ghc-boot-0.0.0.0 hs-libraries: HSghc-boot-0.0.0.0 depends: base-4.9.0.0 binary-0.7.5.0 bytestring-0.10.7.0 directory-1.2.5.0 filepath-1.4.1.0 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/ghc-boot-0.0.0.0/ghc-boot.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/ghc-boot-0.0.0.0 --- name: ghc version: 7.11.20151213 id: ghc-7.11.20151213 key: ghc-7.11.20151213 license: BSD3 maintainer: glasgow-haskell-users@haskell.org homepage: http://www.haskell.org/ghc/ synopsis: The GHC API description: GHC's functionality can be useful for more things than just compiling Haskell programs. Important use cases are programs that analyse (and perhaps transform) Haskell code. Others include loading Haskell code dynamically in a GHCi-like manner. For this reason, a lot of GHC's functionality is made available through this package. 
category: Development author: The GHC Team exposed: False exposed-modules: Avail BasicTypes ConLike DataCon PatSyn Demand Debug Exception FieldLabel GhcMonad Hooks Id IdInfo Lexeme Literal Llvm Llvm.AbsSyn Llvm.MetaData Llvm.PpLlvm Llvm.Types LlvmCodeGen LlvmCodeGen.Base LlvmCodeGen.CodeGen LlvmCodeGen.Data LlvmCodeGen.Ppr LlvmCodeGen.Regs LlvmMangler MkId Module Name NameEnv NameSet OccName RdrName SrcLoc UniqSupply Unique Var VarEnv VarSet UnVarGraph BlockId CLabel Cmm CmmBuildInfoTables CmmPipeline CmmCallConv CmmCommonBlockElim CmmImplementSwitchPlans CmmContFlowOpt CmmExpr CmmInfo CmmLex CmmLint CmmLive CmmMachOp CmmSwitch CmmNode CmmOpt CmmParse CmmProcPoint CmmSink CmmType CmmUtils CmmLayoutStack MkGraph PprBase PprC PprCmm PprCmmDecl PprCmmExpr Bitmap CodeGen.Platform CodeGen.Platform.ARM CodeGen.Platform.ARM64 CodeGen.Platform.NoRegs CodeGen.Platform.PPC CodeGen.Platform.PPC_Darwin CodeGen.Platform.SPARC CodeGen.Platform.X86 CodeGen.Platform.X86_64 CgUtils StgCmm StgCmmBind StgCmmClosure StgCmmCon StgCmmEnv StgCmmExpr StgCmmForeign StgCmmHeap StgCmmHpc StgCmmArgRep StgCmmLayout StgCmmMonad StgCmmPrim StgCmmProf StgCmmTicky StgCmmUtils StgCmmExtCode SMRep CoreArity CoreFVs CoreLint CorePrep CoreSubst CoreSyn TrieMap CoreTidy CoreUnfold CoreUtils CoreSeq CoreStats MkCore PprCore PmExpr TmOracle Check Coverage Desugar DsArrows DsBinds DsCCall DsExpr DsForeign DsGRHSs DsListComp DsMonad DsUtils Match MatchCon MatchLit HsBinds HsDecls HsDoc HsExpr HsImpExp HsLit PlaceHolder HsPat HsSyn HsTypes HsUtils BinIface BuildTyCl IfaceEnv IfaceSyn IfaceType LoadIface MkIface TcIface FlagChecker Annotations BreakArray CmdLineParser CodeOutput Config Constants DriverMkDepend DriverPhases PipelineMonad DriverPipeline DynFlags ErrUtils Finder GHC GhcMake GhcPlugins DynamicLoading HeaderInfo HscMain HscStats HscTypes InteractiveEval InteractiveEvalTypes PackageConfig Packages PlatformConstants Plugins TcPluginM PprTyThing StaticFlags StaticPtrTable SysTools Elf TidyPgm Ctype 
HaddockUtils Lexer OptCoercion Parser RdrHsSyn ApiAnnotation ForeignCall PrelInfo PrelNames PrelRules PrimOp TysPrim TysWiredIn CostCentre ProfInit SCCfinal RnBinds RnEnv RnExpr RnHsDoc RnNames RnPat RnSource RnSplice RnTypes CoreMonad CSE FloatIn FloatOut LiberateCase OccurAnal SAT SetLevels SimplCore SimplEnv SimplMonad SimplUtils Simplify SimplStg StgStats UnariseStg Rules SpecConstr Specialise CoreToStg StgLint StgSyn CallArity DmdAnal WorkWrap WwLib FamInst Inst TcAnnotations TcArrows TcBinds TcClassDcl TcDefaults TcDeriv TcEnv TcExpr TcForeign TcGenDeriv TcGenGenerics TcHsSyn TcHsType TcInstDcls TcMType TcValidity TcMatches TcPat TcPatSyn TcRnDriver TcRnMonad TcRnTypes TcRules TcSimplify TcErrors TcTyClsDecls TcTyDecls TcTypeable TcType TcEvidence TcUnify TcInteract TcCanonical TcFlatten TcSMonad TcTypeNats TcSplice Class Coercion DsMeta THNames FamInstEnv FunDeps InstEnv TyCon CoAxiom Kind Type TyCoRep Unify Bag Binary BooleanFormula BufWrite Digraph Encoding FastFunctions FastMutInt FastString FastStringEnv Fingerprint FiniteMap FV GraphBase GraphColor GraphOps GraphPpr IOEnv ListSetOps Maybes MonadUtils OrdList Outputable Pair Panic Pretty Serialized State Stream StringBuffer UniqDFM UniqDSet UniqFM UniqSet Util Vectorise.Builtins.Base Vectorise.Builtins.Initialise Vectorise.Builtins Vectorise.Monad.Base Vectorise.Monad.Naming Vectorise.Monad.Local Vectorise.Monad.Global Vectorise.Monad.InstEnv Vectorise.Monad Vectorise.Utils.Base Vectorise.Utils.Closure Vectorise.Utils.Hoisting Vectorise.Utils.PADict Vectorise.Utils.Poly Vectorise.Utils Vectorise.Generic.Description Vectorise.Generic.PAMethods Vectorise.Generic.PADict Vectorise.Generic.PData Vectorise.Type.Env Vectorise.Type.Type Vectorise.Type.TyConDecl Vectorise.Type.Classify Vectorise.Convert Vectorise.Vect Vectorise.Var Vectorise.Env Vectorise.Exp Vectorise Hoopl.Dataflow Hoopl AsmCodeGen TargetReg NCGMonad Instruction Format Reg RegClass PIC Platform CPrim X86.Regs X86.RegInfo X86.Instr X86.Cond 
X86.Ppr X86.CodeGen PPC.Regs PPC.RegInfo PPC.Instr PPC.Cond PPC.Ppr PPC.CodeGen SPARC.Base SPARC.Regs SPARC.Imm SPARC.AddrMode SPARC.Cond SPARC.Instr SPARC.Stack SPARC.ShortcutJump SPARC.Ppr SPARC.CodeGen SPARC.CodeGen.Amode SPARC.CodeGen.Base SPARC.CodeGen.CondCode SPARC.CodeGen.Gen32 SPARC.CodeGen.Gen64 SPARC.CodeGen.Sanity SPARC.CodeGen.Expand RegAlloc.Liveness RegAlloc.Graph.Main RegAlloc.Graph.Stats RegAlloc.Graph.ArchBase RegAlloc.Graph.ArchX86 RegAlloc.Graph.Coalesce RegAlloc.Graph.Spill RegAlloc.Graph.SpillClean RegAlloc.Graph.SpillCost RegAlloc.Graph.TrivColorable RegAlloc.Linear.Main RegAlloc.Linear.JoinToTargets RegAlloc.Linear.State RegAlloc.Linear.Stats RegAlloc.Linear.FreeRegs RegAlloc.Linear.StackMap RegAlloc.Linear.Base RegAlloc.Linear.X86.FreeRegs RegAlloc.Linear.X86_64.FreeRegs RegAlloc.Linear.PPC.FreeRegs RegAlloc.Linear.SPARC.FreeRegs Dwarf Dwarf.Types Dwarf.Constants Convert ByteCodeAsm ByteCodeGen ByteCodeInstr ByteCodeItbls ByteCodeLink Debugger LibFFI Linker ObjLink RtClosureInspect DebuggerUtils abi: bc2e1cb7cdee2089e52f007db59a253c trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/ghc-7.11.20151213 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/ghc-7.11.20151213 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/ghc-7.11.20151213 hs-libraries: HSghc-7.11.20151213 include-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/ghc-7.11.20151213/include depends: array-0.5.1.0 base-4.9.0.0 binary-0.7.5.0 bytestring-0.10.7.0 containers-0.5.6.3 directory-1.2.5.0 filepath-1.4.1.0 ghc-boot-0.0.0.0 hoopl-3.10.2.0 hpc-0.6.0.2 process-1.4.1.0 template-haskell-2.11.0.0 time-1.5.0.1 transformers-0.4.3.0 unix-2.7.1.1 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/ghc-7.11.20151213/ghc.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/ghc-7.11.20151213 --- name: haskeline version: 0.7.2.1 id: haskeline-0.7.2.1 key: haskeline-0.7.2.1 license: BSD3 copyright: (c) Judah Jacobson maintainer: Judah Jacobson 
stability: Experimental homepage: http://trac.haskell.org/haskeline synopsis: A command-line interface for user input, written in Haskell. description: Haskeline provides a user interface for line input in command-line programs. This library is similar in purpose to readline, but since it is written in Haskell it is (hopefully) more easily used in other Haskell programs. . Haskeline runs both on POSIX-compatible systems and on Windows. category: User Interfaces author: Judah Jacobson exposed: True exposed-modules: System.Console.Haskeline System.Console.Haskeline.Completion System.Console.Haskeline.MonadException System.Console.Haskeline.History System.Console.Haskeline.IO hidden-modules: System.Console.Haskeline.Backend System.Console.Haskeline.Backend.WCWidth System.Console.Haskeline.Command System.Console.Haskeline.Command.Completion System.Console.Haskeline.Command.History System.Console.Haskeline.Command.KillRing System.Console.Haskeline.Directory System.Console.Haskeline.Emacs System.Console.Haskeline.InputT System.Console.Haskeline.Key System.Console.Haskeline.LineState System.Console.Haskeline.Monads System.Console.Haskeline.Prefs System.Console.Haskeline.RunCommand System.Console.Haskeline.Term System.Console.Haskeline.Command.Undo System.Console.Haskeline.Vi System.Console.Haskeline.Recover System.Console.Haskeline.Backend.Posix System.Console.Haskeline.Backend.Posix.Encoder System.Console.Haskeline.Backend.DumbTerm System.Console.Haskeline.Backend.Terminfo abi: 1084385e878ca046b1ba1b0149406b60 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/haskeline-0.7.2.1 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/haskeline-0.7.2.1 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/haskeline-0.7.2.1 hs-libraries: HShaskeline-0.7.2.1 depends: base-4.9.0.0 bytestring-0.10.7.0 containers-0.5.6.3 directory-1.2.5.0 filepath-1.4.1.0 terminfo-0.4.0.1 transformers-0.4.3.0 unix-2.7.1.1 haddock-interfaces: 
/opt/ghc/head/share/doc/ghc/html/libraries/haskeline-0.7.2.1/haskeline.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/haskeline-0.7.2.1 --- name: terminfo version: 0.4.0.1 id: terminfo-0.4.0.1 key: terminfo-0.4.0.1 license: BSD3 copyright: (c) Judah Jacobson maintainer: Judah Jacobson stability: Stable homepage: https://github.com/judah/terminfo synopsis: Haskell bindings to the terminfo library. description: This library provides an interface to the terminfo database (via bindings to the curses library). allows POSIX systems to interact with a variety of terminals using a standard set of capabilities. category: User Interfaces author: Judah Jacobson exposed: True exposed-modules: System.Console.Terminfo System.Console.Terminfo.Base System.Console.Terminfo.Cursor System.Console.Terminfo.Color System.Console.Terminfo.Edit System.Console.Terminfo.Effects System.Console.Terminfo.Keys abi: d0bd235d4bbae7f2cbb36b97e6bcfba9 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/terminfo-0.4.0.1 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/terminfo-0.4.0.1 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/terminfo-0.4.0.1 hs-libraries: HSterminfo-0.4.0.1 extra-libraries: tinfo includes: ncurses.h term.h depends: base-4.9.0.0 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/terminfo-0.4.0.1/terminfo.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/terminfo-0.4.0.1 --- name: xhtml version: 3000.2.1 id: xhtml-3000.2.1 key: xhtml-3000.2.1 license: BSD3 copyright: Bjorn Bringert 2004-2006, Andy Gill and the Oregon Graduate Institute of Science and Technology, 1999-2001 maintainer: Chris Dornan stability: Stable homepage: https://github.com/haskell/xhtml synopsis: An XHTML combinator library description: This package provides combinators for producing XHTML 1.0, including the Strict, Transitional and Frameset variants. 
category: Web, XML, Pretty Printer author: Bjorn Bringert exposed: True exposed-modules: Text.XHtml Text.XHtml.Frameset Text.XHtml.Strict Text.XHtml.Transitional Text.XHtml.Debug Text.XHtml.Table hidden-modules: Text.XHtml.Strict.Attributes Text.XHtml.Strict.Elements Text.XHtml.Frameset.Attributes Text.XHtml.Frameset.Elements Text.XHtml.Transitional.Attributes Text.XHtml.Transitional.Elements Text.XHtml.BlockTable Text.XHtml.Extras Text.XHtml.Internals abi: 932c4b6847d698115f4ad73b10e56807 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/xhtml-3000.2.1 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/xhtml-3000.2.1 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/xhtml-3000.2.1 hs-libraries: HSxhtml-3000.2.1 depends: base-4.9.0.0 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/xhtml-3000.2.1/xhtml.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/xhtml-3000.2.1 --- name: transformers version: 0.4.3.0 id: transformers-0.4.3.0 key: transformers-0.4.3.0 license: BSD3 maintainer: Ross Paterson synopsis: Concrete functor and monad transformers description: A portable library of functor and monad transformers, inspired by the paper \"Functional Programming with Overloading and Higher-Order Polymorphism\", by Mark P Jones, in /Advanced School of Functional Programming/, 1995 (). . This package contains: . * the monad transformer class (in "Control.Monad.Trans.Class") and IO monad class (in "Control.Monad.IO.Class") . * concrete functor and monad transformers, each with associated operations and functions to lift operations associated with other transformers. . The package can be used on its own in portable Haskell code, in which case operations need to be manually lifted through transformer stacks (see "Control.Monad.Trans.Class" for some examples). 
Alternatively, it can be used with the non-portable monad classes in the @mtl@ or @monads-tf@ packages, which automatically lift operations introduced by monad transformers through other transformers. category: Control author: Andy Gill, Ross Paterson exposed: True exposed-modules: Control.Applicative.Backwards Control.Applicative.Lift Control.Monad.Signatures Control.Monad.Trans.Class Control.Monad.Trans.Cont Control.Monad.Trans.Except Control.Monad.Trans.Error Control.Monad.Trans.Identity Control.Monad.Trans.List Control.Monad.Trans.Maybe Control.Monad.Trans.Reader Control.Monad.Trans.RWS Control.Monad.Trans.RWS.Lazy Control.Monad.Trans.RWS.Strict Control.Monad.Trans.State Control.Monad.Trans.State.Lazy Control.Monad.Trans.State.Strict Control.Monad.Trans.Writer Control.Monad.Trans.Writer.Lazy Control.Monad.Trans.Writer.Strict Data.Functor.Classes Data.Functor.Compose Data.Functor.Constant Data.Functor.Product Data.Functor.Reverse Data.Functor.Sum abi: d71166f18d2591685ff3ee72b17638c0 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/transformers-0.4.3.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/transformers-0.4.3.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/transformers-0.4.3.0 hs-libraries: HStransformers-0.4.3.0 depends: base-4.9.0.0 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/transformers-0.4.3.0/transformers.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/transformers-0.4.3.0 --- name: hoopl version: 3.10.2.0 id: hoopl-3.10.2.0 key: hoopl-3.10.2.0 license: BSD3 maintainer: nr@cs.tufts.edu, andreas.voellmy@gmail.com, email@ningwang.org homepage: http://ghc.cs.tufts.edu/hoopl/ synopsis: A library to support dataflow analysis and optimization description: Higher-order optimization library . See /Norman Ramsey, Joao Dias, and Simon Peyton Jones./ /(2010)/ for more details. 
category: Compilers/Interpreters author: Norman Ramsey, Joao Dias, Simon Marlow and Simon Peyton Jones exposed: True exposed-modules: Compiler.Hoopl Compiler.Hoopl.Internals Compiler.Hoopl.Wrappers Compiler.Hoopl.Passes.Dominator Compiler.Hoopl.Passes.DList hidden-modules: Compiler.Hoopl.Checkpoint Compiler.Hoopl.Collections Compiler.Hoopl.Combinators Compiler.Hoopl.Dataflow Compiler.Hoopl.Debug Compiler.Hoopl.Block Compiler.Hoopl.Graph Compiler.Hoopl.Label Compiler.Hoopl.MkGraph Compiler.Hoopl.Fuel Compiler.Hoopl.Pointed Compiler.Hoopl.Shape Compiler.Hoopl.Show Compiler.Hoopl.Unique Compiler.Hoopl.XUtil abi: 719b00050240e530b78b62520193b342 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/hoopl-3.10.2.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/hoopl-3.10.2.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/hoopl-3.10.2.0 hs-libraries: HShoopl-3.10.2.0 depends: base-4.9.0.0 containers-0.5.6.3 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/hoopl-3.10.2.0/hoopl.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/hoopl-3.10.2.0 --- name: template-haskell version: 2.11.0.0 id: template-haskell-2.11.0.0 key: template-haskell-2.11.0.0 license: BSD3 maintainer: libraries@haskell.org synopsis: Support library for Template Haskell description: This package provides modules containing facilities for manipulating Haskell source code using Template Haskell. . See for more information. 
category: Template Haskell exposed: True exposed-modules: Language.Haskell.TH Language.Haskell.TH.Lib Language.Haskell.TH.Ppr Language.Haskell.TH.PprLib Language.Haskell.TH.Quote Language.Haskell.TH.Syntax hidden-modules: Language.Haskell.TH.Lib.Map abi: 26855f7c84ab668b019a8d35abdb5276 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/template-haskell-2.11.0.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/template-haskell-2.11.0.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/template-haskell-2.11.0.0 hs-libraries: HStemplate-haskell-2.11.0.0 depends: base-4.9.0.0 ghc-boot-0.0.0.0 pretty-1.1.2.0 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/template-haskell-2.11.0.0/template-haskell.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/template-haskell-2.11.0.0 --- name: Cabal version: 1.23.0.0 id: Cabal-1.23.0.0 key: Cabal-1.23.0.0 license: BSD3 copyright: 2003-2006, Isaac Jones 2005-2011, Duncan Coutts maintainer: cabal-devel@haskell.org homepage: http://www.haskell.org/cabal/ synopsis: A framework for packaging Haskell software description: The Haskell Common Architecture for Building Applications and Libraries: a framework defining a common interface for authors to more easily build their Haskell applications in a portable way. . The Haskell Cabal is part of a larger infrastructure for distributing, organizing, and cataloging Haskell libraries and tools. 
category: Distribution author: Isaac Jones Duncan Coutts exposed: True exposed-modules: Distribution.Compat.CreatePipe Distribution.Compat.Environment Distribution.Compat.Exception Distribution.Compat.Internal.TempFile Distribution.Compat.ReadP Distribution.Compiler Distribution.InstalledPackageInfo Distribution.License Distribution.Make Distribution.ModuleName Distribution.Package Distribution.PackageDescription Distribution.PackageDescription.Check Distribution.PackageDescription.Configuration Distribution.PackageDescription.Parse Distribution.PackageDescription.PrettyPrint Distribution.PackageDescription.Utils Distribution.ParseUtils Distribution.ReadE Distribution.Simple Distribution.Simple.Bench Distribution.Simple.Build Distribution.Simple.Build.Macros Distribution.Simple.Build.PathsModule Distribution.Simple.BuildPaths Distribution.Simple.BuildTarget Distribution.Simple.CCompiler Distribution.Simple.Command Distribution.Simple.Compiler Distribution.Simple.Configure Distribution.Simple.GHC Distribution.Simple.GHCJS Distribution.Simple.Haddock Distribution.Simple.HaskellSuite Distribution.Simple.Hpc Distribution.Simple.Install Distribution.Simple.InstallDirs Distribution.Simple.JHC Distribution.Simple.LHC Distribution.Simple.LocalBuildInfo Distribution.Simple.PackageIndex Distribution.Simple.PreProcess Distribution.Simple.PreProcess.Unlit Distribution.Simple.Program Distribution.Simple.Program.Ar Distribution.Simple.Program.Builtin Distribution.Simple.Program.Db Distribution.Simple.Program.Find Distribution.Simple.Program.GHC Distribution.Simple.Program.HcPkg Distribution.Simple.Program.Hpc Distribution.Simple.Program.Internal Distribution.Simple.Program.Ld Distribution.Simple.Program.Run Distribution.Simple.Program.Script Distribution.Simple.Program.Strip Distribution.Simple.Program.Types Distribution.Simple.Register Distribution.Simple.Setup Distribution.Simple.SrcDist Distribution.Simple.Test Distribution.Simple.Test.ExeV10 Distribution.Simple.Test.LibV09 
Distribution.Simple.Test.Log Distribution.Simple.UHC Distribution.Simple.UserHooks Distribution.Simple.Utils Distribution.System Distribution.TestSuite Distribution.Text Distribution.Utils.NubList Distribution.Verbosity Distribution.Version Language.Haskell.Extension hidden-modules: Distribution.Compat.Binary Distribution.Compat.CopyFile Distribution.GetOpt Distribution.Lex Distribution.Simple.GHC.Internal Distribution.Simple.GHC.IPI641 Distribution.Simple.GHC.IPI642 Distribution.Simple.GHC.ImplInfo Paths_Cabal abi: 4b55984d7d0e5898df279f52ba75702f trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/Cabal-1.23.0.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/Cabal-1.23.0.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/Cabal-1.23.0.0 hs-libraries: HSCabal-1.23.0.0 depends: array-0.5.1.0 base-4.9.0.0 binary-0.7.5.0 bytestring-0.10.7.0 containers-0.5.6.3 deepseq-1.4.2.0 directory-1.2.5.0 filepath-1.4.1.0 pretty-1.1.2.0 process-1.4.1.0 time-1.5.0.1 unix-2.7.1.1 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/Cabal-1.23.0.0/Cabal.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/Cabal-1.23.0.0 --- name: binary version: 0.7.5.0 id: binary-0.7.5.0 key: binary-0.7.5.0 license: BSD3 maintainer: Lennart Kolmodin, Don Stewart stability: provisional homepage: https://github.com/kolmodin/binary synopsis: Binary serialisation for Haskell values using lazy ByteStrings description: Efficient, pure binary serialisation using lazy ByteStrings. Haskell values may be encoded to and from binary formats, written to disk as binary, or sent over the network. The format used can be automatically generated, or you can choose to implement a custom format if needed. Serialisation speeds of over 1 G\/sec have been observed, so this library should be suitable for high performance scenarios. 
category: Data, Parsing author: Lennart Kolmodin exposed: True exposed-modules: Data.Binary Data.Binary.Put Data.Binary.Get Data.Binary.Get.Internal Data.Binary.Builder Data.Binary.Builder.Internal hidden-modules: Data.Binary.Builder.Base Data.Binary.Class Data.Binary.Generic abi: 023629bb1f3d2da077b9dfaec842d5d6 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/binary-0.7.5.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/binary-0.7.5.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/binary-0.7.5.0 hs-libraries: HSbinary-0.7.5.0 depends: array-0.5.1.0 base-4.9.0.0 bytestring-0.10.7.0 containers-0.5.6.3 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/binary-0.7.5.0/binary.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/binary-0.7.5.0 --- name: pretty version: 1.1.2.0 id: pretty-1.1.2.0 key: pretty-1.1.2.0 license: BSD3 maintainer: David Terei stability: Stable homepage: http://github.com/haskell/pretty synopsis: Pretty-printing library description: This package contains a pretty-printing library, a set of API's that provides a way to easily print out text in a consistent format of your choosing. This is useful for compilers and related tools. . This library was originally designed by John Hughes's and has since been heavily modified by Simon Peyton Jones. 
category: Text exposed: True exposed-modules: Text.PrettyPrint Text.PrettyPrint.HughesPJ Text.PrettyPrint.HughesPJClass abi: ff204a4f63b87ec08dfb63935ab60346 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/pretty-1.1.2.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/pretty-1.1.2.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/pretty-1.1.2.0 hs-libraries: HSpretty-1.1.2.0 depends: base-4.9.0.0 deepseq-1.4.2.0 ghc-prim-0.5.0.0 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/pretty-1.1.2.0/pretty.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/pretty-1.1.2.0 --- name: hpc version: 0.6.0.2 id: hpc-0.6.0.2 key: hpc-0.6.0.2 license: BSD3 maintainer: ghc-devs@haskell.org synopsis: Code Coverage Library for Haskell description: This package provides the code coverage library for Haskell. . See for more information. category: Control author: Andy Gill exposed: True exposed-modules: Trace.Hpc.Util Trace.Hpc.Mix Trace.Hpc.Tix Trace.Hpc.Reflect abi: b98013c17bf1741c790e3a830237e8bc trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/hpc-0.6.0.2 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/hpc-0.6.0.2 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/hpc-0.6.0.2 hs-libraries: HShpc-0.6.0.2 depends: base-4.9.0.0 containers-0.5.6.3 directory-1.2.5.0 filepath-1.4.1.0 time-1.5.0.1 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/hpc-0.6.0.2/hpc.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/hpc-0.6.0.2 --- name: process version: 1.4.1.0 id: process-1.4.1.0 key: process-1.4.1.0 license: BSD3 maintainer: libraries@haskell.org synopsis: Process libraries description: This package contains libraries for dealing with system processes. 
category: System exposed: True exposed-modules: System.Cmd System.Process System.Process.Internals hidden-modules: System.Process.Common System.Process.Posix abi: 483b4c1d894e8c880c567a1ee593790f trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/process-1.4.1.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/process-1.4.1.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/process-1.4.1.0 hs-libraries: HSprocess-1.4.1.0 include-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/process-1.4.1.0/include includes: runProcess.h depends: base-4.9.0.0 deepseq-1.4.2.0 directory-1.2.5.0 filepath-1.4.1.0 unix-2.7.1.1 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/process-1.4.1.0/process.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/process-1.4.1.0 --- name: directory version: 1.2.5.0 id: directory-1.2.5.0 key: directory-1.2.5.0 license: BSD3 maintainer: libraries@haskell.org synopsis: Platform-agnostic library for filesystem operations description: This library provides a basic set of operations for manipulating files and directories in a portable way. 
category: System exposed: True exposed-modules: System.Directory hidden-modules: System.Directory.Internal System.Directory.Internal.Config System.Directory.Internal.C_utimensat System.Directory.Internal.Posix System.Directory.Internal.Windows abi: 73051d50bd0377c1f91d40ac29eafcde trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/directory-1.2.5.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/directory-1.2.5.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/directory-1.2.5.0 hs-libraries: HSdirectory-1.2.5.0 include-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/directory-1.2.5.0/include includes: HsDirectory.h depends: base-4.9.0.0 filepath-1.4.1.0 time-1.5.0.1 unix-2.7.1.1 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/directory-1.2.5.0/directory.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/directory-1.2.5.0 --- name: unix version: 2.7.1.1 id: unix-2.7.1.1 key: unix-2.7.1.1 license: BSD3 maintainer: libraries@haskell.org homepage: https://github.com/haskell/unix synopsis: POSIX functionality description: This package gives you access to the set of operating system services standardised by POSIX 1003.1b (or the IEEE Portable Operating System Interface for Computing Environments - IEEE Std. 1003.1). . The package is not supported under Windows (except under Cygwin). 
category: System exposed: True exposed-modules: System.Posix System.Posix.ByteString System.Posix.Error System.Posix.Resource System.Posix.Time System.Posix.Unistd System.Posix.User System.Posix.Signals System.Posix.Signals.Exts System.Posix.Semaphore System.Posix.SharedMem System.Posix.ByteString.FilePath System.Posix.Directory System.Posix.Directory.ByteString System.Posix.DynamicLinker.Module System.Posix.DynamicLinker.Module.ByteString System.Posix.DynamicLinker.Prim System.Posix.DynamicLinker.ByteString System.Posix.DynamicLinker System.Posix.Files System.Posix.Files.ByteString System.Posix.IO System.Posix.IO.ByteString System.Posix.Env System.Posix.Env.ByteString System.Posix.Fcntl System.Posix.Process System.Posix.Process.Internals System.Posix.Process.ByteString System.Posix.Temp System.Posix.Temp.ByteString System.Posix.Terminal System.Posix.Terminal.ByteString hidden-modules: System.Posix.Directory.Common System.Posix.DynamicLinker.Common System.Posix.Files.Common System.Posix.IO.Common System.Posix.Process.Common System.Posix.Terminal.Common abi: 416bbf4a68812f768d46e0603efc98e6 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/unix-2.7.1.1 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/unix-2.7.1.1 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/unix-2.7.1.1 hs-libraries: HSunix-2.7.1.1 extra-libraries: rt util dl pthread include-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/unix-2.7.1.1/include includes: HsUnix.h execvpe.h depends: base-4.9.0.0 bytestring-0.10.7.0 time-1.5.0.1 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/unix-2.7.1.1/unix.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/unix-2.7.1.1 --- name: time version: 1.5.0.1 id: time-1.5.0.1 key: time-1.5.0.1 license: BSD3 maintainer: stability: stable homepage: https://github.com/haskell/time synopsis: A time library description: A time library category: System author: Ashley Yakeley exposed: True exposed-modules: Data.Time.Calendar 
Data.Time.Calendar.MonthDay Data.Time.Calendar.OrdinalDate Data.Time.Calendar.WeekDate Data.Time.Calendar.Julian Data.Time.Calendar.Easter Data.Time.Clock Data.Time.Clock.POSIX Data.Time.Clock.TAI Data.Time.LocalTime Data.Time.Format Data.Time hidden-modules: Data.Time.Calendar.Private Data.Time.Calendar.Days Data.Time.Calendar.Gregorian Data.Time.Calendar.JulianYearDay Data.Time.Clock.Scale Data.Time.Clock.UTC Data.Time.Clock.CTimeval Data.Time.Clock.UTCDiff Data.Time.LocalTime.TimeZone Data.Time.LocalTime.TimeOfDay Data.Time.LocalTime.LocalTime Data.Time.Format.Parse Data.Time.Format.Locale abi: fa14628fffb7d93741bb88caab63757e trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/time-1.5.0.1 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/time-1.5.0.1 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/time-1.5.0.1 hs-libraries: HStime-1.5.0.1 include-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/time-1.5.0.1/include depends: base-4.9.0.0 deepseq-1.4.2.0 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/time-1.5.0.1/time.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/time-1.5.0.1 --- name: containers version: 0.5.6.3 id: containers-0.5.6.3 key: containers-0.5.6.3 license: BSD3 maintainer: fox@ucw.cz synopsis: Assorted concrete container types description: This package contains efficient general-purpose implementations of various basic immutable container types. The declared cost of each operation is either worst-case or amortized, but remains valid even if structures are shared. 
category: Data Structures exposed: True exposed-modules: Data.IntMap Data.IntMap.Lazy Data.IntMap.Strict Data.IntSet Data.Map Data.Map.Lazy Data.Map.Strict Data.Set Data.Graph Data.Sequence Data.Tree hidden-modules: Data.IntMap.Base Data.IntSet.Base Data.Map.Base Data.Set.Base Data.Utils.BitUtil Data.Utils.StrictFold Data.Utils.StrictPair abi: 4ae96ef90aaf7e7c342611448391c5cd trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/containers-0.5.6.3 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/containers-0.5.6.3 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/containers-0.5.6.3 hs-libraries: HScontainers-0.5.6.3 depends: array-0.5.1.0 base-4.9.0.0 deepseq-1.4.2.0 ghc-prim-0.5.0.0 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/containers-0.5.6.3/containers.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/containers-0.5.6.3 --- name: bytestring version: 0.10.7.0 id: bytestring-0.10.7.0 key: bytestring-0.10.7.0 license: BSD3 copyright: Copyright (c) Don Stewart 2005-2009, (c) Duncan Coutts 2006-2015, (c) David Roundy 2003-2005, (c) Jasper Van der Jeugt 2010, (c) Simon Meier 2010-2013. maintainer: Duncan Coutts homepage: https://github.com/haskell/bytestring synopsis: Fast, compact, strict and lazy byte strings with a list interface description: An efficient compact, immutable byte string type (both strict and lazy) suitable for binary or 8-bit character data. . The 'ByteString' type represents sequences of bytes or 8-bit characters. It is suitable for high performance use, both in terms of large data quantities, or high speed requirements. The 'ByteString' functions follow the same style as Haskell\'s ordinary lists, so it is easy to convert code from using 'String' to 'ByteString'. . Two 'ByteString' variants are provided: . * Strict 'ByteString's keep the string as a single large array. This makes them convenient for passing data between C and Haskell. . 
* Lazy 'ByteString's use a lazy list of strict chunks which makes it suitable for I\/O streaming tasks. . The @Char8@ modules provide a character-based view of the same underlying 'ByteString' types. This makes it convenient to handle mixed binary and 8-bit character content (which is common in many file formats and network protocols). . The 'Builder' module provides an efficient way to build up 'ByteString's in an ad-hoc way by repeated concatenation. This is ideal for fast serialisation or pretty printing. . There is also a 'ShortByteString' type which has a lower memory overhead and can be converted to or from a 'ByteString', but supports very few other operations. It is suitable for keeping many short strings in memory. . 'ByteString's are not designed for Unicode. For Unicode strings you should use the 'Text' type from the @text@ package. . These modules are intended to be imported qualified, to avoid name clashes with "Prelude" functions, e.g. . > import qualified Data.ByteString as BS category: Data author: Don Stewart, Duncan Coutts exposed: True exposed-modules: Data.ByteString Data.ByteString.Char8 Data.ByteString.Unsafe Data.ByteString.Internal Data.ByteString.Lazy Data.ByteString.Lazy.Char8 Data.ByteString.Lazy.Internal Data.ByteString.Short Data.ByteString.Short.Internal Data.ByteString.Builder Data.ByteString.Builder.Extra Data.ByteString.Builder.Prim Data.ByteString.Builder.Internal Data.ByteString.Builder.Prim.Internal Data.ByteString.Lazy.Builder Data.ByteString.Lazy.Builder.Extras Data.ByteString.Lazy.Builder.ASCII hidden-modules: Data.ByteString.Builder.ASCII Data.ByteString.Builder.Prim.Binary Data.ByteString.Builder.Prim.ASCII Data.ByteString.Builder.Prim.Internal.Floating Data.ByteString.Builder.Prim.Internal.UncheckedShifts Data.ByteString.Builder.Prim.Internal.Base16 abi: d9206a8fe0d44e69be0c04076cabad23 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/bytestring-0.10.7.0 library-dirs: 
/opt/ghc/head/lib/ghc-7.11.20151213/bytestring-0.10.7.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/bytestring-0.10.7.0 hs-libraries: HSbytestring-0.10.7.0 include-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/bytestring-0.10.7.0/include includes: fpstring.h depends: base-4.9.0.0 deepseq-1.4.2.0 ghc-prim-0.5.0.0 integer-gmp-1.0.0.0 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/bytestring-0.10.7.0/bytestring.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/bytestring-0.10.7.0 --- name: deepseq version: 1.4.2.0 id: deepseq-1.4.2.0 key: deepseq-1.4.2.0 license: BSD3 maintainer: libraries@haskell.org synopsis: Deep evaluation of data structures description: This package provides methods for fully evaluating data structures (\"deep evaluation\"). Deep evaluation is often used for adding strictness to a program, e.g. in order to force pending exceptions, remove space leaks, or force lazy I/O to happen. It is also useful in parallel programs, to ensure pending work does not migrate to the wrong thread. . The primary use of this package is via the 'deepseq' function, a \"deep\" version of 'seq'. It is implemented on top of an 'NFData' typeclass (\"Normal Form Data\", data structures with no unevaluated components) which defines strategies for fully evaluating different data types. 
category: Control exposed: True exposed-modules: Control.DeepSeq abi: 63c4c214c0c19484502b3c8b7e42ec69 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/deepseq-1.4.2.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/deepseq-1.4.2.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/deepseq-1.4.2.0 hs-libraries: HSdeepseq-1.4.2.0 depends: array-0.5.1.0 base-4.9.0.0 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/deepseq-1.4.2.0/deepseq.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/deepseq-1.4.2.0 --- name: array version: 0.5.1.0 id: array-0.5.1.0 key: array-0.5.1.0 license: BSD3 maintainer: libraries@haskell.org synopsis: Mutable and immutable arrays description: In addition to providing the "Data.Array" module , this package also defines the classes 'IArray' of immutable arrays and 'MArray' of arrays mutable within appropriate monads, as well as some instances of these classes. category: Data Structures exposed: True exposed-modules: Data.Array Data.Array.Base Data.Array.IArray Data.Array.IO Data.Array.IO.Safe Data.Array.IO.Internals Data.Array.MArray Data.Array.MArray.Safe Data.Array.ST Data.Array.ST.Safe Data.Array.Storable Data.Array.Storable.Safe Data.Array.Storable.Internals Data.Array.Unboxed Data.Array.Unsafe abi: 2b2b879a09eb81c865ac273803e08132 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/array-0.5.1.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/array-0.5.1.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/array-0.5.1.0 hs-libraries: HSarray-0.5.1.0 depends: base-4.9.0.0 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/array-0.5.1.0/array.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/array-0.5.1.0 --- name: filepath version: 1.4.1.0 id: filepath-1.4.1.0 key: filepath-1.4.1.0 license: BSD3 copyright: Neil Mitchell 2005-2015 maintainer: Neil Mitchell homepage: https://github.com/haskell/filepath#readme synopsis: Library for 
manipulating FilePaths in a cross platform way. description: This package provides functionality for manipulating @FilePath@ values, and is shipped with both and the . It provides three modules: . * "System.FilePath.Posix" manipulates POSIX\/Linux style @FilePath@ values (with @\/@ as the path separator). . * "System.FilePath.Windows" manipulates Windows style @FilePath@ values (with either @\\@ or @\/@ as the path separator, and deals with drives). . * "System.FilePath" is an alias for the module appropriate to your platform. . All three modules provide the same API, and the same documentation (calling out differences in the different variants). category: System author: Neil Mitchell exposed: True exposed-modules: System.FilePath System.FilePath.Posix System.FilePath.Windows abi: 1e3d9055afc6aa08b97f5ad5f8014ce4 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/filepath-1.4.1.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/filepath-1.4.1.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/filepath-1.4.1.0 hs-libraries: HSfilepath-1.4.1.0 depends: base-4.9.0.0 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/filepath-1.4.1.0/filepath.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/filepath-1.4.1.0 --- name: base version: 4.9.0.0 id: base-4.9.0.0 key: base-4.9.0.0 license: BSD3 maintainer: libraries@haskell.org synopsis: Basic libraries description: This package contains the "Prelude" and its support libraries, and a large collection of useful libraries ranging from data structures to parsing combinators and debugging utilities. 
category: Prelude exposed: True exposed-modules: Control.Applicative Control.Arrow Control.Category Control.Concurrent Control.Concurrent.Chan Control.Concurrent.MVar Control.Concurrent.QSem Control.Concurrent.QSemN Control.Exception Control.Exception.Base Control.Monad Control.Monad.Fail Control.Monad.Fix Control.Monad.Instances Control.Monad.IO.Class Control.Monad.ST Control.Monad.ST.Lazy Control.Monad.ST.Lazy.Safe Control.Monad.ST.Lazy.Unsafe Control.Monad.ST.Safe Control.Monad.ST.Strict Control.Monad.ST.Unsafe Control.Monad.Zip Data.Bifunctor Data.Bits Data.Bool Data.Char Data.Coerce Data.Complex Data.Data Data.Dynamic Data.Either Data.Eq Data.Fixed Data.Foldable Data.Function Data.Functor Data.Functor.Identity Data.IORef Data.Int Data.Ix Data.Kind Data.List Data.List.NonEmpty Data.Maybe Data.Monoid Data.Ord Data.Proxy Data.Ratio Data.Semigroup Data.STRef Data.STRef.Lazy Data.STRef.Strict Data.String Data.Traversable Data.Tuple Data.Type.Bool Data.Type.Coercion Data.Type.Equality Data.Typeable Data.Typeable.Internal Data.Unique Data.Version Data.Void Data.Word Debug.Trace Foreign Foreign.C Foreign.C.Error Foreign.C.String Foreign.C.Types Foreign.Concurrent Foreign.ForeignPtr Foreign.ForeignPtr.Safe Foreign.ForeignPtr.Unsafe Foreign.Marshal Foreign.Marshal.Alloc Foreign.Marshal.Array Foreign.Marshal.Error Foreign.Marshal.Pool Foreign.Marshal.Safe Foreign.Marshal.Unsafe Foreign.Marshal.Utils Foreign.Ptr Foreign.Safe Foreign.StablePtr Foreign.Storable GHC.Arr GHC.Base GHC.Char GHC.Conc GHC.Conc.IO GHC.Conc.Signal GHC.Conc.Sync GHC.ConsoleHandler GHC.Constants GHC.Desugar GHC.Enum GHC.Environment GHC.Err GHC.Exception GHC.ExecutionStack GHC.ExecutionStack.Internal GHC.Exts GHC.Fingerprint GHC.Fingerprint.Type GHC.Float GHC.Float.ConversionUtils GHC.Float.RealFracMethods GHC.Foreign GHC.ForeignPtr GHC.GHCi GHC.Generics GHC.IO GHC.IO.Buffer GHC.IO.BufferedIO GHC.IO.Device GHC.IO.Encoding GHC.IO.Encoding.CodePage GHC.IO.Encoding.Failure GHC.IO.Encoding.Iconv 
GHC.IO.Encoding.Latin1 GHC.IO.Encoding.Types GHC.IO.Encoding.UTF16 GHC.IO.Encoding.UTF32 GHC.IO.Encoding.UTF8 GHC.IO.Exception GHC.IO.FD GHC.IO.Handle GHC.IO.Handle.FD GHC.IO.Handle.Internals GHC.IO.Handle.Text GHC.IO.Handle.Types GHC.IO.IOMode GHC.IO.Unsafe GHC.IOArray GHC.IORef GHC.Int GHC.List GHC.MVar GHC.Natural GHC.Num GHC.OldList GHC.OverloadedLabels GHC.PArr GHC.Pack GHC.Profiling GHC.Ptr GHC.Read GHC.Real GHC.RTS.Flags GHC.ST GHC.StaticPtr GHC.STRef GHC.Show GHC.Stable GHC.Stack GHC.Stack.CCS GHC.Stack.Types GHC.Stats GHC.Storable GHC.TopHandler GHC.TypeLits GHC.Unicode GHC.Weak GHC.Word Numeric Numeric.Natural Prelude System.CPUTime System.Console.GetOpt System.Environment System.Exit System.IO System.IO.Error System.IO.Unsafe System.Info System.Mem System.Mem.StableName System.Mem.Weak System.Posix.Internals System.Posix.Types System.Timeout Text.ParserCombinators.ReadP Text.ParserCombinators.ReadPrec Text.Printf Text.Read Text.Read.Lex Text.Show Text.Show.Functions Unsafe.Coerce GHC.Event hidden-modules: Control.Monad.ST.Imp Control.Monad.ST.Lazy.Imp Data.OldList Foreign.ForeignPtr.Imp System.Environment.ExecutablePath GHC.Event.Arr GHC.Event.Array GHC.Event.Clock GHC.Event.Control GHC.Event.EPoll GHC.Event.IntTable GHC.Event.Internal GHC.Event.KQueue GHC.Event.Manager GHC.Event.PSQ GHC.Event.Poll GHC.Event.Thread GHC.Event.TimerManager GHC.Event.Unique abi: 472df40e39128303d276cf121f250e89 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/base-4.9.0.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/base-4.9.0.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/base-4.9.0.0 hs-libraries: HSbase-4.9.0.0 include-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/base-4.9.0.0/include includes: HsBase.h depends: ghc-prim-0.5.0.0 integer-gmp-1.0.0.0 rts haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/base-4.9.0.0/base.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/base-4.9.0.0 --- name: integer-gmp version: 
1.0.0.0 id: integer-gmp-1.0.0.0 key: integer-gmp-1.0.0.0 license: BSD3 maintainer: hvr@gnu.org synopsis: Integer library based on GMP category: Numeric, Algebra author: Herbert Valerio Riedel exposed: True exposed-modules: GHC.Integer GHC.Integer.Logarithms GHC.Integer.Logarithms.Internals GHC.Integer.GMP.Internals hidden-modules: GHC.Integer.Type abi: 32980bb533b4a3996f3424fd198cf767 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/integer-gmp-1.0.0.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/integer-gmp-1.0.0.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/integer-gmp-1.0.0.0 hs-libraries: HSinteger-gmp-1.0.0.0 extra-libraries: gmp include-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/integer-gmp-1.0.0.0/include depends: ghc-prim-0.5.0.0 haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/integer-gmp-1.0.0.0/integer-gmp.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/integer-gmp-1.0.0.0 --- name: ghc-prim version: 0.5.0.0 id: ghc-prim-0.5.0.0 key: ghc-prim-0.5.0.0 license: BSD3 maintainer: libraries@haskell.org synopsis: GHC primitives description: GHC primitives. 
category: GHC exposed: True exposed-modules: GHC.CString GHC.Classes GHC.Debug GHC.IntWord64 GHC.Magic GHC.PrimopWrappers GHC.Tuple GHC.Types GHC.Prim abi: 9f5ec1125ba73d164ce53f7b537009e8 trusted: False import-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/ghc-prim-0.5.0.0 library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/ghc-prim-0.5.0.0 data-dir: /opt/ghc/head/share/x86_64-linux-ghc-7.11.20151213/ghc-prim-0.5.0.0 hs-libraries: HSghc-prim-0.5.0.0 depends: rts haddock-interfaces: /opt/ghc/head/share/doc/ghc/html/libraries/ghc-prim-0.5.0.0/ghc-prim.haddock haddock-html: /opt/ghc/head/share/doc/ghc/html/libraries/ghc-prim-0.5.0.0 --- name: rts version: 1.0 id: rts key: rts license: BSD3 maintainer: glasgow-haskell-users@haskell.org exposed: True abi: trusted: False library-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/rts hs-libraries: HSrts Cffi extra-libraries: m rt dl include-dirs: /opt/ghc/head/lib/ghc-7.11.20151213/include includes: Stg.h ld-options: "-Wl,-u,ghczmprim_GHCziTypes_Izh_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Czh_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Fzh_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Dzh_static_info" "-Wl,-u,base_GHCziPtr_Ptr_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Wzh_static_info" "-Wl,-u,base_GHCziInt_I8zh_static_info" "-Wl,-u,base_GHCziInt_I16zh_static_info" "-Wl,-u,base_GHCziInt_I32zh_static_info" "-Wl,-u,base_GHCziInt_I64zh_static_info" "-Wl,-u,base_GHCziWord_W8zh_static_info" "-Wl,-u,base_GHCziWord_W16zh_static_info" "-Wl,-u,base_GHCziWord_W32zh_static_info" "-Wl,-u,base_GHCziWord_W64zh_static_info" "-Wl,-u,base_GHCziStable_StablePtr_static_info" "-Wl,-u,ghczmprim_GHCziTypes_Izh_con_info" "-Wl,-u,ghczmprim_GHCziTypes_Czh_con_info" "-Wl,-u,ghczmprim_GHCziTypes_Fzh_con_info" "-Wl,-u,ghczmprim_GHCziTypes_Dzh_con_info" "-Wl,-u,base_GHCziPtr_Ptr_con_info" "-Wl,-u,base_GHCziPtr_FunPtr_con_info" "-Wl,-u,base_GHCziStable_StablePtr_con_info" "-Wl,-u,ghczmprim_GHCziTypes_False_closure" "-Wl,-u,ghczmprim_GHCziTypes_True_closure" 
"-Wl,-u,base_GHCziPack_unpackCString_closure" "-Wl,-u,base_GHCziIOziException_stackOverflow_closure" "-Wl,-u,base_GHCziIOziException_heapOverflow_closure" "-Wl,-u,base_ControlziExceptionziBase_nonTermination_closure" "-Wl,-u,base_GHCziIOziException_blockedIndefinitelyOnMVar_closure" "-Wl,-u,base_GHCziIOziException_blockedIndefinitelyOnSTM_closure" "-Wl,-u,base_GHCziIOziException_allocationLimitExceeded_closure" "-Wl,-u,base_ControlziExceptionziBase_nestedAtomically_closure" "-Wl,-u,base_GHCziEventziThread_blockedOnBadFD_closure" "-Wl,-u,base_GHCziWeak_runFinalizzerBatch_closure" "-Wl,-u,base_GHCziTopHandler_flushStdHandles_closure" "-Wl,-u,base_GHCziTopHandler_runIO_closure" "-Wl,-u,base_GHCziTopHandler_runNonIO_closure" "-Wl,-u,base_GHCziConcziIO_ensureIOManagerIsRunning_closure" "-Wl,-u,base_GHCziConcziIO_ioManagerCapabilitiesChanged_closure" "-Wl,-u,base_GHCziConcziSync_runSparks_closure" "-Wl,-u,base_GHCziConcziSignal_runHandlersPtr_closure" stack-2.15.7/tests/unit/Stack/Untar/test1.tar.gz0000644000000000000000000000042114502056216017545 0ustar0000000000000000꣌WMj0`s fFRȾ^AvɢXU"JZl? OMB5"8㚔V+C^ؐ1LJɩj?b4HW߼4k#nb;Yrs4a}!ڼ:sO]ŋce}ބŸ=}.k )?3;̜ză?zNFqk/Ƴf?;\(stack-2.15.7/tests/unit/Stack/Untar/test2.tar.gz0000644000000000000000000000055714502056216017560 0ustar0000000000000000꣌WAN0E)r lXr@S%)TIEyQ,+/V]~-MQVKh)I@cRF%!h kIzg:/9h_=Yo)ġ {w&_|7hG\my>r_Uxv{r'|yO1Q?};XqpO`96bOGc֘vM@VиBJ)^Ka`kLw9HA.߶a:k    ? 4(stack-2.15.7/cabal.project0000644000000000000000000000337614620153473013551 0ustar0000000000000000-- This file is a configuration file for Cabal (the tool). It is provided to -- assist some users of that tool to develop Stack. For information about -- `cabal.project` files, see: -- https://cabal.readthedocs.io/en/stable/cabal-project.html. -- -- For information about possible limitations of the `cabal.config` files -- corresponding to Stackage package sets that are made available by Stackage -- see: https://github.com/fpco/stackage-server/issues/232. 
-- -- `import:` is only available to users of Cabal (the tool) >= 3.8.1.0. -- -- The constraints in file `cabal.config` can be obtained by commanding: -- -- > stack ls dependencies cabal > cabal.config -- -- However, be aware that, in respect of the `unix` package or the `Win32` -- package (that may come with GHC, depending on the operating system): -- -- * on Windows, the Stack project does not depend on `unix` but depends on -- `Win32`; and -- -- * on non-Windows operating systems, the Stack project does not depend on -- `Win32` but depends on `unix`. -- -- The command above will add one of the two packages to `cabal.config` but omit -- the other. A comprehensive `cabal.config` will need to be created by editing -- the command's output. -- -- Also be aware that there may be other packages required only on non-Windows -- systems. For example, the `hinotify` package. -- -- Be sure to set `with-compiler: ghc-x.y.z` below to the version of GHC that is -- specified by the snapshot specifed in Stack's project-level YAML -- configuration file (`stack.yaml`). The relevant version of GHC can be -- confirmed by reviewing the snapshot on Stackage. For example, at: -- https://www.stackage.org/lts-22.21/cabal.config. -- with-compiler: ghc-9.6.5 import: cabal.config packages: . 
stack-2.15.7/cabal.config0000644000000000000000000001226714620154271013344 0ustar0000000000000000constraints: , Cabal ==3.10.3.0 , Cabal-syntax ==3.10.3.0 , Glob ==0.10.2 , OneTuple ==0.4.1.1 , QuickCheck ==2.14.3 , StateVar ==1.2.2 , Win32 ==2.13.3.0 , aeson ==2.1.2.1 , aeson-warning-parser ==0.1.1 , annotated-wl-pprint ==0.7.0 , ansi-terminal ==1.0.2 , ansi-terminal-types ==0.11.5 , appar ==0.1.8 , array ==0.5.6.0 , asn1-encoding ==0.9.6 , asn1-parse ==0.9.5 , asn1-types ==0.3.4 , assoc ==1.1 , async ==2.2.5 , attoparsec ==0.14.4 , attoparsec-aeson ==2.1.0.0 , attoparsec-iso8601 ==1.1.0.1 , auto-update ==0.1.6 , base ==4.18.2.1 , base-compat ==0.13.1 , base-compat-batteries ==0.13.1 , base-orphans ==0.9.2 , base16-bytestring ==1.0.2.0 , base64-bytestring ==1.2.1.0 , basement ==0.0.16 , bifunctors ==5.6.2 , binary ==0.8.9.1 , bitvec ==1.1.5.0 , blaze-builder ==0.4.2.3 , blaze-html ==0.9.2.0 , blaze-markup ==0.8.3.0 , byteorder ==1.0.4 , bytestring ==0.11.5.3 , casa-client ==0.0.2 , casa-types ==0.0.2 , case-insensitive ==1.2.1.0 , cereal ==0.5.8.3 , clock ==0.8.4 , cmdargs ==0.10.22 , colour ==2.3.6 , comonad ==5.0.8 , companion ==0.1.0 , conduit ==1.3.5 , conduit-combinators ==1.3.0 , conduit-extra ==1.3.6 , containers ==0.6.7 , contravariant ==1.5.5 , cookie ==0.4.6 , cryptohash-sha256 ==0.11.102.1 , crypton ==0.34 , crypton-conduit ==0.2.3 , crypton-connection ==0.3.2 , crypton-x509 ==1.7.6 , crypton-x509-store ==1.6.9 , crypton-x509-system ==1.6.7 , crypton-x509-validation ==1.6.12 , data-default-class ==0.1.2.0 , data-fix ==0.3.2 , deepseq ==1.4.8.1 , digest ==0.0.2.1 , directory ==1.3.8.4 , distributive ==0.6.2.1 , dlist ==1.0 , easy-file ==0.2.5 , echo ==0.1.4 , ed25519 ==0.0.5.0 , exceptions ==0.10.7 , extra ==1.7.14 , fast-logger ==3.2.2 , file-embed ==0.0.16.0 , filelock ==0.1.1.7 , filepath ==1.4.300.1 , fsnotify ==0.4.1.0 , generic-deriving ==1.14.5 , generically ==0.1.1 , ghc-bignum ==1.3 , ghc-boot ==9.6.5 , ghc-boot-th ==9.6.5 , ghc-prim ==0.10.0 , 
githash ==0.1.7.0 , hackage-security ==0.6.2.4 , hashable ==1.4.4.0 , hi-file-parser ==0.1.6.0 , hinotify ==0.4.1 , hourglass ==0.2.12 , hpack ==0.36.0 , hpc ==0.6.2.0 , http-api-data ==0.5.1 , http-client ==0.7.17 , http-client-tls ==0.3.6.3 , http-conduit ==2.3.8.3 , http-download ==0.2.1.0 , http-types ==0.12.4 , indexed-traversable ==0.1.3 , indexed-traversable-instances ==0.1.1.2 , infer-license ==0.2.0 , integer-conversion ==0.1.0.1 , integer-gmp ==1.1 , integer-logarithms ==1.0.3.1 , iproute ==1.7.12 , libyaml ==0.1.4 , libyaml-clib ==0.2.5 , lift-type ==0.1.1.1 , lifted-base ==0.2.3.12 , lukko ==0.1.1.3 , megaparsec ==9.5.0 , memory ==0.18.0 , microlens ==0.4.13.1 , microlens-mtl ==0.2.0.3 , microlens-th ==0.4.3.15 , mime-types ==0.1.2.0 , mintty ==0.1.4 , monad-control ==1.0.3.1 , monad-logger ==0.3.40 , monad-loops ==0.4.3 , mono-traversable ==1.0.17.0 , mtl ==2.3.1 , mtl-compat ==0.2.2 , mustache ==2.4.2 , neat-interpolation ==0.5.1.4 , network ==3.1.4.0 , network-uri ==2.6.4.2 , old-locale ==1.0.0.7 , old-time ==1.1.0.4 , open-browser ==0.2.1.0 , optparse-applicative ==0.18.1.0 , optparse-simple ==0.1.1.4 , os-string ==2.0.2.1 , pantry ==0.9.3.2 , parsec ==3.1.16.1 , parser-combinators ==1.3.0 , path ==0.9.5 , path-io ==1.8.1 , path-pieces ==0.2.1 , pem ==0.2.4 , persistent ==2.14.6.1 , persistent-sqlite ==2.13.3.0 , persistent-template ==2.12.0.0 , pretty ==1.1.3.6 , prettyprinter ==1.7.1 , prettyprinter-ansi-terminal ==1.1.3 , primitive ==0.8.0.0 , process ==1.6.19.0 , project-template ==0.2.1.0 , random ==1.2.1.2 , resource-pool ==0.4.0.0 , resourcet ==1.3.0 , retry ==0.9.3.1 , rio ==0.1.22.0 , rio-orphans ==0.1.2.0 , rio-prettyprint ==0.1.8.0 , rts ==1.0.2 , safe ==0.3.21 , safe-exceptions ==0.1.7.4 , scientific ==0.3.7.0 , semialign ==1.3 , semigroupoids ==6.0.1 , silently ==1.2.5.3 , socks ==0.6.1 , split ==0.2.5 , splitmix ==0.1.0.5 , stack ==2.15.7 , static-bytes ==0.1.0 , stm ==2.5.1.0 , stm-chans ==3.0.0.9 , streaming-commons ==0.2.2.6 , 
strict ==0.5 , tagged ==0.8.8 , tar ==0.5.1.1 , tar-conduit ==0.4.1 , template-haskell ==2.20.0.0 , temporary ==1.3 , text ==2.0.2 , text-metrics ==0.3.2 , text-short ==0.1.5 , th-abstraction ==0.5.0.0 , th-compat ==0.1.5 , th-lift ==0.8.4 , th-lift-instances ==0.1.20 , these ==1.2 , time ==1.12.2 , time-compat ==1.9.6.1 , tls ==1.8.0 , transformers ==0.6.1.0 , transformers-base ==0.4.6 , transformers-compat ==0.7.2 , typed-process ==0.2.11.1 , unix ==2.8.4.0 , unix-compat ==0.7.1 , unix-time ==0.4.12 , unliftio ==0.2.25.0 , unliftio-core ==0.2.1.0 , unordered-containers ==0.2.20 , uuid-types ==1.0.5.1 , vault ==0.3.1.5 , vector ==0.13.1.0 , vector-algorithms ==0.9.0.1 , vector-stream ==0.1.0.1 , witherable ==0.4.2 , yaml ==0.11.11.2 , zip-archive ==0.4.3.2 , zlib ==0.6.3.0 stack-2.15.7/LICENSE0000644000000000000000000000276114604306200012107 0ustar0000000000000000Copyright (c) 2015-2024, Stack contributors All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Stack nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL STACK CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. stack-2.15.7/Setup.hs0000644000000000000000000000725014620153445012545 0ustar0000000000000000module Main ( main ) where import Data.List ( nub, sortOn ) import Distribution.InstalledPackageInfo ( sourcePackageId, installedUnitId ) import Distribution.Package ( UnitId, packageVersion, packageName ) import Distribution.PackageDescription ( PackageDescription (), Executable (..) ) import Distribution.Pretty ( prettyShow ) import Distribution.Simple ( defaultMainWithHooks, UserHooks(..), simpleUserHooks ) import Distribution.Simple.BuildPaths ( autogenPackageModulesDir ) import Distribution.Simple.LocalBuildInfo ( installedPkgs, withLibLBI, withExeLBI, LocalBuildInfo () , ComponentLocalBuildInfo (componentPackageDeps) ) import Distribution.Simple.PackageIndex ( allPackages, dependencyClosure ) import Distribution.Simple.Setup ( BuildFlags (..), ReplFlags (..), fromFlag ) import Distribution.Simple.Utils ( rewriteFileEx, createDirectoryIfMissingVerbose ) import Distribution.Types.PackageName ( unPackageName ) import Distribution.Types.UnqualComponentName ( unUnqualComponentName ) import Distribution.Verbosity ( Verbosity, normal ) import System.FilePath ( () ) main :: IO () main = defaultMainWithHooks simpleUserHooks { buildHook = \pkg lbi hooks flags -> do generateBuildModule (fromFlag (buildVerbosity flags)) pkg lbi buildHook simpleUserHooks pkg lbi hooks flags -- The 'cabal repl' hook corresponds to the 'cabal build' hook and is added -- because, with a Cabal-based cradle, 
Haskell Language Server makes use of -- 'cabal repl'. , replHook = \pkg lbi hooks flags args -> do generateBuildModule (fromFlag (replVerbosity flags)) pkg lbi replHook simpleUserHooks pkg lbi hooks flags args } generateBuildModule :: Verbosity -> PackageDescription -> LocalBuildInfo -> IO () generateBuildModule verbosity pkg lbi = do let dir = autogenPackageModulesDir lbi createDirectoryIfMissingVerbose verbosity True dir withLibLBI pkg lbi $ \_ libcfg -> do withExeLBI pkg lbi $ \exe clbi -> rewriteFileEx normal (dir "Build_" ++ exeName' exe ++ ".hs") $ unlines [ "module Build_" ++ exeName' exe , " ( deps" , " ) where" , "" , "deps :: [String]" , "deps = " ++ show (formatdeps (transDeps libcfg clbi)) ] where exeName' = unUnqualComponentName . exeName formatdeps = map formatone . sortOn unPackageName' formatone p = unPackageName' p ++ "-" ++ prettyShow (packageVersion p) unPackageName' = unPackageName . packageName transDeps xs ys = either (map sourcePackageId . allPackages) handleDepClosureFailure $ dependencyClosure allInstPkgsIdx availInstPkgIds where allInstPkgsIdx = installedPkgs lbi allInstPkgIds = map installedUnitId $ allPackages allInstPkgsIdx -- instPkgIds includes `stack-X.X.X`, which is not a dependency hence is missing from allInstPkgsIdx. Filter that out. availInstPkgIds = filter (`elem` allInstPkgIds) $ testDeps xs ys handleDepClosureFailure unsatisfied = error $ "Computation of transitive dependencies failed." 
++ if null unsatisfied then "" else " Unresolved dependencies: " ++ show unsatisfied testDeps :: ComponentLocalBuildInfo -> ComponentLocalBuildInfo -> [UnitId] testDeps xs ys = map fst $ nub $ componentPackageDeps xs ++ componentPackageDeps ys stack-2.15.7/stack.cabal0000644000000000000000000005610314620205646013204 0ustar0000000000000000cabal-version: 2.0 name: stack version: 2.15.7 license: BSD3 license-file: LICENSE maintainer: manny@fpcomplete.com author: Commercial Haskell SIG homepage: http://haskellstack.org bug-reports: https://github.com/commercialhaskell/stack/issues synopsis: The Haskell Tool Stack description: Please see the documentation at for usage information. . If building a 'stack' executable for distribution, please download the source code from and build it using Stack itself in order to ensure identical behaviour to official binaries. This package on Hackage is provided for convenience and bootstrapping purposes. . Note that the API for the library is not currently stable, and may change significantly, even between minor releases. It is currently only intended for use by the executable. 
category: Development build-type: Custom extra-source-files: CONTRIBUTING.md ChangeLog.md README.md stack.yaml doc/azure_ci.md doc/build_command.md doc/build_overview.md doc/ChangeLog.md doc/CI.md doc/clean_command.md doc/config_command.md doc/CONTRIBUTING.md doc/custom_snapshot.md doc/debugging.md doc/dev_containers.md doc/developing_on_windows.md doc/docker_command.md doc/docker_integration.md doc/dot_command.md doc/editor_integration.md doc/environment_variables.md doc/eval_command.md doc/exec_command.md doc/faq.md doc/ghc_command.md doc/ghci.md doc/global_flags.md doc/glossary.md doc/GUIDE.md doc/GUIDE_advanced.md doc/hoogle_command.md doc/hpc_command.md doc/ide_command.md doc/init_command.md doc/install_and_upgrade.md doc/list_command.md doc/lock_files.md doc/ls_command.md doc/new_command.md doc/nix_integration.md doc/nonstandard_project_init.md doc/other_resources.md doc/pantry.md doc/path_command.md doc/purge_command.md doc/query_command.md doc/README.md doc/run_command.md doc/runghc_command.md doc/script_command.md doc/scripts.md doc/sdist_command.md doc/setup_command.md doc/shell_autocompletion.md doc/SIGNING_KEY.md doc/Stack_and_VS_Code.md doc/stack_root.md doc/stack_work.md doc/stack_yaml_vs_cabal_package_file.md doc/templates_command.md doc/travis_ci.md doc/uninstall_command.md doc/unpack_command.md doc/update_command.md doc/upgrade_command.md doc/upload_command.md doc/yaml_configuration.md src/setup-shim/StackSetupShim.hs tests/unit/package-dump/ghc-7.10.txt tests/unit/package-dump/ghc-7.8.4-osx.txt tests/unit/package-dump/ghc-7.8.txt tests/unit/package-dump/ghc-head.txt tests/unit/Stack/Untar/test1.tar.gz tests/unit/Stack/Untar/test2.tar.gz cabal.project cabal.config source-repository head type: git location: https://github.com/commercialhaskell/stack custom-setup setup-depends: Cabal >=3.10.3.0 && <3.12, base >=4.14.3.0 && <5, filepath >=1.4.300.1 flag developer-mode description: By default, output extra developer information. 
default: False manual: True flag disable-git-info description: Disable inclusion of current Git information in the Stack executable when it is built. default: False manual: True flag disable-stack-upload description: For use only during development and debugging. Disable 'stack upload' so that it does not make HTTP requests. Stack will output information about the HTTP request(s) that it would have made if the command was enabled. default: False manual: True flag hide-dependency-versions description: Hides dependency versions from 'stack --version'. Used only when building a Stack executable for official release. Note to packagers/distributors: DO NOT OVERRIDE THIS FLAG IF YOU ARE BUILDING STACK ANY OTHER WAY (e.g. using Cabal or from Hackage), as it makes debugging support requests more difficult. default: False manual: True flag integration-tests description: Run the integration test suite. default: False manual: True flag static description: When building the Stack executable, or the stack-integration-test executable, pass the -static and -pthread flags to the linker used by GHC. default: False manual: True flag supported-build description: If false, causes 'stack --version' to issue a warning about the build being unsupported. Used only when building a Stack executable for official release. Note to packagers/distributors: DO NOT OVERRIDE THIS FLAG IF YOU ARE BUILDING STACK ANY OTHER WAY (e.g. using Cabal or from Hackage), as it makes debugging support requests more difficult. 
default: False manual: True library exposed-modules: Codec.Archive.Tar.Utf8 Control.Concurrent.Execute Data.Attoparsec.Args Data.Attoparsec.Combinators Data.Attoparsec.Interpreter Data.Monoid.Map GHC.Utils.GhcPkg.Main.Compat Network.HTTP.StackClient Options.Applicative.Args Options.Applicative.Builder.Extra Options.Applicative.Complicated Path.CheckInstall Path.Extended Path.Extra Path.Find Stack Stack.Build Stack.Build.Cache Stack.Build.ConstructPlan Stack.Build.Execute Stack.Build.ExecuteEnv Stack.Build.ExecutePackage Stack.Build.Haddock Stack.Build.Installed Stack.Build.Source Stack.Build.Target Stack.BuildInfo Stack.BuildOpts Stack.BuildPlan Stack.CLI Stack.Clean Stack.Component Stack.ComponentFile Stack.Config Stack.Config.Build Stack.Config.ConfigureScript Stack.Config.Docker Stack.Config.Nix Stack.ConfigCmd Stack.Constants Stack.Constants.Config Stack.Constants.StackProgName Stack.Coverage Stack.DefaultColorWhen Stack.DependencyGraph Stack.Docker Stack.DockerCmd Stack.Dot Stack.Eval Stack.Exec Stack.FileWatch Stack.GhcPkg Stack.Ghci Stack.Ghci.Script Stack.Hoogle Stack.IDE Stack.Init Stack.List Stack.Ls Stack.Lock Stack.New Stack.Nix Stack.Options.BenchParser Stack.Options.BuildMonoidParser Stack.Options.BuildParser Stack.Options.CleanParser Stack.Options.ConfigParser Stack.Options.Completion Stack.Options.DockerParser Stack.Options.DotParser Stack.Options.EvalParser Stack.Options.ExecParser Stack.Options.GhcBuildParser Stack.Options.GhciParser Stack.Options.GhcVariantParser Stack.Options.GlobalParser Stack.Options.HaddockParser Stack.Options.HpcReportParser Stack.Options.InitParser Stack.Options.LogLevelParser Stack.Options.LsParser Stack.Options.NewParser Stack.Options.NixParser Stack.Options.PackageParser Stack.Options.PathParser Stack.Options.ResolverParser Stack.Options.SDistParser Stack.Options.ScriptParser Stack.Options.SetupParser Stack.Options.TestParser Stack.Options.UnpackParser Stack.Options.UpgradeParser Stack.Options.UploadParser 
Stack.Options.Utils Stack.Package Stack.PackageDump Stack.PackageFile Stack.Path Stack.Prelude Stack.Query Stack.Runners Stack.Script Stack.SDist Stack.Setup Stack.Setup.Installed Stack.SetupCmd Stack.SourceMap Stack.Storage.Project Stack.Storage.User Stack.Storage.Util Stack.Templates Stack.Types.AddCommand Stack.Types.AllowNewerDeps Stack.Types.ApplyGhcOptions Stack.Types.ApplyProgOptions Stack.Types.Build Stack.Types.Build.ConstructPlan Stack.Types.Build.Exception Stack.Types.BuildConfig Stack.Types.BuildOpts Stack.Types.BuildOptsCLI Stack.Types.BuildOptsMonoid Stack.Types.CabalConfigKey Stack.Types.Cache Stack.Types.Casa Stack.Types.ColorWhen Stack.Types.CompCollection Stack.Types.CompilerBuild Stack.Types.CompilerPaths Stack.Types.Compiler Stack.Types.Component Stack.Types.ComponentUtils Stack.Types.Config Stack.Types.Config.Exception Stack.Types.ConfigMonoid Stack.Types.ConfigureOpts Stack.Types.Curator Stack.Types.Dependency Stack.Types.DependencyTree Stack.Types.Docker Stack.Types.DockerEntrypoint Stack.Types.DotConfig Stack.Types.DotOpts Stack.Types.DownloadInfo Stack.Types.DumpLogs Stack.Types.DumpPackage Stack.Types.EnvConfig Stack.Types.EnvSettings Stack.Types.ExtraDirs Stack.Types.FileDigestCache Stack.Types.GHCDownloadInfo Stack.Types.GHCVariant Stack.Types.GhcOptionKey Stack.Types.GhcOptions Stack.Types.GhcPkgId Stack.Types.GlobalOpts Stack.Types.GlobalOptsMonoid Stack.Types.Installed Stack.Types.IsMutable Stack.Types.LockFileBehavior Stack.Types.NamedComponent Stack.Types.Nix Stack.Types.Package Stack.Types.PackageFile Stack.Types.PackageName Stack.Types.ParentMap Stack.Types.Platform Stack.Types.Project Stack.Types.ProjectAndConfigMonoid Stack.Types.ProjectConfig Stack.Types.PvpBounds Stack.Types.Resolver Stack.Types.Runner Stack.Types.SCM Stack.Types.SetupInfo Stack.Types.SourceMap Stack.Types.StackYamlLoc Stack.Types.Storage Stack.Types.TemplateName Stack.Types.UnusedFlags Stack.Types.Version Stack.Types.VersionedDownloadInfo Stack.Uninstall 
Stack.Unpack Stack.Update Stack.Upgrade Stack.Upload System.Info.ShortPathName System.Permissions System.Process.Pager System.Terminal Build_stack Paths_stack hs-source-dirs: src autogen-modules: Build_stack Paths_stack default-language: GHC2021 ghc-options: -fwrite-ide-info -hiedir=.hie -Wall -Wmissing-export-lists -optP-Wno-nonportable-include-path -Widentities build-depends: Cabal >=3.8.1.0, aeson >=2.0.3.0, aeson-warning-parser >=0.1.1, ansi-terminal >=1.0.2, array >=0.5.6.0, async >=2.2.5, attoparsec >=0.14.4, base >=4.16.0.0 && <5, base64-bytestring >=1.2.1.0, bytestring >=0.11.5.3, casa-client >=0.0.2, companion >=0.1.0, conduit >=1.3.5, conduit-extra >=1.3.6, containers >=0.6.7, crypton >=0.34, directory >=1.3.8.4, echo >=0.1.4, exceptions >=0.10.7, extra >=1.7.14, file-embed >=0.0.16.0, filelock >=0.1.1.7, filepath >=1.4.300.1, fsnotify >=0.4.1, generic-deriving >=1.14.5, ghc-boot >=9.6.5, hi-file-parser >=0.1.6.0, hpack >=0.36.0, hpc >=0.6.2.0, http-client >=0.7.17, http-client-tls >=0.3.6.2, http-conduit >=2.3.8.3, http-download >=0.2.1.0, http-types >=0.12.4, memory >=0.18.0, microlens >=0.4.13.1, mtl >=2.3.1, mustache >=2.4.2, neat-interpolation >=0.5.1.4, open-browser >=0.2.1.0, optparse-applicative >=0.18.1.0, pantry >=0.9.3.2 && <0.10.0, path >=0.9.5, path-io >=1.8.1, persistent >=2.14.0.0 && <2.15, persistent-sqlite >=2.13.3.0, pretty >=1.1.3.6, process >=1.6.13.2, project-template >=0.2.1.0, random >=1.2.1.2, rio >=0.1.22.0, rio-prettyprint >=0.1.8.0, split >=0.2.5, stm >=2.5.1.0, tar >=0.5.1.1, template-haskell >=2.20.0.0, text >=2.0.2, time >=1.12.2, transformers >=0.6.1.0, unix-compat >=0.7.1, unordered-containers >=0.2.20, vector >=0.13.1.0, yaml >=0.11.11.2, zlib >=0.6.3.0 if os(windows) cpp-options: -DWINDOWS build-depends: Win32 >=2.13.3.0 else build-tool-depends: hsc2hs:hsc2hs build-depends: unix if (impl(ghc >=9.4.5) && os(windows)) build-depends: network >=3.1.2.9 if flag(developer-mode) cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=True 
else cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=False if flag(disable-stack-upload) cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=True else cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=False if os(windows) hs-source-dirs: src/windows/ other-modules: Stack.Constants.UsrLibDirs Stack.Docker.Handlers System.Posix.User System.Uname else c-sources: src/unix/cbits/uname.c hs-source-dirs: src/unix/ other-modules: Stack.Constants.UsrLibDirs Stack.Docker.Handlers System.Uname if !flag(disable-git-info) cpp-options: -DUSE_GIT_INFO build-depends: githash >=0.1.7.0, optparse-simple >=0.1.1.4 if flag(hide-dependency-versions) cpp-options: -DHIDE_DEP_VERSIONS if flag(supported-build) cpp-options: -DSUPPORTED_BUILD executable stack main-is: Main.hs hs-source-dirs: app other-modules: Paths_stack autogen-modules: Paths_stack default-language: GHC2021 ghc-options: -fwrite-ide-info -hiedir=.hie -Wall -Wmissing-export-lists -optP-Wno-nonportable-include-path -threaded -rtsopts build-depends: Cabal >=3.8.1.0, aeson >=2.0.3.0, aeson-warning-parser >=0.1.1, ansi-terminal >=1.0.2, array >=0.5.6.0, async >=2.2.5, attoparsec >=0.14.4, base >=4.16.0.0 && <5, base64-bytestring >=1.2.1.0, bytestring >=0.11.5.3, casa-client >=0.0.2, companion >=0.1.0, conduit >=1.3.5, conduit-extra >=1.3.6, containers >=0.6.7, crypton >=0.34, directory >=1.3.8.4, echo >=0.1.4, exceptions >=0.10.7, extra >=1.7.14, file-embed >=0.0.16.0, filelock >=0.1.1.7, filepath >=1.4.300.1, fsnotify >=0.4.1, generic-deriving >=1.14.5, ghc-boot >=9.6.5, hi-file-parser >=0.1.6.0, hpack >=0.36.0, hpc >=0.6.2.0, http-client >=0.7.17, http-client-tls >=0.3.6.2, http-conduit >=2.3.8.3, http-download >=0.2.1.0, http-types >=0.12.4, memory >=0.18.0, microlens >=0.4.13.1, mtl >=2.3.1, mustache >=2.4.2, neat-interpolation >=0.5.1.4, open-browser >=0.2.1.0, optparse-applicative >=0.18.1.0, pantry >=0.9.3.2 && <0.10.0, path >=0.9.5, path-io >=1.8.1, persistent >=2.14.0.0 && <2.15, persistent-sqlite >=2.13.3.0, pretty >=1.1.3.6, process 
>=1.6.13.2, project-template >=0.2.1.0, random >=1.2.1.2, rio >=0.1.22.0, rio-prettyprint >=0.1.8.0, split >=0.2.5, stack, stm >=2.5.1.0, tar >=0.5.1.1, template-haskell >=2.20.0.0, text >=2.0.2, time >=1.12.2, transformers >=0.6.1.0, unix-compat >=0.7.1, unordered-containers >=0.2.20, vector >=0.13.1.0, yaml >=0.11.11.2, zlib >=0.6.3.0 if os(windows) cpp-options: -DWINDOWS build-depends: Win32 >=2.13.3.0 else build-tool-depends: hsc2hs:hsc2hs build-depends: unix if (impl(ghc >=9.4.5) && os(windows)) build-depends: network >=3.1.2.9 if flag(developer-mode) cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=True else cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=False if flag(disable-stack-upload) cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=True else cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=False if flag(static) ld-options: -static -pthread executable stack-integration-test main-is: IntegrationSpec.hs hs-source-dirs: tests/integration tests/integration/lib other-modules: StackTest Paths_stack autogen-modules: Paths_stack default-language: GHC2021 ghc-options: -fwrite-ide-info -hiedir=.hie -Wall -Wmissing-export-lists -optP-Wno-nonportable-include-path -threaded -rtsopts -with-rtsopts=-N build-depends: Cabal >=3.8.1.0, aeson >=2.0.3.0, aeson-warning-parser >=0.1.1, ansi-terminal >=1.0.2, array >=0.5.6.0, async >=2.2.5, attoparsec >=0.14.4, base >=4.16.0.0 && <5, base64-bytestring >=1.2.1.0, bytestring >=0.11.5.3, casa-client >=0.0.2, companion >=0.1.0, conduit >=1.3.5, conduit-extra >=1.3.6, containers >=0.6.7, crypton >=0.34, directory >=1.3.8.4, echo >=0.1.4, exceptions >=0.10.7, extra >=1.7.14, file-embed >=0.0.16.0, filelock >=0.1.1.7, filepath >=1.4.300.1, fsnotify >=0.4.1, generic-deriving >=1.14.5, ghc-boot >=9.6.5, hi-file-parser >=0.1.6.0, hpack >=0.36.0, hpc >=0.6.2.0, hspec >=2.11.8, http-client >=0.7.17, http-client-tls >=0.3.6.2, http-conduit >=2.3.8.3, http-download >=0.2.1.0, http-types >=0.12.4, memory >=0.18.0, microlens >=0.4.13.1, mtl >=2.3.1, mustache 
>=2.4.2, neat-interpolation >=0.5.1.4, open-browser >=0.2.1.0, optparse-applicative >=0.18.1.0, optparse-generic >=1.5.2, pantry >=0.9.3.2 && <0.10.0, path >=0.9.5, path-io >=1.8.1, persistent >=2.14.0.0 && <2.15, persistent-sqlite >=2.13.3.0, pretty >=1.1.3.6, process >=1.6.13.2, project-template >=0.2.1.0, random >=1.2.1.2, rio >=0.1.22.0, rio-prettyprint >=0.1.8.0, split >=0.2.5, stm >=2.5.1.0, tar >=0.5.1.1, template-haskell >=2.20.0.0, text >=2.0.2, time >=1.12.2, transformers >=0.6.1.0, unix-compat >=0.7.1, unordered-containers >=0.2.20, vector >=0.13.1.0, yaml >=0.11.11.2, zlib >=0.6.3.0 if os(windows) cpp-options: -DWINDOWS build-depends: Win32 >=2.13.3.0 else build-tool-depends: hsc2hs:hsc2hs build-depends: unix if (impl(ghc >=9.4.5) && os(windows)) build-depends: network >=3.1.2.9 if flag(developer-mode) cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=True else cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=False if flag(disable-stack-upload) cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=True else cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=False if !flag(integration-tests) buildable: False if flag(static) ld-options: -static -pthread test-suite stack-unit-test type: exitcode-stdio-1.0 main-is: Spec.hs build-tool-depends: hspec-discover:hspec-discover hs-source-dirs: tests/unit other-modules: Stack.ArgsSpec Stack.Build.ExecuteSpec Stack.Build.TargetSpec Stack.Config.DockerSpec Stack.ConfigSpec Stack.DotSpec Stack.Ghci.ScriptSpec Stack.GhciSpec Stack.LockSpec Stack.NixSpec Stack.PackageDumpSpec Stack.Types.TemplateNameSpec Stack.UploadSpec Paths_stack autogen-modules: Paths_stack default-language: GHC2021 ghc-options: -fwrite-ide-info -hiedir=.hie -Wall -Wmissing-export-lists -optP-Wno-nonportable-include-path -threaded build-depends: Cabal >=3.8.1.0, QuickCheck >=2.14.3, aeson >=2.0.3.0, aeson-warning-parser >=0.1.1, ansi-terminal >=1.0.2, array >=0.5.6.0, async >=2.2.5, attoparsec >=0.14.4, base >=4.16.0.0 && <5, base64-bytestring >=1.2.1.0, bytestring >=0.11.5.3, 
casa-client >=0.0.2, companion >=0.1.0, conduit >=1.3.5, conduit-extra >=1.3.6, containers >=0.6.7, crypton >=0.34, directory >=1.3.8.4, echo >=0.1.4, exceptions >=0.10.7, extra >=1.7.14, file-embed >=0.0.16.0, filelock >=0.1.1.7, filepath >=1.4.300.1, fsnotify >=0.4.1, generic-deriving >=1.14.5, ghc-boot >=9.6.5, hi-file-parser >=0.1.6.0, hpack >=0.36.0, hpc >=0.6.2.0, hspec >=2.11.8, http-client >=0.7.17, http-client-tls >=0.3.6.2, http-conduit >=2.3.8.3, http-download >=0.2.1.0, http-types >=0.12.4, memory >=0.18.0, microlens >=0.4.13.1, mtl >=2.3.1, mustache >=2.4.2, neat-interpolation >=0.5.1.4, open-browser >=0.2.1.0, optparse-applicative >=0.18.1.0, pantry >=0.9.3.2 && <0.10.0, path >=0.9.5, path-io >=1.8.1, persistent >=2.14.0.0 && <2.15, persistent-sqlite >=2.13.3.0, pretty >=1.1.3.6, process >=1.6.13.2, project-template >=0.2.1.0, random >=1.2.1.2, raw-strings-qq >=1.1, rio >=0.1.22.0, rio-prettyprint >=0.1.8.0, split >=0.2.5, stack, stm >=2.5.1.0, tar >=0.5.1.1, template-haskell >=2.20.0.0, text >=2.0.2, time >=1.12.2, transformers >=0.6.1.0, unix-compat >=0.7.1, unordered-containers >=0.2.20, vector >=0.13.1.0, yaml >=0.11.11.2, zlib >=0.6.3.0 if os(windows) cpp-options: -DWINDOWS build-depends: Win32 >=2.13.3.0 else build-tool-depends: hsc2hs:hsc2hs build-depends: unix if (impl(ghc >=9.4.5) && os(windows)) build-depends: network >=3.1.2.9 if flag(developer-mode) cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=True else cpp-options: -DSTACK_DEVELOPER_MODE_DEFAULT=False if flag(disable-stack-upload) cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=True else cpp-options: -DSTACK_DISABLE_STACK_UPLOAD=False if os(windows) hs-source-dirs: tests/unit/windows/ other-modules: Stack.Ghci.FakePaths else hs-source-dirs: tests/unit/unix/ other-modules: Stack.Ghci.FakePaths