rio-0.1.22.0/src/RIO.hs

module RIO
  ( -- * Custom @Prelude@
    -- | One of the core features of @rio@ is that it can be used as a @Prelude@
    -- replacement. Therefore it is best to disable the default `Prelude` with the
    -- [NoImplicitPrelude](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/glasgow_exts.html#extension-NoImplicitPrelude)
    -- pragma:
    --
    -- > {-# LANGUAGE NoImplicitPrelude #-}
    -- > import RIO
    --
    -- Some functions not exported here can be found in "RIO.Partial":
    -- @fromJust@, @read@, @toEnum@, @pred@, @succ@.
    --
    module RIO.Prelude
  , module RIO.Prelude.Types
    -- * The @RIO@ Monad
  , module MonadRIO
    -- ** @SimpleApp@
    -- | If all you need is just some default environment that does basic logging and allows
    -- spawning processes, then you can use `SimpleApp`:
    --
    -- > {-# LANGUAGE OverloadedStrings #-}
    -- > module Main where
    -- >
    -- > main :: IO ()
    -- > main =
    -- >   runSimpleApp $ do
    -- >     logInfo "Hello World!"
    --
    -- Note the
    -- [OverloadedStrings](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/glasgow_exts.html#extension-OverloadedStrings)
    -- extension, which is enabled to simplify logging.
  , module RIO.Prelude.Simple
    -- * @MonadIO@ and @MonadUnliftIO@
  , module Control.Monad.IO.Unlift
    -- * Logger
    -- $logging-intro
  , module RIO.Prelude.Logger
    -- * Display
  , module RIO.Prelude.Display
    -- * Optics
    -- | @microlens@-based Lenses, Traversals, etc.
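    --
    -- A brief usage sketch (our own illustration, not from the upstream
    -- documentation), assuming a hypothetical @App@ record with an @appName@
    -- field; 'lens', 'Lens'', and 'view' are among the re-exports:
    --
    -- > data App = App { appName :: !Text }
    -- >
    -- > appNameL :: Lens' App Text
    -- > appNameL = lens appName (\x y -> x { appName = y })
    -- >
    -- > -- Read the field from the RIO environment via the lens.
    -- > getName :: RIO App Text
    -- > getName = view appNameL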
, module RIO.Prelude.Lens -- * Concurrency , UnliftIO.Concurrent.ThreadId , UnliftIO.Concurrent.myThreadId , UnliftIO.Concurrent.isCurrentThreadBound , UnliftIO.Concurrent.threadWaitRead , UnliftIO.Concurrent.threadWaitWrite , UnliftIO.Concurrent.threadDelay , RIO.Prelude.Renames.yieldThread -- ** Async , module UnliftIO.Async -- ** STM , module UnliftIO.STM -- ** Chan , module UnliftIO.Chan -- ** Timeout , module UnliftIO.Timeout -- * Exceptions , module UnliftIO.Exception -- | Re-exported from "Control.Monad.Catch": , Control.Monad.Catch.throwM -- * Files and handles , module UnliftIO.IO , module UnliftIO.Temporary , module RIO.Prelude.IO -- * Exit , module RIO.Prelude.Exit -- * Mutable Variables -- ** SomeRef , module SomeRef -- ** URef , module RIO.Prelude.URef -- ** IORef , module UnliftIO.IORef -- ** MVar , module UnliftIO.MVar -- ** QSem , module UnliftIO.QSem -- ** QSemN , module UnliftIO.QSemN -- ** Memoize , module UnliftIO.Memoize -- ** Deque , module RIO.Deque -- * Debugging , module RIO.Prelude.Trace ) where import qualified Control.Monad.Catch (MonadThrow(..)) import RIO.Deque import RIO.Prelude import RIO.Prelude.Display import RIO.Prelude.Exit import RIO.Prelude.Extra import RIO.Prelude.IO import RIO.Prelude.Lens import RIO.Prelude.Logger import RIO.Prelude.Renames import RIO.Prelude.RIO as MonadRIO (RIO(..), liftRIO, runRIO) import RIO.Prelude.RIO as SomeRef hiding (RIO(..), liftRIO, runRIO) import RIO.Prelude.Simple import RIO.Prelude.Text import RIO.Prelude.Trace import RIO.Prelude.Types import RIO.Prelude.URef import Control.Monad.IO.Unlift import UnliftIO.Async import UnliftIO.Chan import UnliftIO.Exception import UnliftIO.IO import UnliftIO.IORef import UnliftIO.Memoize import UnliftIO.MVar import UnliftIO.QSem import UnliftIO.QSemN import UnliftIO.STM import UnliftIO.Temporary import UnliftIO.Timeout import UnliftIO.Concurrent -------------------------------------------------------------------------------- -- $logging-intro -- -- The logging system in RIO is built upon "log functions", which are -- accessed in RIO's environment via a class like "has log -- function". There are two provided: -- -- * In the common case: for logging plain text (via 'Utf8Builder') -- efficiently, there is 'LogFunc', which can be created via -- 'withLogFunc', and is accessed via 'HasLogFunc'. This provides -- all the classical logging facilities: timestamped text output -- with log levels and colors (if terminal-supported) to the -- terminal. We log output via 'logInfo', 'logDebug', etc. -- -- * In the advanced case: where logging takes on a more semantic -- meaning and the logs need to be digested, acted upon, translated -- or serialized upstream (to e.g. a JSON logging server), we have -- 'GLogFunc' (as in "generic log function"), and is accessed via -- 'HasGLogFunc'. In this case, we log output via 'glog'. See the -- Type-generic logger section for more information. rio-0.1.22.0/src/RIO/ByteString.hs0000644000000000000000000000607414231470023014562 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Strict @ByteString@. Import as: -- -- > import qualified RIO.ByteString as B -- -- This module does not export any partial functions. 
For those, see -- "RIO.ByteString.Partial" module RIO.ByteString ( module Data.ByteString , module RIO.ByteString ) where import Data.ByteString hiding (head, last, tail, init, foldl1, foldl1', foldr1, foldr1', maximum, minimum, findSubstring, findSubstrings, packCString, packCStringLen, useAsCString, useAsCStringLen, getLine, getContents, putStr, putStrLn, interact, readFile, writeFile, appendFile, hGetLine, hGetContents, hGet, hGetSome, hGetNonBlocking, hPut, hPutNonBlocking, hPutStr, hPutStrLn, breakByte) import qualified Data.ByteString as B import RIO import Foreign.C.String (CString, CStringLen) -- | Lifted 'B.packCString' packCString :: MonadIO m => CString -> m ByteString packCString = liftIO . B.packCString -- | Lifted 'B.packCStringLen' packCStringLen :: MonadIO m => CStringLen -> m ByteString packCStringLen = liftIO . B.packCStringLen -- | Unlifted 'B.useAsCString' useAsCString :: MonadUnliftIO m => ByteString -> (CString -> m a) -> m a useAsCString bs inner = withRunInIO $ \run -> B.useAsCString bs $ run . inner -- | Unlifted 'B.useAsCStringLen' useAsCStringLen :: MonadUnliftIO m => ByteString -> (CStringLen -> m a) -> m a useAsCStringLen bs inner = withRunInIO $ \run -> B.useAsCStringLen bs $ run . inner -- | Lifted 'B.getLine' getLine :: MonadIO m => m ByteString getLine = liftIO B.getLine -- | Lifted 'B.getContents' getContents :: MonadIO m => m ByteString getContents = liftIO B.getContents -- | Lifted 'B.putStr' putStr :: MonadIO m => ByteString -> m () putStr = liftIO . B.putStr -- | Lifted 'B.interact' interact :: MonadIO m => (ByteString -> ByteString) -> m () interact = liftIO . B.interact -- | Lifted 'B.readFile' readFile :: MonadIO m => FilePath -> m ByteString readFile = liftIO . B.readFile -- | Lifted 'B.writeFile' writeFile :: MonadIO m => FilePath -> ByteString -> m () writeFile fp = liftIO . B.writeFile fp -- | Lifted 'B.appendFile' appendFile :: MonadIO m => FilePath -> ByteString -> m () appendFile fp = liftIO . B.appendFile fp -- | Lifted 'B.hGetLine' hGetLine :: MonadIO m => Handle -> m ByteString hGetLine = liftIO . B.hGetLine -- | Lifted 'B.hGetContents' hGetContents :: MonadIO m => Handle -> m ByteString hGetContents = liftIO . B.hGetContents -- | Lifted 'B.hGet' hGet :: MonadIO m => Handle -> Int -> m ByteString hGet h = liftIO . B.hGet h -- | Lifted 'B.hGetSome' hGetSome :: MonadIO m => Handle -> Int -> m ByteString hGetSome h = liftIO . B.hGetSome h -- | Lifted 'B.hGetNonBlocking' hGetNonBlocking :: MonadIO m => Handle -> Int -> m ByteString hGetNonBlocking h = liftIO . B.hGetNonBlocking h -- | Lifted 'B.hPut' hPut :: MonadIO m => Handle -> ByteString -> m () hPut h = liftIO . B.hPut h -- | Lifted 'B.hPutNonBlocking' hPutNonBlocking :: MonadIO m => Handle -> ByteString -> m ByteString hPutNonBlocking h = liftIO . B.hPutNonBlocking h -- | Lifted 'B.hPutStr' hPutStr :: MonadIO m => Handle -> ByteString -> m () hPutStr h = liftIO . B.hPutStr h rio-0.1.22.0/src/RIO/ByteString/Lazy.hs0000644000000000000000000001351214231470023015474 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Lazy @ByteString@. Import as: -- -- > import qualified RIO.ByteString.Lazy as BL -- -- This module does not export any partial functions. 
For those, see -- "RIO.ByteString.Lazy.Partial" module RIO.ByteString.Lazy ( -- * The @ByteString@ type Data.ByteString.Lazy.ByteString -- * Introducing and eliminating 'ByteString's , Data.ByteString.Lazy.empty , Data.ByteString.Lazy.singleton , Data.ByteString.Lazy.pack , Data.ByteString.Lazy.unpack , Data.ByteString.Lazy.fromStrict , Data.ByteString.Lazy.toStrict , Data.ByteString.Lazy.fromChunks , Data.ByteString.Lazy.toChunks , Data.ByteString.Lazy.foldrChunks , Data.ByteString.Lazy.foldlChunks -- * Basic interface , Data.ByteString.Lazy.cons , Data.ByteString.Lazy.cons' , Data.ByteString.Lazy.snoc , Data.ByteString.Lazy.append , Data.ByteString.Lazy.uncons , Data.ByteString.Lazy.unsnoc , Data.ByteString.Lazy.null , Data.ByteString.Lazy.length -- * Transforming ByteStrings , Data.ByteString.Lazy.map , Data.ByteString.Lazy.reverse , Data.ByteString.Lazy.intersperse , Data.ByteString.Lazy.intercalate , Data.ByteString.Lazy.transpose -- * Reducing 'ByteString's (folds) , Data.ByteString.Lazy.foldl , Data.ByteString.Lazy.foldl' , Data.ByteString.Lazy.foldr -- ** Special folds , Data.ByteString.Lazy.concat , Data.ByteString.Lazy.concatMap , Data.ByteString.Lazy.any , Data.ByteString.Lazy.all -- * Building ByteStrings -- ** Scans , Data.ByteString.Lazy.scanl -- ** Accumulating maps , Data.ByteString.Lazy.mapAccumL , Data.ByteString.Lazy.mapAccumR -- ** Infinite ByteStrings , Data.ByteString.Lazy.repeat , Data.ByteString.Lazy.replicate , Data.ByteString.Lazy.cycle , Data.ByteString.Lazy.iterate -- ** Unfolding ByteStrings , Data.ByteString.Lazy.unfoldr -- * Substrings -- ** Breaking strings , Data.ByteString.Lazy.take , Data.ByteString.Lazy.drop , Data.ByteString.Lazy.splitAt , Data.ByteString.Lazy.takeWhile , Data.ByteString.Lazy.dropWhile , Data.ByteString.Lazy.span , Data.ByteString.Lazy.break , Data.ByteString.Lazy.group , Data.ByteString.Lazy.groupBy , Data.ByteString.Lazy.inits , Data.ByteString.Lazy.tails , Data.ByteString.Lazy.stripPrefix , Data.ByteString.Lazy.stripSuffix -- ** Breaking into many substrings , Data.ByteString.Lazy.split , Data.ByteString.Lazy.splitWith -- * Predicates , Data.ByteString.Lazy.isPrefixOf , Data.ByteString.Lazy.isSuffixOf -- * Search ByteStrings -- ** Searching by equality , Data.ByteString.Lazy.elem , Data.ByteString.Lazy.notElem -- ** Searching with a predicate , Data.ByteString.Lazy.find , Data.ByteString.Lazy.filter , Data.ByteString.Lazy.partition -- * Indexing ByteStrings , Data.ByteString.Lazy.index , Data.ByteString.Lazy.elemIndex , Data.ByteString.Lazy.elemIndexEnd , Data.ByteString.Lazy.elemIndices , Data.ByteString.Lazy.findIndex , Data.ByteString.Lazy.findIndices , Data.ByteString.Lazy.count -- * Zipping and unzipping ByteStrings , Data.ByteString.Lazy.zip , Data.ByteString.Lazy.zipWith , Data.ByteString.Lazy.unzip -- * Low level conversions -- ** Copying ByteStrings , Data.ByteString.Lazy.copy -- * I\/O with 'ByteString's -- ** Standard input and output , getContents , putStr , putStrLn , interact -- ** Files , readFile , writeFile , appendFile -- ** I\/O with Handles , hGetContents , hGet , hGetNonBlocking , hPut , hPutNonBlocking , hPutStr ) where import Data.ByteString.Lazy hiding ( getContents , putStr , putStrLn , interact , readFile , writeFile , appendFile , hGetContents , hGet , hGetNonBlocking , hPut , hPutNonBlocking , hPutStr ) import qualified Data.ByteString.Lazy import qualified Data.ByteString.Lazy.Char8 import RIO -- | Lifted 'Data.ByteString.Lazy.getContents' getContents :: MonadIO m => m LByteString getContents = liftIO 
Data.ByteString.Lazy.getContents -- | Lifted 'Data.ByteString.Lazy.putStr' putStr :: MonadIO m => LByteString -> m () putStr = liftIO . Data.ByteString.Lazy.putStr -- | Lifted 'Data.ByteString.Lazy.putStrLn' putStrLn :: MonadIO m => LByteString -> m () putStrLn = liftIO . Data.ByteString.Lazy.Char8.putStrLn -- | Lifted 'Data.ByteString.Lazy.interact' interact :: MonadIO m => (LByteString -> LByteString) -> m () interact = liftIO . Data.ByteString.Lazy.interact -- | Lifted 'Data.ByteString.Lazy.readFile' readFile :: MonadIO m => FilePath -> m LByteString readFile = liftIO . Data.ByteString.Lazy.readFile -- | Lifted 'Data.ByteString.Lazy.writeFile' writeFile :: MonadIO m => FilePath -> LByteString -> m () writeFile fp contents = liftIO $ Data.ByteString.Lazy.writeFile fp contents -- | Lifted 'Data.ByteString.Lazy.appendFile' appendFile :: MonadIO m => FilePath -> LByteString -> m () appendFile fp = liftIO . Data.ByteString.Lazy.appendFile fp -- | Lifted 'Data.ByteString.Lazy.hGet' hGet :: MonadIO m => Handle -> Int -> m LByteString hGet handle' count' = liftIO $ Data.ByteString.Lazy.hGet handle' count' -- | Lifted 'Data.ByteString.Lazy.hGetContents' hGetContents :: MonadIO m => Handle -> m LByteString hGetContents = liftIO . Data.ByteString.Lazy.hGetContents -- | Lifted 'Data.ByteString.Lazy.hGetNonBlocking' hGetNonBlocking :: MonadIO m => Handle -> Int -> m LByteString hGetNonBlocking h = liftIO . Data.ByteString.Lazy.hGetNonBlocking h -- | Lifted 'Data.ByteString.Lazy.hPut' hPut :: MonadIO m => Handle -> LByteString -> m () hPut h = liftIO . Data.ByteString.Lazy.hPut h -- | Lifted 'Data.ByteString.Lazy.hPutNonBlocking' hPutNonBlocking :: MonadIO m => Handle -> LByteString -> m LByteString hPutNonBlocking h = liftIO . Data.ByteString.Lazy.hPutNonBlocking h -- | Lifted 'Data.ByteString.Lazy.hPutStr' hPutStr :: MonadIO m => Handle -> LByteString -> m () hPutStr h = liftIO . Data.ByteString.Lazy.hPutStr h rio-0.1.22.0/src/RIO/ByteString/Lazy/Partial.hs0000644000000000000000000000107114231470023017065 0ustar0000000000000000-- | Lazy @ByteString@ partial functions. Import as: -- -- > import qualified RIO.ByteString.Lazy.Partial as BL' module RIO.ByteString.Lazy.Partial ( -- * Basic interface Data.ByteString.Lazy.head , Data.ByteString.Lazy.last , Data.ByteString.Lazy.tail , Data.ByteString.Lazy.init -- * Reducing 'ByteString's (folds) , Data.ByteString.Lazy.foldl1 , Data.ByteString.Lazy.foldl1' , Data.ByteString.Lazy.foldr1 -- ** Special folds , Data.ByteString.Lazy.maximum , Data.ByteString.Lazy.minimum ) where import qualified Data.ByteString.Lazy rio-0.1.22.0/src/RIO/ByteString/Partial.hs0000644000000000000000000000103114231470023016142 0ustar0000000000000000-- | Strict @ByteString@ partial functions. Import as: -- -- > import qualified RIO.ByteString.Partial as B' module RIO.ByteString.Partial ( -- * Basic interface Data.ByteString.head , Data.ByteString.last , Data.ByteString.tail , Data.ByteString.init -- * Reducing 'ByteString's (folds) , Data.ByteString.foldl1 , Data.ByteString.foldl1' , Data.ByteString.foldr1 , Data.ByteString.foldr1' -- * Special folds , Data.ByteString.maximum , Data.ByteString.minimum ) where import qualified Data.ByteString rio-0.1.22.0/src/RIO/Char.hs0000644000000000000000000000243614231470023013343 0ustar0000000000000000-- | Unicode @Char@. Import as: -- -- > import qualified RIO.Char as C -- -- This module does not export any partial functions. 
For those, see -- "RIO.Char.Partial" module RIO.Char ( Data.Char.Char -- * Character classification -- | Unicode characters are divided into letters, Data.Char.numbers, marks, -- punctuation, Data.Char.symbols, separators (including spaces) and others -- (including control characters). , Data.Char.isControl , Data.Char.isSpace , Data.Char.isLower , Data.Char.isUpper , Data.Char.isAlpha , Data.Char.isAlphaNum , Data.Char.isPrint , Data.Char.isDigit , Data.Char.isOctDigit , Data.Char.isHexDigit , Data.Char.isLetter , Data.Char.isMark , Data.Char.isNumber , Data.Char.isPunctuation , Data.Char.isSymbol , Data.Char.isSeparator -- ** Subranges , Data.Char.isAscii , Data.Char.isLatin1 , Data.Char.isAsciiUpper , Data.Char.isAsciiLower -- ** Unicode general categories , Data.Char.GeneralCategory(..) , Data.Char.generalCategory -- * Case conversion , Data.Char.toUpper , Data.Char.toLower , Data.Char.toTitle -- * Numeric representations , Data.Char.ord -- * String representations , Data.Char.showLitChar , Data.Char.lexLitChar , Data.Char.readLitChar ) where import qualified Data.Char rio-0.1.22.0/src/RIO/Char/Partial.hs0000644000000000000000000000044714231470023014737 0ustar0000000000000000-- | Unicode @Char@ partial functions. Import as: -- -- > import qualified RIO.Char.Partial as C' module RIO.Char.Partial ( -- * Single digit characters Data.Char.digitToInt , Data.Char.intToDigit -- * Numeric representations , Data.Char.chr ) where import qualified Data.Char rio-0.1.22.0/src/RIO/Deque.hs0000644000000000000000000002246014231470023013530 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module RIO.Deque ( -- * Types Deque , UDeque , SDeque , BDeque -- * Operations , newDeque , getDequeSize , popFrontDeque , popBackDeque , pushFrontDeque , pushBackDeque , foldlDeque , foldrDeque , dequeToList , dequeToVector , freezeDeque -- * Inference helpers , asUDeque , asSDeque , asBDeque ) where import RIO.Prelude.Reexports import Control.Exception (assert) import Control.Monad (liftM) import qualified Data.Vector.Generic as VG import qualified Data.Vector.Generic.Mutable as V import qualified Data.Vector.Mutable as B import qualified Data.Vector.Storable.Mutable as S import qualified Data.Vector.Unboxed.Mutable as U import Data.Primitive.MutVar data DequeState v s a = DequeState !(v s a) {-# UNPACK #-} !Int -- start {-# UNPACK #-} !Int -- size -- | A double-ended queue supporting any underlying vector type and any monad. -- -- This implements a circular double-ended queue with exponential growth. -- -- @since 0.1.9.0 newtype Deque v s a = Deque (MutVar s (DequeState v s a)) -- | A 'Deque' specialized to unboxed vectors. -- -- @since 0.1.9.0 type UDeque = Deque U.MVector -- | A 'Deque' specialized to storable vectors. -- -- @since 0.1.9.0 type SDeque = Deque S.MVector -- | A 'Deque' specialized to boxed vectors. -- -- @since 0.1.9.0 type BDeque = Deque B.MVector -- | Helper function to assist with type inference, forcing usage of -- an unboxed vector. -- -- @since 0.1.9.0 asUDeque :: UDeque s a -> UDeque s a asUDeque = id -- | Helper function to assist with type inference, forcing usage of a -- storable vector. -- -- @since 0.1.9.0 asSDeque :: SDeque s a -> SDeque s a asSDeque = id -- | Helper function to assist with type inference, forcing usage of a -- boxed vector. 
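--
-- A small usage sketch (our own, not from the original documentation):
-- without an annotation the vector flavour chosen for 'newDeque' can be
-- ambiguous, and wrapping its result pins it down:
--
-- > do d <- asBDeque <$> newDeque      -- d is backed by a boxed vector
-- >    pushBackDeque d "hello"         -- element type fixed by first use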
-- -- @since 0.1.9.0 asBDeque :: BDeque s a -> BDeque s a asBDeque = id -- | Create a new, empty 'Deque' -- -- @since 0.1.9.0 newDeque :: (V.MVector v a, PrimMonad m) => m (Deque v (PrimState m) a) newDeque = do v <- V.new baseSize liftM Deque $ newMutVar (DequeState v 0 0) where baseSize = 32 {-# INLINE newDeque #-} -- | /O(1)/ - Get the number of elements that is currently in the `Deque` -- -- @since 0.1.9.0 getDequeSize :: PrimMonad m => Deque v (PrimState m) a -> m Int getDequeSize (Deque var) = do DequeState _ _ size <- readMutVar var pure size {-# INLINE getDequeSize #-} -- | Pop the first value from the beginning of the 'Deque' -- -- @since 0.1.9.0 popFrontDeque :: (V.MVector v a, PrimMonad m) => Deque v (PrimState m) a -> m (Maybe a) popFrontDeque (Deque var) = do DequeState v start size <- readMutVar var if size == 0 then return Nothing else do x <- V.unsafeRead v start let start' = start + 1 start'' | start' >= V.length v = 0 | otherwise = start' writeMutVar var $! DequeState v start'' (size - 1) return $! Just x {-# INLINE popFrontDeque #-} -- | Pop the first value from the end of the 'Deque' -- -- @since 0.1.9.0 popBackDeque :: (V.MVector v a, PrimMonad m) => Deque v (PrimState m) a -> m (Maybe a) popBackDeque (Deque var) = do DequeState v start size <- readMutVar var if size == 0 then return Nothing else do let size' = size - 1 end = start + size' end' | end >= V.length v = end - V.length v | otherwise = end x <- V.unsafeRead v end' writeMutVar var $! DequeState v start size' return $! Just x {-# INLINE popBackDeque #-} -- | Push a new value to the beginning of the 'Deque' -- -- @since 0.1.9.0 pushFrontDeque :: (V.MVector v a, PrimMonad m) => Deque v (PrimState m) a -> a -> m () pushFrontDeque (Deque var) x = do DequeState v start size <- readMutVar var inner v start size where inner v start size = do if size >= V.length v then newVector v start size inner else do let size' = size + 1 start' = (start - 1) `rem` V.length v start'' | start' < 0 = V.length v + start' | otherwise = start' V.unsafeWrite v start'' x writeMutVar var $! DequeState v start'' size' {-# INLINE pushFrontDeque #-} -- | Push a new value to the end of the 'Deque' -- -- @since 0.1.9.0 pushBackDeque :: (V.MVector v a, PrimMonad m) => Deque v (PrimState m) a -> a -> m () pushBackDeque (Deque var) x = do DequeState v start size <- readMutVar var inner v start size where inner v start size = do if size >= V.length v then newVector v start size inner else do let end = start + size end' | end >= V.length v = end - V.length v | otherwise = end V.unsafeWrite v end' x writeMutVar var $! DequeState v start (size + 1) {-# INLINE pushBackDeque #-} -- | Fold over a 'Deque', starting at the beginning. Does not modify the 'Deque'. -- -- @since 0.1.9.0 foldlDeque :: (V.MVector v a, PrimMonad m) => (acc -> a -> m acc) -> acc -> Deque v (PrimState m) a -> m acc foldlDeque f acc0 (Deque var) = do DequeState v start size <- readMutVar var let loop idx acc | idx >= size = pure acc | otherwise = do let idxPlusStart = idx + start idx' | idxPlusStart >= V.length v = idxPlusStart - V.length v | otherwise = idxPlusStart a <- V.unsafeRead v idx' acc' <- f acc a loop (idx + 1) $! acc' loop 0 acc0 -- | Fold over a 'Deque', starting at the end. Does not modify the 'Deque'. 
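--
-- A small usage sketch (our own, not from the original documentation),
-- rebuilding the contents as a list, which is essentially what
-- 'dequeToList' does:
--
-- > do d <- asUDeque <$> newDeque
-- >    mapM_ (pushBackDeque d) [1 :: Int .. 5]
-- >    foldrDeque (\x acc -> pure (x : acc)) [] d   -- yields [1,2,3,4,5]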
--
-- @since 0.1.9.0
foldrDeque :: (V.MVector v a, PrimMonad m)
           => (a -> acc -> m acc) -> acc -> Deque v (PrimState m) a -> m acc
foldrDeque f acc0 (Deque var) = do
  DequeState v start size <- readMutVar var
  let loop idx acc
        | idx < 0 = pure acc
        | otherwise = do
            let idxPlusStart = idx + start
                idx'
                  | idxPlusStart >= V.length v = idxPlusStart - V.length v
                  | otherwise = idxPlusStart
            a <- V.unsafeRead v idx'
            acc' <- f a acc
            loop (idx - 1) $! acc'
  loop (size - 1) acc0

-- | Convert a 'Deque' into a list. Does not modify the 'Deque'.
--
-- @since 0.1.9.0
dequeToList :: (V.MVector v a, PrimMonad m) => Deque v (PrimState m) a -> m [a]
dequeToList = foldrDeque (\a rest -> pure $ a : rest) []
{-# INLINE dequeToList #-}

-- | Convert to an immutable vector of any type. If the resulting pure vector corresponds to the mutable
-- one used by the `Deque`, it will be more efficient to use `freezeDeque` instead.
--
-- ==== __Example__
--
-- >>> :set -XTypeApplications
-- >>> import qualified RIO.Vector.Unboxed as U
-- >>> import qualified RIO.Vector.Storable as S
-- >>> d <- newDeque @U.MVector @Int
-- >>> mapM_ (pushFrontDeque d) [0..10]
-- >>> dequeToVector @S.Vector d
-- [10,9,8,7,6,5,4,3,2,1,0]
--
-- @since 0.1.9.0
dequeToVector :: (VG.Vector v' a, V.MVector v a, PrimMonad m) => Deque v (PrimState m) a -> m (v' a)
dequeToVector dq = do
  size <- getDequeSize dq
  mv <- V.unsafeNew size
  foldlDeque (\i e -> V.unsafeWrite mv i e >> pure (i+1)) 0 dq
  VG.unsafeFreeze mv

newVector :: (PrimMonad m, V.MVector v a)
          => v (PrimState m) a
          -> Int
          -> Int
          -> (v (PrimState m) a -> Int -> Int -> m b)
          -> m b
newVector v size2 sizeOrig f = assert (sizeOrig == V.length v) $ do
  v' <- V.unsafeNew (V.length v * 2)
  let size1 = V.length v - size2
  V.unsafeCopy (V.unsafeTake size1 v') (V.unsafeSlice size2 size1 v)
  V.unsafeCopy (V.unsafeSlice size1 size2 v') (V.unsafeTake size2 v)
  f v' 0 sizeOrig
{-# INLINE newVector #-}

-- | Yield an immutable copy of the underlying mutable vector. The difference from `dequeToVector`
-- is that the copy will be performed with a more efficient @memcpy@, rather than element by
-- element. The downside is that the resulting vector type must be the one that corresponds to the
-- mutable one that is used in the `Deque`.
-- -- ==== __Example__ -- -- >>> :set -XTypeApplications -- >>> import qualified RIO.Vector.Unboxed as U -- >>> d <- newDeque @U.MVector @Int -- >>> mapM_ (pushFrontDeque d) [0..10] -- >>> freezeDeque @U.Vector d -- [10,9,8,7,6,5,4,3,2,1,0] -- -- @since 0.1.9.0 freezeDeque :: (VG.Vector v a, PrimMonad m) => Deque (VG.Mutable v) (PrimState m) a -> m (v a) freezeDeque (Deque var) = do state@(DequeState v _ size) <- readMutVar var v' <- V.unsafeNew size makeCopy v' state VG.unsafeFreeze v' makeCopy :: (V.MVector v a, PrimMonad m) => v (PrimState m) a -> DequeState v (PrimState m) a -> m () makeCopy v' (DequeState v start size) = do let size1 = min size (V.length v - start) size2 = size - size1 V.unsafeCopy (V.unsafeTake size1 v') (V.unsafeSlice start size1 v) when (size > size1) $ V.unsafeCopy (V.unsafeSlice size1 size2 v') (V.unsafeTake size2 v) {-# INLINE makeCopy #-} rio-0.1.22.0/src/RIO/Directory.hs0000644000000000000000000000022014231470023014417 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE NoImplicitPrelude #-} module RIO.Directory ( module UnliftIO.Directory ) where import UnliftIO.Directory rio-0.1.22.0/src/RIO/File.hs0000644000000000000000000000117614231470023013345 0ustar0000000000000000{-| == Rationale This module offers functions to handle files that offer better durability and/or atomicity. See "UnliftIO.IO.File" for the rationale behind this module, since all of the functions were moved upstream and are now simply re-exported from here. @since 0.1.6 -} module RIO.File ( -- * Regular withBinaryFile , writeBinaryFile -- * Atomic , withBinaryFileAtomic , writeBinaryFileAtomic -- * Durable , withBinaryFileDurable , writeBinaryFileDurable , ensureFileDurable -- * Durable and Atomic , withBinaryFileDurableAtomic , writeBinaryFileDurableAtomic ) where import UnliftIO.IO.File rio-0.1.22.0/src/RIO/FilePath.hs0000644000000000000000000000052014231470023014152 0ustar0000000000000000module RIO.FilePath ( module System.FilePath , getSearchPath ) where import Control.Monad.IO.Class import System.FilePath hiding(getSearchPath) import qualified System.FilePath -- | Lifted version of 'System.FilePath.getSearchPath' getSearchPath :: MonadIO m => m [FilePath] getSearchPath = liftIO System.FilePath.getSearchPath rio-0.1.22.0/src/RIO/HashMap.hs0000644000000000000000000000336414231470023014010 0ustar0000000000000000-- | Strict @Map@ with hashed keys. Import as: -- -- > import qualified RIO.HashMap as HM -- -- This module does not export any partial functions. 
For those, see -- "RIO.HashMap.Partial" module RIO.HashMap ( Data.HashMap.Strict.HashMap -- * Construction , Data.HashMap.Strict.empty , Data.HashMap.Strict.singleton -- * Basic interface , Data.HashMap.Strict.null , Data.HashMap.Strict.size , Data.HashMap.Strict.member , Data.HashMap.Strict.lookup , Data.HashMap.Strict.lookupDefault , Data.HashMap.Strict.insert , Data.HashMap.Strict.insertWith , Data.HashMap.Strict.delete , Data.HashMap.Strict.adjust , Data.HashMap.Strict.update , Data.HashMap.Strict.alter -- * Combine -- ** Union , Data.HashMap.Strict.union , Data.HashMap.Strict.unionWith , Data.HashMap.Strict.unionWithKey , Data.HashMap.Strict.unions -- * Transformations , Data.HashMap.Strict.map , Data.HashMap.Strict.mapWithKey , Data.HashMap.Strict.traverseWithKey -- * Difference and intersection , Data.HashMap.Strict.difference , Data.HashMap.Strict.differenceWith , Data.HashMap.Strict.intersection , Data.HashMap.Strict.intersectionWith , Data.HashMap.Strict.intersectionWithKey -- * Folds , Data.HashMap.Strict.foldl' , Data.HashMap.Strict.foldlWithKey' , Data.HashMap.Strict.foldr , Data.HashMap.Strict.foldrWithKey -- * Filter , Data.HashMap.Strict.filter , Data.HashMap.Strict.filterWithKey , Data.HashMap.Strict.mapMaybe , Data.HashMap.Strict.mapMaybeWithKey -- * Conversions , Data.HashMap.Strict.keys , Data.HashMap.Strict.elems -- ** Lists , Data.HashMap.Strict.toList , Data.HashMap.Strict.fromList , Data.HashMap.Strict.fromListWith ) where import Data.HashMap.Strict rio-0.1.22.0/src/RIO/HashMap/Partial.hs0000644000000000000000000000035214231470023015376 0ustar0000000000000000-- | Strict @HashMap@ partial functions. Import as: -- -- > import qualified RIO.HashMap.Partial as HM' module RIO.HashMap.Partial ( -- * Basic interface (Data.HashMap.Strict.!) ) where import qualified Data.HashMap.Strict rio-0.1.22.0/src/RIO/HashSet.hs0000644000000000000000000000151014231470023014015 0ustar0000000000000000-- | @Set@ with hashed members. Import as: -- -- > import qualified RIO.HashSet as HS module RIO.HashSet ( Data.HashSet.HashSet -- * Construction , Data.HashSet.empty , Data.HashSet.singleton -- * Combine , Data.HashSet.union , Data.HashSet.unions -- * Basic interface , Data.HashSet.null , Data.HashSet.size , Data.HashSet.member , Data.HashSet.insert , Data.HashSet.delete -- * Transformations , Data.HashSet.map -- * Difference and intersection , Data.HashSet.difference , Data.HashSet.intersection -- * Folds , Data.HashSet.foldl' , Data.HashSet.foldr -- * Filter , Data.HashSet.filter -- * Conversions -- ** Lists , Data.HashSet.toList , Data.HashSet.fromList -- * HashMaps , Data.HashSet.toMap , Data.HashSet.fromMap ) where import qualified Data.HashSet rio-0.1.22.0/src/RIO/Lens.hs0000644000000000000000000000101514231470023013357 0ustar0000000000000000-- | -- Module : RIO.Lens -- License : MIT -- Maintainer: Colin Woodbury -- -- Extra utilities from @microlens@. -- -- @since: 0.1.16.0 module RIO.Lens ( -- * Fold SimpleFold , toListOf , has -- * Lens , _1, _2, _3, _4, _5 , at , lens -- * Iso , non -- * Traversal , singular , failing , filtered , both , traversed , each , ix , _head , _tail , _init , _last -- * Prism , _Left , _Right , _Just , _Nothing ) where import Lens.Micro rio-0.1.22.0/src/RIO/List.hs0000644000000000000000000001416314231470023013401 0ustar0000000000000000-- | @List@. Import as: -- -- > import qualified RIO.List as L -- -- This module does not export any partial functions. 
For those, see -- "RIO.List.Partial" module RIO.List ( -- * Basic functions (Data.List.++) , Data.List.uncons , Data.List.null , Data.List.length , headMaybe , lastMaybe , tailMaybe , initMaybe -- * List transformations , Data.List.map , Data.List.reverse , Data.List.intersperse , Data.List.intercalate , Data.List.transpose , Data.List.subsequences , Data.List.permutations -- * Reducing lists (folds) , Data.List.foldl , Data.List.foldl' , Data.List.foldr -- ** Special folds , Data.List.concat , Data.List.concatMap , Data.List.and , Data.List.or , Data.List.any , Data.List.all , Data.List.sum , Data.List.product , maximumMaybe , minimumMaybe , maximumByMaybe , minimumByMaybe -- * Building lists -- ** Scans , Data.List.scanl , Data.List.scanl' , Data.List.scanr , Data.List.scanl1 , Data.List.scanr1 -- ** Accumulating maps , Data.List.mapAccumL , Data.List.mapAccumR -- ** Infinite lists , Data.List.iterate , Data.List.repeat , Data.List.replicate , Data.List.cycle -- ** Unfolding , Data.List.unfoldr -- * Sublists -- ** Extracting sublists , Data.List.take , Data.List.drop , Data.List.splitAt , Data.List.takeWhile , Data.List.dropWhile , Data.List.dropWhileEnd , Data.List.span , Data.List.break , Data.List.stripPrefix , stripSuffix , dropPrefix , dropSuffix , Data.List.group , Data.List.inits , Data.List.tails -- ** Predicates , Data.List.isPrefixOf , Data.List.isSuffixOf , Data.List.isInfixOf , Data.List.isSubsequenceOf -- * Searching lists -- ** Searching by equality , Data.List.elem , Data.List.notElem , Data.List.lookup -- ** Searching with a predicate , Data.List.find , Data.List.filter , Data.List.partition -- * Indexing lists -- | These functions treat a list @xs@ as a indexed collection, -- with indices ranging from 0 to @'length' xs - 1@. , Data.List.elemIndex , Data.List.elemIndices , Data.List.findIndex , Data.List.findIndices -- * Zipping and unzipping lists , Data.List.zip , Data.List.zip3 , Data.List.zip4 , Data.List.zip5 , Data.List.zip6 , Data.List.zip7 , Data.List.zipWith , Data.List.zipWith3 , Data.List.zipWith4 , Data.List.zipWith5 , Data.List.zipWith6 , Data.List.zipWith7 , Data.List.unzip , Data.List.unzip3 , Data.List.unzip4 , Data.List.unzip5 , Data.List.unzip6 , Data.List.unzip7 -- * Special lists -- ** Functions on strings , Data.List.lines , linesCR , Data.List.words , Data.List.unlines , Data.List.unwords -- ** \"Set\" operations , Data.List.nub , Data.List.delete , (Data.List.\\) , Data.List.union , Data.List.intersect -- ** Ordered lists , Data.List.sort , Data.List.sortOn , Data.List.insert -- * Generalized functions -- ** The \"@By@\" operations -- | By convention, overloaded functions have a non-overloaded -- counterpart whose name is suffixed with \`@By@\'. -- -- It is often convenient to use these functions together with -- 'Data.Function.on', for instance @'sortBy' ('compare' -- \`on\` 'fst')@. -- *** User-supplied equality (replacing an @Eq@ context) -- | The predicate is assumed to define an equivalence. , Data.List.nubBy , Data.List.deleteBy , Data.List.deleteFirstsBy , Data.List.unionBy , Data.List.intersectBy , Data.List.groupBy -- *** User-supplied comparison (replacing an @Ord@ context) -- | The function is assumed to define a total ordering. , Data.List.sortBy , Data.List.insertBy -- ** The \"@generic@\" operations -- | The prefix \`@generic@\' indicates an overloaded function that -- is a generalized version of a "Prelude" function. 
, Data.List.genericLength , Data.List.genericTake , Data.List.genericDrop , Data.List.genericSplitAt , Data.List.genericIndex , Data.List.genericReplicate ) where import qualified Data.List import Data.List(stripPrefix) import Data.Maybe (fromMaybe) -- | Remove the suffix from the given list, if present -- -- @since 0.0.0 stripSuffix :: Eq a => [a] -- ^ suffix -> [a] -> Maybe [a] stripSuffix suffix list = fmap reverse (stripPrefix (reverse suffix) (reverse list)) -- | Drop prefix if present, otherwise return original list. -- -- @since 0.0.0.0 dropPrefix :: Eq a => [a] -- ^ prefix -> [a] -> [a] dropPrefix prefix t = fromMaybe t (stripPrefix prefix t) -- | Drop prefix if present, otherwise return original list. -- -- @since 0.0.0.0 dropSuffix :: Eq a => [a] -- ^ suffix -> [a] -> [a] dropSuffix suffix t = fromMaybe t (stripSuffix suffix t) -- | 'linesCR' breaks a 'String' up into a list of `String`s at newline -- 'Char's. It is very similar to 'lines', but it also removes any -- trailing @'\r'@ 'Char's. The resulting 'String' values do not contain -- newlines or trailing @'\r'@ characters. -- -- @since 0.1.0.0 linesCR :: String -> [String] linesCR = map (dropSuffix "\r") . lines safeListCall :: Foldable t => (t a -> b) -> t a -> Maybe b safeListCall f xs | Data.List.null xs = Nothing | otherwise = Just $ f xs -- | @since 0.1.3.0 headMaybe :: [a] -> Maybe a headMaybe = safeListCall Data.List.head -- | @since 0.1.3.0 lastMaybe :: [a] -> Maybe a lastMaybe = safeListCall Data.List.last -- | @since 0.1.3.0 tailMaybe :: [a] -> Maybe [a] tailMaybe = safeListCall Data.List.tail -- | @since 0.1.3.0 initMaybe :: [a] -> Maybe [a] initMaybe = safeListCall Data.List.init -- | @since 0.1.3.0 maximumMaybe :: (Ord a, Foldable t) => t a -> Maybe a maximumMaybe = safeListCall Data.List.maximum -- | @since 0.1.3.0 minimumMaybe :: (Ord a, Foldable t) => t a -> Maybe a minimumMaybe = safeListCall Data.List.minimum -- | @since 0.1.3.0 maximumByMaybe :: (Foldable t) => (a -> a -> Ordering) -> t a -> Maybe a maximumByMaybe f = safeListCall (Data.List.maximumBy f) -- | @since 0.1.3.0 minimumByMaybe :: (Foldable t) => (a -> a -> Ordering) -> t a -> Maybe a minimumByMaybe f = safeListCall (Data.List.minimumBy f) rio-0.1.22.0/src/RIO/List/Partial.hs0000644000000000000000000000135314231470023014772 0ustar0000000000000000-- | @List@ partial functions. Import as: -- -- > import qualified RIO.List.Partial as L' module RIO.List.Partial ( -- * Basic functions Data.List.head , Data.List.last , Data.List.tail , Data.List.init -- * Reducing lists (folds) , Data.List.foldl1 , Data.List.foldl1' , Data.List.foldr1 -- ** Special folds , Data.List.maximum , Data.List.minimum , Data.List.maximumBy , Data.List.minimumBy -- * Building lists -- ** Scans -- -- These functions are not partial, they are being exported here for legacy -- reasons, they may be removed from this module on a future major release , Data.List.scanl1 , Data.List.scanr1 -- * Indexing lists , (Data.List.!!) ) where import qualified Data.List rio-0.1.22.0/src/RIO/Map.hs0000644000000000000000000001061714231470023013203 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | Strict @Map@. Import as: -- -- > import qualified RIO.Map as Map -- -- This module does not export any partial or unchecked functions. For those, -- see "RIO.Map.Partial" and "RIO.Map.Unchecked" module RIO.Map ( -- * Map type Data.Map.Strict.Map -- * Operators #if MIN_VERSION_containers(0,5,9) , (Data.Map.Strict.!?) 
#endif , (Data.Map.Strict.\\) -- * Query , Data.Map.Strict.null , Data.Map.Strict.size , Data.Map.Strict.member , Data.Map.Strict.notMember , Data.Map.Strict.lookup , Data.Map.Strict.findWithDefault , Data.Map.Strict.lookupLT , Data.Map.Strict.lookupGT , Data.Map.Strict.lookupLE , Data.Map.Strict.lookupGE -- * Construction , Data.Map.Strict.empty , Data.Map.Strict.singleton -- ** Insertion , Data.Map.Strict.insert , Data.Map.Strict.insertWith , Data.Map.Strict.insertWithKey , Data.Map.Strict.insertLookupWithKey -- ** Delete\/Update , Data.Map.Strict.delete , Data.Map.Strict.adjust , Data.Map.Strict.adjustWithKey , Data.Map.Strict.update , Data.Map.Strict.updateWithKey , Data.Map.Strict.updateLookupWithKey , Data.Map.Strict.alter #if MIN_VERSION_containers(0,5,8) , Data.Map.Strict.alterF #endif -- * Combine -- ** Union , Data.Map.Strict.union , Data.Map.Strict.unionWith , Data.Map.Strict.unionWithKey , Data.Map.Strict.unions , Data.Map.Strict.unionsWith -- ** Difference , Data.Map.Strict.difference , Data.Map.Strict.differenceWith , Data.Map.Strict.differenceWithKey -- ** Intersection , Data.Map.Strict.intersection , Data.Map.Strict.intersectionWith , Data.Map.Strict.intersectionWithKey -- ** General combining functions -- | See "Data.Map.Merge.Strict" -- ** Deprecated general combining function , Data.Map.Strict.mergeWithKey -- * Traversal -- ** Map , Data.Map.Strict.map , Data.Map.Strict.mapWithKey , Data.Map.Strict.traverseWithKey #if MIN_VERSION_containers(0,5,8) , Data.Map.Strict.traverseMaybeWithKey #endif , Data.Map.Strict.mapAccum , Data.Map.Strict.mapAccumWithKey , Data.Map.Strict.mapAccumRWithKey , Data.Map.Strict.mapKeys , Data.Map.Strict.mapKeysWith -- * Folds , Data.Map.Strict.foldr , Data.Map.Strict.foldl , Data.Map.Strict.foldrWithKey , Data.Map.Strict.foldlWithKey , Data.Map.Strict.foldMapWithKey -- ** Strict folds , Data.Map.Strict.foldr' , Data.Map.Strict.foldl' , Data.Map.Strict.foldrWithKey' , Data.Map.Strict.foldlWithKey' -- * Conversion , Data.Map.Strict.elems , Data.Map.Strict.keys , Data.Map.Strict.assocs , Data.Map.Strict.keysSet , Data.Map.Strict.fromSet -- ** Lists , Data.Map.Strict.toList , Data.Map.Strict.fromList , Data.Map.Strict.fromListWith , Data.Map.Strict.fromListWithKey -- ** Ordered lists , Data.Map.Strict.toAscList #if MIN_VERSION_containers(0,5,8) , Data.Map.Strict.toDescList #endif -- * Filter , Data.Map.Strict.filter , Data.Map.Strict.filterWithKey #if MIN_VERSION_containers(0,5,8) , Data.Map.Strict.restrictKeys , Data.Map.Strict.withoutKeys #endif , Data.Map.Strict.partition , Data.Map.Strict.partitionWithKey #if MIN_VERSION_containers(0,5,8) , Data.Map.Strict.takeWhileAntitone , Data.Map.Strict.dropWhileAntitone , Data.Map.Strict.spanAntitone #endif , Data.Map.Strict.mapMaybe , Data.Map.Strict.mapMaybeWithKey , Data.Map.Strict.mapEither , Data.Map.Strict.mapEitherWithKey , Data.Map.Strict.split , Data.Map.Strict.splitLookup , Data.Map.Strict.splitRoot -- * Submap , Data.Map.Strict.isSubmapOf , Data.Map.Strict.isSubmapOfBy , Data.Map.Strict.isProperSubmapOf , Data.Map.Strict.isProperSubmapOfBy -- * Indexed , Data.Map.Strict.lookupIndex , Data.Map.Strict.elemAt , Data.Map.Strict.deleteAt #if MIN_VERSION_containers(0,5,8) , Data.Map.Strict.take , Data.Map.Strict.drop , Data.Map.Strict.splitAt #endif -- * Min\/Max #if MIN_VERSION_containers(0,5,9) , Data.Map.Strict.lookupMin , Data.Map.Strict.lookupMax #endif , Data.Map.Strict.deleteMin , Data.Map.Strict.deleteMax , Data.Map.Strict.updateMin , Data.Map.Strict.updateMax , 
Data.Map.Strict.updateMinWithKey , Data.Map.Strict.updateMaxWithKey , Data.Map.Strict.minView , Data.Map.Strict.maxView , Data.Map.Strict.minViewWithKey , Data.Map.Strict.maxViewWithKey -- * Debugging , Data.Map.Strict.showTree , Data.Map.Strict.showTreeWith , Data.Map.Strict.valid ) where import qualified Data.Map.Strict rio-0.1.22.0/src/RIO/Map/Partial.hs0000644000000000000000000000074014231470023014573 0ustar0000000000000000-- | Strict @Map@ partial functions. Import as: -- -- > import qualified RIO.Map.Partial as Map' module RIO.Map.Partial ( -- * Operators (Data.Map.Strict.!) -- * Indexed , Data.Map.Strict.elemAt , Data.Map.Strict.deleteAt , Data.Map.Strict.findIndex , Data.Map.Strict.updateAt -- * Min\/Max , Data.Map.Strict.findMin , Data.Map.Strict.findMax , Data.Map.Strict.deleteFindMin , Data.Map.Strict.deleteFindMax ) where import qualified Data.Map.Strict rio-0.1.22.0/src/RIO/Map/Unchecked.hs0000644000000000000000000000200114231470023015060 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | This module contains functions from "Data.Map.Strict" that have unchecked -- preconditions on their input. If these preconditions are not satisfied, -- the data structure may end up in an invalid state and other operations -- may misbehave. Import as: -- -- > import qualified RIO.Map.Unchecked as Map' module RIO.Map.Unchecked ( -- * Traversal -- ** Map Data.Map.Strict.mapKeysMonotonic -- * Conversion -- ** Ordered lists , Data.Map.Strict.toAscList -- FIXME: remove in the next major version (0.2.0.0) , Data.Map.Strict.fromAscList , Data.Map.Strict.fromAscListWith , Data.Map.Strict.fromAscListWithKey , Data.Map.Strict.fromDistinctAscList #if MIN_VERSION_containers(0,5,8) , Data.Map.Strict.toDescList -- FIXME: remove in the next major version (0.2.0.0) , Data.Map.Strict.fromDescList , Data.Map.Strict.fromDescListWith , Data.Map.Strict.fromDescListWithKey , Data.Map.Strict.fromDistinctDescList #endif ) where import qualified Data.Map.Strict rio-0.1.22.0/src/RIO/NonEmpty.hs0000644000000000000000000000417314231470023014237 0ustar0000000000000000-- | @NonEmpty@ list. Import as: -- -- > import qualified RIO.NonEmpty as NE -- -- This module does not export any partial functions. For those, see -- "RIO.NonEmpty.Partial" module RIO.NonEmpty ( -- * The type of non-empty streams Data.List.NonEmpty.NonEmpty(..) 
-- * Non-empty stream transformations , Data.List.NonEmpty.map , Data.List.NonEmpty.intersperse , Data.List.NonEmpty.scanl , Data.List.NonEmpty.scanr , Data.List.NonEmpty.scanl1 , Data.List.NonEmpty.scanr1 , Data.List.NonEmpty.transpose , Data.List.NonEmpty.sortBy , Data.List.NonEmpty.sortWith -- * Basic functions , Data.List.NonEmpty.length , Data.List.NonEmpty.head , Data.List.NonEmpty.tail , Data.List.NonEmpty.last , Data.List.NonEmpty.init , (Data.List.NonEmpty.<|) , Data.List.NonEmpty.cons , Data.List.NonEmpty.uncons , Data.List.NonEmpty.unfoldr , Data.List.NonEmpty.sort , Data.List.NonEmpty.reverse , Data.List.NonEmpty.inits , Data.List.NonEmpty.tails -- * Building streams , Data.List.NonEmpty.iterate , Data.List.NonEmpty.repeat , Data.List.NonEmpty.cycle , Data.List.NonEmpty.insert , Data.List.NonEmpty.some1 -- * Extracting sublists , Data.List.NonEmpty.take , Data.List.NonEmpty.drop , Data.List.NonEmpty.splitAt , Data.List.NonEmpty.takeWhile , Data.List.NonEmpty.dropWhile , Data.List.NonEmpty.span , Data.List.NonEmpty.break , Data.List.NonEmpty.filter , Data.List.NonEmpty.partition , Data.List.NonEmpty.group , Data.List.NonEmpty.groupBy , Data.List.NonEmpty.groupWith , Data.List.NonEmpty.groupAllWith , Data.List.NonEmpty.group1 , Data.List.NonEmpty.groupBy1 , Data.List.NonEmpty.groupWith1 , Data.List.NonEmpty.groupAllWith1 -- * Sublist predicates , Data.List.NonEmpty.isPrefixOf -- * Set-like operations , Data.List.NonEmpty.nub , Data.List.NonEmpty.nubBy -- * Zipping and unzipping streams , Data.List.NonEmpty.zip , Data.List.NonEmpty.zipWith , Data.List.NonEmpty.unzip -- * Converting to and from a list , Data.List.NonEmpty.nonEmpty , Data.List.NonEmpty.toList , Data.List.NonEmpty.xor ) where import qualified Data.List.NonEmpty rio-0.1.22.0/src/RIO/NonEmpty/Partial.hs0000644000000000000000000000046214231470023015630 0ustar0000000000000000-- | @NonEmpty@ list partial functions. Import as: -- -- > import qualified RIO.NonEmpty.Partial as NE' module RIO.NonEmpty.Partial ( -- * Indexing streams (Data.List.NonEmpty.!!) -- * Converting to and from a list , Data.List.NonEmpty.fromList ) where import qualified Data.List.NonEmpty rio-0.1.22.0/src/RIO/Partial.hs0000644000000000000000000000040114231470023014050 0ustar0000000000000000-- | Partial functions. 
Import as: -- -- > import qualified RIO.Partial as RIO' module RIO.Partial ( Data.Maybe.fromJust , Prelude.read , Prelude.toEnum , Prelude.pred , Prelude.succ ) where import qualified Data.Maybe import qualified Prelude rio-0.1.22.0/src/RIO/Prelude.hs0000644000000000000000000002624014231470023014065 0ustar0000000000000000module RIO.Prelude ( module RIO.Prelude.Types -- * @Bool@ -- | Re-exported from "Data.Bool": , (Data.Bool.||) , (Data.Bool.&&) , Data.Bool.not , Data.Bool.otherwise , Data.Bool.bool -- * @Maybe@ -- | Re-exported from "Data.Maybe": , Data.Maybe.maybe , Data.Maybe.fromMaybe , RIO.Prelude.Extra.fromFirst , Data.Maybe.isJust , Data.Maybe.isNothing , Data.Maybe.listToMaybe , Data.Maybe.maybeToList , Data.Maybe.catMaybes , Data.Maybe.mapMaybe , RIO.Prelude.Extra.mapMaybeA , RIO.Prelude.Extra.mapMaybeM , RIO.Prelude.Extra.forMaybeA , RIO.Prelude.Extra.forMaybeM -- * @Either@ -- | Re-exported from "Data.Either": , Data.Either.either , Data.Either.fromLeft , Data.Either.fromRight , Data.Either.isLeft , Data.Either.isRight , RIO.Prelude.Extra.mapLeft , Data.Either.lefts , Data.Either.partitionEithers , Data.Either.rights -- * Tuples -- | Re-exported from "Data.Tuple": , Data.Tuple.fst , Data.Tuple.snd , Data.Tuple.curry , Data.Tuple.uncurry -- , Data.Tuple.swap -- TODO: export? -- * @Eq@ -- | Re-exported from "Data.Eq": , (Data.Eq.==) , (Data.Eq./=) -- * @Ord@ -- | Re-exported from "Data.Ord": , (Data.Ord.<) , (Data.Ord.<=) , (Data.Ord.>) , (Data.Ord.>=) , Data.Ord.max , Data.Ord.min , Data.Ord.compare , Data.Ord.comparing , Data.Ord.Down(..) -- * @Enum@ -- | Re-exported from "Prelude": , Prelude.fromEnum -- * @Bounded@ -- | Re-exported from "Prelude": , Prelude.minBound , Prelude.maxBound -- * @Num@ -- | Re-exported from "Prelude": , (Prelude.+) , (Prelude.-) , (*) -- HSE can't parse qualified export, which results in hlint error. 
, (Prelude.^) , Prelude.negate , Prelude.abs , Prelude.signum , Prelude.fromInteger , Prelude.subtract -- * @Real@ -- | Re-exported from "Prelude": , Prelude.toRational -- * @Integral@ -- | Re-exported from "Prelude": , Prelude.quot , Prelude.rem , Prelude.div , Prelude.mod , Prelude.quotRem , Prelude.divMod , Prelude.toInteger , Prelude.even , Prelude.odd , Prelude.gcd , Prelude.lcm , Prelude.fromIntegral -- * @Fractional@ -- | Re-exported from "Prelude": , (Prelude./) , (Prelude.^^) , Prelude.recip , Prelude.fromRational , Prelude.realToFrac -- * @Floating@ -- | Re-exported from "Prelude": , Prelude.pi , Prelude.exp , Prelude.log , Prelude.sqrt , (Prelude.**) , Prelude.logBase , Prelude.sin , Prelude.cos , Prelude.tan , Prelude.asin , Prelude.acos , Prelude.atan , Prelude.sinh , Prelude.cosh , Prelude.tanh , Prelude.asinh , Prelude.acosh , Prelude.atanh -- * @RealFrac@ -- | Re-exported from "Prelude": , Prelude.properFraction , Prelude.truncate , Prelude.round , Prelude.ceiling , Prelude.floor -- * @RealFloat@ -- | Re-exported from "Prelude": , Prelude.floatRadix , Prelude.floatDigits , Prelude.floatRange , Prelude.decodeFloat , Prelude.encodeFloat , Prelude.exponent , Prelude.significand , Prelude.scaleFloat , Prelude.isNaN , Prelude.isInfinite , Prelude.isDenormalized , Prelude.isNegativeZero , Prelude.isIEEE , Prelude.atan2 -- * @Word@ -- | Re-exported from "Data.Word": , Data.Word.byteSwap16 , Data.Word.byteSwap32 , Data.Word.byteSwap64 -- * @Semigroup@ -- | Re-exported from "Data.Semigroup": , (Data.Semigroup.<>) , RIO.Prelude.Renames.sappend -- * @Monoid@ -- | Re-exported from "Data.Monoid": , Data.Monoid.mempty , Data.Monoid.mappend , Data.Monoid.mconcat -- * @Functor@ -- | Re-exported from "Data.Functor": , Data.Functor.fmap , (Data.Functor.<$>) , (Data.Functor.<$) , (Data.Functor.$>) , Data.Functor.void , (RIO.Prelude.Extra.<&>) -- * @Applicative@ -- | Re-exported from "Control.Applicative": , Control.Applicative.pure , (Control.Applicative.<*>) , (Control.Applicative.<*) , (Control.Applicative.*>) , Control.Applicative.liftA , Control.Applicative.liftA2 , Control.Applicative.liftA3 , Control.Monad.forever , Data.Foldable.traverse_ , Data.Foldable.for_ , Data.Foldable.sequenceA_ , Control.Monad.filterM , Control.Monad.replicateM_ , Control.Monad.zipWithM , Control.Monad.zipWithM_ -- * @Monad@ -- | Re-exported from "Control.Monad": , Control.Monad.return , Control.Monad.join , Control.Monad.fail , (Control.Monad.>>=) , (Control.Monad.>>) , (Control.Monad.=<<) , (Control.Monad.>=>) , (Control.Monad.<=<) , (Control.Monad.<$!>) , Control.Monad.liftM , Control.Monad.liftM2 , RIO.Prelude.Extra.whenM , RIO.Prelude.Extra.unlessM , Data.Foldable.mapM_ , Data.Foldable.forM_ , Data.Foldable.sequence_ , Control.Monad.foldM , Control.Monad.foldM_ -- TODO: Export these as well perhaps? 
-- , Data.Foldable.foldlM -- , Data.Foldable.foldrM -- * @Foldable@ -- | Re-exported from "Data.Foldable": , Data.Foldable.foldr , Data.Foldable.foldl' , Data.Foldable.fold , Data.Foldable.foldMap , RIO.Prelude.Extra.foldMapM , Data.Foldable.elem , Data.Foldable.notElem , Data.Foldable.null , Data.Foldable.length , Data.Foldable.sum , Data.Foldable.product , Data.Foldable.all , Data.Foldable.any , Data.Foldable.and , Data.Foldable.or , Data.Foldable.toList , Data.Foldable.concat , Data.Foldable.concatMap -- * @Traversable@ -- | Re-exported from "Data.Traversable": , Data.Traversable.traverse , Data.Traversable.for , Data.Traversable.sequenceA , Data.Traversable.mapM , Data.Traversable.forM , Data.Traversable.sequence -- * @Alternative@ -- | Re-exported from "Control.Applicative": , (Control.Applicative.<|>) , Control.Applicative.some , Control.Applicative.many , Control.Applicative.optional , Data.Foldable.asum , Control.Monad.guard , Control.Monad.when , Control.Monad.unless -- * @Bifunctor@ -- | Re-exported from "Data.Bifunctor": , Data.Bifunctor.bimap , Data.Bifunctor.first , Data.Bifunctor.second -- * @Bifoldable@ -- | Re-exported from "Data.Bifoldable": , Data.Bifoldable.bifold , Data.Bifoldable.bifoldMap , Data.Bifoldable.bifoldr , Data.Bifoldable.bifoldl , Data.Bifoldable.bifoldr' , Data.Bifoldable.bifoldr1 , Data.Bifoldable.bifoldrM , Data.Bifoldable.bifoldl' , Data.Bifoldable.bifoldl1 , Data.Bifoldable.bifoldlM , Data.Bifoldable.bitraverse_ , Data.Bifoldable.bifor_ , Data.Bifoldable.bisequence_ , Data.Bifoldable.biasum , Data.Bifoldable.biList , Data.Bifoldable.binull , Data.Bifoldable.bilength , Data.Bifoldable.bielem , Data.Bifoldable.bimaximum , Data.Bifoldable.biminimum , Data.Bifoldable.bisum , Data.Bifoldable.biproduct , Data.Bifoldable.biconcat , Data.Bifoldable.biconcatMap , Data.Bifoldable.biand , Data.Bifoldable.bior , Data.Bifoldable.biany , Data.Bifoldable.biall , Data.Bifoldable.bimaximumBy , Data.Bifoldable.biminimumBy , Data.Bifoldable.binotElem , Data.Bifoldable.bifind -- * @Bitraverse@ -- | Re-exported from "Data.Bitraversable": , Data.Bitraversable.bitraverse , Data.Bitraversable.bisequence , Data.Bitraversable.bifor , Data.Bitraversable.bimapAccumL , Data.Bitraversable.bimapAccumR -- * @MonadPlus@ -- | Re-exported from "Control.Monad": , Control.Monad.mzero , Control.Monad.mplus , Control.Monad.msum , Control.Monad.mfilter -- * @Arrow@ -- | Re-exported from "Control.Arrow" and "Control.Category": , (Control.Arrow.&&&) , (Control.Arrow.***) , (Control.Category.>>>) -- * @Function@ -- | Re-exported from "Data.Function": , Data.Function.id , Data.Function.const , (Data.Function..) , (Data.Function.$) , (Data.Function.&) , Data.Function.flip , Data.Function.fix , Data.Function.on -- * Miscellaneous functions , (Prelude.$!) 
, Prelude.seq , Prelude.error , Prelude.undefined , Prelude.asTypeOf , RIO.Prelude.Extra.asIO -- * List -- | Re-exported from "Data.List": , (Data.List.++) , Data.List.break , Data.List.drop , Data.List.dropWhile , Data.List.filter , Data.List.lookup , Data.List.map , Data.List.replicate , Data.List.reverse , Data.List.span , Data.List.take , Data.List.takeWhile , Data.List.zip , Data.List.zipWith , RIO.Prelude.Extra.nubOrd -- * @String@ -- | Re-exported from "Data.String": , Data.String.fromString , Data.String.lines , Data.String.unlines , Data.String.unwords , Data.String.words -- ** @Show@ -- | Re-exported from "Text.Show": , Text.Show.show -- ** @Read@ -- | Re-exported from "Text.Read": , Text.Read.readMaybe -- * @NFData@ -- | Re-exported from "Control.DeepSeq": , (Control.DeepSeq.$!!) , Control.DeepSeq.rnf , Control.DeepSeq.deepseq , Control.DeepSeq.force -- * @Void@ -- | Re-exported from "Data.Void": , Data.Void.absurd -- * @Reader@ -- | Re-exported from "Control.Monad.Reader": , Control.Monad.Reader.lift , Control.Monad.Reader.ask , Control.Monad.Reader.asks , Control.Monad.Reader.local , Control.Monad.Reader.runReader , Control.Monad.Reader.runReaderT -- * @ByteString@ -- | Helper synonyms for converting bewteen lazy and strict @ByteString@s , RIO.Prelude.Renames.toStrictBytes , RIO.Prelude.Renames.fromStrictBytes -- * @ShortByteString@ -- | Re-exported from "Data.ByteString.Short": , Data.ByteString.Short.toShort , Data.ByteString.Short.fromShort -- * @Text@ , RIO.Prelude.Text.tshow , RIO.Prelude.Text.decodeUtf8Lenient -- | Re-exported from "Data.Text.Encoding": , Data.Text.Encoding.decodeUtf8' , Data.Text.Encoding.decodeUtf8With , Data.Text.Encoding.encodeUtf8 , Data.Text.Encoding.encodeUtf8Builder , Data.Text.Encoding.Error.lenientDecode -- * @PrimMonad@ -- | Re-exported from "Control.Monad.Primitive": , Control.Monad.Primitive.primitive -- | Re-exported from "Control.Monad.ST": , Control.Monad.ST.runST ) where import qualified RIO.Prelude.Extra import qualified RIO.Prelude.Renames import qualified RIO.Prelude.Text import qualified RIO.Prelude.Types import qualified Control.Applicative import qualified Control.Arrow import qualified Control.Category import qualified Control.DeepSeq import qualified Control.Monad import qualified Control.Monad.Primitive (primitive) import qualified Control.Monad.Reader import qualified Control.Monad.ST import qualified Data.Bifoldable import qualified Data.Bifunctor import qualified Data.Bitraversable import qualified Data.Bool import qualified Data.ByteString.Short import qualified Data.Either import qualified Data.Eq import qualified Data.Foldable import qualified Data.Function import qualified Data.Functor import qualified Data.List import qualified Data.Maybe import qualified Data.Monoid import qualified Data.Ord import qualified Data.Semigroup import qualified Data.String import qualified Data.Text.Encoding (decodeUtf8', decodeUtf8With, encodeUtf8, encodeUtf8Builder) import qualified Data.Text.Encoding.Error (lenientDecode) import qualified Data.Traversable import qualified Data.Tuple import qualified Data.Void import qualified Data.Word import Prelude ((*)) import qualified Prelude import qualified Text.Read import qualified Text.Show rio-0.1.22.0/src/RIO/Prelude/Simple.hs0000644000000000000000000000365714231470023015325 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} -- | Provide a @`SimpleApp`@ datatype, for providing a basic @App@-like -- environment with common functionality built in. 
This is intended to -- make it easier to, e.g., use rio's logging and process code from -- within short scripts. -- -- @since 0.1.3.0 module RIO.Prelude.Simple ( SimpleApp , mkSimpleApp , runSimpleApp ) where import RIO.Prelude.Reexports import RIO.Prelude.Logger import RIO.Prelude.Lens import RIO.Prelude.RIO import RIO.Process import System.Environment (lookupEnv) -- | A simple, non-customizable environment type for @RIO@, which -- provides common functionality. If it's insufficient for your needs, -- define your own, custom @App@ data type. -- -- @since 0.1.3.0 data SimpleApp = SimpleApp { saLogFunc :: !LogFunc , saProcessContext :: !ProcessContext } instance HasLogFunc SimpleApp where logFuncL = lens saLogFunc (\x y -> x { saLogFunc = y }) instance HasProcessContext SimpleApp where processContextL = lens saProcessContext (\x y -> x { saProcessContext = y }) -- | Constructor for `SimpleApp`. In case when `ProcessContext` is not supplied -- `mkDefaultProcessContext` will be used to create it. -- -- @since 0.1.14.0 mkSimpleApp :: MonadIO m => LogFunc -> Maybe ProcessContext -> m SimpleApp mkSimpleApp logFunc mProcessContext = do processContext <- maybe mkDefaultProcessContext pure mProcessContext pure $ SimpleApp {saLogFunc = logFunc, saProcessContext = processContext} -- | Run with a default configured @SimpleApp@, consisting of: -- -- * Logging to stderr -- -- * If the @RIO_VERBOSE@ environment variable is set, turns on -- verbose logging -- -- * Default process context -- -- @since 0.1.3.0 runSimpleApp :: MonadIO m => RIO SimpleApp a -> m a runSimpleApp m = liftIO $ do verbose <- isJust <$> lookupEnv "RIO_VERBOSE" lo <- logOptionsHandle stderr verbose withLogFunc lo $ \lf -> do simpleApp <- mkSimpleApp lf Nothing runRIO simpleApp m rio-0.1.22.0/src/RIO/Prelude/Types.hs0000644000000000000000000002424614231470023015175 0ustar0000000000000000module RIO.Prelude.Types ( -- * @base@ -- ** Types -- *** @Bool@ -- | Re-exported from "Data.Bool": Data.Bool.Bool(..) -- *** @Char@ (@String@) -- | Re-exported from "Data.Char": , Data.Char.Char -- | Re-exported from "Data.String": , Data.String.String -- | Re-exported from "System.IO": , System.IO.FilePath -- *** @Ordering@ -- | Re-exported from "Data.Ord": , Data.Ord.Ordering(..) -- *** Numbers -- **** @Int@ -- | Re-exported from "Data.Int": , Data.Int.Int -- ***** @Int8@ , Data.Int.Int8 -- ***** @Int16@ , Data.Int.Int16 -- ***** @Int32@ , Data.Int.Int32 -- ***** @Int64@ , Data.Int.Int64 -- **** @Word@ -- | Re-exported from "Data.Word": , Data.Word.Word -- ***** @Word8@ , Data.Word.Word8 -- ***** @Word16@ , Data.Word.Word16 -- ***** @Word32@ , Data.Word.Word32 -- ***** @Word64@ , Data.Word.Word64 -- **** @Integer@ -- | Re-exported from "Prelude.Integer": , Prelude.Integer -- **** @Natural@ -- | Re-exported from "Numeric.Natural": , Numeric.Natural.Natural -- **** @Rational@ -- | Re-exported from "Data.Ratio": , Data.Ratio.Rational -- **** @Float@ -- | Re-exported from "Prelude": , Prelude.Float -- **** @Double@ -- | Re-exported from "Prelude": , Prelude.Double -- *** @Maybe@ -- | Re-exported from "Data.Maybe": , Data.Maybe.Maybe(..) -- *** @Either@ -- | Re-exported from "Data.Either": , Data.Either.Either(..) -- *** @NonEmpty@ -- | Re-exported from Data.List.NonEmpty , Data.List.NonEmpty.NonEmpty(..) -- *** @Proxy@ -- | Re-exported from "Data.Proxy": , Data.Proxy.Proxy(..) -- *** @Void@ -- | Re-exported from "Data.Void": , Data.Void.Void -- *** @Const@ -- | Re-exported from "Data.Functor.Const": , Data.Functor.Const.Const(..) 
-- *** @Identity@ -- | Re-exported from "Data.Functor.Identity": , Data.Functor.Identity.Identity(..) -- *** @IO@ -- | Re-exported from "System.IO": , System.IO.IO -- *** @ST@ -- | Re-exported from "Control.Monad.ST": , Control.Monad.ST.ST -- ** Type Classes -- *** @Eq@ -- | Re-exported from "Data.Eq": , Data.Eq.Eq -- *** @Ord@ -- | Re-exported from "Data.Ord": , Data.Ord.Ord -- *** @Bounded@ -- | Re-exported from "Prelude": , Prelude.Bounded -- *** @Enum@ -- | Re-exported from "Prelude": , Prelude.Enum -- *** Strings -- **** @Show@ -- | Re-exported from "Text.Show": , Text.Show.Show -- **** @Read@ -- | Re-exported from "Text.Read": , Text.Read.Read -- **** @IsString@ -- | Re-exported from "Data.String": , Data.String.IsString -- *** Numeric -- | All numeric classes are re-exported from "Prelude": -- **** @Num@ , Prelude.Num -- **** @Fractional@ , Prelude.Fractional -- **** @Floating@ , Prelude.Floating -- **** @Real@ , Prelude.Real -- **** @Integral@ , Prelude.Integral -- **** @RealFrac@ , Prelude.RealFrac -- **** @RealFloat@ , Prelude.RealFloat -- *** Categories -- **** @Functor@ -- | Re-exported from "Data.Functor": , Data.Functor.Functor -- **** @Bifunctor@ -- | Re-exported from "Data.Bifunctor": , Data.Bifunctor.Bifunctor -- **** @Foldable@ -- | Re-exported from "Data.Foldable": , Data.Foldable.Foldable -- **** @Bifoldable@ -- | Re-exported from "Data.Bifoldable": , Data.Bifoldable.Bifoldable -- **** @Semigroup@ -- | Re-exported from "Data.Semigroup": , Data.Semigroup.Semigroup -- **** @Monoid@ -- | Re-exported from "Data.Monoid": , Data.Monoid.Monoid -- **** @Applicative@ -- | Re-exported from "Control.Applicative": , Control.Applicative.Applicative -- **** @Alternative@ -- | Re-exported from "Control.Applicative": , Control.Applicative.Alternative -- **** @Traversable@ -- | Re-exported from "Data.Traversable": , Data.Traversable.Traversable -- **** @Bitraversable@ -- | Re-exported from "Data.Bitraversable": , Data.Bitraversable.Bitraversable -- **** @Monad@ -- | Re-exported from "Control.Monad": , Control.Monad.Monad -- **** @MonadPlus@ -- | Re-exported from "Control.Monad": , Control.Monad.MonadPlus -- **** @Category@ -- | Re-exported from "Control.Category": , Control.Category.Category -- **** @Arrow@ -- | Re-exported from "Control.Arrow": , Control.Arrow.Arrow -- **** @MonadFail@ -- | Re-exported from "Control.Monad.Fail": , Control.Monad.Fail.MonadFail -- *** Data -- **** @Typeable@ -- | Re-exported from "Control.Monad": , Data.Typeable.Typeable -- **** @Data@ -- | Re-exported from "Data.Data": , Data.Data.Data(..) 
-- **** @Generic@ -- | Re-exported from "GHC.Generics": , GHC.Generics.Generic -- **** @Storable@ -- | Re-exported from "Foreign.Storable": , Foreign.Storable.Storable -- *** Exceptions -- *** @Exception@ -- | Re-exported from "Control.Exception.Base": , Control.Exception.Base.Exception -- **** @HasCallStack@ -- | Re-exported from "GHC.Stack": , GHC.Stack.HasCallStack -- * @deepseq@ -- ** @NFData@ -- | Re-exported from "Control.DeepSeq": , Control.DeepSeq.NFData -- * @mtl@ -- ** @MonadTrans@ -- | Re-exported from "Control.Monad.Reader": , Control.Monad.Reader.MonadTrans -- ** @MonadReader@ , Control.Monad.Reader.MonadReader -- ** @ReaderT@ (@Reader@) -- | Re-exported from "Control.Monad.Reader": , Control.Monad.Reader.Reader , Control.Monad.Reader.ReaderT(ReaderT) -- * @exceptions@ -- ** @MonadThrow@ -- | Re-exported from "Control.Monad.Catch": , Control.Monad.Catch.MonadThrow -- * @bytestring@ -- ** @ByteString@ -- | Re-exported from "Data.ByteString": , Data.ByteString.ByteString -- ** @LByteString@ -- | A synonym for lazy `Data.ByteString.Lazy.ByteString` re-exported from "Data.ByteString.Lazy": , RIO.Prelude.Renames.LByteString -- ** @Builder@ -- | Re-exported from "Data.ByteString.Builder": , Data.ByteString.Builder.Builder -- ** @ShortByteString@ -- | Re-exported from "Data.ByteString.Short": , Data.ByteString.Short.ShortByteString -- * @text@ -- ** @Text@ -- | Re-exported from "Data.Text": , Data.Text.Text -- ** @LText@ -- | A synonym for lazy `Data.Text.Lazy.Text` re-exported from "Data.Text.Lazy": , RIO.Prelude.Renames.LText -- ** @UnicodeException@ -- | Re-exported from "Data.Text.Encoding.Error": , Data.Text.Encoding.Error.UnicodeException(..) -- * @vector@ -- ** @Vector@ -- | Boxed vector re-exported from "Data.Vector": , Data.Vector.Vector -- ** @UVector@ -- | A synonym for unboxed `Data.Vector.Unboxed.Vector` re-exported from "Data.Vector.Unboxed": , RIO.Prelude.Renames.UVector -- *** @Unbox@ , Data.Vector.Unboxed.Unbox -- ** @SVector@ -- | A synonym for storable `Data.Vector.Storable.Vector` re-exported from "Data.Vector.Storable": , RIO.Prelude.Renames.SVector -- ** @GVector@ -- | A synonym for generic `Data.Vector.Generic.Vector` re-exported from "Data.Vector.Generic": , RIO.Prelude.Renames.GVector -- * @containers@ -- ** @IntMap@ -- | Re-exported from "Data.IntMap.Strict": , Data.IntMap.Strict.IntMap -- ** @Map@ -- | Re-exported from "Data.Map.Strict": , Data.Map.Strict.Map -- ** @IntSet@ -- | Re-exported from "Data.IntSet": , Data.IntSet.IntSet -- ** @Set@ -- | Re-exported from "Data.Set": , Data.Set.Set -- ** @Seq@ -- | Re-exported from "Data.Sequence": , Data.Sequence.Seq -- * @hashable@ -- ** @Hashable@ , Data.Hashable.Hashable -- * @unordered-containers@ -- ** @HashMap@ -- | Re-exported from "Data.HashMap.Strict": , Data.HashMap.Strict.HashMap -- ** @HashSet@ -- | Re-exported from "Data.HashSet": , Data.HashSet.HashSet -- * @primitive@ -- ** @PrimMonad@ -- | Re-exported from "Control.Monad.Primitive": , Control.Monad.Primitive.PrimMonad (PrimState) ) where import qualified RIO.Prelude.Renames import qualified Control.Applicative import qualified Control.Arrow import qualified Control.Category import qualified Control.DeepSeq import qualified Control.Exception.Base import qualified Control.Monad import qualified Control.Monad.Catch import qualified Control.Monad.Fail import qualified Control.Monad.Primitive (PrimMonad(..)) import qualified Control.Monad.Reader import qualified Control.Monad.ST import qualified Data.Bifoldable import qualified Data.Bifunctor import qualified
Data.Bitraversable import qualified Data.Bool import qualified Data.ByteString (ByteString) import qualified Data.ByteString.Builder (Builder) import qualified Data.ByteString.Short import qualified Data.Char import qualified Data.Data import qualified Data.Either import qualified Data.Eq import qualified Data.Foldable import qualified Data.Function import qualified Data.Functor import qualified Data.Functor.Const import qualified Data.Functor.Identity import qualified Data.Hashable import qualified Data.HashMap.Strict import qualified Data.HashSet import qualified Data.Int import qualified Data.IntMap.Strict import qualified Data.IntSet import qualified Data.List import qualified Data.List.NonEmpty import qualified Data.Map.Strict import qualified Data.Maybe import qualified Data.Monoid (Monoid) import qualified Data.Ord import qualified Data.Proxy import qualified Data.Ratio import qualified Data.Semigroup (Semigroup) import qualified Data.Sequence import qualified Data.Set import qualified Data.String (IsString, String) import qualified Data.Text (Text) import qualified Data.Text.Encoding.Error import qualified Data.Traversable import qualified Data.Typeable import qualified Data.Vector import qualified Data.Vector.Unboxed (Unbox) import qualified Data.Void import qualified Data.Word import qualified Foreign.Storable import qualified GHC.Generics import qualified GHC.Stack import qualified Numeric.Natural import qualified Prelude import qualified System.Exit import qualified System.IO import qualified Text.Read import qualified Text.Show -- Bring instances for some of the unliftio types in scope, so they can be documented here. import UnliftIO () rio-0.1.22.0/src/RIO/Process.hs0000644000000000000000000005500514231470023014104 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ViewPatterns #-} -- | Interacting with external processes. -- -- This module provides a layer on top of "System.Process.Typed", with -- the following additions: -- -- * For efficiency, it will cache @PATH@ lookups. -- -- * For convenience, you can set the working directory and env vars -- overrides in a 'RIO' environment instead of on the individual -- calls to the process. -- -- * Built-in support for logging at the debug level. -- -- In order to switch over to this API, the main idea is: -- -- * Like most of the rio library, you need to create an environment -- value (this time 'ProcessContext'), and include it in your 'RIO' -- environment. See 'mkProcessContext'. -- -- * Instead of using the 'System.Process.Typed.proc' function from -- "System.Process.Typed" for creating a 'ProcessConfig', use the -- locally defined 'proc' function, which will handle overriding -- environment variables, looking up paths, performing logging, etc. -- -- Once you have your 'ProcessConfig', use the standard functions from -- 'System.Process.Typed' (reexported here for convenient) for running -- the 'ProcessConfig'. -- -- @since 0.0.3.0 module RIO.Process ( -- * Process context ProcessContext , HasProcessContext (..) 
, EnvVars , mkProcessContext , mkDefaultProcessContext , modifyEnvVars , withModifyEnvVars , lookupEnvFromContext , withWorkingDir -- ** Lenses , workingDirL , envVarsL , envVarsStringsL , exeSearchPathL -- ** Actions , resetExeCache -- * Configuring , proc -- * Spawning (run child process) , withProcess , withProcess_ , withProcessWait , withProcessWait_ , withProcessTerm , withProcessTerm_ -- * Exec (replacing current process) , exec , execSpawn -- * Environment helper , LoggedProcessContext (..) , withProcessContextNoLogging -- * Exceptions , ProcessException (..) -- * Utilities , doesExecutableExist , findExecutable , exeExtensions , augmentPath , augmentPathMap , augmentPathMap' , showProcessArgDebug -- * Reexports , P.ProcessConfig , P.StreamSpec , P.StreamType (..) , P.Process , P.setStdin , P.setStdout , P.setStderr , P.setCloseFds , P.setCreateGroup , P.setDelegateCtlc #if MIN_VERSION_process(1, 3, 0) , P.setDetachConsole , P.setCreateNewConsole , P.setNewSession #endif #if MIN_VERSION_process(1, 4, 0) && !WINDOWS , P.setChildGroup , P.setChildUser #endif , P.mkStreamSpec , P.inherit , P.closed , P.byteStringInput , P.byteStringOutput , P.createPipe , P.useHandleOpen , P.useHandleClose , P.startProcess , P.stopProcess , P.readProcess , P.readProcess_ , P.runProcess , P.runProcess_ , P.readProcessStdout , P.readProcessStdout_ , P.readProcessStderr , P.readProcessStderr_ , P.waitExitCode , P.waitExitCodeSTM , P.getExitCode , P.getExitCodeSTM , P.checkExitCode , P.checkExitCodeSTM , P.getStdin , P.getStdout , P.getStderr , P.ExitCodeException (..) , P.ByteStringOutputException (..) , P.unsafeProcessHandle ) where import RIO.Prelude.Display import RIO.Prelude.Reexports import RIO.Prelude.Logger import RIO.Prelude.RIO import RIO.Prelude.Lens import qualified Data.Map as Map import qualified Data.Text as T import qualified System.Directory as D import System.Environment (getEnvironment) import System.Exit (exitWith) import qualified System.FilePath as FP import qualified System.Process.Typed as P import System.Process.Typed hiding (withProcess, withProcess_, withProcessWait, withProcessWait_, withProcessTerm, withProcessTerm_, proc) #ifndef WINDOWS import System.Directory (setCurrentDirectory) import System.Posix.Process (executeFile) #endif -- | The environment variable map -- -- @since 0.0.3.0 type EnvVars = Map Text Text -- | Context in which to run processes. -- -- @since 0.0.3.0 data ProcessContext = ProcessContext { pcTextMap :: !EnvVars -- ^ Environment variables as map , pcStringList :: ![(String, String)] -- ^ Environment variables as association list , pcPath :: ![FilePath] -- ^ List of directories searched for executables (@PATH@) , pcExeCache :: !(IORef (Map FilePath (Either ProcessException FilePath))) -- ^ Cache of already looked up executable paths. , pcExeExtensions :: [String] -- ^ @[""]@ on non-Windows systems, @["", ".exe", ".bat"]@ on Windows , pcWorkingDir :: !(Maybe FilePath) -- ^ Override the working directory. } -- | Exception type which may be generated in this module. -- -- /NOTE/ Other exceptions may be thrown by underlying libraries! 
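--
-- An illustrative sketch, not part of the original documentation: a
-- 'ProcessException' thrown by 'proc' (during executable lookup) can be
-- handled like any other synchronous exception. The executable name below is
-- hypothetical.
--
-- > proc "no-such-exe" [] runProcess_ `catch` \e ->
-- >   logWarn ("Could not run process: " <> displayShow (e :: ProcessException))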
-- -- @since 0.0.3.0 data ProcessException = NoPathFound | ExecutableNotFound String [FilePath] | ExecutableNotFoundAt FilePath | PathsInvalidInPath [FilePath] deriving (Typeable, Eq) instance Show ProcessException where show NoPathFound = "PATH not found in ProcessContext" show (ExecutableNotFound name path) = concat [ "Executable named " , name , " not found on path: " , show path ] show (ExecutableNotFoundAt name) = "Did not find executable at specified path: " ++ name show (PathsInvalidInPath paths) = unlines $ [ "Would need to add some paths to the PATH environment variable \ \to continue, but they would be invalid because they contain a " ++ show FP.searchPathSeparator ++ "." , "Please fix the following paths and try again:" ] ++ paths instance Exception ProcessException -- | Get the 'ProcessContext' from the environment. -- -- @since 0.0.3.0 class HasProcessContext env where processContextL :: Lens' env ProcessContext instance HasProcessContext ProcessContext where processContextL = id data EnvVarFormat = EVFWindows | EVFNotWindows currentEnvVarFormat :: EnvVarFormat currentEnvVarFormat = #if WINDOWS EVFWindows #else EVFNotWindows #endif -- Don't use CPP so that the Windows code path is at least type checked -- regularly isWindows :: Bool isWindows = case currentEnvVarFormat of EVFWindows -> True EVFNotWindows -> False -- | Override the working directory processes run in. @Nothing@ means -- the current process's working directory. -- -- @since 0.0.3.0 workingDirL :: HasProcessContext env => Lens' env (Maybe FilePath) workingDirL = processContextL.lens pcWorkingDir (\x y -> x { pcWorkingDir = y }) -- | Get the environment variables. We cannot provide a @Lens@ here, -- since updating the environment variables requires an @IO@ action to -- allocate a new @IORef@ for holding the executable path cache. -- -- @since 0.0.3.0 envVarsL :: HasProcessContext env => SimpleGetter env EnvVars envVarsL = processContextL.to pcTextMap -- | Get the 'EnvVars' as an associated list of 'String's. -- -- Useful for interacting with other libraries. -- -- @since 0.0.3.0 envVarsStringsL :: HasProcessContext env => SimpleGetter env [(String, String)] envVarsStringsL = processContextL.to pcStringList -- | Get the list of directories searched for executables (the @PATH@). -- -- Similar to 'envVarMapL', this cannot be a full @Lens@. -- -- @since 0.0.3.0 exeSearchPathL :: HasProcessContext env => SimpleGetter env [FilePath] exeSearchPathL = processContextL.to pcPath -- | Create a new 'ProcessContext' from the given environment variable map. -- -- @since 0.0.3.0 mkProcessContext :: MonadIO m => EnvVars -> m ProcessContext mkProcessContext (normalizePathEnv -> tm) = do ref <- newIORef Map.empty return ProcessContext { pcTextMap = tm , pcStringList = map (T.unpack *** T.unpack) $ Map.toList tm , pcPath = (if isWindows then (".":) else id) (maybe [] (FP.splitSearchPath . T.unpack) (Map.lookup "PATH" tm)) , pcExeCache = ref , pcExeExtensions = if isWindows then let pathext = fromMaybe defaultPATHEXT (Map.lookup "PATHEXT" tm) in map T.unpack $ T.splitOn ";" pathext else [""] , pcWorkingDir = Nothing } where -- Default value for PATHTEXT on Windows versions after Windows XP. (The -- documentation of the default at -- https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/start -- is incomplete.) defaultPATHEXT = ".COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC" -- Fix case insensitivity of the PATH environment variable on Windows, -- by forcing all keys full uppercase. 
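-- For example (an illustrative note, not in the original comment), on Windows
-- an entry keyed @"Path"@ is re-keyed to @"PATH"@:
--
-- > normalizePathEnv (Map.fromList [("Path", "C:\\bin")])
-- >   == Map.fromList [("PATH", "C:\\bin")]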
normalizePathEnv :: EnvVars -> EnvVars normalizePathEnv env | isWindows = Map.fromList $ map (first T.toUpper) $ Map.toList env | otherwise = env -- | Reset the executable cache. -- -- @since 0.0.3.0 resetExeCache :: (MonadIO m, MonadReader env m, HasProcessContext env) => m () resetExeCache = do pc <- view processContextL atomicModifyIORef (pcExeCache pc) (const mempty) -- | Same as 'mkProcessContext' but uses the system environment (from -- 'System.Environment.getEnvironment'). -- -- @since 0.0.3.0 mkDefaultProcessContext :: MonadIO m => m ProcessContext mkDefaultProcessContext = liftIO $ getEnvironment >>= mkProcessContext . Map.fromList . map (T.pack *** T.pack) -- | Modify the environment variables of a 'ProcessContext'. This will not -- change the working directory. -- -- Note that this requires 'MonadIO', as it will create a new 'IORef' -- for the cache. -- -- @since 0.0.3.0 modifyEnvVars :: MonadIO m => ProcessContext -> (EnvVars -> EnvVars) -> m ProcessContext modifyEnvVars pc f = do pc' <- mkProcessContext (f $ pcTextMap pc) return pc' { pcWorkingDir = pcWorkingDir pc } -- | Use 'modifyEnvVars' to create a new 'ProcessContext', and then -- use it in the provided action. -- -- @since 0.0.3.0 withModifyEnvVars :: (HasProcessContext env, MonadReader env m, MonadIO m) => (EnvVars -> EnvVars) -> m a -> m a withModifyEnvVars f inner = do pc <- view processContextL pc' <- modifyEnvVars pc f local (set processContextL pc') inner -- | Look into the `ProcessContext` and return the specified environment variable if one is -- available. -- -- @since 0.1.14.0 lookupEnvFromContext :: (MonadReader env m, HasProcessContext env) => Text -> m (Maybe Text) lookupEnvFromContext envName = Map.lookup envName <$> view envVarsL -- | Set the working directory to be used by child processes. -- -- @since 0.0.3.0 withWorkingDir :: (HasProcessContext env, MonadReader env m, MonadIO m) => FilePath -> m a -> m a withWorkingDir = local . set workingDirL . Just -- | Perform pre-call-process tasks. Ensure the working directory exists and find the -- executable path. -- -- Throws a 'ProcessException' if unsuccessful. -- -- NOT CURRENTLY EXPORTED preProcess :: (HasProcessContext env, MonadReader env m, MonadIO m) => String -- ^ Command name -> m FilePath preProcess name = do name' <- findExecutable name >>= either throwIO return wd <- view workingDirL liftIO $ maybe (return ()) (D.createDirectoryIfMissing True) wd return name' -- | Log running a process with its arguments, for debugging (-v). -- -- This logs one message before running the process and one message after.
-- -- NOT CURRENTLY EXPORTED withProcessTimeLog :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Maybe FilePath -- ^ working dir -> String -- ^ executable -> [String] -- ^ arguments -> m a -> m a withProcessTimeLog mdir name args proc' = do let cmdText = T.intercalate " " (T.pack name : map showProcessArgDebug args) dirMsg = case mdir of Nothing -> "" Just dir -> " within " <> T.pack dir logDebug ("Run process" <> display dirMsg <> ": " <> display cmdText) start <- getMonotonicTime x <- proc' end <- getMonotonicTime let diff = end - start useColor <- view logFuncUseColorL accentColors <- view logFuncAccentColorsL logDebug ("Process finished in " <> (if useColor then accentColors 0 else "") <> -- accent color 0 timeSpecMilliSecondText diff <> (if useColor then "\ESC[0m" else "") <> -- reset ": " <> display cmdText) return x timeSpecMilliSecondText :: Double -> Utf8Builder timeSpecMilliSecondText d = display (round (d * 1000) :: Int) <> "ms" -- | Provide a 'ProcessConfig' based on the 'ProcessContext' in -- scope. Deals with resolving the full path, setting the child -- process's environment variables, setting the working directory, and -- wrapping the call with 'withProcessTimeLog' for debugging output. -- -- This is intended to be analogous to the @proc@ function provided by -- the @System.Process.Typed@ module, but has a different type -- signature to (1) allow it to perform @IO@ actions for looking up -- paths, and (2) allow logging and timing of the running action. -- -- @since 0.0.3.0 proc :: (HasProcessContext env, HasLogFunc env, MonadReader env m, MonadIO m, HasCallStack) => FilePath -- ^ command to run -> [String] -- ^ command line arguments -> (ProcessConfig () () () -> m a) -> m a proc name0 args inner = do name <- preProcess name0 wd <- view workingDirL envStrings <- view envVarsStringsL withProcessTimeLog wd name args $ inner $ setEnv envStrings $ maybe id setWorkingDir wd $ P.proc name args -- | Same as 'P.withProcess', but generalized to 'MonadUnliftIO'. -- -- @since 0.0.3.0 withProcess :: MonadUnliftIO m => ProcessConfig stdin stdout stderr -> (Process stdin stdout stderr -> m a) -> m a withProcess pc f = withRunInIO $ \run -> P.withProcessTerm pc (run . f) {-# DEPRECATED withProcess "Please consider using withProcessWait, or instead use withProcessTerm" #-} -- | Same as 'P.withProcess_', but generalized to 'MonadUnliftIO'. -- -- @since 0.0.3.0 withProcess_ :: MonadUnliftIO m => ProcessConfig stdin stdout stderr -> (Process stdin stdout stderr -> m a) -> m a withProcess_ pc f = withRunInIO $ \run -> P.withProcessTerm_ pc (run . f) {-# DEPRECATED withProcess_ "Please consider using withProcessWait, or instead use withProcessTerm" #-} -- | Same as 'P.withProcessWait', but generalized to 'MonadUnliftIO'. -- -- @since 0.1.10.0 withProcessWait :: MonadUnliftIO m => ProcessConfig stdin stdout stderr -> (Process stdin stdout stderr -> m a) -> m a withProcessWait pc f = withRunInIO $ \run -> P.withProcessWait pc (run . f) -- | Same as 'P.withProcessWait_', but generalized to 'MonadUnliftIO'. -- -- @since 0.1.10.0 withProcessWait_ :: MonadUnliftIO m => ProcessConfig stdin stdout stderr -> (Process stdin stdout stderr -> m a) -> m a withProcessWait_ pc f = withRunInIO $ \run -> P.withProcessWait_ pc (run . f) -- | Same as 'P.withProcessTerm', but generalized to 'MonadUnliftIO'.
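--
-- An illustrative sketch, not part of the original documentation: combined
-- with the 'proc' helper above, the spawned process is terminated as soon as
-- the inner action finishes or throws. The command and delay are arbitrary
-- examples.
--
-- > proc "sleep" ["600"] $ \config ->
-- >   withProcessTerm config $ \_p ->
-- >     threadDelay 1000000  -- the child is terminated once this completes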
-- -- @since 0.1.10.0 withProcessTerm :: MonadUnliftIO m => ProcessConfig stdin stdout stderr -> (Process stdin stdout stderr -> m a) -> m a withProcessTerm pc f = withRunInIO $ \run -> P.withProcessTerm pc (run . f) -- | Same as 'P.withProcessTerm_', but generalized to 'MonadUnliftIO'. -- -- @since 0.1.10.0 withProcessTerm_ :: MonadUnliftIO m => ProcessConfig stdin stdout stderr -> (Process stdin stdout stderr -> m a) -> m a withProcessTerm_ pc f = withRunInIO $ \run -> P.withProcessTerm_ pc (run . f) -- | A convenience environment combining a 'LogFunc' and a 'ProcessContext' -- -- @since 0.0.3.0 data LoggedProcessContext = LoggedProcessContext ProcessContext LogFunc instance HasLogFunc LoggedProcessContext where logFuncL = lens (\(LoggedProcessContext _ lf) -> lf) (\(LoggedProcessContext pc _) lf -> LoggedProcessContext pc lf) instance HasProcessContext LoggedProcessContext where processContextL = lens (\(LoggedProcessContext x _) -> x) (\(LoggedProcessContext _ lf) pc -> LoggedProcessContext pc lf) -- | Run an action using a 'LoggedProcessContext' with default -- settings and no logging. -- -- @since 0.0.3.0 withProcessContextNoLogging :: MonadIO m => RIO LoggedProcessContext a -> m a withProcessContextNoLogging inner = do pc <- mkDefaultProcessContext runRIO (LoggedProcessContext pc mempty) inner -- | Execute a process within the configured environment. -- -- Execution will not return, because either: -- -- 1) On non-windows, execution is taken over by execv of the -- sub-process. This allows signals to be propagated (#527) -- -- 2) On windows, an 'ExitCode' exception will be thrown. -- -- @since 0.0.3.0 exec :: (HasProcessContext env, HasLogFunc env) => String -> [String] -> RIO env b #ifdef WINDOWS exec = execSpawn #else exec cmd0 args = do wd <- view workingDirL envStringsL <- view envVarsStringsL cmd <- preProcess cmd0 withProcessTimeLog wd cmd args $ liftIO $ do for_ wd setCurrentDirectory executeFile cmd True args $ Just envStringsL #endif -- | Like 'exec', but does not use 'execv' on non-windows. This way, -- there is a sub-process, which is helpful in some cases -- (). -- -- This function only exits by throwing 'ExitCode'. -- -- @since 0.0.3.0 execSpawn :: (HasProcessContext env, HasLogFunc env) => String -> [String] -> RIO env a execSpawn cmd args = proc cmd args (runProcess . setStdin inherit) >>= liftIO . exitWith -- | Check if the given executable exists on the given PATH. -- -- @since 0.0.3.0 doesExecutableExist :: (MonadIO m, MonadReader env m, HasProcessContext env) => String -- ^ Name of executable -> m Bool doesExecutableExist = liftM isRight . findExecutable -- | Find the complete path for the given executable name. -- -- On POSIX systems, filenames that match but are not executables are excluded. -- -- On Windows systems, the executable names tried, in turn, are the supplied -- name (only if it has an extension) and that name extended by each of the -- 'exeExtensions'. Also, this function may behave differently from -- 'RIO.Directory.findExecutable'. The latter excludes as executables filenames -- without a @.bat@, @.cmd@, @.com@ or @.exe@ extension (case-insensitive).
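--
-- A hypothetical usage sketch, not part of the original documentation:
--
-- > eghc <- findExecutable "ghc"
-- > case eghc of
-- >   Left err   -> logWarn (displayShow err)
-- >   Right path -> logInfo ("Found ghc at " <> fromString path)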
-- -- @since 0.0.3.0 findExecutable :: (MonadIO m, MonadReader env m, HasProcessContext env) => String -- ^ Name of executable -> m (Either ProcessException FilePath) -- ^ Full path to that executable on success findExecutable name | any FP.isPathSeparator name = do names <- addPcExeExtensions name testFPs (pure $ Left $ ExecutableNotFoundAt name) D.makeAbsolute names findExecutable name = do pc <- view processContextL m <- readIORef $ pcExeCache pc case Map.lookup name m of Just epath -> pure epath Nothing -> do let loop [] = pure $ Left $ ExecutableNotFound name (pcPath pc) loop (dir:dirs) = do fps <- addPcExeExtensions $ dir FP. name testFPs (loop dirs) D.makeAbsolute fps epath <- loop $ pcPath pc () <- atomicModifyIORef (pcExeCache pc) $ \m' -> (Map.insert name epath m', ()) pure epath -- | A helper function to add the executable extensions of the process context -- to a file path. On Windows, the original file path is included, if it has an -- existing extension. addPcExeExtensions :: (MonadIO m, MonadReader env m, HasProcessContext env) => FilePath -> m [FilePath] addPcExeExtensions fp = do pc <- view processContextL pure $ (if isWindows && FP.hasExtension fp then (fp:) else id) (map (fp ++) (pcExeExtensions pc)) -- | A helper function to test whether file paths are to an executable testFPs :: (MonadIO m, MonadReader env m, HasProcessContext env) => m (Either ProcessException FilePath) -- ^ Default if no executable exists at any file path -> (FilePath -> IO FilePath) -- ^ Modification to apply to a file path, if an executable exists there -> [FilePath] -- ^ File paths to test, in turn -> m (Either ProcessException FilePath) testFPs ifNone _ [] = ifNone testFPs ifNone modify (fp:fps) = do exists <- liftIO $ D.doesFileExist fp existsExec <- liftIO $ if exists then if isWindows then pure True else isExecutable else pure False if existsExec then liftIO $ Right <$> modify fp else testFPs ifNone modify fps where isExecutable = D.executable <$> D.getPermissions fp -- | Get the filename extensions for executable files, including the dot (if -- any). -- -- On POSIX systems, this is @[""]@. -- -- On Windows systems, the list is determined by the value of the @PATHEXT@ -- environment variable, if it present in the environment. If the variable is -- absent, this is its default value on a Windows system. This function may, -- therefore, behave differently from 'RIO.Directory.exeExtension', -- which returns only @".exe"@. -- -- @since 0.1.13.0 exeExtensions :: (MonadIO m, MonadReader env m, HasProcessContext env) => m [String] exeExtensions = do pc <- view processContextL return $ pcExeExtensions pc -- | Augment the given value (assumed to be that of an environment variable -- that lists paths, such as PATH; this is not checked) with the given extra -- paths. Those paths are prepended (as in: they take precedence). -- -- @since 0.0.3.0 augmentPath :: [FilePath] -> Maybe Text -> Either ProcessException Text augmentPath dirs mpath = case filter (FP.searchPathSeparator `elem`) dirs of [] -> Right $ T.intercalate (T.singleton FP.searchPathSeparator) $ map (T.pack . FP.dropTrailingPathSeparator) dirs ++ maybeToList mpath illegal -> Left $ PathsInvalidInPath illegal -- | Apply 'augmentPath' on the value of the PATH environment variable in the -- given 'EnvVars'. -- -- @since 0.0.3.0 augmentPathMap :: [FilePath] -> EnvVars -> Either ProcessException EnvVars augmentPathMap = augmentPathMap' "PATH" -- | Apply 'augmentPath' on the value of the given environment variable in the -- given 'EnvVars'. 
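--
-- An illustrative sketch, not part of the original documentation: prepend a
-- hypothetical directory to the @PATH@ entry of the ambient environment
-- variables before running a (likewise hypothetical) command. On error the
-- sketch simply keeps the unmodified environment.
--
-- > withModifyEnvVars
-- >   (\env -> either (const env) id (augmentPathMap' "PATH" ["/opt/tools/bin"] env))
-- >   (proc "mytool" ["--version"] runProcess_)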
-- -- @since 0.1.22.0 augmentPathMap' :: Text -- ^ Environment variable. If it does not already exist in the given -- 'EnvVars', it will be created. -> [FilePath] -> EnvVars -> Either ProcessException EnvVars augmentPathMap' envVar dirs (normalizePathEnv -> origEnv) = do path <- augmentPath dirs mpath return $ Map.insert envVar path origEnv where mpath = Map.lookup envVar origEnv -- | Show a process arg including speechmarks when necessary. Just for -- debugging purposes, not functionally important. -- -- @since 0.0.3.0 showProcessArgDebug :: String -> Text showProcessArgDebug x | any special x || null x = T.pack (show x) | otherwise = T.pack x where special '"' = True special ' ' = True special _ = False rio-0.1.22.0/src/RIO/Seq.hs0000644000000000000000000000611714231470023013216 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | @Seq@. Import as: -- -- > import qualified RIO.Seq as Seq module RIO.Seq ( Data.Sequence.Seq(..) -- * Construction , Data.Sequence.empty , Data.Sequence.singleton , (Data.Sequence.<|) , (Data.Sequence.|>) , (Data.Sequence.><) , Data.Sequence.fromList , Data.Sequence.fromFunction , Data.Sequence.fromArray -- ** Repetition , Data.Sequence.replicate , Data.Sequence.replicateA , Data.Sequence.replicateM #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.cycleTaking #endif -- ** Iterative construction , Data.Sequence.iterateN , Data.Sequence.unfoldr , Data.Sequence.unfoldl -- * Deconstruction -- | Additional functions for deconstructing sequences are available via the -- 'Foldable' instance of 'Seq'. -- ** Queries , Data.Sequence.null , Data.Sequence.length -- ** Views , Data.Sequence.ViewL(..) , Data.Sequence.viewl , Data.Sequence.ViewR(..) , Data.Sequence.viewr -- * Scans , Data.Sequence.scanl , Data.Sequence.scanl1 , Data.Sequence.scanr , Data.Sequence.scanr1 -- * Sublists , Data.Sequence.tails , Data.Sequence.inits #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.chunksOf #endif -- ** Sequential searches , Data.Sequence.takeWhileL , Data.Sequence.takeWhileR , Data.Sequence.dropWhileL , Data.Sequence.dropWhileR , Data.Sequence.spanl , Data.Sequence.spanr , Data.Sequence.breakl , Data.Sequence.breakr , Data.Sequence.partition , Data.Sequence.filter -- * Sorting , Data.Sequence.sort , Data.Sequence.sortBy , Data.Sequence.unstableSort , Data.Sequence.unstableSortBy -- * Indexing #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.lookup , (Data.Sequence.!?) #endif , Data.Sequence.index , Data.Sequence.adjust #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.adjust' #endif , Data.Sequence.update , Data.Sequence.take , Data.Sequence.drop #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.insertAt , Data.Sequence.deleteAt #endif , Data.Sequence.splitAt -- ** Indexing with predicates -- | These functions perform sequential searches from the left or right ends -- of the sequence elements. , Data.Sequence.elemIndexL , Data.Sequence.elemIndicesL , Data.Sequence.elemIndexR , Data.Sequence.elemIndicesR , Data.Sequence.findIndexL , Data.Sequence.findIndicesL , Data.Sequence.findIndexR , Data.Sequence.findIndicesR -- * Folds -- | General folds are available via the 'Foldable' instance of 'Seq'. 
#if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.foldMapWithIndex #endif , Data.Sequence.foldlWithIndex , Data.Sequence.foldrWithIndex -- * Transformations , Data.Sequence.mapWithIndex #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.traverseWithIndex #endif , Data.Sequence.reverse #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.intersperse #endif -- ** Zips , Data.Sequence.zip , Data.Sequence.zipWith , Data.Sequence.zip3 , Data.Sequence.zipWith3 , Data.Sequence.zip4 , Data.Sequence.zipWith4 ) where import qualified Data.Sequence rio-0.1.22.0/src/RIO/Set.hs0000644000000000000000000000333714231470023013222 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | @Set@. Import as: -- -- > import qualified RIO.Set as Set -- -- This module does not export any partial or unchecked functions. For those, -- see "RIO.Set.Partial" and "RIO.Set.Unchecked" module RIO.Set ( -- * Set type Data.Set.Set -- * Operators , (Data.Set.\\) -- * Query , Data.Set.null , Data.Set.size , Data.Set.member , Data.Set.notMember , Data.Set.lookupLT , Data.Set.lookupGT , Data.Set.lookupLE , Data.Set.lookupGE , Data.Set.isSubsetOf , Data.Set.isProperSubsetOf -- * Construction , Data.Set.empty , Data.Set.singleton , Data.Set.insert , Data.Set.delete -- * Combine , Data.Set.union , Data.Set.unions , Data.Set.difference , Data.Set.intersection -- * Filter , Data.Set.filter #if MIN_VERSION_containers(0,5,8) , Data.Set.takeWhileAntitone , Data.Set.dropWhileAntitone , Data.Set.spanAntitone #endif , Data.Set.partition , Data.Set.split , Data.Set.splitMember , Data.Set.splitRoot -- * Indexed , Data.Set.lookupIndex #if MIN_VERSION_containers(0,5,8) , Data.Set.take , Data.Set.drop , Data.Set.splitAt #endif -- * Map , Data.Set.map -- * Folds , Data.Set.foldr , Data.Set.foldl -- ** Strict folds , Data.Set.foldr' , Data.Set.foldl' -- * Min\/Max #if MIN_VERSION_containers(0,5,9) , Data.Set.lookupMin , Data.Set.lookupMax #endif , Data.Set.deleteMin , Data.Set.deleteMax , Data.Set.maxView , Data.Set.minView -- * Conversion -- ** List , Data.Set.elems , Data.Set.toList , Data.Set.fromList -- ** Ordered list , Data.Set.toAscList , Data.Set.toDescList -- * Debugging , Data.Set.showTree , Data.Set.showTreeWith , Data.Set.valid ) where import qualified Data.Set rio-0.1.22.0/src/RIO/Set/Partial.hs0000644000000000000000000000053314231470023014611 0ustar0000000000000000-- | @Set@ partial functions. Import as: -- -- > import qualified RIO.Set.Partial as Set' module RIO.Set.Partial ( -- * Indexed Data.Set.findIndex , Data.Set.elemAt , Data.Set.deleteAt -- * Min\/Max , Data.Set.findMin , Data.Set.findMax , Data.Set.deleteFindMin , Data.Set.deleteFindMax ) where import qualified Data.Set rio-0.1.22.0/src/RIO/Set/Unchecked.hs0000644000000000000000000000121714231470023015106 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | This module contains functions from "Data.Set" that have unchecked -- preconditions on their input. If these preconditions are not satisfied, -- the data structure may end up in an invalid state and other operations -- may misbehave. 
Import as: -- -- > import qualified RIO.Set.Unchecked as Set' module RIO.Set.Unchecked ( -- * Map Data.Set.mapMonotonic -- * Ordered list , Data.Set.fromAscList #if MIN_VERSION_containers(0,5,8) , Data.Set.fromDescList #endif , Data.Set.fromDistinctAscList #if MIN_VERSION_containers(0,5,8) , Data.Set.fromDistinctDescList #endif ) where import qualified Data.Set rio-0.1.22.0/src/RIO/State.hs0000644000000000000000000000122014231470023013534 0ustar0000000000000000-- | Provides reexports of 'MonadState' and related helpers. -- -- @since 0.1.4.0 module RIO.State ( Control.Monad.State.MonadState (..) , Control.Monad.State.gets , Control.Monad.State.modify , Control.Monad.State.modify' , Control.Monad.State.State , Control.Monad.State.runState , Control.Monad.State.evalState , Control.Monad.State.execState , Control.Monad.State.mapState , Control.Monad.State.withState , Control.Monad.State.StateT (..) , Control.Monad.State.evalStateT , Control.Monad.State.execStateT , Control.Monad.State.mapStateT , Control.Monad.State.withStateT ) where import qualified Control.Monad.State rio-0.1.22.0/src/RIO/Text.hs0000644000000000000000000000770714231470023013420 0ustar0000000000000000{-# LANGUAGE MagicHash #-} {-# LANGUAGE OverloadedStrings #-} -- | Strict @Text@. Import as: -- -- > import qualified RIO.Text as T -- -- This module does not export any partial functions. For those, see -- "RIO.Text.Partial" module RIO.Text ( -- * Types Data.Text.Text -- * Creation and elimination , Data.Text.pack , Data.Text.unpack , Data.Text.singleton , Data.Text.empty -- * Basic interface , Data.Text.cons , Data.Text.snoc , Data.Text.append , Data.Text.uncons , Data.Text.null , Data.Text.length , Data.Text.compareLength -- * Transformations , Data.Text.map , Data.Text.intercalate , Data.Text.intersperse , Data.Text.transpose , Data.Text.reverse -- ** Case conversion , Data.Text.toCaseFold , Data.Text.toLower , Data.Text.toUpper , Data.Text.toTitle -- ** Justification , Data.Text.justifyLeft , Data.Text.justifyRight , Data.Text.center -- * Folds , Data.Text.foldl , Data.Text.foldl' , Data.Text.foldr -- ** Special folds , Data.Text.concat , Data.Text.concatMap , Data.Text.any , Data.Text.all -- * Construction -- ** Scans , Data.Text.scanl , Data.Text.scanl1 -- scanl1 and scanr1 are /not/ partial , Data.Text.scanr , Data.Text.scanr1 -- ** Accumulating maps , Data.Text.mapAccumL , Data.Text.mapAccumR -- ** Generation and unfolding , Data.Text.replicate , Data.Text.unfoldr , Data.Text.unfoldrN -- * Substrings -- ** Breaking strings , Data.Text.take , Data.Text.takeEnd , Data.Text.drop , Data.Text.dropEnd , Data.Text.takeWhile , Data.Text.takeWhileEnd , Data.Text.dropWhile , Data.Text.dropWhileEnd , Data.Text.dropAround , Data.Text.strip , Data.Text.stripStart , Data.Text.stripEnd , Data.Text.splitAt , Data.Text.break , Data.Text.span , Data.Text.group , Data.Text.groupBy , Data.Text.inits , Data.Text.tails -- ** Breaking into many substrings , Data.Text.split , Data.Text.chunksOf -- ** Breaking into lines and words , Data.Text.lines , linesCR , Data.Text.words , Data.Text.unlines , Data.Text.unwords -- * Predicates , Data.Text.isPrefixOf , Data.Text.isSuffixOf , Data.Text.isInfixOf -- ** View patterns , Data.Text.stripPrefix , Data.Text.stripSuffix , dropPrefix , dropSuffix , Data.Text.commonPrefixes -- * Searching , Data.Text.filter , Data.Text.find , Data.Text.partition -- * Indexing , Data.Text.index , Data.Text.findIndex -- * Zipping , Data.Text.zip , Data.Text.zipWith -- * Low level operations , Data.Text.copy , 
Data.Text.unpackCString# -- * Encoding , Data.Text.Encoding.encodeUtf8 , Data.Text.Encoding.decodeUtf8With , Data.Text.Encoding.decodeUtf8' , Data.Text.Encoding.Error.lenientDecode ) where import Data.Maybe (fromMaybe) import Data.Text (Text, stripPrefix, stripSuffix) import qualified Data.Text import qualified Data.Text.Encoding import qualified Data.Text.Encoding.Error -- | Drop prefix if present, otherwise return original 'Text'. -- -- @since 0.0.0.0 dropPrefix :: Text -- ^ prefix -> Text -> Text dropPrefix prefix t = fromMaybe t (stripPrefix prefix t) -- | Drop suffix if present, otherwise return original 'Text'. -- -- @since 0.0.0.0 dropSuffix :: Text -- ^ suffix -> Text -> Text dropSuffix suffix t = fromMaybe t (stripSuffix suffix t) -- | 'linesCR' breaks a 'Text' up into a list of `Text`s at newline -- 'Char's. It is very similar to 'Data.Text.lines', but it also removes -- any trailing @'\r'@ characters. The resulting 'Text' values do not -- contain newlines or trailing @'\r'@ characters. -- -- @since 0.1.0.0 linesCR :: Text -> [Text] linesCR = map (dropSuffix "\r") . Data.Text.lines rio-0.1.22.0/src/RIO/Text/Lazy.hs0000644000000000000000000000645714231470023014330 0ustar0000000000000000-- | Lazy @Text@. Import as: -- -- > import qualified RIO.Text.Lazy as TL -- -- This module does not export any partial functions. For those, see -- "RIO.Text.Lazy.Partial" module RIO.Text.Lazy ( -- * Types Data.Text.Lazy.Text -- * Creation and elimination , Data.Text.Lazy.pack , Data.Text.Lazy.unpack , Data.Text.Lazy.singleton , Data.Text.Lazy.empty , Data.Text.Lazy.fromChunks , Data.Text.Lazy.toChunks , Data.Text.Lazy.toStrict , Data.Text.Lazy.fromStrict , Data.Text.Lazy.foldrChunks , Data.Text.Lazy.foldlChunks -- * Basic interface , Data.Text.Lazy.cons , Data.Text.Lazy.snoc , Data.Text.Lazy.append , Data.Text.Lazy.uncons , Data.Text.Lazy.null , Data.Text.Lazy.length , Data.Text.Lazy.compareLength -- * Transformations , Data.Text.Lazy.map , Data.Text.Lazy.intercalate , Data.Text.Lazy.intersperse , Data.Text.Lazy.transpose , Data.Text.Lazy.reverse -- ** Case conversion , Data.Text.Lazy.toCaseFold , Data.Text.Lazy.toLower , Data.Text.Lazy.toUpper , Data.Text.Lazy.toTitle -- ** Justification , Data.Text.Lazy.justifyLeft , Data.Text.Lazy.justifyRight , Data.Text.Lazy.center -- * Folds , Data.Text.Lazy.foldl , Data.Text.Lazy.foldl' , Data.Text.Lazy.foldr -- ** Special folds , Data.Text.Lazy.concat , Data.Text.Lazy.concatMap , Data.Text.Lazy.any , Data.Text.Lazy.all -- * Construction -- ** Scans , Data.Text.Lazy.scanl , Data.Text.Lazy.scanl1 -- NB.
scanl1 and scanr1 are not partial , Data.Text.Lazy.scanr , Data.Text.Lazy.scanr1 -- ** Accumulating maps , Data.Text.Lazy.mapAccumL , Data.Text.Lazy.mapAccumR -- ** Generation and unfolding , Data.Text.Lazy.repeat , Data.Text.Lazy.replicate , Data.Text.Lazy.cycle , Data.Text.Lazy.iterate , Data.Text.Lazy.unfoldr , Data.Text.Lazy.unfoldrN -- * Substrings -- ** Breaking strings , Data.Text.Lazy.take , Data.Text.Lazy.takeEnd , Data.Text.Lazy.drop , Data.Text.Lazy.dropEnd , Data.Text.Lazy.takeWhile , Data.Text.Lazy.takeWhileEnd , Data.Text.Lazy.dropWhile , Data.Text.Lazy.dropWhileEnd , Data.Text.Lazy.dropAround , Data.Text.Lazy.strip , Data.Text.Lazy.stripStart , Data.Text.Lazy.stripEnd , Data.Text.Lazy.splitAt , Data.Text.Lazy.span , Data.Text.Lazy.break , Data.Text.Lazy.group , Data.Text.Lazy.groupBy , Data.Text.Lazy.inits , Data.Text.Lazy.tails -- ** Breaking into many substrings , Data.Text.Lazy.split , Data.Text.Lazy.chunksOf -- ** Breaking into lines and words , Data.Text.Lazy.lines , Data.Text.Lazy.words , Data.Text.Lazy.unlines , Data.Text.Lazy.unwords -- * Predicates , Data.Text.Lazy.isPrefixOf , Data.Text.Lazy.isSuffixOf , Data.Text.Lazy.isInfixOf -- ** View patterns , Data.Text.Lazy.stripPrefix , Data.Text.Lazy.stripSuffix , Data.Text.Lazy.commonPrefixes -- * Searching , Data.Text.Lazy.filter , Data.Text.Lazy.find , Data.Text.Lazy.partition -- * Indexing , Data.Text.Lazy.index , Data.Text.Lazy.count -- * Zipping and unzipping , Data.Text.Lazy.zip , Data.Text.Lazy.zipWith ) where import qualified Data.Text.Lazy rio-0.1.22.0/src/RIO/Text/Lazy/Partial.hs0000644000000000000000000000142614231470023015723 0ustar0000000000000000-- | Lazy @Text@ partial functions. Import as: -- -- > import qualified RIO.Text.Lazy.Partial as TL' module RIO.Text.Lazy.Partial ( -- * Creation and elimination Data.Text.Lazy.head , Data.Text.Lazy.last , Data.Text.Lazy.tail , Data.Text.Lazy.init -- * Transformations , Data.Text.Lazy.replace -- * Folds , Data.Text.Lazy.foldl1 , Data.Text.Lazy.foldl1' , Data.Text.Lazy.foldr1 -- ** Special folds , Data.Text.Lazy.maximum , Data.Text.Lazy.minimum -- * Substrings -- ** Breaking strings , Data.Text.Lazy.breakOn , Data.Text.Lazy.breakOnEnd -- ** Breaking into many substrings , Data.Text.Lazy.splitOn -- * Searching , Data.Text.Lazy.breakOnAll ) where import qualified Data.Text.Lazy rio-0.1.22.0/src/RIO/Text/Partial.hs0000644000000000000000000000134214231470023015001 0ustar0000000000000000-- | Strict @Text@ partial functions. 
Import as: -- -- > import qualified RIO.Text.Partial as T' module RIO.Text.Partial ( -- * Basic interface Data.Text.head , Data.Text.last , Data.Text.tail , Data.Text.init -- * Transformations , Data.Text.replace -- * Folds , Data.Text.foldl1 , Data.Text.foldl1' , Data.Text.foldr1 -- ** Special folds , Data.Text.maximum , Data.Text.minimum -- * Substrings -- ** Breaking strings , Data.Text.breakOn , Data.Text.breakOnEnd -- ** Breaking into many substrings , Data.Text.splitOn -- * Searching , Data.Text.breakOnAll -- * Indexing , Data.Text.count ) where import qualified Data.Text rio-0.1.22.0/src/RIO/Time.hs0000644000000000000000000000150414231470023013357 0ustar0000000000000000module RIO.Time ( module Data.Time , getCurrentTime , getTimeZone , getCurrentTimeZone , getZonedTime , utcToLocalZonedTime ) where import Control.Monad.IO.Class import Data.Time hiding( getCurrentTime, getTimeZone, getCurrentTimeZone , getZonedTime, utcToLocalZonedTime) import qualified Data.Time getCurrentTime :: MonadIO m => m UTCTime getCurrentTime = liftIO Data.Time.getCurrentTime getTimeZone :: MonadIO m => UTCTime -> m TimeZone getTimeZone = liftIO . Data.Time.getTimeZone getCurrentTimeZone :: MonadIO m => m TimeZone getCurrentTimeZone = liftIO Data.Time.getCurrentTimeZone getZonedTime :: MonadIO m => m ZonedTime getZonedTime = liftIO Data.Time.getZonedTime utcToLocalZonedTime :: MonadIO m => UTCTime -> m ZonedTime utcToLocalZonedTime = liftIO . Data.Time.utcToLocalZonedTime rio-0.1.22.0/src/RIO/Vector.hs0000644000000000000000000001457714231470023013741 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | Generic @Vector@ interface. Import as: -- -- > import qualified RIO.Vector as V -- -- This module does not export any partial or unsafe functions. For those, see -- "RIO.Vector.Partial" and "RIO.Vector.Unsafe" module RIO.Vector ( -- * Immutable vectors Data.Vector.Generic.Vector -- * Accessors -- ** Length information , Data.Vector.Generic.length , Data.Vector.Generic.null -- ** Indexing , (Data.Vector.Generic.!?) 
-- ** Extracting subvectors , Data.Vector.Generic.slice , Data.Vector.Generic.take , Data.Vector.Generic.drop , Data.Vector.Generic.splitAt -- * Construction -- ** Initialisation , Data.Vector.Generic.empty , Data.Vector.Generic.singleton , Data.Vector.Generic.replicate , Data.Vector.Generic.generate , Data.Vector.Generic.iterateN -- ** Monadic initialisation , Data.Vector.Generic.replicateM , Data.Vector.Generic.generateM #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.iterateNM #endif , Data.Vector.Generic.create #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.createT #endif -- ** Unfolding , Data.Vector.Generic.unfoldr , Data.Vector.Generic.unfoldrN #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.unfoldrM , Data.Vector.Generic.unfoldrNM #endif , Data.Vector.Generic.constructN , Data.Vector.Generic.constructrN -- ** Enumeration , Data.Vector.Generic.enumFromN , Data.Vector.Generic.enumFromStepN , Data.Vector.Generic.enumFromTo , Data.Vector.Generic.enumFromThenTo -- ** Concatenation , Data.Vector.Generic.cons , Data.Vector.Generic.snoc , (Data.Vector.Generic.++) , Data.Vector.Generic.concat #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.concatNE #endif -- ** Restricting memory usage , Data.Vector.Generic.force -- * Modifying vectors -- ** Permutations , Data.Vector.Generic.reverse -- ** Safe destructive update , Data.Vector.Generic.modify -- * Elementwise operations -- ** Indexing , Data.Vector.Generic.indexed -- ** Mapping , Data.Vector.Generic.map , Data.Vector.Generic.imap , Data.Vector.Generic.concatMap -- ** Monadic mapping , Data.Vector.Generic.mapM , Data.Vector.Generic.imapM , Data.Vector.Generic.mapM_ , Data.Vector.Generic.imapM_ , Data.Vector.Generic.forM , Data.Vector.Generic.forM_ -- ** Zipping , Data.Vector.Generic.zipWith , Data.Vector.Generic.zipWith3 , Data.Vector.Generic.zipWith4 , Data.Vector.Generic.zipWith5 , Data.Vector.Generic.zipWith6 , Data.Vector.Generic.izipWith , Data.Vector.Generic.izipWith3 , Data.Vector.Generic.izipWith4 , Data.Vector.Generic.izipWith5 , Data.Vector.Generic.izipWith6 , Data.Vector.Generic.zip , Data.Vector.Generic.zip3 , Data.Vector.Generic.zip4 , Data.Vector.Generic.zip5 , Data.Vector.Generic.zip6 -- ** Monadic zipping , Data.Vector.Generic.zipWithM , Data.Vector.Generic.izipWithM , Data.Vector.Generic.zipWithM_ , Data.Vector.Generic.izipWithM_ -- ** Unzipping , Data.Vector.Generic.unzip , Data.Vector.Generic.unzip3 , Data.Vector.Generic.unzip4 , Data.Vector.Generic.unzip5 , Data.Vector.Generic.unzip6 -- * Working with predicates -- ** Filtering , Data.Vector.Generic.filter , Data.Vector.Generic.ifilter #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.uniq , Data.Vector.Generic.mapMaybe , Data.Vector.Generic.imapMaybe #endif , Data.Vector.Generic.filterM , Data.Vector.Generic.takeWhile , Data.Vector.Generic.dropWhile -- ** Partitioning , Data.Vector.Generic.partition , Data.Vector.Generic.unstablePartition , Data.Vector.Generic.span , Data.Vector.Generic.break -- ** Searching , Data.Vector.Generic.elem , Data.Vector.Generic.notElem , Data.Vector.Generic.find , Data.Vector.Generic.findIndex , Data.Vector.Generic.findIndices , Data.Vector.Generic.elemIndex , Data.Vector.Generic.elemIndices -- * Folding , Data.Vector.Generic.foldl , Data.Vector.Generic.foldl' , Data.Vector.Generic.foldr , Data.Vector.Generic.foldr' , Data.Vector.Generic.ifoldl , Data.Vector.Generic.ifoldl' , Data.Vector.Generic.ifoldr , Data.Vector.Generic.ifoldr' -- ** Specialised folds , Data.Vector.Generic.all , Data.Vector.Generic.any , 
Data.Vector.Generic.and , Data.Vector.Generic.or , Data.Vector.Generic.sum , Data.Vector.Generic.product -- ** Monadic folds , Data.Vector.Generic.foldM , Data.Vector.Generic.ifoldM , Data.Vector.Generic.foldM' , Data.Vector.Generic.ifoldM' , Data.Vector.Generic.foldM_ , Data.Vector.Generic.ifoldM_ , Data.Vector.Generic.foldM'_ , Data.Vector.Generic.ifoldM'_ -- ** Monadic sequencing , Data.Vector.Generic.sequence , Data.Vector.Generic.sequence_ -- * Prefix sums (scans) , Data.Vector.Generic.prescanl , Data.Vector.Generic.prescanl' , Data.Vector.Generic.postscanl , Data.Vector.Generic.postscanl' , Data.Vector.Generic.scanl , Data.Vector.Generic.scanl' #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.iscanl , Data.Vector.Generic.iscanl' #endif , Data.Vector.Generic.prescanr , Data.Vector.Generic.prescanr' , Data.Vector.Generic.postscanr , Data.Vector.Generic.postscanr' , Data.Vector.Generic.scanr , Data.Vector.Generic.scanr' #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.iscanr , Data.Vector.Generic.iscanr' #endif -- * Conversions -- ** Lists , Data.Vector.Generic.toList , Data.Vector.Generic.fromList , Data.Vector.Generic.fromListN -- ** Different vector types , Data.Vector.Generic.convert -- ** Mutable vectors , Data.Vector.Generic.freeze , Data.Vector.Generic.thaw , Data.Vector.Generic.copy -- * Fusion support -- ** Conversion to/from Bundles , Data.Vector.Generic.stream , Data.Vector.Generic.unstream , Data.Vector.Generic.streamR , Data.Vector.Generic.unstreamR -- ** Recycling support , Data.Vector.Generic.new , Data.Vector.Generic.clone -- * Utilities -- ** Comparisons , Data.Vector.Generic.eq , Data.Vector.Generic.cmp #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.eqBy , Data.Vector.Generic.cmpBy #endif -- ** Show and Read , Data.Vector.Generic.showsPrec , Data.Vector.Generic.readPrec #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.liftShowsPrec , Data.Vector.Generic.liftReadsPrec #endif -- ** @Data@ and @Typeable@ , Data.Vector.Generic.gfoldl , Data.Vector.Generic.dataCast , Data.Vector.Generic.mkType ) where import qualified Data.Vector.Generic rio-0.1.22.0/src/RIO/Vector/Boxed.hs0000644000000000000000000001100014231470023014754 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | Boxed @Vector@. Import as: -- -- > import qualified RIO.Vector.Boxed as VB -- -- This module does not export any partial or unsafe functions. For those, see -- "RIO.Vector.Boxed.Partial" and "RIO.Vector.Boxed.Unsafe" module RIO.Vector.Boxed ( -- * Boxed vectors Data.Vector.Vector , Data.Vector.MVector -- * Accessors -- ** Length information , Data.Vector.length , Data.Vector.null -- ** Indexing , (Data.Vector.!?) 
-- ** Extracting subvectors , Data.Vector.slice , Data.Vector.take , Data.Vector.drop , Data.Vector.splitAt -- * Construction -- ** Initialisation , Data.Vector.empty , Data.Vector.singleton , Data.Vector.replicate , Data.Vector.generate , Data.Vector.iterateN -- ** Monadic initialisation , Data.Vector.replicateM , Data.Vector.generateM #if MIN_VERSION_vector(0,12,0) , Data.Vector.iterateNM #endif , Data.Vector.create #if MIN_VERSION_vector(0,12,0) , Data.Vector.createT #endif -- ** Unfolding , Data.Vector.unfoldr , Data.Vector.unfoldrN #if MIN_VERSION_vector(0,12,0) , Data.Vector.unfoldrM , Data.Vector.unfoldrNM #endif , Data.Vector.constructN , Data.Vector.constructrN -- ** Enumeration , Data.Vector.enumFromN , Data.Vector.enumFromStepN , Data.Vector.enumFromTo , Data.Vector.enumFromThenTo -- ** Concatenation , Data.Vector.cons , Data.Vector.snoc , (Data.Vector.++) , Data.Vector.concat -- ** Restricting memory usage , Data.Vector.force -- * Modifying vectors -- ** Permutations , Data.Vector.reverse -- ** Safe destructive update , Data.Vector.modify -- * Elementwise operations -- ** Indexing , Data.Vector.indexed -- ** Mapping , Data.Vector.map , Data.Vector.imap , Data.Vector.concatMap -- ** Monadic mapping , Data.Vector.mapM , Data.Vector.imapM , Data.Vector.mapM_ , Data.Vector.imapM_ , Data.Vector.forM , Data.Vector.forM_ -- ** Zipping , Data.Vector.zipWith , Data.Vector.zipWith3 , Data.Vector.zipWith4 , Data.Vector.zipWith5 , Data.Vector.zipWith6 , Data.Vector.izipWith , Data.Vector.izipWith3 , Data.Vector.izipWith4 , Data.Vector.izipWith5 , Data.Vector.izipWith6 , Data.Vector.zip , Data.Vector.zip3 , Data.Vector.zip4 , Data.Vector.zip5 , Data.Vector.zip6 -- ** Monadic zipping , Data.Vector.zipWithM , Data.Vector.izipWithM , Data.Vector.zipWithM_ , Data.Vector.izipWithM_ -- ** Unzipping , Data.Vector.unzip , Data.Vector.unzip3 , Data.Vector.unzip4 , Data.Vector.unzip5 , Data.Vector.unzip6 -- * Working with predicates -- ** Filtering , Data.Vector.filter , Data.Vector.ifilter #if MIN_VERSION_vector(0,12,0) , Data.Vector.uniq , Data.Vector.mapMaybe , Data.Vector.imapMaybe #endif , Data.Vector.filterM , Data.Vector.takeWhile , Data.Vector.dropWhile -- ** Partitioning , Data.Vector.partition , Data.Vector.unstablePartition , Data.Vector.span , Data.Vector.break -- ** Searching , Data.Vector.elem , Data.Vector.notElem , Data.Vector.find , Data.Vector.findIndex , Data.Vector.findIndices , Data.Vector.elemIndex , Data.Vector.elemIndices -- * Folding , Data.Vector.foldl , Data.Vector.foldl' , Data.Vector.foldr , Data.Vector.foldr' , Data.Vector.ifoldl , Data.Vector.ifoldl' , Data.Vector.ifoldr , Data.Vector.ifoldr' -- ** Specialised folds , Data.Vector.all , Data.Vector.any , Data.Vector.and , Data.Vector.or , Data.Vector.sum , Data.Vector.product -- ** Monadic folds , Data.Vector.foldM , Data.Vector.ifoldM , Data.Vector.foldM' , Data.Vector.ifoldM' , Data.Vector.foldM_ , Data.Vector.ifoldM_ , Data.Vector.foldM'_ , Data.Vector.ifoldM'_ -- ** Monadic sequencing , Data.Vector.sequence , Data.Vector.sequence_ -- * Prefix sums (scans) , Data.Vector.prescanl , Data.Vector.prescanl' , Data.Vector.postscanl , Data.Vector.postscanl' , Data.Vector.scanl , Data.Vector.scanl' #if MIN_VERSION_vector(0,12,0) , Data.Vector.iscanl , Data.Vector.iscanl' #endif , Data.Vector.prescanr , Data.Vector.prescanr' , Data.Vector.postscanr , Data.Vector.postscanr' , Data.Vector.scanr , Data.Vector.scanr' #if MIN_VERSION_vector(0,12,0) , Data.Vector.iscanr , Data.Vector.iscanr' #endif -- * Conversions -- ** Lists , 
Data.Vector.toList , Data.Vector.fromList , Data.Vector.fromListN -- ** Different vector types , Data.Vector.convert -- ** Mutable vectors , Data.Vector.freeze , Data.Vector.thaw , Data.Vector.copy ) where import qualified Data.Vector rio-0.1.22.0/src/RIO/Vector/Boxed/Partial.hs0000644000000000000000000000241414231470023016361 0ustar0000000000000000-- | Boxed @Vector@ partial functions. Import as: -- -- > import qualified RIO.Vector.Boxed.Partial as VB' module RIO.Vector.Boxed.Partial ( -- * Accessors -- ** Indexing (Data.Vector.!) , Data.Vector.head , Data.Vector.last -- ** Monadic indexing , Data.Vector.indexM , Data.Vector.headM , Data.Vector.lastM -- ** Extracting subvectors , Data.Vector.init , Data.Vector.tail -- * Modifying vectors -- ** Bulk updates , (Data.Vector.//) , Data.Vector.update , Data.Vector.update_ -- ** Accumulations , Data.Vector.accum , Data.Vector.accumulate , Data.Vector.accumulate_ -- ** Permutations , Data.Vector.backpermute -- * Folding , Data.Vector.foldl1 , Data.Vector.foldl1' , Data.Vector.foldr1 , Data.Vector.foldr1' -- ** Specialised folds , Data.Vector.maximum , Data.Vector.maximumBy , Data.Vector.minimum , Data.Vector.minimumBy , Data.Vector.minIndex , Data.Vector.minIndexBy , Data.Vector.maxIndex , Data.Vector.maxIndexBy -- ** Monadic folds , Data.Vector.fold1M , Data.Vector.fold1M' , Data.Vector.fold1M_ , Data.Vector.fold1M'_ -- * Prefix sums (scans) , Data.Vector.scanl1 , Data.Vector.scanl1' , Data.Vector.scanr1 , Data.Vector.scanr1' ) where import qualified Data.Vector rio-0.1.22.0/src/RIO/Vector/Boxed/Unsafe.hs0000644000000000000000000000207714231470023016213 0ustar0000000000000000-- | Boxed @Vector@ unsafe functions. These perform no bounds -- checking, and may cause segmentation faults etc.! Import as: -- -- > import qualified RIO.Vector.Boxed.Unsafe as VB' module RIO.Vector.Boxed.Unsafe ( -- * Accessors -- ** Indexing Data.Vector.unsafeIndex , Data.Vector.unsafeHead , Data.Vector.unsafeLast -- ** Monadic indexing , Data.Vector.unsafeIndexM , Data.Vector.unsafeHeadM , Data.Vector.unsafeLastM -- ** Extracting subvectors , Data.Vector.unsafeSlice , Data.Vector.unsafeInit , Data.Vector.unsafeTail , Data.Vector.unsafeTake , Data.Vector.unsafeDrop -- * Modifying vectors -- ** Bulk updates , Data.Vector.unsafeUpd , Data.Vector.unsafeUpdate , Data.Vector.unsafeUpdate_ -- ** Accumulations , Data.Vector.unsafeAccum , Data.Vector.unsafeAccumulate , Data.Vector.unsafeAccumulate_ -- ** Permutations , Data.Vector.unsafeBackpermute -- * Conversions -- ** Mutable vectors , Data.Vector.unsafeFreeze , Data.Vector.unsafeThaw , Data.Vector.unsafeCopy ) where import qualified Data.Vector rio-0.1.22.0/src/RIO/Vector/Partial.hs0000644000000000000000000000305314231470023015320 0ustar0000000000000000-- | Generic @Vector@ interface partial functions. Import as: -- -- > import qualified RIO.Vector.Partial as V' module RIO.Vector.Partial ( -- * Accessors -- ** Indexing (Data.Vector.Generic.!) 
, Data.Vector.Generic.head , Data.Vector.Generic.last -- ** Monadic indexing , Data.Vector.Generic.indexM , Data.Vector.Generic.headM , Data.Vector.Generic.lastM -- ** Extracting subvectors , Data.Vector.Generic.init , Data.Vector.Generic.tail -- * Modifying vectors -- ** Bulk updates , (Data.Vector.Generic.//) , Data.Vector.Generic.update , Data.Vector.Generic.update_ -- ** Accumulations , Data.Vector.Generic.accum , Data.Vector.Generic.accumulate , Data.Vector.Generic.accumulate_ -- ** Permutations , Data.Vector.Generic.backpermute -- * Folding , Data.Vector.Generic.foldl1 , Data.Vector.Generic.foldl1' , Data.Vector.Generic.foldr1 , Data.Vector.Generic.foldr1' -- ** Specialised folds , Data.Vector.Generic.maximum , Data.Vector.Generic.maximumBy , Data.Vector.Generic.minimum , Data.Vector.Generic.minimumBy , Data.Vector.Generic.minIndex , Data.Vector.Generic.minIndexBy , Data.Vector.Generic.maxIndex , Data.Vector.Generic.maxIndexBy -- ** Monadic folds , Data.Vector.Generic.fold1M , Data.Vector.Generic.fold1M' , Data.Vector.Generic.fold1M_ , Data.Vector.Generic.fold1M'_ -- * Prefix sums (scans) , Data.Vector.Generic.scanl1 , Data.Vector.Generic.scanl1' , Data.Vector.Generic.scanr1 , Data.Vector.Generic.scanr1' ) where import qualified Data.Vector.Generic rio-0.1.22.0/src/RIO/Vector/Storable.hs0000644000000000000000000001152614231470023015503 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | Storable @Vector@. Import as: -- -- > import qualified RIO.Vector.Storable as VS -- -- This module does not export any partial or unsafe functions. For those, see -- "RIO.Vector.Storable.Partial" and "RIO.Vector.Storable.Unsafe" module RIO.Vector.Storable ( -- * Storable vectors Data.Vector.Storable.Vector , Data.Vector.Storable.MVector(..) , Data.Vector.Storable.Storable -- * Accessors -- ** Length information , Data.Vector.Storable.length , Data.Vector.Storable.null -- ** Indexing , (Data.Vector.Storable.!?) 
-- ** Extracting subvectors , Data.Vector.Storable.slice , Data.Vector.Storable.take , Data.Vector.Storable.drop , Data.Vector.Storable.splitAt -- * Construction -- ** Initialisation , Data.Vector.Storable.empty , Data.Vector.Storable.singleton , Data.Vector.Storable.replicate , Data.Vector.Storable.generate , Data.Vector.Storable.iterateN -- ** Monadic initialisation , Data.Vector.Storable.replicateM , Data.Vector.Storable.generateM #if MIN_VERSION_vector(0,12,0) , Data.Vector.Storable.iterateNM #endif , Data.Vector.Storable.create #if MIN_VERSION_vector(0,12,0) , Data.Vector.Storable.createT #endif -- ** Unfolding , Data.Vector.Storable.unfoldr , Data.Vector.Storable.unfoldrN #if MIN_VERSION_vector(0,12,0) , Data.Vector.Storable.unfoldrM , Data.Vector.Storable.unfoldrNM #endif , Data.Vector.Storable.constructN , Data.Vector.Storable.constructrN -- ** Enumeration , Data.Vector.Storable.enumFromN , Data.Vector.Storable.enumFromStepN , Data.Vector.Storable.enumFromTo , Data.Vector.Storable.enumFromThenTo -- ** Concatenation , Data.Vector.Storable.cons , Data.Vector.Storable.snoc , (Data.Vector.Storable.++) , Data.Vector.Storable.concat -- ** Restricting memory usage , Data.Vector.Storable.force -- * Modifying vectors -- ** Permutations , Data.Vector.Storable.reverse -- ** Safe destructive update , Data.Vector.Storable.modify -- * Elementwise operations -- ** Mapping , Data.Vector.Storable.map , Data.Vector.Storable.imap , Data.Vector.Storable.concatMap -- ** Monadic mapping , Data.Vector.Storable.mapM , Data.Vector.Storable.mapM_ , Data.Vector.Storable.forM , Data.Vector.Storable.forM_ -- ** Zipping , Data.Vector.Storable.zipWith , Data.Vector.Storable.zipWith3 , Data.Vector.Storable.zipWith4 , Data.Vector.Storable.zipWith5 , Data.Vector.Storable.zipWith6 , Data.Vector.Storable.izipWith , Data.Vector.Storable.izipWith3 , Data.Vector.Storable.izipWith4 , Data.Vector.Storable.izipWith5 , Data.Vector.Storable.izipWith6 -- ** Monadic zipping , Data.Vector.Storable.zipWithM , Data.Vector.Storable.zipWithM_ -- * Working with predicates -- ** Filtering , Data.Vector.Storable.filter , Data.Vector.Storable.ifilter #if MIN_VERSION_vector(0,12,0) , Data.Vector.Storable.uniq , Data.Vector.Storable.mapMaybe , Data.Vector.Storable.imapMaybe #endif , Data.Vector.Storable.filterM , Data.Vector.Storable.takeWhile , Data.Vector.Storable.dropWhile -- ** Partitioning , Data.Vector.Storable.partition , Data.Vector.Storable.unstablePartition , Data.Vector.Storable.span , Data.Vector.Storable.break -- ** Searching , Data.Vector.Storable.elem , Data.Vector.Storable.notElem , Data.Vector.Storable.find , Data.Vector.Storable.findIndex , Data.Vector.Storable.findIndices , Data.Vector.Storable.elemIndex , Data.Vector.Storable.elemIndices -- * Folding , Data.Vector.Storable.foldl , Data.Vector.Storable.foldl' , Data.Vector.Storable.foldr , Data.Vector.Storable.foldr' , Data.Vector.Storable.ifoldl , Data.Vector.Storable.ifoldl' , Data.Vector.Storable.ifoldr , Data.Vector.Storable.ifoldr' -- ** Specialised folds , Data.Vector.Storable.all , Data.Vector.Storable.any , Data.Vector.Storable.and , Data.Vector.Storable.or , Data.Vector.Storable.sum , Data.Vector.Storable.product -- ** Monadic folds , Data.Vector.Storable.foldM , Data.Vector.Storable.foldM' , Data.Vector.Storable.foldM_ , Data.Vector.Storable.foldM'_ -- * Prefix sums (scans) , Data.Vector.Storable.prescanl , Data.Vector.Storable.prescanl' , Data.Vector.Storable.postscanl , Data.Vector.Storable.postscanl' , Data.Vector.Storable.scanl , 
Data.Vector.Storable.scanl' , Data.Vector.Storable.prescanr , Data.Vector.Storable.prescanr' , Data.Vector.Storable.postscanr , Data.Vector.Storable.postscanr' , Data.Vector.Storable.scanr , Data.Vector.Storable.scanr' -- * Conversions -- ** Lists , Data.Vector.Storable.toList , Data.Vector.Storable.fromList , Data.Vector.Storable.fromListN -- ** Different vector types , Data.Vector.Storable.convert -- ** Mutable vectors , Data.Vector.Storable.freeze , Data.Vector.Storable.thaw , Data.Vector.Storable.copy ) where import qualified Data.Vector.Storable rio-0.1.22.0/src/RIO/Vector/Storable/Partial.hs0000644000000000000000000000302514231470023017072 0ustar0000000000000000-- | Storable @Vector@ partial functions. Import as: -- -- > import qualified RIO.Vector.Storable.Partial as VS' module RIO.Vector.Storable.Partial ( -- * Accessors -- ** Indexing (Data.Vector.Storable.!) , Data.Vector.Storable.head , Data.Vector.Storable.last -- ** Monadic indexing , Data.Vector.Storable.indexM , Data.Vector.Storable.headM , Data.Vector.Storable.lastM -- ** Extracting subvectors , Data.Vector.Storable.init , Data.Vector.Storable.tail -- * Modifying vectors -- ** Bulk updates , (Data.Vector.Storable.//) , Data.Vector.Storable.update_ -- ** Accumulations , Data.Vector.Storable.accum , Data.Vector.Storable.accumulate_ -- ** Permutations , Data.Vector.Storable.backpermute -- * Folding , Data.Vector.Storable.foldl1 , Data.Vector.Storable.foldl1' , Data.Vector.Storable.foldr1 , Data.Vector.Storable.foldr1' -- ** Specialised folds , Data.Vector.Storable.maximum , Data.Vector.Storable.maximumBy , Data.Vector.Storable.minimum , Data.Vector.Storable.minimumBy , Data.Vector.Storable.minIndex , Data.Vector.Storable.minIndexBy , Data.Vector.Storable.maxIndex , Data.Vector.Storable.maxIndexBy -- ** Monadic folds , Data.Vector.Storable.fold1M , Data.Vector.Storable.fold1M' , Data.Vector.Storable.fold1M_ , Data.Vector.Storable.fold1M'_ -- * Prefix sums (scans) , Data.Vector.Storable.scanl1 , Data.Vector.Storable.scanl1' , Data.Vector.Storable.scanr1 , Data.Vector.Storable.scanr1' ) where import qualified Data.Vector.Storable rio-0.1.22.0/src/RIO/Vector/Storable/Unsafe.hs0000644000000000000000000000334414231470023016723 0ustar0000000000000000-- | Storable @Vector@ unsafe functions. These perform no bounds -- checking, and may cause segmentation faults etc.! 
Import as: -- -- > import qualified RIO.Vector.Storable.Unsafe as VS' module RIO.Vector.Storable.Unsafe ( -- * Accessors -- ** Indexing Data.Vector.Storable.unsafeIndex , Data.Vector.Storable.unsafeHead , Data.Vector.Storable.unsafeLast -- ** Monadic indexing , Data.Vector.Storable.unsafeIndexM , Data.Vector.Storable.unsafeHeadM , Data.Vector.Storable.unsafeLastM -- ** Extracting subvectors , Data.Vector.Storable.unsafeSlice , Data.Vector.Storable.unsafeInit , Data.Vector.Storable.unsafeTail , Data.Vector.Storable.unsafeTake , Data.Vector.Storable.unsafeDrop -- * Modifying vectors -- ** Bulk updates , Data.Vector.Storable.unsafeUpd , Data.Vector.Storable.unsafeUpdate_ -- ** Accumulations , Data.Vector.Storable.unsafeAccum , Data.Vector.Storable.unsafeAccumulate_ -- ** Permutations , Data.Vector.Storable.unsafeBackpermute -- * Conversions -- ** Mutable vectors , Data.Vector.Storable.unsafeFreeze , Data.Vector.Storable.unsafeThaw , Data.Vector.Storable.unsafeCopy -- * Raw pointers , Data.Vector.Storable.unsafeFromForeignPtr , Data.Vector.Storable.unsafeFromForeignPtr0 , Data.Vector.Storable.unsafeToForeignPtr , Data.Vector.Storable.unsafeToForeignPtr0 , unsafeWith ) where import Data.Vector.Storable(Storable, Vector) import qualified Data.Vector.Storable import Foreign.Ptr(Ptr) import UnliftIO -- | Lifted version of 'Data.Vector.Storable.unsafeWith' unsafeWith :: (MonadUnliftIO m, Storable a) => Vector a -> (Ptr a -> m b) -> m b unsafeWith vec action = withRunInIO $ \unlifter -> Data.Vector.Storable.unsafeWith vec (unlifter . action) rio-0.1.22.0/src/RIO/Vector/Unboxed.hs0000644000000000000000000001251514231470023015333 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | Unboxed @Vector@. Import as: -- -- > import qualified RIO.Vector.Unboxed as VU -- -- This module does not export any partial or unsafe functions. For those, see -- "RIO.Vector.Unboxed.Partial" and "RIO.Vector.Unboxed.Unsafe" module RIO.Vector.Unboxed ( -- * Unboxed vectors Data.Vector.Unboxed.Vector , Data.Vector.Unboxed.MVector(..) , Data.Vector.Unboxed.Unbox -- * Accessors -- ** Length information , Data.Vector.Unboxed.length , Data.Vector.Unboxed.null -- ** Indexing , (Data.Vector.Unboxed.!?) 
-- ** Extracting subvectors , Data.Vector.Unboxed.slice , Data.Vector.Unboxed.take , Data.Vector.Unboxed.drop , Data.Vector.Unboxed.splitAt -- * Construction -- ** Initialisation , Data.Vector.Unboxed.empty , Data.Vector.Unboxed.singleton , Data.Vector.Unboxed.replicate , Data.Vector.Unboxed.generate , Data.Vector.Unboxed.iterateN -- ** Monadic initialisation , Data.Vector.Unboxed.replicateM , Data.Vector.Unboxed.generateM #if MIN_VERSION_vector(0,12,0) , Data.Vector.Unboxed.iterateNM #endif , Data.Vector.Unboxed.create #if MIN_VERSION_vector(0,12,0) , Data.Vector.Unboxed.createT #endif -- ** Unfolding , Data.Vector.Unboxed.unfoldr , Data.Vector.Unboxed.unfoldrN #if MIN_VERSION_vector(0,12,0) , Data.Vector.Unboxed.unfoldrM , Data.Vector.Unboxed.unfoldrNM #endif , Data.Vector.Unboxed.constructN , Data.Vector.Unboxed.constructrN -- ** Enumeration , Data.Vector.Unboxed.enumFromN , Data.Vector.Unboxed.enumFromStepN , Data.Vector.Unboxed.enumFromTo , Data.Vector.Unboxed.enumFromThenTo -- ** Concatenation , Data.Vector.Unboxed.cons , Data.Vector.Unboxed.snoc , (Data.Vector.Unboxed.++) , Data.Vector.Unboxed.concat -- ** Restricting memory usage , Data.Vector.Unboxed.force -- * Modifying vectors -- ** Permutations , Data.Vector.Unboxed.reverse -- ** Safe destructive update , Data.Vector.Unboxed.modify -- * Elementwise operations -- ** Indexing , Data.Vector.Unboxed.indexed -- ** Mapping , Data.Vector.Unboxed.map , Data.Vector.Unboxed.imap , Data.Vector.Unboxed.concatMap -- ** Monadic mapping , Data.Vector.Unboxed.mapM , Data.Vector.Unboxed.imapM , Data.Vector.Unboxed.mapM_ , Data.Vector.Unboxed.imapM_ , Data.Vector.Unboxed.forM , Data.Vector.Unboxed.forM_ -- ** Zipping , Data.Vector.Unboxed.zipWith , Data.Vector.Unboxed.zipWith3 , Data.Vector.Unboxed.zipWith4 , Data.Vector.Unboxed.zipWith5 , Data.Vector.Unboxed.zipWith6 , Data.Vector.Unboxed.izipWith , Data.Vector.Unboxed.izipWith3 , Data.Vector.Unboxed.izipWith4 , Data.Vector.Unboxed.izipWith5 , Data.Vector.Unboxed.izipWith6 , Data.Vector.Unboxed.zip , Data.Vector.Unboxed.zip3 , Data.Vector.Unboxed.zip4 , Data.Vector.Unboxed.zip5 , Data.Vector.Unboxed.zip6 -- ** Monadic zipping , Data.Vector.Unboxed.zipWithM , Data.Vector.Unboxed.izipWithM , Data.Vector.Unboxed.zipWithM_ , Data.Vector.Unboxed.izipWithM_ -- ** Unzipping , Data.Vector.Unboxed.unzip , Data.Vector.Unboxed.unzip3 , Data.Vector.Unboxed.unzip4 , Data.Vector.Unboxed.unzip5 , Data.Vector.Unboxed.unzip6 -- * Working with predicates -- ** Filtering , Data.Vector.Unboxed.filter , Data.Vector.Unboxed.ifilter #if MIN_VERSION_vector(0,12,0) , Data.Vector.Unboxed.uniq , Data.Vector.Unboxed.mapMaybe , Data.Vector.Unboxed.imapMaybe #endif , Data.Vector.Unboxed.filterM , Data.Vector.Unboxed.takeWhile , Data.Vector.Unboxed.dropWhile -- ** Partitioning , Data.Vector.Unboxed.partition , Data.Vector.Unboxed.unstablePartition , Data.Vector.Unboxed.span , Data.Vector.Unboxed.break -- ** Searching , Data.Vector.Unboxed.elem , Data.Vector.Unboxed.notElem , Data.Vector.Unboxed.find , Data.Vector.Unboxed.findIndex , Data.Vector.Unboxed.findIndices , Data.Vector.Unboxed.elemIndex , Data.Vector.Unboxed.elemIndices -- * Folding , Data.Vector.Unboxed.foldl , Data.Vector.Unboxed.foldl' , Data.Vector.Unboxed.foldr , Data.Vector.Unboxed.foldr' , Data.Vector.Unboxed.ifoldl , Data.Vector.Unboxed.ifoldl' , Data.Vector.Unboxed.ifoldr , Data.Vector.Unboxed.ifoldr' -- ** Specialised folds , Data.Vector.Unboxed.all , Data.Vector.Unboxed.any , Data.Vector.Unboxed.and , Data.Vector.Unboxed.or , Data.Vector.Unboxed.sum , 
Data.Vector.Unboxed.product -- ** Monadic folds , Data.Vector.Unboxed.foldM , Data.Vector.Unboxed.ifoldM , Data.Vector.Unboxed.foldM' , Data.Vector.Unboxed.ifoldM' , Data.Vector.Unboxed.foldM_ , Data.Vector.Unboxed.ifoldM_ , Data.Vector.Unboxed.foldM'_ , Data.Vector.Unboxed.ifoldM'_ -- * Prefix sums (scans) , Data.Vector.Unboxed.prescanl , Data.Vector.Unboxed.prescanl' , Data.Vector.Unboxed.postscanl , Data.Vector.Unboxed.postscanl' , Data.Vector.Unboxed.scanl , Data.Vector.Unboxed.scanl' , Data.Vector.Unboxed.prescanr , Data.Vector.Unboxed.prescanr' , Data.Vector.Unboxed.postscanr , Data.Vector.Unboxed.postscanr' , Data.Vector.Unboxed.scanr , Data.Vector.Unboxed.scanr' -- * Conversions -- ** Lists , Data.Vector.Unboxed.toList , Data.Vector.Unboxed.fromList , Data.Vector.Unboxed.fromListN -- ** Different vector types , Data.Vector.Unboxed.convert -- ** Mutable vectors , Data.Vector.Unboxed.freeze , Data.Vector.Unboxed.thaw , Data.Vector.Unboxed.copy ) where import qualified Data.Vector.Unboxed rio-0.1.22.0/src/RIO/Vector/Unboxed/Partial.hs0000644000000000000000000000306214231470023016724 0ustar0000000000000000-- | Unboxed @Vector@ partial functions. Import as: -- -- > import qualified RIO.Vector.Unboxed.Partial as VU' module RIO.Vector.Unboxed.Partial ( -- * Accessors -- ** Indexing (Data.Vector.Unboxed.!) , Data.Vector.Unboxed.head , Data.Vector.Unboxed.last -- ** Monadic indexing , Data.Vector.Unboxed.indexM , Data.Vector.Unboxed.headM , Data.Vector.Unboxed.lastM -- ** Extracting subvectors , Data.Vector.Unboxed.init , Data.Vector.Unboxed.tail -- * Modifying vectors -- ** Bulk updates , (Data.Vector.Unboxed.//) , Data.Vector.Unboxed.update , Data.Vector.Unboxed.update_ -- ** Accumulations , Data.Vector.Unboxed.accum , Data.Vector.Unboxed.accumulate , Data.Vector.Unboxed.accumulate_ -- ** Permutations , Data.Vector.Unboxed.backpermute -- * Folding , Data.Vector.Unboxed.foldl1 , Data.Vector.Unboxed.foldl1' , Data.Vector.Unboxed.foldr1 , Data.Vector.Unboxed.foldr1' -- ** Specialised folds , Data.Vector.Unboxed.maximum , Data.Vector.Unboxed.maximumBy , Data.Vector.Unboxed.minimum , Data.Vector.Unboxed.minimumBy , Data.Vector.Unboxed.minIndex , Data.Vector.Unboxed.minIndexBy , Data.Vector.Unboxed.maxIndex , Data.Vector.Unboxed.maxIndexBy -- ** Monadic folds , Data.Vector.Unboxed.fold1M , Data.Vector.Unboxed.fold1M' , Data.Vector.Unboxed.fold1M_ , Data.Vector.Unboxed.fold1M'_ -- * Prefix sums (scans) , Data.Vector.Unboxed.scanl1 , Data.Vector.Unboxed.scanl1' , Data.Vector.Unboxed.scanr1 , Data.Vector.Unboxed.scanr1' ) where import qualified Data.Vector.Unboxed rio-0.1.22.0/src/RIO/Vector/Unboxed/Unsafe.hs0000644000000000000000000000236314231470023016554 0ustar0000000000000000-- | Unoxed @Vector@ unsafe functions. These perform no bounds -- checking, and may cause segmentation faults etc.! 
Import as: -- -- > import qualified RIO.Vector.Unoxed.Unsafe as VU' module RIO.Vector.Unboxed.Unsafe ( -- * Accessors -- ** Indexing Data.Vector.Unboxed.unsafeIndex , Data.Vector.Unboxed.unsafeHead , Data.Vector.Unboxed.unsafeLast -- ** Monadic indexing , Data.Vector.Unboxed.unsafeIndexM , Data.Vector.Unboxed.unsafeHeadM , Data.Vector.Unboxed.unsafeLastM -- ** Extracting subvectors , Data.Vector.Unboxed.unsafeSlice , Data.Vector.Unboxed.unsafeInit , Data.Vector.Unboxed.unsafeTail , Data.Vector.Unboxed.unsafeTake , Data.Vector.Unboxed.unsafeDrop -- * Modifying vectors -- ** Bulk updates , Data.Vector.Unboxed.unsafeUpd , Data.Vector.Unboxed.unsafeUpdate , Data.Vector.Unboxed.unsafeUpdate_ -- ** Accumulations , Data.Vector.Unboxed.unsafeAccum , Data.Vector.Unboxed.unsafeAccumulate , Data.Vector.Unboxed.unsafeAccumulate_ -- ** Permutations , Data.Vector.Unboxed.unsafeBackpermute -- * Conversions -- ** Mutable vectors , Data.Vector.Unboxed.unsafeFreeze , Data.Vector.Unboxed.unsafeThaw , Data.Vector.Unboxed.unsafeCopy ) where import qualified Data.Vector.Unboxed rio-0.1.22.0/src/RIO/Vector/Unsafe.hs0000644000000000000000000000245314231470023015150 0ustar0000000000000000-- | Generic @Vector@ interface unsafe functions. These perform no bounds -- checking, and may cause segmentation faults etc.! Import as: -- -- > import qualified RIO.Vector.Unsafe as V' module RIO.Vector.Unsafe ( -- * Immutable vectors Data.Vector.Generic.Vector(..) -- * Accessors -- ** Indexing , Data.Vector.Generic.unsafeIndex , Data.Vector.Generic.unsafeHead , Data.Vector.Generic.unsafeLast -- ** Monadic indexing , Data.Vector.Generic.unsafeIndexM , Data.Vector.Generic.unsafeHeadM , Data.Vector.Generic.unsafeLastM -- ** Extracting subvectors , Data.Vector.Generic.unsafeSlice , Data.Vector.Generic.unsafeInit , Data.Vector.Generic.unsafeTail , Data.Vector.Generic.unsafeTake , Data.Vector.Generic.unsafeDrop -- * Modifying vectors -- ** Bulk updates , Data.Vector.Generic.unsafeUpd , Data.Vector.Generic.unsafeUpdate , Data.Vector.Generic.unsafeUpdate_ -- ** Accumulations , Data.Vector.Generic.unsafeAccum , Data.Vector.Generic.unsafeAccumulate , Data.Vector.Generic.unsafeAccumulate_ -- ** Permutations , Data.Vector.Generic.unsafeBackpermute -- * Conversions -- ** Mutable vectors , Data.Vector.Generic.unsafeFreeze , Data.Vector.Generic.unsafeThaw , Data.Vector.Generic.unsafeCopy ) where import qualified Data.Vector.Generic rio-0.1.22.0/src/RIO/Writer.hs0000644000000000000000000000077614231470023013747 0ustar0000000000000000-- | Provides reexports of 'MonadWriter' and related helpers. -- -- @since 0.1.4.0 module RIO.Writer ( Control.Monad.Writer.MonadWriter (..) , Control.Monad.Writer.listens , Control.Monad.Writer.censor , Control.Monad.Writer.Writer , Control.Monad.Writer.runWriter , Control.Monad.Writer.execWriter , Control.Monad.Writer.mapWriter , Control.Monad.Writer.WriterT (..) , Control.Monad.Writer.execWriterT , Control.Monad.Writer.mapWriterT ) where import qualified Control.Monad.Writer rio-0.1.22.0/src/RIO/Prelude/Display.hs0000644000000000000000000001166614231470023015500 0ustar0000000000000000{-# LANGUAGE GeneralizedNewtypeDeriving #-} module RIO.Prelude.Display ( Utf8Builder (..) , Display (..) 
, displayShow , utf8BuilderToText , utf8BuilderToLazyText , displayBytesUtf8 , writeFileUtf8Builder ) where import Data.String (IsString (..)) import Data.ByteString (ByteString) import qualified Data.ByteString.Lazy as BL import qualified Data.ByteString.Builder as BB import Data.ByteString.Builder (Builder) import Data.Semigroup (Semigroup(..)) import Data.Text (Text) import qualified Data.Text.Lazy as TL import qualified Data.Text.Lazy.Encoding as TL import UnliftIO import Data.Text.Encoding (decodeUtf8With, encodeUtf8Builder) import Data.Text.Encoding.Error (lenientDecode) import Data.Int import Data.Word import System.Process.Typed (ProcessConfig) -- | A builder of binary data, with the invariant that the underlying -- data is supposed to be UTF-8 encoded. -- -- @since 0.1.0.0 newtype Utf8Builder = Utf8Builder { getUtf8Builder :: Builder } deriving (Semigroup) -- Custom instance is created instead of deriving, otherwise list fusion breaks -- for `mconcat`. instance Monoid Utf8Builder where mempty = Utf8Builder mempty {-# INLINE mempty #-} mappend = (Data.Semigroup.<>) {-# INLINE mappend #-} mconcat = foldr mappend mempty {-# INLINE mconcat #-} -- | @since 0.1.0.0 instance IsString Utf8Builder where fromString = Utf8Builder . BB.stringUtf8 -- | A typeclass for values which can be converted to a -- 'Utf8Builder'. The intention of this typeclass is to provide a -- human-friendly display of the data. -- -- @since 0.1.0.0 class Display a where {-# MINIMAL display | textDisplay #-} display :: a -> Utf8Builder display = display . textDisplay -- | Display data as `Text`, which will also be used for `display` if it is -- not overridden. -- -- @since 0.1.7.0 textDisplay :: a -> Text textDisplay = utf8BuilderToText . display -- | @since 0.1.0.0 instance Display Utf8Builder where display = id -- | @since 0.1.0.0 instance Display Text where display = Utf8Builder . encodeUtf8Builder -- | @since 0.1.0.0 instance Display TL.Text where display = foldMap display . TL.toChunks -- | @since 0.1.0.0 instance Display Char where display = Utf8Builder . BB.charUtf8 -- | @since 0.1.0.0 instance Display Integer where display = Utf8Builder . BB.integerDec -- | @since 0.1.0.0 instance Display Float where display = Utf8Builder . BB.floatDec instance Display Double where display = Utf8Builder . BB.doubleDec -- | @since 0.1.0.0 instance Display Int where display = Utf8Builder . BB.intDec -- | @since 0.1.0.0 instance Display Int8 where display = Utf8Builder . BB.int8Dec -- | @since 0.1.0.0 instance Display Int16 where display = Utf8Builder . BB.int16Dec -- | @since 0.1.0.0 instance Display Int32 where display = Utf8Builder . BB.int32Dec -- | @since 0.1.0.0 instance Display Int64 where display = Utf8Builder . BB.int64Dec -- | @since 0.1.0.0 instance Display Word where display = Utf8Builder . BB.wordDec -- | @since 0.1.0.0 instance Display Word8 where display = Utf8Builder . BB.word8Dec -- | @since 0.1.0.0 instance Display Word16 where display = Utf8Builder . BB.word16Dec -- | @since 0.1.0.0 instance Display Word32 where display = Utf8Builder . BB.word32Dec -- | @since 0.1.0.0 instance Display Word64 where display = Utf8Builder . BB.word64Dec -- | @since 0.1.0.0 instance Display SomeException where display = fromString . displayException -- | @since 0.1.0.0 instance Display IOException where display = fromString . displayException -- | @since 0.1.0.0 instance Display (ProcessConfig a b c) where display = displayShow
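-- A hedged illustration of how a user-defined instance typically looks; the
-- @Person@ type and its fields below are hypothetical, not part of this
-- module, and the string literals assume @OverloadedStrings@:
--
-- > data Person = Person { personName :: Text, personAge :: Int }
-- >
-- > instance Display Person where
-- >   display p = display (personName p) <> " (age " <> display (personAge p) <> ")"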
-- | Use the 'Show' instance for a value to convert it to a -- 'Utf8Builder'. -- -- @since 0.1.0.0 displayShow :: Show a => a -> Utf8Builder displayShow = fromString . show -- | Convert a 'ByteString' into a 'Utf8Builder'. -- -- /NOTE/ This function performs no checks to ensure that the data is, -- in fact, UTF8 encoded. If you provide non-UTF8 data, later -- functions may fail. -- -- @since 0.1.0.0 displayBytesUtf8 :: ByteString -> Utf8Builder displayBytesUtf8 = Utf8Builder . BB.byteString -- | Convert a 'Utf8Builder' value into a strict 'Text'. -- -- @since 0.1.0.0 utf8BuilderToText :: Utf8Builder -> Text utf8BuilderToText = decodeUtf8With lenientDecode . BL.toStrict . BB.toLazyByteString . getUtf8Builder -- | Convert a 'Utf8Builder' value into a lazy 'Text'. -- -- @since 0.1.0.0 utf8BuilderToLazyText :: Utf8Builder -> TL.Text utf8BuilderToLazyText = TL.decodeUtf8With lenientDecode . BB.toLazyByteString . getUtf8Builder -- | Write the given 'Utf8Builder' value to a file. -- -- @since 0.1.0.0 writeFileUtf8Builder :: MonadIO m => FilePath -> Utf8Builder -> m () writeFileUtf8Builder fp (Utf8Builder builder) = liftIO $ withBinaryFile fp WriteMode $ \h -> BB.hPutBuilder h builder rio-0.1.22.0/src/RIO/Prelude/Exit.hs0000644000000000000000000000147114231470023014775 0ustar0000000000000000module RIO.Prelude.Exit ( exitFailure , exitSuccess , exitWith , System.Exit.ExitCode(..) ) where import Control.Monad.IO.Class import qualified System.Exit ( ExitCode (..) , exitFailure , exitSuccess , exitWith ) -- | Lifted version of "System.Exit.exitFailure". -- -- @since 0.1.9.0. exitFailure :: MonadIO m => m a exitFailure = liftIO System.Exit.exitFailure -- | Lifted version of "System.Exit.exitSuccess". -- -- @since 0.1.9.0. exitSuccess :: MonadIO m => m a exitSuccess = liftIO System.Exit.exitSuccess -- | Lifted version of "System.Exit.exitWith". -- -- @since 0.1.9.0. exitWith :: MonadIO m => System.Exit.ExitCode -> m a exitWith code = liftIO $ System.Exit.exitWith code rio-0.1.22.0/src/RIO/Prelude/Extra.hs0000644000000000000000000000503114231470023015143 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE BangPatterns #-} module RIO.Prelude.Extra ( mapLeft , fromFirst , mapMaybeA , mapMaybeM , forMaybeA , forMaybeM , foldMapM , nubOrd , whenM , unlessM , (<&>) , asIO ) where import Prelude import qualified Data.Set as Set import Data.Monoid (First (..)) import Data.Foldable (foldlM) import Data.Functor import Data.Maybe import Control.Monad -- | Apply a function to a 'Left' constructor mapLeft :: (a1 -> a2) -> Either a1 b -> Either a2 b mapLeft f (Left a1) = Left (f a1) mapLeft _ (Right b) = Right b -- | Get a 'First' value with a default fallback fromFirst :: a -> First a -> a fromFirst x = fromMaybe x . getFirst -- | Applicative 'mapMaybe'. mapMaybeA :: Applicative f => (a -> f (Maybe b)) -> [a] -> f [b] mapMaybeA f = fmap catMaybes . traverse f -- | @'forMaybeA' '==' 'flip' 'mapMaybeA'@ forMaybeA :: Applicative f => [a] -> (a -> f (Maybe b)) -> f [b] forMaybeA = flip mapMaybeA -- | Monadic 'mapMaybe'. mapMaybeM :: Monad m => (a -> m (Maybe b)) -> [a] -> m [b] mapMaybeM f = liftM catMaybes . mapM f -- | @'forMaybeM' '==' 'flip' 'mapMaybeM'@ forMaybeM :: Monad m => [a] -> (a -> m (Maybe b)) -> m [b] forMaybeM = flip mapMaybeM -- | Extend 'foldMap' to allow side effects. -- -- Internally, this is implemented using a strict left fold. This is used for -- performance reasons. It also necessitates that this function has a @Monad@ -- constraint and not just an @Applicative@ constraint.
For more information, -- see -- . -- -- @since 0.1.3.0 foldMapM :: (Monad m, Monoid w, Foldable t) => (a -> m w) -> t a -> m w foldMapM f = foldlM (\acc a -> do w <- f a return $! mappend acc w) mempty -- | Strip out duplicates nubOrd :: Ord a => [a] -> [a] nubOrd = loop mempty where loop _ [] = [] loop !s (a:as) | a `Set.member` s = loop s as | otherwise = a : loop (Set.insert a s) as -- | Run the second value if the first value returns 'True' whenM :: Monad m => m Bool -> m () -> m () whenM boolM action = boolM >>= (`when` action) -- | Run the second value if the first value returns 'False' unlessM :: Monad m => m Bool -> m () -> m () unlessM boolM action = boolM >>= (`unless` action) #if !MIN_VERSION_base(4, 11, 0) (<&>) :: Functor f => f a -> (a -> b) -> f b as <&> f = f <$> as infixl 1 <&> #endif -- | Helper function to force an action to run in 'IO'. Especially -- useful for overly general contexts, like hspec tests. -- -- @since 0.1.3.0 asIO :: IO a -> IO a asIO = id rio-0.1.22.0/src/RIO/Prelude/IO.hs0000644000000000000000000000413714231470023014375 0ustar0000000000000000{-# LANGUAGE CPP #-} module RIO.Prelude.IO ( withLazyFile , withLazyFileUtf8 , readFileBinary , writeFileBinary , readFileUtf8 , writeFileUtf8 , hPutBuilder ) where import RIO.Prelude.Reexports import qualified Data.ByteString as B import qualified Data.ByteString.Builder as BB import qualified Data.ByteString.Lazy as BL import qualified Data.Text.Lazy as TL import qualified Data.Text.Lazy.IO as TL import qualified Data.Text.IO as T import System.IO (hSetEncoding, utf8) -- | Lazily get the contents of a file. Unlike 'BL.readFile', this -- ensures that if an exception is thrown, the file handle is closed -- immediately. withLazyFile :: MonadUnliftIO m => FilePath -> (BL.ByteString -> m a) -> m a withLazyFile fp inner = withBinaryFile fp ReadMode $ inner <=< liftIO . BL.hGetContents -- | Lazily read a file in UTF8 encoding. -- -- @since 0.1.13 withLazyFileUtf8 :: MonadUnliftIO m => FilePath -> (TL.Text -> m a) -> m a withLazyFileUtf8 fp inner = withFile fp ReadMode $ \h -> inner =<< liftIO (hSetEncoding h utf8 >> TL.hGetContents h) -- | Write a file in UTF8 encoding -- -- This function will use OS-specific line ending handling. writeFileUtf8 :: MonadIO m => FilePath -> Text -> m () writeFileUtf8 fp text = liftIO $ withFile fp WriteMode $ \h -> do hSetEncoding h utf8 T.hPutStr h text hPutBuilder :: MonadIO m => Handle -> Builder -> m () hPutBuilder h = liftIO . BB.hPutBuilder h {-# INLINE hPutBuilder #-} -- | Same as 'B.readFile', but generalized to 'MonadIO' readFileBinary :: MonadIO m => FilePath -> m ByteString readFileBinary = liftIO . B.readFile -- | Same as 'B.writeFile', but generalized to 'MonadIO' writeFileBinary :: MonadIO m => FilePath -> ByteString -> m () writeFileBinary fp = liftIO . B.writeFile fp -- | Read a file in UTF8 encoding, throwing an exception on invalid character -- encoding. -- -- This function will use OS-specific line ending handling. readFileUtf8 :: MonadIO m => FilePath -> m Text readFileUtf8 fp = liftIO $ withFile fp ReadMode $ \h -> do hSetEncoding h utf8 T.hGetContents h rio-0.1.22.0/src/RIO/Prelude/Lens.hs0000644000000000000000000000066114231470023014765 0ustar0000000000000000module RIO.Prelude.Lens ( view , preview , Lens.Micro.ASetter , Lens.Micro.ASetter' , Lens.Micro.Getting , Lens.Micro.Lens , Lens.Micro.Lens' , Lens.Micro.SimpleGetter , Lens.Micro.lens , Lens.Micro.over , Lens.Micro.set , Lens.Micro.sets , Lens.Micro.to , (Lens.Micro.^.) , (Lens.Micro.^?) , (Lens.Micro.^..) 
, (Lens.Micro.%~) , (Lens.Micro..~) ) where import Lens.Micro import Lens.Micro.Mtl rio-0.1.22.0/src/RIO/Prelude/Logger.hs0000644000000000000000000007502714231470023015313 0ustar0000000000000000{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE CPP #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE BangPatterns #-} module RIO.Prelude.Logger ( -- ** Running with logging withLogFunc , newLogFunc , LogFunc , HasLogFunc (..) , logOptionsHandle -- *** Log options , LogOptions , setLogMinLevel , setLogMinLevelIO , setLogVerboseFormat , setLogVerboseFormatIO , setLogTerminal , setLogUseTime , setLogUseColor , setLogUseLoc , setLogFormat , setLogLevelColors , setLogSecondaryColor , setLogAccentColors -- ** Standard logging functions , logDebug , logInfo , logWarn , logError , logOther -- ** Advanced logging functions -- *** Sticky logging , logSticky , logStickyDone -- *** With source -- -- $withSource , logDebugS , logInfoS , logWarnS , logErrorS , logOtherS -- *** Generic log function , logGeneric -- ** Advanced running functions , mkLogFunc , logOptionsMemory -- ** Data types , LogLevel (..) , LogSource , CallStack -- ** Convenience functions , displayCallStack , noLogging -- ** Accessors , logFuncUseColorL , logFuncLogLevelColorsL , logFuncSecondaryColorL , logFuncAccentColorsL -- * Type-generic logger -- $type-generic-intro , glog , GLogFunc , gLogFuncClassic , mkGLogFunc , contramapMaybeGLogFunc , contramapGLogFunc , HasGLogFunc(..) , HasLogLevel(..) , HasLogSource(..) ) where import RIO.Prelude.Reexports hiding ((<>)) import RIO.Prelude.Renames import RIO.Prelude.Display import RIO.Prelude.Lens import Data.Text (Text) import qualified Data.Text as T import Control.Monad.IO.Class (MonadIO, liftIO) import GHC.Stack (HasCallStack, CallStack, SrcLoc (..), getCallStack, callStack) import Data.Time import qualified Data.Text.IO as TIO import Data.Bits import Data.ByteString.Builder (toLazyByteString, char7, byteString, hPutBuilder) import Data.ByteString.Builder.Extra (flush) import GHC.IO.Handle.Internals (wantWritableHandle) import GHC.IO.Encoding.Types (textEncodingName) import GHC.IO.Handle.Types (Handle__ (..)) import qualified Data.ByteString as B import System.IO (localeEncoding) import GHC.Foreign (peekCString, withCString) import Data.Semigroup (Semigroup (..)) #if MIN_VERSION_base(4,12,0) import Data.Functor.Contravariant #endif -- | The log level of a message. -- -- @since 0.0.0.0 data LogLevel = LevelDebug | LevelInfo | LevelWarn | LevelError | LevelOther !Text deriving (Eq, Show, Read, Ord) -- | Where in the application a log message came from. Used for -- display purposes only. -- -- @since 0.0.0.0 type LogSource = Text -- | Environment values with a logging function. -- -- @since 0.0.0.0 class HasLogFunc env where logFuncL :: Lens' env LogFunc instance HasLogFunc LogFunc where logFuncL = id -- | A logging function, wrapped in a newtype for better error messages. -- -- An implementation may choose any behavior of this value it wishes, -- including printing to standard output or no action at all. -- -- @since 0.0.0.0 data LogFunc = LogFunc { unLogFunc :: !(CallStack -> LogSource -> LogLevel -> Utf8Builder -> IO ()) , lfOptions :: !(Maybe LogOptions) } -- | Perform both sets of actions per log entry. 
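--
-- A hedged sketch (assuming @consoleLF@ and @fileLF@ are 'LogFunc' values you
-- have already built, e.g. with 'mkLogFunc' or 'newLogFunc'): the combined
-- function writes every message through both of them, in order:
--
-- > let combinedLF = consoleLF <> fileLF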
-- -- @since 0.0.0.0 instance Semigroup LogFunc where LogFunc f o1 <> LogFunc g o2 = LogFunc { unLogFunc = \a b c d -> f a b c d *> g a b c d , lfOptions = o1 `mplus` o2 } -- | 'mempty' peforms no logging. -- -- @since 0.0.0.0 instance Monoid LogFunc where mempty = mkLogFunc $ \_ _ _ _ -> return () mappend = (<>) -- | Create a 'LogFunc' from the given function. -- -- @since 0.0.0.0 mkLogFunc :: (CallStack -> LogSource -> LogLevel -> Utf8Builder -> IO ()) -> LogFunc mkLogFunc f = LogFunc f Nothing -- | Generic, basic function for creating other logging functions. -- -- @since 0.0.0.0 logGeneric :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => LogSource -> LogLevel -> Utf8Builder -> m () logGeneric src level str = do LogFunc logFunc _ <- view logFuncL liftIO $ logFunc callStack src level str -- | Log a debug level message with no source. -- -- @since 0.0.0.0 logDebug :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Utf8Builder -> m () logDebug = logGeneric "" LevelDebug -- | Log an info level message with no source. -- -- @since 0.0.0.0 logInfo :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Utf8Builder -> m () logInfo = logGeneric "" LevelInfo -- | Log a warn level message with no source. -- -- @since 0.0.0.0 logWarn :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Utf8Builder -> m () logWarn = logGeneric "" LevelWarn -- | Log an error level message with no source. -- -- @since 0.0.0.0 logError :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Utf8Builder -> m () logError = logGeneric "" LevelError -- | Log a message with the specified textual level and no source. -- -- @since 0.0.0.0 logOther :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Text -- ^ level -> Utf8Builder -> m () logOther = logGeneric "" . LevelOther -- $withSource -- -- There is a set of logging functions that take an extra 'LogSource' -- argument to provide context, typically detailing what part of an -- application the message comes from. -- -- For example, in verbose mode, @infoLogS "database" "connected"@ will -- result in -- -- > [info] (database) connected -- | Log a debug level message with the given source. -- -- @since 0.0.0.0 logDebugS :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => LogSource -> Utf8Builder -> m () logDebugS src = logGeneric src LevelDebug -- | Log an info level message with the given source. -- -- @since 0.0.0.0 logInfoS :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => LogSource -> Utf8Builder -> m () logInfoS src = logGeneric src LevelInfo -- | Log a warn level message with the given source. -- -- @since 0.0.0.0 logWarnS :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => LogSource -> Utf8Builder -> m () logWarnS src = logGeneric src LevelWarn -- | Log an error level message with the given source. -- -- @since 0.0.0.0 logErrorS :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => LogSource -> Utf8Builder -> m () logErrorS src = logGeneric src LevelError -- | Log a message with the specified textual level and the given -- source. -- -- @since 0.0.0.0 logOtherS :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Text -- ^ level -> LogSource -> Utf8Builder -> m () logOtherS src = logGeneric src . LevelOther -- | Write a "sticky" line to the terminal. Any subsequent lines will -- overwrite this one, and that same line will be repeated below -- again. 
In other words, the line sticks at the bottom of the output -- forever. Running this function again will replace the sticky line -- with a new sticky line. When you want to get rid of the sticky -- line, run 'logStickyDone'. -- -- Note that not all 'LogFunc' implementations will support sticky -- messages as described. However, the 'withLogFunc' implementation -- provided by this module does. -- -- @since 0.0.0.0 logSticky :: (MonadIO m, HasCallStack, MonadReader env m, HasLogFunc env) => Utf8Builder -> m () logSticky = logOther "sticky" -- | This will print out the given message with a newline and disable -- any further stickiness of the line until a new call to 'logSticky' -- happens. -- -- @since 0.0.0.0 logStickyDone :: (MonadIO m, HasCallStack, MonadReader env m, HasLogFunc env) => Utf8Builder -> m () logStickyDone = logOther "sticky-done" -- TODO It might be better at some point to have a 'runSticky' function -- that encompasses the logSticky->logStickyDone pairing. canUseUtf8 :: MonadIO m => Handle -> m Bool canUseUtf8 h = liftIO $ wantWritableHandle "canUseUtf8" h $ \h_ -> do -- TODO also handle haOutputNL for CRLF return $ (textEncodingName <$> haCodec h_) == Just "UTF-8" -- | Create a 'LogOptions' value which will store its data in -- memory. This is primarily intended for testing purposes. This will -- return both a 'LogOptions' value and an 'IORef' containing the -- resulting 'Builder' value. -- -- This will default to non-verbose settings and assume there is a -- terminal attached. These assumptions can be overridden using the -- appropriate @set@ functions. -- -- @since 0.0.0.0 logOptionsMemory :: MonadIO m => m (IORef Builder, LogOptions) logOptionsMemory = do ref <- newIORef mempty let options = LogOptions { logMinLevel = return LevelInfo , logVerboseFormat = return False , logTerminal = True , logUseTime = False , logUseColor = False , logColors = defaultLogColors , logUseLoc = False , logFormat = id , logSend = \new -> atomicModifyIORef' ref $ \old -> (old <> new, ()) } return (ref, options) -- | Create a 'LogOptions' value from the given 'Handle' and whether -- to perform verbose logging or not. Individiual settings can be -- overridden using appropriate @set@ functions. -- Logging output is guaranteed to be non-interleaved only for a -- UTF-8 'Handle' in a multi-thread environment. 
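--
-- A minimal, hedged sketch (@stderr@ is only an illustrative choice of
-- 'Handle', and @isVerbose@ stands for a flag obtained elsewhere, e.g. from a
-- command-line parser):
--
-- > options <- logOptionsHandle stderr isVerbose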
-- -- When Verbose Flag is @True@, the following happens: -- -- * @setLogVerboseFormat@ is called with @True@ -- * @setLogUseColor@ is called with @True@ (except on Windows) -- * @setLogUseLoc@ is called with @True@ -- * @setLogUseTime@ is called with @True@ -- * @setLogMinLevel@ is called with 'Debug' log level -- -- @since 0.0.0.0 logOptionsHandle :: MonadIO m => Handle -> Bool -- ^ Verbose Flag -> m LogOptions logOptionsHandle handle' verbose = liftIO $ do terminal <- hIsTerminalDevice handle' useUtf8 <- canUseUtf8 handle' unicode <- if useUtf8 then return True else getCanUseUnicode return LogOptions { logMinLevel = return $ if verbose then LevelDebug else LevelInfo , logVerboseFormat = return verbose , logTerminal = terminal , logUseTime = verbose #if WINDOWS , logUseColor = False #else , logUseColor = verbose && terminal #endif , logColors = defaultLogColors , logUseLoc = verbose , logFormat = id , logSend = \builder -> if useUtf8 && unicode then hPutBuilder handle' (builder <> flush) else do let lbs = toLazyByteString builder bs = toStrictBytes lbs case decodeUtf8' bs of Left e -> error $ "mkLogOptions: invalid UTF8 sequence: " ++ show (e, bs) Right text -> do let text' | unicode = text | otherwise = T.map replaceUnicode text TIO.hPutStr handle' text' hFlush handle' } -- | Taken from GHC: determine if we should use Unicode syntax getCanUseUnicode :: IO Bool getCanUseUnicode = do let enc = localeEncoding str = "\x2018\x2019" test = withCString enc str $ \cstr -> do str' <- peekCString enc cstr return (str == str') test `catchIO` \_ -> return False -- | Given a 'LogOptions' value, returns both a new 'LogFunc' and a sub-routine that -- disposes it. -- -- Intended for use if you want to deal with the teardown of 'LogFunc' yourself, -- otherwise prefer the 'withLogFunc' function instead. -- -- @since 0.1.3.0 newLogFunc :: (MonadIO n, MonadIO m) => LogOptions -> n (LogFunc, m ()) newLogFunc options = if logTerminal options then do var <- newMVar (mempty,0) return (LogFunc { unLogFunc = stickyImpl var options (simpleLogFunc options) , lfOptions = Just options } , do (state,_) <- takeMVar var unless (B.null state) (liftIO $ logSend options "\n") ) else return (LogFunc { unLogFunc = \cs src level str -> simpleLogFunc options cs src (noSticky level) str , lfOptions = Just options } , return () ) -- | Given a 'LogOptions' value, run the given function with the -- specified 'LogFunc'. A common way to use this function is: -- -- @ -- let isVerbose = False -- get from the command line instead -- logOptions' <- logOptionsHandle stderr isVerbose -- let logOptions = setLogUseTime True logOptions' -- withLogFunc logOptions $ \\lf -> do -- let app = App -- application specific environment -- { appLogFunc = lf -- , appOtherStuff = ... -- } -- runRIO app $ do -- logInfo "Starting app" -- myApp -- @ -- -- @since 0.0.0.0 withLogFunc :: MonadUnliftIO m => LogOptions -> (LogFunc -> m a) -> m a withLogFunc options inner = withRunInIO $ \run -> do bracket (newLogFunc options) snd (run . inner . fst) -- | Replace Unicode characters with non-Unicode equivalents replaceUnicode :: Char -> Char replaceUnicode '\x2018' = '`' replaceUnicode '\x2019' = '\'' replaceUnicode c = c noSticky :: LogLevel -> LogLevel noSticky (LevelOther "sticky-done") = LevelInfo noSticky (LevelOther "sticky") = LevelInfo noSticky level = level -- | Configuration for how to create a 'LogFunc'. Intended to be used -- with the 'withLogFunc' function. 
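--
-- A hedged sketch of adjusting a few of the settings below before handing the
-- options to 'withLogFunc' (this particular combination is only illustrative):
--
-- > options <- logOptionsHandle stderr False
-- > let options' = setLogUseTime True (setLogMinLevel LevelDebug options)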
-- -- @since 0.0.0.0 data LogOptions = LogOptions { logMinLevel :: !(IO LogLevel) , logVerboseFormat :: !(IO Bool) , logTerminal :: !Bool , logUseTime :: !Bool , logUseColor :: !Bool , logColors :: !LogColors , logUseLoc :: !Bool , logFormat :: !(Utf8Builder -> Utf8Builder) , logSend :: !(Builder -> IO ()) } -- | ANSI color codes for use in the configuration of the creation of a -- 'LogFunc'. -- -- @since 0.1.18.0 data LogColors = LogColors { -- | The color associated with each 'LogLevel'. logColorLogLevels :: !(LogLevel -> Utf8Builder) -- | The color of secondary content. , logColorSecondary :: !Utf8Builder -- | The color of accents, which are indexed by 'Int'. , logColorAccents :: !(Int -> Utf8Builder) } defaultLogColors :: LogColors defaultLogColors = LogColors { logColorLogLevels = defaultLogLevelColors , logColorSecondary = defaultLogSecondaryColor , logColorAccents = defaultLogAccentColors } defaultLogLevelColors :: LogLevel -> Utf8Builder defaultLogLevelColors LevelDebug = "\ESC[32m" -- Green defaultLogLevelColors LevelInfo = "\ESC[34m" -- Blue defaultLogLevelColors LevelWarn = "\ESC[33m" -- Yellow defaultLogLevelColors LevelError = "\ESC[31m" -- Red defaultLogLevelColors (LevelOther _) = "\ESC[35m" -- Magenta defaultLogSecondaryColor :: Utf8Builder defaultLogSecondaryColor = "\ESC[90m" -- Bright black (gray) defaultLogAccentColors :: Int -> Utf8Builder defaultLogAccentColors = const "\ESC[92m" -- Bright green -- | Set the minimum log level. Messages below this level will not be -- printed. -- -- Default: in verbose mode, 'LevelDebug'. Otherwise, 'LevelInfo'. -- -- @since 0.0.0.0 setLogMinLevel :: LogLevel -> LogOptions -> LogOptions setLogMinLevel level options = options { logMinLevel = return level } -- | Refer to 'setLogMinLevel'. This modifier allows to alter the verbose format -- value dynamically at runtime. -- -- Default: in verbose mode, 'LevelDebug'. Otherwise, 'LevelInfo'. -- -- @since 0.1.3.0 setLogMinLevelIO :: IO LogLevel -> LogOptions -> LogOptions setLogMinLevelIO getLevel options = options { logMinLevel = getLevel } -- | Use the verbose format for printing log messages. -- -- Default: follows the value of the verbose flag. -- -- @since 0.0.0.0 setLogVerboseFormat :: Bool -> LogOptions -> LogOptions setLogVerboseFormat v options = options { logVerboseFormat = return v } -- | Refer to 'setLogVerboseFormat'. This modifier allows to alter the verbose -- format value dynamically at runtime. -- -- Default: follows the value of the verbose flag. -- -- @since 0.1.3.0 setLogVerboseFormatIO :: IO Bool -> LogOptions -> LogOptions setLogVerboseFormatIO getVerboseLevel options = options { logVerboseFormat = getVerboseLevel } -- | Do we treat output as a terminal. If @True@, we will enable -- sticky logging functionality. -- -- Default: checks if the @Handle@ provided to 'logOptionsHandle' is a -- terminal with 'hIsTerminalDevice'. -- -- @since 0.0.0.0 setLogTerminal :: Bool -> LogOptions -> LogOptions setLogTerminal t options = options { logTerminal = t } -- | Include the time when printing log messages. -- -- Default: `True` in debug mode, `False` otherwise. -- -- @since 0.0.0.0 setLogUseTime :: Bool -> LogOptions -> LogOptions setLogUseTime t options = options { logUseTime = t } -- | Use ANSI color codes in the log output. -- -- Default: `True` if in verbose mode /and/ the 'Handle' is a terminal device. 
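--
-- For example, to force colours off regardless of the verbose flag (@options@
-- here is any 'LogOptions' value):
--
-- > setLogUseColor False options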
-- -- @since 0.0.0.0 setLogUseColor :: Bool -> LogOptions -> LogOptions setLogUseColor c options = options { logUseColor = c } -- | ANSI color codes for 'LogLevel' in the log output. -- -- Default: 'LevelDebug' = \"\\ESC[32m\" -- Green -- 'LevelInfo' = \"\\ESC[34m\" -- Blue -- 'LevelWarn' = \"\\ESC[33m\" -- Yellow -- 'LevelError' = \"\\ESC[31m\" -- Red -- 'LevelOther' _ = \"\\ESC[35m\" -- Magenta -- -- @since 0.1.18.0 setLogLevelColors :: (LogLevel -> Utf8Builder) -> LogOptions -> LogOptions setLogLevelColors logLevelColors options = let lc = (logColors options){ logColorLogLevels = logLevelColors } in options { logColors = lc } -- | ANSI color codes for secondary content in the log output. -- -- Default: \"\\ESC[90m\" -- Bright black (gray) -- -- @since 0.1.18.0 setLogSecondaryColor :: Utf8Builder -> LogOptions -> LogOptions setLogSecondaryColor c options = let lc = (logColors options){ logColorSecondary = c } in options { logColors = lc } -- | ANSI color codes for accents in the log output. Accent colors are indexed -- by 'Int'. -- -- Default: 'const' \"\\ESC[92m\" -- Bright green, for all indicies -- -- @since 0.1.18.0 setLogAccentColors :: (Int -> Utf8Builder) -- ^ This should be a total function. -> LogOptions -> LogOptions setLogAccentColors accentColors options = let lc = (logColors options){ logColorAccents = accentColors } in options { logColors = lc } -- | Use code location in the log output. -- -- Default: `True` if in verbose mode, `False` otherwise. -- -- @since 0.1.2.0 setLogUseLoc :: Bool -> LogOptions -> LogOptions setLogUseLoc l options = options { logUseLoc = l } -- | Set format method for messages -- -- Default: `id` -- -- @since 0.1.13.0 setLogFormat :: (Utf8Builder -> Utf8Builder) -> LogOptions -> LogOptions setLogFormat f options = options { logFormat = f } simpleLogFunc :: LogOptions -> CallStack -> LogSource -> LogLevel -> Utf8Builder -> IO () simpleLogFunc lo cs src level msg = do logLevel <- logMinLevel lo logVerbose <- logVerboseFormat lo when (level >= logLevel) $ do timestamp <- getTimestamp logVerbose logSend lo $ getUtf8Builder $ timestamp <> getLevel logVerbose <> ansi reset <> getSource <> logFormat lo msg <> getLoc <> ansi reset <> "\n" where reset = "\ESC[0m" lc = logColors lo levelColor = logColorLogLevels lc level timestampColor = logColorSecondary lc locColor = logColorSecondary lc ansi :: Utf8Builder -> Utf8Builder ansi xs | logUseColor lo = xs | otherwise = mempty getTimestamp :: Bool -> IO Utf8Builder getTimestamp logVerbose | logVerbose && logUseTime lo = do now <- getZonedTime return $ ansi timestampColor <> fromString (formatTime' now) <> ": " | otherwise = return mempty where formatTime' = take timestampLength . formatTime defaultTimeLocale "%F %T.%q" getLevel :: Bool -> Utf8Builder getLevel logVerbose | logVerbose = ansi levelColor <> case level of LevelDebug -> "[debug] " LevelInfo -> "[info] " LevelWarn -> "[warn] " LevelError -> "[error] " LevelOther name -> "[" <> display name <> "] " | otherwise = mempty getSource :: Utf8Builder getSource = case src of "" -> "" _ -> "(" <> display src <> ") " getLoc :: Utf8Builder getLoc | logUseLoc lo = ansi locColor <> "\n@(" <> displayCallStack cs <> ")" | otherwise = mempty -- | Convert a 'CallStack' value into a 'Utf8Builder' indicating -- the first source location. -- -- TODO Consider showing the entire call stack instead. 
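--
-- The rendered form is @file:line:column@ for the outermost call site, e.g.
-- something like @src\/Main.hs:23:5@ (illustrative output only); an empty call
-- stack renders as an empty builder.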
-- -- @since 0.0.0.0 displayCallStack :: CallStack -> Utf8Builder displayCallStack cs = case reverse $ getCallStack cs of [] -> "" (_desc, loc):_ -> let file = srcLocFile loc in fromString file <> ":" <> displayShow (srcLocStartLine loc) <> ":" <> displayShow (srcLocStartCol loc) -- | The length of a timestamp in the format "YYYY-MM-DD hh:mm:ss.μμμμμμ". -- This definition is top-level in order to avoid multiple reevaluation at runtime. timestampLength :: Int timestampLength = length (formatTime defaultTimeLocale "%F %T.000000" (UTCTime (ModifiedJulianDay 0) 0)) stickyImpl :: MVar (ByteString,Int) -> LogOptions -> (CallStack -> LogSource -> LogLevel -> Utf8Builder -> IO ()) -> CallStack -> LogSource -> LogLevel -> Utf8Builder -> IO () stickyImpl ref lo logFunc loc src level msgOrig = modifyMVar_ ref $ \(sticky,stickyLen) -> do let backSpaceChar = '\8' repeating = mconcat . replicate stickyLen . char7 clear = logSend lo (repeating backSpaceChar <> repeating ' ' <> repeating backSpaceChar) logLevel <- logMinLevel lo case level of LevelOther "sticky-done" -> do clear logFunc loc src LevelInfo msgOrig return (mempty,0) LevelOther "sticky" -> do clear let bs = toStrictBytes $ toLazyByteString $ getUtf8Builder msgOrig logSend lo (byteString bs <> flush) return (bs, utf8CharacterCount bs) _ | level >= logLevel -> do clear logFunc loc src level msgOrig unless (B.null sticky) $ logSend lo (byteString sticky <> flush) return (sticky,stickyLen) | otherwise -> return (sticky,stickyLen) -- | The number of Unicode characters in a UTF-8 encoded byte string, -- excluding ANSI CSI sequences. utf8CharacterCount :: ByteString -> Int utf8CharacterCount = go 0 where go !n bs = case B.uncons bs of Nothing -> n Just (c,bs) | c .&. 0xC0 == 0x80 -> go n bs -- UTF-8 continuation | c == 0x1B -> go n $ dropCSI bs -- ANSI escape | otherwise -> go (n+1) bs dropCSI bs = case B.uncons bs of Just (0x5B,bs2) -> B.drop 1 $ B.dropWhile isSequenceByte bs2 _ -> bs isSequenceByte c = c >= 0x20 && c <= 0x3F -- | Is the log func configured to use color output? -- -- Intended for use by code which wants to optionally add additional color to -- its log messages. -- -- @since 0.1.0.0 logFuncUseColorL :: HasLogFunc env => SimpleGetter env Bool logFuncUseColorL = logFuncL.to (maybe False logUseColor . lfOptions) -- | What color is the log func configured to use for each 'LogLevel'? -- -- Intended for use by code which wants to optionally add additional color to -- its log messages. -- -- @since 0.1.18.0 logFuncLogLevelColorsL :: HasLogFunc env => SimpleGetter env (LogLevel -> Utf8Builder) logFuncLogLevelColorsL = logFuncL.to (maybe defaultLogLevelColors (logColorLogLevels . logColors) . lfOptions) -- | What color is the log func configured to use for secondary content? -- -- Intended for use by code which wants to optionally add additional color to -- its log messages. -- -- @since 0.1.18.0 logFuncSecondaryColorL :: HasLogFunc env => SimpleGetter env Utf8Builder logFuncSecondaryColorL = logFuncL.to (maybe defaultLogSecondaryColor (logColorSecondary . logColors) . lfOptions) -- | What accent colors, indexed by 'Int', is the log func configured to use? -- -- Intended for use by code which wants to optionally add additional color to -- its log messages. -- -- @since 0.1.18.0 logFuncAccentColorsL :: HasLogFunc env => SimpleGetter env (Int -> Utf8Builder) logFuncAccentColorsL = logFuncL.to (maybe defaultLogAccentColors (logColorAccents . logColors) . 
lfOptions) -- | Disable logging capabilities in a given sub-routine -- -- Intended to skip logging in general purpose implementations, where secrets -- might be logged accidently. -- -- @since 0.1.5.0 noLogging :: (HasLogFunc env, MonadReader env m) => m a -> m a noLogging = local (set logFuncL mempty) -------------------------------------------------------------------------------- -- -- $type-generic-intro -- -- When logging takes on a more semantic meaning and the logs need to -- be digested, acted upon, translated or serialized upstream (to -- e.g. a JSON logging server), we have 'GLogFunc' (as in "generic log -- function"), and is accessed via 'HasGLogFunc'. -- -- There is only one function to log in this system: the 'glog' -- function, which can log any message. You determine the log levels -- or severity of messages when needed. -- -- Using 'RIO.Prelude.mapRIO' and 'contramapGLogFunc' (or -- 'contramapMaybeGLogFunc'), you can build hierarchies of loggers. -- -- Example: -- -- @ -- import RIO -- -- data DatabaseMsg = Connected String | Query String | Disconnected deriving Show -- data WebMsg = Request String | Error String | DatabaseMsg DatabaseMsg deriving Show -- data AppMsg = InitMsg String | WebMsg WebMsg deriving Show -- -- main :: IO () -- main = -- runRIO -- (mkGLogFunc (\stack msg -> print msg)) -- (do glog (InitMsg "Ready to go!") -- runWeb -- (do glog (Request "/foo") -- runDB (do glog (Connected "127.0.0.1") -- glog (Query "SELECT 1")) -- glog (Error "Oh noes!"))) -- -- runDB :: RIO (GLogFunc DatabaseMsg) () -> RIO (GLogFunc WebMsg) () -- runDB = mapRIO (contramapGLogFunc DatabaseMsg) -- -- runWeb :: RIO (GLogFunc WebMsg) () -> RIO (GLogFunc AppMsg) () -- runWeb = mapRIO (contramapGLogFunc WebMsg) -- @ -- -- If we instead decided that we only wanted to log database queries, -- and not bother the upstream with connect/disconnect messages, we -- could simplify the constructor to @DatabaseQuery String@: -- -- @ -- data WebMsg = Request String | Error String | DatabaseQuery String deriving Show -- @ -- -- And then @runDB@ could use 'contramapMaybeGLogFunc' to parse only queries: -- -- @ -- runDB = -- mapRIO -- (contramapMaybeGLogFunc -- (\msg -> -- case msg of -- Query string -> pure (DatabaseQuery string) -- _ -> Nothing)) -- @ -- -- This way, upstream only has to care about queries and not -- connect/disconnect constructors. -- | An app is capable of generic logging if it implements this. -- -- @since 0.1.13.0 class HasGLogFunc env where type GMsg env gLogFuncL :: Lens' env (GLogFunc (GMsg env)) -- | Quick way to run a RIO that only has a logger in its environment. -- -- @since 0.1.13.0 instance HasGLogFunc (GLogFunc msg) where type GMsg (GLogFunc msg) = msg gLogFuncL = id -- | A generic logger of some type @msg@. -- -- Your 'GLocFunc' can re-use the existing classical logging framework -- of RIO, and/or implement additional transforms, -- filters. Alternatively, you may log to a JSON source in a database, -- or anywhere else as needed. You can decide how to log levels or -- severities based on the constructors in your type. You will -- normally determine this in your main app entry point. -- -- @since 0.1.13.0 newtype GLogFunc msg = GLogFunc (CallStack -> msg -> IO ()) #if MIN_VERSION_base(4,12,0) -- https://hackage.haskell.org/package/base-4.12.0.0/docs/Data-Functor-Contravariant.html -- | Use this instance to wrap sub-loggers via 'RIO.mapRIO'. -- -- The 'Contravariant' class is available in base 4.12.0. 
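--
-- A hedged sketch, reusing the @WebMsg@ and @DatabaseMsg@ types from the
-- example above (@webLogFunc@ is assumed to be a @'GLogFunc' WebMsg@):
--
-- > dbLogFunc :: GLogFunc DatabaseMsg
-- > dbLogFunc = contramap DatabaseMsg webLogFunc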
-- -- @since 0.1.13.0 instance Contravariant GLogFunc where contramap = contramapGLogFunc {-# INLINABLE contramap #-} #endif -- | Perform both sets of actions per log entry. -- -- @since 0.1.13.0 instance Semigroup (GLogFunc msg) where GLogFunc f <> GLogFunc g = GLogFunc (\a b -> f a b *> g a b) -- | 'mempty' peforms no logging. -- -- @since 0.1.13.0 instance Monoid (GLogFunc msg) where mempty = mkGLogFunc $ \_ _ -> return () mappend = (<>) -- | A vesion of 'contramapMaybeGLogFunc' which supports filering. -- -- @since 0.1.13.0 contramapMaybeGLogFunc :: (a -> Maybe b) -> GLogFunc b -> GLogFunc a contramapMaybeGLogFunc f (GLogFunc io) = GLogFunc (\stack msg -> maybe (pure ()) (io stack) (f msg)) {-# INLINABLE contramapMaybeGLogFunc #-} -- | A contramap. Use this to wrap sub-loggers via 'RIO.mapRIO'. -- -- If you are on base > 4.12.0, you can just use 'contramap'. -- -- @since 0.1.13.0 contramapGLogFunc :: (a -> b) -> GLogFunc b -> GLogFunc a contramapGLogFunc f (GLogFunc io) = GLogFunc (\stack msg -> io stack (f msg)) {-# INLINABLE contramapGLogFunc #-} -- | Make a custom generic logger. With this you could, for example, -- write to a database or a log digestion service. For example: -- -- > mkGLogFunc (\stack msg -> send (Data.Aeson.encode (JsonLog stack msg))) -- -- @since 0.1.13.0 mkGLogFunc :: (CallStack -> msg -> IO ()) -> GLogFunc msg mkGLogFunc = GLogFunc -- | Log a value generically. -- -- @since 0.1.13.0 glog :: (MonadIO m, HasCallStack, HasGLogFunc env, MonadReader env m) => GMsg env -> m () glog t = do GLogFunc gLogFunc <- view gLogFuncL liftIO (gLogFunc callStack t) {-# INLINABLE glog #-} -------------------------------------------------------------------------------- -- Integration with classical logger framework -- | Level, if any, of your logs. If unknown, use 'LogOther'. Use for -- your generic log data types that want to sit inside the classic log -- framework. -- -- @since 0.1.13.0 class HasLogLevel msg where getLogLevel :: msg -> LogLevel -- | Source of a log. This can be whatever you want. Use for your -- generic log data types that want to sit inside the classic log -- framework. -- -- @since 0.1.13.0 class HasLogSource msg where getLogSource :: msg -> LogSource -- | Make a 'GLogFunc' via classic 'LogFunc'. Use this if you'd like -- to log your generic data type via the classic RIO terminal logger. 
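--
-- A minimal sketch (assuming a @Msg@ type with 'HasLogLevel', 'HasLogSource'
-- and 'Display' instances, plus some @options :: LogOptions@ and a value
-- @msg :: Msg@):
--
-- > withLogFunc options $ \lf ->
-- >   runRIO (gLogFuncClassic lf) (glog msg)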
-- -- @since 0.1.13.0 gLogFuncClassic :: (HasLogLevel msg, HasLogSource msg, Display msg) => LogFunc -> GLogFunc msg gLogFuncClassic (LogFunc {unLogFunc = io}) = mkGLogFunc (\theCallStack msg -> liftIO (io theCallStack (getLogSource msg) (getLogLevel msg) (display msg))) rio-0.1.22.0/src/RIO/Prelude/Reexports.hs0000644000000000000000000000034614231470023016057 0ustar0000000000000000module RIO.Prelude.Reexports ( module RIO.Prelude , module RIO.Prelude.Types , module UnliftIO , module UnliftIO.Concurrent ) where import RIO.Prelude import RIO.Prelude.Types import UnliftIO import UnliftIO.Concurrent rio-0.1.22.0/src/RIO/Prelude/Renames.hs0000644000000000000000000000212014231470023015446 0ustar0000000000000000{-# LANGUAGE ConstraintKinds #-} module RIO.Prelude.Renames ( sappend , LByteString , LText , UVector , SVector , GVector , toStrictBytes , fromStrictBytes , yieldThread ) where import Prelude import qualified Data.ByteString as B import qualified Data.ByteString.Lazy as BL import qualified Data.Vector.Generic as GVector import qualified Data.Vector.Storable as SVector import qualified Data.Vector.Unboxed as UVector import qualified Data.Text.Lazy as TL import qualified Data.Semigroup import UnliftIO (MonadIO) import qualified UnliftIO.Concurrent (yield) sappend :: Data.Semigroup.Semigroup s => s -> s -> s sappend = (Data.Semigroup.<>) type UVector = UVector.Vector type SVector = SVector.Vector type GVector = GVector.Vector type LByteString = BL.ByteString type LText = TL.Text toStrictBytes :: LByteString -> B.ByteString toStrictBytes = BL.toStrict fromStrictBytes :: B.ByteString -> LByteString fromStrictBytes = BL.fromStrict yieldThread :: MonadIO m => m () yieldThread = UnliftIO.Concurrent.yield {-# INLINE yieldThread #-} rio-0.1.22.0/src/RIO/Prelude/RIO.hs0000644000000000000000000001123414231470023014513 0ustar0000000000000000{-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE UndecidableInstances #-} module RIO.Prelude.RIO ( RIO (..) , runRIO , liftRIO , mapRIO -- SomeRef for Writer/State interfaces , SomeRef , HasStateRef (..) , HasWriteRef (..) , newSomeRef , newUnboxedSomeRef , readSomeRef , writeSomeRef , modifySomeRef ) where import GHC.Exts (RealWorld) import RIO.Prelude.Lens import RIO.Prelude.URef import RIO.Prelude.Reexports import Control.Monad.State (MonadState(..)) import Control.Monad.Writer (MonadWriter(..)) -- | The Reader+IO monad. This is different from a 'ReaderT' because: -- -- * It's not a transformer, it hardcodes IO for simpler usage and -- error messages. -- -- * Instances of typeclasses like 'MonadLogger' are implemented using -- classes defined on the environment, instead of using an -- underlying monad. newtype RIO env a = RIO { unRIO :: ReaderT env IO a } deriving (Functor,Applicative,Monad,MonadIO,MonadReader env,MonadThrow) instance Semigroup a => Semigroup (RIO env a) where (<>) = liftA2 (<>) instance Monoid a => Monoid (RIO env a) where mempty = pure mempty mappend = liftA2 mappend -- | Using the environment run in IO the action that requires that environment. -- -- @since 0.0.1.0 runRIO :: MonadIO m => env -> RIO env a -> m a runRIO env (RIO (ReaderT f)) = liftIO (f env) -- | Abstract `RIO` to an arbitrary `MonadReader` instance, which can handle IO. -- -- @since 0.0.1.0 liftRIO :: (MonadIO m, MonadReader env m) => RIO env a -> m a liftRIO rio = do env <- ask runRIO env rio -- | Lift one RIO env to another. 
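--
-- For example, an action that only needs a 'LogFunc' can be run inside a
-- larger environment (a sketch; @App@ and @appLogFunc :: App -> LogFunc@ are
-- hypothetical):
--
-- > withLogOnly :: RIO LogFunc a -> RIO App a
-- > withLogOnly = mapRIO appLogFunc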
-- -- @since 0.1.13.0 mapRIO :: (outer -> inner) -> RIO inner a -> RIO outer a mapRIO f m = do outer <- ask runRIO (f outer) m instance MonadUnliftIO (RIO env) where withRunInIO inner = RIO $ withRunInIO $ \run -> inner (run . unRIO) {-# INLINE withRunInIO #-} instance PrimMonad (RIO env) where type PrimState (RIO env) = PrimState IO primitive = RIO . ReaderT . const . primitive -- | Abstraction over how to read from and write to a mutable reference -- -- @since 0.1.4.0 data SomeRef a = SomeRef !(IO a) !(a -> IO ()) -- | Read from a SomeRef -- -- @since 0.1.4.0 readSomeRef :: MonadIO m => SomeRef a -> m a readSomeRef (SomeRef x _) = liftIO x -- | Write to a SomeRef -- -- @since 0.1.4.0 writeSomeRef :: MonadIO m => SomeRef a -> a -> m () writeSomeRef (SomeRef _ x) = liftIO . x -- | Modify a SomeRef -- This function is subject to change due to the lack of atomic operations -- -- @since 0.1.4.0 modifySomeRef :: MonadIO m => SomeRef a -> (a -> a) -> m () modifySomeRef (SomeRef read' write) f = liftIO $ (f <$> read') >>= write ioRefToSomeRef :: IORef a -> SomeRef a ioRefToSomeRef ref = SomeRef (readIORef ref) (\val -> modifyIORef' ref (\_ -> val)) uRefToSomeRef :: Unbox a => URef RealWorld a -> SomeRef a uRefToSomeRef ref = SomeRef (readURef ref) (writeURef ref) -- | Environment values with stateful capabilities to SomeRef -- -- @since 0.1.4.0 class HasStateRef s env | env -> s where stateRefL :: Lens' env (SomeRef s) -- | Identity state reference where the SomeRef is the env -- -- @since 0.1.4.0 instance HasStateRef a (SomeRef a) where stateRefL = lens id (\_ x -> x) -- | Environment values with writing capabilities to SomeRef -- -- @since 0.1.4.0 class HasWriteRef w env | env -> w where writeRefL :: Lens' env (SomeRef w) -- | Identity write reference where the SomeRef is the env -- -- @since 0.1.4.0 instance HasWriteRef a (SomeRef a) where writeRefL = lens id (\_ x -> x) instance HasStateRef s env => MonadState s (RIO env) where get = do ref <- view stateRefL liftIO $ readSomeRef ref put st = do ref <- view stateRefL liftIO $ writeSomeRef ref st instance (Monoid w, HasWriteRef w env) => MonadWriter w (RIO env) where tell value = do ref <- view writeRefL liftIO $ modifySomeRef ref (`mappend` value) listen action = do w1 <- view writeRefL >>= liftIO . readSomeRef a <- action w2 <- do refEnv <- view writeRefL v <- liftIO $ readSomeRef refEnv _ <- liftIO $ writeSomeRef refEnv w1 return v return (a, w2) pass action = do (a, transF) <- action ref <- view writeRefL liftIO $ modifySomeRef ref transF return a -- | create a new boxed SomeRef -- -- @since 0.1.4.0 newSomeRef :: MonadIO m => a -> m (SomeRef a) newSomeRef a = do ioRefToSomeRef <$> newIORef a -- | create a new unboxed SomeRef -- -- @since 0.1.4.0 newUnboxedSomeRef :: (MonadIO m, Unbox a) => a -> m (SomeRef a) newUnboxedSomeRef a = uRefToSomeRef <$> (liftIO $ newURef a) rio-0.1.22.0/src/RIO/Prelude/Text.hs0000644000000000000000000000060314231470023015004 0ustar0000000000000000module RIO.Prelude.Text ( decodeUtf8Lenient , tshow ) where import Data.ByteString (ByteString) import qualified Data.Text as T import Data.Text.Encoding (decodeUtf8With) import Data.Text.Encoding.Error (lenientDecode) tshow :: Show a => a -> T.Text tshow = T.pack . 
show decodeUtf8Lenient :: ByteString -> T.Text decodeUtf8Lenient = decodeUtf8With lenientDecode rio-0.1.22.0/src/RIO/Prelude/Trace.hs0000644000000000000000000001322014231470023015115 0ustar0000000000000000module RIO.Prelude.Trace ( -- ** Trace -- *** Text trace , traceId , traceIO , traceM , traceEvent , traceEventIO , traceMarker , traceMarkerIO , traceStack -- *** Show , traceShow , traceShowId , traceShowIO , traceShowM , traceShowEvent , traceShowEventIO , traceShowMarker , traceShowMarkerIO , traceShowStack -- *** Display , traceDisplay , traceDisplayId , traceDisplayIO , traceDisplayM , traceDisplayEvent , traceDisplayEventIO , traceDisplayMarker , traceDisplayMarkerIO , traceDisplayStack ) where import qualified Debug.Trace as Trace import Control.Monad.IO.Class(MonadIO(..)) import RIO.Prelude.Display import RIO.Text (Text) import qualified RIO.Text as Text ---------------------------------------------------- -- Text ---------------------------------------------------- {-# WARNING trace "Trace statement left in code" #-} -- | @since 0.1.0.0 trace :: Text -> a -> a trace = Trace.trace . Text.unpack {-# WARNING traceId "Trace statement left in code" #-} -- | @since 0.1.0.0 traceId :: Text -> Text traceId str = Trace.trace (Text.unpack str) str {-# WARNING traceIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceIO :: MonadIO m => Text -> m () traceIO = liftIO . Trace.traceIO . Text.unpack {-# WARNING traceM "Trace statement left in code" #-} -- | @since 0.1.0.0 traceM :: Applicative f => Text -> f () traceM = Trace.traceM . Text.unpack {-# WARNING traceEvent "Trace statement left in code" #-} -- | @since 0.1.0.0 traceEvent :: Text -> a -> a traceEvent = Trace.traceEvent . Text.unpack {-# WARNING traceEventIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceEventIO :: MonadIO m => Text -> m () traceEventIO = liftIO . Trace.traceEventIO . Text.unpack {-# WARNING traceMarker "Trace statement left in code" #-} -- | @since 0.1.0.0 traceMarker :: Text -> a -> a traceMarker = Trace.traceMarker . Text.unpack {-# WARNING traceMarkerIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceMarkerIO :: MonadIO m => Text -> m () traceMarkerIO = liftIO . Trace.traceMarkerIO . Text.unpack {-# WARNING traceStack "Trace statement left in code" #-} -- | @since 0.1.0.0 traceStack :: Text -> a -> a traceStack = Trace.traceStack . Text.unpack ---------------------------------------------------- -- Show ---------------------------------------------------- {-# WARNING traceShow "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShow :: Show a => a -> b -> b traceShow = Trace.traceShow {-# WARNING traceShowId "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowId :: Show a => a -> a traceShowId = Trace.traceShowId {-# WARNING traceShowIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowIO :: (Show a, MonadIO m) => a -> m () traceShowIO = liftIO . Trace.traceIO . show {-# WARNING traceShowM "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowM :: (Show a, Applicative f) => a -> f () traceShowM = Trace.traceM . show {-# WARNING traceShowEvent "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowEvent :: Show a => a -> b -> b traceShowEvent = Trace.traceEvent . show {-# WARNING traceShowEventIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowEventIO :: (Show a, MonadIO m) => a -> m () traceShowEventIO = liftIO . Trace.traceEventIO . 
show {-# WARNING traceShowMarker "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowMarker :: Show a => a -> b -> b traceShowMarker = Trace.traceMarker . show {-# WARNING traceShowMarkerIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowMarkerIO :: (Show a, MonadIO m) => a -> m () traceShowMarkerIO = liftIO . Trace.traceMarkerIO . show {-# WARNING traceShowStack "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowStack :: Show a => a -> b -> b traceShowStack = Trace.traceStack . show ---------------------------------------------------- -- Display ---------------------------------------------------- {-# WARNING traceDisplay "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplay :: Display a => a -> b -> b traceDisplay = trace . utf8BuilderToText . display {-# WARNING traceDisplayId "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayId :: Display a => a -> a traceDisplayId x = traceDisplay x x {-# WARNING traceDisplayIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayIO :: (Display a, MonadIO m) => a -> m () traceDisplayIO = traceIO . utf8BuilderToText . display {-# WARNING traceDisplayM "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayM :: (Display a, Applicative f) => a -> f () traceDisplayM = traceM . utf8BuilderToText . display {-# WARNING traceDisplayEvent "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayEvent :: Display a => a -> b -> b traceDisplayEvent = traceEvent . utf8BuilderToText . display {-# WARNING traceDisplayEventIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayEventIO :: (Display a, MonadIO m) => a -> m () traceDisplayEventIO = traceEventIO . utf8BuilderToText . display {-# WARNING traceDisplayMarker "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayMarker :: Display a => a -> b -> b traceDisplayMarker = traceMarker . utf8BuilderToText . display {-# WARNING traceDisplayMarkerIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayMarkerIO :: (Display a, MonadIO m) => a -> m () traceDisplayMarkerIO = traceMarkerIO . utf8BuilderToText . display {-# WARNING traceDisplayStack "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayStack :: Display a => a -> b -> b traceDisplayStack = traceStack . utf8BuilderToText . display rio-0.1.22.0/src/RIO/Prelude/URef.hs0000644000000000000000000000336514231470023014731 0ustar0000000000000000module RIO.Prelude.URef ( URef , IOURef , newURef , readURef , writeURef , modifyURef ) where import RIO.Prelude.Reexports import qualified Data.Vector.Unboxed.Mutable as MUVector -- | An unboxed reference. This works like an 'IORef', but the data is -- stored in a bytearray instead of a heap object, avoiding -- significant allocation overhead in some cases. For a concrete -- example, see this Stack Overflow question: -- . -- -- The first parameter is the state token type, the same as would be -- used for the 'ST' monad. If you're using an 'IO'-based monad, you -- can use the convenience 'IOURef' type synonym instead. -- -- @since 0.0.2.0 newtype URef s a = URef (MUVector.MVector s a) -- | Helpful type synonym for using a 'URef' from an 'IO'-based stack. 
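--
-- A small sketch of typical usage in 'IO':
--
-- > do ref <- newURef (0 :: Int)
-- >    modifyURef ref (+ 1)
-- >    readURef ref -- yields 1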
-- -- @since 0.0.2.0 type IOURef = URef (PrimState IO) -- | Create a new 'URef' -- -- @since 0.0.2.0 newURef :: (PrimMonad m, Unbox a) => a -> m (URef (PrimState m) a) newURef a = fmap URef (MUVector.replicate 1 a) -- | Read the value in a 'URef' -- -- @since 0.0.2.0 readURef :: (PrimMonad m, Unbox a) => URef (PrimState m) a -> m a readURef (URef v) = MUVector.unsafeRead v 0 -- | Write a value into a 'URef'. Note that this action is strict, and -- will force evalution of the value. -- -- @since 0.0.2.0 writeURef :: (PrimMonad m, Unbox a) => URef (PrimState m) a -> a -> m () writeURef (URef v) = MUVector.unsafeWrite v 0 -- | Modify a value in a 'URef'. Note that this action is strict, and -- will force evaluation of the result value. -- -- @since 0.0.2.0 modifyURef :: (PrimMonad m, Unbox a) => URef (PrimState m) a -> (a -> a) -> m () modifyURef u f = readURef u >>= writeURef u . f rio-0.1.22.0/test/Spec.hs0000644000000000000000000000005414231470023013111 0ustar0000000000000000{-# OPTIONS_GHC -F -pgmF hspec-discover #-} rio-0.1.22.0/test/RIO/DequeSpec.hs0000644000000000000000000001172014231470023014530 0ustar0000000000000000{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE ScopedTypeVariables #-} module RIO.DequeSpec (spec) where import RIO import Test.Hspec import Test.Hspec.QuickCheck import Test.QuickCheck.Arbitrary import Test.QuickCheck.Gen import qualified Data.Vector as VB import qualified Data.Vector.Generic as VG import qualified Data.Vector.Unboxed as VU import qualified Data.Vector.Storable as VS import qualified Data.Vector.Generic.Mutable as V data DequeAction = PushFront Int | PushBack Int | PopFront | PopBack deriving Show instance Arbitrary DequeAction where arbitrary = oneof $ concat [ replicate 25 $ fmap PushFront arbitrary , replicate 25 $ fmap PushBack arbitrary , [return PopFront, return PopBack] ] manyPushes :: [DequeAction] manyPushes = concat [ replicate 50 $ PushBack 0 , replicate 50 PopFront , replicate 50 $ PushFront 0 , replicate 50 PopBack ] specialCase :: [DequeAction] specialCase = [PushBack 9, PushBack 5,PushBack 11,PushBack 2,PushBack 13,PushBack 10,PushBack 4,PushBack 13,PushBack 7,PushBack 8,PushBack 6,PushBack 4,PushBack 7,PushBack 9,PushBack 10,PushBack 3,PushBack 2,PushBack 12,PushBack 12 ,PushBack 6,PushBack 3,PushBack 5,PushBack 14,PushBack 14,PushBack 11,PushBack 8,PopFront,PopFront,PopFront,PushBack 11,PushBack 3,PopFront,PopFront,PushBack 13,PushBack 12,PopFront,PushBack 10,PushBack 7,PopFront,PopFront,PushBack 13,PushBack 9,PopFront,PushBack 7,PushBack 2,PopFront,PopFront,PushBack 6,PushBack 4,PopFront,PopFront,PopFront,PushBack 9,PushBack 3,PopFront,PushBack 10,PushBack 6,PopFront,PopFront,PopFront,PushBack 12,PushBack 5,PopFront,PushBack 12,PushBack 5,PopFront,PushBack 6,PushBack 4,PopFront,PopFront,PopFront,PushBack 14,PushBack 10,PopFront,PushBack 14,PushBack 10,PopFront,PushBack 11,PushBack 8,PopFront,PushBack 8,PushBack 2,PopFront,PopFront,PopFront,PushBack 13,PushBack 7,PopFront,PushBack 12,PushBack 5,PopFront,PushBack 10,PushBack 8, PopFront,PushBack 7,PushBack 2,PopFront,PopFront,PushBack 9,PushBack 4,PopFront,PopFront,PopFront,PopFront,PopFront,PopFront,PopFront,PopFront,PushBack 4,PushBack 9,PushBack 3,PushBack 10,PushBack 6,PushBack 4,PushBack 13,PushBack 7,PushBack 9,PushBack 3,PopFront] spec :: Spec spec = do let runActions :: forall v . 
(VG.Vector v Int, Show (v Int), Eq (v Int)) => Proxy v -> [DequeAction] -> IO () runActions proxy actions = do base <- newIORef [] :: IO (IORef [Int]) tested <- newDeque :: IO (Deque (VG.Mutable v) (PrimState IO) Int) for_ (PopFront : PopBack : actions) $ \action -> do case action of PushFront i -> do pushFrontRef base i pushFrontDeque tested i same proxy base tested PushBack i -> do pushBackRef base i pushBackDeque tested i same proxy base tested PopFront -> do expected <- popFrontRef base actual <- popFrontDeque tested actual `shouldBe` expected same proxy base tested PopBack -> do expected <- popBackRef base actual <- popBackDeque tested actual `shouldBe` expected same proxy base tested let drain = do expected <- popBackRef base actual <- popBackDeque tested actual `shouldBe` expected case actual of Just _ -> drain Nothing -> return $! () drain test name proxy = describe name $ do prop "arbitrary actions" $ runActions proxy it "many pushes" $ runActions proxy manyPushes it "special case" $ runActions proxy specialCase test "UDeque" (Proxy :: Proxy VU.Vector) test "SDeque" (Proxy :: Proxy VS.Vector) test "BDeque" (Proxy :: Proxy VB.Vector) pushFrontRef :: IORef [Int] -> Int -> IO () pushFrontRef ref i = modifyIORef ref (i:) pushBackRef :: IORef [Int] -> Int -> IO () pushBackRef ref i = modifyIORef ref (++ [i]) popFrontRef :: IORef [Int] -> IO (Maybe Int) popFrontRef ref = do is <- readIORef ref case is of i:is' -> do writeIORef ref is' pure $ Just i [] -> pure Nothing popBackRef :: IORef [Int] -> IO (Maybe Int) popBackRef ref = do is <- readIORef ref case reverse is of i:is' -> do writeIORef ref $ reverse is' pure $ Just i [] -> pure Nothing same :: forall v. (Show (v Int), Eq (v Int), VG.Vector v Int) => Proxy v -> IORef [Int] -> Deque (VG.Mutable v) (PrimState IO) Int -> IO () same proxy ref deque = do fromRef <- readIORef ref fromRight <- foldrDeque (\i rest -> pure $ i : rest) [] deque fromRight `shouldBe` fromRef fromLeft <- foldlDeque (\rest i -> pure $ i : rest) [] deque fromLeft `shouldBe` reverse fromRef dequeToList deque `shouldReturn` fromRef dequeToVector deque `shouldReturn` (VU.fromList fromRef :: VU.Vector Int) uv :: v Int <- freezeDeque deque uv `shouldBe` VG.fromList fromRef rio-0.1.22.0/test/RIO/FileSpec.hs0000644000000000000000000000350414231470023014345 0ustar0000000000000000{-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE NoImplicitPrelude #-} module RIO.FileSpec where import Test.Hspec import System.FilePath (()) import UnliftIO.Temporary (withSystemTempDirectory) import RIO import qualified RIO.ByteString as BS import qualified RIO.File as SUT spec :: Spec spec = do describe "ensureFileDurable" $ do it "ensures a file is durable with an fsync" $ withSystemTempDirectory "rio" $ \dir -> do let fp = dir "ensure_file_durable" writeFileUtf8 fp "Hello World" SUT.ensureFileDurable fp contents <- BS.readFile fp contents `shouldBe` "Hello World" describe "withBinaryFileDurableAtomic" $ do context "read/write" $ do it "works correctly" $ do withSystemTempDirectory "rio" $ \dir -> do let fp = dir "ensure_file_durable_atomic" writeFileUtf8 fp "Hello World" SUT.withBinaryFileDurableAtomic fp ReadWriteMode $ \h -> do input <- BS.hGetLine h input `shouldBe` "Hello World" BS.hPut h "Goodbye World" context "happy path" $ do it "works the same as withFile" $ do withSystemTempDirectory "rio" $ \dir -> do let fp = dir "with_file_durable_atomic" SUT.withBinaryFileDurableAtomic fp WriteMode $ \h -> BS.hPut h "Hello World" contents <- BS.readFile fp 
contents `shouldBe` "Hello World" describe "withBinaryFileDurable" $ do context "happy path" $ do it "works the same as withFile" $ do withSystemTempDirectory "rio" $ \dir -> do let fp = dir "with_file_durable" SUT.withBinaryFileDurable fp WriteMode $ \h -> BS.hPut h "Hello World" contents <- BS.readFile fp contents `shouldBe` "Hello World" rio-0.1.22.0/test/RIO/ListSpec.hs0000644000000000000000000000213314231470023014376 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module RIO.ListSpec where import Test.Hspec import RIO import qualified RIO.List as List data TestType = TestType { testTypeContents :: Int } deriving (Eq, Show) testTypeList :: [TestType] testTypeList = [TestType { testTypeContents = 1 }, TestType { testTypeContents = 0 }] spec :: Spec spec = do describe "dropPrefix" $ do it "present" $ List.dropPrefix "foo" "foobar" `shouldBe` "bar" it "absent" $ List.dropPrefix "bar" "foobar" `shouldBe` "foobar" describe "dropSuffix" $ do it "present" $ List.dropSuffix "bar" "foobar" `shouldBe` "foo" it "absent" $ List.dropSuffix "foo" "foobar" `shouldBe` "foobar" describe "maximumByMaybe" $ do it "should support elements that do not have an Ord instance" $ List.maximumByMaybe (compare `on` testTypeContents) testTypeList `shouldBe` (Just TestType { testTypeContents = 1}) describe "minimumByMaybe" $ do it "should support elements that do not have an Ord instance" $ List.minimumByMaybe (compare `on` testTypeContents) testTypeList `shouldBe` (Just TestType { testTypeContents = 0}) rio-0.1.22.0/test/RIO/LoggerSpec.hs0000644000000000000000000000657414231470023014717 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module RIO.LoggerSpec (spec) where import Test.Hspec import RIO import Data.ByteString.Builder (toLazyByteString) spec :: Spec spec = do it "sanity" $ do (ref, options) <- logOptionsMemory withLogFunc options $ \lf -> runRIO lf $ do logDebug "should not appear" logInfo "should appear" builder <- readIORef ref toLazyByteString builder `shouldBe` "should appear\n" it "sticky" $ do (ref, options) <- logOptionsMemory withLogFunc options $ \lf -> runRIO lf $ do logSticky "ABC" logDebug "should not appear" logInfo "should appear" logStickyDone "XYZ" builder <- readIORef ref toLazyByteString builder `shouldBe` "ABC\b\b\b \b\b\bshould appear\nABC\b\b\b \b\b\bXYZ\n" it "stickyUnicode" $ do (ref, options) <- logOptionsMemory withLogFunc options $ \lf -> runRIO lf $ do logSticky "ö" logStickyDone "." builder <- readIORef ref toLazyByteString builder `shouldBe` "\195\182\b \b.\n" it "stickyAnsiEscape" $ do (ref, options) <- logOptionsMemory withLogFunc options $ \lf -> runRIO lf $ do logSticky "\ESC[31mABC\ESC[0m" logStickyDone "." 
builder <- readIORef ref toLazyByteString builder `shouldBe` "\ESC[31mABC\ESC[0m\b\b\b \b\b\b.\n" it "setLogMinLevelIO" $ do (ref, options) <- logOptionsMemory logLevelRef <- newIORef LevelDebug withLogFunc (options & setLogMinLevelIO (readIORef logLevelRef)) $ \lf -> runRIO lf $ do logDebug "should appear" -- reset log min level to info atomicModifyIORef' logLevelRef (\_ -> (LevelInfo, ())) logDebug "should not appear" builder <- readIORef ref toLazyByteString builder `shouldBe` "should appear\n" it "setLogVerboseFormatIO" $ do (ref, options) <- logOptionsMemory logVerboseFormatRef <- newIORef True withLogFunc (options & setLogVerboseFormatIO (readIORef logVerboseFormatRef)) $ \lf -> runRIO lf $ do logInfo "verbose log" -- reset verbose format atomicModifyIORef' logVerboseFormatRef (\_ -> (False, ())) logInfo "no verbose log" builder <- readIORef ref toLazyByteString builder `shouldBe` "[info] verbose log\nno verbose log\n" it "noLogging" $ do (ref, options) <- logOptionsMemory withLogFunc (options & setLogVerboseFormat True) $ \lf -> runRIO lf $ do logInfo "should appear" noLogging $ logInfo "should not appear" builder <- readIORef ref toLazyByteString builder `shouldBe` "[info] should appear\n" it "setLogFormat" $ do (ref, options) <- logOptionsMemory let format = ("[context] " <>) withLogFunc (options & setLogFormat format) $ \lf -> runRIO lf $ do logInfo "should be formatted" builder <- readIORef ref toLazyByteString builder `shouldBe` "[context] should be formatted\n" it "noSource" $ do (ref, options) <- logOptionsMemory withLogFunc options $ \lf -> runRIO lf $ do logInfoS "tests" "should appear" builder <- readIORef ref toLazyByteString builder `shouldBe` "(tests) should appear\n" it "noSource verbose" $ do (ref, options) <- logOptionsMemory withLogFunc (options & setLogVerboseFormat True) $ \lf -> runRIO lf $ do logInfoS "tests" "should appear" builder <- readIORef ref toLazyByteString builder `shouldBe` "[info] (tests) should appear\n" rio-0.1.22.0/test/RIO/Prelude/ExtraSpec.hs0000644000000000000000000000247014231470023016152 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE OverloadedStrings #-} module RIO.Prelude.ExtraSpec (spec) where import RIO import RIO.Process import Test.Hspec import qualified Data.Map as Map import qualified Data.Text as T import qualified System.FilePath as FP spec :: Spec spec = do describe "foldMapM" $ do it "sanity" $ do let helper :: Applicative f => Int -> f [Int] helper = pure . pure res <- foldMapM helper [1..10] res `shouldBe` [1..10] describe "augmentPathMap" $ do -- https://github.com/commercialhaskell/rio/issues/234 it "Doesn't duplicate PATH keys on windows" $ do let pathKey :: T.Text #if WINDOWS pathKey = "Path" #else pathKey = "PATH" #endif origEnv :: EnvVars origEnv = Map.fromList [ ("foo", "3") , ("bar", "baz") , (pathKey, makePath ["/local/bin", "/usr/bin"]) ] let res = second (fmap getPaths . Map.lookup "PATH") $ augmentPathMap ["/bin"] origEnv res `shouldBe` Right (Just ["/bin", "/local/bin", "/usr/bin"]) where makePath :: [T.Text] -> T.Text makePath = T.intercalate (T.singleton FP.searchPathSeparator) getPaths :: T.Text -> [T.Text] getPaths = fmap T.pack . FP.splitSearchPath . 
T.unpack rio-0.1.22.0/test/RIO/Prelude/IOSpec.hs0000644000000000000000000000137614231470023015402 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE ViewPatterns #-} module RIO.Prelude.IOSpec (spec) where import RIO import Test.Hspec import Test.Hspec.QuickCheck import qualified RIO.ByteString as B import qualified RIO.Text as T spec :: Spec spec = do prop "binary file read/write" $ \(B.pack -> bs1) -> withSystemTempFile "binary-read-write" $ \fp h -> do hClose h writeFileBinary fp bs1 bs2 <- readFileBinary fp bs2 `shouldBe` bs1 -- filter our \r for Windows prop "text file read/write" $ \(T.pack . filter (/= '\r') -> text1) -> withSystemTempFile "binary-read-write" $ \fp h -> do hClose h writeFileUtf8 fp text1 text2 <- readFileUtf8 fp text2 `shouldBe` text1 rio-0.1.22.0/test/RIO/Prelude/RIOSpec.hs0000644000000000000000000000271314231470023015520 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE ViewPatterns #-} module RIO.Prelude.RIOSpec (spec) where import RIO import RIO.State import RIO.Writer import Test.Hspec spec :: Spec spec = do describe "RIO writer instance" $ do it "tell works" $ do ref <- newSomeRef (mempty :: Text) runRIO ref $ do tell "hello\n" tell "world\n" contents <- readSomeRef ref contents `shouldBe` "hello\nworld\n" it "listen works" $ do ref <- newSomeRef (mempty :: Text) ((), str) <- runRIO ref $ listen $ do tell "hello\n" tell "world\n" contents <- readSomeRef ref contents `shouldBe` "" str `shouldBe` "hello\nworld\n" it "pass works" $ do ref <- newSomeRef (mempty :: Text) () <- runRIO ref $ pass $ do tell "hello\n" tell "world\n" return ((), \a -> a <> "!") contents <- readSomeRef ref contents `shouldBe` "hello\nworld\n!" describe "RIO state instance" $ do it "get works" $ do ref <- newSomeRef (mempty :: Text) result <- runRIO ref $ do put "hello world" x <- get return x result `shouldBe` "hello world" it "state works" $ do ref <- newSomeRef (mempty :: Text) _newRef <- newSomeRef ("Hello World!" :: Text) () <- runRIO ref $ state (\_ -> ((), "Hello World!")) contents <- readSomeRef ref contents `shouldBe` "Hello World!" 
rio-0.1.22.0/test/RIO/Prelude/SimpleSpec.hs0000644000000000000000000000056714231470023016325 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE ViewPatterns #-} module RIO.Prelude.SimpleSpec (spec) where import RIO import RIO.Process import Test.Hspec spec :: Spec spec = do it "logging works" $ asIO $ runSimpleApp $ logDebug "logging allowed" it "process calling works" $ asIO $ runSimpleApp $ proc "echo" ["hello"] runProcess_ rio-0.1.22.0/test/RIO/PreludeSpec.hs0000644000000000000000000000203414231470023015063 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module RIO.PreludeSpec (spec) where import Test.Hspec import RIO spec :: Spec spec = do describe "URef" $ do it "sanity" $ do ref <- newURef (0 :: Int) x <- readURef ref x `shouldBe` 0 writeURef ref 1 y <- readURef ref y `shouldBe` 1 modifyURef ref (+ 1) z <- readURef ref z `shouldBe` 2 describe "whenM" $ do it "returns True" $ do ref <- newIORef False whenM (return True) (writeIORef ref True) readIORef ref `shouldReturn` True it "returns False" $ do ref <- newIORef False whenM (return False) (writeIORef ref True) readIORef ref `shouldReturn` False describe "unlessM" $ do it "returns True" $ do ref <- newIORef False unlessM (return True) (writeIORef ref True) readIORef ref `shouldReturn` False it "returns False" $ do ref <- newIORef False unlessM (return False) (writeIORef ref True) readIORef ref `shouldReturn` True rio-0.1.22.0/test/RIO/TextSpec.hs0000644000000000000000000000076314231470023014416 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module RIO.TextSpec where import Test.Hspec import RIO import qualified RIO.Text as T spec :: Spec spec = do describe "dropPrefix" $ do it "present" $ T.dropPrefix "foo" "foobar" `shouldBe` "bar" it "absent" $ T.dropPrefix "bar" "foobar" `shouldBe` "foobar" describe "dropSuffix" $ do it "present" $ T.dropSuffix "bar" "foobar" `shouldBe` "foo" it "absent" $ T.dropSuffix "foo" "foobar" `shouldBe` "foobar" rio-0.1.22.0/LICENSE0000644000000000000000000000204314231470023011711 0ustar0000000000000000Copyright (c) 2018 Michael Snoyman Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. rio-0.1.22.0/rio.cabal0000644000000000000000000000712514231544506012477 0ustar0000000000000000cabal-version: 1.12 -- This file has been generated from package.yaml by hpack version 0.34.4. 
-- -- see: https://github.com/sol/hpack name: rio version: 0.1.22.0 synopsis: A standard library for Haskell description: See README and Haddocks at category: Control homepage: https://github.com/commercialhaskell/rio#readme bug-reports: https://github.com/commercialhaskell/rio/issues author: Michael Snoyman maintainer: michael@snoyman.com license: MIT license-file: LICENSE build-type: Simple extra-source-files: README.md ChangeLog.md source-repository head type: git location: https://github.com/commercialhaskell/rio library exposed-modules: RIO RIO.ByteString RIO.ByteString.Lazy RIO.ByteString.Lazy.Partial RIO.ByteString.Partial RIO.Char RIO.Char.Partial RIO.Deque RIO.Directory RIO.File RIO.FilePath RIO.HashMap RIO.HashMap.Partial RIO.HashSet RIO.Lens RIO.List RIO.List.Partial RIO.Map RIO.Map.Partial RIO.Map.Unchecked RIO.NonEmpty RIO.NonEmpty.Partial RIO.Partial RIO.Prelude RIO.Prelude.Simple RIO.Prelude.Types RIO.Process RIO.Seq RIO.Set RIO.Set.Partial RIO.Set.Unchecked RIO.State RIO.Text RIO.Text.Lazy RIO.Text.Lazy.Partial RIO.Text.Partial RIO.Time RIO.Vector RIO.Vector.Boxed RIO.Vector.Boxed.Partial RIO.Vector.Boxed.Unsafe RIO.Vector.Partial RIO.Vector.Storable RIO.Vector.Storable.Partial RIO.Vector.Storable.Unsafe RIO.Vector.Unboxed RIO.Vector.Unboxed.Partial RIO.Vector.Unboxed.Unsafe RIO.Vector.Unsafe RIO.Writer other-modules: RIO.Prelude.Display RIO.Prelude.Exit RIO.Prelude.Extra RIO.Prelude.IO RIO.Prelude.Lens RIO.Prelude.Logger RIO.Prelude.Reexports RIO.Prelude.Renames RIO.Prelude.RIO RIO.Prelude.Text RIO.Prelude.Trace RIO.Prelude.URef hs-source-dirs: src/ build-depends: base >=4.12 && <10 , bytestring , containers , deepseq , directory , exceptions , filepath , hashable , microlens , microlens-mtl , mtl , primitive , process , text , time , typed-process >=0.2.5.0 , unliftio >=0.2.14 , unliftio-core , unordered-containers , vector if os(windows) cpp-options: -DWINDOWS build-depends: Win32 else build-depends: unix default-language: Haskell2010 test-suite spec type: exitcode-stdio-1.0 main-is: Spec.hs other-modules: RIO.DequeSpec RIO.FileSpec RIO.ListSpec RIO.LoggerSpec RIO.Prelude.ExtraSpec RIO.Prelude.IOSpec RIO.Prelude.RIOSpec RIO.Prelude.SimpleSpec RIO.PreludeSpec RIO.TextSpec Paths_rio hs-source-dirs: test build-depends: QuickCheck , base >=4.12 && <10 , bytestring , containers , deepseq , directory , exceptions , filepath , hashable , hspec , microlens , microlens-mtl , mtl , primitive , process , rio , text , time , typed-process >=0.2.5.0 , unliftio >=0.2.14 , unliftio-core , unordered-containers , vector if os(windows) cpp-options: -DWINDOWS build-depends: Win32 else build-depends: unix default-language: Haskell2010 build-tool-depends: hspec-discover:hspec-discover rio-0.1.22.0/README.md0000644000000000000000000003347014231470023012173 0ustar0000000000000000# The rio library *A standard library for Haskell* ![Rio](https://camo.githubusercontent.com/fc162fb0024699c85f00eae769085a5fe528153e/68747470733a2f2f7777772e61687374617469632e636f6d2f70686f746f732f636974792f76692d76363837315f30305f31343030783434322e6a7067) ![Tests](https://github.com/commercialhaskell/rio/workflows/Tests/badge.svg) The goal of the `rio` library is to make it easier to adopt Haskell for writing production software. It is intended as a cross between: * Collection of well designed, trusted libraries * Useful `Prelude` replacement * A set of best practices for writing production quality Haskell code This repository contains the `rio` library and other related libraries, such as `rio-orphans`. 
There is a [tutorial on how to use `rio`](https://haskell.fpcomplete.com/library/rio) available on FP Complete's Haskell site. This README discusses project goals and collects other reference information. ## Standard library While GHC ships with a `base` library, as well as a number of other common packages like `directory` and `transformers`, there are large gaps in functionality provided by these libraries. This choice for a more minimalistic `base` is by design, but it leads to some unfortunate consequences: * For a given task, it's often unclear which is the right library to use * When writing libraries, there is often concern about adding dependencies to any libraries outside of `base`, due to creating a heavier dependency footprint * By avoiding adding dependencies, many libraries end up reimplementing the same functionality, often with incompatible types and type classes, leading to difficulty using libraries together This library attempts to define a standard library for Haskell. One immediate response may be [XKCD #927](https://xkcd.com/927/): ![XKCD Standards](https://imgs.xkcd.com/comics/standards.png) To counter that effect, this library takes a specific approach: __it reuses existing, commonly used libraries__. Instead of defining an incompatible `Map` type, for instance, we standardize on the commonly used one from the `containers` library and reexport it from this library. This library attempts to define a set of libraries as "standard," meaning they are recommended for use, and should be encouraged as dependencies for other libraries. It does this by depending on these libraries itself, and reexporting their types and functions for easy use. Beyond the ecosystem effects we hope to achieve, this will hopefully make the user story much easier. For a new user or team trying to get started, there is an easy library to depend upon for a large percentage of common functionality. See the dependencies of this package to see the list of packages considered standard. The primary interfaces of each of these packages is exposed from this library via a `RIO.`-prefixed module reexporting its interface. ## Prelude replacement The `RIO` module works as a prelude replacement, providing more functionality and types out of the box than the standard prelude (such as common data types like `ByteString` and `Text`), as well as removing common "gotchas", like partial functions and lazy I/O. The guiding principle here is: * If something is safe to use in general and has no expected naming conflicts, expose it from `RIO` * If something should not always be used, or has naming conflicts, expose it from another module in the `RIO.` hierarchy. ## Best practices Below is a set of best practices we recommend following. You're obviously free to take any, all, or none of this. Over time, these will probably develop into much more extensive docs. Some of these design decisions will be catered to by choices in the `rio` library. For Haskellers looking for a set of best practices to follow: you've come to the right place! ### Import practices This library is intended to provide a fully loaded set of basic functionality. You should: * Enable the `NoImplicitPrelude` language extension (see below) * Add `import RIO` as your replacement prelude in all modules * Use the `RIO.`-prefixed modules as necessary, imported using the recommended qualified names in the modules themselves. For example, `import qualified RIO.ByteString as B`. See the module documentation for more information. 
* Infix operators may be imported unqualified, with a separate import line if necessary. For example, `import RIO.Map ((?!), (\\))`. Do this only if your module contains no overlapping infix names, regardless of qualification. For instance, if you are importing both `RIO.Map.\\` and `RIO.List.\\` do not import either one unqualified. In the future, we may have editor integration or external tooling to help with import management. ### Language extensions Very few projects these days use bare-bones Haskell 98 or 2010. Instead, almost all codebases enable some set of additional language extensions. Below is a list of extensions we recommend as a good default, in that these are: * Well accepted in the community * Cause little to no code breakage versus leaving them off * Are generally considered safe Our recommended defaults are: ``` AutoDeriveTypeable BangPatterns BinaryLiterals ConstraintKinds DataKinds DefaultSignatures DeriveDataTypeable DeriveFoldable DeriveFunctor DeriveGeneric DeriveTraversable DoAndIfThenElse EmptyDataDecls ExistentialQuantification FlexibleContexts FlexibleInstances FunctionalDependencies GADTs GeneralizedNewtypeDeriving InstanceSigs KindSignatures LambdaCase MonadFailDesugaring MultiParamTypeClasses MultiWayIf NamedFieldPuns NoImplicitPrelude OverloadedStrings PartialTypeSignatures PatternGuards PolyKinds RankNTypes RecordWildCards ScopedTypeVariables StandaloneDeriving TupleSections TypeFamilies TypeSynonymInstances ViewPatterns ``` Notes on some surprising choices: * `RecordWildCards` is really up for debate. It's widely used, but rightfully considered by many to be dangerous. Open question about what we do with it. * Despite the fact that `OverloadedStrings` can break existing code, we recommend its usage to encourage avoidance of the `String` data type. Also, for new code, the risk of breakage is much lower. * `MonadFailDesugaring` helps prevent partial pattern matches in your code, see [#85](https://github.com/commercialhaskell/rio/issues/85) Due to concerns about tooling usage (see [issue #9](https://github.com/commercialhaskell/rio/issues/9)), we recommend adding these extensions on-demand in your individual source modules instead of including them in your `package.yaml` or `.cabal` files. There are other language extensions which are perfectly fine to use as well, but are not recommended to be turned on by default: ``` CPP TemplateHaskell ForeignFunctionInterface MagicHash UnliftedFFITypes TypeOperators UnboxedTuples PackageImports QuasiQuotes DeriveAnyClass DeriveLift StaticPointers ``` ### GHC Options We recommend using these GHC complier warning flags on all projects, to catch problems that might otherwise go overlooked: * `-Wall` * `-Wcompat` * `-Widentities` * `-Wincomplete-record-updates` * `-Wincomplete-uni-patterns` * `-Wpartial-fields` * `-Wredundant-constraints` You may add them per file, or to your `package.yaml`, or pass them on the command line when running ghc. We include these in the project template's `package.yaml` file. For code targeting production use, you should also use the flag that turns all warnings into errors, to force you to resolve the warnings before you ship your code: * `-Werror` Further reading: * Alexis King explains why these are a good idea in [her blog post](https://lexi-lambda.github.io/blog/2018/02/10/an-opinionated-guide-to-haskell-in-2018/) which was the original inspiration for this section. 
* Max Tagher gives an in-depth overview of these flags, and more, [in his blog post](https://medium.com/mercury-bank/enable-all-the-warnings-a0517bc081c3). ### Monads A primary design choice you'll need to make in your code is how to structure your monads. There are many options out there, with various trade-offs. Instead of going through all of the debates, we're going to point to [an existing blog post](https://www.fpcomplete.com/blog/2017/07/the-rio-monad), and here just give recommendations. * If your code is going to perform I/O: it should live in the `RIO` monad. `RIO` is "reader IO." It's the same as `ReaderT env IO`, but includes some helper functions in this library and leads to nicer type signatures and error messages. * If you need to provide access to specific data to a function, do it via a typeclass constraint on the `env`, _not_ via a concrete env. For example, this is bad: ```haskell myFunction :: RIO Config Foo ``` This is good: ```haskell class HasConfig env where configL :: Lens' env Config -- more on this in a moment myFunction :: HasConfig env => RIO env Foo ``` Reason: by using typeclass constraints on the environment, we can easily compose multiple functions together and collect up the constraints, which wouldn't be possible with concrete environments. We _could_ go more general with mtl-style typeclasses, like `MonadReader` or `MonadHasConfig`, but `RIO` is a perfect balance point in the composability/concreteness space (see blog post above for more details). * When defining `Has`-style typeclasses for the environments, we use lenses (which are exposed by `RIO`) because it provides for easy composability. We also leverage superclasses wherever possible. As an example of how this works in practice: ```haskell -- Defined in RIO.Logger class HasLogFunc env where logFuncL :: Lens' env LogFunc class HasConfig env where configL :: Lens' env Config instance HasConfig Config where configL = id data Env = Env { envLogFunc :: !LogFunc, envConfig :: !Config } class (HasLogFunc env, HasConfig env) => HasEnv env where envL :: Lens' env Env instance HasLogFunc Env where logFuncL = lens envLogFunc (\x y -> x { envLogFunc = y }) instance HasConfig Env where configL = lens envConfig (\x y -> x { envConfig = y }) instance HasEnv Env where envL = id -- And then, at some other part of the code data SuperEnv = SuperEnv { seEnv :: !Env, seOtherStuff :: !OtherStuff } instance HasLogFunc SuperEnv where logFuncL = envL.logFuncL instance HasConfig SuperEnv where configL = envL.configL instance HasEnv SuperEnv where envL = lens seEnv (\x y -> x { seEnv = y }) ``` * If you're writing code that you want to be usable outside of `RIO` for some reason, you should stick to the good mtl-style typeclasses: `MonadReader`, `MonadIO`, `MonadUnliftIO`, `MonadThrow`, and `PrimMonad`. It's better to use `MonadReader`+`Has` than to create new typeclasses like `MonadLogger`, though usually just sticking with the simpler `RIO env` is fine (and can easily be converted to the more general form with `liftRIO`). You should avoid using the following typeclasses (intentionally not exposed from this library): `MonadBase`, `MonadBaseControl`, `MonadCatch`, and `MonadMask`. ### Exceptions For in-depth discussion, see [safe exception handling](https://haskell.fpcomplete.com/tutorial/exceptions). The basic idea is: * If something can fail, and you want people to deal with that failure every time (e.g., `lookup`), then return a `Maybe` or `Either` value. 
* If the user will usually not want to deal with it, then use exceptions. In the case of pure code, use a `MonadThrow` constraint. In the case of `IO` code: use runtime exceptions via `throwIO` (works in the `RIO` monad too). * You'll be upset and frustrated that you don't know exactly how some `IO` action can fail. Accept that pain, live with it, internalize it, use `tryAny`, and move on. It's the price we pay for async exceptions. * Do all resource allocations with functions like `bracket` and `finally`. It’s a good idea to define an app-wide exception type: ```haskell data AppExceptions = NetworkChangeError Text | FilePathError FilePath | ImpossibleError deriving (Typeable) instance Exception AppExceptions instance Show AppExceptions where show = \case NetworkChangeError err -> "network error: " <> (unpack err) FilePathError fp -> "error accessing filepath at: " <> fp ImpossibleError -> "this codepath should never have been executed. Please report a bug." ``` ### Strict data fields Make data fields strict by default, unless you have a good reason to do otherwise. ### Project template We provide a project template which sets up lots of things for you out of the box. You can use it by running: ``` $ stack new projectname rio ``` ### Safety first This library intentionally puts safety first, and therefore avoids promoting partial functions and lazy I/O. If you think you need lazy I/O: you need a streaming data library like conduit instead. ### When to generalize A common question in Haskell code is when should you generalize. Here are some simple guidelines. For parametric polymorphism: _almost always_ generalize, it makes your type signatures more informative and functions more useful. In other words, `reverse :: [a] -> [a]` is far better than `reverse :: [Int] -> [Int]`. When it comes to typeclasses: the story is more nuanced. For typeclasses provided by `RIO`, like `Foldable` or `Traversable`, it's generally a good thing to generalize to them when possible. The real question is defining your own typeclasses. As a general rule: avoid doing so as long as possible. And _if_ you define a typeclass: make sure its usage can't lead to accidental bugs by allowing you to swap in types you didn't expect. ### Module hierarchy The `RIO.Prelude.` module hierarchy contains identifiers which are reexported by the `RIO` module. The reason for this is to make it easier to view the generated Haddocks. The `RIO` module itself is intended to be imported unqualified, with `NoImplicitPrelude` enabled. All other modules are _not_ reexported by the `RIO` module, and will document inside of them whether they should be imported qualified or unqualified. rio-0.1.22.0/ChangeLog.md0000644000000000000000000001246214231544503013070 0ustar0000000000000000# Changelog for rio ## 0.1.22.0 * Expose `augmentPathMap'` ## 0.1.21.0 * Fix minor bug in `augmentPathMap` on windows wrt [#234](https://github.com/commercialhaskell/rio/issues/234) not adhering to case-insensitive semantics ## 0.1.20.0 * Export `UnliftIO.QSem` and `UnliftIO.QSemN` in `RIO` ## 0.1.19.0 * Expose `fromLeft` and `fromRight` ## 0.1.18.0 * Add colours to the `LogOption` constructor [#222](https://github.com/commercialhaskell/rio/pull/222) ## 0.1.17.0 * Expose `Bifunctor`, `Bifoldable`, and `Bitraversable`. * The `first` and `second` functions exported by `RIO` formerly originated from `Control.Arrow`. They now come from `Bifunctor`. ## 0.1.16.0 * Expand the number of `microlens` functions exported by the RIO prelude. 
* Add new module `RIO.Lens` which provides the rest of `microlens`. ## 0.1.15.1 * Replace `canonicalizePath` with `makeAbsolute` [#217](https://github.com/commercialhaskell/rio/issues/217) ## 0.1.15.0 * Include source in log messages ## 0.1.14.1 * Support `unliftio-core` 0.2 ## 0.1.14.0 * Addition of `mkSimpleApp` * Addition of `lookupEnvFromContext` ## 0.1.13.0 * Add `withLazyFileUtf8` * Add `mapRIO` * Add generic logger * Add `exeExtensions` and improve `findExecutable` on Windows [#205](https://github.com/commercialhaskell/rio/issues/205) ## 0.1.12.0 * Add `logFormat` and `setLogFormat` for `LogOptions`. ## 0.1.11.0 * Replace atomic and durable file writing functions with the ones from `unliftio`, see [#167](https://github.com/commercialhaskell/rio/pull/167) ## 0.1.10.0 * Relax a bunch of `RIO.File` functions from `MonadUnliftIO` to `MonadIO` * Custom `Monoid` instance for `Utf8Builder` that matches semantics of the derived one, but doesn't break list fusion * Qualified import recommendations for `*.Partial`, `*.Unchecked`, `*.Unsafe` * Re-export `Data.Ord.Down` from `RIO.Prelude` * Addition of `RIO.NonEmpty` module * Addition of `RIO.NonEmpty.Partial` module * Export `NonEmpty` type and its constructor `(:|)` from RIO.Prelude.Types * Fix handling of non-ASCII characters in `logSticky` * Deprecate `withProcess` and `withProcess_`, add `withProcessWait`, `withProcessWait_`, `withProcessTerm`, and `withProcessTerm_` ## 0.1.9.2 * Bring back re-export of `Monad.fail` from `RIO.Prelude`. ## 0.1.9.1 * Remove accidental reexport of `Control.Applicative.empty` introduced in the previous release. * Functions from `Data.Data.Data` class are brought to the re-export list as well. ## 0.1.9.0 * Add `Prelude.Exit` to export lifted versions of the exit functions from `System.Exit`. * Re-export the `Control.Monad.State.State` and `Control.Monad.State.StateT` types and related computation functions in `RIO.State`. * Re-export the `Control.Monad.Writer.Writer` and `Control.Monad.Writer.WriterT` types and related computation functions in `RIO.Writer`. * Re-export `pred`, `succ` in `RIO.Partial`. * Add `Semigroup` and `Monoid` instances for `RIO` * Add the `Deque` double-ended queue data type * Re-export `Data.Map.Strict.toAscList` and `Data.Map.Strict.toDescList` from `RIO.Map`. * Re-export `Data.Sequence.Seq` from `RIO`. * Addition of `RIO.Prelude` module * Addition of `RIO.Prelude.Types` module * Re-export `zipWith` and `runST` from `RIO.Prelude` * Re-export `Exception`, `MonadFail`, `Typeable` and `ST` from `RIO.Prelude.Types` * Switch to `MonadFail.fail` from `Monad.fail` and re-exported it from `RIO.Prelude` ## 0.1.8.0 * Re-export `Control.Monad.State.modify`, `Control.Monad.State.modify'` and `Control.Monad.State.gets` in `RIO.State` ## 0.1.7.0 * Addition of `textDisplay` to `Display` class. ## 0.1.6.0 * Changed `logUseColor` to default to `False` on Windows, even when verbose and on the terminal * Add `RIO.File` module which offers a family of file handling functions (`withBinaryFileDurable`, `withBinaryFileDurableAtomic`, among others.) 
with better durability and atomicity guarantees ## 0.1.5.0 * Re-export `Numeric.Natural.Natural` [#119](https://github.com/commercialhaskell/rio/issues/119) * Re-export `Data.Functor.<&>` from GHC 8.4+, falling back local definition for `base < 4.11` [#117](https://github.com/commercialhaskell/rio/issues/117) * Re-export `Data.Proxy.Proxy(..)` * Re-export `fromEnum` from RIO, export `toEnum`, `read` and `fromJust` from RIO.Partial * Add `noLogging` function to skip logging on specific sub-routines * Re-export `Control.Category.>>>` ## 0.1.4.0 * Add `Const` and `Identity` * Add `Reader` and `runReader` * Add instances for `MonadWriter` and `MonadState` to `RIO` via mutable reference [#103](https://github.com/commercialhaskell/rio/issues/103) ## 0.1.3.0 * Add `newLogFunc` function to create `LogFunc` records outside of a callback scope * Allow dynamic reloading of `logMinLevel` and `logVerboseFormat` for the `LogOptions` record * Add `foldMapM` * Add `headMaybe`, `lastMaybe`, `tailMaybe`, `initMaybe`, `maximumMaybe`, `minimumMaybe`, `maximumByMaybe`, `minimumByMaybe` functions to `RIO.List` module (issue #82) * Move non partial functions `scanr1` and `scanl1` from `RIO.List.Partial` to `RIO.List` (issue #82) * Add `SimpleApp` and `runSimpleApp` * Add `asIO` ## 0.1.2.0 * Allow setting usage of code location in the log output ## 0.1.1.0 * Move some accidentally included partial functions ## 0.1.0.0 * Initial stable release ## 0.0 __NOTE__ All releases beginning with 0.0 are considered experimental. Caveat emptor!