rio-0.1.8.0/src/RIO.hs

module RIO
  ( module RIO.Prelude.Display
  , module RIO.Prelude.Extra
  , module RIO.Prelude.IO
  , module RIO.Prelude.Lens
  , module RIO.Prelude.Logger
  , module RIO.Prelude.RIO
  , module RIO.Prelude.Reexports
  , module RIO.Prelude.Renames
  , module RIO.Prelude.Text
  , module RIO.Prelude.Trace
  , module RIO.Prelude.URef
  , module RIO.Prelude.Simple
  ) where

import RIO.Prelude.Display
import RIO.Prelude.Extra
import RIO.Prelude.IO
import RIO.Prelude.Lens
import RIO.Prelude.Logger
import RIO.Prelude.RIO
import RIO.Prelude.Reexports
import RIO.Prelude.Renames
import RIO.Prelude.Text
import RIO.Prelude.Trace
import RIO.Prelude.URef
import RIO.Prelude.Simple

rio-0.1.8.0/src/RIO/ByteString.hs

-- | Strict @ByteString@. Import as:
--
-- > import qualified RIO.ByteString as B
{-# LANGUAGE NoImplicitPrelude #-}
module RIO.ByteString
  ( module Data.ByteString
  , module RIO.ByteString
  ) where

import Data.ByteString hiding
  ( head, last, tail, init, foldl1, foldl1', foldr1, foldr1', maximum, minimum
  , findSubstring, findSubstrings, packCString, packCStringLen, useAsCString
  , useAsCStringLen, getLine, getContents, putStr, putStrLn, interact, readFile
  , writeFile, appendFile, hGetLine, hGetContents, hGet, hGetSome
  , hGetNonBlocking, hPut, hPutNonBlocking, hPutStr, hPutStrLn, breakByte
  )
import qualified Data.ByteString as B
import RIO
import Foreign.C.String (CString, CStringLen)

-- | Lifted 'B.packCString'
packCString :: MonadIO m => CString -> m ByteString
packCString = liftIO .
B.packCString -- | Lifted 'B.packCStringLen' packCStringLen :: MonadIO m => CStringLen -> m ByteString packCStringLen = liftIO . B.packCStringLen -- | Unlifted 'B.useAsCString' useAsCString :: MonadUnliftIO m => ByteString -> (CString -> m a) -> m a useAsCString bs inner = withRunInIO $ \run -> B.useAsCString bs $ run . inner -- | Unlifted 'B.useAsCStringLen' useAsCStringLen :: MonadUnliftIO m => ByteString -> (CStringLen -> m a) -> m a useAsCStringLen bs inner = withRunInIO $ \run -> B.useAsCStringLen bs $ run . inner -- | Lifted 'B.getLine' getLine :: MonadIO m => m ByteString getLine = liftIO B.getLine -- | Lifted 'B.getContents' getContents :: MonadIO m => m ByteString getContents = liftIO B.getContents -- | Lifted 'B.putStr' putStr :: MonadIO m => ByteString -> m () putStr = liftIO . B.putStr -- | Lifted 'B.interact' interact :: MonadIO m => (ByteString -> ByteString) -> m () interact = liftIO . B.interact -- | Lifted 'B.readFile' readFile :: MonadIO m => FilePath -> m ByteString readFile = liftIO . B.readFile -- | Lifted 'B.writeFile' writeFile :: MonadIO m => FilePath -> ByteString -> m () writeFile fp = liftIO . B.writeFile fp -- | Lifted 'B.appendFile' appendFile :: MonadIO m => FilePath -> ByteString -> m () appendFile fp = liftIO . B.appendFile fp -- | Lifted 'B.hGetLine' hGetLine :: MonadIO m => Handle -> m ByteString hGetLine = liftIO . B.hGetLine -- | Lifted 'B.hGetContents' hGetContents :: MonadIO m => Handle -> m ByteString hGetContents = liftIO . B.hGetContents -- | Lifted 'B.hGet' hGet :: MonadIO m => Handle -> Int -> m ByteString hGet h = liftIO . B.hGet h -- | Lifted 'B.hGetSome' hGetSome :: MonadIO m => Handle -> Int -> m ByteString hGetSome h = liftIO . B.hGetSome h -- | Lifted 'B.hGetNonBlocking' hGetNonBlocking :: MonadIO m => Handle -> Int -> m ByteString hGetNonBlocking h = liftIO . B.hGetNonBlocking h -- | Lifted 'B.hPut' hPut :: MonadIO m => Handle -> ByteString -> m () hPut h = liftIO . B.hPut h -- | Lifted 'B.hPutNonBlocking' hPutNonBlocking :: MonadIO m => Handle -> ByteString -> m ByteString hPutNonBlocking h = liftIO . B.hPutNonBlocking h -- | Lifted 'B.hPutStr' hPutStr :: MonadIO m => Handle -> ByteString -> m () hPutStr h = liftIO . B.hPutStr h rio-0.1.8.0/src/RIO/ByteString/Lazy.hs0000644000000000000000000001333713253671155015441 0ustar0000000000000000-- | Lazy @ByteString@. 
Import as: -- -- > import qualified RIO.ByteString.Lazy as BL {-# LANGUAGE NoImplicitPrelude #-} module RIO.ByteString.Lazy ( -- * The @ByteString@ type Data.ByteString.Lazy.ByteString -- * Introducing and eliminating 'ByteString's , Data.ByteString.Lazy.empty , Data.ByteString.Lazy.singleton , Data.ByteString.Lazy.pack , Data.ByteString.Lazy.unpack , Data.ByteString.Lazy.fromStrict , Data.ByteString.Lazy.toStrict , Data.ByteString.Lazy.fromChunks , Data.ByteString.Lazy.toChunks , Data.ByteString.Lazy.foldrChunks , Data.ByteString.Lazy.foldlChunks -- * Basic interface , Data.ByteString.Lazy.cons , Data.ByteString.Lazy.cons' , Data.ByteString.Lazy.snoc , Data.ByteString.Lazy.append , Data.ByteString.Lazy.uncons , Data.ByteString.Lazy.unsnoc , Data.ByteString.Lazy.null , Data.ByteString.Lazy.length -- * Transforming ByteStrings , Data.ByteString.Lazy.map , Data.ByteString.Lazy.reverse , Data.ByteString.Lazy.intersperse , Data.ByteString.Lazy.intercalate , Data.ByteString.Lazy.transpose -- * Reducing 'ByteString's (folds) , Data.ByteString.Lazy.foldl , Data.ByteString.Lazy.foldl' , Data.ByteString.Lazy.foldr -- ** Special folds , Data.ByteString.Lazy.concat , Data.ByteString.Lazy.concatMap , Data.ByteString.Lazy.any , Data.ByteString.Lazy.all -- * Building ByteStrings -- ** Scans , Data.ByteString.Lazy.scanl -- ** Accumulating maps , Data.ByteString.Lazy.mapAccumL , Data.ByteString.Lazy.mapAccumR -- ** Infinite ByteStrings , Data.ByteString.Lazy.repeat , Data.ByteString.Lazy.replicate , Data.ByteString.Lazy.cycle , Data.ByteString.Lazy.iterate -- ** Unfolding ByteStrings , Data.ByteString.Lazy.unfoldr -- * Substrings -- ** Breaking strings , Data.ByteString.Lazy.take , Data.ByteString.Lazy.drop , Data.ByteString.Lazy.splitAt , Data.ByteString.Lazy.takeWhile , Data.ByteString.Lazy.dropWhile , Data.ByteString.Lazy.span , Data.ByteString.Lazy.break , Data.ByteString.Lazy.group , Data.ByteString.Lazy.groupBy , Data.ByteString.Lazy.inits , Data.ByteString.Lazy.tails , Data.ByteString.Lazy.stripPrefix , Data.ByteString.Lazy.stripSuffix -- ** Breaking into many substrings , Data.ByteString.Lazy.split , Data.ByteString.Lazy.splitWith -- * Predicates , Data.ByteString.Lazy.isPrefixOf , Data.ByteString.Lazy.isSuffixOf -- * Search ByteStrings -- ** Searching by equality , Data.ByteString.Lazy.elem , Data.ByteString.Lazy.notElem -- ** Searching with a predicate , Data.ByteString.Lazy.find , Data.ByteString.Lazy.filter , Data.ByteString.Lazy.partition -- * Indexing ByteStrings , Data.ByteString.Lazy.index , Data.ByteString.Lazy.elemIndex , Data.ByteString.Lazy.elemIndexEnd , Data.ByteString.Lazy.elemIndices , Data.ByteString.Lazy.findIndex , Data.ByteString.Lazy.findIndices , Data.ByteString.Lazy.count -- * Zipping and unzipping ByteStrings , Data.ByteString.Lazy.zip , Data.ByteString.Lazy.zipWith , Data.ByteString.Lazy.unzip -- * Low level conversions -- ** Copying ByteStrings , Data.ByteString.Lazy.copy -- * I\/O with 'ByteString's -- ** Standard input and output , getContents , putStr , putStrLn , interact -- ** Files , readFile , writeFile , appendFile -- ** I\/O with Handles , hGetContents , hGet , hGetNonBlocking , hPut , hPutNonBlocking , hPutStr ) where import Data.ByteString.Lazy hiding ( getContents , putStr , putStrLn , interact , readFile , writeFile , appendFile , hGetContents , hGet , hGetNonBlocking , hPut , hPutNonBlocking , hPutStr ) import qualified Data.ByteString.Lazy import qualified Data.ByteString.Lazy.Char8 import RIO -- | Lifted 'Data.ByteString.Lazy.getContents' getContents :: 
MonadIO m => m LByteString getContents = liftIO Data.ByteString.Lazy.getContents -- | Lifted 'Data.ByteString.Lazy.putStr' putStr :: MonadIO m => LByteString -> m () putStr = liftIO . Data.ByteString.Lazy.putStr -- | Lifted 'Data.ByteString.Lazy.putStrLn' putStrLn :: MonadIO m => LByteString -> m () putStrLn = liftIO . Data.ByteString.Lazy.Char8.putStrLn -- | Lifted 'Data.ByteString.Lazy.interact' interact :: MonadIO m => (LByteString -> LByteString) -> m () interact = liftIO . Data.ByteString.Lazy.interact -- | Lifted 'Data.ByteString.Lazy.readFile' readFile :: MonadIO m => FilePath -> m LByteString readFile = liftIO . Data.ByteString.Lazy.readFile -- | Lifted 'Data.ByteString.Lazy.writeFile' writeFile :: MonadIO m => FilePath -> LByteString -> m () writeFile fp contents = liftIO $ Data.ByteString.Lazy.writeFile fp contents -- | Lifted 'Data.ByteString.Lazy.appendFile' appendFile :: MonadIO m => FilePath -> LByteString -> m () appendFile fp = liftIO . Data.ByteString.Lazy.appendFile fp -- | Lifted 'Data.ByteString.Lazy.hGet' hGet :: MonadIO m => Handle -> Int -> m LByteString hGet handle' count' = liftIO $ Data.ByteString.Lazy.hGet handle' count' -- | Lifted 'Data.ByteString.Lazy.hGetContents' hGetContents :: MonadIO m => Handle -> m LByteString hGetContents = liftIO . Data.ByteString.Lazy.hGetContents -- | Lifted 'Data.ByteString.Lazy.hGetNonBlocking' hGetNonBlocking :: MonadIO m => Handle -> Int -> m LByteString hGetNonBlocking h = liftIO . Data.ByteString.Lazy.hGetNonBlocking h -- | Lifted 'Data.ByteString.Lazy.hPut' hPut :: MonadIO m => Handle -> LByteString -> m () hPut h = liftIO . Data.ByteString.Lazy.hPut h -- | Lifted 'Data.ByteString.Lazy.hPutNonBlocking' hPutNonBlocking :: MonadIO m => Handle -> LByteString -> m LByteString hPutNonBlocking h = liftIO . Data.ByteString.Lazy.hPutNonBlocking h -- | Lifted 'Data.ByteString.Lazy.hPutStr' hPutStr :: MonadIO m => Handle -> LByteString -> m () hPutStr h = liftIO . Data.ByteString.Lazy.hPutStr h rio-0.1.8.0/src/RIO/ByteString/Lazy/Partial.hs0000644000000000000000000000103013253417303017011 0ustar0000000000000000-- | This module exports all the partial functions from "Data.ByteString.Lazy" module RIO.ByteString.Lazy.Partial ( -- * Basic interface Data.ByteString.Lazy.head , Data.ByteString.Lazy.last , Data.ByteString.Lazy.tail , Data.ByteString.Lazy.init -- * Reducing 'ByteString's (folds) , Data.ByteString.Lazy.foldl1 , Data.ByteString.Lazy.foldl1' , Data.ByteString.Lazy.foldr1 -- ** Special folds , Data.ByteString.Lazy.maximum , Data.ByteString.Lazy.minimum ) where import qualified Data.ByteString.Lazy rio-0.1.8.0/src/RIO/ByteString/Partial.hs0000644000000000000000000000076713253417303016112 0ustar0000000000000000-- | This module exports all the partial functions from 'Data.ByteString' module RIO.ByteString.Partial ( -- * Basic interface Data.ByteString.head , Data.ByteString.last , Data.ByteString.tail , Data.ByteString.init -- * Reducing 'ByteString's (folds) , Data.ByteString.foldl1 , Data.ByteString.foldl1' , Data.ByteString.foldr1 , Data.ByteString.foldr1' -- * Special folds , Data.ByteString.maximum , Data.ByteString.minimum ) where import qualified Data.ByteString rio-0.1.8.0/src/RIO/Char.hs0000644000000000000000000000227713253417303013277 0ustar0000000000000000-- | Unicode @Char@. 
Import as: -- -- > import qualified RIO.Char as C module RIO.Char ( Data.Char.Char -- * Character classification -- | Unicode characters are divided into letters, Data.Char.numbers, marks, -- punctuation, Data.Char.symbols, separators (including spaces) and others -- (including control characters). , Data.Char.isControl , Data.Char.isSpace , Data.Char.isLower , Data.Char.isUpper , Data.Char.isAlpha , Data.Char.isAlphaNum , Data.Char.isPrint , Data.Char.isDigit , Data.Char.isOctDigit , Data.Char.isHexDigit , Data.Char.isLetter , Data.Char.isMark , Data.Char.isNumber , Data.Char.isPunctuation , Data.Char.isSymbol , Data.Char.isSeparator -- ** Subranges , Data.Char.isAscii , Data.Char.isLatin1 , Data.Char.isAsciiUpper , Data.Char.isAsciiLower -- ** Unicode general categories , Data.Char.GeneralCategory(..) , Data.Char.generalCategory -- * Case conversion , Data.Char.toUpper , Data.Char.toLower , Data.Char.toTitle -- * Numeric representations , Data.Char.ord -- * String representations , Data.Char.showLitChar , Data.Char.lexLitChar , Data.Char.readLitChar ) where import qualified Data.Char rio-0.1.8.0/src/RIO/Char/Partial.hs0000644000000000000000000000030513253417303014661 0ustar0000000000000000module RIO.Char.Partial ( -- * Single digit characters Data.Char.digitToInt , Data.Char.intToDigit -- * Numeric representations , Data.Char.chr ) where import qualified Data.Char rio-0.1.8.0/src/RIO/Directory.hs0000644000000000000000000000022013253417404014352 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE NoImplicitPrelude #-} module RIO.Directory ( module UnliftIO.Directory ) where import UnliftIO.Directory rio-0.1.8.0/src/RIO/File.hs0000644000000000000000000004200413402156770013275 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE ForeignFunctionInterface #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-| == Rationale This module offers functions to handle files that offer better durability and/or atomicity. == When to use the functions on this module? Given the usage of this functions comes at a cost in performance, it is important to consider what are the use cases that are ideal for each of the functions. === Not Durable and not Atomic For this use case, you want to use the regular functions: * 'System.IO.withBinaryFile' * 'RIO.writeFileBinary' The regular use case for this scenario happens when your program is dealing with outputs that are never going to be consumed again by your program. For example, imagine you have a program that generates sales reports for the last month, this is a report that can be generated quickly; you don't really care if the output file gets corrupted or lost at one particular execution of your program given that is cheap to execute the data export program a second time. In other words, your program doesn't /rely/ on the data contained in this file in order to work. === Atomic but not Durable Imagine a scenario where your program builds a temporary file that serves as an intermediate step to a bigger task, like Object files (@.o@) in a compilation process. The program will use an existing @.o@ file if it is present, or it will build one from scratch if it is not. The file is not really required, but if it is present, it *must* be valid and consistent. In this situation, you care about atomicity, but not durability. There is no function exported by this module that provides /only/ atomicity. 
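To make the contrast above concrete, here is a sketch of the plain,
non-durable, non-atomic write described in the first scenario (the report
path and the @reportBytes@ payload are hypothetical):

> -- no durability or atomicity guarantees needed; a plain write is enough
> writeFileBinary "monthly-report.csv" reportBytes

The durable and atomic variants described below follow the same shape, only
swapping the writing function.
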
=== Durable but not Atomic For this use case, you want to use the functions: * 'withBinaryFileDurable' * 'writeBinaryFileDurable' The regular use case for this scenario happens when your program deals with file modifications that must be guaranteed to be durable, but you don't care that changes are consistent. If you use this function, more than likely your program is ensuring consistency guarantees through other means, for example, SQLite uses the Write Ahead Log (WAL) algorithm to ensure changes are atomic at an application level. === Durable and Atomic For this use case, you can use the functions: * 'withBinaryFileDurableAtomic' * 'writeBinaryFileDurableAtomic' The regular use case for this scenario happens when you want to ensure that after a program is executed, the modifications done to a file are guaranteed to be saved, and also that changes are rolled-back in case there is a failure (e.g. hard reboot, shutdown, etc). @since 0.1.6 -} module RIO.File ( writeBinaryFileDurable , writeBinaryFileDurableAtomic , withBinaryFileDurable , withBinaryFileDurableAtomic , ensureFileDurable ) where import RIO.Prelude.Reexports #ifdef WINDOWS import RIO.Prelude.IO #else import RIO.Directory (doesFileExist) import RIO.ByteString (hPut) import Data.Bits ((.|.)) import Data.Typeable (cast) import Foreign.C (CInt (..), throwErrnoIfMinus1, throwErrnoIfMinus1Retry) import GHC.IO.Device (IODeviceType (RegularFile)) import qualified GHC.IO.Device as Device import qualified GHC.IO.FD as FD import qualified GHC.IO.Handle.FD as HandleFD import System.Directory (copyFile) import System.FilePath (takeDirectory, takeFileName, ()) import System.Posix.Internals (CFilePath, c_close, c_safe_open, withFilePath) import System.Posix.Types (CMode (..), Fd (..)) import System.IO (openBinaryTempFile) #if MIN_VERSION_base(4,9,0) import qualified GHC.IO.Handle.Types as HandleFD (Handle (..), Handle__ (..)) #endif -- TODO: Add a ticket/pull request to export this symbols from -- System.Internal.Posix -- -- NOTE: System.Posix.Internal doesn't re-export this constants so we have to -- recreate-them here foreign import ccall unsafe "HsBase.h __hscore_o_rdonly" o_RDONLY :: CInt foreign import ccall unsafe "HsBase.h __hscore_o_wronly" o_WRONLY :: CInt foreign import ccall unsafe "HsBase.h __hscore_o_rdwr" o_RDWR :: CInt foreign import ccall unsafe "HsBase.h __hscore_o_append" o_APPEND :: CInt foreign import ccall unsafe "HsBase.h __hscore_o_creat" o_CREAT :: CInt foreign import ccall unsafe "HsBase.h __hscore_o_noctty" o_NOCTTY :: CInt -- After here, we have our own imports foreign import ccall safe "fcntl.h openat" c_safe_openat :: CInt -> CFilePath -> CInt -> CMode -> IO CInt foreign import ccall safe "fcntl.h renameat" c_safe_renameat :: CInt -> CFilePath -> CInt -> CFilePath -> IO CInt foreign import ccall safe "unistd.h fsync" c_safe_fsync :: CInt -> IO CInt std_flags, output_flags, read_flags, write_flags, rw_flags, append_flags :: CInt std_flags = o_NOCTTY output_flags = std_flags .|. o_CREAT read_flags = std_flags .|. o_RDONLY write_flags = output_flags .|. o_WRONLY rw_flags = output_flags .|. o_RDWR append_flags = write_flags .|. o_APPEND ioModeToFlags :: IOMode -> CInt ioModeToFlags iomode = case iomode of ReadMode -> read_flags WriteMode -> write_flags ReadWriteMode -> rw_flags AppendMode -> append_flags -- | Returns a low-level file descriptor for a directory path. This function -- exists given the fact that 'openFile' does not work with directories. 
-- -- If you use this function, make sure you are working on a masked state, -- otherwise async exceptions may leave file descriptors open. -- -- @since 0.1.6 openDir :: MonadIO m => FilePath -> m Fd openDir fp -- TODO: Investigate what is the situation with Windows FS in regards to non_blocking -- NOTE: File operations _do not support_ non_blocking on various kernels, more -- info can be found here: https://ghc.haskell.org/trac/ghc/ticket/15153 = liftIO $ withFilePath fp $ \cFp -> Fd <$> (throwErrnoIfMinus1Retry "openDir" $ c_safe_open cFp (ioModeToFlags ReadMode) 0o660) -- | Closes a 'Fd' that points to a Directory. -- -- @since 0.1.6 closeDirectory :: MonadIO m => Fd -> m () closeDirectory (Fd dirFd) = liftIO $ void $ throwErrnoIfMinus1Retry "closeDirectory" $ c_close dirFd -- | Executes the low-level C function fsync on a C file descriptor -- -- @since 0.1.6 fsyncFileDescriptor :: MonadIO m => String -- ^ Meta-description for error messages -> CInt -- ^ C File Descriptor -> m () fsyncFileDescriptor name cFd = liftIO $ void $ throwErrnoIfMinus1 ("fsync - " <> name) $ c_safe_fsync cFd -- | Opens a file from a directory, using this function in favour of a regular -- 'openFile' guarantees that any file modifications are kept in the same -- directory where the file was opened. An edge case scenario is a mount -- happening in the directory where the file was opened while your program is -- running. -- -- If you use this function, make sure you are working on an masked state, -- otherwise async exceptions may leave file descriptors open. -- openFileFromDir :: (MonadIO m) => Fd -> FilePath -> IOMode -> m Handle openFileFromDir (Fd dirFd) fp iomode = liftIO $ withFilePath fp $ \f -> do bracketOnError (do fileFd <- throwErrnoIfMinus1Retry "openFileFromDir" $ c_safe_openat dirFd f (ioModeToFlags iomode) 0o666 {- Can open directory with read only -} FD.mkFD fileFd iomode Nothing {- no stat -} False {- not a socket -} False {- non_blocking -} `onException` c_close fileFd) (liftIO . Device.close . fst) (\(fD, fd_type) -> do -- we want to truncate() if this is an open in WriteMode, but only if the -- target is a RegularFile. ftruncate() fails on special files like -- /dev/null. when (iomode == WriteMode && fd_type == RegularFile) $ Device.setSize fD 0 HandleFD.mkHandleFromFD fD fd_type fp iomode False Nothing) -- | Opens a file using the openat C low-level API. This approach allows us to -- get a file descriptor for the directory that contains the file, which we can -- use later on to fsync the directory with. -- -- If you use this function, make sure you are working on an masked state, -- otherwise async exceptions may leave file descriptors open. -- -- @since 0.1.6 openFileAndDirectory :: MonadUnliftIO m => FilePath -> IOMode -> m (Fd, Handle) openFileAndDirectory absFp iomode = do let dir = takeDirectory absFp fp = takeFileName absFp bracketOnError (openDir dir) closeDirectory $ \dirFd -> do fileHandle <- openFileFromDir dirFd fp iomode return (dirFd, fileHandle) -- | This sub-routine does the following tasks: -- -- * It calls fsync and then closes the given Handle (mapping to a temporal/backup filepath) -- * It calls fsync and then closes the containing directory of the file -- -- These steps guarantee that the file changes are durable. 
-- -- @since 0.1.6 closeFileDurable :: MonadIO m => Fd -> Handle -> m () closeFileDurable dirFd@(Fd cDirFd) h = liftIO $ finally (do (withHandleFd h $ \fileFd -> fsyncFileDescriptor "closeFileDurable/File" (FD.fdFD fileFd)) `finally` hClose h -- NOTE: Here we are purposefully not fsyncing the directory if the file fails to fsync fsyncFileDescriptor "closeFileDurable/Directory" cDirFd) (closeDirectory dirFd) buildTemporaryFilePath :: MonadUnliftIO m => FilePath -> m FilePath buildTemporaryFilePath filePath = do let dirFp = takeDirectory filePath fileFp = takeFileName filePath bracket (liftIO $ openBinaryTempFile dirFp fileFp) (hClose . snd) (return . fst) toTmpFilePath :: MonadUnliftIO m => FilePath -> m FilePath toTmpFilePath filePath = buildTemporaryFilePath (dirPath tmpFilename) where dirPath = takeDirectory filePath filename = takeFileName filePath tmpFilename = "." <> filename <> ".tmp" withHandleFd :: Handle -> (FD.FD -> IO a) -> IO a withHandleFd h cb = case h of HandleFD.FileHandle _ mv -> do withMVar mv $ \HandleFD.Handle__{HandleFD.haDevice = dev} -> case cast dev of Just fd -> cb fd Nothing -> error "withHandleFd: not a file handle" HandleFD.DuplexHandle {} -> error "withHandleFd: not a file handle" -- | This sub-routine does the following tasks: -- -- * It calls fsync and then closes the given Handle (mapping to a temporal/backup filepath) -- * It renames the file to the original path (using renameat) -- * It calls fsync and then closes the containing directory of the file -- -- These steps guarantee that the file is durable, and that the backup mechanism -- for catastrophic failure is discarded after no error is thrown. -- -- @since 0.1.6 closeFileDurableAtomic :: MonadUnliftIO m => FilePath -> FilePath -> Fd -> Handle -> m () closeFileDurableAtomic tmpFilePath filePath dirFd@(Fd cDirFd) fileHandle = do liftIO $ finally (withFilePath tmpFilePath $ \tmpFp -> withFilePath filePath $ \fp -> do (withHandleFd fileHandle $ \fileFd -> fsyncFileDescriptor "closeFileDurableAtomic/File" (FD.fdFD fileFd)) `finally` hClose fileHandle renameFile tmpFp fp fsyncFileDescriptor "closeFileDurableAtomic/Directory" cDirFd) (closeDirectory dirFd) where renameFile tmpFp origFp = void $ throwErrnoIfMinus1Retry "closeFileDurableAtomic - renameFile" $ c_safe_renameat cDirFd tmpFp cDirFd origFp #endif -- | After a file is closed, it opens it again and executes fsync internally on -- both the file and the directory that contains it. Note this function is -- intended to work around the non-durability of existing file APIs, as opposed -- to being necessary for the API functions provided in 'RIO.File' module. -- -- [The effectiveness of calling this function is -- debatable](https://stackoverflow.com/questions/37288453/calling-fsync2-after-close2/50158433#50158433), -- as it relies on internal implementation details at the Kernel level that -- might change. We argue that, despite this fact, calling this function may -- bring benefits in terms of durability. -- -- === Cross-Platform support -- -- This function is a noop on Windows platforms. -- -- @since 0.1.6 ensureFileDurable :: MonadUnliftIO m => FilePath -> m () ensureFileDurable absFp = #if WINDOWS absFp `seq` return () #else bracket (openFileAndDirectory absFp ReadMode) (uncurry closeFileDurable) (const $ return ()) #endif -- | Similar to 'writeFileBinary', but it also ensures that changes executed to -- the file are guaranteed to be durable. It internally uses fsync and makes -- sure it synchronizes the file on disk. 
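--
-- A minimal usage sketch (the path and payload here are hypothetical):
--
-- > writeBinaryFileDurable "state/checkpoint.bin" checkpointBytes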
--
-- === Cross-Platform support
--
-- This function behaves the same as 'RIO.writeFileBinary' on Windows platforms.
--
-- @since 0.1.6
writeBinaryFileDurable :: MonadUnliftIO m => FilePath -> ByteString -> m ()
writeBinaryFileDurable absFp bytes =
#if WINDOWS
  writeFileBinary absFp bytes
#else
  withBinaryFileDurable absFp WriteMode (liftIO . (`hPut` bytes))
#endif

-- | Similar to 'writeFileBinary', but it also guarantees that changes executed
-- to the file are durable and that, in case of failure, the modified file is
-- never left corrupted. It internally uses fsync and makes sure it
-- synchronizes the file on disk.
--
-- === Cross-Platform support
--
-- This function behaves the same as 'RIO.writeFileBinary' on Windows platforms.
--
-- @since 0.1.6
writeBinaryFileDurableAtomic :: MonadUnliftIO m => FilePath -> ByteString -> m ()
writeBinaryFileDurableAtomic fp bytes =
#if WINDOWS
  writeFileBinary fp bytes
#else
  withBinaryFileDurableAtomic fp WriteMode (liftIO . (`hPut` bytes))
#endif

-- | Opens a file with the following guarantees:
--
-- * It successfully closes the file in case of an asynchronous exception
--
-- * It reliably saves the file in the correct directory, including edge case
--   situations like a different device being mounted to the current directory,
--   or the current directory being renamed to some other name while the file
--   is being used.
--
-- * It ensures durability by executing an fsync call before closing the file
--   handle
--
-- === Cross-Platform support
--
-- This function behaves the same as 'System.IO.withBinaryFile' on Windows platforms.
--
-- @since 0.1.6
withBinaryFileDurable :: MonadUnliftIO m => FilePath -> IOMode -> (Handle -> m r) -> m r
withBinaryFileDurable absFp iomode cb =
#if WINDOWS
  withBinaryFile absFp iomode cb
#else
  withRunInIO $ \run ->
    bracket
      (openFileAndDirectory absFp iomode)
      (uncurry closeFileDurable)
      (run . cb . snd)
#endif

-- | Opens a file with the following guarantees:
--
-- * It successfully closes the file in case of an asynchronous exception
--
-- * It reliably saves the file in the correct directory, including edge case
--   situations like a different device being mounted to the current directory,
--   or the current directory being renamed to some other name while the file
--   is being used.
--
-- * It ensures durability by executing an fsync call before closing the file
--   handle
--
-- * It keeps all changes in a temporary file, and after it is closed it
--   atomically moves the temporary file to the original filepath; in case of
--   catastrophic failure, the original file stays unaffected.
--
-- === Performance Considerations
--
-- When using a writable but non-truncating 'IOMode' (i.e. 'ReadWriteMode' and
-- 'AppendMode'), this function performs a copy operation of the specified input
-- file to guarantee the original file is intact in case of a catastrophic
-- failure (no partial writes). This approach may be prohibitive in scenarios
-- where the input file is expected to be large in size.
--
-- === Cross-Platform support
--
-- This function behaves the same as 'System.IO.withBinaryFile' on Windows
-- platforms.
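--
-- A minimal usage sketch (the config path and payload are hypothetical):
--
-- > withBinaryFileDurableAtomic "config.yaml" WriteMode $ \h ->
-- >   hPut h "key: value"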
-- -- @since 0.1.6 withBinaryFileDurableAtomic :: MonadUnliftIO m => FilePath -> IOMode -> (Handle -> m r) -> m r withBinaryFileDurableAtomic absFp iomode cb = do #if WINDOWS withBinaryFile absFp iomode cb #else withRunInIO $ \run -> case iomode of -- We need to consider an atomic operation only when we are on 'WriteMode', lets -- use a regular withBinaryFile ReadMode -> run (withBinaryFile absFp iomode cb) -- Given we are not going to read contents from the original file, we -- can create a temporal file and then do an atomic move WriteMode -> do tmpFp <- toTmpFilePath absFp withDurableAtomic tmpFp run _ {- ReadWriteMode, AppendMode -} -> do -- copy original file for read purposes fileExists <- doesFileExist absFp tmpFp <- toTmpFilePath absFp when fileExists $ copyFile absFp tmpFp withDurableAtomic tmpFp run where withDurableAtomic tmpFp run = do bracket (openFileAndDirectory tmpFp iomode) (uncurry $ closeFileDurableAtomic tmpFp absFp) (run . cb . snd) #endif rio-0.1.8.0/src/RIO/FilePath.hs0000644000000000000000000000052013253417303014103 0ustar0000000000000000module RIO.FilePath ( module System.FilePath , getSearchPath ) where import Control.Monad.IO.Class import System.FilePath hiding(getSearchPath) import qualified System.FilePath -- | Lifted version of 'System.FilePath.getSearchPath' getSearchPath :: MonadIO m => m [FilePath] getSearchPath = liftIO System.FilePath.getSearchPath rio-0.1.8.0/src/RIO/HashMap.hs0000644000000000000000000000322213253417303013732 0ustar0000000000000000-- | Strict @Map@ with hashed keys. Import as: -- -- > import qualified RIO.HashMap as HM module RIO.HashMap ( Data.HashMap.Strict.HashMap -- * Construction , Data.HashMap.Strict.empty , Data.HashMap.Strict.singleton -- * Basic interface , Data.HashMap.Strict.null , Data.HashMap.Strict.size , Data.HashMap.Strict.member , Data.HashMap.Strict.lookup , Data.HashMap.Strict.lookupDefault , Data.HashMap.Strict.insert , Data.HashMap.Strict.insertWith , Data.HashMap.Strict.delete , Data.HashMap.Strict.adjust , Data.HashMap.Strict.update , Data.HashMap.Strict.alter -- * Combine -- ** Union , Data.HashMap.Strict.union , Data.HashMap.Strict.unionWith , Data.HashMap.Strict.unionWithKey , Data.HashMap.Strict.unions -- * Transformations , Data.HashMap.Strict.map , Data.HashMap.Strict.mapWithKey , Data.HashMap.Strict.traverseWithKey -- * Difference and intersection , Data.HashMap.Strict.difference , Data.HashMap.Strict.differenceWith , Data.HashMap.Strict.intersection , Data.HashMap.Strict.intersectionWith , Data.HashMap.Strict.intersectionWithKey -- * Folds , Data.HashMap.Strict.foldl' , Data.HashMap.Strict.foldlWithKey' , Data.HashMap.Strict.foldr , Data.HashMap.Strict.foldrWithKey -- * Filter , Data.HashMap.Strict.filter , Data.HashMap.Strict.filterWithKey , Data.HashMap.Strict.mapMaybe , Data.HashMap.Strict.mapMaybeWithKey -- * Conversions , Data.HashMap.Strict.keys , Data.HashMap.Strict.elems -- ** Lists , Data.HashMap.Strict.toList , Data.HashMap.Strict.fromList , Data.HashMap.Strict.fromListWith ) where import Data.HashMap.Strict rio-0.1.8.0/src/RIO/HashMap/Partial.hs0000644000000000000000000000020213253417303015321 0ustar0000000000000000module RIO.HashMap.Partial ( -- * Basic interface (Data.HashMap.Strict.!) ) where import qualified Data.HashMap.Strict rio-0.1.8.0/src/RIO/HashSet.hs0000644000000000000000000000151013253417303013746 0ustar0000000000000000-- | @Set@ with hashed members. 
Import as: -- -- > import qualified RIO.HashSet as HS module RIO.HashSet ( Data.HashSet.HashSet -- * Construction , Data.HashSet.empty , Data.HashSet.singleton -- * Combine , Data.HashSet.union , Data.HashSet.unions -- * Basic interface , Data.HashSet.null , Data.HashSet.size , Data.HashSet.member , Data.HashSet.insert , Data.HashSet.delete -- * Transformations , Data.HashSet.map -- * Difference and intersection , Data.HashSet.difference , Data.HashSet.intersection -- * Folds , Data.HashSet.foldl' , Data.HashSet.foldr -- * Filter , Data.HashSet.filter -- * Conversions -- ** Lists , Data.HashSet.toList , Data.HashSet.fromList -- * HashMaps , Data.HashSet.toMap , Data.HashSet.fromMap ) where import qualified Data.HashSet rio-0.1.8.0/src/RIO/List.hs0000644000000000000000000001402413406724606013335 0ustar0000000000000000-- | @List@. Import as: -- -- > import qualified RIO.List as L module RIO.List ( -- * Basic functions (Data.List.++) , Data.List.uncons , Data.List.null , Data.List.length , headMaybe , lastMaybe , tailMaybe , initMaybe -- * List transformations , Data.List.map , Data.List.reverse , Data.List.intersperse , Data.List.intercalate , Data.List.transpose , Data.List.subsequences , Data.List.permutations -- * Reducing lists (folds) , Data.List.foldl , Data.List.foldl' , Data.List.foldr -- ** Special folds , Data.List.concat , Data.List.concatMap , Data.List.and , Data.List.or , Data.List.any , Data.List.all , Data.List.sum , Data.List.product , maximumMaybe , minimumMaybe , maximumByMaybe , minimumByMaybe -- * Building lists -- ** Scans , Data.List.scanl , Data.List.scanl' , Data.List.scanr , Data.List.scanl1 , Data.List.scanr1 -- ** Accumulating maps , Data.List.mapAccumL , Data.List.mapAccumR -- ** Infinite lists , Data.List.iterate , Data.List.repeat , Data.List.replicate , Data.List.cycle -- ** Unfolding , Data.List.unfoldr -- * Sublists -- ** Extracting sublists , Data.List.take , Data.List.drop , Data.List.splitAt , Data.List.takeWhile , Data.List.dropWhile , Data.List.dropWhileEnd , Data.List.span , Data.List.break , Data.List.stripPrefix , stripSuffix , dropPrefix , dropSuffix , Data.List.group , Data.List.inits , Data.List.tails -- ** Predicates , Data.List.isPrefixOf , Data.List.isSuffixOf , Data.List.isInfixOf , Data.List.isSubsequenceOf -- * Searching lists -- ** Searching by equality , Data.List.elem , Data.List.notElem , Data.List.lookup -- ** Searching with a predicate , Data.List.find , Data.List.filter , Data.List.partition -- * Indexing lists -- | These functions treat a list @xs@ as a indexed collection, -- with indices ranging from 0 to @'length' xs - 1@. 
, Data.List.elemIndex , Data.List.elemIndices , Data.List.findIndex , Data.List.findIndices -- * Zipping and unzipping lists , Data.List.zip , Data.List.zip3 , Data.List.zip4 , Data.List.zip5 , Data.List.zip6 , Data.List.zip7 , Data.List.zipWith , Data.List.zipWith3 , Data.List.zipWith4 , Data.List.zipWith5 , Data.List.zipWith6 , Data.List.zipWith7 , Data.List.unzip , Data.List.unzip3 , Data.List.unzip4 , Data.List.unzip5 , Data.List.unzip6 , Data.List.unzip7 -- * Special lists -- ** Functions on strings , Data.List.lines , linesCR , Data.List.words , Data.List.unlines , Data.List.unwords -- ** \"Set\" operations , Data.List.nub , Data.List.delete , (Data.List.\\) , Data.List.union , Data.List.intersect -- ** Ordered lists , Data.List.sort , Data.List.sortOn , Data.List.insert -- * Generalized functions -- ** The \"@By@\" operations -- | By convention, overloaded functions have a non-overloaded -- counterpart whose name is suffixed with \`@By@\'. -- -- It is often convenient to use these functions together with -- 'Data.Function.on', for instance @'sortBy' ('compare' -- \`on\` 'fst')@. -- *** User-supplied equality (replacing an @Eq@ context) -- | The predicate is assumed to define an equivalence. , Data.List.nubBy , Data.List.deleteBy , Data.List.deleteFirstsBy , Data.List.unionBy , Data.List.intersectBy , Data.List.groupBy -- *** User-supplied comparison (replacing an @Ord@ context) -- | The function is assumed to define a total ordering. , Data.List.sortBy , Data.List.insertBy -- ** The \"@generic@\" operations -- | The prefix \`@generic@\' indicates an overloaded function that -- is a generalized version of a "Prelude" function. , Data.List.genericLength , Data.List.genericTake , Data.List.genericDrop , Data.List.genericSplitAt , Data.List.genericIndex , Data.List.genericReplicate ) where import qualified Data.List import Data.List(stripPrefix) import Data.Maybe (fromMaybe) -- | Remove the suffix from the given list, if present -- -- @since 0.0.0 stripSuffix :: Eq a => [a] -- ^ suffix -> [a] -> Maybe [a] stripSuffix suffix list = fmap reverse (stripPrefix (reverse suffix) (reverse list)) -- | Drop prefix if present, otherwise return original list. -- -- @since 0.0.0.0 dropPrefix :: Eq a => [a] -- ^ prefix -> [a] -> [a] dropPrefix prefix t = fromMaybe t (stripPrefix prefix t) -- | Drop prefix if present, otherwise return original list. -- -- @since 0.0.0.0 dropSuffix :: Eq a => [a] -- ^ suffix -> [a] -> [a] dropSuffix suffix t = fromMaybe t (stripSuffix suffix t) -- | 'linesCR' breaks a 'String' up into a list of `String`s at newline -- 'Char's. It is very similar to 'lines', but it also removes any -- trailing @'\r'@ 'Char's. The resulting 'String' values do not contain -- newlines or trailing @'\r'@ characters. -- -- @since 0.1.0.0 linesCR :: String -> [String] linesCR = map (dropSuffix "\r") . 
lines safeListCall :: Foldable t => (t a -> b) -> t a -> Maybe b safeListCall f xs | Data.List.null xs = Nothing | otherwise = Just $ f xs -- | @since 0.1.3.0 headMaybe :: [a] -> Maybe a headMaybe = safeListCall Data.List.head -- | @since 0.1.3.0 lastMaybe :: [a] -> Maybe a lastMaybe = safeListCall Data.List.last -- | @since 0.1.3.0 tailMaybe :: [a] -> Maybe [a] tailMaybe = safeListCall Data.List.tail -- | @since 0.1.3.0 initMaybe :: [a] -> Maybe [a] initMaybe = safeListCall Data.List.init -- | @since 0.1.3.0 maximumMaybe :: (Ord a, Foldable t) => t a -> Maybe a maximumMaybe = safeListCall Data.List.maximum -- | @since 0.1.3.0 minimumMaybe :: (Ord a, Foldable t) => t a -> Maybe a minimumMaybe = safeListCall Data.List.minimum -- | @since 0.1.3.0 maximumByMaybe :: (Foldable t) => (a -> a -> Ordering) -> t a -> Maybe a maximumByMaybe f = safeListCall (Data.List.maximumBy f) -- | @since 0.1.3.0 minimumByMaybe :: (Foldable t) => (a -> a -> Ordering) -> t a -> Maybe a minimumByMaybe f = safeListCall (Data.List.minimumBy f) rio-0.1.8.0/src/RIO/List/Partial.hs0000644000000000000000000000122113312220526014710 0ustar0000000000000000module RIO.List.Partial ( -- * Basic functions Data.List.head , Data.List.last , Data.List.tail , Data.List.init -- * Reducing lists (folds) , Data.List.foldl1 , Data.List.foldl1' , Data.List.foldr1 -- ** Special folds , Data.List.maximum , Data.List.minimum , Data.List.maximumBy , Data.List.minimumBy -- * Building lists -- ** Scans -- -- These functions are not partial, they are being exported here for legacy -- reasons, they may be removed from this module on a future major release , Data.List.scanl1 , Data.List.scanr1 -- * Indexing lists , (Data.List.!!) ) where import qualified Data.List rio-0.1.8.0/src/RIO/Map.hs0000644000000000000000000001021613253417303013127 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | Strict @Map@. Import as: -- -- > import qualified RIO.Map as Map module RIO.Map ( -- * Map type Data.Map.Strict.Map -- * Operators #if MIN_VERSION_containers(0,5,9) , (Data.Map.Strict.!?) 
#endif , (Data.Map.Strict.\\) -- * Query , Data.Map.Strict.null , Data.Map.Strict.size , Data.Map.Strict.member , Data.Map.Strict.notMember , Data.Map.Strict.lookup , Data.Map.Strict.findWithDefault , Data.Map.Strict.lookupLT , Data.Map.Strict.lookupGT , Data.Map.Strict.lookupLE , Data.Map.Strict.lookupGE -- * Construction , Data.Map.Strict.empty , Data.Map.Strict.singleton -- ** Insertion , Data.Map.Strict.insert , Data.Map.Strict.insertWith , Data.Map.Strict.insertWithKey , Data.Map.Strict.insertLookupWithKey -- ** Delete\/Update , Data.Map.Strict.delete , Data.Map.Strict.adjust , Data.Map.Strict.adjustWithKey , Data.Map.Strict.update , Data.Map.Strict.updateWithKey , Data.Map.Strict.updateLookupWithKey , Data.Map.Strict.alter #if MIN_VERSION_containers(0,5,8) , Data.Map.Strict.alterF #endif -- * Combine -- ** Union , Data.Map.Strict.union , Data.Map.Strict.unionWith , Data.Map.Strict.unionWithKey , Data.Map.Strict.unions , Data.Map.Strict.unionsWith -- ** Difference , Data.Map.Strict.difference , Data.Map.Strict.differenceWith , Data.Map.Strict.differenceWithKey -- ** Intersection , Data.Map.Strict.intersection , Data.Map.Strict.intersectionWith , Data.Map.Strict.intersectionWithKey -- ** General combining functions -- | See "Data.Map.Merge.Strict" -- ** Deprecated general combining function , Data.Map.Strict.mergeWithKey -- * Traversal -- ** Map , Data.Map.Strict.map , Data.Map.Strict.mapWithKey , Data.Map.Strict.traverseWithKey #if MIN_VERSION_containers(0,5,8) , Data.Map.Strict.traverseMaybeWithKey #endif , Data.Map.Strict.mapAccum , Data.Map.Strict.mapAccumWithKey , Data.Map.Strict.mapAccumRWithKey , Data.Map.Strict.mapKeys , Data.Map.Strict.mapKeysWith -- * Folds , Data.Map.Strict.foldr , Data.Map.Strict.foldl , Data.Map.Strict.foldrWithKey , Data.Map.Strict.foldlWithKey , Data.Map.Strict.foldMapWithKey -- ** Strict folds , Data.Map.Strict.foldr' , Data.Map.Strict.foldl' , Data.Map.Strict.foldrWithKey' , Data.Map.Strict.foldlWithKey' -- * Conversion , Data.Map.Strict.elems , Data.Map.Strict.keys , Data.Map.Strict.assocs , Data.Map.Strict.keysSet , Data.Map.Strict.fromSet -- ** Lists , Data.Map.Strict.toList , Data.Map.Strict.fromList , Data.Map.Strict.fromListWith , Data.Map.Strict.fromListWithKey -- * Filter , Data.Map.Strict.filter , Data.Map.Strict.filterWithKey #if MIN_VERSION_containers(0,5,8) , Data.Map.Strict.restrictKeys , Data.Map.Strict.withoutKeys #endif , Data.Map.Strict.partition , Data.Map.Strict.partitionWithKey #if MIN_VERSION_containers(0,5,8) , Data.Map.Strict.takeWhileAntitone , Data.Map.Strict.dropWhileAntitone , Data.Map.Strict.spanAntitone #endif , Data.Map.Strict.mapMaybe , Data.Map.Strict.mapMaybeWithKey , Data.Map.Strict.mapEither , Data.Map.Strict.mapEitherWithKey , Data.Map.Strict.split , Data.Map.Strict.splitLookup , Data.Map.Strict.splitRoot -- * Submap , Data.Map.Strict.isSubmapOf , Data.Map.Strict.isSubmapOfBy , Data.Map.Strict.isProperSubmapOf , Data.Map.Strict.isProperSubmapOfBy -- * Indexed , Data.Map.Strict.lookupIndex , Data.Map.Strict.elemAt , Data.Map.Strict.deleteAt #if MIN_VERSION_containers(0,5,8) , Data.Map.Strict.take , Data.Map.Strict.drop , Data.Map.Strict.splitAt #endif -- * Min\/Max #if MIN_VERSION_containers(0,5,9) , Data.Map.Strict.lookupMin , Data.Map.Strict.lookupMax #endif , Data.Map.Strict.deleteMin , Data.Map.Strict.deleteMax , Data.Map.Strict.updateMin , Data.Map.Strict.updateMax , Data.Map.Strict.updateMinWithKey , Data.Map.Strict.updateMaxWithKey , Data.Map.Strict.minView , Data.Map.Strict.maxView , 
Data.Map.Strict.minViewWithKey
  , Data.Map.Strict.maxViewWithKey
    -- * Debugging
  , Data.Map.Strict.showTree
  , Data.Map.Strict.showTreeWith
  , Data.Map.Strict.valid
  ) where

import qualified Data.Map.Strict

rio-0.1.8.0/src/RIO/Map/Partial.hs

module RIO.Map.Partial
  ( -- * Operators
    (Data.Map.Strict.!)
    -- * Indexed
  , Data.Map.Strict.elemAt
  , Data.Map.Strict.deleteAt
  , Data.Map.Strict.findIndex
  , Data.Map.Strict.updateAt
    -- * Min\/Max
  , Data.Map.Strict.findMin
  , Data.Map.Strict.findMax
  , Data.Map.Strict.deleteFindMin
  , Data.Map.Strict.deleteFindMax
  ) where

import qualified Data.Map.Strict

rio-0.1.8.0/src/RIO/Map/Unchecked.hs

{-# LANGUAGE CPP #-}
-- | This module contains functions from "Data.Map.Strict" that have unchecked
-- preconditions on their input. If these preconditions are not satisfied, the
-- data structure may end up in an invalid state and other operations may
-- misbehave.
module RIO.Map.Unchecked
  ( -- * Traversal
    -- ** Map
    Data.Map.Strict.mapKeysMonotonic
    -- * Conversion
    -- ** Ordered lists
  , Data.Map.Strict.toAscList
  , Data.Map.Strict.fromAscList
  , Data.Map.Strict.fromAscListWith
  , Data.Map.Strict.fromAscListWithKey
  , Data.Map.Strict.fromDistinctAscList
#if MIN_VERSION_containers(0,5,8)
  , Data.Map.Strict.toDescList
  , Data.Map.Strict.fromDescList
  , Data.Map.Strict.fromDescListWith
  , Data.Map.Strict.fromDescListWithKey
  , Data.Map.Strict.fromDistinctDescList
#endif
  ) where

import qualified Data.Map.Strict

rio-0.1.8.0/src/RIO/Partial.hs

-- | Partial functions.
module RIO.Partial
  ( Data.Maybe.fromJust
  , Prelude.read
  , Prelude.toEnum
  ) where

import qualified Data.Maybe
import qualified Prelude

rio-0.1.8.0/src/RIO/Prelude/Simple.hs

{-# LANGUAGE NoImplicitPrelude #-}
-- | Provides a @SimpleApp@ datatype: a basic @App@-like environment with
-- common functionality built in. This is intended to make it easier to, e.g.,
-- use rio's logging and process code from within short scripts.
--
-- @since 0.1.3.0
module RIO.Prelude.Simple
  ( SimpleApp
  , runSimpleApp
  ) where

import RIO.Prelude.Reexports
import RIO.Prelude.Logger
import RIO.Prelude.Lens
import RIO.Prelude.RIO
import RIO.Process
import System.Environment (lookupEnv)

-- | A simple, non-customizable environment type for @RIO@, which provides
-- common functionality. If it's insufficient for your needs, define your own,
-- custom @App@ data type.
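--
-- A minimal usage sketch ('logInfo' is re-exported from "RIO"; the message is
-- illustrative):
--
-- > main :: IO ()
-- > main = runSimpleApp $ logInfo "Hello from SimpleApp"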
--
-- @since 0.1.3.0
data SimpleApp = SimpleApp
  { saLogFunc :: !LogFunc
  , saProcessContext :: !ProcessContext
  }

instance HasLogFunc SimpleApp where
  logFuncL = lens saLogFunc (\x y -> x { saLogFunc = y })
instance HasProcessContext SimpleApp where
  processContextL = lens saProcessContext (\x y -> x { saProcessContext = y })

-- | Run with a default configured @SimpleApp@, consisting of:
--
-- * Logging to stderr
--
-- * If the @RIO_VERBOSE@ environment variable is set, turns on
--   verbose logging
--
-- * Default process context
--
-- @since 0.1.3.0
runSimpleApp :: MonadIO m => RIO SimpleApp a -> m a
runSimpleApp m = liftIO $ do
  verbose <- isJust <$> lookupEnv "RIO_VERBOSE"
  lo <- logOptionsHandle stderr verbose
  pc <- mkDefaultProcessContext
  withLogFunc lo $ \lf ->
    let simpleApp = SimpleApp
          { saLogFunc = lf
          , saProcessContext = pc
          }
     in runRIO simpleApp m

rio-0.1.8.0/src/RIO/Process.hs

{-# LANGUAGE CPP #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Interacting with external processes.
--
-- This module provides a layer on top of "System.Process.Typed", with
-- the following additions:
--
-- * For efficiency, it will cache @PATH@ lookups.
--
-- * For convenience, you can set the working directory and env var
--   overrides in a 'RIO' environment instead of on the individual
--   calls to the process.
--
-- * Built-in support for logging at the debug level.
--
-- In order to switch over to this API, the main idea is:
--
-- * Like most of the rio library, you need to create an environment
--   value (this time 'ProcessContext'), and include it in your 'RIO'
--   environment. See 'mkProcessContext'.
--
-- * Instead of using the 'System.Process.Typed.proc' function from
--   "System.Process.Typed" for creating a 'ProcessConfig', use the
--   locally defined 'proc' function, which will handle overriding
--   environment variables, looking up paths, performing logging, etc.
--
-- Once you have your 'ProcessConfig', use the standard functions from
-- 'System.Process.Typed' (re-exported here for convenience) for running
-- the 'ProcessConfig'.
--
-- @since 0.0.3.0
module RIO.Process
  ( -- * Process context
    ProcessContext
  , HasProcessContext (..)
  , EnvVars
  , mkProcessContext
  , mkDefaultProcessContext
  , modifyEnvVars
  , withModifyEnvVars
  , withWorkingDir
    -- ** Lenses
  , workingDirL
  , envVarsL
  , envVarsStringsL
  , exeSearchPathL
    -- ** Actions
  , resetExeCache
    -- * Configuring
  , proc
    -- * Spawning (run child process)
  , withProcess
  , withProcess_
    -- * Exec (replacing current process)
  , exec
  , execSpawn
    -- * Environment helper
  , LoggedProcessContext (..)
  , withProcessContextNoLogging
    -- * Exceptions
  , ProcessException (..)
    -- * Utilities
  , doesExecutableExist
  , findExecutable
  , augmentPath
  , augmentPathMap
  , showProcessArgDebug
    -- * Reexports
  , P.ProcessConfig
  , P.StreamSpec
  , P.StreamType (..)
, P.Process , P.setStdin , P.setStdout , P.setStderr , P.setCloseFds , P.setCreateGroup , P.setDelegateCtlc #if MIN_VERSION_process(1, 3, 0) , P.setDetachConsole , P.setCreateNewConsole , P.setNewSession #endif #if MIN_VERSION_process(1, 4, 0) && !WINDOWS , P.setChildGroup , P.setChildUser #endif , P.mkStreamSpec , P.inherit , P.closed , P.byteStringInput , P.byteStringOutput , P.createPipe , P.useHandleOpen , P.useHandleClose , P.startProcess , P.stopProcess , P.readProcess , P.readProcess_ , P.runProcess , P.runProcess_ , P.readProcessStdout , P.readProcessStdout_ , P.readProcessStderr , P.readProcessStderr_ , P.waitExitCode , P.waitExitCodeSTM , P.getExitCode , P.getExitCodeSTM , P.checkExitCode , P.checkExitCodeSTM , P.getStdin , P.getStdout , P.getStderr , P.ExitCodeException (..) , P.ByteStringOutputException (..) , P.unsafeProcessHandle ) where import RIO.Prelude.Display import RIO.Prelude.Reexports import RIO.Prelude.Logger import RIO.Prelude.RIO import RIO.Prelude.Lens import qualified Data.Map as Map import qualified Data.Text as T import qualified System.Directory as D import System.Environment (getEnvironment) import System.Exit (exitWith) import qualified System.FilePath as FP import qualified System.Process.Typed as P import System.Process.Typed hiding (withProcess, withProcess_, proc) #ifndef WINDOWS import System.Directory (setCurrentDirectory) import System.Posix.Process (executeFile) #endif -- | The environment variable map -- -- @since 0.0.3.0 type EnvVars = Map Text Text -- | Context in which to run processes. -- -- @since 0.0.3.0 data ProcessContext = ProcessContext { pcTextMap :: !EnvVars -- ^ Environment variables as map , pcStringList :: ![(String, String)] -- ^ Environment variables as association list , pcPath :: ![FilePath] -- ^ List of directories searched for executables (@PATH@) , pcExeCache :: !(IORef (Map FilePath (Either ProcessException FilePath))) -- ^ Cache of already looked up executable paths. , pcExeExtensions :: [String] -- ^ @[""]@ on non-Windows systems, @["", ".exe", ".bat"]@ on Windows , pcWorkingDir :: !(Maybe FilePath) -- ^ Override the working directory. } -- | Exception type which may be generated in this module. -- -- /NOTE/ Other exceptions may be thrown by underlying libraries! -- -- @since 0.0.3.0 data ProcessException = NoPathFound | ExecutableNotFound String [FilePath] | ExecutableNotFoundAt FilePath | PathsInvalidInPath [FilePath] deriving Typeable instance Show ProcessException where show NoPathFound = "PATH not found in ProcessContext" show (ExecutableNotFound name path) = concat [ "Executable named " , name , " not found on path: " , show path ] show (ExecutableNotFoundAt name) = "Did not find executable at specified path: " ++ name show (PathsInvalidInPath paths) = unlines $ [ "Would need to add some paths to the PATH environment variable \ \to continue, but they would be invalid because they contain a " ++ show FP.searchPathSeparator ++ "." , "Please fix the following paths and try again:" ] ++ paths instance Exception ProcessException -- | Get the 'ProcessContext' from the environment. -- -- @since 0.0.3.0 class HasProcessContext env where processContextL :: Lens' env ProcessContext instance HasProcessContext ProcessContext where processContextL = id data EnvVarFormat = EVFWindows | EVFNotWindows currentEnvVarFormat :: EnvVarFormat currentEnvVarFormat = #if WINDOWS EVFWindows #else EVFNotWindows #endif -- | Override the working directory processes run in. @Nothing@ means -- the current process's working directory. 
-- -- @since 0.0.3.0 workingDirL :: HasProcessContext env => Lens' env (Maybe FilePath) workingDirL = processContextL.lens pcWorkingDir (\x y -> x { pcWorkingDir = y }) -- | Get the environment variables. We cannot provide a @Lens@ here, -- since updating the environment variables requires an @IO@ action to -- allocate a new @IORef@ for holding the executable path cache. -- -- @since 0.0.3.0 envVarsL :: HasProcessContext env => SimpleGetter env EnvVars envVarsL = processContextL.to pcTextMap -- | Get the 'EnvVars' as an associated list of 'String's. -- -- Useful for interacting with other libraries. -- -- @since 0.0.3.0 envVarsStringsL :: HasProcessContext env => SimpleGetter env [(String, String)] envVarsStringsL = processContextL.to pcStringList -- | Get the list of directories searched for executables (the @PATH@). -- -- Similar to 'envVarMapL', this cannot be a full @Lens@. -- -- @since 0.0.3.0 exeSearchPathL :: HasProcessContext env => SimpleGetter env [FilePath] exeSearchPathL = processContextL.to pcPath -- | Create a new 'ProcessContext' from the given environment variable map. -- -- @since 0.0.3.0 mkProcessContext :: MonadIO m => EnvVars -> m ProcessContext mkProcessContext tm' = do ref <- newIORef Map.empty return ProcessContext { pcTextMap = tm , pcStringList = map (T.unpack *** T.unpack) $ Map.toList tm , pcPath = (if isWindows then (".":) else id) (maybe [] (FP.splitSearchPath . T.unpack) (Map.lookup "PATH" tm)) , pcExeCache = ref , pcExeExtensions = if isWindows then let pathext = fromMaybe ".COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC" (Map.lookup "PATHEXT" tm) in map T.unpack $ "" : T.splitOn ";" pathext else [""] , pcWorkingDir = Nothing } where -- Fix case insensitivity of the PATH environment variable on Windows. tm | isWindows = Map.fromList $ map (first T.toUpper) $ Map.toList tm' | otherwise = tm' -- Don't use CPP so that the Windows code path is at least type checked -- regularly isWindows = case currentEnvVarFormat of EVFWindows -> True EVFNotWindows -> False -- | Reset the executable cache. -- -- @since 0.0.3.0 resetExeCache :: (MonadIO m, MonadReader env m, HasProcessContext env) => m () resetExeCache = do pc <- view processContextL atomicModifyIORef (pcExeCache pc) (const mempty) -- | Load up an 'EnvOverride' from the standard environment. mkDefaultProcessContext :: MonadIO m => m ProcessContext mkDefaultProcessContext = liftIO $ getEnvironment >>= mkProcessContext . Map.fromList . map (T.pack *** T.pack) -- | Modify the environment variables of a 'ProcessContext'. -- -- This will keep other settings unchanged, in particular the working -- directory. -- -- Note that this requires 'MonadIO', as it will create a new 'IORef' -- for the cache. -- -- @since 0.0.3.0 modifyEnvVars :: MonadIO m => ProcessContext -> (EnvVars -> EnvVars) -> m ProcessContext modifyEnvVars pc f = do pc' <- mkProcessContext (f $ pcTextMap pc) return pc' { pcWorkingDir = pcWorkingDir pc } -- | Use 'modifyEnvVarMap' to create a new 'ProcessContext', and then -- use it in the provided action. -- -- @since 0.0.3.0 withModifyEnvVars :: (HasProcessContext env, MonadReader env m, MonadIO m) => (EnvVars -> EnvVars) -> m a -> m a withModifyEnvVars f inner = do pc <- view processContextL pc' <- modifyEnvVars pc f local (set processContextL pc') inner -- | Set the working directory to be used by child processes. -- -- @since 0.0.3.0 withWorkingDir :: (HasProcessContext env, MonadReader env m, MonadIO m) => FilePath -> m a -> m a withWorkingDir = local . set workingDirL . 
Just -- | Perform pre-call-process tasks. Ensure the working directory exists and find the -- executable path. -- -- Throws a 'ProcessException' if unsuccessful. -- -- NOT CURRENTLY EXPORTED preProcess :: (HasProcessContext env, MonadReader env m, MonadIO m) => String -- ^ Command name -> m FilePath preProcess name = do name' <- findExecutable name >>= either throwIO return wd <- view workingDirL liftIO $ maybe (return ()) (D.createDirectoryIfMissing True) wd return name' -- | Log running a process with its arguments, for debugging (-v). -- -- This logs one message before running the process and one message after. -- -- NOT CURRENTLY EXPORTED withProcessTimeLog :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Maybe FilePath -- ^ working dirj -> String -- ^ executable -> [String] -- ^ arguments -> m a -> m a withProcessTimeLog mdir name args proc' = do let cmdText = T.intercalate " " (T.pack name : map showProcessArgDebug args) dirMsg = case mdir of Nothing -> "" Just dir -> " within " <> T.pack dir logDebug ("Run process" <> display dirMsg <> ": " <> display cmdText) start <- getMonotonicTime x <- proc' end <- getMonotonicTime let diff = end - start useColor <- view logFuncUseColorL logDebug ("Process finished in " <> (if useColor then "\ESC[92m" else "") <> -- green timeSpecMilliSecondText diff <> (if useColor then "\ESC[0m" else "") <> -- reset ": " <> display cmdText) return x timeSpecMilliSecondText :: Double -> Utf8Builder timeSpecMilliSecondText d = display (round (d * 1000) :: Int) <> "ms" -- | Provide a 'ProcessConfig' based on the 'ProcessContext' in -- scope. Deals with resolving the full path, setting the child -- process's environment variables, setting the working directory, and -- wrapping the call with 'withProcessTimeLog' for debugging output. -- -- This is intended to be analogous to the @proc@ function provided by -- the @System.Process.Typed@ module, but has a different type -- signature to (1) allow it to perform @IO@ actions for looking up -- paths, and (2) allow logging and timing of the running action. -- -- @since 0.0.3.0 proc :: (HasProcessContext env, HasLogFunc env, MonadReader env m, MonadIO m, HasCallStack) => FilePath -- ^ command to run -> [String] -- ^ command line arguments -> (ProcessConfig () () () -> m a) -> m a proc name0 args inner = do name <- preProcess name0 wd <- view workingDirL envStrings <- view envVarsStringsL withProcessTimeLog wd name args $ inner $ setEnv envStrings $ maybe id setWorkingDir wd $ P.proc name args -- | Same as 'P.withProcess', but generalized to 'MonadUnliftIO'. -- -- @since 0.0.3.0 withProcess :: MonadUnliftIO m => ProcessConfig stdin stdout stderr -> (Process stdin stdout stderr -> m a) -> m a withProcess pc f = withRunInIO $ \run -> P.withProcess pc (run . f) -- | Same as 'P.withProcess_', but generalized to 'MonadUnliftIO'. -- -- @since 0.0.3.0 withProcess_ :: MonadUnliftIO m => ProcessConfig stdin stdout stderr -> (Process stdin stdout stderr -> m a) -> m a withProcess_ pc f = withRunInIO $ \run -> P.withProcess_ pc (run . 
f) -- | A convenience environment combining a 'LogFunc' and a 'ProcessContext' -- -- @since 0.0.3.0 data LoggedProcessContext = LoggedProcessContext ProcessContext LogFunc instance HasLogFunc LoggedProcessContext where logFuncL = lens (\(LoggedProcessContext _ lf) -> lf) (\(LoggedProcessContext pc _) lf -> LoggedProcessContext pc lf) instance HasProcessContext LoggedProcessContext where processContextL = lens (\(LoggedProcessContext x _) -> x) (\(LoggedProcessContext _ lf) pc -> LoggedProcessContext pc lf) -- | Run an action using a 'LoggedProcessContext' with default -- settings and no logging. -- -- @since 0.0.3.0 withProcessContextNoLogging :: MonadIO m => RIO LoggedProcessContext a -> m a withProcessContextNoLogging inner = do pc <- mkDefaultProcessContext runRIO (LoggedProcessContext pc mempty) inner -- | Execute a process within the configured environment. -- -- Execution will not return, because either: -- -- 1) On non-windows, execution is taken over by execv of the -- sub-process. This allows signals to be propagated (#527) -- -- 2) On windows, an 'ExitCode' exception will be thrown. -- -- @since 0.0.3.0 exec :: (HasProcessContext env, HasLogFunc env) => String -> [String] -> RIO env b #ifdef WINDOWS exec = execSpawn #else exec cmd0 args = do wd <- view workingDirL envStringsL <- view envVarsStringsL cmd <- preProcess cmd0 withProcessTimeLog wd cmd args $ liftIO $ do for_ wd setCurrentDirectory executeFile cmd True args $ Just envStringsL #endif -- | Like 'exec', but does not use 'execv' on non-windows. This way, -- there is a sub-process, which is helpful in some cases -- (). -- -- This function only exits by throwing 'ExitCode'. -- -- @since 0.0.3.0 execSpawn :: (HasProcessContext env, HasLogFunc env) => String -> [String] -> RIO env a execSpawn cmd args = proc cmd args (runProcess . setStdin inherit) >>= liftIO . exitWith -- | Check if the given executable exists on the given PATH. -- -- @since 0.0.3.0 doesExecutableExist :: (MonadIO m, MonadReader env m, HasProcessContext env) => String -- ^ Name of executable -> m Bool doesExecutableExist = liftM isRight . findExecutable -- | Find the complete path for the executable. -- -- @since 0.0.3.0 findExecutable :: (MonadIO m, MonadReader env m, HasProcessContext env) => String -- ^ Name of executable -> m (Either ProcessException FilePath) -- ^ Full path to that executable on success findExecutable name0 | any FP.isPathSeparator name0 = do pc <- view processContextL let names0 = map (name0 ++) (pcExeExtensions pc) testNames [] = return $ Left $ ExecutableNotFoundAt name0 testNames (name:names) = do exists <- liftIO $ D.doesFileExist name if exists then do path <- liftIO $ D.canonicalizePath name return $ return path else testNames names testNames names0 findExecutable name = do pc <- view processContextL m <- readIORef $ pcExeCache pc epath <- case Map.lookup name m of Just epath -> return epath Nothing -> do let loop [] = return $ Left $ ExecutableNotFound name (pcPath pc) loop (dir:dirs) = do let fp0 = dir FP.</> name fps0 = map (fp0 ++) (pcExeExtensions pc) testFPs [] = loop dirs testFPs (fp:fps) = do exists <- D.doesFileExist fp existsExec <- if exists then liftM D.executable $ D.getPermissions fp else return False if existsExec then do fp' <- D.makeAbsolute fp return $ return fp' else testFPs fps testFPs fps0 epath <- liftIO $ loop $ pcPath pc () <- atomicModifyIORef (pcExeCache pc) $ \m' -> (Map.insert name epath m', ()) return epath return epath -- | Augment the PATH environment variable with the given extra paths. 
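-- Existing @PATH@ entries are kept after the new ones; on POSIX systems, where the separator is @:@, a rough sketch of the intended behaviour (the directory names are hypothetical) is: -- -- > augmentPath ["/opt/tools/bin"] (Just "/usr/bin") == Right "/opt/tools/bin:/usr/bin" -- -- Any supplied directory that itself contains the search path separator is rejected with 'PathsInvalidInPath'. 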
-- -- @since 0.0.3.0 augmentPath :: [FilePath] -> Maybe Text -> Either ProcessException Text augmentPath dirs mpath = case filter (FP.searchPathSeparator `elem`) dirs of [] -> Right $ T.intercalate (T.singleton FP.searchPathSeparator) $ map (T.pack . FP.dropTrailingPathSeparator) dirs ++ maybeToList mpath illegal -> Left $ PathsInvalidInPath illegal -- | Apply 'augmentPath' on the PATH value in the given 'EnvVars'. -- -- @since 0.0.3.0 augmentPathMap :: [FilePath] -> EnvVars -> Either ProcessException EnvVars augmentPathMap dirs origEnv = do path <- augmentPath dirs mpath return $ Map.insert "PATH" path origEnv where mpath = Map.lookup "PATH" origEnv -- | Show a process arg including speechmarks when necessary. Just for -- debugging purposes, not functionally important. -- -- @since 0.0.3.0 showProcessArgDebug :: String -> Text showProcessArgDebug x | any special x || null x = T.pack (show x) | otherwise = T.pack x where special '"' = True special ' ' = True special _ = False rio-0.1.8.0/src/RIO/Seq.hs0000644000000000000000000000611713253417303013147 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | @Seq@. Import as: -- -- > import qualified RIO.Seq as Seq module RIO.Seq ( Data.Sequence.Seq(..) -- * Construction , Data.Sequence.empty , Data.Sequence.singleton , (Data.Sequence.<|) , (Data.Sequence.|>) , (Data.Sequence.><) , Data.Sequence.fromList , Data.Sequence.fromFunction , Data.Sequence.fromArray -- ** Repetition , Data.Sequence.replicate , Data.Sequence.replicateA , Data.Sequence.replicateM #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.cycleTaking #endif -- ** Iterative construction , Data.Sequence.iterateN , Data.Sequence.unfoldr , Data.Sequence.unfoldl -- * Deconstruction -- | Additional functions for deconstructing sequences are available via the -- 'Foldable' instance of 'Seq'. -- ** Queries , Data.Sequence.null , Data.Sequence.length -- ** Views , Data.Sequence.ViewL(..) , Data.Sequence.viewl , Data.Sequence.ViewR(..) , Data.Sequence.viewr -- * Scans , Data.Sequence.scanl , Data.Sequence.scanl1 , Data.Sequence.scanr , Data.Sequence.scanr1 -- * Sublists , Data.Sequence.tails , Data.Sequence.inits #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.chunksOf #endif -- ** Sequential searches , Data.Sequence.takeWhileL , Data.Sequence.takeWhileR , Data.Sequence.dropWhileL , Data.Sequence.dropWhileR , Data.Sequence.spanl , Data.Sequence.spanr , Data.Sequence.breakl , Data.Sequence.breakr , Data.Sequence.partition , Data.Sequence.filter -- * Sorting , Data.Sequence.sort , Data.Sequence.sortBy , Data.Sequence.unstableSort , Data.Sequence.unstableSortBy -- * Indexing #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.lookup , (Data.Sequence.!?) #endif , Data.Sequence.index , Data.Sequence.adjust #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.adjust' #endif , Data.Sequence.update , Data.Sequence.take , Data.Sequence.drop #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.insertAt , Data.Sequence.deleteAt #endif , Data.Sequence.splitAt -- ** Indexing with predicates -- | These functions perform sequential searches from the left or right ends -- of the sequence elements. , Data.Sequence.elemIndexL , Data.Sequence.elemIndicesL , Data.Sequence.elemIndexR , Data.Sequence.elemIndicesR , Data.Sequence.findIndexL , Data.Sequence.findIndicesL , Data.Sequence.findIndexR , Data.Sequence.findIndicesR -- * Folds -- | General folds are available via the 'Foldable' instance of 'Seq'. 
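-- For instance (a small illustration), @sum (Data.Sequence.fromList [1,2,3])@ evaluates to @6@ via the 'Foldable' instance. 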
#if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.foldMapWithIndex #endif , Data.Sequence.foldlWithIndex , Data.Sequence.foldrWithIndex -- * Transformations , Data.Sequence.mapWithIndex #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.traverseWithIndex #endif , Data.Sequence.reverse #if MIN_VERSION_containers(0, 5, 8) , Data.Sequence.intersperse #endif -- ** Zips , Data.Sequence.zip , Data.Sequence.zipWith , Data.Sequence.zip3 , Data.Sequence.zipWith3 , Data.Sequence.zip4 , Data.Sequence.zipWith4 ) where import qualified Data.Sequence rio-0.1.8.0/src/RIO/Set.hs0000644000000000000000000000313313253417303013145 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | @Set@. Import as: -- -- > import qualified RIO.Set as Set module RIO.Set ( -- * Set type Data.Set.Set -- * Operators , (Data.Set.\\) -- * Query , Data.Set.null , Data.Set.size , Data.Set.member , Data.Set.notMember , Data.Set.lookupLT , Data.Set.lookupGT , Data.Set.lookupLE , Data.Set.lookupGE , Data.Set.isSubsetOf , Data.Set.isProperSubsetOf -- * Construction , Data.Set.empty , Data.Set.singleton , Data.Set.insert , Data.Set.delete -- * Combine , Data.Set.union , Data.Set.unions , Data.Set.difference , Data.Set.intersection -- * Filter , Data.Set.filter #if MIN_VERSION_containers(0,5,8) , Data.Set.takeWhileAntitone , Data.Set.dropWhileAntitone , Data.Set.spanAntitone #endif , Data.Set.partition , Data.Set.split , Data.Set.splitMember , Data.Set.splitRoot -- * Indexed , Data.Set.lookupIndex #if MIN_VERSION_containers(0,5,8) , Data.Set.take , Data.Set.drop , Data.Set.splitAt #endif -- * Map , Data.Set.map -- * Folds , Data.Set.foldr , Data.Set.foldl -- ** Strict folds , Data.Set.foldr' , Data.Set.foldl' -- * Min\/Max #if MIN_VERSION_containers(0,5,9) , Data.Set.lookupMin , Data.Set.lookupMax #endif , Data.Set.deleteMin , Data.Set.deleteMax , Data.Set.maxView , Data.Set.minView -- * Conversion -- ** List , Data.Set.elems , Data.Set.toList , Data.Set.fromList -- ** Ordered list , Data.Set.toAscList , Data.Set.toDescList -- * Debugging , Data.Set.showTree , Data.Set.showTreeWith , Data.Set.valid ) where import qualified Data.Set rio-0.1.8.0/src/RIO/Set/Partial.hs0000644000000000000000000000040113253417303014534 0ustar0000000000000000module RIO.Set.Partial ( -- * Indexed Data.Set.findIndex , Data.Set.elemAt , Data.Set.deleteAt -- * Min\/Max , Data.Set.findMin , Data.Set.findMax , Data.Set.deleteFindMin , Data.Set.deleteFindMax ) where import qualified Data.Set rio-0.1.8.0/src/RIO/Set/Unchecked.hs0000644000000000000000000000112013253417303015030 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | This module contains functions from "Data.Set" that have unchecked -- preconditions on their input. If these preconditions are not satisfied, -- the data structure may end up in an invalid state and other operations -- may misbehave. module RIO.Set.Unchecked ( -- * Map Data.Set.mapMonotonic -- * Ordered list , Data.Set.fromAscList #if MIN_VERSION_containers(0,5,8) , Data.Set.fromDescList #endif , Data.Set.fromDistinctAscList #if MIN_VERSION_containers(0,5,8) , Data.Set.fromDistinctDescList #endif ) where import qualified Data.Set rio-0.1.8.0/src/RIO/State.hs0000644000000000000000000000043313412071757013500 0ustar0000000000000000-- | Provides reexports of 'MonadState' and related helpers. -- -- @since 0.1.4.0 module RIO.State ( Control.Monad.State.MonadState (..) 
, Control.Monad.State.gets , Control.Monad.State.modify , Control.Monad.State.modify' ) where import qualified Control.Monad.State rio-0.1.8.0/src/RIO/Text.hs0000644000000000000000000000771213261111216013336 0ustar0000000000000000{-# LANGUAGE MagicHash #-} {-# LANGUAGE OverloadedStrings #-} -- | Strict @Text@. Import as: -- -- > import qualified RIO.Text as Text -- -- This module does not export any partial functions. For those, see -- "RIO.Text.Partial" module RIO.Text ( -- * Types Data.Text.Text -- * Creation and elimination , Data.Text.pack , Data.Text.unpack , Data.Text.singleton , Data.Text.empty -- * Basic interface , Data.Text.cons , Data.Text.snoc , Data.Text.append , Data.Text.uncons , Data.Text.null , Data.Text.length , Data.Text.compareLength -- * Transformations , Data.Text.map , Data.Text.intercalate , Data.Text.intersperse , Data.Text.transpose , Data.Text.reverse -- ** Case conversion , Data.Text.toCaseFold , Data.Text.toLower , Data.Text.toUpper , Data.Text.toTitle -- ** Justification , Data.Text.justifyLeft , Data.Text.justifyRight , Data.Text.center -- * Folds , Data.Text.foldl , Data.Text.foldl' , Data.Text.foldr -- ** Special folds , Data.Text.concat , Data.Text.concatMap , Data.Text.any , Data.Text.all -- * Construction -- ** Scans , Data.Text.scanl , Data.Text.scanl1 -- scanl1 and scanr1 are /not/ partial , Data.Text.scanr , Data.Text.scanr1 -- ** Accumulating maps , Data.Text.mapAccumL , Data.Text.mapAccumR -- ** Generation and unfolding , Data.Text.replicate , Data.Text.unfoldr , Data.Text.unfoldrN -- * Substrings -- ** Breaking strings , Data.Text.take , Data.Text.takeEnd , Data.Text.drop , Data.Text.dropEnd , Data.Text.takeWhile , Data.Text.takeWhileEnd , Data.Text.dropWhile , Data.Text.dropWhileEnd , Data.Text.dropAround , Data.Text.strip , Data.Text.stripStart , Data.Text.stripEnd , Data.Text.splitAt , Data.Text.break , Data.Text.span , Data.Text.group , Data.Text.groupBy , Data.Text.inits , Data.Text.tails -- ** Breaking into many substrings , Data.Text.split , Data.Text.chunksOf -- ** Breaking into lines and words , Data.Text.lines , linesCR , Data.Text.words , Data.Text.unlines , Data.Text.unwords -- * Predicates , Data.Text.isPrefixOf , Data.Text.isSuffixOf , Data.Text.isInfixOf -- ** View patterns , Data.Text.stripPrefix , Data.Text.stripSuffix , dropPrefix , dropSuffix , Data.Text.commonPrefixes -- * Searching , Data.Text.filter , Data.Text.find , Data.Text.partition -- * Indexing , Data.Text.index , Data.Text.findIndex -- * Zipping , Data.Text.zip , Data.Text.zipWith -- * Low level operations , Data.Text.copy , Data.Text.unpackCString# -- * Encoding , Data.Text.Encoding.encodeUtf8 , Data.Text.Encoding.decodeUtf8With , Data.Text.Encoding.decodeUtf8' , Data.Text.Encoding.Error.lenientDecode ) where import Data.Maybe (fromMaybe) import Data.Text (Text, stripPrefix, stripSuffix) import qualified Data.Text import qualified Data.Text.Encoding import qualified Data.Text.Encoding.Error -- | Drop prefix if present, otherwise return original 'Text'. -- -- @since 0.0.0.0 dropPrefix :: Text -- ^ prefix -> Text -> Text dropPrefix prefix t = fromMaybe t (stripPrefix prefix t) -- | Drop prefix if present, otherwise return original 'Text'. -- -- @since 0.0.0.0 dropSuffix :: Text -- ^ suffix -> Text -> Text dropSuffix suffix t = fromMaybe t (stripSuffix suffix t) -- | 'linesCR' breaks a 'Text' up into a list of `Text`s at newline -- 'Char's. It is very similar to 'Data.Text.lines', but it also removes -- any trailing @'\r'@ characters. 
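For example (an illustrative sketch), @linesCR "foo\r\nbar"@ evaluates to @["foo", "bar"]@. 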
The resulting 'Text' values do not -- contain newlines or trailing @'\r'@ characters. -- -- @since 0.1.0.0 linesCR :: Text -> [Text] linesCR = map (dropSuffix "\r") . Data.Text.lines rio-0.1.8.0/src/RIO/Text/Lazy.hs0000644000000000000000000000645713253417303014271 0ustar0000000000000000-- | Lazy @Text@. Import as: -- -- > import qualified RIO.Text.Lazy as TL -- -- This module does not export any partial functions. For those, see -- "RIO.Text.Lazy.Partial" module RIO.Text.Lazy ( -- * Types Data.Text.Lazy.Text -- * Creation and elimination , Data.Text.Lazy.pack , Data.Text.Lazy.unpack , Data.Text.Lazy.singleton , Data.Text.Lazy.empty , Data.Text.Lazy.fromChunks , Data.Text.Lazy.toChunks , Data.Text.Lazy.toStrict , Data.Text.Lazy.fromStrict , Data.Text.Lazy.foldrChunks , Data.Text.Lazy.foldlChunks -- * Basic interface , Data.Text.Lazy.cons , Data.Text.Lazy.snoc , Data.Text.Lazy.append , Data.Text.Lazy.uncons , Data.Text.Lazy.null , Data.Text.Lazy.length , Data.Text.Lazy.compareLength -- * Transformations , Data.Text.Lazy.map , Data.Text.Lazy.intercalate , Data.Text.Lazy.intersperse , Data.Text.Lazy.transpose , Data.Text.Lazy.reverse -- ** Case conversion , Data.Text.Lazy.toCaseFold , Data.Text.Lazy.toLower , Data.Text.Lazy.toUpper , Data.Text.Lazy.toTitle -- ** Justification , Data.Text.Lazy.justifyLeft , Data.Text.Lazy.justifyRight , Data.Text.Lazy.center -- * Folds , Data.Text.Lazy.foldl , Data.Text.Lazy.foldl' , Data.Text.Lazy.foldr -- ** Special folds , Data.Text.Lazy.concat , Data.Text.Lazy.concatMap , Data.Text.Lazy.any , Data.Text.Lazy.all -- * Construction -- ** Scans , Data.Text.Lazy.scanl , Data.Text.Lazy.scanl1 -- NB. scanl1 and scanr1 are not partial , Data.Text.Lazy.scanr , Data.Text.Lazy.scanr1 -- ** Accumulating maps , Data.Text.Lazy.mapAccumL , Data.Text.Lazy.mapAccumR -- ** Generation and unfolding , Data.Text.Lazy.repeat , Data.Text.Lazy.replicate , Data.Text.Lazy.cycle , Data.Text.Lazy.iterate , Data.Text.Lazy.unfoldr , Data.Text.Lazy.unfoldrN -- * Substrings -- ** Breaking strings , Data.Text.Lazy.take , Data.Text.Lazy.takeEnd , Data.Text.Lazy.drop , Data.Text.Lazy.dropEnd , Data.Text.Lazy.takeWhile , Data.Text.Lazy.takeWhileEnd , Data.Text.Lazy.dropWhile , Data.Text.Lazy.dropWhileEnd , Data.Text.Lazy.dropAround , Data.Text.Lazy.strip , Data.Text.Lazy.stripStart , Data.Text.Lazy.stripEnd , Data.Text.Lazy.splitAt , Data.Text.Lazy.span , Data.Text.Lazy.break , Data.Text.Lazy.group , Data.Text.Lazy.groupBy , Data.Text.Lazy.inits , Data.Text.Lazy.tails -- ** Breaking into many substrings , Data.Text.Lazy.split , Data.Text.Lazy.chunksOf -- ** Breaking into lines and words , Data.Text.Lazy.lines , Data.Text.Lazy.words , Data.Text.Lazy.unlines , Data.Text.Lazy.unwords -- * Predicates , Data.Text.Lazy.isPrefixOf , Data.Text.Lazy.isSuffixOf , Data.Text.Lazy.isInfixOf -- ** View patterns , Data.Text.Lazy.stripPrefix , Data.Text.Lazy.stripSuffix , Data.Text.Lazy.commonPrefixes -- * Searching , Data.Text.Lazy.filter , Data.Text.Lazy.find , Data.Text.Lazy.partition -- * Indexing , Data.Text.Lazy.index , Data.Text.Lazy.count -- * Zipping and unzipping , Data.Text.Lazy.zip , Data.Text.Lazy.zipWith ) where import qualified Data.Text.Lazy rio-0.1.8.0/src/RIO/Text/Lazy/Partial.hs0000644000000000000000000000137313253417303015655 0ustar0000000000000000-- | This module exports all the partial functions from "Data.Text.Lazy" module RIO.Text.Lazy.Partial ( -- * Creation and elimination Data.Text.Lazy.head , Data.Text.Lazy.last , Data.Text.Lazy.tail , Data.Text.Lazy.init -- * Transformations , 
Data.Text.Lazy.replace -- * Folds , Data.Text.Lazy.foldl1 , Data.Text.Lazy.foldl1' , Data.Text.Lazy.foldr1 -- ** Special folds , Data.Text.Lazy.maximum , Data.Text.Lazy.minimum -- * Substrings -- ** Breaking strings , Data.Text.Lazy.breakOn , Data.Text.Lazy.breakOnEnd -- ** Breaking into many substrings , Data.Text.Lazy.splitOn -- * Searching , Data.Text.Lazy.breakOnAll ) where import qualified Data.Text.Lazy rio-0.1.8.0/src/RIO/Text/Partial.hs0000644000000000000000000000130613253417303014732 0ustar0000000000000000-- | This module exports all the partial functions from "Data.Text" module RIO.Text.Partial ( -- * Basic interface Data.Text.head , Data.Text.last , Data.Text.tail , Data.Text.init -- * Transformations , Data.Text.replace -- * Folds , Data.Text.foldl1 , Data.Text.foldl1' , Data.Text.foldr1 -- ** Special folds , Data.Text.maximum , Data.Text.minimum -- * Substrings -- ** Breaking strings , Data.Text.breakOn , Data.Text.breakOnEnd -- ** Breaking into many substrings , Data.Text.splitOn -- * Searching , Data.Text.breakOnAll -- * Indexing , Data.Text.count ) where import qualified Data.Text rio-0.1.8.0/src/RIO/Time.hs0000644000000000000000000000150413253417303013310 0ustar0000000000000000module RIO.Time ( module Data.Time , getCurrentTime , getTimeZone , getCurrentTimeZone , getZonedTime , utcToLocalZonedTime ) where import Control.Monad.IO.Class import Data.Time hiding( getCurrentTime, getTimeZone, getCurrentTimeZone , getZonedTime, utcToLocalZonedTime) import qualified Data.Time getCurrentTime :: MonadIO m => m UTCTime getCurrentTime = liftIO Data.Time.getCurrentTime getTimeZone :: MonadIO m => UTCTime -> m TimeZone getTimeZone = liftIO . Data.Time.getTimeZone getCurrentTimeZone :: MonadIO m => m TimeZone getCurrentTimeZone = liftIO Data.Time.getCurrentTimeZone getZonedTime :: MonadIO m => m ZonedTime getZonedTime = liftIO Data.Time.getZonedTime utcToLocalZonedTime :: MonadIO m => UTCTime -> m ZonedTime utcToLocalZonedTime = liftIO . Data.Time.utcToLocalZonedTime rio-0.1.8.0/src/RIO/Vector.hs0000644000000000000000000001437313253417303013664 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | Generic @Vector@ interface. Import as: -- -- > import qualified RIO.Vector as V module RIO.Vector ( -- * Immutable vectors Data.Vector.Generic.Vector -- * Accessors -- ** Length information , Data.Vector.Generic.length , Data.Vector.Generic.null -- ** Indexing , (Data.Vector.Generic.!?) 
-- ** Extracting subvectors , Data.Vector.Generic.slice , Data.Vector.Generic.take , Data.Vector.Generic.drop , Data.Vector.Generic.splitAt -- * Construction -- ** Initialisation , Data.Vector.Generic.empty , Data.Vector.Generic.singleton , Data.Vector.Generic.replicate , Data.Vector.Generic.generate , Data.Vector.Generic.iterateN -- ** Monadic initialisation , Data.Vector.Generic.replicateM , Data.Vector.Generic.generateM #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.iterateNM #endif , Data.Vector.Generic.create #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.createT #endif -- ** Unfolding , Data.Vector.Generic.unfoldr , Data.Vector.Generic.unfoldrN #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.unfoldrM , Data.Vector.Generic.unfoldrNM #endif , Data.Vector.Generic.constructN , Data.Vector.Generic.constructrN -- ** Enumeration , Data.Vector.Generic.enumFromN , Data.Vector.Generic.enumFromStepN , Data.Vector.Generic.enumFromTo , Data.Vector.Generic.enumFromThenTo -- ** Concatenation , Data.Vector.Generic.cons , Data.Vector.Generic.snoc , (Data.Vector.Generic.++) , Data.Vector.Generic.concat #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.concatNE #endif -- ** Restricting memory usage , Data.Vector.Generic.force -- * Modifying vectors -- ** Permutations , Data.Vector.Generic.reverse -- ** Safe destructive update , Data.Vector.Generic.modify -- * Elementwise operations -- ** Indexing , Data.Vector.Generic.indexed -- ** Mapping , Data.Vector.Generic.map , Data.Vector.Generic.imap , Data.Vector.Generic.concatMap -- ** Monadic mapping , Data.Vector.Generic.mapM , Data.Vector.Generic.imapM , Data.Vector.Generic.mapM_ , Data.Vector.Generic.imapM_ , Data.Vector.Generic.forM , Data.Vector.Generic.forM_ -- ** Zipping , Data.Vector.Generic.zipWith , Data.Vector.Generic.zipWith3 , Data.Vector.Generic.zipWith4 , Data.Vector.Generic.zipWith5 , Data.Vector.Generic.zipWith6 , Data.Vector.Generic.izipWith , Data.Vector.Generic.izipWith3 , Data.Vector.Generic.izipWith4 , Data.Vector.Generic.izipWith5 , Data.Vector.Generic.izipWith6 , Data.Vector.Generic.zip , Data.Vector.Generic.zip3 , Data.Vector.Generic.zip4 , Data.Vector.Generic.zip5 , Data.Vector.Generic.zip6 -- ** Monadic zipping , Data.Vector.Generic.zipWithM , Data.Vector.Generic.izipWithM , Data.Vector.Generic.zipWithM_ , Data.Vector.Generic.izipWithM_ -- ** Unzipping , Data.Vector.Generic.unzip , Data.Vector.Generic.unzip3 , Data.Vector.Generic.unzip4 , Data.Vector.Generic.unzip5 , Data.Vector.Generic.unzip6 -- * Working with predicates -- ** Filtering , Data.Vector.Generic.filter , Data.Vector.Generic.ifilter #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.uniq , Data.Vector.Generic.mapMaybe , Data.Vector.Generic.imapMaybe #endif , Data.Vector.Generic.filterM , Data.Vector.Generic.takeWhile , Data.Vector.Generic.dropWhile -- ** Partitioning , Data.Vector.Generic.partition , Data.Vector.Generic.unstablePartition , Data.Vector.Generic.span , Data.Vector.Generic.break -- ** Searching , Data.Vector.Generic.elem , Data.Vector.Generic.notElem , Data.Vector.Generic.find , Data.Vector.Generic.findIndex , Data.Vector.Generic.findIndices , Data.Vector.Generic.elemIndex , Data.Vector.Generic.elemIndices -- * Folding , Data.Vector.Generic.foldl , Data.Vector.Generic.foldl' , Data.Vector.Generic.foldr , Data.Vector.Generic.foldr' , Data.Vector.Generic.ifoldl , Data.Vector.Generic.ifoldl' , Data.Vector.Generic.ifoldr , Data.Vector.Generic.ifoldr' -- ** Specialised folds , Data.Vector.Generic.all , Data.Vector.Generic.any , 
Data.Vector.Generic.and , Data.Vector.Generic.or , Data.Vector.Generic.sum , Data.Vector.Generic.product -- ** Monadic folds , Data.Vector.Generic.foldM , Data.Vector.Generic.ifoldM , Data.Vector.Generic.foldM' , Data.Vector.Generic.ifoldM' , Data.Vector.Generic.foldM_ , Data.Vector.Generic.ifoldM_ , Data.Vector.Generic.foldM'_ , Data.Vector.Generic.ifoldM'_ -- ** Monadic sequencing , Data.Vector.Generic.sequence , Data.Vector.Generic.sequence_ -- * Prefix sums (scans) , Data.Vector.Generic.prescanl , Data.Vector.Generic.prescanl' , Data.Vector.Generic.postscanl , Data.Vector.Generic.postscanl' , Data.Vector.Generic.scanl , Data.Vector.Generic.scanl' #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.iscanl , Data.Vector.Generic.iscanl' #endif , Data.Vector.Generic.prescanr , Data.Vector.Generic.prescanr' , Data.Vector.Generic.postscanr , Data.Vector.Generic.postscanr' , Data.Vector.Generic.scanr , Data.Vector.Generic.scanr' #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.iscanr , Data.Vector.Generic.iscanr' #endif -- * Conversions -- ** Lists , Data.Vector.Generic.toList , Data.Vector.Generic.fromList , Data.Vector.Generic.fromListN -- ** Different vector types , Data.Vector.Generic.convert -- ** Mutable vectors , Data.Vector.Generic.freeze , Data.Vector.Generic.thaw , Data.Vector.Generic.copy -- * Fusion support -- ** Conversion to/from Bundles , Data.Vector.Generic.stream , Data.Vector.Generic.unstream , Data.Vector.Generic.streamR , Data.Vector.Generic.unstreamR -- ** Recycling support , Data.Vector.Generic.new , Data.Vector.Generic.clone -- * Utilities -- ** Comparisons , Data.Vector.Generic.eq , Data.Vector.Generic.cmp #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.eqBy , Data.Vector.Generic.cmpBy #endif -- ** Show and Read , Data.Vector.Generic.showsPrec , Data.Vector.Generic.readPrec #if MIN_VERSION_vector(0,12,0) , Data.Vector.Generic.liftShowsPrec , Data.Vector.Generic.liftReadsPrec #endif -- ** @Data@ and @Typeable@ , Data.Vector.Generic.gfoldl , Data.Vector.Generic.dataCast , Data.Vector.Generic.mkType ) where import qualified Data.Vector.Generic rio-0.1.8.0/src/RIO/Vector/Boxed.hs0000644000000000000000000001056013253417303014717 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | Boxed @Vector@. Import as: -- -- > import qualified RIO.Vector.Boxed as VB module RIO.Vector.Boxed ( -- * Boxed vectors Data.Vector.Vector , Data.Vector.MVector -- * Accessors -- ** Length information , Data.Vector.length , Data.Vector.null -- ** Indexing , (Data.Vector.!?) 
-- ** Extracting subvectors , Data.Vector.slice , Data.Vector.take , Data.Vector.drop , Data.Vector.splitAt -- * Construction -- ** Initialisation , Data.Vector.empty , Data.Vector.singleton , Data.Vector.replicate , Data.Vector.generate , Data.Vector.iterateN -- ** Monadic initialisation , Data.Vector.replicateM , Data.Vector.generateM #if MIN_VERSION_vector(0,12,0) , Data.Vector.iterateNM #endif , Data.Vector.create #if MIN_VERSION_vector(0,12,0) , Data.Vector.createT #endif -- ** Unfolding , Data.Vector.unfoldr , Data.Vector.unfoldrN #if MIN_VERSION_vector(0,12,0) , Data.Vector.unfoldrM , Data.Vector.unfoldrNM #endif , Data.Vector.constructN , Data.Vector.constructrN -- ** Enumeration , Data.Vector.enumFromN , Data.Vector.enumFromStepN , Data.Vector.enumFromTo , Data.Vector.enumFromThenTo -- ** Concatenation , Data.Vector.cons , Data.Vector.snoc , (Data.Vector.++) , Data.Vector.concat -- ** Restricting memory usage , Data.Vector.force -- * Modifying vectors -- ** Permutations , Data.Vector.reverse -- ** Safe destructive update , Data.Vector.modify -- * Elementwise operations -- ** Indexing , Data.Vector.indexed -- ** Mapping , Data.Vector.map , Data.Vector.imap , Data.Vector.concatMap -- ** Monadic mapping , Data.Vector.mapM , Data.Vector.imapM , Data.Vector.mapM_ , Data.Vector.imapM_ , Data.Vector.forM , Data.Vector.forM_ -- ** Zipping , Data.Vector.zipWith , Data.Vector.zipWith3 , Data.Vector.zipWith4 , Data.Vector.zipWith5 , Data.Vector.zipWith6 , Data.Vector.izipWith , Data.Vector.izipWith3 , Data.Vector.izipWith4 , Data.Vector.izipWith5 , Data.Vector.izipWith6 , Data.Vector.zip , Data.Vector.zip3 , Data.Vector.zip4 , Data.Vector.zip5 , Data.Vector.zip6 -- ** Monadic zipping , Data.Vector.zipWithM , Data.Vector.izipWithM , Data.Vector.zipWithM_ , Data.Vector.izipWithM_ -- ** Unzipping , Data.Vector.unzip , Data.Vector.unzip3 , Data.Vector.unzip4 , Data.Vector.unzip5 , Data.Vector.unzip6 -- * Working with predicates -- ** Filtering , Data.Vector.filter , Data.Vector.ifilter #if MIN_VERSION_vector(0,12,0) , Data.Vector.uniq , Data.Vector.mapMaybe , Data.Vector.imapMaybe #endif , Data.Vector.filterM , Data.Vector.takeWhile , Data.Vector.dropWhile -- ** Partitioning , Data.Vector.partition , Data.Vector.unstablePartition , Data.Vector.span , Data.Vector.break -- ** Searching , Data.Vector.elem , Data.Vector.notElem , Data.Vector.find , Data.Vector.findIndex , Data.Vector.findIndices , Data.Vector.elemIndex , Data.Vector.elemIndices -- * Folding , Data.Vector.foldl , Data.Vector.foldl' , Data.Vector.foldr , Data.Vector.foldr' , Data.Vector.ifoldl , Data.Vector.ifoldl' , Data.Vector.ifoldr , Data.Vector.ifoldr' -- ** Specialised folds , Data.Vector.all , Data.Vector.any , Data.Vector.and , Data.Vector.or , Data.Vector.sum , Data.Vector.product -- ** Monadic folds , Data.Vector.foldM , Data.Vector.ifoldM , Data.Vector.foldM' , Data.Vector.ifoldM' , Data.Vector.foldM_ , Data.Vector.ifoldM_ , Data.Vector.foldM'_ , Data.Vector.ifoldM'_ -- ** Monadic sequencing , Data.Vector.sequence , Data.Vector.sequence_ -- * Prefix sums (scans) , Data.Vector.prescanl , Data.Vector.prescanl' , Data.Vector.postscanl , Data.Vector.postscanl' , Data.Vector.scanl , Data.Vector.scanl' #if MIN_VERSION_vector(0,12,0) , Data.Vector.iscanl , Data.Vector.iscanl' #endif , Data.Vector.prescanr , Data.Vector.prescanr' , Data.Vector.postscanr , Data.Vector.postscanr' , Data.Vector.scanr , Data.Vector.scanr' #if MIN_VERSION_vector(0,12,0) , Data.Vector.iscanr , Data.Vector.iscanr' #endif -- * Conversions -- ** Lists , 
Data.Vector.toList , Data.Vector.fromList , Data.Vector.fromListN -- ** Different vector types , Data.Vector.convert -- ** Mutable vectors , Data.Vector.freeze , Data.Vector.thaw , Data.Vector.copy ) where import qualified Data.Vector rio-0.1.8.0/src/RIO/Vector/Boxed/Partial.hs0000644000000000000000000000224113253417303016310 0ustar0000000000000000module RIO.Vector.Boxed.Partial ( -- * Accessors -- ** Indexing (Data.Vector.!) , Data.Vector.head , Data.Vector.last -- ** Monadic indexing , Data.Vector.indexM , Data.Vector.headM , Data.Vector.lastM -- ** Extracting subvectors , Data.Vector.init , Data.Vector.tail -- * Modifying vectors -- ** Bulk updates , (Data.Vector.//) , Data.Vector.update , Data.Vector.update_ -- ** Accumulations , Data.Vector.accum , Data.Vector.accumulate , Data.Vector.accumulate_ -- ** Permutations , Data.Vector.backpermute -- * Folding , Data.Vector.foldl1 , Data.Vector.foldl1' , Data.Vector.foldr1 , Data.Vector.foldr1' -- ** Specialised folds , Data.Vector.maximum , Data.Vector.maximumBy , Data.Vector.minimum , Data.Vector.minimumBy , Data.Vector.minIndex , Data.Vector.minIndexBy , Data.Vector.maxIndex , Data.Vector.maxIndexBy -- ** Monadic folds , Data.Vector.fold1M , Data.Vector.fold1M' , Data.Vector.fold1M_ , Data.Vector.fold1M'_ -- * Prefix sums (scans) , Data.Vector.scanl1 , Data.Vector.scanl1' , Data.Vector.scanr1 , Data.Vector.scanr1' ) where import qualified Data.Vector rio-0.1.8.0/src/RIO/Vector/Boxed/Unsafe.hs0000644000000000000000000000160613253417303016141 0ustar0000000000000000module RIO.Vector.Boxed.Unsafe ( -- * Accessors -- ** Indexing Data.Vector.unsafeIndex , Data.Vector.unsafeHead , Data.Vector.unsafeLast -- ** Monadic indexing , Data.Vector.unsafeIndexM , Data.Vector.unsafeHeadM , Data.Vector.unsafeLastM -- ** Extracting subvectors , Data.Vector.unsafeSlice , Data.Vector.unsafeInit , Data.Vector.unsafeTail , Data.Vector.unsafeTake , Data.Vector.unsafeDrop -- * Modifying vectors -- ** Bulk updates , Data.Vector.unsafeUpd , Data.Vector.unsafeUpdate , Data.Vector.unsafeUpdate_ -- ** Accumulations , Data.Vector.unsafeAccum , Data.Vector.unsafeAccumulate , Data.Vector.unsafeAccumulate_ -- ** Permutations , Data.Vector.unsafeBackpermute -- * Conversions -- ** Mutable vectors , Data.Vector.unsafeFreeze , Data.Vector.unsafeThaw , Data.Vector.unsafeCopy ) where import qualified Data.Vector rio-0.1.8.0/src/RIO/Vector/Partial.hs0000644000000000000000000000267313253417303015260 0ustar0000000000000000module RIO.Vector.Partial ( -- * Accessors -- ** Indexing (Data.Vector.Generic.!) 
, Data.Vector.Generic.head , Data.Vector.Generic.last -- ** Monadic indexing , Data.Vector.Generic.indexM , Data.Vector.Generic.headM , Data.Vector.Generic.lastM -- ** Extracting subvectors , Data.Vector.Generic.init , Data.Vector.Generic.tail -- * Modifying vectors -- ** Bulk updates , (Data.Vector.Generic.//) , Data.Vector.Generic.update , Data.Vector.Generic.update_ -- ** Accumulations , Data.Vector.Generic.accum , Data.Vector.Generic.accumulate , Data.Vector.Generic.accumulate_ -- ** Permutations , Data.Vector.Generic.backpermute -- * Folding , Data.Vector.Generic.foldl1 , Data.Vector.Generic.foldl1' , Data.Vector.Generic.foldr1 , Data.Vector.Generic.foldr1' -- ** Specialised folds , Data.Vector.Generic.maximum , Data.Vector.Generic.maximumBy , Data.Vector.Generic.minimum , Data.Vector.Generic.minimumBy , Data.Vector.Generic.minIndex , Data.Vector.Generic.minIndexBy , Data.Vector.Generic.maxIndex , Data.Vector.Generic.maxIndexBy -- ** Monadic folds , Data.Vector.Generic.fold1M , Data.Vector.Generic.fold1M' , Data.Vector.Generic.fold1M_ , Data.Vector.Generic.fold1M'_ -- * Prefix sums (scans) , Data.Vector.Generic.scanl1 , Data.Vector.Generic.scanl1' , Data.Vector.Generic.scanr1 , Data.Vector.Generic.scanr1' ) where import qualified Data.Vector.Generic rio-0.1.8.0/src/RIO/Vector/Storable.hs0000644000000000000000000001130013253417303015422 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | Storable @Vector@. Import as: -- -- > import qualified RIO.Vector.Storable as VS module RIO.Vector.Storable ( -- * Storable vectors Data.Vector.Storable.Vector , Data.Vector.Storable.MVector(..) , Data.Vector.Storable.Storable -- * Accessors -- ** Length information , Data.Vector.Storable.length , Data.Vector.Storable.null -- ** Indexing , (Data.Vector.Storable.!?) -- ** Extracting subvectors , Data.Vector.Storable.slice , Data.Vector.Storable.take , Data.Vector.Storable.drop , Data.Vector.Storable.splitAt -- * Construction -- ** Initialisation , Data.Vector.Storable.empty , Data.Vector.Storable.singleton , Data.Vector.Storable.replicate , Data.Vector.Storable.generate , Data.Vector.Storable.iterateN -- ** Monadic initialisation , Data.Vector.Storable.replicateM , Data.Vector.Storable.generateM #if MIN_VERSION_vector(0,12,0) , Data.Vector.Storable.iterateNM #endif , Data.Vector.Storable.create #if MIN_VERSION_vector(0,12,0) , Data.Vector.Storable.createT #endif -- ** Unfolding , Data.Vector.Storable.unfoldr , Data.Vector.Storable.unfoldrN #if MIN_VERSION_vector(0,12,0) , Data.Vector.Storable.unfoldrM , Data.Vector.Storable.unfoldrNM #endif , Data.Vector.Storable.constructN , Data.Vector.Storable.constructrN -- ** Enumeration , Data.Vector.Storable.enumFromN , Data.Vector.Storable.enumFromStepN , Data.Vector.Storable.enumFromTo , Data.Vector.Storable.enumFromThenTo -- ** Concatenation , Data.Vector.Storable.cons , Data.Vector.Storable.snoc , (Data.Vector.Storable.++) , Data.Vector.Storable.concat -- ** Restricting memory usage , Data.Vector.Storable.force -- * Modifying vectors -- ** Permutations , Data.Vector.Storable.reverse -- ** Safe destructive update , Data.Vector.Storable.modify -- * Elementwise operations -- ** Mapping , Data.Vector.Storable.map , Data.Vector.Storable.imap , Data.Vector.Storable.concatMap -- ** Monadic mapping , Data.Vector.Storable.mapM , Data.Vector.Storable.mapM_ , Data.Vector.Storable.forM , Data.Vector.Storable.forM_ -- ** Zipping , Data.Vector.Storable.zipWith , Data.Vector.Storable.zipWith3 , Data.Vector.Storable.zipWith4 , Data.Vector.Storable.zipWith5 , 
Data.Vector.Storable.zipWith6 , Data.Vector.Storable.izipWith , Data.Vector.Storable.izipWith3 , Data.Vector.Storable.izipWith4 , Data.Vector.Storable.izipWith5 , Data.Vector.Storable.izipWith6 -- ** Monadic zipping , Data.Vector.Storable.zipWithM , Data.Vector.Storable.zipWithM_ -- * Working with predicates -- ** Filtering , Data.Vector.Storable.filter , Data.Vector.Storable.ifilter #if MIN_VERSION_vector(0,12,0) , Data.Vector.Storable.uniq , Data.Vector.Storable.mapMaybe , Data.Vector.Storable.imapMaybe #endif , Data.Vector.Storable.filterM , Data.Vector.Storable.takeWhile , Data.Vector.Storable.dropWhile -- ** Partitioning , Data.Vector.Storable.partition , Data.Vector.Storable.unstablePartition , Data.Vector.Storable.span , Data.Vector.Storable.break -- ** Searching , Data.Vector.Storable.elem , Data.Vector.Storable.notElem , Data.Vector.Storable.find , Data.Vector.Storable.findIndex , Data.Vector.Storable.findIndices , Data.Vector.Storable.elemIndex , Data.Vector.Storable.elemIndices -- * Folding , Data.Vector.Storable.foldl , Data.Vector.Storable.foldl' , Data.Vector.Storable.foldr , Data.Vector.Storable.foldr' , Data.Vector.Storable.ifoldl , Data.Vector.Storable.ifoldl' , Data.Vector.Storable.ifoldr , Data.Vector.Storable.ifoldr' -- ** Specialised folds , Data.Vector.Storable.all , Data.Vector.Storable.any , Data.Vector.Storable.and , Data.Vector.Storable.or , Data.Vector.Storable.sum , Data.Vector.Storable.product -- ** Monadic folds , Data.Vector.Storable.foldM , Data.Vector.Storable.foldM' , Data.Vector.Storable.foldM_ , Data.Vector.Storable.foldM'_ -- * Prefix sums (scans) , Data.Vector.Storable.prescanl , Data.Vector.Storable.prescanl' , Data.Vector.Storable.postscanl , Data.Vector.Storable.postscanl' , Data.Vector.Storable.scanl , Data.Vector.Storable.scanl' , Data.Vector.Storable.prescanr , Data.Vector.Storable.prescanr' , Data.Vector.Storable.postscanr , Data.Vector.Storable.postscanr' , Data.Vector.Storable.scanr , Data.Vector.Storable.scanr' -- * Conversions -- ** Lists , Data.Vector.Storable.toList , Data.Vector.Storable.fromList , Data.Vector.Storable.fromListN -- ** Different vector types , Data.Vector.Storable.convert -- ** Mutable vectors , Data.Vector.Storable.freeze , Data.Vector.Storable.thaw , Data.Vector.Storable.copy ) where import qualified Data.Vector.Storable rio-0.1.8.0/src/RIO/Vector/Storable/Partial.hs0000644000000000000000000000264413253417303017031 0ustar0000000000000000module RIO.Vector.Storable.Partial ( -- * Accessors -- ** Indexing (Data.Vector.Storable.!) 
, Data.Vector.Storable.head , Data.Vector.Storable.last -- ** Monadic indexing , Data.Vector.Storable.indexM , Data.Vector.Storable.headM , Data.Vector.Storable.lastM -- ** Extracting subvectors , Data.Vector.Storable.init , Data.Vector.Storable.tail -- * Modifying vectors -- ** Bulk updates , (Data.Vector.Storable.//) , Data.Vector.Storable.update_ -- ** Accumulations , Data.Vector.Storable.accum , Data.Vector.Storable.accumulate_ -- ** Permutations , Data.Vector.Storable.backpermute -- * Folding , Data.Vector.Storable.foldl1 , Data.Vector.Storable.foldl1' , Data.Vector.Storable.foldr1 , Data.Vector.Storable.foldr1' -- ** Specialised folds , Data.Vector.Storable.maximum , Data.Vector.Storable.maximumBy , Data.Vector.Storable.minimum , Data.Vector.Storable.minimumBy , Data.Vector.Storable.minIndex , Data.Vector.Storable.minIndexBy , Data.Vector.Storable.maxIndex , Data.Vector.Storable.maxIndexBy -- ** Monadic folds , Data.Vector.Storable.fold1M , Data.Vector.Storable.fold1M' , Data.Vector.Storable.fold1M_ , Data.Vector.Storable.fold1M'_ -- * Prefix sums (scans) , Data.Vector.Storable.scanl1 , Data.Vector.Storable.scanl1' , Data.Vector.Storable.scanr1 , Data.Vector.Storable.scanr1' ) where import qualified Data.Vector.Storable rio-0.1.8.0/src/RIO/Vector/Storable/Unsafe.hs0000644000000000000000000000304513253417303016652 0ustar0000000000000000module RIO.Vector.Storable.Unsafe ( -- * Accessors -- ** Indexing Data.Vector.Storable.unsafeIndex , Data.Vector.Storable.unsafeHead , Data.Vector.Storable.unsafeLast -- ** Monadic indexing , Data.Vector.Storable.unsafeIndexM , Data.Vector.Storable.unsafeHeadM , Data.Vector.Storable.unsafeLastM -- ** Extracting subvectors , Data.Vector.Storable.unsafeSlice , Data.Vector.Storable.unsafeInit , Data.Vector.Storable.unsafeTail , Data.Vector.Storable.unsafeTake , Data.Vector.Storable.unsafeDrop -- * Modifying vectors -- ** Bulk updates , Data.Vector.Storable.unsafeUpd , Data.Vector.Storable.unsafeUpdate_ -- ** Accumulations , Data.Vector.Storable.unsafeAccum , Data.Vector.Storable.unsafeAccumulate_ -- ** Permutations , Data.Vector.Storable.unsafeBackpermute -- * Conversions -- ** Mutable vectors , Data.Vector.Storable.unsafeFreeze , Data.Vector.Storable.unsafeThaw , Data.Vector.Storable.unsafeCopy -- * Raw pointers , Data.Vector.Storable.unsafeFromForeignPtr , Data.Vector.Storable.unsafeFromForeignPtr0 , Data.Vector.Storable.unsafeToForeignPtr , Data.Vector.Storable.unsafeToForeignPtr0 , unsafeWith ) where import Data.Vector.Storable(Storable, Vector) import qualified Data.Vector.Storable import Foreign.Ptr(Ptr) import UnliftIO -- | Lifted version of 'Data.Vector.Storable.unsafeWith' unsafeWith :: (MonadUnliftIO m, Storable a) => Vector a -> (Ptr a -> m b) -> m b unsafeWith vec action = withRunInIO $ \unlifter -> Data.Vector.Storable.unsafeWith vec (unlifter . action) rio-0.1.8.0/src/RIO/Vector/Unboxed.hs0000644000000000000000000001227113253417303015263 0ustar0000000000000000{-# LANGUAGE CPP #-} -- | Unboxed @Vector@. Import as: -- -- > import qualified RIO.Vector.Unboxed as VU module RIO.Vector.Unboxed ( -- * Unboxed vectors Data.Vector.Unboxed.Vector , Data.Vector.Unboxed.MVector(..) , Data.Vector.Unboxed.Unbox -- * Accessors -- ** Length information , Data.Vector.Unboxed.length , Data.Vector.Unboxed.null -- ** Indexing , (Data.Vector.Unboxed.!?) 
-- ** Extracting subvectors , Data.Vector.Unboxed.slice , Data.Vector.Unboxed.take , Data.Vector.Unboxed.drop , Data.Vector.Unboxed.splitAt -- * Construction -- ** Initialisation , Data.Vector.Unboxed.empty , Data.Vector.Unboxed.singleton , Data.Vector.Unboxed.replicate , Data.Vector.Unboxed.generate , Data.Vector.Unboxed.iterateN -- ** Monadic initialisation , Data.Vector.Unboxed.replicateM , Data.Vector.Unboxed.generateM #if MIN_VERSION_vector(0,12,0) , Data.Vector.Unboxed.iterateNM #endif , Data.Vector.Unboxed.create #if MIN_VERSION_vector(0,12,0) , Data.Vector.Unboxed.createT #endif -- ** Unfolding , Data.Vector.Unboxed.unfoldr , Data.Vector.Unboxed.unfoldrN #if MIN_VERSION_vector(0,12,0) , Data.Vector.Unboxed.unfoldrM , Data.Vector.Unboxed.unfoldrNM #endif , Data.Vector.Unboxed.constructN , Data.Vector.Unboxed.constructrN -- ** Enumeration , Data.Vector.Unboxed.enumFromN , Data.Vector.Unboxed.enumFromStepN , Data.Vector.Unboxed.enumFromTo , Data.Vector.Unboxed.enumFromThenTo -- ** Concatenation , Data.Vector.Unboxed.cons , Data.Vector.Unboxed.snoc , (Data.Vector.Unboxed.++) , Data.Vector.Unboxed.concat -- ** Restricting memory usage , Data.Vector.Unboxed.force -- * Modifying vectors -- ** Permutations , Data.Vector.Unboxed.reverse -- ** Safe destructive update , Data.Vector.Unboxed.modify -- * Elementwise operations -- ** Indexing , Data.Vector.Unboxed.indexed -- ** Mapping , Data.Vector.Unboxed.map , Data.Vector.Unboxed.imap , Data.Vector.Unboxed.concatMap -- ** Monadic mapping , Data.Vector.Unboxed.mapM , Data.Vector.Unboxed.imapM , Data.Vector.Unboxed.mapM_ , Data.Vector.Unboxed.imapM_ , Data.Vector.Unboxed.forM , Data.Vector.Unboxed.forM_ -- ** Zipping , Data.Vector.Unboxed.zipWith , Data.Vector.Unboxed.zipWith3 , Data.Vector.Unboxed.zipWith4 , Data.Vector.Unboxed.zipWith5 , Data.Vector.Unboxed.zipWith6 , Data.Vector.Unboxed.izipWith , Data.Vector.Unboxed.izipWith3 , Data.Vector.Unboxed.izipWith4 , Data.Vector.Unboxed.izipWith5 , Data.Vector.Unboxed.izipWith6 , Data.Vector.Unboxed.zip , Data.Vector.Unboxed.zip3 , Data.Vector.Unboxed.zip4 , Data.Vector.Unboxed.zip5 , Data.Vector.Unboxed.zip6 -- ** Monadic zipping , Data.Vector.Unboxed.zipWithM , Data.Vector.Unboxed.izipWithM , Data.Vector.Unboxed.zipWithM_ , Data.Vector.Unboxed.izipWithM_ -- ** Unzipping , Data.Vector.Unboxed.unzip , Data.Vector.Unboxed.unzip3 , Data.Vector.Unboxed.unzip4 , Data.Vector.Unboxed.unzip5 , Data.Vector.Unboxed.unzip6 -- * Working with predicates -- ** Filtering , Data.Vector.Unboxed.filter , Data.Vector.Unboxed.ifilter #if MIN_VERSION_vector(0,12,0) , Data.Vector.Unboxed.uniq , Data.Vector.Unboxed.mapMaybe , Data.Vector.Unboxed.imapMaybe #endif , Data.Vector.Unboxed.filterM , Data.Vector.Unboxed.takeWhile , Data.Vector.Unboxed.dropWhile -- ** Partitioning , Data.Vector.Unboxed.partition , Data.Vector.Unboxed.unstablePartition , Data.Vector.Unboxed.span , Data.Vector.Unboxed.break -- ** Searching , Data.Vector.Unboxed.elem , Data.Vector.Unboxed.notElem , Data.Vector.Unboxed.find , Data.Vector.Unboxed.findIndex , Data.Vector.Unboxed.findIndices , Data.Vector.Unboxed.elemIndex , Data.Vector.Unboxed.elemIndices -- * Folding , Data.Vector.Unboxed.foldl , Data.Vector.Unboxed.foldl' , Data.Vector.Unboxed.foldr , Data.Vector.Unboxed.foldr' , Data.Vector.Unboxed.ifoldl , Data.Vector.Unboxed.ifoldl' , Data.Vector.Unboxed.ifoldr , Data.Vector.Unboxed.ifoldr' -- ** Specialised folds , Data.Vector.Unboxed.all , Data.Vector.Unboxed.any , Data.Vector.Unboxed.and , Data.Vector.Unboxed.or , Data.Vector.Unboxed.sum , 
Data.Vector.Unboxed.product -- ** Monadic folds , Data.Vector.Unboxed.foldM , Data.Vector.Unboxed.ifoldM , Data.Vector.Unboxed.foldM' , Data.Vector.Unboxed.ifoldM' , Data.Vector.Unboxed.foldM_ , Data.Vector.Unboxed.ifoldM_ , Data.Vector.Unboxed.foldM'_ , Data.Vector.Unboxed.ifoldM'_ -- * Prefix sums (scans) , Data.Vector.Unboxed.prescanl , Data.Vector.Unboxed.prescanl' , Data.Vector.Unboxed.postscanl , Data.Vector.Unboxed.postscanl' , Data.Vector.Unboxed.scanl , Data.Vector.Unboxed.scanl' , Data.Vector.Unboxed.prescanr , Data.Vector.Unboxed.prescanr' , Data.Vector.Unboxed.postscanr , Data.Vector.Unboxed.postscanr' , Data.Vector.Unboxed.scanr , Data.Vector.Unboxed.scanr' -- * Conversions -- ** Lists , Data.Vector.Unboxed.toList , Data.Vector.Unboxed.fromList , Data.Vector.Unboxed.fromListN -- ** Different vector types , Data.Vector.Unboxed.convert -- ** Mutable vectors , Data.Vector.Unboxed.freeze , Data.Vector.Unboxed.thaw , Data.Vector.Unboxed.copy ) where import qualified Data.Vector.Unboxed rio-0.1.8.0/src/RIO/Vector/Unboxed/Partial.hs0000644000000000000000000000270313253417303016656 0ustar0000000000000000module RIO.Vector.Unboxed.Partial ( -- * Accessors -- ** Indexing (Data.Vector.Unboxed.!) , Data.Vector.Unboxed.head , Data.Vector.Unboxed.last -- ** Monadic indexing , Data.Vector.Unboxed.indexM , Data.Vector.Unboxed.headM , Data.Vector.Unboxed.lastM -- ** Extracting subvectors , Data.Vector.Unboxed.init , Data.Vector.Unboxed.tail -- * Modifying vectors -- ** Bulk updates , (Data.Vector.Unboxed.//) , Data.Vector.Unboxed.update , Data.Vector.Unboxed.update_ -- ** Accumulations , Data.Vector.Unboxed.accum , Data.Vector.Unboxed.accumulate , Data.Vector.Unboxed.accumulate_ -- ** Permutations , Data.Vector.Unboxed.backpermute -- * Folding , Data.Vector.Unboxed.foldl1 , Data.Vector.Unboxed.foldl1' , Data.Vector.Unboxed.foldr1 , Data.Vector.Unboxed.foldr1' -- ** Specialised folds , Data.Vector.Unboxed.maximum , Data.Vector.Unboxed.maximumBy , Data.Vector.Unboxed.minimum , Data.Vector.Unboxed.minimumBy , Data.Vector.Unboxed.minIndex , Data.Vector.Unboxed.minIndexBy , Data.Vector.Unboxed.maxIndex , Data.Vector.Unboxed.maxIndexBy -- ** Monadic folds , Data.Vector.Unboxed.fold1M , Data.Vector.Unboxed.fold1M' , Data.Vector.Unboxed.fold1M_ , Data.Vector.Unboxed.fold1M'_ -- * Prefix sums (scans) , Data.Vector.Unboxed.scanl1 , Data.Vector.Unboxed.scanl1' , Data.Vector.Unboxed.scanr1 , Data.Vector.Unboxed.scanr1' ) where import qualified Data.Vector.Unboxed rio-0.1.8.0/src/RIO/Vector/Unboxed/Unsafe.hs0000644000000000000000000000207013253417303016500 0ustar0000000000000000module RIO.Vector.Unboxed.Unsafe ( -- * Accessors -- ** Indexing Data.Vector.Unboxed.unsafeIndex , Data.Vector.Unboxed.unsafeHead , Data.Vector.Unboxed.unsafeLast -- ** Monadic indexing , Data.Vector.Unboxed.unsafeIndexM , Data.Vector.Unboxed.unsafeHeadM , Data.Vector.Unboxed.unsafeLastM -- ** Extracting subvectors , Data.Vector.Unboxed.unsafeSlice , Data.Vector.Unboxed.unsafeInit , Data.Vector.Unboxed.unsafeTail , Data.Vector.Unboxed.unsafeTake , Data.Vector.Unboxed.unsafeDrop -- * Modifying vectors -- ** Bulk updates , Data.Vector.Unboxed.unsafeUpd , Data.Vector.Unboxed.unsafeUpdate , Data.Vector.Unboxed.unsafeUpdate_ -- ** Accumulations , Data.Vector.Unboxed.unsafeAccum , Data.Vector.Unboxed.unsafeAccumulate , Data.Vector.Unboxed.unsafeAccumulate_ -- ** Permutations , Data.Vector.Unboxed.unsafeBackpermute -- * Conversions -- ** Mutable vectors , Data.Vector.Unboxed.unsafeFreeze , Data.Vector.Unboxed.unsafeThaw , 
Data.Vector.Unboxed.unsafeCopy ) where import qualified Data.Vector.Unboxed rio-0.1.8.0/src/RIO/Vector/Unsafe.hs0000644000000000000000000000215513253417303015100 0ustar0000000000000000module RIO.Vector.Unsafe ( -- * Immutable vectors Data.Vector.Generic.Vector(..) -- * Accessors -- ** Indexing , Data.Vector.Generic.unsafeIndex , Data.Vector.Generic.unsafeHead , Data.Vector.Generic.unsafeLast -- ** Monadic indexing , Data.Vector.Generic.unsafeIndexM , Data.Vector.Generic.unsafeHeadM , Data.Vector.Generic.unsafeLastM -- ** Extracting subvectors , Data.Vector.Generic.unsafeSlice , Data.Vector.Generic.unsafeInit , Data.Vector.Generic.unsafeTail , Data.Vector.Generic.unsafeTake , Data.Vector.Generic.unsafeDrop -- * Modifying vectors -- ** Bulk updates , Data.Vector.Generic.unsafeUpd , Data.Vector.Generic.unsafeUpdate , Data.Vector.Generic.unsafeUpdate_ -- ** Accumulations , Data.Vector.Generic.unsafeAccum , Data.Vector.Generic.unsafeAccumulate , Data.Vector.Generic.unsafeAccumulate_ -- ** Permutations , Data.Vector.Generic.unsafeBackpermute -- * Conversions -- ** Mutable vectors , Data.Vector.Generic.unsafeFreeze , Data.Vector.Generic.unsafeThaw , Data.Vector.Generic.unsafeCopy ) where import qualified Data.Vector.Generic rio-0.1.8.0/src/RIO/Writer.hs0000644000000000000000000000030413317560112013661 0ustar0000000000000000-- | Provides reexports of 'MonadWriter' and related helpers. -- -- @since 0.1.4.0 module RIO.Writer ( Control.Monad.Writer.MonadWriter (..) ) where import qualified Control.Monad.Writer rio-0.1.8.0/src/RIO/Prelude/Display.hs0000644000000000000000000001121413411212256015412 0ustar0000000000000000{-# LANGUAGE GeneralizedNewtypeDeriving #-} module RIO.Prelude.Display ( Utf8Builder (..) , Display (..) , displayShow , utf8BuilderToText , utf8BuilderToLazyText , displayBytesUtf8 , writeFileUtf8Builder ) where import Data.String (IsString (..)) import Data.ByteString (ByteString) import qualified Data.ByteString.Lazy as BL import qualified Data.ByteString.Builder as BB import Data.ByteString.Builder (Builder) import Data.Semigroup (Semigroup) import Data.Text (Text) import qualified Data.Text.Lazy as TL import qualified Data.Text.Lazy.Encoding as TL import UnliftIO import Data.Text.Encoding (decodeUtf8With, encodeUtf8Builder) import Data.Text.Encoding.Error (lenientDecode) import Data.Int import Data.Word import System.Process.Typed (ProcessConfig, setEnvInherit) -- | A builder of binary data, with the invariant that the underlying -- data is supposed to be UTF-8 encoded. -- -- @since 0.1.0.0 newtype Utf8Builder = Utf8Builder { getUtf8Builder :: Builder } deriving (Semigroup, Monoid) -- | @since 0.1.0.0 instance IsString Utf8Builder where fromString = Utf8Builder . BB.stringUtf8 -- | A typeclass for values which can be converted to a -- 'Utf8Builder'. The intention of this typeclass is to provide a -- human-friendly display of the data. -- -- @since 0.1.0.0 class Display a where {-# MINIMAL display | textDisplay #-} display :: a -> Utf8Builder display = display . textDisplay -- | Display data as `Text`, which will also be used for `display` if it is -- not overriden. -- -- @since 0.1.7.0 textDisplay :: a -> Text textDisplay = utf8BuilderToText . display -- | @since 0.1.0.0 instance Display Utf8Builder where display = id -- | @since 0.1.0.0 instance Display Text where display = Utf8Builder . encodeUtf8Builder -- | @since 0.1.0.0 instance Display TL.Text where display = foldMap display . TL.toChunks -- | @since 0.1.0.0 instance Display Char where display = Utf8Builder . 
BB.charUtf8 -- | @since 0.1.0.0 instance Display Integer where display = Utf8Builder . BB.integerDec -- | @since 0.1.0.0 instance Display Float where display = Utf8Builder . BB.floatDec instance Display Double where display = Utf8Builder . BB.doubleDec -- | @since 0.1.0.0 instance Display Int where display = Utf8Builder . BB.intDec -- | @since 0.1.0.0 instance Display Int8 where display = Utf8Builder . BB.int8Dec -- | @since 0.1.0.0 instance Display Int16 where display = Utf8Builder . BB.int16Dec -- | @since 0.1.0.0 instance Display Int32 where display = Utf8Builder . BB.int32Dec -- | @since 0.1.0.0 instance Display Int64 where display = Utf8Builder . BB.int64Dec -- | @since 0.1.0.0 instance Display Word where display = Utf8Builder . BB.wordDec -- | @since 0.1.0.0 instance Display Word8 where display = Utf8Builder . BB.word8Dec -- | @since 0.1.0.0 instance Display Word16 where display = Utf8Builder . BB.word16Dec -- | @since 0.1.0.0 instance Display Word32 where display = Utf8Builder . BB.word32Dec -- | @since 0.1.0.0 instance Display Word64 where display = Utf8Builder . BB.word64Dec -- | @since 0.1.0.0 instance Display SomeException where display = fromString . displayException -- | @since 0.1.0.0 instance Display IOException where display = fromString . displayException -- | @since 0.1.0.0 instance Display (ProcessConfig a b c) where display = displayShow . setEnvInherit -- | Use the 'Show' instance for a value to convert it to a -- 'Utf8Builder'. -- -- @since 0.1.0.0 displayShow :: Show a => a -> Utf8Builder displayShow = fromString . show -- | Convert a 'ByteString' into a 'Utf8Builder'. -- -- /NOTE/ This function performs no checks to ensure that the data is, -- in fact, UTF8 encoded. If you provide non-UTF8 data, later -- functions may fail. -- -- @since 0.1.0.0 displayBytesUtf8 :: ByteString -> Utf8Builder displayBytesUtf8 = Utf8Builder . BB.byteString -- | Convert a 'Utf8Builder' value into a strict 'Text'. -- -- @since 0.1.0.0 utf8BuilderToText :: Utf8Builder -> Text utf8BuilderToText = decodeUtf8With lenientDecode . BL.toStrict . BB.toLazyByteString . getUtf8Builder -- | Convert a 'Utf8Builder' value into a lazy 'Text'. -- -- @since 0.1.0.0 utf8BuilderToLazyText :: Utf8Builder -> TL.Text utf8BuilderToLazyText = TL.decodeUtf8With lenientDecode . BB.toLazyByteString . getUtf8Builder -- | Write the given 'Utf8Builder' value to a file. -- -- @since 0.1.0.0 writeFileUtf8Builder :: MonadIO m => FilePath -> Utf8Builder -> m () writeFileUtf8Builder fp (Utf8Builder builder) = liftIO $ withBinaryFile fp WriteMode $ \h -> BB.hPutBuilder h builder rio-0.1.8.0/src/RIO/Prelude/Extra.hs0000644000000000000000000000460313312220526015073 0ustar0000000000000000{-# LANGUAGE BangPatterns #-} module RIO.Prelude.Extra ( mapLeft , fromFirst , mapMaybeA , mapMaybeM , forMaybeA , forMaybeM , foldMapM , nubOrd , whenM , unlessM , asIO ) where import qualified Data.Set as Set import Data.Monoid (First (..)) import Data.Foldable (foldlM) import RIO.Prelude.Reexports -- | Apply a function to a 'Left' constructor mapLeft :: (a1 -> a2) -> Either a1 b -> Either a2 b mapLeft f (Left a1) = Left (f a1) mapLeft _ (Right b) = Right b -- | Get a 'First' value with a default fallback fromFirst :: a -> First a -> a fromFirst x = fromMaybe x . getFirst -- | Applicative 'mapMaybe'. mapMaybeA :: Applicative f => (a -> f (Maybe b)) -> [a] -> f [b] mapMaybeA f = fmap catMaybes . 
traverse f -- | @'forMaybeA' '==' 'flip' 'mapMaybeA'@ forMaybeA :: Applicative f => [a] -> (a -> f (Maybe b)) -> f [b] forMaybeA = flip mapMaybeA -- | Monadic 'mapMaybe'. mapMaybeM :: Monad m => (a -> m (Maybe b)) -> [a] -> m [b] mapMaybeM f = liftM catMaybes . mapM f -- | @'forMaybeM' '==' 'flip' 'mapMaybeM'@ forMaybeM :: Monad m => [a] -> (a -> m (Maybe b)) -> m [b] forMaybeM = flip mapMaybeM -- | Extend 'foldMap' to allow side effects. -- -- Internally, this is implemented using a strict left fold. This is used for -- performance reasons. It also necessitates that this function has a @Monad@ -- constraint and not just an @Applicative@ constraint. For more information, -- see -- . -- -- @since 0.1.3.0 foldMapM :: (Monad m, Monoid w, Foldable t) => (a -> m w) -> t a -> m w foldMapM f = foldlM (\acc a -> do w <- f a return $! mappend acc w) mempty -- | Strip out duplicates nubOrd :: Ord a => [a] -> [a] nubOrd = loop mempty where loop _ [] = [] loop !s (a:as) | a `Set.member` s = loop s as | otherwise = a : loop (Set.insert a s) as -- | Run the second value if the first value returns 'True' whenM :: Monad m => m Bool -> m () -> m () whenM boolM action = do x <- boolM if x then action else return () -- | Run the second value if the first value returns 'False' unlessM :: Monad m => m Bool -> m () -> m () unlessM boolM action = do x <- boolM if x then return () else action -- | Helper function to force an action to run in 'IO'. Especially -- useful for overly general contexts, like hspec tests. -- -- @since 0.1.3.0 asIO :: IO a -> IO a asIO = id rio-0.1.8.0/src/RIO/Prelude/IO.hs0000644000000000000000000000335313402156770014331 0ustar0000000000000000{-# LANGUAGE CPP #-} module RIO.Prelude.IO ( withLazyFile , readFileBinary , writeFileBinary , readFileUtf8 , writeFileUtf8 , hPutBuilder ) where import RIO.Prelude.Reexports import qualified Data.ByteString as B import qualified Data.ByteString.Builder as BB import qualified Data.ByteString.Lazy as BL import qualified Data.Text.IO as T import System.IO (hSetEncoding, utf8) -- | Lazily get the contents of a file. Unlike 'BL.readFile', this -- ensures that if an exception is thrown, the file handle is closed -- immediately. withLazyFile :: MonadUnliftIO m => FilePath -> (BL.ByteString -> m a) -> m a withLazyFile fp inner = withBinaryFile fp ReadMode $ inner <=< liftIO . BL.hGetContents -- | Write a file in UTF8 encoding -- -- This function will use OS-specific line ending handling. writeFileUtf8 :: MonadIO m => FilePath -> Text -> m () writeFileUtf8 fp text = liftIO $ withFile fp WriteMode $ \h -> do hSetEncoding h utf8 T.hPutStr h text hPutBuilder :: MonadIO m => Handle -> Builder -> m () hPutBuilder h = liftIO . BB.hPutBuilder h {-# INLINE hPutBuilder #-} -- | Same as 'B.readFile', but generalized to 'MonadIO' readFileBinary :: MonadIO m => FilePath -> m ByteString readFileBinary = liftIO . B.readFile -- | Same as 'B.writeFile', but generalized to 'MonadIO' writeFileBinary :: MonadIO m => FilePath -> ByteString -> m () writeFileBinary fp = liftIO . B.writeFile fp -- | Read a file in UTF8 encoding, throwing an exception on invalid character -- encoding. -- -- This function will use OS-specific line ending handling. 
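-- -- A minimal usage sketch combining this with 'writeFileUtf8' (the file names are hypothetical): -- -- > readFileUtf8 "settings.json" >>= writeFileUtf8 "settings-copy.json" 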
readFileUtf8 :: MonadIO m => FilePath -> m Text readFileUtf8 fp = liftIO $ withFile fp ReadMode $ \h -> do hSetEncoding h utf8 T.hGetContents h rio-0.1.8.0/src/RIO/Prelude/Lens.hs0000644000000000000000000000105513253417303014714 0ustar0000000000000000module RIO.Prelude.Lens ( view , Lens.Micro.ASetter , Lens.Micro.ASetter' , Lens.Micro.Getting , Lens.Micro.Lens , Lens.Micro.Lens' , Lens.Micro.SimpleGetter , Lens.Micro.lens , Lens.Micro.over , Lens.Micro.set , Lens.Micro.sets , Lens.Micro.to , (Lens.Micro.^.) ) where import Lens.Micro import Control.Monad.Reader (MonadReader, asks) import Lens.Micro.Internal (( #. )) import Control.Applicative (Const (..)) view :: MonadReader s m => Getting a s a -> m a view l = asks (getConst #. l Const) rio-0.1.8.0/src/RIO/Prelude/Logger.hs0000644000000000000000000004512613402156770015245 0ustar0000000000000000{-# LANGUAGE CPP #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE NoImplicitPrelude #-} module RIO.Prelude.Logger ( -- * Standard logging functions logDebug , logInfo , logWarn , logError , logOther -- * Running with logging , withLogFunc , newLogFunc , LogFunc , HasLogFunc (..) , logOptionsHandle -- ** Log options , LogOptions , setLogMinLevel , setLogMinLevelIO , setLogVerboseFormat , setLogVerboseFormatIO , setLogTerminal , setLogUseTime , setLogUseColor , setLogUseLoc -- * Advanced logging functions -- ** Sticky logging , logSticky , logStickyDone -- ** With source , logDebugS , logInfoS , logWarnS , logErrorS , logOtherS -- ** Generic log function , logGeneric -- * Advanced running functions , mkLogFunc , logOptionsMemory -- * Data types , LogLevel (..) , LogSource , CallStack -- * Convenience functions , displayCallStack , noLogging -- * Accessors , logFuncUseColorL ) where import RIO.Prelude.Reexports hiding ((<>)) import RIO.Prelude.Renames import RIO.Prelude.Display import RIO.Prelude.Lens import Data.Text (Text) import qualified Data.Text as T import Control.Monad.IO.Class (MonadIO, liftIO) import GHC.Stack (HasCallStack, CallStack, SrcLoc (..), getCallStack, callStack) import Data.Time import qualified Data.Text.IO as TIO import Data.ByteString.Builder (toLazyByteString, char7, byteString, hPutBuilder) import Data.ByteString.Builder.Extra (flush) import GHC.IO.Handle.Internals (wantWritableHandle) import GHC.IO.Encoding.Types (textEncodingName) import GHC.IO.Handle.Types (Handle__ (..)) import qualified Data.ByteString as B import System.IO (localeEncoding) import GHC.Foreign (peekCString, withCString) import Data.Semigroup (Semigroup (..)) -- | The log level of a message. -- -- @since 0.0.0.0 data LogLevel = LevelDebug | LevelInfo | LevelWarn | LevelError | LevelOther !Text deriving (Eq, Show, Read, Ord) -- | Where in the application a log message came from. Used for -- display purposes only. -- -- @since 0.0.0.0 type LogSource = Text -- | Environment values with a logging function. -- -- @since 0.0.0.0 class HasLogFunc env where logFuncL :: Lens' env LogFunc instance HasLogFunc LogFunc where logFuncL = id -- | A logging function, wrapped in a newtype for better error messages. -- -- An implementation may choose any behavior of this value it wishes, -- including printing to standard output or no action at all. -- -- @since 0.0.0.0 data LogFunc = LogFunc { unLogFunc :: !(CallStack -> LogSource -> LogLevel -> Utf8Builder -> IO ()) , lfOptions :: !(Maybe LogOptions) } -- | Perform both sets of actions per log entry. 
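--
-- For example (a sketch; @stderrLogFunc@ and @fileLogFunc@ are assumed to have
-- been created elsewhere, e.g. via 'withLogFunc'):
--
-- > let combined = stderrLogFunc <> fileLogFunc
-- > runRIO combined $ logInfo "sent to both log functions"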
-- -- @since 0.0.0.0 instance Semigroup LogFunc where LogFunc f o1 <> LogFunc g o2 = LogFunc { unLogFunc = \a b c d -> f a b c d *> g a b c d , lfOptions = o1 `mplus` o2 } -- | 'mempty' peforms no logging. -- -- @since 0.0.0.0 instance Monoid LogFunc where mempty = mkLogFunc $ \_ _ _ _ -> return () mappend = (<>) -- | Create a 'LogFunc' from the given function. -- -- @since 0.0.0.0 mkLogFunc :: (CallStack -> LogSource -> LogLevel -> Utf8Builder -> IO ()) -> LogFunc mkLogFunc f = LogFunc f Nothing -- | Generic, basic function for creating other logging functions. -- -- @since 0.0.0.0 logGeneric :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => LogSource -> LogLevel -> Utf8Builder -> m () logGeneric src level str = do LogFunc logFunc _ <- view logFuncL liftIO $ logFunc callStack src level str -- | Log a debug level message with no source. -- -- @since 0.0.0.0 logDebug :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Utf8Builder -> m () logDebug = logGeneric "" LevelDebug -- | Log an info level message with no source. -- -- @since 0.0.0.0 logInfo :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Utf8Builder -> m () logInfo = logGeneric "" LevelInfo -- | Log a warn level message with no source. -- -- @since 0.0.0.0 logWarn :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Utf8Builder -> m () logWarn = logGeneric "" LevelWarn -- | Log an error level message with no source. -- -- @since 0.0.0.0 logError :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Utf8Builder -> m () logError = logGeneric "" LevelError -- | Log a message with the specified textual level and no source. -- -- @since 0.0.0.0 logOther :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Text -- ^ level -> Utf8Builder -> m () logOther = logGeneric "" . LevelOther -- | Log a debug level message with the given source. -- -- @since 0.0.0.0 logDebugS :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => LogSource -> Utf8Builder -> m () logDebugS src = logGeneric src LevelDebug -- | Log an info level message with the given source. -- -- @since 0.0.0.0 logInfoS :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => LogSource -> Utf8Builder -> m () logInfoS src = logGeneric src LevelInfo -- | Log a warn level message with the given source. -- -- @since 0.0.0.0 logWarnS :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => LogSource -> Utf8Builder -> m () logWarnS src = logGeneric src LevelWarn -- | Log an error level message with the given source. -- -- @since 0.0.0.0 logErrorS :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => LogSource -> Utf8Builder -> m () logErrorS src = logGeneric src LevelError -- | Log a message with the specified textual level and the given -- source. -- -- @since 0.0.0.0 logOtherS :: (MonadIO m, MonadReader env m, HasLogFunc env, HasCallStack) => Text -- ^ level -> LogSource -> Utf8Builder -> m () logOtherS src = logGeneric src . LevelOther -- | Write a "sticky" line to the terminal. Any subsequent lines will -- overwrite this one, and that same line will be repeated below -- again. In other words, the line sticks at the bottom of the output -- forever. Running this function again will replace the sticky line -- with a new sticky line. When you want to get rid of the sticky -- line, run 'logStickyDone'. -- -- Note that not all 'LogFunc' implementations will support sticky -- messages as described. 
However, the 'withLogFunc' implementation -- provided by this module does. -- -- @since 0.0.0.0 logSticky :: (MonadIO m, HasCallStack, MonadReader env m, HasLogFunc env) => Utf8Builder -> m () logSticky = logOther "sticky" -- | This will print out the given message with a newline and disable -- any further stickiness of the line until a new call to 'logSticky' -- happens. -- -- @since 0.0.0.0 logStickyDone :: (MonadIO m, HasCallStack, MonadReader env m, HasLogFunc env) => Utf8Builder -> m () logStickyDone = logOther "sticky-done" -- TODO It might be better at some point to have a 'runSticky' function -- that encompasses the logSticky->logStickyDone pairing. canUseUtf8 :: MonadIO m => Handle -> m Bool canUseUtf8 h = liftIO $ wantWritableHandle "canUseUtf8" h $ \h_ -> do -- TODO also handle haOutputNL for CRLF return $ (textEncodingName <$> haCodec h_) == Just "UTF-8" -- | Create a 'LogOptions' value which will store its data in -- memory. This is primarily intended for testing purposes. This will -- return both a 'LogOptions' value and an 'IORef' containing the -- resulting 'Builder' value. -- -- This will default to non-verbose settings and assume there is a -- terminal attached. These assumptions can be overridden using the -- appropriate @set@ functions. -- -- @since 0.0.0.0 logOptionsMemory :: MonadIO m => m (IORef Builder, LogOptions) logOptionsMemory = do ref <- newIORef mempty let options = LogOptions { logMinLevel = return LevelInfo , logVerboseFormat = return False , logTerminal = True , logUseTime = False , logUseColor = False , logUseLoc = False , logSend = \new -> atomicModifyIORef' ref $ \old -> (old <> new, ()) } return (ref, options) -- | Create a 'LogOptions' value from the given 'Handle' and whether -- to perform verbose logging or not. Individiual settings can be -- overridden using appropriate @set@ functions. -- -- When Verbose Flag is @True@, the following happens: -- -- * @setLogVerboseFormat@ is called with @True@ -- * @setLogUseColor@ is called with @True@ (except on Windows) -- * @setLogUseLoc@ is called with @True@ -- * @setLogUseTime@ is called with @True@ -- * @setLogMinLevel@ is called with 'Debug' log level -- -- @since 0.0.0.0 logOptionsHandle :: MonadIO m => Handle -> Bool -- ^ Verbose Flag -> m LogOptions logOptionsHandle handle' verbose = liftIO $ do terminal <- hIsTerminalDevice handle' useUtf8 <- canUseUtf8 handle' unicode <- if useUtf8 then return True else getCanUseUnicode return LogOptions { logMinLevel = return $ if verbose then LevelDebug else LevelInfo , logVerboseFormat = return verbose , logTerminal = terminal , logUseTime = verbose #if WINDOWS , logUseColor = False #else , logUseColor = verbose && terminal #endif , logUseLoc = verbose , logSend = \builder -> if useUtf8 && unicode then hPutBuilder handle' (builder <> flush) else do let lbs = toLazyByteString builder bs = toStrictBytes lbs case decodeUtf8' bs of Left e -> error $ "mkLogOptions: invalid UTF8 sequence: " ++ show (e, bs) Right text -> do let text' | unicode = text | otherwise = T.map replaceUnicode text TIO.hPutStr handle' text' hFlush handle' } -- | Taken from GHC: determine if we should use Unicode syntax getCanUseUnicode :: IO Bool getCanUseUnicode = do let enc = localeEncoding str = "\x2018\x2019" test = withCString enc str $ \cstr -> do str' <- peekCString enc cstr return (str == str') test `catchIO` \_ -> return False -- | Given a 'LogOptions' value, returns both a new 'LogFunc' and a sub-routine that -- disposes it. 
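--
-- A hedged sketch of managing the teardown yourself (@options@ is assumed to
-- come from 'logOptionsHandle' or 'logOptionsMemory'):
--
-- > (logFunc, dispose) <- newLogFunc options
-- > runRIO logFunc (logInfo "hello") `finally` dispose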
-- -- Intended for use if you want to deal with the teardown of 'LogFunc' yourself, -- otherwise prefer the 'withLogFunc' function instead. -- -- @since 0.1.3.0 newLogFunc :: (MonadIO n, MonadIO m) => LogOptions -> n (LogFunc, m ()) newLogFunc options = if logTerminal options then do var <- newMVar mempty return (LogFunc { unLogFunc = stickyImpl var options (simpleLogFunc options) , lfOptions = Just options } , do state <- takeMVar var unless (B.null state) (liftIO $ logSend options "\n") ) else return (LogFunc { unLogFunc = \cs src level str -> simpleLogFunc options cs src (noSticky level) str , lfOptions = Just options } , return () ) -- | Given a 'LogOptions' value, run the given function with the -- specified 'LogFunc'. A common way to use this function is: -- -- @ -- let isVerbose = False -- get from the command line instead -- logOptions' <- logOptionsHandle stderr isVerbose -- let logOptions = setLogUseTime True logOptions' -- withLogFunc logOptions $ \lf -> do -- let app = App -- application specific environment -- { appLogFunc = lf -- , appOtherStuff = ... -- } -- runRIO app $ do -- logInfo "Starting app" -- myApp -- @ -- -- @since 0.0.0.0 withLogFunc :: MonadUnliftIO m => LogOptions -> (LogFunc -> m a) -> m a withLogFunc options inner = withRunInIO $ \run -> do bracket (newLogFunc options) snd (run . inner . fst) -- | Replace Unicode characters with non-Unicode equivalents replaceUnicode :: Char -> Char replaceUnicode '\x2018' = '`' replaceUnicode '\x2019' = '\'' replaceUnicode c = c noSticky :: LogLevel -> LogLevel noSticky (LevelOther "sticky-done") = LevelInfo noSticky (LevelOther "sticky") = LevelInfo noSticky level = level -- | Configuration for how to create a 'LogFunc'. Intended to be used -- with the 'withLogFunc' function. -- -- @since 0.0.0.0 data LogOptions = LogOptions { logMinLevel :: !(IO LogLevel) , logVerboseFormat :: !(IO Bool) , logTerminal :: !Bool , logUseTime :: !Bool , logUseColor :: !Bool , logUseLoc :: !Bool , logSend :: !(Builder -> IO ()) } -- | Set the minimum log level. Messages below this level will not be -- printed. -- -- Default: in verbose mode, 'LevelDebug'. Otherwise, 'LevelInfo'. -- -- @since 0.0.0.0 setLogMinLevel :: LogLevel -> LogOptions -> LogOptions setLogMinLevel level options = options { logMinLevel = return level } -- | Refer to 'setLogMinLevel'. This modifier allows to alter the verbose format -- value dynamically at runtime. -- -- Default: in verbose mode, 'LevelDebug'. Otherwise, 'LevelInfo'. -- -- @since 0.1.3.0 setLogMinLevelIO :: IO LogLevel -> LogOptions -> LogOptions setLogMinLevelIO getLevel options = options { logMinLevel = getLevel } -- | Use the verbose format for printing log messages. -- -- Default: follows the value of the verbose flag. -- -- @since 0.0.0.0 setLogVerboseFormat :: Bool -> LogOptions -> LogOptions setLogVerboseFormat v options = options { logVerboseFormat = return v } -- | Refer to 'setLogVerboseFormat'. This modifier allows to alter the verbose -- format value dynamically at runtime. -- -- Default: follows the value of the verbose flag. -- -- @since 0.1.3.0 setLogVerboseFormatIO :: IO Bool -> LogOptions -> LogOptions setLogVerboseFormatIO getVerboseLevel options = options { logVerboseFormat = getVerboseLevel } -- | Do we treat output as a terminal. If @True@, we will enabled -- sticky logging functionality. -- -- Default: checks if the @Handle@ provided to 'logOptionsHandle' is a -- terminal with 'hIsTerminalDevice'. 
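--
-- A small sketch of overriding the detected value, e.g. to keep sticky logging
-- when output is redirected (the @verbose@ flag is illustrative):
--
-- > options' <- logOptionsHandle stderr verbose
-- > let options = setLogTerminal True options'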
-- -- @since 0.0.0.0 setLogTerminal :: Bool -> LogOptions -> LogOptions setLogTerminal t options = options { logTerminal = t } -- | Include the time when printing log messages. -- -- Default: true in debug mode, false otherwise. -- -- @since 0.0.0.0 setLogUseTime :: Bool -> LogOptions -> LogOptions setLogUseTime t options = options { logUseTime = t } -- | Use ANSI color codes in the log output. -- -- Default: true if in verbose mode /and/ the 'Handle' is a terminal device. -- -- @since 0.0.0.0 setLogUseColor :: Bool -> LogOptions -> LogOptions setLogUseColor c options = options { logUseColor = c } -- | Use code location in the log output. -- -- Default: true if in verbose mode, false otherwise. -- -- @since 0.1.2.0 setLogUseLoc :: Bool -> LogOptions -> LogOptions setLogUseLoc l options = options { logUseLoc = l } simpleLogFunc :: LogOptions -> CallStack -> LogSource -> LogLevel -> Utf8Builder -> IO () simpleLogFunc lo cs _src level msg = do logLevel <- logMinLevel lo logVerbose <- logVerboseFormat lo when (level >= logLevel) $ do timestamp <- getTimestamp logVerbose logSend lo $ getUtf8Builder $ timestamp <> getLevel logVerbose <> ansi reset <> msg <> getLoc <> ansi reset <> "\n" where reset = "\ESC[0m" setBlack = "\ESC[90m" setGreen = "\ESC[32m" setBlue = "\ESC[34m" setYellow = "\ESC[33m" setRed = "\ESC[31m" setMagenta = "\ESC[35m" ansi :: Utf8Builder -> Utf8Builder ansi xs | logUseColor lo = xs | otherwise = mempty getTimestamp :: Bool -> IO Utf8Builder getTimestamp logVerbose | logVerbose && logUseTime lo = do now <- getZonedTime return $ ansi setBlack <> fromString (formatTime' now) <> ": " | otherwise = return mempty where formatTime' = take timestampLength . formatTime defaultTimeLocale "%F %T.%q" getLevel :: Bool -> Utf8Builder getLevel logVerbose | logVerbose = case level of LevelDebug -> ansi setGreen <> "[debug] " LevelInfo -> ansi setBlue <> "[info] " LevelWarn -> ansi setYellow <> "[warn] " LevelError -> ansi setRed <> "[error] " LevelOther name -> ansi setMagenta <> "[" <> display name <> "] " | otherwise = mempty getLoc :: Utf8Builder getLoc | logUseLoc lo = ansi setBlack <> "\n@(" <> displayCallStack cs <> ")" | otherwise = mempty -- | Convert a 'CallStack' value into a 'Utf8Builder' indicating -- the first source location. -- -- TODO Consider showing the entire call stack instead. -- -- @since 0.0.0.0 displayCallStack :: CallStack -> Utf8Builder displayCallStack cs = case reverse $ getCallStack cs of [] -> "" (_desc, loc):_ -> let file = srcLocFile loc in fromString file <> ":" <> displayShow (srcLocStartLine loc) <> ":" <> displayShow (srcLocStartCol loc) -- | The length of a timestamp in the format "YYYY-MM-DD hh:mm:ss.μμμμμμ". -- This definition is top-level in order to avoid multiple reevaluation at runtime. timestampLength :: Int timestampLength = length (formatTime defaultTimeLocale "%F %T.000000" (UTCTime (ModifiedJulianDay 0) 0)) stickyImpl :: MVar ByteString -> LogOptions -> (CallStack -> LogSource -> LogLevel -> Utf8Builder -> IO ()) -> CallStack -> LogSource -> LogLevel -> Utf8Builder -> IO () stickyImpl ref lo logFunc loc src level msgOrig = modifyMVar_ ref $ \sticky -> do let backSpaceChar = '\8' repeating = mconcat . replicate (B.length sticky) . 
char7 clear = logSend lo (repeating backSpaceChar <> repeating ' ' <> repeating backSpaceChar) logLevel <- logMinLevel lo case level of LevelOther "sticky-done" -> do clear logFunc loc src LevelInfo msgOrig return mempty LevelOther "sticky" -> do clear let bs = toStrictBytes $ toLazyByteString $ getUtf8Builder msgOrig logSend lo (byteString bs <> flush) return bs _ | level >= logLevel -> do clear logFunc loc src level msgOrig unless (B.null sticky) $ logSend lo (byteString sticky <> flush) return sticky | otherwise -> return sticky -- | Is the log func configured to use color output? -- -- Intended for use by code which wants to optionally add additional color to -- its log messages. -- -- @since 0.1.0.0 logFuncUseColorL :: HasLogFunc env => SimpleGetter env Bool logFuncUseColorL = logFuncL.to (maybe False logUseColor . lfOptions) -- | Disable logging capabilities in a given sub-routine -- -- Intended to skip logging in general purpose implementations, where secrets -- might be logged accidently. -- -- @since 0.1.5.0 noLogging :: (HasLogFunc env, MonadReader env m) => m a -> m a noLogging = local (set logFuncL mempty) rio-0.1.8.0/src/RIO/Prelude/Reexports.hs0000644000000000000000000002164213327276360016022 0ustar0000000000000000{-# LANGUAGE CPP #-} module RIO.Prelude.Reexports ( module UnliftIO -- List imports from UnliftIO? , UnliftIO.Concurrent.ThreadId , UnliftIO.Concurrent.myThreadId , UnliftIO.Concurrent.isCurrentThreadBound , UnliftIO.Concurrent.threadWaitRead , UnliftIO.Concurrent.threadWaitWrite , UnliftIO.Concurrent.threadDelay , yieldThread , Control.Applicative.Alternative , Control.Applicative.Applicative (..) , Control.Applicative.liftA #if !MIN_VERSION_base(4, 10, 0) , Control.Applicative.liftA2 #endif , Control.Applicative.liftA3 , Control.Applicative.many , Control.Applicative.optional , Control.Applicative.some , (Control.Applicative.<|>) , Control.Arrow.first , Control.Arrow.second , (Control.Arrow.&&&) , (Control.Arrow.***) , (Control.Category.>>>) , Control.DeepSeq.NFData(..) , Control.DeepSeq.force , (Control.DeepSeq.$!!) , Control.Monad.Monad(..) , Control.Monad.MonadPlus(..) , Control.Monad.filterM , Control.Monad.foldM , Control.Monad.foldM_ , Control.Monad.forever , Control.Monad.guard , Control.Monad.join , Control.Monad.liftM , Control.Monad.liftM2 , Control.Monad.replicateM_ , Control.Monad.unless , Control.Monad.when , Control.Monad.zipWithM , Control.Monad.zipWithM_ , (Control.Monad.<$!>) , (Control.Monad.<=<) , (Control.Monad.=<<) , (Control.Monad.>=>) , Control.Monad.Catch.MonadThrow(..) , Control.Monad.Reader.MonadReader , Control.Monad.Reader.MonadTrans(..) , Control.Monad.Reader.Reader , Control.Monad.Reader.ReaderT(..) , Control.Monad.Reader.ask , Control.Monad.Reader.asks , Control.Monad.Reader.local , Control.Monad.Reader.runReader , Data.Bool.Bool(..) , Data.Bool.bool , Data.Bool.not , Data.Bool.otherwise , (Data.Bool.&&) , (Data.Bool.||) , Data.ByteString.ByteString , Data.ByteString.Builder.Builder , Data.ByteString.Short.ShortByteString , Data.ByteString.Short.toShort , Data.ByteString.Short.fromShort , Data.Char.Char , Data.Data.Data(..) , Data.Either.Either(..) , Data.Either.either , Data.Either.isLeft , Data.Either.isRight , Data.Either.lefts , Data.Either.partitionEithers , Data.Either.rights , Data.Eq.Eq(..) 
, Data.Foldable.Foldable , Data.Foldable.all , Data.Foldable.and , Data.Foldable.any , Data.Foldable.asum , Data.Foldable.concat , Data.Foldable.concatMap , Data.Foldable.elem , Data.Foldable.fold , Data.Foldable.foldMap , Data.Foldable.foldl' , Data.Foldable.foldr , Data.Foldable.forM_ , Data.Foldable.for_ , Data.Foldable.length , Data.Foldable.mapM_ , Data.Foldable.msum , Data.Foldable.notElem , Data.Foldable.null , Data.Foldable.or , Data.Foldable.product , Data.Foldable.sequenceA_ , Data.Foldable.sequence_ , Data.Foldable.sum , Data.Foldable.toList , Data.Foldable.traverse_ , Data.Function.const , Data.Function.fix , Data.Function.flip , Data.Function.id , Data.Function.on , (Data.Function.$) , (Data.Function.&) , (Data.Function..) , Data.Functor.Functor(..) , Data.Functor.void , (Data.Functor.$>) , (Data.Functor.<$>) #if MIN_VERSION_base(4, 11, 0) , (Data.Functor.<&>) #else , (<&>) #endif , Data.Functor.Const.Const(..) , Data.Functor.Identity.Identity(..) , Data.Hashable.Hashable , Data.HashMap.Strict.HashMap , Data.HashSet.HashSet , Data.Int.Int , Data.Int.Int8 , Data.Int.Int16 , Data.Int.Int32 , Data.Int.Int64 , Data.IntMap.Strict.IntMap , Data.IntSet.IntSet , Data.List.break , Data.List.drop , Data.List.dropWhile , Data.List.filter , Data.List.lines , Data.List.lookup , Data.List.map , Data.List.replicate , Data.List.reverse , Data.List.span , Data.List.take , Data.List.takeWhile , Data.List.unlines , Data.List.unwords , Data.List.words , Data.List.zip , (Data.List.++) , Data.Map.Strict.Map , Data.Maybe.Maybe(..) , Data.Maybe.catMaybes , Data.Maybe.fromMaybe , Data.Maybe.isJust , Data.Maybe.isNothing , Data.Maybe.listToMaybe , Data.Maybe.mapMaybe , Data.Maybe.maybe , Data.Maybe.maybeToList , Data.Monoid.Monoid (..) , Data.Ord.Ord(..) , Data.Ord.Ordering(..) , Data.Ord.comparing , Data.Proxy.Proxy(..) , Data.Semigroup.Semigroup (..) , Data.Set.Set , Data.String.IsString(..) , Data.Text.Text , Data.Text.Encoding.decodeUtf8' , Data.Text.Encoding.decodeUtf8With , Data.Text.Encoding.encodeUtf8 , Data.Text.Encoding.encodeUtf8Builder , Data.Text.Encoding.Error.UnicodeException(..) , Data.Text.Encoding.Error.lenientDecode , Data.Traversable.Traversable(..) , Data.Traversable.for , Data.Traversable.forM , Data.Vector.Vector , Data.Void.Void , Data.Void.absurd , Data.Word.Word , Data.Word.Word8 , Data.Word.Word16 , Data.Word.Word32 , Data.Word.Word64 , Data.Word.byteSwap16 , Data.Word.byteSwap32 , Data.Word.byteSwap64 , Foreign.Storable.Storable , GHC.Generics.Generic , GHC.Stack.HasCallStack , Numeric.Natural.Natural , Prelude.Bounded (..) , Prelude.Double , Prelude.Enum , Prelude.FilePath , Prelude.Float , Prelude.Floating (..) , Prelude.Fractional (..) , Prelude.IO , Prelude.Integer , Prelude.Integral (..) , Prelude.Num (..) , Prelude.Rational , Prelude.Real (..) , Prelude.RealFloat (..) , Prelude.RealFrac (..) , Prelude.Show , Prelude.String , Prelude.asTypeOf , Prelude.curry , Prelude.error , Prelude.even , Prelude.fromEnum , Prelude.fromIntegral , Prelude.fst , Prelude.gcd , Prelude.lcm , Prelude.odd , Prelude.realToFrac , Prelude.seq , Prelude.show , Prelude.snd , Prelude.subtract , Prelude.uncurry , Prelude.undefined , (Prelude.$!) , (Prelude.^) , (Prelude.^^) , System.Exit.ExitCode(..) , Text.Read.Read , Text.Read.readMaybe -- * Primitive , PrimMonad (..) 
-- * Unbox , Unbox ) where import Control.Applicative (Applicative) import Control.Monad (Monad (..), liftM, (<=<)) import Control.Monad.Catch (MonadThrow) import Control.Monad.Primitive (PrimMonad (..)) import Control.Monad.Reader (MonadReader, ReaderT (..), ask, asks) import Control.Monad.State (MonadState(..)) import Control.Monad.Writer (MonadWriter (..)) import Data.Bool (otherwise) import Data.ByteString (ByteString) import Data.ByteString.Builder (Builder) import Data.Either (Either (..)) import Data.Foldable (foldMap) import Data.Function (flip, ($), (.)) import Data.Functor (Functor (..)) import Data.Int (Int) import Data.Maybe (Maybe, catMaybes, fromMaybe) import Data.Monoid (Monoid (..)) import Data.Ord (Ord) import Data.Semigroup (Semigroup (..)) import Data.String (IsString (..)) import Data.Text (Text) import Data.Text.Encoding (decodeUtf8', decodeUtf8With, encodeUtf8, encodeUtf8Builder) import Data.Text.Encoding.Error (UnicodeException, lenientDecode) import Data.Traversable (Traversable (..)) import Prelude (FilePath, IO, Show (..)) import UnliftIO import qualified UnliftIO.Concurrent import Data.Vector.Unboxed.Mutable (Unbox) -- Reexports import qualified Control.Applicative import qualified Control.Arrow import qualified Control.Category import qualified Control.DeepSeq import qualified Control.Monad import qualified Control.Monad.Catch import qualified Control.Monad.Reader import qualified Data.Bool import qualified Data.ByteString.Short import qualified Data.Char import qualified Data.Data import qualified Data.Either import qualified Data.Eq import qualified Data.Foldable import qualified Data.Function import qualified Data.Functor import qualified Data.Functor.Const import qualified Data.Functor.Identity import qualified Data.Hashable import qualified Data.HashMap.Strict import qualified Data.HashSet import qualified Data.Int import qualified Data.IntMap.Strict import qualified Data.IntSet import qualified Data.List import qualified Data.Map.Strict import qualified Data.Maybe import qualified Data.Ord import qualified Data.Proxy import qualified Data.Set import qualified Data.Text.Encoding.Error import qualified Data.Traversable import qualified Data.Vector import qualified Data.Void import qualified Data.Word import qualified Foreign.Storable import qualified GHC.Generics import qualified GHC.Stack import qualified Numeric.Natural import qualified Prelude import qualified System.Exit import qualified Text.Read yieldThread :: MonadIO m => m () yieldThread = UnliftIO.Concurrent.yield {-# INLINE yieldThread #-} #if !MIN_VERSION_base(4, 11, 0) (<&>) :: Functor f => f a -> (a -> b) -> f b as <&> f = f Data.Functor.<$> as infixl 1 <&> #endif rio-0.1.8.0/src/RIO/Prelude/Renames.hs0000644000000000000000000000154013253417303015404 0ustar0000000000000000{-# LANGUAGE ConstraintKinds #-} module RIO.Prelude.Renames ( sappend , LByteString , LText , UVector , SVector , GVector , toStrictBytes , fromStrictBytes ) where import RIO.Prelude.Reexports import qualified Data.ByteString.Lazy as BL import qualified Data.Vector.Generic as GVector import qualified Data.Vector.Storable as SVector import qualified Data.Vector.Unboxed as UVector import qualified Data.Text.Lazy as TL import qualified Data.Semigroup sappend :: Semigroup s => s -> s -> s sappend = (Data.Semigroup.<>) type UVector = UVector.Vector type SVector = SVector.Vector type GVector = GVector.Vector type LByteString = BL.ByteString type LText = TL.Text toStrictBytes :: LByteString -> ByteString toStrictBytes = BL.toStrict 
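-- A quick sketch of how this pair of conversions fits together (@bs@ stands
-- for an arbitrary strict 'ByteString'):
--
-- > toStrictBytes (fromStrictBytes bs) == bs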
fromStrictBytes :: ByteString -> LByteString fromStrictBytes = BL.fromStrict rio-0.1.8.0/src/RIO/Prelude/RIO.hs0000644000000000000000000001026713402156770014455 0ustar0000000000000000{-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE UndecidableInstances #-} module RIO.Prelude.RIO ( RIO (..) , runRIO , liftRIO -- * SomeRef for Writer/State interfaces , SomeRef , HasStateRef (..) , HasWriteRef (..) , newSomeRef , newUnboxedSomeRef , readSomeRef , writeSomeRef , modifySomeRef ) where import GHC.Exts (RealWorld) import RIO.Prelude.Lens import RIO.Prelude.URef import RIO.Prelude.Reexports import Control.Monad.State (MonadState(..)) import Control.Monad.Writer (MonadWriter(..)) -- | The Reader+IO monad. This is different from a 'ReaderT' because: -- -- * It's not a transformer, it hardcodes IO for simpler usage and -- error messages. -- -- * Instances of typeclasses like 'MonadLogger' are implemented using -- classes defined on the environment, instead of using an -- underlying monad. newtype RIO env a = RIO { unRIO :: ReaderT env IO a } deriving (Functor,Applicative,Monad,MonadIO,MonadReader env,MonadThrow) runRIO :: MonadIO m => env -> RIO env a -> m a runRIO env (RIO (ReaderT f)) = liftIO (f env) liftRIO :: (MonadIO m, MonadReader env m) => RIO env a -> m a liftRIO rio = do env <- ask runRIO env rio instance MonadUnliftIO (RIO env) where askUnliftIO = RIO $ ReaderT $ \r -> withUnliftIO $ \u -> return (UnliftIO (unliftIO u . flip runReaderT r . unRIO)) instance PrimMonad (RIO env) where type PrimState (RIO env) = PrimState IO primitive = RIO . ReaderT . const . primitive -- | Abstraction over how to read from and write to a mutable reference -- -- @since 0.1.4.0 data SomeRef a = SomeRef !(IO a) !(a -> IO ()) -- | Read from a SomeRef -- -- @since 0.1.4.0 readSomeRef :: MonadIO m => SomeRef a -> m a readSomeRef (SomeRef x _) = liftIO x -- | Write to a SomeRef -- -- @since 0.1.4.0 writeSomeRef :: MonadIO m => SomeRef a -> a -> m () writeSomeRef (SomeRef _ x) = liftIO . 
x -- | Modify a SomeRef -- This function is subject to change due to the lack of atomic operations -- -- @since 0.1.4.0 modifySomeRef :: MonadIO m => SomeRef a -> (a -> a) -> m () modifySomeRef (SomeRef read' write) f = liftIO $ (f <$> read') >>= write ioRefToSomeRef :: IORef a -> SomeRef a ioRefToSomeRef ref = do SomeRef (readIORef ref) (\val -> modifyIORef' ref (\_ -> val)) uRefToSomeRef :: Unbox a => URef RealWorld a -> SomeRef a uRefToSomeRef ref = do SomeRef (readURef ref) (writeURef ref) -- | Environment values with stateful capabilities to SomeRef -- -- @since 0.1.4.0 class HasStateRef s env | env -> s where stateRefL :: Lens' env (SomeRef s) -- | Identity state reference where the SomeRef is the env -- -- @since 0.1.4.0 instance HasStateRef a (SomeRef a) where stateRefL = lens id (\_ x -> x) -- | Environment values with writing capabilities to SomeRef -- -- @since 0.1.4.0 class HasWriteRef w env | env -> w where writeRefL :: Lens' env (SomeRef w) -- | Identity write reference where the SomeRef is the env -- -- @since 0.1.4.0 instance HasWriteRef a (SomeRef a) where writeRefL = lens id (\_ x -> x) instance HasStateRef s env => MonadState s (RIO env) where get = do ref <- view stateRefL liftIO $ readSomeRef ref put st = do ref <- view stateRefL liftIO $ writeSomeRef ref st instance (Monoid w, HasWriteRef w env) => MonadWriter w (RIO env) where tell value = do ref <- view writeRefL liftIO $ modifySomeRef ref (`mappend` value) listen action = do w1 <- view writeRefL >>= liftIO . readSomeRef a <- action w2 <- do refEnv <- view writeRefL v <- liftIO $ readSomeRef refEnv _ <- liftIO $ writeSomeRef refEnv w1 return v return (a, w2) pass action = do (a, transF) <- action ref <- view writeRefL liftIO $ modifySomeRef ref transF return a -- | create a new boxed SomeRef -- -- @since 0.1.4.0 newSomeRef :: MonadIO m => a -> m (SomeRef a) newSomeRef a = do ioRefToSomeRef <$> newIORef a -- | create a new unboxed SomeRef -- -- @since 0.1.4.0 newUnboxedSomeRef :: (MonadIO m, Unbox a) => a -> m (SomeRef a) newUnboxedSomeRef a = uRefToSomeRef <$> (liftIO $ newURef a) rio-0.1.8.0/src/RIO/Prelude/Text.hs0000644000000000000000000000063313256141604014741 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} module RIO.Prelude.Text ( decodeUtf8Lenient , tshow ) where import qualified Data.Text as T import Data.Text.Encoding (decodeUtf8With) import RIO.Prelude.Reexports import Data.Text.Encoding.Error (lenientDecode) tshow :: Show a => a -> Text tshow = T.pack . 
show decodeUtf8Lenient :: ByteString -> Text decodeUtf8Lenient = decodeUtf8With lenientDecode rio-0.1.8.0/src/RIO/Prelude/Trace.hs0000644000000000000000000001317213253760711015060 0ustar0000000000000000module RIO.Prelude.Trace ( -- * Text trace , traceId , traceIO , traceM , traceEvent , traceEventIO , traceMarker , traceMarkerIO , traceStack -- * Show , traceShow , traceShowId , traceShowIO , traceShowM , traceShowEvent , traceShowEventIO , traceShowMarker , traceShowMarkerIO , traceShowStack -- * Display , traceDisplay , traceDisplayId , traceDisplayIO , traceDisplayM , traceDisplayEvent , traceDisplayEventIO , traceDisplayMarker , traceDisplayMarkerIO , traceDisplayStack ) where import qualified Debug.Trace as Trace import Control.Monad.IO.Class(MonadIO(..)) import RIO.Prelude.Display import RIO.Text (Text) import qualified RIO.Text as Text ---------------------------------------------------- -- Text ---------------------------------------------------- {-# WARNING trace "Trace statement left in code" #-} -- | @since 0.1.0.0 trace :: Text -> a -> a trace = Trace.trace . Text.unpack {-# WARNING traceId "Trace statement left in code" #-} -- | @since 0.1.0.0 traceId :: Text -> Text traceId str = Trace.trace (Text.unpack str) str {-# WARNING traceIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceIO :: MonadIO m => Text -> m () traceIO = liftIO . Trace.traceIO . Text.unpack {-# WARNING traceM "Trace statement left in code" #-} -- | @since 0.1.0.0 traceM :: Applicative f => Text -> f () traceM = Trace.traceM . Text.unpack {-# WARNING traceEvent "Trace statement left in code" #-} -- | @since 0.1.0.0 traceEvent :: Text -> a -> a traceEvent = Trace.traceEvent . Text.unpack {-# WARNING traceEventIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceEventIO :: MonadIO m => Text -> m () traceEventIO = liftIO . Trace.traceEventIO . Text.unpack {-# WARNING traceMarker "Trace statement left in code" #-} -- | @since 0.1.0.0 traceMarker :: Text -> a -> a traceMarker = Trace.traceMarker . Text.unpack {-# WARNING traceMarkerIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceMarkerIO :: MonadIO m => Text -> m () traceMarkerIO = liftIO . Trace.traceMarkerIO . Text.unpack {-# WARNING traceStack "Trace statement left in code" #-} -- | @since 0.1.0.0 traceStack :: Text -> a -> a traceStack = Trace.traceStack . Text.unpack ---------------------------------------------------- -- Show ---------------------------------------------------- {-# WARNING traceShow "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShow :: Show a => a -> b -> b traceShow = Trace.traceShow {-# WARNING traceShowId "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowId :: Show a => a -> a traceShowId = Trace.traceShowId {-# WARNING traceShowIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowIO :: (Show a, MonadIO m) => a -> m () traceShowIO = liftIO . Trace.traceIO . show {-# WARNING traceShowM "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowM :: (Show a, Applicative f) => a -> f () traceShowM = Trace.traceM . show {-# WARNING traceShowEvent "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowEvent :: Show a => a -> b -> b traceShowEvent = Trace.traceEvent . show {-# WARNING traceShowEventIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowEventIO :: (Show a, MonadIO m) => a -> m () traceShowEventIO = liftIO . Trace.traceEventIO . 
show {-# WARNING traceShowMarker "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowMarker :: Show a => a -> b -> b traceShowMarker = Trace.traceMarker . show {-# WARNING traceShowMarkerIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowMarkerIO :: (Show a, MonadIO m) => a -> m () traceShowMarkerIO = liftIO . Trace.traceMarkerIO . show {-# WARNING traceShowStack "Trace statement left in code" #-} -- | @since 0.1.0.0 traceShowStack :: Show a => a -> b -> b traceShowStack = Trace.traceStack . show ---------------------------------------------------- -- Display ---------------------------------------------------- {-# WARNING traceDisplay "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplay :: Display a => a -> b -> b traceDisplay = trace . utf8BuilderToText . display {-# WARNING traceDisplayId "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayId :: Display a => a -> a traceDisplayId x = traceDisplay x x {-# WARNING traceDisplayIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayIO :: (Display a, MonadIO m) => a -> m () traceDisplayIO = traceIO . utf8BuilderToText . display {-# WARNING traceDisplayM "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayM :: (Display a, Applicative f) => a -> f () traceDisplayM = traceM . utf8BuilderToText . display {-# WARNING traceDisplayEvent "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayEvent :: Display a => a -> b -> b traceDisplayEvent = traceEvent . utf8BuilderToText . display {-# WARNING traceDisplayEventIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayEventIO :: (Display a, MonadIO m) => a -> m () traceDisplayEventIO = traceEventIO . utf8BuilderToText . display {-# WARNING traceDisplayMarker "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayMarker :: Display a => a -> b -> b traceDisplayMarker = traceMarker . utf8BuilderToText . display {-# WARNING traceDisplayMarkerIO "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayMarkerIO :: (Display a, MonadIO m) => a -> m () traceDisplayMarkerIO = traceMarkerIO . utf8BuilderToText . display {-# WARNING traceDisplayStack "Trace statement left in code" #-} -- | @since 0.1.0.0 traceDisplayStack :: Display a => a -> b -> b traceDisplayStack = traceStack . utf8BuilderToText . display rio-0.1.8.0/src/RIO/Prelude/URef.hs0000644000000000000000000000343313346146472014667 0ustar0000000000000000module RIO.Prelude.URef ( -- * Unboxed references Unbox , URef , IOURef , newURef , readURef , writeURef , modifyURef ) where import RIO.Prelude.Reexports import qualified Data.Vector.Unboxed.Mutable as MUVector -- | An unboxed reference. This works like an 'IORef', but the data is -- stored in a bytearray instead of a heap object, avoiding -- significant allocation overhead in some cases. For a concrete -- example, see this Stack Overflow question: -- . -- -- The first parameter is the state token type, the same as would be -- used for the 'ST' monad. If you're using an 'IO'-based monad, you -- can use the convenience 'IOURef' type synonym instead. -- -- @since 0.0.2.0 newtype URef s a = URef (MUVector.MVector s a) -- | Helpful type synonym for using a 'URef' from an 'IO'-based stack. 
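--
-- A short sketch in an 'IO'-based context (the counter is illustrative):
--
-- > counter <- newURef (0 :: Int)   -- counter :: IOURef Int
-- > modifyURef counter (+ 1)
-- > readURef counter                -- yields 1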
-- -- @since 0.0.2.0 type IOURef = URef (PrimState IO) -- | Create a new 'URef' -- -- @since 0.0.2.0 newURef :: (PrimMonad m, Unbox a) => a -> m (URef (PrimState m) a) newURef a = fmap URef (MUVector.replicate 1 a) -- | Read the value in a 'URef' -- -- @since 0.0.2.0 readURef :: (PrimMonad m, Unbox a) => URef (PrimState m) a -> m a readURef (URef v) = MUVector.unsafeRead v 0 -- | Write a value into a 'URef'. Note that this action is strict, and -- will force evalution of the value. -- -- @since 0.0.2.0 writeURef :: (PrimMonad m, Unbox a) => URef (PrimState m) a -> a -> m () writeURef (URef v) = MUVector.unsafeWrite v 0 -- | Modify a value in a 'URef'. Note that this action is strict, and -- will force evaluation of the result value. -- -- @since 0.0.2.0 modifyURef :: (PrimMonad m, Unbox a) => URef (PrimState m) a -> (a -> a) -> m () modifyURef u f = readURef u >>= writeURef u . f rio-0.1.8.0/test/Spec.hs0000644000000000000000000000005413253417303013042 0ustar0000000000000000{-# OPTIONS_GHC -F -pgmF hspec-discover #-} rio-0.1.8.0/test/RIO/FileSpec.hs0000644000000000000000000000350413402156770014302 0ustar0000000000000000{-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE NoImplicitPrelude #-} module RIO.FileSpec where import Test.Hspec import System.FilePath (()) import UnliftIO.Temporary (withSystemTempDirectory) import RIO import qualified RIO.ByteString as BS import qualified RIO.File as SUT spec :: Spec spec = do describe "ensureFileDurable" $ do it "ensures a file is durable with an fsync" $ withSystemTempDirectory "rio" $ \dir -> do let fp = dir "ensure_file_durable" writeFileUtf8 fp "Hello World" SUT.ensureFileDurable fp contents <- BS.readFile fp contents `shouldBe` "Hello World" describe "withBinaryFileDurableAtomic" $ do context "read/write" $ do it "works correctly" $ do withSystemTempDirectory "rio" $ \dir -> do let fp = dir "ensure_file_durable_atomic" writeFileUtf8 fp "Hello World" SUT.withBinaryFileDurableAtomic fp ReadWriteMode $ \h -> do input <- BS.hGetLine h input `shouldBe` "Hello World" BS.hPut h "Goodbye World" context "happy path" $ do it "works the same as withFile" $ do withSystemTempDirectory "rio" $ \dir -> do let fp = dir "with_file_durable_atomic" SUT.withBinaryFileDurableAtomic fp WriteMode $ \h -> BS.hPut h "Hello World" contents <- BS.readFile fp contents `shouldBe` "Hello World" describe "withBinaryFileDurable" $ do context "happy path" $ do it "works the same as withFile" $ do withSystemTempDirectory "rio" $ \dir -> do let fp = dir "with_file_durable" SUT.withBinaryFileDurable fp WriteMode $ \h -> BS.hPut h "Hello World" contents <- BS.readFile fp contents `shouldBe` "Hello World" rio-0.1.8.0/test/RIO/ListSpec.hs0000644000000000000000000000213313406724606014336 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} module RIO.ListSpec where import Test.Hspec import RIO import qualified RIO.List as List data TestType = TestType { testTypeContents :: Int } deriving (Eq, Show) testTypeList :: [TestType] testTypeList = [TestType { testTypeContents = 1 }, TestType { testTypeContents = 0 }] spec :: Spec spec = do describe "dropPrefix" $ do it "present" $ List.dropPrefix "foo" "foobar" `shouldBe` "bar" it "absent" $ List.dropPrefix "bar" "foobar" `shouldBe` "foobar" describe "dropSuffix" $ do it "present" $ List.dropSuffix "bar" "foobar" `shouldBe` "foo" it "absent" $ List.dropSuffix "foo" "foobar" `shouldBe` "foobar" describe "maximumByMaybe" $ do it "should support elements that do not have an Ord instance" $ List.maximumByMaybe 
(compare `on` testTypeContents) testTypeList `shouldBe` (Just TestType { testTypeContents = 1}) describe "minimumByMaybe" $ do it "should support elements that do not have an Ord instance" $ List.minimumByMaybe (compare `on` testTypeContents) testTypeList `shouldBe` (Just TestType { testTypeContents = 0}) rio-0.1.8.0/test/RIO/LoggerSpec.hs0000644000000000000000000000404313327276360014645 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module RIO.LoggerSpec (spec) where import Test.Hspec import RIO import Data.ByteString.Builder (toLazyByteString) spec :: Spec spec = do it "sanity" $ do (ref, options) <- logOptionsMemory withLogFunc options $ \lf -> runRIO lf $ do logDebug "should not appear" logInfo "should appear" builder <- readIORef ref toLazyByteString builder `shouldBe` "should appear\n" it "sticky" $ do (ref, options) <- logOptionsMemory withLogFunc options $ \lf -> runRIO lf $ do logSticky "ABC" logDebug "should not appear" logInfo "should appear" logStickyDone "XYZ" builder <- readIORef ref toLazyByteString builder `shouldBe` "ABC\b\b\b \b\b\bshould appear\nABC\b\b\b \b\b\bXYZ\n" it "setLogMinLevelIO" $ do (ref, options) <- logOptionsMemory logLevelRef <- newIORef LevelDebug withLogFunc (options & setLogMinLevelIO (readIORef logLevelRef)) $ \lf -> runRIO lf $ do logDebug "should appear" -- reset log min level to info atomicModifyIORef' logLevelRef (\_ -> (LevelInfo, ())) logDebug "should not appear" builder <- readIORef ref toLazyByteString builder `shouldBe` "should appear\n" it "setLogVerboseFormatIO" $ do (ref, options) <- logOptionsMemory logVerboseFormatRef <- newIORef True withLogFunc (options & setLogVerboseFormatIO (readIORef logVerboseFormatRef)) $ \lf -> runRIO lf $ do logInfo "verbose log" -- reset verbose format atomicModifyIORef' logVerboseFormatRef (\_ -> (False, ())) logInfo "no verbose log" builder <- readIORef ref toLazyByteString builder `shouldBe` "[info] verbose log\nno verbose log\n" it "noLogging" $ do (ref, options) <- logOptionsMemory withLogFunc (options & setLogVerboseFormat True) $ \lf -> runRIO lf $ do logInfo "should appear" noLogging $ logInfo "should not appear" builder <- readIORef ref toLazyByteString builder `shouldBe` "[info] should appear\n" rio-0.1.8.0/test/RIO/Prelude/ExtraSpec.hs0000644000000000000000000000044513312220526016076 0ustar0000000000000000module RIO.Prelude.ExtraSpec (spec) where import RIO import Test.Hspec spec :: Spec spec = do describe "foldMapM" $ do it "sanity" $ do let helper :: Applicative f => Int -> f [Int] helper = pure . pure res <- foldMapM helper [1..10] res `shouldBe` [1..10] rio-0.1.8.0/test/RIO/Prelude/IOSpec.hs0000644000000000000000000000137613254160206015331 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE ViewPatterns #-} module RIO.Prelude.IOSpec (spec) where import RIO import Test.Hspec import Test.Hspec.QuickCheck import qualified RIO.ByteString as B import qualified RIO.Text as T spec :: Spec spec = do prop "binary file read/write" $ \(B.pack -> bs1) -> withSystemTempFile "binary-read-write" $ \fp h -> do hClose h writeFileBinary fp bs1 bs2 <- readFileBinary fp bs2 `shouldBe` bs1 -- filter our \r for Windows prop "text file read/write" $ \(T.pack . 
filter (/= '\r') -> text1) -> withSystemTempFile "binary-read-write" $ \fp h -> do hClose h writeFileUtf8 fp text1 text2 <- readFileUtf8 fp text2 `shouldBe` text1 rio-0.1.8.0/test/RIO/Prelude/RIOSpec.hs0000644000000000000000000000274413317557066015471 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE ViewPatterns #-} module RIO.Prelude.RIOSpec (spec) where import RIO import RIO.State import RIO.Writer import Test.Hspec import Test.Hspec.QuickCheck spec = do describe "RIO writer instance" $ do it "tell works" $ do ref <- newSomeRef (mempty :: Text) runRIO ref $ do tell "hello\n" tell "world\n" contents <- readSomeRef ref contents `shouldBe` "hello\nworld\n" it "listen works" $ do ref <- newSomeRef (mempty :: Text) ((), str) <- runRIO ref $ listen $ do tell "hello\n" tell "world\n" contents <- readSomeRef ref contents `shouldBe` "" str `shouldBe` "hello\nworld\n" it "pass works" $ do ref <- newSomeRef (mempty :: Text) result <- runRIO ref $ pass $ do tell "hello\n" tell "world\n" return ((), \a -> a <> "!") contents <- readSomeRef ref contents `shouldBe` "hello\nworld\n!" describe "RIO state instance" $ do it "get works" $ do ref <- newSomeRef (mempty :: Text) result <- runRIO ref $ do put "hello world" x <- get return x result `shouldBe` "hello world" it "state works" $ do ref <- newSomeRef (mempty :: Text) newRef <- newSomeRef ("Hello World!" :: Text) result <- runRIO ref $ state (\ref -> ((), "Hello World!")) contents <- readSomeRef ref contents `shouldBe` "Hello World!" rio-0.1.8.0/test/RIO/Prelude/SimpleSpec.hs0000644000000000000000000000056713312220526016251 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE ViewPatterns #-} module RIO.Prelude.SimpleSpec (spec) where import RIO import RIO.Process import Test.Hspec spec :: Spec spec = do it "logging works" $ asIO $ runSimpleApp $ logDebug "logging allowed" it "process calling works" $ asIO $ runSimpleApp $ proc "echo" ["hello"] runProcess_ rio-0.1.8.0/test/RIO/PreludeSpec.hs0000644000000000000000000000203413253417303015014 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module RIO.PreludeSpec (spec) where import Test.Hspec import RIO spec :: Spec spec = do describe "URef" $ do it "sanity" $ do ref <- newURef (0 :: Int) x <- readURef ref x `shouldBe` 0 writeURef ref 1 y <- readURef ref y `shouldBe` 1 modifyURef ref (+ 1) z <- readURef ref z `shouldBe` 2 describe "whenM" $ do it "returns True" $ do ref <- newIORef False whenM (return True) (writeIORef ref True) readIORef ref `shouldReturn` True it "returns False" $ do ref <- newIORef False whenM (return False) (writeIORef ref True) readIORef ref `shouldReturn` False describe "unlessM" $ do it "returns True" $ do ref <- newIORef False unlessM (return True) (writeIORef ref True) readIORef ref `shouldReturn` False it "returns False" $ do ref <- newIORef False unlessM (return False) (writeIORef ref True) readIORef ref `shouldReturn` True rio-0.1.8.0/test/RIO/TextSpec.hs0000644000000000000000000000076313253417303014347 0ustar0000000000000000{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module RIO.TextSpec where import Test.Hspec import RIO import qualified RIO.Text as T spec :: Spec spec = do describe "dropPrefix" $ do it "present" $ T.dropPrefix "foo" "foobar" `shouldBe` "bar" it "absent" $ T.dropPrefix "bar" "foobar" `shouldBe` "foobar" describe "dropSuffix" $ do it "present" $ T.dropSuffix "bar" "foobar" `shouldBe` 
"foo" it "absent" $ T.dropSuffix "foo" "foobar" `shouldBe` "foobar" rio-0.1.8.0/LICENSE0000644000000000000000000000204313253417303011642 0ustar0000000000000000Copyright (c) 2018 Michael Snoyman Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. rio-0.1.8.0/rio.cabal0000644000000000000000000000654413412072055012422 0ustar0000000000000000cabal-version: 1.12 -- This file has been generated from package.yaml by hpack version 0.31.1. -- -- see: https://github.com/sol/hpack -- -- hash: 03c9a11fb87d70a75dd9c78b679c652518ced81d54220e49b2e41637a463958b name: rio version: 0.1.8.0 synopsis: A standard library for Haskell description: See README and Haddocks at category: Control homepage: https://github.com/commercialhaskell/rio#readme bug-reports: https://github.com/commercialhaskell/rio/issues author: Michael Snoyman maintainer: michael@snoyman.com license: MIT license-file: LICENSE build-type: Simple extra-source-files: README.md ChangeLog.md source-repository head type: git location: https://github.com/commercialhaskell/rio library exposed-modules: RIO RIO.ByteString RIO.ByteString.Lazy RIO.ByteString.Lazy.Partial RIO.ByteString.Partial RIO.Char RIO.Char.Partial RIO.Directory RIO.File RIO.FilePath RIO.HashMap RIO.HashMap.Partial RIO.HashSet RIO.List RIO.List.Partial RIO.Map RIO.Map.Partial RIO.Map.Unchecked RIO.Partial RIO.Prelude.Simple RIO.Process RIO.Seq RIO.Set RIO.Set.Partial RIO.Set.Unchecked RIO.State RIO.Text RIO.Text.Lazy RIO.Text.Lazy.Partial RIO.Text.Partial RIO.Time RIO.Vector RIO.Vector.Boxed RIO.Vector.Boxed.Partial RIO.Vector.Boxed.Unsafe RIO.Vector.Partial RIO.Vector.Storable RIO.Vector.Storable.Partial RIO.Vector.Storable.Unsafe RIO.Vector.Unboxed RIO.Vector.Unboxed.Partial RIO.Vector.Unboxed.Unsafe RIO.Vector.Unsafe RIO.Writer other-modules: RIO.Prelude.Display RIO.Prelude.Extra RIO.Prelude.IO RIO.Prelude.Lens RIO.Prelude.Logger RIO.Prelude.Reexports RIO.Prelude.Renames RIO.Prelude.RIO RIO.Prelude.Text RIO.Prelude.Trace RIO.Prelude.URef hs-source-dirs: src/ build-depends: base >=4.9 && <10 , bytestring , containers , deepseq , directory , exceptions , filepath , hashable , microlens , mtl , primitive , process , text , time , typed-process >=0.2.2.0 , unliftio >=0.2.6.0 , unordered-containers , vector if os(windows) cpp-options: -DWINDOWS build-depends: Win32 else build-depends: unix default-language: Haskell2010 test-suite spec type: exitcode-stdio-1.0 main-is: Spec.hs other-modules: RIO.FileSpec RIO.ListSpec RIO.LoggerSpec RIO.Prelude.ExtraSpec RIO.Prelude.IOSpec RIO.Prelude.RIOSpec RIO.Prelude.SimpleSpec RIO.PreludeSpec 
RIO.TextSpec Paths_rio hs-source-dirs: test build-depends: base >=4.9 && <10 , bytestring , containers , deepseq , directory , exceptions , filepath , hashable , hspec , microlens , mtl , primitive , process , rio , text , time , typed-process >=0.2.2.0 , unliftio >=0.2.6.0 , unordered-containers , vector if os(windows) cpp-options: -DWINDOWS build-depends: Win32 else build-depends: unix default-language: Haskell2010 rio-0.1.8.0/README.md0000644000000000000000000003525013323020537012117 0ustar0000000000000000# The rio library *A standard library for Haskell* ![Rio](https://camo.githubusercontent.com/fc162fb0024699c85f00eae769085a5fe528153e/68747470733a2f2f7777772e61687374617469632e636f6d2f70686f746f732f636974792f76692d76363837315f30305f31343030783434322e6a7067) [![Build Status](https://travis-ci.org/commercialhaskell/rio.svg?branch=master)](https://travis-ci.org/commercialhaskell/rio) [![Build status](https://ci.appveyor.com/api/projects/status/n6935pmtlry77dmn?svg=true)](https://ci.appveyor.com/project/snoyberg/rio-21tpl) __NOTE__ This code is currently in prerelease status, and has been released as a tech preview. A number of us are actively working on improving the project and getting it to a useful first release. For more information, see the [description of goals](https://github.com/snoyberg/codename-karka#readme) and the [issue tracker for discussions](https://github.com/commercialhaskell/rio/issues). If you're reading this file anywhere but Github, you should probably [read the Github version instead](https://github.com/commercialhaskell/rio#readme), which will be more up to date. The goal of the `rio` library is to make it easier to adopt Haskell for writing production software. It is intended as a cross between: * Collection of well designed, trusted libraries * Useful `Prelude` replacement * A set of best practices for writing production quality Haskell code You're free to use any subset of functionality desired in your project. This README will guide you through using `rio` to its fullest extent. ## Standard library While GHC ships with a `base` library, as well as a number of other common packages like `directory` and `transformers`, there are large gaps in functionality provided by these libraries. This choice for a more minimalistic `base` is by design, but it leads to some unfortunate consequences: * For a given task, it's often unclear which is the right library to use * When writing libraries, there is often concern about adding dependencies to any libraries outside of `base`, due to creating a heavier dependency footprint * By avoiding adding dependencies, many libraries end up reimplementing the same functionality, often with incompatible types and type classes, leading to difficulty using libraries together This library attempts to define a standard library for Haskell. One immediate response may be [XKCD #927](https://xkcd.com/927/): ![XKCD Standards](https://imgs.xkcd.com/comics/standards.png) To counter that effect, this library takes a specific approach: __it reuses existing, commonly used libraries__. Instead of defining an incompatible `Map` type, for instance, we standardize on the commonly used one from the `containers` library and reexport it from this library. This library attempts to define a set of libraries as "standard," meaning they are recommended for use, and should be encouraged as dependencies for other libraries. It does this by depending on these libraries itself, and reexporting their types and functions for easy use. 
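For instance, a dependency on `rio` alone is enough to combine `Text` and `Map` in ordinary code. The snippet below is only a sketch (the function and its names are made up for illustration), but it shows the intended shape:

```haskell
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
import RIO
import qualified RIO.Map as Map

-- Look up a greeting for a user, falling back to a default.
lookupGreeting :: Map Text Text -> Text -> Text
lookupGreeting greetings name = fromMaybe "hello" (Map.lookup name greetings)
```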
Beyond the ecosystem effects we hope to achieve, this will hopefully make the user story much easier. For a new user or team trying to get started, there is an easy library to depend upon for a large percentage of common functionality. See the dependencies of this package to see the list of packages considered standard. The primary interfaces of each of these packages is exposed from this library via a `RIO.`-prefixed module reexporting its interface. ## Prelude replacement The `RIO` module works as a prelude replacement, providing more functionality and types out of the box than the standard prelude (such as common data types like `ByteString` and `Text`), as well as removing common "gotchas", like partial functions and lazy I/O. The guiding principle here is: * If something is safe to use in general and has no expected naming conflicts, expose it from `RIO` * If something should not always be used, or has naming conflicts, expose it from another module in the `RIO.` hierarchy. ## Best practices Below is a set of best practices we recommend following. You're obviously free to take any, all, or none of this. Over time, these will probably develop into much more extensive docs. Some of these design decisions will be catered to by choices in the `rio` library. For Haskellers looking for a set of best practices to follow: you've come to the right place! ### Import practices This library is intended to provide a fully loaded set of basic functionality. You should: * Enable the `NoImplicitPrelude` language extension (see below) * Add `import RIO` as your replacement prelude in all modules * Use the `RIO.`-prefixed modules as necessary, imported using the recommended qualified names in the modules themselves. For example, `import qualified RIO.ByteString as B`. See the module documentation for more information. * Infix operators may be imported unqualified, with a separate import line if necessary. For example, `import RIO.Map ((?!), (\\))`. Do this only if your module contains no overlapping infix names, regardless of qualification. For instance, if you are importing both `RIO.Map.\\` and `RIO.List.\\` do not import either one unqualified. __TODO__ In the future, we may have editor integration or external tooling to help with import management. Also, see project template comments below. ### Language extensions Very few projects these days use bare-bones Haskell 98 or 2010. Instead, almost all codebases enable some set of additional language extensions. Below is a list of extensions we recommend as a good default, in that these are: * Well accepted in the community * Cause little to no code breakage versus leaving them off * Are generally considered safe Our recommended defaults are: ``` AutoDeriveTypeable BangPatterns BinaryLiterals ConstraintKinds DataKinds DefaultSignatures DeriveDataTypeable DeriveFoldable DeriveFunctor DeriveGeneric DeriveTraversable DoAndIfThenElse EmptyDataDecls ExistentialQuantification FlexibleContexts FlexibleInstances FunctionalDependencies GADTs GeneralizedNewtypeDeriving InstanceSigs KindSignatures LambdaCase MonadFailDesugaring MultiParamTypeClasses MultiWayIf NamedFieldPuns NoImplicitPrelude OverloadedStrings PartialTypeSignatures PatternGuards PolyKinds RankNTypes RecordWildCards ScopedTypeVariables StandaloneDeriving TupleSections TypeFamilies TypeSynonymInstances ViewPatterns ``` Notes on some surprising choices: * `RecordWildCards` is really up for debate. It's widely used, but rightfully considered by many to be dangerous. 
  It remains an open question what we should do with it.
* Despite the fact that `OverloadedStrings` can break existing code, we recommend its usage to encourage avoiding the `String` data type. Also, for new code, the risk of breakage is much lower.
* `MonadFailDesugaring` helps prevent partial pattern matches in your code; see [#85](https://github.com/commercialhaskell/rio/issues/85).

__TODO__ Do we recommend setting these extensions in `package.yaml` or in the source files themselves? We need to discuss and come to a conclusion on this point: https://github.com/commercialhaskell/rio/issues/9

There are other language extensions which are perfectly fine to use as well, but are not recommended to be turned on by default:

```
CPP
TemplateHaskell
ForeignFunctionInterface
MagicHash
UnliftedFFITypes
TypeOperators
UnboxedTuples
PackageImports
QuasiQuotes
DeriveAnyClass
DeriveLift
StaticPointers
```

### GHC Options

We recommend using these GHC compiler warning flags on all projects, to catch problems that might otherwise go overlooked:

* `-Wall`
* `-Wcompat`
* `-Wincomplete-record-updates`
* `-Wincomplete-uni-patterns`
* `-Wredundant-constraints`

You may add them per file, add them to your `package.yaml`, or pass them on the command line when running `ghc`. We plan to add these to the `package.yaml` of our project template once it's ready.

For code targeting production use, you should also use the flag that turns all warnings into errors, to force you to resolve the warnings before you ship your code:

* `-Werror`

Further reading: Alexis King explains why these are a good idea in [her blog post](https://lexi-lambda.github.io/blog/2018/02/10/an-opinionated-guide-to-haskell-in-2018/), which was the original inspiration for this section.

### Monads

A primary design choice you'll need to make in your code is how to structure your monads. There are many options out there, with various trade-offs. Instead of going through all of the debates, we're going to point to [an existing blog post](https://www.fpcomplete.com/blog/2017/07/the-rio-monad), and here just give recommendations.

* If your code is going to perform I/O: it should live in the `RIO` monad. `RIO` is "reader IO." It's the same as `ReaderT env IO`, but includes some helper functions in this library and leads to nicer type signatures and error messages.
* If you need to provide access to specific data to a function, do it via a typeclass constraint on the `env`, _not_ via a concrete env. For example, this is bad:

  ```haskell
  myFunction :: RIO Config Foo
  ```

  This is good:

  ```haskell
  class HasConfig env where
    configL :: Lens' env Config -- more on this in a moment

  myFunction :: HasConfig env => RIO env Foo
  ```

  Reason: by using typeclass constraints on the environment, we can easily compose multiple functions together and collect up the constraints, which wouldn't be possible with concrete environments. We _could_ go more general with mtl-style typeclasses, like `MonadReader` or `MonadHasConfig`, but `RIO` is a perfect balance point in the composability/concreteness space (see the blog post above for more details).
* When defining `Has`-style typeclasses for the environments, we use lenses (which are exposed by `RIO`) because they provide for easy composability. We also leverage superclasses wherever possible.
  As an example of how this works in practice:

  ```haskell
  -- Defined in RIO.Logger
  class HasLogFunc env where
    logFuncL :: Lens' env LogFunc

  class HasConfig env where
    configL :: Lens' env Config
  instance HasConfig Config where
    configL = id

  data Env = Env { envLogFunc :: !LogFunc, envConfig :: !Config }
  class (HasLogFunc env, HasConfig env) => HasEnv env where
    envL :: Lens' env Env
  instance HasLogFunc Env where
    logFuncL = lens envLogFunc (\x y -> x { envLogFunc = y })
  instance HasConfig Env where
    configL = lens envConfig (\x y -> x { envConfig = y })
  instance HasEnv Env where
    envL = id

  -- And then, at some other part of the code
  data SuperEnv = SuperEnv { seEnv :: !Env, seOtherStuff :: !OtherStuff }
  instance HasLogFunc SuperEnv where
    logFuncL = envL.logFuncL
  instance HasConfig SuperEnv where
    configL = envL.configL
  instance HasEnv SuperEnv where
    envL = lens seEnv (\x y -> x { seEnv = y })
  ```

  __TODO__ Open question: how do we decide when we use a `Lens'` versus just a `SimpleGetter` in these `Has` typeclasses?
* If you're writing code that you want to be usable outside of `RIO` for some reason, you should stick to the good mtl-style typeclasses: `MonadReader`, `MonadIO`, `MonadUnliftIO`, `MonadThrow`, and `PrimMonad`. It's better to use `MonadReader`+`Has` than to create new typeclasses like `MonadLogger`, though usually just sticking with the simpler `RIO env` is fine (and can easily be converted to the more general form with `liftRIO`). You should avoid using the following typeclasses (intentionally not exposed from this library): `MonadBase`, `MonadBaseControl`, `MonadCatch`, and `MonadMask`.

### Exceptions

For in-depth discussion, see [exceptions best practices](https://www.fpcomplete.com/blog/2016/11/exceptions-best-practices-haskell). The basic idea is:

* If something can fail, and you want people to deal with that failure every time (e.g., `lookup`), then return a `Maybe` or `Either` value.
* If the user will sometimes not want to deal with it, then use exceptions. In the case of pure code, use a `MonadThrow` constraint. In the case of `IO` code: use runtime exceptions via `throwIO` (works in the `RIO` monad too).
* You'll be upset and frustrated that you don't know exactly how some `IO` action can fail. Accept that pain, live with it, internalize it, use `tryAny`, and move on. It's the price we pay for async exceptions.
* Do all resource allocations with functions like `bracket` and `finally`.

It's a good idea to define an app-wide exception type:

```haskell
data AppExceptions
  = NetworkChangeError Text
  | FilePathError FilePath
  | ImpossibleError
  deriving (Typeable)

instance Exception AppExceptions

instance Show AppExceptions where
  show = \case
    NetworkChangeError err -> "network error: " <> (unpack err)
    FilePathError fp -> "error accessing filepath at: " <> fp
    ImpossibleError -> "this codepath should never have been executed. Please report a bug."
```

### Strict data fields

Make data fields strict by default, unless you have a good reason to do otherwise.

### Project template

__TODO__ In the future, we'll add a new Stack template for using this library. We'll use hpack, not cabal files, and rely on automatic exposed-module discovery.

### Safety first

This library intentionally puts safety first, and therefore avoids promoting partial functions and lazy I/O. If you think you need lazy I/O: you need a streaming data library like conduit instead.

__TODO__ Decide if we include a streaming data solution out of the box. https://github.com/commercialhaskell/rio/issues/1
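In the meantime, most everyday I/O can simply be done strictly. As a minimal sketch (the module and function names here are hypothetical, not part of `rio`), reading a whole file through `RIO.ByteString` stays strict and avoids lazy I/O entirely:

```haskell
{-# LANGUAGE NoImplicitPrelude #-}
module FileLength where

import RIO
import qualified RIO.ByteString as B

-- Read the entire file strictly: no lazy I/O, no handle left half-open.
-- For very large files you would reach for a streaming library instead.
fileLength :: MonadIO m => FilePath -> m Int
fileLength path = B.length <$> B.readFile path
```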
### When to generalize

A common question in Haskell is when you should generalize. Here are some simple guidelines.

For parametric polymorphism: _almost always_ generalize; it makes your type signatures more informative and your functions more useful. In other words, `reverse :: [a] -> [a]` is far better than `reverse :: [Int] -> [Int]`.

When it comes to typeclasses: the story is more nuanced. For typeclasses provided by `RIO`, like `Foldable` or `Traversable`, it's generally a good thing to generalize to them when possible. The real question is defining your own typeclasses. As a general rule: avoid doing so as long as possible. And _if_ you define a typeclass: make sure its usage can't lead to accidental bugs by allowing you to swap in types you didn't expect.

__TODO__ Expand, clarify, examples.

### Coding style

__TODO__ Point to coding style guidelines, and discuss [hindent](https://github.com/commercialhaskell/hindent).

### Module hierarchy

The `RIO.Prelude.` module hierarchy contains identifiers which are reexported by the `RIO` module. The reason for this is to make it easier to view the generated Haddocks. The `RIO` module itself is intended to be imported unqualified, with `NoImplicitPrelude` enabled. All other modules are _not_ reexported by the `RIO` module, and will document inside of them whether they should be imported qualified or unqualified.

rio-0.1.8.0/ChangeLog.md0000644000000000000000000000374513412071757013022 0ustar0000000000000000
# Changelog for rio

## 0.1.8.0

* Re-export `Control.Monad.State.modify`, `Control.Monad.State.modify'` and `Control.Monad.State.gets` in `RIO.State`

## 0.1.7.0

* Addition of `textDisplay` to the `Display` class.

## 0.1.6.0

* Changed `logUseColor` to default to `False` on Windows, even when verbose and on the terminal
* Add `RIO.File` module which offers a family of file handling functions (`withBinaryFileDurable`, `withBinaryFileDurableAtomic`, among others) with better durability and atomicity guarantees

## 0.1.5.0

* Re-export `Numeric.Natural.Natural` [#119](https://github.com/commercialhaskell/rio/issues/119)
* Re-export `Data.Functor.<&>` from GHC 8.4+, falling back to a local definition for `base < 4.11` [#117](https://github.com/commercialhaskell/rio/issues/117)
* Re-export `Data.Proxy.Proxy(..)`
* Re-export `fromEnum` from RIO, export `toEnum`, `read` and `fromJust` from RIO.Partial
* Add `noLogging` function to skip logging on specific sub-routines
* Re-export `Control.Category.>>>`

## 0.1.4.0

* Add `Const` and `Identity`
* Add `Reader` and `runReader`
* Add instances for `MonadWriter` and `MonadState` to `RIO` via mutable reference [#103](https://github.com/commercialhaskell/rio/issues/103)

## 0.1.3.0

* Add `newLogFunc` function to create `LogFunc` records outside of a callback scope
* Allow dynamic reloading of `logMinLevel` and `logVerboseFormat` for the `LogOptions` record
* Add `foldMapM`
* Add `headMaybe`, `lastMaybe`, `tailMaybe`, `initMaybe`, `maximumMaybe`, `minimumMaybe`, `maximumByMaybe`, `minimumByMaybe` functions to the `RIO.List` module (issue #82)
* Move the non-partial functions `scanr1` and `scanl1` from `RIO.List.Partial` to `RIO.List` (issue #82)
* Add `SimpleApp` and `runSimpleApp`
* Add `asIO`

## 0.1.2.0

* Allow setting usage of code location in the log output

## 0.1.1.0

* Move some accidentally included partial functions

## 0.1.0.0

* Initial stable release

## 0.0

__NOTE__ All releases beginning with 0.0 are considered experimental. Caveat emptor!