{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ViewPatterns #-}

#if MIN_VERSION_base(4,12,0)
{-# LANGUAGE DerivingVia #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}
#endif

-- | A PostgreSQL backend for persistent.
module Database.Persist.Postgresql
    ( withPostgresqlPool
    , withPostgresqlPoolWithVersion
    , withPostgresqlPoolWithConf
    , withPostgresqlPoolModified
    , withPostgresqlPoolModifiedWithVersion
    , withPostgresqlConn
    , withPostgresqlConnWithVersion
    , createPostgresqlPool
    , createPostgresqlPoolModified
    , createPostgresqlPoolModifiedWithVersion
    , createPostgresqlPoolTailored
    , createPostgresqlPoolWithConf
    , module Database.Persist.Sql
    , ConnectionString
    , HandleUpdateCollision
    , copyField
    , copyUnlessNull
    , copyUnlessEmpty
    , copyUnlessEq
    , excludeNotEqualToOriginal
    , PostgresConf (..)
    , PgInterval (..)
    , upsertWhere
    , upsertManyWhere
    , openSimpleConn
    , openSimpleConnWithVersion
    , getServerVersion
    , getSimpleConn
    , tableName
    , fieldName
    , mockMigration
    , migrateEnableExtension
    , PostgresConfHooks(..)
    , defaultPostgresConfHooks
    , RawPostgresql(..)
, createRawPostgresqlPool , createRawPostgresqlPoolModified , createRawPostgresqlPoolModifiedWithVersion , createRawPostgresqlPoolWithConf , createBackend ) where import qualified Database.PostgreSQL.LibPQ as LibPQ import qualified Database.PostgreSQL.Simple as PG import qualified Database.PostgreSQL.Simple.FromField as PGFF import qualified Database.PostgreSQL.Simple.Internal as PG import Database.PostgreSQL.Simple.Ok (Ok(..)) import qualified Database.PostgreSQL.Simple.Transaction as PG import qualified Database.PostgreSQL.Simple.Types as PG import Control.Arrow import Control.Exception (Exception, throw, throwIO) import Control.Monad import Control.Monad.Except import Control.Monad.IO.Unlift (MonadIO(..), MonadUnliftIO) import Control.Monad.Logger (MonadLoggerIO, runNoLoggingT) import Control.Monad.Trans.Class (lift) import Control.Monad.Trans.Reader (ReaderT(..), asks, runReaderT) #if !MIN_VERSION_base(4,12,0) import Control.Monad.Trans.Reader (withReaderT) #endif import Control.Monad.Trans.Writer (WriterT(..), runWriterT) import qualified Data.List.NonEmpty as NEL import Data.Proxy (Proxy(..)) import Data.Acquire (Acquire, mkAcquire, with) import Data.Aeson import Data.Aeson.Types (modifyFailure) import qualified Data.Attoparsec.Text as AT import Data.ByteString (ByteString) import qualified Data.ByteString.Char8 as B8 import Data.Conduit import qualified Data.Conduit.List as CL import Data.Data (Data) import Data.Either (partitionEithers) import Data.Function (on) import Data.Int (Int64) import Data.IORef import Data.List as List (find, foldl', groupBy, sort) import qualified Data.List as List import Data.List.NonEmpty (NonEmpty) import qualified Data.Map as Map import Data.Maybe import Data.Monoid ((<>)) import qualified Data.Monoid as Monoid import Data.Pool (Pool) import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.Encoding as T import qualified Data.Text.IO as T import Data.Text.Read (rational) import System.Environment (getEnvironment) #if MIN_VERSION_base(4,12,0) import Database.Persist.Compatible #endif import qualified Data.Vault.Strict as Vault import Database.Persist.Postgresql.Internal import Database.Persist.Sql import qualified Database.Persist.Sql.Util as Util import Database.Persist.SqlBackend import Database.Persist.SqlBackend.StatementCache (StatementCache, mkSimpleStatementCache, mkStatementCache) import System.IO.Unsafe (unsafePerformIO) -- | A @libpq@ connection string. A simple example of connection -- string would be @\"host=localhost port=5432 user=test -- dbname=test password=test\"@. Please read libpq's -- documentation at -- -- for more details on how to create such strings. type ConnectionString = ByteString -- | PostgresServerVersionError exception. This is thrown when persistent -- is unable to find the version of the postgreSQL server. data PostgresServerVersionError = PostgresServerVersionError String instance Show PostgresServerVersionError where show (PostgresServerVersionError uniqueMsg) = "Unexpected PostgreSQL server version, got " <> uniqueMsg instance Exception PostgresServerVersionError -- | Create a PostgreSQL connection pool and run the given action. The pool is -- properly released after the action finishes using it. Note that you should -- not use the given 'ConnectionPool' outside the action since it may already -- have been released. -- The provided action should use 'runSqlConn' and *not* 'runReaderT' because -- the former brackets the database action with transaction begin/commit. 
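--
-- As a minimal, hedged sketch of typical usage (the connection parameters
-- are placeholders, and @migrateAll@ stands in for a migration generated by
-- persistent's quasi-quoter; neither is part of this module):
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- > import Control.Monad.IO.Class (liftIO)
-- > import Control.Monad.Logger (runStderrLoggingT)
-- > import Database.Persist.Postgresql
-- >
-- > main :: IO ()
-- > main = runStderrLoggingT $
-- >     withPostgresqlPool "host=localhost port=5432 user=test dbname=test password=test" 10 $
-- >         \pool -> liftIO $ runSqlPool (runMigration migrateAll) pool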
withPostgresqlPool :: (MonadLoggerIO m, MonadUnliftIO m)
                   => ConnectionString
                   -- ^ Connection string to the database.
                   -> Int
                   -- ^ Number of connections to be kept open in
                   -- the pool.
                   -> (Pool SqlBackend -> m a)
                   -- ^ Action to be executed that uses the
                   -- connection pool.
                   -> m a
withPostgresqlPool ci = withPostgresqlPoolWithVersion getServerVersion ci

-- | Same as 'withPostgresqlPool', but takes a callback for obtaining
-- the server version (to work around an Amazon Redshift bug).
--
-- @since 2.6.2
withPostgresqlPoolWithVersion :: (MonadUnliftIO m, MonadLoggerIO m)
                              => (PG.Connection -> IO (Maybe Double))
                              -- ^ Action to perform to get the server version.
                              -> ConnectionString
                              -- ^ Connection string to the database.
                              -> Int
                              -- ^ Number of connections to be kept open in
                              -- the pool.
                              -> (Pool SqlBackend -> m a)
                              -- ^ Action to be executed that uses the
                              -- connection pool.
                              -> m a
withPostgresqlPoolWithVersion getVerDouble ci = do
    let getVer = oldGetVersionToNew getVerDouble
    withSqlPool $ open' (const $ return ()) getVer id ci

-- | Same as 'withPostgresqlPool', but can be configured with 'PostgresConf' and 'PostgresConfHooks'.
--
-- @since 2.11.0.0
withPostgresqlPoolWithConf :: (MonadUnliftIO m, MonadLoggerIO m)
                           => PostgresConf
                           -- ^ Configuration for connecting to Postgres
                           -> PostgresConfHooks
                           -- ^ Record of callback functions
                           -> (Pool SqlBackend -> m a)
                           -- ^ Action to be executed that uses the
                           -- connection pool.
                           -> m a
withPostgresqlPoolWithConf conf hooks = do
    let getVer = pgConfHooksGetServerVersion hooks
        modConn = pgConfHooksAfterCreate hooks
    let logFuncToBackend = open' modConn getVer id (pgConnStr conf)
    withSqlPoolWithConfig logFuncToBackend (postgresConfToConnectionPoolConfig conf)

-- | Same as 'withPostgresqlPool', but with the 'createPostgresqlPoolModified'
-- feature.
--
-- @since 2.13.5.0
withPostgresqlPoolModified
    :: (MonadUnliftIO m, MonadLoggerIO m)
    => (PG.Connection -> IO ()) -- ^ Action to perform after connection is created.
    -> ConnectionString -- ^ Connection string to the database.
    -> Int -- ^ Number of connections to be kept open in the pool.
    -> (Pool SqlBackend -> m t)
    -> m t
withPostgresqlPoolModified = withPostgresqlPoolModifiedWithVersion getServerVersion

-- | Same as 'withPostgresqlPool', but with the
-- 'createPostgresqlPoolModifiedWithVersion' feature.
--
-- @since 2.13.5.0
withPostgresqlPoolModifiedWithVersion
    :: (MonadUnliftIO m, MonadLoggerIO m)
    => (PG.Connection -> IO (Maybe Double)) -- ^ Action to perform to get the server version.
    -> (PG.Connection -> IO ()) -- ^ Action to perform after connection is created.
    -> ConnectionString -- ^ Connection string to the database.
    -> Int -- ^ Number of connections to be kept open in the pool.
    -> (Pool SqlBackend -> m t)
    -> m t
withPostgresqlPoolModifiedWithVersion getVerDouble modConn ci = do
    withSqlPool (open' modConn (oldGetVersionToNew getVerDouble) id ci)

-- | Create a PostgreSQL connection pool. Note that it's your
-- responsibility to properly close the connection pool when
-- unneeded. Use 'withPostgresqlPool' for an automatic resource
-- control.
createPostgresqlPool :: (MonadUnliftIO m, MonadLoggerIO m)
                     => ConnectionString
                     -- ^ Connection string to the database.
                     -> Int
                     -- ^ Number of connections to be kept open
                     -- in the pool.
                     -> m (Pool SqlBackend)
createPostgresqlPool = createPostgresqlPoolModified (const $ return ())

-- | Same as 'createPostgresqlPool', but additionally takes a callback function
-- for some connection-specific tweaking to be performed after connection
-- creation.
-- This could be used, for example, to change the schema. For more
-- information, see:
--
--
--
-- @since 2.1.3
createPostgresqlPoolModified
    :: (MonadUnliftIO m, MonadLoggerIO m)
    => (PG.Connection -> IO ()) -- ^ Action to perform after connection is created.
    -> ConnectionString -- ^ Connection string to the database.
    -> Int -- ^ Number of connections to be kept open in the pool.
    -> m (Pool SqlBackend)
createPostgresqlPoolModified = createPostgresqlPoolModifiedWithVersion getServerVersion

-- | Same as other similarly-named functions in this module, but takes callbacks for obtaining
-- the server version (to work around an Amazon Redshift bug) and connection-specific tweaking
-- (to change the schema).
--
-- @since 2.6.2
createPostgresqlPoolModifiedWithVersion
    :: (MonadUnliftIO m, MonadLoggerIO m)
    => (PG.Connection -> IO (Maybe Double)) -- ^ Action to perform to get the server version.
    -> (PG.Connection -> IO ()) -- ^ Action to perform after connection is created.
    -> ConnectionString -- ^ Connection string to the database.
    -> Int -- ^ Number of connections to be kept open in the pool.
    -> m (Pool SqlBackend)
createPostgresqlPoolModifiedWithVersion = createPostgresqlPoolTailored open'

-- | Same as 'createPostgresqlPoolModifiedWithVersion', but takes a custom connection-creation
-- function.
--
-- The only time you should reach for this function is if you need to write custom logic for creating
-- a connection to the database.
--
-- @since 2.13.6
createPostgresqlPoolTailored
    :: (MonadUnliftIO m, MonadLoggerIO m)
    => (  (PG.Connection -> IO ())
       -> (PG.Connection -> IO (NonEmpty Word))
       -> ((PG.Connection -> SqlBackend) -> PG.Connection -> SqlBackend)
       -> ConnectionString
       -> LogFunc
       -> IO SqlBackend
       )
    -- ^ Action that creates a postgresql connection (please see the documentation
    -- on the un-exported @open'@ function in this same module).
    -> (PG.Connection -> IO (Maybe Double)) -- ^ Action to perform to get the server version.
    -> (PG.Connection -> IO ()) -- ^ Action to perform after connection is created.
    -> ConnectionString -- ^ Connection string to the database.
    -> Int -- ^ Number of connections to be kept open in the pool.
    -> m (Pool SqlBackend)
createPostgresqlPoolTailored createConnection getVerDouble modConn ci = do
    let getVer = oldGetVersionToNew getVerDouble
    createSqlPool $ createConnection modConn getVer id ci

-- | Same as 'createPostgresqlPool', but can be configured with 'PostgresConf' and 'PostgresConfHooks'.
--
-- @since 2.11.0.0
createPostgresqlPoolWithConf
    :: (MonadUnliftIO m, MonadLoggerIO m)
    => PostgresConf -- ^ Configuration for connecting to Postgres
    -> PostgresConfHooks -- ^ Record of callback functions
    -> m (Pool SqlBackend)
createPostgresqlPoolWithConf conf hooks = do
    let getVer = pgConfHooksGetServerVersion hooks
        modConn = pgConfHooksAfterCreate hooks
    createSqlPoolWithConfig (open' modConn getVer id (pgConnStr conf)) (postgresConfToConnectionPoolConfig conf)

postgresConfToConnectionPoolConfig :: PostgresConf -> ConnectionPoolConfig
postgresConfToConnectionPoolConfig conf =
    ConnectionPoolConfig
        { connectionPoolConfigStripes = pgPoolStripes conf
        , connectionPoolConfigIdleTimeout = fromInteger $ pgPoolIdleTimeout conf
        , connectionPoolConfigSize = pgPoolSize conf
        }

-- | Same as 'withPostgresqlPool', but instead of opening a pool
-- of connections, only one connection is opened.
-- The provided action should use 'runSqlConn' and *not* 'runReaderT' because
-- the former brackets the database action with transaction begin/commit.
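--
-- A short sketch of single-connection usage, assuming the same imports and
-- placeholder @migrateAll@ as the 'withPostgresqlPool' example above:
--
-- > main :: IO ()
-- > main = runStderrLoggingT $
-- >     withPostgresqlConn "host=localhost port=5432 user=test dbname=test" $ \backend ->
-- >         liftIO $ runSqlConn (runMigration migrateAll) backend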
withPostgresqlConn :: (MonadUnliftIO m, MonadLoggerIO m) => ConnectionString -> (SqlBackend -> m a) -> m a withPostgresqlConn = withPostgresqlConnWithVersion getServerVersion -- | Same as 'withPostgresqlConn', but takes a callback for obtaining -- the server version (to work around an Amazon Redshift bug). -- -- @since 2.6.2 withPostgresqlConnWithVersion :: (MonadUnliftIO m, MonadLoggerIO m) => (PG.Connection -> IO (Maybe Double)) -> ConnectionString -> (SqlBackend -> m a) -> m a withPostgresqlConnWithVersion getVerDouble = do let getVer = oldGetVersionToNew getVerDouble withSqlConn . open' (const $ return ()) getVer id open' :: (PG.Connection -> IO ()) -> (PG.Connection -> IO (NonEmpty Word)) -> ((PG.Connection -> SqlBackend) -> PG.Connection -> backend) -- ^ How to construct the actual backend type desired. For most uses, -- this is just 'id', since the desired backend type is 'SqlBackend'. -- But some callers want a @'RawPostgresql' 'SqlBackend'@, and will -- pass in 'withRawConnection'. -> ConnectionString -> LogFunc -> IO backend open' modConn getVer constructor cstr logFunc = do conn <- PG.connectPostgreSQL cstr modConn conn ver <- getVer conn smap <- newIORef mempty return $ constructor (createBackend logFunc ver smap) conn -- | Gets the PostgreSQL server version -- -- @since 2.13.6 getServerVersion :: PG.Connection -> IO (Maybe Double) getServerVersion conn = do [PG.Only version] <- PG.query_ conn "show server_version"; let version' = rational version --- λ> rational "9.8.3" --- Right (9.8,".3") --- λ> rational "9.8.3.5" --- Right (9.8,".3.5") case version' of Right (a,_) -> return $ Just a Left err -> throwIO $ PostgresServerVersionError err getServerVersionNonEmpty :: PG.Connection -> IO (NonEmpty Word) getServerVersionNonEmpty conn = do [PG.Only version] <- PG.query_ conn "show server_version"; case AT.parseOnly parseVersion (T.pack version) of Left err -> throwIO $ PostgresServerVersionError $ "Parse failure on: " <> version <> ". Error: " <> err Right versionComponents -> case NEL.nonEmpty versionComponents of Nothing -> throwIO $ PostgresServerVersionError $ "Empty Postgres version string: " <> version Just neVersion -> pure neVersion where -- Partially copied from the `versions` package -- Typically server_version gives e.g. 12.3 -- In Persistent's CI, we get "12.4 (Debian 12.4-1.pgdg100+1)", so we ignore the trailing data. parseVersion = AT.decimal `AT.sepBy` AT.char '.' -- | Choose upsert sql generation function based on postgresql version. -- PostgreSQL version >= 9.5 supports native upsert feature, -- so depending upon that we have to choose how the sql query is generated. -- upsertFunction :: Double -> Maybe (EntityDef -> Text -> Text) upsertFunction :: a -> NonEmpty Word -> Maybe a upsertFunction f version = if (version >= postgres9dot5) then Just f else Nothing where postgres9dot5 :: NonEmpty Word postgres9dot5 = 9 NEL.:| [5] -- | If the user doesn't supply a Postgres version, we assume this version. -- -- This is currently below any version-specific features Persistent uses. minimumPostgresVersion :: NonEmpty Word minimumPostgresVersion = 9 NEL.:| [4] oldGetVersionToNew :: (PG.Connection -> IO (Maybe Double)) -> (PG.Connection -> IO (NonEmpty Word)) oldGetVersionToNew oldFn = \conn -> do mDouble <- oldFn conn case mDouble of Nothing -> pure minimumPostgresVersion Just double -> do let (major, minor) = properFraction double pure $ major NEL.:| [floor minor] -- | Generate a 'SqlBackend' from a 'PG.Connection'. 
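--
-- A sketch for code that manages the raw @postgresql-simple@ connection
-- itself (the connection string is a placeholder, and the no-op 'LogFunc'
-- simply discards log messages):
--
-- > conn <- PG.connectPostgreSQL "host=localhost port=5432 user=test dbname=test"
-- > let noLogging _ _ _ _ = pure ()
-- > backend <- openSimpleConn noLogging conn
-- > runSqlConn (runMigration migrateAll) backend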
openSimpleConn :: LogFunc -> PG.Connection -> IO SqlBackend openSimpleConn = openSimpleConnWithVersion getServerVersion -- | Generate a 'SqlBackend' from a 'PG.Connection', but takes a callback for -- obtaining the server version. -- -- @since 2.9.1 openSimpleConnWithVersion :: (PG.Connection -> IO (Maybe Double)) -> LogFunc -> PG.Connection -> IO SqlBackend openSimpleConnWithVersion getVerDouble logFunc conn = do smap <- newIORef mempty serverVersion <- oldGetVersionToNew getVerDouble conn return $ createBackend logFunc serverVersion smap conn underlyingConnectionKey :: Vault.Key PG.Connection underlyingConnectionKey = unsafePerformIO Vault.newKey {-# NOINLINE underlyingConnectionKey #-} -- | Access underlying connection, returning 'Nothing' if the 'SqlBackend' -- provided isn't backed by postgresql-simple. -- -- @since 2.13.0 getSimpleConn :: (BackendCompatible SqlBackend backend) => backend -> Maybe PG.Connection getSimpleConn = Vault.lookup underlyingConnectionKey <$> getConnVault -- | Create the backend given a logging function, server version, mutable statement cell, -- and connection. -- -- @since 2.13.6 createBackend :: LogFunc -> NonEmpty Word -> IORef (Map.Map Text Statement) -> PG.Connection -> SqlBackend createBackend logFunc serverVersion smap conn = maybe id setConnPutManySql (upsertFunction putManySql serverVersion) $ maybe id setConnUpsertSql (upsertFunction upsertSql' serverVersion) $ setConnInsertManySql insertManySql' $ maybe id setConnRepsertManySql (upsertFunction repsertManySql serverVersion) $ modifyConnVault (Vault.insert underlyingConnectionKey conn) $ mkSqlBackend MkSqlBackendArgs { connPrepare = prepare' conn , connStmtMap = smap , connInsertSql = insertSql' , connClose = PG.close conn , connMigrateSql = migrate' , connBegin = \_ mIsolation -> case mIsolation of Nothing -> PG.begin conn Just iso -> PG.beginLevel (case iso of ReadUncommitted -> PG.ReadCommitted -- PG Upgrades uncommitted reads to committed anyways ReadCommitted -> PG.ReadCommitted RepeatableRead -> PG.RepeatableRead Serializable -> PG.Serializable) conn , connCommit = const $ PG.commit conn , connRollback = const $ PG.rollback conn , connEscapeFieldName = escapeF , connEscapeTableName = escapeE . getEntityDBName , connEscapeRawName = escape , connNoLimit = "LIMIT ALL" , connRDBMS = "postgresql" , connLimitOffset = decorateSQLWithLimitOffset "LIMIT ALL" , connLogFunc = logFunc } prepare' :: PG.Connection -> Text -> IO Statement prepare' conn sql = do let query = PG.Query (T.encodeUtf8 sql) return Statement { stmtFinalize = return () , stmtReset = return () , stmtExecute = execute' conn query , stmtQuery = withStmt' conn query } insertSql' :: EntityDef -> [PersistValue] -> InsertSqlResult insertSql' ent vals = case getEntityId ent of EntityIdNaturalKey _pdef -> ISRManyKeys sql vals EntityIdField field -> ISRSingle (sql <> " RETURNING " <> escapeF (fieldDB field)) where (fieldNames, placeholders) = unzip (Util.mkInsertPlaceholders ent escapeF) sql = T.concat [ "INSERT INTO " , escapeE $ getEntityDBName ent , if null (getEntityFields ent) then " DEFAULT VALUES" else T.concat [ "(" , T.intercalate "," fieldNames , ") VALUES(" , T.intercalate "," placeholders , ")" ] ] upsertSql' :: EntityDef -> NonEmpty (FieldNameHS, FieldNameDB) -> Text -> Text upsertSql' ent uniqs updateVal = T.concat [ "INSERT INTO " , escapeE (getEntityDBName ent) , "(" , T.intercalate "," fieldNames , ") VALUES (" , T.intercalate "," placeholders , ") ON CONFLICT (" , T.intercalate "," $ map (escapeF . 
snd) (NEL.toList uniqs) , ") DO UPDATE SET " , updateVal , " WHERE " , wher , " RETURNING ??" ] where (fieldNames, placeholders) = unzip (Util.mkInsertPlaceholders ent escapeF) wher = T.intercalate " AND " $ map (singleClause . snd) $ NEL.toList uniqs singleClause :: FieldNameDB -> Text singleClause field = escapeE (getEntityDBName ent) <> "." <> (escapeF field) <> " =?" -- | SQL for inserting multiple rows at once and returning their primary keys. insertManySql' :: EntityDef -> [[PersistValue]] -> InsertSqlResult insertManySql' ent valss = ISRSingle sql where (fieldNames, placeholders)= unzip (Util.mkInsertPlaceholders ent escapeF) sql = T.concat [ "INSERT INTO " , escapeE (getEntityDBName ent) , "(" , T.intercalate "," fieldNames , ") VALUES (" , T.intercalate "),(" $ replicate (length valss) $ T.intercalate "," placeholders , ") RETURNING " , Util.commaSeparated $ NEL.toList $ Util.dbIdColumnsEsc escapeF ent ] execute' :: PG.Connection -> PG.Query -> [PersistValue] -> IO Int64 execute' conn query vals = PG.execute conn query (map P vals) withStmt' :: MonadIO m => PG.Connection -> PG.Query -> [PersistValue] -> Acquire (ConduitM () [PersistValue] m ()) withStmt' conn query vals = pull `fmap` mkAcquire openS closeS where openS = do -- Construct raw query rawquery <- PG.formatQuery conn query (map P vals) -- Take raw connection (rt, rr, rc, ids) <- PG.withConnection conn $ \rawconn -> do -- Execute query mret <- LibPQ.exec rawconn rawquery case mret of Nothing -> do merr <- LibPQ.errorMessage rawconn fail $ case merr of Nothing -> "Postgresql.withStmt': unknown error" Just e -> "Postgresql.withStmt': " ++ B8.unpack e Just ret -> do -- Check result status status <- LibPQ.resultStatus ret case status of LibPQ.TuplesOk -> return () _ -> PG.throwResultError "Postgresql.withStmt': bad result status " ret status -- Get number and type of columns cols <- LibPQ.nfields ret oids <- forM [0..cols-1] $ \col -> fmap ((,) col) (LibPQ.ftype ret col) -- Ready to go! rowRef <- newIORef (LibPQ.Row 0) rowCount <- LibPQ.ntuples ret return (ret, rowRef, rowCount, oids) let getters = map (\(col, oid) -> getGetter oid $ PG.Field rt col oid) ids return (rt, rr, rc, getters) closeS (ret, _, _, _) = LibPQ.unsafeFreeResult ret pull x = do y <- liftIO $ pullS x case y of Nothing -> return () Just z -> yield z >> pull x pullS (ret, rowRef, rowCount, getters) = do row <- atomicModifyIORef rowRef (\r -> (r+1, r)) if row == rowCount then return Nothing else fmap Just $ forM (zip getters [0..]) $ \(getter, col) -> do mbs <- LibPQ.getvalue' ret row col case mbs of Nothing -> -- getvalue' verified that the value is NULL. -- However, that does not mean that there are -- no NULL values inside the value (e.g., if -- we're dealing with an array of optional values). return PersistNull Just bs -> do ok <- PGFF.runConversion (getter mbs) conn bs `seq` case ok of Errors (exc:_) -> throw exc Errors [] -> error "Got an Errors, but no exceptions" Ok v -> return v doesTableExist :: (Text -> IO Statement) -> EntityNameDB -> IO Bool doesTableExist getter (EntityNameDB name) = do stmt <- getter sql with (stmtQuery stmt vals) (\src -> runConduit $ src .| start) where sql = "SELECT COUNT(*) FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog'" <> " AND schemaname != 'information_schema' AND tablename=?" 
vals = [PersistText name] start = await >>= maybe (error "No results when checking doesTableExist") start' start' [PersistInt64 0] = finish False start' [PersistInt64 1] = finish True start' res = error $ "doesTableExist returned unexpected result: " ++ show res finish x = await >>= maybe (return x) (error "Too many rows returned in doesTableExist") migrate' :: [EntityDef] -> (Text -> IO Statement) -> EntityDef -> IO (Either [Text] CautiousMigration) migrate' allDefs getter entity = fmap (fmap $ map showAlterDb) $ do old <- getColumns getter entity newcols' case partitionEithers old of ([], old'') -> do exists' <- if null old then doesTableExist getter name else return True return $ Right $ migrationText exists' old'' (errs, _) -> return $ Left errs where name = getEntityDBName entity (newcols', udefs, fdefs) = postgresMkColumns allDefs entity migrationText exists' old'' | not exists' = createText newcols fdefs udspair | otherwise = let (acs, ats) = getAlters allDefs entity (newcols, udspair) old' acs' = map (AlterColumn name) acs ats' = map (AlterTable name) ats in acs' ++ ats' where old' = partitionEithers old'' newcols = filter (not . safeToRemove entity . cName) newcols' udspair = map udToPair udefs -- Check for table existence if there are no columns, workaround -- for https://github.com/yesodweb/persistent/issues/152 createText newcols fdefs_ udspair = (addTable newcols entity) : uniques ++ references ++ foreignsAlt where uniques = flip concatMap udspair $ \(uname, ucols) -> [AlterTable name $ AddUniqueConstraint uname ucols] references = mapMaybe (\Column { cName, cReference } -> getAddReference allDefs entity cName =<< cReference ) newcols foreignsAlt = mapMaybe (mkForeignAlt entity) fdefs_ mkForeignAlt :: EntityDef -> ForeignDef -> Maybe AlterDB mkForeignAlt entity fdef = pure $ AlterColumn tableName_ addReference where tableName_ = getEntityDBName entity addReference = AddReference (foreignRefTableDBName fdef) constraintName childfields escapedParentFields (foreignFieldCascade fdef) constraintName = foreignConstraintNameDBName fdef (childfields, parentfields) = unzip (map (\((_,b),(_,d)) -> (b,d)) (foreignFields fdef)) escapedParentFields = map escapeF parentfields addTable :: [Column] -> EntityDef -> AlterDB addTable cols entity = AddTable $ T.concat -- Lower case e: see Database.Persist.Sql.Migration [ "CREATe TABLE " -- DO NOT FIX THE CAPITALIZATION! , escapeE name , "(" , idtxt , if null nonIdCols then "" else "," , T.intercalate "," $ map showColumn nonIdCols , ")" ] where nonIdCols = case entityPrimary entity of Just _ -> cols _ -> filter keepField cols where keepField c = Just (cName c) /= fmap fieldDB (getEntityIdField entity) && not (safeToRemove entity (cName c)) name = getEntityDBName entity idtxt = case getEntityId entity of EntityIdNaturalKey pdef -> T.concat [ " PRIMARY KEY (" , T.intercalate "," $ map (escapeF . 
fieldDB) $ NEL.toList $ compositeFields pdef , ")" ] EntityIdField field -> let defText = defaultAttribute $ fieldAttrs field sType = fieldSqlType field in T.concat [ escapeF $ fieldDB field , maySerial sType defText , " PRIMARY KEY UNIQUE" , mayDefault defText ] maySerial :: SqlType -> Maybe Text -> Text maySerial SqlInt64 Nothing = " SERIAL8 " maySerial sType _ = " " <> showSqlType sType mayDefault :: Maybe Text -> Text mayDefault def = case def of Nothing -> "" Just d -> " DEFAULT " <> d type SafeToRemove = Bool data AlterColumn = ChangeType Column SqlType Text | IsNull Column | NotNull Column | Add' Column | Drop Column SafeToRemove | Default Column Text | NoDefault Column | Update' Column Text | AddReference EntityNameDB ConstraintNameDB [FieldNameDB] [Text] FieldCascade | DropReference ConstraintNameDB deriving Show data AlterTable = AddUniqueConstraint ConstraintNameDB [FieldNameDB] | DropConstraint ConstraintNameDB deriving Show data AlterDB = AddTable Text | AlterColumn EntityNameDB AlterColumn | AlterTable EntityNameDB AlterTable deriving Show -- | Returns all of the columns in the given table currently in the database. getColumns :: (Text -> IO Statement) -> EntityDef -> [Column] -> IO [Either Text (Either Column (ConstraintNameDB, [FieldNameDB]))] getColumns getter def cols = do let sqlv = T.concat [ "SELECT " , "column_name " , ",is_nullable " , ",COALESCE(domain_name, udt_name)" -- See DOMAINS below , ",column_default " , ",generation_expression " , ",numeric_precision " , ",numeric_scale " , ",character_maximum_length " , "FROM information_schema.columns " , "WHERE table_catalog=current_database() " , "AND table_schema=current_schema() " , "AND table_name=? " ] -- DOMAINS Postgres supports the concept of domains, which are data types -- with optional constraints. An app might make an "email" domain over the -- varchar type, with a CHECK that the emails are valid In this case the -- generated SQL should use the domain name: ALTER TABLE users ALTER COLUMN -- foo TYPE email This code exists to use the domain name (email), instead -- of the underlying type (varchar). This is tested in -- EquivalentTypeTest.hs stmt <- getter sqlv let vals = [ PersistText $ unEntityNameDB $ getEntityDBName def ] columns <- with (stmtQuery stmt vals) (\src -> runConduit $ src .| processColumns .| CL.consume) let sqlc = T.concat [ "SELECT " , "c.constraint_name, " , "c.column_name " , "FROM information_schema.key_column_usage AS c, " , "information_schema.table_constraints AS k " , "WHERE c.table_catalog=current_database() " , "AND c.table_catalog=k.table_catalog " , "AND c.table_schema=current_schema() " , "AND c.table_schema=k.table_schema " , "AND c.table_name=? " , "AND c.table_name=k.table_name " , "AND c.constraint_name=k.constraint_name " , "AND NOT k.constraint_type IN ('PRIMARY KEY', 'FOREIGN KEY') " , "ORDER BY c.constraint_name, c.column_name" ] stmt' <- getter sqlc us <- with (stmtQuery stmt' vals) (\src -> runConduit $ src .| helperU) return $ columns ++ us where refMap = fmap (\cr -> (crTableName cr, crConstraintName cr)) $ Map.fromList $ List.foldl' ref [] cols where ref rs c = maybe rs (\r -> (unFieldNameDB $ cName c, r) : rs) (cReference c) getAll = CL.mapM $ \x -> pure $ case x of [PersistText con, PersistText col] -> (con, col) [PersistByteString con, PersistByteString col] -> (T.decodeUtf8 con, T.decodeUtf8 col) o -> error $ "unexpected datatype returned for postgres o="++show o helperU = do rows <- getAll .| CL.consume return $ map (Right . Right . (ConstraintNameDB . fst . 
head &&& map (FieldNameDB . snd))) $ groupBy ((==) `on` fst) rows processColumns = CL.mapM $ \x'@((PersistText cname) : _) -> do col <- liftIO $ getColumn getter (getEntityDBName def) x' (Map.lookup cname refMap) pure $ case col of Left e -> Left e Right c -> Right $ Left c -- | Check if a column name is listed as the "safe to remove" in the entity -- list. safeToRemove :: EntityDef -> FieldNameDB -> Bool safeToRemove def (FieldNameDB colName) = any (elem FieldAttrSafeToRemove . fieldAttrs) $ filter ((== FieldNameDB colName) . fieldDB) $ allEntityFields where allEntityFields = getEntityFieldsDatabase def <> case getEntityId def of EntityIdField fdef -> [fdef] _ -> [] getAlters :: [EntityDef] -> EntityDef -> ([Column], [(ConstraintNameDB, [FieldNameDB])]) -> ([Column], [(ConstraintNameDB, [FieldNameDB])]) -> ([AlterColumn], [AlterTable]) getAlters defs def (c1, u1) (c2, u2) = (getAltersC c1 c2, getAltersU u1 u2) where getAltersC [] old = map (\x -> Drop x $ safeToRemove def $ cName x) old getAltersC (new:news) old = let (alters, old') = findAlters defs def new old in alters ++ getAltersC news old' getAltersU :: [(ConstraintNameDB, [FieldNameDB])] -> [(ConstraintNameDB, [FieldNameDB])] -> [AlterTable] getAltersU [] old = map DropConstraint $ filter (not . isManual) $ map fst old getAltersU ((name, cols):news) old = case lookup name old of Nothing -> AddUniqueConstraint name cols : getAltersU news old Just ocols -> let old' = filter (\(x, _) -> x /= name) old in if sort cols == sort ocols then getAltersU news old' else DropConstraint name : AddUniqueConstraint name cols : getAltersU news old' -- Don't drop constraints which were manually added. isManual (ConstraintNameDB x) = "__manual_" `T.isPrefixOf` x getColumn :: (Text -> IO Statement) -> EntityNameDB -> [PersistValue] -> Maybe (EntityNameDB, ConstraintNameDB) -> IO (Either Text Column) getColumn getter tableName' [ PersistText columnName , PersistText isNullable , PersistText typeName , defaultValue , generationExpression , numericPrecision , numericScale , maxlen ] refName_ = runExceptT $ do defaultValue' <- case defaultValue of PersistNull -> pure Nothing PersistText t -> pure $ Just t _ -> throwError $ T.pack $ "Invalid default column: " ++ show defaultValue generationExpression' <- case generationExpression of PersistNull -> pure Nothing PersistText t -> pure $ Just t _ -> throwError $ T.pack $ "Invalid generated column: " ++ show generationExpression let typeStr = case maxlen of PersistInt64 n -> T.concat [typeName, "(", T.pack (show n), ")"] _ -> typeName t <- getType typeStr let cname = FieldNameDB columnName ref <- lift $ fmap join $ traverse (getRef cname) refName_ return Column { cName = cname , cNull = isNullable == "YES" , cSqlType = t , cDefault = fmap stripSuffixes defaultValue' , cGenerated = fmap stripSuffixes generationExpression' , cDefaultConstraintName = Nothing , cMaxLen = Nothing , cReference = fmap (\(a,b,c,d) -> ColumnReference a b (mkCascade c d)) ref } where mkCascade updText delText = FieldCascade { fcOnUpdate = parseCascade updText , fcOnDelete = parseCascade delText } parseCascade txt = case txt of "NO ACTION" -> Nothing "CASCADE" -> Just Cascade "SET NULL" -> Just SetNull "SET DEFAULT" -> Just SetDefault "RESTRICT" -> Just Restrict _ -> error $ "Unexpected value in parseCascade: " <> show txt stripSuffixes t = loop' [ "::character varying" , "::text" ] where loop' [] = t loop' (p:ps) = case T.stripSuffix p t of Nothing -> loop' ps Just t' -> t' getRef cname (_, refName') = do let sql = T.concat [ "SELECT 
DISTINCT "
                , "ccu.table_name, "
                , "tc.constraint_name, "
                , "rc.update_rule, "
                , "rc.delete_rule "
                , "FROM information_schema.constraint_column_usage ccu "
                , "INNER JOIN information_schema.key_column_usage kcu "
                , " ON ccu.constraint_name = kcu.constraint_name "
                , "INNER JOIN information_schema.table_constraints tc "
                , " ON tc.constraint_name = kcu.constraint_name "
                , "LEFT JOIN information_schema.referential_constraints AS rc"
                , " ON rc.constraint_name = ccu.constraint_name "
                , "WHERE tc.constraint_type='FOREIGN KEY' "
                , "AND kcu.ordinal_position=1 "
                , "AND kcu.table_name=? "
                , "AND kcu.column_name=? "
                , "AND tc.constraint_name=?"
                ]
        stmt <- getter sql
        cntrs <-
            with
                (stmtQuery stmt
                    [ PersistText $ unEntityNameDB tableName'
                    , PersistText $ unFieldNameDB cname
                    , PersistText $ unConstraintNameDB refName'
                    ]
                )
                (\src -> runConduit $ src .| CL.consume)
        case cntrs of
            [] ->
                return Nothing
            [[PersistText table, PersistText constraint, PersistText updRule, PersistText delRule]] ->
                return $ Just (EntityNameDB table, ConstraintNameDB constraint, updRule, delRule)
            xs ->
                error $ mconcat
                    [ "Postgresql.getColumn: error fetching constraints. Expected a single result for foreign key query for table: "
                    , T.unpack (unEntityNameDB tableName')
                    , " and column: "
                    , T.unpack (unFieldNameDB cname)
                    , " but got: "
                    , show xs
                    ]

    getType "int4" = pure SqlInt32
    getType "int8" = pure SqlInt64
    getType "varchar" = pure SqlString
    getType "text" = pure SqlString
    getType "date" = pure SqlDay
    getType "bool" = pure SqlBool
    getType "timestamptz" = pure SqlDayTime
    getType "float4" = pure SqlReal
    getType "float8" = pure SqlReal
    getType "bytea" = pure SqlBlob
    getType "time" = pure SqlTime
    getType "numeric" = getNumeric numericPrecision numericScale
    getType a = pure $ SqlOther a

    getNumeric (PersistInt64 a) (PersistInt64 b) =
        pure $ SqlNumeric (fromIntegral a) (fromIntegral b)
    getNumeric PersistNull PersistNull = throwError $ T.concat
        [ "No precision and scale were specified for the column: "
        , columnName
        , " in table: "
        , unEntityNameDB tableName'
        , ". Postgres defaults to a maximum precision of 147,455 and scale of 16,383,"
        , " which is probably not what you intended."
        , " Specify the values as numeric(total_digits, digits_after_decimal_place)."
        ]
    getNumeric a b = throwError $ T.concat
        [ "Can not get numeric field precision for the column: "
        , columnName
        , " in table: "
        , unEntityNameDB tableName'
        , ". Expected an integer for both precision and scale, "
        , "got: "
        , T.pack $ show a
        , " and "
        , T.pack $ show b
        , ", respectively."
        , " Specify the values as numeric(total_digits, digits_after_decimal_place)."
        ]
getColumn _ _ columnName _ =
    return $ Left $ T.pack $ "Invalid result from information_schema: " ++ show columnName

-- | Intelligent comparison of SQL types, to account for SqlInt32 vs SqlOther integer.
sqlTypeEq :: SqlType -> SqlType -> Bool
sqlTypeEq x y =
    let -- Non-exhaustive helper to map postgres aliases to the same name. Based on
        -- https://www.postgresql.org/docs/9.5/datatype.html.
        -- This prevents needless `ALTER TYPE`s when the type is the same.
        normalize "int8" = "bigint"
        normalize "serial8" = "bigserial"
        normalize v = v
     in
        normalize (T.toCaseFold (showSqlType x)) == normalize (T.toCaseFold (showSqlType y))

findAlters
    :: [EntityDef]
    -- ^ The list of all entity definitions that persistent is aware of.
    -> EntityDef
    -- ^ The entity definition for the entity that we're working on.
    -> Column
    -- ^ The column that we're searching for potential alterations for.
    -> [Column]
    -> ([AlterColumn], [Column])
findAlters defs edef col@(Column name isNull sqltype def _gen _defConstraintName _maxLen ref) cols =
    case List.find (\c -> cName c == name) cols of
        Nothing ->
            ([Add' col], cols)
        Just (Column _oldName isNull' sqltype' def' _gen' _defConstraintName' _maxLen' ref') ->
            let refDrop Nothing = []
                refDrop (Just ColumnReference {crConstraintName=cname}) =
                    [DropReference cname]

                refAdd Nothing = []
                refAdd (Just colRef) =
                    case find ((== crTableName colRef) . getEntityDBName) defs of
                        Just refdef
                            | Just _oldName /= fmap fieldDB (getEntityIdField edef)
                            ->
                            [AddReference
                                (crTableName colRef)
                                (crConstraintName colRef)
                                [name]
                                (NEL.toList $ Util.dbIdColumnsEsc escapeF refdef)
                                (crFieldCascade colRef)
                            ]
                        Just _ -> []
                        Nothing ->
                            error $ "could not find the entityDef for reftable[" ++ show (crTableName colRef) ++ "]"
                modRef =
                    if equivalentRef ref ref'
                        then []
                        else refDrop ref' ++ refAdd ref
                modNull = case (isNull, isNull') of
                    (True, False) -> do
                        guard $ Just name /= fmap fieldDB (getEntityIdField edef)
                        pure (IsNull col)
                    (False, True) ->
                        let up = case def of
                                Nothing -> id
                                Just s -> (:) (Update' col s)
                         in up [NotNull col]
                    _ -> []
                modType
                    | sqlTypeEq sqltype sqltype' = []
                    -- When converting from Persistent pre-2.0 databases, we
                    -- need to make sure that TIMESTAMP WITHOUT TIME ZONE is
                    -- treated as UTC.
                    | sqltype == SqlDayTime && sqltype' == SqlOther "timestamp" =
                        [ChangeType col sqltype $ T.concat
                            [ " USING "
                            , escapeF name
                            , " AT TIME ZONE 'UTC'"
                            ]]
                    | otherwise = [ChangeType col sqltype ""]
                modDef =
                    if def == def' || isJust (T.stripPrefix "nextval" =<< def')
                        then []
                        else
                            case def of
                                Nothing -> [NoDefault col]
                                Just s -> [Default col s]
                dropSafe =
                    if safeToRemove edef name
                        then [Drop col True]
                        else []
             in
                ( modRef ++ modDef ++ modNull ++ modType ++ dropSafe
                , filter (\c -> cName c /= name) cols
                )

-- We check if we should alter a foreign key. This is almost an equality check,
-- except we consider 'Nothing' and 'Just Restrict' equivalent.
equivalentRef :: Maybe ColumnReference -> Maybe ColumnReference -> Bool
equivalentRef Nothing Nothing = True
equivalentRef (Just cr1) (Just cr2) =
    crTableName cr1 == crTableName cr2
        && crConstraintName cr1 == crConstraintName cr2
        && eqCascade (fcOnUpdate $ crFieldCascade cr1) (fcOnUpdate $ crFieldCascade cr2)
        && eqCascade (fcOnDelete $ crFieldCascade cr1) (fcOnDelete $ crFieldCascade cr2)
  where
    eqCascade :: Maybe CascadeAction -> Maybe CascadeAction -> Bool
    eqCascade Nothing Nothing = True
    eqCascade Nothing (Just Restrict) = True
    eqCascade (Just Restrict) Nothing = True
    eqCascade (Just cs1) (Just cs2) = cs1 == cs2
    eqCascade _ _ = False
equivalentRef _ _ = False

-- | Get the references to be added to a table for the given column.
getAddReference
    :: [EntityDef]
    -> EntityDef
    -> FieldNameDB
    -> ColumnReference
    -> Maybe AlterDB
getAddReference allDefs entity cname cr@ColumnReference {crTableName = s, crConstraintName = constraintName} = do
    guard $ Just cname /= fmap fieldDB (getEntityIdField entity)
    pure $ AlterColumn
        table
        (AddReference s constraintName [cname] id_ (crFieldCascade cr))
  where
    table = getEntityDBName entity
    id_ =
        fromMaybe
            (error $ "Could not find ID of entity " ++ show s)
            $ do
                entDef <- find ((== s) .
getEntityDBName) allDefs return $ NEL.toList $ Util.dbIdColumnsEsc escapeF entDef showColumn :: Column -> Text showColumn (Column n nu sqlType' def gen _defConstraintName _maxLen _ref) = T.concat [ escapeF n , " " , showSqlType sqlType' , " " , if nu then "NULL" else "NOT NULL" , case def of Nothing -> "" Just s -> " DEFAULT " <> s , case gen of Nothing -> "" Just s -> " GENERATED ALWAYS AS (" <> s <> ") STORED" ] showSqlType :: SqlType -> Text showSqlType SqlString = "VARCHAR" showSqlType SqlInt32 = "INT4" showSqlType SqlInt64 = "INT8" showSqlType SqlReal = "DOUBLE PRECISION" showSqlType (SqlNumeric s prec) = T.concat [ "NUMERIC(", T.pack (show s), ",", T.pack (show prec), ")" ] showSqlType SqlDay = "DATE" showSqlType SqlTime = "TIME" showSqlType SqlDayTime = "TIMESTAMP WITH TIME ZONE" showSqlType SqlBlob = "BYTEA" showSqlType SqlBool = "BOOLEAN" -- Added for aliasing issues re: https://github.com/yesodweb/yesod/issues/682 showSqlType (SqlOther (T.toLower -> "integer")) = "INT4" showSqlType (SqlOther t) = t showAlterDb :: AlterDB -> (Bool, Text) showAlterDb (AddTable s) = (False, s) showAlterDb (AlterColumn t ac) = (isUnsafe ac, showAlter t ac) where isUnsafe (Drop _ safeRemove) = not safeRemove isUnsafe _ = False showAlterDb (AlterTable t at) = (False, showAlterTable t at) showAlterTable :: EntityNameDB -> AlterTable -> Text showAlterTable table (AddUniqueConstraint cname cols) = T.concat [ "ALTER TABLE " , escapeE table , " ADD CONSTRAINT " , escapeC cname , " UNIQUE(" , T.intercalate "," $ map escapeF cols , ")" ] showAlterTable table (DropConstraint cname) = T.concat [ "ALTER TABLE " , escapeE table , " DROP CONSTRAINT " , escapeC cname ] showAlter :: EntityNameDB -> AlterColumn -> Text showAlter table (ChangeType c t extra) = T.concat [ "ALTER TABLE " , escapeE table , " ALTER COLUMN " , escapeF (cName c) , " TYPE " , showSqlType t , extra ] showAlter table (IsNull c) = T.concat [ "ALTER TABLE " , escapeE table , " ALTER COLUMN " , escapeF (cName c) , " DROP NOT NULL" ] showAlter table (NotNull c) = T.concat [ "ALTER TABLE " , escapeE table , " ALTER COLUMN " , escapeF (cName c) , " SET NOT NULL" ] showAlter table (Add' col) = T.concat [ "ALTER TABLE " , escapeE table , " ADD COLUMN " , showColumn col ] showAlter table (Drop c _) = T.concat [ "ALTER TABLE " , escapeE table , " DROP COLUMN " , escapeF (cName c) ] showAlter table (Default c s) = T.concat [ "ALTER TABLE " , escapeE table , " ALTER COLUMN " , escapeF (cName c) , " SET DEFAULT " , s ] showAlter table (NoDefault c) = T.concat [ "ALTER TABLE " , escapeE table , " ALTER COLUMN " , escapeF (cName c) , " DROP DEFAULT" ] showAlter table (Update' c s) = T.concat [ "UPDATE " , escapeE table , " SET " , escapeF (cName c) , "=" , s , " WHERE " , escapeF (cName c) , " IS NULL" ] showAlter table (AddReference reftable fkeyname t2 id2 cascade) = T.concat [ "ALTER TABLE " , escapeE table , " ADD CONSTRAINT " , escapeC fkeyname , " FOREIGN KEY(" , T.intercalate "," $ map escapeF t2 , ") REFERENCES " , escapeE reftable , "(" , T.intercalate "," id2 , ")" ] <> renderFieldCascade cascade showAlter table (DropReference cname) = T.concat [ "ALTER TABLE " , escapeE table , " DROP CONSTRAINT " , escapeC cname ] -- | Get the SQL string for the table that a PersistEntity represents. -- Useful for raw SQL queries. tableName :: (PersistEntity record) => record -> Text tableName = escapeE . tableDBName -- | Get the SQL string for the field that an EntityField represents. -- Useful for raw SQL queries. 
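--
-- For instance, a raw query can be assembled from these helpers; a sketch,
-- assuming a hypothetical @Person@ entity with a @PersonName@ field and
-- OverloadedStrings:
--
-- > let query = "SELECT " <> fieldName PersonName
-- >          <> " FROM " <> tableName (undefined :: Person)
-- > names <- rawSql query [] :: SqlPersistM [Single Text]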
fieldName :: (PersistEntity record) => EntityField record typ -> Text
fieldName = escapeF . fieldDBName

escapeC :: ConstraintNameDB -> Text
escapeC = escapeWith escape

escapeE :: EntityNameDB -> Text
escapeE = escapeWith escape

escapeF :: FieldNameDB -> Text
escapeF = escapeWith escape

escape :: Text -> Text
escape s =
    T.pack $ '"' : go (T.unpack s) ++ "\""
  where
    go "" = ""
    go ('"':xs) = "\"\"" ++ go xs
    go (x:xs) = x : go xs

-- | Information required to connect to a PostgreSQL database
-- using @persistent@'s generic facilities. These values are the
-- same that are given to 'withPostgresqlPool'.
data PostgresConf = PostgresConf
    { pgConnStr  :: ConnectionString
      -- ^ The connection string.

    -- TODO: Currently stripes, idle timeout, and pool size are all separate fields
    -- When Persistent next does a large breaking release (3.0?), we should consider making these just a single ConnectionPoolConfig value
    --
    -- Currently the idle timeout is an Integer, rather than resource-pool's NominalDiffTime type.
    -- This is because the time package only recently added the Read instance for NominalDiffTime.
    -- Future TODO: Consider removing the Read instance, and/or making the idle timeout a NominalDiffTime.

    , pgPoolStripes :: Int
    -- ^ How many stripes to divide the pool into. See "Data.Pool" for details.
    -- @since 2.11.0.0
    , pgPoolIdleTimeout :: Integer -- Ideally this would be a NominalDiffTime, but that type lacks a Read instance https://github.com/haskell/time/issues/130
    -- ^ How long connections can remain idle before being disposed of, in seconds.
    -- @since 2.11.0.0
    , pgPoolSize :: Int
      -- ^ How many connections should be held in the connection pool.
    } deriving (Show, Read, Data)

instance FromJSON PostgresConf where
    parseJSON v = modifyFailure ("Persistent: error loading PostgreSQL conf: " ++) $
      flip (withObject "PostgresConf") v $ \o -> do
        let defaultPoolConfig = defaultConnectionPoolConfig
        database <- o .: "database"
        host     <- o .: "host"
        port     <- o .:? "port" .!= 5432
        user     <- o .: "user"
        password <- o .: "password"
        poolSize <- o .:? "poolsize" .!= (connectionPoolConfigSize defaultPoolConfig)
        poolStripes <- o .:? "stripes" .!= (connectionPoolConfigStripes defaultPoolConfig)
        poolIdleTimeout <- o .:? "idleTimeout" .!= (floor $ connectionPoolConfigIdleTimeout defaultPoolConfig)
        let ci = PG.ConnectInfo
                   { PG.connectHost     = host
                   , PG.connectPort     = port
                   , PG.connectUser     = user
                   , PG.connectPassword = password
                   , PG.connectDatabase = database
                   }
            cstr = PG.postgreSQLConnectionString ci
        return $ PostgresConf cstr poolStripes poolIdleTimeout poolSize

instance PersistConfig PostgresConf where
    type PersistConfigBackend PostgresConf = SqlPersistT
    type PersistConfigPool PostgresConf = ConnectionPool
    createPoolConfig conf = runNoLoggingT $ createPostgresqlPoolWithConf conf defaultPostgresConfHooks
    runPool _ = runSqlPool
    loadConfig = parseJSON

    applyEnv c0 = do
        env <- getEnvironment
        return $ addUser env
               $ addPass env
               $ addDatabase env
               $ addPort env
               $ addHost env c0
      where
        addParam param val c =
            c { pgConnStr = B8.concat [pgConnStr c, " ", param, "='", pgescape val, "'"] }
        pgescape = B8.pack . go
          where
            go ('\'':rest) = '\\' : '\'' : go rest
            go ('\\':rest) = '\\' : '\\' : go rest
            go ( x :rest) = x : go rest
            go [] = []

        maybeAddParam param envvar env =
            maybe id (addParam param) $
            lookup envvar env

        addHost = maybeAddParam "host" "PGHOST"
        addPort = maybeAddParam "port" "PGPORT"
        addUser = maybeAddParam "user" "PGUSER"
        addPass = maybeAddParam "password" "PGPASS"
        addDatabase = maybeAddParam "dbname" "PGDATABASE"

-- | Hooks for configuring Persistent and its connection to Postgres.
--
-- @since 2.11.0
data PostgresConfHooks = PostgresConfHooks
  { pgConfHooksGetServerVersion :: PG.Connection -> IO (NonEmpty Word)
    -- ^ Function to get the version of Postgres
    --
    -- The default implementation queries the server with "show server_version".
    -- Some variants of Postgres, such as Redshift, don't support showing the version.
    -- It's recommended you return a hardcoded version in those cases.
    --
    -- @since 2.11.0
  , pgConfHooksAfterCreate :: PG.Connection -> IO ()
    -- ^ Action to perform after a connection is created.
    --
    -- Typical uses of this are modifying the connection (e.g. to set the schema) or logging a connection being created.
    --
    -- The default implementation does nothing.
    --
    -- @since 2.11.0
  }

-- | Default settings for 'PostgresConfHooks'. See the individual fields of 'PostgresConfHooks' for the default values.
--
-- @since 2.11.0
defaultPostgresConfHooks :: PostgresConfHooks
defaultPostgresConfHooks = PostgresConfHooks
  { pgConfHooksGetServerVersion = getServerVersionNonEmpty
  , pgConfHooksAfterCreate = const $ pure ()
  }

refName :: EntityNameDB -> FieldNameDB -> ConstraintNameDB
refName (EntityNameDB table) (FieldNameDB column) =
    let overhead = T.length $ T.concat ["_", "_fkey"]
        (fromTable, fromColumn) = shortenNames overhead (T.length table, T.length column)
    in ConstraintNameDB $ T.concat [T.take fromTable table, "_", T.take fromColumn column, "_fkey"]
  where
    -- Postgres automatically truncates too-long foreign key names to a combination of
    -- truncatedTableName + "_" + truncatedColumnName + "_fkey".
    -- This works fine for normal use cases, but it creates an issue for Persistent,
    -- because, after running the migrations, Persistent sees that the truncated foreign
    -- key constraint doesn't have the expected name and suggests that you migrate again.
    -- To work around this, we copy the Postgres truncation approach before sending foreign key constraints to it.
    --
    -- I believe this will also be an issue for extremely long table names,
    -- but it's just much more likely to exist with foreign key constraints because they're usually tablename * 2 in length

    -- Approximation of the algorithm Postgres uses to truncate identifiers
    -- See makeObjectName https://github.com/postgres/postgres/blob/5406513e997f5ee9de79d4076ae91c04af0c52f6/src/backend/commands/indexcmds.c#L2074-L2080
    shortenNames :: Int -> (Int, Int) -> (Int, Int)
    shortenNames overhead (x, y)
        | x + y + overhead <= maximumIdentifierLength = (x, y)
        | x > y = shortenNames overhead (x - 1, y)
        | otherwise = shortenNames overhead (x, y - 1)

-- | Postgres' default maximum identifier length in bytes
-- (You can re-compile Postgres with a new limit, but I'm assuming that virtually no one does this).
-- See https://www.postgresql.org/docs/11/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS maximumIdentifierLength :: Int maximumIdentifierLength = 63 udToPair :: UniqueDef -> (ConstraintNameDB, [FieldNameDB]) udToPair ud = (uniqueDBName ud, map snd $ NEL.toList $ uniqueFields ud) mockMigrate :: [EntityDef] -> (Text -> IO Statement) -> EntityDef -> IO (Either [Text] [(Bool, Text)]) mockMigrate allDefs _ entity = fmap (fmap $ map showAlterDb) $ do case partitionEithers [] of ([], old'') -> return $ Right $ migrationText False old'' (errs, _) -> return $ Left errs where name = getEntityDBName entity migrationText exists' old'' = if not exists' then createText newcols fdefs udspair else let (acs, ats) = getAlters allDefs entity (newcols, udspair) old' acs' = map (AlterColumn name) acs ats' = map (AlterTable name) ats in acs' ++ ats' where old' = partitionEithers old'' (newcols', udefs, fdefs) = postgresMkColumns allDefs entity newcols = filter (not . safeToRemove entity . cName) newcols' udspair = map udToPair udefs -- Check for table existence if there are no columns, workaround -- for https://github.com/yesodweb/persistent/issues/152 createText newcols fdefs udspair = (addTable newcols entity) : uniques ++ references ++ foreignsAlt where uniques = flip concatMap udspair $ \(uname, ucols) -> [AlterTable name $ AddUniqueConstraint uname ucols] references = mapMaybe (\Column { cName, cReference } -> getAddReference allDefs entity cName =<< cReference ) newcols foreignsAlt = mapMaybe (mkForeignAlt entity) fdefs -- | Mock a migration even when the database is not present. -- This function performs the same functionality of 'printMigration' -- with the difference that an actual database is not needed. mockMigration :: Migration -> IO () mockMigration mig = do smap <- newIORef mempty let sqlbackend = mkSqlBackend MkSqlBackendArgs { connPrepare = \_ -> do return Statement { stmtFinalize = return () , stmtReset = return () , stmtExecute = undefined , stmtQuery = \_ -> return $ return () } , connInsertSql = undefined , connStmtMap = smap , connClose = undefined , connMigrateSql = mockMigrate , connBegin = undefined , connCommit = undefined , connRollback = undefined , connEscapeFieldName = escapeF , connEscapeTableName = escapeE . getEntityDBName , connEscapeRawName = escape , connNoLimit = undefined , connRDBMS = undefined , connLimitOffset = undefined , connLogFunc = undefined } result = runReaderT $ runWriterT $ runWriterT mig resp <- result sqlbackend mapM_ T.putStrLn $ map snd $ snd resp putManySql :: EntityDef -> Int -> Text putManySql ent n = putManySql' conflictColumns fields ent n where fields = getEntityFields ent conflictColumns = concatMap (map (escapeF . snd) . NEL.toList . uniqueFields) (getEntityUniques ent) repsertManySql :: EntityDef -> Int -> Text repsertManySql ent n = putManySql' conflictColumns fields ent n where fields = NEL.toList $ keyAndEntityFields ent conflictColumns = NEL.toList $ escapeF . fieldDB <$> getEntityKeyFields ent -- | This type is used to determine how to update rows using Postgres' -- @INSERT ... ON CONFLICT KEY UPDATE@ functionality, exposed via -- 'upsertWhere' and 'upsertManyWhere' in this library. -- -- @since 2.12.1.0 data HandleUpdateCollision record where -- | Copy the field directly from the record. CopyField :: EntityField record typ -> HandleUpdateCollision record -- | Only copy the field if it is not equal to the provided value. 
    CopyUnlessEq
        :: PersistField typ
        => EntityField record typ
        -> typ
        -> HandleUpdateCollision record

-- | Copy the field into the database only if the value in the
-- corresponding record is non-@NULL@.
--
-- @since 2.12.1.0
copyUnlessNull :: PersistField typ => EntityField record (Maybe typ) -> HandleUpdateCollision record
copyUnlessNull field = CopyUnlessEq field Nothing

-- | Copy the field into the database only if the value in the
-- corresponding record is non-empty, where "empty" means the Monoid
-- definition for 'mempty'. Useful for 'Text', 'String', 'ByteString', etc.
--
-- The resulting 'HandleUpdateCollision' type is useful for the
-- 'upsertManyWhere' function.
--
-- @since 2.12.1.0
copyUnlessEmpty :: (Monoid.Monoid typ, PersistField typ) => EntityField record typ -> HandleUpdateCollision record
copyUnlessEmpty field = CopyUnlessEq field Monoid.mempty

-- | Copy the field into the database only if the field is not equal to the
-- provided value. This is useful to avoid copying weird nullary data into
-- the database.
--
-- The resulting 'HandleUpdateCollision' type is useful for the
-- 'upsertMany' function.
--
-- @since 2.12.1.0
copyUnlessEq :: PersistField typ => EntityField record typ -> typ -> HandleUpdateCollision record
copyUnlessEq = CopyUnlessEq

-- | Copy the field directly from the record.
--
-- @since 2.12.1.0
copyField :: PersistField typ => EntityField record typ -> HandleUpdateCollision record
copyField = CopyField

-- | Postgres-specific 'upsertWhere'. This method does the following:
-- it will insert a record if no matching unique key exists.
-- If a unique key exists, it will update the relevant field with a user-supplied value;
-- however, it will only perform this update under a user-supplied condition.
-- For example, here is how this method could be called:
--
-- @
-- upsertWhere record [recordField =. newValue] [recordField !=. newValue]
-- @
--
-- Called this way, this method will insert a new record (if none exists) OR update a recordField with a new value
-- assuming the condition in the last block is met.
--
-- @since 2.12.1.0
upsertWhere
    :: ( backend ~ PersistEntityBackend record
       , PersistEntity record
       , PersistEntityBackend record ~ SqlBackend
       , MonadIO m
       , PersistStore backend
       , BackendCompatible SqlBackend backend
       , OnlyOneUniqueKey record
       )
    => record
    -> [Update record]
    -> [Filter record]
    -> ReaderT backend m ()
upsertWhere record updates filts =
    upsertManyWhere [record] [] updates filts

-- | Postgres-specific 'upsertManyWhere'. This method does the following:
-- it will insert a record if no matching unique key exists.
-- If a unique key exists, it will update the relevant field with a user-supplied value;
-- however, it will only perform this update under a user-supplied condition.
-- For example, here is how this method could be called:
--
-- @
-- upsertManyWhere [record] [recordField =. newValue] [recordField !=. newValue]
-- @
--
-- Called this way, this method will insert a new record (if none exists) OR update a recordField with a new value
-- assuming the condition in the last block is met.
--
-- @since 2.12.1.0
upsertManyWhere
    :: forall record backend m.
       ( backend ~ PersistEntityBackend record
       , BackendCompatible SqlBackend backend
       , PersistEntityBackend record ~ SqlBackend
       , PersistEntity record
       , OnlyOneUniqueKey record
       , MonadIO m
       )
    => [record]
    -- ^ A list of the records you want to insert, or update
    -> [HandleUpdateCollision record]
    -- ^ A list of the fields you want to copy over.
    -> [Update record]
    -- ^ A list of the updates to apply that aren't dependent on the record
    -- being inserted.
    -> [Filter record]
    -- ^ A filter condition that dictates the scope of the updates
    -> ReaderT backend m ()
upsertManyWhere [] _ _ _ = return ()
upsertManyWhere records fieldValues updates filters = do
    conn <- asks projectBackend
    let uniqDef = onlyOneUniqueDef (Proxy :: Proxy record)
    uncurry rawExecute $
        mkBulkUpsertQuery records conn fieldValues updates filters uniqDef

-- | Exclude any record field if it doesn't match the filter record. Used only in `upsertWhere` and
-- `upsertManyWhere`.
--
-- TODO: we could probably make a sum type for the `Filter` record that's passed into the `upsertWhere` and
-- `upsertManyWhere` methods that has similar behavior to the HandleUpdateCollision type.
--
-- @since 2.12.1.0
excludeNotEqualToOriginal
    :: (PersistField typ, PersistEntity rec)
    => EntityField rec typ
    -> Filter rec
excludeNotEqualToOriginal field =
    Filter
        { filterField = field
        , filterFilter = Ne
        , filterValue = UnsafeValue $ PersistLiteral_ Unescaped bsForExcludedField
        }
  where
    bsForExcludedField = T.encodeUtf8 $ "EXCLUDED." <> fieldName field

-- | This creates the query for 'upsertManyWhere'. If you
-- provide an empty list of updates to perform, then it will generate
-- a dummy/no-op update using the first field of the record. This avoids
-- duplicate key exceptions.
mkBulkUpsertQuery
    :: (PersistEntity record, PersistEntityBackend record ~ SqlBackend, OnlyOneUniqueKey record)
    => [record]
    -- ^ A list of the records you want to insert, or update
    -> SqlBackend
    -> [HandleUpdateCollision record]
    -- ^ A list of the fields you want to copy over.
    -> [Update record]
    -- ^ A list of the updates to apply that aren't dependent on the record being inserted.
    -> [Filter record]
    -- ^ A filter condition that dictates the scope of the updates
    -> UniqueDef
    -- ^ The specific uniqueness constraint to use on the record. Postgres
    -- requires that we use exactly one relevant constraint, and it can't do
    -- a catch-all. How frustrating!
    -> (Text, [PersistValue])
mkBulkUpsertQuery records conn fieldValues updates filters uniqDef =
    (q, recordValues <> updsValues <> copyUnlessValues <> whereVals)
  where
    mfieldDef x = case x of
        CopyField rec -> Right (fieldDbToText (persistFieldDef rec))
        CopyUnlessEq rec val -> Left (fieldDbToText (persistFieldDef rec), toPersistValue val)
    (fieldsToMaybeCopy, updateFieldNames) = partitionEithers $ map mfieldDef fieldValues
    fieldDbToText = escapeF . fieldDB
    entityDef' = entityDef records
    conflictColumns =
        map (escapeF . snd) $ NEL.toList $ uniqueFields uniqDef
    firstField = case entityFieldNames of
        [] -> error "The entity you're trying to insert does not have any fields."
        (field:_) -> field
    entityFieldNames = map fieldDbToText (getEntityFields entityDef')
    nameOfTable = escapeE . getEntityDBName $ entityDef'
    copyUnlessValues = map snd fieldsToMaybeCopy
    recordValues = concatMap (map toPersistValue . toPersistFields) records
    recordPlaceholders =
        Util.commaSeparated
            $ map (Util.parenWrapped . Util.commaSeparated . map (const "?") . toPersistFields)
            $ records
    mkCondFieldSet n _ =
        T.concat
            [ n
            , "=COALESCE("
            , "NULLIF("
            , "EXCLUDED."
            , n
            , ","
            , "?"
            , ")"
            , ","
            , nameOfTable
            , "."
, n ,")" ] condFieldSets = map (uncurry mkCondFieldSet) fieldsToMaybeCopy fieldSets = map (\n -> T.concat [n, "=EXCLUDED.", n, ""]) updateFieldNames upds = map (Util.mkUpdateText' (escapeF) (\n -> T.concat [nameOfTable, ".", n])) updates updsValues = map (\(Update _ val _) -> toPersistValue val) updates (wher, whereVals) = if null filters then ("", []) else (filterClauseWithVals (Just PrefixTableName) conn filters) updateText = case fieldSets <> upds <> condFieldSets of [] -> -- This case is really annoying, and probably unlikely to be -- actually hit - someone would have had to call something like -- `upsertManyWhere [] [] []`, but that would have been caught -- by the prior case. -- Would be nice to have something like a `NonEmpty (These ...)` -- instead of multiple lists... T.concat [firstField, "=", nameOfTable, ".", firstField] xs -> Util.commaSeparated xs q = T.concat [ "INSERT INTO " , nameOfTable , Util.parenWrapped . Util.commaSeparated $ entityFieldNames , " VALUES " , recordPlaceholders , " ON CONFLICT " , Util.parenWrapped $ Util.commaSeparated $ conflictColumns , " DO UPDATE SET " , updateText , wher ] putManySql' :: [Text] -> [FieldDef] -> EntityDef -> Int -> Text putManySql' conflictColumns (filter isFieldNotGenerated -> fields) ent n = q where fieldDbToText = escapeF . fieldDB mkAssignment f = T.concat [f, "=EXCLUDED.", f] table = escapeE . getEntityDBName $ ent columns = Util.commaSeparated $ map fieldDbToText fields placeholders = map (const "?") fields updates = map (mkAssignment . fieldDbToText) fields q = T.concat [ "INSERT INTO " , table , Util.parenWrapped columns , " VALUES " , Util.commaSeparated . replicate n . Util.parenWrapped . Util.commaSeparated $ placeholders , " ON CONFLICT " , Util.parenWrapped . Util.commaSeparated $ conflictColumns , " DO UPDATE SET " , Util.commaSeparated updates ] -- | Enable a Postgres extension. See https://www.postgresql.org/docs/current/static/contrib.html -- for a list. migrateEnableExtension :: Text -> Migration migrateEnableExtension extName = WriterT $ WriterT $ do res :: [Single Int] <- rawSql "SELECT COUNT(*) FROM pg_catalog.pg_extension WHERE extname = ?" [PersistText extName] if res == [Single 0] then return (((), []) , [(False, "CREATe EXTENSION \"" <> extName <> "\"")]) else return (((), []), []) postgresMkColumns :: [EntityDef] -> EntityDef -> ([Column], [UniqueDef], [ForeignDef]) postgresMkColumns allDefs t = mkColumns allDefs t $ setBackendSpecificForeignKeyName refName emptyBackendSpecificOverrides -- | Wrapper for persistent SqlBackends that carry the corresponding -- `Postgresql.Connection`. -- -- @since 2.13.1.0 data RawPostgresql backend = RawPostgresql { persistentBackend :: backend -- ^ The persistent backend -- -- @since 2.13.1.0 , rawPostgresqlConnection :: PG.Connection -- ^ The underlying `PG.Connection` -- -- @since 2.13.1.0 } instance BackendCompatible (RawPostgresql b) (RawPostgresql b) where projectBackend = id instance BackendCompatible b (RawPostgresql b) where projectBackend = persistentBackend withRawConnection :: (PG.Connection -> SqlBackend) -> PG.Connection -> RawPostgresql SqlBackend withRawConnection f conn = RawPostgresql { persistentBackend = f conn , rawPostgresqlConnection = conn } -- | Create a PostgreSQL connection pool which also exposes the -- raw connection. The raw counterpart to 'createPostgresqlPool'. -- -- @since 2.13.1.0 createRawPostgresqlPool :: (MonadUnliftIO m, MonadLoggerIO m) => ConnectionString -- ^ Connection string to the database. 
-> Int -- ^ Number of connections to be kept open -- in the pool. -> m (Pool (RawPostgresql SqlBackend)) createRawPostgresqlPool = createRawPostgresqlPoolModified (const $ return ()) -- | The raw counterpart to 'createPostgresqlPoolModified'. -- -- @since 2.13.1.0 createRawPostgresqlPoolModified :: (MonadUnliftIO m, MonadLoggerIO m) => (PG.Connection -> IO ()) -- ^ Action to perform after connection is created. -> ConnectionString -- ^ Connection string to the database. -> Int -- ^ Number of connections to be kept open in the pool. -> m (Pool (RawPostgresql SqlBackend)) createRawPostgresqlPoolModified = createRawPostgresqlPoolModifiedWithVersion getServerVersion -- | The raw counterpart to 'createPostgresqlPoolModifiedWithVersion'. -- -- @since 2.13.1.0 createRawPostgresqlPoolModifiedWithVersion :: (MonadUnliftIO m, MonadLoggerIO m) => (PG.Connection -> IO (Maybe Double)) -- ^ Action to perform to get the server version. -> (PG.Connection -> IO ()) -- ^ Action to perform after connection is created. -> ConnectionString -- ^ Connection string to the database. -> Int -- ^ Number of connections to be kept open in the pool. -> m (Pool (RawPostgresql SqlBackend)) createRawPostgresqlPoolModifiedWithVersion getVerDouble modConn ci = do let getVer = oldGetVersionToNew getVerDouble createSqlPool $ open' modConn getVer withRawConnection ci -- | The raw counterpart to 'createPostgresqlPoolWithConf'. -- -- @since 2.13.1.0 createRawPostgresqlPoolWithConf :: (MonadUnliftIO m, MonadLoggerIO m) => PostgresConf -- ^ Configuration for connecting to Postgres -> PostgresConfHooks -- ^ Record of callback functions -> m (Pool (RawPostgresql SqlBackend)) createRawPostgresqlPoolWithConf conf hooks = do let getVer = pgConfHooksGetServerVersion hooks modConn = pgConfHooksAfterCreate hooks createSqlPoolWithConfig (open' modConn getVer withRawConnection (pgConnStr conf)) (postgresConfToConnectionPoolConfig conf) #if MIN_VERSION_base(4,12,0) instance (PersistCore b) => PersistCore (RawPostgresql b) where newtype BackendKey (RawPostgresql b) = RawPostgresqlKey { unRawPostgresqlKey :: BackendKey (Compatible b (RawPostgresql b)) } makeCompatibleKeyInstances [t| forall b. 
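-- (Why expose the raw connection? 'RawPostgresql' lets you drop down to
-- postgresql-simple for features persistent does not model, such as
-- LISTEN/NOTIFY, while running ordinary persistent code on the same
-- connection. A sketch; @pool@, @fetchPending@, and @Data.Pool.withResource@
-- are assumed on the caller's side:
--
-- @
-- withResource pool $ \(RawPostgresql backend conn) -> do
--     _ <- PG.execute_ conn "LISTEN events"
--     runSqlConn fetchPending backend
-- @
-- )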
Compatible b (RawPostgresql b) |] #else instance (PersistCore b) => PersistCore (RawPostgresql b) where newtype BackendKey (RawPostgresql b) = RawPostgresqlKey { unRawPostgresqlKey :: BackendKey (RawPostgresql b) } deriving instance (Show (BackendKey b)) => Show (BackendKey (RawPostgresql b)) deriving instance (Read (BackendKey b)) => Read (BackendKey (RawPostgresql b)) deriving instance (Eq (BackendKey b)) => Eq (BackendKey (RawPostgresql b)) deriving instance (Ord (BackendKey b)) => Ord (BackendKey (RawPostgresql b)) deriving instance (Num (BackendKey b)) => Num (BackendKey (RawPostgresql b)) deriving instance (Integral (BackendKey b)) => Integral (BackendKey (RawPostgresql b)) deriving instance (PersistField (BackendKey b)) => PersistField (BackendKey (RawPostgresql b)) deriving instance (PersistFieldSql (BackendKey b)) => PersistFieldSql (BackendKey (RawPostgresql b)) deriving instance (Real (BackendKey b)) => Real (BackendKey (RawPostgresql b)) deriving instance (Enum (BackendKey b)) => Enum (BackendKey (RawPostgresql b)) deriving instance (Bounded (BackendKey b)) => Bounded (BackendKey (RawPostgresql b)) deriving instance (ToJSON (BackendKey b)) => ToJSON (BackendKey (RawPostgresql b)) deriving instance (FromJSON (BackendKey b)) => FromJSON (BackendKey (RawPostgresql b)) #endif #if MIN_VERSION_base(4,12,0) $(pure []) makeCompatibleInstances [t| forall b. Compatible b (RawPostgresql b) |] #else instance HasPersistBackend b => HasPersistBackend (RawPostgresql b) where type BaseBackend (RawPostgresql b) = BaseBackend b persistBackend = persistBackend . persistentBackend instance (PersistStoreRead b) => PersistStoreRead (RawPostgresql b) where get = withReaderT persistentBackend . get getMany = withReaderT persistentBackend . getMany instance (PersistQueryRead b) => PersistQueryRead (RawPostgresql b) where selectSourceRes filts opts = withReaderT persistentBackend $ selectSourceRes filts opts selectFirst filts opts = withReaderT persistentBackend $ selectFirst filts opts selectKeysRes filts opts = withReaderT persistentBackend $ selectKeysRes filts opts count = withReaderT persistentBackend . count exists = withReaderT persistentBackend . exists instance (PersistQueryWrite b) => PersistQueryWrite (RawPostgresql b) where updateWhere filts updates = withReaderT persistentBackend $ updateWhere filts updates deleteWhere = withReaderT persistentBackend . deleteWhere instance (PersistUniqueRead b) => PersistUniqueRead (RawPostgresql b) where getBy = withReaderT persistentBackend . getBy instance (PersistStoreWrite b) => PersistStoreWrite (RawPostgresql b) where insert = withReaderT persistentBackend . insert insert_ = withReaderT persistentBackend . insert_ insertMany = withReaderT persistentBackend . insertMany insertMany_ = withReaderT persistentBackend . insertMany_ insertEntityMany = withReaderT persistentBackend . insertEntityMany insertKey k = withReaderT persistentBackend . insertKey k repsert k = withReaderT persistentBackend . repsert k repsertMany = withReaderT persistentBackend . repsertMany replace k = withReaderT persistentBackend . replace k delete = withReaderT persistentBackend . delete update k = withReaderT persistentBackend . update k updateGet k = withReaderT persistentBackend . updateGet k instance (PersistUniqueWrite b) => PersistUniqueWrite (RawPostgresql b) where deleteBy = withReaderT persistentBackend . deleteBy insertUnique = withReaderT persistentBackend . insertUnique upsert rec = withReaderT persistentBackend . 
upsert rec upsertBy uniq rec = withReaderT persistentBackend . upsertBy uniq rec putMany = withReaderT persistentBackend . putMany #endif persistent-postgresql-2.13.6.2/Database/Persist/Postgresql/Internal.hs0000644000000000000000000002731714476403105024160 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ViewPatterns #-} module Database.Persist.Postgresql.Internal ( P(..) , PgInterval(..) , getGetter ) where import qualified Database.PostgreSQL.Simple as PG import qualified Database.PostgreSQL.Simple.FromField as PGFF import qualified Database.PostgreSQL.Simple.Internal as PG import qualified Database.PostgreSQL.Simple.ToField as PGTF import qualified Database.PostgreSQL.Simple.TypeInfo.Static as PS import qualified Database.PostgreSQL.Simple.Types as PG import qualified Blaze.ByteString.Builder.Char8 as BBB import qualified Data.Attoparsec.ByteString.Char8 as P import Data.Bits ((.&.)) import Data.ByteString (ByteString) import qualified Data.ByteString.Builder as BB import qualified Data.ByteString.Char8 as B8 import Data.Char (ord) import Data.Data (Typeable) import Data.Fixed (Fixed(..), Pico) import Data.Int (Int64) import qualified Data.IntMap as I import Data.Maybe (fromMaybe) import Data.String.Conversions.Monomorphic (toStrictByteString) import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.Encoding as T import Data.Time (NominalDiffTime, localTimeToUTC, utc) import Database.Persist.Sql -- | Newtype used to avoid orphan instances for @postgresql-simple@ classes. -- -- @since 2.13.2.0 newtype P = P { unP :: PersistValue } instance PGTF.ToField P where toField (P (PersistText t)) = PGTF.toField t toField (P (PersistByteString bs)) = PGTF.toField (PG.Binary bs) toField (P (PersistInt64 i)) = PGTF.toField i toField (P (PersistDouble d)) = PGTF.toField d toField (P (PersistRational r)) = PGTF.Plain $ BBB.fromString $ show (fromRational r :: Pico) -- FIXME: Too ambiguous; we cannot pick the right precision without information about the field toField (P (PersistBool b)) = PGTF.toField b toField (P (PersistDay d)) = PGTF.toField d toField (P (PersistTimeOfDay t)) = PGTF.toField t toField (P (PersistUTCTime t)) = PGTF.toField t toField (P PersistNull) = PGTF.toField PG.Null toField (P (PersistList l)) = PGTF.toField $ listToJSON l toField (P (PersistMap m)) = PGTF.toField $ mapToJSON m toField (P (PersistLiteral_ DbSpecific s)) = PGTF.toField (Unknown s) toField (P (PersistLiteral_ Unescaped l)) = PGTF.toField (UnknownLiteral l) toField (P (PersistLiteral_ Escaped e)) = PGTF.toField (Unknown e) toField (P (PersistArray a)) = PGTF.toField $ PG.PGArray $ P <$> a toField (P (PersistObjectId _)) = error "Refusing to serialize a PersistObjectId to a PostgreSQL value" instance PGFF.FromField P where fromField field mdata = fmap P $ case mdata of -- If we try to simply decode based on oid, we will hit unexpected null -- errors.
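-- (Aside: via @P@, a raw postgresql-simple query can decode any column this
-- backend understands into a 'PersistValue'. A sketch, with names invented
-- here:
--
-- @
-- rows <- PG.query_ conn "SELECT name FROM person"
-- let names = [v | PG.Only (P v) <- rows]  -- :: [PersistValue]
-- @
-- )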
Nothing -> pure PersistNull data' -> getGetter (PGFF.typeOid field) field data' newtype Unknown = Unknown { unUnknown :: ByteString } deriving (Eq, Show, Read, Ord) instance PGFF.FromField Unknown where fromField f mdata = case mdata of Nothing -> PGFF.returnError PGFF.UnexpectedNull f "Database.Persist.Postgresql/PGFF.FromField Unknown" Just dat -> return (Unknown dat) instance PGTF.ToField Unknown where toField (Unknown a) = PGTF.Escape a newtype UnknownLiteral = UnknownLiteral { unUnknownLiteral :: ByteString } deriving (Eq, Show, Read, Ord, Typeable) instance PGFF.FromField UnknownLiteral where fromField f mdata = case mdata of Nothing -> PGFF.returnError PGFF.UnexpectedNull f "Database.Persist.Postgresql/PGFF.FromField UnknownLiteral" Just dat -> return (UnknownLiteral dat) instance PGTF.ToField UnknownLiteral where toField (UnknownLiteral a) = PGTF.Plain $ BB.byteString a type Getter a = PGFF.FieldParser a convertPV :: PGFF.FromField a => (a -> b) -> Getter b convertPV f = (fmap f .) . PGFF.fromField builtinGetters :: I.IntMap (Getter PersistValue) builtinGetters = I.fromList [ (k PS.bool, convertPV PersistBool) , (k PS.bytea, convertPV (PersistByteString . unBinary)) , (k PS.char, convertPV PersistText) , (k PS.name, convertPV PersistText) , (k PS.int8, convertPV PersistInt64) , (k PS.int2, convertPV PersistInt64) , (k PS.int4, convertPV PersistInt64) , (k PS.text, convertPV PersistText) , (k PS.xml, convertPV (PersistByteString . unUnknown)) , (k PS.float4, convertPV PersistDouble) , (k PS.float8, convertPV PersistDouble) , (k PS.money, convertPV PersistRational) , (k PS.bpchar, convertPV PersistText) , (k PS.varchar, convertPV PersistText) , (k PS.date, convertPV PersistDay) , (k PS.time, convertPV PersistTimeOfDay) , (k PS.timestamp, convertPV (PersistUTCTime . localTimeToUTC utc)) , (k PS.timestamptz, convertPV PersistUTCTime) , (k PS.interval, convertPV (PersistLiteralEscaped . pgIntervalToBs)) , (k PS.bit, convertPV PersistInt64) , (k PS.varbit, convertPV PersistInt64) , (k PS.numeric, convertPV PersistRational) , (k PS.void, \_ _ -> return PersistNull) , (k PS.json, convertPV (PersistByteString . unUnknown)) , (k PS.jsonb, convertPV (PersistByteString . unUnknown)) , (k PS.unknown, convertPV (PersistByteString . unUnknown)) -- Array types: same order as above. -- The OIDs were taken from pg_type. , (1000, listOf PersistBool) , (1001, listOf (PersistByteString . unBinary)) , (1002, listOf PersistText) , (1003, listOf PersistText) , (1016, listOf PersistInt64) , (1005, listOf PersistInt64) , (1007, listOf PersistInt64) , (1009, listOf PersistText) , (143, listOf (PersistByteString . unUnknown)) , (1021, listOf PersistDouble) , (1022, listOf PersistDouble) , (1023, listOf PersistUTCTime) , (1024, listOf PersistUTCTime) , (791, listOf PersistRational) , (1014, listOf PersistText) , (1015, listOf PersistText) , (1182, listOf PersistDay) , (1183, listOf PersistTimeOfDay) , (1115, listOf PersistUTCTime) , (1185, listOf PersistUTCTime) , (1187, listOf (PersistLiteralEscaped . pgIntervalToBs)) , (1561, listOf PersistInt64) , (1563, listOf PersistInt64) , (1231, listOf PersistRational) -- no array(void) type , (2951, listOf (PersistLiteralEscaped . unUnknown)) , (199, listOf (PersistByteString . unUnknown)) , (3807, listOf (PersistByteString . unUnknown)) -- no array(unknown) either ] where k (PGFF.typoid -> i) = PG.oid2int i -- A @listOf f@ will use a @PGArray (Maybe T)@ to convert -- the values to Haskell-land.
The @Maybe@ is important -- because the usual way of checking NULLs -- (c.f. withStmt') won't check for NULL inside -- arrays---or any other compound structure for that matter. listOf f = convertPV (PersistList . map (nullable f) . PG.fromPGArray) where nullable = maybe PersistNull -- | Get the field parser corresponding to the given 'PG.Oid'. -- -- For example, pass in the 'PG.Oid' of 'PS.bool', and you will get back a -- field parser which parses boolean values in the table into 'PersistBool's. -- -- @since 2.13.2.0 getGetter :: PG.Oid -> Getter PersistValue getGetter oid = fromMaybe defaultGetter $ I.lookup (PG.oid2int oid) builtinGetters where defaultGetter = convertPV (PersistLiteralEscaped . unUnknown) unBinary :: PG.Binary a -> a unBinary (PG.Binary x) = x -- | Represent Postgres interval using NominalDiffTime -- -- @since 2.11.0.0 newtype PgInterval = PgInterval { getPgInterval :: NominalDiffTime } deriving (Eq, Show) pgIntervalToBs :: PgInterval -> ByteString pgIntervalToBs = toStrictByteString . show . getPgInterval instance PGTF.ToField PgInterval where toField (PgInterval t) = PGTF.toField t instance PGFF.FromField PgInterval where fromField f mdata = if PGFF.typeOid f /= PS.typoid PS.interval then PGFF.returnError PGFF.Incompatible f "" else case mdata of Nothing -> PGFF.returnError PGFF.UnexpectedNull f "" Just dat -> case P.parseOnly (nominalDiffTime <* P.endOfInput) dat of Left msg -> PGFF.returnError PGFF.ConversionFailed f msg Right t -> return $ PgInterval t where toPico :: Integer -> Pico toPico = MkFixed -- Taken from Database.PostgreSQL.Simple.Time.Internal.Parser twoDigits :: P.Parser Int twoDigits = do a <- P.digit b <- P.digit let c2d c = ord c .&. 15 return $! c2d a * 10 + c2d b -- Taken from Database.PostgreSQL.Simple.Time.Internal.Parser seconds :: P.Parser Pico seconds = do real <- twoDigits mc <- P.peekChar case mc of Just '.' -> do t <- P.anyChar *> P.takeWhile1 P.isDigit return $! parsePicos (fromIntegral real) t _ -> return $! fromIntegral real where parsePicos :: Int64 -> B8.ByteString -> Pico parsePicos a0 t = toPico (fromIntegral (t' * 10^n)) where n = max 0 (12 - B8.length t) t' = B8.foldl' (\a c -> 10 * a + fromIntegral (ord c .&. 15)) a0 (B8.take 12 t) parseSign :: P.Parser Bool parseSign = P.choice [P.char '-' >> return True, return False] -- Db stores it in [-]HHH:MM:SS.[SSSS] -- For example, nominalDay is stored as 24:00:00 interval :: P.Parser (Bool, Int, Int, Pico) interval = do s <- parseSign h <- P.decimal <* P.char ':' m <- twoDigits <* P.char ':' ss <- seconds if m < 60 && ss <= 60 then return (s, h, m, ss) else fail "Invalid interval" nominalDiffTime :: P.Parser NominalDiffTime nominalDiffTime = do (s, h, m, ss) <- interval let pico = ss + 60 * (fromIntegral m) + 60 * 60 * (fromIntegral (abs h)) return . fromRational . toRational $ if s then (-pico) else pico fromPersistValueError :: Text -- ^ Haskell type, should match Haskell name exactly, e.g. "Int64" -> Text -- ^ Database type(s), should appear different from Haskell name, e.g. "integer" or "INT", not "Int". -> PersistValue -- ^ Incorrect value -> Text -- ^ Error message fromPersistValueError haskellType databaseType received = T.concat [ "Failed to parse Haskell type `" , haskellType , "`; expected " , databaseType , " from database, but received: " , T.pack (show received) , ". Potential solution: Check that your database schema matches your Persistent model definitions." ] instance PersistField PgInterval where toPersistValue = PersistLiteralEscaped . 
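-- Round-trip sketch for the instance being defined here: a one-day interval
-- is rendered via 'Show' on 'NominalDiffTime' and parsed back by the
-- Rational parser (illustrative values, not a doctest):
--
-- @
-- toPersistValue (PgInterval 86400) == PersistLiteralEscaped "86400s"
-- fromPersistValue (PersistLiteralEscaped "86400s")
--     == Right (PgInterval {getPgInterval = 86400s})
-- @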
pgIntervalToBs fromPersistValue (PersistLiteral_ DbSpecific bs) = fromPersistValue (PersistLiteralEscaped bs) fromPersistValue x@(PersistLiteral_ Escaped bs) = case P.parseOnly (P.signed P.rational <* P.char 's' <* P.endOfInput) bs of Left _ -> Left $ fromPersistValueError "PgInterval" "Interval" x Right i -> Right $ PgInterval i fromPersistValue x = Left $ fromPersistValueError "PgInterval" "Interval" x instance PersistFieldSql PgInterval where sqlType _ = SqlOther "interval" persistent-postgresql-2.13.6.2/Database/Persist/Postgresql/JSON.hs0000644000000000000000000003042314476403105023145 0ustar0000000000000000{-# LANGUAGE OverloadedStrings #-} {-# OPTIONS_GHC -fno-warn-orphans #-} -- | Filter operators for JSON values added to PostgreSQL 9.4 module Database.Persist.Postgresql.JSON ( (@>.) , (<@.) , (?.) , (?|.) , (?&.) , Value() ) where import Data.Aeson (FromJSON, ToJSON, Value, encode, eitherDecodeStrict) import qualified Data.ByteString.Lazy as BSL import Data.Proxy (Proxy) import Data.Text (Text) import qualified Data.Text as T import Data.Text.Encoding as TE (encodeUtf8) import Database.Persist (EntityField, Filter(..), PersistValue(..), PersistField(..), PersistFilter(..)) import Database.Persist.Sql (PersistFieldSql(..), SqlType(..)) import Database.Persist.Types (FilterValue(..)) infix 4 @>., <@., ?., ?|., ?&. -- | This operator checks inclusion of the JSON value -- on the right hand side in the JSON value on the left -- hand side. -- -- === __Objects__ -- -- An empty Object matches any object -- -- @ -- {} \@> {} == True -- {"a":1,"b":false} \@> {} == True -- @ -- -- Any key-value pair is matched at the top level -- -- @ -- {"a":1,"b":{"c":true}} \@> {"a":1} == True -- {"a":1,"b":{"c":true}} \@> {"b":1} == False -- {"a":1,"b":{"c":true}} \@> {"b":{}} == True -- {"a":1,"b":{"c":true}} \@> {"c":true} == False -- {"a":1,"b":{"c":true}} \@> {"b":{"c":true}} == True -- @ -- -- === __Arrays__ -- -- An empty Array matches any array -- -- @ -- [] \@> [] == True -- [1,2,"hi",false,null] \@> [] == True -- @ -- -- The array on the right-hand side has to be a subset of the one on the -- left. Objects and arrays inside it are compared by this same -- containment rule. -- -- @ -- [1,2,"hi",false,null] \@> [1] == True -- [1,2,"hi",false,null] \@> [null,"hi"] == True -- [1,2,"hi",false,null] \@> ["hi",true] == False -- [1,2,"hi",false,null] \@> ["hi",2,null,false,1] == True -- [1,2,"hi",false,null] \@> [1,2,"hi",false,null,{}] == False -- @ -- -- Arrays and objects inside arrays match the same way they'd -- be matched as being on their own. -- -- @ -- [1,"hi",[false,3],{"a":[null]}] \@> [{}] == True -- [1,"hi",[false,3],{"a":[null]}] \@> [{"a":[]}] == True -- [1,"hi",[false,3],{"a":[null]}] \@> [{"b":[null]}] == False -- [1,"hi",[false,3],{"a":[null]}] \@> [[]] == True -- [1,"hi",[false,3],{"a":[null]}] \@> [[3]] == True -- [1,"hi",[false,3],{"a":[null]}] \@> [[true,3]] == False -- @ -- -- A regular value has to be a member -- -- @ -- [1,2,"hi",false,null] \@> 1 == True -- [1,2,"hi",false,null] \@> 5 == False -- [1,2,"hi",false,null] \@> "hi" == True -- [1,2,"hi",false,null] \@> false == True -- [1,2,"hi",false,null] \@> "2" == False -- @ -- -- An object will never match with an array -- -- @ -- [1,2,"hi",[false,3],{"a":null}] \@> {} == False -- [1,2,"hi",[false,3],{"a":null}] \@> {"a":null} == False -- @ -- -- === __Other values__ -- -- For any other JSON values the `(\@>.)` operator -- functions like an equivalence operator.
-- -- @ -- "hello" \@> "hello" == True -- "hello" \@> \"Hello" == False -- "hello" \@> "h" == False -- "hello" \@> {"hello":1} == False -- "hello" \@> ["hello"] == False -- -- 5 \@> 5 == True -- 5 \@> 5.00 == True -- 5 \@> 1 == False -- 5 \@> 7 == False -- 12345 \@> 1234 == False -- 12345 \@> 2345 == False -- 12345 \@> "12345" == False -- 12345 \@> [1,2,3,4,5] == False -- -- true \@> true == True -- true \@> false == False -- false \@> true == False -- true \@> "true" == False -- -- null \@> null == True -- null \@> 23 == False -- null \@> "null" == False -- null \@> {} == False -- @ -- -- @since 2.8.2 (@>.) :: EntityField record Value -> Value -> Filter record (@>.) field val = Filter field (FilterValue val) $ BackendSpecificFilter " @> " -- | Same as '@>.' except the inclusion check is reversed. -- i.e. is the JSON value on the left hand side included -- in the JSON value of the right hand side. -- -- @since 2.8.2 (<@.) :: EntityField record Value -> Value -> Filter record (<@.) field val = Filter field (FilterValue val) $ BackendSpecificFilter " <@ " -- | This operator takes a column and a string to find a -- top-level key/field in an object. -- -- @column ?. string@ -- -- N.B. This operator might have some unexpected interactions -- with non-object values. Please reference the examples. -- -- === __Objects__ -- -- @ -- {"a":null} ? "a" == True -- {"test":false,"a":500} ? "a" == True -- {"b":{"a":[]}} ? "a" == False -- {} ? "a" == False -- {} ? "{}" == False -- {} ? "" == False -- {"":9001} ? "" == True -- @ -- -- === __Arrays__ -- -- This operator will match an array if the string to be matched -- is an element of that array, but nothing else. -- -- @ -- ["a"] ? "a" == True -- [["a"]] ? "a" == False -- [9,false,"1",null] ? "1" == True -- [] ? "[]" == False -- [{"a":true}] ? "a" == False -- @ -- -- === __Other values__ -- -- This operator functions like an equivalence operator on strings only. -- Any other value does not match. -- -- @ -- "a" ? "a" == True -- "1" ? "1" == True -- "ab" ? "a" == False -- 1 ? "1" == False -- null ? "null" == False -- true ? "true" == False -- 1.5 ? "1.5" == False -- @ -- -- @since 2.10.0 (?.) :: EntityField record Value -> Text -> Filter record (?.) = jsonFilter " ?? " -- | This operator takes a column and a list of strings to -- test whether ANY of the elements of the list are top -- level fields in an object. -- -- @column ?|. list@ -- -- /N.B. An empty list __will never match anything__. Also, this/ -- /operator might have some unexpected interactions with/ -- /non-object values. Please reference the examples./ -- -- === __Objects__ -- -- @ -- {"a":null} ?| ["a","b","c"] == True -- {"test":false,"a":500} ?| ["a","b","c"] == True -- {} ?| ["a","{}"] == False -- {"b":{"a":[]}} ?| ["a","c"] == False -- {"b":{"a":[]},"test":null} ?| [] == False -- @ -- -- === __Arrays__ -- -- This operator will match an array if __any__ of the elements -- of the list are matching string elements of the array. -- -- @ -- ["a"] ?| ["a","b","c"] == True -- [["a"]] ?| ["a","b","c"] == False -- [9,false,"1",null] ?| ["a","false"] == False -- [] ?| ["a","b","c"] == False -- [] ?| [] == False -- [{"a":true}] ?| ["a","b","c"] == False -- [null,4,"b",[]] ?| ["a","b","c"] == True -- @ -- -- === __Other values__ -- -- This operator functions much like an equivalence operator -- on strings only. If a string matches with __any__ element of -- the given list, the comparison matches. No other values match. 
-- -- @ -- "a" ?| ["a","b","c"] == True -- "1" ?| ["a","b","1"] == True -- "ab" ?| ["a","b","c"] == False -- 1 ?| ["a","1"] == False -- null ?| ["a","null"] == False -- true ?| ["a","true"] == False -- "a" ?| [] == False -- @ -- -- @since 2.10.0 (?|.) :: EntityField record Value -> [Text] -> Filter record (?|.) field = jsonFilter " ??| " field . PostgresArray -- | This operator takes a column and a list of strings to -- test whether ALL of the elements of the list are top -- level fields in an object. -- -- @column ?&. list@ -- -- /N.B. An empty list __will match anything__. Also, this/ -- /operator might have some unexpected interactions with/ -- /non-object values. Please reference the examples./ -- -- === __Objects__ -- -- @ -- {"a":null} ?& ["a"] == True -- {"a":null} ?& ["a","a"] == True -- {"test":false,"a":500} ?& ["a"] == True -- {"test":false,"a":500} ?& ["a","b"] == False -- {} ?& ["{}"] == False -- {"b":{"a":[]}} ?& ["a"] == False -- {"b":{"a":[]},"c":false} ?& ["a","c"] == False -- {"a":1,"b":2,"c":3,"d":4} ?& ["b","d"] == True -- {} ?& [] == True -- {"b":{"a":[]},"test":null} ?& [] == True -- @ -- -- === __Arrays__ -- -- This operator will match an array if __all__ of the elements -- of the list are matching string elements of the array. -- -- @ -- ["a"] ?& ["a"] == True -- ["a"] ?& ["a","a"] == True -- [["a"]] ?& ["a"] == False -- ["a","b","c"] ?& ["a","b","d"] == False -- [9,"false","1",null] ?& ["1","false"] == True -- [] ?& ["a","b"] == False -- [{"a":true}] ?& ["a"] == False -- ["a","b","c","d"] ?& ["b","c","d"] == True -- [null,4,{"test":false}] ?& [] == True -- [] ?& [] == True -- @ -- -- === __Other values__ -- -- This operator functions much like an equivalence operator -- on strings only. If a string matches with all elements of -- the given list, the comparison matches. -- -- @ -- "a" ?& ["a"] == True -- "1" ?& ["a","1"] == False -- "b" ?& ["b","b"] == True -- "ab" ?& ["a","b"] == False -- 1 ?& ["1"] == False -- null ?& ["null"] == False -- true ?& ["true"] == False -- 31337 ?& [] == True -- true ?& [] == True -- null ?& [] == True -- @ -- -- @since 2.10.0 (?&.) :: EntityField record Value -> [Text] -> Filter record (?&.) field = jsonFilter " ??& " field . PostgresArray jsonFilter :: PersistField a => Text -> EntityField record Value -> a -> Filter record jsonFilter op field a = Filter field (UnsafeValue a) $ BackendSpecificFilter op ----------------- -- AESON VALUE -- ----------------- instance PersistField Value where toPersistValue = toPersistValueJsonB fromPersistValue = fromPersistValueJsonB instance PersistFieldSql Value where sqlType = sqlTypeJsonB -- FIXME: PersistText might be a bit more efficient, -- but needs testing/profiling before changing it. -- (When entering into the DB the type isn't as important as fromPersistValue) toPersistValueJsonB :: ToJSON a => a -> PersistValue toPersistValueJsonB = PersistLiteralEscaped . BSL.toStrict . encode fromPersistValueJsonB :: FromJSON a => PersistValue -> Either Text a fromPersistValueJsonB (PersistText t) = case eitherDecodeStrict $ TE.encodeUtf8 t of Left str -> Left $ fromPersistValueParseError "FromJSON" t $ T.pack str Right v -> Right v fromPersistValueJsonB (PersistByteString bs) = case eitherDecodeStrict bs of Left str -> Left $ fromPersistValueParseError "FromJSON" bs $ T.pack str Right v -> Right v fromPersistValueJsonB x = Left $ fromPersistValueError "FromJSON" "string or bytea" x -- Constraints on the type might not be necessary, -- but better to leave them in. 
sqlTypeJsonB :: (ToJSON a, FromJSON a) => Proxy a -> SqlType sqlTypeJsonB _ = SqlOther "JSONB" fromPersistValueError :: Text -- ^ Haskell type, should match Haskell name exactly, e.g. "Int64" -> Text -- ^ Database type(s), should appear different from Haskell name, e.g. "integer" or "INT", not "Int". -> PersistValue -- ^ Incorrect value -> Text -- ^ Error message fromPersistValueError haskellType databaseType received = T.concat [ "Failed to parse Haskell type `" , haskellType , "`; expected " , databaseType , " from database, but received: " , T.pack (show received) , ". Potential solution: Check that your database schema matches your Persistent model definitions." ] fromPersistValueParseError :: (Show a) => Text -- ^ Haskell type, should match Haskell name exactly, e.g. "Int64" -> a -- ^ Received value -> Text -- ^ Additional error -> Text -- ^ Error message fromPersistValueParseError haskellType received err = T.concat [ "Failed to parse Haskell type `" , haskellType , "`, but received " , T.pack (show received) , " | with error: " , err ] newtype PostgresArray a = PostgresArray [a] instance PersistField a => PersistField (PostgresArray a) where toPersistValue (PostgresArray ts) = PersistArray $ toPersistValue <$> ts fromPersistValue (PersistArray as) = PostgresArray <$> traverse fromPersistValue as fromPersistValue wat = Left $ fromPersistValueError "PostgresArray" "array" wat persistent-postgresql-2.13.6.2/conn-killed/Main.hs0000644000000000000000000000760414476403105020164 0ustar0000000000000000{-# LANGUAGE ScopedTypeVariables, StandaloneDeriving, GeneralizedNewtypeDeriving, DerivingStrategies #-} {-# LANGUAGE OverloadedStrings, QuantifiedConstraints #-} {-# LANGUAGE TypeApplications #-} {-# language OverloadedStrings #-} -- | This executable is a test of the issue raised in #1199. module Main where import Prelude hiding (show) import qualified Prelude import qualified Data.Text as Text import Control.Monad.IO.Class import qualified Control.Monad as Monad import qualified UnliftIO.Concurrent as Concurrent import qualified UnliftIO.Exception as Exception import qualified Database.Persist as Persist import qualified Database.Persist.Sql as Persist import qualified Database.Persist.Postgresql as Persist import qualified Control.Monad.Logger as Logger import Control.Monad.Logger import qualified Data.ByteString as BS import qualified Data.Pool as Pool import Data.Time import UnliftIO import Data.Coerce import Control.Monad.Trans.Reader import Control.Monad.Trans newtype LogPrefixT m a = LogPrefixT { runLogPrefixT :: ReaderT LogStr m a } deriving newtype (Functor, Applicative, Monad, MonadIO, MonadTrans) instance MonadLogger m => MonadLogger (LogPrefixT m) where monadLoggerLog loc src lvl msg = LogPrefixT $ ReaderT $ \prefix -> monadLoggerLog loc src lvl (toLogStr prefix <> toLogStr msg) deriving newtype instance (forall a b. Coercible a b => Coercible (m a) (m b), MonadUnliftIO m) => MonadUnliftIO (LogPrefixT m) prefixLogs :: Text.Text -> LogPrefixT m a -> m a prefixLogs prefix = flip runReaderT (toLogStr $! mconcat ["[", prefix, "] "]) . runLogPrefixT infixr 5 `prefixLogs` show :: Show a => a -> Text.Text show = Text.pack . 
Prelude.show main :: IO () main = runStdoutLoggingT $ Concurrent.myThreadId >>= \tid -> prefixLogs (show tid) $ do -- I started a postgres server with: -- docker run --rm --name some-postgres -p 5433:5432 -e POSTGRES_PASSWORD=secret postgres -- (host port 5433 matches the connection string below) pool <- Logger.runNoLoggingT $ Persist.createPostgresqlPool "postgresql://postgres:secret@localhost:5433/postgres" 1 logInfoN "creating table..." Monad.void $ liftIO $ createTableFoo pool liftIO getCurrentTime >>= \now -> simulateFailedLongRunningPostgresCall pool -- logInfoN "destroying resources" -- liftIO $ Pool.destroyAllResources pool logInfoN "pg_sleep" result :: Either Exception.SomeException [Persist.Single (Maybe String)] <- Exception.try . (liftIO . (flip Persist.runSqlPersistMPool) pool) $ do Persist.rawSql @(Persist.Single (Maybe String)) "select pg_sleep(2)" [] -- when we try the above we get back: -- 'result: Left libpq: failed (another command is already in progress)' -- this is because the connection went back into the pool before it was ready -- or perhaps it should have been destroyed and a new connection created and put into the pool? logInfoN $ "result: " <> show result createTableFoo :: Pool.Pool Persist.SqlBackend -> IO () createTableFoo pool = (flip Persist.runSqlPersistMPool) pool $ do Persist.rawExecute "CREATE table if not exists foo(id int);" [] simulateFailedLongRunningPostgresCall :: (MonadLogger m, MonadUnliftIO m, forall a b. Coercible a b => Coercible (m a) (m b)) => Pool.Pool Persist.SqlBackend -> m () simulateFailedLongRunningPostgresCall pool = do threadId <- Concurrent.forkIO $ (do me <- Concurrent.myThreadId prefixLogs (show me) $ do let numThings :: Int = 100000000 logInfoN $ "start inserting " <> show numThings <> " things" (`Persist.runSqlPool` pool) $ do logInfoN "inside of thing" Monad.forM_ [1 ..
numThings] $ \i -> do Monad.when (i `mod` 1000 == 0) $ logInfoN $ "Thing #: " <> show i Persist.rawExecute "insert into foo values(1);" [] ) Concurrent.threadDelay 1000000 Monad.void $ Concurrent.killThread threadId logInfoN "killed thread" persistent-postgresql-2.13.6.2/test/main.hs0000644000000000000000000001666414646332256017011 0ustar0000000000000000{-# LANGUAGE DataKinds #-} {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE UndecidableInstances #-} {-# OPTIONS_GHC -Wno-unused-top-binds #-} import PgInit import Data.Aeson import qualified Data.ByteString as BS import Data.Fixed import Data.IntMap (IntMap) import qualified Data.Text as T import Data.Time import Test.QuickCheck import qualified ArrayAggTest import qualified CompositeTest import qualified CustomConstraintTest import qualified CustomPersistFieldTest import qualified CustomPrimaryKeyReferenceTest import qualified DataTypeTest import qualified EmbedOrderTest import qualified EmbedTest import qualified EmptyEntityTest import qualified EquivalentTypeTestPostgres import qualified ForeignKey import qualified GeneratedColumnTestSQL import qualified HtmlTest import qualified ImplicitUuidSpec import qualified JSONTest import qualified LargeNumberTest import qualified LongIdentifierTest import qualified MaxLenTest import qualified MaybeFieldDefsTest import qualified MigrationColumnLengthTest import qualified MigrationOnlyTest import qualified MigrationReferenceSpec import qualified MigrationTest import qualified MpsCustomPrefixTest import qualified MpsNoPrefixTest import qualified PersistUniqueTest import qualified PersistentTest import qualified PgIntervalTest import qualified PrimaryTest import qualified RawSqlTest import qualified ReadWriteTest import qualified Recursive import qualified RenameTest import qualified SumTypeTest import qualified TransactionLevelTest import qualified TreeTest import qualified TypeLitFieldDefsTest import qualified UniqueTest import qualified UpsertTest import qualified UpsertWhere type Tuple = (,) -- Test lower case names share [mkPersist persistSettings, mkMigrate "dataTypeMigrate"] [persistLowerCase| DataTypeTable no-json text Text textMaxLen Text maxlen=100 bytes ByteString bytesTextTuple (Tuple ByteString Text) bytesMaxLen ByteString maxlen=100 int Int intList [Int] intMap (IntMap Int) double Double bool Bool day Day pico Pico time TimeOfDay utc UTCTime jsonb Value |] instance Arbitrary DataTypeTable where arbitrary = DataTypeTable <$> arbText -- text <*> (T.take 100 <$> arbText) -- textMaxLen <*> arbitrary -- bytes <*> liftA2 (,) arbitrary arbText -- bytesTextTuple <*> (BS.take 100 <$> arbitrary) -- bytesMaxLen <*> arbitrary -- int <*> arbitrary -- intList <*> arbitrary -- intMap <*> arbitrary -- double <*> arbitrary -- bool <*> arbitrary -- day <*> arbitrary -- pico <*> (arbitrary) -- time <*> (truncateUTCTime =<< arbitrary) -- utc <*> fmap getValue arbitrary -- jsonb setup :: MonadIO m => Migration -> ReaderT SqlBackend m () setup migration = do printMigration migration runMigrationUnsafe migration main :: IO () main = do runConn $ do mapM_ setup [ PersistentTest.testMigrate , PersistentTest.noPrefixMigrate , PersistentTest.customPrefixMigrate ,
PersistentTest.treeMigrate , EmbedTest.embedMigrate , EmbedOrderTest.embedOrderMigrate , LargeNumberTest.numberMigrate , UniqueTest.uniqueMigrate , MaxLenTest.maxlenMigrate , MaybeFieldDefsTest.maybeFieldDefMigrate , TypeLitFieldDefsTest.typeLitFieldDefsMigrate , Recursive.recursiveMigrate , CompositeTest.compositeMigrate , TreeTest.treeMigrate , PersistUniqueTest.migration , RenameTest.migration , CustomPersistFieldTest.customFieldMigrate , PrimaryTest.migration , CustomPrimaryKeyReferenceTest.migration , MigrationColumnLengthTest.migration , TransactionLevelTest.migration , LongIdentifierTest.migration , ForeignKey.compositeMigrate , MigrationTest.migrationMigrate , PgIntervalTest.pgIntervalMigrate , UpsertWhere.upsertWhereMigrate , ImplicitUuidSpec.implicitUuidMigrate ] PersistentTest.cleanDB ForeignKey.cleanDB hspec $ do ImplicitUuidSpec.spec MigrationReferenceSpec.spec RenameTest.specsWith runConnAssert DataTypeTest.specsWith runConnAssert (Just (runMigrationSilent dataTypeMigrate)) [ TestFn "text" dataTypeTableText , TestFn "textMaxLen" dataTypeTableTextMaxLen , TestFn "bytes" dataTypeTableBytes , TestFn "bytesTextTuple" dataTypeTableBytesTextTuple , TestFn "bytesMaxLen" dataTypeTableBytesMaxLen , TestFn "int" dataTypeTableInt , TestFn "intList" dataTypeTableIntList , TestFn "intMap" dataTypeTableIntMap , TestFn "bool" dataTypeTableBool , TestFn "day" dataTypeTableDay , TestFn "time" (DataTypeTest.roundTime . dataTypeTableTime) , TestFn "utc" (DataTypeTest.roundUTCTime . dataTypeTableUtc) , TestFn "jsonb" dataTypeTableJsonb ] [ ("pico", dataTypeTablePico) ] dataTypeTableDouble HtmlTest.specsWith runConnAssert (Just (runMigrationSilent HtmlTest.htmlMigrate)) EmbedTest.specsWith runConnAssert EmbedOrderTest.specsWith runConnAssert LargeNumberTest.specsWith runConnAssert ForeignKey.specsWith runConnAssert UniqueTest.specsWith runConnAssert MaxLenTest.specsWith runConnAssert MaybeFieldDefsTest.specsWith runConnAssert TypeLitFieldDefsTest.specsWith runConnAssert Recursive.specsWith runConnAssert SumTypeTest.specsWith runConnAssert (Just (runMigrationSilent SumTypeTest.sumTypeMigrate)) MigrationTest.specsWith runConnAssert MigrationOnlyTest.specsWith runConnAssert (Just $ runMigrationSilent MigrationOnlyTest.migrateAll1 >> runMigrationSilent MigrationOnlyTest.migrateAll2 ) PersistentTest.specsWith runConnAssert ReadWriteTest.specsWith runConnAssert PersistentTest.filterOrSpecs runConnAssert RawSqlTest.specsWith runConnAssert UpsertTest.specsWith runConnAssert UpsertTest.Don'tUpdateNull UpsertTest.UpsertPreserveOldKey MpsNoPrefixTest.specsWith runConnAssert MpsCustomPrefixTest.specsWith runConnAssert EmptyEntityTest.specsWith runConnAssert (Just (runMigrationSilent EmptyEntityTest.migration)) CompositeTest.specsWith runConnAssert TreeTest.specsWith runConnAssert PersistUniqueTest.specsWith runConnAssert PrimaryTest.specsWith runConnAssert CustomPersistFieldTest.specsWith runConnAssert CustomPrimaryKeyReferenceTest.specsWith runConnAssert MigrationColumnLengthTest.specsWith runConnAssert EquivalentTypeTestPostgres.specs TransactionLevelTest.specsWith runConnAssert LongIdentifierTest.specsWith runConnAssertUseConf -- Have at least one test use the conf variant of connecting to Postgres, to improve test coverage. 
JSONTest.specs CustomConstraintTest.specs UpsertWhere.specs PgIntervalTest.specs ArrayAggTest.specs GeneratedColumnTestSQL.specsWith runConnAssert persistent-postgresql-2.13.6.2/test/PgInit.hs0000644000000000000000000001570414476403105017252 0ustar0000000000000000{-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module PgInit ( runConn , runConn_ , runConnAssert , runConnAssertUseConf , MonadIO , persistSettings , MkPersistSettings (..) , BackendKey(..) , GenerateKey(..) -- re-exports , module Control.Monad.Trans.Reader , module Control.Monad , module Database.Persist.Sql , module Database.Persist.SqlBackend , module Database.Persist , module Database.Persist.Sql.Raw.QQ , module Init , module Test.Hspec , module Test.Hspec.Expectations.Lifted , module Test.HUnit , AValue (..) , BS.ByteString , Int32, Int64 , liftIO , mkPersist, migrateModels, mkMigrate, share, sqlSettings, persistLowerCase, persistUpperCase , mkEntityDefList , setImplicitIdDef , SomeException , Text , TestFn(..) , LoggingT , ResourceT , UUID(..) , sqlSettingsUuid ) where import Init ( GenerateKey(..) , MonadFail , RunDb , TestFn(..) , UUID(..) , arbText , asIO , assertEmpty , assertNotEmpty , assertNotEqual , isTravis , liftA2 , sqlSettingsUuid , truncateTimeOfDay , truncateToMicro , truncateUTCTime , (==@) , (@/=) , (@==) ) -- re-exports import Control.Exception (SomeException) import Control.Monad (forM_, liftM, replicateM, void, when) import Control.Monad.Trans.Reader import Data.Aeson (FromJSON, ToJSON, Value(..), object) import qualified Data.Text.Encoding as TE import Database.Persist.Postgresql.JSON () import Database.Persist.Sql.Raw.QQ import Database.Persist.SqlBackend import Database.Persist.TH ( MkPersistSettings(..) , migrateModels , mkEntityDefList , mkMigrate , mkPersist , persistLowerCase , persistUpperCase , setImplicitIdDef , share , sqlSettings ) import Test.Hspec ( Arg , Spec , SpecWith , afterAll_ , before , beforeAll , before_ , describe , fdescribe , fit , hspec , it ) import Test.Hspec.Expectations.Lifted import Test.QuickCheck.Instances () import UnliftIO -- testing import Test.HUnit (Assertion, assertBool, assertFailure, (@=?), (@?=)) import Test.QuickCheck import Control.Monad (unless, (>=>)) import Control.Monad.IO.Unlift (MonadUnliftIO) import Control.Monad.Logger import Control.Monad.Trans.Resource (ResourceT, runResourceT) import qualified Data.ByteString as BS import qualified Data.HashMap.Strict as HM import Data.Int (Int32, Int64) import Data.Maybe (fromMaybe) import Data.Monoid ((<>)) import Data.Text (Text) import Data.Vector (Vector) import System.Environment (getEnvironment) import System.Log.FastLogger (fromLogStr) import Database.Persist import Database.Persist.Postgresql import Database.Persist.Sql import Database.Persist.TH () _debugOn :: Bool _debugOn = False dockerPg :: IO (Maybe BS.ByteString) dockerPg = do env <- liftIO getEnvironment return $ case lookup "POSTGRES_NAME" env of Just _name -> Just "postgres" -- /persistent/postgres _ -> Nothing persistSettings :: MkPersistSettings persistSettings = sqlSettings { mpsGeneric = True } runConn :: MonadUnliftIO m => SqlPersistT (LoggingT m) t -> m () runConn f = runConn_ f >>= const (return ()) runConn_ :: MonadUnliftIO m => SqlPersistT (LoggingT m) t -> m t runConn_ f = runConnInternal RunConnBasic f -- | Data type to switch between pool creation functions, to ease testing both. 
data RunConnType = RunConnBasic -- ^ Use 'withPostgresqlPool' | RunConnConf -- ^ Use 'withPostgresqlPoolWithConf' deriving (Show, Eq) runConnInternal :: MonadUnliftIO m => RunConnType -> SqlPersistT (LoggingT m) t -> m t runConnInternal connType f = do travis <- liftIO isTravis let debugPrint = not travis && _debugOn printDebug = if debugPrint then print . fromLogStr else void . return poolSize = 1 connString <- if travis then do pure "host=localhost port=5432 user=perstest password=perstest dbname=persistent" else do host <- fromMaybe "localhost" <$> liftIO dockerPg pure ("host=" <> host <> " port=5432 user=postgres dbname=test") flip runLoggingT (\_ _ _ s -> printDebug s) $ do logInfoN (if travis then "Running in CI" else "CI not detected") let go = case connType of RunConnBasic -> withPostgresqlPool connString poolSize $ runSqlPool f RunConnConf -> do let conf = PostgresConf { pgConnStr = connString , pgPoolStripes = 1 , pgPoolIdleTimeout = 60 , pgPoolSize = poolSize } hooks = defaultPostgresConfHooks withPostgresqlPoolWithConf conf hooks (runSqlPool f) -- horrifying hack :( postgresql is having weird connection failures in -- CI, for no reason that i can determine. see this PR for notes: -- https://github.com/yesodweb/persistent/pull/1197 eres <- try go case eres of Left (err :: SomeException) -> do eres' <- try go case eres' of Left (err' :: SomeException) -> if show err == show err' then throwIO err else throwIO err' Right a -> pure a Right a -> pure a runConnAssert :: SqlPersistT (LoggingT (ResourceT IO)) () -> Assertion runConnAssert actions = do runResourceT $ runConn $ actions >> transactionUndo -- | Like runConnAssert, but uses the "conf" flavor of functions to test that code path. runConnAssertUseConf :: SqlPersistT (LoggingT (ResourceT IO)) () -> Assertion runConnAssertUseConf actions = do runResourceT $ runConnInternal RunConnConf (actions >> transactionUndo) newtype AValue = AValue { getValue :: Value } -- Need a specialized Arbitrary instance instance Arbitrary AValue where arbitrary = AValue <$> frequency [ (1, pure Null) , (1, Bool <$> arbitrary) , (2, Number <$> arbitrary) , (2, String <$> arbText) , (3, Array <$> limitIt 4 (fmap (fmap getValue) arbitrary)) , (3, object <$> arbObject) ] where limitIt :: Int -> Gen a -> Gen a limitIt i x = sized $ \n -> do let m = if n > i then i else n resize m x arbObject = limitIt 4 -- Recursion can make execution divergent $ listOf -- [(,)] -> (,) . liftA2 (,) arbText -- (,) -> Text and Value $ limitIt 4 (fmap getValue arbitrary) -- Again, precaution against divergent recursion. 
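-- A typical spec in this suite uses the helpers above roughly like this
-- (a sketch; @record@ stands for whatever entity the test module defines):
--
-- @
-- spec :: Spec
-- spec = describe "round trip" $
--     it "inserts and reads back" $ runConnAssert $ do
--         key <- insert record
--         mrec <- get key
--         mrec @== Just record
-- @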
persistent-postgresql-2.13.6.2/test/ArrayAggTest.hs0000644000000000000000000000361414646332256020421 0ustar0000000000000000{-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE DataKinds, FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE UndecidableInstances #-} -- FIXME {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} module ArrayAggTest where import Control.Monad.IO.Class (MonadIO) import Data.Aeson import Data.List (sort) import qualified Data.Text as T import Test.Hspec.Expectations () import PersistentTestModels import PgInit share [mkPersist persistSettings, mkMigrate "jsonTestMigrate"] [persistLowerCase| TestValue json Value |] cleanDB :: (BaseBackend backend ~ SqlBackend, PersistQueryWrite backend, MonadIO m) => ReaderT backend m () cleanDB = deleteWhere ([] :: [Filter TestValue]) emptyArr :: Value emptyArr = toJSON ([] :: [Value]) specs :: Spec specs = do describe "rawSql/array_agg" $ do let runArrayAggTest :: (PersistField [a], Ord a, Show a) => Text -> [a] -> Assertion runArrayAggTest dbField expected = runConnAssert $ do void $ insertMany [ UserPT "a" $ Just "b" , UserPT "c" $ Just "d" , UserPT "e" Nothing , UserPT "g" $ Just "h" ] escape <- getEscapeRawNameFunction let query = T.concat [ "SELECT array_agg(", escape dbField, ") " , "FROM ", escape "UserPT" ] [Single xs] <- rawSql query [] liftIO $ sort xs @?= expected it "works for [Text]" $ do runArrayAggTest "ident" ["a", "c", "e", "g" :: Text] it "works for [Maybe Text]" $ do runArrayAggTest "password" [Nothing, Just "b", Just "d", Just "h" :: Maybe Text] persistent-postgresql-2.13.6.2/test/EquivalentTypeTestPostgres.hs0000644000000000000000000000276514646332256023440 0ustar0000000000000000{-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE DataKinds, FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -Wno-unused-top-binds #-} module EquivalentTypeTestPostgres (specs) where import Control.Monad.Trans.Resource (runResourceT) import qualified Data.Text as T import Database.Persist.TH import PgInit share [mkPersist sqlSettings, mkMigrate "migrateAll1"] [persistLowerCase| EquivalentType sql=equivalent_types field1 Int sqltype=bigint field2 T.Text sqltype=text field3 T.Text sqltype=us_postal_code deriving Eq Show |] share [mkPersist sqlSettings, mkMigrate "migrateAll2"] [persistLowerCase| EquivalentType2 sql=equivalent_types field1 Int sqltype=int8 field2 T.Text field3 T.Text sqltype=us_postal_code deriving Eq Show |] specs :: Spec specs = describe "doesn't migrate equivalent types" $ do it "works" $ asIO $ runResourceT $ runConn $ do _ <- rawExecute "DROP DOMAIN IF EXISTS us_postal_code CASCADE" [] _ <- rawExecute "CREATE DOMAIN us_postal_code AS TEXT CHECK(VALUE ~ '^\\d{5}$')" [] _ <- runMigrationSilent migrateAll1 xs <- getMigration migrateAll2 liftIO $ xs @?= [] persistent-postgresql-2.13.6.2/test/JSONTest.hs0000644000000000000000000007433314476403105017474 0ustar0000000000000000{-# LANGUAGE 
ExistentialQuantification #-} {-# language DataKinds #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} module JSONTest where import Control.Monad.IO.Class (MonadIO) import Data.Aeson hiding (Key) import qualified Data.Vector as V (fromList) import Test.HUnit (assertBool) import Test.Hspec.Expectations () import Database.Persist import Database.Persist.Postgresql.JSON import PgInit share [mkPersist persistSettings, mkMigrate "jsonTestMigrate"] [persistLowerCase| TestValue json Value deriving Show |] cleanDB :: (BaseBackend backend ~ SqlBackend, PersistQueryWrite backend, MonadIO m) => ReaderT backend m () cleanDB = deleteWhere ([] :: [Filter TestValue]) emptyArr :: Value emptyArr = toJSON ([] :: [Value]) insert' :: (MonadIO m, PersistStoreWrite backend, BaseBackend backend ~ SqlBackend) => Value -> ReaderT backend m (Key TestValue) insert' = insert . TestValue matchKeys :: (Show record, Show (Key record), MonadIO m, Eq (Key record)) => [Key record] -> [Entity record] -> m () matchKeys ys xs = do msg1 `assertBoolIO` (xLen == yLen) forM_ ys $ \y -> msg2 y `assertBoolIO` (y `elem` ks) where ks = entityKey <$> xs xLen = length xs yLen = length ys msg1 = mconcat [ "\nexpected: ", show yLen , "\n but got: ", show xLen , "\n[xs: ", show xs, "]" , "\n[ys: ", show ys, "]" ] msg2 y = mconcat [ "key \"", show y , "\" not in result:\n ", show ks ] setup :: IO TestKeys setup = asIO $ runConn_ $ do void $ runMigrationSilent jsonTestMigrate testKeys teardown :: IO () teardown = asIO $ runConn_ $ do cleanDB shouldBeIO :: (Show a, Eq a, MonadIO m) => a -> a -> m () shouldBeIO x y = liftIO $ shouldBe x y assertBoolIO :: MonadIO m => String -> Bool -> m () assertBoolIO s b = liftIO $ assertBool s b testKeys :: (Monad m, MonadIO m) => ReaderT SqlBackend m TestKeys testKeys = do nullK <- insert' Null boolTK <- insert' $ Bool True boolFK <- insert' $ toJSON False num0K <- insert' $ Number 0 num1K <- insert' $ Number 1 numBigK <- insert' $ toJSON (1234567890 :: Int) numFloatK <- insert' $ Number 0.0 numSmallK <- insert' $ Number 0.0000000000000000123 numFloat2K <- insert' $ Number 1.5 -- numBigFloatK will turn into 9876543210.123457, because the value goes -- through a JSON 'Double', which only carries about 15 significant digits numBigFloatK <- insert' $ toJSON (9876543210.123456789 :: Double) strNullK <- insert' $ String "" strObjK <- insert' $ String "{}" strArrK <- insert' $ String "[]" strAK <- insert' $ String "a" strTestK <- insert' $ toJSON ("testing" :: Text) str2K <- insert' $ String "2" strFloatK <- insert' $ String "0.45876" arrNullK <- insert' $ Array $ V.fromList [] arrListK <- insert' $ toJSON [emptyArr,emptyArr,toJSON [emptyArr,emptyArr]] arrList2K <- insert' $ toJSON [emptyArr,toJSON [Number 3,Bool False] ,toJSON [emptyArr,toJSON [Object mempty]] ] arrFilledK <- insert' $ toJSON [Null, Number 4, String "b" ,Object mempty, emptyArr ,object [ "test" .= [Null], "test2" .= String "yes"] ] arrList3K <- insert' $ toJSON [toJSON [String "a"], Number 1] arrList4K <- insert' $ toJSON [String "a", String "b", String "c", String "d"] objNullK <- insert' $ Object mempty objTestK <- insert' $ object ["test" .= Null, "test1" .= String "no"] objDeepK <- insert' $ object ["c" .= Number 24.986, "foo" .= object ["deep1" .= Bool
True]] objEmptyK <- insert' $ object ["" .= Number 9001] objFullK <- insert' $ object ["a" .= Number 1, "b" .= Number 2 ,"c" .= Number 3, "d" .= Number 4 ] return TestKeys{..} data TestKeys = TestKeys { nullK :: Key TestValue , boolTK :: Key TestValue , boolFK :: Key TestValue , num0K :: Key TestValue , num1K :: Key TestValue , numBigK :: Key TestValue , numFloatK :: Key TestValue , numSmallK :: Key TestValue , numFloat2K :: Key TestValue , numBigFloatK :: Key TestValue , strNullK :: Key TestValue , strObjK :: Key TestValue , strArrK :: Key TestValue , strAK :: Key TestValue , strTestK :: Key TestValue , str2K :: Key TestValue , strFloatK :: Key TestValue , arrNullK :: Key TestValue , arrListK :: Key TestValue , arrList2K :: Key TestValue , arrFilledK :: Key TestValue , objNullK :: Key TestValue , objTestK :: Key TestValue , objDeepK :: Key TestValue , arrList3K :: Key TestValue , arrList4K :: Key TestValue , objEmptyK :: Key TestValue , objFullK :: Key TestValue } deriving (Eq, Ord, Show) specs :: Spec specs = afterAll_ teardown $ do beforeAll setup $ do describe "Testing JSON operators" $ do describe "@>. object queries" $ do it "matches an empty Object with any object" $ \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. Object mempty] [] [objNullK, objTestK, objDeepK, objEmptyK, objFullK] `matchKeys` vals it "matches a subset of object properties" $ -- {test: null, test1: no} @>. {test: null} == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. object ["test" .= Null]] [] [objTestK] `matchKeys` vals it "matches a nested object against an empty object at the same key" $ -- {c: 24.986, foo: {deep1: true}} @>. {foo: {}} == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. object ["foo" .= object []]] [] [objDeepK] `matchKeys` vals it "doesn't match a nested object against a string at the same key" $ -- {c: 24.986, foo: {deep1: true}} @>. {foo: nope} == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. object ["foo" .= String "nope"]] [] [] `matchKeys` vals it "matches a nested object when the query object is identical" $ -- {c: 24.986, foo: {deep1: true}} @>. {foo: {deep1: true}} == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. (object ["foo" .= object ["deep1" .= True]])] [] [objDeepK] `matchKeys` vals it "doesn't match a nested object when queried with that exact object" $ -- {c: 24.986, foo: {deep1: true}} @>. {deep1: true} == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. object ["deep1" .= True]] [] [] `matchKeys` vals describe "@>. array queries" $ do it "matches an empty Array with any list" $ \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. emptyArr] [] [arrNullK, arrListK, arrList2K, arrFilledK, arrList3K, arrList4K] `matchKeys` vals it "matches list when queried with subset (1 item)" $ -- [null, 4, 'b', {}, [], {test: [null], test2: 'yes'}] @>. [4] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON [4 :: Int]] [] [arrFilledK] `matchKeys` vals it "matches list when queried with subset (2 items)" $ -- [null, 4, 'b', {}, [], {test: [null], test2: 'yes'}] @>. [null,'b'] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. 
toJSON [Null, String "b"]] [] [arrFilledK] `matchKeys` vals it "doesn't match list when queried with intersecting list (1 match, 1 diff)" $ -- [null, 4, 'b', {}, [], {test: [null], test2: 'yes'}] @>. [[],'d'] == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON [emptyArr, String "d"]] [] [] `matchKeys` vals it "matches list when queried with same list in different order" $ -- [null, 4, 'b', {}, [], {test: [null], test2: 'yes'}] @>. -- [[],'b',{test: [null],test2: 'yes'},4,null,{}] == True \TestKeys {..} -> runConnAssert $ do let queryList = toJSON [ emptyArr, String "b" , object [ "test" .= [Null], "test2" .= String "yes"] , Number 4, Null, Object mempty ] vals <- selectList [TestValueJson @>. queryList ] [] [arrFilledK] `matchKeys` vals it "doesn't match list when queried with same list + 1 item" $ -- [null,4,'b',{},[],{test:[null],test2:'yes'}] @>. -- [null,4,'b',{},[],{test:[null],test2: 'yes'}, false] == False \TestKeys {..} -> runConnAssert $ do let testList = toJSON [ Null, Number 4, String "b", Object mempty, emptyArr , object [ "test" .= [Null], "test2" .= String "yes"] , Bool False ] vals <- selectList [TestValueJson @>. testList] [] [] `matchKeys` vals it "matches list when it shares an empty object with the query list" $ -- [null,4,'b',{},[],{test: [null],test2: 'yes'}] @>. [{}] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON [Object mempty]] [] [arrFilledK] `matchKeys` vals it "matches list with nested list, when queried with an empty nested list" $ -- [null,4,'b',{},[],{test:[null],test2:'yes'}] @>. [{test:[]}] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON [object ["test" .= emptyArr]]] [] [arrFilledK] `matchKeys` vals it "doesn't match list with nested list, when queried with a different nested list" $ -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>. -- [{"test1":[null]}] == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON [object ["test1" .= [Null]]]] [] [] `matchKeys` vals it "matches many nested lists when queried with empty nested list" $ -- [[],[],[[],[]]] @>. [[]] == True -- [[],[3,false],[[],[{}]]] @>. [[]] == True -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>. [[]] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON [emptyArr]] [] [arrListK,arrList2K,arrFilledK, arrList3K] `matchKeys` vals it "matches nested list when queried with a subset of that list" $ -- [[],[3,false],[[],[{}]]] @>. [[3]] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON [[3 :: Int]]] [] [arrList2K] `matchKeys` vals it "doesn't match nested list against a partial intersection of that list" $ -- [[],[3,false],[[],[{}]]] @>. [[true,3]] == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON [[Bool True, Number 3]]] [] [] `matchKeys` vals it "matches list when queried with raw number contained in the list" $ -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>. 4 == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. Number 4] [] [arrFilledK] `matchKeys` vals it "doesn't match list when queried with raw value not contained in the list" $ -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>. 99 == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>.
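-- The raw-scalar tests around this point rely on a documented jsonb
-- containment special case: a scalar on the right-hand side of '@>' is
-- treated as a one-element array, so an array containing 4 satisfies
-- '... @> 4' even though the two sides have different JSON types.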
Number 99] [] [] `matchKeys` vals it "matches list when queried with raw string contained in the list" $ -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>. "b" == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. String "b"] [] [arrFilledK, arrList4K] `matchKeys` vals it "doesn't match list with empty object when queried with \"{}\" " $ -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>. "{}" == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. String "{}"] [] [strObjK] `matchKeys` vals it "doesn't match list with nested object when queried with object (not in list)" $ -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>. -- {"test":[null],"test2":"yes"} == False \TestKeys {..} -> runConnAssert $ do let queryObject = object [ "test" .= [Null], "test2" .= String "yes"] vals <- selectList [TestValueJson @>. queryObject ] [] [] `matchKeys` vals describe "@>. string queries" $ do it "matches identical strings" $ -- "testing" @>. "testing" == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. String "testing"] [] [strTestK] `matchKeys` vals it "doesn't match case-insensitively" $ -- "testing" @>. "Testing" == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. String "Testing"] [] [] `matchKeys` vals it "doesn't match substrings" $ -- "testing" @>. "test" == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. String "test"] [] [] `matchKeys` vals it "doesn't match strings with object keys" $ -- "testing" @>. {"testing":1} == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. object ["testing" .= Number 1]] [] [] `matchKeys` vals describe "@>. number queries" $ do it "matches identical numbers" $ -- 1 @>. 1 == True -- [1] @>. 1 == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON (1 :: Int)] [] [num1K, arrList3K] `matchKeys` vals it "matches numbers when queried with float" $ -- 0 @>. 0.0 == True -- 0.0 @>. 0.0 == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON (0.0 :: Double)] [] [num0K,numFloatK] `matchKeys` vals it "does not match numbers when queried with a substring of that number" $ -- 1234567890 @>. 123456789 == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON (123456789 :: Int)] [] [] `matchKeys` vals it "does not match number when queried with different number" $ -- 1234567890 @>. 234567890 == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON (234567890 :: Int)] [] [] `matchKeys` vals it "does not match number when queried with string of that number" $ -- 1 @>. "1" == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. String "1"] [] [] `matchKeys` vals it "does not match number when queried with list of digits" $ -- 1234567890 @>. [1,2,3,4,5,6,7,8,9,0] == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON ([1,2,3,4,5,6,7,8,9,0] :: [Int])] [] [] `matchKeys` vals describe "@>. boolean queries" $ do it "matches identical booleans (True)" $ -- true @>. true == True -- false @>. true == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. toJSON True] [] [boolTK] `matchKeys` vals it "matches identical booleans (False)" $ -- false @>. false == True -- true @>. false == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>.
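-- For scalar-to-scalar cases, jsonb containment degenerates to plain
-- equality: a string, number, boolean, or null contains exactly itself,
-- and there is no coercion between types (so true does not contain
-- "true", and 1 does not contain "1").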
Bool False] [] [boolFK] `matchKeys` vals it "does not match boolean with string of boolean" $ -- true @>. "true" == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. String "true"] [] [] `matchKeys` vals describe "@>. null queries" $ do it "matches nulls" $ -- null @>. null == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. Null] [] [nullK,arrFilledK] `matchKeys` vals it "does not match null with string of null" $ -- null @>. "null" == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson @>. String "null"] [] [] `matchKeys` vals describe "<@. queries" $ do it "matches subobject when queried with superobject" $ -- {} <@. {"test":null,"test1":"no","blabla":[]} == True -- {"test":null,"test1":"no"} <@. {"test":null,"test1":"no","blabla":[]} == True \TestKeys {..} -> runConnAssert $ do let queryObject = object ["test" .= Null , "test1" .= String "no" , "blabla" .= emptyArr ] vals <- selectList [TestValueJson <@. queryObject] [] [objNullK,objTestK] `matchKeys` vals it "matches raw values and sublists when queried with superlist" $ -- [] <@. [null,4,"b",{},[],{"test":[null],"test2":"yes"},false] == True -- null <@. [null,4,"b",{},[],{"test":[null],"test2":"yes"},false] == True -- false <@. [null,4,"b",{},[],{"test":[null],"test2":"yes"},false] == True -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] <@. -- [null,4,"b",{},[],{"test":[null],"test2":"yes"},false] == True \TestKeys {..} -> runConnAssert $ do let queryList = toJSON [ Null, Number 4, String "b", Object mempty, emptyArr , object [ "test" .= [Null], "test2" .= String "yes"] , Bool False ] vals <- selectList [TestValueJson <@. queryList ] [] [arrNullK,arrFilledK,boolFK,nullK] `matchKeys` vals it "matches identical strings" $ -- "a" <@. "a" == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson <@. String "a"] [] [strAK] `matchKeys` vals it "matches identical big floats" $ -- 9876543210.123457 <@ 9876543210.123457 == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson <@. Number 9876543210.123457] [] [numBigFloatK] `matchKeys` vals it "doesn't match different big floats" $ -- 9876543210.123457 <@. 9876543210.123456789 == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson <@. Number 9876543210.123456789] [] [] `matchKeys` vals it "matches nulls" $ -- null <@. null == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson <@. Null] [] [nullK] `matchKeys` vals describe "?. queries" $ do it "matches top level keys and not the keys of nested objects" $ -- {"test":null,"test1":"no"} ?. "test" == True -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] ?. "test" == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?. "test"] [] [objTestK] `matchKeys` vals it "doesn't match nested key" $ -- {"c":24.986,"foo":{"deep1":true"}} ?. "deep1" == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?. "deep1"] [] [] `matchKeys` vals it "matches \"{}\" but not empty object when queried with \"{}\"" $ -- "{}" ?. "{}" == True -- {} ?. "{}" == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?. "{}"] [] [strObjK] `matchKeys` vals it "matches raw empty str and empty str key when queried with \"\"" $ ---- {} ?. "" == False ---- "" ?. "" == True ---- {"":9001} ?. "" == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?. 
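-- '(?.)' renders to the jsonb '?' operator, which tests for the presence
-- of a string: a top-level key of an object, an element of an array of
-- strings, or equality with a string scalar. It never descends into
-- nested values, which is what this block of tests exercises.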
""] [] [strNullK,objEmptyK] `matchKeys` vals it "matches lists containing string value when queried with raw string value" $ -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] ?. "b" == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?. "b"] [] [arrFilledK,arrList4K,objFullK] `matchKeys` vals it "matches lists, objects, and raw values correctly when queried with string" $ -- [["a"]] ?. "a" == False -- "a" ?. "a" == True -- ["a","b","c","d"] ?. "a" == True -- {"a":1,"b":2,"c":3,"d":4} ?. "a" == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?. "a"] [] [strAK,arrList4K,objFullK] `matchKeys` vals it "matches string list but not real list when queried with \"[]\"" $ -- "[]" ?. "[]" == True -- [] ?. "[]" == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?. "[]"] [] [strArrK] `matchKeys` vals it "does not match null when queried with string null" $ -- null ?. "null" == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?. "null"] [] [] `matchKeys` vals it "does not match bool whe nqueried with string bool" $ -- true ?. "true" == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?. "true"] [] [] `matchKeys` vals describe "?|. queries" $ do it "matches raw vals, lists, objects, and nested objects" $ -- "a" ?|. ["a","b","c"] == True -- [["a"],1] ?|. ["a","b","c"] == False -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] ?|. ["a","b","c"] == True -- ["a","b","c","d"] ?|. ["a","b","c"] == True -- {"a":1,"b":2,"c":3,"d":4} ?|. ["a","b","c"] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?|. ["a","b","c"]] [] [strAK,arrFilledK,objDeepK,arrList4K,objFullK] `matchKeys` vals it "matches str object but not object when queried with \"{}\"" $ -- "{}" ?|. ["{}"] == True -- {} ?|. ["{}"] == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?|. ["{}"]] [] [strObjK] `matchKeys` vals it "doesn't match superstrings when queried with substring" $ -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] ?|. ["test"] == False -- "testing" ?|. ["test"] == False -- {"test":null,"test1":"no"} ?|. ["test"] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?|. ["test"]] [] [objTestK] `matchKeys` vals it "doesn't match nested keys" $ -- {"c":24.986,"foo":{"deep1":true"}} ?|. ["deep1"] == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?|. ["deep1"]] [] [] `matchKeys` vals it "doesn't match anything when queried with empty list" $ -- ANYTHING ?|. [] == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?|. []] [] [] `matchKeys` vals it "doesn't match raw, non-string, values when queried with strings" $ -- true ?|. ["true","null","1"] == False -- null ?|. ["true","null","1"] == False -- 1 ?|. ["true","null","1"] == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?|. ["true","null","1"]] [] [] `matchKeys` vals it "matches string array when queried with \"[]\"" $ -- [] ?|. ["[]"] == False -- "[]" ?|. ["[]"] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?|. ["[]"]] [] [strArrK] `matchKeys` vals describe "?&. queries" $ do it "matches anything when queried with an empty list" $ -- ANYTHING ?&. [] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?&. 
[]] [] flip matchKeys vals [ nullK , boolTK, boolFK , num0K, num1K, numBigK, numFloatK , numSmallK, numFloat2K, numBigFloatK , strNullK, strObjK, strArrK, strAK , strTestK, str2K, strFloatK , arrNullK, arrListK, arrList2K , arrFilledK, arrList3K, arrList4K , objNullK, objTestK, objDeepK , objEmptyK, objFullK ] it "matches raw values, lists, and objects when queried with string" $ -- "a" ?&. ["a"] == True -- [["a"],1] ?&. ["a"] == False -- ["a","b","c","d"] ?&. ["a"] == True -- {"a":1,"b":2,"c":3,"d":4} ?&. ["a"] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?&. ["a"]] [] [strAK,arrList4K,objFullK] `matchKeys` vals it "matches raw values, lists, and objects when queried with multiple strings" $ -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] ?&. ["b","c"] == False -- {"c":24.986,"foo":{"deep1":true"}} ?&. ["b","c"] == False -- ["a","b","c","d"] ?&. ["b","c"] == True -- {"a":1,"b":2,"c":3,"d":4} ?&. ["b","c"] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?&. ["b","c"]] [] [arrList4K,objFullK] `matchKeys` vals it "matches object string when queried with \"{}\"" $ -- {} ?&. ["{}"] == False -- "{}" ?&. ["{}"] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?&. ["{}"]] [] [strObjK] `matchKeys` vals it "doesn't match superstrings when queried with substring" $ -- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] ?&. ["test"] == False -- "testing" ?&. ["test"] == False -- {"test":null,"test1":"no"} ?&. ["test"] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?&. ["test"]] [] [objTestK] `matchKeys` vals it "doesn't match nested keys" $ -- {"c":24.986,"foo":{"deep1":true}} ?&. ["deep1"] == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?&. ["deep1"]] [] [] `matchKeys` vals it "doesn't match anything when there is a partial match" $ -- "a" ?&. ["a","e"] == False -- ["a","b","c","d"] ?&. ["a","e"] == False -- {"a":1,"b":2,"c":3,"d":4} ?&. ["a","e"] == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?&. ["a","e"]] [] [] `matchKeys` vals it "matches string array when queried with \"[]\"" $ -- [] ?&. ["[]"] == False -- "[]" ?&. ["[]"] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?&. ["[]"]] [] [strArrK] `matchKeys` vals it "doesn't match null when queried with string null" $ -- THIS WILL FAIL IF THE IMPLEMENTATION USES -- @ '{null}' @ -- INSTEAD OF -- @ ARRAY['null'] @ -- null ?&. ["null"] == False \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?&. ["null"]] [] [] `matchKeys` vals it "doesn't match number when queried with str of that number" $ -- [["a"],1] ?&. ["1"] == False -- "1" ?&. ["1"] == True \TestKeys {..} -> runConnAssert $ do str1 <- insert' $ toJSON $ String "1" vals <- selectList [TestValueJson ?&. ["1"]] [] [str1] `matchKeys` vals it "doesn't match empty objs or list when queried with empty string" $ -- {} ?&. [""] == False -- [] ?&. [""] == False -- "" ?&. [""] == True -- {"":9001} ?&. [""] == True \TestKeys {..} -> runConnAssert $ do vals <- selectList [TestValueJson ?&.
[""]] [] [strNullK,objEmptyK] `matchKeys` vals persistent-postgresql-2.13.6.2/test/CustomConstraintTest.hs0000644000000000000000000000613014476403105022230 0ustar0000000000000000{-# LANGUAGE EmptyDataDecls #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE GADTs, DataKinds, FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE StandaloneDeriving #-} module CustomConstraintTest where import PgInit import qualified Data.Text as T share [mkPersist sqlSettings, mkMigrate "customConstraintMigrate"] [persistLowerCase| CustomConstraint1 some_field Text deriving Show CustomConstraint2 cc_id CustomConstraint1Id constraint=custom_constraint deriving Show CustomConstraint3 -- | This will lead to a constraint with the name custom_constraint3_cc_id1_fkey cc_id1 CustomConstraint1Id cc_id2 CustomConstraint1Id deriving Show |] specs :: Spec specs = do describe "custom constraint used in migration" $ do it "custom constraint is actually created" $ runConnAssert $ do void $ runMigrationSilent customConstraintMigrate void $ runMigrationSilent customConstraintMigrate -- run a second time to ensure the constraint isn't dropped let query = T.concat ["SELECT DISTINCT COUNT(*) " ,"FROM information_schema.constraint_column_usage ccu, " ,"information_schema.key_column_usage kcu, " ,"information_schema.table_constraints tc " ,"WHERE tc.constraint_type='FOREIGN KEY' " ,"AND kcu.constraint_name=tc.constraint_name " ,"AND ccu.constraint_name=kcu.constraint_name " ,"AND kcu.ordinal_position=1 " ,"AND ccu.table_name=? " ,"AND ccu.column_name=? " ,"AND kcu.table_name=? " ,"AND kcu.column_name=? " ,"AND tc.constraint_name=?"] [Single exists_] <- rawSql query [PersistText "custom_constraint1" ,PersistText "id" ,PersistText "custom_constraint2" ,PersistText "cc_id" ,PersistText "custom_constraint"] liftIO $ 1 @?= (exists_ :: Int) it "allows multiple constraints on a single column" $ runConnAssert $ do void $ runMigrationSilent customConstraintMigrate -- | Here we add another foreign key on the same column where the default one already exists. In practice, this could be a compound key with another field. 
rawExecute "ALTER TABLE \"custom_constraint3\" ADD CONSTRAINT \"extra_constraint\" FOREIGN KEY(\"cc_id1\") REFERENCES \"custom_constraint1\"(\"id\")" [] -- | This is where the error is thrown in `getColumn` void $ getMigration customConstraintMigrate pure () persistent-postgresql-2.13.6.2/test/PgIntervalTest.hs0000644000000000000000000000301614646332256020773 0ustar0000000000000000{-# LANGUAGE EmptyDataDecls #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE GADTs, DataKinds, FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE TypeOperators #-} module PgIntervalTest where import PgInit import Data.Time.Clock (NominalDiffTime) import Database.Persist.Postgresql (PgInterval(..)) import Test.Hspec.QuickCheck share [mkPersist sqlSettings, mkMigrate "pgIntervalMigrate"] [persistLowerCase| PgIntervalDb interval_field PgInterval deriving Eq deriving Show |] -- Postgres Interval has a 1 microsecond resolution, while NominalDiffTime has -- picosecond resolution. Round to the nearest microsecond so that we can be -- fine in the tests. truncate' :: NominalDiffTime -> NominalDiffTime truncate' x = (fromIntegral (round (x * 10^6))) / 10^6 specs :: Spec specs = do describe "Postgres Interval Property tests" $ do prop "Round trips" $ \time -> runConnAssert $ do let eg = PgIntervalDb $ PgInterval (truncate' time) rid <- insert eg r <- getJust rid liftIO $ r `shouldBe` eg persistent-postgresql-2.13.6.2/test/UpsertWhere.hs0000644000000000000000000001703514646332256020343 0ustar0000000000000000{-# LANGUAGE DataKinds #-} {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeApplications #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE UndecidableInstances #-} module UpsertWhere where import PgInit import Data.Time import Database.Persist.Postgresql share [mkPersist sqlSettings, mkMigrate "upsertWhereMigrate"] [persistLowerCase| Item name Text sqltype=varchar(80) description Text price Double Maybe quantity Int Maybe UniqueName name deriving Eq Show Ord ItemMigOnly name Text price Double quantity Int UniqueNameMigOnly name createdAt UTCTime MigrationOnly default=CURRENT_TIMESTAMP |] wipe :: IO () wipe = runConnAssert $ do deleteWhere ([] :: [Filter Item]) deleteWhere ([] :: [Filter ItemMigOnly]) itDb :: String -> SqlPersistT (LoggingT (ResourceT IO)) a -> SpecWith (Arg (IO ())) itDb msg action = it msg $ runConnAssert $ void action specs :: Spec specs = describe "UpsertWhere" $ do let item1 = Item "item1" "" (Just 3) Nothing item2 = Item "item2" "hello world" Nothing (Just 2) items = [item1, item2] describe "upsertWhere" $ before_ wipe $ do itDb "inserts appropriately" $ do upsertWhere item1 [ItemDescription =. 
"i am item 1"] [] Just item <- fmap entityVal <$> getBy (UniqueName "item1") item `shouldBe` item1 itDb "performs only updates given if record already exists" $ do let newDescription = "I am a new description" insert_ item1 upsertWhere (Item "item1" "i am an inserted description" (Just 1) (Just 2)) [ItemDescription =. newDescription] [] Just item <- fmap entityVal <$> getBy (UniqueName "item1") item `shouldBe` item1 { itemDescription = newDescription } itDb "inserts with MigrationOnly fields (#1330)" $ do upsertWhere (ItemMigOnly "foobar" 20 1) [ItemMigOnlyPrice +=. 2] [] describe "upsertManyWhere" $ do itDb "inserts fresh records" $ do insertMany_ items let newItem = Item "item3" "fresh" Nothing Nothing upsertManyWhere (newItem : items) [copyField ItemDescription] [] [] dbItems <- map entityVal <$> selectList [] [] dbItems `shouldMatchList` (newItem : items) itDb "updates existing records" $ do let postUpdate = map (\i -> i { itemQuantity = fmap (+1) (itemQuantity i) }) items insertMany_ items upsertManyWhere items [] [ItemQuantity +=. Just 1] [] dbItems <- fmap entityVal <$> selectList [] [] dbItems `shouldMatchList` postUpdate itDb "only copies passing values" $ do insertMany_ items let newItems = map (\i -> i { itemQuantity = Just 0, itemPrice = fmap (*2) (itemPrice i) }) items postUpdate = map (\i -> i { itemPrice = fmap (*2) (itemPrice i) }) items upsertManyWhere newItems [ copyUnlessEq ItemQuantity (Just 0) , copyField ItemPrice ] [] [] dbItems <- fmap entityVal <$> selectList [] [] dbItems `shouldMatchList` postUpdate itDb "inserts without modifying existing records if no updates specified" $ do let newItem = Item "item3" "hi friends!" Nothing Nothing insertMany_ items upsertManyWhere (newItem : items) [] [] [] dbItems <- fmap entityVal <$> selectList [] [] dbItems `shouldMatchList` (newItem : items) itDb "inserts without modifying existing records if no updates specified and there's a filter with True condition" $ do let newItem = Item "item3" "hi friends!" Nothing Nothing insertMany_ items upsertManyWhere (newItem : items) [] [] [ItemDescription ==. "hi friends!"] dbItems <- fmap entityVal <$> selectList [] [] dbItems `shouldMatchList` (newItem : items) itDb "inserts without updating existing records if there are updates specified but there's a filter with a False condition" $ do let newItem = Item "item3" "hi friends!" Nothing Nothing insertMany_ items upsertManyWhere (newItem : items) [] [ItemQuantity +=. Just 1] [ItemDescription ==. "hi friends!"] dbItems <- fmap entityVal <$> selectList [] [] dbItems `shouldMatchList` (newItem : items) itDb "inserts new records but does not update existing records if there are updates specified but the modification condition is False" $ do let newItem = Item "item3" "hi friends!" Nothing Nothing insertMany_ items upsertManyWhere (newItem : items) [] [ItemQuantity +=. Just 1] [excludeNotEqualToOriginal ItemDescription] dbItems <- fmap entityVal <$> selectList [] [] dbItems `shouldMatchList` (newItem : items) itDb "inserts new records and updates existing records if there are updates specified and the modification condition is True (because it's empty)" $ do let newItem = Item "item3" "hello world" Nothing Nothing postUpdate = map (\i -> i {itemQuantity = fmap (+ 1) (itemQuantity i)}) items insertMany_ items upsertManyWhere (newItem : items) [] [ItemQuantity +=. 
Just 1] [] dbItems <- fmap entityVal <$> selectList [] [] dbItems `shouldMatchList` (newItem : postUpdate) itDb "inserts new records and updates existing records if there are updates specified and the modification filter condition is triggered" $ do let newItem = Item "item3" "hi friends!" Nothing Nothing postUpdate = map (\i -> i {itemQuantity = fmap (+1) (itemQuantity i)}) items insertMany_ items upsertManyWhere (newItem : items) [ copyUnlessEq ItemDescription "hi friends!" , copyField ItemPrice ] [ItemQuantity +=. Just 1] [ItemDescription !=. "bye friends!"] dbItems <- fmap entityVal <$> selectList [] [] dbItems `shouldMatchList` (newItem : postUpdate) itDb "inserts an item and doesn't apply the update if the filter condition is triggered" $ do let newItem = Item "item3" "hello world" Nothing Nothing insertMany_ items upsertManyWhere (newItem : items) [] [ItemQuantity +=. Just 1] [excludeNotEqualToOriginal ItemDescription] dbItems <- fmap entityVal <$> selectList [] [] dbItems `shouldMatchList` (newItem : items) persistent-postgresql-2.13.6.2/test/ImplicitUuidSpec.hs0000644000000000000000000000423714646332256021302 0ustar0000000000000000{-# LANGUAGE DataKinds #-} {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeApplications #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE UndecidableInstances #-} module ImplicitUuidSpec where import PgInit import Data.Proxy import Database.Persist.Postgresql import Database.Persist.ImplicitIdDef import Database.Persist.ImplicitIdDef.Internal (fieldTypeFromTypeable) share [ mkPersist (sqlSettingsUuid "uuid_generate_v1mc()") , mkEntityDefList "entities" ] [persistLowerCase| WithDefUuid name Text sqltype=varchar(80) deriving Eq Show Ord |] implicitUuidMigrate :: Migration implicitUuidMigrate = do runSqlCommand $ rawExecute "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\"" [] migrateModels entities wipe :: IO () wipe = runConnAssert $ do rawExecute "DROP TABLE with_def_uuid;" [] runMigration implicitUuidMigrate itDb :: String -> SqlPersistT (LoggingT (ResourceT IO)) a -> SpecWith (Arg (IO ())) itDb msg action = it msg $ runConnAssert $ void action pass :: IO () pass = pure () spec :: Spec spec = describe "ImplicitUuidSpec" $ before_ wipe $ do describe "WithDefUuidKey" $ do it "works on UUIDs" $ do let withDefUuidKey = WithDefUuidKey (UUID "Hello") pass describe "getEntityId" $ do let Just idField = getEntityIdField (entityDef (Proxy @WithDefUuid)) it "has a UUID SqlType" $ asIO $ do fieldSqlType idField `shouldBe` SqlOther "UUID" it "is an implicit ID column" $ asIO $ do fieldIsImplicitIdColumn idField `shouldBe` True describe "insert" $ do itDb "successfully has a default" $ do let matt = WithDefUuid { withDefUuidName = "Matt" } k <- insert matt mrec <- get k mrec `shouldBe` Just matt persistent-postgresql-2.13.6.2/test/MigrationReferenceSpec.hs0000644000000000000000000000315614476403105022441 0ustar0000000000000000{-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE OverloadedStrings, DataKinds, FlexibleInstances #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE 
DerivingStrategies #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# OPTIONS_GHC -Wno-unused-top-binds #-} module MigrationReferenceSpec where import PgInit import Control.Monad.Trans.Writer (censor, mapWriterT) import Data.Text (Text, isInfixOf) share [mkPersist sqlSettings, mkMigrate "referenceMigrate"] [persistLowerCase| LocationCapabilities Id Text bio Text LocationCapabilitiesPrintingProcess locationCapabilitiesId LocationCapabilitiesId LocationCapabilitiesPrintingFinish locationCapabilitiesId LocationCapabilitiesId LocationCapabilitiesSubstrate locationCapabilitiesId LocationCapabilitiesId |] spec :: Spec spec = describe "MigrationReferenceSpec" $ do it "works" $ runConnAssert $ do let noForeignKeys :: CautiousMigration -> CautiousMigration noForeignKeys = filter ((not . isReference) . snd) onlyForeignKeys :: CautiousMigration -> CautiousMigration onlyForeignKeys = filter (isReference . snd) isReference :: Text -> Bool isReference migration = "REFERENCES" `isInfixOf` migration runMigration $ mapWriterT (censor noForeignKeys) $ referenceMigrate runMigration $ mapWriterT (censor onlyForeignKeys) $ referenceMigrate persistent-postgresql-2.13.6.2/ChangeLog.md0000644000000000000000000002312414646332411016711 0ustar0000000000000000# Changelog for persistent-postgresql ## 2.13.6.2 * [#1536](https://github.com/yesodweb/persistent/pull/1536/) * Build with GHC 9.10 ## 2.13.6.1 * [#1518](https://github.com/yesodweb/persistent/pull/1518) * Normalize postgres type aliases to prevent noop migrations ## 2.13.6 * [#1511](https://github.com/yesodweb/persistent/pull/1511) * Add the `createPostgresqlPoolTailored` function to support creating connection pools with a custom connection creation function. * Expose `getServerVersion` and `createBackend` for user's convenience. * [#1516](https://github.com/yesodweb/persistent/pull/1516) * Support postgresql-simple 0.7 and postgresql-libpq 0.10 ## 2.13.5.2 * [#1471](https://github.com/yesodweb/persistent/pull/1471) * Explicitly import `Control.Monad.Trans.lift` to support mtl-2.3. ## 2.13.5.1 * [#1459](https://github.com/yesodweb/persistent/pull/1459) * Make use of `CautiousMigration` type alias for clarity. ## 2.13.5.0 * [#1362](https://github.com/yesodweb/persistent/pull/1362/) * Define `withPostgresqlPoolModifiedWithVersion` ## 2.13.4.1 * [#1367](https://github.com/yesodweb/persistent/pull/1367), [#1366](https://github.com/yesodweb/persistent/pull/1367), [#1338](https://github.com/yesodweb/persistent/pull/1338), [#1335](https://github.com/yesodweb/persistent/pull/1335) * Support GHC 9.2 ## 2.13.4.0 * [#1341](https://github.com/yesodweb/persistent/pull/1341) * Add `SqlBackendHooks` to allow for instrumentation of queries. * [#1327](https://github.com/yesodweb/persistent/pull/1327) * Update backend to support new `StatementCache` interface ## 2.13.3.0 * [#1349](https://github.com/yesodweb/persistent/pull/1349) * Add `BackendCompatible (RawPostgresql b) (RawPostgresql b)` instance. ## 2.13.2.2 * [#1351](https://github.com/yesodweb/persistent/pull/1351/) * Support `aeson-2.0` in test suite. ## 2.13.2.1 * [#1331](https://github.com/yesodweb/persistent/pull/1331) * Fixes a bug where `upsertWhere` would fail on a database table with `MigrationOnly` fields. ## 2.13.2.0 * [#1316](https://github.com/yesodweb/persistent/pull/1316) * Expose some internals in the new `Database.Persist.Postgresql.Internal` module. This gives access to the `P` newtype, which is used for de-serializing `PersistValue`s from `postgresql-simple` code. 
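  A minimal sketch of using the newly exposed `P` newtype directly with
  `postgresql-simple` (not from the package docs; it assumes the `P`
  constructor is exported via `P(..)` and that the selected column has a
  usable `FromField` decoding):

  ```haskell
  {-# LANGUAGE OverloadedStrings #-}
  import Database.Persist (PersistValue)
  import Database.Persist.Postgresql.Internal (P(..))
  import qualified Database.PostgreSQL.Simple as PG

  -- Decode a single column straight to a PersistValue via P's
  -- FromField instance.
  firstValue :: PG.Connection -> IO PersistValue
  firstValue conn = do
      [PG.Only (P v)] <- PG.query_ conn "SELECT 42"
      pure v
  ```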
## 2.13.1.0 * [#1305](https://github.com/yesodweb/persistent/pull/1305) * Add `RawPostgresql` wrapper, which exposes the underlying Postgres connection used to construct a `SqlBackend`. ## 2.13.0.3 * [#1290](https://github.com/yesodweb/persistent/pull/1290) * Fix the code path for adding references to previously defined columns. ## 2.13.0.2 * Actually release the SafeToRemove fix ## 2.13.0.1 * [#1275](https://github.com/yesodweb/persistent/pull/1275) * Fix `SafeToRemove` ## 2.13.0.0 * [#1225](https://github.com/yesodweb/persistent/pull/1225) * Support `persistent-2.13.0.0` making SqlBackend internal ## 2.12.1.1 * [#1235](https://github.com/yesodweb/persistent/pull/1235) * `upsertWhere` and `upsertManyWhere` only worked in cases where a `Primary` key was defined on a record, and no other uniqueness constraints. They have been fixed to only work with records that have a single Uniqueness constraint defined. ## 2.12.1.0 * Added `upsertWhere` and `upsertManyWhere` to `persistent-postgresql`. [#1222](https://github.com/yesodweb/persistent/pull/1222). ## 2.12.0.0 * Decomposed `HaskellName` into `ConstraintNameHS`, `EntityNameHS`, `FieldNameHS`. Decomposed `DBName` into `ConstraintNameDB`, `EntityNameDB`, `FieldNameDB` respectively. [#1174](https://github.com/yesodweb/persistent/pull/1174) * Fix XML conversion [#1192](https://github.com/yesodweb/persistent/pull/1192) ## 2.11.0.1 * Fix foreign key migrations [#1167](https://github.com/yesodweb/persistent/pull/1167) * Fix a bug where a foreign key of a field to its table was ignored. * Fix a bug where altering details of a foreign key didn't trigger a migration ## 2.11.0.0 * Foreign Key improvements [#1121](https://github.com/yesodweb/persistent/pull/1121) * It is now supported to refer to a table with an auto-generated Primary Key * It is now supported to refer to non-primary fields, using the keyword `References` * Implement interval support. [#1053](https://github.com/yesodweb/persistent/pull/1053) * [#1060](https://github.com/yesodweb/persistent/pull/1060) * The QuasiQuoter now supports `OnDelete` and `OnUpdate` cascade options. * Handle foreign key constraint names over 63 characters. See [#996](https://github.com/yesodweb/persistent/pull/996) for details. * Fix a bug in `upsertSql` query which had not been discovered previously because the query wasn't actually used. [#856](https://github.com/yesodweb/persistent/pull/856) * [#1072](https://github.com/yesodweb/persistent/pull/1072) Refactored `test/JSONTest.hs` to use `hspec` * added `runConn_` to run a db connection and return result * Renamed `db` to `runConnAssert` in `test/PgInit.hs` for clarity * Ran `test/ArrayAggTest.hs` (which was previously written but not being run) * Remove unnecessary deriving of Typeable [#1114](https://github.com/yesodweb/persistent/pull/1114) * Add support for configuring the number of stripes and idle timeout for connection pools [#1098](https://github.com/yesodweb/persistent/pull/1098) * `PostgresConf` has two new fields to configure these values. * Its `FromJSON` instance will default stripes to 1 and idle timeout to 600 seconds * If you're constructing a `PostgresConf` manually, this is a breaking change * Add `createPostgresqlPoolWithConf` and `withPostgresqlPoolWithConf`, which take a `PostgresConf` for the new configuration. ## 2.10.1.2 * Fix issue with multiple foreign keys on single column.
[#1010](https://github.com/yesodweb/persistent/pull/1010) ## 2.10.1.1 * Compatibility with latest persistent-template for test suite [#1002](https://github.com/yesodweb/persistent/pull/1002/files) ## 2.10.1 * Added support for the `constraint=` attribute to the Postgresql backend. [#979](https://github.com/yesodweb/persistent/pull/979) ## 2.10.0 * Added question mark operators (`(?.), (?|.), (?&.)`) to `Database.Persist.Postgresql.JSON` [#863](https://github.com/yesodweb/persistent/pull/863) * Changes to certain types: * `PersistValue`: added `PersistArray` data constructor * `Filter`: Changed the `filterValue :: Either a [a]` to `filterValue :: FilterValue` ## 2.9.1 * Add `openSimpleConnWithVersion` function. [#883](https://github.com/yesodweb/persistent/pull/883) ## 2.9.0 * Added support for SQL isolation levels via SqlBackend. [#812] * Fix [#832](https://github.com/yesodweb/persistent/issues/832): `repsertMany` now matches `mapM_ (uncurry repsert)` and is atomic. ## 2.8.2 Added module `Database.Persist.Postgresql.JSON` [#793](https://github.com/yesodweb/persistent/pull/793) * `PersistField` and `PersistFieldSql` instances for `Data.Aeson.Value` * Filter operators `(@>.)` and `(<@.)` to filter on JSON values ## 2.8.1.1 * Added a more detailed error message when a `numeric` column's scale and precision can't be parsed. [#781](https://github.com/yesodweb/persistent/pull/781) ## 2.8.1 * Implemented `connPutManySql` to utilize batched `putMany`. [#770](https://github.com/yesodweb/persistent/pull/770) ## 2.8.0 * Switch from `MonadBaseControl` to `MonadUnliftIO` ## 2.6.3 * Added new function `migrateEnableExtension`, to enable Postgres extensions in migrations. ## 2.6.2.2 * Because `text` and `varchar` are synonyms in Postgresql, don't attempt to migrate between them. [#762](https://github.com/yesodweb/persistent/pull/762) ## 2.6.2.1 * Fix bug where, if a custom column width was set, the field would be migrated every time [#742](https://github.com/yesodweb/persistent/pull/742) ## 2.6.2 * Expose new functions: `withPostgresqlPoolWithVersion`, `withPostgresqlConnWithVersion` and `createPostgresqlPoolModifiedWithVersion`. ## 2.6.1 * Match changes in persistent * Clean up warnings ## 2.6 * Atomic upsert support for PostgreSQL backend ## 2.5 * changes for read/write typeclass split ## 2.2.2 * Postgresql primary key is Int4, not Int8 [#519](https://github.com/yesodweb/persistent/issues/519) ## 2.2.1.2 * Allow postgresql-simple 0.5 ## 2.2.1.1 Query pg_catalog instead of information_schema for metadata. This helps with permission issues as reported in issue #501 ## 2.2.1 * Fix treatment of `NULL`s inside arrays. For example, now you can use `array_agg` on a nullable column. * New derived instances for `PostgresConf`: `Read`, `Data` and `Typeable`. * New `mockMigration` function. Works like `printMigration` but doesn't need a database connection. * Fix typo on error message of the `FromJSON` instance of `PostgresConf`. ## 2.2 * Optimize the `insertMany` function to insert all rows and retrieve their keys in one SQL query.
[#407](https://github.com/yesodweb/persistent/pull/407) ## 2.1.6 * Postgresql exceptions [#353](https://github.com/yesodweb/persistent/issues/353) ## 2.1.5.3 Migrations for custom primary keys ## 2.1.5.2 Support foreign key references to composite primary keys #389 ## 2.1.5 * Allow timestamp value in database to be serialized (presumes UTC timezone) [Yesod #391](https://github.com/yesodweb/persistent/issues/391) ## 2.1.4 * Treat unknown extension types as PersistDbSpecific values [#385](https://github.com/yesodweb/persistent/pull/385) ## 2.1.3 * Added a `Show` instance for `PostgresConf`. * `createPostgresqlPoolModified` added, see [relevant mailing list discussion](https://groups.google.com/d/msg/yesodweb/qUXrEN_swEo/O0pFwqwQIdcJ) ## 2.1.2.1 Documentation typo fix ## 2.1.1 Added `FromJSON` instance for `PostgresConf`. persistent-postgresql-2.13.6.2/LICENSE0000644000000000000000000000207514476403105015547 0ustar0000000000000000Copyright (c) 2012 Michael Snoyman, http://www.yesodweb.com/ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. persistent-postgresql-2.13.6.2/Setup.lhs0000755000000000000000000000016214476403105016350 0ustar0000000000000000#!/usr/bin/env runhaskell > module Main where > import Distribution.Simple > main :: IO () > main = defaultMain persistent-postgresql-2.13.6.2/persistent-postgresql.cabal0000644000000000000000000000714614646332352022137 0ustar0000000000000000name: persistent-postgresql version: 2.13.6.2 license: MIT license-file: LICENSE author: Felipe Lessa, Michael Snoyman maintainer: Michael Snoyman synopsis: Backend for the persistent library using postgresql. 
description: Based on the postgresql-simple package category: Database, Yesod stability: Stable cabal-version: >= 1.10 build-type: Simple homepage: http://www.yesodweb.com/book/persistent bug-reports: https://github.com/yesodweb/persistent/issues extra-source-files: ChangeLog.md library build-depends: base >= 4.9 && < 5 , persistent >= 2.13.3 && < 3 , aeson >= 1.0 , attoparsec , blaze-builder , bytestring >= 0.10 , conduit >= 1.2.12 , containers >= 0.5 , monad-logger >= 0.3.25 , mtl , postgresql-simple >= 0.6.1 && < 0.8 , postgresql-libpq >= 0.9.4.2 && < 0.11 , resourcet >= 1.1.9 , resource-pool , string-conversions , text >= 1.2 , time >= 1.6 , transformers >= 0.5 , unliftio-core , vault exposed-modules: Database.Persist.Postgresql , Database.Persist.Postgresql.Internal , Database.Persist.Postgresql.JSON ghc-options: -Wall default-language: Haskell2010 source-repository head type: git location: git://github.com/yesodweb/persistent.git test-suite test type: exitcode-stdio-1.0 main-is: main.hs hs-source-dirs: test other-modules: PgInit ArrayAggTest EquivalentTypeTestPostgres JSONTest CustomConstraintTest PgIntervalTest UpsertWhere ImplicitUuidSpec MigrationReferenceSpec ghc-options: -Wall build-depends: base >= 4.9 && < 5 , persistent , persistent-postgresql , persistent-qq , persistent-test , aeson , bytestring , containers , fast-logger , HUnit , hspec >= 2.4 , hspec-expectations , hspec-expectations-lifted , monad-logger , QuickCheck , quickcheck-instances , resourcet , text , time , transformers , path-pieces , http-api-data , unliftio-core , unliftio , unordered-containers , vector default-language: Haskell2010 executable conn-kill buildable: False main-is: Main.hs hs-source-dirs: conn-killed ghc-options: -threaded build-depends: base , persistent-postgresql , monad-logger , text , unliftio , time , transformers , persistent , bytestring , resource-pool , mtl default-language: Haskell2010