diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..a834bb9 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,67 @@ +name: Binaries + +defaults: + run: + shell: bash + +on: + push: + branches: + - main + pull_request: + +jobs: + build: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + ghc: ["9.6.6"] + os: [ubuntu-latest] + + steps: + - uses: actions/checkout@v2 + + - uses: haskell/actions/setup@v2 + id: setup-haskell + with: + ghc-version: ${{ matrix.ghc }} + cabal-version: 3.12.1.0 + + - name: Configure project + run: | + cabal configure --enable-tests --enable-benchmarks + cabal build all --dry-run + + - name: Record dependencies + run: | + cat dist-newstyle/cache/plan.json | jq -r '."install-plan"[].id' | sort | uniq > dependencies.txt + date +"%Y-%m-%d" > date.txt + + - name: Cache cabal store + uses: actions/cache/restore@v4 + with: + path: ${{ steps.setup-haskell.outputs.cabal-store }} + key: | + cache-${{ env.CACHE_VERSION }}-${{ runner.os }}-${{ matrix.ghc }}-${{ hashFiles('dependencies.txt') }}-${{ hashFiles('date.txt') }} + restore-keys: | + cache-${{ env.CACHE_VERSION }}-${{ runner.os }}-${{ matrix.ghc }}-${{ hashFiles('dependencies.txt') }}-${{ hashFiles('date.txt') }} + cache-${{ env.CACHE_VERSION }}-${{ runner.os }}-${{ matrix.ghc }}-${{ hashFiles('dependencies.txt') }} + cache-${{ env.CACHE_VERSION }}-${{ runner.os }}-${{ matrix.ghc }} + + - name: Build + run: cabal build all + + - name: Cache Cabal store + uses: actions/cache/save@v4 + with: + path: ${{ steps.setup-haskell.outputs.cabal-store }} + key: | + cache-${{ env.CACHE_VERSION }}-${{ runner.os }}-${{ matrix.ghc }}-${{ hashFiles('dependencies.txt') }}-${{ hashFiles('date.txt') }} + + - name: Test + env: + LOCALSTACK_AUTH_TOKEN: ${{ secrets.LOCALSTACK_AUTH_TOKEN }} + run: | + cabal test all --enable-tests --enable-benchmarks diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..10643b2 --- /dev/null +++ 
b/LICENSE @@ -0,0 +1,28 @@ +BSD 3-Clause License + +Copyright (c) 2024, haskell-works + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/app/App/AWS/Env.hs b/app/App/AWS/Env.hs new file mode 100644 index 0000000..82c77d7 --- /dev/null +++ b/app/App/AWS/Env.hs @@ -0,0 +1,88 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeApplications #-} + +module App.AWS.Env + ( awsLogger + , mkEnv + , newAwsLogger + , setEnvEndpoint + ) where + +import App.Show (tshow) +import Control.Concurrent (myThreadId) +import Control.Monad (when, forM_) +import Data.ByteString (ByteString) +import Data.ByteString.Builder (toLazyByteString) +import Data.Function ((&)) +import Data.Generics.Product.Any (the) +import Lens.Micro ((.~), (%~)) +import Network.HTTP.Client (HttpException (..), HttpExceptionContent (..)) + +import qualified Amazonka as AWS +import qualified Data.ByteString as BS +import qualified Data.ByteString.Lazy as L +import qualified Data.ByteString.Lazy as LBS +import qualified Data.ByteString.Lazy.Char8 as LC8 +import qualified Data.Text.Encoding as T +import qualified App.Console as CIO +import qualified System.IO as IO + +setEnvEndpoint :: Maybe (ByteString, Int, Bool) -> IO AWS.Env -> IO AWS.Env +setEnvEndpoint mHostEndpoint getEnv = do + env <- getEnv + case mHostEndpoint of + Just (host, port, ssl) -> + pure $ env + & the @"overrides" .~ \svc -> do + svc & the @"endpoint" %~ \mkEndpoint region -> do + mkEndpoint region + & the @"host" .~ host + & the @"port" .~ port + & the @"secure" .~ ssl + Nothing -> pure env + +mkEnv :: AWS.Region -> (AWS.LogLevel -> LBS.ByteString -> IO ()) -> IO AWS.Env +mkEnv region lg = do + lgr <- newAwsLogger lg + discoveredEnv <- AWS.newEnv AWS.discover + + pure discoveredEnv + { AWS.logger = lgr + , AWS.region = region + , AWS.retryCheck = retryPolicy 5 + } + +newAwsLogger :: Monad m => (AWS.LogLevel -> LBS.ByteString -> IO ()) -> m AWS.Logger +newAwsLogger lg = return $ \y b -> + let lazyMsg = toLazyByteString b + in case L.toStrict lazyMsg of + msg | BS.isInfixOf "404 Not Found" msg -> lg 
AWS.Debug lazyMsg + msg | BS.isInfixOf "304 Not Modified" msg -> lg AWS.Debug lazyMsg + _ -> lg y lazyMsg + +retryPolicy :: Int -> Int -> AWS.HttpException -> Bool +retryPolicy maxNum attempt ex = (attempt <= maxNum) && shouldRetry ex + +shouldRetry :: AWS.HttpException -> Bool +shouldRetry ex = case ex of + HttpExceptionRequest _ ctx -> case ctx of + ResponseTimeout -> True + ConnectionTimeout -> True + ConnectionFailure _ -> True + InvalidChunkHeaders -> True + ConnectionClosed -> True + InternalException _ -> True + NoResponseDataReceived -> True + ResponseBodyTooShort _ _ -> True + _ -> False + _ -> False + +awsLogger :: Maybe AWS.LogLevel -> AWS.LogLevel -> LC8.ByteString -> IO () +awsLogger maybeConfigLogLevel msgLogLevel message = + forM_ maybeConfigLogLevel $ \configLogLevel -> + when (msgLogLevel <= configLogLevel) do + threadId <- myThreadId + CIO.hPutStrLn IO.stderr $ "[" <> tshow msgLogLevel <> "] [tid: " <> tshow threadId <> "]" <> text + where text = T.decodeUtf8 $ LBS.toStrict message diff --git a/app/App/Cli/Options.hs b/app/App/Cli/Options.hs new file mode 100644 index 0000000..bc9db4d --- /dev/null +++ b/app/App/Cli/Options.hs @@ -0,0 +1,97 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} + +module App.Cli.Options where + +import App.Options +import Control.Applicative +import Data.ByteString (ByteString) + +import qualified Amazonka.Data as AWS +import qualified App.Cli.Types as CLI +import qualified Data.Text as T +import qualified Options.Applicative as OA + +opts :: OA.ParserInfo CLI.Cmd +opts = OA.info (pCmds <**> OA.helper) $ mconcat + [ OA.fullDesc + , OA.header $ mconcat + [ "rds-data" + ] + ] + +pCmds :: OA.Parser CLI.Cmd +pCmds = + asum + [ subParser "up" + $ OA.info (CLI.CmdOfUpCmd <$> pUpCmd) + $ OA.progDesc "Up command." + , subParser "down" + $ OA.info (CLI.CmdOfDownCmd <$> pDownCmd) + $ OA.progDesc "Down command." 
+ , subParser "local-stack" + $ OA.info (CLI.CmdOfLocalStackCmd <$> pLocalStackCmd) + $ OA.progDesc "Launch a local-stack RDS cluster." + ] + +pUpCmd :: OA.Parser CLI.UpCmd +pUpCmd = + CLI.UpCmd + <$> do OA.strOption $ mconcat + [ OA.long "resource-arn" + , OA.help "Resource ARN" + , OA.metavar "ARN" + ] + <*> do OA.strOption $ mconcat + [ OA.long "secret-arn" + , OA.help "Secret ARN" + , OA.metavar "ARN" + ] + <*> do OA.strOption $ mconcat + [ OA.long "migration-file" + , OA.help "Migration File" + , OA.metavar "FILE" + ] + +pDownCmd :: OA.Parser CLI.DownCmd +pDownCmd = + CLI.DownCmd + <$> do OA.strOption $ mconcat + [ OA.long "resource-arn" + , OA.help "Resource ARN" + , OA.metavar "ARN" + ] + <*> do OA.strOption $ mconcat + [ OA.long "secret-arn" + , OA.help "Secret ARN" + , OA.metavar "ARN" + ] + <*> do OA.strOption $ mconcat + [ OA.long "migration-file" + , OA.help "Migration File" + , OA.metavar "FILE" + ] + +pLocalStackCmd :: OA.Parser CLI.LocalStackCmd +pLocalStackCmd = + pure CLI.LocalStackCmd + +parseEndpoint :: OA.Parser (ByteString, Int, Bool) +parseEndpoint = + (,,) + <$> do OA.option (OA.eitherReader (AWS.fromText . 
T.pack)) $ mconcat + [ OA.long "host-name-override" + , OA.help "Override the host name (default: s3.amazonaws.com)" + , OA.metavar "HOST_NAME" + ] + <*> do OA.option OA.auto $ mconcat + [ OA.long "host-port-override" + , OA.help "Override the host port" + , OA.metavar "HOST_PORT" + ] + <*> do OA.option OA.auto $ mconcat + [ OA.long "host-ssl-override" + , OA.help "Override the host SSL" + , OA.metavar "HOST_SSL" + ] diff --git a/app/App/Cli/Run.hs b/app/App/Cli/Run.hs new file mode 100644 index 0000000..6e641d9 --- /dev/null +++ b/app/App/Cli/Run.hs @@ -0,0 +1,26 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} + +module App.Cli.Run + ( runCmd + ) where + +import App.Cli.Run.BatchExecuteStatement +import App.Cli.Run.Down +import App.Cli.Run.Example +import App.Cli.Run.ExecuteStatement +import App.Cli.Run.LocalStack +import App.Cli.Run.Up + +import qualified App.Cli.Types as CLI + +runCmd :: CLI.Cmd -> IO () +runCmd = \case + CLI.CmdOfBatchExecuteStatementCmd cmd -> runBatchExecuteStatementCmd cmd + CLI.CmdOfDownCmd cmd -> runDownCmd cmd + CLI.CmdOfExampleCmd cmd -> runExampleCmd cmd + CLI.CmdOfExecuteStatementCmd cmd -> runExecuteStatementCmd cmd + CLI.CmdOfLocalStackCmd cmd -> runLocalStackCmd cmd + CLI.CmdOfUpCmd cmd -> runUpCmd cmd diff --git a/app/App/Cli/Run/BatchExecuteStatement.hs b/app/App/Cli/Run/BatchExecuteStatement.hs new file mode 100644 index 0000000..6c3e2c0 --- /dev/null +++ b/app/App/Cli/Run/BatchExecuteStatement.hs @@ -0,0 +1,12 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} + +module App.Cli.Run.BatchExecuteStatement + ( runBatchExecuteStatementCmd + ) where + +import qualified App.Cli.Types as CLI + +runBatchExecuteStatementCmd :: CLI.BatchExecuteStatementCmd -> IO () +runBatchExecuteStatementCmd _ = pure () diff --git a/app/App/Cli/Run/Down.hs b/app/App/Cli/Run/Down.hs new file mode 100644 index 
0000000..4f9d3de --- /dev/null +++ b/app/App/Cli/Run/Down.hs @@ -0,0 +1,124 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeOperators #-} + +{- HLINT ignore "Redundant id" -} +{- HLINT ignore "Use let" -} + +module App.Cli.Run.Down + ( runDownCmd + ) where + +import Control.Monad.IO.Class +import Data.Generics.Product.Any +import Data.Maybe +import Data.RdsData.Migration.Types (MigrationRow (..)) +import Data.RdsData.Types +import Lens.Micro + +import qualified Amazonka as AWS +import qualified App.Cli.Types as CLI +import qualified App.Console as T +import qualified Data.Aeson as J +import Data.RdsData.Aws +import qualified Data.RdsData.Decode.Row as DEC +import Data.RdsData.Polysemy.Core +import Data.RdsData.Polysemy.Error +import Data.RdsData.Polysemy.Migration +import qualified Data.Text as T +import qualified Data.Text.Lazy.Encoding as LT +import qualified Data.Text.Lazy.IO as LT +import HaskellWorks.Polysemy +import HaskellWorks.Polysemy.Amazonka +import HaskellWorks.Polysemy.File +import HaskellWorks.Polysemy.Log +import HaskellWorks.Prelude +import Polysemy.Log +import Polysemy.Time.Interpreter.Ghc +import qualified System.IO as IO + +newtype AppError + = AppError Text + deriving (Eq, Show) + +runApp :: () + => CLI.DownCmd + -> Sem + [ Reader AwsResourceArn + , Reader AwsSecretArn + , Reader AWS.Env + , Error AppError + , DataLog AwsLogEntry + , Log + , GhcTime + , DataLog (LogEntry LogMessage) + , Resource + , Embed IO + , Final IO + ] () + -> IO () +runApp cmd f = f + & runReader (AwsResourceArn $ cmd ^. the @"resourceArn") + & runReader (AwsSecretArn $ cmd ^. 
the @"secretArn") + & runReaderAwsEnvDiscover + & trap @AppError reportFatal + & interpretDataLogAwsLogEntryToLog + & interpretLogDataLog + & interpretTimeGhc + & setLogLevel (Just Info) + & interpretDataLogToJsonStdout (logEntryToJson logMessageToJson) + & runResource + & embedToFinal @IO + & runFinal @IO + +reportFatal :: () + => Member (Embed IO) r + => AppError + -> Sem r () +reportFatal (AppError msg) = + T.putStrLn msg + +runDownCmd :: CLI.DownCmd -> IO () +runDownCmd cmd = runApp cmd do + initialiseDb + & trap @AWS.Error (throw . AppError . T.pack . show) + & trap @RdsDataError (throw . AppError . T.pack . show) + + migrateDown (cmd ^. the @"migrationFp") + & trap @AWS.Error (throw . AppError . T.pack . show) + & trap @IOException (throw . AppError . T.pack . show) + & trap @JsonDecodeError (throw . AppError . T.pack . show) + & trap @RdsDataError (throw . AppError . T.pack . show) + & trap @YamlDecodeError (throw . AppError . T.pack . show) + + id do + res <- executeStatement + ( mconcat + [ "SELECT" + , " uuid," + , " created_at," + , " deployed_by" + , "FROM migration" + ] + ) + & trap @AWS.Error (throw . AppError . T.pack . show) + & trap @RdsDataError (throw . AppError . T.pack . show) + + liftIO . LT.putStrLn $ LT.decodeUtf8 $ J.encode res + + decodeMigrationRow <- pure $ id @(DEC.DecodeRow MigrationRow) $ + MigrationRow + <$> DEC.ulid + <*> DEC.utcTime + <*> DEC.text + + records <- pure $ id @[[Value]] $ fromMaybe [] $ mapM (mapM fromField) =<< res ^. 
the @"records" + + row <- pure $ DEC.decodeRows decodeMigrationRow records + + liftIO $ IO.print row + + pure () diff --git a/app/App/Cli/Run/Example.hs b/app/App/Cli/Run/Example.hs new file mode 100644 index 0000000..e097887 --- /dev/null +++ b/app/App/Cli/Run/Example.hs @@ -0,0 +1,324 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeApplications #-} + +{- HLINT ignore "Use let" -} + +module App.Cli.Run.Example + ( runExampleCmd + ) where + +import Amazonka.RDSData +import App.AWS.Env +import App.Config +import Control.Monad.IO.Class +import Data.ByteString (ByteString) +import Data.Generics.Product.Any +import Data.Int +import Data.Maybe +import Data.Monoid +import Data.RdsData.Types +import Data.Text (Text) +import Data.Time +import Data.UUID (UUID) +import Data.Word +import GHC.Generics +import Lens.Micro +import Text.Printf + +import qualified Amazonka as AWS +import qualified App.Cli.Types as CLI +import qualified App.Console as T +import qualified Data.Aeson as J +import qualified Data.RdsData.Decode.Row as DEC +import qualified Data.RdsData.Encode.Params as ENC +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Text.Lazy.Encoding as LT +import qualified Data.Text.Lazy.IO as LT +import qualified System.IO as IO +import qualified System.IO.Unsafe as IO + +data ExampleRow = ExampleRow + { theBigInt :: Int64 + , theBigSerial :: Int32 + , theBoolean :: Bool + , theByteA :: ByteString + , theCharacter :: Text + , theCharacters :: Text + , theVaryingCharacter :: Text + , theVaryingCharacters :: Text + , theDate :: Day + , theDouble :: Double + , theInteger :: Integer + , theJson :: J.Value + , theJsonB :: J.Value + , theNumeric :: Double + , theNumerics :: Double + , theReal :: Double + , theSmallInt :: Int16 + , theSmallSerial :: Word32 + , theSerial :: Word64 + , theText :: Text + , theTime :: TimeOfDay + , 
theTimes :: TimeOfDay + , theTimestamp :: UTCTime + , theTimestamps :: UTCTime + , theUuid :: UUID + } deriving (Eq, Show, Generic) + +runExampleCmd :: CLI.ExampleCmd -> IO () +runExampleCmd cmd = do + let theAwsLogLevel = cmd ^. the @"mAwsLogLevel" + let theMHostEndpoint = cmd ^. the @"mHostEndpoint" + let theRegion = cmd ^. the @"region" + let theResourceArn = cmd ^. the @"resourceArn" + let theSecretArn = cmd ^. the @"secretArn" + + envAws <- + liftIO (IO.unsafeInterleaveIO (mkEnv theRegion (awsLogger theAwsLogLevel))) + <&> applyMHostEndpoint theMHostEndpoint + + AWS.runResourceT $ do + sql <- pure $ T.pack $ unlines + [ "CREATE TABLE all_types (" + , " the_bigint bigint not null," + , " the_bigserial bigserial not null," + , " the_boolean boolean not null," + , " the_bytea bytea not null," + , " the_character character not null," + , " the_characters character(2) not null," + , " the_varying_character character varying not null," + , " the_varying_characters character varying(2) not null," + , " the_date date not null," + , " the_double double precision not null," + , " the_integer integer not null," + , " the_json json not null," + , " the_jsonb jsonb not null," + , " the_numeric numeric not null," + , " the_numerics numeric(4, 2) not null," + , " the_real real not null," + , " the_smallint smallint not null," + , " the_smallserial smallserial not null," + , " the_serial serial not null," + , " the_text text not null," + , " the_time time not null," + , " the_times time(2) not null," + , " the_timestamp timestamp not null," + , " the_timestamps timestamp(2) not null," + , " the_uuid uuid not null" + , ")" + ] + + req <- pure $ newExecuteStatement theResourceArn theSecretArn sql + + res <- AWS.send envAws req + + liftIO . 
LT.putStrLn $ LT.decodeUtf8 $ J.encode res + + AWS.runResourceT $ do + sql <- pure $ T.pack $ unlines + [ "INSERT INTO all_types (" + , " the_bigint," + , " the_bigserial," + , " the_boolean," + , " the_bytea," + , " the_character," + , " the_characters," + , " the_varying_character," + , " the_varying_characters," + , " the_date," + , " the_double," + , " the_integer," + , " the_json," + , " the_jsonb," + , " the_numeric," + , " the_numerics," + , " the_real," + , " the_smallint," + , " the_smallserial," + , " the_serial," + , " the_text," + , " the_time," + , " the_times," + , " the_timestamp," + , " the_timestamps," + , " the_uuid" + , ") VALUES (" + , " :p00," -- the_bigint + , " :p01," -- the_bigserial + , " :p02," -- the_boolean + , " :p03," -- the_bytea + , " :p04," -- the_character + , " :p05," -- the_characters + , " :p06," -- the_varying_character + , " :p07," -- the_varying_characters + , " :p08," -- the_date + , " :p09," -- the_double + , " :p10," -- the_integer + , " :p11," -- the_json + , " :p12," -- the_jsonb + , " :p13," -- the_numeric + , " :p14," -- the_numerics + , " :p15," -- the_real + , " :p16," -- the_smallint + , " :p17," -- the_smallserial + , " :p18," -- the_serial + , " :p19," -- the_text + , " :p20," -- the_time + , " :p21," -- the_times + , " :p22," -- the_timestamp + , " :p23," -- the_timestamps + , " :p24" -- the_uuid + , ")" + ] + + exampleRow <- pure $ ExampleRow + { theBigInt = 1234567890 + , theBigSerial = 1 + , theBoolean = True + , theByteA = T.encodeUtf8 "EjQ=" + , theCharacter = "A" + , theCharacters = "AB" + , theVaryingCharacter = "C" + , theVaryingCharacters = "CD" + , theDate = YearMonthDay 2024 02 04 + , theDouble = 3.14159265359 + , theInteger = 42 + , theJson = "{\"key\":\"value\"}" + , theJsonB = "{\"key\":\"value\"}" + , theNumeric = 1234.56 + , theNumerics = 12.34 + , theReal = 3.14 + , theSmallInt = 12345 + , theSmallSerial = 1 + , theSerial = 1 + , theText = "Some text" + , theTime = TimeOfDay 12 34 56 + , theTimes 
= TimeOfDay 12 34 56.780 + , theTimestamp = read "2024-02-04 12:34:56" + , theTimestamps = read "2024-02-04 12:34:56" + , theUuid = read "550e8400-e29b-41d4-a716-446655440000" + } + + row <- pure $ flip appEndo [] $ mconcat $ fmap Endo + [ ENC.encodeParams ENC.int64 (exampleRow ^. the @"theBigInt" ) + , ENC.encodeParams ENC.int32 (exampleRow ^. the @"theBigSerial" ) + , ENC.encodeParams ENC.bool (exampleRow ^. the @"theBoolean" ) + , ENC.encodeParams ENC.bytestring (exampleRow ^. the @"theByteA" ) + , ENC.encodeParams ENC.text (exampleRow ^. the @"theCharacter" ) + , ENC.encodeParams ENC.text (exampleRow ^. the @"theCharacters" ) + , ENC.encodeParams ENC.text (exampleRow ^. the @"theVaryingCharacter" ) + , ENC.encodeParams ENC.text (exampleRow ^. the @"theVaryingCharacters" ) + , ENC.encodeParams ENC.day (exampleRow ^. the @"theDate" ) + , ENC.encodeParams ENC.double (exampleRow ^. the @"theDouble" ) + , ENC.encodeParams ENC.integer (exampleRow ^. the @"theInteger" ) + , ENC.encodeParams ENC.json (exampleRow ^. the @"theJson" ) + , ENC.encodeParams ENC.json (exampleRow ^. the @"theJsonB" ) + , ENC.encodeParams ENC.double (exampleRow ^. the @"theNumeric" ) + , ENC.encodeParams ENC.double (exampleRow ^. the @"theNumerics" ) + , ENC.encodeParams ENC.double (exampleRow ^. the @"theReal" ) + , ENC.encodeParams ENC.int16 (exampleRow ^. the @"theSmallInt" ) + , ENC.encodeParams ENC.word32 (exampleRow ^. the @"theSmallSerial" ) + , ENC.encodeParams ENC.word64 (exampleRow ^. the @"theSerial" ) + , ENC.encodeParams ENC.text (exampleRow ^. the @"theText" ) + , ENC.encodeParams ENC.timeOfDay (exampleRow ^. the @"theTime" ) + , ENC.encodeParams ENC.timeOfDay (exampleRow ^. the @"theTimes" ) + , ENC.encodeParams ENC.utcTime (exampleRow ^. the @"theTimestamp" ) + , ENC.encodeParams ENC.utcTime (exampleRow ^. the @"theTimestamps" ) + , ENC.encodeParams ENC.uuid (exampleRow ^. 
the @"theUuid" ) + ] + + liftIO $ LT.putStrLn $ LT.decodeUtf8 $ J.encode row + + req <- pure $ newBatchExecuteStatement theResourceArn theSecretArn sql + & the @"parameterSets" ?~ + [ zip row [0..] <&> + ( \(v, i) -> + toSqlParameter v + & the @"name" ?~ T.pack ("p" <> printf "%02d" (id @Int i)) + ) + ] + + liftIO $ IO.putStrLn $ "===> " <> show req + + res <- AWS.send envAws req + + liftIO . LT.putStrLn $ LT.decodeUtf8 $ J.encode res + + AWS.runResourceT $ do + sql <- pure $ T.pack $ unlines + [ "SELECT" + , " the_bigint," + , " the_bigserial," + , " the_boolean," + , " the_bytea," + , " the_character," + , " the_characters," + , " the_varying_character," + , " the_varying_characters," + , " the_date," + , " the_double," + , " the_integer," + , " the_json," + , " the_jsonb," + , " the_numeric," + , " the_numerics," + , " the_real," + , " the_smallint," + , " the_smallserial," + , " the_serial," + , " the_text," + , " the_time," + , " the_times," + , " the_timestamp," + , " the_timestamps," + , " the_uuid" + , "FROM all_types" + ] + + liftIO $ T.putStrLn sql + + req <- pure $ newExecuteStatement theResourceArn theSecretArn sql + + res <- AWS.send envAws req + + liftIO . LT.putStrLn $ LT.decodeUtf8 $ J.encode res + + decodeExampleRow <- pure $ id @(DEC.DecodeRow ExampleRow) $ + ExampleRow + <$> DEC.int64 + <*> DEC.int32 + <*> DEC.bool + <*> DEC.bytestring + <*> DEC.text + <*> DEC.text + <*> DEC.text + <*> DEC.text + <*> DEC.day + <*> DEC.double + <*> DEC.integer + <*> DEC.json + <*> DEC.json + <*> DEC.double + <*> DEC.double + <*> DEC.double + <*> DEC.int16 + <*> DEC.word32 + <*> DEC.word64 + <*> DEC.text + <*> DEC.timeOfDay + <*> DEC.timeOfDay + <*> DEC.utcTime + <*> DEC.utcTime + <*> DEC.uuid + + records <- pure $ id @[[Value]] $ fromMaybe [] $ mapM (mapM fromField) =<< res ^. 
the @"records" + + row <- pure $ DEC.decodeRows decodeExampleRow records + + liftIO $ IO.print row + + pure () diff --git a/app/App/Cli/Run/ExecuteStatement.hs b/app/App/Cli/Run/ExecuteStatement.hs new file mode 100644 index 0000000..85878b9 --- /dev/null +++ b/app/App/Cli/Run/ExecuteStatement.hs @@ -0,0 +1,43 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE TypeApplications #-} + +module App.Cli.Run.ExecuteStatement + ( runExecuteStatementCmd + ) where + +import Amazonka.RDSData +import App.AWS.Env +import App.Config +import Control.Monad.IO.Class +import Data.Generics.Product.Any +import Lens.Micro + +import qualified Amazonka as AWS +import qualified App.Cli.Types as CLI +import qualified Data.Aeson as J +import qualified Data.ByteString.Lazy as LBS +import qualified Data.Text.Encoding as T +import qualified Data.Text.IO as T +import qualified System.IO.Unsafe as IO + +runExecuteStatementCmd :: CLI.ExecuteStatementCmd -> IO () +runExecuteStatementCmd cmd = do + let theAwsLogLevel = cmd ^. the @"mAwsLogLevel" + let theMHostEndpoint = cmd ^. the @"mHostEndpoint" + let theRegion = cmd ^. the @"region" + let theResourceArn = cmd ^. the @"resourceArn" + let theSecretArn = cmd ^. the @"secretArn" + let theSql = cmd ^. the @"sql" + + envAws <- + liftIO (IO.unsafeInterleaveIO (mkEnv theRegion (awsLogger theAwsLogLevel))) + <&> applyMHostEndpoint theMHostEndpoint + + let req = newExecuteStatement theResourceArn theSecretArn theSql + + AWS.runResourceT $ do + res <- AWS.send envAws req + + liftIO . 
T.putStrLn $ T.decodeUtf8 $ LBS.toStrict $ J.encode res diff --git a/app/App/Cli/Run/LocalStack.hs b/app/App/Cli/Run/LocalStack.hs new file mode 100644 index 0000000..9bd3b17 --- /dev/null +++ b/app/App/Cli/Run/LocalStack.hs @@ -0,0 +1,137 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} + +{- HLINT ignore "Redundant id" -} +{- HLINT ignore "Use let" -} + +module App.Cli.Run.LocalStack + ( runLocalStackCmd, + runApp, + reportFatal, + ) where + +import Lens.Micro + +import qualified App.Cli.Types as CLI +import qualified App.Console as T +import qualified Control.Concurrent as IO +import qualified Control.Concurrent.STM as IO +import qualified Control.Exception as IO +import Control.Monad.IO.Class (MonadIO (..)) +import qualified Control.Monad.Trans.Resource as IO +import qualified Control.Monad.Trans.Resource.Internal as IO +import Data.Acquire (ReleaseType (ReleaseNormal)) +import Data.Generics.Product.Any +import Data.RdsData.Polysemy.Test.Cluster +import Data.RdsData.Polysemy.Test.Env +import GHC.IORef (IORef) +import HaskellWorks.Polysemy +import HaskellWorks.Polysemy.Amazonka +import HaskellWorks.Polysemy.Amazonka.LocalStack +import qualified HaskellWorks.Polysemy.Control.Concurrent.STM as STM +import qualified HaskellWorks.Polysemy.Control.Concurrent.STM.TVar as STM +import HaskellWorks.Polysemy.Hedgehog +import HaskellWorks.Prelude +import HaskellWorks.TestContainers.LocalStack +import qualified Hedgehog as H +import Polysemy.Log +import Polysemy.Time.Interpreter.Ghc +import qualified System.Exit as IO +import qualified TestContainers.Monad as TC +import qualified TestContainers.Tasty as TC + +newtype AppError + = AppError Text + deriving (Eq, Show) + +runApp :: () + => Sem + [ Error AppError + , DataLog AwsLogEntry + , Log + , GhcTime + , DataLog (LogEntry LogMessage) + , Resource + , Embed IO 
+ , Final IO + ] () + -> IO () +runApp f = f + & trap @AppError reportFatal + & interpretDataLogAwsLogEntryToLog + & interpretLogDataLog + & interpretTimeGhc + & setLogLevel (Just Info) + & interpretDataLogStdout + & runResource + & embedToFinal @IO + & runFinal @IO + +reportFatal :: () + => Member (Embed IO) r + => AppError + -> Sem r () +reportFatal (AppError msg) = + T.putStrLn msg + + +startContainers :: () + => TC.Config + -> IO (TC.Container, IO.InternalState) +startContainers tcConfig = + TC.runTestContainer tcConfig do + result <- setupContainers + releaseMap <- IO.liftResourceT IO.getInternalState + + -- N.B. runResourceT runs the finalizers on every resource. We don't want it to! We want to run + -- finalization in the release function that is called by Tasty! stateAlloc increments a references + -- count to accomodate for exactly these kind of cases. + liftIO $ IO.stateAlloc releaseMap + pure (result, releaseMap) + +stopContainers :: () + => (a, IORef IO.ReleaseMap) + -> IO () +stopContainers (_, internalState) = do + T.putStrLn "Stopping containers" + IO.stateCleanup ReleaseNormal internalState + +runLocalStackCmd :: CLI.LocalStackCmd -> IO () +runLocalStackCmd _ = do + tvRunning <- IO.newTVarIO False + + tcConfig <- TC.determineConfig + + void $ IO.bracket (startContainers tcConfig) stopContainers \(container, _) -> do + void $ H.check $ propertyOnce do + STM.atomically $ STM.writeTVar tvRunning True + + void $ runLocalTestEnv (pure container) do + rdsClusterDetails <- createRdsDbCluster (pure container) + + runReaderResourceAndSecretArnsFromResponses rdsClusterDetails do + lsEp <- getLocalStackEndpoint container + jotShow_ lsEp -- Localstack endpoint + let port = lsEp ^. 
the @"port" + let exampleCmd = "awslocal --endpoint-url=http://localhost:" <> show port <> " s3 ls" + -- Example awslocal command: + jot_ exampleCmd + jotShowM_ $ ask @AwsResourceArn + jotShowM_ $ ask @AwsSecretArn + pure () + + failure -- Not a failure + + running <- IO.readTVarIO tvRunning + + if running + then do + T.putStrLn "Localstack RDS cluster is running. Type CTRL-C to exit." + void . forever $ IO.threadDelay 10000000 + else do + T.putStrLn "Failed to start Localstack RDS cluster." + IO.exitFailure diff --git a/app/App/Cli/Run/Up.hs b/app/App/Cli/Run/Up.hs new file mode 100644 index 0000000..111d56b --- /dev/null +++ b/app/App/Cli/Run/Up.hs @@ -0,0 +1,84 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE TypeApplications #-} + +{- HLINT ignore "Redundant id" -} +{- HLINT ignore "Use let" -} + +module App.Cli.Run.Up + ( runUpCmd + ) where + +import Data.Generics.Product.Any +import Lens.Micro + +import qualified Amazonka as AWS +import qualified App.Cli.Types as CLI +import qualified App.Console as T +import Data.RdsData.Aws +import Data.RdsData.Polysemy.Core +import Data.RdsData.Polysemy.Error +import Data.RdsData.Polysemy.Migration +import qualified Data.Text as T +import HaskellWorks.Polysemy +import HaskellWorks.Polysemy.Amazonka +import HaskellWorks.Polysemy.File +import HaskellWorks.Polysemy.Log +import HaskellWorks.Prelude +import Polysemy.Log +import Polysemy.Time.Interpreter.Ghc + +newtype AppError + = AppError Text + deriving (Eq, Show) + +runApp :: () + => CLI.UpCmd + -> Sem + [ Reader AwsResourceArn + , Reader AwsSecretArn + , Reader AWS.Env + , Error AppError + , DataLog AwsLogEntry + , Log + , GhcTime + , DataLog (LogEntry LogMessage) + , Resource + , Embed IO + , Final IO + ] () + -> IO () +runApp cmd f = f + & runReader (AwsResourceArn $ cmd ^. the @"resourceArn") + & runReader (AwsSecretArn $ cmd ^. 
the @"secretArn") + & runReaderAwsEnvDiscover + & trap @AppError reportFatal + & interpretDataLogAwsLogEntryToLog + & interpretLogDataLog + & interpretTimeGhc + & setLogLevel (Just Info) + & interpretDataLogToJsonStdout (logEntryToJson logMessageToJson) + & runResource + & embedToFinal @IO + & runFinal @IO + +reportFatal :: () + => Member (Embed IO) r + => AppError + -> Sem r () +reportFatal (AppError msg) = + T.putStrLn msg + +runUpCmd :: CLI.UpCmd -> IO () +runUpCmd cmd = runApp cmd do + initialiseDb + & trap @AWS.Error (throw . AppError . T.pack . show) + & trap @RdsDataError (throw . AppError . T.pack . show) + + migrateUp (cmd ^. the @"migrationFp") + & trap @AWS.Error (throw . AppError . T.pack . show) + & trap @IOException (throw . AppError . T.pack . show) + & trap @JsonDecodeError (throw . AppError . T.pack . show) + & trap @RdsDataError (throw . AppError . T.pack . show) + & trap @YamlDecodeError (throw . AppError . T.pack . show) diff --git a/app/App/Cli/Types.hs b/app/App/Cli/Types.hs new file mode 100644 index 0000000..e45da17 --- /dev/null +++ b/app/App/Cli/Types.hs @@ -0,0 +1,72 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DuplicateRecordFields #-} + +module App.Cli.Types + ( Cmd(..) + , BatchExecuteStatementCmd(..) + , DownCmd(..) + , ExampleCmd(..) + , ExecuteStatementCmd(..) + , LocalStackCmd(..) + , UpCmd(..) 
+ ) where + +import Data.ByteString (ByteString) +import Data.RdsData.Types () +import Data.Text +import GHC.Generics + +import qualified Amazonka as AWS +import qualified Amazonka.RDSData as AWS + +data Cmd = + CmdOfBatchExecuteStatementCmd BatchExecuteStatementCmd + | CmdOfDownCmd DownCmd + | CmdOfExampleCmd ExampleCmd + | CmdOfExecuteStatementCmd ExecuteStatementCmd + | CmdOfLocalStackCmd LocalStackCmd + | CmdOfUpCmd UpCmd + +data ExecuteStatementCmd = ExecuteStatementCmd + { mAwsLogLevel :: Maybe AWS.LogLevel + , region :: AWS.Region + , mHostEndpoint :: Maybe (ByteString, Int, Bool) + , resourceArn :: Text + , secretArn :: Text + , sql :: Text + } deriving Generic + +data BatchExecuteStatementCmd = BatchExecuteStatementCmd + { mAwsLogLevel :: Maybe AWS.LogLevel + , region :: AWS.Region + , mHostEndpoint :: Maybe (ByteString, Int, Bool) + , parameterSets :: Maybe [[AWS.SqlParameter]] + , resourceArn :: Text + , secretArn :: Text + , sql :: Text + } deriving Generic + +data ExampleCmd = ExampleCmd + { mAwsLogLevel :: Maybe AWS.LogLevel + , region :: AWS.Region + , mHostEndpoint :: Maybe (ByteString, Int, Bool) + , resourceArn :: Text + , secretArn :: Text + } deriving Generic + +data UpCmd = UpCmd + { resourceArn :: Text + , secretArn :: Text + , migrationFp :: FilePath + } deriving Generic + +data DownCmd = DownCmd + { resourceArn :: Text + , secretArn :: Text + , migrationFp :: FilePath + } deriving Generic + +data LocalStackCmd = LocalStackCmd + deriving Generic diff --git a/app/App/Config.hs b/app/App/Config.hs new file mode 100644 index 0000000..5bfde1c --- /dev/null +++ b/app/App/Config.hs @@ -0,0 +1,28 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE TypeApplications #-} + +module App.Config + ( applyMHostEndpoint + ) where + +import Lens.Micro +import Data.ByteString (ByteString) +import Data.Generics.Product.Any + +import qualified Amazonka as AWS + 
+applyMHostEndpoint :: Maybe (ByteString, Int, Bool) -> AWS.Env -> AWS.Env +applyMHostEndpoint = \case + Just (host, port, ssl) -> + \env -> + env + & the @"overrides" .~ \svc -> + svc & the @"endpoint" %~ \mkEndpoint region -> + mkEndpoint region + & the @"host" .~ host + & the @"port" .~ port + & the @"secure" .~ ssl + Nothing -> id diff --git a/app/App/Console.hs b/app/App/Console.hs new file mode 100644 index 0000000..f6121b8 --- /dev/null +++ b/app/App/Console.hs @@ -0,0 +1,35 @@ +module App.Console + ( putStrLn, + print, + hPutStrLn, + hPrint, + ) where + +import Control.Exception (bracket_) +import Control.Monad.IO.Class (MonadIO, liftIO) +import Data.Text (Text) +import Prelude (IO, Show (..), ($), (.)) + +import qualified Control.Concurrent.QSem as IO +import qualified Data.Text.IO as T +import qualified System.IO as IO +import qualified System.IO.Unsafe as IO + +sem :: IO.QSem +sem = IO.unsafePerformIO $ IO.newQSem 1 +{-# NOINLINE sem #-} + +consoleBracket :: IO a -> IO a +consoleBracket = bracket_ (IO.waitQSem sem) (IO.signalQSem sem) + +putStrLn :: MonadIO m => Text -> m () +putStrLn = liftIO . consoleBracket . T.putStrLn + +print :: (MonadIO m, Show a) => a -> m () +print = liftIO . consoleBracket . IO.print + +hPutStrLn :: MonadIO m => IO.Handle -> Text -> m () +hPutStrLn h = liftIO . consoleBracket . T.hPutStrLn h + +hPrint :: (MonadIO m, Show a) => IO.Handle -> a -> m () +hPrint h = liftIO . consoleBracket . IO.hPrint h diff --git a/app/App/Options.hs b/app/App/Options.hs new file mode 100644 index 0000000..a6e7aee --- /dev/null +++ b/app/App/Options.hs @@ -0,0 +1,22 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} + +module App.Options where + +import qualified Amazonka.Data as AWS +import qualified Data.Text as T +import qualified Options.Applicative as OA + +text :: AWS.FromText a => OA.ReadM a +text = OA.eitherReader (AWS.fromText . 
T.pack) + +subParser :: String -> OA.ParserInfo a -> OA.Parser a +subParser availableCommand pInfo = + OA.hsubparser $ OA.command availableCommand pInfo <> OA.metavar availableCommand + +pref :: OA.ParserPrefs +pref = + OA.prefs $ mconcat + [ OA.showHelpOnEmpty + ] diff --git a/app/App/Show.hs b/app/App/Show.hs new file mode 100644 index 0000000..7594422 --- /dev/null +++ b/app/App/Show.hs @@ -0,0 +1,10 @@ +module App.Show + ( tshow, + ) where + +import Data.Text (Text) + +import qualified Data.Text as T + +tshow :: Show a => a -> Text +tshow = T.pack . show diff --git a/app/Main.hs b/app/Main.hs new file mode 100644 index 0000000..2867f5f --- /dev/null +++ b/app/Main.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} + +module Main where + +import App.Options + +import qualified App.Cli.Run as CLI +import qualified App.Cli.Options as CLI +import qualified Options.Applicative as OA + +main :: IO () +main = do + cmd <- OA.customExecParser pref CLI.opts + + CLI.runCmd cmd diff --git a/cabal.project b/cabal.project new file mode 100644 index 0000000..7fa9a80 --- /dev/null +++ b/cabal.project @@ -0,0 +1,10 @@ +packages: . + +package amazonka-rds-data + ghc-options: -XDuplicateRecordFields + +package amazonka-sso + ghc-options: -XDuplicateRecordFields + +package amazonka-sts + ghc-options: -XDuplicateRecordFields diff --git a/db/migration.yaml b/db/migration.yaml new file mode 100644 index 0000000..59e4f32 --- /dev/null +++ b/db/migration.yaml @@ -0,0 +1,50 @@ +description: This file contains the migration steps for the database schema. 
+  - id: 01J6EWQ9NQRBQA3GT3FTCSRPJP
+    description: Create Projects table
Data.RdsData.Polysemy.Core +import Data.RdsData.Polysemy.Error +import Data.RdsData.Polysemy.Migration +import Data.RdsData.Polysemy.Test.Cluster +import Data.RdsData.Polysemy.Test.Env +import Data.RdsData.Polysemy.Test.Workspace +import HaskellWorks.Polysemy.Amazonka +import HaskellWorks.Polysemy.File +import HaskellWorks.Polysemy.Hedgehog +import HaskellWorks.Polysemy.Prelude +import HaskellWorks.TestContainers.LocalStack +import Lens.Micro +import qualified Test.Tasty as Tasty +import qualified Test.Tasty.Hedgehog as H +import qualified TestContainers.Tasty as TC + +-- cabal test rds-data-test --test-options "--pattern \"/RDS integration test/\"" +tasty_rds_integration_test :: Tasty.TestTree +tasty_rds_integration_test = + TC.withContainers setupContainers $ \getContainer -> + H.testProperty "RDS integration test" $ propertyOnce $ localWorkspace $ runLocalTestEnv getContainer $ do + rdsClusterDetails <- createRdsDbCluster getContainer + + runReaderResourceAndSecretArnsFromResponses rdsClusterDetails $ do + id $ do + initialiseDb + & trapFail @RdsDataError + & trapFail @AWS.Error + & jotShowDataLog @AwsLogEntry + + migrateUp "db/migration.yaml" + & trapFail @AWS.Error + & trapFail @IOException + & trapFail @JsonDecodeError + & trapFail @RdsDataError + & trapFail @YamlDecodeError + & jotShowDataLog + + upResult <- + ( executeStatement $ mconcat + [ "SELECT table_name" + , " FROM information_schema.tables" + , " WHERE table_schema = 'public'" + , " AND table_type = 'BASE TABLE';" + ] + ) + & trapFail @AWS.Error + & trapFail @RdsDataError + & jotShowDataLog + + let upTables = upResult ^.. the @"records" . each . each . each . the @"stringValue" . 
_Just + + L.sort upTables === ["migration", "projects", "users"] + + migrateDown "db/migration.yaml" + & trapFail @AWS.Error + & trapFail @IOException + & trapFail @JsonDecodeError + & trapFail @RdsDataError + & trapFail @YamlDecodeError + & jotShowDataLog + + downResult <- + ( executeStatement $ mconcat + [ "SELECT table_name" + , " FROM information_schema.tables" + , " WHERE table_schema = 'public'" + , " AND table_type = 'BASE TABLE';" + ] + ) + & trapFail @AWS.Error + & trapFail @RdsDataError + & jotShowDataLog + + let downTables = downResult ^.. the @"records" . each . each . each . the @"stringValue" . _Just + + L.sort downTables === ["migration"] diff --git a/integration/Test/Data/RdsData/TestTypes.hs b/integration/Test/Data/RdsData/TestTypes.hs new file mode 100644 index 0000000..48dec9a --- /dev/null +++ b/integration/Test/Data/RdsData/TestTypes.hs @@ -0,0 +1,50 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveGeneric #-} + +{- HLINT ignore "Monad law, left identity" -} +{- HLINT ignore "Use let" -} + +module Test.Data.RdsData.TestTypes + ( Record(..) 
+ ) where + +import Data.ByteString (ByteString) +import Data.RdsData.Internal.Aeson () +import Data.Text (Text) +import Data.Time (Day, TimeOfDay (..), UTCTime) +import Data.ULID (ULID) +import Data.UUID (UUID) +import GHC.Generics + +import qualified Data.Aeson as J + +data Record = Record + { bigint :: Int + , bigserial :: Int + , boolean :: Bool + , bytea :: ByteString + , character :: Text + , characters :: Text + , varyingCharacter :: Text + , varyingCharacters :: Text + , date :: Day + , double :: Double + , integer :: Integer + , json :: J.Value + , jsonb :: J.Value + , numeric :: Text + , numerics :: Text + , real :: Double + , smallint :: Int + , smallserial :: Int + , serial :: Int + , text :: Text + , time :: TimeOfDay + , times :: TimeOfDay + , timestamp :: UTCTime + , timestamps :: UTCTime + , ulid :: ULID + , uuid :: UUID + } + deriving (Show, Eq, Generic) diff --git a/polysemy/Data/RdsData/Polysemy.hs b/polysemy/Data/RdsData/Polysemy.hs new file mode 100644 index 0000000..034e92b --- /dev/null +++ b/polysemy/Data/RdsData/Polysemy.hs @@ -0,0 +1,2 @@ +module Data.RdsData.Polysemy + () where diff --git a/polysemy/Data/RdsData/Polysemy/Core.hs b/polysemy/Data/RdsData/Polysemy/Core.hs new file mode 100644 index 0000000..54ea33c --- /dev/null +++ b/polysemy/Data/RdsData/Polysemy/Core.hs @@ -0,0 +1,114 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} + +module Data.RdsData.Polysemy.Core + ( executeStatement, + executeStatement_, + initialiseDb, + newExecuteStatement, + newBatchExecuteStatement, + ) where + +import Amazonka.Env (Env) +import qualified Amazonka.RDSData.BatchExecuteStatement as AWS +import qualified Amazonka.RDSData.ExecuteStatement as AWS +import qualified Amazonka.Types as AWS +import Control.Monad.IO.Class (MonadIO) +import Data.Generics.Product.Any +import Data.RdsData.Aws +import Data.RdsData.Polysemy.Error 
+    _ -> throw $ RdsDataError $ "Failed to execute statement: " <> tshow res
executeStatement + +initialiseDb :: () + => Member (DataLog AwsLogEntry) r + => Member (Embed m) r + => Member (Error AWS.Error) r + => Member (Error RdsDataError) r + => Member (Reader AwsResourceArn) r + => Member (Reader AwsSecretArn) r + => Member (Reader Env) r + => Member Log r + => Member Resource r + => MonadIO m + => Sem r () +initialiseDb = do + executeStatement_ $ mconcat + [ "CREATE TABLE IF NOT EXISTS migration (" + , " ulid CHAR(26) NOT NULL PRIMARY KEY," + , " created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP," + , " deployed_by TEXT NOT NULL," + , " CONSTRAINT valid_ulid_constraint" + , " CHECK (ulid::text ~ '^[0-9A-HJKMNP-TV-Z]{26}$')" + , ");" + ] + + executeStatement_ + "CREATE INDEX idx_migration_created_at ON migration (created_at);" + + executeStatement_ + "CREATE INDEX idx_migration_deployed_by ON migration (deployed_by);" diff --git a/polysemy/Data/RdsData/Polysemy/Error.hs b/polysemy/Data/RdsData/Polysemy/Error.hs new file mode 100644 index 0000000..50bbda1 --- /dev/null +++ b/polysemy/Data/RdsData/Polysemy/Error.hs @@ -0,0 +1,19 @@ +{-# LANGUAGE GADTs #-} +{-# LANGUAGE StandaloneDeriving #-} + +module Data.RdsData.Polysemy.Error + ( RdsDataError(..) 
+migrateDown migrationFp = do
+  value :: Migration <- readYamlFile migrationFp
+migrateUp migrationFp = do
+  value :: Migration <- readYamlFile migrationFp
+category: Data +stability: Experimental +homepage: http://github.com/haskell-works/rds-data#readme +bug-reports: https://github.com/haskell-works/rds-data/issues +author: John Ky +maintainer: newhoggy@gmail.com +copyright: 2024 John Ky +license: BSD-3-Clause +license-file: LICENSE +tested-with: GHC == 9.6.6 +build-type: Simple +extra-source-files: README.md + +source-repository head + type: git + location: https://github.com/haskell-works/rds-data + +common base { build-depends: base >= 4.11 && < 5 } + +common aeson { build-depends: aeson >= 2.2 && < 2.3 } +common aeson-pretty { build-depends: aeson-pretty >= 0.8 && < 1.0 } +common amazonka { build-depends: amazonka >= 2.0 && < 3 } +common amazonka-core { build-depends: amazonka-core >= 2.0 && < 3 } +common amazonka-rds { build-depends: amazonka-rds >= 2.0 && < 3 } +common amazonka-rds-data { build-depends: amazonka-rds-data >= 2.0 && < 3 } +common amazonka-secretsmanager { build-depends: amazonka-secretsmanager >= 2.0 && < 3 } +common base64-bytestring { build-depends: base64-bytestring >= 1.2.1 && < 2 } +common bytestring { build-depends: bytestring >= 0.11 && < 0.13 } +common contravariant { build-depends: contravariant >= 1.5.5 && < 2 } +common doctest { build-depends: doctest >= 0.22.2 && < 0.23 } +common doctest-discover { build-depends: doctest-discover >= 0.2 && < 0.3 } +common generic-lens { build-depends: generic-lens >= 2.2.2.0 && < 3 } +common hedgehog { build-depends: hedgehog >= 1.4 && < 2 } +common hedgehog-extras { build-depends: hedgehog-extras >= 0.6.0.2 && < 0.7 } +common http-client { build-depends: http-client >= 0.5.14 && < 0.8 } +common hw-polysemy-amazonka { build-depends: hw-polysemy:amazonka >= 0.2.14.2 && < 0.3 } +common hw-polysemy-core { build-depends: hw-polysemy:core >= 0.2.14.2 && < 0.3 } +common hw-polysemy-hedgehog { build-depends: hw-polysemy:hedgehog >= 0.2.14.2 && < 0.3 } +common hw-polysemy-testcontainers-localstack { build-depends: hw-polysemy:testcontainers-localstack >= 
0.2.14.2 && < 0.3 } +common microlens { build-depends: microlens >= 0.4.13 && < 0.5 } +common mtl { build-depends: mtl >= 2 && < 3 } +common optparse-applicative { build-depends: optparse-applicative >= 0.18.1.0 && < 0.19 } +common polysemy { build-depends: polysemy >= 1.9.2 && < 2 } +common polysemy-log { build-depends: polysemy-log >= 0.10.0.0 && < 1 } +common polysemy-plugin { build-depends: polysemy-plugin >= 0.4.5.2 && < 1 } +common polysemy-time { build-depends: polysemy-time >= 0.7 && < 1 } +common resourcet { build-depends: resourcet >= 1.3 && < 2 } +common stm { build-depends: stm >= 2.5 && < 3 } +common tasty { build-depends: tasty >= 1.4 && < 2 } +common tasty-discover { build-depends: tasty-discover >= 5 && < 6 } +common tasty-hedgehog { build-depends: tasty-hedgehog >= 1.4 && < 2 } +common testcontainers { build-depends: testcontainers >= 0.5.0.0 && < 1 } +common text { build-depends: text >= 2.1 && < 3 } +common time { build-depends: time >= 1.12.2 && < 2 } +common transformers { build-depends: transformers >= 0.5 && < 0.7 } +common ulid { build-depends: ulid >= 0.3.2.0 && < 0.4 } +common uuid { build-depends: uuid >= 1.3.15 && < 1.4 } + +common rds-data-polysemy { build-depends: rds-data:polysemy } +common rds-data-testlib { build-depends: rds-data:testlib } + +common rds-data + build-depends: rds-data + +common project-config + default-language: Haskell2010 + default-extensions: BlockArguments + ImportQualifiedPost + + ghc-options: -Wall + -Wcompat + -Wincomplete-record-updates + -Wincomplete-uni-patterns + -Wno-unticked-promoted-constructors + -Wpartial-fields + -Wredundant-constraints + -Wunused-imports + +library + import: base, project-config + , aeson + , amazonka-core + , amazonka-rds + , amazonka-rds-data + , amazonka-secretsmanager + , bytestring + , contravariant + , generic-lens + , microlens + , mtl + , text + , time + , transformers + , ulid + , uuid + exposed-modules: Data.RdsData + Data.RdsData.Aws + Data.RdsData.Decode.Array + 
Data.RdsData.Decode.Row + Data.RdsData.Decode.Value + Data.RdsData.Encode.Array + Data.RdsData.Encode.Param + Data.RdsData.Encode.Params + Data.RdsData.Encode.Row + Data.RdsData.Encode.Value + Data.RdsData.Internal.Aeson + Data.RdsData.Internal.Convert + Data.RdsData.Internal.Maybe + Data.RdsData.Migration + Data.RdsData.Migration.Types + Data.RdsData.Orphans + Data.RdsData.Types + Data.RdsData.Types.Array + Data.RdsData.Types.Param + Data.RdsData.Types.Value + other-modules: Paths_rds_data + autogen-modules: Paths_rds_data + hs-source-dirs: src + +library polysemy + import: base, project-config + , aeson + , amazonka + , amazonka-core + , amazonka-rds-data + , bytestring + , contravariant + , generic-lens + , hw-polysemy-amazonka + , hw-polysemy-core + , microlens + , mtl + , polysemy + , polysemy-log + , polysemy-plugin + , rds-data + , text + , time + , transformers + , ulid + , uuid + visibility: public + build-depends: rds-data + exposed-modules: Data.RdsData.Polysemy + Data.RdsData.Polysemy.Core + Data.RdsData.Polysemy.Error + Data.RdsData.Polysemy.Migration + ghc-options: -fplugin=Polysemy.Plugin + hs-source-dirs: polysemy + +library testlib + import: base, project-config + , aeson + , amazonka + , amazonka-core + , amazonka-rds + , amazonka-rds-data + , amazonka-secretsmanager + , base64-bytestring + , bytestring + , contravariant + , generic-lens + , hw-polysemy-amazonka + , hw-polysemy-core + , hw-polysemy-hedgehog + , hw-polysemy-testcontainers-localstack + , microlens + , mtl + , polysemy + , polysemy-log + , polysemy-plugin + , rds-data + , rds-data-polysemy + , text + , time + , transformers + , ulid + , uuid + visibility: public + build-depends: rds-data + exposed-modules: Data.RdsData.Polysemy.Test.Cluster + Data.RdsData.Polysemy.Test.Env + Data.RdsData.Polysemy.Test.Workspace + ghc-options: -fplugin=Polysemy.Plugin + hs-source-dirs: testlib + +executable rds-data + import: base, project-config + , aeson + , amazonka + , amazonka-rds-data + , 
bytestring + , generic-lens + , hedgehog + , http-client + , hw-polysemy-amazonka + , hw-polysemy-core + , hw-polysemy-hedgehog + , hw-polysemy-testcontainers-localstack + , microlens + , optparse-applicative + , polysemy + , polysemy-log + , polysemy-plugin + , polysemy-time + , resourcet + , rds-data + , rds-data-polysemy + , rds-data-testlib + , stm + , testcontainers + , text + , time + , ulid + , uuid + main-is: Main.hs + ghc-options: -fplugin=Polysemy.Plugin + other-modules: App.AWS.Env + App.Cli.Options + App.Cli.Run + App.Cli.Run.BatchExecuteStatement + App.Cli.Run.Down + App.Cli.Run.Example + App.Cli.Run.ExecuteStatement + App.Cli.Run.LocalStack + App.Cli.Run.Up + App.Cli.Types + App.Config + App.Console + App.Options + App.Show + hs-source-dirs: app + ghc-options: -threaded "-with-rtsopts=-I0 -A16m -N2 --disable-delayed-os-memory-return" + build-depends: rds-data + +test-suite rds-data-test + import: base, project-config + , aeson + , aeson-pretty + , amazonka-rds-data + , bytestring + , generic-lens + , hedgehog + , hedgehog-extras + , microlens + , rds-data + , tasty + , tasty-hedgehog + , text + , time + , ulid + , uuid + type: exitcode-stdio-1.0 + main-is: Spec.hs + other-modules: Data.RdsData.TestTypes + Data.RdsDataSpec + hs-source-dirs: test + ghc-options: -threaded -rtsopts -with-rtsopts=-N + build-tool-depends: tasty-discover:tasty-discover + +test-suite rds-data-integration + import: base, project-config + , aeson + , aeson-pretty + , amazonka + , amazonka-core + , amazonka-rds + , amazonka-rds-data + , amazonka-secretsmanager + , base64-bytestring + , bytestring + , generic-lens + , hedgehog + , hedgehog-extras + , hw-polysemy-amazonka + , hw-polysemy-core + , hw-polysemy-hedgehog + , hw-polysemy-testcontainers-localstack + , microlens + , polysemy + , polysemy-log + , polysemy-plugin + , rds-data + , rds-data-testlib + , tasty + , tasty-discover + , tasty-hedgehog + , testcontainers + , text + , time + , ulid + , uuid + type: 
+  build-depends:        rds-data,
+                        rds-data:polysemy
+ + , decodeArrayFailedMessage + + , arrays + , bools + , doubles + , integers + , texts + + , days + , int16s + , int32s + , int64s + , int8s + , ints + , jsons + , lazyTexts + , strings + , timesOfDay + , ulids + , utcTimes + , uuids + , word16s + , word32s + , word64s + , word8s + , words + ) where + +import Control.Applicative +import Data.Int +import Data.RdsData.Internal.Aeson +import Data.RdsData.Types.Array +import Data.Text (Text) +import Data.Time +import Data.ULID (ULID) +import Data.UUID (UUID) +import Data.Word +import Prelude hiding (maybe, null, words) + +import qualified Data.Aeson as J +import qualified Data.RdsData.Internal.Convert as CONV +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Text.Lazy as LT +import qualified Data.UUID as UUID +import qualified Prelude as P + +newtype DecodeArray a = DecodeArray + { decodeArray :: Array -> Either Text a + } deriving Functor + +instance Applicative DecodeArray where + pure a = DecodeArray \_ -> Right a + DecodeArray f <*> DecodeArray a = DecodeArray \v -> f v <*> a v + +instance Alternative DecodeArray where + empty = DecodeArray \_ -> Left "empty" + DecodeArray a <|> DecodeArray b = DecodeArray \v -> + either (const (b v)) Right (a v) + +instance Monad DecodeArray where + DecodeArray a >>= f = DecodeArray \v -> do + a' <- a v + decodeArray (f a') v + +-------------------------------------------------------------------------------- + +decodeArrayFailedMessage :: Text -> Text -> Maybe Text -> Array -> Text +decodeArrayFailedMessage item type_ reason value = + mconcat + [ "Failed to decode " <> item <> " of type " <> type_ <> " from Array of " <> toJsonText value + , P.maybe "" (" because " <>) reason + ] + +-------------------------------------------------------------------------------- + +integers :: DecodeArray [Integer] +integers = + DecodeArray \v -> + case v of + ArrayOfIntegers es -> Right es + _ -> Left $ decodeArrayFailedMessage "integers" 
+      _ -> Left $ decodeArrayFailedMessage "texts" "ArrayOfTexts" Nothing v
+
+bools :: DecodeArray [Bool]
+bools =
+  DecodeArray \v ->
+    case v of
+      ArrayOfBools es -> Right es
+      _ -> Left $ decodeArrayFailedMessage "bools" "ArrayOfBools" Nothing v
+-- | Decode an array of ULIDs
+-- ULIDs are encoded as strings in the database and have better database performance
+-- than UUIDs stored as strings in the database.
+ulids :: DecodeArray [ULID]
+ulids = do
+  ts <- texts
+  case traverse CONV.textToUlid ts of
+    Right u -> pure u
+    Left msg -> DecodeArray \_ -> Left $ "Failed to decode ULID: " <> msg
+ , integer + , int + , int8 + , int16 + , int32 + , int64 + , bool + , double + , string + , text + , lazyText + , word + , word8 + , word16 + , word32 + , word64 + , bytestring + , lazyBytestring + , timeOfDay + , day + , ulid + , utcTime + , uuid + , ignore + , json + , maybe + , column + , decodeRow + , decodeRows + ) where + +import Control.Monad.Except +import Control.Monad.State +import Data.ByteString (ByteString) +import Control.Monad +import Data.Functor.Identity ( Identity ) +import Data.Int +import Data.RdsData.Decode.Value (DecodeValue) +import Data.RdsData.Types.Value +import Data.Text +import Data.Time +import Data.ULID (ULID) +import Data.UUID (UUID) +import Data.Word +import Prelude hiding (maybe) + +import qualified Data.Aeson as J +import qualified Data.ByteString.Lazy as LBS +import qualified Data.RdsData.Decode.Value as DV +import qualified Data.RdsData.Internal.Convert as CONV +import qualified Data.Text as T +import qualified Data.Text.Lazy as LT +import qualified Data.UUID as UUID + +newtype DecodeRow a = DecodeRow + { unDecodeRow :: ExceptT Text (StateT [Value] Identity) a + } + deriving (Applicative, Functor, Monad, MonadState [Value], MonadError Text) + +instance MonadFail DecodeRow where + fail = DecodeRow . throwError . pack + +maybe :: DecodeRow a -> DecodeRow (Maybe a) +maybe r = do + cs <- get + case cs of + ValueOfNull : vs -> do + put vs + pure Nothing + _ -> Just <$> r + +decodeRowValue :: () + => MonadError Text m + => DecodeValue a + -> Value + -> m a +decodeRowValue decoder v = + case DV.decodeValue decoder v of + Right a -> pure a + Left e -> throwError $ "Failed to decode Value: " <> e + +column :: () + => DecodeValue a + -> DecodeRow a +column decoder = do + cs <- get + case cs of + v : vs -> do + s <- decodeRowValue decoder v + put vs + pure s + [] -> do + throwError "Expected RdsText, but got no more values in row." 
+ +integer :: DecodeRow Integer +integer = + column DV.integer + +int :: DecodeRow Int +int = + column DV.int + +int8 :: DecodeRow Int8 +int8 = + column DV.int8 + +int16 :: DecodeRow Int16 +int16 = + column DV.int16 + +int32 :: DecodeRow Int32 +int32 = + column DV.int32 + +int64 :: DecodeRow Int64 +int64 = + column DV.int64 + +word :: DecodeRow Word +word = + column DV.word + +word8 :: DecodeRow Word8 +word8 = + column DV.word8 + +word16 :: DecodeRow Word16 +word16 = + column DV.word16 + +word32 :: DecodeRow Word32 +word32 = + column DV.word32 + +word64 :: DecodeRow Word64 +word64 = + column DV.word64 + +text :: DecodeRow Text +text = + column DV.text + +lazyText :: DecodeRow LT.Text +lazyText = + column DV.lazyText + +bool :: DecodeRow Bool +bool = + column DV.bool + +double :: DecodeRow Double +double = + column DV.double + +bytestring :: DecodeRow ByteString +bytestring = + column DV.bytestring + +lazyBytestring :: DecodeRow LBS.ByteString +lazyBytestring = + column DV.lazyBytestring + +string :: DecodeRow String +string = + column DV.string + +json :: DecodeRow J.Value +json = + column DV.json + +timeOfDay :: DecodeRow TimeOfDay +timeOfDay = do + t <- text + case parseTimeM True defaultTimeLocale "%H:%M:%S%Q" (T.unpack t) of + Just a -> pure a + Nothing -> throwError $ "Failed to parse TimeOfDay: " <> T.pack (show t) + +ulid :: DecodeRow ULID +ulid = do + t <- text + case CONV.textToUlid t of + Right a -> pure a + Left msg -> throwError $ "Failed to parse ULID: " <> msg + +utcTime :: DecodeRow UTCTime +utcTime = do + t <- text + case parseTimeM True defaultTimeLocale "%Y-%m-%d %H:%M:%S" (T.unpack t) of + Just a -> pure a + Nothing -> throwError $ "Failed to parse UTCTime: " <> T.pack (show t) + +uuid :: DecodeRow UUID +uuid = do + t <- text + case UUID.fromString (T.unpack t) of + Just a -> pure a + Nothing -> throwError $ "Failed to parse UUID: " <> T.pack (show t) + +day :: DecodeRow Day +day = do + t <- text + case parseTimeM True defaultTimeLocale 
"%Y-%m-%d" (T.unpack t) of + Just a -> pure a + Nothing -> throwError $ "Failed to parse Day: " <> T.pack (show t) + +ignore :: DecodeRow () +ignore = + void $ column DV.rdsValue + +decodeRow :: DecodeRow a -> [Value] -> Either Text a +decodeRow r = evalState (runExceptT (unDecodeRow r)) + +decodeRows :: DecodeRow a -> [[Value]] -> Either Text [a] +decodeRows r = traverse (decodeRow r) diff --git a/src/Data/RdsData/Decode/Value.hs b/src/Data/RdsData/Decode/Value.hs new file mode 100644 index 0000000..1158c70 --- /dev/null +++ b/src/Data/RdsData/Decode/Value.hs @@ -0,0 +1,287 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE OverloadedStrings #-} + +{- HLINT ignore "Use <&>" -} + +module Data.RdsData.Decode.Value + ( DecodeValue(..) + + , rdsValue + , decodeValueFailedMessage + , decodeValueFailed + , maybe + , array + , base64 + , bool + , double + , text + , integer + , null + , int + , int8 + , int16 + , int32 + , int64 + , word + , word8 + , word16 + , word32 + , word64 + , bytestring + , lazyText + , lazyBytestring + , string + , json + , timeOfDay + , utcTime + , uuid + , day + + ) where + +import Amazonka.Data.Base64 +import Control.Applicative +import Data.ByteString (ByteString) +import Data.Int +import Data.RdsData.Decode.Array (DecodeArray(..)) +import Data.RdsData.Internal.Aeson +import Data.RdsData.Types.Value +import Data.Text (Text) +import Data.Time +import Data.UUID (UUID) +import Data.Word +import Prelude hiding (maybe, null) + +import qualified Amazonka.Data.ByteString as AWS +import qualified Data.Aeson as J +import qualified Data.ByteString.Lazy as LBS +import qualified Data.RdsData.Internal.Convert as CONV +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Text.Lazy as LT +import qualified Data.UUID as UUID +import qualified Prelude as P + +newtype DecodeValue a = DecodeValue + { decodeValue :: Value -> Either Text a + } deriving Functor + 
+instance Applicative DecodeValue where + pure a = DecodeValue \_ -> Right a + DecodeValue f <*> DecodeValue a = DecodeValue \v -> f v <*> a v + +instance Alternative DecodeValue where + empty = DecodeValue \_ -> Left "empty" + DecodeValue a <|> DecodeValue b = DecodeValue \v -> + either (const (b v)) Right (a v) + +instance Monad DecodeValue where + DecodeValue a >>= f = DecodeValue \v -> do + a' <- a v + decodeValue (f a') v + +-------------------------------------------------------------------------------- + +rdsValue :: DecodeValue Value +rdsValue = + DecodeValue Right + +decodeValueFailedMessage :: Text -> Text -> Maybe Text -> Value -> Text +decodeValueFailedMessage item type_ reason value = + mconcat + [ "Failed to decode " <> item <> " of type " <> type_ <> " from Value of " <> toJsonText value + , P.maybe "" (" because " <>) reason + ] + +decodeValueFailed :: Text -> Text -> Maybe Text -> DecodeValue a +decodeValueFailed value type_ reason = + DecodeValue $ Left . decodeValueFailedMessage value type_ reason + +-------------------------------------------------------------------------------- + +maybe :: DecodeValue a -> DecodeValue (Maybe a) +maybe (DecodeValue f) = + DecodeValue \v -> + case v of + ValueOfNull -> Right Nothing + _ -> Just <$> f v + +-------------------------------------------------------------------------------- + +array :: DecodeArray a -> DecodeValue a +array decoder = + DecodeValue \v -> + case v of + ValueOfArray a -> decodeArray decoder a + _ -> Left $ decodeValueFailedMessage "array" "Array" Nothing v + +base64 :: DecodeValue Base64 +base64 = + DecodeValue \v -> + case v of + ValueOfBase64 b64 -> Right b64 + _ -> Left $ decodeValueFailedMessage "base64" "Base64" Nothing v + +bool :: DecodeValue Bool +bool = + DecodeValue \v -> + case v of + ValueOfBool b -> Right b + _ -> Left $ decodeValueFailedMessage "bool" "Bool" Nothing v + +double :: DecodeValue Double +double = + DecodeValue \v -> + case v of + ValueOfDouble n -> Right n + 
ValueOfText t -> + case CONV.textToDouble t of + Just n -> Right n + Nothing -> Left $ decodeValueFailedMessage "double" "Double" (Just "failed to parse text as double") v + _ -> Left $ decodeValueFailedMessage "double" "Double" Nothing v + +text :: DecodeValue Text +text = + DecodeValue \v -> + case v of + ValueOfText s -> Right s + _ -> Left $ decodeValueFailedMessage "text" "Text" Nothing v + +integer :: DecodeValue Integer +integer = + DecodeValue \v -> + case v of + ValueOfInteger n -> Right n + _ -> Left $ decodeValueFailedMessage "integer" "Integer" Nothing v + +null :: DecodeValue () +null = + DecodeValue \v -> + case v of + ValueOfNull -> Right () + _ -> Left $ decodeValueFailedMessage "null" "()" Nothing v + +-------------------------------------------------------------------------------- + +int :: DecodeValue Int +int = + asum + [ fromIntegral <$> int64 + , decodeValueFailed "int" "Int" Nothing + ] + +int8 :: DecodeValue Int8 +int8 = + asum + [ fromIntegral <$> integer + , decodeValueFailed "int8" "Int8" Nothing + ] + +int16 :: DecodeValue Int16 +int16 = + asum + [ fromIntegral <$> integer + , decodeValueFailed "int16" "Int16" Nothing + ] + +int32 :: DecodeValue Int32 +int32 = + asum + [ fromIntegral <$> integer + , decodeValueFailed "int32" "Int32" Nothing + ] + +int64 :: DecodeValue Int64 +int64 = + asum + [ fromIntegral <$> integer + , decodeValueFailed "int64" "Int64" Nothing + ] + +word :: DecodeValue Word +word = + asum + [ fromIntegral <$> integer + , decodeValueFailed "word" "Word" Nothing + ] + +word8 :: DecodeValue Word8 +word8 = + asum + [ fromIntegral <$> integer + , decodeValueFailed "word8" "Word8" Nothing + ] + +word16 :: DecodeValue Word16 +word16 = + asum + [ fromIntegral <$> integer + , decodeValueFailed "word16" "Word16" Nothing + ] + +word32 :: DecodeValue Word32 +word32 = + asum + [ fromIntegral <$> integer + , decodeValueFailed "word32" "Word32" Nothing + ] + +word64 :: DecodeValue Word64 +word64 = + asum + [ fromIntegral <$> 
integer + , decodeValueFailed "word64" "Word64" Nothing + ] + +bytestring :: DecodeValue ByteString +bytestring = + AWS.toBS <$> base64 + +lazyText :: DecodeValue LT.Text +lazyText = + LT.fromStrict <$> text + +lazyBytestring :: DecodeValue LBS.ByteString +lazyBytestring = + LBS.fromStrict <$> bytestring + +string :: DecodeValue String +string = + T.unpack <$> text + +json :: DecodeValue J.Value +json = do + t <- text + case J.eitherDecode (LBS.fromStrict (T.encodeUtf8 t)) of + Right v -> pure v + Left e -> decodeValueFailed "json" "Value" (Just (T.pack e)) + +timeOfDay :: DecodeValue TimeOfDay +timeOfDay = do + t <- text + case parseTimeM True defaultTimeLocale "%H:%M:%S%Q" (T.unpack t) of + Just a -> pure a + Nothing -> decodeValueFailed "timeOfDay" "TimeOfDay" (Just (T.pack (show t))) + +utcTime :: DecodeValue UTCTime +utcTime = do + t <- text + case parseTimeM True defaultTimeLocale "%Y-%m-%d %H:%M:%S" (T.unpack t) of + Just a -> pure a + Nothing -> decodeValueFailed "utcTime" "UTCTime" Nothing + +uuid :: DecodeValue UUID +uuid = do + t <- text + case UUID.fromString (T.unpack t) of + Just a -> pure a + Nothing -> decodeValueFailed "uuid" "UUID" Nothing + +day :: DecodeValue Day +day = do + t <- text + case parseTimeM True defaultTimeLocale "%Y-%m-%d" (T.unpack t) of + Just a -> pure a + Nothing -> decodeValueFailed "day" "Day" Nothing diff --git a/src/Data/RdsData/Encode/Array.hs b/src/Data/RdsData/Encode/Array.hs new file mode 100644 index 0000000..609b052 --- /dev/null +++ b/src/Data/RdsData/Encode/Array.hs @@ -0,0 +1,153 @@ +{-# LANGUAGE BlockArguments #-} + +module Data.RdsData.Encode.Array + ( EncodeArray(..) 
+ + , rdsArray + , arrays + + , bools + , doubles + , integers + , texts + + , days + , int16s + , int32s + , int64s + , int8s + , ints + , jsons + , lazyTexts + , timesOfDay + , ulids + , uuids + , utcTimes + , word16s + , word32s + , word64s + , word8s + , words + ) where + +import Data.Functor.Contravariant +import Data.Int +import Data.RdsData.Internal.Convert +import Data.RdsData.Types.Array (Array(..)) +import Data.Text (Text) +import Data.Time +import Data.ULID +import Data.UUID +import Data.Word +import Prelude hiding (words) + +import qualified Data.Aeson as J +import qualified Data.Text.Lazy as LT + +newtype EncodeArray a = EncodeArray + { encodeArray :: a -> Array + } + +instance Contravariant EncodeArray where + contramap f (EncodeArray g) = + EncodeArray (g . f) + +-------------------------------------------------------------------------------- + +rdsArray :: EncodeArray Array +rdsArray = + EncodeArray id + +-------------------------------------------------------------------------------- + +arrays :: EncodeArray Array -> EncodeArray [Array] +arrays = + contramap ArrayOfArrays + +-------------------------------------------------------------------------------- + +bools :: EncodeArray [Bool] +bools = + ArrayOfBools >$< rdsArray + +integers :: EncodeArray [Integer] +integers = + ArrayOfIntegers >$< rdsArray + +texts :: EncodeArray [Text] +texts = + ArrayOfTexts >$< rdsArray + +doubles :: EncodeArray [Double] +doubles = + ArrayOfDoubles >$< rdsArray + +-------------------------------------------------------------------------------- + +ints :: EncodeArray [Int] +ints = + fmap fromIntegral >$< integers + +int8s :: EncodeArray [Int8] +int8s = + fmap fromIntegral >$< integers + +int16s :: EncodeArray [Int16] +int16s = + fmap fromIntegral >$< integers + +int32s :: EncodeArray [Int32] +int32s = + fmap fromIntegral >$< integers + +int64s :: EncodeArray [Int64] +int64s = + fmap fromIntegral >$< integers + +words :: EncodeArray [Word] +words = + fmap fromIntegral 
>$< integers + +word8s :: EncodeArray [Word8] +word8s = + fmap fromIntegral >$< integers + +word16s :: EncodeArray [Word16] +word16s = + fmap fromIntegral >$< integers + +word32s :: EncodeArray [Word32] +word32s = + fmap fromIntegral >$< integers + +word64s :: EncodeArray [Word64] +word64s = + fmap fromIntegral >$< integers + +lazyTexts :: EncodeArray [LT.Text] +lazyTexts = + fmap LT.toStrict >$< texts + +timesOfDay :: EncodeArray [TimeOfDay] +timesOfDay = + fmap timeOfDayToText >$< texts + +days :: EncodeArray [Day] +days = + fmap dayToText >$< texts + +jsons :: EncodeArray [J.Value] +jsons = + fmap jsonToText >$< texts + +ulids :: EncodeArray [ULID] +ulids = + fmap ulidToText >$< texts + +uuids :: EncodeArray [UUID] +uuids = + fmap uuidToText >$< texts + +utcTimes :: EncodeArray [UTCTime] +utcTimes = + fmap utcTimeToText >$< texts diff --git a/src/Data/RdsData/Encode/Param.hs b/src/Data/RdsData/Encode/Param.hs new file mode 100644 index 0000000..7ab5ba4 --- /dev/null +++ b/src/Data/RdsData/Encode/Param.hs @@ -0,0 +1,204 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE TypeApplications #-} + +{- HLINT ignore "Redundant flip" -} + +module Data.RdsData.Encode.Param + ( EncodeParam(..) 
+ + , rdsParam + + , named + , typed + + , maybe + + , array + , bool + , bytestring + , double + , integer + , null + , text + + , base64 + , day + , int + , int16 + , int32 + , int64 + , int8 + , json + , lazyBytestring + , lazyText + , timeOfDay + , ulid + , utcTime + , uuid + , word + , word16 + , word32 + , word64 + , word8 + ) where + +import Data.ByteString (ByteString) +import Data.Functor.Contravariant +import Data.Generics.Product.Any +import Data.Int +import Data.RdsData.Encode.Array +import Data.RdsData.Types.Param +import Data.RdsData.Types.Value +import Data.Text (Text) +import Data.Time +import Data.ULID (ULID) +import Data.UUID (UUID) +import Data.Word +import Lens.Micro +import Prelude hiding (maybe, null) + +import qualified Amazonka.Bytes as AWS +import qualified Amazonka.Data.Base64 as AWS +import qualified Amazonka.RDSData as AWS +import qualified Data.Aeson as J +import qualified Data.ByteString.Lazy as LBS +import qualified Data.RdsData.Internal.Convert as CONV +import qualified Data.Text.Lazy as LT +import qualified Prelude as P + +newtype EncodeParam a = EncodeParam + { encodeParam :: a -> Param + } + +instance Contravariant EncodeParam where + contramap f (EncodeParam g) = + EncodeParam (g . f) + +-------------------------------------------------------------------------------- + +named :: Text -> EncodeParam a -> EncodeParam a +named n (EncodeParam f) = + EncodeParam \a -> f a & the @"name" .~ Just n + +typed :: AWS.TypeHint -> EncodeParam a -> EncodeParam a +typed t (EncodeParam f) = + EncodeParam \a -> f a & the @"hint" .~ Just t + +rdsParam :: EncodeParam Param +rdsParam = + EncodeParam id + +-------------------------------------------------------------------------------- + +maybe :: EncodeParam a -> EncodeParam (Maybe a) +maybe = + EncodeParam . P.maybe (Param Nothing Nothing ValueOfNull) . 
encodeParam + +-------------------------------------------------------------------------------- + +array :: EncodeArray a -> EncodeParam a +array enc = + Param Nothing Nothing . ValueOfArray . encodeArray enc >$< rdsParam + +base64 :: EncodeParam AWS.Base64 +base64 = + Param Nothing Nothing . ValueOfBase64 >$< rdsParam + +bool :: EncodeParam Bool +bool = + Param Nothing Nothing . ValueOfBool >$< rdsParam + +double :: EncodeParam Double +double = + Param Nothing Nothing . ValueOfDouble >$< rdsParam + +null :: EncodeParam () +null = + Param Nothing Nothing . const ValueOfNull >$< rdsParam + +integer :: EncodeParam Integer +integer = + Param Nothing Nothing . ValueOfInteger >$< rdsParam + +text :: EncodeParam Text +text = + Param Nothing Nothing . ValueOfText >$< rdsParam + +-------------------------------------------------------------------------------- + +int :: EncodeParam Int +int = + fromIntegral >$< integer + +int8 :: EncodeParam Int8 +int8 = + fromIntegral >$< integer + +int16 :: EncodeParam Int16 +int16 = + fromIntegral >$< integer + +int32 :: EncodeParam Int32 +int32 = + fromIntegral >$< integer + +int64 :: EncodeParam Int64 +int64 = + fromIntegral >$< integer + +word :: EncodeParam Word +word = + fromIntegral >$< integer + +word8 :: EncodeParam Word8 +word8 = + fromIntegral >$< integer + +word16 :: EncodeParam Word16 +word16 = + fromIntegral >$< integer + +word32 :: EncodeParam Word32 +word32 = + fromIntegral >$< integer + +word64 :: EncodeParam Word64 +word64 = + fromIntegral >$< integer + +lazyText :: EncodeParam LT.Text +lazyText = + LT.toStrict >$< text + +bytestring :: EncodeParam ByteString +bytestring = + (AWS.Base64 . 
AWS.encodeBase64) >$< base64 + +lazyBytestring :: EncodeParam LBS.ByteString +lazyBytestring = + LBS.toStrict >$< bytestring + +timeOfDay :: EncodeParam TimeOfDay +timeOfDay = + CONV.timeOfDayToText >$< text & typed AWS.TypeHint_TIME + +day :: EncodeParam Day +day = + CONV.dayToText >$< text & typed AWS.TypeHint_DATE + +json :: EncodeParam J.Value +json = + CONV.jsonToText >$< text & typed AWS.TypeHint_JSON + +ulid :: EncodeParam ULID +ulid = + CONV.ulidToText >$< text + +utcTime :: EncodeParam UTCTime +utcTime = + CONV.utcTimeToText >$< text & typed AWS.TypeHint_TIMESTAMP + +uuid :: EncodeParam UUID +uuid = + CONV.uuidToText >$< text & typed AWS.TypeHint_UUID diff --git a/src/Data/RdsData/Encode/Params.hs b/src/Data/RdsData/Encode/Params.hs new file mode 100644 index 0000000..d2e2b4a --- /dev/null +++ b/src/Data/RdsData/Encode/Params.hs @@ -0,0 +1,212 @@ +{-# LANGUAGE BlockArguments #-} + +module Data.RdsData.Encode.Params + ( EncodeParams(..) + + , rdsValue + + , column + + , maybe + + , array + , bool + , bytestring + , double + , integer + , null + , text + + , base64 + , day + , int + , int8 + , int16 + , int32 + , int64 + , json + , lazyBytestring + , lazyText + , timeOfDay + , ulid + , utcTime + , uuid + , word + , word8 + , word16 + , word32 + , word64 + ) where + +import Data.ByteString (ByteString) +import Data.Functor.Contravariant +import Data.Functor.Contravariant.Divisible +import Data.Int +import Data.RdsData.Encode.Array (EncodeArray(..)) +import Data.RdsData.Encode.Param (EncodeParam(..)) +import Data.RdsData.Types.Param +import Data.Text (Text) +import Data.Time +import Data.UUID (UUID) +import Data.ULID (ULID) +import Data.Void +import Data.Word +import Prelude hiding (maybe, null) + +import qualified Amazonka.Data.Base64 as AWS +import qualified Data.Aeson as J +import qualified Data.ByteString.Lazy as LBS +import qualified Data.RdsData.Encode.Param as EP +import qualified Data.Text.Lazy as LT +import qualified Prelude as P + +newtype 
EncodeParams a = EncodeParams + { encodeParams :: a -> [Param] -> [Param] + } + +instance Contravariant EncodeParams where + contramap f (EncodeParams g) = + EncodeParams (g . f) + +instance Divisible EncodeParams where + divide f (EncodeParams g) (EncodeParams h) = + EncodeParams \a -> + case f a of + (b, c) -> g b . h c + conquer = + EncodeParams $ const id + +instance Decidable EncodeParams where + choose f (EncodeParams g) (EncodeParams h) = + EncodeParams \a -> + case f a of + Left b -> g b + Right c -> h c + lose f = + EncodeParams $ absurd . f + +-------------------------------------------------------------------------------- + +rdsValue :: EncodeParams Param +rdsValue = + EncodeParams (:) + +-------------------------------------------------------------------------------- + +column :: EncodeParam a -> EncodeParams a +column (EncodeParam f) = + EncodeParams \a -> (f a:) + +-------------------------------------------------------------------------------- + +maybe :: EncodeParams a -> EncodeParams (Maybe a) +maybe = + choose (P.maybe (Left ()) Right) null + +-------------------------------------------------------------------------------- + +array :: EncodeArray a -> EncodeParams a +array = + column . 
EP.array + +base64 :: EncodeParams AWS.Base64 +base64 = + column EP.base64 + +bool :: EncodeParams Bool +bool = + column EP.bool + +double :: EncodeParams Double +double = + column EP.double + +null :: EncodeParams () +null = + column EP.null + +integer :: EncodeParams Integer +integer = + column EP.integer + +text :: EncodeParams Text +text = + column EP.text + +-------------------------------------------------------------------------------- + +int :: EncodeParams Int +int = + column EP.int + +int8 :: EncodeParams Int8 +int8 = + column EP.int8 + +int16 :: EncodeParams Int16 +int16 = + column EP.int16 + +int32 :: EncodeParams Int32 +int32 = + column EP.int32 + +int64 :: EncodeParams Int64 +int64 = + column EP.int64 + +word :: EncodeParams Word +word = + column EP.word + +word8 :: EncodeParams Word8 +word8 = + column EP.word8 + +word16 :: EncodeParams Word16 +word16 = + column EP.word16 + +word32 :: EncodeParams Word32 +word32 = + column EP.word32 + +word64 :: EncodeParams Word64 +word64 = + column EP.word64 + +lazyText :: EncodeParams LT.Text +lazyText = + column EP.lazyText + +bytestring :: EncodeParams ByteString +bytestring = + column EP.bytestring + +lazyBytestring :: EncodeParams LBS.ByteString +lazyBytestring = + column EP.lazyBytestring + +timeOfDay :: EncodeParams TimeOfDay +timeOfDay = + column EP.timeOfDay + +day :: EncodeParams Day +day = + column EP.day + +json :: EncodeParams J.Value +json = + column EP.json + +ulid :: EncodeParams ULID +ulid = + column EP.ulid + +utcTime :: EncodeParams UTCTime +utcTime = + column EP.utcTime + +uuid :: EncodeParams UUID +uuid = + column EP.uuid diff --git a/src/Data/RdsData/Encode/Row.hs b/src/Data/RdsData/Encode/Row.hs new file mode 100644 index 0000000..8265d1c --- /dev/null +++ b/src/Data/RdsData/Encode/Row.hs @@ -0,0 +1,212 @@ +{-# LANGUAGE BlockArguments #-} + +module Data.RdsData.Encode.Row + ( EncodeRow(..) 
+ + , rdsValue + + , column + + , maybe + + , array + , bool + , bytestring + , double + , integer + , null + , text + + , base64 + , day + , int + , int8 + , int16 + , int32 + , int64 + , json + , lazyBytestring + , lazyText + , timeOfDay + , ulid + , utcTime + , uuid + , word + , word8 + , word16 + , word32 + , word64 + ) where + +import Data.ByteString (ByteString) +import Data.Functor.Contravariant +import Data.Functor.Contravariant.Divisible +import Data.Int +import Data.RdsData.Encode.Array (EncodeArray(..)) +import Data.RdsData.Encode.Value (EncodeValue(..)) +import Data.RdsData.Types.Value +import Data.Text (Text) +import Data.Time +import Data.ULID (ULID) +import Data.UUID (UUID) +import Data.Void +import Data.Word +import Prelude hiding (maybe, null) + +import qualified Amazonka.Data.Base64 as AWS +import qualified Data.Aeson as J +import qualified Data.ByteString.Lazy as LBS +import qualified Data.RdsData.Encode.Value as EV +import qualified Data.Text.Lazy as LT +import qualified Prelude as P + +newtype EncodeRow a = EncodeRow + { encodeRow :: a -> [Value] -> [Value] + } + +instance Contravariant EncodeRow where + contramap f (EncodeRow g) = + EncodeRow (g . f) + +instance Divisible EncodeRow where + divide f (EncodeRow g) (EncodeRow h) = + EncodeRow \a -> + case f a of + (b, c) -> g b . h c + conquer = + EncodeRow $ const id + +instance Decidable EncodeRow where + choose f (EncodeRow g) (EncodeRow h) = + EncodeRow \a -> + case f a of + Left b -> g b + Right c -> h c + lose f = + EncodeRow $ absurd . 
f + +-------------------------------------------------------------------------------- + +rdsValue :: EncodeRow Value +rdsValue = + EncodeRow (:) + +-------------------------------------------------------------------------------- + +column :: EncodeValue a -> EncodeRow a +column (EncodeValue f) = + EncodeRow \a -> (f a:) + +-------------------------------------------------------------------------------- + +maybe :: EncodeRow a -> EncodeRow (Maybe a) +maybe = + choose (P.maybe (Left ()) Right) null + +-------------------------------------------------------------------------------- + +array :: EncodeArray a -> EncodeRow a +array = + column . EV.array + +base64 :: EncodeRow AWS.Base64 +base64 = + column EV.base64 + +bool :: EncodeRow Bool +bool = + column EV.bool + +double :: EncodeRow Double +double = + column EV.double + +null :: EncodeRow () +null = + column EV.null + +integer :: EncodeRow Integer +integer = + column EV.integer + +text :: EncodeRow Text +text = + column EV.text + +-------------------------------------------------------------------------------- + +int :: EncodeRow Int +int = + column EV.int + +int8 :: EncodeRow Int8 +int8 = + column EV.int8 + +int16 :: EncodeRow Int16 +int16 = + column EV.int16 + +int32 :: EncodeRow Int32 +int32 = + column EV.int32 + +int64 :: EncodeRow Int64 +int64 = + column EV.int64 + +word :: EncodeRow Word +word = + column EV.word + +word8 :: EncodeRow Word8 +word8 = + column EV.word8 + +word16 :: EncodeRow Word16 +word16 = + column EV.word16 + +word32 :: EncodeRow Word32 +word32 = + column EV.word32 + +word64 :: EncodeRow Word64 +word64 = + column EV.word64 + +lazyText :: EncodeRow LT.Text +lazyText = + column EV.lazyText + +bytestring :: EncodeRow ByteString +bytestring = + column EV.bytestring + +lazyBytestring :: EncodeRow LBS.ByteString +lazyBytestring = + column EV.lazyBytestring + +timeOfDay :: EncodeRow TimeOfDay +timeOfDay = + column EV.timeOfDay + +day :: EncodeRow Day +day = + column EV.day + +json :: EncodeRow 
J.Value +json = + column EV.json + +ulid :: EncodeRow ULID +ulid = + column EV.ulid + +utcTime :: EncodeRow UTCTime +utcTime = + column EV.utcTime + +uuid :: EncodeRow UUID +uuid = + column EV.uuid diff --git a/src/Data/RdsData/Encode/Value.hs b/src/Data/RdsData/Encode/Value.hs new file mode 100644 index 0000000..64a93e5 --- /dev/null +++ b/src/Data/RdsData/Encode/Value.hs @@ -0,0 +1,185 @@ +{-# LANGUAGE BlockArguments #-} + +module Data.RdsData.Encode.Value + ( EncodeValue(..) + + , rdsValue + + , maybe + + , array + , bool + , bytestring + , double + , integer + , null + , text + + , base64 + , day + , int + , int8 + , int16 + , int32 + , int64 + , json + , lazyBytestring + , lazyText + , timeOfDay + , utcTime + , ulid + , uuid + , word + , word8 + , word16 + , word32 + , word64 + ) where + +import Data.ByteString (ByteString) +import Data.Functor.Contravariant +import Data.Int +import Data.RdsData.Encode.Array +import Data.RdsData.Types.Value +import Data.Text (Text) +import Data.Time +import Data.ULID (ULID) +import Data.UUID (UUID) +import Data.Word +import Prelude hiding (maybe, null) + +import qualified Amazonka.Bytes as AWS +import qualified Amazonka.Data.Base64 as AWS +import qualified Data.Aeson as J +import qualified Data.ByteString.Lazy as LBS +import qualified Data.RdsData.Internal.Convert as CONV +import qualified Data.Text.Lazy as LT +import qualified Prelude as P + +newtype EncodeValue a = EncodeValue + { encodeValue :: a -> Value + } + +instance Contravariant EncodeValue where + contramap f (EncodeValue g) = + EncodeValue (g . f) + +-------------------------------------------------------------------------------- + +rdsValue :: EncodeValue Value +rdsValue = + EncodeValue id + +-------------------------------------------------------------------------------- + +maybe :: EncodeValue a -> EncodeValue (Maybe a) +maybe = + EncodeValue . P.maybe ValueOfNull . 
encodeValue + +-------------------------------------------------------------------------------- + +array :: EncodeArray a -> EncodeValue a +array enc = + ValueOfArray . encodeArray enc >$< rdsValue + +base64 :: EncodeValue AWS.Base64 +base64 = + ValueOfBase64 >$< rdsValue + +bool :: EncodeValue Bool +bool = + ValueOfBool >$< rdsValue + +double :: EncodeValue Double +double = + ValueOfDouble >$< rdsValue + +null :: EncodeValue () +null = + const ValueOfNull >$< rdsValue + +integer :: EncodeValue Integer +integer = + ValueOfInteger >$< rdsValue + +text :: EncodeValue Text +text = + ValueOfText >$< rdsValue + +-------------------------------------------------------------------------------- + +int :: EncodeValue Int +int = + fromIntegral >$< integer + +int8 :: EncodeValue Int8 +int8 = + fromIntegral >$< integer + +int16 :: EncodeValue Int16 +int16 = + fromIntegral >$< integer + +int32 :: EncodeValue Int32 +int32 = + fromIntegral >$< integer + +int64 :: EncodeValue Int64 +int64 = + fromIntegral >$< integer + +word :: EncodeValue Word +word = + fromIntegral >$< integer + +word8 :: EncodeValue Word8 +word8 = + fromIntegral >$< integer + +word16 :: EncodeValue Word16 +word16 = + fromIntegral >$< integer + +word32 :: EncodeValue Word32 +word32 = + fromIntegral >$< integer + +word64 :: EncodeValue Word64 +word64 = + fromIntegral >$< integer + +lazyText :: EncodeValue LT.Text +lazyText = + LT.toStrict >$< text + +bytestring :: EncodeValue ByteString +bytestring = + (AWS.Base64 . 
AWS.encodeBase64) >$< base64 + +lazyBytestring :: EncodeValue LBS.ByteString +lazyBytestring = + LBS.toStrict >$< bytestring + +timeOfDay :: EncodeValue TimeOfDay +timeOfDay = + CONV.timeOfDayToText >$< text + +day :: EncodeValue Day +day = + CONV.dayToText >$< text + +json :: EncodeValue J.Value +json = + CONV.jsonToText >$< text + +utcTime :: EncodeValue UTCTime +utcTime = + CONV.utcTimeToText >$< text + +ulid :: EncodeValue ULID +ulid = + CONV.ulidToText >$< text + +uuid :: EncodeValue UUID +uuid = + CONV.uuidToText >$< text diff --git a/src/Data/RdsData/Internal/Aeson.hs b/src/Data/RdsData/Internal/Aeson.hs new file mode 100644 index 0000000..c56db0c --- /dev/null +++ b/src/Data/RdsData/Internal/Aeson.hs @@ -0,0 +1,48 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE FlexibleInstances #-} + +{-# OPTIONS_GHC -fno-warn-orphans #-} + +module Data.RdsData.Internal.Aeson where + +import Data.Aeson +import Data.Text (Text) + +import qualified Amazonka.RDSData as AWS +import qualified Data.Aeson as J +import qualified Data.ByteString.Lazy as LBS +import qualified Data.Text.Encoding as T + +infixr 8 .=! +infixr 8 .=? + +-- | A key-value pair difference list for encoding a JSON object. +(.=!) :: (KeyValue e kv, ToJSON v) => Key -> v -> [kv] -> [kv] +(.=!) n v = (n .= v:) + +-- | A key-value pair difference list for encoding a JSON object where Nothing encodes absence of the key-value pair. +(.=?) :: (KeyValue e kv, ToJSON v) => Key -> Maybe v -> [kv] -> [kv] +(.=?) 
n mv = case mv of + Just v -> (n .= v:) + Nothing -> id + +toJsonText :: ToJSON a => a -> Text +toJsonText field = T.decodeUtf8 (LBS.toStrict (J.encode field)) + +instance ToJSON AWS.ColumnMetadata where + +instance ToJSON AWS.UpdateResult where + +-- Customized options for toJSON +myOptions :: Options +myOptions = defaultOptions { omitNothingFields = True } + +instance ToJSON AWS.ExecuteStatementResponse where + toJSON = genericToJSON myOptions + +instance FromJSON AWS.ExecuteStatementResponse where + +instance ToJSON AWS.BatchExecuteStatementResponse where + toJSON = genericToJSON myOptions + +instance FromJSON AWS.BatchExecuteStatementResponse where diff --git a/src/Data/RdsData/Internal/Convert.hs b/src/Data/RdsData/Internal/Convert.hs new file mode 100644 index 0000000..e80bf8f --- /dev/null +++ b/src/Data/RdsData/Internal/Convert.hs @@ -0,0 +1,64 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeApplications #-} + +module Data.RdsData.Internal.Convert + ( dayToText + , jsonToText + , timeOfDayToText + , ulidToText + , uuidToText + , utcTimeToText + , textToDouble + , textToUlid + ) where + +import Data.Bifunctor +import Data.ULID (ULID) +import Data.UUID (UUID) +import Data.Text (Text) +import Data.Time + ( formatTime, defaultTimeLocale, Day, UTCTime, TimeOfDay ) +import Prelude hiding (maybe, null) +import Text.Read + +import qualified Data.Aeson as J +import qualified Data.ByteString.Lazy as LBS +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.ULID as ULID +import qualified Data.ULID.Base32 as ULID +import qualified Data.UUID as UUID + +timeOfDayToText :: TimeOfDay -> Text +timeOfDayToText = + T.pack . formatTime defaultTimeLocale "%H:%M:%S%Q" + +dayToText :: Day -> Text +dayToText = + T.pack . formatTime defaultTimeLocale "%Y-%m-%d" + +jsonToText :: J.Value -> Text +jsonToText = + T.decodeUtf8 . LBS.toStrict . J.encode + +utcTimeToText :: UTCTime -> Text +utcTimeToText = + T.pack . 
formatTime defaultTimeLocale "%Y-%m-%d %H:%M:%S" + +ulidToText :: ULID -> Text +ulidToText = + ULID.encode 26 . ULID.ulidToInteger + +textToUlid :: Text -> Either Text ULID +textToUlid t = + bimap (const ("Unable to decode ULID: " <> t)) id (readEither @ULID (T.unpack t)) + +uuidToText :: UUID -> Text +uuidToText = + T.pack . UUID.toString + +textToDouble :: Text -> Maybe Double +textToDouble text = + case reads (T.unpack text) of + [(x, "")] -> Just x + _ -> Nothing diff --git a/src/Data/RdsData/Internal/Maybe.hs b/src/Data/RdsData/Internal/Maybe.hs new file mode 100644 index 0000000..93a8280 --- /dev/null +++ b/src/Data/RdsData/Internal/Maybe.hs @@ -0,0 +1,7 @@ +module Data.RdsData.Internal.Maybe + ( toMaybe + ) where + +toMaybe :: a -> Bool -> Maybe a +toMaybe x True = Just x +toMaybe _ _ = Nothing diff --git a/src/Data/RdsData/Migration.hs b/src/Data/RdsData/Migration.hs new file mode 100644 index 0000000..b7394c4 --- /dev/null +++ b/src/Data/RdsData/Migration.hs @@ -0,0 +1,3 @@ +module Data.RdsData.Migration + ( + ) where diff --git a/src/Data/RdsData/Migration/Types.hs b/src/Data/RdsData/Migration/Types.hs new file mode 100644 index 0000000..c7b3904 --- /dev/null +++ b/src/Data/RdsData/Migration/Types.hs @@ -0,0 +1,60 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} + +{- HLINT ignore "Use let" -} + +module Data.RdsData.Migration.Types + ( MigrationRow(..), + RdsClusterDetails(..), + Migration(..), + Step(..), + Statement(..), + ) where + +import Amazonka.Data (FromJSON, ToJSON) +import qualified Amazonka.RDS as AWS +import qualified Amazonka.SecretsManager as AWS +import Data.RdsData.Orphans () +import Data.Text (Text) +import Data.Time +import Data.ULID (ULID) +import GHC.Generics + +data MigrationRow = MigrationRow + { uuid :: ULID + , createdBy :: UTCTime + , deployedBy :: Text + } deriving (Eq, Show, Generic) + +data RdsClusterDetails = 
RdsClusterDetails + { createDbClusterResponse :: AWS.CreateDBClusterResponse + , createSecretResponse :: AWS.CreateSecretResponse + } deriving (Eq, Generic, Show) + +data Migration = Migration + { description :: Text + , plan :: [Step] + } + deriving (Eq, Generic, Show) + +instance ToJSON Migration + +instance FromJSON Migration + +data Step = Step + { id :: ULID + , description :: Text + , up :: [Statement] + , down :: [Statement] + } + deriving (Eq, Generic, Show) + +instance ToJSON Step + +instance FromJSON Step + +newtype Statement = Statement Text + deriving (Eq, Generic, Show) + deriving newtype (ToJSON, FromJSON) diff --git a/src/Data/RdsData/Orphans.hs b/src/Data/RdsData/Orphans.hs new file mode 100644 index 0000000..9528796 --- /dev/null +++ b/src/Data/RdsData/Orphans.hs @@ -0,0 +1,24 @@ +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE DuplicateRecordFields #-} + +{-# OPTIONS_GHC -fno-warn-orphans #-} + +{- HLINT ignore "Use let" -} + +module Data.RdsData.Orphans + ( + ) where + +import Amazonka.Data (FromJSON, ToJSON) +import qualified Data.Aeson as J +import Data.RdsData.Internal.Convert +import qualified Data.Text as T +import Data.ULID (ULID) + +instance FromJSON ULID where + parseJSON = J.withText "ULID" $ \txt -> + either (fail . T.unpack) return $ textToUlid txt + +instance ToJSON ULID where + toJSON = J.toJSON . show + diff --git a/src/Data/RdsData/Types.hs b/src/Data/RdsData/Types.hs new file mode 100644 index 0000000..4d91a55 --- /dev/null +++ b/src/Data/RdsData/Types.hs @@ -0,0 +1,15 @@ +module Data.RdsData.Types + ( Array(..) + , Param(..) + , Value(..) 
+ , fromArrayValue + , toArrayValue + , fromField + , toField + , toSqlParameter + ) where + +import Data.RdsData.Internal.Aeson () +import Data.RdsData.Types.Array +import Data.RdsData.Types.Param +import Data.RdsData.Types.Value diff --git a/src/Data/RdsData/Types/Array.hs b/src/Data/RdsData/Types/Array.hs new file mode 100644 index 0000000..f2d9430 --- /dev/null +++ b/src/Data/RdsData/Types/Array.hs @@ -0,0 +1,49 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE TypeApplications #-} + +{- HLINT ignore "Use <&>" -} + +module Data.RdsData.Types.Array + ( Array(..) + , fromArrayValue + , toArrayValue + ) where + +import Control.Applicative +import Data.Generics.Product.Any +import Data.Text +import GHC.Generics +import Lens.Micro + +import qualified Amazonka.RDSData as AWS +import qualified Data.Aeson as J + +data Array = + ArrayOfArrays [Array] + | ArrayOfBools [Bool] + | ArrayOfDoubles [Double] + | ArrayOfIntegers [Integer] + | ArrayOfTexts [Text] + deriving (Eq, Generic, Show) + +instance J.ToJSON Array where + +fromArrayValue :: AWS.ArrayValue -> Maybe Array +fromArrayValue v = + asum + [ v ^. the @"booleanValues" >>= pure . ArrayOfBools + , v ^. the @"doubleValues" >>= pure . ArrayOfDoubles + , v ^. the @"longValues" >>= pure . ArrayOfIntegers + , v ^. the @"stringValues" >>= pure . ArrayOfTexts + , v ^. the @"arrayValues" >>= fmap ArrayOfArrays . 
traverse fromArrayValue + ] + +toArrayValue :: Array -> AWS.ArrayValue +toArrayValue = \case + ArrayOfArrays vs -> AWS.newArrayValue & the @"arrayValues" .~ Just (toArrayValue <$> vs) + ArrayOfBools vs -> AWS.newArrayValue & the @"booleanValues" .~ Just vs + ArrayOfDoubles vs -> AWS.newArrayValue & the @"doubleValues" .~ Just vs + ArrayOfIntegers vs -> AWS.newArrayValue & the @"longValues" .~ Just vs + ArrayOfTexts vs -> AWS.newArrayValue & the @"stringValues" .~ Just vs diff --git a/src/Data/RdsData/Types/Param.hs b/src/Data/RdsData/Types/Param.hs new file mode 100644 index 0000000..08ea09e --- /dev/null +++ b/src/Data/RdsData/Types/Param.hs @@ -0,0 +1,31 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE TypeApplications #-} + +{- HLINT ignore "Use <&>" -} + +module Data.RdsData.Types.Param + ( Param(..) + , toSqlParameter + ) where + +import Data.Generics.Product.Any +import Data.RdsData.Types.Value +import Data.Text +import GHC.Generics +import Lens.Micro + +import qualified Amazonka.RDSData as AWS +import qualified Data.Aeson as J + +data Param = Param + { name :: Maybe Text + , hint :: Maybe AWS.TypeHint + , value :: Value + } deriving (Eq, Generic, Show) + +instance J.ToJSON Param where + +toSqlParameter :: Param -> AWS.SqlParameter +toSqlParameter (Param n h v) = + AWS.newSqlParameter & the @"name" .~ n & the @"typeHint" .~ h & the @"value" ?~ toField v diff --git a/src/Data/RdsData/Types/Value.hs b/src/Data/RdsData/Types/Value.hs new file mode 100644 index 0000000..5255252 --- /dev/null +++ b/src/Data/RdsData/Types/Value.hs @@ -0,0 +1,58 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE TypeApplications #-} + +{- HLINT ignore "Use <&>" -} + +module Data.RdsData.Types.Value + ( Value(..) 
+ , fromField + , toField + ) where + +import Amazonka.Data.Base64 +import Control.Applicative +import Data.Generics.Product.Any +import Data.RdsData.Internal.Maybe +import Data.RdsData.Types.Array +import Data.Text +import GHC.Generics +import Lens.Micro + +import qualified Amazonka.RDSData as AWS +import qualified Data.Aeson as J + +data Value = + ValueOfArray Array + | ValueOfBase64 Base64 + | ValueOfBool Bool + | ValueOfDouble Double + | ValueOfInteger Integer + | ValueOfText Text + | ValueOfNull + deriving (Eq, Generic, Show) + +instance J.ToJSON Value where + +fromField :: AWS.Field -> Maybe Value +fromField field = + asum + [ field ^. the @"arrayValue" >>= fromArrayValue >>= pure . ValueOfArray + , field ^. the @"blobValue" >>= pure . ValueOfBase64 + , field ^. the @"booleanValue" >>= pure . ValueOfBool + , field ^. the @"doubleValue" >>= pure . ValueOfDouble + , field ^. the @"longValue" >>= pure . ValueOfInteger + , field ^. the @"stringValue" >>= pure . ValueOfText + , field ^. the @"isNull" >>= toMaybe ValueOfNull + ] + +toField :: Value -> AWS.Field +toField = \case + ValueOfArray v -> AWS.newField & the @"arrayValue" .~ Just (toArrayValue v) + ValueOfBase64 v -> AWS.newField & the @"blobValue" .~ Just v + ValueOfBool v -> AWS.newField & the @"booleanValue" .~ Just v + ValueOfDouble v -> AWS.newField & the @"doubleValue" .~ Just v + ValueOfInteger v -> AWS.newField & the @"longValue" .~ Just v + ValueOfText v -> AWS.newField & the @"stringValue" .~ Just v + ValueOfNull -> AWS.newField & the @"isNull" .~ Just True diff --git a/test/Data/RdsData/TestTypes.hs b/test/Data/RdsData/TestTypes.hs new file mode 100644 index 0000000..e432214 --- /dev/null +++ b/test/Data/RdsData/TestTypes.hs @@ -0,0 +1,50 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveGeneric #-} + +{- HLINT ignore "Monad law, left identity" -} +{- HLINT ignore "Use let" -} + +module Data.RdsData.TestTypes + ( Record(..) 
+ ) where + +import Data.ByteString (ByteString) +import Data.RdsData.Internal.Aeson () +import Data.Text (Text) +import Data.Time (Day, TimeOfDay(..), UTCTime) +import Data.ULID (ULID) +import Data.UUID (UUID) +import GHC.Generics + +import qualified Data.Aeson as J + +data Record = Record + { bigint :: Int + , bigserial :: Int + , boolean :: Bool + , bytea :: ByteString + , character :: Text + , characters :: Text + , varyingCharacter :: Text + , varyingCharacters :: Text + , date :: Day + , double :: Double + , integer :: Integer + , json :: J.Value + , jsonb :: J.Value + , numeric :: Text + , numerics :: Text + , real :: Double + , smallint :: Int + , smallserial :: Int + , serial :: Int + , text :: Text + , time :: TimeOfDay + , times :: TimeOfDay + , timestamp :: UTCTime + , timestamps :: UTCTime + , ulid :: ULID + , uuid :: UUID + } + deriving (Show, Eq, Generic) diff --git a/test/Data/RdsDataSpec.hs b/test/Data/RdsDataSpec.hs new file mode 100644 index 0000000..29a06e9 --- /dev/null +++ b/test/Data/RdsDataSpec.hs @@ -0,0 +1,183 @@ +{-# LANGUAGE BlockArguments #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeApplications #-} + +{- HLINT ignore "Monad law, left identity" -} +{- HLINT ignore "Use let" -} + +module Data.RdsDataSpec where + +import Data.Generics.Product.Any +import Data.Maybe +import Data.RdsData.Internal.Aeson () +import Data.RdsData.Types.Value +import Data.Time (TimeOfDay (..)) +import Hedgehog +import Lens.Micro + +import qualified Amazonka.RDSData as AWS +import qualified Data.Aeson as J +import qualified Data.Aeson.Encode.Pretty as J +import qualified Data.ByteString.Lazy as LBS +import qualified Data.RdsData.Decode.Row as DEC +import qualified Data.RdsData.Encode.Row as ENC +import qualified Data.RdsData.TestTypes as TT +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Hedgehog as H +import qualified Hedgehog.Extras as H +import qualified 
Hedgehog.Extras.Test.Golden as H + +hprop_stub :: Property +hprop_stub = H.propertyOnce $ do + H.evalIO $ pure () + response <- H.evalIO (J.eitherDecodeFileStrict @AWS.ExecuteStatementResponse "test/files/row.json") + >>= either (const H.failure) pure + + records <- pure (response ^. the @"records") + >>= maybe H.failure pure + + values <- pure $ mapMaybe fromField <$> records + + rows <- pure + do + DEC.decodeRows + do + TT.Record + <$> DEC.int + <*> DEC.int + <*> DEC.bool + <*> DEC.bytestring + <*> DEC.text + <*> DEC.text + <*> DEC.text + <*> DEC.text + <*> DEC.day + <*> DEC.double + <*> DEC.integer + <*> DEC.json + <*> DEC.json + <*> DEC.text + <*> DEC.text + <*> DEC.double + <*> DEC.int + <*> DEC.int + <*> DEC.int + <*> DEC.text + <*> DEC.timeOfDay + <*> DEC.timeOfDay + <*> DEC.utcTime + <*> DEC.utcTime + <*> DEC.ulid + <*> DEC.uuid + values + >>= either (\s -> H.noteShow_ s >> H.failure) pure + + rows === + [ TT.Record + { TT.bigint = 1234567890 + , TT.bigserial = 1 + , TT.boolean = True + , TT.bytea = "EjQ=" + , TT.character = "A" + , TT.characters = "AB" + , TT.varyingCharacter = "C" + , TT.varyingCharacters = "CD" + , TT.date = read "2024-02-04" + , TT.double = 3.14159265359 + , TT.integer = 42 + , TT.json = fromJust $ J.decode "{\"key\": \"value\"}" + , TT.jsonb = fromJust $ J.decode "{\"key\": \"value\"}" + , TT.numeric = "1234.56" + , TT.numerics = "12.34" + , TT.real = 3.14 + , TT.smallint = 12345 + , TT.smallserial = 1 + , TT.serial = 1 + , TT.text = "Some text" + , TT.time = TimeOfDay 12 34 56 + , TT.times = TimeOfDay 12 34 56.78 + , TT.timestamp = read "2024-02-04 12:34:56 UTC" + , TT.timestamps = read "2024-02-04 12:34:56 UTC" + , TT.ulid = read "0123456789ABCDEFGHJKMNPQRS" + , TT.uuid = read "550e8400-e29b-41d4-a716-446655440000" + } + ] + + +hprop_stub2 :: Property +hprop_stub2 = H.propertyOnce $ do + record <- pure + TT.Record + { TT.bigint = 1234567890 + , TT.bigserial = 1 + , TT.boolean = True + , TT.bytea = "EjQ=" + , TT.character = "A" + , 
TT.characters = "AB" + , TT.varyingCharacter = "C" + , TT.varyingCharacters = "CD" + , TT.date = read "2024-02-04" + , TT.double = 3.14159265359 + , TT.integer = 42 + , TT.json = fromJust $ J.decode "{\"key\": \"value\"}" + , TT.jsonb = fromJust $ J.decode "{\"key\": \"value\"}" + , TT.numeric = "1234.56" + , TT.numerics = "12.34" + , TT.real = 3.14 + , TT.smallint = 12345 + , TT.smallserial = 1 + , TT.serial = 1 + , TT.text = "Some text" + , TT.time = TimeOfDay 12 34 56 + , TT.times = TimeOfDay 12 34 56.78 + , TT.timestamp = read "2024-02-04 12:34:56 UTC" + , TT.timestamps = read "2024-02-04 12:34:56 UTC" + , TT.ulid = read "0123456789ABCDEFGHJKMNPQRS" + , TT.uuid = read "550e8400-e29b-41d4-a716-446655440000" + } + + row <- pure $ + mconcat + [ ENC.encodeRow ENC.int (record ^. the @"bigint" ) [] + , ENC.encodeRow ENC.int (record ^. the @"bigserial" ) [] + , ENC.encodeRow ENC.bool (record ^. the @"boolean" ) [] + , ENC.encodeRow ENC.bytestring (record ^. the @"bytea" ) [] + , ENC.encodeRow ENC.text (record ^. the @"character" ) [] + , ENC.encodeRow ENC.text (record ^. the @"characters" ) [] + , ENC.encodeRow ENC.text (record ^. the @"varyingCharacter" ) [] + , ENC.encodeRow ENC.text (record ^. the @"varyingCharacters") [] + , ENC.encodeRow ENC.day (record ^. the @"date" ) [] + , ENC.encodeRow ENC.double (record ^. the @"double" ) [] + , ENC.encodeRow ENC.integer (record ^. the @"integer" ) [] + , ENC.encodeRow ENC.json (record ^. the @"json" ) [] + , ENC.encodeRow ENC.json (record ^. the @"jsonb" ) [] + , ENC.encodeRow ENC.text (record ^. the @"numeric" ) [] + , ENC.encodeRow ENC.text (record ^. the @"numerics" ) [] + , ENC.encodeRow ENC.double (record ^. the @"real" ) [] + , ENC.encodeRow ENC.int (record ^. the @"smallint" ) [] + , ENC.encodeRow ENC.int (record ^. the @"smallserial" ) [] + , ENC.encodeRow ENC.int (record ^. the @"serial" ) [] + , ENC.encodeRow ENC.text (record ^. the @"text" ) [] + , ENC.encodeRow ENC.timeOfDay (record ^. 
the @"time" ) [] + , ENC.encodeRow ENC.timeOfDay (record ^. the @"times" ) [] + , ENC.encodeRow ENC.utcTime (record ^. the @"timestamp" ) [] + , ENC.encodeRow ENC.utcTime (record ^. the @"timestamps" ) [] + , ENC.encodeRow ENC.uuid (record ^. the @"uuid" ) [] + ] + + json <- pure $ T.unpack $ T.decodeUtf8 $ LBS.toStrict $ J.encodePretty row + + H.diffVsGoldenFile json "test/files/golden/row.json" + + response <- pure $ + AWS.newExecuteStatementResponse 0 + & the @"records" .~ Just [toField <$> row] + + responseJson <- pure $ T.unpack $ T.decodeUtf8 $ LBS.toStrict $ J.encodePretty response + + H.diffVsGoldenFile responseJson "test/files/golden/rds-row.json" + + True === True + diff --git a/test/Spec.hs b/test/Spec.hs new file mode 100644 index 0000000..e1cee34 --- /dev/null +++ b/test/Spec.hs @@ -0,0 +1 @@ +{-# OPTIONS_GHC -F -pgmF tasty-discover -optF --hide-successes #-} diff --git a/test/files/golden/rds-row.json b/test/files/golden/rds-row.json new file mode 100644 index 0000000..8dac371 --- /dev/null +++ b/test/files/golden/rds-row.json @@ -0,0 +1,82 @@ +{ + "httpStatus": 0, + "records": [ + [ + { + "longValue": 1234567890 + }, + { + "longValue": 1 + }, + { + "booleanValue": true + }, + { + "blobValue": "UldwUlBRPT0=" + }, + { + "stringValue": "A" + }, + { + "stringValue": "AB" + }, + { + "stringValue": "C" + }, + { + "stringValue": "CD" + }, + { + "stringValue": "2024-02-04" + }, + { + "doubleValue": 3.14159265359 + }, + { + "longValue": 42 + }, + { + "stringValue": "{\"key\":\"value\"}" + }, + { + "stringValue": "{\"key\":\"value\"}" + }, + { + "stringValue": "1234.56" + }, + { + "stringValue": "12.34" + }, + { + "doubleValue": 3.14 + }, + { + "longValue": 12345 + }, + { + "longValue": 1 + }, + { + "longValue": 1 + }, + { + "stringValue": "Some text" + }, + { + "stringValue": "12:34:56" + }, + { + "stringValue": "12:34:56.78" + }, + { + "stringValue": "2024-02-04 12:34:56" + }, + { + "stringValue": "2024-02-04 12:34:56" + }, + { + "stringValue": 
"550e8400-e29b-41d4-a716-446655440000" + } + ] + ] +} \ No newline at end of file diff --git a/test/files/golden/row.json b/test/files/golden/row.json new file mode 100644 index 0000000..923e988 --- /dev/null +++ b/test/files/golden/row.json @@ -0,0 +1,102 @@ +[ + { + "contents": 1234567890, + "tag": "ValueOfInteger" + }, + { + "contents": 1, + "tag": "ValueOfInteger" + }, + { + "contents": true, + "tag": "ValueOfBool" + }, + { + "contents": "UldwUlBRPT0=", + "tag": "ValueOfBase64" + }, + { + "contents": "A", + "tag": "ValueOfText" + }, + { + "contents": "AB", + "tag": "ValueOfText" + }, + { + "contents": "C", + "tag": "ValueOfText" + }, + { + "contents": "CD", + "tag": "ValueOfText" + }, + { + "contents": "2024-02-04", + "tag": "ValueOfText" + }, + { + "contents": 3.14159265359, + "tag": "ValueOfDouble" + }, + { + "contents": 42, + "tag": "ValueOfInteger" + }, + { + "contents": "{\"key\":\"value\"}", + "tag": "ValueOfText" + }, + { + "contents": "{\"key\":\"value\"}", + "tag": "ValueOfText" + }, + { + "contents": "1234.56", + "tag": "ValueOfText" + }, + { + "contents": "12.34", + "tag": "ValueOfText" + }, + { + "contents": 3.14, + "tag": "ValueOfDouble" + }, + { + "contents": 12345, + "tag": "ValueOfInteger" + }, + { + "contents": 1, + "tag": "ValueOfInteger" + }, + { + "contents": 1, + "tag": "ValueOfInteger" + }, + { + "contents": "Some text", + "tag": "ValueOfText" + }, + { + "contents": "12:34:56", + "tag": "ValueOfText" + }, + { + "contents": "12:34:56.78", + "tag": "ValueOfText" + }, + { + "contents": "2024-02-04 12:34:56", + "tag": "ValueOfText" + }, + { + "contents": "2024-02-04 12:34:56", + "tag": "ValueOfText" + }, + { + "contents": "550e8400-e29b-41d4-a716-446655440000", + "tag": "ValueOfText" + } +] \ No newline at end of file diff --git a/test/files/rds-schema.sql b/test/files/rds-schema.sql new file mode 100644 index 0000000..171052f --- /dev/null +++ b/test/files/rds-schema.sql @@ -0,0 +1,28 @@ +CREATE TABLE all_types ( + the_bigint bigint not null, 
+ the_bigserial bigserial not null, + the_boolean boolean not null, + the_bytea bytea not null, + the_character character not null, + the_characters character(2) not null, + the_varying_character character varying not null, + the_varying_characters character varying(2) not null, + the_date date not null, + the_double double precision not null, + the_integer integer not null, + the_json json not null, + the_jsonb jsonb not null, + the_numeric numeric not null, + the_numerics numeric(4, 2) not null, + the_real real not null, + the_smallint smallint not null, + the_smallserial smallserial not null, + the_serial serial not null, + the_text text not null, + the_time time not null, + the_times time(2) not null, + the_timestamp timestamp not null, + the_timestamps timestamp(2) not null, + the_uuid uuid not null, + the_ulid char(16) not null +) diff --git a/test/files/rds-value.sql b/test/files/rds-value.sql new file mode 100644 index 0000000..6dcd490 --- /dev/null +++ b/test/files/rds-value.sql @@ -0,0 +1,54 @@ +INSERT INTO all_types ( + the_bigint, + the_bigserial, + the_boolean, + the_bytea, + the_character, + the_characters, + the_varying_character, + the_varying_characters, + the_date, + the_double, + the_integer, + the_json, + the_jsonb, + the_numeric, + the_numerics, + the_real, + the_smallint, + the_smallserial, + the_serial, + the_text, + the_time, + the_times, + the_timestamp, + the_timestamps, + the_uuid +) VALUES ( + 1234567890, -- the_bigint + DEFAULT, -- the_bigserial + TRUE, -- the_boolean + E'\\x1234', -- the_bytea + 'A', -- the_character + 'AB', -- the_characters + 'C', -- the_varying_character + 'CD', -- the_varying_characters + '2024-02-04', -- the_date + 3.14159265359, -- the_double + 42, -- the_integer + '{"key":"value"}', -- the_json + '{"key":"value"}', -- the_jsonb + 1234.56, -- the_numeric + 12.34, -- the_numerics + 3.14, -- the_real + 12345, -- the_smallint + DEFAULT, -- the_smallserial + DEFAULT, -- the_serial + 'Some text', -- the_text + 
'12:34:56', -- the_time + '12:34:56.78', -- the_times + '2024-02-04 12:34:56', -- the_timestamp + '2024-02-04 12:34:56', -- the_timestamps + '0123456789ABCDEFGHJKMNPQRS', -- the_ulid + '550e8400-e29b-41d4-a716-446655440000' -- the_uuid +); diff --git a/test/files/row.json b/test/files/row.json new file mode 100644 index 0000000..7b6ad98 --- /dev/null +++ b/test/files/row.json @@ -0,0 +1,86 @@ +{ + "httpStatus": 200, + "numberOfRecordsUpdated": 0, + "records": [ + [ + { + "longValue": 1234567890 + }, + { + "longValue": 1 + }, + { + "booleanValue": true + }, + { + "blobValue": "EjQ=" + }, + { + "stringValue": "A" + }, + { + "stringValue": "AB" + }, + { + "stringValue": "C" + }, + { + "stringValue": "CD" + }, + { + "stringValue": "2024-02-04" + }, + { + "doubleValue": 3.14159265359 + }, + { + "longValue": 42 + }, + { + "stringValue": "{\"key\":\"value\"}" + }, + { + "stringValue": "{\"key\":\"value\"}" + }, + { + "stringValue": "1234.56" + }, + { + "stringValue": "12.34" + }, + { + "doubleValue": 3.14 + }, + { + "longValue": 12345 + }, + { + "longValue": 1 + }, + { + "longValue": 1 + }, + { + "stringValue": "Some text" + }, + { + "stringValue": "12:34:56" + }, + { + "stringValue": "12:34:56.780" + }, + { + "stringValue": "2024-02-04 12:34:56" + }, + { + "stringValue": "2024-02-04 12:34:56" + }, + { + "stringValue": "0123456789ABCDEFGHJKMNPQRS" + }, + { + "stringValue": "550e8400-e29b-41d4-a716-446655440000" + } + ] + ] +} diff --git a/test/files/schema.sql b/test/files/schema.sql new file mode 100644 index 0000000..f6a00d7 --- /dev/null +++ b/test/files/schema.sql @@ -0,0 +1,47 @@ +CREATE TABLE all_types ( + the_bigint bigint not null, + the_bigserial bigserial not null, + the_bit bit not null, + the_bits bit(2) not null, + the_varying_bit bit varying not null, + the_varying_bits bit varying(2) not null, + the_boolean boolean not null, + the_box box not null, + the_bytea bytea not null, + the_character character not null, + the_characters character(2) not null, + 
the_varying_character character varying not null, + the_varying_characters character varying(2) not null, + the_cidr cidr not null, + the_circle circle not null, + the_date date not null, + the_double double precision not null, + the_inet inet not null, + the_integer integer not null, + the_json json not null, + the_jsonb jsonb not null, + the_line line not null, + the_lseg lseg not null, + the_macaddr macaddr not null, + the_macaddr8 macaddr8 not null, + the_money money not null, + the_numeric numeric not null, + the_numerics numeric(4, 2) not null, + the_path path not null, + the_point point not null, + the_polygon polygon not null, + the_real real not null, + the_smallint smallint not null, + the_smallserial smallserial not null, + the_serial serial not null, + the_text text not null, + the_time time not null, + the_times time(2) not null, + the_timestamp timestamp not null, + the_timestamps timestamp(2) not null, + the_tsquery tsquery not null, + the_tsvector tsvector not null, + the_ulid char(16) not null, + the_uuid uuid not null, + the_xml xml not null +) diff --git a/test/files/value.sql b/test/files/value.sql new file mode 100644 index 0000000..7b541b7 --- /dev/null +++ b/test/files/value.sql @@ -0,0 +1,92 @@ +INSERT INTO all_types ( + the_bigint, + the_bigserial, + the_bit, + the_bits, + the_varying_bit, + the_varying_bits, + the_boolean, + the_box, + the_bytea, + the_character, + the_characters, + the_varying_character, + the_varying_characters, + the_cidr, + the_circle, + the_date, + the_double, + the_inet, + the_integer, + the_json, + the_jsonb, + the_line, + the_lseg, + the_macaddr, + the_macaddr8, + the_money, + the_numeric, + the_numerics, + the_path, + the_point, + the_polygon, + the_real, + the_smallint, + the_smallserial, + the_serial, + the_text, + the_time, + the_times, + the_timestamp, + the_timestamps, + the_tsquery, + the_tsvector, + the_uuid, + the_xml +) VALUES ( + 1234567890, -- the_bigint + DEFAULT, -- the_bigserial + B'1', -- the_bit + 
B'10', -- the_bits + B'0', -- the_varying_bit + B'11', -- the_varying_bits + TRUE, -- the_boolean + '((1,1),(2,2))', -- the_box + E'\\x1234', -- the_bytea + 'A', -- the_character + 'AB', -- the_characters + 'C', -- the_varying_character + 'CD', -- the_varying_characters + '192.168.1.0/24', -- the_cidr + '((0,0),2)', -- the_circle + '2024-02-04', -- the_date + 3.14159265359, -- the_double + '192.168.0.1', -- the_inet + 42, -- the_integer + '{"key":"value"}', -- the_json + '{"key":"value"}', -- the_jsonb + '((1,1),(2,2))', -- the_line + '[(1,2),(3,4)]', -- the_lseg + '00:11:22:33:44:55', -- the_macaddr + '00:11:22:33:44:55:66:77', -- the_macaddr8 + 12345.67, -- the_money + 1234.56, -- the_numeric + 12.34, -- the_numerics + '[(1,2),(3,4)]', -- the_path + '(5,6)', -- the_point + '((1,2),(3,4))', -- the_polygon + 3.14, -- the_real + 12345, -- the_smallint + DEFAULT, -- the_smallserial + DEFAULT, -- the_serial + 'Some text', -- the_text + '12:34:56', -- the_time + '12:34:56.78', -- the_times + '2024-02-04 12:34:56', -- the_timestamp + '2024-02-04 12:34:56', -- the_timestamps + to_tsquery('english', 'query'), -- the_tsquery + to_tsvector('english', 'document'), -- the_tsvector + '0123456789ABCDEFGHJKMNPQRS', -- the_ulid + '550e8400-e29b-41d4-a716-446655440000', -- the_uuid + 'data' -- the_xml +); diff --git a/testlib/Data/RdsData/Polysemy/Test/Cluster.hs b/testlib/Data/RdsData/Polysemy/Test/Cluster.hs new file mode 100644 index 0000000..3ead327 --- /dev/null +++ b/testlib/Data/RdsData/Polysemy/Test/Cluster.hs @@ -0,0 +1,110 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} + +{- HLINT ignore "Redundant id" -} +{- HLINT ignore "Redundant pure" -} +{- HLINT ignore "Use let" -} + +module Data.RdsData.Polysemy.Test.Cluster + ( RdsClusterDetails(..), + createRdsDbCluster, + ) where + +import qualified Amazonka as AWS +import qualified Amazonka.RDS as AWS 
+import qualified Amazonka.SecretsManager as AWS +import Data.Aeson ((.=)) +import qualified Data.Aeson as J +import qualified Data.ByteString.Base64 as B64 +import qualified Data.ByteString.Lazy as LBS +import Data.Function +import Data.Generics.Product.Any +import Data.RdsData.Migration.Types (RdsClusterDetails (RdsClusterDetails)) +import Data.RdsData.Polysemy.Test.Workspace +import qualified Data.Text.Encoding as T +import qualified Data.UUID as UUID +import qualified Data.UUID.V4 as UUID +import HaskellWorks.Polysemy +import HaskellWorks.Polysemy.Amazonka +import HaskellWorks.Polysemy.Amazonka.LocalStack (getLocalStackEndpoint, + inspectContainer) +import HaskellWorks.Polysemy.Hedgehog +import HaskellWorks.Prelude +import HaskellWorks.TestContainers.LocalStack +import Lens.Micro + +createRdsDbCluster :: () + => HasCallStack + => Member (Embed IO) r + => Member (Reader AWS.Env) r + => Member Hedgehog r + => Member Resource r + => IO Container + -> Sem r RdsClusterDetails +createRdsDbCluster getContainer = withFrozenCallStack do + container <- embed getContainer + jotShowM_ $ getLocalStackEndpoint container + jotYamlM_ $ inspectContainer container + masterUsername <- pure "masterUsername" + masterPassword <- pure "masterPassword" + + let dbClusterId = "my-cluster" + + createDbClusterRequest <- + pure $ + AWS.newCreateDBCluster dbClusterId "aurora-postgresql" + & the @"masterUsername" .~ Just masterUsername + & the @"masterUserPassword" .~ Just masterPassword + & the @"enableHttpEndpoint" .~ Just True + & the @"databaseName" .~ Just databaseName + + createDbClusterResponse <- + sendAws createDbClusterRequest + & jotShowDataLog @AwsLogEntry + & trapFail + + let secretName = "my-aurora-cluster" + + secretString <- + jotYaml $ + J.object + [ "engine" .= id @Text "aurora-postgresql" + , "username" .= id @Text masterUsername + , "password" .= id @Text masterPassword + , "host" .= id @Text "localhost" + , "dbname" .= id @Text databaseName + , "port" .= id @Text "4510" 
+ ] + + uuid <- embed UUID.nextRandom + + let clientRequestToken = T.encodeUtf8 $ UUID.toText uuid + + let secretStringText = T.decodeUtf8 $ LBS.toStrict $ J.encode secretString + + createSecretReq <- + pure $ + AWS.newCreateSecret secretName + & the @"secretString" ?~ AWS.Sensitive secretStringText + & the @"clientRequestToken" ?~ T.decodeUtf8 (B64.encode clientRequestToken) + + createSecetResp <- + sendAws createSecretReq + & jotShowDataLog @AwsLogEntry + & trapFail + + createDbInstanceReq <- + pure $ + AWS.newCreateDBInstance dbClusterId "my-db-instance" "db.t3.medium" + & the @"engine" .~ "aurora-postgresql" + + _dbInstanceIdResp <- + jotShowM $ + sendAws createDbInstanceReq + & jotShowDataLog @AwsLogEntry + & trapFail + + pure (RdsClusterDetails createDbClusterResponse createSecetResp) diff --git a/testlib/Data/RdsData/Polysemy/Test/Env.hs b/testlib/Data/RdsData/Polysemy/Test/Env.hs new file mode 100644 index 0000000..a56856e --- /dev/null +++ b/testlib/Data/RdsData/Polysemy/Test/Env.hs @@ -0,0 +1,97 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE PolyKinds #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeOperators #-} + +{- HLINT ignore "Redundant pure" -} +{- HLINT ignore "Use let" -} + +module Data.RdsData.Polysemy.Test.Env + ( AwsResourceArn(..), + AwsSecretArn(..), + runLocalTestEnv, + runTestEnv, + runReaderFromEnvOrFail, + runReaderResourceAndSecretArnsFromResponses, + ) where + +import qualified Amazonka as AWS +import Data.Generics.Product.Any +import Data.RdsData.Aws +import Data.RdsData.Migration.Types +import Data.RdsData.Polysemy.Error +import qualified Data.Text as Text +import HaskellWorks.Polysemy.Amazonka +import HaskellWorks.Polysemy.Amazonka.LocalStack +import HaskellWorks.Polysemy.Error +import HaskellWorks.Polysemy.Hedgehog +import HaskellWorks.Polysemy.System.Environment 
+import HaskellWorks.Prelude +import HaskellWorks.TestContainers.LocalStack (Container) +import Lens.Micro +import Polysemy +import Polysemy.Error +import Polysemy.Reader + +runTestEnv :: () + => HasCallStack + => Member (Embed IO) r + => Member Hedgehog r + => Sem + ( Reader AWS.Env + : Reader AwsResourceArn + : Reader AwsSecretArn + : r) + a + -> Sem r a +runTestEnv f = + withFrozenCallStack $ f + & runReaderAwsEnvDiscover + & runReaderFromEnvOrFail (AwsResourceArn . Text.pack) "AURORA_RESOURCE_ARN" + & runReaderFromEnvOrFail (AwsSecretArn . Text.pack) "AURORA_SECRET_ARN" + +runLocalTestEnv :: () + => HasCallStack + => Member (Embed IO) r + => IO Container + -> Sem + ( Reader AWS.Env + : r) + a + -> Sem r a +runLocalTestEnv getContainer f = + withFrozenCallStack $ f + & runReaderLocalAwsEnvDiscover getContainer + +runReaderFromEnvOrFail :: forall i r a. () + => Member (Embed IO) r + => Member Hedgehog r + => (String -> i) + -> String + -> Sem (Reader i ': r) a + -> Sem r a +runReaderFromEnvOrFail f envVar action = do + env <- lookupEnv envVar + & onNothingM (throw (EnvironmentVariableMissing envVar) & trapFail) + + runReader (f env) action + +runReaderResourceAndSecretArnsFromResponses :: () + => Member Hedgehog r + => RdsClusterDetails + -> Sem (Reader AwsResourceArn : Reader AwsSecretArn : r) a + -> Sem r a +runReaderResourceAndSecretArnsFromResponses details f = do + resourceArn <- (details ^. the @"createDbClusterResponse" . the @"dbCluster" . _Just . the @"dbClusterArn") + & nothingFail + + secretArn <- (details ^. the @"createSecretResponse" ^. 
the @"arn") + & nothingFail + + f & runReader (AwsResourceArn resourceArn) + & runReader (AwsSecretArn secretArn) diff --git a/testlib/Data/RdsData/Polysemy/Test/Workspace.hs b/testlib/Data/RdsData/Polysemy/Test/Workspace.hs new file mode 100644 index 0000000..849bb99 --- /dev/null +++ b/testlib/Data/RdsData/Polysemy/Test/Workspace.hs @@ -0,0 +1,41 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeOperators #-} + +module Data.RdsData.Polysemy.Test.Workspace + ( databaseName, + localWorkspace, + ) where + +import qualified Data.Text as Text + +import HaskellWorks.Polysemy +import HaskellWorks.Polysemy.Hedgehog +import HaskellWorks.Polysemy.Prelude +import Polysemy () + +databaseName :: Text +databaseName = "rds_data_migration" + +localWorkspace :: () + => HasCallStack + => Member (Embed IO) r + => Member Hedgehog r + => Member Log r + => Sem + ( Reader Workspace + : Reader ProjectRoot + : Reader PackagePath + : Resource + : r) + () + -> Sem r () +localWorkspace f = + withFrozenCallStack $ do + cabalProjectDir <- findCabalProjectDir "." + + f & moduleWorkspace (Text.unpack databaseName) + & runReader (ProjectRoot cabalProjectDir) + & runReader (PackagePath "rds-data") + & runResource