{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}

-- Derived from AWS service descriptions, licensed under Apache 2.0.

-- |
-- Module      : Amazonka.Glue.Types.SparkSQL
-- Copyright   : (c) 2013-2023 Brendan Hay
-- License     : Mozilla Public License, v. 2.0.
-- Maintainer  : Brendan Hay
-- Stability   : auto-generated
-- Portability : non-portable (GHC extensions)
module Amazonka.Glue.Types.SparkSQL where

import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import Amazonka.Glue.Types.GlueSchema
import Amazonka.Glue.Types.SqlAlias
import qualified Amazonka.Prelude as Prelude

-- | Specifies a transform where you enter a SQL query using Spark SQL syntax
-- to transform the data. The output is a single @DynamicFrame@.
--
-- /See:/ 'newSparkSQL' smart constructor.
data SparkSQL = SparkSQL'
  { -- | Specifies the data schema for the SparkSQL transform.
    outputSchemas :: Prelude.Maybe [GlueSchema],
    -- | The name of the transform node.
    name :: Prelude.Text,
    -- | The data inputs identified by their node names. You can associate a
    -- table name with each input node to use in the SQL query. The name you
    -- choose must meet the Spark SQL naming restrictions.
    inputs :: Prelude.NonEmpty Prelude.Text,
    -- | A SQL query that must use Spark SQL syntax and return a single data set.
    sqlQuery :: Prelude.Text,
    -- | A list of aliases. An alias allows you to specify what name to use in
    -- the SQL for a given input. For example, you have a datasource named
    -- \"MyDataSource\". If you specify @From@ as MyDataSource, and @Alias@ as
    -- SqlName, then in your SQL you can do:
    --
    -- @select * from SqlName@
    --
    -- and that gets data from MyDataSource.
    sqlAliases :: [SqlAlias]
  }
  deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)

-- |
-- Create a value of 'SparkSQL' with all optional fields omitted.
--
-- Use <https://hackage.haskell.org/package/generic-lens generic-lens> or <https://hackage.haskell.org/package/optics optics> to modify other optional fields.
--
-- The following record fields are available, with the corresponding lenses provided
-- for backwards compatibility:
--
-- 'outputSchemas', 'sparkSQL_outputSchemas' - Specifies the data schema for the SparkSQL transform.
--
-- 'name', 'sparkSQL_name' - The name of the transform node.
--
-- 'inputs', 'sparkSQL_inputs' - The data inputs identified by their node names. You can associate a
-- table name with each input node to use in the SQL query. The name you
-- choose must meet the Spark SQL naming restrictions.
--
-- 'sqlQuery', 'sparkSQL_sqlQuery' - A SQL query that must use Spark SQL syntax and return a single data set.
--
-- 'sqlAliases', 'sparkSQL_sqlAliases' - A list of aliases. An alias allows you to specify what name to use in
-- the SQL for a given input. For example, you have a datasource named
-- \"MyDataSource\". If you specify @From@ as MyDataSource, and @Alias@ as
-- SqlName, then in your SQL you can do:
--
-- @select * from SqlName@
--
-- and that gets data from MyDataSource.
newSparkSQL ::
  -- | 'name'
  Prelude.Text ->
  -- | 'inputs'
  Prelude.NonEmpty Prelude.Text ->
  -- | 'sqlQuery'
  Prelude.Text ->
  SparkSQL
newSparkSQL pName_ pInputs_ pSqlQuery_ =
  SparkSQL'
    { outputSchemas = Prelude.Nothing,
      name = pName_,
      inputs = Lens.coerced Lens.# pInputs_,
      sqlQuery = pSqlQuery_,
      sqlAliases = Prelude.mempty
    }
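
-- A minimal construction sketch (illustration only; the node name, input node
-- name, and query below are hypothetical, not part of the generated API).
-- Required fields are supplied positionally through 'newSparkSQL'; the
-- optional 'outputSchemas' starts as 'Prelude.Nothing' and 'sqlAliases'
-- starts empty, and both can be adjusted afterwards with the lenses below or
-- plain record-update syntax.
exampleSparkSQL :: SparkSQL
exampleSparkSQL =
  newSparkSQL
    "SqlTransform"                -- transform node name
    (Prelude.pure "MyDataSource") -- single input node ('NonEmpty' via 'pure')
    "select * from MyDataSource"  -- Spark SQL query text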

-- | Specifies the data schema for the SparkSQL transform.
sparkSQL_outputSchemas :: Lens.Lens' SparkSQL (Prelude.Maybe [GlueSchema])
sparkSQL_outputSchemas = Lens.lens (\SparkSQL' {outputSchemas} -> outputSchemas) (\s@SparkSQL' {} a -> s {outputSchemas = a} :: SparkSQL) Prelude.. Lens.mapping Lens.coerced

-- | The name of the transform node.
sparkSQL_name :: Lens.Lens' SparkSQL Prelude.Text
sparkSQL_name = Lens.lens (\SparkSQL' {name} -> name) (\s@SparkSQL' {} a -> s {name = a} :: SparkSQL)

-- | The data inputs identified by their node names. You can associate a
-- table name with each input node to use in the SQL query. The name you
-- choose must meet the Spark SQL naming restrictions.
sparkSQL_inputs :: Lens.Lens' SparkSQL (Prelude.NonEmpty Prelude.Text)
sparkSQL_inputs = Lens.lens (\SparkSQL' {inputs} -> inputs) (\s@SparkSQL' {} a -> s {inputs = a} :: SparkSQL) Prelude.. Lens.coerced

-- | A SQL query that must use Spark SQL syntax and return a single data set.
sparkSQL_sqlQuery :: Lens.Lens' SparkSQL Prelude.Text
sparkSQL_sqlQuery = Lens.lens (\SparkSQL' {sqlQuery} -> sqlQuery) (\s@SparkSQL' {} a -> s {sqlQuery = a} :: SparkSQL)

-- | A list of aliases. An alias allows you to specify what name to use in
-- the SQL for a given input. For example, you have a datasource named
-- \"MyDataSource\". If you specify @From@ as MyDataSource, and @Alias@ as
-- SqlName, then in your SQL you can do:
--
-- @select * from SqlName@
--
-- and that gets data from MyDataSource.
sparkSQL_sqlAliases :: Lens.Lens' SparkSQL [SqlAlias]
sparkSQL_sqlAliases = Lens.lens (\SparkSQL' {sqlAliases} -> sqlAliases) (\s@SparkSQL' {} a -> s {sqlAliases = a} :: SparkSQL) Prelude.. Lens.coerced
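
-- A sketch of the alias mechanism described above (illustration only). It
-- assumes the companion 'newSqlAlias' constructor takes the @From@ node name
-- followed by the @Alias@, mirroring the documented MyDataSource / SqlName
-- example; the query can then refer to the input node as @SqlName@.
exampleAliasedSparkSQL :: SparkSQL
exampleAliasedSparkSQL =
  ( newSparkSQL
      "SqlTransform"
      (Prelude.pure "MyDataSource")
      "select * from SqlName"
  )
    { sqlAliases = [newSqlAlias "MyDataSource" "SqlName"]
    }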

instance Data.FromJSON SparkSQL where
  parseJSON =
    Data.withObject
      "SparkSQL"
      ( \x ->
          SparkSQL'
            Prelude.<$> (x Data..:? "OutputSchemas" Data..!= Prelude.mempty)
            Prelude.<*> (x Data..: "Name")
            Prelude.<*> (x Data..: "Inputs")
            Prelude.<*> (x Data..: "SqlQuery")
            Prelude.<*> (x Data..:? "SqlAliases" Data..!= Prelude.mempty)
      )

instance Prelude.Hashable SparkSQL where
  hashWithSalt _salt SparkSQL' {..} =
    _salt
      `Prelude.hashWithSalt` outputSchemas
      `Prelude.hashWithSalt` name
      `Prelude.hashWithSalt` inputs
      `Prelude.hashWithSalt` sqlQuery
      `Prelude.hashWithSalt` sqlAliases

instance Prelude.NFData SparkSQL where
  rnf SparkSQL' {..} =
    Prelude.rnf outputSchemas
      `Prelude.seq` Prelude.rnf name
      `Prelude.seq` Prelude.rnf inputs
      `Prelude.seq` Prelude.rnf sqlQuery
      `Prelude.seq` Prelude.rnf sqlAliases

instance Data.ToJSON SparkSQL where
  toJSON SparkSQL' {..} =
    Data.object
      ( Prelude.catMaybes
          [ ("OutputSchemas" Data..=) Prelude.<$> outputSchemas,
            Prelude.Just ("Name" Data..= name),
            Prelude.Just ("Inputs" Data..= inputs),
            Prelude.Just ("SqlQuery" Data..= sqlQuery),
            Prelude.Just ("SqlAliases" Data..= sqlAliases)
          ]
      )