diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal
index 17121b8ebc7d5..d7b4778ce2c54 100644
--- a/server/graphql-engine.cabal
+++ b/server/graphql-engine.cabal
@@ -65,6 +65,7 @@ library
                        -- Encoder related
                      , uuid
                      , vector
+                     , vector-builder

                        -- Logging related
                      , network
@@ -274,6 +275,7 @@ library
                      , Control.Concurrent.Extended
                      , Control.Lens.Extended
                      , Data.Aeson.Extended
+                     , Data.Aeson.Ordered
                      , Data.HashMap.Strict.InsOrd.Extended
                      , Data.Parser.JSONPath
                      , Data.Sequence.NonEmpty
diff --git a/server/src-lib/Data/Aeson/Ordered.hs b/server/src-lib/Data/Aeson/Ordered.hs
new file mode 100644
index 0000000000000..5eda436b008da
--- /dev/null
+++ b/server/src-lib/Data/Aeson/Ordered.hs
@@ -0,0 +1,222 @@
+{-# LANGUAGE CPP #-}
+{-# LANGUAGE DeriveDataTypeable #-}
+
+-- | A version of aeson that parses with key order preserved.
+--
+-- Copyright:
+--   (c) 2011-2016 Bryan O'Sullivan
+--   (c) 2011 MailRank, Inc.
+
+module Data.Aeson.Ordered
+  ( Value(..)
+  , Object
+  , Array
+  , Data.Aeson.Ordered.safeUnion
+  , value
+  , decode
+  , Data.Aeson.Ordered.toList
+  , fromList
+  , insert
+  , delete
+  , empty
+  , eitherDecode
+  , toEncJSON
+  , Data.Aeson.Ordered.lookup
+  ) where
+
+import Control.Applicative hiding (empty)
+import qualified Data.Aeson as J
+import Data.Aeson.Parser (jstring)
+import Data.Attoparsec.ByteString (Parser)
+import qualified Data.Attoparsec.ByteString as A
+import qualified Data.Attoparsec.ByteString.Char8 as A8
+import Data.Bifunctor
+import Data.ByteString (ByteString)
+import qualified Data.ByteString.Lazy as L
+import Data.Data
+import Data.Functor
+import Data.HashMap.Strict.InsOrd (InsOrdHashMap)
+import qualified Data.HashMap.Strict.InsOrd as OMap
+import Data.Scientific
+import Data.Text (Text)
+import qualified Data.Text as T
+import Data.Vector (Vector)
+import qualified Data.Vector as V
+import GHC.Generics
+import Hasura.EncJSON
+import Hasura.Prelude hiding (empty, first, second)
+-- import Prelude hiding (error, undefined)
+
+--------------------------------------------------------------------------------
+-- Encoding via Hasura's EncJSON
+
+toEncJSON :: Value -> EncJSON
+toEncJSON =
+  \case
+    Object (Object_ omap) ->
+      encJFromAssocList (map (second toEncJSON) (OMap.toList omap))
+    Array vec -> encJFromList (map toEncJSON (V.toList vec))
+    String s -> encJFromJValue s
+    Number sci -> encJFromJValue sci
+    Bool b -> encJFromJValue b
+    Null -> encJFromJValue J.Null
+
+--------------------------------------------------------------------------------
+-- Copied constants from aeson
+
+#define BACKSLASH 92
+#define CLOSE_CURLY 125
+#define CLOSE_SQUARE 93
+#define COMMA 44
+#define DOUBLE_QUOTE 34
+#define OPEN_CURLY 123
+#define OPEN_SQUARE 91
+#define C_0 48
+#define C_9 57
+#define C_A 65
+#define C_F 70
+#define C_a 97
+#define C_f 102
+#define C_n 110
+#define C_t 116
+
+--------------------------------------------------------------------------------
+-- Our altered type
+
+-- | A JSON \"object\" (key\/value map). This is where this type
+-- differs from the 'aeson' package.
+newtype Object = Object_ { unObject_ :: InsOrdHashMap Text Value}
+  deriving (Eq, Read, Show, Typeable, Data, Generic)
+
+-- | Union the keys, ordered, in two maps, erroring on duplicates.
+safeUnion :: Object -> Object -> Either String Object
+safeUnion (Object_ x) (Object_ y) =
+  fmap
+    Object_
+    (traverse
+       id
+       (OMap.unionWithKey
+          (\k _a _b -> Left ("Duplicate key: " ++ T.unpack k))
+          (fmap Right x)
+          (fmap Right y)))
+
+-- | Empty object.
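+--
+-- For example (illustrative only, derived from the definitions in this
+-- module), building a two-key object in insertion order:
+--
+-- > toList (insert (1, "b") Null (insert (0, "a") Null empty))
+-- > -- [("a",Null),("b",Null)]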
+empty :: Object
+empty = Object_ mempty
+
+-- | Insert before the element at index i. Think of it in terms of
+-- 'splitAt', which is (take k, drop k). Deletes existing key, if any.
+insert :: (Int, Text) -> Value -> Object -> Object
+insert (idx, key) val =
+  Object_ .
+  OMap.fromList .
+  uncurry (<>) .
+  second ((key, val) :) .
+  splitAt idx .
+  OMap.toList .
+  OMap.delete key .
+  unObject_
+
+-- | Lookup a key.
+lookup :: Text -> Object -> Maybe Value
+lookup key (Object_ omap) = OMap.lookup key omap
+
+-- | Delete a key.
+delete :: Text -> Object -> Object
+delete key (Object_ omap) = Object_ (OMap.delete key omap)
+
+-- | Convert an object to an association list, preserving key order.
+toList :: Object -> [(Text,Value)]
+toList (Object_ omap) = OMap.toList omap
+
+-- | Build an object from an association list, preserving key order.
+fromList :: [(Text,Value)] -> Object
+fromList = Object_ . OMap.fromList
+
+-- | A JSON \"array\" (sequence).
+type Array = Vector Value
+
+-- | A JSON value represented as a Haskell value. Intentionally
+-- shadowing the 'Value' from the aeson package.
+data Value
+  = Object !Object
+  | Array !Array
+  | String !Text
+  | Number !Scientific
+  | Bool !Bool
+  | Null
+  deriving (Eq, Read, Show, Typeable, Data, Generic)
+
+--------------------------------------------------------------------------------
+-- Top-level entry points
+
+eitherDecode :: L.ByteString -> Either String Value
+eitherDecode = A.parseOnly value . L.toStrict
+
+decode :: ByteString -> Maybe Value
+decode = either (const Nothing) Just . A.parseOnly value
+
+--------------------------------------------------------------------------------
+-- Modified aeson parser
+
+-- Copied from the aeson package.
+arrayValues :: Parser Array
+arrayValues = do
+  skipSpace
+  w <- A.peekWord8'
+  if w == CLOSE_SQUARE
+    then A.anyWord8 >> return V.empty
+    else loop [] 1
+  where
+    loop acc !len = do
+      v <- (value A.<?> "json list value") <* skipSpace
+      ch <- A.satisfy (\w -> w == COMMA || w == CLOSE_SQUARE) A.<?> "',' or ']'"
+      if ch == COMMA
+        then skipSpace >> loop (v:acc) (len+1)
+        else return (V.reverse (V.fromListN len (v:acc)))
+{-# INLINE arrayValues #-}
+
+-- Copied from aeson package.
+objectValues :: Parser (InsOrdHashMap Text Value)
+objectValues = do
+  skipSpace
+  w <- A.peekWord8'
+  if w == CLOSE_CURLY
+    then A.anyWord8 >> return OMap.empty
+    else loop OMap.empty
+  where
+    -- Why use the acc pattern here, you may ask? Because 'H.fromList' uses
+    -- 'unsafeInsert', which is much faster since it updates the 'HashMap' in place!
+    loop acc = do
+      k <- (jstring A.<?> "object key") <* skipSpace <* (A8.char ':' A.<?> "':'")
+      v <- (value A.<?> "object value") <* skipSpace
+      ch <- A.satisfy (\w -> w == COMMA || w == CLOSE_CURLY) A.<?> "',' or '}'"
+      let acc' = OMap.insert k v acc
+      if ch == COMMA
+        then skipSpace >> loop acc'
+        else pure acc'
+{-# INLINE objectValues #-}
+
+-- Copied from aeson package.
+value :: Parser Value
+value = do
+  skipSpace
+  w <- A.peekWord8'
+  case w of
+    DOUBLE_QUOTE -> String <$> jstring
+    OPEN_CURLY -> A.anyWord8 *> (Object . Object_ <$> objectValues)
+    OPEN_SQUARE -> A.anyWord8 *> (Array <$> arrayValues)
+    C_f -> A8.string "false" $> Bool False
+    C_t -> A8.string "true" $> Bool True
+    C_n -> A8.string "null" $> Null
+    _ | w >= 48 && w <= 57 || w == 45
+        -> Number <$> A8.scientific
+      | otherwise -> fail "not a valid json value"
+{-# INLINE value #-}
+
+-- Copied from aeson package.
+-- | The only valid whitespace in a JSON document is space, newline,
+-- carriage return, and tab.
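+-- (That is, bytes 0x20, 0x0a, 0x0d and 0x09; other Unicode whitespace must
+-- not be skipped, which is why the check below is on exactly these bytes.)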
+skipSpace :: Parser ()
+skipSpace = A.skipWhile $ \w -> w == 0x20 || w == 0x0a || w == 0x0d || w == 0x09
+{-# INLINE skipSpace #-}
diff --git a/server/src-lib/Hasura/GraphQL/Execute.hs b/server/src-lib/Hasura/GraphQL/Execute.hs
index 6c3106010186a..db52ccbda3599 100644
--- a/server/src-lib/Hasura/GraphQL/Execute.hs
+++ b/server/src-lib/Hasura/GraphQL/Execute.hs
@@ -1,11 +1,14 @@
+{-# LANGUAGE NamedFieldPuns  #-}
+{-# LANGUAGE RecordWildCards #-}
+
 module Hasura.GraphQL.Execute
-  ( GQExecPlan(..)
+  ( GQExecPlanPartial(..)
+  , GQFieldPartialPlan(..)
+  , GQFieldResolvedPlan(..)
-  , ExecPlanPartial
   , getExecPlanPartial
   , ExecOp(..)
-  , ExecPlanResolved
   , getResolvedExecPlan
   , execRemoteGQ
   , getSubsOp
@@ -21,11 +24,14 @@ module Hasura.GraphQL.Execute
 import Control.Exception (try)
 import Control.Lens
 import Data.Has
+import Data.List (nub)
+import Data.Time
 import qualified Data.Aeson as J
 import qualified Data.CaseInsensitive as CI
 import qualified Data.HashMap.Strict as Map
-import qualified Data.HashSet as Set
+import qualified Data.HashMap.Strict.InsOrd as OMap
+import qualified Data.Sequence as Seq
 import qualified Data.String.Conversions as CS
 import qualified Data.Text as T
 import qualified Language.GraphQL.Draft.Syntax as G
@@ -33,6 +39,7 @@ import qualified Network.HTTP.Client as HTTP
 import qualified Network.HTTP.Types as N
 import qualified Network.Wreq as Wreq
+
 import Hasura.EncJSON
 import Hasura.GraphQL.Context
 import Hasura.GraphQL.Logging
@@ -47,13 +54,14 @@ import Hasura.RQL.Types
 import Hasura.Server.Context
 import Hasura.Server.Utils (RequestId, filterRequestHeaders)
+import Hasura.SQL.Time
+import Hasura.SQL.Value
 import qualified Hasura.GraphQL.Execute.LiveQuery as EL
 import qualified Hasura.GraphQL.Execute.Plan as EP
 import qualified Hasura.GraphQL.Execute.Query as EQ
 import qualified Hasura.GraphQL.Resolve as GR
 import qualified Hasura.GraphQL.Validate as VQ
-import qualified Hasura.GraphQL.Validate.Types as VT
 import qualified Hasura.Logging as L

 -- The current execution plan of a graphql operation, it is
@@ -61,10 +69,19 @@ import qualified Hasura.Logging as L
 --
 -- The 'a' is parameterised so this AST can represent
 -- intermediate passes
-data GQExecPlan a
-  = GExPHasura !a
-  | GExPRemote !RemoteSchemaInfo !G.TypedOperationDefinition
-  deriving (Functor, Foldable, Traversable)
+data GQFieldPartialPlan
+  = GQFieldPartialHasura !(GCtx, VQ.Field)
+  | GQFieldPartialRemote !RemoteSchemaInfo !VQ.Field
+
+data GQFieldResolvedPlan
+  = GQFieldResolvedHasura !ExecOp
+  | GQFieldResolvedRemote !RemoteSchemaInfo !G.OperationType !VQ.Field
+
+data GQExecPlanPartial
+  = GQExecPlanPartial
+  { execOpType     :: G.OperationType
+  , execFieldPlans :: Seq.Seq GQFieldPartialPlan
+  }

 -- | Execution context
 data ExecutionCtx
@@ -79,85 +96,65 @@ data ExecutionCtx
   , _ecxEnableAllowList :: !Bool
   }

--- Enforces the current limitation
-assertSameLocationNodes
-  :: (MonadError QErr m) => [VT.TypeLoc] -> m VT.TypeLoc
-assertSameLocationNodes typeLocs =
-  case Set.toList (Set.fromList typeLocs) of
-    -- this shouldn't happen
-    [] -> return VT.TLHasuraType
-    [loc] -> return loc
-    _ -> throw400 NotSupported msg
-  where
-    msg = "cannot mix top level fields from two different graphql servers"
-
--- TODO: we should fix this function asap
--- as this will fail when there is a fragment at the top level
-getTopLevelNodes :: G.TypedOperationDefinition -> [G.Name]
-getTopLevelNodes opDef =
-  mapMaybe f $ G._todSelectionSet opDef
-  where
-    f = \case
-      G.SelectionField fld -> Just $ G._fName fld
-      G.SelectionFragmentSpread _ -> Nothing
-      G.SelectionInlineFragment _ -> Nothing
-
-gatherTypeLocs :: GCtx -> [G.Name] -> [VT.TypeLoc]
-gatherTypeLocs gCtx nodes =
-  catMaybes $ flip map nodes $ \node ->
-    VT._fiLoc <$> Map.lookup node schemaNodes
-  where
-    schemaNodes =
-      let qr = VT._otiFields $ _gQueryRoot gCtx
-          mr = VT._otiFields <$> _gMutRoot gCtx
-      in maybe qr (Map.union qr) mr
-
--- This is for when the graphql query is validated
-type ExecPlanPartial = GQExecPlan (GCtx, VQ.RootSelSet)
-
 getExecPlanPartial
   :: (MonadError QErr m)
   => UserInfo
   -> SchemaCache
   -> Bool
   -> GQLReqParsed
-  -> m ExecPlanPartial
-getExecPlanPartial userInfo sc enableAL req = do
-
+  -> m GQExecPlanPartial
+getExecPlanPartial userInfo sc enableAL req
+  -- check if query is in allowlist
+ = do
   when enableAL checkQueryInAllowlist
-
-  (gCtx, _) <- flip runStateT sc $ getGCtx role gCtxRoleMap
+  (gCtx, _) <- flip runStateT sc $ getGCtx role gCtxRoleMap
   queryParts <- flip runReaderT gCtx $ VQ.getQueryParts req
-
-  let opDef = VQ.qpOpDef queryParts
-      topLevelNodes = getTopLevelNodes opDef
-      -- gather TypeLoc of topLevelNodes
-      typeLocs = gatherTypeLocs gCtx topLevelNodes
-
-  -- see if they are all the same
-  typeLoc <- assertSameLocationNodes typeLocs
-
-  case typeLoc of
-    VT.TLHasuraType -> do
-      rootSelSet <- runReaderT (VQ.validateGQ queryParts) gCtx
-      return $ GExPHasura (gCtx, rootSelSet)
-    VT.TLRemoteType _ rsi ->
-      return $ GExPRemote rsi opDef
+  let remoteSchemas = scRemoteSchemas sc
+  rootSelSet <- runReaderT (VQ.validateGQ queryParts) gCtx
+  runReaderT (generatePlan rootSelSet) (gCtx, remoteSchemas)
   where
+    generatePlan ::
+         (MonadError QErr m, MonadReader (GCtx, RemoteSchemaMap) m)
+      => VQ.RootSelSet
+      -> m GQExecPlanPartial
+    generatePlan =
+      \case
+        VQ.RQuery selSet ->
+          (GQExecPlanPartial G.OperationTypeQuery) <$>
+          (mapM generateFieldPlan selSet)
+        VQ.RMutation selSet ->
+          (GQExecPlanPartial G.OperationTypeMutation) <$>
+          (mapM generateFieldPlan selSet)
+        VQ.RSubscription field ->
+          (GQExecPlanPartial G.OperationTypeSubscription) <$>
+          (fmap Seq.singleton $ generateFieldPlan field)
+    generateFieldPlan ::
+         (MonadError QErr m, MonadReader (GCtx, RemoteSchemaMap) m)
+      => VQ.Field
+      -> m GQFieldPartialPlan
+    generateFieldPlan field =
+      case VQ._fSource field of
+        TLHasuraType -> do
+          (gCtx, _) <- ask
+          pure $ GQFieldPartialHasura (gCtx, field)
+        TLRemoteType rsName -> do
+          (_, rsMap) <- ask
+          rsCtx <-
+            onNothing (Map.lookup rsName rsMap) $
+            throw500 "remote schema not found"
+          pure $ GQFieldPartialRemote (rscInfo rsCtx) field
     role = userRole userInfo
     gCtxRoleMap = scGCtxMap sc
-
-    checkQueryInAllowlist =
+    checkQueryInAllowlist
       -- only for non-admin roles
+     =
       when (role /= adminRole) $ do
        let notInAllowlist =
              not $ VQ.isQueryInAllowlist (_grQuery req) (scAllowlist sc)
        when notInAllowlist $ modifyQErr modErr $ throwVE "query is not allowed"
-
     modErr e =
      let msg = "query is not in any of the allowlists"
-     in e{qeInternal = Just $ J.object [ "message" J..= J.String msg]}
+     in e {qeInternal = Just $ J.object ["message" J..= J.String msg]}


 -- An execution operation, in case of
@@ -168,10 +165,6 @@ data ExecOp
   | ExOpMutation !LazyRespTx
   | ExOpSubs !EL.LiveQueryPlan

--- The graphql query is resolved into an execution operation
-type ExecPlanResolved
-  = GQExecPlan ExecOp
-
 getResolvedExecPlan
   :: (MonadError QErr m, MonadIO m)
   => PGExecCtx
@@ -182,41 +175,63 @@ getResolvedExecPlan
   -> SchemaCache
   -> SchemaCacheVer
   -> GQLReqUnparsed
-  -> m ExecPlanResolved
-getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
-  enableAL sc scVer reqUnparsed = do
-  planM <- liftIO $ EP.getPlan scVer (userRole userInfo)
-           opNameM queryStr planCache
+  -> m (Seq.Seq GQFieldResolvedPlan)
+getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx enableAL sc scVer reqUnparsed = do
+  planM <-
+    liftIO $ EP.getPlan scVer (userRole userInfo) opNameM queryStr planCache
   let usrVars = userVars userInfo
-  case planM of
+  case planM
     -- plans are only for queries and subscriptions
-    Just plan -> GExPHasura <$> case plan of
-      EP.RPQuery queryPlan -> do
-        (tx, genSql) <- EQ.queryOpFromPlan usrVars queryVars queryPlan
-        return $ ExOpQuery tx (Just genSql)
-      EP.RPSubs subsPlan ->
-        ExOpSubs <$> EL.reuseLiveQueryPlan pgExecCtx usrVars queryVars subsPlan
+        of
+    Just plan ->
+      case plan of
+        EP.RPQuery queryPlan -> do
+          (tx, genSql) <- EQ.queryOpFromPlan usrVars queryVars queryPlan
+          let queryOp = ExOpQuery tx (Just genSql)
+          pure $ pure $ GQFieldResolvedHasura queryOp
+        EP.RPSubs subsPlan -> do
+          subOp <-
+            ExOpSubs <$>
+            EL.reuseLiveQueryPlan pgExecCtx usrVars queryVars subsPlan
+          pure $ pure $ GQFieldResolvedHasura subOp
     Nothing -> noExistingPlan
   where
    GQLReq opNameM queryStr queryVars = reqUnparsed
    addPlanToCache plan =
-     liftIO $ EP.addPlan scVer (userRole userInfo)
-     opNameM queryStr plan planCache
+     -- liftIO $
+     EP.addPlan scVer (userRole userInfo) opNameM queryStr plan planCache
    noExistingPlan = do
      req <- toParsed reqUnparsed
-     partialExecPlan <- getExecPlanPartial userInfo sc enableAL req
-     forM partialExecPlan $ \(gCtx, rootSelSet) ->
-       case rootSelSet of
-         VQ.RMutation selSet ->
-           ExOpMutation <$> getMutOp gCtx sqlGenCtx userInfo selSet
-         VQ.RQuery selSet -> do
-           (queryTx, plan, genSql) <- getQueryOp gCtx sqlGenCtx userInfo selSet
-           traverse_ (addPlanToCache . EP.RPQuery) plan
-           return $ ExOpQuery queryTx (Just genSql)
-         VQ.RSubscription fld -> do
-           (lqOp, plan) <- getSubsOp pgExecCtx gCtx sqlGenCtx userInfo fld
-           traverse_ (addPlanToCache . EP.RPSubs) plan
-           return $ ExOpSubs lqOp
+     (GQExecPlanPartial opType fieldPlans) <-
+       getExecPlanPartial userInfo sc enableAL req
+     case opType of
+       G.OperationTypeQuery ->
+         forM fieldPlans $ \case
+           GQFieldPartialHasura (gCtx, field) -> do
+             (queryTx, plan, genSql) <-
+               getQueryOp gCtx sqlGenCtx userInfo (Seq.singleton field)
+             -- traverse_ (addPlanToCache . EP.RPQuery) plan
+             (return . GQFieldResolvedHasura) $ ExOpQuery queryTx (Just genSql)
+           GQFieldPartialRemote rsInfo field ->
+             return $ GQFieldResolvedRemote rsInfo G.OperationTypeQuery field
+       G.OperationTypeMutation ->
+         forM fieldPlans $ \case
+           GQFieldPartialHasura (gCtx, field) -> do
+             mutationTx <-
+               getMutOp gCtx sqlGenCtx userInfo (Seq.singleton field)
+             (return . GQFieldResolvedHasura) $ ExOpMutation mutationTx
+           GQFieldPartialRemote rsInfo field ->
+             return $
+             GQFieldResolvedRemote rsInfo G.OperationTypeMutation field
+       G.OperationTypeSubscription ->
+         forM fieldPlans $ \case
+           GQFieldPartialHasura (gCtx, field) -> do
+             (lqOp, plan) <- getSubsOp pgExecCtx gCtx sqlGenCtx userInfo field
+             -- traverse_ (addPlanToCache . EP.RPSubs) plan
+             (return . GQFieldResolvedHasura) $ ExOpSubs lqOp
+           GQFieldPartialRemote rsInfo field ->
+             return $
+             GQFieldResolvedRemote rsInfo G.OperationTypeSubscription field

 -- Monad for resolving a hasura query/mutation
 type E m =
@@ -345,18 +360,19 @@ execRemoteGQ
   => RequestId
   -> UserInfo
   -> [N.Header]
-  -> GQLReqUnparsed
   -> RemoteSchemaInfo
-  -> G.TypedOperationDefinition
+  -> G.OperationType
+  -> VQ.SelSet
   -> m (HttpResponse EncJSON)
-execRemoteGQ reqId userInfo reqHdrs q rsi opDef = do
+execRemoteGQ reqId userInfo reqHdrs rsi opType selSet = do
   execCtx <- ask
   let logger  = _ecxLogger execCtx
       manager = _ecxHttpManager execCtx
-      opTy = G._todType opDef
-  when (opTy == G.OperationTypeSubscription) $
+  when (opType == G.OperationTypeSubscription) $
     throw400 NotSupported "subscription to remote server is not supported"
   hdrs <- getHeadersFromConf hdrConf
+  gqlReq <- fieldsToRequest opType (toList selSet)
+  let body = encJToLBS $ encJFromJValue gqlReq
   let confHdrs   = map (\(k, v) -> (CI.mk $ CS.cs k, CS.cs v)) hdrs
       clientHdrs = bool [] filteredHeaders fwdClientHdrs
       -- filter out duplicate headers
@@ -372,11 +388,11 @@ execRemoteGQ reqId userInfo reqHdrs q rsi opDef = do
   let req = initReq
             { HTTP.method = "POST"
            , HTTP.requestHeaders = finalHeaders
-           , HTTP.requestBody = HTTP.RequestBodyLBS (J.encode q)
+           , HTTP.requestBody = HTTP.RequestBodyLBS body
            , HTTP.responseTimeout = HTTP.responseTimeoutMicro (timeout * 1000000)
            }

-  liftIO $ logGraphqlQuery logger $ QueryLog q Nothing reqId
+  -- liftIO $ logGraphqlQuery logger $ QueryLog q Nothing reqId
   res  <- liftIO $ try $ HTTP.httpLbs req manager
   resp <- either httpThrow return res
   let cookieHdrs = getCookieHdr (resp ^.. Wreq.responseHeader "Set-Cookie")
@@ -403,3 +419,182 @@ execRemoteGQ reqId userInfo reqHdrs q rsi opDef = do
    mkRespHeaders hdrs =
      map (\(k, v) -> Header (bsToTxt $ CI.original k, bsToTxt v)) hdrs
+
+
+fieldsToRequest
+  :: (MonadIO m, MonadError QErr m)
+  => G.OperationType
+  -> [VQ.Field]
+  -> m GQLReqParsed
+fieldsToRequest opType fields = do
+  case traverse fieldToField fields of
+    Right gfields -> do
+      let variableTups = nub (concat $ map getVariables fields)
+          variableDefinitions = map fst variableTups
+          variableValues =
+            Just $
+            Map.fromList
+              (map (\(varDef, val) -> (G._vdVariable varDef, val)) variableTups)
+      pure
+        (GQLReq
+           { _grOperationName = Nothing
+           , _grQuery =
+               GQLExecDoc
+                 [ G.ExecutableDefinitionOperation
+                     (G.OperationDefinitionTyped
+                        (emptyOperationDefinition
+                           { G._todVariableDefinitions = variableDefinitions
+                           , G._todSelectionSet = (map G.SelectionField gfields)
+                           }))
+                 ]
+           , _grVariables = variableValues
+           })
+    Left err -> throw500 ("While converting remote field: " <> err)
+  where
+    emptyOperationDefinition =
+      G.TypedOperationDefinition
+        { G._todType = opType
+        , G._todName = Nothing
+        , G._todVariableDefinitions = []
+        , G._todDirectives = []
+        , G._todSelectionSet = []
+        }
+    getVariables :: VQ.Field -> [(G.VariableDefinition, J.Value)]
+    getVariables VQ.Field {_fArguments} =
+      flip mapMaybe (Map.toList _fArguments) $ \(_name, a@AnnInpVal {..}) ->
+        let varDefM =
+              G.VariableDefinition <$> _aivVariable <*> Just _aivType <*>
+              Just Nothing
+            valueM =
+              rightToMaybe (fmap gValueConstToValue $ annInpValToGValueConst a)
+        in (,) <$> varDefM <*> valueM
+      where
+        rightToMaybe =
+          \case
+            Left _ -> Nothing
+            Right b -> Just b
+
+fieldToField :: VQ.Field -> Either Text G.Field
+fieldToField VQ.Field{..} = do
+  _fArguments <- traverse makeArgument (Map.toList _fArguments)
+  _fSelectionSet <- fmap G.SelectionField . toList <$>
+                    traverse fieldToField _fSelSet
+  _fDirectives <- pure []
+  _fAlias <- pure (Just _fAlias)
+  pure $
+    G.Field{..}
+
+makeArgument :: (G.Name, AnnInpVal) -> Either Text G.Argument
+makeArgument (_aName, annInpVal) =
+  do _aValue <- annInpValToGValue annInpVal
+     pure $ G.Argument {..}
+
+annInpValToGValue :: AnnInpVal -> Either Text G.Value
+annInpValToGValue AnnInpVal{..} = do
+  fromMaybe (pure G.VNull) $ case _aivVariable of
+    Nothing -> case _aivValue of
+      AGScalar _ty mv ->
+        pgcolvalueToGValue <$> mv
+      AGEnum _ _enumVal ->
+        pure (Left "enum not supported")
+      AGObject _ mobj ->
+        flip fmap mobj $ \obj -> do
+          fields <-
+            traverse
+              (\(_ofName, av) -> do
+                 _ofValue <- annInpValToGValue av
+                 pure (G.ObjectFieldG {..}))
+              (OMap.toList obj)
+          pure (G.VObject (G.ObjectValueG fields))
+      AGArray _ mvs ->
+        fmap (G.VList . G.ListValueG) . traverse annInpValToGValue <$> mvs
+    Just variable -> pure . pure $ G.VVariable variable
+
+annInpValToGValueConst :: AnnInpVal -> Either Text G.ValueConst
+annInpValToGValueConst AnnInpVal{..} = do
+  fromMaybe (pure G.VCNull) $
+    case _aivValue of
+      AGScalar _ty mv ->
+        pgcolvalueToGValueConst <$> mv
+      AGEnum _ _enumVal ->
+        pure (Left "enum not supported")
+      AGObject _ mobj ->
+        flip fmap mobj $ \obj -> do
+          fields <-
+            traverse
+              (\(_ofName, av) -> do
+                 _ofValue <- annInpValToGValueConst av
+                 pure (G.ObjectFieldG {..}))
+              (OMap.toList obj)
+          pure (G.VCObject (G.ObjectValueG fields))
+      AGArray _ mvs ->
+        fmap (G.VCList . G.ListValueG) . traverse annInpValToGValueConst <$> mvs
+
+pgcolvalueToGValue :: PGScalarValue -> Either Text G.Value
+pgcolvalueToGValue colVal = case colVal of
+  PGValInteger i  -> pure $ G.VInt $ fromIntegral i
+  PGValSmallInt i -> pure $ G.VInt $ fromIntegral i
+  PGValBigInt i   -> pure $ G.VInt $ fromIntegral i
+  PGValFloat f    -> pure $ G.VFloat $ realToFrac f
+  PGValDouble d   -> pure $ G.VFloat $ realToFrac d
+  -- TODO: Scientific is a danger zone; use its safe conv function.
+  PGValNumeric sc -> pure $ G.VFloat $ realToFrac sc
+  PGValBoolean b  -> pure $ G.VBoolean b
+  PGValChar t     -> pure $ G.VString (G.StringValue (T.singleton t))
+  PGValVarchar t  -> pure $ G.VString (G.StringValue t)
+  PGValText t     -> pure $ G.VString (G.StringValue t)
+  PGValDate d     -> pure $ G.VString $ G.StringValue $ T.pack $ showGregorian d
+  PGValTimeStampTZ u -> pure $
+    G.VString $ G.StringValue $ T.pack $ formatTime defaultTimeLocale "%FT%T%QZ" u
+  PGValTimeTZ (ZonedTimeOfDay tod tz) -> pure $
+    G.VString $ G.StringValue $ T.pack (show tod ++ timeZoneOffsetString tz)
+  PGNull _ -> pure G.VNull
+  PGValJSON {}    -> Left "PGValJSON: cannot convert"
+  PGValJSONB {}   -> Left "PGValJSONB: cannot convert"
+  PGValGeo {}     -> Left "PGValGeo: cannot convert"
+  PGValRaster {}  -> Left "PGValRaster: cannot convert"
+  PGValUnknown t  -> pure $ G.VString $ G.StringValue t
+
+pgcolvalueToGValueConst :: PGScalarValue -> Either Text G.ValueConst
+pgcolvalueToGValueConst colVal = case colVal of
+  PGValInteger i  -> pure $ G.VCInt $ fromIntegral i
+  PGValSmallInt i -> pure $ G.VCInt $ fromIntegral i
+  PGValBigInt i   -> pure $ G.VCInt $ fromIntegral i
+  PGValFloat f    -> pure $ G.VCFloat $ realToFrac f
+  PGValDouble d   -> pure $ G.VCFloat $ realToFrac d
+  -- TODO: Scientific is a danger zone; use its safe conv function.
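+  -- ('realToFrac' on a 'Scientific' goes through 'Rational', which can blow up
+  -- for huge exponents; 'Data.Scientific.toRealFloat' / 'toBoundedRealFloat'
+  -- are presumably the safe conversions this TODO has in mind.)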
+  PGValNumeric sc -> pure $ G.VCFloat $ realToFrac sc
+  PGValBoolean b  -> pure $ G.VCBoolean b
+  PGValChar t     -> pure $ G.VCString (G.StringValue (T.singleton t))
+  PGValVarchar t  -> pure $ G.VCString (G.StringValue t)
+  PGValText t     -> pure $ G.VCString (G.StringValue t)
+  PGValDate d     -> pure $ G.VCString $ G.StringValue $ T.pack $ showGregorian d
+  PGValTimeStampTZ u -> pure $
+    G.VCString $ G.StringValue $ T.pack $ formatTime defaultTimeLocale "%FT%T%QZ" u
+  PGValTimeTZ (ZonedTimeOfDay tod tz) -> pure $
+    G.VCString $ G.StringValue $ T.pack (show tod ++ timeZoneOffsetString tz)
+  PGNull _ -> pure G.VCNull
+  PGValJSON {}    -> Left "PGValJSON: cannot convert"
+  PGValJSONB {}   -> Left "PGValJSONB: cannot convert"
+  PGValGeo {}     -> Left "PGValGeo: cannot convert"
+  PGValRaster {}  -> Left "PGValRaster: cannot convert"
+  PGValUnknown t  -> pure $ G.VCString $ G.StringValue t
+
+gValueConstToValue :: G.ValueConst -> J.Value
+gValueConstToValue =
+  \case
+    (G.VCInt i) -> J.toJSON i
+    (G.VCFloat f) -> J.toJSON f
+    (G.VCString (G.StringValue s)) -> J.toJSON s
+    (G.VCBoolean b) -> J.toJSON b
+    G.VCNull -> J.Null
+    (G.VCEnum s) -> J.toJSON s
+    (G.VCList (G.ListValueG list)) -> J.toJSON (map gValueConstToValue list)
+    (G.VCObject (G.ObjectValueG xs)) -> fieldsToObject xs
+  where
+    fieldsToObject =
+      J.Object .
+      Map.fromList .
+      map
+        (\(G.ObjectFieldG {_ofName = G.Name name, _ofValue}) ->
+           (name, gValueConstToValue _ofValue))
diff --git a/server/src-lib/Hasura/GraphQL/Explain.hs b/server/src-lib/Hasura/GraphQL/Explain.hs
index 4bc0382e7ef59..13fd50a64668c 100644
--- a/server/src-lib/Hasura/GraphQL/Explain.hs
+++ b/server/src-lib/Hasura/GraphQL/Explain.hs
@@ -114,21 +114,36 @@ explainGQLQuery
   -> GQLExplain
   -> m EncJSON
 explainGQLQuery pgExecCtx sc sqlGenCtx enableAL (GQLExplain query userVarsRaw) = do
-  execPlan <- E.getExecPlanPartial userInfo sc enableAL query
-  (gCtx, rootSelSet) <- case execPlan of
-    E.GExPHasura (gCtx, rootSelSet) ->
-      return (gCtx, rootSelSet)
-    E.GExPRemote _ _ ->
-      throw400 InvalidParams "only hasura queries can be explained"
-  case rootSelSet of
-    GV.RQuery selSet ->
-      runInTx $ encJFromJValue <$> traverse (explainField userInfo gCtx sqlGenCtx) (toList selSet)
-    GV.RMutation _ ->
+  E.GQExecPlanPartial opType fieldPlans <-
+    E.getExecPlanPartial userInfo sc enableAL query
+  let hasuraFieldPlans = mapMaybe getHasuraField (toList fieldPlans)
+  if null hasuraFieldPlans
+    then throw400 InvalidParams "only hasura queries can be explained"
+    else pure ()
+  case opType of
+    G.OperationTypeQuery ->
+      runInTx $
+      encJFromJValue <$>
+      traverse
+        (\(gCtx, field) -> explainField userInfo gCtx sqlGenCtx field)
+        hasuraFieldPlans
+    G.OperationTypeMutation ->
       throw400 InvalidParams "only queries can be explained"
-    GV.RSubscription rootField -> do
+    G.OperationTypeSubscription -> do
+      (gCtx, rootField) <- getRootField hasuraFieldPlans
       (plan, _) <- E.getSubsOp pgExecCtx gCtx sqlGenCtx userInfo rootField
       runInTx $ encJFromJValue <$> E.explainLiveQueryPlan plan
   where
    usrVars  = mkUserVars $ maybe [] Map.toList userVarsRaw
    userInfo = mkUserInfo (fromMaybe adminRole $ roleFromVars usrVars) usrVars
    runInTx  = liftEither <=< liftIO . runExceptT . runLazyTx pgExecCtx
+   getHasuraField =
+     \case
+       E.GQFieldPartialHasura a -> Just a
+       _ -> Nothing
+   getRootField =
+     \case
+       [] -> throw500 "no field found in subscription"
+       [fld] -> pure fld
+       _ ->
+         throw500 "expected only one field in subscription"
diff --git a/server/src-lib/Hasura/GraphQL/RemoteServer.hs b/server/src-lib/Hasura/GraphQL/RemoteServer.hs
index f4aa1537b78b6..719c013eb8198 100644
--- a/server/src-lib/Hasura/GraphQL/RemoteServer.hs
+++ b/server/src-lib/Hasura/GraphQL/RemoteServer.hs
@@ -63,7 +63,7 @@ fetchRemoteSchema manager name def@(RemoteSchemaInfo url headerConf _ timeout) =
   let (sDoc, qRootN, mRootN, sRootN) = fromIntrospection introspectRes
   typMap <- either remoteSchemaErr return $ VT.fromSchemaDoc sDoc $
-    VT.TLRemoteType name def
+    VT.TLRemoteType name
   let mQrTyp = Map.lookup qRootN typMap
       mMrTyp = maybe Nothing (`Map.lookup` typMap) mRootN
       mSrTyp = maybe Nothing (`Map.lookup` typMap) sRootN
diff --git a/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs b/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs
index 9b21407e44936..14969ff1098a9 100644
--- a/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs
+++ b/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs
@@ -2,6 +2,8 @@ module Hasura.GraphQL.Transport.HTTP
   ( runGQ
   ) where

+import qualified Data.Sequence as Seq
+import qualified Data.Text as T
 import qualified Network.HTTP.Types as N

 import Hasura.EncJSON
@@ -26,13 +28,22 @@ runGQ
   -> m (HttpResponse EncJSON)
 runGQ reqId userInfo reqHdrs req = do
   E.ExecutionCtx _ sqlGenCtx pgExecCtx planCache sc scVer _ enableAL <- ask
-  execPlan <- E.getResolvedExecPlan pgExecCtx planCache
+  fieldPlans <- E.getResolvedExecPlan pgExecCtx planCache
                userInfo sqlGenCtx enableAL sc scVer req
-  case execPlan of
-    E.GExPHasura resolvedOp ->
+  fieldResps <- forM fieldPlans $ \case
+    E.GQFieldResolvedHasura resolvedOp ->
       flip HttpResponse Nothing <$> runHasuraGQ reqId req userInfo resolvedOp
-    E.GExPRemote rsi opDef ->
-      E.execRemoteGQ reqId userInfo reqHdrs req rsi opDef
+    E.GQFieldResolvedRemote rsi opType field ->
+      E.execRemoteGQ reqId userInfo reqHdrs rsi opType (Seq.singleton field)
+
+  let mergedResp = mergeResponses (fmap _hrBody fieldResps)
+  case mergedResp of
+    Left e ->
+      throw400
+        UnexpectedPayload
+        ("could not merge data from results: " <> T.pack e)
+    Right mergedGQResp ->
+      pure (HttpResponse mergedGQResp (foldMap _hrHeaders fieldResps))

 runHasuraGQ
   :: ( MonadIO m
diff --git a/server/src-lib/Hasura/GraphQL/Transport/HTTP/Protocol.hs b/server/src-lib/Hasura/GraphQL/Transport/HTTP/Protocol.hs
index 14cbe501acfb1..7a18fe0570f2e 100644
--- a/server/src-lib/Hasura/GraphQL/Transport/HTTP/Protocol.hs
+++ b/server/src-lib/Hasura/GraphQL/Transport/HTTP/Protocol.hs
@@ -1,3 +1,6 @@
+{-# LANGUAGE NamedFieldPuns  #-}
+{-# LANGUAGE RecordWildCards #-}
+
 module Hasura.GraphQL.Transport.HTTP.Protocol
   ( GQLReq(..)
   , GQLReqUnparsed
@@ -15,22 +18,32 @@ module Hasura.GraphQL.Transport.HTTP.Protocol
   , RemoteGqlResp(..)
   , GraphqlResponse(..)
   , encodeGraphqlResponse
+  , mergeResponses
+  , getMergedGQResp
   ) where

+import Control.Lens
 import Hasura.EncJSON
 import Hasura.GraphQL.Utils
 import Hasura.Prelude
 import Hasura.RQL.Types
+
 import Language.GraphQL.Draft.Instances ()

 import qualified Data.Aeson as J
 import qualified Data.Aeson.Casing as J
+import qualified Data.Aeson.Ordered as OJ
 import qualified Data.Aeson.TH as J
 import qualified Data.ByteString.Lazy as BL
 import qualified Data.HashMap.Strict as Map
+import qualified Data.Vector as V
 import qualified Language.GraphQL.Draft.Parser as G
 import qualified Language.GraphQL.Draft.Syntax as G

+import qualified VectorBuilder.Builder as VB
+import qualified VectorBuilder.Vector as VB
+
+
 newtype GQLExecDoc
   = GQLExecDoc { unGQLExecDoc :: [G.ExecutableDefinition] }
@@ -83,11 +96,33 @@ encodeGQErr :: Bool -> QErr -> J.Value
 encodeGQErr includeInternal qErr =
   J.object [ "errors" J..= [encodeGQLErr includeInternal qErr]]

+
+-- | https://graphql.github.io/graphql-spec/June2018/#sec-Response-Format
+--
+-- NOTE: this type and parseGQRespValue are a lax representation of the spec,
+-- since...
+--   - remote GraphQL servers may not conform strictly, and...
+--   - we use this type as an accumulator.
+--
+-- Ideally we'd have something correct by construction for hasura results
+-- someplace.
+data GQRespValue =
+  GQRespValue
+  { _gqRespData :: OJ.Object
+  -- ^ 'OJ.empty' (corresponding to the invalid `"data": {}`) indicates an error.
+  , _gqRespErrors :: VB.Builder OJ.Value
+  -- ^ An 'OJ.Array', but with efficient cons and concatenation. Null indicates
+  -- query success.
+  }
+
+makeLenses ''GQRespValue
+
 data GQResult a
   = GQSuccess !a
   | GQPreExecError ![J.Value]
   | GQExecError ![J.Value]
-  deriving (Show, Eq, Functor, Foldable, Traversable)
+  | GQGeneric !GQRespValue
+  deriving (Functor, Foldable, Traversable)

 type GQResponse = GQResult BL.ByteString

@@ -96,13 +131,6 @@ isExecError = \case
   GQExecError _ -> True
   _             -> False

-encodeGQResp :: GQResponse -> EncJSON
-encodeGQResp gqResp =
-  encJFromAssocList $ case gqResp of
-    GQSuccess r      -> [("data", encJFromLBS r)]
-    GQPreExecError e -> [("errors", encJFromJValue e)]
-    GQExecError e    -> [("data", "null"), ("errors", encJFromJValue e)]
-
 -- | Represents GraphQL response from a remote server
 data RemoteGqlResp
   = RemoteGqlResp
@@ -128,3 +156,64 @@ encodeGraphqlResponse :: GraphqlResponse -> EncJSON
 encodeGraphqlResponse = \case
   GRHasura resp -> encodeGQResp resp
   GRRemote resp -> encodeRemoteGqlResp resp
+
+emptyResp :: GQRespValue
+emptyResp = GQRespValue OJ.empty VB.empty
+
+parseGQRespValue :: EncJSON -> Either String GQRespValue
+parseGQRespValue = OJ.eitherDecode . encJToLBS >=> \case
+  OJ.Object obj -> do
+    _gqRespData <-
+      case OJ.lookup "data" obj of
+        -- "an error was encountered before execution began":
+        Nothing -> pure OJ.empty
+        -- "an error was encountered during the execution that prevented a valid response":
+        Just OJ.Null -> pure OJ.empty
+        Just (OJ.Object dobj) -> pure dobj
+        Just _ -> Left "expected object or null for GraphQL data response"
+    _gqRespErrors <-
+      case OJ.lookup "errors" obj of
+        Nothing -> pure VB.empty
+        Just (OJ.Array vec) -> pure $ VB.vector vec
+        Just _ -> Left "expected array for GraphQL error response"
+    pure (GQRespValue {_gqRespData, _gqRespErrors})
+  _ -> Left "expected object for GraphQL response"
+
+encodeGQRespValue :: GQRespValue -> EncJSON
+encodeGQRespValue GQRespValue{..} = OJ.toEncJSON $ OJ.Object $ OJ.fromList $
+  -- "If the data entry in the response is not present, the errors entry in the
+  -- response must not be empty. It must contain at least one error. "
+  if _gqRespData == OJ.empty && not anyErrors
+    then
+      let msg = "Somehow did not accumulate any errors or data from graphql queries"
+      in [("errors", OJ.Array $ V.singleton $ OJ.Object (OJ.fromList [("message", OJ.String msg)]) )]
+    else
+      -- NOTE: "If an error was encountered during the execution that prevented
+      -- a valid response, the data entry in the response should be null."
+      -- TODO it's not clear to me how we can enforce that here or if we should try.
+      ("data", OJ.Object _gqRespData) :
+      [("errors", OJ.Array gqRespErrorsV) | anyErrors ]
+  where
+    gqRespErrorsV = VB.build _gqRespErrors
+    anyErrors = not $ V.null gqRespErrorsV
+
+encodeGQResp :: GQResponse -> EncJSON
+encodeGQResp = \case
+  GQSuccess r      -> encJFromAssocList [("data", encJFromLBS r)]
+  GQPreExecError e -> encJFromAssocList [("errors", encJFromJValue e)]
+  GQExecError e    -> encJFromAssocList [("data", "null"), ("errors", encJFromJValue e)]
+  GQGeneric v      -> encodeGQRespValue v
+
+-- | See 'mergeResponses'.
+getMergedGQResp :: Traversable t => t EncJSON -> Either String GQRespValue
+getMergedGQResp =
+  mergeGQResp <=< traverse parseGQRespValue
+  where mergeGQResp = flip foldM emptyResp $ \respAcc GQRespValue{..} ->
+          respAcc & gqRespErrors <>~ _gqRespErrors
+                  & mapMOf gqRespData (OJ.safeUnion _gqRespData)
+
+-- | Union several graphql responses, with the ordering of the top-level fields
+-- determined by the input list.
+mergeResponses :: Traversable t => t EncJSON -> Either String EncJSON
+mergeResponses =
+  fmap encodeGQRespValue . getMergedGQResp
diff --git a/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs b/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs
index a27de37d53012..052f934e53fb4 100644
--- a/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs
+++ b/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs
@@ -17,6 +17,7 @@ import qualified Data.ByteString.Lazy as BL
 import qualified Data.CaseInsensitive as CI
 import qualified Data.HashMap.Strict as Map
 import qualified Data.IORef as IORef
+import qualified Data.Sequence as Seq
 import qualified Data.Text as T
 import qualified Data.Text.Encoding as TE
 import qualified Data.Time.Clock as TC
@@ -46,6 +47,7 @@ import Hasura.Server.Utils (RequestId,
 import qualified Hasura.GraphQL.Execute as E
 import qualified Hasura.GraphQL.Execute.LiveQuery as LQ
 import qualified Hasura.GraphQL.Transport.WebSocket.Server as WS
+import qualified Hasura.GraphQL.Validate as VQ
 import qualified Hasura.Logging as L

@@ -281,58 +283,85 @@ onStart serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
   execPlan <- either (withComplete . preExecErr requestId) return execPlanE
   let execCtx = E.ExecutionCtx logger sqlGenCtx pgExecCtx
                 planCache sc scVer httpMgr enableAL
-
-  case execPlan of
-    E.GExPHasura resolvedOp ->
-      runHasuraGQ requestId q userInfo resolvedOp
-    E.GExPRemote rsi opDef ->
-      runRemoteGQ execCtx requestId userInfo reqHdrs opDef rsi
+  splitExecPlansE <- runExceptT $ splitExecPlan execPlan
+  splitExecPlans <- either (withComplete . preExecErr requestId) return splitExecPlansE
+  case splitExecPlans of
+    Left lqOp -> do
+      -- log the graphql query
+      liftIO $ logGraphqlQuery logger $ QueryLog q Nothing requestId
+      lqId <- liftIO $ LQ.addLiveQuery lqMap lqOp liveQOnChange
+      liftIO $ STM.atomically $
+        STMMap.insert (lqId, _grOperationName q) opId opMap
+      logOpEv ODStarted (Just requestId)
+
+    Right queryOrMutPlans -> do
+      logOpEv ODStarted (Just requestId)
+      fieldRespsE <- liftIO $
+        runExceptT $ do
+          flip mapM queryOrMutPlans $ \case
+            E.GQFieldResolvedHasura execOp ->
+              case execOp of
+                E.ExOpQuery opTx genSql ->
+                  fmap (encodeGQResp . GQSuccess . encJToLBS) $
+                  execQueryOrMut requestId q genSql $
+                  runLazyTx' pgExecCtx opTx
+                E.ExOpMutation opTx ->
+                  fmap (encodeGQResp . GQSuccess . encJToLBS) $
+                  execQueryOrMut requestId q Nothing $
+                  runLazyTx pgExecCtx $ withUserInfo userInfo opTx
+                E.ExOpSubs {} ->
+                  throwError
+                    (err500
+                       Unexpected
+                       "did not expect subscription field here")
+            E.GQFieldResolvedRemote rsi opType field ->
+              runRemoteGQ execCtx requestId userInfo reqHdrs rsi opType (Seq.singleton field)
+      case fieldRespsE of
+        Left err -> postExecErr requestId err
+        Right fieldResps -> do
+          let mergedResponse = getMergedGQResp fieldResps
+          case mergedResponse of
+            Left e ->
+              postExecErr requestId $
+              err500
+                UnexpectedPayload
+                ("could not merge data from results: " <> T.pack e)
+            Right resp -> do
+              sendGenericResp resp
+              sendCompleted (Just requestId)
   where
-    runHasuraGQ :: RequestId -> GQLReqUnparsed -> UserInfo -> E.ExecOp
-                -> ExceptT () IO ()
-    runHasuraGQ reqId query userInfo = \case
-      E.ExOpQuery opTx genSql ->
-        execQueryOrMut reqId query genSql $ runLazyTx' pgExecCtx opTx
-      E.ExOpMutation opTx ->
-        execQueryOrMut reqId query Nothing $
-          runLazyTx pgExecCtx $ withUserInfo userInfo opTx
-      E.ExOpSubs lqOp -> do
-        -- log the graphql query
-        liftIO $ logGraphqlQuery logger $ QueryLog query Nothing reqId
-        lqId <- liftIO $ LQ.addLiveQuery lqMap lqOp liveQOnChange
-        liftIO $ STM.atomically $
-          STMMap.insert (lqId, _grOperationName q) opId opMap
-        logOpEv ODStarted (Just reqId)
+    -- This function splits the execution plan into either a single
+    -- subscription (Left) or a set of query/mutation fields (Right).
+    splitExecPlan :: (QErrM m) => Seq.Seq E.GQFieldResolvedPlan -> m (Either LQ.LiveQueryPlan (Seq.Seq E.GQFieldResolvedPlan))
+    splitExecPlan fieldPlans = do
+      let subscriptionFields = mapMaybe getSubscriptionOps (toList fieldPlans)
+      case subscriptionFields of
+        [] -> pure $ Right fieldPlans
+        [field] -> pure $ Left field
+        _ -> throw500 "expected only one field in subscription"
+
+    getSubscriptionOps = \case
+      E.GQFieldResolvedHasura (E.ExOpSubs lqOp) -> Just lqOp
+      _ -> Nothing

     execQueryOrMut reqId query genSql action = do
-      logOpEv ODStarted (Just reqId)
       -- log the generated SQL and the graphql query
-      liftIO $ logGraphqlQuery logger $ QueryLog query genSql reqId
+      -- liftIO $ logGraphqlQuery logger $ QueryLog query genSql reqId
       resp <- liftIO $ runExceptT action
-      either (postExecErr reqId) sendSuccResp resp
-      sendCompleted (Just reqId)
-
-    runRemoteGQ :: E.ExecutionCtx -> RequestId -> UserInfo -> [H.Header]
-                -> G.TypedOperationDefinition -> RemoteSchemaInfo
-                -> ExceptT () IO ()
-    runRemoteGQ execCtx reqId userInfo reqHdrs opDef rsi = do
-      when (G._todType opDef == G.OperationTypeSubscription) $
-        withComplete $ preExecErr reqId $
+      liftEither resp
+
+    runRemoteGQ :: (MonadError QErr m, MonadIO m)
+                => E.ExecutionCtx -> RequestId -> UserInfo -> [H.Header]
+                -> RemoteSchemaInfo -> G.OperationType -> VQ.SelSet
+                -> m EncJSON
+    runRemoteGQ execCtx reqId userInfo reqHdrs rsi opType selSet = do
+      when (opType == G.OperationTypeSubscription) $
+        throwError $
          err400 NotSupported "subscription to remote server is not supported"

       -- if it's not a subscription, use HTTP to execute the query on the remote
       resp <- runExceptT $ flip runReaderT execCtx $
-              E.execRemoteGQ reqId userInfo reqHdrs q rsi opDef
-      either (postExecErr reqId) (sendRemoteResp reqId . _hrBody) resp
-      sendCompleted (Just reqId)
-
-    sendRemoteResp reqId resp =
-      case J.eitherDecodeStrict (encJToBS resp) of
-        Left e    -> postExecErr reqId $ invalidGqlErr $ T.pack e
-        Right res -> sendMsg wsConn $ SMData $ DataMsg opId (GRRemote res)
-
-    invalidGqlErr err = err500 Unexpected $
-      "Failed parsing GraphQL response from remote: " <> err
+              E.execRemoteGQ reqId userInfo reqHdrs rsi opType selSet
+      liftEither (fmap _hrBody resp)

     WSServerEnv logger pgExecCtx lqMap gCtxMapRef httpMgr _ sqlGenCtx planCache
       _ enableAL = serverEnv
@@ -378,9 +407,8 @@ onStart serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
           ERTGraphqlCompliant -> J.object ["errors" J..= [errFn False qErr]]
       sendMsg wsConn $ SMErr $ ErrorMsg opId err

-    sendSuccResp encJson =
-      sendMsg wsConn $ SMData $ DataMsg opId $
-        GRHasura $ GQSuccess $ encJToLBS encJson
+    sendGenericResp gqResp =
+      sendMsg wsConn $ SMData $ DataMsg opId $ GRHasura $ GQGeneric gqResp

     withComplete :: ExceptT () IO () -> ExceptT () IO a
     withComplete action = do
diff --git a/server/src-lib/Hasura/GraphQL/Validate/Field.hs b/server/src-lib/Hasura/GraphQL/Validate/Field.hs
index 49a247fc2360b..c21d86dff7982 100644
--- a/server/src-lib/Hasura/GraphQL/Validate/Field.hs
+++ b/server/src-lib/Hasura/GraphQL/Validate/Field.hs
@@ -54,6 +54,7 @@ data Field
   , _fType      :: !G.NamedType
   , _fArguments :: !ArgsMap
   , _fSelSet    :: !SelSet
+  , _fSource    :: !TypeLoc
   } deriving (Eq, Show)

 $(J.deriveToJSON (J.aesonDrop 2 J.camelCase){J.omitNothingFields=True}
@@ -206,6 +207,7 @@ denormFld visFrags fldInfo (G.Field aliasM name args dirs selSet) = do

   let fldTy = _fiTy fldInfo
       fldBaseTy = getBaseTy fldTy
+      fldSource = _fiLoc fldInfo

   fldTyInfo <- getTyInfo fldBaseTy

@@ -227,13 +229,18 @@ denormFld visFrags fldInfo (G.Field aliasM name args dirs selSet) = do
       throwVE $ "internal error: unexpected input type for field: "
       <> showName name

+    -- TODO: breaking! fix this!
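+    -- (That is: selection sets on interface and union types are now rejected
+    -- during validation by the cases below. These presumably only arise via
+    -- remote schemas, which previously bypassed this per-field validation.)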
+    (TIIFace _, _) -> throwVE $ "interface types not supported"
+
+    (TIUnion _, _) -> throwVE $ "union types not supported"
+
     -- when scalar/enum and no empty set
     (_, _) -> throwVE $ "field " <> showName name <> " must not have a "
               <> "selection since type " <> G.showGT fldTy <> " has no subfields"

   withPathK "directives" $ withDirectives dirs $ return $
-    Field (fromMaybe (G.Alias name) aliasM) name fldBaseTy argMap fields
+    Field (fromMaybe (G.Alias name) aliasM) name fldBaseTy argMap fields fldSource

 denormInlnFrag
   :: ( MonadReader ValidationCtx m
diff --git a/server/src-lib/Hasura/GraphQL/Validate/Types.hs b/server/src-lib/Hasura/GraphQL/Validate/Types.hs
index d721a363da02c..a35c0703879bb 100644
--- a/server/src-lib/Hasura/GraphQL/Validate/Types.hs
+++ b/server/src-lib/Hasura/GraphQL/Validate/Types.hs
@@ -171,11 +171,13 @@ type ParamMap = Map.HashMap G.Name InpValInfo
 -- | location of the type: a hasura type or a remote type
 data TypeLoc
   = TLHasuraType
-  | TLRemoteType !RemoteSchemaName !RemoteSchemaInfo
+  | TLRemoteType !RemoteSchemaName
   deriving (Show, Eq, TH.Lift, Generic)

 instance Hashable TypeLoc

+$(J.deriveToJSON (J.aesonDrop 2 J.camelCase){J.omitNothingFields=True} ''TypeLoc)
+
 data ObjFldInfo
   = ObjFldInfo
   { _fiDesc :: !(Maybe G.Description)
diff --git a/server/tests-py/graphql_server.py b/server/tests-py/graphql_server.py
index c747de65f4d97..4e1e966920717 100644
--- a/server/tests-py/graphql_server.py
+++ b/server/tests-py/graphql_server.py
@@ -45,7 +45,7 @@ def get(self, request):
     def post(self, request):
         if not request.json:
             return Response(HTTPStatus.BAD_REQUEST)
-        res = hello_schema.execute(request.json['query'])
+        res = hello_schema.execute(request.json['query'], variables = request.json.get('variables'))
         return mkJSONResp(res)

 class User(graphene.ObjectType):
diff --git a/server/tests-py/queries/remote_schemas/basic_query_with_variables.yaml b/server/tests-py/queries/remote_schemas/basic_query_with_variables.yaml
new file mode 100644
index 0000000000000..9603124087d21
--- /dev/null
+++ b/server/tests-py/queries/remote_schemas/basic_query_with_variables.yaml
@@ -0,0 +1,14 @@
+description: Simple GraphQL query with variables
+url: /v1/graphql
+status: 200
+response:
+  data:
+    hello: Hello i am a variable
+query:
+  query: |
+    query ($myArg: String) {
+      hello(arg: $myArg)
+    }
+
+  variables:
+    myArg: i am a variable
diff --git a/server/tests-py/test_schema_stitching.py b/server/tests-py/test_schema_stitching.py
index 22daad12cac46..82b459d0d8ad6 100644
--- a/server/tests-py/test_schema_stitching.py
+++ b/server/tests-py/test_schema_stitching.py
@@ -186,6 +186,8 @@ def test_bulk_remove_add_remote_schema(self, hge_ctx):
         st_code, resp = hge_ctx.v1q_f(self.dir + '/basic_bulk_remove_add.yaml')
         assert st_code == 200, resp

+    def test_remote_query_variables(self, hge_ctx):
+        check_query_f(hge_ctx, self.dir + '/basic_query_with_variables.yaml')

 class TestAddRemoteSchemaTbls:
     """ tests with adding a table in hasura """
@@ -347,10 +349,10 @@ def test_remote_query_error(self, ws_client):
         try:
             ev = next(resp)
             print(ev)
-            assert ev['type'] == 'data' and ev['id'] == query_id, ev
+            assert ev['type'] == 'error' and ev['id'] == query_id, ev
             assert 'errors' in ev['payload']
             assert ev['payload']['errors'][0]['message'] == \
-                'Cannot query field "blah" on type "User".'
+                'field "blah" not found in type: \'User\''
         finally:
             ws_client.stop(query_id)