diff --git a/contrib/metadata-types/generated/HasuraMetadataV2.hs b/contrib/metadata-types/generated/HasuraMetadataV2.hs
index 1eea578a8dd..33e8e2c63db 100644
--- a/contrib/metadata-types/generated/HasuraMetadataV2.hs
+++ b/contrib/metadata-types/generated/HasuraMetadataV2.hs
@@ -1382,9 +1382,9 @@ instance FromJSON HeaderFromValue where
   parseJSON (Object v) =
     HeaderFromValue
       <$> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "value"
+        .: "value"
 
 instance ToJSON HeaderFromEnv where
   toJSON (HeaderFromEnv nameHeaderFromEnv valueFromEnvHeaderFromEnv) =
@@ -1397,9 +1397,9 @@ instance FromJSON HeaderFromEnv where
   parseJSON (Object v) =
     HeaderFromEnv
      <$> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "value_from_env"
+        .: "value_from_env"
 
 instance ToJSON ObjectField where
   toJSON (ObjectField descriptionObjectField nameObjectField objectFieldTypeObjectField) =
@@ -1413,11 +1413,11 @@ instance FromJSON ObjectField where
   parseJSON (Object v) =
     ObjectField
      <$> v
-      .:? "description"
+        .:? "description"
       <*> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "type"
+        .: "type"
 
 instance ToJSON HasuraMetadataV2 where
   toJSON (HasuraMetadataV2 actionsHasuraMetadataV2 allowlistHasuraMetadataV2 cronTriggersHasuraMetadataV2 customTypesHasuraMetadataV2 functionsHasuraMetadataV2 queryCollectionsHasuraMetadataV2 remoteSchemasHasuraMetadataV2 tablesHasuraMetadataV2 versionHasuraMetadataV2) =
@@ -1437,23 +1437,23 @@ instance FromJSON HasuraMetadataV2 where
   parseJSON (Object v) =
     HasuraMetadataV2
      <$> v
-      .:? "actions"
+        .:? "actions"
       <*> v
-      .:? "allowlist"
+        .:? "allowlist"
       <*> v
-      .:? "cron_triggers"
+        .:? "cron_triggers"
       <*> v
-      .:? "custom_types"
+        .:? "custom_types"
       <*> v
-      .:? "functions"
+        .:? "functions"
       <*> v
-      .:? "query_collections"
+        .:? "query_collections"
       <*> v
-      .:? "remote_schemas"
+        .:? "remote_schemas"
       <*> v
-      .: "tables"
+        .: "tables"
       <*> v
-      .: "version"
+        .: "version"
 
 instance ToJSON Action where
   toJSON (Action commentAction definitionAction nameAction permissionsAction) =
@@ -1468,13 +1468,13 @@ instance FromJSON Action where
   parseJSON (Object v) =
     Action
      <$> v
-      .:? "comment"
+        .:? "comment"
       <*> v
-      .: "definition"
+        .: "definition"
       <*> v
-      .: "name"
+        .: "name"
       <*> v
-      .:? "permissions"
+        .:? "permissions"
 
 instance ToJSON ActionDefinition where
   toJSON (ActionDefinition argumentsActionDefinition forwardClientHeadersActionDefinition handlerActionDefinition headersActionDefinition kindActionDefinition outputTypeActionDefinition actionDefinitionTypeActionDefinition) =
@@ -1492,19 +1492,19 @@ instance FromJSON ActionDefinition where
   parseJSON (Object v) =
     ActionDefinition
      <$> v
-      .:? "arguments"
+        .:? "arguments"
       <*> v
-      .:? "forward_client_headers"
+        .:? "forward_client_headers"
       <*> v
-      .: "handler"
+        .: "handler"
       <*> v
-      .:? "headers"
+        .:? "headers"
       <*> v
-      .:? "kind"
+        .:? "kind"
       <*> v
-      .:? "output_type"
+        .:? "output_type"
       <*> v
-      .:? "type"
+        .:? "type"
 
 instance ToJSON ActionDefinitionType where
   toJSON MutationActionDefinitionType = "mutation"
@@ -1527,9 +1527,9 @@ instance FromJSON InputArgument where
   parseJSON (Object v) =
     InputArgument
      <$> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "type"
+        .: "type"
 
 instance ToJSON Header where
   toJSON (Header nameHeader valueHeader valueFromEnvHeader) =
@@ -1543,11 +1543,11 @@ instance FromJSON Header where
   parseJSON (Object v) =
     Header
      <$> v
-      .: "name"
+        .: "name"
       <*> v
-      .:? "value"
+        .:? "value"
       <*> v
-      .:? "value_from_env"
+        .:? "value_from_env"
 
 instance ToJSON Permission where
   toJSON (Permission rolePermission) =
@@ -1559,7 +1559,7 @@ instance FromJSON Permission where
   parseJSON (Object v) =
     Permission
      <$> v
-      .: "role"
+        .: "role"
 
 instance ToJSON AllowList where
   toJSON (AllowList collectionAllowList) =
@@ -1571,7 +1571,7 @@ instance FromJSON AllowList where
   parseJSON (Object v) =
     AllowList
      <$> v
-      .: "collection"
+        .: "collection"
 
 instance ToJSON CronTrigger where
   toJSON (CronTrigger commentCronTrigger headersCronTrigger includeInMetadataCronTrigger nameCronTrigger payloadCronTrigger retryConfCronTrigger scheduleCronTrigger webhookCronTrigger) =
@@ -1590,21 +1590,21 @@ instance FromJSON CronTrigger where
   parseJSON (Object v) =
     CronTrigger
      <$> v
-      .:? "comment"
+        .:? "comment"
       <*> v
-      .: "headers"
+        .: "headers"
       <*> v
-      .: "include_in_metadata"
+        .: "include_in_metadata"
       <*> v
-      .: "name"
+        .: "name"
       <*> v
-      .:? "payload"
+        .:? "payload"
       <*> v
-      .:? "retry_conf"
+        .:? "retry_conf"
       <*> v
-      .: "schedule"
+        .: "schedule"
       <*> v
-      .: "webhook"
+        .: "webhook"
 
 instance ToJSON RetryConfST where
   toJSON (RetryConfST numRetriesRetryConfST retryIntervalSecondsRetryConfST timeoutSecondsRetryConfST toleranceSecondsRetryConfST) =
@@ -1652,11 +1652,11 @@ instance FromJSON EnumType where
   parseJSON (Object v) =
     EnumType
      <$> v
-      .:? "description"
+        .:? "description"
       <*> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "values"
+        .: "values"
 
 instance ToJSON EnumValue where
   toJSON (EnumValue descriptionEnumValue isDeprecatedEnumValue valueEnumValue) =
@@ -1670,11 +1670,11 @@ instance FromJSON EnumValue where
   parseJSON (Object v) =
     EnumValue
      <$> v
-      .:? "description"
+        .:? "description"
       <*> v
-      .:? "is_deprecated"
+        .:? "is_deprecated"
       <*> v
-      .: "value"
+        .: "value"
 
 instance ToJSON InputObjectType where
   toJSON (InputObjectType descriptionInputObjectType fieldsInputObjectType nameInputObjectType) =
@@ -1688,11 +1688,11 @@ instance FromJSON InputObjectType where
   parseJSON (Object v) =
     InputObjectType
      <$> v
-      .:? "description"
+        .:? "description"
       <*> v
-      .: "fields"
+        .: "fields"
       <*> v
-      .: "name"
+        .: "name"
 
 instance ToJSON InputObjectField where
   toJSON (InputObjectField descriptionInputObjectField nameInputObjectField inputObjectFieldTypeInputObjectField) =
@@ -1706,11 +1706,11 @@ instance FromJSON InputObjectField where
   parseJSON (Object v) =
     InputObjectField
      <$> v
-      .:? "description"
+        .:? "description"
       <*> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "type"
+        .: "type"
 
 instance ToJSON ObjectType where
   toJSON (ObjectType descriptionObjectType fieldsObjectType nameObjectType relationshipsObjectType) =
@@ -1725,13 +1725,13 @@ instance FromJSON ObjectType where
   parseJSON (Object v) =
     ObjectType
      <$> v
-      .:? "description"
+        .:? "description"
       <*> v
-      .: "fields"
+        .: "fields"
       <*> v
-      .: "name"
+        .: "name"
       <*> v
-      .:? "relationships"
+        .:? "relationships"
 
 instance ToJSON CustomTypeObjectRelationship where
   toJSON (CustomTypeObjectRelationship fieldMappingCustomTypeObjectRelationship nameCustomTypeObjectRelationship remoteTableCustomTypeObjectRelationship customTypeObjectRelationshipTypeCustomTypeObjectRelationship) =
@@ -1746,13 +1746,13 @@ instance FromJSON CustomTypeObjectRelationship where
   parseJSON (Object v) =
     CustomTypeObjectRelationship
      <$> v
-      .: "field_mapping"
+        .: "field_mapping"
       <*> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "remote_table"
+        .: "remote_table"
       <*> v
-      .: "type"
+        .: "type"
 
 instance ToJSON CustomTypeObjectRelationshipType where
   toJSON TypeArrayCustomTypeObjectRelationshipType = "array"
@@ -1783,9 +1783,9 @@ instance FromJSON QualifiedTable where
   parseJSON (Object v) =
     QualifiedTable
      <$> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "schema"
+        .: "schema"
 
 instance ToJSON ScalarType where
   toJSON (ScalarType descriptionScalarType nameScalarType) =
@@ -1798,9 +1798,9 @@ instance FromJSON ScalarType where
   parseJSON (Object v) =
     ScalarType
      <$> v
-      .:? "description"
+        .:? "description"
       <*> v
-      .: "name"
+        .: "name"
 
 instance ToJSON CustomFunction where
   toJSON (CustomFunction configurationCustomFunction functionCustomFunction) =
@@ -1813,9 +1813,9 @@ instance FromJSON CustomFunction where
   parseJSON (Object v) =
     CustomFunction
      <$> v
-      .:? "configuration"
+        .:? "configuration"
       <*> v
-      .: "function"
+        .: "function"
 
 instance ToJSON FunctionConfiguration where
   toJSON (FunctionConfiguration sessionArgumentFunctionConfiguration) =
@@ -1847,9 +1847,9 @@ instance FromJSON QualifiedFunction where
   parseJSON (Object v) =
     QualifiedFunction
      <$> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "schema"
+        .: "schema"
 
 instance ToJSON QueryCollectionEntry where
   toJSON (QueryCollectionEntry commentQueryCollectionEntry definitionQueryCollectionEntry nameQueryCollectionEntry) =
@@ -1863,11 +1863,11 @@ instance FromJSON QueryCollectionEntry where
   parseJSON (Object v) =
     QueryCollectionEntry
      <$> v
-      .:? "comment"
+        .:? "comment"
       <*> v
-      .: "definition"
+        .: "definition"
       <*> v
-      .: "name"
+        .: "name"
 
 instance ToJSON Definition where
   toJSON (Definition queriesDefinition) =
@@ -1879,7 +1879,7 @@ instance FromJSON Definition where
   parseJSON (Object v) =
     Definition
      <$> v
-      .: "queries"
+        .: "queries"
 
 instance ToJSON QueryCollection where
   toJSON (QueryCollection nameQueryCollection queryQueryCollection) =
@@ -1892,9 +1892,9 @@ instance FromJSON QueryCollection where
   parseJSON (Object v) =
     QueryCollection
      <$> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "query"
+        .: "query"
 
 instance ToJSON RemoteSchema where
   toJSON (RemoteSchema commentRemoteSchema definitionRemoteSchema nameRemoteSchema) =
@@ -1908,11 +1908,11 @@ instance FromJSON RemoteSchema where
   parseJSON (Object v) =
     RemoteSchema
      <$> v
-      .:? "comment"
+        .:? "comment"
       <*> v
-      .: "definition"
+        .: "definition"
       <*> v
-      .: "name"
+        .: "name"
 
 instance ToJSON RemoteSchemaDef where
   toJSON (RemoteSchemaDef forwardClientHeadersRemoteSchemaDef headersRemoteSchemaDef timeoutSecondsRemoteSchemaDef urlRemoteSchemaDef urlFromEnvRemoteSchemaDef) =
@@ -1954,29 +1954,29 @@ instance FromJSON TableEntry where
   parseJSON (Object v) =
     TableEntry
      <$> v
-      .:? "array_relationships"
+        .:? "array_relationships"
       <*> v
-      .:? "computed_fields"
+        .:? "computed_fields"
       <*> v
-      .:? "configuration"
+        .:? "configuration"
       <*> v
-      .:? "delete_permissions"
+        .:? "delete_permissions"
       <*> v
-      .:? "event_triggers"
+        .:? "event_triggers"
       <*> v
-      .:? "insert_permissions"
+        .:? "insert_permissions"
       <*> v
-      .:? "is_enum"
+        .:? "is_enum"
       <*> v
-      .:? "object_relationships"
+        .:? "object_relationships"
       <*> v
-      .:? "remote_relationships"
+        .:? "remote_relationships"
       <*> v
-      .:? "select_permissions"
+        .:? "select_permissions"
       <*> v
-      .: "table"
+        .: "table"
       <*> v
-      .:? "update_permissions"
+        .:? "update_permissions"
 
 instance ToJSON ArrayRelationship where
   toJSON (ArrayRelationship commentArrayRelationship nameArrayRelationship usingArrayRelationship) =
@@ -1990,11 +1990,11 @@ instance FromJSON ArrayRelationship where
   parseJSON (Object v) =
     ArrayRelationship
      <$> v
-      .:? "comment"
+        .:? "comment"
       <*> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "using"
+        .: "using"
 
 instance ToJSON ArrRelUsing where
   toJSON (ArrRelUsing foreignKeyConstraintOnArrRelUsing manualConfigurationArrRelUsing) =
@@ -2020,9 +2020,9 @@ instance FromJSON ArrRelUsingFKeyOn where
   parseJSON (Object v) =
     ArrRelUsingFKeyOn
      <$> v
-      .: "column"
+        .: "column"
       <*> v
-      .: "table"
+        .: "table"
 
 instance ToJSON ArrRelUsingManualMapping where
   toJSON (ArrRelUsingManualMapping columnMappingArrRelUsingManualMapping remoteTableArrRelUsingManualMapping) =
@@ -2035,9 +2035,9 @@ instance FromJSON ArrRelUsingManualMapping where
   parseJSON (Object v) =
     ArrRelUsingManualMapping
      <$> v
-      .: "column_mapping"
+        .: "column_mapping"
       <*> v
-      .: "remote_table"
+        .: "remote_table"
 
 instance ToJSON ComputedField where
   toJSON (ComputedField commentComputedField definitionComputedField nameComputedField) =
@@ -2051,11 +2051,11 @@ instance FromJSON ComputedField where
   parseJSON (Object v) =
     ComputedField
      <$> v
-      .:? "comment"
+        .:? "comment"
       <*> v
-      .: "definition"
+        .: "definition"
       <*> v
-      .: "name"
+        .: "name"
 
 instance ToJSON ComputedFieldDefinition where
   toJSON (ComputedFieldDefinition functionComputedFieldDefinition sessionArgumentComputedFieldDefinition tableArgumentComputedFieldDefinition) =
@@ -2069,11 +2069,11 @@ instance FromJSON ComputedFieldDefinition where
   parseJSON (Object v) =
     ComputedFieldDefinition
      <$> v
-      .: "function"
+        .: "function"
       <*> v
-      .:? "session_argument"
+        .:? "session_argument"
       <*> v
-      .:? "table_argument"
+        .:? "table_argument"
 
 instance ToJSON TableConfig where
   toJSON (TableConfig customColumnNamesTableConfig customNameTableConfig customRootFieldsTableConfig) =
@@ -2129,11 +2129,11 @@ instance FromJSON DeletePermissionEntry where
   parseJSON (Object v) =
     DeletePermissionEntry
      <$> v
-      .:? "comment"
+        .:? "comment"
       <*> v
-      .: "permission"
+        .: "permission"
       <*> v
-      .: "role"
+        .: "role"
 
 instance ToJSON DeletePermission where
   toJSON (DeletePermission filterDeletePermission) =
@@ -2171,17 +2171,17 @@ instance FromJSON EventTrigger where
   parseJSON (Object v) =
     EventTrigger
      <$> v
-      .: "definition"
+        .: "definition"
       <*> v
-      .:? "headers"
+        .:? "headers"
       <*> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "retry_conf"
+        .: "retry_conf"
       <*> v
-      .:? "webhook"
+        .:? "webhook"
       <*> v
-      .:? "webhook_from_env"
+        .:? "webhook_from_env"
 
 instance ToJSON EventTriggerDefinition where
   toJSON (EventTriggerDefinition deleteEventTriggerDefinition enableManualEventTriggerDefinition insertEventTriggerDefinition updateEventTriggerDefinition) =
@@ -2196,13 +2196,13 @@ instance FromJSON EventTriggerDefinition where
   parseJSON (Object v) =
     EventTriggerDefinition
      <$> v
-      .:? "delete"
+        .:? "delete"
       <*> v
-      .: "enable_manual"
+        .: "enable_manual"
       <*> v
-      .:? "insert"
+        .:? "insert"
       <*> v
-      .:? "update"
+        .:? "update"
 
 instance ToJSON OperationSpec where
   toJSON (OperationSpec columnsOperationSpec payloadOperationSpec) =
@@ -2215,9 +2215,9 @@ instance FromJSON OperationSpec where
   parseJSON (Object v) =
     OperationSpec
      <$> v
-      .: "columns"
+        .: "columns"
       <*> v
-      .:? "payload"
+        .:? "payload"
 
 instance ToJSON EventTriggerColumns where
   toJSON (EnumInEventTriggerColumns x) = toJSON x
@@ -2262,11 +2262,11 @@ instance FromJSON InsertPermissionEntry where
   parseJSON (Object v) =
     InsertPermissionEntry
      <$> v
-      .:? "comment"
+        .:? "comment"
       <*> v
-      .: "permission"
+        .: "permission"
       <*> v
-      .: "role"
+        .: "role"
 
 instance ToJSON InsertPermission where
   toJSON (InsertPermission backendOnlyInsertPermission checkInsertPermission columnsInsertPermission setInsertPermission) =
@@ -2281,13 +2281,13 @@ instance FromJSON InsertPermission where
   parseJSON (Object v) =
     InsertPermission
      <$> v
-      .:? "backend_only"
+        .:? "backend_only"
       <*> v
-      .:? "check"
+        .:? "check"
       <*> v
-      .: "columns"
+        .: "columns"
       <*> v
-      .:? "set"
+        .:? "set"
 
 instance ToJSON ObjectRelationship where
   toJSON (ObjectRelationship commentObjectRelationship nameObjectRelationship usingObjectRelationship) =
@@ -2301,11 +2301,11 @@ instance FromJSON ObjectRelationship where
   parseJSON (Object v) =
     ObjectRelationship
      <$> v
-      .:? "comment"
+        .:? "comment"
       <*> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "using"
+        .: "using"
 
 instance ToJSON ObjRelUsing where
   toJSON (ObjRelUsing foreignKeyConstraintOnObjRelUsing manualConfigurationObjRelUsing) =
@@ -2331,9 +2331,9 @@ instance FromJSON ObjRelUsingManualMapping where
   parseJSON (Object v) =
     ObjRelUsingManualMapping
      <$> v
-      .: "column_mapping"
+        .: "column_mapping"
       <*> v
-      .: "remote_table"
+        .: "remote_table"
 
 instance ToJSON RemoteRelationship where
   toJSON (RemoteRelationship definitionRemoteRelationship nameRemoteRelationship) =
@@ -2346,9 +2346,9 @@ instance FromJSON RemoteRelationship where
   parseJSON (Object v) =
     RemoteRelationship
      <$> v
-      .: "definition"
+        .: "definition"
       <*> v
-      .: "name"
+        .: "name"
 
 instance ToJSON RemoteRelationshipDef where
   toJSON (RemoteRelationshipDef hasuraFieldsRemoteRelationshipDef remoteFieldRemoteRelationshipDef remoteSchemaRemoteRelationshipDef) =
@@ -2362,11 +2362,11 @@ instance FromJSON RemoteRelationshipDef where
   parseJSON (Object v) =
     RemoteRelationshipDef
      <$> v
-      .: "hasura_fields"
+        .: "hasura_fields"
       <*> v
-      .: "remote_field"
+        .: "remote_field"
       <*> v
-      .: "remote_schema"
+        .: "remote_schema"
 
 instance ToJSON RemoteFieldValue where
   toJSON (RemoteFieldValue argumentsRemoteFieldValue fieldRemoteFieldValue) =
@@ -2379,9 +2379,9 @@ instance FromJSON RemoteFieldValue where
   parseJSON (Object v) =
     RemoteFieldValue
      <$> v
-      .: "arguments"
+        .: "arguments"
       <*> v
-      .:? "field"
+        .:? "field"
 
 instance ToJSON SelectPermissionEntry where
   toJSON (SelectPermissionEntry commentSelectPermissionEntry permissionSelectPermissionEntry roleSelectPermissionEntry) =
@@ -2395,11 +2395,11 @@ instance FromJSON SelectPermissionEntry where
   parseJSON (Object v) =
     SelectPermissionEntry
      <$> v
-      .:? "comment"
+        .:? "comment"
       <*> v
-      .: "permission"
+        .: "permission"
       <*> v
-      .: "role"
+        .: "role"
 
 instance ToJSON SelectPermission where
   toJSON (SelectPermission allowAggregationsSelectPermission columnsSelectPermission computedFieldsSelectPermission filterSelectPermission limitSelectPermission) =
@@ -2415,15 +2415,15 @@ instance FromJSON SelectPermission where
   parseJSON (Object v) =
     SelectPermission
      <$> v
-      .:? "allow_aggregations"
+        .:? "allow_aggregations"
       <*> v
-      .: "columns"
+        .: "columns"
       <*> v
-      .:? "computed_fields"
+        .:? "computed_fields"
       <*> v
-      .:? "filter"
+        .:? "filter"
       <*> v
-      .:? "limit"
+        .:? "limit"
 
 instance ToJSON UpdatePermissionEntry where
   toJSON (UpdatePermissionEntry commentUpdatePermissionEntry permissionUpdatePermissionEntry roleUpdatePermissionEntry) =
@@ -2437,11 +2437,11 @@ instance FromJSON UpdatePermissionEntry where
   parseJSON (Object v) =
     UpdatePermissionEntry
      <$> v
-      .:? "comment"
+        .:? "comment"
       <*> v
-      .: "permission"
+        .: "permission"
       <*> v
-      .: "role"
+        .: "role"
 
 instance ToJSON UpdatePermission where
   toJSON (UpdatePermission checkUpdatePermission columnsUpdatePermission filterUpdatePermission setUpdatePermission) =
@@ -2456,10 +2456,10 @@ instance FromJSON UpdatePermission where
   parseJSON (Object v) =
     UpdatePermission
      <$> v
-      .:? "check"
+        .:? "check"
       <*> v
-      .: "columns"
+        .: "columns"
       <*> v
-      .:? "filter"
+        .:? "filter"
       <*> v
-      .:? "set"
+        .:? "set"
diff --git a/contrib/metadata-types/generated/HasuraMetadataV3.hs b/contrib/metadata-types/generated/HasuraMetadataV3.hs
index 092e1e95d68..8d4f9c278ac 100644
--- a/contrib/metadata-types/generated/HasuraMetadataV3.hs
+++ b/contrib/metadata-types/generated/HasuraMetadataV3.hs
@@ -2048,9 +2048,9 @@ instance FromJSON HeaderFromValue where
   parseJSON (Object v) =
     HeaderFromValue
      <$> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "value"
+        .: "value"
 
 instance ToJSON HeaderFromEnv where
   toJSON (HeaderFromEnv nameHeaderFromEnv valueFromEnvHeaderFromEnv) =
@@ -2063,9 +2063,9 @@ instance FromJSON HeaderFromEnv where
   parseJSON (Object v) =
     HeaderFromEnv
      <$> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "value_from_env"
+        .: "value_from_env"
 
 instance ToJSON ObjectField where
   toJSON (ObjectField descriptionObjectField nameObjectField objectFieldTypeObjectField) =
@@ -2079,11 +2079,11 @@ instance FromJSON ObjectField where
   parseJSON (Object v) =
     ObjectField
      <$> v
-      .:? "description"
+        .:? "description"
       <*> v
-      .: "name"
+        .: "name"
       <*> v
-      .: "type"
+        .: "type"
 
 instance ToJSON HasuraMetadataV2 where
   toJSON (HasuraMetadataV2 actionsHasuraMetadataV2 allowlistHasuraMetadataV2 cronTriggersHasuraMetadataV2 customTypesHasuraMetadataV2 functionsHasuraMetadataV2 queryCollectionsHasuraMetadataV2 remoteSchemasHasuraMetadataV2 tablesHasuraMetadataV2 versionHasuraMetadataV2) =
@@ -2103,23 +2103,23 @@ instance FromJSON HasuraMetadataV2 where
   parseJSON (Object v) =
     HasuraMetadataV2
      <$> v
-      .:? "actions"
+        .:? "actions"
       <*> v
-      .:? "allowlist"
+        .:? "allowlist"
       <*> v
-      .:? "cron_triggers"
+        .:? "cron_triggers"
       <*> v
-      .:? "custom_types"
+        .:? "custom_types"
       <*> v
-      .:? "functions"
+        .:? "functions"
       <*> v
-      .:? "query_collections"
+        .:? "query_collections"
       <*> v
-      .:? "remote_schemas"
+        .:? "remote_schemas"
       <*> v
-      .: "tables"
+        .: "tables"
       <*> v
-      .: "version"
+        .: "version"
 
 instance ToJSON Action where
   toJSON (Action commentAction definitionAction nameAction permissionsAction) =
@@ -2134,13 +2134,13 @@ instance FromJSON Action where
   parseJSON (Object v) =
     Action
      <$> v
-      .:? "comment"
+        .:? "comment"
       <*> v
-      .: "definition"
+        .: "definition"
       <*> v
-      .: "name"
+        .: "name"
       <*> v
-      .:? "permissions"
+        .:? "permissions"
 
 instance ToJSON ActionDefinition where
   toJSON (ActionDefinition argumentsActionDefinition forwardClientHeadersActionDefinition handlerActionDefinition headersActionDefinition kindActionDefinition outputTypeActionDefinition actionDefinitionTypeActionDefinition) =
@@ -2158,19 +2158,19 @@ instance FromJSON ActionDefinition where
   parseJSON (Object v) =
     ActionDefinition
      <$> v
-      .:? "arguments"
+        .:? "arguments"
       <*> v
"forward_client_headers" + .:? "forward_client_headers" <*> v - .: "handler" + .: "handler" <*> v - .:? "headers" + .:? "headers" <*> v - .:? "kind" + .:? "kind" <*> v - .:? "output_type" + .:? "output_type" <*> v - .:? "type" + .:? "type" instance ToJSON ActionDefinitionType where toJSON MutationActionDefinitionType = "mutation" @@ -2193,9 +2193,9 @@ instance FromJSON InputArgument where parseJSON (Object v) = InputArgument <$> v - .: "name" + .: "name" <*> v - .: "type" + .: "type" instance ToJSON Header where toJSON (Header nameHeader valueHeader valueFromEnvHeader) = @@ -2209,11 +2209,11 @@ instance FromJSON Header where parseJSON (Object v) = Header <$> v - .: "name" + .: "name" <*> v - .:? "value" + .:? "value" <*> v - .:? "value_from_env" + .:? "value_from_env" instance ToJSON Permission where toJSON (Permission rolePermission) = @@ -2225,7 +2225,7 @@ instance FromJSON Permission where parseJSON (Object v) = Permission <$> v - .: "role" + .: "role" instance ToJSON AllowList where toJSON (AllowList collectionAllowList) = @@ -2237,7 +2237,7 @@ instance FromJSON AllowList where parseJSON (Object v) = AllowList <$> v - .: "collection" + .: "collection" instance ToJSON CronTrigger where toJSON (CronTrigger commentCronTrigger headersCronTrigger includeInMetadataCronTrigger nameCronTrigger payloadCronTrigger retryConfCronTrigger scheduleCronTrigger webhookCronTrigger) = @@ -2256,21 +2256,21 @@ instance FromJSON CronTrigger where parseJSON (Object v) = CronTrigger <$> v - .:? "comment" + .:? "comment" <*> v - .: "headers" + .: "headers" <*> v - .: "include_in_metadata" + .: "include_in_metadata" <*> v - .: "name" + .: "name" <*> v - .:? "payload" + .:? "payload" <*> v - .:? "retry_conf" + .:? "retry_conf" <*> v - .: "schedule" + .: "schedule" <*> v - .: "webhook" + .: "webhook" instance ToJSON RetryConfST where toJSON (RetryConfST numRetriesRetryConfST retryIntervalSecondsRetryConfST timeoutSecondsRetryConfST toleranceSecondsRetryConfST) = @@ -2318,11 +2318,11 @@ instance FromJSON EnumType where parseJSON (Object v) = EnumType <$> v - .:? "description" + .:? "description" <*> v - .: "name" + .: "name" <*> v - .: "values" + .: "values" instance ToJSON EnumValue where toJSON (EnumValue descriptionEnumValue isDeprecatedEnumValue valueEnumValue) = @@ -2336,11 +2336,11 @@ instance FromJSON EnumValue where parseJSON (Object v) = EnumValue <$> v - .:? "description" + .:? "description" <*> v - .:? "is_deprecated" + .:? "is_deprecated" <*> v - .: "value" + .: "value" instance ToJSON InputObjectType where toJSON (InputObjectType descriptionInputObjectType fieldsInputObjectType nameInputObjectType) = @@ -2354,11 +2354,11 @@ instance FromJSON InputObjectType where parseJSON (Object v) = InputObjectType <$> v - .:? "description" + .:? "description" <*> v - .: "fields" + .: "fields" <*> v - .: "name" + .: "name" instance ToJSON InputObjectField where toJSON (InputObjectField descriptionInputObjectField nameInputObjectField inputObjectFieldTypeInputObjectField) = @@ -2372,11 +2372,11 @@ instance FromJSON InputObjectField where parseJSON (Object v) = InputObjectField <$> v - .:? "description" + .:? "description" <*> v - .: "name" + .: "name" <*> v - .: "type" + .: "type" instance ToJSON ObjectType where toJSON (ObjectType descriptionObjectType fieldsObjectType nameObjectType relationshipsObjectType) = @@ -2391,13 +2391,13 @@ instance FromJSON ObjectType where parseJSON (Object v) = ObjectType <$> v - .:? "description" + .:? 
"description" <*> v - .: "fields" + .: "fields" <*> v - .: "name" + .: "name" <*> v - .:? "relationships" + .:? "relationships" instance ToJSON CustomTypeObjectRelationship where toJSON (CustomTypeObjectRelationship fieldMappingCustomTypeObjectRelationship nameCustomTypeObjectRelationship remoteTableCustomTypeObjectRelationship customTypeObjectRelationshipTypeCustomTypeObjectRelationship) = @@ -2412,13 +2412,13 @@ instance FromJSON CustomTypeObjectRelationship where parseJSON (Object v) = CustomTypeObjectRelationship <$> v - .: "field_mapping" + .: "field_mapping" <*> v - .: "name" + .: "name" <*> v - .: "remote_table" + .: "remote_table" <*> v - .: "type" + .: "type" instance ToJSON CustomTypeObjectRelationshipType where toJSON TypeArrayCustomTypeObjectRelationshipType = "array" @@ -2449,9 +2449,9 @@ instance FromJSON QualifiedTable where parseJSON (Object v) = QualifiedTable <$> v - .: "name" + .: "name" <*> v - .: "schema" + .: "schema" instance ToJSON ScalarType where toJSON (ScalarType descriptionScalarType nameScalarType) = @@ -2464,9 +2464,9 @@ instance FromJSON ScalarType where parseJSON (Object v) = ScalarType <$> v - .:? "description" + .:? "description" <*> v - .: "name" + .: "name" instance ToJSON CustomFunction where toJSON (CustomFunction configurationCustomFunction functionCustomFunction) = @@ -2479,9 +2479,9 @@ instance FromJSON CustomFunction where parseJSON (Object v) = CustomFunction <$> v - .:? "configuration" + .:? "configuration" <*> v - .: "function" + .: "function" instance ToJSON FunctionConfiguration where toJSON (FunctionConfiguration sessionArgumentFunctionConfiguration) = @@ -2513,9 +2513,9 @@ instance FromJSON QualifiedFunction where parseJSON (Object v) = QualifiedFunction <$> v - .: "name" + .: "name" <*> v - .: "schema" + .: "schema" instance ToJSON QueryCollectionEntry where toJSON (QueryCollectionEntry commentQueryCollectionEntry definitionQueryCollectionEntry nameQueryCollectionEntry) = @@ -2529,11 +2529,11 @@ instance FromJSON QueryCollectionEntry where parseJSON (Object v) = QueryCollectionEntry <$> v - .:? "comment" + .:? "comment" <*> v - .: "definition" + .: "definition" <*> v - .: "name" + .: "name" instance ToJSON Definition where toJSON (Definition queriesDefinition) = @@ -2545,7 +2545,7 @@ instance FromJSON Definition where parseJSON (Object v) = Definition <$> v - .: "queries" + .: "queries" instance ToJSON QueryCollection where toJSON (QueryCollection nameQueryCollection queryQueryCollection) = @@ -2558,9 +2558,9 @@ instance FromJSON QueryCollection where parseJSON (Object v) = QueryCollection <$> v - .: "name" + .: "name" <*> v - .: "query" + .: "query" instance ToJSON RemoteSchema where toJSON (RemoteSchema commentRemoteSchema definitionRemoteSchema nameRemoteSchema) = @@ -2574,11 +2574,11 @@ instance FromJSON RemoteSchema where parseJSON (Object v) = RemoteSchema <$> v - .:? "comment" + .:? "comment" <*> v - .: "definition" + .: "definition" <*> v - .: "name" + .: "name" instance ToJSON RemoteSchemaDef where toJSON (RemoteSchemaDef forwardClientHeadersRemoteSchemaDef headersRemoteSchemaDef timeoutSecondsRemoteSchemaDef urlRemoteSchemaDef urlFromEnvRemoteSchemaDef) = @@ -2620,29 +2620,29 @@ instance FromJSON TableEntry where parseJSON (Object v) = TableEntry <$> v - .:? "array_relationships" + .:? "array_relationships" <*> v - .:? "computed_fields" + .:? "computed_fields" <*> v - .:? "configuration" + .:? "configuration" <*> v - .:? "delete_permissions" + .:? "delete_permissions" <*> v - .:? "event_triggers" + .:? 
"event_triggers" <*> v - .:? "insert_permissions" + .:? "insert_permissions" <*> v - .:? "is_enum" + .:? "is_enum" <*> v - .:? "object_relationships" + .:? "object_relationships" <*> v - .:? "remote_relationships" + .:? "remote_relationships" <*> v - .:? "select_permissions" + .:? "select_permissions" <*> v - .: "table" + .: "table" <*> v - .:? "update_permissions" + .:? "update_permissions" instance ToJSON ArrayRelationship where toJSON (ArrayRelationship commentArrayRelationship nameArrayRelationship usingArrayRelationship) = @@ -2656,11 +2656,11 @@ instance FromJSON ArrayRelationship where parseJSON (Object v) = ArrayRelationship <$> v - .:? "comment" + .:? "comment" <*> v - .: "name" + .: "name" <*> v - .: "using" + .: "using" instance ToJSON ArrRelUsing where toJSON (ArrRelUsing foreignKeyConstraintOnArrRelUsing manualConfigurationArrRelUsing) = @@ -2686,9 +2686,9 @@ instance FromJSON ArrRelUsingFKeyOn where parseJSON (Object v) = ArrRelUsingFKeyOn <$> v - .: "column" + .: "column" <*> v - .: "table" + .: "table" instance ToJSON ArrRelUsingManualMapping where toJSON (ArrRelUsingManualMapping columnMappingArrRelUsingManualMapping remoteTableArrRelUsingManualMapping) = @@ -2701,9 +2701,9 @@ instance FromJSON ArrRelUsingManualMapping where parseJSON (Object v) = ArrRelUsingManualMapping <$> v - .: "column_mapping" + .: "column_mapping" <*> v - .: "remote_table" + .: "remote_table" instance ToJSON ComputedField where toJSON (ComputedField commentComputedField definitionComputedField nameComputedField) = @@ -2717,11 +2717,11 @@ instance FromJSON ComputedField where parseJSON (Object v) = ComputedField <$> v - .:? "comment" + .:? "comment" <*> v - .: "definition" + .: "definition" <*> v - .: "name" + .: "name" instance ToJSON ComputedFieldDefinition where toJSON (ComputedFieldDefinition functionComputedFieldDefinition sessionArgumentComputedFieldDefinition tableArgumentComputedFieldDefinition) = @@ -2735,11 +2735,11 @@ instance FromJSON ComputedFieldDefinition where parseJSON (Object v) = ComputedFieldDefinition <$> v - .: "function" + .: "function" <*> v - .:? "session_argument" + .:? "session_argument" <*> v - .:? "table_argument" + .:? "table_argument" instance ToJSON TableConfig where toJSON (TableConfig customColumnNamesTableConfig customNameTableConfig customRootFieldsTableConfig) = @@ -2795,11 +2795,11 @@ instance FromJSON DeletePermissionEntry where parseJSON (Object v) = DeletePermissionEntry <$> v - .:? "comment" + .:? "comment" <*> v - .: "permission" + .: "permission" <*> v - .: "role" + .: "role" instance ToJSON DeletePermission where toJSON (DeletePermission filterDeletePermission) = @@ -2837,17 +2837,17 @@ instance FromJSON EventTrigger where parseJSON (Object v) = EventTrigger <$> v - .: "definition" + .: "definition" <*> v - .:? "headers" + .:? "headers" <*> v - .: "name" + .: "name" <*> v - .: "retry_conf" + .: "retry_conf" <*> v - .:? "webhook" + .:? "webhook" <*> v - .:? "webhook_from_env" + .:? "webhook_from_env" instance ToJSON EventTriggerDefinition where toJSON (EventTriggerDefinition deleteEventTriggerDefinition enableManualEventTriggerDefinition insertEventTriggerDefinition updateEventTriggerDefinition) = @@ -2862,13 +2862,13 @@ instance FromJSON EventTriggerDefinition where parseJSON (Object v) = EventTriggerDefinition <$> v - .:? "delete" + .:? "delete" <*> v - .: "enable_manual" + .: "enable_manual" <*> v - .:? "insert" + .:? "insert" <*> v - .:? "update" + .:? 
"update" instance ToJSON OperationSpec where toJSON (OperationSpec columnsOperationSpec payloadOperationSpec) = @@ -2881,9 +2881,9 @@ instance FromJSON OperationSpec where parseJSON (Object v) = OperationSpec <$> v - .: "columns" + .: "columns" <*> v - .:? "payload" + .:? "payload" instance ToJSON EventTriggerColumns where toJSON (EnumInEventTriggerColumns x) = toJSON x @@ -2928,11 +2928,11 @@ instance FromJSON InsertPermissionEntry where parseJSON (Object v) = InsertPermissionEntry <$> v - .:? "comment" + .:? "comment" <*> v - .: "permission" + .: "permission" <*> v - .: "role" + .: "role" instance ToJSON InsertPermission where toJSON (InsertPermission backendOnlyInsertPermission checkInsertPermission columnsInsertPermission setInsertPermission) = @@ -2947,13 +2947,13 @@ instance FromJSON InsertPermission where parseJSON (Object v) = InsertPermission <$> v - .:? "backend_only" + .:? "backend_only" <*> v - .:? "check" + .:? "check" <*> v - .: "columns" + .: "columns" <*> v - .:? "set" + .:? "set" instance ToJSON ObjectRelationship where toJSON (ObjectRelationship commentObjectRelationship nameObjectRelationship usingObjectRelationship) = @@ -2967,11 +2967,11 @@ instance FromJSON ObjectRelationship where parseJSON (Object v) = ObjectRelationship <$> v - .:? "comment" + .:? "comment" <*> v - .: "name" + .: "name" <*> v - .: "using" + .: "using" instance ToJSON ObjRelUsing where toJSON (ObjRelUsing foreignKeyConstraintOnObjRelUsing manualConfigurationObjRelUsing) = @@ -2997,9 +2997,9 @@ instance FromJSON ObjRelUsingManualMapping where parseJSON (Object v) = ObjRelUsingManualMapping <$> v - .: "column_mapping" + .: "column_mapping" <*> v - .: "remote_table" + .: "remote_table" instance ToJSON RemoteRelationship where toJSON (RemoteRelationship definitionRemoteRelationship nameRemoteRelationship) = @@ -3012,9 +3012,9 @@ instance FromJSON RemoteRelationship where parseJSON (Object v) = RemoteRelationship <$> v - .: "definition" + .: "definition" <*> v - .: "name" + .: "name" instance ToJSON RemoteRelationshipDef where toJSON (RemoteRelationshipDef hasuraFieldsRemoteRelationshipDef remoteFieldRemoteRelationshipDef remoteSchemaRemoteRelationshipDef) = @@ -3028,11 +3028,11 @@ instance FromJSON RemoteRelationshipDef where parseJSON (Object v) = RemoteRelationshipDef <$> v - .: "hasura_fields" + .: "hasura_fields" <*> v - .: "remote_field" + .: "remote_field" <*> v - .: "remote_schema" + .: "remote_schema" instance ToJSON RemoteFieldValue where toJSON (RemoteFieldValue argumentsRemoteFieldValue fieldRemoteFieldValue) = @@ -3045,9 +3045,9 @@ instance FromJSON RemoteFieldValue where parseJSON (Object v) = RemoteFieldValue <$> v - .: "arguments" + .: "arguments" <*> v - .:? "field" + .:? "field" instance ToJSON SelectPermissionEntry where toJSON (SelectPermissionEntry commentSelectPermissionEntry permissionSelectPermissionEntry roleSelectPermissionEntry) = @@ -3061,11 +3061,11 @@ instance FromJSON SelectPermissionEntry where parseJSON (Object v) = SelectPermissionEntry <$> v - .:? "comment" + .:? "comment" <*> v - .: "permission" + .: "permission" <*> v - .: "role" + .: "role" instance ToJSON SelectPermission where toJSON (SelectPermission allowAggregationsSelectPermission columnsSelectPermission computedFieldsSelectPermission filterSelectPermission limitSelectPermission) = @@ -3081,15 +3081,15 @@ instance FromJSON SelectPermission where parseJSON (Object v) = SelectPermission <$> v - .:? "allow_aggregations" + .:? "allow_aggregations" <*> v - .: "columns" + .: "columns" <*> v - .:? 
"computed_fields" + .:? "computed_fields" <*> v - .:? "filter" + .:? "filter" <*> v - .:? "limit" + .:? "limit" instance ToJSON UpdatePermissionEntry where toJSON (UpdatePermissionEntry commentUpdatePermissionEntry permissionUpdatePermissionEntry roleUpdatePermissionEntry) = @@ -3103,11 +3103,11 @@ instance FromJSON UpdatePermissionEntry where parseJSON (Object v) = UpdatePermissionEntry <$> v - .:? "comment" + .:? "comment" <*> v - .: "permission" + .: "permission" <*> v - .: "role" + .: "role" instance ToJSON UpdatePermission where toJSON (UpdatePermission checkUpdatePermission columnsUpdatePermission filterUpdatePermission setUpdatePermission) = @@ -3122,13 +3122,13 @@ instance FromJSON UpdatePermission where parseJSON (Object v) = UpdatePermission <$> v - .:? "check" + .:? "check" <*> v - .: "columns" + .: "columns" <*> v - .:? "filter" + .:? "filter" <*> v - .:? "set" + .:? "set" instance ToJSON PGConnectionParameters where toJSON (PGConnectionParameters databasePGConnectionParameters hostPGConnectionParameters passwordPGConnectionParameters portPGConnectionParameters usernamePGConnectionParameters) = @@ -3144,15 +3144,15 @@ instance FromJSON PGConnectionParameters where parseJSON (Object v) = PGConnectionParameters <$> v - .: "database" + .: "database" <*> v - .: "host" + .: "host" <*> v - .:? "password" + .:? "password" <*> v - .: "port" + .: "port" <*> v - .: "username" + .: "username" instance ToJSON BaseSource where toJSON (BaseSource functionsBaseSource nameBaseSource tablesBaseSource) = @@ -3166,11 +3166,11 @@ instance FromJSON BaseSource where parseJSON (Object v) = BaseSource <$> v - .:? "functions" + .:? "functions" <*> v - .: "name" + .: "name" <*> v - .: "tables" + .: "tables" instance ToJSON PGSource where toJSON (PGSource configurationPGSource functionsPGSource kindPGSource namePGSource tablesPGSource) = @@ -3186,15 +3186,15 @@ instance FromJSON PGSource where parseJSON (Object v) = PGSource <$> v - .: "configuration" + .: "configuration" <*> v - .:? "functions" + .:? "functions" <*> v - .: "kind" + .: "kind" <*> v - .: "name" + .: "name" <*> v - .: "tables" + .: "tables" instance ToJSON PGConfiguration where toJSON (PGConfiguration connectionInfoPGConfiguration readReplicasPGConfiguration) = @@ -3207,9 +3207,9 @@ instance FromJSON PGConfiguration where parseJSON (Object v) = PGConfiguration <$> v - .: "connection_info" + .: "connection_info" <*> v - .:? "read_replicas" + .:? "read_replicas" instance ToJSON PGSourceConnectionInfo where toJSON (PGSourceConnectionInfo databaseURLPGSourceConnectionInfo isolationLevelPGSourceConnectionInfo poolSettingsPGSourceConnectionInfo sslConfigurationPGSourceConnectionInfo usePreparedStatementsPGSourceConnectionInfo) = @@ -3225,15 +3225,15 @@ instance FromJSON PGSourceConnectionInfo where parseJSON (Object v) = PGSourceConnectionInfo <$> v - .: "database_url" + .: "database_url" <*> v - .:? "isolation_level" + .:? "isolation_level" <*> v - .:? "pool_settings" + .:? "pool_settings" <*> v - .:? "ssl_configuration" + .:? "ssl_configuration" <*> v - .:? "use_prepared_statements" + .:? "use_prepared_statements" instance ToJSON DatabaseURL where toJSON (PGConnectionParametersClassInDatabaseURL x) = toJSON x @@ -3309,15 +3309,15 @@ instance FromJSON PGCERTSettings where parseJSON (Object v) = PGCERTSettings <$> v - .: "sslcert" + .: "sslcert" <*> v - .: "sslkey" + .: "sslkey" <*> v - .: "sslmode" + .: "sslmode" <*> v - .:? "sslpassword" + .:? 
"sslpassword" <*> v - .: "sslrootcert" + .: "sslrootcert" instance ToJSON FromEnv where toJSON (FromEnv fromEnvFromEnv) = @@ -3329,7 +3329,7 @@ instance FromJSON FromEnv where parseJSON (Object v) = FromEnv <$> v - .: "from_env" + .: "from_env" instance ToJSON Sslpassword where toJSON (FromEnvInSslpassword x) = toJSON x @@ -3363,15 +3363,15 @@ instance FromJSON MSSQLSource where parseJSON (Object v) = MSSQLSource <$> v - .: "configuration" + .: "configuration" <*> v - .:? "functions" + .:? "functions" <*> v - .: "kind" + .: "kind" <*> v - .: "name" + .: "name" <*> v - .: "tables" + .: "tables" instance ToJSON MSSQLConfiguration where toJSON (MSSQLConfiguration connectionInfoMSSQLConfiguration) = @@ -3383,7 +3383,7 @@ instance FromJSON MSSQLConfiguration where parseJSON (Object v) = MSSQLConfiguration <$> v - .: "connection_info" + .: "connection_info" instance ToJSON MSSQLSourceConnectionInfo where toJSON (MSSQLSourceConnectionInfo connectionStringMSSQLSourceConnectionInfo poolSettingsMSSQLSourceConnectionInfo) = @@ -3396,9 +3396,9 @@ instance FromJSON MSSQLSourceConnectionInfo where parseJSON (Object v) = MSSQLSourceConnectionInfo <$> v - .: "connection_string" + .: "connection_string" <*> v - .:? "pool_settings" + .:? "pool_settings" instance ToJSON MSSQLPoolSettings where toJSON (MSSQLPoolSettings idleTimeoutMSSQLPoolSettings maxConnectionsMSSQLPoolSettings) = @@ -3435,15 +3435,15 @@ instance FromJSON BigQuerySource where parseJSON (Object v) = BigQuerySource <$> v - .: "configuration" + .: "configuration" <*> v - .:? "functions" + .:? "functions" <*> v - .: "kind" + .: "kind" <*> v - .: "name" + .: "name" <*> v - .: "tables" + .: "tables" instance ToJSON BigQueryConfiguration where toJSON (BigQueryConfiguration datasetsBigQueryConfiguration projectIDBigQueryConfiguration serviceAccountBigQueryConfiguration) = @@ -3457,11 +3457,11 @@ instance FromJSON BigQueryConfiguration where parseJSON (Object v) = BigQueryConfiguration <$> v - .: "datasets" + .: "datasets" <*> v - .: "project_id" + .: "project_id" <*> v - .: "service_account" + .: "service_account" instance ToJSON Datasets where toJSON (FromEnvInDatasets x) = toJSON x @@ -3518,27 +3518,27 @@ instance FromJSON HasuraMetadataV3 where parseJSON (Object v) = HasuraMetadataV3 <$> v - .:? "actions" + .:? "actions" <*> v - .:? "allowlist" + .:? "allowlist" <*> v - .:? "api_limits" + .:? "api_limits" <*> v - .:? "cron_triggers" + .:? "cron_triggers" <*> v - .:? "custom_types" + .:? "custom_types" <*> v - .:? "inherited_roles" + .:? "inherited_roles" <*> v - .:? "query_collections" + .:? "query_collections" <*> v - .:? "remote_schemas" + .:? "remote_schemas" <*> v - .: "rest_endpoints" + .: "rest_endpoints" <*> v - .: "sources" + .: "sources" <*> v - .: "version" + .: "version" instance ToJSON APILimits where toJSON (APILimits depthLimitAPILimits disabledAPILimits nodeLimitAPILimits rateLimitAPILimits) = @@ -3553,13 +3553,13 @@ instance FromJSON APILimits where parseJSON (Object v) = APILimits <$> v - .:? "depth_limit" + .:? "depth_limit" <*> v - .: "disabled" + .: "disabled" <*> v - .:? "node_limit" + .:? "node_limit" <*> v - .:? "rate_limit" + .:? 
"rate_limit" instance ToJSON DepthLimit where toJSON (DepthLimit globalDepthLimit perRoleDepthLimit) = @@ -3572,9 +3572,9 @@ instance FromJSON DepthLimit where parseJSON (Object v) = DepthLimit <$> v - .: "global" + .: "global" <*> v - .: "per_role" + .: "per_role" instance ToJSON NodeLimit where toJSON (NodeLimit globalNodeLimit perRoleNodeLimit) = @@ -3587,9 +3587,9 @@ instance FromJSON NodeLimit where parseJSON (Object v) = NodeLimit <$> v - .: "global" + .: "global" <*> v - .: "per_role" + .: "per_role" instance ToJSON RateLimit where toJSON (RateLimit globalRateLimit perRoleRateLimit) = @@ -3602,9 +3602,9 @@ instance FromJSON RateLimit where parseJSON (Object v) = RateLimit <$> v - .: "global" + .: "global" <*> v - .: "per_role" + .: "per_role" instance ToJSON RateLimitRule where toJSON (RateLimitRule maxReqsPerMinRateLimitRule uniqueParamsRateLimitRule) = @@ -3617,9 +3617,9 @@ instance FromJSON RateLimitRule where parseJSON (Object v) = RateLimitRule <$> v - .: "max_reqs_per_min" + .: "max_reqs_per_min" <*> v - .: "unique_params" + .: "unique_params" instance ToJSON UniqueParams where toJSON (EnumInUniqueParams x) = toJSON x @@ -3650,9 +3650,9 @@ instance FromJSON InheritedRole where parseJSON (Object v) = InheritedRole <$> v - .: "role_name" + .: "role_name" <*> v - .: "role_set" + .: "role_set" instance ToJSON RESTEndpoint where toJSON (RESTEndpoint commentRESTEndpoint definitionRESTEndpoint methodsRESTEndpoint nameRESTEndpoint urlRESTEndpoint) = @@ -3668,15 +3668,15 @@ instance FromJSON RESTEndpoint where parseJSON (Object v) = RESTEndpoint <$> v - .:? "comment" + .:? "comment" <*> v - .: "definition" + .: "definition" <*> v - .: "methods" + .: "methods" <*> v - .: "name" + .: "name" <*> v - .: "url" + .: "url" instance ToJSON RESTEndpointDefinition where toJSON (RESTEndpointDefinition queryRESTEndpointDefinition) = @@ -3688,7 +3688,7 @@ instance FromJSON RESTEndpointDefinition where parseJSON (Object v) = RESTEndpointDefinition <$> v - .: "query" + .: "query" instance ToJSON QueryClass where toJSON (QueryClass collectionNameQueryClass queryNameQueryClass) = @@ -3701,9 +3701,9 @@ instance FromJSON QueryClass where parseJSON (Object v) = QueryClass <$> v - .: "collection_name" + .: "collection_name" <*> v - .: "query_name" + .: "query_name" instance ToJSON Method where toJSON PatchMethod = "PATCH" @@ -3731,15 +3731,15 @@ instance FromJSON Source where parseJSON (Object v) = Source <$> v - .: "configuration" + .: "configuration" <*> v - .:? "functions" + .:? 
"functions" <*> v - .: "kind" + .: "kind" <*> v - .: "name" + .: "name" <*> v - .: "tables" + .: "tables" instance ToJSON Configuration where toJSON (Configuration connectionInfoConfiguration readReplicasConfiguration datasetsConfiguration projectIDConfiguration serviceAccountConfiguration) = diff --git a/server/VERSIONS.json b/server/VERSIONS.json index 3940dbcf3a9..5198ead9b19 100644 --- a/server/VERSIONS.json +++ b/server/VERSIONS.json @@ -2,5 +2,5 @@ "cabal-install": "3.10.1.0", "ghc": "9.2.5", "hlint": "3.4.1", - "ormolu": "0.5.0.1" + "ormolu": "0.7.0.0" } diff --git a/server/lib/aeson-ordered/src/Data/Aeson/Ordered.hs b/server/lib/aeson-ordered/src/Data/Aeson/Ordered.hs index fde8ce43123..4b700e4e8c8 100644 --- a/server/lib/aeson-ordered/src/Data/Aeson/Ordered.hs +++ b/server/lib/aeson-ordered/src/Data/Aeson/Ordered.hs @@ -211,7 +211,7 @@ fromOrderedObject obj = map (bimap K.fromText fromOrdered) $ Data.Aeson.Ordered.toList obj -asObject :: IsString s => Value -> Either s Object +asObject :: (IsString s) => Value -> Either s Object asObject = \case Object o -> Right o _ -> Left "expecting ordered object" diff --git a/server/lib/api-tests/app-produce-feature-matrix/Main.hs b/server/lib/api-tests/app-produce-feature-matrix/Main.hs index 4a71a1541b8..c23c5ab1a75 100644 --- a/server/lib/api-tests/app-produce-feature-matrix/Main.hs +++ b/server/lib/api-tests/app-produce-feature-matrix/Main.hs @@ -51,31 +51,31 @@ data Options w = Options { connectionString :: w ::: String - "Postgres connection string" + "Postgres connection string" "postgresql://hasura:hasura@127.0.0.1:65002/hasura", output :: w ::: FilePath - "Feature matrix output file path" + "Feature matrix output file path" "/tmp/feature_matrix_tool_output.html", overrideOutputFile :: w ::: Bool - "Override output file if exists", + "Override output file if exists", createDirectory :: w ::: Bool - "Create directory if not exists", + "Create directory if not exists", -- this is just a flag, we take care of splitting the arguments ourselves. noAsk :: w ::: Bool - "Do not ask to override output file or create a directory if missing", + "Do not ask to override output file or create a directory if missing", -- this is just a flag, we take care of splitting the arguments ourselves. hspec :: w ::: Bool - "arguments for hspec" + "arguments for hspec" } deriving (Generic) diff --git a/server/lib/api-tests/src/SpecHook.hs b/server/lib/api-tests/src/SpecHook.hs index 30bf7160143..3d3fc5f094a 100644 --- a/server/lib/api-tests/src/SpecHook.hs +++ b/server/lib/api-tests/src/SpecHook.hs @@ -113,11 +113,12 @@ setupTestingMode = do environment <- getEnvironment lookupTestingMode environment `onLeft` error -hook :: HasCallStack => SpecWith GlobalTestEnvironment -> Spec +hook :: (HasCallStack) => SpecWith GlobalTestEnvironment -> Spec hook specs = do (testingMode, (logger, _cleanupLogger)) <- - runIO $ - readIORef globalConfigRef `onNothingM` do + runIO + $ readIORef globalConfigRef + `onNothingM` do testingMode <- setupTestingMode (logger, cleanupLogger) <- setupLogger setupGlobalConfig testingMode (logger, cleanupLogger) @@ -134,8 +135,8 @@ hook specs = do TestNoBackends -> True -- this is for catching "everything else" TestNewPostgresVariant {} -> "Postgres" `elem` labels - aroundAllWith (const . bracket (setupTestEnvironment testingMode logger) teardownTestEnvironment) $ - mapSpecForest (filterForestWithLabels shouldRunTest) (contextualizeLogger specs) + aroundAllWith (const . 
+  aroundAllWith (const . bracket (setupTestEnvironment testingMode logger) teardownTestEnvironment)
+    $ mapSpecForest (filterForestWithLabels shouldRunTest) (contextualizeLogger specs)
 
 {-# NOINLINE globalConfigRef #-}
 globalConfigRef :: IORef (Maybe (TestingMode, (Logger, IO ())))
diff --git a/server/lib/api-tests/src/Test/API/ConcurrentBulkSpec.hs b/server/lib/api-tests/src/Test/API/ConcurrentBulkSpec.hs
index 1c308c3ad92..5ecd5e61fd1 100644
--- a/server/lib/api-tests/src/Test/API/ConcurrentBulkSpec.hs
+++ b/server/lib/api-tests/src/Test/API/ConcurrentBulkSpec.hs
@@ -90,8 +90,8 @@ postgresRunSqlQuery testEnvironment bulkType = do
   let backendTypeMetadata = fromMaybe (error "Expected a backend type but got nothing") $ getBackendTypeConfig testEnvironment
       sourceName = BackendType.backendSourceName backendTypeMetadata
       backendPrefix = BackendType.backendTypeString backendTypeMetadata
-  postV2Query 200 testEnvironment $
-    [interpolateYaml|
+  postV2Query 200 testEnvironment
+    $ [interpolateYaml|
       type: #{bulkType}
       args:
      - type: #{backendPrefix}_run_sql
@@ -153,8 +153,8 @@ mssqlRunSqlQuery testEnvironment bulkType = do
   let backendTypeMetadata = fromMaybe (error "Expected a backend type but got nothing") $ getBackendTypeConfig testEnvironment
       sourceName = BackendType.backendSourceName backendTypeMetadata
       backendPrefix = BackendType.backendTypeString backendTypeMetadata
-  postV2Query 200 testEnvironment $
-    [interpolateYaml|
+  postV2Query 200 testEnvironment
+    $ [interpolateYaml|
      type: #{bulkType}
      args:
      - type: #{backendPrefix}_run_sql
diff --git a/server/lib/api-tests/src/Test/API/Metadata/ComputedFieldsSpec.hs b/server/lib/api-tests/src/Test/API/Metadata/ComputedFieldsSpec.hs
index 20f6a860dbd..54825210436 100644
--- a/server/lib/api-tests/src/Test/API/Metadata/ComputedFieldsSpec.hs
+++ b/server/lib/api-tests/src/Test/API/Metadata/ComputedFieldsSpec.hs
@@ -68,52 +68,52 @@ setupFunctions :: TestEnvironment -> [Fixture.SetupAction]
 setupFunctions testEnv =
   let schemaName = Schema.getSchemaName testEnv
       articleTableSQL = unSchemaName schemaName <> ".article"
-   in [ SetupAction.noTeardown $
-          BigQuery.run_ $
-            T.unpack $
-              T.unwords $
-                [ "CREATE TABLE FUNCTION ",
-                  fetch_articles_returns_table schemaName,
-                  "(a_id INT64, search STRING)",
-                  "RETURNS TABLE",
-                  "AS (",
-                  "SELECT t.id, t.title, t.content FROM",
-                  articleTableSQL,
-                  "AS t WHERE t.author_id = a_id and (t.title LIKE `search` OR t.content LIKE `search`)",
-                  ");"
-                ],
-        SetupAction.noTeardown $
-          BigQuery.run_ $
-            T.unpack $
-              T.unwords $
-                [ "CREATE TABLE FUNCTION ",
-                  fetch_articles schemaName,
-                  "(a_id INT64, search STRING)",
-                  "AS (",
-                  "SELECT t.* FROM",
-                  articleTableSQL,
-                  "AS t WHERE t.author_id = a_id and (t.title LIKE `search` OR t.content LIKE `search`)",
-                  ");"
-                ],
-        SetupAction.noTeardown $
-          BigQuery.run_ $
-            T.unpack $
-              T.unwords $
-                [ "CREATE TABLE FUNCTION ",
-                  function_no_args schemaName <> "()",
-                  "AS (",
-                  "SELECT t.* FROM",
-                  articleTableSQL,
-                  "AS t);"
-                ],
-        SetupAction.noTeardown $
-          BigQuery.run_ $
-            T.unpack $
-              T.unwords $
-                [ "CREATE FUNCTION ",
-                  add_int schemaName <> "(a INT64, b INT64)",
-                  "RETURNS INT64 AS (a + b);"
-                ]
+   in [ SetupAction.noTeardown
+          $ BigQuery.run_
+          $ T.unpack
+          $ T.unwords
+          $ [ "CREATE TABLE FUNCTION ",
+              fetch_articles_returns_table schemaName,
+              "(a_id INT64, search STRING)",
+              "RETURNS TABLE",
+              "AS (",
+              "SELECT t.id, t.title, t.content FROM",
+              articleTableSQL,
+              "AS t WHERE t.author_id = a_id and (t.title LIKE `search` OR t.content LIKE `search`)",
+              ");"
+            ],
+        SetupAction.noTeardown
+          $ BigQuery.run_
+          $ T.unpack
+          $ T.unwords
+          $ [ "CREATE TABLE FUNCTION ",
+              fetch_articles schemaName,
+              "(a_id INT64, search STRING)",
+              "AS (",
+              "SELECT t.* FROM",
+              articleTableSQL,
+              "AS t WHERE t.author_id = a_id and (t.title LIKE `search` OR t.content LIKE `search`)",
+              ");"
+            ],
+        SetupAction.noTeardown
+          $ BigQuery.run_
+          $ T.unpack
+          $ T.unwords
+          $ [ "CREATE TABLE FUNCTION ",
+              function_no_args schemaName <> "()",
+              "AS (",
+              "SELECT t.* FROM",
+              articleTableSQL,
+              "AS t);"
+            ],
+        SetupAction.noTeardown
+          $ BigQuery.run_
+          $ T.unpack
+          $ T.unwords
+          $ [ "CREATE FUNCTION ",
+              add_int schemaName <> "(a INT64, b INT64)",
+              "RETURNS INT64 AS (a + b);"
+            ]
      ]
 
 fetch_articles_returns_table :: SchemaName -> T.Text
diff --git a/server/lib/api-tests/src/Test/API/Metadata/TablesSpec.hs b/server/lib/api-tests/src/Test/API/Metadata/TablesSpec.hs
index 453fca856ec..7666ac7dd7b 100644
--- a/server/lib/api-tests/src/Test/API/Metadata/TablesSpec.hs
+++ b/server/lib/api-tests/src/Test/API/Metadata/TablesSpec.hs
@@ -49,8 +49,8 @@ spec = do
             [ BigQuery.setupTablesAction schema testEnvironment
             ],
           Fixture.customOptions =
-            Just $
-              Fixture.defaultOptions
+            Just
+              $ Fixture.defaultOptions
                { Fixture.stringifyNumbers = True
                }
        },
@@ -205,8 +205,9 @@ tests = do
           |]
      _ -> error "Unimplemented"
 
-    when (backendType == "sqlite") $
-      actual >>= \result -> result `shouldAtLeastBe` expected
+    when (backendType == "sqlite")
+      $ actual
+      >>= \result -> result `shouldAtLeastBe` expected
 
   it "Returns null for an invalid table" \testEnvironment -> do
    let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
@@ -227,5 +228,5 @@ tests = do
           - made_up_table
       |]
 
-    when (backendType == "sqlite") $
-      shouldReturnYaml testEnvironment actual Null
+    when (backendType == "sqlite")
+      $ shouldReturnYaml testEnvironment actual Null
diff --git a/server/lib/api-tests/src/Test/API/Metadata/TransparentDefaultsSpec.hs b/server/lib/api-tests/src/Test/API/Metadata/TransparentDefaultsSpec.hs
index 8d0cc367b0b..e224339c856 100644
--- a/server/lib/api-tests/src/Test/API/Metadata/TransparentDefaultsSpec.hs
+++ b/server/lib/api-tests/src/Test/API/Metadata/TransparentDefaultsSpec.hs
@@ -48,7 +48,8 @@ tests = do
     it "does not include defaults on stand alone export" \testEnvironment -> do
       response <- postMetadata testEnvironment exportMetadata
       let response' = Object $ response CL.^. AL.key "metadata" . AL._Object & CL.sans "sources"
-          expected = [yaml| version: 3 |] -- Doesn't include defaults
+          expected = [yaml| version: 3 |]
+      -- Doesn't include defaults
      response' `shouldBe` expected
 
   describe "with metadata modifications" do
@@ -58,7 +59,8 @@ tests = do
      response <- postMetadata testEnvironment exportMetadata
      let response' = Object $ response CL.^. AL.key "metadata" . AL._Object & CL.sans "sources"
-          expected = [yaml| version: 3 |] -- Shouldn't include defaults
+          expected = [yaml| version: 3 |]
+      -- Shouldn't include defaults
      response' `shouldBe` expected
 
 exportMetadata :: Value
diff --git a/server/lib/api-tests/src/Test/Auth/Authorization/DisableRootFields/DefaultRootFieldsSpec.hs b/server/lib/api-tests/src/Test/Auth/Authorization/DisableRootFields/DefaultRootFieldsSpec.hs
index 5b7aaeb3891..42239ec8cb3 100644
--- a/server/lib/api-tests/src/Test/Auth/Authorization/DisableRootFields/DefaultRootFieldsSpec.hs
+++ b/server/lib/api-tests/src/Test/Auth/Authorization/DisableRootFields/DefaultRootFieldsSpec.hs
@@ -36,8 +36,8 @@ spec =
            setupPermissionsAction permissions testEnv
          ],
        Fixture.customOptions =
-          Just $
-            Fixture.defaultOptions
+          Just
+            $ Fixture.defaultOptions
            { Fixture.stringifyNumbers = True
            }
      },
diff --git a/server/lib/api-tests/src/Test/DataConnector/MetadataApiSpec.hs b/server/lib/api-tests/src/Test/DataConnector/MetadataApiSpec.hs
index 17be2c7f032..8feb1a128c8 100644
--- a/server/lib/api-tests/src/Test/DataConnector/MetadataApiSpec.hs
+++ b/server/lib/api-tests/src/Test/DataConnector/MetadataApiSpec.hs
@@ -223,7 +223,8 @@ schemaInspectionTests = describe "Schema and Source Inspection" $ do
                args:
                  name: *backendString
            |]
-        ) -- Note: These fields are backend specific so we ignore their values and just verify their shapes:
+        )
+        -- Note: These fields are backend specific so we ignore their values and just verify their shapes:
          <&> Lens.set (key "config_schema_response" . key "other_schemas") J.Null
          <&> Lens.set (key "config_schema_response" . key "config_schema") J.Null
          <&> Lens.set (key "capabilities" . _Object . Lens.at "datasets") Nothing
@@ -403,10 +404,10 @@ schemaCrudTests = describe "A series of actions to setup and teardown a source w
      let capabilities = getBackendTypeConfig testEnvironment >>= BackendType.parseCapabilities
      let foreignKeySupport = fromMaybe False $ capabilities ^? _Just . API.cDataSchema . API.dscSupportsForeignKeys
      let relationshipsSupport = isJust $ capabilities ^? _Just . API.cRelationships . _Just
-      unless relationshipsSupport $
-        pendingWith "Backend does not support local relationships"
-      unless foreignKeySupport $
-        pendingWith "Backend does not support Foreign Key constraints"
+      unless relationshipsSupport
+        $ pendingWith "Backend does not support local relationships"
+      unless foreignKeySupport
+        $ pendingWith "Backend does not support Foreign Key constraints"
 
      case (backendTypeString &&& backendSourceName) <$> getBackendTypeConfig testEnvironment of
        Nothing -> pendingWith "Backend Type not found in testEnvironment"
@@ -439,10 +440,10 @@ schemaCrudTests = describe "A series of actions to setup and teardown a source w
      let capabilities = getBackendTypeConfig testEnvironment >>= BackendType.parseCapabilities
      let foreignKeySupport = fromMaybe False $ capabilities ^? _Just . API.cDataSchema . API.dscSupportsForeignKeys
      let relationshipsSupport = isJust $ capabilities ^? _Just . API.cRelationships . _Just
-      unless relationshipsSupport $
-        pendingWith "Backend does not support local relationships"
-      unless foreignKeySupport $
-        pendingWith "Backend does not support Foreign Key constraints"
+      unless relationshipsSupport
+        $ pendingWith "Backend does not support local relationships"
+      unless foreignKeySupport
+        $ pendingWith "Backend does not support Foreign Key constraints"
 
      case (backendTypeString &&& backendSourceName) <$> TestEnvironment.getBackendTypeConfig testEnvironment of
        Nothing -> pendingWith "Backend Type not found in testEnvironment"
@@ -539,10 +540,10 @@ schemaCrudTests = describe "A series of actions to setup and teardown a source w
      let capabilities = getBackendTypeConfig testEnvironment >>= BackendType.parseCapabilities
      let foreignKeySupport = fromMaybe False $ capabilities ^? _Just . API.cDataSchema . API.dscSupportsForeignKeys
      let relationshipsSupport = isJust $ capabilities ^? _Just . API.cRelationships . _Just
-      unless relationshipsSupport $
-        pendingWith "Backend does not support local relationships"
-      unless foreignKeySupport $
-        pendingWith "Backend does not support Foreign Key constraints"
+      unless relationshipsSupport
+        $ pendingWith "Backend does not support local relationships"
+      unless foreignKeySupport
+        $ pendingWith "Backend does not support Foreign Key constraints"
 
      case (backendTypeString &&& backendSourceName) <$> TestEnvironment.getBackendTypeConfig testEnvironment of
        Nothing -> pendingWith "Backend Type not found in testEnvironment"
diff --git a/server/lib/api-tests/src/Test/DataConnector/MockAgent/AggregateQuerySpec.hs b/server/lib/api-tests/src/Test/DataConnector/MockAgent/AggregateQuerySpec.hs
index 8647d5446ec..8f84f6ca5fb 100644
--- a/server/lib/api-tests/src/Test/DataConnector/MockAgent/AggregateQuerySpec.hs
+++ b/server/lib/api-tests/src/Test/DataConnector/MockAgent/AggregateQuerySpec.hs
@@ -115,8 +115,8 @@ tests = describe "Aggregate Query Tests" $ do
          [ [ ("ArtistIds_Id", API.mkColumnFieldValue $ J.Number 1),
              ("ArtistNames_Name", API.mkColumnFieldValue $ J.String "AC/DC"),
              ( "nodes_Albums",
-                API.mkRelationshipFieldValue $
-                  mkRowsQueryResponse
+                API.mkRelationshipFieldValue
+                  $ mkRowsQueryResponse
                    [ [("nodes_Title", API.mkColumnFieldValue $ J.String "For Those About To Rock We Salute You")],
                      [("nodes_Title", API.mkColumnFieldValue $ J.String "Let There Be Rock")]
                    ]
@@ -144,43 +144,45 @@ tests = describe "Aggregate Query Tests" $ do
      _mrrRecordedRequest
        `shouldBe` Just
-          ( Query $
-              mkTableRequest
+          ( Query
+              $ mkTableRequest
                (mkTableName "Artist")
                ( emptyQuery
                    & API.qFields
-                      ?~ mkFieldsMap
-                        [ ("ArtistIds_Id", API.ColumnField (API.ColumnName "ArtistId") (API.ScalarType "number")),
-                          ("ArtistNames_Name", API.ColumnField (API.ColumnName "Name") (API.ScalarType "string")),
-                          ( "nodes_Albums",
-                            API.RelField
-                              ( API.RelationshipField
-                                  (API.RelationshipName "Albums")
-                                  ( emptyQuery
-                                      & API.qFields ?~ mkFieldsMap [("nodes_Title", API.ColumnField (API.ColumnName "Title") (API.ScalarType "string"))]
-                                  )
-                              )
-                          )
-                        ]
-                    & API.qLimit ?~ 1
+                    ?~ mkFieldsMap
+                      [ ("ArtistIds_Id", API.ColumnField (API.ColumnName "ArtistId") (API.ScalarType "number")),
+                        ("ArtistNames_Name", API.ColumnField (API.ColumnName "Name") (API.ScalarType "string")),
+                        ( "nodes_Albums",
+                          API.RelField
+                            ( API.RelationshipField
+                                (API.RelationshipName "Albums")
+                                ( emptyQuery
+                                    & API.qFields
+                                    ?~ mkFieldsMap [("nodes_Title", API.ColumnField (API.ColumnName "Title") (API.ScalarType "string"))]
+                                )
+                            )
+                        )
+                      ]
+                    & API.qLimit
+                    ?~ 1
                )
-            & API.qrRelationships
-              .~ Set.fromList
-                [ API.RTable
-                    API.TableRelationships
-                      { _trelSourceTable = mkTableName "Artist",
-                        _trelRelationships =
-                          HashMap.fromList
-                            [ ( API.RelationshipName "Albums",
-                                API.Relationship
-                                  { _rTargetTable = mkTableName "Album",
-                                    _rRelationshipType = API.ArrayRelationship,
-                                    _rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")]
-                                  }
-                              )
-                            ]
-                      }
-                ]
+              & API.qrRelationships
+              .~ Set.fromList
+                [ API.RTable
+                    API.TableRelationships
+                      { _trelSourceTable = mkTableName "Artist",
+                        _trelRelationships =
+                          HashMap.fromList
+                            [ ( API.RelationshipName "Albums",
+                                API.Relationship
+                                  { _rTargetTable = mkTableName "Album",
+                                    _rRelationshipType = API.ArrayRelationship,
+                                    _rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")]
+                                  }
+                              )
+                            ]
+                      }
+                ]
          )
 
    mockAgentGraphqlTest "works with multiple aggregate fields and through array relations" $ \_testEnv performGraphqlRequest -> do
@@ -219,15 +221,15 @@ tests = describe "Aggregate Query Tests" $ do
            ]
        rows =
          [ [ ( "nodes_Lines",
-                API.mkRelationshipFieldValue $
-                  mkAggregatesQueryResponse
+                API.mkRelationshipFieldValue
+                  $ mkAggregatesQueryResponse
                    [ ("aggregate_count", J.Number 2)
                    ]
              )
            ],
            [ ( "nodes_Lines",
-                API.mkRelationshipFieldValue $
-                  mkAggregatesQueryResponse
+                API.mkRelationshipFieldValue
+                  $ mkAggregatesQueryResponse
                    [ ("aggregate_count", J.Number 4)
                    ]
              )
            ]
@@ -260,48 +262,50 @@ tests = describe "Aggregate Query Tests" $ do
      _mrrRecordedRequest
        `shouldBe` Just
-          ( Query $
-              mkTableRequest
+          ( Query
+              $ mkTableRequest
                (mkTableName "Invoice")
                ( emptyQuery
                    & API.qFields
-                      ?~ mkFieldsMap
-                        [ ( "nodes_Lines",
-                            API.RelField
-                              ( API.RelationshipField
-                                  (API.RelationshipName "InvoiceLines")
-                                  ( emptyQuery & API.qAggregates ?~ mkFieldsMap [("aggregate_count", API.StarCount)]
-                                  )
-                              )
-                          )
-                        ]
-                    & API.qAggregates
-                      ?~ mkFieldsMap
-                        [ ("counts_count", API.StarCount),
-                          ("counts_uniqueBillingCountries", API.ColumnCount (API.ColumnCountAggregate (API.ColumnName "BillingCountry") True)),
-                          ("ids_minimum_Id", API.SingleColumn (singleColumnAggregateMin (API.ColumnName "InvoiceId") (API.ScalarType "number"))),
-                          ("ids_max_InvoiceId", API.SingleColumn (singleColumnAggregateMax (API.ColumnName "InvoiceId") (API.ScalarType "number")))
-                        ]
-                    & API.qLimit ?~ 2
-                    & API.qAggregatesLimit ?~ 2
+                    ?~ mkFieldsMap
+                      [ ( "nodes_Lines",
+                          API.RelField
+                            ( API.RelationshipField
+                                (API.RelationshipName "InvoiceLines")
+                                ( emptyQuery & API.qAggregates ?~ mkFieldsMap [("aggregate_count", API.StarCount)]
+                                )
+                            )
+                        )
+                      ]
+                    & API.qAggregates
+                    ?~ mkFieldsMap
+                      [ ("counts_count", API.StarCount),
+                        ("counts_uniqueBillingCountries", API.ColumnCount (API.ColumnCountAggregate (API.ColumnName "BillingCountry") True)),
+                        ("ids_minimum_Id", API.SingleColumn (singleColumnAggregateMin (API.ColumnName "InvoiceId") (API.ScalarType "number"))),
+                        ("ids_max_InvoiceId", API.SingleColumn (singleColumnAggregateMax (API.ColumnName "InvoiceId") (API.ScalarType "number")))
+                      ]
+                    & API.qLimit
+                    ?~ 2
+                    & API.qAggregatesLimit
+                    ?~ 2
                )
-            & API.qrRelationships
-              .~ Set.fromList
-                [ API.RTable
-                    API.TableRelationships
-                      { _trelSourceTable = mkTableName "Invoice",
-                        _trelRelationships =
-                          HashMap.fromList
-                            [ ( API.RelationshipName "InvoiceLines",
-                                API.Relationship
-                                  { _rTargetTable = mkTableName "InvoiceLine",
-                                    _rRelationshipType = API.ArrayRelationship,
-                                    _rColumnMapping = HashMap.fromList [(API.ColumnName "InvoiceId", API.ColumnName "InvoiceId")]
-                                  }
-                              )
-                            ]
-                      }
-                ]
+              & API.qrRelationships
+              .~ Set.fromList
+                [ API.RTable
+                    API.TableRelationships
+                      { _trelSourceTable = mkTableName "Invoice",
+                        _trelRelationships =
+                          HashMap.fromList
+                            [ ( API.RelationshipName "InvoiceLines",
+                                API.Relationship
+                                  { _rTargetTable = mkTableName "InvoiceLine",
+                                    _rRelationshipType = API.ArrayRelationship,
+                                    _rColumnMapping = HashMap.fromList [(API.ColumnName "InvoiceId", API.ColumnName "InvoiceId")]
+                                  }
+                              )
+                            ]
+                      }
+                ]
          )
 
 singleColumnAggregateMax :: API.ColumnName -> API.ScalarType -> API.SingleColumnAggregate
diff --git a/server/lib/api-tests/src/Test/DataConnector/MockAgent/BasicQuerySpec.hs b/server/lib/api-tests/src/Test/DataConnector/MockAgent/BasicQuerySpec.hs
index 7653ec9042d..064f5a7098d 100644
--- a/server/lib/api-tests/src/Test/DataConnector/MockAgent/BasicQuerySpec.hs
+++ b/server/lib/api-tests/src/Test/DataConnector/MockAgent/BasicQuerySpec.hs
@@ -138,16 +138,17 @@ tests = describe "Basic Tests" $ do
      _mrrRecordedRequest
        `shouldBe` Just
-          ( Query $
-              mkTableRequest
+          ( Query
+              $ mkTableRequest
                (mkTableName "Album")
                ( emptyQuery
                    & API.qFields
-                      ?~ mkFieldsMap
-                        [ ("id", API.ColumnField (API.ColumnName "AlbumId") (API.ScalarType "number")),
-                          ("title", API.ColumnField (API.ColumnName "Title") (API.ScalarType "string"))
-                        ]
-                    & API.qLimit ?~ 1
+                    ?~ mkFieldsMap
+                      [ ("id", API.ColumnField (API.ColumnName "AlbumId") (API.ScalarType "number")),
+                        ("title", API.ColumnField (API.ColumnName "Title") (API.ScalarType "string"))
+                      ]
+                    & API.qLimit
+                    ?~ 1
                )
          )
 
@@ -192,16 +193,17 @@ tests = describe "Basic Tests" $ do
      _mrrRecordedRequest
        `shouldBe` Just
-          ( Query $
-              mkTableRequest
+          ( Query
+              $ mkTableRequest
                (mkTableName "Artist")
                ( emptyQuery
                    & API.qFields
-                      ?~ mkFieldsMap
-                        [ ("id", API.ColumnField (API.ColumnName "ArtistId") $ API.ScalarType "number"),
-                          ("name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string")
-                        ]
-                    & API.qLimit ?~ 3 -- The permissions limit is smaller than the query limit, so it is used
+                    ?~ mkFieldsMap
+                      [ ("id", API.ColumnField (API.ColumnName "ArtistId") $ API.ScalarType "number"),
+                        ("name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string")
+                      ]
+                    & API.qLimit
+                    ?~ 3 -- The permissions limit is smaller than the query limit, so it is used
                )
          )
 
@@ -242,21 +244,21 @@ tests = describe "Basic Tests" $ do
      _mrrRecordedRequest
        `shouldBe` Just
-          ( Query $
-              mkTableRequest
+          ( Query
+              $ mkTableRequest
                (mkTableName "Customer")
                ( emptyQuery
                    & API.qFields
-                      ?~ mkFieldsMap
-                        [ ("CustomerId", API.ColumnField (API.ColumnName "CustomerId") $ API.ScalarType "number")
-                        ]
-                    & API.qWhere
-                      ?~ API.Exists
-                        (API.UnrelatedTable $ mkTableName "Employee")
-                        ( API.ApplyBinaryComparisonOperator
-                            API.Equal
-                            (API.ComparisonColumn API.CurrentTable (API.ColumnName "EmployeeId") $ API.ScalarType "number")
-                            (API.ScalarValueComparison $ API.ScalarValue (J.Number 1) (API.ScalarType "number"))
-                        )
+                    ?~ mkFieldsMap
+                      [ ("CustomerId", API.ColumnField (API.ColumnName "CustomerId") $ API.ScalarType "number")
+                      ]
+                    & API.qWhere
+                    ?~ API.Exists
+                      (API.UnrelatedTable $ mkTableName "Employee")
+                      ( API.ApplyBinaryComparisonOperator
+                          API.Equal
+                          (API.ComparisonColumn API.CurrentTable (API.ColumnName "EmployeeId") $ API.ScalarType "number")
+                          (API.ScalarValueComparison $ API.ScalarValue (J.Number 1) (API.ScalarType "number"))
+                      )
                )
          )
diff --git a/server/lib/api-tests/src/Test/DataConnector/MockAgent/CustomScalarsSpec.hs b/server/lib/api-tests/src/Test/DataConnector/MockAgent/CustomScalarsSpec.hs
index 38cd68af162..16329db555b 100644
--- a/server/lib/api-tests/src/Test/DataConnector/MockAgent/CustomScalarsSpec.hs
+++ 
b/server/lib/api-tests/src/Test/DataConnector/MockAgent/CustomScalarsSpec.hs @@ -87,20 +87,21 @@ tests = describe "Custom scalar parsing tests" $ do _mrrRecordedRequest `shouldBe` Just - ( Query $ - mkTableRequest + ( Query + $ mkTableRequest (mkTableName "MyCustomScalarsTable") ( emptyQuery & API.qFields - ?~ mkFieldsMap - [ ("MyIntColumn", API.ColumnField (API.ColumnName "MyIntColumn") $ API.ScalarType "MyInt"), - ("MyFloatColumn", API.ColumnField (API.ColumnName "MyFloatColumn") $ API.ScalarType "MyFloat"), - ("MyStringColumn", API.ColumnField (API.ColumnName "MyStringColumn") $ API.ScalarType "MyString"), - ("MyBooleanColumn", API.ColumnField (API.ColumnName "MyBooleanColumn") $ API.ScalarType "MyBoolean"), - ("MyIDColumn", API.ColumnField (API.ColumnName "MyIDColumn") $ API.ScalarType "MyID"), - ("MyAnythingColumn", API.ColumnField (API.ColumnName "MyAnythingColumn") $ API.ScalarType "MyAnything") - ] - & API.qLimit ?~ 1 + ?~ mkFieldsMap + [ ("MyIntColumn", API.ColumnField (API.ColumnName "MyIntColumn") $ API.ScalarType "MyInt"), + ("MyFloatColumn", API.ColumnField (API.ColumnName "MyFloatColumn") $ API.ScalarType "MyFloat"), + ("MyStringColumn", API.ColumnField (API.ColumnName "MyStringColumn") $ API.ScalarType "MyString"), + ("MyBooleanColumn", API.ColumnField (API.ColumnName "MyBooleanColumn") $ API.ScalarType "MyBoolean"), + ("MyIDColumn", API.ColumnField (API.ColumnName "MyIDColumn") $ API.ScalarType "MyID"), + ("MyAnythingColumn", API.ColumnField (API.ColumnName "MyAnythingColumn") $ API.ScalarType "MyAnything") + ] + & API.qLimit + ?~ 1 ) ) @@ -144,49 +145,50 @@ tests = describe "Custom scalar parsing tests" $ do _mrrRecordedRequest `shouldBe` Just - ( Query $ - mkTableRequest + ( Query + $ mkTableRequest (mkTableName "MyCustomScalarsTable") ( emptyQuery & API.qFields - ?~ mkFieldsMap - [ ("MyIntColumn", API.ColumnField (API.ColumnName "MyIntColumn") $ API.ScalarType "MyInt"), - ("MyFloatColumn", API.ColumnField (API.ColumnName "MyFloatColumn") $ API.ScalarType "MyFloat"), - ("MyStringColumn", API.ColumnField (API.ColumnName "MyStringColumn") $ API.ScalarType "MyString"), - ("MyBooleanColumn", API.ColumnField (API.ColumnName "MyBooleanColumn") $ API.ScalarType "MyBoolean"), - ("MyIDColumn", API.ColumnField (API.ColumnName "MyIDColumn") $ API.ScalarType "MyID"), - ("MyAnythingColumn", API.ColumnField (API.ColumnName "MyAnythingColumn") $ API.ScalarType "MyAnything") - ] - & API.qLimit ?~ 1 - & API.qWhere - ?~ And - ( Set.fromList - [ ApplyBinaryComparisonOperator - Equal - (ComparisonColumn CurrentTable (ColumnName "MyBooleanColumn") (ScalarType "MyBoolean")) - (ScalarValueComparison $ ScalarValue (J.Bool True) (ScalarType "MyBoolean")), - ApplyBinaryComparisonOperator - Equal - (ComparisonColumn CurrentTable (ColumnName "MyFloatColumn") (ScalarType "MyFloat")) - (ScalarValueComparison $ ScalarValue (J.Number 3.14) (ScalarType "MyFloat")), - ApplyBinaryComparisonOperator - Equal - (ComparisonColumn CurrentTable (ColumnName "MyStringColumn") (ScalarType "MyString")) - (ScalarValueComparison $ ScalarValue (J.String "foo") (ScalarType "MyString")), - ApplyBinaryComparisonOperator - Equal - (ComparisonColumn CurrentTable (ColumnName "MyIDColumn") (ScalarType "MyID")) - (ScalarValueComparison $ ScalarValue (J.String "x") (ScalarType "MyID")), - ApplyBinaryComparisonOperator - Equal - (ComparisonColumn CurrentTable (ColumnName "MyIntColumn") (ScalarType "MyInt")) - (ScalarValueComparison $ ScalarValue (J.Number 42.0) (ScalarType "MyInt")), - ApplyBinaryComparisonOperator - Equal - 
(ComparisonColumn CurrentTable (ColumnName "MyAnythingColumn") (ScalarType "MyAnything")) - (ScalarValueComparison $ ScalarValue (J.Object mempty) (ScalarType "MyAnything")) - ] - ) + ?~ mkFieldsMap + [ ("MyIntColumn", API.ColumnField (API.ColumnName "MyIntColumn") $ API.ScalarType "MyInt"), + ("MyFloatColumn", API.ColumnField (API.ColumnName "MyFloatColumn") $ API.ScalarType "MyFloat"), + ("MyStringColumn", API.ColumnField (API.ColumnName "MyStringColumn") $ API.ScalarType "MyString"), + ("MyBooleanColumn", API.ColumnField (API.ColumnName "MyBooleanColumn") $ API.ScalarType "MyBoolean"), + ("MyIDColumn", API.ColumnField (API.ColumnName "MyIDColumn") $ API.ScalarType "MyID"), + ("MyAnythingColumn", API.ColumnField (API.ColumnName "MyAnythingColumn") $ API.ScalarType "MyAnything") + ] + & API.qLimit + ?~ 1 + & API.qWhere + ?~ And + ( Set.fromList + [ ApplyBinaryComparisonOperator + Equal + (ComparisonColumn CurrentTable (ColumnName "MyBooleanColumn") (ScalarType "MyBoolean")) + (ScalarValueComparison $ ScalarValue (J.Bool True) (ScalarType "MyBoolean")), + ApplyBinaryComparisonOperator + Equal + (ComparisonColumn CurrentTable (ColumnName "MyFloatColumn") (ScalarType "MyFloat")) + (ScalarValueComparison $ ScalarValue (J.Number 3.14) (ScalarType "MyFloat")), + ApplyBinaryComparisonOperator + Equal + (ComparisonColumn CurrentTable (ColumnName "MyStringColumn") (ScalarType "MyString")) + (ScalarValueComparison $ ScalarValue (J.String "foo") (ScalarType "MyString")), + ApplyBinaryComparisonOperator + Equal + (ComparisonColumn CurrentTable (ColumnName "MyIDColumn") (ScalarType "MyID")) + (ScalarValueComparison $ ScalarValue (J.String "x") (ScalarType "MyID")), + ApplyBinaryComparisonOperator + Equal + (ComparisonColumn CurrentTable (ColumnName "MyIntColumn") (ScalarType "MyInt")) + (ScalarValueComparison $ ScalarValue (J.Number 42.0) (ScalarType "MyInt")), + ApplyBinaryComparisonOperator + Equal + (ComparisonColumn CurrentTable (ColumnName "MyAnythingColumn") (ScalarType "MyAnything")) + (ScalarValueComparison $ ScalarValue (J.Object mempty) (ScalarType "MyAnything")) + ] + ) ) ) diff --git a/server/lib/api-tests/src/Test/DataConnector/MockAgent/DeleteMutationsSpec.hs b/server/lib/api-tests/src/Test/DataConnector/MockAgent/DeleteMutationsSpec.hs index 6e55352b717..51a8d56e9db 100644 --- a/server/lib/api-tests/src/Test/DataConnector/MockAgent/DeleteMutationsSpec.hs +++ b/server/lib/api-tests/src/Test/DataConnector/MockAgent/DeleteMutationsSpec.hs @@ -114,8 +114,8 @@ tests = do [ ("deletedRows_AlbumId", API.mkColumnFieldValue $ J.Number 112), ("deletedRows_Title", API.mkColumnFieldValue $ J.String "The Number of The Beast"), ( "deletedRows_Artist", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("ArtistId", API.mkColumnFieldValue $ J.Number 90), ("Name", API.mkColumnFieldValue $ J.String "Iron Maiden") ] @@ -126,8 +126,8 @@ tests = do [ ("deletedRows_AlbumId", API.mkColumnFieldValue $ J.Number 113), ("deletedRows_Title", API.mkColumnFieldValue $ J.String "The X Factor"), ( "deletedRows_Artist", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("ArtistId", API.mkColumnFieldValue $ J.Number 90), ("Name", API.mkColumnFieldValue $ J.String "Iron Maiden") ] @@ -138,8 +138,8 @@ tests = do [ ("deletedRows_AlbumId", API.mkColumnFieldValue $ J.Number 114), ("deletedRows_Title", API.mkColumnFieldValue $ J.String "Virtual XI"), ( "deletedRows_Artist", - 
API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("ArtistId", API.mkColumnFieldValue $ J.Number 90), ("Name", API.mkColumnFieldValue $ J.String "Iron Maiden") ] @@ -179,57 +179,58 @@ tests = do let expectedRequest = emptyMutationRequest & API.mrTableRelationships - .~ Set.fromList - [ API.TableRelationships - { API._trelSourceTable = mkTableName "Album", - API._trelRelationships = - HashMap.fromList - [ ( API.RelationshipName "Artist", - API.Relationship - { API._rTargetTable = mkTableName "Artist", - API._rRelationshipType = API.ObjectRelationship, - API._rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")] - } - ) - ] - } - ] - & API.mrOperations - .~ [ API.DeleteOperation $ - API.DeleteMutationOperation - { API._dmoTable = mkTableName "Album", - API._dmoWhere = - Just . API.And $ - Set.fromList - [ API.ApplyBinaryComparisonOperator - API.Equal - (API.ComparisonColumn API.CurrentTable (API.ColumnName "ArtistId") $ API.ScalarType "number") - (API.ScalarValueComparison $ API.ScalarValue (J.Number 90) (API.ScalarType "number")), - API.ApplyBinaryComparisonOperator - API.GreaterThan - (API.ComparisonColumn API.CurrentTable (API.ColumnName "AlbumId") $ API.ScalarType "number") - (API.ScalarValueComparison $ API.ScalarValue (J.Number 111) (API.ScalarType "number")) - ], - API._dmoReturningFields = - mkFieldsMap - [ ("deletedRows_AlbumId", API.ColumnField (API.ColumnName "AlbumId") (API.ScalarType "number")), - ("deletedRows_Title", API.ColumnField (API.ColumnName "Title") (API.ScalarType "string")), - ( "deletedRows_Artist", - API.RelField - ( API.RelationshipField - (API.RelationshipName "Artist") - ( emptyQuery - & API.qFields - ?~ mkFieldsMap - [ ("ArtistId", API.ColumnField (API.ColumnName "ArtistId") $ API.ScalarType "number"), - ("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string") - ] - ) - ) - ) - ] - } - ] + .~ Set.fromList + [ API.TableRelationships + { API._trelSourceTable = mkTableName "Album", + API._trelRelationships = + HashMap.fromList + [ ( API.RelationshipName "Artist", + API.Relationship + { API._rTargetTable = mkTableName "Artist", + API._rRelationshipType = API.ObjectRelationship, + API._rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")] + } + ) + ] + } + ] + & API.mrOperations + .~ [ API.DeleteOperation + $ API.DeleteMutationOperation + { API._dmoTable = mkTableName "Album", + API._dmoWhere = + Just + . 
API.And + $ Set.fromList + [ API.ApplyBinaryComparisonOperator + API.Equal + (API.ComparisonColumn API.CurrentTable (API.ColumnName "ArtistId") $ API.ScalarType "number") + (API.ScalarValueComparison $ API.ScalarValue (J.Number 90) (API.ScalarType "number")), + API.ApplyBinaryComparisonOperator + API.GreaterThan + (API.ComparisonColumn API.CurrentTable (API.ColumnName "AlbumId") $ API.ScalarType "number") + (API.ScalarValueComparison $ API.ScalarValue (J.Number 111) (API.ScalarType "number")) + ], + API._dmoReturningFields = + mkFieldsMap + [ ("deletedRows_AlbumId", API.ColumnField (API.ColumnName "AlbumId") (API.ScalarType "number")), + ("deletedRows_Title", API.ColumnField (API.ColumnName "Title") (API.ScalarType "string")), + ( "deletedRows_Artist", + API.RelField + ( API.RelationshipField + (API.RelationshipName "Artist") + ( emptyQuery + & API.qFields + ?~ mkFieldsMap + [ ("ArtistId", API.ColumnField (API.ColumnName "ArtistId") $ API.ScalarType "number"), + ("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string") + ] + ) + ) + ) + ] + } + ] _mrrRecordedRequest `shouldBe` Just (Mutation expectedRequest) mockAgentGraphqlTest "delete row by pk with delete permissions" $ \_testEnv performGraphqlRequest -> do @@ -257,8 +258,8 @@ tests = do [ ("AlbumId", API.mkColumnFieldValue $ J.Number 112), ("Title", API.mkColumnFieldValue $ J.String "The Number of The Beast"), ( "Artist", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("ArtistId", API.mkColumnFieldValue $ J.Number 90), ("Name", API.mkColumnFieldValue $ J.String "Iron Maiden") ] @@ -286,55 +287,56 @@ tests = do let expectedRequest = emptyMutationRequest & API.mrTableRelationships - .~ Set.fromList - [ API.TableRelationships - { API._trelSourceTable = mkTableName "Album", - API._trelRelationships = - HashMap.fromList - [ ( API.RelationshipName "Artist", - API.Relationship - { API._rTargetTable = mkTableName "Artist", - API._rRelationshipType = API.ObjectRelationship, - API._rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")] - } - ) - ] - } - ] - & API.mrOperations - .~ [ API.DeleteOperation $ - API.DeleteMutationOperation - { API._dmoTable = mkTableName "Album", - API._dmoWhere = - Just . 
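-- Editor's note (illustrative sketch, not part of this PR): chains such as
--   Just . API.And $ Set.fromList [...]
-- are function composition applied with ($): f . g $ x == f (g x). The hunk
-- here only moves the (.) and ($) to the front of their lines. A
-- self-contained analogue of the same shape:
wrapSum :: [Int] -> Maybe Int
wrapSum xs =
  Just
    . sum
    $ xs
-- wrapSum [1, 2, 3] == Just 6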
API.And $ - Set.fromList - [ API.ApplyBinaryComparisonOperator - API.Equal - (API.ComparisonColumn API.CurrentTable (API.ColumnName "ArtistId") $ API.ScalarType "number") - (API.ScalarValueComparison $ API.ScalarValue (J.Number 90) (API.ScalarType "number")), - API.ApplyBinaryComparisonOperator - API.Equal - (API.ComparisonColumn API.CurrentTable (API.ColumnName "AlbumId") $ API.ScalarType "number") - (API.ScalarValueComparison $ API.ScalarValue (J.Number 112) (API.ScalarType "number")) - ], - API._dmoReturningFields = - mkFieldsMap - [ ("AlbumId", API.ColumnField (API.ColumnName "AlbumId") (API.ScalarType "number")), - ("Title", API.ColumnField (API.ColumnName "Title") (API.ScalarType "string")), - ( "Artist", - API.RelField - ( API.RelationshipField - (API.RelationshipName "Artist") - ( emptyQuery - & API.qFields - ?~ mkFieldsMap - [ ("ArtistId", API.ColumnField (API.ColumnName "ArtistId") $ API.ScalarType "number"), - ("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string") - ] - ) - ) - ) - ] - } - ] + .~ Set.fromList + [ API.TableRelationships + { API._trelSourceTable = mkTableName "Album", + API._trelRelationships = + HashMap.fromList + [ ( API.RelationshipName "Artist", + API.Relationship + { API._rTargetTable = mkTableName "Artist", + API._rRelationshipType = API.ObjectRelationship, + API._rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")] + } + ) + ] + } + ] + & API.mrOperations + .~ [ API.DeleteOperation + $ API.DeleteMutationOperation + { API._dmoTable = mkTableName "Album", + API._dmoWhere = + Just + . API.And + $ Set.fromList + [ API.ApplyBinaryComparisonOperator + API.Equal + (API.ComparisonColumn API.CurrentTable (API.ColumnName "ArtistId") $ API.ScalarType "number") + (API.ScalarValueComparison $ API.ScalarValue (J.Number 90) (API.ScalarType "number")), + API.ApplyBinaryComparisonOperator + API.Equal + (API.ComparisonColumn API.CurrentTable (API.ColumnName "AlbumId") $ API.ScalarType "number") + (API.ScalarValueComparison $ API.ScalarValue (J.Number 112) (API.ScalarType "number")) + ], + API._dmoReturningFields = + mkFieldsMap + [ ("AlbumId", API.ColumnField (API.ColumnName "AlbumId") (API.ScalarType "number")), + ("Title", API.ColumnField (API.ColumnName "Title") (API.ScalarType "string")), + ( "Artist", + API.RelField + ( API.RelationshipField + (API.RelationshipName "Artist") + ( emptyQuery + & API.qFields + ?~ mkFieldsMap + [ ("ArtistId", API.ColumnField (API.ColumnName "ArtistId") $ API.ScalarType "number"), + ("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string") + ] + ) + ) + ) + ] + } + ] _mrrRecordedRequest `shouldBe` Just (Mutation expectedRequest) diff --git a/server/lib/api-tests/src/Test/DataConnector/MockAgent/ErrorSpec.hs b/server/lib/api-tests/src/Test/DataConnector/MockAgent/ErrorSpec.hs index 806e860f575..07079793802 100644 --- a/server/lib/api-tests/src/Test/DataConnector/MockAgent/ErrorSpec.hs +++ b/server/lib/api-tests/src/Test/DataConnector/MockAgent/ErrorSpec.hs @@ -94,16 +94,17 @@ tests = describe "Error Protocol Tests" $ do _mrrRecordedRequest `shouldBe` Just - ( Query $ - mkTableRequest + ( Query + $ mkTableRequest (mkTableName "Album") ( emptyQuery & API.qFields - ?~ mkFieldsMap - [ ("id", API.ColumnField (API.ColumnName "AlbumId") $ API.ScalarType "number"), - ("title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string") - ] - & API.qLimit ?~ 1 + ?~ mkFieldsMap + [ ("id", API.ColumnField (API.ColumnName "AlbumId") $ API.ScalarType "number"), + 
("title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string") + ] + & API.qLimit + ?~ 1 ) ) diff --git a/server/lib/api-tests/src/Test/DataConnector/MockAgent/InsertMutationsSpec.hs b/server/lib/api-tests/src/Test/DataConnector/MockAgent/InsertMutationsSpec.hs index 504785208ad..5594ccf56e0 100644 --- a/server/lib/api-tests/src/Test/DataConnector/MockAgent/InsertMutationsSpec.hs +++ b/server/lib/api-tests/src/Test/DataConnector/MockAgent/InsertMutationsSpec.hs @@ -123,8 +123,8 @@ tests = do [ ("insertedRows_AlbumId", API.mkColumnFieldValue $ J.Number 9001), ("insertedRows_Title", API.mkColumnFieldValue $ J.String "Super Mega Rock"), ( "insertedRows_Artist", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("ArtistId", API.mkColumnFieldValue $ J.Number 2), ("Name", API.mkColumnFieldValue $ J.String "Accept") ] @@ -135,8 +135,8 @@ tests = do [ ("insertedRows_AlbumId", API.mkColumnFieldValue $ J.Number 9002), ("insertedRows_Title", API.mkColumnFieldValue $ J.String "Accept This"), ( "insertedRows_Artist", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("ArtistId", API.mkColumnFieldValue $ J.Number 2), ("Name", API.mkColumnFieldValue $ J.String "Accept") ] @@ -171,76 +171,76 @@ tests = do let expectedRequest = emptyMutationRequest & API.mrTableRelationships - .~ Set.fromList - [ API.TableRelationships - { API._trelSourceTable = mkTableName "Album", - API._trelRelationships = - HashMap.fromList - [ ( API.RelationshipName "Artist", - API.Relationship - { API._rTargetTable = mkTableName "Artist", - API._rRelationshipType = API.ObjectRelationship, - API._rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")] - } - ) - ] - } - ] - & API.mrInsertSchema - .~ Set.fromList - [ API.TableInsertSchema - { API._tisTable = mkTableName "Album", - API._tisPrimaryKey = Just $ API.ColumnName "AlbumId" :| [], - API._tisFields = - mkFieldsMap - [ ("AlbumId", API.ColumnInsert $ API.ColumnInsertSchema (API.ColumnName "AlbumId") (API.ColumnTypeScalar $ API.ScalarType "number") False (Just API.AutoIncrement)), - ("ArtistId", API.ColumnInsert $ API.ColumnInsertSchema (API.ColumnName "ArtistId") (API.ColumnTypeScalar $ API.ScalarType "number") False Nothing), - ("Title", API.ColumnInsert $ API.ColumnInsertSchema (API.ColumnName "Title") (API.ColumnTypeScalar $ API.ScalarType "string") False Nothing) - ] - } - ] - & API.mrOperations - .~ [ API.InsertOperation $ - API.InsertMutationOperation - { API._imoTable = mkTableName "Album", - API._imoRows = - [ API.RowObject $ - mkFieldsMap - [ ("AlbumId", API.mkColumnInsertFieldValue $ J.Number 9001), - ("ArtistId", API.mkColumnInsertFieldValue $ J.Number 2), - ("Title", API.mkColumnInsertFieldValue $ J.String "Super Mega Rock") - ], - API.RowObject $ - mkFieldsMap - [ ("AlbumId", API.mkColumnInsertFieldValue $ J.Number 9002), - ("ArtistId", API.mkColumnInsertFieldValue $ J.Number 2), - ("Title", API.mkColumnInsertFieldValue $ J.String "Accept This") - ] - ], - API._imoPostInsertCheck = - Just $ - API.ApplyBinaryComparisonOperator - API.Equal - (API.ComparisonColumn API.CurrentTable (API.ColumnName "ArtistId") $ API.ScalarType "number") - (API.ScalarValueComparison $ API.ScalarValue (J.Number 2) (API.ScalarType "number")), - API._imoReturningFields = - mkFieldsMap - [ ("insertedRows_AlbumId", API.ColumnField (API.ColumnName "AlbumId") (API.ScalarType "number")), - ("insertedRows_Title", 
API.ColumnField (API.ColumnName "Title") (API.ScalarType "string")), - ( "insertedRows_Artist", - API.RelField - ( API.RelationshipField - (API.RelationshipName "Artist") - ( emptyQuery - & API.qFields - ?~ mkFieldsMap - [ ("ArtistId", API.ColumnField (API.ColumnName "ArtistId") $ API.ScalarType "number"), - ("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string") - ] - ) - ) - ) - ] - } - ] + .~ Set.fromList + [ API.TableRelationships + { API._trelSourceTable = mkTableName "Album", + API._trelRelationships = + HashMap.fromList + [ ( API.RelationshipName "Artist", + API.Relationship + { API._rTargetTable = mkTableName "Artist", + API._rRelationshipType = API.ObjectRelationship, + API._rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")] + } + ) + ] + } + ] + & API.mrInsertSchema + .~ Set.fromList + [ API.TableInsertSchema + { API._tisTable = mkTableName "Album", + API._tisPrimaryKey = Just $ API.ColumnName "AlbumId" :| [], + API._tisFields = + mkFieldsMap + [ ("AlbumId", API.ColumnInsert $ API.ColumnInsertSchema (API.ColumnName "AlbumId") (API.ColumnTypeScalar $ API.ScalarType "number") False (Just API.AutoIncrement)), + ("ArtistId", API.ColumnInsert $ API.ColumnInsertSchema (API.ColumnName "ArtistId") (API.ColumnTypeScalar $ API.ScalarType "number") False Nothing), + ("Title", API.ColumnInsert $ API.ColumnInsertSchema (API.ColumnName "Title") (API.ColumnTypeScalar $ API.ScalarType "string") False Nothing) + ] + } + ] + & API.mrOperations + .~ [ API.InsertOperation + $ API.InsertMutationOperation + { API._imoTable = mkTableName "Album", + API._imoRows = + [ API.RowObject + $ mkFieldsMap + [ ("AlbumId", API.mkColumnInsertFieldValue $ J.Number 9001), + ("ArtistId", API.mkColumnInsertFieldValue $ J.Number 2), + ("Title", API.mkColumnInsertFieldValue $ J.String "Super Mega Rock") + ], + API.RowObject + $ mkFieldsMap + [ ("AlbumId", API.mkColumnInsertFieldValue $ J.Number 9002), + ("ArtistId", API.mkColumnInsertFieldValue $ J.Number 2), + ("Title", API.mkColumnInsertFieldValue $ J.String "Accept This") + ] + ], + API._imoPostInsertCheck = + Just + $ API.ApplyBinaryComparisonOperator + API.Equal + (API.ComparisonColumn API.CurrentTable (API.ColumnName "ArtistId") $ API.ScalarType "number") + (API.ScalarValueComparison $ API.ScalarValue (J.Number 2) (API.ScalarType "number")), + API._imoReturningFields = + mkFieldsMap + [ ("insertedRows_AlbumId", API.ColumnField (API.ColumnName "AlbumId") (API.ScalarType "number")), + ("insertedRows_Title", API.ColumnField (API.ColumnName "Title") (API.ScalarType "string")), + ( "insertedRows_Artist", + API.RelField + ( API.RelationshipField + (API.RelationshipName "Artist") + ( emptyQuery + & API.qFields + ?~ mkFieldsMap + [ ("ArtistId", API.ColumnField (API.ColumnName "ArtistId") $ API.ScalarType "number"), + ("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string") + ] + ) + ) + ) + ] + } + ] _mrrRecordedRequest `shouldBe` Just (Mutation expectedRequest) diff --git a/server/lib/api-tests/src/Test/DataConnector/MockAgent/OrderBySpec.hs b/server/lib/api-tests/src/Test/DataConnector/MockAgent/OrderBySpec.hs index 193cb32dc7d..5409446963b 100644 --- a/server/lib/api-tests/src/Test/DataConnector/MockAgent/OrderBySpec.hs +++ b/server/lib/api-tests/src/Test/DataConnector/MockAgent/OrderBySpec.hs @@ -113,17 +113,19 @@ tests = describe "Order By Tests" $ do _mrrRecordedRequest `shouldBe` Just - ( Query $ - mkTableRequest + ( Query + $ mkTableRequest (mkTableName "Album") ( emptyQuery & 
API.qFields - ?~ mkFieldsMap - [ ("AlbumId", API.ColumnField (API.ColumnName "AlbumId") $ API.ScalarType "number"), - ("Title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string") - ] - & API.qLimit ?~ 3 - & API.qOrderBy ?~ API.OrderBy mempty (API.OrderByElement [] (API.OrderByColumn (API.ColumnName "AlbumId")) API.Ascending :| []) + ?~ mkFieldsMap + [ ("AlbumId", API.ColumnField (API.ColumnName "AlbumId") $ API.ScalarType "number"), + ("Title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string") + ] + & API.qLimit + ?~ 3 + & API.qOrderBy + ?~ API.OrderBy mempty (API.OrderByElement [] (API.OrderByColumn (API.ColumnName "AlbumId")) API.Ascending :| []) ) ) @@ -156,51 +158,53 @@ tests = describe "Order By Tests" $ do _mrrRecordedRequest `shouldBe` Just - ( Query $ - mkTableRequest + ( Query + $ mkTableRequest (mkTableName "Artist") ( emptyQuery - & API.qFields ?~ mkFieldsMap [("Name", API.ColumnField (API.ColumnName "Name") (API.ScalarType "string"))] - & API.qLimit ?~ 2 - & API.qOrderBy - ?~ API.OrderBy - ( HashMap.fromList + & API.qFields + ?~ mkFieldsMap [("Name", API.ColumnField (API.ColumnName "Name") (API.ScalarType "string"))] + & API.qLimit + ?~ 2 + & API.qOrderBy + ?~ API.OrderBy + ( HashMap.fromList + [ ( API.RelationshipName "Albums", + API.OrderByRelation Nothing mempty + ) + ] + ) + ( NE.fromList + [ API.OrderByElement [API.RelationshipName "Albums"] API.OrderByStarCountAggregate API.Ascending, + API.OrderByElement + [API.RelationshipName "Albums"] + ( API.OrderBySingleColumnAggregate + $ API.SingleColumnAggregate + (API.SingleColumnAggregateFunction [G.name|max|]) + (API.ColumnName "AlbumId") + (API.ScalarType "number") + ) + API.Ascending + ] + ) + ) + & API.qrRelationships + .~ Set.fromList + [ API.RTable + API.TableRelationships + { _trelSourceTable = mkTableName "Artist", + _trelRelationships = + HashMap.fromList [ ( API.RelationshipName "Albums", - API.OrderByRelation Nothing mempty + API.Relationship + { _rTargetTable = mkTableName "Album", + _rRelationshipType = API.ArrayRelationship, + _rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")] + } ) ] - ) - ( NE.fromList - [ API.OrderByElement [API.RelationshipName "Albums"] API.OrderByStarCountAggregate API.Ascending, - API.OrderByElement - [API.RelationshipName "Albums"] - ( API.OrderBySingleColumnAggregate $ - API.SingleColumnAggregate - (API.SingleColumnAggregateFunction [G.name|max|]) - (API.ColumnName "AlbumId") - (API.ScalarType "number") - ) - API.Ascending - ] - ) - ) - & API.qrRelationships - .~ Set.fromList - [ API.RTable - API.TableRelationships - { _trelSourceTable = mkTableName "Artist", - _trelRelationships = - HashMap.fromList - [ ( API.RelationshipName "Albums", - API.Relationship - { _rTargetTable = mkTableName "Album", - _rRelationshipType = API.ArrayRelationship, - _rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")] - } - ) - ] - } - ] + } + ] ) rowsResponse :: [[(API.FieldName, API.FieldValue)]] -> API.QueryResponse diff --git a/server/lib/api-tests/src/Test/DataConnector/MockAgent/QueryRelationshipsSpec.hs b/server/lib/api-tests/src/Test/DataConnector/MockAgent/QueryRelationshipsSpec.hs index efaf096ee6e..0b93a990190 100644 --- a/server/lib/api-tests/src/Test/DataConnector/MockAgent/QueryRelationshipsSpec.hs +++ b/server/lib/api-tests/src/Test/DataConnector/MockAgent/QueryRelationshipsSpec.hs @@ -162,14 +162,14 @@ tests = describe "Object Relationships Tests" $ do mkRowsQueryResponse [ [ 
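-- Editor's note (illustrative sketch, not part of this PR): the expected
-- requests above thread a default value through lens setters with (&).
-- (&) binds looser than (.~) and (?~), so the new leading-operator layout
-- groups exactly as the old trailing one did. A toy analogue using tuple
-- lenses from Control.Lens in place of the API.Query fields:
import Control.Lens ((&), (.~), (?~), _1, _2)

demoQuery :: (Int, Maybe Int)
demoQuery =
  (0, Nothing)
    & _1
    .~ 42 -- like (& API.qrRelationships .~ ...): set a field directly
    & _2
    ?~ 3 -- like (& API.qLimit ?~ ...): set a Maybe field to Just 3
-- demoQuery == (42, Just 3)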
("Name", API.mkColumnFieldValue $ J.String "For Those About To Rock (We Salute You)"), ( "Genre", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [("Name", API.mkColumnFieldValue $ J.String "Rock")] ] ), ( "MediaType", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [("Name", API.mkColumnFieldValue $ J.String "MPEG audio file")] ] ) @@ -192,56 +192,57 @@ tests = describe "Object Relationships Tests" $ do _mrrRecordedRequest `shouldBe` Just - ( Query $ - mkTableRequest + ( Query + $ mkTableRequest (mkTableName "Track") ( emptyQuery & API.qFields - ?~ mkFieldsMap - [ ("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string"), - ( "Genre", - API.RelField - ( API.RelationshipField - (API.RelationshipName "Genre") - (emptyQuery & API.qFields ?~ mkFieldsMap [("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string")]) - ) - ), - ( "MediaType", - API.RelField - ( API.RelationshipField - (API.RelationshipName "MediaType") - (emptyQuery & API.qFields ?~ mkFieldsMap [("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string")]) - ) - ) - ] - & API.qLimit ?~ 1 + ?~ mkFieldsMap + [ ("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string"), + ( "Genre", + API.RelField + ( API.RelationshipField + (API.RelationshipName "Genre") + (emptyQuery & API.qFields ?~ mkFieldsMap [("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string")]) + ) + ), + ( "MediaType", + API.RelField + ( API.RelationshipField + (API.RelationshipName "MediaType") + (emptyQuery & API.qFields ?~ mkFieldsMap [("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string")]) + ) + ) + ] + & API.qLimit + ?~ 1 ) - & API.qrRelationships - .~ Set.fromList - [ API.RTable - API.TableRelationships - { _trelSourceTable = mkTableName "Track", - _trelRelationships = - HashMap.fromList - [ ( API.RelationshipName "Genre", - API.Relationship - { _rTargetTable = mkTableName "Genre", - _rRelationshipType = API.ObjectRelationship, - _rColumnMapping = HashMap.fromList [(API.ColumnName "GenreId", API.ColumnName "GenreId")] - } - ), - ( API.RelationshipName "MediaType", - API.Relationship - { _rTargetTable = mkTableName "MediaType", - _rRelationshipType = API.ObjectRelationship, - _rColumnMapping = - HashMap.fromList - [(API.ColumnName "MediaTypeId", API.ColumnName "MediaTypeId")] - } - ) - ] - } - ] + & API.qrRelationships + .~ Set.fromList + [ API.RTable + API.TableRelationships + { _trelSourceTable = mkTableName "Track", + _trelRelationships = + HashMap.fromList + [ ( API.RelationshipName "Genre", + API.Relationship + { _rTargetTable = mkTableName "Genre", + _rRelationshipType = API.ObjectRelationship, + _rColumnMapping = HashMap.fromList [(API.ColumnName "GenreId", API.ColumnName "GenreId")] + } + ), + ( API.RelationshipName "MediaType", + API.Relationship + { _rTargetTable = mkTableName "MediaType", + _rRelationshipType = API.ObjectRelationship, + _rColumnMapping = + HashMap.fromList + [(API.ColumnName "MediaTypeId", API.ColumnName "MediaTypeId")] + } + ) + ] + } + ] ) mockAgentGraphqlTest "works with an order by that navigates relationships" $ \_testEnv performGraphqlRequest -> do @@ -262,11 +263,11 @@ tests = describe "Object Relationships Tests" $ do let queryResponse = mkRowsQueryResponse [ [ ( "Album", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ( "Artist", 
- API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [[("Name", API.mkColumnFieldValue $ J.String "Zeca Pagodinho")]] ) ] @@ -291,87 +292,88 @@ tests = describe "Object Relationships Tests" $ do _mrrRecordedRequest `shouldBe` Just - ( Query $ - mkTableRequest + ( Query + $ mkTableRequest (mkTableName "Track") ( emptyQuery & API.qFields - ?~ mkFieldsMap - [ ("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string"), - ( "Album", - API.RelField - ( API.RelationshipField - (API.RelationshipName "Album") - ( emptyQuery - & API.qFields - ?~ mkFieldsMap - [ ( "Artist", - API.RelField - ( API.RelationshipField - (API.RelationshipName "Artist") - (emptyQuery & API.qFields ?~ mkFieldsMap [("Name", API.ColumnField (API.ColumnName "Name") (API.ScalarType "string"))]) - ) + ?~ mkFieldsMap + [ ("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string"), + ( "Album", + API.RelField + ( API.RelationshipField + (API.RelationshipName "Album") + ( emptyQuery + & API.qFields + ?~ mkFieldsMap + [ ( "Artist", + API.RelField + ( API.RelationshipField + (API.RelationshipName "Artist") + (emptyQuery & API.qFields ?~ mkFieldsMap [("Name", API.ColumnField (API.ColumnName "Name") (API.ScalarType "string"))]) ) - ] - ) - ) - ) - ] - & API.qLimit ?~ 1 - & API.qOrderBy - ?~ API.OrderBy - ( HashMap.fromList - [ ( API.RelationshipName "Album", - API.OrderByRelation - Nothing - ( HashMap.fromList - [ ( API.RelationshipName "Artist", - API.OrderByRelation - Nothing - mempty ) ] - ) + ) + ) + ) + ] + & API.qLimit + ?~ 1 + & API.qOrderBy + ?~ API.OrderBy + ( HashMap.fromList + [ ( API.RelationshipName "Album", + API.OrderByRelation + Nothing + ( HashMap.fromList + [ ( API.RelationshipName "Artist", + API.OrderByRelation + Nothing + mempty + ) + ] + ) + ) + ] + ) + ( NE.fromList + [ API.OrderByElement [API.RelationshipName "Album", API.RelationshipName "Artist"] (API.OrderByColumn (API.ColumnName "Name")) API.Descending, + API.OrderByElement [] (API.OrderByColumn (API.ColumnName "Name")) API.Ascending + ] + ) + ) + & API.qrRelationships + .~ Set.fromList + [ API.RTable + API.TableRelationships + { _trelSourceTable = mkTableName "Track", + _trelRelationships = + HashMap.fromList + [ ( API.RelationshipName "Album", + API.Relationship + { _rTargetTable = mkTableName "Album", + _rRelationshipType = API.ObjectRelationship, + _rColumnMapping = HashMap.fromList [(API.ColumnName "AlbumId", API.ColumnName "AlbumId")] + } ) ] - ) - ( NE.fromList - [ API.OrderByElement [API.RelationshipName "Album", API.RelationshipName "Artist"] (API.OrderByColumn (API.ColumnName "Name")) API.Descending, - API.OrderByElement [] (API.OrderByColumn (API.ColumnName "Name")) API.Ascending + }, + API.RTable + API.TableRelationships + { _trelSourceTable = mkTableName "Album", + _trelRelationships = + HashMap.fromList + [ ( API.RelationshipName "Artist", + API.Relationship + { _rTargetTable = mkTableName "Artist", + _rRelationshipType = API.ObjectRelationship, + _rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")] + } + ) ] - ) - ) - & API.qrRelationships - .~ Set.fromList - [ API.RTable - API.TableRelationships - { _trelSourceTable = mkTableName "Track", - _trelRelationships = - HashMap.fromList - [ ( API.RelationshipName "Album", - API.Relationship - { _rTargetTable = mkTableName "Album", - _rRelationshipType = API.ObjectRelationship, - _rColumnMapping = HashMap.fromList [(API.ColumnName "AlbumId", API.ColumnName "AlbumId")] - } - 
) - ] - }, - API.RTable - API.TableRelationships - { _trelSourceTable = mkTableName "Album", - _trelRelationships = - HashMap.fromList - [ ( API.RelationshipName "Artist", - API.Relationship - { _rTargetTable = mkTableName "Artist", - _rRelationshipType = API.ObjectRelationship, - _rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")] - } - ) - ] - } - ] + } + ] ) mockAgentGraphqlTest "works with an order by that navigates a relationship with table permissions" $ \_testEnv performGraphqlRequest -> do @@ -402,69 +404,71 @@ tests = describe "Object Relationships Tests" $ do _mrrRecordedRequest `shouldBe` Just - ( Query $ - mkTableRequest + ( Query + $ mkTableRequest (mkTableName "Employee") ( emptyQuery - & API.qFields ?~ mkFieldsMap [("EmployeeId", API.ColumnField (API.ColumnName "EmployeeId") $ API.ScalarType "number")] - & API.qLimit ?~ 1 - & API.qWhere - ?~ API.Exists - (API.RelatedTable $ API.RelationshipName "SupportRepForCustomers") - ( API.ApplyBinaryComparisonOperator - API.Equal - (API.ComparisonColumn API.CurrentTable (API.ColumnName "Country") $ API.ScalarType "string") - (API.AnotherColumnComparison (API.ComparisonColumn API.QueryTable (API.ColumnName "Country") $ API.ScalarType "string")) - ) - & API.qOrderBy - ?~ API.OrderBy - ( HashMap.fromList - [ ( API.RelationshipName "SupportRepForCustomers", - API.OrderByRelation - ( Just $ - API.Exists (API.RelatedTable $ API.RelationshipName "SupportRep") $ - API.ApplyBinaryComparisonOperator - API.Equal - (API.ComparisonColumn API.CurrentTable (API.ColumnName "Country") $ API.ScalarType "string") - (API.AnotherColumnComparison (API.ComparisonColumn API.QueryTable (API.ColumnName "Country") $ API.ScalarType "string")) - ) - mempty + & API.qFields + ?~ mkFieldsMap [("EmployeeId", API.ColumnField (API.ColumnName "EmployeeId") $ API.ScalarType "number")] + & API.qLimit + ?~ 1 + & API.qWhere + ?~ API.Exists + (API.RelatedTable $ API.RelationshipName "SupportRepForCustomers") + ( API.ApplyBinaryComparisonOperator + API.Equal + (API.ComparisonColumn API.CurrentTable (API.ColumnName "Country") $ API.ScalarType "string") + (API.AnotherColumnComparison (API.ComparisonColumn API.QueryTable (API.ColumnName "Country") $ API.ScalarType "string")) + ) + & API.qOrderBy + ?~ API.OrderBy + ( HashMap.fromList + [ ( API.RelationshipName "SupportRepForCustomers", + API.OrderByRelation + ( Just + $ API.Exists (API.RelatedTable $ API.RelationshipName "SupportRep") + $ API.ApplyBinaryComparisonOperator + API.Equal + (API.ComparisonColumn API.CurrentTable (API.ColumnName "Country") $ API.ScalarType "string") + (API.AnotherColumnComparison (API.ComparisonColumn API.QueryTable (API.ColumnName "Country") $ API.ScalarType "string")) + ) + mempty + ) + ] + ) + (API.OrderByElement [API.RelationshipName "SupportRepForCustomers"] API.OrderByStarCountAggregate API.Descending :| []) + ) + & API.qrRelationships + .~ Set.fromList + [ API.RTable + API.TableRelationships + { _trelSourceTable = mkTableName "Customer", + _trelRelationships = + HashMap.fromList + [ ( API.RelationshipName "SupportRep", + API.Relationship + { _rTargetTable = mkTableName "Employee", + _rRelationshipType = API.ObjectRelationship, + _rColumnMapping = HashMap.fromList [(API.ColumnName "SupportRepId", API.ColumnName "EmployeeId")] + } ) ] - ) - (API.OrderByElement [API.RelationshipName "SupportRepForCustomers"] API.OrderByStarCountAggregate API.Descending :| []) - ) - & API.qrRelationships - .~ Set.fromList - [ API.RTable - API.TableRelationships - { 
_trelSourceTable = mkTableName "Customer", - _trelRelationships = - HashMap.fromList - [ ( API.RelationshipName "SupportRep", - API.Relationship - { _rTargetTable = mkTableName "Employee", - _rRelationshipType = API.ObjectRelationship, - _rColumnMapping = HashMap.fromList [(API.ColumnName "SupportRepId", API.ColumnName "EmployeeId")] - } - ) - ] - }, - API.RTable - API.TableRelationships - { _trelSourceTable = mkTableName "Employee", - _trelRelationships = - HashMap.fromList - [ ( API.RelationshipName "SupportRepForCustomers", - API.Relationship - { _rTargetTable = mkTableName "Customer", - _rRelationshipType = API.ArrayRelationship, - _rColumnMapping = HashMap.fromList [(API.ColumnName "EmployeeId", API.ColumnName "SupportRepId")] - } - ) - ] - } - ] + }, + API.RTable + API.TableRelationships + { _trelSourceTable = mkTableName "Employee", + _trelRelationships = + HashMap.fromList + [ ( API.RelationshipName "SupportRepForCustomers", + API.Relationship + { _rTargetTable = mkTableName "Customer", + _rRelationshipType = API.ArrayRelationship, + _rColumnMapping = HashMap.fromList [(API.ColumnName "EmployeeId", API.ColumnName "SupportRepId")] + } + ) + ] + } + ] ) -------------------------------------------------------------------------------- @@ -474,7 +478,9 @@ noRelationshipsCapabilityMockConfig = Mock.chinookMock { Mock._capabilitiesResponse = Mock._capabilitiesResponse Mock.chinookMock - & API.crCapabilities . API.cRelationships .~ Nothing -- Remove relationships capability + & API.crCapabilities + . API.cRelationships + .~ Nothing -- Remove relationships capability } noRelationshipsCapabilitySourceMetadata :: J.Value diff --git a/server/lib/api-tests/src/Test/DataConnector/MockAgent/RemoteRelationshipsSpec.hs b/server/lib/api-tests/src/Test/DataConnector/MockAgent/RemoteRelationshipsSpec.hs index 67fbabbabeb..53058784f11 100644 --- a/server/lib/api-tests/src/Test/DataConnector/MockAgent/RemoteRelationshipsSpec.hs +++ b/server/lib/api-tests/src/Test/DataConnector/MockAgent/RemoteRelationshipsSpec.hs @@ -111,7 +111,7 @@ postgresTables = pgSourceName :: String pgSourceName = "pg_source" -setupPostgres :: HasCallStack => TestEnvironment -> IO () +setupPostgres :: (HasCallStack) => TestEnvironment -> IO () setupPostgres testEnv = do let sourceConfig = Postgres.defaultSourceConfiguration testEnv schemaName = Schema.getSchemaName testEnv @@ -142,7 +142,7 @@ setupPostgres testEnv = do name: #{tableName table} |] -registerRemoteRelationships :: HasCallStack => TestEnvironment -> IO () +registerRemoteRelationships :: (HasCallStack) => TestEnvironment -> IO () registerRemoteRelationships testEnv = do let mockAgentSourceName = BackendType.backendSourceName Mock.backendTypeMetadata schemaName = Schema.getSchemaName testEnv @@ -214,8 +214,8 @@ tests = do let queryResponse = mkRowsQueryResponse [ [ ( "query", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("AlbumId", API.mkColumnFieldValue $ J.Number 1), ("Title", API.mkColumnFieldValue $ J.String "For Those About To Rock We Salute You") ], @@ -226,8 +226,8 @@ tests = do ) ], [ ( "query", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("AlbumId", API.mkColumnFieldValue $ J.Number 2), ("Title", API.mkColumnFieldValue $ J.String "Balls to the Wall") ], @@ -264,22 +264,22 @@ tests = do _mrrRecordedRequest `shouldBe` Just - ( Query $ - mkTableRequest + ( Query + $ mkTableRequest (mkTableName "Album") ( emptyQuery & 
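-- Editor's note (illustrative sketch, not part of this PR): the signature
-- change from `HasCallStack =>` to `(HasCallStack) =>` in this file is also
-- purely syntactic. GHC accepts a single-constraint context with or without
-- parentheses; the newer formatter simply always parenthesises it:
import GHC.Stack (HasCallStack)

bare :: HasCallStack => Int -> Int
bare = succ

parenthesised :: (HasCallStack) => Int -> Int
parenthesised = succ
-- Both signatures denote the same type; only the rendering differs.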
API.qFields - ?~ mkFieldsMap - [ ("AlbumId", API.ColumnField (API.ColumnName "AlbumId") $ API.ScalarType "number"), - ("Title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string") - ] + ?~ mkFieldsMap + [ ("AlbumId", API.ColumnField (API.ColumnName "AlbumId") $ API.ScalarType "number"), + ("Title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string") + ] ) - & API._QRTable - . API.trForeach - ?~ NonEmpty.fromList - [ HashMap.fromList [(API.ColumnName "ArtistId", API.ScalarValue (J.Number 1) (API.ScalarType "number"))], - HashMap.fromList [(API.ColumnName "ArtistId", API.ScalarValue (J.Number 2) (API.ScalarType "number"))] - ] + & API._QRTable + . API.trForeach + ?~ NonEmpty.fromList + [ HashMap.fromList [(API.ColumnName "ArtistId", API.ScalarValue (J.Number 1) (API.ScalarType "number"))], + HashMap.fromList [(API.ColumnName "ArtistId", API.ScalarValue (J.Number 2) (API.ScalarType "number"))] + ] ) mockAgentGraphqlTest "can act as the target of a remote object relationship" $ \testEnv performGraphqlRequest -> do @@ -301,8 +301,8 @@ tests = do let queryResponse = mkRowsQueryResponse [ [ ( "query", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("AlbumId", API.mkColumnFieldValue $ J.Number 3), ("Title", API.mkColumnFieldValue $ J.String "Restless and Wild") ] @@ -310,8 +310,8 @@ tests = do ) ], [ ( "query", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("AlbumId", API.mkColumnFieldValue $ J.Number 1), ("Title", API.mkColumnFieldValue $ J.String "For Those About To Rock We Salute You") ] @@ -319,8 +319,8 @@ tests = do ) ], [ ( "query", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("AlbumId", API.mkColumnFieldValue $ J.Number 4), ("Title", API.mkColumnFieldValue $ J.String "Let There Be Rock") ] @@ -355,23 +355,23 @@ tests = do _mrrRecordedRequest `shouldBe` Just - ( Query $ - mkTableRequest + ( Query + $ mkTableRequest (mkTableName "Album") ( emptyQuery & API.qFields - ?~ mkFieldsMap - [ ("AlbumId", API.ColumnField (API.ColumnName "AlbumId") $ API.ScalarType "number"), - ("Title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string") - ] + ?~ mkFieldsMap + [ ("AlbumId", API.ColumnField (API.ColumnName "AlbumId") $ API.ScalarType "number"), + ("Title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string") + ] ) - & API._QRTable - . API.trForeach - ?~ NonEmpty.fromList - [ HashMap.fromList [(API.ColumnName "AlbumId", API.ScalarValue (J.Number 3) (API.ScalarType "number"))], - HashMap.fromList [(API.ColumnName "AlbumId", API.ScalarValue (J.Number 1) (API.ScalarType "number"))], - HashMap.fromList [(API.ColumnName "AlbumId", API.ScalarValue (J.Number 4) (API.ScalarType "number"))] - ] + & API._QRTable + . 
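-- Editor's note (illustrative sketch, not part of this PR): the chain
--   & API._QRTable . API.trForeach ?~ ...
-- composes two optics before applying the setter; (.) binds tighter than
-- (?~), which binds tighter than (&), so the leading-dot layout parses the
-- same as the old one. A toy analogue composing _Just with _1:
import Control.Lens ((&), (?~), _1, _Just)

demoForeach :: Maybe (Maybe Int, String)
demoForeach =
  Just (Nothing, "x")
    & _Just
    . _1
    ?~ 7
-- demoForeach == Just (Just 7, "x")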
API.trForeach + ?~ NonEmpty.fromList + [ HashMap.fromList [(API.ColumnName "AlbumId", API.ScalarValue (J.Number 3) (API.ScalarType "number"))], + HashMap.fromList [(API.ColumnName "AlbumId", API.ScalarValue (J.Number 1) (API.ScalarType "number"))], + HashMap.fromList [(API.ColumnName "AlbumId", API.ScalarValue (J.Number 4) (API.ScalarType "number"))] + ] ) mockAgentGraphqlTest "can act as the target of an aggregation over a remote array relationship" $ \testEnv performGraphqlRequest -> do @@ -398,8 +398,8 @@ tests = do let queryResponse = mkRowsQueryResponse [ [ ( "query", - API.mkRelationshipFieldValue $ - mkQueryResponse + API.mkRelationshipFieldValue + $ mkQueryResponse [ [ ("nodes_AlbumId", API.mkColumnFieldValue $ J.Number 1), ("nodes_Title", API.mkColumnFieldValue $ J.String "For Those About To Rock We Salute You") ], @@ -412,8 +412,8 @@ tests = do ) ], [ ( "query", - API.mkRelationshipFieldValue $ - mkQueryResponse + API.mkRelationshipFieldValue + $ mkQueryResponse [ [ ("nodes_AlbumId", API.mkColumnFieldValue $ J.Number 2), ("nodes_Title", API.mkColumnFieldValue $ J.String "Balls to the Wall") ], @@ -458,23 +458,24 @@ tests = do _mrrRecordedRequest `shouldBe` Just - ( Query $ - mkTableRequest + ( Query + $ mkTableRequest (mkTableName "Album") ( emptyQuery & API.qFields - ?~ mkFieldsMap - [ ("nodes_AlbumId", API.ColumnField (API.ColumnName "AlbumId") $ API.ScalarType "number"), - ("nodes_Title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string") - ] - & API.qAggregates ?~ mkFieldsMap [("aggregate_count", API.StarCount)] + ?~ mkFieldsMap + [ ("nodes_AlbumId", API.ColumnField (API.ColumnName "AlbumId") $ API.ScalarType "number"), + ("nodes_Title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string") + ] + & API.qAggregates + ?~ mkFieldsMap [("aggregate_count", API.StarCount)] ) - & API._QRTable - . API.trForeach - ?~ NonEmpty.fromList - [ HashMap.fromList [(API.ColumnName "ArtistId", API.ScalarValue (J.Number 1) (API.ScalarType "number"))], - HashMap.fromList [(API.ColumnName "ArtistId", API.ScalarValue (J.Number 2) (API.ScalarType "number"))] - ] + & API._QRTable + . 
API.trForeach + ?~ NonEmpty.fromList + [ HashMap.fromList [(API.ColumnName "ArtistId", API.ScalarValue (J.Number 1) (API.ScalarType "number"))], + HashMap.fromList [(API.ColumnName "ArtistId", API.ScalarValue (J.Number 2) (API.ScalarType "number"))] + ] ) errorTests :: SpecWith (TestEnvironment, Mock.MockAgentEnvironment) diff --git a/server/lib/api-tests/src/Test/DataConnector/MockAgent/TransformedConfigurationSpec.hs b/server/lib/api-tests/src/Test/DataConnector/MockAgent/TransformedConfigurationSpec.hs index f6ddde6aa9e..b4d84f8837b 100644 --- a/server/lib/api-tests/src/Test/DataConnector/MockAgent/TransformedConfigurationSpec.hs +++ b/server/lib/api-tests/src/Test/DataConnector/MockAgent/TransformedConfigurationSpec.hs @@ -133,16 +133,17 @@ tests = describe "Transformed Configuration Tests" $ do _mrrRecordedRequest `shouldBe` Just - ( Query $ - mkTableRequest + ( Query + $ mkTableRequest (mkTableName "Album") ( emptyQuery & API.qFields - ?~ mkFieldsMap - [ ("id", API.ColumnField (API.ColumnName "AlbumId") $ API.ScalarType "number"), - ("title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string") - ] - & API.qLimit ?~ 1 + ?~ mkFieldsMap + [ ("id", API.ColumnField (API.ColumnName "AlbumId") $ API.ScalarType "number"), + ("title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string") + ] + & API.qLimit + ?~ 1 ) ) diff --git a/server/lib/api-tests/src/Test/DataConnector/MockAgent/UpdateMutationsSpec.hs b/server/lib/api-tests/src/Test/DataConnector/MockAgent/UpdateMutationsSpec.hs index f8b9378f6c3..0456ab505c3 100644 --- a/server/lib/api-tests/src/Test/DataConnector/MockAgent/UpdateMutationsSpec.hs +++ b/server/lib/api-tests/src/Test/DataConnector/MockAgent/UpdateMutationsSpec.hs @@ -134,8 +134,8 @@ tests = do [ ("updatedRows_TrackId", API.mkColumnFieldValue $ J.Number 3), ("updatedRows_Name", API.mkColumnFieldValue $ J.String "Another Name"), ( "updatedRows_Genre", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("Name", API.mkColumnFieldValue $ J.String "Rock") ] ] @@ -145,8 +145,8 @@ tests = do [ ("updatedRows_TrackId", API.mkColumnFieldValue $ J.Number 4), ("updatedRows_Name", API.mkColumnFieldValue $ J.String "Another Name"), ( "updatedRows_Genre", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("Name", API.mkColumnFieldValue $ J.String "Rock") ] ] @@ -156,8 +156,8 @@ tests = do [ ("updatedRows_TrackId", API.mkColumnFieldValue $ J.Number 5), ("updatedRows_Name", API.mkColumnFieldValue $ J.String "Another Name"), ( "updatedRows_Genre", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("Name", API.mkColumnFieldValue $ J.String "Rock") ] ] @@ -193,78 +193,79 @@ tests = do let expectedRequest = emptyMutationRequest & API.mrTableRelationships - .~ Set.fromList - [ API.TableRelationships - { API._trelSourceTable = mkTableName "Track", - API._trelRelationships = - HashMap.fromList - [ ( API.RelationshipName "Genre", - API.Relationship - { API._rTargetTable = mkTableName "Genre", - API._rRelationshipType = API.ObjectRelationship, - API._rColumnMapping = HashMap.fromList [(API.ColumnName "GenreId", API.ColumnName "GenreId")] - } - ) - ] - } - ] - & API.mrOperations - .~ [ API.UpdateOperation $ - API.UpdateMutationOperation - { API._umoTable = mkTableName "Track", - API._umoUpdates = - Set.fromList - [ API.SetColumn $ - API.RowColumnOperatorValue - { API._rcovColumn = 
API.ColumnName "Name", - API._rcovValue = J.String "Another Name", - API._rcovValueType = API.ScalarType "string" - }, - API.CustomUpdateColumnOperator (API.UpdateColumnOperatorName [G.name|inc|]) $ - API.RowColumnOperatorValue - { API._rcovColumn = API.ColumnName "Milliseconds", - API._rcovValue = J.Number 1000, - API._rcovValueType = API.ScalarType "number" - }, - API.SetColumn $ - API.RowColumnOperatorValue - { API._rcovColumn = API.ColumnName "AlbumId", - API._rcovValue = J.Number 3, - API._rcovValueType = API.ScalarType "number" - } + .~ Set.fromList + [ API.TableRelationships + { API._trelSourceTable = mkTableName "Track", + API._trelRelationships = + HashMap.fromList + [ ( API.RelationshipName "Genre", + API.Relationship + { API._rTargetTable = mkTableName "Genre", + API._rRelationshipType = API.ObjectRelationship, + API._rColumnMapping = HashMap.fromList [(API.ColumnName "GenreId", API.ColumnName "GenreId")] + } + ) + ] + } + ] + & API.mrOperations + .~ [ API.UpdateOperation + $ API.UpdateMutationOperation + { API._umoTable = mkTableName "Track", + API._umoUpdates = + Set.fromList + [ API.SetColumn + $ API.RowColumnOperatorValue + { API._rcovColumn = API.ColumnName "Name", + API._rcovValue = J.String "Another Name", + API._rcovValueType = API.ScalarType "string" + }, + API.CustomUpdateColumnOperator (API.UpdateColumnOperatorName [G.name|inc|]) + $ API.RowColumnOperatorValue + { API._rcovColumn = API.ColumnName "Milliseconds", + API._rcovValue = J.Number 1000, + API._rcovValueType = API.ScalarType "number" + }, + API.SetColumn + $ API.RowColumnOperatorValue + { API._rcovColumn = API.ColumnName "AlbumId", + API._rcovValue = J.Number 3, + API._rcovValueType = API.ScalarType "number" + } + ], + API._umoWhere = + Just + . API.And + $ Set.fromList + [ API.ApplyBinaryComparisonOperator + API.Equal + (API.ComparisonColumn API.CurrentTable (API.ColumnName "AlbumId") $ API.ScalarType "number") + (API.ScalarValueComparison $ API.ScalarValue (J.Number 3) (API.ScalarType "number")), + API.ApplyBinaryComparisonOperator + API.Equal + (API.ComparisonColumn API.CurrentTable (API.ColumnName "GenreId") $ API.ScalarType "number") + (API.ScalarValueComparison $ API.ScalarValue (J.Number 1) (API.ScalarType "number")) ], - API._umoWhere = - Just . 
API.And $ - Set.fromList - [ API.ApplyBinaryComparisonOperator - API.Equal - (API.ComparisonColumn API.CurrentTable (API.ColumnName "AlbumId") $ API.ScalarType "number") - (API.ScalarValueComparison $ API.ScalarValue (J.Number 3) (API.ScalarType "number")), - API.ApplyBinaryComparisonOperator - API.Equal - (API.ComparisonColumn API.CurrentTable (API.ColumnName "GenreId") $ API.ScalarType "number") - (API.ScalarValueComparison $ API.ScalarValue (J.Number 1) (API.ScalarType "number")) - ], - API._umoPostUpdateCheck = - Just $ - API.ApplyBinaryComparisonOperator - API.GreaterThan - (API.ComparisonColumn API.CurrentTable (API.ColumnName "UnitPrice") $ API.ScalarType "number") - (API.ScalarValueComparison $ API.ScalarValue (J.Number 0) (API.ScalarType "number")), - API._umoReturningFields = - mkFieldsMap - [ ("updatedRows_TrackId", API.ColumnField (API.ColumnName "TrackId") (API.ScalarType "number")), - ("updatedRows_Name", API.ColumnField (API.ColumnName "Name") (API.ScalarType "string")), - ( "updatedRows_Genre", - API.RelField - ( API.RelationshipField - (API.RelationshipName "Genre") - (emptyQuery & API.qFields ?~ mkFieldsMap [("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string")]) - ) - ) - ] - } - ] + API._umoPostUpdateCheck = + Just + $ API.ApplyBinaryComparisonOperator + API.GreaterThan + (API.ComparisonColumn API.CurrentTable (API.ColumnName "UnitPrice") $ API.ScalarType "number") + (API.ScalarValueComparison $ API.ScalarValue (J.Number 0) (API.ScalarType "number")), + API._umoReturningFields = + mkFieldsMap + [ ("updatedRows_TrackId", API.ColumnField (API.ColumnName "TrackId") (API.ScalarType "number")), + ("updatedRows_Name", API.ColumnField (API.ColumnName "Name") (API.ScalarType "string")), + ( "updatedRows_Genre", + API.RelField + ( API.RelationshipField + (API.RelationshipName "Genre") + (emptyQuery & API.qFields ?~ mkFieldsMap [("Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string")]) + ) + ) + ] + } + ] _mrrRecordedRequest `shouldBe` Just (Mutation expectedRequest) mockAgentGraphqlTest "update_many rows with update permissions" $ \_testEnv performGraphqlRequest -> do @@ -297,8 +298,8 @@ tests = do [ ("updatedRows_TrackId", API.mkColumnFieldValue $ J.Number 3), ("updatedRows_Name", API.mkColumnFieldValue $ J.String "Another Name"), ( "updatedRows_Genre", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("Name", API.mkColumnFieldValue $ J.String "Rock") ] ] @@ -314,8 +315,8 @@ tests = do [ ("updatedRows_TrackId", API.mkColumnFieldValue $ J.Number 4), ("updatedRows_Name", API.mkColumnFieldValue $ J.String "Better Name"), ( "updatedRows_Genre", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("Name", API.mkColumnFieldValue $ J.String "Rock") ] ] @@ -325,8 +326,8 @@ tests = do [ ("updatedRows_TrackId", API.mkColumnFieldValue $ J.Number 5), ("updatedRows_Name", API.mkColumnFieldValue $ J.String "Better Name"), ( "updatedRows_Genre", - API.mkRelationshipFieldValue $ - mkRowsQueryResponse + API.mkRelationshipFieldValue + $ mkRowsQueryResponse [ [ ("Name", API.mkColumnFieldValue $ J.String "Rock") ] ] @@ -362,8 +363,8 @@ tests = do |] let sharedPostUpdateCheck = - Just $ - API.ApplyBinaryComparisonOperator + Just + $ API.ApplyBinaryComparisonOperator API.GreaterThan (API.ComparisonColumn API.CurrentTable (API.ColumnName "UnitPrice") $ API.ScalarType "number") (API.ScalarValueComparison $ API.ScalarValue (J.Number 0) 
(API.ScalarType "number")) @@ -382,99 +383,101 @@ tests = do let expectedRequest = emptyMutationRequest & API.mrTableRelationships - .~ Set.fromList - [ API.TableRelationships - { API._trelSourceTable = mkTableName "Track", - API._trelRelationships = - HashMap.fromList - [ ( API.RelationshipName "Genre", - API.Relationship - { API._rTargetTable = mkTableName "Genre", - API._rRelationshipType = API.ObjectRelationship, - API._rColumnMapping = HashMap.fromList [(API.ColumnName "GenreId", API.ColumnName "GenreId")] - } - ) - ] - } - ] - & API.mrOperations - .~ [ API.UpdateOperation $ - API.UpdateMutationOperation - { API._umoTable = mkTableName "Track", - API._umoUpdates = - Set.fromList - [ API.SetColumn $ - API.RowColumnOperatorValue - { API._rcovColumn = API.ColumnName "Name", - API._rcovValue = J.String "Another Name", - API._rcovValueType = API.ScalarType "string" - }, - API.CustomUpdateColumnOperator (API.UpdateColumnOperatorName [G.name|inc|]) $ - API.RowColumnOperatorValue - { API._rcovColumn = API.ColumnName "Milliseconds", - API._rcovValue = J.Number 1000, - API._rcovValueType = API.ScalarType "number" - }, - API.SetColumn $ - API.RowColumnOperatorValue - { API._rcovColumn = API.ColumnName "AlbumId", - API._rcovValue = J.Number 3, - API._rcovValueType = API.ScalarType "number" - } + .~ Set.fromList + [ API.TableRelationships + { API._trelSourceTable = mkTableName "Track", + API._trelRelationships = + HashMap.fromList + [ ( API.RelationshipName "Genre", + API.Relationship + { API._rTargetTable = mkTableName "Genre", + API._rRelationshipType = API.ObjectRelationship, + API._rColumnMapping = HashMap.fromList [(API.ColumnName "GenreId", API.ColumnName "GenreId")] + } + ) + ] + } + ] + & API.mrOperations + .~ [ API.UpdateOperation + $ API.UpdateMutationOperation + { API._umoTable = mkTableName "Track", + API._umoUpdates = + Set.fromList + [ API.SetColumn + $ API.RowColumnOperatorValue + { API._rcovColumn = API.ColumnName "Name", + API._rcovValue = J.String "Another Name", + API._rcovValueType = API.ScalarType "string" + }, + API.CustomUpdateColumnOperator (API.UpdateColumnOperatorName [G.name|inc|]) + $ API.RowColumnOperatorValue + { API._rcovColumn = API.ColumnName "Milliseconds", + API._rcovValue = J.Number 1000, + API._rcovValueType = API.ScalarType "number" + }, + API.SetColumn + $ API.RowColumnOperatorValue + { API._rcovColumn = API.ColumnName "AlbumId", + API._rcovValue = J.Number 3, + API._rcovValueType = API.ScalarType "number" + } + ], + API._umoWhere = + Just + . API.And + $ Set.fromList + [ API.ApplyBinaryComparisonOperator + API.Equal + (API.ComparisonColumn API.CurrentTable (API.ColumnName "AlbumId") $ API.ScalarType "number") + (API.ScalarValueComparison $ API.ScalarValue (J.Number 3) (API.ScalarType "number")), + API.ApplyBinaryComparisonOperator + API.Equal + (API.ComparisonColumn API.CurrentTable (API.ColumnName "TrackId") $ API.ScalarType "number") + (API.ScalarValueComparison $ API.ScalarValue (J.Number 3) (API.ScalarType "number")) ], - API._umoWhere = - Just . 
API.And $ - Set.fromList - [ API.ApplyBinaryComparisonOperator - API.Equal - (API.ComparisonColumn API.CurrentTable (API.ColumnName "AlbumId") $ API.ScalarType "number") - (API.ScalarValueComparison $ API.ScalarValue (J.Number 3) (API.ScalarType "number")), - API.ApplyBinaryComparisonOperator - API.Equal - (API.ComparisonColumn API.CurrentTable (API.ColumnName "TrackId") $ API.ScalarType "number") - (API.ScalarValueComparison $ API.ScalarValue (J.Number 3) (API.ScalarType "number")) - ], - API._umoPostUpdateCheck = sharedPostUpdateCheck, - API._umoReturningFields = sharedReturning - }, - API.UpdateOperation $ - API.UpdateMutationOperation - { API._umoTable = mkTableName "Track", - API._umoUpdates = - Set.fromList - [ API.SetColumn $ - API.RowColumnOperatorValue - { API._rcovColumn = API.ColumnName "Name", - API._rcovValue = J.String "Better Name", - API._rcovValueType = API.ScalarType "string" - }, - API.CustomUpdateColumnOperator (API.UpdateColumnOperatorName [G.name|inc|]) $ - API.RowColumnOperatorValue - { API._rcovColumn = API.ColumnName "UnitPrice", - API._rcovValue = J.Number 1, - API._rcovValueType = API.ScalarType "number" - }, - API.SetColumn $ - API.RowColumnOperatorValue - { API._rcovColumn = API.ColumnName "AlbumId", - API._rcovValue = J.Number 3, - API._rcovValueType = API.ScalarType "number" - } + API._umoPostUpdateCheck = sharedPostUpdateCheck, + API._umoReturningFields = sharedReturning + }, + API.UpdateOperation + $ API.UpdateMutationOperation + { API._umoTable = mkTableName "Track", + API._umoUpdates = + Set.fromList + [ API.SetColumn + $ API.RowColumnOperatorValue + { API._rcovColumn = API.ColumnName "Name", + API._rcovValue = J.String "Better Name", + API._rcovValueType = API.ScalarType "string" + }, + API.CustomUpdateColumnOperator (API.UpdateColumnOperatorName [G.name|inc|]) + $ API.RowColumnOperatorValue + { API._rcovColumn = API.ColumnName "UnitPrice", + API._rcovValue = J.Number 1, + API._rcovValueType = API.ScalarType "number" + }, + API.SetColumn + $ API.RowColumnOperatorValue + { API._rcovColumn = API.ColumnName "AlbumId", + API._rcovValue = J.Number 3, + API._rcovValueType = API.ScalarType "number" + } + ], + API._umoWhere = + Just + . API.And + $ Set.fromList + [ API.ApplyBinaryComparisonOperator + API.Equal + (API.ComparisonColumn API.CurrentTable (API.ColumnName "AlbumId") $ API.ScalarType "number") + (API.ScalarValueComparison $ API.ScalarValue (J.Number 3) (API.ScalarType "number")), + API.ApplyBinaryComparisonOperator + API.GreaterThan + (API.ComparisonColumn API.CurrentTable (API.ColumnName "TrackId") $ API.ScalarType "number") + (API.ScalarValueComparison $ API.ScalarValue (J.Number 3) (API.ScalarType "number")) ], - API._umoWhere = - Just . 
API.And $ - Set.fromList - [ API.ApplyBinaryComparisonOperator - API.Equal - (API.ComparisonColumn API.CurrentTable (API.ColumnName "AlbumId") $ API.ScalarType "number") - (API.ScalarValueComparison $ API.ScalarValue (J.Number 3) (API.ScalarType "number")), - API.ApplyBinaryComparisonOperator - API.GreaterThan - (API.ComparisonColumn API.CurrentTable (API.ColumnName "TrackId") $ API.ScalarType "number") - (API.ScalarValueComparison $ API.ScalarValue (J.Number 3) (API.ScalarType "number")) - ], - API._umoPostUpdateCheck = sharedPostUpdateCheck, - API._umoReturningFields = sharedReturning - } - ] + API._umoPostUpdateCheck = sharedPostUpdateCheck, + API._umoReturningFields = sharedReturning + } + ] _mrrRecordedRequest `shouldBe` Just (Mutation expectedRequest) diff --git a/server/lib/api-tests/src/Test/Databases/BigQuery/Schema/ComputedFields/TableSpec.hs b/server/lib/api-tests/src/Test/Databases/BigQuery/Schema/ComputedFields/TableSpec.hs index 94d7d570695..7fa27f88aa3 100644 --- a/server/lib/api-tests/src/Test/Databases/BigQuery/Schema/ComputedFields/TableSpec.hs +++ b/server/lib/api-tests/src/Test/Databases/BigQuery/Schema/ComputedFields/TableSpec.hs @@ -98,8 +98,8 @@ setupFunction testEnv = let schemaName = Schema.getSchemaName testEnv in [ Fixture.SetupAction { Fixture.setupAction = - BigQuery.run_ $ - [i| + BigQuery.run_ + $ [i| CREATE TABLE FUNCTION #{ unSchemaName schemaName }.fetch_articles_implicit_return(a_id INT64, search STRING) AS SELECT article_alias.* @@ -111,8 +111,8 @@ setupFunction testEnv = }, Fixture.SetupAction { Fixture.setupAction = - BigQuery.run_ $ - [i| + BigQuery.run_ + $ [i| CREATE TABLE FUNCTION #{ unSchemaName schemaName }.fetch_articles_explicit_return(a_id INT64, search STRING) RETURNS TABLE AS SELECT article_alias.id, article_alias.title, article_alias.content, article_alias.author_id diff --git a/server/lib/api-tests/src/Test/Databases/Postgres/JsonbSpec.hs b/server/lib/api-tests/src/Test/Databases/Postgres/JsonbSpec.hs index fa72a84f367..c79fba24726 100644 --- a/server/lib/api-tests/src/Test/Databases/Postgres/JsonbSpec.hs +++ b/server/lib/api-tests/src/Test/Databases/Postgres/JsonbSpec.hs @@ -49,8 +49,8 @@ spec = jsonType :: Schema.ScalarType jsonType = - Schema.TCustomType $ - Schema.defaultBackendScalarType + Schema.TCustomType + $ Schema.defaultBackendScalarType { Schema.bstPostgres = Just "JSON", Schema.bstCitus = Just "JSON", Schema.bstCockroach = Just "JSON" @@ -58,8 +58,8 @@ jsonType = jsonbType :: Schema.ScalarType jsonbType = - Schema.TCustomType $ - Schema.defaultBackendScalarType + Schema.TCustomType + $ Schema.defaultBackendScalarType { Schema.bstPostgres = Just "JSONB", Schema.bstCitus = Just "JSONB", Schema.bstCockroach = Just "JSONB" @@ -67,8 +67,8 @@ jsonbType = mkJsonValue :: Text -> Schema.ScalarValue mkJsonValue json = - Schema.VCustomValue $ - Schema.defaultBackendScalarValue + Schema.VCustomValue + $ Schema.defaultBackendScalarValue { Schema.bsvPostgres = Just (Schema.Quoted json), Schema.bsvCitus = Just (Schema.Quoted json), Schema.bsvCockroach = Just (Schema.Quoted json) diff --git a/server/lib/api-tests/src/Test/Databases/SQLServer/DefaultValues/OnConflictSpec.hs b/server/lib/api-tests/src/Test/Databases/SQLServer/DefaultValues/OnConflictSpec.hs index 735263c339e..26cfcf4aff3 100644 --- a/server/lib/api-tests/src/Test/Databases/SQLServer/DefaultValues/OnConflictSpec.hs +++ b/server/lib/api-tests/src/Test/Databases/SQLServer/DefaultValues/OnConflictSpec.hs @@ -70,8 +70,8 @@ schema = defaultDateTimeType :: Schema.ScalarType 
defaultDateTimeType = - Schema.TCustomType $ - Schema.defaultBackendScalarType + Schema.TCustomType + $ Schema.defaultBackendScalarType { Schema.bstMssql = Just "DATETIME DEFAULT GETDATE()", Schema.bstCitus = Just "TIMESTAMP DEFAULT NOW()", Schema.bstPostgres = Just "TIMESTAMP DEFAULT NOW()", diff --git a/server/lib/api-tests/src/Test/EventTriggers/EventTriggersSpecialCharactersSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/EventTriggersSpecialCharactersSpec.hs index ee5ee8b3dc3..299fa52befb 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/EventTriggersSpecialCharactersSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/EventTriggersSpecialCharactersSpec.hs @@ -82,8 +82,8 @@ schema = [dummyTable] tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) tests = describe "special characters of different languages in event trigger payload are encoded in UTF-8" do - it "check: inserting a new row invokes an event trigger" $ - \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do + it "check: inserting a new row invokes an event trigger" + $ \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do let backendTypeMetadata = fromMaybe (error "Expected a backend type but got nothing") $ getBackendTypeConfig testEnvironment sourceName = BackendType.backendSourceName backendTypeMetadata schemaName = Schema.getSchemaName testEnvironment @@ -166,8 +166,8 @@ dbSetup testEnvironment webhookServer = do -- Track table using custom_name for the special character column since GraphQL -- spec does not support special characters - GraphqlEngine.postMetadata_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [interpolateYaml| type: bulk args: - type: #{backendPrefix}_track_table diff --git a/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggerDropSourceCleanupSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggerDropSourceCleanupSpec.hs index dbb93376800..19320000e17 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggerDropSourceCleanupSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggerDropSourceCleanupSpec.hs @@ -75,8 +75,8 @@ authorsTable tableName = tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) tests = describe "dropping a source with event triggers should remove 'hdb_catalog' schema and the SQL triggers created on the table" do - it "check: inserting a new row invokes an event trigger" $ - \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do + it "check: inserting a new row invokes an event trigger" + $ \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment insertQuery = @@ -115,8 +115,8 @@ tests = eventPayload `shouldBeYaml` expectedEventPayload - it "drop source, check the table works as it was before event trigger was created on it" $ - \(testEnvironment, _) -> do + it "drop source, check the table works as it was before event trigger was created on it" + $ \(testEnvironment, _) -> do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment let dropSourceQuery = @@ -184,8 +184,8 @@ mssqlSetupWithEventTriggers testEnvironment webhookServer = do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment webhookServerEchoEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/echo" - GraphqlEngine.postMetadata_ testEnvironment $ - 
[interpolateYaml| + type: bulk args: - type: mssql_create_event_trigger diff --git a/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggerNextRetryAtTimezoneSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggerNextRetryAtTimezoneSpec.hs index 39bc15b7fc9..337a5dc11ac 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggerNextRetryAtTimezoneSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggerNextRetryAtTimezoneSpec.hs @@ -77,8 +77,8 @@ tests = -- The test checks that the event trigger retries as expected. In the test, we fire up the event trigger by adding a -- row to the table. We wait for a few seconds so the event has retried completely and then see if the number of -- retries is 2 (the event retries once) - it "check: the total number of tries is (number of retries + 1)" $ - \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do + it "check: the total number of tries is (number of retries + 1)" + $ \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment insertQuery = @@ -154,8 +154,8 @@ mssqlSetupWithEventTriggers testEnvironment webhookServer = do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment webhookServerNextRetryEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/nextRetry" - GraphqlEngine.postMetadata_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [interpolateYaml| type: bulk args: - type: mssql_create_event_trigger @@ -175,8 +175,8 @@ mssqlSetupWithEventTriggers testEnvironment webhookServer = do mssqlTeardown :: TestEnvironment -> IO () mssqlTeardown testEnvironment = do - GraphqlEngine.postMetadata_ testEnvironment $ - [yaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [yaml| type: bulk args: - type: mssql_delete_event_trigger diff --git a/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersForReplicationSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersForReplicationSpec.hs index 8ab932532b2..36d8c43217a 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersForReplicationSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersForReplicationSpec.hs @@ -83,8 +83,8 @@ articlesTable tableName = tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) tests = describe "verify trigger status when logical replication is used" do - it "verify trigger is enabled on logical replication" $ - \(testEnvironment, (webhookServer, (Webhook.EventsQueue _eventsQueue))) -> do + it "verify trigger is enabled on logical replication" + $ \(testEnvironment, (webhookServer, (Webhook.EventsQueue _eventsQueue))) -> do mssqlSetupWithEventTriggers testEnvironment webhookServer "True" let getTriggerInfoQuery = [interpolateYaml| @@ -116,8 +116,8 @@ tests = (GraphqlEngine.postV2Query 200 testEnvironment getTriggerInfoQuery) expectedResponseForEnablingTriggers - it "verify trigger is disabled on logical replication" $ - \(testEnvironment, (webhookServer, (Webhook.EventsQueue _eventsQueue))) -> do + it "verify trigger is disabled on logical replication" + $ \(testEnvironment, (webhookServer, (Webhook.EventsQueue _eventsQueue))) -> do mssqlSetupWithEventTriggers testEnvironment webhookServer "False" let getTriggerInfoQuery = [interpolateYaml| @@ -158,8 +158,8 @@ 
mssqlSetupWithEventTriggers testEnvironment webhookServer triggerOnReplication = let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment webhookServerEchoEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/echo" - GraphqlEngine.postMetadata_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [interpolateYaml| type: bulk args: - type: mssql_create_event_trigger @@ -181,8 +181,8 @@ mssqlSetupWithEventTriggers testEnvironment webhookServer triggerOnReplication = mssqlTeardown :: TestEnvironment -> IO () mssqlTeardown testEnvironment = do - GraphqlEngine.postMetadata_ testEnvironment $ - [yaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [yaml| type: mssql_delete_event_trigger args: name: author_trigger diff --git a/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersNameQuotingSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersNameQuotingSpec.hs index bb481a30fe4..2b7f3914703 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersNameQuotingSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersNameQuotingSpec.hs @@ -69,8 +69,8 @@ authorsTable tableName = tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) tests = describe "weird trigger names are allowed" do - it "metadata_api: allow creating an event trigger with weird name via replace_metadata" $ - \(testEnvironment, (webhookServer, _)) -> do + it "metadata_api: allow creating an event trigger with weird name via replace_metadata" + $ \(testEnvironment, (webhookServer, _)) -> do let createEventTriggerWithWeirdName = addEventTriggerViaReplaceMetadata testEnvironment "weird]name]" webhookServer createEventTriggerWithWeirdNameExpectedResponse = diff --git a/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersUniqueNameSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersUniqueNameSpec.hs index fccf0658336..7762a16a635 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersUniqueNameSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersUniqueNameSpec.hs @@ -87,8 +87,8 @@ articlesTable tableName = tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) tests = describe "only unique trigger names are allowed" do - it "check: inserting a new row invokes an event trigger" $ - \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do + it "check: inserting a new row invokes an event trigger" + $ \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment insertQuery = @@ -127,8 +127,8 @@ tests = eventPayload `shouldBeYaml` expectedEventPayload - it "metadata_api: does not allow creating an event trigger with a name that already exists" $ - \(testEnvironment, (webhookServer, _)) -> do + it "metadata_api: does not allow creating an event trigger with a name that already exists" + $ \(testEnvironment, (webhookServer, _)) -> do -- metadata <- GraphqlEngine.exportMetadata testEnvironment let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment @@ -160,8 +160,8 @@ tests = (GraphqlEngine.postWithHeadersStatus 400 testEnvironment "/v1/metadata/" mempty createEventTriggerWithDuplicateName) createEventTriggerWithDuplicateNameExpectedResponse - it "replace_metadata: does not allow creating an event trigger with a name that already exists" $ - \(testEnvironment, 
(webhookServer, _)) -> do + it "replace_metadata: does not allow creating an event trigger with a name that already exists" + $ \(testEnvironment, (webhookServer, _)) -> do let replaceMetadata = getReplaceMetadata testEnvironment webhookServer replaceMetadataWithDuplicateNameExpectedResponse = @@ -186,8 +186,8 @@ mssqlSetupWithEventTriggers testEnvironment webhookServer = do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment webhookServerEchoEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/echo" - GraphqlEngine.postMetadata_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [interpolateYaml| type: bulk args: - type: mssql_create_event_trigger @@ -251,8 +251,8 @@ getReplaceMetadata testEnvironment webhookServer = mssqlTeardown :: TestEnvironment -> IO () mssqlTeardown testEnvironment = do - GraphqlEngine.postMetadata_ testEnvironment $ - [yaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [yaml| type: bulk args: - type: mssql_delete_event_trigger diff --git a/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersUntrackTableCleanupSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersUntrackTableCleanupSpec.hs index c10646e681b..491a072b1d6 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersUntrackTableCleanupSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/MSSQL/EventTriggersUntrackTableCleanupSpec.hs @@ -74,8 +74,8 @@ authorsTable tableName = tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) tests = describe "untrack a table with event triggers should remove the SQL triggers created on the table" do - it "check: inserting a new row invokes an event trigger" $ - \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do + it "check: inserting a new row invokes an event trigger" + $ \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment insertQuery = @@ -114,8 +114,8 @@ tests = eventPayload `shouldBeYaml` expectedEventPayload - it "untrack table, check the SQL triggers are deleted from the table" $ - \(testEnvironment, _) -> do + it "untrack table, check the SQL triggers are deleted from the table" + $ \(testEnvironment, _) -> do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment untrackTableQuery = @@ -178,8 +178,8 @@ mssqlSetup testEnvironment webhookServer = do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment webhookServerEchoEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/echo" - GraphqlEngine.postMetadata_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [interpolateYaml| type: bulk args: - type: mssql_create_event_trigger diff --git a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersClearMetadataSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersClearMetadataSpec.hs index 0bf7638269b..b3578d7f4a0 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersClearMetadataSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersClearMetadataSpec.hs @@ -76,8 +76,8 @@ authorsTable tableName = tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) tests = describe "doing clear_metadata with an event trigger containing auto cleanup config should succeed" do - it "remove source via replace_metadata, check that the event_log 
table is removed as well" $ - \(testEnvironment, (_, _)) -> do + it "remove source via replace_metadata, check that the event_log table is removed as well" + $ \(testEnvironment, (_, _)) -> do -- remove the source using replace_meatadata API let clearMetadata = [yaml| @@ -105,8 +105,8 @@ postgresSetup testEnvironment webhookServer = do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment let webhookServerEchoEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/echo" - GraphqlEngine.postMetadata_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [interpolateYaml| type: bulk args: - type: pg_create_event_trigger diff --git a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersExtensionSchemaSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersExtensionSchemaSpec.hs index 058628668c5..53974448bff 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersExtensionSchemaSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersExtensionSchemaSpec.hs @@ -76,8 +76,8 @@ authorsTable tableName = tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) tests = describe "event triggers should work when extensions are created in different schema using 'extensions_schema'" do - it "check: inserting a new row invokes a event trigger" $ - \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do + it "check: inserting a new row invokes a event trigger" + $ \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment let insertQuery = @@ -169,7 +169,7 @@ tests = -- ** Setup and teardown override -postgresSetup :: HasCallStack => TestEnvironment -> GraphqlEngine.Server -> IO () +postgresSetup :: (HasCallStack) => TestEnvironment -> GraphqlEngine.Server -> IO () postgresSetup testEnvironment webhookServer = do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment @@ -186,8 +186,8 @@ postgresSetup testEnvironment webhookServer = do webhookServerEchoEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/echo" -- create a new source - GraphqlEngine.postMetadata_ testEnvironment $ - [yaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [yaml| type: pg_add_source args: name: *sourceName @@ -200,8 +200,8 @@ postgresSetup testEnvironment webhookServer = do Schema.trackTable sourceName theTable testEnvironment -- create the event trigger - GraphqlEngine.postMetadata_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [interpolateYaml| type: bulk args: - type: pg_create_event_trigger @@ -216,10 +216,10 @@ postgresSetup testEnvironment webhookServer = do columns: "*" |] -postgresTeardown :: HasCallStack => TestEnvironment -> IO () +postgresTeardown :: (HasCallStack) => TestEnvironment -> IO () postgresTeardown testEnvironment = do - GraphqlEngine.postMetadata_ testEnvironment $ - [yaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [yaml| type: bulk args: - type: pg_delete_event_trigger @@ -228,8 +228,8 @@ postgresTeardown testEnvironment = do source: hge_test |] - GraphqlEngine.postMetadata_ testEnvironment $ - [yaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [yaml| type: bulk args: - type: pg_drop_source diff --git a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersForReplicationSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersForReplicationSpec.hs index 
1ab5e7e7372..06ae7181106 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersForReplicationSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersForReplicationSpec.hs @@ -83,8 +83,8 @@ articlesTable tableName = tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) tests = describe "verify trigger status when logical replication is used" do - it "verify trigger is enabled on logical replication" $ - \(testEnvironment, (webhookServer, (Webhook.EventsQueue _eventsQueue))) -> do + it "verify trigger is enabled on logical replication" + $ \(testEnvironment, (webhookServer, (Webhook.EventsQueue _eventsQueue))) -> do postgresSetupWithEventTriggers testEnvironment webhookServer "True" let getTriggerInfoQuery = [interpolateYaml| @@ -118,8 +118,8 @@ tests = (GraphqlEngine.postV2Query 200 testEnvironment getTriggerInfoQuery) expectedResponseForEnablingTriggers - it "verify trigger is disabled on logical replication" $ - \(testEnvironment, (webhookServer, (Webhook.EventsQueue _eventsQueue))) -> do + it "verify trigger is disabled on logical replication" + $ \(testEnvironment, (webhookServer, (Webhook.EventsQueue _eventsQueue))) -> do postgresSetupWithEventTriggers testEnvironment webhookServer "False" let getTriggerInfoQuery = [interpolateYaml| @@ -162,8 +162,8 @@ postgresSetupWithEventTriggers testEnvironment webhookServer triggerOnReplicatio let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment webhookServerEchoEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/echo" - GraphqlEngine.postMetadata_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [interpolateYaml| type: pg_create_event_trigger args: name: author_trigger @@ -183,8 +183,8 @@ postgresSetupWithEventTriggers testEnvironment webhookServer triggerOnReplicatio postgresTeardown :: TestEnvironment -> IO () postgresTeardown testEnvironment = do - GraphqlEngine.postMetadata_ testEnvironment $ - [yaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [yaml| type: pg_delete_event_trigger args: name: author_trigger diff --git a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersRecreationSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersRecreationSpec.hs index 41bb6d09ea4..9bf0009f4db 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersRecreationSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersRecreationSpec.hs @@ -165,8 +165,8 @@ postgresTeardown :: TestEnvironment -> IO () postgresTeardown testEnvironment = do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment - GraphqlEngine.postV2Query_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postV2Query_ testEnvironment + $ [interpolateYaml| type: run_sql args: source: postgres diff --git a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersReplaceMetadataCleanupSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersReplaceMetadataCleanupSpec.hs index ad834ca7e05..e29be293a30 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersReplaceMetadataCleanupSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersReplaceMetadataCleanupSpec.hs @@ -75,8 +75,8 @@ authorsTable tableName = tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) tests = describe "removing a source with event trigger via replace_metadata should also remove the event trigger related stuff 
(hdb_catalog.event_log)" do - it "remove source via replace_metadata, check that the event_log table is removed as well" $ - \(testEnvironment, (_, _)) -> do + it "remove source via replace_metadata, check that the event_log table is removed as well" + $ \(testEnvironment, (_, _)) -> do -- `hdb_catalog.event_log` should be existing before (as we have added an event trigger in setup) checkIfPGTableExists testEnvironment "hdb_catalog.event_log" >>= (`shouldBe` True) @@ -112,8 +112,8 @@ postgresSetup testEnvironment webhookServer = do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment let webhookServerEchoEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/echo" - GraphqlEngine.postMetadata_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [interpolateYaml| type: bulk args: - type: pg_create_event_trigger diff --git a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersRunSQLSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersRunSQLSpec.hs index f564fce520b..7d134193424 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersRunSQLSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersRunSQLSpec.hs @@ -120,8 +120,8 @@ args: result_type: CommandOk result: null |] - it "inserting a new row should work fine" $ - \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do + it "inserting a new row should work fine" + $ \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment shouldReturnYaml @@ -251,8 +251,8 @@ renameTableContainingTriggerTests = do result_type: CommandOk result: null |] - it "inserting a new row should work fine" $ - \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do + it "inserting a new row should work fine" + $ \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment shouldReturnYaml @@ -294,8 +294,8 @@ postgresSetup testEnvironment webhookServer = do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment let webhookServerEchoEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/echo" - GraphqlEngine.postMetadata_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [interpolateYaml| type: bulk args: - type: pg_create_event_trigger @@ -324,8 +324,8 @@ postgresSetup testEnvironment webhookServer = do postgresTeardown :: TestEnvironment -> IO () postgresTeardown testEnvironment = do - GraphqlEngine.postMetadata_ testEnvironment $ - [yaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [yaml| type: bulk args: - type: pg_delete_event_trigger diff --git a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersUniqueNameSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersUniqueNameSpec.hs index 0ad642d144c..b8e5d0304ae 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersUniqueNameSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersUniqueNameSpec.hs @@ -87,8 +87,8 @@ articlesTable tableName = tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) tests = describe "only unique trigger names are allowed" do - it "check: inserting a new row invokes an event trigger" 
+ $ \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment let insertQuery = @@ -127,8 +127,8 @@ tests = eventPayload `shouldBeYaml` expectedEventPayload - it "metadata_api: does not allow creating an event trigger with a name that already exists" $ - \(testEnvironment, (webhookServer, _)) -> do + it "metadata_api: does not allow creating an event trigger with a name that already exists" + $ \(testEnvironment, (webhookServer, _)) -> do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment -- metadata <- GraphqlEngine.exportMetadata testEnvironment @@ -160,8 +160,8 @@ tests = (GraphqlEngine.postWithHeadersStatus 400 testEnvironment "/v1/metadata/" mempty createEventTriggerWithDuplicateName) createEventTriggerWithDuplicateNameExpectedResponse - it "replace_metadata: does not allow creating an event trigger with a name that already exists" $ - \(testEnvironment, (webhookServer, _)) -> do + it "replace_metadata: does not allow creating an event trigger with a name that already exists" + $ \(testEnvironment, (webhookServer, _)) -> do let replaceMetadata = getReplaceMetadata testEnvironment webhookServer replaceMetadataWithDuplicateNameExpectedResponse = @@ -186,8 +186,8 @@ postgresSetup testEnvironment webhookServer = do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment let webhookServerEchoEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/echo" - GraphqlEngine.postMetadata_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [interpolateYaml| type: bulk args: - type: pg_create_event_trigger @@ -251,8 +251,8 @@ getReplaceMetadata testEnvironment webhookServer = postgresTeardown :: TestEnvironment -> IO () postgresTeardown testEnvironment = do - GraphqlEngine.postMetadata_ testEnvironment $ - [yaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [yaml| type: bulk args: - type: pg_delete_event_trigger diff --git a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersUntrackTableCleanupSpec.hs b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersUntrackTableCleanupSpec.hs index bf61de1b418..59be6246fe8 100644 --- a/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersUntrackTableCleanupSpec.hs +++ b/server/lib/api-tests/src/Test/EventTriggers/PG/EventTriggersUntrackTableCleanupSpec.hs @@ -74,8 +74,8 @@ authorsTable tableName = tests :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) tests = describe "untrack a table with event triggers should remove the SQL triggers created on the table" do - it "check: inserting a new row invokes an event trigger" $ - \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do + it "check: inserting a new row invokes an event trigger" + $ \(testEnvironment, (_, (Webhook.EventsQueue eventsQueue))) -> do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment let insertQuery = @@ -114,8 +114,8 @@ tests = eventPayload `shouldBeYaml` expectedEventPayload - it "untrack table, check the SQL triggers are deleted from the table" $ - \(testEnvironment, _) -> do + it "untrack table, check the SQL triggers are deleted from the table" + $ \(testEnvironment, _) -> do let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment let untrackTableQuery = @@ -168,8 +168,8 @@ postgresSetup testEnvironment webhookServer = do let schemaName :: Schema.SchemaName schemaName = 
Schema.getSchemaName testEnvironment let webhookServerEchoEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/echo" - GraphqlEngine.postMetadata_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [interpolateYaml| type: bulk args: - type: pg_create_event_trigger diff --git a/server/lib/api-tests/src/Test/Mutations/Upsert/OnConflictSpec.hs b/server/lib/api-tests/src/Test/Mutations/Upsert/OnConflictSpec.hs index d4f7d2e76f5..a789f61cd98 100644 --- a/server/lib/api-tests/src/Test/Mutations/Upsert/OnConflictSpec.hs +++ b/server/lib/api-tests/src/Test/Mutations/Upsert/OnConflictSpec.hs @@ -106,8 +106,8 @@ schema = serialInt :: Schema.ScalarType serialInt = - Schema.TCustomType $ - Schema.defaultBackendScalarType + Schema.TCustomType + $ Schema.defaultBackendScalarType { Schema.bstCitus = Just "INT", Schema.bstPostgres = Just "INT", Schema.bstCockroach = Just "INT4" diff --git a/server/lib/api-tests/src/Test/PortedFromPytest/TestGraphQLQueryBasicCitus.hs b/server/lib/api-tests/src/Test/PortedFromPytest/TestGraphQLQueryBasicCitus.hs index b4b69fd21e8..15b72546170 100644 --- a/server/lib/api-tests/src/Test/PortedFromPytest/TestGraphQLQueryBasicCitus.hs +++ b/server/lib/api-tests/src/Test/PortedFromPytest/TestGraphQLQueryBasicCitus.hs @@ -308,8 +308,8 @@ tests = do describe "test_nested_select_with_foreign_key_alter" do -- from: queries/graphql_query/citus/nested_select_with_foreign_key_alter_citus.yaml [0] it "Alter foreign key constraint on article table" \testEnvironment -> do - void $ - GraphqlEngine.postV2Query + void + $ GraphqlEngine.postV2Query 200 testEnvironment [interpolateYaml| diff --git a/server/lib/api-tests/src/Test/Queries/AggregationSpec.hs b/server/lib/api-tests/src/Test/Queries/AggregationSpec.hs index 4041247e281..2b76a5fa62a 100644 --- a/server/lib/api-tests/src/Test/Queries/AggregationSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/AggregationSpec.hs @@ -36,8 +36,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True, Fixture.skipTests = Just "BigQuery returns numbers as strings, which means the second test fails" } diff --git a/server/lib/api-tests/src/Test/Queries/DistinctSpec.hs b/server/lib/api-tests/src/Test/Queries/DistinctSpec.hs index da9acbae588..2cb43c99aba 100644 --- a/server/lib/api-tests/src/Test/Queries/DistinctSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/DistinctSpec.hs @@ -47,8 +47,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } } diff --git a/server/lib/api-tests/src/Test/Queries/Errors/InvalidQuerySpec.hs b/server/lib/api-tests/src/Test/Queries/Errors/InvalidQuerySpec.hs index e4a64b125c3..f8bfa31a666 100644 --- a/server/lib/api-tests/src/Test/Queries/Errors/InvalidQuerySpec.hs +++ b/server/lib/api-tests/src/Test/Queries/Errors/InvalidQuerySpec.hs @@ -56,8 +56,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnvironment ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/Errors/MissingTableSpec.hs b/server/lib/api-tests/src/Test/Queries/Errors/MissingTableSpec.hs index b3993a59cee..a7dddcabe7d 100644 --- a/server/lib/api-tests/src/Test/Queries/Errors/MissingTableSpec.hs +++ 
b/server/lib/api-tests/src/Test/Queries/Errors/MissingTableSpec.hs @@ -56,8 +56,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnvironment ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/Errors/NoQueriesAvailableSpec.hs b/server/lib/api-tests/src/Test/Queries/Errors/NoQueriesAvailableSpec.hs index 347193fb10e..570246a1e78 100644 --- a/server/lib/api-tests/src/Test/Queries/Errors/NoQueriesAvailableSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/Errors/NoQueriesAvailableSpec.hs @@ -54,8 +54,8 @@ spec = do [ BigQuery.setupTablesAction tables testEnvironment ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/FilterSearch/FilterSearchSpec.hs b/server/lib/api-tests/src/Test/Queries/FilterSearch/FilterSearchSpec.hs index 0ec50e796e4..970f5f6ad84 100644 --- a/server/lib/api-tests/src/Test/Queries/FilterSearch/FilterSearchSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/FilterSearch/FilterSearchSpec.hs @@ -54,8 +54,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/NativeQueriesSpec.hs b/server/lib/api-tests/src/Test/Queries/NativeQueriesSpec.hs index 233e4b33e65..2245a604886 100644 --- a/server/lib/api-tests/src/Test/Queries/NativeQueriesSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/NativeQueriesSpec.hs @@ -21,8 +21,8 @@ featureFlagForNativeQueries = "HASURA_FF_NATIVE_QUERY_INTERFACE" spec :: SpecWith GlobalTestEnvironment spec = - Fixture.hgeWithEnv [(featureFlagForNativeQueries, "True")] $ - Fixture.runClean -- re-run fixture setup on every test + Fixture.hgeWithEnv [(featureFlagForNativeQueries, "True")] + $ Fixture.runClean -- re-run fixture setup on every test ( NE.fromList [ (Fixture.fixture $ Fixture.Backend Postgres.backendTypeMetadata) { Fixture.setupTeardown = \(testEnvironment, _) -> @@ -66,8 +66,8 @@ tests = do (Schema.trackLogicalModelCommand source backendTypeMetadata helloWorldLogicalModel) -- we expect this to fail - void $ - GraphqlEngine.postMetadataWithStatus + void + $ GraphqlEngine.postMetadataWithStatus 400 testEnvironment (Schema.trackNativeQueryCommand source backendTypeMetadata helloWorldNativeQuery) diff --git a/server/lib/api-tests/src/Test/Queries/NestedObjectSpec.hs b/server/lib/api-tests/src/Test/Queries/NestedObjectSpec.hs index cdb8b7de3e2..d70cc261e61 100644 --- a/server/lib/api-tests/src/Test/Queries/NestedObjectSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/NestedObjectSpec.hs @@ -53,8 +53,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/Paginate/LimitSpec.hs b/server/lib/api-tests/src/Test/Queries/Paginate/LimitSpec.hs index 291fafc1c45..8173c9ac038 100644 --- a/server/lib/api-tests/src/Test/Queries/Paginate/LimitSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/Paginate/LimitSpec.hs @@ -54,8 +54,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, 
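The hunks on either side of this point all apply one mechanical rule: infix operators such as ($) and (.) move from the end of a line to the head of the continuation line, consistent with the leading-operator style that newer Ormolu releases default to (the formatter is an assumption here; the rule itself is visible in every hunk). A minimal standalone sketch of the two layouts, using an invented Options record rather than the Fixture API; both definitions parse to exactly the same expression:

module StyleSketch where

data Options = Options
  { stringifyNumbers :: Bool,
    skipTests :: Maybe String
  }
  deriving (Show)

defaultOptions :: Options
defaultOptions = Options {stringifyNumbers = False, skipTests = Nothing}

-- Old layout: ($) trails the first line.
oldLayout :: Maybe Options
oldLayout =
  Just $
    defaultOptions
      { stringifyNumbers = True
      }

-- New layout, as in this patch: ($) leads the continuation line.
newLayout :: Maybe Options
newLayout =
  Just
    $ defaultOptions
      { stringifyNumbers = True
      }

The same rule produces the `Just . API.And $ ...` rewrites earlier in the patch: each operator in the chain starts its own line, so the parse is unchanged while the leading token signals that the line continues the previous expression.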
diff --git a/server/lib/api-tests/src/Test/Queries/Paginate/OffsetSpec.hs b/server/lib/api-tests/src/Test/Queries/Paginate/OffsetSpec.hs index 48a0c2c282a..acefdbf2c99 100644 --- a/server/lib/api-tests/src/Test/Queries/Paginate/OffsetSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/Paginate/OffsetSpec.hs @@ -53,8 +53,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/Simple/JSONSpec.hs b/server/lib/api-tests/src/Test/Queries/Simple/JSONSpec.hs index 3a6e6d380a2..316ec201eb3 100644 --- a/server/lib/api-tests/src/Test/Queries/Simple/JSONSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/Simple/JSONSpec.hs @@ -28,8 +28,8 @@ import Test.Hspec (SpecWith, it) spec :: SpecWith GlobalTestEnvironment spec = do - withEachProtocol $ - Fixture.run + withEachProtocol + $ Fixture.run ( NE.fromList [ (Fixture.fixture $ Fixture.Backend Postgres.backendTypeMetadata) { Fixture.setupTeardown = \(testEnvironment, _) -> @@ -51,8 +51,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnvironment ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } } @@ -69,8 +69,8 @@ schema = { tableColumns = [ Schema.column "id" Schema.TInt, Schema.column "name" Schema.TStr, - Schema.column "address" $ - Schema.TCustomType + Schema.column "address" + $ Schema.TCustomType Schema.defaultBackendScalarType { Schema.bstCitus = Just "JSON", Schema.bstCockroach = Just "JSON", @@ -82,8 +82,8 @@ schema = tableData = [ [ Schema.VInt 1, Schema.VStr "Justin", - Schema.VCustomValue $ - Schema.defaultBackendScalarValue + Schema.VCustomValue + $ Schema.defaultBackendScalarValue { Schema.bsvCitus = Just (Schema.Quoted "{ \"city\": \"Bristol\" }"), Schema.bsvCockroach = Just (Schema.Quoted "{ \"city\": \"Bristol\" }"), Schema.bsvPostgres = Just (Schema.Quoted "{ \"city\": \"Bristol\" }"), diff --git a/server/lib/api-tests/src/Test/Queries/Simple/ObjectQueriesSpec.hs b/server/lib/api-tests/src/Test/Queries/Simple/ObjectQueriesSpec.hs index 12b0c2c2117..e230d2f5b6b 100644 --- a/server/lib/api-tests/src/Test/Queries/Simple/ObjectQueriesSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/Simple/ObjectQueriesSpec.hs @@ -36,18 +36,18 @@ import Test.Hspec (SpecWith, describe, it) spec :: SpecWith GlobalTestEnvironment spec = do withHge emptyHgeConfig $ do - withPostgresSource "postgres-source" $ - withSchemaName "test_schema" $ - withPostgresSchema schema $ - tests + withPostgresSource "postgres-source" + $ withSchemaName "test_schema" + $ withPostgresSchema schema + $ tests - DC.withDcPostgresSource "dc-postgres-source" $ - withSchemaName "test_schema" $ - DC.withDcPostgresSchema schema $ - tests + DC.withDcPostgresSource "dc-postgres-source" + $ withSchemaName "test_schema" + $ DC.withDcPostgresSchema schema + $ tests - withEachProtocol $ - Fixture.run + withEachProtocol + $ Fixture.run ( NE.fromList [ (Fixture.fixture $ Fixture.Backend Citus.backendTypeMetadata) { Fixture.setupTeardown = \(testEnvironment, _) -> @@ -69,8 +69,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnvironment ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/Simple/OperationNameSpec.hs b/server/lib/api-tests/src/Test/Queries/Simple/OperationNameSpec.hs index 
57fa755a41a..84bb0273bae 100644 --- a/server/lib/api-tests/src/Test/Queries/Simple/OperationNameSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/Simple/OperationNameSpec.hs @@ -27,8 +27,8 @@ import Test.Hspec (SpecWith, describe, it) spec :: SpecWith GlobalTestEnvironment spec = do - withEachProtocol $ - Fixture.run + withEachProtocol + $ Fixture.run ( NE.fromList [ (Fixture.fixture $ Fixture.Backend Postgres.backendTypeMetadata) { Fixture.setupTeardown = \(testEnv, _) -> @@ -55,8 +55,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/Simple/PrimaryKeySpec.hs b/server/lib/api-tests/src/Test/Queries/Simple/PrimaryKeySpec.hs index 8b79d60d1b7..a08ff78b85c 100644 --- a/server/lib/api-tests/src/Test/Queries/Simple/PrimaryKeySpec.hs +++ b/server/lib/api-tests/src/Test/Queries/Simple/PrimaryKeySpec.hs @@ -28,8 +28,8 @@ import Test.Hspec (SpecWith, describe, it) spec :: SpecWith GlobalTestEnvironment spec = - withEachProtocol $ - Fixture.run + withEachProtocol + $ Fixture.run ( NE.fromList [ (Fixture.fixture $ Fixture.Backend Postgres.backendTypeMetadata) { Fixture.setupTeardown = \(testEnvironment, _) -> diff --git a/server/lib/api-tests/src/Test/Queries/SortSpec.hs b/server/lib/api-tests/src/Test/Queries/SortSpec.hs index e2f45533891..1aed8a610b5 100644 --- a/server/lib/api-tests/src/Test/Queries/SortSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/SortSpec.hs @@ -54,8 +54,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/AliasesSpec.hs b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/AliasesSpec.hs index 888e059f4f9..e1f12974fb8 100644 --- a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/AliasesSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/AliasesSpec.hs @@ -50,8 +50,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/DirectivesSpec.hs b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/DirectivesSpec.hs index 5ac15736a3a..ce4a56c3495 100644 --- a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/DirectivesSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/DirectivesSpec.hs @@ -52,8 +52,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/IncludeAndSkipSpec.hs b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/IncludeAndSkipSpec.hs index bb34211a1c3..005ab15bc85 100644 --- a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/IncludeAndSkipSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/IncludeAndSkipSpec.hs @@ -52,8 +52,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], 
Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/IncludeSpec.hs b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/IncludeSpec.hs index 9076f379238..83b9ef68914 100644 --- a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/IncludeSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/IncludeSpec.hs @@ -55,8 +55,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/SkipSpec.hs b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/SkipSpec.hs index 163cd8ea66d..436b146fcce 100644 --- a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/SkipSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/Directives/SkipSpec.hs @@ -55,8 +55,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/FragmentsSpec.hs b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/FragmentsSpec.hs index bde717d8ad3..4407c22dc3e 100644 --- a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/FragmentsSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/FragmentsSpec.hs @@ -50,8 +50,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/VariablesSpec.hs b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/VariablesSpec.hs index fe3e8816c7d..682563255b4 100644 --- a/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/VariablesSpec.hs +++ b/server/lib/api-tests/src/Test/Queries/VariablesAliasesFragments/VariablesSpec.hs @@ -50,8 +50,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Regression/AggregateBoolExpConflictSpec.hs b/server/lib/api-tests/src/Test/Regression/AggregateBoolExpConflictSpec.hs index 34b08629c35..bb8ad60b15d 100644 --- a/server/lib/api-tests/src/Test/Regression/AggregateBoolExpConflictSpec.hs +++ b/server/lib/api-tests/src/Test/Regression/AggregateBoolExpConflictSpec.hs @@ -76,8 +76,8 @@ schema = serialInt :: Schema.ScalarType serialInt = - Schema.TCustomType $ - Schema.defaultBackendScalarType + Schema.TCustomType + $ Schema.defaultBackendScalarType { Schema.bstCitus = Just "INT", Schema.bstPostgres = Just "INT", Schema.bstCockroach = Just "INT4" diff --git a/server/lib/api-tests/src/Test/Regression/LongIdentifiers3796Spec.hs b/server/lib/api-tests/src/Test/Regression/LongIdentifiers3796Spec.hs index b3c09bcb963..6b8cb2ab1a4 100644 --- a/server/lib/api-tests/src/Test/Regression/LongIdentifiers3796Spec.hs +++ b/server/lib/api-tests/src/Test/Regression/LongIdentifiers3796Spec.hs @@ 
-57,8 +57,8 @@ spec = do [ BigQuery.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } } diff --git a/server/lib/api-tests/src/Test/Regression/MultiColumnObjectRelationshipsSpec.hs b/server/lib/api-tests/src/Test/Regression/MultiColumnObjectRelationshipsSpec.hs index a0c67b45595..73dacb99e6e 100644 --- a/server/lib/api-tests/src/Test/Regression/MultiColumnObjectRelationshipsSpec.hs +++ b/server/lib/api-tests/src/Test/Regression/MultiColumnObjectRelationshipsSpec.hs @@ -51,8 +51,8 @@ spec = do setupRelationships BigQuery.backendTypeMetadata testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } }, diff --git a/server/lib/api-tests/src/Test/Regression/MultiplexQuerySpec.hs b/server/lib/api-tests/src/Test/Regression/MultiplexQuerySpec.hs index 30bbaf03b8a..8ae87fa87a3 100644 --- a/server/lib/api-tests/src/Test/Regression/MultiplexQuerySpec.hs +++ b/server/lib/api-tests/src/Test/Regression/MultiplexQuerySpec.hs @@ -66,7 +66,8 @@ setupFunctions testEnv = fetch_users = Schema.unSchemaName schemaName <> ".fetch_users" in Fixture.SetupAction { Fixture.setupAction = do - Postgres.run_ testEnv $ + Postgres.run_ testEnv + $ -- get_age postgres function returns the age of a user calculated from the -- birth_year column and in_year input parameter. The in_year should be a future year -- from 2022 (the year when this test is being written) @@ -87,7 +88,8 @@ setupFunctions testEnv = end; $function$ |] - Postgres.run_ testEnv $ + Postgres.run_ testEnv + $ -- fetch_users postgres function returns the list of users whose age is equal to given "age" input parameter -- in given future "in_year" parameter. The in_year should be a future year -- from 2022 (the year when this test is being written) and "age" should not be a negative value. 
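Every hunk above makes the same mechanical change: the formatter now breaks a line before the ($) operator rather than after it, so the operator opens the continuation line. Both layouts denote the same expression; a minimal, self-contained sketch (the function names are illustrative, not from the codebase):

    module Main where

    -- Old layout: ($) trails the first line.
    oldStyle :: Maybe Int
    oldStyle =
      Just $
        length [1, 2, 3]

    -- New layout: ($) opens the continuation line, as in the hunks above.
    newStyle :: Maybe Int
    newStyle =
      Just
        $ length [1, 2, 3]

    main :: IO ()
    main = print (oldStyle == newStyle) -- prints True: a pure layout change

Operators such as (<&>) move the same way in the hunks that follow, while chains that already led with their operator (the (&) and (<>) pipelines, the multi-way if guards) change only their indentation.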
diff --git a/server/lib/api-tests/src/Test/Regression/NullRemoteRelationship8345Spec.hs b/server/lib/api-tests/src/Test/Regression/NullRemoteRelationship8345Spec.hs index 0bf1c1baffb..c5d142311d9 100644 --- a/server/lib/api-tests/src/Test/Regression/NullRemoteRelationship8345Spec.hs +++ b/server/lib/api-tests/src/Test/Regression/NullRemoteRelationship8345Spec.hs @@ -35,16 +35,16 @@ spec = Fixture.runWithLocalTestEnvironment contexts tests contexts = NE.fromList $ do (rhsName, rhsMkLocalEnv, rhsSetup, rhsTeardown, albumJoin, artistJoin) <- [rhsPostgres, rhsRemoteServer] (lhsName, lhsMkLocalEnv, lhsSetup, lhsTeardown) <- [lhsPostgres, lhsRemoteServer] - pure $ - Fixture.Fixture + pure + $ Fixture.Fixture { Fixture.name = Fixture.Combine lhsName rhsName, Fixture.mkLocalTestEnvironment = \testEnvironment -> do lhsServer <- lhsMkLocalEnv testEnvironment rhsServer <- rhsMkLocalEnv testEnvironment pure $ LocalTestTestEnvironment lhsServer rhsServer, Fixture.setupTeardown = \(testEnvironment, LocalTestTestEnvironment lhsServer rhsServer) -> do - pure $ - Fixture.SetupAction + pure + $ Fixture.SetupAction { Fixture.setupAction = do let schemaName = Schema.getSchemaName testEnvironment -- RHS must always be setup before the LHS @@ -98,8 +98,8 @@ spec = Fixture.runWithLocalTestEnvironment contexts tests rhsRemoteServerMkLocalTestEnvironment, rhsRemoteServerSetup, rhsRemoteServerTeardown, - const $ - [yaml| + const + $ [yaml| to_remote_schema: remote_schema: target lhs_fields: [album_id] @@ -108,8 +108,8 @@ spec = Fixture.runWithLocalTestEnvironment contexts tests arguments: album_id: $album_id |], - const $ - [yaml| + const + $ [yaml| to_remote_schema: remote_schema: target lhs_fields: [artist_id] @@ -288,7 +288,7 @@ data LHSQuery m = LHSQuery } deriving (Generic) -instance Typeable m => Morpheus.GQLType (LHSQuery m) where +instance (Typeable m) => Morpheus.GQLType (LHSQuery m) where typeOptions _ _ = hasuraTypeOptions data LHSHasuraTrackArgs = LHSHasuraTrackArgs @@ -309,7 +309,7 @@ data LHSHasuraTrack m = LHSHasuraTrack } deriving (Generic) -instance Typeable m => Morpheus.GQLType (LHSHasuraTrack m) where +instance (Typeable m) => Morpheus.GQLType (LHSHasuraTrack m) where typeOptions _ _ = hasuraTypeOptions data LHSHasuraTrackOrderBy = LHSHasuraTrackOrderBy @@ -368,12 +368,12 @@ lhsRemoteServerMkLocalTestEnvironment _ = Nothing -> \_ _ -> EQ Just orderByArg -> orderTrack orderByArg limitFunction = maybe Hasura.Prelude.id take ta_limit - pure $ - tracks - & filter filterFunction - & sortBy orderByFunction - & limitFunction - & map mkTrack + pure + $ tracks + & filter filterFunction + & sortBy orderByFunction + & limitFunction + & map mkTrack -- Returns True iif the given track matches the given boolean expression. 
matchTrack trackInfo@(trackId, trackTitle, maybeAlbumId, maybeArtistId) (LHSHasuraTrackBoolExp {..}) = and @@ -395,18 +395,18 @@ lhsRemoteServerMkLocalTestEnvironment _ = (trackId2, trackTitle2, trackAlbumId2, trackArtistId2) = flip foldMap orderByList \LHSHasuraTrackOrderBy {..} -> if - | Just idOrder <- tob_id -> case idOrder of - Asc -> compare trackId1 trackId2 - Desc -> compare trackId2 trackId1 - | Just titleOrder <- tob_title -> case titleOrder of - Asc -> compare trackTitle1 trackTitle2 - Desc -> compare trackTitle2 trackTitle1 - | Just albumIdOrder <- tob_album_id -> - compareWithNullLast albumIdOrder trackAlbumId1 trackAlbumId2 - | Just artistIdOrder <- tob_artist_id -> - compareWithNullLast artistIdOrder trackArtistId1 trackArtistId2 - | otherwise -> - error "empty track_order object" + | Just idOrder <- tob_id -> case idOrder of + Asc -> compare trackId1 trackId2 + Desc -> compare trackId2 trackId1 + | Just titleOrder <- tob_title -> case titleOrder of + Asc -> compare trackTitle1 trackTitle2 + Desc -> compare trackTitle2 trackTitle1 + | Just albumIdOrder <- tob_album_id -> + compareWithNullLast albumIdOrder trackAlbumId1 trackAlbumId2 + | Just artistIdOrder <- tob_artist_id -> + compareWithNullLast artistIdOrder trackArtistId1 trackArtistId2 + | otherwise -> + error "empty track_order object" compareWithNullLast Desc x1 x2 = compareWithNullLast Asc x2 x1 compareWithNullLast Asc Nothing Nothing = EQ compareWithNullLast Asc (Just _) Nothing = LT diff --git a/server/lib/api-tests/src/Test/Regression/ObjectRelationshipsLimit7936Spec.hs b/server/lib/api-tests/src/Test/Regression/ObjectRelationshipsLimit7936Spec.hs index 0a196c5ab90..8d68d96e707 100644 --- a/server/lib/api-tests/src/Test/Regression/ObjectRelationshipsLimit7936Spec.hs +++ b/server/lib/api-tests/src/Test/Regression/ObjectRelationshipsLimit7936Spec.hs @@ -309,7 +309,7 @@ setupMetadata testEnvironment = do -- -- We use 'Visual' internally to easily display the 'Value' as YAML -- when the test suite uses its 'Show' instance. 
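The signature rewrites in this file and those below show the patch's second mechanical pattern: a context with a single constraint, such as `HasCallStack =>` or `Typeable m =>`, is now printed with parentheses, `(HasCallStack) =>`. GHC accepts both spellings, so behaviour is unchanged. A small sketch with illustrative names:

    module Sketch where

    -- Old rendering: bare single-constraint context.
    describeOld :: Show a => [a] -> String
    describeOld = unwords . map show

    -- New rendering: the same context, parenthesized.
    describeNew :: (Show a) => [a] -> String
    describeNew = unwords . map show

Contexts with two or more constraints already required parentheses, which is why multi-constraint signatures pass through these hunks untouched.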
-shouldReturnOneOfYaml :: HasCallStack => TestEnvironment -> IO Value -> [Value] -> IO () +shouldReturnOneOfYaml :: (HasCallStack) => TestEnvironment -> IO Value -> [Value] -> IO () shouldReturnOneOfYaml testEnv actualIO candidates = do let Fixture.Options {stringifyNumbers} = _options testEnv actual <- actualIO diff --git a/server/lib/api-tests/src/Test/Regression/RemoteRelationshipStringifyNum8387Spec.hs b/server/lib/api-tests/src/Test/Regression/RemoteRelationshipStringifyNum8387Spec.hs index fa97d67557d..115b973dc70 100644 --- a/server/lib/api-tests/src/Test/Regression/RemoteRelationshipStringifyNum8387Spec.hs +++ b/server/lib/api-tests/src/Test/Regression/RemoteRelationshipStringifyNum8387Spec.hs @@ -30,11 +30,11 @@ import Test.Schema.RemoteRelationships.MetadataAPI.Common qualified as Common spec :: SpecWith GlobalTestEnvironment spec = do - Fixture.hgeWithEnv [("HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES", "true")] $ - Fixture.runWithLocalTestEnvironment contexts testsWithFeatureOn + Fixture.hgeWithEnv [("HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES", "true")] + $ Fixture.runWithLocalTestEnvironment contexts testsWithFeatureOn - Fixture.hgeWithEnv [("HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES", "false")] $ - Fixture.runWithLocalTestEnvironment contexts testsWithFeatureOff + Fixture.hgeWithEnv [("HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES", "false")] + $ Fixture.runWithLocalTestEnvironment contexts testsWithFeatureOff where lhsFixtures = [lhsPostgres, lhsRemoteServer] rhsFixtures = [rhsPostgres] @@ -80,8 +80,8 @@ rhsPostgres = [ SetupAction.noTeardown (rhsPostgresSetup testEnv) ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } } @@ -124,29 +124,29 @@ album = floatType :: Schema.ScalarType floatType = - Schema.TCustomType $ - Schema.defaultBackendScalarType + Schema.TCustomType + $ Schema.defaultBackendScalarType { Schema.bstPostgres = Just "NUMERIC" } mkFloatValue :: Text -> Schema.ScalarValue mkFloatValue int = - Schema.VCustomValue $ - Schema.defaultBackendScalarValue + Schema.VCustomValue + $ Schema.defaultBackendScalarValue { Schema.bsvPostgres = Just (Schema.Unquoted int) } bigIntType :: Schema.ScalarType bigIntType = - Schema.TCustomType $ - Schema.defaultBackendScalarType + Schema.TCustomType + $ Schema.defaultBackendScalarType { Schema.bstPostgres = Just "BIGINT" } mkBigIntValue :: Text -> Schema.ScalarValue mkBigIntValue int = - Schema.VCustomValue $ - Schema.defaultBackendScalarValue + Schema.VCustomValue + $ Schema.defaultBackendScalarValue { Schema.bsvPostgres = Just (Schema.Unquoted int) } diff --git a/server/lib/api-tests/src/Test/Regression/StreamConflictSpec.hs b/server/lib/api-tests/src/Test/Regression/StreamConflictSpec.hs index e1b0050dddb..73eafc225f2 100644 --- a/server/lib/api-tests/src/Test/Regression/StreamConflictSpec.hs +++ b/server/lib/api-tests/src/Test/Regression/StreamConflictSpec.hs @@ -19,8 +19,8 @@ spec = [ Postgres.setupTablesAction schema testEnv ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.skipTests = Just "Disabled until we can dynamically change server settings per test. 
To test, add EFHideStreamFields to soSubscriptions in Harness.Constants -> serveOptions" } diff --git a/server/lib/api-tests/src/Test/Regression/UsingTheSameFunctionForRootFieldAndComputedField8643Spec.hs b/server/lib/api-tests/src/Test/Regression/UsingTheSameFunctionForRootFieldAndComputedField8643Spec.hs index b2f69f7cd06..de3f731aa52 100644 --- a/server/lib/api-tests/src/Test/Regression/UsingTheSameFunctionForRootFieldAndComputedField8643Spec.hs +++ b/server/lib/api-tests/src/Test/Regression/UsingTheSameFunctionForRootFieldAndComputedField8643Spec.hs @@ -54,21 +54,21 @@ schema = functionSetup :: TestEnvironment -> Fixture.SetupAction functionSetup testEnvironment = let schemaName = unSchemaName (getSchemaName testEnvironment) - in SetupAction.noTeardown $ - Postgres.run_ testEnvironment $ - "CREATE FUNCTION " - <> schemaName - <> ".authors(author_row " - <> schemaName - <> ".author) \ - \RETURNS SETOF " - <> schemaName - <> ".author AS $$ \ - \ SELECT * \ - \ FROM " - <> schemaName - <> ".author \ - \$$ LANGUAGE sql STABLE;" + in SetupAction.noTeardown + $ Postgres.run_ testEnvironment + $ "CREATE FUNCTION " + <> schemaName + <> ".authors(author_row " + <> schemaName + <> ".author) \ + \RETURNS SETOF " + <> schemaName + <> ".author AS $$ \ + \ SELECT * \ + \ FROM " + <> schemaName + <> ".author \ + \$$ LANGUAGE sql STABLE;" -------------------------------------------------------------------------------- -- Tests diff --git a/server/lib/api-tests/src/Test/ScheduledEvents/ScheduledEventsInvalidEnvVarSpec.hs b/server/lib/api-tests/src/Test/ScheduledEvents/ScheduledEventsInvalidEnvVarSpec.hs index 31de0a50e0d..9dad0688094 100644 --- a/server/lib/api-tests/src/Test/ScheduledEvents/ScheduledEventsInvalidEnvVarSpec.hs +++ b/server/lib/api-tests/src/Test/ScheduledEvents/ScheduledEventsInvalidEnvVarSpec.hs @@ -82,8 +82,8 @@ tests = do scheduledEventsWithInvalidEnvVar :: SpecWith (TestEnvironment, (GraphqlEngine.Server, Webhook.EventsQueue)) scheduledEventsWithInvalidEnvVar = describe "creating a scheduled event with invalid env var should add a failed invocation log" do - it "check the invocation log requests added for failed request corresponding to invalid header" $ - \(testEnvironment, (_, _)) -> do + it "check the invocation log requests added for failed request corresponding to invalid header" + $ \(testEnvironment, (_, _)) -> do -- get all the scheduled event invocations let getScheduledEventInvocationsQuery = [yaml| @@ -113,8 +113,8 @@ scheduledEventsWithInvalidEnvVar = postgresSetup :: TestEnvironment -> GraphqlEngine.Server -> IO () postgresSetup testEnvironment webhookServer = do let webhookServerEchoEndpoint = GraphqlEngine.serverUrl webhookServer ++ "/echo" - GraphqlEngine.postMetadata_ testEnvironment $ - [interpolateYaml| + GraphqlEngine.postMetadata_ testEnvironment + $ [interpolateYaml| type: create_scheduled_event args: webhook: #{webhookServerEchoEndpoint} diff --git a/server/lib/api-tests/src/Test/Schema/ComputedFields/TableSpec.hs b/server/lib/api-tests/src/Test/Schema/ComputedFields/TableSpec.hs index 21b54624f73..1da99244b1a 100644 --- a/server/lib/api-tests/src/Test/Schema/ComputedFields/TableSpec.hs +++ b/server/lib/api-tests/src/Test/Schema/ComputedFields/TableSpec.hs @@ -45,8 +45,8 @@ spec = <> bigquerySetupFunctions testEnv <> setupMetadata testEnv, Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } } @@ -114,8 +114,8 @@ postgresSetupFunctions testEnv = articleTableSQL = unSchemaName 
schemaName <> ".article" in [ Fixture.SetupAction { Fixture.setupAction = - Postgres.run_ testEnv $ - [i| + Postgres.run_ testEnv + $ [i| CREATE FUNCTION #{ fetch_articles schemaName }(author_row author, search TEXT) RETURNS SETOF article AS $$ SELECT * @@ -130,8 +130,8 @@ postgresSetupFunctions testEnv = }, Fixture.SetupAction { Fixture.setupAction = - Postgres.run_ testEnv $ - [i| + Postgres.run_ testEnv + $ [i| CREATE FUNCTION #{ fetch_articles_no_user_args schemaName }(author_row author) RETURNS SETOF article AS $$ SELECT * @@ -149,8 +149,8 @@ bigquerySetupFunctions testEnv = articleTableSQL = unSchemaName schemaName <> ".article" in [ Fixture.SetupAction { Fixture.setupAction = - BigQuery.run_ $ - [i| + BigQuery.run_ + $ [i| CREATE TABLE FUNCTION #{ fetch_articles schemaName }(a_id INT64, search STRING) AS @@ -163,8 +163,8 @@ bigquerySetupFunctions testEnv = }, Fixture.SetupAction { Fixture.setupAction = - BigQuery.run_ $ - [i| + BigQuery.run_ + $ [i| CREATE TABLE FUNCTION #{ fetch_articles_no_user_args schemaName }(a_id INT64) AS diff --git a/server/lib/api-tests/src/Test/Schema/DataValidations/Permissions/SelectSpec.hs b/server/lib/api-tests/src/Test/Schema/DataValidations/Permissions/SelectSpec.hs index 24ab41a1f33..ba08f753064 100644 --- a/server/lib/api-tests/src/Test/Schema/DataValidations/Permissions/SelectSpec.hs +++ b/server/lib/api-tests/src/Test/Schema/DataValidations/Permissions/SelectSpec.hs @@ -63,8 +63,8 @@ spec = do setupMetadata DoesNotSupportArrayTypes BigQuery.backendTypeMetadata testEnvironment ], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } } @@ -226,8 +226,8 @@ tests arrayTypeSupport = describe "Permissions on queries" do shouldReturnYaml testEnvironment actual expected it "Editor role can select in review and published articles only" \testEnvironment -> do - when (arrayTypeSupport == DoesNotSupportArrayTypes) $ - pendingWith "Backend does not support array types" + when (arrayTypeSupport == DoesNotSupportArrayTypes) + $ pendingWith "Backend does not support array types" let schemaName :: Schema.SchemaName schemaName = Schema.getSchemaName testEnvironment diff --git a/server/lib/api-tests/src/Test/Schema/DefaultValues/OnConflictSpec.hs b/server/lib/api-tests/src/Test/Schema/DefaultValues/OnConflictSpec.hs index d2e87d8b24a..a4e79a54fd4 100644 --- a/server/lib/api-tests/src/Test/Schema/DefaultValues/OnConflictSpec.hs +++ b/server/lib/api-tests/src/Test/Schema/DefaultValues/OnConflictSpec.hs @@ -79,8 +79,8 @@ schema = defaultDateTimeType :: Schema.ScalarType defaultDateTimeType = - Schema.TCustomType $ - Schema.defaultBackendScalarType + Schema.TCustomType + $ Schema.defaultBackendScalarType { Schema.bstMssql = Just "DATETIME DEFAULT GETDATE()", Schema.bstCitus = Just "TIMESTAMP DEFAULT NOW()", Schema.bstPostgres = Just "TIMESTAMP DEFAULT NOW()", diff --git a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/FromRemoteSchemaSpec.hs b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/FromRemoteSchemaSpec.hs index 7f896bcd654..b2ee74c64d0 100644 --- a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/FromRemoteSchemaSpec.hs +++ b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/FromRemoteSchemaSpec.hs @@ -37,15 +37,15 @@ spec = Fixture.runWithLocalTestEnvironmentSingleSetup (NE.fromList [context]) te (Fixture.fixture $ Fixture.RemoteGraphQLServer) { -- start only one remote server Fixture.mkLocalTestEnvironment = \_testEnvironment -> - RemoteServer.run $ - 
RemoteServer.generateQueryInterpreter $ - Query - { object = objectResolver, - writer = writerResolver, - artist = artistResolver, - objects = objectsResolver, - articles = articlesResolver - }, + RemoteServer.run + $ RemoteServer.generateQueryInterpreter + $ Query + { object = objectResolver, + writer = writerResolver, + artist = artistResolver, + objects = objectsResolver, + articles = articlesResolver + }, -- set that remote server as both source and target, for convenience -- start a RHS Postgres for Metadata tests only setupTeardown = \(testEnvironment, server) -> @@ -181,7 +181,7 @@ type Article { |] -knownObjects :: Monad m => [(Int, Object m)] +knownObjects :: (Monad m) => [(Int, Object m)] knownObjects = [ (101, ObjectWriter writer1), (102, ObjectWriter writer2), @@ -202,28 +202,29 @@ knownObjects = article3 = Article (pure 303) (pure "Article3") (pure 201) (pure 102) article4 = Article (pure 304) (pure "Article4") (pure 202) (pure 102) -objectResolver :: Monad m => Arg "id" Int -> m (Maybe (Object m)) +objectResolver :: (Monad m) => Arg "id" Int -> m (Maybe (Object m)) objectResolver (Arg objectId) = pure $ lookup objectId knownObjects -writerResolver :: Monad m => Arg "id" Int -> m (Maybe (Writer m)) +writerResolver :: (Monad m) => Arg "id" Int -> m (Maybe (Writer m)) writerResolver (Arg objectId) = pure $ case lookup objectId knownObjects of Just (ObjectWriter w) -> Just w _ -> Nothing -artistResolver :: Monad m => Arg "id" Int -> m (Maybe (Artist m)) +artistResolver :: (Monad m) => Arg "id" Int -> m (Maybe (Artist m)) artistResolver (Arg objectId) = pure $ case lookup objectId knownObjects of Just (ObjectArtist a) -> Just a _ -> Nothing -objectsResolver :: Monad m => Arg "ids" [Int] -> m [Maybe (Object m)] +objectsResolver :: (Monad m) => Arg "ids" [Int] -> m [Maybe (Object m)] objectsResolver (Arg objectIds) = pure [lookup objectId knownObjects | objectId <- objectIds] -articlesResolver :: Monad m => Arg "ids" [Int] -> m [Maybe (Article m)] +articlesResolver :: (Monad m) => Arg "ids" [Int] -> m [Maybe (Article m)] articlesResolver (Arg objectIds) = - pure $ - objectIds <&> \objectId -> + pure + $ objectIds + <&> \objectId -> case lookup objectId knownObjects of Just (ObjectArticle a) -> Just a _ -> Nothing diff --git a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/MetadataAPI/Common.hs b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/MetadataAPI/Common.hs index 1b8f3f94c0a..917a8e11043 100644 --- a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/MetadataAPI/Common.hs +++ b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/MetadataAPI/Common.hs @@ -65,8 +65,8 @@ data LocalTestTestEnvironment = LocalTestTestEnvironment dbTodbRemoteRelationshipFixture :: Fixture.Fixture LocalTestTestEnvironment dbTodbRemoteRelationshipFixture = - ( Fixture.fixture $ - Fixture.Combine + ( Fixture.fixture + $ Fixture.Combine (Fixture.Backend Postgres.backendTypeMetadata) (Fixture.Backend Postgres.backendTypeMetadata) ) @@ -440,7 +440,7 @@ data LHSQuery m = LHSQuery } deriving (Generic) -instance Typeable m => Morpheus.GQLType (LHSQuery m) where +instance (Typeable m) => Morpheus.GQLType (LHSQuery m) where typeOptions _ _ = hasuraTypeOptions data LHSHasuraTrackArgs = LHSHasuraTrackArgs @@ -460,7 +460,7 @@ data LHSHasuraTrack m = LHSHasuraTrack } deriving (Generic) -instance Typeable m => Morpheus.GQLType (LHSHasuraTrack m) where +instance (Typeable m) => Morpheus.GQLType (LHSHasuraTrack m) where typeOptions _ _ = hasuraTypeOptions data LHSHasuraTrackOrderBy = 
LHSHasuraTrackOrderBy @@ -517,12 +517,12 @@ lhsRemoteServerMkLocalTestEnvironment _ = Nothing -> \_ _ -> EQ Just orderByArg -> orderTrack orderByArg limitFunction = maybe Hasura.Prelude.id take ta_limit - pure $ - tracks - & filter filterFunction - & sortBy orderByFunction - & limitFunction - & map mkTrack + pure + $ tracks + & filter filterFunction + & sortBy orderByFunction + & limitFunction + & map mkTrack -- Returns True iif the given track matches the given boolean expression. matchTrack trackInfo@(trackId, trackTitle, maybeAlbumId) (LHSHasuraTrackBoolExp {..}) = and @@ -543,16 +543,16 @@ lhsRemoteServerMkLocalTestEnvironment _ = (trackId2, trackTitle2, trackAlbumId2) = flip foldMap orderByList \LHSHasuraTrackOrderBy {..} -> if - | Just idOrder <- tob_id -> case idOrder of - Asc -> compare trackId1 trackId2 - Desc -> compare trackId2 trackId1 - | Just titleOrder <- tob_title -> case titleOrder of - Asc -> compare trackTitle1 trackTitle2 - Desc -> compare trackTitle2 trackTitle1 - | Just albumIdOrder <- tob_album_id -> - compareWithNullLast albumIdOrder trackAlbumId1 trackAlbumId2 - | otherwise -> - error "empty track_order object" + | Just idOrder <- tob_id -> case idOrder of + Asc -> compare trackId1 trackId2 + Desc -> compare trackId2 trackId1 + | Just titleOrder <- tob_title -> case titleOrder of + Asc -> compare trackTitle1 trackTitle2 + Desc -> compare trackTitle2 trackTitle1 + | Just albumIdOrder <- tob_album_id -> + compareWithNullLast albumIdOrder trackAlbumId1 trackAlbumId2 + | otherwise -> + error "empty track_order object" compareWithNullLast Desc x1 x2 = compareWithNullLast Asc x2 x1 compareWithNullLast Asc Nothing Nothing = EQ compareWithNullLast Asc (Just _) Nothing = LT diff --git a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/MetadataAPI/DropSource/DBtoDBRelationshipSpec.hs b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/MetadataAPI/DropSource/DBtoDBRelationshipSpec.hs index d9bb9e67467..eeda601c019 100644 --- a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/MetadataAPI/DropSource/DBtoDBRelationshipSpec.hs +++ b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/MetadataAPI/DropSource/DBtoDBRelationshipSpec.hs @@ -90,8 +90,8 @@ tests = describe "drop-source-metadata-tests" do let sources = key "sources" . values -- Extract the 'source' DB info from the sources field in metadata sourceDB = - Unsafe.fromJust $ - findOf + Unsafe.fromJust + $ findOf sources (has $ key "name" . _String . only "source") metadata diff --git a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/MetadataAPI/DropSource/RSToDBRelationshipSpec.hs b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/MetadataAPI/DropSource/RSToDBRelationshipSpec.hs index b6ee321ff11..29837f1f4af 100644 --- a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/MetadataAPI/DropSource/RSToDBRelationshipSpec.hs +++ b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/MetadataAPI/DropSource/RSToDBRelationshipSpec.hs @@ -89,8 +89,8 @@ tests = describe "drop-source-metadata-tests" do let remoteSchemas = key "remote_schemas" . values -- Extract the 'source' remote schema and check if any remote relationships exists sourceRemoteSchema = - Unsafe.fromJust $ - findOf + Unsafe.fromJust + $ findOf remoteSchemas (has $ key "name" . _String . 
only "source") metadata diff --git a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/XToDBArrayRelationshipSpec.hs b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/XToDBArrayRelationshipSpec.hs index bb3608f9a90..1c09b054cd1 100644 --- a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/XToDBArrayRelationshipSpec.hs +++ b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/XToDBArrayRelationshipSpec.hs @@ -239,7 +239,7 @@ lhsRole2 = selectPermissionSource = Just lhsSourceName_ } -createRemoteRelationship :: HasCallStack => Value -> Value -> TestEnvironment -> IO () +createRemoteRelationship :: (HasCallStack) => Value -> Value -> TestEnvironment -> IO () createRemoteRelationship lhsTableName rhsTableName testEnvironment = do let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment backendType = BackendType.backendTypeString backendTypeMetadata @@ -333,7 +333,7 @@ rhsTable = -------------------------------------------------------------------------------- -- LHS Postgres -lhsPostgresSetup :: HasCallStack => Value -> (TestEnvironment, Maybe Server) -> IO () +lhsPostgresSetup :: (HasCallStack) => Value -> (TestEnvironment, Maybe Server) -> IO () lhsPostgresSetup rhsTableName (wholeTestEnvironment, _) = do let testEnvironment = focusFixtureLeft wholeTestEnvironment sourceConfig = Postgres.defaultSourceConfiguration testEnvironment @@ -359,7 +359,7 @@ lhsPostgresSetup rhsTableName (wholeTestEnvironment, _) = do -------------------------------------------------------------------------------- -- LHS Cockroach -lhsCockroachSetup :: HasCallStack => Value -> (TestEnvironment, Maybe Server) -> IO () +lhsCockroachSetup :: (HasCallStack) => Value -> (TestEnvironment, Maybe Server) -> IO () lhsCockroachSetup rhsTableName (wholeTestEnvironment, _) = do let testEnvironment = focusFixtureLeft wholeTestEnvironment sourceConfig = Cockroach.defaultSourceConfiguration testEnvironment @@ -386,7 +386,7 @@ lhsCockroachSetup rhsTableName (wholeTestEnvironment, _) = do -------------------------------------------------------------------------------- -- LHS Citus -lhsCitusSetup :: HasCallStack => Value -> (TestEnvironment, Maybe Server) -> IO () +lhsCitusSetup :: (HasCallStack) => Value -> (TestEnvironment, Maybe Server) -> IO () lhsCitusSetup rhsTableName (wholeTestEnvironment, _) = do let testEnvironment = focusFixtureLeft wholeTestEnvironment sourceConfig = Citus.defaultSourceConfiguration testEnvironment @@ -412,7 +412,7 @@ lhsCitusSetup rhsTableName (wholeTestEnvironment, _) = do -------------------------------------------------------------------------------- -- LHS SQLServer -lhsSQLServerSetup :: HasCallStack => Value -> (TestEnvironment, Maybe Server) -> IO () +lhsSQLServerSetup :: (HasCallStack) => Value -> (TestEnvironment, Maybe Server) -> IO () lhsSQLServerSetup rhsTableName (wholeTestEnvironment, _) = do let testEnvironment = focusFixtureLeft wholeTestEnvironment sourceConfig = SQLServer.defaultSourceConfiguration testEnvironment @@ -438,7 +438,7 @@ lhsSQLServerSetup rhsTableName (wholeTestEnvironment, _) = do -------------------------------------------------------------------------------- -- LHS SQLite -lhsSqliteSetup :: HasCallStack => Value -> (TestEnvironment, Maybe Server) -> IO API.DatasetCloneName +lhsSqliteSetup :: (HasCallStack) => Value -> (TestEnvironment, Maybe Server) -> IO API.DatasetCloneName lhsSqliteSetup rhsTableName (wholeTestEnvironment, _) = do let testEnvironment = focusFixtureLeft wholeTestEnvironment let 
cloneName = API.DatasetCloneName $ tshow (uniqueTestId testEnvironment) <> "-lhs" @@ -502,7 +502,7 @@ data Query m = Query } deriving (Generic) -instance Typeable m => Morpheus.GQLType (Query m) +instance (Typeable m) => Morpheus.GQLType (Query m) data HasuraArtistArgs = HasuraArtistArgs { aa_where :: Maybe HasuraArtistBoolExp, @@ -520,7 +520,7 @@ data HasuraArtist m = HasuraArtist } deriving (Generic) -instance Typeable m => Morpheus.GQLType (HasuraArtist m) where +instance (Typeable m) => Morpheus.GQLType (HasuraArtist m) where typeOptions _ _ = hasuraTypeOptions data HasuraArtistOrderBy = HasuraArtistOrderBy @@ -575,12 +575,12 @@ lhsRemoteServerMkLocalTestEnvironment _ = Nothing -> \_ _ -> EQ Just orderByArg -> orderArtist orderByArg limitFunction = maybe id take aa_limit - pure $ - artists - & filter filterFunction - & sortBy orderByFunction - & limitFunction - & map mkArtist + pure + $ artists + & filter filterFunction + & sortBy orderByFunction + & limitFunction + & map mkArtist -- Returns True iif the given artist matches the given boolean expression. matchArtist artistInfo@(artistId, artistName) (HasuraArtistBoolExp {..}) = and @@ -599,13 +599,13 @@ lhsRemoteServerMkLocalTestEnvironment _ = (artistId2, artistName2) = flip foldMap orderByList \HasuraArtistOrderBy {..} -> if - | Just idOrder <- aob_id -> - compareWithNullLast idOrder artistId1 artistId2 - | Just nameOrder <- aob_name -> case nameOrder of - Asc -> compare artistName1 artistName2 - Desc -> compare artistName2 artistName1 - | otherwise -> - error "empty artist_order object" + | Just idOrder <- aob_id -> + compareWithNullLast idOrder artistId1 artistId2 + | Just nameOrder <- aob_name -> case nameOrder of + Asc -> compare artistName1 artistName2 + Desc -> compare artistName2 artistName1 + | otherwise -> + error "empty artist_order object" compareWithNullLast Desc x1 x2 = compareWithNullLast Asc x2 x1 compareWithNullLast Asc Nothing Nothing = EQ compareWithNullLast Asc (Just _) Nothing = LT @@ -623,7 +623,7 @@ lhsRemoteServerMkLocalTestEnvironment _ = a_name = pure $ Just artistName } -lhsRemoteServerSetup :: HasCallStack => Value -> (TestEnvironment, Maybe Server) -> IO () +lhsRemoteServerSetup :: (HasCallStack) => Value -> (TestEnvironment, Maybe Server) -> IO () lhsRemoteServerSetup tableName (testEnvironment, maybeRemoteServer) = case maybeRemoteServer of Nothing -> error "XToDBArrayRelationshipSpec: remote server local testEnvironment did not succesfully create a server" Just remoteServer -> do @@ -832,14 +832,14 @@ schemaTests = . key "fields" . values albumsField = - Unsafe.fromJust $ - findOf + Unsafe.fromJust + $ findOf focusArtistFields (has $ key "name" . _String . only "albums") introspectionResult albumsAggregateField = - Unsafe.fromJust $ - findOf + Unsafe.fromJust + $ findOf focusArtistFields (has $ key "name" . _String . 
only "albums_aggregate") introspectionResult diff --git a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/XToDBObjectRelationshipSpec.hs b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/XToDBObjectRelationshipSpec.hs index b22843f3c71..7da8ce99f6c 100644 --- a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/XToDBObjectRelationshipSpec.hs +++ b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/XToDBObjectRelationshipSpec.hs @@ -257,7 +257,7 @@ album = lhsPostgresMkLocalTestEnvironment :: TestEnvironment -> Managed (Maybe Server) lhsPostgresMkLocalTestEnvironment _ = pure Nothing -lhsPostgresSetup :: HasCallStack => Value -> (TestEnvironment, Maybe Server) -> IO () +lhsPostgresSetup :: (HasCallStack) => Value -> (TestEnvironment, Maybe Server) -> IO () lhsPostgresSetup rhsTableName (wholeTestEnvironment, _) = do let testEnvironment = focusFixtureLeft wholeTestEnvironment sourceName = "source" @@ -323,7 +323,7 @@ args: lhsCitusMkLocalTestEnvironment :: TestEnvironment -> Managed (Maybe Server) lhsCitusMkLocalTestEnvironment _ = pure Nothing -lhsCitusSetup :: HasCallStack => Value -> (TestEnvironment, Maybe Server) -> IO () +lhsCitusSetup :: (HasCallStack) => Value -> (TestEnvironment, Maybe Server) -> IO () lhsCitusSetup rhsTableName (wholeTestEnvironment, _) = do let testEnvironment = focusFixtureLeft wholeTestEnvironment sourceName = "source" @@ -389,7 +389,7 @@ args: lhsCockroachMkLocalTestEnvironment :: TestEnvironment -> Managed (Maybe Server) lhsCockroachMkLocalTestEnvironment _ = pure Nothing -lhsCockroachSetup :: HasCallStack => Value -> (TestEnvironment, Maybe Server) -> IO () +lhsCockroachSetup :: (HasCallStack) => Value -> (TestEnvironment, Maybe Server) -> IO () lhsCockroachSetup rhsTableName (wholeTestEnvironment, _) = do let testEnvironment = focusFixtureLeft wholeTestEnvironment sourceName = "source" @@ -455,7 +455,7 @@ lhsCockroachSetup rhsTableName (wholeTestEnvironment, _) = do lhsSQLServerMkLocalTestEnvironment :: TestEnvironment -> Managed (Maybe Server) lhsSQLServerMkLocalTestEnvironment _ = pure Nothing -lhsSQLServerSetup :: HasCallStack => Value -> (TestEnvironment, Maybe Server) -> IO () +lhsSQLServerSetup :: (HasCallStack) => Value -> (TestEnvironment, Maybe Server) -> IO () lhsSQLServerSetup rhsTableName (wholeTestEnvironment, _) = do let testEnvironment = focusFixtureLeft wholeTestEnvironment sourceName = "source" @@ -519,7 +519,7 @@ args: -------------------------------------------------------------------------------- -- LHS SQLite -lhsSqliteSetup :: HasCallStack => Value -> (TestEnvironment, Maybe Server) -> IO API.DatasetCloneName +lhsSqliteSetup :: (HasCallStack) => Value -> (TestEnvironment, Maybe Server) -> IO API.DatasetCloneName lhsSqliteSetup rhsTableName (wholeTestEnvironment, _) = do let testEnvironment = focusFixtureLeft wholeTestEnvironment let cloneName = API.DatasetCloneName $ tshow (uniqueTestId testEnvironment) <> "-lhs" @@ -611,7 +611,7 @@ data Query m = Query } deriving (Generic) -instance Typeable m => Morpheus.GQLType (Query m) +instance (Typeable m) => Morpheus.GQLType (Query m) data HasuraTrackArgs = HasuraTrackArgs { ta_where :: Maybe HasuraTrackBoolExp, @@ -630,7 +630,7 @@ data HasuraTrack m = HasuraTrack } deriving (Generic) -instance Typeable m => Morpheus.GQLType (HasuraTrack m) where +instance (Typeable m) => Morpheus.GQLType (HasuraTrack m) where typeOptions _ _ = hasuraTypeOptions data HasuraTrackOrderBy = HasuraTrackOrderBy @@ -687,12 +687,12 @@ lhsRemoteServerMkLocalTestEnvironment _ = Nothing -> \_ 
_ -> EQ Just orderByArg -> orderTrack orderByArg limitFunction = maybe Hasura.Prelude.id take ta_limit - pure $ - tracks - & filter filterFunction - & sortBy orderByFunction - & limitFunction - & map mkTrack + pure + $ tracks + & filter filterFunction + & sortBy orderByFunction + & limitFunction + & map mkTrack -- Returns True iif the given track matches the given boolean expression. matchTrack trackInfo@(trackId, trackTitle, maybeAlbumId) (HasuraTrackBoolExp {..}) = and @@ -713,16 +713,16 @@ lhsRemoteServerMkLocalTestEnvironment _ = (trackId2, trackTitle2, trackAlbumId2) = flip foldMap orderByList \HasuraTrackOrderBy {..} -> if - | Just idOrder <- tob_id -> case idOrder of - Asc -> compare trackId1 trackId2 - Desc -> compare trackId2 trackId1 - | Just titleOrder <- tob_title -> case titleOrder of - Asc -> compare trackTitle1 trackTitle2 - Desc -> compare trackTitle2 trackTitle1 - | Just albumIdOrder <- tob_album_id -> - compareWithNullLast albumIdOrder trackAlbumId1 trackAlbumId2 - | otherwise -> - error "empty track_order object" + | Just idOrder <- tob_id -> case idOrder of + Asc -> compare trackId1 trackId2 + Desc -> compare trackId2 trackId1 + | Just titleOrder <- tob_title -> case titleOrder of + Asc -> compare trackTitle1 trackTitle2 + Desc -> compare trackTitle2 trackTitle1 + | Just albumIdOrder <- tob_album_id -> + compareWithNullLast albumIdOrder trackAlbumId1 trackAlbumId2 + | otherwise -> + error "empty track_order object" compareWithNullLast Desc x1 x2 = compareWithNullLast Asc x2 x1 compareWithNullLast Asc Nothing Nothing = EQ compareWithNullLast Asc (Just _) Nothing = LT @@ -745,7 +745,7 @@ lhsRemoteServerMkLocalTestEnvironment _ = t_album_id = pure albumId } -lhsRemoteServerSetup :: HasCallStack => Value -> (TestEnvironment, Maybe Server) -> IO () +lhsRemoteServerSetup :: (HasCallStack) => Value -> (TestEnvironment, Maybe Server) -> IO () lhsRemoteServerSetup tableName (testEnvironment, maybeRemoteServer) = case maybeRemoteServer of Nothing -> error "XToDBObjectRelationshipSpec: remote server local testEnvironment did not succesfully create a server" Just remoteServer -> do diff --git a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/XToRemoteSchemaRelationshipSpec.hs b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/XToRemoteSchemaRelationshipSpec.hs index 9db0002ffe4..d7133b4a17d 100644 --- a/server/lib/api-tests/src/Test/Schema/RemoteRelationships/XToRemoteSchemaRelationshipSpec.hs +++ b/server/lib/api-tests/src/Test/Schema/RemoteRelationships/XToRemoteSchemaRelationshipSpec.hs @@ -47,8 +47,8 @@ spec :: SpecWith GlobalTestEnvironment spec = Fixture.runWithLocalTestEnvironment contexts tests where contexts = - NE.fromList $ - map + NE.fromList + $ map mkFixture [ (Fixture.fixture $ Fixture.Backend Postgres.backendTypeMetadata) { Fixture.mkLocalTestEnvironment = lhsPostgresMkLocalTestEnvironment, @@ -431,7 +431,7 @@ data LHSQuery m = LHSQuery } deriving (Generic) -instance Typeable m => Morpheus.GQLType (LHSQuery m) where +instance (Typeable m) => Morpheus.GQLType (LHSQuery m) where typeOptions _ _ = hasuraTypeOptions data LHSHasuraTrackArgs = LHSHasuraTrackArgs @@ -451,7 +451,7 @@ data LHSHasuraTrack m = LHSHasuraTrack } deriving (Generic) -instance Typeable m => Morpheus.GQLType (LHSHasuraTrack m) where +instance (Typeable m) => Morpheus.GQLType (LHSHasuraTrack m) where typeOptions _ _ = hasuraTypeOptions data LHSHasuraTrackOrderBy = LHSHasuraTrackOrderBy @@ -508,12 +508,12 @@ lhsRemoteServerMkLocalTestEnvironment _ = Nothing -> \_ _ -> EQ Just 
orderByArg -> orderTrack orderByArg limitFunction = maybe Hasura.Prelude.id take ta_limit - pure $ - tracks - & filter filterFunction - & sortBy orderByFunction - & limitFunction - & map mkTrack + pure + $ tracks + & filter filterFunction + & sortBy orderByFunction + & limitFunction + & map mkTrack -- Returns True iif the given track matches the given boolean expression. matchTrack trackInfo@(trackId, trackTitle, maybeAlbumId) (LHSHasuraTrackBoolExp {..}) = and @@ -534,16 +534,16 @@ lhsRemoteServerMkLocalTestEnvironment _ = (trackId2, trackTitle2, trackAlbumId2) = flip foldMap orderByList \LHSHasuraTrackOrderBy {..} -> if - | Just idOrder <- tob_id -> case idOrder of - Asc -> compare trackId1 trackId2 - Desc -> compare trackId2 trackId1 - | Just titleOrder <- tob_title -> case titleOrder of - Asc -> compare trackTitle1 trackTitle2 - Desc -> compare trackTitle2 trackTitle1 - | Just albumIdOrder <- tob_album_id -> - compareWithNullLast albumIdOrder trackAlbumId1 trackAlbumId2 - | otherwise -> - error "empty track_order object" + | Just idOrder <- tob_id -> case idOrder of + Asc -> compare trackId1 trackId2 + Desc -> compare trackId2 trackId1 + | Just titleOrder <- tob_title -> case titleOrder of + Asc -> compare trackTitle1 trackTitle2 + Desc -> compare trackTitle2 trackTitle1 + | Just albumIdOrder <- tob_album_id -> + compareWithNullLast albumIdOrder trackAlbumId1 trackAlbumId2 + | otherwise -> + error "empty track_order object" compareWithNullLast Desc x1 x2 = compareWithNullLast Asc x2 x1 compareWithNullLast Asc Nothing Nothing = EQ compareWithNullLast Asc (Just _) Nothing = LT diff --git a/server/lib/api-tests/src/Test/Schema/RemoteSchemaCustomizationSpec.hs b/server/lib/api-tests/src/Test/Schema/RemoteSchemaCustomizationSpec.hs index 14c3b1c741d..8bb42fb884a 100644 --- a/server/lib/api-tests/src/Test/Schema/RemoteSchemaCustomizationSpec.hs +++ b/server/lib/api-tests/src/Test/Schema/RemoteSchemaCustomizationSpec.hs @@ -27,11 +27,11 @@ spec = Fixture.runWithLocalTestEnvironment (NE.fromList [context]) tests (Fixture.fixture $ Fixture.RemoteGraphQLServer) { -- start only one remote server Fixture.mkLocalTestEnvironment = \_testEnvironment -> - RemoteServer.run $ - RemoteServer.generateQueryInterpreter $ - Query - { echoEnum = echoEnumResolver - }, + RemoteServer.run + $ RemoteServer.generateQueryInterpreter + $ Query + { echoEnum = echoEnumResolver + }, setupTeardown = \(testEnvironment, server) -> [ Fixture.SetupAction { Fixture.setupAction = @@ -175,5 +175,5 @@ enum Profession { |] -echoEnumResolver :: Monad m => Arg "x" Profession -> m Profession +echoEnumResolver :: (Monad m) => Arg "x" Profession -> m Profession echoEnumResolver (Arg x) = pure x diff --git a/server/lib/api-tests/src/Test/Schema/TableRelationships/ArrayRelationshipsSpec.hs b/server/lib/api-tests/src/Test/Schema/TableRelationships/ArrayRelationshipsSpec.hs index a39d5c5e64c..aae04c35635 100644 --- a/server/lib/api-tests/src/Test/Schema/TableRelationships/ArrayRelationshipsSpec.hs +++ b/server/lib/api-tests/src/Test/Schema/TableRelationships/ArrayRelationshipsSpec.hs @@ -44,8 +44,8 @@ spec = do { Fixture.setupTeardown = \(testEnv, _) -> [BigQuery.setupTablesAction schema testEnv], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } } diff --git a/server/lib/api-tests/src/Test/Schema/TableRelationships/ObjectRelationshipsSpec.hs b/server/lib/api-tests/src/Test/Schema/TableRelationships/ObjectRelationshipsSpec.hs index fdc4cc1064b..e2800742756 
100644 --- a/server/lib/api-tests/src/Test/Schema/TableRelationships/ObjectRelationshipsSpec.hs +++ b/server/lib/api-tests/src/Test/Schema/TableRelationships/ObjectRelationshipsSpec.hs @@ -72,8 +72,8 @@ spec = do { Fixture.setupTeardown = \(testEnv, _) -> [BigQuery.setupTablesAction schema testEnv], Fixture.customOptions = - Just $ - Fixture.defaultOptions + Just + $ Fixture.defaultOptions { Fixture.stringifyNumbers = True } } diff --git a/server/lib/arrows-extra/src/Control/Arrow/Interpret.hs b/server/lib/arrows-extra/src/Control/Arrow/Interpret.hs index c92f9513678..aab6c55b9b2 100644 --- a/server/lib/arrows-extra/src/Control/Arrow/Interpret.hs +++ b/server/lib/arrows-extra/src/Control/Arrow/Interpret.hs @@ -78,7 +78,7 @@ import Control.Monad.Trans.Writer -- NB: This is conceptually different from `ArrowApply`, which expresses that a -- given `Arrow` /is/ a Kleisli arrow. `ArrowInterpret` has no such condition -- on @arr@. -interpretWriter :: ArrowWriter w arr => Writer w a `arr` a +interpretWriter :: (ArrowWriter w arr) => Writer w a `arr` a interpretWriter = proc m -> do let (a, w) = runWriter m tellA -< w diff --git a/server/lib/dc-api/test/Command.hs b/server/lib/dc-api/test/Command.hs index 6fc2a097152..c85da0b9633 100644 --- a/server/lib/dc-api/test/Command.hs +++ b/server/lib/dc-api/test/Command.hs @@ -211,15 +211,15 @@ agentOptionsParser = <> metavar "JSON" <> help "The configuration JSON to be sent to the agent via the X-Hasura-DataConnector-Config header. If omitted, datasets will be used to load test data and provide this configuration dynamically" ) - <|> DatasetConfig - <$> optional - ( option - configValue - ( long "merge-agent-config" - <> metavar "JSON" - <> help "Datasets will be used to load test data and provide configuration JSON to be sent to the agent via the X-Hasura-DataConnector-Config header. This config will be merged with the dataset-provided config before being sent to the agent." - ) - ) + <|> DatasetConfig + <$> optional + ( option + configValue + ( long "merge-agent-config" + <> metavar "JSON" + <> help "Datasets will be used to load test data and provide configuration JSON to be sent to the agent via the X-Hasura-DataConnector-Config header. This config will be merged with the dataset-provided config before being sent to the agent." + ) + ) ) exportDataConfigParser :: Parser ExportDataConfig @@ -252,5 +252,5 @@ baseUrl = eitherReader $ left show . parseBaseUrl configValue :: ReadM API.Config configValue = fmap API.Config jsonValue -jsonValue :: FromJSON v => ReadM v +jsonValue :: (FromJSON v) => ReadM v jsonValue = eitherReader (eitherDecodeStrict' . Text.encodeUtf8 . 
Text.pack) diff --git a/server/lib/dc-api/test/Test/AgentAPI.hs b/server/lib/dc-api/test/Test/AgentAPI.hs index a9775c1e164..11e029afe15 100644 --- a/server/lib/dc-api/test/Test/AgentAPI.hs +++ b/server/lib/dc-api/test/Test/AgentAPI.hs @@ -46,13 +46,13 @@ import Test.Expectations (yamlShow) import Test.Sandwich (HasBaseContext, expectationFailure) import Prelude -client :: RunClient m => API.Routes (AsClientT m) +client :: (RunClient m) => API.Routes (AsClientT m) client = genericClient @API.Routes getCapabilitiesGuarded :: (HasBaseContext context, MonadReader context m, MonadThrow m, MonadIO m) => AgentClientT m API.CapabilitiesResponse getCapabilitiesGuarded = guardCapabilitiesResponse =<< (client // API._capabilities) -guardCapabilitiesResponse :: MonadThrow m => Union API.CapabilitiesResponses -> m API.CapabilitiesResponse +guardCapabilitiesResponse :: (MonadThrow m) => Union API.CapabilitiesResponses -> m API.CapabilitiesResponse guardCapabilitiesResponse = API.capabilitiesCase defaultAction successAction errorAction where defaultAction = expectationFailure "Expected CapabilitiesResponse" @@ -72,7 +72,7 @@ getSchemaGuarded = do (sourceName, config) <- getSourceNameAndConfig guardSchemaResponse =<< (client // API._schema) sourceName config -guardSchemaResponse :: MonadThrow m => Union API.SchemaResponses -> m API.SchemaResponse +guardSchemaResponse :: (MonadThrow m) => Union API.SchemaResponses -> m API.SchemaResponse guardSchemaResponse = API.schemaCase defaultAction successAction errorAction where defaultAction = expectationFailure "Expected SchemaResponse" @@ -84,7 +84,7 @@ queryGuarded queryRequest = do (sourceName, config) <- getSourceNameAndConfig guardQueryResponse =<< (client // API._query) sourceName config queryRequest -guardQueryResponse :: MonadThrow m => Union API.QueryResponses -> m API.QueryResponse +guardQueryResponse :: (MonadThrow m) => Union API.QueryResponses -> m API.QueryResponse guardQueryResponse = API.queryCase defaultAction successAction errorAction where defaultAction = expectationFailure "Expected QueryResponse" @@ -96,7 +96,7 @@ queryExpectError queryRequest = do (sourceName, config) <- getSourceNameAndConfig guardQueryErrorResponse =<< (client // API._query) sourceName config queryRequest -guardQueryErrorResponse :: MonadThrow m => Union API.QueryResponses -> m API.ErrorResponse +guardQueryErrorResponse :: (MonadThrow m) => Union API.QueryResponses -> m API.ErrorResponse guardQueryErrorResponse = API.queryCase defaultAction successAction errorAction where defaultAction = expectationFailure "Expected ErrorResponse" @@ -116,7 +116,7 @@ mutationGuarded mutationRequest = do (sourceName, config) <- getSourceNameAndConfig guardMutationResponse =<< (client // API._mutation) sourceName config mutationRequest -guardMutationResponse :: MonadThrow m => Union API.MutationResponses -> m API.MutationResponse +guardMutationResponse :: (MonadThrow m) => Union API.MutationResponses -> m API.MutationResponse guardMutationResponse = API.mutationCase defaultAction successAction errorAction where defaultAction = expectationFailure "Expected MutationResponse" @@ -128,7 +128,7 @@ mutationExpectError mutationRequest = do (sourceName, config) <- getSourceNameAndConfig guardMutationErrorResponse =<< (client // API._mutation) sourceName config mutationRequest -guardMutationErrorResponse :: MonadThrow m => Union API.MutationResponses -> m API.ErrorResponse +guardMutationErrorResponse :: (MonadThrow m) => Union API.MutationResponses -> m API.ErrorResponse guardMutationErrorResponse = 
API.mutationCase defaultAction successAction errorAction where defaultAction = expectationFailure "Expected ErrorResponse" diff --git a/server/lib/dc-api/test/Test/AgentClient.hs b/server/lib/dc-api/test/Test/AgentClient.hs index b079e4573e0..5f39d67e729 100644 --- a/server/lib/dc-api/test/Test/AgentClient.hs +++ b/server/lib/dc-api/test/Test/AgentClient.hs @@ -51,7 +51,7 @@ import Prelude ------------------------------------------------------------------------------- -newtype AgentIOClient = AgentIOClient (forall m. MonadIO m => Client m (NamedRoutes API.Routes)) +newtype AgentIOClient = AgentIOClient (forall m. (MonadIO m) => Client m (NamedRoutes API.Routes)) configHeader :: HeaderName configHeader = CI.mk "X-Hasura-DataConnector-Config" @@ -61,7 +61,7 @@ newtype AgentAuthKey = AgentAuthKey {getAgentAuthKey :: ByteString} eeLicenseKeyHeader :: HeaderName eeLicenseKeyHeader = CI.mk "X-Hasura-License" -mkHttpClientManager :: MonadIO m => SensitiveOutputHandling -> Maybe AgentAuthKey -> m HttpClient.Manager +mkHttpClientManager :: (MonadIO m) => SensitiveOutputHandling -> Maybe AgentAuthKey -> m HttpClient.Manager mkHttpClientManager sensitiveOutputHandling agentAuthKey = let modifyRequest = addHeaderRedaction sensitiveOutputHandling . maybe id addLicenseKeyHeader agentAuthKey settings = HttpClient.defaultManagerSettings {HttpClient.managerModifyRequest = pure . modifyRequest} @@ -76,7 +76,7 @@ addHeaderRedaction sensitiveOutputHandling request = AllowSensitiveOutput -> request DisallowSensitiveOutput -> request {HttpClient.redactHeaders = HttpClient.redactHeaders request <> Set.fromList [configHeader, eeLicenseKeyHeader]} -mkAgentIOClient :: MonadIO m => SensitiveOutputHandling -> Maybe AgentAuthKey -> BaseUrl -> m AgentIOClient +mkAgentIOClient :: (MonadIO m) => SensitiveOutputHandling -> Maybe AgentAuthKey -> BaseUrl -> m AgentIOClient mkAgentIOClient sensitiveOutputHandling agentAuthKey agentBaseUrl = do manager <- mkHttpClientManager sensitiveOutputHandling agentAuthKey let clientEnv = mkClientEnv manager agentBaseUrl @@ -90,7 +90,7 @@ data AgentClientConfig = AgentClientConfig _accSensitiveOutputHandling :: SensitiveOutputHandling } -mkAgentClientConfig :: MonadIO m => SensitiveOutputHandling -> Maybe AgentAuthKey -> BaseUrl -> m AgentClientConfig +mkAgentClientConfig :: (MonadIO m) => SensitiveOutputHandling -> Maybe AgentAuthKey -> BaseUrl -> m AgentClientConfig mkAgentClientConfig sensitiveOutputHandling agentAuthKey agentBaseUrl = do manager <- mkHttpClientManager sensitiveOutputHandling agentAuthKey pure $ AgentClientConfig agentBaseUrl manager sensitiveOutputHandling @@ -174,10 +174,10 @@ runRequestAcceptStatus' acceptStatus request = do then pure $ response else throwClientError $ mkFailureResponse _accBaseUrl request response -getClientState :: Monad m => AgentClientT m AgentClientState +getClientState :: (Monad m) => AgentClientT m AgentClientState getClientState = AgentClientT get -incrementRequestCounter :: Monad m => AgentClientT m () +incrementRequestCounter :: (Monad m) => AgentClientT m () incrementRequestCounter = AgentClientT $ modify' \state -> state {_acsRequestCounter = _acsRequestCounter state + 1} redactJsonResponse :: Method -> ByteString -> J.Value -> J.Value diff --git a/server/lib/dc-api/test/Test/Data.hs b/server/lib/dc-api/test/Test/Data.hs index ee745a38b43..9831da9bae4 100644 --- a/server/lib/dc-api/test/Test/Data.hs +++ b/server/lib/dc-api/test/Test/Data.hs @@ -801,8 +801,8 @@ mkSubqueryAggregatesFieldValue :: HashMap API.FieldName J.Value -> 
API.FieldValue mkSubqueryAggregatesFieldValue aggregates = API.mkRelationshipFieldValue $ API.QueryResponse Nothing (Just aggregates) -mkAndExpr :: Foldable f => f API.Expression -> API.Expression +mkAndExpr :: (Foldable f) => f API.Expression -> API.Expression mkAndExpr = API.And . Set.fromList . Foldable.toList -mkOrExpr :: Foldable f => f API.Expression -> API.Expression +mkOrExpr :: (Foldable f) => f API.Expression -> API.Expression mkOrExpr = API.Or . Set.fromList . Foldable.toList diff --git a/server/lib/dc-api/test/Test/Expectations.hs b/server/lib/dc-api/test/Test/Expectations.hs index d4b7ecaceda..d97694d9c38 100644 --- a/server/lib/dc-api/test/Test/Expectations.hs +++ b/server/lib/dc-api/test/Test/Expectations.hs @@ -30,7 +30,7 @@ newtype YamlShow = YamlShow {unYamlShow :: Value} instance Show YamlShow where show = T.unpack . TE.decodeUtf8With TE.lenientDecode . Yaml.encode . unYamlShow -yamlShow :: ToJSON value => value -> String +yamlShow :: (ToJSON value) => value -> String yamlShow = show . YamlShow . toJSON -- | Compares two JSON values for equality, but prints their diff upon failure diff --git a/server/lib/dc-api/test/Test/Specs/SchemaSpec.hs b/server/lib/dc-api/test/Test/Specs/SchemaSpec.hs index 33e6c17847b..b95da87acab 100644 --- a/server/lib/dc-api/test/Test/Specs/SchemaSpec.hs +++ b/server/lib/dc-api/test/Test/Specs/SchemaSpec.hs @@ -141,7 +141,7 @@ spec TestData {..} API.Capabilities {..} = describe "schema API" $ preloadAgentS Maybe API.TableInfo -> -- Actual table ExampleT innerContext m () ) -> - forall context. HasPreloadedAgentSchema context => SpecFree context IO () + forall context. (HasPreloadedAgentSchema context) => SpecFree context IO () testPerTable description test = describe description $ do forM_ _tdSchemaTables $ \expectedTable@API.TableInfo {..} -> do diff --git a/server/lib/error-message/src/Hasura/Base/ToErrorValue.hs b/server/lib/error-message/src/Hasura/Base/ToErrorValue.hs index 9728e226f03..13211bde43b 100644 --- a/server/lib/error-message/src/Hasura/Base/ToErrorValue.hs +++ b/server/lib/error-message/src/Hasura/Base/ToErrorValue.hs @@ -37,17 +37,17 @@ instance ToErrorValue () where -- > [J.Number 1, J.Bool True, J.String "three"] -- Will be printed as: -- > "[1, true, \"three\"]" -instance ToErrorValue a => ToErrorValue [a] where +instance (ToErrorValue a) => ToErrorValue [a] where toErrorValue values = "[" <> commaSeparatedValues <> "]" where commaSeparatedValues = foldr1 (<>) $ List.intersperse (toErrorMessage ", ") (map toErrorValue values) -- | Will be printed as a list -instance ToErrorValue a => ToErrorValue (NonEmpty a) where +instance (ToErrorValue a) => ToErrorValue (NonEmpty a) where toErrorValue = toErrorValue . NonEmpty.toList -- | Will be printed as a list -instance ToErrorValue a => ToErrorValue (HashSet a) where +instance (ToErrorValue a) => ToErrorValue (HashSet a) where toErrorValue = toErrorValue . 
HashSet.toList -- | Will be printed with single quotes surrounding it diff --git a/server/lib/error-message/test/Hasura/Base/ErrorMessageSpec.hs b/server/lib/error-message/test/Hasura/Base/ErrorMessageSpec.hs index a96a18edfb3..bcfc80c25f6 100644 --- a/server/lib/error-message/test/Hasura/Base/ErrorMessageSpec.hs +++ b/server/lib/error-message/test/Hasura/Base/ErrorMessageSpec.hs @@ -105,7 +105,7 @@ spec = newtype Thing a = Thing a deriving newtype (Eq, Hashable) -instance Show a => ToErrorValue (Thing a) where +instance (Show a) => ToErrorValue (Thing a) where toErrorValue (Thing x) = toErrorMessage $ "Thing " <> Text.pack (show x) newtype SingleQuoted = SingleQuoted Char diff --git a/server/lib/graphql-parser/src/Language/GraphQL/Draft/Generator.hs b/server/lib/graphql-parser/src/Language/GraphQL/Draft/Generator.hs index 133f43f6e27..434d4fa4e34 100644 --- a/server/lib/graphql-parser/src/Language/GraphQL/Draft/Generator.hs +++ b/server/lib/graphql-parser/src/Language/GraphQL/Draft/Generator.hs @@ -96,7 +96,7 @@ instance Generator Void where instance Generator Name where genValue = genValueWith [genName] -generate :: MonadIO m => Gen a -> m a +generate :: (MonadIO m) => Gen a -> m a generate = Gen.sample ------------------------------------------------------------------------------- @@ -107,7 +107,7 @@ genDocument :: Gen Document genDocument = Document <$> Gen.list (Range.linear 0 3) genDefinition -genExecutableDocument :: Generator a => Gen (ExecutableDocument a) +genExecutableDocument :: (Generator a) => Gen (ExecutableDocument a) genExecutableDocument = ExecutableDocument <$> Gen.list (Range.linear 1 3) genExecutableDefinition @@ -219,21 +219,21 @@ genDefinition = DefinitionTypeSystem <$> genTypeSystemDefinition ] -genExecutableDefinition :: Generator a => Gen (ExecutableDefinition a) +genExecutableDefinition :: (Generator a) => Gen (ExecutableDefinition a) genExecutableDefinition = Gen.choice [ ExecutableDefinitionOperation <$> genOperationDefinition, ExecutableDefinitionFragment <$> genFragmentDefinition ] -genOperationDefinition :: Generator a => Gen (OperationDefinition FragmentSpread a) +genOperationDefinition :: (Generator a) => Gen (OperationDefinition FragmentSpread a) genOperationDefinition = Gen.choice [ OperationDefinitionTyped <$> genTypedOperationDefinition, OperationDefinitionUnTyped <$> genSelectionSet ] -genTypedOperationDefinition :: Generator a => Gen (TypedOperationDefinition FragmentSpread a) +genTypedOperationDefinition :: (Generator a) => Gen (TypedOperationDefinition FragmentSpread a) genTypedOperationDefinition = TypedOperationDefinition <$> genOperationType @@ -422,10 +422,10 @@ genTypeSystemDirectiveLocation = -- Structure -genSelectionSet :: Generator a => Gen (SelectionSet FragmentSpread a) +genSelectionSet :: (Generator a) => Gen (SelectionSet FragmentSpread a) genSelectionSet = mkListNonEmpty genSelection -genSelection :: Generator a => Gen (Selection FragmentSpread a) +genSelection :: (Generator a) => Gen (Selection FragmentSpread a) genSelection = Gen.recursive Gen.choice @@ -435,20 +435,20 @@ genSelection = SelectionInlineFragment <$> genInlineFragment ] -genFragmentSpread :: Generator a => Gen (FragmentSpread a) +genFragmentSpread :: (Generator a) => Gen (FragmentSpread a) genFragmentSpread = FragmentSpread <$> genName <*> genDirectives -genInlineFragment :: Generator a => Gen (InlineFragment FragmentSpread a) +genInlineFragment :: (Generator a) => Gen (InlineFragment FragmentSpread a) genInlineFragment = InlineFragment <$> Gen.maybe genName <*> 
diff --git a/server/lib/graphql-parser/src/Language/GraphQL/Draft/Parser.hs b/server/lib/graphql-parser/src/Language/GraphQL/Draft/Parser.hs
index 5ccad3e2a1b..ea6f41c6da4 100644
--- a/server/lib/graphql-parser/src/Language/GraphQL/Draft/Parser.hs
+++ b/server/lib/graphql-parser/src/Language/GraphQL/Draft/Parser.hs
@@ -136,10 +136,10 @@ class PossibleTypes pos where
 instance PossibleTypes () where
   possibleTypes = pure ()
 
-selectionSet :: Variable var => Parser (AST.SelectionSet AST.FragmentSpread var)
+selectionSet :: (Variable var) => Parser (AST.SelectionSet AST.FragmentSpread var)
 selectionSet = braces $ many1 selection
 
-selection :: Variable var => Parser (AST.Selection AST.FragmentSpread var)
+selection :: (Variable var) => Parser (AST.Selection AST.FragmentSpread var)
 selection =
   AST.SelectionField <$> field
     -- Inline first to catch `on` case
@@ -155,7 +155,7 @@ aliasAndFld = do
     Nothing -> return (Nothing, n)
 {-# INLINE aliasAndFld #-}
 
-field :: Variable var => Parser (AST.Field AST.FragmentSpread var)
+field :: (Variable var) => Parser (AST.Field AST.FragmentSpread var)
 field = do
   (alM, n) <- aliasAndFld
   AST.Field alM n
@@ -165,7 +165,7 @@ field = do
 
 -- * Fragments
 
-fragmentSpread :: Variable var => Parser (AST.FragmentSpread var)
+fragmentSpread :: (Variable var) => Parser (AST.FragmentSpread var)
 -- TODO: Make sure it fails when `... on`.
 -- See https://facebook.github.io/graphql/#FragmentSpread
 fragmentSpread =
@@ -175,7 +175,7 @@ fragmentSpread =
     <*> optempty directives
 
 -- InlineFragment tried first in order to guard against 'on' keyword
-inlineFragment :: Variable var => Parser (AST.InlineFragment AST.FragmentSpread var)
+inlineFragment :: (Variable var) => Parser (AST.InlineFragment AST.FragmentSpread var)
 inlineFragment =
   AST.InlineFragment
     <$ tok "..."
@@ -207,7 +207,7 @@ number = do
 
 -- This will try to pick the first type it can runParser. If you are working with
 -- explicit types use the `typedValue` parser.
-value :: Variable var => Parser (AST.Value var)
+value :: (Variable var) => Parser (AST.Value var)
 value =
   tok
     ( AST.VVariable <$> variable
@@ -254,17 +254,17 @@ stringLiteral = unescapeText =<< (char '"' *> jstring_ <?> "string")
 unescapeText :: Text -> Parser Text
 unescapeText str = either fail pure $ A.parseOnly jstring ("\"" <> encodeUtf8 str <> "\"")
 
-listLiteral :: Variable var => Parser [AST.Value var]
+listLiteral :: (Variable var) => Parser [AST.Value var]
 listLiteral = brackets (many value) <?> "list"
 
-objectLiteral :: Variable var => Parser (HashMap AST.Name (AST.Value var))
+objectLiteral :: (Variable var) => Parser (HashMap AST.Name (AST.Value var))
 objectLiteral = braces (objectFields many) <?> "object"
 
-arguments :: Variable var => Parser (HashMap AST.Name (AST.Value var))
+arguments :: (Variable var) => Parser (HashMap AST.Name (AST.Value var))
 arguments = parens (objectFields many1) <?> "arguments"
 
 objectFields ::
-  Variable var =>
+  (Variable var) =>
   (forall b. Parser b -> Parser [b]) ->
   Parser (HashMap AST.Name (AST.Value var))
 objectFields several = foldM insertField HashMap.empty =<< several objectField
@@ -276,10 +276,10 @@ objectFields several = foldM insertField HashMap.empty =<< several objectField
 
 -- * Directives
 
-directives :: Variable var => Parser [AST.Directive var]
+directives :: (Variable var) => Parser [AST.Directive var]
 directives = many1 directive
 
-directive :: Variable var => Parser (AST.Directive var)
+directive :: (Variable var) => Parser (AST.Directive var)
 directive =
   AST.Directive
     <$ tok "@"
@@ -370,7 +370,7 @@ fieldDefinition =
 argumentsDefinition :: Parser (AST.ArgumentsDefinition AST.InputValueDefinition)
 argumentsDefinition = parens $ many1 inputValueDefinition
 
-interfaceTypeDefinition :: PossibleTypes pos => Parser (AST.InterfaceTypeDefinition pos AST.InputValueDefinition)
+interfaceTypeDefinition :: (PossibleTypes pos) => Parser (AST.InterfaceTypeDefinition pos AST.InputValueDefinition)
 interfaceTypeDefinition =
   AST.InterfaceTypeDefinition
     <$> optDesc
@@ -526,7 +526,7 @@ between :: Parser Text -> Parser Text -> Parser a -> Parser a
 between open close p = tok open *> p <* tok close
 
 -- `empty` /= `pure mempty` for `Parser`.
-optempty :: Monoid a => Parser a -> Parser a
+optempty :: (Monoid a) => Parser a -> Parser a
 optempty = option mempty
 
 data Expecting
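-- [editor's example; not part of the diff] The comment above notes that `value`
-- commits to the first alternative that parses, which is why variables come
-- first and integers are tried before floats. A self-contained attoparsec
-- sketch of that ordering concern (names here are illustrative only):

import Control.Applicative ((<|>))
import Data.Attoparsec.Text qualified as AT

intThenFloat :: AT.Parser (Either Integer Double)
intThenFloat =
  (Left <$> AT.decimal <* AT.endOfInput) <|> (Right <$> AT.double)

-- AT.parseOnly intThenFloat "1"   == Right (Left 1)
-- AT.parseOnly intThenFloat "1.5" == Right (Right 1.5): the integer branch
-- fails at ".5", and attoparsec's (<|>) backtracks to the float branch.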
diff --git a/server/lib/graphql-parser/src/Language/GraphQL/Draft/Printer.hs b/server/lib/graphql-parser/src/Language/GraphQL/Draft/Printer.hs
index 326c3639dfc..dec90e1d337 100644
--- a/server/lib/graphql-parser/src/Language/GraphQL/Draft/Printer.hs
+++ b/server/lib/graphql-parser/src/Language/GraphQL/Draft/Printer.hs
@@ -124,7 +124,7 @@ instance Printer T.Text where
   doubleP = T.pack . show
 
 class Print a where
-  printP :: Printer b => a -> b
+  printP :: (Printer b) => a -> b
 
 instance Print Void where
   printP = absurd
@@ -168,7 +168,7 @@ typedOperationDefinition :: (Print (frag var), Print var, Printer a) => TypedOpe
 typedOperationDefinition op =
   operationType (_todType op) <> charP ' ' <> nodeP op
 
-operationType :: Printer a => OperationType -> a
+operationType :: (Printer a) => OperationType -> a
 operationType = \case
   OperationTypeQuery -> "query"
   OperationTypeMutation -> "mutation"
@@ -204,7 +204,7 @@ field (Field alias name args dirs selSets) =
     <> charP ' '
     <> selectionSetP selSets
 
-optAlias :: Printer a => Maybe Name -> a
+optAlias :: (Printer a) => Maybe Name -> a
 optAlias = maybe mempty (\a -> nameP a <> textP ": ")
 
 inlineFragment :: (Print (frag var), Print var, Printer a) => InlineFragment frag var -> a
@@ -214,14 +214,14 @@ inlineFragment (InlineFragment tc ds sels) =
     <> optempty directives ds
     <> selectionSetP sels
 
-instance Print var => Print (FragmentSpread var) where
+instance (Print var) => Print (FragmentSpread var) where
   printP (FragmentSpread name ds) =
     "..." <> nameP name <> optempty directives ds
 
 instance Print (NoFragments var) where
   printP = \case {}
 
-fragmentDefinition :: Printer a => FragmentDefinition -> a
+fragmentDefinition :: (Printer a) => FragmentDefinition -> a
 fragmentDefinition (FragmentDefinition name tc dirs sels) =
   "fragment "
     <> nameP name
@@ -240,7 +240,7 @@ directive (Directive name args) =
 
 arguments :: (Print var, Printer a) => HashMap Name (Value var) -> a
 arguments xs = charP '(' <> objectFields xs <> charP ')'
 
-variableDefinitions :: Printer a => [VariableDefinition] -> a
+variableDefinitions :: (Printer a) => [VariableDefinition] -> a
 variableDefinitions vars =
   mconcat
     [ charP '(',
@@ -250,26 +250,26 @@ variableDefinitions vars =
   where
     vars' = intersperse (charP ',') $ map variableDefinition vars
 
-variableDefinition :: Printer a => VariableDefinition -> a
+variableDefinition :: (Printer a) => VariableDefinition -> a
 variableDefinition (VariableDefinition var ty defVal) =
   variableP var <> ": " <> graphQLType ty <> maybe mempty defaultValue defVal
 
-defaultValue :: Printer a => Value Void -> a
+defaultValue :: (Printer a) => Value Void -> a
 defaultValue v = " = " <> value v
 
-description :: Printer a => Maybe Description -> a
+description :: (Printer a) => Maybe Description -> a
 description Nothing = mempty
 description (Just desc) = dispatchStringPrinter (unDescription desc) <> " \n"
 
 -- | Type Reference
-graphQLType :: Printer a => GType -> a
+graphQLType :: (Printer a) => GType -> a
 graphQLType (TypeNamed n x) = nameP x <> nonNull n
 graphQLType (TypeList n x) = listType x <> nonNull n
 
-listType :: Printer a => GType -> a
+listType :: (Printer a) => GType -> a
 listType ty = charP '[' <> graphQLType ty <> charP ']'
 
-nonNull :: Printer a => Nullability -> a
+nonNull :: (Printer a) => Nullability -> a
 nonNull n = bool (charP '!') mempty $ unNullability n
 
 -- | Primitives
@@ -290,7 +290,7 @@ value = \case
 
 -- | Print a given text as a normal string or as a block string, depending on
 -- its content.
-dispatchStringPrinter :: Printer a => Text -> a
+dispatchStringPrinter :: (Printer a) => Text -> a
 dispatchStringPrinter t =
   if printAsBlockString then blockStringValue t else stringValue t
   where
@@ -322,10 +322,10 @@ dispatchStringPrinter t =
     isSourceCharacter = not . isControl
 
 -- | We use Aeson to decode string values, and therefore use Aeson to encode them back.
-stringValue :: Printer a => Text -> a
+stringValue :: (Printer a) => Text -> a
 stringValue s = textP $ LT.toStrict $ LTE.decodeUtf8 $ J.encode s
 
-blockStringValue :: Printer a => Text -> a
+blockStringValue :: (Printer a) => Text -> a
 blockStringValue t = textP "\"\"\"\n" <> textP t <> textP "\n\"\"\""
 
 listValue :: (Print var, Printer a) => [Value var] -> a
@@ -341,7 +341,7 @@ objectFields o = mconcat $ intersperse (charP ',') $ map objectField $ HashMap.t
   where
     objectField (name, val) = nameP name <> ": " <> value val
 
-fromBool :: Printer a => Bool -> a
+fromBool :: (Printer a) => Bool -> a
 fromBool True = "true"
 fromBool False = "false"
@@ -352,7 +352,7 @@ optempty f xs
 
 schemaDefinition ::
   forall a.
-  Printer a =>
+  (Printer a) =>
   SchemaDefinition ->
   a
 schemaDefinition (SchemaDefinition dirs rootOpDefs) =
@@ -362,15 +362,15 @@ schemaDefinition (SchemaDefinition dirs rootOpDefs) =
     <> mconcat (intersperse (charP ' ') (map rootOperationTypeDefinition rootOpDefs))
     <> " }"
 
-rootOperationTypeDefinition :: Printer a => RootOperationTypeDefinition -> a
+rootOperationTypeDefinition :: (Printer a) => RootOperationTypeDefinition -> a
 rootOperationTypeDefinition (RootOperationTypeDefinition opType rootName) =
   operationType opType <> ": " <> nameP rootName
 
-typeSystemDefinition :: Printer a => TypeSystemDefinition -> a
+typeSystemDefinition :: (Printer a) => TypeSystemDefinition -> a
 typeSystemDefinition (TypeSystemDefinitionSchema schemaDefn) = schemaDefinition schemaDefn
 typeSystemDefinition (TypeSystemDefinitionType typeDefn) = typeDefinitionP typeDefn
 
-schemaDocument :: Printer a => SchemaDocument -> a
+schemaDocument :: (Printer a) => SchemaDocument -> a
 schemaDocument (SchemaDocument typeDefns) =
   mconcat $ intersperse (textP "\n\n") $ map typeSystemDefinition $ sort $ filter isNotBuiltInScalar typeDefns
   where
@@ -383,7 +383,7 @@ schemaDocument (SchemaDocument typeDefns) =
       ) = name `notElem` builtInScalars
     isNotBuiltInScalar _ = True
 
-typeDefinitionP :: Printer a => TypeDefinition () InputValueDefinition -> a
+typeDefinitionP :: (Printer a) => TypeDefinition () InputValueDefinition -> a
 typeDefinitionP (TypeDefinitionScalar scalarDefn) = scalarTypeDefinition scalarDefn
 typeDefinitionP (TypeDefinitionObject objDefn) = objectTypeDefinition objDefn
 typeDefinitionP (TypeDefinitionInterface interfaceDefn) = interfaceTypeDefinition interfaceDefn
@@ -391,7 +391,7 @@ typeDefinitionP (TypeDefinitionUnion unionDefn) = unionTypeDefinition unionDefn
 typeDefinitionP (TypeDefinitionEnum enumDefn) = enumTypeDefinition enumDefn
 typeDefinitionP (TypeDefinitionInputObject inpObjDefn) = inputObjectTypeDefinition inpObjDefn
 
-scalarTypeDefinition :: Printer a => ScalarTypeDefinition -> a
+scalarTypeDefinition :: (Printer a) => ScalarTypeDefinition -> a
 scalarTypeDefinition (ScalarTypeDefinition desc name dirs) =
   description desc
     <> "scalar "
@@ -400,7 +400,7 @@ scalarTypeDefinition (ScalarTypeDefinition desc name dirs) =
       then mempty
      else charP ' ' <> optempty directives dirs
 
-inputValueDefinition :: Printer a => InputValueDefinition -> a
+inputValueDefinition :: (Printer a) => InputValueDefinition -> a
 inputValueDefinition (InputValueDefinition desc name gType defVal dirs) =
   description desc
     <> nameP name
@@ -411,7 +411,7 @@ inputValueDefinition (InputValueDefinition desc name gType defVal dirs) =
      then mempty
      else charP ' ' <> optempty directives dirs
 
-fieldDefinition :: Printer a => FieldDefinition InputValueDefinition -> a
+fieldDefinition :: (Printer a) => FieldDefinition InputValueDefinition -> a
 fieldDefinition (FieldDefinition desc name args gType dirs) =
   description desc
     <> nameP name
@@ -425,7 +425,7 @@ fieldDefinition (FieldDefinition desc name args gType dirs) =
     <> graphQLType gType
    <> optempty directives dirs
 
-objectTypeDefinition :: Printer a => ObjectTypeDefinition InputValueDefinition -> a
+objectTypeDefinition :: (Printer a) => ObjectTypeDefinition InputValueDefinition -> a
 objectTypeDefinition (ObjectTypeDefinition desc name ifaces dirs fieldDefinitions) =
   description desc
     <> "type "
@@ -444,7 +444,7 @@ objectTypeDefinition (ObjectTypeDefinition desc name ifaces dirs fieldDefinition
     <> "\n"
     <> "}"
 
-interfaceTypeDefinition :: Printer a => InterfaceTypeDefinition () InputValueDefinition -> a
+interfaceTypeDefinition :: (Printer a) => InterfaceTypeDefinition () InputValueDefinition -> a
 interfaceTypeDefinition (InterfaceTypeDefinition desc name dirs fieldDefinitions _possibleTypes) =
   -- `possibleTypes` are not included with an interface definition in a GraphQL IDL
   description desc
@@ -462,7 +462,7 @@ interfaceTypeDefinition (InterfaceTypeDefinition desc name dirs fieldDefinitions
     <> "\n"
     <> "}"
 
-unionTypeDefinition :: Printer a => UnionTypeDefinition -> a
+unionTypeDefinition :: (Printer a) => UnionTypeDefinition -> a
 unionTypeDefinition (UnionTypeDefinition desc name dirs members) =
   description desc
     <> "union "
@@ -472,14 +472,14 @@ unionTypeDefinition (UnionTypeDefinition desc name dirs members) =
     <> textP " = "
    <> mconcat (intersperse (textP " | ") $ map nameP $ sort members)
 
-enumValueDefinition :: Printer a => EnumValueDefinition -> a
+enumValueDefinition :: (Printer a) => EnumValueDefinition -> a
 enumValueDefinition (EnumValueDefinition desc name dirs) =
   description desc
     <> nameP (unEnumValue name)
    <> charP ' '
    <> optempty directives dirs
 
-enumTypeDefinition :: Printer a => EnumTypeDefinition -> a
+enumTypeDefinition :: (Printer a) => EnumTypeDefinition -> a
 enumTypeDefinition (EnumTypeDefinition desc name dirs enumValDefns) =
   description desc
    <> "enum "
@@ -495,7 +495,7 @@ enumTypeDefinition (EnumTypeDefinition desc name dirs enumValDefns) =
    <> "\n"
    <> "}"
 
-inputObjectTypeDefinition :: Printer a => InputObjectTypeDefinition InputValueDefinition -> a
+inputObjectTypeDefinition :: (Printer a) => InputObjectTypeDefinition InputValueDefinition -> a
 inputObjectTypeDefinition (InputObjectTypeDefinition desc name dirs valDefns) =
   description desc
    <> "input "
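-- [editor's example; not part of the diff] The printers above are polymorphic
-- in any `Printer` instance; instantiated at `T.Text` (an instance shown in
-- this file) they yield the expected GraphQL lexemes. `renderedBits` is a
-- hypothetical name:

renderedBits :: [T.Text]
renderedBits =
  [ operationType OperationTypeQuery, -- "query"
    fromBool True, -- "true"
    stringValue "hi" -- "\"hi\"", encoded via aeson per the comment above
  ]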
diff --git a/server/lib/graphql-parser/src/Language/GraphQL/Draft/Syntax.hs b/server/lib/graphql-parser/src/Language/GraphQL/Draft/Syntax.hs
index a9e74d04796..b4d5459115f 100644
--- a/server/lib/graphql-parser/src/Language/GraphQL/Draft/Syntax.hs
+++ b/server/lib/graphql-parser/src/Language/GraphQL/Draft/Syntax.hs
@@ -149,9 +149,9 @@ data ExecutableDefinition var
   | ExecutableDefinitionFragment FragmentDefinition
   deriving stock (Eq, Generic, Lift, Ord, Show, Functor, Foldable, Traversable)
 
-instance Hashable var => Hashable (ExecutableDefinition var)
+instance (Hashable var) => Hashable (ExecutableDefinition var)
 
-instance NFData var => NFData (ExecutableDefinition var)
+instance (NFData var) => NFData (ExecutableDefinition var)
 
 partitionExDefs ::
   [ExecutableDefinition var] ->
@@ -320,7 +320,7 @@ data Value var
   deriving stock (Eq, Generic, Ord, Show, Functor, Foldable, Traversable)
   deriving anyclass (Hashable, NFData)
 
-instance Lift var => Lift (Value var) where
+instance (Lift var) => Lift (Value var) where
   liftTyped (VVariable a) = [||VVariable a||]
   liftTyped VNull = [||VNull||]
   liftTyped (VInt a) = [||VInt a||]
@@ -343,7 +343,7 @@ data Directive var = Directive
   deriving stock (Eq, Generic, Ord, Show, Functor, Foldable, Traversable)
   deriving anyclass (Hashable, NFData)
 
-instance Lift var => Lift (Directive var) where
+instance (Lift var) => Lift (Directive var) where
   liftTyped Directive {..} =
     [||
     Directive
diff --git a/server/lib/graphql-parser/src/Language/GraphQL/Draft/Syntax/Name.hs b/server/lib/graphql-parser/src/Language/GraphQL/Draft/Syntax/Name.hs
index efd2c59567b..802ed5be6b6 100644
--- a/server/lib/graphql-parser/src/Language/GraphQL/Draft/Syntax/Name.hs
+++ b/server/lib/graphql-parser/src/Language/GraphQL/Draft/Syntax/Name.hs
@@ -57,7 +57,7 @@ newtype NameSuffix = Suffix {unNameSuffix :: Text}
   deriving stock (Eq, Lift, Ord, Show)
   deriving newtype (Semigroup, Hashable, NFData, Pretty, J.ToJSONKey, J.ToJSON)
 
-parseName :: MonadFail m => Text -> m Name
+parseName :: (MonadFail m) => Text -> m Name
 parseName text = maybe (fail errorMessage) pure $ mkName text
   where
     errorMessage = T.unpack text <> " is not valid GraphQL name"
@@ -106,7 +106,7 @@ convertNameToSuffix = coerce
 unsafeMkName :: Text -> Name
 unsafeMkName = Name
 
-parseSuffix :: MonadFail m => Text -> m NameSuffix
+parseSuffix :: (MonadFail m) => Text -> m NameSuffix
 parseSuffix text = maybe (fail errorMessage) pure $ mkNameSuffix text
   where
     errorMessage = T.unpack text <> " is not valid GraphQL suffix"
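-- [editor's example; not part of the diff] `parseName` above runs in any
-- MonadFail, so validation failure can surface as Nothing, a parser error,
-- and so on. In Maybe:

validName :: Maybe Name
validName = parseName "hero" -- Just: "hero" is a lexically valid GraphQL name

invalidName :: Maybe Name
invalidName = parseName "9lives" -- Nothing: a name cannot start with a digit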
diff --git a/server/lib/hasura-base/src/Hasura/Base/Error.hs b/server/lib/hasura-base/src/Hasura/Base/Error.hs
index cd9684332a1..c27d69ad4e7 100644
--- a/server/lib/hasura-base/src/Hasura/Base/Error.hs
+++ b/server/lib/hasura-base/src/Hasura/Base/Error.hs
@@ -202,8 +202,8 @@ instance ToJSON QErr where
       "error" .= msg,
       "code" .= code
     ]
-  toJSON (QErr jPath _ msg code (Just extra)) = object $
-    case extra of
+  toJSON (QErr jPath _ msg code (Just extra)) = object
+    $ case extra of
       ExtraInternal e -> err ++ ["internal" .= e]
       ExtraExtensions {} -> err
       HideInconsistencies -> []
@@ -346,8 +346,8 @@ throw500WithDetail t detail =
 
 throwConnectionError :: (QErrM m) => Text -> m a
 throwConnectionError t =
-  throwError $
-    (err500 Unexpected t)
+  throwError
+    $ (err500 Unexpected t)
       { qeInternal = Just HideInconsistencies,
        qeCode = ConnectionNotEstablished
      }
@@ -407,7 +407,9 @@ indexedFoldlA' f = proc (e, (acc0, (xs, s))) ->
   (|
     foldlA'
       (\acc (i, v) -> (| withPathIA ((e, (acc, (v, s))) >- f) |) i)
-  |) acc0 (zip [0 ..] (toList xs))
+  |)
+    acc0
+    (zip [0 ..] (toList xs))
 
 indexedTraverseA_ ::
   (ArrowChoice arr, ArrowError QErr arr, Foldable t) =>
diff --git a/server/lib/hasura-base/src/Hasura/Base/Instances.hs b/server/lib/hasura-base/src/Hasura/Base/Instances.hs
index 6bee64b3496..2f4b15b89ce 100644
--- a/server/lib/hasura-base/src/Hasura/Base/Instances.hs
+++ b/server/lib/hasura-base/src/Hasura/Base/Instances.hs
@@ -101,9 +101,9 @@ deriving instance TH.Lift Seconds
 instance AC.HasCodec C.CronSchedule where
   codec =
-    AC.named "CronSchedule" $
-      AC.bimapCodec C.parseCronSchedule C.serializeCronSchedule $
-        AC.codec @Text
+    AC.named "CronSchedule"
+      $ AC.bimapCodec C.parseCronSchedule C.serializeCronSchedule
+      $ AC.codec @Text
 
 --------------------------------------------------------------------------------
 -- JSON
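-- [editor's note; not part of the diff] Nearly every hunk in this patch is one
-- of two mechanical rewrites: single-constraint contexts gain parentheses
-- (`Foldable f =>` becomes `(Foldable f) =>`), and chains of ($) move the
-- operator to the head of the continuation line. A before/after sketch of the
-- latter (both definitions parse identically; only layout differs):

before :: Int
before =
  negate $
    abs $
      subtract 1 $
        42

after :: Int
after =
  negate
    $ abs
    $ subtract 1
    $ 42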
diff --git a/server/lib/hasura-extras/src/Autodocodec/Extended.hs b/server/lib/hasura-extras/src/Autodocodec/Extended.hs
index 85474d8f28b..95f34412de6 100644
--- a/server/lib/hasura-extras/src/Autodocodec/Extended.hs
+++ b/server/lib/hasura-extras/src/Autodocodec/Extended.hs
@@ -107,17 +107,17 @@ graphQLFieldNameCodec :: JSONCodec G.Name
 graphQLFieldNameCodec = named "GraphQLName" $ bimapCodec dec enc codec
   where
     dec text =
-      maybeToEither ("invalid GraphQL field name '" <> T.unpack text <> "'") $
-        G.mkName text
+      maybeToEither ("invalid GraphQL field name '" <> T.unpack text <> "'")
+        $ G.mkName text
     enc = G.unName
 
 graphQLFieldDescriptionCodec :: JSONCodec G.Description
 graphQLFieldDescriptionCodec = dimapCodec G.Description G.unDescription codec
 
-graphQLValueCodec :: forall var. Typeable var => JSONCodec var -> JSONCodec (G.Value var)
+graphQLValueCodec :: forall var. (Typeable var) => JSONCodec var -> JSONCodec (G.Value var)
 graphQLValueCodec varCodec =
-  named ("GraphQLValue_" <> typeableName @var) $
-    matchChoicesCodec
+  named ("GraphQLValue_" <> typeableName @var)
+    $ matchChoicesCodec
       [ (isVVariable, dimapCodec G.VVariable fromVVariable varCodec), -- The VVariable case must be first in case its codec overlaps with other cases
         (isVNull, dimapCodec (const G.VNull) (const ()) nullCodec),
         (isVInt, dimapCodec (G.VInt . toInteger) fromVInt integerCodec), -- It's important to try VInt first because the Scientific codec will match integers
@@ -169,10 +169,10 @@ hashSetCodec = hashSetCodecWith codec
 -- | Serializes a hash set by converting it to a list. This matches the FromJSON
 -- and ToJSON instances in aeson. This version accepts a codec for individual
 -- set values as an argument.
-hashSetCodecWith :: Hashable a => JSONCodec a -> JSONCodec (HashSet a)
+hashSetCodecWith :: (Hashable a) => JSONCodec a -> JSONCodec (HashSet a)
 hashSetCodecWith elemCodec =
-  dimapCodec HashSet.fromList HashSet.toList $
-    listCodec elemCodec
+  dimapCodec HashSet.fromList HashSet.toList
+    $ listCodec elemCodec
 
 -- | Codec for integral numbers with specified lower and upper bounds.
 integralWithBoundsCodec :: (Integral i, Bounded i) => NumberBounds -> JSONCodec i
@@ -186,14 +186,14 @@ integralWithBoundsCodec bounds =
 -- | Codec for integral numbers with specified lower bound.
 integralWithLowerBoundCodec :: forall i. (Integral i, Bounded i) => i -> JSONCodec i
 integralWithLowerBoundCodec minInt =
-  integralWithBoundsCodec $
-    NumberBounds (fromIntegral minInt) (fromIntegral (maxBound @i))
+  integralWithBoundsCodec
+    $ NumberBounds (fromIntegral minInt) (fromIntegral (maxBound @i))
 
 -- | Codec for integral numbers with specified lower bound.
 integralWithUpperBoundCodec :: forall i. (Integral i, Bounded i) => i -> JSONCodec i
 integralWithUpperBoundCodec maxInt =
-  integralWithBoundsCodec $
-    NumberBounds (fromIntegral (minBound @i)) (fromIntegral maxInt)
+  integralWithBoundsCodec
+    $ NumberBounds (fromIntegral (minBound @i)) (fromIntegral maxInt)
 
 -- | Codec for integer with a generous bounds check that matches the behavior of
 -- aeson integer deserialization.
@@ -221,7 +221,7 @@ parseIntegralFromScientific s = case floatingOrInteger @Float s of
 -- function omits the field during serialization if the Haskell value is
 -- @Nothing@. This version includes the field with a serialized value of @null@.
 optionalFieldOrIncludedNull ::
-  HasCodec output =>
+  (HasCodec output) =>
   -- | Key
   Text ->
   -- | Documentation
@@ -236,7 +236,7 @@ optionalFieldOrIncludedNull key doc = optionalFieldOrIncludedNullWith key codec
 -- function omits the field during serialization if the Haskell value is
 -- @Nothing@. This version includes the field with a serialized value of @null@.
 optionalFieldOrIncludedNull' ::
-  HasCodec output =>
+  (HasCodec output) =>
   -- | Key
   Text ->
   ObjectCodec (Maybe output) (Maybe output)
@@ -257,8 +257,8 @@ optionalFieldOrIncludedNullWith ::
   Text ->
   ObjectCodec (Maybe output) (Maybe output)
 optionalFieldOrIncludedNullWith key c doc =
-  orIncludedNullHelper $
-    OptionalKeyCodec key (maybeCodec c) (Just doc)
+  orIncludedNullHelper
+    $ OptionalKeyCodec key (maybeCodec c) (Just doc)
 
 -- | An optional field that might be @null@ where a @Nothing@ value should be
 -- represented as @null@ on serialization instead of omitting the field.
@@ -273,8 +273,8 @@ optionalFieldOrIncludedNullWith' ::
   JSONCodec output ->
   ObjectCodec (Maybe output) (Maybe output)
 optionalFieldOrIncludedNullWith' key c =
-  orIncludedNullHelper $
-    OptionalKeyCodec key (maybeCodec c) Nothing
+  orIncludedNullHelper
+    $ OptionalKeyCodec key (maybeCodec c) Nothing
 
 orIncludedNullHelper :: ObjectCodec (Maybe (Maybe input)) (Maybe (Maybe output)) -> ObjectCodec (Maybe input) (Maybe output)
 orIncludedNullHelper = dimapCodec dec enc
@@ -317,7 +317,7 @@ refinedCodec = refinedCodecWith codec
 -- package.
 --
 -- This version requires a codec to be provided for the underlying value type.
-refinedCodecWith :: R.Predicate p a => JSONCodec a -> JSONCodec (R.Refined p a)
+refinedCodecWith :: (R.Predicate p a) => JSONCodec a -> JSONCodec (R.Refined p a)
 refinedCodecWith underlyingCodec = bimapCodec dec enc underlyingCodec
   where
     dec = mapLeft show . R.refine
@@ -357,7 +357,7 @@ disjointMatchChoicesNECodec l = go l
 -- | Map a fixed set of two values to boolean values when serializing. The first
 -- argument is the value to map to @True@, the second is the value to map to
 -- @False@.
-boolConstCodec :: Eq a => a -> a -> JSONCodec a
+boolConstCodec :: (Eq a) => a -> a -> JSONCodec a
 boolConstCodec trueCase falseCase =
   dimapCodec
     (bool trueCase falseCase)
@@ -411,16 +411,16 @@ optionalVersionField v =
 -- discriminator field is @Text@.
 discriminatorField :: Text -> Text -> ObjectCodec a ()
 discriminatorField name value =
-  dimapCodec (const ()) (const value) $
-    requiredFieldWith' name (literalTextCodec value)
+  dimapCodec (const ()) (const value)
+    $ requiredFieldWith' name (literalTextCodec value)
 
 -- | Useful in an object codec for a field that indicates the type of the
 -- object within a union. This version assumes that the type of the
 -- discriminator field is @Bool@.
 discriminatorBoolField :: Text -> Bool -> ObjectCodec a ()
 discriminatorBoolField name value =
-  dimapCodec (const ()) (const value) $
-    requiredFieldWith' name (EqCodec value boolCodec)
+  dimapCodec (const ()) (const value)
+    $ requiredFieldWith' name (EqCodec value boolCodec)
 
 -- | Represents a text field wrapped in an object with a single property
 -- named @from_env@.
diff --git a/server/lib/hasura-extras/src/Data/Aeson/Extended.hs b/server/lib/hasura-extras/src/Data/Aeson/Extended.hs
index 7bcef2b7d6f..cd54ad5295b 100644
--- a/server/lib/hasura-extras/src/Data/Aeson/Extended.hs
+++ b/server/lib/hasura-extras/src/Data/Aeson/Extended.hs
@@ -32,7 +32,7 @@ class FromJSONKeyValue a where
 instance ToJSONKeyValue Void where
   toJSONKeyValue = absurd
 
-instance ToJSONKeyValue a => ToJSONKeyValue (Const a b) where
+instance (ToJSONKeyValue a) => ToJSONKeyValue (Const a b) where
   toJSONKeyValue = toJSONKeyValue . getConst
 
 -- | Similar to 'FromJSON', except the parser can also source data with which
@@ -62,5 +62,5 @@ mapWithJSONPath :: (a -> Parser b) -> [a] -> Parser [b]
 mapWithJSONPath parser xs =
   traverse (\(idx, item) -> parser item <?> Index idx) $ zip [0 ..] xs
 
-encodeToStrictText :: ToJSON a => a -> Text
+encodeToStrictText :: (ToJSON a) => a -> Text
 encodeToStrictText = toStrict . toLazyText . encodeToTextBuilder
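-- [editor's example; not part of the diff] `encodeToStrictText` above fuses
-- aeson's text builder straight into strict Text, skipping the usual
-- ByteString round trip. Assuming aeson's `object`/(.=) are in scope:

banner :: Text
banner = encodeToStrictText (object ["ok" .= True]) -- "{\"ok\":true}"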
diff --git a/server/lib/hasura-extras/src/Data/HashMap/Strict/Extended.hs b/server/lib/hasura-extras/src/Data/HashMap/Strict/Extended.hs
index 1c2a6ddb668..00839bbe59d 100644
--- a/server/lib/hasura-extras/src/Data/HashMap/Strict/Extended.hs
+++ b/server/lib/hasura-extras/src/Data/HashMap/Strict/Extended.hs
@@ -25,7 +25,7 @@ import Data.List qualified as L
 import Data.List.NonEmpty (NonEmpty (..))
 import Prelude
 
-fromListOn :: Hashable k => (v -> k) -> [v] -> HashMap k v
+fromListOn :: (Hashable k) => (v -> k) -> [v] -> HashMap k v
 fromListOn f = HashMap.fromList . Prelude.map (\v -> (f v, v))
 
 -- | Given a 'Foldable' sequence of values and a function that extracts a key from each value,
@@ -113,7 +113,7 @@ unionsWith f = F.foldl' (HashMap.unionWith f) HashMap.empty
 
 -- | Homogenise maps, such that all maps range over the full set of
 -- keys, inserting a default value as needed.
-homogenise :: Hashable a => b -> [HashMap a b] -> (HashSet a, [HashMap a b])
+homogenise :: (Hashable a) => b -> [HashMap a b] -> (HashSet a, [HashMap a b])
 homogenise defaultValue maps =
   let ks = S.unions $ L.map HashMap.keysSet maps
       defaults = HashMap.fromList [(k, defaultValue) | k <- S.toList ks]
diff --git a/server/lib/hasura-extras/src/Data/HashMap/Strict/InsOrd/Extended.hs b/server/lib/hasura-extras/src/Data/HashMap/Strict/InsOrd/Extended.hs
index d83e24c4148..123f5cfeca7 100644
--- a/server/lib/hasura-extras/src/Data/HashMap/Strict/InsOrd/Extended.hs
+++ b/server/lib/hasura-extras/src/Data/HashMap/Strict/InsOrd/Extended.hs
@@ -17,7 +17,7 @@ instance Filterable (InsOrdHashMap.InsOrdHashMap k) where
   mapMaybe = InsOrdHashMap.mapMaybe
   filter = InsOrdHashMap.filter
 
-partition :: Hashable k => (v -> Bool) -> InsOrdHashMap.InsOrdHashMap k v -> (InsOrdHashMap.InsOrdHashMap k v, InsOrdHashMap.InsOrdHashMap k v)
+partition :: (Hashable k) => (v -> Bool) -> InsOrdHashMap.InsOrdHashMap k v -> (InsOrdHashMap.InsOrdHashMap k v, InsOrdHashMap.InsOrdHashMap k v)
 partition predicate =
   InsOrdHashMap.foldlWithKey'
     ( \(left, right) key val ->
diff --git a/server/lib/hasura-extras/src/Data/HashMap/Strict/Multi.hs b/server/lib/hasura-extras/src/Data/HashMap/Strict/Multi.hs
index 765a3544521..1363d6d2a7d 100644
--- a/server/lib/hasura-extras/src/Data/HashMap/Strict/Multi.hs
+++ b/server/lib/hasura-extras/src/Data/HashMap/Strict/Multi.hs
@@ -43,7 +43,7 @@ instance (Hashable k, Ord v) => Monoid (MultiMap k v) where
 
 -- | Construct a 'MmultiMap' with a single key, to which only one
 -- value is associated.
-singleton :: Hashable k => k -> v -> MultiMap k v
+singleton :: (Hashable k) => k -> v -> MultiMap k v
 singleton k v = MultiMap $ HashMap.singleton k (S.singleton v)
 
 -- | Construct a 'MultiMap' with the supplied mappings.
@@ -73,7 +73,7 @@ toList (MultiMap m) = HashMap.toList $ fmap (S.toList) m
 
 -- | Return the value to which the specified key is mapped, or 'Nothing' if
 -- this map contains no mapping for the key.
-lookup :: Hashable k => k -> MultiMap k v -> S.Set v
+lookup :: (Hashable k) => k -> MultiMap k v -> S.Set v
 lookup k (MultiMap m) = fromMaybe S.empty $ HashMap.lookup k m
 
 -- | Associate the specified value with the specified key in this map.
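-- [editor's example; not part of the diff] Despite its Haddock, `lookup` above
-- is total in practice: a missing key yields the empty set rather than a
-- Nothing. Assuming the module is imported qualified as MultiMap and sets as S:

presentKey, absentKey :: S.Set Int
presentKey = MultiMap.lookup "k" (MultiMap.singleton "k" 1) -- fromList [1]
absentKey = MultiMap.lookup "missing" (MultiMap.singleton "k" 1) -- fromList []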
diff --git a/server/lib/hasura-extras/src/Data/HashMap/Strict/NonEmpty.hs b/server/lib/hasura-extras/src/Data/HashMap/Strict/NonEmpty.hs
index e93b724a54e..b5fab53c37f 100644
--- a/server/lib/hasura-extras/src/Data/HashMap/Strict/NonEmpty.hs
+++ b/server/lib/hasura-extras/src/Data/HashMap/Strict/NonEmpty.hs
@@ -51,7 +51,7 @@ newtype NEHashMap k v = NEHashMap {unNEHashMap :: HashMap k v}
 -------------------------------------------------------------------------------
 
 -- | Construct a non-empty map with a single element.
-singleton :: Hashable k => k -> v -> NEHashMap k v
+singleton :: (Hashable k) => k -> v -> NEHashMap k v
 singleton k v = NEHashMap $ HashMap.singleton k v
 
 -- | Construct a non-empty map with the supplied mappings.
@@ -66,12 +66,12 @@ fromHashMap m
 -- * if the provided list contains duplicate mappings, the later mappings take
 -- precedence;
 -- * if the provided list is empty, returns 'Nothing'.
-fromList :: Hashable k => [(k, v)] -> Maybe (NEHashMap k v)
+fromList :: (Hashable k) => [(k, v)] -> Maybe (NEHashMap k v)
 fromList [] = Nothing
 fromList v = Just $ NEHashMap $ HashMap.fromList v
 
 -- | A variant of 'fromList' that uses 'NonEmpty' inputs.
-fromNonEmpty :: Hashable k => NonEmpty (k, v) -> NEHashMap k v
+fromNonEmpty :: (Hashable k) => NonEmpty (k, v) -> NEHashMap k v
 fromNonEmpty (x NE.:| xs) = NEHashMap (HashMap.fromList (x : xs))
 
 -- | Convert a non-empty map to a 'HashMap'.
@@ -91,14 +91,14 @@ toList = HashMap.toList . unNEHashMap
 
 -- | Return the value to which the specified key is mapped, or 'Nothing' if
 -- this map contains no mapping for the key.
-lookup :: Hashable k => k -> NEHashMap k v -> Maybe v
+lookup :: (Hashable k) => k -> NEHashMap k v -> Maybe v
 lookup k (NEHashMap m) = HashMap.lookup k m
 
 -- | Return the value to which the specified key is mapped, or 'Nothing' if
 -- this map contains no mapping for the key.
 --
 -- This is a flipped version of 'lookup'.
-(!?) :: Hashable k => NEHashMap k v -> k -> Maybe v
+(!?) :: (Hashable k) => NEHashMap k v -> k -> Maybe v
 (!?) = flip lookup
 
 -- | Return a list of this map's keys.
@@ -115,14 +115,14 @@ elems = HashMap.elems . unNEHashMap
 --
 -- If a key occurs in both maps, the left map @m1@ (first argument) will be
 -- preferred.
-union :: Hashable k => NEHashMap k v -> NEHashMap k v -> NEHashMap k v
+union :: (Hashable k) => NEHashMap k v -> NEHashMap k v -> NEHashMap k v
 union (NEHashMap m1) (NEHashMap m2) = NEHashMap $ HashMap.union m1 m2
 
 -- | The union of two maps using a given value-wise union function.
 --
 -- If a key occurs in both maps, the provided function (first argument) will be
 -- used to compute the result.
-unionWith :: Hashable k => (v -> v -> v) -> NEHashMap k v -> NEHashMap k v -> NEHashMap k v
+unionWith :: (Hashable k) => (v -> v -> v) -> NEHashMap k v -> NEHashMap k v -> NEHashMap k v
 unionWith fun (NEHashMap m1) (NEHashMap m2) = NEHashMap $ HashMap.unionWith fun m1 m2
 
 -------------------------------------------------------------------------------
@@ -132,7 +132,7 @@ unionWith fun (NEHashMap m1) (NEHashMap m2) = NEHashMap $ HashMap.unionWith fun
 -- The size of the result may be smaller if f maps two or more distinct keys to
 -- the same new key. In this case there is no guarantee which of the associated
 -- values is chosen for the conflicting key.
-mapKeys :: Hashable k2 => (k1 -> k2) -> NEHashMap k1 v -> NEHashMap k2 v
+mapKeys :: (Hashable k2) => (k1 -> k2) -> NEHashMap k1 v -> NEHashMap k2 v
 mapKeys fun (NEHashMap m) = NEHashMap $ HashMap.mapKeys fun m
 
 -------------------------------------------------------------------------------
diff --git a/server/lib/hasura-extras/src/Data/List/Extended.hs b/server/lib/hasura-extras/src/Data/List/Extended.hs
index 05f9624367a..5fdf7cb7aa5 100644
--- a/server/lib/hasura-extras/src/Data/List/Extended.hs
+++ b/server/lib/hasura-extras/src/Data/List/Extended.hs
@@ -19,7 +19,7 @@ import Data.List qualified as L
 import Data.List.NonEmpty qualified as NE
 import Prelude
 
-duplicates :: Hashable a => [a] -> Set.HashSet a
+duplicates :: (Hashable a) => [a] -> Set.HashSet a
 duplicates =
   HashMap.keysSet . HashMap.filter (> 1) . HashMap.fromListWith (+) . map (,1 :: Int)
 
@@ -30,13 +30,13 @@ duplicates =
 uniques :: (Ord a) => [a] -> [a]
 uniques = nubOrd
 
-getDifference :: Hashable a => [a] -> [a] -> Set.HashSet a
+getDifference :: (Hashable a) => [a] -> [a] -> Set.HashSet a
 getDifference = Set.difference `on` Set.fromList
 
-getDifferenceOn :: Hashable k => (v -> k) -> [v] -> [v] -> [v]
+getDifferenceOn :: (Hashable k) => (v -> k) -> [v] -> [v] -> [v]
 getDifferenceOn f l = HashMap.elems . HashMap.differenceOn f l
 
-getOverlapWith :: Hashable k => (v -> k) -> [v] -> [v] -> [(v, v)]
+getOverlapWith :: (Hashable k) => (v -> k) -> [v] -> [v] -> [(v, v)]
 getOverlapWith getKey left right =
   HashMap.elems $ HashMap.intersectionWith (,) (mkMap left) (mkMap right)
   where
@@ -49,7 +49,7 @@ getOverlapWith getKey left right =
 --
 -- >>> longestCommonPrefix []
 -- []
-longestCommonPrefix :: Eq a => [[a]] -> [a]
+longestCommonPrefix :: (Eq a) => [[a]] -> [a]
 longestCommonPrefix [] = []
 longestCommonPrefix (x : xs) = foldr prefix x xs
   where
diff --git a/server/lib/hasura-extras/src/Data/Parser/CacheControl.hs b/server/lib/hasura-extras/src/Data/Parser/CacheControl.hs
index 2f367f898e6..c220de132d9 100644
--- a/server/lib/hasura-extras/src/Data/Parser/CacheControl.hs
+++ b/server/lib/hasura-extras/src/Data/Parser/CacheControl.hs
@@ -31,13 +31,13 @@ data CacheControlDirective
   deriving (Show, Eq)
 
 -- | Tries to parse the @max-age@ or @s-maxage@ present in the value of @Cache-Control@ header
-parseMaxAge :: Integral a => Text -> Either String a
+parseMaxAge :: (Integral a) => Text -> Either String a
 parseMaxAge t =
   parseCacheControl t >>= findMaxAge >>= maybe (Left notFoundErr) Right
   where
     notFoundErr = "could not find max-age/s-maxage"
 
-findMaxAge :: Integral a => CacheControl -> Either String (Maybe a)
+findMaxAge :: (Integral a) => CacheControl -> Either String (Maybe a)
 findMaxAge cacheControl = do
   case findCCDTokenWithVal checkMaxAgeToken cacheControl of
     Just (_, val) -> Just <$> first parseErr (AT.parseOnly AT.decimal val)
diff --git a/server/lib/hasura-extras/src/Data/Parser/Expires.hs b/server/lib/hasura-extras/src/Data/Parser/Expires.hs
index 98ba4f04847..c4908bd5f2d 100644
--- a/server/lib/hasura-extras/src/Data/Parser/Expires.hs
+++ b/server/lib/hasura-extras/src/Data/Parser/Expires.hs
@@ -10,7 +10,7 @@ import Data.Time.Format (defaultTimeLocale, parseTimeM)
 import Hasura.Prelude
 
 -- | Extracts an absolute expiration time from a Expires header.
-parseExpirationTime :: MonadError String m => Text -> m UTCTime
+parseExpirationTime :: (MonadError String m) => Text -> m UTCTime
 parseExpirationTime =
   fromText >>> parseTimeM True defaultTimeLocale "%a, %d %b %Y %T GMT"
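-- [editor's example; not part of the diff] `parseExpirationTime` above accepts
-- RFC 7231 IMF-fixdate strings, per the "%a, %d %b %Y %T GMT" format; run in
-- the Either instance of MonadError String:

expiry :: Either String UTCTime
expiry = parseExpirationTime "Mon, 01 Jan 2024 00:00:00 GMT"
-- Right 2024-01-01 00:00:00 UTC; malformed input yields a Left with the
-- parseTimeM failure message.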
diff --git a/server/lib/hasura-extras/src/Data/Parser/JSONPath.hs b/server/lib/hasura-extras/src/Data/Parser/JSONPath.hs
index bd5d57f349b..a64125d03b0 100644
--- a/server/lib/hasura-extras/src/Data/Parser/JSONPath.hs
+++ b/server/lib/hasura-extras/src/Data/Parser/JSONPath.hs
@@ -37,8 +37,8 @@ encodeJSONPath path = "$" <> foldMap formatPart path
 parseJSONPath :: Text -> Either Text JSONPath
 parseJSONPath "$" = Right []
 parseJSONPath txt =
-  Bifunctor.first (const invalidMessage) $
-    parseOnly (optional (char '$') *> many1' element <* endOfInput) txt
+  Bifunctor.first (const invalidMessage)
+    $ parseOnly (optional (char '$') *> many1' element <* endOfInput) txt
   where
     invalidMessage =
       txt
@@ -76,8 +76,12 @@ bracketElement = do
   pure result
   where
     parseJSONString inQuotes =
-      maybe (fail "Invalid JSON string") (pure . K.fromText) . J.decode . TL.encodeUtf8 $
-        "\"" <> inQuotes <> "\""
+      maybe (fail "Invalid JSON string") (pure . K.fromText)
+        . J.decode
+        . TL.encodeUtf8
+        $ "\""
+        <> inQuotes
+        <> "\""
 
     doubleQuotedString = do
       void $ char '"'
diff --git a/server/lib/hasura-extras/src/Data/Text/Extended.hs b/server/lib/hasura-extras/src/Data/Text/Extended.hs
index 2bc1af0eaab..4e36541060a 100644
--- a/server/lib/hasura-extras/src/Data/Text/Extended.hs
+++ b/server/lib/hasura-extras/src/Data/Text/Extended.hs
@@ -43,16 +43,16 @@ instance ToTxt Void where
 instance ToTxt (G.Value Void) where
   toTxt = TB.run . G.value
 
-bquote :: ToTxt t => t -> Text
+bquote :: (ToTxt t) => t -> Text
 bquote t = DT.singleton '`' <> toTxt t <> DT.singleton '`'
 
-squote :: ToTxt t => t -> Text
+squote :: (ToTxt t) => t -> Text
 squote t = DT.singleton '\'' <> toTxt t <> DT.singleton '\''
 
-dquote :: ToTxt t => t -> Text
+dquote :: (ToTxt t) => t -> Text
 dquote t = DT.singleton '"' <> toTxt t <> DT.singleton '"'
 
-paren :: ToTxt t => t -> Text
+paren :: (ToTxt t) => t -> Text
 paren t = "(" <> toTxt t <> ")"
 
 parenB :: TB.Builder -> TB.Builder
@@ -66,12 +66,12 @@ commaSeparated = DT.intercalate ", " . fmap toTxt . toList
 
 infixr 6 <>>
 
-(<>>) :: ToTxt t => Text -> t -> Text
+(<>>) :: (ToTxt t) => Text -> t -> Text
 (<>>) lTxt a = lTxt <> dquote a
 
 infixr 6 <<>
 
-(<<>) :: ToTxt t => t -> Text -> Text
+(<<>) :: (ToTxt t) => t -> Text -> Text
 (<<>) a rTxt = dquote a <> rTxt
 
 infixr 6 <~>
diff --git a/server/lib/hasura-extras/src/Data/Text/NonEmpty.hs b/server/lib/hasura-extras/src/Data/Text/NonEmpty.hs
index c46d4a924f6..e87a6973bb7 100644
--- a/server/lib/hasura-extras/src/Data/Text/NonEmpty.hs
+++ b/server/lib/hasura-extras/src/Data/Text/NonEmpty.hs
@@ -34,7 +34,7 @@ mkNonEmptyText text = Just $ NonEmptyText text
 mkNonEmptyTextUnsafe :: Text -> NonEmptyText
 mkNonEmptyTextUnsafe = NonEmptyText
 
-parseNonEmptyText :: MonadFail m => Text -> m NonEmptyText
+parseNonEmptyText :: (MonadFail m) => Text -> m NonEmptyText
 parseNonEmptyText text = mkNonEmptyText text `onNothing` fail "empty string not allowed"
 
 nonEmptyText :: Text -> Code Q NonEmptyText
@@ -66,7 +66,8 @@ instance FromJSONKey NonEmptyText where
 
 instance PG.FromCol NonEmptyText where
   fromCol bs =
-    mkNonEmptyText <$> PG.fromCol bs
+    mkNonEmptyText
+      <$> PG.fromCol bs
       >>= maybe (Left "empty string not allowed") Right
 
 instance HasCodec NonEmptyText where
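-- [editor's example; not part of the diff] `parseNonEmptyText` above runs in
-- any MonadFail, so Maybe makes the emptiness check directly visible:

okText :: Maybe NonEmptyText
okText = parseNonEmptyText "hello" -- Just

emptyText :: Maybe NonEmptyText
emptyText = parseNonEmptyText "" -- Nothing ("empty string not allowed")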
diff --git a/server/lib/hasura-extras/src/Data/Trie.hs b/server/lib/hasura-extras/src/Data/Trie.hs
index bf2ae9530e4..2e10082347e 100644
--- a/server/lib/hasura-extras/src/Data/Trie.hs
+++ b/server/lib/hasura-extras/src/Data/Trie.hs
@@ -61,21 +61,21 @@ singleton ps v = foldr (\p t -> Trie (HashMap.singleton p t) Nothing) (Trie Hash
 -------------------------------------------------------------------------------
 
 -- | Find a value at the given path, if any.
-lookup :: Hashable k => [k] -> Trie k v -> Maybe v
+lookup :: (Hashable k) => [k] -> Trie k v -> Maybe v
 lookup [] (Trie _ value) = value
 lookup (p : ps) (Trie tmap _) = lookup ps =<< HashMap.lookup p tmap
 
 -- | Insert the given value at the given path.
 --
 -- If there's already a value at the given path, it is replaced.
-insert :: Hashable k => [k] -> v -> Trie k v -> Trie k v
+insert :: (Hashable k) => [k] -> v -> Trie k v -> Trie k v
 insert = insertWith const
 
 -- | Insert the value at the given path.
 --
 -- If there's already a value at the given path, the old value is replaced by
 -- the result of applying the given function to the new and old value.
-insertWith :: Hashable k => (v -> v -> v) -> [k] -> v -> Trie k v -> Trie k v
+insertWith :: (Hashable k) => (v -> v -> v) -> [k] -> v -> Trie k v -> Trie k v
 insertWith fun path newValue t = go t path
   where
     go (Trie tmap value) = \case
diff --git a/server/lib/hasura-extras/src/GHC/Generics/Extended.hs b/server/lib/hasura-extras/src/GHC/Generics/Extended.hs
index 7674ef21fd6..d2f1ee968c0 100644
--- a/server/lib/hasura-extras/src/GHC/Generics/Extended.hs
+++ b/server/lib/hasura-extras/src/GHC/Generics/Extended.hs
@@ -18,7 +18,7 @@ constrName a = genericConstrName $ from a
 class HasConstructor (f :: Type -> Type) where
   genericConstrName :: f x -> String
 
-instance HasConstructor f => HasConstructor (D1 c f) where
+instance (HasConstructor f) => HasConstructor (D1 c f) where
   genericConstrName (M1 x) = genericConstrName x
   {-# INLINE genericConstrName #-}
 
@@ -27,6 +27,6 @@ instance (HasConstructor x, HasConstructor y) => HasConstructor (x :+: y) where
   genericConstrName (R1 r) = genericConstrName r
   {-# INLINE genericConstrName #-}
 
-instance Constructor c => HasConstructor (C1 c f) where
+instance (Constructor c) => HasConstructor (C1 c f) where
   genericConstrName x = conName x
   {-# INLINE genericConstrName #-}
diff --git a/server/lib/hasura-extras/src/Network/HTTP/Client/CreateManager.hs b/server/lib/hasura-extras/src/Network/HTTP/Client/CreateManager.hs
index 9dea2eb5257..ba0b90aea0f 100644
--- a/server/lib/hasura-extras/src/Network/HTTP/Client/CreateManager.hs
+++ b/server/lib/hasura-extras/src/Network/HTTP/Client/CreateManager.hs
@@ -16,5 +16,5 @@ import Network.Types.Extended (TlsAllow)
 mkHttpManager :: IO [TlsAllow] -> Blocklist -> IO HTTP.Manager
 mkHttpManager currentAllow blocklist = do
   tlsSettings <- HTTP.dynamicTlsSettings currentAllow
-  HTTP.newManager $
-    Restricted.mkRestrictedManagerSettings (block blocklist) Nothing (Just tlsSettings)
+  HTTP.newManager
+    $ Restricted.mkRestrictedManagerSettings (block blocklist) Nothing (Just tlsSettings)
diff --git a/server/lib/hasura-extras/src/Network/HTTP/Client/Transformable.hs b/server/lib/hasura-extras/src/Network/HTTP/Client/Transformable.hs
index 33c20ab8b90..8aa22e1d47e 100644
--- a/server/lib/hasura-extras/src/Network/HTTP/Client/Transformable.hs
+++ b/server/lib/hasura-extras/src/Network/HTTP/Client/Transformable.hs
@@ -105,7 +105,7 @@ instance J.ToJSON Client.Request where
 --
 -- NOTE: This function will throw an error in 'MonadThrow' if the URL is
 -- invalid.
-mkRequestThrow :: MonadThrow m => Text -> m Client.Request
+mkRequestThrow :: (MonadThrow m) => Text -> m Client.Request
 mkRequestThrow = Client.parseRequest . T.unpack
 
 -- | 'mkRequestThrow' with the 'MonadThrow' instance specialized to 'Either'.
diff --git a/server/lib/hasura-extras/src/Network/Types/Extended.hs b/server/lib/hasura-extras/src/Network/Types/Extended.hs
index 8d30797219c..f565add8bac 100644
--- a/server/lib/hasura-extras/src/Network/Types/Extended.hs
+++ b/server/lib/hasura-extras/src/Network/Types/Extended.hs
@@ -22,10 +22,10 @@ data Network = Network
 
 instance HasCodec Network where
   codec =
-    AC.object "Network" $
-      Network
-        <$> optionalFieldWithDefault' "tls_allowlist" []
-          AC..= networkTlsAllowlist
+    AC.object "Network"
+      $ Network
+      <$> optionalFieldWithDefault' "tls_allowlist" []
+      AC..= networkTlsAllowlist
 
 instance FromJSON Network where
   parseJSON = withObject "Network" $ \o -> Network <$> o .:? "tls_allowlist" .!= []
@@ -45,14 +45,14 @@ data TlsAllow = TlsAllow
 
 instance HasCodec TlsAllow where
   codec =
-    AC.object "TlsAllow" $
-      TlsAllow
-        <$> requiredField' "host"
-          AC..= taHost
+    AC.object "TlsAllow"
+      $ TlsAllow
+      <$> requiredField' "host"
+      AC..= taHost
       <*> optionalField' "suffix"
-        AC..= taSuffix
+        AC..= taSuffix
       <*> optionalField' "permissions"
-        AC..= taPermit
+        AC..= taPermit
 
 instance FromJSON TlsAllow where
   parseJSON j = aString j <|> anObject j
@@ -65,9 +65,12 @@ instance FromJSON TlsAllow where
     anObject =
       withObject "TlsAllow" $ \o ->
         TlsAllow
-          <$> o .: "host"
-          <*> o .:? "suffix"
-          <*> o .:? "permissions"
+          <$> o
+          .: "host"
+          <*> o
+          .:? "suffix"
+          <*> o
+          .:? "permissions"
 
 instance ToJSON TlsAllow where
   toJSON (TlsAllow h p a) =
@@ -88,8 +91,9 @@ instance HasCodec TlsPermission where
 instance FromJSON TlsPermission where
   parseJSON (String "self-signed") = pure SelfSigned
   parseJSON _ =
-    fail $
-      "TlsPermission expecting one of " <> intercalate ", " (map (show :: TlsPermission -> String) [minBound .. maxBound])
+    fail
+      $ "TlsPermission expecting one of "
+      <> intercalate ", " (map (show :: TlsPermission -> String) [minBound .. maxBound])
 
 instance ToJSON TlsPermission where
   toJSON SelfSigned = String "self-signed"
@@ -105,8 +109,10 @@ data DropHostFromTLSAllowlist = DropHostFromTLSAllowlist
 instance FromJSON DropHostFromTLSAllowlist where
   parseJSON = withObject "DropHostFromTLSAllowlist" $ \o ->
     DropHostFromTLSAllowlist
-      <$> o .: "host"
-      <*> o .:? "suffix"
+      <$> o
+      .: "host"
+      <*> o
+      .:? "suffix"
 
 instance ToJSON DropHostFromTLSAllowlist where
   toJSON (DropHostFromTLSAllowlist h p) =
diff --git a/server/lib/hasura-json-encoding/src/Hasura/EncJSON.hs b/server/lib/hasura-json-encoding/src/Hasura/EncJSON.hs
index ea39ea2b23b..be28cb23c89 100644
--- a/server/lib/hasura-json-encoding/src/Hasura/EncJSON.hs
+++ b/server/lib/hasura-json-encoding/src/Hasura/EncJSON.hs
@@ -146,7 +146,7 @@ encJFromLBS :: BL.ByteString -> EncJSON
 encJFromLBS = EncJSON . BB.lazyByteString
 {-# INLINE encJFromLBS #-}
 
-encJFromJValue :: J.ToJSON a => a -> EncJSON
+encJFromJValue :: (J.ToJSON a) => a -> EncJSON
 encJFromJValue = encJFromBuilder . J.fromEncoding . J.toEncoding
 {-# INLINE encJFromJValue #-}
diff --git a/server/lib/hasura-prelude/src/Hasura/Prelude.hs b/server/lib/hasura-prelude/src/Hasura/Prelude.hs
index 03e66c06e7f..00bae557104 100644
--- a/server/lib/hasura-prelude/src/Hasura/Prelude.hs
+++ b/server/lib/hasura-prelude/src/Hasura/Prelude.hs
@@ -175,16 +175,16 @@ import Prelude as M hiding (fail, init, lookup)
 
 -- | Performs default 'Applicative' action if 'Nothing' is
 -- given. Otherwise returns content of 'Just' pured to 'Applicative'.
-onNothing :: Applicative m => Maybe a -> m a -> m a
+onNothing :: (Applicative m) => Maybe a -> m a -> m a
 onNothing m act = maybe act pure m
 
 -- | Monadic version of 'onNothing'.
-onNothingM :: Monad m => m (Maybe a) -> m a -> m a
+onNothingM :: (Monad m) => m (Maybe a) -> m a -> m a
 onNothingM m act = m >>= (`onNothing` act)
 
 -- | Perform some operation on 'Just', given the field inside the
 -- 'Just'. Like most good things in life, this is a specialized 'for_'.
-onJust :: Applicative m => Maybe a -> (a -> m ()) -> m ()
+onJust :: (Applicative m) => Maybe a -> (a -> m ()) -> m ()
 onJust = for_
 
 -- | Like 'when', but return either 'Nothing' if the predicate was 'False',
@@ -192,7 +192,7 @@ onJust = for_
 --
 -- > whenMaybe True (print 1) == fmap Just (print 1)
 -- > whenMaybe False (print 1) == pure Nothing
-whenMaybe :: Applicative m => Bool -> m a -> m (Maybe a)
+whenMaybe :: (Applicative m) => Bool -> m a -> m (Maybe a)
 whenMaybe True = fmap Just
 whenMaybe False = const $ pure Nothing
 
@@ -225,7 +225,7 @@ spanMaybeM f = go . toList
 -- | Upgrade a 'Maybe' to a 'MaybeT'.
 --
 -- cf. http://hackage.haskell.org/package/errors-2.3.0/docs/src/Control.Error.Util.html#hoistMaybe
-hoistMaybe :: Applicative m => Maybe b -> MaybeT m b
+hoistMaybe :: (Applicative m) => Maybe b -> MaybeT m b
 hoistMaybe = MaybeT . pure
 
 --------------------------------------------------------------------------------
@@ -233,11 +233,11 @@ hoistMaybe = MaybeT . pure
 
 -- | Eliminate an 'Either' by puring the 'Right' value and applying an
 -- applicative action to the 'Left' value.
-onLeft :: Applicative m => Either e a -> (e -> m a) -> m a
+onLeft :: (Applicative m) => Either e a -> (e -> m a) -> m a
 onLeft e f = either f pure e
 
 -- | Similar to 'onLeft', but accepts a monadic action on its LHS.
-onLeftM :: Monad m => m (Either e a) -> (e -> m a) -> m a
+onLeftM :: (Monad m) => m (Either e a) -> (e -> m a) -> m a
 onLeftM e f = e >>= (`onLeft` f)
 
 -- | Map over the 'Left' value of an 'Either'. This is a
@@ -246,13 +246,13 @@ mapLeft :: (e1 -> e2) -> Either e1 a -> Either e2 a
 mapLeft = Data.Bifunctor.first
 
 -- | Like 'liftEither', but accepts a monadic action.
-liftEitherM :: MonadError e m => m (Either e a) -> m a
+liftEitherM :: (MonadError e m) => m (Either e a) -> m a
 liftEitherM action = action >>= liftEither
 
 -- | Upgrade an 'Either' to an 'ExceptT'.
 --
 -- cf. http://hackage.haskell.org/package/errors-2.3.0/docs/src/Control.Error.Util.html#hoistEither
-hoistEither :: Applicative m => Either e a -> ExceptT e m a
+hoistEither :: (Applicative m) => Either e a -> ExceptT e m a
 hoistEither = ExceptT . pure
 
 --------------------------------------------------------------------------------
@@ -261,7 +261,7 @@ hoistEither = ExceptT . pure
 -- | 'choice' ps tries to apply the actions in the list ps in order,
 -- until one of them succeeds. Returns the value of the succeeding
 -- action.
-choice :: Alternative f => [f a] -> f a
+choice :: (Alternative f) => [f a] -> f a
 choice = asum
 
 -- | Nondeterministically choose an element from a Foldable collection.
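-- [editor's example; not part of the diff] The Maybe/Either eliminators above
-- are designed for infix use, keeping the success path leftmost. Two runnable
-- shapes (IO is used purely for illustration):

fallback :: IO Int
fallback = Nothing `onNothing` pure 42 -- runs the fallback action: 42

recovered :: IO Int
recovered = pure (Left "boom") `onLeftM` (\_ -> pure 0) -- handles the Left: 0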
@@ -293,26 +293,26 @@ base64Decode =
     Base64.decodeLenient . BL.fromStrict . txtToBs
 
 -- | Given 'Show' @a@, convert @a@ into a 'Text'.
-tshow :: Show a => a -> Text
+tshow :: (Show a) => a -> Text
 tshow = T.pack . show
 
 --------------------------------------------------------------------------------
 -- Trace debugging
 
 -- | Labeled, prettified 'traceShowId'.
-ltrace :: Show a => String -> a -> a
+ltrace :: (Show a) => String -> a -> a
 ltrace lbl x = Debug.trace (lbl <> ": " <> TL.unpack (PS.pShow x)) x
 
 {- HLINT ignore ltrace -}
 
 -- | Labeled, prettified 'traceShowM'.
-ltraceM :: Applicative m => Show a => String -> a -> m ()
+ltraceM :: (Applicative m) => (Show a) => String -> a -> m ()
 ltraceM lbl x = Debug.traceM (lbl <> ": " <> TL.unpack (PS.pShow x))
 
 {- HLINT ignore ltraceM -}
 
 -- | Trace a prettified value to a file.
-traceToFile :: Show a => FilePath -> a -> a
+traceToFile :: (Show a) => FilePath -> a -> a
 traceToFile filepath x =
   Debug.trace
     ("tracing to " <> filepath)
@@ -321,7 +321,7 @@ traceToFile filepath x =
 
 {- HLINT ignore traceToFile -}
 
 -- | Trace a prettified value to a file in an Applicative context.
-traceToFileM :: Applicative m => Show a => FilePath -> a -> m ()
+traceToFileM :: (Applicative m) => (Show a) => FilePath -> a -> m ()
 traceToFileM filepath x =
   Debug.traceM
     $ unwords
@@ -355,7 +355,7 @@ oMapFromL f = InsOrdHashMap.fromList . map (\v -> (f v, v))
 -- result of the input action will be evaluated to WHNF.
 --
 -- The result 'DiffTime' is guaranteed to be >= 0.
-withElapsedTime :: MonadIO m => m a -> m (DiffTime, a)
+withElapsedTime :: (MonadIO m) => m a -> m (DiffTime, a)
 withElapsedTime ma = do
   stopTimer <- startTimer
   !a <- ma
diff --git a/server/lib/incremental/src/Hasura/Incremental/Internal/Cache.hs b/server/lib/incremental/src/Hasura/Incremental/Internal/Cache.hs
index 167f8de93c9..5e1951dc556 100644
--- a/server/lib/incremental/src/Hasura/Incremental/Internal/Cache.hs
+++ b/server/lib/incremental/src/Hasura/Incremental/Internal/Cache.hs
@@ -28,7 +28,7 @@ class (ArrowKleisli m arr) => ArrowCache m arr | arr -> m where
   --
   -- __Note that only direct inputs and outputs of the given arrow are cached.__ If an arrow
   -- provides access to values through a side-channel, they will __not__ participate in caching.
-  cache :: (Given Accesses => Eq a) => arr a b -> arr a b
+  cache :: ((Given Accesses) => Eq a) => arr a b -> arr a b
 
   -- | Creates a new 'Dependency', which allows fine-grained caching of composite values; see the
   -- documentation for 'Dependency' for more details.
@@ -54,10 +54,10 @@ instance (Monoid w, ArrowCache m arr) => ArrowCache m (WriterA w arr) where
   dependOn = liftA dependOn
   {-# INLINE dependOn #-}
 
-instance MonadIO m => ArrowCache m (Rule m) where
+instance (MonadIO m) => ArrowCache m (Rule m) where
   cache ::
     forall a b.
-    (Given Accesses => Eq a) =>
+    ((Given Accesses) => Eq a) =>
    Rule m a b ->
     Rule m a b
   cache r0 = Rule \s a k -> do
@@ -71,8 +71,8 @@ instance MonadIO m => ArrowCache m (Rule m) where
      cached :: Accesses -> a -> b -> Rule m a (b, Accesses) -> Rule m a b
      cached accesses a b (Rule r) = Rule \s a' k ->
        if
-          | unchanged accesses a a' -> (k $! (s <> accesses)) b (cached accesses a b (Rule r))
-          | otherwise -> r s a' \s' (b', accesses') r' -> k s' b' (cached accesses' a' b' r')
+            | unchanged accesses a a' -> (k $! (s <> accesses)) b (cached accesses a b (Rule r))
+            | otherwise -> r s a' \s' (b', accesses') r' -> k s' b' (cached accesses' a' b' r')
 
   newDependency = Rule \s a k -> do
     key <- DependencyRoot <$> newUniqueS
diff --git a/server/lib/incremental/src/Hasura/Incremental/Internal/Dependency.hs b/server/lib/incremental/src/Hasura/Incremental/Internal/Dependency.hs
index 4fe33c507e1..ca13585e4f1 100644
--- a/server/lib/incremental/src/Hasura/Incremental/Internal/Dependency.hs
+++ b/server/lib/incremental/src/Hasura/Incremental/Internal/Dependency.hs
@@ -43,7 +43,7 @@ selectD k (Dependency dk a) = Dependency (DependencyChild k dk) (select k a)
 selectKeyD :: (Select a, Selector a ~ ConstS k v) => k -> Dependency a -> Dependency v
 selectKeyD = selectD . ConstS
 
-selectMaybeD :: Select a => Selector a b -> Dependency (Maybe a) -> Dependency (Maybe b)
+selectMaybeD :: (Select a) => Selector a b -> Dependency (Maybe a) -> Dependency (Maybe b)
 selectMaybeD = selectD . FMapS
 
 -- | Tracks whether a 'Dependency' is a “root” dependency created by 'newDependency' or a “child”
@@ -92,7 +92,7 @@ recordAccess depKey !access (Accesses accesses) = case depKey of
   DependencyChild selector parentKey ->
     recordAccess parentKey (AccessedParts $ DM.singleton selector access) (Accesses accesses)
 
-unchanged :: (Given Accesses => Eq a) => Accesses -> a -> a -> Bool
+unchanged :: ((Given Accesses) => Eq a) => Accesses -> a -> a -> Bool
 unchanged accesses a b = give accesses (a == b)
 
 -- | Records the accesses made within a single 'Dependency' and its children. The 'Semigroup'
@@ -102,8 +102,8 @@ unchanged accesses a b = give accesses (a == b)
 --     accessed.
 --   * 'AccessedParts' records a set of accesses for individual parts of a dependency.
 data Access a where
-  AccessedAll :: Eq a => Access a
-  AccessedParts :: Select a => DM.DMap (Selector a) Access -> Access a
+  AccessedAll :: (Eq a) => Access a
+  AccessedParts :: (Select a) => DM.DMap (Selector a) Access -> Access a
 
 instance Semigroup (Access a) where
   AccessedAll <> _ = AccessedAll
diff --git a/server/lib/incremental/src/Hasura/Incremental/Internal/Rule.hs b/server/lib/incremental/src/Hasura/Incremental/Internal/Rule.hs
index 777daf0b74c..d867a7c4dbc 100644
--- a/server/lib/incremental/src/Hasura/Incremental/Internal/Rule.hs
+++ b/server/lib/incremental/src/Hasura/Incremental/Internal/Rule.hs
@@ -296,7 +296,7 @@ class (Arrow arr) => ArrowDistribute arr where
   -- This is intended to be used as a control operator in @proc@ notation; see
   -- Note [Weird control operator types] in "Control.Arrow.Extended".
   keyed ::
-    Hashable k =>
+    (Hashable k) =>
     arr (e, (k, (a, s))) b ->
     arr (e, (HashMap k a, s)) (HashMap k b)
 
@@ -310,7 +310,7 @@ instance (Monoid w, ArrowDistribute arr) => ArrowDistribute (WriterA w arr) wher
 instance ArrowDistribute (Rule m) where
   keyed ::
     forall a b k e s.
-    Hashable k =>
+    (Hashable k) =>
    Rule m (e, (k, (a, s))) b ->
    Rule m (e, (HashMap k a, s)) (HashMap k b)
   keyed r0 = keyedWith HashMap.empty
diff --git a/server/lib/incremental/src/Hasura/Incremental/Select.hs b/server/lib/incremental/src/Hasura/Incremental/Select.hs
index fc721807332..227fea7c480 100644
--- a/server/lib/incremental/src/Hasura/Incremental/Select.hs
+++ b/server/lib/incremental/src/Hasura/Incremental/Select.hs
@@ -20,7 +20,6 @@ module Hasura.Incremental.Select
 where
 
 import Data.Dependent.Map qualified as DM
-import "some" Data.GADT.Compare
 import Data.HashMap.Strict qualified as HashMap
 import Data.Kind
 import Data.Proxy (Proxy (..))
@@ -31,6 +30,7 @@ import GHC.Records (HasField (..))
 import GHC.TypeLits (KnownSymbol, sameSymbol, symbolVal)
 import Hasura.Prelude
 import Unsafe.Coerce (unsafeCoerce)
+import "some" Data.GADT.Compare
 
 -- | The 'Select' class provides a way to access subparts of a product type using a reified
 -- 'Selector'. A @'Selector' a b@ is essentially a function from @a@ to @b@, and indeed 'select'
@@ -48,7 +48,7 @@ class (GCompare (Selector a)) => Select a where
   select :: Selector a b -> a -> b
 
   type Selector r = FieldS r
-  default select :: Selector a ~ FieldS a => Selector a b -> a -> b
+  default select :: (Selector a ~ FieldS a) => Selector a b -> a -> b
   select (FieldS (_ :: Proxy s)) = getField @s
 
 instance (Ord k, Hashable k) => Select (HashMap k v) where
@@ -65,13 +65,13 @@ newtype FMap f x = FMap {unFMap :: f x}
 data FMapS f a b where
   FMapS :: Selector a b -> FMapS f a (f b)
 
-instance Select a => GEq (FMapS f a) where
+instance (Select a) => GEq (FMapS f a) where
   FMapS sel1 `geq` FMapS sel2 =
     case sel1 `geq` sel2 of
       Just Refl -> Just Refl
      Nothing -> Nothing
 
-instance Select a => GCompare (FMapS f a) where
+instance (Select a) => GCompare (FMapS f a) where
   gcompare (FMapS sel1) (FMapS sel2) =
     case gcompare sel1 sel2 of
       GLT -> GLT
@@ -82,7 +82,7 @@ instance (Functor f, Select a) => Select (FMap f a) where
   type Selector (FMap f a) = FMapS f a
   select (FMapS s) = unFMap . fmap (select s)
 
-deriving via FMap Maybe a instance Select a => Select (Maybe a)
+deriving via FMap Maybe a instance (Select a) => Select (Maybe a)
 
 -- | The constant selector, which is useful for representing selectors into data structures where
 -- all fields have the same type. Matching on a value of type @'ConstS' k a b@ causes @a@ and @b@ to
@@ -137,7 +137,7 @@ type role UniqueS nominal
 newtype UniqueS a = UniqueS Unique
   deriving (Eq)
 
-newUniqueS :: MonadIO m => m (UniqueS a)
+newUniqueS :: (MonadIO m) => m (UniqueS a)
 newUniqueS = UniqueS <$> liftIO newUnique
 {-# INLINE newUniqueS #-}
diff --git a/server/lib/incremental/test/Hasura/IncrementalSpec.hs b/server/lib/incremental/test/Hasura/IncrementalSpec.hs
index 1f391b6b11c..43a191e5fd8 100644
--- a/server/lib/incremental/test/Hasura/IncrementalSpec.hs
+++ b/server/lib/incremental/test/Hasura/IncrementalSpec.hs
@@ -71,7 +71,8 @@ spec = do
               Inc.cache $ arrM (tell . S.singleton) -< (k, v)
               returnA -< v * 2
           )
-        |) m
+        |)
+          m
 
     (result1, log1) <- runWriterT . Inc.build rule $ HashMap.fromList [("a", 1), ("b", 2)]
     Inc.result result1 `shouldBe` HashMap.fromList [("a", 2), ("b", 4)]
diff --git a/server/lib/pg-client/src/Database/PG/Query/Class.hs b/server/lib/pg-client/src/Database/PG/Query/Class.hs
index 884a54a8dd6..eaea9b61299 100644
--- a/server/lib/pg-client/src/Database/PG/Query/Class.hs
+++ b/server/lib/pg-client/src/Database/PG/Query/Class.hs
@@ -146,7 +146,7 @@ instance FromCol Bool where
 instance FromCol UUID where
   fromCol = fromColHelper PD.uuid
 
-instance FromCol a => FromCol (Maybe a) where
+instance (FromCol a) => FromCol (Maybe a) where
   fromCol Nothing = return Nothing
   fromCol bs = Just <$> fromCol bs
 
@@ -198,21 +198,21 @@ buildMat r = do
       VM.unsafeWrite mvx (rowInt ir) vy
   V.unsafeFreeze mvx
 
-instance FromRow a => FromRes [a] where
+instance (FromRow a) => FromRes [a] where
   fromRes (ResultOkEmpty _) =
     throwError "Expecting data. Instead, status is 'CommandOk'"
   fromRes (ResultOkData res) = do
    rm <- liftIO $ buildMat res
    ExceptT $ return $ fmap V.toList $ sequence $ V.map fromRow rm
 
-instance FromRow a => FromRes (V.Vector a) where
+instance (FromRow a) => FromRes (V.Vector a) where
   fromRes (ResultOkEmpty _) =
     throwError "Expecting data. Instead, status is 'CommandOk'"
   fromRes (ResultOkData res) = do
    rm <- liftIO $ buildMat res
    ExceptT $ return $ sequence $ V.map fromRow rm
 
-instance FromRow a => FromRes (SingleRow a) where
+instance (FromRow a) => FromRes (SingleRow a) where
   fromRes (ResultOkEmpty _) =
     throwError "Expecting data. Instead, status is 'CommandOk'"
   fromRes (ResultOkData res) = do
@@ -221,7 +221,7 @@ instance FromRow a => FromRes (SingleRow a) where
      then ExceptT $ return $ SingleRow <$> fromRow (rm V.! 0)
      else throwError "Rows returned != 1"
 
-instance FromRow a => FromRes (Maybe a) where
+instance (FromRow a) => FromRes (Maybe a) where
   fromRes (ResultOkEmpty _) =
     throwError "Expecting data. Instead, status is 'CommandOk'"
   fromRes (ResultOkData res) = do
@@ -241,7 +241,7 @@ colMismatch expected actual =
       show actual
     ]
 
-instance FromCol a => FromRow (Identity a) where
+instance (FromCol a) => FromRow (Identity a) where
   fromRow row = case V.length row of
     1 -> fmap Identity $ fromCol $ row V.! 0
    c -> throwError $ colMismatch 1 c
diff --git a/server/lib/pg-client/src/Database/PG/Query/Connection.hs b/server/lib/pg-client/src/Database/PG/Query/Connection.hs
index 4b8d29efb37..a35803e7bc5 100644
--- a/server/lib/pg-client/src/Database/PG/Query/Connection.hs
+++ b/server/lib/pg-client/src/Database/PG/Query/Connection.hs
@@ -124,11 +124,11 @@ type PGError = Either PGErrInternal PGConnErr
 type PGExec a = ExceptT PGError IO a
 
 throwPGIntErr ::
-  MonadError PGError m => PGErrInternal -> m a
+  (MonadError PGError m) => PGErrInternal -> m a
 throwPGIntErr = throwError . Left
 
 throwPGConnErr ::
-  MonadError PGError m => PGConnErr -> m a
+  (MonadError PGError m) => PGConnErr -> m a
 throwPGConnErr = throwError . Right
 
 readConnErr :: PQ.Connection -> IO Text
@@ -416,7 +416,7 @@ cancelOnAsync conn action = do
     `catch` (\(PGCancelErr msg) -> throwPGIntErr $ PGIUnexpected $ "error cancelling query: " <> msg)
 
 mkPGRetryPolicy ::
-  MonadIO m =>
+  (MonadIO m) =>
   -- | number of retries
   Int ->
   PGRetryPolicyM m
diff --git a/server/lib/pg-client/src/Database/PG/Query/PTI.hs b/server/lib/pg-client/src/Database/PG/Query/PTI.hs
index 3bc14cc22fc..c35bc349600 100644
--- a/server/lib/pg-client/src/Database/PG/Query/PTI.hs
+++ b/server/lib/pg-client/src/Database/PG/Query/PTI.hs
@@ -19,7 +19,7 @@ import Prelude
 mkOid :: Word32 -> PQ.Oid
 mkOid = PQ.Oid . fromIntegral
 
-unOid :: Integral n => PQ.Oid -> n
+unOid :: (Integral n) => PQ.Oid -> n
 unOid (PQ.Oid oid') = fromIntegral oid'
 
 -- * Constants
diff --git a/server/lib/pg-client/src/Database/PG/Query/Transaction.hs b/server/lib/pg-client/src/Database/PG/Query/Transaction.hs
index 7e51fc50f64..5e7c5f3ebec 100644
--- a/server/lib/pg-client/src/Database/PG/Query/Transaction.hs
+++ b/server/lib/pg-client/src/Database/PG/Query/Transaction.hs
@@ -96,7 +96,7 @@ newtype TxET e m a = TxET
       MonadFix
     )
 
-transformerJoinTxET :: Monad m => TxET e (TxET e m) a -> TxET e m a
+transformerJoinTxET :: (Monad m) => TxET e (TxET e m) a -> TxET e m a
 transformerJoinTxET x = TxET $
   ReaderT $ \pgConn -> do
     result <- runReaderT (txHandler $ runExceptT (runReaderT (txHandler x) pgConn)) pgConn
@@ -112,9 +112,9 @@ instance MonadTrans (TxET e) where
 
 instance MFunctor (TxET e) where
  hoist f = TxET . hoist (hoist f) .
txHandler -deriving via (ReaderT PGConn (ExceptT e m)) instance MonadBase IO m => MonadBase IO (TxET e m) +deriving via (ReaderT PGConn (ExceptT e m)) instance (MonadBase IO m) => MonadBase IO (TxET e m) -deriving via (ReaderT PGConn (ExceptT e m)) instance MonadBaseControl IO m => MonadBaseControl IO (TxET e m) +deriving via (ReaderT PGConn (ExceptT e m)) instance (MonadBaseControl IO m) => MonadBaseControl IO (TxET e m) type TxE e a = TxET e IO a @@ -236,7 +236,7 @@ describePreparedStatement ef name = TxET $ describePrepared pgConn name serverVersion :: - MonadIO m => TxET e m Int + (MonadIO m) => TxET e m Int serverVersion = do conn <- asks pgPQConn liftIO $ PQ.serverVersion conn diff --git a/server/lib/pg-client/test/Spec.hs b/server/lib/pg-client/test/Spec.hs index b481b1791ee..589793239b8 100644 --- a/server/lib/pg-client/test/Spec.hs +++ b/server/lib/pg-client/test/Spec.hs @@ -80,7 +80,7 @@ withFreshPool pool action = . const $ lift action -err :: Show a => a -> IO (Maybe String) +err :: (Show a) => a -> IO (Maybe String) err = pure . Just . show nada :: IO () diff --git a/server/lib/resource-pool/Data/Pool.hs b/server/lib/resource-pool/Data/Pool.hs index 82ad5aee578..861c44a3f0e 100644 --- a/server/lib/resource-pool/Data/Pool.hs +++ b/server/lib/resource-pool/Data/Pool.hs @@ -445,7 +445,7 @@ tryTakeResource pool@Pool {..} = do return $ Just <$> create - `onException` atomically (modifyTVar_ inUse (subtract 1)) + `onException` atomically (modifyTVar_ inUse (subtract 1)) return $ (,local) <$> resource {-# INLINEABLE tryTakeResource #-} diff --git a/server/lib/schema-parsers/src/Data/GADT/Compare/Extended.hs b/server/lib/schema-parsers/src/Data/GADT/Compare/Extended.hs index 02c35956cfc..40a76fb5a0a 100644 --- a/server/lib/schema-parsers/src/Data/GADT/Compare/Extended.hs +++ b/server/lib/schema-parsers/src/Data/GADT/Compare/Extended.hs @@ -17,7 +17,7 @@ strengthenOrdering GT = GGT infixr 6 `extendGOrdering` -extendGOrdering :: GOrdering a b -> (a ~ b => GOrdering c d) -> GOrdering c d +extendGOrdering :: GOrdering a b -> ((a ~ b) => GOrdering c d) -> GOrdering c d extendGOrdering GLT _ = GLT extendGOrdering GEQ x = x extendGOrdering GGT _ = GGT diff --git a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Class.hs b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Class.hs index a1ceececfa0..f11d32804ca 100644 --- a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Class.hs +++ b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Class.hs @@ -22,8 +22,8 @@ class (Monad m, Typeable m) => MonadParse m where -- caught. parseErrorWith :: ParseErrorCode -> ErrorMessage -> m a -withPath :: MonadParse m => J.JSONPath -> m a -> m a +withPath :: (MonadParse m) => J.JSONPath -> m a -> m a withPath path action = foldr withKey action path -parseError :: MonadParse m => ErrorMessage -> m a +parseError :: (MonadParse m) => ErrorMessage -> m a parseError = parseErrorWith ValidationFailed diff --git a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Directives.hs b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Directives.hs index 03dd7dfd56e..dadb20ab4c6 100644 --- a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Directives.hs +++ b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Directives.hs @@ -76,7 +76,7 @@ import Prelude -- Directives may be "hidden", in which case they won't be advertised in the -- schema, but silently accepted. This is inadvisable and should only be used -- when there's no other way around it. -directivesInfo :: forall m origin.
MonadParse m => [DirectiveInfo origin] +directivesInfo :: forall m origin. (MonadParse m) => [DirectiveInfo origin] directivesInfo = do dir <- inclusionDirectives @m <> customDirectives @m guard $ dAdvertised dir @@ -84,13 +84,13 @@ directivesInfo = do -- | Not exported, only used internally; identical to 'directivesInfo', but also -- contains hidden directives. -allDirectives :: forall m origin. MonadParse m => [DirectiveInfo origin] +allDirectives :: forall m origin. (MonadParse m) => [DirectiveInfo origin] allDirectives = map dDefinition $ inclusionDirectives @m <> customDirectives @m -inclusionDirectives :: forall m origin. MonadParse m => [Directive origin m] +inclusionDirectives :: forall m origin. (MonadParse m) => [Directive origin m] inclusionDirectives = [includeDirective @m, skipDirective @m] -customDirectives :: forall m origin. MonadParse m => [Directive origin m] +customDirectives :: forall m origin. (MonadParse m) => [Directive origin m] customDirectives = [cachedDirective @m, multipleRootFieldsDirective @m] -- | Parses directives, given a location. Ensures that all directives are known @@ -105,7 +105,7 @@ customDirectives = [cachedDirective @m, multipleRootFieldsDirective @m] -- withDirective dMap cached $ for_ \_ -> tagAsCached parseDirectives :: forall origin m. - MonadParse m => + (MonadParse m) => [Directive origin m] -> G.DirectiveLocation -> [G.Directive Variable] -> @@ -166,7 +166,7 @@ withDirective dmap key callback = callback $ runIdentity <$> DM.lookup key dmap -- Cached custom directive. -cachedDirective :: forall m origin. MonadParse m => Directive origin m +cachedDirective :: forall m origin. (MonadParse m) => Directive origin m cachedDirective = mkDirective Name._cached @@ -190,7 +190,7 @@ cached = DirectiveKey Name._cached -- Subscription tests custom directive. -multipleRootFieldsDirective :: MonadParse m => Directive origin m +multipleRootFieldsDirective :: (MonadParse m) => Directive origin m multipleRootFieldsDirective = mkDirective Name.__multiple_top_level_fields @@ -204,7 +204,7 @@ multipleRootFields = DirectiveKey Name.__multiple_top_level_fields -- Built-in inclusion directives -skipDirective :: MonadParse m => Directive origin m +skipDirective :: (MonadParse m) => Directive origin m skipDirective = mkDirective Name._skip @@ -216,7 +216,7 @@ skipDirective = ] ifArgument -includeDirective :: MonadParse m => Directive origin m +includeDirective :: (MonadParse m) => Directive origin m includeDirective = mkDirective Name._include @@ -234,7 +234,7 @@ skip = DirectiveKey Name._skip include :: DirectiveKey Bool include = DirectiveKey Name._include -ifArgument :: MonadParse m => InputFieldsParser origin m Bool +ifArgument :: (MonadParse m) => InputFieldsParser origin m Bool ifArgument = field Name._if Nothing boolean -- Parser type for directives. 
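The Haddock on 'parseDirectives' above already hints at the intended call pattern; the following free-standing sketch (an illustration only, not part of the patch) spells it out. It assumes a surrounding computation in some 'MonadParse m'; 'tagAsCached' is a hypothetical caller-supplied action, 'directiveParsers' would typically be 'customDirectives', and 'for_' is Data.Foldable.for_.

-- Illustrative sketch only, not part of the patch; 'tagAsCached' is a
-- hypothetical stand-in for whatever effect @cached should trigger.
exampleFieldDirectives ::
  (MonadParse m) =>
  m () ->
  [Directive origin m] ->
  [G.Directive Variable] ->
  m ()
exampleFieldDirectives tagAsCached directiveParsers directives = do
  -- check that all given directives are known and allowed at field location
  dMap <- parseDirectives directiveParsers (G.DLExecutable G.EDLFIELD) directives
  -- run the action only when @cached was actually supplied on the field
  withDirective dMap cached $ \mCached -> for_ mCached $ \_ -> tagAsCached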
@@ -250,7 +250,7 @@ data Directive origin m where Directive origin m data DirectiveKey a where - DirectiveKey :: Typeable a => G.Name -> DirectiveKey a + DirectiveKey :: (Typeable a) => G.Name -> DirectiveKey a instance GEq DirectiveKey where geq diff --git a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Convert.hs b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Convert.hs index dd494b77fb9..d33934654cc 100644 --- a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Convert.hs +++ b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Convert.hs @@ -27,7 +27,7 @@ import Language.GraphQL.Draft.Syntax qualified as G -- Disable custom prelude warnings in preparation for extracting this module into a separate package. {-# ANN module ("HLint: ignore Use onNothing" :: String) #-} -valueToJSON :: MonadParse m => G.GType -> InputValue Variable -> m J.Value +valueToJSON :: (MonadParse m) => G.GType -> InputValue Variable -> m J.Value valueToJSON expectedType inputVal = do peeledVal <- peelVariable expectedType inputVal pure $ valueToJSON' peeledVal diff --git a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Input.hs b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Input.hs index a3f2fce099c..85552304485 100644 --- a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Input.hs +++ b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Input.hs @@ -48,10 +48,10 @@ import Language.GraphQL.Draft.Syntax hiding (Definition) -- ure that out on its own, so we have to be explicit to give -- it a little help. -inputParserInput :: forall k. 'Input <: k => ParserInput k :~: InputValue Variable +inputParserInput :: forall k. ('Input <: k) => ParserInput k :~: InputValue Variable inputParserInput = case subKind @'Input @k of KRefl -> Refl; KBoth -> Refl -pInputParser :: forall origin k m a. 'Input <: k => Parser origin k m a -> InputValue Variable -> m a +pInputParser :: forall origin k m a. ('Input <: k) => Parser origin k m a -> InputValue Variable -> m a pInputParser = gcastWith (inputParserInput @k) pParser -- | Parses some collection of input fields. Build an 'InputFieldsParser' using @@ -74,7 +74,7 @@ instance (Functor m) => Functor (InputFieldsParser origin m) where {-# INLINE fmap #-} fmap f = \(InputFieldsParser d p) -> InputFieldsParser d (fmap (fmap f) p) -instance Applicative m => Applicative (InputFieldsParser origin m) where +instance (Applicative m) => Applicative (InputFieldsParser origin m) where {-# INLINE pure #-} pure v = InputFieldsParser [] (const $ pure v) {-# INLINE (<*>) #-} @@ -321,7 +321,7 @@ fieldWithDefault name description defaultValue parser = -- combinators enum :: - MonadParse m => + (MonadParse m) => Name -> Maybe Description -> NonEmpty (Definition origin EnumValueInfo, a) -> @@ -359,7 +359,7 @@ enum name description values = -- This would prevent the creation of an object with no fields, which is against -- the spec. 
object :: - MonadParse m => + (MonadParse m) => Name -> Maybe Description -> InputFieldsParser origin m a -> diff --git a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Parser.hs b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Parser.hs index e9ff599210a..1c0756be13d 100644 --- a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Parser.hs +++ b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Parser.hs @@ -48,12 +48,12 @@ import Witherable (catMaybes, mapMaybe) infixl 1 `bind` -bind :: Monad m => Parser origin k m a -> (a -> m b) -> Parser origin k m b +bind :: (Monad m) => Parser origin k m a -> (a -> m b) -> Parser origin k m b bind p f = p {pParser = pParser p >=> f} infixl 1 `bindFields` -bindFields :: Monad m => InputFieldsParser origin m a -> (a -> m b) -> InputFieldsParser origin m b +bindFields :: (Monad m) => InputFieldsParser origin m a -> (a -> m b) -> InputFieldsParser origin m b bindFields p f = p {ifParser = ifParser p >=> f} -- | A parser for a single field in a selection set. Build a 'FieldParser' @@ -67,7 +67,7 @@ data FieldParser origin m a = FieldParser infixl 1 `bindField` -bindField :: Monad m => FieldParser origin m a -> (a -> m b) -> FieldParser origin m b +bindField :: (Monad m) => FieldParser origin m a -> (a -> m b) -> FieldParser origin m b bindField p f = p {fParser = fParser p >=> f} -- | A single parsed field in a selection set. @@ -171,7 +171,7 @@ setInputFieldsParserDirectives dLst (InputFieldsParser defs p) = -- | A variant of 'selectionSetObject' which doesn't implement any interfaces selectionSet :: - MonadParse m => + (MonadParse m) => Name -> Maybe Description -> [FieldParser origin m a] -> @@ -202,17 +202,17 @@ safeSelectionSet name description fields = printEntry (fieldName, originsM) = let origins = uniques $ catMaybes originsM in if - | null origins -> toErrorValue fieldName - | any Maybe.isNothing originsM -> - toErrorValue fieldName <> " defined in " <> toErrorValue origins <> " and of unknown origin" - | otherwise -> - toErrorValue fieldName <> " defined in " <> toErrorValue origins + | null origins -> toErrorValue fieldName + | any Maybe.isNothing originsM -> + toErrorValue fieldName <> " defined in " <> toErrorValue origins <> " and of unknown origin" + | otherwise -> + toErrorValue fieldName <> " defined in " <> toErrorValue origins duplicatesList = printEntry <$> HashMap.toList duplicates -- Should this rather take a non-empty `FieldParser` list? -- See also Note [Selectability of tables]. 
selectionSetObject :: - MonadParse m => + (MonadParse m) => Name -> Maybe Description -> -- | Fields of this object, including any fields that are required from the @@ -254,15 +254,15 @@ selectionSetObject name description parsers implementsInterfaces = for fields \selectionField@Field {_fName, _fAlias, _fDirectives} -> do parsedValue <- if - | _fName == $$(litName "__typename") -> - pure $ SelectTypename $ getName name - | Just parser <- HashMap.lookup _fName parserMap -> - withKey (Key (K.fromText (unName _fName))) $ - SelectField <$> parser selectionField - | otherwise -> - withKey (Key (K.fromText (unName _fName))) $ - parseError $ - "field " <> toErrorValue _fName <> " not found in type: " <> toErrorValue name + | _fName == $$(litName "__typename") -> + pure $ SelectTypename $ getName name + | Just parser <- HashMap.lookup _fName parserMap -> + withKey (Key (K.fromText (unName _fName))) $ + SelectField <$> parser selectionField + | otherwise -> + withKey (Key (K.fromText (unName _fName))) $ + parseError $ + "field " <> toErrorValue _fName <> " not found in type: " <> toErrorValue name _dirMap <- parseDirectives customDirectives (DLExecutable EDLFIELD) _fDirectives -- insert processing of custom directives here pure parsedValue @@ -334,7 +334,7 @@ selectionSetUnion name description objectImplementations = -- See also Note [The delicate balance of GraphQL kinds] in "Hasura.GraphQL.Parser.Schema". selection :: forall m origin a b. - MonadParse m => + (MonadParse m) => Name -> Maybe Description -> -- | parser for the input arguments @@ -349,7 +349,7 @@ selection name description argumentsParser resultParser = rawSelection :: forall m origin a b. - MonadParse m => + (MonadParse m) => Name -> Maybe Description -> -- | parser for the input arguments @@ -394,7 +394,7 @@ rawSelection name description argumentsParser resultParser = -- See also Note [The delicate balance of GraphQL kinds] in "Hasura.GraphQL.Parser.Schema". subselection :: forall m origin a b. - MonadParse m => + (MonadParse m) => Name -> Maybe Description -> -- | parser for the input arguments @@ -409,7 +409,7 @@ subselection name description argumentsParser bodyParser = rawSubselection :: forall m origin a b. - MonadParse m => + (MonadParse m) => Name -> Maybe Description -> -- | parser for the input arguments @@ -439,7 +439,7 @@ rawSubselection name description argumentsParser bodyParser = -- | A shorthand for a 'selection' that takes no arguments. selection_ :: - MonadParse m => + (MonadParse m) => Name -> Maybe Description -> -- | type of the result @@ -450,7 +450,7 @@ selection_ name description = selection name description (pure ()) -- | A shorthand for a 'subselection' that takes no arguments. 
subselection_ :: - MonadParse m => + (MonadParse m) => Name -> Maybe Description -> -- | parser for the subselection set diff --git a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Scalars.hs b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Scalars.hs index 223d2b72240..2e15c1abf58 100644 --- a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Scalars.hs +++ b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/Scalars.hs @@ -60,46 +60,46 @@ import Language.GraphQL.Draft.Syntax hiding (Definition) -------------------------------------------------------------------------------- -- Built-in scalars -namedBoolean :: MonadParse m => Name -> Parser origin 'Both m Bool +namedBoolean :: (MonadParse m) => Name -> Parser origin 'Both m Bool namedBoolean name = mkScalar name \case GraphQLValue (VBoolean b) -> pure b JSONValue (J.Bool b) -> pure b v -> typeMismatch name "a boolean" v -boolean :: MonadParse m => Parser origin 'Both m Bool +boolean :: (MonadParse m) => Parser origin 'Both m Bool boolean = namedBoolean GName._Boolean -namedInt :: MonadParse m => Name -> Parser origin 'Both m Int32 +namedInt :: (MonadParse m) => Name -> Parser origin 'Both m Int32 namedInt name = mkScalar name \case GraphQLValue (VInt i) -> scientificToInteger $ fromInteger i JSONValue (J.Number n) -> scientificToInteger n v -> typeMismatch name "a 32-bit integer" v -int :: MonadParse m => Parser origin 'Both m Int32 +int :: (MonadParse m) => Parser origin 'Both m Int32 int = namedInt GName._Int -namedFloat :: MonadParse m => Name -> Parser origin 'Both m Double +namedFloat :: (MonadParse m) => Name -> Parser origin 'Both m Double namedFloat name = mkScalar name \case GraphQLValue (VFloat f) -> scientificToFloat f GraphQLValue (VInt i) -> scientificToFloat $ fromInteger i JSONValue (J.Number n) -> scientificToFloat n v -> typeMismatch name "a float" v -float :: MonadParse m => Parser origin 'Both m Double +float :: (MonadParse m) => Parser origin 'Both m Double float = namedFloat GName._Float -namedString :: MonadParse m => Name -> Parser origin 'Both m Text +namedString :: (MonadParse m) => Name -> Parser origin 'Both m Text namedString name = mkScalar name \case GraphQLValue (VString s) -> pure s JSONValue (J.String s) -> pure s v -> typeMismatch name "a string" v -string :: MonadParse m => Parser origin 'Both m Text +string :: (MonadParse m) => Parser origin 'Both m Text string = namedString GName._String -- | As an input type, any string or integer input value should be coerced to ID as Text -- https://spec.graphql.org/June2018/#sec-ID -namedIdentifier :: MonadParse m => Name -> Parser origin 'Both m Text +namedIdentifier :: (MonadParse m) => Name -> Parser origin 'Both m Text namedIdentifier name = mkScalar name \case GraphQLValue (VString s) -> pure s GraphQLValue (VInt i) -> pure . Text.pack $ show i @@ -109,13 +109,13 @@ namedIdentifier name = mkScalar name \case where parseScientific = fmap (Text.pack . show @Int) . 
scientificToInteger -identifier :: MonadParse m => Parser origin 'Both m Text +identifier :: (MonadParse m) => Parser origin 'Both m Text identifier = namedIdentifier GName._ID -------------------------------------------------------------------------------- -- Custom scalars -uuid :: MonadParse m => Parser origin 'Both m UUID.UUID +uuid :: (MonadParse m) => Parser origin 'Both m UUID.UUID uuid = mkScalar name \case GraphQLValue (VString s) -> parseJSON $ J.String s JSONValue v -> parseJSON v @@ -123,14 +123,14 @@ uuid = mkScalar name \case where name = $$(litName "uuid") -json, jsonb :: MonadParse m => Parser origin 'Both m J.Value +json, jsonb :: (MonadParse m) => Parser origin 'Both m J.Value json = jsonScalar $$(litName "json") Nothing jsonb = jsonScalar $$(litName "jsonb") Nothing -- | Additional validation on integers. We do keep the same type name in the schema for backwards -- compatibility. -- TODO: when we can do a breaking change, we can rename the type to "NonNegativeInt". -nonNegativeInt :: MonadParse m => Parser origin 'Both m Int32 +nonNegativeInt :: (MonadParse m) => Parser origin 'Both m Int32 nonNegativeInt = mkScalar GName._Int \case GraphQLValue (VInt i) | i >= 0 -> scientificToInteger $ fromInteger i JSONValue (J.Number n) | n >= 0 -> scientificToInteger n @@ -140,7 +140,7 @@ nonNegativeInt = mkScalar GName._Int \case -- we declare a custom scalar that can represent 64-bit ints, which accepts both int literals and -- string literals. We do keep the same type name in the schema for backwards compatibility. -- TODO: when we can do a breaking change, we can rename the type to "BigInt". -bigInt :: MonadParse m => Parser origin 'Both m Int64 +bigInt :: (MonadParse m) => Parser origin 'Both m Int64 bigInt = mkScalar GName._Int \case GraphQLValue (VInt i) -> scientificToInteger $ fromInteger i JSONValue (J.Number n) -> scientificToInteger n @@ -154,7 +154,7 @@ bigInt = mkScalar GName._Int \case -- | Parser for 'Scientific'. Certain backends like BigQuery support -- Decimal/BigDecimal and need an arbitrary precision number. -scientific :: MonadParse m => Parser origin 'Both m Scientific +scientific :: (MonadParse m) => Parser origin 'Both m Scientific scientific = mkScalar name \case GraphQLValue (VFloat f) -> pure f GraphQLValue (VInt i) -> pure $ S.scientific i 0 @@ -168,7 +168,7 @@ scientific = mkScalar name \case -- | Creates a parser that transforms its input into a JSON value. 'valueToJSON' -- does properly unpack variables. -jsonScalar :: MonadParse m => Name -> Maybe Description -> Parser origin 'Both m J.Value +jsonScalar :: (MonadParse m) => Name -> Maybe Description -> Parser origin 'Both m J.Value jsonScalar name description = Parser { pType = schemaType, @@ -182,7 +182,7 @@ jsonScalar name description = -- | Creates a custom scalar, exposed in the schema with the given name. mkScalar :: - MonadParse m => + (MonadParse m) => Name -> (InputValue Variable -> m a) -> Parser origin 'Both m a diff --git a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/TypeChecking.hs b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/TypeChecking.hs index c33f841ae86..a7039c6fb66 100644 --- a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/TypeChecking.hs +++ b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Internal/TypeChecking.hs @@ -46,10 +46,10 @@ import Language.GraphQL.Draft.Syntax hiding (Definition) -- were to change.
We no longer cache execution plans; but we might do it -- again in the future, which is why we haven't removed some of the code -- that deals with re-usability. -peelVariable :: MonadParse m => GType -> InputValue Variable -> m (InputValue Variable) +peelVariable :: (MonadParse m) => GType -> InputValue Variable -> m (InputValue Variable) peelVariable = peelVariableWith False -peelVariableWith :: MonadParse m => Bool -> GType -> InputValue Variable -> m (InputValue Variable) +peelVariableWith :: (MonadParse m) => Bool -> GType -> InputValue Variable -> m (InputValue Variable) peelVariableWith locationHasDefaultValue locationType = \case GraphQLValue (VVariable var) -> do typeCheck locationHasDefaultValue locationType var @@ -73,7 +73,7 @@ peelVariableWith locationHasDefaultValue locationType = \case -- might allow a nullable variable at a non-nullable location: when either side -- has a non-null default value. That's because GraphQL conflates nullability -- and optionality. See also Note [When are fields optional?]. -typeCheck :: MonadParse m => Bool -> GType -> Variable -> m () +typeCheck :: (MonadParse m) => Bool -> GType -> Variable -> m () typeCheck locationHasDefaultValue locationType variable@Variable {vInfo, vType} = unless (isVariableUsageAllowed locationHasDefaultValue locationType variable) $ parseError $ diff --git a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Monad.hs b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Monad.hs index 67d12e982e4..1ec6358910a 100644 --- a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Monad.hs +++ b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Monad.hs @@ -23,7 +23,7 @@ newtype Parse a = Parse deriving newtype (Functor, Applicative, Monad) runParse :: - MonadError ParseError m => + (MonadError ParseError m) => Parse a -> m a runParse parse = diff --git a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Schema/Collect.hs b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Schema/Collect.hs index ccab8e78c51..30da2db4e0f 100644 --- a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Schema/Collect.hs +++ b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Schema/Collect.hs @@ -27,7 +27,7 @@ import Language.GraphQL.Draft.Syntax ) data TypeDefinitionsWrapper origin where - TypeDefinitionsWrapper :: HasTypeDefinitions origin a => a -> TypeDefinitionsWrapper origin + TypeDefinitionsWrapper :: (HasTypeDefinitions origin a) => a -> TypeDefinitionsWrapper origin {- Note [Collecting types from the GraphQL schema] @@ -93,7 +93,7 @@ different data sources. -- attempting to detect any conflicting defintions that may have made it this -- far (See 'ConflictingDefinitions' for details). collectTypeDefinitions :: - HasTypeDefinitions origin a => + (HasTypeDefinitions origin a) => a -> Either (ConflictingDefinitions origin) (HashMap Name (SomeDefinitionTypeInfo origin)) collectTypeDefinitions x = @@ -201,10 +201,10 @@ instance HasTypeDefinitions origin (Definition origin (TypeInfo origin k)) where | someOld == someNew -> put $! 
HashMap.insert dName (someOld, stack `NE.cons` origins) definitions | otherwise -> throwError $ ConflictingDefinitions (someNew, stack) (someOld, origins) -instance HasTypeDefinitions origin a => HasTypeDefinitions origin [a] where +instance (HasTypeDefinitions origin a) => HasTypeDefinitions origin [a] where accumulateTypeDefinitions = traverse_ accumulateTypeDefinitions -instance HasTypeDefinitions origin a => HasTypeDefinitions origin (Maybe a) where +instance (HasTypeDefinitions origin a) => HasTypeDefinitions origin (Maybe a) where accumulateTypeDefinitions = traverse_ accumulateTypeDefinitions instance HasTypeDefinitions origin (TypeDefinitionsWrapper origin) where diff --git a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Schema/Definition.hs b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Schema/Definition.hs index c6f8c372b12..cd5bfc1fccb 100644 --- a/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Schema/Definition.hs +++ b/server/lib/schema-parsers/src/Hasura/GraphQL/Parser/Schema/Definition.hs @@ -266,7 +266,7 @@ data k1 :<: k2 where class k1 <: k2 where subKind :: k1 :<: k2 -instance k1 ~ k2 => k1 <: k2 where +instance (k1 ~ k2) => k1 <: k2 where subKind = KRefl instance {-# OVERLAPPING #-} k <: 'Both where @@ -591,11 +591,11 @@ data Definition origin a = Definition } deriving (Functor, Foldable, Traversable, Generic) -instance Hashable a => Hashable (Definition origin a) where +instance (Hashable a) => Hashable (Definition origin a) where hashWithSalt salt Definition {..} = salt `hashWithSalt` dName `hashWithSalt` dInfo -instance Eq a => Eq (Definition origin a) where +instance (Eq a) => Eq (Definition origin a) where (==) = eq1 instance Eq1 (Definition origin) where diff --git a/server/lib/schema-parsers/test/Hasura/GraphQL/Parser/TestUtils.hs b/server/lib/schema-parsers/test/Hasura/GraphQL/Parser/TestUtils.hs index e2b257e804d..dbba4492e13 100644 --- a/server/lib/schema-parsers/test/Hasura/GraphQL/Parser/TestUtils.hs +++ b/server/lib/schema-parsers/test/Hasura/GraphQL/Parser/TestUtils.hs @@ -28,9 +28,9 @@ instance MonadParse TestMonad where fakeScalar :: G.Name -> G.Value Variable fakeScalar name = if - | name == GName._Int -> G.VInt 4242 - | name == GName._Boolean -> G.VBoolean False - | otherwise -> error $ "no test value implemented for scalar " <> show name + | name == GName._Int -> G.VInt 4242 + | name == GName._Boolean -> G.VBoolean False + | otherwise -> error $ "no test value implemented for scalar " <> show name fakeInputFieldValue :: forall origin. InputFieldInfo origin -> G.Value Variable fakeInputFieldValue (InputFieldInfo t _) = go t diff --git a/server/lib/test-harness/src/Harness/Backend/BigQuery.hs b/server/lib/test-harness/src/Harness/Backend/BigQuery.hs index 5b7b062b406..c7e0bd0a00c 100644 --- a/server/lib/test-harness/src/Harness/Backend/BigQuery.hs +++ b/server/lib/test-harness/src/Harness/Backend/BigQuery.hs @@ -70,7 +70,7 @@ backendTypeMetadata = -------------------------------------------------------------------------------- -getServiceAccount :: HasCallStack => IO ServiceAccount +getServiceAccount :: (HasCallStack) => IO ServiceAccount getServiceAccount = getEnvJson Constants.bigqueryServiceKeyVar getProjectId :: (HasCallStack) => IO BigQueryProjectId @@ -78,10 +78,10 @@ getProjectId = BigQueryProjectId <$> getEnvString Constants.bigqueryProjectIdVar -- | Run a plain Standard SQL string against the server, ignore the -- result. Just checks for errors. 
-run_ :: HasCallStack => String -> IO () +run_ :: (HasCallStack) => String -> IO () run_ query = do - void $ - runWithRetry + void + $ runWithRetry ( \conn -> do res <- Execute.executeBigQuery @@ -90,7 +90,7 @@ run_ query = do onLeft res \x -> liftIO (bigQueryError x query) ) -bigQueryError :: HasCallStack => Execute.ExecuteProblem -> String -> IO a +bigQueryError :: (HasCallStack) => Execute.ExecuteProblem -> String -> IO a bigQueryError e query = error ( unlines @@ -112,22 +112,22 @@ removeDataset schemaName = void $ runWithRetry (\conn -> Execute.deleteDataset conn $ unSchemaName schemaName) -- | Serialize Table into a SQL statement, as needed, and execute it on the BigQuery backend -createTable :: HasCallStack => SchemaName -> Schema.Table -> IO () +createTable :: (HasCallStack) => SchemaName -> Schema.Table -> IO () createTable schemaName table@Schema.Table {tableName, tableColumns} = do - run_ $ - T.unpack $ - T.unwords - ( [ "CREATE TABLE", - unSchemaName schemaName <> "." <> tableName, - "(", - commaSeparated (mkColumn <$> tableColumns), - -- Primary keys are not supported by BigQuery - -- Foreign keys are not support by BigQuery - ")" - ] - <> tableInsertions table - <> [";"] - ) + run_ + $ T.unpack + $ T.unwords + ( [ "CREATE TABLE", + unSchemaName schemaName <> "." <> tableName, + "(", + commaSeparated (mkColumn <$> tableColumns), + -- Primary keys are not supported by BigQuery + -- Foreign keys are not supported by BigQuery + ")" + ] + <> tableInsertions table + <> [";"] + ) -- | Generates a temporary table from structs, which is used to populate the table above. -- Along the lines of: @@ -146,7 +146,7 @@ tableInsertions (Schema.Table {tableColumns, tableData}) = cellInsertion column VNull = ["CAST", "(", serialize VNull, "AS", scalarType (Schema.columnType column), ")", "AS", Schema.columnName column] cellInsertion column value = [serialize value, "AS", Schema.columnName column] -scalarType :: HasCallStack => Schema.ScalarType -> Text +scalarType :: (HasCallStack) => Schema.ScalarType -> Text scalarType = \case Schema.TInt -> "INT64" Schema.TStr -> "STRING" @@ -176,15 +176,15 @@ serialize = \case VCustomValue bsv -> Schema.formatBackendScalarValueType $ Schema.backendScalarValue bsv bsvBigQuery -- | Serialize Table into an SQL DROP statement and execute it -dropTable :: HasCallStack => SchemaName -> Schema.Table -> IO () +dropTable :: (HasCallStack) => SchemaName -> Schema.Table -> IO () dropTable schemaName Schema.Table {tableName} = do - run_ $ - T.unpack $ - T.unwords - [ "DROP TABLE", -- we don't want @IF EXISTS@ here, because we don't want this to fail silently - unSchemaName schemaName <> "." <> tableName, - ";" - ] + run_ + $ T.unpack + $ T.unwords + [ "DROP TABLE", -- we don't want @IF EXISTS@ here, because we don't want this to fail silently + unSchemaName schemaName <> "." <> tableName, + ";" + ] -- | Post an http request to start tracking -- Overriding here because bigquery's API is uncommon @@ -220,7 +220,7 @@ untrackTable testEnvironment schemaName Schema.Table {tableName} = do -- | Setup the schema in the most expected way. -- NOTE: Certain test modules may warrant having their own local version.
-setup :: HasCallStack => [Schema.Table] -> (TestEnvironment, ()) -> IO () +setup :: (HasCallStack) => [Schema.Table] -> (TestEnvironment, ()) -> IO () setup tables' (testEnvironment, _) = do let source = BackendType.backendSourceName backendTypeMetadata backendType = BackendType.backendTypeString backendTypeMetadata @@ -275,7 +275,7 @@ teardown _ (testEnvironment, _) = do (GraphqlEngine.setSources testEnvironment mempty Nothing) (removeDataset schemaName) -setupTablesAction :: HasCallStack => [Schema.Table] -> TestEnvironment -> SetupAction +setupTablesAction :: (HasCallStack) => [Schema.Table] -> TestEnvironment -> SetupAction setupTablesAction ts env = SetupAction (setup ts (env, ())) diff --git a/server/lib/test-harness/src/Harness/Backend/Citus.hs b/server/lib/test-harness/src/Harness/Backend/Citus.hs index 4a796e1c004..0ade7e65d5a 100644 --- a/server/lib/test-harness/src/Harness/Backend/Citus.hs +++ b/server/lib/test-harness/src/Harness/Backend/Citus.hs @@ -73,7 +73,7 @@ backendTypeMetadata = -------------------------------------------------------------------------------- -- | Check the citus server is live and ready to accept connections. -livenessCheck :: HasCallStack => IO () +livenessCheck :: (HasCallStack) => IO () livenessCheck = loop Constants.postgresLivenessCheckAttempts where loop 0 = error ("Liveness check failed for Citus.") @@ -93,18 +93,18 @@ livenessCheck = loop Constants.postgresLivenessCheckAttempts -- | when we are creating databases, we want to connect with the 'original' DB -- we started with -runWithInitialDb_ :: HasCallStack => TestEnvironment -> Text -> IO () +runWithInitialDb_ :: (HasCallStack) => TestEnvironment -> Text -> IO () runWithInitialDb_ testEnvironment = runInternal testEnvironment Constants.defaultCitusConnectionString -- | Run a plain SQL query. -run_ :: HasCallStack => TestEnvironment -> Text -> IO () +run_ :: (HasCallStack) => TestEnvironment -> Text -> IO () run_ testEnvironment = runInternal testEnvironment (Constants.citusConnectionString (uniqueTestId testEnvironment)) --- | Run a plain SQL query. -- On error, print something useful for debugging. -runInternal :: HasCallStack => TestEnvironment -> Text -> Text -> IO () +runInternal :: (HasCallStack) => TestEnvironment -> Text -> Text -> IO () runInternal testEnvironment connectionString query = do startTime <- getCurrentTime catch @@ -148,12 +148,12 @@ defaultSourceConfiguration testEnvironment = |] -- | Serialize Table into a Citus-SQL statement, as needed, and execute it on the Citus backend -createTable :: HasCallStack => TestEnvironment -> Schema.Table -> IO () +createTable :: (HasCallStack) => TestEnvironment -> Schema.Table -> IO () createTable testEnv Schema.Table {tableName, tableColumns, tablePrimaryKey = pk, tableReferences, tableConstraints, tableUniqueIndexes} = do let schemaName = Schema.getSchemaName testEnv - run_ testEnv $ - [i| + run_ testEnv + $ [i| CREATE TABLE #{ unSchemaName schemaName }."#{ tableName }" ( #{ commaSeparated $ (mkColumnSql <$> tableColumns) @@ -166,7 +166,7 @@ createTable testEnv Schema.Table {tableName, tableColumns, tablePrimaryKey = pk, for_ tableUniqueIndexes (run_ testEnv . 
Postgres.createUniqueIndexSql schemaName tableName) -scalarType :: HasCallStack => Schema.ScalarType -> Text +scalarType :: (HasCallStack) => Schema.ScalarType -> Text scalarType = \case Schema.TInt -> "integer" Schema.TStr -> "text" @@ -185,13 +185,13 @@ mkColumnSql Schema.Column {columnName, columnType, columnNullable, columnDefault ] -- | Serialize tableData into a Citus-SQL insert statement and execute it. -insertTable :: HasCallStack => TestEnvironment -> Schema.Table -> IO () +insertTable :: (HasCallStack) => TestEnvironment -> Schema.Table -> IO () insertTable testEnv Schema.Table {tableName, tableColumns, tableData} | null tableData = pure () | otherwise = do let schemaName = Schema.unSchemaName $ Schema.getSchemaName testEnv - run_ testEnv $ - [i| + run_ testEnv + $ [i| INSERT INTO "#{ schemaName }"."#{ tableName }" (#{ commaSeparated (Postgres.wrapIdentifier . Schema.columnName <$> tableColumns) }) VALUES #{ commaSeparated $ mkRow <$> tableData }; @@ -217,19 +217,19 @@ mkRow row = ] -- | Serialize Table into a Citus-SQL DROP statement and execute it -dropTable :: HasCallStack => TestEnvironment -> Schema.Table -> IO () +dropTable :: (HasCallStack) => TestEnvironment -> Schema.Table -> IO () dropTable testEnvironment Schema.Table {tableName} = do let schemaName = Schema.unSchemaName $ Schema.getSchemaName testEnvironment -- We don't want @IF EXISTS@ here, because we don't want this to fail silently. run_ testEnvironment $ [i| DROP TABLE #{ schemaName }.#{ tableName }; |] -- | Post an http request to start tracking the table -trackTable :: HasCallStack => TestEnvironment -> Schema.Table -> IO () +trackTable :: (HasCallStack) => TestEnvironment -> Schema.Table -> IO () trackTable testEnvironment table = Schema.trackTable (BackendType.backendSourceName backendTypeMetadata) table testEnvironment -- | Post an http request to stop tracking the table -untrackTable :: HasCallStack => TestEnvironment -> Schema.Table -> IO () +untrackTable :: (HasCallStack) => TestEnvironment -> Schema.Table -> IO () untrackTable testEnvironment table = Schema.untrackTable (BackendType.backendSourceName backendTypeMetadata) table testEnvironment @@ -282,7 +282,7 @@ dropDatabase testEnvironment = do -- | Setup the schema in the most expected way. -- NOTE: Certain test modules may warrant having their own local version. -setup :: HasCallStack => [Schema.Table] -> (TestEnvironment, ()) -> IO () +setup :: (HasCallStack) => [Schema.Table] -> (TestEnvironment, ()) -> IO () setup tables (testEnvironment, _) = do -- Clear and reconfigure the metadata GraphqlEngine.setSource testEnvironment (defaultSourceMetadata testEnvironment) Nothing @@ -317,7 +317,7 @@ setupTablesAction ts env = -- | Teardown the schema and tracking in the most expected way. -- NOTE: Certain test modules may warrant having their own version. 
-teardown :: HasCallStack => [Schema.Table] -> (TestEnvironment, ()) -> IO () +teardown :: (HasCallStack) => [Schema.Table] -> (TestEnvironment, ()) -> IO () teardown _ (testEnvironment, _) = GraphqlEngine.setSources testEnvironment mempty Nothing diff --git a/server/lib/test-harness/src/Harness/Backend/Cockroach.hs b/server/lib/test-harness/src/Harness/Backend/Cockroach.hs index 02f9e160174..d9019fb8c2b 100644 --- a/server/lib/test-harness/src/Harness/Backend/Cockroach.hs +++ b/server/lib/test-harness/src/Harness/Backend/Cockroach.hs @@ -72,7 +72,7 @@ backendTypeMetadata = -------------------------------------------------------------------------------- -- | Check the cockroach server is live and ready to accept connections. -livenessCheck :: HasCallStack => IO () +livenessCheck :: (HasCallStack) => IO () livenessCheck = loop Constants.postgresLivenessCheckAttempts where loop 0 = error ("Liveness check failed for CockroachDB.") @@ -92,19 +92,19 @@ livenessCheck = loop Constants.postgresLivenessCheckAttempts -- | when we are creating databases, we want to connect with the 'original' DB -- we started with -runWithInitialDb_ :: HasCallStack => TestEnvironment -> Text -> IO () +runWithInitialDb_ :: (HasCallStack) => TestEnvironment -> Text -> IO () runWithInitialDb_ testEnvironment = runInternal testEnvironment Constants.defaultCockroachConnectionString -- | Run a plain SQL query. -- On error, print something useful for debugging. -run_ :: HasCallStack => TestEnvironment -> Text -> IO () +run_ :: (HasCallStack) => TestEnvironment -> Text -> IO () run_ testEnvironment = runInternal testEnvironment (Constants.cockroachConnectionString (uniqueTestId testEnvironment)) --- | Run a plain SQL query. -- On error, print something useful for debugging. -runInternal :: HasCallStack => TestEnvironment -> Text -> Text -> IO () +runInternal :: (HasCallStack) => TestEnvironment -> Text -> Text -> IO () runInternal testEnvironment connectionString query = do startTime <- getCurrentTime catch @@ -166,7 +166,7 @@ createTable testEnv Schema.Table {tableName, tableColumns, tablePrimaryKey = pk, for_ tableUniqueIndexes (run_ testEnv . Postgres.createUniqueIndexSql schemaName tableName) -scalarType :: HasCallStack => Schema.ScalarType -> Text +scalarType :: (HasCallStack) => Schema.ScalarType -> Text scalarType = \case Schema.TInt -> "integer" Schema.TStr -> "text" @@ -190,8 +190,8 @@ insertTable testEnvironment Schema.Table {tableName, tableColumns, tableData} | null tableData = pure () | otherwise = do let schemaName = Schema.getSchemaName testEnvironment - run_ testEnvironment $ - T.unwords + run_ testEnvironment + $ T.unwords [ "INSERT INTO", Schema.unSchemaName schemaName <> "." <> wrapIdentifier tableName, "(", @@ -231,8 +231,8 @@ mkRow row = -- | Serialize Table into a PL-SQL DROP statement and execute it dropTable :: TestEnvironment -> Schema.Table -> IO () dropTable testEnvironment Schema.Table {tableName} = do - run_ testEnvironment $ - T.unwords + run_ testEnvironment + $ T.unwords [ "DROP TABLE", -- we don't want @IF EXISTS@ here, because we don't want this to fail silently Constants.cockroachDb <> "." <> tableName, ";" @@ -240,8 +240,8 @@ dropTable testEnvironment Schema.Table {tableName} = do dropTableIfExists :: TestEnvironment -> Schema.Table -> IO () dropTableIfExists testEnvironment Schema.Table {tableName} = do - run_ testEnvironment $ - T.unwords + run_ testEnvironment + $ T.unwords [ "DROP TABLE IF EXISTS", Constants.cockroachDb <> "." 
<> tableName ] @@ -311,7 +311,7 @@ setup tables (testEnvironment, _) = do -- NOTE: Certain test modules may warrant having their own version. -- Because the Fixture takes care of dropping the DB, all we do here is -- clear the metadata with `replace_metadata`. -teardown :: HasCallStack => [Schema.Table] -> (TestEnvironment, ()) -> IO () +teardown :: (HasCallStack) => [Schema.Table] -> (TestEnvironment, ()) -> IO () teardown _ (testEnvironment, _) = GraphqlEngine.setSources testEnvironment mempty Nothing diff --git a/server/lib/test-harness/src/Harness/Backend/DataConnector/Mock.hs b/server/lib/test-harness/src/Harness/Backend/DataConnector/Mock.hs index 894e0ffe044..f23c27d635a 100644 --- a/server/lib/test-harness/src/Harness/Backend/DataConnector/Mock.hs +++ b/server/lib/test-harness/src/Harness/Backend/DataConnector/Mock.hs @@ -146,8 +146,8 @@ mkLocalTestEnvironment' mockConfig _ = mkTestResource do maeRecordedRequest <- I.newIORef Nothing maeRecordedRequestConfig <- I.newIORef Nothing maeThread <- Async.async $ runMockServer maeConfig maeRecordedRequest maeRecordedRequestConfig - pure $ - AcquiredResource + pure + $ AcquiredResource { resourceValue = MockAgentEnvironment {..}, waitForResource = healthCheck $ "http://127.0.0.1:" <> show mockAgentPort <> "/health", teardownResource = Async.cancel maeThread @@ -167,19 +167,19 @@ mockMutationResponse :: API.MutationResponse -> MockRequestConfig mockMutationResponse mutationResponse = defaultMockRequestConfig {_mutationResponse = \_ -> Right mutationResponse} -mockAgentGraphqlTest :: HasCallStack => String -> (TestEnvironment -> (MockRequestConfig -> RequestHeaders -> J.Value -> IO MockRequestResults) -> Expectation) -> SpecWith (Arg ((TestEnvironment, MockAgentEnvironment) -> Expectation)) +mockAgentGraphqlTest :: (HasCallStack) => String -> (TestEnvironment -> (MockRequestConfig -> RequestHeaders -> J.Value -> IO MockRequestResults) -> Expectation) -> SpecWith (Arg ((TestEnvironment, MockAgentEnvironment) -> Expectation)) mockAgentGraphqlTest name testBody = it name $ \(env, agentEnv) -> let performGraphqlRequest mockRequestConfig requestHeaders graphqlRequest = performRecordedRequest agentEnv mockRequestConfig (GraphqlEngine.postGraphqlWithHeaders env requestHeaders graphqlRequest) in testBody env performGraphqlRequest -mockAgentMetadataTest :: HasCallStack => String -> (TestEnvironment -> (MockRequestConfig -> Int -> J.Value -> IO MockRequestResults) -> Expectation) -> SpecWith (Arg ((TestEnvironment, MockAgentEnvironment) -> Expectation)) +mockAgentMetadataTest :: (HasCallStack) => String -> (TestEnvironment -> (MockRequestConfig -> Int -> J.Value -> IO MockRequestResults) -> Expectation) -> SpecWith (Arg ((TestEnvironment, MockAgentEnvironment) -> Expectation)) mockAgentMetadataTest name testBody = it name $ \(env, agentEnv) -> let performMetadataRequest mockRequestConfig status metadataRequest = performRecordedRequest agentEnv mockRequestConfig (GraphqlEngine.postMetadataWithStatus status env metadataRequest) in testBody env performMetadataRequest -performRecordedRequest :: HasCallStack => MockAgentEnvironment -> MockRequestConfig -> IO J.Value -> IO MockRequestResults +performRecordedRequest :: (HasCallStack) => MockAgentEnvironment -> MockRequestConfig -> IO J.Value -> IO MockRequestResults performRecordedRequest MockAgentEnvironment {..} mockRequestConfig performRequest = do -- Set the Agent with the 'MockConfig' I.modifyIORef maeConfig (\mockConfig -> mockConfig {_requestConfig = mockRequestConfig}) diff --git 
a/server/lib/test-harness/src/Harness/Backend/DataConnector/Mock/Server.hs b/server/lib/test-harness/src/Harness/Backend/DataConnector/Mock/Server.hs index 274e309f4ab..d588d8e2fe2 100644 --- a/server/lib/test-harness/src/Harness/Backend/DataConnector/Mock/Server.hs +++ b/server/lib/test-harness/src/Harness/Backend/DataConnector/Mock/Server.hs @@ -63,8 +63,8 @@ capabilities = { _qcForeach = Just API.ForeachCapabilities }, API._cMutations = - Just $ - API.MutationCapabilities + Just + $ API.MutationCapabilities { API._mcInsertCapabilities = Just API.InsertCapabilities {API._icSupportsNestedInserts = False}, API._mcUpdateCapabilities = Just API.UpdateCapabilities, API._mcDeleteCapabilities = Just API.DeleteCapabilities, @@ -110,8 +110,8 @@ capabilities = } where scalarTypesCapabilities = - API.ScalarTypesCapabilities $ - HashMap.fromList + API.ScalarTypesCapabilities + $ HashMap.fromList [ mkScalarTypeCapability "number" minMaxFunctions numericUpdateOperators $ Just API.GraphQLFloat, mkScalarTypeCapability "string" minMaxFunctions mempty $ Just API.GraphQLString, mkScalarTypeCapability "MyInt" mempty numericUpdateOperators $ Just API.GraphQLInt, @@ -134,16 +134,16 @@ capabilities = minMaxFunctions :: API.ScalarType -> API.AggregateFunctions minMaxFunctions resultType = - API.AggregateFunctions $ - HashMap.fromList $ - (,resultType) - <$> [[G.name|min|], [G.name|max|]] + API.AggregateFunctions + $ HashMap.fromList + $ (,resultType) + <$> [[G.name|min|], [G.name|max|]] numericUpdateOperators :: API.ScalarType -> API.UpdateColumnOperators numericUpdateOperators scalarType = - API.UpdateColumnOperators $ - HashMap.fromList $ - [(API.UpdateColumnOperatorName [G.name|inc|], API.UpdateColumnOperatorDefinition scalarType)] + API.UpdateColumnOperators + $ HashMap.fromList + $ [(API.UpdateColumnOperatorName [G.name|inc|], API.UpdateColumnOperatorDefinition scalarType)] -- | Stock Schema for a Chinook Agent schema :: API.SchemaResponse @@ -216,8 +216,8 @@ schema = API._tiPrimaryKey = Just $ API.ColumnName "AlbumId" :| [], API._tiDescription = Just "Collection of music albums created by artists", API._tiForeignKeys = - API.ForeignKeys $ - HashMap.singleton (API.ConstraintName "Artist") (API.Constraint (mkTableName "Artist") (HashMap.singleton (API.ColumnName "ArtistId") (API.ColumnName "ArtistId"))), + API.ForeignKeys + $ HashMap.singleton (API.ConstraintName "Artist") (API.Constraint (mkTableName "Artist") (HashMap.singleton (API.ColumnName "ArtistId") (API.ColumnName "ArtistId"))), API._tiInsertable = True, API._tiUpdatable = True, API._tiDeletable = True @@ -347,8 +347,8 @@ schema = API._tiPrimaryKey = Just $ API.ColumnName "CustomerId" :| [], API._tiDescription = Just "Collection of customers who can buy tracks", API._tiForeignKeys = - API.ForeignKeys $ - HashMap.singleton (API.ConstraintName "CustomerSupportRep") (API.Constraint (mkTableName "Employee") (HashMap.singleton (API.ColumnName "SupportRepId") (API.ColumnName "EmployeeId"))), + API.ForeignKeys + $ HashMap.singleton (API.ConstraintName "CustomerSupportRep") (API.Constraint (mkTableName "Employee") (HashMap.singleton (API.ColumnName "SupportRepId") (API.ColumnName "EmployeeId"))), API._tiInsertable = True, API._tiUpdatable = True, API._tiDeletable = True @@ -496,8 +496,8 @@ schema = API._tiPrimaryKey = Just $ API.ColumnName "EmployeeId" :| [], API._tiDescription = Just "Collection of employees who work for the business", API._tiForeignKeys = - API.ForeignKeys $ - HashMap.singleton (API.ConstraintName "EmployeeReportsTo") (API.Constraint 
(mkTableName "Employee") (HashMap.singleton (API.ColumnName "ReportsTo") (API.ColumnName "EmployeeId"))), + API.ForeignKeys + $ HashMap.singleton (API.ConstraintName "EmployeeReportsTo") (API.Constraint (mkTableName "Employee") (HashMap.singleton (API.ColumnName "ReportsTo") (API.ColumnName "EmployeeId"))), API._tiInsertable = True, API._tiUpdatable = True, API._tiDeletable = True @@ -621,9 +621,9 @@ schema = API._tiPrimaryKey = Just $ API.ColumnName "InvoiceId" :| [], API._tiDescription = Just "Collection of invoices of music purchases by a customer", API._tiForeignKeys = - API.ForeignKeys $ - HashMap.singleton (API.ConstraintName "InvoiceCustomer") $ - API.Constraint (mkTableName "Customer") (HashMap.singleton (API.ColumnName "CustomerId") (API.ColumnName "CustomerId")), + API.ForeignKeys + $ HashMap.singleton (API.ConstraintName "InvoiceCustomer") + $ API.Constraint (mkTableName "Customer") (HashMap.singleton (API.ColumnName "CustomerId") (API.ColumnName "CustomerId")), API._tiInsertable = True, API._tiUpdatable = True, API._tiDeletable = True @@ -681,8 +681,8 @@ schema = API._tiPrimaryKey = Just $ API.ColumnName "InvoiceLineId" :| [], API._tiDescription = Just "Collection of track purchasing line items of invoices", API._tiForeignKeys = - API.ForeignKeys $ - HashMap.fromList + API.ForeignKeys + $ HashMap.fromList [ (API.ConstraintName "Invoice", API.Constraint (mkTableName "Invoice") (HashMap.singleton (API.ColumnName "InvoiceId") (API.ColumnName "InvoiceId"))), (API.ConstraintName "Track", API.Constraint (mkTableName "Track") (HashMap.singleton (API.ColumnName "TrackId") (API.ColumnName "TrackId"))) ], @@ -809,8 +809,8 @@ schema = API._tiPrimaryKey = Just $ API.ColumnName "TrackId" :| [], API._tiDescription = Just "Collection of music tracks", API._tiForeignKeys = - API.ForeignKeys $ - HashMap.fromList + API.ForeignKeys + $ HashMap.fromList [ (API.ConstraintName "Album", API.Constraint (mkTableName "Album") (HashMap.singleton (API.ColumnName "AlbumId") (API.ColumnName "AlbumId"))), (API.ConstraintName "Genre", API.Constraint (mkTableName "Genre") (HashMap.singleton (API.ColumnName "GenreId") (API.ColumnName "GenreId"))), (API.ConstraintName "MediaType", API.Constraint (mkTableName "MediaType") (HashMap.singleton (API.ColumnName "MediaTypeId") (API.ColumnName "MediaTypeId"))) diff --git a/server/lib/test-harness/src/Harness/Backend/DataConnector/Sqlite.hs b/server/lib/test-harness/src/Harness/Backend/DataConnector/Sqlite.hs index bee640a3ed0..1393b3715b3 100644 --- a/server/lib/test-harness/src/Harness/Backend/DataConnector/Sqlite.hs +++ b/server/lib/test-harness/src/Harness/Backend/DataConnector/Sqlite.hs @@ -150,7 +150,7 @@ createUntrackedTables tables (testEnvironment, _) = do insertTable sourceName testEnvironment table -- | Post an http request to start tracking the table -trackTable :: HasCallStack => String -> TestEnvironment -> Schema.Table -> IO () +trackTable :: (HasCallStack) => String -> TestEnvironment -> Schema.Table -> IO () trackTable sourceName testEnvironment Schema.Table {tableName} = do let backendType = BackendType.backendTypeString backendTypeMetadata requestType = backendType <> "_track_table" @@ -189,16 +189,16 @@ createTable sourceName testEnv Schema.Table {tableName, tableColumns, tablePrima -- Build a SQL QUERY AND THEN CALL run_sql let expr = - Text.unpack $ - Text.unwords + Text.unpack + $ Text.unwords [ "CREATE TABLE", wrapIdentifier (Schema.unSchemaName schemaName) <> "." 
diff --git a/server/lib/test-harness/src/Harness/Backend/DataConnector/Sqlite.hs b/server/lib/test-harness/src/Harness/Backend/DataConnector/Sqlite.hs
index bee640a3ed0..1393b3715b3 100644
--- a/server/lib/test-harness/src/Harness/Backend/DataConnector/Sqlite.hs
+++ b/server/lib/test-harness/src/Harness/Backend/DataConnector/Sqlite.hs
@@ -150,7 +150,7 @@ createUntrackedTables tables (testEnvironment, _) = do
     insertTable sourceName testEnvironment table

 -- | Post an http request to start tracking the table
-trackTable :: HasCallStack => String -> TestEnvironment -> Schema.Table -> IO ()
+trackTable :: (HasCallStack) => String -> TestEnvironment -> Schema.Table -> IO ()
 trackTable sourceName testEnvironment Schema.Table {tableName} = do
   let backendType = BackendType.backendTypeString backendTypeMetadata
       requestType = backendType <> "_track_table"
@@ -189,16 +189,16 @@ createTable sourceName testEnv Schema.Table {tableName, tableColumns, tablePrima
   -- Build a SQL QUERY AND THEN CALL run_sql
   let expr =
-        Text.unpack $
-          Text.unwords
+        Text.unpack
+          $ Text.unwords
            [ "CREATE TABLE",
              wrapIdentifier (Schema.unSchemaName schemaName) <> "." <> wrapIdentifier tableName,
              "(",
-             Text.commaSeparated $
-               (mkColumn <$> tableColumns)
-                 <> (bool [mkPrimaryKey pk] [] (null pk))
-                 <> (mkReference schemaName <$> tableReferences)
-                 <> map uniqueConstraint tableConstraints,
+             Text.commaSeparated
+               $ (mkColumn <$> tableColumns)
+               <> (bool [mkPrimaryKey pk] [] (null pk))
+               <> (mkReference schemaName <$> tableReferences)
+               <> map uniqueConstraint tableConstraints,
              ");"
            ]
   runSql testEnv sourceName expr
@@ -287,18 +287,18 @@ insertTable sourceName testEnv Schema.Table {tableName, tableColumns, tableData}
   | null tableData = pure ()
   | otherwise = do
       let schemaName = Schema.getSchemaName testEnv
-      runSql testEnv sourceName $
-        Text.unpack $
-          Text.unwords
-            [ "INSERT INTO",
-              wrapIdentifier (Schema.unSchemaName schemaName) <> "." <> wrapIdentifier tableName,
-              "(",
-              Text.commaSeparated (wrapIdentifier . Schema.columnName <$> tableColumns),
-              ")",
-              "VALUES",
-              Text.commaSeparated $ mkRow <$> tableData,
-              ";"
-            ]
+      runSql testEnv sourceName
+        $ Text.unpack
+        $ Text.unwords
+          [ "INSERT INTO",
+            wrapIdentifier (Schema.unSchemaName schemaName) <> "." <> wrapIdentifier tableName,
+            "(",
+            Text.commaSeparated (wrapIdentifier . Schema.columnName <$> tableColumns),
+            ")",
+            "VALUES",
+            Text.commaSeparated $ mkRow <$> tableData,
+            ";"
+          ]

 mkRow :: [Schema.ScalarValue] -> Text
 mkRow row =
diff --git a/server/lib/test-harness/src/Harness/Backend/Postgres.hs b/server/lib/test-harness/src/Harness/Backend/Postgres.hs
index 671bfff9e71..7bf0b162f35 100644
--- a/server/lib/test-harness/src/Harness/Backend/Postgres.hs
+++ b/server/lib/test-harness/src/Harness/Backend/Postgres.hs
@@ -95,15 +95,15 @@ backendTypeMetadata =
 -- interesting thing here is the database: in both modes, we specify an
 -- /initial/ database (returned by this function), which we use only as a way
 -- to create other databases for testing.
-defaultConnectInfo :: HasCallStack => GlobalTestEnvironment -> Postgres.ConnectInfo
+defaultConnectInfo :: (HasCallStack) => GlobalTestEnvironment -> Postgres.ConnectInfo
 defaultConnectInfo globalTestEnvironment =
   case testingMode globalTestEnvironment of
     TestNewPostgresVariant opts@Options {..} ->
       let getComponent :: forall a. String -> Last a -> a
           getComponent component =
             fromMaybe
-              ( error $
-                  unlines
+              ( error
+                  $ unlines
                     [ "Postgres URI is missing its " <> component <> " component.",
                       "Postgres options: " <> TL.unpack (pShow opts)
                     ]
@@ -130,12 +130,12 @@ defaultConnectInfo globalTestEnvironment =
 -- for this 'TestEnvironment'.
 makeFreshDbConnectionString :: TestEnvironment -> Postgres.PostgresServerUrl
 makeFreshDbConnectionString testEnvironment =
-  Postgres.PostgresServerUrl $
-    bsToTxt $
-      Postgres.postgreSQLConnectionString
-        (defaultConnectInfo (globalEnvironment testEnvironment))
-          { Postgres.connectDatabase = T.unpack (uniqueDbName (uniqueTestId testEnvironment))
-          }
+  Postgres.PostgresServerUrl
+    $ bsToTxt
+    $ Postgres.postgreSQLConnectionString
+      (defaultConnectInfo (globalEnvironment testEnvironment))
+        { Postgres.connectDatabase = T.unpack (uniqueDbName (uniqueTestId testEnvironment))
+        }

 -- | Default Postgres connection string that we use for our admin purposes
 -- (setting up / deleting per-test databases)
@@ -146,15 +146,15 @@ defaultPostgresConnectionString =
     . Postgres.postgreSQLConnectionString
     . defaultConnectInfo

-metadataLivenessCheck :: HasCallStack => IO ()
+metadataLivenessCheck :: (HasCallStack) => IO ()
 metadataLivenessCheck =
   doLivenessCheck (Postgres.PostgresServerUrl $ T.pack postgresqlMetadataConnectionString)

-livenessCheck :: HasCallStack => TestEnvironment -> IO ()
+livenessCheck :: (HasCallStack) => TestEnvironment -> IO ()
 livenessCheck = doLivenessCheck . makeFreshDbConnectionString

 -- | Check the postgres server is live and ready to accept connections.
-doLivenessCheck :: HasCallStack => Postgres.PostgresServerUrl -> IO ()
+doLivenessCheck :: (HasCallStack) => Postgres.PostgresServerUrl -> IO ()
 doLivenessCheck (Postgres.PostgresServerUrl connectionString) = loop Constants.postgresLivenessCheckAttempts
   where
     loop 0 = error ("Liveness check failed for PostgreSQL.")
@@ -173,11 +173,11 @@ doLivenessCheck (Postgres.PostgresServerUrl connectionString) = loop Constants.p

 -- | Run a plain SQL query.
 -- On error, print something useful for debugging.
-run_ :: HasCallStack => TestEnvironment -> Text -> IO ()
+run_ :: (HasCallStack) => TestEnvironment -> Text -> IO ()
 run_ testEnvironment =
   Postgres.run (makeFreshDbConnectionString testEnvironment, testEnvironment)

-runCustomDB_ :: HasCallStack => TestEnvironment -> Postgres.ConnectInfo -> Text -> IO ()
+runCustomDB_ :: (HasCallStack) => TestEnvironment -> Postgres.ConnectInfo -> Text -> IO ()
 runCustomDB_ testEnvironment connectionInfo =
   Postgres.run (postgresServerUrl connectionInfo, testEnvironment)
@@ -276,7 +276,7 @@ createUniqueIndexSql (SchemaName schemaName) tableName = \case
   Schema.UniqueIndexExpression ex ->
     [i| CREATE UNIQUE INDEX ON "#{ schemaName }"."#{ tableName }" ((#{ ex })) |]

-scalarType :: HasCallStack => Schema.ScalarType -> Text
+scalarType :: (HasCallStack) => Schema.ScalarType -> Text
 scalarType = \case
   Schema.TInt -> "integer"
   Schema.TStr -> "text"
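The createTable/insertTable changes above only re-indent an idiom worth spelling out: statements are assembled from Text fragments with unwords plus a comma-join helper. A hedged sketch, assuming commaSeparated behaves like T.intercalate ", " (the harness ships its own helper of that name):

{-# LANGUAGE OverloadedStrings #-}

module SqlAssembly where

import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T

-- Stand-in for the harness's Text.commaSeparated helper.
commaSeparated :: [Text] -> Text
commaSeparated = T.intercalate ", "

-- Assemble a CREATE TABLE statement the way createTable does: a flat list
-- of fragments joined by spaces.
createTableSql :: Text -> [Text] -> Text
createTableSql table columns =
  T.unwords ["CREATE TABLE", table, "(", commaSeparated columns, ");"]

main :: IO ()
main = T.putStrLn (createTableSql "\"hasura\".\"author\"" ["id integer", "name text"])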
diff --git a/server/lib/test-harness/src/Harness/Backend/Sqlserver.hs b/server/lib/test-harness/src/Harness/Backend/Sqlserver.hs
index b7db77c3662..be726b6c1ba 100644
--- a/server/lib/test-harness/src/Harness/Backend/Sqlserver.hs
+++ b/server/lib/test-harness/src/Harness/Backend/Sqlserver.hs
@@ -63,7 +63,7 @@ backendTypeMetadata =
 --------------------------------------------------------------------------------

 -- | Check that the SQLServer service is live and ready to accept connections.
-livenessCheck :: HasCallStack => IO ()
+livenessCheck :: (HasCallStack) => IO ()
 livenessCheck = loop Constants.sqlserverLivenessCheckAttempts
   where
     loop 0 = error ("Liveness check failed for SQLServer.")
@@ -80,19 +80,19 @@ livenessCheck = loop Constants.sqlserverLivenessCheckAttempts
         )

 -- | run SQL with the currently created DB for this test
-run_ :: HasCallStack => TestEnvironment -> String -> IO ()
+run_ :: (HasCallStack) => TestEnvironment -> String -> IO ()
 run_ testEnvironment =
   runInternal testEnvironment (Constants.sqlserverConnectInfo (uniqueTestId testEnvironment))

 -- | when we are creating databases, we want to connect with the 'original' DB
 -- we started with
-runWithInitialDb_ :: HasCallStack => TestEnvironment -> String -> IO ()
+runWithInitialDb_ :: (HasCallStack) => TestEnvironment -> String -> IO ()
 runWithInitialDb_ testEnvironment =
   runInternal testEnvironment Constants.sqlserverAdminConnectInfo

 -- | Run a plain SQL string against the server, ignore the
 -- result. Just checks for errors.
-runInternal :: HasCallStack => TestEnvironment -> Text -> String -> IO ()
+runInternal :: (HasCallStack) => TestEnvironment -> Text -> String -> IO ()
 runInternal testEnvironment connectionString query' = do
   startTime <- getCurrentTime
   catch
@@ -143,20 +143,20 @@ createTable _ Schema.Table {tableUniqueIndexes = _ : _} = error "Not Implemented
 createTable _ Schema.Table {tableConstraints = _ : _} = error "Not Implemented: SqlServer test harness support for constraints"
 createTable testEnvironment Schema.Table {tableName, tableColumns, tablePrimaryKey = pk, tableReferences} = do
   let schemaName = Schema.getSchemaName testEnvironment
-  run_ testEnvironment $
-    T.unpack $
-      T.unwords
-        [ "CREATE TABLE",
-          Schema.unSchemaName schemaName <> "." <> tableName,
-          "(",
-          commaSeparated $
-            (mkColumn <$> tableColumns)
-              <> (bool [mkPrimaryKey pk] [] (null pk))
-              <> (mkReference <$> tableReferences),
-          ");"
-        ]
+  run_ testEnvironment
+    $ T.unpack
+    $ T.unwords
+      [ "CREATE TABLE",
+        Schema.unSchemaName schemaName <> "." <> tableName,
+        "(",
+        commaSeparated
+          $ (mkColumn <$> tableColumns)
+          <> (bool [mkPrimaryKey pk] [] (null pk))
+          <> (mkReference <$> tableReferences),
+        ");"
+      ]

-scalarType :: HasCallStack => Schema.ScalarType -> Text
+scalarType :: (HasCallStack) => Schema.ScalarType -> Text
 scalarType = \case
   Schema.TInt -> "int"
   Schema.TStr -> "nvarchar(127)"
@@ -211,23 +211,23 @@ mkReference Schema.Reference {referenceLocalColumn, referenceTargetTable, refere
     <> referenceLocalColumn

 -- | Serialize tableData into a T-SQL insert statement and execute it.
-insertTable :: HasCallStack => TestEnvironment -> Schema.Table -> IO ()
+insertTable :: (HasCallStack) => TestEnvironment -> Schema.Table -> IO ()
 insertTable testEnvironment Schema.Table {tableName, tableColumns, tableData}
   | null tableData = pure ()
   | otherwise = do
       let schemaName = Schema.getSchemaName testEnvironment
-      run_ testEnvironment $
-        T.unpack $
-          T.unwords
-            [ "INSERT INTO",
-              Schema.unSchemaName schemaName <> "." <> wrapIdentifier tableName,
-              "(",
-              commaSeparated (wrapIdentifier . Schema.columnName <$> tableColumns),
-              ")",
-              "VALUES",
-              commaSeparated $ mkRow <$> tableData,
-              ";"
-            ]
+      run_ testEnvironment
+        $ T.unpack
+        $ T.unwords
+          [ "INSERT INTO",
+            Schema.unSchemaName schemaName <> "." <> wrapIdentifier tableName,
+            "(",
+            commaSeparated (wrapIdentifier . Schema.columnName <$> tableColumns),
+            ")",
+            "VALUES",
+            commaSeparated $ mkRow <$> tableData,
+            ";"
+          ]

 -- | MSSQL identifiers which may contain spaces or be case-sensitive need to be wrapped in @[]@.
 --
@@ -256,23 +256,23 @@ mkRow row =
     ]

 -- | Serialize Table into a T-SQL DROP statement and execute it
-dropTable :: HasCallStack => TestEnvironment -> Schema.Table -> IO ()
+dropTable :: (HasCallStack) => TestEnvironment -> Schema.Table -> IO ()
 dropTable testEnvironment Schema.Table {tableName} = do
-  run_ testEnvironment $
-    T.unpack $
-      T.unwords
-        [ "DROP TABLE", -- we don't want @IF EXISTS@ here, because we don't want this to fail silently
-          T.pack Constants.sqlserverDb <> "." <> tableName,
-          ";"
-        ]
+  run_ testEnvironment
+    $ T.unpack
+    $ T.unwords
+      [ "DROP TABLE", -- we don't want @IF EXISTS@ here, because we don't want this to fail silently
+        T.pack Constants.sqlserverDb <> "." <> tableName,
+        ";"
+      ]

 -- | Post an http request to start tracking the table
-trackTable :: HasCallStack => TestEnvironment -> Schema.Table -> IO ()
+trackTable :: (HasCallStack) => TestEnvironment -> Schema.Table -> IO ()
 trackTable testEnvironment table =
   Schema.trackTable (backendSourceName backendTypeMetadata) table testEnvironment

 -- | Post an http request to stop tracking the table
-untrackTable :: HasCallStack => TestEnvironment -> Schema.Table -> IO ()
+untrackTable :: (HasCallStack) => TestEnvironment -> Schema.Table -> IO ()
 untrackTable testEnvironment table =
   Schema.untrackTable (backendSourceName backendTypeMetadata) table testEnvironment
@@ -311,7 +311,7 @@ createSchema testEnvironment schemaName = do

 -- | Setup the schema in the most expected way.
 -- NOTE: Certain test modules may warrant having their own local version.
-setup :: HasCallStack => [Schema.Table] -> (TestEnvironment, ()) -> IO ()
+setup :: (HasCallStack) => [Schema.Table] -> (TestEnvironment, ()) -> IO ()
 setup tables (testEnvironment, _) = do
   -- Clear and reconfigure the metadata
   GraphqlEngine.setSource testEnvironment (defaultSourceMetadata testEnvironment) Nothing
@@ -327,7 +327,7 @@ setup tables (testEnvironment, _) = do

 -- | Teardown the schema and tracking in the most expected way.
 -- NOTE: Certain test modules may warrant having their own version.
-teardown :: HasCallStack => [Schema.Table] -> (TestEnvironment, ()) -> IO ()
+teardown :: (HasCallStack) => [Schema.Table] -> (TestEnvironment, ()) -> IO ()
 teardown _ (testEnvironment, _) =
   GraphqlEngine.setSources testEnvironment mempty Nothing
diff --git a/server/lib/test-harness/src/Harness/DataConnectorAgent.hs b/server/lib/test-harness/src/Harness/DataConnectorAgent.hs
index 7bec201e1ba..12ff2ef2403 100644
--- a/server/lib/test-harness/src/Harness/DataConnectorAgent.hs
+++ b/server/lib/test-harness/src/Harness/DataConnectorAgent.hs
@@ -47,5 +47,5 @@ withClone agentUri TestEnvironment {..} templateName useClone = runDataConnector
     (liftIO $ useClone response)
     ((dcClient // API._datasets // API._deleteClone) cloneName)

-createManagedClone :: MonadManaged m => String -> TestEnvironment -> API.DatasetTemplateName -> m API.DatasetCreateCloneResponse
+createManagedClone :: (MonadManaged m) => String -> TestEnvironment -> API.DatasetTemplateName -> m API.DatasetCreateCloneResponse
 createManagedClone agentUri testEnvironment templateName = managed (withClone agentUri testEnvironment templateName)
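The other mechanical change in this patch wraps single-constraint contexts in parentheses: HasCallStack => becomes (HasCallStack) =>. The two spellings are interchangeable to GHC; a sketch, not from the patch, showing both compile to the same type:

module ConstraintParens where

import GHC.Stack (HasCallStack, callStack, prettyCallStack)

-- Bare constraint, the pre-patch spelling:
bare :: HasCallStack => String -> IO ()
bare msg = putStrLn (msg <> "\n" <> prettyCallStack callStack)

-- Parenthesized constraint, the post-patch spelling; identical type:
parenthesized :: (HasCallStack) => String -> IO ()
parenthesized = bare

main :: IO ()
main = parenthesized "call stacks point here"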
diff --git a/server/lib/test-harness/src/Harness/Env.hs b/server/lib/test-harness/src/Harness/Env.hs
index b44a5114c67..29a8c29a069 100644
--- a/server/lib/test-harness/src/Harness/Env.hs
+++ b/server/lib/test-harness/src/Harness/Env.hs
@@ -19,39 +19,39 @@ import System.Environment (lookupEnv)

 -- | Get an environment variable and parse it to a value using 'read'.
 getEnvRead :: (Read a, Typeable a, HasCallStack) => String -> IO a
 getEnvRead var =
-  withFrozenCallStack $
-    getEnvWith var readVarValue
+  withFrozenCallStack
+    $ getEnvWith var readVarValue

 -- | Get an environment variable without parsing it.
 getEnvString :: (IsString a, HasCallStack) => String -> IO a
 getEnvString var =
-  withFrozenCallStack $
-    getEnvWith var (\_ value -> pure (fromString value))
+  withFrozenCallStack
+    $ getEnvWith var (\_ value -> pure (fromString value))

 -- | Get a json environment variable and parse it.
 getEnvJson :: forall a. (Typeable a, J.FromJSON a, HasCallStack) => String -> IO a
 getEnvJson var =
-  withFrozenCallStack $
-    getEnvWith var decodeJson
+  withFrozenCallStack
+    $ getEnvWith var decodeJson

 -- | Get an environment variable holding a path to a json file and parse the contents of the file.
 getEnvJsonFile :: forall a. (Typeable a, J.FromJSON a, HasCallStack) => String -> IO a
 getEnvJsonFile var =
-  withFrozenCallStack $
-    getEnvWith var (\var' value -> decodeJson var' =<< readFile value)
+  withFrozenCallStack
+    $ getEnvWith var (\var' value -> decodeJson var' =<< readFile value)

 -------------------------------------------------------------------------------------------

 -- * Helpers

 -- | Fetches a value from an environment variable and applies a function to the variable and value.
-getEnvWith :: HasCallStack => String -> (String -> String -> IO a) -> IO a
+getEnvWith :: (HasCallStack) => String -> (String -> String -> IO a) -> IO a
 getEnvWith var f =
   withFrozenCallStack $ do
     f var =<< getEnv var

 -- | Like 'System.Environment.getEnv', but with 'HasCallStack'.
-getEnv :: HasCallStack => String -> IO String
+getEnv :: (HasCallStack) => String -> IO String
 getEnv var = do
   value <- lookupEnv var
   onNothing value (error $ "getEnv: " <> var <> " does not exist (no environment variable)")
diff --git a/server/lib/test-harness/src/Harness/Exceptions.hs b/server/lib/test-harness/src/Harness/Exceptions.hs
index 5ed1fe64e7a..a4d70dee088 100644
--- a/server/lib/test-harness/src/Harness/Exceptions.hs
+++ b/server/lib/test-harness/src/Harness/Exceptions.hs
@@ -23,7 +23,7 @@ import Hasura.Prelude hiding (first)
 -- Will run the action. If the action fails and throws an exception,
 -- it will run the cleanup and will throw the original exception after it is done.
 -- If the cleanup fails as well, it will throw both raised exceptions.
-catchRethrow :: HasCallStack => IO a -> IO () -> IO a
+catchRethrow :: (HasCallStack) => IO a -> IO () -> IO a
 catchRethrow action cleanup =
   catch
     -- attempt action
@@ -37,7 +37,7 @@ catchRethrow action cleanup =
     )

 -- | Try actions in order. If one succeeds, it succeeds. If both fail, throw both exceptions.
-tryInOrder :: HasCallStack => IO a -> IO a -> IO a
+tryInOrder :: (HasCallStack) => IO a -> IO a -> IO a
 tryInOrder action1 action2 =
   catch
     action1
@@ -59,7 +59,7 @@ forFinally_ list f =
 -- raised as a single 'Exceptions' exception. If 'Exceptions' are thrown in the
 -- 'actions', these are collapsed into a single top-level 'Exceptions'
 -- exception.
-rethrowAll :: HasCallStack => [IO ()] -> IO ()
+rethrowAll :: (HasCallStack) => [IO ()] -> IO ()
 rethrowAll actions = do
   exns <-
     concat
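Harness.Env freezes the call stack in every helper; the point, sketched below with made-up names, is that the error reported for a missing variable blames the test that called the helper rather than the helper's own internals:

module FrozenStack where

import GHC.Stack (HasCallStack, withFrozenCallStack)
import System.Environment (lookupEnv)

-- Like the harness's getEnv: 'error' here reports the *caller's* location
-- because the stack is frozen at the helper boundary.
getEnvOrDie :: (HasCallStack) => String -> IO String
getEnvOrDie var = withFrozenCallStack $ do
  mValue <- lookupEnv var
  maybe (error ("getEnvOrDie: " <> var <> " does not exist")) pure mValue

main :: IO ()
main = getEnvOrDie "HOME" >>= putStrLn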
diff --git a/server/lib/test-harness/src/Harness/GraphqlEngine.hs b/server/lib/test-harness/src/Harness/GraphqlEngine.hs
index 481c1b4fbea..f43caf894ea 100644
--- a/server/lib/test-harness/src/Harness/GraphqlEngine.hs
+++ b/server/lib/test-harness/src/Harness/GraphqlEngine.hs
@@ -96,7 +96,7 @@ import Test.Hspec
 -- See 'postWithHeaders' to issue a request with 'Http.RequestHeaders'.
 --
 -- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
-post :: HasCallStack => TestEnvironment -> String -> Value -> IO Value
+post :: (HasCallStack) => TestEnvironment -> String -> Value -> IO Value
 post testEnvironment path v = withFrozenCallStack $ postWithHeaders testEnvironment path mempty v

 -- | Same as 'post', but ignores the value.
@@ -104,7 +104,7 @@ post testEnvironment path v = withFrozenCallStack $ postWithHeaders testEnvironm
 -- See 'postWithHeaders_' to issue a request with 'Http.RequestHeaders'.
 --
 -- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
-post_ :: HasCallStack => TestEnvironment -> String -> Value -> IO ()
+post_ :: (HasCallStack) => TestEnvironment -> String -> Value -> IO ()
 post_ testEnvironment path v = void $ withFrozenCallStack $ postWithHeaders_ testEnvironment path mempty v

 -- | Post some JSON to graphql-engine, getting back more JSON.
@@ -114,7 +114,7 @@ post_ testEnvironment path v = void $ withFrozenCallStack $ postWithHeaders_ tes
 --
 -- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 postWithHeaders ::
-  HasCallStack => TestEnvironment -> String -> Http.RequestHeaders -> Value -> IO Value
+  (HasCallStack) => TestEnvironment -> String -> Http.RequestHeaders -> Value -> IO Value
 postWithHeaders =
   withFrozenCallStack $ postWithHeadersStatus 200
@@ -125,7 +125,7 @@ postWithHeaders =
 --
 -- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 postWithHeadersStatus ::
-  HasCallStack => Int -> TestEnvironment -> String -> Http.RequestHeaders -> Value -> IO Value
+  (HasCallStack) => Int -> TestEnvironment -> String -> Http.RequestHeaders -> Value -> IO Value
 postWithHeadersStatus statusCode testEnv@(getServer -> Server {urlPrefix, port}) path headers requestBody = do
   testLogMessage testEnv $ LogHGERequest (T.pack path) requestBody
@@ -159,13 +159,13 @@ postWithHeadersStatusViaWebSocket connection headers requestBody = do
   WS.sendTextDatas
     connection
-    [ encode $
-        object
+    [ encode
+        $ object
          [ "type" .= String "connection_init",
            "payload" .= object ["headers" .= preparedHeaders]
          ],
-      encode $
-        object
+      encode
+        $ object
          [ "id" .= String "some-request-id",
            "type" .= String "start",
            "payload" .= requestBody
@@ -181,7 +181,7 @@ postWithHeadersStatusViaWebSocket connection headers requestBody = do
 --
 -- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 postWithHeaders_ ::
-  HasCallStack => TestEnvironment -> String -> Http.RequestHeaders -> Value -> IO ()
+  (HasCallStack) => TestEnvironment -> String -> Http.RequestHeaders -> Value -> IO ()
 postWithHeaders_ testEnvironment path headers v =
   void $ withFrozenCallStack $ postWithHeaders testEnvironment path headers v
@@ -189,32 +189,32 @@ postWithHeaders_ testEnvironment path headers v =
 --
 -- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 postGraphqlYaml ::
-  HasCallStack => TestEnvironment -> Value -> IO Value
+  (HasCallStack) => TestEnvironment -> Value -> IO Value
 postGraphqlYaml testEnvironment v = withFrozenCallStack $ postGraphqlYamlWithHeaders testEnvironment mempty v

 -- | Same as 'postWithHeaders', but defaults to the graphql end-point.
 --
 -- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 postGraphqlYamlWithHeaders ::
-  HasCallStack => TestEnvironment -> Http.RequestHeaders -> Value -> IO Value
+  (HasCallStack) => TestEnvironment -> Http.RequestHeaders -> Value -> IO Value
 postGraphqlYamlWithHeaders testEnvironment headers =
   withFrozenCallStack $ postWithHeaders testEnvironment "/v1/graphql" headers

-postGraphql :: Has PostGraphql testEnvironment => testEnvironment -> Value -> IO Value
+postGraphql :: (Has PostGraphql testEnvironment) => testEnvironment -> Value -> IO Value
 postGraphql = getPostGraphql . getter

 -- | Same as 'postGraphqlYaml', but adds the @{query:..}@ wrapper.
 --
 -- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
-postGraphqlInternal :: HasCallStack => TestEnvironment -> Value -> IO Value
+postGraphqlInternal :: (HasCallStack) => TestEnvironment -> Value -> IO Value
 postGraphqlInternal testEnvironment value =
   withFrozenCallStack $ postGraphqlYaml testEnvironment (object ["query" .= value])

 -- | Same as 'postGraphql', but accepts variables to the GraphQL query as well.
-postGraphqlWithVariables :: HasCallStack => TestEnvironment -> Value -> Value -> IO Value
+postGraphqlWithVariables :: (HasCallStack) => TestEnvironment -> Value -> Value -> IO Value
 postGraphqlWithVariables testEnvironment query variables =
-  withFrozenCallStack $
-    postGraphqlYaml
+  withFrozenCallStack
+    $ postGraphqlYaml
      testEnvironment
      ( object
          [ "query" .= query,
@@ -224,7 +224,7 @@ postGraphqlWithVariables testEnvironment query variables =

 -- | Same as postGraphql but accepts a list of 'Pair' to pass
 -- additional parameters to the endpoint.
-postGraphqlWithPair :: HasCallStack => TestEnvironment -> Value -> [Pair] -> IO Value
+postGraphqlWithPair :: (HasCallStack) => TestEnvironment -> Value -> [Pair] -> IO Value
 postGraphqlWithPair testEnvironment value pair =
   withFrozenCallStack $ postGraphqlYaml testEnvironment (object $ ["query" .= value] <> pair)
@@ -232,15 +232,15 @@ postGraphqlWithPair testEnvironment value pair =
 --
 -- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
 postGraphqlWithHeaders ::
-  HasCallStack => TestEnvironment -> Http.RequestHeaders -> Value -> IO Value
+  (HasCallStack) => TestEnvironment -> Http.RequestHeaders -> Value -> IO Value
 postGraphqlWithHeaders testEnvironment headers value =
   withFrozenCallStack $ postGraphqlYamlWithHeaders testEnvironment headers (object ["query" .= value])

 -- | post to /v1/graphql/explain endpoint
-postExplain :: HasCallStack => TestEnvironment -> Value -> IO Value
+postExplain :: (HasCallStack) => TestEnvironment -> Value -> IO Value
 postExplain testEnvironment value =
-  withFrozenCallStack $
-    postWithHeaders
+  withFrozenCallStack
+    $ postWithHeaders
      testEnvironment
      "/v1/graphql/explain"
      mempty
@@ -265,27 +265,27 @@ withHTTP testEnvironment =
 -- @headers@ are mostly irrelevant for the admin endpoint @v1/metadata@.
 --
 -- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
-postMetadata_ :: HasCallStack => TestEnvironment -> Value -> IO ()
+postMetadata_ :: (HasCallStack) => TestEnvironment -> Value -> IO ()
 postMetadata_ testEnvironment = withFrozenCallStack $ post_ (withHTTP testEnvironment) "/v1/metadata"

-postMetadata :: HasCallStack => TestEnvironment -> Value -> IO Value
+postMetadata :: (HasCallStack) => TestEnvironment -> Value -> IO Value
 postMetadata testEnvironment = withFrozenCallStack $ post (withHTTP testEnvironment) "/v1/metadata"

-postMetadataWithStatus :: HasCallStack => Int -> TestEnvironment -> Value -> IO Value
+postMetadataWithStatus :: (HasCallStack) => Int -> TestEnvironment -> Value -> IO Value
 postMetadataWithStatus statusCode testEnvironment v =
   withFrozenCallStack $ postWithHeadersStatus statusCode (withHTTP testEnvironment) "/v1/metadata" mempty v

-postMetadataWithStatusAndHeaders :: HasCallStack => Int -> TestEnvironment -> Http.RequestHeaders -> Value -> IO Value
+postMetadataWithStatusAndHeaders :: (HasCallStack) => Int -> TestEnvironment -> Http.RequestHeaders -> Value -> IO Value
 postMetadataWithStatusAndHeaders statusCode testEnvironment =
   withFrozenCallStack $ postWithHeadersStatus statusCode (withHTTP testEnvironment) "/v1/metadata"

 -- | Resets metadata, removing all sources or remote schemas.
 --
 -- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
-clearMetadata :: HasCallStack => TestEnvironment -> IO ()
+clearMetadata :: (HasCallStack) => TestEnvironment -> IO ()
 clearMetadata s = withFrozenCallStack $ postMetadata_ s [yaml|{type: clear_metadata, args: {}}|]

-exportMetadata :: HasCallStack => TestEnvironment -> IO Value
+exportMetadata :: (HasCallStack) => TestEnvironment -> IO Value
 exportMetadata s = withFrozenCallStack $ postMetadata s [yaml|{type: export_metadata, args: {}}|]

 -- | Reload metadata
@@ -303,15 +303,15 @@ args: {}
 -- @headers@ are mostly irrelevant for the admin endpoint @v2/query@.
 --
 -- Note: We add 'withFrozenCallStack' to reduce stack trace clutter.
-postV2Query :: HasCallStack => Int -> TestEnvironment -> Value -> IO Value
+postV2Query :: (HasCallStack) => Int -> TestEnvironment -> Value -> IO Value
 postV2Query statusCode testEnvironment =
   withFrozenCallStack $ postWithHeadersStatus statusCode testEnvironment "/v2/query" mempty

-postV2Query_ :: HasCallStack => TestEnvironment -> Value -> IO ()
+postV2Query_ :: (HasCallStack) => TestEnvironment -> Value -> IO ()
 postV2Query_ testEnvironment =
   withFrozenCallStack $ post_ testEnvironment "/v2/query"

-postV1Query :: HasCallStack => Int -> TestEnvironment -> Value -> IO Value
+postV1Query :: (HasCallStack) => Int -> TestEnvironment -> Value -> IO Value
 postV1Query statusCode testEnvironment =
   withFrozenCallStack $ postWithHeadersStatus statusCode testEnvironment "/v1/query" mempty
@@ -418,8 +418,8 @@ runApp metadataDbUrl serveOptions = do
   runManagedT managedServerCtx \(appInit, appEnv) ->
     App.runAppM appEnv do
       appCtx <- App.initialiseAppContext env serveOptions appInit
-      lowerManagedT $
-        App.runHGEServer
+      lowerManagedT
+        $ App.runHGEServer
          (const $ pure ())
          appCtx
          initTime
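postWithHeadersStatusViaWebSocket above sends exactly two frames before reading results: a connection_init carrying the headers, then a start frame wrapping the request body. A sketch of those payloads with aeson; the header and query values here are invented:

{-# LANGUAGE OverloadedStrings #-}

module WsFrames where

import Data.Aeson (Value (String), encode, object, (.=))
import qualified Data.ByteString.Lazy.Char8 as L8

main :: IO ()
main = do
  let initFrame =
        object
          [ "type" .= String "connection_init",
            "payload" .= object ["headers" .= object ["x-hasura-admin-secret" .= String "top-secret"]]
          ]
      startFrame =
        object
          [ "id" .= String "some-request-id",
            "type" .= String "start",
            "payload" .= object ["query" .= String "query { __typename }"]
          ]
  mapM_ (L8.putStrLn . encode) [initFrame, startFrame]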
diff --git a/server/lib/test-harness/src/Harness/Http.hs b/server/lib/test-harness/src/Harness/Http.hs
index 28eda116328..84d7e7ae52a 100644
--- a/server/lib/test-harness/src/Harness/Http.hs
+++ b/server/lib/test-harness/src/Harness/Http.hs
@@ -30,19 +30,19 @@ import Network.HTTP.Types qualified as Http

 -- | Performs get, doesn't return the result. Simply throws if there's
 -- not a 200 response.
-get_ :: HasCallStack => String -> IO ()
+get_ :: (HasCallStack) => String -> IO ()
 get_ = getWithStatus [200]

 -- | Performs get, doesn't return the result. Simply throws if there's
 -- not an expected response status code.
-getWithStatus :: HasCallStack => [Int] -> String -> IO ()
+getWithStatus :: (HasCallStack) => [Int] -> String -> IO ()
 getWithStatus acceptableStatusCodes url =
   Http.withResponse @_ @IO (fromString url) \response -> do
     let actualStatusCode = Http.getResponseStatusCode response
     unless (actualStatusCode `elem` acceptableStatusCodes) $ do
       body <- runConduit $ Http.getResponseBody response .| foldMapC id
-      fail $
-        unlines
+      fail
+        $ unlines
          [ "The HTTP response had an unexpected response code.",
            "URL: " <> url,
            "Expected status codes: " <> show acceptableStatusCodes,
@@ -53,22 +53,22 @@ getWithStatus acceptableStatusCodes url =

 -- | Post the JSON to the given URL, and produces a very descriptive
 -- exception on failure.
-postValue :: HasCallStack => String -> Http.RequestHeaders -> Value -> IO Value
+postValue :: (HasCallStack) => String -> Http.RequestHeaders -> Value -> IO Value
 postValue = postValueWithStatus 200

 post :: String -> Http.RequestHeaders -> Value -> IO (Http.Response L8.ByteString)
 post url headers value = do
   let request =
-        Http.setRequestHeaders headers $
-          Http.setRequestMethod Http.methodPost $
-            Http.setRequestBodyJSON value (fromString url)
+        Http.setRequestHeaders headers
+          $ Http.setRequestMethod Http.methodPost
+          $ Http.setRequestBodyJSON value (fromString url)
   response <- Http.httpLbs request
   unless ("Content-Type" `elem` (fst <$> Http.getResponseHeaders response)) $ error "Missing Content-Type header in response"
   pure response

 -- | Post the JSON to the given URL and expected HTTP response code.
 -- Produces a very descriptive exception or failure.
-postValueWithStatus :: HasCallStack => Int -> String -> Http.RequestHeaders -> Value -> IO Value
+postValueWithStatus :: (HasCallStack) => Int -> String -> Http.RequestHeaders -> Value -> IO Value
 postValueWithStatus statusCode url headers value = do
   response <- post url headers value
   let requestBodyString = L8.unpack $ encode value
@@ -104,23 +104,23 @@ postValueWithStatus statusCode url headers value = do
     reportError = error . unlines

 -- | Wait for a service to become healthy.
-healthCheck :: HasCallStack => String -> IO ()
+healthCheck :: (HasCallStack) => String -> IO ()
 healthCheck url = do
   result <- healthCheck' url
   case result of
     Healthy -> return ()
     Unhealthy failures ->
-      error $
-        "Health check failed for URL: "
-          ++ url
-          ++ ", with failures: "
-          ++ show failures
-          ++ "\nIs graphql-engine starting up without errors outside of this test suite?"
+      error
+        $ "Health check failed for URL: "
+        ++ url
+        ++ ", with failures: "
+        ++ show failures
+        ++ "\nIs graphql-engine starting up without errors outside of this test suite?"

 data HealthCheckResult = Healthy | Unhealthy [Http.HttpException]

 -- | Wait for a service to become healthy.
-healthCheck' :: HasCallStack => String -> IO HealthCheckResult
+healthCheck' :: (HasCallStack) => String -> IO HealthCheckResult
 healthCheck' url = loop [] httpHealthCheckAttempts
   where
     loop failures 0 = return $ Unhealthy failures
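healthCheck' above polls until its attempt budget runs out, remembering every failure so the final error is informative. A simplified, hedged rendering of that loop; IOException stands in for the HttpException list the harness collects, and the one-second delay is a guess:

{-# LANGUAGE ScopedTypeVariables #-}

module HealthLoop where

import Control.Concurrent (threadDelay)
import Control.Exception (IOException, try)

data HealthCheckResult = Healthy | Unhealthy [IOException]

healthLoop :: Int -> IO () -> IO HealthCheckResult
healthLoop attempts check = go [] attempts
  where
    go failures 0 = pure (Unhealthy failures)
    go failures n = do
      result <- try check
      case result of
        Right () -> pure Healthy
        Left (e :: IOException) -> do
          threadDelay 1000000 -- one second between attempts (assumed)
          go (e : failures) (n - 1)

main :: IO ()
main = do
  result <- healthLoop 3 (readFile "/nonexistent" >> pure ())
  case result of
    Healthy -> putStrLn "healthy"
    Unhealthy failures -> putStrLn ("unhealthy after " <> show (length failures) <> " attempts")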
diff --git a/server/lib/test-harness/src/Harness/Logging.hs b/server/lib/test-harness/src/Harness/Logging.hs
index 2a4bb8b100f..80331847ece 100644
--- a/server/lib/test-harness/src/Harness/Logging.hs
+++ b/server/lib/test-harness/src/Harness/Logging.hs
@@ -15,10 +15,10 @@ import Test.Hspec.Core.Runner
 import Test.Hspec.Core.Spec

 -- | Make the logger in the 'GlobalTestEnvironment' add context about the specs that use it.
-contextualizeLogger :: Has Logger a => SpecWith a -> SpecWith a
+contextualizeLogger :: (Has Logger a) => SpecWith a -> SpecWith a
 contextualizeLogger = mapSpecForest (map contextualizeTree)

-contextualizeTree :: forall a. Has Logger a => SpecTree a -> SpecTree a
+contextualizeTree :: forall a. (Has Logger a) => SpecTree a -> SpecTree a
 contextualizeTree spectree = go [] spectree
   where
     go :: [Text] -> SpecTree a -> SpecTree a
@@ -29,8 +29,8 @@ contextualizeTree spectree = go [] spectree
           action
           (map (go ps) children)
     go ps (Leaf item) =
-      Leaf $
-        item
+      Leaf
+        $ item
          { itemExample =
              \params actionRunner progressCallback ->
                itemExample
diff --git a/server/lib/test-harness/src/Harness/Logging/Messages.hs b/server/lib/test-harness/src/Harness/Logging/Messages.hs
index 3384fe9cd27..74248a764cd 100644
--- a/server/lib/test-harness/src/Harness/Logging/Messages.hs
+++ b/server/lib/test-harness/src/Harness/Logging/Messages.hs
@@ -44,7 +44,7 @@ import System.Log.FastLogger qualified as FL
 import Test.Hspec.Core.Format

 -- | Newtype wrapper around logging action to encapsulate existential type.
-newtype Logger = Logger {runLogger :: forall a. LoggableMessage a => a -> IO ()}
+newtype Logger = Logger {runLogger :: forall a. (LoggableMessage a) => a -> IO ()}

 -- | Log a structured message in tests
 testLogMessage :: (Has Logger env, LoggableMessage msg) => env -> msg -> IO ()
@@ -87,7 +87,7 @@ class LoggableMessage a where
 --
 -- If you find yourself wanting to do this, consider defining a new, bespoke
 -- message type that describes what you want to log.
-instance TypeError ('Text "Please define a custom message type rather than logging raw JSON values") => LoggableMessage Value where
+instance (TypeError ('Text "Please define a custom message type rather than logging raw JSON values")) => LoggableMessage Value where
   fromLoggableMessage = error "Please define a custom message type rather than logging raw JSON values"

 newtype LogTrace = LogTrace Text
@@ -96,7 +96,7 @@ instance LoggableMessage LogTrace where
   fromLoggableMessage (LogTrace msg) =
     object [("type", String "LogTrace"), ("message", String msg)]

-logTrace :: TraceString a => a -> LogTrace
+logTrace :: (TraceString a) => a -> LogTrace
 logTrace = LogTrace . toTraceString

 newtype LogHspecEvent = LogHspecEvent {unLogHspecEvent :: Event}
@@ -114,11 +114,11 @@ instance LoggableMessage LogHspecEvent where
     where
       encEvent :: Text -> [Pair] -> Value
       encEvent eventTag eventFields =
-        object $
-          [ ("type", String "Hspec Event"),
-            ("event_tag", toJSON eventTag)
-          ]
-            <> eventFields
+        object
+          $ [ ("type", String "Hspec Event"),
+              ("event_tag", toJSON eventTag)
+            ]
+          <> eventFields

       encPath :: ([String], String) -> [Pair]
       encPath (groups, req) =
@@ -333,7 +333,7 @@ instance LoggableMessage LogFixtureTeardownFailed where
 -- to sort through.
 newtype LogHarness = LogHarness {unLogHarness :: Text}

-logHarness :: TraceString a => a -> LogHarness
+logHarness :: (TraceString a) => a -> LogHarness
 logHarness = LogHarness . toTraceString

 instance LoggableMessage LogHarness where
@@ -347,5 +347,5 @@ instance LoggableMessage LogHarness where
 flLogger :: (FL.LogStr -> IO ()) -> Logger
 flLogger logAction = Logger (logAction . msgToLogStr)

-msgToLogStr :: LoggableMessage a => a -> FL.LogStr
+msgToLogStr :: (LoggableMessage a) => a -> FL.LogStr
 msgToLogStr = FL.toLogStr . (<> "\n") . encode . fromLoggableMessage
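The Logging.Messages hunks show the harness's logging contract: every message is a bespoke type with a fromLoggableMessage renderer, and logging a raw aeson Value is rejected with a custom TypeError. A cut-down sketch of the class and one instance (the real module defines many more message types):

{-# LANGUAGE OverloadedStrings #-}

module BespokeLogging where

import Data.Aeson (Value (String), encode, object)
import qualified Data.ByteString.Lazy.Char8 as L8
import Data.Text (Text)

class LoggableMessage a where
  fromLoggableMessage :: a -> Value

newtype LogTrace = LogTrace Text

instance LoggableMessage LogTrace where
  fromLoggableMessage (LogTrace msg) =
    object [("type", String "LogTrace"), ("message", String msg)]

-- Mirrors msgToLogStr: one JSON object per log line.
logMessage :: (LoggableMessage a) => a -> IO ()
logMessage = L8.putStrLn . encode . fromLoggableMessage

main :: IO ()
main = logMessage (LogTrace "setup complete")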
diff --git a/server/lib/test-harness/src/Harness/RemoteServer.hs b/server/lib/test-harness/src/Harness/RemoteServer.hs
index 31a7a7b9719..f66b83a5ac1 100644
--- a/server/lib/test-harness/src/Harness/RemoteServer.hs
+++ b/server/lib/test-harness/src/Harness/RemoteServer.hs
@@ -59,17 +59,18 @@ run ::
 run (Interpreter interpreter) = mkTestResource do
   let urlPrefix = "http://127.0.0.1"
   port <- bracket (Warp.openFreePort) (Socket.close . snd) (pure . fst)
-  thread <- Async.async $
-    Spock.runSpockNoBanner port $
-      Spock.spockT id $ do
-        Spock.get "/" $ do
-          Spock.json $ J.String "OK"
-        Spock.post "/graphql" $ do
-          req <- Spock.request
-          body <- liftIO $ Wai.strictRequestBody req
-          result <- liftIO $ interpreter body
-          Spock.setHeader "Content-Type" "application/json; charset=utf-8"
-          Spock.lazyBytes result
+  thread <- Async.async
+    $ Spock.runSpockNoBanner port
+    $ Spock.spockT id
+    $ do
+      Spock.get "/" $ do
+        Spock.json $ J.String "OK"
+      Spock.post "/graphql" $ do
+        req <- Spock.request
+        body <- liftIO $ Wai.strictRequestBody req
+        result <- liftIO $ interpreter body
+        Spock.setHeader "Content-Type" "application/json; charset=utf-8"
+        Spock.lazyBytes result
   let server = Server {port = fromIntegral port, urlPrefix, thread}
   Http.healthCheck $ serverUrl server
   pure
@@ -152,21 +153,21 @@ run (Interpreter interpreter) = mkTestResource do
 -- - https://github.com/morpheusgraphql/mythology-api/blob/master/src/Mythology/API.hs
 generateInterpreter ::
   forall query mutation.
-  RootResolverConstraint IO () query mutation Undefined =>
+  (RootResolverConstraint IO () query mutation Undefined) =>
   query (Resolver QUERY () IO) ->
   mutation (Resolver MUTATION () IO) ->
   Interpreter
 generateInterpreter queryResolver mutationResolver =
-  Interpreter $
-    Morpheus.interpreter $
-      defaultRootResolver {queryResolver, mutationResolver}
+  Interpreter
+    $ Morpheus.interpreter
+    $ defaultRootResolver {queryResolver, mutationResolver}

 -- | This function is similar to 'generateInterpreter', but only expects a
 -- resolver for queries. The resulting 'Interpreter' only supports queries, and
 -- handles neither mutations nor subscriptions.
 generateQueryInterpreter ::
   forall query.
-  RootResolverConstraint IO () query Undefined Undefined =>
+  (RootResolverConstraint IO () query Undefined Undefined) =>
   query (Resolver QUERY () IO) ->
   Interpreter
 generateQueryInterpreter queryResolver =
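One line in the RemoteServer hunk deserves a note: the free-port trick. Warp.openFreePort binds a socket on an OS-assigned port; the bracket keeps only the port number and closes the socket so the Spock server can bind it moments later (a small race window is accepted). An isolated sketch, assuming the warp and network packages:

module FreePort where

import Control.Exception (bracket)
import qualified Network.Socket as Socket
import qualified Network.Wai.Handler.Warp as Warp

-- openFreePort returns (port, socket); close the socket immediately and
-- hand the port to whatever server is about to start.
getFreePort :: IO Int
getFreePort = bracket Warp.openFreePort (Socket.close . snd) (pure . fst)

main :: IO ()
main = getFreePort >>= print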
diff --git a/server/lib/test-harness/src/Harness/Schema.hs b/server/lib/test-harness/src/Harness/Schema.hs
index a51938168d6..d78c9feacfc 100644
--- a/server/lib/test-harness/src/Harness/Schema.hs
+++ b/server/lib/test-harness/src/Harness/Schema.hs
@@ -84,7 +84,7 @@ resolveTableSchema testEnv tbl =
     Just schemaName -> schemaName

 -- | track_table API call
-trackTable :: HasCallStack => String -> Table -> TestEnvironment -> IO ()
+trackTable :: (HasCallStack) => String -> Table -> TestEnvironment -> IO ()
 trackTable source tbl testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       backendType = BackendType.backendTypeString backendTypeMetadata
@@ -97,7 +97,7 @@ trackTable source tbl testEnvironment = do
       args: *trackTableArgs
    |]

-mkTrackTableV2ApiObject :: HasCallStack => TestEnvironment -> BackendTypeConfig -> String -> Table -> Value
+mkTrackTableV2ApiObject :: (HasCallStack) => TestEnvironment -> BackendTypeConfig -> String -> Table -> Value
 mkTrackTableV2ApiObject testEnvironment backendTypeConfig source tbl@(Table {tableName}) = do
   let schema = resolveTableSchema testEnvironment tbl
       tableField = mkTableField backendTypeConfig schema tableName
@@ -115,15 +115,15 @@ mkTrackTableV2ApiObject testEnvironment backendTypeConfig source tbl@(Table {tab
     columnConfig :: Column -> Maybe J.Pair
     columnConfig col = do
       alias <- columnGqlAlias col
-      return $
-        ( K.fromText $ columnName col,
-          [yaml|
+      return
+        $ ( K.fromText $ columnName col,
+            [yaml|
            custom_name: *alias
          |]
-        )
+          )

 -- | track_tables API call
-trackTables :: HasCallStack => String -> [Table] -> TestEnvironment -> IO Value
+trackTables :: (HasCallStack) => String -> [Table] -> TestEnvironment -> IO Value
 trackTables source tables testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       backendType = BackendType.backendTypeString backendTypeMetadata
@@ -138,7 +138,7 @@ trackTables source tables testEnvironment = do
    |]

 -- | track_tables API call, with the allow_warnings setting and expecting a specific http response status code
-trackTablesWithStatus :: HasCallStack => String -> [Table] -> AllowWarnings -> Int -> TestEnvironment -> IO Value
+trackTablesWithStatus :: (HasCallStack) => String -> [Table] -> AllowWarnings -> Int -> TestEnvironment -> IO Value
 trackTablesWithStatus source tables allowWarnings expectedHttpStatus testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       backendType = BackendType.backendTypeString backendTypeMetadata
@@ -155,7 +155,7 @@ trackTablesWithStatus source tables allowWarnings expectedHttpStatus testEnviron
    |]

 -- | untrack_table API call
-untrackTable :: HasCallStack => String -> Table -> TestEnvironment -> IO ()
+untrackTable :: (HasCallStack) => String -> Table -> TestEnvironment -> IO ()
 untrackTable source tbl testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       backendType = BackendType.backendTypeString backendTypeMetadata
@@ -168,7 +168,7 @@ untrackTable source tbl testEnvironment = do
       args: *untrackTableArgs
    |]

-mkUntrackTableApiObject :: HasCallStack => TestEnvironment -> BackendTypeConfig -> String -> Table -> Bool -> Value
+mkUntrackTableApiObject :: (HasCallStack) => TestEnvironment -> BackendTypeConfig -> String -> Table -> Bool -> Value
 mkUntrackTableApiObject testEnvironment backendTypeConfig source tbl@(Table {tableName}) cascade = do
   let schema = resolveTableSchema testEnvironment tbl
       tableField = mkTableField backendTypeConfig schema tableName
@@ -179,7 +179,7 @@ mkUntrackTableApiObject testEnvironment backendTypeConfig source tbl@(Table {tab
    |]

 -- | untrack_tables API call
-untrackTables :: HasCallStack => String -> [(Table, Bool)] -> TestEnvironment -> IO Value
+untrackTables :: (HasCallStack) => String -> [(Table, Bool)] -> TestEnvironment -> IO Value
 untrackTables source tablesWithCascade testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       backendType = BackendType.backendTypeString backendTypeMetadata
@@ -195,7 +195,7 @@ untrackTables source tablesWithCascade testEnvironment = do
    |]

 -- | untrack_tables API call, with the allow_warnings setting and expecting a specific http response status code
-untrackTablesWithStatus :: HasCallStack => String -> [(Table, Bool)] -> AllowWarnings -> Int -> TestEnvironment -> IO Value
+untrackTablesWithStatus :: (HasCallStack) => String -> [(Table, Bool)] -> AllowWarnings -> Int -> TestEnvironment -> IO Value
 untrackTablesWithStatus source tablesWithCascade allowWarnings expectedHttpStatus testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       backendType = BackendType.backendTypeString backendTypeMetadata
@@ -212,7 +212,7 @@ untrackTablesWithStatus source tablesWithCascade allowWarnings expectedHttpStatu
         allow_warnings: *allowWarnings
    |]

-trackFunction :: HasCallStack => String -> String -> TestEnvironment -> IO ()
+trackFunction :: (HasCallStack) => String -> String -> TestEnvironment -> IO ()
 trackFunction source functionName testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       backendType = BackendType.backendTypeString backendTypeMetadata
@@ -230,7 +230,7 @@ args:
 |]

 -- | Unified untrack function
-untrackFunction :: HasCallStack => String -> String -> TestEnvironment -> IO ()
+untrackFunction :: (HasCallStack) => String -> String -> TestEnvironment -> IO ()
 untrackFunction source functionName testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       backendType = BackendType.backendTypeString backendTypeMetadata
@@ -248,7 +248,7 @@ args:
 |]

 trackComputedField ::
-  HasCallStack =>
+  (HasCallStack) =>
   String ->
   Table ->
   String ->
@@ -285,7 +285,7 @@ args:
 |]

 -- | Unified untrack computed field
-untrackComputedField :: HasCallStack => String -> Table -> String -> TestEnvironment -> IO ()
+untrackComputedField :: (HasCallStack) => String -> Table -> String -> TestEnvironment -> IO ()
 untrackComputedField source Table {tableName} fieldName testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       backendType = BackendType.backendTypeString backendTypeMetadata
@@ -327,7 +327,7 @@ mkTableField backendTypeMetadata schemaName tableName =
     BackendType.DataConnector _ -> dcFieldName

 -- | Unified track object relationships
-trackObjectRelationships :: HasCallStack => Table -> TestEnvironment -> IO ()
+trackObjectRelationships :: (HasCallStack) => Table -> TestEnvironment -> IO ()
 trackObjectRelationships tbl@(Table {tableName, tableReferences, tableManualRelationships}) testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       localSchema = resolveTableSchema testEnvironment tbl
@@ -385,7 +385,7 @@ mkArrayRelationshipName tableName referenceLocalColumn referenceTargetColumn ref
   in tableName <> "s_by_" <> referenceLocalColumn <> "_to_" <> columnName

 -- | Unified track array relationships
-trackArrayRelationships :: HasCallStack => Table -> TestEnvironment -> IO ()
+trackArrayRelationships :: (HasCallStack) => Table -> TestEnvironment -> IO ()
 trackArrayRelationships tbl@(Table {tableName, tableReferences, tableManualRelationships}) testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       localSchema = resolveTableSchema testEnvironment tbl
@@ -442,7 +442,7 @@ args:
   GraphqlEngine.postMetadata_ testEnvironment payload

 -- | Unified untrack relationships
-untrackRelationships :: HasCallStack => Table -> TestEnvironment -> IO ()
+untrackRelationships :: (HasCallStack) => Table -> TestEnvironment -> IO ()
 untrackRelationships Table {tableName, tableReferences, tableManualRelationships} testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       schema = getSchemaName testEnvironment
@@ -480,7 +480,7 @@ untrackRelationships Table {tableName, tableReferences, tableManualRelationships
     )

 -- | Unified RunSQL
-runSQL :: HasCallStack => String -> String -> TestEnvironment -> IO ()
+runSQL :: (HasCallStack) => String -> String -> TestEnvironment -> IO ()
 runSQL source sql testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       prefix = BackendType.backendTypeString backendTypeMetadata
@@ -496,7 +496,7 @@ args:
     read_only: false
 |]

-addSource :: HasCallStack => Text -> Value -> TestEnvironment -> IO ()
+addSource :: (HasCallStack) => Text -> Value -> TestEnvironment -> IO ()
 addSource sourceName sourceConfig testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
       backendType = BackendType.backendTypeString backendTypeMetadata
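trackTable above builds a <backend>_track_table metadata call from the backend prefix plus a table object. A hedged aeson rendering of what such a request body can look like for a Postgres source; the exact args accepted by the API may include more fields (configuration, customization, and so on) than shown here:

{-# LANGUAGE OverloadedStrings #-}

module TrackTablePayload where

import Data.Aeson (Value, encode, object, (.=))
import qualified Data.ByteString.Lazy.Char8 as L8
import Data.Text (Text)

trackTableRequest :: Text -> Text -> Text -> Value
trackTableRequest source schema table =
  object
    [ "type" .= ("pg_track_table" :: Text),
      "args"
        .= object
          [ "source" .= source,
            "table" .= object ["schema" .= schema, "name" .= table]
          ]
    ]

main :: IO ()
main = L8.putStrLn (encode (trackTableRequest "postgres" "hasura" "author"))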
diff --git a/server/lib/test-harness/src/Harness/Schema/LogicalModel.hs b/server/lib/test-harness/src/Harness/Schema/LogicalModel.hs
index 3e9f8b39177..7df41965705 100644
--- a/server/lib/test-harness/src/Harness/Schema/LogicalModel.hs
+++ b/server/lib/test-harness/src/Harness/Schema/LogicalModel.hs
@@ -105,31 +105,31 @@ trackLogicalModelCommand sourceName backendTypeConfig (LogicalModel {logicalMode
                 logicalModelColumnReference,
                 logicalModelColumnName
               } ->
-              J.object $
-                [ ("name" .= logicalModelColumnName),
-                  ( "type",
-                    J.object
-                      [ ("logical_model" .= logicalModelColumnReference)
-                      ]
-                  )
-                ]
+              J.object
+                $ [ ("name" .= logicalModelColumnName),
+                    ( "type",
+                      J.object
+                        [ ("logical_model" .= logicalModelColumnReference)
+                        ]
+                    )
+                  ]
             LogicalModelReference
               { logicalModelColumnReferenceType = ArrayReference,
                 logicalModelColumnReference,
                 logicalModelColumnName
              } ->
-              J.object $
-                [ ("name" .= logicalModelColumnName),
-                  ( "type",
-                    J.object $
-                      [ ( "array",
-                          J.object $
-                            [ ("logical_model" .= logicalModelColumnReference)
-                            ]
-                        )
-                      ]
-                  )
-                ]
+              J.object
+                $ [ ("name" .= logicalModelColumnName),
+                    ( "type",
+                      J.object
+                        $ [ ( "array",
+                              J.object
+                                $ [ ("logical_model" .= logicalModelColumnReference)
+                                  ]
+                            )
+                          ]
+                    )
+                  ]
             LogicalModelScalar {..} ->
               let descriptionPair = case logicalModelColumnDescription of
                     Just desc -> [("description" .= desc)]
@@ -137,12 +137,12 @@ trackLogicalModelCommand sourceName backendTypeConfig (LogicalModel {logicalMode
               in -- this is the old way to encode these, but we'll keep using
                  -- in the tests for now to ensure we remain backwards
                  -- compatible
-                 J.object $
-                   [ ("name" .= logicalModelColumnName),
-                     ("type" .= (BackendType.backendScalarType backendTypeConfig) logicalModelColumnType),
-                     ("nullable" .= logicalModelColumnNullable)
-                   ]
-                     <> descriptionPair
+                 J.object
+                   $ [ ("name" .= logicalModelColumnName),
+                       ("type" .= (BackendType.backendScalarType backendTypeConfig) logicalModelColumnType),
+                       ("nullable" .= logicalModelColumnNullable)
+                     ]
+                   <> descriptionPair
         )

     columns = returnTypeToJson logicalModelColumns
@@ -163,7 +163,7 @@ trackLogicalModelCommand sourceName backendTypeConfig (LogicalModel {logicalMode
       fields: *columns
    |]

-trackLogicalModel :: HasCallStack => String -> LogicalModel -> TestEnvironment -> IO ()
+trackLogicalModel :: (HasCallStack) => String -> LogicalModel -> TestEnvironment -> IO ()
 trackLogicalModel sourceName ctmType testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
@@ -182,7 +182,7 @@ untrackLogicalModelCommand source backendTypeMetadata LogicalModel {logicalModel
       name: *logicalModelName
    |]

-untrackLogicalModel :: HasCallStack => String -> LogicalModel -> TestEnvironment -> IO ()
+untrackLogicalModel :: (HasCallStack) => String -> LogicalModel -> TestEnvironment -> IO ()
 untrackLogicalModel source ctmType testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
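The LogicalModel hunks juggle three column encodings; restated as plain aeson values (names shortened, shapes as in the diff): a scalar column is flat, a reference nests a logical_model under "type", and an array reference wraps that again in "array":

{-# LANGUAGE OverloadedStrings #-}

module LogicalModelColumns where

import Data.Aeson (Value, encode, object, (.=))
import qualified Data.ByteString.Lazy.Char8 as L8
import Data.Text (Text)

scalarColumn :: Text -> Text -> Bool -> Value
scalarColumn name ty nullable =
  object ["name" .= name, "type" .= ty, "nullable" .= nullable]

referenceColumn :: Text -> Text -> Value
referenceColumn name ref =
  object ["name" .= name, "type" .= object ["logical_model" .= ref]]

arrayReferenceColumn :: Text -> Text -> Value
arrayReferenceColumn name ref =
  object ["name" .= name, "type" .= object ["array" .= object ["logical_model" .= ref]]]

main :: IO ()
main =
  mapM_
    (L8.putStrLn . encode)
    [ scalarColumn "id" "integer" False,
      referenceColumn "author" "author_model",
      arrayReferenceColumn "posts" "post_model"
    ]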
diff --git a/server/lib/test-harness/src/Harness/Schema/NativeQuery.hs b/server/lib/test-harness/src/Harness/Schema/NativeQuery.hs
index 3a6e38480a6..c6034b1d037 100644
--- a/server/lib/test-harness/src/Harness/Schema/NativeQuery.hs
+++ b/server/lib/test-harness/src/Harness/Schema/NativeQuery.hs
@@ -75,11 +75,11 @@ trackNativeQueryCommand sourceName backendTypeConfig (NativeQuery {nativeQueryOb
                     Nothing -> []

                 value =
-                  J.object $
-                    [ ("type" .= (BackendType.backendScalarType backendTypeConfig) nativeQueryColumnType),
-                      ("nullable" .= nativeQueryColumnNullable)
-                    ]
-                      <> descriptionPair
+                  J.object
+                    $ [ ("type" .= (BackendType.backendScalarType backendTypeConfig) nativeQueryColumnType),
+                        ("nullable" .= nativeQueryColumnNullable)
+                      ]
+                    <> descriptionPair
              in (key, value)
         )
@@ -101,7 +101,7 @@ trackNativeQueryCommand sourceName backendTypeConfig (NativeQuery {nativeQueryOb
       returns: *nativeQueryLogicalModel
    |]

-trackNativeQuery :: HasCallStack => String -> NativeQuery -> TestEnvironment -> IO ()
+trackNativeQuery :: (HasCallStack) => String -> NativeQuery -> TestEnvironment -> IO ()
 trackNativeQuery sourceName logMod testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
@@ -120,7 +120,7 @@ untrackNativeQueryCommand source backendTypeMetadata NativeQuery {nativeQueryNam
       root_field_name: *nativeQueryName
    |]

-untrackNativeQuery :: HasCallStack => String -> NativeQuery -> TestEnvironment -> IO ()
+untrackNativeQuery :: (HasCallStack) => String -> NativeQuery -> TestEnvironment -> IO ()
 untrackNativeQuery source logMod testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
diff --git a/server/lib/test-harness/src/Harness/Schema/StoredProcedure.hs b/server/lib/test-harness/src/Harness/Schema/StoredProcedure.hs
index 9ce4da12bc5..9321e7c0d04 100644
--- a/server/lib/test-harness/src/Harness/Schema/StoredProcedure.hs
+++ b/server/lib/test-harness/src/Harness/Schema/StoredProcedure.hs
@@ -71,11 +71,11 @@ trackStoredProcedureCommand sourceName backendTypeConfig StoredProcedure {stored
                     Nothing -> []

                 value =
-                  J.object $
-                    [ ("type" .= (BackendType.backendScalarType backendTypeConfig) storedProcedureColumnType),
-                      ("nullable" .= storedProcedureColumnNullable)
-                    ]
-                      <> descriptionPair
+                  J.object
+                    $ [ ("type" .= (BackendType.backendScalarType backendTypeConfig) storedProcedureColumnType),
+                        ("nullable" .= storedProcedureColumnNullable)
+                      ]
+                    <> descriptionPair
              in (key, value)
         )
@@ -97,7 +97,7 @@ trackStoredProcedureCommand sourceName backendTypeConfig StoredProcedure {stored
       returns: *storedProcedureLogicalModel
    |]

-trackStoredProcedure :: HasCallStack => String -> StoredProcedure -> TestEnvironment -> IO ()
+trackStoredProcedure :: (HasCallStack) => String -> StoredProcedure -> TestEnvironment -> IO ()
 trackStoredProcedure sourceName logMod testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
@@ -116,7 +116,7 @@ untrackStoredProcedureCommand source backendTypeMetadata StoredProcedure {stored
       stored_procedure: *storedProcedureName
    |]

-untrackStoredProcedure :: HasCallStack => String -> StoredProcedure -> TestEnvironment -> IO ()
+untrackStoredProcedure :: (HasCallStack) => String -> StoredProcedure -> TestEnvironment -> IO ()
 untrackStoredProcedure source logMod testEnvironment = do
   let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
diff --git a/server/lib/test-harness/src/Harness/Services/Database/Postgres.hs b/server/lib/test-harness/src/Harness/Services/Database/Postgres.hs
index 0ff0e859887..dc28069ba9b 100644
--- a/server/lib/test-harness/src/Harness/Services/Database/Postgres.hs
+++ b/server/lib/test-harness/src/Harness/Services/Database/Postgres.hs
@@ -97,23 +97,23 @@ mkFreshPostgresDbIO :: (Has Logger testEnvironment, Has PostgresServerUrl testEn
 mkFreshPostgresDbIO testEnv = do
   freshDbName <- drawFreshDbName
   createDatabase testEnv freshDbName
-  return $
-    ( FreshPostgresDb freshDbName,
-      dropDatabase testEnv freshDbName
-    )
+  return
+    $ ( FreshPostgresDb freshDbName,
+        dropDatabase testEnv freshDbName
+      )
   where
     drawFreshDbName :: IO Text
    drawFreshDbName = do
      uuid <- tshow <$> liftIO UUID.nextRandom
-      return $
-        "freshdb_"
-          <> T.map
-            ( \a ->
-                if Data.Char.isAlphaNum a
-                  then a
-                  else '_'
-            )
-            uuid
+      return
+        $ "freshdb_"
+        <> T.map
+          ( \a ->
+              if Data.Char.isAlphaNum a
+                then a
+                else '_'
+          )
+          uuid

 mkFreshDbConnectionString :: PostgresServerUrl -> FreshPostgresDb -> PostgresServerUrl
 mkFreshDbConnectionString (PostgresServerUrl pgUrl) (FreshPostgresDb db) =
@@ -259,7 +259,7 @@ createUniqueIndexSql (SchemaName schemaName) tableName = \case
 wrapIdentifier :: Text -> Text
 wrapIdentifier identifier = "\"" <> identifier <> "\""

-scalarType :: HasCallStack => Schema.ScalarType -> Text
+scalarType :: (HasCallStack) => Schema.ScalarType -> Text
 scalarType = \case
   Schema.TInt -> "integer"
   Schema.TStr -> "varchar"
diff --git a/server/lib/test-harness/src/Harness/Services/ExternalProcess/DCPostgresAgent.hs b/server/lib/test-harness/src/Harness/Services/ExternalProcess/DCPostgresAgent.hs
index 72eb7baf51f..9fe15d8fe9c 100644
--- a/server/lib/test-harness/src/Harness/Services/ExternalProcess/DCPostgresAgent.hs
+++ b/server/lib/test-harness/src/Harness/Services/ExternalProcess/DCPostgresAgent.hs
@@ -178,8 +178,8 @@ spawnAgent testEnv (DcPgConfig {dcPgConfigEnvironmentVars}) = do
             create_group = True
           }
         `catchAny` ( \exn ->
-                       error $
-                         unlines
+                       error
+                         $ unlines
                           [ "Failed to spawn postgres-agent process:",
                             show exn
                           ]
diff --git a/server/lib/test-harness/src/Harness/Services/ExternalProcess/GraphqlEngine.hs b/server/lib/test-harness/src/Harness/Services/ExternalProcess/GraphqlEngine.hs
index 4b4e90e696e..18f0f79b9a1 100644
--- a/server/lib/test-harness/src/Harness/Services/ExternalProcess/GraphqlEngine.hs
+++ b/server/lib/test-harness/src/Harness/Services/ExternalProcess/GraphqlEngine.hs
@@ -133,7 +133,7 @@ mkHgeInstancePool logger = do
   return $ HgeServerHandle hge cleanup

 withPool ::
-  Has Logger testEnvironment =>
+  (Has Logger testEnvironment) =>
   testEnvironment ->
   HgePool ->
   HgePoolArguments ->
@@ -209,18 +209,18 @@ spawnServer testEnv (HgeConfig {hgeConfigEnvironmentVars}) = do
         ]
     )
       { env =
-          Just $
-            ("HASURA_GRAPHQL_GRACEFUL_SHUTDOWN_TIMEOUT", "0")
-              : ("HASURA_GRAPHQL_ADMIN_SECRET", T.unpack adminSecret)
-              : allEnv,
+          Just
+            $ ("HASURA_GRAPHQL_GRACEFUL_SHUTDOWN_TIMEOUT", "0")
+            : ("HASURA_GRAPHQL_ADMIN_SECRET", T.unpack adminSecret)
+            : allEnv,
        std_in = CreatePipe,
        std_out = CreatePipe,
        std_err = CreatePipe,
        create_group = True
      }
      `catchAny` ( \exn ->
-                     error $
-                       unlines
+                     error
+                       $ unlines
                         [ "Failed to spawn Graphql-Engine process:",
                           show exn
                         ]
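spawnServer above combines three things: prepending mandatory variables to the full environment list, creating the process in its own group, and converting any spawn-time exception into a readable error. A hedged sketch with a placeholder binary name; note that env = Just replaces the whole environment, which is why the harness builds the complete list (allEnv) rather than passing only the extras:

{-# LANGUAGE ScopedTypeVariables #-}

module SpawnSketch where

import Control.Exception (SomeException, catch)
import System.Environment (getEnvironment)
import System.Process

spawnServer :: [(String, String)] -> IO ProcessHandle
spawnServer extraEnv = go `catch` report
  where
    go = do
      inherited <- getEnvironment
      (_, _, _, processHandle) <-
        createProcess
          (proc "graphql-engine" ["serve"]) -- placeholder binary name
            { env = Just (("HASURA_GRAPHQL_GRACEFUL_SHUTDOWN_TIMEOUT", "0") : extraEnv <> inherited),
              std_out = CreatePipe,
              std_err = CreatePipe,
              create_group = True
            }
      pure processHandle
    report (exn :: SomeException) =
      error (unlines ["Failed to spawn graphql-engine process:", show exn])

main :: IO ()
main = () <$ spawnServer [("HASURA_GRAPHQL_ENABLE_CONSOLE", "false")]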
diff --git a/server/lib/test-harness/src/Harness/Services/GraphqlEngine.hs b/server/lib/test-harness/src/Harness/Services/GraphqlEngine.hs
index 1102067a031..28a3b2747c1 100644
--- a/server/lib/test-harness/src/Harness/Services/GraphqlEngine.hs
+++ b/server/lib/test-harness/src/Harness/Services/GraphqlEngine.hs
@@ -40,8 +40,8 @@ withHge ::
 withHge hgeConfig specs = do
   flip aroundWith specs \action testEnvironment -> runManaged do
     server <- drawFromPool testEnvironment hgeConfig
-    liftIO $
-      action
+    liftIO
+      $ action
        ( PostGraphql (hgePostGraphql (server, testEnvironment)),
          (ShouldReturnYamlF (shouldReturnYamlFInternal defaultOptions), (server, testEnvironment))
        )
diff --git a/server/lib/test-harness/src/Harness/Services/Source/DCPostgres.hs b/server/lib/test-harness/src/Harness/Services/Source/DCPostgres.hs
index e09c031f2f0..a639000ff2f 100644
--- a/server/lib/test-harness/src/Harness/Services/Source/DCPostgres.hs
+++ b/server/lib/test-harness/src/Harness/Services/Source/DCPostgres.hs
@@ -222,9 +222,9 @@ dc_pg_track_table testEnvironment (Schema.Table {tableName, tableColumns}) = do
     columnConfig :: Schema.Column -> Maybe J.Pair
     columnConfig col = do
       alias <- Schema.columnGqlAlias col
-      return $
-        ( K.fromText $ Schema.columnName col,
-          [yaml|
+      return
+        $ ( K.fromText $ Schema.columnName col,
+            [yaml|
            custom_name: *alias
          |]
-        )
+          )
diff --git a/server/lib/test-harness/src/Harness/Services/Source/Postgres.hs b/server/lib/test-harness/src/Harness/Services/Source/Postgres.hs
index 549a87a698c..80c625a1dfe 100644
--- a/server/lib/test-harness/src/Harness/Services/Source/Postgres.hs
+++ b/server/lib/test-harness/src/Harness/Services/Source/Postgres.hs
@@ -150,9 +150,9 @@ pg_track_table testEnvironment (Schema.Table {tableName, tableColumns}) = do
     columnConfig :: Schema.Column -> Maybe J.Pair
     columnConfig col = do
       alias <- Schema.columnGqlAlias col
-      return $
-        ( K.fromText $ Schema.columnName col,
-          [yaml|
+      return
+        $ ( K.fromText $ Schema.columnName col,
+            [yaml|
            custom_name: *alias
          |]
-        )
+          )
diff --git a/server/lib/test-harness/src/Harness/Subscriptions.hs b/server/lib/test-harness/src/Harness/Subscriptions.hs
index 0d1f1af922e..48c4e8fc4a8 100644
--- a/server/lib/test-harness/src/Harness/Subscriptions.hs
+++ b/server/lib/test-harness/src/Harness/Subscriptions.hs
@@ -191,7 +191,7 @@ withSubscriptionsHeaders headers = aroundAllWith \actionWithSubAndTest testEnv -

 -- | Get the next response received on a subscription.
 -- Blocks until data is available.
-getNextResponse :: HasCallStack => SubscriptionHandle -> IO Value
+getNextResponse :: (HasCallStack) => SubscriptionHandle -> IO Value
 getNextResponse handle = do
   let time = seconds subscriptionsTimeoutTime
   res <- timeout (fromIntegral $ diffTimeToMicroSeconds time) $ takeMVar (unSubscriptionHandle handle)
@@ -202,10 +202,10 @@ subscriptionsTimeoutTime = 20

 mkInitMessageHeaders :: HgeServerInstance -> [(T.Text, T.Text)] -> Value
 mkInitMessageHeaders hgeInstance hdrs =
-  ( toJSON $
-      Map.fromList $
-        [ ("content-type", "application/json"),
+  ( toJSON
+      $ Map.fromList
+      $ [ ("content-type", "application/json"),
          ("X-Hasura-Admin-Secret", hgeAdminSecret hgeInstance)
        ]
-          <> hdrs
+      <> hdrs
  )
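getNextResponse above is takeMVar bounded by a timeout: a reader thread pushes each websocket payload into an MVar, and the test blocks on it for at most 20 seconds. A standalone sketch with a stand-in producer thread:

module NextResponse where

import Control.Concurrent (forkIO, threadDelay)
import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar)
import System.Timeout (timeout)

main :: IO ()
main = do
  box <- newEmptyMVar
  _ <- forkIO (threadDelay 100000 >> putMVar box "subscription payload")
  -- 20 seconds, expressed in microseconds as 'timeout' expects.
  result <- timeout (20 * 1000000) (takeMVar box)
  case result of
    Nothing -> error "Waiting for a response took too long"
    Just payload -> putStrLn payload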
stringifyNumbers = diff --git a/server/lib/test-harness/src/Harness/Test/Fixture.hs b/server/lib/test-harness/src/Harness/Test/Fixture.hs index de8d04c6f85..07f55e1c43c 100644 --- a/server/lib/test-harness/src/Harness/Test/Fixture.hs +++ b/server/lib/test-harness/src/Harness/Test/Fixture.hs @@ -311,7 +311,7 @@ fixtureRepl Fixture {name, mkLocalTestEnvironment, setupTeardown} globalTestEnvi runSetupActions :: Logger -> [SetupAction] -> IO (IO ()) runSetupActions logger acts = go acts [] where - log :: forall a. LoggableMessage a => a -> IO () + log :: forall a. (LoggableMessage a) => a -> IO () log = runLogger logger go :: [SetupAction] -> [IO ()] -> IO (IO ()) @@ -460,14 +460,14 @@ withPermissions (toList -> permissions) spec = do where succeeding :: (ActionWith TestEnvironment -> IO ()) -> ActionWith TestEnvironment -> IO () succeeding k test = k \testEnvironment -> do - for_ permissions $ - postMetadata_ testEnvironment - . Permissions.createPermissionMetadata testEnvironment + for_ permissions + $ postMetadata_ testEnvironment + . Permissions.createPermissionMetadata testEnvironment test testEnvironment {permissions = NonAdmin permissions} `finally` do - for_ permissions $ - postMetadata_ testEnvironment - . Permissions.dropPermissionMetadata testEnvironment + for_ permissions + $ postMetadata_ testEnvironment + . Permissions.dropPermissionMetadata testEnvironment failing :: (ActionWith TestEnvironment -> IO ()) -> ActionWith TestEnvironment -> IO () failing k test = k \testEnvironment -> do @@ -475,16 +475,16 @@ withPermissions (toList -> permissions) spec = do -- they lead to test failures. for_ (subsequences permissions) \subsequence -> unless (subsequence == permissions) do - for_ subsequence $ - postMetadata_ testEnvironment - . Permissions.createPermissionMetadata testEnvironment + for_ subsequence + $ postMetadata_ testEnvironment + . Permissions.createPermissionMetadata testEnvironment let attempt :: IO () -> IO () attempt x = try x >>= \case Right _ -> - expectationFailure $ - mconcat + expectationFailure + $ mconcat [ "Unexpectedly adequate permissions:\n", ppShow subsequence ] @@ -492,6 +492,6 @@ withPermissions (toList -> permissions) spec = do pure () attempt (test testEnvironment {permissions = NonAdmin subsequence}) `finally` do - for_ subsequence $ - postMetadata_ testEnvironment - . Permissions.dropPermissionMetadata testEnvironment + for_ subsequence + $ postMetadata_ testEnvironment + . Permissions.dropPermissionMetadata testEnvironment diff --git a/server/lib/test-harness/src/Harness/Test/ScalarType.hs b/server/lib/test-harness/src/Harness/Test/ScalarType.hs index 0f24bbe259f..aa222e77a95 100644 --- a/server/lib/test-harness/src/Harness/Test/ScalarType.hs +++ b/server/lib/test-harness/src/Harness/Test/ScalarType.hs @@ -159,8 +159,8 @@ backendScalarValue bsv fn = case fn bsv of defaultSerialType :: ScalarType defaultSerialType = - TCustomType $ - defaultBackendScalarType + TCustomType + $ defaultBackendScalarType { bstMssql = Just "INT IDENTITY(1,1)", bstCitus = Just "SERIAL", -- cockroachdb's serial behaves differently than postgresql's serial: diff --git a/server/lib/test-harness/src/Harness/TestEnvironment.hs b/server/lib/test-harness/src/Harness/TestEnvironment.hs index 862e9e6986d..b14a07ac8f0 100644 --- a/server/lib/test-harness/src/Harness/TestEnvironment.hs +++ b/server/lib/test-harness/src/Harness/TestEnvironment.hs @@ -172,7 +172,7 @@ stopServer Server {thread} = Async.cancel thread -- | Log an unstructured trace string. 
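The withPermissions hunks above keep the same bracketing discipline throughout: grant each permission, run the test, then drop the permissions again inside a finally so teardown survives test failures. A minimal sketch of that shape, with hypothetical grant/revoke actions standing in for the metadata calls:

import Control.Exception (finally)
import Data.Foldable (for_)

withGranted :: [perm] -> (perm -> IO ()) -> (perm -> IO ()) -> IO a -> IO a
withGranted perms grant revoke body = do
  for_ perms grant
  body `finally` for_ perms revoke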
Should only be used directly in specs, -- not in the Harness modules. {-# ANN testLogTrace ("HLINT: ignore" :: String) #-} -testLogTrace :: TraceString a => TestEnvironment -> a -> IO () +testLogTrace :: (TraceString a) => TestEnvironment -> a -> IO () testLogTrace testEnv = testLogMessage testEnv . logTrace @@ -186,7 +186,7 @@ testLogShow testEnv = -- in the Harness modules, not in Specs. -- -- This should ideally be replaced with more specific logging functions. -testLogHarness :: TraceString a => TestEnvironment -> a -> IO () +testLogHarness :: (TraceString a) => TestEnvironment -> a -> IO () testLogHarness testEnv = testLogMessage testEnv . logHarness -- Compatibility with the new, componentised fixtures: @@ -239,10 +239,10 @@ getSchemaNameInternal testEnv = getSchemaNameByTestIdAndBackendType (fmap backen getSchemaNameByTestIdAndBackendType :: Maybe BackendType -> UniqueTestId -> SchemaName getSchemaNameByTestIdAndBackendType Nothing _ = SchemaName "hasura" -- the `Nothing` case is for tests with multiple schemas getSchemaNameByTestIdAndBackendType (Just BigQuery) uniqueTestId = - SchemaName $ - T.pack $ - "hasura_test_" - <> show uniqueTestId + SchemaName + $ T.pack + $ "hasura_test_" + <> show uniqueTestId getSchemaNameByTestIdAndBackendType (Just Postgres) _ = SchemaName Constants.postgresDb getSchemaNameByTestIdAndBackendType (Just SQLServer) _ = SchemaName $ T.pack Constants.sqlserverDb getSchemaNameByTestIdAndBackendType (Just Citus) _ = SchemaName Constants.citusDb diff --git a/server/lib/test-harness/src/Harness/Webhook.hs b/server/lib/test-harness/src/Harness/Webhook.hs index 157806a3161..0a1583e9079 100644 --- a/server/lib/test-harness/src/Harness/Webhook.hs +++ b/server/lib/test-harness/src/Harness/Webhook.hs @@ -56,24 +56,25 @@ run = mkTestResource do let mkJSONPathE = either (error . T.unpack) id . 
parseJSONPath eventJSONPath = mkJSONPathE "$.event.data" in iResultToMaybe =<< executeJSONPath eventJSONPath <$> jsonBody - liftIO $ - Chan.writeChan eventsQueueChan $ - fromMaybe (error "error in parsing the event data from the body") eventDataPayload - thread <- Async.async $ - Spock.runSpockNoBanner port $ - Spock.spockT id $ do - Spock.get "/" $ - Spock.json $ - J.String "OK" - Spock.post "/hello" $ - Spock.json $ - J.String "world" - Spock.post "/echo" $ do - extractEventDataInsertIntoEventQueue - Spock.json $ J.object ["success" J..= True] - Spock.post "/nextRetry" $ do - extractEventDataInsertIntoEventQueue - Spock.setStatus HTTP.status503 + liftIO + $ Chan.writeChan eventsQueueChan + $ fromMaybe (error "error in parsing the event data from the body") eventDataPayload + thread <- Async.async + $ Spock.runSpockNoBanner port + $ Spock.spockT id + $ do + Spock.get "/" + $ Spock.json + $ J.String "OK" + Spock.post "/hello" + $ Spock.json + $ J.String "world" + Spock.post "/echo" $ do + extractEventDataInsertIntoEventQueue + Spock.json $ J.object ["success" J..= True] + Spock.post "/nextRetry" $ do + extractEventDataInsertIntoEventQueue + Spock.setStatus HTTP.status503 let server = Server {port = fromIntegral port, urlPrefix, thread} pure diff --git a/server/lib/test-harness/src/Harness/Yaml.hs b/server/lib/test-harness/src/Harness/Yaml.hs index 69ec53cfc5a..1dbbff92957 100644 --- a/server/lib/test-harness/src/Harness/Yaml.hs +++ b/server/lib/test-harness/src/Harness/Yaml.hs @@ -109,7 +109,7 @@ parseToMatch _ actual = actual shouldReturnYamlF :: (HasCallStack, Has ShouldReturnYamlF testEnvironment) => testEnvironment -> (Value -> IO Value) -> IO Value -> Value -> IO () shouldReturnYamlF = getShouldReturnYamlF . getter -shouldReturnYamlFInternal :: HasCallStack => Options -> (Value -> IO Value) -> IO Value -> Value -> IO () +shouldReturnYamlFInternal :: (HasCallStack) => Options -> (Value -> IO Value) -> IO Value -> Value -> IO () shouldReturnYamlFInternal options transform actualIO expected = do actual <- actualIO >>= transform >>= \actual -> @@ -129,12 +129,12 @@ shouldReturnYamlFInternal options transform actualIO expected = do -- Since @Data.Yaml@ uses the same underlying 'Value' type as -- @Data.Aeson@, we could pull that in as a dependency and alias -- some of these functions accordingly. -shouldBeYaml :: HasCallStack => Value -> Value -> IO () +shouldBeYaml :: (HasCallStack) => Value -> Value -> IO () shouldBeYaml actual expected = do shouldBe (Visual actual) (Visual expected) -- | Assert that the expected json value should be a subset of the actual value, in the sense of 'jsonSubsetOf'. 
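The webhook above is a small Spock application: "/" answers OK, "/echo" records the event payload and reports success, and "/nextRetry" records it but replies 503 so the event trigger retries. A runnable sketch of the same routes, minus the event queue; the Web.Spock.Core import path and port 8787 are assumptions:

{-# LANGUAGE OverloadedStrings #-}

import qualified Data.Aeson as J
import qualified Network.HTTP.Types.Status as HTTP
import qualified Web.Spock.Core as Spock

main :: IO ()
main =
  Spock.runSpockNoBanner 8787
    $ Spock.spockT id
    $ do
      Spock.get "/"
        $ Spock.json (J.String "OK")
      Spock.post "/echo"
        $ Spock.json (J.object ["success" J..= True])
      Spock.post "/nextRetry"
        $ Spock.setStatus HTTP.status503 -- force the caller to retry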
-shouldAtLeastBe :: HasCallStack => Value -> Value -> IO () +shouldAtLeastBe :: (HasCallStack) => Value -> Value -> IO () shouldAtLeastBe actual expected | expected `jsonSubsetOf` actual = return () shouldAtLeastBe actual expected = expectationFailure $ "The expected value:\n\n" <> show (Visual expected) <> "\nis not a subset of the actual value:\n\n" <> show (Visual actual) @@ -155,8 +155,8 @@ jsonSubsetOf _sub _sup = False subobjectOf :: KM.KeyMap J.Value -> KM.KeyMap J.Value -> Bool subobjectOf sub sup = - KM.foldr (&&) True $ - KM.alignWith + KM.foldr (&&) True + $ KM.alignWith ( \case This _ -> False -- key is only in the sub That _ -> True -- key is only in sup diff --git a/server/lib/upgrade-tests/src/Hasura/UpgradeTests/Database.hs b/server/lib/upgrade-tests/src/Hasura/UpgradeTests/Database.hs index dedeb08dc88..6637e6bfcb4 100644 --- a/server/lib/upgrade-tests/src/Hasura/UpgradeTests/Database.hs +++ b/server/lib/upgrade-tests/src/Hasura/UpgradeTests/Database.hs @@ -38,13 +38,13 @@ newtype DatabaseSchema = DatabaseSchema dbContainer :: TC.TestContainer Database dbContainer = do container <- - TC.run $ - TC.containerRequest (TC.fromTag ("postgis/postgis:15-3.3-alpine")) - & TC.setSuffixedName "hge-test-upgrade-db" - & TC.setCmd ["-F"] - & TC.setEnv [("POSTGRES_PASSWORD", "password")] - & TC.setExpose [5432] - & TC.setWaitingFor (TC.waitUntilTimeout 30 (TC.waitUntilMappedPortReachable 5432)) + TC.run + $ TC.containerRequest (TC.fromTag ("postgis/postgis:15-3.3-alpine")) + & TC.setSuffixedName "hge-test-upgrade-db" + & TC.setCmd ["-F"] + & TC.setEnv [("POSTGRES_PASSWORD", "password")] + & TC.setExpose [5432] + & TC.setWaitingFor (TC.waitUntilTimeout 30 (TC.waitUntilMappedPortReachable 5432)) -- The container has a complicated startup script that starts the server, -- shuts it down, then starts it again, so waiting for the port is not enough. liftIO $ sleep 5 diff --git a/server/lib/upgrade-tests/src/Hasura/UpgradeTests/Server.hs b/server/lib/upgrade-tests/src/Hasura/UpgradeTests/Server.hs index 4ba7cda9e30..5193ec3b6d4 100644 --- a/server/lib/upgrade-tests/src/Hasura/UpgradeTests/Server.hs +++ b/server/lib/upgrade-tests/src/Hasura/UpgradeTests/Server.hs @@ -57,18 +57,18 @@ withBaseHge version (DatabaseSchema schemaUrl) f = TC.runTestContainer TC.defaultConfig do port <- liftIO getFreePort _container <- - TC.run $ - TC.containerRequest (TC.fromTag ("hasura/graphql-engine:" <> version)) - & TC.setSuffixedName "hge-test-upgrade-base-server" - & TC.setCmd - [ "graphql-engine", - "--database-url", - Text.pack schemaUrl, - "serve", - "--server-port", - tshow port - ] - & TC.withNetwork hostNetwork + TC.run + $ TC.containerRequest (TC.fromTag ("hasura/graphql-engine:" <> version)) + & TC.setSuffixedName "hge-test-upgrade-base-server" + & TC.setCmd + [ "graphql-engine", + "--database-url", + Text.pack schemaUrl, + "serve", + "--server-port", + tshow port + ] + & TC.withNetwork hostNetwork let url = "http://localhost:" <> show port liftIO do Http.healthCheck $ url <> "/healthz" diff --git a/server/lib/upgrade-tests/src/Main.hs b/server/lib/upgrade-tests/src/Main.hs index e92d1657797..dde82c8ab80 100644 --- a/server/lib/upgrade-tests/src/Main.hs +++ b/server/lib/upgrade-tests/src/Main.hs @@ -118,7 +118,7 @@ introspectionQuery = J.object ["query" .= Text.decodeUtf8 rawQuery] -- | Gets the length of @.data.__schema.types@ from an introspected schema. -- -- We use this to ensure that the metadata looks correct. -typeLength :: forall m. MonadFail m => J.Value -> m Int +typeLength :: forall m. 
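The restyled subobjectOf above folds alignWith over the two key maps: a key that exists only in the candidate subset fails the check (This), a key only in the superset is ignored (That), and shared keys compare recursively (These). The same decision table written over ordinary Maps, with the leaf comparison abstracted out:

import qualified Data.Map.Strict as Map

subobjectOfSketch ::
  (Ord k) => (v -> v -> Bool) -> Map.Map k v -> Map.Map k v -> Bool
subobjectOfSketch leafSubsetOf sub sup =
  and
    [ case Map.lookup k sup of
        Nothing -> False -- key only in the subset: not a subobject
        Just v' -> leafSubsetOf v v' -- shared key: compare the values
      | (k, v) <- Map.toList sub
    ]

-- Keys that appear only in 'sup' never enter the fold, so they are
-- accepted for free, matching the 'That _ -> True' case above.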
(MonadFail m) => J.Value -> m Int typeLength schema = do types <- getProperty "data" schema >>= getProperty "__schema" >>= getProperty "types" case types of diff --git a/server/src-exec/Main.hs b/server/src-exec/Main.hs index a2708454c96..717704e4f6d 100644 --- a/server/src-exec/Main.hs +++ b/server/src-exec/Main.hs @@ -112,13 +112,13 @@ runApp env (HGEOptions rci metadataDbUrl hgeCmd) = do let Loggers _ logger _ = appEnvLoggers appEnv _idleGCThread <- - C.forkImmortal "ourIdleGC" logger $ - GC.ourIdleGC logger (seconds 0.3) (seconds 10) (seconds 60) + C.forkImmortal "ourIdleGC" logger + $ GC.ourIdleGC logger (seconds 0.3) (seconds 10) (seconds 60) runAppM appEnv do appStateRef <- initialiseAppContext env serveOptions appInit - lowerManagedT $ - runHGEServer (const $ pure ()) appStateRef initTime Nothing OSSConsole ekgStore + lowerManagedT + $ runHGEServer (const $ pure ()) appStateRef initTime Nothing OSSConsole ekgStore HCExport -> do metadataConnection <- initMetadataConnectionInfo env metadataDbUrl rci res <- runTxWithMinimalPool metadataConnection fetchMetadataFromCatalog diff --git a/server/src-lib/Control/Concurrent/Extended.hs b/server/src-lib/Control/Concurrent/Extended.hs index 47105a3aa77..1a9e04a879c 100644 --- a/server/src-lib/Control/Concurrent/Extended.hs +++ b/server/src-lib/Control/Concurrent/Extended.hs @@ -57,7 +57,7 @@ sleep = Base.threadDelay . round . Microseconds -- | Note: Please consider using 'forkManagedT' instead to ensure reliable -- resource cleanup. forkImmortal :: - ForkableMonadIO m => + (ForkableMonadIO m) => -- | A label describing this thread's function (see 'labelThread'). String -> Logger Hasura -> @@ -96,7 +96,7 @@ newtype ThreadShutdown m = ThreadShutdown {tsThreadShutdown :: m ()} -- used. Generally, the result should only be used later in the same ManagedT -- scope. forkManagedT :: - ForkableMonadIO m => + (ForkableMonadIO m) => String -> Logger Hasura -> m Void -> @@ -124,7 +124,7 @@ data Forever m = forall a. Forever a (a -> m a) -- For reference, this function is used to run the async actions processor. Check -- `asyncActionsProcessor` forkManagedTWithGracefulShutdown :: - ForkableMonadIO m => + (ForkableMonadIO m) => String -> Logger Hasura -> ThreadShutdown m -> @@ -138,30 +138,31 @@ forkManagedTWithGracefulShutdown label logger (ThreadShutdown threadShutdownHand liftIO $ unLogger logger (ImmortalThreadRestarted label) -- In this case, we are handling unexpected exceptions. -- i.e This does not catch the asynchronous exception which stops the thread. 
- Immortal.onUnexpectedFinish this logAndPause $ - ( do - let mLoop (Forever loopFunctionInitArg loopFunction) = - flip iterateM_ loopFunctionInitArg $ \args -> do - liftIO $ - STM.atomically $ do - STM.readTVar threadStateTVar >>= \case - ThreadShutdownInitiated -> do - -- signal to the finalizer that we are now blocking - -- and blocking forever since this - -- var moves monotonically from forked -> shutdown -> blocking - STM.writeTVar threadStateTVar ThreadBlocking - ThreadBlocking -> STM.retry - ThreadForked -> pure () - loopFunction args - t <- LA.async $ mLoop =<< loopIteration - LA.link t - void $ LA.wait t - ) + Immortal.onUnexpectedFinish this logAndPause + $ ( do + let mLoop (Forever loopFunctionInitArg loopFunction) = + flip iterateM_ loopFunctionInitArg $ \args -> do + liftIO + $ STM.atomically + $ do + STM.readTVar threadStateTVar >>= \case + ThreadShutdownInitiated -> do + -- signal to the finalizer that we are now blocking + -- and blocking forever since this + -- var moves monotonically from forked -> shutdown -> blocking + STM.writeTVar threadStateTVar ThreadBlocking + ThreadBlocking -> STM.retry + ThreadForked -> pure () + loopFunction args + t <- LA.async $ mLoop =<< loopIteration + LA.link t + void $ LA.wait t + ) ) ( \thread -> do - liftIO $ - STM.atomically $ - STM.modifyTVar' threadStateTVar (const ThreadShutdownInitiated) + liftIO + $ STM.atomically + $ STM.modifyTVar' threadStateTVar (const ThreadShutdownInitiated) -- the threadShutdownHandler here will wait for any in-flight events -- to finish processing {- @@ -201,8 +202,9 @@ forkManagedTWithGracefulShutdown label logger (ThreadShutdown threadShutdownHand processing events without the graceful shutdown timeout. -} threadShutdownHandler - liftIO $ - STM.atomically $ do + liftIO + $ STM.atomically + $ do STM.readTVar threadStateTVar >>= STM.check . (== ThreadBlocking) unLogger logger (ImmortalThreadStopping label) liftIO $ Immortal.stop thread diff --git a/server/src-lib/Control/Monad/Circular.hs b/server/src-lib/Control/Monad/Circular.hs index f06a8240b6c..abb06467f03 100644 --- a/server/src-lib/Control/Monad/Circular.hs +++ b/server/src-lib/Control/Monad/Circular.hs @@ -115,7 +115,7 @@ instance MonadTrans (CircularT k v) where -- | Allow code in 'CircularT' to have access to any underlying state -- capabilities, hiding the fact that 'CircularT' itself is a state monad. -instance MonadState s m => MonadState s (CircularT k v m) where +instance (MonadState s m) => MonadState s (CircularT k v m) where get = lift get put x = lift $ put x diff --git a/server/src-lib/Control/Monad/Memoize.hs b/server/src-lib/Control/Monad/Memoize.hs index 01e9acd62f0..dc8a8ae0d34 100644 --- a/server/src-lib/Control/Monad/Memoize.hs +++ b/server/src-lib/Control/Monad/Memoize.hs @@ -85,7 +85,7 @@ which allows us to get sharing mostly for free. The memoization strategy also annotates cached parsers with a Unique that can be used to break cycles while traversing the graph, so we get observable sharing as well. -} -class Monad m => MonadMemoize m where +class (Monad m) => MonadMemoize m where -- | Memoizes a parser constructor function for the extent of a single schema -- construction process. This is mostly useful for recursive parsers; -- see Note [Tying the knot] for more details. 
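The reformatted loop above gates every iteration on a three-state TVar: ThreadForked lets the iteration run, ThreadShutdownInitiated is acknowledged by flipping the var to ThreadBlocking, and ThreadBlocking retries so the transaction parks until the finalizer stops the thread. A minimal sketch of that gate and of the finalizer's wait:

import Control.Concurrent.STM

data ThreadState = ThreadForked | ThreadShutdownInitiated | ThreadBlocking
  deriving (Eq)

-- Run at the top of each loop iteration.
gateIteration :: TVar ThreadState -> STM ()
gateIteration var = do
  st <- readTVar var
  case st of
    ThreadForked -> pure () -- normal operation: run the iteration
    ThreadShutdownInitiated -> writeTVar var ThreadBlocking -- acknowledge
    ThreadBlocking -> retry -- park; only the finalizer proceeds from here

-- The finalizer flips the state elsewhere, then blocks on the ack:
awaitBlocking :: TVar ThreadState -> STM ()
awaitBlocking var = readTVar var >>= check . (== ThreadBlocking)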
@@ -112,7 +112,7 @@ class Monad m => MonadMemoize m where m p instance - MonadMemoize m => + (MonadMemoize m) => MonadMemoize (ReaderT a m) where memoizeOn name key = mapReaderT (memoizeOn name key) @@ -133,16 +133,16 @@ newtype MemoizeT m a = MemoizeT -- | Allow code in 'MemoizeT' to have access to any underlying state capabilities, -- hiding the fact that 'MemoizeT' itself is a state monad. -instance MonadState s m => MonadState s (MemoizeT m) where +instance (MonadState s m) => MonadState s (MemoizeT m) where get = lift get put = lift . put -runMemoizeT :: forall m a. Monad m => MemoizeT m a -> m a +runMemoizeT :: forall m a. (Monad m) => MemoizeT m a -> m a runMemoizeT = flip evalStateT mempty . unMemoizeT -- | see Note [MemoizeT requires MonadIO] instance - MonadIO m => + (MonadIO m) => MonadMemoize (MemoizeT m) where memoizeOn name key buildParser = MemoizeT do diff --git a/server/src-lib/Control/Monad/Trans/Managed.hs b/server/src-lib/Control/Monad/Trans/Managed.hs index 461a6756539..6601a349aef 100644 --- a/server/src-lib/Control/Monad/Trans/Managed.hs +++ b/server/src-lib/Control/Monad/Trans/Managed.hs @@ -47,19 +47,19 @@ newtype ManagedT m a = ManagedT {runManagedT :: forall r. (a -> m r) -> m r} deriving (MonadTrans) via Codensity -- | Allocate a resource by providing setup and finalizer actions. -allocate :: MonadBaseControl IO m => m a -> (a -> m b) -> ManagedT m a +allocate :: (MonadBaseControl IO m) => m a -> (a -> m b) -> ManagedT m a allocate setup finalize = ManagedT (bracket setup finalize) -- | Allocate a resource but do not return a reference to it. -allocate_ :: MonadBaseControl IO m => m a -> m b -> ManagedT m () +allocate_ :: (MonadBaseControl IO m) => m a -> m b -> ManagedT m () allocate_ setup finalize = ManagedT (\k -> bracket_ setup finalize (k ())) -- | Run the provided computation by returning its result, and run any finalizers. -- Watch out: this function might leak finalized resources. -lowerManagedT :: Monad m => ManagedT m a -> m a +lowerManagedT :: (Monad m) => ManagedT m a -> m a lowerManagedT m = runManagedT m return -hoistManagedTReaderT :: Monad m => r -> ManagedT (ReaderT r m) a -> ManagedT m a +hoistManagedTReaderT :: (Monad m) => r -> ManagedT (ReaderT r m) a -> ManagedT m a hoistManagedTReaderT r cod = ManagedT $ \k -> runReaderT (runManagedT cod (lift . k)) r @@ -70,7 +70,7 @@ hoistManagedTReaderT r cod = ManagedT $ \k -> -- -- We need to be careful not to leak allocated resources via the use of -- recursively-defined monadic actions when making use of this instance. -instance MonadIO m => MonadFix (ManagedT m) where +instance (MonadIO m) => MonadFix (ManagedT m) where mfix f = ManagedT \k -> do m <- liftIO C.newEmptyMVar ans <- liftIO $ unsafeDupableInterleaveIO (C.readMVar m) diff --git a/server/src-lib/Database/MSSQL/Transaction.hs b/server/src-lib/Database/MSSQL/Transaction.hs index bdf2852d48e..b548cfa4d38 100644 --- a/server/src-lib/Database/MSSQL/Transaction.hs +++ b/server/src-lib/Database/MSSQL/Transaction.hs @@ -82,7 +82,8 @@ runTxE :: ExceptT e m a runTxE ef txIsolation tx pool = do withMSSQLPool pool (asTransaction ef txIsolation (`execTx` tx)) - >>= hoistEither . mapLeft (ef . MSSQLConnError) + >>= hoistEither + . mapLeft (ef . MSSQLConnError) -- | Useful for building transactions which return no data. -- @@ -134,7 +135,7 @@ singleRowQueryE ef = rawQueryE ef singleRowResult -- This function simply concatenates each single-column row into one long 'Text' string. forJsonQueryE :: forall m e. 
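ManagedT above is the Codensity shape: a computation is a function of its own continuation, and allocate simply wraps that continuation in bracket, so every finalizer runs when the enclosing scope exits. A minimal sketch specialised to IO:

{-# LANGUAGE RankNTypes #-}

import Control.Exception (bracket)

newtype ManagedIO a = ManagedIO {runManagedIO :: forall r. (a -> IO r) -> IO r}

allocateIO :: IO a -> (a -> IO ()) -> ManagedIO a
allocateIO setup finalize = ManagedIO (bracket setup finalize)

lowerManagedIO :: ManagedIO a -> IO a
lowerManagedIO m = runManagedIO m return
-- As the original warns for lowerManagedT: by the time the result
-- escapes the continuation, its finalizer has already run.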
- MonadIO m => + (MonadIO m) => (MSSQLTxError -> e) -> ODBC.Query -> TxET e m Text @@ -184,7 +185,7 @@ buildGenericQueryTxE errorF query convertQ runQuery = TxET $ ReaderT $ withExceptT errorF . execQuery query convertQ . runQuery -- | Map the error type for a 'TxET'. -withTxET :: Monad m => (e1 -> e2) -> TxET e1 m a -> TxET e2 m a +withTxET :: (Monad m) => (e1 -> e2) -> TxET e1 m a -> TxET e2 m a withTxET f (TxET m) = TxET $ hoist (withExceptT f) m -- | A successful result from a query is a list of rows where each row contains @@ -206,9 +207,9 @@ rawQueryE :: TxET e m a rawQueryE ef rf q = do rows <- buildGenericQueryTxE ef q id ODBC.query - liftEither $ - mapLeft (ef . MSSQLQueryError q . ODBC.DataRetrievalError) $ - rf (MSSQLResult rows) + liftEither + $ mapLeft (ef . MSSQLQueryError q . ODBC.DataRetrievalError) + $ rf (MSSQLResult rows) -- | Combinator for abstracting over the query type and ensuring we catch exceptions. -- @@ -256,7 +257,7 @@ instance Show TxIsolation where -- | Wraps an action in a transaction. Rolls back on errors. asTransaction :: forall e a m. - MonadIO m => + (MonadIO m) => (MSSQLTxError -> e) -> TxIsolation -> (ODBC.Connection -> ExceptT e m a) -> @@ -279,14 +280,14 @@ asTransaction ef txIsolation action conn = do withExceptT ef $ execTx conn rollbackTx throwError err -beginTx :: MonadIO m => TxT m () +beginTx :: (MonadIO m) => TxT m () beginTx = unitQuery "BEGIN TRANSACTION" -setTxIsoLevelTx :: MonadIO m => TxIsolation -> TxT m () +setTxIsoLevelTx :: (MonadIO m) => TxIsolation -> TxT m () setTxIsoLevelTx txIso = unitQuery $ ODBC.rawUnescapedText $ "SET TRANSACTION ISOLATION LEVEL " <> tshow txIso <> ";" -commitTx :: MonadIO m => TxT m () +commitTx :: (MonadIO m) => TxT m () commitTx = getTransactionState >>= \case TSActive -> @@ -296,7 +297,7 @@ commitTx = TSNoActive -> throwError $ MSSQLInternal "No active transaction exist; cannot commit" -rollbackTx :: MonadIO m => TxT m () +rollbackTx :: (MonadIO m) => TxT m () rollbackTx = let rollback = unitQuery "ROLLBACK TRANSACTION" in getTransactionState >>= \case @@ -319,6 +320,6 @@ getTransactionState = 0 -> pure TSNoActive -1 -> pure TSUncommittable _ -> - throwError $ - MSSQLQueryError query $ - ODBC.DataRetrievalError "Unexpected value for XACT_STATE" + throwError + $ MSSQLQueryError query + $ ODBC.DataRetrievalError "Unexpected value for XACT_STATE" diff --git a/server/src-lib/Hasura/App.hs b/server/src-lib/Hasura/App.hs index 07a2b596e72..0921635f8e9 100644 --- a/server/src-lib/Hasura/App.hs +++ b/server/src-lib/Hasura/App.hs @@ -257,8 +257,8 @@ initMetadataConnectionInfo :: PostgresConnInfo (Maybe UrlConf) -> m PG.ConnInfo initMetadataConnectionInfo env metadataDbURL dbURL = - fmap bciMetadataConnInfo $ - initBasicConnectionInfo + fmap bciMetadataConnInfo + $ initBasicConnectionInfo env metadataDbURL dbURL @@ -385,8 +385,8 @@ initialiseAppEnv env BasicConnectionInfo {..} serveOptions@ServeOptions {..} liv loggers@(Loggers _loggerCtx logger pgLogger) <- mkLoggers soEnabledLogTypes soLogLevel -- SIDE EFFECT: print a warning if no admin secret is set. - when (null soAdminSecret) $ - unLogger + when (null soAdminSecret) + $ unLogger logger StartupLog { slLogLevel = LevelWarn, @@ -411,8 +411,8 @@ initialiseAppEnv env BasicConnectionInfo {..} serveOptions@ServeOptions {..} liv -- Migrate the catalog and fetch the metdata. 
metadataWithVersion <- - lift $ - migrateCatalogAndFetchMetadata + lift + $ migrateCatalogAndFetchMetadata logger metadataDbPool bciDefaultPostgres @@ -553,13 +553,13 @@ migrateCatalogAndFetchMetadata -- DB has been set correctly currentTime <- liftIO Clock.getCurrentTime result <- - runExceptT $ - PG.runTx pool (PG.Serializable, Just PG.ReadWrite) $ - migrateCatalog - defaultSourceConfig - extensionsSchema - maintenanceMode - currentTime + runExceptT + $ PG.runTx pool (PG.Serializable, Just PG.ReadWrite) + $ migrateCatalog + defaultSourceConfig + extensionsSchema + maintenanceMode + currentTime case result of Left err -> do unLogger @@ -604,9 +604,9 @@ buildFirstSchemaCache let cacheBuildParams = CacheBuildParams httpManager pgSourceResolver mssqlSourceResolver cacheStaticConfig buildReason = CatalogSync result <- - runExceptT $ - runCacheBuild cacheBuildParams $ - buildRebuildableSchemaCacheWithReason buildReason logger env metadataWithVersion cacheDynamicConfig mSchemaRegistryContext + runExceptT + $ runCacheBuild cacheBuildParams + $ buildRebuildableSchemaCacheWithReason buildReason logger env metadataWithVersion cacheDynamicConfig mSchemaRegistryContext result `onLeft` \err -> do -- TODO: we used to bundle the first schema cache build with the catalog -- migration, using the same error handler for both, meaning that an @@ -692,14 +692,14 @@ instance HttpLog AppM where buildExtraHttpLogMetadata _ _ = () logHttpError logger loggingSettings userInfoM reqId waiReq req qErr headers _ = - unLogger logger $ - mkHttpLog $ - mkHttpErrorLogContext userInfoM loggingSettings reqId waiReq req qErr Nothing Nothing headers + unLogger logger + $ mkHttpLog + $ mkHttpErrorLogContext userInfoM loggingSettings reqId waiReq req qErr Nothing Nothing headers logHttpSuccess logger loggingSettings userInfoM reqId waiReq reqBody response compressedResponse qTime cType headers (CommonHttpLogMetadata rb batchQueryOpLogs, ()) = - unLogger logger $ - mkHttpLog $ - mkHttpAccessLogContext userInfoM loggingSettings reqId waiReq reqBody (BL.length response) compressedResponse qTime cType headers rb batchQueryOpLogs + unLogger logger + $ mkHttpLog + $ mkHttpAccessLogContext userInfoM loggingSettings reqId waiReq reqBody (BL.length response) compressedResponse qTime cType headers rb batchQueryOpLogs instance MonadExecuteQuery AppM where cacheLookup _ _ _ _ _ _ = pure $ Right ([], ResponseUncached Nothing) @@ -713,21 +713,21 @@ instance UserAuthentication AppM where instance MonadMetadataApiAuthorization AppM where authorizeV1QueryApi query handlerCtx = runExceptT do let currRole = _uiRole $ hcUser handlerCtx - when (requiresAdmin query && currRole /= adminRoleName) $ - withPathK "args" $ - throw400 AccessDenied accessDeniedErrMsg + when (requiresAdmin query && currRole /= adminRoleName) + $ withPathK "args" + $ throw400 AccessDenied accessDeniedErrMsg authorizeV1MetadataApi _ handlerCtx = runExceptT do let currRole = _uiRole $ hcUser handlerCtx - when (currRole /= adminRoleName) $ - withPathK "args" $ - throw400 AccessDenied accessDeniedErrMsg + when (currRole /= adminRoleName) + $ withPathK "args" + $ throw400 AccessDenied accessDeniedErrMsg authorizeV2QueryApi _ handlerCtx = runExceptT do let currRole = _uiRole $ hcUser handlerCtx - when (currRole /= adminRoleName) $ - withPathK "args" $ - throw400 AccessDenied accessDeniedErrMsg + when (currRole /= adminRoleName) + $ withPathK "args" + $ throw400 AccessDenied accessDeniedErrMsg instance ConsoleRenderer AppM where type ConsoleType AppM = CEConsoleType @@ -826,7 +826,7 
@@ instance MonadEECredentialsStorage AppM where -- currently due `throwErrExit`. -- | Parse cli arguments to graphql-engine executable. -parseArgs :: EnabledLogTypes impl => Env.Environment -> IO (HGEOptions (ServeOptions impl)) +parseArgs :: (EnabledLogTypes impl) => Env.Environment -> IO (HGEOptions (ServeOptions impl)) parseArgs env = do rawHGEOpts <- execParser opts let eitherOpts = runWithEnv (Env.toList env) $ mkHGEOptions rawHGEOpts @@ -942,8 +942,8 @@ runHGEServer setupHook appStateRef initTime startupStatusHook consoleType ekgSto setForkIOWithMetrics :: Warp.Settings -> Warp.Settings setForkIOWithMetrics = Warp.setFork \f -> do - void $ - C.forkIOWithUnmask + void + $ C.forkIOWithUnmask ( \unmask -> bracket_ ( do @@ -966,9 +966,9 @@ runHGEServer setupHook appStateRef initTime startupStatusHook consoleType ekgSto finishTime <- liftIO Clock.getCurrentTime let apiInitTime = realToFrac $ Clock.diffUTCTime finishTime initTime - unLogger logger $ - mkGenericLog LevelInfo "server" $ - StartupTimeInfo "starting API server" apiInitTime + unLogger logger + $ mkGenericLog LevelInfo "server" + $ StartupTimeInfo "starting API server" apiInitTime -- Here we block until the shutdown latch 'MVar' is filled, and then -- shut down the server. Once this blocking call returns, we'll tidy up @@ -1028,8 +1028,8 @@ mkHGEServer setupHook appStateRef consoleType ekgStore = do wsServerEnv <- lift $ WS.createWSServerEnv appStateRef HasuraApp app actionSubState stopWsServer <- - lift $ - mkWaiApp + lift + $ mkWaiApp setupHook appStateRef consoleType @@ -1066,8 +1066,8 @@ mkHGEServer setupHook appStateRef consoleType ekgStore = do -- start a background thread to create new cron events _cronEventsThread <- - C.forkManagedT "runCronEventsGenerator" logger $ - runCronEventsGenerator logger fetchedCronTriggerStatsLogger (getSchemaCache appStateRef) + C.forkManagedT "runCronEventsGenerator" logger + $ runCronEventsGenerator logger fetchedCronTriggerStatsLogger (getSchemaCache appStateRef) startScheduledEventsPollerThread logger appEnvLockedEventsCtx EventingDisabled -> @@ -1075,9 +1075,9 @@ mkHGEServer setupHook appStateRef consoleType ekgStore = do -- start a background thread to check for updates _updateThread <- - C.forkManagedT "checkForUpdates" logger $ - liftIO $ - checkForUpdates loggerCtx appEnvManager + C.forkManagedT "checkForUpdates" logger + $ liftIO + $ checkForUpdates loggerCtx appEnvManager -- Start a background thread for source pings _sourcePingPoller <- @@ -1093,9 +1093,9 @@ mkHGEServer setupHook appStateRef consoleType ekgStore = do -- initialise the websocket connection reaper thread _websocketConnectionReaperThread <- - C.forkManagedT "websocket connection reaper thread" logger $ - liftIO $ - WS.websocketConnectionReaper getLatestConfigForWSServer getSchemaCache' (_wseServer wsServerEnv) + C.forkManagedT "websocket connection reaper thread" logger + $ liftIO + $ WS.websocketConnectionReaper getLatestConfigForWSServer getSchemaCache' (_wseServer wsServerEnv) dbUid <- getMetadataDbUid `onLeftM` throwErrJExit DatabaseMigrationError @@ -1109,15 +1109,15 @@ mkHGEServer setupHook appStateRef consoleType ekgStore = do -- start a background thread for telemetry _telemetryThread <- - C.forkManagedT "runTelemetry" logger $ - runTelemetry logger appStateRef dbUid pgVersion computeResources + C.forkManagedT "runTelemetry" logger + $ runTelemetry logger appStateRef dbUid pgVersion computeResources -- forking a dedicated polling thread to dynamically get the latest JWK settings -- set by the user and 
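waitForProcessingAction above is a poll-sleep loop: check the in-flight event count, sleep five seconds, and subtract the sleep from the remaining budget until either the count reaches zero or the budget is spent. A simplified sketch of that loop shape (logging and the forced shutdown action are elided):

import Control.Concurrent (threadDelay)

-- True if everything drained in time, False if the budget ran out.
waitUntilDrained :: IO Int -> Int -> IO Bool
waitUntilDrained countInFlight budgetSeconds
  | budgetSeconds <= 0 = pure False
  | otherwise = do
      n <- countInFlight
      if n == 0
        then pure True
        else do
          threadDelay (5 * 1000000) -- five seconds, like C.sleep 5
          waitUntilDrained countInFlight (budgetSeconds - 5)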
update the JWK accordingly. This will help in applying the -- updates without restarting HGE. _ <- - C.forkManagedT "update JWK" logger $ - updateJwkCtxThread (getAppContext appStateRef) appEnvManager logger + C.forkManagedT "update JWK" logger + $ updateJwkCtxThread (getAppContext appStateRef) appEnvManager logger -- These cleanup actions are not directly associated with any -- resource, but we still need to make sure we clean them up here. @@ -1166,13 +1166,19 @@ mkHGEServer setupHook appStateRef consoleType ekgStore = do res <- Retry.retrying Retry.retryPolicyDefault isRetryRequired (return $ unlockEventsInSource @b _siConfiguration nonEmptyLockedEvents) case res of Left err -> - logger $ - mkGenericLog LevelWarn "event_trigger" $ - "Error while unlocking event trigger events of source: " <> sourceNameText <> " error:" <> showQErr err + logger + $ mkGenericLog LevelWarn "event_trigger" + $ "Error while unlocking event trigger events of source: " + <> sourceNameText + <> " error:" + <> showQErr err Right count -> - logger $ - mkGenericLog LevelInfo "event_trigger" $ - tshow count <> " events of source " <> sourceNameText <> " were successfully unlocked" + logger + $ mkGenericLog LevelInfo "event_trigger" + $ tshow count + <> " events of source " + <> sourceNameText + <> " were successfully unlocked" shutdownAsyncActions :: LockedEventsCtx -> @@ -1208,20 +1214,20 @@ mkHGEServer setupHook appStateRef consoleType ekgStore = do MetadataDBShutdownAction metadataDBShutdownAction -> runExceptT metadataDBShutdownAction >>= \case Left err -> - logger $ - mkGenericLog LevelWarn (T.pack actionType) $ - "Error while unlocking the processing " - <> tshow actionType - <> " err - " - <> showQErr err + logger + $ mkGenericLog LevelWarn (T.pack actionType) + $ "Error while unlocking the processing " + <> tshow actionType + <> " err - " + <> showQErr err Right () -> pure () | otherwise = do processingEventsCount <- processingEventsCountAction' if (processingEventsCount == 0) then - logger $ - mkGenericLog @Text LevelInfo (T.pack actionType) $ - "All in-flight events have finished processing" + logger + $ mkGenericLog @Text LevelInfo (T.pack actionType) + $ "All in-flight events have finished processing" else unless (processingEventsCount == 0) $ do C.sleep (5) -- sleep for 5 seconds and then repeat waitForProcessingAction l actionType processingEventsCountAction' shutdownAction (maxTimeout - (Seconds 5)) @@ -1294,9 +1300,9 @@ mkHGEServer setupHook appStateRef consoleType ekgStore = do Nothing -- start a background thread to handle async action live queries - void $ - C.forkManagedT "asyncActionSubscriptionsProcessor" logger $ - asyncActionSubscriptionsProcessor actionSubState + void + $ C.forkManagedT "asyncActionSubscriptionsProcessor" logger + $ asyncActionSubscriptionsProcessor actionSubState startScheduledEventsPollerThread logger lockedEventsCtx = do AppEnv {..} <- lift askAppEnv @@ -1362,7 +1368,8 @@ notifySchemaCacheSyncTx (MetadataResourceVersion resourceVersion) instanceId inv getCatalogStateTx :: PG.TxE QErr CatalogState getCatalogStateTx = - mkCatalogState . PG.getRow + mkCatalogState + . 
PG.getRow <$> PG.withQE defaultTxErrorHandler [PG.sql| @@ -1408,7 +1415,8 @@ mkConsoleHTML :: CEConsoleType -> Either String Text mkConsoleHTML path authMode enableTelemetry consoleAssetsDir consoleSentryDsn ceConsoleType = - renderHtmlTemplate consoleTmplt $ + renderHtmlTemplate consoleTmplt + $ -- variables required to render the template J.object [ "isAdminSecretSet" J..= isAdminSecretSet authMode, diff --git a/server/src-lib/Hasura/App/State.hs b/server/src-lib/Hasura/App/State.hs index 384e3edd7ab..3e6ec49986c 100644 --- a/server/src-lib/Hasura/App/State.hs +++ b/server/src-lib/Hasura/App/State.hs @@ -223,11 +223,11 @@ rebuildRebuildableAppContext :: rebuildRebuildableAppContext readerCtx (RebuildableAppContext _ _ rule) serveOptions env = do let newInvalidationKeys = InvalidationKeys result <- - liftEitherM $ - liftIO $ - runExceptT $ - flip runReaderT readerCtx $ - Inc.build rule (serveOptions, env, newInvalidationKeys) + liftEitherM + $ liftIO + $ runExceptT + $ flip runReaderT readerCtx + $ Inc.build rule (serveOptions, env, newInvalidationKeys) let appContext = Inc.result result !newCtx = RebuildableAppContext appContext newInvalidationKeys (Inc.rebuildRule result) pure newCtx @@ -286,8 +286,8 @@ buildAppContextRule = proc (ServeOptions {..}, env, _keys) -> do -< do (logger, httpManager) <- ask authModeRes <- - runExceptT $ - setupAuthMode + runExceptT + $ setupAuthMode adminSecretHashSet webHook jwtSecrets @@ -301,9 +301,9 @@ buildAppContextRule = proc (ServeOptions {..}, env, _keys) -> do buildResponseInternalErrorsConfig = Inc.cache proc (adminInternalErrors, devMode) -> do let responseInternalErrorsConfig = if - | isDevModeEnabled devMode -> InternalErrorsAllRequests - | isAdminInternalErrorsEnabled adminInternalErrors -> InternalErrorsAdminOnly - | otherwise -> InternalErrorsDisabled + | isDevModeEnabled devMode -> InternalErrorsAllRequests + | isAdminInternalErrorsEnabled adminInternalErrors -> InternalErrorsAdminOnly + | otherwise -> InternalErrorsDisabled returnA -< responseInternalErrorsConfig -------------------------------------------------------------------------------- diff --git a/server/src-lib/Hasura/Backends/BigQuery/Connection.hs b/server/src-lib/Hasura/Backends/BigQuery/Connection.hs index ae018e2bb22..02b0683f6ed 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/Connection.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/Connection.hs @@ -87,7 +87,7 @@ resolveConfigurationJson env = \case Right sa -> pure . Right $ sa resolveConfigurationInput :: - QErrM m => + (QErrM m) => Env.Environment -> ConfigurationInput -> m Text @@ -96,7 +96,7 @@ resolveConfigurationInput env = \case FromEnv v -> MSSQLConn.getEnv env v resolveConfigurationInputs :: - QErrM m => + (QErrM m) => Env.Environment -> ConfigurationInputs -> m [Text] @@ -104,12 +104,12 @@ resolveConfigurationInputs env = \case FromYamls a -> pure a FromEnvs v -> filter (not . T.null) . 
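The buildResponseInternalErrorsConfig hunk above only re-indents a multi-way if; the guards and results are untouched. For reference, the construct requires the MultiWayIf extension and tries its guards top to bottom. A standalone sketch with simplified types:

{-# LANGUAGE MultiWayIf #-}

data InternalErrors = AllRequests | AdminOnly | Disabled

responseErrorsConfig :: Bool -> Bool -> InternalErrors
responseErrorsConfig devMode adminInternalErrors =
  if
    | devMode -> AllRequests
    | adminInternalErrors -> AdminOnly
    | otherwise -> Disabled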
T.splitOn "," <$> MSSQLConn.getEnv env v -initConnection :: MonadIO m => ServiceAccount -> BigQueryProjectId -> Maybe RetryOptions -> m BigQueryConnection +initConnection :: (MonadIO m) => ServiceAccount -> BigQueryProjectId -> Maybe RetryOptions -> m BigQueryConnection initConnection _bqServiceAccount _bqProjectId _bqRetryOptions = do _bqAccessTokenMVar <- liftIO $ newMVar Nothing -- `runBigQuery` initializes the token pure BigQueryConnection {..} -getAccessToken :: MonadIO m => ServiceAccount -> m (Either TokenProblem TokenResp) +getAccessToken :: (MonadIO m) => ServiceAccount -> m (Either TokenProblem TokenResp) getAccessToken sa = do eJwt <- encodeBearerJWT sa ["https://www.googleapis.com/auth/cloud-platform"] case eJwt of @@ -119,9 +119,9 @@ getAccessToken sa = do Left unicodeEx -> pure . Left . BearerTokenDecodeProblem $ unicodeEx Right assertion -> do tokenFetchResponse :: Response (Either JSONException TokenResp) <- - httpJSONEither $ - setRequestBodyJSON (mkTokenRequest assertion) $ - parseRequest_ ("POST " <> tokenURL) + httpJSONEither + $ setRequestBodyJSON (mkTokenRequest assertion) + $ parseRequest_ ("POST " <> tokenURL) if getResponseStatusCode tokenFetchResponse /= 200 then pure . Left . TokenRequestNonOK . getResponseStatus $ tokenFetchResponse else case getResponseBody tokenFetchResponse of @@ -162,14 +162,14 @@ getAccessToken sa = do mkSigInput n = header <> "." <> payload where header = - b64EncodeJ $ - J.object + b64EncodeJ + $ J.object [ "alg" J..= ("RS256" :: T.Text), "typ" J..= ("JWT" :: T.Text) ] payload = - b64EncodeJ $ - J.object + b64EncodeJ + $ J.object [ "aud" J..= tokenURL, "scope" J..= T.intercalate " " (map unScope scopes), "iat" J..= n, @@ -178,10 +178,11 @@ getAccessToken sa = do ] -- | Get a usable token. If the token has expired refresh it. -getUsableToken :: MonadIO m => BigQueryConnection -> m (Either TokenProblem TokenResp) +getUsableToken :: (MonadIO m) => BigQueryConnection -> m (Either TokenProblem TokenResp) getUsableToken BigQueryConnection {_bqServiceAccount, _bqAccessTokenMVar} = - liftIO $ - modifyMVar _bqAccessTokenMVar $ \mTokenResp -> do + liftIO + $ modifyMVar _bqAccessTokenMVar + $ \mTokenResp -> do case mTokenResp of Nothing -> do refreshedToken <- getAccessToken _bqServiceAccount diff --git a/server/src-lib/Hasura/Backends/BigQuery/DDL.hs b/server/src-lib/Hasura/Backends/BigQuery/DDL.hs index 6dcbf6e8b9b..e6c6173fafa 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/DDL.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/DDL.hs @@ -37,8 +37,8 @@ fetchAndValidateEnumValues :: [RawColumnInfo 'BigQuery] -> m (Either QErr EnumValues) fetchAndValidateEnumValues _ _ _ _ = - runExceptT $ - throw400 NotSupported "Enum tables are not supported for BigQuery sources" + runExceptT + $ throw400 NotSupported "Enum tables are not supported for BigQuery sources" buildFunctionInfo :: (MonadError QErr m) => diff --git a/server/src-lib/Hasura/Backends/BigQuery/DDL/BoolExp.hs b/server/src-lib/Hasura/Backends/BigQuery/DDL/BoolExp.hs index de34491c3a7..b84554970ed 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/DDL/BoolExp.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/DDL/BoolExp.hs @@ -35,8 +35,8 @@ parseBoolExpOperations rhsParser _rootTableFieldInfoMap _fields columnRef value v -> pure . 
AEQ False <$> parseWithTy columnType v parseOperation :: ColumnType 'BigQuery -> (Text, J.Value) -> m (OpExpG 'BigQuery v) - parseOperation columnType (opStr, val) = withPathK opStr $ - case opStr of + parseOperation columnType (opStr, val) = withPathK opStr + $ case opStr of "_eq" -> parseEq "$eq" -> parseEq "_neq" -> parseNeq diff --git a/server/src-lib/Hasura/Backends/BigQuery/DDL/ComputedField.hs b/server/src-lib/Hasura/Backends/BigQuery/DDL/ComputedField.hs index 4e885b5e013..be942cc549a 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/DDL/ComputedField.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/DDL/ComputedField.hs @@ -92,7 +92,7 @@ buildComputedFieldInfo trackedTables table tableColumns computedField ComputedFi where mkComputedFieldInfo :: forall n. - MV.MonadValidate [ComputedFieldError] n => + (MV.MonadValidate [ComputedFieldError] n) => n (ComputedFieldInfo 'BigQuery) mkComputedFieldInfo = do -- Currently, we only support functions returning set of rows in computed field. @@ -152,8 +152,10 @@ buildComputedFieldInfo trackedTables table tableColumns computedField ComputedFi showErrors :: [ComputedFieldError] -> Text showErrors allErrors = "the computed field " - <> computedField <<> " cannot be added to table " - <> table <<> " " + <> computedField + <<> " cannot be added to table " + <> table + <<> " " <> reasonMessage where reasonMessage = makeReasonMessage allErrors (showError _bqcfdFunction) diff --git a/server/src-lib/Hasura/Backends/BigQuery/DDL/RunSQL.hs b/server/src-lib/Hasura/Backends/BigQuery/DDL/RunSQL.hs index 7f40d49fea0..a563d01e38b 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/DDL/RunSQL.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/DDL/RunSQL.hs @@ -107,8 +107,8 @@ recordSetAsHeaderAndRows Execute.RecordSet {rows} = J.toJSON (thead : tbody) recordSetAsSchema :: Execute.RecordSet -> J.Value recordSetAsSchema rs@(Execute.RecordSet {rows}) = - recordSetAsHeaderAndRows $ - rs + recordSetAsHeaderAndRows + $ rs { Execute.rows = InsOrdHashMap.adjust (Execute.TextOutputValue . LT.toStrict . encodeToLazyText . J.toJSON) diff --git a/server/src-lib/Hasura/Backends/BigQuery/DDL/Source.hs b/server/src-lib/Hasura/Backends/BigQuery/DDL/Source.hs index 57c969023f7..2b9b2722446 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/DDL/Source.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/DDL/Source.hs @@ -98,8 +98,9 @@ resolveSource sourceConfig = let result = (,) <$> tables <*> routines case result of Left err -> - throw400 Unexpected $ - "unexpected exception while connecting to database: " <> tshow err + throw400 Unexpected + $ "unexpected exception while connecting to database: " + <> tshow err Right (restTables, restRoutines) -> do seconds <- liftIO $ fmap systemSeconds getSystemTime let functions = FunctionOverloads <$> HashMap.groupOnNE (routineReferenceToFunctionName . routineReference) restRoutines diff --git a/server/src-lib/Hasura/Backends/BigQuery/Execute.hs b/server/src-lib/Hasura/Backends/BigQuery/Execute.hs index a499a646b08..13f8c48e309 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/Execute.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/Execute.hs @@ -153,7 +153,7 @@ executeProblemMessage showDetails = \case HideDetails -> summary InsecurelyShowDetails -> summary <> " and body:\n" <> LT.toStrict (LT.decodeUtf8 (J.encode body)) where - showErr :: forall a. Show a => a -> Text + showErr :: forall a. 
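parseOperation above dispatches on the operator key of a boolean-expression object, accepting both the underscore and dollar spellings of each operator. The same dispatch over Either, with OpExp and the right-hand-side parser as simplified stand-ins:

import qualified Data.Aeson as J

data OpExp v = OpEq v | OpNeq v

parseOpSketch ::
  (J.Value -> Either String v) -> (String, J.Value) -> Either String (OpExp v)
parseOpSketch rhsParser (opStr, val) = case opStr of
  "_eq" -> OpEq <$> rhsParser val
  "$eq" -> OpEq <$> rhsParser val -- alternative spelling of _eq
  "_neq" -> OpNeq <$> rhsParser val
  _ -> Left ("unknown operator: " <> opStr)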
(Show a) => a -> Text showErr err = case showDetails of HideDetails -> "" @@ -257,7 +257,7 @@ bigQueryProjectUrl projectId = -- Executing the planned actions forest runExecute :: - MonadIO m => + (MonadIO m) => BigQuerySourceConfig -> Execute (BigQuery.Job, RecordSet) -> m (Either ExecuteProblem (BigQuery.Job, RecordSet)) @@ -431,7 +431,7 @@ streamBigQuery conn bigquery = do Left e -> pure (Left e) -- | Execute a query without expecting any output (e.g. CREATE TABLE or INSERT) -executeBigQuery :: MonadIO m => BigQueryConnection -> BigQuery -> m (Either ExecuteProblem ()) +executeBigQuery :: (MonadIO m) => BigQueryConnection -> BigQuery -> m (Either ExecuteProblem ()) executeBigQuery conn bigquery = do jobResult <- runExceptT $ createQueryJob conn bigquery case jobResult of @@ -560,9 +560,9 @@ createQueryJob conn BigQuery {..} = do <> "/jobs?alt=json&prettyPrint=false" req = - jsonRequestHeader $ - setRequestBodyLBS body $ - parseRequest_ url + jsonRequestHeader + $ setRequestBodyLBS body + $ parseRequest_ url body = J.encode @@ -659,9 +659,9 @@ insertDataset conn datasetId = <> "/datasets?alt=json&prettyPrint=false" req = - jsonRequestHeader $ - setRequestBodyLBS body $ - parseRequest_ url + jsonRequestHeader + $ setRequestBodyLBS body + $ parseRequest_ url body = J.encode @@ -890,25 +890,25 @@ instance J.FromJSON BigQueryField where do flag :: Text <- o .: "type" if - | flag == "NUMERIC" || flag == "DECIMAL" -> pure FieldDECIMAL - | flag == "BIGNUMERIC" || flag == "BIGDECIMAL" -> - pure FieldBIGDECIMAL - | flag == "INT64" || flag == "INTEGER" -> pure FieldINTEGER - | flag == "FLOAT64" || flag == "FLOAT" -> pure FieldFLOAT - | flag == "BOOLEAN" || flag == "BOOL" -> pure FieldBOOL - | flag == "STRING" -> pure FieldSTRING - | flag == "JSON" -> pure FieldJSON - | flag == "DATE" -> pure FieldDATE - | flag == "TIME" -> pure FieldTIME - | flag == "DATETIME" -> pure FieldDATETIME - | flag == "TIMESTAMP" -> pure FieldTIMESTAMP - | flag == "GEOGRAPHY" -> pure FieldGEOGRAPHY - | flag == "BYTES" -> pure FieldBYTES - | flag == "RECORD" || flag == "STRUCT" -> - do - fields <- o .: "fields" - pure (FieldSTRUCT fields) - | otherwise -> fail ("Unsupported field type: " ++ show flag) + | flag == "NUMERIC" || flag == "DECIMAL" -> pure FieldDECIMAL + | flag == "BIGNUMERIC" || flag == "BIGDECIMAL" -> + pure FieldBIGDECIMAL + | flag == "INT64" || flag == "INTEGER" -> pure FieldINTEGER + | flag == "FLOAT64" || flag == "FLOAT" -> pure FieldFLOAT + | flag == "BOOLEAN" || flag == "BOOL" -> pure FieldBOOL + | flag == "STRING" -> pure FieldSTRING + | flag == "JSON" -> pure FieldJSON + | flag == "DATE" -> pure FieldDATE + | flag == "TIME" -> pure FieldTIME + | flag == "DATETIME" -> pure FieldDATETIME + | flag == "TIMESTAMP" -> pure FieldTIMESTAMP + | flag == "GEOGRAPHY" -> pure FieldGEOGRAPHY + | flag == "BYTES" -> pure FieldBYTES + | flag == "RECORD" || flag == "STRUCT" -> + do + fields <- o .: "fields" + pure (FieldSTRUCT fields) + | otherwise -> fail ("Unsupported field type: " ++ show flag) mode <- o .:? 
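The FromJSON instance above accepts both the legacy and standard-SQL name for each BigQuery type ("NUMERIC"/"DECIMAL", "INT64"/"INTEGER", and so on) before falling through to a failure. A sketch of the alias folding, with plain strings standing in for the field constructors:

fieldKind :: String -> Either String String
fieldKind flag
  | flag `elem` ["NUMERIC", "DECIMAL"] = Right "decimal"
  | flag `elem` ["BIGNUMERIC", "BIGDECIMAL"] = Right "bigdecimal"
  | flag `elem` ["INT64", "INTEGER"] = Right "integer"
  | flag `elem` ["FLOAT64", "FLOAT"] = Right "float"
  | flag `elem` ["BOOLEAN", "BOOL"] = Right "bool"
  | otherwise = Left ("Unsupported field type: " <> flag)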
"mode" .!= Nullable pure BigQueryField {..} ) diff --git a/server/src-lib/Hasura/Backends/BigQuery/FromIr.hs b/server/src-lib/Hasura/Backends/BigQuery/FromIr.hs index a6c961462bd..5fd1be0d2b2 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/FromIr.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/FromIr.hs @@ -185,8 +185,8 @@ data ParentSelectFromEntity runFromIr :: FromIrConfig -> FromIr a -> Validate (NonEmpty Error) (a, FromIrWriter) runFromIr config fromIr = - runWriterT $ - evalStateT + runWriterT + $ evalStateT (runReaderT (unFromIr fromIr) (FromIrReader {config})) (FromIrState {indices = mempty}) @@ -460,8 +460,8 @@ fromSelectAggregate minnerJoinFields annSelectG = do ) ] indexColumn = - ColumnExpression $ - FieldName + ColumnExpression + $ FieldName { fieldNameEntity = innerSelectAlias, fieldName = unEntityAlias indexAlias } @@ -796,9 +796,9 @@ fromFunction parentEntityAlias functionName positionalArgs namedArgs = do case parentEntityAlias of NoParentEntity -> refute $ pure NoParentEntityInternalError ParentEntityAlias entityAlias -> - pure $ - ColumnExpression $ - FieldName columnName (entityAliasText entityAlias) + pure + $ ColumnExpression + $ FieldName columnName (entityAliasText entityAlias) fromAnnBoolExp :: Ir.GBoolExp 'BigQuery (Ir.AnnBoolExpFld 'BigQuery Expression) -> @@ -1427,9 +1427,11 @@ fromArrayAggregateSelectG annRelationSelectG = do joinRightTable = fromAlias (selectFrom select), joinOn, joinProvenance = - ArrayAggregateJoinProvenance $ - mapMaybe (\p -> (,aggregateProjectionsFieldOrigin p) <$> projectionAlias p) . toList . selectProjections $ - select, + ArrayAggregateJoinProvenance + $ mapMaybe (\p -> (,aggregateProjectionsFieldOrigin p) <$> projectionAlias p) + . toList + . selectProjections + $ select, -- Above: Needed by DataLoader to determine the type of -- Haskell-native join to perform. joinFieldName, @@ -1818,7 +1820,7 @@ fromGBoolExp = -- | Attempt to refine a list into a 'NonEmpty'. If the given list is empty, -- this will 'refute' the computation with an 'UnexpectedEmptyList' error. 
-toNonEmpty :: MonadValidate (NonEmpty Error) m => [x] -> m (NonEmpty x) +toNonEmpty :: (MonadValidate (NonEmpty Error) m) => [x] -> m (NonEmpty x) toNonEmpty = \case [] -> refute (UnexpectedEmptyList :| []) x : xs -> pure (x :| xs) diff --git a/server/src-lib/Hasura/Backends/BigQuery/Instances/Execute.hs b/server/src-lib/Hasura/Backends/BigQuery/Instances/Execute.hs index c41f8ad22ba..a7f1d911b53 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/Instances/Execute.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/Instances/Execute.hs @@ -140,7 +140,7 @@ bqDBMutationPlan _userInfo _stringifyNum _sourceName _sourceConfig _mrf _headers -- explain bqDBQueryExplain :: - MonadError E.QErr m => + (MonadError E.QErr m) => RootFieldAlias -> UserInfo -> SourceName -> @@ -152,22 +152,22 @@ bqDBQueryExplain :: bqDBQueryExplain fieldName userInfo sourceName sourceConfig qrf _ _ = do select <- planNoPlan (BigQuery.bigQuerySourceConfigToFromIrConfig sourceConfig) userInfo qrf let textSQL = selectSQLTextForExplain select - pure $ - AB.mkAnyBackend $ - DBStepInfo @'BigQuery - sourceName - sourceConfig - Nothing - ( OnBaseMonad $ - pure $ - withNoStatistics $ - encJFromJValue $ - ExplainPlan - fieldName - (Just $ textSQL) - (Just $ T.lines $ textSQL) - ) - () + pure + $ AB.mkAnyBackend + $ DBStepInfo @'BigQuery + sourceName + sourceConfig + Nothing + ( OnBaseMonad + $ pure + $ withNoStatistics + $ encJFromJValue + $ ExplainPlan + fieldName + (Just $ textSQL) + (Just $ T.lines $ textSQL) + ) + () -- | Get the SQL text for a select, with parameters left as $1, $2, .. holes. selectSQLTextForExplain :: BigQuery.Select -> Text @@ -225,10 +225,11 @@ bqDBRemoteRelationshipPlan userInfo sourceName sourceConfig lhs lhsSchema argume rowsArgument :: UnpreparedValue 'BigQuery rowsArgument = - UVParameter IR.Unknown $ - ColumnValue (ColumnScalar BigQuery.StringScalarType) $ - BigQuery.StringValue . LT.toStrict $ - J.encodeToLazyText lhs + UVParameter IR.Unknown + $ ColumnValue (ColumnScalar BigQuery.StringScalarType) + $ BigQuery.StringValue + . LT.toStrict + $ J.encodeToLazyText lhs recordSetDefinitionList = (coerceToColumn argumentId, BigQuery.IntegerScalarType) : HashMap.toList (fmap snd joinColumnMapping) diff --git a/server/src-lib/Hasura/Backends/BigQuery/Instances/Schema.hs b/server/src-lib/Hasura/Backends/BigQuery/Instances/Schema.hs index f44d25f98bf..436f32cdcfe 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/Instances/Schema.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/Instances/Schema.hs @@ -112,7 +112,9 @@ bqColumnParser columnType nullability = case columnType of case soBigQueryStringNumericInput of Options.EnableBigQueryStringNumericInput -> custom Options.DisableBigQueryStringNumericInput -> builtin - peelWithOrigin . fmap (ColumnValue columnType) . bqPossiblyNullable nullability + peelWithOrigin + . fmap (ColumnValue columnType) + . bqPossiblyNullable nullability <$> case scalarType of -- bytestrings -- we only accept string literals @@ -122,25 +124,25 @@ bqColumnParser columnType nullability = case columnType of -- floating point values BigQuery.FloatScalarType -> - pure $ - BigQuery.FloatValue - <$> numericInputParser (BigQuery.doubleToFloat64 <$> P.float) BQP.bqFloat64 + pure + $ BigQuery.FloatValue + <$> numericInputParser (BigQuery.doubleToFloat64 <$> P.float) BQP.bqFloat64 BigQuery.IntegerScalarType -> - pure $ - BigQuery.IntegerValue - <$> numericInputParser (BigQuery.intToInt64 . 
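toNonEmpty above refutes the empty list through MonadValidate; the same shape over plain Either makes the two outcomes explicit:

import Data.List.NonEmpty (NonEmpty (..))

toNonEmptyEither :: e -> [x] -> Either (NonEmpty e) (NonEmpty x)
toNonEmptyEither emptyErr list = case list of
  [] -> Left (emptyErr :| []) -- refute: report the error
  x : xs -> Right (x :| xs) -- pure: hand back the refined list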
fromIntegral <$> P.int) BQP.bqInt64 + pure + $ BigQuery.IntegerValue + <$> numericInputParser (BigQuery.intToInt64 . fromIntegral <$> P.int) BQP.bqInt64 BigQuery.DecimalScalarType -> - pure $ - BigQuery.DecimalValue - <$> numericInputParser - (BigQuery.Decimal . BigQuery.scientificToText <$> P.scientific) - BQP.bqDecimal + pure + $ BigQuery.DecimalValue + <$> numericInputParser + (BigQuery.Decimal . BigQuery.scientificToText <$> P.scientific) + BQP.bqDecimal BigQuery.BigDecimalScalarType -> - pure $ - BigQuery.BigDecimalValue - <$> numericInputParser - (BigQuery.BigDecimal . BigQuery.scientificToText <$> P.scientific) - BQP.bqBigDecimal + pure + $ BigQuery.BigDecimalValue + <$> numericInputParser + (BigQuery.BigDecimal . BigQuery.scientificToText <$> P.scientific) + BQP.bqBigDecimal -- boolean type BigQuery.BoolScalarType -> pure $ BigQuery.BoolValue <$> P.boolean BigQuery.DateScalarType -> pure $ BigQuery.DateValue . BigQuery.Date <$> stringBased _Date @@ -155,7 +157,9 @@ bqColumnParser columnType nullability = case columnType of ColumnEnumReference (EnumReference tableName enumValues customTableName) -> case nonEmpty (HashMap.toList enumValues) of Just enumValuesList -> - peelWithOrigin . fmap (ColumnValue columnType) . bqPossiblyNullable nullability + peelWithOrigin + . fmap (ColumnValue columnType) + . bqPossiblyNullable nullability <$> bqEnumParser tableName enumValuesList customTableName nullability Nothing -> throw400 ValidationFailed "empty enum values" where @@ -165,7 +169,8 @@ bqColumnParser columnType nullability = case columnType of { pType = schemaType, pParser = P.valueToJSON (P.toGraphQLType schemaType) - >=> either (P.parseErrorWith P.ParseFailed . toErrorMessage . qeError) pure . runAesonParser J.parseJSON + >=> either (P.parseErrorWith P.ParseFailed . toErrorMessage . qeError) pure + . runAesonParser J.parseJSON } stringBased :: (MonadParse m) => G.Name -> Parser 'Both m Text stringBased scalarName = @@ -206,7 +211,8 @@ bqOrderByOperators :: ) ) bqOrderByOperators _tCase = - (Name._order_by,) $ + (Name._order_by,) + $ -- NOTE: NamingCase is not being used here as we don't support naming conventions for this DB NE.fromList [ ( define Name._asc "in ascending order, nulls first", @@ -246,103 +252,103 @@ bqComparisonExps = P.memoize 'comparisonExps $ \columnType -> do -- textParser <- columnParser (ColumnScalar @'BigQuery BigQuery.StringScalarType) (G.Nullability False) let name = P.getName typedParser <> Name.__BigQuery_comparison_exp desc = - G.Description $ - "Boolean expression to compare columns of type " - <> P.getName typedParser - <<> ". All fields are combined with logical 'AND'." + G.Description + $ "Boolean expression to compare columns of type " + <> P.getName typedParser + <<> ". All fields are combined with logical 'AND'." -- textListParser = fmap openValueOrigin <$> P.list textParser columnListParser = fmap IR.openValueOrigin <$> P.list typedParser mkListLiteral :: [ColumnValue 'BigQuery] -> IR.UnpreparedValue 'BigQuery mkListLiteral = IR.UVLiteral . BigQuery.ListExpression . fmap (BigQuery.ValueExpression . cvValue) - pure $ - P.object name (Just desc) $ - fmap catMaybes $ - sequenceA $ - concat - [ -- from https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types: - -- GEOGRAPHY comparisons are not supported. To compare GEOGRAPHY values, use ST_Equals. 
- guard (isScalarColumnWhere (/= BigQuery.GeographyScalarType) columnType) - *> equalityOperators - tCase - collapseIfNull - (IR.mkParameter <$> typedParser) - (mkListLiteral <$> columnListParser), - guard (isScalarColumnWhere (/= BigQuery.GeographyScalarType) columnType) - *> comparisonOperators - tCase - collapseIfNull - (IR.mkParameter <$> typedParser), - -- Ops for String type - guard (isScalarColumnWhere (== BigQuery.StringScalarType) columnType) - *> [ mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__like) - (Just "does the column match the given pattern") - (ALIKE . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__nlike) - (Just "does the column NOT match the given pattern") - (ANLIKE . IR.mkParameter <$> typedParser) - ], - -- Ops for Bytes type - guard (isScalarColumnWhere (== BigQuery.BytesScalarType) columnType) - *> [ mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__like) - (Just "does the column match the given pattern") - (ALIKE . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__nlike) - (Just "does the column NOT match the given pattern") - (ANLIKE . IR.mkParameter <$> typedParser) - ], - -- Ops for Geography type - guard (isScalarColumnWhere (== BigQuery.GeographyScalarType) columnType) - *> [ mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "contains"])) - (Just "does the column contain the given geography value") - (ABackendSpecific . BigQuery.ASTContains . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "equals"])) - (Just "is the column equal to given geography value (directionality is ignored)") - (ABackendSpecific . BigQuery.ASTEquals . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "touches"])) - (Just "does the column have at least one point in common with the given geography value") - (ABackendSpecific . BigQuery.ASTTouches . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "within"])) - (Just "is the column contained in the given geography value") - (ABackendSpecific . BigQuery.ASTWithin . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "intersects"])) - (Just "does the column spatially intersect the given geography value") - (ABackendSpecific . BigQuery.ASTIntersects . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "d", "within"])) - (Just "is the column within a given distance from the given geometry value") - (ABackendSpecific . BigQuery.ASTDWithin <$> dWithinGeogOpParser) - ] - ] + pure + $ P.object name (Just desc) + $ fmap catMaybes + $ sequenceA + $ concat + [ -- from https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types: + -- GEOGRAPHY comparisons are not supported. To compare GEOGRAPHY values, use ST_Equals. 
+ guard (isScalarColumnWhere (/= BigQuery.GeographyScalarType) columnType) + *> equalityOperators + tCase + collapseIfNull + (IR.mkParameter <$> typedParser) + (mkListLiteral <$> columnListParser), + guard (isScalarColumnWhere (/= BigQuery.GeographyScalarType) columnType) + *> comparisonOperators + tCase + collapseIfNull + (IR.mkParameter <$> typedParser), + -- Ops for String type + guard (isScalarColumnWhere (== BigQuery.StringScalarType) columnType) + *> [ mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__like) + (Just "does the column match the given pattern") + (ALIKE . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__nlike) + (Just "does the column NOT match the given pattern") + (ANLIKE . IR.mkParameter <$> typedParser) + ], + -- Ops for Bytes type + guard (isScalarColumnWhere (== BigQuery.BytesScalarType) columnType) + *> [ mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__like) + (Just "does the column match the given pattern") + (ALIKE . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__nlike) + (Just "does the column NOT match the given pattern") + (ANLIKE . IR.mkParameter <$> typedParser) + ], + -- Ops for Geography type + guard (isScalarColumnWhere (== BigQuery.GeographyScalarType) columnType) + *> [ mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "contains"])) + (Just "does the column contain the given geography value") + (ABackendSpecific . BigQuery.ASTContains . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "equals"])) + (Just "is the column equal to given geography value (directionality is ignored)") + (ABackendSpecific . BigQuery.ASTEquals . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "touches"])) + (Just "does the column have at least one point in common with the given geography value") + (ABackendSpecific . BigQuery.ASTTouches . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "within"])) + (Just "is the column contained in the given geography value") + (ABackendSpecific . BigQuery.ASTWithin . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "intersects"])) + (Just "does the column spatially intersect the given geography value") + (ABackendSpecific . BigQuery.ASTIntersects . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "d", "within"])) + (Just "is the column within a given distance from the given geometry value") + (ABackendSpecific . 
BigQuery.ASTDWithin <$> dWithinGeogOpParser) + ] + ] bqCountTypeInput :: (MonadParse n) => @@ -370,12 +376,12 @@ geographyWithinDistanceInput = do -- practically BigQuery (as of 2021-11-19) doesn't support TRUE as use_spheroid parameter for ST_DWITHIN booleanParser <- columnParser (ColumnScalar BigQuery.BoolScalarType) (G.Nullability True) floatParser <- columnParser (ColumnScalar BigQuery.FloatScalarType) (G.Nullability False) - pure $ - P.object Name._st_dwithin_input Nothing $ - DWithinGeogOp - <$> (IR.mkParameter <$> P.field Name._distance Nothing floatParser) - <*> (IR.mkParameter <$> P.field Name._from Nothing geographyParser) - <*> (IR.mkParameter <$> P.fieldWithDefault Name._use_spheroid Nothing (G.VBoolean False) booleanParser) + pure + $ P.object Name._st_dwithin_input Nothing + $ DWithinGeogOp + <$> (IR.mkParameter <$> P.field Name._distance Nothing floatParser) + <*> (IR.mkParameter <$> P.field Name._from Nothing geographyParser) + <*> (IR.mkParameter <$> P.fieldWithDefault Name._use_spheroid Nothing (G.VBoolean False) booleanParser) -- | Computed field parser. bqComputedField :: @@ -400,19 +406,19 @@ bqComputedField ComputedFieldInfo {..} tableName tableInfo = runMaybeT do selectionSetParser <- MaybeT (fmap (P.multiple . P.nonNullableParser) <$> tableSelectionSet returnTableInfo) selectArgsParser <- lift $ tableArguments returnTableInfo let fieldArgsParser = liftA2 (,) functionArgsParser selectArgsParser - pure $ - P.subselection fieldName fieldDescription fieldArgsParser selectionSetParser - <&> \((functionArgs', args), fields) -> - IR.AFComputedField _cfiXComputedFieldInfo _cfiName $ - IR.CFSTable JASMultipleRows $ - IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = IR.FromFunction (_cffName _cfiFunction) functionArgs' Nothing, - IR._asnPerm = tablePermissionsInfo returnTablePermissions, - IR._asnArgs = args, - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Nothing - } + pure + $ P.subselection fieldName fieldDescription fieldArgsParser selectionSetParser + <&> \((functionArgs', args), fields) -> + IR.AFComputedField _cfiXComputedFieldInfo _cfiName + $ IR.CFSTable JASMultipleRows + $ IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = IR.FromFunction (_cffName _cfiFunction) functionArgs' Nothing, + IR._asnPerm = tablePermissionsInfo returnTablePermissions, + IR._asnArgs = args, + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Nothing + } BigQuery.ReturnTableSchema returnFields -> do -- Check if the computed field is available in the select permission selectPermissions <- hoistMaybe $ tableSelectPermissions roleName tableInfo @@ -424,22 +430,22 @@ bqComputedField ComputedFieldInfo {..} tableName tableInfo = runMaybeT do selectionSetParser <- do fieldParsers <- lift $ for returnFields selectArbitraryField let description = G.Description $ "column fields returning by " <>> _cfiName - pure $ - P.selectionSetObject objectTypeName (Just description) fieldParsers [] - <&> parsedSelectionsToFields IR.AFExpression - pure $ - P.subselection fieldName fieldDescription functionArgsParser selectionSetParser - <&> \(functionArgs', fields) -> - IR.AFComputedField _cfiXComputedFieldInfo _cfiName $ - IR.CFSTable JASMultipleRows $ - IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = IR.FromFunction (_cffName _cfiFunction) functionArgs' Nothing, - IR._asnPerm = IR.noTablePermissions, - IR._asnArgs = IR.noSelectArgs, - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Nothing - } + pure + $ P.selectionSetObject objectTypeName 
(Just description) fieldParsers [] + <&> parsedSelectionsToFields IR.AFExpression + pure + $ P.subselection fieldName fieldDescription functionArgsParser selectionSetParser + <&> \(functionArgs', fields) -> + IR.AFComputedField _cfiXComputedFieldInfo _cfiName + $ IR.CFSTable JASMultipleRows + $ IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = IR.FromFunction (_cffName _cfiFunction) functionArgs' Nothing, + IR._asnPerm = IR.noTablePermissions, + IR._asnArgs = IR.noSelectArgs, + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Nothing + } where fieldDescription :: Maybe G.Description fieldDescription = G.Description <$> _cfiDescription @@ -449,9 +455,9 @@ bqComputedField ComputedFieldInfo {..} tableName tableInfo = runMaybeT do SchemaT r m (FieldParser n (AnnotatedField 'BigQuery)) selectArbitraryField (columnName, graphQLName, columnType) = do field <- columnParser @'BigQuery (ColumnScalar columnType) (G.Nullability True) - pure $ - P.selection_ graphQLName Nothing field - $> IR.mkAnnColumnField columnName (ColumnScalar columnType) Nothing Nothing + pure + $ P.selection_ graphQLName Nothing field + $> IR.mkAnnColumnField columnName (ColumnScalar columnType) Nothing Nothing computedFieldFunctionArgs :: (G.Name -> G.Name) -> @@ -471,16 +477,19 @@ bqComputedField ComputedFieldInfo {..} tableName tableInfo = runMaybeT do let userArgsParser = P.object objectName Nothing argumentParsers let fieldDesc = - G.Description $ - "input parameters for computed field " - <> _cfiName <<> " defined on table " <>> tableName + G.Description + $ "input parameters for computed field " + <> _cfiName + <<> " defined on table " + <>> tableName argsField | null userInputArgs = P.fieldOptional Name._args (Just fieldDesc) userArgsParser | otherwise = Just <$> P.field Name._args (Just fieldDesc) userArgsParser - pure $ - argsField `P.bindFields` \maybeInputArguments -> do + pure + $ argsField + `P.bindFields` \maybeInputArguments -> do let tableColumnInputs = HashMap.map BigQuery.AETableColumn $ HashMap.mapKeys getFuncArgNameTxt _cffComputedFieldImplicitArgs pure $ FunctionArgsExp mempty $ maybe mempty HashMap.fromList maybeInputArguments <> tableColumnInputs diff --git a/server/src-lib/Hasura/Backends/BigQuery/Meta.hs b/server/src-lib/Hasura/Backends/BigQuery/Meta.hs index c3388fc3498..16d7f11e03b 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/Meta.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/Meta.hs @@ -190,7 +190,7 @@ instance FromJSON RestType where -- | Get all tables from all specified data sets. getTables :: - MonadIO m => + (MonadIO m) => BigQuerySourceConfig -> m (Either RestProblem [RestTable]) getTables BigQuerySourceConfig {..} = @@ -199,7 +199,7 @@ getTables BigQuerySourceConfig {..} = -- | Get tables in the dataset. 
getTablesForDataSet ::
-  MonadIO m =>
+  (MonadIO m) =>
  BigQueryConnection ->
  BigQueryDataset ->
  m (Either RestProblem [RestTable])
@@ -220,8 +220,8 @@ getTablesForDataSet conn dataSet = do
  where
    run pageToken acc = do
      let req =
-            setRequestHeader "Content-Type" ["application/json"] $
-              parseRequest_ url
+            setRequestHeader "Content-Type" ["application/json"]
+              $ parseRequest_ url
      eResp <- runBigQuery conn req
      case eResp of
        Left e -> pure (Left (GetTablesBigQueryProblem e))
@@ -237,13 +237,13 @@
            _ -> pure (Left (RESTRequestNonOK (getResponseStatus resp)))
      where
        url =
-          T.unpack $
-            "GET https://bigquery.googleapis.com/bigquery/v2/projects/"
-              <> getBigQueryProjectId (_bqProjectId conn)
-              <> "/datasets/"
-              <> getBigQueryDataset dataSet
-              <> "/tables?alt=json&"
-              <> encodeParams extraParameters
+          T.unpack
+            $ "GET https://bigquery.googleapis.com/bigquery/v2/projects/"
+            <> getBigQueryProjectId (_bqProjectId conn)
+            <> "/datasets/"
+            <> getBigQueryDataset dataSet
+            <> "/tables?alt=json&"
+            <> encodeParams extraParameters
        extraParameters = pageTokenParam
          where
            pageTokenParam =
@@ -253,7 +253,7 @@
 -- | Get a single table from the dataset.
 getTable ::
-  MonadIO m =>
+  (MonadIO m) =>
  BigQueryConnection ->
  BigQueryDataset ->
  Text ->
@@ -263,8 +263,8 @@ getTable conn dataSet tableId = do
  where
    run = do
      let req =
-            setRequestHeader "Content-Type" ["application/json"] $
-              parseRequest_ url
+            setRequestHeader "Content-Type" ["application/json"]
+              $ parseRequest_ url
      eResp <- runBigQuery conn req
      case eResp of
        Left e -> pure (Left (GetTablesBigQueryProblem e))
@@ -277,15 +277,15 @@
          _ -> pure (Left (RESTRequestNonOK (getResponseStatus resp)))
      where
        url =
-          T.unpack $
-            "GET https://bigquery.googleapis.com/bigquery/v2/projects/"
-              <> getBigQueryProjectId (_bqProjectId conn)
-              <> "/datasets/"
-              <> getBigQueryDataset dataSet
-              <> "/tables/"
-              <> tableId
-              <> "?alt=json&"
-              <> encodeParams extraParameters
+          T.unpack
+            $ "GET https://bigquery.googleapis.com/bigquery/v2/projects/"
+            <> getBigQueryProjectId (_bqProjectId conn)
+            <> "/datasets/"
+            <> getBigQueryDataset dataSet
+            <> "/tables/"
+            <> tableId
+            <> "?alt=json&"
+            <> encodeParams extraParameters
        extraParameters = []
 encodeParams :: [(Text, Text)] -> Text
@@ -410,12 +410,15 @@ data RestRoutineList = RestRoutineList
 instance FromJSON RestRoutineList where
  parseJSON = withObject "Object" $ \o ->
    RestRoutineList
-      <$> o .:? "routines" .!= [] -- "routines" field is absent when there are no routines defined
-      <*> o .:? "nextPageToken"
+      <$> o
+      .:? "routines"
+      .!= [] -- "routines" field is absent when there are no routines defined
+      <*> o
+      .:? "nextPageToken"
 -- | Get all routines from all specified data sets.
 getRoutines ::
-  MonadIO m =>
+  (MonadIO m) =>
  BigQuerySourceConfig ->
  m (Either RestProblem [RestRoutine])
 getRoutines BigQuerySourceConfig {..} =
@@ -424,7 +424,7 @@
 -- | Get routines in the dataset.
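The `run pageToken acc` loop in the hunk above is a cursor-pagination fold: each response's continuation token seeds the next request, and results accumulate until no token comes back. A stripped-down sketch of the same pattern, assuming only a hypothetical one-page fetch action (`fetchPage` and its shape are stand-ins, not the engine's REST client):

import Data.Text (Text)

-- Drain a paginated endpoint by threading the continuation token.
-- 'fetchPage' takes the current token (Nothing on the first call) and
-- returns one page of items plus the token for the next page, if any.
fetchAll :: (Monad m) => (Maybe Text -> m ([a], Maybe Text)) -> m [a]
fetchAll fetchPage = go Nothing []
  where
    go token acc = do
      (items, nextToken) <- fetchPage token
      let acc' = acc <> items
      case nextToken of
        Nothing -> pure acc'
        Just t -> go (Just t) acc'

The functions in this file specialise that shape to the BigQuery tables and routines endpoints, passing the token back as the `pageToken` query parameter.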
getRoutinesForDataSet :: - MonadIO m => + (MonadIO m) => BigQueryConnection -> BigQueryDataset -> m (Either RestProblem [RestRoutine]) @@ -433,8 +436,8 @@ getRoutinesForDataSet conn dataSet = do where run pageToken acc = do let req = - setRequestHeader "Content-Type" ["application/json"] $ - parseRequest_ url + setRequestHeader "Content-Type" ["application/json"] + $ parseRequest_ url eResp <- runBigQuery conn req case eResp of Left e -> pure (Left (GetRoutinesBigQueryProblem e)) @@ -450,13 +453,13 @@ getRoutinesForDataSet conn dataSet = do _ -> pure (Left (RESTRequestNonOK (getResponseStatus resp))) where url = - T.unpack $ - "GET https://bigquery.googleapis.com/bigquery/v2/projects/" - <> getBigQueryProjectId (_bqProjectId conn) - <> "/datasets/" - <> getBigQueryDataset dataSet - <> "/routines?alt=json&" - <> encodeParams extraParameters + T.unpack + $ "GET https://bigquery.googleapis.com/bigquery/v2/projects/" + <> getBigQueryProjectId (_bqProjectId conn) + <> "/datasets/" + <> getBigQueryDataset dataSet + <> "/routines?alt=json&" + <> encodeParams extraParameters extraParameters = pageTokenParam <> readMaskParam where pageTokenParam = diff --git a/server/src-lib/Hasura/Backends/BigQuery/Parser/Scalars.hs b/server/src-lib/Hasura/Backends/BigQuery/Parser/Scalars.hs index 4c691d3cc52..ed0dc524a29 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/Parser/Scalars.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/Parser/Scalars.hs @@ -39,7 +39,7 @@ import Language.GraphQL.Draft.Syntax qualified as G import Language.GraphQL.Draft.Syntax.QQ qualified as G import Text.ParserCombinators.ReadP -bqInt64 :: forall origin m. MonadParse m => Parser origin 'Both m BigQuery.Int64 +bqInt64 :: forall origin m. (MonadParse m) => Parser origin 'Both m BigQuery.Int64 bqInt64 = mkScalar name "64-bit integers. Accepts both string and number literals." \case GraphQLValue (VInt i) | checkIntegerBounds i -> return $ BigQuery.Int64 (tshow i) @@ -71,7 +71,7 @@ bqInt64 = mkScalar name "64-bit integers. Accepts both string and number literal boundsFailure inputText = parseErrorWith ParseFailed $ "The value " <> toErrorMessage inputText <> " lies outside the accepted numerical integral bounds." integralFailure inputText = parseErrorWith ParseFailed $ "The value " <> toErrorMessage inputText <> " has a non-zero fractional part." -bqFloat64 :: forall origin m. MonadParse m => Parser origin 'Both m BigQuery.Float64 +bqFloat64 :: forall origin m. (MonadParse m) => Parser origin 'Both m BigQuery.Float64 bqFloat64 = mkScalar name "64-bit floats. Accepts both string and number literals." \case GraphQLValue (VFloat f) -> floatSci (tshow f) f GraphQLValue (VInt i) -> floatSci (tshow i) (fromInteger i) @@ -96,17 +96,17 @@ bqFloat64 = mkScalar name "64-bit floats. Accepts both string and number literal boundsFailure :: forall a. Text -> m a boundsFailure inputText = parseErrorWith ParseFailed $ "The value " <> toErrorMessage inputText <> " lies outside the accepted numerical integral bounds." -bqBigDecimal :: MonadParse m => Parser origin 'Both m BigQuery.BigDecimal +bqBigDecimal :: (MonadParse m) => Parser origin 'Both m BigQuery.BigDecimal bqBigDecimal = mkScalar name "BigDecimals. Accepts both string and number literals." $ fmap (BigQuery.BigDecimal . BigQuery.scientificToText) . decimal name where name = [G.name|bigquery_bigdecimal|] -bqDecimal :: MonadParse m => Parser origin 'Both m BigQuery.Decimal +bqDecimal :: (MonadParse m) => Parser origin 'Both m BigQuery.Decimal bqDecimal = mkScalar name "Decimals. 
Accepts both string and number literals." $ fmap (BigQuery.Decimal . BigQuery.scientificToText) . decimal name where name = [G.name|bigquery_decimal|] -decimal :: MonadParse f => Name -> InputValue Variable -> f Scientific +decimal :: (MonadParse f) => Name -> InputValue Variable -> f Scientific decimal name = \case GraphQLValue (VFloat f) -> pure f GraphQLValue (VInt i) -> pure $ S.scientific i 0 @@ -123,7 +123,7 @@ decimal name = \case -- Local helpers mkScalar :: - MonadParse m => + (MonadParse m) => Name -> Description -> (InputValue Variable -> m a) -> @@ -139,13 +139,13 @@ mkScalar name desc parser = typeNamed :: Name -> Maybe Description -> Type origin 'Both typeNamed name description = TNamed NonNullable $ Definition name description Nothing [] TIScalar -stringNotationError :: MonadParse m => G.Name -> Text -> m a +stringNotationError :: (MonadParse m) => G.Name -> Text -> m a stringNotationError typeName actualString = - parseError $ - "expected " - <> toErrorMessage (tshow typeName) - <> " represented as a string, but got " - <> dquote actualString - <> ", which is not a recognizable " - <> toErrorMessage (tshow typeName) - <> "." + parseError + $ "expected " + <> toErrorMessage (tshow typeName) + <> " represented as a string, but got " + <> dquote actualString + <> ", which is not a recognizable " + <> toErrorMessage (tshow typeName) + <> "." diff --git a/server/src-lib/Hasura/Backends/BigQuery/Plan.hs b/server/src-lib/Hasura/Backends/BigQuery/Plan.hs index e92b9470732..46ae5b5bfa0 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/Plan.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/Plan.hs @@ -26,7 +26,7 @@ import Hasura.Session -- Top-level planner planNoPlan :: - MonadError E.QErr m => + (MonadError E.QErr m) => FromIrConfig -> UserInfo -> QueryDB 'BigQuery Void (UnpreparedValue 'BigQuery) -> diff --git a/server/src-lib/Hasura/Backends/BigQuery/Schema/Introspection.hs b/server/src-lib/Hasura/Backends/BigQuery/Schema/Introspection.hs index 7b6860bef6c..faba2fbbfae 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/Schema/Introspection.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/Schema/Introspection.hs @@ -35,9 +35,9 @@ listAllTables sourceName = do query :: LT.Text query = - LT.intercalate "union all" $ - map queryPerDataset $ - _scDatasets sourceConfig + LT.intercalate "union all" + $ map queryPerDataset + $ _scDatasets sourceConfig (_, recordSet) <- Execute.streamBigQuery (_scConnection sourceConfig) (BigQuery query mempty) diff --git a/server/src-lib/Hasura/Backends/BigQuery/Source.hs b/server/src-lib/Hasura/Backends/BigQuery/Source.hs index 456b97689d6..72dacca82a1 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/Source.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/Source.hs @@ -95,8 +95,10 @@ data TokenResp = TokenResp instance J.FromJSON TokenResp where parseJSON = J.withObject "TokenResp" $ \o -> TokenResp - <$> o J..: "access_token" - <*> o J..: "expires_in" + <$> o + J..: "access_token" + <*> o + J..: "expires_in" data ServiceAccount = ServiceAccount { _saClientEmail :: Text, @@ -107,11 +109,14 @@ data ServiceAccount = ServiceAccount instance HasCodec ServiceAccount where codec = - object "BigQueryServiceAccount" $ - ServiceAccount - <$> requiredField' "client_email" .= _saClientEmail - <*> requiredField' "private_key" .= _saPrivateKey - <*> requiredField' "project_id" .= _saProjectId + object "BigQueryServiceAccount" + $ ServiceAccount + <$> requiredField' "client_email" + .= _saClientEmail + <*> requiredField' "private_key" + .= _saPrivateKey + <*> 
requiredField' "project_id" + .= _saProjectId instance J.FromJSON ServiceAccount where parseJSON = J.genericParseJSON (J.aesonDrop 3 J.snakeCase) {J.omitNothingFields = False} @@ -132,7 +137,7 @@ data ConfigurationJSON a -- @FromYamlJSON@ case should be attempted last because there is a possibility -- that the decoding for @a@ is not disjoint from the other decoding cases. This -- presents some asymmetry that is a little tricky to capture in a codec. -instance HasCodec a => HasCodec (ConfigurationJSON a) where +instance (HasCodec a) => HasCodec (ConfigurationJSON a) where codec = parseAlternative (parseAlternative mainCodec fromEnvEncodedAsNestedJSON) yamlJSONCodec where -- This is the only codec in this implementation that is used for @@ -141,8 +146,8 @@ instance HasCodec a => HasCodec (ConfigurationJSON a) where -- encoding. mainCodec :: JSONCodec (ConfigurationJSON a) mainCodec = - dimapCodec dec enc $ - eitherCodec + dimapCodec dec enc + $ eitherCodec fromEnvCodec ( bimapCodec -- Fail parsing at this point because @codec \@a@ should only be @@ -169,12 +174,13 @@ instance HasCodec a => HasCodec (ConfigurationJSON a) where bimapCodec (eitherDecodeJSONViaCodec . BL.fromStrict . TE.encodeUtf8) id - $ codec @Text "JSON-encoded string" + $ codec @Text + "JSON-encoded string" yamlJSONCodec :: ValueCodec a (ConfigurationJSON a) yamlJSONCodec = FromYamlJSON <$> codec @a -instance J.FromJSON a => J.FromJSON (ConfigurationJSON a) where +instance (J.FromJSON a) => J.FromJSON (ConfigurationJSON a) where parseJSON = \case J.Object o | Just (J.String text) <- KM.lookup "from_env" o -> pure (FromEnvJSON text) J.String s -> case J.eitherDecode . BL.fromStrict . TE.encodeUtf8 $ s of @@ -182,7 +188,7 @@ instance J.FromJSON a => J.FromJSON (ConfigurationJSON a) where Right sa -> pure sa j -> fmap FromYamlJSON (J.parseJSON j) -instance J.ToJSON a => J.ToJSON (ConfigurationJSON a) where +instance (J.ToJSON a) => J.ToJSON (ConfigurationJSON a) where toJSON = \case FromEnvJSON i -> J.object ["from_env" J..= i] FromYamlJSON j -> J.toJSON j @@ -268,14 +274,20 @@ instance J.ToJSON BigQueryConnSourceConfig where -- instances. 
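The connection-configuration codecs in this file all follow the same autodocodec shape: `object` names the JSON object, and each field codec is tied back to its record selector with `.=`, so one definition drives parsing, printing, and generated schema documentation. A minimal standalone instance in the same (post-reformat) style, where the `Person` record is invented purely for illustration:

import Autodocodec
import Data.Text (Text)

data Person = Person
  { personName :: Text,
    personAge :: Maybe Int
  }

-- A required field maps to a mandatory JSON key; an optional field
-- maps to Maybe, omitted from the object when it is Nothing.
instance HasCodec Person where
  codec =
    object "Person"
      $ Person
      <$> requiredField' "name"
      .= personName
      <*> optionalField' "age"
      .= personAge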
instance HasCodec BigQueryConnSourceConfig where codec = - object "BigQueryConnSourceConfig" $ - BigQueryConnSourceConfig - <$> requiredField' "service_account" .= _cscServiceAccount - <*> requiredField' "datasets" .= _cscDatasets - <*> requiredField' "project_id" .= _cscProjectId - <*> optionalFieldOrNull' "global_select_limit" .= _cscGlobalSelectLimit - <*> optionalFieldOrNull' "retry_base_delay" .= _cscRetryBaseDelay - <*> optionalFieldOrNull' "retry_limit" .= _cscRetryLimit + object "BigQueryConnSourceConfig" + $ BigQueryConnSourceConfig + <$> requiredField' "service_account" + .= _cscServiceAccount + <*> requiredField' "datasets" + .= _cscDatasets + <*> requiredField' "project_id" + .= _cscProjectId + <*> optionalFieldOrNull' "global_select_limit" + .= _cscGlobalSelectLimit + <*> optionalFieldOrNull' "retry_base_delay" + .= _cscRetryBaseDelay + <*> optionalFieldOrNull' "retry_limit" + .= _cscRetryLimit deriving stock instance Show BigQueryConnSourceConfig @@ -307,15 +319,15 @@ instance Show BigQuerySourceConfig where instance J.ToJSON BigQuerySourceConfig where toJSON BigQuerySourceConfig {..} = - J.object $ - [ "service_account" J..= _bqServiceAccount _scConnection, - "datasets" J..= _scDatasets, - "project_id" J..= _bqProjectId _scConnection, - "global_select_limit" J..= _scGlobalSelectLimit - ] - <> case _bqRetryOptions _scConnection of - Just RetryOptions {..} -> - [ "base_delay" J..= diffTimeToMicroSeconds (microseconds _retryBaseDelay), - "retry_limit" J..= _retryNumRetries - ] - Nothing -> [] + J.object + $ [ "service_account" J..= _bqServiceAccount _scConnection, + "datasets" J..= _scDatasets, + "project_id" J..= _bqProjectId _scConnection, + "global_select_limit" J..= _scGlobalSelectLimit + ] + <> case _bqRetryOptions _scConnection of + Just RetryOptions {..} -> + [ "base_delay" J..= diffTimeToMicroSeconds (microseconds _retryBaseDelay), + "retry_limit" J..= _retryNumRetries + ] + Nothing -> [] diff --git a/server/src-lib/Hasura/Backends/BigQuery/ToQuery.hs b/server/src-lib/Hasura/Backends/BigQuery/ToQuery.hs index 9641300aef2..5f864d53ced 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/ToQuery.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/ToQuery.hs @@ -428,8 +428,8 @@ fromArrayAgg :: ArrayAgg -> Printer fromArrayAgg ArrayAgg {..} = SeqPrinter [ "ARRAY_AGG(", - IndentPrinter 10 $ - SepByPrinter + IndentPrinter 10 + $ SepByPrinter " " [ "STRUCT(" <+> IndentPrinter 7 projections <+> ")", fromOrderBys diff --git a/server/src-lib/Hasura/Backends/BigQuery/Types.hs b/server/src-lib/Hasura/Backends/BigQuery/Types.hs index f4b88a3cdc9..dd80cc2ccdc 100644 --- a/server/src-lib/Hasura/Backends/BigQuery/Types.hs +++ b/server/src-lib/Hasura/Backends/BigQuery/Types.hs @@ -331,13 +331,13 @@ data Countable fieldname | DistinctCountable (NonEmpty fieldname) deriving stock (Eq, Ord, Show, Generic, Data, Lift) -deriving anyclass instance FromJSON a => FromJSON (Countable a) +deriving anyclass instance (FromJSON a) => FromJSON (Countable a) -deriving anyclass instance Hashable a => Hashable (Countable a) +deriving anyclass instance (Hashable a) => Hashable (Countable a) -deriving anyclass instance ToJSON a => ToJSON (Countable a) +deriving anyclass instance (ToJSON a) => ToJSON (Countable a) -deriving anyclass instance NFData a => NFData (Countable a) +deriving anyclass instance (NFData a) => NFData (Countable a) data From = FromQualifiedTable (Aliased TableName) @@ -381,13 +381,13 @@ data Aliased a = Aliased } deriving stock (Eq, Ord, Show, Generic, Data, Lift, Functor) -deriving 
anyclass instance FromJSON a => FromJSON (Aliased a) +deriving anyclass instance (FromJSON a) => FromJSON (Aliased a) -deriving anyclass instance Hashable a => Hashable (Aliased a) +deriving anyclass instance (Hashable a) => Hashable (Aliased a) -deriving anyclass instance ToJSON a => ToJSON (Aliased a) +deriving anyclass instance (ToJSON a) => ToJSON (Aliased a) -deriving anyclass instance NFData a => NFData (Aliased a) +deriving anyclass instance (NFData a) => NFData (Aliased a) data TableName = TableName { tableName :: Text, @@ -398,10 +398,12 @@ data TableName = TableName instance HasCodec TableName where codec = - object "BigQueryTableName" $ - TableName - <$> requiredField' "name" .= tableName - <*> requiredField' "dataset" .= tableNameSchema + object "BigQueryTableName" + $ TableName + <$> requiredField' "name" + .= tableName + <*> requiredField' "dataset" + .= tableNameSchema instance FromJSON TableName where parseJSON = @@ -626,8 +628,8 @@ data ScalarType deriving (FromJSON, ToJSON) via AC.Autodocodec ScalarType instance HasCodec ScalarType where - codec = AC.named "ScalarType" $ - boundedEnumCodec \case + codec = AC.named "ScalarType" + $ boundedEnumCodec \case StringScalarType -> "STRING" BytesScalarType -> "BYTES" IntegerScalarType -> "INT64" @@ -708,11 +710,11 @@ data BooleanOperators a | ASTDWithin (DWithinGeogOp a) deriving stock (Eq, Generic, Foldable, Functor, Traversable, Show) -instance NFData a => NFData (BooleanOperators a) +instance (NFData a) => NFData (BooleanOperators a) -instance Hashable a => Hashable (BooleanOperators a) +instance (Hashable a) => Hashable (BooleanOperators a) -instance ToJSON a => J.ToJSONKeyValue (BooleanOperators a) where +instance (ToJSON a) => J.ToJSONKeyValue (BooleanOperators a) where toJSONKeyValue = \case ASTContains a -> ("_st_contains", J.toJSON a) ASTEquals a -> ("_st_equals", J.toJSON a) @@ -731,10 +733,12 @@ data FunctionName = FunctionName instance HasCodec FunctionName where codec = - object "BigQueryFunctionName" $ - FunctionName - <$> requiredField' "name" .= functionName - <*> optionalField' "dataset" .= functionNameSchema + object "BigQueryFunctionName" + $ FunctionName + <$> requiredField' "name" + .= functionName + <*> optionalField' "dataset" + .= functionNameSchema instance FromJSON FunctionName where parseJSON = @@ -769,11 +773,14 @@ data ComputedFieldDefinition = ComputedFieldDefinition instance HasCodec ComputedFieldDefinition where codec = - AC.object "BigQueryComputedFieldDefinition" $ - ComputedFieldDefinition - <$> requiredField' "function" AC..= _bqcfdFunction - <*> optionalField' "return_table" AC..= _bqcfdReturnTable - <*> requiredField' "argument_mapping" AC..= _bqcfdArgumentMapping + AC.object "BigQueryComputedFieldDefinition" + $ ComputedFieldDefinition + <$> requiredField' "function" + AC..= _bqcfdFunction + <*> optionalField' "return_table" + AC..= _bqcfdReturnTable + <*> requiredField' "argument_mapping" + AC..= _bqcfdArgumentMapping instance ToJSON ComputedFieldDefinition where toJSON = J.genericToJSON hasuraJSON {J.omitNothingFields = True} @@ -804,8 +811,8 @@ data ComputedFieldReturn instance ToJSON ComputedFieldReturn where toJSON = - J.genericToJSON $ - J.defaultOptions + J.genericToJSON + $ J.defaultOptions { J.constructorTagModifier = J.snakeCase, J.sumEncoding = J.TaggedObject "type" "info" } @@ -891,9 +898,11 @@ isNumType = getGQLTableName :: TableName -> Either QErr G.Name getGQLTableName (TableName table schema) = do let textName = schema <> "_" <> table - onNothing (G.mkName textName) $ - 
throw400 ValidationFailed $ - "cannot include " <> textName <> " in the GraphQL schema because it is not a valid GraphQL identifier" + onNothing (G.mkName textName) + $ throw400 ValidationFailed + $ "cannot include " + <> textName + <> " in the GraphQL schema because it is not a valid GraphQL identifier" -------------------------------------------------------------------------------- -- Liberal numeric parsers/printers (via JSON) @@ -904,10 +913,10 @@ getGQLTableName (TableName table schema) = do -- These printers may do something more clever later. See PG backend's -- equivalent functions. -liberalIntegralPrinter :: Coercible Text a => a -> J.Value +liberalIntegralPrinter :: (Coercible Text a) => a -> J.Value liberalIntegralPrinter a = J.toJSON (coerce a :: Text) -liberalDecimalPrinter :: Coercible a Text => a -> J.Value +liberalDecimalPrinter :: (Coercible a Text) => a -> J.Value liberalDecimalPrinter a = J.toJSON (coerce a :: Text) -- | Parse from text by simply validating it contains digits; diff --git a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Backend.hs b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Backend.hs index 5d6e888d118..4185bed878d 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Backend.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Backend.hs @@ -95,12 +95,12 @@ instance Backend 'DataConnector where where scalarTypesCapabilities = API.unScalarTypesCapabilities $ API._cScalarTypes _scCapabilities insertOps typeName API.ScalarTypeCapabilities {..} m = - HashMap.foldrWithKey insertOp m $ - API.unAggregateFunctions _stcAggregateFunctions + HashMap.foldrWithKey insertOp m + $ API.unAggregateFunctions _stcAggregateFunctions where insertOp funtionName resultTypeName = - HashMap.insertWith HashMap.union funtionName $ - HashMap.singleton + HashMap.insertWith HashMap.union funtionName + $ HashMap.singleton (DC.mkScalarType _scCapabilities typeName) (DC.mkScalarType _scCapabilities resultTypeName) @@ -176,11 +176,11 @@ data CustomBooleanOperator a = CustomBooleanOperator } deriving stock (Eq, Generic, Foldable, Functor, Traversable, Show) -instance NFData a => NFData (CustomBooleanOperator a) +instance (NFData a) => NFData (CustomBooleanOperator a) -instance Hashable a => Hashable (CustomBooleanOperator a) +instance (Hashable a) => Hashable (CustomBooleanOperator a) -instance J.ToJSON a => ToJSONKeyValue (CustomBooleanOperator a) where +instance (J.ToJSON a) => ToJSONKeyValue (CustomBooleanOperator a) where toJSONKeyValue CustomBooleanOperator {..} = (fromText _cboName, J.toJSON _cboRHS) parseValue :: DC.ScalarType -> J.Value -> J.Parser J.Value diff --git a/server/src-lib/Hasura/Backends/DataConnector/Adapter/ConfigTransform.hs b/server/src-lib/Hasura/Backends/DataConnector/Adapter/ConfigTransform.hs index 139d91d3ad8..47196a76695 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Adapter/ConfigTransform.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Adapter/ConfigTransform.hs @@ -57,7 +57,7 @@ validateConnSourceConfig dcName sourceName configSchemaResponse connSourceConfig validateConfiguration sourceName dcName configSchemaResponse transformedConfig validateConfiguration :: - MonadError QErr m => + (MonadError QErr m) => Common.SourceName -> DC.DataConnectorName -> API.ConfigSchemaResponse -> @@ -65,11 +65,11 @@ validateConfiguration :: m () validateConfiguration sourceName dataConnectorName configSchema config = do let errors = API.validateConfigAgainstConfigSchema configSchema config - unless (null errors) $ - let 
errorsText = Text.unlines (("- " <>) . Text.pack <$> errors) - in throw400 - DataConnectorError - ("Configuration for source " <> Text.dquote sourceName <> " is not valid based on the configuration schema declared by the " <> Text.dquote dataConnectorName <> " data connector agent. Errors:\n" <> errorsText) + unless (null errors) + $ let errorsText = Text.unlines (("- " <>) . Text.pack <$> errors) + in throw400 + DataConnectorError + ("Configuration for source " <> Text.dquote sourceName <> " is not valid based on the configuration schema declared by the " <> Text.dquote dataConnectorName <> " data connector agent. Errors:\n" <> errorsText) additionalFunctions :: Env.Environment -> HashMap Text (J.Value -> Either Kriti.CustomFunctionError J.Value) additionalFunctions env = KFunc.environmentFunctions env diff --git a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Execute.hs b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Execute.hs index 4cbcfbd91ed..4b76688fdb5 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Execute.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Execute.hs @@ -40,7 +40,7 @@ encodePreparedQueryToJsonText = \case QueryRequest req -> encodeToJsonText req MutationRequest req -> encodeToJsonText req -encodeToJsonText :: J.ToJSON a => a -> Text +encodeToJsonText :: (J.ToJSON a) => a -> Text encodeToJsonText = TE.decodeUtf8 . BL.toStrict . J.encode @@ -66,8 +66,8 @@ instance BackendExecute 'DataConnector where mkDBQueryExplain fieldName UserInfo {..} sourceName sourceConfig ir _headers _gName = do queryPlan@Plan {..} <- Plan.mkQueryPlan _uiSession sourceConfig ir transformedSourceConfig <- transformSourceConfig sourceConfig (Just _uiSession) - pure $ - mkAnyBackend @'DataConnector + pure + $ mkAnyBackend @'DataConnector DBStepInfo { dbsiSourceName = sourceName, dbsiSourceConfig = transformedSourceConfig, @@ -123,8 +123,9 @@ buildExplainAction fieldName sourceName SourceConfig {..} Plan {..} = Nothing -> pure . encJFromJValue . toExplainPlan fieldName $ _pRequest Just API.ExplainCapabilities -> do explainResponse <- Client.explain sourceName _scConfig _pRequest - pure . encJFromJValue $ - ExplainPlan + pure + . 
encJFromJValue + $ ExplainPlan fieldName (Just (API._erQuery explainResponse)) (Just (API._erLines explainResponse)) diff --git a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Metadata.hs b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Metadata.hs index e081340fd80..a90acd0e851 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Metadata.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Metadata.hs @@ -118,7 +118,7 @@ arityJsonAggSelect = \case API.FunctionArityMany -> JASMultipleRows functionReturnTypeFromAPI :: - MonadError QErr m => + (MonadError QErr m) => DC.FunctionName -> (Maybe (FunctionReturnType 'DataConnector), API.FunctionReturnType) -> m DC.TableName @@ -126,13 +126,13 @@ functionReturnTypeFromAPI funcGivenName = \case (Just (DC.FunctionReturnsTable t), _) -> pure t (_, API.FunctionReturnsTable t) -> pure (Witch.into t) _ -> - throw400 NotSupported $ - "Function " - <> toTxt funcGivenName - <> " is missing a return type - This should be explicit in metadata, or inferred from agent" + throw400 NotSupported + $ "Function " + <> toTxt funcGivenName + <> " is missing a return type - This should be explicit in metadata, or inferred from agent" buildFunctionInfo' :: - MonadError QErr m => + (MonadError QErr m) => SourceName -> DC.FunctionName -> SystemDefined -> @@ -165,10 +165,10 @@ buildFunctionInfo' (Just (DC.FunctionReturnsTable t), _) -> pure $ SOSourceObj sourceName $ mkAnyBackend $ SOITable @'DataConnector t (_, API.FunctionReturnsTable t) -> pure $ SOSourceObj sourceName $ mkAnyBackend $ SOITable @'DataConnector (Witch.into t) _ -> - throw400 NotSupported $ - "Function " - <> tshow funcName - <> " is missing a return type - This should be explicit in metadata, or inferred from agent" + throw400 NotSupported + $ "Function " + <> tshow funcName + <> " is missing a return type - This should be explicit in metadata, or inferred from agent" inputArguments <- do let argNames = map API._faInputArgName infoArgs @@ -178,15 +178,17 @@ buildFunctionInfo' case _fcSessionArgument of Nothing -> pure $ Seq.fromList $ map IAUserProvided infoArgs Just sessionArgName -> do - unless (any (\arg -> getFuncArgNameTxt sessionArgName == API._faInputArgName arg) infoArgs) $ - throw400 NotSupported $ - "Session argument not mappable: " <> tshow sessionArgName - pure $ - Seq.fromList $ - flip map infoArgs $ \arg -> - if getFuncArgNameTxt sessionArgName == API._faInputArgName arg - then IASessionVariables sessionArgName - else IAUserProvided arg + unless (any (\arg -> getFuncArgNameTxt sessionArgName == API._faInputArgName arg) infoArgs) + $ throw400 NotSupported + $ "Session argument not mappable: " + <> tshow sessionArgName + pure + $ Seq.fromList + $ flip map infoArgs + $ \arg -> + if getFuncArgNameTxt sessionArgName == API._faInputArgName arg + then IASessionVariables sessionArgName + else IAUserProvided arg functionReturnType <- functionReturnTypeFromAPI funcName (_fcResponse, returnType) @@ -227,7 +229,8 @@ resolveBackendInfo' logger = proc (invalidationKeys, optionsMap) -> do ( \dataConnectorName dataConnectorOptions -> do getDataConnectorCapabilitiesIfNeeded -< (invalidationKeys, dataConnectorName, dataConnectorOptions) ) - |) (toHashMap optionsMap) + |) + (toHashMap optionsMap) returnA -< HashMap.catMaybes maybeDataConnectorCapabilities where getDataConnectorCapabilitiesIfNeeded :: @@ -240,7 +243,8 @@ resolveBackendInfo' logger = proc (invalidationKeys, optionsMap) -> do withRecordInconsistency ( bindErrorA -< ExceptT $ getDataConnectorCapabilities 
dataConnectorOptions httpMgr ) - |) metadataObj + |) + metadataObj getDataConnectorCapabilities :: DC.DataConnectorOptions -> @@ -289,8 +293,8 @@ resolveSourceConfig' getDataConnectorInfo :: (MonadError QErr m) => DC.DataConnectorName -> HashMap DC.DataConnectorName DC.DataConnectorInfo -> m DC.DataConnectorInfo getDataConnectorInfo dataConnectorName backendInfo = - onNothing (HashMap.lookup dataConnectorName backendInfo) $ - throw400 DataConnectorError ("Data connector named " <> toTxt dataConnectorName <<> " was not found in the data connector backend info") + onNothing (HashMap.lookup dataConnectorName backendInfo) + $ throw400 DataConnectorError ("Data connector named " <> toTxt dataConnectorName <<> " was not found in the data connector backend info") mkRawColumnType :: API.Capabilities -> API.ColumnType -> RQL.T.C.RawColumnType 'DataConnector mkRawColumnType capabilities = \case @@ -318,8 +322,8 @@ resolveDatabaseMetadata' logger SourceMetadata {_smName} sourceConfig@DC.SourceC { _ptmiOid = OID 0, -- TODO: This is wrong and needs to be fixed. It is used for diffing tables and seeing what's new/deleted/altered, so reusing 0 for all tables is problematic. _ptmiColumns = do API.ColumnInfo {..} <- _tiColumns - pure $ - RQL.T.C.RawColumnInfo + pure + $ RQL.T.C.RawColumnInfo { rciName = Witch.from _ciName, rciPosition = 1, -- TODO: This is very wrong and needs to be fixed. It is used for diffing tables and seeing what's new/deleted/altered, so reusing 1 for all columns is problematic. rciType = mkRawColumnType _scCapabilities _ciType, @@ -353,8 +357,8 @@ resolveDatabaseMetadata' logger SourceMetadata {_smName} sourceConfig@DC.SourceC in HashMap.fromList do infos@(API.FunctionInfo {..} NEList.:| _) <- grouped pure (Witch.into _fiName, FunctionOverloads infos) - in pure $ - DBObjectsIntrospection + in pure + $ DBObjectsIntrospection { _rsTables = tables, _rsFunctions = functions, _rsScalars = mempty @@ -392,8 +396,8 @@ toTableObjectType capabilities API.ObjectTypeDefinition {..} = toTableObjectFieldDefinition API.ColumnInfo {..} = do fieldType <- getFieldType capabilities _ciType fieldName <- G.mkName $ API.unColumnName _ciName - pure $ - RQL.T.T.TableObjectFieldDefinition + pure + $ RQL.T.T.TableObjectFieldDefinition { _tofdColumn = Witch.from _ciName, _tofdName = fieldName, _tofdDescription = G.Description <$> _ciDescription, @@ -406,8 +410,9 @@ toTableObjectType capabilities API.ObjectTypeDefinition {..} = -- metadata. buildForeignKeySet :: API.ForeignKeys -> HashSet (RQL.T.T.ForeignKeyMetadata 'DataConnector) buildForeignKeySet (API.ForeignKeys constraints) = - HashSet.fromList $ - constraints & HashMap.foldMapWithKey @[RQL.T.T.ForeignKeyMetadata 'DataConnector] + HashSet.fromList + $ constraints + & HashMap.foldMapWithKey @[RQL.T.T.ForeignKeyMetadata 'DataConnector] \constraintName API.Constraint {..} -> maybeToList do let columnMapAssocList = HashMap.foldrWithKey' (\(API.ColumnName k) (API.ColumnName v) acc -> (DC.ColumnName k, DC.ColumnName v) : acc) [] _cColumnMapping columnMapping <- NEHashMap.fromList columnMapAssocList @@ -443,8 +448,8 @@ parseBoolExpOperations' rhsParser rootFieldInfoMap fieldInfoMap columnRef value v -> pure . 
AEQ False <$> parseWithTy columnType v parseOperation :: (Text, J.Value) -> m (OpExpG 'DataConnector v) - parseOperation (opStr, val) = withPathK opStr $ - case opStr of + parseOperation (opStr, val) = withPathK opStr + $ case opStr of "_eq" -> parseEq "$eq" -> parseEq "_neq" -> parseNeq @@ -523,12 +528,16 @@ parseBoolExpOperations' rhsParser rootFieldInfoMap fieldInfoMap columnRef value validateRhsColumn :: RQL.T.T.FieldInfoMap (RQL.T.T.FieldInfo 'DataConnector) -> DC.ColumnName -> m DC.ColumnName validateRhsColumn fieldInfoMap' rhsCol = do rhsType <- RQL.T.T.askColumnType fieldInfoMap' rhsCol "column operators can only compare table columns" - when (columnType /= rhsType) $ - throw400 UnexpectedPayload $ - "incompatible column types: " - <> columnRef <<> " has type " - <> columnType <<> ", but " - <> rhsCol <<> " has type " <>> rhsType + when (columnType /= rhsType) + $ throw400 UnexpectedPayload + $ "incompatible column types: " + <> columnRef + <<> " has type " + <> columnType + <<> ", but " + <> rhsCol + <<> " has type " + <>> rhsType pure rhsCol parseCollectableType' :: @@ -564,8 +573,8 @@ buildObjectRelationshipInfo' :: ObjRelDef 'DataConnector -> m (RelInfo 'DataConnector, Seq SchemaDependency) buildObjectRelationshipInfo' sourceConfig sourceName fks tableName objRel = do - ifSupportsLocalRelationships sourceName sourceConfig $ - defaultBuildObjectRelationshipInfo sourceName fks tableName objRel + ifSupportsLocalRelationships sourceName sourceConfig + $ defaultBuildObjectRelationshipInfo sourceName fks tableName objRel buildArrayRelationshipInfo' :: (MonadError QErr m) => @@ -576,8 +585,8 @@ buildArrayRelationshipInfo' :: ArrRelDef 'DataConnector -> m (RelInfo 'DataConnector, Seq SchemaDependency) buildArrayRelationshipInfo' sourceConfig sourceName fks tableName arrRel = - ifSupportsLocalRelationships sourceName sourceConfig $ - defaultBuildArrayRelationshipInfo sourceName fks tableName arrRel + ifSupportsLocalRelationships sourceName sourceConfig + $ defaultBuildArrayRelationshipInfo sourceName fks tableName arrRel ifSupportsLocalRelationships :: (MonadError QErr m) => SourceName -> DC.SourceConfig -> m a -> m a ifSupportsLocalRelationships sourceName DC.SourceConfig {..} action = do diff --git a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Schema.hs b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Schema.hs index 0f9982801ee..41bc4644f36 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Schema.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Schema.hs @@ -134,9 +134,13 @@ buildFunctionQueryFields' :: buildFunctionQueryFields' mkRootFieldName functionName functionInfo tableName = do let -- Implementation modified from buildFunctionQueryFieldsPG funcDesc = - Just . GQL.Description $ - flip fromMaybe (RQL._fiComment functionInfo <|> RQL._fiDescription functionInfo) $ - "execute function " <> functionName <<> " which returns " <>> tableName + Just + . 
GQL.Description + $ flip fromMaybe (RQL._fiComment functionInfo <|> RQL._fiDescription functionInfo) + $ "execute function " + <> functionName + <<> " which returns " + <>> tableName queryResultType = case RQL._fiJsonAggSelect functionInfo of @@ -176,17 +180,17 @@ selectFunction mkRootFieldName fi@RQL.FunctionInfo {..} description = runMaybeT functionArgsParser <- customFunctionArgs fi _fiGQLName _fiGQLArgsName let argsParser = liftA2 (,) functionArgsParser tableArgsParser functionFieldName = RQL.runMkRootFieldName mkRootFieldName _fiGQLName - pure $ - P.subselection functionFieldName description argsParser selectionSetParser - <&> \((funcArgs, tableArgs''), fields) -> - IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = IR.FromFunction _fiSQLName funcArgs Nothing, - IR._asnPerm = GS.S.tablePermissionsInfo selectPermissions, - IR._asnArgs = tableArgs'', - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Just tCase - } + pure + $ P.subselection functionFieldName description argsParser selectionSetParser + <&> \((funcArgs, tableArgs''), fields) -> + IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = IR.FromFunction _fiSQLName funcArgs Nothing, + IR._asnPerm = GS.S.tablePermissionsInfo selectPermissions, + IR._asnArgs = tableArgs'', + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Just tCase + } where returnFunctionParser = case _fiJsonAggSelect of @@ -198,15 +202,15 @@ selectFunction mkRootFieldName fi@RQL.FunctionInfo {..} description = runMaybeT -- | The custom SQL functions' input "args" field parser -- > function_name(args: function_args) customFunctionArgs :: - MonadBuildSchema 'DataConnector r m n => + (MonadBuildSchema 'DataConnector r m n) => RQL.FunctionInfo 'DataConnector -> GQL.Name -> GQL.Name -> GS.C.SchemaT r m (P.InputFieldsParser n (RQL.FunctionArgsExp 'DataConnector (IR.UnpreparedValue 'DataConnector))) customFunctionArgs RQL.FunctionInfo {..} functionName functionArgsName = functionArgs' - ( FTACustomFunction $ - RQL.CustomFunctionNames + ( FTACustomFunction + $ RQL.CustomFunctionNames { cfnFunctionName = functionName, cfnArgsName = functionArgsName } @@ -216,7 +220,7 @@ customFunctionArgs RQL.FunctionInfo {..} functionName functionArgsName = -- NOTE: Modified version of server/src-lib/Hasura/Backends/Postgres/Schema/Select.hs ~ functionArgs functionArgs' :: forall r m n. - MonadBuildSchema 'DataConnector r m n => + (MonadBuildSchema 'DataConnector r m n) => FunctionTrackedAs 'DataConnector -> Seq.Seq (RQL.FunctionInputArgument 'DataConnector) -> GS.C.SchemaT r m (P.InputFieldsParser n (RQL.FunctionArgsExp 'DataConnector (IR.UnpreparedValue 'DataConnector))) @@ -228,41 +232,44 @@ functionArgs' functionTrackedAs (toList -> inputArgs) = do (names, session, optional, mandatory) = mconcat $ snd $ mapAccumL splitArguments 1 inputArgs defaultArguments = RQL.FunctionArgsExp (snd <$> session) HashMap.empty if - | length session > 1 -> - throw500 "there shouldn't be more than one session argument" - | null optional && null mandatory -> - pure $ pure defaultArguments - | otherwise -> do - argumentParsers <- sequenceA $ optional <> mandatory - objectName <- - mkTypename . 
RQL.applyTypeNameCaseIdentifier tCase - <$> case functionTrackedAs of - FTAComputedField computedFieldName _sourceName tableName -> do - tableInfo <- GS.C.askTableInfo tableName - computedFieldGQLName <- GS.C.textToName $ computedFieldNameToText computedFieldName - tableGQLName <- GS.T.getTableIdentifierName @'DataConnector tableInfo - pure $ RQL.mkFunctionArgsTypeName computedFieldGQLName tableGQLName - FTACustomFunction (CustomFunctionNames {cfnArgsName}) -> - pure $ fromCustomName cfnArgsName - let fieldName = Name._args - fieldDesc = - case functionTrackedAs of - FTAComputedField computedFieldName _sourceName tableName -> - GQL.Description $ - "input parameters for computed field " - <> computedFieldName <<> " defined on table " <>> tableName - FTACustomFunction (CustomFunctionNames {cfnFunctionName}) -> - GQL.Description $ "input parameters for function " <>> cfnFunctionName - objectParser = - P.object objectName Nothing (sequenceA argumentParsers) `P.bind` \arguments -> do - let foundArguments = HashMap.fromList $ catMaybes arguments <> session - argsWithNames = zip names inputArgs + | length session > 1 -> + throw500 "there shouldn't be more than one session argument" + | null optional && null mandatory -> + pure $ pure defaultArguments + | otherwise -> do + argumentParsers <- sequenceA $ optional <> mandatory + objectName <- + mkTypename + . RQL.applyTypeNameCaseIdentifier tCase + <$> case functionTrackedAs of + FTAComputedField computedFieldName _sourceName tableName -> do + tableInfo <- GS.C.askTableInfo tableName + computedFieldGQLName <- GS.C.textToName $ computedFieldNameToText computedFieldName + tableGQLName <- GS.T.getTableIdentifierName @'DataConnector tableInfo + pure $ RQL.mkFunctionArgsTypeName computedFieldGQLName tableGQLName + FTACustomFunction (CustomFunctionNames {cfnArgsName}) -> + pure $ fromCustomName cfnArgsName + let fieldName = Name._args + fieldDesc = + case functionTrackedAs of + FTAComputedField computedFieldName _sourceName tableName -> + GQL.Description + $ "input parameters for computed field " + <> computedFieldName + <<> " defined on table " + <>> tableName + FTACustomFunction (CustomFunctionNames {cfnFunctionName}) -> + GQL.Description $ "input parameters for function " <>> cfnFunctionName + objectParser = + P.object objectName Nothing (sequenceA argumentParsers) `P.bind` \arguments -> do + let foundArguments = HashMap.fromList $ catMaybes arguments <> session + argsWithNames = zip names inputArgs - -- All args have names in DC for now - named <- HashMap.fromList . catMaybes <$> traverse (namedArgument foundArguments) argsWithNames - pure $ RQL.FunctionArgsExp [] named + -- All args have names in DC for now + named <- HashMap.fromList . catMaybes <$> traverse (namedArgument foundArguments) argsWithNames + pure $ RQL.FunctionArgsExp [] named - pure $ P.field fieldName (Just fieldDesc) objectParser + pure $ P.field fieldName (Just fieldDesc) objectParser where sessionPlaceholder :: DC.ArgumentExp (IR.UnpreparedValue b) sessionPlaceholder = DC.AEInput IR.UVSession @@ -466,23 +473,26 @@ columnParser' :: GS.C.SchemaT r m (P.Parser 'P.Both n (IR.ValueWithOrigin (RQL.ColumnValue 'DataConnector))) columnParser' columnType nullability = case columnType of RQL.ColumnScalar scalarType@(DC.ScalarType name graphQLType) -> - P.memoizeOn 'columnParser' (scalarType, nullability) $ - GS.C.peelWithOrigin . fmap (RQL.ColumnValue columnType) . 
possiblyNullable' scalarType nullability - <$> do - gqlName <- - GQL.mkName name - `onNothing` throw400 ValidationFailed ("The column type name " <> name <<> " is not a valid GraphQL name") - pure $ case graphQLType of - Nothing -> P.jsonScalar gqlName (Just "A custom scalar type") - Just DC.GraphQLInt -> (J.Number . fromIntegral) <$> P.namedInt gqlName - Just DC.GraphQLFloat -> (J.Number . fromFloatDigits) <$> P.namedFloat gqlName - Just DC.GraphQLString -> J.String <$> P.namedString gqlName - Just DC.GraphQLBoolean -> J.Bool <$> P.namedBoolean gqlName - Just DC.GraphQLID -> J.String <$> P.namedIdentifier gqlName + P.memoizeOn 'columnParser' (scalarType, nullability) + $ GS.C.peelWithOrigin + . fmap (RQL.ColumnValue columnType) + . possiblyNullable' scalarType nullability + <$> do + gqlName <- + GQL.mkName name + `onNothing` throw400 ValidationFailed ("The column type name " <> name <<> " is not a valid GraphQL name") + pure $ case graphQLType of + Nothing -> P.jsonScalar gqlName (Just "A custom scalar type") + Just DC.GraphQLInt -> (J.Number . fromIntegral) <$> P.namedInt gqlName + Just DC.GraphQLFloat -> (J.Number . fromFloatDigits) <$> P.namedFloat gqlName + Just DC.GraphQLString -> J.String <$> P.namedString gqlName + Just DC.GraphQLBoolean -> J.Bool <$> P.namedBoolean gqlName + Just DC.GraphQLID -> J.String <$> P.namedIdentifier gqlName RQL.ColumnEnumReference (RQL.EnumReference tableName enumValues customTableName) -> case nonEmpty (HashMap.toList enumValues) of Just enumValuesList -> - GS.C.peelWithOrigin . fmap (RQL.ColumnValue columnType) + GS.C.peelWithOrigin + . fmap (RQL.ColumnValue columnType) <$> enumParser' tableName enumValuesList customTableName nullability Nothing -> throw400 ValidationFailed "empty enum values" @@ -510,7 +520,8 @@ orderByOperators' :: RQL.SourceInfo 'DataConnector -> NamingCase -> (GQL.Name, N orderByOperators' RQL.SourceInfo {_siConfiguration} _tCase = let dcName = DC._scDataConnectorName _siConfiguration orderBy = GQL.addSuffixes (DC.unDataConnectorName dcName) [$$(GQL.litSuffix "_order_by")] - in (orderBy,) $ + in (orderBy,) + $ -- NOTE: NamingCase is not being used here as we don't support naming conventions for this DB NE.fromList [ ( define $$(GQL.litName "asc") "in ascending order", @@ -537,28 +548,28 @@ comparisonExps' columnType = do typedParser <- columnParser' columnType (GQL.Nullability False) let name = GQL.addSuffixes (P.getName typedParser) [$$(GQL.litSuffix "_"), GQL.convertNameToSuffix (DC.unDataConnectorName dataConnectorName), $$(GQL.litSuffix "_comparison_exp")] desc = - GQL.Description $ - "Boolean expression to compare columns of type " - <> P.getName typedParser - <<> ". All fields are combined with logical 'AND'." + GQL.Description + $ "Boolean expression to compare columns of type " + <> P.getName typedParser + <<> ". All fields are combined with logical 'AND'." columnListParser = fmap IR.openValueOrigin <$> P.list typedParser customOperators <- (fmap . fmap . 
fmap) IR.ABackendSpecific <$> mkCustomOperators sourceInfo tCase collapseIfNull (P.getName typedParser) - pure $ - P.object name (Just desc) $ - fmap catMaybes $ - sequenceA $ - concat - [ GS.BE.equalityOperators - tCase - collapseIfNull - (IR.mkParameter <$> typedParser) - (mkListLiteral <$> columnListParser), - GS.BE.comparisonOperators - tCase - collapseIfNull - (IR.mkParameter <$> typedParser), - customOperators - ] + pure + $ P.object name (Just desc) + $ fmap catMaybes + $ sequenceA + $ concat + [ GS.BE.equalityOperators + tCase + collapseIfNull + (IR.mkParameter <$> typedParser) + (mkListLiteral <$> columnListParser), + GS.BE.comparisonOperators + tCase + collapseIfNull + (IR.mkParameter <$> typedParser), + customOperators + ] where mkListLiteral :: [RQL.ColumnValue 'DataConnector] -> IR.UnpreparedValue 'DataConnector mkListLiteral columnValues = @@ -584,9 +595,12 @@ comparisonExps' columnType = do GS.C.SchemaT r m (P.InputFieldsParser n (Maybe (CustomBooleanOperator (IR.UnpreparedValue 'DataConnector)))) mkCustomOperator tCase collapseIfNull (operatorName, argType) = do argParser <- mkArgParser argType - pure $ - GS.BE.mkBoolOperator tCase collapseIfNull (fromCustomName operatorName) Nothing $ - CustomBooleanOperator (GQL.unName operatorName) . Just . Right <$> argParser + pure + $ GS.BE.mkBoolOperator tCase collapseIfNull (fromCustomName operatorName) Nothing + $ CustomBooleanOperator (GQL.unName operatorName) + . Just + . Right + <$> argParser mkArgParser :: DC.ScalarType -> GS.C.SchemaT r m (P.Parser 'P.Both n (IR.UnpreparedValue 'DataConnector)) mkArgParser argType = @@ -611,12 +625,12 @@ tableArgs' tableInfo = do _saOffset = offsetArg, _saDistinct = Nothing } - pure $ - mkSelectArgs - <$> whereParser - <*> orderByParser - <*> GS.S.tableLimitArg - <*> GS.S.tableOffsetArg + pure + $ mkSelectArgs + <$> whereParser + <*> orderByParser + <*> GS.S.tableLimitArg + <*> GS.S.tableOffsetArg countTypeInput' :: (MonadParse n) => diff --git a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Types.hs b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Types.hs index 2dd25d23735..10fba12654f 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Types.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Types.hs @@ -88,11 +88,14 @@ instance HasCodec ConnSourceConfig where codec = AC.bimapCodec dec enc $ AC.possiblyJointEitherCodec withValueProp inlineConfig where withValueProp = - AC.object "DataConnectorConnSourceConfig" $ - ConnSourceConfig - <$> requiredField' "value" AC..= value - <*> optionalField' "template" AC..= template - <*> optionalField' "timeout" AC..= timeout + AC.object "DataConnectorConnSourceConfig" + $ ConnSourceConfig + <$> requiredField' "value" + AC..= value + <*> optionalField' "template" + AC..= template + <*> optionalField' "timeout" + AC..= timeout inlineConfig = codec @API.Config dec (Left config) = Right config @@ -119,9 +122,9 @@ sourceTimeoutMicroseconds = \case instance HasCodec SourceTimeout where codec = - AC.dimapCodec dec enc $ - AC.disjointEitherCodec secondsCodec $ - AC.disjointEitherCodec millisecondsCodec microsecondsCodec + AC.dimapCodec dec enc + $ AC.disjointEitherCodec secondsCodec + $ AC.disjointEitherCodec millisecondsCodec microsecondsCodec where secondsCodec = AC.object "DataConnectorSourceTimeoutSeconds" $ requiredFieldWith' "seconds" AC.scientificCodec millisecondsCodec = AC.object "DataConnectorSourceTimeoutMilliseconds" $ requiredFieldWith' "milliseconds" AC.scientificCodec @@ -165,13 +168,20 @@ data SourceConfig = 
SourceConfig instance Eq SourceConfig where SourceConfig ep1 capabilities1 config1 template1 _ timeout1 dcName1 env1 == SourceConfig ep2 capabilities2 config2 template2 _ timeout2 dcName2 env2 = - ep1 == ep2 - && capabilities1 == capabilities2 - && config1 == config2 - && template1 == template2 - && timeout1 == timeout2 - && dcName1 == dcName2 - && env1 == env2 + ep1 + == ep2 + && capabilities1 + == capabilities2 + && config1 + == config2 + && template1 + == template2 + && timeout1 + == timeout2 + && dcName1 + == dcName2 + && env1 + == env2 instance Show SourceConfig where show _ = "SourceConfig" @@ -198,9 +208,9 @@ data FunctionReturnType instance AC.HasCodec FunctionReturnType where codec = - AC.named "FunctionReturnType" $ - AC.object "FunctionReturnType" $ - AC.discriminatedUnionCodec "type" enc dec + AC.named "FunctionReturnType" + $ AC.object "FunctionReturnType" + $ AC.discriminatedUnionCodec "type" enc dec where typeField = pure () tableField = AC.requiredField' "table" @@ -223,10 +233,12 @@ data DataConnectorOptions = DataConnectorOptions instance HasCodec DataConnectorOptions where codec = - AC.object "DataConnectorOptions" $ - DataConnectorOptions - <$> requiredFieldWith' "uri" baseUrlCodec AC..= _dcoUri - <*> optionalField' "display_name" AC..= _dcoDisplayName + AC.object "DataConnectorOptions" + $ DataConnectorOptions + <$> requiredFieldWith' "uri" baseUrlCodec + AC..= _dcoUri + <*> optionalField' "display_name" + AC..= _dcoDisplayName instance FromJSON DataConnectorOptions where parseJSON = genericParseJSON hasuraJSON @@ -265,7 +277,8 @@ instance HasCodec TableName where instance FromJSON TableName where parseJSON value = - TableName <$> J.parseJSON value + TableName + <$> J.parseJSON value -- Fallback parsing of a single string to support older metadata <|> J.withText "TableName" (\text -> pure . 
TableName $ text :| []) value diff --git a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Types/Mutations.hs b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Types/Mutations.hs index 4f414dd61d8..d354941cf28 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Adapter/Types/Mutations.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Adapter/Types/Mutations.hs @@ -30,11 +30,11 @@ data DataConnectorUpdateVariant v = SingleBatch (UpdateBatch 'DataConnector UpdateOperator v) | MultipleBatches [UpdateBatch 'DataConnector UpdateOperator v] -deriving stock instance Backend 'DataConnector => Functor DataConnectorUpdateVariant +deriving stock instance (Backend 'DataConnector) => Functor DataConnectorUpdateVariant -deriving stock instance Backend 'DataConnector => Foldable DataConnectorUpdateVariant +deriving stock instance (Backend 'DataConnector) => Foldable DataConnectorUpdateVariant -deriving stock instance Backend 'DataConnector => Traversable DataConnectorUpdateVariant +deriving stock instance (Backend 'DataConnector) => Traversable DataConnectorUpdateVariant -------------------------------------------------------------------------------- diff --git a/server/src-lib/Hasura/Backends/DataConnector/Agent/Client.hs b/server/src-lib/Hasura/Backends/DataConnector/Agent/Client.hs index 8e60e6c4791..007ab3558b1 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Agent/Client.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Agent/Client.hs @@ -53,7 +53,7 @@ newtype AgentClientT m a = AgentClientT (ReaderT AgentClientContext m a) runAgentClientT :: AgentClientT m a -> AgentClientContext -> m a runAgentClientT (AgentClientT action) ctx = runReaderT action ctx -askClientContext :: Monad m => AgentClientT m AgentClientContext +askClientContext :: (Monad m) => AgentClientT m AgentClientContext askClientContext = AgentClientT ask instance (MonadIO m, MonadTrace m, MonadError QErr m) => RunClient (AgentClientT m) where diff --git a/server/src-lib/Hasura/Backends/DataConnector/Logging.hs b/server/src-lib/Hasura/Backends/DataConnector/Logging.hs index a26e5f0dddc..2f01d59279a 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Logging.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Logging.hs @@ -47,8 +47,8 @@ instance ToEngineLog AgentCommunicationLog Hasura where (LevelDebug, ELTDataConnectorLog, logJson) where logJson = - object $ - catMaybes + object + $ catMaybes [ ("requestMethod" .=) . _rliRequestMethod <$> _aclRequest, ("requestUri" .=) . _rliRequestUri <$> _aclRequest, ("requestHeaders" .=) . _rliRequestHeaders <$> _aclRequest, diff --git a/server/src-lib/Hasura/Backends/DataConnector/Plan/Common.hs b/server/src-lib/Hasura/Backends/DataConnector/Plan/Common.hs index b461b30ff49..381d48ad3ed 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Plan/Common.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Plan/Common.hs @@ -165,7 +165,7 @@ data Cardinality -------------------------------------------------------------------------------- prepareLiteral :: - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> UnpreparedValue 'DataConnector -> m Literal @@ -181,7 +181,7 @@ prepareLiteral sessionVariables = \case parseSessionVariable :: forall m. 
- MonadError QErr m => + (MonadError QErr m) => SessionVariable -> SessionVarType 'DataConnector -> Text -> @@ -280,7 +280,7 @@ removeAlwaysFalseExpression = \case other -> Just other translateOp :: - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> API.ColumnName -> API.ScalarType -> diff --git a/server/src-lib/Hasura/Backends/DataConnector/Plan/MutationPlan.hs b/server/src-lib/Hasura/Backends/DataConnector/Plan/MutationPlan.hs index 132c4f14fb6..29c034ef436 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Plan/MutationPlan.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Plan/MutationPlan.hs @@ -76,7 +76,7 @@ recordTableInsertSchema tableName tableInsertSchema = -------------------------------------------------------------------------------- mkMutationPlan :: - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> MutationDB 'DataConnector Void (UnpreparedValue 'DataConnector) -> m (Plan API.MutationRequest API.MutationResponse) @@ -85,7 +85,7 @@ mkMutationPlan sessionVariables mutationDB = do pure $ Plan request (reshapeResponseToMutationGqlShape mutationDB) translateMutationDB :: - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> MutationDB 'DataConnector Void (UnpreparedValue 'DataConnector) -> m API.MutationRequest @@ -97,8 +97,8 @@ translateMutationDB sessionVariables = \case & HashMap.toList & fmap (\(tableName, TableInsertSchema {..}) -> API.TableInsertSchema tableName _tisPrimaryKey _tisFields) let apiTableRelationships = Set.fromList $ uncurry API.TableRelationships <$> rights (map eitherKey (HashMap.toList (unTableRelationships tableRelationships))) - pure $ - API.MutationRequest + pure + $ API.MutationRequest { _mrTableRelationships = apiTableRelationships, _mrInsertSchema = Set.fromList apiTableInsertSchema, _mrOperations = [API.InsertOperation insertOperation] @@ -106,10 +106,11 @@ translateMutationDB sessionVariables = \case MDBUpdate update -> do (updateOperations, tableRelationships) <- CPS.runWriterT $ translateUpdate sessionVariables update let apiTableRelationships = - Set.fromList $ - uncurry API.TableRelationships <$> rights (map eitherKey (HashMap.toList (unTableRelationships tableRelationships))) - pure $ - API.MutationRequest + Set.fromList + $ uncurry API.TableRelationships + <$> rights (map eitherKey (HashMap.toList (unTableRelationships tableRelationships))) + pure + $ API.MutationRequest { _mrTableRelationships = apiTableRelationships, _mrInsertSchema = mempty, _mrOperations = API.UpdateOperation <$> updateOperations @@ -117,10 +118,11 @@ translateMutationDB sessionVariables = \case MDBDelete delete -> do (deleteOperation, tableRelationships) <- CPS.runWriterT $ translateDelete sessionVariables delete let apiTableRelationships = - Set.fromList $ - uncurry API.TableRelationships <$> rights (map eitherKey (HashMap.toList (unTableRelationships tableRelationships))) - pure $ - API.MutationRequest + Set.fromList + $ uncurry API.TableRelationships + <$> rights (map eitherKey (HashMap.toList (unTableRelationships tableRelationships))) + pure + $ API.MutationRequest { _mrTableRelationships = apiTableRelationships, _mrInsertSchema = mempty, _mrOperations = [API.DeleteOperation deleteOperation] @@ -133,7 +135,7 @@ eitherKey (FunctionNameKey f, x) = Left (f, x) eitherKey (TableNameKey t, x) = Right (t, x) translateInsert :: - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> AnnotatedInsert 'DataConnector Void (UnpreparedValue 'DataConnector) -> CPS.WriterT (TableRelationships, 
TableInsertSchemas) m API.InsertMutationOperation @@ -142,8 +144,8 @@ translateInsert sessionVariables AnnotatedInsert {_aiData = AnnotatedInsertData rows <- lift $ traverse (translateInsertRow sessionVariables tableName _aiTableColumns _aiPresetValues) _aiInsertObject postInsertCheck <- translateBoolExpToExpression sessionVariables (TableNameKey tableName) insertCheckCondition returningFields <- translateMutationOutputToReturningFields sessionVariables tableName _aiOutput - pure $ - API.InsertMutationOperation + pure + $ API.InsertMutationOperation { API._imoTable = tableName, API._imoRows = rows, API._imoPostInsertCheck = postInsertCheck, @@ -181,7 +183,7 @@ captureTableInsertSchema tableName tableColumns primaryKey ExtraTableMetadata {. recordTableInsertSchema tableName $ TableInsertSchema primaryKey' fieldSchemas translateInsertRow :: - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> API.TableName -> [ColumnInfo 'DataConnector] -> @@ -221,7 +223,7 @@ translateInsertRow sessionVariables tableName tableColumns defaultColumnValues i & HashMap.fromList translateUpdate :: - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> AnnotatedUpdateG 'DataConnector Void (UnpreparedValue 'DataConnector) -> CPS.WriterT TableRelationships m [API.UpdateMutationOperation] @@ -231,7 +233,7 @@ translateUpdate sessionVariables annUpdate@AnnotatedUpdateG {..} = do MultipleBatches batches -> traverse (translateUpdateBatch sessionVariables annUpdate) batches translateUpdateBatch :: - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> AnnotatedUpdateG 'DataConnector Void (UnpreparedValue 'DataConnector) -> UpdateBatch 'DataConnector UpdateOperator (UnpreparedValue 'DataConnector) -> @@ -242,8 +244,8 @@ translateUpdateBatch sessionVariables AnnotatedUpdateG {..} UpdateBatch {..} = d postUpdateCheck <- translateBoolExpToExpression sessionVariables (TableNameKey tableName) _auCheck returningFields <- translateMutationOutputToReturningFields sessionVariables tableName _auOutput - pure $ - API.UpdateMutationOperation + pure + $ API.UpdateMutationOperation { API._umoTable = tableName, API._umoWhere = whereExp, API._umoUpdates = updates, @@ -255,7 +257,7 @@ translateUpdateBatch sessionVariables AnnotatedUpdateG {..} UpdateBatch {..} = d translateUpdateOperations :: forall m. - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> HashMap ColumnName (UpdateOperator (UnpreparedValue 'DataConnector)) -> m (Set API.RowUpdate) @@ -277,15 +279,15 @@ translateUpdateOperations sessionVariables columnUpdates = ArrayLiteral _scalarType _values -> throw400 NotSupported "translateUpdateOperations: Array literals are not supported as column update values" translateDelete :: - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> AnnDelG 'DataConnector Void (UnpreparedValue 'DataConnector) -> CPS.WriterT TableRelationships m API.DeleteMutationOperation translateDelete sessionVariables AnnDel {..} = do whereExp <- translateBoolExpToExpression sessionVariables (TableNameKey tableName) (BoolAnd [permissionFilter, whereClause]) returningFields <- translateMutationOutputToReturningFields sessionVariables tableName _adOutput - pure $ - API.DeleteMutationOperation + pure + $ API.DeleteMutationOperation { API._dmoTable = tableName, API._dmoWhere = whereExp, API._dmoReturningFields = HashMap.mapKeys (API.FieldName . 
getFieldNameTxt) returningFields @@ -335,7 +337,7 @@ translateMutField sessionVariables tableName fieldName = \case -------------------------------------------------------------------------------- reshapeResponseToMutationGqlShape :: - MonadError QErr m => + (MonadError QErr m) => MutationDB 'DataConnector Void v -> API.MutationResponse -> m J.Encoding @@ -356,7 +358,7 @@ reshapeResponseToMutationGqlShape mutationDb mutationResponse = do throw400 NotSupported "reshapeResponseToMutationGqlShape: function mutations not implemented for the Data Connector backend." reshapeOutputForSingleBatchOperation :: - MonadError QErr m => + (MonadError QErr m) => MutationOutputG 'DataConnector Void v -> API.MutationResponse -> m J.Encoding @@ -367,13 +369,13 @@ reshapeOutputForSingleBatchOperation mutationOutput API.MutationResponse {..} = reshapeMutationOutput mutationOutput mutationOperationResult reshapeOutputForMultipleBatchOperation :: - MonadError QErr m => + (MonadError QErr m) => [MutationOutputG 'DataConnector Void v] -> API.MutationResponse -> m J.Encoding reshapeOutputForMultipleBatchOperation mutationOutputs API.MutationResponse {..} = do - unless (operationResultCount >= requiredResultCount) $ - throw500 ("Data Connector agent returned " <> tshow operationResultCount <> " mutation operation results where at least " <> tshow requiredResultCount <> " was expected") + unless (operationResultCount >= requiredResultCount) + $ throw500 ("Data Connector agent returned " <> tshow operationResultCount <> " mutation operation results where at least " <> tshow requiredResultCount <> " was expected") reshapedResults <- zip mutationOutputs _mrOperationResults @@ -385,7 +387,7 @@ reshapeOutputForMultipleBatchOperation mutationOutputs API.MutationResponse {..} operationResultCount = length _mrOperationResults reshapeMutationOutput :: - MonadError QErr m => + (MonadError QErr m) => MutationOutputG 'DataConnector Void v -> API.MutationOperationResults -> m J.Encoding @@ -395,7 +397,7 @@ reshapeMutationOutput mutationOutput mutationOperationResults = MOutMultirowFields mutFields -> reshapeMutFields mutFields mutationOperationResults reshapeReturningRows :: - MonadError QErr m => + (MonadError QErr m) => Cardinality -> FieldPrefix -> AnnFieldsG 'DataConnector Void v -> @@ -416,7 +418,7 @@ reshapeReturningRows cardinality fieldNamePrefix annFields API.MutationOperation rows = fromMaybe mempty _morReturning reshapeMutFields :: - MonadError QErr m => + (MonadError QErr m) => MutFldsG 'DataConnector Void v -> API.MutationOperationResults -> m J.Encoding diff --git a/server/src-lib/Hasura/Backends/DataConnector/Plan/QueryPlan.hs b/server/src-lib/Hasura/Backends/DataConnector/Plan/QueryPlan.hs index 709a3804631..92915c661d4 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Plan/QueryPlan.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Plan/QueryPlan.hs @@ -63,7 +63,7 @@ instance Monoid FieldsAndAggregates where -- | Map a 'QueryDB 'DataConnector' term into a 'Plan' mkQueryPlan :: forall m. - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> SourceConfig -> QueryDB 'DataConnector Void (UnpreparedValue 'DataConnector) -> @@ -83,7 +83,7 @@ mkQueryPlan sessionVariables (SourceConfig {}) ir = do translateAnnSimpleSelectToQueryRequest :: forall m. 
- MonadError QErr m => + (MonadError QErr m) => SessionVariables -> AnnSimpleSelectG 'DataConnector Void (UnpreparedValue 'DataConnector) -> m API.QueryRequest @@ -92,7 +92,7 @@ translateAnnSimpleSelectToQueryRequest sessionVariables simpleSelect = translateAnnAggregateSelectToQueryRequest :: forall m. - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> AnnAggregateSelectG 'DataConnector Void (UnpreparedValue 'DataConnector) -> m API.QueryRequest @@ -101,7 +101,7 @@ translateAnnAggregateSelectToQueryRequest sessionVariables aggregateSelect = translateAnnSelectToQueryRequest :: forall m fieldType. - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> (TableRelationshipsKey -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT TableRelationships m FieldsAndAggregates) -> AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector) -> @@ -115,8 +115,8 @@ translateAnnSelectToQueryRequest sessionVariables translateFieldsAndAggregates s (query, TableRelationships tableRelationships) <- CPS.runWriterT (translateAnnSelect sessionVariables translateFieldsAndAggregates (TableNameKey (Witch.into tableName)) selectG) let relationships = mkRelationships <$> HashMap.toList tableRelationships - pure $ - API.QRTable + pure + $ API.QRTable API.TableRequest { _trTable = Witch.into tableName, _trRelationships = Set.fromList relationships, @@ -128,8 +128,8 @@ translateAnnSelectToQueryRequest sessionVariables translateFieldsAndAggregates s (query, TableRelationships tableRelationships) <- CPS.runWriterT (translateAnnSelect sessionVariables translateFieldsAndAggregates (FunctionNameKey (Witch.into functionName)) selectG) let relationships = mkRelationships <$> HashMap.toList tableRelationships - pure $ - API.QRFunction + pure + $ API.QRFunction API.FunctionRequest { _frFunction = Witch.into functionName, _frRelationships = Set.fromList relationships, @@ -185,8 +185,8 @@ translateAnnSelect sessionVariables translateFieldsAndAggregates entityName sele _qAggregates = mapFieldNameHashMap <$> _faaAggregates, _qAggregatesLimit = _saLimit (_asnArgs selectG) <* _faaAggregates, -- Only include the aggregates limit if we actually have aggregrates _qLimit = - fmap getMin $ - foldMap + fmap getMin + $ foldMap (fmap Min) [ _saLimit (_asnArgs selectG), _tpLimit (_asnPerm selectG) @@ -323,7 +323,8 @@ translateAnnField :: CPS.WriterT writerOutput m (Maybe API.Field) translateAnnField sessionVariables sourceTableName = \case AFNestedObject nestedObj -> - Just . API.NestedObjField (Witch.from $ _anosColumn nestedObj) + Just + . API.NestedObjField (Witch.from $ _anosColumn nestedObj) <$> translateNestedObjectSelect sessionVariables sourceTableName nestedObj AFNestedArray _ (ANASSimple field) -> fmap mkArrayField <$> translateAnnField sessionVariables sourceTableName field @@ -353,8 +354,10 @@ translateAnnField sessionVariables sourceTableName = \case _rColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList (_aarColumnMapping objRel) } - pure . Just . API.RelField $ - API.RelationshipField + pure + . Just + . API.RelField + $ API.RelationshipField relationshipName ( API.Query { _qFields = Just $ mapFieldNameHashMap fields, @@ -406,8 +409,9 @@ translateArrayRelationSelect sessionVariables sourceName translateFieldsAndAggre _rColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList (_aarColumnMapping arrRel) } - pure . API.RelField $ - API.RelationshipField + pure + . 
API.RelField + $ API.RelationshipField relationshipName query @@ -437,8 +441,8 @@ translateTableAggregateField sessionVariables sourceName fieldName = \case TAFAgg aggregateFields -> do let fieldNamePrefix = prefixWith fieldName translatedAggregateFields <- lift $ mconcat <$> traverse (uncurry (translateAggregateField fieldNamePrefix)) aggregateFields - pure $ - FieldsAndAggregates + pure + $ FieldsAndAggregates Nothing (Just translatedAggregateFields) TAFNodes _ fields -> @@ -450,7 +454,7 @@ translateTableAggregateField sessionVariables sourceName fieldName = \case pure mempty translateAggregateField :: - MonadError QErr m => + (MonadError QErr m) => FieldPrefix -> FieldName -> AggregateField 'DataConnector (UnpreparedValue 'DataConnector) -> @@ -485,7 +489,7 @@ translateAggregateField fieldPrefix fieldName = \case -- to us pure mempty -translateSingleColumnAggregateFunction :: MonadError QErr m => Text -> m API.SingleColumnAggregateFunction +translateSingleColumnAggregateFunction :: (MonadError QErr m) => Text -> m API.SingleColumnAggregateFunction translateSingleColumnAggregateFunction functionName = fmap API.SingleColumnAggregateFunction (G.mkName functionName) `onNothing` throw500 ("translateSingleColumnAggregateFunction: Invalid aggregate function encountered: " <> functionName) @@ -515,7 +519,7 @@ translateNestedObjectSelect sessionVariables relationshipKey selectG = do -------------------------------------------------------------------------------- reshapeResponseToQueryShape :: - MonadError QErr m => + (MonadError QErr m) => QueryDB 'DataConnector Void v -> API.QueryResponse -> m J.Encoding @@ -526,7 +530,7 @@ reshapeResponseToQueryShape queryDb response = QDBAggregation aggregateSelect -> reshapeTableAggregateFields (_asnFields aggregateSelect) response reshapeSimpleSelectRows :: - MonadError QErr m => + (MonadError QErr m) => Cardinality -> AnnFieldsG 'DataConnector Void v -> API.QueryResponse -> @@ -546,7 +550,7 @@ reshapeSimpleSelectRows cardinality fields API.QueryResponse {..} = rows = fromMaybe mempty _qrRows reshapeTableAggregateFields :: - MonadError QErr m => + (MonadError QErr m) => TableAggregateFieldsG 'DataConnector Void v -> API.QueryResponse -> m J.Encoding @@ -567,7 +571,7 @@ reshapeTableAggregateFields tableAggregateFields API.QueryResponse {..} = do responseAggregates = fromMaybe mempty _qrAggregates reshapeAggregateFields :: - MonadError QErr m => + (MonadError QErr m) => FieldPrefix -> AggregateFields 'DataConnector v -> HashMap API.FieldName J.Value -> @@ -602,7 +606,7 @@ reshapeAggregateFields fieldPrefix aggregateFields responseAggregates = do pure $ encodeAssocListAsObject reshapedFields reshapeAnnFields :: - MonadError QErr m => + (MonadError QErr m) => FieldPrefix -> AnnFieldsG 'DataConnector Void v -> HashMap API.FieldName API.FieldValue -> @@ -619,7 +623,7 @@ reshapeAnnFields fieldNamePrefix fields responseRow = do pure $ encodeAssocListAsObject reshapedFields reshapeField :: - MonadError QErr m => + (MonadError QErr m) => AnnFieldG 'DataConnector Void v -> m API.FieldValue -> -- This lookup is lazy (behind the monad) so that we can _not_ do it when we've got an AFExpression m J.Encoding @@ -660,7 +664,7 @@ reshapeField field responseFieldValue = AFExpression txt -> pure $ JE.text txt reshapeAnnRelationSelect :: - MonadError QErr m => + (MonadError QErr m) => (Fields (fieldType v) -> API.QueryResponse -> m J.Encoding) -> AnnRelationSelectG 'DataConnector (AnnSelectG 'DataConnector fieldType v) -> API.FieldValue -> diff --git 
a/server/src-lib/Hasura/Backends/DataConnector/Plan/RemoteRelationshipPlan.hs b/server/src-lib/Hasura/Backends/DataConnector/Plan/RemoteRelationshipPlan.hs index 2ee1cfffcf5..def11500dcd 100644 --- a/server/src-lib/Hasura/Backends/DataConnector/Plan/RemoteRelationshipPlan.hs +++ b/server/src-lib/Hasura/Backends/DataConnector/Plan/RemoteRelationshipPlan.hs @@ -33,7 +33,7 @@ import Witch qualified mkRemoteRelationshipPlan :: forall m. - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> SourceConfig -> -- | List of join json objects, each of which contains IDs to be laterally-joined against @@ -84,23 +84,23 @@ mkRemoteRelationshipPlan sessionVariables _sourceConfig joinIds joinIdsSchema ar whereClause <- translateBoolExpToExpression sessionVariables (TableNameKey tableName) _aosTargetFilter pure (fields, whereClause) let apiTableRelationships = Set.fromList $ tableRelationshipsToList tableRelationships - pure $ - API.QRTable $ - API.TableRequest - { _trTable = tableName, - _trRelationships = apiTableRelationships, - _trQuery = - API.Query - { _qFields = Just $ mapFieldNameHashMap fields, - _qAggregates = Nothing, - _qAggregatesLimit = Nothing, - _qLimit = Nothing, - _qOffset = Nothing, - _qWhere = whereClause, - _qOrderBy = Nothing - }, - _trForeach = Just foreachRowFilter - } + pure + $ API.QRTable + $ API.TableRequest + { _trTable = tableName, + _trRelationships = apiTableRelationships, + _trQuery = + API.Query + { _qFields = Just $ mapFieldNameHashMap fields, + _qAggregates = Nothing, + _qAggregatesLimit = Nothing, + _qLimit = Nothing, + _qOffset = Nothing, + _qWhere = whereClause, + _qOrderBy = Nothing + }, + _trForeach = Just foreachRowFilter + } tableRelationshipsToList :: HashMap TableRelationshipsKey (HashMap API.RelationshipName API.Relationship) -> [API.Relationships] tableRelationshipsToList m = map (either (API.RFunction . uncurry API.FunctionRelationships) (API.RTable . uncurry API.TableRelationships) . 
tableRelationshipsKeyToEither) (HashMap.toList m) @@ -109,7 +109,7 @@ tableRelationshipsKeyToEither :: (TableRelationshipsKey, c) -> Either (API.Funct tableRelationshipsKeyToEither (FunctionNameKey f, x) = Left (f, x) tableRelationshipsKeyToEither (TableNameKey t, x) = Right (t, x) -translateForeachRowFilter :: MonadError QErr m => FieldName -> HashMap FieldName (ColumnName, ScalarType) -> J.Object -> m (HashMap API.ColumnName API.ScalarValue) +translateForeachRowFilter :: (MonadError QErr m) => FieldName -> HashMap FieldName (ColumnName, ScalarType) -> J.Object -> m (HashMap API.ColumnName API.ScalarValue) translateForeachRowFilter argumentIdFieldName joinIdsSchema joinIds = joinIds & KM.toList @@ -134,7 +134,7 @@ translateForeachRowFilter argumentIdFieldName joinIdsSchema joinIds = ) & fmap HashMap.fromList -extractArgumentIds :: MonadError QErr m => FieldName -> NonEmpty J.Object -> m (NonEmpty J.Value) +extractArgumentIds :: (MonadError QErr m) => FieldName -> NonEmpty J.Object -> m (NonEmpty J.Value) extractArgumentIds argumentIdFieldName joinIds = let argumentIdPropertyKey = K.fromText $ getFieldNameTxt argumentIdFieldName in joinIds @@ -147,7 +147,7 @@ extractArgumentIds argumentIdFieldName joinIds = -------------------------------------------------------------------------------- reshapeResponseToRemoteRelationshipQueryShape :: - MonadError QErr m => + (MonadError QErr m) => FieldName -> NonEmpty J.Value -> FieldName -> @@ -155,8 +155,8 @@ reshapeResponseToRemoteRelationshipQueryShape :: API.QueryResponse -> m J.Encoding reshapeResponseToRemoteRelationshipQueryShape argumentIdFieldName argumentIdValues resultFieldName sourceRelationshipSelection API.QueryResponse {..} = do - when (actualRowCount /= expectedRowCount) $ - throw500 ("Data Connector agent returned " <> tshow actualRowCount <> " foreach query response rows, but " <> tshow expectedRowCount <> " were expected") + when (actualRowCount /= expectedRowCount) + $ throw500 ("Data Connector agent returned " <> tshow actualRowCount <> " foreach query response rows, but " <> tshow expectedRowCount <> " were expected") argumentResultObjects <- forM (zip rows (NE.toList argumentIdValues)) $ \(row, argumentId) -> do queryFieldValue <- @@ -182,7 +182,7 @@ reshapeResponseToRemoteRelationshipQueryShape argumentIdFieldName argumentIdValu expectedRowCount = length argumentIdValues reshapeForeachQueryResponse :: - MonadError QErr m => + (MonadError QErr m) => SourceRelationshipSelection 'DataConnector Void v -> API.QueryResponse -> m J.Encoding diff --git a/server/src-lib/Hasura/Backends/MSSQL/Connection.hs b/server/src-lib/Hasura/Backends/MSSQL/Connection.hs index 05d40db5e90..9ae01f8bcbb 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/Connection.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/Connection.hs @@ -42,19 +42,19 @@ import Hasura.Base.Error import Hasura.Prelude import Hasura.RQL.Types.ResizePool -class MonadError QErr m => MonadMSSQLTx m where +class (MonadError QErr m) => MonadMSSQLTx m where liftMSSQLTx :: MSTx.TxE QErr a -> m a -instance MonadMSSQLTx m => MonadMSSQLTx (ReaderT s m) where +instance (MonadMSSQLTx m) => MonadMSSQLTx (ReaderT s m) where liftMSSQLTx = lift . liftMSSQLTx -instance MonadMSSQLTx m => MonadMSSQLTx (StateT s m) where +instance (MonadMSSQLTx m) => MonadMSSQLTx (StateT s m) where liftMSSQLTx = lift . liftMSSQLTx instance (Monoid w, MonadMSSQLTx m) => MonadMSSQLTx (WriterT w m) where liftMSSQLTx = lift . 
liftMSSQLTx -instance MonadIO m => MonadMSSQLTx (MSTx.TxET QErr m) where +instance (MonadIO m) => MonadMSSQLTx (MSTx.TxET QErr m) where liftMSSQLTx = hoist liftIO -- | ODBC connection string for MSSQL server @@ -114,17 +114,24 @@ instance ToJSON MSSQLPoolSettings where instance FromJSON MSSQLPoolSettings where parseJSON = withObject "MSSQL pool settings" $ \o -> MSSQLPoolSettings - <$> o .:? "max_connections" - <*> o .:? "total_max_connections" - <*> o .:? "idle_timeout" .!= _mpsIdleTimeout defaultMSSQLPoolSettings + <$> o + .:? "max_connections" + <*> o + .:? "total_max_connections" + <*> o + .:? "idle_timeout" + .!= _mpsIdleTimeout defaultMSSQLPoolSettings instance HasCodec MSSQLPoolSettings where codec = - AC.object "MSSQLPoolSettings" $ - MSSQLPoolSettings - <$> optionalFieldWithDefault' "max_connections" (Just defaultMSSQLMaxConnections) AC..= _mpsMaxConnections - <*> optionalFieldOrNull' "total_max_connections" AC..= _mpsTotalMaxConnections - <*> optionalFieldWithDefault' "idle_timeout" (_mpsIdleTimeout defaultMSSQLPoolSettings) AC..= _mpsIdleTimeout + AC.object "MSSQLPoolSettings" + $ MSSQLPoolSettings + <$> optionalFieldWithDefault' "max_connections" (Just defaultMSSQLMaxConnections) + AC..= _mpsMaxConnections + <*> optionalFieldOrNull' "total_max_connections" + AC..= _mpsTotalMaxConnections + <*> optionalFieldWithDefault' "idle_timeout" (_mpsIdleTimeout defaultMSSQLPoolSettings) + AC..= _mpsIdleTimeout defaultMSSQLMaxConnections :: Int defaultMSSQLMaxConnections = 50 @@ -149,10 +156,12 @@ instance NFData MSSQLConnectionInfo instance HasCodec MSSQLConnectionInfo where codec = - AC.object "MSSQLConnectionInfo" $ - MSSQLConnectionInfo - <$> requiredField' "connection_string" AC..= _mciConnectionString - <*> requiredField' "pool_settings" AC..= _mciPoolSettings + AC.object "MSSQLConnectionInfo" + $ MSSQLConnectionInfo + <$> requiredField' "connection_string" + AC..= _mciConnectionString + <*> requiredField' "pool_settings" + AC..= _mciPoolSettings instance ToJSON MSSQLConnectionInfo where toJSON = genericToJSON hasuraJSON @@ -162,7 +171,9 @@ instance FromJSON MSSQLConnectionInfo where parseJSON = withObject "Object" $ \o -> MSSQLConnectionInfo <$> ((o .: "database_url") <|> (o .: "connection_string")) - <*> o .:? "pool_settings" .!= defaultMSSQLPoolSettings + <*> o + .:? 
"pool_settings" + .!= defaultMSSQLPoolSettings data MSSQLConnConfiguration = MSSQLConnConfiguration { _mccConnectionInfo :: MSSQLConnectionInfo, @@ -176,10 +187,12 @@ instance NFData MSSQLConnConfiguration instance HasCodec MSSQLConnConfiguration where codec = - AC.object "MSSQLConnConfiguration" $ - MSSQLConnConfiguration - <$> requiredField' "connection_info" AC..= _mccConnectionInfo - <*> optionalFieldOrNull' "read_replicas" AC..= _mccReadReplicas + AC.object "MSSQLConnConfiguration" + $ MSSQLConnConfiguration + <$> requiredField' "connection_info" + AC..= _mccConnectionInfo + <*> optionalFieldOrNull' "read_replicas" + AC..= _mccReadReplicas instance FromJSON MSSQLConnConfiguration where parseJSON = genericParseJSON hasuraJSON {omitNothingFields = True} @@ -189,8 +202,8 @@ instance ToJSON MSSQLConnConfiguration where toEncoding = genericToEncoding hasuraJSON {omitNothingFields = True} createMSSQLPool :: - MonadIO m => - QErrM m => + (MonadIO m) => + (QErrM m) => InputConnectionString -> MSPool.ConnectionOptions -> Env.Environment -> @@ -201,7 +214,7 @@ createMSSQLPool iConnString connOptions env = do pure (connString, pool) resolveInputConnectionString :: - QErrM m => + (QErrM m) => Env.Environment -> InputConnectionString -> m MSPool.ConnectionString @@ -210,7 +223,7 @@ resolveInputConnectionString env = (RawString cs) -> pure cs (FromEnvironment envVar) -> MSPool.ConnectionString <$> getEnv env envVar -getEnv :: QErrM m => Env.Environment -> Text -> m Text +getEnv :: (QErrM m) => Env.Environment -> Text -> m Text getEnv env k = do let mEnv = Env.lookupEnv env (unpack k) case mEnv of @@ -253,8 +266,8 @@ mkMSSQLExecCtx pool resizeStrategy = -- Resize the primary pool resizeMSSQLPool pool maxConnections serverReplicas -- Return the summary. Only the primary pool is resized - pure $ - SourceResizePoolSummary + pure + $ SourceResizePoolSummary { _srpsPrimaryResized = True, _srpsReadReplicasResized = False, _srpsConnectionSet = [] diff --git a/server/src-lib/Hasura/Backends/MSSQL/DDL.hs b/server/src-lib/Hasura/Backends/MSSQL/DDL.hs index d845d48f62a..87787077595 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/DDL.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/DDL.hs @@ -61,8 +61,8 @@ fetchAndValidateEnumValues :: [RawColumnInfo 'MSSQL] -> m (Either QErr EnumValues) fetchAndValidateEnumValues _ _ _ _ = - runExceptT $ - throw400 NotSupported "Enum tables are not supported for MSSQL sources" + runExceptT + $ throw400 NotSupported "Enum tables are not supported for MSSQL sources" buildFunctionInfo :: (MonadError QErr m) => diff --git a/server/src-lib/Hasura/Backends/MSSQL/DDL/BoolExp.hs b/server/src-lib/Hasura/Backends/MSSQL/DDL/BoolExp.hs index 2a2d42e0600..72b0921278c 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/DDL/BoolExp.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/DDL/BoolExp.hs @@ -40,8 +40,8 @@ parseBoolExpOperations rhsParser _rootTableFieldInfoMap _fields columnRef value v -> pure . 
AEQ False <$> parseWithTy columnType v parseOperation :: ColumnType 'MSSQL -> (Text, J.Value) -> m (OpExpG 'MSSQL v) - parseOperation columnType (opStr, val) = withPathK opStr $ - case opStr of + parseOperation columnType (opStr, val) = withPathK opStr + $ case opStr of "_eq" -> parseEq "$eq" -> parseEq "_neq" -> parseNeq @@ -104,15 +104,16 @@ parseBoolExpOperations rhsParser _rootTableFieldInfoMap _fields columnRef value parseOneNoSess ty = rhsParser (CollectableTypeScalar ty) guardType validTys = - unless (isScalarColumnWhere (`elem` validTys) colTy) $ - throwError $ - buildMsg colTy validTys + unless (isScalarColumnWhere (`elem` validTys) colTy) + $ throwError + $ buildMsg colTy validTys buildMsg ty expTys = - err400 UnexpectedPayload $ - " is of type " - <> ty <<> "; this operator works only on columns of type " - <> T.intercalate "/" (map dquote expTys) + err400 UnexpectedPayload + $ " is of type " + <> ty + <<> "; this operator works only on columns of type " + <> T.intercalate "/" (map dquote expTys) parseVal :: (J.FromJSON a) => m a parseVal = decodeValue val diff --git a/server/src-lib/Hasura/Backends/MSSQL/DDL/EventTrigger.hs b/server/src-lib/Hasura/Backends/MSSQL/DDL/EventTrigger.hs index a9797d99887..8bc1c0bcf32 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/DDL/EventTrigger.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/DDL/EventTrigger.hs @@ -90,10 +90,10 @@ fetchUndeliveredEvents :: FetchBatchSize -> m [Event 'MSSQL] fetchUndeliveredEvents sourceConfig sourceName triggerNames _ fetchBatchSize = do - liftEitherM $ - liftIO $ - runMSSQLSourceWriteTx sourceConfig $ - fetchEvents sourceName triggerNames fetchBatchSize + liftEitherM + $ liftIO + $ runMSSQLSourceWriteTx sourceConfig + $ fetchEvents sourceName triggerNames fetchBatchSize setRetry :: (MonadIO m, MonadError QErr m) => @@ -103,10 +103,10 @@ setRetry :: MaintenanceMode MaintenanceModeVersion -> m () setRetry sourceConfig event retryTime maintenanceModeVersion = do - liftEitherM $ - liftIO $ - runMSSQLSourceWriteTx sourceConfig $ - setRetryTx event retryTime maintenanceModeVersion + liftEitherM + $ liftIO + $ runMSSQLSourceWriteTx sourceConfig + $ setRetryTx event retryTime maintenanceModeVersion insertManualEvent :: (MonadIO m, MonadError QErr m) => @@ -118,11 +118,12 @@ insertManualEvent :: Maybe Tracing.TraceContext -> m EventId insertManualEvent sourceConfig tableName triggerName payload _userInfo _traceCtx = - liftEitherM $ - liftIO $ - runMSSQLSourceWriteTx sourceConfig $ - -- TODO: Include TraceContext in payload - insertMSSQLManualEventTx tableName triggerName payload + liftEitherM + $ liftIO + $ runMSSQLSourceWriteTx sourceConfig + $ + -- TODO: Include TraceContext in payload + insertMSSQLManualEventTx tableName triggerName payload getMaintenanceModeVersion :: ( MonadIO m, @@ -131,10 +132,10 @@ getMaintenanceModeVersion :: MSSQLSourceConfig -> m MaintenanceModeVersion getMaintenanceModeVersion sourceConfig = - liftEitherM $ - liftIO $ - runMSSQLSourceReadTx sourceConfig $ - getMaintenanceModeVersionTx + liftEitherM + $ liftIO + $ runMSSQLSourceReadTx sourceConfig + $ getMaintenanceModeVersionTx recordSuccess :: (MonadIO m) => @@ -144,8 +145,9 @@ recordSuccess :: MaintenanceMode MaintenanceModeVersion -> m (Either QErr ()) recordSuccess sourceConfig event invocation maintenanceModeVersion = - liftIO $ - runMSSQLSourceWriteTx sourceConfig $ do + liftIO + $ runMSSQLSourceWriteTx sourceConfig + $ do insertInvocation (tmName (eTrigger event)) invocation setSuccessTx event maintenanceModeVersion @@ -169,8 +171,9 @@ 
recordError' :: MaintenanceMode MaintenanceModeVersion -> m (Either QErr ()) recordError' sourceConfig event invocation processEventError maintenanceModeVersion = - liftIO $ - runMSSQLSourceWriteTx sourceConfig $ do + liftIO + $ runMSSQLSourceWriteTx sourceConfig + $ do for_ invocation $ insertInvocation (tmName (eTrigger event)) case processEventError of PESetRetry retryTime -> do @@ -183,11 +186,12 @@ redeliverEvent :: EventId -> m () redeliverEvent sourceConfig eventId = - liftEitherM $ - liftIO $ - runMSSQLSourceWriteTx sourceConfig $ do - checkEventTx eventId - markForDeliveryTx eventId + liftEitherM + $ liftIO + $ runMSSQLSourceWriteTx sourceConfig + $ do + checkEventTx eventId + markForDeliveryTx eventId dropTriggerAndArchiveEvents :: (MonadIO m, MonadError QErr m) => @@ -196,11 +200,12 @@ dropTriggerAndArchiveEvents :: TableName -> m () dropTriggerAndArchiveEvents sourceConfig triggerName table = - liftEitherM $ - liftIO $ - runMSSQLSourceWriteTx sourceConfig $ do - dropTriggerQ triggerName (tableSchema table) - archiveEvents triggerName + liftEitherM + $ liftIO + $ runMSSQLSourceWriteTx sourceConfig + $ do + dropTriggerQ triggerName (tableSchema table) + archiveEvents triggerName dropDanglingSQLTrigger :: (MonadIO m, MonadError QErr m) => @@ -210,10 +215,11 @@ dropDanglingSQLTrigger :: HashSet Ops -> m () dropDanglingSQLTrigger sourceConfig triggerName table ops = - liftEitherM $ - liftIO $ - runMSSQLSourceWriteTx sourceConfig $ do - traverse_ (dropTriggerOp triggerName (tableSchema table)) ops + liftEitherM + $ liftIO + $ runMSSQLSourceWriteTx sourceConfig + $ do + traverse_ (dropTriggerOp triggerName (tableSchema table)) ops createTableEventTrigger :: (MonadIO m) => @@ -227,8 +233,9 @@ createTableEventTrigger :: Maybe (PrimaryKey 'MSSQL (ColumnInfo 'MSSQL)) -> m (Either QErr ()) createTableEventTrigger _sqlGen sourceConfig table columns triggerName triggerOnReplication opsDefinition primaryKeyMaybe = do - liftIO $ - runMSSQLSourceWriteTx sourceConfig $ do + liftIO + $ runMSSQLSourceWriteTx sourceConfig + $ do mkAllTriggersQ triggerName table triggerOnReplication columns opsDefinition primaryKeyMaybe createMissingSQLTriggers :: @@ -252,8 +259,9 @@ createMissingSQLTriggers triggerName triggerOnReplication opsDefinition = do - liftEitherM $ - runMSSQLSourceWriteTx sourceConfig $ do + liftEitherM + $ runMSSQLSourceWriteTx sourceConfig + $ do for_ (tdInsert opsDefinition) (doesSQLTriggerExist INSERT) for_ (tdUpdate opsDefinition) (doesSQLTriggerExist UPDATE) for_ (tdDelete opsDefinition) (doesSQLTriggerExist DELETE) @@ -261,8 +269,8 @@ createMissingSQLTriggers doesSQLTriggerExist op opSpec = do let triggerNameWithOp = "notify_hasura_" <> triggerNameToTxt triggerName <> "_" <> tshow op doesOpTriggerExist <- - liftMSSQLTx $ - singleRowQueryE + liftMSSQLTx + $ singleRowQueryE HGE.defaultMSSQLTxErrorHandler [ODBC.sql| SELECT CASE WHEN EXISTS @@ -289,8 +297,9 @@ unlockEventsInSource :: NE.NESet EventId -> m (Either QErr Int) unlockEventsInSource sourceConfig eventIds = - liftIO $ - runMSSQLSourceWriteTx sourceConfig $ do + liftIO + $ runMSSQLSourceWriteTx sourceConfig + $ do unlockEventsTx $ toList eventIds -- Check if any trigger for any of the operation exists with the 'triggerName' @@ -301,10 +310,10 @@ checkIfTriggerExists :: HashSet Ops -> m Bool checkIfTriggerExists sourceConfig triggerName ops = do - liftEitherM $ - liftIO $ - runMSSQLSourceWriteTx sourceConfig $ - fmap or (traverse (checkIfTriggerExistsQ triggerName) (HashSet.toList ops)) + liftEitherM + $ liftIO + $ 
runMSSQLSourceWriteTx sourceConfig + $ fmap or (traverse (checkIfTriggerExistsQ triggerName) (HashSet.toList ops)) ---- DATABASE QUERIES --------------------- -- @@ -439,9 +448,10 @@ fetchEvents source triggerNames (FetchBatchSize fetchBatchSize) = do -- Due to the problematic variable substitution of `ODBC.sql` it is imperative that -- we resort to template strings, since that does not do any changes to the string. events <- - multiRowQueryE HGE.defaultMSSQLTxErrorHandler $ - rawUnescapedText . LT.toStrict $ - $(makeRelativeToProject "src-rsr/mssql/mssql_fetch_events.sql.shakespeare" >>= ST.stextFile) + multiRowQueryE HGE.defaultMSSQLTxErrorHandler + $ rawUnescapedText + . LT.toStrict + $ $(makeRelativeToProject "src-rsr/mssql/mssql_fetch_events.sql.shakespeare" >>= ST.stextFile) mapM uncurryEvent events where -- Creates a list of trigger names to be used for 'IN' operator @@ -458,8 +468,8 @@ fetchEvents source triggerNames (FetchBatchSize fetchBatchSize) = do createdAt' <- bsToUTCTime createdAt "conversion of created_at to UTCTime failed while fetching MSSQL events" retryAt <- traverse (`bsToUTCTime` "conversion of next_retry_at to UTCTime failed while fetching MSSQL events") nextRetryAt - pure $ - Event + pure + $ Event { eId = EventId (bsToTxt eventId), eSource = source, eTable = (TableName tn (SchemaName sn)), @@ -525,8 +535,8 @@ checkEventTx eventId = do getEvent (x : _) = return x assertEventUnlocked locked = - when locked $ - throw400 Busy "event is already being processed" + when locked + $ throw400 Busy "event is already being processed" markForDeliveryTx :: EventId -> TxE QErr () markForDeliveryTx eventId = do @@ -543,11 +553,13 @@ markForDeliveryTx eventId = do unlockEventsTx :: [EventId] -> TxE QErr Int unlockEventsTx eventIds = do numEvents <- - singleRowQueryE HGE.defaultMSSQLTxErrorHandler $ - rawUnescapedText . LT.toStrict $ - -- EventIds as list of VALUES (Eg: ('123-abc'), ('456-vgh'), ('234-asd')) - let eventIdsValues = generateValuesFromEvents eventIds - in $(makeRelativeToProject "src-rsr/mssql/mssql_unlock_events.sql.shakespeare" >>= ST.stextFile) + singleRowQueryE HGE.defaultMSSQLTxErrorHandler + $ rawUnescapedText + . 
LT.toStrict + $ + -- EventIds as list of VALUES (Eg: ('123-abc'), ('456-vgh'), ('234-asd')) + let eventIdsValues = generateValuesFromEvents eventIds + in $(makeRelativeToProject "src-rsr/mssql/mssql_unlock_events.sql.shakespeare" >>= ST.stextFile) return numEvents where generateValuesFromEvents :: [EventId] -> Text @@ -560,13 +572,13 @@ getMaintenanceModeVersionTx :: TxE QErr MaintenanceModeVersion getMaintenanceModeVersionTx = do catalogVersion <- getSourceCatalogVersion if - | catalogVersion == latestSourceCatalogVersion -> pure CurrentMMVersion - | otherwise -> - throw500 $ - "Maintenance mode is only supported with catalog versions: " - <> tshow latestSourceCatalogVersion - <> " but received " - <> tshow catalogVersion + | catalogVersion == latestSourceCatalogVersion -> pure CurrentMMVersion + | otherwise -> + throw500 + $ "Maintenance mode is only supported with catalog versions: " + <> tshow latestSourceCatalogVersion + <> " but received " + <> tshow catalogVersion convertUTCToDatetime2 :: (MonadIO m) => UTCTime -> m Datetime2 convertUTCToDatetime2 utcTime = do @@ -579,8 +591,8 @@ checkIfTriggerExistsQ :: TxE QErr Bool checkIfTriggerExistsQ triggerName op = do let triggerNameWithOp = "notify_hasura_" <> triggerNameToTxt triggerName <> "_" <> tshow op - liftMSSQLTx $ - singleRowQueryE + liftMSSQLTx + $ singleRowQueryE HGE.defaultMSSQLTxErrorHandler -- We check the existence of trigger across the entire database irrespective of -- the schema of the table @@ -666,8 +678,8 @@ checkSpatialDataTypeColumns allCols (SubscribeOpSpec listenCols deliveryCols) = deliveryColumns = getApplicableColumns allCols $ fromMaybe SubCStar deliveryCols isGeoTypesInListenCols = any (isScalarColumnWhere isGeoType . ciType) listenColumns isGeoTypesInDeliversCols = any (isScalarColumnWhere isGeoType . ciType) deliveryColumns - when (isGeoTypesInListenCols || isGeoTypesInDeliversCols) $ - throw400 NotSupported "Event triggers for MS-SQL sources are not supported on tables having Geometry or Geography column types" + when (isGeoTypesInListenCols || isGeoTypesInDeliversCols) + $ throw400 NotSupported "Event triggers for MS-SQL sources are not supported on tables having Geometry or Geography column types" where isGeoType = (`elem` geoTypes) @@ -682,8 +694,10 @@ mkInsertTriggerQ :: mkInsertTriggerQ triggerName table allCols triggerOnReplication subOpSpec@(SubscribeOpSpec _listenCols deliveryCols) = do checkSpatialDataTypeColumns allCols subOpSpec liftMSSQLTx $ do - unitQueryE HGE.defaultMSSQLTxErrorHandler $ - rawUnescapedText . LT.toStrict $ do + unitQueryE HGE.defaultMSSQLTxErrorHandler + $ rawUnescapedText + . LT.toStrict + $ do let deliveryColumns = getApplicableColumns allCols $ fromMaybe SubCStar deliveryCols mkInsertTriggerQuery table triggerName deliveryColumns triggerOnReplication @@ -698,8 +712,10 @@ mkDeleteTriggerQ :: mkDeleteTriggerQ triggerName table allCols triggerOnReplication subOpSpec@(SubscribeOpSpec _listenCols deliveryCols) = do checkSpatialDataTypeColumns allCols subOpSpec liftMSSQLTx $ do - unitQueryE HGE.defaultMSSQLTxErrorHandler $ - rawUnescapedText . LT.toStrict $ do + unitQueryE HGE.defaultMSSQLTxErrorHandler + $ rawUnescapedText + . 
LT.toStrict + $ do let deliveryColumns = getApplicableColumns allCols $ fromMaybe SubCStar deliveryCols mkDeleteTriggerQuery table triggerName deliveryColumns triggerOnReplication @@ -718,9 +734,10 @@ mkUpdateTriggerQ triggerName table allCols triggerOnReplication primaryKeyMaybe primaryKey <- onNothing primaryKeyMaybe (throw400 NotSupported "Update event triggers for MS-SQL sources are only supported on tables with primary keys") let deliveryColumns = getApplicableColumns allCols $ fromMaybe SubCStar deliveryCols listenColumns = getApplicableColumns allCols listenCols - unitQueryE HGE.defaultMSSQLTxErrorHandler $ - rawUnescapedText . LT.toStrict $ - mkUpdateTriggerQuery table triggerName listenColumns deliveryColumns primaryKey triggerOnReplication + unitQueryE HGE.defaultMSSQLTxErrorHandler + $ rawUnescapedText + . LT.toStrict + $ mkUpdateTriggerQuery table triggerName listenColumns deliveryColumns primaryKey triggerOnReplication -- Create alias for columns -- eg: If colPrefixMaybe is defined then 'inserted.id as payload.data.old.id' @@ -910,11 +927,11 @@ addCleanupSchedules sourceConfig triggersWithcleanupConfig = lastScheduledTime ) triggersWithcleanupConfig - unless (null scheduledTriggersAndTimestamps) $ - liftEitherM $ - liftIO $ - runMSSQLSourceWriteTx sourceConfig $ - insertEventTriggerCleanupLogsTx scheduledTriggersAndTimestamps + unless (null scheduledTriggersAndTimestamps) + $ liftEitherM + $ liftIO + $ runMSSQLSourceWriteTx sourceConfig + $ insertEventTriggerCleanupLogsTx scheduledTriggersAndTimestamps -- | Insert the cleanup logs for the given trigger name and schedules insertEventTriggerCleanupLogsTx :: [(TriggerName, [Datetimeoffset])] -> TxET QErr IO () @@ -929,8 +946,8 @@ insertEventTriggerCleanupLogsTx triggerNameWithSchedules = ) where sqlValues = - commaSeparated $ - map + commaSeparated + $ map ( \(triggerName, schedules) -> generateSQLValuesFromListWith ( \schedule -> @@ -1029,10 +1046,10 @@ getCleanupEventsForDeletion sourceConfig = markCleanupEventsAsDeadTx :: [Text] -> TxE QErr () markCleanupEventsAsDeadTx toDeadEvents = do let deadEventsValues = generateSQLValuesFromList toDeadEvents - unless (null toDeadEvents) $ - unitQueryE HGE.defaultMSSQLTxErrorHandler $ - rawUnescapedText $ - [ST.st| + unless (null toDeadEvents) + $ unitQueryE HGE.defaultMSSQLTxErrorHandler + $ rawUnescapedText + $ [ST.st| UPDATE hdb_catalog.hdb_event_log_cleanups SET status = 'dead' WHERE id = ANY ( SELECT id from (VALUES #{deadEventsValues}) AS X(id)); @@ -1113,9 +1130,9 @@ deleteEventTriggerLogsTx TriggerLogCleanupConfig {..} = do else do let eventIdsValues = generateSQLValuesFromList deadEventIDs -- Lock the events in the database so that other HGE instances don't pick them up for deletion. - unitQueryE HGE.defaultMSSQLTxErrorHandler $ - rawUnescapedText $ - [ST.st| + unitQueryE HGE.defaultMSSQLTxErrorHandler + $ rawUnescapedText + $ [ST.st| UPDATE hdb_catalog.event_log SET locked = SYSDATETIMEOFFSET() AT TIME ZONE 'UTC' WHERE id = ANY ( SELECT id from (VALUES #{eventIdsValues}) AS X(id)) @@ -1125,26 +1142,26 @@ deleteEventTriggerLogsTx TriggerLogCleanupConfig {..} = do -- to appropriate value. Please note that the event_id won't exist anymore in the event_log -- table, but we are still retaining it for debugging purpose. deletedInvocationLogs :: [Int] <- -- This will be an array of 1 and is only used to count the number of deleted rows. 
- multiRowQueryE HGE.defaultMSSQLTxErrorHandler $ - rawUnescapedText $ - if tlccCleanInvocationLogs - then - [ST.st| + multiRowQueryE HGE.defaultMSSQLTxErrorHandler + $ rawUnescapedText + $ if tlccCleanInvocationLogs + then + [ST.st| DELETE FROM hdb_catalog.event_invocation_logs OUTPUT 1 WHERE event_id = ANY ( SELECT id from (VALUES #{eventIdsValues}) AS X(id)); |] - else - [ST.st| + else + [ST.st| UPDATE hdb_catalog.event_invocation_logs SET trigger_name = '#{qTriggerName}' WHERE event_id = ANY ( SELECT id from (VALUES #{eventIdsValues}) AS X(id)); |] -- Finally delete the event logs. deletedEventLogs :: [Int] <- -- This will be an array of 1 and is only used to count the number of deleted rows. - multiRowQueryE HGE.defaultMSSQLTxErrorHandler $ - rawUnescapedText $ - [ST.st| + multiRowQueryE HGE.defaultMSSQLTxErrorHandler + $ rawUnescapedText + $ [ST.st| DELETE FROM hdb_catalog.event_log OUTPUT 1 WHERE id = ANY ( SELECT id from (VALUES #{eventIdsValues}) AS X(id)); @@ -1270,8 +1287,8 @@ fetchEventById sourceConfig getEventById = do fetchEventByIdTxE' <- liftIO $ runMSSQLSourceReadTx sourceConfig $ fetchEventByIdTxE getEventById case fetchEventByIdTxE' of Left err -> - throwError $ - prefixQErr ("unexpected error while fetching event with id " <> eventId <> ": ") err + throwError + $ prefixQErr ("unexpected error while fetching event with id " <> eventId <> ": ") err Right eventLogWithInvocations -> do if isNothing (elwiEvent eventLogWithInvocations) then throw400 NotExists errMsg diff --git a/server/src-lib/Hasura/Backends/MSSQL/DDL/RunSQL.hs b/server/src-lib/Hasura/Backends/MSSQL/DDL/RunSQL.hs index 3f17830e1b2..9f673aff881 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/DDL/RunSQL.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/DDL/RunSQL.hs @@ -81,8 +81,8 @@ runSQL mssqlRunSQL@MSSQLRunSQL {..} = do then do (results, metadataUpdater) <- runTx _siConfiguration $ withMetadataCheck _siTables -- Build schema cache with updated metadata - withNewInconsistentObjsCheck $ - buildSchemaCacheWithInvalidations mempty {ciSources = HS.singleton _mrsSource} metadataUpdater + withNewInconsistentObjsCheck + $ buildSchemaCacheWithInvalidations mempty {ciSources = HS.singleton _mrsSource} metadataUpdater pure results else runTx _siConfiguration sqlQueryTx pure $ encJFromJValue $ toResult results diff --git a/server/src-lib/Hasura/Backends/MSSQL/DDL/Source.hs b/server/src-lib/Hasura/Backends/MSSQL/DDL/Source.hs index 0c056bbf6f0..6d9e9cceeef 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/DDL/Source.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/DDL/Source.hs @@ -97,8 +97,8 @@ postDropSourceHook (MSSQLSourceConfig _ mssqlExecCtx _) tableTriggersMap = do doesSchemaExist :: (MonadMSSQLTx m) => SchemaName -> m Bool doesSchemaExist (SchemaName schemaName) = do - liftMSSQLTx $ - Tx.singleRowQueryE + liftMSSQLTx + $ Tx.singleRowQueryE HGE.defaultMSSQLTxErrorHandler [ODBC.sql| SELECT CAST ( @@ -112,8 +112,8 @@ doesSchemaExist (SchemaName schemaName) = do doesTableExist :: (MonadMSSQLTx m) => TableName -> m Bool doesTableExist tableName = do - liftMSSQLTx $ - Tx.singleRowQueryE + liftMSSQLTx + $ Tx.singleRowQueryE HGE.defaultMSSQLTxErrorHandler [ODBC.sql| SELECT CAST ( @@ -137,16 +137,16 @@ prepareCatalog sourceConfig = mssqlRunSerializableTx (_mscExecCtx sourceConfig) eventLogTableExist <- doesTableExist $ TableName "event_log" "hdb_catalog" sourceVersionTableExist <- doesTableExist $ TableName "hdb_source_catalog_version" "hdb_catalog" if - -- Fresh database - | not hdbCatalogExist -> liftMSSQLTx do - 
unitQueryE HGE.defaultMSSQLTxErrorHandler "CREATE SCHEMA hdb_catalog" - initSourceCatalog - return (RETDoNothing, Version.SCMSInitialized $ Version.unSourceCatalogVersion latestSourceCatalogVersion) - -- Only 'hdb_catalog' schema defined - | not (sourceVersionTableExist || eventLogTableExist) -> do - liftMSSQLTx initSourceCatalog - return (RETDoNothing, Version.SCMSInitialized $ Version.unSourceCatalogVersion latestSourceCatalogVersion) - | otherwise -> migrateSourceCatalog + -- Fresh database + | not hdbCatalogExist -> liftMSSQLTx do + unitQueryE HGE.defaultMSSQLTxErrorHandler "CREATE SCHEMA hdb_catalog" + initSourceCatalog + return (RETDoNothing, Version.SCMSInitialized $ Version.unSourceCatalogVersion latestSourceCatalogVersion) + -- Only 'hdb_catalog' schema defined + | not (sourceVersionTableExist || eventLogTableExist) -> do + liftMSSQLTx initSourceCatalog + return (RETDoNothing, Version.SCMSInitialized $ Version.unSourceCatalogVersion latestSourceCatalogVersion) + | otherwise -> migrateSourceCatalog where initSourceCatalog = do unitQueryE HGE.defaultMSSQLTxErrorHandler $(makeRelativeToProject "src-rsr/mssql/init_mssql_source.sql" >>= ODBC.sqlFile) @@ -165,11 +165,11 @@ migrateSourceCatalogFrom :: (MonadMSSQLTx m) => SourceCatalogVersion -> m (Recre migrateSourceCatalogFrom prevVersion | prevVersion == latestSourceCatalogVersion = pure (RETDoNothing, SCMSNothingToDo $ Version.unSourceCatalogVersion latestSourceCatalogVersion) | [] <- neededMigrations = - throw400 NotSupported $ - "Expected source catalog version <= " - <> tshow latestSourceCatalogVersion - <> ", but the current version is " - <> tshow prevVersion + throw400 NotSupported + $ "Expected source catalog version <= " + <> tshow latestSourceCatalogVersion + <> ", but the current version is " + <> tshow prevVersion | otherwise = do liftMSSQLTx $ traverse_ snd neededMigrations setSourceCatalogVersion latestSourceCatalogVersion diff --git a/server/src-lib/Hasura/Backends/MSSQL/DDL/Source/Version.hs b/server/src-lib/Hasura/Backends/MSSQL/DDL/Source/Version.hs index 3be42c15786..8e66a22832d 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/DDL/Source/Version.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/DDL/Source/Version.hs @@ -30,7 +30,7 @@ latestSourceCatalogVersion = Version.SourceCatalogVersion 4 previousSourceCatalogVersions :: [SourceCatalogVersion] previousSourceCatalogVersions = [initialSourceCatalogVersion .. 
pred latestSourceCatalogVersion] -setSourceCatalogVersion :: MonadMSSQLTx m => SourceCatalogVersion -> m () +setSourceCatalogVersion :: (MonadMSSQLTx m) => SourceCatalogVersion -> m () setSourceCatalogVersion (Version.SourceCatalogVersion version) = liftMSSQLTx $ unitQueryE HGE.defaultMSSQLTxErrorHandler setSourceCatalogVersionQuery where @@ -50,7 +50,7 @@ setSourceCatalogVersion (Version.SourceCatalogVersion version) = COMMIT TRANSACTION |] -getSourceCatalogVersion :: MonadMSSQLTx m => m SourceCatalogVersion +getSourceCatalogVersion :: (MonadMSSQLTx m) => m SourceCatalogVersion getSourceCatalogVersion = Version.SourceCatalogVersion <$> liftMSSQLTx (singleRowQueryE HGE.defaultMSSQLTxErrorHandler [ODBC.sql| SELECT version FROM hdb_catalog.hdb_source_catalog_version |]) diff --git a/server/src-lib/Hasura/Backends/MSSQL/Execute/Delete.hs b/server/src-lib/Hasura/Backends/MSSQL/Execute/Delete.hs index d2cff576f65..56c04d5a4f9 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/Execute/Delete.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/Execute/Delete.hs @@ -70,9 +70,9 @@ buildDeleteTx :: buildDeleteTx deleteOperation stringifyNum queryTags = do let withAlias = "with_alias" createInsertedTempTableQuery = - toQueryFlat $ - TQ.fromSelectIntoTempTable $ - TSQL.toSelectIntoTempTable tempTableNameDeleted (_adTable deleteOperation) (_adAllCols deleteOperation) RemoveConstraints + toQueryFlat + $ TQ.fromSelectIntoTempTable + $ TSQL.toSelectIntoTempTable tempTableNameDeleted (_adTable deleteOperation) (_adAllCols deleteOperation) RemoveConstraints -- Create a temp table Tx.unitQueryE defaultMSSQLTxErrorHandler (createInsertedTempTableQuery `withQueryTags` queryTags) diff --git a/server/src-lib/Hasura/Backends/MSSQL/Execute/Insert.hs b/server/src-lib/Hasura/Backends/MSSQL/Execute/Insert.hs index 7273da26e8a..97af4620a2a 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/Execute/Insert.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/Execute/Insert.hs @@ -156,9 +156,9 @@ buildInsertTx tableName withAlias stringifyNum insert queryTags = do -- Create #inserted temporary table let createInsertedTempTableQuery = - toQueryFlat $ - TQ.fromSelectIntoTempTable $ - TSQL.toSelectIntoTempTable tempTableNameInserted tableName tableColumns RemoveConstraints + toQueryFlat + $ TQ.fromSelectIntoTempTable + $ TSQL.toSelectIntoTempTable tempTableNameInserted tableName tableColumns RemoveConstraints Tx.unitQueryE defaultMSSQLTxErrorHandler (createInsertedTempTableQuery `withQueryTags` queryTags) @@ -181,8 +181,8 @@ buildInsertTx tableName withAlias stringifyNum insert queryTags = do Tx.unitQueryE defaultMSSQLTxErrorHandler (dropInsertedTempTableQuery `withQueryTags` queryTags) -- Raise an exception if the check condition is not met - unless (checkConditionInt == 0) $ - throw400 PermissionError "check constraint of an insert/update permission has failed" + unless (checkConditionInt == 0) + $ throw400 PermissionError "check constraint of an insert/update permission has failed" pure $ encJFromText responseText @@ -199,7 +199,7 @@ buildInsertTx tableName withAlias stringifyNum insert queryTags = do -- -- Should be used as part of a bigger transaction in 'buildInsertTx'. 
buildUpsertTx :: - MonadIO m => + (MonadIO m) => TSQL.TableName -> AnnotatedInsert 'MSSQL Void Expression -> IfMatched Expression -> @@ -212,18 +212,19 @@ buildUpsertTx tableName insert ifMatched queryTags = do allTableColumns = _aiTableColumns $ _aiData insert insertColumns = filter (\c -> ciColumn c `elem` insertColumnNames) allTableColumns createValuesTempTableQuery = - toQueryFlat $ - TQ.fromSelectIntoTempTable $ - -- We want to KeepConstraints here so the user can omit values for identity columns such as `id` - TSQL.toSelectIntoTempTable tempTableNameValues tableName insertColumns KeepConstraints + toQueryFlat + $ TQ.fromSelectIntoTempTable + $ + -- We want to KeepConstraints here so the user can omit values for identity columns such as `id` + TSQL.toSelectIntoTempTable tempTableNameValues tableName insertColumns KeepConstraints -- Create #values temporary table Tx.unitQueryE defaultMSSQLTxErrorHandler (createValuesTempTableQuery `withQueryTags` queryTags) -- Store values in #values temporary table let insertValuesIntoTempTableQuery = - toQueryFlat $ - TQ.fromInsertValuesIntoTempTable $ - TSQL.toInsertValuesIntoTempTable tempTableNameValues insert + toQueryFlat + $ TQ.fromInsertValuesIntoTempTable + $ TSQL.toInsertValuesIntoTempTable tempTableNameValues insert Tx.unitQueryE mutationMSSQLTxErrorHandler (insertValuesIntoTempTableQuery `withQueryTags` queryTags) -- Run the MERGE query and store the mutated rows in #inserted temporary table @@ -236,7 +237,7 @@ buildUpsertTx tableName insert ifMatched queryTags = do -- | Builds a response to the user using the values in the temporary table named #inserted. buildInsertResponseTx :: - MonadIO m => + (MonadIO m) => Options.StringifyNumbers -> Text -> AnnotatedInsert 'MSSQL Void Expression -> diff --git a/server/src-lib/Hasura/Backends/MSSQL/Execute/Update.hs b/server/src-lib/Hasura/Backends/MSSQL/Execute/Update.hs index 9b6ed2fa76b..4f7e3fcf8db 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/Execute/Update.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/Execute/Update.hs @@ -76,9 +76,9 @@ buildUpdateTx :: buildUpdateTx updateOperation stringifyNum queryTags = do let withAlias = "with_alias" createInsertedTempTableQuery = - toQueryFlat $ - TQ.fromSelectIntoTempTable $ - TSQL.toSelectIntoTempTable tempTableNameUpdated (_auTable updateOperation) (_auAllCols updateOperation) RemoveConstraints + toQueryFlat + $ TQ.fromSelectIntoTempTable + $ TSQL.toSelectIntoTempTable tempTableNameUpdated (_auTable updateOperation) (_auAllCols updateOperation) RemoveConstraints -- Create a temp table Tx.unitQueryE defaultMSSQLTxErrorHandler (createInsertedTempTableQuery `withQueryTags` queryTags) let updateQuery = TQ.fromUpdate <$> TSQL.fromUpdate updateOperation @@ -106,6 +106,6 @@ buildUpdateTx updateOperation stringifyNum queryTags = do -- Drop the temp table Tx.unitQueryE defaultMSSQLTxErrorHandler (toQueryFlat (dropTempTableQuery tempTableNameUpdated) `withQueryTags` queryTags) -- Raise an exception if the check condition is not met - unless (checkConditionInt == (0 :: Int)) $ - throw400 PermissionError "check constraint of an insert/update permission has failed" + unless (checkConditionInt == (0 :: Int)) + $ throw400 PermissionError "check constraint of an insert/update permission has failed" pure $ encJFromText responseText diff --git a/server/src-lib/Hasura/Backends/MSSQL/FromIr.hs b/server/src-lib/Hasura/Backends/MSSQL/FromIr.hs index 9a48bf2c297..b88c9423d3e 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/FromIr.hs +++ 
b/server/src-lib/Hasura/Backends/MSSQL/FromIr.hs @@ -92,7 +92,7 @@ type FromIrInner = StateT (Map Text Int) (Validate (NonEmpty Error)) -- | Run a 'FromIr' action, throwing errors that have been collected using the -- supplied action, and attach CTEs created from native queries to the select query. -runFromIrUseCTEs :: MonadError QErr m => FromIr Select -> m (QueryWithDDL Select) +runFromIrUseCTEs :: (MonadError QErr m) => FromIr Select -> m (QueryWithDDL Select) runFromIrUseCTEs fromir = runIdentity <$> runFromIr attachCTEs (Identity fromir) -- | Run a 'FromIr' action, throwing errors that have been collected using the @@ -105,7 +105,7 @@ runFromIrUseCTEsT = runFromIr attachCTEs -- -- If CTEs were reported, we throw an error, since we don't support native queries -- in this context yet. -runFromIrErrorOnCTEs :: MonadError QErr m => FromIr a -> m (QueryWithDDL a) +runFromIrErrorOnCTEs :: (MonadError QErr m) => FromIr a -> m (QueryWithDDL a) runFromIrErrorOnCTEs fromir = runIdentity <$> runFromIr errorOnCTEs (Identity fromir) -- | Run a 'FromIr' action, throwing errors that have been collected using the supplied action. @@ -118,10 +118,10 @@ runFromIr toResult = . traverse (runWriterT . unFromIr) -- | attach CTEs created from native queries to the select query. -attachCTEs :: MonadValidate (NonEmpty Error) m => (Select, IRWriter) -> m (QueryWithDDL Select) +attachCTEs :: (MonadValidate (NonEmpty Error) m) => (Select, IRWriter) -> m (QueryWithDDL Select) attachCTEs (select, IRWriter before after ctes) = - pure $ - QueryWithDDL + pure + $ QueryWithDDL { qwdBeforeSteps = before, qwdQuery = select {selectWith = ctes <> selectWith select}, qwdAfterSteps = after @@ -129,12 +129,12 @@ attachCTEs (select, IRWriter before after ctes) = -- | If CTEs were reported, we throw an error, since we don't support native queries -- in this context yet. 
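-- A small usage sketch for the runners above (assuming only this module's
-- exports; the helper name is hypothetical): callers that cannot run DDL
-- reject native-query CTEs via 'runFromIrErrorOnCTEs' and read the
-- translated query out of the 'QueryWithDDL' wrapper.
translateSimpleSelect :: (MonadError QErr m) => FromIr Select -> m Select
translateSimpleSelect action = do
  -- Throws a 400 if the action reported CTEs, since native queries are not
  -- supported in this context yet.
  qwd <- runFromIrErrorOnCTEs action
  pure (qwdQuery qwd)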
-errorOnCTEs :: MonadValidate (NonEmpty Error) m => (a, IRWriter) -> m (QueryWithDDL a) +errorOnCTEs :: (MonadValidate (NonEmpty Error) m) => (a, IRWriter) -> m (QueryWithDDL a) errorOnCTEs (result, IRWriter {irwBefore, irwAfter, irwCTEs}) = case irwCTEs of Nothing -> - pure $ - QueryWithDDL + pure + $ QueryWithDDL { qwdBeforeSteps = irwBefore, qwdQuery = result, qwdAfterSteps = irwAfter @@ -178,8 +178,8 @@ generateAlias template = do occurrence <- M.findWithDefault 1 rendered <$> FromIr get pure (rendered <> tshow occurrence) where - rendered = T.take 20 $ - case template of + rendered = T.take 20 + $ case template of ArrayRelationTemplate sample -> "ar_" <> sample ArrayAggregateTemplate sample -> "aa_" <> sample ObjectRelationTemplate sample -> "or_" <> sample diff --git a/server/src-lib/Hasura/Backends/MSSQL/FromIr/Expression.hs b/server/src-lib/Hasura/Backends/MSSQL/FromIr/Expression.hs index 20f1c37a34f..e4ddea15b4f 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/FromIr/Expression.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/FromIr/Expression.hs @@ -51,25 +51,25 @@ fromGBoolExp = selectFrom <- lift (aliasQualifiedTable _geTable) scopedTo selectFrom $ do whereExpression <- fromGBoolExp _geWhere - pure $ - ExistsExpression $ - emptySelect - { selectOrderBy = Nothing, - selectProjections = - [ ExpressionProjection - ( Aliased - { aliasedThing = trueExpression, - aliasedAlias = existsFieldName - } - ) - ], - selectFrom = Just selectFrom, - selectJoins = mempty, - selectWhere = Where [whereExpression], - selectTop = NoTop, - selectFor = NoFor, - selectOffset = Nothing - } + pure + $ ExistsExpression + $ emptySelect + { selectOrderBy = Nothing, + selectProjections = + [ ExpressionProjection + ( Aliased + { aliasedThing = trueExpression, + aliasedAlias = existsFieldName + } + ) + ], + selectFrom = Just selectFrom, + selectJoins = mempty, + selectWhere = Where [whereExpression], + selectTop = NoTop, + selectFor = NoFor, + selectOffset = Nothing + } -- | Translate boolean expressions into TSQL 'Expression's. -- diff --git a/server/src-lib/Hasura/Backends/MSSQL/FromIr/Insert.hs b/server/src-lib/Hasura/Backends/MSSQL/FromIr/Insert.hs index a341bf9b642..9982420aee3 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/FromIr/Insert.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/FromIr/Insert.hs @@ -75,17 +75,16 @@ toMerge :: FromIr Merge toMerge tableName insertRows allColumns IfMatched {..} = do let insertColumnNames = - HS.toList $ - HashMap.keysSet _imColumnPresets - <> HS.unions (map (HashMap.keysSet . HashMap.fromList . IR.getInsertColumns) insertRows) + HS.toList + $ HashMap.keysSet _imColumnPresets + <> HS.unions (map (HashMap.keysSet . HashMap.fromList . 
IR.getInsertColumns) insertRows) allColumnNames = map IR.ciColumn allColumns matchConditions <- - flip runReaderT (EntityAlias "target") $ -- the table is aliased as "target" in MERGE sql - fromGBoolExp _imConditions - - pure $ - Merge + flip runReaderT (EntityAlias "target") + $ fromGBoolExp _imConditions -- the table is aliased as "target" in MERGE sql + pure + $ Merge { mergeTargetTable = tableName, mergeUsing = MergeUsing tempTableNameValues insertColumnNames, mergeOn = MergeOn _imMatchColumns, diff --git a/server/src-lib/Hasura/Backends/MSSQL/FromIr/MutationResponse.hs b/server/src-lib/Hasura/Backends/MSSQL/FromIr/MutationResponse.hs index db967f91c5d..c34e44397de 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/FromIr/MutationResponse.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/FromIr/MutationResponse.hs @@ -106,8 +106,8 @@ selectMutationOutputAndCheckCondition alias mutationOutputSelect checkBoolExp = ExpressionProjection $ Aliased (SelectExpression mutationOutputSelect) "mutation_response" checkConstraintProjection = -- apply ISNULL() to avoid check constraint select statement yielding empty rows - ExpressionProjection $ - Aliased (FunctionApplicationExpression $ FunExpISNULL (SelectExpression checkConstraintSelect) (ValueExpression (ODBC.IntValue 0))) "check_constraint_select" + ExpressionProjection + $ Aliased (FunctionApplicationExpression $ FunExpISNULL (SelectExpression checkConstraintSelect) (ValueExpression (ODBC.IntValue 0))) "check_constraint_select" in emptySelect {selectProjections = [mutationOutputProjection, checkConstraintProjection]} where checkConstraintSelect = @@ -116,8 +116,8 @@ selectMutationOutputAndCheckCondition alias mutationOutputSelect checkBoolExp = sumAggregate = OpAggregate "SUM" - [ ColumnExpression $ - FieldName + [ ColumnExpression + $ FieldName { fieldNameEntity = subQueryAlias, fieldName = checkEvaluationFieldName } diff --git a/server/src-lib/Hasura/Backends/MSSQL/FromIr/Query.hs b/server/src-lib/Hasura/Backends/MSSQL/FromIr/Query.hs index c6cdc45d259..649b4745472 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/FromIr/Query.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/FromIr/Query.hs @@ -65,8 +65,9 @@ fromSelect jsonAggSelect annSimpleSel = IR.JASMultipleRows -> guardSelectYieldingNull emptyArrayExpression <$> fromSelectRows annSimpleSel IR.JASSingleObject -> - fmap (guardSelectYieldingNull nullExpression) $ - fromSelectRows annSimpleSel <&> \sel -> + fmap (guardSelectYieldingNull nullExpression) + $ fromSelectRows annSimpleSel + <&> \sel -> sel { selectFor = JsonFor @@ -79,8 +80,8 @@ fromSelect jsonAggSelect annSimpleSel = let isNullApplication = FunExpISNULL (SelectExpression select) fallbackExpression in emptySelect { selectProjections = - [ ExpressionProjection $ - Aliased + [ ExpressionProjection + $ Aliased { aliasedThing = FunctionApplicationExpression isNullApplication, aliasedAlias = "root" } @@ -124,8 +125,8 @@ fromSourceRelationship lhs lhsSchema argumentId relationshipField = do } where projectArgumentId column = - ExpressionProjection $ - Aliased + ExpressionProjection + $ Aliased { aliasedThing = column, aliasedAlias = IR.getFieldNameTxt argumentId } @@ -137,9 +138,9 @@ fromSourceRelationship lhs lhsSchema argumentId relationshipField = do { openJsonExpression = ValueExpression (ODBC.TextValue $ lbsToTxt $ J.encode lhs), openJsonWith = - Just $ - toJsonFieldSpec argumentId IntegerType - NE.:| map (uncurry toJsonFieldSpec . 
second snd) (HashMap.toList lhsSchema) + Just + $ toJsonFieldSpec argumentId IntegerType + NE.:| map (uncurry toJsonFieldSpec . second snd) (HashMap.toList lhsSchema) }, aliasedAlias = "lhs" } @@ -167,9 +168,9 @@ fromRemoteRelationFieldsG existingJoins joinColumns (IR.FieldName name, field) = ) ( fromObjectRelationSelectG existingJoins - ( withJoinColumns $ - runIdentity $ - traverse (Identity . getConst) selectionSet + ( withJoinColumns + $ runIdentity + $ traverse (Identity . getConst) selectionSet ) ) IR.SourceRelationshipArray selectionSet -> @@ -178,10 +179,10 @@ fromRemoteRelationFieldsG existingJoins joinColumns (IR.FieldName name, field) = JoinFieldSource JsonArray (Aliased {aliasedThing, aliasedAlias = name}) ) ( fromArraySelectG - ( IR.ASSimple $ - withJoinColumns $ - runIdentity $ - traverse (Identity . getConst) selectionSet + ( IR.ASSimple + $ withJoinColumns + $ runIdentity + $ traverse (Identity . getConst) selectionSet ) ) IR.SourceRelationshipArrayAggregate selectionSet -> @@ -190,10 +191,10 @@ fromRemoteRelationFieldsG existingJoins joinColumns (IR.FieldName name, field) = JoinFieldSource JsonArray (Aliased {aliasedThing, aliasedAlias = name}) ) ( fromArraySelectG - ( IR.ASAggregate $ - withJoinColumns $ - runIdentity $ - traverse (Identity . getConst) selectionSet + ( IR.ASAggregate + $ withJoinColumns + $ runIdentity + $ traverse (Identity . getConst) selectionSet ) ) where @@ -233,8 +234,8 @@ fromSelectRows annSelectG = do runReaderT (fromGBoolExp permFilter) (fromAlias selectFrom) let selectProjections = map fieldSourceProjections fieldSources - pure $ - emptySelect + pure + $ emptySelect { selectOrderBy = argsOrderBy, selectTop = permissionBasedTop <> argsTop, selectProjections, @@ -260,11 +261,11 @@ fromSelectRows annSelectG = do mkNodesSelect :: Args -> Where -> Expression -> Top -> From -> [(Int, (IR.FieldName, [FieldSource]))] -> [(Int, Projection)] mkNodesSelect Args {..} foreignKeyConditions filterExpression permissionBasedTop selectFrom nodes = [ ( index, - ExpressionProjection $ - Aliased + ExpressionProjection + $ Aliased { aliasedThing = - SelectExpression $ - emptySelect + SelectExpression + $ emptySelect { selectProjections = map fieldSourceProjections fieldSources, selectTop = permissionBasedTop <> argsTop, selectFrom = pure selectFrom, @@ -294,42 +295,42 @@ mkNodesSelect Args {..} foreignKeyConditions filterExpression permissionBasedTop mkAggregateSelect :: Args -> Where -> Expression -> From -> [(Int, (IR.FieldName, [Projection]))] -> [(Int, Projection)] mkAggregateSelect Args {..} foreignKeyConditions filterExpression selectFrom aggregates = [ ( index, - ExpressionProjection $ - Aliased + ExpressionProjection + $ Aliased { aliasedThing = - safeJsonQueryExpression JsonSingleton $ - SelectExpression $ - emptySelect - { selectProjections = projections, - selectTop = NoTop, - selectFrom = - pure $ - FromSelect - Aliased - { aliasedAlias = aggSubselectName, - aliasedThing = - emptySelect - { selectProjections = pure StarProjection, - selectTop = argsTop, - selectFrom = pure selectFrom, - selectJoins = argsJoins, - selectWhere = argsWhere <> Where [filterExpression] <> foreignKeyConditions, - selectFor = NoFor, - selectOrderBy = mempty, - selectOffset = argsOffset - } - }, - selectJoins = mempty, - selectWhere = mempty, - selectFor = - JsonFor - ForJson - { jsonCardinality = JsonSingleton, - jsonRoot = NoRoot + safeJsonQueryExpression JsonSingleton + $ SelectExpression + $ emptySelect + { selectProjections = projections, + selectTop = NoTop, + selectFrom = 
+ pure + $ FromSelect + Aliased + { aliasedAlias = aggSubselectName, + aliasedThing = + emptySelect + { selectProjections = pure StarProjection, + selectTop = argsTop, + selectFrom = pure selectFrom, + selectJoins = argsJoins, + selectWhere = argsWhere <> Where [filterExpression] <> foreignKeyConditions, + selectFor = NoFor, + selectOrderBy = mempty, + selectOffset = argsOffset + } }, - selectOrderBy = mempty, - selectOffset = Nothing - }, + selectJoins = mempty, + selectWhere = mempty, + selectFor = + JsonFor + ForJson + { jsonCardinality = JsonSingleton, + jsonRoot = NoRoot + }, + selectOrderBy = mempty, + selectOffset = Nothing + }, aliasedAlias = IR.getFieldNameTxt fieldName } ) @@ -352,21 +353,21 @@ fromStoredProcedure storedProcedure = do (\(arg, (typ, val)) -> Declare (getArgumentName arg) typ val) (HashMap.toList (IR.spArgs storedProcedure)) sql = - InterpolatedQuery $ - IIText ("EXECUTE " <> T.toTxt (IR.spStoredProcedure storedProcedure) <> " ") - : intercalate - [IIText ", "] - ( map - ( \(ArgumentName name) -> - [ IIText "@", - IIText (T.toTxt name), - IIText " = ", - IIText "@", - IIText (T.toTxt name) - ] - ) - (HashMap.keys (IR.spArgs storedProcedure)) - ) + InterpolatedQuery + $ IIText ("EXECUTE " <> T.toTxt (IR.spStoredProcedure storedProcedure) <> " ") + : intercalate + [IIText ", "] + ( map + ( \(ArgumentName name) -> + [ IIText "@", + IIText (T.toTxt name), + IIText " = ", + IIText "@", + IIText (T.toTxt name) + ] + ) + (HashMap.keys (IR.spArgs storedProcedure)) + ) storedProcedureReturnType = IR.spLogicalModel storedProcedure rawTempTableName = T.toTxt storedProcedureName aliasedTempTableName = Aliased (TempTableName rawTempTableName) rawTempTableName @@ -415,10 +416,10 @@ fromSelectAggregate -- then we'll have a LHS table that we're joining on. So we get the -- conditions expressions from the field mappings. The LHS table is -- the entityAlias, and the RHS table is selectFrom. - mforeignKeyConditions <- fmap (Where . fromMaybe []) $ - for mparentRelationship $ - \(entityAlias, mapping) -> - runReaderT (fromMapping selectFrom mapping) entityAlias + mforeignKeyConditions <- fmap (Where . 
fromMaybe []) + $ for mparentRelationship + $ \(entityAlias, mapping) -> + runReaderT (fromMapping selectFrom mapping) entityAlias filterExpression <- runReaderT (fromGBoolExp permFilter) (fromAlias selectFrom) args'@Args {argsExistingJoins} <- runReaderT (fromSelectArgsG args) (fromAlias selectFrom) @@ -431,23 +432,23 @@ fromSelectAggregate pure emptySelect { selectProjections = - map snd $ - sortBy (comparing fst) $ - expss - <> mkNodesSelect args' mforeignKeyConditions filterExpression permissionBasedTop selectFrom nodes - <> mkAggregateSelect args' mforeignKeyConditions filterExpression selectFrom aggregates, + map snd + $ sortBy (comparing fst) + $ expss + <> mkNodesSelect args' mforeignKeyConditions filterExpression permissionBasedTop selectFrom nodes + <> mkAggregateSelect args' mforeignKeyConditions filterExpression selectFrom aggregates, selectTop = NoTop, selectFrom = - pure $ - FromOpenJson $ - Aliased - { aliasedThing = - OpenJson - { openJsonExpression = ValueExpression $ ODBC.TextValue "[0]", - openJsonWith = Nothing - }, - aliasedAlias = existsFieldName - }, + pure + $ FromOpenJson + $ Aliased + { aliasedThing = + OpenJson + { openJsonExpression = ValueExpression $ ODBC.TextValue "[0]", + openJsonWith = Nothing + }, + aliasedAlias = existsFieldName + }, selectJoins = mempty, -- JOINs and WHEREs are only relevant in subselects selectWhere = mempty, selectFor = JsonFor ForJson {jsonCardinality = JsonSingleton, jsonRoot = NoRoot}, @@ -537,11 +538,11 @@ fromTableExpFieldG :: -- TODO: Convert function to be similar to Nodes function Maybe (ReaderT EntityAlias FromIr (Int, Projection)) fromTableExpFieldG = \case (index, (IR.FieldName name, IR.TAFExp text)) -> - Just $ - pure $ - ( index, - fieldSourceProjections $ - ExpressionFieldSource + Just + $ pure + $ ( index, + fieldSourceProjections + $ ExpressionFieldSource Aliased { aliasedThing = TSQL.ValueExpression (ODBC.TextValue text), aliasedAlias = name @@ -554,11 +555,11 @@ fromTableAggFieldG :: Maybe (Int, (IR.FieldName, [Projection])) fromTableAggFieldG = \case (index, (fieldName, IR.TAFAgg (aggregateFields :: [(IR.FieldName, IR.AggregateField 'MSSQL Expression)]))) -> - Just $ - let aggregates = - aggregateFields <&> \(fieldName', aggregateField) -> - fromAggregateField (IR.getFieldNameTxt fieldName') aggregateField - in (index, (fieldName, aggregates)) + Just + $ let aggregates = + aggregateFields <&> \(fieldName', aggregateField) -> + fromAggregateField (IR.getFieldNameTxt fieldName') aggregateField + in (index, (fieldName, aggregates)) _ -> Nothing fromTableNodesFieldG :: @@ -590,14 +591,14 @@ fromAggregateField alias aggregateField = ExpressionProjection $ Aliased (ValueExpression (ODBC.TextValue text)) (IR.getFieldNameTxt fieldName) -- See Hasura.RQL.Types.Backend.supportsAggregateComputedFields IR.SFComputedField _ _ -> error "Aggregate computed fields aren't currently supported for MSSQL!" 
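-- A hand-written illustration (assuming OverloadedStrings; table and alias
-- names are made up) of why 'fromSelectAggregate' above selects FROM
-- OPENJSON('[0]'): the one-element JSON array yields exactly one row, so the
-- aggregate projection produces a single JSON object even when the
-- aggregated table is empty.
exampleAggregateScaffold :: Text
exampleAggregateScaffold =
  "SELECT (SELECT COUNT(*) FROM [dbo].[authors]) AS [aggregate] \
  \FROM OPENJSON('[0]') AS [exists_placeholder] \
  \FOR JSON PATH, WITHOUT_ARRAY_WRAPPER"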
- in ExpressionProjection $ - flip Aliased alias $ - safeJsonQueryExpression JsonSingleton $ - SelectExpression $ - emptySelect - { selectProjections = projections, - selectFor = JsonFor $ ForJson JsonSingleton NoRoot - } + in ExpressionProjection + $ flip Aliased alias + $ safeJsonQueryExpression JsonSingleton + $ SelectExpression + $ emptySelect + { selectProjections = projections, + selectFor = JsonFor $ ForJson JsonSingleton NoRoot + } where columnFieldAggEntity col = columnNameToFieldName col $ EntityAlias aggSubselectName diff --git a/server/src-lib/Hasura/Backends/MSSQL/Instances/Execute.hs b/server/src-lib/Hasura/Backends/MSSQL/Instances/Execute.hs index 9f52bc6e281..cb6ec6d318b 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/Instances/Execute.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/Instances/Execute.hs @@ -114,7 +114,7 @@ msDBQueryPlan userInfo sourceName sourceConfig qrf _ _ = do mssqlRunReadOnly (_mscExecCtx sourceConfig) (fmap withNoStatistics queryTx) runShowplan :: - MonadIO m => + (MonadIO m) => ODBC.Query -> Tx.TxET QErr m [Text] runShowplan query = Tx.withTxET defaultMSSQLTxErrorHandler do @@ -126,7 +126,7 @@ runShowplan query = Tx.withTxET defaultMSSQLTxErrorHandler do pure texts msDBQueryExplain :: - MonadError QErr m => + (MonadError QErr m) => RootFieldAlias -> UserInfo -> SourceName -> @@ -140,21 +140,21 @@ msDBQueryExplain fieldName userInfo sourceName sourceConfig qrf _ _ = do statement <- qwdQuery <$> planQuery sessionVariables qrf let query = toQueryPretty (fromSelect statement) queryString = ODBC.renderQuery query - odbcQuery = OnBaseMonad $ - mssqlRunReadOnly + odbcQuery = OnBaseMonad + $ mssqlRunReadOnly (_mscExecCtx sourceConfig) do showplan <- runShowplan query - pure $ - withNoStatistics $ - encJFromJValue $ - ExplainPlan - fieldName - (Just queryString) - (Just showplan) - pure $ - AB.mkAnyBackend $ - DBStepInfo @'MSSQL sourceName sourceConfig Nothing odbcQuery () + pure + $ withNoStatistics + $ encJFromJValue + $ ExplainPlan + fieldName + (Just queryString) + (Just showplan) + pure + $ AB.mkAnyBackend + $ DBStepInfo @'MSSQL sourceName sourceConfig Nothing odbcQuery () msDBSubscriptionExplain :: (MonadIO m, MonadBaseControl IO m, MonadError QErr m) => @@ -208,16 +208,16 @@ multiplexRootReselect variables rootReselect = } ], selectFrom = - Just $ - FromOpenJson + Just + $ FromOpenJson Aliased { aliasedThing = OpenJson { openJsonExpression = ValueExpression (ODBC.TextValue $ lbsToTxt $ J.encode variables), openJsonWith = - Just $ - NE.fromList + Just + $ NE.fromList [ ScalarField GuidType DataLengthUnspecified resultIdAlias (Just $ IndexPath RootPath 0), JsonField resultVarsAlias (Just $ IndexPath RootPath 1) ] @@ -287,8 +287,8 @@ msDBLiveQuerySubscriptionPlan UserInfo {_uiSession, _uiRole} _sourceName sourceC cohortVariables <- prepareStateCohortVariables sourceConfig _uiSession prepareState queryTags <- ask let parameterizedPlan = ParameterizedSubscriptionQueryPlan _uiRole $ (MultiplexedQuery' reselect queryTags) - pure $ - SubscriptionQueryPlan parameterizedPlan sourceConfig dummyCohortId () cohortVariables namespace + pure + $ SubscriptionQueryPlan parameterizedPlan sourceConfig dummyCohortId () cohortVariables namespace prepareStateCohortVariables :: (MonadError QErr m, MonadIO m, MonadBaseControl IO m) => @@ -299,8 +299,8 @@ prepareStateCohortVariables :: prepareStateCohortVariables sourceConfig session prepState = do (namedVars, posVars) <- validateVariables sourceConfig session prepState let PrepareState {sessionVariables} = prepState - pure $ 
- mkCohortVariables + pure + $ mkCohortVariables sessionVariables session namedVars @@ -365,8 +365,8 @@ validateVariables sourceConfig sessionVariableValues prepState = do if null projAll then Nothing else - Just $ - renderQuery + Just + $ renderQuery emptySelect { selectProjections = projAll, selectFrom = sessionOpenJson occSessionVars @@ -398,8 +398,8 @@ validateVariables sourceConfig sessionVariableValues prepState = do sessionOpenJson occSessionVars = nonEmpty (getSessionVariables occSessionVars) <&> \fields -> - FromOpenJson $ - Aliased + FromOpenJson + $ Aliased ( OpenJson (ValueExpression $ ODBC.TextValue $ lbsToTxt $ J.encode occSessionVars) (pure (sessField <$> fields)) diff --git a/server/src-lib/Hasura/Backends/MSSQL/Instances/Schema.hs b/server/src-lib/Hasura/Backends/MSSQL/Instances/Schema.hs index 73aa124ace9..e1addfdcc87 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/Instances/Schema.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/Instances/Schema.hs @@ -148,8 +148,8 @@ msTableArgs tableInfo = do orderByArg <- orderByParser limitArg <- tableLimitArg offsetArg <- tableOffsetArg - pure $ - IR.SelectArgs + pure + $ IR.SelectArgs { IR._saWhere = whereArg, IR._saOrderBy = orderByArg, IR._saLimit = limitArg, @@ -172,42 +172,46 @@ msColumnParser columnType nullability = case columnType of -- example, exposing all the float types as a GraphQL Float type is -- incorrect, similarly exposing all the integer types as a GraphQL Int ColumnScalar scalarType -> - P.memoizeOn 'msColumnParser (scalarType, nullability) $ - peelWithOrigin . fmap (ColumnValue columnType) . msPossiblyNullable scalarType nullability - <$> case scalarType of - -- text - MSSQL.CharType -> pure $ mkCharValue <$> P.string - MSSQL.VarcharType -> pure $ mkCharValue <$> P.string - MSSQL.WcharType -> pure $ ODBC.TextValue <$> P.string - MSSQL.WvarcharType -> pure $ ODBC.TextValue <$> P.string - MSSQL.WtextType -> pure $ ODBC.TextValue <$> P.string - MSSQL.TextType -> pure $ ODBC.TextValue <$> P.string - -- integer - MSSQL.IntegerType -> pure $ ODBC.IntValue . fromIntegral <$> P.int - MSSQL.SmallintType -> pure $ ODBC.IntValue . fromIntegral <$> P.int - MSSQL.BigintType -> pure $ ODBC.IntValue . fromIntegral <$> P.int - MSSQL.TinyintType -> pure $ ODBC.IntValue . fromIntegral <$> P.int - -- float - MSSQL.NumericType -> pure $ ODBC.DoubleValue <$> P.float - MSSQL.DecimalType -> pure $ ODBC.DoubleValue <$> P.float - MSSQL.FloatType -> pure $ ODBC.DoubleValue <$> P.float - MSSQL.RealType -> pure $ ODBC.DoubleValue <$> P.float - -- boolean - MSSQL.BitType -> pure $ ODBC.BoolValue <$> P.boolean - _ -> do - name <- MSSQL.mkMSSQLScalarTypeName scalarType - let schemaType = P.TNamed P.NonNullable $ P.Definition name Nothing Nothing [] P.TIScalar - pure $ - P.Parser - { pType = schemaType, - pParser = - P.valueToJSON (P.toGraphQLType schemaType) - >=> either (P.parseErrorWith P.ParseFailed . toErrorMessage . qeError) pure . (MSSQL.parseScalarValue scalarType) - } + P.memoizeOn 'msColumnParser (scalarType, nullability) + $ peelWithOrigin + . fmap (ColumnValue columnType) + . 
msPossiblyNullable scalarType nullability + <$> case scalarType of + -- text + MSSQL.CharType -> pure $ mkCharValue <$> P.string + MSSQL.VarcharType -> pure $ mkCharValue <$> P.string + MSSQL.WcharType -> pure $ ODBC.TextValue <$> P.string + MSSQL.WvarcharType -> pure $ ODBC.TextValue <$> P.string + MSSQL.WtextType -> pure $ ODBC.TextValue <$> P.string + MSSQL.TextType -> pure $ ODBC.TextValue <$> P.string + -- integer + MSSQL.IntegerType -> pure $ ODBC.IntValue . fromIntegral <$> P.int + MSSQL.SmallintType -> pure $ ODBC.IntValue . fromIntegral <$> P.int + MSSQL.BigintType -> pure $ ODBC.IntValue . fromIntegral <$> P.int + MSSQL.TinyintType -> pure $ ODBC.IntValue . fromIntegral <$> P.int + -- float + MSSQL.NumericType -> pure $ ODBC.DoubleValue <$> P.float + MSSQL.DecimalType -> pure $ ODBC.DoubleValue <$> P.float + MSSQL.FloatType -> pure $ ODBC.DoubleValue <$> P.float + MSSQL.RealType -> pure $ ODBC.DoubleValue <$> P.float + -- boolean + MSSQL.BitType -> pure $ ODBC.BoolValue <$> P.boolean + _ -> do + name <- MSSQL.mkMSSQLScalarTypeName scalarType + let schemaType = P.TNamed P.NonNullable $ P.Definition name Nothing Nothing [] P.TIScalar + pure + $ P.Parser + { pType = schemaType, + pParser = + P.valueToJSON (P.toGraphQLType schemaType) + >=> either (P.parseErrorWith P.ParseFailed . toErrorMessage . qeError) pure + . (MSSQL.parseScalarValue scalarType) + } ColumnEnumReference (EnumReference tableName enumValues customTableName) -> case nonEmpty (HashMap.toList enumValues) of Just enumValuesList -> - peelWithOrigin . fmap (ColumnValue columnType) + peelWithOrigin + . fmap (ColumnValue columnType) <$> msEnumParser tableName enumValuesList customTableName nullability Nothing -> throw400 ValidationFailed "empty enum values" where @@ -270,7 +274,8 @@ msOrderByOperators :: ) ) msOrderByOperators _tCase = - (Name._order_by,) $ + (Name._order_by,) + $ -- NOTE: NamingCase is not being used here as we don't support naming conventions for this DB NE.fromList [ ( define Name._asc "in ascending order, nulls first", @@ -311,75 +316,75 @@ msComparisonExps = P.memoize 'comparisonExps \columnType -> do -- field info let name = P.getName typedParser <> Name.__MSSQL_comparison_exp desc = - G.Description $ - "Boolean expression to compare columns of type " - <> P.getName typedParser - <<> ". All fields are combined with logical 'AND'." + G.Description + $ "Boolean expression to compare columns of type " + <> P.getName typedParser + <<> ". All fields are combined with logical 'AND'." -- Naming convention tCase <- retrieve $ _rscNamingConvention . _siCustomization @'MSSQL - pure $ - P.object name (Just desc) $ - fmap catMaybes $ - sequenceA $ - concat - [ -- Common ops for all types - equalityOperators - tCase - collapseIfNull - (mkParameter <$> typedParser) - (mkListLiteral <$> columnListParser), - comparisonOperators - tCase - collapseIfNull - (mkParameter <$> typedParser), - -- Ops for String like types - guard (isScalarColumnWhere (`elem` MSSQL.stringTypes) columnType) - *> [ P.fieldOptional - Name.__like - (Just "does the column match the given pattern") - (ALIKE . mkParameter <$> typedParser), - P.fieldOptional - Name.__nlike - (Just "does the column NOT match the given pattern") - (ANLIKE . mkParameter <$> typedParser) - ], - -- Ops for Geometry/Geography types - guard (isScalarColumnWhere (`elem` MSSQL.geoTypes) columnType) - *> [ P.fieldOptional - Name.__st_contains - (Just "does the column contain the given value") - (ABackendSpecific . MSSQL.ASTContains . 
mkParameter <$> typedParser), - P.fieldOptional - Name.__st_equals - (Just "is the column equal to given value (directionality is ignored)") - (ABackendSpecific . MSSQL.ASTEquals . mkParameter <$> typedParser), - P.fieldOptional - Name.__st_intersects - (Just "does the column spatially intersect the given value") - (ABackendSpecific . MSSQL.ASTIntersects . mkParameter <$> typedParser), - P.fieldOptional - Name.__st_overlaps - (Just "does the column 'spatially overlap' (intersect but not completely contain) the given value") - (ABackendSpecific . MSSQL.ASTOverlaps . mkParameter <$> typedParser), - P.fieldOptional - Name.__st_within - (Just "is the column contained in the given value") - (ABackendSpecific . MSSQL.ASTWithin . mkParameter <$> typedParser) - ], - -- Ops for Geometry types - guard (isScalarColumnWhere (MSSQL.GeometryType ==) columnType) - *> [ P.fieldOptional - Name.__st_crosses - (Just "does the column cross the given geometry value") - (ABackendSpecific . MSSQL.ASTCrosses . mkParameter <$> typedParser), - P.fieldOptional - Name.__st_touches - (Just "does the column have at least one point in common with the given geometry value") - (ABackendSpecific . MSSQL.ASTTouches . mkParameter <$> typedParser) - ] - ] + pure + $ P.object name (Just desc) + $ fmap catMaybes + $ sequenceA + $ concat + [ -- Common ops for all types + equalityOperators + tCase + collapseIfNull + (mkParameter <$> typedParser) + (mkListLiteral <$> columnListParser), + comparisonOperators + tCase + collapseIfNull + (mkParameter <$> typedParser), + -- Ops for String like types + guard (isScalarColumnWhere (`elem` MSSQL.stringTypes) columnType) + *> [ P.fieldOptional + Name.__like + (Just "does the column match the given pattern") + (ALIKE . mkParameter <$> typedParser), + P.fieldOptional + Name.__nlike + (Just "does the column NOT match the given pattern") + (ANLIKE . mkParameter <$> typedParser) + ], + -- Ops for Geometry/Geography types + guard (isScalarColumnWhere (`elem` MSSQL.geoTypes) columnType) + *> [ P.fieldOptional + Name.__st_contains + (Just "does the column contain the given value") + (ABackendSpecific . MSSQL.ASTContains . mkParameter <$> typedParser), + P.fieldOptional + Name.__st_equals + (Just "is the column equal to given value (directionality is ignored)") + (ABackendSpecific . MSSQL.ASTEquals . mkParameter <$> typedParser), + P.fieldOptional + Name.__st_intersects + (Just "does the column spatially intersect the given value") + (ABackendSpecific . MSSQL.ASTIntersects . mkParameter <$> typedParser), + P.fieldOptional + Name.__st_overlaps + (Just "does the column 'spatially overlap' (intersect but not completely contain) the given value") + (ABackendSpecific . MSSQL.ASTOverlaps . mkParameter <$> typedParser), + P.fieldOptional + Name.__st_within + (Just "is the column contained in the given value") + (ABackendSpecific . MSSQL.ASTWithin . mkParameter <$> typedParser) + ], + -- Ops for Geometry types + guard (isScalarColumnWhere (MSSQL.GeometryType ==) columnType) + *> [ P.fieldOptional + Name.__st_crosses + (Just "does the column cross the given geometry value") + (ABackendSpecific . MSSQL.ASTCrosses . mkParameter <$> typedParser), + P.fieldOptional + Name.__st_touches + (Just "does the column have at least one point in common with the given geometry value") + (ABackendSpecific . MSSQL.ASTTouches . 
mkParameter <$> typedParser) + ] + ] where mkListLiteral :: [ColumnValue 'MSSQL] -> UnpreparedValue 'MSSQL mkListLiteral = diff --git a/server/src-lib/Hasura/Backends/MSSQL/Instances/Transport.hs b/server/src-lib/Hasura/Backends/MSSQL/Instances/Transport.hs index 7a529a1cca4..808ce76c076 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/Instances/Transport.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/Instances/Transport.hs @@ -77,9 +77,9 @@ runQuery :: m (DiffTime, EncJSON) runQuery reqId query fieldName _userInfo logger _ _sourceConfig tx genSql _ = do logQueryLog logger $ mkQueryLog query fieldName genSql reqId - withElapsedTime $ - newSpan ("MSSQL Query for root field " <>> fieldName) $ - fmap snd (run tx) + withElapsedTime + $ newSpan ("MSSQL Query for root field " <>> fieldName) + $ fmap snd (run tx) runQueryExplain :: ( MonadIO m, @@ -114,9 +114,9 @@ runMutation :: m (DiffTime, EncJSON) runMutation reqId query fieldName _userInfo logger _ _sourceConfig tx _genSql _ = do logQueryLog logger $ mkQueryLog query fieldName Nothing reqId - withElapsedTime $ - newSpan ("MSSQL Mutation for root field " <>> fieldName) $ - run tx + withElapsedTime + $ newSpan ("MSSQL Mutation for root field " <>> fieldName) + $ run tx runSubscription :: (MonadIO m, MonadBaseControl IO m) => diff --git a/server/src-lib/Hasura/Backends/MSSQL/Instances/Types.hs b/server/src-lib/Hasura/Backends/MSSQL/Instances/Types.hs index 8dfce357389..c341db3a67b 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/Instances/Types.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/Instances/Types.hs @@ -64,8 +64,8 @@ instance Backend 'MSSQL where type HealthCheckTest 'MSSQL = HealthCheckTestSql healthCheckImplementation = - Just $ - HealthCheckImplementation + Just + $ HealthCheckImplementation { _hciDefaultTest = defaultHealthCheckTestSql, _hciTestCodec = codec } diff --git a/server/src-lib/Hasura/Backends/MSSQL/Meta.hs b/server/src-lib/Hasura/Backends/MSSQL/Meta.hs index c7096d48e74..c54e008efff 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/Meta.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/Meta.hs @@ -142,9 +142,9 @@ transformTable tableInfo = foreignKeysMetadata = HS.fromList $ map ForeignKeyMetadata $ coalesceKeys $ concat foreignKeys primaryKey = transformPrimaryKey <$> staJoinedSysPrimaryKey tableInfo identityColumns = - map (ColumnName . scName) $ - filter scIsIdentity $ - staJoinedSysColumn tableInfo + map (ColumnName . scName) + $ filter scIsIdentity + $ staJoinedSysColumn tableInfo in ( tableName, DBTableMetadata tableOID diff --git a/server/src-lib/Hasura/Backends/MSSQL/Plan.hs b/server/src-lib/Hasura/Backends/MSSQL/Plan.hs index 818875fd68d..d255e458313 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/Plan.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/Plan.hs @@ -49,7 +49,7 @@ import Network.HTTP.Types qualified as HTTP -- Top-level planner planQuery :: - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> QueryDB 'MSSQL Void (UnpreparedValue 'MSSQL) -> m (QueryWithDDL Select) @@ -59,7 +59,7 @@ planQuery sessionVariables queryDB = do -- | For more information, see the module/documentation of 'Hasura.GraphQL.Execute.RemoteJoin.Source'. 
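-- A condensed sketch of the instrumentation pattern in 'runQuery' and
-- 'runMutation' above ('withElapsedTime' and 'newSpan' as imported there;
-- the wrapper name is hypothetical).
timedTracedAction :: (MonadIO m, MonadTrace m) => Text -> m a -> m (DiffTime, a)
timedTracedAction spanName action =
  -- Measure wall-clock time around a named tracing span.
  withElapsedTime $ newSpan spanName action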
planSourceRelationship :: - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> -- | List of json objects, each of which becomes a row of the table NE.NonEmpty J.Object -> @@ -88,7 +88,7 @@ planSourceRelationship ) runIrWrappingRoot :: - MonadError QErr m => + (MonadError QErr m) => FromIr Select -> m (QueryWithDDL Select) runIrWrappingRoot selectAction = @@ -97,7 +97,7 @@ runIrWrappingRoot selectAction = -- | Prepare a value without any query planning; we just execute the -- query with the values embedded. prepareValueQuery :: - MonadError QErr m => + (MonadError QErr m) => SessionVariables -> UnpreparedValue 'MSSQL -> m Expression @@ -126,7 +126,7 @@ prepareValueQuery sessionVariables = <*> pure DataLengthMax planSubscription :: - MonadError QErr m => + (MonadError QErr m) => InsOrdHashMap.InsOrdHashMap G.Name (QueryDB 'MSSQL Void (UnpreparedValue 'MSSQL)) -> SessionVariables -> m (Reselect, PrepareState) @@ -222,8 +222,8 @@ prepareValueSubscription globalVariables = modify' (\s -> s {sessionVariables = sessionVariables s <> globalVariables}) pure $ resultVarExp (RootPath `FieldPath` "session") UVSessionVar _typ text -> do - unless (text `Set.member` globalVariables) $ - throw400 + unless (text `Set.member` globalVariables) + $ throw400 NotFound ("missing session variable: " <>> sessionVariableToText text) modify' (\s -> s {sessionVariables = text `Set.insert` sessionVariables s}) @@ -251,12 +251,12 @@ prepareValueSubscription globalVariables = where resultVarExp :: JsonPath -> Expression resultVarExp = - JsonValueExpression $ - ColumnExpression $ - FieldName - { fieldNameEntity = rowAlias, - fieldName = resultVarsAlias - } + JsonValueExpression + $ ColumnExpression + $ FieldName + { fieldNameEntity = rowAlias, + fieldName = resultVarsAlias + } queryDot :: Text -> JsonPath queryDot name = RootPath `FieldPath` "query" `FieldPath` name diff --git a/server/src-lib/Hasura/Backends/MSSQL/SQL/Error.hs b/server/src-lib/Hasura/Backends/MSSQL/SQL/Error.hs index f2fe0fcd753..c9623dd5570 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/SQL/Error.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/SQL/Error.hs @@ -175,23 +175,23 @@ mkMSSQLTxErrorHandler isExpectedError = \case let unexpectedQueryError = (Error.internalError "database query error") { Error.qeInternal = - Just $ - Error.ExtraInternal $ - object - [ "query" .= ODBC.renderQuery query, - "exception" .= odbcExceptionToJSONValue exception - ] + Just + $ Error.ExtraInternal + $ object + [ "query" .= ODBC.renderQuery query, + "exception" .= odbcExceptionToJSONValue exception + ] } - in fromMaybe unexpectedQueryError $ - asExpectedError exception - <&> \err -> err {Error.qeInternal = Just $ Error.ExtraInternal $ object ["query" .= ODBC.renderQuery query]} + in fromMaybe unexpectedQueryError + $ asExpectedError exception + <&> \err -> err {Error.qeInternal = Just $ Error.ExtraInternal $ object ["query" .= ODBC.renderQuery query]} MSSQLConnError exception -> let unexpectedConnError = (Error.internalError "mssql connection error") { Error.qeInternal = - Just $ - Error.ExtraInternal $ - object ["exception" .= odbcExceptionToJSONValue exception] + Just + $ Error.ExtraInternal + $ object ["exception" .= odbcExceptionToJSONValue exception] } in fromMaybe unexpectedConnError $ asExpectedError exception MSSQLInternal err -> diff --git a/server/src-lib/Hasura/Backends/MSSQL/Schema/IfMatched.hs b/server/src-lib/Hasura/Backends/MSSQL/Schema/IfMatched.hs index cc69c897305..99467082ed9 100644 --- 
a/server/src-lib/Hasura/Backends/MSSQL/Schema/IfMatched.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/Schema/IfMatched.hs @@ -95,8 +95,8 @@ ifMatchedObjectParser tableInfo = runMaybeT do updateColumnsName = Name._update_columns whereName = Name._where whereExpParser <- tableBoolExp tableInfo - pure $ - P.object objectName (Just objectDesc) do + pure + $ P.object objectName (Just objectDesc) do _imConditions <- (\whereExp -> BoolAnd $ updateFilter : maybeToList whereExp) <$> P.fieldOptional whereName Nothing whereExpParser @@ -130,18 +130,19 @@ tableInsertMatchColumnsEnum tableInfo = do columns <- tableSelectColumns tableInfo let enumName = mkTypename $ tableGQLName <> Name.__insert_match_column description = - Just $ - G.Description $ - "select match_columns of table " <>> tableInfoName tableInfo - pure $ - P.enum enumName description - <$> nonEmpty - [ ( define $ ciName column, - ciColumn column - ) - | SCIScalarColumn column <- columns, - isMatchColumnValid column - ] + Just + $ G.Description + $ "select match_columns of table " + <>> tableInfoName tableInfo + pure + $ P.enum enumName description + <$> nonEmpty + [ ( define $ ciName column, + ciColumn column + ) + | SCIScalarColumn column <- columns, + isMatchColumnValid column + ] where define name = P.Definition name (Just $ G.Description "column name") Nothing [] P.EnumValueInfo diff --git a/server/src-lib/Hasura/Backends/MSSQL/ToQuery.hs b/server/src-lib/Hasura/Backends/MSSQL/ToQuery.hs index 2e1c836aadd..7cd0c06beff 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/ToQuery.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/ToQuery.hs @@ -243,15 +243,15 @@ fromInsert Insert {..} = SepByPrinter NewlinePrinter $ ["INSERT INTO " <+> fromTableName insertTable] - <> [ "(" <+> SepByPrinter ", " (map (fromNameText . columnNameText) insertColumns) <+> ")" - | not (null insertColumns) - ] - <> [ fromInsertOutput insertOutput, - "INTO " <+> fromTempTable insertTempTable, - if null insertColumns - then "VALUES " <+> SepByPrinter ", " (map (const "(DEFAULT)") insertValues) - else fromValuesList insertValues - ] + <> [ "(" <+> SepByPrinter ", " (map (fromNameText . 
columnNameText) insertColumns) <+> ")" + | not (null insertColumns) + ] + <> [ fromInsertOutput insertOutput, + "INTO " <+> fromTempTable insertTempTable, + if null insertColumns + then "VALUES " <+> SepByPrinter ", " (map (const "(DEFAULT)") insertValues) + else fromValuesList insertValues + ] fromSetValue :: SetValue -> Printer fromSetValue = \case @@ -300,8 +300,8 @@ fromMergeUsing MergeUsing {..} = let alias = "merge_temptable" columnNameToProjection ColumnName {columnNameText} = -- merge_temptable.column_name AS column_name - FieldNameProjection $ - Aliased + FieldNameProjection + $ Aliased { aliasedThing = FieldName columnNameText alias, aliasedAlias = columnNameText } @@ -342,8 +342,8 @@ fromMergeWhenMatched (MergeWhenMatched updateColumns updateCondition updatePrese updateSet :: UpdateSet updateSet = - HashMap.fromList $ - map + HashMap.fromList + $ map ( \cn@ColumnName {..} -> ( cn, UpdateSet $ ColumnExpression $ FieldName columnNameText mergeSourceAlias @@ -498,14 +498,14 @@ fromSelectIntoTempTable SelectIntoTempTable {sittTempTableName, sittColumns, sit "FROM " <+> fromTableName sittFromTableName, "WHERE " <+> falsePrinter ] - <> case sittConstraints of - RemoveConstraints -> - [ "UNION ALL SELECT " <+> columns, - "FROM " <+> fromTableName sittFromTableName, - "WHERE " <+> falsePrinter - ] - KeepConstraints -> - [] + <> case sittConstraints of + RemoveConstraints -> + [ "UNION ALL SELECT " <+> columns, + "FROM " <+> fromTableName sittFromTableName, + "WHERE " <+> falsePrinter + ] + KeepConstraints -> + [] where -- column names separated by commas columns = @@ -550,26 +550,26 @@ fromSelect Select {..} = fmap fromWith selectWith ?<+> result (map fromProjection (toList selectProjections)) ) ] - <> ["FROM " <+> IndentPrinter 5 (fromFrom f) | Just f <- [selectFrom]] - <> [ SepByPrinter - NewlinePrinter - ( map - ( \Join {..} -> - SeqPrinter - [ "OUTER APPLY (", - IndentPrinter 13 (fromJoinSource joinSource), - ") ", - NewlinePrinter, - "AS ", - fromJoinAlias joinJoinAlias - ] - ) - selectJoins - ), - fromWhere selectWhere, - fromOrderBys selectTop selectOffset selectOrderBy, - fromFor selectFor - ] + <> ["FROM " <+> IndentPrinter 5 (fromFrom f) | Just f <- [selectFrom]] + <> [ SepByPrinter + NewlinePrinter + ( map + ( \Join {..} -> + SeqPrinter + [ "OUTER APPLY (", + IndentPrinter 13 (fromJoinSource joinSource), + ") ", + NewlinePrinter, + "AS ", + fromJoinAlias joinJoinAlias + ] + ) + selectJoins + ), + fromWhere selectWhere, + fromOrderBys selectTop selectOffset selectOrderBy, + fromFor selectFor + ] fromWith :: With -> Printer fromWith (With withSelects) = diff --git a/server/src-lib/Hasura/Backends/MSSQL/Types/Insert.hs b/server/src-lib/Hasura/Backends/MSSQL/Types/Insert.hs index eded70386ab..75051e69e3f 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/Types/Insert.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/Types/Insert.hs @@ -25,11 +25,11 @@ data BackendInsert v = BackendInsert deriving instance (Backend 'MSSQL, Show (IfMatched v), Show v) => Show (BackendInsert v) -deriving instance Backend 'MSSQL => Functor BackendInsert +deriving instance (Backend 'MSSQL) => Functor BackendInsert -deriving instance Backend 'MSSQL => Foldable BackendInsert +deriving instance (Backend 'MSSQL) => Foldable BackendInsert -deriving instance Backend 'MSSQL => Traversable BackendInsert +deriving instance (Backend 'MSSQL) => Traversable BackendInsert -- | The IR data representing an @if_matched@ clause, which handles upserts. 
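-- A hand-written illustration of the T-SQL shape printed by
-- 'fromSelectIntoTempTable' above (names and the rendering of the false
-- predicate are illustrative). Under 'RemoveConstraints' the extra UNION ALL
-- branch stops SQL Server from copying the IDENTITY property and column
-- constraints into the temp table, so explicit values can later be inserted
-- for every column; under 'KeepConstraints' the branch is omitted.
exampleSelectIntoTempTable :: Text
exampleSelectIntoTempTable =
  "SELECT [id], [name] INTO #deleted FROM [dbo].[authors] WHERE (1<>1) \
  \UNION ALL SELECT [id], [name] FROM [dbo].[authors] WHERE (1<>1)"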
data IfMatched v = IfMatched @@ -45,8 +45,8 @@ data IfMatched v = IfMatched deriving instance (Backend 'MSSQL, Show (AnnBoolExp 'MSSQL v), Show v) => Show (IfMatched v) -deriving instance Backend 'MSSQL => Functor IfMatched +deriving instance (Backend 'MSSQL) => Functor IfMatched -deriving instance Backend 'MSSQL => Foldable IfMatched +deriving instance (Backend 'MSSQL) => Foldable IfMatched -deriving instance Backend 'MSSQL => Traversable IfMatched +deriving instance (Backend 'MSSQL) => Traversable IfMatched diff --git a/server/src-lib/Hasura/Backends/MSSQL/Types/Internal.hs b/server/src-lib/Hasura/Backends/MSSQL/Types/Internal.hs index 63f375d49f1..adb5b41499b 100644 --- a/server/src-lib/Hasura/Backends/MSSQL/Types/Internal.hs +++ b/server/src-lib/Hasura/Backends/MSSQL/Types/Internal.hs @@ -630,7 +630,7 @@ fromDataLength = \case DataLengthInt len -> "(" <> tshow len <> ")" DataLengthMax -> "(max)" -mkMSSQLScalarTypeName :: MonadError QErr m => ScalarType -> m G.Name +mkMSSQLScalarTypeName :: (MonadError QErr m) => ScalarType -> m G.Name mkMSSQLScalarTypeName = \case CharType -> pure GName._String WcharType -> pure GName._String @@ -730,7 +730,8 @@ parseScalarValue scalarType jValue = case scalarType of parseGeoJSONAsWKT :: J.Value -> Either QErr Text parseGeoJSONAsWKT jv = runAesonParser (J.parseJSON @Geo.GeometryWithCRS) jv - >>= fmap WKT.getWKT . WKT.toWKT + >>= fmap WKT.getWKT + . WKT.toWKT isComparableType, isNumType :: ScalarType -> Bool isComparableType = \case @@ -753,16 +754,20 @@ isNumType = \case getGQLTableName :: TableName -> Either QErr G.Name getGQLTableName tn = do let textName = snakeCaseName (tableName tn) (tableSchema tn) - onNothing (G.mkName textName) $ - throw400 ValidationFailed $ - "cannot include " <> textName <> " in the GraphQL schema because it is not a valid GraphQL identifier" + onNothing (G.mkName textName) + $ throw400 ValidationFailed + $ "cannot include " + <> textName + <> " in the GraphQL schema because it is not a valid GraphQL identifier" getGQLFunctionName :: FunctionName -> Either QErr G.Name getGQLFunctionName fn = do let textName = snakeCaseName (functionName fn) (functionSchema fn) - onNothing (G.mkName textName) $ - throw400 ValidationFailed $ - "cannot include " <> textName <> " in the GraphQL schema because it is not a valid GraphQL identifier" + onNothing (G.mkName textName) + $ throw400 ValidationFailed + $ "cannot include " + <> textName + <> " in the GraphQL schema because it is not a valid GraphQL identifier" snakeCaseName :: Text -> SchemaName -> Text snakeCaseName tableName (SchemaName tableSchema) = diff --git a/server/src-lib/Hasura/Backends/Postgres/Connection/MonadTx.hs b/server/src-lib/Hasura/Backends/Postgres/Connection/MonadTx.hs index 76924f401dd..72c3ad929bc 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Connection/MonadTx.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Connection/MonadTx.hs @@ -100,11 +100,11 @@ runTxWithCtxAndUserInfo :: m a runTxWithCtxAndUserInfo userInfo pgExecCtx pgExecTxType pgExecFrom tx = do traceCtx <- Tracing.currentContext - liftEitherM $ - runExceptT $ - (_pecRunTx pgExecCtx) (PGExecCtxInfo pgExecTxType pgExecFrom) $ - withTraceContext traceCtx $ - withUserInfo userInfo tx + liftEitherM + $ runExceptT + $ (_pecRunTx pgExecCtx) (PGExecCtxInfo pgExecTxType pgExecFrom) + $ withTraceContext traceCtx + $ withUserInfo userInfo tx -- | This runs the given set of statements (Tx) without wrapping them in BEGIN -- and COMMIT. This should only be used for running a single statement query! 
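-- A sketch of the decoration order in 'runTxWithCtxAndUserInfo' above
-- (assuming this module's imports; the helper is hypothetical): the trace
-- context is installed first, then the session variables, and only then do
-- the caller's statements run.
withCtxAndUser :: (MonadIO m) => Tracing.TraceContext -> UserInfo -> PG.TxET QErr m a -> PG.TxET QErr m a
withCtxAndUser traceCtx userInfo tx =
  withTraceContext traceCtx
    $ withUserInfo userInfo tx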
@@ -119,17 +119,18 @@ runQueryTx :: m a runQueryTx pgExecCtx pgExecFrom tx = do let pgExecCtxInfo = PGExecCtxInfo NoTxRead pgExecFrom - liftEitherM $ - runExceptT $ - (_pecRunTx pgExecCtx) pgExecCtxInfo tx + liftEitherM + $ runExceptT + $ (_pecRunTx pgExecCtx) pgExecCtxInfo tx setHeadersTx :: (MonadIO m) => SessionVariables -> PG.TxET QErr m () setHeadersTx session = do PG.unitQE defaultTxErrorHandler setSess () False where setSess = - PG.fromText $ - "SET LOCAL \"hasura.user\" = " <> toSQLTxt (sessionInfoJsonExp session) + PG.fromText + $ "SET LOCAL \"hasura.user\" = " + <> toSQLTxt (sessionInfoJsonExp session) sessionInfoJsonExp :: SessionVariables -> S.SQLExp sessionInfoJsonExp = S.SELit . encodeToStrictText @@ -153,69 +154,69 @@ withTraceContext :: PG.TxET QErr m a withTraceContext ctx tx = setTraceContextInTx ctx >> tx -deriving instance Tracing.MonadTrace m => Tracing.MonadTrace (PG.TxET e m) +deriving instance (Tracing.MonadTrace m) => Tracing.MonadTrace (PG.TxET e m) -checkDbConnection :: MonadTx m => m () +checkDbConnection :: (MonadTx m) => m () checkDbConnection = do PG.Discard () <- liftTx $ PG.withQE defaultTxErrorHandler [PG.sql| SELECT 1; |] () False pure () -doesSchemaExist :: MonadTx m => SchemaName -> m Bool +doesSchemaExist :: (MonadTx m) => SchemaName -> m Bool doesSchemaExist schemaName = - liftTx $ - (runIdentity . PG.getRow) - <$> PG.withQE - defaultTxErrorHandler - [PG.sql| + liftTx + $ (runIdentity . PG.getRow) + <$> PG.withQE + defaultTxErrorHandler + [PG.sql| SELECT EXISTS ( SELECT 1 FROM information_schema.schemata WHERE schema_name = $1 ) |] - (Identity schemaName) - False + (Identity schemaName) + False -doesTableExist :: MonadTx m => SchemaName -> TableName -> m Bool +doesTableExist :: (MonadTx m) => SchemaName -> TableName -> m Bool doesTableExist schemaName tableName = - liftTx $ - (runIdentity . PG.getRow) - <$> PG.withQE - defaultTxErrorHandler - [PG.sql| + liftTx + $ (runIdentity . PG.getRow) + <$> PG.withQE + defaultTxErrorHandler + [PG.sql| SELECT EXISTS ( SELECT 1 FROM pg_tables WHERE schemaname = $1 AND tablename = $2 ) |] - (schemaName, tableName) - False + (schemaName, tableName) + False -isExtensionAvailable :: MonadTx m => Text -> m Bool +isExtensionAvailable :: (MonadTx m) => Text -> m Bool isExtensionAvailable extensionName = - liftTx $ - (runIdentity . PG.getRow) - <$> PG.withQE - defaultTxErrorHandler - [PG.sql| + liftTx + $ (runIdentity . PG.getRow) + <$> PG.withQE + defaultTxErrorHandler + [PG.sql| SELECT EXISTS ( SELECT 1 FROM pg_catalog.pg_available_extensions WHERE name = $1 ) |] - (Identity extensionName) - False + (Identity extensionName) + False -enablePgcryptoExtension :: forall m. MonadTx m => ExtensionsSchema -> m () +enablePgcryptoExtension :: forall m. (MonadTx m) => ExtensionsSchema -> m () enablePgcryptoExtension (ExtensionsSchema extensionsSchema) = do pgcryptoAvailable <- isExtensionAvailable "pgcrypto" if pgcryptoAvailable then createPgcryptoExtension else - throw400 Unexpected $ - "pgcrypto extension is required, but could not find the extension in the " - <> "PostgreSQL server. Please make sure this extension is available." + throw400 Unexpected + $ "pgcrypto extension is required, but could not find the extension in the " + <> "PostgreSQL server. Please make sure this extension is available." 
where createPgcryptoExtension :: m () createPgcryptoExtension = - liftTx $ - PG.unitQE + liftTx + $ PG.unitQE needsPGCryptoError (PG.fromText $ "CREATE EXTENSION IF NOT EXISTS pgcrypto SCHEMA " <> extensionsSchema) () @@ -232,8 +233,8 @@ enablePgcryptoExtension (ExtensionsSchema extensionsSchema) = do addHintForExtensionError pgErrDetail = e { PG.pgteError = - PG.PGIStatement $ - PG.PGStmtErrDetail + PG.PGIStatement + $ PG.PGStmtErrDetail { PG.edExecStatus = PG.edExecStatus pgErrDetail, PG.edStatusCode = PG.edStatusCode pgErrDetail, PG.edMessage = @@ -257,7 +258,8 @@ enablePgcryptoExtension (ExtensionsSchema extensionsSchema) = do dropHdbCatalogSchema :: (MonadTx m) => m () dropHdbCatalogSchema = - liftTx $ + liftTx + $ -- This is where -- 1. Metadata storage:- Metadata and its stateful information stored -- 2. Postgres source:- Table event trigger related stuff & insert permission check function stored diff --git a/server/src-lib/Hasura/Backends/Postgres/Connection/Settings.hs b/server/src-lib/Hasura/Backends/Postgres/Connection/Settings.hs index f8c528edeec..01cacc436cd 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Connection/Settings.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Connection/Settings.hs @@ -76,15 +76,21 @@ instance NFData PostgresPoolSettings instance HasCodec PostgresPoolSettings where codec = - CommentCodec "https://hasura.io/docs/latest/graphql/core/api-reference/syntax-defs.html#pgpoolsettings" $ - AC.object "PostgresPoolSettings" $ - PostgresPoolSettings - <$> optionalFieldOrNull "max_connections" maxConnectionsDoc .== _ppsMaxConnections - <*> optionalFieldOrNull "total_max_connections" totalMaxConnectionsDoc .== _ppsTotalMaxConnections - <*> optionalFieldOrNull "idle_timeout" idleTimeoutDoc .== _ppsIdleTimeout - <*> optionalFieldOrNull "retries" retriesDoc .== _ppsRetries - <*> optionalFieldOrNull "pool_timeout" poolTimeoutDoc .== _ppsPoolTimeout - <*> (parseConnLifeTime `rmapCodec` optionalFieldOrNull "connection_lifetime" connectionLifetimeDoc) .== _ppsConnectionLifetime + CommentCodec "https://hasura.io/docs/latest/graphql/core/api-reference/syntax-defs.html#pgpoolsettings" + $ AC.object "PostgresPoolSettings" + $ PostgresPoolSettings + <$> optionalFieldOrNull "max_connections" maxConnectionsDoc + .== _ppsMaxConnections + <*> optionalFieldOrNull "total_max_connections" totalMaxConnectionsDoc + .== _ppsTotalMaxConnections + <*> optionalFieldOrNull "idle_timeout" idleTimeoutDoc + .== _ppsIdleTimeout + <*> optionalFieldOrNull "retries" retriesDoc + .== _ppsRetries + <*> optionalFieldOrNull "pool_timeout" poolTimeoutDoc + .== _ppsPoolTimeout + <*> (parseConnLifeTime `rmapCodec` optionalFieldOrNull "connection_lifetime" connectionLifetimeDoc) + .== _ppsConnectionLifetime where maxConnectionsDoc = "Maximum number of connections to be kept in the pool (default: 50)" totalMaxConnectionsDoc = "Total maximum number of connections across all instances (cloud only, default: null)" @@ -108,11 +114,16 @@ instance ToJSON PostgresPoolSettings where instance FromJSON PostgresPoolSettings where parseJSON = withObject "PostgresPoolSettings" $ \o -> PostgresPoolSettings - <$> o .:? "max_connections" - <*> o .:? "total_max_connections" - <*> o .:? "idle_timeout" - <*> o .:? "retries" - <*> o .:? "pool_timeout" + <$> o + .:? "max_connections" + <*> o + .:? "total_max_connections" + <*> o + .:? "idle_timeout" + <*> o + .:? "retries" + <*> o + .:? "pool_timeout" <*> ((o .:? 
"connection_lifetime") <&> parseConnLifeTime) data DefaultPostgresPoolSettings = DefaultPostgresPoolSettings @@ -183,10 +194,11 @@ deriving via (Max SSLMode) instance Semigroup SSLMode instance HasCodec SSLMode where codec = - named "SSLMode" $ - stringConstCodec $ - NonEmpty.fromList $ - (\m -> (m, tshow m)) <$> [minBound ..] + named "SSLMode" + $ stringConstCodec + $ NonEmpty.fromList + $ (\m -> (m, tshow m)) + <$> [minBound ..] instance FromJSON SSLMode where parseJSON = withText "SSLMode" $ \case @@ -240,14 +252,19 @@ data PGClientCerts p a = PGClientCerts instance (HasCodec p, HasCodec a) => HasCodec (PGClientCerts p a) where codec = - CommentCodec "https://hasura.io/docs/latest/graphql/core/api-reference/syntax-defs.html#pgcertsettings" $ - AC.object "PGClientCerts" $ - PGClientCerts - <$> optionalFieldOrNull "sslcert" sslcertDoc .== pgcSslCert - <*> optionalFieldOrNull "sslkey" sslkeyDoc .== pgcSslKey - <*> optionalFieldOrNull "sslrootcert" sslrootcertDoc .== pgcSslRootCert - <*> requiredField "sslmode" sslmodeDoc .== pgcSslMode - <*> optionalFieldOrNull "sslpassword" sslpasswordDoc .== pgcSslPassword + CommentCodec "https://hasura.io/docs/latest/graphql/core/api-reference/syntax-defs.html#pgcertsettings" + $ AC.object "PGClientCerts" + $ PGClientCerts + <$> optionalFieldOrNull "sslcert" sslcertDoc + .== pgcSslCert + <*> optionalFieldOrNull "sslkey" sslkeyDoc + .== pgcSslKey + <*> optionalFieldOrNull "sslrootcert" sslrootcertDoc + .== pgcSslRootCert + <*> requiredField "sslmode" sslmodeDoc + .== pgcSslMode + <*> optionalFieldOrNull "sslpassword" sslpasswordDoc + .== pgcSslPassword where sslcertDoc = "Environment variable which stores the client certificate." sslkeyDoc = "Environment variable which stores the client private key." @@ -299,13 +316,13 @@ instance Hashable PG.TxIsolation instance HasCodec PG.TxIsolation where codec = - named "TxIsolation" $ - stringConstCodec $ - NonEmpty.fromList $ - [ (PG.ReadCommitted, "read-committed"), - (PG.RepeatableRead, "repeatable-read"), - (PG.Serializable, "serializable") - ] + named "TxIsolation" + $ stringConstCodec + $ NonEmpty.fromList + $ [ (PG.ReadCommitted, "read-committed"), + (PG.RepeatableRead, "repeatable-read"), + (PG.Serializable, "serializable") + ] instance FromJSON PG.TxIsolation where parseJSON = withText "Q.TxIsolation" $ \t -> @@ -331,14 +348,19 @@ instance NFData PostgresSourceConnInfo instance HasCodec PostgresSourceConnInfo where codec = - CommentCodec "https://hasura.io/docs/latest/graphql/core/api-reference/syntax-defs.html#pgsourceconnectioninfo" $ - AC.object "PostgresSourceConnInfo" $ - PostgresSourceConnInfo - <$> requiredField "database_url" databaseUrlDoc .== _psciDatabaseUrl - <*> optionalFieldOrNull "pool_settings" poolSettingsDoc .== _psciPoolSettings - <*> optionalFieldWithDefault "use_prepared_statements" False usePreparedStatementsDoc .== _psciUsePreparedStatements - <*> optionalFieldWithDefault "isolation_level" PG.ReadCommitted isolationLevelDoc .== _psciIsolationLevel - <*> optionalFieldOrNull "ssl_configuration" sslConfigurationDoc .== _psciSslConfiguration + CommentCodec "https://hasura.io/docs/latest/graphql/core/api-reference/syntax-defs.html#pgsourceconnectioninfo" + $ AC.object "PostgresSourceConnInfo" + $ PostgresSourceConnInfo + <$> requiredField "database_url" databaseUrlDoc + .== _psciDatabaseUrl + <*> optionalFieldOrNull "pool_settings" poolSettingsDoc + .== _psciPoolSettings + <*> optionalFieldWithDefault "use_prepared_statements" False usePreparedStatementsDoc + .== 
_psciUsePreparedStatements + <*> optionalFieldWithDefault "isolation_level" PG.ReadCommitted isolationLevelDoc + .== _psciIsolationLevel + <*> optionalFieldOrNull "ssl_configuration" sslConfigurationDoc + .== _psciSslConfiguration where databaseUrlDoc = "The database connection URL as a string, as an environment variable, or as connection parameters." poolSettingsDoc = "Connection pool settings" @@ -364,11 +386,18 @@ instance ToJSON PostgresSourceConnInfo where instance FromJSON PostgresSourceConnInfo where parseJSON = withObject "PostgresSourceConnInfo" $ \o -> PostgresSourceConnInfo - <$> o .: "database_url" - <*> o .:? "pool_settings" - <*> o .:? "use_prepared_statements" .!= False -- By default, preparing statements is OFF for postgres source - <*> o .:? "isolation_level" .!= PG.ReadCommitted - <*> o .:? "ssl_configuration" + <$> o + .: "database_url" + <*> o + .:? "pool_settings" + <*> o + .:? "use_prepared_statements" + .!= False -- By default, preparing statements is OFF for postgres source + <*> o + .:? "isolation_level" + .!= PG.ReadCommitted + <*> o + .:? "ssl_configuration" defaultPostgresExtensionsSchema :: ExtensionsSchema defaultPostgresExtensionsSchema = ExtensionsSchema "public" @@ -430,11 +459,13 @@ supportedConnectionTemplateVersions = [1] instance FromJSON ConnectionTemplate where parseJSON = withObject "ConnectionTemplate" $ \o -> do version <- o .:? "version" .!= 1 - when (version `notElem` supportedConnectionTemplateVersions) $ - fail $ - "Supported versions are " <> show supportedConnectionTemplateVersions + when (version `notElem` supportedConnectionTemplateVersions) + $ fail + $ "Supported versions are " + <> show supportedConnectionTemplateVersions ConnectionTemplate version - <$> o .: "template" + <$> o + .: "template" instance ToJSON ConnectionTemplate where toJSON ConnectionTemplate {..} = @@ -445,11 +476,13 @@ instance ToJSON ConnectionTemplate where instance HasCodec ConnectionTemplate where codec = - CommentCodec "https://hasura.io/docs/latest/graphql/core/api-reference/syntax-defs.html#pgconnectiontemplate" $ - AC.object "ConnectionTemplate" $ - ConnectionTemplate - <$> optionalFieldWithOmittedDefault "version" 1 ctVersionInfoDoc AC..= _ctVersion - <*> requiredField "template" ctTemplateInfoDoc AC..= _ctTemplate + CommentCodec "https://hasura.io/docs/latest/graphql/core/api-reference/syntax-defs.html#pgconnectiontemplate" + $ AC.object "ConnectionTemplate" + $ ConnectionTemplate + <$> optionalFieldWithOmittedDefault "version" 1 ctVersionInfoDoc + AC..= _ctVersion + <*> requiredField "template" ctTemplateInfoDoc + AC..= _ctTemplate where ctVersionInfoDoc = "Optional connection template version (supported versions: [1], default: 1)" ctTemplateInfoDoc = "Connection kriti template (read more in the docs)" @@ -527,31 +560,42 @@ instance NFData PostgresConnConfiguration instance FromJSON PostgresConnConfiguration where parseJSON = withObject "PostgresConnConfiguration" $ \o -> do PostgresConnConfiguration - <$> o .: "connection_info" - <*> o .:? "read_replicas" - <*> o .:? "extensions_schema" .!= defaultPostgresExtensionsSchema - <*> o .:? "connection_template" - <*> o .:? "connection_set" + <$> o + .: "connection_info" + <*> o + .:? "read_replicas" + <*> o + .:? "extensions_schema" + .!= defaultPostgresExtensionsSchema + <*> o + .:? "connection_template" + <*> o + .:? 
"connection_set" instance ToJSON PostgresConnConfiguration where toJSON PostgresConnConfiguration {..} = - object $ - ["connection_info" .= _pccConnectionInfo] - <> maybe mempty (\readReplicas -> ["read_replicas" .= readReplicas]) _pccReadReplicas - <> bool mempty (["extensions_schema" .= _pccExtensionsSchema]) (_pccExtensionsSchema /= defaultPostgresExtensionsSchema) - <> maybe mempty (\connTemplate -> ["connection_template" .= connTemplate]) _pccConnectionTemplate - <> maybe mempty (\connSet -> ["connection_set" .= NEMap.elems (getPostgresConnectionSet connSet)]) _pccConnectionSet + object + $ ["connection_info" .= _pccConnectionInfo] + <> maybe mempty (\readReplicas -> ["read_replicas" .= readReplicas]) _pccReadReplicas + <> bool mempty (["extensions_schema" .= _pccExtensionsSchema]) (_pccExtensionsSchema /= defaultPostgresExtensionsSchema) + <> maybe mempty (\connTemplate -> ["connection_template" .= connTemplate]) _pccConnectionTemplate + <> maybe mempty (\connSet -> ["connection_set" .= NEMap.elems (getPostgresConnectionSet connSet)]) _pccConnectionSet instance HasCodec PostgresConnConfiguration where codec = - CommentCodec "https://hasura.io/docs/latest/graphql/core/api-reference/syntax-defs.html#pgconfiguration" $ - AC.object "PostgresConnConfiguration" $ - PostgresConnConfiguration - <$> requiredField "connection_info" connectionInfoDoc .== _pccConnectionInfo - <*> optionalFieldOrNull "read_replicas" readReplicasDoc .== _pccReadReplicas - <*> optionalFieldWithOmittedDefault "extensions_schema" defaultPostgresExtensionsSchema extensionsSchemaDoc .== _pccExtensionsSchema - <*> optionalFieldOrNull "connection_template" connectionTemplateDoc .== _pccConnectionTemplate - <*> optionalFieldOrNull "connection_set" connectionSetDoc .== _pccConnectionSet + CommentCodec "https://hasura.io/docs/latest/graphql/core/api-reference/syntax-defs.html#pgconfiguration" + $ AC.object "PostgresConnConfiguration" + $ PostgresConnConfiguration + <$> requiredField "connection_info" connectionInfoDoc + .== _pccConnectionInfo + <*> optionalFieldOrNull "read_replicas" readReplicasDoc + .== _pccReadReplicas + <*> optionalFieldWithOmittedDefault "extensions_schema" defaultPostgresExtensionsSchema extensionsSchemaDoc + .== _pccExtensionsSchema + <*> optionalFieldOrNull "connection_template" connectionTemplateDoc + .== _pccConnectionTemplate + <*> optionalFieldOrNull "connection_set" connectionSetDoc + .== _pccConnectionSet where connectionInfoDoc = "Connection parameters for the source" readReplicasDoc = "Optional list of read replica configuration (supported only in cloud/enterprise versions)" diff --git a/server/src-lib/Hasura/Backends/Postgres/Connection/VersionCheck.hs b/server/src-lib/Hasura/Backends/Postgres/Connection/VersionCheck.hs index 515f27ec582..486485ab57b 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Connection/VersionCheck.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Connection/VersionCheck.hs @@ -34,8 +34,8 @@ data CockroachDbVersion = CockroachDbVersion runCockroachVersionCheck :: Env.Environment -> PG.PostgresConnConfiguration -> IO (Either QErr ()) runCockroachVersionCheck env connConf = do result <- - withPostgresDB env connConf $ - PG.rawQE PG.dmlTxErrorHandler (PG.fromText "select version();") [] False + withPostgresDB env connConf + $ PG.rawQE PG.dmlTxErrorHandler (PG.fromText "select version();") [] False pure case result of -- running the query failed Left err -> @@ -45,8 +45,8 @@ runCockroachVersionCheck env connConf = do case parseCrdbVersion versionString of -- parsing 
the query output failed Left err -> - Left $ - crdbVersionCheckErr500 + Left + $ crdbVersionCheckErr500 [ "version-parse-error" .= show err, "version-string" .= versionString ] @@ -57,8 +57,8 @@ runCockroachVersionCheck env connConf = do Right () else -- the crdb version is not supported - Left $ - crdbVersionCheckErr500 + Left + $ crdbVersionCheckErr500 [ "version-string" .= versionString ] diff --git a/server/src-lib/Hasura/Backends/Postgres/DDL.hs b/server/src-lib/Hasura/Backends/Postgres/DDL.hs index ec87ce03a47..3630d939d0a 100644 --- a/server/src-lib/Hasura/Backends/Postgres/DDL.hs +++ b/server/src-lib/Hasura/Backends/Postgres/DDL.hs @@ -47,8 +47,9 @@ parseCollectableType pgType = \case CollectableTypeArray ofType -> do vals <- runAesonParser parseJSON val scalarValues <- parseScalarValuesColumnType ofType vals - return . PSESQLExp $ - SETyAnn + return + . PSESQLExp + $ SETyAnn (SEArray $ map (toTxtValue . ColumnValue ofType) scalarValues) (mkTypeAnn $ CollectableTypeArray (unsafePGColumnToBackend ofType)) diff --git a/server/src-lib/Hasura/Backends/Postgres/DDL/BoolExp.hs b/server/src-lib/Hasura/Backends/Postgres/DDL/BoolExp.hs index 096c7a8d67a..1246e82cf1c 100644 --- a/server/src-lib/Hasura/Backends/Postgres/DDL/BoolExp.hs +++ b/server/src-lib/Hasura/Backends/Postgres/DDL/BoolExp.hs @@ -60,8 +60,8 @@ parseBoolExpOperations rhsParser rootFieldInfoMap fim columnRef value = do columnType = CollectableTypeScalar $ columnReferenceType column parseOperation :: ColumnReference ('Postgres pgKind) -> (Text, Value) -> m (OpExpG ('Postgres pgKind) v) - parseOperation column (opStr, val) = withPathK opStr $ - case opStr of + parseOperation column (opStr, val) = withPathK opStr + $ case opStr of "$cast" -> parseCast "_cast" -> parseCast "$eq" -> parseEq @@ -206,8 +206,8 @@ parseBoolExpOperations rhsParser rootFieldInfoMap fim columnRef value = do castedColumn = ColumnReferenceCast column (ColumnScalar targetType) checkValidCast targetType parsedCastedComparisons <- - withPathK targetTypeName $ - parseOperations castedColumn castedComparisons + withPathK targetTypeName + $ parseOperations castedColumn castedComparisons return (targetType, parsedCastedComparisons) return . ACast $ HashMap.fromList parsedCastOperations @@ -216,8 +216,11 @@ parseBoolExpOperations rhsParser rootFieldInfoMap fim columnRef value = do (ColumnScalar PGGeography, PGGeometry) -> return () (ColumnScalar PGJSONB, PGText) -> return () _ -> - throw400 UnexpectedPayload $ - "cannot cast column of type " <> colTy <<> " to type " <>> targetType + throw400 UnexpectedPayload + $ "cannot cast column of type " + <> colTy + <<> " to type " + <>> targetType parseGeometryOp f = guardType [PGGeometry] >> ABackendSpecific . 
f <$> parseOneNoSess colTy val @@ -265,12 +268,16 @@ parseBoolExpOperations rhsParser rootFieldInfoMap fim columnRef value = do validateRhsCol fieldInfoMap rhsCol = do rhsType <- askColumnType fieldInfoMap rhsCol "column operators can only compare postgres columns" - when (colTy /= rhsType) $ - throw400 UnexpectedPayload $ - "incompatible column types: " - <> column <<> " has type " - <> colTy <<> ", but " - <> rhsCol <<> " has type " <>> rhsType + when (colTy /= rhsType) + $ throw400 UnexpectedPayload + $ "incompatible column types: " + <> column + <<> " has type " + <> colTy + <<> ", but " + <> rhsCol + <<> " has type " + <>> rhsType pure rhsCol parseWithTy ty = rhsParser (CollectableTypeScalar ty) val @@ -282,14 +289,15 @@ parseBoolExpOperations rhsParser rootFieldInfoMap fim columnRef value = do parseManyWithType ty = rhsParser (CollectableTypeArray ty) val guardType validTys = - unless (isScalarColumnWhere (`elem` validTys) colTy) $ - throwError $ - buildMsg colTy validTys + unless (isScalarColumnWhere (`elem` validTys) colTy) + $ throwError + $ buildMsg colTy validTys buildMsg ty expTys = - err400 UnexpectedPayload $ - " is of type " - <> ty <<> "; this operator works only on columns of type " - <> T.intercalate "/" (map dquote expTys) + err400 UnexpectedPayload + $ " is of type " + <> ty + <<> "; this operator works only on columns of type " + <> T.intercalate "/" (map dquote expTys) parseVal :: (FromJSON a) => m a parseVal = decodeValue val diff --git a/server/src-lib/Hasura/Backends/Postgres/DDL/ComputedField.hs b/server/src-lib/Hasura/Backends/Postgres/DDL/ComputedField.hs index dcda7fd904c..fd0bf97f063 100644 --- a/server/src-lib/Hasura/Backends/Postgres/DDL/ComputedField.hs +++ b/server/src-lib/Hasura/Backends/Postgres/DDL/ComputedField.hs @@ -58,21 +58,23 @@ showError qf = \case showFunctionTableArgument functionArg <> " of type " <> ty - <<> " is not the table to which the computed field is being added" + <<> " is not the table to which the computed field is being added" CFVEInvalidSessionArgument (ISANotFound argName) -> argName <<> " is not an input argument of the function " <>> qf CFVEInvalidSessionArgument (ISANotJSON functionArg) -> showFunctionSessionArgument functionArg <> " is not of type JSON" CFVENotBaseReturnType scalarType -> "the function " - <> qf <<> " returning type " + <> qf + <<> " returning type " <> pgScalarTypeToText scalarType <> " is not a BASE type" CFVEReturnTableNotFound table -> "the function " - <> qf <<> " returning set of table " + <> qf + <<> " returning set of table " <> table - <<> " is not tracked or not found in database" + <<> " is not tracked or not found in database" CFVENoInputArguments -> "the function " <> qf <<> " has no input arguments" CFVEFunctionVolatile -> @@ -110,36 +112,36 @@ buildComputedFieldInfo trackedTables table _tableColumns computedField definitio computedFieldGraphQLName = G.mkName $ computedFieldNameToText computedField mkComputedFieldInfo :: - MV.MonadValidate [ComputedFieldValidateError] n => + (MV.MonadValidate [ComputedFieldValidateError] n) => n (ComputedFieldInfo ('Postgres pgKind)) mkComputedFieldInfo = do -- Check if computed field name is a valid GraphQL name - unless (isJust computedFieldGraphQLName) $ - MV.dispute $ - pure $ - CFVENotValidGraphQLName computedField + unless (isJust computedFieldGraphQLName) + $ MV.dispute + $ pure + $ CFVENotValidGraphQLName computedField -- Check if function is VOLATILE - when (rfiFunctionType rawFunctionInfo == FTVOLATILE) $ - MV.dispute $ - pure CFVEFunctionVolatile + 
when (rfiFunctionType rawFunctionInfo == FTVOLATILE) + $ MV.dispute + $ pure CFVEFunctionVolatile -- Validate and resolve return type returnType <- if rfiReturnsTable rawFunctionInfo then do let returnTable = typeToTable functionReturnType - unless (returnTable `S.member` trackedTables) $ - MV.dispute $ - pure $ - CFVEReturnTableNotFound returnTable + unless (returnTable `S.member` trackedTables) + $ MV.dispute + $ pure + $ CFVEReturnTableNotFound returnTable pure $ PG.CFRSetofTable returnTable else do let scalarType = _qptName functionReturnType - unless (isBaseType functionReturnType) $ - MV.dispute $ - pure $ - CFVENotBaseReturnType scalarType + unless (isBaseType functionReturnType) + $ MV.dispute + $ pure + $ CFVENotBaseReturnType scalarType pure $ PG.CFRScalar scalarType -- Validate and resolve table argument @@ -175,8 +177,8 @@ buildComputedFieldInfo trackedTables table _tableColumns computedField definitio MV.refute $ pure $ CFVEInvalidSessionArgument $ ISANotFound argName let inputArgSeq = - Seq.fromList $ - dropTableAndSessionArgument tableArgument maybePGSessionArg inputArgs + Seq.fromList + $ dropTableAndSessionArgument tableArgument maybePGSessionArg inputArgs computedFieldArgs = PG.ComputedFieldImplicitArguments tableArgument maybePGSessionArg computedFieldFunction = ComputedFieldFunction function inputArgSeq computedFieldArgs $ rfiDescription rawFunctionInfo @@ -189,17 +191,17 @@ buildComputedFieldInfo trackedTables table _tableColumns computedField definitio QualifiedPGType -> n () validateTableArgumentType tableArg qpt = do - when (_qptType qpt /= PGKindComposite) $ - MV.dispute $ - pure $ - CFVEInvalidTableArgument $ - ITANotComposite tableArg + when (_qptType qpt /= PGKindComposite) + $ MV.dispute + $ pure + $ CFVEInvalidTableArgument + $ ITANotComposite tableArg let typeTable = typeToTable qpt - unless (table == typeTable) $ - MV.dispute $ - pure $ - CFVEInvalidTableArgument $ - ITANotTable typeTable tableArg + unless (table == typeTable) + $ MV.dispute + $ pure + $ CFVEInvalidTableArgument + $ ITANotTable typeTable tableArg validateSessionArgumentType :: (MV.MonadValidate [ComputedFieldValidateError] n) => @@ -207,17 +209,19 @@ buildComputedFieldInfo trackedTables table _tableColumns computedField definitio QualifiedPGType -> n () validateSessionArgumentType sessionArg qpt = do - unless (isJSONType $ _qptName qpt) $ - MV.dispute $ - pure $ - CFVEInvalidSessionArgument $ - ISANotJSON sessionArg + unless (isJSONType $ _qptName qpt) + $ MV.dispute + $ pure + $ CFVEInvalidSessionArgument + $ ISANotJSON sessionArg showErrors :: [ComputedFieldValidateError] -> Text showErrors allErrors = "the computed field " - <> computedField <<> " cannot be added to table " - <> table <<> " " + <> computedField + <<> " cannot be added to table " + <> table + <<> " " <> reasonMessage where reasonMessage = makeReasonMessage allErrors (showError function) diff --git a/server/src-lib/Hasura/Backends/Postgres/DDL/EventTrigger.hs b/server/src-lib/Hasura/Backends/Postgres/DDL/EventTrigger.hs index 4a7ca5b9440..254a4048519 100644 --- a/server/src-lib/Hasura/Backends/Postgres/DDL/EventTrigger.hs +++ b/server/src-lib/Hasura/Backends/Postgres/DDL/EventTrigger.hs @@ -93,9 +93,9 @@ fetchUndeliveredEvents sourceConfig sourceName triggerNames maintenanceMode fetc case fetchEventsTxE of Left err -> throwError $ prefixQErr "something went wrong while fetching events: " err Right fetchEventsTx -> - liftEitherM $ - liftIO $ - runPgSourceWriteTx sourceConfig InternalRawQuery fetchEventsTx + liftEitherM + $ 
liftIO + $ runPgSourceWriteTx sourceConfig InternalRawQuery fetchEventsTx setRetry :: ( MonadIO m, @@ -124,12 +124,12 @@ insertManualEvent sourceConfig tableName triggerName payload userInfo traceCtx = -- while being used in the PG function `insert_event_log`. -- See Issue(#7087) for more details on a bug that was being caused -- in the absence of these methods. - liftEitherM $ - liftIO $ - runPgSourceWriteTx sourceConfig InternalRawQuery $ - setHeadersTx (_uiSession userInfo) - >> setTraceContextInTx traceCtx - >> insertPGManualEvent tableName triggerName payload + liftEitherM + $ liftIO + $ runPgSourceWriteTx sourceConfig InternalRawQuery + $ setHeadersTx (_uiSession userInfo) + >> setTraceContextInTx traceCtx + >> insertPGManualEvent tableName triggerName payload getMaintenanceModeVersion :: ( MonadIO m, @@ -148,8 +148,9 @@ recordSuccess :: MaintenanceMode MaintenanceModeVersion -> m (Either QErr ()) recordSuccess sourceConfig event invocation maintenanceModeVersion = - liftIO $ - runPgSourceWriteTx sourceConfig InternalRawQuery $ do + liftIO + $ runPgSourceWriteTx sourceConfig InternalRawQuery + $ do insertInvocation (tmName (eTrigger event)) invocation setSuccessTx event maintenanceModeVersion @@ -173,8 +174,9 @@ recordError' :: MaintenanceMode MaintenanceModeVersion -> m (Either QErr ()) recordError' sourceConfig event invocation processEventError maintenanceModeVersion = - liftIO $ - runPgSourceWriteTx sourceConfig InternalRawQuery $ do + liftIO + $ runPgSourceWriteTx sourceConfig InternalRawQuery + $ do for_ invocation $ insertInvocation (tmName (eTrigger event)) case processEventError of PESetRetry retryTime -> setRetryTx event retryTime maintenanceModeVersion @@ -197,11 +199,12 @@ dropTriggerAndArchiveEvents :: QualifiedTable -> m () dropTriggerAndArchiveEvents sourceConfig triggerName _table = - liftEitherM $ - liftIO $ - runPgSourceWriteTx sourceConfig InternalRawQuery $ do - dropTriggerQ triggerName - archiveEvents triggerName + liftEitherM + $ liftIO + $ runPgSourceWriteTx sourceConfig InternalRawQuery + $ do + dropTriggerQ triggerName + archiveEvents triggerName createMissingSQLTriggers :: ( MonadIO m, @@ -218,8 +221,9 @@ createMissingSQLTriggers :: TriggerOpsDef ('Postgres pgKind) -> m () createMissingSQLTriggers serverConfigCtx sourceConfig table (allCols, _) triggerName triggerOnReplication opsDefinition = do - liftEitherM $ - runPgSourceWriteTx sourceConfig InternalRawQuery $ do + liftEitherM + $ runPgSourceWriteTx sourceConfig InternalRawQuery + $ do for_ (tdInsert opsDefinition) (doesSQLTriggerExist INSERT) for_ (tdUpdate opsDefinition) (doesSQLTriggerExist UPDATE) for_ (tdDelete opsDefinition) (doesSQLTriggerExist DELETE) @@ -227,7 +231,8 @@ createMissingSQLTriggers serverConfigCtx sourceConfig table (allCols, _) trigger doesSQLTriggerExist op opSpec = do let opTriggerName = pgTriggerName op triggerName doesOpTriggerFunctionExist <- - runIdentity . PG.getRow + runIdentity + . 
PG.getRow <$> PG.withQE defaultTxErrorHandler [PG.sql| @@ -239,9 +244,9 @@ createMissingSQLTriggers serverConfigCtx sourceConfig table (allCols, _) trigger |] (Identity opTriggerName) True - unless doesOpTriggerFunctionExist $ - flip runReaderT serverConfigCtx $ - mkTrigger triggerName table triggerOnReplication allCols op opSpec + unless doesOpTriggerFunctionExist + $ flip runReaderT serverConfigCtx + $ mkTrigger triggerName table triggerOnReplication allCols op opSpec createTableEventTrigger :: (Backend ('Postgres pgKind), MonadIO m, MonadBaseControl IO m) => @@ -256,8 +261,8 @@ createTableEventTrigger :: m (Either QErr ()) createTableEventTrigger serverConfigCtx sourceConfig table columns triggerName triggerOnReplication opsDefinition _ = runPgSourceWriteTx sourceConfig InternalRawQuery $ do -- Create the given triggers - flip runReaderT serverConfigCtx $ - mkAllTriggersQ triggerName table triggerOnReplication columns opsDefinition + flip runReaderT serverConfigCtx + $ mkAllTriggersQ triggerName table triggerOnReplication columns opsDefinition dropDanglingSQLTrigger :: ( MonadIO m, @@ -269,10 +274,10 @@ dropDanglingSQLTrigger :: HashSet Ops -> m () dropDanglingSQLTrigger sourceConfig triggerName _ ops = - liftEitherM $ - liftIO $ - runPgSourceWriteTx sourceConfig InternalRawQuery $ - traverse_ (dropTriggerOp triggerName) ops + liftEitherM + $ liftIO + $ runPgSourceWriteTx sourceConfig InternalRawQuery + $ traverse_ (dropTriggerOp triggerName) ops updateColumnInEventTrigger :: QualifiedTable -> @@ -320,25 +325,26 @@ checkIfTriggerExists :: HashSet Ops -> m Bool checkIfTriggerExists sourceConfig triggerName ops = do - liftEitherM $ - liftIO $ - runPgSourceWriteTx sourceConfig InternalRawQuery $ - -- We want to avoid creating event triggers with same name since this will - -- cause undesired behaviour. Note that only SQL functions associated with - -- SQL triggers are dropped when "replace = true" is set in the event trigger - -- configuration. Hence, the best way to check if we should allow the - -- creation of a trigger with the name 'triggerName' is to check if any - -- function with such a name exists in the hdb_catalog. - -- - -- For eg: If a create_event_trigger request comes with trigger name as - -- "triggerName" and there is already a trigger with "triggerName" in the - -- metadata, then - -- 1. When "replace = false", the function with name 'triggerName' exists - -- so the creation is not allowed - -- 2. When "replace = true", the function with name 'triggerName' is first - -- dropped, hence we are allowed to create the trigger with name - -- 'triggerName' - fmap or (traverse (checkIfFunctionExistsQ triggerName) (HashSet.toList ops)) + liftEitherM + $ liftIO + $ runPgSourceWriteTx sourceConfig InternalRawQuery + $ + -- We want to avoid creating event triggers with same name since this will + -- cause undesired behaviour. Note that only SQL functions associated with + -- SQL triggers are dropped when "replace = true" is set in the event trigger + -- configuration. Hence, the best way to check if we should allow the + -- creation of a trigger with the name 'triggerName' is to check if any + -- function with such a name exists in the hdb_catalog. + -- + -- For eg: If a create_event_trigger request comes with trigger name as + -- "triggerName" and there is already a trigger with "triggerName" in the + -- metadata, then + -- 1. When "replace = false", the function with name 'triggerName' exists + -- so the creation is not allowed + -- 2. 
When "replace = true", the function with name 'triggerName' is first + -- dropped, hence we are allowed to create the trigger with name + -- 'triggerName' + fmap or (traverse (checkIfFunctionExistsQ triggerName) (HashSet.toList ops)) ---- DATABASE QUERIES --------------------- -- @@ -377,7 +383,8 @@ insertPGManualEvent :: Value -> PG.TxE QErr EventId insertPGManualEvent (QualifiedObject schemaName tableName) triggerName rowData = do - runIdentity . PG.getRow + runIdentity + . PG.getRow <$> PG.withQE defaultTxErrorHandler [PG.sql| @@ -404,15 +411,15 @@ getMaintenanceModeVersionTx = liftTx $ do -- the previous version and the current version will change depending -- upon between which versions we need to support maintenance mode if - | catalogVersion == MetadataCatalogVersion 40 -> pure PreviousMMVersion - -- The catalog is migrated to the 43rd version for a source - -- which was initialised by a v1 graphql-engine instance (See @initSource@). - | catalogVersion == MetadataCatalogVersion 43 -> pure CurrentMMVersion - | catalogVersion == latestCatalogVersion -> pure CurrentMMVersion - | otherwise -> - throw500 $ - "Maintenance mode is only supported with catalog versions: 40, 43 and " - <> tshow latestCatalogVersionString + | catalogVersion == MetadataCatalogVersion 40 -> pure PreviousMMVersion + -- The catalog is migrated to the 43rd version for a source + -- which was initialised by a v1 graphql-engine instance (See @initSource@). + | catalogVersion == MetadataCatalogVersion 43 -> pure CurrentMMVersion + | catalogVersion == latestCatalogVersion -> pure CurrentMMVersion + | otherwise -> + throw500 + $ "Maintenance mode is only supported with catalog versions: 40, 43 and " + <> tshow latestCatalogVersionString -- | Lock and return events not yet being processed or completed, up to some -- limit. Process events approximately in created_at order, but we make no @@ -631,8 +638,8 @@ checkEvent eid = do getEvent (x : _) = return x assertEventUnlocked (Identity locked) = - when locked $ - throw400 Busy "event is already being processed" + when locked + $ throw400 Busy "event is already being processed" markForDelivery :: EventId -> PG.TxE QErr () markForDelivery eid = @@ -658,7 +665,8 @@ redeliverEventTx eventId = do -- when a graceful shutdown is initiated. unlockEventsTx :: [EventId] -> PG.TxE QErr Int unlockEventsTx eventIds = - runIdentity . PG.getRow + runIdentity + . PG.getRow <$> PG.withQE defaultTxErrorHandler [PG.sql| @@ -709,45 +717,47 @@ mkTriggerFunctionQ triggerName (QualifiedObject schema table) allCols op (Subscr strfyNum <- asks stringifyNum let dbQualifiedTriggerName = pgIdenTrigger op triggerName () <- - liftTx $ - PG.multiQE defaultTxErrorHandler $ - PG.fromText . TL.toStrict $ - let -- If there are no specific delivery columns selected by user then all the columns will be delivered - -- in payload hence 'SubCStar'. - deliveryColumns = fromMaybe SubCStar deliveryColumns' - getApplicableColumns = \case - SubCStar -> allCols - SubCArray cols -> getColInfos cols allCols + liftTx + $ PG.multiQE defaultTxErrorHandler + $ PG.fromText + . TL.toStrict + $ let + -- If there are no specific delivery columns selected by user then all the columns will be delivered + -- in payload hence 'SubCStar'. + deliveryColumns = fromMaybe SubCStar deliveryColumns' + getApplicableColumns = \case + SubCStar -> allCols + SubCArray cols -> getColInfos cols allCols - -- Columns that should be present in the payload. By default, all columns are present. 
- applicableDeliveryCols = getApplicableColumns deliveryColumns - getRowExpression opVar = applyRowToJson' $ mkRowExpression opVar strfyNum applicableDeliveryCols + -- Columns that should be present in the payload. By default, all columns are present. + applicableDeliveryCols = getApplicableColumns deliveryColumns + getRowExpression opVar = applyRowToJson' $ mkRowExpression opVar strfyNum applicableDeliveryCols - -- Columns that user subscribed to listen for changes. By default, we listen on all columns. - applicableListenCols = getApplicableColumns listenColumns - renderRow opVar = applyRow $ mkRowExpression opVar strfyNum applicableListenCols + -- Columns that user subscribed to listen for changes. By default, we listen on all columns. + applicableListenCols = getApplicableColumns listenColumns + renderRow opVar = applyRow $ mkRowExpression opVar strfyNum applicableListenCols - oldDataExp = case op of - INSERT -> SENull - UPDATE -> getRowExpression OLD - DELETE -> getRowExpression OLD - MANUAL -> SENull - newDataExp = case op of - INSERT -> getRowExpression NEW - UPDATE -> getRowExpression NEW - DELETE -> SENull - MANUAL -> SENull + oldDataExp = case op of + INSERT -> SENull + UPDATE -> getRowExpression OLD + DELETE -> getRowExpression OLD + MANUAL -> SENull + newDataExp = case op of + INSERT -> getRowExpression NEW + UPDATE -> getRowExpression NEW + DELETE -> SENull + MANUAL -> SENull - name = triggerNameToTxt triggerName - qualifiedTriggerName = unQualifiedTriggerName dbQualifiedTriggerName - schemaName = pgFmtLit $ getSchemaTxt schema - tableName = pgFmtLit $ getTableTxt table + name = triggerNameToTxt triggerName + qualifiedTriggerName = unQualifiedTriggerName dbQualifiedTriggerName + schemaName = pgFmtLit $ getSchemaTxt schema + tableName = pgFmtLit $ getTableTxt table - oldRow = toSQLTxt $ renderRow OLD - newRow = toSQLTxt $ renderRow NEW - oldPayloadExpression = toSQLTxt oldDataExp - newPayloadExpression = toSQLTxt newDataExp - in $(makeRelativeToProject "src-rsr/trigger.sql.shakespeare" >>= ST.stextFile) + oldRow = toSQLTxt $ renderRow OLD + newRow = toSQLTxt $ renderRow NEW + oldPayloadExpression = toSQLTxt oldDataExp + newPayloadExpression = toSQLTxt newDataExp + in $(makeRelativeToProject "src-rsr/trigger.sql.shakespeare" >>= ST.stextFile) pure dbQualifiedTriggerName where applyRowToJson' e = SEFnApp "row_to_json" [e] Nothing @@ -758,11 +768,11 @@ mkTriggerFunctionQ triggerName (QualifiedObject schema table) allCols op (Subscr mkRowExp $ map (\col -> toExtractor (mkQId opVar strfyNum col) col) columns mkQId opVar strfyNum colInfo = - toJSONableExp strfyNum (ciType colInfo) False Nothing $ - SEQIdentifier $ - QIdentifier (opToQual opVar) $ - toIdentifier $ - ciColumn colInfo + toJSONableExp strfyNum (ciType colInfo) False Nothing + $ SEQIdentifier + $ QIdentifier (opToQual opVar) + $ toIdentifier + $ ciColumn colInfo -- Generate the SQL expression toExtractor sqlExp column @@ -778,8 +788,8 @@ checkIfTriggerExistsForTableQ :: QualifiedTable -> PG.TxE QErr Bool checkIfTriggerExistsForTableQ (QualifiedTriggerName triggerName) (QualifiedObject schemaName tableName) = - fmap (runIdentity . PG.getRow) $ - PG.withQE + fmap (runIdentity . 
PG.getRow) + $ PG.withQE defaultTxErrorHandler -- 'regclass' converts non-quoted strings to lowercase but since identifiers -- such as table names are case-sensitive, we add quotes to table name @@ -802,8 +812,8 @@ checkIfFunctionExistsQ :: PG.TxE QErr Bool checkIfFunctionExistsQ triggerName op = do let qualifiedTriggerName = pgTriggerName op triggerName - fmap (runIdentity . PG.getRow) $ - PG.withQE + fmap (runIdentity . PG.getRow) + $ PG.withQE defaultTxErrorHandler [PG.sql| SELECT EXISTS ( @@ -833,13 +843,13 @@ mkTrigger triggerName table triggerOnReplication allCols op subOpSpec = do -- check if the SQL trigger exists and only if the SQL trigger doesn't exist -- we create the SQL trigger. doesTriggerExist <- liftTx $ checkIfTriggerExistsForTableQ (pgTriggerName op triggerName) table - unless doesTriggerExist $ - let createTriggerSqlQuery = - PG.fromText $ createTriggerSQL dbTriggerNameTxt (toSQLTxt table) (tshow op) - in liftTx $ do - PG.unitQE defaultTxErrorHandler createTriggerSqlQuery () False - when (triggerOnReplication == TOREnableTrigger) $ - PG.unitQE defaultTxErrorHandler (alwaysEnableTriggerQuery dbTriggerNameTxt (toSQLTxt table)) () False + unless doesTriggerExist + $ let createTriggerSqlQuery = + PG.fromText $ createTriggerSQL dbTriggerNameTxt (toSQLTxt table) (tshow op) + in liftTx $ do + PG.unitQE defaultTxErrorHandler createTriggerSqlQuery () False + when (triggerOnReplication == TOREnableTrigger) + $ PG.unitQE defaultTxErrorHandler (alwaysEnableTriggerQuery dbTriggerNameTxt (toSQLTxt table)) () False where createTriggerSQL triggerNameTxt tableName opText = [ST.st| @@ -847,8 +857,8 @@ mkTrigger triggerName table triggerOnReplication allCols op subOpSpec = do |] alwaysEnableTriggerQuery triggerNameTxt tableTxt = - PG.fromText $ - [ST.st| + PG.fromText + $ [ST.st| ALTER TABLE #{tableTxt} ENABLE ALWAYS TRIGGER #{triggerNameTxt}; |] @@ -894,18 +904,18 @@ addCleanupSchedules sourceConfig triggersWithcleanupConfig = lastScheduledTime ) triggersWithcleanupConfig - unless (null scheduledTriggersAndTimestamps) $ - liftEitherM $ - liftIO $ - runPgSourceWriteTx sourceConfig InternalRawQuery $ - insertEventTriggerCleanupLogsTx scheduledTriggersAndTimestamps + unless (null scheduledTriggersAndTimestamps) + $ liftEitherM + $ liftIO + $ runPgSourceWriteTx sourceConfig InternalRawQuery + $ insertEventTriggerCleanupLogsTx scheduledTriggersAndTimestamps -- | Insert the cleanup logs for the given trigger name and schedules insertEventTriggerCleanupLogsTx :: [(TriggerName, [Time.UTCTime])] -> PG.TxET QErr IO () insertEventTriggerCleanupLogsTx triggersWithschedules = do let insertCleanupEventsSql = - TB.run $ - toSQL + TB.run + $ toSQL S.SQLInsert { siTable = cleanupLogTable, siCols = map unsafePGCol ["trigger_name", "scheduled_at", "status"], @@ -995,8 +1005,8 @@ getCleanupEventsForDeletion sourceConfig = markCleanupEventsAsDeadTx :: [Text] -> PG.TxE QErr () markCleanupEventsAsDeadTx toDeadEvents = do - unless (null toDeadEvents) $ - PG.unitQE + unless (null toDeadEvents) + $ PG.unitQE defaultTxErrorHandler [PG.sql| UPDATE hdb_catalog.hdb_event_log_cleanups l @@ -1104,7 +1114,8 @@ deleteEventTriggerLogsTx TriggerLogCleanupConfig {..} = do deletedInvocationLogs <- if tlccCleanInvocationLogs then - runIdentity . PG.getRow + runIdentity + . PG.getRow <$> PG.withQE defaultTxErrorHandler [PG.sql| @@ -1130,7 +1141,8 @@ deleteEventTriggerLogsTx TriggerLogCleanupConfig {..} = do pure 0 -- Finally delete the event logs. deletedEventLogs <- - runIdentity . PG.getRow + runIdentity + . 
PG.getRow <$> PG.withQE defaultTxErrorHandler [PG.sql| @@ -1264,8 +1276,8 @@ fetchEventById sourceConfig getEventById = do fetchEventByIdTxE' <- liftIO $ runPgSourceReadTx sourceConfig $ fetchEventByIdTxE getEventById case fetchEventByIdTxE' of Left err -> - throwError $ - prefixQErr ("unexpected error while fetching event with id " <> eventId <> ": ") err + throwError + $ prefixQErr ("unexpected error while fetching event with id " <> eventId <> ": ") err Right eventLogWithInvocations -> do if isNothing (elwiEvent eventLogWithInvocations) then throw400 NotExists errMsg diff --git a/server/src-lib/Hasura/Backends/Postgres/DDL/Function.hs b/server/src-lib/Hasura/Backends/Postgres/DDL/Function.hs index 097e10c27cb..12634aceb5b 100644 --- a/server/src-lib/Hasura/Backends/Postgres/DDL/Function.hs +++ b/server/src-lib/Hasura/Backends/Postgres/DDL/Function.hs @@ -96,8 +96,8 @@ buildFunctionInfo source qf systemDefined fc@FunctionConfig {..} permissions raw throwValidateError = MV.dispute . pure validateFunction = do - unless (has _Right $ qualifiedObjectToName qf) $ - throwValidateError FunctionNameNotGQLCompliant + unless (has _Right $ qualifiedObjectToName qf) + $ throwValidateError FunctionNameNotGQLCompliant when hasVariadic $ throwValidateError FunctionVariadic when (retTyTyp /= PGKindComposite) $ throwValidateError FunctionReturnNotCompositeType unless returnsTab $ throwValidateError FunctionReturnNotTable @@ -111,8 +111,8 @@ buildFunctionInfo source qf systemDefined fc@FunctionConfig {..} permissions raw -- This is the one exception where we do some validation. We're not -- committed to this check, and it would be backwards compatible to remove -- it, but this seemed like an obvious case: - when (funVol /= FTVOLATILE && _fcExposedAs == Just FEAMutation) $ - throwValidateError NonVolatileFunctionAsMutation + when (funVol /= FTVOLATILE && _fcExposedAs == Just FEAMutation) + $ throwValidateError NonVolatileFunctionAsMutation -- If 'exposed_as' is omitted we'll infer it from the volatility: let exposeAs = flip fromMaybe _fcExposedAs $ case funVol of FTVOLATILE -> FEAMutation @@ -148,9 +148,9 @@ buildFunctionInfo source qf systemDefined fc@FunctionConfig {..} permissions raw pure ( functionInfo, SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITable @('Postgres pgKind) retTable + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITable @('Postgres pgKind) retTable ) DRTable ) @@ -158,19 +158,20 @@ buildFunctionInfo source qf systemDefined fc@FunctionConfig {..} permissions raw validateFunctionArgNames = do let argNames = mapMaybe faName functionArgs invalidArgs = filter (isNothing . G.mkName . 
getFuncArgNameTxt) argNames - unless (null invalidArgs) $ - throwValidateError $ - FunctionInvalidArgumentNames invalidArgs + unless (null invalidArgs) + $ throwValidateError + $ FunctionInvalidArgumentNames invalidArgs makeInputArguments = case _fcSessionArgument of Nothing -> pure $ Seq.fromList $ map IAUserProvided functionArgs Just sessionArgName -> do - unless (any (\arg -> Just sessionArgName == faName arg) functionArgs) $ - throwValidateError $ - FunctionInvalidSessionArgument sessionArgName - fmap Seq.fromList $ - forM functionArgs $ \arg -> + unless (any (\arg -> Just sessionArgName == faName arg) functionArgs) + $ throwValidateError + $ FunctionInvalidSessionArgument sessionArgName + fmap Seq.fromList + $ forM functionArgs + $ \arg -> if Just sessionArgName == faName arg then do let argTy = _qptName $ faType arg @@ -181,7 +182,8 @@ buildFunctionInfo source qf systemDefined fc@FunctionConfig {..} permissions raw showErrors allErrors = "the function " - <> qf <<> " cannot be tracked " + <> qf + <<> " cannot be tracked " <> makeReasonMessage allErrors showOneError showOneError = \case diff --git a/server/src-lib/Hasura/Backends/Postgres/DDL/RunSQL.hs b/server/src-lib/Hasura/Backends/Postgres/DDL/RunSQL.hs index 3cd58af1e57..846a8467384 100644 --- a/server/src-lib/Hasura/Backends/Postgres/DDL/RunSQL.hs +++ b/server/src-lib/Hasura/Backends/Postgres/DDL/RunSQL.hs @@ -222,10 +222,10 @@ runRunSQL sqlGen q@RunSQL {..} = do if (isSchemaCacheBuildRequiredRunSQL q) then do -- see Note [Checking metadata consistency in run_sql] - withMetadataCheck @pgKind sqlGen rSource rCascade rTxAccessMode $ - withTraceContext traceCtx $ - withUserInfo userInfo $ - execRawSQL rSql + withMetadataCheck @pgKind sqlGen rSource rCascade rTxAccessMode + $ withTraceContext traceCtx + $ withUserInfo userInfo + $ execRawSQL rSql else do runTxWithCtx pgExecCtx (Tx rTxAccessMode Nothing) RunSQLQuery $ execRawSQL rSql where @@ -265,8 +265,8 @@ withMetadataCheck sqlGen source cascade txAccess runSQLQuery = do (queryResult, metadataUpdater) <- runTxWithMetadataCheck source _siConfiguration txAccess _siTables _siFunctions cascade runSQLQuery -- Build schema cache with updated metadata - withNewInconsistentObjsCheck $ - buildSchemaCacheWithInvalidations mempty {ciSources = HS.singleton source} metadataUpdater + withNewInconsistentObjsCheck + $ buildSchemaCacheWithInvalidations mempty {ciSources = HS.singleton source} metadataUpdater postRunSQLSchemaCache <- askSchemaCache @@ -278,14 +278,15 @@ withMetadataCheck sqlGen source cascade txAccess runSQLQuery = do recreateEventTriggers :: PGSourceConfig -> SchemaCache -> m () recreateEventTriggers sourceConfig schemaCache = do let tables = fromMaybe mempty $ unsafeTableCache @('Postgres pgKind) source $ scSources schemaCache - liftEitherM $ - runPgSourceWriteTx sourceConfig RunSQLQuery $ - forM_ (HashMap.elems tables) $ \(TableInfo coreInfo _ eventTriggers _) -> do - let table = _tciName coreInfo - columns = fmap (\(SCIScalarColumn col) -> col) $ getCols $ _tciFieldInfoMap coreInfo - forM_ (HashMap.toList eventTriggers) $ \(triggerName, EventTriggerInfo {etiOpsDef, etiTriggerOnReplication}) -> do - flip runReaderT sqlGen $ - mkAllTriggersQ triggerName table etiTriggerOnReplication columns etiOpsDef + liftEitherM + $ runPgSourceWriteTx sourceConfig RunSQLQuery + $ forM_ (HashMap.elems tables) + $ \(TableInfo coreInfo _ eventTriggers _) -> do + let table = _tciName coreInfo + columns = fmap (\(SCIScalarColumn col) -> col) $ getCols $ _tciFieldInfoMap coreInfo + forM_ 
(HashMap.toList eventTriggers) $ \(triggerName, EventTriggerInfo {etiOpsDef, etiTriggerOnReplication}) -> do + flip runReaderT sqlGen + $ mkAllTriggersQ triggerName table etiTriggerOnReplication columns etiOpsDef -- | @'runTxWithMetadataCheck source sourceConfig txAccess tableCache functionCache cascadeDependencies tx' checks for -- changes in GraphQL Engine metadata when a @'tx' is executed on the database alters Postgres @@ -311,83 +312,84 @@ runTxWithMetadataCheck :: PG.TxET QErr m a -> m (a, MetadataModifier) runTxWithMetadataCheck source sourceConfig txAccess tableCache functionCache cascadeDependencies tx = - liftEitherM $ - runExceptT $ - _pecRunTx (_pscExecCtx sourceConfig) (PGExecCtxInfo (Tx txAccess Nothing) RunSQLQuery) $ do - -- Running in a transaction helps to rollback the @'tx' execution in case of any exceptions + liftEitherM + $ runExceptT + $ _pecRunTx (_pscExecCtx sourceConfig) (PGExecCtxInfo (Tx txAccess Nothing) RunSQLQuery) + $ do + -- Running in a transaction helps to rollback the @'tx' execution in case of any exceptions - -- Before running the @'tx', fetch metadata of existing tables and functions from Postgres. - let tableNames = HashMap.keysSet tableCache - computedFieldFunctions = mconcat $ map getComputedFieldFunctions (HashMap.elems tableCache) - functionNames = HashMap.keysSet functionCache <> computedFieldFunctions - (preTxTablesMeta, preTxFunctionsMeta) <- fetchTablesFunctionsMetadata tableCache tableNames functionNames + -- Before running the @'tx', fetch metadata of existing tables and functions from Postgres. + let tableNames = HashMap.keysSet tableCache + computedFieldFunctions = mconcat $ map getComputedFieldFunctions (HashMap.elems tableCache) + functionNames = HashMap.keysSet functionCache <> computedFieldFunctions + (preTxTablesMeta, preTxFunctionsMeta) <- fetchTablesFunctionsMetadata tableCache tableNames functionNames - -- Since the @'tx' may alter table/function names we use the OIDs of underlying tables - -- (sourced from 'pg_class' for tables and 'pg_proc' for functions), which remain unchanged in the - -- case if a table/function is renamed. - let tableOids = HS.fromList $ map (_ptmiOid . Diff.tmInfo) preTxTablesMeta - functionOids = HS.fromList $ map Diff.fmOid preTxFunctionsMeta + -- Since the @'tx' may alter table/function names we use the OIDs of underlying tables + -- (sourced from 'pg_class' for tables and 'pg_proc' for functions), which remain unchanged in the + -- case if a table/function is renamed. + let tableOids = HS.fromList $ map (_ptmiOid . Diff.tmInfo) preTxTablesMeta + functionOids = HS.fromList $ map Diff.fmOid preTxFunctionsMeta - -- Run the transaction - txResult <- tx + -- Run the transaction + txResult <- tx - (postTxTablesMeta, postTxFunctionMeta) <- - uncurry (fetchTablesFunctionsMetadata tableCache) - -- Fetch names of tables and functions using OIDs which also contains renamed items - =<< fetchTablesFunctionsFromOids tableOids functionOids + (postTxTablesMeta, postTxFunctionMeta) <- + uncurry (fetchTablesFunctionsMetadata tableCache) + -- Fetch names of tables and functions using OIDs which also contains renamed items + =<< fetchTablesFunctionsFromOids tableOids functionOids - -- Calculate the tables diff (dropped & altered tables) - let tablesDiff = Diff.getTablesDiff preTxTablesMeta postTxTablesMeta - -- Calculate the functions diff. For calculating diff for functions, only consider - -- query/mutation functions and exclude functions underpinning computed fields. 
- -- Computed field functions are being processed under each table diff. - -- See @'getTablesDiff' and @'Diff.processTablesDiff' - excludeComputedFieldFunctions = filter ((`HashMap.member` functionCache) . Diff.fmFunction) - functionsDiff = - Diff.getFunctionsDiff - (excludeComputedFieldFunctions preTxFunctionsMeta) - (excludeComputedFieldFunctions postTxFunctionMeta) + -- Calculate the tables diff (dropped & altered tables) + let tablesDiff = Diff.getTablesDiff preTxTablesMeta postTxTablesMeta + -- Calculate the functions diff. For calculating diff for functions, only consider + -- query/mutation functions and exclude functions underpinning computed fields. + -- Computed field functions are being processed under each table diff. + -- See @'getTablesDiff' and @'Diff.processTablesDiff' + excludeComputedFieldFunctions = filter ((`HashMap.member` functionCache) . Diff.fmFunction) + functionsDiff = + Diff.getFunctionsDiff + (excludeComputedFieldFunctions preTxFunctionsMeta) + (excludeComputedFieldFunctions postTxFunctionMeta) - dontAllowFunctionOverloading $ - Diff.getOverloadedFunctions - (HashMap.keys functionCache) - (excludeComputedFieldFunctions postTxFunctionMeta) + dontAllowFunctionOverloading + $ Diff.getOverloadedFunctions + (HashMap.keys functionCache) + (excludeComputedFieldFunctions postTxFunctionMeta) - -- Update metadata with schema change caused by @'tx' - metadataUpdater <- execWriterT do - -- Collect indirect dependencies of altered tables - tableIndirectDeps <- Diff.getIndirectDependenciesFromTableDiff source tablesDiff + -- Update metadata with schema change caused by @'tx' + metadataUpdater <- execWriterT do + -- Collect indirect dependencies of altered tables + tableIndirectDeps <- Diff.getIndirectDependenciesFromTableDiff source tablesDiff - -- If table indirect dependencies exist and cascading is not enabled then report an exception - unless (null tableIndirectDeps || cascadeDependencies) $ reportDependentObjectsExist tableIndirectDeps + -- If table indirect dependencies exist and cascading is not enabled then report an exception + unless (null tableIndirectDeps || cascadeDependencies) $ reportDependentObjectsExist tableIndirectDeps - -- Purge all the table dependents - traverse_ purgeSourceAndSchemaDependencies tableIndirectDeps + -- Purge all the table dependents + traverse_ purgeSourceAndSchemaDependencies tableIndirectDeps - -- Collect function names from purged table dependencies - let purgedFunctions = collectFunctionsInDeps tableIndirectDeps - Diff.FunctionsDiff droppedFunctions alteredFunctions = functionsDiff + -- Collect function names from purged table dependencies + let purgedFunctions = collectFunctionsInDeps tableIndirectDeps + Diff.FunctionsDiff droppedFunctions alteredFunctions = functionsDiff - -- Drop functions in metadata. Exclude functions that were already dropped as part of table indirect dependencies - purgeFunctionsFromMetadata $ droppedFunctions \\ purgedFunctions + -- Drop functions in metadata. 
Exclude functions that were already dropped as part of table indirect dependencies + purgeFunctionsFromMetadata $ droppedFunctions \\ purgedFunctions - -- If any function type is altered to VOLATILE then raise an exception - dontAllowFunctionAlteredVolatile alteredFunctions + -- If any function type is altered to VOLATILE then raise an exception + dontAllowFunctionAlteredVolatile alteredFunctions - -- Propagate table changes to metadata - Diff.processTablesDiff source tableCache tablesDiff + -- Propagate table changes to metadata + Diff.processTablesDiff source tableCache tablesDiff - pure (txResult, metadataUpdater) + pure (txResult, metadataUpdater) where dontAllowFunctionOverloading :: (MonadError QErr n) => [FunctionName ('Postgres pgKind)] -> n () dontAllowFunctionOverloading overloadedFunctions = - unless (null overloadedFunctions) $ - throw400 NotSupported $ - "the following tracked function(s) cannot be overloaded: " - <> commaSeparated overloadedFunctions + unless (null overloadedFunctions) + $ throw400 NotSupported + $ "the following tracked function(s) cannot be overloaded: " + <> commaSeparated overloadedFunctions dontAllowFunctionAlteredVolatile :: (MonadError QErr n) => @@ -395,9 +397,11 @@ runTxWithMetadataCheck source sourceConfig txAccess tableCache functionCache cas n () dontAllowFunctionAlteredVolatile alteredFunctions = forM_ alteredFunctions $ \(qf, newTy) -> do - when (newTy == FTVOLATILE) $ - throw400 NotSupported $ - "type of function " <> qf <<> " is altered to \"VOLATILE\" which is not supported now" + when (newTy == FTVOLATILE) + $ throw400 NotSupported + $ "type of function " + <> qf + <<> " is altered to \"VOLATILE\" which is not supported now" purgeFunctionsFromMetadata :: (Monad n) => diff --git a/server/src-lib/Hasura/Backends/Postgres/DDL/Source.hs b/server/src-lib/Hasura/Backends/Postgres/DDL/Source.hs index 63de5be43a3..e95f010d061 100644 --- a/server/src-lib/Hasura/Backends/Postgres/DDL/Source.hs +++ b/server/src-lib/Hasura/Backends/Postgres/DDL/Source.hs @@ -170,9 +170,9 @@ resolveDatabaseMetadata sourceMetadata sourceConfig = runExceptT $ _pecRunTx (_pscExecCtx sourceConfig) (PGExecCtxInfo (Tx PG.ReadOnly Nothing) InternalRawQuery) do tablesMeta <- fetchTableMetadata $ HashMap.keysSet $ InsOrdHashMap.toHashMap $ _smTables sourceMetadata let allFunctions = - Set.fromList $ - InsOrdHashMap.keys (_smFunctions sourceMetadata) -- Tracked functions - <> concatMap getComputedFieldFunctionsMetadata (InsOrdHashMap.elems $ _smTables sourceMetadata) -- Computed field functions + Set.fromList + $ InsOrdHashMap.keys (_smFunctions sourceMetadata) -- Tracked functions + <> concatMap getComputedFieldFunctionsMetadata (InsOrdHashMap.elems $ _smTables sourceMetadata) -- Computed field functions functionsMeta <- fetchFunctionMetadata @pgKind allFunctions pgScalars <- fetchPgScalars let scalarsMap = HashMap.fromList do @@ -197,28 +197,28 @@ prepareCatalog sourceConfig = _pecRunTx (_pscExecCtx sourceConfig) (PGExecCtxInf eventLogTableExist <- doesTableExist "hdb_catalog" "event_log" sourceVersionTableExist <- doesTableExist "hdb_catalog" "hdb_source_catalog_version" if - -- Fresh database - | not hdbCatalogExist -> liftTx do - PG.unitQE defaultTxErrorHandler "CREATE SCHEMA hdb_catalog" () False - enablePgcryptoExtension $ _pscExtensionsSchema sourceConfig - initPgSourceCatalog - return (RETDoNothing, Version.SCMSInitialized $ Version.unSourceCatalogVersion latestSourceCatalogVersion) - -- Only 'hdb_catalog' schema defined - | not (sourceVersionTableExist || 
eventLogTableExist) -> do - liftTx initPgSourceCatalog - return (RETDoNothing, Version.SCMSInitialized $ Version.unSourceCatalogVersion latestSourceCatalogVersion) - -- Source is initialised by pre multisource support servers - | not sourceVersionTableExist && eventLogTableExist -> do - -- Update the Source Catalog to v43 to include the new migration - -- changes. Skipping this step will result in errors. - currMetadataCatalogVersion <- liftTx getCatalogVersion - -- we migrate to the 43 version, which is the migration where - -- metadata separation is introduced - migrateTo43MetadataCatalog currMetadataCatalogVersion - liftTx createVersionTable - -- Migrate the catalog from initial version i.e '0' - migrateSourceCatalogFrom initialSourceCatalogVersion - | otherwise -> migrateSourceCatalog + -- Fresh database + | not hdbCatalogExist -> liftTx do + PG.unitQE defaultTxErrorHandler "CREATE SCHEMA hdb_catalog" () False + enablePgcryptoExtension $ _pscExtensionsSchema sourceConfig + initPgSourceCatalog + return (RETDoNothing, Version.SCMSInitialized $ Version.unSourceCatalogVersion latestSourceCatalogVersion) + -- Only 'hdb_catalog' schema defined + | not (sourceVersionTableExist || eventLogTableExist) -> do + liftTx initPgSourceCatalog + return (RETDoNothing, Version.SCMSInitialized $ Version.unSourceCatalogVersion latestSourceCatalogVersion) + -- Source is initialised by pre multisource support servers + | not sourceVersionTableExist && eventLogTableExist -> do + -- Update the Source Catalog to v43 to include the new migration + -- changes. Skipping this step will result in errors. + currMetadataCatalogVersion <- liftTx getCatalogVersion + -- we migrate to the 43 version, which is the migration where + -- metadata separation is introduced + migrateTo43MetadataCatalog currMetadataCatalogVersion + liftTx createVersionTable + -- Migrate the catalog from initial version i.e '0' + migrateSourceCatalogFrom initialSourceCatalogVersion + | otherwise -> migrateSourceCatalog where initPgSourceCatalog = do () <- PG.multiQE defaultTxErrorHandler $(makeRelativeToProject "src-rsr/init_pg_source.sql" >>= PG.sqlFromFile) @@ -284,11 +284,11 @@ migrateSourceCatalogFrom :: migrateSourceCatalogFrom prevVersion | prevVersion == latestSourceCatalogVersion = pure (RETDoNothing, Version.SCMSNothingToDo $ Version.unSourceCatalogVersion latestSourceCatalogVersion) | [] <- neededMigrations = - throw400 NotSupported $ - "Expected source catalog version <= " - <> tshow latestSourceCatalogVersion - <> ", but the current version is " - <> tshow prevVersion + throw400 NotSupported + $ "Expected source catalog version <= " + <> tshow latestSourceCatalogVersion + <> ", but the current version is " + <> tshow prevVersion | otherwise = do liftTx $ traverse_ snd neededMigrations setSourceCatalogVersion @@ -329,17 +329,16 @@ upMigrationsUntil43 = ) |] in TH.listE - -- version 0.8 is the only non-integral catalog version - -- The 41st migration which included only source catalog migration - -- was introduced before metadata separation changes were introduced - -- in the graphql-engine. Now the earlier 41st migration has been - -- moved to source catalog migrations and the 41st up migration is removed - -- entirely. - $ - [|(Version.MetadataCatalogVersion08, $(migrationFromFile "08" "1"))|] - : migrationsFromFile [2 .. 
3] + -- version 0.8 is the only non-integral catalog version + -- The 41st migration which included only source catalog migration + -- was introduced before metadata separation changes were introduced + -- in the graphql-engine. Now the earlier 41st migration has been + -- moved to source catalog migrations and the 41st up migration is removed + -- entirely. + $ [|(Version.MetadataCatalogVersion08, $(migrationFromFile "08" "1"))|] + : migrationsFromFile [2 .. 3] ++ [|(Version.MetadataCatalogVersion 3, from3To4)|] - : migrationsFromFile [5 .. 40] + : migrationsFromFile [5 .. 40] ++ migrationsFromFile [42 .. 43] ) @@ -375,16 +374,16 @@ pgFetchTableMetadata :: m (DBTablesMetadata ('Postgres pgKind)) pgFetchTableMetadata tables = do results <- - liftTx $ - PG.withQE + liftTx + $ PG.withQE defaultTxErrorHandler (tableMetadata @pgKind) [PG.ViaJSON $ LE.uniques $ Set.toList tables] True - pure $ - HashMap.fromList $ - flip map results $ - \(schema, table, PG.ViaJSON info) -> (QualifiedObject schema table, info) + pure + $ HashMap.fromList + $ flip map results + $ \(schema, table, PG.ViaJSON info) -> (QualifiedObject schema table, info) -- | Fetch Cockroach metadata of all user tables cockroachFetchTableMetadata :: @@ -394,16 +393,16 @@ cockroachFetchTableMetadata :: m (DBTablesMetadata ('Postgres pgKind)) cockroachFetchTableMetadata _tables = do results <- - liftTx $ - PG.rawQE + liftTx + $ PG.rawQE defaultTxErrorHandler (tableMetadata @pgKind) [] True - pure $ - HashMap.fromList $ - flip map results $ - \(schema, table, PG.ViaJSON info) -> (QualifiedObject schema table, info) + pure + $ HashMap.fromList + $ flip map results + $ \(schema, table, PG.ViaJSON info) -> (QualifiedObject schema table, info) class FetchFunctionMetadata (pgKind :: PostgresKind) where fetchFunctionMetadata :: @@ -424,30 +423,32 @@ instance FetchFunctionMetadata 'Cockroach where pgFetchFunctionMetadata :: (MonadTx m) => Set.HashSet QualifiedFunction -> m (DBFunctionsMetadata ('Postgres pgKind)) pgFetchFunctionMetadata functions = do results <- - liftTx $ - PG.withQE + liftTx + $ PG.withQE defaultTxErrorHandler $(makeRelativeToProject "src-rsr/pg_function_metadata.sql" >>= PG.sqlFromFile) [PG.ViaJSON functions] True - pure $ - HashMap.fromList $ - flip map results $ - \(schema, table, PG.ViaJSON infos) -> (QualifiedObject schema table, infos) + pure + $ HashMap.fromList + $ flip map results + $ \(schema, table, PG.ViaJSON infos) -> (QualifiedObject schema table, infos) -- | Fetch all scalar types from Postgres fetchPgScalars :: (MonadTx m) => m (HashSet PGScalarType) fetchPgScalars = - liftTx $ - PG.getViaJSON . runIdentity . PG.getRow - <$> PG.withQE - defaultTxErrorHandler - [PG.sql| + liftTx + $ PG.getViaJSON + . runIdentity + . PG.getRow + <$> PG.withQE + defaultTxErrorHandler + [PG.sql| SELECT coalesce(json_agg(typname), '[]') FROM pg_catalog.pg_type where typtype = 'b' |] - () - True + () + True -- | Clean source database after dropping in metadata postDropSourceHook :: @@ -472,35 +473,36 @@ postDropSourceHook sourceConfig tableTriggersMap = do -- -- 3. non-default postgres source (necessarily without metadata tables) -- In this case, we want to drop the entire "hdb_catalog" schema. 
- liftEitherM $ - runPgSourceWriteTx sourceConfig InternalRawQuery $ do + liftEitherM + $ runPgSourceWriteTx sourceConfig InternalRawQuery + $ do hdbMetadataTableExist <- doesTableExist "hdb_catalog" "hdb_metadata" if - -- If "hdb_metadata" exists, we have one of two possible cases: - -- * this is a metadata database (type 2) - -- * this is a default database (type 1) - -- - -- Both of the possible cases might have source-related tables. And in - -- both the cases we only want to drop the source-related tables - -- leaving rest of the schema intact. - -- - -- To adhere to the spec described above, we use DROP IF EXISTS - -- statements for all source-related tables. The IF EXISTS lets us - -- handle both cases uniformly, doing "ideally" nothing in the type 2 - -- database, and for default databases, we drop only source-related - -- tables from the database's "hdb_catalog" schema. - | hdbMetadataTableExist -> do - -- drop the event trigger functions from the table for default sources - for_ (HashMap.toList tableTriggersMap) $ \(_table, triggers) -> - for_ triggers $ \triggerName -> - liftTx $ dropTriggerQ triggerName - PG.multiQE - defaultTxErrorHandler - $(makeRelativeToProject "src-rsr/drop_pg_source.sql" >>= PG.sqlFromFile) - -- Otherwise, we have a non-default postgres source, which has no metadata tables. - -- We drop the entire "hdb_catalog" schema as discussed above. - | otherwise -> - dropHdbCatalogSchema + -- If "hdb_metadata" exists, we have one of two possible cases: + -- * this is a metadata database (type 2) + -- * this is a default database (type 1) + -- + -- Both of the possible cases might have source-related tables. And in + -- both the cases we only want to drop the source-related tables + -- leaving rest of the schema intact. + -- + -- To adhere to the spec described above, we use DROP IF EXISTS + -- statements for all source-related tables. The IF EXISTS lets us + -- handle both cases uniformly, doing "ideally" nothing in the type 2 + -- database, and for default databases, we drop only source-related + -- tables from the database's "hdb_catalog" schema. + | hdbMetadataTableExist -> do + -- drop the event trigger functions from the table for default sources + for_ (HashMap.toList tableTriggersMap) $ \(_table, triggers) -> + for_ triggers $ \triggerName -> + liftTx $ dropTriggerQ triggerName + PG.multiQE + defaultTxErrorHandler + $(makeRelativeToProject "src-rsr/drop_pg_source.sql" >>= PG.sqlFromFile) + -- Otherwise, we have a non-default postgres source, which has no metadata tables. + -- We drop the entire "hdb_catalog" schema as discussed above. + | otherwise -> + dropHdbCatalogSchema -- Destroy postgres source connection liftIO $ _pecDestroyConnections (_pscExecCtx sourceConfig) diff --git a/server/src-lib/Hasura/Backends/Postgres/DDL/Source/Version.hs b/server/src-lib/Hasura/Backends/Postgres/DDL/Source/Version.hs index f6771722fc7..af315b66aab 100644 --- a/server/src-lib/Hasura/Backends/Postgres/DDL/Source/Version.hs +++ b/server/src-lib/Hasura/Backends/Postgres/DDL/Source/Version.hs @@ -32,10 +32,10 @@ latestSourceCatalogVersion = Version.SourceCatalogVersion 3 previousSourceCatalogVersions :: [SourceCatalogVersion pgKind] previousSourceCatalogVersions = [initialSourceCatalogVersion .. 
pred latestSourceCatalogVersion] -setSourceCatalogVersion :: MonadTx m => m () +setSourceCatalogVersion :: (MonadTx m) => m () setSourceCatalogVersion = - liftTx $ - PG.unitQE + liftTx + $ PG.unitQE defaultTxErrorHandler [PG.sql| INSERT INTO hdb_catalog.hdb_source_catalog_version(version, upgraded_on) @@ -46,14 +46,15 @@ setSourceCatalogVersion = (Identity (tshow latestSourceCatalogVersion)) False -getSourceCatalogVersion :: MonadTx m => m (SourceCatalogVersion postgres) +getSourceCatalogVersion :: (MonadTx m) => m (SourceCatalogVersion postgres) getSourceCatalogVersion = do versionText <- - liftTx $ - runIdentity . PG.getRow - <$> PG.withQE - defaultTxErrorHandler - [PG.sql| SELECT version FROM hdb_catalog.hdb_source_catalog_version |] - () - False + liftTx + $ runIdentity + . PG.getRow + <$> PG.withQE + defaultTxErrorHandler + [PG.sql| SELECT version FROM hdb_catalog.hdb_source_catalog_version |] + () + False readEither (T.unpack versionText) `onLeft` (throw500 . (("Invalid source catalog version in the metadata: " <>) . T.pack)) diff --git a/server/src-lib/Hasura/Backends/Postgres/DDL/Table.hs b/server/src-lib/Hasura/Backends/Postgres/DDL/Table.hs index fe125809a8f..e9160487031 100644 --- a/server/src-lib/Hasura/Backends/Postgres/DDL/Table.hs +++ b/server/src-lib/Hasura/Backends/Postgres/DDL/Table.hs @@ -49,8 +49,9 @@ fetchAndValidateEnumValues :: [RawColumnInfo ('Postgres pgKind)] -> m (Either QErr EnumValues) fetchAndValidateEnumValues pgSourceConfig tableName maybePrimaryKey columnInfos = - runExceptT $ - either (throw400 ConstraintViolation . showErrors) pure =<< runValidateT fetchAndValidate + runExceptT + $ either (throw400 ConstraintViolation . showErrors) pure + =<< runValidateT fetchAndValidate where fetchAndValidate :: (MonadIO n, MonadBaseControl IO n, MonadValidate [EnumTableIntegrityError ('Postgres pgKind)] n) => @@ -62,9 +63,9 @@ fetchAndValidateEnumValues pgSourceConfig tableName maybePrimaryKey columnInfos Nothing -> refute mempty Just primaryKeyColumn -> do result <- - runPgSourceReadTx pgSourceConfig $ - runValidateT $ - fetchEnumValuesFromDb tableName primaryKeyColumn maybeCommentColumn + runPgSourceReadTx pgSourceConfig + $ runValidateT + $ fetchEnumValuesFromDb tableName primaryKeyColumn maybeCommentColumn case result of Left e -> (refute . pure . EnumTablePostgresError . qeError) e Right (Left vErrs) -> refute vErrs @@ -112,7 +113,8 @@ fetchAndValidateEnumValues pgSourceConfig tableName maybePrimaryKey columnInfos "values " <> commaSeparated (reverse otherValues) <> ", and " - <> lastValue <<> pluralString + <> lastValue + <<> pluralString in "the " <> valuesString EnumTableNonTextualCommentColumn colInfo -> typeMismatch "comment column" colInfo PGText EnumTableTooManyColumns cols -> @@ -127,8 +129,11 @@ fetchAndValidateEnumValues pgSourceConfig tableName maybePrimaryKey columnInfos in "the table’s " <> description <> " (" - <> rciName colInfo <<> ") must have type " - <> expected <<> ", not type " <>> scalarType + <> rciName colInfo + <<> ") must have type " + <> expected + <<> ", not type " + <>> scalarType fetchEnumValuesFromDb :: forall pgKind m. @@ -141,16 +146,16 @@ fetchEnumValuesFromDb tableName primaryKeyColumn maybeCommentColumn = do let nullExtr = Extractor SENull Nothing commentExtr = maybe nullExtr (mkExtr . 
rciName) maybeCommentColumn query = - PG.fromBuilder $ - toSQL + PG.fromBuilder + $ toSQL mkSelect { selFrom = Just $ mkSimpleFromExp tableName, selExtr = [mkExtr (rciName primaryKeyColumn), commentExtr] } rawEnumValues <- liftTx $ PG.withQE defaultTxErrorHandler query () True when (null rawEnumValues) $ dispute [EnumTableNoEnumValues] - let enumValues = flip map rawEnumValues $ - \(enumValueText, comment) -> + let enumValues = flip map rawEnumValues + $ \(enumValueText, comment) -> case mkValidEnumValueName enumValueText of Nothing -> Left enumValueText Just enumValue -> Right (EnumValue enumValue, EnumValueInfo comment) diff --git a/server/src-lib/Hasura/Backends/Postgres/Execute/Insert.hs b/server/src-lib/Hasura/Backends/Postgres/Execute/Insert.hs index ac86c9076c1..75cd7294d71 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Execute/Insert.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Execute/Insert.hs @@ -62,8 +62,8 @@ convertToSQLTransaction (IR.AnnotatedInsert fieldName isSingle annIns mutationOu if null $ IR._aiInsertObject annIns then pure $ IR.buildEmptyMutResp mutationOutput else - withPaths ["selectionSet", fieldName, "args", suffix] $ - insertMultipleObjects annIns mempty userInfo mutationOutput planVars stringifyNum tCase + withPaths ["selectionSet", fieldName, "args", suffix] + $ insertMultipleObjects annIns mempty userInfo mutationOutput planVars stringifyNum tCase where withPaths p x = foldr ($) x $ withPathK <$> p suffix = bool "objects" "object" isSingle @@ -160,8 +160,8 @@ insertObject singleObjIns additionalColumns userInfo planVars stringifyNum tCase let cte = mkInsertQ table onConflict finalInsCols checkCond PGE.MutateResp affRows colVals <- - liftTx $ - PGE.mutateAndFetchCols @pgKind table allColumns (PGT.MCCheckConstraint cte, planVars) stringifyNum tCase + liftTx + $ PGE.mutateAndFetchCols @pgKind table allColumns (PGT.MCCheckConstraint cte, planVars) stringifyNum tCase colValM <- asSingleObject colVals arrRelAffRows <- bool (withArrRels colValM) (return 0) $ null allAfterInsertRels @@ -183,8 +183,8 @@ insertObject singleObjIns additionalColumns userInfo planVars stringifyNum tCase afterInsertDepCols :: [ColumnInfo ('Postgres pgKind)] afterInsertDepCols = - flip (getColInfos @('Postgres pgKind)) allColumns $ - concatMap (HashMap.keys . riMapping . IR._riRelationInfo) allAfterInsertRels + flip (getColInfos @('Postgres pgKind)) allColumns + $ concatMap (HashMap.keys . riMapping . IR._riRelationInfo) allAfterInsertRels objToArr :: forall a b. IR.ObjectRelationInsert b a -> IR.ArrayRelationInsert b a objToArr IR.RelationInsert {..} = IR.RelationInsert (singleToMulti _riInsertData) _riRelationInfo @@ -199,8 +199,8 @@ insertObject singleObjIns additionalColumns userInfo planVars stringifyNum tCase colVal <- onNothing colValM $ throw400 NotSupported cannotInsArrRelErr afterInsertDepColsWithVal <- fetchFromColVals colVal afterInsertDepCols arrInsARows <- - forM allAfterInsertRels $ - insertArrRel afterInsertDepColsWithVal userInfo planVars stringifyNum tCase + forM allAfterInsertRels + $ insertArrRel afterInsertDepColsWithVal userInfo planVars stringifyNum tCase return $ sum arrInsARows asSingleObject :: @@ -214,7 +214,8 @@ insertObject singleObjIns additionalColumns userInfo planVars stringifyNum tCase cannotInsArrRelErr :: Text cannotInsArrRelErr = "cannot proceed to insert array relations since insert to table " - <> table <<> " affects zero rows" + <> table + <<> " affects zero rows" insertObjRel :: forall pgKind m. 
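
Note: fetchEnumValuesFromDb above tags each row as Left (invalid name) or Right (usable enum value) inside a `flip map rawEnumValues $ \... ->` lambda; a list of Eithers built this way can then be split with partitionEithers. A self-contained sketch of that shape (the names and the validity rule here are hypothetical):

    import Data.Char (isAlphaNum)
    import Data.Either (partitionEithers)

    -- Keep invalid inputs as Left so they can be reported; valid ones as Right.
    classifyNames :: [String] -> ([String], [String])
    classifyNames raws =
      partitionEithers
        $ flip map raws
        $ \raw ->
          if not (null raw) && all isAlphaNum raw
            then Right raw
            else Left raw
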
@@ -252,8 +253,10 @@ insertObjRel planVars userInfo stringifyNum tCase objRelIns = rColInfos = getColInfos rCols allCols errMsg = "cannot proceed to insert object relation " - <> relName <<> " since insert to table " - <> table <<> " affects zero rows" + <> relName + <<> " since insert to table " + <> table + <<> " affects zero rows" insertArrRel :: ( MonadTx m, @@ -272,16 +275,16 @@ insertArrRel :: m Int insertArrRel resCols userInfo planVars stringifyNum tCase arrRelIns = withPathK (relNameToTxt $ riName relInfo) $ do - let additionalColumns = HashMap.fromList $ - flip mapMaybe resCols \(column, value) -> do + let additionalColumns = HashMap.fromList + $ flip mapMaybe resCols \(column, value) -> do target <- HashMap.lookup column mapping Just (target, value) resBS <- - withPathK "data" $ - insertMultipleObjects multiObjIns additionalColumns userInfo mutOutput planVars stringifyNum tCase + withPathK "data" + $ insertMultipleObjects multiObjIns additionalColumns userInfo mutOutput planVars stringifyNum tCase resObj <- decodeEncJSON resBS - onNothing (HashMap.lookup ("affected_rows" :: Text) resObj) $ - throw500 "affected_rows not returned in array rel insert" + onNothing (HashMap.lookup ("affected_rows" :: Text) resObj) + $ throw500 "affected_rows not returned in array rel insert" where IR.RelationInsert multiObjIns relInfo = arrRelIns mapping = riMapping relInfo @@ -304,25 +307,25 @@ validateInsert :: m () validateInsert insCols objRels addCols = do -- validate insertCols - unless (null insConflictCols) $ - throw400 ValidationFailed $ - "cannot insert " - <> showPGCols insConflictCols - <> " columns as their values are already being determined by parent insert" + unless (null insConflictCols) + $ throw400 ValidationFailed + $ "cannot insert " + <> showPGCols insConflictCols + <> " columns as their values are already being determined by parent insert" forM_ objRels $ \relInfo -> do let lCols = HashMap.keys $ riMapping relInfo relName = riName relInfo relNameTxt = relNameToTxt relName lColConflicts = lCols `intersect` (addCols <> insCols) - withPathK relNameTxt $ - unless (null lColConflicts) $ - throw400 ValidationFailed $ - "cannot insert object relationship " - <> relName - <<> " as " - <> showPGCols lColConflicts - <> " column values are already determined" + withPathK relNameTxt + $ unless (null lColConflicts) + $ throw400 ValidationFailed + $ "cannot insert object relationship " + <> relName + <<> " as " + <> showPGCols lColConflicts + <> " column values are already determined" where insConflictCols = insCols `intersect` addCols @@ -360,10 +363,11 @@ fetchFromColVals colVal reqCols = forM reqCols $ \ci -> do let valM = HashMap.lookup (ciColumn ci) colVal val <- - onNothing valM $ - throw500 $ - "column " - <> ciColumn ci <<> " not found in given colVal" + onNothing valM + $ throw500 + $ "column " + <> ciColumn ci + <<> " not found in given colVal" let pgColVal = case val of TENull -> Postgres.SENull TELit t -> Postgres.SELit t diff --git a/server/src-lib/Hasura/Backends/Postgres/Execute/Mutation.hs b/server/src-lib/Hasura/Backends/Postgres/Execute/Mutation.hs index 36fc30af781..c79b781a1bc 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Execute/Mutation.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Execute/Mutation.hs @@ -95,9 +95,9 @@ runMutation :: Mutation ('Postgres pgKind) -> m EncJSON runMutation mut = - bool (mutateAndReturn mut) (mutateAndSel mut) $ - hasNestedFld $ - _mOutput mut + bool (mutateAndReturn mut) (mutateAndSel mut) + $ hasNestedFld + $ _mOutput mut 
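
Note: runMutation above chooses between two transactions with Data.Bool.bool, whose argument order (False branch first, then True branch, then the scrutinee) is easy to misread; a two-line reminder:

    import Data.Bool (bool)

    -- bool f t p returns f when p is False and t when p is True, so
    -- runMutation runs mutateAndSel only when the output has nested fields.
    route :: Bool -> String
    route = bool "mutateAndReturn" "mutateAndSel"
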
mutateAndReturn :: ( MonadTx m, @@ -211,9 +211,9 @@ mutateAndSel (Mutation qt q mutationOutput allCols strfyNum tCase) = do withCheckPermission :: (MonadError QErr m) => m (a, Bool) -> m a withCheckPermission sqlTx = do (rawResponse, checkConstraint) <- sqlTx - unless checkConstraint $ - throw400 PermissionError $ - "check constraint of an insert/update permission has failed" + unless checkConstraint + $ throw400 PermissionError + $ "check constraint of an insert/update permission has failed" pure rawResponse executeMutationOutputQuery :: @@ -270,8 +270,8 @@ mutateAndFetchCols qt cols (cte, p) strfyNum tCase = do rawIdentifier = S.tableAliasToIdentifier rawAlias tabFrom = FromIdentifier $ FIIdentifier (unTableIdentifier rawIdentifier) tabPerm = TablePerm annBoolExpTrue Nothing - selFlds = flip map cols $ - \ci -> (fromCol @('Postgres pgKind) $ ciColumn ci, mkAnnColumnFieldAsText ci) + selFlds = flip map cols + $ \ci -> (fromCol @('Postgres pgKind) $ ciColumn ci, mkAnnColumnFieldAsText ci) sqlText = toQuery selectWith @@ -299,16 +299,16 @@ mutateAndFetchCols qt cols (cte, p) strfyNum tCase = do ] affRowsSel = - S.SESelect $ - S.mkSelect + S.SESelect + $ S.mkSelect { S.selExtr = [S.Extractor S.countStar Nothing], S.selFrom = Just $ S.FromExp [S.FIIdentifier rawIdentifier] } (colSel, customSQLCTEs) = - runWriter $ - S.SESelect - <$> mkSQLSelect - JASMultipleRows - ( AnnSelectG selFlds tabFrom tabPerm noSelectArgs strfyNum tCase - ) + runWriter + $ S.SESelect + <$> mkSQLSelect + JASMultipleRows + ( AnnSelectG selFlds tabFrom tabPerm noSelectArgs strfyNum tCase + ) diff --git a/server/src-lib/Hasura/Backends/Postgres/Execute/Prepare.hs b/server/src-lib/Hasura/Backends/Postgres/Execute/Prepare.hs index 917af51351b..115dbcb7598 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Execute/Prepare.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Execute/Prepare.hs @@ -134,7 +134,8 @@ prepareWithoutPlan userInfo = \case fmap S.SELit <$> onNothing maybeSessionVariableValue $ throw400 NotFound - $ "missing session variable: " <>> sessionVariableToText sessVar + $ "missing session variable: " + <>> sessionVariableToText sessVar pure $ withTypeAnn ty sessionVariableValue -- | The map of user session variables is always given the number (1) as its diff --git a/server/src-lib/Hasura/Backends/Postgres/Execute/Subscription.hs b/server/src-lib/Hasura/Backends/Postgres/Execute/Subscription.hs index f7acf213c29..ea5aadcf717 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Execute/Subscription.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Execute/Subscription.hs @@ -152,10 +152,10 @@ mkMultiplexedQuery rootFields = S.Extractor (mkQualifiedIdentifier fldRespIdentifier (Identifier "root")) (Just $ S.toColumnAlias $ Identifier "result") ], S.selFrom = - Just $ - S.FromExp - [ S.FIJoin $ - S.JoinExpr subsInputFromItem S.LeftOuter responseLateralFromItem (S.JoinOn $ S.BELit True) + Just + $ S.FromExp + [ S.FIJoin + $ S.JoinExpr subsInputFromItem S.LeftOuter responseLateralFromItem (S.JoinOn $ S.BELit True) ] } @@ -170,8 +170,8 @@ mkMultiplexedQuery rootFields = [S.toColumnAlias $ Identifier "result_id", S.toColumnAlias $ Identifier "result_vars"] (sqlFrom, customSQLCTEs) = - runWriter $ - traverse + runWriter + $ traverse ( \(fieldAlias, resolvedAST) -> toSQLFromItem (S.mkTableAlias $ G.unName fieldAlias) resolvedAST ) @@ -217,10 +217,10 @@ mkStreamingMultiplexedQuery (fieldAlias, resolvedAST) = S.Extractor (mkQualifiedIdentifier fldRespIdentifier (Identifier "cursor")) (Just $ S.toColumnAlias $ Identifier 
"cursor") ], S.selFrom = - Just $ - S.FromExp - [ S.FIJoin $ - S.JoinExpr subsInputFromItem S.LeftOuter responseLateralFromItem (S.JoinOn $ S.BELit True) + Just + $ S.FromExp + [ S.FIJoin + $ S.JoinExpr subsInputFromItem S.LeftOuter responseLateralFromItem (S.JoinOn $ S.BELit True) ] } @@ -294,8 +294,8 @@ resolveMultiplexedValue allSessionVars = \case pure $ fromResVars (CollectableTypeScalar PGJSON) ["session"] where fromResVars pgType jPath = - addTypeAnnotation pgType $ - S.SEOpApp + addTypeAnnotation pgType + $ S.SEOpApp (S.SQLOp "#>>") [ S.SEQIdentifier $ S.QIdentifier (S.QualifiedIdentifier subsIdentifier Nothing) (Identifier "result_vars"), S.SEArray $ map S.SELit jPath diff --git a/server/src-lib/Hasura/Backends/Postgres/Execute/Types.hs b/server/src-lib/Hasura/Backends/Postgres/Execute/Types.hs index d8d6339eae9..ea5009cdac5 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Execute/Types.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Execute/Types.hs @@ -118,8 +118,8 @@ mkPGExecCtx defaultIsoLevel pool resizeStrategy = -- Resize the pool resizePostgresPool pool maxConnections serverReplicas -- Return the summary. Only the primary pool is resized - pure $ - SourceResizePoolSummary + pure + $ SourceResizePoolSummary { _srpsPrimaryResized = True, _srpsReadReplicasResized = False, _srpsConnectionSet = [] diff --git a/server/src-lib/Hasura/Backends/Postgres/Instances/Execute.hs b/server/src-lib/Hasura/Backends/Postgres/Instances/Execute.hs index 877edc2a1a8..9d2e90dbde1 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Instances/Execute.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Instances/Execute.hs @@ -146,9 +146,9 @@ pgDBQueryPlan userInfo sourceName sourceConfig qrf reqHeaders operationName = do let connectionTemplateResolver = connectionTemplateConfigResolver (_pscConnectionTemplateConfig sourceConfig) queryContext = - Just $ - QueryContext operationName $ - QueryOperationType G.OperationTypeQuery + Just + $ QueryContext operationName + $ QueryOperationType G.OperationTypeQuery in applyConnectionTemplateResolverNonAdmin connectionTemplateResolver userInfo reqHeaders queryContext let preparedSQLWithQueryTags = appendPreparedSQLWithQueryTags (irToRootFieldPlan planVals preparedQuery) queryTagsComment let (action, preparedSQL) = mkCurPlanTx userInfo preparedSQLWithQueryTags @@ -157,7 +157,7 @@ pgDBQueryPlan userInfo sourceName sourceConfig qrf reqHeaders operationName = do -- | Used by the @dc-postgres-agent to compile a query. 
pgDBQueryPlanSimple :: - MonadError QErr m => + (MonadError QErr m) => UserInfo -> QueryTagsComment -> QueryDB ('Postgres 'Vanilla) Void (UnpreparedValue ('Postgres 'Vanilla)) -> @@ -198,13 +198,13 @@ pgDBQueryExplain fieldName userInfo sourceName sourceConfig rootSelection reqHea let connectionTemplateResolver = connectionTemplateConfigResolver (_pscConnectionTemplateConfig sourceConfig) queryContext = - Just $ - QueryContext operationName $ - QueryOperationType G.OperationTypeQuery + Just + $ QueryContext operationName + $ QueryOperationType G.OperationTypeQuery in applyConnectionTemplateResolverNonAdmin connectionTemplateResolver userInfo reqHeaders queryContext - pure $ - AB.mkAnyBackend $ - DBStepInfo @('Postgres pgKind) sourceName sourceConfig Nothing action resolvedConnectionTemplate + pure + $ AB.mkAnyBackend + $ DBStepInfo @('Postgres pgKind) sourceName sourceConfig Nothing action resolvedConnectionTemplate pgDBSubscriptionExplain :: ( MonadError QErr m, @@ -223,10 +223,11 @@ pgDBSubscriptionExplain plan = do resolvedConnectionTemplate = _sqpResolvedConnectionTemplate plan cohortId <- newCohortId explanationLines <- - liftEitherM $ - runExceptT $ - _pecRunTx pgExecCtx (PGExecCtxInfo (Tx PG.ReadOnly Nothing) (GraphQLQuery resolvedConnectionTemplate)) $ - map runIdentity <$> PGL.executeQuery explainQuery [(cohortId, _sqpVariables plan)] + liftEitherM + $ runExceptT + $ _pecRunTx pgExecCtx (PGExecCtxInfo (Tx PG.ReadOnly Nothing) (GraphQLQuery resolvedConnectionTemplate)) + $ map runIdentity + <$> PGL.executeQuery explainQuery [(cohortId, _sqpVariables plan)] pure $ SubscriptionQueryPlanExplanation queryText explanationLines $ _sqpVariables plan -- mutation @@ -245,10 +246,10 @@ convertDelete :: convertDelete userInfo deleteOperation stringifyNum = do queryTags <- ask preparedDelete <- traverse (prepareWithoutPlan userInfo) deleteOperation - pure $ - OnBaseMonad $ - flip runReaderT queryTags $ - PGE.execDeleteQuery stringifyNum (_adNamingConvention deleteOperation) userInfo (preparedDelete, Seq.empty) + pure + $ OnBaseMonad + $ flip runReaderT queryTags + $ PGE.execDeleteQuery stringifyNum (_adNamingConvention deleteOperation) userInfo (preparedDelete, Seq.empty) convertUpdate :: forall pgKind m. @@ -267,10 +268,10 @@ convertUpdate userInfo updateOperation stringifyNum = do if Postgres.updateVariantIsEmpty $ IR._auUpdateVariant updateOperation then pure $ OnBaseMonad $ pure $ IR.buildEmptyMutResp $ IR._auOutput preparedUpdate else - pure $ - OnBaseMonad $ - flip runReaderT queryTags $ - PGE.execUpdateQuery stringifyNum (_auNamingConvention updateOperation) userInfo (preparedUpdate, Seq.empty) + pure + $ OnBaseMonad + $ flip runReaderT queryTags + $ PGE.execUpdateQuery stringifyNum (_auNamingConvention updateOperation) userInfo (preparedUpdate, Seq.empty) convertInsert :: forall pgKind m. @@ -286,10 +287,10 @@ convertInsert :: convertInsert userInfo insertOperation stringifyNum = do queryTags <- ask preparedInsert <- traverse (prepareWithoutPlan userInfo) insertOperation - pure $ - OnBaseMonad $ - flip runReaderT queryTags $ - convertToSQLTransaction preparedInsert userInfo Seq.empty stringifyNum (_aiNamingConvention insertOperation) + pure + $ OnBaseMonad + $ flip runReaderT queryTags + $ convertToSQLTransaction preparedInsert userInfo Seq.empty stringifyNum (_aiNamingConvention insertOperation) -- | A pared-down version of 'Query.convertQuerySelSet', for use in execution of -- special case of SQL function mutations (see 'MDBFunction'). 
@@ -309,16 +310,16 @@ convertFunction userInfo jsonAggSelect unpreparedQuery = do queryTags <- ask -- Transform the RQL AST into a prepared SQL query (preparedQuery, PlanningSt {_psPrepped = planVals}) <- - flip runStateT initPlanningSt $ - traverse (prepareWithPlan userInfo) unpreparedQuery + flip runStateT initPlanningSt + $ traverse (prepareWithPlan userInfo) unpreparedQuery let queryResultFn = case jsonAggSelect of JASMultipleRows -> QDBMultipleRows JASSingleObject -> QDBSingleRow let preparedSQLWithQueryTags = appendPreparedSQLWithQueryTags (irToRootFieldPlan planVals $ queryResultFn preparedQuery) queryTags - pure $! - fst $ - mkCurPlanTx userInfo preparedSQLWithQueryTags -- forget (Maybe PreparedSql) + pure + $! fst + $ mkCurPlanTx userInfo preparedSQLWithQueryTags -- forget (Maybe PreparedSql) pgDBMutationPlan :: forall pgKind m. @@ -340,9 +341,9 @@ pgDBMutationPlan userInfo stringifyNum sourceName sourceConfig mrf reqHeaders op let connectionTemplateResolver = connectionTemplateConfigResolver (_pscConnectionTemplateConfig sourceConfig) queryContext = - Just $ - QueryContext operationName $ - QueryOperationType G.OperationTypeMutation + Just + $ QueryContext operationName + $ QueryOperationType G.OperationTypeMutation in applyConnectionTemplateResolverNonAdmin connectionTemplateResolver userInfo reqHeaders queryContext go resolvedConnectionTemplate <$> case mrf of MDBInsert s -> convertInsert userInfo s stringifyNum @@ -379,9 +380,9 @@ pgDBLiveQuerySubscriptionPlan :: m (SubscriptionQueryPlan ('Postgres pgKind) (MultiplexedQuery ('Postgres pgKind))) pgDBLiveQuerySubscriptionPlan userInfo _sourceName sourceConfig namespace unpreparedAST reqHeaders operationName = do (preparedAST, PGL.QueryParametersInfo {..}) <- - flip runStateT mempty $ - for unpreparedAST $ - traverse (PGL.resolveMultiplexedValue (_uiSession userInfo)) + flip runStateT mempty + $ for unpreparedAST + $ traverse (PGL.resolveMultiplexedValue (_uiSession userInfo)) subscriptionQueryTagsComment <- ask let multiplexedQuery = PGL.mkMultiplexedQuery $ InsOrdHashMap.mapKeys _rfaAlias preparedAST multiplexedQueryWithQueryTags = @@ -393,9 +394,9 @@ pgDBLiveQuerySubscriptionPlan userInfo _sourceName sourceConfig namespace unprep let connectionTemplateResolver = connectionTemplateConfigResolver (_pscConnectionTemplateConfig sourceConfig) queryContext = - Just $ - QueryContext operationName $ - QueryOperationType G.OperationTypeSubscription + Just + $ QueryContext operationName + $ QueryOperationType G.OperationTypeSubscription in applyConnectionTemplateResolverNonAdmin connectionTemplateResolver userInfo reqHeaders queryContext -- Cohort Id: Used for validating the multiplexed query. See @'testMultiplexedQueryTx'. 
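
Note: the plan builders above run their traversals under `flip runStateT mempty` so that every value they resolve can also be recorded for later use as a query parameter. A toy sketch of that state-collecting traversal, with a plain list standing in for the real PlanningSt/QueryParametersInfo state:

    import Control.Monad.State (StateT, gets, modify', runStateT)

    -- Replace each concrete value with a placeholder, remembering the values.
    parametrize :: (Monad m) => Int -> StateT [Int] m String
    parametrize v = do
      ix <- gets length
      modify' (<> [v])
      pure ("$" <> show (ix + 1))

    collectParams :: (Monad m) => [Int] -> m ([String], [Int])
    collectParams xs = flip runStateT [] $ traverse parametrize xs
    -- collectParams [10, 20] evaluates to (["$1", "$2"], [10, 20])
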
@@ -441,8 +442,8 @@ pgDBStreamingSubscriptionPlan :: m (SubscriptionQueryPlan ('Postgres pgKind) (MultiplexedQuery ('Postgres pgKind))) pgDBStreamingSubscriptionPlan userInfo _sourceName sourceConfig (rootFieldAlias, unpreparedAST) reqHeaders operationName = do (preparedAST, PGL.QueryParametersInfo {..}) <- - flip runStateT mempty $ - traverse (PGL.resolveMultiplexedValue (_uiSession userInfo)) unpreparedAST + flip runStateT mempty + $ traverse (PGL.resolveMultiplexedValue (_uiSession userInfo)) unpreparedAST subscriptionQueryTagsComment <- ask let multiplexedQuery = PGL.mkStreamingMultiplexedQuery (G._rfaAlias rootFieldAlias, preparedAST) multiplexedQueryWithQueryTags = @@ -454,9 +455,9 @@ pgDBStreamingSubscriptionPlan userInfo _sourceName sourceConfig (rootFieldAlias, let connectionTemplateResolver = connectionTemplateConfigResolver (_pscConnectionTemplateConfig sourceConfig) queryContext = - Just $ - QueryContext operationName $ - QueryOperationType G.OperationTypeSubscription + Just + $ QueryContext operationName + $ QueryOperationType G.OperationTypeSubscription in applyConnectionTemplateResolverNonAdmin connectionTemplateResolver userInfo reqHeaders queryContext -- Cohort Id: Used for validating the multiplexed query. See @'testMultiplexedQueryTx'. @@ -530,7 +531,8 @@ mkCurPlanTx userInfo ps@(PreparedSql q prepMap) = in (,Just ps) $ OnBaseMonad do -- https://opentelemetry.io/docs/reference/specification/trace/semantic_conventions/database/#connection-level-attributes Tracing.attachMetadata [("db.system", "postgresql")] - runIdentity . PG.getRow + runIdentity + . PG.getRow <$> PG.rawQE dmlTxErrorHandler q prepArgs True -- convert a query from an intermediate representation to... another @@ -606,11 +608,11 @@ pgDBRemoteRelationshipPlan userInfo sourceName sourceConfig lhs lhsSchema argume rowsArgument :: UnpreparedValue ('Postgres pgKind) rowsArgument = - UVParameter Unknown $ - ColumnValue (ColumnScalar Postgres.PGJSONB) $ - Postgres.PGValJSONB $ - PG.JSONB $ - J.toJSON lhs + UVParameter Unknown + $ ColumnValue (ColumnScalar Postgres.PGJSONB) + $ Postgres.PGValJSONB + $ PG.JSONB + $ J.toJSON lhs jsonToRecordSet :: IR.SelectFromG ('Postgres pgKind) (UnpreparedValue ('Postgres pgKind)) recordSetDefinitionList = diff --git a/server/src-lib/Hasura/Backends/Postgres/Instances/Metadata.hs b/server/src-lib/Hasura/Backends/Postgres/Instances/Metadata.hs index 07f4a33e2a0..f8a3701bba3 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Instances/Metadata.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Instances/Metadata.hs @@ -57,48 +57,48 @@ class PostgresMetadata (pgKind :: PostgresKind) where -- duplicate oids will point to a more "general" type. 
pgTypeOidMapping :: InsOrdHashMap.InsOrdHashMap Postgres.PGScalarType PQ.Oid pgTypeOidMapping = - InsOrdHashMap.fromList $ - [ (Postgres.PGSmallInt, PTI.int2), - (Postgres.PGSerial, PTI.int4), - (Postgres.PGInteger, PTI.int4), - (Postgres.PGBigSerial, PTI.int8), - (Postgres.PGBigInt, PTI.int8), - (Postgres.PGFloat, PTI.float4), - (Postgres.PGDouble, PTI.float8), - (Postgres.PGMoney, PTI.numeric), - (Postgres.PGNumeric, PTI.numeric), - (Postgres.PGBoolean, PTI.bool), - (Postgres.PGChar, PTI.bpchar), - (Postgres.PGVarchar, PTI.varchar), - (Postgres.PGText, PTI.text), - (Postgres.PGDate, PTI.date), - (Postgres.PGTimeStamp, PTI.timestamp), - (Postgres.PGTimeStampTZ, PTI.timestamptz), - (Postgres.PGTimeTZ, PTI.timetz), - (Postgres.PGJSON, PTI.json), - (Postgres.PGJSONB, PTI.jsonb), - (Postgres.PGUUID, PTI.uuid), - (Postgres.PGArray Postgres.PGSmallInt, PTI.int2_array), - (Postgres.PGArray Postgres.PGSerial, PTI.int4_array), - (Postgres.PGArray Postgres.PGInteger, PTI.int4_array), - (Postgres.PGArray Postgres.PGBigSerial, PTI.int8_array), - (Postgres.PGArray Postgres.PGBigInt, PTI.int8_array), - (Postgres.PGArray Postgres.PGFloat, PTI.float4_array), - (Postgres.PGArray Postgres.PGDouble, PTI.float8_array), - (Postgres.PGArray Postgres.PGMoney, PTI.numeric_array), - (Postgres.PGArray Postgres.PGNumeric, PTI.numeric_array), - (Postgres.PGArray Postgres.PGBoolean, PTI.bool_array), - (Postgres.PGArray Postgres.PGChar, PTI.char_array), - (Postgres.PGArray Postgres.PGVarchar, PTI.varchar_array), - (Postgres.PGArray Postgres.PGText, PTI.text_array), - (Postgres.PGArray Postgres.PGDate, PTI.date_array), - (Postgres.PGArray Postgres.PGTimeStamp, PTI.timestamp_array), - (Postgres.PGArray Postgres.PGTimeStampTZ, PTI.timestamptz_array), - (Postgres.PGArray Postgres.PGTimeTZ, PTI.timetz_array), - (Postgres.PGArray Postgres.PGJSON, PTI.json_array), - (Postgres.PGArray Postgres.PGJSON, PTI.jsonb_array), - (Postgres.PGArray Postgres.PGUUID, PTI.uuid_array) - ] + InsOrdHashMap.fromList + $ [ (Postgres.PGSmallInt, PTI.int2), + (Postgres.PGSerial, PTI.int4), + (Postgres.PGInteger, PTI.int4), + (Postgres.PGBigSerial, PTI.int8), + (Postgres.PGBigInt, PTI.int8), + (Postgres.PGFloat, PTI.float4), + (Postgres.PGDouble, PTI.float8), + (Postgres.PGMoney, PTI.numeric), + (Postgres.PGNumeric, PTI.numeric), + (Postgres.PGBoolean, PTI.bool), + (Postgres.PGChar, PTI.bpchar), + (Postgres.PGVarchar, PTI.varchar), + (Postgres.PGText, PTI.text), + (Postgres.PGDate, PTI.date), + (Postgres.PGTimeStamp, PTI.timestamp), + (Postgres.PGTimeStampTZ, PTI.timestamptz), + (Postgres.PGTimeTZ, PTI.timetz), + (Postgres.PGJSON, PTI.json), + (Postgres.PGJSONB, PTI.jsonb), + (Postgres.PGUUID, PTI.uuid), + (Postgres.PGArray Postgres.PGSmallInt, PTI.int2_array), + (Postgres.PGArray Postgres.PGSerial, PTI.int4_array), + (Postgres.PGArray Postgres.PGInteger, PTI.int4_array), + (Postgres.PGArray Postgres.PGBigSerial, PTI.int8_array), + (Postgres.PGArray Postgres.PGBigInt, PTI.int8_array), + (Postgres.PGArray Postgres.PGFloat, PTI.float4_array), + (Postgres.PGArray Postgres.PGDouble, PTI.float8_array), + (Postgres.PGArray Postgres.PGMoney, PTI.numeric_array), + (Postgres.PGArray Postgres.PGNumeric, PTI.numeric_array), + (Postgres.PGArray Postgres.PGBoolean, PTI.bool_array), + (Postgres.PGArray Postgres.PGChar, PTI.char_array), + (Postgres.PGArray Postgres.PGVarchar, PTI.varchar_array), + (Postgres.PGArray Postgres.PGText, PTI.text_array), + (Postgres.PGArray Postgres.PGDate, PTI.date_array), + (Postgres.PGArray Postgres.PGTimeStamp, 
PTI.timestamp_array), + (Postgres.PGArray Postgres.PGTimeStampTZ, PTI.timestamptz_array), + (Postgres.PGArray Postgres.PGTimeTZ, PTI.timetz_array), + (Postgres.PGArray Postgres.PGJSON, PTI.json_array), + (Postgres.PGArray Postgres.PGJSON, PTI.jsonb_array), + (Postgres.PGArray Postgres.PGUUID, PTI.uuid_array) + ] instance PostgresMetadata 'Vanilla where validateRel _ _ _ = pure () diff --git a/server/src-lib/Hasura/Backends/Postgres/Instances/NativeQueries.hs b/server/src-lib/Hasura/Backends/Postgres/Instances/NativeQueries.hs index 4564aef31f6..1f1b6705c06 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Instances/NativeQueries.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Instances/NativeQueries.hs @@ -100,10 +100,12 @@ validateNativeQuery pgTypeOidMapping env connConf logicalModel model = do throwError (err400 ValidationFailed "Failed to validate query") { qeInternal = - Just $ - ExtraInternal $ - toJSON @Text $ - "Column named '" <> toTxt name <> "' is not returned from the query." + Just + $ ExtraInternal + $ toJSON @Text + $ "Column named '" + <> toTxt name + <> "' is not returned from the query." } Just actualOid | Just expectedOid <- InsOrdHashMap.lookup expectedType pgTypeOidMapping, @@ -111,20 +113,20 @@ validateNativeQuery pgTypeOidMapping env connConf logicalModel model = do throwError (err400 ValidationFailed "Failed to validate query") { qeInternal = - Just $ - ExtraInternal $ - toJSON @Text $ - Text.unwords $ - [ "Return column '" <> name <> "' has a type mismatch.", - "The expected type is '" <> toTxt expectedType <> "'," - ] - <> case Map.lookup actualOid (invertPgTypeOidMap pgTypeOidMapping) of - Just t -> - ["but the actual type is '" <> toTxt t <> "'."] - Nothing -> - [ "and has the " <> tshow expectedOid <> ",", - "but the actual type has the " <> tshow actualOid <> "." - ] + Just + $ ExtraInternal + $ toJSON @Text + $ Text.unwords + $ [ "Return column '" <> name <> "' has a type mismatch.", + "The expected type is '" <> toTxt expectedType <> "'," + ] + <> case Map.lookup actualOid (invertPgTypeOidMap pgTypeOidMapping) of + Just t -> + ["but the actual type is '" <> toTxt t <> "'."] + Nothing -> + [ "and has the " <> tshow expectedOid <> ",", + "but the actual type has the " <> tshow actualOid <> "." + ] } Just {} -> pure () @@ -182,7 +184,7 @@ renameIQ = runRenaming . fmap InterpolatedQuery . mapM renameII . getInterpolate -- When subsequently rendering the prepared statement definition however, it -- is more convenient to inspect the environment by index. -- Therefore we invert the map as part of renaming. - inverseMap :: Ord b => Map a b -> Map b a + inverseMap :: (Ord b) => Map a b -> Map b a inverseMap = Map.fromList . map swap . Map.toList -- | Pretty print an interpolated query with numbered parameters. @@ -200,7 +202,7 @@ renderIQ (InterpolatedQuery items) = foldMap printItem items -- Used by 'validateNativeQuery'. Exported for testing. nativeQueryToPreparedStatement :: forall m pgKind. 
- MonadError QErr m => + (MonadError QErr m) => LogicalModelMetadata ('Postgres pgKind) -> NativeQueryMetadata ('Postgres pgKind) -> m (BS.ByteString, Text) @@ -241,9 +243,10 @@ nativeQueryToPreparedStatement logicalModel model = do preparedQuery = "PREPARE " <> prepname <> argumentSignature <> " AS " <> wrapInCTE logimoCode - when (Set.empty /= undeclaredArguments) $ - throwError $ - err400 ValidationFailed $ - "Undeclared arguments: " <> commaSeparated (map tshow $ Set.toList undeclaredArguments) + when (Set.empty /= undeclaredArguments) + $ throwError + $ err400 ValidationFailed + $ "Undeclared arguments: " + <> commaSeparated (map tshow $ Set.toList undeclaredArguments) pure (Text.encodeUtf8 prepname, preparedQuery) diff --git a/server/src-lib/Hasura/Backends/Postgres/Instances/Schema.hs b/server/src-lib/Hasura/Backends/Postgres/Instances/Schema.hs index a652fc3a801..8289c8c82f0 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Instances/Schema.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Instances/Schema.hs @@ -376,9 +376,9 @@ buildTableRelayQueryFields mkRootFieldName tableName tableInfo gqlName pkeyColum tCase = _rscNamingConvention customization fieldDesc = Just $ G.Description $ "fetch data from the table: " <>> tableName rootFieldName = runMkRootFieldName mkRootFieldName $ applyFieldNameCaseIdentifier tCase (mkRelayConnectionField gqlName) - fmap afold $ - optionalFieldParser QDBConnection $ - selectTableConnection tableInfo rootFieldName fieldDesc pkeyColumns + fmap afold + $ optionalFieldParser QDBConnection + $ selectTableConnection tableInfo rootFieldName fieldDesc pkeyColumns buildFunctionRelayQueryFields :: forall r m n pgKind. @@ -393,9 +393,9 @@ buildFunctionRelayQueryFields :: SchemaT r m [FieldParser n (QueryDB ('Postgres pgKind) (RemoteRelationshipField IR.UnpreparedValue) (IR.UnpreparedValue ('Postgres pgKind)))] buildFunctionRelayQueryFields mkRootFieldName functionName functionInfo tableName pkeyColumns = do let fieldDesc = Just $ G.Description $ "execute function " <> functionName <<> " which returns " <>> tableName - fmap afold $ - optionalFieldParser QDBConnection $ - selectFunctionConnection mkRootFieldName functionInfo fieldDesc pkeyColumns + fmap afold + $ optionalFieldParser QDBConnection + $ selectFunctionConnection mkRootFieldName functionInfo fieldDesc pkeyColumns pgkBuildTableUpdateMutationFields :: forall r m n pgKind. @@ -435,23 +435,24 @@ columnParser columnType nullability = case columnType of -- TODO: introduce new dedicated scalars for Postgres column types. name <- mkScalarTypeName scalarType let schemaType = P.TNamed P.NonNullable $ P.Definition name Nothing Nothing [] P.TIScalar - pure $ - peelWithOrigin $ - fmap (ColumnValue columnType) $ - possiblyNullable scalarType nullability $ - P.Parser - { pType = schemaType, - pParser = - P.valueToJSON (P.toGraphQLType schemaType) >=> \case - J.Null -> P.parseError $ "unexpected null value for type " <> toErrorValue name - value -> - runAesonParser (parsePGValue scalarType) value - `onLeft` (P.parseErrorWith P.ParseFailed . toErrorMessage . qeError) - } + pure + $ peelWithOrigin + $ fmap (ColumnValue columnType) + $ possiblyNullable scalarType nullability + $ P.Parser + { pType = schemaType, + pParser = + P.valueToJSON (P.toGraphQLType schemaType) >=> \case + J.Null -> P.parseError $ "unexpected null value for type " <> toErrorValue name + value -> + runAesonParser (parsePGValue scalarType) value + `onLeft` (P.parseErrorWith P.ParseFailed . toErrorMessage . 
qeError) + } ColumnEnumReference (EnumReference tableName enumValues tableCustomName) -> case nonEmpty (HashMap.toList enumValues) of Just enumValuesList -> - peelWithOrigin . fmap (ColumnValue columnType) + peelWithOrigin + . fmap (ColumnValue columnType) <$> enumParser @pgKind tableName enumValuesList tableCustomName nullability Nothing -> throw400 ValidationFailed "empty enum values" @@ -519,8 +520,8 @@ orderByOperators :: NamingCase -> (G.Name, NonEmpty (Definition P.EnumValueInfo, (BasicOrderType ('Postgres pgKind), NullsOrderType ('Postgres pgKind)))) orderByOperators tCase = - (Name._order_by,) $ - NE.fromList + (Name._order_by,) + $ NE.fromList [ ( define (applyEnumValueCase tCase Name._asc) "in ascending order, nulls last", (Postgres.OTAsc, Postgres.NullsLast) ), @@ -571,286 +572,286 @@ comparisonExps = memoize 'comparisonExps \columnType -> do maybeCastParser <- castExp columnType tCase let name = applyTypeNameCaseCust tCase $ P.getName typedParser <> Name.__comparison_exp desc = - G.Description $ - "Boolean expression to compare columns of type " - <> P.getName typedParser - <<> ". All fields are combined with logical 'AND'." + G.Description + $ "Boolean expression to compare columns of type " + <> P.getName typedParser + <<> ". All fields are combined with logical 'AND'." textListParser = fmap IR.openValueOrigin <$> P.list textParser columnListParser = fmap IR.openValueOrigin <$> P.list typedParser -- Naming conventions - pure $ - P.object name (Just desc) $ - fmap catMaybes $ - sequenceA $ - concat - [ flip (maybe []) maybeCastParser $ \castParser -> - [ P.fieldOptional Name.__cast Nothing (ACast <$> castParser) - ], - -- Common ops for all types - equalityOperators - tCase - collapseIfNull - (IR.mkParameter <$> typedParser) - (mkListParameter columnType <$> columnListParser), - -- Comparison ops for non Raster types - guard (isScalarColumnWhere (/= PGRaster) columnType) - *> comparisonOperators - tCase - collapseIfNull - (IR.mkParameter <$> typedParser), - -- Ops for Raster types - guard (isScalarColumnWhere (== PGRaster) columnType) - *> [ mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "intersects", "rast"])) - Nothing - (ABackendSpecific . ASTIntersectsRast . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "intersects", "nband", "geom"])) - Nothing - (ABackendSpecific . ASTIntersectsNbandGeom <$> ingInputParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "intersects", "geom", "nband"])) - Nothing - (ABackendSpecific . ASTIntersectsGeomNband <$> ignInputParser) - ], - -- Ops for String like types - guard (isScalarColumnWhere isStringType columnType) - *> [ mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__like) - (Just "does the column match the given pattern") - (ALIKE . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__nlike) - (Just "does the column NOT match the given pattern") - (ANLIKE . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__ilike) - (Just "does the column match the given case-insensitive pattern") - (ABackendSpecific . AILIKE . 
IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__nilike) - (Just "does the column NOT match the given case-insensitive pattern") - (ABackendSpecific . ANILIKE . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__similar) - (Just "does the column match the given SQL regular expression") - (ABackendSpecific . ASIMILAR . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__nsimilar) - (Just "does the column NOT match the given SQL regular expression") - (ABackendSpecific . ANSIMILAR . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__regex) - (Just "does the column match the given POSIX regular expression, case sensitive") - (ABackendSpecific . AREGEX . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__iregex) - (Just "does the column match the given POSIX regular expression, case insensitive") - (ABackendSpecific . AIREGEX . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__nregex) - (Just "does the column NOT match the given POSIX regular expression, case sensitive") - (ABackendSpecific . ANREGEX . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__niregex) - (Just "does the column NOT match the given POSIX regular expression, case insensitive") - (ABackendSpecific . ANIREGEX . IR.mkParameter <$> typedParser) - ], - -- Ops for JSONB type - guard (isScalarColumnWhere (== PGJSONB) columnType) - *> [ mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__contains) - (Just "does the column contain the given json value at the top level") - (ABackendSpecific . AContains . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_contained", "in"])) - (Just "is the column contained in the given json value") - (ABackendSpecific . AContainedIn . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_has", "key"])) - (Just "does the string exist as a top-level key in the column") - (ABackendSpecific . AHasKey . IR.mkParameter <$> nullableTextParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_has", "keys", "any"])) - (Just "do any of these strings exist as top-level keys in the column") - (ABackendSpecific . AHasKeysAny . mkListLiteral (ColumnScalar PGText) <$> textListParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_has", "keys", "all"])) - (Just "do all of these strings exist as top-level keys in the column") - (ABackendSpecific . AHasKeysAll . mkListLiteral (ColumnScalar PGText) <$> textListParser) - ], - -- Ops for Geography type - guard (isScalarColumnWhere (== PGGeography) columnType) - *> [ mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "intersects"])) - (Just "does the column spatially intersect the given geography value") - (ABackendSpecific . ASTIntersects . 
IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "d", "within"])) - (Just "is the column within a given distance from the given geography value") - (ABackendSpecific . ASTDWithinGeog <$> geogInputParser) - ], - -- Ops for Geometry type - guard (isScalarColumnWhere (== PGGeometry) columnType) - *> [ mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "contains"])) - (Just "does the column contain the given geometry value") - (ABackendSpecific . ASTContains . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "crosses"])) - (Just "does the column cross the given geometry value") - (ABackendSpecific . ASTCrosses . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "equals"])) - (Just "is the column equal to given geometry value (directionality is ignored)") - (ABackendSpecific . ASTEquals . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "overlaps"])) - (Just "does the column 'spatially overlap' (intersect but not completely contain) the given geometry value") - (ABackendSpecific . ASTOverlaps . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "touches"])) - (Just "does the column have atleast one point in common with the given geometry value") - (ABackendSpecific . ASTTouches . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "within"])) - (Just "is the column contained in the given geometry value") - (ABackendSpecific . ASTWithin . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "intersects"])) - (Just "does the column spatially intersect the given geometry value") - (ABackendSpecific . ASTIntersects . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "3d", "intersects"])) - (Just "does the column spatially intersect the given geometry value in 3D") - (ABackendSpecific . AST3DIntersects . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "d", "within"])) - (Just "is the column within a given distance from the given geometry value") - (ABackendSpecific . ASTDWithinGeom <$> geomInputParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "3d", "d", "within"])) - (Just "is the column within a given 3D distance from the given geometry value") - (ABackendSpecific . AST3DDWithinGeom <$> geomInputParser) - ], - -- Ops for Ltree type - guard (isScalarColumnWhere (== PGLtree) columnType) - *> [ mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__ancestor) - (Just "is the left argument an ancestor of right (or equal)?") - (ABackendSpecific . AAncestor . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_ancestor", "any"])) - (Just "does array contain an ancestor of `ltree`?") - (ABackendSpecific . AAncestorAny . 
mkListLiteral columnType <$> columnListParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__descendant) - (Just "is the left argument a descendant of right (or equal)?") - (ABackendSpecific . ADescendant . IR.mkParameter <$> typedParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_descendant", "any"])) - (Just "does array contain a descendant of `ltree`?") - (ABackendSpecific . ADescendantAny . mkListLiteral columnType <$> columnListParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedName Name.__matches) - (Just "does `ltree` match `lquery`?") - (ABackendSpecific . AMatches . IR.mkParameter <$> lqueryParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_matches", "any"])) - (Just "does `ltree` match any `lquery` in array?") - (ABackendSpecific . AMatchesAny . mkListLiteral (ColumnScalar PGLquery) <$> textListParser), - mkBoolOperator - tCase - collapseIfNull - (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_matches", "fulltext"])) - (Just "does `ltree` match `ltxtquery`?") - (ABackendSpecific . AMatchesFulltext . IR.mkParameter <$> ltxtqueryParser) - ] - ] + pure + $ P.object name (Just desc) + $ fmap catMaybes + $ sequenceA + $ concat + [ flip (maybe []) maybeCastParser $ \castParser -> + [ P.fieldOptional Name.__cast Nothing (ACast <$> castParser) + ], + -- Common ops for all types + equalityOperators + tCase + collapseIfNull + (IR.mkParameter <$> typedParser) + (mkListParameter columnType <$> columnListParser), + -- Comparison ops for non Raster types + guard (isScalarColumnWhere (/= PGRaster) columnType) + *> comparisonOperators + tCase + collapseIfNull + (IR.mkParameter <$> typedParser), + -- Ops for Raster types + guard (isScalarColumnWhere (== PGRaster) columnType) + *> [ mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "intersects", "rast"])) + Nothing + (ABackendSpecific . ASTIntersectsRast . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "intersects", "nband", "geom"])) + Nothing + (ABackendSpecific . ASTIntersectsNbandGeom <$> ingInputParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "intersects", "geom", "nband"])) + Nothing + (ABackendSpecific . ASTIntersectsGeomNband <$> ignInputParser) + ], + -- Ops for String like types + guard (isScalarColumnWhere isStringType columnType) + *> [ mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__like) + (Just "does the column match the given pattern") + (ALIKE . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__nlike) + (Just "does the column NOT match the given pattern") + (ANLIKE . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__ilike) + (Just "does the column match the given case-insensitive pattern") + (ABackendSpecific . AILIKE . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__nilike) + (Just "does the column NOT match the given case-insensitive pattern") + (ABackendSpecific . ANILIKE . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__similar) + (Just "does the column match the given SQL regular expression") + (ABackendSpecific . 
ASIMILAR . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__nsimilar) + (Just "does the column NOT match the given SQL regular expression") + (ABackendSpecific . ANSIMILAR . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__regex) + (Just "does the column match the given POSIX regular expression, case sensitive") + (ABackendSpecific . AREGEX . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__iregex) + (Just "does the column match the given POSIX regular expression, case insensitive") + (ABackendSpecific . AIREGEX . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__nregex) + (Just "does the column NOT match the given POSIX regular expression, case sensitive") + (ABackendSpecific . ANREGEX . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__niregex) + (Just "does the column NOT match the given POSIX regular expression, case insensitive") + (ABackendSpecific . ANIREGEX . IR.mkParameter <$> typedParser) + ], + -- Ops for JSONB type + guard (isScalarColumnWhere (== PGJSONB) columnType) + *> [ mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__contains) + (Just "does the column contain the given json value at the top level") + (ABackendSpecific . AContains . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_contained", "in"])) + (Just "is the column contained in the given json value") + (ABackendSpecific . AContainedIn . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_has", "key"])) + (Just "does the string exist as a top-level key in the column") + (ABackendSpecific . AHasKey . IR.mkParameter <$> nullableTextParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_has", "keys", "any"])) + (Just "do any of these strings exist as top-level keys in the column") + (ABackendSpecific . AHasKeysAny . mkListLiteral (ColumnScalar PGText) <$> textListParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_has", "keys", "all"])) + (Just "do all of these strings exist as top-level keys in the column") + (ABackendSpecific . AHasKeysAll . mkListLiteral (ColumnScalar PGText) <$> textListParser) + ], + -- Ops for Geography type + guard (isScalarColumnWhere (== PGGeography) columnType) + *> [ mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "intersects"])) + (Just "does the column spatially intersect the given geography value") + (ABackendSpecific . ASTIntersects . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "d", "within"])) + (Just "is the column within a given distance from the given geography value") + (ABackendSpecific . ASTDWithinGeog <$> geogInputParser) + ], + -- Ops for Geometry type + guard (isScalarColumnWhere (== PGGeometry) columnType) + *> [ mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "contains"])) + (Just "does the column contain the given geometry value") + (ABackendSpecific . ASTContains . 
IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "crosses"])) + (Just "does the column cross the given geometry value") + (ABackendSpecific . ASTCrosses . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "equals"])) + (Just "is the column equal to given geometry value (directionality is ignored)") + (ABackendSpecific . ASTEquals . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "overlaps"])) + (Just "does the column 'spatially overlap' (intersect but not completely contain) the given geometry value") + (ABackendSpecific . ASTOverlaps . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "touches"])) + (Just "does the column have atleast one point in common with the given geometry value") + (ABackendSpecific . ASTTouches . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "within"])) + (Just "is the column contained in the given geometry value") + (ABackendSpecific . ASTWithin . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "intersects"])) + (Just "does the column spatially intersect the given geometry value") + (ABackendSpecific . ASTIntersects . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "3d", "intersects"])) + (Just "does the column spatially intersect the given geometry value in 3D") + (ABackendSpecific . AST3DIntersects . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "d", "within"])) + (Just "is the column within a given distance from the given geometry value") + (ABackendSpecific . ASTDWithinGeom <$> geomInputParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_st", "3d", "d", "within"])) + (Just "is the column within a given 3D distance from the given geometry value") + (ABackendSpecific . AST3DDWithinGeom <$> geomInputParser) + ], + -- Ops for Ltree type + guard (isScalarColumnWhere (== PGLtree) columnType) + *> [ mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__ancestor) + (Just "is the left argument an ancestor of right (or equal)?") + (ABackendSpecific . AAncestor . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_ancestor", "any"])) + (Just "does array contain an ancestor of `ltree`?") + (ABackendSpecific . AAncestorAny . mkListLiteral columnType <$> columnListParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__descendant) + (Just "is the left argument a descendant of right (or equal)?") + (ABackendSpecific . ADescendant . IR.mkParameter <$> typedParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_descendant", "any"])) + (Just "does array contain a descendant of `ltree`?") + (ABackendSpecific . ADescendantAny . 
mkListLiteral columnType <$> columnListParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedName Name.__matches) + (Just "does `ltree` match `lquery`?") + (ABackendSpecific . AMatches . IR.mkParameter <$> lqueryParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_matches", "any"])) + (Just "does `ltree` match any `lquery` in array?") + (ABackendSpecific . AMatchesAny . mkListLiteral (ColumnScalar PGLquery) <$> textListParser), + mkBoolOperator + tCase + collapseIfNull + (C.fromAutogeneratedTuple $$(G.litGQLIdentifier ["_matches", "fulltext"])) + (Just "does `ltree` match `ltxtquery`?") + (ABackendSpecific . AMatchesFulltext . IR.mkParameter <$> ltxtqueryParser) + ] + ] where mkListLiteral :: ColumnType ('Postgres pgKind) -> [ColumnValue ('Postgres pgKind)] -> IR.UnpreparedValue ('Postgres pgKind) mkListLiteral columnType columnValues = - IR.UVLiteral $ - SETyAnn + IR.UVLiteral + $ SETyAnn (SEArray $ txtEncoder . cvValue <$> columnValues) (mkTypeAnn $ CollectableTypeArray $ unsafePGColumnToBackend columnType) mkListParameter :: ColumnType ('Postgres pgKind) -> [ColumnValue ('Postgres pgKind)] -> IR.UnpreparedValue ('Postgres pgKind) mkListParameter columnType columnValues = do let scalarType = unsafePGColumnToBackend columnType - IR.UVParameter IR.Unknown $ - ColumnValue + IR.UVParameter IR.Unknown + $ ColumnValue (ColumnScalar $ Postgres.PGArray scalarType) (Postgres.PGValArray $ cvValue <$> columnValues) @@ -884,12 +885,12 @@ geographyWithinDistanceInput = do -- this field non-nullable in a future release. booleanParser <- columnParser (ColumnScalar PGBoolean) (G.Nullability True) floatParser <- columnParser (ColumnScalar PGFloat) (G.Nullability False) - pure $ - P.object Name._st_d_within_geography_input Nothing $ - DWithinGeogOp - <$> (IR.mkParameter <$> P.field Name._distance Nothing floatParser) - <*> (IR.mkParameter <$> P.field Name._from Nothing geographyParser) - <*> (IR.mkParameter <$> P.fieldWithDefault Name._use_spheroid Nothing (G.VBoolean True) booleanParser) + pure + $ P.object Name._st_d_within_geography_input Nothing + $ DWithinGeogOp + <$> (IR.mkParameter <$> P.field Name._distance Nothing floatParser) + <*> (IR.mkParameter <$> P.field Name._from Nothing geographyParser) + <*> (IR.mkParameter <$> P.fieldWithDefault Name._use_spheroid Nothing (G.VBoolean True) booleanParser) geometryWithinDistanceInput :: forall pgKind m n r. @@ -898,11 +899,11 @@ geometryWithinDistanceInput :: geometryWithinDistanceInput = do geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False) floatParser <- columnParser (ColumnScalar PGFloat) (G.Nullability False) - pure $ - P.object Name._st_d_within_input Nothing $ - DWithinGeomOp - <$> (IR.mkParameter <$> P.field Name._distance Nothing floatParser) - <*> (IR.mkParameter <$> P.field Name._from Nothing geometryParser) + pure + $ P.object Name._st_d_within_input Nothing + $ DWithinGeomOp + <$> (IR.mkParameter <$> P.field Name._distance Nothing floatParser) + <*> (IR.mkParameter <$> P.field Name._from Nothing geometryParser) intersectsNbandGeomInput :: forall pgKind m n r. 
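
Note: the geography/geometry input parsers above assemble their operator payloads applicatively, one P.field per record slot, with P.fieldWithDefault supplying a fallback. The same shape over Maybe, with a hypothetical record in place of DWithinGeogOp:

    import Control.Applicative ((<|>))

    data DWithinArgs = DWithinArgs
      { dwDistance :: Double,
        dwFrom :: String,
        dwUseSpheroid :: Bool
      }

    -- Each slot parses independently; (<|> Just True) plays the role of
    -- fieldWithDefault for the optional use_spheroid argument.
    parseDWithin :: Maybe Double -> Maybe String -> Maybe Bool -> Maybe DWithinArgs
    parseDWithin distance from spheroid =
      DWithinArgs
        <$> distance
        <*> from
        <*> (spheroid <|> Just True)
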
@@ -911,11 +912,11 @@ intersectsNbandGeomInput :: intersectsNbandGeomInput = do geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False) integerParser <- columnParser (ColumnScalar PGInteger) (G.Nullability False) - pure $ - P.object Name._st_intersects_nband_geom_input Nothing $ - STIntersectsNbandGeommin - <$> (IR.mkParameter <$> P.field Name._nband Nothing integerParser) - <*> (IR.mkParameter <$> P.field Name._geommin Nothing geometryParser) + pure + $ P.object Name._st_intersects_nband_geom_input Nothing + $ STIntersectsNbandGeommin + <$> (IR.mkParameter <$> P.field Name._nband Nothing integerParser) + <*> (IR.mkParameter <$> P.field Name._geommin Nothing geometryParser) intersectsGeomNbandInput :: forall pgKind m n r. @@ -924,11 +925,11 @@ intersectsGeomNbandInput :: intersectsGeomNbandInput = do geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False) integerParser <- columnParser (ColumnScalar PGInteger) (G.Nullability False) - pure $ - P.object Name._st_intersects_geom_nband_input Nothing $ - STIntersectsGeomminNband - <$> (IR.mkParameter <$> P.field Name._geommin Nothing geometryParser) - <*> (fmap IR.mkParameter <$> P.fieldOptional Name._nband Nothing integerParser) + pure + $ P.object Name._st_intersects_geom_nband_input Nothing + $ STIntersectsGeomminNband + <$> (IR.mkParameter <$> P.field Name._geommin Nothing geometryParser) + <*> (fmap IR.mkParameter <$> P.fieldOptional Name._nband Nothing integerParser) countTypeInput :: (MonadParse n) => diff --git a/server/src-lib/Hasura/Backends/Postgres/Instances/Transport.hs b/server/src-lib/Hasura/Backends/Postgres/Instances/Transport.hs index 76230df7ff9..aaa6c543a47 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Instances/Transport.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Instances/Transport.hs @@ -81,10 +81,10 @@ runPGQuery :: runPGQuery reqId query fieldName _userInfo logger _ sourceConfig tx genSql resolvedConnectionTemplate = do -- log the generated SQL and the graphql query logQueryLog logger $ mkQueryLog query fieldName genSql reqId (resolvedConnectionTemplate <$ resolvedConnectionTemplate) - withElapsedTime $ - newSpan ("Postgres Query for root field " <>> fieldName) $ - runQueryTx (_pscExecCtx sourceConfig) (GraphQLQuery resolvedConnectionTemplate) $ - fmap snd (runOnBaseMonad tx) + withElapsedTime + $ newSpan ("Postgres Query for root field " <>> fieldName) + $ runQueryTx (_pscExecCtx sourceConfig) (GraphQLQuery resolvedConnectionTemplate) + $ fmap snd (runOnBaseMonad tx) runPGMutation :: ( MonadIO m, @@ -107,10 +107,10 @@ runPGMutation :: runPGMutation reqId query fieldName userInfo logger _ sourceConfig tx _genSql resolvedConnectionTemplate = do -- log the graphql query logQueryLog logger $ mkQueryLog query fieldName Nothing reqId (resolvedConnectionTemplate <$ resolvedConnectionTemplate) - withElapsedTime $ - newSpan ("Postgres Mutation for root field " <>> fieldName) $ - runTxWithCtxAndUserInfo userInfo (_pscExecCtx sourceConfig) (Tx PG.ReadWrite Nothing) (GraphQLQuery resolvedConnectionTemplate) $ - runOnBaseMonad tx + withElapsedTime + $ newSpan ("Postgres Mutation for root field " <>> fieldName) + $ runTxWithCtxAndUserInfo userInfo (_pscExecCtx sourceConfig) (Tx PG.ReadWrite Nothing) (GraphQLQuery resolvedConnectionTemplate) + $ runOnBaseMonad tx runPGSubscription :: (MonadIO m, MonadBaseControl IO m) => @@ -120,10 +120,10 @@ runPGSubscription :: ResolvedConnectionTemplate ('Postgres pgKind) -> m (DiffTime, Either QErr [(CohortId, B.ByteString)]) 
runPGSubscription sourceConfig query variables resolvedConnectionTemplate = - withElapsedTime $ - runExceptT $ - runQueryTx (_pscExecCtx sourceConfig) (GraphQLQuery resolvedConnectionTemplate) $ - PGL.executeMultiplexedQuery query variables + withElapsedTime + $ runExceptT + $ runQueryTx (_pscExecCtx sourceConfig) (GraphQLQuery resolvedConnectionTemplate) + $ PGL.executeMultiplexedQuery query variables runPGStreamingSubscription :: (MonadIO m, MonadBaseControl IO m) => @@ -133,8 +133,9 @@ runPGStreamingSubscription :: ResolvedConnectionTemplate ('Postgres pgKind) -> m (DiffTime, Either QErr [(CohortId, B.ByteString, CursorVariableValues)]) runPGStreamingSubscription sourceConfig query variables resolvedConnectionTemplate = - withElapsedTime $ - runExceptT $ do + withElapsedTime + $ runExceptT + $ do res <- runQueryTx (_pscExecCtx sourceConfig) (GraphQLQuery resolvedConnectionTemplate) $ PGL.executeStreamingMultiplexedQuery query variables pure $ res <&> (\(cohortId, cohortRes, cursorVariableVals) -> (cohortId, cohortRes, PG.getViaJSON cursorVariableVals)) @@ -149,8 +150,8 @@ runPGQueryExplain :: DBStepInfo ('Postgres pgKind) -> m EncJSON runPGQueryExplain _ (DBStepInfo _ sourceConfig _ action resolvedConnectionTemplate) = - runQueryTx (_pscExecCtx sourceConfig) (GraphQLQuery resolvedConnectionTemplate) $ - fmap arResult (runOnBaseMonad action) + runQueryTx (_pscExecCtx sourceConfig) (GraphQLQuery resolvedConnectionTemplate) + $ fmap arResult (runOnBaseMonad action) mkQueryLog :: GQLReqUnparsed -> @@ -191,10 +192,10 @@ runPGMutationTransaction :: m (DiffTime, RootFieldMap EncJSON) runPGMutationTransaction reqId query userInfo logger sourceConfig resolvedConnectionTemplate mutations = do logQueryLog logger $ mkQueryLog query (mkUnNamespacedRootFieldAlias Name._transaction) Nothing reqId (resolvedConnectionTemplate <$ resolvedConnectionTemplate) - withElapsedTime $ - runTxWithCtxAndUserInfo userInfo (_pscExecCtx sourceConfig) (Tx PG.ReadWrite Nothing) (GraphQLQuery resolvedConnectionTemplate) $ - flip InsOrdHashMap.traverseWithKey mutations \fieldName dbsi -> - newSpan ("Postgres Mutation for root field " <>> fieldName) $ - fmap arResult $ - runOnBaseMonad $ - dbsiAction dbsi + withElapsedTime + $ runTxWithCtxAndUserInfo userInfo (_pscExecCtx sourceConfig) (Tx PG.ReadWrite Nothing) (GraphQLQuery resolvedConnectionTemplate) + $ flip InsOrdHashMap.traverseWithKey mutations \fieldName dbsi -> + newSpan ("Postgres Mutation for root field " <>> fieldName) + $ fmap arResult + $ runOnBaseMonad + $ dbsiAction dbsi diff --git a/server/src-lib/Hasura/Backends/Postgres/Instances/Types.hs b/server/src-lib/Hasura/Backends/Postgres/Instances/Types.hs index c43131b2403..e11535f8bf0 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Instances/Types.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Instances/Types.hs @@ -127,8 +127,8 @@ instance type HealthCheckTest ('Postgres pgKind) = HealthCheckTestSql healthCheckImplementation = - Just $ - HealthCheckImplementation + Just + $ HealthCheckImplementation { _hciDefaultTest = defaultHealthCheckTestSql, _hciTestCodec = codec } diff --git a/server/src-lib/Hasura/Backends/Postgres/SQL/DML.hs b/server/src-lib/Hasura/Backends/Postgres/SQL/DML.hs index c48d323a304..b557e4e7417 100644 --- a/server/src-lib/Hasura/Backends/Postgres/SQL/DML.hs +++ b/server/src-lib/Hasura/Backends/Postgres/SQL/DML.hs @@ -281,8 +281,8 @@ mkRowExp extrs = mkSelect { selExtr = [Extractor (SERowIdentifier $ toIdentifier innerSelName) Nothing], selFrom = - Just $ - FromExp + Just + $ FromExp 
[mkSelFromExp False innerSel innerSelName] } in SESelect outerSel @@ -349,7 +349,7 @@ mkQIdentifier q t = QIdentifier (QualifiedIdentifier q Nothing) (toIdentifier t) mkQIdentifierTable :: (IsIdentifier a) => QualifiedTable -> a -> QIdentifier mkQIdentifierTable q = QIdentifier (mkQual q) . toIdentifier -mkIdentifierSQLExp :: forall a. IsIdentifier a => Qual -> a -> SQLExp +mkIdentifierSQLExp :: forall a. (IsIdentifier a) => Qual -> a -> SQLExp mkIdentifierSQLExp q = SEQIdentifier . QIdentifier q . toIdentifier data QIdentifier @@ -686,8 +686,8 @@ instance Hashable FunctionArgs instance ToSQL FunctionArgs where toSQL (FunctionArgs positionalArgs namedArgsMap) = - let namedArgs = flip map (HashMap.toList namedArgsMap) $ - \(argName, argVal) -> SENamedArg (Identifier argName) argVal + let namedArgs = flip map (HashMap.toList namedArgsMap) + $ \(argName, argVal) -> SENamedArg (Identifier argName) argVal in parenB $ ", " <+> (positionalArgs <> namedArgs) data FunctionDefinitionListItem = FunctionDefinitionListItem @@ -729,8 +729,8 @@ functionNameToTableAlias = mkTableAlias . qualifiedObjectToText -- Using the function name as the relation name, and the columns as the relation schema. mkFunctionAlias :: QualifiedObject FunctionName -> Maybe [(ColumnAlias, PGScalarType)] -> FunctionAlias mkFunctionAlias alias listM = - FunctionAlias (functionNameToTableAlias alias) $ - fmap (map (uncurry FunctionDefinitionListItem)) listM + FunctionAlias (functionNameToTableAlias alias) + $ fmap (map (uncurry FunctionDefinitionListItem)) listM instance ToSQL FunctionAlias where toSQL (FunctionAlias tableAlias (Just definitionList)) = @@ -899,16 +899,16 @@ simplifyBoolExp be = case be of let e1s = simplifyBoolExp e1 e2s = simplifyBoolExp e2 in if - | e1s == BELit True -> e2s - | e2s == BELit True -> e1s - | otherwise -> BEBin AndOp e1s e2s + | e1s == BELit True -> e2s + | e2s == BELit True -> e1s + | otherwise -> BEBin AndOp e1s e2s BEBin OrOp e1 e2 -> let e1s = simplifyBoolExp e1 e2s = simplifyBoolExp e2 in if - | e1s == BELit False -> e2s - | e2s == BELit False -> e1s - | otherwise -> BEBin OrOp e1s e2s + | e1s == BELit False -> e2s + | e2s == BELit False -> e1s + | otherwise -> BEBin OrOp e1s e2s e -> e mkExists :: FromItem -> BoolExp -> BoolExp @@ -1045,10 +1045,11 @@ buildUpsertSetExp :: buildUpsertSetExp cols preSet = SetExp $ map SetExpItem $ HashMap.toList setExps where - setExps = HashMap.union preSet $ - HashMap.fromList $ - flip map cols $ \col -> - (col, SEExcluded $ toIdentifier col) + setExps = HashMap.union preSet + $ HashMap.fromList + $ flip map cols + $ \col -> + (col, SEExcluded $ toIdentifier col) newtype UsingExp = UsingExp [TableName] deriving (Show, Eq) diff --git a/server/src-lib/Hasura/Backends/Postgres/SQL/RenameIdentifiers.hs b/server/src-lib/Hasura/Backends/Postgres/SQL/RenameIdentifiers.hs index 305d8d11aa5..be7230dbdcf 100644 --- a/server/src-lib/Hasura/Backends/Postgres/SQL/RenameIdentifiers.hs +++ b/server/src-lib/Hasura/Backends/Postgres/SQL/RenameIdentifiers.hs @@ -165,8 +165,8 @@ getTableNameAndPrefixHash identifier = getTableIdentifierAndPrefixHash :: TableIdentifier -> MyState TableIdentifier getTableIdentifierAndPrefixHash identifier = do tables <- _tables <$> get - pure $ - if Set.member identifier tables + pure + $ if Set.member identifier tables then TableIdentifier $ mkPrefixedTableName (unTableIdentifier identifier) else identifier @@ -233,19 +233,19 @@ uSelect (S.Select ctes distinctM extrs fromM whereM groupByM havingM orderByM li -- Potentially introduces a new alias 
so it should go before the rest. newFromM <- mapM uFromExp fromM - newWhereM <- forM whereM $ - \(S.WhereFrag be) -> S.WhereFrag <$> uBoolExp be - newGroupByM <- forM groupByM $ - \(S.GroupByExp l) -> S.GroupByExp <$> mapM uSqlExp l - newHavingM <- forM havingM $ - \(S.HavingExp be) -> S.HavingExp <$> uBoolExp be + newWhereM <- forM whereM + $ \(S.WhereFrag be) -> S.WhereFrag <$> uBoolExp be + newGroupByM <- forM groupByM + $ \(S.GroupByExp l) -> S.GroupByExp <$> mapM uSqlExp l + newHavingM <- forM havingM + $ \(S.HavingExp be) -> S.HavingExp <$> uBoolExp be newOrderByM <- mapM uOrderBy orderByM newDistinctM <- mapM uDistinct distinctM newExtrs <- mapM uExtractor extrs newLimitM <- mapM uLimit limitM newOffsetM <- mapM uOffset offsetM - pure $ - S.Select + pure + $ S.Select newCTEs newDistinctM newExtrs @@ -306,8 +306,9 @@ uFromItem fromItem = case fromItem of newAls <- addAliasAndPrefixHash alias pure $ S.FISelectWith isLateral newSelectWith newAls S.FIValues (S.ValuesExp tups) alias mCols -> do - newValExp <- fmap S.ValuesExp $ - forM tups $ \(S.TupleExp ts) -> + newValExp <- fmap S.ValuesExp + $ forM tups + $ \(S.TupleExp ts) -> S.TupleExp <$> mapM uSqlExp ts pure $ S.FIValues newValExp (prefixHashTableAlias alias) (fmap (map prefixHashColumnAlias) mCols) -- _Note_: Potentially introduces a new alias diff --git a/server/src-lib/Hasura/Backends/Postgres/SQL/Types.hs b/server/src-lib/Hasura/Backends/Postgres/SQL/Types.hs index 415803c900e..799123ffdf9 100644 --- a/server/src-lib/Hasura/Backends/Postgres/SQL/Types.hs +++ b/server/src-lib/Hasura/Backends/Postgres/SQL/Types.hs @@ -201,8 +201,9 @@ data TableType deriving (Eq) instance PG.FromCol TableType where - fromCol bs = flip PG.fromColHelper bs $ - PD.enum $ \case + fromCol bs = flip PG.fromColHelper bs + $ PD.enum + $ \case "BASE TABLE" -> Just TTBaseTable "VIEW" -> Just TTView "FOREIGN TABLE" -> Just TTForeignTable @@ -287,10 +288,12 @@ instance (HasCodec a, Typeable a) => HasCodec (QualifiedObject a) where codec = parseAlternative objCodec strCodec where objCodec = - AC.object ("PostgresQualified_" <> typeableName @a) $ - QualifiedObject - <$> optionalFieldWithDefault' "schema" publicSchema AC..= qSchema - <*> requiredField' "name" AC..= qName + AC.object ("PostgresQualified_" <> typeableName @a) + $ QualifiedObject + <$> optionalFieldWithDefault' "schema" publicSchema + AC..= qSchema + <*> requiredField' "name" + AC..= qName strCodec = QualifiedObject publicSchema <$> codec @a instance (FromJSON a) => FromJSON (QualifiedObject a) where @@ -298,8 +301,11 @@ instance (FromJSON a) => FromJSON (QualifiedObject a) where QualifiedObject publicSchema <$> parseJSON v parseJSON (Object o) = QualifiedObject - <$> o .:? "schema" .!= publicSchema - <*> o .: "name" + <$> o + .:? "schema" + .!= publicSchema + <*> o + .: "name" parseJSON _ = fail "expecting a string/object for QualifiedObject" @@ -313,10 +319,10 @@ instance (ToJSON a) => ToJSON (QualifiedObject a) where instance (ToJSON a, ToTxt a) => ToJSONKey (QualifiedObject a) where toJSONKey = ToJSONKeyText (K.fromText . qualifiedObjectToText) (text . qualifiedObjectToText) -instance ToTxt a => ToTxt (QualifiedObject a) where +instance (ToTxt a) => ToTxt (QualifiedObject a) where toTxt = qualifiedObjectToText -instance ToTxt a => ToErrorValue (QualifiedObject a) where +instance (ToTxt a) => ToErrorValue (QualifiedObject a) where toErrorValue (QualifiedObject sn o) = ErrorValue.squote $ getSchemaTxt sn <> "." 
<> toTxt o instance (Hashable a) => Hashable (QualifiedObject a) @@ -325,17 +331,17 @@ instance (ToSQL a) => ToSQL (QualifiedObject a) where toSQL (QualifiedObject sn o) = toSQL sn <> "." <> toSQL o -qualifiedObjectToText :: ToTxt a => QualifiedObject a -> Text +qualifiedObjectToText :: (ToTxt a) => QualifiedObject a -> Text qualifiedObjectToText (QualifiedObject sn o) | sn == publicSchema = toTxt o | otherwise = getSchemaTxt sn <> "." <> toTxt o -snakeCaseQualifiedObject :: ToTxt a => QualifiedObject a -> Text +snakeCaseQualifiedObject :: (ToTxt a) => QualifiedObject a -> Text snakeCaseQualifiedObject (QualifiedObject sn o) | sn == publicSchema = toTxt o | otherwise = getSchemaTxt sn <> "_" <> toTxt o -getIdentifierQualifiedObject :: ToTxt a => QualifiedObject a -> Either QErr C.GQLNameIdentifier +getIdentifierQualifiedObject :: (ToTxt a) => QualifiedObject a -> Either QErr C.GQLNameIdentifier getIdentifierQualifiedObject obj@(QualifiedObject sn o) = do let tLst = if sn == publicSchema @@ -350,9 +356,10 @@ getIdentifierQualifiedObject obj@(QualifiedObject sn o) = do `onNothing` throw400 ValidationFailed ( "cannot include " - <> obj <<> " in the GraphQL schema because " + <> obj + <<> " in the GraphQL schema because " <> C.toSnakeT tLst - <<> " is not a valid GraphQL identifier" + <<> " is not a valid GraphQL identifier" ) namingConventionSupport :: SupportedNamingCase @@ -361,12 +368,13 @@ namingConventionSupport = AllConventions qualifiedObjectToName :: (ToTxt a, MonadError QErr m) => QualifiedObject a -> m G.Name qualifiedObjectToName objectName = do let textName = snakeCaseQualifiedObject objectName - onNothing (G.mkName textName) $ - throw400 ValidationFailed $ - "cannot include " - <> objectName <<> " in the GraphQL schema because " - <> textName - <<> " is not a valid GraphQL identifier" + onNothing (G.mkName textName) + $ throw400 ValidationFailed + $ "cannot include " + <> objectName + <<> " in the GraphQL schema because " + <> textName + <<> " is not a valid GraphQL identifier" -- | Represents a database table qualified with the schema name. 
type QualifiedTable = QualifiedObject TableName @@ -570,8 +578,8 @@ instance FromJSON PGScalarType where parseJSON (Object o) = do typeType <- o .: "type" typeName <- o .: "name" - pure $ - case typeType of + pure + $ case typeType of PGKindEnum -> PGEnumScalar typeName PGKindComposite -> PGCompositeScalar typeName _ -> textToPGScalarType typeName @@ -638,8 +646,8 @@ instance NFData PGTypeKind instance Hashable PGTypeKind instance FromJSON PGTypeKind where - parseJSON = withText "postgresTypeKind" $ - \t -> pure $ case t of + parseJSON = withText "postgresTypeKind" + $ \t -> pure $ case t of "b" -> PGKindBase "c" -> PGKindComposite "d" -> PGKindDomain @@ -712,7 +720,7 @@ instance NFData PGRawFunctionInfo $(deriveJSON hasuraJSON ''PGRawFunctionInfo) -mkScalarTypeName :: MonadError QErr m => PGScalarType -> m G.Name +mkScalarTypeName :: (MonadError QErr m) => PGScalarType -> m G.Name mkScalarTypeName PGInteger = pure GName._Int mkScalarTypeName PGBoolean = pure GName._Boolean mkScalarTypeName PGFloat = pure GName._Float @@ -731,7 +739,8 @@ mkScalarTypeName (PGCompositeScalar compositeScalarType) = `onNothing` throw400 ValidationFailed ( "cannot use SQL type " - <> compositeScalarType <<> " in the GraphQL schema because its name is not a " + <> compositeScalarType + <<> " in the GraphQL schema because its name is not a " <> "valid GraphQL identifier" ) mkScalarTypeName scalarType = @@ -739,7 +748,8 @@ mkScalarTypeName scalarType = `onNothing` throw400 ValidationFailed ( "cannot use SQL type " - <> scalarType <<> " in the GraphQL schema because its name is not a " + <> scalarType + <<> " in the GraphQL schema because its name is not a " <> "valid GraphQL identifier" ) diff --git a/server/src-lib/Hasura/Backends/Postgres/SQL/Value.hs b/server/src-lib/Hasura/Backends/Postgres/SQL/Value.hs index 0fb3b9cf2ee..aff6512ac8e 100644 --- a/server/src-lib/Hasura/Backends/Postgres/SQL/Value.hs +++ b/server/src-lib/Hasura/Backends/Postgres/SQL/Value.hs @@ -151,8 +151,8 @@ withScalarTypeAnn :: PGScalarType -> S.SQLExp -> S.SQLExp withScalarTypeAnn colTy v = S.SETyAnn v . S.mkTypeAnn $ CollectableTypeScalar colTy withTypeAnn :: CollectableType PGScalarType -> S.SQLExp -> S.SQLExp -withTypeAnn ty expr = flip S.SETyAnn (S.mkTypeAnn ty) $ - case ty of +withTypeAnn ty expr = flip S.SETyAnn (S.mkTypeAnn ty) + $ case ty of CollectableTypeScalar baseTy -> withConstructorFn baseTy expr CollectableTypeArray _ -> expr @@ -255,17 +255,17 @@ txtEncodedVal = \case PGNull _ -> TENull PGValJSON (PG.JSON j) -> - TELit $ - TL.toStrict $ - AE.encodeToLazyText j + TELit + $ TL.toStrict + $ AE.encodeToLazyText j PGValJSONB (PG.JSONB j) -> - TELit $ - TL.toStrict $ - AE.encodeToLazyText j + TELit + $ TL.toStrict + $ AE.encodeToLazyText j PGValGeo o -> - TELit $ - TL.toStrict $ - AE.encodeToLazyText o + TELit + $ TL.toStrict + $ AE.encodeToLazyText o PGValRaster r -> TELit $ TC.toText $ getRasterWKB r PGValUUID u -> TELit $ UUID.toText u PGValLtree (Ltree t) -> TELit t @@ -304,7 +304,7 @@ binEncoder = \case PGValUnknown t -> (PTI.auto, Just (TE.encodeUtf8 t, PQ.Text)) PGValArray s -> (PTI.auto, Just (TE.encodeUtf8 $ buildArrayLiteral s, PQ.Text)) -formatTimestamp :: FormatTime t => t -> Text +formatTimestamp :: (FormatTime t) => t -> Text formatTimestamp = T.pack . 
formatTime defaultTimeLocale "%0Y-%m-%dT%T%QZ" txtEncoder :: PGScalarValue -> S.SQLExp diff --git a/server/src-lib/Hasura/Backends/Postgres/Schema/OnConflict.hs b/server/src-lib/Hasura/Backends/Postgres/Schema/OnConflict.hs index 35ae3bc7f1c..237125d7849 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Schema/OnConflict.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Schema/OnConflict.hs @@ -101,8 +101,8 @@ conflictObjectParser tableInfo maybeUpdatePerms constraints = do ( partialSQLExpToUnpreparedValue <$> upiSet, fmap partialSQLExpToUnpreparedValue <$> upiFilter ) - pure $ - P.object objectName (Just objectDesc) do + pure + $ P.object objectName (Just objectDesc) do constraintField <- P.field Name._constraint Nothing constraintParser let updateColumnsField = P.fieldWithDefault (applyFieldNameCaseIdentifier tCase updateColumnsFieldName) Nothing (G.VList []) (P.list updateColumnsEnum) @@ -115,16 +115,17 @@ conflictObjectParser tableInfo maybeUpdatePerms constraints = do -- this can only happen if the placeholder was used (parseError "erroneous column name") - pure $ - let UniqueConstraint (Constraint {_cName}) _ = constraintField - constraintTarget = IR.CTConstraint _cName - in case updateColumns of - [] -> IR.OCCDoNothing $ Just constraintTarget - _ -> - IR.OCCUpdate $ - IR.OnConflictClauseData constraintTarget updateColumns presetColumns $ - IR.BoolAnd $ - updateFilter : maybeToList whereExp + pure + $ let UniqueConstraint (Constraint {_cName}) _ = constraintField + constraintTarget = IR.CTConstraint _cName + in case updateColumns of + [] -> IR.OCCDoNothing $ Just constraintTarget + _ -> + IR.OCCUpdate + $ IR.OnConflictClauseData constraintTarget updateColumns presetColumns + $ IR.BoolAnd + $ updateFilter + : maybeToList whereExp -- | Constructs a Parser for the name of the constraints on a given table. 
-- diff --git a/server/src-lib/Hasura/Backends/Postgres/Schema/Select.hs b/server/src-lib/Hasura/Backends/Postgres/Schema/Select.hs index e880d4cf5a3..63148305797 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Schema/Select.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Schema/Select.hs @@ -76,17 +76,17 @@ selectFunction mkRootFieldName fi@FunctionInfo {..} description = runMaybeT do functionArgsParser <- customSQLFunctionArgs fi _fiGQLName _fiGQLArgsName let argsParser = liftA2 (,) functionArgsParser tableArgsParser functionFieldName = runMkRootFieldName mkRootFieldName _fiGQLName - pure $ - P.subselection functionFieldName description argsParser selectionSetParser - <&> \((funcArgs, tableArgs'), fields) -> - IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = IR.FromFunction _fiSQLName funcArgs Nothing, - IR._asnPerm = tablePermissionsInfo selectPermissions, - IR._asnArgs = tableArgs', - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Just tCase - } + pure + $ P.subselection functionFieldName description argsParser selectionSetParser + <&> \((funcArgs, tableArgs'), fields) -> + IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = IR.FromFunction _fiSQLName funcArgs Nothing, + IR._asnPerm = tablePermissionsInfo selectPermissions, + IR._asnArgs = tableArgs', + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Just tCase + } where returnFunctionParser = case _fiJsonAggSelect of @@ -127,25 +127,25 @@ selectFunctionAggregate mkRootFieldName fi@FunctionInfo {..} description = runMa argsParser = liftA2 (,) functionArgsParser tableArgsParser selectionName = mkTypename (applyTypeNameCaseIdentifier tCase $ mkTableAggregateTypeName tableGQLName) aggregationParser = - fmap (parsedSelectionsToFields IR.TAFExp) $ - P.nonNullableParser $ - P.selectionSet - selectionName - Nothing - [ IR.TAFNodes xNodesAgg <$> P.subselection_ Name._nodes Nothing nodesParser, - IR.TAFAgg <$> P.subselection_ Name._aggregate Nothing aggregateParser - ] - pure $ - P.subselection aggregateFieldName description argsParser aggregationParser - <&> \((funcArgs, tableArgs'), fields) -> - IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = IR.FromFunction _fiSQLName funcArgs Nothing, - IR._asnPerm = tablePermissionsInfo selectPermissions, - IR._asnArgs = tableArgs', - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Just tCase - } + fmap (parsedSelectionsToFields IR.TAFExp) + $ P.nonNullableParser + $ P.selectionSet + selectionName + Nothing + [ IR.TAFNodes xNodesAgg <$> P.subselection_ Name._nodes Nothing nodesParser, + IR.TAFAgg <$> P.subselection_ Name._aggregate Nothing aggregateParser + ] + pure + $ P.subselection aggregateFieldName description argsParser aggregationParser + <&> \((funcArgs, tableArgs'), fields) -> + IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = IR.FromFunction _fiSQLName funcArgs Nothing, + IR._asnPerm = tablePermissionsInfo selectPermissions, + IR._asnArgs = tableArgs', + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Just tCase + } selectFunctionConnection :: forall pgKind r m n. 
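
The other mechanical rewrite running through these files wraps single-constraint contexts in parentheses, e.g. `qualifiedObjectToText :: ToTxt a => QualifiedObject a -> Text` above becomes `(ToTxt a) =>`. A minimal sketch with a hypothetical class; both forms are accepted by GHC and mean the same thing, the parenthesised one simply matching how multi-constraint contexts are already written:

    class ToLabel a where
      toLabel :: a -> String

    -- Before: describe :: ToLabel a => a -> String
    -- After, as in the reformatted signatures in this diff:
    describe :: (ToLabel a) => a -> String
    describe = toLabel
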
@@ -177,24 +177,24 @@ selectFunctionConnection mkRootFieldName fi@FunctionInfo {..} description pkeyCo tableConnectionArgsParser <- tableConnectionArgs pkeyColumns returnTableInfo functionArgsParser <- customSQLFunctionArgs fi _fiGQLName _fiGQLArgsName let argsParser = liftA2 (,) functionArgsParser tableConnectionArgsParser - pure $ - P.subselection fieldName description argsParser selectionSetParser - <&> \((funcArgs, (args, split, slice)), fields) -> - IR.ConnectionSelect - { IR._csXRelay = xRelayInfo, - IR._csPrimaryKeyColumns = pkeyColumns, - IR._csSplit = split, - IR._csSlice = slice, - IR._csSelect = - IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = IR.FromFunction _fiSQLName funcArgs Nothing, - IR._asnPerm = tablePermissionsInfo selectPermissions, - IR._asnArgs = args, - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Just tCase - } - } + pure + $ P.subselection fieldName description argsParser selectionSetParser + <&> \((funcArgs, (args, split, slice)), fields) -> + IR.ConnectionSelect + { IR._csXRelay = xRelayInfo, + IR._csPrimaryKeyColumns = pkeyColumns, + IR._csSplit = split, + IR._csSlice = slice, + IR._csSelect = + IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = IR.FromFunction _fiSQLName funcArgs Nothing, + IR._asnPerm = tablePermissionsInfo selectPermissions, + IR._asnArgs = args, + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Just tCase + } + } -- | Computed field parser computedFieldPG :: @@ -225,8 +225,8 @@ computedFieldPG ComputedFieldInfo {..} parentTable tableInfo = runMaybeT do fieldArgsParser = do args <- functionArgsParser colOp <- scalarSelectionArgumentsParser @('Postgres pgKind) $ ColumnScalar scalarReturnType - pure $ - IR.AFComputedField + pure + $ IR.AFComputedField _cfiXComputedFieldInfo _cfiName ( IR.CFSScalar @@ -247,19 +247,19 @@ computedFieldPG ComputedFieldInfo {..} parentTable tableInfo = runMaybeT do selectionSetParser <- MaybeT (fmap (P.multiple . 
P.nonNullableParser) <$> tableSelectionSet otherTableInfo) selectArgsParser <- lift $ tableArguments otherTableInfo let fieldArgsParser = liftA2 (,) functionArgsParser selectArgsParser - pure $ - P.subselection fieldName fieldDescription fieldArgsParser selectionSetParser - <&> \((functionArgs', args), fields) -> - IR.AFComputedField _cfiXComputedFieldInfo _cfiName $ - IR.CFSTable JASMultipleRows $ - IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = IR.FromFunction (_cffName _cfiFunction) functionArgs' Nothing, - IR._asnPerm = tablePermissionsInfo remotePerms, - IR._asnArgs = args, - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Just tCase - } + pure + $ P.subselection fieldName fieldDescription fieldArgsParser selectionSetParser + <&> \((functionArgs', args), fields) -> + IR.AFComputedField _cfiXComputedFieldInfo _cfiName + $ IR.CFSTable JASMultipleRows + $ IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = IR.FromFunction (_cffName _cfiFunction) functionArgs' Nothing, + IR._asnPerm = tablePermissionsInfo remotePerms, + IR._asnArgs = args, + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Just tCase + } where fieldDescription :: Maybe G.Description fieldDescription = G.Description <$> _cfiDescription @@ -292,8 +292,8 @@ customSQLFunctionArgs :: SchemaT r m (InputFieldsParser n (FunctionArgsExp ('Postgres pgKind) (IR.UnpreparedValue ('Postgres pgKind)))) customSQLFunctionArgs FunctionInfo {..} functionName functionArgsName = functionArgs - ( FTACustomFunction $ - CustomFunctionNames + ( FTACustomFunction + $ CustomFunctionNames { cfnFunctionName = functionName, cfnArgsName = functionArgsName } @@ -331,60 +331,63 @@ functionArgs functionTrackedAs (toList -> inputArgs) = do defaultArguments = FunctionArgsExp (snd <$> session) HashMap.empty if - | length session > 1 -> - -- We somehow found more than one session argument; this should never - -- happen and is an error on our side. - throw500 "there shouldn't be more than one session argument" - | null optional && null mandatory -> - -- There are no user-provided arguments to the function: there will be - -- no args field. - pure $ pure defaultArguments - | otherwise -> do - -- There are user-provided arguments: we need to parse an args object. - argumentParsers <- sequenceA $ optional <> mandatory - objectName <- - mkTypename . applyTypeNameCaseIdentifier tCase - <$> case functionTrackedAs of - FTAComputedField computedFieldName _sourceName tableName -> do - tableInfo <- askTableInfo tableName - computedFieldGQLName <- textToName $ computedFieldNameToText computedFieldName - tableGQLName <- getTableIdentifierName @('Postgres pgKind) tableInfo - pure $ mkFunctionArgsTypeName computedFieldGQLName tableGQLName - FTACustomFunction (CustomFunctionNames {cfnArgsName}) -> - pure $ C.fromCustomName cfnArgsName - let fieldName = Name._args - fieldDesc = - case functionTrackedAs of - FTAComputedField computedFieldName _sourceName tableName -> - G.Description $ - "input parameters for computed field " - <> computedFieldName <<> " defined on table " <>> tableName - FTACustomFunction (CustomFunctionNames {cfnFunctionName}) -> - G.Description $ "input parameters for function " <>> cfnFunctionName - objectParser = - P.object objectName Nothing (sequenceA argumentParsers) `P.bind` \arguments -> do - -- After successfully parsing, we create a dictionary of the parsed fields - -- and we re-iterate through the original list of sql arguments, now with - -- the knowledge of their graphql name. 
- let foundArguments = HashMap.fromList $ catMaybes arguments <> session - argsWithNames = zip names inputArgs + | length session > 1 -> + -- We somehow found more than one session argument; this should never + -- happen and is an error on our side. + throw500 "there shouldn't be more than one session argument" + | null optional && null mandatory -> + -- There are no user-provided arguments to the function: there will be + -- no args field. + pure $ pure defaultArguments + | otherwise -> do + -- There are user-provided arguments: we need to parse an args object. + argumentParsers <- sequenceA $ optional <> mandatory + objectName <- + mkTypename + . applyTypeNameCaseIdentifier tCase + <$> case functionTrackedAs of + FTAComputedField computedFieldName _sourceName tableName -> do + tableInfo <- askTableInfo tableName + computedFieldGQLName <- textToName $ computedFieldNameToText computedFieldName + tableGQLName <- getTableIdentifierName @('Postgres pgKind) tableInfo + pure $ mkFunctionArgsTypeName computedFieldGQLName tableGQLName + FTACustomFunction (CustomFunctionNames {cfnArgsName}) -> + pure $ C.fromCustomName cfnArgsName + let fieldName = Name._args + fieldDesc = + case functionTrackedAs of + FTAComputedField computedFieldName _sourceName tableName -> + G.Description + $ "input parameters for computed field " + <> computedFieldName + <<> " defined on table " + <>> tableName + FTACustomFunction (CustomFunctionNames {cfnFunctionName}) -> + G.Description $ "input parameters for function " <>> cfnFunctionName + objectParser = + P.object objectName Nothing (sequenceA argumentParsers) `P.bind` \arguments -> do + -- After successfully parsing, we create a dictionary of the parsed fields + -- and we re-iterate through the original list of sql arguments, now with + -- the knowledge of their graphql name. + let foundArguments = HashMap.fromList $ catMaybes arguments <> session + argsWithNames = zip names inputArgs - -- All elements (in the original sql order) that are found in the result map - are treated as positional arguments, whether they were originally named or - not. - (positional, left) <- spanMaybeM (\(name, _) -> pure $ HashMap.lookup name foundArguments) argsWithNames + -- All elements (in the original sql order) that are found in the result map + -- are treated as positional arguments, whether they were originally named or + -- not. + (positional, left) <- spanMaybeM (\(name, _) -> pure $ HashMap.lookup name foundArguments) argsWithNames - -- If there are arguments left, it means we found one that was not passed - positionally. As a result, any remaining argument will have to be passed - by name. We fail with a parse error if we encounter a positional sql - argument (that does not have a name in the sql function), as: - -- * only the last positional arguments can be omitted; - -- * it has no name we can use. - We also fail if we find a mandatory argument that was not - provided by the user. - named <- HashMap.fromList . catMaybes <$> traverse (namedArgument foundArguments) left - pure $ FunctionArgsExp positional named - pure $ P.field fieldName (Just fieldDesc) objectParser + -- If there are arguments left, it means we found one that was not passed + -- positionally. As a result, any remaining argument will have to be passed + -- by name. We fail with a parse error if we encounter a positional sql + -- argument (that does not have a name in the sql function), as: + -- * only the last positional arguments can be omitted; + -- * it has no name we can use. 
+ -- We also fail if we find a mandatory argument that was not + -- provided by the user. + named <- HashMap.fromList . catMaybes <$> traverse (namedArgument foundArguments) left + pure $ FunctionArgsExp positional named + pure $ P.field fieldName (Just fieldDesc) objectParser where sessionPlaceholder :: Postgres.ArgumentExp (IR.UnpreparedValue b) sessionPlaceholder = Postgres.AEInput IR.UVSession @@ -440,8 +443,8 @@ functionArgs functionTrackedAs (toList -> inputArgs) = do Just _ -> pure $ Just (name, parsedValue) Nothing -> P.parseErrorWith P.NotSupported "Only last set of positional arguments can be omitted" Nothing -> - whenMaybe (not $ Postgres.unHasDefault $ Postgres.faHasDefault arg) $ - P.parseErrorWith P.NotSupported "Non default arguments cannot be omitted" + whenMaybe (not $ Postgres.unHasDefault $ Postgres.faHasDefault arg) + $ P.parseErrorWith P.NotSupported "Non default arguments cannot be omitted" buildFunctionQueryFieldsPG :: forall r m n pgKind. @@ -456,9 +459,13 @@ buildFunctionQueryFieldsPG :: buildFunctionQueryFieldsPG mkRootFieldName functionName functionInfo tableName = do let -- select function funcDesc = - Just . G.Description $ - flip fromMaybe (_fiComment functionInfo) $ - "execute function " <> functionName <<> " which returns " <>> tableName + Just + . G.Description + $ flip fromMaybe (_fiComment functionInfo) + $ "execute function " + <> functionName + <<> " which returns " + <>> tableName -- select function agg funcAggDesc = Just $ G.Description $ "execute function " <> functionName <<> " and query aggregates on result of table type " <>> tableName diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/BoolExp.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/BoolExp.hs index 4a27143876e..8a406313db5 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/BoolExp.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/BoolExp.hs @@ -166,9 +166,9 @@ translateBoolExp = \case functionAlias <- S.toTableAlias <$> freshIdentifier function let functionIdentifier = S.tableAliasToIdentifier functionAlias functionExp = - mkComputedFieldFunctionExp currTableReference function sessionArgPresence $ - Just $ - functionAlias + mkComputedFieldFunctionExp currTableReference function sessionArgPresence + $ Just + $ functionAlias S.mkExists (S.FIFunc functionExp) <$> withCurrentTable (S.QualifiedIdentifier functionIdentifier Nothing) (translateBoolExp be) AVAggregationPredicates aggPreds -> translateAVAggregationPredicates aggPreds @@ -182,11 +182,11 @@ freshIdentifier obj = do curVarNum <- get put $ curVarNum + 1 let newIdentifier = - Identifier $ - "_be_" - <> tshow curVarNum - <> "_" - <> snakeCaseQualifiedObject obj + Identifier + $ "_be_" + <> tshow curVarNum + <> "_" + <> snakeCaseQualifiedObject obj return newIdentifier identifierWithSuffix :: (ToTxt a) => QualifiedObject a -> Text -> Identifier @@ -290,8 +290,8 @@ translateAggPredsSubselect fromExp = pure $ S.FISimple relTableName $ Just $ S.toTableAlias relTableNameAlias -- WHERE <tableRelExp> AND <rowPermExp> AND <mFilter> whereExp = sqlAnd $ [tableRelExp, rowPermExp] ++ maybeToList mFilter - pure $ - S.mkSelFromItem + pure + $ S.mkSelFromItem S.mkSelect { S.selExtr = [extractorsExp], S.selFrom = Just $ S.FromExp fromExp, S.selWhere = Just $ S.WhereFrag whereExp } @@ -324,13 +324,14 @@ translateAggPredArguments predArgs relTableNameIdentifier = translateTableRelationship :: HashMap PGCol PGCol -> TableIdentifier -> BoolExpM S.BoolExp translateTableRelationship colMapping relTableNameIdentifier = do BoolExpCtx {currTableReference} <- ask - pure $ - sqlAnd $ - flip map 
(HashMap.toList colMapping) $ \(lCol, rCol) -> - S.BECompare - S.SEQ - (S.mkIdentifierSQLExp (S.QualifiedIdentifier relTableNameIdentifier Nothing) rCol) - (S.mkIdentifierSQLExp currTableReference lCol) + pure + $ sqlAnd + $ flip map (HashMap.toList colMapping) + $ \(lCol, rCol) -> + S.BECompare + S.SEQ + (S.mkIdentifierSQLExp (S.QualifiedIdentifier relTableNameIdentifier Nothing) rCol) + (S.mkIdentifierSQLExp currTableReference lCol) data LHSField b = LColumn FieldName diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Delete.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Delete.hs index e25a034fafc..774dde9277b 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Delete.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Delete.hs @@ -17,7 +17,7 @@ import Hasura.RQL.Types.Backend import Hasura.RQL.Types.BackendType mkDelete :: - Backend ('Postgres pgKind) => + (Backend ('Postgres pgKind)) => AnnDel ('Postgres pgKind) -> S.SQLDelete mkDelete (AnnDel tn (fltr, wc) _ _ _) = diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Insert.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Insert.hs index 58073515153..f297846f082 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Insert.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Insert.hs @@ -19,7 +19,7 @@ import Hasura.RQL.Types.Backend import Hasura.RQL.Types.BackendType mkInsertCTE :: - Backend ('Postgres pgKind) => + (Backend ('Postgres pgKind)) => InsertQueryP1 ('Postgres pgKind) -> S.TopLevelCTE mkInsertCTE (InsertQueryP1 tn cols vals conflict (insCheck, updCheck) _ _) = @@ -40,7 +40,7 @@ mkInsertCTE (InsertQueryP1 tn cols vals conflict (insCheck, updCheck) _ _) = toSQLBool = toSQLBoolExp $ S.QualTable tn toSQLConflict :: - Backend ('Postgres pgKind) => + (Backend ('Postgres pgKind)) => QualifiedTable -> OnConflictClause ('Postgres pgKind) S.SQLExp -> S.SQLConflict @@ -91,8 +91,8 @@ insertOrUpdateCheckExpr :: Maybe S.BoolExp -> S.Extractor insertOrUpdateCheckExpr qt (Just _conflict) insCheck (Just updCheck) = - asCheckErrorExtractor $ - S.SECond + asCheckErrorExtractor + $ S.SECond ( S.BECompare S.SEQ (S.SEQIdentifier (S.QIdentifier (S.mkQual qt) (Identifier "xmax"))) diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Mutation.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Mutation.hs index 8806e2744f2..e0f41d48343 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Mutation.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Mutation.hs @@ -50,13 +50,16 @@ mkSelectExpFromColumnValues qt allCols = \case extractor = S.selectStar' $ S.QualifiedIdentifier rowIdentifier $ Just $ S.TypeAnn $ toSQLTxt qt sortedCols = sortCols allCols mkTupsFromColVal colVal = - fmap S.TupleExp $ - forM sortedCols $ \ci -> do + fmap S.TupleExp + $ forM sortedCols + $ \ci -> do let pgCol = ciColumn ci val <- - onNothing (HashMap.lookup pgCol colVal) $ - throw500 $ - "column " <> pgCol <<> " not found in returning values" + onNothing (HashMap.lookup pgCol colVal) + $ throw500 + $ "column " + <> pgCol + <<> " not found in returning values" pure $ txtEncodedToSQLExp (ciType ci) val selNoRows = diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Returning.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Returning.hs index 2f8bdb535c2..d94b4e15f59 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Returning.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Returning.hs @@ -63,8 +63,8 @@ pgColsToSelFlds :: [ColumnInfo 
('Postgres pgKind)] -> [(FieldName, AnnField ('Postgres pgKind))] pgColsToSelFlds cols = - flip map cols $ - \pgColInfo -> + flip map cols + $ \pgColInfo -> ( fromCol @('Postgres pgKind) $ ciColumn pgColInfo, mkAnnColumnField (ciColumn pgColInfo) (ciType pgColInfo) Nothing Nothing -- ^^ Nothing because mutations aren't supported @@ -96,8 +96,8 @@ mkMutFldExp :: mkMutFldExp cteAlias preCalAffRows strfyNum tCase = \case MCount -> let countExp = - S.SESelect $ - S.mkSelect + S.SESelect + $ S.mkSelect { S.selExtr = [S.Extractor S.countStar Nothing], S.selFrom = Just $ S.FromExp $ pure $ S.FIIdentifier cteAlias } @@ -168,8 +168,8 @@ mkMutationOutputExp qt allCols preCalAffRows cte mutOutput strfyNum tCase = allColumnsAlias = S.mkTableAlias $ "aca__" <> snakeCaseQualifiedObject qt allColumnsIdentifier = S.tableAliasToIdentifier allColumnsAlias allColumnsSelect = - S.CTESelect $ - S.mkSelect + S.CTESelect + $ S.mkSelect { S.selExtr = map (S.mkExtr . ciColumn) (sortCols allCols), S.selFrom = Just $ S.mkIdenFromExp mutationResultIdentifier } @@ -210,10 +210,10 @@ mkMutationOutputExp qt allCols preCalAffRows cte mutOutput strfyNum tCase = mkCheckErrorExp :: TableIdentifier -> S.SQLExp mkCheckErrorExp alias = let boolAndCheckConstraint = - S.handleIfNull (S.SEBool $ S.BELit True) $ - S.SEFnApp "bool_and" [S.SEIdentifier checkConstraintIdentifier] Nothing - in S.SESelect $ - S.mkSelect + S.handleIfNull (S.SEBool $ S.BELit True) + $ S.SEFnApp "bool_and" [S.SEIdentifier checkConstraintIdentifier] Nothing + in S.SESelect + $ S.mkSelect { S.selExtr = [S.Extractor boolAndCheckConstraint Nothing], S.selFrom = Just $ S.mkIdenFromExp alias } diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Aggregate.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Aggregate.hs index 8025613cf43..96feb3dfbbd 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Aggregate.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Aggregate.hs @@ -50,9 +50,9 @@ mkAggregateSelect annAggSel = do let ( (selectSource, nodeExtractors, topExtractor), SelectWriter {_swJoinTree = joinTree, _swCustomSQLCTEs = customSQLCTEs} ) = - runWriter $ - flip runReaderT strfyNum $ - processAnnAggregateSelect sourcePrefixes rootFieldName annAggSel + runWriter + $ flip runReaderT strfyNum + $ processAnnAggregateSelect sourcePrefixes rootFieldName annAggSel -- select the relevant columns and subquery we want to aggregate selectNode = SelectNode nodeExtractors joinTree -- aggregate the results into a top-level return value diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Connection.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Connection.hs index d0bd8fe22ea..66b98cf8038 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Connection.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Connection.hs @@ -58,17 +58,17 @@ mkConnectionSelect connectionSelect = do let ( (connectionSource, topExtractor, nodeExtractors), SelectWriter {_swJoinTree = joinTree, _swCustomSQLCTEs = customSQLCTEs} ) = - runWriter $ - flip runReaderT strfyNum $ - processConnectionSelect - sourcePrefixes - rootFieldName - (S.toTableAlias rootIdentifier) - mempty - connectionSelect + runWriter + $ flip runReaderT strfyNum + $ processConnectionSelect + sourcePrefixes + rootFieldName + (S.toTableAlias rootIdentifier) + mempty + connectionSelect selectNode = - MultiRowSelectNode [topExtractor] $ - SelectNode nodeExtractors joinTree + MultiRowSelectNode 
[topExtractor] + $ SelectNode nodeExtractors joinTree selectWith = connectionToSelectWith (S.toTableAlias rootIdentifier) connectionSource selectNode tell customSQLCTEs diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Aliases.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Aliases.hs index 5daabdd9355..4cd3d901e83 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Aliases.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Aliases.hs @@ -46,8 +46,8 @@ mkAnnOrderByAlias tablePrefix parAls similarFields = \case AOCArrayAggregation relInfo _ aggOrderBy -> let rn = riName relInfo arrPfx = - mkArrayRelationSourcePrefix tablePrefix parAls similarFields $ - mkOrderByFieldName rn + mkArrayRelationSourcePrefix tablePrefix parAls similarFields + $ mkOrderByFieldName rn obAls = S.tableIdentifierToColumnAlias arrPfx <> "." <> mkAggregateOrderByAlias aggOrderBy in S.toColumnAlias obAls AOCComputedField cfOrderBy -> @@ -83,7 +83,7 @@ mkAggregateOrderByAlias = AAOCount -> "count" AAOOp opText _resultType col -> opText <> "." <> getPGColTxt (ciColumn col) -mkOrderByFieldName :: ToTxt a => a -> FieldName +mkOrderByFieldName :: (ToTxt a) => a -> FieldName mkOrderByFieldName name = FieldName $ toTxt name <> "." <> "order_by" @@ -94,8 +94,8 @@ mkArrayRelationSourcePrefix :: FieldName -> TableIdentifier mkArrayRelationSourcePrefix parentSourcePrefix parentFieldName similarFieldsMap fieldName = - mkArrayRelationTableIdentifier parentSourcePrefix parentFieldName $ - HashMap.lookupDefault [fieldName] fieldName similarFieldsMap + mkArrayRelationTableIdentifier parentSourcePrefix parentFieldName + $ HashMap.lookupDefault [fieldName] fieldName similarFieldsMap mkArrayRelationTableIdentifier :: TableIdentifier -> FieldName -> [FieldName] -> TableIdentifier mkArrayRelationTableIdentifier pfx parAls flds = @@ -109,9 +109,9 @@ mkArrayRelationAlias :: FieldName -> S.TableAlias mkArrayRelationAlias parentFieldName similarFieldsMap fieldName = - S.mkTableAlias $ - mkUniqArrayRelationAlias parentFieldName $ - HashMap.lookupDefault [fieldName] fieldName similarFieldsMap + S.mkTableAlias + $ mkUniqArrayRelationAlias parentFieldName + $ HashMap.lookupDefault [fieldName] fieldName similarFieldsMap -- array relationships are not grouped, so have to be prefixed by -- parent's alias diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Extractor.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Extractor.hs index 3f84352d726..1a412d01d5e 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Extractor.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Extractor.hs @@ -81,7 +81,7 @@ aggregateFieldsToExtractorExps sourcePrefix aggregateFields = mkAggregateOrderByExtractorAndFields :: forall pgKind. 
- Backend ('Postgres pgKind) => + (Backend ('Postgres pgKind)) => AnnotatedAggregateOrderBy ('Postgres pgKind) -> (S.Extractor, AggregateFields ('Postgres pgKind) S.SQLExp) mkAggregateOrderByExtractorAndFields annAggOrderBy = @@ -95,8 +95,8 @@ mkAggregateOrderByExtractorAndFields annAggOrderBy = pgType = ciType pgColumnInfo in ( S.Extractor (S.SEFnApp opText [S.SEIdentifier $ toIdentifier pgColumn] Nothing) alias, [ ( FieldName opText, - AFOp $ - AggregateOp + AFOp + $ AggregateOp opText [ ( fromCol @('Postgres pgKind) pgColumn, SFCol pgColumn pgType @@ -132,20 +132,20 @@ withJsonAggExtr permLimitSubQuery ordBy alias = rowIdentifier = S.mkQIdenExp subSelIdentifier alias extr = S.Extractor (mkSimpleJsonAgg rowIdentifier newOrderBy) Nothing fromExp = - S.FromExp $ - pure $ - S.mkSelFromItem subSelect $ - S.toTableAlias subSelAls + S.FromExp + $ pure + $ S.mkSelFromItem subSelect + $ S.toTableAlias subSelAls - in S.SESelect $ - S.mkSelect + in S.SESelect + $ S.mkSelect { S.selExtr = pure extr, S.selFrom = Just fromExp } mkSubSelect limit = let jsonRowExtr = - flip S.Extractor (Just alias) $ - S.mkQIdenExp unnestTableIdentifier alias + flip S.Extractor (Just alias) + $ S.mkQIdenExp unnestTableIdentifier alias obExtrs = flip map newOBAliases $ \a -> S.Extractor (S.mkQIdenExp unnestTableIdentifier a) $ Just $ S.toColumnAlias a in S.mkSelect @@ -156,16 +156,18 @@ } unnestFromItem = - let arrayAggItems = flip map (rowIdenExp : obCols) $ - \s -> S.SEFnApp "array_agg" [s] Nothing - in S.FIUnnest arrayAggItems (S.toTableAlias unnestTable) $ - alias : map S.toColumnAlias newOBAliases + let arrayAggItems = flip map (rowIdenExp : obCols) + $ \s -> S.SEFnApp "array_agg" [s] Nothing + in S.FIUnnest arrayAggItems (S.toTableAlias unnestTable) + $ alias + : map S.toColumnAlias newOBAliases newOrderBy = S.OrderByExp <$> NE.nonEmpty newOBItems (newOBItems, obCols, newOBAliases) = maybe ([], [], []) transformOrderBy ordBy - transformOrderBy (S.OrderByExp l) = unzip3 $ - flip map (zip (toList l) [1 ..]) $ \(obItem, i :: Int) -> + transformOrderBy (S.OrderByExp l) = unzip3 + $ flip map (zip (toList l) [1 ..]) + $ \(obItem, i :: Int) -> let iden = Identifier $ "ob_col_" <> tshow i in ( obItem {S.oExpression = S.SEIdentifier iden}, S.oExpression obItem, diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/GenerateSelect.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/GenerateSelect.hs index fe2426fbcea..81a01d662f8 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/GenerateSelect.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/GenerateSelect.hs @@ -87,19 +87,19 @@ generateSQLSelect joinCondition selectSource selectNode = -- function to create a joined from item from two from items leftOuterJoin current new = - S.FIJoin $ - S.JoinExpr current S.LeftOuter new $ - S.JoinOn $ - S.BELit True + S.FIJoin + $ S.JoinExpr current S.LeftOuter new + $ S.JoinOn + $ S.BELit True -- this is the from exp for the final select joinedFrom :: S.FromItem joinedFrom = - foldl' leftOuterJoin baseFromItem $ - map objectRelationToFromItem (HashMap.toList objectRelations) - <> map arrayRelationToFromItem (HashMap.toList arrayRelations) - <> map arrayConnectionToFromItem (HashMap.toList arrayConnections) - <> map computedFieldToFromItem (HashMap.toList computedFields) + foldl' leftOuterJoin baseFromItem + $ map objectRelationToFromItem (HashMap.toList objectRelations) + <> map 
arrayRelationToFromItem (HashMap.toList arrayRelations) + <> map arrayConnectionToFromItem (HashMap.toList arrayConnections) + <> map computedFieldToFromItem (HashMap.toList computedFields) objectRelationToFromItem :: (ObjectRelationSource, SelectNode) -> S.FromItem @@ -116,8 +116,8 @@ generateSQLSelect joinCondition selectSource selectNode = let ArrayRelationSource _ colMapping source = arrayRelationSource alias = S.toTableAlias $ _ssPrefix source select = - generateSQLSelectFromArrayNode source arraySelectNode $ - mkJoinCond baseSelectIdentifier colMapping + generateSQLSelectFromArrayNode source arraySelectNode + $ mkJoinCond baseSelectIdentifier colMapping in S.mkLateralFromItem select alias arrayConnectionToFromItem :: @@ -150,8 +150,8 @@ generateSQLSelectFromArrayNode selectSource (MultiRowSelectNode topExtractors se S.mkSelect { S.selExtr = topExtractors, S.selFrom = - Just $ - S.FromExp + Just + $ S.FromExp [ S.mkSelFromItem (generateSQLSelect joinCondition selectSource selectNode) $ S.toTableAlias @@ -161,8 +161,9 @@ generateSQLSelectFromArrayNode selectSource (MultiRowSelectNode topExtractors se mkJoinCond :: S.TableIdentifier -> HashMap PGCol PGCol -> S.BoolExp mkJoinCond baseTablepfx colMapn = - foldl' (S.BEBin S.AndOp) (S.BELit True) $ - flip map (HashMap.toList colMapn) $ \(lCol, rCol) -> + foldl' (S.BEBin S.AndOp) (S.BELit True) + $ flip map (HashMap.toList colMapn) + $ \(lCol, rCol) -> S.BECompare S.SEQ (S.mkQIdenExp baseTablepfx lCol) (S.mkSIdenExp rCol) connectionToSelectWith :: @@ -285,23 +286,23 @@ connectionToSelectWith rootSelectAlias arrayConnectionSource arraySelectNode = fromPageInfoSelection = let hasPrevPage = - S.SEBool $ - S.mkExists (S.FIIdentifier baseSelectIdentifier) $ - S.BECompare S.SLT (S.SEIdentifier rowNumberIdentifier) $ - S.SESelect $ - S.mkSelect - { S.selFrom = Just $ S.FromExp [S.FIIdentifier cursorsSelectAliasIdentifier], - S.selExtr = [S.Extractor (S.SEIdentifier startRowNumberIdentifier) Nothing] - } + S.SEBool + $ S.mkExists (S.FIIdentifier baseSelectIdentifier) + $ S.BECompare S.SLT (S.SEIdentifier rowNumberIdentifier) + $ S.SESelect + $ S.mkSelect + { S.selFrom = Just $ S.FromExp [S.FIIdentifier cursorsSelectAliasIdentifier], + S.selExtr = [S.Extractor (S.SEIdentifier startRowNumberIdentifier) Nothing] + } hasNextPage = - S.SEBool $ - S.mkExists (S.FIIdentifier baseSelectIdentifier) $ - S.BECompare S.SGT (S.SEIdentifier rowNumberIdentifier) $ - S.SESelect $ - S.mkSelect - { S.selFrom = Just $ S.FromExp [S.FIIdentifier cursorsSelectAliasIdentifier], - S.selExtr = [S.Extractor (S.SEIdentifier endRowNumberIdentifier) Nothing] - } + S.SEBool + $ S.mkExists (S.FIIdentifier baseSelectIdentifier) + $ S.BECompare S.SGT (S.SEIdentifier rowNumberIdentifier) + $ S.SESelect + $ S.mkSelect + { S.selFrom = Just $ S.FromExp [S.FIIdentifier cursorsSelectAliasIdentifier], + S.selExtr = [S.Extractor (S.SEIdentifier endRowNumberIdentifier) Nothing] + } select = S.mkSelect diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Helpers.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Helpers.hs index 59eeb509c6f..b16ac998dc8 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Helpers.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Helpers.hs @@ -66,8 +66,8 @@ mkFirstElementExp expIdentifier = mkLastElementExp :: S.SQLExp -> S.SQLExp mkLastElementExp expIdentifier = let arrayExp = S.SEFnApp "array_agg" [expIdentifier] Nothing - in S.SEArrayIndex arrayExp $ - 
S.SEFnApp "array_length" [arrayExp, S.intToSQLExp 1] Nothing + in S.SEArrayIndex arrayExp + $ S.SEFnApp "array_length" [arrayExp, S.intToSQLExp 1] Nothing cursorIdentifier :: Identifier cursorIdentifier = Identifier "__cursor" @@ -136,12 +136,12 @@ selectFromToFromItem prefix = \case FromTable tn -> S.FISimple tn Nothing FromIdentifier i -> S.FIIdentifier $ TableIdentifier $ unFIIdentifier i FromFunction qf args defListM -> - S.FIFunc $ - S.FunctionExp qf (fromTableRowArgs prefix args) $ - Just $ - S.mkFunctionAlias - qf - (fmap (fmap (first S.toColumnAlias)) defListM) + S.FIFunc + $ S.FunctionExp qf (fromTableRowArgs prefix args) + $ Just + $ S.mkFunctionAlias + qf + (fmap (fmap (first S.toColumnAlias)) defListM) FromStoredProcedure {} -> error "selectFromToFromItem: FromStoredProcedure" FromNativeQuery lm -> S.FIIdentifier (S.tableAliasToIdentifier $ nativeQueryNameToAlias (nqRootFieldName lm)) diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/JoinTree.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/JoinTree.hs index b9261eafa0d..07f955a34af 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/JoinTree.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/JoinTree.hs @@ -72,8 +72,8 @@ withWriteArrayRelation action = where updateJoinTree joinTree (source, topExtractor, nodeExtractors) = let arraySelectNode = - MultiRowSelectNode [topExtractor] $ - SelectNode nodeExtractors joinTree + MultiRowSelectNode [topExtractor] + $ SelectNode nodeExtractors joinTree in mempty {_jtArrayRelations = HashMap.singleton source arraySelectNode} withWriteArrayConnection :: @@ -92,8 +92,8 @@ withWriteArrayConnection action = where updateJoinTree joinTree (source, topExtractor, nodeExtractors) = let arraySelectNode = - MultiRowSelectNode [topExtractor] $ - SelectNode nodeExtractors joinTree + MultiRowSelectNode [topExtractor] + $ SelectNode nodeExtractors joinTree in mempty {_jtArrayConnections = HashMap.singleton source arraySelectNode} withWriteComputedFieldTableSet :: diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/OrderBy.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/OrderBy.hs index c95909533ce..3e285d6fc6e 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/OrderBy.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/OrderBy.hs @@ -112,10 +112,10 @@ processOrderByItems sourcePrefix' fieldAlias' similarArrayFields distOnCols = \c let ordByAlias = mkAnnOrderByAlias sourcePrefix fieldAlias similarArrayFields annObCol (ordByAlias,) <$> case annObCol of AOCColumn pgColInfo -> - pure $ - S.mkQIdenExp (mkBaseTableIdentifier sourcePrefix) $ - toIdentifier $ - ciColumn pgColInfo + pure + $ S.mkQIdenExp (mkBaseTableIdentifier sourcePrefix) + $ toIdentifier + $ ciColumn pgColInfo AOCObjectRelation relInfo relFilter rest -> withWriteObjectRelation $ do let RelInfo {riName = relName, riMapping = colMapping, riTarget = relTarget} = relInfo relSourcePrefix = mkObjectRelationTableAlias sourcePrefix relName @@ -174,8 +174,8 @@ processOrderByItems sourcePrefix' fieldAlias' similarArrayFields distOnCols = \c computedFieldSourcePrefix = mkComputedFieldTableIdentifier sourcePrefix fieldName (topExtractor, fields) = mkAggregateOrderByExtractorAndFields aggOrderBy fromItem = - selectFromToFromItem sourcePrefix $ - FromFunction _cfobFunction _cfobFunctionArgsExp Nothing + selectFromToFromItem sourcePrefix + $ FromFunction 
_cfobFunction _cfobFunctionArgsExp Nothing functionQual = S.QualifiedIdentifier (TableIdentifier $ qualifiedObjectToText _cfobFunction) Nothing selectSource = SelectSource @@ -241,11 +241,11 @@ processOrderByItems sourcePrefix' fieldAlias' similarArrayFields distOnCols = \c [OrderByItemG ('Postgres pgKind) (AnnotatedOrderByElement ('Postgres pgKind) (SQLExpression ('Postgres pgKind)), (S.ColumnAlias, SQLExpression ('Postgres pgKind)))] -> S.SQLExp mkCursorExp orderByItemExps = - S.applyJsonBuildObj $ - flip concatMap orderByItemExps $ - \orderByItemExp -> - let OrderByItemG _ (annObCol, (_, valExp)) _ = orderByItemExp - in annObColToJSONField valExp annObCol + S.applyJsonBuildObj + $ flip concatMap orderByItemExps + $ \orderByItemExp -> + let OrderByItemG _ (annObCol, (_, valExp)) _ = orderByItemExp + in annObColToJSONField valExp annObCol where mkAggOrderByValExp valExp = \case AAOCount -> [S.SELit "count", valExp] diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Process.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Process.hs index c4421133027..a471ac0ff2f 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Process.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Process.hs @@ -114,8 +114,8 @@ processSelectParams whereSource <- selectFromToQual selectFrom let fromItem = selectFromToFromItem (identifierToTableIdentifier $ _pfBase sourcePrefixes) selectFrom finalWhere = - toSQLBoolExp whereSource $ - maybe permFilter (andAnnBoolExps permFilter) whereM + toSQLBoolExp whereSource + $ maybe permFilter (andAnnBoolExps permFilter) whereM sortingAndSlicing = SortingAndSlicing selectSorting selectSlicing selectSource = SelectSource @@ -165,8 +165,8 @@ processSelectParams let cteName = nativeQueryNameToAlias (nqRootFieldName nq) -- emit the query itself to the Writer - tell $ - mempty + tell + $ mempty { _swCustomSQLCTEs = CustomSQLCTEs (HashMap.singleton cteName (nqInterpolatedQuery nq)) } @@ -211,9 +211,9 @@ processAnnAggregateSelect sourcePrefixes fieldAlias annAggSel = do annFieldExtr <- processAnnFields thisSourcePrefix fieldName similarArrayFields annFields tCase pure ( [annFieldExtr], - withJsonAggExtr permLimitSubQuery (orderByForJsonAgg selectSource) $ - S.toColumnAlias $ - toIdentifier fieldName + withJsonAggExtr permLimitSubQuery (orderByForJsonAgg selectSource) + $ S.toColumnAlias + $ toIdentifier fieldName ) TAFExp e -> pure @@ -222,13 +222,14 @@ processAnnAggregateSelect sourcePrefixes fieldAlias annAggSel = do ) let topLevelExtractor = - flip S.Extractor (Just $ S.toColumnAlias $ toIdentifier fieldAlias) $ - S.applyJsonBuildObj $ - flip concatMap (map (second snd) processedFields) $ - \(FieldName fieldText, fieldExp) -> [S.SELit fieldText, fieldExp] + flip S.Extractor (Just $ S.toColumnAlias $ toIdentifier fieldAlias) + $ S.applyJsonBuildObj + $ flip concatMap (map (second snd) processedFields) + $ \(FieldName fieldText, fieldExp) -> [S.SELit fieldText, fieldExp] nodeExtractors = - HashMap.fromList $ - concatMap (fst . snd) processedFields <> orderByAndDistinctExtrs + HashMap.fromList + $ concatMap (fst . 
snd) processedFields + <> orderByAndDistinctExtrs pure (selectSource, nodeExtractors, topLevelExtractor) where @@ -236,8 +237,9 @@ processAnnAggregateSelect sourcePrefixes fieldAlias annAggSel = do permLimit = _tpLimit tablePermissions orderBy = _saOrderBy tableArgs permLimitSubQuery = mkPermissionLimitSubQuery permLimit aggSelFields orderBy - similarArrayFields = HashMap.unions $ - flip map (map snd aggSelFields) $ \case + similarArrayFields = HashMap.unions + $ flip map (map snd aggSelFields) + $ \case TAFAgg _ -> mempty TAFNodes _ annFlds -> mkSimilarArrayFields annFlds orderBy @@ -256,15 +258,15 @@ processAnnAggregateSelect sourcePrefixes fieldAlias annAggSel = do then PLSQRequired limit else PLSQNotRequired where - hasAggregateField = flip any (map snd aggFields) $ - \case + hasAggregateField = flip any (map snd aggFields) + $ \case TAFAgg _ -> True _ -> False hasAggOrderBy = case orderBys of Nothing -> False - Just l -> flip any (concatMap toList $ toList l) $ - \case + Just l -> flip any (concatMap toList $ toList l) + $ \case AOCArrayAggregation {} -> True _ -> False @@ -301,8 +303,8 @@ processAnnFields sourcePrefix fieldAlias similarArrFields annFields tCase = do nativeQueryIdentifier = S.tableAliasToIdentifier cteName -- emit the query itself to the Writer - tell $ - mempty + tell + $ mempty { _swCustomSQLCTEs = CustomSQLCTEs (HashMap.singleton cteName (nqInterpolatedQuery nq)) } @@ -347,9 +349,10 @@ processAnnFields sourcePrefix fieldAlias similarArrFields annFields tCase = do Nothing -> pure computedFieldSQLExp Just caseBoolExp -> let boolExp = - S.simplifyBoolExp $ - toSQLBoolExp (S.QualifiedIdentifier baseTableIdentifier Nothing) $ - _accColCaseBoolExpField <$> caseBoolExp + S.simplifyBoolExp + $ toSQLBoolExp (S.QualifiedIdentifier baseTableIdentifier Nothing) + $ _accColCaseBoolExpField + <$> caseBoolExp in pure $ S.SECond boolExp computedFieldSQLExp S.SENull AFComputedField _ _ (CFSTable selectTy sel) -> withWriteComputedFieldTableSet $ do let computedFieldSourcePrefix = @@ -362,8 +365,8 @@ processAnnFields sourcePrefix fieldAlias similarArrFields annFields tCase = do sel let computedFieldTableSetSource = ComputedFieldTableSetSource fieldName selectSource extractor = - asJsonAggExtr selectTy (S.toColumnAlias fieldName) PLSQNotRequired $ - orderByForJsonAgg selectSource + asJsonAggExtr selectTy (S.toColumnAlias fieldName) PLSQNotRequired + $ orderByForJsonAgg selectSource pure ( computedFieldTableSetSource, extractor, @@ -381,46 +384,47 @@ processAnnFields sourcePrefix fieldAlias similarArrFields annFields tCase = do toSQLCol (AnnColumnField col typ asText colOpM caseBoolExpMaybe) = do strfyNum <- ask let sqlExpression = - withColumnOp colOpM $ - S.mkQIdenExp baseTableIdentifier col + withColumnOp colOpM + $ S.mkQIdenExp baseTableIdentifier col finalSQLExpression = -- Check out [SQL generation for inherited role] case caseBoolExpMaybe of Nothing -> sqlExpression Just caseBoolExp -> let boolExp = - S.simplifyBoolExp $ - toSQLBoolExp (S.QualifiedIdentifier baseTableIdentifier Nothing) $ - _accColCaseBoolExpField <$> caseBoolExp + S.simplifyBoolExp + $ toSQLBoolExp (S.QualifiedIdentifier baseTableIdentifier Nothing) + $ _accColCaseBoolExpField + <$> caseBoolExp in S.SECond boolExp sqlExpression S.SENull pure $ toJSONableExp strfyNum typ asText tCase finalSQLExpression fromScalarComputedField :: ComputedFieldScalarSelect ('Postgres pgKind) S.SQLExp -> m S.SQLExp fromScalarComputedField computedFieldScalar = do strfyNum <- ask - pure $ - toJSONableExp strfyNum (ColumnScalar ty) 
False Nothing $ - withColumnOp colOpM $ - S.SEFunction $ - S.FunctionExp fn (fromTableRowArgs sourcePrefix args) Nothing + pure + $ toJSONableExp strfyNum (ColumnScalar ty) False Nothing + $ withColumnOp colOpM + $ S.SEFunction + $ S.FunctionExp fn (fromTableRowArgs sourcePrefix args) Nothing where ComputedFieldScalarSelect fn args ty colOpM = computedFieldScalar mkNodeId :: SourceName -> QualifiedTable -> PrimaryKeyColumns ('Postgres pgKind) -> S.SQLExp mkNodeId _sourceName (QualifiedObject tableSchema tableName) pkeyColumns = let columnInfoToSQLExp pgColumnInfo = - toJSONableExp Options.Don'tStringifyNumbers (ciType pgColumnInfo) False Nothing $ - S.mkQIdenExp (mkBaseTableIdentifier sourcePrefix) $ - ciColumn pgColumnInfo + toJSONableExp Options.Don'tStringifyNumbers (ciType pgColumnInfo) False Nothing + $ S.mkQIdenExp (mkBaseTableIdentifier sourcePrefix) + $ ciColumn pgColumnInfo in -- See Note [Relay Node id]. - encodeBase64 $ - flip S.SETyAnn S.textTypeAnn $ - S.applyJsonBuildArray $ - [ S.intToSQLExp $ nodeIdVersionInt currentNodeIdVersion, - S.SELit (getSchemaTxt tableSchema), - S.SELit (toTxt tableName) - ] - <> map columnInfoToSQLExp (toList pkeyColumns) + encodeBase64 + $ flip S.SETyAnn S.textTypeAnn + $ S.applyJsonBuildArray + $ [ S.intToSQLExp $ nodeIdVersionInt currentNodeIdVersion, + S.SELit (getSchemaTxt tableSchema), + S.SELit (toTxt tableName) + ] + <> map columnInfoToSQLExp (toList pkeyColumns) withColumnOp :: Maybe S.ColumnOp -> S.SQLExp -> S.SQLExp withColumnOp colOpM sqlExp = case colOpM of @@ -434,16 +438,16 @@ mkSimilarArrayFields :: Maybe (NE.NonEmpty (AnnotatedOrderByItemG ('Postgres pgKind) v)) -> SimilarArrayFields mkSimilarArrayFields annFields maybeOrderBys = - HashMap.fromList $ - flip map allTuples $ - \(relNameAndArgs, fieldName) -> (fieldName, getSimilarFields relNameAndArgs) + HashMap.fromList + $ flip map allTuples + $ \(relNameAndArgs, fieldName) -> (fieldName, getSimilarFields relNameAndArgs) where getSimilarFields relNameAndArgs = map snd $ filter ((== relNameAndArgs) . fst) allTuples allTuples = arrayRelationTuples <> aggOrderByRelationTuples arrayRelationTuples = let arrayFields = mapMaybe getAnnArr annFields - in flip map arrayFields $ - \(f, relSel) -> (getArrayRelNameAndSelectArgs relSel, f) + in flip map arrayFields + $ \(f, relSel) -> (getArrayRelNameAndSelectArgs relSel, f) getAnnArr :: (a, AnnFieldG ('Postgres pgKind) r v) -> @@ -458,8 +462,8 @@ mkSimilarArrayFields annFields maybeOrderBys = ( (relName, noSelectArgs), fieldName ) - in map mkItem $ - maybe + in map mkItem + $ maybe [] (mapMaybe (fetchAggOrderByRels . obiColumn) . 
toList) maybeOrderBys @@ -549,17 +553,17 @@ aggregateFieldToExp sourcePrefix aggFlds strfyNum = jsonRow colFldsToExtr opText (FieldName t, SFCol col ty) = [ S.SELit t, - toJSONableExp strfyNum ty False Nothing $ - S.SEFnApp opText [S.SEIdentifier $ toIdentifier col] Nothing + toJSONableExp strfyNum ty False Nothing + $ S.SEFnApp opText [S.SEIdentifier $ toIdentifier col] Nothing ] colFldsToExtr opText (FieldName t, SFComputedField _cfName (ComputedFieldScalarSelect {..})) = [ S.SELit t, - toJSONableExp strfyNum (ColumnScalar _cfssType) False Nothing $ - S.SEFnApp + toJSONableExp strfyNum (ColumnScalar _cfssType) False Nothing + $ S.SEFnApp opText - [ withColumnOp _cfssScalarArguments $ - S.SEFunction $ - S.FunctionExp _cfssFunction (fromTableRowArgsDon'tAddBase sourcePrefix _cfssArguments) Nothing + [ withColumnOp _cfssScalarArguments + $ S.SEFunction + $ S.FunctionExp _cfssFunction (fromTableRowArgsDon'tAddBase sourcePrefix _cfssArguments) Nothing ] Nothing ] @@ -663,18 +667,18 @@ processConnectionSelect sourcePrefixes fieldAlias relAlias colMapping connection permLimitSubQuery = PLSQNotRequired primaryKeyColumnsObjectExp = - S.applyJsonBuildObj $ - flip concatMap (toList primaryKeyColumns) $ - \pgColumnInfo -> - [ S.SELit $ getPGColTxt $ ciColumn pgColumnInfo, - toJSONableExp Options.Don'tStringifyNumbers (ciType pgColumnInfo) False tCase $ - S.mkQIdenExp (mkBaseTableIdentifier thisPrefix) $ - ciColumn pgColumnInfo - ] + S.applyJsonBuildObj + $ flip concatMap (toList primaryKeyColumns) + $ \pgColumnInfo -> + [ S.SELit $ getPGColTxt $ ciColumn pgColumnInfo, + toJSONableExp Options.Don'tStringifyNumbers (ciType pgColumnInfo) False tCase + $ S.mkQIdenExp (mkBaseTableIdentifier thisPrefix) + $ ciColumn pgColumnInfo + ] primaryKeyColumnExtractors = - flip map (toList primaryKeyColumns) $ - \pgColumnInfo -> + flip map (toList primaryKeyColumns) + $ \pgColumnInfo -> let pgColumn = ciColumn pgColumnInfo in ( contextualizeBaseTableColumn thisPrefix pgColumn, S.mkQIdenExp (mkBaseTableIdentifier thisPrefix) pgColumn @@ -700,16 +704,18 @@ processConnectionSelect sourcePrefixes fieldAlias relAlias colMapping connection mkEqualityCompareExp (ConnectionSplit _ v orderByItem) = let obAlias = - mkAnnOrderByAlias thisPrefix fieldAlias similarArrayFields $ - obiColumn orderByItem + mkAnnOrderByAlias thisPrefix fieldAlias similarArrayFields + $ obiColumn orderByItem in S.BECompare S.SEQ (S.SEIdentifier $ toIdentifier obAlias) v - similarArrayFields = HashMap.unions $ - flip map (map snd fields) $ \case + similarArrayFields = HashMap.unions + $ flip map (map snd fields) + $ \case ConnectionTypename {} -> mempty ConnectionPageInfo {} -> mempty - ConnectionEdges edges -> HashMap.unions $ - flip map (map snd edges) $ \case + ConnectionEdges edges -> HashMap.unions + $ flip map (map snd edges) + $ \case EdgeTypename {} -> mempty EdgeCursor {} -> mempty EdgeNode annFields -> @@ -728,49 +734,55 @@ processConnectionSelect sourcePrefixes fieldAlias relAlias colMapping connection SelectSource -> n S.SQLExp processFields selectSource = - fmap (S.applyJsonBuildObj . concat) $ - forM fields $ - \(FieldName fieldText, field) -> - (S.SELit fieldText :) . pure - <$> case field of - ConnectionTypename t -> pure $ withForceAggregation S.textTypeAnn $ S.SELit t - ConnectionPageInfo pageInfoFields -> pure $ processPageInfoFields pageInfoFields - ConnectionEdges edges -> - fmap (flip mkSimpleJsonAgg (orderByForJsonAgg selectSource) . S.applyJsonBuildObj . 
concat) $ - forM edges $ - \(FieldName edgeText, edge) -> - (S.SELit edgeText :) . pure - <$> case edge of - EdgeTypename t -> pure $ S.SELit t - EdgeCursor -> pure $ encodeBase64 $ S.SEIdentifier (toIdentifier cursorIdentifier) - EdgeNode annFields -> do - let edgeFieldName = - FieldName $ - getFieldNameTxt fieldAlias <> "." <> fieldText <> "." <> edgeText - edgeFieldIdentifier = toIdentifier edgeFieldName - annFieldsExtrExp <- processAnnFields thisPrefix edgeFieldName similarArrayFields annFields tCase - modify' (<> [annFieldsExtrExp]) - pure $ S.SEIdentifier edgeFieldIdentifier + fmap (S.applyJsonBuildObj . concat) + $ forM fields + $ \(FieldName fieldText, field) -> + (S.SELit fieldText :) + . pure + <$> case field of + ConnectionTypename t -> pure $ withForceAggregation S.textTypeAnn $ S.SELit t + ConnectionPageInfo pageInfoFields -> pure $ processPageInfoFields pageInfoFields + ConnectionEdges edges -> + fmap (flip mkSimpleJsonAgg (orderByForJsonAgg selectSource) . S.applyJsonBuildObj . concat) + $ forM edges + $ \(FieldName edgeText, edge) -> + (S.SELit edgeText :) + . pure + <$> case edge of + EdgeTypename t -> pure $ S.SELit t + EdgeCursor -> pure $ encodeBase64 $ S.SEIdentifier (toIdentifier cursorIdentifier) + EdgeNode annFields -> do + let edgeFieldName = + FieldName + $ getFieldNameTxt fieldAlias + <> "." + <> fieldText + <> "." + <> edgeText + edgeFieldIdentifier = toIdentifier edgeFieldName + annFieldsExtrExp <- processAnnFields thisPrefix edgeFieldName similarArrayFields annFields tCase + modify' (<> [annFieldsExtrExp]) + pure $ S.SEIdentifier edgeFieldIdentifier processPageInfoFields infoFields = - S.applyJsonBuildObj $ - flip concatMap infoFields $ - \(FieldName fieldText, field) -> (:) (S.SELit fieldText) $ pure case field of - PageInfoTypename t -> withForceAggregation S.textTypeAnn $ S.SELit t - PageInfoHasNextPage -> - withForceAggregation S.boolTypeAnn $ - mkSingleFieldSelect (S.SEIdentifier hasNextPageIdentifier) pageInfoSelectAliasIdentifier - PageInfoHasPreviousPage -> - withForceAggregation S.boolTypeAnn $ - mkSingleFieldSelect (S.SEIdentifier hasPreviousPageIdentifier) pageInfoSelectAliasIdentifier - PageInfoStartCursor -> - withForceAggregation S.textTypeAnn $ - encodeBase64 $ - mkSingleFieldSelect (S.SEIdentifier startCursorIdentifier) cursorsSelectAliasIdentifier - PageInfoEndCursor -> - withForceAggregation S.textTypeAnn $ - encodeBase64 $ - mkSingleFieldSelect (S.SEIdentifier endCursorIdentifier) cursorsSelectAliasIdentifier + S.applyJsonBuildObj + $ flip concatMap infoFields + $ \(FieldName fieldText, field) -> (:) (S.SELit fieldText) $ pure case field of + PageInfoTypename t -> withForceAggregation S.textTypeAnn $ S.SELit t + PageInfoHasNextPage -> + withForceAggregation S.boolTypeAnn + $ mkSingleFieldSelect (S.SEIdentifier hasNextPageIdentifier) pageInfoSelectAliasIdentifier + PageInfoHasPreviousPage -> + withForceAggregation S.boolTypeAnn + $ mkSingleFieldSelect (S.SEIdentifier hasPreviousPageIdentifier) pageInfoSelectAliasIdentifier + PageInfoStartCursor -> + withForceAggregation S.textTypeAnn + $ encodeBase64 + $ mkSingleFieldSelect (S.SEIdentifier startCursorIdentifier) cursorsSelectAliasIdentifier + PageInfoEndCursor -> + withForceAggregation S.textTypeAnn + $ encodeBase64 + $ mkSingleFieldSelect (S.SEIdentifier endCursorIdentifier) cursorsSelectAliasIdentifier where mkSingleFieldSelect field fromIdentifier = S.SESelect diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Simple.hs 
b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Simple.hs index 47a6ea5d5ce..c6165f2e0c2 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Simple.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Simple.hs @@ -61,14 +61,14 @@ mkSQLSelect jsonAggSelect annSel = do -- : the join tree required for relationships (built via @MonadWriter@) -- : any top-level Common Table Expressions needed for Native Queries ((selectSource, nodeExtractors), SelectWriter {_swJoinTree = joinTree, _swCustomSQLCTEs = customSQLCTEs}) = - runWriter $ - flip runReaderT strfyNum $ - processAnnSimpleSelect sourcePrefixes rootFldName permLimitSubQuery annSel + runWriter + $ flip runReaderT strfyNum + $ processAnnSimpleSelect sourcePrefixes rootFldName permLimitSubQuery annSel selectNode = SelectNode nodeExtractors joinTree topExtractor = - asJsonAggExtr jsonAggSelect rootFldAls permLimitSubQuery $ - orderByForJsonAgg selectSource + asJsonAggExtr jsonAggSelect rootFldAls permLimitSubQuery + $ orderByForJsonAgg selectSource arrayNode = MultiRowSelectNode [topExtractor] selectNode tell customSQLCTEs diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Streaming.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Streaming.hs index 2d3be3ea712..fdf35daa3bf 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Streaming.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Streaming.hs @@ -105,13 +105,13 @@ mkStreamSQLSelect (AnnSelectStreamG () fields from perm args strfyNum) = do sqlSelect = AnnSelectG fields from perm selectArgs strfyNum Nothing permLimitSubQuery = PLSQNotRequired ((selectSource, nodeExtractors), SelectWriter {_swJoinTree = joinTree, _swCustomSQLCTEs = customSQLCTEs}) = - runWriter $ - flip runReaderT strfyNum $ - processAnnSimpleSelect sourcePrefixes rootFldName permLimitSubQuery sqlSelect + runWriter + $ flip runReaderT strfyNum + $ processAnnSimpleSelect sourcePrefixes rootFldName permLimitSubQuery sqlSelect selectNode = SelectNode nodeExtractors joinTree topExtractor = - asJsonAggExtr JASMultipleRows rootFldAls permLimitSubQuery $ - orderByForJsonAgg selectSource + asJsonAggExtr JASMultipleRows rootFldAls permLimitSubQuery + $ orderByForJsonAgg selectSource cursorLatestValueExp :: S.SQLExp = let columnAlias = ciName cursorColInfo pgColumn = ciColumn cursorColInfo @@ -124,9 +124,9 @@ mkStreamSQLSelect (AnnSelectStreamG () fields from perm args strfyNum) = do colExp = [ S.SELit (G.unName columnAlias), S.SETyAnn - ( mkMaxOrMinSQLExp maxOrMinTxt $ - toIdentifier $ - contextualizeBaseTableColumn rootFldIdentifier pgColumn + ( mkMaxOrMinSQLExp maxOrMinTxt + $ toIdentifier + $ contextualizeBaseTableColumn rootFldIdentifier pgColumn ) S.textTypeAnn ] @@ -146,8 +146,8 @@ mkStreamSQLSelect (AnnSelectStreamG () fields from perm args strfyNum) = do -- TODO: these functions also exist in `resolveMultiplexedValue`, de-duplicate these! 
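-- A minimal, self-contained sketch of what the `fromResVars` helper just
-- below builds: it reads one multiplexed query variable out of the JSONB
-- column "_subs"."result_vars" with Postgres's #>> path operator and casts
-- the extracted text back to the variable's expected type. The toy AST and
-- renderer here are hypothetical stand-ins for the real 'S.SQLExp'
-- constructors, kept only to show the shape of the generated SQL.
import Data.List (intercalate)

data Exp
  = Lit String           -- a SQL string literal, rendered in single quotes
  | Ident String         -- a (possibly qualified) identifier
  | OpApp String Exp Exp -- binary operator application, e.g. #>>
  | Arr [Exp]            -- ARRAY[...]
  | TyAnn Exp String     -- (e)::type annotation

render :: Exp -> String
render (Lit s) = "'" <> s <> "'"
render (Ident i) = i
render (OpApp op l r) = render l <> " " <> op <> " " <> render r
render (Arr es) = "ARRAY[" <> intercalate ", " (map render es) <> "]"
render (TyAnn e ty) = "(" <> render e <> ")::" <> ty

-- Mirrors the shape of fromResVars: the JSON path components become a
-- text-array literal on the right of #>>, and the whole extraction is
-- annotated with the column's Postgres type.
fromResVarsSketch :: String -> [String] -> Exp
fromResVarsSketch pgType jPath =
  TyAnn (OpApp "#>>" (Ident "_subs.result_vars") (Arr (map Lit jPath))) pgType

main :: IO ()
main =
  -- prints: (_subs.result_vars #>> ARRAY['synthetic', '0'])::integer
  putStrLn (render (fromResVarsSketch "integer" ["synthetic", "0"]))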
fromResVars pgType jPath = - addTypeAnnotation pgType $ - S.SEOpApp + addTypeAnnotation pgType + $ S.SEOpApp (S.SQLOp "#>>") [ S.SEQIdentifier $ S.QIdentifier (S.QualifiedIdentifier (TableIdentifier "_subs") Nothing) (Identifier "result_vars"), S.SEArray $ map S.SELit jPath diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Update.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Update.hs index 27cfdc1245d..b8b2493141e 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Translate/Update.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Update.hs @@ -33,7 +33,7 @@ data UpdateCTE -- | Create the update CTE. mkUpdateCTE :: forall pgKind. - Backend ('Postgres pgKind) => + (Backend ('Postgres pgKind)) => AnnotatedUpdate ('Postgres pgKind) -> UpdateCTE mkUpdateCTE (AnnotatedUpdateG tn permFltr chk updateVariant _ columnsInfo _tCase) = @@ -53,8 +53,8 @@ mkUpdateCTE (AnnotatedUpdateG tn permFltr chk updateVariant _ columnsInfo _tCase checkConstraint :: Maybe S.RetExp checkConstraint = - Just $ - S.RetExp + Just + $ S.RetExp [ S.selectStar, asCheckErrorExtractor . insertCheckConstraint @@ -75,8 +75,9 @@ mkUpdateCTE (AnnotatedUpdateG tn permFltr chk updateVariant _ columnsInfo _tCase } expandOperator :: [ColumnInfo ('Postgres pgKind)] -> (PGCol, UpdateOpExpression S.SQLExp) -> S.SetExpItem -expandOperator infos (column, op) = S.SetExpItem $ - (column,) $ case op of +expandOperator infos (column, op) = S.SetExpItem + $ (column,) + $ case op of UpdateSet e -> e UpdateInc e -> S.mkSQLOpExp S.incOp identifier (asNum e) UpdateAppend e -> S.mkSQLOpExp S.jsonbConcatOp identifier (asJSON e) @@ -90,7 +91,7 @@ expandOperator infos (column, op) = S.SetExpItem $ asText e = S.SETyAnn e S.textTypeAnn asJSON e = S.SETyAnn e S.jsonbTypeAnn asArray a = S.SETyAnn (S.SEArray a) S.textArrTypeAnn - asNum e = S.SETyAnn e $ - case find (\info -> ciColumn info == column) infos <&> ciType of + asNum e = S.SETyAnn e + $ case find (\info -> ciColumn info == column) infos <&> ciType of Just (ColumnScalar s) -> S.mkTypeAnn $ CollectableTypeScalar s _ -> S.numericTypeAnn diff --git a/server/src-lib/Hasura/Backends/Postgres/Types/BoolExp.hs b/server/src-lib/Hasura/Backends/Postgres/Types/BoolExp.hs index 6d84263010b..fc2649e6fa2 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Types/BoolExp.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Types/BoolExp.hs @@ -82,11 +82,11 @@ data BooleanOperators a | AMatchesFulltext a deriving stock (Eq, Generic, Foldable, Functor, Traversable, Show) -instance NFData a => NFData (BooleanOperators a) +instance (NFData a) => NFData (BooleanOperators a) -instance Hashable a => Hashable (BooleanOperators a) +instance (Hashable a) => Hashable (BooleanOperators a) -instance ToJSON a => ToJSONKeyValue (BooleanOperators a) where +instance (ToJSON a) => ToJSONKeyValue (BooleanOperators a) where toJSONKeyValue = \case AILIKE a -> ("_ilike", toJSON a) ANILIKE a -> ("_nilike", toJSON a) diff --git a/server/src-lib/Hasura/Backends/Postgres/Types/ComputedField.hs b/server/src-lib/Hasura/Backends/Postgres/Types/ComputedField.hs index 9489e21db80..07f1367ce6d 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Types/ComputedField.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Types/ComputedField.hs @@ -36,11 +36,14 @@ instance Hashable ComputedFieldDefinition instance HasCodec ComputedFieldDefinition where codec = - AC.object "PostgresComputedFieldDefinition" $ - ComputedFieldDefinition - <$> requiredField' "function" AC..= _cfdFunction - <*> optionalField' "table_argument" 
AC..= _cfdTableArgument - <*> optionalField' "session_argument" AC..= _cfdSessionArgument + AC.object "PostgresComputedFieldDefinition" + $ ComputedFieldDefinition + <$> requiredField' "function" + AC..= _cfdFunction + <*> optionalField' "table_argument" + AC..= _cfdTableArgument + <*> optionalField' "session_argument" + AC..= _cfdSessionArgument instance ToJSON ComputedFieldDefinition where toJSON = genericToJSON hasuraJSON {omitNothingFields = True} @@ -53,10 +56,10 @@ instance FromJSON ComputedFieldDefinition where data FunctionTableArgument = FTAFirst | FTANamed + -- | argument name FunctionArgName - -- ^ argument name + -- | argument index Int - -- ^ argument index deriving (Show, Eq, Ord, Generic) instance NFData FunctionTableArgument @@ -71,10 +74,10 @@ instance ToJSON FunctionTableArgument where -- SQL function as a JSON object. data FunctionSessionArgument = FunctionSessionArgument + -- | The argument name FunctionArgName - -- ^ The argument name + -- | The ordinal position in the function input parameters Int - -- ^ The ordinal position in the function input parameters deriving (Show, Eq, Ord, Generic) instance NFData FunctionSessionArgument @@ -117,8 +120,8 @@ instance Hashable ComputedFieldReturn instance ToJSON ComputedFieldReturn where toJSON = - genericToJSON $ - defaultOptions + genericToJSON + $ defaultOptions { constructorTagModifier = snakeCase . drop 3, sumEncoding = TaggedObject "type" "info" } diff --git a/server/src-lib/Hasura/Backends/Postgres/Types/Table.hs b/server/src-lib/Hasura/Backends/Postgres/Types/Table.hs index 25727729477..6da34250789 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Types/Table.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Types/Table.hs @@ -20,6 +20,9 @@ mutableView :: Text -> m () mutableView qt f mVI operation = - unless (isMutable f mVI) $ - throw400 NotSupported $ - "view " <> qt <<> " is not " <> operation + unless (isMutable f mVI) + $ throw400 NotSupported + $ "view " + <> qt + <<> " is not " + <> operation diff --git a/server/src-lib/Hasura/Backends/Postgres/Types/Update.hs b/server/src-lib/Hasura/Backends/Postgres/Types/Update.hs index 84ab290a292..564832fa9e7 100644 --- a/server/src-lib/Hasura/Backends/Postgres/Types/Update.hs +++ b/server/src-lib/Hasura/Backends/Postgres/Types/Update.hs @@ -38,15 +38,15 @@ data PgUpdateVariant pgKind v = SingleBatch (UpdateBatch ('Postgres pgKind) UpdateOpExpression v) | MultipleBatches [UpdateBatch ('Postgres pgKind) UpdateOpExpression v] -deriving stock instance Eq (UpdateBatch ('Postgres pgKind) UpdateOpExpression v) => Eq (PgUpdateVariant pgKind v) +deriving stock instance (Eq (UpdateBatch ('Postgres pgKind) UpdateOpExpression v)) => Eq (PgUpdateVariant pgKind v) -deriving stock instance Show (UpdateBatch ('Postgres pgKind) UpdateOpExpression v) => Show (PgUpdateVariant pgKind v) +deriving stock instance (Show (UpdateBatch ('Postgres pgKind) UpdateOpExpression v)) => Show (PgUpdateVariant pgKind v) -deriving stock instance Backend ('Postgres pgKind) => Functor (PgUpdateVariant pgKind) +deriving stock instance (Backend ('Postgres pgKind)) => Functor (PgUpdateVariant pgKind) -deriving stock instance Backend ('Postgres pgKind) => Foldable (PgUpdateVariant pgKind) +deriving stock instance (Backend ('Postgres pgKind)) => Foldable (PgUpdateVariant pgKind) -deriving stock instance Backend ('Postgres pgKind) => Traversable (PgUpdateVariant pgKind) +deriving stock instance (Backend ('Postgres pgKind)) => Traversable (PgUpdateVariant pgKind) -- | Are we updating anything? 
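-- Most hunks in this patch, like the deriving instances above, apply the
-- same two mechanical formatting changes (consistent with a formatter
-- upgrade): single-constraint contexts gain parentheses, and chained
-- operators such as ($) move from the end of a line to the head of the
-- next one. A small compilable sketch of the new style; the module,
-- function, and types here are hypothetical, not from this codebase.
module StyleSketch where

import Data.Foldable (traverse_)

-- old style:  logAll :: Show a => [a] -> IO ()
--             logAll xs =
--               traverse_ print $
--                 reverse xs
logAll :: (Show a) => [a] -> IO ()
logAll xs =
  traverse_ print
    $ reverse xs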
updateVariantIsEmpty :: PgUpdateVariant b v -> Bool diff --git a/server/src-lib/Hasura/ClientCredentials.hs b/server/src-lib/Hasura/ClientCredentials.hs index ebf85c7be80..9ca6b3372d6 100644 --- a/server/src-lib/Hasura/ClientCredentials.hs +++ b/server/src-lib/Hasura/ClientCredentials.hs @@ -16,7 +16,8 @@ import Hasura.RQL.Types.EECredentials (EEClientCredentials (..), EEClientId (..) getEEClientCredentialsTx :: PG.TxE QErr (Maybe EEClientCredentials) getEEClientCredentialsTx = - makeClientCredentials . PG.getRow + makeClientCredentials + . PG.getRow <$> PG.withQE defaultTxErrorHandler [PG.sql| diff --git a/server/src-lib/Hasura/Eventing/Common.hs b/server/src-lib/Hasura/Eventing/Common.hs index 0b3a1571637..8bc362ef5a0 100644 --- a/server/src-lib/Hasura/Eventing/Common.hs +++ b/server/src-lib/Hasura/Eventing/Common.hs @@ -37,19 +37,21 @@ data LockedEventsCtx = LockedEventsCtx -- event engine context saveLockedEvents :: (MonadIO m) => [EventId] -> TVar (Set.Set EventId) -> m () saveLockedEvents eventIds lockedEvents = - liftIO $ - atomically $ do + liftIO + $ atomically + $ do lockedEventsVals <- readTVar lockedEvents - writeTVar lockedEvents $! - Set.union lockedEventsVals $ - Set.fromList eventIds + writeTVar lockedEvents + $! Set.union lockedEventsVals + $ Set.fromList eventIds -- | Remove an event from the 'LockedEventsCtx' after it has been processed removeEventFromLockedEvents :: - MonadIO m => EventId -> TVar (Set.Set EventId) -> m () + (MonadIO m) => EventId -> TVar (Set.Set EventId) -> m () removeEventFromLockedEvents eventId lockedEvents = - liftIO $ - atomically $ do + liftIO + $ atomically + $ do lockedEventsVals <- readTVar lockedEvents writeTVar lockedEvents $! Set.delete eventId lockedEventsVals diff --git a/server/src-lib/Hasura/Eventing/EventTrigger.hs b/server/src-lib/Hasura/Eventing/EventTrigger.hs index a0532272cc6..04e7b0b9a20 100644 --- a/server/src-lib/Hasura/Eventing/EventTrigger.hs +++ b/server/src-lib/Hasura/Eventing/EventTrigger.hs @@ -168,11 +168,11 @@ data EventPayload (b :: BackendType) = EventPayload } deriving (Generic) -deriving instance Backend b => Show (EventPayload b) +deriving instance (Backend b) => Show (EventPayload b) -deriving instance Backend b => Eq (EventPayload b) +deriving instance (Backend b) => Eq (EventPayload b) -instance Backend b => J.ToJSON (EventPayload b) where +instance (Backend b) => J.ToJSON (EventPayload b) where toJSON = J.genericToJSON hasuraJSON {J.omitNothingFields = True} defaultMaxEventThreads :: Refined Positive Int @@ -181,26 +181,28 @@ defaultMaxEventThreads = unsafeRefine 100 defaultFetchInterval :: DiffTime defaultFetchInterval = seconds 1 -initEventEngineCtx :: MonadIO m => Refined Positive Int -> Refined NonNegative Milliseconds -> Refined NonNegative Int -> m EventEngineCtx +initEventEngineCtx :: (MonadIO m) => Refined Positive Int -> Refined NonNegative Milliseconds -> Refined NonNegative Int -> m EventEngineCtx initEventEngineCtx maxThreads fetchInterval _eeCtxFetchSize = do _eeCtxEventThreadsCapacity <- liftIO $ newTVarIO $ unrefine maxThreads let _eeCtxFetchInterval = milliseconds $ unrefine fetchInterval return EventEngineCtx {..} -saveLockedEventTriggerEvents :: MonadIO m => SourceName -> [EventId] -> TVar (HashMap SourceName (Set.Set EventId)) -> m () +saveLockedEventTriggerEvents :: (MonadIO m) => SourceName -> [EventId] -> TVar (HashMap SourceName (Set.Set EventId)) -> m () saveLockedEventTriggerEvents sourceName eventIds lockedEvents = - liftIO $ - atomically $ do + liftIO + $ atomically + $ do 
lockedEventsVals <- readTVar lockedEvents case HashMap.lookup sourceName lockedEventsVals of Nothing -> writeTVar lockedEvents $! HashMap.singleton sourceName (Set.fromList eventIds) Just _ -> writeTVar lockedEvents $! HashMap.insertWith Set.union sourceName (Set.fromList eventIds) lockedEventsVals removeEventTriggerEventFromLockedEvents :: - MonadIO m => SourceName -> EventId -> TVar (HashMap SourceName (Set.Set EventId)) -> m () + (MonadIO m) => SourceName -> EventId -> TVar (HashMap SourceName (Set.Set EventId)) -> m () removeEventTriggerEventFromLockedEvents sourceName eventId lockedEvents = - liftIO $ - atomically $ do + liftIO + $ atomically + $ do lockedEventsVals <- readTVar lockedEvents writeTVar lockedEvents $! HashMap.adjust (Set.delete eventId) sourceName lockedEventsVals @@ -267,8 +269,8 @@ logFetchedEventsStatistics logger backendEvents = L.logStats logger (FetchedEventsStats numEventsFetchedPerSource 1) where numEventsFetchedPerSource = - let sourceNames = flip map backendEvents $ - \backendEvent -> AB.dispatchAnyBackend @Backend backendEvent _ewsSourceName + let sourceNames = flip map backendEvents + $ \backendEvent -> AB.dispatchAnyBackend @Backend backendEvent _ewsSourceName in NumEventsFetchedPerSource $ HashMap.fromListWith (+) [(sourceName, 1) | sourceName <- sourceNames] {-# ANN processEventQueue ("HLint: ignore Use withAsync" :: String) #-} @@ -326,7 +328,9 @@ processEventQueue logger statsLogger httpMgr getSchemaCache getEventEngineCtx ac -} allSources <- scSources <$> liftIO getSchemaCache fetchBatchSize <- unrefine . _eeCtxFetchSize <$> liftIO getEventEngineCtx - events <- liftIO . fmap concat $ + events <- liftIO + . fmap concat + $ -- fetch pending events across all the sources asynchronously LA.forConcurrently (HashMap.toList allSources) \(sourceName, sourceCache) -> AB.dispatchAnyBackend @BackendEventTrigger sourceCache \(SourceInfo {..} :: SourceInfo b) -> do @@ -387,8 +391,9 @@ processEventQueue logger statsLogger httpMgr getSchemaCache getEventEngineCtx ac -- depends on not putting anything that can throw in the body here: AB.dispatchAnyBackend @BackendEventTrigger eventWithSource \(eventWithSource' :: EventWithSource b) -> mask_ $ do - liftIO $ - atomically $ do + liftIO + $ atomically + $ do -- block until < HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE threads: maxCapacity <- readTVar _eeCtxEventThreadsCapacity activeThreadCount <- readTVar activeEventProcessingThreads @@ -396,12 +401,12 @@ processEventQueue logger statsLogger httpMgr getSchemaCache getEventEngineCtx ac modifyTVar' activeEventProcessingThreads (+ 1) -- since there is some capacity in our worker threads, we can launch another: t <- - LA.async $ - flip runReaderT (logger, httpMgr) $ - processEvent eventWithSource' - `finally` - -- NOTE!: this needs to happen IN THE FORKED THREAD: - decrementActiveThreadCount + LA.async + $ flip runReaderT (logger, httpMgr) + $ processEvent eventWithSource' + `finally` + -- NOTE!: this needs to happen IN THE FORKED THREAD: + decrementActiveThreadCount LA.link t -- return when next batch ready; some 'processEvent' threads may be running. @@ -409,47 +414,53 @@ processEventQueue logger statsLogger httpMgr getSchemaCache getEventEngineCtx ac let lenEvents = length events if - | lenEvents == fetchBatchSize -> do - -- If we've seen N fetches in a row from the DB come back full (i.e. 
only limited - -- by our LIMIT clause), then we say we're clearly falling behind: - let clearlyBehind = fullFetchCount >= 3 - unless alreadyWarned $ - when clearlyBehind $ - L.unLogger logger $ - L.UnstructuredLog L.LevelWarn $ - fromString $ - "Events processor may not be keeping up with events generated in postgres, " - <> "or we're working on a backlog of events. Consider increasing " - <> "HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE" - return (eventsNext, (fullFetchCount + 1), (alreadyWarned || clearlyBehind)) - | otherwise -> do - when (lenEvents /= fetchBatchSize && alreadyWarned) $ - -- emit as warning in case users are only logging warning severity and saw above - L.unLogger logger $ - L.UnstructuredLog L.LevelWarn $ - fromString $ - "It looks like the events processor is keeping up again." - return (eventsNext, 0, False) + | lenEvents == fetchBatchSize -> do + -- If we've seen N fetches in a row from the DB come back full (i.e. only limited + -- by our LIMIT clause), then we say we're clearly falling behind: + let clearlyBehind = fullFetchCount >= 3 + unless alreadyWarned + $ when clearlyBehind + $ L.unLogger logger + $ L.UnstructuredLog L.LevelWarn + $ fromString + $ "Events processor may not be keeping up with events generated in postgres, " + <> "or we're working on a backlog of events. Consider increasing " + <> "HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE" + return (eventsNext, (fullFetchCount + 1), (alreadyWarned || clearlyBehind)) + | otherwise -> do + when (lenEvents /= fetchBatchSize && alreadyWarned) + $ + -- emit as warning in case users are only logging warning severity and saw above + L.unLogger logger + $ L.UnstructuredLog L.LevelWarn + $ fromString + $ "It looks like the events processor is keeping up again." + return (eventsNext, 0, False) decrementActiveThreadCount = - liftIO $ - atomically $ - modifyTVar' activeEventProcessingThreads (subtract 1) + liftIO + $ atomically + $ modifyTVar' activeEventProcessingThreads (subtract 1) -- \| Extract a trace context from an event trigger payload. - extractEventContext :: forall io. MonadIO io => J.Value -> io (Maybe Tracing.TraceContext) + extractEventContext :: forall io. (MonadIO io) => J.Value -> io (Maybe Tracing.TraceContext) extractEventContext e = do let traceIdMaybe = - Tracing.traceIdFromHex . txtToBs - =<< e ^? JL.key "trace_context" . JL.key "trace_id" . JL._String + Tracing.traceIdFromHex + . txtToBs + =<< e + ^? JL.key "trace_context" . JL.key "trace_id" . JL._String for traceIdMaybe $ \traceId -> do freshSpanId <- Tracing.randomSpanId let parentSpanId = - Tracing.spanIdFromHex . txtToBs - =<< e ^? JL.key "trace_context" . JL.key "span_id" . JL._String + Tracing.spanIdFromHex + . txtToBs + =<< e + ^? JL.key "trace_context" . JL.key "span_id" . JL._String samplingState = - Tracing.samplingStateFromHeader $ - e ^? JL.key "trace_context" . JL.key "sampling_state" . JL._String + Tracing.samplingStateFromHeader + $ e + ^? JL.key "trace_context" . JL.key "sampling_state" . 
JL._String pure $ Tracing.TraceContext traceId freshSpanId parentSpanId samplingState processEvent :: @@ -474,8 +485,8 @@ processEventQueue logger statsLogger httpMgr getSchemaCache getEventEngineCtx ac eventProcessTime <- liftIO getCurrentTime let eventQueueTime = realToFrac $ diffUTCTime eventProcessTime eventFetchedTime _ <- liftIO $ EKG.Distribution.add (smEventQueueTime serverMetrics) eventQueueTime - liftIO $ - observeHistogramWithLabel + liftIO + $ observeHistogramWithLabel getPrometheusMetricsGranularity True (eventQueueTimeSeconds eventTriggerMetrics) @@ -526,8 +537,9 @@ processEventQueue logger statsLogger httpMgr getSchemaCache getEventEngineCtx ac eventTriggerProcessAction = do eventExecutionStartTime <- liftIO getCurrentTime eitherReqRes <- - runExceptT $ - mkRequest headers httpTimeout payload requestTransform (_envVarValue webhook) >>= \reqDetails -> do + runExceptT + $ mkRequest headers httpTimeout payload requestTransform (_envVarValue webhook) + >>= \reqDetails -> do let request = extractRequest reqDetails logger' res details = do logHTTPForET res extraLogCtx details (_envVarName webhook) logHeaders @@ -601,8 +613,8 @@ processEventQueue logger statsLogger httpMgr getSchemaCache getEventEngineCtx ac (EventStatusWithTriggerLabel eventSuccessLabel (Just (DynamicEventTriggerLabel (etiName eti) sourceName))) Left eventError -> do -- TODO (paritosh): We can also add a label to the metric to indicate the type of error - liftIO $ - incEventTriggerCounterWithLabel + liftIO + $ incEventTriggerCounterWithLabel getPrometheusMetricsGranularity True (eventInvocationTotal eventTriggerMetrics) @@ -711,8 +723,8 @@ retryOrSetError e retryConf eventTriggerMetrics err = do -- current_try = tries + 1 , allowed_total_tries = rcNumRetries retryConf + 1 if triesExhausted && noRetryHeader then do - liftIO $ - incEventTriggerCounterWithLabel + liftIO + $ incEventTriggerCounterWithLabel getPrometheusMetricsGranularity True (eventProcessedTotal eventTriggerMetrics) @@ -758,19 +770,20 @@ logQErr err = do L.unLogger logger $ EventInternalErr err getEventTriggerInfoFromEvent :: - forall b. Backend b => SchemaCache -> Event b -> Either Text (EventTriggerInfo b) + forall b. 
(Backend b) => SchemaCache -> Event b -> Either Text (EventTriggerInfo b) getEventTriggerInfoFromEvent sc e = do let table = eTable e mTableInfo = unsafeTableInfo @b (eSource e) table $ scSources sc tableInfo <- onNothing mTableInfo $ Left ("table '" <> table <<> "' not found") let triggerName = tmName $ eTrigger e mEventTriggerInfo = HashMap.lookup triggerName (_tiEventTriggerInfoMap tableInfo) - onNothing mEventTriggerInfo $ - Left + onNothing mEventTriggerInfo + $ Left ( "event trigger '" <> triggerNameToTxt triggerName <> "' on table '" - <> table <<> "' not found" + <> table + <<> "' not found" ) incEventTriggerCounterWithLabel :: diff --git a/server/src-lib/Hasura/Eventing/HTTP.hs b/server/src-lib/Hasura/Eventing/HTTP.hs index cb9d61bda4b..d088362964c 100644 --- a/server/src-lib/Hasura/Eventing/HTTP.hs +++ b/server/src-lib/Hasura/Eventing/HTTP.hs @@ -173,19 +173,19 @@ instance J.ToJSON (HTTPRespExtra a) where toJSON (HTTPRespExtra resp ctxt req webhookVarName logHeaders) = case resp of Left errResp -> - J.object $ - [ "response" J..= J.toJSON errResp, - "request" J..= sanitiseReqJSON req, - "event_id" J..= elEventId ctxt - ] - ++ eventName + J.object + $ [ "response" J..= J.toJSON errResp, + "request" J..= sanitiseReqJSON req, + "event_id" J..= elEventId ctxt + ] + ++ eventName Right okResp -> - J.object $ - [ "response" J..= J.toJSON okResp, - "request" J..= J.toJSON req, - "event_id" J..= elEventId ctxt - ] - ++ eventName + J.object + $ [ "response" J..= J.toJSON okResp, + "request" J..= J.toJSON req, + "event_id" J..= elEventId ctxt + ] + ++ eventName where eventName = case elEventName ctxt of Just name -> ["event_name" J..= name] @@ -194,8 +194,8 @@ instance J.ToJSON (HTTPRespExtra a) where HVValue txt -> J.String txt HVEnv txt -> J.String txt getRedactedHeaders = - J.Object $ - foldr (\(HeaderConf name val) -> KM.insert (J.fromText name) (getValue val)) mempty logHeaders + J.Object + $ foldr (\(HeaderConf name val) -> KM.insert (J.fromText name) (getValue val)) mempty logHeaders updateReqDetail v reqType = let webhookRedactedReq = J.toJSON v & key reqType . key "url" .~ J.String webhookVarName redactedReq = webhookRedactedReq & key reqType . key "headers" .~ getRedactedHeaders @@ -291,7 +291,7 @@ data TransformableRequestError a deriving (Show) mkRequest :: - MonadError (TransformableRequestError a) m => + (MonadError (TransformableRequestError a) m) => [HTTP.Header] -> HTTP.ResponseTimeout -> -- | the request body. 
It is passed as a 'BL.Bytestring' because we need to diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs index bd401289c61..d1ed456ce72 100644 --- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs @@ -206,13 +206,13 @@ runCronEventsGenerator logger cronTriggerStatsLogger getSC = do withCronTrigger cronTriggerCache cronTriggerStat = do case HashMap.lookup (_ctsName cronTriggerStat) cronTriggerCache of Nothing -> do - L.unLogger logger $ - ScheduledTriggerInternalErr $ - err500 Unexpected "could not find scheduled trigger in the schema cache" + L.unLogger logger + $ ScheduledTriggerInternalErr + $ err500 Unexpected "could not find scheduled trigger in the schema cache" pure Nothing Just cronTrigger -> - pure $ - Just (cronTrigger, cronTriggerStat) + pure + $ Just (cronTrigger, cronTriggerStat) insertCronEventsFor :: (MonadMetadataStorage m, MonadError QErr m) => @@ -227,7 +227,8 @@ insertCronEventsFor cronTriggersWithStats = do generateCronEventsFrom :: UTCTime -> CronTriggerInfo -> [CronEventSeed] generateCronEventsFrom startTime CronTriggerInfo {..} = - map (CronEventSeed ctiName) $ + map (CronEventSeed ctiName) + $ -- generate next 100 events; see getDeprivedCronTriggerStatsTx: generateScheduleTimes startTime 100 ctiSchedule @@ -259,9 +260,11 @@ processCronEvents logger httpMgr scheduledTriggerMetrics cronEvents cronTriggers forConcurrently_ cronEvents $ \(CronEvent id' name st _ tries _ _) -> do case HashMap.lookup name cronTriggersInfo of Nothing -> - logInternalError $ - err500 Unexpected $ - "could not find cron trigger " <> name <<> " in the schema cache" + logInternalError + $ err500 Unexpected + $ "could not find cron trigger " + <> name + <<> " in the schema cache" Just CronTriggerInfo {..} -> do let payload = ScheduledEventWebhookPayload @@ -276,16 +279,16 @@ processCronEvents logger httpMgr scheduledTriggerMetrics cronEvents cronTriggers retryCtx = RetryContext tries ctiRetryConf eventProcessingTimeout = min upperBoundScheduledEventTimeout (unrefine $ strcTimeoutSeconds $ ctiRetryConf) processScheduledEventAction = - runExceptT $ - flip runReaderT (logger, httpMgr) $ - processScheduledEvent - scheduledTriggerMetrics - id' - ctiHeaders - retryCtx - payload - ctiWebhookInfo - Cron + runExceptT + $ flip runReaderT (logger, httpMgr) + $ processScheduledEvent + scheduledTriggerMetrics + id' + ctiHeaders + retryCtx + payload + ctiWebhookInfo + Cron eventProcessedMaybe <- timeout (fromInteger (diffTimeToMicroSeconds eventProcessingTimeout)) $ processScheduledEventAction case eventProcessedMaybe of @@ -354,8 +357,8 @@ processOneOffScheduledEvents case webhookAndHeaderInfo of Right (webhookEnvRecord, eventHeaderInfo) -> do let processScheduledEventAction = - flip runReaderT (logger, httpMgr) $ - processScheduledEvent scheduledTriggerMetrics _ooseId eventHeaderInfo retryCtx payload webhookEnvRecord OneOff + flip runReaderT (logger, httpMgr) + $ processScheduledEvent scheduledTriggerMetrics _ooseId eventHeaderInfo retryCtx payload webhookEnvRecord OneOff eventTimeout = unrefine $ strcTimeoutSeconds $ _ooseRetryConf @@ -407,22 +410,22 @@ processScheduledTriggers :: LockedEventsCtx -> m (Forever m) processScheduledTriggers getEnvHook logger statsLogger httpMgr scheduledTriggerMetrics getSC LockedEventsCtx {..} = do - return $ - Forever () $ - const do - cronTriggersInfo <- scCronTriggers <$> liftIO getSC - env <- liftIO getEnvHook - getScheduledEventsForDelivery 
(HashMap.keys cronTriggersInfo) >>= \case - Left e -> logInternalError e - Right (cronEvents, oneOffEvents) -> do - logFetchedScheduledEventsStats statsLogger (CronEventsCount $ length cronEvents) (OneOffScheduledEventsCount $ length oneOffEvents) - processCronEvents logger httpMgr scheduledTriggerMetrics cronEvents cronTriggersInfo leCronEvents - processOneOffScheduledEvents env logger httpMgr scheduledTriggerMetrics oneOffEvents leOneOffEvents - -- NOTE: cron events are scheduled at times with minute resolution (as on - -- unix), while one-off events can be set for arbitrary times. The sleep - -- time here determines how overdue a scheduled event (cron or one-off) - -- might be before we begin processing: - liftIO $ sleep (seconds 10) + return + $ Forever () + $ const do + cronTriggersInfo <- scCronTriggers <$> liftIO getSC + env <- liftIO getEnvHook + getScheduledEventsForDelivery (HashMap.keys cronTriggersInfo) >>= \case + Left e -> logInternalError e + Right (cronEvents, oneOffEvents) -> do + logFetchedScheduledEventsStats statsLogger (CronEventsCount $ length cronEvents) (OneOffScheduledEventsCount $ length oneOffEvents) + processCronEvents logger httpMgr scheduledTriggerMetrics cronEvents cronTriggersInfo leCronEvents + processOneOffScheduledEvents env logger httpMgr scheduledTriggerMetrics oneOffEvents leOneOffEvents + -- NOTE: cron events are scheduled at times with minute resolution (as on + -- unix), while one-off events can be set for arbitrary times. The sleep + -- time here determines how overdue a scheduled event (cron or one-off) + -- might be before we begin processing: + liftIO $ sleep (seconds 10) where logInternalError err = liftIO . L.unLogger logger $ ScheduledTriggerInternalErr err @@ -462,8 +465,9 @@ processScheduledEvent scheduledTriggerMetrics eventId eventHeaders retryCtx payl responseTransform = mkResponseTransform <$> sewpResponseTransform payload eitherReqRes <- - runExceptT $ - mkRequest headers httpTimeout webhookReqBody requestTransform (_envVarValue webhookUrl) >>= \reqDetails -> do + runExceptT + $ mkRequest headers httpTimeout webhookReqBody requestTransform (_envVarValue webhookUrl) + >>= \reqDetails -> do let request = extractRequest reqDetails logger e d = do logHTTPForST e extraLogCtx d (_envVarName webhookUrl) decodedHeaders @@ -889,8 +893,8 @@ unlockAllLockedScheduledEventsTx = do insertCronEventsTx :: [CronEventSeed] -> PG.TxE QErr () insertCronEventsTx cronSeeds = do let insertCronEventsSql = - TB.run $ - toSQL + TB.run + $ toSQL S.SQLInsert { siTable = cronEventsTable, siCols = map unsafePGCol ["trigger_name", "scheduled_time"], @@ -905,7 +909,8 @@ insertCronEventsTx cronSeeds = do insertOneOffScheduledEventTx :: OneOffEvent -> PG.TxE QErr EventId insertOneOffScheduledEventTx CreateScheduledEvent {..} = - runIdentity . PG.getRow + runIdentity + . PG.getRow <$> PG.withQE defaultTxErrorHandler [PG.sql| @@ -952,18 +957,18 @@ mkScheduledEventStatusFilter :: [ScheduledEventStatus] -> S.BoolExp mkScheduledEventStatusFilter = \case [] -> S.BELit True v -> - S.BEIN (S.SEIdentifier $ Identifier "status") $ - map (S.SELit . scheduledEventStatusToText) v + S.BEIN (S.SEIdentifier $ Identifier "status") + $ map (S.SELit . 
scheduledEventStatusToText) v scheduledTimeOrderBy :: S.OrderByExp scheduledTimeOrderBy = let scheduledTimeCol = S.SEIdentifier $ Identifier "scheduled_time" - in S.OrderByExp $ - flip (NE.:|) [] $ - S.OrderByItem - scheduledTimeCol - (Just S.OTAsc) - Nothing + in S.OrderByExp + $ flip (NE.:|) [] + $ S.OrderByItem + scheduledTimeCol + (Just S.OTAsc) + Nothing -- | Build a select expression which outputs total count and -- list of json rows with pagination limit and offset applied @@ -1016,7 +1021,7 @@ withCount (count, PG.ViaJSON a) = WithOptionalTotalCount (Just count) a withoutCount :: PG.ViaJSON a -> WithOptionalTotalCount a withoutCount (PG.ViaJSON a) = WithOptionalTotalCount Nothing a -executeWithOptionalTotalCount :: J.FromJSON a => PG.Query -> RowsCountOption -> PG.TxE QErr (WithOptionalTotalCount a) +executeWithOptionalTotalCount :: (J.FromJSON a) => PG.Query -> RowsCountOption -> PG.TxE QErr (WithOptionalTotalCount a) executeWithOptionalTotalCount sql getRowsCount = case getRowsCount of IncludeRowsCount -> (withCount . PG.getRow) <$> PG.withQE defaultTxErrorHandler sql () False @@ -1150,8 +1155,8 @@ getScheduledEventsInvocationsQueryNoPagination (EventTables oneOffInvocationsTab let invocationTable = cronInvocationsTable eventTable = cronEventsTable' joinCondition = - S.JoinOn $ - S.BECompare + S.JoinOn + $ S.BECompare S.SEQ (S.SEQIdentifier $ S.mkQIdentifierTable eventTable $ Identifier "id") (S.SEQIdentifier $ S.mkQIdentifierTable invocationTable $ Identifier "event_id") diff --git a/server/src-lib/Hasura/Function/API.hs b/server/src-lib/Hasura/Function/API.hs index fe55759cc3f..73060008f90 100644 --- a/server/src-lib/Hasura/Function/API.hs +++ b/server/src-lib/Hasura/Function/API.hs @@ -61,16 +61,22 @@ trackFunctionP1 :: m () trackFunctionP1 sourceName qf = do rawSchemaCache <- askSchemaCache - unless (isJust $ AB.unpackAnyBackend @b =<< HashMap.lookup sourceName (scSources rawSchemaCache)) $ - throw400 NotExists $ - sourceName <<> " is not a known " <> reify (backendTag @b) <<> " source" - when (isJust $ unsafeFunctionInfo @b sourceName qf $ scSources rawSchemaCache) $ - throw400 AlreadyTracked $ - "function already tracked: " <>> qf + unless (isJust $ AB.unpackAnyBackend @b =<< HashMap.lookup sourceName (scSources rawSchemaCache)) + $ throw400 NotExists + $ sourceName + <<> " is not a known " + <> reify (backendTag @b) + <<> " source" + when (isJust $ unsafeFunctionInfo @b sourceName qf $ scSources rawSchemaCache) + $ throw400 AlreadyTracked + $ "function already tracked: " + <>> qf let qt = functionToTable @b qf - when (isJust $ unsafeTableInfo @b sourceName qt $ scSources rawSchemaCache) $ - throw400 NotSupported $ - "table with name " <> qf <<> " already exists" + when (isJust $ unsafeTableInfo @b sourceName qt $ scSources rawSchemaCache) + $ throw400 NotSupported + $ "table with name " + <> qf + <<> " already exists" trackFunctionP2 :: forall b m. @@ -84,8 +90,11 @@ trackFunctionP2 sourceName qf config comment = do buildSchemaCacheFor (MOSourceObjId sourceName $ AB.mkAnyBackend $ SMOFunction @b qf) $ MetadataModifier - $ metaSources . ix sourceName . toSourceMetadata . (smFunctions @b) - %~ InsOrdHashMap.insert qf (FunctionMetadata qf config mempty comment) + $ metaSources + . ix sourceName + . toSourceMetadata + . 
(smFunctions @b) + %~ InsOrdHashMap.insert qf (FunctionMetadata qf config mempty comment) pure successMsg getSingleUniqueFunctionOverload :: @@ -118,13 +127,19 @@ data TrackFunctionV2 (b :: BackendType) = TrackFunctionV2 _tfv2Comment :: Maybe Text } -instance Backend b => FromJSON (TrackFunctionV2 b) where +instance (Backend b) => FromJSON (TrackFunctionV2 b) where parseJSON = withObject "TrackFunctionV2" $ \o -> TrackFunctionV2 - <$> o .:? "source" .!= defaultSource - <*> o .: "function" - <*> o .:? "configuration" .!= emptyFunctionConfig - <*> o .:? "comment" + <$> o + .:? "source" + .!= defaultSource + <*> o + .: "function" + <*> o + .:? "configuration" + .!= emptyFunctionConfig + <*> o + .:? "comment" runTrackFunctionV2 :: forall b m. @@ -182,9 +197,9 @@ runUntrackFunc :: m EncJSON runUntrackFunc (UnTrackFunction functionName sourceName) = do void $ askFunctionInfo @b sourceName functionName - withNewInconsistentObjsCheck $ - buildSchemaCache $ - dropFunctionInMetadata @b sourceName functionName + withNewInconsistentObjsCheck + $ buildSchemaCache + $ dropFunctionInMetadata @b sourceName functionName pure successMsg {- Note [Function Permissions] @@ -217,9 +232,13 @@ instance (Backend b) => FromJSON (FunctionPermissionArgument b) where parseJSON v = flip (withObject "FunctionPermissionArgument") v $ \o -> FunctionPermissionArgument - <$> o .: "function" - <*> o .:? "source" .!= defaultSource - <*> o .: "role" + <$> o + .: "function" + <*> o + .:? "source" + .!= defaultSource + <*> o + .: "role" runCreateFunctionPermission :: forall b m. @@ -234,33 +253,38 @@ runCreateFunctionPermission (FunctionPermissionArgument functionName source role metadata <- getMetadata sourceCache <- scSources <$> askSchemaCache functionInfo <- askFunctionInfo @b source functionName - when (doesFunctionPermissionExist @b metadata source functionName role) $ - throw400 AlreadyExists $ - "permission of role " - <> role <<> " already exists for function " - <> functionName <<> " in source: " <>> source + when (doesFunctionPermissionExist @b metadata source functionName role) + $ throw400 AlreadyExists + $ "permission of role " + <> role + <<> " already exists for function " + <> functionName + <<> " in source: " + <>> source (functionTableName, functionTableInfo) <- do let tn = _fiReturnType functionInfo case unsafeTableInfo @b source tn sourceCache of Nothing -> throw400 NotExists ("function's return table " <> tn <<> " not found in the cache") Just info -> pure (tn, info) - unless (role `HashMap.member` _tiRolePermInfoMap functionTableInfo) $ - throw400 NotSupported $ - "function permission can only be added when the function's return table " - <> functionTableName <<> " has select permission configured for role: " <>> role + unless (role `HashMap.member` _tiRolePermInfoMap functionTableInfo) + $ throw400 NotSupported + $ "function permission can only be added when the function's return table " + <> functionTableName + <<> " has select permission configured for role: " + <>> role buildSchemaCacheFor - ( MOSourceObjId source $ - AB.mkAnyBackend (SMOFunctionPermission @b functionName role) + ( MOSourceObjId source + $ AB.mkAnyBackend (SMOFunctionPermission @b functionName role) ) $ MetadataModifier $ metaSources - . ix - source - . toSourceMetadata - . (smFunctions @b) - . ix functionName - . fmPermissions - %~ (:) (FunctionPermissionInfo role) + . ix + source + . toSourceMetadata + . (smFunctions @b) + . ix functionName + . 
fmPermissions + %~ (:) (FunctionPermissionInfo role) pure successMsg dropFunctionPermissionInMetadata :: @@ -271,8 +295,14 @@ dropFunctionPermissionInMetadata :: RoleName -> MetadataModifier dropFunctionPermissionInMetadata source function role = - MetadataModifier $ - metaSources . ix source . toSourceMetadata . (smFunctions @b) . ix function . fmPermissions %~ filter ((/=) role . _fpmRole) + MetadataModifier + $ metaSources + . ix source + . toSourceMetadata + . (smFunctions @b) + . ix function + . fmPermissions + %~ filter ((/=) role . _fpmRole) doesFunctionPermissionExist :: forall b. (BackendMetadata b) => Metadata -> SourceName -> FunctionName b -> RoleName -> Bool doesFunctionPermissionExist metadata sourceName functionName roleName = @@ -289,15 +319,18 @@ runDropFunctionPermission :: m EncJSON runDropFunctionPermission (FunctionPermissionArgument functionName source role) = do metadata <- getMetadata - unless (doesFunctionPermissionExist @b metadata source functionName role) $ - throw400 NotExists $ - "permission of role " - <> role <<> " does not exist for function " - <> functionName <<> " in source: " <>> source + unless (doesFunctionPermissionExist @b metadata source functionName role) + $ throw400 NotExists + $ "permission of role " + <> role + <<> " does not exist for function " + <> functionName + <<> " in source: " + <>> source buildSchemaCacheFor - ( MOSourceObjId source $ - AB.mkAnyBackend $ - SMOFunctionPermission @b functionName role + ( MOSourceObjId source + $ AB.mkAnyBackend + $ SMOFunctionPermission @b functionName role ) $ dropFunctionPermissionInMetadata @b source functionName role pure successMsg @@ -311,16 +344,20 @@ data SetFunctionCustomization b = SetFunctionCustomization _sfcConfiguration :: FunctionConfig b } -deriving instance Backend b => Show (SetFunctionCustomization b) +deriving instance (Backend b) => Show (SetFunctionCustomization b) -deriving instance Backend b => Eq (SetFunctionCustomization b) +deriving instance (Backend b) => Eq (SetFunctionCustomization b) instance (Backend b) => FromJSON (SetFunctionCustomization b) where parseJSON = withObject "set function customization" $ \o -> SetFunctionCustomization - <$> o .:? "source" .!= defaultSource - <*> o .: "function" - <*> o .: "configuration" + <$> o + .:? "source" + .!= defaultSource + <*> o + .: "function" + <*> o + .: "configuration" -- | Changes the custom names of a function. Used in the API command 'pg_set_function_customization'. runSetFunctionCustomization :: @@ -333,5 +370,6 @@ runSetFunctionCustomization (SetFunctionCustomization source function config) = buildSchemaCacheFor (MOSourceObjId source $ AB.mkAnyBackend $ SMOFunction @b function) $ MetadataModifier - $ ((functionMetadataSetter @b source function) . fmConfiguration) .~ config + $ ((functionMetadataSetter @b source function) . fmConfiguration) + .~ config return successMsg diff --git a/server/src-lib/Hasura/Function/Cache.hs b/server/src-lib/Hasura/Function/Cache.hs index 48bc96fd7c7..db007b58c3a 100644 --- a/server/src-lib/Hasura/Function/Cache.hs +++ b/server/src-lib/Hasura/Function/Cache.hs @@ -84,7 +84,7 @@ data InputArgument a | IASessionVariables FunctionArgName deriving (Show, Eq, Functor, Generic) -instance ToJSON a => ToJSON (InputArgument a) where +instance (ToJSON a) => ToJSON (InputArgument a) where toJSON = genericToJSON defaultOptions {constructorTagModifier = snakeCase . drop 2, sumEncoding = TaggedObject "type" "argument"} toEncoding = genericToEncoding defaultOptions {constructorTagModifier = snakeCase . 
drop 2, sumEncoding = TaggedObject "type" "argument"} @@ -114,8 +114,10 @@ newtype FunctionPermissionInfo = FunctionPermissionInfo instance HasCodec FunctionPermissionInfo where codec = - AC.object "FunctionPermissionInfo" $ - FunctionPermissionInfo <$> AC.requiredField' "role" AC..= _fpmRole + AC.object "FunctionPermissionInfo" + $ FunctionPermissionInfo + <$> AC.requiredField' "role" + AC..= _fpmRole instance FromJSON FunctionPermissionInfo where parseJSON = genericParseJSON hasuraJSON @@ -140,17 +142,22 @@ instance NFData FunctionCustomRootFields instance HasCodec FunctionCustomRootFields where codec = - AC.bimapCodec checkForDup id $ - AC.object "FunctionCustomRootFields" $ - FunctionCustomRootFields - <$> AC.optionalFieldWith' "function" graphQLFieldNameCodec AC..= _fcrfFunction - <*> AC.optionalFieldWith' "function_aggregate" graphQLFieldNameCodec AC..= _fcrfFunctionAggregate + AC.bimapCodec checkForDup id + $ AC.object "FunctionCustomRootFields" + $ FunctionCustomRootFields + <$> AC.optionalFieldWith' "function" graphQLFieldNameCodec + AC..= _fcrfFunction + <*> AC.optionalFieldWith' "function_aggregate" graphQLFieldNameCodec + AC..= _fcrfFunctionAggregate where checkForDup (FunctionCustomRootFields (Just f) (Just fa)) | f == fa = - Left $ - T.unpack $ - "the following custom root field names are duplicated: " <> toTxt f <<> " and " <>> toTxt fa + Left + $ T.unpack + $ "the following custom root field names are duplicated: " + <> toTxt f + <<> " and " + <>> toTxt fa checkForDup fields = Right fields instance ToJSON FunctionCustomRootFields where @@ -165,10 +172,12 @@ instance FromJSON FunctionCustomRootFields where case (function, functionAggregate) of (Just f, Just fa) | f == fa -> - fail $ - T.unpack $ - "the following custom root field names are duplicated: " - <> toTxt f <<> " and " <>> toTxt fa + fail + $ T.unpack + $ "the following custom root field names are duplicated: " + <> toTxt f + <<> " and " + <>> toTxt fa _ -> pure () @@ -209,9 +218,9 @@ data FunctionInfo (b :: BackendType) = FunctionInfo } deriving (Generic) -deriving instance Backend b => Show (FunctionInfo b) +deriving instance (Backend b) => Show (FunctionInfo b) -deriving instance Backend b => Eq (FunctionInfo b) +deriving instance (Backend b) => Eq (FunctionInfo b) instance (Backend b) => ToJSON (FunctionInfo b) where toJSON = genericToJSON hasuraJSON @@ -224,9 +233,9 @@ data TrackableFunctionInfo b = TrackableFunctionInfo } deriving (Generic) -deriving instance Backend b => Show (TrackableFunctionInfo b) +deriving instance (Backend b) => Show (TrackableFunctionInfo b) -deriving instance Backend b => Eq (TrackableFunctionInfo b) +deriving instance (Backend b) => Eq (TrackableFunctionInfo b) instance (Backend b) => ToJSON (TrackableFunctionInfo b) where toJSON (TrackableFunctionInfo name volitility) = @@ -239,9 +248,9 @@ newtype TrackableTableInfo b = TrackableTableInfo {tfTableiName :: TableName b} deriving (Generic) -deriving instance Backend b => Show (TrackableTableInfo b) +deriving instance (Backend b) => Show (TrackableTableInfo b) -deriving instance Backend b => Eq (TrackableTableInfo b) +deriving instance (Backend b) => Eq (TrackableTableInfo b) instance (Backend b) => ToJSON (TrackableTableInfo b) where toJSON (TrackableTableInfo ti) = object ["name" Data.Aeson..= ti] @@ -252,9 +261,9 @@ data TrackableInfo b = TrackableInfo } deriving (Generic) -deriving instance Backend b => Show (TrackableInfo b) +deriving instance (Backend b) => Show (TrackableInfo b) -deriving instance Backend b => Eq (TrackableInfo 
b) +deriving instance (Backend b) => Eq (TrackableInfo b) instance (Backend b) => ToJSON (TrackableInfo b) where toJSON (TrackableInfo functions tables) = @@ -281,32 +290,43 @@ data FunctionConfig b = FunctionConfig } deriving (Generic) -deriving stock instance Backend b => Show (FunctionConfig b) +deriving stock instance (Backend b) => Show (FunctionConfig b) -deriving stock instance Backend b => Eq (FunctionConfig b) +deriving stock instance (Backend b) => Eq (FunctionConfig b) -instance Backend b => NFData (FunctionConfig b) +instance (Backend b) => NFData (FunctionConfig b) -instance Backend b => HasCodec (FunctionConfig b) where +instance (Backend b) => HasCodec (FunctionConfig b) where codec = - AC.object "FunctionConfig" $ - FunctionConfig - <$> AC.optionalField' "session_argument" AC..= _fcSessionArgument - <*> AC.optionalField' "exposed_as" AC..= _fcExposedAs - <*> AC.optionalFieldWithDefault' "custom_root_fields" emptyFunctionCustomRootFields AC..= _fcCustomRootFields - <*> AC.optionalFieldWith' "custom_name" graphQLFieldNameCodec AC..= _fcCustomName - <*> AC.optionalFieldWith' "response" codec AC..= _fcResponse + AC.object "FunctionConfig" + $ FunctionConfig + <$> AC.optionalField' "session_argument" + AC..= _fcSessionArgument + <*> AC.optionalField' "exposed_as" + AC..= _fcExposedAs + <*> AC.optionalFieldWithDefault' "custom_root_fields" emptyFunctionCustomRootFields + AC..= _fcCustomRootFields + <*> AC.optionalFieldWith' "custom_name" graphQLFieldNameCodec + AC..= _fcCustomName + <*> AC.optionalFieldWith' "response" codec + AC..= _fcResponse -instance Backend b => FromJSON (FunctionConfig b) where +instance (Backend b) => FromJSON (FunctionConfig b) where parseJSON = withObject "FunctionConfig" $ \obj -> FunctionConfig - <$> obj .:? "session_argument" - <*> obj .:? "exposed_as" - <*> obj .:? "custom_root_fields" .!= emptyFunctionCustomRootFields - <*> obj .:? "custom_name" - <*> obj .:? "response" + <$> obj + .:? "session_argument" + <*> obj + .:? "exposed_as" + <*> obj + .:? "custom_root_fields" + .!= emptyFunctionCustomRootFields + <*> obj + .:? "custom_name" + <*> obj + .:? "response" -instance Backend b => ToJSON (FunctionConfig b) where +instance (Backend b) => ToJSON (FunctionConfig b) where toJSON = genericToJSON hasuraJSON {omitNothingFields = True} toEncoding = genericToEncoding hasuraJSON {omitNothingFields = True} @@ -318,11 +338,11 @@ type DBFunctionsMetadata b = HashMap (FunctionName b) (FunctionOverloads b) newtype FunctionOverloads b = FunctionOverloads {getFunctionOverloads :: NonEmpty (RawFunctionInfo b)} -deriving newtype instance Backend b => Eq (FunctionOverloads b) +deriving newtype instance (Backend b) => Eq (FunctionOverloads b) -deriving newtype instance Backend b => Show (FunctionOverloads b) +deriving newtype instance (Backend b) => Show (FunctionOverloads b) -deriving newtype instance FromJSON (RawFunctionInfo b) => FromJSON (FunctionOverloads b) +deriving newtype instance (FromJSON (RawFunctionInfo b)) => FromJSON (FunctionOverloads b) data FunctionArgsExpG a = FunctionArgsExp { _faePositional :: [a], diff --git a/server/src-lib/Hasura/Function/Metadata.hs b/server/src-lib/Hasura/Function/Metadata.hs index 8b3da314ead..35021390ac5 100644 --- a/server/src-lib/Hasura/Function/Metadata.hs +++ b/server/src-lib/Hasura/Function/Metadata.hs @@ -42,15 +42,15 @@ instance (Backend b) => FromJSON (FunctionMetadata b) where parseJSON = withObject "FunctionMetadata" $ \o -> FunctionMetadata <$> o - .: "function" + .: "function" <*> o - .:? 
"configuration" - .!= emptyFunctionConfig + .:? "configuration" + .!= emptyFunctionConfig <*> o - .:? "permissions" - .!= [] + .:? "permissions" + .!= [] <*> o - .:? "comment" + .:? "comment" instance (Backend b) => HasCodec (FunctionMetadata b) where codec = @@ -63,14 +63,14 @@ instance (Backend b) => HasCodec (FunctionMetadata b) where ) $ AC.object (backendPrefix @b <> "FunctionMetadata") $ FunctionMetadata - <$> requiredField "function" nameDoc - AC..= _fmFunction + <$> requiredField "function" nameDoc + AC..= _fmFunction <*> optionalFieldWithOmittedDefault "configuration" emptyFunctionConfig configDoc - AC..= _fmConfiguration + AC..= _fmConfiguration <*> optionalFieldWithOmittedDefault' "permissions" [] - AC..= _fmPermissions + AC..= _fmPermissions <*> optionalField' "comment" - AC..= _fmComment + AC..= _fmComment where nameDoc = "Name of the SQL function" configDoc = "Configuration for the SQL function" diff --git a/server/src-lib/Hasura/GC.hs b/server/src-lib/Hasura/GC.hs index 64dc8ecf6cd..5140f1faa5f 100644 --- a/server/src-lib/Hasura/GC.hs +++ b/server/src-lib/Hasura/GC.hs @@ -60,19 +60,19 @@ ourIdleGC (Logger logger) idleInterval minGCInterval maxNoGCInterval = -- a major GC was run since last iteration (cool!), reset timer: if - | major_gcs > major_gcs_prev -> do - startTimer >>= go gcs major_gcs + | major_gcs > major_gcs_prev -> do + startTimer >>= go gcs major_gcs - -- we are idle and its a good time to do a GC, or we're overdue and must run a GC: - | areIdle || areOverdue -> do - when (areOverdue && not areIdle) $ - logger $ - UnstructuredLog LevelWarn $ - "Overdue for a major GC: forcing one even though we don't appear to be idle" - performMajorGC - startTimer >>= go (gcs + 1) (major_gcs + 1) + -- we are idle and its a good time to do a GC, or we're overdue and must run a GC: + | areIdle || areOverdue -> do + when (areOverdue && not areIdle) + $ logger + $ UnstructuredLog LevelWarn + $ "Overdue for a major GC: forcing one even though we don't appear to be idle" + performMajorGC + startTimer >>= go (gcs + 1) (major_gcs + 1) - -- else keep the timer running, waiting for us to go idle: - | otherwise -> do - C.sleep idleInterval - go gcs major_gcs timerSinceLastMajorGC + -- else keep the timer running, waiting for us to go idle: + | otherwise -> do + C.sleep idleInterval + go gcs major_gcs timerSinceLastMajorGC diff --git a/server/src-lib/Hasura/GraphQL/Analyse.hs b/server/src-lib/Hasura/GraphQL/Analyse.hs index d8a57f2090f..ef8f0e8394e 100644 --- a/server/src-lib/Hasura/GraphQL/Analyse.hs +++ b/server/src-lib/Hasura/GraphQL/Analyse.hs @@ -145,8 +145,8 @@ analyzeGraphQLQuery schema G.TypedOperationDefinition {..} = runAnalysis schema throwDiagnosis RootTypeNotAnObject -- analyze the variables variables <- analyzeVariables _todVariableDefinitions - pure $ - Structure + pure + $ Structure (fromMaybe mempty selection) variables @@ -169,8 +169,9 @@ analyzeObjectSelectionSet (G.ObjectTypeDefinition {..}) selectionSet = do G.SelectionInlineFragment inlineFrag -> mconcat <$> traverse analyzeSelection (G._ifSelectionSet inlineFrag) G.SelectionField field@G.Field {..} -> - fmap join $ - withField _fName $ withCatchAndRecord do + fmap join + $ withField _fName + $ withCatchAndRecord do -- attempt to find that field in the object's definition G.FieldDefinition {..} <- findDefinition _fName @@ -207,21 +208,21 @@ analyzeObjectSelectionSet (G.ObjectTypeDefinition {..}) selectionSet = do mergeFields name field1 field2 = case (field1, field2) of -- both are scalars: we check that they're the 
same (FieldScalarInfo t1 s1, FieldScalarInfo t2 _) -> do - when (t1 /= t2) $ - throwDiagnosis $ - MismatchedFields name t1 t2 + when (t1 /= t2) + $ throwDiagnosis + $ MismatchedFields name t1 t2 pure $ FieldScalarInfo t1 s1 -- both are enums: we check that they're the same (FieldEnumInfo t1 e1, FieldEnumInfo t2 _) -> do - when (t1 /= t2) $ - throwDiagnosis $ - MismatchedFields name t1 t2 + when (t1 /= t2) + $ throwDiagnosis + $ MismatchedFields name t1 t2 pure $ FieldEnumInfo t1 e1 -- both are objects, we merge their selection sets (FieldObjectInfo t1 o1, FieldObjectInfo t2 o2) -> do - when (t1 /= t2) $ - throwDiagnosis $ - MismatchedFields name t1 t2 + when (t1 /= t2) + $ throwDiagnosis + $ MismatchedFields name t1 t2 mergedSelection <- HashMap.unionWithM mergeFields @@ -250,17 +251,17 @@ analyzeField gType typeDefinition G.Field {..} = case typeDefinition of throwDiagnosis $ InputObjectInOutput $ G._iotdName iotd G.TypeDefinitionScalar std -> do -- scalars do not admit a selection set - unless (null _fSelectionSet) $ - throwDiagnosis $ - ScalarSelectionSet $ - G._stdName std + unless (null _fSelectionSet) + $ throwDiagnosis + $ ScalarSelectionSet + $ G._stdName std pure $ Just $ FieldScalarInfo gType $ ScalarInfo std G.TypeDefinitionEnum etd -> do -- enums do not admit a selection set - unless (null _fSelectionSet) $ - throwDiagnosis $ - EnumSelectionSet $ - G._etdName etd + unless (null _fSelectionSet) + $ throwDiagnosis + $ EnumSelectionSet + $ G._etdName etd pure $ Just $ FieldEnumInfo gType $ EnumInfo etd G.TypeDefinitionUnion _utd -> -- TODO: implement unions @@ -270,18 +271,18 @@ analyzeField gType typeDefinition G.Field {..} = case typeDefinition of pure Nothing G.TypeDefinitionObject otd -> do -- TODO: check field arguments? - when (null _fSelectionSet) $ - throwDiagnosis $ - ObjectMissingSelectionSet $ - G._otdName otd + when (null _fSelectionSet) + $ throwDiagnosis + $ ObjectMissingSelectionSet + $ G._otdName otd subselection <- analyzeObjectSelectionSet otd _fSelectionSet - pure $ - Just $ - FieldObjectInfo gType $ - ObjectInfo - { _oiTypeDefinition = otd, - _oiSelection = subselection - } + pure + $ Just + $ FieldObjectInfo gType + $ ObjectInfo + { _oiTypeDefinition = otd, + _oiSelection = subselection + } -------------------------------------------------------------------------------- -- Variables analysis @@ -364,10 +365,10 @@ newtype Analysis a runAnalysis :: G.SchemaIntrospection -> Analysis a -> (Maybe a, [Text]) runAnalysis schema (Analysis a) = - postProcess $ - runWriter $ - flip runReaderT (pure "$", schema) $ - runExceptT a + postProcess + $ runWriter + $ flip runReaderT (pure "$", schema) + $ runExceptT a where -- if there was an uncaught error, add it to the list postProcess = \case diff --git a/server/src-lib/Hasura/GraphQL/ApolloFederation.hs b/server/src-lib/Hasura/GraphQL/ApolloFederation.hs index 174010ab7b6..bcadfc4b833 100644 --- a/server/src-lib/Hasura/GraphQL/ApolloFederation.hs +++ b/server/src-lib/Hasura/GraphQL/ApolloFederation.hs @@ -62,8 +62,8 @@ anyParser = J.Object obj -> case KMap.lookup typenameKey obj of Just (J.String txt) -> case G.mkName txt of Just tName -> - pure $ - ApolloFederationAnyType + pure + $ ApolloFederationAnyType { afTypename = tName, afPKValues = KMap.delete typenameKey obj } @@ -110,30 +110,34 @@ modifyApolloFedParserFunc cvValue <- case KMap.lookup (K.fromText colName) afPKValues of Nothing -> P.parseError . 
toErrorMessage $ "cannot find " <> colName <> " in _Any type" Just va -> liftQErr $ parseScalarValueColumnType (ciType columnInfo) va - pure $ - IR.BoolField . IR.AVColumn columnInfo . pure . IR.AEQ True . IR.mkParameter $ - ValueNoOrigin $ - ColumnValue {..} + pure + $ IR.BoolField + . IR.AVColumn columnInfo + . pure + . IR.AEQ True + . IR.mkParameter + $ ValueNoOrigin + $ ColumnValue {..} let whereExpr = Just $ IR.BoolAnd $ toList allConstraints sourceName = _siName sourceConfig = _siConfiguration tableName = _tciName _tiCoreInfo queryDBRoot = - IR.QDBR $ - IR.QDBSingleRow $ - IR.AnnSelectG - { IR._asnFields = annField, - IR._asnFrom = IR.FromTable tableName, - IR._asnPerm = selectPermissions, - IR._asnArgs = IR.noSelectArgs {IR._saWhere = whereExpr}, - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = tCase - } - pure $ - IR.RFDB sourceName $ - AB.mkAnyBackend $ - IR.SourceConfigWith sourceConfig Nothing $ - queryDBRoot + IR.QDBR + $ IR.QDBSingleRow + $ IR.AnnSelectG + { IR._asnFields = annField, + IR._asnFrom = IR.FromTable tableName, + IR._asnPerm = selectPermissions, + IR._asnArgs = IR.noSelectArgs {IR._saWhere = whereExpr}, + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = tCase + } + pure + $ IR.RFDB sourceName + $ AB.mkAnyBackend + $ IR.SourceConfigWith sourceConfig Nothing + $ queryDBRoot where liftQErr = either (P.parseError . toErrorMessage . qeError) pure . runExcept @@ -176,11 +180,11 @@ apolloRootFields apolloFederationStatus apolloFedTableParsers = -- `serviceField` is essential to connect hasura to gateway, `entityField` -- is essential only if we have types that has @key directive if - | isApolloFederationEnabled apolloFederationStatus && not (null apolloFedTableParsers) -> - [serviceField, entityField] - | isApolloFederationEnabled apolloFederationStatus -> - [serviceField] - | otherwise -> [] + | isApolloFederationEnabled apolloFederationStatus && not (null apolloFedTableParsers) -> + [serviceField, entityField] + | isApolloFederationEnabled apolloFederationStatus -> + [serviceField] + | otherwise -> [] -- helpers @@ -228,16 +232,17 @@ generateSDLFromIntrospection genSdlType (G.SchemaIntrospection sIntro) = sdl getSchemaDocument :: G.SchemaDocument getSchemaDocument = - G.SchemaDocument $ - G.TypeSystemDefinitionSchema (G.SchemaDefinition Nothing rootOpTypeDefns) : typeDefns + G.SchemaDocument + $ G.TypeSystemDefinitionSchema (G.SchemaDefinition Nothing rootOpTypeDefns) + : typeDefns -- | Filter out schema components from sdl which are not required by apollo federation filterTypeDefinition :: G.TypeDefinition possibleTypes G.InputValueDefinition -> G.TypeDefinition possibleTypes G.InputValueDefinition filterTypeDefinition = \case G.TypeDefinitionObject (G.ObjectTypeDefinition a b c d e) -> -- We are skipping the schema types here - G.TypeDefinitionObject $ - G.ObjectTypeDefinition a b c d (filter (not . T.isPrefixOf "__" . G.unName . G._fldName) e) + G.TypeDefinitionObject + $ G.ObjectTypeDefinition a b c d (filter (not . T.isPrefixOf "__" . G.unName . 
G._fldName) e) typeDef -> typeDef generateSDLWithAllTypes :: G.SchemaIntrospection -> Text diff --git a/server/src-lib/Hasura/GraphQL/Execute.hs b/server/src-lib/Hasura/GraphQL/Execute.hs index 7f09c75192d..518f2654706 100644 --- a/server/src-lib/Hasura/GraphQL/Execute.hs +++ b/server/src-lib/Hasura/GraphQL/Execute.hs @@ -131,61 +131,64 @@ buildSubscriptionPlan userInfo rootFields parameterizedQueryHash reqHeaders oper ((liveQueryOnSourceFields, noRelationActionFields), streamingFields) <- foldlM go ((mempty, mempty), mempty) (InsOrdHashMap.toList rootFields) if - | null liveQueryOnSourceFields && null streamingFields -> - pure $ SEAsyncActionsWithNoRelationships noRelationActionFields - | null noRelationActionFields -> do - if - | null liveQueryOnSourceFields -> do - case InsOrdHashMap.toList streamingFields of - [] -> throw500 "empty selset for subscription" - [(rootFieldName, (sourceName, exists))] -> do - subscriptionPlan <- AB.dispatchAnyBackend @EB.BackendExecute - exists - \(IR.SourceConfigWith sourceConfig queryTagsConfig (IR.QDBR qdb) :: IR.SourceConfigWith db b) -> do - let subscriptionQueryTagsAttributes = encodeQueryTags $ QTLiveQuery $ LivequeryMetadata rootFieldName parameterizedQueryHash - queryTagsComment = Tagged.untag $ createQueryTags @m subscriptionQueryTagsAttributes queryTagsConfig - SubscriptionQueryPlan . AB.mkAnyBackend . MultiplexedSubscriptionQueryPlan - <$> runReaderT - ( EB.mkDBStreamingSubscriptionPlan - userInfo - sourceName - sourceConfig - (rootFieldName, qdb) - reqHeaders - operationName - ) - queryTagsComment - pure $ - SEOnSourceDB $ - SSStreaming rootFieldName $ - (sourceName, subscriptionPlan) - _ -> throw400 NotSupported "exactly one root field is allowed for streaming subscriptions" - | null streamingFields -> do - let allActionIds = HS.fromList $ map fst $ lefts $ toList liveQueryOnSourceFields - pure $ - SEOnSourceDB $ - SSLivequery allActionIds $ \actionLogMap -> do - sourceSubFields <- for liveQueryOnSourceFields $ \case - Right x -> pure x - Left (actionId, (srcConfig, dbExecution)) -> do - let sourceName = EA._aaqseSource dbExecution - actionLogResponse <- - HashMap.lookup actionId actionLogMap - `onNothing` throw500 "unexpected: cannot lookup action_id in the map" - let selectAST = EA._aaqseSelectBuilder dbExecution $ actionLogResponse - queryDB = case EA._aaqseJsonAggSelect dbExecution of - JASMultipleRows -> IR.QDBMultipleRows selectAST - JASSingleObject -> IR.QDBSingleRow selectAST - pure $ (sourceName, AB.mkAnyBackend $ IR.SourceConfigWith srcConfig Nothing (IR.QDBR queryDB)) + | null liveQueryOnSourceFields && null streamingFields -> + pure $ SEAsyncActionsWithNoRelationships noRelationActionFields + | null noRelationActionFields -> do + if + | null liveQueryOnSourceFields -> do + case InsOrdHashMap.toList streamingFields of + [] -> throw500 "empty selset for subscription" + [(rootFieldName, (sourceName, exists))] -> do + subscriptionPlan <- AB.dispatchAnyBackend @EB.BackendExecute + exists + \(IR.SourceConfigWith sourceConfig queryTagsConfig (IR.QDBR qdb) :: IR.SourceConfigWith db b) -> do + let subscriptionQueryTagsAttributes = encodeQueryTags $ QTLiveQuery $ LivequeryMetadata rootFieldName parameterizedQueryHash + queryTagsComment = Tagged.untag $ createQueryTags @m subscriptionQueryTagsAttributes queryTagsConfig + SubscriptionQueryPlan + . AB.mkAnyBackend + . 
MultiplexedSubscriptionQueryPlan + <$> runReaderT + ( EB.mkDBStreamingSubscriptionPlan + userInfo + sourceName + sourceConfig + (rootFieldName, qdb) + reqHeaders + operationName + ) + queryTagsComment + pure + $ SEOnSourceDB + $ SSStreaming rootFieldName + $ (sourceName, subscriptionPlan) + _ -> throw400 NotSupported "exactly one root field is allowed for streaming subscriptions" + | null streamingFields -> do + let allActionIds = HS.fromList $ map fst $ lefts $ toList liveQueryOnSourceFields + pure + $ SEOnSourceDB + $ SSLivequery allActionIds + $ \actionLogMap -> do + sourceSubFields <- for liveQueryOnSourceFields $ \case + Right x -> pure x + Left (actionId, (srcConfig, dbExecution)) -> do + let sourceName = EA._aaqseSource dbExecution + actionLogResponse <- + HashMap.lookup actionId actionLogMap + `onNothing` throw500 "unexpected: cannot lookup action_id in the map" + let selectAST = EA._aaqseSelectBuilder dbExecution $ actionLogResponse + queryDB = case EA._aaqseJsonAggSelect dbExecution of + JASMultipleRows -> IR.QDBMultipleRows selectAST + JASSingleObject -> IR.QDBSingleRow selectAST + pure $ (sourceName, AB.mkAnyBackend $ IR.SourceConfigWith srcConfig Nothing (IR.QDBR queryDB)) - case InsOrdHashMap.toList sourceSubFields of - [] -> throw500 "empty selset for subscription" - ((rootFieldName, sub) : _) -> buildAction sub sourceSubFields rootFieldName - | otherwise -> throw400 NotSupported "streaming and livequery subscriptions cannot be executed in the same subscription" - | otherwise -> - throw400 - NotSupported - "async action queries with no relationships aren't expected to mix with normal source database queries" + case InsOrdHashMap.toList sourceSubFields of + [] -> throw500 "empty selset for subscription" + ((rootFieldName, sub) : _) -> buildAction sub sourceSubFields rootFieldName + | otherwise -> throw400 NotSupported "streaming and livequery subscriptions cannot be executed in the same subscription" + | otherwise -> + throw400 + NotSupported + "async action queries with no relationships aren't expected to mix with normal source database queries" where go :: ( ( RootFieldMap @@ -223,16 +226,16 @@ buildSubscriptionPlan userInfo rootFields parameterizedQueryHash reqHeaders oper _ -> LiveQuery newQDB <- AB.traverseBackend @EB.BackendExecute e \(IR.SourceConfigWith srcConfig queryTagsConfig (IR.QDBR qdb)) -> do let (newQDB, remoteJoins) = RJ.getRemoteJoinsQueryDB qdb - unless (isNothing remoteJoins) $ - throw400 NotSupported "Remote relationships are not allowed in subscriptions" + unless (isNothing remoteJoins) + $ throw400 NotSupported "Remote relationships are not allowed in subscriptions" pure $ IR.SourceConfigWith srcConfig queryTagsConfig (IR.QDBR newQDB) case subscriptionType of Streaming -> pure (accLiveQueryFields, InsOrdHashMap.insert gName (src, newQDB) accStreamingFields) LiveQuery -> pure (first (InsOrdHashMap.insert gName (Right (src, newQDB))) accLiveQueryFields, accStreamingFields) IR.RFAction action -> do let (noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsActionQuery action - unless (isNothing remoteJoins) $ - throw400 NotSupported "Remote relationships are not allowed in subscriptions" + unless (isNothing remoteJoins) + $ throw400 NotSupported "Remote relationships are not allowed in subscriptions" case noRelsDBAST of IR.AQAsync q -> do let actionId = IR._aaaqActionId q @@ -256,7 +259,9 @@ buildSubscriptionPlan userInfo rootFields parameterizedQueryHash reqHeaders oper qdbs <- traverse (checkField @b sourceName) allFields let subscriptionQueryTagsAttributes = 
encodeQueryTags $ QTLiveQuery $ LivequeryMetadata rootFieldName parameterizedQueryHash let queryTagsComment = Tagged.untag $ createQueryTags @m subscriptionQueryTagsAttributes queryTagsConfig - SubscriptionQueryPlan . AB.mkAnyBackend . MultiplexedSubscriptionQueryPlan + SubscriptionQueryPlan + . AB.mkAnyBackend + . MultiplexedSubscriptionQueryPlan <$> runReaderT (EB.mkLiveQuerySubscriptionPlan userInfo sourceName sourceConfig (_rfaNamespace rootFieldName) qdbs reqHeaders operationName) queryTagsComment pure (sourceName, subscriptionPlan) @@ -287,9 +292,9 @@ checkQueryInAllowlist allowListStatus allowlistMode userInfo req schemaCache = let query = G.ExecutableDocument . unGQLExecDoc $ _grQuery req allowlist = scAllowlist schemaCache allowed = allowlistAllowsQuery allowlist allowlistMode role query - unless allowed $ - modifyQErr modErr $ - throw400 ValidationFailed "query is not allowed" + unless allowed + $ modifyQErr modErr + $ throw400 ValidationFailed "query is not allowed" where role = _uiRole userInfo modErr e = @@ -366,8 +371,8 @@ getResolvedExecPlan Tracing.attachMetadata [("parameterized_query_hash", bsToTxt $ unParamQueryHash parameterizedQueryHash)] pure (parameterizedQueryHash, QueryExecutionPlan executionPlan queryRootFields dirMap) G.TypedOperationDefinition G.OperationTypeMutation _ varDefs directives inlinedSelSet -> do - when (readOnlyMode == ReadOnlyModeEnabled) $ - throw400 NotSupported "Mutations are not allowed when read-only mode is enabled" + when (readOnlyMode == ReadOnlyModeEnabled) + $ throw400 NotSupported "Mutations are not allowed when read-only mode is enabled" (executionPlan, parameterizedQueryHash) <- EM.convertMutationSelectionSet env @@ -408,8 +413,8 @@ getResolvedExecPlan [] -> throw500 "empty selset for subscription" [_] -> pure () _ -> - unless (allowMultipleRootFields && isSingleNamespace unpreparedAST) $ - throw400 ValidationFailed "subscriptions must select one top level field" + unless (allowMultipleRootFields && isSingleNamespace unpreparedAST) + $ throw400 ValidationFailed "subscriptions must select one top level field" subscriptionPlan <- buildSubscriptionPlan userInfo unpreparedAST parameterizedQueryHash reqHeaders maybeOperationName pure (parameterizedQueryHash, SubscriptionExecutionPlan subscriptionPlan) -- the parameterized query hash is calculated here because it is used in multiple diff --git a/server/src-lib/Hasura/GraphQL/Execute/Action.hs b/server/src-lib/Hasura/GraphQL/Execute/Action.hs index 0b0f7f993e4..400a9c81e9f 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Action.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Action.hs @@ -137,7 +137,8 @@ asSingleRowJsonResp :: [PG.PrepArg] -> PG.TxE QErr EncJSON asSingleRowJsonResp query args = - runIdentity . PG.getRow + runIdentity + . PG.getRow <$> PG.rawQE dmlTxErrorHandler query args True -- | Synchronously execute webhook handler and resolve response to action "output" @@ -160,8 +161,8 @@ resolveActionExecution httpManager env logger prometheusMetrics IR.AnnActionExec m (ActionWebhookResponse, HTTP.ResponseHeaders) runWebhook = -- TODO: do we need to add the logger as a reader? can't we just give it as an argument? 
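-- Illustrative sketch, not part of this patch: the TODO above asks whether the
-- logger could be threaded through 'runWebhook' as a plain argument rather than
-- a ReaderT environment. Both shapes, using hypothetical stand-ins ('Logger',
-- 'pingWebhook') rather than the real Hasura types:
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Reader (ReaderT, ask)

newtype Logger = Logger (String -> IO ())

-- explicit-argument style
pingWebhook :: Logger -> String -> IO ()
pingWebhook (Logger logIt) url = logIt ("calling webhook: " <> url)

-- ReaderT style, as 'runWebhook' above does it
pingWebhookR :: String -> ReaderT Logger IO ()
pingWebhookR url = do
  Logger logIt <- ask
  liftIO (logIt ("calling webhook: " <> url))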
- flip runReaderT logger $ - callWebhook + flip runReaderT logger + $ callWebhook env httpManager prometheusMetrics @@ -180,20 +181,24 @@ throwUnexpected :: (MonadError QErr m) => Text -> m () throwUnexpected = throw400 Unexpected -- Webhook response object should conform to action output fields -validateResponseObject :: MonadError QErr m => KM.KeyMap J.Value -> IR.ActionOutputFields -> m () +validateResponseObject :: (MonadError QErr m) => KM.KeyMap J.Value -> IR.ActionOutputFields -> m () validateResponseObject obj outputField = do -- Note: Fields not specified in the output are ignored - void $ - flip HashMap.traverseWithKey outputField $ \fieldName fieldTy -> + void + $ flip HashMap.traverseWithKey outputField + $ \fieldName fieldTy -> -- When field is non-nullable, it has to be present in the response with no null value unless (G.isNullable fieldTy) $ case KM.lookup (K.fromText $ G.unName fieldName) obj of Nothing -> - throwUnexpected $ - "field " <> fieldName <<> " expected in webhook response, but not found" + throwUnexpected + $ "field " + <> fieldName + <<> " expected in webhook response, but not found" Just v -> - when (v == J.Null) $ - throwUnexpected $ - "expecting not null value for field " <>> fieldName + when (v == J.Null) + $ throwUnexpected + $ "expecting not null value for field " + <>> fieldName -- Validates the webhook response against the output type validateResponse :: (MonadError QErr m) => J.Value -> GraphQLType -> IR.ActionOutputFields -> m () @@ -203,24 +208,28 @@ validateResponse webhookResponse' outputType outputF = (J.Null, _) -> do unless (isNullableType outputType) $ throwUnexpected "got null for the action webhook response" (J.Number _, (GraphQLType (G.TypeNamed _ name))) -> do - unless (name == GName._Int || name == GName._Float) $ - throwUnexpected $ - "got scalar String for the action webhook response, expecting " <> G.unName name + unless (name == GName._Int || name == GName._Float) + $ throwUnexpected + $ "got scalar String for the action webhook response, expecting " + <> G.unName name (J.Bool _, (GraphQLType (G.TypeNamed _ name))) -> do - unless (name == GName._Boolean) $ - throwUnexpected $ - "got scalar Boolean for the action webhook response, expecting " <> G.unName name + unless (name == GName._Boolean) + $ throwUnexpected + $ "got scalar Boolean for the action webhook response, expecting " + <> G.unName name (J.String _, (GraphQLType (G.TypeNamed _ name))) -> do - unless (name == GName._String || name == GName._ID) $ - throwUnexpected $ - "got scalar String for the action webhook response, expecting " <> G.unName name + unless (name == GName._String || name == GName._ID) + $ throwUnexpected + $ "got scalar String for the action webhook response, expecting " + <> G.unName name (J.Array _, (GraphQLType (G.TypeNamed _ name))) -> throwUnexpected $ "got array for the action webhook response, expecting " <> G.unName name (J.Array objs, (GraphQLType (G.TypeList _ outputType''))) -> do traverse_ (\o -> validateResponse o (GraphQLType outputType'') outputF) objs ((J.Object obj), (GraphQLType (G.TypeNamed _ name))) -> do - when (isInBuiltScalar (G.unName name)) $ - throwUnexpected $ - "got object for the action webhook response, expecting " <> G.unName name + when (isInBuiltScalar (G.unName name)) + $ throwUnexpected + $ "got object for the action webhook response, expecting " + <> G.unName name validateResponseObject obj outputF (_, (GraphQLType (G.TypeList _ _))) -> throwUnexpected $ "expecting array for the action webhook response" @@ -230,8 +239,9 @@ 
makeActionResponseNoRelations :: IR.ActionFields -> GraphQLType -> IR.ActionOutp makeActionResponseNoRelations annFields outputType outputF shouldCheckOutputField webhookResponse = let mkResponseObject :: IR.ActionFields -> KM.KeyMap J.Value -> AO.Value mkResponseObject fields obj = - AO.object $ - flip mapMaybe fields $ \(fieldName, annField) -> + AO.object + $ flip mapMaybe fields + $ \(fieldName, annField) -> let fieldText = getFieldNameTxt fieldName in (fieldText,) <$> case annField of IR.ACFExpression t -> Just $ AO.String t @@ -347,30 +357,31 @@ resolveAsyncActionQuery userInfo annAction = pure $ encJFromOrderedValue $ AO.object resolvedFields IR.ASISource sourceName sourceConfig -> let jsonAggSelect = mkJsonAggSelect outputType - in AAQEOnSourceDB sourceConfig $ - AsyncActionQuerySourceExecution sourceName jsonAggSelect $ \actionLogResponse -> + in AAQEOnSourceDB sourceConfig + $ AsyncActionQuerySourceExecution sourceName jsonAggSelect + $ \actionLogResponse -> let annotatedFields = asyncFields <&> second \case IR.AsyncTypename t -> RS.AFExpression t IR.AsyncOutput annFields -> - RS.AFComputedField () (ComputedFieldName [nonEmptyTextQQ|__action_computed_field|]) $ - RS.CFSTable jsonAggSelect $ - processOutputSelectionSet TF.AEActionResponsePayload outputType definitionList annFields stringifyNumerics + RS.AFComputedField () (ComputedFieldName [nonEmptyTextQQ|__action_computed_field|]) + $ RS.CFSTable jsonAggSelect + $ processOutputSelectionSet TF.AEActionResponsePayload outputType definitionList annFields stringifyNumerics IR.AsyncId -> mkAnnFldFromPGCol idColumn IR.AsyncCreatedAt -> mkAnnFldFromPGCol createdAtColumn IR.AsyncErrors -> mkAnnFldFromPGCol errorsColumn jsonbToRecordSet = QualifiedObject "pg_catalog" $ FunctionName "jsonb_to_recordset" actionLogInput = - IR.UVParameter IR.Unknown $ - ColumnValue (ColumnScalar PGJSONB) $ - PGValJSONB $ - PG.JSONB $ - J.toJSON [actionLogResponse] + IR.UVParameter IR.Unknown + $ ColumnValue (ColumnScalar PGJSONB) + $ PGValJSONB + $ PG.JSONB + $ J.toJSON [actionLogResponse] functionArgs = FunctionArgsExp [TF.AEInput actionLogInput] mempty tableFromExp = - RS.FromFunction jsonbToRecordSet functionArgs $ - Just + RS.FromFunction jsonbToRecordSet functionArgs + $ Just [idColumn, createdAtColumn, responsePayloadColumn, errorsColumn, sessionVarsColumn] tableArguments = RS.noSelectArgs @@ -414,12 +425,12 @@ resolveAsyncActionQuery userInfo annAction = ciMutability = ColumnMutability False False } sessionVarValue = - IR.UVParameter IR.Unknown $ - ColumnValue (ColumnScalar PGJSONB) $ - PGValJSONB $ - PG.JSONB $ - J.toJSON $ - _uiSession userInfo + IR.UVParameter IR.Unknown + $ ColumnValue (ColumnScalar PGJSONB) + $ PGValJSONB + $ PG.JSONB + $ J.toJSON + $ _uiSession userInfo sessionVarsColumnEq = BoolField $ AVColumn sessionVarsColumnInfo [AEQ True sessionVarValue] in -- For non-admin roles, accessing an async action's response should be allowed only for the user -- who initiated the action through mutation. The action's response is accessible for a query/subscription @@ -447,36 +458,37 @@ asyncActionsProcessor :: Maybe GH.GQLQueryText -> m (Forever m) asyncActionsProcessor getEnvHook logger getSCFromRef' getFetchInterval lockedActionEvents gqlQueryText = - return $ - Forever () $ - const $ do - fetchInterval <- liftIO getFetchInterval - case fetchInterval of - -- async actions processor thread is a polling thread, so we sleep - -- for a second in case the fetch interval is not provided and try to - -- get it in the next iteration. 
If the fetch interval is available, - -- we check for async actions to process. - Skip -> liftIO $ sleep $ seconds 1 - Interval sleepTime -> do - actionCache <- scActions <$> liftIO getSCFromRef' - let asyncActions = - HashMap.filter ((== ActionMutation ActionAsynchronous) . (^. aiDefinition . adType)) actionCache - unless (HashMap.null asyncActions) $ do - -- fetch undelivered action events only when there's at least - -- one async action present in the schema cache - asyncInvocationsE <- fetchUndeliveredActionEvents - asyncInvocations <- liftIO $ onLeft asyncInvocationsE mempty - -- save the actions that are currently fetched from the DB to - -- be processed in a TVar (Set LockedActionEventId) and when - -- the action is processed we remove it from the set. This set - -- is maintained because on shutdown of the graphql-engine, we - -- would like to wait for a certain time (see `--graceful-shutdown-time`) - -- during which to complete all the in-flight actions. So, when this - -- locked action events set TVar is empty, it will mean that there are - -- no events that are in the 'processing' state - saveLockedEvents (map (EventId . actionIdToText . _aliId) asyncInvocations) lockedActionEvents - LA.mapConcurrently_ (callHandler actionCache) asyncInvocations - liftIO $ sleep $ milliseconds (unrefine sleepTime) + return + $ Forever () + $ const + $ do + fetchInterval <- liftIO getFetchInterval + case fetchInterval of + -- async actions processor thread is a polling thread, so we sleep + -- for a second in case the fetch interval is not provided and try to + -- get it in the next iteration. If the fetch interval is available, + -- we check for async actions to process. + Skip -> liftIO $ sleep $ seconds 1 + Interval sleepTime -> do + actionCache <- scActions <$> liftIO getSCFromRef' + let asyncActions = + HashMap.filter ((== ActionMutation ActionAsynchronous) . (^. aiDefinition . adType)) actionCache + unless (HashMap.null asyncActions) $ do + -- fetch undelivered action events only when there's at least + -- one async action present in the schema cache + asyncInvocationsE <- fetchUndeliveredActionEvents + asyncInvocations <- liftIO $ onLeft asyncInvocationsE mempty + -- save the actions that are currently fetched from the DB to + -- be processed in a TVar (Set LockedActionEventId) and when + -- the action is processed we remove it from the set. This set + -- is maintained because on shutdown of the graphql-engine, we + -- would like to wait for a certain time (see `--graceful-shutdown-time`) + -- during which to complete all the in-flight actions. So, when this + -- locked action events set TVar is empty, it will mean that there are + -- no events that are in the 'processing' state + saveLockedEvents (map (EventId . actionIdToText . 
_aliId) asyncInvocations) lockedActionEvents + LA.mapConcurrently_ (callHandler actionCache) asyncInvocations + liftIO $ sleep $ milliseconds (unrefine sleepTime) where callHandler :: ActionCache -> ActionLogItem -> m () callHandler actionCache actionLogItem = @@ -503,22 +515,22 @@ asyncActionsProcessor getEnvHook logger getSCFromRef' getFetchInterval lockedAct eitherRes <- do env <- liftIO getEnvHook AppEnv {..} <- askAppEnv - runExceptT $ - flip runReaderT logger $ - callWebhook - env - appEnvManager - appEnvPrometheusMetrics - outputType - outputFields - reqHeaders - confHeaders - forwardClientHeaders - webhookUrl - (ActionWebhookPayload actionContext sessionVariables inputPayload gqlQueryText) - timeout - metadataRequestTransform - metadataResponseTransform + runExceptT + $ flip runReaderT logger + $ callWebhook + env + appEnvManager + appEnvPrometheusMetrics + outputType + outputFields + reqHeaders + confHeaders + forwardClientHeaders + webhookUrl + (ActionWebhookPayload actionContext sessionVariables inputPayload gqlQueryText) + timeout + metadataRequestTransform + metadataResponseTransform resE <- setActionStatus actionId $ case eitherRes of Left e -> AASError e @@ -611,9 +623,9 @@ callWebhook case httpResponse of Left e -> - throw500WithDetail "http exception when calling webhook" $ - J.toJSON $ - ActionInternalError (getHttpExceptionJson (ShowErrorInfo True) $ HttpException e) requestInfo Nothing + throw500WithDetail "http exception when calling webhook" + $ J.toJSON + $ ActionInternalError (getHttpExceptionJson (ShowErrorInfo True) $ HttpException e) requestInfo Nothing Right responseWreq -> do -- TODO(SOLOMON): Remove 'wreq' let responseBody = responseWreq ^. Wreq.responseBody @@ -621,9 +633,10 @@ callWebhook actionName = _acName $ _awpAction actionWebhookPayload responseStatus = responseWreq ^. Wreq.responseStatus mkResponseInfo respBody = - ActionResponseInfo (HTTP.statusCode responseStatus) respBody $ - toHeadersConf $ - responseWreq ^. Wreq.responseHeaders + ActionResponseInfo (HTTP.statusCode responseStatus) respBody + $ toHeadersConf + $ responseWreq + ^. 
Wreq.responseHeaders transformedResponseBody <- case metadataResponseTransform of Nothing -> pure responseBody @@ -653,40 +666,40 @@ callWebhook case J.eitherDecode transformedResponseBody of Left e -> do let responseInfo = mkResponseInfo $ J.String $ bsToTxt $ BL.toStrict responseBody - throw500WithDetail "not a valid json response from webhook" $ - J.toJSON $ - ActionInternalError (J.toJSON $ "invalid json: " <> e) requestInfo $ - Just responseInfo + throw500WithDetail "not a valid json response from webhook" + $ J.toJSON + $ ActionInternalError (J.toJSON $ "invalid json: " <> e) requestInfo + $ Just responseInfo Right responseValue -> do let responseInfo = mkResponseInfo responseValue addInternalToErr e = let actionInternalError = - J.toJSON $ - ActionInternalError (J.String "unexpected response") requestInfo $ - Just responseInfo + J.toJSON + $ ActionInternalError (J.String "unexpected response") requestInfo + $ Just responseInfo in e {qeInternal = Just $ ExtraInternal actionInternalError} if - | HTTP.statusIsSuccessful responseStatus -> do - modifyQErr addInternalToErr $ do - webhookResponse <- decodeValue responseValue - validateResponse responseValue outputType outputFields - pure (webhookResponse, mkSetCookieHeaders responseWreq) - | HTTP.statusIsClientError responseStatus -> do - ActionWebhookErrorResponse message maybeCode maybeExtensions <- - modifyQErr addInternalToErr $ decodeValue responseValue - let code = maybe Unexpected ActionWebhookCode maybeCode - qErr = QErr [] responseStatus message code (ExtraExtensions <$> maybeExtensions) - throwError qErr - | otherwise -> do - let err = - J.toJSON $ - "expecting 2xx or 4xx status code, but found " - ++ show (HTTP.statusCode responseStatus) - throw500WithDetail "internal error" $ - J.toJSON $ - ActionInternalError err requestInfo $ - Just responseInfo + | HTTP.statusIsSuccessful responseStatus -> do + modifyQErr addInternalToErr $ do + webhookResponse <- decodeValue responseValue + validateResponse responseValue outputType outputFields + pure (webhookResponse, mkSetCookieHeaders responseWreq) + | HTTP.statusIsClientError responseStatus -> do + ActionWebhookErrorResponse message maybeCode maybeExtensions <- + modifyQErr addInternalToErr $ decodeValue responseValue + let code = maybe Unexpected ActionWebhookCode maybeCode + qErr = QErr [] responseStatus message code (ExtraExtensions <$> maybeExtensions) + throwError qErr + | otherwise -> do + let err = + J.toJSON + $ "expecting 2xx or 4xx status code, but found " + ++ show (HTTP.statusCode responseStatus) + throw500WithDetail "internal error" + $ J.toJSON + $ ActionInternalError err requestInfo + $ Just responseInfo processOutputSelectionSet :: TF.ArgumentExp v -> @@ -700,11 +713,11 @@ processOutputSelectionSet tableRowInput actionOutputType definitionList actionFi where annotatedFields = fmap actionFieldToAnnField <$> actionFields jsonbToPostgresRecordFunction = - QualifiedObject "pg_catalog" $ - FunctionName $ - if isListType actionOutputType - then "jsonb_to_recordset" -- Multirow array response - else "jsonb_to_record" -- Single object response + QualifiedObject "pg_catalog" + $ FunctionName + $ if isListType actionOutputType + then "jsonb_to_recordset" -- Multirow array response + else "jsonb_to_record" -- Single object response functionArgs = FunctionArgsExp [tableRowInput] mempty selectFrom = RS.FromFunction jsonbToPostgresRecordFunction functionArgs $ Just definitionList @@ -725,7 +738,8 @@ insertActionTx :: J.Value -> PG.TxE QErr ActionId insertActionTx actionName 
sessionVariables httpHeaders inputArgsPayload = - runIdentity . PG.getRow + runIdentity + . PG.getRow <$> PG.withQE defaultTxErrorHandler [PG.sql| diff --git a/server/src-lib/Hasura/GraphQL/Execute/Action/Subscription.hs b/server/src-lib/Hasura/GraphQL/Execute/Action/Subscription.hs index 24dc76efbef..7776cdeeef7 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Action/Subscription.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Action/Subscription.hs @@ -78,8 +78,8 @@ asyncActionSubscriptionsProcessor subsState = forever do -- There's no point in holding the operation in the state. removeAsyncActionLiveQuery subsState opId Just newLqId -> - addAsyncActionLiveQuery subsState opId actionIds onError $ - LAAQOnSourceDB $ - LiveAsyncActionQueryOnSource newLqId actionLogMap lqRestarter + addAsyncActionLiveQuery subsState opId actionIds onError + $ LAAQOnSourceDB + $ LiveAsyncActionQueryOnSource newLqId actionLogMap lqRestarter -- Sleep for a second liftIO $ C.sleep $ seconds 1 diff --git a/server/src-lib/Hasura/GraphQL/Execute/Backend.hs b/server/src-lib/Hasura/GraphQL/Execute/Backend.hs index 5afeb7e5c2c..2dcc9df58a4 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Backend.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Backend.hs @@ -205,8 +205,8 @@ convertRemoteSourceRelationship argumentIdField = ( fromCol @b argumentIdColumn, - AFColumn $ - AnnColumnField + AFColumn + $ AnnColumnField { _acfColumn = argumentIdColumn, _acfType = argumentIdColumnType, _acfAsText = False, diff --git a/server/src-lib/Hasura/GraphQL/Execute/Common.hs b/server/src-lib/Hasura/GraphQL/Execute/Common.hs index c978ccb325a..1b42c98274a 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Common.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Common.hs @@ -28,7 +28,7 @@ import Network.Wai.Extended qualified as Wai -- | TODO (from master): Limitation: This parses the query, which is not ideal if we already -- have the query cached. The parsing happens unnecessarily. But getting this to -- either return a plan or parse was tricky and complicated. 
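-- Illustrative sketch, not part of this patch: the ReaderT/ExceptT/TraceT
-- instances below all follow one shape, delegating each method through
-- 'lift'. A minimal standalone version of that pattern (the class
-- 'MonadCheck' is a hypothetical stand-in):
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Reader (ReaderT)

class (Monad m) => MonadCheck m where
  runCheck :: String -> m Bool

instance (MonadCheck m) => MonadCheck (ReaderT r m) where
  runCheck = lift . runCheck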
-class Monad m => MonadGQLExecutionCheck m where +class (Monad m) => MonadGQLExecutionCheck m where checkGQLExecution :: UserInfo -> ([HTTP.Header], Wai.IpAddress) -> @@ -54,7 +54,7 @@ class Monad m => MonadGQLExecutionCheck m where SchemaCache -> m (Either QErr ()) -instance MonadGQLExecutionCheck m => MonadGQLExecutionCheck (ReaderT r m) where +instance (MonadGQLExecutionCheck m) => MonadGQLExecutionCheck (ReaderT r m) where checkGQLExecution ui det enableAL sc req requestId = lift $ checkGQLExecution ui det enableAL sc req requestId @@ -64,7 +64,7 @@ instance MonadGQLExecutionCheck m => MonadGQLExecutionCheck (ReaderT r m) where checkGQLBatchedReqs userInfo requestId reqs sc = lift $ checkGQLBatchedReqs userInfo requestId reqs sc -instance MonadGQLExecutionCheck m => MonadGQLExecutionCheck (ExceptT e m) where +instance (MonadGQLExecutionCheck m) => MonadGQLExecutionCheck (ExceptT e m) where checkGQLExecution ui det enableAL sc req requestId = lift $ checkGQLExecution ui det enableAL sc req requestId @@ -74,7 +74,7 @@ instance MonadGQLExecutionCheck m => MonadGQLExecutionCheck (ExceptT e m) where checkGQLBatchedReqs userInfo requestId reqs sc = lift $ checkGQLBatchedReqs userInfo requestId reqs sc -instance MonadGQLExecutionCheck m => MonadGQLExecutionCheck (Tracing.TraceT m) where +instance (MonadGQLExecutionCheck m) => MonadGQLExecutionCheck (Tracing.TraceT m) where checkGQLExecution ui det enableAL sc req requestId = lift $ checkGQLExecution ui det enableAL sc req requestId diff --git a/server/src-lib/Hasura/GraphQL/Execute/Inline.hs b/server/src-lib/Hasura/GraphQL/Execute/Inline.hs index 4d88be2e84e..a896b8469ea 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Inline.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Inline.hs @@ -79,7 +79,7 @@ type MonadInline m = MonadState InlineState m ) -type InlineMT m a = MonadError QErr m => (StateT InlineState (ReaderT InlineEnv m)) a +type InlineMT m a = (MonadError QErr m) => (StateT InlineState (ReaderT InlineEnv m)) a type InlineM a = InlineMT (Except QErr) a @@ -130,15 +130,15 @@ inlineSelectionSet fragmentDefinitions selectionSet = do -- check. -- TODO: Do this check using a feature flag isFragmentValidationEnabled = False - when (isFragmentValidationEnabled && (usedFragmentNames /= definedFragmentNames)) $ - throw400 ValidationFailed $ - "following fragment(s) have been defined, but have not been used in the query - " - <> T.concat - ( L.intersperse ", " $ - map unName $ - Set.toList $ - Set.difference definedFragmentNames usedFragmentNames - ) + when (isFragmentValidationEnabled && (usedFragmentNames /= definedFragmentNames)) + $ throw400 ValidationFailed + $ "following fragment(s) have been defined, but have not been used in the query - " + <> T.concat + ( L.intersperse ", " + $ map unName + $ Set.toList + $ Set.difference definedFragmentNames usedFragmentNames + ) -- The below code is a manual inlining of 'runInlineMT', as apparently the -- inlining optimization does not trigger, even with the INLINE pragma. 
traverse inlineSelection selectionSet @@ -160,26 +160,27 @@ inlineSelectionSet fragmentDefinitions selectionSet = do SelectionInlineFragment inlineFragment -> fragmentsInSelectionSet $ _ifSelectionSet inlineFragment inlineSelection :: - MonadInline m => + (MonadInline m) => Selection FragmentSpread Name -> m (Selection NoFragments Name) inlineSelection (SelectionField field) = withPathK "selectionSet" $ SelectionField <$> inlineField field inlineSelection (SelectionFragmentSpread spread) = - withPathK "selectionSet" $ - SelectionInlineFragment <$> inlineFragmentSpread spread + withPathK "selectionSet" + $ SelectionInlineFragment + <$> inlineFragmentSpread spread inlineSelection (SelectionInlineFragment fragment@InlineFragment {_ifSelectionSet}) = do selectionSet <- traverse inlineSelection _ifSelectionSet pure $! SelectionInlineFragment fragment {_ifSelectionSet = selectionSet} {-# INLINE inlineField #-} -inlineField :: MonadInline m => Field FragmentSpread Name -> m (Field NoFragments Name) +inlineField :: (MonadInline m) => Field FragmentSpread Name -> m (Field NoFragments Name) inlineField field@(Field {_fSelectionSet}) = withPathK (unName $ _fName field) $ do selectionSet <- traverse inlineSelection _fSelectionSet pure $! field {_fSelectionSet = selectionSet} inlineFragmentSpread :: - MonadInline m => + (MonadInline m) => FragmentSpread Name -> m (InlineFragment NoFragments Name) inlineFragmentSpread FragmentSpread {_fsName, _fsDirectives} = do @@ -187,41 +188,42 @@ inlineFragmentSpread FragmentSpread {_fsName, _fsDirectives} = do InlineState {_isFragmentCache} <- get if - -- If we’ve already inlined this fragment, no need to process it again. - | Just fragment <- HashMap.lookup _fsName _isFragmentCache -> - pure $! addSpreadDirectives fragment - -- Fragment cycles are always illegal; see - -- http://spec.graphql.org/June2018/#sec-Fragment-spreads-must-not-form-cycles - | (fragmentCycle, _ : _) <- break (== _fsName) _ieFragmentStack -> - throw400 ValidationFailed $ - "the fragment definition(s) " - <> englishList "and" (toTxt <$> (_fsName :| reverse fragmentCycle)) - <> " form a cycle" - -- We didn’t hit the fragment cache, so look up the definition and convert - -- it to an inline fragment. - | Just FragmentDefinition {_fdTypeCondition, _fdSelectionSet} <- - HashMap.lookup _fsName _ieFragmentDefinitions -> withPathK (unName _fsName) $ do - selectionSet <- - locally ieFragmentStack (_fsName :) $ - traverse inlineSelection _fdSelectionSet + -- If we’ve already inlined this fragment, no need to process it again. + | Just fragment <- HashMap.lookup _fsName _isFragmentCache -> + pure $! addSpreadDirectives fragment + -- Fragment cycles are always illegal; see + -- http://spec.graphql.org/June2018/#sec-Fragment-spreads-must-not-form-cycles + | (fragmentCycle, _ : _) <- break (== _fsName) _ieFragmentStack -> + throw400 ValidationFailed + $ "the fragment definition(s) " + <> englishList "and" (toTxt <$> (_fsName :| reverse fragmentCycle)) + <> " form a cycle" + -- We didn’t hit the fragment cache, so look up the definition and convert + -- it to an inline fragment. 
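-- Illustrative sketch, not part of this patch: the cycle guard above keeps a
-- stack of the fragment names currently being inlined, and a spread whose
-- name already appears on the stack closes a cycle. A standalone version:
detectFragmentCycle :: [String] -> String -> Maybe [String]
detectFragmentCycle stack name =
  case break (== name) stack of
    (cyclePrefix, _ : _) -> Just (name : reverse cyclePrefix)
    _ -> Nothing

-- e.g. detectFragmentCycle ["B", "A"] "A" == Just ["A", "B"]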
+ | Just FragmentDefinition {_fdTypeCondition, _fdSelectionSet} <- + HashMap.lookup _fsName _ieFragmentDefinitions -> withPathK (unName _fsName) $ do + selectionSet <- + locally ieFragmentStack (_fsName :) + $ traverse inlineSelection _fdSelectionSet - let fragment = - InlineFragment - { _ifTypeCondition = Just _fdTypeCondition, - -- As far as I can tell, the GraphQL spec says that directives - -- on the fragment definition do NOT apply to the fields in its - -- selection set. - _ifDirectives = [], - _ifSelectionSet = selectionSet - } - modify' $ over isFragmentCache $ HashMap.insert _fsName fragment - pure $! addSpreadDirectives fragment + let fragment = + InlineFragment + { _ifTypeCondition = Just _fdTypeCondition, + -- As far as I can tell, the GraphQL spec says that directives + -- on the fragment definition do NOT apply to the fields in its + -- selection set. + _ifDirectives = [], + _ifSelectionSet = selectionSet + } + modify' $ over isFragmentCache $ HashMap.insert _fsName fragment + pure $! addSpreadDirectives fragment - -- If we get here, the fragment name is unbound; raise an error. - -- http://spec.graphql.org/June2018/#sec-Fragment-spread-target-defined - | otherwise -> - throw400 ValidationFailed $ - "reference to undefined fragment " <>> _fsName + -- If we get here, the fragment name is unbound; raise an error. + -- http://spec.graphql.org/June2018/#sec-Fragment-spread-target-defined + | otherwise -> + throw400 ValidationFailed + $ "reference to undefined fragment " + <>> _fsName where addSpreadDirectives fragment = fragment {_ifDirectives = _ifDirectives fragment ++ _fsDirectives} diff --git a/server/src-lib/Hasura/GraphQL/Execute/Mutation.hs b/server/src-lib/Hasura/GraphQL/Execute/Mutation.hs index 9ee20153cd7..4aecc806175 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Mutation.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Mutation.hs @@ -107,8 +107,8 @@ convertMutationSelectionSet reqId maybeOperationName = do mutationParser <- - onNothing (gqlMutationParser gqlContext) $ - throw400 ValidationFailed "no mutations exist" + onNothing (gqlMutationParser gqlContext) + $ throw400 ValidationFailed "no mutations exist" (resolvedDirectives, resolvedSelSet) <- resolveVariables varDefs (fromMaybe HashMap.empty (GH._grVariables gqlUnparsed)) directives fields -- Parse the GraphQL query into the RQL AST @@ -140,8 +140,8 @@ convertMutationSelectionSet RFRemote remoteField -> do RemoteSchemaRootField remoteSchemaInfo resultCustomizer resolvedRemoteField <- runVariableCache $ resolveRemoteField userInfo remoteField let (noRelsRemoteField, remoteJoins) = RJ.getRemoteJoinsGraphQLField resolvedRemoteField - pure $ - buildExecStepRemote remoteSchemaInfo resultCustomizer G.OperationTypeMutation noRelsRemoteField remoteJoins (GH._grOperationName gqlUnparsed) + pure + $ buildExecStepRemote remoteSchemaInfo resultCustomizer G.OperationTypeMutation noRelsRemoteField remoteJoins (GH._grOperationName gqlUnparsed) RFAction action -> do let (noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsActionMutation action (actionName, _fch) <- pure $ case noRelsDBAST of diff --git a/server/src-lib/Hasura/GraphQL/Execute/Query.hs b/server/src-lib/Hasura/GraphQL/Execute/Query.hs index f7ca5bf77bd..728186262dd 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Query.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Query.hs @@ -40,7 +40,7 @@ import Language.GraphQL.Draft.Syntax qualified as G import Network.HTTP.Types qualified as HTTP parseGraphQLQuery :: - MonadError QErr m => + (MonadError QErr m) => GQLContext 
-> [G.VariableDefinition] -> Maybe (HashMap G.Name J.Value) -> @@ -130,8 +130,8 @@ convertQuerySelSet let (noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsActionQuery action (actionExecution, actionName, fch) <- pure $ case noRelsDBAST of AQQuery s -> - ( AEPSync $ - resolveActionExecution + ( AEPSync + $ resolveActionExecution httpManager env logger diff --git a/server/src-lib/Hasura/GraphQL/Execute/Remote.hs b/server/src-lib/Hasura/GraphQL/Execute/Remote.hs index 8470d07bd66..4e3130436bf 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Remote.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Remote.hs @@ -48,7 +48,7 @@ getVariableDefinitionAndValue var@(Variable varInfo gType varValue) = unresolveVariables :: forall fragments. - Functor fragments => + (Functor fragments) => G.SelectionSet fragments Variable -> G.SelectionSet fragments G.Name unresolveVariables = @@ -157,9 +157,10 @@ resolveRemoteVariable :: resolveRemoteVariable userInfo = \case SessionPresetVariable sessionVar typeName presetInfo -> do sessionVarVal <- - onNothing (getSessionVariableValue sessionVar $ _uiSession userInfo) $ - throw400 NotFound $ - sessionVar <<> " session variable expected, but not found" + onNothing (getSessionVariableValue sessionVar $ _uiSession userInfo) + $ throw400 NotFound + $ sessionVar + <<> " session variable expected, but not found" varName <- sessionVariableToGraphQLName sessionVar `onNothing` throw500 ("'" <> sessionVariableToText sessionVar <> "' cannot be made into a valid GraphQL name") @@ -173,10 +174,10 @@ resolveRemoteVariable userInfo = \case Just i -> pure $ G.VInt i "Boolean" -> if - | sessionVarVal `elem` ["true", "false"] -> - pure $ G.VBoolean $ "true" == sessionVarVal - | otherwise -> - throw400 CoercionError $ sessionVarVal <<> " cannot be coerced into a Boolean value" + | sessionVarVal `elem` ["true", "false"] -> + pure $ G.VBoolean $ "true" == sessionVarVal + | otherwise -> + throw400 CoercionError $ sessionVarVal <<> " cannot be coerced into a Boolean value" "Float" -> case readMaybe $ T.unpack sessionVarVal of Nothing -> @@ -228,7 +229,7 @@ resolveRemoteField userInfo = traverse (resolveRemoteVariable userInfo) -- | TODO: Documentation. runVariableCache :: - Monad m => + (Monad m) => StateT RemoteJSONVariableMap m a -> m a runVariableCache = flip evalStateT mempty diff --git a/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Collect.hs b/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Collect.hs index 7d832ad00e0..02ef7338585 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Collect.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Collect.hs @@ -63,7 +63,7 @@ import Language.GraphQL.Draft.Syntax qualified as G -- Returns the transformed selection set, in which remote fields have been -- inserted, and for which the @r@ type is now 'Void'. getRemoteJoinsQueryDB :: - Backend b => + (Backend b) => QueryDB b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b) -> (QueryDB b Void (UnpreparedValue b), Maybe RemoteJoins) getRemoteJoinsQueryDB = @@ -85,7 +85,7 @@ getRemoteJoinsQueryDB = -- Returns the transformed selection set, in which remote fields have been -- inserted, and for which the @r@ type is now 'Void'. 
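-- Illustrative sketch, not part of this patch: 'resolveRemoteVariable' above
-- reparses a session variable (always transported as text) according to its
-- declared GraphQL scalar type. A standalone version of that coercion, with
-- 'GValue' as a hypothetical stand-in for G.Value:
import Text.Read (readMaybe)

data GValue = GBoolean Bool | GInt Integer | GFloat Double | GString String

coerceSessionVariable :: String -> String -> Either String GValue
coerceSessionVariable typeName varValue = case typeName of
  "Int" -> maybe (Left err) (Right . GInt) (readMaybe varValue)
  "Float" -> maybe (Left err) (Right . GFloat) (readMaybe varValue)
  "Boolean"
    | varValue `elem` ["true", "false"] -> Right (GBoolean (varValue == "true"))
    | otherwise -> Left err
  _ -> Right (GString varValue)
  where
    err = varValue <> " cannot be coerced into a " <> typeName <> " value"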
getRemoteJoinsMutationDB :: - Backend b => + (Backend b) => MutationDB b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b) -> (MutationDB b Void (UnpreparedValue b), Maybe RemoteJoins) getRemoteJoinsMutationDB = @@ -118,7 +118,7 @@ getRemoteJoinsActionMutation = AMSync sync -> AMSync <$> transformSyncAction sync getRemoteJoinsSourceRelation :: - Backend b => + (Backend b) => SourceRelationshipSelection b (RemoteRelationshipField UnpreparedValue) UnpreparedValue -> (SourceRelationshipSelection b Void UnpreparedValue, Maybe RemoteJoins) getRemoteJoinsSourceRelation = @@ -190,7 +190,7 @@ transformAsyncFields :: transformAsyncFields = traverseOf _AsyncOutput transformActionFields transformMutationOutput :: - Backend b => + (Backend b) => MutationOutputG b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b) -> Collector (MutationOutputG b Void (UnpreparedValue b)) transformMutationOutput = \case @@ -207,13 +207,13 @@ transformSyncAction :: transformSyncAction = traverseOf aaeFields transformActionFields transformSelect :: - Backend b => + (Backend b) => AnnSimpleSelectG b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b) -> Collector (AnnSimpleSelectG b Void (UnpreparedValue b)) transformSelect = traverseOf asnFields transformAnnFields transformStreamSelect :: - Backend b => + (Backend b) => AnnSimpleStreamSelectG b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b) -> Collector (AnnSimpleStreamSelectG b Void (UnpreparedValue b)) transformStreamSelect select@AnnSelectStreamG {_assnFields = fields} = do @@ -222,36 +222,36 @@ transformStreamSelect select@AnnSelectStreamG {_assnFields = fields} = do pure select {_assnFields = transformedFields} transformAggregateSelect :: - Backend b => + (Backend b) => AnnAggregateSelectG b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b) -> Collector (AnnAggregateSelectG b Void (UnpreparedValue b)) transformAggregateSelect = - traverseOf asnFields $ - traverseFields $ - traverseOf (_TAFNodes . _2) transformAnnFields + traverseOf asnFields + $ traverseFields + $ traverseOf (_TAFNodes . _2) transformAnnFields -- Relay doesn't support remote relationships: we can drill down directly to the -- inner non-relay selection sets. transformConnectionSelect :: forall b. - Backend b => + (Backend b) => ConnectionSelect b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b) -> Collector (ConnectionSelect b Void (UnpreparedValue b)) transformConnectionSelect = - traverseOf (csSelect . asnFields) $ - traverseFields $ - traverseOf _ConnectionEdges $ - traverseFields $ - traverseOf _EdgeNode transformAnnFields + traverseOf (csSelect . asnFields) + $ traverseFields + $ traverseOf _ConnectionEdges + $ traverseFields + $ traverseOf _EdgeNode transformAnnFields transformObjectSelect :: - Backend b => + (Backend b) => AnnObjectSelectG b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b) -> Collector (AnnObjectSelectG b Void (UnpreparedValue b)) transformObjectSelect = traverseOf aosFields transformAnnFields transformNestedObjectSelect :: - Backend b => + (Backend b) => AnnNestedObjectSelectG b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b) -> Collector (AnnNestedObjectSelectG b Void (UnpreparedValue b)) transformNestedObjectSelect = traverseOf anosFields transformAnnFields @@ -286,7 +286,7 @@ transformGraphQLSelectionSet = \case -- 'RemoteJoin'. transformAnnFields :: forall src. 
- Backend src => + (Backend src) => AnnFieldsG src (RemoteRelationshipField UnpreparedValue) (UnpreparedValue src) -> Collector (AnnFieldsG src Void (UnpreparedValue src)) transformAnnFields fields = do @@ -362,22 +362,22 @@ transformAnnFields fields = do -- selection set. columnFields :: HashMap (Column src) FieldName columnFields = - HashMap.fromList $ - [ (_acfColumn annColumn, alias) - | (alias, annColumn) <- getFields _AFColumn fields - ] + HashMap.fromList + $ [ (_acfColumn annColumn, alias) + | (alias, annColumn) <- getFields _AFColumn fields + ] -- This is a map of computed field name to its alias of all computed fields -- in the selection set. computedFields :: HashMap.HashMap ComputedFieldName FieldName computedFields = - HashMap.fromList $ - [ (fieldName, alias) - | -- Note that we do not currently care about input arguments to a computed - -- field because only computed fields which do not accept input arguments - -- are currently allowed. - (alias, fieldName) <- getFields (_AFComputedField . _2) fields - ] + HashMap.fromList + $ [ (fieldName, alias) + | -- Note that we do not currently care about input arguments to a computed + -- field because only computed fields which do not accept input arguments + -- are currently allowed. + (alias, fieldName) <- getFields (_AFComputedField . _2) fields + ] -- Annotate a 'DBJoinField' with its field name and an alias so that it may -- be used to construct a remote join. @@ -423,8 +423,8 @@ transformAnnFields fields = do let functionArgs = flip FunctionArgsExp mempty $ fromComputedFieldImplicitArguments @b UVSession _scfComputedFieldImplicitArgs fieldSelect = - flip CFSScalar Nothing $ - ComputedFieldScalarSelect _scfFunction functionArgs _scfType Nothing + flip CFSScalar Nothing + $ ComputedFieldScalarSelect _scfFunction functionArgs _scfType Nothing in AFComputedField _scfXField _scfName fieldSelect -- | Transforms an action's selection set. @@ -477,10 +477,10 @@ transformActionFields fields = do -- selection set. scalarFields :: HashMap G.Name FieldName scalarFields = - HashMap.fromList $ - [ (name, alias) - | (alias, name) <- getFields _ACFScalar fields - ] + HashMap.fromList + $ [ (name, alias) + | (alias, name) <- getFields _ACFScalar fields + ] -- Annotate a join field with its field name and an alias so that it may -- be used to construct a remote join. @@ -530,8 +530,8 @@ transformObjectSelectionSet :: transformObjectSelectionSet typename selectionSet = do -- we need to keep track of whether any subfield contained a remote join (annotatedFields, subfieldsContainRemoteJoins) <- - listens isJust $ - flip InsOrdHashMap.traverseWithKey selectionSet \alias field -> + listens isJust + $ flip InsOrdHashMap.traverseWithKey selectionSet \alias field -> withField (G.unName <$> typename) (G.unName alias) do case field of FieldGraphQL f -> (,Nothing) <$> transformGraphQLField f @@ -544,29 +544,29 @@ transformObjectSelectionSet typename selectionSet = do remoteJoins = InsOrdHashMap.mapMaybe snd annotatedFields additionalFields = if - | isJust typename && (not (null remoteJoins) || subfieldsContainRemoteJoins) -> - -- We are in a situation in which the type name matters, and we know - -- that there is at least one remote join in this part of tree, meaning - -- we might need to branch on the typename when traversing the join - -- tree: we insert a custom field that will return the type name. 
- InsOrdHashMap.singleton internalTypeAlias $ - mkGraphQLField - (Just internalTypeAlias) - GName.___typename - mempty - mempty - SelectionSetNone - | otherwise -> - -- Either the typename doesn't matter, or this tree doesn't have remote - -- joins; this selection set isn't "ambiguous". - mempty + | isJust typename && (not (null remoteJoins) || subfieldsContainRemoteJoins) -> + -- We are in a situation in which the type name matters, and we know + -- that there is at least one remote join in this part of tree, meaning + -- we might need to branch on the typename when traversing the join + -- tree: we insert a custom field that will return the type name. + InsOrdHashMap.singleton internalTypeAlias + $ mkGraphQLField + (Just internalTypeAlias) + GName.___typename + mempty + mempty + SelectionSetNone + | otherwise -> + -- Either the typename doesn't matter, or this tree doesn't have remote + -- joins; this selection set isn't "ambiguous". + mempty transformedFields = fmap fst annotatedFields <> additionalFields case NEMap.fromList $ InsOrdHashMap.toList remoteJoins of Nothing -> pure $ fmap FieldGraphQL transformedFields Just neRemoteJoins -> do collect $ NEMap.mapKeys (\fieldGName -> QualifiedFieldName (G.unName <$> typename) (G.unName fieldGName)) neRemoteJoins - pure $ - fmap + pure + $ fmap FieldGraphQL (transformedFields <> InsOrdHashMap.fromList [(_fAlias fld, fld) | fld <- toList phantomFields]) where @@ -580,8 +580,8 @@ transformObjectSelectionSet typename selectionSet = do -- in the selection set. We do not yet support lhs join fields which take -- arguments. To be consistent with that, we ignore fields with arguments noArgsGraphQLFields = - HashMap.fromList $ - flip mapMaybe (InsOrdHashMap.toList selectionSet) \(alias, field) -> case field of + HashMap.fromList + $ flip mapMaybe (InsOrdHashMap.toList selectionSet) \(alias, field) -> case field of FieldGraphQL f -> if null (_fArguments f) then Just (_fName f, FieldName $ G.unName alias) @@ -653,8 +653,8 @@ createRemoteJoin joinColumnAliases = \case (,(rhsColumn, rhsColumnType)) <$> HashMap.lookup joinFieldName joinColumnAliases anySourceJoin = - AB.mkAnyBackend $ - RemoteSourceJoin + AB.mkAnyBackend + $ RemoteSourceJoin _rssName _rssConfig transformedSourceRelationship @@ -672,7 +672,7 @@ createRemoteJoin joinColumnAliases = \case -- NOTE: if the @fieldName@ argument is a valid GraphQL name, then the -- constructed alias MUST also be a valid GraphQL name. getJoinColumnAlias :: - Hashable field => + (Hashable field) => FieldName -> field -> HashMap field FieldName -> diff --git a/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Join.hs b/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Join.hs index d01d2f21794..5b77b150af3 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Join.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Join.hs @@ -122,8 +122,8 @@ processRemoteJoins requestId logger agentLicenseKey env requestHeaders userInfo -- Resulting JSON object, as a 'ByteString'. m BL.ByteString callRemoteServer remoteSchemaInfo request = - fmap (view _3) $ - execRemoteGQ env userInfo requestHeaders remoteSchemaInfo request + fmap (view _3) + $ execRemoteGQ env userInfo requestHeaders remoteSchemaInfo request -- | Fold the join tree. 
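
`foldJoinTreeWith`, whose hunks follow, executes every collected remote join and then stitches each answer back into the left-hand-side response via a pair of integer identifiers: a join call id and an argument id (see the `replaceToken` changes further down). A rough sketch of that double lookup, with illustrative names and `Either String` standing in for the real `QErr` monad:

    import qualified Data.IntMap.Strict as IntMap

    -- Resolve one replacement token (callId, argId) against the batched
    -- remote results; either lookup can fail independently.
    lookupJoinResult ::
      IntMap.IntMap (IntMap.IntMap v) -> Int -> Int -> Either String v
    lookupJoinResult results callId argId = do
      callResults <-
        maybe (Left ("no results for join call " <> show callId)) Right
          $ IntMap.lookup callId results
      maybe (Left ("no value for argument " <> show argId)) Right
        $ IntMap.lookup argId callResults
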
-- @@ -149,8 +149,9 @@ foldJoinTreeWith :: m (f JO.Value) foldJoinTreeWith callSource callRemoteSchema userInfo lhs joinTree reqHeaders operationName = do (compositeValue, joins) <- collectJoinArguments (assignJoinIds joinTree) lhs - joinIndices <- fmap catMaybes $ - for joins $ \JoinArguments {..} -> do + joinIndices <- fmap catMaybes + $ for joins + $ \JoinArguments {..} -> do let joinArguments = IntMap.fromList $ map swap $ HashMap.toList _jalArguments previousStep <- case _jalJoin of RemoteJoinRemoteSchema remoteSchemaJoin childJoinTree -> do @@ -302,10 +303,10 @@ collectJoinArguments joinTree lhs = do traverseObject joinTree_ object = do let joinTreeNodes = unJoinTree joinTree_ phantomFields = - HS.fromList $ - map getFieldNameTxt $ - concatMap (getPhantomFields . snd) $ - toList joinTree_ + HS.fromList + $ map getFieldNameTxt + $ concatMap (getPhantomFields . snd) + $ toList joinTree_ -- If we need the typename to disambiguate branches in the join tree, it -- will be present in the answer as a placeholder internal field. @@ -334,12 +335,14 @@ collectJoinArguments joinTree lhs = do Just (Leaf (joinId, remoteJoin)) -> do joinArgument <- forM (getJoinColumnMapping remoteJoin) $ \alias -> do let aliasTxt = getFieldNameTxt $ getAliasFieldName alias - onNothing (JO.lookup aliasTxt object) $ - throw500 $ - "a join column is missing from the response: " <> aliasTxt + onNothing (JO.lookup aliasTxt object) + $ throw500 + $ "a join column is missing from the response: " + <> aliasTxt if HashMap.null (HashMap.filter (== JO.Null) joinArgument) then - Just . CVFromRemote + Just + . CVFromRemote <$> getReplacementToken joinId remoteJoin (JoinArgument joinArgument) (FieldName fieldName) else -- we do not join with the remote field if any of the leaves of -- the join argument are null @@ -351,7 +354,9 @@ collectJoinArguments joinTree lhs = do then pure Nothing else pure $ Just $ CVOrdValue value_ - pure . InsOrdHashMap.fromList $ + pure + . 
InsOrdHashMap.fromList + $ -- filter out the Nothings mapMaybe sequenceA compositeObject @@ -367,14 +372,14 @@ joinResults remoteResults compositeValues = do replaceToken :: ReplacementToken -> m JO.Value replaceToken (ReplacementToken joinCallId argumentId) = do joinCallResults <- - onNothing (IntMap.lookup joinCallId remoteResults) $ - throw500 $ - "couldn't find results for the join with id: " - <> tshow joinCallId - onNothing (IntMap.lookup argumentId joinCallResults) $ - throw500 $ - "couldn't find a value for argument id in the join results: " - <> tshow (argumentId, joinCallId) + onNothing (IntMap.lookup joinCallId remoteResults) + $ throw500 + $ "couldn't find results for the join with id: " + <> tshow joinCallId + onNothing (IntMap.lookup argumentId joinCallResults) + $ throw500 + $ "couldn't find a value for argument id in the join results: " + <> tshow (argumentId, joinCallId) ------------------------------------------------------------------------------- diff --git a/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/RemoteSchema.hs b/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/RemoteSchema.hs index c641898a005..7cc5abdbb46 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/RemoteSchema.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/RemoteSchema.hs @@ -109,8 +109,8 @@ buildRemoteSchemaCall RemoteSchemaJoin {..} arguments userInfo = do -- for each join argument, we generate a unique field, with the alias -- "f" <> argumentId fields <- flip IntMap.traverseWithKey arguments $ \argumentId (JoinArgument argument) -> do - graphqlArgs <- fmap HashMap.fromList $ - for (HashMap.toList argument) \(FieldName columnName, value) -> do + graphqlArgs <- fmap HashMap.fromList + $ for (HashMap.toList argument) \(FieldName columnName, value) -> do graphQLName <- parseGraphQLName columnName graphQLValue <- ordJSONValueToGValue value pure (graphQLName, graphQLValue) @@ -128,8 +128,10 @@ buildRemoteSchemaCall RemoteSchemaJoin {..} arguments userInfo = do -- this constructs the actual GraphQL Request that can be sent to the remote for (NE.nonEmpty $ IntMap.elems fields) $ \neFields -> do gqlRequest <- - fmap fieldsToRequest . runVariableCache . for neFields $ - \(field, _, _) -> traverse (resolveRemoteVariable userInfo) field + fmap fieldsToRequest + . runVariableCache + . for neFields + $ \(field, _, _) -> traverse (resolveRemoteVariable userInfo) field let customizer = foldMap (view _3) fields responsePath = fmap (ResponsePath . view _2) fields pure $ RemoteSchemaCall customizer gqlRequest responsePath @@ -138,7 +140,7 @@ buildRemoteSchemaCall RemoteSchemaJoin {..} arguments userInfo = do -- selection set at the leaf of the tree we construct. fieldCallsToField :: forall m. 
- MonadError QErr m => + (MonadError QErr m) => -- | user input arguments to the remote join field HashMap.HashMap G.Name (P.InputValue RemoteSchemaVariable) -> -- | Contains the values of the variables that have been defined in the remote join definition @@ -219,7 +221,7 @@ createArguments variables (RemoteArguments arguments) = -- >>> combineValues (Object (fromList [("id", Number 1)]) (Object (fromList [("name", String "foo")]) -- Object (fromList [("id", Number 1), ("name", String "foo")]) combineValues :: - MonadError QErr m => + (MonadError QErr m) => G.Name -> G.Value RemoteSchemaVariable -> G.Value RemoteSchemaVariable -> @@ -228,14 +230,14 @@ combineValues name v1 v2 = case (v1, v2) of (G.VObject l, G.VObject r) -> G.VObject <$> HashMap.unionWithM combineValues l r (G.VList l, G.VList r) -> pure $ G.VList $ l <> r (l, r) -> - throw500 $ - "combineValues: cannot combine values (" - <> tshow l - <> ") and (" - <> tshow r - <> ") for field " - <> G.unName name - <> "; lists can only be merged with lists, objects can only be merged with objects" + throw500 + $ "combineValues: cannot combine values (" + <> tshow l + <> ") and (" + <> tshow r + <> ") for field " + <> G.unName name + <> "; lists can only be merged with lists, objects can only be merged with objects" -- | Craft a GraphQL query document from the list of fields. fieldsToRequest :: NonEmpty (G.Field G.NoFragments P.Variable) -> GQLReqOutgoing @@ -283,17 +285,17 @@ executeRemoteSchemaCall networkFunction (RemoteSchemaCall customizer request _) responseObject <- AO.asObject responseJSON `onLeft` throw500 let errors = AO.lookup "errors" responseObject if - | isNothing errors || errors == Just AO.Null -> - case AO.lookup "data" responseObject of - Nothing -> throw500 "\"data\" field not found in remote response" - Just v -> - let v' = applyResultCustomizer customizer v - in AO.asObject v' `onLeft` throw500 - | otherwise -> - throwError - (err400 Unexpected "Errors from remote server") - { qeInternal = Just $ ExtraInternal $ J.object ["errors" J..= (AO.fromOrdered <$> errors)] - } + | isNothing errors || errors == Just AO.Null -> + case AO.lookup "data" responseObject of + Nothing -> throw500 "\"data\" field not found in remote response" + Just v -> + let v' = applyResultCustomizer customizer v + in AO.asObject v' `onLeft` throw500 + | otherwise -> + throwError + (err400 Unexpected "Errors from remote server") + { qeInternal = Just $ ExtraInternal $ J.object ["errors" J..= (AO.fromOrdered <$> errors)] + } ------------------------------------------------------------------------------- -- Step 3: extracting the join index @@ -333,9 +335,9 @@ buildJoinIndex RemoteSchemaCall {..} response = `onNothing` throw500 ("failed to lookup key '" <> toTxt k <> "' in response") go objValue ks _ -> - throw500 $ - "unexpected non-object json value found while path not empty: " - <> commaSeparated path + throw500 + $ "unexpected non-object json value found while path not empty: " + <> commaSeparated path ------------------------------------------------------------------------------- -- Local helpers diff --git a/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Source.hs b/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Source.hs index d5c2a0c27d0..b1ee23b18a1 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Source.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/RemoteJoin/Source.hs @@ -73,8 +73,8 @@ makeSourceJoinCall networkFunction userInfo remoteSourceJoin jaFieldName joinArg -- AB.dispatchAnyBackend @EB.BackendExecute 
remoteSourceJoin \(sjc :: SourceJoinCall b) -> -- buildSourceJoinCall @b userInfo jaFieldName joinArguments sjc maybeSourceCall <- - AB.dispatchAnyBackend @EB.BackendExecute remoteSourceJoin $ - buildSourceJoinCall userInfo jaFieldName joinArguments reqHeaders operationName + AB.dispatchAnyBackend @EB.BackendExecute remoteSourceJoin + $ buildSourceJoinCall userInfo jaFieldName joinArguments reqHeaders operationName -- if there actually is a remote call: for maybeSourceCall \sourceCall -> do -- step 2: send this call over the network @@ -108,11 +108,11 @@ buildSourceJoinCall :: buildSourceJoinCall userInfo jaFieldName joinArguments reqHeaders operationName remoteSourceJoin = do let rows = IntMap.toList joinArguments <&> \(argumentId, argument) -> - KM.insert "__argument_id__" (J.toJSON argumentId) $ - KM.fromList $ - map (bimap (K.fromText . getFieldNameTxt) JO.fromOrdered) $ - HashMap.toList $ - unJoinArgument argument + KM.insert "__argument_id__" (J.toJSON argumentId) + $ KM.fromList + $ map (bimap (K.fromText . getFieldNameTxt) JO.fromOrdered) + $ HashMap.toList + $ unJoinArgument argument rowSchema = fmap snd (_rsjJoinColumns remoteSourceJoin) for (NE.nonEmpty rows) $ \nonEmptyRows -> do let sourceConfig = _rsjSourceConfig remoteSourceJoin @@ -136,9 +136,9 @@ buildSourceJoinCall userInfo jaFieldName joinArguments reqHeaders operationName -- NOTE: We're making an assumption that the 'FieldName' propagated upwards -- from 'collectJoinArguments' is reasonable to use for logging. let rootFieldAlias = mkUnNamespacedRootFieldAlias fieldName - pure $ - AB.mkAnyBackend $ - SourceJoinCall rootFieldAlias sourceConfig stepInfo + pure + $ AB.mkAnyBackend + $ SourceJoinCall rootFieldAlias sourceConfig stepInfo ------------------------------------------------------------------------------- -- Step 3: extracting the join index @@ -180,22 +180,23 @@ buildJoinIndex response = do Right (i, "") -> pure i _ -> Nothing throwInvalidJsonErr errMsg = - throw500 $ - "failed to decode JSON response from the source: " <> errMsg + throw500 + $ "failed to decode JSON response from the source: " + <> errMsg throwMissingRelationshipDataErr = - throw500 $ - "cannot find relationship data (aliased as 'f') within the source \ - \response" + throw500 + $ "cannot find relationship data (aliased as 'f') within the source \ + \response" throwMissingArgumentIdErr = - throw500 $ - "cannot find '__argument_id__' within the source response" + throw500 + $ "cannot find '__argument_id__' within the source response" throwInvalidArgumentIdValueErr = throw500 $ "expected 'argument_id' to get parsed as backend integer type" throwNoNestedObjectErr = - throw500 $ - "expected an object one level deep in the remote schema's response, \ - \but found an array/scalar value instead" + throw500 + $ "expected an object one level deep in the remote schema's response, \ + \but found an array/scalar value instead" throwNoListOfObjectsErr = - throw500 $ - "expected a list of objects in the remote schema's response, but found \ - \an object/scalar value instead" + throw500 + $ "expected a list of objects in the remote schema's response, but found \ + \an object/scalar value instead" diff --git a/server/src-lib/Hasura/GraphQL/Execute/Resolve.hs b/server/src-lib/Hasura/GraphQL/Execute/Resolve.hs index 375ed185bfd..a01a5f78e75 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Resolve.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Resolve.hs @@ -37,8 +37,9 @@ resolveVariables definitions jsonValues directives selSet = do case variableDefinitions of a 
:| [] -> return a _ -> - throw400 ParseFailed $ - "multiple definitions for variable " <>> variableName + throw400 ParseFailed + $ "multiple definitions for variable " + <>> variableName ((directives', selSet'), usedVariables) <- flip runStateT mempty $ do d <- traverse (traverse (resolveVariable uniqueVariables)) directives s <- traverse (traverse (resolveVariable uniqueVariables)) selSet @@ -51,29 +52,29 @@ resolveVariables definitions jsonValues directives selSet = do -- TODO: Do this check using a feature flag isVariableValidationEnabled = False - when (isVariableValidationEnabled && usedVariables /= variablesByNameSet) $ - throw400 ValidationFailed $ - "following variable(s) have been defined, but have not been used in the query - " - <> T.concat - ( L.intersperse ", " $ - map G.unName $ - HS.toList $ - HS.difference variablesByNameSet usedVariables - ) + when (isVariableValidationEnabled && usedVariables /= variablesByNameSet) + $ throw400 ValidationFailed + $ "following variable(s) have been defined, but have not been used in the query - " + <> T.concat + ( L.intersperse ", " + $ map G.unName + $ HS.toList + $ HS.difference variablesByNameSet usedVariables + ) -- There may be variables which have a default value and may not be -- included in the variables JSON Map. So, we should only see, if a -- variable is included in the JSON Map, then it must be used in the -- query - when (HS.difference jsonVariableNames usedVariables /= HS.empty) $ - throw400 ValidationFailed $ - "unexpected variables in variableValues: " - <> T.concat - ( L.intersperse ", " $ - map G.unName $ - HS.toList $ - HS.difference jsonVariableNames usedVariables - ) + when (HS.difference jsonVariableNames usedVariables /= HS.empty) + $ throw400 ValidationFailed + $ "unexpected variables in variableValues: " + <> T.concat + ( L.intersperse ", " + $ map G.unName + $ HS.toList + $ HS.difference jsonVariableNames usedVariables + ) return (directives', selSet') where @@ -86,10 +87,11 @@ resolveVariables definitions jsonValues directives selSet = do Nothing | isOptional -> pure $ GraphQLValue $ absurd <$> defaultValue | otherwise -> - throw400 ValidationFailed $ - "expecting a value for non-nullable variable: " <>> _vdName + throw400 ValidationFailed + $ "expecting a value for non-nullable variable: " + <>> _vdName pure $! - Variable + pure + $!
Variable { vInfo = if isOptional then VIOptional _vdName defaultValue diff --git a/server/src-lib/Hasura/GraphQL/Execute/Subscription/Options.hs b/server/src-lib/Hasura/GraphQL/Execute/Subscription/Options.hs index 4951defd07c..905ad076d75 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Subscription/Options.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Subscription/Options.hs @@ -44,8 +44,10 @@ instance J.ToJSON SubscriptionsOptions where instance J.FromJSON SubscriptionsOptions where parseJSON = J.withObject "live query options" \o -> SubscriptionsOptions - <$> o J..: "batch_size" - <*> o J..: "refetch_delay" + <$> o + J..: "batch_size" + <*> o + J..: "refetch_delay" newtype BatchSize = BatchSize {unBatchSize :: Refined NonNegative Int} deriving (Show, Eq, J.ToJSON, J.FromJSON) diff --git a/server/src-lib/Hasura/GraphQL/Execute/Subscription/Plan.hs b/server/src-lib/Hasura/GraphQL/Execute/Subscription/Plan.hs index e8887dfe441..68877e1e1c8 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Subscription/Plan.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Subscription/Plan.hs @@ -235,8 +235,8 @@ mkCohortVariables :: ValidatedCursorVariables -> CohortVariables mkCohortVariables requiredSessionVariables sessionVariableValues = - CohortVariables $ - filterSessionVariables + CohortVariables + $ filterSessionVariables (\k _ -> Set.member k requiredSessionVariables) sessionVariableValues diff --git a/server/src-lib/Hasura/GraphQL/Execute/Subscription/Poll/Common.hs b/server/src-lib/Hasura/GraphQL/Execute/Subscription/Poll/Common.hs index 937407e47c1..8aab66fb486 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Subscription/Poll/Common.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Subscription/Poll/Common.hs @@ -93,8 +93,8 @@ newtype SubscriberMetadata = SubscriberMetadata {unSubscriberMetadata :: J.Value mkSubscriberMetadata :: WS.WSId -> OperationId -> Maybe OperationName -> RequestId -> SubscriberMetadata mkSubscriberMetadata websocketId operationId operationName reqId = - SubscriberMetadata $ - J.object + SubscriberMetadata + $ J.object [ "websocket_id" J..= websocketId, "operation_id" J..= operationId, "operation_name" J..= operationName, @@ -198,8 +198,8 @@ dumpCohortMap cohortMap = do cohorts <- STM.atomically $ TMap.toList cohortMap fmap J.toJSON . 
forM cohorts $ \(variableValues, cohort) -> do cohortJ <- dumpCohort cohort - return $ - J.object + return + $ J.object [ "variables" J..= variableValues, "cohort" J..= cohortJ ] @@ -209,8 +209,8 @@ dumpCohortMap cohortMap = do prevResHash <- STM.readTVar respTV curOpIds <- TMap.toList curOps newOpIds <- TMap.toList newOps - return $ - J.object + return + $ J.object [ "resp_id" J..= respId, "current_ops" J..= map fst curOpIds, "new_ops" J..= map fst newOpIds, @@ -302,8 +302,8 @@ dumpPollerMap extended pollerMap = if extended then Just <$> dumpCohortMap cohortsMap else return Nothing - return $ - J.object + return + $ J.object [ "source" J..= source, "role" J..= role, "thread_id" J..= show (Immortal.threadId threadId), diff --git a/server/src-lib/Hasura/GraphQL/Execute/Subscription/Poll/LiveQuery.hs b/server/src-lib/Hasura/GraphQL/Execute/Subscription/Poll/LiveQuery.hs index 7ce1a5a438c..fd294672d3a 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Subscription/Poll/LiveQuery.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Subscription/Poll/LiveQuery.hs @@ -59,8 +59,8 @@ pushResultToCohort result !respHashM (SubscriptionMetadata dTime) cohortSnapshot return (newSinks <> curSinks, mempty) else return (newSinks, curSinks) pushResultToSubscribers subscribersToPush - pure $ - over + pure + $ over (each . each) ( \Subscriber {..} -> SubscriberExecutionDetails _sId _sMetadata @@ -77,7 +77,7 @@ pushResultToCohort result !respHashM (SubscriptionMetadata dTime) cohortSnapshot -- active 'Poller'. This needs to be async exception safe. pollLiveQuery :: forall b. - BackendTransport b => + (BackendTransport b) => PollerId -> STM.TVar PollerResponseState -> SubscriptionsOptions -> @@ -138,8 +138,9 @@ pollLiveQuery pollerId pollerResponseState lqOpts (sourceName, sourceConfig) rol case mxRes of Left _ -> Nothing Right resp -> Just $ getSum $ foldMap (Sum . BS.length . snd) resp - (pushTime, cohortsExecutionDetails) <- withElapsedTime $ - A.forConcurrently operations $ \(res, cohortId, respData, snapshot) -> do + (pushTime, cohortsExecutionDetails) <- withElapsedTime + $ A.forConcurrently operations + $ \(res, cohortId, respData, snapshot) -> do (pushedSubscribers, ignoredSubscribers) <- pushResultToCohort res (fst <$> respData) lqMeta snapshot pure @@ -158,8 +159,8 @@ pollLiveQuery pollerId pollerResponseState lqOpts (sourceName, sourceConfig) rol let pgExecutionTime = case reify (backendTag @b) of Postgres Vanilla -> Just queryExecutionTime _ -> Nothing - pure $ - BatchExecutionDetails + pure + $ BatchExecutionDetails pgExecutionTime queryExecutionTime pushTime diff --git a/server/src-lib/Hasura/GraphQL/Execute/Subscription/Poll/StreamingQuery.hs b/server/src-lib/Hasura/GraphQL/Execute/Subscription/Poll/StreamingQuery.hs index da6e83311b4..56bf5bc37e8 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Subscription/Poll/StreamingQuery.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Subscription/Poll/StreamingQuery.hs @@ -198,8 +198,8 @@ pushResultToCohort result !respHashM (SubscriptionMetadata dTime) cursorValues r else -- when the response is unchanged, the response is only sent to the newly added subscribers return (newSinks, curSinks) pushResultToSubscribers subscribersToPush - pure $ - over + pure + $ over (each . 
each) ( \Subscriber {..} -> SubscriberExecutionDetails _sId _sMetadata @@ -230,15 +230,15 @@ pushResultToCohort result !respHashM (SubscriptionMetadata dTime) cursorValues r response = result <&> \payload -> SubscriptionResponse payload dTime pushResultToSubscribers subscribers = - unless isResponseEmpty $ - flip A.mapConcurrently_ subscribers $ - \Subscriber {..} -> _sOnChangeCallback response + unless isResponseEmpty + $ flip A.mapConcurrently_ subscribers + $ \Subscriber {..} -> _sOnChangeCallback response -- | A single iteration of the streaming query polling loop. Invocations on the -- same mutable objects may race. pollStreamingQuery :: forall b. - BackendTransport b => + (BackendTransport b) => PollerId -> STM.TVar PollerResponseState -> SubscriptionsOptions -> @@ -308,8 +308,9 @@ pollStreamingQuery pollerId pollerResponseState streamingQueryOpts (sourceName, case mxRes of Left _ -> Nothing Right resp -> Just $ getSum $ foldMap ((\(_, sqlResp, _) -> Sum . BS.length $ sqlResp)) resp - (pushTime, cohortsExecutionDetails) <- withElapsedTime $ - A.forConcurrently operations $ \(res, cohortId, respData, latestCursorValueMaybe, (snapshot, cohort)) -> do + (pushTime, cohortsExecutionDetails) <- withElapsedTime + $ A.forConcurrently operations + $ \(res, cohortId, respData, latestCursorValueMaybe, (snapshot, cohort)) -> do let latestCursorValue@(CursorVariableValues updatedCursorVarVal) = let prevCursorVariableValue = CursorVariableValues $ C._unValidatedVariables $ C._cvCursorVariables $ C._csVariables snapshot in case latestCursorValueMaybe of @@ -417,8 +418,8 @@ pollStreamingQuery pollerId pollerResponseState streamingQueryOpts (sourceName, newCohort <- do existingSubs <- TMap.new newSubs <- TMap.new - pure $ - C.Cohort + pure + $ C.Cohort (C._cCohortId currentCohort) (C._cPreviousResponse currentCohort) existingSubs @@ -477,8 +478,8 @@ pollStreamingQuery pollerId pollerResponseState streamingQueryOpts (sourceName, oldCohortNewSubscribers = C._cNewSubscribers oldCohort mergedExistingSubscribers <- TMap.union newCohortExistingSubscribers oldCohortExistingSubscribers mergedNewSubscribers <- TMap.union newCohortNewSubscribers oldCohortNewSubscribers - pure $ - newCohort + pure + $ newCohort { C._cNewSubscribers = mergedNewSubscribers, C._cExistingSubscribers = mergedExistingSubscribers } diff --git a/server/src-lib/Hasura/GraphQL/Execute/Subscription/State.hs b/server/src-lib/Hasura/GraphQL/Execute/Subscription/State.hs index e7d5fec3a77..e7dfdadddad 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Subscription/State.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Subscription/State.hs @@ -83,15 +83,19 @@ data SubscriptionsState = SubscriptionsState initSubscriptionsState :: SubscriptionPostPollHook -> IO SubscriptionsState initSubscriptionsState pollHook = - STM.atomically $ - SubscriptionsState <$> STMMap.new <*> STMMap.new <*> pure pollHook <*> TMap.new + STM.atomically + $ SubscriptionsState + <$> STMMap.new + <*> STMMap.new + <*> pure pollHook + <*> TMap.new dumpSubscriptionsState :: Bool -> LiveQueriesOptions -> StreamQueriesOptions -> SubscriptionsState -> IO J.Value dumpSubscriptionsState extended liveQOpts streamQOpts (SubscriptionsState lqMap streamMap _ _) = do lqMapJ <- dumpPollerMap extended lqMap streamMapJ <- dumpPollerMap extended streamMap - return $ - J.object + return + $ J.object [ "options" J..= liveQOpts, "live_queries_map" J..= lqMapJ, "stream_queries_map" J..= streamMapJ, @@ -160,7 +164,7 @@ findPollerForSubscriber subscriber pollerMap pollerKey cohortKey 
addToCohort add -- | Fork a thread handling a regular (live query) subscription addLiveQuery :: forall b. - BackendTransport b => + (BackendTransport b) => L.Logger L.Hasura -> ServerMetrics -> PrometheusMetrics -> @@ -199,8 +203,8 @@ addLiveQuery $assertNFHere subscriber -- so we don't write thunks to mutable vars (pollerMaybe, ()) <- - STM.atomically $ - findPollerForSubscriber + STM.atomically + $ findPollerForSubscriber subscriber lqMap handlerId @@ -214,8 +218,9 @@ addLiveQuery -- cancelled after putTMVar for_ pollerMaybe $ \poller -> do pollerId <- PollerId <$> UUID.nextRandom - threadRef <- forkImmortal ("pollLiveQuery." <> show pollerId) logger $ - forever $ do + threadRef <- forkImmortal ("pollLiveQuery." <> show pollerId) logger + $ forever + $ do (lqOpts, _) <- getSubscriptionOptions let SubscriptionsOptions _ refetchInterval = lqOpts pollLiveQuery @b @@ -272,7 +277,7 @@ addLiveQuery -- | Fork a thread handling a streaming subscription addStreamSubscriptionQuery :: forall b. - BackendTransport b => + (BackendTransport b) => L.Logger L.Hasura -> ServerMetrics -> PrometheusMetrics -> @@ -314,8 +319,8 @@ addStreamSubscriptionQuery $assertNFHere subscriber -- so we don't write thunks to mutable vars (handlerM, cohortCursorTVar) <- - STM.atomically $ - findPollerForSubscriber + STM.atomically + $ findPollerForSubscriber subscriber streamQueryMap handlerId @@ -329,8 +334,9 @@ addStreamSubscriptionQuery -- cancelled after putTMVar for_ handlerM $ \handler -> do pollerId <- PollerId <$> UUID.nextRandom - threadRef <- forkImmortal ("pollStreamingQuery." <> show (unPollerId pollerId)) logger $ - forever $ do + threadRef <- forkImmortal ("pollStreamingQuery." <> show (unPollerId pollerId)) logger + $ forever + $ do (_, streamQOpts) <- getSubscriptionOptions let SubscriptionsOptions _ refetchInterval = streamQOpts pollStreamingQuery @b @@ -398,8 +404,9 @@ removeLiveQuery :: Maybe OperationName -> IO () removeLiveQuery logger serverMetrics prometheusMetrics lqState lqId@(SubscriberDetails handlerId cohortId sinkId) granularPrometheusMetricsState maybeOperationName = mask_ $ do - join $ - STM.atomically $ do + join + $ STM.atomically + $ do detM <- getQueryDet lqMap case detM of Nothing -> return (pure ()) @@ -421,8 +428,9 @@ removeLiveQuery logger serverMetrics prometheusMetrics lqState lqId@(SubscriberD getQueryDet subMap = do pollerM <- STMMap.lookup handlerId subMap - fmap join $ - forM pollerM $ \poller -> do + fmap join + $ forM pollerM + $ \poller -> do cohortM <- TMap.lookup cohortId (_pCohorts poller) return $ (poller,) <$> cohortM @@ -445,7 +453,8 @@ removeLiveQuery logger serverMetrics prometheusMetrics lqState lqId@(SubscriberD then do STMMap.delete handlerId lqMap threadRefM <- fmap _pThread <$> STM.tryReadTMVar ioState - return $ + return + $ -- deferred IO: case threadRefM of Just threadRef -> do @@ -461,15 +470,15 @@ removeLiveQuery logger serverMetrics prometheusMetrics lqState lqId@(SubscriberD -- This would seem to imply addLiveQuery broke or a bug -- elsewhere. Be paranoid and log: Nothing -> - L.unLogger logger $ - L.UnstructuredLog L.LevelError $ - fromString $ - "In removeLiveQuery no worker thread installed. Please report this as a bug: " - <> show lqId + L.unLogger logger + $ L.UnstructuredLog L.LevelError + $ fromString + $ "In removeLiveQuery no worker thread installed. 
Please report this as a bug: " + <> show lqId else do let numSubscriptionMetric = submActiveSubscriptions $ pmSubscriptionMetrics $ prometheusMetrics - return $ - recordMetricWithLabel + return + $ recordMetricWithLabel granularPrometheusMetricsState True (GaugeVector.dec numSubscriptionMetric promMetricGranularLabel) @@ -486,8 +495,9 @@ removeStreamingQuery :: Maybe OperationName -> IO () removeStreamingQuery logger serverMetrics prometheusMetrics subscriptionState (SubscriberDetails handlerId (cohortId, cursorVariableTV) sinkId) granularPrometheusMetricsState maybeOperationName = mask_ $ do - join $ - STM.atomically $ do + join + $ STM.atomically + $ do detM <- getQueryDet streamQMap case detM of Nothing -> return (pure ()) @@ -512,8 +522,9 @@ removeStreamingQuery logger serverMetrics prometheusMetrics subscriptionState (S pollerM <- STMMap.lookup handlerId subMap (CursorVariableValues currentCohortCursorVal) <- STM.readTVar cursorVariableTV let updatedCohortId = modifyCursorCohortVariables (mkUnsafeValidateVariables currentCohortCursorVal) cohortId - fmap join $ - forM pollerM $ \poller -> do + fmap join + $ forM pollerM + $ \poller -> do cohortM <- TMap.lookup updatedCohortId (_pCohorts poller) return $ (poller,updatedCohortId,) <$> cohortM @@ -536,7 +547,8 @@ removeStreamingQuery logger serverMetrics prometheusMetrics subscriptionState (S then do STMMap.delete handlerId streamQMap threadRefM <- fmap _pThread <$> STM.tryReadTMVar ioState - return $ + return + $ -- deferred IO: case threadRefM of Just threadRef -> do @@ -552,20 +564,20 @@ removeStreamingQuery logger serverMetrics prometheusMetrics subscriptionState (S -- This would seem to imply addStreamSubscriptionQuery broke or a bug -- elsewhere. Be paranoid and log: Nothing -> - L.unLogger logger $ - L.UnstructuredLog L.LevelError $ - fromString $ - "In removeStreamingQuery no worker thread installed. Please report this as a bug: " - <> " poller_id: " - <> show handlerId - <> ", cohort_id: " - <> show cohortId - <> ", subscriber_id:" - <> show sinkId + L.unLogger logger + $ L.UnstructuredLog L.LevelError + $ fromString + $ "In removeStreamingQuery no worker thread installed. Please report this as a bug: " + <> " poller_id: " + <> show handlerId + <> ", cohort_id: " + <> show cohortId + <> ", subscriber_id:" + <> show sinkId else do let numSubscriptionMetric = submActiveSubscriptions $ pmSubscriptionMetrics $ prometheusMetrics - return $ - recordMetricWithLabel + return + $ recordMetricWithLabel granularPrometheusMetricsState True (GaugeVector.dec numSubscriptionMetric promMetricGranularLabel) @@ -613,8 +625,8 @@ addAsyncActionLiveQuery :: LiveAsyncActionQuery -> IO () addAsyncActionLiveQuery queriesState opId actionIds onException liveQuery = - STM.atomically $ - TMap.insert (AsyncActionQueryLive actionIds onException liveQuery) opId queriesState + STM.atomically + $ TMap.insert (AsyncActionQueryLive actionIds onException liveQuery) opId queriesState removeAsyncActionLiveQuery :: AsyncActionSubscriptionState -> OperationId -> IO () diff --git a/server/src-lib/Hasura/GraphQL/Execute/Subscription/TMap.hs b/server/src-lib/Hasura/GraphQL/Execute/Subscription/TMap.hs index 5f542a63908..194f7e92bb6 100644 --- a/server/src-lib/Hasura/GraphQL/Execute/Subscription/TMap.hs +++ b/server/src-lib/Hasura/GraphQL/Execute/Subscription/TMap.hs @@ -33,13 +33,13 @@ reset = flip writeTVar HashMap.empty . unTMap null :: TMap k v -> STM Bool null = fmap HashMap.null . readTVar . 
unTMap -lookup :: Hashable k => k -> TMap k v -> STM (Maybe v) +lookup :: (Hashable k) => k -> TMap k v -> STM (Maybe v) lookup k = fmap (HashMap.lookup k) . readTVar . unTMap -insert :: Hashable k => v -> k -> TMap k v -> STM () +insert :: (Hashable k) => v -> k -> TMap k v -> STM () insert !v k mapTv = modifyTVar' (unTMap mapTv) $ HashMap.insert k v -delete :: Hashable k => k -> TMap k v -> STM () +delete :: (Hashable k) => k -> TMap k v -> STM () delete k mapTv = modifyTVar' (unTMap mapTv) $ HashMap.delete k toList :: TMap k v -> STM [(k, v)] @@ -51,7 +51,7 @@ filterWithKey f mapTV = modifyTVar' (unTMap mapTV) $ HashMap.filterWithKey f replace :: TMap k v -> HashMap.HashMap k v -> STM () replace mapTV v = void $ swapTVar (unTMap mapTV) v -union :: Hashable k => TMap k v -> TMap k v -> STM (TMap k v) +union :: (Hashable k) => TMap k v -> TMap k v -> STM (TMap k v) union mapA mapB = do l <- readTVar $ unTMap mapA r <- readTVar $ unTMap mapB diff --git a/server/src-lib/Hasura/GraphQL/Explain.hs b/server/src-lib/Hasura/GraphQL/Explain.hs index 642a1277dcb..031c4904f47 100644 --- a/server/src-lib/Hasura/GraphQL/Explain.hs +++ b/server/src-lib/Hasura/GraphQL/Explain.hs @@ -80,8 +80,8 @@ explainQueryField agentLicenseKey userInfo reqHeaders operationName fieldName ro exists \(SourceConfigWith sourceConfig _ (QDBR db)) -> do let (newDB, remoteJoins) = RJ.getRemoteJoinsQueryDB db - unless (isNothing remoteJoins) $ - throw400 InvalidParams "queries with remote relationships cannot be explained" + unless (isNothing remoteJoins) + $ throw400 InvalidParams "queries with remote relationships cannot be explained" mkDBQueryExplain fieldName userInfo sourceName sourceConfig newDB reqHeaders operationName AB.dispatchAnyBackend @BackendTransport step (runDBQueryExplain agentLicenseKey) diff --git a/server/src-lib/Hasura/GraphQL/Logging/ExecutionLog.hs b/server/src-lib/Hasura/GraphQL/Logging/ExecutionLog.hs index cc7ad895bb3..5890a7891f3 100644 --- a/server/src-lib/Hasura/GraphQL/Logging/ExecutionLog.hs +++ b/server/src-lib/Hasura/GraphQL/Logging/ExecutionLog.hs @@ -45,31 +45,31 @@ statsToAnyBackend :: forall b. (HasTag b) => ActionResult b -> (Maybe (AnyBacken statsToAnyBackend ActionResult {..} = (fmap (mkAnyBackend @b . 
ExecutionStats) arStatistics, arResult) -deriving newtype instance Backend b => J.ToJSON (ExecutionStats b) +deriving newtype instance (Backend b) => J.ToJSON (ExecutionStats b) instance J.ToJSON ExecutionLog where toJSON (ExecutionLog reqId mstatistics) = - J.object $ - [ "request_id" J..= reqId, - "statistics" J..= case mstatistics of - Just statistics -> dispatchAnyBackend' @J.ToJSON statistics J.toJSON - Nothing -> J.toJSON () - ] + J.object + $ [ "request_id" J..= reqId, + "statistics" J..= case mstatistics of + Just statistics -> dispatchAnyBackend' @J.ToJSON statistics J.toJSON + Nothing -> J.toJSON () + ] instance L.ToEngineLog ExecutionLog L.Hasura where toEngineLog ql = (L.LevelInfo, L.ELTExecutionLog, J.toJSON ql) -class Monad m => MonadExecutionLog m where +class (Monad m) => MonadExecutionLog m where logExecutionLog :: L.Logger L.Hasura -> ExecutionLog -> m () -instance MonadExecutionLog m => MonadExecutionLog (ExceptT e m) where +instance (MonadExecutionLog m) => MonadExecutionLog (ExceptT e m) where logExecutionLog logger l = lift $ logExecutionLog logger l -instance MonadExecutionLog m => MonadExecutionLog (ReaderT r m) where +instance (MonadExecutionLog m) => MonadExecutionLog (ReaderT r m) where logExecutionLog logger l = lift $ logExecutionLog logger l -instance MonadExecutionLog m => MonadExecutionLog (TraceT m) where +instance (MonadExecutionLog m) => MonadExecutionLog (TraceT m) where logExecutionLog logger l = lift $ logExecutionLog logger l diff --git a/server/src-lib/Hasura/GraphQL/Logging/QueryLog.hs b/server/src-lib/Hasura/GraphQL/Logging/QueryLog.hs index b9857c33bb1..1569a76b9cb 100644 --- a/server/src-lib/Hasura/GraphQL/Logging/QueryLog.hs +++ b/server/src-lib/Hasura/GraphQL/Logging/QueryLog.hs @@ -51,14 +51,14 @@ data GeneratedQuery = GeneratedQuery instance J.ToJSON QueryLog where toJSON (QueryLog gqlQuery generatedQuery reqId kind) = - J.object $ - [ "query" J..= gqlQuery, - -- NOTE: this customizes the default JSON instance of a pair - "generated_sql" J..= fmap fromPair generatedQuery, - "request_id" J..= reqId, - "kind" J..= kind - ] - <> maybe [] (\val -> ["connection_template" J..= val]) (getResolvedConnectionTemplate kind) + J.object + $ [ "query" J..= gqlQuery, + -- NOTE: this customizes the default JSON instance of a pair + "generated_sql" J..= fmap fromPair generatedQuery, + "request_id" J..= reqId, + "kind" J..= kind + ] + <> maybe [] (\val -> ["connection_template" J..= val]) (getResolvedConnectionTemplate kind) where fromPair p = HashMap.fromList [first toTxt p] getResolvedConnectionTemplate :: QueryLogKind -> Maybe (BackendResolvedConnectionTemplate) @@ -75,17 +75,17 @@ instance J.ToJSON GeneratedQuery where instance L.ToEngineLog QueryLog L.Hasura where toEngineLog ql = (L.LevelInfo, L.ELTQueryLog, J.toJSON ql) -class Monad m => MonadQueryLog m where +class (Monad m) => MonadQueryLog m where logQueryLog :: L.Logger L.Hasura -> QueryLog -> m () -instance MonadQueryLog m => MonadQueryLog (ExceptT e m) where +instance (MonadQueryLog m) => MonadQueryLog (ExceptT e m) where logQueryLog logger l = lift $ logQueryLog logger l -instance MonadQueryLog m => MonadQueryLog (ReaderT r m) where +instance (MonadQueryLog m) => MonadQueryLog (ReaderT r m) where logQueryLog logger l = lift $ logQueryLog logger l -instance MonadQueryLog m => MonadQueryLog (TraceT m) where +instance (MonadQueryLog m) => MonadQueryLog (TraceT m) where logQueryLog logger l = lift $ logQueryLog logger l diff --git a/server/src-lib/Hasura/GraphQL/Namespace.hs 
b/server/src-lib/Hasura/GraphQL/Namespace.hs index 0262c046a38..f132d0e987b 100644 --- a/server/src-lib/Hasura/GraphQL/Namespace.hs +++ b/server/src-lib/Hasura/GraphQL/Namespace.hs @@ -97,7 +97,8 @@ customizeNamespace (Just namespace) fromParsedSelection mkNamespaceTypename fiel where parser :: Parser 'Output n (NamespacedField a) parser = - Namespaced . InsOrdHashMap.mapWithKey fromParsedSelection + Namespaced + . InsOrdHashMap.mapWithKey fromParsedSelection <$> P.selectionSet (runMkTypename mkNamespaceTypename namespace) Nothing fieldParsers customizeNamespace Nothing _ _ fieldParsers = -- No namespace so just wrap the field parser results in @NotNamespaced@. diff --git a/server/src-lib/Hasura/GraphQL/ParameterizedQueryHash.hs b/server/src-lib/Hasura/GraphQL/ParameterizedQueryHash.hs index d39fdc5cfbc..726b1980ac4 100644 --- a/server/src-lib/Hasura/GraphQL/ParameterizedQueryHash.hs +++ b/server/src-lib/Hasura/GraphQL/ParameterizedQueryHash.hs @@ -156,7 +156,8 @@ normalizeSelectionSet = (normalizeSelection =<<) J.Number _ -> G.VNull J.Array l -> G.VList $ jsonToNormalizedGQLVal <$> toList l J.Object vals -> - G.VObject $ + G.VObject + $ -- FIXME(#3479): THIS WILL CREATE INVALID GRAPHQL OBJECTS HashMap.fromList [ (name, jsonToNormalizedGQLVal val) diff --git a/server/src-lib/Hasura/GraphQL/RemoteServer.hs b/server/src-lib/Hasura/GraphQL/RemoteServer.hs index a06290bf5cb..333eeee7936 100644 --- a/server/src-lib/Hasura/GraphQL/RemoteServer.hs +++ b/server/src-lib/Hasura/GraphQL/RemoteServer.hs @@ -91,13 +91,13 @@ stitchRemoteSchema rawIntrospectionResult rsDef@ValidatedRemoteSchemaDef {..} = -- generation. The result is discarded, as the local schema will be built -- properly for each role at schema generation time, but this allows us to -- quickly reject an invalid schema. - void $ - runMemoizeT $ - runRemoteSchema minimumValidContext $ - buildRemoteParser @_ @_ @Parse - _rscIntroOriginal - mempty -- remote relationships - remoteSchemaInfo + void + $ runMemoizeT + $ runRemoteSchema minimumValidContext + $ buildRemoteParser @_ @_ @Parse + _rscIntroOriginal + mempty -- remote relationships + remoteSchemaInfo return (_rscIntroOriginal, remoteSchemaInfo) where -- If there is no explicit mutation or subscription root type we need to check for @@ -141,8 +141,8 @@ execRemoteGQ :: execRemoteGQ env userInfo reqHdrs rsdef gqlReq@GQLReq {..} = do let gqlReqUnparsed = renderGQLReqOutgoing gqlReq - when (G._todType _grQuery == G.OperationTypeSubscription) $ - throwRemoteSchema "subscription to remote server is not supported" + when (G._todType _grQuery == G.OperationTypeSubscription) + $ throwRemoteSchema "subscription to remote server is not supported" confHdrs <- makeHeadersFromConf env hdrConf let clientHdrs = bool [] (mkClientHeadersForward reqHdrs) fwdClientHdrs -- filter out duplicate headers @@ -177,7 +177,7 @@ execRemoteGQ env userInfo reqHdrs rsdef gqlReq@GQLReq {..} = do validateSchemaCustomizations :: forall m. - MonadError QErr m => + (MonadError QErr m) => RemoteSchemaCustomizer -> RemoteSchemaIntrospection -> m () @@ -187,7 +187,7 @@ validateSchemaCustomizations remoteSchemaCustomizer remoteSchemaIntrospection = validateSchemaCustomizationsConsistent :: forall m. 
- MonadError QErr m => + (MonadError QErr m) => RemoteSchemaCustomizer -> RemoteSchemaIntrospection -> m () @@ -203,24 +203,24 @@ validateSchemaCustomizationsConsistent remoteSchemaCustomizer (RemoteSchemaIntro for_ _itdFieldsDefinition $ \G.FieldDefinition {..} -> do let interfaceCustomizedFieldName = runCustomizeRemoteFieldName customizeFieldName _itdName _fldName typeCustomizedFieldName = runCustomizeRemoteFieldName customizeFieldName typeName _fldName - when (interfaceCustomizedFieldName /= typeCustomizedFieldName) $ - throwRemoteSchema $ - "Remote schema customization inconsistency: field name mapping for field " - <> _fldName - <<> " of interface " - <> _itdName - <<> " is inconsistent with mapping for type " - <> typeName - <<> ". Interface field name maps to " - <> interfaceCustomizedFieldName - <<> ". Type field name maps to " - <> typeCustomizedFieldName - <<> "." + when (interfaceCustomizedFieldName /= typeCustomizedFieldName) + $ throwRemoteSchema + $ "Remote schema customization inconsistency: field name mapping for field " + <> _fldName + <<> " of interface " + <> _itdName + <<> " is inconsistent with mapping for type " + <> typeName + <<> ". Interface field name maps to " + <> interfaceCustomizedFieldName + <<> ". Type field name maps to " + <> typeCustomizedFieldName + <<> "." _ -> pure () validateSchemaCustomizationsDistinct :: forall m. - MonadError QErr m => + (MonadError QErr m) => RemoteSchemaCustomizer -> RemoteSchemaIntrospection -> m () @@ -234,29 +234,29 @@ validateSchemaCustomizationsDistinct remoteSchemaCustomizer (RemoteSchemaIntrosp validateTypeMappingsAreDistinct :: m () validateTypeMappingsAreDistinct = do let dups = duplicates $ runMkTypename customizeTypeName <$> HashMap.keys typeDefinitions - unless (Set.null dups) $ - throwRemoteSchema $ - "Type name mappings are not distinct; the following types appear more than once: " - <> dquoteList dups + unless (Set.null dups) + $ throwRemoteSchema + $ "Type name mappings are not distinct; the following types appear more than once: " + <> dquoteList dups validateFieldMappingsAreDistinct :: G.TypeDefinition a b -> m () validateFieldMappingsAreDistinct = \case G.TypeDefinitionInterface G.InterfaceTypeDefinition {..} -> do let dups = duplicates $ customizeFieldName _itdName . G._fldName <$> _itdFieldsDefinition - unless (Set.null dups) $ - throwRemoteSchema $ - "Field name mappings for interface type " - <> _itdName - <<> " are not distinct; the following fields appear more than once: " - <> dquoteList dups + unless (Set.null dups) + $ throwRemoteSchema + $ "Field name mappings for interface type " + <> _itdName + <<> " are not distinct; the following fields appear more than once: " + <> dquoteList dups G.TypeDefinitionObject G.ObjectTypeDefinition {..} -> do let dups = duplicates $ customizeFieldName _otdName . G._fldName <$> _otdFieldsDefinition - unless (Set.null dups) $ - throwRemoteSchema $ - "Field name mappings for object type " - <> _otdName - <<> " are not distinct; the following fields appear more than once: " - <> dquoteList dups + unless (Set.null dups) + $ throwRemoteSchema + $ "Field name mappings for object type " + <> _otdName + <<> " are not distinct; the following fields appear more than once: " + <> dquoteList dups _ -> pure () ------------------------------------------------------------------------------- @@ -442,8 +442,8 @@ getCustomizer IntrospectionResult {..} (Just RemoteSchemaCustomization {..}) = R -- the _rtcMapping. This means that a user can still change the root type -- name. 
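
The `rootTypeNameMap` binding just below builds exactly such a rename table, from an optional namespace applied to the query, mutation, and subscription root names. A simplified standalone sketch of the idea; the names here are illustrative, and the real `mkPrefixSuffixMap` operates on GraphQL names rather than `Text`:

    import qualified Data.HashMap.Strict as HashMap
    import Data.Maybe (fromMaybe)
    import Data.Text (Text)

    -- Build an old-name -> new-name table by wrapping each name in an
    -- optional prefix and suffix; names absent from the table are kept as-is.
    mkRenameTable :: Maybe Text -> Maybe Text -> [Text] -> HashMap.HashMap Text Text
    mkRenameTable mPrefix mSuffix names =
      HashMap.fromList
        [ (name, fromMaybe mempty mPrefix <> name <> fromMaybe mempty mSuffix)
        | name <- names
        ]
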
rootTypeNameMap = - mkPrefixSuffixMap _rscRootFieldsNamespace Nothing $ - catMaybes [Just irQueryRoot, irMutationRoot, irSubscriptionRoot] + mkPrefixSuffixMap _rscRootFieldsNamespace Nothing + $ catMaybes [Just irQueryRoot, irMutationRoot, irSubscriptionRoot] typeRenameMap = case _rscTypeNames of @@ -480,17 +480,17 @@ getCustomizer IntrospectionResult {..} (Just RemoteSchemaCustomization {..}) = R pErr :: (MonadFail m) => Text -> m a pErr = fail . T.unpack -throwRemoteSchema :: QErrM m => Text -> m a +throwRemoteSchema :: (QErrM m) => Text -> m a throwRemoteSchema = throw400 RemoteSchemaError throwRemoteSchemaHttp :: - QErrM m => + (QErrM m) => EnvRecord URI -> HTTP.HttpException -> m a throwRemoteSchemaHttp urlEnvRecord exception = - throwError $ - (baseError urlEnvRecord) + throwError + $ (baseError urlEnvRecord) { qeInternal = Just $ ExtraInternal $ getHttpExceptionJson (ShowErrorInfo True) $ HttpException exception } where diff --git a/server/src-lib/Hasura/GraphQL/Schema.hs b/server/src-lib/Hasura/GraphQL/Schema.hs index 5ebadc47f34..2c40941d9f2 100644 --- a/server/src-lib/Hasura/GraphQL/Schema.hs +++ b/server/src-lib/Hasura/GraphQL/Schema.hs @@ -148,9 +148,9 @@ buildGQLContext logger = do let remoteSchemasRoles = concatMap (HashMap.keys . _rscPermissions . fst . snd) $ HashMap.toList allRemoteSchemas actionRoles = - Set.insert adminRoleName $ - Set.fromList (allActionInfos ^.. folded . aiPermissions . to HashMap.keys . folded) - <> Set.fromList (bool mempty remoteSchemasRoles $ remoteSchemaPermissions == Options.EnableRemoteSchemaPermissions) + Set.insert adminRoleName + $ Set.fromList (allActionInfos ^.. folded . aiPermissions . to HashMap.keys . folded) + <> Set.fromList (bool mempty remoteSchemasRoles $ remoteSchemaPermissions == Options.EnableRemoteSchemaPermissions) allActionInfos = HashMap.elems allActions allTableRoles = Set.fromList $ getTableRoles =<< HashMap.elems sources allLogicalModelRoles = Set.fromList $ getLogicalModelRoles =<< HashMap.elems sources @@ -161,8 +161,9 @@ buildGQLContext -- but that isn't really achievable (see mono #3829). NOTE: the admin role -- will still be a bottleneck here, even on huge_schema which has many -- roles.
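
The `forConcurrentlyEIO 10` call in the hunk below caps how many role contexts are built in parallel. A rough standalone sketch of bounded concurrency with a semaphore; the helper name is hypothetical, and unlike the real combinator this IO-only version does not short-circuit on errors:

    import Control.Concurrent.Async (forConcurrently)
    import Control.Concurrent.QSem (newQSem, signalQSem, waitQSem)
    import Control.Exception (bracket_)

    -- Run the action on every element, allowing at most `limit` of them
    -- to run at the same time.
    forConcurrentlyBounded :: Int -> [a] -> (a -> IO b) -> IO [b]
    forConcurrentlyBounded limit xs action = do
      sem <- newQSem limit
      forConcurrently xs $ \x ->
        bracket_ (waitQSem sem) (signalQSem sem) (action x)
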
- fmap HashMap.fromList $ - forConcurrentlyEIO 10 (Set.toList allRoles) $ \role -> do + fmap HashMap.fromList + $ forConcurrentlyEIO 10 (Set.toList allRoles) + $ \role -> do (role,) <$> concurrentlyEIO ( buildRoleContext @@ -208,10 +209,10 @@ buildGQLContext (calculateSchemaSDLHash (generateSDLWithAllTypes adminIntrospection) adminRoleName) metadataResourceVersion now - pure $ - STM.atomically $ - STM.writeTQueue (_srpaSchemaRegistryTQueueRef schemaRegistryCtx) $ - projectSchemaInfo + pure + $ STM.atomically + $ STM.writeTQueue (_srpaSchemaRegistryTQueueRef schemaRegistryCtx) + $ projectSchemaInfo let hasuraContextsWithoutIntrospection = flip HashMap.mapWithKey hasuraContexts $ \r (context, err, schemaIntrospection) -> if r == adminRoleName @@ -308,8 +309,8 @@ buildRoleContext options sources remotes actions customTypes role remoteSchemaPe -- build all remote schemas -- we only keep the ones that don't result in a name conflict (remoteSchemaFields, !remoteSchemaErrors) <- - runRemoteSchema schemaContext $ - buildAndValidateRemoteSchemas remotes sourcesQueryFields sourcesMutationBackendFields role remoteSchemaPermsCtx + runRemoteSchema schemaContext + $ buildAndValidateRemoteSchemas remotes sourcesQueryFields sourcesMutationBackendFields role remoteSchemaPermsCtx let remotesQueryFields = concatMap piQuery remoteSchemaFields remotesMutationFields = concat $ mapMaybe piMutation remoteSchemaFields remotesSubscriptionFields = concat $ mapMaybe piSubscription remoteSchemaFields @@ -318,8 +319,9 @@ buildRoleContext options sources remotes actions customTypes role remoteSchemaPe -- build all actions -- we use the source context due to how async query relationships are implemented (actionsQueryFields, actionsMutationFields, actionsSubscriptionFields) <- - runActionSchema schemaContext schemaOptions $ - fmap mconcat $ for actions \action -> do + runActionSchema schemaContext schemaOptions + $ fmap mconcat + $ for actions \action -> do queryFields <- buildActionQueryFields customTypes action mutationFields <- buildActionMutationFields customTypes action subscriptionFields <- buildActionSubscriptionFields customTypes action @@ -341,20 +343,21 @@ buildRoleContext options sources remotes actions customTypes role remoteSchemaPe -- checks in the GraphQL schema. Furthermore, we want to persist this -- information in the case of the admin role. !introspectionSchema <- do - throwOnConflictingDefinitions $ - convertToSchemaIntrospection - <$> buildIntrospectionSchema - (P.parserType queryParserBackend) - (P.parserType <$> mutationParserBackend) - (P.parserType <$> subscriptionParser) + throwOnConflictingDefinitions + $ convertToSchemaIntrospection + <$> buildIntrospectionSchema + (P.parserType queryParserBackend) + (P.parserType <$> mutationParserBackend) + (P.parserType <$> subscriptionParser) -- TODO(nicuveo): we treat the admin role differently in this function, -- which is a bit inelegant; we might want to refactor this function and -- split it into several steps, so that we can make a separate function for -- the admin role that reuses the common parts and avoid such tests. - void . throwOnConflictingDefinitions $ - buildIntrospectionSchema + void + . 
throwOnConflictingDefinitions + $ buildIntrospectionSchema (P.parserType queryParserFrontend) (P.parserType <$> mutationParserFrontend) (P.parserType <$> subscriptionParser) @@ -457,9 +460,9 @@ buildRelayRoleContext options sources actions customTypes role expFeatures = do -- build all actions -- we only build mutations in the relay schema actionsMutationFields <- - runActionSchema schemaContext schemaOptions $ - fmap concat $ - traverse (buildActionMutationFields customTypes) actions + runActionSchema schemaContext schemaOptions + $ fmap concat + $ traverse (buildActionMutationFields customTypes) actions -- Remote schema mutations aren't exposed in relay because many times it throws -- the conflicting definitions error between the relay types like `Node`, `PageInfo` etc @@ -477,13 +480,15 @@ buildRelayRoleContext options sources actions customTypes role expFeatures = do -- In order to catch errors early, we attempt to generate the data -- required for introspection, which ends up doing a few correctness -- checks in the GraphQL schema. - void . throwOnConflictingDefinitions $ - buildIntrospectionSchema + void + . throwOnConflictingDefinitions + $ buildIntrospectionSchema (P.parserType queryParserBackend) (P.parserType <$> mutationParserBackend) (P.parserType <$> subscriptionParser) - void . throwOnConflictingDefinitions $ - buildIntrospectionSchema + void + . throwOnConflictingDefinitions + $ buildIntrospectionSchema (P.parserType queryParserFrontend) (P.parserType <$> mutationParserFrontend) (P.parserType <$> subscriptionParser) @@ -589,8 +594,8 @@ unauthenticatedContext options sources allRemotes expFeatures remoteSchemaPermsC Options.DisableRemoteSchemaPermissions -> do -- Permissions are disabled, unauthenticated users have access to remote schemas. (remoteFields, remoteSchemaErrors) <- - runRemoteSchema fakeSchemaContext $ - buildAndValidateRemoteSchemas allRemotes [] [] fakeRole remoteSchemaPermsCtx + runRemoteSchema fakeSchemaContext + $ buildAndValidateRemoteSchemas allRemotes [] [] fakeRole remoteSchemaPermsCtx pure ( fmap (fmap RFRemote) <$> concatMap piQuery remoteFields, fmap (fmap RFRemote) <$> concat (mapMaybe piMutation remoteFields), @@ -598,16 +603,17 @@ unauthenticatedContext options sources allRemotes expFeatures remoteSchemaPermsC remoteSchemaErrors ) mutationParser <- - whenMaybe (not $ null mutationFields) $ - safeSelectionSet mutationRoot (Just $ G.Description "mutation root") mutationFields - <&> fmap (flattenNamespaces . fmap typenameToNamespacedRawRF) + whenMaybe (not $ null mutationFields) + $ safeSelectionSet mutationRoot (Just $ G.Description "mutation root") mutationFields + <&> fmap (flattenNamespaces . fmap typenameToNamespacedRawRF) subscriptionParser <- - whenMaybe (not $ null subscriptionFields) $ - safeSelectionSet subscriptionRoot (Just $ G.Description "subscription root") subscriptionFields - <&> fmap (flattenNamespaces . fmap typenameToNamespacedRawRF) + whenMaybe (not $ null subscriptionFields) + $ safeSelectionSet subscriptionRoot (Just $ G.Description "subscription root") subscriptionFields + <&> fmap (flattenNamespaces . fmap typenameToNamespacedRawRF) queryParser <- queryWithIntrospectionHelper queryFields mutationParser Nothing - void . throwOnConflictingDefinitions $ - buildIntrospectionSchema + void + . 
throwOnConflictingDefinitions + $ buildIntrospectionSchema (P.parserType queryParser) (P.parserType <$> mutationParser) (P.parserType <$> subscriptionParser) @@ -658,22 +664,22 @@ buildAndValidateRemoteSchemas remotes sourcesQueryFields sourcesMutationFields r (duplicates $ newSchemaQueryFieldNames <> previousSchemasQueryFieldNames) \name -> reportInconsistency $ "Duplicate remote field " <> squote name -- - between this remote and the sources: - for_ (duplicates $ newSchemaQueryFieldNames <> sourcesQueryFieldNames) $ - \name -> reportInconsistency $ "Field cannot be overwritten by remote field " <> squote name + for_ (duplicates $ newSchemaQueryFieldNames <> sourcesQueryFieldNames) + $ \name -> reportInconsistency $ "Field cannot be overwritten by remote field " <> squote name -- Ditto, but for mutations - i.e. with mutation_root: unless (null newSchemaMutationFieldNames) do -- - between this remote and the previous ones: - for_ (duplicates $ newSchemaMutationFieldNames <> previousSchemasMutationFieldNames) $ - \name -> reportInconsistency $ "Duplicate remote field " <> squote name + for_ (duplicates $ newSchemaMutationFieldNames <> previousSchemasMutationFieldNames) + $ \name -> reportInconsistency $ "Duplicate remote field " <> squote name -- - between this remote and the sources: - for_ (duplicates $ newSchemaMutationFieldNames <> sourcesMutationFieldNames) $ - \name -> reportInconsistency $ "Field cannot be overwritten by remote field " <> squote name + for_ (duplicates $ newSchemaMutationFieldNames <> sourcesMutationFieldNames) + $ \name -> reportInconsistency $ "Field cannot be overwritten by remote field " <> squote name -- No need to check for conflicts between subscription fields, since -- remote subscriptions aren't supported yet. -- Only add this new remote to the list if there was no error - pure $ - if Set.null inconsistencies + pure + $ if Set.null inconsistencies then remoteSchemaParser : validatedSchemas else validatedSchemas @@ -724,12 +730,16 @@ buildQueryAndSubscriptionFields mkRootFieldName sourceInfo tables (takeExposedAs roleName <- retrieve scRole functionPermsCtx <- retrieve Options.soInferFunctionPermissions functionSelectExpParsers <- - concat . catMaybes + concat + . catMaybes <$> for (HashMap.toList functions) \(functionName, functionInfo) -> runMaybeT $ do - guard $ - roleName == adminRoleName - || roleName `HashMap.member` _fiPermissions functionInfo - || functionPermsCtx == Options.InferFunctionPermissions + guard + $ roleName + == adminRoleName + || roleName + `HashMap.member` _fiPermissions functionInfo + || functionPermsCtx + == Options.InferFunctionPermissions let targetReturnName = _fiReturnType functionInfo lift $ mkRFs $ buildFunctionQueryFields mkRootFieldName functionName functionInfo targetReturnName nativeQueryRootFields <- @@ -739,7 +749,8 @@ buildQueryAndSubscriptionFields mkRootFieldName sourceInfo tables (takeExposedAs buildStoredProcedureFields sourceInfo storedProcedures (tableQueryFields, tableSubscriptionFields, apolloFedTableParsers) <- - unzip3 . catMaybes + unzip3 + . catMaybes <$> for (HashMap.toList tables) \(tableName, tableInfo) -> runMaybeT $ do tableIdentifierName <- getTableIdentifierName @b tableInfo lift $ buildTableQueryAndSubscriptionFields mkRootFieldName tableName tableInfo tableIdentifierName @@ -776,9 +787,11 @@ buildNativeQueryFields sourceInfo nativeQueries = runMaybeTmempty $ do -- if the current role is admin, or we have a select permission -- for this role (this is the broad strokes check. 
later, we'll filter -- more granularly on columns and then rows) - guard $ - roleName == adminRoleName - || roleName `HashMap.member` _lmiPermissions (_nqiReturns nativeQuery) + guard + $ roleName + == adminRoleName + || roleName + `HashMap.member` _lmiPermissions (_nqiReturns nativeQuery) lift (buildNativeQueryRootFields nativeQuery) where @@ -804,9 +817,11 @@ buildStoredProcedureFields sourceInfo storedProcedures = runMaybeTmempty $ do -- if the current role is admin, or we have a select permission -- for this role (this is the broad strokes check. later, we'll filter -- more granularly on columns and then rows) - guard $ - roleName == adminRoleName - || roleName `HashMap.member` _lmiPermissions (_spiReturns storedProcedure) + guard + $ roleName + == adminRoleName + || roleName + `HashMap.member` _lmiPermissions (_spiReturns storedProcedure) lift (buildStoredProcedureRootFields storedProcedure) where @@ -829,7 +844,8 @@ buildRelayQueryAndSubscriptionFields :: buildRelayQueryAndSubscriptionFields mkRootFieldName sourceInfo tables (takeExposedAs FEAQuery -> functions) = do roleName <- retrieve scRole (tableConnectionQueryFields, tableConnectionSubscriptionFields) <- - unzip . catMaybes + unzip + . catMaybes <$> for (HashMap.toList tables) \(tableName, tableInfo) -> runMaybeT do tableIdentifierName <- getTableIdentifierName @b tableInfo SelPermInfo {..} <- hoistMaybe $ tableSelectPermissions roleName tableInfo @@ -849,10 +865,10 @@ buildRelayQueryAndSubscriptionFields mkRootFieldName sourceInfo tables (takeExpo returnTableInfo <- lift $ askTableInfo returnTableName pkeyColumns <- MaybeT $ (^? tiCoreInfo . tciPrimaryKey . _Just . pkColumns) <$> pure returnTableInfo lift $ mkRFs $ buildFunctionRelayQueryFields mkRootFieldName functionName functionInfo returnTableName pkeyColumns - pure $ - ( concat $ catMaybes $ tableConnectionQueryFields <> functionConnectionFields, - concat $ catMaybes $ tableConnectionSubscriptionFields <> functionConnectionFields - ) + pure + $ ( concat $ catMaybes $ tableConnectionQueryFields <> functionConnectionFields, + concat $ catMaybes $ tableConnectionSubscriptionFields <> functionConnectionFields + ) where mkRFs = mkRootFields sourceName sourceConfig queryTagsConfig QDBR sourceName = _siName sourceInfo @@ -884,7 +900,8 @@ buildMutationFields mkRootFieldName scenario sourceInfo tables (takeExposedAs FE -- A function exposed as mutation must have a function permission -- configured for the role. See Note [Function Permissions] - guard $ + guard + $ -- when function permissions are inferred, we don't expose the -- mutation functions for non-admin roles. See Note [Function Permissions] @@ -893,7 +910,10 @@ buildMutationFields mkRootFieldName scenario sourceInfo tables (takeExposedAs FE -- when function permissions are inferred, we don't expose the -- mutation functions for non-admin roles. See Note [Function Permissions] - roleName == adminRoleName || roleName `HashMap.member` _fiPermissions functionInfo + roleName + == adminRoleName + || roleName + `HashMap.member` _fiPermissions functionInfo lift $ mkRFs MDBR $ buildFunctionMutationFields mkRootFieldName functionName functionInfo targetTableName pure $ concat $ tableMutations <> catMaybes functionMutations where @@ -971,7 +991,9 @@ queryWithIntrospectionHelper basicQueryFP mutationP subscriptionP = do fixedQueryFP = if null basicQueryFP then [placeholderField] else basicQueryFP basicQueryP <- queryRootFromFields fixedQueryFP let buildIntrospectionResponse printResponseFromSchema = - NotNamespaced . RFRaw . 
printResponseFromSchema + NotNamespaced + . RFRaw + . printResponseFromSchema <$> parseBuildIntrospectionSchema (P.parserType basicQueryP) (P.parserType <$> mutationP) @@ -1001,9 +1023,9 @@ buildMutationParser mutationFields remoteFields actionFields = do mutationFields <> (fmap (fmap RFRemote) <$> remoteFields) <> (fmap NotNamespaced <$> actionFields) - whenMaybe (not $ null mutationFieldsParser) $ - safeSelectionSet mutationRoot (Just $ G.Description "mutation root") mutationFieldsParser - <&> fmap (flattenNamespaces . fmap typenameToNamespacedRawRF) + whenMaybe (not $ null mutationFieldsParser) + $ safeSelectionSet mutationRoot (Just $ G.Description "mutation root") mutationFieldsParser + <&> fmap (flattenNamespaces . fmap typenameToNamespacedRawRF) -- | Prepare the parser for subscriptions. Every postgres query field is -- exposed as a subscription along with fields to get the status of @@ -1020,9 +1042,9 @@ buildSubscriptionParser sourceSubscriptionFields remoteSubscriptionFields action sourceSubscriptionFields <> fmap (fmap $ fmap RFRemote) remoteSubscriptionFields <> (fmap NotNamespaced <$> actionFields) - whenMaybe (not $ null subscriptionFields) $ - safeSelectionSet subscriptionRoot Nothing subscriptionFields - <&> fmap (flattenNamespaces . fmap typenameToNamespacedRawRF) + whenMaybe (not $ null subscriptionFields) + $ safeSelectionSet subscriptionRoot Nothing subscriptionFields + <&> fmap (flattenNamespaces . fmap typenameToNamespacedRawRF) ------------------------------------------------------------------------------- -- Local helpers diff --git a/server/src-lib/Hasura/GraphQL/Schema/Action.hs b/server/src-lib/Hasura/GraphQL/Schema/Action.hs index 707d51dcb32..3f302071520 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Action.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Action.hs @@ -57,7 +57,7 @@ import Language.GraphQL.Draft.Syntax qualified as G -- > } actionExecute :: forall r m n. 
- MonadBuildActionSchema r m n => + (MonadBuildActionSchema r m n) => AnnotatedCustomTypes -> ActionInfo -> SchemaT r m (Maybe (FieldParser n (IR.AnnActionExecution (IR.RemoteRelationshipField IR.UnpreparedValue)))) @@ -74,22 +74,22 @@ actionExecute customTypes actionInfo = runMaybeT do AOTScalar ast -> do let selectionSet = customScalarParser ast pure $ P.selection fieldName description inputArguments selectionSet <&> (,[]) - pure $ - parserOutput - <&> \(argsJson, fields) -> - IR.AnnActionExecution - { _aaeName = actionName, - _aaeFields = fields, - _aaePayload = argsJson, - _aaeOutputType = _adOutputType definition, - _aaeOutputFields = IR.getActionOutputFields outputObject, - _aaeWebhook = _adHandler definition, - _aaeHeaders = _adHeaders definition, - _aaeForwardClientHeaders = _adForwardClientHeaders definition, - _aaeTimeOut = _adTimeout definition, - _aaeRequestTransform = _adRequestTransform definition, - _aaeResponseTransform = _adResponseTransform definition - } + pure + $ parserOutput + <&> \(argsJson, fields) -> + IR.AnnActionExecution + { _aaeName = actionName, + _aaeFields = fields, + _aaePayload = argsJson, + _aaeOutputType = _adOutputType definition, + _aaeOutputFields = IR.getActionOutputFields outputObject, + _aaeWebhook = _adHandler definition, + _aaeHeaders = _adHeaders definition, + _aaeForwardClientHeaders = _adForwardClientHeaders definition, + _aaeTimeOut = _adTimeout definition, + _aaeRequestTransform = _adRequestTransform definition, + _aaeResponseTransform = _adResponseTransform definition + } where ActionInfo actionName (outputType, outputObject) definition permissions _ comment = actionInfo @@ -101,7 +101,7 @@ actionExecute customTypes actionInfo = runMaybeT do -- > action_name(action_input_arguments) actionAsyncMutation :: forall r m n. - MonadBuildActionSchema r m n => + (MonadBuildActionSchema r m n) => HashMap G.Name AnnotatedInputType -> ActionInfo -> SchemaT r m (Maybe (FieldParser n IR.AnnActionMutationAsync)) @@ -111,9 +111,9 @@ actionAsyncMutation nonObjectTypeMap actionInfo = runMaybeT do inputArguments <- lift $ actionInputArguments nonObjectTypeMap $ _adArguments definition let fieldName = unActionName actionName description = G.Description <$> comment - pure $ - P.selection fieldName description inputArguments actionIdParser - <&> IR.AnnActionMutationAsync actionName forwardClientHeaders + pure + $ P.selection fieldName description inputArguments actionIdParser + <&> IR.AnnActionMutationAsync actionName forwardClientHeaders where ActionInfo actionName _ definition permissions forwardClientHeaders comment = actionInfo @@ -132,7 +132,7 @@ actionAsyncMutation nonObjectTypeMap actionInfo = runMaybeT do -- > } actionAsyncQuery :: forall r m n. 
- MonadBuildActionSchema r m n => + (MonadBuildActionSchema r m n) => HashMap G.Name AnnotatedObjectType -> ActionInfo -> SchemaT r m (Maybe (FieldParser n (IR.AnnActionAsyncQuery ('Postgres 'Vanilla) (IR.RemoteRelationshipField IR.UnpreparedValue)))) @@ -183,19 +183,19 @@ actionAsyncQuery objectTypes actionInfo = runMaybeT do stringifyNumbers <- retrieve Options.soStringifyNumbers definitionsList <- lift $ mkDefinitionList outputObject - pure $ - parserOutput - <&> \(idArg, fields) -> - IR.AnnActionAsyncQuery - { _aaaqName = actionName, - _aaaqActionId = idArg, - _aaaqOutputType = _adOutputType definition, - _aaaqFields = fields, - _aaaqDefinitionList = definitionsList, - _aaaqStringifyNum = stringifyNumbers, - _aaaqForwardClientHeaders = forwardClientHeaders, - _aaaqSource = getActionSourceInfo outputObject - } + pure + $ parserOutput + <&> \(idArg, fields) -> + IR.AnnActionAsyncQuery + { _aaaqName = actionName, + _aaaqActionId = idArg, + _aaaqOutputType = _adOutputType definition, + _aaaqFields = fields, + _aaaqDefinitionList = definitionsList, + _aaaqStringifyNum = stringifyNumbers, + _aaaqForwardClientHeaders = forwardClientHeaders, + _aaaqSource = getActionSourceInfo outputObject + } where -- For historical reasons, we use postgres-specific scalar names for two -- specific output fields. To avoid calling all the postgres schema @@ -205,7 +205,7 @@ actionAsyncQuery objectTypes actionInfo = runMaybeT do -- Since we know that those parsers are only used for output scalar fields, -- we don't care about their output value: they are not used to parse input -- values, nor do they have a selection set to process. - mkOutputParser :: forall m'. MonadError QErr m' => PGScalarType -> m' (Parser 'Both n ()) + mkOutputParser :: forall m'. (MonadError QErr m') => PGScalarType -> m' (Parser 'Both n ()) mkOutputParser scalarType = do gName <- mkScalarTypeName scalarType pure $ mkScalar gName $ const $ pure () @@ -244,22 +244,22 @@ actionAsyncQuery objectTypes actionInfo = runMaybeT do Just pgScalar -> pure $ unwrapScalar pgScalar Nothing -> throw500 "encountered non-Postgres scalar in async query actions" ASTCustom ScalarTypeDefinition {..} -> - pure $ - if - | _stdName == GName._ID -> PGText - | _stdName == GName._Int -> PGInteger - | _stdName == GName._Float -> PGFloat - | _stdName == GName._String -> PGText - | _stdName == GName._Boolean -> PGBoolean - | otherwise -> PGJSON + pure + $ if + | _stdName == GName._ID -> PGText + | _stdName == GName._Int -> PGInteger + | _stdName == GName._Float -> PGFloat + | _stdName == GName._String -> PGText + | _stdName == GName._Boolean -> PGBoolean + | otherwise -> PGJSON -- | Async action's unique id -actionIdParser :: MonadParse n => Parser 'Both n ActionId +actionIdParser :: (MonadParse n) => Parser 'Both n ActionId actionIdParser = ActionId <$> P.uuid actionOutputFields :: forall r m n. 
- MonadBuildActionSchema r m n => + (MonadBuildActionSchema r m n) => G.GType -> AnnotatedObjectType -> HashMap G.Name AnnotatedObjectType -> @@ -272,10 +272,10 @@ actionOutputFields outputType annotatedObject objectTypes = do scalarOrEnumOrObjectFields <> concat (catMaybes relationshipFields) outputTypeDescription = _aotDescription annotatedObject - pure $ - outputParserModifier outputType $ - P.selectionSet outputTypeName outputTypeDescription allFieldParsers - <&> parsedSelectionsToFields IR.ACFExpression + pure + $ outputParserModifier outputType + $ P.selectionSet outputTypeName outputTypeDescription allFieldParsers + <&> parsedSelectionsToFields IR.ACFExpression where outputParserModifier :: G.GType -> Parser 'Output n a -> Parser 'Output n a outputParserModifier = \case @@ -300,9 +300,9 @@ actionOutputFields outputType annotatedObject objectTypes = do where fieldName = unObjectFieldName name wrapScalar parser = - pure $ - P.wrapFieldParser gType (P.selection_ fieldName description parser) - $> IR.ACFScalar fieldName + pure + $ P.wrapFieldParser gType (P.selection_ fieldName description parser) + $> IR.ACFScalar fieldName relationshipFieldParser :: AnnotatedTypeRelationship -> @@ -329,16 +329,16 @@ actionOutputFields outputType annotatedObject objectTypes = do RemoteFieldInfo { _rfiLHS = lhsJoinFields, _rfiRHS = - RFISource $ - AB.mkAnyBackend @('Postgres 'Vanilla) $ - RemoteSourceFieldInfo - { _rsfiName = relName, - _rsfiType = _atrType, - _rsfiSource = _atrSource, - _rsfiSourceConfig = _atrSourceConfig, - _rsfiTable = _atrTableName, - _rsfiMapping = joinMapping - } + RFISource + $ AB.mkAnyBackend @('Postgres 'Vanilla) + $ RemoteSourceFieldInfo + { _rsfiName = relName, + _rsfiType = _atrType, + _rsfiSource = _atrSource, + _rsfiSourceConfig = _atrSourceConfig, + _rsfiTable = _atrTableName, + _rsfiMapping = joinMapping + } } RemoteRelationshipParserBuilder remoteRelationshipField <- retrieve scRemoteRelationshipParserBuilder remoteRelationshipFieldParsers <- MaybeT $ remoteRelationshipField remoteFieldInfo @@ -346,7 +346,7 @@ actionOutputFields outputType annotatedObject objectTypes = do actionInputArguments :: forall r m n. - MonadBuildActionSchema r m n => + (MonadBuildActionSchema r m n) => HashMap G.Name AnnotatedInputType -> [ArgumentDefinition (G.GType, AnnotatedInputType)] -> SchemaT r m (InputFieldsParser n J.Value) @@ -387,9 +387,10 @@ actionInputArguments nonObjectTypeMap arguments = do HashMap.lookup (G.getBaseType fieldType) nonObjectTypeMap `onNothing` throw500 "object type for a field found in custom input object type" (fieldName,) <$> argumentParser fieldName fieldDesc fieldType nonObjectFieldType - pure $ - P.object objectName objectDesc $ - J.Object <$> inputFieldsToObject inputFieldsParsers + pure + $ P.object objectName objectDesc + $ J.Object + <$> inputFieldsToObject inputFieldsParsers mkArgumentInputFieldParser :: forall m k. @@ -417,18 +418,18 @@ mkArgumentInputFieldParser name description gType parser = bool (fmap J.toJSON) (fmap J.toJSON . P.nullable) . 
G.unNullability customScalarParser :: - MonadParse m => + (MonadParse m) => AnnotatedScalarType -> Parser 'Both m J.Value customScalarParser = \case ASTCustom ScalarTypeDefinition {..} -> if - | _stdName == GName._ID -> J.toJSON <$> P.identifier - | _stdName == GName._Int -> J.toJSON <$> P.int - | _stdName == GName._Float -> J.toJSON <$> P.float - | _stdName == GName._String -> J.toJSON <$> P.string - | _stdName == GName._Boolean -> J.toJSON <$> P.boolean - | otherwise -> P.jsonScalar _stdName _stdDescription + | _stdName == GName._ID -> J.toJSON <$> P.identifier + | _stdName == GName._Int -> J.toJSON <$> P.int + | _stdName == GName._Float -> J.toJSON <$> P.float + | _stdName == GName._String -> J.toJSON <$> P.string + | _stdName == GName._Boolean -> J.toJSON <$> P.boolean + | otherwise -> P.jsonScalar _stdName _stdDescription ASTReusedScalar name backendScalarType -> let schemaType = P.TNamed P.NonNullable $ P.Definition name Nothing Nothing [] P.TIScalar backendScalarValidator = @@ -441,9 +442,9 @@ customScalarParser = \case -- of the remote server. We do not parse scalars for remote servers -- for that reason; we might want to reconsider this validation as -- well. - void $ - parseScalarValue @b (unwrapScalar scalarType) jsonInput - `onLeft` \e -> parseErrorWith P.ParseFailed . toErrorMessage $ qeError e + void + $ parseScalarValue @b (unwrapScalar scalarType) jsonInput + `onLeft` \e -> parseErrorWith P.ParseFailed . toErrorMessage $ qeError e pure jsonInput in P.Parser { pType = schemaType, @@ -451,7 +452,7 @@ customScalarParser = \case } customEnumParser :: - MonadParse m => + (MonadParse m) => EnumTypeDefinition -> Parser 'Both m J.Value customEnumParser (EnumTypeDefinition typeName description enumValues) = @@ -459,8 +460,8 @@ customEnumParser (EnumTypeDefinition typeName description enumValues) = enumValueDefinitions = enumValues <&> \enumValue -> let valueName = G.unEnumValue $ _evdValue enumValue - in (,J.toJSON valueName) $ - P.Definition + in (,J.toJSON valueName) + $ P.Definition valueName (_evdDescription enumValue) Nothing diff --git a/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs b/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs index 1d4dd18d817..53a371ee14a 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/BoolExp.hs @@ -116,8 +116,9 @@ boolExpInternal gqlName fieldInfos description memoizeKey mkAggPredParser = do P.fieldOptional Name.__not Nothing (BoolNot <$> recur) ] - pure $ - BoolAnd <$> P.object name (Just description) do + pure + $ BoolAnd + <$> P.object name (Just description) do tableFields <- map BoolField . catMaybes <$> sequenceA tableFieldParsers specialFields <- catMaybes <$> sequenceA connectiveFieldParsers aggregationPredicateFields <- map (BoolField . AVAggregationPredicates) <$> aggregationPredicatesParser' @@ -142,9 +143,9 @@ boolExpInternal gqlName fieldInfos description memoizeKey mkAggPredParser = do RelTargetTable remoteTable -> do remoteTableInfo <- askTableInfo $ remoteTable let remoteTablePermissions = - (fmap . fmap) (partialSQLExpToUnpreparedValue) $ - maybe annBoolExpTrue spiFilter $ - tableSelectPermissions roleName remoteTableInfo + (fmap . fmap) (partialSQLExpToUnpreparedValue) + $ maybe annBoolExpTrue spiFilter + $ tableSelectPermissions roleName remoteTableInfo remoteBoolExp <- lift $ tableBoolExp remoteTableInfo pure $ fmap (AVRelationship relationshipInfo . 
RelationshipFilters remoteTablePermissions) remoteBoolExp FIComputedField ComputedFieldInfo {..} -> do @@ -153,8 +154,8 @@ boolExpInternal gqlName fieldInfos description memoizeKey mkAggPredParser = do case toList _cffInputArgs of [] -> do let functionArgs = - flip FunctionArgsExp mempty $ - fromComputedFieldImplicitArguments @b UVSession _cffComputedFieldImplicitArgs + flip FunctionArgsExp mempty + $ fromComputedFieldImplicitArguments @b UVSession _cffComputedFieldImplicitArgs fmap (AVComputedField . AnnComputedFieldBoolExp _cfiXComputedFieldInfo _cfiName _cffName functionArgs) <$> case computedFieldReturnType @b _cfiReturnType of @@ -205,10 +206,10 @@ logicalModelBoolExp logicalModel = memoizeKey = name description = - G.Description $ - "Boolean expression to filter rows from the logical model for " - <> name - <<> ". All fields are combined with a logical 'AND'." + G.Description + $ "Boolean expression to filter rows from the logical model for " + <> name + <<> ". All fields are combined with a logical 'AND'." in boolExpInternal gqlName fieldInfo description memoizeKey mkAggPredParser -- | @@ -230,10 +231,10 @@ tableBoolExp tableInfo = do fieldInfos <- tableSelectFields tableInfo let mkAggPredParser = aggregationPredicatesParser tableInfo let description = - G.Description $ - "Boolean expression to filter rows from the table " - <> tableInfoName tableInfo - <<> ". All fields are combined with a logical 'AND'." + G.Description + $ "Boolean expression to filter rows from the table " + <> tableInfoName tableInfo + <<> ". All fields are combined with a logical 'AND'." let memoizeKey = tableInfoName tableInfo boolExpInternal gqlName fieldInfos description memoizeKey mkAggPredParser diff --git a/server/src-lib/Hasura/GraphQL/Schema/BoolExp/AggregationPredicates.hs b/server/src-lib/Hasura/GraphQL/Schema/BoolExp/AggregationPredicates.hs index 99bfa272677..f309f685e28 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/BoolExp/AggregationPredicates.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/BoolExp/AggregationPredicates.hs @@ -70,8 +70,10 @@ defaultAggregationPredicatesParser aggFns ti = runMaybeT do aggregationFunctions <- fails $ return $ nonEmpty aggFns roleName <- retrieve scRole - collectOptionalFieldsNE . succeedingBranchesNE $ - arrayRelationships <&> \rel -> do + collectOptionalFieldsNE + . succeedingBranchesNE + $ arrayRelationships + <&> \rel -> do relTableName <- case riTarget rel of RelTargetNativeQuery _ -> error "defaultAggregationPredicatesParser RelTargetNativeQuery" RelTargetTable tn -> pure tn @@ -98,10 +100,12 @@ defaultAggregationPredicatesParser aggFns ti = runMaybeT do -- existing on the table. case fnArguments of ArgumentsStar -> - maybe AggregationPredicateArgumentsStar AggregationPredicateArguments . nonEmpty + maybe AggregationPredicateArgumentsStar AggregationPredicateArguments + . nonEmpty <$> fuse (fieldOptionalDefault Name._arguments Nothing [] . P.list <$> fails (tableSelectColumnsEnum relTable)) SingleArgument typ -> - AggregationPredicateArguments . (NE.:| []) + AggregationPredicateArguments + . 
(NE.:| []) <$> fuse ( P.field Name._arguments Nothing <$> fails (tableSelectColumnsPredEnum (== (ColumnScalar typ)) relFunGqlName relTable) diff --git a/server/src-lib/Hasura/GraphQL/Schema/Build.hs b/server/src-lib/Hasura/GraphQL/Schema/Build.hs index eafa4184ba5..e010b5ded51 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Build.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Build.hs @@ -208,8 +208,8 @@ buildTableStreamingSubscriptionFields mkRootFieldName tableName tableInfo tableI customRootFields = _tcCustomRootFields $ _tciCustomConfig $ _tiCoreInfo tableInfo selectDesc = Just $ G.Description $ "fetch data from the table in a streaming manner: " <>> tableName selectStreamName = - runMkRootFieldName mkRootFieldName $ - setFieldNameCase tCase tableInfo (_tcrfSelectStream customRootFields) mkSelectStreamField tableIdentifier + runMkRootFieldName mkRootFieldName + $ setFieldNameCase tCase tableInfo (_tcrfSelectStream customRootFields) mkSelectStreamField tableIdentifier catMaybes <$> sequenceA [ optionalFieldParser QDBStreamMultipleRows $ selectStreamTable tableInfo selectStreamName selectDesc diff --git a/server/src-lib/Hasura/GraphQL/Schema/Common.hs b/server/src-lib/Hasura/GraphQL/Schema/Common.hs index 162239e901f..b6b70647a64 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Common.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Common.hs @@ -138,7 +138,7 @@ type MonadBuildSchemaBase m n = newtype RemoteRelationshipParserBuilder = RemoteRelationshipParserBuilder ( forall lhsJoinField r n m. - MonadBuildSchemaBase m n => + (MonadBuildSchemaBase m n) => RemoteFieldInfo lhsJoinField -> SchemaT r m (Maybe [P.FieldParser n (IR.RemoteRelationshipField IR.UnpreparedValue)]) ) @@ -156,7 +156,7 @@ ignoreRemoteRelationship = RemoteRelationshipParserBuilder $ const $ pure Nothin newtype NodeInterfaceParserBuilder = NodeInterfaceParserBuilder { runNodeBuilder :: ( forall m n. - MonadBuildSchemaBase m n => + (MonadBuildSchemaBase m n) => SchemaContext -> SchemaOptions -> m (P.Parser 'P.Output n NodeMap) @@ -386,17 +386,18 @@ askNativeQueryInfo nativeQueryName = do -- | Whether the request is sent with `x-hasura-use-backend-only-permissions` set to `true`. data Scenario = Backend | Frontend deriving (Enum, Show, Eq) -textToName :: MonadError QErr m => Text -> m G.Name +textToName :: (MonadError QErr m) => Text -> m G.Name textToName textName = G.mkName textName `onNothing` throw400 ValidationFailed ( "cannot include " - <> textName <<> " in the GraphQL schema because " + <> textName + <<> " in the GraphQL schema because " <> " it is not a valid GraphQL identifier" ) -textToGQLIdentifier :: MonadError QErr m => Text -> m GQLNameIdentifier +textToGQLIdentifier :: (MonadError QErr m) => Text -> m GQLNameIdentifier textToGQLIdentifier textName = do let gqlIdents = do (pref, suffs) <- uncons (C.fromSnake textName) @@ -407,7 +408,8 @@ textToGQLIdentifier textName = do `onNothing` throw400 ValidationFailed ( "cannot include " - <> textName <<> " in the GraphQL schema because " + <> textName + <<> " in the GraphQL schema because " <> " it is not a valid GraphQL identifier" ) @@ -417,7 +419,7 @@ partialSQLExpToUnpreparedValue PSESession = IR.UVSession partialSQLExpToUnpreparedValue (PSESQLExp sqlExp) = IR.UVLiteral sqlExp mapField :: - Functor m => + (Functor m) => P.InputFieldsParser m (Maybe a) -> (a -> b) -> P.InputFieldsParser m (Maybe b) @@ -462,7 +464,7 @@ mkDescriptionWith descM defaultTxt = G.Description $ case descM of -- Karthikeyan: Yes, this is correct. 
We allowed this pre PDV but somehow -- got removed in PDV. OTOH, I’m not sure how prevalent this feature -- actually is -takeValidTables :: forall b. Backend b => TableCache b -> TableCache b +takeValidTables :: forall b. (Backend b) => TableCache b -> TableCache b takeValidTables = HashMap.filterWithKey graphQLTableFilter where graphQLTableFilter tableName tableInfo = @@ -510,12 +512,12 @@ mkEnumTypeName enumTableName enumTableCustomName = do addEnumSuffix :: ResolvedSourceCustomization -> GQLNameIdentifier -> Maybe G.Name -> G.Name addEnumSuffix customization enumTableGQLName enumTableCustomName = - runMkTypename (_rscTypeNames customization) $ - applyTypeNameCaseIdentifier (_rscNamingConvention customization) $ - mkEnumTableTypeName enumTableGQLName enumTableCustomName + runMkTypename (_rscTypeNames customization) + $ applyTypeNameCaseIdentifier (_rscNamingConvention customization) + $ mkEnumTableTypeName enumTableGQLName enumTableCustomName -- TODO: figure out what the purpose of this method is. -peelWithOrigin :: P.MonadParse m => P.Parser 'P.Both m a -> P.Parser 'P.Both m (IR.ValueWithOrigin a) +peelWithOrigin :: (P.MonadParse m) => P.Parser 'P.Both m a -> P.Parser 'P.Both m (IR.ValueWithOrigin a) peelWithOrigin parser = parser { P.pParser = \case @@ -529,12 +531,12 @@ peelWithOrigin parser = getIntrospectionResult :: Options.RemoteSchemaPermissions -> RoleName -> RemoteSchemaCtxG remoteFieldInfo -> Maybe IntrospectionResult getIntrospectionResult remoteSchemaPermsCtx role remoteSchemaContext = if - | -- admin doesn't have a custom annotated introspection, defaulting to the original one - role == adminRoleName -> - pure $ _rscIntroOriginal remoteSchemaContext - | -- if permissions are disabled, the role map will be empty, defaulting to the original one - remoteSchemaPermsCtx == Options.DisableRemoteSchemaPermissions -> - pure $ _rscIntroOriginal remoteSchemaContext - | -- otherwise, look the role up in the map; if we find nothing, then the role doesn't have access - otherwise -> - HashMap.lookup role (_rscPermissions remoteSchemaContext) + | -- admin doesn't have a custom annotated introspection, defaulting to the original one + role == adminRoleName -> + pure $ _rscIntroOriginal remoteSchemaContext + | -- if permissions are disabled, the role map will be empty, defaulting to the original one + remoteSchemaPermsCtx == Options.DisableRemoteSchemaPermissions -> + pure $ _rscIntroOriginal remoteSchemaContext + | -- otherwise, look the role up in the map; if we find nothing, then the role doesn't have access + otherwise -> + HashMap.lookup role (_rscPermissions remoteSchemaContext) diff --git a/server/src-lib/Hasura/GraphQL/Schema/Introspect.hs b/server/src-lib/Hasura/GraphQL/Schema/Introspect.hs index 20aaedeb1ab..61330c442fc 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Introspect.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Introspect.hs @@ -220,8 +220,8 @@ buildIntrospectionSchema queryRoot' mutationRoot' subscriptionRoot' = do P.TypeDefinitionsWrapper $ fDefinition (typeIntrospection @Parse) ] - pure $ - P.Schema + pure + $ P.Schema { sDescription = Nothing, sTypes = allTypes, sQueryType = queryRoot', @@ -233,7 +233,7 @@ buildIntrospectionSchema queryRoot' mutationRoot' subscriptionRoot' = do -- | Generate a __type introspection parser typeIntrospection :: forall n. 
- MonadParse n => + (MonadParse n) => FieldParser n (Schema -> J.Value) {-# INLINE typeIntrospection #-} typeIntrospection = do @@ -252,7 +252,7 @@ typeIntrospection = do -- | Generate a __schema introspection parser. schema :: forall n. - MonadParse n => + (MonadParse n) => FieldParser n (Schema -> J.Value) {-# INLINE schema #-} schema = P.subselection_ GName.___schema Nothing schemaSet @@ -287,7 +287,7 @@ data SomeType = forall k. SomeType (P.Type k) typeField :: forall n. - MonadParse n => + (MonadParse n) => Parser 'Output n (SomeType -> J.Value) typeField = let includeDeprecated :: P.InputFieldsParser n Bool @@ -338,8 +338,8 @@ typeField = fields = do -- TODO handle the value of includeDeprecated ~(_includeDeprecated, printer) <- P.subselection GName._fields Nothing includeDeprecated fieldField - return $ - \case + return + $ \case SomeType tp -> case tp of P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIObject (P.ObjectInfo fields' _interfaces'))) -> @@ -350,8 +350,8 @@ typeField = interfaces :: FieldParser n (SomeType -> J.Value) interfaces = do printer <- P.subselection_ GName._interfaces Nothing typeField - return $ - \case + return + $ \case SomeType tp -> case tp of P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIObject (P.ObjectInfo _fields' interfaces'))) -> @@ -360,8 +360,8 @@ typeField = possibleTypes :: FieldParser n (SomeType -> J.Value) possibleTypes = do printer <- P.subselection_ GName._possibleTypes Nothing typeField - return $ - \case + return + $ \case SomeType tp -> case tp of P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIInterface (P.InterfaceInfo _fields' objects'))) -> @@ -373,8 +373,8 @@ typeField = enumValues = do -- TODO handle the value of includeDeprecated ~(_includeDeprecated, printer) <- P.subselection GName._enumValues Nothing includeDeprecated enumValue - return $ - \case + return + $ \case SomeType tp -> case tp of P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIEnum vals)) -> @@ -383,8 +383,8 @@ typeField = inputFields :: FieldParser n (SomeType -> J.Value) inputFields = do printer <- P.subselection_ GName._inputFields Nothing inputValue - return $ - \case + return + $ \case SomeType tp -> case tp of P.TNamed P.Nullable (P.Definition _ _ _ _ (P.TIInputObject (P.InputObjectInfo fieldDefs))) -> @@ -430,17 +430,19 @@ type __InputValue { -} inputValue :: forall n. - MonadParse n => + (MonadParse n) => Parser 'Output n (P.Definition P.InputFieldInfo -> J.Value) inputValue = let name :: FieldParser n (P.Definition P.InputFieldInfo -> J.Value) name = P.selection_ GName._name Nothing P.string - $> nameAsJSON . P.dName + $> nameAsJSON + . P.dName description :: FieldParser n (P.Definition P.InputFieldInfo -> J.Value) description = P.selection_ GName._description Nothing P.string - $> maybe J.Null (J.String . G.unDescription) . P.dDescription + $> maybe J.Null (J.String . G.unDescription) + . P.dDescription typeF :: FieldParser n (P.Definition P.InputFieldInfo -> J.Value) typeF = do printer <- P.subselection_ GName._type Nothing typeField @@ -472,17 +474,19 @@ type __EnumValue { -} enumValue :: forall n. - MonadParse n => + (MonadParse n) => Parser 'Output n (P.Definition P.EnumValueInfo -> J.Value) enumValue = let name :: FieldParser n (P.Definition P.EnumValueInfo -> J.Value) name = P.selection_ GName._name Nothing P.string - $> nameAsJSON . P.dName + $> nameAsJSON + . P.dName description :: FieldParser n (P.Definition P.EnumValueInfo -> J.Value) description = P.selection_ GName._description Nothing P.string - $> maybe J.Null (J.String . G.unDescription) . 
P.dDescription + $> maybe J.Null (J.String . G.unDescription) + . P.dDescription -- TODO We don't seem to support enum value deprecation isDeprecated :: FieldParser n (P.Definition P.EnumValueInfo -> J.Value) isDeprecated = @@ -516,7 +520,7 @@ enum __TypeKind { -} typeKind :: forall n. - MonadParse n => + (MonadParse n) => Parser 'Both n () typeKind = P.enum @@ -548,13 +552,14 @@ type __Field { -} fieldField :: forall n. - MonadParse n => + (MonadParse n) => Parser 'Output n (P.Definition P.FieldInfo -> J.Value) fieldField = let name :: FieldParser n (P.Definition P.FieldInfo -> J.Value) name = P.selection_ GName._name Nothing P.string - $> nameAsJSON . P.dName + $> nameAsJSON + . P.dName description :: FieldParser n (P.Definition P.FieldInfo -> J.Value) description = P.selection_ GName._description Nothing P.string $> \defInfo -> @@ -602,7 +607,7 @@ type __Directive { directiveSet :: forall n. - MonadParse n => + (MonadParse n) => Parser 'Output n (P.DirectiveInfo -> J.Value) directiveSet = let name :: FieldParser n (P.DirectiveInfo -> J.Value) @@ -653,7 +658,7 @@ type __Schema { -} schemaSet :: forall n. - MonadParse n => + (MonadParse n) => Parser 'Output n (Schema -> J.Value) {-# INLINE schemaSet #-} schemaSet = @@ -666,14 +671,14 @@ schemaSet = types :: FieldParser n (Schema -> J.Value) types = do printer <- P.subselection_ GName._types Nothing typeField - return $ - \partialSchema -> - J.Array $ - V.fromList $ - map (printer . schemaTypeToSomeType) $ - sortOn P.getName $ - HashMap.elems $ - sTypes partialSchema + return + $ \partialSchema -> + J.Array + $ V.fromList + $ map (printer . schemaTypeToSomeType) + $ sortOn P.getName + $ HashMap.elems + $ sTypes partialSchema where schemaTypeToSomeType :: P.SomeDefinitionTypeInfo -> SomeType schemaTypeToSomeType (P.SomeDefinitionTypeInfo def) = @@ -721,5 +726,5 @@ applyPrinter :: J.Value applyPrinter = flip (\x -> selectionSetToJSON . fmap (($ x) . P.handleTypename (const . nameAsJSON))) -nameAsJSON :: P.HasName a => a -> J.Value +nameAsJSON :: (P.HasName a) => a -> J.Value nameAsJSON = J.String . G.unName . 
P.getName diff --git a/server/src-lib/Hasura/GraphQL/Schema/Mutation.hs b/server/src-lib/Hasura/GraphQL/Schema/Mutation.hs index e753e9759f0..9bdbc78bdab 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Mutation.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Mutation.hs @@ -96,10 +96,10 @@ insertIntoTable backendInsertAction scenario tableInfo fieldName description = r backendInsert <- backendInsertParser objects <- mkObjectsArg objectParser pure $ mkInsertObject objects tableInfo backendInsert insertPerms updatePerms - pure $ - P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) $ - P.subselection fieldName description argsParser selectionParser - <&> \(insertObject, output) -> IR.AnnotatedInsert (G.unName fieldName) False insertObject (IR.MOutMultirowFields output) (Just tCase) + pure + $ P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) + $ P.subselection fieldName description argsParser selectionParser + <&> \(insertObject, output) -> IR.AnnotatedInsert (G.unName fieldName) False insertObject (IR.MOutMultirowFields output) (Just tCase) where mkObjectsArg objectParser = P.field @@ -147,10 +147,10 @@ insertOneIntoTable backendInsertAction scenario tableInfo fieldName description backendInsert <- backendInsertParser object <- mkObjectArg objectParser pure $ mkInsertObject [object] tableInfo backendInsert insertPerms updatePerms - pure $ - P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) $ - P.subselection fieldName description argsParser selectionParser - <&> \(insertObject, output) -> IR.AnnotatedInsert (G.unName fieldName) True insertObject (IR.MOutSinglerowObject output) (Just tCase) + pure + $ P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) + $ P.subselection fieldName description argsParser selectionParser + <&> \(insertObject, output) -> IR.AnnotatedInsert (G.unName fieldName) True insertObject (IR.MOutSinglerowObject output) (Just tCase) where mkObjectArg objectParser = P.field @@ -230,8 +230,9 @@ tableFieldsInput tableInfo = do isAllowed = Set.member (ciColumn columnInfo) (ipiCols insertPerms) guard isAllowed fieldParser <- lift $ columnParser (ciType columnInfo) (G.Nullability $ ciIsNullable columnInfo) - pure $ - P.fieldOptional columnName columnDesc fieldParser `mapField` \value -> + pure + $ P.fieldOptional columnName columnDesc fieldParser + `mapField` \value -> IR.AIColumn (ciColumn columnInfo, IR.mkParameter value) mkDefaultRelationshipParser :: @@ -251,14 +252,16 @@ mkDefaultRelationshipParser backendInsertAction xNestedInserts relationshipInfo case riType relationshipInfo of ObjRel -> do parser <- MaybeT $ objectRelationshipInput backendInsertAction otherTableInfo - pure $ - P.fieldOptional relFieldName Nothing (P.nullable parser) <&> \objRelIns -> do + pure + $ P.fieldOptional relFieldName Nothing (P.nullable parser) + <&> \objRelIns -> do rel <- join objRelIns Just $ IR.AIObjectRelationship xNestedInserts $ IR.RelationInsert rel relationshipInfo ArrRel -> do parser <- MaybeT $ arrayRelationshipInput backendInsertAction otherTableInfo - pure $ - P.fieldOptional relFieldName Nothing (P.nullable parser) <&> \arrRelIns -> do + pure + $ P.fieldOptional relFieldName Nothing (P.nullable parser) + <&> \arrRelIns -> do rel <- join arrRelIns guard $ not $ null $ IR._aiInsertObject rel Just $ IR.AIArrayRelationship xNestedInserts $ IR.RelationInsert rel relationshipInfo @@ -404,10 +407,11 @@ deleteFromTable 
scenario tableInfo fieldName description = runMaybeT $ do whereArg <- P.field whereName (Just whereDesc) <$> tableBoolExp tableInfo selection <- mutationSelectionSet tableInfo let columns = tableColumns tableInfo - pure $ - P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) $ - P.subselection fieldName description whereArg selection - <&> mkDeleteObject (tableInfoName tableInfo) columns deletePerms (Just tCase) . fmap IR.MOutMultirowFields + pure + $ P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) + $ P.subselection fieldName description whereArg selection + <&> mkDeleteObject (tableInfoName tableInfo) columns deletePerms (Just tCase) + . fmap IR.MOutMultirowFields -- | Construct a root field, normally called delete_tablename_by_pk, that can be used to delete an -- individual row from a DB table, specified by primary key. Select permissions are required, as @@ -440,10 +444,11 @@ deleteFromTableByPk scenario tableInfo fieldName description = runMaybeT $ do -- For more info see Note [Backend only permissions] guard $ not $ scenario == Frontend && dpiBackendOnly deletePerms selection <- MaybeT $ tableSelectionSet tableInfo - pure $ - P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) $ - P.subselection fieldName description pkArgs selection - <&> mkDeleteObject (tableInfoName tableInfo) columns deletePerms (Just tCase) . fmap IR.MOutSinglerowObject + pure + $ P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) + $ P.subselection fieldName description pkArgs selection + <&> mkDeleteObject (tableInfoName tableInfo) columns deletePerms (Just tCase) + . fmap IR.MOutSinglerowObject mkDeleteObject :: (Backend b) => @@ -496,14 +501,14 @@ mutationSelectionSet tableInfo = do selectionDesc = G.Description $ "response of any mutation on the table " <>> tableName selectionFields = catMaybes - [ Just $ - IR.MCount - <$ P.selection_ affectedRowsName (Just affectedRowsDesc) P.int, + [ Just + $ IR.MCount + <$ P.selection_ affectedRowsName (Just affectedRowsDesc) P.int, returning ] - pure $ - P.selectionSet selectionName (Just selectionDesc) selectionFields - <&> parsedSelectionsToFields IR.MExp + pure + $ P.selectionSet selectionName (Just selectionDesc) selectionFields + <&> parsedSelectionsToFields IR.MExp -- | How to specify a database row by primary key. -- @@ -521,9 +526,15 @@ primaryKeysArguments tableInfo = runMaybeT $ do primaryKeys <- hoistMaybe $ _tciPrimaryKey . _tiCoreInfo $ tableInfo let columns = _pkColumns primaryKeys guard $ all (\c -> ciColumn c `HashMap.member` spiCols selectPerms) columns - lift $ - fmap (BoolAnd . toList) . sequenceA <$> for columns \columnInfo -> do + lift + $ fmap (BoolAnd . toList) + . sequenceA + <$> for columns \columnInfo -> do field <- columnParser (ciType columnInfo) (G.Nullability False) - pure $ - BoolField . AVColumn columnInfo . pure . AEQ True . IR.mkParameter - <$> P.field (ciName columnInfo) (ciDescription columnInfo) field + pure + $ BoolField + . AVColumn columnInfo + . pure + . AEQ True + .
IR.mkParameter + <$> P.field (ciName columnInfo) (ciDescription columnInfo) field diff --git a/server/src-lib/Hasura/GraphQL/Schema/OrderBy.hs b/server/src-lib/Hasura/GraphQL/Schema/OrderBy.hs index 8a55b9400a8..aad83cabc66 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/OrderBy.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/OrderBy.hs @@ -79,8 +79,10 @@ logicalModelOrderByExp logicalModel = Nothing -> throw500 $ "Error creating fields for logical model " <> tshow name Just tableFields -> do let description = - G.Description $ - "Ordering options when selecting data from " <> name <<> "." + G.Description + $ "Ordering options when selecting data from " + <> name + <<> "." memoizeKey = name orderByExpInternal (C.fromCustomName name) description tableFields memoizeKey @@ -122,12 +124,13 @@ orderByExpInternal gqlName description tableFields memoizeKey = do case fieldInfo of FIColumn (SCIScalarColumn columnInfo) -> do let !fieldName = ciName columnInfo - pure $ - P.fieldOptional + pure + $ P.fieldOptional fieldName Nothing (orderByOperator @b tCase sourceInfo) - <&> fmap (pure . mkOrderByItemG @b (IR.AOCColumn columnInfo)) . join + <&> fmap (pure . mkOrderByItemG @b (IR.AOCColumn columnInfo)) + . join FIColumn (SCIObjectColumn _) -> empty -- TODO(dmoverton) FIColumn (SCIArrayColumn _) -> empty -- TODO(dmoverton) FIRelationship relationshipInfo -> do @@ -154,20 +157,21 @@ orderByExpInternal gqlName description tableFields memoizeKey = do let ComputedFieldFunction {..} = _cfiFunction mkComputedFieldOrderBy = let functionArgs = - flip FunctionArgsExp mempty $ - fromComputedFieldImplicitArguments @b IR.UVSession _cffComputedFieldImplicitArgs + flip FunctionArgsExp mempty + $ fromComputedFieldImplicitArguments @b IR.UVSession _cffComputedFieldImplicitArgs in IR.ComputedFieldOrderBy _cfiXComputedFieldInfo _cfiName _cffName functionArgs fieldName <- hoistMaybe $ G.mkName $ toTxt _cfiName guard $ _cffInputArgs == mempty -- No input arguments other than table row and session argument case computedFieldReturnType @b _cfiReturnType of ReturnsScalar scalarType -> do let computedFieldOrderBy = mkComputedFieldOrderBy $ IR.CFOBEScalar scalarType - pure $ - P.fieldOptional + pure + $ P.fieldOptional fieldName Nothing (orderByOperator @b tCase sourceInfo) - <&> fmap (pure . mkOrderByItemG @b (IR.AOCComputedField computedFieldOrderBy)) . join + <&> fmap (pure . mkOrderByItemG @b (IR.AOCComputedField computedFieldOrderBy)) + . join ReturnsTable table -> do let aggregateFieldName = applyFieldNameCaseIdentifier tCase $ C.fromAutogeneratedTuple (fieldName, [G.convertNameToSuffix Name._aggregate]) tableInfo' <- askTableInfo table @@ -176,13 +180,13 @@ orderByExpInternal gqlName description tableFields memoizeKey = do aggregationParser <- lift $ orderByAggregation sourceInfo tableInfo' pure $ do aggregationOrderBy <- join <$> P.fieldOptional aggregateFieldName Nothing (P.nullable aggregationParser) - pure $ - fmap - ( map $ - fmap $ - IR.AOCComputedField - . mkComputedFieldOrderBy - . IR.CFOBETableAggregation table newPerms + pure + $ fmap + ( map + $ fmap + $ IR.AOCComputedField + . mkComputedFieldOrderBy + . IR.CFOBETableAggregation table newPerms ) aggregationOrderBy ReturnsOthers -> empty @@ -207,8 +211,10 @@ tableOrderByExp tableInfo = do tableGQLName <- getTableIdentifierName tableInfo tableFields <- tableSelectFields tableInfo let description = - G.Description $ - "Ordering options when selecting data from " <> tableInfoName tableInfo <<> "." 
+ G.Description + $ "Ordering options when selecting data from " + <> tableInfoName tableInfo + <<> "." memoizeKey = tableInfoName tableInfo orderByExpInternal tableGQLName description tableFields memoizeKey @@ -238,29 +244,32 @@ orderByAggregation sourceInfo tableInfo = P.memoizeOn 'orderByAggregation (_siNa numOperatorsAndColumns = HashMap.fromList $ (,numColumns) <$> numericAggOperators compOperatorsAndColumns = HashMap.fromList $ (,compColumns) <$> comparisonAggOperators customOperatorsAndColumns = - HashMap.mapKeys (C.fromCustomName) $ - getCustomAggOpsColumns tCase allColumns <$> getCustomAggregateOperators @b (_siConfiguration sourceInfo) + HashMap.mapKeys (C.fromCustomName) + $ getCustomAggOpsColumns tCase allColumns + <$> getCustomAggregateOperators @b (_siConfiguration sourceInfo) allOperatorsAndColumns = - HashMap.catMaybes $ - HashMap.unionsWith (<>) [numOperatorsAndColumns, compOperatorsAndColumns, customOperatorsAndColumns] + HashMap.catMaybes + $ HashMap.unionsWith (<>) [numOperatorsAndColumns, compOperatorsAndColumns, customOperatorsAndColumns] aggFields = - fmap (concat . catMaybes . concat) $ - sequenceA $ - catMaybes - [ -- count - Just $ - P.fieldOptional - Name._count - Nothing - (orderByOperator @b tCase sourceInfo) - <&> pure . fmap (pure . mkOrderByItemG @b IR.AAOCount) . join, - -- other operators - if null allOperatorsAndColumns - then Nothing - else Just $ - for (HashMap.toList allOperatorsAndColumns) \(operator, fields) -> do - parseOperator mkTypename operator tableGQLName tCase fields - ] + fmap (concat . catMaybes . concat) + $ sequenceA + $ catMaybes + [ -- count + Just + $ P.fieldOptional + Name._count + Nothing + (orderByOperator @b tCase sourceInfo) + <&> pure + . fmap (pure . mkOrderByItemG @b IR.AAOCount) + . join, + -- other operators + if null allOperatorsAndColumns + then Nothing + else Just + $ for (HashMap.toList allOperatorsAndColumns) \(operator, fields) -> do + parseOperator mkTypename operator tableGQLName tCase fields + ] let objectName = runMkTypename mkTypename $ applyTypeNameCaseIdentifier tCase $ mkTableAggregateOrderByTypeName tableIdentifierName description = G.Description $ "order by aggregate values of table " <>> tableName pure $ P.object objectName (Just description) aggFields @@ -312,7 +321,8 @@ orderByAggregation sourceInfo tableInfo = P.memoizeOn 'orderByAggregation (_siNa (ciName columnInfo) (ciDescription columnInfo) (orderByOperator @b tCase sourceInfo) - <&> fmap (columnInfo,resultType,) . join + <&> fmap (columnInfo,resultType,) + . join parseOperator :: MkTypename -> diff --git a/server/src-lib/Hasura/GraphQL/Schema/Parser.hs b/server/src-lib/Hasura/GraphQL/Schema/Parser.hs index 089444d5098..6a76a6adb1e 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Parser.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Parser.hs @@ -132,7 +132,7 @@ type TypeDefinitionsWrapper = P.TypeDefinitionsWrapper MetadataObjId -- | In order to aid type inference and type checking, we define this pattern -- synonym (an actual one) which restricts 'P.TypeDefinitionsWrapper' to have -- 'MetadataObjId' set for its origin type parameter. -pattern TypeDefinitionsWrapper :: () => forall a. HasTypeDefinitions a => a -> TypeDefinitionsWrapper +pattern TypeDefinitionsWrapper :: () => forall a. 
(HasTypeDefinitions a) => a -> TypeDefinitionsWrapper pattern TypeDefinitionsWrapper typeDef = P.TypeDefinitionsWrapper typeDef toQErr :: (MonadError QErr m) => Either ParseError a -> m a diff --git a/server/src-lib/Hasura/GraphQL/Schema/Postgres.hs b/server/src-lib/Hasura/GraphQL/Schema/Postgres.hs index 71dcb42a79b..e4f05195d30 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Postgres.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Postgres.hs @@ -18,12 +18,13 @@ import Hasura.RQL.Types.CustomTypes import Hasura.RQL.Types.Metadata.Object buildActionQueryFields :: - MonadBuildActionSchema r m n => + (MonadBuildActionSchema r m n) => AnnotatedCustomTypes -> ActionInfo -> SchemaT r m [FieldParser n (QueryRootField UnpreparedValue)] buildActionQueryFields customTypes actionInfo = - maybeToList . applyActionOrigin actionInfo + maybeToList + . applyActionOrigin actionInfo <$> case _adType (_aiDefinition actionInfo) of ActionQuery -> fmap (fmap (RFAction . AQQuery)) <$> actionExecute customTypes actionInfo @@ -32,12 +33,13 @@ buildActionQueryFields customTypes actionInfo = fmap (fmap (RFAction . AQAsync)) <$> actionAsyncQuery (_actObjectTypes customTypes) actionInfo buildActionMutationFields :: - MonadBuildActionSchema r m n => + (MonadBuildActionSchema r m n) => AnnotatedCustomTypes -> ActionInfo -> SchemaT r m [FieldParser n (MutationRootField UnpreparedValue)] buildActionMutationFields customTypes actionInfo = - maybeToList . applyActionOrigin actionInfo + maybeToList + . applyActionOrigin actionInfo <$> case _adType (_aiDefinition actionInfo) of ActionQuery -> pure Nothing ActionMutation ActionSynchronous -> @@ -46,12 +48,13 @@ buildActionMutationFields customTypes actionInfo = fmap (fmap (RFAction . AMAsync)) <$> actionAsyncMutation (_actInputTypes customTypes) actionInfo buildActionSubscriptionFields :: - MonadBuildActionSchema r m n => + (MonadBuildActionSchema r m n) => AnnotatedCustomTypes -> ActionInfo -> SchemaT r m [FieldParser n (QueryRootField UnpreparedValue)] buildActionSubscriptionFields customTypes actionInfo = - maybeToList . applyActionOrigin actionInfo + maybeToList + . applyActionOrigin actionInfo <$> case _adType (_aiDefinition actionInfo) of ActionQuery -> pure Nothing ActionMutation ActionSynchronous -> pure Nothing diff --git a/server/src-lib/Hasura/GraphQL/Schema/Relay.hs b/server/src-lib/Hasura/GraphQL/Schema/Relay.hs index 7888d1380a6..31b696e11b9 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Relay.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Relay.hs @@ -65,28 +65,29 @@ nodeInterface sourceCache = NodeInterfaceParserBuilder $ \context options -> mem tablePkeyColumns <- hoistMaybe $ tableInfo ^? tiCoreInfo . tciPrimaryKey . _Just . 
pkColumns selectPermissions <- hoistMaybe $ tableSelectPermissions roleName tableInfo annotatedFieldsParser <- MaybeT $ tableSelectionSet tableInfo - pure $ - annotatedFieldsParser <&> \fields -> + pure + $ annotatedFieldsParser + <&> \fields -> ( sourceName, - AB.mkAnyBackend $ - TableMap $ - HashMap.singleton tableName $ - NodeInfo sourceInfo selectPermissions tablePkeyColumns fields + AB.mkAnyBackend + $ TableMap + $ HashMap.singleton tableName + $ NodeInfo sourceInfo selectPermissions tablePkeyColumns fields ) - pure $ - HashMap.fromListWith fuseAnyMaps - <$> P.selectionSetInterface - Name._Node - (Just nodeInterfaceDescription) - [idField] - tables + pure + $ HashMap.fromListWith fuseAnyMaps + <$> P.selectionSetInterface + Name._Node + (Just nodeInterfaceDescription) + [idField] + tables where -- this can only ever fail if somehow, within the same source, we ran into -- two tables of a different type b; in other words, it is impossible. fuseAnyMaps :: AB.AnyBackend TableMap -> AB.AnyBackend TableMap -> AB.AnyBackend TableMap fuseAnyMaps m1 m2 = - AB.composeAnyBackend @Backend fuseMaps m1 m2 $ - error "panic: two tables of a different backend type within the same source" + AB.composeAnyBackend @Backend fuseMaps m1 m2 + $ error "panic: two tables of a different backend type within the same source" fuseMaps :: forall b. (Backend b) => TableMap b -> TableMap b -> AB.AnyBackend TableMap fuseMaps (TableMap m1) (TableMap m2) = AB.mkAnyBackend @b $ TableMap $ HashMap.union m1 m2 @@ -110,8 +111,9 @@ nodeField sourceCache context options = do nodeObject <- case scSchemaKind context of HasuraSchema -> throw500 "internal error: the node field should only be built for the Relay schema" RelaySchema nodeBuilder -> runNodeBuilder nodeBuilder context options - pure $ - P.subselection Name._node Nothing idArgument nodeObject `P.bindField` \(ident, parseds) -> do + pure + $ P.subselection Name._node Nothing idArgument nodeObject + `P.bindField` \(ident, parseds) -> do nodeId <- parseNodeId ident case nodeId of NodeIdV1 (V1NodeId tableName pKeys) -> do @@ -129,11 +131,11 @@ nodeField sourceCache context options = do [nodeValue] -> createRootField stringifyNumbers tableName nodeValue pKeys [] -> throwInvalidNodeId $ "no such table found: " <> toErrorValue tableName l -> - throwInvalidNodeId $ - "this V1 node id matches more than one table across different sources: " - <> toErrorValue tableName - <> " exists in sources " - <> toErrorValue (_siName . nvSourceInfo <$> l) + throwInvalidNodeId + $ "this V1 node id matches more than one table across different sources: " + <> toErrorValue tableName + <> " exists in sources " + <> toErrorValue (_siName . nvSourceInfo <$> l) NodeIdV2 nodev2 -> -- Node id V2. 
-- @@ -164,27 +166,27 @@ nodeField sourceCache context options = do n (IR.QueryRootField IR.UnpreparedValue) createRootField stringifyNumbers tableName (NodeInfo sourceInfo perms pKeys fields) columnValues = do whereExp <- buildNodeIdBoolExp columnValues pKeys - pure $ - IR.RFDB (_siName sourceInfo) $ - AB.mkAnyBackend $ - IR.SourceConfigWith (_siConfiguration sourceInfo) Nothing $ - IR.QDBR $ - IR.QDBSingleRow $ - IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = IR.FromTable tableName, - IR._asnPerm = tablePermissionsInfo perms, - IR._asnArgs = - IR.SelectArgs - { IR._saWhere = Just whereExp, - IR._saOrderBy = Nothing, - IR._saLimit = Nothing, - IR._saOffset = Nothing, - IR._saDistinct = Nothing - }, - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Just $ _rscNamingConvention $ _siCustomization sourceInfo - } + pure + $ IR.RFDB (_siName sourceInfo) + $ AB.mkAnyBackend + $ IR.SourceConfigWith (_siConfiguration sourceInfo) Nothing + $ IR.QDBR + $ IR.QDBSingleRow + $ IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = IR.FromTable tableName, + IR._asnPerm = tablePermissionsInfo perms, + IR._asnArgs = + IR.SelectArgs + { IR._saWhere = Just whereExp, + IR._saOrderBy = Nothing, + IR._saLimit = Nothing, + IR._saOffset = Nothing, + IR._saDistinct = Nothing + }, + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Just $ _rscNamingConvention $ _siCustomization sourceInfo + } -- Craft the 'where' condition of the query by making an `AEQ` entry for -- each primary key. This might fail if the given node id doesn't exactly @@ -200,13 +202,16 @@ nodeField sourceCache context options = do (nonAlignedPkColumns, nonAlignedColumnValues, alignedTuples) = partitionThese $ toList $ align remainingPkColumns remainingColumns - unless (null nonAlignedPkColumns) $ - throwInvalidNodeId $ - "primary key columns " <> toErrorValue (map ciColumn nonAlignedPkColumns) <> " are missing" + unless (null nonAlignedPkColumns) + $ throwInvalidNodeId + $ "primary key columns " + <> toErrorValue (map ciColumn nonAlignedPkColumns) + <> " are missing" - unless (null nonAlignedColumnValues) $ - throwInvalidNodeId $ - "unexpected column values " <> toErrorValue nonAlignedColumnValues + unless (null nonAlignedColumnValues) + $ throwInvalidNodeId + $ "unexpected column values " + <> toErrorValue nonAlignedColumnValues let allTuples = (firstPkColumn, firstColumnValue) : alignedTuples IR.BoolAnd <$> for allTuples \(columnInfo, columnValue) -> do @@ -214,8 +219,8 @@ nodeField sourceCache context options = do parsedValue <- parseScalarValueColumnType columnType columnValue `onLeft` \e -> P.parseErrorWith P.ParseFailed $ "value of column " <> toErrorValue (ciColumn columnInfo) <> " in node id: " <> toErrorMessage (qeError e) - pure $ - IR.BoolField $ - IR.AVColumn - columnInfo - [IR.AEQ True $ IR.UVParameter IR.Unknown $ ColumnValue columnType parsedValue] + pure + $ IR.BoolField + $ IR.AVColumn + columnInfo + [IR.AEQ True $ IR.UVParameter IR.Unknown $ ColumnValue columnType parsedValue] diff --git a/server/src-lib/Hasura/GraphQL/Schema/Remote.hs b/server/src-lib/Hasura/GraphQL/Schema/Remote.hs index df1fbb87e4c..fdfd4145cbf 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Remote.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Remote.hs @@ -42,17 +42,17 @@ import Language.GraphQL.Draft.Syntax qualified as G buildRemoteParser :: forall r m n. 
- MonadBuildRemoteSchema r m n => + (MonadBuildRemoteSchema r m n) => IntrospectionResult -> RemoteSchemaRelationships -> RemoteSchemaInfo -> SchemaT r m (RemoteSchemaParser n) buildRemoteParser introspectionResult remoteRelationships remoteSchemaInfo@RemoteSchemaInfo {..} = do (rawQueryParsers, rawMutationParsers, rawSubscriptionParsers) <- - withRemoteSchemaCustomization rsCustomizer $ - buildRawRemoteParser introspectionResult remoteRelationships remoteSchemaInfo - pure $ - RemoteSchemaParser + withRemoteSchemaCustomization rsCustomizer + $ buildRawRemoteParser introspectionResult remoteRelationships remoteSchemaInfo + pure + $ RemoteSchemaParser (customizeRemoteNamespace remoteSchemaInfo (irQueryRoot introspectionResult) rawQueryParsers) (customizeRemoteNamespace remoteSchemaInfo <$> irMutationRoot introspectionResult <*> rawMutationParsers) (customizeRemoteNamespace remoteSchemaInfo <$> irSubscriptionRoot introspectionResult <*> rawSubscriptionParsers) @@ -60,8 +60,8 @@ buildRemoteParser introspectionResult remoteRelationships remoteSchemaInfo@Remot makeResultCustomizer :: RemoteSchemaCustomizer -> IR.GraphQLField (IR.RemoteRelationshipField IR.UnpreparedValue) RemoteSchemaVariable -> ResultCustomizer makeResultCustomizer remoteSchemaCustomizer IR.GraphQLField {..} = - modifyFieldByName _fAlias $ - if _fName == GName.___typename + modifyFieldByName _fAlias + $ if _fName == GName.___typename then customizeTypeNameString (_rscCustomizeTypeName remoteSchemaCustomizer) else resultCustomizerFromSelection _fSelectionSet where @@ -84,7 +84,7 @@ makeResultCustomizer remoteSchemaCustomizer IR.GraphQLField {..} = buildRawRemoteParser :: forall r m n. - MonadBuildRemoteSchema r m n => + (MonadBuildRemoteSchema r m n) => IntrospectionResult -> RemoteSchemaRelationships -> RemoteSchemaInfo -> @@ -260,7 +260,7 @@ newtype Altered = Altered {getAltered :: Bool} -- unmodified. inputValueDefinitionParser :: forall r m n. - MonadBuildRemoteSchema r m n => + (MonadBuildRemoteSchema r m n) => RemoteSchemaIntrospection -> G.InputValueDefinition -> SchemaT r m (InputFieldsParser n (Maybe (Altered, G.Value RemoteSchemaVariable))) @@ -269,7 +269,7 @@ inputValueDefinitionParser schemaDoc (G.InputValueDefinition desc name fieldType where doNullability :: forall a k. - 'Input <: k => + ('Input <: k) => G.Nullability -> Parser k n (Maybe a) -> Parser k n (Maybe a) @@ -278,7 +278,7 @@ inputValueDefinitionParser schemaDoc (G.InputValueDefinition desc name fieldType fieldConstructor :: forall k. - 'Input <: k => + ('Input <: k) => Parser k n (Maybe (Altered, G.Value RemoteSchemaVariable)) -> InputFieldsParser n (Maybe (Altered, G.Value RemoteSchemaVariable)) fieldConstructor (shortCircuitIfUnaltered -> parser) = @@ -291,7 +291,7 @@ inputValueDefinitionParser schemaDoc (G.InputValueDefinition desc name fieldType buildField :: ( forall k. - 'Input <: k => + ('Input <: k) => Parser k n (Maybe (Altered, G.Value RemoteSchemaVariable)) -> InputFieldsParser n (Maybe (Altered, G.Value RemoteSchemaVariable)) ) -> @@ -357,7 +357,7 @@ inputValueDefinitionParser schemaDoc (G.InputValueDefinition desc name fieldType -- If the value contains a variable with a customized type name then we need to consider it to be -- altered to ensure that the original type name is passed to the remote server. 
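The `Altered` flag threaded through these parsers records whether a preset (or, per the comment above, a variable with a customized type name) modified an input value; `shortCircuitIfUnaltered` relies on it to forward untouched values verbatim. A hedged sketch, assuming the flag combines by Boolean OR so that one altered leaf taints the whole aggregate (the actual instances live alongside `newtype Altered` in this module and may differ):

newtype Altered = Altered {getAltered :: Bool}
  deriving (Show)

-- Assumed OR-style combination: any altered component marks the whole value.
instance Semigroup Altered where
  Altered a <> Altered b = Altered (a || b)

instance Monoid Altered where
  mempty = Altered False

main :: IO ()
main = do
  print (mconcat [Altered False, Altered True]) -- Altered {getAltered = True}
  print (mconcat ([] :: [Altered]))             -- Altered {getAltered = False}
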
remoteFieldScalarParser :: - MonadParse n => + (MonadParse n) => MkTypename -> G.ScalarTypeDefinition -> P.Parser 'Both n (Altered, G.Value RemoteSchemaVariable) @@ -385,7 +385,7 @@ remoteFieldScalarParser customizeTypename (G.ScalarTypeDefinition description na G.TypeList n l -> G.TypeList n $ mkRemoteGType l remoteFieldEnumParser :: - MonadParse n => + (MonadParse n) => MkTypename -> G.EnumTypeDefinition -> Parser 'Both n (Altered, G.Value RemoteSchemaVariable) @@ -422,7 +422,7 @@ remoteFieldEnumParser customizeTypename (G.EnumTypeDefinition desc name _directi -- memoization: we know for sure that the preset fields won't generate a recursive call! remoteInputObjectParser :: forall r m n. - MonadBuildRemoteSchema r m n => + (MonadBuildRemoteSchema r m n) => RemoteSchemaIntrospection -> G.InputObjectTypeDefinition RemoteSchemaInputValueDefinition -> SchemaT @@ -469,8 +469,9 @@ shortCircuitIfUnaltered parser = -- The parser did yield a value, and it was unmodified by presets -- we can short-circuit by transforming the input value, therefore -- "unpeeling" variables and avoiding extraneous JSON variables. - Just (Altered False, _) -> Just $ - (Altered False,) $ case castWith (P.inputParserInput @k) value of + Just (Altered False, _) -> Just + $ (Altered False,) + $ case castWith (P.inputParserInput @k) value of -- The input was a GraphQL value: just forward it. GraphQLValue v -> QueryVariable <$> v -- The input value was already a JSON value: we still have to create @@ -522,7 +523,7 @@ shortCircuitIfUnaltered parser = -- contains values that contain presets further down, then this result is labelled as altered. argumentsParser :: forall r m n. - MonadBuildRemoteSchema r m n => + (MonadBuildRemoteSchema r m n) => G.ArgumentsDefinition RemoteSchemaInputValueDefinition -> RemoteSchemaIntrospection -> SchemaT r m (InputFieldsParser n (Altered, HashMap G.Name (G.Value RemoteSchemaVariable))) @@ -562,7 +563,7 @@ aggregateListAndAlteration = first mconcat . unzip . catMaybes remoteSchemaRelationships :: forall r n m. - MonadBuildRemoteSchema r m n => + (MonadBuildRemoteSchema r m n) => RemoteSchemaRelationships -> G.Name -> SchemaT r m [FieldParser n (IR.SchemaRemoteRelationshipSelect (IR.RemoteRelationshipField IR.UnpreparedValue))] @@ -579,7 +580,7 @@ remoteSchemaRelationships relationships typeName = -- | 'remoteSchemaObject' returns a output parser for a given 'ObjectTypeDefinition'. remoteSchemaObject :: forall r m n. 
- MonadBuildRemoteSchema r m n => + (MonadBuildRemoteSchema r m n) => RemoteSchemaIntrospection -> RemoteSchemaRelationships -> G.ObjectTypeDefinition RemoteSchemaInputValueDefinition -> @@ -594,22 +595,22 @@ remoteSchemaObject schemaDoc remoteRelationships defn@(G.ObjectTypeDefinition de traverse_ validateImplementsFields interfaceDefs typename <- asks getter <&> \mkTypename -> runMkTypename mkTypename name let allFields = map (fmap IR.FieldGraphQL) subFieldParsers <> map (fmap IR.FieldRemote) remoteJoinParsers - pure $ - P.selectionSetObject typename description allFields implements - <&> InsOrdHashMap.mapWithKey \alias -> - handleTypename $ - const $ - IR.FieldGraphQL $ - IR.mkGraphQLField (Just alias) GName.___typename mempty mempty IR.SelectionSetNone + pure + $ P.selectionSetObject typename description allFields implements + <&> InsOrdHashMap.mapWithKey \alias -> + handleTypename + $ const + $ IR.FieldGraphQL + $ IR.mkGraphQLField (Just alias) GName.___typename mempty mempty IR.SelectionSetNone where getInterface :: G.Name -> SchemaT r m (G.InterfaceTypeDefinition [G.Name] RemoteSchemaInputValueDefinition) getInterface interfaceName = - onNothing (lookupInterface schemaDoc interfaceName) $ - throw400 RemoteSchemaError $ - "Could not find interface " - <> squote interfaceName - <> " implemented by Object type " - <> squote name + onNothing (lookupInterface schemaDoc interfaceName) + $ throw400 RemoteSchemaError + $ "Could not find interface " + <> squote interfaceName + <> " implemented by Object type " + <> squote name validateImplementsFields :: G.InterfaceTypeDefinition [G.Name] RemoteSchemaInputValueDefinition -> SchemaT r m () validateImplementsFields interface = traverse_ (validateImplementsField (G._itdName interface)) (G._itdFieldsDefinition interface) @@ -617,30 +618,30 @@ remoteSchemaObject schemaDoc remoteRelationships defn@(G.ObjectTypeDefinition de validateImplementsField interfaceName interfaceField = case lookup (G._fldName interfaceField) (zip (fmap G._fldName subFields) subFields) of Nothing -> - throw400 RemoteSchemaError $ - "Interface field " - <> squote interfaceName - <> "." - <> dquote (G._fldName interfaceField) - <> " expected, but " - <> squote name - <> " does not provide it" + throw400 RemoteSchemaError + $ "Interface field " + <> squote interfaceName + <> "." + <> dquote (G._fldName interfaceField) + <> " expected, but " + <> squote name + <> " does not provide it" Just f -> do - unless (validateSubType (G._fldType f) (G._fldType interfaceField)) $ - throw400 RemoteSchemaError $ - "The type of Object field " - <> squote name - <> "." - <> dquote (G._fldName f) - <> " (" - <> G.showGT (G._fldType f) - <> ") is not the same type/sub type of Interface field " - <> squote interfaceName - <> "." - <> dquote (G._fldName interfaceField) - <> " (" - <> G.showGT (G._fldType interfaceField) - <> ")" + unless (validateSubType (G._fldType f) (G._fldType interfaceField)) + $ throw400 RemoteSchemaError + $ "The type of Object field " + <> squote name + <> "." + <> dquote (G._fldName f) + <> " (" + <> G.showGT (G._fldType f) + <> ") is not the same type/sub type of Interface field " + <> squote interfaceName + <> "." 
+ <> dquote (G._fldName interfaceField) + <> " (" + <> G.showGT (G._fldType interfaceField) + <> ")" traverse_ ( validateArgument (map _rsitdDefinition (G._fldArgumentsDefinition f)) @@ -658,56 +659,56 @@ remoteSchemaObject schemaDoc remoteRelationships defn@(G.ObjectTypeDefinition de validateArgument objectFieldArgs ifaceArgument = case lookup (G._ivdName ifaceArgument) (zip (fmap G._ivdName objectFieldArgs) objectFieldArgs) of Nothing -> - throw400 RemoteSchemaError $ - "Interface field argument " - <> squote interfaceName - <> "." - <> dquote (G._fldName interfaceField) - <> "(" - <> dquote (G._ivdName ifaceArgument) - <> ":) required, but Object field " - <> squote name - <> "." - <> dquote (G._fldName f) - <> " does not provide it" + throw400 RemoteSchemaError + $ "Interface field argument " + <> squote interfaceName + <> "." + <> dquote (G._fldName interfaceField) + <> "(" + <> dquote (G._ivdName ifaceArgument) + <> ":) required, but Object field " + <> squote name + <> "." + <> dquote (G._fldName f) + <> " does not provide it" Just a -> - unless (G._ivdType a == G._ivdType ifaceArgument) $ - throw400 RemoteSchemaError $ - "Interface field argument " - <> squote interfaceName - <> "." - <> dquote (G._fldName interfaceField) - <> "(" - <> dquote (G._ivdName ifaceArgument) - <> ":) expects type " - <> G.showGT (G._ivdType ifaceArgument) - <> ", but " - <> squote name - <> "." - <> dquote (G._fldName f) - <> "(" - <> dquote (G._ivdName ifaceArgument) - <> ":) has type " - <> G.showGT (G._ivdType a) + unless (G._ivdType a == G._ivdType ifaceArgument) + $ throw400 RemoteSchemaError + $ "Interface field argument " + <> squote interfaceName + <> "." + <> dquote (G._fldName interfaceField) + <> "(" + <> dquote (G._ivdName ifaceArgument) + <> ":) expects type " + <> G.showGT (G._ivdType ifaceArgument) + <> ", but " + <> squote name + <> "." + <> dquote (G._fldName f) + <> "(" + <> dquote (G._ivdName ifaceArgument) + <> ":) has type " + <> G.showGT (G._ivdType a) validateNoExtraNonNull :: [G.InputValueDefinition] -> G.InputValueDefinition -> SchemaT r m () validateNoExtraNonNull ifaceArguments objectFieldArg = case lookup (G._ivdName objectFieldArg) (zip (fmap G._ivdName ifaceArguments) ifaceArguments) of Just _ -> pure () Nothing -> - unless (G.isNullable (G._ivdType objectFieldArg)) $ - throw400 RemoteSchemaError $ - "Object field argument " - <> squote name - <> "." - <> dquote (G._fldName f) - <> "(" - <> dquote (G._ivdName objectFieldArg) - <> ":) is of required type " - <> G.showGT (G._ivdType objectFieldArg) - <> ", but is not provided by Interface field " - <> squote interfaceName - <> "." - <> dquote (G._fldName interfaceField) + unless (G.isNullable (G._ivdType objectFieldArg)) + $ throw400 RemoteSchemaError + $ "Object field argument " + <> squote name + <> "." + <> dquote (G._fldName f) + <> "(" + <> dquote (G._ivdName objectFieldArg) + <> ":) is of required type " + <> G.showGT (G._ivdType objectFieldArg) + <> ", but is not provided by Interface field " + <> squote interfaceName + <> "." + <> dquote (G._fldName interfaceField) validateSubType :: G.GType -> G.GType -> Bool -- TODO this ignores nullability which is probably wrong, even though the GraphQL spec is ambiguous validateSubType (G.TypeList _ x) (G.TypeList _ y) = validateSubType x y @@ -779,7 +780,7 @@ constructed query. -- Also check Note [Querying remote schema interfaces] remoteSchemaInterface :: forall r m n. 
- MonadBuildRemoteSchema r m n => + (MonadBuildRemoteSchema r m n) => RemoteSchemaIntrospection -> RemoteSchemaRelationships -> G.InterfaceTypeDefinition [G.Name] RemoteSchemaInputValueDefinition -> @@ -791,40 +792,41 @@ remoteSchemaInterface schemaDoc remoteRelationships defn@(G.InterfaceTypeDefinit -- In the Draft GraphQL spec (> June 2018), interfaces can themselves -- implement superinterfaces. In the future, we may need to support this -- here. - when (null subFieldParsers) $ - throw400 RemoteSchemaError $ - "List of fields cannot be empty for interface " <> squote name + when (null subFieldParsers) + $ throw400 RemoteSchemaError + $ "List of fields cannot be empty for interface " + <> squote name -- TODO: another way to obtain 'possibleTypes' is to lookup all the object -- types in the schema document that claim to implement this interface. We -- should have a check that expresses that that collection of objects is equal -- to 'possibleTypes'. typename <- asks getter <&> \mkTypename -> runMkTypename mkTypename name let allFields = map (fmap IR.FieldGraphQL) subFieldParsers - pure $ - P.selectionSetInterface typename description allFields objs - <&> IR.mkInterfaceSelectionSet (Set.fromList $ map G._fldName fields) + pure + $ P.selectionSetInterface typename description allFields objs + <&> IR.mkInterfaceSelectionSet (Set.fromList $ map G._fldName fields) where getObject :: G.Name -> SchemaT r m (G.ObjectTypeDefinition RemoteSchemaInputValueDefinition) getObject objectName = - onNothing (lookupObject schemaDoc objectName) $ - case lookupInterface schemaDoc objectName of + onNothing (lookupObject schemaDoc objectName) + $ case lookupInterface schemaDoc objectName of Nothing -> - throw400 RemoteSchemaError $ - "Could not find type " - <> squote objectName - <> ", which is defined as a member type of Interface " - <> squote name + throw400 RemoteSchemaError + $ "Could not find type " + <> squote objectName + <> ", which is defined as a member type of Interface " + <> squote name Just _ -> - throw400 RemoteSchemaError $ - "Interface type " - <> squote name - <> " can only include object types. It cannot include " - <> squote objectName + throw400 RemoteSchemaError + $ "Interface type " + <> squote name + <> " can only include object types. It cannot include " + <> squote objectName -- | 'remoteSchemaUnion' returns a output parser for a given 'UnionTypeDefinition'. remoteSchemaUnion :: forall r m n. 
- MonadBuildRemoteSchema r m n => + (MonadBuildRemoteSchema r m n) => RemoteSchemaIntrospection -> RemoteSchemaRelationships -> G.UnionTypeDefinition -> @@ -832,32 +834,33 @@ remoteSchemaUnion :: remoteSchemaUnion schemaDoc remoteRelationships defn@(G.UnionTypeDefinition description name _directives objectNames) = P.memoizeOn 'remoteSchemaObject defn do objs <- traverse (getObjectParser schemaDoc remoteRelationships getObject) objectNames - when (null objs) $ - throw400 RemoteSchemaError $ - "List of member types cannot be empty for union type " <> squote name + when (null objs) + $ throw400 RemoteSchemaError + $ "List of member types cannot be empty for union type " + <> squote name typename <- asks getter <&> \mkTypename -> runMkTypename mkTypename name pure $ P.selectionSetUnion typename description objs <&> IR.mkUnionSelectionSet where getObject :: G.Name -> SchemaT r m (G.ObjectTypeDefinition RemoteSchemaInputValueDefinition) getObject objectName = - onNothing (lookupObject schemaDoc objectName) $ - case lookupInterface schemaDoc objectName of + onNothing (lookupObject schemaDoc objectName) + $ case lookupInterface schemaDoc objectName of Nothing -> - throw400 RemoteSchemaError $ - "Could not find type " - <> squote objectName - <> ", which is defined as a member type of Union " - <> squote name + throw400 RemoteSchemaError + $ "Could not find type " + <> squote objectName + <> ", which is defined as a member type of Union " + <> squote name Just _ -> - throw400 RemoteSchemaError $ - "Union type " - <> squote name - <> " can only include object types. It cannot include " - <> squote objectName + throw400 RemoteSchemaError + $ "Union type " + <> squote name + <> " can only include object types. It cannot include " + <> squote objectName remoteFieldFromDefinition :: forall r m n. - MonadBuildRemoteSchema r m n => + (MonadBuildRemoteSchema r m n) => RemoteSchemaIntrospection -> G.Name -> RemoteSchemaRelationships -> @@ -892,7 +895,7 @@ remoteFieldFromDefinition schemaDoc parentTypeName remoteRelationships (G.FieldD -- in the 'RemoteSchemaIntrospection'. remoteFieldFromName :: forall r m n. - MonadBuildRemoteSchema r m n => + (MonadBuildRemoteSchema r m n) => RemoteSchemaIntrospection -> RemoteSchemaRelationships -> G.Name -> @@ -911,7 +914,7 @@ remoteFieldFromName sdoc remoteRelationships parentTypeName fieldName descriptio -- GraphQL 'Input' kind is provided, then error will be thrown. remoteField :: forall r m n. - MonadBuildRemoteSchema r m n => + (MonadBuildRemoteSchema r m n) => RemoteSchemaIntrospection -> RemoteSchemaRelationships -> G.Name -> @@ -981,7 +984,7 @@ remoteField sdoc remoteRelationships parentTypeName fieldName description argsDe -- reason 'getObject' is an argument to this function getObjectParser :: forall r m n. - MonadBuildRemoteSchema r m n => + (MonadBuildRemoteSchema r m n) => RemoteSchemaIntrospection -> RemoteSchemaRelationships -> (G.Name -> SchemaT r m (G.ObjectTypeDefinition RemoteSchemaInputValueDefinition)) -> @@ -1002,7 +1005,9 @@ customizeRemoteNamespace remoteSchemaInfo@RemoteSchemaInfo {..} rootTypeName fie customizeNamespace (_rscNamespaceFieldName rsCustomizer) fromParsedSelection mkNamespaceTypename fieldParsers where fromParsedSelection alias = - handleTypename . const $ + handleTypename + . 
const + $ -- In P.selectionSet we lose the resultCustomizer from __typename fields so we need to put it back let resultCustomizer = modifyFieldByName alias $ customizeTypeNameString $ _rscCustomizeTypeName rsCustomizer in IR.RemoteSchemaRootField remoteSchemaInfo resultCustomizer $ IR.mkGraphQLField (Just alias) GName.___typename mempty mempty IR.SelectionSetNone diff --git a/server/src-lib/Hasura/GraphQL/Schema/RemoteRelationship.hs b/server/src-lib/Hasura/GraphQL/Schema/RemoteRelationship.hs index fd7f2e96cc6..6312abe94db 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/RemoteRelationship.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/RemoteRelationship.hs @@ -92,7 +92,8 @@ remoteRelationshipToSchemaField remoteSchemaCache remoteSchemaPermissions lhsFie pure (_rrfiInputValueDefinitions, _rrfiParamMap) else do (_, roleRemoteField) <- - afold @(Either _) $ + afold @(Either _) + $ -- TODO: this really needs to go way, we shouldn't be doing -- validation when building parsers validateToSchemaRelationship relationshipDef _rrfiLHSIdentifier _rrfiName (_rrfiRemoteSchema, introspection) lhsFields @@ -109,37 +110,38 @@ remoteRelationshipToSchemaField remoteSchemaCache remoteSchemaPermissions lhsFie let typeName = G.getBaseType nestedFieldType fieldTypeDefinition <- onNothing (lookupType roleIntrospection typeName) - -- the below case will never happen because we get the type name - -- from the schema document itself i.e. if a field exists for the - -- given role, then it's return type also must exist - $ - throw500 $ - "unexpected: " <> typeName <<> " not found " + -- the below case will never happen because we get the type name + -- from the schema document itself i.e. if a field exists for the + -- given role, then it's return type also must exist + $ throw500 + $ "unexpected: " + <> typeName + <<> " not found " -- These are the arguments that are given by the user while executing a query let remoteFieldUserArguments = map snd $ HashMap.toList remoteFieldParamMap remoteFld <- - withRemoteSchemaCustomization remoteSchemaCustomizer $ - lift $ - P.wrapFieldParser nestedFieldType - <$> remoteField remoteRelationshipIntrospection remoteSchemaRelationships remoteSchemaRoot fieldName Nothing remoteFieldUserArguments fieldTypeDefinition + withRemoteSchemaCustomization remoteSchemaCustomizer + $ lift + $ P.wrapFieldParser nestedFieldType + <$> remoteField remoteRelationshipIntrospection remoteSchemaRelationships remoteSchemaRoot fieldName Nothing remoteFieldUserArguments fieldTypeDefinition - pure $ - remoteFld - `P.bindField` \fld@IR.GraphQLField {IR._fArguments = args, IR._fSelectionSet = selSet, IR._fName = fname} -> do - let remoteArgs = - HashMap.toList args <&> \(argName, argVal) -> IR.RemoteFieldArgument argName $ P.GraphQLValue argVal - let resultCustomizer = - applyFieldCalls fieldCalls $ - applyAliasMapping (singletonAliasMapping fname (fcName $ NE.last fieldCalls)) $ - makeResultCustomizer remoteSchemaCustomizer fld - pure $ - IR.RemoteSchemaSelect - { IR._rselArgs = remoteArgs, - IR._rselResultCustomizer = resultCustomizer, - IR._rselSelection = selSet, - IR._rselFieldCall = fieldCalls, - IR._rselRemoteSchema = _rrfiRemoteSchema - } + pure + $ remoteFld + `P.bindField` \fld@IR.GraphQLField {IR._fArguments = args, IR._fSelectionSet = selSet, IR._fName = fname} -> do + let remoteArgs = + HashMap.toList args <&> \(argName, argVal) -> IR.RemoteFieldArgument argName $ P.GraphQLValue argVal + let resultCustomizer = + applyFieldCalls fieldCalls + $ applyAliasMapping (singletonAliasMapping fname 
(fcName $ NE.last fieldCalls)) + $ makeResultCustomizer remoteSchemaCustomizer fld + pure + $ IR.RemoteSchemaSelect + { IR._rselArgs = remoteArgs, + IR._rselResultCustomizer = resultCustomizer, + IR._rselSelection = selSet, + IR._rselFieldCall = fieldCalls, + IR._rselRemoteSchema = _rrfiRemoteSchema + } where -- Apply parent field calls so that the result customizer modifies the nested field applyFieldCalls :: NonEmpty FieldCall -> ResultCustomizer -> ResultCustomizer @@ -192,9 +194,10 @@ remoteRelationshipToSourceField :: m [FieldParser n (IR.RemoteSourceSelect (IR.RemoteRelationshipField IR.UnpreparedValue) IR.UnpreparedValue tgt)] remoteRelationshipToSourceField context options sourceCache RemoteSourceFieldInfo {..} = do sourceInfo <- - onNothing (unsafeSourceInfo @tgt =<< HashMap.lookup _rsfiSource sourceCache) $ - throw500 $ - "source not found " <> dquote _rsfiSource + onNothing (unsafeSourceInfo @tgt =<< HashMap.lookup _rsfiSource sourceCache) + $ throw500 + $ "source not found " + <> dquote _rsfiSource runSourceSchema context options sourceInfo do let roleName = scRole context tCase = _rscNamingConvention $ _siCustomization sourceInfo @@ -209,21 +212,23 @@ remoteRelationshipToSourceField context options sourceCache RemoteSourceFieldInf pure $ case selectionSetParserM of Nothing -> [] Just selectionSetParser -> - pure $ - P.subselection_ fieldName Nothing selectionSetParser <&> \fields -> - IR.SourceRelationshipObject $ - IR.AnnObjectSelectG fields (IR.FromTable _rsfiTable) $ - IR._tpFilter $ - tablePermissionsInfo tablePerms + pure + $ P.subselection_ fieldName Nothing selectionSetParser + <&> \fields -> + IR.SourceRelationshipObject + $ IR.AnnObjectSelectG fields (IR.FromTable _rsfiTable) + $ IR._tpFilter + $ tablePermissionsInfo tablePerms ArrRel -> do let aggFieldName = applyFieldNameCaseIdentifier tCase $ C.fromAutogeneratedTuple (fieldName, [G.convertNameToSuffix Name._aggregate]) selectionSetParser <- selectTable tableInfo fieldName Nothing aggSelectionSetParser <- selectTableAggregate tableInfo aggFieldName Nothing - pure $ - catMaybes + pure + $ catMaybes [ selectionSetParser <&> fmap IR.SourceRelationshipArray, aggSelectionSetParser <&> fmap IR.SourceRelationshipArrayAggregate ] - pure $ - parsers <&> fmap \select -> + pure + $ parsers + <&> fmap \select -> IR.RemoteSourceSelect _rsfiSource _rsfiSourceConfig select _rsfiMapping (soStringifyNumbers options) diff --git a/server/src-lib/Hasura/GraphQL/Schema/Select.hs b/server/src-lib/Hasura/GraphQL/Schema/Select.hs index 06779363a23..3b778551137 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Select.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Select.hs @@ -137,18 +137,18 @@ defaultSelectTable tableInfo fieldName description = runMaybeT do lift $ P.memoizeOn 'defaultSelectTable (sourceName, tableName, fieldName) do stringifyNumbers <- retrieve Options.soStringifyNumbers tableArgsParser <- tableArguments tableInfo - pure $ - P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) $ - P.subselection fieldName description tableArgsParser selectionSetParser - <&> \(args, fields) -> - IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = IR.FromTable tableName, - IR._asnPerm = tablePermissionsInfo selectPermissions, - IR._asnArgs = args, - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Just tCase - } + pure + $ P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) + $ P.subselection fieldName description tableArgsParser 
selectionSetParser + <&> \(args, fields) -> + IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = IR.FromTable tableName, + IR._asnPerm = tablePermissionsInfo selectPermissions, + IR._asnArgs = args, + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Just tCase + } -- | Simple table connection selection. -- @@ -195,24 +195,24 @@ selectTableConnection tableInfo fieldName description pkeyColumns = runMaybeT do lift $ P.memoizeOn 'selectTableConnection (_siName sourceInfo, tableName, fieldName) do stringifyNumbers <- retrieve Options.soStringifyNumbers selectArgsParser <- tableConnectionArgs pkeyColumns tableInfo - pure $ - P.subselection fieldName description selectArgsParser selectionSetParser - <&> \((args, split, slice), fields) -> - IR.ConnectionSelect - { IR._csXRelay = xRelayInfo, - IR._csPrimaryKeyColumns = pkeyColumns, - IR._csSplit = split, - IR._csSlice = slice, - IR._csSelect = - IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = IR.FromTable tableName, - IR._asnPerm = tablePermissionsInfo selectPermissions, - IR._asnArgs = args, - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Just tCase - } - } + pure + $ P.subselection fieldName description selectArgsParser selectionSetParser + <&> \((args, split, slice), fields) -> + IR.ConnectionSelect + { IR._csXRelay = xRelayInfo, + IR._csPrimaryKeyColumns = pkeyColumns, + IR._csSplit = split, + IR._csSlice = slice, + IR._csSelect = + IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = IR.FromTable tableName, + IR._asnPerm = tablePermissionsInfo selectPermissions, + IR._asnArgs = args, + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Just tCase + } + } -- | Table selection by primary key. -- @@ -249,25 +249,29 @@ selectTableByPk tableInfo fieldName description = runMaybeT do argsParser <- sequenceA <$> for primaryKeys \columnInfo -> do field <- columnParser (ciType columnInfo) (G.Nullability $ ciIsNullable columnInfo) - pure $ - BoolField . AVColumn columnInfo . pure . AEQ True . IR.mkParameter - <$> P.field (ciName columnInfo) (ciDescription columnInfo) field - pure $ - P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) $ - P.subselection fieldName description argsParser selectionSetParser - <&> \(boolExpr, fields) -> - let defaultPerms = tablePermissionsInfo selectPermissions - -- Do not account permission limit since the result is just a nullable object - permissions = defaultPerms {IR._tpLimit = Nothing} - whereExpr = Just $ BoolAnd $ toList boolExpr - in IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = IR.FromTable tableName, - IR._asnPerm = permissions, - IR._asnArgs = IR.noSelectArgs {IR._saWhere = whereExpr}, - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Just tCase - } + pure + $ BoolField + . AVColumn columnInfo + . pure + . AEQ True + . 
IR.mkParameter + <$> P.field (ciName columnInfo) (ciDescription columnInfo) field + pure + $ P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) + $ P.subselection fieldName description argsParser selectionSetParser + <&> \(boolExpr, fields) -> + let defaultPerms = tablePermissionsInfo selectPermissions + -- Do not account permission limit since the result is just a nullable object + permissions = defaultPerms {IR._tpLimit = Nothing} + whereExpr = Just $ BoolAnd $ toList boolExpr + in IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = IR.FromTable tableName, + IR._asnPerm = permissions, + IR._asnArgs = IR.noSelectArgs {IR._saWhere = whereExpr}, + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Just tCase + } -- | Table aggregation selection -- @@ -308,26 +312,26 @@ defaultSelectTableAggregate tableInfo fieldName description = runMaybeT $ do aggregateParser <- tableAggregationFields tableInfo let selectionName = mkTypename $ applyTypeNameCaseIdentifier tCase $ mkTableAggregateTypeName tableGQLName aggregationParser = - P.nonNullableParser $ - parsedSelectionsToFields IR.TAFExp - <$> P.selectionSet - selectionName - (Just $ G.Description $ "aggregated selection of " <>> tableName) - [ IR.TAFNodes xNodesAgg <$> P.subselection_ Name._nodes Nothing nodesParser, - IR.TAFAgg <$> P.subselection_ Name._aggregate Nothing aggregateParser - ] - pure $ - P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) $ - P.subselection fieldName description tableArgsParser aggregationParser - <&> \(args, fields) -> - IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = IR.FromTable tableName, - IR._asnPerm = tablePermissionsInfo selectPermissions, - IR._asnArgs = args, - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Just tCase - } + P.nonNullableParser + $ parsedSelectionsToFields IR.TAFExp + <$> P.selectionSet + selectionName + (Just $ G.Description $ "aggregated selection of " <>> tableName) + [ IR.TAFNodes xNodesAgg <$> P.subselection_ Name._nodes Nothing nodesParser, + IR.TAFAgg <$> P.subselection_ Name._aggregate Nothing aggregateParser + ] + pure + $ P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) + $ P.subselection fieldName description tableArgsParser aggregationParser + <&> \(args, fields) -> + IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = IR.FromTable tableName, + IR._asnPerm = tablePermissionsInfo selectPermissions, + IR._asnArgs = args, + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Just tCase + } {- Note [Selectability of tables] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -463,17 +467,17 @@ defaultTableSelectionSet tableInfo = runMaybeT do -- which to run, and will stack another `SchemaT` on top of it when -- recursively processing tables. 
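Worth pausing on the where clause `selectTableByPk` assembles above: one equality per primary-key argument, conjoined with `BoolAnd`. A toy model of that shape (the real AST is `IR.AnnBoolExp` over backend-specific column types, with `BoolField`/`AVColumn`/`AEQ` nodes rather than strings):

-- Toy boolean-expression AST mirroring the pk where-clause construction.
data BoolExp
  = BoolAnd [BoolExp]
  | AEQ String String -- column, value (stand-ins for typed IR nodes)
  deriving (Show)

whereByPk :: [(String, String)] -> BoolExp
whereByPk pkArgs = BoolAnd [AEQ col val | (col, val) <- pkArgs]

main :: IO ()
main = print (whereByPk [("id", "42"), ("tenant_id", "7")])
  -- BoolAnd [AEQ "id" "42",AEQ "tenant_id" "7"]
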
nodeInterface <- lift $ runNodeBuilder nodeBuilder context options - pure $ - selectionSetObjectWithDirective objectTypename description allFieldParsers [nodeInterface] pkDirectives - <&> parsedSelectionsToFields IR.AFExpression + pure + $ selectionSetObjectWithDirective objectTypename description allFieldParsers [nodeInterface] pkDirectives + <&> parsedSelectionsToFields IR.AFExpression _ -> - pure $ - selectionSetObjectWithDirective objectTypename description fieldParsers [] pkDirectives - <&> parsedSelectionsToFields IR.AFExpression + pure + $ selectionSetObjectWithDirective objectTypename description fieldParsers [] pkDirectives + <&> parsedSelectionsToFields IR.AFExpression where selectionSetObjectWithDirective name description parsers implementsInterfaces directives = - IP.setParserDirectives directives $ - P.selectionSetObject name description parsers implementsInterfaces + IP.setParserDirectives directives + $ P.selectionSetObject name description parsers implementsInterfaces -- | List of table fields object. -- Just a @'nonNullableObjectList' wrapper over @'tableSelectionSet'. @@ -534,9 +538,9 @@ parseLogicalModelField field <- lift $ columnParser columnType (G.Nullability lmtsNullable) - pure $! - P.selection columnName (G.Description <$> lmfDescription) pathArg field - <&> IR.mkAnnColumnField column columnType caseBoolExpUnpreparedValue + pure + $! P.selection columnName (G.Description <$> lmfDescription) pathArg field + <&> IR.mkAnnColumnField column columnType caseBoolExpUnpreparedValue parseLogicalModelField relationshipInfo column @@ -624,9 +628,9 @@ defaultLogicalModelSelectionSet relationshipInfo logicalModel = runMaybeT $ do -- We entirely ignore Relay for now. implementsInterfaces = mempty - pure $ - P.selectionSetObject fieldName description parsers implementsInterfaces - <&> parsedSelectionsToFields IR.AFExpression + pure + $ P.selectionSetObject fieldName description parsers implementsInterfaces + <&> parsedSelectionsToFields IR.AFExpression logicalModelSelectionList :: (MonadBuildSchema b r m n, BackendLogicalModelSelectSchema b) => @@ -691,10 +695,10 @@ tableConnectionSelectionSet tableInfo = runMaybeT do edgesParser <&> IR.ConnectionEdges connectionDescription = G.Description $ "A Relay connection object on " <>> tableName - pure $ - P.nonNullableParser $ - P.selectionSet connectionTypeName (Just connectionDescription) [pageInfo, edges] - <&> parsedSelectionsToFields IR.ConnectionTypename + pure + $ P.nonNullableParser + $ P.selectionSet connectionTypeName (Just connectionDescription) [pageInfo, edges] + <&> parsedSelectionsToFields IR.ConnectionTypename where pageInfoSelectionSet :: Parser 'Output n IR.PageInfoFields pageInfoSelectionSet = @@ -728,9 +732,9 @@ tableConnectionSelectionSet tableInfo = runMaybeT do hasNextPageField, hasPreviousPageField ] - in P.nonNullableParser $ - P.selectionSet Name._PageInfo Nothing allFields - <&> parsedSelectionsToFields IR.PageInfoTypename + in P.nonNullableParser + $ P.selectionSet Name._PageInfo Nothing allFields + <&> parsedSelectionsToFields IR.PageInfoTypename tableEdgesSelectionSet :: (G.Name -> G.Name) -> @@ -751,10 +755,10 @@ tableConnectionSelectionSet tableInfo = runMaybeT do Nothing edgeNodeParser <&> IR.EdgeNode - pure $ - nonNullableObjectList $ - P.selectionSet edgesType Nothing [cursor, edgeNode] - <&> parsedSelectionsToFields IR.EdgeTypename + pure + $ nonNullableObjectList + $ P.selectionSet edgesType Nothing [cursor, edgeNode] + <&> parsedSelectionsToFields IR.EdgeTypename 
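For orientation, the connection selection set built by `tableConnectionSelectionSet` above follows the Relay spec's shape: a `pageInfo` object plus a list of edges, each pairing a cursor with the selected node fields. A self-contained sketch of that shape (field names follow the Relay spec; the real parsers produce IR selection values, not plain records):

data PageInfo = PageInfo
  { hasNextPage :: Bool,
    hasPreviousPage :: Bool,
    startCursor :: String,
    endCursor :: String
  }
  deriving (Show)

data Edge node = Edge {cursor :: String, node :: node}
  deriving (Show)

data Connection node = Connection {pageInfo :: PageInfo, edges :: [Edge node]}
  deriving (Show)

main :: IO ()
main =
  print
    Connection
      { pageInfo = PageInfo True False "cursor-1" "cursor-2",
        edges = [Edge "cursor-1" "author-1", Edge "cursor-2" "author-2"]
      }
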
-------------------------------------------------------------------------------- -- Components @@ -790,10 +794,10 @@ logicalModelWhereArg :: SchemaT r m (InputFieldsParser n (Maybe (IR.AnnBoolExp b (IR.UnpreparedValue b)))) logicalModelWhereArg logicalModel = do boolExpParser <- logicalModelBoolExp logicalModel - pure $ - fmap join $ - P.fieldOptional whereName whereDesc $ - P.nullable boolExpParser + pure + $ fmap join + $ P.fieldOptional whereName whereDesc + $ P.nullable boolExpParser where whereName = Name._where whereDesc = Just $ G.Description "filter the rows returned" @@ -813,10 +817,10 @@ logicalModelOrderByArg logicalModel = do orderByDesc = Just $ G.Description "sort the rows by one or more columns" pure $ do maybeOrderByExps <- - fmap join $ - P.fieldOptional orderByName orderByDesc $ - P.nullable $ - P.list orderByParser + fmap join + $ P.fieldOptional orderByName orderByDesc + $ P.nullable + $ P.list orderByParser pure $ maybeOrderByExps >>= NE.nonEmpty . concat -- | Argument to distinct select on columns returned from table selection @@ -847,7 +851,8 @@ logicalModelDistinctArg logicalModel = do pure do maybeDistinctOnColumns <- - join . join + join + . join <$> for columnsEnum (P.fieldOptional distinctOnName distinctOnDesc . P.nullable . P.list) @@ -878,10 +883,10 @@ tableWhereArg :: SchemaT r m (InputFieldsParser n (Maybe (IR.AnnBoolExp b (IR.UnpreparedValue b)))) tableWhereArg tableInfo = do boolExpParser <- tableBoolExp tableInfo - pure $ - fmap join $ - P.fieldOptional whereName whereDesc $ - P.nullable boolExpParser + pure + $ fmap join + $ P.fieldOptional whereName whereDesc + $ P.nullable boolExpParser where whereName = Name._where whereDesc = Just $ G.Description "filter the rows returned" @@ -900,10 +905,10 @@ tableOrderByArg tableInfo = do orderByDesc = Just $ G.Description "sort the rows by one or more columns" pure $ do maybeOrderByExps <- - fmap join $ - P.fieldOptional orderByName orderByDesc $ - P.nullable $ - P.list orderByParser + fmap join + $ P.fieldOptional orderByName orderByDesc + $ P.nullable + $ P.list orderByParser pure $ maybeOrderByExps >>= NE.nonEmpty . concat -- | Argument to distinct select on columns returned from table selection @@ -920,7 +925,8 @@ tableDistinctArg tableInfo = do distinctOnDesc = Just $ G.Description "distinct select on columns" pure do maybeDistinctOnColumns <- - join . join + join + . join <$> for columnsEnum (P.fieldOptional distinctOnName distinctOnDesc . P.nullable . P.list) @@ -933,9 +939,9 @@ tableLimitArg :: (MonadParse n) => InputFieldsParser n (Maybe Int) tableLimitArg = - fmap (fmap fromIntegral . join) $ - P.fieldOptional limitName limitDesc $ - P.nullable P.nonNegativeInt + fmap (fmap fromIntegral . join) + $ P.fieldOptional limitName limitDesc + $ P.nullable P.nonNegativeInt where limitName = Name._limit limitDesc = Just $ G.Description "limit the number of rows returned" @@ -947,9 +953,9 @@ tableOffsetArg :: (MonadParse n) => InputFieldsParser n (Maybe Int64) tableOffsetArg = - fmap join $ - P.fieldOptional offsetName offsetDesc $ - P.nullable P.bigInt + fmap join + $ P.fieldOptional offsetName offsetDesc + $ P.nullable P.bigInt where offsetName = Name._offset offsetDesc = Just $ G.Description "skip the first n rows. 
Use only with order_by" @@ -1035,8 +1041,8 @@ tableConnectionArgs pkeyColumns tableInfo = do parseConnectionSplit maybeOrderBys splitKind cursorSplit = do cursorValue <- J.eitherDecode cursorSplit `onLeft` const throwInvalidCursor case maybeOrderBys of - Nothing -> forM (nonEmptySeqToNonEmptyList pkeyColumns) $ - \columnInfo -> do + Nothing -> forM (nonEmptySeqToNonEmptyList pkeyColumns) + $ \columnInfo -> do let columnJsonPath = [J.Key $ K.fromText $ toTxt $ ciColumn columnInfo] columnType = ciType columnInfo columnValue <- @@ -1044,9 +1050,9 @@ tableConnectionArgs pkeyColumns tableInfo = do `onNothing` throwInvalidCursor pgValue <- liftQErr $ parseScalarValueColumnType columnType columnValue let unresolvedValue = IR.UVParameter IR.Unknown $ ColumnValue columnType pgValue - pure $ - IR.ConnectionSplit splitKind unresolvedValue $ - IR.OrderByItemG Nothing (IR.AOCColumn columnInfo) Nothing + pure + $ IR.ConnectionSplit splitKind unresolvedValue + $ IR.OrderByItemG Nothing (IR.AOCColumn columnInfo) Nothing Just orderBys -> forM orderBys $ \orderBy -> do let IR.OrderByItemG orderType annObCol nullsOrder = orderBy @@ -1056,9 +1062,9 @@ tableConnectionArgs pkeyColumns tableInfo = do `onNothing` throwInvalidCursor pgValue <- liftQErr $ parseScalarValueColumnType columnType orderByItemValue let unresolvedValue = IR.UVParameter IR.Unknown $ ColumnValue columnType pgValue - pure $ - IR.ConnectionSplit splitKind unresolvedValue $ - IR.OrderByItemG orderType annObCol nullsOrder + pure + $ IR.ConnectionSplit splitKind unresolvedValue + $ IR.OrderByItemG orderType annObCol nullsOrder where throwInvalidCursor = parseError "the \"after\" or \"before\" cursor is invalid" liftQErr = either (parseError . toErrorMessage . qeError) pure . runExcept @@ -1138,45 +1144,48 @@ tableAggregationFields tableInfo = do selectName = runMkTypename mkTypename $ applyTypeNameCaseIdentifier tCase $ mkTableAggregateFieldTypeName tableGQLName count <- countField nonCountComputedFieldsMap <- - fmap (HashMap.unionsWith (++) . concat) $ - sequenceA $ - catMaybes - [ -- operators on numeric computed fields - if null numericComputedFields - then Nothing - else Just $ - for numericAggOperators $ \operator -> do - numFields <- mkNumericAggComputedFields tableName operator numericComputedFields + fmap (HashMap.unionsWith (++) . concat) + $ sequenceA + $ catMaybes + [ -- operators on numeric computed fields + if null numericComputedFields + then Nothing + else Just + $ for numericAggOperators + $ \operator -> do + numFields <- mkNumericAggComputedFields tableName operator numericComputedFields - pure $ HashMap.singleton operator numFields - ] + pure $ HashMap.singleton operator numFields + ] nonCountFieldsMap <- - fmap (HashMap.unionsWith (++) . 
concat) $ - sequenceA $ - catMaybes - [ -- operators on numeric columns - if null numericColumns - then Nothing - else Just $ - for numericAggOperators $ \operator -> do - numFields <- mkNumericAggFields operator numericColumns - pure $ HashMap.singleton operator numFields, - -- operators on comparable columns - if null comparableColumns - then Nothing - else Just $ do - comparableFields <- traverse mkColumnAggField comparableColumns - pure $ - comparisonAggOperators & map \operator -> - HashMap.singleton operator comparableFields, - -- -- custom operators - if null customOperatorsAndColumns - then Nothing - else Just $ - for customOperatorsAndColumns \(operator, columnTypes) -> do - customFields <- traverse (uncurry mkNullableScalarTypeAggField) (toList columnTypes) - pure $ HashMap.singleton (C.fromCustomName operator) customFields - ] + fmap (HashMap.unionsWith (++) . concat) + $ sequenceA + $ catMaybes + [ -- operators on numeric columns + if null numericColumns + then Nothing + else Just + $ for numericAggOperators + $ \operator -> do + numFields <- mkNumericAggFields operator numericColumns + pure $ HashMap.singleton operator numFields, + -- operators on comparable columns + if null comparableColumns + then Nothing + else Just $ do + comparableFields <- traverse mkColumnAggField comparableColumns + pure + $ comparisonAggOperators + & map \operator -> + HashMap.singleton operator comparableFields, + -- -- custom operators + if null customOperatorsAndColumns + then Nothing + else Just + $ for customOperatorsAndColumns \(operator, columnTypes) -> do + customFields <- traverse (uncurry mkNullableScalarTypeAggField) (toList columnTypes) + pure $ HashMap.singleton (C.fromCustomName operator) customFields + ] let nonCountFields = HashMap.mapWithKey @@ -1186,9 +1195,9 @@ tableAggregationFields tableInfo = do aggregateFields :: [FieldParser n (IR.AggregateField b (IR.UnpreparedValue b))] aggregateFields = count : HashMap.elems nonCountFields - pure $ - P.selectionSet selectName (Just description) aggregateFields - <&> parsedSelectionsToFields IR.AFExp + pure + $ P.selectionSet selectName (Just description) aggregateFields + <&> parsedSelectionsToFields IR.AFExp where getCustomAggOpsColumns :: [ColumnInfo b] -> HashMap (ScalarType b) (ScalarType b) -> Maybe (NonEmpty (ColumnInfo b, ScalarType b)) getCustomAggOpsColumns columnInfos typeMap = @@ -1225,9 +1234,11 @@ tableAggregationFields tableInfo = do -- Memoize here for more sharing. Note: we can't do `P.memoizeOn 'mkNumericAggFields...` -- due to stage restrictions, so just add a string key: | otherwise = traverse \columnInfo -> - P.memoizeOn 'tableAggregationFields ("mkNumericAggFields" :: Text, columnInfo) $ + P.memoizeOn 'tableAggregationFields ("mkNumericAggFields" :: Text, columnInfo) + $ -- CAREFUL!: below must only reference columnInfo else memoization key needs to be adapted - pure $! do + pure + $! 
do let !cfcol = IR.SFCol (ciColumn columnInfo) (ciType columnInfo) P.selection_ (ciName columnInfo) @@ -1242,12 +1253,12 @@ tableAggregationFields tableInfo = do mkColumnAggField' :: ColumnInfo b -> ColumnType b -> SchemaT r m (FieldParser n (IR.SelectionField b (IR.UnpreparedValue b))) mkColumnAggField' columnInfo resultType = do field <- columnParser resultType (G.Nullability True) - pure $ - P.selection_ + pure + $ P.selection_ (ciName columnInfo) (ciDescription columnInfo) field - $> IR.SFCol (ciColumn columnInfo) (ciType columnInfo) + $> IR.SFCol (ciColumn columnInfo) (ciType columnInfo) mkNullableScalarTypeAggField :: ColumnInfo b -> ScalarType b -> SchemaT r m (FieldParser n (IR.SelectionField b (IR.UnpreparedValue b))) mkNullableScalarTypeAggField columnInfo resultType = @@ -1260,12 +1271,12 @@ tableAggregationFields tableInfo = do args = do distinct <- P.fieldOptional distinctName Nothing P.boolean mkCountType <- countTypeInput @b columnsEnum - pure $ - mkCountType $ - maybe - IR.SelectCountNonDistinct -- If "distinct" is "null" or absent, we default to @'SelectCountNonDistinct' - (bool IR.SelectCountNonDistinct IR.SelectCountDistinct) - distinct + pure + $ mkCountType + $ maybe + IR.SelectCountNonDistinct -- If "distinct" is "null" or absent, we default to @'SelectCountNonDistinct' + (bool IR.SelectCountNonDistinct IR.SelectCountDistinct) + distinct pure $ IR.AFCount <$> P.selection Name._count Nothing args P.int @@ -1303,16 +1314,17 @@ defaultArgsParser whereParser orderByParser distinctParser = do limitArg <- tableLimitArg offsetArg <- tableOffsetArg distinctArg <- distinctParser - pure $ - IR.SelectArgs + pure + $ IR.SelectArgs { IR._saWhere = whereArg, IR._saOrderBy = orderByArg, IR._saLimit = limitArg, IR._saOffset = offsetArg, IR._saDistinct = distinctArg } - pure $ - result `P.bindFields` \args -> do + pure + $ result + `P.bindFields` \args -> do sequence_ do orderBy <- IR._saOrderBy args distinct <- IR._saDistinct args @@ -1329,8 +1341,8 @@ defaultArgsParser whereParser orderByParser distinctParser = do isValid = (colsLen == length initOrdByCols) && all (`elem` initOrdByCols) (toList distinctCols) - unless isValid $ - parseError + unless isValid + $ parseError "\"distinct_on\" columns must match initial \"order_by\" columns" defaultLogicalModelArgs :: @@ -1399,9 +1411,9 @@ fieldSelection table tableInfo = \case -- allow the case analysis only on nullable columns. nullability = ciIsNullable columnInfo || isJust caseBoolExp field <- lift $ columnParser (ciType columnInfo) (G.Nullability nullability) - pure $! - P.selection fieldName (ciDescription columnInfo) pathArg field - <&> IR.mkAnnColumnField (ciColumn columnInfo) (ciType columnInfo) caseBoolExpUnpreparedValue + pure + $! P.selection fieldName (ciDescription columnInfo) pathArg field + <&> IR.mkAnnColumnField (ciColumn columnInfo) (ciType columnInfo) caseBoolExpUnpreparedValue FIColumn (SCIObjectColumn nestedObjectInfo) -> pure . fmap IR.AFNestedObject <$> nestedObjectFieldParser tableInfo nestedObjectInfo FIColumn (SCIArrayColumn NestedArrayInfo {..}) -> @@ -1439,7 +1451,7 @@ outputParserModifier :: Bool -> IP.Parser origin 'Output m a -> IP.Parser origin outputParserModifier True = P.nullableParser outputParserModifier False = P.nonNullableParser -nestedArrayFieldParser :: forall origin m b r v. Functor m => XNestedArrays b -> Bool -> IP.FieldParser origin m (IR.AnnFieldG b r v) -> IP.FieldParser origin m (IR.AnnFieldG b r v) +nestedArrayFieldParser :: forall origin m b r v. 
(Functor m) => XNestedArrays b -> Bool -> IP.FieldParser origin m (IR.AnnFieldG b r v) -> IP.FieldParser origin m (IR.AnnFieldG b r v) nestedArrayFieldParser supportsNestedArrays isNullable = wrapNullable . IP.multipleField . fmap (IR.AFNestedArray @b supportsNestedArrays . IR.ANASSimple) where @@ -1456,10 +1468,10 @@ nestedObjectParser :: SchemaT r m (P.Parser 'Output n (AnnotatedNestedObjectSelect b)) nestedObjectParser supportsNestedObjects objectTypes objectType column isNullable = do allFieldParsers <- for (toList $ _totFields objectType) outputFieldParser - pure $ - outputParserModifier isNullable $ - P.selectionSet (_totName objectType) (_totDescription objectType) allFieldParsers - <&> IR.AnnNestedObjectSelectG supportsNestedObjects column . parsedSelectionsToFields IR.AFExpression + pure + $ outputParserModifier isNullable + $ P.selectionSet (_totName objectType) (_totDescription objectType) allFieldParsers + <&> IR.AnnNestedObjectSelectG supportsNestedObjects column . parsedSelectionsToFields IR.AFExpression where outputFieldParser :: TableObjectFieldDefinition b -> @@ -1479,9 +1491,9 @@ nestedObjectParser supportsNestedObjects objectTypes objectType column isNullabl go objectFieldType where wrapScalar scalarType parser = - pure $ - P.wrapFieldParser gType (P.selection_ name description parser) - $> IR.mkAnnColumnField column' (ColumnScalar scalarType) Nothing Nothing + pure + $ P.wrapFieldParser gType (P.selection_ name description parser) + $> IR.mkAnnColumnField column' (ColumnScalar scalarType) Nothing Nothing customScalarParser fieldTypeName = let schemaType = P.TNamed P.NonNullable $ P.Definition fieldTypeName Nothing Nothing [] P.TIScalar in P.Parser @@ -1679,26 +1691,26 @@ relationshipField table ri = runMaybeT do pure $ boolToNullable $ any ciIsNullable colInfo -- Manual or reverse relationships are always nullable _ -> pure Nullable - pure $ - pure $ - case nullable of { Nullable -> id; NotNullable -> IP.nonNullableField } $ - P.subselection_ relFieldName desc selectionSetParser - <&> \fields -> - IR.AFObjectRelation $ - IR.AnnRelationSelectG (riName ri) (riMapping ri) $ - IR.AnnObjectSelectG fields (IR.FromTable otherTableName) $ - deduplicatePermissions $ - IR._tpFilter $ - tablePermissionsInfo remotePerms + pure + $ pure + $ case nullable of Nullable -> id; NotNullable -> IP.nonNullableField + $ P.subselection_ relFieldName desc selectionSetParser + <&> \fields -> + IR.AFObjectRelation + $ IR.AnnRelationSelectG (riName ri) (riMapping ri) + $ IR.AnnObjectSelectG fields (IR.FromTable otherTableName) + $ deduplicatePermissions + $ IR._tpFilter + $ tablePermissionsInfo remotePerms ArrRel -> do let arrayRelDesc = Just $ G.Description "An array relationship" otherTableParser <- MaybeT $ selectTable otherTableInfo relFieldName arrayRelDesc let arrayRelField = otherTableParser <&> \selectExp -> - IR.AFArrayRelation $ - IR.ASSimple $ - IR.AnnRelationSelectG (riName ri) (riMapping ri) $ - deduplicatePermissions' selectExp + IR.AFArrayRelation + $ IR.ASSimple + $ IR.AnnRelationSelectG (riName ri) (riMapping ri) + $ deduplicatePermissions' selectExp relAggFieldName = applyFieldNameCaseCust tCase $ relFieldName <> Name.__aggregate relAggDesc = Just $ G.Description "An aggregate relationship" remoteAggField <- lift $ selectTableAggregate otherTableInfo relAggFieldName relAggDesc @@ -1707,14 +1719,14 @@ relationshipField table ri = runMaybeT do RelaySchema _ <- retrieve scSchemaKind _xRelayInfo <- hoistMaybe $ relayExtension @b pkeyColumns <- - MaybeT $ - (^? tiCoreInfo . 
tciPrimaryKey . _Just . pkColumns) - <$> pure otherTableInfo + MaybeT + $ (^? tiCoreInfo . tciPrimaryKey . _Just . pkColumns) + <$> pure otherTableInfo let relConnectionName = relFieldName <> Name.__connection relConnectionDesc = Just $ G.Description "An array relationship connection" MaybeT $ lift $ selectTableConnection otherTableInfo relConnectionName relConnectionDesc pkeyColumns - pure $ - catMaybes + pure + $ catMaybes [ Just arrayRelField, fmap (IR.AFArrayRelation . IR.ASAggregate . IR.AnnRelationSelectG (riName ri) (riMapping ri)) <$> remoteAggField, fmap (IR.AFArrayRelation . IR.ASConnection . IR.AnnRelationSelectG (riName ri) (riMapping ri)) <$> remoteConnectionField @@ -1745,12 +1757,12 @@ logicalModelObjectRelationshipField logicalModelName ri | riType ri == ObjRel = -- even possible for this to be an issue at this point when (logicalModelName /= _lmiName (_nqiReturns nativeQueryInfo)) - ( throw500 $ - "Expected object relationship to return " - <> toTxt logicalModelName - <> " but it returns " - <> toTxt (_lmiName (_nqiReturns nativeQueryInfo)) - <> "." + ( throw500 + $ "Expected object relationship to return " + <> toTxt logicalModelName + <> " but it returns " + <> toTxt (_lmiName (_nqiReturns nativeQueryInfo)) + <> "." ) relFieldName <- lift $ textToName $ relNameToTxt $ riName ri @@ -1759,8 +1771,9 @@ logicalModelObjectRelationshipField logicalModelName ri | riType ri == ObjRel = nativeQueryParser <- MaybeT $ selectNativeQueryObject nativeQueryInfo relFieldName objectRelDesc - pure $ - nativeQueryParser <&> \selectExp -> + pure + $ nativeQueryParser + <&> \selectExp -> IR.AFObjectRelation (IR.AnnRelationSelectG (riName ri) (riMapping ri) selectExp) RelTargetTable _otherTableName -> do throw500 "Object relationships from logical models to tables are not implemented" @@ -1786,23 +1799,24 @@ logicalModelArrayRelationshipField logicalModelName ri | riType ri == ArrRel = -- even possible for this to be an issue at this point when (logicalModelName /= _lmiName (_nqiReturns nativeQueryInfo)) - ( throw500 $ - "Expected array relationship to return " - <> toTxt logicalModelName - <> " but it returns " - <> toTxt (_lmiName (_nqiReturns nativeQueryInfo)) - <> "." + ( throw500 + $ "Expected array relationship to return " + <> toTxt logicalModelName + <> " but it returns " + <> toTxt (_lmiName (_nqiReturns nativeQueryInfo)) + <> "." 
) let objectRelDesc = Just $ G.Description "An array relationship" nativeQueryParser <- MaybeT $ selectNativeQuery nativeQueryInfo relFieldName objectRelDesc - pure $ - nativeQueryParser <&> \selectExp -> - IR.AFArrayRelation $ - IR.ASSimple $ - IR.AnnRelationSelectG (riName ri) (riMapping ri) selectExp + pure + $ nativeQueryParser + <&> \selectExp -> + IR.AFArrayRelation + $ IR.ASSimple + $ IR.AnnRelationSelectG (riName ri) (riMapping ri) selectExp RelTargetTable _otherTableName -> do throw500 "Array relationships from logical models to tables are not implemented" logicalModelArrayRelationshipField _ _ = diff --git a/server/src-lib/Hasura/GraphQL/Schema/SubscriptionStream.hs b/server/src-lib/Hasura/GraphQL/Schema/SubscriptionStream.hs index d22c989c0d4..d7547299e87 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/SubscriptionStream.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/SubscriptionStream.hs @@ -71,17 +71,17 @@ cursorOrderingArgParser = do tCase = _rscNamingConvention customization enumName = runMkTypename (_rscTypeNames customization) $ applyTypeNameCaseCust tCase Name._cursor_ordering let description = - Just $ - G.Description $ - "ordering argument of a cursor" - pure $ - P.enum enumName description $ - NE.fromList -- It's fine to use fromList here because we know the list is never empty. - [ ( define enumNameVal, - snd enumNameVal - ) - | enumNameVal <- [(Name._ASC, COAscending), (Name._DESC, CODescending)] - ] + Just + $ G.Description + $ "ordering argument of a cursor" + pure + $ P.enum enumName description + $ NE.fromList -- It's fine to use fromList here because we know the list is never empty. + [ ( define enumNameVal, + snd enumNameVal + ) + | enumNameVal <- [(Name._ASC, COAscending), (Name._DESC, CODescending)] + ] where define (name, val) = let orderingTypeDesc = bool "descending" "ascending" $ val == COAscending @@ -236,8 +236,8 @@ tableStreamArguments tableInfo = do [c] -> pure c _ -> parseError "multiple column cursors are not supported yet" batchSizeArg <- cursorBatchSizeArg tCase - pure $ - IR.SelectStreamArgsG whereArg batchSizeArg cursorArg + pure + $ IR.SelectStreamArgsG whereArg batchSizeArg cursorArg -- | Field parser for a streaming subscription for a table. 
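Before `selectStreamTable` below, a note on `cursorOrderingArgParser` above: it builds the two-valued `cursor_ordering` enum, and the inline comment justifies the partial `NE.fromList` because the candidate list is a two-element literal. A minimal sketch of that enum (the constructor names come from the diff; everything else is illustrative):

import qualified Data.List.NonEmpty as NE

data CursorOrdering = COAscending | CODescending
  deriving (Show)

main :: IO ()
main =
  -- Safe despite NE.fromList being partial: the literal is visibly non-empty.
  mapM_ print (NE.fromList [("ASC", COAscending), ("DESC", CODescending)])
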
selectStreamTable :: @@ -263,17 +263,18 @@ selectStreamTable tableInfo fieldName description = runMaybeT $ do stringifyNumbers <- retrieve Options.soStringifyNumbers tableStreamArgsParser <- lift $ tableStreamArguments tableInfo selectionSetParser <- MaybeT $ tableSelectionList tableInfo - lift $ - memoizeOn 'selectStreamTable (sourceName, tableName, fieldName) $ do - pure $ - P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) $ - P.subselection fieldName description tableStreamArgsParser selectionSetParser - <&> \(args, fields) -> - IR.AnnSelectStreamG - { IR._assnXStreamingSubscription = xStreamSubscription, - IR._assnFields = fields, - IR._assnFrom = IR.FromTable tableName, - IR._assnPerm = tablePermissionsInfo selectPermissions, - IR._assnArgs = args, - IR._assnStrfyNum = stringifyNumbers - } + lift + $ memoizeOn 'selectStreamTable (sourceName, tableName, fieldName) + $ do + pure + $ P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) + $ P.subselection fieldName description tableStreamArgsParser selectionSetParser + <&> \(args, fields) -> + IR.AnnSelectStreamG + { IR._assnXStreamingSubscription = xStreamSubscription, + IR._assnFields = fields, + IR._assnFrom = IR.FromTable tableName, + IR._assnPerm = tablePermissionsInfo selectPermissions, + IR._assnArgs = args, + IR._assnStrfyNum = stringifyNumbers + } diff --git a/server/src-lib/Hasura/GraphQL/Schema/Table.hs b/server/src-lib/Hasura/GraphQL/Schema/Table.hs index f1782c2b0d3..40901f1c0b0 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Table.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Table.hs @@ -100,9 +100,10 @@ tableSelectColumnsEnum tableInfo = do columns <- tableSelectColumns tableInfo let enumName = mkTypename $ applyTypeNameCaseIdentifier tCase $ mkTableSelectColumnTypeName tableGQLName description = - Just $ - G.Description $ - "select columns of table " <>> tableInfoName tableInfo + Just + $ G.Description + $ "select columns of table " + <>> tableInfoName tableInfo -- We noticed many 'Definition's allocated, from 'define' below, so memoize -- to gain more sharing and lower memory residency. case nonEmpty $ map (define . structuredColumnInfoName &&& structuredColumnInfoColumn) columns of @@ -112,8 +113,8 @@ tableSelectColumnsEnum tableInfo = do <$> P.memoizeOn 'tableSelectColumnsEnum (enumName, description, columns) - ( pure $ - P.enum enumName description columnDefinitions + ( pure + $ P.enum enumName description columnDefinitions ) where define name = @@ -143,17 +144,20 @@ tableSelectColumnsPredEnum columnPredicate predName tableInfo = do columns <- filter (columnPredicate . ciType) . mapMaybe (^? 
_SCIScalarColumn) <$> tableSelectColumns tableInfo let enumName = mkTypename $ applyTypeNameCaseIdentifier tCase $ mkSelectColumnPredTypeName tableGQLName predName description = - Just $ - G.Description $ - "select \"" <> G.unName predName' <> "\" columns of table " <>> tableInfoName tableInfo - pure $ - P.enum enumName description - <$> nonEmpty - [ ( define $ ciName column, - ciColumn column - ) - | column <- columns - ] + Just + $ G.Description + $ "select \"" + <> G.unName predName' + <> "\" columns of table " + <>> tableInfoName tableInfo + pure + $ P.enum enumName description + <$> nonEmpty + [ ( define $ ciName column, + ciColumn column + ) + | column <- columns + ] where define name = P.Definition name (Just $ G.Description "column name") Nothing [] P.EnumValueInfo @@ -202,12 +206,12 @@ updateColumnsPlaceholderParser tableInfo = do Nothing -> do tableGQLName <- getTableIdentifierName tableInfo let enumName = mkTypename $ applyTypeNameCaseIdentifier tCase $ mkTableUpdateColumnTypeName tableGQLName - pure $ - P.enum enumName (Just $ G.Description $ "placeholder for update columns of table " <> tableInfoName tableInfo <<> " (current role has no relevant permissions)") $ - pure - ( P.Definition @_ @P.EnumValueInfo Name.__PLACEHOLDER (Just $ G.Description "placeholder (do not use)") Nothing [] P.EnumValueInfo, - Nothing - ) + pure + $ P.enum enumName (Just $ G.Description $ "placeholder for update columns of table " <> tableInfoName tableInfo <<> " (current role has no relevant permissions)") + $ pure + ( P.Definition @_ @P.EnumValueInfo Name.__PLACEHOLDER (Just $ G.Description "placeholder (do not use)") Nothing [] P.EnumValueInfo, + Nothing + ) tableSelectPermissions :: RoleName -> TableInfo b -> Maybe (SelPermInfo b) tableSelectPermissions role tableInfo = _permSel $ getRolePermInfo role tableInfo diff --git a/server/src-lib/Hasura/GraphQL/Schema/Update.hs b/server/src-lib/Hasura/GraphQL/Schema/Update.hs index 74fb165dc15..5a89165139e 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Update.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Update.hs @@ -75,12 +75,12 @@ buildUpdateOperators :: buildUpdateOperators presetCols ops tableInfo = do parsers :: P.InputFieldsParser n [HashMap (Column b) op] <- sequenceA . catMaybes <$> traverse (runUpdateOperator tableInfo) ops - pure $ - parsers - `P.bindFields` ( \opExps -> do - let withPreset = presetCols : opExps - mergeDisjoint @b withPreset - ) + pure + $ parsers + `P.bindFields` ( \opExps -> do + let withPreset = presetCols : opExps + mergeDisjoint @b withPreset + ) -- | The columns that have 'preset' definitions applied to them. (see -- toErrorValue duplicates <> "." @@ -170,15 +170,16 @@ updateOperator tableGQLName opName opFieldName mkParser columns opDesc objDesc = let fieldName = ciName columnInfo fieldDesc = ciDescription columnInfo fieldParser <- mkParser columnInfo - pure $ - P.fieldOptional fieldName fieldDesc fieldParser - `mapField` \value -> (ciColumn columnInfo, value) + pure + $ P.fieldOptional fieldName fieldDesc fieldParser + `mapField` \value -> (ciColumn columnInfo, value) let objName = mkTypename $ applyTypeNameCaseIdentifier tCase $ mkTableOperatorInputTypeName tableGQLName opName - pure $ - fmap (HashMap.fromList . (fold :: Maybe [(Column b, a)] -> [(Column b, a)])) $ - P.fieldOptional (applyFieldNameCaseIdentifier tCase opFieldName) (Just opDesc) $ - P.object objName (Just objDesc) $ - (catMaybes . toList) <$> sequenceA fieldParsers + pure + $ fmap (HashMap.fromList . 
(fold :: Maybe [(Column b, a)] -> [(Column b, a)]) + $ P.fieldOptional (applyFieldNameCaseIdentifier tCase opFieldName) (Just opDesc) + $ P.object objName (Just objDesc) + $ (catMaybes . toList) + <$> sequenceA fieldParsers {-# ANN updateOperator ("HLint: ignore Use tuple-section" :: String) #-} setOp :: diff --git a/server/src-lib/Hasura/GraphQL/Schema/Update/Batch.hs index 3e5ea44d1fa..8fe607ac186 100644 --- a/server/src-lib/Hasura/GraphQL/Schema/Update/Batch.hs +++ b/server/src-lib/Hasura/GraphQL/Schema/Update/Batch.hs @@ -66,10 +66,10 @@ buildAnnotatedUpdateGField scenario tableInfo fieldName description parseOutput outputParser <- parseOutput updateVariantParser <- mkUpdateVariantParser updatePerms - pure $ - P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) $ - mkAnnotatedUpdateG tableName columns updatePerms (Just tCase) - <$> P.subselection fieldName description updateVariantParser outputParser + pure + $ P.setFieldParserOrigin (MOSourceObjId sourceName (AB.mkAnyBackend $ SMOTable @b tableName)) + $ mkAnnotatedUpdateG tableName columns updatePerms (Just tCase) + <$> P.subselection fieldName description updateVariantParser outputParser -- | Construct a root field, normally called update_tablename, that can be used -- to update rows in a DB table specified by filters. Only returns a parser if diff --git a/server/src-lib/Hasura/GraphQL/Transport/Backend.hs index 7c9c37a4672..90452119535 100644 --- a/server/src-lib/Hasura/GraphQL/Transport/Backend.hs +++ b/server/src-lib/Hasura/GraphQL/Transport/Backend.hs @@ -25,7 +25,7 @@ import Hasura.Tracing -- | This typeclass encapsulates how a given backend sends queries and mutations over the -- network. Each backend is currently responsible for both logging and tracing, for now. -class BackendExecute b => BackendTransport (b :: BackendType) where +class (BackendExecute b) => BackendTransport (b :: BackendType) where runDBQuery :: forall m. ( MonadIO m, diff --git a/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs index ff71828a49a..6f788e1ed03 100644 --- a/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs +++ b/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs @@ -115,7 +115,7 @@ data CacheResult -- store the response in the cache after a fresh execution. ResponseUncached (Maybe ResponseCacher) -class Monad m => MonadExecuteQuery m where +class (Monad m) => MonadExecuteQuery m where -- | This method does two things: it looks up a query result in the -- server-side cache, if a cache is used, and it additionally returns HTTP -- headers that can instruct a client how long a response can be cached @@ -150,9 +150,9 @@ class Monad m => MonadExecuteQuery m where m (Either QErr (HTTP.ResponseHeaders, CacheResult)) cacheLookup a b c d e f = lift $ cacheLookup a b c d e f -instance MonadExecuteQuery m => MonadExecuteQuery (ReaderT r m) +instance (MonadExecuteQuery m) => MonadExecuteQuery (ReaderT r m) -instance MonadExecuteQuery m => MonadExecuteQuery (ExceptT e m) +instance (MonadExecuteQuery m) => MonadExecuteQuery (ExceptT e m) -- | A partial response, e.g.
from a remote schema call or postgres -- query, which we'll assemble into the final response for @@ -201,8 +201,8 @@ buildResponse telemType res f = case res of Right a -> pure $ f a Left (Right err) -> throwError err Left (Left err) -> - pure $ - AnnotatedResponse + pure + $ AnnotatedResponse { arQueryType = telemType, arTimeIO = 0, arLocality = Telem.Remote, @@ -387,8 +387,8 @@ runGQ env sqlGenCtx sc scVer enableAL readOnlyMode prometheusMetrics logger agen -- If we get a cache hit, annotate the response with metadata and return it. ResponseCached cachedResponseData -> do logQueryLog logger $ QueryLog reqUnparsed Nothing reqId QueryLogKindCached - pure $ - AnnotatedResponse + pure + $ AnnotatedResponse { arQueryType = Telem.Query, arTimeIO = 0, arLocality = Telem.Local, @@ -438,9 +438,9 @@ runGQ env sqlGenCtx sc scVer enableAL readOnlyMode prometheusMetrics logger agen Just (sourceConfig, resolvedConnectionTemplate, pgMutations) -> do res <- -- TODO: can this be a `catch` rather than a `runExceptT`? - runExceptT $ - doQErr $ - runPGMutationTransaction reqId reqUnparsed userInfo logger sourceConfig resolvedConnectionTemplate pgMutations + runExceptT + $ doQErr + $ runPGMutationTransaction reqId reqUnparsed userInfo logger sourceConfig resolvedConnectionTemplate pgMutations -- we do not construct response parts since we have only one part buildResponse Telem.Mutation res \(telemTimeIO_DT, parts) -> let responseData = Right $ encJToLBS $ encodeEncJSONResults parts @@ -533,8 +533,8 @@ runGQ env sqlGenCtx sc scVer enableAL readOnlyMode prometheusMetrics logger agen doQErr $ E.execRemoteGQ env userInfo reqHeaders (rsDef rsi) gqlReq value <- extractFieldFromResponse fieldName resultCustomizer resp finalResponse <- - doQErr $ - RJ.processRemoteJoins + doQErr + $ RJ.processRemoteJoins reqId logger agentLicenseKey @@ -626,10 +626,12 @@ coalescePostgresMutations plan = do mutations <- for plan \case E.ExecStepDB _ exists remoteJoins -> do dbStepInfo <- AB.unpackAnyBackend @('Postgres 'Vanilla) exists - guard $ - oneSourceName == EB.dbsiSourceName dbStepInfo - && isNothing remoteJoins - && oneResolvedConnectionTemplate == EB.dbsiResolvedConnectionTemplate dbStepInfo + guard + $ oneSourceName + == EB.dbsiSourceName dbStepInfo + && isNothing remoteJoins + && oneResolvedConnectionTemplate + == EB.dbsiResolvedConnectionTemplate dbStepInfo Just dbStepInfo _ -> Nothing Just (oneSourceConfig, oneResolvedConnectionTemplate, mutations) @@ -651,7 +653,7 @@ decodeGraphQLResponse bs = do extractFieldFromResponse :: forall m. - Monad m => + (Monad m) => RootFieldAlias -> ResultCustomizer -> LBS.ByteString -> @@ -667,15 +669,16 @@ extractFieldFromResponse fieldName resultCustomizer resp = do GraphQLResponseData d -> pure d dataObj <- onLeft (JO.asObject dataVal) do400 fieldVal <- - onNothing (JO.lookup fieldName' dataObj) $ - do400 $ - "expecting key " <> fieldName' + onNothing (JO.lookup fieldName' dataObj) + $ do400 + $ "expecting key " + <> fieldName' return fieldVal where do400 = withExceptT Right . throw400 RemoteSchemaError doGQExecError = withExceptT Left . throwError .
GQExecError -buildRaw :: Applicative m => JO.Value -> m AnnotatedResponsePart +buildRaw :: (Applicative m) => JO.Value -> m AnnotatedResponsePart buildRaw json = do let obj = encJFromOrderedValue json telemTimeIO_DT = 0 diff --git a/server/src-lib/Hasura/GraphQL/Transport/HTTP/Protocol.hs b/server/src-lib/Hasura/GraphQL/Transport/HTTP/Protocol.hs index 828d55589ae..2f546201294 100644 --- a/server/src-lib/Hasura/GraphQL/Transport/HTTP/Protocol.hs +++ b/server/src-lib/Hasura/GraphQL/Transport/HTTP/Protocol.hs @@ -86,11 +86,11 @@ data GQLBatchedReqs a | GQLBatchedReqs [a] deriving (Show, Eq, Generic, Functor) -instance J.ToJSON a => J.ToJSON (GQLBatchedReqs a) where +instance (J.ToJSON a) => J.ToJSON (GQLBatchedReqs a) where toJSON (GQLSingleRequest q) = J.toJSON q toJSON (GQLBatchedReqs qs) = J.toJSON qs -instance J.FromJSON a => J.FromJSON (GQLBatchedReqs a) where +instance (J.FromJSON a) => J.FromJSON (GQLBatchedReqs a) where parseJSON arr@J.Array {} = GQLBatchedReqs <$> J.parseJSON arr parseJSON other = GQLSingleRequest <$> J.parseJSON other @@ -153,7 +153,7 @@ renderGQLReqOutgoing = fmap (GQLQueryText . G.renderExecutableDoc . toExecDoc . -- https://graphql.org/learn/serving-over-http/ {-# INLINEABLE getSingleOperation #-} getSingleOperation :: - MonadError QErr m => + (MonadError QErr m) => GQLReqParsed -> m SingleOperation getSingleOperation (GQLReq opNameM q _varValsM) = do @@ -163,21 +163,22 @@ getSingleOperation (GQLReq opNameM q _varValsM) = do (Just opName, [], _) -> do let n = _unOperationName opName opDefM = find (\opDef -> G._todName opDef == Just n) opDefs - onNothing opDefM $ - throw400 ValidationFailed $ - "no such operation found in the document: " <> dquote n + onNothing opDefM + $ throw400 ValidationFailed + $ "no such operation found in the document: " + <> dquote n (Just _, _, _) -> - throw400 ValidationFailed $ - "operationName cannot be used when " - <> "an anonymous operation exists in the document" + throw400 ValidationFailed + $ "operationName cannot be used when " + <> "an anonymous operation exists in the document" (Nothing, [selSet], []) -> return $ G.TypedOperationDefinition G.OperationTypeQuery Nothing [] [] selSet (Nothing, [], [opDef]) -> return opDef (Nothing, _, _) -> - throw400 ValidationFailed $ - "exactly one operation has to be present " - <> "in the document when operationName is not specified" + throw400 ValidationFailed + $ "exactly one operation has to be present " + <> "in the document when operationName is not specified" inlinedSelSet <- EI.inlineSelectionSet fragments _todSelectionSet pure $ G.TypedOperationDefinition {_todSelectionSet = inlinedSelSet, ..} diff --git a/server/src-lib/Hasura/GraphQL/Transport/WSServerApp.hs b/server/src-lib/Hasura/GraphQL/Transport/WSServerApp.hs index 95ae6eb10de..8e01647b939 100644 --- a/server/src-lib/Hasura/GraphQL/Transport/WSServerApp.hs +++ b/server/src-lib/Hasura/GraphQL/Transport/WSServerApp.hs @@ -94,8 +94,8 @@ createWSServerApp enabledLogTypes serverEnv connInitTimeout licenseKeyCache = \ flip runReaderT serverEnv $ onConn rid rh ip (wsActions sp) onMessageHandler conn bs sp = - mask_ $ - onMessage enabledLogTypes getAuthMode serverEnv conn bs (wsActions sp) licenseKeyCache + mask_ + $ onMessage enabledLogTypes getAuthMode serverEnv conn bs (wsActions sp) licenseKeyCache onCloseHandler conn = mask_ do granularPrometheusMetricsState <- runGetPrometheusMetricsGranularity @@ -123,8 +123,8 @@ createWSServerEnv appStateRef = do wsServer <- liftIO $ STM.atomically $ WS.createWSServer acAuthMode 
acEnableAllowlist allowlist corsPolicy acSQLGenCtx acExperimentalFeatures acDefaultNamingConvention logger - pure $ - WSServerEnv + pure + $ WSServerEnv (_lsLogger appEnvLoggers) appEnvSubscriptionState appStateRef @@ -162,15 +162,15 @@ mkWSActions logger subProtocol = GraphQLWS -> sendCloseWithMsg logger wsConn (WS.mkWSServerErrorCode mErrMsg err) (Just $ SMConnErr err) Nothing mkConnectionCloseAction wsConn opId errMsg = - when (subProtocol == GraphQLWS) $ - sendCloseWithMsg logger wsConn (GenericError4400 errMsg) (Just . SMErr $ ErrorMsg opId $ toJSON (pack errMsg)) (Just 1000) + when (subProtocol == GraphQLWS) + $ sendCloseWithMsg logger wsConn (GenericError4400 errMsg) (Just . SMErr $ ErrorMsg opId $ toJSON (pack errMsg)) (Just 1000) getServerMsgType = case subProtocol of Apollo -> SMData GraphQLWS -> SMNext - keepAliveAction wsConn = sendMsg wsConn $ - case subProtocol of + keepAliveAction wsConn = sendMsg wsConn + $ case subProtocol of Apollo -> SMConnKeepAlive GraphQLWS -> SMPing . Just $ keepAliveMessage diff --git a/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs b/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs index 30df2c13da3..88ffb7c9dd3 100644 --- a/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs +++ b/server/src-lib/Hasura/GraphQL/Transport/WebSocket.hs @@ -224,7 +224,7 @@ logWSEvent (L.Logger logger) wsConn wsEv = do ODCompleted -> False ODStopped -> False -sendMsg :: MonadIO m => WSConn -> ServerMsg -> m () +sendMsg :: (MonadIO m) => WSConn -> ServerMsg -> m () sendMsg wsConn msg = liftIO do timer <- startTimer WS.sendMsg wsConn $ WS.WSQueueResponse (encodeServerMsg msg) Nothing timer @@ -280,8 +280,8 @@ sendMsgWithMetadata wsConn msg opName paramQueryHash (ES.SubscriptionMetadata ex (SMData (DataMsg opId _)) -> (Just SMT_GQL_DATA, Just opId) _ -> (Nothing, Nothing) wsInfo = - Just $! - WS.WSEventInfo + Just + $! WS.WSEventInfo { WS._wseiEventType = msgType, WS._wseiOperationId = operationId, WS._wseiOperationName = opName, @@ -308,14 +308,16 @@ onConn wsId requestHead ipAddress onConnHActions = do -- in the Apollo spec. For 'graphql-ws', we're using the Ping -- messages that are part of the spec. keepAliveAction keepAliveDelay wsConn = - liftIO $ - forever $ do + liftIO + $ forever + $ do kaAction wsConn sleep $ seconds (unrefine $ unKeepAliveDelay keepAliveDelay) tokenExpiryHandler wsConn = do - expTime <- liftIO $ - STM.atomically $ do + expTime <- liftIO + $ STM.atomically + $ do connState <- STM.readTVar $ (_wscUser . 
WS.getData) wsConn case connState of CSNotInitialised _ _ -> STM.retry @@ -329,31 +331,31 @@ onConn wsId requestHead ipAddress onConnHActions = do keepAliveDelay <- asks _wseKeepAliveDelay logger $ mkWsInfoLog Nothing (WsConnInfo wsId Nothing Nothing) EAccepted connData <- - liftIO $ - WSConnData - <$> STM.newTVarIO (CSNotInitialised hdrs ipAddress) - <*> STMMap.newIO - <*> pure errType - <*> pure queryType + liftIO + $ WSConnData + <$> STM.newTVarIO (CSNotInitialised hdrs ipAddress) + <*> STMMap.newIO + <*> pure errType + <*> pure queryType - pure $ - Right $ - WS.AcceptWith - connData - acceptRequest - (keepAliveAction keepAliveDelay) - tokenExpiryHandler + pure + $ Right + $ WS.AcceptWith + connData + acceptRequest + (keepAliveAction keepAliveDelay) + tokenExpiryHandler reject qErr = do (L.Logger logger) <- asks _wseLogger logger $ mkWsErrorLog Nothing (WsConnInfo wsId Nothing Nothing) (ERejected qErr) - return $ - Left $ - WS.RejectRequest - (HTTP.statusCode $ qeStatus qErr) - (HTTP.statusMessage $ qeStatus qErr) - [] - (LBS.toStrict $ J.encode $ encodeGQLErr False qErr) + return + $ Left + $ WS.RejectRequest + (HTTP.statusCode $ qeStatus qErr) + (HTTP.statusMessage $ qeStatus qErr) + [] + (LBS.toStrict $ J.encode $ encodeGQLErr False qErr) checkPath = case WS.requestPath requestHead of "/v1alpha1/graphql" -> return (ERTLegacy, E.QueryHasura) @@ -438,10 +440,11 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables -- NOTE: it should be safe to rely on this check later on in this function, since we expect that -- we process all operations on a websocket connection serially: - when (isJust op) $ - withComplete $ - sendStartErr $ - "an operation already exists with this id: " <> unOperationId opId + when (isJust op) + $ withComplete + $ sendStartErr + $ "an operation already exists with this id: " + <> unOperationId opId userInfoM <- liftIO $ STM.readTVarIO userInfoR (userInfo, origReqHdrs, ipAddress) <- case userInfoM of @@ -476,8 +479,8 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables -- https://opentelemetry.io/docs/reference/specification/trace/semantic_conventions/instrumentation/graphql/ Tracing.attachMetadata [("graphql.operation.name", unName nm)] execPlanE <- - runExceptT $ - E.getResolvedExecPlan + runExceptT + $ E.getResolvedExecPlan env logger prometheusMetrics @@ -514,55 +517,56 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables liftIO $ recordGQLQuerySuccess reportedExecutionTime gqlOpType sendSuccResp cachedResponseData opName parameterizedQueryHash $ ES.SubscriptionMetadata reportedExecutionTime ResponseUncached storeResponseM -> do - conclusion <- runExceptT $ - runLimits $ - forWithKey queryPlan $ \fieldName -> - let getResponse = \case - E.ExecStepDB _headers exists remoteJoins -> doQErr $ do - (telemTimeIO_DT, resp) <- - AB.dispatchAnyBackend @BackendTransport - exists - \(EB.DBStepInfo _ sourceConfig genSql tx resolvedConnectionTemplate :: EB.DBStepInfo b) -> - runDBQuery @b - requestId - q - fieldName - userInfo - logger - agentLicenseKey - sourceConfig - (fmap (statsToAnyBackend @b) tx) - genSql - resolvedConnectionTemplate + conclusion <- runExceptT + $ runLimits + $ forWithKey queryPlan + $ \fieldName -> + let getResponse = \case + E.ExecStepDB _headers exists remoteJoins -> doQErr $ do + (telemTimeIO_DT, resp) <- + AB.dispatchAnyBackend @BackendTransport + exists + \(EB.DBStepInfo _ sourceConfig genSql tx resolvedConnectionTemplate :: EB.DBStepInfo b) -> + runDBQuery @b 
+ requestId + q + fieldName + userInfo + logger + agentLicenseKey + sourceConfig + (fmap (statsToAnyBackend @b) tx) + genSql + resolvedConnectionTemplate + finalResponse <- + RJ.processRemoteJoins requestId logger agentLicenseKey env reqHdrs userInfo resp remoteJoins q + pure $ AnnotatedResponsePart telemTimeIO_DT Telem.Local finalResponse [] + E.ExecStepRemote rsi resultCustomizer gqlReq remoteJoins -> do + logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindRemoteSchema + runRemoteGQ requestId q fieldName userInfo reqHdrs rsi resultCustomizer gqlReq remoteJoins + E.ExecStepAction actionExecPlan _ remoteJoins -> do + logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindAction + (time, (resp, _)) <- doQErr $ do + (time, (resp, hdrs)) <- EA.runActionExecution userInfo actionExecPlan finalResponse <- RJ.processRemoteJoins requestId logger agentLicenseKey env reqHdrs userInfo resp remoteJoins q - pure $ AnnotatedResponsePart telemTimeIO_DT Telem.Local finalResponse [] - E.ExecStepRemote rsi resultCustomizer gqlReq remoteJoins -> do - logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindRemoteSchema - runRemoteGQ requestId q fieldName userInfo reqHdrs rsi resultCustomizer gqlReq remoteJoins - E.ExecStepAction actionExecPlan _ remoteJoins -> do - logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindAction - (time, (resp, _)) <- doQErr $ do - (time, (resp, hdrs)) <- EA.runActionExecution userInfo actionExecPlan - finalResponse <- - RJ.processRemoteJoins requestId logger agentLicenseKey env reqHdrs userInfo resp remoteJoins q - pure (time, (finalResponse, hdrs)) - pure $ AnnotatedResponsePart time Telem.Empty resp [] - E.ExecStepRaw json -> do - logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindIntrospection - buildRaw json - E.ExecStepMulti lst -> do - allResponses <- traverse getResponse lst - pure $ AnnotatedResponsePart 0 Telem.Local (encJFromList (map arpResponse allResponses)) [] - in getResponse + pure (time, (finalResponse, hdrs)) + pure $ AnnotatedResponsePart time Telem.Empty resp [] + E.ExecStepRaw json -> do + logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindIntrospection + buildRaw json + E.ExecStepMulti lst -> do + allResponses <- traverse getResponse lst + pure $ AnnotatedResponsePart 0 Telem.Local (encJFromList (map arpResponse allResponses)) [] + in getResponse sendResultFromFragments Telem.Query timerTot requestId conclusion opName parameterizedQueryHash gqlOpType case (storeResponseM, conclusion) of (Just ResponseCacher {..}, Right results) -> -- Note: The result of `runStoreResponse` is ignored here since we can't ensure that -- the WS client will respond correctly to multiple messages. 
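The doQErr threaded through these execution hunks (its definition appears later in this file's diff) widens an ExceptT QErr computation into ExceptT (Either GQExecError QErr) by injecting with Right, so GraphQL execution errors (Left) and QErr values (Right) flow through a single error channel. The same trick in miniature, with stand-in error types rather than Hasura's:

import Control.Monad.Trans.Except (ExceptT, runExceptT, throwE, withExceptT)

newtype ExecError = ExecError String deriving (Show)

newtype QueryError = QueryError String deriving (Show)

-- Widen the error type: every QueryError becomes a Right in the sum.
doQErr :: (Monad n) => ExceptT QueryError n a -> ExceptT (Either ExecError QueryError) n a
doQErr = withExceptT Right

main :: IO ()
main = do
  r <- runExceptT (doQErr (throwE (QueryError "boom")))
  print (r :: Either (Either ExecError QueryError) ())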
- void $ - runStoreResponse $ - encodeAnnotatedResponseParts results + void + $ runStoreResponse + $ encodeAnnotatedResponseParts results _ -> pure () liftIO $ sendCompleted (Just requestId) (Just parameterizedQueryHash) @@ -574,10 +578,10 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables -- we are in the aforementioned case; we circumvent the normal process Just (sourceConfig, resolvedConnectionTemplate, pgMutations) -> do resp <- - runExceptT $ - runLimits $ - doQErr $ - runPGMutationTransaction requestId q userInfo logger sourceConfig resolvedConnectionTemplate pgMutations + runExceptT + $ runLimits + $ doQErr + $ runPGMutationTransaction requestId q userInfo logger sourceConfig resolvedConnectionTemplate pgMutations -- we do not construct result fragments since we have only one result handleResult requestId gqlOpType resp \(telemTimeIO_DT, results) -> do let telemQueryType = Telem.Query @@ -585,56 +589,57 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables telemTimeIO = convertDuration telemTimeIO_DT totalTime <- timerTot let telemTimeTot = Seconds totalTime - sendSuccResp (encodeEncJSONResults results) opName parameterizedQueryHash $ - ES.SubscriptionMetadata telemTimeIO_DT + sendSuccResp (encodeEncJSONResults results) opName parameterizedQueryHash + $ ES.SubscriptionMetadata telemTimeIO_DT -- Telemetry. NOTE: don't time network IO: Telem.recordTimingMetric Telem.RequestDimensions {..} Telem.RequestTimings {..} liftIO $ recordGQLQuerySuccess totalTime gqlOpType -- we are not in the transaction case; proceeding normally Nothing -> do - conclusion <- runExceptT $ - runLimits $ - forWithKey mutationPlan $ \fieldName -> - let getResponse = \case - -- Ignoring response headers since we can't send them over WebSocket - E.ExecStepDB _responseHeaders exists remoteJoins -> doQErr $ do - (telemTimeIO_DT, resp) <- - AB.dispatchAnyBackend @BackendTransport - exists - \(EB.DBStepInfo _ sourceConfig genSql tx resolvedConnectionTemplate :: EB.DBStepInfo b) -> - runDBMutation @b - requestId - q - fieldName - userInfo - logger - agentLicenseKey - sourceConfig - (fmap EB.arResult tx) - genSql - resolvedConnectionTemplate + conclusion <- runExceptT + $ runLimits + $ forWithKey mutationPlan + $ \fieldName -> + let getResponse = \case + -- Ignoring response headers since we can't send them over WebSocket + E.ExecStepDB _responseHeaders exists remoteJoins -> doQErr $ do + (telemTimeIO_DT, resp) <- + AB.dispatchAnyBackend @BackendTransport + exists + \(EB.DBStepInfo _ sourceConfig genSql tx resolvedConnectionTemplate :: EB.DBStepInfo b) -> + runDBMutation @b + requestId + q + fieldName + userInfo + logger + agentLicenseKey + sourceConfig + (fmap EB.arResult tx) + genSql + resolvedConnectionTemplate + finalResponse <- + RJ.processRemoteJoins requestId logger agentLicenseKey env reqHdrs userInfo resp remoteJoins q + pure $ AnnotatedResponsePart telemTimeIO_DT Telem.Local finalResponse [] + E.ExecStepAction actionExecPlan _ remoteJoins -> do + logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindAction + (time, (resp, hdrs)) <- doQErr $ do + (time, (resp, hdrs)) <- EA.runActionExecution userInfo actionExecPlan finalResponse <- RJ.processRemoteJoins requestId logger agentLicenseKey env reqHdrs userInfo resp remoteJoins q - pure $ AnnotatedResponsePart telemTimeIO_DT Telem.Local finalResponse [] - E.ExecStepAction actionExecPlan _ remoteJoins -> do - logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindAction - (time, (resp, hdrs)) 
<- doQErr $ do - (time, (resp, hdrs)) <- EA.runActionExecution userInfo actionExecPlan - finalResponse <- - RJ.processRemoteJoins requestId logger agentLicenseKey env reqHdrs userInfo resp remoteJoins q - pure (time, (finalResponse, hdrs)) - pure $ AnnotatedResponsePart time Telem.Empty resp $ fromMaybe [] hdrs - E.ExecStepRemote rsi resultCustomizer gqlReq remoteJoins -> do - logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindRemoteSchema - runRemoteGQ requestId q fieldName userInfo reqHdrs rsi resultCustomizer gqlReq remoteJoins - E.ExecStepRaw json -> do - logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindIntrospection - buildRaw json - E.ExecStepMulti lst -> do - allResponses <- traverse getResponse lst - pure $ AnnotatedResponsePart 0 Telem.Local (encJFromList (map arpResponse allResponses)) [] - in getResponse + pure (time, (finalResponse, hdrs)) + pure $ AnnotatedResponsePart time Telem.Empty resp $ fromMaybe [] hdrs + E.ExecStepRemote rsi resultCustomizer gqlReq remoteJoins -> do + logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindRemoteSchema + runRemoteGQ requestId q fieldName userInfo reqHdrs rsi resultCustomizer gqlReq remoteJoins + E.ExecStepRaw json -> do + logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindIntrospection + buildRaw json + E.ExecStepMulti lst -> do + allResponses <- traverse getResponse lst + pure $ AnnotatedResponsePart 0 Telem.Local (encJFromList (map arpResponse allResponses)) [] + in getResponse sendResultFromFragments Telem.Query timerTot requestId conclusion opName parameterizedQueryHash gqlOpType liftIO $ sendCompleted (Just requestId) (Just parameterizedQueryHash) E.SubscriptionExecutionPlan subExec -> do @@ -649,27 +654,28 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables Nothing -> sendCompleted (Just requestId) (Just parameterizedQueryHash) Just actionIds -> do let sendResponseIO actionLogMap = do - (dTime, resultsE) <- withElapsedTime $ - runExceptT $ - for actions $ \(actionId, resultBuilder) -> do - actionLogResponse <- - HashMap.lookup actionId actionLogMap - `onNothing` throw500 "unexpected: cannot lookup action_id in response map" - liftEither $ resultBuilder actionLogResponse + (dTime, resultsE) <- withElapsedTime + $ runExceptT + $ for actions + $ \(actionId, resultBuilder) -> do + actionLogResponse <- + HashMap.lookup actionId actionLogMap + `onNothing` throw500 "unexpected: cannot lookup action_id in response map" + liftEither $ resultBuilder actionLogResponse case resultsE of Left err -> sendError requestId err Right results -> do let dataMsg = - sendDataMsg $ - DataMsg opId $ - pure $ - encJToLBS $ - encodeEncJSONResults results + sendDataMsg + $ DataMsg opId + $ pure + $ encJToLBS + $ encodeEncJSONResults results sendMsgWithMetadata wsConn dataMsg opName (Just parameterizedQueryHash) $ ES.SubscriptionMetadata dTime asyncActionQueryLive = - ES.LAAQNoRelationships $ - ES.LiveAsyncActionQueryWithNoRelationships sendResponseIO (sendCompleted (Just requestId) (Just parameterizedQueryHash)) + ES.LAAQNoRelationships + $ ES.LiveAsyncActionQueryWithNoRelationships sendResponseIO (sendCompleted (Just requestId) (Just parameterizedQueryHash)) ES.addAsyncActionLiveQuery (ES._ssAsyncActions subscriptionsState) @@ -693,9 +699,9 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables logQueryLog logger $ QueryLog q Nothing requestId QueryLogKindAction liftIO $ do let asyncActionQueryLive = - ES.LAAQOnSourceDB $ - ES.LiveAsyncActionQueryOnSource 
lqId actionLogMap $ - restartLiveQuery parameterizedQueryHash requestId liveQueryBuilder granularPrometheusMetricsState (_grOperationName reqParsed) + ES.LAAQOnSourceDB + $ ES.LiveAsyncActionQueryOnSource lqId actionLogMap + $ restartLiveQuery parameterizedQueryHash requestId liveQueryBuilder granularPrometheusMetricsState (_grOperationName reqParsed) onUnexpectedException err = do sendError requestId err @@ -719,14 +725,14 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables fmtErrorMessage = WS._wsaErrorMsgFormat onMessageActions doQErr :: - Monad n => + (Monad n) => ExceptT QErr n a -> ExceptT (Either GQExecError QErr) n a doQErr = withExceptT Right withErr :: forall e f n a. - Monad n => + (Monad n) => (e -> f) -> (ExceptT e (ExceptT f n) a -> ExceptT e (ExceptT f n) a) -> ExceptT f n a -> @@ -757,10 +763,10 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables telemTimeIO = convertDuration $ sum $ fmap arpTimeIO results totalTime <- timerTot let telemTimeTot = Seconds totalTime - sendSuccResp (encodeAnnotatedResponseParts results) opName pqh $ - ES.SubscriptionMetadata $ - sum $ - fmap arpTimeIO results + sendSuccResp (encodeAnnotatedResponseParts results) opName pqh + $ ES.SubscriptionMetadata + $ sum + $ fmap arpTimeIO results -- Telemetry. NOTE: don't time network IO: Telem.recordTimingMetric Telem.RequestDimensions {..} Telem.RequestTimings {..} liftIO $ recordGQLQuerySuccess totalTime gqlOpType @@ -779,12 +785,12 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables runRemoteGQ requestId reqUnparsed fieldName userInfo reqHdrs rsi resultCustomizer gqlReq remoteJoins = do env <- liftIO $ acEnvironment <$> getAppContext appStateRef (telemTimeIO_DT, _respHdrs, resp) <- - doQErr $ - E.execRemoteGQ env userInfo reqHdrs (rsDef rsi) gqlReq + doQErr + $ E.execRemoteGQ env userInfo reqHdrs (rsDef rsi) gqlReq value <- hoist lift $ extractFieldFromResponse fieldName resultCustomizer resp finalResponse <- - doQErr $ - RJ.processRemoteJoins + doQErr + $ RJ.processRemoteJoins requestId logger agentLicenseKey @@ -816,7 +822,7 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables WSConnData userInfoR opMap errRespTy queryType = WS.getData wsConn - logOpEv :: MonadIO n => OpDetail -> Maybe RequestId -> Maybe ParameterizedQueryHash -> n () + logOpEv :: (MonadIO n) => OpDetail -> Maybe RequestId -> Maybe ParameterizedQueryHash -> n () logOpEv opTy reqId parameterizedQueryHash = -- See Note [Disable query printing when query-log is disabled] let censoredReq = @@ -824,20 +830,20 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables CaptureQueryVariables -> q DoNotCaptureQueryVariables -> q {_grVariables = Nothing} queryToLog = censoredReq <$ guard (Set.member L.ELTQueryLog enabledLogTypes) - in logWSEvent logger wsConn $ - EOperation $ - OperationDetails opId reqId (_grOperationName q) opTy queryToLog parameterizedQueryHash + in logWSEvent logger wsConn + $ EOperation + $ OperationDetails opId reqId (_grOperationName q) opTy queryToLog parameterizedQueryHash getErrFn ERTLegacy = encodeQErr getErrFn ERTGraphqlCompliant = encodeGQLErr sendStartErr e = do let errFn = getErrFn errRespTy - sendMsg wsConn $ - SMErr $ - ErrorMsg opId $ - errFn False $ - err400 StartFailed e + sendMsg wsConn + $ SMErr + $ ErrorMsg opId + $ errFn False + $ err400 StartFailed e liftIO $ logOpEv (ODProtoErr e) Nothing Nothing liftIO $ reportGQLQueryError Nothing liftIO $ 
closeConnAction wsConn opId (T.unpack e) @@ -927,7 +933,8 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables (onChange opName parameterizedQueryHash $ ES._sqpNamespace liveQueryPlan) liftIO $ $assertNFHere (lqId, opName) -- so we don't write thunks to mutable vars - STM.atomically $ + STM.atomically + $ -- NOTE: see crucial `lookup` check above, ensuring this doesn't clobber: STMMap.insert (LiveQuerySubscriber lqId, opName) opId opMap pure lqId @@ -956,7 +963,8 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables granularPrometheusMetricsState (onChange opName parameterizedQueryHash $ ES._sqpNamespace streamQueryPlan) liftIO $ $assertNFHere (streamSubscriberId, opName) -- so we don't write thunks to mutable vars - STM.atomically $ + STM.atomically + $ -- NOTE: see crucial `lookup` check above, ensuring this doesn't clobber: STMMap.insert (StreamingQuerySubscriber streamSubscriberId, opName) opId opMap pure () @@ -972,10 +980,12 @@ onStart enabledLogTypes agentLicenseKey serverEnv wsConn shouldCaptureVariables (Just queryHash) (ES.SubscriptionMetadata dTime) resp -> - sendMsg wsConn $ - sendDataMsg $ - DataMsg opId $ - LBS.fromStrict . ES._lqrPayload <$> resp + sendMsg wsConn + $ sendDataMsg + $ DataMsg opId + $ LBS.fromStrict + . ES._lqrPayload + <$> resp -- If the source has a namespace then we need to wrap the response -- from the DB in that namespace. @@ -1084,13 +1094,13 @@ onStop serverEnv wsConn (StopMsg opId) granularPrometheusMetricsState = liftIO $ -- OpMap as soon as it is executed -- 2. A misbehaving client -- 3. A bug on our end - stopOperation serverEnv wsConn opId granularPrometheusMetricsState $ - L.unLogger logger $ - L.UnstructuredLog L.LevelDebug $ - fromString $ - "Received STOP for an operation that we have no record for: " - <> show (unOperationId opId) - <> " (could be a query/mutation operation or a misbehaving client or a bug)" + stopOperation serverEnv wsConn opId granularPrometheusMetricsState + $ L.unLogger logger + $ L.UnstructuredLog L.LevelDebug + $ fromString + $ "Received STOP for an operation that we have no record for: " + <> show (unOperationId opId) + <> " (could be a query/mutation operation or a misbehaving client or a bug)" where logger = _wseLogger serverEnv @@ -1185,7 +1195,7 @@ onConnInit logger manager wsConn getAuthMode connParamsM onConnInitErrAction kee _ -> [] onClose :: - MonadIO m => + (MonadIO m) => L.Logger L.Hasura -> ServerMetrics -> PrometheusMetrics -> @@ -1196,8 +1206,9 @@ onClose :: onClose logger serverMetrics prometheusMetrics subscriptionsState wsConn granularPrometheusMetricsState = do logWSEvent logger wsConn EClosed operations <- liftIO $ STM.atomically $ ListT.toList $ STMMap.listT opMap - liftIO $ - for_ operations $ \(_, (subscriber, operationName)) -> + liftIO + $ for_ operations + $ \(_, (subscriber, operationName)) -> case subscriber of LiveQuerySubscriber lqId -> ES.removeLiveQuery logger serverMetrics prometheusMetrics subscriptionsState lqId granularPrometheusMetricsState operationName StreamingQuerySubscriber streamSubscriberId -> ES.removeStreamingQuery logger serverMetrics prometheusMetrics subscriptionsState streamSubscriberId granularPrometheusMetricsState operationName diff --git a/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Protocol.hs b/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Protocol.hs index ff296845391..15b6dd0675f 100644 --- a/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Protocol.hs +++ 
b/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Protocol.hs @@ -241,8 +241,9 @@ encodeServerErrorMsg ecode = encJToLBS . encJFromJValue $ case ecode of encodeServerMsg :: ServerMsg -> BL.ByteString encodeServerMsg msg = - encJToLBS $ - encJFromAssocList $ case msg of + encJToLBS + $ encJFromAssocList + $ case msg of SMConnAck -> [encTy SMT_GQL_CONNECTION_ACK] SMConnKeepAlive -> @@ -299,8 +300,9 @@ getNewWSTimer :: Seconds -> IO WSConnInitTimer getNewWSTimer timeout = do timerState <- newTVarIO Running timer <- newEmptyTMVarIO - void $ - forkIO $ do + void + $ forkIO + $ do sleep (seconds timeout) atomically $ do runTimerState <- readTVar timerState diff --git a/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Server.hs b/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Server.hs index 715e12713dc..04b5ba70efd 100644 --- a/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Server.hs +++ b/server/src-lib/Hasura/GraphQL/Transport/WebSocket/Server.hs @@ -219,8 +219,8 @@ forceConnReconnect wsConn bs = liftIO $ closeConnWithCode wsConn 1012 bs closeConnWithCode :: WSConn a -> Word16 -> BL.ByteString -> IO () closeConnWithCode wsConn code bs = do - ((\x -> L.unLogger x) . _wcLogger) wsConn $ - WSLog (_wcConnId wsConn) (ECloseSent $ SB.fromLBS bs) Nothing + ((\x -> L.unLogger x) . _wcLogger) wsConn + $ WSLog (_wcConnId wsConn) (ECloseSent $ SB.fromLBS bs) Nothing WS.sendCloseCode (_wcConnRaw wsConn) code bs sendMsgAndCloseConn :: WSConn a -> Word16 -> BL.ByteString -> ServerMsg -> IO () @@ -376,10 +376,10 @@ websocketConnectionReaper getLatestConfig getSchemaCache (WSServer (L.Logger wri (SecuritySensitiveUserConfig -> SecuritySensitiveUserConfig) -> IO () closeAllConnectionsWithReason logMsg reason updateConf = do - writeLog $ - WSReaperThreadLog $ - fromString $ - logMsg + writeLog + $ WSReaperThreadLog + $ fromString + $ logMsg conns <- STM.atomically $ do STM.modifyTVar' userConfRef updateConf flushConnMap serverStatus @@ -416,71 +416,71 @@ websocketConnectionReaper getLatestConfig getSchemaCache (WSServer (L.Logger wri hasHideStreamFieldsChanged = (EFHideStreamFields `elem` currExperimentalFeatures) && (EFHideStreamFields `elem` prevExperimentalFeatures) hasDefaultNamingCaseChanged = hasNamingConventionChanged (prevExperimentalFeatures, prevDefaultNamingCase) (currExperimentalFeatures, currDefaultNamingCase) if - -- if CORS policy has changed, close all connections - | hasCorsPolicyChanged -> - closeAllConnectionsWithReason - "closing all websocket connections as the cors policy changed" - "cors policy changed" - (\conf -> conf {ssucCorsPolicy = currCorsPolicy}) - -- if any auth config has changed, close all connections - | hasAuthModeChanged -> - closeAllConnectionsWithReason - "closing all websocket connections as the auth mode changed" - "auth mode changed" - (\conf -> conf {ssucAuthMode = currAuthMode}) - -- In case of allowlist, we need to check if the allowlist has changed. - -- If the allowlist is disabled, we keep all the connections as is. - -- If the allowlist is enabled from a disabled state, we need to close all the - -- connections. - | hasAllowlistEnabled -> - closeAllConnectionsWithReason - "closing all websocket connections as allow list is enabled" - "allow list enabled" - (\conf -> conf {ssucEnableAllowlist = currEnableAllowlist}) - -- If the allowlist is already enabled and there are any changes made to the - -- allowlist, we need to close all the connections. 
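getNewWSTimer above forks a thread that sleeps for the timeout and then fires the timer atomically, but only if nothing flipped its state in the meantime. A trimmed-down version of that shape (the state and binding names here are invented, not the real ones):

import Control.Concurrent (forkIO, threadDelay)
import Control.Concurrent.STM

data TimerState = Running | Cancelled

-- Returns the cancellable state var and a TMVar that is filled on expiry.
newOneShotTimer :: Int -> IO (TVar TimerState, TMVar ())
newOneShotTimer micros = do
  state <- newTVarIO Running
  fired <- newEmptyTMVarIO
  _ <- forkIO $ do
    threadDelay micros
    atomically $ do
      s <- readTVar state
      case s of
        Running -> putTMVar fired ()
        Cancelled -> pure ()
  pure (state, fired)

main :: IO ()
main = do
  (_state, fired) <- newOneShotTimer 100000
  atomically (takeTMVar fired) -- blocks until the timer fires
  putStrLn "timer fired"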
- | hasAllowlistUpdated -> - closeAllConnectionsWithReason - "closing all websocket connections as the allow list has been updated" - "allow list updated" - (\conf -> conf {ssucAllowlist = currAllowlist}) - -- if HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES has changed, close all connections - | hasStringifyNumChanged -> - closeAllConnectionsWithReason - "closing all websocket connections as the HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES setting changed" - "HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES env var changed" - (\conf -> conf {ssucSQLGenCtx = currSqlGenCtx}) - -- if HASURA_GRAPHQL_V1_BOOLEAN_NULL_COLLAPSE has changed, close all connections - | hasDangerousBooleanCollapseChanged -> - closeAllConnectionsWithReason - "closing all websocket connections as the HASURA_GRAPHQL_V1_BOOLEAN_NULL_COLLAPSE setting changed" - "HASURA_GRAPHQL_V1_BOOLEAN_NULL_COLLAPSE env var changed" - (\conf -> conf {ssucSQLGenCtx = currSqlGenCtx}) - -- if 'bigquery_string_numeric_input' option added/removed from experimental features, close all connections - | hasBigqueryStringNumericInputChanged -> - closeAllConnectionsWithReason - "closing all websocket connections as the 'bigquery_string_numeric_input' option has been added/removed from HASURA_GRAPHQL_EXPERIMENTAL_FEATURES" - "'bigquery_string_numeric_input' removed/added in HASURA_GRAPHQL_EXPERIMENTAL_FEATURES env var" - (\conf -> conf {ssucSQLGenCtx = currSqlGenCtx}) - -- if 'hide_aggregation_predicates' option added/removed from experimental features, close all connections - | hasHideAggregationPredicatesChanged -> - closeAllConnectionsWithReason - "closing all websocket connections as the 'hide-aggregation-predicates' option has been added/removed from HASURA_GRAPHQL_EXPERIMENTAL_FEATURES" - "'hide-aggregation-predicates' removed/added in HASURA_GRAPHQL_EXPERIMENTAL_FEATURES env var" - (\conf -> conf {ssucExperimentalFeatures = currExperimentalFeatures}) - -- if 'hide_stream_fields' option added/removed from experimental features, close all connections - | hasHideStreamFieldsChanged -> - closeAllConnectionsWithReason - "closing all websocket connections as the 'hide-stream-fields' option has been added/removed from HASURA_GRAPHQL_EXPERIMENTAL_FEATURES" - "'hide-stream-fields' removed/added in HASURA_GRAPHQL_EXPERIMENTAL_FEATURES env var" - (\conf -> conf {ssucExperimentalFeatures = currExperimentalFeatures}) - -- if naming convention has been changed, close all connections - | hasDefaultNamingCaseChanged -> - closeAllConnectionsWithReason - "closing all websocket connections as the 'naming_convention' option has been added/removed from HASURA_GRAPHQL_EXPERIMENTAL_FEATURES and the HASURA_GRAPHQL_DEFAULT_NAMING_CONVENTION has changed" - "naming convention has been changed" - (\conf -> conf {ssucExperimentalFeatures = currExperimentalFeatures, ssucDefaultNamingCase = currDefaultNamingCase}) - | otherwise -> pure () + -- if CORS policy has changed, close all connections + | hasCorsPolicyChanged -> + closeAllConnectionsWithReason + "closing all websocket connections as the cors policy changed" + "cors policy changed" + (\conf -> conf {ssucCorsPolicy = currCorsPolicy}) + -- if any auth config has changed, close all connections + | hasAuthModeChanged -> + closeAllConnectionsWithReason + "closing all websocket connections as the auth mode changed" + "auth mode changed" + (\conf -> conf {ssucAuthMode = currAuthMode}) + -- In case of allowlist, we need to check if the allowlist has changed. + -- If the allowlist is disabled, we keep all the connections as is. 
+ -- If the allowlist is enabled from a disabled state, we need to close all the + -- connections. + | hasAllowlistEnabled -> + closeAllConnectionsWithReason + "closing all websocket connections as allow list is enabled" + "allow list enabled" + (\conf -> conf {ssucEnableAllowlist = currEnableAllowlist}) + -- If the allowlist is already enabled and there are any changes made to the + -- allowlist, we need to close all the connections. + | hasAllowlistUpdated -> + closeAllConnectionsWithReason + "closing all websocket connections as the allow list has been updated" + "allow list updated" + (\conf -> conf {ssucAllowlist = currAllowlist}) + -- if HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES has changed, close all connections + | hasStringifyNumChanged -> + closeAllConnectionsWithReason + "closing all websocket connections as the HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES setting changed" + "HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES env var changed" + (\conf -> conf {ssucSQLGenCtx = currSqlGenCtx}) + -- if HASURA_GRAPHQL_V1_BOOLEAN_NULL_COLLAPSE has changed, close all connections + | hasDangerousBooleanCollapseChanged -> + closeAllConnectionsWithReason + "closing all websocket connections as the HASURA_GRAPHQL_V1_BOOLEAN_NULL_COLLAPSE setting changed" + "HASURA_GRAPHQL_V1_BOOLEAN_NULL_COLLAPSE env var changed" + (\conf -> conf {ssucSQLGenCtx = currSqlGenCtx}) + -- if 'bigquery_string_numeric_input' option added/removed from experimental features, close all connections + | hasBigqueryStringNumericInputChanged -> + closeAllConnectionsWithReason + "closing all websocket connections as the 'bigquery_string_numeric_input' option has been added/removed from HASURA_GRAPHQL_EXPERIMENTAL_FEATURES" + "'bigquery_string_numeric_input' removed/added in HASURA_GRAPHQL_EXPERIMENTAL_FEATURES env var" + (\conf -> conf {ssucSQLGenCtx = currSqlGenCtx}) + -- if 'hide_aggregation_predicates' option added/removed from experimental features, close all connections + | hasHideAggregationPredicatesChanged -> + closeAllConnectionsWithReason + "closing all websocket connections as the 'hide-aggregation-predicates' option has been added/removed from HASURA_GRAPHQL_EXPERIMENTAL_FEATURES" + "'hide-aggregation-predicates' removed/added in HASURA_GRAPHQL_EXPERIMENTAL_FEATURES env var" + (\conf -> conf {ssucExperimentalFeatures = currExperimentalFeatures}) + -- if 'hide_stream_fields' option added/removed from experimental features, close all connections + | hasHideStreamFieldsChanged -> + closeAllConnectionsWithReason + "closing all websocket connections as the 'hide-stream-fields' option has been added/removed from HASURA_GRAPHQL_EXPERIMENTAL_FEATURES" + "'hide-stream-fields' removed/added in HASURA_GRAPHQL_EXPERIMENTAL_FEATURES env var" + (\conf -> conf {ssucExperimentalFeatures = currExperimentalFeatures}) + -- if naming convention has been changed, close all connections + | hasDefaultNamingCaseChanged -> + closeAllConnectionsWithReason + "closing all websocket connections as the 'naming_convention' option has been added/removed from HASURA_GRAPHQL_EXPERIMENTAL_FEATURES and the HASURA_GRAPHQL_DEFAULT_NAMING_CONVENTION has changed" + "naming convention has been changed" + (\conf -> conf {ssucExperimentalFeatures = currExperimentalFeatures, ssucDefaultNamingCase = currDefaultNamingCase}) + | otherwise -> pure () createServerApp :: (MonadIO m, MC.MonadBaseControl IO m, LA.Forall (LA.Pure m), MonadWSLog m) => @@ -527,16 +527,18 @@ createServerApp getMetricsConfig wsConnInitTimeout (WSServer logger@(L.Logger wr -- Refer: 
https://hackage.haskell.org/package/warp-3.3.24/docs/src/Network.Wai.Handler.Warp.Settings.html#defaultShouldDisplayException Handler $ \(_ :: TM.TimeoutThread) -> pure (), Handler $ \(e :: Warp.InvalidRequest) -> do - writeLog $ - L.UnstructuredLog L.LevelError $ - fromString $ - "Client exception: " <> show e + writeLog + $ L.UnstructuredLog L.LevelError + $ fromString + $ "Client exception: " + <> show e throwIO e, Handler $ \(e :: SomeException) -> do - writeLog $ - L.UnstructuredLog L.LevelError $ - fromString $ - "Unexpected exception raised in websocket. Please report this as a bug: " <> show e + writeLog + $ L.UnstructuredLog L.LevelError + $ fromString + $ "Unexpected exception raised in websocket. Please report this as a bug: " + <> show e throwIO e ] @@ -562,8 +564,9 @@ createServerApp getMetricsConfig wsConnInitTimeout (WSServer logger@(L.Logger wr -- Adding `package` stanzas with -Xstrict -XStrictData for those two packages -- helped, cutting the number of thunks approximately in half. liftIO $ $assertNFHere wsConn -- so we don't write thunks to mutable vars - let whenAcceptingInsertConn = liftIO $ - STM.atomically $ do + let whenAcceptingInsertConn = liftIO + $ STM.atomically + $ do status <- STM.readTVar serverStatus case status of ShuttingDown -> pure () @@ -586,21 +589,22 @@ createServerApp getMetricsConfig wsConnInitTimeout (WSServer logger@(L.Logger wr shouldCaptureVariables <- liftIO $ _mcAnalyzeQueryVariables <$> getMetricsConfig -- Process all messages serially (important!), in a separate thread: msg <- - liftIO $ + liftIO + $ -- Re-throw "receiveloop: resource vanished (Connection reset by peer)" : -- https://github.com/yesodweb/wai/blob/master/warp/Network/Wai/Handler/Warp/Recv.hs#L112 -- as WS exception signaling cleanup below. It's not clear why exactly this gets -- raised occasionally; I suspect an equivalent handler is missing from WS itself. -- Regardless this should be safe: - handleJust (guard . E.isResourceVanishedError) (\() -> throw WS.ConnectionClosed) $ - WS.receiveData conn + handleJust (guard . 
E.isResourceVanishedError) (\() -> throw WS.ConnectionClosed) + $ WS.receiveData conn let messageLength = BL.length msg censoredMessage = MessageDetails (SB.fromLBS (if shouldCaptureVariables then msg else "")) messageLength - liftIO $ - Prometheus.Counter.add + liftIO + $ Prometheus.Counter.add (pmWebSocketBytesReceived prometheusMetrics) messageLength logWSLog logger $ WSLog wsId (EMessageReceived censoredMessage) Nothing @@ -629,9 +633,9 @@ createServerApp getMetricsConfig wsConnInitTimeout (WSServer logger@(L.Logger wr LA.withAsync (liftIO $ onJwtExpiry wsConn) $ \onJwtExpiryRef -> do -- once connection is accepted, check the status of the timer, and if it's expired, close the connection for `graphql-ws` timeoutStatus <- liftIO $ getWSTimerState wsConnInitTimer - when (timeoutStatus == Done && subProtocol == GraphQLWS) $ - liftIO $ - closeConnWithCode wsConn 4408 "Connection initialisation timed out" + when (timeoutStatus == Done && subProtocol == GraphQLWS) + $ liftIO + $ closeConnWithCode wsConn 4408 "Connection initialisation timed out" -- terminates on WS.ConnectionException and JWT expiry let waitOnRefs = [keepAliveRef, onJwtExpiryRef, rcvRef, sendRef] diff --git a/server/src-lib/Hasura/HTTP.hs b/server/src-lib/Hasura/HTTP.hs index 35817fec6f4..7e0c8c7eab1 100644 --- a/server/src-lib/Hasura/HTTP.hs +++ b/server/src-lib/Hasura/HTTP.hs @@ -164,8 +164,8 @@ serializeHTTPExceptionMessageForDebugging = serializeHTTPExceptionWithErrorMessa encodeHTTPRequestJSON :: HTTP.Request -> J.Value encodeHTTPRequestJSON request = - J.Object $ - KM.fromList + J.Object + $ KM.fromList [ ("host", J.toJSON $ TE.decodeUtf8 $ HTTP.host request), ("port", J.toJSON $ HTTP.port request), ("secure", J.toJSON $ HTTP.secure request), @@ -197,12 +197,12 @@ getHttpExceptionJson (ShowErrorInfo isShowHTTPErrorInfo) httpException = then serializeHTTPExceptionWithErrorMessage (ShowHeadersAndEnvVarInfo False) (unHttpException httpException) else serializeHTTPExceptionMessage httpException reqJSON = encodeHTTPRequestJSON req - J.object $ - [ "type" J..= ("http_exception" :: Text), - "message" J..= exceptionContent, - "request" J..= reqJSON - ] - <> maybe mempty (\status -> ["status" J..= status]) statusMaybe + J.object + $ [ "type" J..= ("http_exception" :: Text), + "message" J..= exceptionContent, + "request" J..= reqJSON + ] + <> maybe mempty (\status -> ["status" J..= status]) statusMaybe -- it will not show HTTP Exception error message info instance J.ToJSON HttpException where @@ -232,5 +232,5 @@ serializeServantClientErrorMessageForDebugging = \case Nothing -> "error in the connection: " <> serializeExceptionForDebugging exn other -> serializeServantClientErrorMessage other -serializeExceptionForDebugging :: Exception e => e -> Text +serializeExceptionForDebugging :: (Exception e) => e -> Text serializeExceptionForDebugging = T.pack . displayException diff --git a/server/src-lib/Hasura/Logging.hs b/server/src-lib/Hasura/Logging.hs index 18124683cb0..8f1bb2c7b87 100644 --- a/server/src-lib/Hasura/Logging.hs +++ b/server/src-lib/Hasura/Logging.hs @@ -224,21 +224,21 @@ data EngineLog impl = EngineLog _elDetail :: !J.Value } -deriving instance Show (EngineLogType impl) => Show (EngineLog impl) +deriving instance (Show (EngineLogType impl)) => Show (EngineLog impl) -deriving instance Eq (EngineLogType impl) => Eq (EngineLog impl) +deriving instance (Eq (EngineLogType impl)) => Eq (EngineLog impl) -- Empty splice to bring all the above definitions in scope. -- -- TODO: Restructure the code so that we can avoid this. 
$(pure []) -instance J.ToJSON (EngineLogType impl) => J.ToJSON (EngineLog impl) where +instance (J.ToJSON (EngineLogType impl)) => J.ToJSON (EngineLog impl) where toJSON = $(J.mkToJSON hasuraJSON ''EngineLog) -- | Typeclass representing any data type that can be converted to @EngineLog@ for the purpose of -- logging -class EnabledLogTypes impl => ToEngineLog a impl where +class (EnabledLogTypes impl) => ToEngineLog a impl where toEngineLog :: a -> (LogLevel, EngineLogType impl, J.Value) data UnstructuredLog = UnstructuredLog {_ulLevel :: !LogLevel, _ulPayload :: !SB.SerializableBlob} @@ -327,10 +327,10 @@ mkLogger :: (J.ToJSON (EngineLogType impl)) => LoggerCtx impl -> Logger impl mkLogger (LoggerCtx loggerSet serverLogLevel timeGetter enabledLogTypes) = Logger $ \l -> do localTime <- liftIO timeGetter let (logLevel, logTy, logDet) = toEngineLog l - when (logLevel >= serverLogLevel && isLogTypeEnabled enabledLogTypes logTy) $ - liftIO $ - FL.pushLogStrLn loggerSet $ - FL.toLogStr (J.encode $ EngineLog localTime logLevel logTy logDet) + when (logLevel >= serverLogLevel && isLogTypeEnabled enabledLogTypes logTy) + $ liftIO + $ FL.pushLogStrLn loggerSet + $ FL.toLogStr (J.encode $ EngineLog localTime logLevel logTy logDet) nullLogger :: Logger Hasura nullLogger = Logger \_ -> pure () diff --git a/server/src-lib/Hasura/LogicalModel/API.hs b/server/src-lib/Hasura/LogicalModel/API.hs index fc19dab3ed5..3a36d0da585 100644 --- a/server/src-lib/Hasura/LogicalModel/API.hs +++ b/server/src-lib/Hasura/LogicalModel/API.hs @@ -56,14 +56,14 @@ instance (Backend b) => HasCodec (TrackLogicalModel b) where ("A request to track a logical model") $ AC.object (backendPrefix @b <> "TrackLogicalModel") $ TrackLogicalModel - <$> AC.requiredField "source" sourceDoc - AC..= tlmSource + <$> AC.requiredField "source" sourceDoc + AC..= tlmSource <*> AC.requiredField "name" nameDoc - AC..= tlmName + AC..= tlmName <*> AC.optionalField "description" descriptionDoc - AC..= tlmDescription + AC..= tlmDescription <*> AC.requiredFieldWith "fields" logicalModelFieldMapCodec fieldsDoc - AC..= tlmFields + AC..= tlmFields where sourceDoc = "The source in which this logical model should be tracked" nameDoc = "Root field name for the logical model" @@ -98,16 +98,16 @@ data GetLogicalModel (b :: BackendType) = GetLogicalModel { glmSource :: SourceName } -deriving instance Backend b => Show (GetLogicalModel b) +deriving instance (Backend b) => Show (GetLogicalModel b) -deriving instance Backend b => Eq (GetLogicalModel b) +deriving instance (Backend b) => Eq (GetLogicalModel b) -instance Backend b => FromJSON (GetLogicalModel b) where +instance (Backend b) => FromJSON (GetLogicalModel b) where parseJSON = withObject "GetLogicalModel" $ \o -> do glmSource <- o .: "source" pure GetLogicalModel {..} -instance Backend b => ToJSON (GetLogicalModel b) where +instance (Backend b) => ToJSON (GetLogicalModel b) where toJSON GetLogicalModel {..} = object [ "source" .= glmSource @@ -155,18 +155,18 @@ runTrackLogicalModel trackLogicalModelRequest = do let fieldName = _lmmName metadata metadataObj = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOLogicalModel @b fieldName + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOLogicalModel @b fieldName existingLogicalModels = InsOrdHashMap.keys (_smLogicalModels sourceMetadata) when (fieldName `elem` existingLogicalModels) do throw400 AlreadyTracked $ "Logical model '" <> toTxt fieldName <> "' is already tracked." - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - (metaSources . ix source . 
toSourceMetadata @b . smLogicalModels) - %~ InsOrdHashMap.insert fieldName metadata + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ (metaSources . ix source . toSourceMetadata @b . smLogicalModels) + %~ InsOrdHashMap.insert fieldName metadata pure successMsg where @@ -211,9 +211,9 @@ runUntrackLogicalModel q = do assertLogicalModelExists @b source fieldName let metadataObj = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOLogicalModel @b fieldName + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOLogicalModel @b fieldName metadata <- getMetadata @@ -222,15 +222,16 @@ runUntrackLogicalModel q = do case find ((== fieldName) . _nqmReturns) nativeQueries of Just NativeQueryMetadata {_nqmRootFieldName} -> - throw400 ConstraintViolation $ - "Logical model " - <> fieldName - <<> " still being used by native query " - <> _nqmRootFieldName <<> "." + throw400 ConstraintViolation + $ "Logical model " + <> fieldName + <<> " still being used by native query " + <> _nqmRootFieldName + <<> "." Nothing -> pure () - buildSchemaCacheFor metadataObj $ - dropLogicalModelInMetadata @b source fieldName + buildSchemaCacheFor metadataObj + $ dropLogicalModelInMetadata @b source fieldName pure successMsg where @@ -248,7 +249,7 @@ data CreateLogicalModelPermission a (b :: BackendType) = CreateLogicalModelPermi deriving stock (Generic) instance - FromJSON (PermDef b a) => + (FromJSON (PermDef b a)) => FromJSON (CreateLogicalModelPermission a b) where parseJSON = withObject "CreateLogicalModelPermission" \obj -> do @@ -268,14 +269,15 @@ runCreateSelectLogicalModelPermission CreateLogicalModelPermission {..} = do let metadataObj :: MetadataObjId metadataObj = - MOSourceObjId clmpSource $ - AB.mkAnyBackend $ - SMOLogicalModel @b clmpName + MOSourceObjId clmpSource + $ AB.mkAnyBackend + $ SMOLogicalModel @b clmpName - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - logicalModelMetadataSetter @b clmpSource clmpName . lmmSelectPermissions - %~ InsOrdHashMap.insert (_pdRole clmpInfo) clmpInfo + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ logicalModelMetadataSetter @b clmpSource clmpName + . lmmSelectPermissions + %~ InsOrdHashMap.insert (_pdRole clmpInfo) clmpInfo pure successMsg @@ -306,23 +308,27 @@ runDropSelectLogicalModelPermission DropLogicalModelPermission {..} = do let metadataObj :: MetadataObjId metadataObj = - MOSourceObjId dlmpSource $ - AB.mkAnyBackend $ - SMOLogicalModel @b dlmpName + MOSourceObjId dlmpSource + $ AB.mkAnyBackend + $ SMOLogicalModel @b dlmpName - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - logicalModelMetadataSetter @b dlmpSource dlmpName . lmmSelectPermissions - %~ InsOrdHashMap.delete dlmpRole + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ logicalModelMetadataSetter @b dlmpSource dlmpName + . lmmSelectPermissions + %~ InsOrdHashMap.delete dlmpRole pure successMsg -- | TODO: should this cascade and also delete associated permissions? -dropLogicalModelInMetadata :: forall b. BackendMetadata b => SourceName -> LogicalModelName -> MetadataModifier +dropLogicalModelInMetadata :: forall b. (BackendMetadata b) => SourceName -> LogicalModelName -> MetadataModifier dropLogicalModelInMetadata source name = do - MetadataModifier $ - metaSources . ix source . toSourceMetadata @b . smLogicalModels - %~ InsOrdHashMap.delete name + MetadataModifier + $ metaSources + . ix source + . toSourceMetadata @b + . smLogicalModels + %~ InsOrdHashMap.delete name -- | Check whether a logical model with the given root field name exists for -- the given source. 
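Everything above — WebSocket server, HTTP.hs, Logging.hs, LogicalModel/API.hs — is the same mechanical restyling: a `$` (or `.`) that used to trail the end of a line moves to the head of the continuation line, with no semantic change, consistent with a formatter (Ormolu) version bump rather than hand edits. A minimal compilable sketch of the before/after shape; the `Logger`/`pushLog` names here are illustrative stand-ins, not code from this patch:

import Control.Monad (when)

data Logger = Logger {isEnabled :: Bool, pushLog :: String -> IO ()}

-- Old layout: operators trail each line.
logMsgOld :: Logger -> String -> IO ()
logMsgOld logger msg =
  when (isEnabled logger) $
    pushLog logger $
      "msg: " <> msg

-- New layout, as applied throughout this patch: each continuation
-- line leads with its operator, all at the same indent.
logMsgNew :: Logger -> String -> IO ()
logMsgNew logger msg =
  when (isEnabled logger)
    $ pushLog logger
    $ "msg: " <> msg

Both definitions are equivalent; only the placement of `$` differs, which is why these hunks touch nearly every multi-line application chain without changing behaviour.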
diff --git a/server/src-lib/Hasura/LogicalModel/Common.hs b/server/src-lib/Hasura/LogicalModel/Common.hs index 2a6e79c4b77..bc83923eaee 100644 --- a/server/src-lib/Hasura/LogicalModel/Common.hs +++ b/server/src-lib/Hasura/LogicalModel/Common.hs @@ -49,18 +49,19 @@ traverseWithIndex f = zipWithM f [0 ..] logicalModelToColumnInfo :: forall b. (Backend b) => Int -> (Column b, NullableScalarType b) -> Maybe (StructuredColumnInfo b) logicalModelToColumnInfo i (column, NullableScalarType {..}) = do name <- G.mkName (toTxt column) - pure $ + pure + $ -- TODO(dmoverton): handle object and array columns - SCIScalarColumn $ - ColumnInfo - { ciColumn = column, - ciName = name, - ciPosition = i, - ciType = ColumnScalar nstType, - ciIsNullable = nstNullable, - ciDescription = G.Description <$> nstDescription, - ciMutability = ColumnMutability {_cmIsInsertable = False, _cmIsUpdatable = False} - } + SCIScalarColumn + $ ColumnInfo + { ciColumn = column, + ciName = name, + ciPosition = i, + ciType = ColumnScalar nstType, + ciIsNullable = nstNullable, + ciDescription = G.Description <$> nstDescription, + ciMutability = ColumnMutability {_cmIsInsertable = False, _cmIsUpdatable = False} + } logicalModelFieldsToFieldInfo :: forall b. diff --git a/server/src-lib/Hasura/LogicalModel/Metadata.hs b/server/src-lib/Hasura/LogicalModel/Metadata.hs index 63aec06be51..b07fd121d58 100644 --- a/server/src-lib/Hasura/LogicalModel/Metadata.hs +++ b/server/src-lib/Hasura/LogicalModel/Metadata.hs @@ -45,14 +45,14 @@ instance (Backend b) => HasCodec (LogicalModelMetadata b) where ("A return type.") $ AC.object (backendPrefix @b <> "LogicalModelMetadata") $ LogicalModelMetadata - <$> AC.requiredField "name" nameDoc - AC..= _lmmName + <$> AC.requiredField "name" nameDoc + AC..= _lmmName <*> AC.requiredFieldWith "fields" logicalModelFieldMapCodec fieldsDoc - AC..= _lmmFields + AC..= _lmmFields <*> AC.optionalField "description" descriptionDoc - AC..= _lmmDescription + AC..= _lmmDescription <*> optSortedList "select_permissions" _pdRole - AC..= _lmmSelectPermissions + AC..= _lmmSelectPermissions where nameDoc = "A name for a logical model" fieldsDoc = "Return types for the logical model" @@ -64,12 +64,12 @@ instance (Backend b) => HasCodec (LogicalModelMetadata b) where deriving via (Autodocodec (LogicalModelMetadata b)) instance - Backend b => FromJSON (LogicalModelMetadata b) + (Backend b) => FromJSON (LogicalModelMetadata b) deriving via (Autodocodec (LogicalModelMetadata b)) instance - Backend b => ToJSON (LogicalModelMetadata b) + (Backend b) => ToJSON (LogicalModelMetadata b) deriving stock instance (Backend b) => Eq (LogicalModelMetadata b) diff --git a/server/src-lib/Hasura/LogicalModel/NullableScalarType.hs b/server/src-lib/Hasura/LogicalModel/NullableScalarType.hs index 98828896693..cd2f8af6a62 100644 --- a/server/src-lib/Hasura/LogicalModel/NullableScalarType.hs +++ b/server/src-lib/Hasura/LogicalModel/NullableScalarType.hs @@ -32,11 +32,11 @@ instance (Backend b) => HasObjectCodec (NullableScalarType b) where objectCodec = NullableScalarType <$> AC.requiredField "type" columnDoc - AC..= nstType - <*> AC.optionalFieldWithDefault "nullable" False nullableDoc - AC..= nstNullable - <*> AC.optionalField "description" descriptionDoc - AC..= nstDescription + AC..= nstType + <*> AC.optionalFieldWithDefault "nullable" False nullableDoc + AC..= nstNullable + <*> AC.optionalField "description" descriptionDoc + AC..= nstDescription where columnDoc = "The base scalar type" nullableDoc = "Whether the type is nullable" @@ -45,7 
+45,7 @@ instance (Backend b) => HasObjectCodec (NullableScalarType b) where deriving via (Autodocodec (NullableScalarType b)) instance - Backend b => ToJSON (NullableScalarType b) + (Backend b) => ToJSON (NullableScalarType b) deriving stock instance (Backend b) => Eq (NullableScalarType b) @@ -89,7 +89,7 @@ nullableScalarTypeMapCodec = ) ( fmap (\(fld, nst) -> MergedObject (NameField fld) nst) . InsOrdHashMap.toList ) - ( AC.listCodec $ - AC.object "NullableScalarType" $ - AC.objectCodec @(MergedObject (NameField (Column b)) (NullableScalarType b)) + ( AC.listCodec + $ AC.object "NullableScalarType" + $ AC.objectCodec @(MergedObject (NameField (Column b)) (NullableScalarType b)) ) diff --git a/server/src-lib/Hasura/LogicalModel/Types.hs b/server/src-lib/Hasura/LogicalModel/Types.hs index 4063e1d4281..c53a9111b6d 100644 --- a/server/src-lib/Hasura/LogicalModel/Types.hs +++ b/server/src-lib/Hasura/LogicalModel/Types.hs @@ -61,10 +61,10 @@ instance (Backend b) => HasCodec (LogicalModelTypeScalar b) where ("A scalar type used in a Logical Model.") $ AC.object (backendPrefix @b <> "LogicalModelTypeScalar") $ LogicalModelTypeScalarC - <$> AC.requiredField "scalar" scalarDoc - AC..= lmtsScalar + <$> AC.requiredField "scalar" scalarDoc + AC..= lmtsScalar <*> AC.optionalFieldWithDefault "nullable" False nullableDoc - AC..= lmtsNullable + AC..= lmtsNullable where scalarDoc = "Name of the scalar type" nullableDoc = "Whether this field is allowed to contain null values or not" @@ -101,10 +101,10 @@ instance (Backend b) => HasCodec (LogicalModelTypeArray b) where ("An array type used in a Logical Model.") $ AC.object (backendPrefix @b <> "LogicalModelTypeArray") $ LogicalModelTypeArrayC - <$> AC.requiredField "array" arrayDoc - AC..= lmtaArray + <$> AC.requiredField "array" arrayDoc + AC..= lmtaArray <*> AC.optionalFieldWithDefault "nullable" False nullableDoc - AC..= lmtaNullable + AC..= lmtaNullable where arrayDoc = "Type of items inside array" nullableDoc = "Whether this field can be null or not" @@ -134,10 +134,10 @@ instance HasCodec LogicalModelTypeReference where ("A reference to another Logical Model.") $ AC.object "LogicalModelTypeReference" $ LogicalModelTypeReferenceC - <$> AC.requiredField "logical_model" referenceDoc - AC..= lmtrReference + <$> AC.requiredField "logical_model" referenceDoc + AC..= lmtrReference <*> AC.optionalFieldWithDefault "nullable" False nullableDoc - AC..= lmtrNullable + AC..= lmtrNullable where referenceDoc = "Name of another Logical Model to nest" nullableDoc = "Whether this field can be null or not" @@ -248,14 +248,14 @@ instance (Backend b) => HasCodec (LogicalModelField b) where ("A field of a logical model") $ AC.object (backendPrefix @b <> "LogicalModelField") $ LogicalModelFieldSimple - <$> AC.requiredField "name" nameDoc - AC..= lmfsName + <$> AC.requiredField "name" nameDoc + AC..= lmfsName <*> AC.requiredField "type" typeDoc - AC..= lmfsScalar + AC..= lmfsScalar <*> AC.optionalFieldWithDefault "nullable" False nullableDoc - AC..= lmfsNullable + AC..= lmfsNullable <*> AC.optionalField "description" descriptionDoc - AC..= lmfsDescription + AC..= lmfsDescription where nameDoc = "Name of the field" nullableDoc = "Is field nullable or not?" 
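The codec hunks above also re-indent Autodocodec chains so that each `AC..=` lands on its own line, indented under the field combinator it pairs with. A small compilable sketch of the resulting shape, using the same `object`/`requiredField`/`.=` combinators these files already use; the `Widget` type and its fields are invented for illustration:

{-# LANGUAGE OverloadedStrings #-}

import Autodocodec
import Data.Text (Text)

data Widget = Widget {widgetName :: Text, widgetCount :: Int}

instance HasCodec Widget where
  codec =
    object "Widget"
      $ Widget
      <$> requiredField "name" "display name of the widget"
        .= widgetName
      <*> optionalFieldWithDefault "count" 0 "how many widgets"
        .= widgetCount

As in the patch, the change is indentation only: the codec built, and the JSON it accepts and produces, are identical before and after.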
@@ -269,12 +269,12 @@ instance (Backend b) => HasCodec (LogicalModelField b) where ("A field of a logical model") $ AC.object (backendPrefix @b <> "LogicalModelField") $ LogicalModelField - <$> AC.requiredField "name" nameDoc - AC..= lmfName + <$> AC.requiredField "name" nameDoc + AC..= lmfName <*> AC.requiredField "type" typeDoc - AC..= lmfType + AC..= lmfType <*> AC.optionalField "description" descriptionDoc - AC..= lmfDescription + AC..= lmfDescription where nameDoc = "Name of the field" typeDoc = "Type of the field" diff --git a/server/src-lib/Hasura/LogicalModelResolver/Codec.hs b/server/src-lib/Hasura/LogicalModelResolver/Codec.hs index 1a49916faec..ced3a404fd3 100644 --- a/server/src-lib/Hasura/LogicalModelResolver/Codec.hs +++ b/server/src-lib/Hasura/LogicalModelResolver/Codec.hs @@ -31,9 +31,9 @@ nativeQueryRelationshipsCodec = ) ( fmap (\(fld, nst) -> MergedObject (NameField fld) nst) . InsOrdHashMap.toList ) - ( AC.listCodec $ - AC.object "RelDefRelManualNativeQueryConfig" $ - AC.objectCodec @(MergedObject (NameField RelName) (RelDef (RelManualNativeQueryConfig b))) + ( AC.listCodec + $ AC.object "RelDefRelManualNativeQueryConfig" + $ AC.objectCodec @(MergedObject (NameField RelName) (RelDef (RelManualNativeQueryConfig b))) ) data MergedObject a b = MergedObject diff --git a/server/src-lib/Hasura/LogicalModelResolver/Schema.hs b/server/src-lib/Hasura/LogicalModelResolver/Schema.hs index 52984272fce..eca5e2874a6 100644 --- a/server/src-lib/Hasura/LogicalModelResolver/Schema.hs +++ b/server/src-lib/Hasura/LogicalModelResolver/Schema.hs @@ -24,7 +24,7 @@ import Language.GraphQL.Draft.Syntax.QQ qualified as G -- | Schema parser for native query or stored procedure arguments. argumentsSchema :: forall b r m n. - MonadBuildSchema b r m n => + (MonadBuildSchema b r m n) => -- | The resolver description, such as "Stored Procedure" or "Native Query". Text -> -- | The resolver name. @@ -37,8 +37,8 @@ argumentsSchema resolverDesc resolverName argsSignature = do -- This lets us use 'foldMap' + monoid structure of hashmaps to avoid awkwardly -- traversing the arguments and building the resulting parser. argsParser <- - getAp $ - foldMap + getAp + $ foldMap ( \(name, NullableScalarType {nstType, nstNullable, nstDescription}) -> Ap do argValueParser <- fmap (HashMap.singleton name . openValueOrigin) @@ -49,8 +49,8 @@ argumentsSchema resolverDesc resolverName argsSignature = do let description = case nstDescription of Just desc -> G.Description desc Nothing -> G.Description (resolverDesc <> " argument " <> getArgumentName name) - pure $ - P.field + pure + $ P.field argName (Just description) argValueParser @@ -59,8 +59,8 @@ argumentsSchema resolverDesc resolverName argsSignature = do let desc = Just $ G.Description $ G.unName resolverName <> resolverDesc <> " Arguments" - pure $ - if null argsSignature + pure + $ if null argsSignature then mempty else P.field diff --git a/server/src-lib/Hasura/Metadata/Class.hs b/server/src-lib/Hasura/Metadata/Class.hs index 27944062af3..7c713b1d30c 100644 --- a/server/src-lib/Hasura/Metadata/Class.hs +++ b/server/src-lib/Hasura/Metadata/Class.hs @@ -93,7 +93,7 @@ TODO: Reference to open issue or rfc? -- It is believed that all the above three are implemented in a single storage -- system (ex: a Postgres database). We can split the functions into appropriate and -- specific type classes in future iterations if required. 
-class Monad m => MonadMetadataStorage m where +class (Monad m) => MonadMetadataStorage m where -- Metadata fetchMetadataResourceVersion :: m (Either QErr MetadataResourceVersion) fetchMetadata :: m (Either QErr MetadataWithResourceVersion) @@ -200,48 +200,49 @@ deriving via (TransT MetadataT m) instance (MonadMetadataStorage m) => MonadMeta deriving via (TransT ManagedT m) instance (MonadMetadataStorage m) => MonadMetadataStorage (ManagedT m) -- | Record a one-off event -createOneOffScheduledEvent :: MonadMetadataStorage m => OneOffEvent -> m (Either QErr EventId) +createOneOffScheduledEvent :: (MonadMetadataStorage m) => OneOffEvent -> m (Either QErr EventId) createOneOffScheduledEvent = insertOneOffScheduledEvent -- | Record a cron event -createCronEvents :: MonadMetadataStorage m => [CronEventSeed] -> m (Either QErr ()) +createCronEvents :: (MonadMetadataStorage m) => [CronEventSeed] -> m (Either QErr ()) createCronEvents = insertCronEvents -- | Clear cron events -dropFutureCronEvents :: MonadMetadataStorage m => ClearCronEvents -> m (Either QErr ()) +dropFutureCronEvents :: (MonadMetadataStorage m) => ClearCronEvents -> m (Either QErr ()) dropFutureCronEvents = clearFutureCronEvents -- | Delete async action logs -deleteActionData :: MonadMetadataStorage m => ActionName -> m (Either QErr ()) +deleteActionData :: (MonadMetadataStorage m) => ActionName -> m (Either QErr ()) deleteActionData = clearActionData -- | Fetch cron/oneoff scheduled event invocations fetchScheduledEventInvocations :: - MonadMetadataStorage m => + (MonadMetadataStorage m) => GetScheduledEventInvocations -> m (Either QErr (WithOptionalTotalCount [ScheduledEventInvocation])) fetchScheduledEventInvocations = getScheduledEventInvocations -- | Fetch cron/oneoff scheduled events -fetchScheduledEvents :: MonadMetadataStorage m => GetScheduledEvents -> m (Either QErr Value) +fetchScheduledEvents :: (MonadMetadataStorage m) => GetScheduledEvents -> m (Either QErr Value) fetchScheduledEvents GetScheduledEvents {..} = do let totalCountToJSON WithOptionalTotalCount {..} = - object $ - ("events" .= _wtcData) : (maybe mempty (\count -> ["count" .= count]) _wtcCount) + object + $ ("events" .= _wtcData) + : (maybe mempty (\count -> ["count" .= count]) _wtcCount) case _gseScheduledEvent of SEOneOff -> (fmap . fmap) totalCountToJSON $ getOneOffScheduledEvents _gsePagination _gseStatus _gseGetRowsCount SECron name -> (fmap . fmap) totalCountToJSON $ getCronEvents name _gsePagination _gseStatus _gseGetRowsCount -- | Drop a cron/oneoff scheduled event -dropEvent :: MonadMetadataStorage m => ScheduledEventId -> ScheduledEventType -> m (Either QErr ()) +dropEvent :: (MonadMetadataStorage m) => ScheduledEventId -> ScheduledEventType -> m (Either QErr ()) dropEvent = deleteScheduledEvent -- | Retrieve the state from metadata storage catalog -fetchCatalogState :: MonadMetadataStorage m => m (Either QErr CatalogState) +fetchCatalogState :: (MonadMetadataStorage m) => m (Either QErr CatalogState) fetchCatalogState = getCatalogState -- | Update the state from metadata storage catalog -updateCatalogState :: MonadMetadataStorage m => CatalogStateType -> Value -> m (Either QErr ()) +updateCatalogState :: (MonadMetadataStorage m) => CatalogStateType -> Value -> m (Either QErr ()) updateCatalogState = setCatalogState -- | Metadata database operations for EE credentials storage. 
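The other recurring change, visible throughout Metadata/Class.hs above, wraps single-constraint contexts in parentheses — `Monad m =>` becomes `(Monad m) =>` — uniformly across standalone signatures, `deriving instance` heads, and class superclasses. A minimal sketch with invented names; both forms are accepted by GHC, and the parenthesized one is simply what the updated formatter normalizes to:

import Data.Char (toUpper)

-- Pre-patch style: bare single constraint.
shoutOld :: Show a => a -> String
shoutOld = map toUpper . show

-- Post-patch style: the single constraint is parenthesized,
-- matching multi-constraint contexts like (Eq a, Show a) =>.
shoutNew :: (Show a) => a -> String
shoutNew = map toUpper . show

Parenthesizing even lone constraints keeps every context visually uniform, so adding a second constraint later is a one-token diff instead of a reparenthesization.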
diff --git a/server/src-lib/Hasura/Metadata/DTO/Metadata.hs b/server/src-lib/Hasura/Metadata/DTO/Metadata.hs index 3484c527005..fef709bf7e9 100644 --- a/server/src-lib/Hasura/Metadata/DTO/Metadata.hs +++ b/server/src-lib/Hasura/Metadata/DTO/Metadata.hs @@ -39,15 +39,15 @@ data MetadataDTO = V1 MetadataV1 | V2 MetadataV2 | V3 MetadataV3 -- of allowed schemas. instance HasCodec MetadataDTO where codec = - named "Metadata" $ - dimapCodec decode encode $ - disjointEitherCodec - (codec @MetadataV1) - ( disjointEitherCodec - (codec @MetadataV2) - (codec @MetadataV3) - ) - "configuration format for the Hasura GraphQL Engine" + named "Metadata" + $ dimapCodec decode encode + $ disjointEitherCodec + (codec @MetadataV1) + ( disjointEitherCodec + (codec @MetadataV2) + (codec @MetadataV3) + ) + "configuration format for the Hasura GraphQL Engine" where decode = either V1 $ either V2 V3 encode = \case diff --git a/server/src-lib/Hasura/Metadata/DTO/MetadataV1.hs b/server/src-lib/Hasura/Metadata/DTO/MetadataV1.hs index 91ac986037c..e31dceadd67 100644 --- a/server/src-lib/Hasura/Metadata/DTO/MetadataV1.hs +++ b/server/src-lib/Hasura/Metadata/DTO/MetadataV1.hs @@ -21,9 +21,12 @@ data MetadataV1 = MetadataV1 instance HasCodec MetadataV1 where codec = - object "MetadataV1" $ - MetadataV1 - <$ optionalVersionField 1 - <*> optionalField "functions" "user-defined SQL functions" .= metaV1Functions - <*> optionalField "remote_schemas" "merge remote GraphQL schemas and provide a unified GraphQL API" .= metaV1RemoteSchemas - <*> requiredField "tables" "configured database tables" .= metaV1Tables + object "MetadataV1" + $ MetadataV1 + <$ optionalVersionField 1 + <*> optionalField "functions" "user-defined SQL functions" + .= metaV1Functions + <*> optionalField "remote_schemas" "merge remote GraphQL schemas and provide a unified GraphQL API" + .= metaV1RemoteSchemas + <*> requiredField "tables" "configured database tables" + .= metaV1Tables diff --git a/server/src-lib/Hasura/Metadata/DTO/MetadataV2.hs b/server/src-lib/Hasura/Metadata/DTO/MetadataV2.hs index 06123aa2504..65005ae157a 100644 --- a/server/src-lib/Hasura/Metadata/DTO/MetadataV2.hs +++ b/server/src-lib/Hasura/Metadata/DTO/MetadataV2.hs @@ -26,14 +26,22 @@ data MetadataV2 = MetadataV2 instance HasCodec MetadataV2 where codec = - object "MetadataV2" $ - MetadataV2 - <$ versionField 2 - <*> optionalField "actions" "action definitions which extend Hasura's schema with custom business logic using custom queries and mutations" .= metaV2Actions - <*> optionalField "allowlist" "safe GraphQL operations - when allow lists are enabled only these operations are allowed" .= metaV2Allowlist - <*> optionalField "cron_triggers" "reliably trigger HTTP endpoints to run custom business logic periodically based on a cron schedule" .= metaV2CronTriggers - <*> optionalField "custom_types" "custom type definitions" .= metaV2CustomTypes - <*> optionalField "functions" "user-defined SQL functions" .= metaV2Functions - <*> optionalField "query_collections" "group queries using query collections" .= metaV2QueryCollections - <*> optionalField "remote_schemas" "merge remote GraphQL schemas and provide a unified GraphQL API" .= metaV2RemoteSchemas - <*> requiredField "tables" "configured database tables" .= metaV2Tables + object "MetadataV2" + $ MetadataV2 + <$ versionField 2 + <*> optionalField "actions" "action definitions which extend Hasura's schema with custom business logic using custom queries and mutations" + .= metaV2Actions + <*> optionalField "allowlist" "safe GraphQL 
operations - when allow lists are enabled only these operations are allowed" + .= metaV2Allowlist + <*> optionalField "cron_triggers" "reliably trigger HTTP endpoints to run custom business logic periodically based on a cron schedule" + .= metaV2CronTriggers + <*> optionalField "custom_types" "custom type definitions" + .= metaV2CustomTypes + <*> optionalField "functions" "user-defined SQL functions" + .= metaV2Functions + <*> optionalField "query_collections" "group queries using query collections" + .= metaV2QueryCollections + <*> optionalField "remote_schemas" "merge remote GraphQL schemas and provide a unified GraphQL API" + .= metaV2RemoteSchemas + <*> requiredField "tables" "configured database tables" + .= metaV2Tables diff --git a/server/src-lib/Hasura/Metadata/DTO/MetadataV3.hs b/server/src-lib/Hasura/Metadata/DTO/MetadataV3.hs index 4ca737fc149..7a6ba91d039 100644 --- a/server/src-lib/Hasura/Metadata/DTO/MetadataV3.hs +++ b/server/src-lib/Hasura/Metadata/DTO/MetadataV3.hs @@ -63,26 +63,40 @@ data MetadataV3 = MetadataV3 -- 'requiredField') appear in the generated specification for users' reference. instance HasCodec MetadataV3 where codec = - object "MetadataV3" $ - MetadataV3 - <$ versionField 3 - <*> requiredFieldWith "sources" sourcesCodec "configured databases" .= metaV3Sources - <*> optionalFieldWithOmittedDefaultWith "remote_schemas" (sortedElemsCodec _rsmName) [] "merge remote GraphQL schemas and provide a unified GraphQL API" .= metaV3RemoteSchemas + object "MetadataV3" + $ MetadataV3 + <$ versionField 3 + <*> requiredFieldWith "sources" sourcesCodec "configured databases" + .= metaV3Sources + <*> optionalFieldWithOmittedDefaultWith "remote_schemas" (sortedElemsCodec _rsmName) [] "merge remote GraphQL schemas and provide a unified GraphQL API" + .= metaV3RemoteSchemas <*> optionalFieldWithOmittedDefaultWith "query_collections" (sortedElemsCodec QC._ccName) mempty "group queries using query collections" - .= metaV3QueryCollections - <*> optionalFieldWithOmittedDefaultWith "allowlist" (sortedElemsCodec aeCollection) [] "safe GraphQL operations - when allow lists are enabled only these operations are allowed" .= metaV3Allowlist - <*> optionalFieldWithOmittedDefaultWith "actions" (sortedElemsCodec _amName) mempty "action definitions which extend Hasura's schema with custom business logic using custom queries and mutations" .= metaV3Actions - <*> optionalFieldWithOmittedDefault "custom_types" emptyCustomTypes "custom type definitions" .= metaV3CustomTypes - <*> optionalFieldWithOmittedDefaultWith "cron_triggers" (sortedElemsCodec ctName) [] "reliably trigger HTTP endpoints to run custom business logic periodically based on a cron schedule" .= metaV3CronTriggers - <*> optionalFieldWithOmittedDefaultWith "rest_endpoints" (sortedElemsCodec _ceName) [] "REST interfaces to saved GraphQL queries and mutations" .= metaV3RestEndpoints - <*> optionalFieldWithOmittedDefault "api_limits" emptyApiLimit "limts to depth and/or rate of API requests" .= metaV3ApiLimits - <*> optionalFieldWithOmittedDefault' "metrics_config" emptyMetricsConfig .= metaV3MetricsConfig - <*> optionalFieldWithOmittedDefaultWith "inherited_roles" (sortedElemsCodec _rRoleName) [] "an inherited role is a way to create a new role which inherits permissions from two or more roles" .= metaV3InheritedRoles - <*> optionalFieldWithOmittedDefault' "graphql_schema_introspection" mempty .= metaV3GraphqlSchemaIntrospection - <*> optionalFieldWithOmittedDefault' "network" emptyNetwork .= metaV3Network - <*> 
optionalFieldWithOmittedDefault' "backend_configs" mempty .= metaV3BackendConfigs - <*> optionalFieldWithOmittedDefault' "opentelemetry" emptyOpenTelemetryConfig .= metaV3OpenTelemetryConfig + .= metaV3QueryCollections + <*> optionalFieldWithOmittedDefaultWith "allowlist" (sortedElemsCodec aeCollection) [] "safe GraphQL operations - when allow lists are enabled only these operations are allowed" + .= metaV3Allowlist + <*> optionalFieldWithOmittedDefaultWith "actions" (sortedElemsCodec _amName) mempty "action definitions which extend Hasura's schema with custom business logic using custom queries and mutations" + .= metaV3Actions + <*> optionalFieldWithOmittedDefault "custom_types" emptyCustomTypes "custom type definitions" + .= metaV3CustomTypes + <*> optionalFieldWithOmittedDefaultWith "cron_triggers" (sortedElemsCodec ctName) [] "reliably trigger HTTP endpoints to run custom business logic periodically based on a cron schedule" + .= metaV3CronTriggers + <*> optionalFieldWithOmittedDefaultWith "rest_endpoints" (sortedElemsCodec _ceName) [] "REST interfaces to saved GraphQL queries and mutations" + .= metaV3RestEndpoints + <*> optionalFieldWithOmittedDefault "api_limits" emptyApiLimit "limts to depth and/or rate of API requests" + .= metaV3ApiLimits + <*> optionalFieldWithOmittedDefault' "metrics_config" emptyMetricsConfig + .= metaV3MetricsConfig + <*> optionalFieldWithOmittedDefaultWith "inherited_roles" (sortedElemsCodec _rRoleName) [] "an inherited role is a way to create a new role which inherits permissions from two or more roles" + .= metaV3InheritedRoles + <*> optionalFieldWithOmittedDefault' "graphql_schema_introspection" mempty + .= metaV3GraphqlSchemaIntrospection + <*> optionalFieldWithOmittedDefault' "network" emptyNetwork + .= metaV3Network + <*> optionalFieldWithOmittedDefault' "backend_configs" mempty + .= metaV3BackendConfigs + <*> optionalFieldWithOmittedDefault' "opentelemetry" emptyOpenTelemetryConfig + .= metaV3OpenTelemetryConfig diff --git a/server/src-lib/Hasura/NativeQuery/API.hs b/server/src-lib/Hasura/NativeQuery/API.hs index f789c684a2b..6348321979e 100644 --- a/server/src-lib/Hasura/NativeQuery/API.hs +++ b/server/src-lib/Hasura/NativeQuery/API.hs @@ -64,22 +64,22 @@ instance (Backend b) => HasCodec (TrackNativeQuery b) where ("A request to track a native query") $ AC.object (backendPrefix @b <> "TrackNativeQuery") $ TrackNativeQuery - <$> AC.requiredField "source" sourceDoc - AC..= tnqSource + <$> AC.requiredField "source" sourceDoc + AC..= tnqSource <*> AC.requiredField "root_field_name" rootFieldDoc - AC..= tnqRootFieldName + AC..= tnqRootFieldName <*> AC.requiredField "code" codeDoc - AC..= tnqCode + AC..= tnqCode <*> AC.optionalFieldWithDefault "arguments" mempty argumentsDoc - AC..= tnqArguments + AC..= tnqArguments <*> AC.optionalFieldWithDefaultWith "array_relationships" nativeQueryRelationshipsCodec mempty arrayRelationshipsDoc - AC..= tnqArrayRelationships + AC..= tnqArrayRelationships <*> AC.optionalFieldWithDefaultWith "object_relationships" nativeQueryRelationshipsCodec mempty objectRelationshipsDoc - AC..= tnqObjectRelationships + AC..= tnqObjectRelationships <*> AC.optionalField "description" descriptionDoc - AC..= tnqDescription + AC..= tnqDescription <*> AC.requiredField "returns" returnsDoc - AC..= tnqReturns + AC..= tnqReturns where arrayRelationshipsDoc = "Any relationships between an output value and multiple values in another data source" objectRelationshipsDoc = "Any relationships between an output value and a single value in another data 
source" @@ -141,16 +141,16 @@ data GetNativeQuery (b :: BackendType) = GetNativeQuery { gnqSource :: SourceName } -deriving instance Backend b => Show (GetNativeQuery b) +deriving instance (Backend b) => Show (GetNativeQuery b) -deriving instance Backend b => Eq (GetNativeQuery b) +deriving instance (Backend b) => Eq (GetNativeQuery b) -instance Backend b => FromJSON (GetNativeQuery b) where +instance (Backend b) => FromJSON (GetNativeQuery b) where parseJSON = withObject "GetNativeQuery" $ \o -> do gnqSource <- o .: "source" pure GetNativeQuery {..} -instance Backend b => ToJSON (GetNativeQuery b) where +instance (Backend b) => ToJSON (GetNativeQuery b) where toJSON GetNativeQuery {..} = object [ "source" .= gnqSource @@ -196,12 +196,12 @@ runTrackNativeQuery env trackNativeQueryRequest = do sourceMetadata <- maybe - ( throw400 NotFound $ - "Source '" - <> sourceNameToText source - <> "' of kind " - <> toTxt (reify (backendTag @b)) - <> " not found." + ( throw400 NotFound + $ "Source '" + <> sourceNameToText source + <> "' of kind " + <> toTxt (reify (backendTag @b)) + <> " not found." ) pure . preview (metaSources . ix source . toSourceMetadata @b) @@ -213,18 +213,18 @@ runTrackNativeQuery env trackNativeQueryRequest = do let fieldName = _nqmRootFieldName metadata metadataObj = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMONativeQuery @b fieldName + MOSourceObjId source + $ AB.mkAnyBackend + $ SMONativeQuery @b fieldName existingNativeQueries = InsOrdHashMap.keys (_smNativeQueries sourceMetadata) when (fieldName `elem` existingNativeQueries) do throw400 AlreadyTracked $ "Native query '" <> toTxt fieldName <> "' is already tracked." - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - (metaSources . ix source . toSourceMetadata @b . smNativeQueries) - %~ InsOrdHashMap.insert fieldName metadata + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ (metaSources . ix source . toSourceMetadata @b . smNativeQueries) + %~ InsOrdHashMap.insert fieldName metadata pure successMsg where @@ -269,23 +269,26 @@ runUntrackNativeQuery q = do assertNativeQueryExists @b source fieldName let metadataObj = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMONativeQuery @b fieldName + MOSourceObjId source + $ AB.mkAnyBackend + $ SMONativeQuery @b fieldName - buildSchemaCacheFor metadataObj $ - dropNativeQueryInMetadata @b source fieldName + buildSchemaCacheFor metadataObj + $ dropNativeQueryInMetadata @b source fieldName pure successMsg where source = utnqSource q fieldName = utnqRootFieldName q -dropNativeQueryInMetadata :: forall b. BackendMetadata b => SourceName -> NativeQueryName -> MetadataModifier +dropNativeQueryInMetadata :: forall b. (BackendMetadata b) => SourceName -> NativeQueryName -> MetadataModifier dropNativeQueryInMetadata source rootFieldName = do - MetadataModifier $ - metaSources . ix source . toSourceMetadata @b . smNativeQueries - %~ InsOrdHashMap.delete rootFieldName + MetadataModifier + $ metaSources + . ix source + . toSourceMetadata @b + . 
smNativeQueries + %~ InsOrdHashMap.delete rootFieldName -- | check feature flag is enabled before carrying out any actions throwIfFeatureDisabled :: (HasFeatureFlagChecker m, MonadError QErr m) => m () diff --git a/server/src-lib/Hasura/NativeQuery/InterpolatedQuery.hs b/server/src-lib/Hasura/NativeQuery/InterpolatedQuery.hs index d7bd8ab9846..4b013a6dee7 100644 --- a/server/src-lib/Hasura/NativeQuery/InterpolatedQuery.hs +++ b/server/src-lib/Hasura/NativeQuery/InterpolatedQuery.hs @@ -66,7 +66,7 @@ ppInterpolatedQuery (InterpolatedQuery parts) = foldMap ppInterpolatedItem parts -- | We store the interpolated query as the user text and parse it back -- when converting back to Haskell code. -instance v ~ ArgumentName => HasCodec (InterpolatedQuery v) where +instance (v ~ ArgumentName) => HasCodec (InterpolatedQuery v) where codec = CommentCodec ("An interpolated query expressed in native code (SQL)") @@ -78,7 +78,7 @@ instance v ~ ArgumentName => HasCodec (InterpolatedQuery v) where deriving via (Autodocodec (InterpolatedQuery ArgumentName)) instance - v ~ ArgumentName => + (v ~ ArgumentName) => ToJSON (InterpolatedQuery v) --------------------------------------- diff --git a/server/src-lib/Hasura/NativeQuery/Metadata.hs b/server/src-lib/Hasura/NativeQuery/Metadata.hs index 6c9bd2a4ab3..85a354452d9 100644 --- a/server/src-lib/Hasura/NativeQuery/Metadata.hs +++ b/server/src-lib/Hasura/NativeQuery/Metadata.hs @@ -56,9 +56,9 @@ data NativeQueryMetadata (b :: BackendType) = NativeQueryMetadata } deriving (Generic) -deriving instance Backend b => Eq (NativeQueryMetadata b) +deriving instance (Backend b) => Eq (NativeQueryMetadata b) -deriving instance Backend b => Show (NativeQueryMetadata b) +deriving instance (Backend b) => Show (NativeQueryMetadata b) instance (Backend b) => HasCodec (NativeQueryMetadata b) where codec = @@ -66,20 +66,20 @@ instance (Backend b) => HasCodec (NativeQueryMetadata b) where ("A native query as represented in metadata.") $ AC.object (backendPrefix @b <> "NativeQueryMetadata") $ NativeQueryMetadata - <$> requiredField "root_field_name" fieldNameDoc - AC..= _nqmRootFieldName + <$> requiredField "root_field_name" fieldNameDoc + AC..= _nqmRootFieldName <*> requiredField "code" sqlDoc - AC..= _nqmCode + AC..= _nqmCode <*> requiredField "returns" returnsDoc - AC..= _nqmReturns + AC..= _nqmReturns <*> optionalFieldWithDefault "arguments" mempty argumentDoc - AC..= _nqmArguments + AC..= _nqmArguments <*> optSortedList "array_relationships" _rdName - AC..= _nqmArrayRelationships + AC..= _nqmArrayRelationships <*> optSortedList "object_relationships" _rdName - AC..= _nqmObjectRelationships + AC..= _nqmObjectRelationships <*> optionalField "description" descriptionDoc - AC..= _nqmDescription + AC..= _nqmDescription where fieldNameDoc = "Root field name for the native query" sqlDoc = "Native code expression (SQL) to run" diff --git a/server/src-lib/Hasura/NativeQuery/Schema.hs b/server/src-lib/Hasura/NativeQuery/Schema.hs index 4f5be607135..7ad80536374 100644 --- a/server/src-lib/Hasura/NativeQuery/Schema.hs +++ b/server/src-lib/Hasura/NativeQuery/Schema.hs @@ -64,8 +64,9 @@ defaultSelectNativeQueryObject NativeQueryInfo {..} fieldName description = runM let sourceName = _siName sourceInfo logicalModelPermissions <- - MaybeT . fmap Just $ - buildLogicalModelPermissions @b @r @m @n _nqiReturns + MaybeT + . 
fmap Just + $ buildLogicalModelPermissions @b @r @m @n _nqiReturns selectionSetParser <- MaybeT $ logicalModelSelectionSet _nqiRelationships _nqiReturns @@ -75,24 +76,24 @@ defaultSelectNativeQueryObject NativeQueryInfo {..} fieldName description = runM sourceName (mkAnyBackend $ MO.SMONativeQuery @b _nqiRootFieldName) - pure $ - P.setFieldParserOrigin sourceObj $ - P.subselection - fieldName - description - nativeQueryArgsParser - selectionSetParser - <&> \(nqArgs, fields) -> - IR.AnnObjectSelectG - fields - ( IR.FromNativeQuery - NativeQuery - { nqRootFieldName = _nqiRootFieldName, - nqInterpolatedQuery = interpolatedQuery _nqiCode nqArgs, - nqLogicalModel = buildLogicalModelIR _nqiReturns - } - ) - (IR._tpFilter logicalModelPermissions) + pure + $ P.setFieldParserOrigin sourceObj + $ P.subselection + fieldName + description + nativeQueryArgsParser + selectionSetParser + <&> \(nqArgs, fields) -> + IR.AnnObjectSelectG + fields + ( IR.FromNativeQuery + NativeQuery + { nqRootFieldName = _nqiRootFieldName, + nqInterpolatedQuery = interpolatedQuery _nqiCode nqArgs, + nqLogicalModel = buildLogicalModelIR _nqiReturns + } + ) + (IR._tpFilter logicalModelPermissions) -- | select a native query - implementation is the same for root fields and -- array relationships @@ -123,8 +124,9 @@ defaultSelectNativeQuery NativeQueryInfo {..} fieldName description = runMaybeT stringifyNumbers <- retrieve Options.soStringifyNumbers logicalModelPermissions <- - MaybeT . fmap Just $ - buildLogicalModelPermissions @b @r @m @n _nqiReturns + MaybeT + . fmap Just + $ buildLogicalModelPermissions @b @r @m @n _nqiReturns (selectionListParser, logicalModelsArgsParser) <- MaybeT $ buildLogicalModelFields _nqiRelationships _nqiReturns @@ -134,31 +136,31 @@ defaultSelectNativeQuery NativeQueryInfo {..} fieldName description = runMaybeT sourceName (mkAnyBackend $ MO.SMONativeQuery @b _nqiRootFieldName) - pure $ - P.setFieldParserOrigin sourceObj $ - P.subselection - fieldName - description - ( (,) - <$> logicalModelsArgsParser - <*> nativeQueryArgsParser - ) - selectionListParser - <&> \((lmArgs, nqArgs), fields) -> - IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = - IR.FromNativeQuery - NativeQuery - { nqRootFieldName = _nqiRootFieldName, - nqInterpolatedQuery = interpolatedQuery _nqiCode nqArgs, - nqLogicalModel = buildLogicalModelIR _nqiReturns - }, - IR._asnPerm = logicalModelPermissions, - IR._asnArgs = lmArgs, - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Just tCase - } + pure + $ P.setFieldParserOrigin sourceObj + $ P.subselection + fieldName + description + ( (,) + <$> logicalModelsArgsParser + <*> nativeQueryArgsParser + ) + selectionListParser + <&> \((lmArgs, nqArgs), fields) -> + IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = + IR.FromNativeQuery + NativeQuery + { nqRootFieldName = _nqiRootFieldName, + nqInterpolatedQuery = interpolatedQuery _nqiCode nqArgs, + nqLogicalModel = buildLogicalModelIR _nqiReturns + }, + IR._asnPerm = logicalModelPermissions, + IR._asnArgs = lmArgs, + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Just tCase + } defaultBuildNativeQueryRootFields :: forall b r m n. @@ -177,7 +179,7 @@ defaultBuildNativeQueryRootFields nqi@NativeQueryInfo {..} = do nativeQueryArgumentsSchema :: forall b r m n. 
- MonadBuildSchema b r m n => + (MonadBuildSchema b r m n) => G.Name -> HashMap ArgumentName (NullableScalarType b) -> MaybeT (SchemaT r m) (P.InputFieldsParser n (HashMap ArgumentName (Column.ColumnValue b))) @@ -189,8 +191,8 @@ interpolatedQuery :: HashMap ArgumentName (Column.ColumnValue b) -> InterpolatedQuery (UnpreparedValue b) interpolatedQuery nqiCode nqArgs = - InterpolatedQuery $ - (fmap . fmap) + InterpolatedQuery + $ (fmap . fmap) ( \var@(ArgumentName name) -> case HashMap.lookup var nqArgs of Just arg -> UVParameter (FromInternal name) arg Nothing -> diff --git a/server/src-lib/Hasura/QueryTags.hs b/server/src-lib/Hasura/QueryTags.hs index 997dc0c71be..6c28a82b150 100644 --- a/server/src-lib/Hasura/QueryTags.hs +++ b/server/src-lib/Hasura/QueryTags.hs @@ -118,6 +118,6 @@ class (Monad m) => MonadQueryTags m where default createQueryTags :: forall t n. (m ~ t n, MonadQueryTags n) => QueryTagsAttributes -> Maybe QueryTagsConfig -> Tagged m QueryTagsComment createQueryTags qtSourceConfig attr = retag (createQueryTags @n qtSourceConfig attr) :: Tagged (t n) QueryTagsComment -instance MonadQueryTags m => MonadQueryTags (ReaderT r m) +instance (MonadQueryTags m) => MonadQueryTags (ReaderT r m) -instance MonadQueryTags m => MonadQueryTags (ExceptT e m) +instance (MonadQueryTags m) => MonadQueryTags (ExceptT e m) diff --git a/server/src-lib/Hasura/QueryTags/Types.hs b/server/src-lib/Hasura/QueryTags/Types.hs index 8004490f623..92093a4dcbe 100644 --- a/server/src-lib/Hasura/QueryTags/Types.hs +++ b/server/src-lib/Hasura/QueryTags/Types.hs @@ -31,8 +31,8 @@ instance ToJSON QueryTagsFormat where SQLCommenter -> "sqlcommenter" instance FromJSON QueryTagsFormat where - parseJSON = withText "QueryTagsFormat" $ - \t -> case T.toLower t of + parseJSON = withText "QueryTagsFormat" + $ \t -> case T.toLower t of "standard" -> pure Standard "sqlcommenter" -> pure SQLCommenter _ -> fail errMsg @@ -44,12 +44,12 @@ instance FromJSON QueryTagsFormat where -- affecting the API. instance HasCodec QueryTagsFormat where codec = - named "QueryTagsFormat" $ - stringConstCodec $ - NonEmpty.fromList $ - [ (Standard, "standard"), - (SQLCommenter, "sqlcommenter") - ] + named "QueryTagsFormat" + $ stringConstCodec + $ NonEmpty.fromList + $ [ (Standard, "standard"), + (SQLCommenter, "sqlcommenter") + ] -- | QueryTagsConfig is the configuration created by the users to control query tags -- @@ -92,17 +92,26 @@ $(J.deriveToJSON (J.aesonDrop 4 J.snakeCase) ''QueryTagsConfig) instance FromJSON QueryTagsConfig where parseJSON = withObject "QueryTagsConfig" $ \o -> QueryTagsConfig - <$> o .:? "disabled" .!= False - <*> o .:? "format" .!= Standard - <*> o .:? "omit_request_id" .!= True + <$> o + .:? "disabled" + .!= False + <*> o + .:? "format" + .!= Standard + <*> o + .:? 
"omit_request_id" + .!= True instance HasCodec QueryTagsConfig where codec = - AC.object "QueryTagsConfig" $ - QueryTagsConfig - <$> optionalFieldWithDefault' "disabled" False .== _qtcDisabled - <*> optionalFieldWithDefault' "format" Standard .== _qtcFormat - <*> optionalFieldWithDefault' "omit_request_id" True .== _qtcOmitRequestId + AC.object "QueryTagsConfig" + $ QueryTagsConfig + <$> optionalFieldWithDefault' "disabled" False + .== _qtcDisabled + <*> optionalFieldWithDefault' "format" Standard + .== _qtcFormat + <*> optionalFieldWithDefault' "omit_request_id" True + .== _qtcOmitRequestId where (.==) = (AC..=) diff --git a/server/src-lib/Hasura/RQL/DDL/Action.hs b/server/src-lib/Hasura/RQL/DDL/Action.hs index 121c0dbe89d..5245d9f8b85 100644 --- a/server/src-lib/Hasura/RQL/DDL/Action.hs +++ b/server/src-lib/Hasura/RQL/DDL/Action.hs @@ -46,9 +46,11 @@ getActionInfo :: m ActionInfo getActionInfo actionName = do actionMap <- scActions <$> askSchemaCache - onNothing (HashMap.lookup actionName actionMap) $ - throw400 NotExists $ - "action with name " <> actionName <<> " does not exist" + onNothing (HashMap.lookup actionName actionMap) + $ throw400 NotExists + $ "action with name " + <> actionName + <<> " does not exist" data CreateAction = CreateAction { _caName :: ActionName, @@ -71,19 +73,22 @@ runCreateAction :: runCreateAction createAction = do -- check if action with same name exists already actionMap <- scActions <$> askSchemaCache - for_ (HashMap.lookup actionName actionMap) $ - const $ - throw400 AlreadyExists $ - "action with name " <> actionName <<> " already exists" + for_ (HashMap.lookup actionName actionMap) + $ const + $ throw400 AlreadyExists + $ "action with name " + <> actionName + <<> " already exists" let metadata = ActionMetadata actionName (_caComment createAction) (_caDefinition createAction) [] - buildSchemaCacheFor (MOAction actionName) $ - MetadataModifier $ - metaActions %~ InsOrdHashMap.insert actionName metadata + buildSchemaCacheFor (MOAction actionName) + $ MetadataModifier + $ metaActions + %~ InsOrdHashMap.insert actionName metadata pure successMsg where actionName = _caName createAction @@ -107,7 +112,7 @@ referred scalars. 
-} resolveAction :: - QErrM m => + (QErrM m) => Env.Environment -> AnnotatedCustomTypes -> ActionDefinitionInput -> @@ -123,15 +128,15 @@ resolveAction env AnnotatedCustomTypes {..} ActionDefinition {..} allScalars = d argumentBaseType = G.getBaseType gType (gType,) <$> if - | Just noCTScalar <- lookupBackendScalar allScalars argumentBaseType -> - pure $ NOCTScalar noCTScalar - | Just nonObjectType <- HashMap.lookup argumentBaseType _actInputTypes -> - pure nonObjectType - | otherwise -> - throw400 InvalidParams $ - "the type: " - <> dquote argumentBaseType - <> " is not defined in custom types or it is not a scalar/enum/input_object" + | Just noCTScalar <- lookupBackendScalar allScalars argumentBaseType -> + pure $ NOCTScalar noCTScalar + | Just nonObjectType <- HashMap.lookup argumentBaseType _actInputTypes -> + pure nonObjectType + | otherwise -> + throw400 InvalidParams + $ "the type: " + <> dquote argumentBaseType + <> " is not defined in custom types or it is not a scalar/enum/input_object" -- Check if the response type is an object let outputType = unGraphQLType _adOutputType @@ -139,14 +144,14 @@ resolveAction env AnnotatedCustomTypes {..} ActionDefinition {..} allScalars = d outputObject <- do aot <- if - | Just aoTScalar <- lookupBackendScalar allScalars outputBaseType -> - pure $ AOTScalar aoTScalar - | Just objectType <- HashMap.lookup outputBaseType _actObjectTypes -> - pure $ AOTObject objectType - | Just (NOCTScalar s) <- HashMap.lookup outputBaseType _actInputTypes -> - pure (AOTScalar s) - | otherwise -> - throw400 NotExists ("the type: " <> dquote outputBaseType <> " is not an object or scalar type defined in custom types") + | Just aoTScalar <- lookupBackendScalar allScalars outputBaseType -> + pure $ AOTScalar aoTScalar + | Just objectType <- HashMap.lookup outputBaseType _actObjectTypes -> + pure $ AOTObject objectType + | Just (NOCTScalar s) <- HashMap.lookup outputBaseType _actInputTypes -> + pure (AOTScalar s) + | otherwise -> + throw400 NotExists ("the type: " <> dquote outputBaseType <> " is not an object or scalar type defined in custom types") -- If the Action is sync: -- 1. Check if the output type has only top level relations (if any) -- If the Action is async: @@ -174,10 +179,10 @@ resolveAction env AnnotatedCustomTypes {..} ActionDefinition {..} allScalars = d in not $ all (`elem` scalarOrEnumFieldNames) objsInRel ) (_aotRelationships aot') - unless (null relationshipsWithNonTopLevelFields) $ - throw400 ConstraintError $ - "Relationships cannot be defined with nested object fields: " - <> commaSeparated (dquote . _atrName <$> relationshipsWithNonTopLevelFields) + unless (null relationshipsWithNonTopLevelFields) + $ throw400 ConstraintError + $ "Relationships cannot be defined with nested object fields: " + <> commaSeparated (dquote . _atrName <$> relationshipsWithNonTopLevelFields) AOTScalar _ -> pure () case _adType of ActionQuery -> validateSyncAction @@ -185,9 +190,10 @@ resolveAction env AnnotatedCustomTypes {..} ActionDefinition {..} allScalars = d ActionMutation ActionAsynchronous -> case aot of AOTScalar _ -> pure () AOTObject aot' -> - unless (null (_aotRelationships aot') || null nestedObjects) $ - throw400 ConstraintError $ - "Async action relations cannot be used with object fields: " <> commaSeparated (dquote . _ofdName <$> nestedObjects) + unless (null (_aotRelationships aot') || null nestedObjects) + $ throw400 ConstraintError + $ "Async action relations cannot be used with object fields: " + <> commaSeparated (dquote . 
_ofdName <$> nestedObjects) pure aot resolvedWebhook <- resolveWebhook env _adHandler let webhookEnvRecord = EnvRecord (printURLTemplate $ unInputWebhook _adHandler) resolvedWebhook @@ -223,18 +229,20 @@ runUpdateAction :: runUpdateAction (UpdateAction actionName actionDefinition actionComment) = do sc <- askSchemaCache let actionsMap = scActions sc - void $ - onNothing (HashMap.lookup actionName actionsMap) $ - throw400 NotExists $ - "action with name " <> actionName <<> " does not exist" + void + $ onNothing (HashMap.lookup actionName actionsMap) + $ throw400 NotExists + $ "action with name " + <> actionName + <<> " does not exist" buildSchemaCacheFor (MOAction actionName) $ updateActionMetadataModifier actionDefinition actionComment pure successMsg where updateActionMetadataModifier :: ActionDefinitionInput -> Maybe Text -> MetadataModifier updateActionMetadataModifier def comment = - MetadataModifier $ - (metaActions . ix actionName . amDefinition .~ def) - . (metaActions . ix actionName . amComment .~ comment) + MetadataModifier + $ (metaActions . ix actionName . amDefinition .~ def) + . (metaActions . ix actionName . amComment .~ comment) newtype ClearActionData = ClearActionData {unClearActionData :: Bool} deriving (Show, Eq, J.FromJSON, J.ToJSON) @@ -268,9 +276,9 @@ runDropAction :: m EncJSON runDropAction (DropAction actionName clearDataM) = do void $ getActionInfo actionName - withNewInconsistentObjsCheck $ - buildSchemaCache $ - dropActionInMetadata actionName + withNewInconsistentObjsCheck + $ buildSchemaCache + $ dropActionInMetadata actionName when (shouldClearActionData clearData) $ liftEitherM $ deleteActionData actionName return successMsg where @@ -306,16 +314,18 @@ runCreateActionPermission :: m EncJSON runCreateActionPermission createActionPermission = do metadata <- getMetadata - when (doesActionPermissionExist metadata actionName roleName) $ - throw400 AlreadyExists $ - "permission for role " - <> roleName - <<> " is already defined on " - <>> actionName - buildSchemaCacheFor (MOActionPermission actionName roleName) $ - MetadataModifier $ - metaActions . ix actionName . amPermissions - %~ (:) (ActionPermissionMetadata roleName comment) + when (doesActionPermissionExist metadata actionName roleName) + $ throw400 AlreadyExists + $ "permission for role " + <> roleName + <<> " is already defined on " + <>> actionName + buildSchemaCacheFor (MOActionPermission actionName roleName) + $ MetadataModifier + $ metaActions + . ix actionName + . 
amPermissions + %~ (:) (ActionPermissionMetadata roleName comment) pure successMsg where CreateActionPermission actionName roleName _ comment = createActionPermission @@ -339,11 +349,14 @@ runDropActionPermission :: m EncJSON runDropActionPermission dropActionPermission = do metadata <- getMetadata - unless (doesActionPermissionExist metadata actionName roleName) $ - throw400 NotExists $ - "permission for role: " <> roleName <<> " is not defined on " <>> actionName - buildSchemaCacheFor (MOActionPermission actionName roleName) $ - dropActionPermissionInMetadata actionName roleName + unless (doesActionPermissionExist metadata actionName roleName) + $ throw400 NotExists + $ "permission for role: " + <> roleName + <<> " is not defined on " + <>> actionName + buildSchemaCacheFor (MOActionPermission actionName roleName) + $ dropActionPermissionInMetadata actionName roleName return successMsg where actionName = _dapAction dropActionPermission @@ -351,5 +364,8 @@ runDropActionPermission dropActionPermission = do dropActionPermissionInMetadata :: ActionName -> RoleName -> MetadataModifier dropActionPermissionInMetadata name role = - MetadataModifier $ - metaActions . ix name . amPermissions %~ filter ((/=) role . _apmRole) + MetadataModifier + $ metaActions + . ix name + . amPermissions + %~ filter ((/=) role . _apmRole) diff --git a/server/src-lib/Hasura/RQL/DDL/ApiLimit.hs b/server/src-lib/Hasura/RQL/DDL/ApiLimit.hs index 14281c00d35..3a421395903 100644 --- a/server/src-lib/Hasura/RQL/DDL/ApiLimit.hs +++ b/server/src-lib/Hasura/RQL/DDL/ApiLimit.hs @@ -34,15 +34,16 @@ runSetApiLimits al = do Right _ -> setApiLimit successMsg where setApiLimit successMessage = do - withNewInconsistentObjsCheck $ - buildSchemaCache $ - MetadataModifier $ - metaApiLimits .~ al + withNewInconsistentObjsCheck + $ buildSchemaCache + $ MetadataModifier + $ metaApiLimits + .~ al return successMessage -- This function compares the user time_limit and the cloud time_limit (used in both set_api_limit and replace_metadata -- APIs). The function returns either a metadata warning or `()` -compareTimeLimitWith :: MonadGetPolicies m => Maybe MaxTime -> m (Either MetadataWarning ()) +compareTimeLimitWith :: (MonadGetPolicies m) => Maybe MaxTime -> m (Either MetadataWarning ()) compareTimeLimitWith userTimeLimitMaybe = do cloudApiTimeLimit <- runGetApiTimeLimit let compareTimeLimitResultEither = @@ -57,21 +58,22 @@ compareTimeLimitWith userTimeLimitMaybe = do -- warning message if the user time limit API limit is greater than the cloud time limit API limit warningMessage :: MaxTime -> MaxTime -> MetadataWarning warningMessage userTimeLimit cloudTimeLimit = - MetadataWarning WCTimeLimitExceededSystemLimit (MOSource defaultSource) $ - "the configured time limit: " - <> tshow (seconds $ unMaxTime userTimeLimit) - <> " exceeds the project time limit: " - <> tshow (seconds $ unMaxTime cloudTimeLimit) - <> ". Time limit of " - <> tshow (seconds $ unMaxTime cloudTimeLimit) - <> " will be applied" + MetadataWarning WCTimeLimitExceededSystemLimit (MOSource defaultSource) + $ "the configured time limit: " + <> tshow (seconds $ unMaxTime userTimeLimit) + <> " exceeds the project time limit: " + <> tshow (seconds $ unMaxTime cloudTimeLimit) + <> ". 
Time limit of " + <> tshow (seconds $ unMaxTime cloudTimeLimit) + <> " will be applied" runRemoveApiLimits :: (MonadError QErr m, MetadataM m, CacheRWM m) => m EncJSON runRemoveApiLimits = do - withNewInconsistentObjsCheck $ - buildSchemaCache $ - MetadataModifier $ - metaApiLimits .~ emptyApiLimit + withNewInconsistentObjsCheck + $ buildSchemaCache + $ MetadataModifier + $ metaApiLimits + .~ emptyApiLimit return successMsg diff --git a/server/src-lib/Hasura/RQL/DDL/ComputedField.hs b/server/src-lib/Hasura/RQL/DDL/ComputedField.hs index 22074785ba8..4698858522b 100644 --- a/server/src-lib/Hasura/RQL/DDL/ComputedField.hs +++ b/server/src-lib/Hasura/RQL/DDL/ComputedField.hs @@ -44,11 +44,18 @@ instance (Backend b) => ToJSON (AddComputedField b) where instance (Backend b) => FromJSON (AddComputedField b) where parseJSON = withObject "AddComputedField" $ \o -> AddComputedField - <$> o .:? "source" .!= defaultSource - <*> o .: "table" - <*> o .: "name" - <*> o .: "definition" - <*> o .:? "comment" .!= Automatic + <$> o + .:? "source" + .!= defaultSource + <*> o + .: "table" + <*> o + .: "name" + <*> o + .: "definition" + <*> o + .:? "comment" + .!= Automatic runAddComputedField :: forall b m. @@ -58,15 +65,16 @@ runAddComputedField :: runAddComputedField q = do void $ withPathK "table" $ askTableInfo @b source table let metadataObj = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOTableObj @b table $ - MTOComputedField computedFieldName + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOTableObj @b table + $ MTOComputedField computedFieldName metadata = ComputedFieldMetadata computedFieldName (_afcDefinition q) (_afcComment q) - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - tableMetadataSetter @b source table . tmComputedFields - %~ InsOrdHashMap.insert computedFieldName metadata + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ tableMetadataSetter @b source table + . tmComputedFields + %~ InsOrdHashMap.insert computedFieldName metadata pure successMsg where source = _afcSource q @@ -83,10 +91,16 @@ data DropComputedField b = DropComputedField instance (Backend b) => FromJSON (DropComputedField b) where parseJSON = withObject "DropComputedField" $ \o -> DropComputedField - <$> o .:? "source" .!= defaultSource - <*> o .: "table" - <*> o .: "name" - <*> o .:? "cascade" .!= False + <$> o + .:? "source" + .!= defaultSource + <*> o + .: "table" + <*> o + .: "name" + <*> o + .:? "cascade" + .!= False runDropComputedField :: forall b m. @@ -101,19 +115,20 @@ runDropComputedField (DropComputedField source table computedField cascade) = do -- Dependencies check sc <- askSchemaCache let deps = - getDependentObjs sc $ - SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b table $ - TOComputedField computedField + getDependentObjs sc + $ SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b table + $ TOComputedField computedField unless (cascade || null deps) $ reportDependentObjectsExist deps withNewInconsistentObjsCheck do metadataModifiers <- mapM purgeComputedFieldDependency deps - buildSchemaCache $ - MetadataModifier $ - tableMetadataSetter @b source table - %~ dropComputedFieldInMetadata computedField . foldl' (.) id metadataModifiers + buildSchemaCache + $ MetadataModifier + $ tableMetadataSetter @b source table + %~ dropComputedFieldInMetadata computedField + . foldl' (.) 
id metadataModifiers pure successMsg where purgeComputedFieldDependency = \case @@ -125,7 +140,8 @@ runDropComputedField (DropComputedField source table computedField cascade) = do AB.unpackAnyBackend @b exists -> pure $ dropPermissionInMetadata roleName permType d -> - throw500 $ - "unexpected dependency for computed field " - <> computedField <<> "; " - <> reportSchemaObj d + throw500 + $ "unexpected dependency for computed field " + <> computedField + <<> "; " + <> reportSchemaObj d diff --git a/server/src-lib/Hasura/RQL/DDL/ConnectionTemplate.hs b/server/src-lib/Hasura/RQL/DDL/ConnectionTemplate.hs index 2472a373043..4a291aa87de 100644 --- a/server/src-lib/Hasura/RQL/DDL/ConnectionTemplate.hs +++ b/server/src-lib/Hasura/RQL/DDL/ConnectionTemplate.hs @@ -33,9 +33,13 @@ instance (Backend b) => FromJSON (TestConnectionTemplate b) where parseJSON v = flip (J.withObject "TestConnectionTemplate") v $ \o -> TestConnectionTemplate - <$> o J..:? "source_name" J..!= defaultSource - <*> o J..: "request_context" - <*> o J..:? "connection_template" + <$> o + J..:? "source_name" + J..!= defaultSource + <*> o + J..: "request_context" + <*> o + J..:? "connection_template" -- | Resolver for the metadata API `_test_connection_template` runTestConnectionTemplate :: diff --git a/server/src-lib/Hasura/RQL/DDL/CustomTypes.hs b/server/src-lib/Hasura/RQL/DDL/CustomTypes.hs index e90770f7e98..5ce75a7c34d 100644 --- a/server/src-lib/Hasura/RQL/DDL/CustomTypes.hs +++ b/server/src-lib/Hasura/RQL/DDL/CustomTypes.hs @@ -45,9 +45,10 @@ runSetCustomTypes :: CustomTypes -> m EncJSON runSetCustomTypes customTypes = do - buildSchemaCacheFor MOCustomTypes $ - MetadataModifier $ - metaCustomTypes .~ customTypes + buildSchemaCacheFor MOCustomTypes + $ MetadataModifier + $ metaCustomTypes + .~ customTypes pure successMsg clearCustomTypesInMetadata :: MetadataModifier @@ -125,8 +126,9 @@ validateCustomTypeDefinitions sources customTypes allScalars = do mapFromL (unObjectTypeName . 
_aotName) <$> traverse validateObject objectDefinitions let scalarTypeMap = - HashMap.map NOCTScalar $ - HashMap.map ASTCustom scalarTypes <> reusedScalars + HashMap.map NOCTScalar + $ HashMap.map ASTCustom scalarTypes + <> reusedScalars enumTypeMap = HashMap.map NOCTEnum enumTypes inputObjectTypeMap = HashMap.map NOCTInputObject inputObjectTypes nonObjectTypeMap = scalarTypeMap <> enumTypeMap <> inputObjectTypeMap @@ -160,35 +162,35 @@ validateCustomTypeDefinitions sources customTypes allScalars = do EnumTypeDefinition -> m () validateEnum enumDefinition = do let duplicateEnumValues = - L.duplicates $ - map _evdValue $ - toList $ - _etdValues enumDefinition + L.duplicates + $ map _evdValue + $ toList + $ _etdValues enumDefinition -- check for duplicate field names - unless (null duplicateEnumValues) $ - dispute $ - pure $ - DuplicateEnumValues - (_etdName enumDefinition) - duplicateEnumValues + unless (null duplicateEnumValues) + $ dispute + $ pure + $ DuplicateEnumValues + (_etdName enumDefinition) + duplicateEnumValues validateInputObject :: InputObjectTypeDefinition -> WriterT (HashMap.HashMap G.Name AnnotatedScalarType) m () validateInputObject inputObjectDefinition = do let inputObjectTypeName = _iotdName inputObjectDefinition duplicateFieldNames = - L.duplicates $ - map _iofdName $ - toList $ - _iotdFields inputObjectDefinition + L.duplicates + $ map _iofdName + $ toList + $ _iotdFields inputObjectDefinition -- check for duplicate field names - unless (null duplicateFieldNames) $ - dispute $ - pure $ - InputObjectDuplicateFields - inputObjectTypeName - duplicateFieldNames + unless (null duplicateFieldNames) + $ dispute + $ pure + $ InputObjectDuplicateFields + inputObjectTypeName + duplicateFieldNames let mapToSet = Set.fromList . HashMap.keys inputTypes = @@ -198,122 +200,122 @@ validateCustomTypeDefinitions sources customTypes allScalars = do for_ (_iotdFields inputObjectDefinition) $ \inputObjectField -> do let fieldBaseType = G.getBaseType $ unGraphQLType $ _iofdType inputObjectField if - | Set.member fieldBaseType inputTypes -> pure () - | Just scalarInfo <- lookupBackendScalar allScalars fieldBaseType -> - tell $ HashMap.singleton fieldBaseType scalarInfo - | otherwise -> - refute $ - pure $ - InputObjectFieldTypeDoesNotExist - (_iotdName inputObjectDefinition) - (_iofdName inputObjectField) - fieldBaseType + | Set.member fieldBaseType inputTypes -> pure () + | Just scalarInfo <- lookupBackendScalar allScalars fieldBaseType -> + tell $ HashMap.singleton fieldBaseType scalarInfo + | otherwise -> + refute + $ pure + $ InputObjectFieldTypeDoesNotExist + (_iotdName inputObjectDefinition) + (_iofdName inputObjectField) + fieldBaseType validateObject :: ObjectTypeDefinition -> m AnnotatedObjectType validateObject ObjectTypeDefinition {..} = do let fieldNames = - map (unObjectFieldName . _ofdName) $ - toList _otdFields + map (unObjectFieldName . _ofdName) + $ toList _otdFields relNames = map (unRelationshipName . 
_trdName) _otdRelationships duplicateFieldNames = L.duplicates $ fieldNames <> relNames -- check for duplicate field names - unless (null duplicateFieldNames) $ - dispute $ - pure $ - ObjectDuplicateFields _otdName duplicateFieldNames + unless (null duplicateFieldNames) + $ dispute + $ pure + $ ObjectDuplicateFields _otdName duplicateFieldNames fields <- for _otdFields $ \objectField -> do let fieldName = _ofdName objectField -- check that arguments are not defined - when (isJust $ _ofdArguments objectField) $ - dispute $ - pure $ - ObjectFieldArgumentsNotAllowed - _otdName - fieldName + when (isJust $ _ofdArguments objectField) + $ dispute + $ pure + $ ObjectFieldArgumentsNotAllowed + _otdName + fieldName forM objectField $ \fieldType -> do let fieldBaseType = G.getBaseType $ unGraphQLType fieldType annotatedObjectFieldType <- if - | Just scalarDef <- HashMap.lookup fieldBaseType scalarTypes -> - pure $ AOFTScalar $ ASTCustom scalarDef - | Just enumDef <- HashMap.lookup fieldBaseType enumTypes -> - pure $ AOFTEnum enumDef - | HashMap.member fieldBaseType objectTypes -> - pure $ AOFTObject fieldBaseType - | Just scalarInfo <- lookupBackendScalar allScalars fieldBaseType -> - pure $ AOFTScalar scalarInfo - | otherwise -> - refute $ - pure $ - ObjectFieldTypeDoesNotExist - _otdName - fieldName - fieldBaseType + | Just scalarDef <- HashMap.lookup fieldBaseType scalarTypes -> + pure $ AOFTScalar $ ASTCustom scalarDef + | Just enumDef <- HashMap.lookup fieldBaseType enumTypes -> + pure $ AOFTEnum enumDef + | HashMap.member fieldBaseType objectTypes -> + pure $ AOFTObject fieldBaseType + | Just scalarInfo <- lookupBackendScalar allScalars fieldBaseType -> + pure $ AOFTScalar scalarInfo + | otherwise -> + refute + $ pure + $ ObjectFieldTypeDoesNotExist + _otdName + fieldName + fieldBaseType pure (unGraphQLType fieldType, annotatedObjectFieldType) let fieldsMap = - HashMap.fromList $ - map (_ofdName &&& (fst . _ofdType)) $ - toList fields + HashMap.fromList + $ map (_ofdName &&& (fst . 
_ofdType)) + $ toList fields - when (Set.size (Set.fromList $ _trdSource <$> _otdRelationships) > 1) $ - refute $ - pure $ - ObjectRelationshipMultiSources _otdName + when (Set.size (Set.fromList $ _trdSource <$> _otdRelationships) > 1) + $ refute + $ pure + $ ObjectRelationshipMultiSources _otdName annotatedRelationships <- for _otdRelationships $ \TypeRelationshipDefinition {..} -> do -- get the source info SourceInfo {..} <- - onNothing (unsafeSourceInfo =<< HashMap.lookup _trdSource sources) $ - refute $ - pure $ - ObjectRelationshipTableDoesNotExist - _otdName - _trdName - _trdRemoteTable + onNothing (unsafeSourceInfo =<< HashMap.lookup _trdSource sources) + $ refute + $ pure + $ ObjectRelationshipTableDoesNotExist + _otdName + _trdName + _trdRemoteTable -- check that the table exists remoteTableInfo <- - onNothing (HashMap.lookup _trdRemoteTable _siTables) $ - refute $ - pure $ - ObjectRelationshipTableDoesNotExist - _otdName - _trdName - _trdRemoteTable + onNothing (HashMap.lookup _trdRemoteTable _siTables) + $ refute + $ pure + $ ObjectRelationshipTableDoesNotExist + _otdName + _trdName + _trdRemoteTable -- check that the column mapping is sane - annotatedFieldMapping <- flip HashMap.traverseWithKey _trdFieldMapping $ - \fieldName columnName -> do + annotatedFieldMapping <- flip HashMap.traverseWithKey _trdFieldMapping + $ \fieldName columnName -> do case HashMap.lookup fieldName fieldsMap of Nothing -> - dispute $ - pure $ - ObjectRelationshipFieldDoesNotExist - _otdName - _trdName - fieldName + dispute + $ pure + $ ObjectRelationshipFieldDoesNotExist + _otdName + _trdName + fieldName Just fieldType -> -- the field should be a non-list type scalar - when (G.isListType fieldType) $ - dispute $ - pure $ - ObjectRelationshipFieldListType - _otdName - _trdName - fieldName + when (G.isListType fieldType) + $ dispute + $ pure + $ ObjectRelationshipFieldListType + _otdName + _trdName + fieldName -- the column should be a column of the table - onNothing (getColumnInfoM remoteTableInfo (fromCol @('Postgres 'Vanilla) columnName)) $ - refute $ - pure $ - ObjectRelationshipColumnDoesNotExist _otdName _trdName _trdRemoteTable columnName + onNothing (getColumnInfoM remoteTableInfo (fromCol @('Postgres 'Vanilla) columnName)) + $ refute + $ pure + $ ObjectRelationshipColumnDoesNotExist _otdName _trdName _trdRemoteTable columnName - pure $ - AnnotatedTypeRelationship + pure + $ AnnotatedTypeRelationship _trdName _trdType _siName @@ -321,8 +323,8 @@ validateCustomTypeDefinitions sources customTypes allScalars = do (tableInfoName remoteTableInfo) annotatedFieldMapping - pure $ - AnnotatedObjectType + pure + $ AnnotatedObjectType _otdName _otdDescription fields diff --git a/server/src-lib/Hasura/RQL/DDL/DataConnector.hs b/server/src-lib/Hasura/RQL/DDL/DataConnector.hs index 3c80890d947..e6bff35205a 100644 --- a/server/src-lib/Hasura/RQL/DDL/DataConnector.hs +++ b/server/src-lib/Hasura/RQL/DDL/DataConnector.hs @@ -68,12 +68,12 @@ instance FromJSON DCAddAgent where instance ToJSON DCAddAgent where toJSON DCAddAgent {..} = - J.object $ - [ "name" .= _gdcaName, - "url" .= show _gdcaUrl, - "skip_check" .= _gdcaSkipCheck - ] - ++ ["display_name" .= _gdcaDisplayName | isJust _gdcaDisplayName] + J.object + $ [ "name" .= _gdcaName, + "url" .= show _gdcaUrl, + "skip_check" .= _gdcaSkipCheck + ] + ++ ["display_name" .= _gdcaDisplayName | isJust _gdcaDisplayName] -- | Insert a new Data Connector Agent into Metadata. 
runAddDataConnectorAgent :: @@ -93,24 +93,25 @@ runAddDataConnectorAgent DCAddAgent {..} = do agent = DC.Types.DataConnectorOptions _gdcaUrl _gdcaDisplayName sourceKinds <- (:) "postgres" . fmap _skiSourceKind . unSourceKinds <$> agentSourceKinds if - | toTxt _gdcaName `elem` sourceKinds -> Error.throw400 Error.AlreadyExists $ "SourceKind '" <> toTxt _gdcaName <> "' already exists." - | _gdcaSkipCheck == SkipCheck True -> addAgent _gdcaName agent - | otherwise -> - checkAgentAvailability _gdcaUrl >>= \case - NotAvailable err -> - pure $ - EncJSON.encJFromJValue $ - J.object - [ ("message" .= J.String "Agent is not available"), - ("details" .= err) - ] - _ -> addAgent _gdcaName agent + | toTxt _gdcaName `elem` sourceKinds -> Error.throw400 Error.AlreadyExists $ "SourceKind '" <> toTxt _gdcaName <> "' already exists." + | _gdcaSkipCheck == SkipCheck True -> addAgent _gdcaName agent + | otherwise -> + checkAgentAvailability _gdcaUrl >>= \case + NotAvailable err -> + pure + $ EncJSON.encJFromJValue + $ J.object + [ ("message" .= J.String "Agent is not available"), + ("details" .= err) + ] + _ -> addAgent _gdcaName agent addAgent :: (MonadError Error.QErr m, SC.Build.MetadataM m, SC.Build.CacheRWM m) => DC.Types.DataConnectorName -> DC.Types.DataConnectorOptions -> m EncJSON addAgent agentName agent = do let modifier' = - Metadata.MetadataModifier $ - Metadata.metaBackendConfigs %~ BackendMap.modify @'Backend.DataConnector \oldMap -> + Metadata.MetadataModifier + $ Metadata.metaBackendConfigs + %~ BackendMap.modify @'Backend.DataConnector \oldMap -> Metadata.BackendConfigWrapper $ Map.insert agentName agent (coerce oldMap) SC.Build.withNewInconsistentObjsCheck $ SC.Build.buildSchemaCache modifier' @@ -165,10 +166,10 @@ runDeleteDataConnectorAgent DCDeleteAgent {..} = do Nothing -> Error.throw400 Error.NotFound $ "DC Agent '" <> toTxt _dcdaName <> "' not found" Just _ -> do let modifier' = - Metadata.MetadataModifier $ - Metadata.metaBackendConfigs - %~ BackendMap.alter @'Backend.DataConnector - (fmap (coerce . Map.delete _dcdaName . Metadata.unBackendConfigWrapper)) + Metadata.MetadataModifier + $ Metadata.metaBackendConfigs + %~ BackendMap.alter @'Backend.DataConnector + (fmap (coerce . Map.delete _dcdaName . 
Metadata.unBackendConfigWrapper))
       SC.Build.withNewInconsistentObjsCheck $ SC.Build.buildSchemaCache modifier'
       pure Common.successMsg
diff --git a/server/src-lib/Hasura/RQL/DDL/Endpoint.hs b/server/src-lib/Hasura/RQL/DDL/Endpoint.hs
index fd0b8be33c0..5850fef9ce0 100644
--- a/server/src-lib/Hasura/RQL/DDL/Endpoint.hs
+++ b/server/src-lib/Hasura/RQL/DDL/Endpoint.hs
@@ -27,13 +27,16 @@ runCreateEndpoint endpoint@EndpointMetadata {..} = do
   endpointsMap <- _metaRestEndpoints <$> getMetadata

   InsOrdHashMap.lookup _ceName endpointsMap `for_` \_ ->
-    throw400 AlreadyExists $
-      "Endpoint with name: " <> toTxt _ceName <> " already exists"
+    throw400 AlreadyExists
+      $ "Endpoint with name: "
+      <> toTxt _ceName
+      <> " already exists"

-  withNewInconsistentObjsCheck $
-    buildSchemaCacheFor (MOEndpoint _ceName) $
-      MetadataModifier $
-        metaRestEndpoints %~ InsOrdHashMap.insert _ceName endpoint
+  withNewInconsistentObjsCheck
+    $ buildSchemaCacheFor (MOEndpoint _ceName)
+    $ MetadataModifier
+    $ metaRestEndpoints
+    %~ InsOrdHashMap.insert _ceName endpoint

   return successMsg

 runDropEndpoint ::
@@ -45,9 +48,9 @@ runDropEndpoint ::
   m EncJSON
 runDropEndpoint DropEndpoint {..} = do
   checkExists _deName
-  withNewInconsistentObjsCheck $
-    buildSchemaCache $
-      dropEndpointInMetadata _deName
+  withNewInconsistentObjsCheck
+    $ buildSchemaCache
+    $ dropEndpointInMetadata _deName
   return successMsg

 dropEndpointInMetadata :: EndpointName -> MetadataModifier
@@ -57,7 +60,9 @@ dropEndpointInMetadata name =
 checkExists :: (MetadataM m, MonadError QErr m) => EndpointName -> m ()
 checkExists name = do
   endpointsMap <- _metaRestEndpoints <$> getMetadata
-  void $
-    onNothing (InsOrdHashMap.lookup name endpointsMap) $
-      throw400 NotExists $
-        "endpoint with name: " <> toTxt name <> " does not exist"
+  void
+    $ onNothing (InsOrdHashMap.lookup name endpointsMap)
+    $ throw400 NotExists
+    $ "endpoint with name: "
+    <> toTxt name
+    <> " does not exist"
diff --git a/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs b/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs
index a88c3f5531e..914dcb3a825 100644
--- a/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs
+++ b/server/src-lib/Hasura/RQL/DDL/EventTrigger.hs
@@ -125,12 +125,12 @@ instance (Backend b) => FromJSON (CreateEventTriggerQuery b) where
       requestTransform <- o .:? "request_transform"
       responseTransform <- o .:? "response_transform"
       cleanupConfig <- o .:? "cleanup_config"
-      when (isIllegalTriggerName name) $
-        fail "only alphanumeric and underscore and hyphens allowed for name"
-      unless (T.length (triggerNameToTxt name) <= maxTriggerNameLength) $
-        fail "event trigger name can be at most 42 characters"
-      unless (any isJust [insert, update, delete] || enableManual) $
-        fail "atleast one amongst insert/update/delete/enable_manual spec must be provided"
+      when (isIllegalTriggerName name)
+        $ fail "only alphanumeric and underscore and hyphens allowed for name"
+      unless (T.length (triggerNameToTxt name) <= maxTriggerNameLength)
+        $ fail "event trigger name can be at most 42 characters"
+      unless (any isJust [insert, update, delete] || enableManual)
+        $ fail "at least one amongst insert/update/delete/enable_manual spec must be provided"
       case (webhook, webhookFromEnv) of
         (Just _, Nothing) -> return ()
         (Nothing, Just _) -> return ()
@@ -157,8 +157,11 @@ data DeleteEventTriggerQuery (b :: BackendType) = DeleteEventTriggerQuery
 instance FromJSON (DeleteEventTriggerQuery b) where
   parseJSON = withObject "DeleteEventTriggerQuery" $ \o ->
     DeleteEventTriggerQuery
-      <$> o .:? "source" .!= defaultSource
-      <*> o .: "name"
+      <$> o
+      .:? "source"
+      .!= defaultSource
+      <*> o
+      .: "name"

 data RedeliverEventQuery (b :: BackendType) = RedeliverEventQuery
   { _rdeqEventId :: EventId,
@@ -168,8 +171,11 @@ data RedeliverEventQuery (b :: BackendType) = RedeliverEventQuery
 instance FromJSON (RedeliverEventQuery b) where
   parseJSON = withObject "RedeliverEventQuery" $ \o ->
     RedeliverEventQuery
-      <$> o .: "event_id"
-      <*> o .:? "source" .!= defaultSource
+      <$> o
+      .: "event_id"
+      <*> o
+      .:? "source"
+      .!= defaultSource

 data InvokeEventTriggerQuery (b :: BackendType) = InvokeEventTriggerQuery
   { _ietqName :: TriggerName,
@@ -180,9 +186,13 @@ data InvokeEventTriggerQuery (b :: BackendType) = InvokeEventTriggerQuery
 instance (Backend b) => FromJSON (InvokeEventTriggerQuery b) where
   parseJSON = withObject "InvokeEventTriggerQuery" $ \o ->
     InvokeEventTriggerQuery
-      <$> o .: "name"
-      <*> o .:? "source" .!= defaultSource
-      <*> o .: "payload"
+      <$> o
+      .: "name"
+      <*> o
+      .:? "source"
+      .!= defaultSource
+      <*> o
+      .: "payload"

 -- | This typeclass has the implementation logic for the event trigger log cleanup.
 --
@@ -276,9 +286,9 @@ resolveEventTriggerQuery (CreateEventTriggerQuery source name qt insert update d
 droppedTriggerOps :: TriggerOpsDef b -> TriggerOpsDef b -> HashSet Ops
 droppedTriggerOps oldEventTriggerOps newEventTriggerOps =
-  Set.fromList $
-    catMaybes $
-      [ (bool Nothing (Just INSERT) (isDroppedOp (tdInsert oldEventTriggerOps) (tdInsert newEventTriggerOps))),
+  Set.fromList
+    $ catMaybes
+    $ [ (bool Nothing (Just INSERT) (isDroppedOp (tdInsert oldEventTriggerOps) (tdInsert newEventTriggerOps))),
         (bool Nothing (Just UPDATE) (isDroppedOp (tdUpdate oldEventTriggerOps) (tdUpdate newEventTriggerOps))),
         (bool Nothing (Just DELETE) (isDroppedOp (tdDelete oldEventTriggerOps) (tdDelete newEventTriggerOps)))
       ]
@@ -306,10 +316,10 @@ createEventTriggerQueryMetadata q = do
       source = _cetqSource q
       triggerName = etcName triggerConf
      metadataObj =
-        MOSourceObjId source $
-          AB.mkAnyBackend $
-            SMOTableObj @b table $
-              MTOTrigger triggerName
+        MOSourceObjId source
+          $ AB.mkAnyBackend
+          $ SMOTableObj @b table
+          $ MTOTrigger triggerName
   sourceInfo <- askSourceInfo @b source
   let sourceConfig = (_siConfiguration sourceInfo)
       newConfig = _cteqCleanupConfig q
@@ -334,12 +344,13 @@ createEventTriggerQueryMetadata q = do
         else for_ newConfig \cleanupConfig -> do
           (`onLeft` logQErr) =<< generateCleanupSchedules (AB.mkAnyBackend sourceInfo) triggerName cleanupConfig
-  buildSchemaCacheFor metadataObj $
-    MetadataModifier $
-      tableMetadataSetter @b source table . tmEventTriggers
-        %~ if replace
-          then ix triggerName .~ triggerConf
-          else InsOrdHashMap.insert triggerName triggerConf
+  buildSchemaCacheFor metadataObj
+    $ MetadataModifier
+    $ tableMetadataSetter @b source table
+    . tmEventTriggers
+    %~ if replace
+      then ix triggerName .~ triggerConf
+      else InsOrdHashMap.insert triggerName triggerConf

 runCreateEventTriggerQuery ::
   forall b m r.
@@ -369,10 +380,11 @@ runDeleteEventTriggerQuery (DeleteEventTriggerQuery sourceName triggerName) = do
   sourceConfig <- askSourceConfig @b sourceName
   tableName <- (_tciName .
_tiCoreInfo) <$> askTabInfoFromTrigger @b sourceName triggerName - withNewInconsistentObjsCheck $ - buildSchemaCache $ - MetadataModifier $ - tableMetadataSetter @b sourceName tableName %~ dropEventTriggerInMetadata triggerName + withNewInconsistentObjsCheck + $ buildSchemaCache + $ MetadataModifier + $ tableMetadataSetter @b sourceName tableName + %~ dropEventTriggerInMetadata triggerName dropTriggerAndArchiveEvents @b sourceConfig triggerName tableName @@ -576,9 +588,9 @@ buildEventTriggerInfo triggerOnReplication tabDep = SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITable @b tableName + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITable @b tableName ) DRParent pure (eTrigInfo, tabDep Seq.:<| getTrigDefDeps @b source tableName def) @@ -591,22 +603,22 @@ getTrigDefDeps :: TriggerOpsDef b -> Seq SchemaDependency getTrigDefDeps source tableName (TriggerOpsDef mIns mUpd mDel _) = - mconcat $ - Seq.fromList - <$> catMaybes - [ subsOpSpecDeps <$> mIns, - subsOpSpecDeps <$> mUpd, - subsOpSpecDeps <$> mDel - ] + mconcat + $ Seq.fromList + <$> catMaybes + [ subsOpSpecDeps <$> mIns, + subsOpSpecDeps <$> mUpd, + subsOpSpecDeps <$> mDel + ] where subsOpSpecDeps :: SubscribeOpSpec b -> [SchemaDependency] subsOpSpecDeps os = let cols = getColsFromSub $ sosColumns os mkColDependency dependencyReason col = SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b tableName (TOCol @b col) + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b tableName (TOCol @b col) ) dependencyReason colDeps = map (mkColDependency DRColumn) cols @@ -668,14 +680,17 @@ updateCleanupStatusInMetadata :: updateCleanupStatusInMetadata cleanupConfig cleanupSwitch sourceName tableName triggerName = do let newCleanupConfig = Just $ cleanupConfig {_atlccPaused = cleanupSwitch} metadataObj = - MOSourceObjId sourceName $ - AB.mkAnyBackend $ - SMOTableObj @b tableName $ - MTOTrigger triggerName + MOSourceObjId sourceName + $ AB.mkAnyBackend + $ SMOTableObj @b tableName + $ MTOTrigger triggerName - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - tableMetadataSetter @b sourceName tableName . tmEventTriggers . ix triggerName %~ updateCleanupConfig newCleanupConfig + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ tableMetadataSetter @b sourceName tableName + . tmEventTriggers + . 
ix triggerName + %~ updateCleanupConfig newCleanupConfig -- | Function to start/stop the cleanup action based on the event triggers supplied in -- TriggerLogCleanupToggleConfig conf diff --git a/server/src-lib/Hasura/RQL/DDL/FeatureFlag.hs b/server/src-lib/Hasura/RQL/DDL/FeatureFlag.hs index 6e11ba916be..e8d21aa127f 100644 --- a/server/src-lib/Hasura/RQL/DDL/FeatureFlag.hs +++ b/server/src-lib/Hasura/RQL/DDL/FeatureFlag.hs @@ -41,10 +41,10 @@ runGetFeatureFlag (Types.CheckFeatureFlag getFeatureFlag) GetFeatureFlag {..} = Nothing -> Error.throw400 Error.NotFound $ "Feature Flag '" <> gfgIdentifier <> "' not found" Just flag -> do flagValue <- liftIO $ getFeatureFlag flag - pure $ - EncJSON.encJFromJValue $ - J.object - [ "identifier" .= gfgIdentifier, - "value" .= flagValue, - "description" .= FeatureFlag.ffDescription flag - ] + pure + $ EncJSON.encJFromJValue + $ J.object + [ "identifier" .= gfgIdentifier, + "value" .= flagValue, + "description" .= FeatureFlag.ffDescription flag + ] diff --git a/server/src-lib/Hasura/RQL/DDL/GraphqlSchemaIntrospection.hs b/server/src-lib/Hasura/RQL/DDL/GraphqlSchemaIntrospection.hs index 957b2839843..bfef347e575 100644 --- a/server/src-lib/Hasura/RQL/DDL/GraphqlSchemaIntrospection.hs +++ b/server/src-lib/Hasura/RQL/DDL/GraphqlSchemaIntrospection.hs @@ -19,6 +19,6 @@ runSetGraphqlSchemaIntrospectionOptions :: m EncJSON runSetGraphqlSchemaIntrospectionOptions introspectionOptions = do let metadataModifier = MetadataModifier $ metaSetGraphqlIntrospectionOptions .~ introspectionOptions - withNewInconsistentObjsCheck $ - buildSchemaCache metadataModifier + withNewInconsistentObjsCheck + $ buildSchemaCache metadataModifier return successMsg diff --git a/server/src-lib/Hasura/RQL/DDL/Headers.hs b/server/src-lib/Hasura/RQL/DDL/Headers.hs index 4105c13e54e..69dbbe09cbb 100644 --- a/server/src-lib/Hasura/RQL/DDL/Headers.hs +++ b/server/src-lib/Hasura/RQL/DDL/Headers.hs @@ -15,7 +15,7 @@ import Network.HTTP.Types qualified as HTTP -- | Resolve configuration headers makeHeadersFromConf :: - MonadError QErr m => Env.Environment -> [HeaderConf] -> m [HTTP.Header] + (MonadError QErr m) => Env.Environment -> [HeaderConf] -> m [HTTP.Header] makeHeadersFromConf env = mapM getHeader where getHeader hconf = diff --git a/server/src-lib/Hasura/RQL/DDL/InheritedRoles.hs b/server/src-lib/Hasura/RQL/DDL/InheritedRoles.hs index 78b25893e56..07c0ac8fc0c 100644 --- a/server/src-lib/Hasura/RQL/DDL/InheritedRoles.hs +++ b/server/src-lib/Hasura/RQL/DDL/InheritedRoles.hs @@ -28,11 +28,12 @@ runAddInheritedRole :: InheritedRole -> m EncJSON runAddInheritedRole addInheritedRoleQ@(Role inheritedRoleName (ParentRoles parentRoles)) = do - when (inheritedRoleName `elem` parentRoles) $ - throw400 InvalidParams "an inherited role name cannot be in the role combination" - buildSchemaCacheFor (MOInheritedRole inheritedRoleName) $ - MetadataModifier $ - metaInheritedRoles %~ InsOrdHashMap.insert inheritedRoleName addInheritedRoleQ + when (inheritedRoleName `elem` parentRoles) + $ throw400 InvalidParams "an inherited role name cannot be in the role combination" + buildSchemaCacheFor (MOInheritedRole inheritedRoleName) + $ MetadataModifier + $ metaInheritedRoles + %~ InsOrdHashMap.insert inheritedRoleName addInheritedRoleQ pure successMsg dropInheritedRoleInMetadata :: RoleName -> MetadataModifier @@ -45,9 +46,10 @@ runDropInheritedRole :: m EncJSON runDropInheritedRole (DropInheritedRole roleName) = do inheritedRolesMetadata <- _metaInheritedRoles <$> getMetadata - unless (roleName 
`InsOrdHashMap.member` inheritedRolesMetadata) $ - throw400 NotExists $ - roleName <<> " inherited role doesn't exist" + unless (roleName `InsOrdHashMap.member` inheritedRolesMetadata) + $ throw400 NotExists + $ roleName + <<> " inherited role doesn't exist" buildSchemaCacheFor (MOInheritedRole roleName) (dropInheritedRoleInMetadata roleName) pure successMsg @@ -56,18 +58,19 @@ runDropInheritedRole (DropInheritedRole roleName) = do -- the dependencies of the inherited role which will be the list -- of the parent roles resolveInheritedRole :: - MonadError QErr m => + (MonadError QErr m) => HashSet RoleName -> InheritedRole -> m (Role, Seq SchemaDependency) resolveInheritedRole allRoles (Role roleName (ParentRoles parentRoles)) = do let missingParentRoles = Set.filter (`notElem` allRoles) parentRoles - unless (Set.null missingParentRoles) $ - let errMessage roles = - "the following parent role(s) are not found: " - <> roles - <> " which are required to construct the inherited role: " <>> roleName - in throw400 NotExists $ errMessage $ commaSeparated $ Set.map roleNameToTxt missingParentRoles + unless (Set.null missingParentRoles) + $ let errMessage roles = + "the following parent role(s) are not found: " + <> roles + <> " which are required to construct the inherited role: " + <>> roleName + in throw400 NotExists $ errMessage $ commaSeparated $ Set.map roleNameToTxt missingParentRoles let schemaDependencies = fmap (\parentRole -> SchemaDependency (SORole parentRole) DRParentRole) (Seq.fromList (toList parentRoles)) pure $ (Role roleName $ ParentRoles parentRoles, schemaDependencies) diff --git a/server/src-lib/Hasura/RQL/DDL/Metadata.hs b/server/src-lib/Hasura/RQL/DDL/Metadata.hs index b99bfb7d7b2..6cda0655f6a 100644 --- a/server/src-lib/Hasura/RQL/DDL/Metadata.hs +++ b/server/src-lib/Hasura/RQL/DDL/Metadata.hs @@ -114,7 +114,7 @@ postDropSourceHookHelper oldSchemaCache sourceName sourceMetadataBackend = do let message = "Could not cleanup the source '" <> sourceName - <<> "' while dropping it from the graphql-engine as it is inconsistent." + <<> "' while dropping it from the graphql-engine as it is inconsistent." <> " Please consider cleaning the resources created by the graphql engine," <> " refer https://hasura.io/docs/latest/graphql/core/event-triggers/remove-event-triggers/#clean-footprints-manually" HL.unLogger logger $ MetadataLog HL.LevelWarn message J.Null @@ -152,23 +152,24 @@ runClearMetadata _ = do -- which contains only default source without any tables and functions. let emptyDefaultSource = AB.dispatchAnyBackend @Backend exists \(s :: SourceMetadata b) -> - BackendSourceMetadata $ - AB.mkAnyBackend @b $ - SourceMetadata - @b - defaultSource - (_smKind @b s) - mempty - mempty - mempty - mempty - mempty - (_smConfiguration @b s) - Nothing - emptySourceCustomization - Nothing + BackendSourceMetadata + $ AB.mkAnyBackend @b + $ SourceMetadata + @b + defaultSource + (_smKind @b s) + mempty + mempty + mempty + mempty + mempty + (_smConfiguration @b s) + Nothing + emptySourceCustomization + Nothing in emptyMetadata - & metaSources %~ InsOrdHashMap.insert defaultSource emptyDefaultSource + & metaSources + %~ InsOrdHashMap.insert defaultSource emptyDefaultSource (_inconsistencies, replaceMetadataWarnings) <- runMetadataWarnings . runReplaceMetadataV2' . 
ReplaceMetadataV2 NoAllowInconsistentMetadata AllowWarnings $ RMWithSources emptyMetadata' @@ -250,8 +251,8 @@ runReplaceMetadataV2 replaceMetadataArgs = do case _rmv2AllowWarningss replaceMetadataArgs of AllowWarnings -> pure () DisallowWarnings -> - unless (null metadataWarnings) $ - throw400WithDetail (CustomCode "metadata-warnings") "failed due to metadata warnings" (J.toJSON metadataWarnings) + unless (null metadataWarnings) + $ throw400WithDetail (CustomCode "metadata-warnings") "failed due to metadata warnings" (J.toJSON metadataWarnings) pure $ encJFromJValue $ formatInconsistentObjs inconsistentObjects metadataWarnings runReplaceMetadataV2' :: @@ -288,17 +289,17 @@ runReplaceMetadataV2' ReplaceMetadataV2 {..} = do RMWithoutSources MetadataNoSources {..} -> do let maybeDefaultSourceMetadata = oldMetadata ^? metaSources . ix defaultSource . toSourceMetadata defaultSourceMetadata <- - onNothing maybeDefaultSourceMetadata $ - throw400 NotSupported "cannot import metadata without sources since no default source is defined" + onNothing maybeDefaultSourceMetadata + $ throw400 NotSupported "cannot import metadata without sources since no default source is defined" let newDefaultSourceMetadata = - BackendSourceMetadata $ - AB.mkAnyBackend + BackendSourceMetadata + $ AB.mkAnyBackend defaultSourceMetadata { _smTables = _mnsTables, _smFunctions = _mnsFunctions } - pure $ - Metadata + pure + $ Metadata (InsOrdHashMap.singleton defaultSource newDefaultSourceMetadata) _mnsRemoteSchemas _mnsQueryCollections @@ -332,10 +333,10 @@ runReplaceMetadataV2' ReplaceMetadataV2 {..} = do mkEventTriggerObjID tableName triggerName = MOSourceObjId source $ AB.mkAnyBackend $ SMOTableObj @b tableName $ MTOTrigger triggerName mkIllegalEventTriggerNameWarning (tableName, triggerName) = -- TODO: capture the path as well - MetadataWarning WCIllegalEventTriggerName (mkEventTriggerObjID tableName triggerName) $ - "The event trigger with name " - <> dquote (triggerNameToTxt triggerName) - <> " may not work as expected, hasura suggests to use only alphanumeric, underscore and hyphens in an event trigger name" + MetadataWarning WCIllegalEventTriggerName (mkEventTriggerObjID tableName triggerName) + $ "The event trigger with name " + <> dquote (triggerNameToTxt triggerName) + <> " may not work as expected, hasura suggests to use only alphanumeric, underscore and hyphens in an event trigger name" unless (null newIllegalTriggerNamesInNewMetadata) $ do traverse_ (warn . mkIllegalEventTriggerNameWarning) newIllegalTriggerNamesInNewMetadata @@ -434,11 +435,11 @@ runReplaceMetadataV2' ReplaceMetadataV2 {..} = do -- traverse over the new cron triggers and check if any of them -- already exists as a cron trigger with "included_in_metadata: false" for_ allNewCronTriggers $ \ct -> - when (ctName ct `InsOrdHashMap.member` oldCronTriggersNotIncludedInMetadata) $ - throw400 AlreadyExists $ - "cron trigger with name " - <> ctName ct - <<> " already exists as a cron trigger with \"included_in_metadata\" as false" + when (ctName ct `InsOrdHashMap.member` oldCronTriggersNotIncludedInMetadata) + $ throw400 AlreadyExists + $ "cron trigger with name " + <> ctName ct + <<> " already exists as a cron trigger with \"included_in_metadata\" as false" -- we add the old cron triggers with included_in_metadata set to false with the -- newly added cron triggers pure $ allNewCronTriggers <> oldCronTriggersNotIncludedInMetadata @@ -476,8 +477,8 @@ runReplaceMetadataV2' ReplaceMetadataV2 {..} = do -- not need to be removed. 
-- -- TODO: Determine if any errors should be thrown from askSourceConfig at all if the errors are just being discarded - return $ - flip catchError catcher do + return + $ flip catchError catcher do sourceConfigMaybe <- askSourceConfigMaybe @b source case sourceConfigMaybe of Nothing -> @@ -485,7 +486,7 @@ runReplaceMetadataV2' ReplaceMetadataV2 {..} = do let message = "Could not drop SQL triggers present in the source '" <> source - <<> "' as it is inconsistent." + <<> "' as it is inconsistent." <> " While creating an event trigger, Hasura creates SQL triggers on the table." <> " Please refer https://hasura.io/docs/latest/graphql/core/event-triggers/remove-event-triggers/#clean-up-event-trigger-footprints-manually " <> " to delete the sql triggers from the database manually." @@ -538,7 +539,7 @@ runReplaceMetadataV2' ReplaceMetadataV2 {..} = do sqlTriggerError triggerName = ( "Could not drop SQL triggers associated with event trigger '" <> triggerName - <<> "'. While creating an event trigger, Hasura creates SQL triggers on the table." + <<> "'. While creating an event trigger, Hasura creates SQL triggers on the table." <> " Please refer https://hasura.io/docs/latest/graphql/core/event-triggers/remove-event-triggers/#clean-up-event-trigger-footprints-manually " <> " to delete the sql triggers from the database manually." <> " For more details, please refer https://hasura.io/docs/latest/graphql/core/event-triggers/index.html " @@ -568,12 +569,12 @@ runExportMetadataV2 :: m EncJSON runExportMetadataV2 currentResourceVersion ExportMetadata {} = do exportMetadata <- processCronTriggersMetadata <$> getMetadata - pure $ - encJFromOrderedValue $ - AO.object - [ ("resource_version", AO.toOrdered currentResourceVersion), - ("metadata", metadataToOrdJSON exportMetadata) - ] + pure + $ encJFromOrderedValue + $ AO.object + [ ("resource_version", AO.toOrdered currentResourceVersion), + ("metadata", metadataToOrdJSON exportMetadata) + ] runReloadMetadata :: (QErrM m, CacheRWM m, MetadataM m) => ReloadMetadata -> m EncJSON runReloadMetadata (ReloadMetadata reloadRemoteSchemas reloadSources reloadRecreateEventTriggers reloadDataConnectors) = do @@ -581,21 +582,27 @@ runReloadMetadata (ReloadMetadata reloadRemoteSchemas reloadSources reloadRecrea let allSources = HS.fromList $ InsOrdHashMap.keys $ _metaSources metadata allRemoteSchemas = HS.fromList $ InsOrdHashMap.keys $ _metaRemoteSchemas metadata allDataConnectors = - maybe mempty (HS.fromList . Map.keys . unBackendConfigWrapper) $ - BackendMap.lookup @'DataConnector $ - _metaBackendConfigs metadata + maybe mempty (HS.fromList . Map.keys . 
unBackendConfigWrapper) + $ BackendMap.lookup @'DataConnector + $ _metaBackendConfigs metadata checkRemoteSchema name = - unless (HS.member name allRemoteSchemas) $ - throw400 NotExists $ - "Remote schema with name " <> name <<> " not found in metadata" + unless (HS.member name allRemoteSchemas) + $ throw400 NotExists + $ "Remote schema with name " + <> name + <<> " not found in metadata" checkSource name = - unless (HS.member name allSources) $ - throw400 NotExists $ - "Source with name " <> name <<> " not found in metadata" + unless (HS.member name allSources) + $ throw400 NotExists + $ "Source with name " + <> name + <<> " not found in metadata" checkDataConnector name = - unless (HS.member name allDataConnectors) $ - throw400 NotExists $ - "Data connector with name " <> name <<> " not found in metadata" + unless (HS.member name allDataConnectors) + $ throw400 NotExists + $ "Data connector with name " + <> name + <<> " not found in metadata" remoteSchemaInvalidations <- case reloadRemoteSchemas of RSReloadAll -> pure allRemoteSchemas @@ -620,11 +627,13 @@ runReloadMetadata (ReloadMetadata reloadRemoteSchemas reloadSources reloadRecrea buildSchemaCacheWithOptions (CatalogUpdate $ Just recreateEventTriggersSources) cacheInvalidations metadata inconsObjs <- scInconsistentObjs <$> askSchemaCache - pure . encJFromJValue . J.object $ - [ "message" J..= ("success" :: Text), - "is_consistent" J..= null inconsObjs - ] - <> ["inconsistent_objects" J..= inconsObjs | not (null inconsObjs)] + pure + . encJFromJValue + . J.object + $ [ "message" J..= ("success" :: Text), + "is_consistent" J..= null inconsObjs + ] + <> ["inconsistent_objects" J..= inconsObjs | not (null inconsObjs)] runDumpInternalState :: (QErrM m, CacheRM m) => @@ -643,11 +652,11 @@ runGetInconsistentMetadata _ = do formatInconsistentObjs :: [InconsistentMetadata] -> MetadataWarnings -> J.Value formatInconsistentObjs inconsObjs metadataWarnings = - J.object $ - [ "is_consistent" J..= null inconsObjs, - "inconsistent_objects" J..= inconsObjs - ] - <> ["warnings" J..= metadataWarnings | not (null metadataWarnings)] + J.object + $ [ "is_consistent" J..= null inconsObjs, + "inconsistent_objects" J..= inconsObjs + ] + <> ["warnings" J..= metadataWarnings | not (null metadataWarnings)] runDropInconsistentMetadata :: (QErrM m, CacheRWM m, MetadataM m) => @@ -668,8 +677,8 @@ runDropInconsistentMetadata _ = do -- are only those which are not droppable newInconsistentObjects <- scInconsistentObjs <$> askSchemaCache let droppableInconsistentObjects = filter droppableInconsistentMetadata newInconsistentObjects - unless (null droppableInconsistentObjects) $ - throwError + unless (null droppableInconsistentObjects) + $ throwError (err400 Unexpected "cannot continue due to new inconsistent metadata") { qeInternal = Just $ ExtraInternal $ J.toJSON newInconsistentObjects } @@ -691,9 +700,9 @@ purgeMetadataObj = \case MOInheritedRole role -> dropInheritedRoleInMetadata role MOQueryCollectionsQuery cName lq -> dropListedQueryFromQueryCollections cName lq MODataConnectorAgent agentName -> - MetadataModifier $ - metaBackendConfigs - %~ BackendMap.modify @'DataConnector (BackendConfigWrapper . Map.delete agentName . unBackendConfigWrapper) + MetadataModifier + $ metaBackendConfigs + %~ BackendMap.modify @'DataConnector (BackendConfigWrapper . Map.delete agentName . 
unBackendConfigWrapper) MOOpenTelemetry subobject -> case subobject of OtelSubobjectAll -> @@ -710,21 +719,22 @@ purgeMetadataObj = \case SMOFunctionPermission qf rn -> dropFunctionPermissionInMetadata @b source qf rn SMONativeQuery nq -> dropNativeQueryInMetadata @b source nq SMONativeQueryObj nativeQueryName nativeQueryMetadataObjId -> - MetadataModifier $ - nativeQueryMetadataSetter @b source nativeQueryName - %~ case nativeQueryMetadataObjId of - NQMORel rn _ -> dropNativeQueryRelationshipInMetadata rn + MetadataModifier + $ nativeQueryMetadataSetter @b source nativeQueryName + %~ case nativeQueryMetadataObjId of + NQMORel rn _ -> dropNativeQueryRelationshipInMetadata rn SMOStoredProcedure sp -> dropStoredProcedureInMetadata @b source sp SMOLogicalModel lm -> dropLogicalModelInMetadata @b source lm SMOLogicalModelObj logicalModelName logicalModelMetadataObjId -> - MetadataModifier $ - logicalModelMetadataSetter @b source logicalModelName - %~ case logicalModelMetadataObjId of - LMMOPerm roleName permType -> - dropLogicalModelPermissionInMetadata roleName permType + MetadataModifier + $ logicalModelMetadataSetter @b source logicalModelName + %~ case logicalModelMetadataObjId of + LMMOPerm roleName permType -> + dropLogicalModelPermissionInMetadata roleName permType SMOTableObj qt tableObj -> - MetadataModifier $ - tableMetadataSetter @b source qt %~ case tableObj of + MetadataModifier + $ tableMetadataSetter @b source qt + %~ case tableObj of MTORel rn _ -> dropRelationshipInMetadata rn MTOPerm rn pt -> dropPermissionInMetadata rn pt MTOTrigger trn -> dropEventTriggerInMetadata trn @@ -783,20 +793,22 @@ runSetMetricsConfig :: MetricsConfig -> m EncJSON runSetMetricsConfig mc = do - withNewInconsistentObjsCheck $ - buildSchemaCache $ - MetadataModifier $ - metaMetricsConfig .~ mc + withNewInconsistentObjsCheck + $ buildSchemaCache + $ MetadataModifier + $ metaMetricsConfig + .~ mc pure successMsg runRemoveMetricsConfig :: (MonadIO m, CacheRWM m, MetadataM m, MonadError QErr m) => m EncJSON runRemoveMetricsConfig = do - withNewInconsistentObjsCheck $ - buildSchemaCache $ - MetadataModifier $ - metaMetricsConfig .~ emptyMetricsConfig + withNewInconsistentObjsCheck + $ buildSchemaCache + $ MetadataModifier + $ metaMetricsConfig + .~ emptyMetricsConfig pure successMsg data TestTransformError @@ -832,9 +844,9 @@ runTestWebhookTransform (TestWebhookTransform env headers urlE payload rt _ sv) -- NOTE: In the following case we have failed before producing a valid request. Left (RequestInitializationError err) -> let errorBundle = - TransformErrorBundle $ - pure $ - J.object ["error_code" J..= J.String "Request Initialization Error", "message" J..= J.String (tshow err)] + TransformErrorBundle + $ pure + $ J.object ["error_code" J..= J.String "Request Initialization Error", "message" J..= J.String (tshow err)] in throw400WithDetail ValidationFailed "request transform validation failed" $ J.toJSON errorBundle interpolateFromEnv :: (MonadError QErr m) => Env.Environment -> Text -> m Text @@ -845,13 +857,13 @@ interpolateFromEnv env url = let lookup' var = maybe (Left var) (Right . T.pack) $ Env.lookupEnv env (T.unpack var) result = traverse (fmap indistinct . bitraverse lookup' pure) xs err e = - throwError $ - err400 NotFound $ - "Missing Env Var: " - <> e - <> ". For security reasons when testing request options real environment variable values are not available. Please enter a mock value for " - <> e - <> " in the Sample Env Variables list. 
See https://hasura.io/docs/latest/graphql/core/actions/rest-connectors/#action-transforms-sample-context" + throwError + $ err400 NotFound + $ "Missing Env Var: " + <> e + <> ". For security reasons when testing request options real environment variable values are not available. Please enter a mock value for " + <> e + <> " in the Sample Env Variables list. See https://hasura.io/docs/latest/graphql/core/actions/rest-connectors/#action-transforms-sample-context" in either err (pure . fold) result -- | Deserialize a JSON or X-WWW-URL-FORMENCODED body from an @@ -883,8 +895,9 @@ indistinct = either id id packTransformResult :: (MonadError QErr m) => Either TransformErrorBundle HTTP.Request -> m EncJSON packTransformResult = \case Right req -> - pure . encJFromJValue $ - J.object + pure + . encJFromJValue + $ J.object [ "webhook_url" J..= (req ^. HTTP.url), "method" J..= (req ^. HTTP.method), "headers" J..= (first CI.foldedCase <$> (req ^. HTTP.headers)), diff --git a/server/src-lib/Hasura/RQL/DDL/Metadata/Types.hs b/server/src-lib/Hasura/RQL/DDL/Metadata/Types.hs index 0b5a30f2bea..6ea00f07fa5 100644 --- a/server/src-lib/Hasura/RQL/DDL/Metadata/Types.hs +++ b/server/src-lib/Hasura/RQL/DDL/Metadata/Types.hs @@ -138,10 +138,18 @@ instance J.ToJSON ReloadMetadata where instance FromJSON ReloadMetadata where parseJSON = J.withObject "ReloadMetadata" $ \o -> ReloadMetadata - <$> o .:? "reload_remote_schemas" .!= reloadAllRemoteSchemas - <*> o .:? "reload_sources" .!= reloadAllSources - <*> o .:? "recreate_event_triggers" .!= RSReloadList mempty - <*> o .:? "reload_data_connectors" .!= reloadAllDataConnectors + <$> o + .:? "reload_remote_schemas" + .!= reloadAllRemoteSchemas + <*> o + .:? "reload_sources" + .!= reloadAllSources + <*> o + .:? "recreate_event_triggers" + .!= RSReloadList mempty + <*> o + .:? "reload_data_connectors" + .!= reloadAllDataConnectors -- | Undocumented Metadata API action which serializes the entire -- 'SchemaCache'. @@ -192,8 +200,9 @@ data AllowInconsistentMetadata instance FromJSON AllowInconsistentMetadata where parseJSON = - J.withBool "AllowInconsistentMetadata" $ - pure . bool NoAllowInconsistentMetadata AllowInconsistentMetadata + J.withBool "AllowInconsistentMetadata" + $ pure + . bool NoAllowInconsistentMetadata AllowInconsistentMetadata instance ToJSON AllowInconsistentMetadata where toJSON = J.toJSON . toBool @@ -232,9 +241,14 @@ data ReplaceMetadataV2 = ReplaceMetadataV2 instance FromJSON ReplaceMetadataV2 where parseJSON = J.withObject "ReplaceMetadataV2" $ \o -> ReplaceMetadataV2 - <$> o .:? "allow_inconsistent_metadata" .!= NoAllowInconsistentMetadata - <*> o .:? "allow_warnings" .!= AllowWarnings - <*> o .: "metadata" + <$> o + .:? "allow_inconsistent_metadata" + .!= NoAllowInconsistentMetadata + <*> o + .:? 
"allow_warnings" + .!= AllowWarnings + <*> o + .: "metadata" instance ToJSON ReplaceMetadataV2 where toJSON ReplaceMetadataV2 {..} = diff --git a/server/src-lib/Hasura/RQL/DDL/Network.hs b/server/src-lib/Hasura/RQL/DDL/Network.hs index 07af3d077ef..8e564cd9526 100644 --- a/server/src-lib/Hasura/RQL/DDL/Network.hs +++ b/server/src-lib/Hasura/RQL/DDL/Network.hs @@ -30,15 +30,21 @@ runAddHostToTLSAllowlist tlsAllowListEntry@TlsAllow {..} = do when (checkForHostWithSuffixInTLSAllowlist taHost taSuffix (tlsList networkMetadata)) $ do case taSuffix of Nothing -> - throw400 AlreadyExists $ - "the host " <> dquote (pack taHost) <> " already exists in the allowlist" + throw400 AlreadyExists + $ "the host " + <> dquote (pack taHost) + <> " already exists in the allowlist" Just suffix -> - throw400 AlreadyExists $ - "the host " <> dquote (pack taHost) <> " with suffix " <> dquote (pack suffix) <> " already exists in the allowlist" + throw400 AlreadyExists + $ "the host " + <> dquote (pack taHost) + <> " with suffix " + <> dquote (pack suffix) + <> " already exists in the allowlist" - withNewInconsistentObjsCheck $ - buildSchemaCache $ - addHostToTLSAllowList tlsAllowListEntry + withNewInconsistentObjsCheck + $ buildSchemaCache + $ addHostToTLSAllowList tlsAllowListEntry pure successMsg where @@ -57,15 +63,21 @@ runDropHostFromTLSAllowlist (DropHostFromTLSAllowlist hostname maybeSuffix) = do unless (checkForHostWithSuffixInTLSAllowlist hostname maybeSuffix (networkTlsAllowlist networkMetadata)) $ do case maybeSuffix of Nothing -> - throw400 NotExists $ - "the host " <> dquote (pack hostname) <> " isn't present in the allowlist" + throw400 NotExists + $ "the host " + <> dquote (pack hostname) + <> " isn't present in the allowlist" Just suffix -> - throw400 NotExists $ - "the host " <> dquote (pack hostname) <> " with suffix " <> dquote (pack suffix) <> " isn't present in the allowlist" + throw400 NotExists + $ "the host " + <> dquote (pack hostname) + <> " with suffix " + <> dquote (pack suffix) + <> " isn't present in the allowlist" - withNewInconsistentObjsCheck $ - buildSchemaCache $ - dropHostFromAllowList hostname maybeSuffix + withNewInconsistentObjsCheck + $ buildSchemaCache + $ dropHostFromAllowList hostname maybeSuffix pure successMsg diff --git a/server/src-lib/Hasura/RQL/DDL/OpenTelemetry.hs b/server/src-lib/Hasura/RQL/DDL/OpenTelemetry.hs index 549919197c7..fbbdf1bf512 100644 --- a/server/src-lib/Hasura/RQL/DDL/OpenTelemetry.hs +++ b/server/src-lib/Hasura/RQL/DDL/OpenTelemetry.hs @@ -30,10 +30,11 @@ runSetOpenTelemetryConfig :: OpenTelemetryConfig -> m EncJSON runSetOpenTelemetryConfig otelConfig = do - withNewInconsistentObjsCheck $ - buildSchemaCacheFor (MOOpenTelemetry OtelSubobjectAll) $ - MetadataModifier $ - metaOpenTelemetryConfig .~ otelConfig + withNewInconsistentObjsCheck + $ buildSchemaCacheFor (MOOpenTelemetry OtelSubobjectAll) + $ MetadataModifier + $ metaOpenTelemetryConfig + .~ otelConfig pure successMsg -- | Set just the "status" field of the OpenTelemetry configuration. @@ -42,10 +43,12 @@ runSetOpenTelemetryStatus :: OtelStatus -> m EncJSON runSetOpenTelemetryStatus otelStatus = do - withNewInconsistentObjsCheck $ - buildSchemaCacheFor (MOOpenTelemetry OtelSubobjectAll) $ - MetadataModifier $ - metaOpenTelemetryConfig . ocStatus .~ otelStatus + withNewInconsistentObjsCheck + $ buildSchemaCacheFor (MOOpenTelemetry OtelSubobjectAll) + $ MetadataModifier + $ metaOpenTelemetryConfig + . ocStatus + .~ otelStatus pure successMsg -- | Smart constructor for 'OtelExporterInfo'. 
@@ -72,24 +75,24 @@ parseOtelExporterConfig otelStatus env OtelExporterConfig {..} = do OtelEnabled -> Left (err400 InvalidParams "Missing traces endpoint") Just rawTracesEndpoint -> do tracesUri <- - maybeToEither (err400 InvalidParams "Invalid URL") $ - parseURI $ - Text.unpack rawTracesEndpoint + maybeToEither (err400 InvalidParams "Invalid URL") + $ parseURI + $ Text.unpack rawTracesEndpoint uriRequest <- first (err400 InvalidParams . tshow) $ requestFromURI tracesUri - pure $ - Just $ - OtelExporterInfo - { _oteleiTracesBaseRequest = - uriRequest - { requestHeaders = headers ++ requestHeaders uriRequest - }, - _oteleiResourceAttributes = - Map.fromList $ - map - (\NameValue {nv_name, nv_value} -> (nv_name, nv_value)) - _oecResourceAttributes - } + pure + $ Just + $ OtelExporterInfo + { _oteleiTracesBaseRequest = + uriRequest + { requestHeaders = headers ++ requestHeaders uriRequest + }, + _oteleiResourceAttributes = + Map.fromList + $ map + (\NameValue {nv_name, nv_value} -> (nv_name, nv_value)) + _oecResourceAttributes + } -- Smart constructor. Consistent with defaults. parseOtelBatchSpanProcessorConfig :: diff --git a/server/src-lib/Hasura/RQL/DDL/Permission.hs b/server/src-lib/Hasura/RQL/DDL/Permission.hs index 5c60fe95ce5..56e60924614 100644 --- a/server/src-lib/Hasura/RQL/DDL/Permission.hs +++ b/server/src-lib/Hasura/RQL/DDL/Permission.hs @@ -189,24 +189,28 @@ procSetObj :: Maybe (ColumnValues b Value) -> m (PreSetColsPartial b, [Text], Seq SchemaDependency) procSetObj source tn fieldInfoMap mObj = do - (setColTups, deps) <- withPathK "set" $ - fmap unzip $ - forM (HashMap.toList setObj) $ \(pgCol, val) -> do - ty <- - askColumnType fieldInfoMap pgCol $ - "column " <> pgCol <<> " not found in table " <>> tn - sqlExp <- parseCollectableType (CollectableTypeScalar ty) val - let dep = mkColDep @b (getDepReason sqlExp) source tn pgCol - return ((pgCol, sqlExp), dep) + (setColTups, deps) <- withPathK "set" + $ fmap unzip + $ forM (HashMap.toList setObj) + $ \(pgCol, val) -> do + ty <- + askColumnType fieldInfoMap pgCol + $ "column " + <> pgCol + <<> " not found in table " + <>> tn + sqlExp <- parseCollectableType (CollectableTypeScalar ty) val + let dep = mkColDep @b (getDepReason sqlExp) source tn pgCol + return ((pgCol, sqlExp), dep) return (HashMap.fromList setColTups, depHeaders, Seq.fromList deps) where setObj = fromMaybe mempty mObj depHeaders = - getDepHeadersFromVal $ - Object $ - KM.fromList $ - map (first (K.fromText . toTxt)) $ - HashMap.toList setObj + getDepHeadersFromVal + $ Object + $ KM.fromList + $ map (first (K.fromText . toTxt)) + $ HashMap.toList setObj getDepReason = bool DRSessionVariable DROnType . 
isStaticValue @@ -289,10 +293,10 @@ runCreatePerm (CreatePerm (WithTable source tableName permissionDefn)) = do ptText = permTypeToCode permissionType role = _pdRole permissionDefn metadataObject = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOTableObj @b tableName $ - MTOPerm role permissionType + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOTableObj @b tableName + $ MTOPerm role permissionType -- NOTE: we check if a permission exists for a `(table, role)` entity in the metadata -- and not in the `RolePermInfoMap b` because there may exist a permission for the `role` @@ -301,12 +305,17 @@ runCreatePerm (CreatePerm (WithTable source tableName permissionDefn)) = do -- The metadata will not contain the permissions for the admin role, -- because the graphql-engine automatically creates the role and it's -- assumed that the admin role is an implicit role of the graphql-engine. - when (doesPermissionExistInMetadata tableMetadata role permissionType || role == adminRoleName) $ - throw400 AlreadyExists $ - ptText <> " permission already defined on table " <> tableName <<> " with role " <>> role - buildSchemaCacheFor metadataObject $ - MetadataModifier $ - tableMetadataSetter @b source tableName %~ addPermissionToMetadata permissionDefn + when (doesPermissionExistInMetadata tableMetadata role permissionType || role == adminRoleName) + $ throw400 AlreadyExists + $ ptText + <> " permission already defined on table " + <> tableName + <<> " with role " + <>> role + buildSchemaCacheFor metadataObject + $ MetadataModifier + $ tableMetadataSetter @b source tableName + %~ addPermissionToMetadata permissionDefn pure successMsg runDropPerm :: @@ -320,10 +329,11 @@ runDropPerm permType (DropPerm source table role) = do unless (doesPermissionExistInMetadata tableMetadata role permType) $ do let errMsg = permTypeToCode permType <> " permission on " <> table <<> " for role " <> role <<> " does not exist" throw400 PermissionDenied errMsg - withNewInconsistentObjsCheck $ - buildSchemaCache $ - MetadataModifier $ - tableMetadataSetter @b source table %~ dropPermissionInMetadata role permType + withNewInconsistentObjsCheck + $ buildSchemaCache + $ MetadataModifier + $ tableMetadataSetter @b source table + %~ dropPermissionInMetadata role permType return successMsg buildInsPermInfo :: @@ -342,18 +352,19 @@ buildInsPermInfo source tn fieldInfoMap (InsPerm checkCond set mCols backendOnly withPathK "permission" $ do (be, beDeps) <- withPathK "check" $ procBoolExp source tn fieldInfoMap checkCond (setColsSQL, setHdrs, setColDeps) <- procSetObj source tn fieldInfoMap set - void $ - withPathK "columns" $ do + void + $ withPathK "columns" + $ do indexedForM insCols $ \col -> do -- Check that all columns specified do in fact exist and are columns _ <- askColumnType fieldInfoMap col relInInsErr -- Check that the column is insertable ci <- askColInfo fieldInfoMap col "" - unless (_cmIsInsertable $ ciMutability ci) $ - throw500 + unless (_cmIsInsertable $ ciMutability ci) + $ throw500 ( "Column " <> col - <<> " is not insertable and so cannot have insert permissions defined" + <<> " is not insertable and so cannot have insert permissions defined" ) let fltrHeaders = getDependentHeaders checkCond @@ -403,14 +414,14 @@ validateAllowedRootFields sourceName tableName roleName SelPerm {..} = do ARFAllowConfiguredRootFields allowedRootFields -> rootField `HS.member` allowedRootFields pkValidationError = - throw400 ValidationFailed $ - "The \"select_by_pk\" field cannot be included in the query_root_fields or 
subscription_root_fields"
-            <> " because the role "
-            <> roleName
-            <<> " does not have access to the primary key of the table "
-            <> tableName
-            <<> " in the source "
-            <>> sourceName
+        throw400 ValidationFailed
+          $ "The \"select_by_pk\" field cannot be included in the query_root_fields or subscription_root_fields"
+          <> " because the role "
+          <> roleName
+          <<> " does not have access to the primary key of the table "
+          <> tableName
+          <<> " in the source "
+          <>> sourceName
     validatePrimaryKeyRootField TableCoreInfo {..} =
       case _tciPrimaryKey of
         Nothing -> pkValidationError
@@ -423,10 +434,10 @@ validateAllowedRootFields sourceName tableName roleName SelPerm {..} = do
         unless (all ((`HS.member` selPermCols) . ciColumn) pkCols) pkValidationError

     validateAggregationRootField =
-      unless spAllowAggregations $
-        throw400 ValidationFailed $
-          "The \"select_aggregate\" root field can only be enabled in the query_root_fields or "
-            <> " the subscription_root_fields when \"allow_aggregations\" is set to true"
+      unless spAllowAggregations
+        $ throw400 ValidationFailed
+        $ "The \"select_aggregate\" root field can only be enabled in the query_root_fields or "
+        <> " the subscription_root_fields when \"allow_aggregations\" is set to true"

 -- | Given the native query's definition and the permissions as defined in the
 -- native query's metadata, try to construct the @SELECT@ permission
@@ -451,8 +462,8 @@ buildLogicalModelSelPermInfo source logicalModelName logicalModelFieldMap sp = w
   -- TODO: do row permissions work on non-scalar fields? Going to assume not and
   -- filter out the non-scalars.
   (spiFilter, boolExpDeps) <-
-    withPathK "filter" $
-      procLogicalModelBoolExp source logicalModelName (logicalModelFieldsToFieldInfo logicalModelFieldMap) (spFilter sp)
+    withPathK "filter"
+      $ procLogicalModelBoolExp source logicalModelName (logicalModelFieldsToFieldInfo logicalModelFieldMap) (spFilter sp)

   let -- What parts of the metadata are interesting when computing the
       -- permissions? These dependencies bubble all the way up to
@@ -463,8 +474,8 @@ buildLogicalModelSelPermInfo source logicalModelName logicalModelFieldMap sp = w
         mconcat
          [ Seq.singleton (mkLogicalModelParentDep @b source logicalModelName),
            boolExpDeps,
-            fmap (mkLogicalModelColDep @b DRUntyped source logicalModelName) $
-              Seq.fromList columns
+            fmap (mkLogicalModelColDep @b DRUntyped source logicalModelName)
+              $ Seq.fromList columns
          ]

       -- What headers are required in order to evaluate a given permission?
For @@ -520,30 +531,32 @@ buildSelPermInfo source tableName fieldInfoMap roleName sp = withPathK "permissi let pgCols = interpColSpec (structuredColumnInfoColumn <$> getCols fieldInfoMap) $ spColumns sp (spiFilter, boolExpDeps) <- - withPathK "filter" $ - procBoolExp source tableName fieldInfoMap $ - spFilter sp + withPathK "filter" + $ procBoolExp source tableName fieldInfoMap + $ spFilter sp -- check if the columns exist - void $ - withPathK "columns" $ - indexedForM pgCols $ \pgCol -> - askColumnType fieldInfoMap pgCol autoInferredErr + void + $ withPathK "columns" + $ indexedForM pgCols + $ \pgCol -> + askColumnType fieldInfoMap pgCol autoInferredErr -- validate computed fields validComputedFields <- - withPathK "computed_fields" $ - indexedForM computedFields $ \fieldName -> do + withPathK "computed_fields" + $ indexedForM computedFields + $ \fieldName -> do computedFieldInfo <- askComputedFieldInfo fieldInfoMap fieldName case computedFieldReturnType @b (_cfiReturnType computedFieldInfo) of ReturnsScalar _ -> pure fieldName ReturnsTable returnTable -> - throw400 NotSupported $ - "select permissions on computed field " - <> fieldName - <<> " are auto-derived from the permissions on its returning table " - <> returnTable - <<> " and cannot be specified manually" + throw400 NotSupported + $ "select permissions on computed field " + <> fieldName + <<> " are auto-derived from the permissions on its returning table " + <> returnTable + <<> " and cannot be specified manually" ReturnsOthers -> pure fieldName let deps = @@ -555,8 +568,8 @@ buildSelPermInfo source tableName fieldInfoMap roleName sp = withPathK "permissi spiLimit = spLimit sp withPathK "limit" $ for_ spiLimit \value -> - when (value < 0) $ - throw400 NotSupported "unexpected negative value" + when (value < 0) + $ throw400 NotSupported "unexpected negative value" let spiCols = HashMap.fromList $ map (,Nothing) pgCols spiComputedFields = HS.toMap (HS.fromList validComputedFields) $> Nothing @@ -584,27 +597,28 @@ buildUpdPermInfo :: m (WithDeps (UpdPermInfo b)) buildUpdPermInfo source tn fieldInfoMap (UpdPerm colSpec set fltr check backendOnly) = do (be, beDeps) <- - withPathK "filter" $ - procBoolExp source tn fieldInfoMap fltr + withPathK "filter" + $ procBoolExp source tn fieldInfoMap fltr checkExpr <- traverse (withPathK "check" . 
procBoolExp source tn fieldInfoMap) check (setColsSQL, setHeaders, setColDeps) <- procSetObj source tn fieldInfoMap set -- check if the columns exist - void $ - withPathK "columns" $ - indexedForM updCols $ \updCol -> do - -- Check that all columns specified do in fact exist and are columns - _ <- askColumnType fieldInfoMap updCol relInUpdErr - -- Check that the column is updatable - ci <- askColInfo fieldInfoMap updCol "" - unless (_cmIsUpdatable $ ciMutability ci) $ - throw500 - ( "Column " - <> updCol - <<> " is not updatable and so cannot have update permissions defined" - ) + void + $ withPathK "columns" + $ indexedForM updCols + $ \updCol -> do + -- Check that all columns specified do in fact exist and are columns + _ <- askColumnType fieldInfoMap updCol relInUpdErr + -- Check that the column is updatable + ci <- askColInfo fieldInfoMap updCol "" + unless (_cmIsUpdatable $ ciMutability ci) + $ throw500 + ( "Column " + <> updCol + <<> " is not updatable and so cannot have update permissions defined" + ) let updColDeps = mkColDep @b DRUntyped source tn <$> updCols deps = mkParentDep @b source tn Seq.:<| beDeps <> maybe mempty snd checkExpr <> Seq.fromList updColDeps <> setColDeps @@ -632,8 +646,8 @@ buildDelPermInfo :: m (WithDeps (DelPermInfo b)) buildDelPermInfo source tn fieldInfoMap (DelPerm fltr backendOnly) = do (be, beDeps) <- - withPathK "filter" $ - procBoolExp source tn fieldInfoMap fltr + withPathK "filter" + $ procBoolExp source tn fieldInfoMap fltr let deps = mkParentDep @b source tn Seq.:<| beDeps depHeaders = getDependentHeaders fltr return (DelPermInfo tn be backendOnly depHeaders, deps) @@ -649,11 +663,17 @@ data SetPermComment b = SetPermComment instance (Backend b) => FromJSON (SetPermComment b) where parseJSON = withObject "SetPermComment" $ \o -> SetPermComment - <$> o .:? "source" .!= defaultSource - <*> o .: "table" - <*> o .: "role" - <*> o .: "permission" - <*> o .:? "comment" + <$> o + .:? "source" + .!= defaultSource + <*> o + .: "table" + <*> o + .: "role" + <*> o + .: "permission" + <*> o + .:? "comment" runSetPermComment :: forall b m. @@ -679,11 +699,12 @@ runSetPermComment (SetPermComment source table roleName permType comment) = do pure $ tmDeletePermissions . ix roleName . 
pdComment .~ comment let metadataObject = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOTableObj @b table $ - MTOPerm roleName permType - buildSchemaCacheFor metadataObject $ - MetadataModifier $ - tableMetadataSetter @b source table %~ permModifier + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOTableObj @b table + $ MTOPerm roleName permType + buildSchemaCacheFor metadataObject + $ MetadataModifier + $ tableMetadataSetter @b source table + %~ permModifier pure successMsg diff --git a/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs b/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs index bdd9f5fbfe4..85273f04127 100644 --- a/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs +++ b/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs @@ -61,16 +61,16 @@ assertPermDefined :: TableInfo backend -> m () assertPermDefined role pt tableInfo = - unless (any (permissionIsDefined pt) rpi) $ - throw400 PermissionDenied $ - "'" - <> tshow pt - <> "'" - <> " permission on " - <> tableInfoName tableInfo - <<> " for role " - <> role - <<> " does not exist" + unless (any (permissionIsDefined pt) rpi) + $ throw400 PermissionDenied + $ "'" + <> tshow pt + <> "'" + <> " permission on " + <> tableInfoName tableInfo + <<> " for role " + <> role + <<> " does not exist" where rpi = HashMap.lookup role $ _tiRolePermInfoMap tableInfo @@ -99,9 +99,9 @@ procBoolExp source tn fieldInfoMap be = do let rhsParser = BoolExpRHSParser parseCollectableType PSESession rootFieldInfoMap <- - fmap _tciFieldInfoMap $ - lookupTableCoreInfo tn - `onNothingM` throw500 ("unexpected: " <> tn <<> " doesn't exist") + fmap _tciFieldInfoMap + $ lookupTableCoreInfo tn + `onNothingM` throw500 ("unexpected: " <> tn <<> " doesn't exist") abe <- annBoolExp rhsParser rootFieldInfoMap fieldInfoMap $ unBoolExp be let deps = getBoolExpDeps source tn abe @@ -189,8 +189,8 @@ annColExp rhsParser rootFieldInfoMap colInfoMap (ColExp fieldName colVal) = do relBoolExp <- decodeValue colVal relFieldInfoMap <- askFieldInfoMapSource rhsTableName annRelBoolExp <- annBoolExp rhsParser rootFieldInfoMap relFieldInfoMap $ unBoolExp relBoolExp - return $ - AVRelationship + return + $ AVRelationship relInfo ( RelationshipFilters { -- Note that we do not include the permissions of the target table, since @@ -232,6 +232,10 @@ data DropPerm b = DropPerm instance (Backend b) => FromJSON (DropPerm b) where parseJSON = withObject "DropPerm" $ \o -> DropPerm - <$> o .:? "source" .!= defaultSource - <*> o .: "table" - <*> o .: "role" + <$> o + .:? "source" + .!= defaultSource + <*> o + .: "table" + <*> o + .: "role" diff --git a/server/src-lib/Hasura/RQL/DDL/QueryCollection.hs b/server/src-lib/Hasura/RQL/DDL/QueryCollection.hs index 7c3d24727c6..bd895b95bcc 100644 --- a/server/src-lib/Hasura/RQL/DDL/QueryCollection.hs +++ b/server/src-lib/Hasura/RQL/DDL/QueryCollection.hs @@ -30,11 +30,11 @@ addCollectionP2 :: CollectionDef -> m () addCollectionP2 (CollectionDef queryList) = - withPathK "queries" $ - unless (null duplicateNames) $ - throw400 NotSupported $ - "found duplicate query names " - <> dquoteList (unNonEmptyText . unQueryName <$> toList duplicateNames) + withPathK "queries" + $ unless (null duplicateNames) + $ throw400 NotSupported + $ "found duplicate query names " + <> dquoteList (unNonEmptyText . 
unQueryName <$> toList duplicateNames) where duplicateNames = duplicates $ map _lqName queryList @@ -44,16 +44,19 @@ runCreateCollection :: m EncJSON runCreateCollection cc = do collDetM <- getCollectionDefM collName - withPathK "name" $ - for_ collDetM $ - const $ - throw400 AlreadyExists $ - "query collection with name " <> collName <<> " already exists" + withPathK "name" + $ for_ collDetM + $ const + $ throw400 AlreadyExists + $ "query collection with name " + <> collName + <<> " already exists" withPathK "definition" $ addCollectionP2 def - withNewInconsistentObjsCheck $ - buildSchemaCache $ - MetadataModifier $ - metaQueryCollections %~ InsOrdHashMap.insert collName cc + withNewInconsistentObjsCheck + $ buildSchemaCache + $ MetadataModifier + $ metaQueryCollections + %~ InsOrdHashMap.insert collName cc return successMsg where CreateCollection collName def _ = cc @@ -65,15 +68,18 @@ runRenameCollection :: runRenameCollection (RenameCollection oldName newName) = do _ <- getCollectionDef oldName newCollDefM <- getCollectionDefM newName - withPathK "new_name" $ - for_ newCollDefM $ - const $ - throw400 AlreadyExists $ - "query collection with name " <> newName <<> " already exists" - withNewInconsistentObjsCheck $ - buildSchemaCache $ - MetadataModifier $ - metaQueryCollections %~ changeCollectionName oldName newName + withPathK "new_name" + $ for_ newCollDefM + $ const + $ throw400 AlreadyExists + $ "query collection with name " + <> newName + <<> " already exists" + withNewInconsistentObjsCheck + $ buildSchemaCache + $ MetadataModifier + $ metaQueryCollections + %~ changeCollectionName oldName newName return successMsg where changeCollectionName :: CollectionName -> CollectionName -> QueryCollections -> QueryCollections @@ -91,16 +97,18 @@ runAddQueryToCollection (AddQueryToCollection collName queryName query) = do (CreateCollection _ (CollectionDef qList) comment) <- getCollectionDef collName let queryExists = flip any qList $ \q -> _lqName q == queryName - when queryExists $ - throw400 AlreadyExists $ - "query with name " - <> queryName <<> " already exists in collection " <>> collName + when queryExists + $ throw400 AlreadyExists + $ "query with name " + <> queryName + <<> " already exists in collection " + <>> collName let collDef = CollectionDef $ qList <> pure listQ - withNewInconsistentObjsCheck $ - buildSchemaCache $ - MetadataModifier $ - metaQueryCollections - %~ InsOrdHashMap.insert collName (CreateCollection collName collDef comment) + withNewInconsistentObjsCheck + $ buildSchemaCache + $ MetadataModifier + $ metaQueryCollections + %~ InsOrdHashMap.insert collName (CreateCollection collName collDef comment) return successMsg where listQ = ListedQuery queryName query @@ -117,16 +125,18 @@ runDropCollection (DropCollection collName cascade) = do then if not cascade then - throw400 DependencyError $ - "query collection with name " - <> collName <<> " is present in the allowlist; cannot proceed to drop. " - <> "please use cascade to confirm you wish to drop it from the allowlist as well" + throw400 DependencyError + $ "query collection with name " + <> collName + <<> " is present in the allowlist; cannot proceed to drop. 
" + <> "please use cascade to confirm you wish to drop it from the allowlist as well" else dropCollectionFromAllowlist collName else pure mempty - withNewInconsistentObjsCheck $ - buildSchemaCache $ - cascadeModifier <> MetadataModifier (metaQueryCollections %~ InsOrdHashMap.delete collName) + withNewInconsistentObjsCheck + $ buildSchemaCache + $ cascadeModifier + <> MetadataModifier (metaQueryCollections %~ InsOrdHashMap.delete collName) pure successMsg @@ -137,16 +147,21 @@ runDropQueryFromCollection :: runDropQueryFromCollection (DropQueryFromCollection collName queryName) = do CreateCollection _ (CollectionDef qList) _ <- getCollectionDef collName let queryExists = flip any qList $ \q -> _lqName q == queryName - unless queryExists $ - throw400 NotFound $ - "query with name " - <> queryName <<> " not found in collection " <>> collName + unless queryExists + $ throw400 NotFound + $ "query with name " + <> queryName + <<> " not found in collection " + <>> collName - withNewInconsistentObjsCheck $ - buildSchemaCache $ - MetadataModifier $ - metaQueryCollections . ix collName . ccDefinition . cdQueries - %~ filter ((/=) queryName . _lqName) + withNewInconsistentObjsCheck + $ buildSchemaCache + $ MetadataModifier + $ metaQueryCollections + . ix collName + . ccDefinition + . cdQueries + %~ filter ((/=) queryName . _lqName) pure successMsg runAddCollectionToAllowlist :: @@ -158,8 +173,10 @@ runAddCollectionToAllowlist entry = do allowlist <- withPathK "allowlist" fetchAllowlist case metadataAllowlistInsert entry allowlist of Left msg -> - pure . encJFromJValue . J.object $ - ["message" J..= msg] + pure + . encJFromJValue + . J.object + $ ["message" J..= msg] Right allowlist' -> do withNewInconsistentObjsCheck . buildSchemaCache $ MetadataModifier (metaAllowlist .~ allowlist') pure successMsg @@ -195,8 +212,10 @@ runUpdateScopeOfCollectionInAllowlist (UpdateScopeOfCollectionInAllowlist entry) modifier <- case metadataAllowlistUpdateScope entry al of Left err -> throw400 NotFound err Right al' -> - pure . MetadataModifier $ - metaAllowlist .~ al' + pure + . 
MetadataModifier + $ metaAllowlist + .~ al' withNewInconsistentObjsCheck $ buildSchemaCache modifier return successMsg @@ -211,9 +230,11 @@ getCollectionDef :: m CreateCollection getCollectionDef collName = do detM <- getCollectionDefM collName - onNothing detM $ - throw400 NotExists $ - "query collection with name " <> collName <<> " does not exist" + onNothing detM + $ throw400 NotExists + $ "query collection with name " + <> collName + <<> " does not exist" getCollectionDefM :: (QErrM m, MetadataM m) => @@ -222,12 +243,12 @@ getCollectionDefM :: getCollectionDefM collName = InsOrdHashMap.lookup collName <$> fetchAllCollections -fetchAllCollections :: MetadataM m => m QueryCollections +fetchAllCollections :: (MetadataM m) => m QueryCollections fetchAllCollections = _metaQueryCollections <$> getMetadata -fetchAllowlist :: MetadataM m => m MetadataAllowlist +fetchAllowlist :: (MetadataM m) => m MetadataAllowlist fetchAllowlist = _metaAllowlist <$> getMetadata -fetchAllAllowlistCollections :: MetadataM m => m [CollectionName] +fetchAllAllowlistCollections :: (MetadataM m) => m [CollectionName] fetchAllAllowlistCollections = metadataAllowlistAllCollections <$> fetchAllowlist diff --git a/server/src-lib/Hasura/RQL/DDL/Relationship.hs b/server/src-lib/Hasura/RQL/DDL/Relationship.hs index 5de1755c4c6..4d7ae6d1734 100644 --- a/server/src-lib/Hasura/RQL/DDL/Relationship.hs +++ b/server/src-lib/Hasura/RQL/DDL/Relationship.hs @@ -63,23 +63,26 @@ runCreateRelationship relType (WithTable source tableName relDef) = do let relName = _rdName relDef -- Check if any field with relationship name already exists in the table tableFields <- _tciFieldInfoMap <$> askTableCoreInfo @b source tableName - for_ (HashMap.lookup (fromRel relName) tableFields) $ - const $ - throw400 AlreadyExists $ - "field with name " <> relName <<> " already exists in table " <>> tableName + for_ (HashMap.lookup (fromRel relName) tableFields) + $ const + $ throw400 AlreadyExists + $ "field with name " + <> relName + <<> " already exists in table " + <>> tableName tableCache <- askSchemaCache >>= flip onNothing (throw400 NotFound "Could not find source.") - . unsafeTableCache source - . scSources + . unsafeTableCache source + . 
scSources let comment = _rdComment relDef metadataObj = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOTableObj @b tableName $ - MTORel relName relType + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOTableObj @b tableName + $ MTORel relName relType addRelationshipToMetadata <- case relType of ObjRel -> do value <- decodeValue $ toJSON relDef @@ -96,9 +99,10 @@ runCreateRelationship relType (WithTable source tableName relDef) = do (Right value) pure $ tmArrayRelationships %~ InsOrdHashMap.insert relName (RelDef relName (_rdUsing value) comment) - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - tableMetadataSetter @b source tableName %~ addRelationshipToMetadata + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ tableMetadataSetter @b source tableName + %~ addRelationshipToMetadata pure successMsg defaultBuildObjectRelationshipInfo :: @@ -115,10 +119,10 @@ defaultBuildObjectRelationshipInfo source foreignKeys qt (RelDef rn ru _) = case io = fromMaybe BeforeParent $ rmInsertOrder common mkDependency tableName reason col = SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b tableName $ - TOCol @b col + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b tableName + $ TOCol @b col ) reason dependencies = @@ -133,18 +137,18 @@ defaultBuildObjectRelationshipInfo source foreignKeys qt (RelDef rn ru _) = case let dependencies = Seq.fromList [ SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b qt $ - TOForeignKey @b (_cName constraint) + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b qt + $ TOForeignKey @b (_cName constraint) ) DRFkey, -- this needs to be added explicitly to handle the remote table being untracked. In this case, -- neither the using_col nor the constraint name will help. 
SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITable @b foreignTable + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITable @b foreignTable ) DRRemoteTable ] @@ -171,10 +175,10 @@ nativeQueryRelationshipSetup sourceName nativeQueryName relType (RelDef relName ( fmap ( \c -> SchemaDependency - ( SOSourceObj sourceName $ - AB.mkAnyBackend $ - SOINativeQueryObj @b nativeQueryName $ - NQOCol @b c + ( SOSourceObj sourceName + $ AB.mkAnyBackend + $ SOINativeQueryObj @b nativeQueryName + $ NQOCol @b c ) DRLeftColumn ) @@ -183,10 +187,10 @@ nativeQueryRelationshipSetup sourceName nativeQueryName relType (RelDef relName <> fmap ( \c -> SchemaDependency - ( SOSourceObj sourceName $ - AB.mkAnyBackend $ - SOINativeQueryObj @b refqt $ - NQOCol @b c + ( SOSourceObj sourceName + $ AB.mkAnyBackend + $ SOINativeQueryObj @b refqt + $ NQOCol @b c ) DRRightColumn ) @@ -208,10 +212,10 @@ defaultBuildArrayRelationshipInfo source foreignKeys qt (RelDef rn ru _) = case ( fmap ( \c -> SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b qt $ - TOCol @b c + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b qt + $ TOCol @b c ) DRLeftColumn ) @@ -220,10 +224,10 @@ defaultBuildArrayRelationshipInfo source foreignKeys qt (RelDef rn ru _) = case <> fmap ( \c -> SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b refqt $ - TOCol @b c + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b refqt + $ TOCol @b c ) DRRightColumn ) @@ -254,16 +258,16 @@ mkFkeyRel relType io source rn sourceTable remoteTable remoteColumns foreignKeys let dependencies = Seq.fromList [ SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b remoteTable $ - TOForeignKey @b (_cName constraint) + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b remoteTable + $ TOForeignKey @b (_cName constraint) ) DRRemoteFkey, SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITable @b remoteTable + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITable @b remoteTable ) DRRemoteTable ] @@ -298,10 +302,10 @@ drUsingColumnDep :: SchemaDependency drUsingColumnDep source qt col = SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b qt $ - TOCol @b col + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b qt + $ TOCol @b col ) DRUsingColumn @@ -318,10 +322,16 @@ data DropRel b = DropRel instance (Backend b) => FromJSON (DropRel b) where parseJSON = withObject "DropRel" $ \o -> DropRel - <$> o .:? "source" .!= defaultSource - <*> o .: "table" - <*> o .: "relationship" - <*> o .:? "cascade" .!= False + <$> o + .:? "source" + .!= defaultSource + <*> o + .: "table" + <*> o + .: "relationship" + <*> o + .:? "cascade" + .!= False runDropRel :: forall b m. @@ -332,10 +342,11 @@ runDropRel (DropRel source qt rn cascade) = do depObjs <- collectDependencies withNewInconsistentObjsCheck do metadataModifiers <- traverse purgeRelDep depObjs - buildSchemaCache $ - MetadataModifier $ - tableMetadataSetter @b source qt - %~ dropRelationshipInMetadata rn . foldr (.) id metadataModifiers + buildSchemaCache + $ MetadataModifier + $ tableMetadataSetter @b source qt + %~ dropRelationshipInMetadata rn + . foldr (.) 
id metadataModifiers pure successMsg where collectDependencies = do @@ -345,10 +356,10 @@ runDropRel (DropRel source qt rn cascade) = do let depObjs = getDependentObjs sc - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b qt $ - TORel rn + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b qt + $ TORel rn ) unless (null depObjs || cascade) $ reportDependentObjectsExist depObjs pure depObjs @@ -363,9 +374,9 @@ purgeRelDep (SOSourceObj _ exists) | Just (SOITableObj _ (TOPerm rn pt)) <- AB.unpackAnyBackend @b exists = pure $ dropPermissionInMetadata rn pt purgeRelDep d = - throw500 $ - "unexpected dependency of relationship: " - <> reportSchemaObj d + throw500 + $ "unexpected dependency of relationship: " + <> reportSchemaObj d -------------------------------------------------------------------------------- -- Set local relationship comment @@ -385,10 +396,15 @@ deriving instance (Backend b) => Eq (SetRelComment b) instance (Backend b) => FromJSON (SetRelComment b) where parseJSON = withObject "SetRelComment" $ \o -> SetRelComment - <$> o .:? "source" .!= defaultSource - <*> o .: "table" - <*> o .: "relationship" - <*> o .:? "comment" + <$> o + .:? "source" + .!= defaultSource + <*> o + .: "table" + <*> o + .: "relationship" + <*> o + .:? "comment" runSetRelComment :: forall m b. @@ -399,15 +415,16 @@ runSetRelComment defn = do tabInfo <- askTableCoreInfo @b source qt relType <- riType <$> askRelType (_tciFieldInfoMap tabInfo) rn "" let metadataObj = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOTableObj @b qt $ - MTORel rn relType - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - tableMetadataSetter @b source qt %~ case relType of - ObjRel -> tmObjectRelationships . ix rn . rdComment .~ comment - ArrRel -> tmArrayRelationships . ix rn . rdComment .~ comment + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOTableObj @b qt + $ MTORel rn relType + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ tableMetadataSetter @b source qt + %~ case relType of + ObjRel -> tmObjectRelationships . ix rn . rdComment .~ comment + ArrRel -> tmArrayRelationships . ix rn . rdComment .~ comment pure successMsg where SetRelComment source qt rn comment = defn diff --git a/server/src-lib/Hasura/RQL/DDL/Relationship/Rename.hs b/server/src-lib/Hasura/RQL/DDL/Relationship/Rename.hs index 96854f770fd..03f68c79d82 100644 --- a/server/src-lib/Hasura/RQL/DDL/Relationship/Rename.hs +++ b/server/src-lib/Hasura/RQL/DDL/Relationship/Rename.hs @@ -30,10 +30,15 @@ data RenameRel b = RenameRel instance (Backend b) => FromJSON (RenameRel b) where parseJSON = withObject "RenameRel" $ \o -> RenameRel - <$> o .:? "source" .!= defaultSource - <*> o .: "table" - <*> o .: "name" - <*> o .: "new_name" + <$> o + .:? "source" + .!= defaultSource + <*> o + .: "table" + <*> o + .: "name" + <*> o + .: "new_name" renameRelP2 :: forall b m. 
@@ -49,14 +54,14 @@ renameRelP2 source qt newRN relInfo = withNewInconsistentObjsCheck $ do case HashMap.lookup (fromRel newRN) $ _tciFieldInfoMap tabInfo of Nothing -> return () Just _ -> - throw400 AlreadyExists $ - "cannot rename relationship " - <> oldRN - <<> " to " - <> newRN - <<> " in table " - <> qt - <<> " as a column/relationship with the name already exists" + throw400 AlreadyExists + $ "cannot rename relationship " + <> oldRN + <<> " to " + <> newRN + <<> " in table " + <> qt + <<> " as a column/relationship with the name already exists" -- update metadata execWriterT $ renameRelationshipInMetadata @b source qt oldRN (riType relInfo) newRN where @@ -70,6 +75,7 @@ runRenameRel :: runRenameRel (RenameRel source qt rn newRN) = do tabInfo <- askTableCoreInfo @b source qt ri <- askRelType (_tciFieldInfoMap tabInfo) rn "" - withNewInconsistentObjsCheck $ - renameRelP2 source qt newRN ri >>= buildSchemaCache + withNewInconsistentObjsCheck + $ renameRelP2 source qt newRN ri + >>= buildSchemaCache pure successMsg diff --git a/server/src-lib/Hasura/RQL/DDL/Relationship/Suggest.hs b/server/src-lib/Hasura/RQL/DDL/Relationship/Suggest.hs index 660ae13fb76..cda00928a65 100644 --- a/server/src-lib/Hasura/RQL/DDL/Relationship/Suggest.hs +++ b/server/src-lib/Hasura/RQL/DDL/Relationship/Suggest.hs @@ -56,11 +56,11 @@ instance (Backend b) => HasCodec (SuggestRels b) where "SuggestRels" ( SuggestRels <$> optionalFieldWithOmittedDefault "source" defaultSource "The source to suggest relationships for - Defaults to 'default'." - .= _srsSource - <*> optionalFieldOrNull "tables" "The list of tables to suggest relationships for - Defaults to all tracked tables." - .= _srsTables - <*> optionalFieldWithOmittedDefault "omit_tracked" False "Determines if currently tracked relationships should be omitted from suggestions - Defaults to false." - .= _srsOmitTracked + .= _srsSource + <*> optionalFieldOrNull "tables" "The list of tables to suggest relationships for - Defaults to all tracked tables." + .= _srsTables + <*> optionalFieldWithOmittedDefault "omit_tracked" False "Determines if currently tracked relationships should be omitted from suggestions - Defaults to false." + .= _srsOmitTracked ) ["API call to request suggestions for relationships"] @@ -76,7 +76,7 @@ instance (Backend b) => HasCodec (SuggestedRelationships b) where "SuggestedRelationships" ( Relationships <$> requiredField' "relationships" - .= sRelationships + .= sRelationships ) data Relationship b = Relationship @@ -93,11 +93,11 @@ instance (Backend b) => HasCodec (Relationship b) where "Relationship" ( Relationship <$> requiredField' "type" - .= rType - <*> requiredField' "from" - .= rFrom - <*> requiredField' "to" - .= rTo + .= rType + <*> requiredField' "from" + .= rFrom + <*> requiredField' "to" + .= rTo ) data Mapping b = Mapping @@ -114,11 +114,11 @@ instance (Backend b) => HasCodec (Mapping b) where "Mapping" ( Mapping <$> requiredField' "table" - .= mTable - <*> requiredField' "columns" - .= mColumns - <*> optionalFieldOrNull' "constraint_name" - .= mConstraintName + .= mTable + <*> requiredField' "columns" + .= mColumns + <*> optionalFieldOrNull' "constraint_name" + .= mConstraintName ) -- | Most of the heavy lifting for this module occurs in this function. @@ -167,9 +167,9 @@ suggestRelsFK omitTracked tables name uniqueConstraints tracked predicate foreig discard b x = bool Nothing (Just x) (not b) invert = HashMap.fromList . map swap . 
HashMap.toList trackedBack = - H.fromList $ - mapMaybe (relationships (getRelationshipsInputs @b)) $ - maybe [] (HashMap.elems . _tciFieldInfoMap) relatedTable + H.fromList + $ mapMaybe (relationships (getRelationshipsInputs @b)) + $ maybe [] (HashMap.elems . _tciFieldInfoMap) relatedTable -- we're only interested in suggesting table-based relationships for now getRelationshipsInputs :: @@ -194,10 +194,10 @@ suggestRelsTable omitTracked tables predicate (name, table) = foreignKeys = _tciForeignKeys table constraints = _tciUniqueConstraints table tracked = - H.fromList $ - mapMaybe (relationships (getRelationshipsInputs @b)) $ - HashMap.elems $ - _tciFieldInfoMap table + H.fromList + $ mapMaybe (relationships (getRelationshipsInputs @b)) + $ HashMap.elems + $ _tciFieldInfoMap table relationships :: (RelInfo b1 -> Maybe b2) -> FieldInfo b1 -> Maybe b2 relationships f = (=<<) f . preview _FIRelationship @@ -211,8 +211,9 @@ suggestRelsResponse :: (TableName b -> Bool) -> SuggestedRelationships b suggestRelsResponse omitTracked tables predicate = - Relationships $ - suggestRelsTable omitTracked tables predicate =<< HashMap.toList tables + Relationships + $ suggestRelsTable omitTracked tables predicate + =<< HashMap.toList tables tablePredicate :: (Hashable a) => Maybe [a] -> a -> Bool tablePredicate Nothing _ = True diff --git a/server/src-lib/Hasura/RQL/DDL/RemoteRelationship.hs b/server/src-lib/Hasura/RQL/DDL/RemoteRelationship.hs index dcdfe3c58a5..ea1cd8f7e8d 100644 --- a/server/src-lib/Hasura/RQL/DDL/RemoteRelationship.hs +++ b/server/src-lib/Hasura/RQL/DDL/RemoteRelationship.hs @@ -126,14 +126,15 @@ runCreateRemoteRelationship :: runCreateRemoteRelationship CreateFromSourceRelationship {..} = do void $ askTableInfo @b _crrSource _crrTable let metadataObj = - MOSourceObjId _crrSource $ - AB.mkAnyBackend $ - SMOTableObj @b _crrTable $ - MTORemoteRelationship _crrName - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - tableMetadataSetter @b _crrSource _crrTable . tmRemoteRelationships - %~ InsOrdHashMap.insert _crrName (RemoteRelationship _crrName _crrDefinition) + MOSourceObjId _crrSource + $ AB.mkAnyBackend + $ SMOTableObj @b _crrTable + $ MTORemoteRelationship _crrName + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ tableMetadataSetter @b _crrSource _crrTable + . tmRemoteRelationships + %~ InsOrdHashMap.insert _crrName (RemoteRelationship _crrName _crrDefinition) pure successMsg runUpdateRemoteRelationship :: @@ -144,15 +145,16 @@ runUpdateRemoteRelationship :: runUpdateRemoteRelationship CreateFromSourceRelationship {..} = do fieldInfoMap <- askTableFieldInfoMap @b _crrSource _crrTable let metadataObj = - MOSourceObjId _crrSource $ - AB.mkAnyBackend $ - SMOTableObj @b _crrTable $ - MTORemoteRelationship _crrName + MOSourceObjId _crrSource + $ AB.mkAnyBackend + $ SMOTableObj @b _crrTable + $ MTORemoteRelationship _crrName void $ askRemoteRel fieldInfoMap _crrName - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - tableMetadataSetter @b _crrSource _crrTable . tmRemoteRelationships - %~ InsOrdHashMap.insert _crrName (RemoteRelationship _crrName _crrDefinition) + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ tableMetadataSetter @b _crrSource _crrTable + . 
tmRemoteRelationships + %~ InsOrdHashMap.insert _crrName (RemoteRelationship _crrName _crrDefinition) pure successMsg -------------------------------------------------------------------------------- @@ -168,9 +170,13 @@ data DeleteFromSourceRelationship (b :: BackendType) = DeleteFromSourceRelations instance (Backend b) => FromJSON (DeleteFromSourceRelationship b) where parseJSON = J.withObject "DeleteFromSourceRelationship" $ \o -> DeleteFromSourceRelationship - <$> o .:? "source" .!= defaultSource - <*> o .: "table" - <*> o .: "name" + <$> o + .:? "source" + .!= defaultSource + <*> o + .: "table" + <*> o + .: "name" runDeleteRemoteRelationship :: forall b m. @@ -181,13 +187,14 @@ runDeleteRemoteRelationship (DeleteFromSourceRelationship source table relName) fieldInfoMap <- askTableFieldInfoMap @b source table void $ askRemoteRel fieldInfoMap relName let metadataObj = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOTableObj @b table $ - MTORemoteRelationship relName - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - tableMetadataSetter @b source table %~ dropRemoteRelationshipInMetadata relName + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOTableObj @b table + $ MTORemoteRelationship relName + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ tableMetadataSetter @b source table + %~ dropRemoteRelationshipInMetadata relName pure successMsg -------------------------------------------------------------------------------- @@ -204,9 +211,12 @@ data CreateRemoteSchemaRemoteRelationship = CreateRemoteSchemaRemoteRelationship instance FromJSON CreateRemoteSchemaRemoteRelationship where parseJSON = J.withObject "CreateRemoteSchemaRemoteRelationship" $ \o -> CreateRemoteSchemaRemoteRelationship - <$> o .: "remote_schema" - <*> o .: "type_name" - <*> o .: "name" + <$> o + .: "remote_schema" + <*> o + .: "type_name" + <*> o + .: "name" <*> (o .: "definition" >>= parseRemoteRelationshipDefinition RRPStrict) instance J.ToJSON CreateRemoteSchemaRemoteRelationship where @@ -221,15 +231,15 @@ runCreateRemoteSchemaRemoteRelationship :: runCreateRemoteSchemaRemoteRelationship CreateRemoteSchemaRemoteRelationship {..} = do let metadataObj = MORemoteSchemaRemoteRelationship _crsrrRemoteSchema _crsrrType _crsrrName - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - metaRemoteSchemas - . ix _crsrrRemoteSchema - . rsmRemoteRelationships - . at _crsrrType - . non (RemoteSchemaTypeRelationships _crsrrType mempty) - . rstrsRelationships - %~ InsOrdHashMap.insert _crsrrName (RemoteRelationship _crsrrName _crsrrDefinition) + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ metaRemoteSchemas + . ix _crsrrRemoteSchema + . rsmRemoteRelationships + . at _crsrrType + . non (RemoteSchemaTypeRelationships _crsrrType mempty) + . rstrsRelationships + %~ InsOrdHashMap.insert _crsrrName (RemoteRelationship _crsrrName _crsrrDefinition) pure successMsg runUpdateRemoteSchemaRemoteRelationship :: @@ -246,10 +256,13 @@ runUpdateRemoteSchemaRemoteRelationship crss@CreateRemoteSchemaRemoteRelationshi . rscRemoteRelationships . ix _crsrrType . 
ix _crsrrName - void $ - onNothing remoteRelationship $ - throw400 NotExists $ - "no relationship defined on remote schema " <>> _crsrrRemoteSchema <<> " with name " <>> _crsrrName + void + $ onNothing remoteRelationship + $ throw400 NotExists + $ "no relationship defined on remote schema " + <>> _crsrrRemoteSchema + <<> " with name " + <>> _crsrrName runCreateRemoteSchemaRemoteRelationship crss -------------------------------------------------------------------------------- @@ -265,9 +278,12 @@ data DeleteRemoteSchemaRemoteRelationship = DeleteRemoteSchemaRemoteRelationship instance FromJSON DeleteRemoteSchemaRemoteRelationship where parseJSON = J.withObject "DeleteRemoteSchemaRemoteRelationship" $ \o -> DeleteRemoteSchemaRemoteRelationship - <$> o .: "remote_schema" - <*> o .: "type_name" - <*> o .: "name" + <$> o + .: "remote_schema" + <*> o + .: "type_name" + <*> o + .: "name" runDeleteRemoteSchemaRemoteRelationship :: forall m. @@ -278,10 +294,14 @@ runDeleteRemoteSchemaRemoteRelationship DeleteRemoteSchemaRemoteRelationship {.. let relName = _drsrrName metadataObj = MORemoteSchemaRemoteRelationship _drsrrRemoteSchema _drsrrTypeName relName - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - metaRemoteSchemas . ix _drsrrRemoteSchema . rsmRemoteRelationships . ix _drsrrTypeName . rstrsRelationships - %~ InsOrdHashMap.delete relName + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ metaRemoteSchemas + . ix _drsrrRemoteSchema + . rsmRemoteRelationships + . ix _drsrrTypeName + . rstrsRelationships + %~ InsOrdHashMap.delete relName pure successMsg -------------------------------------------------------------------------------- @@ -335,15 +355,18 @@ buildRemoteFieldInfo lhsIdentifier lhsJoinFields RemoteRelationship {..} allSour `onNothing` throw400 NotFound ("source not found: " <>> _tsrdSource) AB.dispatchAnyBackendWithTwoConstraints @Backend @BackendMetadata targetTables \(partiallyResolvedSource :: PartiallyResolvedSource b') -> do let PartiallyResolvedSource _ sourceConfig _ targetTablesInfo _ = partiallyResolvedSource - unless (supportsBeingRemoteRelationshipTarget @b' sourceConfig) $ - throw400 NotSupported ("source " <> sourceNameToText _tsrdSource <> " does not support being used as the target of a remote relationship") + unless (supportsBeingRemoteRelationshipTarget @b' sourceConfig) + $ throw400 NotSupported ("source " <> sourceNameToText _tsrdSource <> " does not support being used as the target of a remote relationship") (targetTable :: TableName b') <- runAesonParser J.parseJSON _tsrdTable targetColumns <- - fmap _tciFieldInfoMap $ - onNothing (HashMap.lookup targetTable targetTablesInfo) $ - throw400 NotExists $ - "table " <> targetTable <<> " does not exist in source: " <> sourceNameToText _tsrdSource + fmap _tciFieldInfoMap + $ onNothing (HashMap.lookup targetTable targetTablesInfo) + $ throw400 NotExists + $ "table " + <> targetTable + <<> " does not exist in source: " + <> sourceNameToText _tsrdSource -- TODO: rhs fields should also ideally be DBJoinFields columnPairs <- for (HashMap.toList _tsrdFieldMapping) \(srcFieldName, tgtFieldName) -> do lhsJoinField <- askFieldInfo lhsJoinFields srcFieldName @@ -356,27 +379,29 @@ buildRemoteFieldInfo lhsIdentifier lhsJoinFields RemoteRelationship {..} allSour ColumnEnumReference _ -> throw400 NotSupported "relationships to enum fields are not supported yet" pure (srcFieldName, (srcColumn, tgtScalar, ciColumn tgtColumn)) let rsri = - RemoteSourceFieldInfo _rrName _tsrdRelationshipType _tsrdSource sourceConfig 
targetTable $ - fmap (\(_, tgtType, tgtColumn) -> (tgtType, tgtColumn)) $ - HashMap.fromList columnMapping + RemoteSourceFieldInfo _rrName _tsrdRelationshipType _tsrdSource sourceConfig targetTable + $ fmap (\(_, tgtType, tgtColumn) -> (tgtType, tgtColumn)) + $ HashMap.fromList columnMapping rhsDependencies = SchemaDependency (SOSourceObj _tsrdSource $ AB.mkAnyBackend $ SOITable @b' targetTable) DRTable : flip map columnPairs \(_, _srcColumn, tgtColumn) -> SchemaDependency - ( SOSourceObj _tsrdSource $ - AB.mkAnyBackend $ - SOITableObj @b' targetTable $ - TOCol @b' $ - ciColumn tgtColumn + ( SOSourceObj _tsrdSource + $ AB.mkAnyBackend + $ SOITableObj @b' targetTable + $ TOCol @b' + $ ciColumn tgtColumn ) DRRemoteRelationship requiredLHSJoinFields = fmap (\(srcField, _, _) -> srcField) $ HashMap.fromList columnMapping pure (RemoteFieldInfo requiredLHSJoinFields $ RFISource $ AB.mkAnyBackend @b' rsri, Seq.fromList rhsDependencies) RelationshipToSchema _ remoteRelationship@ToSchemaRelationshipDef {..} -> do RemoteSchemaCtx {..} <- - onNothing (HashMap.lookup _trrdRemoteSchema remoteSchemaMap) $ - throw400 RemoteSchemaError $ - "remote schema with name " <> _trrdRemoteSchema <<> " not found" + onNothing (HashMap.lookup _trrdRemoteSchema remoteSchemaMap) + $ throw400 RemoteSchemaError + $ "remote schema with name " + <> _trrdRemoteSchema + <<> " not found" (requiredLHSJoinFields, remoteField) <- validateToSchemaRelationship remoteRelationship lhsIdentifier _rrName (_rscInfo, _rscIntroOriginal) lhsJoinFields `onLeft` (throw400 RemoteSchemaError . errorToText) @@ -391,12 +416,13 @@ getRemoteSchemaEntityJoinColumns :: m (HashMap FieldName G.Name) getRemoteSchemaEntityJoinColumns remoteSchemaName introspection typeName = do typeDefinition <- - onNothing (lookupType introspection typeName) $ - throw400 NotFound ("no type named " <> typeName <<> " defined in remote schema " <>> remoteSchemaName) + onNothing (lookupType introspection typeName) + $ throw400 NotFound ("no type named " <> typeName <<> " defined in remote schema " <>> remoteSchemaName) case typeDefinition of G.TypeDefinitionObject objectDefinition -> - pure $ - HashMap.fromList $ do + pure + $ HashMap.fromList + $ do fieldDefinition <- G._otdFieldsDefinition objectDefinition guard $ null $ G._fldArgumentsDefinition fieldDefinition pure (FieldName $ G.unName $ G._fldName fieldDefinition, G._fldName fieldDefinition) diff --git a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs index ccbcf049f12..59027ffa3da 100644 --- a/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/DDL/ScheduledTrigger.hs @@ -78,10 +78,10 @@ runCreateCronTrigger CreateCronTrigger {..} = do case HashMap.lookup (ctName q) cronTriggersMap of Nothing -> pure () Just _ -> - throw400 AlreadyExists $ - "cron trigger with name: " - <> triggerNameToTxt (ctName q) - <> " already exists" + throw400 AlreadyExists + $ "cron trigger with name: " + <> triggerNameToTxt (ctName q) + <> " already exists" let metadataObj = MOCronTrigger _cctName metadata = @@ -96,9 +96,10 @@ runCreateCronTrigger CreateCronTrigger {..} = do _cctComment _cctRequestTransform _cctResponseTransform - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - metaCronTriggers %~ InsOrdHashMap.insert _cctName metadata + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ metaCronTriggers + %~ InsOrdHashMap.insert _cctName metadata populateInitialCronTriggerEvents _cctCronSchedule _cctName return successMsg @@ -111,8 +112,8 @@ 
resolveCronTrigger env CronTriggerMetadata {..} = do webhookInfo <- resolveWebhook env ctWebhook headerInfo <- getHeaderInfosFromConf env ctHeaders let urlTemplate = printURLTemplate $ unInputWebhook ctWebhook - pure $ - CronTriggerInfo + pure + $ CronTriggerInfo ctName ctSchedule ctPayload @@ -135,9 +136,10 @@ updateCronTrigger :: updateCronTrigger cronTriggerMetadata = do let triggerName = ctName cronTriggerMetadata checkExists triggerName - buildSchemaCacheFor (MOCronTrigger triggerName) $ - MetadataModifier $ - metaCronTriggers %~ InsOrdHashMap.insert triggerName cronTriggerMetadata + buildSchemaCacheFor (MOCronTrigger triggerName) + $ MetadataModifier + $ metaCronTriggers + %~ InsOrdHashMap.insert triggerName cronTriggerMetadata liftEitherM $ dropFutureCronEvents $ SingleCronTrigger triggerName currentTime <- liftIO C.getCurrentTime let scheduleTimes = generateScheduleTimes currentTime 100 $ ctSchedule cronTriggerMetadata @@ -154,9 +156,9 @@ runDeleteCronTrigger :: m EncJSON runDeleteCronTrigger (ScheduledTriggerName stName) = do checkExists stName - withNewInconsistentObjsCheck $ - buildSchemaCache $ - dropCronTriggerInMetadata stName + withNewInconsistentObjsCheck + $ buildSchemaCache + $ dropCronTriggerInMetadata stName liftEitherM $ dropFutureCronEvents $ SingleCronTrigger stName return successMsg @@ -175,10 +177,12 @@ runCreateScheduledEvent scheduledEvent = do checkExists :: (CacheRM m, MonadError QErr m) => TriggerName -> m () checkExists name = do cronTriggersMap <- scCronTriggers <$> askSchemaCache - void $ - onNothing (HashMap.lookup name cronTriggersMap) $ - throw400 NotExists $ - "cron trigger with name: " <> triggerNameToTxt name <> " does not exist" + void + $ onNothing (HashMap.lookup name cronTriggersMap) + $ throw400 NotExists + $ "cron trigger with name: " + <> triggerNameToTxt name + <> " does not exist" runDeleteScheduledEvent :: (MonadMetadataStorage m, MonadError QErr m) => DeleteScheduledEvent -> m EncJSON @@ -213,16 +217,17 @@ runGetScheduledEventInvocations getEventInvocations@GetScheduledEventInvocations SEOneOff -> pure () SECron name -> checkExists name WithOptionalTotalCount countMaybe invocations <- liftEitherM $ fetchScheduledEventInvocations getEventInvocations - pure $ - encJFromJValue $ - J.object $ - ("invocations" J..= invocations) : (maybe mempty (\count -> ["count" J..= count]) countMaybe) + pure + $ encJFromJValue + $ J.object + $ ("invocations" J..= invocations) + : (maybe mempty (\count -> ["count" J..= count]) countMaybe) -- | Metadata API handler to retrieve all the cron triggers from the metadata -runGetCronTriggers :: MetadataM m => m EncJSON +runGetCronTriggers :: (MetadataM m) => m EncJSON runGetCronTriggers = do cronTriggers <- toList . 
_metaCronTriggers <$> getMetadata - pure $ - encJFromJValue $ - J.object - ["cron_triggers" J..= cronTriggers] + pure + $ encJFromJValue + $ J.object + ["cron_triggers" J..= cronTriggers] diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs index 16171f3f71d..6f79e32c51b 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs @@ -170,8 +170,8 @@ buildRebuildableSchemaCacheWithReason :: CacheBuild RebuildableSchemaCache buildRebuildableSchemaCacheWithReason reason logger env metadataWithVersion dynamicConfig mSchemaRegistryContext = do result <- - flip runReaderT reason $ - Inc.build (buildSchemaCacheRule logger env mSchemaRegistryContext) (metadataWithVersion, dynamicConfig, initialInvalidationKeys, Nothing) + flip runReaderT reason + $ Inc.build (buildSchemaCacheRule logger env mSchemaRegistryContext) (metadataWithVersion, dynamicConfig, initialInvalidationKeys, Nothing) pure $ RebuildableSchemaCache (Inc.result result) initialInvalidationKeys (Inc.rebuildRule result) @@ -250,9 +250,9 @@ instance newInvalidationKeys = invalidateKeys invalidations invalidationKeys storedIntrospection <- onLeftM (fetchSourceIntrospection metadataVersion) (\_ -> pure Nothing) result <- - runCacheBuildM $ - flip runReaderT buildReason $ - Inc.build rule (metadataWithVersion, dynamicConfig, newInvalidationKeys, storedIntrospection) + runCacheBuildM + $ flip runReaderT buildReason + $ Inc.build rule (metadataWithVersion, dynamicConfig, newInvalidationKeys, storedIntrospection) let schemaCache = Inc.result result prunedInvalidationKeys = pruneInvalidationKeys schemaCache newInvalidationKeys !newCache = RebuildableSchemaCache schemaCache prunedInvalidationKeys (Inc.rebuildRule result) @@ -505,8 +505,8 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou (BackendConfigWrapper b, Inc.Dependency (BackendMap BackendInvalidationKeysWrapper)) `arr` BackendCache resolveBackendInfo' = proc (backendConfigWrapper, backendInvalidationMap) -> do let backendInvalidationKeys = - Inc.selectMaybeD #unBackendInvalidationKeysWrapper $ - BackendMap.lookupD @b backendInvalidationMap + Inc.selectMaybeD #unBackendInvalidationKeysWrapper + $ BackendMap.lookupD @b backendInvalidationMap backendInfo <- resolveBackendInfo @b logger -< (backendInvalidationKeys, unBackendConfigWrapper backendConfigWrapper) returnA -< BackendMap.singleton (BackendInfoWrapper @b backendInfo) @@ -561,7 +561,8 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou withRecordInconsistency ( bindErrorA -< ExceptT $ resolveSourceConfig @b sourceName sourceConfig backendKind backendInfo env httpMgr ) - |) metadataObj + |) + metadataObj tryResolveSource :: forall b arr m. 
@@ -647,32 +648,33 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou readOnlyMode = _cscReadOnlyMode cacheStaticConfig if - -- when safe mode is enabled, don't perform any migrations - | readOnlyMode == ReadOnlyModeEnabled -> pure (RETDoNothing, SCMSMigrationOnHold "read-only mode enabled") - -- when eventing mode is disabled, don't perform any migrations - | eventingMode == EventingDisabled -> pure (RETDoNothing, SCMSMigrationOnHold "eventing mode disabled") - -- when maintenance mode is enabled, don't perform any migrations - | maintenanceMode == (MaintenanceModeEnabled ()) -> pure (RETDoNothing, SCMSMigrationOnHold "maintenance mode enabled") - | otherwise -> do - -- The `initCatalogForSource` action is retried here because - -- in cloud there will be multiple workers (graphql-engine instances) - -- trying to migrate the source catalog, when needed. This introduces - -- a race condition as both the workers try to migrate the source catalog - -- concurrently and when one of them succeeds the other ones will fail - -- and be in an inconsistent state. To avoid the inconsistency, we retry - -- migrating the catalog on error and in the retry `initCatalogForSource` - -- will see that the catalog is already migrated, so it won't attempt the - -- migration again - liftEither - =<< Retry.retrying - ( Retry.constantDelay (fromIntegral $ diffTimeToMicroSeconds $ seconds $ Seconds 10) - <> Retry.limitRetries 3 - ) - (const $ return . isLeft) - (const $ runExceptT $ prepareCatalog @b sourceConfig) + -- when safe mode is enabled, don't perform any migrations + | readOnlyMode == ReadOnlyModeEnabled -> pure (RETDoNothing, SCMSMigrationOnHold "read-only mode enabled") + -- when eventing mode is disabled, don't perform any migrations + | eventingMode == EventingDisabled -> pure (RETDoNothing, SCMSMigrationOnHold "eventing mode disabled") + -- when maintenance mode is enabled, don't perform any migrations + | maintenanceMode == (MaintenanceModeEnabled ()) -> pure (RETDoNothing, SCMSMigrationOnHold "maintenance mode enabled") + | otherwise -> do + -- The `initCatalogForSource` action is retried here because + -- in cloud there will be multiple workers (graphql-engine instances) + -- trying to migrate the source catalog, when needed. This introduces + -- a race condition as both the workers try to migrate the source catalog + -- concurrently and when one of them succeeds the other ones will fail + -- and be in an inconsistent state. To avoid the inconsistency, we retry + -- migrating the catalog on error and in the retry `initCatalogForSource` + -- will see that the catalog is already migrated, so it won't attempt the + -- migration again + liftEither + =<< Retry.retrying + ( Retry.constantDelay (fromIntegral $ diffTimeToMicroSeconds $ seconds $ Seconds 10) + <> Retry.limitRetries 3 + ) + (const $ return . 
isLeft) + (const $ runExceptT $ prepareCatalog @b sourceConfig) else pure (RETDoNothing, SCMSUninitializedSource) ) - |) (concatMap (\(tableName, events) -> map (mkEventTriggerMetadataObject' sourceName tableName) events) eventTriggers) + |) + (concatMap (\(tableName, events) -> map (mkEventTriggerMetadataObject' sourceName tableName) events) eventTriggers) case res of Nothing -> @@ -720,8 +722,8 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou -- permissions result <- interpretWriter - -< runExceptT $ - for + -< runExceptT + $ for (tableCoreInfos `alignTableMap` mapFromL _tpiTable permissions `alignTableMap` eventTriggerInfoMaps) \((tableCoreInfo, permissionInputs), eventTriggerInfos) -> do let tableFields = _tciFieldInfoMap tableCoreInfo @@ -759,15 +761,15 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou definition = TrackFunction @b qf metadataObject = MetadataObject - ( MOSourceObjId sourceName $ - AB.mkAnyBackend $ - SMOFunction @b qf + ( MOSourceObjId sourceName + $ AB.mkAnyBackend + $ SMOFunction @b qf ) (toJSON definition) schemaObject = - SOSourceObj sourceName $ - AB.mkAnyBackend $ - SOIFunction @b qf + SOSourceObj sourceName + $ AB.mkAnyBackend + $ SOIFunction @b qf addFunctionContext e = "in function " <> qf <<> ": " <> e metadataPermissions = mapFromL _fpmRole functionPermissions permissionsMap = mkBooleanPermissionMap FunctionPermissionInfo metadataPermissions orderedRoles @@ -787,9 +789,9 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou let mkLogicalModelMetadataObject :: LogicalModelMetadata b -> MetadataObject mkLogicalModelMetadataObject lmm = ( MetadataObject - ( MOSourceObjId sourceName $ - AB.mkAnyBackend $ - SMOLogicalModel @b (_lmmName lmm) + ( MOSourceObjId sourceName + $ AB.mkAnyBackend + $ SMOLogicalModel @b (_lmmName lmm) ) (toJSON lmm) ) @@ -825,39 +827,39 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou let metadataObject :: MetadataObject metadataObject = MetadataObject - ( MOSourceObjId sourceName $ - AB.mkAnyBackend $ - SMONativeQuery @b _nqmRootFieldName + ( MOSourceObjId sourceName + $ AB.mkAnyBackend + $ SMONativeQuery @b _nqmRootFieldName ) (toJSON nqm) schemaObjId :: SchemaObjId schemaObjId = - SOSourceObj sourceName $ - AB.mkAnyBackend $ - SOINativeQuery @b _nqmRootFieldName + SOSourceObj sourceName + $ AB.mkAnyBackend + $ SOINativeQuery @b _nqmRootFieldName dependency :: SchemaDependency dependency = SchemaDependency { sdObjId = - SOSourceObj sourceName $ - AB.mkAnyBackend $ - SOILogicalModel @b _nqmReturns, + SOSourceObj sourceName + $ AB.mkAnyBackend + $ SOILogicalModel @b _nqmReturns, sdReason = DRLogicalModel } withRecordInconsistencyM metadataObject $ do - unless (_cscAreNativeQueriesEnabled cacheStaticConfig) $ - throw400 InvalidConfiguration "The Native Queries feature is disabled" + unless (_cscAreNativeQueriesEnabled cacheStaticConfig) + $ throw400 InvalidConfiguration "The Native Queries feature is disabled" logicalModel <- onNothing (HashMap.lookup _nqmReturns logicalModelsCache) (throw400 InvalidConfiguration ("The logical model " <> toTxt _nqmReturns <> " could not be found")) - recordDependenciesM metadataObject schemaObjId $ - Seq.singleton dependency + recordDependenciesM metadataObject schemaObjId + $ Seq.singleton dependency arrayRelationships <- traverse @@ -870,9 +872,9 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou _nqmObjectRelationships let sourceObject = 
- SOSourceObj sourceName $ - AB.mkAnyBackend $ - SOINativeQuery @b _nqmRootFieldName + SOSourceObj sourceName + $ AB.mkAnyBackend + $ SOINativeQuery @b _nqmRootFieldName let dependencies = mconcat (snd <$> InsOrdHashMap.elems arrayRelationships) @@ -901,39 +903,39 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou let metadataObject :: MetadataObject metadataObject = MetadataObject - ( MOSourceObjId sourceName $ - AB.mkAnyBackend $ - SMOStoredProcedure @b _spmStoredProcedure + ( MOSourceObjId sourceName + $ AB.mkAnyBackend + $ SMOStoredProcedure @b _spmStoredProcedure ) (toJSON spm) schemaObjId :: SchemaObjId schemaObjId = - SOSourceObj sourceName $ - AB.mkAnyBackend $ - SOIStoredProcedure @b _spmStoredProcedure + SOSourceObj sourceName + $ AB.mkAnyBackend + $ SOIStoredProcedure @b _spmStoredProcedure dependency :: SchemaDependency dependency = SchemaDependency { sdObjId = - SOSourceObj sourceName $ - AB.mkAnyBackend $ - SOILogicalModel @b _spmReturns, + SOSourceObj sourceName + $ AB.mkAnyBackend + $ SOILogicalModel @b _spmReturns, sdReason = DRLogicalModel } withRecordInconsistencyM metadataObject $ do - unless (_cscAreStoredProceduresEnabled cacheStaticConfig) $ - throw400 InvalidConfiguration "The Stored Procedure feature is disabled" + unless (_cscAreStoredProceduresEnabled cacheStaticConfig) + $ throw400 InvalidConfiguration "The Stored Procedure feature is disabled" logicalModel <- onNothing (HashMap.lookup _spmReturns logicalModelsCache) (throw400 InvalidConfiguration ("The logical model " <> toTxt _spmReturns <> " could not be found")) - recordDependenciesM metadataObject schemaObjId $ - Seq.singleton dependency + recordDependenciesM metadataObject schemaObjId + $ Seq.singleton dependency graphqlName <- getStoredProcedureGraphqlName @b _spmStoredProcedure _spmConfig @@ -977,16 +979,17 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou let actionRoles = map _apmRole . _amPermissions =<< InsOrdHashMap.elems actions remoteSchemaRoles = map _rspmRole . 
_rsmPermissions =<< InsOrdHashMap.elems remoteSchemas sourceRoles = - HS.fromList $ - concat $ - InsOrdHashMap.elems sources >>= \(BackendSourceMetadata e) -> - AB.dispatchAnyBackend @Backend e \(SourceMetadata _ _ tables _functions _nativeQueries _storedProcedures _logicalModels _ _ _ _) -> do - table <- InsOrdHashMap.elems tables - pure $ - InsOrdHashMap.keys (_tmInsertPermissions table) - <> InsOrdHashMap.keys (_tmSelectPermissions table) - <> InsOrdHashMap.keys (_tmUpdatePermissions table) - <> InsOrdHashMap.keys (_tmDeletePermissions table) + HS.fromList + $ concat + $ InsOrdHashMap.elems sources + >>= \(BackendSourceMetadata e) -> + AB.dispatchAnyBackend @Backend e \(SourceMetadata _ _ tables _functions _nativeQueries _storedProcedures _logicalModels _ _ _ _) -> do + table <- InsOrdHashMap.elems tables + pure + $ InsOrdHashMap.keys (_tmInsertPermissions table) + <> InsOrdHashMap.keys (_tmSelectPermissions table) + <> InsOrdHashMap.keys (_tmUpdatePermissions table) + <> InsOrdHashMap.keys (_tmDeletePermissions table) inheritedRoleNames = InsOrdHashMap.keys inheritedRoles allRoleNames = sourceRoles <> HS.fromList (remoteSchemaRoles <> actionRoles <> inheritedRoleNames) @@ -1058,18 +1061,20 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou ( \_ (tableCoreInfo, (_, eventTriggerConfs)) -> buildTableEventTriggers -< (dynamicConfig, sourceName, sourceConfig, tableCoreInfo, eventTriggerConfs, metadataInvalidationKey, recreateEventTriggers) ) - |) (tablesCoreInfo `alignTableMap` mapFromL fst eventTriggers) + |) + (tablesCoreInfo `alignTableMap` mapFromL fst eventTriggers) returnA -< - Just $ - AB.mkAnyBackend @b $ - PartiallyResolvedSource sourceMetadata sourceConfig source tablesCoreInfo eventTriggerInfoMaps + Just + $ AB.mkAnyBackend @b + $ PartiallyResolvedSource sourceMetadata sourceConfig source tablesCoreInfo eventTriggerInfoMaps ) -< (exists, (dynamicConfig, invalidationKeys, storedIntrospection, defaultNC, isNamingConventionEnabled)) ) - |) (HashMap.fromList $ InsOrdHashMap.toList backendInfoAndSourceMetadata) + |) + (HashMap.fromList $ InsOrdHashMap.toList backendInfoAndSourceMetadata) let partiallyResolvedSources = catMaybes partiallyResolvedSourcesMaybes -- then we can build the entire source output @@ -1111,7 +1116,8 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou (dynamicConfig, partiallyResolvedSources, remoteSchemaCtxMap, orderedRoles) ) ) - |) partiallyResolvedSources + |) + partiallyResolvedSources remoteSchemaCache <- interpretWriter @@ -1130,8 +1136,8 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou sourcesCache = HashMap.map fst sourcesOutput actionList = InsOrdHashMap.elems actions maybeResolvedCustomTypes <- - withRecordInconsistencyM (MetadataObject MOCustomTypes $ toJSON customTypes) $ - resolveCustomTypes sourcesCache customTypes scalarsMap + withRecordInconsistencyM (MetadataObject MOCustomTypes $ toJSON customTypes) + $ resolveCustomTypes sourcesCache customTypes scalarsMap case maybeResolvedCustomTypes of Just resolvedCustomTypes -> do actionCache' <- buildActions resolvedCustomTypes scalarsMap orderedRoles actionList @@ -1162,16 +1168,16 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou buildOpenTelemetry OpenTelemetryConfig {..} = do -- Always perform validation, even if OpenTelemetry is disabled mOtelExporterInfo <- - fmap join $ - withRecordInconsistencyM (MetadataObject (MOOpenTelemetry OtelSubobjectExporterOtlp) (toJSON 
_ocExporterOtlp)) $ - liftEither $ - parseOtelExporterConfig _ocStatus env _ocExporterOtlp + fmap join + $ withRecordInconsistencyM (MetadataObject (MOOpenTelemetry OtelSubobjectExporterOtlp) (toJSON _ocExporterOtlp)) + $ liftEither + $ parseOtelExporterConfig _ocStatus env _ocExporterOtlp mOtelBatchSpanProcessorInfo <- - withRecordInconsistencyM (MetadataObject (MOOpenTelemetry OtelSubobjectBatchSpanProcessor) (toJSON _ocBatchSpanProcessor)) $ - liftEither $ - parseOtelBatchSpanProcessorConfig _ocBatchSpanProcessor - pure $ - case _ocStatus of + withRecordInconsistencyM (MetadataObject (MOOpenTelemetry OtelSubobjectBatchSpanProcessor) (toJSON _ocBatchSpanProcessor)) + $ liftEither + $ parseOtelBatchSpanProcessorConfig _ocBatchSpanProcessor + pure + $ case _ocStatus of OtelDisabled -> -- Disable all components if OpenTelemetry export not enabled OpenTelemetryInfo Nothing Nothing @@ -1213,11 +1219,11 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou listedQuery <- flip onNothing - ( throw400 NotExists $ - "query with name " - <> toTxt queryName - <> " does not exist in collection " - <> toTxt collName + ( throw400 NotExists + $ "query with name " + <> toTxt queryName + <> " does not exist in collection " + <> toTxt collName ) $ find ((== queryName) . _lqName) (_cdQueries (_ccDefinition collection)) @@ -1251,11 +1257,11 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou MetadataObject mkEventTriggerMetadataObject' source table eventTriggerConf = let objectId = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOTableObj @b table $ - MTOTrigger $ - etcName eventTriggerConf + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOTableObj @b table + $ MTOTrigger + $ etcName eventTriggerConf definition = object ["table" .= table, "configuration" .= eventTriggerConf] in MetadataObject objectId definition @@ -1303,10 +1309,10 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou triggerOnReplication = etcTriggerOnReplication eventTriggerConf metadataObject = mkEventTriggerMetadataObject' @b source table eventTriggerConf schemaObjectId = - SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b table $ - TOTrigger triggerName + SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b table + $ TOTrigger triggerName addTriggerContext e = "in event trigger " <> triggerName <<> ": " <> e buildReason <- bindA -< ask let reloadMetadataRecreateEventTrigger = @@ -1324,28 +1330,31 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou CatalogUpdate _ -> True CatalogSync -> False tableColumns = HashMap.elems $ _tciFieldInfoMap tableInfo - if ( _cscMaintenanceMode staticConfig == MaintenanceModeDisabled - && _cscReadOnlyMode staticConfig == ReadOnlyModeDisabled + if ( _cscMaintenanceMode staticConfig + == MaintenanceModeDisabled + && _cscReadOnlyMode staticConfig + == ReadOnlyModeDisabled ) then do bindErrorA -< - when (reloadMetadataRecreateEventTrigger == RETRecreate) $ + when (reloadMetadataRecreateEventTrigger == RETRecreate) + $ -- This is the case when the user sets `recreate_event_triggers` -- to `true` in `reload_metadata`, in this case, we recreate -- the SQL trigger by force, even if it may not be necessary -- TODO: Should we also mark the event trigger as inconsistent here? 
- liftEitherM $ - createTableEventTrigger - @b - (_cdcSQLGenCtx dynamicConfig) - sourceConfig - table - tableColumns - triggerName - triggerOnReplication - (etcDefinition eventTriggerConf) - (_tciPrimaryKey tableInfo) + liftEitherM + $ createTableEventTrigger + @b + (_cdcSQLGenCtx dynamicConfig) + sourceConfig + table + tableColumns + triggerName + triggerOnReplication + (etcDefinition eventTriggerConf) + (_tciPrimaryKey tableInfo) if isCatalogUpdate || migrationRecreateEventTriggers == RETRecreate then do recreateTriggerIfNeeded @@ -1377,7 +1386,8 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou recordDependencies -< (metadataObject, schemaObjectId, dependencies) returnA -< info ) - |) metadataObject + |) + metadataObject recreateTriggerIfNeeded = -- using `Inc.cache` here means that the response will be cached for the given output and the @@ -1397,8 +1407,8 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou -> do bindErrorA -< do - liftEitherM $ - createTableEventTrigger @b + liftEitherM + $ createTableEventTrigger @b (_cdcSQLGenCtx dynamicConfig) sourceConfig tableName @@ -1417,9 +1427,9 @@ buildSchemaCacheRule logger env mSchemaRegistryContext = proc (MetadataWithResou buildCronTrigger cronTrigger = do let triggerName = triggerNameToTxt $ ctName cronTrigger addCronTriggerContext e = "in cron trigger " <> triggerName <> ": " <> e - withRecordInconsistencyM (mkCronTriggerMetadataObject cronTrigger) $ - modifyErr addCronTriggerContext $ - resolveCronTrigger env cronTrigger + withRecordInconsistencyM (mkCronTriggerMetadataObject cronTrigger) + $ modifyErr addCronTriggerContext + $ resolveCronTrigger env cronTrigger buildInheritedRoles :: (MonadWriter (Seq (Either InconsistentMetadata MetadataDependency)) m) => @@ -1468,9 +1478,9 @@ buildRemoteSchemaRemoteRelationship :: m (Maybe (RemoteFieldInfo G.Name)) buildRemoteSchemaRemoteRelationship allSources remoteSchemaMap remoteSchema remoteSchemaIntrospection typeName rr@RemoteRelationship {..} = do let metadataObject = - MetadataObject (MORemoteSchemaRemoteRelationship remoteSchema typeName _rrName) $ - toJSON $ - CreateRemoteSchemaRemoteRelationship remoteSchema typeName _rrName _rrDefinition + MetadataObject (MORemoteSchemaRemoteRelationship remoteSchema typeName _rrName) + $ toJSON + $ CreateRemoteSchemaRemoteRelationship remoteSchema typeName _rrName _rrDefinition schemaObj = SORemoteSchemaRemoteRelationship remoteSchema typeName _rrName addRemoteRelationshipContext e = "in remote relationship " <> _rrName <<> ": " <> e -- buildRemoteFieldInfo only knows how to construct dependencies on the RHS of the join condition, diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Common.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Common.hs index d956d857df8..20a7cd6b29b 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Common.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Common.hs @@ -339,7 +339,8 @@ buildInfoMap extractKey mkMetadataObject buildInfo = proc (e, infos) -> do Nothing -> returnA -< Nothing Just info -> buildInfo -< (e, info) ) - |) groupedInfos + |) + groupedInfos returnA -< catMaybes infoMapMaybes {-# INLINEABLE buildInfoMap #-} diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Dependencies.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Dependencies.hs index 68170984d7f..093fbfed499 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Dependencies.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Dependencies.hs @@ -95,16 +95,16 @@ 
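The performIteration function in the hunk that follows treats dependency pruning as a fixpoint computation with a fuel limit: each pass may uncover new inconsistencies, and if successive passes keep changing the result past a bound, resolution aborts with the error shown below rather than looping forever. A standalone sketch of that shape; pruneOnce and the limit of 100 are illustrative assumptions, not the real values:

    pruneToFixpoint ::
      (Eq cache) =>
      (cache -> cache) -> -- one pruning pass, e.g. dropping dangling dependents
      Int ->              -- current iteration number
      cache ->
      Either String cache
    pruneToFixpoint pruneOnce iterationNumber cache
      | iterationNumber > 100 =
          Left "schema dependency resolution failed to terminate"
      | cache' == cache = Right cache
      | otherwise = pruneToFixpoint pruneOnce (iterationNumber + 1) cache'
      where
        cache' = pruneOnce cache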
performIteration iterationNumber cache inconsistencies dependencies = do throwError (err500 Unexpected "schema dependency resolution failed to terminate") { qeInternal = - Just $ - ExtraInternal $ - object - [ "inconsistent_objects" - .= object - [ "old" .= inconsistencies, - "new" .= newInconsistencies - ], - "pruned_dependencies" .= (map snd <$> prunedDependencies) - ] + Just + $ ExtraInternal + $ object + [ "inconsistent_objects" + .= object + [ "old" .= inconsistencies, + "new" .= newInconsistencies + ], + "pruned_dependencies" .= (map snd <$> prunedDependencies) + ] } pruneDanglingDependents :: @@ -120,41 +120,47 @@ pruneDanglingDependents cache = resolveDependency :: SchemaDependency -> Either Text () resolveDependency (SchemaDependency objectId _) = case objectId of SOSource source -> - void $ - HashMap.lookup source (_boSources cache) - `onNothing` Left ("no such source exists: " <>> source) + void + $ HashMap.lookup source (_boSources cache) + `onNothing` Left ("no such source exists: " <>> source) SORemoteSchema remoteSchemaName -> - unless (remoteSchemaName `HashMap.member` _boRemoteSchemas cache) $ - Left $ - "remote schema " <> remoteSchemaName <<> " is not found" + unless (remoteSchemaName `HashMap.member` _boRemoteSchemas cache) + $ Left + $ "remote schema " + <> remoteSchemaName + <<> " is not found" SORemoteSchemaPermission remoteSchemaName roleName -> do remoteSchema <- - onNothing (HashMap.lookup remoteSchemaName $ _boRemoteSchemas cache) $ - Left $ - "remote schema " <> remoteSchemaName <<> " is not found" - unless (roleName `HashMap.member` _rscPermissions (fst remoteSchema)) $ - Left $ - "no permission defined on remote schema " - <> remoteSchemaName - <<> " for role " - <>> roleName + onNothing (HashMap.lookup remoteSchemaName $ _boRemoteSchemas cache) + $ Left + $ "remote schema " + <> remoteSchemaName + <<> " is not found" + unless (roleName `HashMap.member` _rscPermissions (fst remoteSchema)) + $ Left + $ "no permission defined on remote schema " + <> remoteSchemaName + <<> " for role " + <>> roleName SORemoteSchemaRemoteRelationship remoteSchemaName typeName relationshipName -> do remoteSchema <- - fmap fst $ - onNothing (HashMap.lookup remoteSchemaName $ _boRemoteSchemas cache) $ - Left $ - "remote schema " <> remoteSchemaName <<> " is not found" + fmap fst + $ onNothing (HashMap.lookup remoteSchemaName $ _boRemoteSchemas cache) + $ Left + $ "remote schema " + <> remoteSchemaName + <<> " is not found" void $ onNothing (InsOrdHashMap.lookup typeName (_rscRemoteRelationships remoteSchema) >>= InsOrdHashMap.lookup relationshipName) $ Left $ "remote relationship " - <> relationshipName - <<> " on type " - <> G.unName typeName - <> " on " - <> remoteSchemaName - <<> " is not found" + <> relationshipName + <<> " on type " + <> G.unName typeName + <> " on " + <> remoteSchemaName + <<> " is not found" SOSourceObj source exists -> do AB.dispatchAnyBackend @Backend exists $ \(sourceObjId :: SourceObjId b) -> do sourceInfo <- castSourceInfo source sourceObjId @@ -162,9 +168,9 @@ pruneDanglingDependents cache = SOITable tableName -> void $ resolveTable sourceInfo tableName SOIFunction functionName -> - void $ - HashMap.lookup functionName (_siFunctions sourceInfo) - `onNothing` Left ("function " <> functionName <<> " is not tracked") + void + $ HashMap.lookup functionName (_siFunctions sourceInfo) + `onNothing` Left ("function " <> functionName <<> " is not tracked") SOILogicalModel logicalModelName -> void $ resolveLogicalModel sourceInfo logicalModelName 
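Each alternative of resolveDependency above reduces to one shape: look an object up in the cache and turn a miss into a Left carrying a readable message. A self-contained sketch of that shape in the Either monad; onNothing is re-derived here for illustration (the real one lives in the project prelude) and checkSourceExists is a hypothetical name:

    import qualified Data.Map.Strict as Map

    -- Fall back to the given action when the lookup produced Nothing.
    onNothing :: (Applicative f) => Maybe a -> f a -> f a
    onNothing ma ifNothing = maybe ifNothing pure ma

    checkSourceExists :: Map.Map String src -> String -> Either String ()
    checkSourceExists sources name =
      () <$ (Map.lookup name sources `onNothing` Left ("no such source exists: " <> name))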
SOILogicalModelObj logicalModelName logicalModelObjId -> do @@ -174,12 +180,14 @@ pruneDanglingDependents cache = let rolePermissions :: Maybe (RolePermInfo b) rolePermissions = logicalModel ^? lmiPermissions . ix roleName - unless (any (permissionIsDefined permType) rolePermissions) $ - Left $ - "no " - <> permTypeToCode permType - <> " permission defined on logical model " - <> logicalModelName <<> " for role " <>> roleName + unless (any (permissionIsDefined permType) rolePermissions) + $ Left + $ "no " + <> permTypeToCode permType + <> " permission defined on logical model " + <> logicalModelName + <<> " for role " + <>> roleName LMOCol column -> unless (InsOrdHashMap.member column (_lmiFields logicalModel)) do Left ("Could not find column " <> column <<> " in logical model " <>> logicalModelName) @@ -189,8 +197,8 @@ pruneDanglingDependents cache = nativeQueryInfo <- resolveNativeQuery sourceInfo nativeQueryName case nativeQueryObjId of NQOCol colName -> - unless (InsOrdHashMap.member colName (_lmiFields (_nqiReturns nativeQueryInfo))) $ - Left + unless (InsOrdHashMap.member colName (_lmiFields (_nqiReturns nativeQueryInfo))) + $ Left ("native query " <> nativeQueryName <<> " has no field named " <>> colName) SOIStoredProcedure storedProcedureName -> do void $ resolveStoredProcedure sourceInfo storedProcedureName @@ -198,8 +206,8 @@ pruneDanglingDependents cache = storedProcedureInfo <- resolveStoredProcedure sourceInfo storedProcedureName case storedProcedureObjId of SPOCol colName -> - unless (InsOrdHashMap.member colName (_lmiFields (_spiReturns storedProcedureInfo))) $ - Left + unless (InsOrdHashMap.member colName (_lmiFields (_spiReturns storedProcedureInfo))) + $ Left ("stored procedure " <> storedProcedureName <<> " has no field named " <>> colName) SOITableObj tableName tableObjectId -> do tableInfo <- resolveTable sourceInfo tableName @@ -214,26 +222,33 @@ pruneDanglingDependents cache = void $ resolveField tableInfo (fromRemoteRelationship fieldName) _FIRemoteRelationship "remote relationship" TOForeignKey constraintName -> do let foreignKeys = _tciForeignKeys $ _tiCoreInfo tableInfo - unless (isJust $ find ((== constraintName) . _cName . _fkConstraint) foreignKeys) $ - Left $ - "no foreign key constraint named " - <> constraintName <<> " is " - <> "defined for table " <>> tableName + unless (isJust $ find ((== constraintName) . _cName . _fkConstraint) foreignKeys) + $ Left + $ "no foreign key constraint named " + <> constraintName + <<> " is " + <> "defined for table " + <>> tableName TOPerm roleName permType -> do - unless (any (permissionIsDefined permType) (tableInfo ^? (tiRolePermInfoMap . ix roleName))) $ - Left $ - "no " - <> permTypeToCode permType - <> " permission defined on table " - <> tableName <<> " for role " <>> roleName + unless (any (permissionIsDefined permType) (tableInfo ^? (tiRolePermInfoMap . 
ix roleName))) + $ Left + $ "no " + <> permTypeToCode permType + <> " permission defined on table " + <> tableName + <<> " for role " + <>> roleName TOTrigger triggerName -> - unless (HashMap.member triggerName (_tiEventTriggerInfoMap tableInfo)) $ - Left $ - "no event trigger named " <> triggerName <<> " is defined for table " <>> tableName + unless (HashMap.member triggerName (_tiEventTriggerInfoMap tableInfo)) + $ Left + $ "no event trigger named " + <> triggerName + <<> " is defined for table " + <>> tableName SORole roleName -> - void $ - (HashMap.lookup roleName (_boRoles cache)) - `onNothing` Left ("parent role " <> roleName <<> " does not exist") + void + $ (HashMap.lookup roleName (_boRoles cache)) + `onNothing` Left ("parent role " <> roleName <<> " does not exist") castSourceInfo :: (Backend b) => SourceName -> SourceObjId b -> Either Text (SourceInfo b) diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Fields.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Fields.hs index f3365a80b21..0787b5558f3 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Fields.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Fields.hs @@ -73,26 +73,26 @@ addNonColumnFields allSources sourceName sourceConfig rawTableInfos columns remo -- 2. computed fields which don't expect arguments other than the table row and user session let lhsJoinFields = let columnFields = scalarColumns <&> \columnInfo -> JoinColumn (ciColumn columnInfo) (ciType columnInfo) - computedFields = HashMap.fromList $ - flip mapMaybe (HashMap.toList computedFieldInfos) $ - \(cfName, (ComputedFieldInfo {..}, _)) -> do - scalarType <- case computedFieldReturnType @b _cfiReturnType of - ReturnsScalar ty -> pure ty - ReturnsTable {} -> Nothing - ReturnsOthers {} -> Nothing - let ComputedFieldFunction {..} = _cfiFunction - case toList _cffInputArgs of - [] -> - pure $ - (fromComputedField cfName,) $ - JoinComputedField $ - ScalarComputedField - _cfiXComputedFieldInfo - _cfiName - _cffName - _cffComputedFieldImplicitArgs - scalarType - _ -> Nothing + computedFields = HashMap.fromList + $ flip mapMaybe (HashMap.toList computedFieldInfos) + $ \(cfName, (ComputedFieldInfo {..}, _)) -> do + scalarType <- case computedFieldReturnType @b _cfiReturnType of + ReturnsScalar ty -> pure ty + ReturnsTable {} -> Nothing + ReturnsOthers {} -> Nothing + let ComputedFieldFunction {..} = _cfiFunction + case toList _cffInputArgs of + [] -> + pure + $ (fromComputedField cfName,) + $ JoinComputedField + $ ScalarComputedField + _cfiXComputedFieldInfo + _cfiName + _cffName + _cffComputedFieldImplicitArgs + scalarType + _ -> Nothing in HashMap.union columnFields computedFields rawRemoteRelationshipInfos <- @@ -127,12 +127,12 @@ addNonColumnFields allSources sourceName sourceConfig rawTableInfos columns remo This (thisField, metadata) -> pure $ Just (this thisField, metadata) That (thatField, metadata) -> pure $ Just (that thatField, metadata) These (_, thisMetadata) (_, thatMetadata) -> do - tell $ - Seq.singleton $ - Left $ - ConflictingObjects - ("conflicting definitions for field " <>> fieldName) - [thisMetadata, thatMetadata] + tell + $ Seq.singleton + $ Left + $ ConflictingObjects + ("conflicting definitions for field " <>> fieldName) + [thisMetadata, thatMetadata] pure Nothing noCustomFieldConflicts nonColumnFields = do @@ -145,9 +145,9 @@ addNonColumnFields allSources sourceName sourceConfig rawTableInfos columns remo -- more useful error message. 
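The lhsJoinFields computation above encodes a real restriction: a computed field can serve as the left-hand side of a remote join only if it returns a scalar and takes no user-supplied arguments, which is why the code pattern-matches on the return type and on an empty argument list. A small standalone sketch of that filter with simplified stand-in types; CF and scalarJoinFields are illustrative, not the real definitions:

    data ReturnType = ReturnsScalar String | ReturnsTable | ReturnsOthers

    data CF = CF
      { cfName :: String,
        cfArgs :: [String], -- user-supplied arguments, if any
        cfReturn :: ReturnType
      }

    -- Keep only zero-argument, scalar-returning computed fields.
    scalarJoinFields :: [CF] -> [(String, String)]
    scalarJoinFields cfs = do
      CF name args ret <- cfs
      scalarTy <- case ret of
        ReturnsScalar ty -> pure ty
        _ -> [] -- table-returning and other shapes are skipped
      case args of
        [] -> pure (name, scalarTy) -- usable as a join field
        _ -> [] -- fields expecting arguments are skipped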
Just columnInfo | toTxt (structuredColumnInfoColumn columnInfo) /= G.unName fieldGQLName -> - throw400 AlreadyExists $ - "field definition conflicts with custom field name for postgres column " - <>> structuredColumnInfoColumn columnInfo + throw400 AlreadyExists + $ "field definition conflicts with custom field name for postgres column " + <>> structuredColumnInfoColumn columnInfo _ -> return () return (fieldInfo, metadata) @@ -168,10 +168,10 @@ mkRelationshipMetadataObject :: MetadataObject mkRelationshipMetadataObject relType source table relDef = let objectId = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOTableObj @b table $ - MTORel (_rdName relDef) relType + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOTableObj @b table + $ MTORel (_rdName relDef) relType in MetadataObject objectId $ toJSON $ WithTable @b source table relDef buildObjectRelationship :: @@ -218,10 +218,10 @@ buildRelationship source table buildRelInfo relType relDef = do let relName = _rdName relDef metadataObject = mkRelationshipMetadataObject @b relType source table relDef schemaObject = - SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b table $ - TORel relName + SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b table + $ TORel relName addRelationshipContext e = "in relationship " <> relName <<> ": " <> e withRecordInconsistencyM metadataObject $ do modifyErr (addTableContext @b table . addRelationshipContext) $ do @@ -238,10 +238,10 @@ mkComputedFieldMetadataObject :: MetadataObject mkComputedFieldMetadataObject source table ComputedFieldMetadata {..} = let objectId = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOTableObj @b table $ - MTOComputedField _cfmName + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOTableObj @b table + $ MTOComputedField _cfmName definition = AddComputedField @b source table _cfmName _cfmDefinition _cfmComment in MetadataObject objectId (toJSON definition) @@ -261,8 +261,9 @@ buildComputedField trackedTableNames tableColumns source pgFunctions table cf@Co let addComputedFieldContext e = "in computed field " <> _cfmName <<> ": " <> e function = computedFieldFunction @b _cfmDefinition - withRecordInconsistencyM (mkComputedFieldMetadataObject source table cf) $ - modifyErr (addTableContext @b table . addComputedFieldContext) $ do + withRecordInconsistencyM (mkComputedFieldMetadataObject source table cf) + $ modifyErr (addTableContext @b table . 
addComputedFieldContext) + $ do funcDefs <- onNothing (HashMap.lookup function pgFunctions) @@ -280,13 +281,13 @@ mkRemoteRelationshipMetadataObject :: MetadataObject mkRemoteRelationshipMetadataObject source table RemoteRelationship {..} = let objectId = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOTableObj @b table $ - MTORemoteRelationship _rrName - in MetadataObject objectId $ - toJSON $ - CreateFromSourceRelationship @b source table _rrName _rrDefinition + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOTableObj @b table + $ MTORemoteRelationship _rrName + in MetadataObject objectId + $ toJSON + $ CreateFromSourceRelationship @b source table _rrName _rrDefinition -- | This is a "thin" wrapper around 'buildRemoteFieldInfo', which only knows -- how to construct dependencies on the RHS of the join condition, so the @@ -306,13 +307,14 @@ buildRemoteRelationship :: buildRemoteRelationship allSources allColumns remoteSchemaMap source table rr@RemoteRelationship {..} = do let metadataObject = mkRemoteRelationshipMetadataObject @b source table rr schemaObj = - SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b table $ - TORemoteRel _rrName + SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b table + $ TORemoteRel _rrName addRemoteRelationshipContext e = "in remote relationship " <> _rrName <<> ": " <> e - withRecordInconsistencyM metadataObject $ - modifyErr (addTableContext @b table . addRemoteRelationshipContext) $ do + withRecordInconsistencyM metadataObject + $ modifyErr (addTableContext @b table . addRemoteRelationshipContext) + $ do (remoteField, rhsDependencies) <- buildRemoteFieldInfo (tableNameToLHSIdentifier @b table) allColumns rr allSources remoteSchemaMap let lhsDependencies = diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Permission.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Permission.hs index 357a0b239b6..b56cdc4118b 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Permission.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache/Permission.hs @@ -157,8 +157,8 @@ resolveCheckPermission checkPermission roleName inconsistentEntity = do CPInconsistent -> do let inconsistentObj = -- check `Conflicts while inheriting permissions` in `rfcs/inherited-roles-improvements.md` - Left $ - ConflictingInheritedPermission roleName inconsistentEntity + Left + $ ConflictingInheritedPermission roleName inconsistentEntity tell $ Seq.singleton inconsistentObj pure Nothing CPDefined permissionDefn -> pure $ Just permissionDefn @@ -201,18 +201,22 @@ buildTablePermissions source tableCache tableFields tablePermissions orderedRole go accumulatedRolePermMap (Role roleName (ParentRoles parentRoles)) = do parentRolePermissions <- for (toList parentRoles) $ \role -> - onNothing (HashMap.lookup role accumulatedRolePermMap) $ - throw500 $ - -- this error will ideally never be thrown, but if it's thrown then - -- it's possible that the permissions for the role do exist, but it's - -- not yet built due to wrong ordering of the roles, check `orderRoles` - "buildTablePermissions: table role permissions for role: " <> role <<> " not found" + onNothing (HashMap.lookup role accumulatedRolePermMap) + $ throw500 + $ + -- this error will ideally never be thrown, but if it's thrown then + -- it's possible that the permissions for the role do exist, but it's + -- not yet built due to wrong ordering of the roles, check `orderRoles` + "buildTablePermissions: table role permissions for role: " + <> role + <<> " not found" let combinedParentRolePermInfo = mconcat $ fmap 
rolePermInfoToCombineRolePermInfo parentRolePermissions selectPermissionsCount = length $ filter (isJust . _permSel) parentRolePermissions accumulatedRolePermission = HashMap.lookup roleName accumulatedRolePermMap roleSelectPermission = - onNothing (_permSel =<< accumulatedRolePermission) $ - combinedSelPermInfoToSelPermInfo selectPermissionsCount <$> (crpiSelPerm combinedParentRolePermInfo) + onNothing (_permSel =<< accumulatedRolePermission) + $ combinedSelPermInfoToSelPermInfo selectPermissionsCount + <$> (crpiSelPerm combinedParentRolePermInfo) roleInsertPermission <- resolveCheckTablePermission (crpiInsPerm combinedParentRolePermInfo) accumulatedRolePermission _permIns roleName source table PTInsert roleUpdatePermission <- resolveCheckTablePermission (crpiUpdPerm combinedParentRolePermInfo) accumulatedRolePermission _permUpd roleName source table PTUpdate roleDeletePermission <- resolveCheckTablePermission (crpiDelPerm combinedParentRolePermInfo) accumulatedRolePermission _permDel roleName source table PTDelete @@ -248,14 +252,14 @@ buildTablePermissions source tableCache tableFields tablePermissions orderedRole permType = reflectPermDefPermission (_pdPermission permission) roleName = _pdRole permission schemaObject = - SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b table $ - TOPerm roleName permType + SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b table + $ TOPerm roleName permType addPermContext err = "in permission for role " <> roleName <<> ": " <> err withRecordInconsistencyM metadataObject $ modifyErr (addTableContext @b table . addPermContext) do - when (_pdRole permission == adminRoleName) $ - throw400 ConstraintViolation "cannot define permission for admin role" + when (_pdRole permission == adminRoleName) + $ throw400 ConstraintViolation "cannot define permission for admin role" (info, dependencies) <- runTableCoreCacheRT ( buildPermInfo @@ -273,10 +277,10 @@ buildTablePermissions source tableCache tableFields tablePermissions orderedRole mkPermissionMetadataObject permDef = let permType = reflectPermDefPermission (_pdPermission permDef) objectId = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOTableObj @b table $ - MTOPerm (_pdRole permDef) permType + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOTableObj @b table + $ MTOPerm (_pdRole permDef) permType definition = toJSON $ WithTable @b source table permDef in MetadataObject objectId definition @@ -325,9 +329,9 @@ buildLogicalModelPermissions sourceName tableCache logicalModelName logicalModel -- back to the inherited permission. roleSelectPermission :: Maybe (SelPermInfo b) roleSelectPermission = - onNothing (accumulatedRolePermission >>= _permSel) $ - fmap (combinedSelPermInfoToSelPermInfo selectPermissionsCount) $ - crpiSelPerm combinedParentRolePermInfo + onNothing (accumulatedRolePermission >>= _permSel) + $ fmap (combinedSelPermInfoToSelPermInfo selectPermissionsCount) + $ crpiSelPerm combinedParentRolePermInfo rolePermInfo :: RolePermInfo b rolePermInfo = RolePermInfo Nothing roleSelectPermission Nothing Nothing @@ -344,17 +348,17 @@ buildLogicalModelPermissions sourceName tableCache logicalModelName logicalModel -- generate this permission. 
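The roleSelectPermission bindings above implement a precedence rule: a permission defined directly on the role always wins, and only when none exists does the role inherit one combined from all of its parents. A minimal sketch of that precedence using Maybe's Alternative instance; SelPerm and the union-of-columns merge are simplifications (the real combination also tracks row filters and a select-permission count):

    import Control.Applicative ((<|>))
    import qualified Data.Set as Set

    newtype SelPerm = SelPerm (Set.Set String) -- columns the role may select

    -- Merge the parents' permissions; here simply a column union.
    combineParents :: [SelPerm] -> Maybe SelPerm
    combineParents [] = Nothing
    combineParents ps = Just (SelPerm (Set.unions [cols | SelPerm cols <- ps]))

    -- An explicitly defined permission takes precedence over the inherited one.
    effectiveSelectPerm :: Maybe SelPerm -> [SelPerm] -> Maybe SelPerm
    effectiveSelectPerm own parents = own <|> combineParents parents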
sourceObjId :: MetadataObjId sourceObjId = - MOSourceObjId sourceName $ - AB.mkAnyBackend $ - SMOLogicalModelObj @b logicalModelName $ - LMMOPerm role PTSelect + MOSourceObjId sourceName + $ AB.mkAnyBackend + $ SMOLogicalModelObj @b logicalModelName + $ LMMOPerm role PTSelect -- The object we're going to use to track the dependency and any -- potential cache inconsistencies. metadataObject :: MetadataObject metadataObject = - MetadataObject sourceObjId $ - toJSON + MetadataObject sourceObjId + $ toJSON WithLogicalModel { _wlmSource = sourceName, _wlmName = logicalModelName, @@ -364,24 +368,27 @@ buildLogicalModelPermissions sourceName tableCache logicalModelName logicalModel -- An identifier for this permission within the metadata structure. schemaObject :: SchemaObjId schemaObject = - SOSourceObj sourceName $ - AB.mkAnyBackend $ - SOILogicalModelObj @b logicalModelName $ - LMOPerm role PTSelect + SOSourceObj sourceName + $ AB.mkAnyBackend + $ SOILogicalModelObj @b logicalModelName + $ LMOPerm role PTSelect modifyError :: ExceptT QErr m a -> ExceptT QErr m a modifyError = modifyErr \err -> - addLogicalModelContext logicalModelName $ - "in permission for role " <> role <<> ": " <> err + addLogicalModelContext logicalModelName + $ "in permission for role " + <> role + <<> ": " + <> err select <- withRecordInconsistencyM metadataObject $ modifyError do - when (role == adminRoleName) $ - throw400 ConstraintViolation "cannot define permission for admin role" + when (role == adminRoleName) + $ throw400 ConstraintViolation "cannot define permission for admin role" (permissionInformation, dependencies) <- - flip runTableCoreCacheRT tableCache $ - buildLogicalModelPermInfo sourceName logicalModelName logicalModelFields $ - _pdPermission selectPermission + flip runTableCoreCacheRT tableCache + $ buildLogicalModelPermInfo sourceName logicalModelName logicalModelFields + $ _pdPermission selectPermission recordDependenciesM metadataObject schemaObject dependencies pure permissionInformation diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs index 95379e417fc..b3d951bf588 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs @@ -119,9 +119,9 @@ getTableDiff oldtm newtm = newCols = _ptmiColumns $ tmInfo newtm uniqueOrPrimaryCons = - map _cName $ - maybeToList (_pkConstraint <$> _ptmiPrimaryKey (tmInfo newtm)) - <> (_ucConstraint <$> toList (_ptmiUniqueConstraints (tmInfo newtm))) + map _cName + $ maybeToList (_pkConstraint <$> _ptmiPrimaryKey (tmInfo newtm)) + <> (_ucConstraint <$> toList (_ptmiUniqueConstraints (tmInfo newtm))) mNewDesc = _ptmiDescription $ tmInfo newtm @@ -132,16 +132,17 @@ getTableDiff oldtm newtm = -- foreign keys are considered dropped only if their oid -- and (ref-table, column mapping) are changed droppedFKeyConstraints = - map (_cName . _fkConstraint) $ - HS.toList $ - droppedFKeysWithOid `HS.intersection` droppedFKeysWithUniq + map (_cName . _fkConstraint) + $ HS.toList + $ droppedFKeysWithOid + `HS.intersection` droppedFKeysWithUniq tmForeignKeys = fmap unForeignKeyMetadata . toList . _ptmiForeignKeys . tmInfo droppedFKeysWithOid = - HS.fromList $ - (getDifferenceOn (_cOid . _fkConstraint) `on` tmForeignKeys) oldtm newtm + HS.fromList + $ (getDifferenceOn (_cOid . 
_fkConstraint) `on` tmForeignKeys) oldtm newtm droppedFKeysWithUniq = - HS.fromList $ - (getDifferenceOn mkFKeyUniqId `on` tmForeignKeys) oldtm newtm + HS.fromList + $ (getDifferenceOn mkFKeyUniqId `on` tmForeignKeys) oldtm newtm mkFKeyUniqId (ForeignKey _ reftn colMap) = (reftn, colMap) -- calculate computed field diff @@ -149,8 +150,8 @@ getTableDiff oldtm newtm = newComputedFieldMeta = tmComputedFields newtm droppedComputedFields = - map ccmName $ - getDifferenceOn (fmOid . ccmFunctionMeta) oldComputedFieldMeta newComputedFieldMeta + map ccmName + $ getDifferenceOn (fmOid . ccmFunctionMeta) oldComputedFieldMeta newComputedFieldMeta alteredComputedFields = getOverlapWith (fmOid . ccmFunctionMeta) oldComputedFieldMeta newComputedFieldMeta @@ -158,8 +159,9 @@ getTableDiff oldtm newtm = overloadedComputedFieldFunctions = let getFunction = fmFunction . ccmFunctionMeta getSecondElement (_ NE.:| list) = listToMaybe list - in mapMaybe (fmap ((&&&) ccmName getFunction) . getSecondElement) $ - flip NE.groupBy newComputedFieldMeta $ \l r -> + in mapMaybe (fmap ((&&&) ccmName getFunction) . getSecondElement) + $ flip NE.groupBy newComputedFieldMeta + $ \l r -> ccmName l == ccmName r && getFunction l == getFunction r computedFieldDiff = @@ -178,22 +180,24 @@ getTableChangeDeps :: getTableChangeDeps source tn tableDiff = do sc <- askSchemaCache -- for all the dropped columns - droppedColDeps <- fmap concat $ - forM droppedCols $ \droppedCol -> do + droppedColDeps <- fmap concat + $ forM droppedCols + $ \droppedCol -> do let objId = - SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b tn $ - TOCol @b droppedCol + SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b tn + $ TOCol @b droppedCol return $ getDependentObjs sc objId -- for all dropped constraints - droppedConsDeps <- fmap concat $ - forM droppedFKeyConstraints $ \droppedCons -> do + droppedConsDeps <- fmap concat + $ forM droppedFKeyConstraints + $ \droppedCons -> do let objId = - SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b tn $ - TOForeignKey @b droppedCons + SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b tn + $ TOForeignKey @b droppedCons return $ getDependentObjs sc objId return $ droppedConsDeps <> droppedColDeps <> droppedComputedFieldDeps where @@ -345,26 +349,26 @@ alterTableInMetadata source ti tableDiff = do let ComputedFieldDiff _ altered overloaded = computedFieldDiff getFunction = fmFunction . ccmFunctionMeta forM_ overloaded $ \(columnName, function) -> - throw400 NotSupported $ - "The function " - <> function - <<> " associated with computed field" - <> columnName - <<> " of table " - <> table - <<> " is being overloaded" + throw400 NotSupported + $ "The function " + <> function + <<> " associated with computed field" + <> columnName + <<> " of table " + <> table + <<> " is being overloaded" forM_ altered $ \(old, new) -> if - | (fmType . ccmFunctionMeta) new == FTVOLATILE -> - throw400 NotSupported $ - "The type of function " - <> getFunction old - <<> " associated with computed field " - <> ccmName old - <<> " of table " - <> table - <<> " is being altered to \"VOLATILE\"" - | otherwise -> pure () + | (fmType . ccmFunctionMeta) new == FTVOLATILE -> + throw400 NotSupported + $ "The type of function " + <> getFunction old + <<> " associated with computed field " + <> ccmName old + <<> " of table " + <> table + <<> " is being altered to \"VOLATILE\"" + | otherwise -> pure () dropTablesInMetadata :: forall b m. 
@@ -375,8 +379,8 @@ dropTablesInMetadata :: [TableName b] -> m () dropTablesInMetadata source droppedTables = - forM_ droppedTables $ - \tn -> tell $ MetadataModifier $ metaSources . ix source . toSourceMetadata . (smTables @b) %~ InsOrdHashMap.delete tn + forM_ droppedTables + $ \tn -> tell $ MetadataModifier $ metaSources . ix source . toSourceMetadata . (smTables @b) %~ InsOrdHashMap.delete tn alterColumnsInMetadata :: forall b m. @@ -392,28 +396,30 @@ alterColumnsInMetadata :: TableName b -> m () alterColumnsInMetadata source alteredCols fields sc tn = - for_ alteredCols $ - \( RawColumnInfo {rciName = oldName, rciType = oldType}, - RawColumnInfo {rciName = newName, rciType = newType} - ) -> do + for_ alteredCols + $ \( RawColumnInfo {rciName = oldName, rciType = oldType}, + RawColumnInfo {rciName = newName, rciType = newType} + ) -> do if - | oldName /= newName -> - renameColumnInMetadata oldName newName source tn fields - | oldType /= newType -> do - let colId = - SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b tn $ - TOCol @b oldName - typeDepObjs = getDependentObjsWith (== DROnType) sc colId + | oldName /= newName -> + renameColumnInMetadata oldName newName source tn fields + | oldType /= newType -> do + let colId = + SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b tn + $ TOCol @b oldName + typeDepObjs = getDependentObjsWith (== DROnType) sc colId - unless (null typeDepObjs) $ - throw400 DependencyError $ - "cannot change type of column " - <> oldName <<> " in table " - <> tn <<> " because of the following dependencies: " - <> reportSchemaObjs typeDepObjs - | otherwise -> pure () + unless (null typeDepObjs) + $ throw400 DependencyError + $ "cannot change type of column " + <> oldName + <<> " in table " + <> tn + <<> " because of the following dependencies: " + <> reportSchemaObjs typeDepObjs + | otherwise -> pure () removeDroppedColumnsFromMetadataField :: forall b m. @@ -423,10 +429,12 @@ removeDroppedColumnsFromMetadataField :: TableCoreInfo b -> m () removeDroppedColumnsFromMetadataField source droppedCols tableInfo = do - when (newColumnConfig /= originalColumnConfig) $ - tell $ - MetadataModifier $ - tableMetadataSetter @b source tableName . tmConfiguration .~ newTableConfig + when (newColumnConfig /= originalColumnConfig) + $ tell + $ MetadataModifier + $ tableMetadataSetter @b source tableName + . 
tmConfiguration + .~ newTableConfig where tableName = _tciName tableInfo originalTableConfig = _tciCustomConfig tableInfo diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/LegacyCatalog.hs b/server/src-lib/Hasura/RQL/DDL/Schema/LegacyCatalog.hs index f3e5c0ab1e6..973b0991662 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/LegacyCatalog.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/LegacyCatalog.hs @@ -81,26 +81,28 @@ saveMetadataToHdbTables saveTableToCatalog _tmTable _tmIsEnum _tmConfiguration -- Relationships - withPathK "object_relationships" $ - indexedForM_ _tmObjectRelationships $ \objRel -> + withPathK "object_relationships" + $ indexedForM_ _tmObjectRelationships + $ \objRel -> insertRelationshipToCatalog _tmTable ObjRel objRel - withPathK "array_relationships" $ - indexedForM_ _tmArrayRelationships $ \arrRel -> + withPathK "array_relationships" + $ indexedForM_ _tmArrayRelationships + $ \arrRel -> insertRelationshipToCatalog _tmTable ArrRel arrRel -- Computed Fields - withPathK "computed_fields" $ - indexedForM_ _tmComputedFields $ - \(ComputedFieldMetadata name definition comment) -> - addComputedFieldToCatalog $ - AddComputedField defaultSource _tmTable name definition comment + withPathK "computed_fields" + $ indexedForM_ _tmComputedFields + $ \(ComputedFieldMetadata name definition comment) -> + addComputedFieldToCatalog + $ AddComputedField defaultSource _tmTable name definition comment -- Remote Relationships - withPathK "remote_relationships" $ - indexedForM_ _tmRemoteRelationships $ - \RemoteRelationship {..} -> do - addRemoteRelationshipToCatalog $ - CreateFromSourceRelationship defaultSource _tmTable _rrName _rrDefinition + withPathK "remote_relationships" + $ indexedForM_ _tmRemoteRelationships + $ \RemoteRelationship {..} -> do + addRemoteRelationshipToCatalog + $ CreateFromSourceRelationship defaultSource _tmTable _rrName _rrDefinition -- Permissions withPathK "insert_permissions" $ processPerms _tmTable _tmInsertPermissions @@ -109,51 +111,54 @@ saveMetadataToHdbTables withPathK "delete_permissions" $ processPerms _tmTable _tmDeletePermissions -- Event triggers - withPathK "event_triggers" $ - indexedForM_ _tmEventTriggers $ - \etc -> addEventTriggerToCatalog _tmTable etc + withPathK "event_triggers" + $ indexedForM_ _tmEventTriggers + $ \etc -> addEventTriggerToCatalog _tmTable etc -- sql functions - withPathK "functions" $ - indexedForM_ functions $ - \(FunctionMetadata function config _ _) -> addFunctionToCatalog function config + withPathK "functions" + $ indexedForM_ functions + $ \(FunctionMetadata function config _ _) -> addFunctionToCatalog function config -- query collections systemDefined <- ask - withPathK "query_collections" $ - indexedForM_ collections $ - \c -> liftTx $ addCollectionToCatalog c systemDefined + withPathK "query_collections" + $ indexedForM_ collections + $ \c -> liftTx $ addCollectionToCatalog c systemDefined -- allow list withPathK "allowlist" $ do indexedForM_ allowlist $ \(AllowlistEntry collectionName scope) -> do - unless (scope == AllowlistScopeGlobal) $ - throw400 NotSupported $ - "cannot downgrade to v1 because the " - <> collectionName - <<> " added to the allowlist is a role based allowlist" + unless (scope == AllowlistScopeGlobal) + $ throw400 NotSupported + $ "cannot downgrade to v1 because the " + <> collectionName + <<> " added to the allowlist is a role based allowlist" liftTx $ addCollectionToAllowlistCatalog collectionName -- remote schemas - withPathK "remote_schemas" $ - indexedMapM_ (liftTx . 
addRemoteSchemaToCatalog) schemas + withPathK "remote_schemas" + $ indexedMapM_ (liftTx . addRemoteSchemaToCatalog) schemas -- custom types withPathK "custom_types" $ setCustomTypesInCatalog customTypes -- cron triggers - withPathK "cron_triggers" $ - indexedForM_ cronTriggers $ \ct -> liftTx $ do + withPathK "cron_triggers" + $ indexedForM_ cronTriggers + $ \ct -> liftTx $ do addCronTriggerToCatalog ct -- actions - withPathK "actions" $ - indexedForM_ actions $ \action -> do + withPathK "actions" + $ indexedForM_ actions + $ \action -> do let createAction = CreateAction (_amName action) (_amDefinition action) (_amComment action) addActionToCatalog createAction - withPathK "permissions" $ - indexedForM_ (_amPermissions action) $ \permission -> do + withPathK "permissions" + $ indexedForM_ (_amPermissions action) + $ \permission -> do let createActionPermission = CreateActionPermission (_amName action) @@ -170,8 +175,8 @@ saveTableToCatalog :: (MonadTx m, MonadReader SystemDefined m) => QualifiedTable -> Bool -> TableConfig ('Postgres 'Vanilla) -> m () saveTableToCatalog (QualifiedObject sn tn) isEnum config = do systemDefined <- ask - liftTx $ - PG.unitQE + liftTx + $ PG.unitQE defaultTxErrorHandler [PG.sql| INSERT INTO "hdb_catalog"."hdb_table" @@ -225,8 +230,8 @@ addComputedFieldToCatalog :: AddComputedField ('Postgres 'Vanilla) -> m () addComputedFieldToCatalog q = - liftTx $ - PG.withQE + liftTx + $ PG.withQE defaultTxErrorHandler [PG.sql| INSERT INTO hdb_catalog.hdb_computed_field @@ -242,8 +247,8 @@ addComputedFieldToCatalog q = addRemoteRelationshipToCatalog :: (MonadTx m) => CreateFromSourceRelationship ('Postgres 'Vanilla) -> m () addRemoteRelationshipToCatalog CreateFromSourceRelationship {..} = - liftTx $ - PG.unitQE + liftTx + $ PG.unitQE defaultTxErrorHandler [PG.sql| INSERT INTO hdb_catalog.hdb_remote_relationship @@ -262,8 +267,8 @@ addFunctionToCatalog :: m () addFunctionToCatalog (QualifiedObject sn fn) config = do systemDefined <- ask - liftTx $ - PG.unitQE + liftTx + $ PG.unitQE defaultTxErrorHandler [PG.sql| INSERT INTO "hdb_catalog"."hdb_function" @@ -290,8 +295,8 @@ addRemoteSchemaToCatalog (RemoteSchemaMetadata name def comment _ _) = addCollectionToCatalog :: (MonadTx m) => CreateCollection -> SystemDefined -> m () addCollectionToCatalog (CreateCollection name defn mComment) systemDefined = - liftTx $ - PG.unitQE + liftTx + $ PG.unitQE defaultTxErrorHandler [PG.sql| INSERT INTO hdb_catalog.hdb_query_collection @@ -303,8 +308,8 @@ addCollectionToCatalog (CreateCollection name defn mComment) systemDefined = addCollectionToAllowlistCatalog :: (MonadTx m) => CollectionName -> m () addCollectionToAllowlistCatalog collName = - liftTx $ - PG.unitQE + liftTx + $ PG.unitQE defaultTxErrorHandler [PG.sql| INSERT INTO hdb_catalog.hdb_allowlist @@ -338,8 +343,8 @@ setCustomTypesInCatalog customTypes = liftTx do addActionToCatalog :: (MonadTx m) => CreateAction -> m () addActionToCatalog (CreateAction actionName actionDefinition comment) = do - liftTx $ - PG.unitQE + liftTx + $ PG.unitQE defaultTxErrorHandler [PG.sql| INSERT into hdb_catalog.hdb_action @@ -351,8 +356,8 @@ addActionToCatalog (CreateAction actionName actionDefinition comment) = do addActionPermissionToCatalog :: (MonadTx m) => CreateActionPermission -> m () addActionPermissionToCatalog CreateActionPermission {..} = do - liftTx $ - PG.unitQE + liftTx + $ PG.unitQE defaultTxErrorHandler [PG.sql| INSERT into hdb_catalog.hdb_action_permission @@ -369,8 +374,8 @@ addPermissionToCatalog :: SystemDefined -> m () 
addPermissionToCatalog (QualifiedObject sn tn) (PermDef rn qdef mComment) systemDefined = - liftTx $ - PG.unitQE + liftTx + $ PG.unitQE defaultTxErrorHandler [PG.sql| INSERT INTO @@ -414,8 +419,9 @@ parseLegacyRemoteRelationshipDefinition = fetchMetadataFromHdbTables :: (MonadTx m) => m MetadataNoSources fetchMetadataFromHdbTables = liftTx do tables <- fetchTables - let tableMetaMap = InsOrdHashMap.fromList . flip map tables $ - \(schema, name, isEnum, maybeConfig) -> + let tableMetaMap = InsOrdHashMap.fromList + . flip map tables + $ \(schema, name, isEnum, maybeConfig) -> let qualifiedName = QualifiedObject schema name configuration = maybe emptyTableConfig PG.getViaJSON maybeConfig in (qualifiedName, mkTableMeta qualifiedName isEnum configuration) @@ -467,8 +473,8 @@ fetchMetadataFromHdbTables = liftTx do actions <- oMapFromL _amName <$> fetchActions cronTriggers <- fetchCronTriggers - pure $ - MetadataNoSources + pure + $ MetadataNoSources fullTableMetaMap functions remoteSchemas @@ -559,13 +565,14 @@ fetchMetadataFromHdbTables = liftTx do |] () False - pure $ - oMapFromL _fmFunction $ - flip map l $ \(sn, fn, PG.ViaJSON config) -> - -- function permissions were only introduced post 43rd - -- migration, so it's impossible we get any permissions - -- here - FunctionMetadata (QualifiedObject sn fn) config [] Nothing + pure + $ oMapFromL _fmFunction + $ flip map l + $ \(sn, fn, PG.ViaJSON config) -> + -- function permissions were only introduced post 43rd + -- migration, so it's impossible we get any permissions + -- here + FunctionMetadata (QualifiedObject sn fn) config [] Nothing fetchRemoteSchemas = map fromRow @@ -623,14 +630,16 @@ fetchMetadataFromHdbTables = liftTx do |] () False - pure $ - flip map r $ \(schema, table, name, PG.ViaJSON definition, comment) -> + pure + $ flip map r + $ \(schema, table, name, PG.ViaJSON definition, comment) -> ( QualifiedObject schema table, ComputedFieldMetadata name definition (commentFromMaybeText comment) ) fetchCronTriggers = - oMapFromL ctName . map uncurryCronTrigger + oMapFromL ctName + . map uncurryCronTrigger <$> PG.withQE defaultTxErrorHandler [PG.sql| @@ -659,7 +668,9 @@ fetchMetadataFromHdbTables = liftTx do fetchCustomTypes :: PG.TxE QErr CustomTypes fetchCustomTypes = - PG.getViaJSON . runIdentity . PG.getRow + PG.getViaJSON + . runIdentity + . PG.getRow <$> PG.rawQE defaultTxErrorHandler [PG.sql| @@ -669,7 +680,9 @@ fetchMetadataFromHdbTables = liftTx do False fetchActions = - PG.getViaJSON . runIdentity . PG.getRow + PG.getViaJSON + . runIdentity + . 
PG.getRow <$> PG.rawQE defaultTxErrorHandler [PG.sql| @@ -718,8 +731,9 @@ fetchMetadataFromHdbTables = liftTx do |] () False - pure $ - flip map r $ \(schema, table, name, PG.ViaJSON definition) -> + pure + $ flip map r + $ \(schema, table, name, PG.ViaJSON definition) -> ( QualifiedObject schema table, name, definition @@ -727,8 +741,8 @@ fetchMetadataFromHdbTables = liftTx do addCronTriggerForeignKeyConstraint :: (MonadTx m) => m () addCronTriggerForeignKeyConstraint = - liftTx $ - PG.unitQE + liftTx + $ PG.unitQE defaultTxErrorHandler [PG.sql| ALTER TABLE hdb_catalog.hdb_cron_events ADD CONSTRAINT @@ -784,24 +798,24 @@ recreateSystemMetadata = do table "hdb_catalog" "hdb_table" - [ objectRel $$(nonEmptyText "detail") $ - manualConfig "information_schema" "tables" tableNameMapping, - objectRel $$(nonEmptyText "primary_key") $ - manualConfig "hdb_catalog" "hdb_primary_key" tableNameMapping, - arrayRel $$(nonEmptyText "columns") $ - manualConfig "information_schema" "columns" tableNameMapping, - arrayRel $$(nonEmptyText "foreign_key_constraints") $ - manualConfig "hdb_catalog" "hdb_foreign_key_constraint" tableNameMapping, - arrayRel $$(nonEmptyText "relationships") $ - manualConfig "hdb_catalog" "hdb_relationship" tableNameMapping, - arrayRel $$(nonEmptyText "permissions") $ - manualConfig "hdb_catalog" "hdb_permission_agg" tableNameMapping, - arrayRel $$(nonEmptyText "computed_fields") $ - manualConfig "hdb_catalog" "hdb_computed_field" tableNameMapping, - arrayRel $$(nonEmptyText "check_constraints") $ - manualConfig "hdb_catalog" "hdb_check_constraint" tableNameMapping, - arrayRel $$(nonEmptyText "unique_constraints") $ - manualConfig "hdb_catalog" "hdb_unique_constraint" tableNameMapping + [ objectRel $$(nonEmptyText "detail") + $ manualConfig "information_schema" "tables" tableNameMapping, + objectRel $$(nonEmptyText "primary_key") + $ manualConfig "hdb_catalog" "hdb_primary_key" tableNameMapping, + arrayRel $$(nonEmptyText "columns") + $ manualConfig "information_schema" "columns" tableNameMapping, + arrayRel $$(nonEmptyText "foreign_key_constraints") + $ manualConfig "hdb_catalog" "hdb_foreign_key_constraint" tableNameMapping, + arrayRel $$(nonEmptyText "relationships") + $ manualConfig "hdb_catalog" "hdb_relationship" tableNameMapping, + arrayRel $$(nonEmptyText "permissions") + $ manualConfig "hdb_catalog" "hdb_permission_agg" tableNameMapping, + arrayRel $$(nonEmptyText "computed_fields") + $ manualConfig "hdb_catalog" "hdb_computed_field" tableNameMapping, + arrayRel $$(nonEmptyText "check_constraints") + $ manualConfig "hdb_catalog" "hdb_check_constraint" tableNameMapping, + arrayRel $$(nonEmptyText "unique_constraints") + $ manualConfig "hdb_catalog" "hdb_unique_constraint" tableNameMapping ], table "hdb_catalog" "hdb_primary_key" [], table "hdb_catalog" "hdb_foreign_key_constraint" [], @@ -814,17 +828,17 @@ recreateSystemMetadata = do table "hdb_catalog" "event_triggers" - [ arrayRel $$(nonEmptyText "events") $ - manualConfig "hdb_catalog" "event_log" [("name", "trigger_name")] + [ arrayRel $$(nonEmptyText "events") + $ manualConfig "hdb_catalog" "event_log" [("name", "trigger_name")] ], table "hdb_catalog" "event_log" - [ objectRel $$(nonEmptyText "trigger") $ - manualConfig "hdb_catalog" "event_triggers" [("trigger_name", "name")], - arrayRel $$(nonEmptyText "logs") $ - RUFKeyOn $ - ArrRelUsingFKeyOn (QualifiedObject "hdb_catalog" "event_invocation_logs") (pure "event_id") + [ objectRel $$(nonEmptyText "trigger") + $ manualConfig "hdb_catalog" "event_triggers" 
[("trigger_name", "name")], + arrayRel $$(nonEmptyText "logs") + $ RUFKeyOn + $ ArrRelUsingFKeyOn (QualifiedObject "hdb_catalog" "event_invocation_logs") (pure "event_id") ], table "hdb_catalog" @@ -834,8 +848,8 @@ recreateSystemMetadata = do table "hdb_catalog" "hdb_function_agg" - [ objectRel $$(nonEmptyText "return_table_info") $ - manualConfig + [ objectRel $$(nonEmptyText "return_table_info") + $ manualConfig "hdb_catalog" "hdb_table" [ ("return_type_schema", "table_schema"), @@ -851,8 +865,8 @@ recreateSystemMetadata = do table "hdb_catalog" "hdb_action" - [ arrayRel $$(nonEmptyText "permissions") $ - manualConfig + [ arrayRel $$(nonEmptyText "permissions") + $ manualConfig "hdb_catalog" "hdb_action_permission" [("action_name", "action_name")] @@ -861,13 +875,13 @@ recreateSystemMetadata = do table "hdb_catalog" "hdb_role" - [ arrayRel $$(nonEmptyText "action_permissions") $ - manualConfig + [ arrayRel $$(nonEmptyText "action_permissions") + $ manualConfig "hdb_catalog" "hdb_action_permission" [("role_name", "role_name")], - arrayRel $$(nonEmptyText "permissions") $ - manualConfig + arrayRel $$(nonEmptyText "permissions") + $ manualConfig "hdb_catalog" "hdb_permission_agg" [("role_name", "role_name")] @@ -875,17 +889,17 @@ recreateSystemMetadata = do table "hdb_catalog" "hdb_cron_triggers" - [ arrayRel $$(nonEmptyText "cron_events") $ - RUFKeyOn $ - ArrRelUsingFKeyOn (QualifiedObject "hdb_catalog" "hdb_cron_events") (pure "trigger_name") + [ arrayRel $$(nonEmptyText "cron_events") + $ RUFKeyOn + $ ArrRelUsingFKeyOn (QualifiedObject "hdb_catalog" "hdb_cron_events") (pure "trigger_name") ], table "hdb_catalog" "hdb_cron_events" [ objectRel $$(nonEmptyText "cron_trigger") $ RUFKeyOn $ SameTable (pure "trigger_name"), - arrayRel $$(nonEmptyText "cron_event_logs") $ - RUFKeyOn $ - ArrRelUsingFKeyOn (QualifiedObject "hdb_catalog" "hdb_cron_event_invocation_logs") (pure "event_id") + arrayRel $$(nonEmptyText "cron_event_logs") + $ RUFKeyOn + $ ArrRelUsingFKeyOn (QualifiedObject "hdb_catalog" "hdb_cron_event_invocation_logs") (pure "event_id") ], table "hdb_catalog" @@ -895,9 +909,9 @@ recreateSystemMetadata = do table "hdb_catalog" "hdb_scheduled_events" - [ arrayRel $$(nonEmptyText "scheduled_event_logs") $ - RUFKeyOn $ - ArrRelUsingFKeyOn (QualifiedObject "hdb_catalog" "hdb_scheduled_event_invocation_logs") (pure "event_id") + [ arrayRel $$(nonEmptyText "scheduled_event_logs") + $ RUFKeyOn + $ ArrRelUsingFKeyOn (QualifiedObject "hdb_catalog" "hdb_scheduled_event_invocation_logs") (pure "event_id") ], table "hdb_catalog" @@ -915,7 +929,7 @@ recreateSystemMetadata = do objectRel name using = Left $ RelDef (RelName name) using Nothing arrayRel name using = Right $ RelDef (RelName name) using Nothing manualConfig schemaName tableName columns = - RUManual $ - RelManualTableConfig + RUManual + $ RelManualTableConfig (QualifiedObject schemaName tableName) (RelManualCommon (HashMap.fromList columns) Nothing) diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Rename.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Rename.hs index 442f7547143..1d7d31957de 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Rename.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Rename.hs @@ -71,11 +71,11 @@ data Rename b otherDeps :: (QErrM m) => Text -> SchemaObjId -> m () otherDeps errMsg d = - throw500 $ - "unexpected dependency " - <> reportSchemaObj d - <> "; " - <> errMsg + throw500 + $ "unexpected dependency " + <> reportSchemaObj d + <> "; " + <> errMsg -- | Replace all references to a given table name by its new name 
across the entire metadata. -- @@ -102,10 +102,10 @@ renameTableInMetadata :: renameTableInMetadata source newQT oldQT = do sc <- askSchemaCache let allDeps = - getDependentObjs sc $ - SOSourceObj source $ - AB.mkAnyBackend $ - SOITable @b oldQT + getDependentObjs sc + $ SOSourceObj source + $ AB.mkAnyBackend + $ SOITable @b oldQT -- update all dependant schema objects forM_ allDeps $ \case @@ -134,11 +134,15 @@ renameTableInMetadata source newQT oldQT = do -- any other kind of dependent object (erroneous) d -> otherDeps errMsg d -- Update table name in metadata - tell $ - MetadataModifier $ - metaSources . ix source . (toSourceMetadata @b) . smTables %~ \tables -> - flip (maybe tables) (InsOrdHashMap.lookup oldQT tables) $ - \tableMeta -> InsOrdHashMap.delete oldQT $ InsOrdHashMap.insert newQT tableMeta {_tmTable = newQT} tables + tell + $ MetadataModifier + $ metaSources + . ix source + . (toSourceMetadata @b) + . smTables + %~ \tables -> + flip (maybe tables) (InsOrdHashMap.lookup oldQT tables) + $ \tableMeta -> InsOrdHashMap.delete oldQT $ InsOrdHashMap.insert newQT tableMeta {_tmTable = newQT} tables where errMsg = "cannot rename table " <> oldQT <<> " to " <>> newQT @@ -172,11 +176,11 @@ renameColumnInMetadata oCol nCol source qt fieldInfo = do assertFldNotExists -- Fetch dependent objects let depObjs = - getDependentObjs sc $ - SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b qt $ - TOCol @b oCol + getDependentObjs sc + $ SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b qt + $ TOCol @b oCol renameItem = RenameItem @b qt oCol nCol renameFld = RFCol renameItem -- Update dependent objects @@ -191,10 +195,12 @@ renameColumnInMetadata oCol nCol source qt fieldInfo = do SOITableObj refQT (TORel rn) -> updateColInRel @b source refQT rn renameItem SOITableObj refQT (TOTrigger triggerName) -> - tell $ - MetadataModifier $ - tableMetadataSetter @b source refQT . tmEventTriggers . ix triggerName - %~ updateColumnInEventTrigger @b refQT oCol nCol qt + tell + $ MetadataModifier + $ tableMetadataSetter @b source refQT + . tmEventTriggers + . 
ix triggerName + %~ updateColumnInEventTrigger @b refQT oCol nCol qt SOITableObj _ (TORemoteRel remoteRelName) -> updateColInRemoteRelationshipLHS source remoteRelName renameItem _ -> otherDeps errMsg sobj @@ -215,14 +221,14 @@ renameColumnInMetadata oCol nCol source qt fieldInfo = do assertFldNotExists = case HashMap.lookup (fromCol @b oCol) fieldInfo of Just (FIRelationship _) -> - throw400 AlreadyExists $ - "cannot rename column " - <> oCol - <<> " to " - <> nCol - <<> " in table " - <> qt - <<> " as a relationship with the name already exists" + throw400 AlreadyExists + $ "cannot rename column " + <> oCol + <<> " to " + <> nCol + <<> " in table " + <> qt + <<> " as a relationship with the name already exists" _ -> pure () renameRelationshipInMetadata :: @@ -241,11 +247,11 @@ renameRelationshipInMetadata :: renameRelationshipInMetadata source qt oldRN relType newRN = do sc <- askSchemaCache let depObjs = - getDependentObjs sc $ - SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b qt $ - TORel oldRN + getDependentObjs sc + $ SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b qt + $ TORel oldRN renameFld = RFRel $ RenameItem @b qt oldRN newRN forM_ depObjs $ \case @@ -254,18 +260,19 @@ renameRelationshipInMetadata source qt oldRN relType newRN = do updatePermFlds @b source refQT role pt $ RField renameFld _ -> otherDeps errMsg sobj d -> otherDeps errMsg d - tell $ - MetadataModifier $ - tableMetadataSetter @b source qt %~ case relType of - ObjRel -> tmObjectRelationships %~ rewriteRelationships - ArrRel -> tmArrayRelationships %~ rewriteRelationships + tell + $ MetadataModifier + $ tableMetadataSetter @b source qt + %~ case relType of + ObjRel -> tmObjectRelationships %~ rewriteRelationships + ArrRel -> tmArrayRelationships %~ rewriteRelationships where errMsg = "cannot rename relationship " <> oldRN <<> " to " <>> newRN rewriteRelationships :: Relationships (RelDef a) -> Relationships (RelDef a) rewriteRelationships relationsMap = - flip (maybe relationsMap) (InsOrdHashMap.lookup oldRN relationsMap) $ - \rd -> InsOrdHashMap.insert newRN rd {_rdName = newRN} $ InsOrdHashMap.delete oldRN relationsMap + flip (maybe relationsMap) (InsOrdHashMap.lookup oldRN relationsMap) + $ \rd -> InsOrdHashMap.insert newRN rd {_rdName = newRN} $ InsOrdHashMap.delete oldRN relationsMap -- update table names in relationship definition updateRelDefs :: @@ -283,11 +290,12 @@ updateRelDefs :: updateRelDefs source qt rn renameTable = do fim <- askTableFieldInfoMap @b source qt ri <- askRelType fim rn "" - tell $ - MetadataModifier $ - tableMetadataSetter source qt %~ case riType ri of - ObjRel -> tmObjectRelationships . ix rn %~ updateObjRelDef renameTable - ArrRel -> tmArrayRelationships . ix rn %~ updateArrRelDef renameTable + tell + $ MetadataModifier + $ tableMetadataSetter source qt + %~ case riType ri of + ObjRel -> tmObjectRelationships . ix rn %~ updateObjRelDef renameTable + ArrRel -> tmArrayRelationships . ix rn %~ updateArrRelDef renameTable where updateObjRelDef :: RenameTable b -> ObjRelDef b -> ObjRelDef b updateObjRelDef (oldQT, newQT) = @@ -327,21 +335,22 @@ updatePermFlds source refQT rn pt rename = do tables <- fold <$> askTableCache source let withTables :: Reader (TableCache b) a -> a withTables = flip runReader tables - tell $ - MetadataModifier $ - tableMetadataSetter source refQT %~ case pt of - PTInsert -> - tmInsertPermissions . ix rn . pdPermission %~ \insPerm -> - withTables $ updateInsPermFlds refQT rename insPerm - PTSelect -> - tmSelectPermissions . ix rn . 
pdPermission %~ \selPerm -> - withTables $ updateSelPermFlds refQT rename selPerm - PTUpdate -> - tmUpdatePermissions . ix rn . pdPermission %~ \updPerm -> - withTables $ updateUpdPermFlds refQT rename updPerm - PTDelete -> - tmDeletePermissions . ix rn . pdPermission %~ \delPerm -> - withTables $ updateDelPermFlds refQT rename delPerm + tell + $ MetadataModifier + $ tableMetadataSetter source refQT + %~ case pt of + PTInsert -> + tmInsertPermissions . ix rn . pdPermission %~ \insPerm -> + withTables $ updateInsPermFlds refQT rename insPerm + PTSelect -> + tmSelectPermissions . ix rn . pdPermission %~ \selPerm -> + withTables $ updateSelPermFlds refQT rename selPerm + PTUpdate -> + tmUpdatePermissions . ix rn . pdPermission %~ \updPerm -> + withTables $ updateUpdPermFlds refQT rename updPerm + PTDelete -> + tmDeletePermissions . ix rn . pdPermission %~ \delPerm -> + withTables $ updateDelPermFlds refQT rename delPerm updateInsPermFlds :: (MonadReader (TableCache b) m, Backend b) => @@ -445,14 +454,16 @@ updateCols qt rf permSpec = where updateCols' oCol nCol cols = case cols of PCStar -> cols - PCCols c -> PCCols $ - flip map c $ - \col -> if col == oCol then nCol else col + PCCols c -> PCCols + $ flip map c + $ \col -> if col == oCol then nCol else col updateTableInBoolExp :: (Backend b) => RenameTable b -> BoolExp b -> BoolExp b updateTableInBoolExp (oldQT, newQT) = - over _Wrapped . transform $ - (_BoolExists . geTable) %~ \rqfQT -> + over _Wrapped + . transform + $ (_BoolExists . geTable) + %~ \rqfQT -> if rqfQT == oldQT then newQT else rqfQT updateFieldInBoolExp :: @@ -468,7 +479,9 @@ updateFieldInBoolExp qt rf be = BoolOr exps -> BoolOr <$> procExps exps BoolNot e -> BoolNot <$> updateBoolExp' e BoolExists (GExists refqt wh) -> - BoolExists . GExists refqt . unBoolExp + BoolExists + . GExists refqt + . unBoolExp <$> updateFieldInBoolExp refqt rf (BoolExp wh) BoolField fld -> BoolField <$> updateColExp qt rf fld where @@ -491,7 +504,9 @@ updateColExp qt rf (ColExp fld val) = tables <- ask let maybeFieldInfo = HashMap.lookup qt tables - >>= HashMap.lookup fld . _tciFieldInfoMap . _tiCoreInfo + >>= HashMap.lookup fld + . _tciFieldInfoMap + . _tiCoreInfo case maybeFieldInfo of Nothing -> pure val Just fi -> case fi of @@ -527,16 +542,16 @@ updateColInRel source fromQT rn rnCol = do case riTarget relInfo of RelTargetNativeQuery _ -> error "updateColInRel RelTargetNativeQuery" RelTargetTable relTableName -> - tell $ - MetadataModifier $ - tableMetadataSetter source fromQT - %~ case riType relInfo of - ObjRel -> - tmObjectRelationships . ix rn . rdUsing - %~ updateColInObjRel fromQT relTableName rnCol - ArrRel -> - tmArrayRelationships . ix rn . rdUsing - %~ updateColInArrRel fromQT relTableName rnCol + tell + $ MetadataModifier + $ tableMetadataSetter source fromQT + %~ case riType relInfo of + ObjRel -> + tmObjectRelationships . ix rn . rdUsing + %~ updateColInObjRel fromQT relTableName rnCol + ArrRel -> + tmArrayRelationships . ix rn . rdUsing + %~ updateColInArrRel fromQT relTableName rnCol -- | Local helper: update a column's name in the left-hand side of a remote relationship. -- @@ -570,8 +585,9 @@ updateColInRemoteRelationshipLHS source remoteRelationshipName (RenameItem qt ol HashMap.fromList . map (\(key, value) -> (if key == oldFieldName then newFieldName else key, value)) . 
HashMap.toList updateFieldCalls (RemoteFields fields) = - RemoteFields $ - fields <&> \(FieldCall name (RemoteArguments args)) -> + RemoteFields + $ fields + <&> \(FieldCall name (RemoteArguments args)) -> FieldCall name $ RemoteArguments $ updateVariableName <$> args updateVariableName = @@ -614,15 +630,15 @@ updateColInRemoteRelationshipRHS :: RenameCol target -> m () updateColInRemoteRelationshipRHS source tableName remoteRelationshipName (RenameItem _ oldCol newCol) = - tell $ - MetadataModifier $ - tableMetadataSetter @source source tableName - . tmRemoteRelationships - . ix remoteRelationshipName - . rrDefinition - . _RelationshipToSource - . tsrdFieldMapping - %~ updateMapValue + tell + $ MetadataModifier + $ tableMetadataSetter @source source tableName + . tmRemoteRelationships + . ix remoteRelationshipName + . rrDefinition + . _RelationshipToSource + . tsrdFieldMapping + %~ updateMapValue where oldFieldName = fromCol @target oldCol newFieldName = fromCol @target newCol @@ -650,15 +666,15 @@ updateTableInRemoteRelationshipRHS :: RenameTable target -> m () updateTableInRemoteRelationshipRHS source tableName remoteRelationshipName (_, newTableName) = - tell $ - MetadataModifier $ - tableMetadataSetter @source source tableName - . tmRemoteRelationships - . ix remoteRelationshipName - . rrDefinition - . _RelationshipToSource - . tsrdTable - .~ toJSON newTableName + tell + $ MetadataModifier + $ tableMetadataSetter @source source tableName + . tmRemoteRelationships + . ix remoteRelationshipName + . rrDefinition + . _RelationshipToSource + . tsrdTable + .~ toJSON newTableName updateColInObjRel :: (Backend b) => @@ -753,9 +769,12 @@ possiblyUpdateCustomColumnNames :: Column b -> m () possiblyUpdateCustomColumnNames source tableName oldColumn newColumn = do - tell $ - MetadataModifier $ - tableMetadataSetter @b source tableName . tmConfiguration . tcColumnConfig %~ swapOldColumnForNewColumn + tell + $ MetadataModifier + $ tableMetadataSetter @b source tableName + . tmConfiguration + . tcColumnConfig + %~ swapOldColumnForNewColumn where swapOldColumnForNewColumn :: HashMap (Column b) columnData -> HashMap (Column b) columnData swapOldColumnForNewColumn customColumns = diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Source.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Source.hs index c00faf4105f..16049b5a171 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Source.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Source.hs @@ -102,18 +102,18 @@ instance (Backend b) => FromJSONWithContext (BackendSourceKind b) (AddSource b) parseJSONWithContext backendKind = withObject "AddSource" $ \o -> AddSource <$> o - .: "name" + .: "name" <*> pure backendKind <*> o - .: "configuration" + .: "configuration" <*> o - .:? "replace_configuration" - .!= False + .:? "replace_configuration" + .!= False <*> o - .:? "customization" - .!= emptySourceCustomization + .:? "customization" + .!= emptySourceCustomization <*> o - .:? "health_check" + .:? "health_check" runAddSource :: forall m b. 
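-- [Editor's sketch, not part of the diff.] The AddSource parser above leans on
-- aeson's (.:?) / (.!=) pair: (.:?) yields Nothing when a key is absent, and
-- (.!=) substitutes a default for that Nothing. A minimal standalone example,
-- with a hypothetical Widget record standing in for AddSource:
{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson (FromJSON (..), eitherDecode, withObject, (.!=), (.:), (.:?))

data Widget = Widget
  { widgetName :: String, -- required key
    widgetRetries :: Int, -- optional key, defaulted when absent
    widgetComment :: Maybe String -- optional key, kept as Maybe
  }
  deriving (Show)

instance FromJSON Widget where
  parseJSON = withObject "Widget" $ \o ->
    Widget
      <$> o
      .: "name"
      <*> o
      .:? "retries"
      .!= 0
      <*> o
      .:? "comment"

main :: IO ()
main = print (eitherDecode "{\"name\": \"w\"}" :: Either String Widget)
-- Right (Widget {widgetName = "w", widgetRetries = 0, widgetComment = Nothing})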
@@ -167,17 +167,21 @@ runRenameSource :: runRenameSource RenameSource {..} = do sources <- scSources <$> askSchemaCache - unless (HashMap.member _rmName sources) $ - throw400 NotExists $ - "Could not find source with name " <>> _rmName + unless (HashMap.member _rmName sources) + $ throw400 NotExists + $ "Could not find source with name " + <>> _rmName - when (HashMap.member _rmNewName sources) $ - throw400 AlreadyExists $ - "Source with name " <> _rmNewName <<> " already exists" + when (HashMap.member _rmNewName sources) + $ throw400 AlreadyExists + $ "Source with name " + <> _rmNewName + <<> " already exists" let metadataModifier = - MetadataModifier $ - metaSources %~ renameBackendSourceMetadata _rmName _rmNewName + MetadataModifier + $ metaSources + %~ renameBackendSourceMetadata _rmName _rmNewName buildSchemaCacheFor (MOSource _rmNewName) metadataModifier pure successMsg @@ -230,10 +234,12 @@ runDropSource dropSourceInfo@(DropSource name cascade) = do AB.dispatchAnyBackend @BackendMetadata backendSourceInfo $ dropSource dropSourceInfo Nothing -> do metadata <- getMetadata - void $ - onNothing (metadata ^. metaSources . at name) $ - throw400 NotExists $ - "source with name " <> name <<> " does not exist" + void + $ onNothing (metadata ^. metaSources . at name) + $ throw400 NotExists + $ "source with name " + <> name + <<> " does not exist" if cascade then -- Without sourceInfo we can't cascade, so throw an error throw400 Unexpected $ "source with name " <> name <<> " is inconsistent" @@ -262,8 +268,8 @@ dropSource (DropSource sourceName cascade) sourceInfo = do schemaCache <- askSchemaCache let remoteDeps = getRemoteDependencies schemaCache sourceName - unless (cascade || null remoteDeps) $ - reportDependentObjectsExist remoteDeps + unless (cascade || null remoteDeps) + $ reportDependentObjectsExist remoteDeps metadataModifier <- execWriterT $ do traverse_ purgeSourceAndSchemaDependencies remoteDeps @@ -315,13 +321,13 @@ instance (Backend b) => FromJSONWithContext (BackendSourceKind b) (UpdateSource parseJSONWithContext _ = withObject "UpdateSource" $ \o -> UpdateSource <$> o - .: "name" + .: "name" <*> o - .:? "configuration" + .:? "configuration" <*> o - .:? "customization" + .:? "customization" <*> o - .:? "health_check" + .:? "health_check" runUpdateSource :: forall m b. 
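-- [Editor's sketch, not part of the diff.] runRenameSource above follows a
-- check-then-rewrite shape: reject a missing old name, reject a clashing new
-- name, then swap the key. The same shape on a bare HashMap, with the
-- hypothetical pure helper renameKey standing in for the real code's
-- MetadataModifier/metaSources plumbing:
import qualified Data.HashMap.Strict as HashMap

renameKey ::
  String -> -- old name, must exist
  String -> -- new name, must not exist yet
  HashMap.HashMap String v ->
  Either String (HashMap.HashMap String v)
renameKey old new m = case HashMap.lookup old m of
  Nothing -> Left ("Could not find source with name " <> old)
  Just v
    | HashMap.member new m ->
        Left ("Source with name " <> new <> " already exists")
    | otherwise ->
        Right (HashMap.insert new v (HashMap.delete old m))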
@@ -441,7 +447,7 @@ data GetTableInfo (b :: BackendType) = GetTableInfo _gtiTableName :: TableName b } -instance Backend b => FromJSON (GetTableInfo b) where +instance (Backend b) => FromJSON (GetTableInfo b) where parseJSON = J.withObject "GetTableInfo_" \o -> do _gtiSourceName <- o .: "source" _gtiTableName <- o .: "table" diff --git a/server/src-lib/Hasura/RQL/DDL/SchemaRegistry.hs b/server/src-lib/Hasura/RQL/DDL/SchemaRegistry.hs index 09d174d3d71..45bc4dd0f81 100644 --- a/server/src-lib/Hasura/RQL/DDL/SchemaRegistry.hs +++ b/server/src-lib/Hasura/RQL/DDL/SchemaRegistry.hs @@ -95,10 +95,10 @@ data GQLSchemaInformation = GQLSchemaInformation instance J.ToJSON GQLSchemaInformation where toJSON (GQLSchemaInformation schemaSdl schemaHash) = - J.object $ - [ "schema_sdl" J..= (_sdl schemaSdl), - "schema_hash" J..= (_schemaHash schemaHash) - ] + J.object + $ [ "schema_sdl" J..= (_sdl schemaSdl), + "schema_hash" J..= (_schemaHash schemaHash) + ] data ProjectGQLSchemaInformation = ProjectGQLSchemaInformation { _pgsiSchemaRegistryMap :: SchemaRegistryMap, @@ -149,15 +149,16 @@ newtype SchemaRegistryControlRole = SchemaRegistryControlRole {unSchemaRegistryC selectNowQuery :: PG.TxE QErr UTCTime selectNowQuery = - runIdentity . PG.getRow + runIdentity + . PG.getRow <$> PG.withQE SQLTypes.defaultTxErrorHandler (PG.fromText "SELECT now();") () False calculateSchemaSDLHash :: T.Text -> RoleName -> SchemaHash calculateSchemaSDLHash sdl role = SchemaHash $ bsToTxt hash where hash = - cryptoHash $ - J.object + cryptoHash + $ J.object [ "schema_sdl" J..= sdl, "role" J..= roleNameToTxt role ] diff --git a/server/src-lib/Hasura/RQL/DDL/SourceKinds.hs b/server/src-lib/Hasura/RQL/DDL/SourceKinds.hs index 11b33e7872b..5648f369165 100644 --- a/server/src-lib/Hasura/RQL/DDL/SourceKinds.hs +++ b/server/src-lib/Hasura/RQL/DDL/SourceKinds.hs @@ -69,13 +69,13 @@ instance FromJSON SourceKindInfo where instance ToJSON SourceKindInfo where toJSON SourceKindInfo {..} = - J.object $ - [ "kind" .= _skiSourceKind, - "builtin" .= _skiBuiltin, - "available" .= _skiAvailable - ] - ++ ["display_name" .= _skiDisplayName | has _skiDisplayName] - ++ ["release_name" .= _skiReleaseName | has _skiReleaseName] + J.object + $ [ "kind" .= _skiSourceKind, + "builtin" .= _skiBuiltin, + "available" .= _skiAvailable + ] + ++ ["display_name" .= _skiDisplayName | has _skiDisplayName] + ++ ["release_name" .= _skiReleaseName | has _skiReleaseName] where has :: Maybe Text -> Bool has x = not $ isNothing x || x == Just "" @@ -137,7 +137,7 @@ builtinSourceKinds = SourceKinds $ mapMaybe mkNativeSource (filter (/= Backend.DataConnector) Backend.supportedBackends) -- | Collect 'SourceKindInfo' from Native and GDC backend types. -collectSourceKinds :: Metadata.MetadataM m => m SourceKinds +collectSourceKinds :: (Metadata.MetadataM m) => m SourceKinds collectSourceKinds = fmap (builtinSourceKinds <>) agentSourceKinds runListSourceKinds :: diff --git a/server/src-lib/Hasura/RQL/DDL/Warnings.hs b/server/src-lib/Hasura/RQL/DDL/Warnings.hs index 13eda12bc4b..6ffefc56c44 100644 --- a/server/src-lib/Hasura/RQL/DDL/Warnings.hs +++ b/server/src-lib/Hasura/RQL/DDL/Warnings.hs @@ -66,8 +66,9 @@ data AllowWarnings instance FromJSON AllowWarnings where parseJSON = - J.withBool "AllowWarnings" $ - pure . bool DisallowWarnings AllowWarnings + J.withBool "AllowWarnings" + $ pure + . bool DisallowWarnings AllowWarnings instance ToJSON AllowWarnings where toJSON = J.toJSON . 
toBool @@ -120,10 +121,11 @@ runMetadataWarnings = flip runStateT mempty mkSuccessResponseWithWarnings :: MetadataWarnings -> EncJSON mkSuccessResponseWithWarnings warnings = - encJFromJValue . J.object $ - [ "message" .= ("success" :: Text) - ] - <> ["warnings" .= warnings | not (null warnings)] + encJFromJValue + . J.object + $ [ "message" .= ("success" :: Text) + ] + <> ["warnings" .= warnings | not (null warnings)] successMsgWithWarnings :: (Monad m) => (StateT MetadataWarnings m ()) -> m EncJSON successMsgWithWarnings action = do diff --git a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform.hs b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform.hs index da04c734a02..b32f9801368 100644 --- a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform.hs +++ b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform.hs @@ -130,7 +130,7 @@ requestL = lens getter setter -- interpolation on the request url. applyRequestTransform :: forall m. - MonadError TransformErrorBundle m => + (MonadError TransformErrorBundle m) => (HTTP.Request -> RequestContext) -> RequestTransformFns -> HTTP.Request -> @@ -144,8 +144,8 @@ applyRequestTransform mkCtx transformations request = -- Apply all of the provided request transformation functions to the -- request data extracted from the given 'HTTP.Request'. transformReqData transformCtx reqData = - B.bsequence' $ - B.bzipWith3C @Transform + B.bsequence' + $ B.bzipWith3C @Transform transformField transformCtx transformations @@ -193,8 +193,10 @@ mkRespTemplateTransform (Body.ModifyAsJSON template) context = runResponseTemplateTransform template context mkRespTemplateTransform (Body.ModifyAsFormURLEncoded formTemplates) context = do result <- - liftEither . V.toEither . for formTemplates $ - runUnescapedResponseTemplateTransform' context + liftEither + . V.toEither + . for formTemplates + $ runUnescapedResponseTemplateTransform' context pure . J.String . TE.decodeUtf8 . BL.toStrict $ Body.foldFormEncoded result mkResponseTransform :: MetadataResponseTransform -> ResponseTransform diff --git a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Body.hs b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Body.hs index c5c9cfb1939..700f9b38c19 100644 --- a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Body.hs +++ b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Body.hs @@ -57,7 +57,7 @@ instance Transform Body where -- transformations, this can be seen as an implementation of these -- transformations as normal Haskell functions. applyBodyTransformFn :: - MonadError TransformErrorBundle m => + (MonadError TransformErrorBundle m) => BodyTransformFn -> RequestTransformCtx -> Body -> @@ -70,8 +70,10 @@ applyBodyTransformFn fn context _originalBody = case fn of pure . JSONBody . Just $ result ModifyAsFormURLEncoded formTemplates -> do result <- - liftEither . V.toEither . for formTemplates $ - runUnescapedRequestTemplateTransform' context + liftEither + . V.toEither + . for formTemplates + $ runUnescapedRequestTemplateTransform' context pure . RawBody $ foldFormEncoded result -- | Validate that the provided 'BodyTransformFn' is correct in the context of @@ -100,10 +102,10 @@ foldFormEncoded = . L.intersperse "&" . 
M.foldMapWithKey @[LBS.ByteString] \k v ->
-      [ LBS.fromStrict $
-          TE.encodeUtf8 (escapeURIText k)
-            <> "="
-            <> escapeURIBS v
+      [ LBS.fromStrict
+          $ TE.encodeUtf8 (escapeURIText k)
+          <> "="
+          <> escapeURIBS v
         | v /= "null"
       ]
diff --git a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Class.hs b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Class.hs
index ddbd761083b..033fd6acf0d 100644
--- a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Class.hs
+++ b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Class.hs
@@ -44,7 +44,7 @@ class Transform a where
   -- ReqTransformCtx -> a -> m a
   -- @
   transform ::
-    MonadError TransformErrorBundle m =>
+    (MonadError TransformErrorBundle m) =>
     TransformFn a ->
     TransformCtx a ->
     a ->
@@ -60,7 +60,7 @@ class Transform a where
 -- | A helper function for serializing transformation errors to JSON.
 throwErrorBundle ::
-  MonadError TransformErrorBundle m =>
+  (MonadError TransformErrorBundle m) =>
   Text ->
   Maybe J.Value ->
   m a
@@ -87,7 +87,7 @@ wrapUnescapedTemplate (UnescapedTemplate txt) = Template $ "\"" <> txt <> "\""
 -- | Encode a JSON Scalar Value as a 'ByteString'.
 -- If a non-Scalar value is provided, it will return a 'TransformErrorBundle'
 encodeScalar ::
-  MonadError TransformErrorBundle m =>
+  (MonadError TransformErrorBundle m) =>
   J.Value ->
   m ByteString
 encodeScalar = \case
diff --git a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Headers.hs b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Headers.hs
index 5d9d27e9d35..0bca62497b6 100644
--- a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Headers.hs
+++ b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Headers.hs
@@ -52,7 +52,7 @@ instance Transform Headers where
 -- header transformations, this can be seen as an implementation of these
 -- transformations as normal Haskell functions.
 applyHeadersTransformFn ::
-  MonadError TransformErrorBundle m =>
+  (MonadError TransformErrorBundle m) =>
   HeadersTransformFn ->
   RequestTransformCtx ->
   Headers ->
@@ -69,8 +69,9 @@ applyHeadersTransformFn fn context (Headers originalHeaders) = case fn of
     -- NOTE: We use `ApplicativeDo` here to take advantage of Validation's
     -- applicative sequencing
-    newHeaders <- liftEither . V.toEither $
-      for addOrReplaceHeaders \(rawKey, rawValue) -> do
+    newHeaders <- liftEither
+      . V.toEither
+      $ for addOrReplaceHeaders \(rawKey, rawValue) -> do
         let key = CI.map TE.encodeUtf8 rawKey
         value <- runUnescapedRequestTemplateTransform' context rawValue
         pure (key, value)
diff --git a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Method.hs b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Method.hs
index 1d622d31699..7c39b595515 100644
--- a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Method.hs
+++ b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Method.hs
@@ -41,7 +41,7 @@ instance Transform Method where
 -- transformations, this can be seen as an implementation of these
 -- transformations as normal Haskell functions.
 applyMethodTransformFn ::
-  MonadError TransformErrorBundle m =>
+  (MonadError TransformErrorBundle m) =>
   MethodTransformFn ->
   RequestTransformCtx ->
   Method ->
diff --git a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/QueryParams.hs b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/QueryParams.hs
index 818f1af7823..a39b1fdaf51 100644
--- a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/QueryParams.hs
+++ b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/QueryParams.hs
@@ -49,7 +49,7 @@ instance Transform QueryParams where
 -- transformations, this can be seen as an implementation of these
 -- transformations as normal Haskell functions.
 applyQueryParamsTransformFn ::
-  MonadError TransformErrorBundle m =>
+  (MonadError TransformErrorBundle m) =>
   QueryParamsTransformFn ->
   RequestTransformCtx ->
   QueryParams ->
@@ -58,12 +58,13 @@ applyQueryParamsTransformFn fn context _oldQueryParams = case fn of
   AddOrReplace addOrReplaceParams -> do
     -- NOTE: We use `ApplicativeDo` here to take advantage of Validation's
     -- applicative sequencing
-    queryParams <- liftEither . V.toEither $
-      for addOrReplaceParams \(rawKey, rawValue) -> do
+    queryParams <- liftEither
+      . V.toEither
+      $ for addOrReplaceParams \(rawKey, rawValue) -> do
         key <- runUnescapedRequestTemplateTransform' context rawKey
         value <- traverse (runUnescapedRequestTemplateTransform' context) rawValue
-        pure $
-          if key == "null" || value == Just "null"
+        pure
+          $ if key == "null" || value == Just "null"
             then Nothing
             else Just (key, value)
     pure $ QueryParams (catMaybes queryParams)
diff --git a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Request.hs b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Request.hs
index 1a4f2b2831e..cdaf7defdf4 100644
--- a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Request.hs
+++ b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Request.hs
@@ -133,8 +133,8 @@ runUnescapedRequestTemplateTransform' ::
   UnescapedTemplate ->
   Validation TransformErrorBundle ByteString
 runUnescapedRequestTemplateTransform' context unescapedTemplate =
-  fromEither $
-    runUnescapedRequestTemplateTransform context unescapedTemplate
+  fromEither
+    $ runUnescapedRequestTemplateTransform context unescapedTemplate
 
 -- TODO: Should this live in 'Hasura.RQL.DDL.Webhook.Transform.Validation'?
 validateRequestUnescapedTemplateTransform ::
diff --git a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Response.hs b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Response.hs
index b255ab021b6..783cb19274e 100644
--- a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Response.hs
+++ b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Response.hs
@@ -76,5 +76,5 @@ runUnescapedResponseTemplateTransform' ::
   UnescapedTemplate ->
   Validation TransformErrorBundle ByteString
 runUnescapedResponseTemplateTransform' context unescapedTemplate =
-  fromEither $
-    runUnescapedResponseTemplateTransform context unescapedTemplate
+  fromEither
+    $ runUnescapedResponseTemplateTransform context unescapedTemplate
diff --git a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Url.hs b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Url.hs
index 39dc7df535d..94dfc32f7cb 100644
--- a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Url.hs
+++ b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Url.hs
@@ -48,7 +48,7 @@ instance Transform Url where
 -- transformations, this can be seen as an implementation of these
 -- transformations as normal Haskell functions.
applyUrlTransformFn :: - MonadError TransformErrorBundle m => + (MonadError TransformErrorBundle m) => UrlTransformFn -> RequestTransformCtx -> Url -> diff --git a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Validation.hs b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Validation.hs index a21f752317f..87e928fcba8 100644 --- a/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Validation.hs +++ b/server/src-lib/Hasura/RQL/DDL/Webhook/Transform/Validation.hs @@ -62,7 +62,7 @@ transformFns = lens getter setter -- | Validate all 'TransformFn a' fields in the 'RequestTransform'. validateRequestTransform :: - MonadError QErr m => + (MonadError QErr m) => RequestTransform -> m RequestTransform validateRequestTransform reqTransform = diff --git a/server/src-lib/Hasura/RQL/DML/Count.hs b/server/src-lib/Hasura/RQL/DML/Count.hs index 9b74413e1b9..704c27d5f46 100644 --- a/server/src-lib/Hasura/RQL/DML/Count.hs +++ b/server/src-lib/Hasura/RQL/DML/Count.hs @@ -45,14 +45,14 @@ mkSQLCount (CountQueryP1 tn (permFltr, mWc) mDistCols) = S.mkSelect { S.selExtr = [S.Extractor S.countStar Nothing], S.selFrom = - Just $ - S.FromExp + Just + $ S.FromExp [S.mkSelFromExp False innerSel $ TableName "r"] } where finalWC = - toSQLBoolExp (S.QualTable tn) $ - maybe permFltr (andAnnBoolExps permFltr) mWc + toSQLBoolExp (S.QualTable tn) + $ maybe permFltr (andAnnBoolExps permFltr) mWc innerSel = partSel @@ -85,8 +85,8 @@ validateCountQWith sessVarBldr prepValBldr (CountQuery qt _ mDistCols mWhere) = -- Check if select is allowed selPerm <- - modifyErr (<> selNecessaryMsg) $ - askSelPermInfo tableInfo + modifyErr (<> selNecessaryMsg) + $ askSelPermInfo tableInfo let colInfoMap = _tciFieldInfoMap $ _tiCoreInfo tableInfo @@ -99,15 +99,15 @@ validateCountQWith sessVarBldr prepValBldr (CountQuery qt _ mDistCols mWhere) = -- convert the where clause annSQLBoolExp <- forM mWhere $ \be -> - withPathK "where" $ - convBoolExp colInfoMap selPerm be sessVarBldr colInfoMap (valueParserWithCollectableType prepValBldr) + withPathK "where" + $ convBoolExp colInfoMap selPerm be sessVarBldr colInfoMap (valueParserWithCollectableType prepValBldr) resolvedSelFltr <- - convAnnBoolExpPartialSQL sessVarBldr $ - spiFilter selPerm + convAnnBoolExpPartialSQL sessVarBldr + $ spiFilter selPerm - return $ - CountQueryP1 + return + $ CountQueryP1 qt (resolvedSelFltr, annSQLBoolExp) mDistCols @@ -125,9 +125,9 @@ validateCountQ :: validateCountQ query = do let source = cqSource query tableCache :: TableCache ('Postgres 'Vanilla) <- fold <$> askTableCache source - flip runTableCacheRT tableCache $ - runDMLP1T $ - validateCountQWith sessVarFromCurrentSetting binRHSBuilder query + flip runTableCacheRT tableCache + $ runDMLP1T + $ validateCountQWith sessVarFromCurrentSetting binRHSBuilder query countQToTx :: (MonadTx m) => @@ -135,8 +135,8 @@ countQToTx :: m EncJSON countQToTx (u, p) = do qRes <- - liftTx $ - PG.rawQE + liftTx + $ PG.rawQE dmlTxErrorHandler (PG.fromBuilder countSQL) (toList p) diff --git a/server/src-lib/Hasura/RQL/DML/Delete.hs b/server/src-lib/Hasura/RQL/DML/Delete.hs index 5fa53dfc8fe..b14f444021f 100644 --- a/server/src-lib/Hasura/RQL/DML/Delete.hs +++ b/server/src-lib/Hasura/RQL/DML/Delete.hs @@ -61,8 +61,8 @@ validateDeleteQWith -- Check if select is allowed selPerm <- - modifyErr (<> selNecessaryMsg) $ - askSelPermInfo tableInfo + modifyErr (<> selNecessaryMsg) + $ askSelPermInfo tableInfo let fieldInfoMap = _tciFieldInfoMap coreInfo allCols = mapMaybe (^? 
_SCIScalarColumn) $ getCols fieldInfoMap @@ -73,15 +73,15 @@ validateDeleteQWith -- convert the where clause annSQLBoolExp <- - withPathK "where" $ - convBoolExp fieldInfoMap selPerm rqlBE sessVarBldr fieldInfoMap (valueParserWithCollectableType prepValBldr) + withPathK "where" + $ convBoolExp fieldInfoMap selPerm rqlBE sessVarBldr fieldInfoMap (valueParserWithCollectableType prepValBldr) resolvedDelFltr <- - convAnnBoolExpPartialSQL sessVarBldr $ - dpiFilter delPerm + convAnnBoolExpPartialSQL sessVarBldr + $ dpiFilter delPerm - return $ - AnnDel + return + $ AnnDel tableName (resolvedDelFltr, annSQLBoolExp) (mkDefaultMutFlds mAnnRetCols) @@ -100,9 +100,9 @@ validateDeleteQ :: validateDeleteQ query = do let source = doSource query tableCache :: TableCache ('Postgres 'Vanilla) <- fold <$> askTableCache source - flip runTableCacheRT tableCache $ - runDMLP1T $ - validateDeleteQWith sessVarFromCurrentSetting binRHSBuilder query + flip runTableCacheRT tableCache + $ runDMLP1T + $ validateDeleteQWith sessVarFromCurrentSetting binRHSBuilder query runDelete :: forall m. @@ -123,5 +123,5 @@ runDelete sqlGen q = do userInfo <- askUserInfo validateDeleteQ q >>= runTxWithCtx (_pscExecCtx sourceConfig) (Tx PG.ReadWrite Nothing) LegacyRQLQuery - . flip runReaderT emptyQueryTagsComment - . execDeleteQuery strfyNum Nothing userInfo + . flip runReaderT emptyQueryTagsComment + . execDeleteQuery strfyNum Nothing userInfo diff --git a/server/src-lib/Hasura/RQL/DML/Insert.hs b/server/src-lib/Hasura/RQL/DML/Insert.hs index 89d51d35c36..4842ae23b38 100644 --- a/server/src-lib/Hasura/RQL/DML/Insert.hs +++ b/server/src-lib/Hasura/RQL/DML/Insert.hs @@ -59,16 +59,20 @@ convObj prepFn defInsVals setInsVals fieldInfoMap insObj = do throwNotInsErr c = do roleName <- _uiRole <$> askUserInfo - throw400 NotSupported $ - "column " - <> c <<> " is not insertable" - <> " for role " <>> roleName + throw400 NotSupported + $ "column " + <> c + <<> " is not insertable" + <> " for role " + <>> roleName validateInpCols :: (MonadError QErr m) => [PGCol] -> [PGCol] -> m () validateInpCols inpCols updColsPerm = forM_ inpCols $ \inpCol -> - unless (inpCol `elem` updColsPerm) $ - throw400 ValidationFailed $ - "column " <> inpCol <<> " is not updatable" + unless (inpCol `elem` updColsPerm) + $ throw400 ValidationFailed + $ "column " + <> inpCol + <<> " is not updatable" buildConflictClause :: (UserInfoM m, QErrM m) => @@ -113,22 +117,22 @@ buildConflictClause sessVarBldr tableInfo inpCols (OnConflict mTCol mTCons act) validateCols c = do let targetcols = getPGCols c - void $ - withPathK "constraint_on" $ - indexedForM targetcols $ - \pgCol -> askColumnType fieldInfoMap pgCol "" + void + $ withPathK "constraint_on" + $ indexedForM targetcols + $ \pgCol -> askColumnType fieldInfoMap pgCol "" validateConstraint c = do let tableConsNames = maybe [] (toList . fmap (_cName . 
_ucConstraint)) (tciUniqueOrPrimaryKeyConstraints coreInfo) - withPathK "constraint" $ - unless (c `elem` tableConsNames) $ - throw400 Unexpected $ - "constraint " - <> getConstraintTxt c - <<> " for table " - <> _tciName coreInfo - <<> " does not exist" + withPathK "constraint" + $ unless (c `elem` tableConsNames) + $ throw400 Unexpected + $ "constraint " + <> getConstraintTxt c + <<> " for table " + <> _tciName coreInfo + <<> " does not exist" getUpdPerm = do upi <- askUpdPermInfo tableInfo @@ -173,8 +177,8 @@ convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName _ val oC mR mAnnRetCols <- forM mRetCols $ \retCols -> do -- Check if select is allowed only if you specify returning selPerm <- - modifyErr (<> selNecessaryMsg) $ - askSelPermInfo tableInfo + modifyErr (<> selNecessaryMsg) + $ askSelPermInfo tableInfo withPathK "returning" $ checkRetCols fieldInfoMap selPerm retCols @@ -191,8 +195,9 @@ convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName _ val oC mR resolvedPreSet <- mapM (convPartialSQLExp sessVarBldr) setInsVals - insTuples <- withPathK "objects" $ - indexedForM insObjs $ \obj -> + insTuples <- withPathK "objects" + $ indexedForM insObjs + $ \obj -> convObj prepFn defInsVals resolvedPreSet fieldInfoMap obj let sqlExps = map snd insTuples inpCols = HS.toList $ HS.fromList $ concatMap fst insTuples @@ -200,17 +205,18 @@ convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName _ val oC mR insCheck <- convAnnBoolExpPartialSQL sessVarFromCurrentSetting (ipiCheck insPerm) updCheck <- traverse (convAnnBoolExpPartialSQL sessVarFromCurrentSetting) (upiCheck =<< updPerm) - conflictClause <- withPathK "on_conflict" $ - forM oC $ \c -> do + conflictClause <- withPathK "on_conflict" + $ forM oC + $ \c -> do role <- askCurRole - unless (isTabUpdatable role tableInfo) $ - throw400 PermissionDenied $ - "upsert is not allowed for role " - <> role - <<> " since update permissions are not defined" + unless (isTabUpdatable role tableInfo) + $ throw400 PermissionDenied + $ "upsert is not allowed for role " + <> role + <<> " since update permissions are not defined" buildConflictClause sessVarBldr tableInfo inpCols c - return $ - InsertQueryP1 + return + $ InsertQueryP1 tableName insCols sqlExps @@ -230,13 +236,13 @@ convInsQ :: convInsQ query = do let source = iqSource query tableCache :: TableCache ('Postgres 'Vanilla) <- fold <$> askTableCache source - flip runTableCacheRT tableCache $ - runDMLP1T $ - convInsertQuery - (withPathK "objects" . decodeInsObjs) - sessVarFromCurrentSetting - binRHSBuilder - query + flip runTableCacheRT tableCache + $ runDMLP1T + $ convInsertQuery + (withPathK "objects" . decodeInsObjs) + sessVarFromCurrentSetting + binRHSBuilder + query runInsert :: forall m. 
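-- [Editor's sketch, not part of the diff.] The on_conflict branch above guards
-- upserts with an unless/throw pair in MonadError. The same guard reduced to a
-- plain String error, with hypothetical Role/checkUpsertAllowed names in place
-- of the real throw400/isTabUpdatable machinery:
import Control.Monad (unless)
import Control.Monad.Except (MonadError, runExcept, throwError)

newtype Role = Role String deriving (Eq, Show)

checkUpsertAllowed :: (MonadError String m) => Role -> [Role] -> m ()
checkUpsertAllowed role updatableRoles =
  unless (role `elem` updatableRoles)
    $ throwError
    $ "upsert is not allowed for role "
    <> show role
    <> " since update permissions are not defined"

-- runExcept (checkUpsertAllowed (Role "viewer") [Role "editor"]) ==
--   Left "upsert is not allowed for role Role \"viewer\" since update permissions are not defined"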
@@ -256,9 +262,9 @@ runInsert sqlGen q = do userInfo <- askUserInfo res <- convInsQ q let strfyNum = stringifyNum sqlGen - runTxWithCtx (_pscExecCtx sourceConfig) (Tx PG.ReadWrite Nothing) LegacyRQLQuery $ - flip runReaderT emptyQueryTagsComment $ - execInsertQuery strfyNum Nothing userInfo res + runTxWithCtx (_pscExecCtx sourceConfig) (Tx PG.ReadWrite Nothing) LegacyRQLQuery + $ flip runReaderT emptyQueryTagsComment + $ execInsertQuery strfyNum Nothing userInfo res decodeInsObjs :: (UserInfoM m, QErrM m) => Value -> m [InsObj ('Postgres 'Vanilla)] decodeInsObjs v = do diff --git a/server/src-lib/Hasura/RQL/DML/Internal.hs b/server/src-lib/Hasura/RQL/DML/Internal.hs index 64f26173005..6c54181bec0 100644 --- a/server/src-lib/Hasura/RQL/DML/Internal.hs +++ b/server/src-lib/Hasura/RQL/DML/Internal.hs @@ -98,9 +98,14 @@ assertAskPermInfo :: assertAskPermInfo pt pa tableInfo = do roleName <- askCurRole mPermInfo <- askPermInfo pa tableInfo - onNothing mPermInfo $ - throw400 PermissionDenied $ - permTypeToCode pt <> " on " <> tableInfoName tableInfo <<> " for role " <> roleName <<> " is not allowed. " + onNothing mPermInfo + $ throw400 PermissionDenied + $ permTypeToCode pt + <> " on " + <> tableInfoName tableInfo + <<> " for role " + <> roleName + <<> " is not allowed. " isTabUpdatable :: RoleName -> TableInfo ('Postgres 'Vanilla) -> Bool isTabUpdatable role ti @@ -164,9 +169,9 @@ checkPermOnCol :: m () checkPermOnCol pt allowedCols col = do role <- askCurRole - unless (HS.member col allowedCols) $ - throw400 PermissionDenied $ - permErrMsg role + unless (HS.member col allowedCols) + $ throw400 PermissionDenied + $ permErrMsg role where permErrMsg role | role == adminRoleName = "no such column exists: " <>> col @@ -180,9 +185,9 @@ checkSelectPermOnScalarComputedField :: m () checkSelectPermOnScalarComputedField selPermInfo computedField = do role <- askCurRole - unless (HashMap.member computedField $ spiComputedFields selPermInfo) $ - throw400 PermissionDenied $ - permErrMsg role + unless (HashMap.member computedField $ spiComputedFields selPermInfo) + $ throw400 PermissionDenied + $ permErrMsg role where permErrMsg role | role == adminRoleName = "no such computed field exists: " <>> computedField @@ -201,8 +206,8 @@ valueParserWithCollectableType valBldr pgType val = case pgType of -- for arrays, we don't use the prepared builder vals <- runAesonParser parseJSON val scalarValues <- parseScalarValuesColumnType ofTy vals - return $ - S.SETyAnn + return + $ S.SETyAnn (S.SEArray $ map (toTxtValue . 
ColumnValue ofTy) scalarValues) (S.mkTypeAnn $ CollectableTypeArray (unsafePGColumnToBackend ofTy)) @@ -223,17 +228,19 @@ fetchRelTabInfo :: m (TableInfo ('Postgres 'Vanilla)) fetchRelTabInfo refTabName = -- Internal error - modifyErrAndSet500 ("foreign " <>) $ - askTableInfoSource refTabName + modifyErrAndSet500 ("foreign " <>) + $ askTableInfoSource refTabName askTableInfoSource :: (QErrM m, TableInfoRM ('Postgres 'Vanilla) m) => TableName ('Postgres 'Vanilla) -> m (TableInfo ('Postgres 'Vanilla)) askTableInfoSource tableName = do - onNothingM (lookupTableInfo tableName) $ - throw400 NotExists $ - "table " <> tableName <<> " does not exist" + onNothingM (lookupTableInfo tableName) + $ throw400 NotExists + $ "table " + <> tableName + <<> " does not exist" data SessionVariableBuilder m = SessionVariableBuilder { _svbCurrentSession :: SQLExpression ('Postgres 'Vanilla), @@ -251,18 +258,18 @@ fetchRelDet relName refTabName = do refTabInfo <- fetchRelTabInfo refTabName -- Get the correct constraint that applies to the given relationship refSelPerm <- - modifyErr (relPermErr refTabName roleName) $ - askSelPermInfo refTabInfo + modifyErr (relPermErr refTabName roleName) + $ askSelPermInfo refTabInfo return (_tciFieldInfoMap $ _tiCoreInfo refTabInfo, refSelPerm) where relPermErr rTable roleName _ = "role " <> roleName - <<> " does not have permission to read relationship " + <<> " does not have permission to read relationship " <> relName - <<> "; no permission on table " - <>> rTable + <<> "; no permission on table " + <>> rTable checkOnColExp :: (UserInfoM m, QErrM m, TableInfoRM ('Postgres 'Vanilla) m) => @@ -294,9 +301,12 @@ checkOnColExp spi sessVarBldr annFld = case annFld of tableInfo <- modifyErrAndSet500 ("function " <>) $ askTableInfoSource table let errMsg _ = "role " - <> roleName <<> " does not have permission to read " + <> roleName + <<> " does not have permission to read " <> " computed field " - <> fieldName <<> "; no permission on table " <>> table + <> fieldName + <<> "; no permission on table " + <>> table tableSPI <- modifyErr errMsg $ askSelPermInfo tableInfo modBoolExp <- checkSelPerm tableSPI sessVarBldr nesBoolExp resolvedFltr <- convAnnBoolExpPartialSQL sessVarBldr $ spiFilter tableSPI @@ -380,14 +390,15 @@ validateHeaders :: (UserInfoM m, QErrM m) => HashSet Text -> m () validateHeaders depHeaders = do headers <- getSessionVariables . 
_uiSession <$> askUserInfo forM_ depHeaders $ \hdr -> - unless (hdr `elem` map T.toLower headers) $ - throw400 NotFound $ - hdr <<> " header is expected but not found" + unless (hdr `elem` map T.toLower headers) + $ throw400 NotFound + $ hdr + <<> " header is expected but not found" -- validate limit and offset int values onlyPositiveInt :: (MonadError QErr m) => Int -> m () onlyPositiveInt i = - when (i < 0) $ - throw400 + when (i < 0) + $ throw400 NotSupported "unexpected negative value" diff --git a/server/src-lib/Hasura/RQL/DML/Select.hs b/server/src-lib/Hasura/RQL/DML/Select.hs index 9312ff7f0cf..260405d1784 100644 --- a/server/src-lib/Hasura/RQL/DML/Select.hs +++ b/server/src-lib/Hasura/RQL/DML/Select.hs @@ -57,8 +57,8 @@ convSelCol fieldInfoMap _ (SCExtRel rn malias selQ) = do -- Point to the name key let pgWhenRelErr = "only relationships can be expanded" relInfo <- - withPathK "name" $ - askRelType fieldInfoMap rn pgWhenRelErr + withPathK "name" + $ askRelType fieldInfoMap rn pgWhenRelErr case relInfo of (RelInfo {riTarget = RelTargetNativeQuery _}) -> error "convSelCol RelTargetNativeQuery" (RelInfo {riTarget = RelTargetTable relTable}) -> do @@ -93,9 +93,9 @@ convWildcard fieldInfoMap selPermInfo wildcard = forM mRelSelPerm $ \relSelPermInfo -> do rExtCols <- convWildcard (_tciFieldInfoMap $ _tiCoreInfo relTabInfo) relSelPermInfo wc - pure $ - ECRel relName Nothing $ - SelectG rExtCols Nothing Nothing Nothing Nothing + pure + $ ECRel relName Nothing + $ SelectG rExtCols Nothing Nothing Nothing Nothing relExtCols wc = mapM (mkRelCol wc) relColInfos @@ -106,11 +106,12 @@ resolveStar :: SelectQ -> m SelectQExt resolveStar fim selPermInfo (SelectG selCols mWh mOb mLt mOf) = do - procOverrides <- fmap (concat . catMaybes) $ - withPathK "columns" $ - indexedForM selCols $ \selCol -> case selCol of - (SCStar _) -> pure Nothing - _ -> Just <$> convSelCol fim selPermInfo selCol + procOverrides <- fmap (concat . 
catMaybes) + $ withPathK "columns" + $ indexedForM selCols + $ \selCol -> case selCol of + (SCStar _) -> pure Nothing + _ -> Just <$> convSelCol fim selPermInfo selCol everything <- case wildcards of [] -> pure [] _ -> convWildcard fim selPermInfo $ maximum wildcards @@ -141,15 +142,18 @@ convOrderByElem sessVarBldr (flds, spi) = \case let ty = ciType colInfo if isScalarColumnWhere isGeoType ty then - throw400 UnexpectedPayload $ - fldName <<> " has type 'geometry' and cannot be used in order_by" + throw400 UnexpectedPayload + $ fldName + <<> " has type 'geometry' and cannot be used in order_by" else pure $ AOCColumn colInfo FIRelationship _ -> - throw400 UnexpectedPayload $ - fldName <<> " is a relationship and should be expanded" + throw400 UnexpectedPayload + $ fldName + <<> " is a relationship and should be expanded" FIComputedField _ -> - throw400 UnexpectedPayload $ - fldName <<> " is a computed field and can't be used in 'order_by'" + throw400 UnexpectedPayload + $ fldName + <<> " is a computed field and can't be used in 'order_by'" -- TODO Rakesh (from master) FIRemoteRelationship {} -> throw400 UnexpectedPayload (fldName <<> " is a remote field") @@ -157,18 +161,21 @@ convOrderByElem sessVarBldr (flds, spi) = \case fldInfo <- askFieldInfo flds fldName case fldInfo of FIColumn _ -> - throw400 UnexpectedPayload $ - fldName <<> " is a Postgres column and cannot be chained further" + throw400 UnexpectedPayload + $ fldName + <<> " is a Postgres column and cannot be chained further" FIComputedField _ -> - throw400 UnexpectedPayload $ - fldName <<> " is a computed field and can't be used in 'order_by'" + throw400 UnexpectedPayload + $ fldName + <<> " is a computed field and can't be used in 'order_by'" FIRelationship relInfo -> do relTableName <- case riTarget relInfo of RelTargetTable tn -> pure tn RelTargetNativeQuery _ -> error "convOrderByElem RelTargetNativeQuery" - when (riType relInfo == ArrRel) $ - throw400 UnexpectedPayload $ - fldName <<> " is an array relationship and can't be used in 'order_by'" + when (riType relInfo == ArrRel) + $ throw400 UnexpectedPayload + $ fldName + <<> " is an array relationship and can't be used in 'order_by'" (relFim, relSelPermInfo) <- fetchRelDet (riName relInfo) relTableName resolvedSelFltr <- convAnnBoolExpPartialSQL sessVarBldr $ spiFilter relSelPermInfo AOCObjectRelation relInfo resolvedSelFltr <$> convOrderByElem sessVarBldr (relFim, relSelPermInfo) rest @@ -191,11 +198,12 @@ convSelectQ :: convSelectQ sqlGen table fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do -- Convert where clause wClause <- forM (sqWhere selQ) $ \boolExp -> - withPathK "where" $ - convBoolExp fieldInfoMap selPermInfo boolExp sessVarBldr fieldInfoMap prepValBldr + withPathK "where" + $ convBoolExp fieldInfoMap selPermInfo boolExp sessVarBldr fieldInfoMap prepValBldr - annFlds <- withPathK "columns" $ - indexedForM (sqColumns selQ) $ \case + annFlds <- withPathK "columns" + $ indexedForM (sqColumns selQ) + $ \case (ECSimple pgCol) -> do (colInfo, caseBoolExpMaybe) <- convExtSimple fieldInfoMap selPermInfo pgCol resolvedCaseBoolExp <- @@ -217,10 +225,10 @@ convSelectQ sqlGen table fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = ) annOrdByML <- forM (sqOrderBy selQ) $ \(OrderByExp obItems) -> - withPathK "order_by" $ - indexedForM obItems $ - mapM $ - convOrderByElem sessVarBldr (fieldInfoMap, selPermInfo) + withPathK "order_by" + $ indexedForM obItems + $ mapM + $ convOrderByElem sessVarBldr (fieldInfoMap, selPermInfo) let annOrdByM = NE.nonEmpty =<< 
annOrdByML @@ -229,8 +237,8 @@ convSelectQ sqlGen table fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = withPathK "offset" $ mapM_ onlyPositiveInt mQueryOffset resolvedSelFltr <- - convAnnBoolExpPartialSQL sessVarBldr $ - spiFilter selPermInfo + convAnnBoolExpPartialSQL sessVarBldr + $ spiFilter selPermInfo let tabFrom = FromTable table tabPerm = TablePerm resolvedSelFltr mPermLimit @@ -271,8 +279,8 @@ convExtRel :: convExtRel sqlGen fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do -- Point to the name key relInfo <- - withPathK "name" $ - askRelType fieldInfoMap relName pgWhenRelErr + withPathK "name" + $ askRelType fieldInfoMap relName pgWhenRelErr let (RelInfo {riType = relTy, riMapping = colMapping, riTarget = relTarget}) = relInfo relTableName <- case relTarget of RelTargetNativeQuery _ -> error "convExtRel RelTargetNativeQuery" @@ -282,20 +290,20 @@ convExtRel sqlGen fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do case relTy of ObjRel -> do when misused $ throw400 UnexpectedPayload objRelMisuseMsg - pure $ - Left $ - AnnRelationSelectG (fromMaybe relName mAlias) colMapping $ - AnnObjectSelectG (_asnFields annSel) (FromTable relTableName) $ - _tpFilter $ - _asnPerm annSel + pure + $ Left + $ AnnRelationSelectG (fromMaybe relName mAlias) colMapping + $ AnnObjectSelectG (_asnFields annSel) (FromTable relTableName) + $ _tpFilter + $ _asnPerm annSel ArrRel -> - pure $ - Right $ - ASSimple $ - AnnRelationSelectG - (fromMaybe relName mAlias) - colMapping - annSel + pure + $ Right + $ ASSimple + $ AnnRelationSelectG + (fromMaybe relName mAlias) + colMapping + annSel where pgWhenRelErr = "only relationships can be expanded" misused = @@ -328,13 +336,14 @@ convSelectQuery sqlGen sessVarBldr prepArgBuilder (DMLQuery _ qt selQ) = do selectP2 :: JsonAggSelect -> (AnnSimpleSelect ('Postgres 'Vanilla), DS.Seq PG.PrepArg) -> PG.TxE QErr EncJSON selectP2 jsonAggSelect (sel, p) = - runIdentity . PG.getRow + runIdentity + . PG.getRow <$> PG.rawQE dmlTxErrorHandler selectSQL (toList p) True where selectSQL = - toQuery $ - selectToSelectWith $ - mkSQLSelect jsonAggSelect sel + toQuery + $ selectToSelectWith + $ mkSQLSelect jsonAggSelect sel phaseOne :: (QErrM m, UserInfoM m, CacheRM m) => @@ -344,9 +353,9 @@ phaseOne :: phaseOne sqlGen query = do let sourceName = getSourceDMLQuery query tableCache :: TableCache ('Postgres 'Vanilla) <- fold <$> askTableCache sourceName - flip runTableCacheRT tableCache $ - runDMLP1T $ - convSelectQuery sqlGen sessVarFromCurrentSetting (valueParserWithCollectableType binRHSBuilder) query + flip runTableCacheRT tableCache + $ runDMLP1T + $ convSelectQuery sqlGen sessVarFromCurrentSetting (valueParserWithCollectableType binRHSBuilder) query phaseTwo :: (MonadTx m) => (AnnSimpleSelect ('Postgres 'Vanilla), DS.Seq PG.PrepArg) -> m EncJSON phaseTwo = diff --git a/server/src-lib/Hasura/RQL/DML/Types.hs b/server/src-lib/Hasura/RQL/DML/Types.hs index 9f0191968ef..47185103c0c 100644 --- a/server/src-lib/Hasura/RQL/DML/Types.hs +++ b/server/src-lib/Hasura/RQL/DML/Types.hs @@ -55,9 +55,12 @@ instance FromJSON OrderByExp where `onLeft` const (fail "string format for 'order_by' entry : {+/-}column Eg : +posted") parseObject o = OrderByItemG - <$> o .:? "type" - <*> o .: "column" - <*> o .:? "nulls" + <$> o + .:? "type" + <*> o + .: "column" + <*> o + .:? 
"nulls" orderByParser = OrderByItemG <$> orderTypeParser @@ -78,8 +81,11 @@ data DMLQuery a instance (FromJSON a) => FromJSON (DMLQuery a) where parseJSON = withObject "query" \o -> DMLQuery - <$> o .:? "source" .!= defaultSource - <*> o .: "table" + <$> o + .:? "source" + .!= defaultSource + <*> o + .: "table" <*> parseJSON (Object o) getSourceDMLQuery :: forall a. DMLQuery a -> SourceName @@ -123,12 +129,14 @@ instance FromJSON SelCol where Right x -> return $ SCStar x parseJSON v@(Object o) = SCExtRel - <$> o .: "name" - <*> o .:? "alias" + <$> o + .: "name" + <*> o + .:? "alias" <*> parseJSON v parseJSON _ = - fail $ - mconcat + fail + $ mconcat [ "A column should either be a string or an ", "object (relationship)" ] @@ -187,11 +195,17 @@ data InsertQuery = InsertQuery instance FromJSON InsertQuery where parseJSON = withObject "insert query" $ \o -> InsertQuery - <$> o .: "table" - <*> o .:? "source" .!= defaultSource - <*> o .: "objects" - <*> o .:? "on_conflict" - <*> o .:? "returning" + <$> o + .: "table" + <*> o + .:? "source" + .!= defaultSource + <*> o + .: "objects" + <*> o + .:? "on_conflict" + <*> o + .:? "returning" type UpdVals b = ColumnValues b Value @@ -210,14 +224,21 @@ data UpdateQuery = UpdateQuery instance FromJSON UpdateQuery where parseJSON = withObject "update query" \o -> UpdateQuery - <$> o .: "table" - <*> o .:? "source" .!= defaultSource - <*> o .: "where" + <$> o + .: "table" + <*> o + .:? "source" + .!= defaultSource + <*> o + .: "where" <*> ((o .: "$set" <|> o .:? "values") .!= HashMap.empty) <*> (o .:? "$inc" .!= HashMap.empty) <*> (o .:? "$mul" .!= HashMap.empty) - <*> o .:? "$default" .!= [] - <*> o .:? "returning" + <*> o + .:? "$default" + .!= [] + <*> o + .:? "returning" data DeleteQuery = DeleteQuery { doTable :: QualifiedTable, @@ -230,10 +251,15 @@ data DeleteQuery = DeleteQuery instance FromJSON DeleteQuery where parseJSON = withObject "delete query" $ \o -> DeleteQuery - <$> o .: "table" - <*> o .:? "source" .!= defaultSource - <*> o .: "where" - <*> o .:? "returning" + <$> o + .: "table" + <*> o + .:? "source" + .!= defaultSource + <*> o + .: "where" + <*> o + .:? "returning" data CountQuery = CountQuery { cqTable :: QualifiedTable, @@ -246,10 +272,15 @@ data CountQuery = CountQuery instance FromJSON CountQuery where parseJSON = withObject "count query" $ \o -> CountQuery - <$> o .: "table" - <*> o .:? "source" .!= defaultSource - <*> o .:? "distinct" - <*> o .:? "where" + <$> o + .: "table" + <*> o + .:? "source" + .!= defaultSource + <*> o + .:? "distinct" + <*> o + .:? 
"where" data QueryT = QTInsert InsertQuery diff --git a/server/src-lib/Hasura/RQL/DML/Update.hs b/server/src-lib/Hasura/RQL/DML/Update.hs index 0d4e55104b7..a3324a29653 100644 --- a/server/src-lib/Hasura/RQL/DML/Update.hs +++ b/server/src-lib/Hasura/RQL/DML/Update.hs @@ -95,11 +95,13 @@ convOp fieldInfoMap preSetCols updPerm objs conv = relWhenPgErr = "relationships can't be updated" throwNotUpdErr c = do roleName <- _uiRole <$> askUserInfo - throw400 NotSupported $ - "column " - <> c <<> " is not updatable" - <> " for role " - <> roleName <<> "; its value is predefined in permission" + throw400 NotSupported + $ "column " + <> c + <<> " is not updatable" + <> " for role " + <> roleName + <<> "; its value is predefined in permission" validateUpdateQueryWith :: (UserInfoM m, QErrM m, TableInfoRM ('Postgres 'Vanilla) m) => @@ -127,8 +129,8 @@ validateUpdateQueryWith sessVarBldr prepValBldr uq = do -- Check if select is allowed selPerm <- - modifyErr (<> selNecessaryMsg) $ - askSelPermInfo tableInfo + modifyErr (<> selNecessaryMsg) + $ askSelPermInfo tableInfo let fieldInfoMap = _tciFieldInfoMap coreInfo allCols = mapMaybe (^? _SCIScalarColumn) $ getCols fieldInfoMap @@ -137,23 +139,23 @@ validateUpdateQueryWith sessVarBldr prepValBldr uq = do -- convert the object to SQL set expression setItems <- - withPathK "$set" $ - convOp fieldInfoMap preSetCols updPerm (HashMap.toList $ uqSet uq) $ - convSet prepValBldr + withPathK "$set" + $ convOp fieldInfoMap preSetCols updPerm (HashMap.toList $ uqSet uq) + $ convSet prepValBldr incItems <- - withPathK "$inc" $ - convOp fieldInfoMap preSetCols updPerm (HashMap.toList $ uqInc uq) $ - convInc prepValBldr + withPathK "$inc" + $ convOp fieldInfoMap preSetCols updPerm (HashMap.toList $ uqInc uq) + $ convInc prepValBldr mulItems <- - withPathK "$mul" $ - convOp fieldInfoMap preSetCols updPerm (HashMap.toList $ uqMul uq) $ - convMul prepValBldr + withPathK "$mul" + $ convOp fieldInfoMap preSetCols updPerm (HashMap.toList $ uqMul uq) + $ convMul prepValBldr defItems <- - withPathK "$default" $ - convOp fieldInfoMap preSetCols updPerm ((,()) <$> uqDefault uq) convDefault + withPathK "$default" + $ convOp fieldInfoMap preSetCols updPerm ((,()) <$> uqDefault uq) convDefault -- convert the returning cols into sql returing exp mAnnRetCols <- forM mRetCols $ \retCols -> @@ -170,30 +172,30 @@ validateUpdateQueryWith sessVarBldr prepValBldr uq = do ++ mulItems ++ defItems - when (null setExpItems) $ - throw400 UnexpectedPayload "atleast one of $set, $inc, $mul has to be present" + when (null setExpItems) + $ throw400 UnexpectedPayload "atleast one of $set, $inc, $mul has to be present" -- convert the where clause annSQLBoolExp <- - withPathK "where" $ - convBoolExp fieldInfoMap selPerm (uqWhere uq) sessVarBldr fieldInfoMap prepValBldr + withPathK "where" + $ convBoolExp fieldInfoMap selPerm (uqWhere uq) sessVarBldr fieldInfoMap prepValBldr resolvedUpdFltr <- - convAnnBoolExpPartialSQL sessVarBldr $ - upiFilter updPerm + convAnnBoolExpPartialSQL sessVarBldr + $ upiFilter updPerm resolvedUpdCheck <- fromMaybe gBoolExpTrue <$> traverse (convAnnBoolExpPartialSQL sessVarBldr) (upiCheck updPerm) - return $ - AnnotatedUpdateG + return + $ AnnotatedUpdateG tableName resolvedUpdFltr resolvedUpdCheck - ( SingleBatch $ - UpdateBatch + ( SingleBatch + $ UpdateBatch (HashMap.fromList $ fmap UpdateSet <$> setExpItems) annSQLBoolExp ) @@ -214,9 +216,9 @@ validateUpdateQuery :: validateUpdateQuery query = do let source = uqSource query tableCache :: TableCache ('Postgres 'Vanilla) 
<- fold <$> askTableCache source - flip runTableCacheRT tableCache $ - runDMLP1T $ - validateUpdateQueryWith sessVarFromCurrentSetting (valueParserWithCollectableType binRHSBuilder) query + flip runTableCacheRT tableCache + $ runDMLP1T + $ validateUpdateQueryWith sessVarFromCurrentSetting (valueParserWithCollectableType binRHSBuilder) query runUpdate :: forall m. @@ -237,5 +239,5 @@ runUpdate sqlGen q = do let strfyNum = stringifyNum sqlGen validateUpdateQuery q >>= runTxWithCtx (_pscExecCtx sourceConfig) (Tx PG.ReadWrite Nothing) LegacyRQLQuery - . flip runReaderT emptyQueryTagsComment - . execUpdateQuery strfyNum Nothing userInfo + . flip runReaderT emptyQueryTagsComment + . execUpdateQuery strfyNum Nothing userInfo diff --git a/server/src-lib/Hasura/RQL/IR/BoolExp.hs b/server/src-lib/Hasura/RQL/IR/BoolExp.hs index 8062f1b7cbc..b1b74086715 100644 --- a/server/src-lib/Hasura/RQL/IR/BoolExp.hs +++ b/server/src-lib/Hasura/RQL/IR/BoolExp.hs @@ -97,15 +97,15 @@ instance (Backend b, FromJSONKeyValue a) => FromJSON (GBoolExp b a) where parseJSON = withObject "boolean expression" \o -> BoolAnd <$> forM (KM.toList o) \(k, v) -> if - | k == "$or" -> BoolOr <$> parseJSON v Key k - | k == "_or" -> BoolOr <$> parseJSON v Key k - | k == "$and" -> BoolAnd <$> parseJSON v Key k - | k == "_and" -> BoolAnd <$> parseJSON v Key k - | k == "$not" -> BoolNot <$> parseJSON v Key k - | k == "_not" -> BoolNot <$> parseJSON v Key k - | k == "$exists" -> BoolExists <$> parseJSON v Key k - | k == "_exists" -> BoolExists <$> parseJSON v Key k - | otherwise -> BoolField <$> parseJSONKeyValue (k, v) + | k == "$or" -> BoolOr <$> parseJSON v Key k + | k == "_or" -> BoolOr <$> parseJSON v Key k + | k == "$and" -> BoolAnd <$> parseJSON v Key k + | k == "_and" -> BoolAnd <$> parseJSON v Key k + | k == "$not" -> BoolNot <$> parseJSON v Key k + | k == "_not" -> BoolNot <$> parseJSON v Key k + | k == "$exists" -> BoolExists <$> parseJSON v Key k + | k == "_exists" -> BoolExists <$> parseJSON v Key k + | otherwise -> BoolField <$> parseJSONKeyValue (k, v) instance (Backend backend, ToJSONKeyValue field) => ToJSON (GBoolExp backend field) where -- A representation for boolean values as JSON. @@ -196,7 +196,7 @@ newtype BoolExp (b :: BackendType) = BoolExp {unBoolExp :: GBoolExp b ColExp} -- decoding GBoolExp. To accurately represent GBoolExp with a codec we will need -- Autodocodec to gain support for expressing an object type with "additional -- properties" for fields. -instance Backend b => HasCodec (BoolExp b) where +instance (Backend b) => HasCodec (BoolExp b) where codec = CommentCodec doc $ named (backendPrefix @b <> "BoolExp") $ dimapCodec BoolExp unBoolExp jsonCodec where jsonCodec :: JSONCodec (GBoolExp b ColExp) @@ -232,7 +232,7 @@ instance ) => Hashable (PartialSQLExp b) -instance Backend b => ToJSON (PartialSQLExp b) where +instance (Backend b) => ToJSON (PartialSQLExp b) where toJSON = \case PSESessVar colTy sessVar -> toJSON (colTy, sessVar) PSESession -> String "hasura_session" @@ -244,7 +244,7 @@ isStaticValue = \case PSESession -> False PSESQLExp _ -> True -hasStaticExp :: Backend b => OpExpG b (PartialSQLExp b) -> Bool +hasStaticExp :: (Backend b) => OpExpG b (PartialSQLExp b) -> Bool hasStaticExp = getAny . foldMap (Any . 
isStaticValue) ---------------------------------------------------------------------------------------------------- @@ -282,15 +282,15 @@ data OpExpG (backend :: BackendType) field data RootOrCurrentColumn b = RootOrCurrentColumn RootOrCurrent (Column b) deriving (Generic) -deriving instance Backend b => Show (RootOrCurrentColumn b) +deriving instance (Backend b) => Show (RootOrCurrentColumn b) -deriving instance Backend b => Eq (RootOrCurrentColumn b) +deriving instance (Backend b) => Eq (RootOrCurrentColumn b) -instance Backend b => NFData (RootOrCurrentColumn b) +instance (Backend b) => NFData (RootOrCurrentColumn b) -instance Backend b => Hashable (RootOrCurrentColumn b) +instance (Backend b) => Hashable (RootOrCurrentColumn b) -instance Backend b => ToJSON (RootOrCurrentColumn b) +instance (Backend b) => ToJSON (RootOrCurrentColumn b) -- | The arguments of column-operators may refer to either the so-called 'root -- tabular value' or 'current tabular value'. @@ -622,10 +622,10 @@ instance (NFData a) => NFData (DWithinGeomOp a) instance (Hashable a) => Hashable (DWithinGeomOp a) -instance FromJSON a => FromJSON (DWithinGeomOp a) where +instance (FromJSON a) => FromJSON (DWithinGeomOp a) where parseJSON = genericParseJSON hasuraJSON -instance ToJSON a => ToJSON (DWithinGeomOp a) where +instance (ToJSON a) => ToJSON (DWithinGeomOp a) where toJSON = genericToJSON hasuraJSON toEncoding = genericToEncoding hasuraJSON @@ -641,10 +641,10 @@ instance (NFData a) => NFData (DWithinGeogOp a) instance (Hashable a) => Hashable (DWithinGeogOp a) -instance FromJSON a => FromJSON (DWithinGeogOp a) where +instance (FromJSON a) => FromJSON (DWithinGeogOp a) where parseJSON = genericParseJSON hasuraJSON -instance ToJSON a => ToJSON (DWithinGeogOp a) where +instance (ToJSON a) => ToJSON (DWithinGeogOp a) where toJSON = genericToJSON hasuraJSON toEncoding = genericToEncoding hasuraJSON @@ -659,10 +659,10 @@ instance (NFData a) => NFData (STIntersectsNbandGeommin a) instance (Hashable a) => Hashable (STIntersectsNbandGeommin a) -instance FromJSON field => FromJSON (STIntersectsNbandGeommin field) where +instance (FromJSON field) => FromJSON (STIntersectsNbandGeommin field) where parseJSON = genericParseJSON hasuraJSON -instance ToJSON field => ToJSON (STIntersectsNbandGeommin field) where +instance (ToJSON field) => ToJSON (STIntersectsNbandGeommin field) where toJSON = genericToJSON hasuraJSON toEncoding = genericToEncoding hasuraJSON @@ -677,10 +677,10 @@ instance (NFData a) => NFData (STIntersectsGeomminNband a) instance (Hashable a) => Hashable (STIntersectsGeomminNband a) -instance FromJSON field => FromJSON (STIntersectsGeomminNband field) where +instance (FromJSON field) => FromJSON (STIntersectsGeomminNband field) where parseJSON = genericParseJSON hasuraJSON -instance ToJSON field => ToJSON (STIntersectsGeomminNband field) where +instance (ToJSON field) => ToJSON (STIntersectsGeomminNband field) where toJSON = genericToJSON hasuraJSON toEncoding = genericToEncoding hasuraJSON diff --git a/server/src-lib/Hasura/RQL/IR/BoolExp/AggregationPredicates.hs b/server/src-lib/Hasura/RQL/IR/BoolExp/AggregationPredicates.hs index a1eecf84037..93b56c1955b 100644 --- a/server/src-lib/Hasura/RQL/IR/BoolExp/AggregationPredicates.hs +++ b/server/src-lib/Hasura/RQL/IR/BoolExp/AggregationPredicates.hs @@ -160,12 +160,12 @@ data AggregationPredicateArguments (b :: BackendType) | AggregationPredicateArguments (NonEmpty (Column b)) deriving stock (Generic) -deriving instance B.Backend b => Eq 
(AggregationPredicateArguments b) +deriving instance (B.Backend b) => Eq (AggregationPredicateArguments b) -deriving instance B.Backend b => Show (AggregationPredicateArguments b) +deriving instance (B.Backend b) => Show (AggregationPredicateArguments b) -instance Backend b => Hashable (AggregationPredicateArguments b) +instance (Backend b) => Hashable (AggregationPredicateArguments b) -instance Backend b => NFData (AggregationPredicateArguments b) +instance (Backend b) => NFData (AggregationPredicateArguments b) instance (Backend b) => ToJSON (AggregationPredicateArguments b) diff --git a/server/src-lib/Hasura/RQL/IR/Conflict.hs b/server/src-lib/Hasura/RQL/IR/Conflict.hs index 45cfeb8c84e..0fa7e2a5368 100644 --- a/server/src-lib/Hasura/RQL/IR/Conflict.hs +++ b/server/src-lib/Hasura/RQL/IR/Conflict.hs @@ -18,9 +18,9 @@ data ConflictTarget (b :: BackendType) = CTColumn [Column b] | CTConstraint (ConstraintName b) -deriving instance Backend b => Show (ConflictTarget b) +deriving instance (Backend b) => Show (ConflictTarget b) -deriving instance Backend b => Eq (ConflictTarget b) +deriving instance (Backend b) => Eq (ConflictTarget b) data OnConflictClauseData b v = OnConflictClauseData { cp1udConflictTarget :: ConflictTarget b, diff --git a/server/src-lib/Hasura/RQL/IR/RemoteSchema.hs b/server/src-lib/Hasura/RQL/IR/RemoteSchema.hs index aadfeb057b5..566a6390184 100644 --- a/server/src-lib/Hasura/RQL/IR/RemoteSchema.hs +++ b/server/src-lib/Hasura/RQL/IR/RemoteSchema.hs @@ -188,7 +188,7 @@ data RemoteSchemaSelect r = RemoteSchemaSelect -- set. convertSelectionSet :: forall var. - Eq var => + (Eq var) => SelectionSet Void var -> G.SelectionSet G.NoFragments var convertSelectionSet = \case @@ -219,7 +219,7 @@ convertSelectionSet = \case -- inline with the strategy used in `mkAbstractTypeSelectionSet` commonFields <> map G.SelectionInlineFragment concreteTypeSelectionSets -convertGraphQLField :: Eq var => GraphQLField Void var -> G.Field G.NoFragments var +convertGraphQLField :: (Eq var) => GraphQLField Void var -> G.Field G.NoFragments var convertGraphQLField GraphQLField {..} = G.Field { -- add the alias only if it is different from the field name. This @@ -334,10 +334,11 @@ reduceAbstractTypeSelectionSet (DeduplicatedSelectionSet baseMemberFields select memberSelectionSets = -- remove member selection sets that are subsumed by base selection set - filter (not . null . snd) $ + filter (not . null . snd) + $ -- remove the common prefix from member selection sets - map (second (InsOrdHashMap.fromList . drop (InsOrdHashMap.size baseSelectionSet) . InsOrdHashMap.toList)) $ - HashMap.toList selectionSets + map (second (InsOrdHashMap.fromList . drop (InsOrdHashMap.size baseSelectionSet) . 
InsOrdHashMap.toList)) + $ HashMap.toList selectionSets ------------------------------------------------------------------------------- -- TH lens generation diff --git a/server/src-lib/Hasura/RQL/IR/Root.hs b/server/src-lib/Hasura/RQL/IR/Root.hs index 2e0d7ae96b4..6f4117093ff 100644 --- a/server/src-lib/Hasura/RQL/IR/Root.hs +++ b/server/src-lib/Hasura/RQL/IR/Root.hs @@ -80,7 +80,7 @@ data RemoteRelationshipField vf | -- | AnyBackend is used here to capture a relationship to an arbitrary target RemoteSourceField (AB.AnyBackend (RemoteSourceSelect (RemoteRelationshipField vf) vf)) -deriving instance AB.SatisfiesForAllBackends vf Show => Show (RemoteRelationshipField vf) +deriving instance (AB.SatisfiesForAllBackends vf Show) => Show (RemoteRelationshipField vf) -- | Represents a query root field to an action type QueryActionRoot v = diff --git a/server/src-lib/Hasura/RQL/IR/Select.hs b/server/src-lib/Hasura/RQL/IR/Select.hs index a799871f661..9784ab137fe 100644 --- a/server/src-lib/Hasura/RQL/IR/Select.hs +++ b/server/src-lib/Hasura/RQL/IR/Select.hs @@ -124,7 +124,7 @@ data QueryDB (b :: BackendType) (r :: Type) v | QDBStreamMultipleRows (AnnSimpleStreamSelectG b r v) deriving stock (Generic, Functor, Foldable, Traversable) -instance Backend b => Bifoldable (QueryDB b) where +instance (Backend b) => Bifoldable (QueryDB b) where bifoldMap f g = \case QDBMultipleRows annSel -> bifoldMapAnnSelectG f g annSel QDBSingleRow annSel -> bifoldMapAnnSelectG f g annSel @@ -175,7 +175,7 @@ deriving stock instance ) => Show (ConnectionSelect b r v) -instance Backend b => Bifoldable (ConnectionSelect b) where +instance (Backend b) => Bifoldable (ConnectionSelect b) where bifoldMap f g ConnectionSelect {..} = foldMap (foldMap $ foldMap g) _csSplit <> bifoldMapAnnSelectG f g _csSelect @@ -274,7 +274,7 @@ deriving stock instance ) => Show (AnnFieldG b r v) -instance Backend b => Bifoldable (AnnFieldG b) where +instance (Backend b) => Bifoldable (AnnFieldG b) where bifoldMap f g = \case AFColumn col -> foldMap g col AFObjectRelation objRel -> foldMap (bifoldMap f g) objRel @@ -340,7 +340,7 @@ deriving stock instance ) => Show (TableAggregateFieldG b r v) -instance Backend b => Bifoldable (TableAggregateFieldG b) where +instance (Backend b) => Bifoldable (TableAggregateFieldG b) where bifoldMap f g = \case TAFAgg {} -> mempty TAFNodes _ fields -> foldMap (foldMap $ bifoldMap f g) fields @@ -419,7 +419,7 @@ deriving stock instance ) => Show (ConnectionField b r v) -instance Backend b => Bifoldable (ConnectionField b) where +instance (Backend b) => Bifoldable (ConnectionField b) where bifoldMap f g = \case ConnectionTypename {} -> mempty ConnectionPageInfo {} -> mempty @@ -449,7 +449,7 @@ deriving stock instance ) => Show (EdgeField b r v) -instance Backend b => Bifoldable (EdgeField b) where +instance (Backend b) => Bifoldable (EdgeField b) where bifoldMap f g = \case EdgeTypename {} -> mempty EdgeCursor -> mempty @@ -513,14 +513,14 @@ deriving stock instance (Backend b, Eq v, Eq (FunctionArgumentExp b v)) => Eq (C data ComputedFieldSelect (b :: BackendType) (r :: Type) v = CFSScalar + -- | Type containing info about the computed field (ComputedFieldScalarSelect b v) - -- ^ Type containing info about the computed field - (Maybe (AnnColumnCaseBoolExp b v)) - -- ^ This type is used to determine if whether the scalar + -- | This type is used to determine whether the scalar -- computed field should be nullified. 
When the value is `Nothing`, -- the scalar computed value will be outputted as computed and when the -- value is `Just c`, the scalar computed field will be outputted when -- `c` evaluates to `true` and `null` when `c` evaluates to `false` + (Maybe (AnnColumnCaseBoolExp b v)) | CFSTable JsonAggSelect (AnnSimpleSelectG b r v) deriving stock (Functor, Foldable, Traversable) @@ -540,7 +540,7 @@ deriving stock instance ) => Show (ComputedFieldSelect b r v) -instance Backend b => Bifoldable (ComputedFieldSelect b) where +instance (Backend b) => Bifoldable (ComputedFieldSelect b) where bifoldMap f g = \case CFSScalar cfsSelect caseBoolExp -> foldMap g cfsSelect <> foldMap (foldMap $ foldMap g) caseBoolExp CFSTable _ simpleSelect -> bifoldMapAnnSelectG f g simpleSelect @@ -578,7 +578,7 @@ deriving stock instance ) => Show (AnnObjectSelectG b r v) -instance Backend b => Bifoldable (AnnObjectSelectG b) where +instance (Backend b) => Bifoldable (AnnObjectSelectG b) where bifoldMap f g AnnObjectSelectG {..} = foldMap (foldMap $ bifoldMap f g) _aosFields <> foldMap (foldMap g) _aosTargetFilter @@ -608,7 +608,7 @@ deriving stock instance ) => Show (ArraySelectG b r v) -instance Backend b => Bifoldable (ArraySelectG b) where +instance (Backend b) => Bifoldable (ArraySelectG b) where bifoldMap f g = \case ASSimple arrayRelationSelect -> foldMap (bifoldMapAnnSelectG f g) arrayRelationSelect ASAggregate arrayAggregateSelect -> foldMap (bifoldMapAnnSelectG f g) arrayAggregateSelect @@ -700,7 +700,7 @@ deriving stock instance ) => Show (AnnNestedObjectSelectG b r v) -instance Backend b => Bifoldable (AnnNestedObjectSelectG b) where +instance (Backend b) => Bifoldable (AnnNestedObjectSelectG b) where bifoldMap f g AnnNestedObjectSelectG {..} = foldMap (foldMap $ bifoldMap f g) _anosFields @@ -719,7 +719,7 @@ deriving stock instance deriving stock instance (Backend b, Show (AnnFieldG b r v), Show (AnnAggregateSelectG b r v)) => Show (AnnNestedArraySelectG b r v) -instance Backend b => Bifoldable (AnnNestedArraySelectG b) where +instance (Backend b) => Bifoldable (AnnNestedArraySelectG b) where bifoldMap f g = \case ANASSimple field -> bifoldMap f g field ANASAggregate agg -> bifoldMapAnnSelectG f g agg @@ -739,8 +739,8 @@ insertFunctionArg argName idx value (FunctionArgsExp positional named) = if (idx + 1) <= length positional then FunctionArgsExp (insertAt idx value positional) named else - FunctionArgsExp positional $ - HashMap.insert (getFuncArgNameTxt argName) value named + FunctionArgsExp positional + $ HashMap.insert (getFuncArgNameTxt argName) value named where insertAt i a = toList . Seq.insertAt i a . 
Seq.fromList diff --git a/server/src-lib/Hasura/RQL/IR/Select/OrderBy.hs b/server/src-lib/Hasura/RQL/IR/Select/OrderBy.hs index 4189eeb8f16..78e3aee9153 100644 --- a/server/src-lib/Hasura/RQL/IR/Select/OrderBy.hs +++ b/server/src-lib/Hasura/RQL/IR/Select/OrderBy.hs @@ -25,13 +25,13 @@ data AnnotatedOrderByElement (b :: BackendType) v = AOCColumn (ColumnInfo b) | AOCObjectRelation (RelInfo b) + -- | Permission filter of the remote table to which the relationship is defined (AnnBoolExp b v) - -- ^ Permission filter of the remote table to which the relationship is defined (AnnotatedOrderByElement b v) | AOCArrayAggregation (RelInfo b) + -- | Permission filter of the remote table to which the relationship is defined (AnnBoolExp b v) - -- ^ Permission filter of the remote table to which the relationship is defined (AnnotatedAggregateOrderBy b) | AOCComputedField (ComputedFieldOrderBy b v) deriving stock (Generic, Functor, Foldable, Traversable) @@ -83,10 +83,10 @@ data ComputedFieldOrderByElement (b :: BackendType) v CFOBEScalar (ScalarType b) | CFOBETableAggregation (TableName b) + -- | Permission filter of the returning table (AnnBoolExp b v) - -- ^ Permission filter of the retuning table + -- | Sort by aggregation fields of table rows returned by computed field (AnnotatedAggregateOrderBy b) - -- ^ Sort by aggregation fields of table rows returned by computed field deriving stock (Generic, Functor, Foldable, Traversable) deriving stock instance diff --git a/server/src-lib/Hasura/RQL/Types/Action.hs b/server/src-lib/Hasura/RQL/Types/Action.hs index aeb85f71102..e3e4365ae48 100644 --- a/server/src-lib/Hasura/RQL/Types/Action.hs +++ b/server/src-lib/Hasura/RQL/Types/Action.hs @@ -96,12 +96,16 @@ instance NFData ActionMetadata instance HasCodec ActionMetadata where codec = - AC.object "ActionMetadata" $ - ActionMetadata - <$> requiredField' "name" AC..= _amName - <*> optionalField' "comment" AC..= _amComment - <*> requiredField' "definition" AC..= _amDefinition - <*> optionalFieldWithOmittedDefault' "permissions" [] AC..= _amPermissions + AC.object "ActionMetadata" + $ ActionMetadata + <$> requiredField' "name" + AC..= _amName + <*> optionalField' "comment" + AC..= _amComment + <*> requiredField' "definition" + AC..= _amDefinition + <*> optionalFieldWithOmittedDefault' "permissions" [] + AC..= _amPermissions data ActionPermissionMetadata = ActionPermissionMetadata { _apmRole :: RoleName, @@ -113,10 +117,12 @@ instance NFData ActionPermissionMetadata instance HasCodec ActionPermissionMetadata where codec = - AC.object "ActionPermissionMetadata" $ - ActionPermissionMetadata - <$> requiredField' "role" AC..= _apmRole - <*> optionalField' "comment" AC..= _apmComment + AC.object "ActionPermissionMetadata" + $ ActionPermissionMetadata + <$> requiredField' "role" + AC..= _apmRole + <*> optionalField' "comment" + AC..= _apmComment newtype ActionName = ActionName {unActionName :: G.Name} deriving (Show, Eq, Ord, J.FromJSON, J.ToJSON, J.FromJSONKey, J.ToJSONKey, ToTxt, Generic, NFData, Hashable) @@ -170,29 +176,39 @@ instance HasCodec (ActionDefinition arg webhook) where codec = - dimapCodec dec enc $ - disjointEitherCodec (actionCodec (const ActionQuery)) (actionCodec ActionMutation) + dimapCodec dec enc + $ disjointEitherCodec (actionCodec (const ActionQuery)) (actionCodec ActionMutation) where actionCodec :: (ActionMutationKind -> ActionType) -> AC.JSONCodec (ActionDefinition arg webhook) actionCodec actionTypeConstructor = - AC.object (typeId actionTypeConstructor) $ - ActionDefinition - <$> 
optionalFieldWithOmittedDefault' "arguments" [] AC..= _adArguments - <*> requiredField' "output_type" AC..= _adOutputType - <*> typeAndKind actionTypeConstructor AC..= _adType - <*> optionalFieldWithOmittedDefault' "headers" [] AC..= _adHeaders - <*> optionalFieldWithOmittedDefault' "forward_client_headers" False AC..= _adForwardClientHeaders - <*> optionalFieldWithOmittedDefault' "timeout" defaultActionTimeoutSecs AC..= _adTimeout - <*> requiredField' "handler" AC..= _adHandler - <*> optionalField' "request_transform" AC..= _adRequestTransform - <*> optionalField' "response_transform" AC..= _adResponseTransform + AC.object (typeId actionTypeConstructor) + $ ActionDefinition + <$> optionalFieldWithOmittedDefault' "arguments" [] + AC..= _adArguments + <*> requiredField' "output_type" + AC..= _adOutputType + <*> typeAndKind actionTypeConstructor + AC..= _adType + <*> optionalFieldWithOmittedDefault' "headers" [] + AC..= _adHeaders + <*> optionalFieldWithOmittedDefault' "forward_client_headers" False + AC..= _adForwardClientHeaders + <*> optionalFieldWithOmittedDefault' "timeout" defaultActionTimeoutSecs + AC..= _adTimeout + <*> requiredField' "handler" + AC..= _adHandler + <*> optionalField' "request_transform" + AC..= _adRequestTransform + <*> optionalField' "response_transform" + AC..= _adResponseTransform typeAndKind :: (ActionMutationKind -> ActionType) -> AC.ObjectCodec ActionType ActionType typeAndKind actionTypeConstructor = case (actionTypeConstructor ActionSynchronous) of (ActionMutation _) -> ActionMutation <$ discriminatorField "type" "mutation" - <*> optionalFieldWithDefault' "kind" ActionSynchronous AC..= \case + <*> optionalFieldWithDefault' "kind" ActionSynchronous + AC..= \case (ActionMutation kind) -> kind ActionQuery -> ActionSynchronous ActionQuery -> ActionQuery <$ discriminatorField "type" "query" @@ -246,11 +262,14 @@ instance (NFData a) => NFData (ArgumentDefinition a) instance (HasCodec a, Typeable a) => HasCodec (ArgumentDefinition a) where codec = - AC.object ("ArgumentDefinition_" <> typeableName @a) $ - ArgumentDefinition - <$> requiredField' "name" AC..= _argName - <*> requiredField' "type" AC..= _argType - <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec AC..= _argDescription + AC.object ("ArgumentDefinition_" <> typeableName @a) + $ ArgumentDefinition + <$> requiredField' "name" + AC..= _argName + <*> requiredField' "type" + AC..= _argType + <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec + AC..= _argDescription newtype ArgumentName = ArgumentName {unArgumentName :: G.Name} deriving (Show, Eq, J.FromJSON, J.ToJSON, J.FromJSONKey, J.ToJSONKey, ToTxt, Generic, NFData) @@ -347,10 +366,10 @@ instance ToJSON ActionPermissionMetadata where toJSON = genericToJSON hasuraJSON {J.omitNothingFields = True} toEncoding = genericToEncoding hasuraJSON {J.omitNothingFields = True} -instance FromJSON arg => FromJSON (ArgumentDefinition arg) where +instance (FromJSON arg) => FromJSON (ArgumentDefinition arg) where parseJSON = genericParseJSON hasuraJSON -instance ToJSON arg => ToJSON (ArgumentDefinition arg) where +instance (ToJSON arg) => ToJSON (ArgumentDefinition arg) where toJSON = genericToJSON hasuraJSON toEncoding = genericToEncoding hasuraJSON @@ -381,10 +400,15 @@ instance (J.FromJSON a, J.FromJSON b) => J.FromJSON (ActionDefinition a b) where instance J.FromJSON ActionMetadata where parseJSON = J.withObject "ActionMetadata" $ \o -> ActionMetadata - <$> o .: "name" - <*> o .:? "comment" - <*> o .: "definition" - <*> o .:? 
"permissions" .!= [] + <$> o + .: "name" + <*> o + .:? "comment" + <*> o + .: "definition" + <*> o + .:? "permissions" + .!= [] instance (J.ToJSON a, J.ToJSON b) => J.ToJSON (ActionDefinition a b) where toJSON (ActionDefinition {..}) = @@ -394,19 +418,19 @@ instance (J.ToJSON a, J.ToJSON b) => J.ToJSON (ActionDefinition a b) where [ "type" .= ("mutation" :: String), "kind" .= kind ] - in J.object $ - [ "arguments" .= _adArguments, - "output_type" .= _adOutputType, - "headers" .= _adHeaders, - "forward_client_headers" .= _adForwardClientHeaders, - "handler" .= _adHandler, - "timeout" .= _adTimeout - ] - <> catMaybes - [ ("request_transform" .=) <$> _adRequestTransform, - ("response_transform" .=) <$> _adResponseTransform - ] - <> typeAndKind + in J.object + $ [ "arguments" .= _adArguments, + "output_type" .= _adOutputType, + "headers" .= _adHeaders, + "forward_client_headers" .= _adForwardClientHeaders, + "handler" .= _adHandler, + "timeout" .= _adTimeout + ] + <> catMaybes + [ ("request_transform" .=) <$> _adRequestTransform, + ("response_transform" .=) <$> _adResponseTransform + ] + <> typeAndKind instance ToJSON ActionLogResponse where toJSON = genericToJSON hasuraJSON diff --git a/server/src-lib/Hasura/RQL/Types/Allowlist.hs b/server/src-lib/Hasura/RQL/Types/Allowlist.hs index 1ef2a91dc8d..ab9a76948b2 100644 --- a/server/src-lib/Hasura/RQL/Types/Allowlist.hs +++ b/server/src-lib/Hasura/RQL/Types/Allowlist.hs @@ -53,9 +53,9 @@ instance HasCodec AllowlistScope where where global = AC.object "AllowlistScopeGlobal" $ discriminatorBoolField "global" True scopeRoles = - AC.object "AllowlistScopeRoles" $ - void (discriminatorBoolField "global" False) - *> requiredField' "roles" + AC.object "AllowlistScopeRoles" + $ void (discriminatorBoolField "global" False) + *> requiredField' "roles" dec (Left _) = Right AllowlistScopeGlobal dec (Right roles) @@ -94,10 +94,12 @@ data AllowlistEntry = AllowlistEntry instance HasCodec AllowlistEntry where codec = - AC.object "AllowlistEntry" $ - AllowlistEntry - <$> requiredField' "collection" AC..= aeCollection - <*> optionalFieldWithDefault' "scope" AllowlistScopeGlobal AC..= aeScope + AC.object "AllowlistEntry" + $ AllowlistEntry + <$> requiredField' "collection" + AC..= aeCollection + <*> optionalFieldWithDefault' "scope" AllowlistScopeGlobal + AC..= aeScope instance ToJSON AllowlistEntry where toJSON = genericToJSON hasuraJSON @@ -128,10 +130,11 @@ metadataAllowlistInsert entry@(AllowlistEntry coll _) al = insertIfAbsent = \case Nothing -> Right (Just entry) Just _ -> - Left $ - "collection " - <> coll <<> " already exists in the allowlist, scope ignored;" - <> " to change scope, use update_scope_of_collection_in_allowlist" + Left + $ "collection " + <> coll + <<> " already exists in the allowlist, scope ignored;" + <> " to change scope, use update_scope_of_collection_in_allowlist" metadataAllowlistUpdateScope :: AllowlistEntry -> MetadataAllowlist -> Either Text MetadataAllowlist @@ -217,12 +220,12 @@ inlineAllowlist collections allowlist = InlinedAllowlist global perRole [coll | AllowlistEntry coll AllowlistScopeGlobal <- InsOrdHashMap.elems allowlist] perRoleCollections :: HashMap RoleName [CollectionName] perRoleCollections = - inverseMap $ - [ (coll, toList roles) - | AllowlistEntry coll (AllowlistScopeRoles roles) <- InsOrdHashMap.elems allowlist - ] + inverseMap + $ [ (coll, toList roles) + | AllowlistEntry coll (AllowlistScopeRoles roles) <- InsOrdHashMap.elems allowlist + ] - inverseMap :: Hashable b => [(a, [b])] -> HashMap b [a] + inverseMap 
:: (Hashable b) => [(a, [b])] -> HashMap b [a] inverseMap = HashMap.fromListWith (<>) . concatMap (\(c, rs) -> [(r, [c]) | r <- rs]) global = inlineQueries globalCollections diff --git a/server/src-lib/Hasura/RQL/Types/ApiLimit.hs b/server/src-lib/Hasura/RQL/Types/ApiLimit.hs index 1db817bde6b..261c5b0d4eb 100644 --- a/server/src-lib/Hasura/RQL/Types/ApiLimit.hs +++ b/server/src-lib/Hasura/RQL/Types/ApiLimit.hs @@ -53,24 +53,37 @@ data ApiLimit = ApiLimit instance HasCodec ApiLimit where codec = - AC.object "ApiLimit" $ - ApiLimit - <$> optionalField' "rate_limit" AC..= _alRateLimit - <*> optionalField' "depth_limit" AC..= _alDepthLimit - <*> optionalField' "node_limit" AC..= _alNodeLimit - <*> optionalField' "time_limit" AC..= _alTimeLimit - <*> optionalField' "batch_limit" AC..= _alBatchLimit - <*> optionalFieldWithDefault' "disabled" False AC..= _alDisabled + AC.object "ApiLimit" + $ ApiLimit + <$> optionalField' "rate_limit" + AC..= _alRateLimit + <*> optionalField' "depth_limit" + AC..= _alDepthLimit + <*> optionalField' "node_limit" + AC..= _alNodeLimit + <*> optionalField' "time_limit" + AC..= _alTimeLimit + <*> optionalField' "batch_limit" + AC..= _alBatchLimit + <*> optionalFieldWithDefault' "disabled" False + AC..= _alDisabled instance FromJSON ApiLimit where parseJSON = withObject "ApiLimit" $ \o -> ApiLimit - <$> o .:? "rate_limit" - <*> o .:? "depth_limit" - <*> o .:? "node_limit" - <*> o .:? "time_limit" - <*> o .:? "batch_limit" - <*> o .:? "disabled" .!= False + <$> o + .:? "rate_limit" + <*> o + .:? "depth_limit" + <*> o + .:? "node_limit" + <*> o + .:? "time_limit" + <*> o + .:? "batch_limit" + <*> o + .:? "disabled" + .!= False instance ToJSON ApiLimit where toJSON = @@ -87,16 +100,18 @@ data Limit a = Limit instance (HasCodec a, Typeable a) => HasCodec (Limit a) where codec = - AC.object ("Limit_" <> typeableName @a) $ - Limit - <$> requiredField' "global" AC..= _lGlobal - <*> optionalFieldWithDefault' "per_role" mempty AC..= _lPerRole + AC.object ("Limit_" <> typeableName @a) + $ Limit + <$> requiredField' "global" + AC..= _lGlobal + <*> optionalFieldWithDefault' "per_role" mempty + AC..= _lPerRole -instance FromJSON a => FromJSON (Limit a) where +instance (FromJSON a) => FromJSON (Limit a) where parseJSON = withObject "Limit" $ \o -> Limit <$> o .: "global" <*> o .:? 
"per_role" .!= mempty -instance ToJSON a => ToJSON (Limit a) where +instance (ToJSON a) => ToJSON (Limit a) where toJSON = genericToJSON (Casing.aesonPrefix Casing.snakeCase) @@ -118,10 +133,12 @@ data RateLimitConfig = RateLimitConfig instance HasCodec RateLimitConfig where codec = - AC.object "RateLimitConfig" $ - RateLimitConfig - <$> requiredFieldWith' "max_reqs_per_min" (integralWithLowerBoundCodec 0) AC..= _rlcMaxReqsPerMin - <*> optionalField "unique_params" "This would be either fixed value \"IP\" or a list of Session variables" AC..= _rlcUniqueParams + AC.object "RateLimitConfig" + $ RateLimitConfig + <$> requiredFieldWith' "max_reqs_per_min" (integralWithLowerBoundCodec 0) + AC..= _rlcMaxReqsPerMin + <*> optionalField "unique_params" "This would be either fixed value \"IP\" or a list of Session variables" + AC..= _rlcUniqueParams instance FromJSON RateLimitConfig where parseJSON = @@ -143,8 +160,8 @@ instance HasCodec UniqueParamConfig where codec = bimapCodec dec enc $ disjointEitherCodec ipAddress sessionVariables where ipAddress = - dimapCodec fromEither Left $ - disjointEitherCodec + dimapCodec fromEither Left + $ disjointEitherCodec (literalTextValueCodec () "IP") (literalTextValueCodec () "ip") sessionVariables = codec @@ -187,8 +204,8 @@ newtype MaxDepth = MaxDepth {unMaxDepth :: Int} instance HasCodec MaxDepth where codec = - dimapCodec MaxDepth unMaxDepth $ - integralWithLowerBoundCodec 0 + dimapCodec MaxDepth unMaxDepth + $ integralWithLowerBoundCodec 0 newtype MaxNodes = MaxNodes {unMaxNodes :: Int} deriving stock (Show, Eq, Ord, Generic) @@ -196,8 +213,8 @@ newtype MaxNodes = MaxNodes {unMaxNodes :: Int} instance HasCodec MaxNodes where codec = - dimapCodec MaxNodes unMaxNodes $ - integralWithLowerBoundCodec 0 + dimapCodec MaxNodes unMaxNodes + $ integralWithLowerBoundCodec 0 newtype MaxTime = MaxTime {unMaxTime :: Seconds} deriving stock (Show, Eq, Ord, Generic) @@ -205,8 +222,8 @@ newtype MaxTime = MaxTime {unMaxTime :: Seconds} instance HasCodec MaxTime where codec = - dimapCodec MaxTime unMaxTime $ - realFracWithLowerBoundCodec 0 + dimapCodec MaxTime unMaxTime + $ realFracWithLowerBoundCodec 0 newtype MaxBatchSize = MaxBatchSize {unMaxBatchSize :: Int} deriving stock (Show, Eq, Ord, Generic) @@ -214,8 +231,8 @@ newtype MaxBatchSize = MaxBatchSize {unMaxBatchSize :: Int} instance HasCodec MaxBatchSize where codec = - dimapCodec MaxBatchSize unMaxBatchSize $ - integralWithLowerBoundCodec 0 + dimapCodec MaxBatchSize unMaxBatchSize + $ integralWithLowerBoundCodec 0 -- | Defers to the (illegal) DiffTime Show instance. 
-- diff --git a/server/src-lib/Hasura/RQL/Types/BackendType.hs b/server/src-lib/Hasura/RQL/Types/BackendType.hs index 35e9ac0e512..f0f33d2ced1 100644 --- a/server/src-lib/Hasura/RQL/Types/BackendType.hs +++ b/server/src-lib/Hasura/RQL/Types/BackendType.hs @@ -167,8 +167,8 @@ instance HasCodec (BackendSourceKind ('DataConnector)) where mkCodecStaticBackendSourceKind :: BackendSourceKind b -> JSONCodec (BackendSourceKind b) mkCodecStaticBackendSourceKind backendSourceKind = - bimapCodec dec enc $ - parseAlternatives (literalTextCodec longName) (literalTextCodec <$> aliases) + bimapCodec dec enc + $ parseAlternatives (literalTextCodec longName) (literalTextCodec <$> aliases) where dec text = if text `elem` validValues diff --git a/server/src-lib/Hasura/RQL/Types/Column.hs b/server/src-lib/Hasura/RQL/Types/Column.hs index 9ae832ecebb..2fe749e9236 100644 --- a/server/src-lib/Hasura/RQL/Types/Column.hs +++ b/server/src-lib/Hasura/RQL/Types/Column.hs @@ -119,7 +119,7 @@ instance (Backend b) => ToJSON (ColumnType b) where $(makePrisms ''ColumnType) -instance Backend b => ToTxt (ColumnType b) where +instance (Backend b) => ToTxt (ColumnType b) where toTxt = \case ColumnScalar scalar -> toTxt scalar ColumnEnumReference (EnumReference tableName _ tableCustomName) -> @@ -165,12 +165,14 @@ parseScalarValueColumnType columnType value = case columnType of parseEnumValue enumValueName = do for_ enumValueName \evn -> do let enums = map getEnumValue $ HashMap.keys enumValues - unless (evn `elem` enums) $ - throw400 UnexpectedPayload $ - "expected one of the values " - <> dquoteList enums - <> " for type " - <> snakeCaseTableName @b tableName <<> ", given " <>> evn + unless (evn `elem` enums) + $ throw400 UnexpectedPayload + $ "expected one of the values " + <> dquoteList enums + <> " for type " + <> snakeCaseTableName @b tableName + <<> ", given " + <>> evn pure $ textToScalarValue @b $ G.unName <$> enumValueName parseScalarValuesColumnType :: @@ -187,30 +189,30 @@ data RawColumnType (b :: BackendType) | RawColumnTypeArray (XNestedArrays b) (RawColumnType b) Bool deriving stock (Generic) -deriving instance Backend b => Eq (RawColumnType b) +deriving instance (Backend b) => Eq (RawColumnType b) -deriving instance Backend b => Ord (RawColumnType b) +deriving instance (Backend b) => Ord (RawColumnType b) -deriving anyclass instance Backend b => Hashable (RawColumnType b) +deriving anyclass instance (Backend b) => Hashable (RawColumnType b) -deriving instance Backend b => Show (RawColumnType b) +deriving instance (Backend b) => Show (RawColumnType b) -instance Backend b => NFData (RawColumnType b) +instance (Backend b) => NFData (RawColumnType b) -- For backwards compatibility we want to serialize and deserialize -- RawColumnTypeScalar as a ScalarType -instance Backend b => ToJSON (RawColumnType b) where +instance (Backend b) => ToJSON (RawColumnType b) where toJSON = \case RawColumnTypeScalar scalar -> toJSON scalar other -> genericToJSON hasuraJSON other -instance Backend b => FromJSON (RawColumnType b) where +instance (Backend b) => FromJSON (RawColumnType b) where parseJSON v = (RawColumnTypeScalar <$> parseJSON v) <|> genericParseJSON hasuraJSON v -- Ideally we'd derive ToJSON and FromJSON instances from the HasCodec instance, rather than the other way around. -- Unfortunately, I'm not sure if it's possible to write a proper HasCodec instance in the presence -- of the (XNestedObjects b) and (XNestedArrays b) type families, which may be Void. 
-instance Backend b => HasCodec (RawColumnType b) where +instance (Backend b) => HasCodec (RawColumnType b) where codec = codecViaAeson "RawColumnType" -- | “Raw” column info, as stored in the catalog (but not in the schema cache). Instead of @@ -229,16 +231,16 @@ data RawColumnInfo (b :: BackendType) = RawColumnInfo } deriving (Generic) -deriving instance Backend b => Eq (RawColumnInfo b) +deriving instance (Backend b) => Eq (RawColumnInfo b) -deriving instance Backend b => Show (RawColumnInfo b) +deriving instance (Backend b) => Show (RawColumnInfo b) -instance Backend b => NFData (RawColumnInfo b) +instance (Backend b) => NFData (RawColumnInfo b) -instance Backend b => ToJSON (RawColumnInfo b) where +instance (Backend b) => ToJSON (RawColumnInfo b) where toJSON = genericToJSON hasuraJSON -instance Backend b => FromJSON (RawColumnInfo b) where +instance (Backend b) => FromJSON (RawColumnInfo b) where parseJSON = genericParseJSON hasuraJSON -- | Indicates whether a column may participate in certain mutations. @@ -282,17 +284,17 @@ data ColumnInfo (b :: BackendType) = ColumnInfo } deriving (Generic) -deriving instance Backend b => Eq (ColumnInfo b) +deriving instance (Backend b) => Eq (ColumnInfo b) -deriving instance Backend b => Ord (ColumnInfo b) +deriving instance (Backend b) => Ord (ColumnInfo b) -deriving instance Backend b => Show (ColumnInfo b) +deriving instance (Backend b) => Show (ColumnInfo b) -instance Backend b => NFData (ColumnInfo b) +instance (Backend b) => NFData (ColumnInfo b) -instance Backend b => Hashable (ColumnInfo b) +instance (Backend b) => Hashable (ColumnInfo b) -instance Backend b => ToJSON (ColumnInfo b) where +instance (Backend b) => ToJSON (ColumnInfo b) where toJSON = genericToJSON hasuraJSON toEncoding = genericToEncoding hasuraJSON @@ -389,20 +391,20 @@ $(makePrisms ''StructuredColumnInfo) type PrimaryKeyColumns b = NESeq (ColumnInfo b) -onlyNumCols :: forall b. Backend b => [ColumnInfo b] -> [ColumnInfo b] +onlyNumCols :: forall b. (Backend b) => [ColumnInfo b] -> [ColumnInfo b] onlyNumCols = filter isNumCol -isNumCol :: forall b. Backend b => ColumnInfo b -> Bool +isNumCol :: forall b. (Backend b) => ColumnInfo b -> Bool isNumCol = isScalarColumnWhere (isNumType @b) . ciType -onlyComparableCols :: forall b. Backend b => [ColumnInfo b] -> [ColumnInfo b] +onlyComparableCols :: forall b. (Backend b) => [ColumnInfo b] -> [ColumnInfo b] onlyComparableCols = filter (isScalarColumnWhere (isComparableType @b) . ciType) -getColInfos :: Backend b => [Column b] -> [ColumnInfo b] -> [ColumnInfo b] +getColInfos :: (Backend b) => [Column b] -> [ColumnInfo b] -> [ColumnInfo b] getColInfos cols allColInfos = flip filter allColInfos $ \ci -> ciColumn ci `elem` cols -fromCol :: Backend b => Column b -> FieldName +fromCol :: (Backend b) => Column b -> FieldName fromCol = FieldName . 
toTxt type ColumnValues b a = HashMap (Column b) a @@ -420,7 +422,7 @@ columnReferenceType = \case ColumnReferenceComputedField _ scalarType -> ColumnScalar scalarType ColumnReferenceCast _ targetType -> targetType -instance Backend b => ToTxt (ColumnReference b) where +instance (Backend b) => ToTxt (ColumnReference b) where toTxt = \case ColumnReferenceColumn column -> toTxt $ ciColumn column ColumnReferenceComputedField name _ -> toTxt name diff --git a/server/src-lib/Hasura/RQL/Types/Common.hs b/server/src-lib/Hasura/RQL/Types/Common.hs index b771b6dd655..b05d63db684 100644 --- a/server/src-lib/Hasura/RQL/Types/Common.hs +++ b/server/src-lib/Hasura/RQL/Types/Common.hs @@ -146,8 +146,9 @@ instance FromJSON RelType where parseJSON _ = fail "expecting either 'object' or 'array' for rel_type" instance PG.FromCol RelType where - fromCol bs = flip PG.fromColHelper bs $ - PD.enum $ \case + fromCol bs = flip PG.fromColHelper bs + $ PD.enum + $ \case "object" -> Just ObjRel "array" -> Just ArrRel _ -> Nothing @@ -347,7 +348,7 @@ instance PG.FromCol InputWebhook where -- Consists of the environment variable name with missing/invalid value newtype ResolveWebhookError = ResolveWebhookError {unResolveWebhookError :: Text} deriving (Show, ToTxt) -resolveWebhook :: QErrM m => Env.Environment -> InputWebhook -> m ResolvedWebhook +resolveWebhook :: (QErrM m) => Env.Environment -> InputWebhook -> m ResolvedWebhook resolveWebhook env inputWebhook = do let eitherRenderedTemplate = resolveWebhookEither env inputWebhook onLeft @@ -397,43 +398,43 @@ instance Hashable PGConnectionParams instance HasCodec PGConnectionParams where codec = - AC.object "PGConnectionParams" $ - PGConnectionParams - <$> requiredField' "host" - AC..= _pgcpHost + AC.object "PGConnectionParams" + $ PGConnectionParams + <$> requiredField' "host" + AC..= _pgcpHost <*> requiredField' "username" - AC..= _pgcpUsername + AC..= _pgcpUsername <*> optionalFieldOrNull' "password" - AC..= _pgcpPassword + AC..= _pgcpPassword <*> requiredField' "port" - AC..= _pgcpPort + AC..= _pgcpPort <*> requiredField' "database" - AC..= _pgcpDatabase + AC..= _pgcpDatabase -- TODO: Use HasCodec to define Aeson instances? instance ToJSON PGConnectionParams where toJSON PGConnectionParams {..} = - J.object $ - [ "host" .= _pgcpHost, - "username" .= _pgcpUsername, - "port" .= _pgcpPort, - "database" .= _pgcpDatabase - ] - ++ ["password" .= _pgcpPassword | isJust _pgcpPassword] + J.object + $ [ "host" .= _pgcpHost, + "username" .= _pgcpUsername, + "port" .= _pgcpPort, + "database" .= _pgcpDatabase + ] + ++ ["password" .= _pgcpPassword | isJust _pgcpPassword] instance FromJSON PGConnectionParams where parseJSON = withObject "PGConnectionParams" $ \o -> PGConnectionParams <$> o - .: "host" + .: "host" <*> o - .: "username" + .: "username" <*> o - .:? "password" + .:? 
"password" <*> o - .: "port" + .: "port" <*> o - .: "database" + .: "database" data UrlConf = -- | the database connection string @@ -450,9 +451,9 @@ instance Hashable UrlConf instance HasCodec UrlConf where codec = - dimapCodec dec enc $ - disjointEitherCodec valCodec $ - disjointEitherCodec fromEnvCodec fromParamsCodec + dimapCodec dec enc + $ disjointEitherCodec valCodec + $ disjointEitherCodec fromEnvCodec fromParamsCodec where valCodec = codec fromParamsCodec = AC.object "UrlConfFromParams" $ requiredField' "connection_parameters" @@ -520,15 +521,15 @@ getConnOptionsFromConnParams PGConnectionParams {..} = getPGConnectionStringFromParams :: PGConnectionParams -> String getPGConnectionStringFromParams PGConnectionParams {..} = let uriAuth = - rectifyAuth $ - URIAuth + rectifyAuth + $ URIAuth { uriUserInfo = getURIAuthUserInfo _pgcpUsername _pgcpPassword, uriRegName = unpackEscape _pgcpHost, uriPort = show _pgcpPort } pgConnectionURI = - rectify $ - URI + rectify + $ URI { uriScheme = "postgresql", uriAuthority = Just uriAuth, uriPath = "/" <> unpackEscape _pgcpDatabase, @@ -547,14 +548,14 @@ getPGConnectionStringFromParams PGConnectionParams {..} = Nothing -> unpackEscape username Just password -> unpackEscape username <> ":" <> unpackEscape password -resolveUrlConf :: MonadError QErr m => Env.Environment -> UrlConf -> m Text +resolveUrlConf :: (MonadError QErr m) => Env.Environment -> UrlConf -> m Text resolveUrlConf env = \case UrlValue v -> unResolvedWebhook <$> resolveWebhook env v UrlFromEnv envVar -> getEnv env envVar UrlFromParams connParams -> pure . T.pack $ getPGConnectionStringFromParams connParams -getEnv :: QErrM m => Env.Environment -> Text -> m Text +getEnv :: (QErrM m) => Env.Environment -> Text -> m Text getEnv env k = do let eitherEnv = getEnvEither env k onLeft @@ -579,10 +580,12 @@ data MetricsConfig = MetricsConfig instance HasCodec MetricsConfig where codec = - AC.object "MetricsConfig" $ - MetricsConfig - <$> requiredField' "analyze_query_variables" AC..= _mcAnalyzeQueryVariables - <*> requiredField' "analyze_response_body" AC..= _mcAnalyzeResponseBody + AC.object "MetricsConfig" + $ MetricsConfig + <$> requiredField' "analyze_query_variables" + AC..= _mcAnalyzeQueryVariables + <*> requiredField' "analyze_response_body" + AC..= _mcAnalyzeResponseBody instance FromJSON MetricsConfig where parseJSON = J.withObject "MetricsConfig" $ \o -> do @@ -649,9 +652,9 @@ data EnvRecord a = EnvRecord } deriving (Show, Eq, Generic) -instance NFData a => NFData (EnvRecord a) +instance (NFData a) => NFData (EnvRecord a) -instance Hashable a => Hashable (EnvRecord a) +instance (Hashable a) => Hashable (EnvRecord a) instance (ToJSON a) => ToJSON (EnvRecord a) where toJSON (EnvRecord envVar _envValue) = object ["env_var" .= envVar] @@ -667,8 +670,8 @@ instance ToJSON ApolloFederationVersion where toJSON V1 = J.String "v1" instance FromJSON ApolloFederationVersion where - parseJSON = withText "ApolloFederationVersion" $ - \case + parseJSON = withText "ApolloFederationVersion" + $ \case "v1" -> pure V1 _ -> fail "enable takes the version of apollo federation. Supported value is v1 only." 
@@ -681,9 +684,10 @@ data ApolloFederationConfig = ApolloFederationConfig instance HasCodec ApolloFederationConfig where codec = - AC.object "ApolloFederationConfig" $ - ApolloFederationConfig - <$> requiredField "enable" enableDoc AC..= enable + AC.object "ApolloFederationConfig" + $ ApolloFederationConfig + <$> requiredField "enable" enableDoc + AC..= enable where enableDoc = "enable takes the version of apollo federation. Supported value is v1 only." @@ -733,7 +737,7 @@ data RemoteRelationshipG definition = RemoteRelationship } deriving (Show, Eq, Generic) -instance ToJSON definition => ToJSON (RemoteRelationshipG definition) where +instance (ToJSON definition) => ToJSON (RemoteRelationshipG definition) where toJSON RemoteRelationship {..} = J.object [ "name" .= _rrName, @@ -752,7 +756,9 @@ remoteRelationshipCodec :: JSONCodec definition -> JSONCodec (RemoteRelationshipG definition) remoteRelationshipCodec definitionCodec = - AC.object ("RemoteRelationship_" <> typeableName @definition) $ - RemoteRelationship - <$> requiredField' "name" AC..= _rrName - <*> requiredFieldWith' "definition" definitionCodec AC..= _rrDefinition + AC.object ("RemoteRelationship_" <> typeableName @definition) + $ RemoteRelationship + <$> requiredField' "name" + AC..= _rrName + <*> requiredFieldWith' "definition" definitionCodec + AC..= _rrDefinition diff --git a/server/src-lib/Hasura/RQL/Types/ComputedField.hs b/server/src-lib/Hasura/RQL/Types/ComputedField.hs index 7e35b4da086..783228fb671 100644 --- a/server/src-lib/Hasura/RQL/Types/ComputedField.hs +++ b/server/src-lib/Hasura/RQL/Types/ComputedField.hs @@ -64,9 +64,9 @@ data CustomFunctionNames = CustomFunctionNames } deriving (Show, Eq, Generic) -deriving instance Backend b => Show (FunctionTrackedAs b) +deriving instance (Backend b) => Show (FunctionTrackedAs b) -deriving instance Backend b => Eq (FunctionTrackedAs b) +deriving instance (Backend b) => Eq (FunctionTrackedAs b) data ComputedFieldFunction (b :: BackendType) = ComputedFieldFunction { _cffName :: FunctionName b, @@ -114,11 +114,11 @@ instance (Backend b) => ToJSON (ComputedFieldInfo b) where object ["name" .= name, "function" .= func, "return_type" .= tp, "description" .= description] -- | Return all the computed fields in the given list that have numeric types. -onlyNumComputedFields :: forall b. Backend b => [ComputedFieldInfo b] -> [ComputedFieldInfo b] +onlyNumComputedFields :: forall b. (Backend b) => [ComputedFieldInfo b] -> [ComputedFieldInfo b] onlyNumComputedFields = filter isNumComputedField -- | Check whether a computed field has a numeric type. -isNumComputedField :: forall b. Backend b => ComputedFieldInfo b -> Bool +isNumComputedField :: forall b. 
(Backend b) => ComputedFieldInfo b -> Bool isNumComputedField cfi = case computedFieldReturnType @b (_cfiReturnType cfi) of ReturnsScalar t -> isNumType @b t _ -> False diff --git a/server/src-lib/Hasura/RQL/Types/CustomTypes.hs b/server/src-lib/Hasura/RQL/Types/CustomTypes.hs index 2fc5e913de3..1d9b2e73926 100644 --- a/server/src-lib/Hasura/RQL/Types/CustomTypes.hs +++ b/server/src-lib/Hasura/RQL/Types/CustomTypes.hs @@ -93,12 +93,16 @@ instance NFData CustomTypes instance HasCodec CustomTypes where codec = - AC.object "CustomTypes" $ - CustomTypes - <$> optionalFieldWithOmittedDefault' "input_objects" [] AC..= _ctInputObjects - <*> optionalFieldWithOmittedDefault' "objects" [] AC..= _ctObjects - <*> optionalFieldWithOmittedDefault' "scalars" [] AC..= _ctScalars - <*> optionalFieldWithOmittedDefault' "enums" [] AC..= _ctEnums + AC.object "CustomTypes" + $ CustomTypes + <$> optionalFieldWithOmittedDefault' "input_objects" [] + AC..= _ctInputObjects + <*> optionalFieldWithOmittedDefault' "objects" [] + AC..= _ctObjects + <*> optionalFieldWithOmittedDefault' "scalars" [] + AC..= _ctScalars + <*> optionalFieldWithOmittedDefault' "enums" [] + AC..= _ctEnums emptyCustomTypes :: CustomTypes emptyCustomTypes = CustomTypes [] [] [] [] @@ -117,11 +121,14 @@ instance NFData InputObjectTypeDefinition instance HasCodec InputObjectTypeDefinition where codec = - AC.object "InputObjectTypeDefinition" $ - InputObjectTypeDefinition - <$> requiredField' "name" AC..= _iotdName - <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec AC..= _iotdDescription - <*> requiredField' "fields" AC..= _iotdFields + AC.object "InputObjectTypeDefinition" + $ InputObjectTypeDefinition + <$> requiredField' "name" + AC..= _iotdName + <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec + AC..= _iotdDescription + <*> requiredField' "fields" + AC..= _iotdFields newtype InputObjectTypeName = InputObjectTypeName {unInputObjectTypeName :: G.Name} deriving (Show, Eq, Ord, Hashable, J.FromJSON, J.ToJSON, ToTxt, Generic, NFData) @@ -141,11 +148,14 @@ instance NFData InputObjectFieldDefinition instance HasCodec InputObjectFieldDefinition where codec = - AC.object "InputObjectFieldDefinition" $ - InputObjectFieldDefinition - <$> requiredField' "name" AC..= _iofdName - <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec AC..= _iofdDescription - <*> requiredField' "type" AC..= _iofdType + AC.object "InputObjectFieldDefinition" + $ InputObjectFieldDefinition + <$> requiredField' "name" + AC..= _iofdName + <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec + AC..= _iofdDescription + <*> requiredField' "type" + AC..= _iofdType newtype InputObjectFieldName = InputObjectFieldName {unInputObjectFieldName :: G.Name} deriving (Show, Eq, Ord, Hashable, J.FromJSON, J.ToJSON, ToTxt, Generic, NFData) @@ -168,12 +178,16 @@ instance NFData ObjectTypeDefinition instance HasCodec ObjectTypeDefinition where codec = - AC.object "ObjectTypeDefinition" $ - ObjectTypeDefinition - <$> requiredField' "name" AC..= _otdName - <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec AC..= _otdDescription - <*> requiredField' "fields" AC..= _otdFields - <*> optionalFieldWithOmittedDefault' "relationships" [] AC..= _otdRelationships + AC.object "ObjectTypeDefinition" + $ ObjectTypeDefinition + <$> requiredField' "name" + AC..= _otdName + <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec + AC..= _otdDescription + <*> requiredField' "fields" + AC..= _otdFields + <*> 
optionalFieldWithOmittedDefault' "relationships" [] + AC..= _otdRelationships newtype ObjectTypeName = ObjectTypeName {unObjectTypeName :: G.Name} deriving (Show, Eq, Ord, Hashable, J.FromJSON, J.ToJSON, ToTxt, Generic, NFData) @@ -197,12 +211,16 @@ instance (NFData field) => NFData (ObjectFieldDefinition field) instance (HasCodec field, Typeable field) => HasCodec (ObjectFieldDefinition field) where codec = - AC.object ("ObjectFieldDefinition_" <> typeableName @field) $ - ObjectFieldDefinition - <$> requiredField' "name" AC..= _ofdName - <*> optionalField' "arguments" AC..= _ofdArguments - <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec AC..= _ofdDescription - <*> requiredField' "type" AC..= _ofdType + AC.object ("ObjectFieldDefinition_" <> typeableName @field) + $ ObjectFieldDefinition + <$> requiredField' "name" + AC..= _ofdName + <*> optionalField' "arguments" + AC..= _ofdArguments + <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec + AC..= _ofdDescription + <*> requiredField' "type" + AC..= _ofdType newtype ObjectFieldName = ObjectFieldName {unObjectFieldName :: G.Name} deriving (Show, Eq, Ord, Hashable, J.FromJSON, J.ToJSON, J.FromJSONKey, J.ToJSONKey, ToTxt, Generic, NFData) @@ -223,10 +241,12 @@ instance NFData ScalarTypeDefinition instance HasCodec ScalarTypeDefinition where codec = - AC.object "ScalarTypeDefinition" $ - ScalarTypeDefinition - <$> requiredField' "name" AC..= _stdName - <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec AC..= _stdDescription + AC.object "ScalarTypeDefinition" + $ ScalarTypeDefinition + <$> requiredField' "name" + AC..= _stdName + <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec + AC..= _stdDescription defaultGraphQLScalars :: HashMap G.Name ScalarTypeDefinition defaultGraphQLScalars = HashMap.fromList . 
map (\name -> (name, ScalarTypeDefinition name Nothing)) $ Set.toList GName.builtInScalars @@ -245,11 +265,14 @@ instance NFData EnumTypeDefinition instance HasCodec EnumTypeDefinition where codec = - AC.object "EnumTypeDefinition" $ - EnumTypeDefinition - <$> requiredField' "name" AC..= _etdName - <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec AC..= _etdDescription - <*> requiredField' "values" AC..= _etdValues + AC.object "EnumTypeDefinition" + $ EnumTypeDefinition + <$> requiredField' "name" + AC..= _etdName + <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec + AC..= _etdDescription + <*> requiredField' "values" + AC..= _etdValues newtype EnumTypeName = EnumTypeName {unEnumTypeName :: G.Name} deriving (Show, Eq, Ord, Hashable, J.FromJSON, J.ToJSON, ToTxt, Generic, NFData) @@ -268,11 +291,14 @@ instance NFData EnumValueDefinition instance HasCodec EnumValueDefinition where codec = - AC.object "EnumValueDefinition" $ - EnumValueDefinition - <$> requiredFieldWith' "value" graphQLEnumValueCodec AC..= _evdValue - <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec AC..= _evdDescription - <*> optionalField' "is_deprecated" AC..= _evdIsDeprecated + AC.object "EnumValueDefinition" + $ EnumValueDefinition + <$> requiredFieldWith' "value" graphQLEnumValueCodec + AC..= _evdValue + <*> optionalFieldWith' "description" graphQLFieldDescriptionCodec + AC..= _evdDescription + <*> optionalField' "is_deprecated" + AC..= _evdIsDeprecated -------------------------------------------------------------------------------- -- Relationships @@ -296,22 +322,33 @@ instance NFData TypeRelationshipDefinition instance HasCodec TypeRelationshipDefinition where codec = - AC.object "TypeRelationshipDefinition" $ - TypeRelationshipDefinition - <$> requiredField' "name" AC..= _trdName - <*> requiredField' "type" AC..= _trdType - <*> optionalFieldWithDefault' "source" defaultSource AC..= _trdSource - <*> requiredField' "remote_table" AC..= _trdRemoteTable - <*> requiredField' "field_mapping" AC..= _trdFieldMapping + AC.object "TypeRelationshipDefinition" + $ TypeRelationshipDefinition + <$> requiredField' "name" + AC..= _trdName + <*> requiredField' "type" + AC..= _trdType + <*> optionalFieldWithDefault' "source" defaultSource + AC..= _trdSource + <*> requiredField' "remote_table" + AC..= _trdRemoteTable + <*> requiredField' "field_mapping" + AC..= _trdFieldMapping instance J.FromJSON TypeRelationshipDefinition where parseJSON = J.withObject "TypeRelationshipDefinition" $ \o -> TypeRelationshipDefinition - <$> o .: "name" - <*> o .: "type" - <*> o .:? "source" .!= defaultSource - <*> o .: "remote_table" - <*> o .: "field_mapping" + <$> o + .: "name" + <*> o + .: "type" + <*> o + .:? 
"source" + .!= defaultSource + <*> o + .: "remote_table" + <*> o + .: "field_mapping" -- | TODO: deduplicate this in favour of RelName newtype RelationshipName = RelationshipName {unRelationshipName :: G.Name} diff --git a/server/src-lib/Hasura/RQL/Types/Endpoint.hs b/server/src-lib/Hasura/RQL/Types/Endpoint.hs index 44a0cb0dc3c..dbafdf594df 100644 --- a/server/src-lib/Hasura/RQL/Types/Endpoint.hs +++ b/server/src-lib/Hasura/RQL/Types/Endpoint.hs @@ -98,7 +98,7 @@ newtype EndpointUrl = EndpointUrl {unEndpointUrl :: NonEmptyText} instance HasCodec EndpointUrl where codec = dimapCodec EndpointUrl unEndpointUrl codec -mkEndpointUrl :: ToTxt a => a -> Maybe EndpointUrl +mkEndpointUrl :: (ToTxt a) => a -> Maybe EndpointUrl mkEndpointUrl s = EndpointUrl <$> mkNonEmptyText (toTxt s) instance FromHttpApiData EndpointUrl where @@ -116,10 +116,12 @@ data QueryReference = QueryReference instance HasCodec QueryReference where codec = - AC.object "QueryReference" $ - QueryReference - <$> requiredField' "collection_name" AC..= _qrCollectionName - <*> requiredField' "query_name" AC..= _qrQueryName + AC.object "QueryReference" + $ QueryReference + <$> requiredField' "collection_name" + AC..= _qrCollectionName + <*> requiredField' "query_name" + AC..= _qrQueryName $(deriveJSON (aesonDrop 3 snakeCase) ''QueryReference) $(makeLenses ''QueryReference) @@ -131,16 +133,17 @@ data EndpointDef query = EndpointDef instance (HasCodec query, Typeable query) => HasCodec (EndpointDef query) where codec = - AC.object ("EndpointDef_" <> typeableName @query) $ - EndpointDef - <$> requiredField' "query" AC..= _edQuery + AC.object ("EndpointDef_" <> typeableName @query) + $ EndpointDef + <$> requiredField' "query" + AC..= _edQuery $(deriveJSON (aesonDrop 3 snakeCase) ''EndpointDef) $(makeLenses ''EndpointDef) type EndpointTrie query = MultiMapPathTrie Text EndpointMethod (EndpointMetadata query) -buildEndpointsTrie :: Ord query => [EndpointMetadata query] -> EndpointTrie query +buildEndpointsTrie :: (Ord query) => [EndpointMetadata query] -> EndpointTrie query buildEndpointsTrie = foldl' insert mempty where insert t q = @@ -169,13 +172,18 @@ data EndpointMetadata query = EndpointMetadata instance (HasCodec query, Typeable query) => HasCodec (EndpointMetadata query) where codec = - AC.object ("EndpointMetadata_" <> typeableName @query) $ - EndpointMetadata - <$> requiredField' "name" AC..= _ceName - <*> requiredField' "url" AC..= _ceUrl - <*> requiredField' "methods" AC..= _ceMethods - <*> requiredField' "definition" AC..= _ceDefinition - <*> optionalField' "comment" AC..= _ceComment + AC.object ("EndpointMetadata_" <> typeableName @query) + $ EndpointMetadata + <$> requiredField' "name" + AC..= _ceName + <*> requiredField' "url" + AC..= _ceUrl + <*> requiredField' "methods" + AC..= _ceMethods + <*> requiredField' "definition" + AC..= _ceDefinition + <*> optionalField' "comment" + AC..= _ceComment $(deriveJSON (aesonDrop 3 snakeCase) ''EndpointMetadata) $(makeLenses ''EndpointMetadata) diff --git a/server/src-lib/Hasura/RQL/Types/Endpoint/Trie.hs b/server/src-lib/Hasura/RQL/Types/Endpoint/Trie.hs index 75e7428cd7c..9fd41449223 100644 --- a/server/src-lib/Hasura/RQL/Types/Endpoint/Trie.hs +++ b/server/src-lib/Hasura/RQL/Types/Endpoint/Trie.hs @@ -30,11 +30,11 @@ data PathComponent a | PathParam deriving stock (Show, Eq, Ord, Generic) -instance ToJSON a => ToJSON (PathComponent a) +instance (ToJSON a) => ToJSON (PathComponent a) -instance ToJSON a => ToJSONKey (PathComponent a) +instance (ToJSON a) => ToJSONKey (PathComponent 
a) -instance Hashable a => Hashable (PathComponent a) +instance (Hashable a) => Hashable (PathComponent a) -- | Result of matching a path @['PathComponent'] a@ and key @k@ in a 'MultiMapPathTrie'. -- @@ -88,7 +88,7 @@ instance Monoid (MatchResult a k v) where -- | Look up the value at a path. -- @PathParam@ matches any path component. -- Returns a list of pairs containing the value found and bindings for any @PathParam@s. -lookupPath :: Hashable a => [a] -> T.Trie (PathComponent a) v -> [(v, [a])] +lookupPath :: (Hashable a) => [a] -> T.Trie (PathComponent a) v -> [(v, [a])] lookupPath [] t = [(v, []) | v <- maybeToList (T.trieData t)] lookupPath (x : xs) t = do (pc, t') <- matchPathComponent x $ T.trieMap t @@ -98,7 +98,7 @@ lookupPath (x : xs) t = do PathParam -> (x :) <$> m where matchPathComponent :: - Hashable a => + (Hashable a) => a -> HashMap.HashMap (PathComponent a) v -> [(PathComponent (), v)] diff --git a/server/src-lib/Hasura/RQL/Types/EventTrigger.hs b/server/src-lib/Hasura/RQL/Types/EventTrigger.hs index 836600aa9f7..9bec22c9a4d 100644 --- a/server/src-lib/Hasura/RQL/Types/EventTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/EventTrigger.hs @@ -108,27 +108,27 @@ instance Hashable Ops data SubscribeColumns (b :: BackendType) = SubCStar | SubCArray [Column b] deriving (Generic) -deriving instance Backend b => Show (SubscribeColumns b) +deriving instance (Backend b) => Show (SubscribeColumns b) -deriving instance Backend b => Eq (SubscribeColumns b) +deriving instance (Backend b) => Eq (SubscribeColumns b) -instance Backend b => NFData (SubscribeColumns b) +instance (Backend b) => NFData (SubscribeColumns b) -instance Backend b => HasCodec (SubscribeColumns b) where +instance (Backend b) => HasCodec (SubscribeColumns b) where codec = dimapCodec (either (const SubCStar) SubCArray) (\case SubCStar -> Left "*"; SubCArray cols -> Right cols) $ disjointEitherCodec (literalTextCodec "*") (listCodec codec) -instance Backend b => FromJSON (SubscribeColumns b) where +instance (Backend b) => FromJSON (SubscribeColumns b) where parseJSON (String s) = case s of "*" -> return SubCStar _ -> fail "only * or [] allowed" parseJSON v@(Array _) = SubCArray <$> parseJSON v parseJSON _ = fail "unexpected columns" -instance Backend b => ToJSON (SubscribeColumns b) where +instance (Backend b) => ToJSON (SubscribeColumns b) where toJSON SubCStar = "*" toJSON (SubCArray cols) = toJSON cols @@ -143,17 +143,19 @@ data SubscribeOpSpec (b :: BackendType) = SubscribeOpSpec instance (Backend b) => NFData (SubscribeOpSpec b) -instance Backend b => HasCodec (SubscribeOpSpec b) where +instance (Backend b) => HasCodec (SubscribeOpSpec b) where codec = - AC.object (backendPrefix @b <> "SubscribeOpSpec") $ - SubscribeOpSpec - <$> requiredField' "columns" AC..= sosColumns - <*> optionalField' "payload" AC..= sosPayload + AC.object (backendPrefix @b <> "SubscribeOpSpec") + $ SubscribeOpSpec + <$> requiredField' "columns" + AC..= sosColumns + <*> optionalField' "payload" + AC..= sosPayload -instance Backend b => FromJSON (SubscribeOpSpec b) where +instance (Backend b) => FromJSON (SubscribeOpSpec b) where parseJSON = genericParseJSON hasuraJSON {omitNothingFields = True} -instance Backend b => ToJSON (SubscribeOpSpec b) where +instance (Backend b) => ToJSON (SubscribeOpSpec b) where toJSON = genericToJSON hasuraJSON {omitNothingFields = True} defaultNumRetries :: Int @@ -179,11 +181,14 @@ instance NFData RetryConf instance HasCodec RetryConf where codec = - AC.object "RetryConf" $ - RetryConf - <$> requiredField' 
"num_retries" AC..= rcNumRetries - <*> requiredField' "interval_sec" AC..= rcIntervalSec - <*> optionalField' "timeout_sec" AC..= rcTimeoutSec + AC.object "RetryConf" + $ RetryConf + <$> requiredField' "num_retries" + AC..= rcNumRetries + <*> requiredField' "interval_sec" + AC..= rcIntervalSec + <*> optionalField' "timeout_sec" + AC..= rcTimeoutSec $(deriveJSON hasuraJSON {omitNothingFields = True} ''RetryConf) @@ -233,21 +238,25 @@ data TriggerOpsDef (b :: BackendType) = TriggerOpsDef } deriving (Show, Eq, Generic) -instance Backend b => NFData (TriggerOpsDef b) +instance (Backend b) => NFData (TriggerOpsDef b) -instance Backend b => HasCodec (TriggerOpsDef b) where +instance (Backend b) => HasCodec (TriggerOpsDef b) where codec = - AC.object (backendPrefix @b <> "TriggerOpsDef") $ - TriggerOpsDef - <$> optionalField' "insert" AC..= tdInsert - <*> optionalField' "update" AC..= tdUpdate - <*> optionalField' "delete" AC..= tdDelete - <*> optionalField' "enable_manual" AC..= tdEnableManual + AC.object (backendPrefix @b <> "TriggerOpsDef") + $ TriggerOpsDef + <$> optionalField' "insert" + AC..= tdInsert + <*> optionalField' "update" + AC..= tdUpdate + <*> optionalField' "delete" + AC..= tdDelete + <*> optionalField' "enable_manual" + AC..= tdEnableManual -instance Backend b => FromJSON (TriggerOpsDef b) where +instance (Backend b) => FromJSON (TriggerOpsDef b) where parseJSON = genericParseJSON hasuraJSON {omitNothingFields = True} -instance Backend b => ToJSON (TriggerOpsDef b) where +instance (Backend b) => ToJSON (TriggerOpsDef b) where toJSON = genericToJSON hasuraJSON {omitNothingFields = True} data EventTriggerCleanupStatus = ETCSPaused | ETCSUnpaused deriving (Show, Eq, Generic) @@ -286,14 +295,20 @@ instance NFData AutoTriggerLogCleanupConfig instance HasCodec AutoTriggerLogCleanupConfig where codec = - AC.object "AutoTriggerLogCleanupConfig" $ - AutoTriggerLogCleanupConfig - <$> requiredField' "schedule" AC..= _atlccSchedule - <*> optionalFieldWithDefault' "batch_size" 10000 AC..= _atlccBatchSize - <*> requiredField' "clear_older_than" AC..= _atlccClearOlderThan - <*> optionalFieldWithDefault' "timeout" 60 AC..= _atlccTimeout - <*> optionalFieldWithDefault' "clean_invocation_logs" False AC..= _atlccCleanInvocationLogs - <*> optionalFieldWithDefault' "paused" ETCSUnpaused AC..= _atlccPaused + AC.object "AutoTriggerLogCleanupConfig" + $ AutoTriggerLogCleanupConfig + <$> requiredField' "schedule" + AC..= _atlccSchedule + <*> optionalFieldWithDefault' "batch_size" 10000 + AC..= _atlccBatchSize + <*> requiredField' "clear_older_than" + AC..= _atlccClearOlderThan + <*> optionalFieldWithDefault' "timeout" 60 + AC..= _atlccTimeout + <*> optionalFieldWithDefault' "clean_invocation_logs" False + AC..= _atlccCleanInvocationLogs + <*> optionalFieldWithDefault' "paused" ETCSUnpaused + AC..= _atlccPaused instance FromJSON AutoTriggerLogCleanupConfig where parseJSON = @@ -414,24 +429,33 @@ data EventTriggerConf (b :: BackendType) = EventTriggerConf instance (Backend b) => HasCodec (EventTriggerConf b) where codec = - AC.object (backendPrefix @b <> "EventTriggerConfEventTriggerConf") $ - EventTriggerConf - <$> requiredField' "name" AC..= etcName - <*> requiredField' "definition" AC..= etcDefinition - <*> optionalField' "webhook" AC..= etcWebhook - <*> optionalField' "webhook_from_env" AC..= etcWebhookFromEnv - <*> requiredField' "retry_conf" AC..= etcRetryConf - <*> optionalField' "headers" AC..= etcHeaders - <*> optionalField' "request_transform" AC..= etcRequestTransform - <*> optionalField' 
"response_transform" AC..= etcResponseTransform - <*> optionalField' "cleanup_config" AC..= etcCleanupConfig + AC.object (backendPrefix @b <> "EventTriggerConfEventTriggerConf") + $ EventTriggerConf + <$> requiredField' "name" + AC..= etcName + <*> requiredField' "definition" + AC..= etcDefinition + <*> optionalField' "webhook" + AC..= etcWebhook + <*> optionalField' "webhook_from_env" + AC..= etcWebhookFromEnv + <*> requiredField' "retry_conf" + AC..= etcRetryConf + <*> optionalField' "headers" + AC..= etcHeaders + <*> optionalField' "request_transform" + AC..= etcRequestTransform + <*> optionalField' "response_transform" + AC..= etcResponseTransform + <*> optionalField' "cleanup_config" + AC..= etcCleanupConfig <*> triggerOnReplication where triggerOnReplication = case defaultTriggerOnReplication @b of Just (_, defTOR) -> optionalFieldWithOmittedDefault' "trigger_on_replication" defTOR AC..= etcTriggerOnReplication Nothing -> error "No default setting for trigger_on_replication is defined for backend type." -instance Backend b => FromJSON (EventTriggerConf b) where +instance (Backend b) => FromJSON (EventTriggerConf b) where parseJSON = withObject "EventTriggerConf" \o -> do name <- o .: "name" definition <- o .: "definition" @@ -448,25 +472,25 @@ instance Backend b => FromJSON (EventTriggerConf b) where triggerOnReplication <- o .:? "trigger_on_replication" .!= defTOR return $ EventTriggerConf name definition webhook webhookFromEnv retryConf headers requestTransform responseTransform cleanupConfig triggerOnReplication -instance Backend b => ToJSON (EventTriggerConf b) where +instance (Backend b) => ToJSON (EventTriggerConf b) where toJSON (EventTriggerConf name definition webhook webhookFromEnv retryConf headers requestTransform responseTransform cleanupConfig triggerOnReplication) = - object $ - [ "name" .= name, - "definition" .= definition, - "retry_conf" .= retryConf - ] - <> catMaybes - [ "webhook" .=? webhook, - "webhook_from_env" .=? webhookFromEnv, - "headers" .=? headers, - "request_transform" .=? requestTransform, - "response_transform" .=? responseTransform, - "cleanup_config" .=? cleanupConfig, - "trigger_on_replication" - .=? case defaultTriggerOnReplication @b of - Just (_, defTOR) -> if triggerOnReplication == defTOR then Nothing else Just triggerOnReplication - Nothing -> Just triggerOnReplication - ] + object + $ [ "name" .= name, + "definition" .= definition, + "retry_conf" .= retryConf + ] + <> catMaybes + [ "webhook" .=? webhook, + "webhook_from_env" .=? webhookFromEnv, + "headers" .=? headers, + "request_transform" .=? requestTransform, + "response_transform" .=? responseTransform, + "cleanup_config" .=? cleanupConfig, + "trigger_on_replication" + .=? 
case defaultTriggerOnReplication @b of + Just (_, defTOR) -> if triggerOnReplication == defTOR then Nothing else Just triggerOnReplication + Nothing -> Just triggerOnReplication + ] updateCleanupConfig :: Maybe AutoTriggerLogCleanupConfig -> EventTriggerConf b -> EventTriggerConf b updateCleanupConfig cleanupConfig etConf = etConf {etcCleanupConfig = cleanupConfig} @@ -516,11 +540,11 @@ data Event (b :: BackendType) = Event } deriving (Generic) -deriving instance Backend b => Show (Event b) +deriving instance (Backend b) => Show (Event b) -deriving instance Backend b => Eq (Event b) +deriving instance (Backend b) => Eq (Event b) -instance Backend b => FromJSON (Event b) where +instance (Backend b) => FromJSON (Event b) where parseJSON = genericParseJSON hasuraJSON {omitNothingFields = True} -- | The event payload processed by 'processEvent' @@ -557,9 +581,9 @@ data EventTriggerInfo (b :: BackendType) = EventTriggerInfo } deriving (Generic, Eq) -instance Backend b => NFData (EventTriggerInfo b) +instance (Backend b) => NFData (EventTriggerInfo b) -instance Backend b => ToJSON (EventTriggerInfo b) where +instance (Backend b) => ToJSON (EventTriggerInfo b) where toJSON = genericToJSON hasuraJSON type EventTriggerInfoMap b = HashMap.HashMap TriggerName (EventTriggerInfo b) @@ -601,22 +625,31 @@ data GetEventLogs (b :: BackendType) = GetEventLogs instance ToJSON (GetEventLogs b) where toJSON GetEventLogs {..} = - object $ - [ "name" .= _gelName, - "source" .= _gelSourceName, - "limit" .= _gelLimit, - "offset" .= _gelOffset, - "status" .= _gelStatus - ] + object + $ [ "name" .= _gelName, + "source" .= _gelSourceName, + "limit" .= _gelLimit, + "offset" .= _gelOffset, + "status" .= _gelStatus + ] instance FromJSON (GetEventLogs b) where parseJSON = withObject "GetEventLogs" $ \o -> GetEventLogs - <$> o .: "name" - <*> o .:? "source" .!= SNDefault - <*> o .:? "limit" .!= 100 - <*> o .:? "offset" .!= 0 - <*> o .:? "status" .!= All + <$> o + .: "name" + <*> o + .:? "source" + .!= SNDefault + <*> o + .:? "limit" + .!= 100 + <*> o + .:? "offset" + .!= 0 + <*> o + .:? "status" + .!= All data EventLog = EventLog { elId :: EventId, @@ -646,20 +679,27 @@ data GetEventInvocations (b :: BackendType) = GetEventInvocations instance ToJSON (GetEventInvocations b) where toJSON GetEventInvocations {..} = - object $ - [ "name" .= _geiName, - "source" .= _geiSourceName, - "limit" .= _geiLimit, - "offset" .= _geiOffset - ] + object + $ [ "name" .= _geiName, + "source" .= _geiSourceName, + "limit" .= _geiLimit, + "offset" .= _geiOffset + ] instance FromJSON (GetEventInvocations b) where parseJSON = withObject "GetEventInvocations" $ \o -> GetEventInvocations - <$> o .: "name" - <*> o .:? "source" .!= SNDefault - <*> o .:? "limit" .!= 100 - <*> o .:? "offset" .!= 0 + <$> o + .: "name" + <*> o + .:? "source" + .!= SNDefault + <*> o + .:? "limit" + .!= 100 + <*> o + .:? 
"offset" + .!= 0 data EventInvocationLog = EventInvocationLog { eilId :: Text, @@ -684,20 +724,27 @@ data GetEventById (b :: BackendType) = GetEventById instance ToJSON (GetEventById b) where toJSON GetEventById {..} = - object $ - [ "source" .= _gebiSourceName, - "event_id" .= _gebiEventId, - "invocation_log_limit" .= _gebiInvocationLogLimit, - "invocation_log_offset" .= _gebiInvocationLogOffset - ] + object + $ [ "source" .= _gebiSourceName, + "event_id" .= _gebiEventId, + "invocation_log_limit" .= _gebiInvocationLogLimit, + "invocation_log_offset" .= _gebiInvocationLogOffset + ] instance FromJSON (GetEventById b) where parseJSON = withObject "GetEventById" $ \o -> GetEventById - <$> o .:? "source" .!= SNDefault - <*> o .: "event_id" - <*> o .:? "invocation_log_limit" .!= 100 - <*> o .:? "invocation_log_offset" .!= 0 + <$> o + .:? "source" + .!= SNDefault + <*> o + .: "event_id" + <*> o + .:? "invocation_log_limit" + .!= 100 + <*> o + .:? "invocation_log_offset" + .!= 0 data EventLogWithInvocations = EventLogWithInvocations { elwiEvent :: Maybe EventLog, diff --git a/server/src-lib/Hasura/RQL/Types/Headers.hs b/server/src-lib/Hasura/RQL/Types/Headers.hs index 1ca01a43ef6..cdf66a87f94 100644 --- a/server/src-lib/Hasura/RQL/Types/Headers.hs +++ b/server/src-lib/Hasura/RQL/Types/Headers.hs @@ -30,16 +30,20 @@ instance HasCodec HeaderConf where codec = bimapCodec dec enc $ disjointEitherCodec valCodec fromEnvCodec where valCodec = - AC.object "HeaderConfValue" $ - (,) - <$> requiredField' "name" AC..= fst - <*> requiredField' "value" AC..= snd + AC.object "HeaderConfValue" + $ (,) + <$> requiredField' "name" + AC..= fst + <*> requiredField' "value" + AC..= snd fromEnvCodec = - AC.object "HeaderConfFromEnv" $ - (,) - <$> requiredField' "name" AC..= fst - <*> requiredField' "value_from_env" AC..= snd + AC.object "HeaderConfFromEnv" + $ (,) + <$> requiredField' "name" + AC..= fst + <*> requiredField' "value_from_env" + AC..= snd dec (Left (name, value)) = Right $ HeaderConf name (HVValue value) dec (Right (name, valueFromEnv)) = @@ -59,9 +63,10 @@ instance FromJSON HeaderConf where (Nothing, Nothing) -> fail "expecting value or value_from_env keys" (Just val, Nothing) -> return $ HeaderConf name (HVValue val) (Nothing, Just val) -> do - when (T.isPrefixOf "HASURA_GRAPHQL_" val) $ - fail $ - "env variables starting with \"HASURA_GRAPHQL_\" are not allowed in value_from_env: " <> T.unpack val + when (T.isPrefixOf "HASURA_GRAPHQL_" val) + $ fail + $ "env variables starting with \"HASURA_GRAPHQL_\" are not allowed in value_from_env: " + <> T.unpack val return $ HeaderConf name (HVEnv val) (Just _, Just _) -> fail "expecting only one of value or value_from_env keys" parseJSON _ = fail "expecting object for headers" diff --git a/server/src-lib/Hasura/RQL/Types/HealthCheck.hs b/server/src-lib/Hasura/RQL/Types/HealthCheck.hs index f347f66d077..8e2b4c91141 100644 --- a/server/src-lib/Hasura/RQL/Types/HealthCheck.hs +++ b/server/src-lib/Hasura/RQL/Types/HealthCheck.hs @@ -26,9 +26,10 @@ newtype HealthCheckTestSql = HealthCheckTestSql instance HasCodec HealthCheckTestSql where codec = - AC.object "HealthCheckTestSql" $ - HealthCheckTestSql - <$> optionalFieldWithDefault' "sql" defaultTestSql AC..= _hctSql + AC.object "HealthCheckTestSql" + $ HealthCheckTestSql + <$> optionalFieldWithDefault' "sql" defaultTestSql + AC..= _hctSql instance ToJSON HealthCheckTestSql where toJSON = genericToJSON hasuraJSON {omitNothingFields = True} @@ -88,11 +89,20 @@ instance (Backend b) => FromJSON (HealthCheckConfig b) 
where Just (HealthCheckImplementation {..}) -> withObject "Object" $ \o -> HealthCheckConfig - <$> o .:? "test" .!= _hciDefaultTest - <*> o .: "interval" - <*> o .:? "retries" .!= defaultRetries - <*> o .:? "retry_interval" .!= defaultRetryInterval - <*> o .:? "timeout" .!= defaultTimeout + <$> o + .:? "test" + .!= _hciDefaultTest + <*> o + .: "interval" + <*> o + .:? "retries" + .!= defaultRetries + <*> o + .:? "retry_interval" + .!= defaultRetryInterval + <*> o + .:? "timeout" + .!= defaultTimeout Nothing -> \_ -> parseFail "cannot deserialize health check config because backend does not implement health checks" @@ -103,13 +113,18 @@ healthCheckConfigCodec :: HealthCheckImplementation (HealthCheckTest b) -> JSONCodec (HealthCheckConfig b) healthCheckConfigCodec (HealthCheckImplementation {..}) = - AC.object (backendPrefix @b <> "HealthCheckConfig") $ - HealthCheckConfig - <$> optionalFieldWithOmittedDefaultWith' "test" _hciTestCodec _hciDefaultTest AC..= _hccTest - <*> requiredField' "interval" AC..= _hccInterval - <*> optionalFieldWithOmittedDefault' "retries" defaultRetries AC..= _hccRetries - <*> optionalFieldWithOmittedDefault' "retry_interval" defaultRetryInterval AC..= _hccRetryInterval - <*> optionalFieldWithOmittedDefault' "timeout" defaultTimeout AC..= _hccTimeout + AC.object (backendPrefix @b <> "HealthCheckConfig") + $ HealthCheckConfig + <$> optionalFieldWithOmittedDefaultWith' "test" _hciTestCodec _hciDefaultTest + AC..= _hccTest + <*> requiredField' "interval" + AC..= _hccInterval + <*> optionalFieldWithOmittedDefault' "retries" defaultRetries + AC..= _hccRetries + <*> optionalFieldWithOmittedDefault' "retry_interval" defaultRetryInterval + AC..= _hccRetryInterval + <*> optionalFieldWithOmittedDefault' "timeout" defaultTimeout + AC..= _hccTimeout defaultRetries :: HealthCheckRetries defaultRetries = HealthCheckRetries 3 diff --git a/server/src-lib/Hasura/RQL/Types/Metadata.hs b/server/src-lib/Hasura/RQL/Types/Metadata.hs index 3dfe3d12e6d..88dcbef3d1a 100644 --- a/server/src-lib/Hasura/RQL/Types/Metadata.hs +++ b/server/src-lib/Hasura/RQL/Types/Metadata.hs @@ -163,9 +163,10 @@ $(makeLenses ''Metadata) instance FromJSON Metadata where parseJSON = withObject "Metadata" $ \o -> do version <- o .:? "version" .!= MVVersion1 - when (version /= MVVersion3) $ - fail $ - "unexpected metadata version from storage: " <> show version + when (version /= MVVersion3) + $ fail + $ "unexpected metadata version from storage: " + <> show version rawSources <- o .: "sources" backendConfigs <- o .:? "backend_configs" .!= mempty sources <- oMapFromL getSourceName <$> mapWithJSONPath parseSourceMetadata rawSources Key "sources" @@ -184,8 +185,8 @@ instance FromJSON Metadata where disabledSchemaIntrospectionRoles ) <- parseNonSourcesMetadata o - pure $ - Metadata + pure + $ Metadata sources remoteSchemas queryCollections @@ -364,9 +365,9 @@ instance FromJSON MetadataNoSources where MVVersion1 -> do tables <- oMapFromL _tmTable <$> o .: "tables" functionList <- o .:? 
"functions" .!= [] - let functions = InsOrdHashMap.fromList $ - flip map functionList $ - \function -> (function, FunctionMetadata function emptyFunctionConfig mempty Nothing) + let functions = InsOrdHashMap.fromList + $ flip map functionList + $ \function -> (function, FunctionMetadata function emptyFunctionConfig mempty Nothing) pure (tables, functions) MVVersion2 -> do tables <- oMapFromL _tmTable <$> o .: "tables" @@ -385,8 +386,8 @@ instance FromJSON MetadataNoSources where _ ) <- parseNonSourcesMetadata o - pure $ - MetadataNoSources + pure + $ MetadataNoSources tables functions remoteSchemas @@ -450,8 +451,12 @@ dropRemoteRelationshipInMetadata name = dropFunctionInMetadata :: forall b. (Backend b) => SourceName -> FunctionName b -> MetadataModifier dropFunctionInMetadata source function = - MetadataModifier $ - metaSources . ix source . toSourceMetadata . (smFunctions @b) %~ InsOrdHashMap.delete function + MetadataModifier + $ metaSources + . ix source + . toSourceMetadata + . (smFunctions @b) + %~ InsOrdHashMap.delete function dropRemoteSchemaInMetadata :: RemoteSchemaName -> MetadataModifier dropRemoteSchemaInMetadata name = @@ -463,13 +468,13 @@ dropRemoteSchemaPermissionInMetadata remoteSchemaName roleName = dropRemoteSchemaRemoteRelationshipInMetadata :: RemoteSchemaName -> G.Name -> RelName -> MetadataModifier dropRemoteSchemaRemoteRelationshipInMetadata remoteSchemaName typeName relationshipName = - MetadataModifier $ - metaRemoteSchemas - . ix remoteSchemaName - . rsmRemoteRelationships - . ix typeName - . rstrsRelationships - %~ InsOrdHashMap.delete relationshipName + MetadataModifier + $ metaRemoteSchemas + . ix remoteSchemaName + . rsmRemoteRelationships + . ix typeName + . rstrsRelationships + %~ InsOrdHashMap.delete relationshipName -- | Encode 'Metadata' to JSON with deterministic ordering (e.g. "version" being at the top). -- The CLI system stores metadata in files and has option to show changes in git diff style. 
@@ -503,24 +508,24 @@ metadataToOrdJSON
       backendConfigs
       openTelemetryConfig
     ) =
-    AO.object $
-      [versionPair, sourcesPair]
-        <> catMaybes
-          [ remoteSchemasPair,
-            queryCollectionsPair,
-            allowlistPair,
-            actionsPair,
-            customTypesPair,
-            cronTriggersPair,
-            endpointsPair,
-            apiLimitsPair,
-            metricsConfigPair,
-            inheritedRolesPair,
-            introspectionDisabledRolesPair,
-            networkPair,
-            backendConfigsPair,
-            openTelemetryConfigPair
-          ]
+    AO.object
+      $ [versionPair, sourcesPair]
+      <> catMaybes
+        [ remoteSchemasPair,
+          queryCollectionsPair,
+          allowlistPair,
+          actionsPair,
+          customTypesPair,
+          cronTriggersPair,
+          endpointsPair,
+          apiLimitsPair,
+          metricsConfigPair,
+          inheritedRolesPair,
+          introspectionDisabledRolesPair,
+          networkPair,
+          backendConfigsPair,
+          openTelemetryConfigPair
+        ]
     where
       versionPair = ("version", AO.toOrdered currentMetadataVersion)
       sourcesPair = ("sources", AO.Array $ sourcesToOrdJSONList sources)
diff --git a/server/src-lib/Hasura/RQL/Types/Metadata/Common.hs b/server/src-lib/Hasura/RQL/Types/Metadata/Common.hs
index e3d05147bc0..fa8423363ee 100644
--- a/server/src-lib/Hasura/RQL/Types/Metadata/Common.hs
+++ b/server/src-lib/Hasura/RQL/Types/Metadata/Common.hs
@@ -97,13 +97,13 @@ parseListAsMap ::
 parseListAsMap things mapFn listP = do
   list <- listP
   let duplicates = toList $ L.duplicates $ map mapFn list
-  unless (null duplicates) $
-    fail $
-      T.unpack $
-        "multiple declarations exist for the following "
-          <> things
-          <> ": "
-          <> T.commaSeparated duplicates
+  unless (null duplicates)
+    $ fail
+    $ T.unpack
+    $ "multiple declarations exist for the following "
+    <> things
+    <> ": "
+    <> T.commaSeparated duplicates
   pure $ oMapFromL mapFn list
 
 type EventTriggers b = InsOrdHashMap TriggerName (EventTriggerConf b)
@@ -169,7 +169,8 @@ instance (Backend b) => FromJSONWithContext (BackendSourceKind b) (SourceMetadat
 
 backendSourceMetadataCodec :: JSONCodec BackendSourceMetadata
 backendSourceMetadataCodec =
-  named "SourceMetadata" $
+  named "SourceMetadata"
+    $
     -- Attempt to match against @SourceMetadata@ codecs for each native backend
     -- type. If none match then apply the @SourceMetadata DataConnector@ codec.
-- DataConnector is the fallback case because the possible values for its @@ -209,28 +210,28 @@ anySourceMetadataCodec = dimapCodec dec enc instance (Backend b) => HasCodec (SourceMetadata b) where codec = - AC.object (backendPrefix @b <> "SourceMetadata") $ - SourceMetadata - <$> requiredField' "name" - .== _smName + AC.object (backendPrefix @b <> "SourceMetadata") + $ SourceMetadata + <$> requiredField' "name" + .== _smName <*> requiredField' "kind" - .== _smKind + .== _smKind <*> requiredFieldWith' "tables" (sortedElemsCodec _tmTable) - .== _smTables + .== _smTables <*> optionalFieldOrNullWithOmittedDefaultWith' "functions" (sortedElemsCodec _fmFunction) mempty - .== _smFunctions + .== _smFunctions <*> optionalFieldOrNullWithOmittedDefaultWith' "native_queries" (sortedElemsCodec _nqmRootFieldName) mempty - .== _smNativeQueries + .== _smNativeQueries <*> optionalFieldOrNullWithOmittedDefaultWith' "stored_procedures" (sortedElemsCodec _spmStoredProcedure) mempty - .== _smStoredProcedures + .== _smStoredProcedures <*> optionalFieldOrNullWithOmittedDefaultWith' "logical_models" (sortedElemsCodec _lmmName) mempty - .== _smLogicalModels + .== _smLogicalModels <*> requiredField' "configuration" - .== _smConfiguration + .== _smConfiguration <*> optionalFieldOrNull' "query_tags" - .== _smQueryTags + .== _smQueryTags <*> optionalFieldWithOmittedDefault' "customization" emptySourceCustomization - .== _smCustomization + .== _smCustomization <*> healthCheckField where healthCheckField = case healthCheckImplementation @b of @@ -252,21 +253,21 @@ mkSourceMetadata :: Maybe (HealthCheckConfig b) -> BackendSourceMetadata mkSourceMetadata name backendSourceKind config customization healthCheckConfig = - BackendSourceMetadata $ - AB.mkAnyBackend $ - SourceMetadata - @b - name - backendSourceKind - mempty - mempty - mempty - mempty - mempty - config - Nothing - customization - healthCheckConfig + BackendSourceMetadata + $ AB.mkAnyBackend + $ SourceMetadata + @b + name + backendSourceKind + mempty + mempty + mempty + mempty + mempty + config + Nothing + customization + healthCheckConfig -- | Source configuration as stored in the Metadata DB for some existentialized backend. newtype BackendSourceMetadata = BackendSourceMetadata {unBackendSourceMetadata :: AB.AnyBackend SourceMetadata} @@ -299,23 +300,31 @@ parseNonSourcesMetadata :: ) parseNonSourcesMetadata o = do remoteSchemas <- - parseListAsMap "remote schemas" _rsmName $ - o .:? "remote_schemas" .!= [] + parseListAsMap "remote schemas" _rsmName + $ o + .:? "remote_schemas" + .!= [] queryCollections <- - parseListAsMap "query collections" _ccName $ - o .:? "query_collections" .!= [] + parseListAsMap "query collections" _ccName + $ o + .:? "query_collections" + .!= [] allowlist <- parseListAsMap "allowlist entries" aeCollection $ o .:? "allowlist" .!= [] customTypes <- o .:? "custom_types" .!= emptyCustomTypes actions <- parseListAsMap "actions" _amName $ o .:? "actions" .!= [] cronTriggers <- - parseListAsMap "cron triggers" ctName $ - o .:? "cron_triggers" .!= [] + parseListAsMap "cron triggers" ctName + $ o + .:? "cron_triggers" + .!= [] apiLimits <- o .:? "api_limits" .!= emptyApiLimit metricsConfig <- o .:? "metrics_config" .!= emptyMetricsConfig inheritedRoles <- - parseListAsMap "inherited roles" _rRoleName $ - o .:? "inherited_roles" .!= [] + parseListAsMap "inherited roles" _rRoleName + $ o + .:? "inherited_roles" + .!= [] introspectionDisabledForRoles <- o .:? 
"graphql_schema_introspection" .!= mempty pure ( remoteSchemas, diff --git a/server/src-lib/Hasura/RQL/Types/Metadata/Object.hs b/server/src-lib/Hasura/RQL/Types/Metadata/Object.hs index 16660464bbf..03e369c0668 100644 --- a/server/src-lib/Hasura/RQL/Types/Metadata/Object.hs +++ b/server/src-lib/Hasura/RQL/Types/Metadata/Object.hs @@ -202,7 +202,7 @@ moiName objectId = where handleSourceObj :: forall b. - Backend b => + (Backend b) => SourceName -> SourceMetadataObjId b -> Text @@ -228,9 +228,9 @@ moiName objectId = sourceObjectId :: MetadataObjId sourceObjectId = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOLogicalModel @b logicalModelName + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOLogicalModel @b logicalModelName objectName <> " in " <> moiName sourceObjectId SMOTableObj tableName tableObjectId -> @@ -243,9 +243,9 @@ moiName objectId = in tableObjectName <> " in " <> moiName - ( MOSourceObjId source $ - AB.mkAnyBackend $ - SMOTable @b tableName + ( MOSourceObjId source + $ AB.mkAnyBackend + $ SMOTable @b tableName ) data MetadataObject = MetadataObject @@ -261,10 +261,10 @@ $(makeLenses ''MetadataObject) data InconsistentRoleEntity = InconsistentTablePermission SourceName - Text - -- ^ Table name -- using `Text` here instead of `TableName b` for simplification, + -- | Table name -- using `Text` here instead of `TableName b` for simplification, -- Otherwise, we'll have to create a newtype wrapper around `TableName b` and then -- use it with `AB.AnyBackend` + Text PermType | InconsistentRemoteSchemaPermission RemoteSchemaName deriving stock (Show, Eq, Ord, Generic) diff --git a/server/src-lib/Hasura/RQL/Types/Metadata/Serialization.hs b/server/src-lib/Hasura/RQL/Types/Metadata/Serialization.hs index 541bf6fc6d0..58e896bcabf 100644 --- a/server/src-lib/Hasura/RQL/Types/Metadata/Serialization.hs +++ b/server/src-lib/Hasura/RQL/Types/Metadata/Serialization.hs @@ -110,10 +110,10 @@ import Network.Types.Extended (Network, emptyNetwork) sourcesToOrdJSONList :: Sources -> AO.Array sourcesToOrdJSONList sources = - Vector.fromList $ - map sourceMetaToOrdJSON $ - sortOn getSourceName $ - InsOrdHashMap.elems sources + Vector.fromList + $ map sourceMetaToOrdJSON + $ sortOn getSourceName + $ InsOrdHashMap.elems sources where sourceMetaToOrdJSON :: BackendSourceMetadata -> AO.Value sourceMetaToOrdJSON (BackendSourceMetadata exists) = @@ -132,16 +132,16 @@ sourcesToOrdJSONList sources = guard (_smCustomization /= emptySourceCustomization) *> [("customization", AO.toOrdered _smCustomization)] healthCheckPair = maybe [] (\healthCheckConfig -> [("health_check", AO.toOrdered healthCheckConfig)]) _smHealthCheckConfig - in AO.object $ - [sourceNamePair, sourceKindPair, tablesPair] - <> maybeToList functionsPair - <> maybeToList nativeQueriesPair - <> maybeToList storedProceduresPair - <> maybeToList logicalModelsPair - <> configurationPair - <> queryTagsConfigPair - <> customizationPair - <> healthCheckPair + in AO.object + $ [sourceNamePair, sourceKindPair, tablesPair] + <> maybeToList functionsPair + <> maybeToList nativeQueriesPair + <> maybeToList storedProceduresPair + <> maybeToList logicalModelsPair + <> configurationPair + <> queryTagsConfigPair + <> customizationPair + <> healthCheckPair tableMetaToOrdJSON :: (Backend b) => TableMetadata b -> AO.Value tableMetaToOrdJSON @@ -160,22 +160,22 @@ sourcesToOrdJSONList sources = eventTriggers enableApolloFed ) = - AO.object $ - [("table", AO.toOrdered table)] - <> catMaybes - [ isEnumPair, - configPair, - objectRelationshipsPair, - 
arrayRelationshipsPair, - computedFieldsPair, - remoteRelationshipsPair, - insertPermissionsPair, - selectPermissionsPair, - updatePermissionsPair, - deletePermissionsPair, - eventTriggersPair, - apolloFedConfigPair - ] + AO.object + $ [("table", AO.toOrdered table)] + <> catMaybes + [ isEnumPair, + configPair, + objectRelationshipsPair, + arrayRelationshipsPair, + computedFieldsPair, + remoteRelationshipsPair, + insertPermissionsPair, + selectPermissionsPair, + updatePermissionsPair, + deletePermissionsPair, + eventTriggersPair, + apolloFedConfigPair + ] where isEnumPair = if isEnum then Just ("is_enum", AO.toOrdered isEnum) else Nothing apolloFedConfigPair = fmap (\afConfig -> ("apollo_federation_config", AO.toOrdered afConfig)) enableApolloFed @@ -240,19 +240,19 @@ sourcesToOrdJSONList sources = relDefToOrdJSON :: (ToJSON a) => RelDef a -> AO.Value relDefToOrdJSON (RelDef name using comment) = - AO.object $ - [ ("name", AO.toOrdered name), - ("using", AO.toOrdered using) - ] - <> catMaybes [maybeCommentToMaybeOrdPair comment] + AO.object + $ [ ("name", AO.toOrdered name), + ("using", AO.toOrdered using) + ] + <> catMaybes [maybeCommentToMaybeOrdPair comment] computedFieldMetaToOrdJSON :: (Backend b) => ComputedFieldMetadata b -> AO.Value computedFieldMetaToOrdJSON (ComputedFieldMetadata name definition comment) = - AO.object $ - [ ("name", AO.toOrdered name), - ("definition", AO.toOrdered definition) - ] - <> catMaybes [commentToMaybeOrdPair comment] + AO.object + $ [ ("name", AO.toOrdered name), + ("definition", AO.toOrdered definition) + ] + <> catMaybes [commentToMaybeOrdPair comment] insPermDefToOrdJSON :: forall b. (Backend b) => InsPermDef b -> AO.Value insPermDefToOrdJSON = permDefToOrdJSON insPermToOrdJSON @@ -263,16 +263,16 @@ sourcesToOrdJSONList sources = if backendOnly then Just ("backend_only", AO.toOrdered backendOnly) else Nothing - in AO.object $ - [("check", AO.toOrdered check)] - <> catMaybes [maybeSetToMaybeOrdPair @b set, columnsPair, backendOnlyPair] + in AO.object + $ [("check", AO.toOrdered check)] + <> catMaybes [maybeSetToMaybeOrdPair @b set, columnsPair, backendOnlyPair] selPermDefToOrdJSON :: (Backend b) => SelPermDef b -> AO.Value selPermDefToOrdJSON = permDefToOrdJSON selPermToOrdJSON where selPermToOrdJSON (SelPerm columns fltr limit allowAgg computedFieldsPerm allowedQueryRootFieldTypes allowedSubscriptionRootFieldTypes) = - AO.object $ - catMaybes + AO.object + $ catMaybes [ columnsPair, computedFieldsPermPair, filterPair, @@ -309,12 +309,12 @@ sourcesToOrdJSONList sources = if backendOnly then Just ("backend_only", AO.toOrdered backendOnly) else Nothing - in AO.object $ - [ ("columns", AO.toOrdered columns), - ("filter", AO.toOrdered fltr), - ("check", AO.toOrdered check) - ] - <> catMaybes [maybeSetToMaybeOrdPair @b set, backendOnlyPair] + in AO.object + $ [ ("columns", AO.toOrdered columns), + ("filter", AO.toOrdered fltr), + ("check", AO.toOrdered check) + ] + <> catMaybes [maybeSetToMaybeOrdPair @b set, backendOnlyPair] delPermDefToOrdJSON :: (Backend b) => DelPermDef b -> AO.Value delPermDefToOrdJSON = permDefToOrdJSON delPermToOrdJSON @@ -324,18 +324,18 @@ sourcesToOrdJSONList sources = if backendOnly then Just ("backend_only", AO.toOrdered backendOnly) else Nothing - in AO.object $ - [ ("filter", AO.toOrdered filter') - ] - <> catMaybes [backendOnlyPair] + in AO.object + $ [ ("filter", AO.toOrdered filter') + ] + <> catMaybes [backendOnlyPair] permDefToOrdJSON :: (a b -> AO.Value) -> PermDef b a -> AO.Value permDefToOrdJSON permToOrdJSON (PermDef 
role permission comment) = - AO.object $ - [ ("role", AO.toOrdered role), - ("permission", permToOrdJSON (unPermDefPermission permission)) - ] - <> catMaybes [maybeCommentToMaybeOrdPair comment] + AO.object + $ [ ("role", AO.toOrdered role), + ("permission", permToOrdJSON (unPermDefPermission permission)) + ] + <> catMaybes [maybeCommentToMaybeOrdPair comment] eventTriggerConfToOrdJSON :: forall b. (Backend b) => EventTriggerConf b -> AO.Value eventTriggerConfToOrdJSON (EventTriggerConf name definition webhook webhookFromEnv retryConf headers reqTransform respTransform cleanupConfig triggerOnReplication) = @@ -343,20 +343,20 @@ sourcesToOrdJSONList sources = case defaultTriggerOnReplication @b of Just (_, defTOR) -> if triggerOnReplication == defTOR then Nothing else Just triggerOnReplication Nothing -> Just triggerOnReplication - in AO.object $ - [ ("name", AO.toOrdered name), - ("definition", AO.toOrdered definition), - ("retry_conf", AO.toOrdered retryConf) - ] - <> catMaybes - [ maybeAnyToMaybeOrdPair "webhook" AO.toOrdered webhook, - maybeAnyToMaybeOrdPair "webhook_from_env" AO.toOrdered webhookFromEnv, - headers >>= listToMaybeOrdPair "headers" AO.toOrdered, - fmap (("request_transform",) . AO.toOrdered) reqTransform, - fmap (("response_transform",) . AO.toOrdered) respTransform, - maybeAnyToMaybeOrdPair "cleanup_config" AO.toOrdered cleanupConfig, - maybeAnyToMaybeOrdPair "trigger_on_replication" AO.toOrdered triggerOnReplicationMaybe - ] + in AO.object + $ [ ("name", AO.toOrdered name), + ("definition", AO.toOrdered definition), + ("retry_conf", AO.toOrdered retryConf) + ] + <> catMaybes + [ maybeAnyToMaybeOrdPair "webhook" AO.toOrdered webhook, + maybeAnyToMaybeOrdPair "webhook_from_env" AO.toOrdered webhookFromEnv, + headers >>= listToMaybeOrdPair "headers" AO.toOrdered, + fmap (("request_transform",) . AO.toOrdered) reqTransform, + fmap (("response_transform",) . 
AO.toOrdered) respTransform, + maybeAnyToMaybeOrdPair "cleanup_config" AO.toOrdered cleanupConfig, + maybeAnyToMaybeOrdPair "trigger_on_replication" AO.toOrdered triggerOnReplicationMaybe + ] functionMetadataToOrdJSON :: (Backend b) => FunctionMetadata b -> AO.Value functionMetadataToOrdJSON FunctionMetadata {..} = @@ -379,41 +379,41 @@ remoteSchemasToOrdJSONList = listToMaybeArraySort remoteSchemaQToOrdJSON _rsmNam where remoteSchemaQToOrdJSON :: RemoteSchemaMetadata -> AO.Value remoteSchemaQToOrdJSON (RemoteSchemaMetadata name definition comment permissions relationships) = - AO.object $ - [ ("name", AO.toOrdered name), - ("definition", remoteSchemaDefToOrdJSON definition) - ] - <> catMaybes - [ maybeCommentToMaybeOrdPair comment, - listToMaybeOrdPair - "permissions" - permsToMaybeOrdJSON - permissions, - listToMaybeOrdPair - "remote_relationships" - AO.toOrdered - relationships - ] + AO.object + $ [ ("name", AO.toOrdered name), + ("definition", remoteSchemaDefToOrdJSON definition) + ] + <> catMaybes + [ maybeCommentToMaybeOrdPair comment, + listToMaybeOrdPair + "permissions" + permsToMaybeOrdJSON + permissions, + listToMaybeOrdPair + "remote_relationships" + AO.toOrdered + relationships + ] where permsToMaybeOrdJSON :: RemoteSchemaPermissionMetadata -> AO.Value permsToMaybeOrdJSON (RemoteSchemaPermissionMetadata role defn permComment) = - AO.object $ - [ ("role", AO.toOrdered role), - ("definition", AO.toOrdered defn) - ] - <> catMaybes [maybeCommentToMaybeOrdPair permComment] + AO.object + $ [ ("role", AO.toOrdered role), + ("definition", AO.toOrdered defn) + ] + <> catMaybes [maybeCommentToMaybeOrdPair permComment] remoteSchemaDefToOrdJSON :: RemoteSchemaDef -> AO.Value remoteSchemaDefToOrdJSON (RemoteSchemaDef url urlFromEnv headers frwrdClientHdrs timeout customization) = - AO.object $ - catMaybes + AO.object + $ catMaybes [ maybeToPair "url" url, maybeToPair "url_from_env" urlFromEnv, maybeToPair "timeout_seconds" timeout, maybeToPair "customization" customization, headers >>= listToMaybeOrdPair "headers" AO.toOrdered ] - <> [("forward_client_headers", AO.toOrdered frwrdClientHdrs) | frwrdClientHdrs] + <> [("forward_client_headers", AO.toOrdered frwrdClientHdrs) | frwrdClientHdrs] where maybeToPair n = maybeAnyToMaybeOrdPair n AO.toOrdered @@ -458,11 +458,11 @@ queryCollectionsToOrdJSONList = listToMaybeArraySort createCollectionToOrdJSON _ where createCollectionToOrdJSON :: CreateCollection -> AO.Value createCollectionToOrdJSON (CreateCollection name definition comment) = - AO.object $ - [ ("name", AO.toOrdered name), - ("definition", AO.toOrdered definition) - ] - <> catMaybes [maybeCommentToMaybeOrdPair comment] + AO.object + $ [ ("name", AO.toOrdered name), + ("definition", AO.toOrdered definition) + ] + <> catMaybes [maybeCommentToMaybeOrdPair comment] allowlistToOrdJSONList :: MetadataAllowlist -> Maybe AO.Array allowlistToOrdJSONList = listToMaybeArraySort (AO.toOrdered . 
toJSON @AllowlistEntry) aeCollection @@ -478,20 +478,20 @@ cronTriggersToOrdJSONList = listToMaybeArraySort crontriggerQToOrdJSON ctName crontriggerQToOrdJSON :: CronTriggerMetadata -> AO.Value crontriggerQToOrdJSON (CronTriggerMetadata name webhook schedule payload retryConf headers includeInMetadata comment reqTransform respTransform) = - AO.object $ - [ ("name", AO.toOrdered name), - ("webhook", AO.toOrdered webhook), - ("schedule", AO.toOrdered schedule), - ("include_in_metadata", AO.toOrdered includeInMetadata) - ] - <> catMaybes - [ maybeAnyToMaybeOrdPair "payload" AO.toOrdered payload, - maybeAnyToMaybeOrdPair "retry_conf" AO.toOrdered (maybeRetryConfiguration retryConf), - maybeAnyToMaybeOrdPair "headers" AO.toOrdered (maybeHeader headers), - maybeAnyToMaybeOrdPair "comment" AO.toOrdered comment, - fmap (("request_transform",) . AO.toOrdered) reqTransform, - fmap (("response_transform",) . AO.toOrdered) respTransform - ] + AO.object + $ [ ("name", AO.toOrdered name), + ("webhook", AO.toOrdered webhook), + ("schedule", AO.toOrdered schedule), + ("include_in_metadata", AO.toOrdered includeInMetadata) + ] + <> catMaybes + [ maybeAnyToMaybeOrdPair "payload" AO.toOrdered payload, + maybeAnyToMaybeOrdPair "retry_conf" AO.toOrdered (maybeRetryConfiguration retryConf), + maybeAnyToMaybeOrdPair "headers" AO.toOrdered (maybeHeader headers), + maybeAnyToMaybeOrdPair "comment" AO.toOrdered comment, + fmap (("request_transform",) . AO.toOrdered) reqTransform, + fmap (("response_transform",) . AO.toOrdered) respTransform + ] where maybeRetryConfiguration retryConfig | retryConfig == defaultSTRetryConf = Nothing @@ -505,64 +505,66 @@ customTypesToOrdJSON :: CustomTypes -> Maybe AO.Object customTypesToOrdJSON customTypes@(CustomTypes inpObjs objs scalars enums) | customTypes == emptyCustomTypes = Nothing | otherwise = - Just . AO.fromList . catMaybes $ - [ listToMaybeOrdPair "input_objects" inputObjectToOrdJSON inpObjs, - listToMaybeOrdPair "objects" objectTypeToOrdJSON objs, - listToMaybeOrdPair "scalars" scalarTypeToOrdJSON scalars, - listToMaybeOrdPair "enums" enumTypeToOrdJSON enums - ] + Just + . AO.fromList + . 
catMaybes + $ [ listToMaybeOrdPair "input_objects" inputObjectToOrdJSON inpObjs, + listToMaybeOrdPair "objects" objectTypeToOrdJSON objs, + listToMaybeOrdPair "scalars" scalarTypeToOrdJSON scalars, + listToMaybeOrdPair "enums" enumTypeToOrdJSON enums + ] where inputObjectToOrdJSON :: InputObjectTypeDefinition -> AO.Value inputObjectToOrdJSON (InputObjectTypeDefinition tyName descM fields) = - AO.object $ - [ ("name", AO.toOrdered tyName), - ("fields", AO.array $ map fieldDefinitionToOrdJSON $ toList fields) - ] - <> catMaybes [maybeDescriptionToMaybeOrdPair descM] + AO.object + $ [ ("name", AO.toOrdered tyName), + ("fields", AO.array $ map fieldDefinitionToOrdJSON $ toList fields) + ] + <> catMaybes [maybeDescriptionToMaybeOrdPair descM] where fieldDefinitionToOrdJSON :: InputObjectFieldDefinition -> AO.Value fieldDefinitionToOrdJSON (InputObjectFieldDefinition fieldName fieldDescM ty) = - AO.object $ - [ ("name", AO.toOrdered fieldName), - ("type", AO.toOrdered ty) - ] - <> catMaybes [maybeDescriptionToMaybeOrdPair fieldDescM] + AO.object + $ [ ("name", AO.toOrdered fieldName), + ("type", AO.toOrdered ty) + ] + <> catMaybes [maybeDescriptionToMaybeOrdPair fieldDescM] objectTypeToOrdJSON :: ObjectTypeDefinition -> AO.Value objectTypeToOrdJSON (ObjectTypeDefinition tyName descM fields rels) = - AO.object $ - [ ("name", AO.toOrdered tyName), - ("fields", AO.array $ map fieldDefinitionToOrdJSON $ toList fields) - ] - <> catMaybes - [ maybeDescriptionToMaybeOrdPair descM, - listToMaybeOrdPair "relationships" AO.toOrdered rels - ] + AO.object + $ [ ("name", AO.toOrdered tyName), + ("fields", AO.array $ map fieldDefinitionToOrdJSON $ toList fields) + ] + <> catMaybes + [ maybeDescriptionToMaybeOrdPair descM, + listToMaybeOrdPair "relationships" AO.toOrdered rels + ] where fieldDefinitionToOrdJSON :: ObjectFieldDefinition GraphQLType -> AO.Value fieldDefinitionToOrdJSON (ObjectFieldDefinition fieldName argsValM fieldDescM ty) = - AO.object $ - [ ("name", AO.toOrdered fieldName), - ("type", AO.toOrdered ty) - ] - <> catMaybes - [ ("arguments",) . AO.toOrdered <$> argsValM, - maybeDescriptionToMaybeOrdPair fieldDescM - ] + AO.object + $ [ ("name", AO.toOrdered fieldName), + ("type", AO.toOrdered ty) + ] + <> catMaybes + [ ("arguments",) . 
AO.toOrdered <$> argsValM, + maybeDescriptionToMaybeOrdPair fieldDescM + ] scalarTypeToOrdJSON :: ScalarTypeDefinition -> AO.Value scalarTypeToOrdJSON (ScalarTypeDefinition tyName descM) = - AO.object $ - [("name", AO.toOrdered tyName)] - <> catMaybes [maybeDescriptionToMaybeOrdPair descM] + AO.object + $ [("name", AO.toOrdered tyName)] + <> catMaybes [maybeDescriptionToMaybeOrdPair descM] enumTypeToOrdJSON :: EnumTypeDefinition -> AO.Value enumTypeToOrdJSON (EnumTypeDefinition tyName descM values) = - AO.object $ - [ ("name", AO.toOrdered tyName), - ("values", AO.toOrdered values) - ] - <> catMaybes [maybeDescriptionToMaybeOrdPair descM] + AO.object + $ [ ("name", AO.toOrdered tyName), + ("values", AO.toOrdered values) + ] + <> catMaybes [maybeDescriptionToMaybeOrdPair descM] endpointsToOrdJSONList :: Endpoints -> Maybe AO.Array endpointsToOrdJSONList = listToMaybeArraySort AO.toOrdered _ceUrl @@ -581,22 +583,22 @@ actionMetadataToOrdJSONList = listToMaybeArraySort actionMetadataToOrdJSON _amNa where actionMetadataToOrdJSON :: ActionMetadata -> AO.Value actionMetadataToOrdJSON (ActionMetadata name comment definition permissions) = - AO.object $ - [ ("name", AO.toOrdered name), - ("definition", actionDefinitionToOrdJSON definition) - ] - <> catMaybes - [ maybeCommentToMaybeOrdPair comment, - listToMaybeOrdPair "permissions" permToOrdJSON permissions - ] + AO.object + $ [ ("name", AO.toOrdered name), + ("definition", actionDefinitionToOrdJSON definition) + ] + <> catMaybes + [ maybeCommentToMaybeOrdPair comment, + listToMaybeOrdPair "permissions" permToOrdJSON permissions + ] where argDefinitionToOrdJSON :: ArgumentDefinition GraphQLType -> AO.Value argDefinitionToOrdJSON (ArgumentDefinition argName ty descM) = - AO.object $ - [ ("name", AO.toOrdered argName), - ("type", AO.toOrdered ty) - ] - <> catMaybes [maybeAnyToMaybeOrdPair "description" AO.toOrdered descM] + AO.object + $ [ ("name", AO.toOrdered argName), + ("type", AO.toOrdered ty) + ] + <> catMaybes [maybeAnyToMaybeOrdPair "description" AO.toOrdered descM] actionDefinitionToOrdJSON :: ActionDefinitionInput -> AO.Value actionDefinitionToOrdJSON @@ -617,19 +619,19 @@ actionMetadataToOrdJSONList = listToMaybeArraySort actionMetadataToOrdJSON _amNa [ ("type", AO.toOrdered ("mutation" :: String)), ("kind", AO.toOrdered kind) ] - in AO.object $ - [ ("handler", AO.toOrdered handler), - ("output_type", AO.toOrdered outputType) - ] - <> [("forward_client_headers", AO.toOrdered frwrdClientHdrs) | frwrdClientHdrs] - <> catMaybes - [ listToMaybeOrdPair "headers" AO.toOrdered headers, - listToMaybeOrdPair "arguments" argDefinitionToOrdJSON args, - fmap (("request_transform",) . AO.toOrdered) requestTransform, - fmap (("response_transform",) . AO.toOrdered) responseTransform - ] - <> typeAndKind - <> bool [("timeout", AO.toOrdered timeout)] mempty (timeout == defaultActionTimeoutSecs) + in AO.object + $ [ ("handler", AO.toOrdered handler), + ("output_type", AO.toOrdered outputType) + ] + <> [("forward_client_headers", AO.toOrdered frwrdClientHdrs) | frwrdClientHdrs] + <> catMaybes + [ listToMaybeOrdPair "headers" AO.toOrdered headers, + listToMaybeOrdPair "arguments" argDefinitionToOrdJSON args, + fmap (("request_transform",) . AO.toOrdered) requestTransform, + fmap (("response_transform",) . 
AO.toOrdered) responseTransform + ] + <> typeAndKind + <> bool [("timeout", AO.toOrdered timeout)] mempty (timeout == defaultActionTimeoutSecs) permToOrdJSON :: ActionPermissionMetadata -> AO.Value permToOrdJSON (ActionPermissionMetadata role permComment) = diff --git a/server/src-lib/Hasura/RQL/Types/NamingCase.hs b/server/src-lib/Hasura/RQL/Types/NamingCase.hs index be5c276d022..9bd3d5aca2d 100644 --- a/server/src-lib/Hasura/RQL/Types/NamingCase.hs +++ b/server/src-lib/Hasura/RQL/Types/NamingCase.hs @@ -17,8 +17,8 @@ data NamingCase = HasuraCase | GraphqlCase instance AC.HasCodec NamingCase where codec = - AC.named "NamingCase" $ - AC.stringConstCodec [(HasuraCase, "hasura-default"), (GraphqlCase, "graphql-default")] + AC.named "NamingCase" + $ AC.stringConstCodec [(HasuraCase, "hasura-default"), (GraphqlCase, "graphql-default")] instance J.ToJSON NamingCase where toJSON HasuraCase = J.String "hasura-default" diff --git a/server/src-lib/Hasura/RQL/Types/OpenTelemetry.hs b/server/src-lib/Hasura/RQL/Types/OpenTelemetry.hs index 48a6211197c..a7695b95b59 100644 --- a/server/src-lib/Hasura/RQL/Types/OpenTelemetry.hs +++ b/server/src-lib/Hasura/RQL/Types/OpenTelemetry.hs @@ -65,20 +65,32 @@ data OpenTelemetryConfig = OpenTelemetryConfig instance HasCodec OpenTelemetryConfig where codec = - AC.object "OpenTelemetryConfig" $ - OpenTelemetryConfig - <$> optionalFieldWithDefault' "status" defaultOtelStatus AC..= _ocStatus - <*> optionalFieldWithDefault' "data_types" defaultOtelEnabledDataTypes AC..= _ocEnabledDataTypes - <*> optionalFieldWithDefault' "exporter_otlp" defaultOtelExporterConfig AC..= _ocExporterOtlp - <*> optionalFieldWithDefault' "batch_span_processor" defaultOtelBatchSpanProcessorConfig AC..= _ocBatchSpanProcessor + AC.object "OpenTelemetryConfig" + $ OpenTelemetryConfig + <$> optionalFieldWithDefault' "status" defaultOtelStatus + AC..= _ocStatus + <*> optionalFieldWithDefault' "data_types" defaultOtelEnabledDataTypes + AC..= _ocEnabledDataTypes + <*> optionalFieldWithDefault' "exporter_otlp" defaultOtelExporterConfig + AC..= _ocExporterOtlp + <*> optionalFieldWithDefault' "batch_span_processor" defaultOtelBatchSpanProcessorConfig + AC..= _ocBatchSpanProcessor instance FromJSON OpenTelemetryConfig where parseJSON = J.withObject "OpenTelemetryConfig" $ \o -> OpenTelemetryConfig - <$> o .:? "status" .!= defaultOtelStatus - <*> o .:? "data_types" .!= defaultOtelEnabledDataTypes - <*> o .:? "exporter_otlp" .!= defaultOtelExporterConfig - <*> o .:? "batch_span_processor" .!= defaultOtelBatchSpanProcessorConfig + <$> o + .:? "status" + .!= defaultOtelStatus + <*> o + .:? "data_types" + .!= defaultOtelEnabledDataTypes + <*> o + .:? "exporter_otlp" + .!= defaultOtelExporterConfig + <*> o + .:? 
"batch_span_processor" + .!= defaultOtelBatchSpanProcessorConfig -- No `ToJSON` instance: use `openTelemetryConfigToOrdJSON` from -- Hasura.RQL.Types.Metadata.Serialization @@ -123,8 +135,8 @@ instance FromJSON OtelStatus where instance ToJSON OtelStatus where toJSON status = - J.String $ - case status of + J.String + $ case status of OtelEnabled -> "enabled" OtelDisabled -> "disabled" @@ -165,12 +177,16 @@ data OtelExporterConfig = OtelExporterConfig instance HasCodec OtelExporterConfig where codec = - AC.object "OtelExporterConfig" $ - OtelExporterConfig - <$> optionalField "otlp_traces_endpoint" tracesEndpointDoc AC..= _oecTracesEndpoint - <*> optionalFieldWithDefault "protocol" defaultOtelExporterProtocol protocolDoc AC..= _oecProtocol - <*> optionalFieldWithDefault "headers" defaultOtelExporterHeaders headersDoc AC..= _oecHeaders - <*> optionalFieldWithDefault "resource_attributes" defaultOtelExporterResourceAttributes attrsDoc AC..= _oecResourceAttributes + AC.object "OtelExporterConfig" + $ OtelExporterConfig + <$> optionalField "otlp_traces_endpoint" tracesEndpointDoc + AC..= _oecTracesEndpoint + <*> optionalFieldWithDefault "protocol" defaultOtelExporterProtocol protocolDoc + AC..= _oecProtocol + <*> optionalFieldWithDefault "headers" defaultOtelExporterHeaders headersDoc + AC..= _oecHeaders + <*> optionalFieldWithDefault "resource_attributes" defaultOtelExporterResourceAttributes attrsDoc + AC..= _oecResourceAttributes where tracesEndpointDoc = "Target URL to which the exporter is going to send traces. No default." protocolDoc = "The transport protocol" @@ -191,8 +207,8 @@ instance FromJSON OtelExporterConfig where instance ToJSON OtelExporterConfig where toJSON (OtelExporterConfig otlpTracesEndpoint protocol headers resourceAttributes) = - J.object $ - catMaybes + J.object + $ catMaybes [ ("otlp_traces_endpoint" .=) <$> otlpTracesEndpoint, Just $ "protocol" .= protocol, Just $ "headers" .= headers, @@ -246,8 +262,10 @@ instance HasCodec NameValue where AC.object "OtelNameValue" ( NameValue - <$> requiredField' "name" AC..= nv_name - <*> requiredField' "value" AC..= nv_value + <$> requiredField' "name" + AC..= nv_name + <*> requiredField' "value" + AC..= nv_value ) "Internal helper type for JSON lists of key-value pairs" @@ -283,16 +301,19 @@ newtype OtelBatchSpanProcessorConfig = OtelBatchSpanProcessorConfig instance HasCodec OtelBatchSpanProcessorConfig where codec = - AC.object "OtelBatchSpanProcessorConfig" $ - OtelBatchSpanProcessorConfig - <$> optionalFieldWithDefault "max_export_batch_size" defaultMaxExportBatchSize maxSizeDoc AC..= _obspcMaxExportBatchSize + AC.object "OtelBatchSpanProcessorConfig" + $ OtelBatchSpanProcessorConfig + <$> optionalFieldWithDefault "max_export_batch_size" defaultMaxExportBatchSize maxSizeDoc + AC..= _obspcMaxExportBatchSize where maxSizeDoc = "The maximum batch size of every export. It must be smaller or equal to maxQueueSize (not yet configurable). Default 512." instance FromJSON OtelBatchSpanProcessorConfig where parseJSON = J.withObject "OtelBatchSpanProcessorConfig" $ \o -> OtelBatchSpanProcessorConfig - <$> o .:? "max_export_batch_size" .!= defaultMaxExportBatchSize + <$> o + .:? 
"max_export_batch_size" + .!= defaultMaxExportBatchSize instance ToJSON OtelBatchSpanProcessorConfig where toJSON (OtelBatchSpanProcessorConfig maxExportBatchSize) = diff --git a/server/src-lib/Hasura/RQL/Types/Permission.hs b/server/src-lib/Hasura/RQL/Types/Permission.hs index 411113ba6a6..e83a073207f 100644 --- a/server/src-lib/Hasura/RQL/Types/Permission.hs +++ b/server/src-lib/Hasura/RQL/Types/Permission.hs @@ -63,8 +63,9 @@ instance NFData PermType instance Hashable PermType instance PG.FromCol PermType where - fromCol bs = flip PG.fromColHelper bs $ - PD.enum $ \case + fromCol bs = flip PG.fromColHelper bs + $ PD.enum + $ \case "insert" -> Just PTInsert "update" -> Just PTUpdate "select" -> Just PTSelect @@ -135,19 +136,19 @@ data PermDefPermission (b :: BackendType) (perm :: BackendType -> Type) where UpdPerm' :: UpdPerm b -> PermDefPermission b UpdPerm DelPerm' :: DelPerm b -> PermDefPermission b DelPerm -instance Backend b => FromJSON (PermDefPermission b SelPerm) where +instance (Backend b) => FromJSON (PermDefPermission b SelPerm) where parseJSON = fmap SelPerm' . parseJSON -instance Backend b => FromJSON (PermDefPermission b InsPerm) where +instance (Backend b) => FromJSON (PermDefPermission b InsPerm) where parseJSON = fmap InsPerm' . parseJSON -instance Backend b => FromJSON (PermDefPermission b UpdPerm) where +instance (Backend b) => FromJSON (PermDefPermission b UpdPerm) where parseJSON = fmap UpdPerm' . parseJSON -instance Backend b => FromJSON (PermDefPermission b DelPerm) where +instance (Backend b) => FromJSON (PermDefPermission b DelPerm) where parseJSON = fmap DelPerm' . parseJSON -instance Backend b => ToJSON (PermDefPermission b perm) where +instance (Backend b) => ToJSON (PermDefPermission b perm) where toJSON = \case SelPerm' p -> toJSON p InsPerm' p -> toJSON p @@ -157,9 +158,9 @@ instance Backend b => ToJSON (PermDefPermission b perm) where instance (Backend b, HasCodec (perm b), IsPerm perm) => HasCodec (PermDefPermission b perm) where codec = dimapCodec mkPermDefPermission unPermDefPermission codec -deriving stock instance Backend b => Show (PermDefPermission b perm) +deriving stock instance (Backend b) => Show (PermDefPermission b perm) -deriving stock instance Backend b => Eq (PermDefPermission b perm) +deriving stock instance (Backend b) => Eq (PermDefPermission b perm) ----------------------------- @@ -200,7 +201,7 @@ reflectPermDefPermission = \case instance (Backend b, ToJSON (perm b)) => ToJSON (PermDef b perm) where toJSON = object . 
toAesonPairs -instance Backend b => ToAesonPairs (PermDef b perm) where +instance (Backend b) => ToAesonPairs (PermDef b perm) where toAesonPairs (PermDef rn perm comment) = [ "role" .= rn, "permission" .= perm, @@ -209,11 +210,14 @@ instance Backend b => ToAesonPairs (PermDef b perm) where instance (Backend b, HasCodec (perm b), IsPerm perm) => HasCodec (PermDef b perm) where codec = - AC.object (backendPrefix @b <> T.toTitle (permTypeToCode (permType @perm)) <> "PermDef") $ - PermDef - <$> requiredField' "role" .== _pdRole - <*> requiredField' "permission" .== _pdPermission - <*> optionalFieldOrNull' "comment" .== _pdComment + AC.object (backendPrefix @b <> T.toTitle (permTypeToCode (permType @perm)) <> "PermDef") + $ PermDef + <$> requiredField' "role" + .== _pdRole + <*> requiredField' "permission" + .== _pdPermission + <*> optionalFieldOrNull' "comment" + .== _pdComment where (.==) = (AC..=) @@ -232,9 +236,10 @@ instance ToJSON QueryRootFieldType where instance HasCodec QueryRootFieldType where codec = - stringConstCodec $ - NonEmpty.fromList $ - (\x -> (x, T.pack $ snakeCase $ drop 4 $ show x)) <$> [minBound ..] + stringConstCodec + $ NonEmpty.fromList + $ (\x -> (x, T.pack $ snakeCase $ drop 4 $ show x)) + <$> [minBound ..] data SubscriptionRootFieldType = SRFTSelect @@ -252,9 +257,10 @@ instance ToJSON SubscriptionRootFieldType where instance HasCodec SubscriptionRootFieldType where codec = - stringConstCodec $ - NonEmpty.fromList $ - (\x -> (x, T.pack $ snakeCase $ drop 4 $ show x)) <$> [minBound ..] + stringConstCodec + $ NonEmpty.fromList + $ (\x -> (x, T.pack $ snakeCase $ drop 4 $ show x)) + <$> [minBound ..] -- Insert permission data InsPerm (b :: BackendType) = InsPerm @@ -265,25 +271,34 @@ data InsPerm (b :: BackendType) = InsPerm } deriving (Show, Eq, Generic) -instance Backend b => FromJSON (InsPerm b) where +instance (Backend b) => FromJSON (InsPerm b) where parseJSON = withObject "InsPerm" $ \o -> InsPerm - <$> o .: "check" - <*> o .:? "set" - <*> o .:? "columns" - <*> o .:? "backend_only" .!= False + <$> o + .: "check" + <*> o + .:? "set" + <*> o + .:? "columns" + <*> o + .:? 
"backend_only" + .!= False -instance Backend b => ToJSON (InsPerm b) where +instance (Backend b) => ToJSON (InsPerm b) where toJSON = genericToJSON hasuraJSON {omitNothingFields = True} -instance Backend b => HasCodec (InsPerm b) where +instance (Backend b) => HasCodec (InsPerm b) where codec = - AC.object (backendPrefix @b <> "InsPerm") $ - InsPerm - <$> requiredField' "check" AC..= ipCheck - <*> optionalField' "set" AC..= ipSet - <*> optionalField' "columns" AC..= ipColumns - <*> optionalFieldWithDefault' "backend_only" False AC..= ipBackendOnly + AC.object (backendPrefix @b <> "InsPerm") + $ InsPerm + <$> requiredField' "check" + AC..= ipCheck + <*> optionalField' "set" + AC..= ipSet + <*> optionalField' "columns" + AC..= ipColumns + <*> optionalFieldWithDefault' "backend_only" False + AC..= ipBackendOnly type InsPermDef b = PermDef b InsPerm @@ -311,7 +326,7 @@ instance (Hashable rootFieldType, HasCodec rootFieldType) => HasCodec (AllowedRo enc ARFAllowAllRootFields = Nothing enc (ARFAllowConfiguredRootFields fields) = Just $ Set.toList fields -instance Semigroup (HashSet rootFieldType) => Semigroup (AllowedRootFields rootFieldType) where +instance (Semigroup (HashSet rootFieldType)) => Semigroup (AllowedRootFields rootFieldType) where ARFAllowAllRootFields <> _ = ARFAllowAllRootFields _ <> ARFAllowAllRootFields = ARFAllowAllRootFields ARFAllowConfiguredRootFields rfL <> ARFAllowConfiguredRootFields rfR = @@ -340,7 +355,7 @@ data SelPerm (b :: BackendType) = SelPerm } deriving (Show, Eq, Generic) -instance Backend b => ToJSON (SelPerm b) where +instance (Backend b) => ToJSON (SelPerm b) where toJSON SelPerm {..} = let queryRootFieldsPair = case spAllowedQueryRootFields of @@ -356,17 +371,17 @@ instance Backend b => ToJSON (SelPerm b) where case spLimit of Nothing -> mempty Just limit -> ["limit" .= limit] - in object $ - [ "columns" .= spColumns, - "filter" .= spFilter, - "allow_aggregations" .= spAllowAggregations, - "computed_fields" .= spComputedFields - ] - <> queryRootFieldsPair - <> subscriptionRootFieldsPair - <> limitPair + in object + $ [ "columns" .= spColumns, + "filter" .= spFilter, + "allow_aggregations" .= spAllowAggregations, + "computed_fields" .= spComputedFields + ] + <> queryRootFieldsPair + <> subscriptionRootFieldsPair + <> limitPair -instance Backend b => FromJSON (SelPerm b) where +instance (Backend b) => FromJSON (SelPerm b) where parseJSON = do withObject "SelPerm" $ \o -> do queryRootFieldsMaybe <- o .:? "query_root_fields" @@ -382,25 +397,39 @@ instance Backend b => FromJSON (SelPerm b) where Nothing -> pure $ ARFAllowAllRootFields SelPerm - <$> o .: "columns" - <*> o .: "filter" - <*> o .:? "limit" - <*> o .:? "allow_aggregations" .!= False - <*> o .:? "computed_fields" .!= [] + <$> o + .: "columns" + <*> o + .: "filter" + <*> o + .:? "limit" + <*> o + .:? "allow_aggregations" + .!= False + <*> o + .:? 
"computed_fields" + .!= [] <*> pure allowedQueryRootFields <*> pure allowedSubscriptionRootFields -instance Backend b => HasCodec (SelPerm b) where +instance (Backend b) => HasCodec (SelPerm b) where codec = - AC.object (backendPrefix @b <> "SelPerm") $ - SelPerm - <$> requiredField' "columns" AC..= spColumns - <*> requiredField' "filter" AC..= spFilter - <*> optionalField' "limit" AC..= spLimit - <*> optionalFieldWithOmittedDefault' "allow_aggregations" False AC..= spAllowAggregations - <*> optionalFieldWithOmittedDefault' "computed_fields" [] AC..= spComputedFields - <*> optionalFieldWithOmittedDefault' "query_root_fields" ARFAllowAllRootFields AC..= spAllowedQueryRootFields - <*> optionalFieldWithOmittedDefault' "subscription_root_fields" ARFAllowAllRootFields AC..= spAllowedSubscriptionRootFields + AC.object (backendPrefix @b <> "SelPerm") + $ SelPerm + <$> requiredField' "columns" + AC..= spColumns + <*> requiredField' "filter" + AC..= spFilter + <*> optionalField' "limit" + AC..= spLimit + <*> optionalFieldWithOmittedDefault' "allow_aggregations" False + AC..= spAllowAggregations + <*> optionalFieldWithOmittedDefault' "computed_fields" [] + AC..= spComputedFields + <*> optionalFieldWithOmittedDefault' "query_root_fields" ARFAllowAllRootFields + AC..= spAllowedQueryRootFields + <*> optionalFieldWithOmittedDefault' "subscription_root_fields" ARFAllowAllRootFields + AC..= spAllowedSubscriptionRootFields type SelPermDef b = PermDef b SelPerm @@ -411,21 +440,26 @@ data DelPerm (b :: BackendType) = DelPerm } deriving (Show, Eq, Generic) -instance Backend b => FromJSON (DelPerm b) where +instance (Backend b) => FromJSON (DelPerm b) where parseJSON = withObject "DelPerm" $ \o -> DelPerm - <$> o .: "filter" - <*> o .:? "backend_only" .!= False + <$> o + .: "filter" + <*> o + .:? "backend_only" + .!= False -instance Backend b => ToJSON (DelPerm b) where +instance (Backend b) => ToJSON (DelPerm b) where toJSON = genericToJSON hasuraJSON {omitNothingFields = True} -instance Backend b => HasCodec (DelPerm b) where +instance (Backend b) => HasCodec (DelPerm b) where codec = - AC.object (backendPrefix @b <> "DelPerm") $ - DelPerm - <$> requiredField' "filter" .== dcFilter - <*> optionalFieldWithOmittedDefault' "backend_only" False .== dcBackendOnly + AC.object (backendPrefix @b <> "DelPerm") + $ DelPerm + <$> requiredField' "filter" + .== dcFilter + <*> optionalFieldWithOmittedDefault' "backend_only" False + .== dcBackendOnly where (.==) = (AC..=) @@ -446,29 +480,40 @@ data UpdPerm (b :: BackendType) = UpdPerm } deriving (Show, Eq, Generic) -instance Backend b => FromJSON (UpdPerm b) where +instance (Backend b) => FromJSON (UpdPerm b) where parseJSON = withObject "UpdPerm" $ \o -> UpdPerm - <$> o .: "columns" - <*> o .:? "set" - <*> o .: "filter" - <*> o .:? "check" - <*> o .:? "backend_only" .!= False + <$> o + .: "columns" + <*> o + .:? "set" + <*> o + .: "filter" + <*> o + .:? "check" + <*> o + .:? 
"backend_only" + .!= False -instance Backend b => ToJSON (UpdPerm b) where +instance (Backend b) => ToJSON (UpdPerm b) where toJSON = genericToJSON hasuraJSON {omitNothingFields = True} -instance Backend b => HasCodec (UpdPerm b) where +instance (Backend b) => HasCodec (UpdPerm b) where codec = - AC.object (backendPrefix @b <> "UpdPerm") $ - UpdPerm - <$> requiredField "columns" "Allowed columns" AC..= ucColumns - <*> optionalField "set" "Preset columns" AC..= ucSet - <*> requiredField' "filter" AC..= ucFilter + AC.object (backendPrefix @b <> "UpdPerm") + $ UpdPerm + <$> requiredField "columns" "Allowed columns" + AC..= ucColumns + <*> optionalField "set" "Preset columns" + AC..= ucSet + <*> requiredField' "filter" + AC..= ucFilter -- Include @null@ in serialized output for this field because that is -- the way the @toOrdJSON@ serialization is written. - <*> optionalFieldOrIncludedNull' "check" AC..= ucCheck - <*> optionalFieldWithOmittedDefault' "backend_only" False AC..= ucBackendOnly + <*> optionalFieldOrIncludedNull' "check" + AC..= ucCheck + <*> optionalFieldWithOmittedDefault' "backend_only" False + AC..= ucBackendOnly type UpdPermDef b = PermDef b UpdPerm @@ -477,16 +522,16 @@ type UpdPermDef b = PermDef b UpdPerm -- See https://gitlab.haskell.org/ghc/ghc/-/issues/9813 $(return []) -instance Backend b => FromJSON (PermDef b SelPerm) where +instance (Backend b) => FromJSON (PermDef b SelPerm) where parseJSON = genericParseJSON hasuraJSON -instance Backend b => FromJSON (PermDef b InsPerm) where +instance (Backend b) => FromJSON (PermDef b InsPerm) where parseJSON = genericParseJSON hasuraJSON -instance Backend b => FromJSON (PermDef b UpdPerm) where +instance (Backend b) => FromJSON (PermDef b UpdPerm) where parseJSON = genericParseJSON hasuraJSON -instance Backend b => FromJSON (PermDef b DelPerm) where +instance (Backend b) => FromJSON (PermDef b DelPerm) where parseJSON = genericParseJSON hasuraJSON $(makeLenses ''PermDef) diff --git a/server/src-lib/Hasura/RQL/Types/QueryCollection.hs b/server/src-lib/Hasura/RQL/Types/QueryCollection.hs index 4248bed1dbe..aaf453f4be1 100644 --- a/server/src-lib/Hasura/RQL/Types/QueryCollection.hs +++ b/server/src-lib/Hasura/RQL/Types/QueryCollection.hs @@ -103,10 +103,12 @@ instance Hashable ListedQuery instance HasCodec ListedQuery where codec = - AC.object "ListedQuery" $ - ListedQuery - <$> requiredField' "name" AC..= _lqName - <*> requiredField' "query" AC..= _lqQuery + AC.object "ListedQuery" + $ ListedQuery + <$> requiredField' "name" + AC..= _lqName + <*> requiredField' "query" + AC..= _lqQuery instance FromJSON ListedQuery where parseJSON = genericParseJSON hasuraJSON @@ -121,9 +123,10 @@ newtype CollectionDef = CollectionDef instance HasCodec CollectionDef where codec = - AC.object "CollectionDef" $ - CollectionDef - <$> requiredField' "queries" AC..= _cdQueries + AC.object "CollectionDef" + $ CollectionDef + <$> requiredField' "queries" + AC..= _cdQueries instance FromJSON CollectionDef where parseJSON = genericParseJSON hasuraJSON @@ -143,11 +146,14 @@ data CreateCollection = CreateCollection instance HasCodec CreateCollection where codec = - AC.object "CreateCollection" $ - CreateCollection - <$> requiredField' "name" AC..= _ccName - <*> requiredField' "definition" AC..= _ccDefinition - <*> optionalField' "comment" AC..= _ccComment + AC.object "CreateCollection" + $ CreateCollection + <$> requiredField' "name" + AC..= _ccName + <*> requiredField' "definition" + AC..= _ccDefinition + <*> optionalField' "comment" + AC..= _ccComment 
instance FromJSON CreateCollection where parseJSON = genericParseJSON hasuraJSON diff --git a/server/src-lib/Hasura/RQL/Types/Relationships/Local.hs b/server/src-lib/Hasura/RQL/Types/Relationships/Local.hs index c5a3e07ffa8..2a879aad763 100644 --- a/server/src-lib/Hasura/RQL/Types/Relationships/Local.hs +++ b/server/src-lib/Hasura/RQL/Types/Relationships/Local.hs @@ -52,11 +52,14 @@ instance (HasCodec a, Typeable a) => HasCodec (RelDef a) where instance (HasCodec a) => HasObjectCodec (RelDef a) where objectCodec = RelDef - <$> requiredField' "name" AC..= _rdName - <*> requiredField' "using" AC..= _rdUsing - <*> optionalField' "comment" AC..= _rdComment + <$> requiredField' "name" + AC..= _rdName + <*> requiredField' "using" + AC..= _rdUsing + <*> optionalField' "comment" + AC..= _rdComment -instance FromJSON a => FromJSON (RelDef a) where +instance (FromJSON a) => FromJSON (RelDef a) where parseJSON = genericParseJSON hasuraJSON {omitNothingFields = True} $(makeLenses ''RelDef) @@ -78,9 +81,9 @@ data RelManualTableConfig (b :: BackendType) = RelManualTableConfig deriving (Generic) deriving (FromJSON, ToJSON) via AC.Autodocodec (RelManualTableConfig b) -deriving instance Backend b => Eq (RelManualTableConfig b) +deriving instance (Backend b) => Eq (RelManualTableConfig b) -deriving instance Backend b => Show (RelManualTableConfig b) +deriving instance (Backend b) => Show (RelManualTableConfig b) data RelManualNativeQueryConfig (b :: BackendType) = RelManualNativeQueryConfig { rmnNativeQueryName :: NativeQueryName, @@ -88,9 +91,9 @@ data RelManualNativeQueryConfig (b :: BackendType) = RelManualNativeQueryConfig } deriving (Generic) -deriving instance Backend b => Eq (RelManualNativeQueryConfig b) +deriving instance (Backend b) => Eq (RelManualNativeQueryConfig b) -deriving instance Backend b => Show (RelManualNativeQueryConfig b) +deriving instance (Backend b) => Show (RelManualNativeQueryConfig b) data RelManualCommon (b :: BackendType) = RelManualCommon { rmColumns :: HashMap (Column b) (Column b), @@ -98,29 +101,35 @@ data RelManualCommon (b :: BackendType) = RelManualCommon } deriving (Generic) -deriving instance Backend b => Eq (RelManualCommon b) +deriving instance (Backend b) => Eq (RelManualCommon b) -deriving instance Backend b => Show (RelManualCommon b) +deriving instance (Backend b) => Show (RelManualCommon b) instance (Backend b) => HasCodec (RelManualTableConfig b) where codec = - AC.object (backendPrefix @b <> "RelManualTableConfig") $ - RelManualTableConfig - <$> requiredField' "remote_table" AC..= rmtTable - <*> AC.objectCodec AC..= rmtCommon + AC.object (backendPrefix @b <> "RelManualTableConfig") + $ RelManualTableConfig + <$> requiredField' "remote_table" + AC..= rmtTable + <*> AC.objectCodec + AC..= rmtCommon instance (Backend b) => HasCodec (RelManualNativeQueryConfig b) where codec = - AC.object (backendPrefix @b <> "RelManualNativeQueryConfig") $ - RelManualNativeQueryConfig - <$> requiredField' "remote_native_query" AC..= rmnNativeQueryName - <*> AC.objectCodec AC..= rmnCommon + AC.object (backendPrefix @b <> "RelManualNativeQueryConfig") + $ RelManualNativeQueryConfig + <$> requiredField' "remote_native_query" + AC..= rmnNativeQueryName + <*> AC.objectCodec + AC..= rmnCommon instance (Backend b) => AC.HasObjectCodec (RelManualCommon b) where objectCodec = RelManualCommon - <$> requiredField' "column_mapping" AC..= rmColumns - <*> optionalFieldOrIncludedNull' "insertion_order" AC..= rmInsertOrder + <$> requiredField' "column_mapping" + AC..= rmColumns + <*> 
optionalFieldOrIncludedNull' "insertion_order" + AC..= rmInsertOrder data RelUsing (b :: BackendType) a = RUFKeyOn a @@ -131,12 +140,12 @@ instance (Backend b, HasCodec a, Typeable a) => HasCodec (RelUsing b a) where codec = dimapCodec dec enc $ disjointEitherCodec fkCodec manualCodec where fkCodec = - AC.object ("RUFKeyOn_" <> typeableName @a) $ - requiredField' "foreign_key_constraint_on" + AC.object ("RUFKeyOn_" <> typeableName @a) + $ requiredField' "foreign_key_constraint_on" manualCodec = - AC.object (backendPrefix @b <> "RUManual") $ - requiredField' "manual_configuration" + AC.object (backendPrefix @b <> "RUManual") + $ requiredField' "manual_configuration" dec = either RUFKeyOn RUManual enc (RUFKeyOn fkey) = Left fkey @@ -167,9 +176,9 @@ data ArrRelUsingFKeyOn (b :: BackendType) = ArrRelUsingFKeyOn } deriving (Generic) -deriving instance Backend b => Eq (ArrRelUsingFKeyOn b) +deriving instance (Backend b) => Eq (ArrRelUsingFKeyOn b) -deriving instance Backend b => Show (ArrRelUsingFKeyOn b) +deriving instance (Backend b) => Show (ArrRelUsingFKeyOn b) -- TODO: This has to move to a common module data WithTable b a = WithTable @@ -185,8 +194,11 @@ deriving instance (Backend b, Eq a) => Eq (WithTable b a) instance (FromJSON a, Backend b) => FromJSON (WithTable b a) where parseJSON v@(Object o) = WithTable - <$> o .:? "source" .!= defaultSource - <*> o .: "table" + <$> o + .:? "source" + .!= defaultSource + <*> o + .: "table" <*> parseJSON v parseJSON _ = fail "expecting an Object with key 'table'" @@ -200,9 +212,9 @@ data ObjRelUsingChoice b | RemoteTable (TableName b) (NonEmpty (Column b)) deriving (Generic) -deriving instance Backend b => Eq (ObjRelUsingChoice b) +deriving instance (Backend b) => Eq (ObjRelUsingChoice b) -deriving instance Backend b => Show (ObjRelUsingChoice b) +deriving instance (Backend b) => Show (ObjRelUsingChoice b) instance (Backend b) => HasCodec (ObjRelUsingChoice b) where codec = dimapCodec dec enc $ disjointEitherCodec sameTableCodec remoteTableCodec @@ -212,8 +224,9 @@ instance (Backend b) => HasCodec (ObjRelUsingChoice b) where remoteTableCodec :: AC.JSONCodec (Either (TableName b, Column b) (TableName b, NonEmpty (Column b))) remoteTableCodec = - singleOrMultipleRelColumnsCodec @b $ - backendPrefix @b <> "ObjRelRemoteTable" + singleOrMultipleRelColumnsCodec @b + $ backendPrefix @b + <> "ObjRelRemoteTable" dec = \case Left (Left col) -> SameTable $ pure col @@ -229,7 +242,7 @@ instance (Backend b) => HasCodec (ObjRelUsingChoice b) where singleOrMultipleRelColumnsCodec :: forall b. 
- Backend b => + (Backend b) => Text -> AC.JSONCodec ( Either @@ -238,15 +251,19 @@ singleOrMultipleRelColumnsCodec :: ) singleOrMultipleRelColumnsCodec codecName = disjointEitherCodec - ( AC.object (codecName <> "SingleColumn") $ - (,) - <$> requiredField' "table" AC..= fst - <*> requiredField' "column" AC..= snd + ( AC.object (codecName <> "SingleColumn") + $ (,) + <$> requiredField' "table" + AC..= fst + <*> requiredField' "column" + AC..= snd ) - ( AC.object (codecName <> "MultipleColumns") $ - (,) - <$> requiredField' "table" AC..= fst - <*> requiredField' "columns" AC..= snd + ( AC.object (codecName <> "MultipleColumns") + $ (,) + <$> requiredField' "table" + AC..= fst + <*> requiredField' "columns" + AC..= snd ) instance (Backend b) => ToJSON (ObjRelUsingChoice b) where @@ -283,10 +300,10 @@ instance (Backend b) => FromJSON (ObjRelUsingChoice b) where v@(Array _) -> parseJSON v _ -> fail "Expected string or array" -instance Backend b => HasCodec (ArrRelUsingFKeyOn b) where +instance (Backend b) => HasCodec (ArrRelUsingFKeyOn b) where codec = - dimapCodec dec enc $ - singleOrMultipleRelColumnsCodec @b (backendPrefix @b <> "ArrRelUsingFKeyOn") + dimapCodec dec enc + $ singleOrMultipleRelColumnsCodec @b (backendPrefix @b <> "ArrRelUsingFKeyOn") where dec :: (Either (TableName b, Column b) (TableName b, NonEmpty (Column b))) -> ArrRelUsingFKeyOn b dec = \case @@ -300,11 +317,11 @@ instance Backend b => HasCodec (ArrRelUsingFKeyOn b) where instance (Backend b) => ToJSON (ArrRelUsingFKeyOn b) where toJSON ArrRelUsingFKeyOn {arufTable = _arufTable, arufColumns = _arufColumns} = - object $ - ("table" .= _arufTable) - : case _arufColumns of - col :| [] -> ["column" .= col] - cols -> ["columns" .= cols] + object + $ ("table" .= _arufTable) + : case _arufColumns of + col :| [] -> ["column" .= col] + cols -> ["columns" .= cols] instance (Backend b) => FromJSON (ArrRelUsingFKeyOn b) where parseJSON = \case @@ -340,13 +357,13 @@ data RelTarget b | RelTargetNativeQuery NativeQueryName deriving (Generic) -deriving instance Backend b => Eq (RelTarget b) +deriving instance (Backend b) => Eq (RelTarget b) -deriving instance Backend b => Show (RelTarget b) +deriving instance (Backend b) => Show (RelTarget b) -instance Backend b => NFData (RelTarget b) +instance (Backend b) => NFData (RelTarget b) -instance Backend b => Hashable (RelTarget b) +instance (Backend b) => Hashable (RelTarget b) instance (Backend b) => FromJSON (RelTarget b) where parseJSON = genericParseJSON hasuraJSON @@ -366,13 +383,13 @@ data RelInfo (b :: BackendType) = RelInfo } deriving (Generic) -deriving instance Backend b => Show (RelInfo b) +deriving instance (Backend b) => Show (RelInfo b) -deriving instance Backend b => Eq (RelInfo b) +deriving instance (Backend b) => Eq (RelInfo b) -instance Backend b => NFData (RelInfo b) +instance (Backend b) => NFData (RelInfo b) -instance Backend b => Hashable (RelInfo b) +instance (Backend b) => Hashable (RelInfo b) instance (Backend b) => FromJSON (RelInfo b) where parseJSON = genericParseJSON hasuraJSON diff --git a/server/src-lib/Hasura/RQL/Types/Relationships/Remote.hs b/server/src-lib/Hasura/RQL/Types/Relationships/Remote.hs index 4e610ea0ec2..7c06dee25b0 100644 --- a/server/src-lib/Hasura/RQL/Types/Relationships/Remote.hs +++ b/server/src-lib/Hasura/RQL/Types/Relationships/Remote.hs @@ -54,7 +54,8 @@ instance HasCodec RemoteRelationship where instance FromJSON RemoteRelationship where parseJSON = withObject "RemoteRelationship" $ \obj -> RemoteRelationship - <$> obj .: "name" + <$> 
obj + .: "name" <*> (parseRemoteRelationshipDefinition RRPLenient =<< obj .: "definition") -- | Represents the format of the metadata a remote relationship was read from @@ -82,11 +83,12 @@ data RemoteRelationshipDefinition -- See documentation for 'parseRemoteRelationshipDefinition' for why -- this is necessary. instance - TypeError - ( 'ShowType RemoteRelationshipDefinition - ':<>: 'Text " has different JSON representations depending on context;" - ':$$: 'Text "call ‘parseRemoteRelationshipDefinition’ directly instead of relying on ‘FromJSON’" - ) => + ( TypeError + ( 'ShowType RemoteRelationshipDefinition + ':<>: 'Text " has different JSON representations depending on context;" + ':$$: 'Text "call ‘parseRemoteRelationshipDefinition’ directly instead of relying on ‘FromJSON’" + ) + ) => FromJSON RemoteRelationshipDefinition where parseJSON = error "impossible" @@ -131,11 +133,14 @@ remoteRelationshipDefinitionCodec mode = toSchemaOldDBFormat :: JSONCodec ToSchemaRelationshipDef toSchemaOldDBFormat = - AC.object "ToSchemaRelationshipDefLegacyFormat" $ - ToSchemaRelationshipDef - <$> requiredField' "remote_schema" AC..= _trrdRemoteSchema - <*> requiredFieldWith' "hasura_fields" hashSetCodec AC..= _trrdLhsFields - <*> requiredField' "remote_field" AC..= _trrdRemoteField + AC.object "ToSchemaRelationshipDefLegacyFormat" + $ ToSchemaRelationshipDef + <$> requiredField' "remote_schema" + AC..= _trrdRemoteSchema + <*> requiredFieldWith' "hasura_fields" hashSetCodec + AC..= _trrdLhsFields + <*> requiredField' "remote_field" + AC..= _trrdRemoteField -- | Parse 'RemoteRelationshipDefinition' letting the caller decide how lenient to be. -- @@ -223,8 +228,8 @@ parseRemoteRelationshipDefinition mode = withObject ("RemoteRelationshipDefiniti RRPStrict -> ("(strict format)", "to_source, to_remote_schema") invalid = - fail $ - mconcat + fail + $ mconcat [ "remote relationship definition ", suffix, " expects exactly one of: ", @@ -273,11 +278,11 @@ data DBJoinField (b :: BackendType) | JoinComputedField (ScalarComputedField b) deriving (Generic) -deriving instance Backend b => Eq (DBJoinField b) +deriving instance (Backend b) => Eq (DBJoinField b) -deriving instance Backend b => Show (DBJoinField b) +deriving instance (Backend b) => Show (DBJoinField b) -instance Backend b => Hashable (DBJoinField b) +instance (Backend b) => Hashable (DBJoinField b) instance (Backend b) => ToJSON (DBJoinField b) where toJSON = \case @@ -295,13 +300,13 @@ data ScalarComputedField (b :: BackendType) = ScalarComputedField } deriving (Generic) -deriving instance Backend b => Eq (ScalarComputedField b) +deriving instance (Backend b) => Eq (ScalarComputedField b) -deriving instance Backend b => Show (ScalarComputedField b) +deriving instance (Backend b) => Show (ScalarComputedField b) -instance Backend b => Hashable (ScalarComputedField b) +instance (Backend b) => Hashable (ScalarComputedField b) -instance Backend b => ToJSON (ScalarComputedField b) where +instance (Backend b) => ToJSON (ScalarComputedField b) where toJSON ScalarComputedField {..} = object [ "name" .= _scfName, diff --git a/server/src-lib/Hasura/RQL/Types/Relationships/ToSource.hs b/server/src-lib/Hasura/RQL/Types/Relationships/ToSource.hs index ac68cc3da07..6e56dedda95 100644 --- a/server/src-lib/Hasura/RQL/Types/Relationships/ToSource.hs +++ b/server/src-lib/Hasura/RQL/Types/Relationships/ToSource.hs @@ -54,12 +54,16 @@ instance NFData ToSourceRelationshipDef instance HasCodec ToSourceRelationshipDef where codec = - AC.object "ToSourceRelationshipDef" $ - 
ToSourceRelationshipDef - <$> requiredField' "relationship_type" AC..= _tsrdRelationshipType - <*> requiredField' "field_mapping" AC..= _tsrdFieldMapping - <*> requiredField' "source" AC..= _tsrdSource - <*> requiredField' "table" AC..= _tsrdTable + AC.object "ToSourceRelationshipDef" + $ ToSourceRelationshipDef + <$> requiredField' "relationship_type" + AC..= _tsrdRelationshipType + <*> requiredField' "field_mapping" + AC..= _tsrdFieldMapping + <*> requiredField' "source" + AC..= _tsrdSource + <*> requiredField' "table" + AC..= _tsrdTable instance ToJSON ToSourceRelationshipDef where toJSON = genericToJSON hasuraJSON diff --git a/server/src-lib/Hasura/RQL/Types/ResultCustomization.hs b/server/src-lib/Hasura/RQL/Types/ResultCustomization.hs index 6039b95aaf5..36d53235fc5 100644 --- a/server/src-lib/Hasura/RQL/Types/ResultCustomization.hs +++ b/server/src-lib/Hasura/RQL/Types/ResultCustomization.hs @@ -66,8 +66,9 @@ customizeTypeNameString :: HashMap G.Name G.Name -> ResultCustomizer customizeTypeNameString typeNameMap | HashMap.null typeNameMap = mempty customizeTypeNameString typeNameMap = ResultCustomizer $ \_aliasMapping -> \case - JO.String t -> JO.String $ - fromMaybe t $ do + JO.String t -> JO.String + $ fromMaybe t + $ do -- This function is only meant to be applied on type names, and creating a -- GraphQL name out of the string should never fail. If it nonetheless -- fails, we assume there will not be customization information and we diff --git a/server/src-lib/Hasura/RQL/Types/Roles.hs b/server/src-lib/Hasura/RQL/Types/Roles.hs index d8192f5c895..a9237b1da18 100644 --- a/server/src-lib/Hasura/RQL/Types/Roles.hs +++ b/server/src-lib/Hasura/RQL/Types/Roles.hs @@ -81,10 +81,12 @@ instance Hashable Role instance HasCodec Role where codec = - AC.object "Role" $ - Role - <$> requiredField' "role_name" AC..= _rRoleName - <*> requiredField' "role_set" AC..= _rParentRoles + AC.object "Role" + $ Role + <$> requiredField' "role_name" + AC..= _rRoleName + <*> requiredField' "role_set" + AC..= _rParentRoles instance ToJSON Role where toJSON (Role roleName parentRoles) = diff --git a/server/src-lib/Hasura/RQL/Types/Roles/Internal.hs b/server/src-lib/Hasura/RQL/Types/Roles/Internal.hs index 1981b108a42..9c32579990d 100644 --- a/server/src-lib/Hasura/RQL/Types/Roles/Internal.hs +++ b/server/src-lib/Hasura/RQL/Types/Roles/Internal.hs @@ -172,11 +172,16 @@ instance where (InsPermInfo colsL checkL setL backendOnlyL reqHeadersL) ==~ (InsPermInfo colsR checkR setR backendOnlyR reqHeadersR) = - colsL == colsR - && checkL ==~ checkR - && setL == setR - && backendOnlyL == backendOnlyR - && reqHeadersL == reqHeadersR + colsL + == colsR + && checkL + ==~ checkR + && setL + == setR + && backendOnlyL + == backendOnlyR + && reqHeadersL + == reqHeadersR instance ( Backend b, @@ -186,13 +191,20 @@ instance where (UpdPermInfo colsL tableL filterL checkL setL backendOnlyL reqHeadersL) ==~ (UpdPermInfo colsR tableR filterR checkR setR backendOnlyR reqHeadersR) = - colsL == colsR - && tableL == tableR - && filterL ==~ filterR - && checkL ==~ checkR - && setL == setR - && backendOnlyL == backendOnlyR - && reqHeadersL == reqHeadersR + colsL + == colsR + && tableL + == tableR + && filterL + ==~ filterR + && checkL + ==~ checkR + && setL + == setR + && backendOnlyL + == backendOnlyR + && reqHeadersL + == reqHeadersR instance ( Backend b, @@ -202,10 +214,14 @@ instance where (DelPermInfo tableL filterL backendOnlyL reqHeadersL) ==~ (DelPermInfo tableR filterR backendOnlyR reqHeadersR) = - tableL == tableR - && filterL 
==~ filterR - && backendOnlyL == backendOnlyR - && reqHeadersL == reqHeadersR + tableL + == tableR + && filterL + ==~ filterR + && backendOnlyL + == backendOnlyR + && reqHeadersL + == reqHeadersR instance OnlyRelevantEq RemoteSchemaInputValueDefinition where RemoteSchemaInputValueDefinition defnL presetL @@ -220,10 +236,14 @@ instance OnlyRelevantEq RemoteSchemaIntrospection where instance OnlyRelevantEq IntrospectionResult where IntrospectionResult (RemoteSchemaIntrospection typeDefnsL) queryRootL mutationRootL subsRootL ==~ IntrospectionResult (RemoteSchemaIntrospection typeDefnsR) queryRootR mutationRootR subsRootR = - sort (HashMap.elems typeDefnsL) ==~ sort (HashMap.elems typeDefnsR) - && queryRootL == queryRootR - && mutationRootL == mutationRootR - && subsRootL == subsRootR + sort (HashMap.elems typeDefnsL) + ==~ sort (HashMap.elems typeDefnsR) + && queryRootL + == queryRootR + && mutationRootL + == mutationRootR + && subsRootL + == subsRootR instance (OnlyRelevantEq a) => OnlyRelevantEq (Maybe a) where (==~) l r = @@ -248,40 +268,58 @@ instance OnlyRelevantEq G.ScalarTypeDefinition where instance (OnlyRelevantEq a, Ord a) => OnlyRelevantEq (G.FieldDefinition a) where G.FieldDefinition _descL nameL argumentsL typeL directivesL ==~ G.FieldDefinition _descR nameR argumentsR typeR directivesR = - nameL == nameR - && sort argumentsL ==~ sort argumentsR - && typeL == typeR - && Set.fromList directivesL == Set.fromList directivesR + nameL + == nameR + && sort argumentsL + ==~ sort argumentsR + && typeL + == typeR + && Set.fromList directivesL + == Set.fromList directivesR instance (OnlyRelevantEq a, Ord a) => OnlyRelevantEq (G.ObjectTypeDefinition a) where G.ObjectTypeDefinition _descL nameL implementsInterfacesL directivesL fieldDefnsL ==~ G.ObjectTypeDefinition _descR nameR implementsInterfacesR directivesR fieldDefnsR = - nameL == nameR - && Set.fromList implementsInterfacesL == Set.fromList implementsInterfacesR - && Set.fromList directivesL == Set.fromList directivesR - && sort fieldDefnsL ==~ sort fieldDefnsR + nameL + == nameR + && Set.fromList implementsInterfacesL + == Set.fromList implementsInterfacesR + && Set.fromList directivesL + == Set.fromList directivesR + && sort fieldDefnsL + ==~ sort fieldDefnsR instance (OnlyRelevantEq a, Ord a) => OnlyRelevantEq (G.InterfaceTypeDefinition [G.Name] a) where G.InterfaceTypeDefinition _descL nameL directivesL fieldDefnsL possibleTypesL ==~ G.InterfaceTypeDefinition _descR nameR directivesR fieldDefnsR possibleTypesR = - nameL == nameR - && Set.fromList directivesL == Set.fromList directivesR - && sort fieldDefnsL ==~ sort fieldDefnsR - && Set.fromList possibleTypesL == Set.fromList possibleTypesR + nameL + == nameR + && Set.fromList directivesL + == Set.fromList directivesR + && sort fieldDefnsL + ==~ sort fieldDefnsR + && Set.fromList possibleTypesL + == Set.fromList possibleTypesR instance OnlyRelevantEq G.UnionTypeDefinition where G.UnionTypeDefinition _descL nameL directivesL membersL ==~ G.UnionTypeDefinition _descR nameR directivesR membersR = - nameL == nameR - && Set.fromList directivesL == Set.fromList directivesR - && Set.fromList membersL == Set.fromList membersR + nameL + == nameR + && Set.fromList directivesL + == Set.fromList directivesR + && Set.fromList membersL + == Set.fromList membersR instance (OnlyRelevantEq a, Ord a) => OnlyRelevantEq (G.InputObjectTypeDefinition a) where G.InputObjectTypeDefinition _descL nameL directivesL defnsL ==~ G.InputObjectTypeDefinition _descR nameR directivesR defnsR = - nameL == nameR - 
&& Set.fromList directivesL == Set.fromList directivesR - && sort defnsL ==~ sort defnsR + nameL + == nameR + && Set.fromList directivesL + == Set.fromList directivesR + && sort defnsL + ==~ sort defnsR instance OnlyRelevantEq G.EnumValueDefinition where G.EnumValueDefinition _descL nameL directivesL @@ -291,9 +329,12 @@ instance OnlyRelevantEq G.EnumValueDefinition where instance OnlyRelevantEq G.EnumTypeDefinition where G.EnumTypeDefinition _descL nameL directivesL valueDefnsL ==~ G.EnumTypeDefinition _descR nameR directivesR valueDefnsR = - nameL == nameR - && Set.fromList directivesL == Set.fromList directivesR - && sort valueDefnsL ==~ sort valueDefnsR + nameL + == nameR + && Set.fromList directivesL + == Set.fromList directivesR + && sort valueDefnsL + ==~ sort valueDefnsR instance (OnlyRelevantEq a, Ord a) => OnlyRelevantEq (G.TypeDefinition [G.Name] a) where G.TypeDefinitionScalar scalarDefnL ==~ G.TypeDefinitionScalar scalarDefnR = scalarDefnL ==~ scalarDefnR @@ -307,10 +348,14 @@ instance (OnlyRelevantEq a, Ord a) => OnlyRelevantEq (G.TypeDefinition [G.Name] instance OnlyRelevantEq G.InputValueDefinition where G.InputValueDefinition _descL nameL typeL defaultValueL directivesL ==~ G.InputValueDefinition _descR nameR typeR defaultValueR directivesR = - nameL == nameR - && typeL == typeR - && defaultValueL == defaultValueR - && Set.fromList directivesL == Set.fromList directivesR + nameL + == nameR + && typeL + == typeR + && defaultValueL + == defaultValueR + && Set.fromList directivesL + == Set.fromList directivesR maybeToCheckPermission :: Maybe a -> CheckPermission a maybeToCheckPermission = maybe CPUndefined CPDefined diff --git a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs index 5ea76ec7ff5..b160e9fbad7 100644 --- a/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs +++ b/server/src-lib/Hasura/RQL/Types/ScheduledTrigger.hs @@ -100,12 +100,16 @@ instance NFData STRetryConf instance HasCodec STRetryConf where codec = - AC.object "STRetryConf" $ - STRetryConf - <$> optionalFieldWithDefaultWith' "num_retries" nonNegativeCodec (strcNumRetries defaultSTRetryConf) AC..= strcNumRetries - <*> optionalFieldWithDefaultWith' "retry_interval_seconds" refinedCodec (strcRetryIntervalSeconds defaultSTRetryConf) AC..= strcRetryIntervalSeconds - <*> optionalFieldWithDefaultWith' "timeout_seconds" refinedCodec (strcTimeoutSeconds defaultSTRetryConf) AC..= strcTimeoutSeconds - <*> optionalFieldWithDefaultWith' "tolerance_seconds" refinedCodec (strcToleranceSeconds defaultSTRetryConf) AC..= strcToleranceSeconds + AC.object "STRetryConf" + $ STRetryConf + <$> optionalFieldWithDefaultWith' "num_retries" nonNegativeCodec (strcNumRetries defaultSTRetryConf) + AC..= strcNumRetries + <*> optionalFieldWithDefaultWith' "retry_interval_seconds" refinedCodec (strcRetryIntervalSeconds defaultSTRetryConf) + AC..= strcRetryIntervalSeconds + <*> optionalFieldWithDefaultWith' "timeout_seconds" refinedCodec (strcTimeoutSeconds defaultSTRetryConf) + AC..= strcTimeoutSeconds + <*> optionalFieldWithDefaultWith' "tolerance_seconds" refinedCodec (strcToleranceSeconds defaultSTRetryConf) + AC..= strcToleranceSeconds where nonNegativeCodec = bimapCodec validateNonNegative id codec validateNonNegative n = @@ -148,18 +152,28 @@ instance NFData CronTriggerMetadata instance HasCodec CronTriggerMetadata where codec = - AC.object "CronTriggerMetadata" $ - CronTriggerMetadata - <$> requiredField' "name" AC..= ctName - <*> requiredField' "webhook" AC..= ctWebhook - 
<*> requiredField' "schedule" AC..= ctSchedule - <*> optionalField' "payload" AC..= ctPayload - <*> optionalFieldWithOmittedDefault' "retry_conf" defaultSTRetryConf AC..= ctRetryConf - <*> optionalFieldWithOmittedDefault' "headers" [] AC..= ctHeaders - <*> requiredField' "include_in_metadata" AC..= ctIncludeInMetadata - <*> optionalField' "comment" AC..= ctComment - <*> optionalField' "request_transform" AC..= ctRequestTransform - <*> optionalField' "response_transform" AC..= ctResponseTransform + AC.object "CronTriggerMetadata" + $ CronTriggerMetadata + <$> requiredField' "name" + AC..= ctName + <*> requiredField' "webhook" + AC..= ctWebhook + <*> requiredField' "schedule" + AC..= ctSchedule + <*> optionalField' "payload" + AC..= ctPayload + <*> optionalFieldWithOmittedDefault' "retry_conf" defaultSTRetryConf + AC..= ctRetryConf + <*> optionalFieldWithOmittedDefault' "headers" [] + AC..= ctHeaders + <*> requiredField' "include_in_metadata" + AC..= ctIncludeInMetadata + <*> optionalField' "comment" + AC..= ctComment + <*> optionalField' "request_transform" + AC..= ctRequestTransform + <*> optionalField' "response_transform" + AC..= ctResponseTransform instance FromJSON CronTriggerMetadata where parseJSON = @@ -251,14 +265,24 @@ instance FromJSON CreateScheduledEvent where parseJSON = withObject "CreateScheduledEvent" $ \o -> CreateScheduledEvent - <$> o .: "webhook" - <*> o .: "schedule_at" - <*> o .:? "payload" - <*> o .:? "headers" .!= [] - <*> o .:? "retry_conf" .!= defaultSTRetryConf - <*> o .:? "comment" - <*> o .:? "request_transform" - <*> o .:? "response_transform" + <$> o + .: "webhook" + <*> o + .: "schedule_at" + <*> o + .:? "payload" + <*> o + .:? "headers" + .!= [] + <*> o + .:? "retry_conf" + .!= defaultSTRetryConf + <*> o + .:? "comment" + <*> o + .:? "request_transform" + <*> o + .:? "response_transform" instance ToJSON CreateScheduledEvent where toJSON = genericToJSON hasuraJSON @@ -366,10 +390,11 @@ instance ToJSON ScheduledEventStatus where instance FromJSON ScheduledEventStatus where parseJSON = withText "String" $ \s -> - onNothing (textToScheduledEventStatus s) $ - fail $ - T.unpack $ - "unexpected status: " <> s + onNothing (textToScheduledEventStatus s) + $ fail + $ T.unpack + $ "unexpected status: " + <> s data OneOffScheduledEvent = OneOffScheduledEvent { _ooseId :: OneOffScheduledEventId, @@ -426,8 +451,10 @@ data ScheduledEventPagination = ScheduledEventPagination parseScheduledEventPagination :: Object -> Parser ScheduledEventPagination parseScheduledEventPagination o = ScheduledEventPagination - <$> o .:? "limit" - <*> o .:? "offset" + <$> o + .:? "limit" + <*> o + .:? "offset" scheduledEventPaginationToPairs :: ScheduledEventPagination -> [Pair] scheduledEventPaginationToPairs ScheduledEventPagination {..} = @@ -453,20 +480,24 @@ data GetScheduledEvents = GetScheduledEvents instance ToJSON GetScheduledEvents where toJSON GetScheduledEvents {..} = - object $ - scheduledEventToPairs _gseScheduledEvent - <> scheduledEventPaginationToPairs _gsePagination - <> [ "status" .= _gseStatus, - "get_rows_count" .= _gseGetRowsCount - ] + object + $ scheduledEventToPairs _gseScheduledEvent + <> scheduledEventPaginationToPairs _gsePagination + <> [ "status" .= _gseStatus, + "get_rows_count" .= _gseGetRowsCount + ] instance FromJSON GetScheduledEvents where parseJSON = withObject "GetScheduledEvents" $ \o -> GetScheduledEvents <$> parseScheduledEvent o <*> parseScheduledEventPagination o - <*> o .:? "status" .!= [] - <*> o .:? 
"get_rows_count" .!= DontIncludeRowsCount + <*> o + .:? "status" + .!= [] + <*> o + .:? "get_rows_count" + .!= DontIncludeRowsCount data WithOptionalTotalCount a = WithOptionalTotalCount { _wtcCount :: Maybe Int, @@ -507,15 +538,17 @@ instance FromJSON GetScheduledEventInvocations where GetScheduledEventInvocations <$> (parseEventId o <|> (GIBEvent <$> parseScheduledEvent o)) <*> parseScheduledEventPagination o - <*> o .:? "get_rows_count" .!= DontIncludeRowsCount + <*> o + .:? "get_rows_count" + .!= DontIncludeRowsCount where parseEventId o = GIBEventId <$> o .: "event_id" <*> o .: "type" instance ToJSON GetScheduledEventInvocations where toJSON GetScheduledEventInvocations {..} = - object $ - case _geiInvocationsBy of + object + $ case _geiInvocationsBy of GIBEventId eventId eventType -> ["event_id" .= eventId, "type" .= eventType] GIBEvent event -> scheduledEventToPairs event diff --git a/server/src-lib/Hasura/RQL/Types/Schema/Options.hs b/server/src-lib/Hasura/RQL/Types/Schema/Options.hs index e6b85779ccb..b247a2548ad 100644 --- a/server/src-lib/Hasura/RQL/Types/Schema/Options.hs +++ b/server/src-lib/Hasura/RQL/Types/Schema/Options.hs @@ -72,8 +72,9 @@ data DangerouslyCollapseBooleans instance FromJSON DangerouslyCollapseBooleans where parseJSON = - withBool "DangerouslyCollapseBooleans" $ - pure . \case + withBool "DangerouslyCollapseBooleans" + $ pure + . \case True -> DangerouslyCollapseBooleans False -> Don'tDangerouslyCollapseBooleans @@ -92,8 +93,9 @@ data InferFunctionPermissions instance FromJSON InferFunctionPermissions where parseJSON = - withBool "InferFunctionPermissions" $ - pure . \case + withBool "InferFunctionPermissions" + $ pure + . \case True -> InferFunctionPermissions False -> Don'tInferFunctionPermissions @@ -111,8 +113,9 @@ data RemoteSchemaPermissions instance FromJSON RemoteSchemaPermissions where parseJSON = - withBool "RemoteSchemaPermissions" $ - pure . \case + withBool "RemoteSchemaPermissions" + $ pure + . \case True -> EnableRemoteSchemaPermissions False -> DisableRemoteSchemaPermissions diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs index 43b47d219e3..7ee7f2e36cc 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs @@ -210,9 +210,9 @@ mkLogicalModelParentDep :: mkLogicalModelParentDep source logicalModelName = do let sourceObject :: SchemaObjId sourceObject = - SOSourceObj source $ - AB.mkAnyBackend @b $ - SOILogicalModel logicalModelName + SOSourceObj source + $ AB.mkAnyBackend @b + $ SOILogicalModel logicalModelName SchemaDependency sourceObject DRTable @@ -243,10 +243,10 @@ mkLogicalModelColDep :: mkLogicalModelColDep reason source logicalModelName column = do let sourceObject :: SchemaObjId sourceObject = - SOSourceObj source $ - AB.mkAnyBackend $ - SOILogicalModelObj @b logicalModelName $ - LMOCol @b column + SOSourceObj source + $ AB.mkAnyBackend + $ SOILogicalModelObj @b logicalModelName + $ LMOCol @b column SchemaDependency sourceObject reason @@ -339,17 +339,20 @@ askSourceInfo sourceName = do metadata <- getMetadata maybe -- 2. 
The named source exists but does not match the expected type - ( throw400 NotExists $ - "source with name " - <> sourceName <<> " has backend type " - <> T.toTxt (AB.lowerTag matchingNameSourceInfo) - <<> " which does not match the expected type " - <> T.toTxt (reify $ backendTag @b) + ( throw400 NotExists + $ "source with name " + <> sourceName + <<> " has backend type " + <> T.toTxt (AB.lowerTag matchingNameSourceInfo) + <<> " which does not match the expected type " + <> T.toTxt (reify $ backendTag @b) ) -- 3. The named source exists, and is of the expected type, but is inconsistent - ( const $ - throw400 Unexpected $ - "source with name " <> sourceName <<> " is inconsistent" + ( const + $ throw400 Unexpected + $ "source with name " + <> sourceName + <<> " is inconsistent" ) (metadata ^. metaSources . at sourceName) @@ -436,9 +439,12 @@ askTableInfo :: m (TableInfo b) askTableInfo sourceName tableName = do rawSchemaCache <- askSchemaCache - onNothing (unsafeTableInfo sourceName tableName $ scSources rawSchemaCache) $ - throw400 NotExists $ - "table " <> tableName <<> " does not exist in source: " <> sourceNameToText sourceName + onNothing (unsafeTableInfo sourceName tableName $ scSources rawSchemaCache) + $ throw400 NotExists + $ "table " + <> tableName + <<> " does not exist in source: " + <> sourceNameToText sourceName -- | Similar to 'askTableInfo', but drills further down to extract the -- underlying core info. @@ -477,9 +483,12 @@ askTableMetadata :: TableName b -> m (TableMetadata b) askTableMetadata sourceName tableName = do - onNothingM (getMetadata <&> preview focusTableMetadata) $ - throw400 NotExists $ - "table " <> tableName <<> " does not exist in source: " <> sourceNameToText sourceName + onNothingM (getMetadata <&> preview focusTableMetadata) + $ throw400 NotExists + $ "table " + <> tableName + <<> " does not exist in source: " + <> sourceNameToText sourceName where focusTableMetadata :: Traversal' Metadata (TableMetadata b) focusTableMetadata = @@ -527,9 +536,12 @@ askFunctionInfo :: m (FunctionInfo b) askFunctionInfo sourceName functionName = do rawSchemaCache <- askSchemaCache - onNothing (unsafeFunctionInfo sourceName functionName $ scSources rawSchemaCache) $ - throw400 NotExists $ - "function " <> functionName <<> " does not exist in source: " <> sourceNameToText sourceName + onNothing (unsafeFunctionInfo sourceName functionName $ scSources rawSchemaCache) + $ throw400 NotExists + $ "function " + <> functionName + <<> " does not exist in source: " + <> sourceNameToText sourceName ------------------------------------------------------------------------------- @@ -697,10 +709,10 @@ getDependentObjsWith :: getDependentObjsWith f sc objId = map fst $ filter (isDependency . 
snd) $ HashMap.toList $ scDepMap sc where - isDependency deps = not $ - HS.null $ - flip HS.filter deps $ - \(SchemaDependency depId reason) -> objId `induces` depId && f reason + isDependency deps = not + $ HS.null + $ flip HS.filter deps + $ \(SchemaDependency depId reason) -> objId `induces` depId && f reason -- induces a b : is b dependent on a induces (SOSource s1) (SOSource s2) = s1 == s2 induces (SOSource s1) (SOSourceObj s2 _) = s1 == s2 @@ -823,9 +835,9 @@ getBoolExpDeps' = \case BoolExpCtx {source} <- ask let tableDep = SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITable @b refqt + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITable @b refqt ) DRRemoteTable (tableDep :) <$> local (\e -> e {currTable = refqt}) (getBoolExpDeps' whereExp) @@ -853,9 +865,9 @@ getColExpDeps bexp = do let relationshipName = riName relInfo schemaDependency = SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b currTable (TORel relationshipName) + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b currTable (TORel relationshipName) ) DROnType in do @@ -876,9 +888,9 @@ getColExpDeps bexp = do in case _acfbBoolExp computedFieldBoolExp of CFBEScalar opExps -> let computedFieldDep = - mkComputedFieldDep' $ - bool DRSessionVariable DROnType $ - any hasStaticExp opExps + mkComputedFieldDep' + $ bool DRSessionVariable DROnType + $ any hasStaticExp opExps in (computedFieldDep :) <$> getOpExpDeps opExps CFBETable cfTable cfTableBoolExp -> (mkComputedFieldDep' DROnType :) <$> local (\e -> e {currTable = cfTable}) (getBoolExpDeps' cfTableBoolExp) @@ -919,7 +931,9 @@ askFieldInfoMapSource :: TableName b -> m (FieldInfoMap (FieldInfo b)) askFieldInfoMapSource tableName = do - fmap _tciFieldInfoMap $ - onNothingM (lookupTableCoreInfo tableName) $ - throw400 NotExists $ - "table " <> tableName <<> " does not exist" + fmap _tciFieldInfoMap + $ onNothingM (lookupTableCoreInfo tableName) + $ throw400 NotExists + $ "table " + <> tableName + <<> " does not exist" diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache/AggregationPredicates.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache/AggregationPredicates.hs index e751fd24c68..5027ef1191f 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache/AggregationPredicates.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache/AggregationPredicates.hs @@ -32,9 +32,9 @@ defaultGetAggregationPredicateDeps (AggregationPredicatesImplementation relInfo RelTargetTable tn -> tn schemaDependency = SchemaDependency - ( SOSourceObj source $ - AB.mkAnyBackend $ - SOITableObj @b currTable (TORel relationshipName) + ( SOSourceObj source + $ AB.mkAnyBackend + $ SOITableObj @b currTable (TORel relationshipName) ) DROnType in (schemaDependency :) <$> local (\e -> e {currTable = relationshipTable}) (getFunctionDeps functions) diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache/Build.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache/Build.hs index f6e510fac82..1ea142162b7 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCache/Build.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCache/Build.hs @@ -96,8 +96,8 @@ recordInconsistenciesM' metadataObjects reason = data MetadataDependency = MetadataDependency + -- | for error reporting on missing dependencies MetadataObject - -- ^ for error reporting on missing dependencies SchemaObjId SchemaDependency deriving (Eq) @@ -194,7 +194,7 @@ class (CacheRM m) => CacheRWM m where tryBuildSchemaCacheWithOptions :: BuildReason -> CacheInvalidations -> Metadata -> (ValidateNewSchemaCache a) -> m a 
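-- Editor's note: in the hunks around this class only the constraint syntax
-- changes ("CacheRWM m =>" becomes "(CacheRWM m) =>"); semantics are
-- untouched. For orientation, the ValidateNewSchemaCache callback taken by
-- tryBuildSchemaCacheWithOptions inspects the old and new caches and decides
-- whether the rebuild is kept. A hedged sketch of a validator that discards a
-- rebuild introducing fresh inconsistencies (names follow helpers visible in
-- this diff; DiscardNewSchemaCache and scInconsistentObjs are assumed to exist
-- as the obvious counterparts):
--
--   keepUnlessNewlyInconsistent :: ValidateNewSchemaCache Bool
--   keepUnlessNewlyInconsistent oldCache newCache =
--     let newOnes =
--           HashMap.difference
--             (groupInconsistentMetadataById (scInconsistentObjs newCache))
--             (groupInconsistentMetadataById (scInconsistentObjs oldCache))
--      in if HashMap.null newOnes
--           then (KeepNewSchemaCache, True)
--           else (DiscardNewSchemaCache, False)
--
-- buildSchemaCacheWithOptions below is the degenerate case: its validator is
-- \_ _ -> (KeepNewSchemaCache, ()) and always keeps the new cache.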
setMetadataResourceVersionInSchemaCache :: MetadataResourceVersion -> m () -buildSchemaCacheWithOptions :: CacheRWM m => BuildReason -> CacheInvalidations -> Metadata -> m () +buildSchemaCacheWithOptions :: (CacheRWM m) => BuildReason -> CacheInvalidations -> Metadata -> m () buildSchemaCacheWithOptions buildReason cacheInvalidation metadata = tryBuildSchemaCacheWithOptions buildReason cacheInvalidation metadata (\_ _ -> (KeepNewSchemaCache, ())) data BuildReason @@ -323,8 +323,8 @@ tryBuildSchemaCache :: tryBuildSchemaCache MetadataModifier {..} = do modifiedMetadata <- runMetadataModifier <$> getMetadata newInconsistentObjects <- tryBuildSchemaCacheWithOptions (CatalogUpdate mempty) mempty modifiedMetadata validateNewSchemaCache - when (newInconsistentObjects == mempty) $ - putMetadata modifiedMetadata + when (newInconsistentObjects == mempty) + $ putMetadata modifiedMetadata pure $ newInconsistentObjects where validateNewSchemaCache :: SchemaCache -> SchemaCache -> (ValidateNewSchemaCacheResult, HashMap MetadataObjId (NonEmpty InconsistentMetadata)) @@ -372,8 +372,8 @@ tryBuildSchemaCacheAndWarnOnFailingObjects mkMetadataModifier warningCode metada else -- Otherwise just look at the rest of the errors, if any pure metadataInconsistencies - unless (null finalMetadataInconsistencies) $ - throwError + unless (null finalMetadataInconsistencies) + $ throwError (err400 Unexpected "cannot continue due to newly found inconsistent metadata") { qeInternal = Just $ ExtraInternal $ toJSON (L.nub . concatMap toList $ HashMap.elems finalMetadataInconsistencies) } @@ -394,8 +394,8 @@ buildSchemaCacheFor objectId metadataModifier = do let reasons = commaSeparated $ imReason <$> matchingObjects throwError (err400 InvalidConfiguration reasons) {qeInternal = Just $ ExtraInternal $ toJSON matchingObjects} - unless (null newInconsistentObjects) $ - throwError + unless (null newInconsistentObjects) + $ throwError (err400 Unexpected "cannot continue due to new inconsistent metadata") { qeInternal = Just $ ExtraInternal $ toJSON (L.nub . 
concatMap toList $ HashMap.elems newInconsistentObjects) } @@ -420,8 +420,8 @@ withNewInconsistentObjsCheck action = do let diffInconsistentObjects = HashMap.difference `on` groupInconsistentMetadataById newInconsistentObjects = L.uniques $ concatMap toList $ HashMap.elems (currentObjects `diffInconsistentObjects` originalObjects) - unless (null newInconsistentObjects) $ - throwError + unless (null newInconsistentObjects) + $ throwError (err500 Unexpected "cannot continue due to newly found inconsistent metadata") { qeInternal = Just $ ExtraInternal $ toJSON newInconsistentObjects } diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCacheTypes.hs b/server/src-lib/Hasura/RQL/Types/SchemaCacheTypes.hs index 9f9c72806a2..e26f3f62fd7 100644 --- a/server/src-lib/Hasura/RQL/Types/SchemaCacheTypes.hs +++ b/server/src-lib/Hasura/RQL/Types/SchemaCacheTypes.hs @@ -58,7 +58,7 @@ data TableObjId (b :: BackendType) | TOTrigger TriggerName deriving (Generic) -deriving instance Backend b => Eq (TableObjId b) +deriving instance (Backend b) => Eq (TableObjId b) instance (Backend b) => Hashable (TableObjId b) @@ -82,7 +82,7 @@ newtype NativeQueryObjId (b :: BackendType) = NQOCol (Column b) deriving (Generic) -deriving instance Backend b => Eq (NativeQueryObjId b) +deriving instance (Backend b) => Eq (NativeQueryObjId b) instance (Backend b) => Hashable (NativeQueryObjId b) @@ -94,7 +94,7 @@ newtype StoredProcedureObjId (b :: BackendType) = SPOCol (Column b) deriving (Generic) -deriving instance Backend b => Eq (StoredProcedureObjId b) +deriving instance (Backend b) => Eq (StoredProcedureObjId b) instance (Backend b) => Hashable (StoredProcedureObjId b) @@ -130,8 +130,8 @@ instance Hashable SchemaObjId reportSchemaObj :: SchemaObjId -> T.Text reportSchemaObj = \case SOSource source -> "source " <> sourceNameToText source - SOSourceObj source exists -> inSource source $ - AB.dispatchAnyBackend @Backend + SOSourceObj source exists -> inSource source + $ AB.dispatchAnyBackend @Backend exists \case SOITable tn -> "table " <> toTxt tn @@ -166,7 +166,8 @@ reportSchemaObj = \case SORemoteSchemaPermission remoteSchemaName roleName -> "remote schema permission " <> unNonEmptyText (unRemoteSchemaName remoteSchemaName) - <> "." <>> roleName + <> "." 
+ <>> roleName SORemoteSchemaRemoteRelationship remoteSchemaName typeName relationshipName -> "remote_relationship " <> toTxt relationshipName @@ -247,12 +248,12 @@ instance Hashable SchemaDependency reportDependentObjectsExist :: (MonadError QErr m) => [SchemaObjId] -> m () reportDependentObjectsExist dependentObjects = - throw400 DependencyError $ - "cannot drop due to the following dependent objects: " - <> reportSchemaObjs dependentObjects + throw400 DependencyError + $ "cannot drop due to the following dependent objects: " + <> reportSchemaObjs dependentObjects purgeSourceAndSchemaDependencies :: - MonadError QErr m => + (MonadError QErr m) => SchemaObjId -> WriterT MetadataModifier m () purgeSourceAndSchemaDependencies = \case @@ -271,23 +272,24 @@ purgeDependentObject :: m MetadataModifier purgeDependentObject source sourceObjId = case sourceObjId of SOITableObj tn tableObj -> - pure $ - MetadataModifier $ - tableMetadataSetter @b source tn %~ case tableObj of - TOPerm rn pt -> dropPermissionInMetadata rn pt - TORel rn -> dropRelationshipInMetadata rn - TOTrigger trn -> dropEventTriggerInMetadata trn - TOComputedField ccn -> dropComputedFieldInMetadata ccn - TORemoteRel rrn -> dropRemoteRelationshipInMetadata rrn - _ -> id + pure + $ MetadataModifier + $ tableMetadataSetter @b source tn + %~ case tableObj of + TOPerm rn pt -> dropPermissionInMetadata rn pt + TORel rn -> dropRelationshipInMetadata rn + TOTrigger trn -> dropEventTriggerInMetadata trn + TOComputedField ccn -> dropComputedFieldInMetadata ccn + TORemoteRel rrn -> dropRemoteRelationshipInMetadata rrn + _ -> id SOIFunction qf -> pure $ dropFunctionInMetadata @b source qf _ -> - throw500 $ - "unexpected dependent object: " - <> reportSchemaObj (SOSourceObj source $ AB.mkAnyBackend sourceObjId) + throw500 + $ "unexpected dependent object: " + <> reportSchemaObj (SOSourceObj source $ AB.mkAnyBackend sourceObjId) -- | Type class to collect schema dependencies from backend-specific aggregation predicates. 
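-- Editor's note: this PR only re-parenthesises the superclass constraint on
-- the class declared below. As a hedged sketch of how a backend satisfies it,
-- an instance typically delegates to the defaultGetAggregationPredicateDeps
-- helper reformatted in the AggregationPredicates hunk above, e.g.
--
--   instance GetAggregationPredicatesDeps ('Postgres 'Vanilla) where
--     getAggregationPredicateDeps = defaultGetAggregationPredicateDeps
--
-- (the instance head is illustrative, not part of this diff).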
-class Backend b => GetAggregationPredicatesDeps b where +class (Backend b) => GetAggregationPredicatesDeps b where getAggregationPredicateDeps :: AggregationPredicates b (PartialSQLExp b) -> BoolExpM b [SchemaDependency] diff --git a/server/src-lib/Hasura/RQL/Types/Source/Column.hs b/server/src-lib/Hasura/RQL/Types/Source/Column.hs index a653df121a1..19dfd83ba9f 100644 --- a/server/src-lib/Hasura/RQL/Types/Source/Column.hs +++ b/server/src-lib/Hasura/RQL/Types/Source/Column.hs @@ -45,13 +45,13 @@ data SourceColumnInfo b = SourceColumnInfo deriving anyclass (Hashable) deriving (FromJSON, ToJSON, ToSchema) via Autodocodec (SourceColumnInfo b) -deriving instance Backend b => Eq (SourceColumnInfo b) +deriving instance (Backend b) => Eq (SourceColumnInfo b) -deriving instance Backend b => Ord (SourceColumnInfo b) +deriving instance (Backend b) => Ord (SourceColumnInfo b) -deriving instance Backend b => Show (SourceColumnInfo b) +deriving instance (Backend b) => Show (SourceColumnInfo b) -instance Backend b => HasCodec (SourceColumnInfo b) where +instance (Backend b) => HasCodec (SourceColumnInfo b) where codec = object "ColumnInfo" $ SourceColumnInfo diff --git a/server/src-lib/Hasura/RQL/Types/Source/Table.hs b/server/src-lib/Hasura/RQL/Types/Source/Table.hs index e32527d6545..38ca24d5096 100644 --- a/server/src-lib/Hasura/RQL/Types/Source/Table.hs +++ b/server/src-lib/Hasura/RQL/Types/Source/Table.hs @@ -58,13 +58,13 @@ data SourceTableInfo b = SourceTableInfo deriving anyclass (Hashable) deriving (FromJSON, ToJSON, ToSchema) via Autodocodec (SourceTableInfo b) -deriving stock instance Backend b => Eq (SourceTableInfo b) +deriving stock instance (Backend b) => Eq (SourceTableInfo b) -deriving stock instance Backend b => Ord (SourceTableInfo b) +deriving stock instance (Backend b) => Ord (SourceTableInfo b) -deriving stock instance Backend b => Show (SourceTableInfo b) +deriving stock instance (Backend b) => Show (SourceTableInfo b) -instance Backend b => HasCodec (SourceTableInfo b) where +instance (Backend b) => HasCodec (SourceTableInfo b) where codec = object "TableInfo" $ SourceTableInfo @@ -98,13 +98,13 @@ newtype SourceForeignKeys b = SourceForeignKeys {_unSourceForeignKeys :: HashMap deriving anyclass (Hashable) deriving (FromJSON, ToJSON) via Autodocodec (SourceForeignKeys b) -deriving stock instance Backend b => Eq (SourceForeignKeys b) +deriving stock instance (Backend b) => Eq (SourceForeignKeys b) -deriving stock instance Backend b => Ord (SourceForeignKeys b) +deriving stock instance (Backend b) => Ord (SourceForeignKeys b) -deriving stock instance Backend b => Show (SourceForeignKeys b) +deriving stock instance (Backend b) => Show (SourceForeignKeys b) -instance Backend b => HasCodec (SourceForeignKeys b) where +instance (Backend b) => HasCodec (SourceForeignKeys b) where codec = dimapCodec SourceForeignKeys _unSourceForeignKeys $ codec @(HashMap (ConstraintName b) (SourceConstraint b)) -------------------------------------------------------------------------------- @@ -117,13 +117,13 @@ data SourceConstraint b = SourceConstraint deriving anyclass (Hashable) deriving (FromJSON, ToJSON) via Autodocodec (SourceConstraint b) -deriving stock instance Backend b => Eq (SourceConstraint b) +deriving stock instance (Backend b) => Eq (SourceConstraint b) -deriving stock instance Backend b => Ord (SourceConstraint b) +deriving stock instance (Backend b) => Ord (SourceConstraint b) -deriving stock instance Backend b => Show (SourceConstraint b) +deriving stock instance (Backend b) => Show 
(SourceConstraint b) -instance Backend b => HasCodec (SourceConstraint b) where +instance (Backend b) => HasCodec (SourceConstraint b) where codec = object "SourceConstraint" $ SourceConstraint diff --git a/server/src-lib/Hasura/RQL/Types/SourceCustomization.hs b/server/src-lib/Hasura/RQL/Types/SourceCustomization.hs index 0abcae6fe42..db4e1bdeee2 100644 --- a/server/src-lib/Hasura/RQL/Types/SourceCustomization.hs +++ b/server/src-lib/Hasura/RQL/Types/SourceCustomization.hs @@ -103,11 +103,14 @@ data RootFieldsCustomization = RootFieldsCustomization instance HasCodec RootFieldsCustomization where codec = - AC.object "RootFieldsCustomization" $ - RootFieldsCustomization - <$> optionalFieldWith' "namespace" graphQLFieldNameCodec AC..= _rootfcNamespace - <*> optionalFieldWith' "prefix" graphQLFieldNameCodec AC..= _rootfcPrefix - <*> optionalFieldWith' "suffix" graphQLFieldNameCodec AC..= _rootfcSuffix + AC.object "RootFieldsCustomization" + $ RootFieldsCustomization + <$> optionalFieldWith' "namespace" graphQLFieldNameCodec + AC..= _rootfcNamespace + <*> optionalFieldWith' "prefix" graphQLFieldNameCodec + AC..= _rootfcPrefix + <*> optionalFieldWith' "suffix" graphQLFieldNameCodec + AC..= _rootfcSuffix instance ToJSON RootFieldsCustomization where toJSON = genericToJSON hasuraJSON {omitNothingFields = True} @@ -126,10 +129,12 @@ data SourceTypeCustomization = SourceTypeCustomization instance HasCodec SourceTypeCustomization where codec = - AC.object "SourceTypeCustomization" $ - SourceTypeCustomization - <$> optionalFieldWith' "prefix" graphQLFieldNameCodec AC..= _stcPrefix - <*> optionalFieldWith' "suffix" graphQLFieldNameCodec AC..= _stcSuffix + AC.object "SourceTypeCustomization" + $ SourceTypeCustomization + <$> optionalFieldWith' "prefix" graphQLFieldNameCodec + AC..= _stcPrefix + <*> optionalFieldWith' "suffix" graphQLFieldNameCodec + AC..= _stcSuffix instance ToJSON SourceTypeCustomization where toJSON = genericToJSON hasuraJSON {omitNothingFields = True} @@ -242,11 +247,14 @@ data SourceCustomization = SourceCustomization instance HasCodec SourceCustomization where codec = - AC.object "SourceCustomization" $ - SourceCustomization - <$> optionalField' "root_fields" AC..= _scRootFields - <*> optionalField' "type_names" AC..= _scTypeNames - <*> optionalField' "naming_convention" AC..= _scNamingConvention + AC.object "SourceCustomization" + $ SourceCustomization + <$> optionalField' "root_fields" + AC..= _scRootFields + <*> optionalField' "type_names" + AC..= _scTypeNames + <*> optionalField' "naming_convention" + AC..= _scNamingConvention instance ToJSON SourceCustomization where toJSON = genericToJSON hasuraJSON {omitNothingFields = True} diff --git a/server/src-lib/Hasura/RQL/Types/Webhook/Transform.hs b/server/src-lib/Hasura/RQL/Types/Webhook/Transform.hs index 457be356701..e7368a8b7e8 100644 --- a/server/src-lib/Hasura/RQL/Types/Webhook/Transform.hs +++ b/server/src-lib/Hasura/RQL/Types/Webhook/Transform.hs @@ -49,17 +49,19 @@ instance HasCodec MetadataResponseTransform where $ disjointEitherCodec transformV1 transformV2 where transformV1 = - AC.object "ResponseTransformV1" $ - MetadataResponseTransform - <$> (V1 <$ optionalVersionField 1) - <*> bodyV1 AC..= mrtBodyTransform + AC.object "ResponseTransformV1" + $ MetadataResponseTransform + <$> (V1 <$ optionalVersionField 1) + <*> bodyV1 + AC..= mrtBodyTransform <*> transformCommon transformV2 = - AC.object "ResponseTransformV2" $ - MetadataResponseTransform - <$> (V2 <$ versionField 2) - <*> bodyV2 AC..= mrtBodyTransform + AC.object 
"ResponseTransformV2" + $ MetadataResponseTransform + <$> (V2 <$ versionField 2) + <*> bodyV2 + AC..= mrtBodyTransform <*> transformCommon transformCommon = optionalFieldWithDefault' "template_engine" Kriti AC..= mrtTemplatingEngine @@ -91,11 +93,11 @@ instance ToJSON MetadataResponseTransform where Just (Body.ModifyAsJSON template) -> Just ("body", J.toJSON template) _ -> Nothing V2 -> "body" .=? mrtBodyTransform - in J.object $ - [ "template_engine" .= mrtTemplatingEngine, - "version" .= mrtVersion - ] - <> maybeToList body + in J.object + $ [ "template_engine" .= mrtTemplatingEngine, + "version" .= mrtVersion + ] + <> maybeToList body ------------------------------------------------------------------------------- @@ -121,28 +123,35 @@ instance HasCodec RequestTransform where $ disjointEitherCodec transformV1 transformV2 where transformV1 = - AC.object "RequestTransformV1" $ - RequestTransform - <$> (V1 <$ optionalVersionField 1) - <*> requestFieldsCodec bodyV1 AC..= requestFields + AC.object "RequestTransformV1" + $ RequestTransform + <$> (V1 <$ optionalVersionField 1) + <*> requestFieldsCodec bodyV1 + AC..= requestFields <*> transformCommon transformV2 = - AC.object "RequestTransformV2" $ - RequestTransform - <$> (V2 <$ versionField 2) - <*> requestFieldsCodec bodyV2 AC..= requestFields + AC.object "RequestTransformV2" + $ RequestTransform + <$> (V2 <$ versionField 2) + <*> requestFieldsCodec bodyV2 + AC..= requestFields <*> transformCommon transformCommon = optionalFieldWithDefault' "template_engine" Kriti AC..= templateEngine requestFieldsCodec bodyCodec = RequestFields - <$> withOptionalField' @MethodTransformFn "method" AC..= method - <*> withOptionalField' @UrlTransformFn "url" AC..= url - <*> bodyCodec AC..= body - <*> withOptionalField' @QueryParamsTransformFn "query_params" AC..= queryParams - <*> withOptionalField' @HeadersTransformFn "request_headers" AC..= requestHeaders + <$> withOptionalField' @MethodTransformFn "method" + AC..= method + <*> withOptionalField' @UrlTransformFn "url" + AC..= url + <*> bodyCodec + AC..= body + <*> withOptionalField' @QueryParamsTransformFn "query_params" + AC..= queryParams + <*> withOptionalField' @HeadersTransformFn "request_headers" + AC..= requestHeaders bodyV1 = dimapCodec dec enc $ optionalField' @Template "body" where @@ -185,17 +194,17 @@ instance ToJSON RequestTransform where Just ("body", J.toJSON template) _ -> Nothing V2 -> "body" .=? getOptional body - in J.object $ - [ "version" .= version, - "template_engine" .= templateEngine - ] - <> catMaybes - [ "method" .=? getOptional method, - "url" .=? getOptional url, - "query_params" .=? getOptional queryParams, - "request_headers" .=? getOptional requestHeaders, - body' - ] + in J.object + $ [ "version" .= version, + "template_engine" .= templateEngine + ] + <> catMaybes + [ "method" .=? getOptional method, + "url" .=? getOptional url, + "query_params" .=? getOptional queryParams, + "request_headers" .=? 
getOptional requestHeaders, + body' + ] ------------------------------------------------------------------------------- @@ -227,15 +236,15 @@ data RequestFields f = RequestFields deriving anyclass (FunctorB, ApplicativeB, TraversableB, ConstraintsB) deriving stock instance - AllBF Show f RequestFields => + (AllBF Show f RequestFields) => Show (RequestFields f) deriving stock instance - AllBF Eq f RequestFields => + (AllBF Eq f RequestFields) => Eq (RequestFields f) deriving anyclass instance - AllBF NFData f RequestFields => + (AllBF NFData f RequestFields) => NFData (RequestFields f) -- NOTE: It is likely that we can derive these instances. Possibly if @@ -247,8 +256,8 @@ instance FromJSON RequestTransformFns where body <- o .:? "body" queryParams <- o .:? "query_params" headers <- o .:? "request_headers" - pure $ - RequestFields + pure + $ RequestFields { method = withOptional @MethodTransformFn method, url = withOptional @UrlTransformFn url, body = withOptional @BodyTransformFn body, @@ -258,13 +267,14 @@ instance FromJSON RequestTransformFns where instance ToJSON RequestTransformFns where toJSON RequestFields {..} = - J.object . catMaybes $ - [ "method" .=? getOptional method, - "url" .=? getOptional url, - "body" .=? getOptional body, - "query_params" .=? getOptional queryParams, - "request_headers" .=? getOptional requestHeaders - ] + J.object + . catMaybes + $ [ "method" .=? getOptional method, + "url" .=? getOptional url, + "body" .=? getOptional body, + "query_params" .=? getOptional queryParams, + "request_headers" .=? getOptional requestHeaders + ] type RequestContext = RequestFields TransformCtx diff --git a/server/src-lib/Hasura/RQL/Types/Webhook/Transform/Body.hs b/server/src-lib/Hasura/RQL/Types/Webhook/Transform/Body.hs index 8866e6146d1..141d2384098 100644 --- a/server/src-lib/Hasura/RQL/Types/Webhook/Transform/Body.hs +++ b/server/src-lib/Hasura/RQL/Types/Webhook/Transform/Body.hs @@ -73,25 +73,29 @@ newtype instance TransformCtx Body = TransformCtx RequestTransformCtx instance HasCodec BodyTransformFn where codec = - dimapCodec dec enc $ - disjointEitherCodec removeCodec $ - disjointEitherCodec modifyAsJSONCodec modifyAsFormURLEncodecCodec + dimapCodec dec enc + $ disjointEitherCodec removeCodec + $ disjointEitherCodec modifyAsJSONCodec modifyAsFormURLEncodecCodec where removeCodec = object "BodyTransformFn_Remove" $ discriminatorField "action" "remove" modifyAsJSONCodec = - dimapCodec snd ((),) $ - object "BodyTransformFn_ModifyAsJSON" $ - (,) - <$> discriminatorField "action" "transform" .= fst - <*> requiredField' @Template "template" .= snd + dimapCodec snd ((),) + $ object "BodyTransformFn_ModifyAsJSON" + $ (,) + <$> discriminatorField "action" "transform" + .= fst + <*> requiredField' @Template "template" + .= snd modifyAsFormURLEncodecCodec = - dimapCodec snd ((),) $ - object "BodyTransformFn_ModifyAsFormURLEncoded" $ - (,) - <$> discriminatorField "action" "x_www_form_urlencoded" .= fst - <*> requiredField' @(M.HashMap Text UnescapedTemplate) "form_template" .= snd + dimapCodec snd ((),) + $ object "BodyTransformFn_ModifyAsFormURLEncoded" + $ (,) + <$> discriminatorField "action" "x_www_form_urlencoded" + .= fst + <*> requiredField' @(M.HashMap Text UnescapedTemplate) "form_template" + .= snd dec (Left _) = Remove dec (Right (Left template)) = ModifyAsJSON template diff --git a/server/src-lib/Hasura/RQL/Types/Webhook/Transform/Headers.hs b/server/src-lib/Hasura/RQL/Types/Webhook/Transform/Headers.hs index 8d387f91bb2..0f70ba7bc95 100644 --- 
a/server/src-lib/Hasura/RQL/Types/Webhook/Transform/Headers.hs +++ b/server/src-lib/Hasura/RQL/Types/Webhook/Transform/Headers.hs @@ -52,10 +52,12 @@ data AddReplaceOrRemoveFields = AddReplaceOrRemoveFields instance HasCodec AddReplaceOrRemoveFields where codec = - object "AddReplaceOrRemoveFields" $ - AddReplaceOrRemoveFields - <$> optionalFieldWithDefaultWith' "add_headers" addCodec mempty .= addOrReplaceHeaders - <*> optionalFieldWithDefaultWith' "remove_headers" removeCodec mempty .= removeHeaders + object "AddReplaceOrRemoveFields" + $ AddReplaceOrRemoveFields + <$> optionalFieldWithDefaultWith' "add_headers" addCodec mempty + .= addOrReplaceHeaders + <*> optionalFieldWithDefaultWith' "remove_headers" removeCodec mempty + .= removeHeaders where addCodec = dimapCodec HashMap.toList HashMap.fromList $ caseInsensitiveHashMapCodec codec removeCodec = listCodec caseInsensitiveTextCodec diff --git a/server/src-lib/Hasura/RQL/Types/Webhook/Transform/WithOptional.hs b/server/src-lib/Hasura/RQL/Types/Webhook/Transform/WithOptional.hs index 6a180834620..5682f4ab119 100644 --- a/server/src-lib/Hasura/RQL/Types/Webhook/Transform/WithOptional.hs +++ b/server/src-lib/Hasura/RQL/Types/Webhook/Transform/WithOptional.hs @@ -40,7 +40,7 @@ deriving newtype instance -- and @'WithOptional' f b@ if an isomorphism exists between @a@ and @f b@. withOptional :: forall a b f. - Coercible a (f b) => + (Coercible a (f b)) => Maybe a -> WithOptional f b withOptional = coerce @@ -60,10 +60,10 @@ withOptionalField' name = withOptionalFieldWith' name (codec @a) -- This version takes a codec for the underlying value type as an argument. withOptionalFieldWith' :: forall a b f. - Coercible a (f b) => + (Coercible a (f b)) => Text -> ValueCodec a a -> ObjectCodec (WithOptional f b) (WithOptional f b) withOptionalFieldWith' name aCodec = - dimapCodec withOptional (fmap coerce . getOptional) $ - optionalFieldWith' name aCodec + dimapCodec withOptional (fmap coerce . getOptional) + $ optionalFieldWith' name aCodec diff --git a/server/src-lib/Hasura/RemoteSchema/Metadata/Core.hs b/server/src-lib/Hasura/RemoteSchema/Metadata/Core.hs index 1d4f522bd71..cc99509237e 100644 --- a/server/src-lib/Hasura/RemoteSchema/Metadata/Core.hs +++ b/server/src-lib/Hasura/RemoteSchema/Metadata/Core.hs @@ -60,26 +60,39 @@ instance NFData RemoteSchemaDef instance HasCodec RemoteSchemaDef where codec = - object "RemoteSchemaDef" $ - RemoteSchemaDef - <$> optionalField' "url" .= _rsdUrl - <*> optionalField' "url_from_env" .= _rsdUrlFromEnv - <*> optionalField' "headers" .= _rsdHeaders - <*> optionalFieldWithDefault' "forward_client_headers" False .= _rsdForwardClientHeaders - <*> optionalField' "timeout_seconds" .= _rsdTimeoutSeconds - <*> optionalField' "customization" .= _rsdCustomization + object "RemoteSchemaDef" + $ RemoteSchemaDef + <$> optionalField' "url" + .= _rsdUrl + <*> optionalField' "url_from_env" + .= _rsdUrlFromEnv + <*> optionalField' "headers" + .= _rsdHeaders + <*> optionalFieldWithDefault' "forward_client_headers" False + .= _rsdForwardClientHeaders + <*> optionalField' "timeout_seconds" + .= _rsdTimeoutSeconds + <*> optionalField' "customization" + .= _rsdCustomization $(J.deriveToJSON hasuraJSON {J.omitNothingFields = True} ''RemoteSchemaDef) instance J.FromJSON RemoteSchemaDef where parseJSON = J.withObject "Object" $ \o -> RemoteSchemaDef - <$> o J..:? "url" - <*> o J..:? "url_from_env" - <*> o J..:? "headers" - <*> o J..:? "forward_client_headers" J..!= False - <*> o J..:? "timeout_seconds" - <*> o J..:? 
"customization" + <$> o + J..:? "url" + <*> o + J..:? "url_from_env" + <*> o + J..:? "headers" + <*> o + J..:? "forward_client_headers" + J..!= False + <*> o + J..:? "timeout_seconds" + <*> o + J..:? "customization" getUrlFromEnv :: (MonadError QErr m) => Env.Environment -> Text -> m (EnvRecord N.URI) getUrlFromEnv env urlFromEnv = do @@ -103,28 +116,37 @@ data RemoteSchemaMetadataG r = RemoteSchemaMetadata instance (HasCodec (RemoteRelationshipG r), Typeable r) => HasCodec (RemoteSchemaMetadataG r) where codec = - object ("RemoteSchemaMetadata_" <> typeableName @r) $ - RemoteSchemaMetadata - <$> requiredField' "name" .= _rsmName - <*> requiredField' "definition" .= _rsmDefinition - <*> optionalField' "comment" .= _rsmComment - <*> optionalFieldWithDefault' "permissions" mempty .= _rsmPermissions + object ("RemoteSchemaMetadata_" <> typeableName @r) + $ RemoteSchemaMetadata + <$> requiredField' "name" + .= _rsmName + <*> requiredField' "definition" + .= _rsmDefinition + <*> optionalField' "comment" + .= _rsmComment + <*> optionalFieldWithDefault' "permissions" mempty + .= _rsmPermissions <*> optionalFieldWithDefaultWith' "remote_relationships" (insertionOrderedElemsCodec _rstrsName) mempty - .= _rsmRemoteRelationships + .= _rsmRemoteRelationships -instance J.FromJSON (RemoteRelationshipG r) => J.FromJSON (RemoteSchemaMetadataG r) where +instance (J.FromJSON (RemoteRelationshipG r)) => J.FromJSON (RemoteSchemaMetadataG r) where parseJSON = J.withObject "RemoteSchemaMetadata" \obj -> RemoteSchemaMetadata - <$> obj J..: "name" - <*> obj J..: "definition" - <*> obj J..:? "comment" - <*> obj J..:? "permissions" J..!= mempty + <$> obj + J..: "name" + <*> obj + J..: "definition" + <*> obj + J..:? "comment" + <*> obj + J..:? "permissions" + J..!= mempty <*> (oMapFromL _rstrsName <$> obj J..:? "remote_relationships" J..!= []) -instance J.ToJSON (RemoteRelationshipG r) => J.ToJSON (RemoteSchemaMetadataG r) where +instance (J.ToJSON (RemoteRelationshipG r)) => J.ToJSON (RemoteSchemaMetadataG r) where toJSON RemoteSchemaMetadata {..} = J.object [ "name" J..= _rsmName, diff --git a/server/src-lib/Hasura/RemoteSchema/Metadata/Customization.hs b/server/src-lib/Hasura/RemoteSchema/Metadata/Customization.hs index 296531b73f7..e58344cd7a8 100644 --- a/server/src-lib/Hasura/RemoteSchema/Metadata/Customization.hs +++ b/server/src-lib/Hasura/RemoteSchema/Metadata/Customization.hs @@ -29,20 +29,27 @@ instance Hashable RemoteTypeCustomization instance HasCodec RemoteTypeCustomization where codec = - object "RemoteTypeCustomization" $ - RemoteTypeCustomization - <$> optionalFieldWith' "prefix" graphQLFieldNameCodec .= _rtcPrefix - <*> optionalFieldWith' "suffix" graphQLFieldNameCodec .= _rtcSuffix - <*> requiredFieldWith' "mapping" (hashMapCodec graphQLFieldNameCodec) .= _rtcMapping + object "RemoteTypeCustomization" + $ RemoteTypeCustomization + <$> optionalFieldWith' "prefix" graphQLFieldNameCodec + .= _rtcPrefix + <*> optionalFieldWith' "suffix" graphQLFieldNameCodec + .= _rtcSuffix + <*> requiredFieldWith' "mapping" (hashMapCodec graphQLFieldNameCodec) + .= _rtcMapping $(J.deriveToJSON hasuraJSON {J.omitNothingFields = True} ''RemoteTypeCustomization) instance J.FromJSON RemoteTypeCustomization where parseJSON = J.withObject "RemoteTypeCustomization" $ \o -> RemoteTypeCustomization - <$> o J..:? "prefix" - <*> o J..:? "suffix" - <*> o J..:? "mapping" J..!= mempty + <$> o + J..:? "prefix" + <*> o + J..:? "suffix" + <*> o + J..:? 
"mapping" + J..!= mempty data RemoteFieldCustomization = RemoteFieldCustomization { _rfcParentType :: G.Name, @@ -58,22 +65,31 @@ instance Hashable RemoteFieldCustomization instance HasCodec RemoteFieldCustomization where codec = - object "RemoteFieldCustomization" $ - RemoteFieldCustomization - <$> requiredFieldWith' "parent_type" graphQLFieldNameCodec .= _rfcParentType - <*> optionalFieldWith' "prefix" graphQLFieldNameCodec .= _rfcPrefix - <*> optionalFieldWith' "suffix" graphQLFieldNameCodec .= _rfcSuffix - <*> requiredFieldWith' "mapping" (hashMapCodec graphQLFieldNameCodec) .= _rfcMapping + object "RemoteFieldCustomization" + $ RemoteFieldCustomization + <$> requiredFieldWith' "parent_type" graphQLFieldNameCodec + .= _rfcParentType + <*> optionalFieldWith' "prefix" graphQLFieldNameCodec + .= _rfcPrefix + <*> optionalFieldWith' "suffix" graphQLFieldNameCodec + .= _rfcSuffix + <*> requiredFieldWith' "mapping" (hashMapCodec graphQLFieldNameCodec) + .= _rfcMapping $(J.deriveToJSON hasuraJSON {J.omitNothingFields = True} ''RemoteFieldCustomization) instance J.FromJSON RemoteFieldCustomization where parseJSON = J.withObject "RemoteFieldCustomization" $ \o -> RemoteFieldCustomization - <$> o J..: "parent_type" - <*> o J..:? "prefix" - <*> o J..:? "suffix" - <*> o J..:? "mapping" J..!= mempty + <$> o + J..: "parent_type" + <*> o + J..:? "prefix" + <*> o + J..:? "suffix" + <*> o + J..:? "mapping" + J..!= mempty data RemoteSchemaCustomization = RemoteSchemaCustomization { _rscRootFieldsNamespace :: Maybe G.Name, @@ -88,10 +104,13 @@ instance Hashable RemoteSchemaCustomization instance HasCodec RemoteSchemaCustomization where codec = - object "RemoteSchemaCustomization" $ - RemoteSchemaCustomization - <$> optionalFieldWith' "root_fields_namespace" graphQLFieldNameCodec .= _rscRootFieldsNamespace - <*> optionalField' "type_names" .= _rscTypeNames - <*> optionalField' "field_names" .= _rscFieldNames + object "RemoteSchemaCustomization" + $ RemoteSchemaCustomization + <$> optionalFieldWith' "root_fields_namespace" graphQLFieldNameCodec + .= _rscRootFieldsNamespace + <*> optionalField' "type_names" + .= _rscTypeNames + <*> optionalField' "field_names" + .= _rscFieldNames $(J.deriveJSON hasuraJSON {J.omitNothingFields = True} ''RemoteSchemaCustomization) diff --git a/server/src-lib/Hasura/RemoteSchema/Metadata/Permission.hs b/server/src-lib/Hasura/RemoteSchema/Metadata/Permission.hs index 82745ca793c..af0b02c6d27 100644 --- a/server/src-lib/Hasura/RemoteSchema/Metadata/Permission.hs +++ b/server/src-lib/Hasura/RemoteSchema/Metadata/Permission.hs @@ -27,13 +27,13 @@ instance Hashable RemoteSchemaPermissionDefinition instance HasCodec RemoteSchemaPermissionDefinition where codec = - object "RemoteSchemaPermissionDefinition" $ - RemoteSchemaPermissionDefinition - <$> requiredFieldWith - "schema" - graphQLSchemaDocumentCodec - "GraphQL schema document, e.g. the content of schema.gql" - .= _rspdSchema + object "RemoteSchemaPermissionDefinition" + $ RemoteSchemaPermissionDefinition + <$> requiredFieldWith + "schema" + graphQLSchemaDocumentCodec + "GraphQL schema document, e.g. 
the content of schema.gql" + .= _rspdSchema instance J.FromJSON RemoteSchemaPermissionDefinition where parseJSON = J.withObject "RemoteSchemaPermissionDefinition" $ \obj -> do @@ -52,10 +52,13 @@ data RemoteSchemaPermissionMetadata = RemoteSchemaPermissionMetadata instance HasCodec RemoteSchemaPermissionMetadata where codec = - object "RemoteSchemaPermissionMetadata" $ - RemoteSchemaPermissionMetadata - <$> requiredField' "role" .= _rspmRole - <*> requiredField' "definition" .= _rspmDefinition - <*> optionalField' "comment" .= _rspmComment + object "RemoteSchemaPermissionMetadata" + $ RemoteSchemaPermissionMetadata + <$> requiredField' "role" + .= _rspmRole + <*> requiredField' "definition" + .= _rspmDefinition + <*> optionalField' "comment" + .= _rspmComment $(J.deriveJSON hasuraJSON {J.omitNothingFields = True} ''RemoteSchemaPermissionMetadata) diff --git a/server/src-lib/Hasura/RemoteSchema/Metadata/RemoteRelationship.hs b/server/src-lib/Hasura/RemoteSchema/Metadata/RemoteRelationship.hs index 495aa204de7..f7305f1fd51 100644 --- a/server/src-lib/Hasura/RemoteSchema/Metadata/RemoteRelationship.hs +++ b/server/src-lib/Hasura/RemoteSchema/Metadata/RemoteRelationship.hs @@ -52,11 +52,14 @@ instance NFData ToSchemaRelationshipDef instance HasCodec ToSchemaRelationshipDef where codec = - object "ToSchemaRelationshipDef" $ - ToSchemaRelationshipDef - <$> requiredField' "remote_schema" .= _trrdRemoteSchema - <*> requiredFieldWith' "lhs_fields" hashSetCodec .= _trrdLhsFields - <*> requiredField' "remote_field" .= _trrdRemoteField + object "ToSchemaRelationshipDef" + $ ToSchemaRelationshipDef + <$> requiredField' "remote_schema" + .= _trrdRemoteSchema + <*> requiredFieldWith' "lhs_fields" hashSetCodec + .= _trrdLhsFields + <*> requiredField' "remote_field" + .= _trrdRemoteField -- | Targeted field in a remote schema relationship. -- TODO: explain about subfields and why this is a container @@ -67,27 +70,28 @@ instance NFData RemoteFields instance HasCodec RemoteFields where codec = - named "RemoteFields" $ - bimapCodec dec enc $ - hashMapCodec argumentsCodec - "Remote fields are represented by an object that maps each field name to its arguments." + named "RemoteFields" + $ bimapCodec dec enc + $ hashMapCodec argumentsCodec + "Remote fields are represented by an object that maps each field name to its arguments." where argumentsCodec :: JSONCodec (RemoteArguments, Maybe RemoteFields) argumentsCodec = - object "FieldCall" $ - (,) - <$> requiredField' "arguments" - .= fst + object "FieldCall" + $ (,) + <$> requiredField' "arguments" + .= fst <*> optionalField' "field" - .= snd + .= snd dec :: HashMap G.Name (RemoteArguments, Maybe RemoteFields) -> Either String RemoteFields dec hashmap = case HashMap.toList hashmap of [(fieldName, (arguments, maybeSubField))] -> let subfields = maybe [] (toList . unRemoteFields) maybeSubField - in Right $ - RemoteFields $ - FieldCall {fcName = fieldName, fcArguments = arguments} :| subfields + in Right + $ RemoteFields + $ FieldCall {fcName = fieldName, fcArguments = arguments} + :| subfields [] -> Left "Expecting one single mapping, received none." _ -> Left "Expecting one single mapping, received too many." @@ -153,10 +157,10 @@ instance Hashable RemoteArguments instance HasCodec RemoteArguments where codec = - named "RemoteArguments" $ - CommentCodec "Remote arguments are represented by an object that maps each argument name to its value." 
$ - dimapCodec RemoteArguments getRemoteArguments $ - hashMapCodec (graphQLValueCodec varCodec) + named "RemoteArguments" + $ CommentCodec "Remote arguments are represented by an object that maps each argument name to its value." + $ dimapCodec RemoteArguments getRemoteArguments + $ hashMapCodec (graphQLValueCodec varCodec) where varCodec = bimapCodec decodeVariable encodeVariable textCodec @@ -234,22 +238,24 @@ data RemoteSchemaTypeRelationships r = RemoteSchemaTypeRelationships instance (HasCodec (RemoteRelationshipG r), Typeable r) => HasCodec (RemoteSchemaTypeRelationships r) where codec = - AC.object ("RemoteSchemaMetadata_" <> typeableName @r) $ - RemoteSchemaTypeRelationships - <$> requiredFieldWith' "type_name" graphQLFieldNameCodec AC..= _rstrsName + AC.object ("RemoteSchemaMetadata_" <> typeableName @r) + $ RemoteSchemaTypeRelationships + <$> requiredFieldWith' "type_name" graphQLFieldNameCodec + AC..= _rstrsName <*> optionalFieldWithDefaultWith' "relationships" (insertionOrderedElemsCodec _rrName) mempty - AC..= _rstrsRelationships + AC..= _rstrsRelationships -instance J.FromJSON (RemoteRelationshipG r) => J.FromJSON (RemoteSchemaTypeRelationships r) where +instance (J.FromJSON (RemoteRelationshipG r)) => J.FromJSON (RemoteSchemaTypeRelationships r) where parseJSON = J.withObject "RemoteSchemaMetadata" \obj -> RemoteSchemaTypeRelationships - <$> obj J..: "type_name" + <$> obj + J..: "type_name" <*> (oMapFromL _rrName <$> obj J..:? "relationships" J..!= []) -instance J.ToJSON (RemoteRelationshipG r) => J.ToJSON (RemoteSchemaTypeRelationships r) where +instance (J.ToJSON (RemoteRelationshipG r)) => J.ToJSON (RemoteSchemaTypeRelationships r) where toJSON RemoteSchemaTypeRelationships {..} = J.object [ "type_name" J..= _rstrsName, diff --git a/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Core.hs b/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Core.hs index bc3b7d2e5cc..a67037865c8 100644 --- a/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Core.hs +++ b/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Core.hs @@ -73,9 +73,10 @@ runAddRemoteSchema :: runAddRemoteSchema env (AddRemoteSchemaQuery name defn comment) = do addRemoteSchemaP1 name void $ addRemoteSchemaP2Setup env defn - buildSchemaCacheFor (MORemoteSchema name) $ - MetadataModifier $ - metaRemoteSchemas %~ InsOrdHashMap.insert name remoteSchemaMeta + buildSchemaCacheFor (MORemoteSchema name) + $ MetadataModifier + $ metaRemoteSchemas + %~ InsOrdHashMap.insert name remoteSchemaMeta pure successMsg where -- NOTE: permissions here are empty, manipulated via a separate API with @@ -88,10 +89,11 @@ addRemoteSchemaP1 :: m () addRemoteSchemaP1 name = do remoteSchemaNames <- getAllRemoteSchemas <$> askSchemaCache - when (name `elem` remoteSchemaNames) $ - throw400 AlreadyExists $ - "remote schema with name " - <> name <<> " already exists" + when (name `elem` remoteSchemaNames) + $ throw400 AlreadyExists + $ "remote schema with name " + <> name + <<> " already exists" runRemoveRemoteSchema :: (QErrM m, UserInfoM m, CacheRWM m, MetadataM m) => @@ -99,9 +101,9 @@ runRemoveRemoteSchema :: m EncJSON runRemoveRemoteSchema (RemoteSchemaNameQuery rsn) = do void $ removeRemoteSchemaP1 rsn - withNewInconsistentObjsCheck $ - buildSchemaCache $ - dropRemoteSchemaInMetadata rsn + withNewInconsistentObjsCheck + $ buildSchemaCache + $ dropRemoteSchemaInMetadata rsn pure successMsg removeRemoteSchemaP1 :: @@ -111,9 +113,9 @@ removeRemoteSchemaP1 :: removeRemoteSchemaP1 rsn = do sc <- askSchemaCache let rmSchemas = scRemoteSchemas sc - void $ - 
onNothing (HashMap.lookup rsn rmSchemas) $ - throw400 NotExists "no such remote schema" + void + $ onNothing (HashMap.lookup rsn rmSchemas) + $ throw400 NotExists "no such remote schema" let depObjs = getDependentObjs sc remoteSchemaDepId roles = mapMaybe getRole depObjs nonPermDependentObjs = filter nonPermDependentObjPredicate depObjs @@ -142,14 +144,16 @@ runReloadRemoteSchema :: m EncJSON runReloadRemoteSchema (RemoteSchemaNameQuery name) = do remoteSchemas <- getAllRemoteSchemas <$> askSchemaCache - unless (name `elem` remoteSchemas) $ - throw400 NotExists $ - "remote schema with name " <> name <<> " does not exist" + unless (name `elem` remoteSchemas) + $ throw400 NotExists + $ "remote schema with name " + <> name + <<> " does not exist" let invalidations = mempty {ciRemoteSchemas = S.singleton name} metadata <- getMetadata - withNewInconsistentObjsCheck $ - buildSchemaCacheWithOptions (CatalogUpdate Nothing) invalidations metadata + withNewInconsistentObjsCheck + $ buildSchemaCacheWithOptions (CatalogUpdate Nothing) invalidations metadata pure successMsg runIntrospectRemoteSchema :: @@ -186,9 +190,11 @@ runUpdateRemoteSchema env (AddRemoteSchemaQuery name defn comment) = do currentRMSchemaURL = _rsdUrl defn currentRMSchemaURLFromEnv = _rsdUrlFromEnv defn - unless (name `elem` remoteSchemaNames) $ - throw400 NotExists $ - "remote schema with name " <> name <<> " doesn't exist" + unless (name `elem` remoteSchemaNames) + $ throw400 NotExists + $ "remote schema with name " + <> name + <<> " doesn't exist" rsi <- validateRemoteSchemaDef env defn @@ -202,10 +208,11 @@ runUpdateRemoteSchema env (AddRemoteSchemaQuery name defn comment) = do -- This will throw an error if the new schema fetched in incompatible -- with the existing permissions and relations - withNewInconsistentObjsCheck $ - buildSchemaCacheFor (MORemoteSchema name) $ - MetadataModifier $ - metaRemoteSchemas %~ InsOrdHashMap.insert name (remoteSchemaMeta metadataRMSchemaPerms) + withNewInconsistentObjsCheck + $ buildSchemaCacheFor (MORemoteSchema name) + $ MetadataModifier + $ metaRemoteSchemas + %~ InsOrdHashMap.insert name (remoteSchemaMeta metadataRMSchemaPerms) pure successMsg where diff --git a/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Permission.hs b/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Permission.hs index 9a39b0b58b6..184bce5f6aa 100644 --- a/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Permission.hs +++ b/server/src-lib/Hasura/RemoteSchema/MetadataAPI/Permission.hs @@ -58,23 +58,30 @@ runAddRemoteSchemaPermissions :: runAddRemoteSchemaPermissions remoteSchemaPermsCtx q = do metadata <- getMetadata unless (remoteSchemaPermsCtx == Options.EnableRemoteSchemaPermissions) $ do - throw400 ConstraintViolation $ - "remote schema permissions can only be added when " - <> "remote schema permissions are enabled in the graphql-engine" + throw400 ConstraintViolation + $ "remote schema permissions can only be added when " + <> "remote schema permissions are enabled in the graphql-engine" remoteSchemaMap <- scRemoteSchemas <$> askSchemaCache remoteSchemaCtx <- - onNothing (HashMap.lookup name remoteSchemaMap) $ - throw400 NotExists $ - "remote schema " <> name <<> " doesn't exist" - when (doesRemoteSchemaPermissionExist metadata name role) $ - throw400 AlreadyExists $ - "permissions for role: " - <> role <<> " for remote schema:" - <> name <<> " already exists" + onNothing (HashMap.lookup name remoteSchemaMap) + $ throw400 NotExists + $ "remote schema " + <> name + <<> " doesn't exist" + when 
(doesRemoteSchemaPermissionExist metadata name role) + $ throw400 AlreadyExists + $ "permissions for role: " + <> role + <<> " for remote schema:" + <> name + <<> " already exists" void $ resolveRoleBasedRemoteSchema role name (_rscIntroOriginal remoteSchemaCtx) providedSchemaDoc - buildSchemaCacheFor (MORemoteSchemaPermissions name role) $ - MetadataModifier $ - metaRemoteSchemas . ix name . rsmPermissions %~ (:) remoteSchemaPermMeta + buildSchemaCacheFor (MORemoteSchemaPermissions name role) + $ MetadataModifier + $ metaRemoteSchemas + . ix name + . rsmPermissions + %~ (:) remoteSchemaPermMeta pure successMsg where AddRemoteSchemaPermission name role defn comment = q @@ -97,15 +104,19 @@ runDropRemoteSchemaPermissions :: runDropRemoteSchemaPermissions (DropRemoteSchemaPermissions name roleName) = do metadata <- getMetadata remoteSchemaMap <- scRemoteSchemas <$> askSchemaCache - void $ - onNothing (HashMap.lookup name remoteSchemaMap) $ - throw400 NotExists $ - "remote schema " <> name <<> " doesn't exist" - unless (doesRemoteSchemaPermissionExist metadata name roleName) $ - throw400 NotExists $ - "permissions for role: " - <> roleName <<> " for remote schema:" - <> name <<> " doesn't exist" - buildSchemaCacheFor (MORemoteSchemaPermissions name roleName) $ - dropRemoteSchemaPermissionInMetadata name roleName + void + $ onNothing (HashMap.lookup name remoteSchemaMap) + $ throw400 NotExists + $ "remote schema " + <> name + <<> " doesn't exist" + unless (doesRemoteSchemaPermissionExist metadata name roleName) + $ throw400 NotExists + $ "permissions for role: " + <> roleName + <<> " for remote schema:" + <> name + <<> " doesn't exist" + buildSchemaCacheFor (MORemoteSchemaPermissions name roleName) + $ dropRemoteSchemaPermissionInMetadata name roleName pure successMsg diff --git a/server/src-lib/Hasura/RemoteSchema/SchemaCache/Build.hs b/server/src-lib/Hasura/RemoteSchema/SchemaCache/Build.hs index 6e3b7f5a899..74e18d9609b 100644 --- a/server/src-lib/Hasura/RemoteSchema/SchemaCache/Build.hs +++ b/server/src-lib/Hasura/RemoteSchema/SchemaCache/Build.hs @@ -147,9 +147,10 @@ buildRemoteSchemaPermissions = proc ((remoteSchemaName, originalIntrospection, o rolePermission <- onNothing (HashMap.lookup roleName accumulatedRolePermMap) $ do parentRolePermissions <- for (toList parentRoles) $ \role -> - onNothing (HashMap.lookup role accumulatedRolePermMap) $ - throw500 $ - "remote schema permissions: bad ordering of roles, could not find the permission of role: " <>> role + onNothing (HashMap.lookup role accumulatedRolePermMap) + $ throw500 + $ "remote schema permissions: bad ordering of roles, could not find the permission of role: " + <>> role let combinedPermission = sconcat <$> nonEmpty parentRolePermissions pure $ fromMaybe CPUndefined combinedPermission pure $ HashMap.insert roleName rolePermission accumulatedRolePermMap @@ -181,7 +182,8 @@ buildRemoteSchemaPermissions = proc ((remoteSchemaName, originalIntrospection, o recordDependencies -< (metadataObject, schemaObject, pure dependency) returnA -< resolvedSchemaIntrospection ) - |) metadataObject + |) + metadataObject mkRemoteSchemaPermissionMetadataObject :: (RemoteSchemaName, RemoteSchemaPermissionMetadata) -> diff --git a/server/src-lib/Hasura/RemoteSchema/SchemaCache/Permission.hs b/server/src-lib/Hasura/RemoteSchema/SchemaCache/Permission.hs index f3a25b7fbe6..669bd525f55 100644 --- a/server/src-lib/Hasura/RemoteSchema/SchemaCache/Permission.hs +++ b/server/src-lib/Hasura/RemoteSchema/SchemaCache/Permission.hs @@ -280,8 +280,10 @@ 
showRoleBasedSchemaValidationError = \case graphQLType <<> ": " <> typeName <<> " does not exist in the upstream remote schema" NonMatchingDefaultValue inpObjName inpValName expectedVal providedVal -> "expected default value of input value: " - <> inpValName <<> "of input object " - <> inpObjName <<> " to be " + <> inpValName + <<> "of input object " + <> inpObjName + <<> " to be " <> defaultValueToText expectedVal <> " but received " <> defaultValueToText providedVal @@ -289,41 +291,48 @@ showRoleBasedSchemaValidationError = \case "input argument " <> inpArgName <<> " does not exist in the input object:" <>> inpObjName MissingNonNullableArguments fieldName nonNullableArgs -> "field: " - <> fieldName <<> " expects the following non nullable arguments to " + <> fieldName + <<> " expects the following non nullable arguments to " <> "be present: " <> englishList "and" (fmap dquote nonNullableArgs) NonExistingDirectiveArgument parentName parentType directiveName nonExistingArgs -> "the following directive argument(s) defined in the directive: " <> directiveName - <<> " defined with the type name: " + <<> " defined with the type name: " <> parentName - <<> " of type " + <<> " of type " <> parentType - <<> " do not exist in the corresponding upstream directive: " + <<> " do not exist in the corresponding upstream directive: " <> englishList "and" (fmap dquote nonExistingArgs) NonExistingField (fldDefnType, parentTypeName) providedName -> "field " - <> providedName <<> " does not exist in the " - <> fldDefnType <<> ": " <>> parentTypeName + <> providedName + <<> " does not exist in the " + <> fldDefnType + <<> ": " + <>> parentTypeName NonExistingUnionMemberTypes unionName nonExistingMembers -> "union " - <> unionName <<> " contains members which do not exist in the members" + <> unionName + <<> " contains members which do not exist in the members" <> " of the remote schema union :" <> englishList "and" (fmap dquote nonExistingMembers) CustomInterfacesNotAllowed objName customInterfaces -> "custom interfaces are not supported. 
" <> "Object" <> objName - <<> " implements the following custom interfaces: " + <<> " implements the following custom interfaces: " <> englishList "and" (fmap dquote customInterfaces) ObjectImplementsNonExistingInterfaces objName nonExistentInterfaces -> "object " - <> objName <<> " is trying to implement the following interfaces" + <> objName + <<> " is trying to implement the following interfaces" <> " that do not exist in the corresponding upstream remote object: " <> englishList "and" (fmap dquote nonExistentInterfaces) NonExistingEnumValues enumName nonExistentEnumVals -> "enum " - <> enumName <<> " contains the following enum values that do not exist " + <> enumName + <<> " contains the following enum values that do not exist " <> "in the corresponding upstream remote enum: " <> englishList "and" (fmap dquote nonExistentEnumVals) MissingQueryRoot -> "query root does not exist in the schema definition" @@ -335,21 +344,27 @@ showRoleBasedSchemaValidationError = \case "duplicate directives: " <> englishList "and" (fmap dquote directiveNames) <> "found in the " - <> parentType <<> " " <>> parentName + <> parentType + <<> " " + <>> parentName DuplicateFields (parentType, parentName) fieldNames -> "duplicate fields: " <> englishList "and" (fmap dquote fieldNames) <> "found in the " - <> parentType <<> " " <>> parentName + <> parentType + <<> " " + <>> parentName DuplicateArguments fieldName args -> "duplicate arguments: " <> englishList "and" (fmap dquote args) - <> "found in the field: " <>> fieldName + <> "found in the field: " + <>> fieldName DuplicateEnumValues enumName enumValues -> "duplicate enum values: " <> englishList "and" (fmap dquote enumValues) <> " found in the " - <> enumName <<> " enum" + <> enumName + <<> " enum" InvalidPresetDirectiveLocation -> "Preset directives can be defined only on INPUT_FIELD_DEFINITION or ARGUMENT_DEFINITION" MultiplePresetDirectives (parentType, parentName) -> @@ -362,21 +377,21 @@ showRoleBasedSchemaValidationError = \case ExpectedEnumValue typeName presetValue -> "expected preset value " <> presetValue - <<> " of type " + <<> " of type " <> typeName - <<> " to be an enum value" + <<> " to be an enum value" ExpectedScalarValue typeName presetValue -> "expected preset value " <> presetValue - <<> " of type " + <<> " of type " <> typeName - <<> " to be a scalar value" + <<> " to be a scalar value" ExpectedInputObject typeName presetValue -> "expected preset value " <> presetValue - <<> " of type " + <<> " of type " <> typeName - <<> " to be an input object value" + <<> " to be an input object value" KeyDoesNotExistInInputObject key' inpObjTypeName -> key' <<> " does not exist in the input object " <>> inpObjTypeName DisallowSessionVarForListType name -> @@ -385,10 +400,11 @@ showRoleBasedSchemaValidationError = \case "expected preset static value to be a Boolean value" UnexpectedNonMatchingNames providedName upstreamName gType -> "unexpected: trying to compare " - <> gType <<> " with name " + <> gType + <<> " with name " <> providedName - <<> " with " - <>> upstreamName + <<> " with " + <>> upstreamName where defaultValueToText = \case Just defaultValue -> toTxt defaultValue @@ -428,16 +444,16 @@ lookupInputType (G.SchemaDocument types) name = go types case typeDef of G.TypeDefinitionScalar (G.ScalarTypeDefinition _ scalarName _) -> if - | name == scalarName -> Just $ PresetScalar scalarName - | otherwise -> go tps + | name == scalarName -> Just $ PresetScalar scalarName + | otherwise -> go tps G.TypeDefinitionEnum (G.EnumTypeDefinition _ 
enumName _ vals) -> if - | name == enumName -> Just $ PresetEnum enumName $ map G._evdName vals - | otherwise -> go tps + | name == enumName -> Just $ PresetEnum enumName $ map G._evdName vals + | otherwise -> go tps G.TypeDefinitionInputObject (G.InputObjectTypeDefinition _ inpObjName _ vals) -> if - | name == inpObjName -> Just $ PresetInputObject vals - | otherwise -> go tps + | name == inpObjName -> Just $ PresetInputObject vals + | otherwise -> go tps _ -> go tps go [] = Nothing @@ -471,10 +487,10 @@ parsePresetValue gType varName isStatic value = do G.VString t -> case (isSessionVariable t && (not isStatic)) of True -> - pure $ - G.VVariable $ - SessionPresetVariable (mkSessionVariable t) scalarTypeName $ - SessionArgumentPresetScalar + pure + $ G.VVariable + $ SessionPresetVariable (mkSessionVariable t) scalarTypeName + $ SessionArgumentPresetScalar False -> pure $ G.VString t G.VList _ -> refute $ pure $ ExpectedScalarValue typeName value G.VObject _ -> refute $ pure $ ExpectedScalarValue typeName value @@ -488,11 +504,11 @@ parsePresetValue gType varName isStatic value = do G.VString t -> case isSessionVariable t of True -> - pure $ - G.VVariable $ - SessionPresetVariable (mkSessionVariable t) enumTypeName $ - SessionArgumentPresetEnum $ - S.fromList enumVals + pure + $ G.VVariable + $ SessionPresetVariable (mkSessionVariable t) enumTypeName + $ SessionArgumentPresetEnum + $ S.fromList enumVals False -> refute $ pure $ ExpectedEnumValue typeName value _ -> refute $ pure $ ExpectedEnumValue typeName value Just (PresetInputObject inputValueDefinitions) -> @@ -527,26 +543,26 @@ parsePresetDirective :: m (G.Value RemoteSchemaVariable) parsePresetDirective gType parentArgName (G.Directive _name args) = do if - | HashMap.null args -> refute $ pure $ NoPresetArgumentFound - | otherwise -> do - val <- - onNothing (HashMap.lookup Name._value args) $ - refute $ - pure $ - InvalidPresetArgument parentArgName - isStatic <- - case (HashMap.lookup Name._static args) of - Nothing -> pure False - (Just (G.VBoolean b)) -> pure b - _ -> refute $ pure $ InvalidStaticValue - parsePresetValue gType parentArgName isStatic val + | HashMap.null args -> refute $ pure $ NoPresetArgumentFound + | otherwise -> do + val <- + onNothing (HashMap.lookup Name._value args) + $ refute + $ pure + $ InvalidPresetArgument parentArgName + isStatic <- + case (HashMap.lookup Name._static args) of + Nothing -> pure False + (Just (G.VBoolean b)) -> pure b + _ -> refute $ pure $ InvalidStaticValue + parsePresetValue gType parentArgName isStatic val -- | validateDirective checks if the arguments of a given directive -- is a subset of the corresponding upstream directive arguments -- *NOTE*: This function assumes that the `providedDirective` and the -- `upstreamDirective` have the same name. 
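-- A minimal, illustrative sketch, not part of this patch: every validator in
-- this module follows the same monad-validate pattern whose ($)-chains the
-- formatter now breaks onto leading-operator lines. 'dispute' records an
-- error and continues; 'refute' records one and aborts. The 'checkNames'
-- helper and its [String] error type are hypothetical stand-ins.
--
-- import Control.Monad (when)
-- import Control.Monad.Validate (MonadValidate (..), Validate, runValidate)
--
-- checkNames :: (MonadValidate [String] m) => String -> String -> m String
-- checkNames provided upstream = do
--   when (provided /= upstream)
--     $ dispute
--     $ pure ("non-matching names: " <> provided <> " vs " <> upstream)
--   pure provided
--
-- >>> runValidate (checkNames "a" "b" :: Validate [String] String)
-- Left ["non-matching names: a vs b"]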
validateDirective :: - MonadValidate [RoleBasedSchemaValidationError] m => + (MonadValidate [RoleBasedSchemaValidationError] m) => -- | provided directive G.Directive a -> -- | upstream directive @@ -555,14 +571,14 @@ validateDirective :: (GraphQLType, G.Name) -> m () validateDirective providedDirective upstreamDirective (parentType, parentTypeName) = do - when (providedName /= upstreamName) $ - dispute $ - pure $ - UnexpectedNonMatchingNames providedName upstreamName Directive + when (providedName /= upstreamName) + $ dispute + $ pure + $ UnexpectedNonMatchingNames providedName upstreamName Directive for_ (NE.nonEmpty $ HashMap.keys argsDiff) $ \argNames -> - dispute $ - pure $ - NonExistingDirectiveArgument parentTypeName parentType providedName argNames + dispute + $ pure + $ NonExistingDirectiveArgument parentTypeName parentType providedName argNames where argsDiff = HashMap.difference providedDirectiveArgs upstreamDirectiveArgs @@ -573,7 +589,7 @@ validateDirective providedDirective upstreamDirective (parentType, parentTypeNam -- are a subset of `upstreamDirectives` and then validate -- each of the directives by calling the `validateDirective` validateDirectives :: - MonadValidate [RoleBasedSchemaValidationError] m => + (MonadValidate [RoleBasedSchemaValidationError] m) => [G.Directive a] -> [G.Directive a] -> G.TypeSystemDirectiveLocation -> @@ -585,10 +601,10 @@ validateDirectives providedDirectives upstreamDirectives directiveLocation paren for_ nonPresetDirectives $ \dir -> do let directiveName = G._dName dir upstreamDir <- - onNothing (HashMap.lookup directiveName upstreamDirectivesMap) $ - refute $ - pure $ - TypeDoesNotExist Directive directiveName + onNothing (HashMap.lookup directiveName upstreamDirectivesMap) + $ refute + $ pure + $ TypeDoesNotExist Directive directiveName validateDirective dir upstreamDir parentType case presetDirectives of [] -> pure Nothing @@ -625,10 +641,10 @@ validateEnumTypeDefinition :: G.EnumTypeDefinition -> m G.EnumTypeDefinition validateEnumTypeDefinition providedEnum upstreamEnum = do - when (providedName /= upstreamName) $ - dispute $ - pure $ - UnexpectedNonMatchingNames providedName upstreamName Enum + when (providedName /= upstreamName) + $ dispute + $ pure + $ UnexpectedNonMatchingNames providedName upstreamName Enum void $ validateDirectives providedDirectives upstreamDirectives G.TSDLENUM $ (Enum, providedName) for_ (NE.nonEmpty $ S.toList $ duplicates providedEnumValNames) $ \dups -> do refute $ pure $ DuplicateEnumValues providedName dups @@ -659,25 +675,25 @@ validateInputValueDefinition :: G.Name -> m RemoteSchemaInputValueDefinition validateInputValueDefinition providedDefn upstreamDefn inputObjectName = do - when (providedName /= upstreamName) $ - dispute $ - pure $ - UnexpectedNonMatchingNames providedName upstreamName (Argument InputObjectArgument) + when (providedName /= upstreamName) + $ dispute + $ pure + $ UnexpectedNonMatchingNames providedName upstreamName (Argument InputObjectArgument) presetDirective <- - validateDirectives providedDirectives upstreamDirectives G.TSDLINPUT_FIELD_DEFINITION $ - (Argument InputObjectArgument, inputObjectName) - when (providedType /= upstreamType) $ - dispute $ - pure $ - NonMatchingType providedName (Argument InputObjectArgument) upstreamType providedType - when (providedDefaultValue /= upstreamDefaultValue) $ - dispute $ - pure $ - NonMatchingDefaultValue - inputObjectName - providedName - upstreamDefaultValue - providedDefaultValue + validateDirectives providedDirectives upstreamDirectives 
G.TSDLINPUT_FIELD_DEFINITION + $ (Argument InputObjectArgument, inputObjectName) + when (providedType /= upstreamType) + $ dispute + $ pure + $ NonMatchingType providedName (Argument InputObjectArgument) upstreamType providedType + when (providedDefaultValue /= upstreamDefaultValue) + $ dispute + $ pure + $ NonMatchingDefaultValue + inputObjectName + providedName + upstreamDefaultValue + providedDefaultValue presetArguments <- for presetDirective $ parsePresetDirective providedType providedName pure $ RemoteSchemaInputValueDefinition providedDefn presetArguments where @@ -702,10 +718,10 @@ validateArguments providedArgs upstreamArgs parentTypeName = do refute $ pure $ MissingNonNullableArguments parentTypeName nonNullableArgs for providedArgs $ \providedArg@(G.InputValueDefinition _ name _ _ _) -> do upstreamArg <- - onNothing (HashMap.lookup name upstreamArgsMap) $ - refute $ - pure $ - NonExistingInputArgument parentTypeName name + onNothing (HashMap.lookup name upstreamArgsMap) + $ refute + $ pure + $ NonExistingInputArgument parentTypeName name validateInputValueDefinition providedArg upstreamArg parentTypeName where upstreamArgsMap = mapFromL G._ivdName $ map _rsitdDefinition upstreamArgs @@ -722,10 +738,10 @@ validateInputObjectTypeDefinition :: G.InputObjectTypeDefinition RemoteSchemaInputValueDefinition -> m (G.InputObjectTypeDefinition RemoteSchemaInputValueDefinition) validateInputObjectTypeDefinition providedInputObj upstreamInputObj = do - when (providedName /= upstreamName) $ - dispute $ - pure $ - UnexpectedNonMatchingNames providedName upstreamName InputObject + when (providedName /= upstreamName) + $ dispute + $ pure + $ UnexpectedNonMatchingNames providedName upstreamName InputObject void $ validateDirectives providedDirectives upstreamDirectives G.TSDLINPUT_OBJECT $ (InputObject, providedName) args <- validateArguments providedArgs upstreamArgs $ providedName pure $ providedInputObj {G._iotdValueDefinitions = args} @@ -743,15 +759,15 @@ validateFieldDefinition :: (FieldDefinitionType, G.Name) -> m (G.FieldDefinition RemoteSchemaInputValueDefinition) validateFieldDefinition providedFieldDefinition upstreamFieldDefinition (parentType, parentTypeName) = do - when (providedName /= upstreamName) $ - dispute $ - pure $ - UnexpectedNonMatchingNames providedName upstreamName (Field parentType) + when (providedName /= upstreamName) + $ dispute + $ pure + $ UnexpectedNonMatchingNames providedName upstreamName (Field parentType) void $ validateDirectives providedDirectives upstreamDirectives G.TSDLFIELD_DEFINITION $ (Field parentType, parentTypeName) - when (providedType /= upstreamType) $ - dispute $ - pure $ - NonMatchingType providedName (Field parentType) upstreamType providedType + when (providedType /= upstreamType) + $ dispute + $ pure + $ NonMatchingType providedName (Field parentType) upstreamType providedType args <- validateArguments providedArgs upstreamArgs $ providedName pure $ providedFieldDefinition {G._fldArgumentsDefinition = args} where @@ -773,10 +789,10 @@ validateFieldDefinitions providedFldDefnitions upstreamFldDefinitions parentType refute $ pure $ DuplicateFields parentType dups for providedFldDefnitions $ \fldDefn@(G.FieldDefinition _ name _ _ _) -> do upstreamFldDefn <- - onNothing (HashMap.lookup name upstreamFldDefinitionsMap) $ - refute $ - pure $ - NonExistingField parentType name + onNothing (HashMap.lookup name upstreamFldDefinitionsMap) + $ refute + $ pure + $ NonExistingField parentType name validateFieldDefinition fldDefn upstreamFldDefn 
parentType where upstreamFldDefinitionsMap = mapFromL G._fldName upstreamFldDefinitions @@ -789,10 +805,10 @@ validateInterfaceDefinition :: G.InterfaceTypeDefinition [G.Name] RemoteSchemaInputValueDefinition -> m (G.InterfaceTypeDefinition () RemoteSchemaInputValueDefinition) validateInterfaceDefinition providedInterfaceDefn upstreamInterfaceDefn = do - when (providedName /= upstreamName) $ - dispute $ - pure $ - UnexpectedNonMatchingNames providedName upstreamName Interface + when (providedName /= upstreamName) + $ dispute + $ pure + $ UnexpectedNonMatchingNames providedName upstreamName Interface void $ validateDirectives providedDirectives upstreamDirectives G.TSDLINTERFACE $ (Interface, providedName) fieldDefinitions <- validateFieldDefinitions providedFieldDefns upstreamFieldDefns $ (InterfaceField, providedName) pure $ providedInterfaceDefn {G._itdFieldsDefinition = fieldDefinitions} @@ -802,15 +818,15 @@ validateInterfaceDefinition providedInterfaceDefn upstreamInterfaceDefn = do G.InterfaceTypeDefinition _ upstreamName upstreamDirectives upstreamFieldDefns _ = upstreamInterfaceDefn validateScalarDefinition :: - MonadValidate [RoleBasedSchemaValidationError] m => + (MonadValidate [RoleBasedSchemaValidationError] m) => G.ScalarTypeDefinition -> G.ScalarTypeDefinition -> m G.ScalarTypeDefinition validateScalarDefinition providedScalar upstreamScalar = do - when (providedName /= upstreamName) $ - dispute $ - pure $ - UnexpectedNonMatchingNames providedName upstreamName Scalar + when (providedName /= upstreamName) + $ dispute + $ pure + $ UnexpectedNonMatchingNames providedName upstreamName Scalar void $ validateDirectives providedDirectives upstreamDirectives G.TSDLSCALAR $ (Scalar, providedName) pure providedScalar where @@ -819,15 +835,15 @@ validateScalarDefinition providedScalar upstreamScalar = do G.ScalarTypeDefinition _ upstreamName upstreamDirectives = upstreamScalar validateUnionDefinition :: - MonadValidate [RoleBasedSchemaValidationError] m => + (MonadValidate [RoleBasedSchemaValidationError] m) => G.UnionTypeDefinition -> G.UnionTypeDefinition -> m G.UnionTypeDefinition validateUnionDefinition providedUnion upstreamUnion = do - when (providedName /= upstreamName) $ - dispute $ - pure $ - UnexpectedNonMatchingNames providedName upstreamName Union + when (providedName /= upstreamName) + $ dispute + $ pure + $ UnexpectedNonMatchingNames providedName upstreamName Union void $ validateDirectives providedDirectives upstreamDirectives G.TSDLUNION $ (Union, providedName) for_ (NE.nonEmpty $ S.toList memberTypesDiff) $ \nonExistingMembers -> refute $ pure $ NonExistingUnionMemberTypes providedName nonExistingMembers @@ -849,10 +865,10 @@ validateObjectDefinition :: S.HashSet G.Name -> m (G.ObjectTypeDefinition RemoteSchemaInputValueDefinition) validateObjectDefinition providedObj upstreamObj interfacesDeclared = do - when (providedName /= upstreamName) $ - dispute $ - pure $ - UnexpectedNonMatchingNames providedName upstreamName Object + when (providedName /= upstreamName) + $ dispute + $ pure + $ UnexpectedNonMatchingNames providedName upstreamName Object void $ validateDirectives providedDirectives upstreamDirectives G.TSDLOBJECT $ (Object, providedName) for_ (NE.nonEmpty $ S.toList customInterfaces) $ \ifaces -> dispute $ pure $ CustomInterfacesNotAllowed providedName ifaces @@ -941,9 +957,9 @@ getSchemaDocIntrospection providedTypeDefns (queryRoot, mutationRoot, subscripti providedType <- providedTypeDefns case providedType of G.TypeDefinitionInterface 
interface@(G.InterfaceTypeDefinition _ name _ _ _) -> - pure $ - G.TypeDefinitionInterface $ - interface {G._itdPossibleTypes = concat $ maybeToList (HashMap.lookup name possibleTypesMap)} + pure + $ G.TypeDefinitionInterface + $ interface {G._itdPossibleTypes = concat $ maybeToList (HashMap.lookup name possibleTypesMap)} G.TypeDefinitionScalar scalar -> pure $ G.TypeDefinitionScalar scalar G.TypeDefinitionEnum enum -> pure $ G.TypeDefinitionEnum enum G.TypeDefinitionObject obj -> pure $ G.TypeDefinitionObject obj @@ -971,8 +987,9 @@ validateRemoteSchema upstreamRemoteSchemaIntrospection = do refute $ pure $ DuplicateTypeNames duplicateTypeNames rootTypeNames <- validateSchemaDefinitions providedSchemaDefinitions let providedInterfacesTypes = - S.fromList $ - flip mapMaybe providedTypeDefinitions $ \case + S.fromList + $ flip mapMaybe providedTypeDefinitions + $ \case G.TypeDefinitionInterface interface -> Just $ G._itdName interface _ -> Nothing validatedTypeDefinitions <- @@ -1018,7 +1035,7 @@ validateRemoteSchema upstreamRemoteSchemaIntrospection = do typeNotFound gType name = refute (pure $ TypeDoesNotExist gType name) resolveRoleBasedRemoteSchema :: - MonadError QErr m => + (MonadError QErr m) => RoleName -> RemoteSchemaName -> IntrospectionResult -> @@ -1027,14 +1044,15 @@ resolveRoleBasedRemoteSchema :: resolveRoleBasedRemoteSchema roleName remoteSchemaName remoteSchemaIntrospection (G.SchemaDocument providedTypeDefns) = do when (roleName == adminRoleName) $ throw400 ConstraintViolation $ "cannot define permission for admin role" let providedSchemaDocWithDefaultScalars = - G.SchemaDocument $ - providedTypeDefns <> (map (G.TypeSystemDefinitionType . G.TypeDefinitionScalar) defaultScalars) + G.SchemaDocument + $ providedTypeDefns + <> (map (G.TypeSystemDefinitionType . G.TypeDefinitionScalar) defaultScalars) introspectionRes <- flip onLeft (throw400 ValidationFailed . showErrors) =<< runValidateT - ( flip runReaderT providedSchemaDocWithDefaultScalars $ - validateRemoteSchema $ - irDoc remoteSchemaIntrospection + ( flip runReaderT providedSchemaDocWithDefaultScalars + $ validateRemoteSchema + $ irDoc remoteSchemaIntrospection ) pure (introspectionRes, schemaDependency) where @@ -1052,5 +1070,6 @@ resolveRoleBasedRemoteSchema roleName remoteSchemaName remoteSchemaIntrospection schemaDependency = SchemaDependency (SORemoteSchema remoteSchemaName) DRRemoteSchema defaultScalars = - map (\n -> G.ScalarTypeDefinition Nothing n []) . toList $ - GName.builtInScalars + map (\n -> G.ScalarTypeDefinition Nothing n []) + . 
toList + $ GName.builtInScalars diff --git a/server/src-lib/Hasura/RemoteSchema/SchemaCache/RemoteRelationship.hs b/server/src-lib/Hasura/RemoteSchema/SchemaCache/RemoteRelationship.hs index fdf9c24370a..f425294b208 100644 --- a/server/src-lib/Hasura/RemoteSchema/SchemaCache/RemoteRelationship.hs +++ b/server/src-lib/Hasura/RemoteSchema/SchemaCache/RemoteRelationship.hs @@ -67,9 +67,9 @@ errorToText = \case Just allowedFields -> ", the allowed fields are " <> englishList "or" allowedFields in "field with name " <> fieldName - <<> "is not provided by the lhs entity" - <>> lhs - <<> "for defining a join condition" + <<> "is not provided by the lhs entity" + <>> lhs + <<> "for defining a join condition" <> helpText ExpectedTypeButGot expTy actualTy -> "expected type " <> G.getBaseType expTy <<> " but got " <>> G.getBaseType actualTy @@ -107,38 +107,38 @@ validateToSchemaRelationship :: validateToSchemaRelationship schema lhsIdentifier name (remoteSchemaInfo, introspectionResult) lhsJoinFields = do let remoteSchemaName = _trrdRemoteSchema schema requiredLHSJoinFields <- forM (toList $ _trrdLhsFields schema) $ \fieldName -> do - fmap (fieldName,) $ - onNothing (HashMap.lookup fieldName lhsJoinFields) $ - throwError $ - JoinFieldNonExistent lhsIdentifier fieldName $ - HashMap.keysSet lhsJoinFields + fmap (fieldName,) + $ onNothing (HashMap.lookup fieldName lhsJoinFields) + $ throwError + $ JoinFieldNonExistent lhsIdentifier fieldName + $ HashMap.keysSet lhsJoinFields hasuraFieldsVariablesMap <- fmap HashMap.fromList $ for requiredLHSJoinFields $ \(fieldName, field) -> (,field) <$> hasuraFieldToVariable fieldName let schemaDoc = irDoc introspectionResult queryRootName = irQueryRoot introspectionResult queryRoot <- - onNothing (lookupObject schemaDoc queryRootName) $ - throwError $ - FieldNotFoundInRemoteSchema queryRootName + onNothing (lookupObject schemaDoc queryRootName) + $ throwError + $ FieldNotFoundInRemoteSchema queryRootName (_, (leafParamMap, leafTypeMap)) <- foldlM (buildRelationshipTypeInfo hasuraFieldsVariablesMap schemaDoc) (queryRoot, (mempty, mempty)) (unRemoteFields $ _trrdRemoteField schema) - pure $ - ( HashMap.fromList requiredLHSJoinFields, - RemoteSchemaFieldInfo - { _rrfiName = name, - _rrfiParamMap = leafParamMap, - _rrfiRemoteFields = _trrdRemoteField schema, - _rrfiRemoteSchema = remoteSchemaInfo, - -- adding the new input types after stripping the values of the - -- schema document - _rrfiInputValueDefinitions = HashMap.elems leafTypeMap, - _rrfiRemoteSchemaName = remoteSchemaName, - _rrfiLHSIdentifier = lhsIdentifier - } - ) + pure + $ ( HashMap.fromList requiredLHSJoinFields, + RemoteSchemaFieldInfo + { _rrfiName = name, + _rrfiParamMap = leafParamMap, + _rrfiRemoteFields = _trrdRemoteField schema, + _rrfiRemoteSchema = remoteSchemaInfo, + -- adding the new input types after stripping the values of the + -- schema document + _rrfiInputValueDefinitions = HashMap.elems leafTypeMap, + _rrfiRemoteSchemaName = remoteSchemaName, + _rrfiLHSIdentifier = lhsIdentifier + } + ) where getObjTyInfoFromField :: RemoteSchemaIntrospection -> @@ -193,10 +193,10 @@ validateToSchemaRelationship schema lhsIdentifier name (remoteSchemaInfo, intros typeMap (newParamMap, newTypeMap) <- onLeft eitherParamAndTypeMap throwError innerObjTyInfo <- - onNothing (getObjTyInfoFromField schemaDoc objFldDefinition) $ - bool - ( throwError $ - InvalidType (G._fldType objFldDefinition) "only output type is expected" + onNothing (getObjTyInfoFromField schemaDoc objFldDefinition) + $ bool + ( throwError + 
$ InvalidType (G._fldType objFldDefinition) "only output type is expected" ) (pure objTyInfo) (isValidType schemaDoc objFldDefinition) @@ -224,15 +224,15 @@ stripInMap :: (Either ValidationError) (HashMap.HashMap G.Name RemoteSchemaInputValueDefinition) stripInMap relName lhsIdentifier types schemaArguments providedArguments = - fmap catMaybes $ - HashMap.traverseWithKey + fmap catMaybes + $ HashMap.traverseWithKey ( \name remoteInpValDef@(RemoteSchemaInputValueDefinition inpValInfo _preset) -> case HashMap.lookup name providedArguments of Nothing -> pure $ Just remoteInpValDef Just value -> do maybeNewGType <- stripValue relName lhsIdentifier types (G._ivdType inpValInfo) value - pure $ - fmap + pure + $ fmap ( \newGType -> let newInpValInfo = inpValInfo {G._ivdType = newGType} in RemoteSchemaInputValueDefinition newInpValInfo Nothing @@ -332,7 +332,7 @@ stripObject name lhsIdentifier schemaDoc originalGtype templateArguments = -- types for a remote relationship. -- TODO: Consider a separator character to avoid conflicts. renameTypeForRelationship :: - MonadError ValidationError m => + (MonadError ValidationError m) => RelName -> LHSIdentifier -> G.Name -> @@ -344,8 +344,11 @@ renameTypeForRelationship (relNameToTxt -> relTxt) lhsIdentifier name = do relName <- G.mkName relTxt `onNothing` throwError (InvalidGraphQLName relTxt) - pure $ - name <> Name.__remote_rel_ <> lhsName <> relName + pure + $ name + <> Name.__remote_rel_ + <> lhsName + <> relName -- | Convert a field name to a variable name. hasuraFieldToVariable :: @@ -472,22 +475,22 @@ isTypeCoercible actualType expectedType = let (actualBaseType, actualNestingLevel) = getBaseTyWithNestedLevelsCount actualType (expectedBaseType, expectedNestingLevel) = getBaseTyWithNestedLevelsCount expectedType in if - | expectedBaseType == GName._ID -> - bool - (throwError $ IDTypeJoin actualBaseType) - (pure ()) - -- Check under `Input Coercion` https://spec.graphql.org/June2018/#sec-ID - -- We can also include the `ID` type in the below list but it will be - -- extraneous because at the time of writing this, we don't generate - -- the `ID` type in the DB schema - ( G.unName actualBaseType - `elem` ["ID", "Int", "String", "bigint", "smallint", "uuid"] - ) - | actualBaseType /= expectedBaseType -> raiseValidationError - -- we cannot coerce two types with different nesting levels, - -- for example, we cannot coerce [Int] to [[Int]] - | (actualNestingLevel == expectedNestingLevel || actualNestingLevel == 0) -> pure () - | otherwise -> raiseValidationError + | expectedBaseType == GName._ID -> + bool + (throwError $ IDTypeJoin actualBaseType) + (pure ()) + -- Check under `Input Coercion` https://spec.graphql.org/June2018/#sec-ID + -- We can also include the `ID` type in the below list but it will be + -- extraneous because at the time of writing this, we don't generate + -- the `ID` type in the DB schema + ( G.unName actualBaseType + `elem` ["ID", "Int", "String", "bigint", "smallint", "uuid"] + ) + | actualBaseType /= expectedBaseType -> raiseValidationError + -- we cannot coerce two types with different nesting levels, + -- for example, we cannot coerce [Int] to [[Int]] + | (actualNestingLevel == expectedNestingLevel || actualNestingLevel == 0) -> pure () + | otherwise -> raiseValidationError where raiseValidationError = throwError $ ExpectedTypeButGot expectedType actualType diff --git a/server/src-lib/Hasura/RemoteSchema/SchemaCache/Types.hs b/server/src-lib/Hasura/RemoteSchema/SchemaCache/Types.hs index 3cad9b7a7df..6aa0778097c 100644 --- 
a/server/src-lib/Hasura/RemoteSchema/SchemaCache/Types.hs +++ b/server/src-lib/Hasura/RemoteSchema/SchemaCache/Types.hs @@ -200,9 +200,10 @@ validateRemoteSchemaCustomization (Just RemoteSchemaCustomization {..}) = for_ _rscFieldNames $ \fieldCustomizations -> for_ fieldCustomizations $ \RemoteFieldCustomization {..} -> for_ (HashMap.keys _rfcMapping) $ \fieldName -> - when (isReservedName fieldName) $ - throw400 InvalidParams $ - "attempt to customize reserved field name " <>> fieldName + when (isReservedName fieldName) + $ throw400 InvalidParams + $ "attempt to customize reserved field name " + <>> fieldName where isReservedName = ("__" `T.isPrefixOf`) . G.unName @@ -412,10 +413,10 @@ $(J.deriveJSON hasuraJSON ''RemoteSchemaInfo) instance (J.ToJSON remoteFieldInfo) => J.ToJSON (RemoteSchemaCtxG remoteFieldInfo) where toJSON RemoteSchemaCtx {..} = - J.object $ - [ "name" J..= _rscName, - "info" J..= J.toJSON _rscInfo - ] + J.object + $ [ "name" J..= _rscName, + "info" J..= J.toJSON _rscInfo + ] instance J.ToJSON RemoteSchemaFieldInfo where toJSON RemoteSchemaFieldInfo {..} = diff --git a/server/src-lib/Hasura/SQL/AnyBackend.hs b/server/src-lib/Hasura/SQL/AnyBackend.hs index e3952145238..282a0c4bb6a 100644 --- a/server/src-lib/Hasura/SQL/AnyBackend.hs +++ b/server/src-lib/Hasura/SQL/AnyBackend.hs @@ -217,7 +217,7 @@ traverseBackend :: f. (AllBackendsSatisfy c, Functor f) => AnyBackend i -> - (forall b. c b => i b -> f (j b)) -> + (forall b. (c b) => i b -> f (j b)) -> f (AnyBackend j) traverseBackend e f = case e of PostgresVanillaValue x -> PostgresVanillaValue <$> f x @@ -232,7 +232,7 @@ mkAnyBackend :: forall (b :: BackendType) (i :: BackendType -> Type). - HasTag b => + (HasTag b) => i b -> AnyBackend i mkAnyBackend x = case backendTag @b of @@ -269,9 +269,9 @@ dispatchAnyBackend :: (c :: BackendType -> Constraint) (i :: BackendType -> Type) (r :: Type). - AllBackendsSatisfy c => + (AllBackendsSatisfy c) => AnyBackend i -> - (forall (b :: BackendType). c b => i b -> r) -> + (forall (b :: BackendType). (c b) => i b -> r) -> r dispatchAnyBackend e f = case e of PostgresVanillaValue x -> f x @@ -287,10 +287,10 @@ dispatchAnyBackendWithTwoConstraints :: (c2 :: BackendType -> Constraint) (i :: BackendType -> Type) (r :: Type). - AllBackendsSatisfy c1 => - AllBackendsSatisfy c2 => + (AllBackendsSatisfy c1) => + (AllBackendsSatisfy c2) => AnyBackend i -> - (forall (b :: BackendType). c1 b => c2 b => i b -> r) -> + (forall (b :: BackendType). (c1 b) => (c2 b) => i b -> r) -> r dispatchAnyBackendWithTwoConstraints e f = case e of PostgresVanillaValue x -> f x @@ -307,9 +307,9 @@ dispatchAnyBackend' :: (c :: Type -> Constraint) (i :: BackendType -> Type) (r :: Type). - i `SatisfiesForAllBackends` c => + (i `SatisfiesForAllBackends` c) => AnyBackend i -> - (forall (b :: BackendType). c (i b) => i b -> r) -> + (forall (b :: BackendType). (c (i b)) => i b -> r) -> r dispatchAnyBackend' e f = case e of PostgresVanillaValue x -> f x @@ -327,10 +327,10 @@ dispatchAnyBackend'' :: (c2 :: BackendType -> Constraint) (i :: BackendType -> Type) (r :: Type). - i `SatisfiesForAllBackends` c1 => - AllBackendsSatisfy c2 => + (i `SatisfiesForAllBackends` c1) => + (AllBackendsSatisfy c2) => AnyBackend i -> - (forall (b :: BackendType). c2 b => c1 (i b) => i b -> r) -> + (forall (b :: BackendType). (c2 b) => (c1 (i b)) => i b -> r) -> r dispatchAnyBackend'' e f = case e of PostgresVanillaValue x -> f x @@ -348,8 +348,8 @@ composeAnyBackend :: (c :: BackendType -> Constraint) (i :: BackendType -> Type) (r :: Type). 
- AllBackendsSatisfy c => - (forall (b :: BackendType). c b => i b -> i b -> r) -> + (AllBackendsSatisfy c) => + (forall (b :: BackendType). (c b) => i b -> i b -> r) -> AnyBackend i -> AnyBackend i -> r -> @@ -371,8 +371,8 @@ mergeAnyBackend :: forall (c :: Type -> Constraint) (i :: BackendType -> Type). - i `SatisfiesForAllBackends` c => - (forall (b :: BackendType). c (i b) => i b -> i b -> i b) -> + (i `SatisfiesForAllBackends` c) => + (forall (b :: BackendType). (c (i b)) => i b -> i b -> i b) -> AnyBackend i -> AnyBackend i -> AnyBackend i -> @@ -395,7 +395,7 @@ unpackAnyBackend :: forall (b :: BackendType) (i :: BackendType -> Type). - HasTag b => + (HasTag b) => AnyBackend i -> Maybe (i b) unpackAnyBackend exists = case (backendTag @b, exists) of @@ -431,7 +431,7 @@ dispatchAnyBackendArrow :: (arr :: Type -> Type -> Type) x. (ArrowChoice arr, AllBackendsSatisfy c1, AllBackendsSatisfy c2) => - (forall b. c1 b => c2 b => arr (i b, x) r) -> + (forall b. (c1 b) => (c2 b) => arr (i b, x) r) -> arr (AnyBackend i, x) r dispatchAnyBackendArrow arrow = proc (ab, x) -> do case ab of @@ -455,7 +455,7 @@ dispatchAnyBackendArrow arrow = proc (ab, x) -> do -- | Attempts to parse an 'AnyBackend' from a JSON value, using the provided -- backend information. parseAnyBackendFromJSON :: - i `SatisfiesForAllBackends` FromJSON => + (i `SatisfiesForAllBackends` FromJSON) => BackendType -> Value -> Parser (AnyBackend i) @@ -472,7 +472,7 @@ parseAnyBackendFromJSON backendKind value = case backendKind of -- argument. anyBackendCodec :: forall i. - i `SatisfiesForAllBackends` HasCodec => + (i `SatisfiesForAllBackends` HasCodec) => BackendType -> JSONCodec (AnyBackend i) anyBackendCodec backendKind = case backendKind of @@ -489,7 +489,7 @@ anyBackendCodec backendKind = case backendKind of -- used for debug purposes, as it has no way of inserting the backend kind in -- the output, since there's no guarantee that the output will be an object. 
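-- A minimal, illustrative sketch, not part of this patch, of how 'AnyBackend'
-- values are built and consumed with the dispatchers above. 'TableNameBox' is
-- a hypothetical stand-in for a real backend-indexed type, and the pragmas
-- and import locations are assumptions about the current module layout.
--
-- {-# LANGUAGE DataKinds, KindSignatures, TypeApplications #-}
-- import Hasura.RQL.Types.BackendType (BackendType (..), PostgresKind (..))
-- import Hasura.SQL.AnyBackend (AnyBackend, dispatchAnyBackend', mkAnyBackend, unpackAnyBackend)
--
-- newtype TableNameBox (b :: BackendType) = TableNameBox String
--   deriving (Show)
--
-- -- Wrap a backend-specific value, forgetting the backend at the type level:
-- boxed :: AnyBackend TableNameBox
-- boxed = mkAnyBackend @('Postgres 'Vanilla) (TableNameBox "authors")
--
-- -- Dispatch uniformly over whichever backend is inside:
-- describe :: AnyBackend TableNameBox -> String
-- describe ab = dispatchAnyBackend' @Show ab show
--
-- -- Unpacking succeeds only when the requested backend tag matches:
-- -- >>> unpackAnyBackend @('Postgres 'Vanilla) boxed
-- -- Just (TableNameBox "authors")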
debugAnyBackendToJSON :: - i `SatisfiesForAllBackends` ToJSON => + (i `SatisfiesForAllBackends` ToJSON) => AnyBackend i -> Value debugAnyBackendToJSON e = dispatchAnyBackend' @ToJSON e toJSON @@ -498,15 +498,15 @@ debugAnyBackendToJSON e = dispatchAnyBackend' @ToJSON e toJSON -- * Instances for 'AnyBackend' -deriving instance i `SatisfiesForAllBackends` Show => Show (AnyBackend i) +deriving instance (i `SatisfiesForAllBackends` Show) => Show (AnyBackend i) -deriving instance i `SatisfiesForAllBackends` Eq => Eq (AnyBackend i) +deriving instance (i `SatisfiesForAllBackends` Eq) => Eq (AnyBackend i) -deriving instance i `SatisfiesForAllBackends` Ord => Ord (AnyBackend i) +deriving instance (i `SatisfiesForAllBackends` Ord) => Ord (AnyBackend i) -instance i `SatisfiesForAllBackends` Hashable => Hashable (AnyBackend i) +instance (i `SatisfiesForAllBackends` Hashable) => Hashable (AnyBackend i) -instance i `SatisfiesForAllBackends` FromJSON => FromJSONKeyValue (AnyBackend i) where +instance (i `SatisfiesForAllBackends` FromJSON) => FromJSONKeyValue (AnyBackend i) where parseJSONKeyValue (backendTypeStr, value) = do backendType <- parseBackendTypeFromText $ Key.toText backendTypeStr parseAnyBackendFromJSON backendType value diff --git a/server/src-lib/Hasura/SQL/BackendMap.hs b/server/src-lib/Hasura/SQL/BackendMap.hs index 42d5e18e9e4..0d9c41cad09 100644 --- a/server/src-lib/Hasura/SQL/BackendMap.hs +++ b/server/src-lib/Hasura/SQL/BackendMap.hs @@ -42,9 +42,9 @@ newtype BackendMap (i :: BackendType -> Type) = BackendMap (Map BackendType (Any deriving stock (Generic) deriving newtype (Semigroup, Monoid) -deriving newtype instance i `SatisfiesForAllBackends` Show => Show (BackendMap i) +deriving newtype instance (i `SatisfiesForAllBackends` Show) => Show (BackendMap i) -deriving newtype instance i `SatisfiesForAllBackends` Eq => Eq (BackendMap i) +deriving newtype instance (i `SatisfiesForAllBackends` Eq) => Eq (BackendMap i) instance ( i `SatisfiesForAllBackends` HasCodec, @@ -53,8 +53,8 @@ instance HasCodec (BackendMap i) where codec = - AC.object ("BackendMap_" <> objectNameSuffix) $ - foldl' + AC.object ("BackendMap_" <> objectNameSuffix) + $ foldl' foldBackendType (pure mempty) supportedBackends @@ -80,10 +80,11 @@ instance let t = typeableName @(i 'DataConnector) in fromMaybe t $ stripSuffix "__DataConnector" t -instance i `SatisfiesForAllBackends` FromJSON => FromJSON (BackendMap i) where +instance (i `SatisfiesForAllBackends` FromJSON) => FromJSON (BackendMap i) where parseJSON = J.withObject "BackendMap" $ \obj -> do - BackendMap . Map.fromList + BackendMap + . Map.fromList <$> traverse ( \keyValue -> do out <- parseJSONKeyValue keyValue @@ -91,7 +92,7 @@ instance i `SatisfiesForAllBackends` FromJSON => FromJSON (BackendMap i) where ) (KeyMap.toList obj) -instance i `SatisfiesForAllBackends` ToJSON => ToJSON (BackendMap i) where +instance (i `SatisfiesForAllBackends` ToJSON) => ToJSON (BackendMap i) where toJSON (BackendMap backendMap) = J.object $ valueToPair <$> Map.elems backendMap where @@ -105,7 +106,7 @@ instance Select (BackendMap i) where select (BackendMapS (_ :: BackendTag b)) = lookup @b data BackendMapS i a where - BackendMapS :: forall (b :: BackendType) (i :: BackendType -> Type). HasTag b => BackendTag b -> BackendMapS i (Maybe (i b)) + BackendMapS :: forall (b :: BackendType) (i :: BackendType -> Type). 
(HasTag b) => BackendTag b -> BackendMapS i (Maybe (i b)) instance GEq (BackendMapS i) where BackendMapS a `geq` BackendMapS b = case a `geq` b of @@ -120,14 +121,14 @@ instance GCompare (BackendMapS i) where lookupD :: forall (b :: BackendType) (i :: BackendType -> Type). - HasTag b => + (HasTag b) => Dependency (BackendMap i) -> Dependency (Maybe (i b)) lookupD = selectD (BackendMapS (backendTag @b)) -------------------------------------------------------------------------------- -singleton :: forall b i. HasTag b => i b -> BackendMap i +singleton :: forall b i. (HasTag b) => i b -> BackendMap i singleton value = BackendMap $ Map.singleton (reify $ backendTag @b) (mkAnyBackend value) -- | Get a value from the map for the particular 'BackendType' 'b'. This function @@ -135,7 +136,7 @@ singleton value = BackendMap $ Map.singleton (reify $ backendTag @b) (mkAnyBacke -- @ -- lookup @('Postgres 'Vanilla) backendMap -- @ -lookup :: forall (b :: BackendType) i. HasTag b => BackendMap i -> Maybe (i b) +lookup :: forall (b :: BackendType) i. (HasTag b) => BackendMap i -> Maybe (i b) lookup (BackendMap backendMap) = Map.lookup (reify $ backendTag @b) backendMap >>= unpackAnyBackend @b @@ -158,7 +159,7 @@ modify f = alter \case -- value in a Map. -- -- In short : @lookup k (alter f k m) = f (lookup k m)@. -alter :: forall b i. HasTag b => (Maybe (i b) -> Maybe (i b)) -> BackendMap i -> BackendMap i +alter :: forall b i. (HasTag b) => (Maybe (i b) -> Maybe (i b)) -> BackendMap i -> BackendMap i alter f (BackendMap bmap) = BackendMap $ Map.alter (wrap . f . unwrap) (reify @b backendTag) bmap where wrap :: Maybe (i b) -> Maybe (AnyBackend i) @@ -169,7 +170,7 @@ alter f (BackendMap bmap) = BackendMap $ Map.alter (wrap . f . unwrap) (reify @b -- | The expression @a `overridesDeeply b@ applies the values from @a@ on top of the defaults @b@. -- In practice this should union the maps for each backend type. -overridesDeeply :: i `SatisfiesForAllBackends` Semigroup => BackendMap i -> BackendMap i -> BackendMap i +overridesDeeply :: (i `SatisfiesForAllBackends` Semigroup) => BackendMap i -> BackendMap i -> BackendMap i overridesDeeply (BackendMap a) (BackendMap b) = BackendMap (Map.unionWith override a b) where override a' b' = mergeAnyBackend @Semigroup (<>) a' b' a' diff --git a/server/src-lib/Hasura/SQL/GeoJSON.hs b/server/src-lib/Hasura/SQL/GeoJSON.hs index 1ce7ef40532..203e97a311c 100644 --- a/server/src-lib/Hasura/SQL/GeoJSON.hs +++ b/server/src-lib/Hasura/SQL/GeoJSON.hs @@ -44,8 +44,8 @@ instance J.FromJSON Position where then fail "A Position needs at least 2 elements" else -- here we are ignoring anything past 3 elements - return $ - Position + return + $ Position (arr `V.unsafeIndex` 0) (arr `V.unsafeIndex` 1) (arr V.!? 
2) @@ -110,8 +110,8 @@ instance J.FromJSON LinearRing where thrPos = arr `V.unsafeIndex` 2 rest = V.drop 3 arr let lastPos = V.last rest - unless (fstPos == lastPos) $ - fail "the first and last locations have to be equal for a LinearRing" + unless (fstPos == lastPos) + $ fail "the first and last locations have to be equal for a LinearRing" return $ LinearRing fstPos sndPos thrPos $ V.toList $ V.init rest instance J.ToJSON LinearRing where diff --git a/server/src-lib/Hasura/Server/API/Backend.hs b/server/src-lib/Hasura/Server/API/Backend.hs index 0927d3c6dfb..66cb2e9f158 100644 --- a/server/src-lib/Hasura/Server/API/Backend.hs +++ b/server/src-lib/Hasura/Server/API/Backend.hs @@ -62,9 +62,10 @@ commandParserWithExplicitParser parseJSONWithBackendKind expected constructor ba -- instance backtracks: if we used 'fail', we would not be able to distinguish between "this is -- the correct branch, the name matches, but the argument fails to parse, we must fail" and "this -- is not the command we were expecting here, it is fine to continue with another". - whenMaybe (expected == provided) $ - modifyFailure withDetails $ - constructor <$> (parseJSONWithBackendKind backendKind arguments J.Key "args") + whenMaybe (expected == provided) + $ modifyFailure withDetails + $ constructor + <$> (parseJSONWithBackendKind backendKind arguments J.Key "args") where withDetails internalErrorMessage = intercalate @@ -75,7 +76,7 @@ commandParserWithExplicitParser parseJSONWithBackendKind expected constructor ba ] commandParser :: - J.FromJSON a => + (J.FromJSON a) => -- | expected command name Text -> -- | corresponding parser @@ -84,7 +85,7 @@ commandParser :: commandParser = commandParserWithExplicitParser (const J.parseJSON) -- Ignore the backend source kind and just parse using the FromJSON instance commandParserWithBackendKind :: - FromJSONWithContext (BackendSourceKind b) a => + (FromJSONWithContext (BackendSourceKind b) a) => -- | expected command name Text -> -- | corresponding parser @@ -93,7 +94,7 @@ commandParserWithBackendKind :: commandParserWithBackendKind = commandParserWithExplicitParser parseJSONWithContext -sourceCommands :: forall (b :: BackendType). Backend b => [CommandParser b] +sourceCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] sourceCommands = [ commandParserWithBackendKind "add_source" $ RMAddSource . mkAnyBackend @b, commandParser "drop_source" $ RMDropSource, @@ -102,7 +103,7 @@ sourceCommands = commandParserWithBackendKind "update_source" $ RMUpdateSource . mkAnyBackend @b ] -tableCommands :: forall (b :: BackendType). Backend b => [CommandParser b] +tableCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] tableCommands = [ commandParser "get_source_tables" $ RMGetSourceTables . mkAnyBackend @b, commandParser "get_table_info" $ RMGetTableInfo . mkAnyBackend @b, @@ -112,7 +113,7 @@ tableCommands = commandParser "untrack_tables" $ RMUntrackTables . mkAnyBackend @b ] -tablePermissionsCommands :: forall (b :: BackendType). Backend b => [CommandParser b] +tablePermissionsCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] tablePermissionsCommands = [ commandParser "create_insert_permission" $ RMCreateInsertPermission . mkAnyBackend @b, commandParser "create_select_permission" $ RMCreateSelectPermission . mkAnyBackend @b, @@ -125,25 +126,25 @@ tablePermissionsCommands = commandParser "set_permission_comment" $ RMSetPermissionComment . mkAnyBackend @b ] -functionCommands :: forall (b :: BackendType). 
Backend b => [CommandParser b] +functionCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] functionCommands = [ commandParser "track_function" $ RMTrackFunction . mkAnyBackend @b, commandParser "untrack_function" $ RMUntrackFunction . mkAnyBackend @b, commandParser "set_function_customization" $ RMSetFunctionCustomization . mkAnyBackend @b ] -trackableCommands :: forall (b :: BackendType). Backend b => [CommandParser b] +trackableCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] trackableCommands = [ commandParser "get_source_trackables" $ RMGetSourceTrackables . mkAnyBackend @b ] -functionPermissionsCommands :: forall (b :: BackendType). Backend b => [CommandParser b] +functionPermissionsCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] functionPermissionsCommands = [ commandParser "create_function_permission" $ RMCreateFunctionPermission . mkAnyBackend @b, commandParser "drop_function_permission" $ RMDropFunctionPermission . mkAnyBackend @b ] -relationshipCommands :: forall (b :: BackendType). Backend b => [CommandParser b] +relationshipCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] relationshipCommands = [ commandParser "create_object_relationship" $ RMCreateObjectRelationship . mkAnyBackend @b, commandParser "create_array_relationship" $ RMCreateArrayRelationship . mkAnyBackend @b, @@ -153,14 +154,14 @@ relationshipCommands = commandParser "suggest_relationships" $ RMSuggestRelationships . mkAnyBackend @b ] -remoteRelationshipCommands :: forall (b :: BackendType). Backend b => [CommandParser b] +remoteRelationshipCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] remoteRelationshipCommands = [ commandParser "create_remote_relationship" $ RMCreateRemoteRelationship . mkAnyBackend @b, commandParser "update_remote_relationship" $ RMUpdateRemoteRelationship . mkAnyBackend @b, commandParser "delete_remote_relationship" $ RMDeleteRemoteRelationship . mkAnyBackend @b ] -eventTriggerCommands :: forall (b :: BackendType). Backend b => [CommandParser b] +eventTriggerCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] eventTriggerCommands = [ commandParser "invoke_event_trigger" $ RMInvokeEventTrigger . mkAnyBackend @b, commandParser "create_event_trigger" $ RMCreateEventTrigger . mkAnyBackend @b, @@ -171,32 +172,32 @@ eventTriggerCommands = commandParser "get_event_by_id" $ RMGetEventById . mkAnyBackend @b ] -computedFieldCommands :: forall (b :: BackendType). Backend b => [CommandParser b] +computedFieldCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] computedFieldCommands = [ commandParser "add_computed_field" $ RMAddComputedField . mkAnyBackend @b, commandParser "drop_computed_field" $ RMDropComputedField . mkAnyBackend @b ] -connectionTemplateCommands :: forall (b :: BackendType). Backend b => [CommandParser b] +connectionTemplateCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] connectionTemplateCommands = [ commandParser "test_connection_template" $ RMTestConnectionTemplate . mkAnyBackend @b ] -nativeQueriesCommands :: forall (b :: BackendType). Backend b => [CommandParser b] +nativeQueriesCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] nativeQueriesCommands = [ commandParser "get_native_query" $ RMGetNativeQuery . mkAnyBackend @b, commandParser "track_native_query" $ RMTrackNativeQuery . mkAnyBackend @b, commandParser "untrack_native_query" $ RMUntrackNativeQuery . 
mkAnyBackend @b ] -storedProceduresCommands :: forall (b :: BackendType). Backend b => [CommandParser b] +storedProceduresCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] storedProceduresCommands = [ commandParser "get_stored_procedure" $ RMGetStoredProcedure . mkAnyBackend @b, commandParser "track_stored_procedure" $ RMTrackStoredProcedure . mkAnyBackend @b, commandParser "untrack_stored_procedure" $ RMUntrackStoredProcedure . mkAnyBackend @b ] -logicalModelsCommands :: forall (b :: BackendType). Backend b => [CommandParser b] +logicalModelsCommands :: forall (b :: BackendType). (Backend b) => [CommandParser b] logicalModelsCommands = [ commandParser "get_logical_model" $ RMGetLogicalModel . mkAnyBackend @b, commandParser "track_logical_model" $ RMTrackLogicalModel . mkAnyBackend @b, diff --git a/server/src-lib/Hasura/Server/API/Config.hs b/server/src-lib/Hasura/Server/API/Config.hs index 216d64ec445..d40626129cb 100644 --- a/server/src-lib/Hasura/Server/API/Config.hs +++ b/server/src-lib/Hasura/Server/API/Config.hs @@ -105,9 +105,9 @@ runGetConfig where isPrometheusMetricsEnabled = METRICS `Set.member` enabledAPIs featureFlagSettings = - Set.fromList $ - (\(FeatureFlag {ffDescription, ffIdentifier}, enabled) -> FeatureFlagInfo {ffiName = getIdentifier ffIdentifier, ffiEnabled = enabled, ffiDescription = ffDescription}) - <$> featureFlags + Set.fromList + $ (\(FeatureFlag {ffDescription, ffIdentifier}, enabled) -> FeatureFlagInfo {ffiName = getIdentifier ffIdentifier, ffiEnabled = enabled, ffiDescription = ffDescription}) + <$> featureFlags isAdminSecretSet :: AuthMode -> Bool isAdminSecretSet = \case diff --git a/server/src-lib/Hasura/Server/API/Metadata.hs b/server/src-lib/Hasura/Server/API/Metadata.hs index ad9fde68b71..e26be7f71c2 100644 --- a/server/src-lib/Hasura/Server/API/Metadata.hs +++ b/server/src-lib/Hasura/Server/API/Metadata.hs @@ -151,32 +151,34 @@ runMetadataQuery appContext schemaCache RQLMetadata {..} = do then case (appEnvEnableMaintenanceMode, appEnvEnableReadOnlyMode) of (MaintenanceModeDisabled, ReadOnlyModeDisabled) -> do -- set modified metadata in storage - L.unLogger logger $ - SchemaSyncLog L.LevelInfo TTMetadataApi $ - String $ - "Attempting to insert new metadata in storage" + L.unLogger logger + $ SchemaSyncLog L.LevelInfo TTMetadataApi + $ String + $ "Attempting to insert new metadata in storage" newResourceVersion <- - Tracing.newSpan "setMetadata" $ - liftEitherM $ - setMetadata (fromMaybe currentResourceVersion _rqlMetadataResourceVersion) modMetadata - L.unLogger logger $ - SchemaSyncLog L.LevelInfo TTMetadataApi $ - String $ - "Successfully inserted new metadata in storage with resource version: " <> showMetadataResourceVersion newResourceVersion + Tracing.newSpan "setMetadata" + $ liftEitherM + $ setMetadata (fromMaybe currentResourceVersion _rqlMetadataResourceVersion) modMetadata + L.unLogger logger + $ SchemaSyncLog L.LevelInfo TTMetadataApi + $ String + $ "Successfully inserted new metadata in storage with resource version: " + <> showMetadataResourceVersion newResourceVersion -- notify schema cache sync - Tracing.newSpan "notifySchemaCacheSync" $ - liftEitherM $ - notifySchemaCacheSync newResourceVersion appEnvInstanceId cacheInvalidations - L.unLogger logger $ - SchemaSyncLog L.LevelInfo TTMetadataApi $ - String $ - "Inserted schema cache sync notification at resource version:" <> showMetadataResourceVersion newResourceVersion + Tracing.newSpan "notifySchemaCacheSync" + $ liftEitherM + $ notifySchemaCacheSync 
newResourceVersion appEnvInstanceId cacheInvalidations + L.unLogger logger + $ SchemaSyncLog L.LevelInfo TTMetadataApi + $ String + $ "Inserted schema cache sync notification at resource version:" + <> showMetadataResourceVersion newResourceVersion (_, modSchemaCache', _) <- - Tracing.newSpan "setMetadataResourceVersionInSchemaCache" $ - setMetadataResourceVersionInSchemaCache newResourceVersion - & runCacheRWT dynamicConfig modSchemaCache + Tracing.newSpan "setMetadataResourceVersionInSchemaCache" + $ setMetadataResourceVersionInSchemaCache newResourceVersion + & runCacheRWT dynamicConfig modSchemaCache pure (r, modSchemaCache') (MaintenanceModeEnabled (), ReadOnlyModeDisabled) -> @@ -349,11 +351,11 @@ runMetadataQueryM env checkFeatureFlag remoteSchemaPerms currentResourceVersion -- NOTE: This is a good place to install tracing, since it's involved in -- the recursive case via "bulk": RMV1 q -> - Tracing.newSpan ("v1 " <> T.pack (constrName q)) $ - runMetadataQueryV1M env checkFeatureFlag remoteSchemaPerms currentResourceVersion q + Tracing.newSpan ("v1 " <> T.pack (constrName q)) + $ runMetadataQueryV1M env checkFeatureFlag remoteSchemaPerms currentResourceVersion q RMV2 q -> - Tracing.newSpan ("v2 " <> T.pack (constrName q)) $ - runMetadataQueryV2M currentResourceVersion q + Tracing.newSpan ("v2 " <> T.pack (constrName q)) + $ runMetadataQueryV2M currentResourceVersion q runMetadataQueryV1M :: forall m r. @@ -541,12 +543,12 @@ runMetadataQueryV1M env checkFeatureFlag remoteSchemaPerms currentResourceVersio pure (encJFromList results) where dispatchMetadata :: - (forall b. BackendMetadata b => i b -> a) -> + (forall b. (BackendMetadata b) => i b -> a) -> AnyBackend i -> a dispatchMetadata f x = dispatchAnyBackend @BackendMetadata x f - dispatchEventTrigger :: (forall b. BackendEventTrigger b => i b -> a) -> AnyBackend i -> a + dispatchEventTrigger :: (forall b. (BackendEventTrigger b) => i b -> a) -> AnyBackend i -> a dispatchEventTrigger f x = dispatchAnyBackend @BackendEventTrigger x f dispatchMetadataAndEventTrigger :: diff --git a/server/src-lib/Hasura/Server/API/Metadata/Instances.hs b/server/src-lib/Hasura/Server/API/Metadata/Instances.hs index ae148e8963f..3c0d913fbb7 100644 --- a/server/src-lib/Hasura/Server/API/Metadata/Instances.hs +++ b/server/src-lib/Hasura/Server/API/Metadata/Instances.hs @@ -25,7 +25,7 @@ import Hasura.Server.Utils (APIVersion (..)) instance FromJSON RQLMetadataV1 where parseJSON = withObject "RQLMetadataV1" \o -> do queryType <- o .: "type" - let args :: forall a. FromJSON a => A.Parser a + let args :: forall a. (FromJSON a) => A.Parser a args = o .: "args" case queryType of -- backend agnostic @@ -106,17 +106,17 @@ instance FromJSON RQLMetadataV1 where -- 2) Attempt to run all the backend specific command parsers against the source kind, cmd, and arg: -- NOTE: If parsers succeed then this will pick out the first successful one. command <- choice <$> sequenceA [p backendSourceKind' cmd argValue | p <- metadataV1CommandParsers @b] - onNothing command $ - fail $ - "unknown metadata command \"" - <> T.unpack cmd - <> "\" for backend " - <> T.unpack (T.toTxt backendSourceKind') + onNothing command + $ fail + $ "unknown metadata command \"" + <> T.unpack cmd + <> "\" for backend " + <> T.unpack (T.toTxt backendSourceKind') instance FromJSON RQLMetadataV2 where parseJSON = - genericParseJSON $ - defaultOptions + genericParseJSON + $ defaultOptions { constructorTagModifier = snakeCase . 
drop 4, sumEncoding = TaggedObject "type" "args" } @@ -135,7 +135,7 @@ instance FromJSON RQLMetadataRequest where -- 'BackendSourceKind' and the action suffix. -- -- For example: @"pg_add_source"@ parses as @(PostgresVanillaValue, "add_source")@ -parseQueryType :: MonadFail m => Text -> m (AnyBackend BackendSourceKind, Text) +parseQueryType :: (MonadFail m) => Text -> m (AnyBackend BackendSourceKind, Text) parseQueryType queryType = let (prefix, T.drop 1 -> cmd) = T.breakOn "_" queryType in (,cmd) diff --git a/server/src-lib/Hasura/Server/API/PGDump.hs b/server/src-lib/Hasura/Server/API/PGDump.hs index 32ff93b58e0..c9d41a7d2d2 100644 --- a/server/src-lib/Hasura/Server/API/PGDump.hs +++ b/server/src-lib/Hasura/Server/API/PGDump.hs @@ -30,9 +30,14 @@ data PGDumpReqBody = PGDumpReqBody instance FromJSON PGDumpReqBody where parseJSON = withObject "Object" $ \o -> PGDumpReqBody - <$> o .:? "source" .!= defaultSource - <*> o .: "opts" - <*> o .:? "clean_output" .!= False + <$> o + .:? "source" + .!= defaultSource + <*> o + .: "opts" + <*> o + .:? "clean_output" + .!= False execPGDump :: (MonadError RTE.QErr m, MonadIO m) => diff --git a/server/src-lib/Hasura/Server/API/Query.hs b/server/src-lib/Hasura/Server/API/Query.hs index 4a859eab595..196fef05536 100644 --- a/server/src-lib/Hasura/Server/API/Query.hs +++ b/server/src-lib/Hasura/Server/API/Query.hs @@ -198,8 +198,8 @@ runQuery :: runQuery appContext sc query = do AppEnv {..} <- askAppEnv let logger = _lsLogger appEnvLoggers - when ((appEnvEnableReadOnlyMode == ReadOnlyModeEnabled) && queryModifiesUserDB query) $ - throw400 NotSupported "Cannot run write queries when read-only mode is enabled" + when ((appEnvEnableReadOnlyMode == ReadOnlyModeEnabled) && queryModifiesUserDB query) + $ throw400 NotSupported "Cannot run write queries when read-only mode is enabled" let exportsMetadata = \case RQV1 (RQExportMetadata _) -> True diff --git a/server/src-lib/Hasura/Server/API/V2Query.hs b/server/src-lib/Hasura/Server/API/V2Query.hs index dea1f70ef43..871650e18cf 100644 --- a/server/src-lib/Hasura/Server/API/V2Query.hs +++ b/server/src-lib/Hasura/Server/API/V2Query.hs @@ -77,7 +77,7 @@ data RQLQuery instance FromJSON RQLQuery where parseJSON = withObject "RQLQuery" \o -> do t <- o .: "type" - let args :: forall a. FromJSON a => Parser a + let args :: forall a. (FromJSON a) => Parser a args = o .: "args" dcNameFromRunSql = T.stripSuffix "_run_sql" >=> GQL.mkName >=> preview _Right . 
mkDataConnectorName case t of @@ -119,8 +119,8 @@ runQuery :: m (EncJSON, RebuildableSchemaCache) runQuery appContext schemaCache rqlQuery = do AppEnv {..} <- askAppEnv - when ((appEnvEnableReadOnlyMode == ReadOnlyModeEnabled) && queryModifiesUserDB rqlQuery) $ - throw400 NotSupported "Cannot run write queries when read-only mode is enabled" + when ((appEnvEnableReadOnlyMode == ReadOnlyModeEnabled) && queryModifiesUserDB rqlQuery) + $ throw400 NotSupported "Cannot run write queries when read-only mode is enabled" let dynamicConfig = buildCacheDynamicConfig appContext MetadataWithResourceVersion metadata currentResourceVersion <- Tracing.newSpan "fetchMetadata" $ liftEitherM fetchMetadata @@ -134,13 +134,13 @@ runQuery appContext schemaCache rqlQuery = do MaintenanceModeDisabled -> do -- set modified metadata in storage newResourceVersion <- - Tracing.newSpan "setMetadata" $ - liftEitherM $ - setMetadata currentResourceVersion updatedMetadata + Tracing.newSpan "setMetadata" + $ liftEitherM + $ setMetadata currentResourceVersion updatedMetadata -- notify schema cache sync - Tracing.newSpan "notifySchemaCacheSync" $ - liftEitherM $ - notifySchemaCacheSync newResourceVersion appEnvInstanceId invalidations + Tracing.newSpan "notifySchemaCacheSync" + $ liftEitherM + $ notifySchemaCacheSync newResourceVersion appEnvInstanceId invalidations MaintenanceModeEnabled () -> throw500 "metadata cannot be modified in maintenance mode" pure (result, updatedCache) @@ -190,8 +190,8 @@ runQueryM sqlGen rq = Tracing.newSpan (T.pack $ constrName rq) $ case rq of RQBigqueryDatabaseInspection q -> BigQuery.runDatabaseInspection q RQBulk l -> encJFromList <$> indexedMapM (runQueryM sqlGen) l RQConcurrentBulk l -> do - when (queryModifiesSchema rq) $ - throw500 "Only read-only queries are allowed in a concurrent_bulk" + when (queryModifiesSchema rq) + $ throw500 "Only read-only queries are allowed in a concurrent_bulk" encJFromList <$> mapConcurrently (runQueryM sqlGen) l queryModifiesUserDB :: RQLQuery -> Bool diff --git a/server/src-lib/Hasura/Server/App.hs b/server/src-lib/Hasura/Server/App.hs index b5e0a87ec8d..abfdd23c2eb 100644 --- a/server/src-lib/Hasura/Server/App.hs +++ b/server/src-lib/Hasura/Server/App.hs @@ -161,7 +161,7 @@ newtype Handler m a = Handler (ReaderT HandlerCtx (ExceptT QErr m) a) instance MonadTrans Handler where lift = Handler . lift . lift -instance Monad m => UserInfoM (Handler m) where +instance (Monad m) => UserInfoM (Handler m) where askUserInfo = asks hcUser runHandler :: (HasResourceLimits m, MonadBaseControl IO m) => HandlerCtx -> Handler m a -> m (Either QErr a) @@ -234,10 +234,10 @@ parseBody reqBody = onlyAdmin :: (MonadError QErr m, MonadReader HandlerCtx m) => m () onlyAdmin = do uRole <- asks (_uiRole . 
hcUser) - unless (uRole == adminRoleName) $ - throw400 AccessDenied "You have to be an admin to access this endpoint" + unless (uRole == adminRoleName) + $ throw400 AccessDenied "You have to be an admin to access this endpoint" -setHeader :: MonadIO m => HTTP.Header -> Spock.ActionCtxT ctx m () +setHeader :: (MonadIO m) => HTTP.Header -> Spock.ActionCtxT ctx m () setHeader (headerName, headerValue) = Spock.setHeader (bsToTxt $ CI.original headerName) (bsToTxt headerValue) @@ -252,23 +252,23 @@ class (Monad m) => MonadMetadataApiAuthorization m where authorizeV2QueryApi :: V2Q.RQLQuery -> HandlerCtx -> m (Either QErr ()) -instance MonadMetadataApiAuthorization m => MonadMetadataApiAuthorization (ReaderT r m) where +instance (MonadMetadataApiAuthorization m) => MonadMetadataApiAuthorization (ReaderT r m) where authorizeV1QueryApi q hc = lift $ authorizeV1QueryApi q hc authorizeV1MetadataApi q hc = lift $ authorizeV1MetadataApi q hc authorizeV2QueryApi q hc = lift $ authorizeV2QueryApi q hc -instance MonadMetadataApiAuthorization m => MonadMetadataApiAuthorization (ExceptT e m) where +instance (MonadMetadataApiAuthorization m) => MonadMetadataApiAuthorization (ExceptT e m) where authorizeV1QueryApi q hc = lift $ authorizeV1QueryApi q hc authorizeV1MetadataApi q hc = lift $ authorizeV1MetadataApi q hc authorizeV2QueryApi q hc = lift $ authorizeV2QueryApi q hc -instance MonadMetadataApiAuthorization m => MonadMetadataApiAuthorization (Tracing.TraceT m) where +instance (MonadMetadataApiAuthorization m) => MonadMetadataApiAuthorization (Tracing.TraceT m) where authorizeV1QueryApi q hc = lift $ authorizeV1QueryApi q hc authorizeV1MetadataApi q hc = lift $ authorizeV1MetadataApi q hc authorizeV2QueryApi q hc = lift $ authorizeV2QueryApi q hc -- | The config API (/v1alpha1/config) handler -class Monad m => MonadConfigApiHandler m where +class (Monad m) => MonadConfigApiHandler m where runConfigApiHandler :: AppStateRef impl -> Spock.SpockCtxT () m () @@ -310,12 +310,12 @@ mkSpockAction appStateRef qErrEncoder qErrModifier apiHandler = do let traceIdMaybe = lookup "X-B3-TraceId" headers >>= \rawTraceId -> if - | Char8.length rawTraceId == 32 -> - Tracing.traceIdFromHex rawTraceId - | Char8.length rawTraceId == 16 -> - Tracing.traceIdFromHex $ Char8.replicate 16 '0' <> rawTraceId - | otherwise -> - Nothing + | Char8.length rawTraceId == 32 -> + Tracing.traceIdFromHex rawTraceId + | Char8.length rawTraceId == 16 -> + Tracing.traceIdFromHex $ Char8.replicate 16 '0' <> rawTraceId + | otherwise -> + Nothing case traceIdMaybe of Just traceId -> do @@ -523,8 +523,8 @@ v2QueryHandler schemaCacheRefUpdater query = Tracing.newSpan "v2 Query" $ do schemaCache <- asks hcSchemaCache (liftEitherM . authorizeV2QueryApi query) =<< ask res <- - bool (fst <$> dbAction schemaCache) (schemaCacheRefUpdater dbAction) $ - V2Q.queryModifiesSchema query + bool (fst <$> dbAction schemaCache) (schemaCacheRefUpdater dbAction) + $ V2Q.queryModifiesSchema query return $ HttpResponse res [] where -- Hit postgres @@ -651,11 +651,11 @@ consoleAssetsHandler logger loggingSettings dir path = do -- '..' in paths need not be handed as it is resolved in the url by -- spock's routing. we get the expanded path. 
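-- ---------------------------------------------------------------------------
-- Aside: a minimal, self-contained sketch of the kind of check behind the
-- validFilename guard used just below. Spock has already resolved ".."
-- segments, so it suffices to reject asset names that still contain a path
-- separator before reading from the console assets directory. The predicate
-- here is illustrative; the actual definition is outside this hunk.
import System.FilePath (joinPath, pathSeparators)

validAssetName :: FilePath -> Bool
validAssetName name = not (null name) && all (`notElem` pathSeparators) name

consoleAssetPath :: FilePath -> FilePath -> Maybe FilePath
consoleAssetPath dir name
  | validAssetName name = Just (joinPath [dir, name])
  | otherwise = Nothing
-- ---------------------------------------------------------------------------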
eFileContents <- - liftIO $ - try @IOException do + liftIO + $ try @IOException do unless validFilename $ throwIO $ userError "invalid asset filename" - BL.readFile $ - joinPath [T.unpack dir, pathStr] + BL.readFile + $ joinPath [T.unpack dir, pathStr] either (onError reqHeaders) onSuccess eFileContents where pathStr = T.unpack path @@ -693,7 +693,7 @@ ceConsoleTypeIdentifier = \case OSSConsole -> "oss" ProLiteConsole -> "pro-lite" -instance ConsoleRenderer m => ConsoleRenderer (Tracing.TraceT m) where +instance (ConsoleRenderer m) => ConsoleRenderer (Tracing.TraceT m) where type ConsoleType (Tracing.TraceT m) = ConsoleType m renderConsole a b c d e f = lift $ renderConsole a b c d e f @@ -722,8 +722,9 @@ configApiGetHandler :: AppStateRef impl -> Spock.SpockCtxT () m () configApiGetHandler appStateRef = do - Spock.get "v1alpha1/config" $ - onlyWhenApiEnabled isConfigEnabled appStateRef $ do + Spock.get "v1alpha1/config" + $ onlyWhenApiEnabled isConfigEnabled appStateRef + $ do AppEnv {..} <- lift askAppEnv AppContext {..} <- liftIO $ getAppContext appStateRef let (CheckFeatureFlag checkFeatureFlag) = appEnvCheckFeatureFlag @@ -731,8 +732,9 @@ configApiGetHandler appStateRef = do traverse (\ff -> (,) ff <$> liftIO (checkFeatureFlag ff)) (HashMap.elems (getFeatureFlags featureFlags)) - mkSpockAction appStateRef encodeQErr id $ - mkGetHandler $ do + mkSpockAction appStateRef encodeQErr id + $ mkGetHandler + $ do onlyAdmin let res = runGetConfig @@ -793,9 +795,9 @@ mkWaiApp :: mkWaiApp setupHook appStateRef consoleType ekgStore wsServerEnv = do appEnv@AppEnv {..} <- askAppEnv spockApp <- liftWithStateless $ \lowerIO -> - Spock.spockAsApp $ - Spock.spockT lowerIO $ - httpApp setupHook appStateRef appEnv consoleType ekgStore + Spock.spockAsApp + $ Spock.spockT lowerIO + $ httpApp setupHook appStateRef appEnv consoleType ekgStore let wsServerApp = WS.createWSServerApp (_lsEnabledLogTypes appEnvLoggingSettings) wsServerEnv appEnvWebSocketConnectionInitTimeout appEnvLicenseKeyCache stopWSServer = WS.stopWSServerApp wsServerEnv @@ -843,8 +845,8 @@ httpApp setupHook appStateRef AppEnv {..} consoleType ekgStore = do setupHook appStateRef -- cors middleware - Spock.middleware $ - corsMiddleware (acCorsPolicy <$> getAppContext appStateRef) + Spock.middleware + $ corsMiddleware (acCorsPolicy <$> getAppContext appStateRef) -- API Console and Root Dir serveApiConsole @@ -868,21 +870,21 @@ httpApp setupHook appStateRef AppEnv {..} consoleType ekgStore = do inconsistenciesMessage = "inconsistent objects in schema" (status, responseText) <- if - | (isInconsistent && isStrict) -> do - -- Inconsistencies exist and strict mode enabled. Report inconsistencies as ERROR with status 500. - let message = "ERROR: " <> inconsistenciesMessage - logError $ err500 InvalidConfiguration message - pure (HTTP.status500, message) - | (isInconsistent && not isStrict) -> do - -- Inconsistencies exist and strict mode disabled. Warn inconsistencies with status 200. - let message = "WARN: " <> inconsistenciesMessage - logSuccess $ LT.fromStrict message - pure (HTTP.status200, message) - | otherwise -> do - -- No inconsistencies in schema cache, report OK - let message = "OK" - logSuccess $ LT.fromStrict message - pure (HTTP.status200, message) + | (isInconsistent && isStrict) -> do + -- Inconsistencies exist and strict mode enabled. Report inconsistencies as ERROR with status 500. 
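-- ---------------------------------------------------------------------------
-- Aside: the status selection in this healthz handler, distilled. Metadata
-- inconsistencies produce a 500 only when strict-mode checking is requested;
-- otherwise the endpoint stays 200 with a WARN body. A sketch over plain
-- types (the real handler also logs each branch):
healthzResponse :: Bool -> Bool -> (Int, String)
healthzResponse isInconsistent isStrict
  | isInconsistent && isStrict = (500, "ERROR: inconsistent objects in schema")
  | isInconsistent = (200, "WARN: inconsistent objects in schema")
  | otherwise = (200, "OK")
-- ---------------------------------------------------------------------------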
+ let message = "ERROR: " <> inconsistenciesMessage + logError $ err500 InvalidConfiguration message + pure (HTTP.status500, message) + | (isInconsistent && not isStrict) -> do + -- Inconsistencies exist and strict mode disabled. Warn inconsistencies with status 200. + let message = "WARN: " <> inconsistenciesMessage + logSuccess $ LT.fromStrict message + pure (HTTP.status200, message) + | otherwise -> do + -- No inconsistencies in schema cache, report OK + let message = "OK" + logSuccess $ LT.fromStrict message + pure (HTTP.status200, message) Spock.setStatus status >> Spock.text responseText @@ -951,50 +953,54 @@ httpApp setupHook appStateRef AppEnv {..} consoleType ekgStore = do onlyWhenApiEnabled isMetadataEnabled appStateRef gqlExplainAction Spock.post "v1/query" $ do - onlyWhenApiEnabled isMetadataEnabled appStateRef $ - spockAction encodeQErr id $ do + onlyWhenApiEnabled isMetadataEnabled appStateRef + $ spockAction encodeQErr id + $ do mkPostHandler $ fmap (emptyHttpLogGraphQLInfo,) <$> mkAPIRespHandler (v1QueryHandler schemaCacheUpdater) Spock.post "v1/metadata" $ do - onlyWhenApiEnabled isMetadataEnabled appStateRef $ - spockAction encodeQErr id $ - mkPostHandler $ - fmap (emptyHttpLogGraphQLInfo,) <$> mkAPIRespHandler (v1MetadataHandler schemaCacheUpdater) + onlyWhenApiEnabled isMetadataEnabled appStateRef + $ spockAction encodeQErr id + $ mkPostHandler + $ fmap (emptyHttpLogGraphQLInfo,) + <$> mkAPIRespHandler (v1MetadataHandler schemaCacheUpdater) Spock.post "v2/query" $ do - onlyWhenApiEnabled isMetadataEnabled appStateRef $ - spockAction encodeQErr id $ - mkPostHandler $ - fmap (emptyHttpLogGraphQLInfo,) <$> mkAPIRespHandler (v2QueryHandler schemaCacheUpdater) + onlyWhenApiEnabled isMetadataEnabled appStateRef + $ spockAction encodeQErr id + $ mkPostHandler + $ fmap (emptyHttpLogGraphQLInfo,) + <$> mkAPIRespHandler (v2QueryHandler schemaCacheUpdater) Spock.post "v1alpha1/pg_dump" $ do - onlyWhenApiEnabled isPGDumpEnabled appStateRef $ - spockAction encodeQErr id $ - mkPostHandler $ - fmap (emptyHttpLogGraphQLInfo,) <$> v1Alpha1PGDumpHandler + onlyWhenApiEnabled isPGDumpEnabled appStateRef + $ spockAction encodeQErr id + $ mkPostHandler + $ fmap (emptyHttpLogGraphQLInfo,) + <$> v1Alpha1PGDumpHandler runConfigApiHandler appStateRef Spock.post "v1alpha1/graphql" $ do - onlyWhenApiEnabled isGraphQLEnabled appStateRef $ - spockAction GH.encodeGQErr id $ - mkGQLRequestHandler $ - mkGQLAPIRespHandler $ - v1Alpha1GQHandler E.QueryHasura + onlyWhenApiEnabled isGraphQLEnabled appStateRef + $ spockAction GH.encodeGQErr id + $ mkGQLRequestHandler + $ mkGQLAPIRespHandler + $ v1Alpha1GQHandler E.QueryHasura Spock.post "v1/graphql" $ do - onlyWhenApiEnabled isGraphQLEnabled appStateRef $ - spockAction GH.encodeGQErr allMod200 $ - mkGQLRequestHandler $ - mkGQLAPIRespHandler $ - v1GQHandler + onlyWhenApiEnabled isGraphQLEnabled appStateRef + $ spockAction GH.encodeGQErr allMod200 + $ mkGQLRequestHandler + $ mkGQLAPIRespHandler + $ v1GQHandler Spock.post "v1beta1/relay" $ do - onlyWhenApiEnabled isGraphQLEnabled appStateRef $ - spockAction GH.encodeGQErr allMod200 $ - mkGQLRequestHandler $ - mkGQLAPIRespHandler $ - v1GQRelayHandler + onlyWhenApiEnabled isGraphQLEnabled appStateRef + $ spockAction GH.encodeGQErr allMod200 + $ mkGQLRequestHandler + $ mkGQLAPIRespHandler + $ v1GQRelayHandler -- This exposes some simple RTS stats when we run with `+RTS -T`. 
We want -- this to be available even when developer APIs are not compiled in, to @@ -1009,54 +1015,60 @@ httpApp setupHook appStateRef AppEnv {..} consoleType ekgStore = do Spock.json stats Spock.get "dev/ekg" $ do - onlyWhenApiEnabled isDeveloperAPIEnabled appStateRef $ - spockAction encodeQErr id $ - mkGetHandler $ do - onlyAdmin - respJ <- liftIO $ EKG.sampleAll ekgStore - return (emptyHttpLogGraphQLInfo, JSONResp $ HttpResponse (encJFromJValue $ EKG.sampleToJson respJ) []) + onlyWhenApiEnabled isDeveloperAPIEnabled appStateRef + $ spockAction encodeQErr id + $ mkGetHandler + $ do + onlyAdmin + respJ <- liftIO $ EKG.sampleAll ekgStore + return (emptyHttpLogGraphQLInfo, JSONResp $ HttpResponse (encJFromJValue $ EKG.sampleToJson respJ) []) -- This deprecated endpoint used to show the query plan cache pre-PDV. -- Eventually this endpoint can be removed. Spock.get "dev/plan_cache" $ do - onlyWhenApiEnabled isDeveloperAPIEnabled appStateRef $ - spockAction encodeQErr id $ - mkGetHandler $ do - onlyAdmin - return (emptyHttpLogGraphQLInfo, JSONResp $ HttpResponse (encJFromJValue J.Null) []) + onlyWhenApiEnabled isDeveloperAPIEnabled appStateRef + $ spockAction encodeQErr id + $ mkGetHandler + $ do + onlyAdmin + return (emptyHttpLogGraphQLInfo, JSONResp $ HttpResponse (encJFromJValue J.Null) []) Spock.get "dev/subscriptions" $ do - onlyWhenApiEnabled isDeveloperAPIEnabled appStateRef $ - spockAction encodeQErr id $ - mkGetHandler $ do - onlyAdmin - appCtx <- liftIO $ getAppContext appStateRef - respJ <- liftIO $ ES.dumpSubscriptionsState False (acLiveQueryOptions appCtx) (acStreamQueryOptions appCtx) appEnvSubscriptionState - return (emptyHttpLogGraphQLInfo, JSONResp $ HttpResponse (encJFromJValue respJ) []) + onlyWhenApiEnabled isDeveloperAPIEnabled appStateRef + $ spockAction encodeQErr id + $ mkGetHandler + $ do + onlyAdmin + appCtx <- liftIO $ getAppContext appStateRef + respJ <- liftIO $ ES.dumpSubscriptionsState False (acLiveQueryOptions appCtx) (acStreamQueryOptions appCtx) appEnvSubscriptionState + return (emptyHttpLogGraphQLInfo, JSONResp $ HttpResponse (encJFromJValue respJ) []) Spock.get "dev/subscriptions/extended" $ do - onlyWhenApiEnabled isDeveloperAPIEnabled appStateRef $ - spockAction encodeQErr id $ - mkGetHandler $ do - onlyAdmin - appCtx <- liftIO $ getAppContext appStateRef - respJ <- liftIO $ ES.dumpSubscriptionsState True (acLiveQueryOptions appCtx) (acStreamQueryOptions appCtx) appEnvSubscriptionState - return (emptyHttpLogGraphQLInfo, JSONResp $ HttpResponse (encJFromJValue respJ) []) + onlyWhenApiEnabled isDeveloperAPIEnabled appStateRef + $ spockAction encodeQErr id + $ mkGetHandler + $ do + onlyAdmin + appCtx <- liftIO $ getAppContext appStateRef + respJ <- liftIO $ ES.dumpSubscriptionsState True (acLiveQueryOptions appCtx) (acStreamQueryOptions appCtx) appEnvSubscriptionState + return (emptyHttpLogGraphQLInfo, JSONResp $ HttpResponse (encJFromJValue respJ) []) Spock.get "dev/dataconnector/schema" $ do - onlyWhenApiEnabled isDeveloperAPIEnabled appStateRef $ - spockAction encodeQErr id $ - mkGetHandler $ do - onlyAdmin - return (emptyHttpLogGraphQLInfo, JSONResp $ HttpResponse (encJFromJValue openApiSchema) []) - - Spock.get "api/swagger/json" $ - spockAction encodeQErr id $ - mkGetHandler $ do + onlyWhenApiEnabled isDeveloperAPIEnabled appStateRef + $ spockAction encodeQErr id + $ mkGetHandler + $ do onlyAdmin - sc <- liftIO $ getSchemaCache appStateRef - json <- buildOpenAPI sc - return (emptyHttpLogGraphQLInfo, JSONResp $ HttpResponse (encJFromJValue json) []) + 
return (emptyHttpLogGraphQLInfo, JSONResp $ HttpResponse (encJFromJValue openApiSchema) []) + + Spock.get "api/swagger/json" + $ spockAction encodeQErr id + $ mkGetHandler + $ do + onlyAdmin + sc <- liftIO $ getSchemaCache appStateRef + json <- buildOpenAPI sc + return (emptyHttpLogGraphQLInfo, JSONResp $ HttpResponse (encJFromJValue json) []) forM_ [Spock.GET, Spock.POST] $ \m -> Spock.hookAny m $ \_ -> do req <- Spock.request @@ -1093,15 +1105,16 @@ httpApp setupHook appStateRef AppEnv {..} consoleType ekgStore = do -- all graphql errors should be of type 200 allMod200 qe = qe {qeStatus = HTTP.status200} gqlExplainAction = do - spockAction encodeQErr id $ - mkPostHandler $ - fmap (emptyHttpLogGraphQLInfo,) <$> mkAPIRespHandler gqlExplainHandler + spockAction encodeQErr id + $ mkPostHandler + $ fmap (emptyHttpLogGraphQLInfo,) + <$> mkAPIRespHandler gqlExplainHandler serveApiConsole = do -- redirect / to /console Spock.get Spock.root $ do - onlyWhenApiEnabled (\appCtx -> isConsoleEnabled (acConsoleStatus appCtx) && isMetadataEnabled appCtx) appStateRef $ - Spock.redirect "console" + onlyWhenApiEnabled (\appCtx -> isConsoleEnabled (acConsoleStatus appCtx) && isMetadataEnabled appCtx) appStateRef + $ Spock.redirect "console" -- serve console html Spock.get ("console" Spock.wildcard) $ \path -> do @@ -1121,7 +1134,7 @@ httpApp setupHook appStateRef AppEnv {..} consoleType ekgStore = do -- an endpoint can be switched ON/OFF dynamically, hence serve the endpoint only -- when it is enabled else throw HTTP Error 404 onlyWhenApiEnabled :: - MonadIO m => + (MonadIO m) => (AppContext -> Bool) -> AppStateRef impl -> Spock.ActionCtxT ctx m b -> diff --git a/server/src-lib/Hasura/Server/AppStateRef.hs b/server/src-lib/Hasura/Server/AppStateRef.hs index 9b6abd633e1..bb1cf67453e 100644 --- a/server/src-lib/Hasura/Server/AppStateRef.hs +++ b/server/src-lib/Hasura/Server/AppStateRef.hs @@ -87,7 +87,7 @@ data AppState impl = AppState -- This function also updates the 'TLSAllowListRef' to make it point to the -- newly minted 'SchemaCacheRef'. initialiseAppStateRef :: - MonadIO m => + (MonadIO m) => TLSAllowListRef -> Maybe MetricsConfigRef -> ServerMetrics -> @@ -258,15 +258,15 @@ getAppContext asRef = lastBuiltAppContext <$> readAppContextRef asRef -- | Formats and logs a list of inconsistent metadata objects. logInconsistentMetadata :: L.Logger L.Hasura -> [InconsistentMetadata] -> IO () logInconsistentMetadata logger objs = - unless (null objs) $ - L.unLogger logger $ - mkInconsMetadataLog objs + unless (null objs) + $ L.unLogger logger + $ mkInconsMetadataLog objs -------------------------------------------------------------------------------- -- Local helpers -- | Set the gauge metric to the metadata version of the schema cache, if it exists. -updateMetadataVersionGauge :: MonadIO m => Gauge -> RebuildableSchemaCache -> m () +updateMetadataVersionGauge :: (MonadIO m) => Gauge -> RebuildableSchemaCache -> m () updateMetadataVersionGauge metadataVersionGauge schemaCache = do let metadataVersion = scMetadataResourceVersion . 
lastBuiltSchemaCache $ schemaCache liftIO $ Gauge.set metadataVersionGauge $ getMetadataResourceVersion metadataVersion diff --git a/server/src-lib/Hasura/Server/Auth.hs b/server/src-lib/Hasura/Server/Auth.hs index 8adec38bdf7..390c1d30b42 100644 --- a/server/src-lib/Hasura/Server/Auth.hs +++ b/server/src-lib/Hasura/Server/Auth.hs @@ -52,7 +52,7 @@ import Network.HTTP.Client qualified as HTTP import Network.HTTP.Types qualified as HTTP -- | Typeclass representing the @UserInfo@ authorization and resolving effect -class Monad m => UserAuthentication m where +class (Monad m) => UserAuthentication m where resolveUserInfo :: Logger Hasura -> HTTP.Manager -> @@ -147,18 +147,20 @@ setupAuthMode adminSecretHashSet mWebHook mJwtSecrets mUnAuthRole logger httpMan -- that parameter, lest users misunderstand their auth configuration: _ | isJust mUnAuthRole -> - throwError $ - "Fatal Error: --unauthorized-role (HASURA_GRAPHQL_UNAUTHORIZED_ROLE)" - <> requiresAdminScrtMsg - <> " and is not allowed when --auth-hook (HASURA_GRAPHQL_AUTH_HOOK) is set" + throwError + $ "Fatal Error: --unauthorized-role (HASURA_GRAPHQL_UNAUTHORIZED_ROLE)" + <> requiresAdminScrtMsg + <> " and is not allowed when --auth-hook (HASURA_GRAPHQL_AUTH_HOOK) is set" (False, Nothing, False) -> return AMNoAuth (True, Just hook, False) -> return $ AMAdminSecretAndHook adminSecretHashSet hook (False, Just _, False) -> - throwError $ - "Fatal Error : --auth-hook (HASURA_GRAPHQL_AUTH_HOOK)" <> requiresAdminScrtMsg + throwError + $ "Fatal Error : --auth-hook (HASURA_GRAPHQL_AUTH_HOOK)" + <> requiresAdminScrtMsg (False, Nothing, True) -> - throwError $ - "Fatal Error : --jwt-secret (HASURA_GRAPHQL_JWT_SECRET)" <> requiresAdminScrtMsg + throwError + $ "Fatal Error : --jwt-secret (HASURA_GRAPHQL_JWT_SECRET)" + <> requiresAdminScrtMsg (_, Just _, True) -> throwError "Fatal Error: Both webhook and JWT mode cannot be enabled at the same time" @@ -220,8 +222,8 @@ updateJwkFromUrl (JWTCtx url ref _ _ _ _ _) httpManager logger = _ -> do currentTime <- liftIO getCurrentTime for_ jwkExpiry \expiryTime -> - when (currentTime >= expiryTime) $ - fetchAndUpdateJWKs logger httpManager uri ref + when (currentTime >= expiryTime) + $ fetchAndUpdateJWKs logger httpManager uri ref -- | Authenticate the request using the headers and the configured 'AuthMode'. 
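-- ---------------------------------------------------------------------------
-- Aside: the checkingSecretIfSent pattern used by the handler below, as a
-- minimal sketch. If the request carries an admin-secret header its hash
-- must be in the configured set; if no secret was sent we fall through to
-- webhook/JWT/unauthorized-role handling. Int stands in for Hasura's
-- AdminSecretHash; all names here are illustrative only.
import qualified Data.HashSet as Set

checkingSecret ::
  (Monad m) =>
  Set.HashSet Int ->       -- configured admin secret hashes
  Maybe Int ->             -- hash of the secret sent with the request, if any
  m (Either String ()) ->  -- fallback when no admin secret was sent
  m (Either String ())
checkingSecret allowed mSent fallback = case mSent of
  Nothing -> fallback
  Just h
    | h `Set.member` allowed -> pure (Right ())
    | otherwise -> pure (Left "invalid admin secret header")
-- ---------------------------------------------------------------------------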
getUserInfoWithExpTime :: @@ -266,24 +268,25 @@ getUserInfoWithExpTime_ userInfoFromAuthHook_ processJwt_ logger manager rawHead -- - if so, check it and authorize as admin else fail -- - if not proceed with either webhook or JWT auth if configured AMAdminSecret adminSecretHashSet maybeUnauthRole -> - checkingSecretIfSent adminSecretHashSet $ - withNoExpTime + checkingSecretIfSent adminSecretHashSet + $ withNoExpTime -- Consider unauthorized role, if not found raise admin secret header required exception case maybeUnauthRole of Nothing -> - throw401 $ - adminSecretHeader - <> "/" - <> deprecatedAccessKeyHeader - <> " required, but not found" + throw401 + $ adminSecretHeader + <> "/" + <> deprecatedAccessKeyHeader + <> " required, but not found" Just unAuthRole -> mkUserInfo (URBPreDetermined unAuthRole) UAdminSecretNotSent sessionVariables -- this is the case that actually ends up consuming the request AST AMAdminSecretAndHook adminSecretHashSet hook -> checkingSecretIfSent adminSecretHashSet $ userInfoFromAuthHook_ logger manager hook rawHeaders reqs AMAdminSecretAndJWT adminSecretHashSet jwtSecrets unAuthRole -> - checkingSecretIfSent adminSecretHashSet $ - processJwt_ jwtSecrets rawHeaders unAuthRole <&> (\(a, b, c, _) -> (a, b, c)) + checkingSecretIfSent adminSecretHashSet + $ processJwt_ jwtSecrets rawHeaders unAuthRole + <&> (\(a, b, c, _) -> (a, b, c)) where -- CAREFUL!: mkUserInfoFallbackAdminRole adminSecretState = @@ -298,8 +301,8 @@ getUserInfoWithExpTime_ userInfoFromAuthHook_ processJwt_ logger manager rawHead Set.HashSet AdminSecretHash -> m (UserInfo, Maybe UTCTime, [HTTP.Header]) -> m (UserInfo, Maybe UTCTime, [HTTP.Header]) checkingSecretIfSent adminSecretHashSet actionIfNoAdminSecret = do let maybeRequestAdminSecret = - foldl1 (<|>) $ - map + foldl1 (<|>) + $ map (`getSessionVariableValue` sessionVariables) [adminSecretHeader, deprecatedAccessKeyHeader] @@ -307,9 +310,12 @@ getUserInfoWithExpTime_ userInfoFromAuthHook_ processJwt_ logger manager rawHead case maybeRequestAdminSecret of Nothing -> actionIfNoAdminSecret Just requestAdminSecret -> do - unless (Set.member (hashAdminSecret requestAdminSecret) adminSecretHashSet) $ - throw401 $ - "invalid " <> adminSecretHeader <> "/" <> deprecatedAccessKeyHeader + unless (Set.member (hashAdminSecret requestAdminSecret) adminSecretHashSet) + $ throw401 + $ "invalid " + <> adminSecretHeader + <> "/" + <> deprecatedAccessKeyHeader withNoExpTime $ mkUserInfoFallbackAdminRole UAdminSecretSent withNoExpTime a = (,Nothing,[]) <$> a diff --git a/server/src-lib/Hasura/Server/Auth/JWT.hs b/server/src-lib/Hasura/Server/Auth/JWT.hs index bf886601cac..a4face85cc9 100644 --- a/server/src-lib/Hasura/Server/Auth/JWT.hs +++ b/server/src-lib/Hasura/Server/Auth/JWT.hs @@ -132,9 +132,9 @@ instance J.FromJSON JWTHeader where parseJSON = J.withObject "JWTHeader" $ \o -> do hdrType <- o J..: "type" <&> CI.mk @Text if - | hdrType == "Authorization" -> pure JHAuthorization - | hdrType == "Cookie" -> JHCookie <$> o J..: "name" - | otherwise -> fail "expected 'type' is 'Authorization' or 'Cookie'" + | hdrType == "Authorization" -> pure JHAuthorization + | hdrType == "Cookie" -> JHCookie <$> o J..: "name" + | otherwise -> fail "expected 'type' is 'Authorization' or 'Cookie'" instance J.ToJSON JWTHeader where toJSON JHAuthorization = J.object ["type" J..= ("Authorization" :: String)] @@ -180,9 +180,9 @@ instance (J.FromJSON v) => J.FromJSON (JWTCustomClaimsMapValueG v) where instance (J.ToJSON v) => J.ToJSON (JWTCustomClaimsMapValueG v) where toJSON 
(JWTCustomClaimsMapJSONPath jsonPath mDefVal) = - J.object $ - ["path" J..= encodeJSONPath jsonPath] - <> ["default" J..= defVal | Just defVal <- [mDefVal]] + J.object + $ ["path" J..= encodeJSONPath jsonPath] + <> ["default" J..= defVal | Just defVal <- [mDefVal]] toJSON (JWTCustomClaimsMapStatic v) = J.toJSON v type JWTCustomClaimsMapDefaultRole = JWTCustomClaimsMapValueG RoleName @@ -208,32 +208,33 @@ data JWTCustomClaimsMap = JWTCustomClaimsMap instance J.ToJSON JWTCustomClaimsMap where toJSON (JWTCustomClaimsMap defaultRole allowedRoles customClaims) = - J.Object $ - KM.fromList $ - map (first (K.fromText . sessionVariableToText)) $ - [ (defaultRoleClaim, J.toJSON defaultRole), - (allowedRolesClaim, J.toJSON allowedRoles) - ] - <> map (second J.toJSON) (HashMap.toList customClaims) + J.Object + $ KM.fromList + $ map (first (K.fromText . sessionVariableToText)) + $ [ (defaultRoleClaim, J.toJSON defaultRole), + (allowedRolesClaim, J.toJSON allowedRoles) + ] + <> map (second J.toJSON) (HashMap.toList customClaims) instance J.FromJSON JWTCustomClaimsMap where parseJSON = J.withObject "JWTClaimsMap" $ \obj -> do let withNotFoundError sessionVariable = let sessionVarText = sessionVariableToText sessionVariable errorMsg = - T.unpack $ - sessionVarText <> " is expected but not found" + T.unpack + $ sessionVarText + <> " is expected but not found" in KM.lookup (K.fromText sessionVarText) obj `onNothing` fail errorMsg allowedRoles <- withNotFoundError allowedRolesClaim >>= J.parseJSON defaultRole <- withNotFoundError defaultRoleClaim >>= J.parseJSON let filteredClaims = - HashMap.delete allowedRolesClaim $ - HashMap.delete defaultRoleClaim $ - HashMap.fromList $ - map (first (mkSessionVariable . K.toText)) $ - KM.toList obj + HashMap.delete allowedRolesClaim + $ HashMap.delete defaultRoleClaim + $ HashMap.fromList + $ map (first (mkSessionVariable . 
K.toText)) + $ KM.toList obj customClaims <- flip HashMap.traverseWithKey filteredClaims $ const $ J.parseJSON pure $ JWTCustomClaimsMap defaultRole allowedRoles customClaims @@ -327,8 +328,8 @@ fetchAndUpdateJWKs logger httpManager url jwkRef = do Left _e -> pure () Right (jwkSet, responseHeaders) -> do expiryRes <- - runExceptT $ - determineJwkExpiryLifetime (liftIO getCurrentTime) logger responseHeaders + runExceptT + $ determineJwkExpiryLifetime (liftIO getCurrentTime) logger responseHeaders maybeExpiry <- onLeft expiryRes (const $ pure Nothing) case maybeExpiry of Nothing -> liftIO $ do @@ -425,12 +426,12 @@ determineJwkExpiryLifetime getCurrentTime' (Logger logger) responseHeaders = Nothing -> return Nothing if - -- If a max-age is specified with a must-revalidate we use it, but if not we use an immediate expiry time - | mustRevalidateExists cacheControl -> pure $ fromMaybe currTime maxExpiryMaybe - -- In these cases we want don't want to cache the JWK, so we use an immediate expiry time - | noCacheExists cacheControl || noStoreExists cacheControl -> pure currTime - -- Use max-age, if it exists - | otherwise -> hoistMaybe maxExpiryMaybe + -- If a max-age is specified with a must-revalidate we use it, but if not we use an immediate expiry time + | mustRevalidateExists cacheControl -> pure $ fromMaybe currTime maxExpiryMaybe + -- In these cases we want don't want to cache the JWK, so we use an immediate expiry time + | noCacheExists cacheControl || noStoreExists cacheControl -> pure currTime + -- Use max-age, if it exists + | otherwise -> hoistMaybe maxExpiryMaybe timeFromExpires :: MaybeT m UTCTime timeFromExpires = do @@ -508,7 +509,7 @@ processJwt_ processJwtBytes decodeIssuer fGetHeaderType jwtCtxs headers mUnAuthR (_, [(ctx, val)]) -> withAuthZ val ctx _ -> throw400 InvalidHeaders "Could not verify JWT: Multiple JWTs found" where - intersectKeys :: Hashable a => HashMap.HashMap a [b] -> HashMap.HashMap a [c] -> [(b, c)] + intersectKeys :: (Hashable a) => HashMap.HashMap a [b] -> HashMap.HashMap a [c] -> [(b, c)] intersectKeys m n = concatMap (uncurry cartesianProduct) $ HashMap.elems $ HashMap.intersectionWith (,) m n issuerMatch (j, b) = do @@ -556,29 +557,31 @@ processJwt_ processJwtBytes decodeIssuer fGetHeaderType jwtCtxs headers mUnAuthR -- see if there is a x-hasura-role header, or else pick the default role. -- The role returned is unauthenticated at this point: let requestedRole = - fromMaybe defaultRole $ - getRequestHeader userRoleHeader headers >>= mkRoleName . bsToTxt + fromMaybe defaultRole + $ getRequestHeader userRoleHeader headers + >>= mkRoleName + . bsToTxt - when (requestedRole `notElem` allowedRoles) $ - throw400 AccessDenied "Your requested role is not in allowed roles" + when (requestedRole `notElem` allowedRoles) + $ throw400 AccessDenied "Your requested role is not in allowed roles" let finalClaims = HashMap.delete defaultRoleClaim . HashMap.delete allowedRolesClaim $ claimsMap let finalClaimsObject = - KM.fromList $ - map (first (K.fromText . sessionVariableToText)) $ - HashMap.toList finalClaims + KM.fromList + $ map (first (K.fromText . 
sessionVariableToText)) + $ HashMap.toList finalClaims metadata <- parseJwtClaim (J.Object finalClaimsObject) "x-hasura-* claims" userInfo <- - mkUserInfo (URBPreDetermined requestedRole) UAdminSecretNotSent $ - mkSessionVariablesText metadata + mkUserInfo (URBPreDetermined requestedRole) UAdminSecretNotSent + $ mkSessionVariablesText metadata pure (userInfo, expTimeM, [], Just jwtCtx) withoutAuthZ = do unAuthRole <- onNothing mUnAuthRole (throw400 InvalidHeaders "Missing 'Authorization' or 'Cookie' header in JWT authentication mode") userInfo <- - mkUserInfo (URBPreDetermined unAuthRole) UAdminSecretNotSent $ - mkSessionVariablesHeaders headers + mkUserInfo (URBPreDetermined unAuthRole) UAdminSecretNotSent + $ mkSessionVariablesHeaders headers pure (userInfo, Nothing, [], Nothing) jwtNotIssuerError = throw400 JWTInvalid "Could not verify JWT: JWTNotInIssuer" @@ -618,7 +621,7 @@ processHeaderSimple jwtCtx jwt = do -- | parse the claims map from the JWT token or custom claims from the JWT config parseClaimsMap :: - MonadError QErr m => + (MonadError QErr m) => -- | Unregistered JWT claims Jose.ClaimsSet -> -- | Claims config @@ -640,11 +643,11 @@ parseClaimsMap claimsSet jcxClaims = do -- filter only x-hasura claims let claimsMap = - HashMap.fromList $ - map (first mkSessionVariable) $ - filter (isSessionVariable . fst) $ - map (first K.toText) $ - KM.toList claimsObject + HashMap.fromList + $ map (first mkSessionVariable) + $ filter (isSessionVariable . fst) + $ map (first K.toText) + $ KM.toList claimsObject pure claimsMap JCMap claimsConfig -> do @@ -657,17 +660,17 @@ parseClaimsMap claimsSet jcxClaims = do defaultRole <- case defaultRoleClaimsMap of JWTCustomClaimsMapJSONPath defaultRoleJsonPath defaultVal -> - parseDefaultRoleClaim defaultVal $ - iResultToMaybe $ - executeJSONPath defaultRoleJsonPath claimsJSON + parseDefaultRoleClaim defaultVal + $ iResultToMaybe + $ executeJSONPath defaultRoleJsonPath claimsJSON JWTCustomClaimsMapStatic staticDefaultRole -> pure staticDefaultRole otherClaims <- flip HashMap.traverseWithKey otherClaimsMap $ \k claimObj -> do let throwClaimErr = - throw400 JWTInvalidClaims $ - "JWT claim from claims_map, " - <> sessionVariableToText k - <> " not found" + throw400 JWTInvalidClaims + $ "JWT claim from claims_map, " + <> sessionVariableToText k + <> " not found" case claimObj of JWTCustomClaimsMapJSONPath path defaultVal -> iResultToMaybe (executeJSONPath path claimsJSON) @@ -675,34 +678,36 @@ parseClaimsMap claimsSet jcxClaims = do `onNothing` throwClaimErr JWTCustomClaimsMapStatic claimStaticValue -> pure $ J.String claimStaticValue - pure $ - HashMap.fromList + pure + $ HashMap.fromList [ (allowedRolesClaim, J.toJSON allowedRoles), (defaultRoleClaim, J.toJSON defaultRole) ] - <> otherClaims + <> otherClaims where parseAllowedRolesClaim defaultVal = \case Nothing -> - onNothing defaultVal $ - throw400 JWTRoleClaimMissing $ - "JWT claim does not contain " <> sessionVariableToText allowedRolesClaim + onNothing defaultVal + $ throw400 JWTRoleClaimMissing + $ "JWT claim does not contain " + <> sessionVariableToText allowedRolesClaim Just v -> - parseJwtClaim v $ - "invalid " - <> sessionVariableToText allowedRolesClaim - <> "; should be a list of roles" + parseJwtClaim v + $ "invalid " + <> sessionVariableToText allowedRolesClaim + <> "; should be a list of roles" parseDefaultRoleClaim defaultVal = \case Nothing -> - onNothing defaultVal $ - throw400 JWTRoleClaimMissing $ - "JWT claim does not contain " <> sessionVariableToText defaultRoleClaim + onNothing 
defaultVal + $ throw400 JWTRoleClaimMissing + $ "JWT claim does not contain " + <> sessionVariableToText defaultRoleClaim Just v -> - parseJwtClaim v $ - "invalid " - <> sessionVariableToText defaultRoleClaim - <> "; should be a role" + parseJwtClaim v + $ "invalid " + <> sessionVariableToText defaultRoleClaim + <> "; should be a role" claimsNotFound namespace = throw400 JWTInvalidClaims $ case namespace of @@ -778,14 +783,14 @@ instance J.ToJSON JWTConfig where ClaimNs ns -> ["claims_namespace" J..= J.String ns] in namespacePairs <> ["claims_format" J..= claimsFormat] JCMap claimsMap -> ["claims_map" J..= claimsMap] - in J.object $ - keyOrUrlPairs - <> [ "audience" J..= aud, - "issuer" J..= iss, - "header" J..= jwtHeader - ] - <> claimsPairs - <> (maybe [] (\skew -> ["allowed_skew" J..= skew]) allowedSkew) + in J.object + $ keyOrUrlPairs + <> [ "audience" J..= aud, + "issuer" J..= iss, + "header" J..= jwtHeader + ] + <> claimsPairs + <> (maybe [] (\skew -> ["allowed_skew" J..= skew]) allowedSkew) -- | Parse from a json string like: -- | `{"type": "RS256", "key": ""}` @@ -848,7 +853,7 @@ parseHasuraClaims claimsMap = do <$> parseClaim allowedRolesClaim "should be a list of roles" <*> parseClaim defaultRoleClaim "should be a single role name" where - parseClaim :: J.FromJSON a => SessionVariable -> Text -> m a + parseClaim :: (J.FromJSON a) => SessionVariable -> Text -> m a parseClaim claim hint = do claimV <- onNothing (HashMap.lookup claim claimsMap) missingClaim parseJwtClaim claimV $ "invalid " <> claimText <> "; " <> hint diff --git a/server/src-lib/Hasura/Server/Auth/JWT/Internal.hs b/server/src-lib/Hasura/Server/Auth/JWT/Internal.hs index 020f6f651c3..1aea0b32e0f 100644 --- a/server/src-lib/Hasura/Server/Auth/JWT/Internal.hs +++ b/server/src-lib/Hasura/Server/Auth/JWT/Internal.hs @@ -84,9 +84,9 @@ fromX509Pem s = do pem <- getAtleastOne "No pem found" pems -- decode the bytestring to a certificate signedExactCert <- - fmapL T.pack $ - X509.decodeSignedCertificate $ - PEM.pemContent pem + fmapL T.pack + $ X509.decodeSignedCertificate + $ PEM.pemContent pem let cert = X509.signedObject $ X509.getSigned signedExactCert pubKey = X509.certPubKey cert case pubKey of diff --git a/server/src-lib/Hasura/Server/Auth/WebHook.hs b/server/src-lib/Hasura/Server/Auth/WebHook.hs index 10378b06b9b..e067555188d 100644 --- a/server/src-lib/Hasura/Server/Auth/WebHook.hs +++ b/server/src-lib/Hasura/Server/Auth/WebHook.hs @@ -90,20 +90,20 @@ userInfoFromAuthHook logger manager hook reqHeaders reqs = do & set HTTP.headers (addDefaultHeaders [contentType]) & set HTTP.body - ( HTTP.RequestBodyLBS $ - J.encode $ - object - ( ["headers" J..= headersPayload] - -- We will only send the request if `ahSendRequestBody` is set to true - <> ["request" J..= reqs | ahSendRequestBody hook] - ) + ( HTTP.RequestBodyLBS + $ J.encode + $ object + ( ["headers" J..= headersPayload] + -- We will only send the request if `ahSendRequestBody` is set to true + <> ["request" J..= reqs | ahSendRequestBody hook] + ) ) HTTP.httpLbs req' manager logAndThrow :: HTTP.HttpException -> m a logAndThrow err = do - unLogger logger $ - WebHookLog + unLogger logger + $ WebHookLog LevelError Nothing (ahUrl hook) @@ -138,16 +138,16 @@ mkUserInfoFromResp (Logger logger) url method statusCode respBody respHdrs where getUserInfoFromHdrs rawHeaders responseHdrs = do userInfo <- - mkUserInfo URBFromSessionVariables UAdminSecretNotSent $ - mkSessionVariablesText rawHeaders + mkUserInfo URBFromSessionVariables UAdminSecretNotSent + $ 
mkSessionVariablesText rawHeaders logWebHookResp LevelInfo Nothing Nothing expiration <- runMaybeT $ timeFromCacheControl rawHeaders <|> timeFromExpires rawHeaders pure (userInfo, expiration, responseHdrs) - logWebHookResp :: MonadIO m => LogLevel -> Maybe BL.ByteString -> Maybe Text -> m () + logWebHookResp :: (MonadIO m) => LogLevel -> Maybe BL.ByteString -> Maybe Text -> m () logWebHookResp logLevel mResp message = - logger $ - WebHookLog + logger + $ WebHookLog logLevel (Just statusCode) url diff --git a/server/src-lib/Hasura/Server/CheckUpdates.hs b/server/src-lib/Hasura/Server/CheckUpdates.hs index b97bd8c3cba..87c13deaf10 100644 --- a/server/src-lib/Hasura/Server/CheckUpdates.hs +++ b/server/src-lib/Hasura/Server/CheckUpdates.hs @@ -43,10 +43,10 @@ checkForUpdates (LoggerCtx loggerSet _ _ _) manager = do Left ex -> ignoreHttpErr ex Right bs -> do UpdateInfo latestVersion <- decodeResp $ bs ^. Wreq.responseBody - when (latestVersion /= currentVersion) $ - FL.pushLogStrLn loggerSet $ - FL.toLogStr $ - updateMsg latestVersion + when (latestVersion /= currentVersion) + $ FL.pushLogStrLn loggerSet + $ FL.toLogStr + $ updateMsg latestVersion C.sleep $ days 1 where diff --git a/server/src-lib/Hasura/Server/Compression.hs b/server/src-lib/Hasura/Server/Compression.hs index 052dae66e3a..c04e8747f47 100644 --- a/server/src-lib/Hasura/Server/Compression.hs +++ b/server/src-lib/Hasura/Server/Compression.hs @@ -118,16 +118,18 @@ getAcceptedEncodings :: NH.RequestHeaders -> Set.Set EncodingType getAcceptedEncodings reqHeaders = Set.fromList acceptedEncodingTypes where rawHeaderVals = - concatMap (splitHeaderVal . snd) $ - filter (\h -> fst h == NH.hAcceptEncoding) reqHeaders + concatMap (splitHeaderVal . snd) + $ filter (\h -> fst h == NH.hAcceptEncoding) reqHeaders splitHeaderVal bs = map T.strip $ T.splitOn "," $ bsToTxt bs -- we'll ignore qvalues, except (crucially) to determine if 'identity' is rejected: identityRejected = -- ...if we're explicitly rejecting identity, or... - "identity;q=0" `elem` rawHeaderVals + "identity;q=0" + `elem` rawHeaderVals || -- ...rejecting anything not listed and identity is not listed - ( "*;q=0" `elem` rawHeaderVals + ( "*;q=0" + `elem` rawHeaderVals && not (any ("identity" `T.isPrefixOf`) rawHeaderVals) ) gzipAccepted = diff --git a/server/src-lib/Hasura/Server/Cors.hs b/server/src-lib/Hasura/Server/Cors.hs index 5c30a4d4f7e..7d4d9b5b93c 100644 --- a/server/src-lib/Hasura/Server/Cors.hs +++ b/server/src-lib/Hasura/Server/Cors.hs @@ -71,7 +71,8 @@ instance J.FromJSON CorsConfig where False -> o .: "allowed_origins" >>= \v -> J.withText "origins" parseAllowAll v - <|> CCAllowedOrigins <$> J.parseJSON v + <|> CCAllowedOrigins + <$> J.parseJSON v isCorsDisabled :: CorsConfig -> Bool isCorsDisabled = \case diff --git a/server/src-lib/Hasura/Server/Init.hs b/server/src-lib/Hasura/Server/Init.hs index db2323a31dc..498c37ec4a8 100644 --- a/server/src-lib/Hasura/Server/Init.hs +++ b/server/src-lib/Hasura/Server/Init.hs @@ -77,7 +77,9 @@ Following is the precise behaviour - -- TODO: Move into a dedicated Metadata module (ala Pro). getDbId :: Query.TxE Error.QErr Types.MetadataDbId getDbId = - Types.MetadataDbId . runIdentity . Query.getRow + Types.MetadataDbId + . runIdentity + . Query.getRow <$> Query.withQE Connection.defaultTxErrorHandler [Query.sql| @@ -96,7 +98,7 @@ getPgVersion = Types.PGVersion <$> Query.serverVersion -- command parser, then process the subcommand raw values if -- necessary. 
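-- ---------------------------------------------------------------------------
-- Aside: the Accept-Encoding handling in the Compression.hs hunk above,
-- reduced to its two steps: header values are split on commas and trimmed,
-- and qvalues are ignored except to detect an explicit rejection of
-- "identity". A self-contained sketch over Text:
{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text as T

splitHeaderVal :: T.Text -> [T.Text]
splitHeaderVal = map T.strip . T.splitOn ","

identityRejected :: [T.Text] -> Bool
identityRejected vals =
  "identity;q=0" `elem` vals
    || ("*;q=0" `elem` vals && not (any ("identity" `T.isPrefixOf`) vals))
-- ---------------------------------------------------------------------------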
mkHGEOptions :: - Logging.EnabledLogTypes impl => HGEOptionsRaw (ServeOptionsRaw impl) -> WithEnv (HGEOptions (ServeOptions impl)) + (Logging.EnabledLogTypes impl) => HGEOptionsRaw (ServeOptionsRaw impl) -> WithEnv (HGEOptions (ServeOptions impl)) mkHGEOptions (HGEOptionsRaw rawDbUrl rawMetadataDbUrl rawCmd) = do dbUrl <- processPostgresConnInfo rawDbUrl metadataDbUrl <- withOption rawMetadataDbUrl metadataDbUrlOption @@ -147,7 +149,7 @@ rawConnInfoToUrlConf maybeRawConnInfo = do -- | Merge the results of the serve subcommmand arg parser with -- corresponding values from the 'WithEnv' context. -mkServeOptions :: forall impl. Logging.EnabledLogTypes impl => ServeOptionsRaw impl -> WithEnv (ServeOptions impl) +mkServeOptions :: forall impl. (Logging.EnabledLogTypes impl) => ServeOptionsRaw impl -> WithEnv (ServeOptions impl) mkServeOptions sor@ServeOptionsRaw {..} = do soPort <- withOptionDefault rsoPort servePortOption soHost <- withOptionDefault rsoHost serveHostOption @@ -217,7 +219,7 @@ mkServeOptions sor@ServeOptionsRaw {..} = do -- | Fetch Postgres 'Query.ConnParams' components from the environment -- and merge with the values consumed by the arg parser in -- 'ConnParamsRaw'. -mkConnParams :: Monad m => ConnParamsRaw -> WithEnvT m Query.ConnParams +mkConnParams :: (Monad m) => ConnParamsRaw -> WithEnvT m Query.ConnParams mkConnParams ConnParamsRaw {..} = do cpStripes <- unrefine <$> withOptionDefault rcpStripes pgStripesOption -- Note: by Little's Law we can expect e.g. (with 50 max connections) a @@ -233,12 +235,12 @@ mkConnParams ConnParamsRaw {..} = do else pure (Just lifetime) cpTimeout <- fmap unrefine <$> withOption rcpPoolTimeout pgPoolTimeoutOption let cpCancel = True - return $ - Query.ConnParams {..} + return + $ Query.ConnParams {..} -- | Fetch 'Auth.AuthHook' components from the environment and merge -- with the values consumed by the arg parser in 'AuthHookRaw'. -mkAuthHook :: Monad m => AuthHookRaw -> WithEnvT m (Maybe Auth.AuthHook) +mkAuthHook :: (Monad m) => AuthHookRaw -> WithEnvT m (Maybe Auth.AuthHook) mkAuthHook (AuthHookRaw mUrl mType mSendRequestBody) = do mUrlEnv <- withOption mUrl authHookOption -- Also support HASURA_GRAPHQL_AUTH_HOOK_TYPE @@ -265,7 +267,7 @@ mkAuthHook (AuthHookRaw mUrl mType mSendRequestBody) = do -- | Fetch 'Cors.CorsConfig' settings from the environment and merge -- with the settings consumed by the arg parser. 
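The other mechanical change, visible in `mkCorsConfig` directly below and in the signature hunks above, wraps single-class contexts in parentheses: `Monad m =>` becomes `(Monad m) =>`. GHC accepts both spellings; the parenthesized form is simply the formatter's normalization. A small sketch, independent of the PR's code:

-- Both signatures are accepted by GHC and mean exactly the same thing.
describe :: Show a => a -> String
describe x = "value: " ++ show x

describe' :: (Show a) => a -> String
describe' x = "value: " ++ show x

main :: IO ()
main = putStrLn (describe (1 :: Int)) >> putStrLn (describe' True)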
-mkCorsConfig :: Monad m => ServeOptionsRaw imp -> Maybe Cors.CorsConfig -> WithEnvT m Cors.CorsConfig +mkCorsConfig :: (Monad m) => ServeOptionsRaw imp -> Maybe Cors.CorsConfig -> WithEnvT m Cors.CorsConfig mkCorsConfig ServeOptionsRaw {..} mCfg = do corsCfg <- do corsDisabled <- withOptionDefault Nothing disableCorsOption @@ -279,9 +281,9 @@ mkCorsConfig ServeOptionsRaw {..} mCfg = do wsReadCookie <- case (Cors.isCorsDisabled corsCfg, readCookVal) of (True, _) -> pure readCookVal (False, WsReadCookieEnabled) -> - throwError $ - _envVar wsReadCookieOption - <> " can only be used when CORS is disabled" + throwError + $ _envVar wsReadCookieOption + <> " can only be used when CORS is disabled" (False, WsReadCookieDisabled) -> pure WsReadCookieDisabled pure $ case corsCfg of Cors.CCDisabled _ -> Cors.CCDisabled $ isWsReadCookieEnabled wsReadCookie diff --git a/server/src-lib/Hasura/Server/Init/Arg.hs b/server/src-lib/Hasura/Server/Init/Arg.hs index 1ce233e3c14..2ff93eac6c6 100644 --- a/server/src-lib/Hasura/Server/Init/Arg.hs +++ b/server/src-lib/Hasura/Server/Init/Arg.hs @@ -36,11 +36,11 @@ import Options.Applicative qualified as Opt -- 1. '(Config.PostgresConnInfo (Maybe PostgresConnInfoRaw))' - The DB connection. -- 2: 'Maybe String' - Representing the metadata connection. -- 3: 'Config.HGECommand' @a@ - The result of the supplied Subcommand. -parseHgeOpts :: Logging.EnabledLogTypes impl => Opt.Parser (HGEOptionsRaw (ServeOptionsRaw impl)) +parseHgeOpts :: (Logging.EnabledLogTypes impl) => Opt.Parser (HGEOptionsRaw (ServeOptionsRaw impl)) parseHgeOpts = Config.HGEOptionsRaw <$> parsePostgresConnInfo <*> parseMetadataDbUrl <*> parseHGECommand -parseHGECommand :: Logging.EnabledLogTypes impl => Opt.Parser (HGECommand (ServeOptionsRaw impl)) +parseHGECommand :: (Logging.EnabledLogTypes impl) => Opt.Parser (HGECommand (ServeOptionsRaw impl)) parseHGECommand = Opt.subparser ( Opt.command @@ -88,8 +88,8 @@ parsePostgresConnInfo = do pure $ Config.PostgresConnInfo maybeRawConnInfo retries' where retries = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option Opt.auto ( Opt.long "retries" <> Opt.metavar "NO OF RETRIES" @@ -106,8 +106,8 @@ retriesNumOption = parseDatabaseUrl :: Opt.Parser (Maybe Template.URLTemplate) parseDatabaseUrl = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "database-url" <> Opt.metavar "" @@ -130,26 +130,26 @@ parseRawConnDetails = do password' <- password dbName' <- dbName options' <- options - pure $ - Config.PostgresConnDetailsRaw - <$> host' - <*> port' - <*> user' - <*> pure password' - <*> dbName' - <*> pure options' + pure + $ Config.PostgresConnDetailsRaw + <$> host' + <*> port' + <*> user' + <*> pure password' + <*> dbName' + <*> pure options' where host = - Opt.optional $ - Opt.strOption + Opt.optional + $ Opt.strOption ( Opt.long "host" <> Opt.metavar "" <> Opt.help "Postgres server host" ) port = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option Opt.auto ( Opt.long "port" <> Opt.short 'p' @@ -158,8 +158,8 @@ parseRawConnDetails = do ) user = - Opt.optional $ - Opt.strOption + Opt.optional + $ Opt.strOption ( Opt.long "user" <> Opt.short 'u' <> Opt.metavar "" @@ -175,8 +175,8 @@ parseRawConnDetails = do ) dbName = - Opt.optional $ - Opt.strOption + Opt.optional + $ Opt.strOption ( Opt.long "dbname" <> Opt.short 'd' <> Opt.metavar "" @@ -184,8 +184,8 @@ parseRawConnDetails = do ) options = - Opt.optional $ - Opt.strOption + Opt.optional + $ Opt.strOption ( Opt.long "pg-connection-options" <> 
Opt.short 'o' <> Opt.metavar "" @@ -195,8 +195,8 @@ parseRawConnDetails = do -- TODO(SOLOMON): Should we parse the URL here? parseMetadataDbUrl :: Opt.Parser (Maybe String) parseMetadataDbUrl = - Opt.optional $ - Opt.strOption + Opt.optional + $ Opt.strOption ( Opt.long "metadata-database-url" <> Opt.metavar "" <> Opt.help (Config._helpMessage metadataDbUrlOption) diff --git a/server/src-lib/Hasura/Server/Init/Arg/Command/Serve.hs b/server/src-lib/Hasura/Server/Init/Arg/Command/Serve.hs index 04160503ca0..6e0ce4d6d29 100644 --- a/server/src-lib/Hasura/Server/Init/Arg/Command/Serve.hs +++ b/server/src-lib/Hasura/Server/Init/Arg/Command/Serve.hs @@ -152,8 +152,8 @@ serveCommandParser = parseServerPort :: Opt.Parser (Maybe Config.Port) parseServerPort = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "server-port" <> Opt.metavar "" @@ -170,8 +170,8 @@ servePortOption = parseServerHost :: Opt.Parser (Maybe Warp.HostPreference) parseServerHost = - Opt.optional $ - Opt.strOption + Opt.optional + $ Opt.strOption ( Opt.long "server-host" <> Opt.metavar "" <> Opt.help (Config._helpMessage serveHostOption) @@ -191,8 +191,8 @@ parseConnParams = where pgStripes :: Opt.Parser (Maybe (Refined NonNegative Int)) pgStripes = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "stripes" <> Opt.short 's' @@ -202,8 +202,8 @@ parseConnParams = pgConns :: Opt.Parser (Maybe (Refined NonNegative Int)) pgConns = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "connections" <> Opt.short 'c' @@ -213,8 +213,8 @@ parseConnParams = pgIdleTimeout :: Opt.Parser (Maybe (Refined NonNegative Int)) pgIdleTimeout = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "timeout" <> Opt.metavar "" @@ -223,8 +223,8 @@ parseConnParams = pgConnLifetime :: Opt.Parser (Maybe (Refined NonNegative Time.NominalDiffTime)) pgConnLifetime = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "conn-lifetime" <> Opt.metavar "" @@ -233,8 +233,8 @@ parseConnParams = pgUsePreparedStatements :: Opt.Parser (Maybe Bool) pgUsePreparedStatements = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "use-prepared-statements" <> Opt.metavar "" @@ -243,8 +243,8 @@ parseConnParams = pgPoolTimeout :: Opt.Parser (Maybe (Refined NonNegative Time.NominalDiffTime)) pgPoolTimeout = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "pool-timeout" <> Opt.metavar "" @@ -309,8 +309,8 @@ pgPoolTimeoutOption = parseTxIsolation :: Opt.Parser (Maybe Query.TxIsolation) parseTxIsolation = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "tx-iso" <> Opt.short 'i' @@ -328,13 +328,13 @@ txIsolationOption = parseAdminSecret :: Opt.Parser (Maybe Auth.AdminSecretHash) parseAdminSecret = - Opt.optional $ - Auth.hashAdminSecret - <$> Opt.strOption - ( Opt.long "admin-secret" - <> Opt.metavar "ADMIN SECRET KEY" - <> Opt.help (Config._helpMessage adminSecretOption) - ) + Opt.optional + $ Auth.hashAdminSecret + <$> Opt.strOption + ( Opt.long "admin-secret" + <> Opt.metavar "ADMIN SECRET KEY" + <> Opt.help (Config._helpMessage adminSecretOption) + ) adminSecretOption :: Config.Option () adminSecretOption = @@ -346,13 +346,13 @@ adminSecretOption = parseAccessKey :: Opt.Parser 
(Maybe Auth.AdminSecretHash) parseAccessKey = - Opt.optional $ - Auth.hashAdminSecret - <$> Opt.strOption - ( Opt.long "access-key" - <> Opt.metavar "ADMIN SECRET KEY (DEPRECATED: USE --admin-secret)" - <> Opt.help (Config._helpMessage accessKeyOption) - ) + Opt.optional + $ Auth.hashAdminSecret + <$> Opt.strOption + ( Opt.long "access-key" + <> Opt.metavar "ADMIN SECRET KEY (DEPRECATED: USE --admin-secret)" + <> Opt.help (Config._helpMessage accessKeyOption) + ) accessKeyOption :: Config.Option () accessKeyOption = @@ -367,23 +367,23 @@ parseAuthHook = Config.AuthHookRaw <$> url <*> urlType <*> sendRequestBody where url = - Opt.optional $ - Opt.strOption + Opt.optional + $ Opt.strOption ( Opt.long "auth-hook" <> Opt.metavar "" <> Opt.help (Config._helpMessage authHookOption) ) urlType = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "auth-hook-mode" <> Opt.metavar "" <> Opt.help (Config._helpMessage authHookModeOption) ) sendRequestBody :: Opt.Parser (Maybe Bool) = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "auth-hook-send-request-body" <> Opt.metavar "" @@ -416,8 +416,8 @@ authHookSendRequestBodyOption = parseJwtSecret :: Opt.Parser (Maybe Auth.JWTConfig) parseJwtSecret = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "jwt-secret" <> Opt.metavar "" @@ -437,13 +437,13 @@ jwtSecretOption = parseUnAuthRole :: Opt.Parser (Maybe RoleName) parseUnAuthRole = - fmap mkRoleName $ - Opt.optional $ - Opt.strOption - ( Opt.long "unauthorized-role" - <> Opt.metavar "" - <> Opt.help (Config._helpMessage unAuthRoleOption) - ) + fmap mkRoleName + $ Opt.optional + $ Opt.strOption + ( Opt.long "unauthorized-role" + <> Opt.metavar "" + <> Opt.help (Config._helpMessage unAuthRoleOption) + ) where mkRoleName mText = mText >>= Roles.mkRoleName @@ -461,8 +461,8 @@ parseCorsConfig :: Opt.Parser (Maybe Cors.CorsConfig) parseCorsConfig = mapCC <$> disableCors <*> corsDomain where corsDomain = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "cors-domain" <> Opt.metavar "" @@ -514,8 +514,8 @@ enableConsoleOption = parseConsoleAssetsDir :: Opt.Parser (Maybe Text) parseConsoleAssetsDir = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "console-assets-dir" <> Opt.help (Config._helpMessage consoleAssetsDirOption) @@ -534,8 +534,8 @@ consoleAssetsDirOption = parseConsoleSentryDsn :: Opt.Parser (Maybe Text) parseConsoleSentryDsn = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "console-sentry-dsn" <> Opt.help (Config._helpMessage consoleSentryDsnOption) @@ -553,8 +553,8 @@ consoleSentryDsnOption = -- NOTE: Should this be an 'Opt.flag'? 
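For context on that NOTE: in optparse-applicative, `Opt.switch` is shorthand for `Opt.flag False True`, whereas an explicit `Opt.flag` chooses both the default and the "active" value directly, skipping the Bool-then-convert step that parsers such as `parseStringifyNum` below perform with `fmap (bool ...)`. A hedged sketch of the flag variant; the names here are illustrative, not this PR's code:

import Options.Applicative

data Telemetry = TelemetryDisabled | TelemetryEnabled
  deriving (Show)

-- 'flag def act mods' yields 'def' when the flag is absent and 'act'
-- when it is present, so no intermediate Bool is needed.
telemetryP :: Parser Telemetry
telemetryP =
  flag
    TelemetryDisabled
    TelemetryEnabled
    (long "enable-telemetry" <> help "Enable anonymous usage telemetry")

main :: IO ()
main = print =<< execParser (info (telemetryP <**> helper) fullDesc)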
parseEnableTelemetry :: Opt.Parser (Maybe Config.TelemetryStatus) parseEnableTelemetry = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "enable-telemetry" <> Opt.help (Config._helpMessage enableTelemetryOption) @@ -590,8 +590,8 @@ wsReadCookieOption = parseStringifyNum :: Opt.Parser Options.StringifyNumbers parseStringifyNum = - fmap (bool Options.Don'tStringifyNumbers Options.StringifyNumbers) $ - Opt.switch + fmap (bool Options.Don'tStringifyNumbers Options.StringifyNumbers) + $ Opt.switch ( Opt.long "stringify-numeric-types" <> Opt.help (Config._helpMessage stringifyNumOption) ) @@ -606,8 +606,8 @@ stringifyNumOption = parseDangerousBooleanCollapse :: Opt.Parser (Maybe Options.DangerouslyCollapseBooleans) parseDangerousBooleanCollapse = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "v1-boolean-null-collapse" <> Opt.help (Config._helpMessage dangerousBooleanCollapseOption) @@ -626,8 +626,8 @@ dangerousBooleanCollapseOption = parseEnabledAPIs :: Opt.Parser (Maybe (HashSet Config.API)) parseEnabledAPIs = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "enabled-apis" <> Opt.help (Config._helpMessage enabledAPIsOption) @@ -643,8 +643,8 @@ enabledAPIsOption = parseMxRefetchDelay :: Opt.Parser (Maybe Subscription.Options.RefetchInterval) parseMxRefetchDelay = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "live-queries-multiplexed-refetch-interval" <> Opt.metavar "" @@ -663,8 +663,8 @@ mxRefetchDelayOption = parseMxBatchSize :: Opt.Parser (Maybe Subscription.Options.BatchSize) parseMxBatchSize = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "live-queries-multiplexed-batch-size" <> Opt.metavar "BATCH_SIZE" @@ -683,8 +683,8 @@ mxBatchSizeOption = parseStreamingMxRefetchDelay :: Opt.Parser (Maybe Subscription.Options.RefetchInterval) parseStreamingMxRefetchDelay = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "streaming-queries-multiplexed-refetch-interval" <> Opt.metavar "" @@ -703,8 +703,8 @@ streamingMxRefetchDelayOption = parseStreamingMxBatchSize :: Opt.Parser (Maybe Subscription.Options.BatchSize) parseStreamingMxBatchSize = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "streaming-queries-multiplexed-batch-size" <> Opt.metavar "BATCH_SIZE" @@ -739,8 +739,8 @@ enableAllowlistOption = parseEnabledLogs :: forall impl. 
(Logging.EnabledLogTypes impl) => Opt.Parser (Maybe (HashSet (Logging.EngineLogType impl))) parseEnabledLogs = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "enabled-log-types" <> Opt.help (Config._helpMessage (enabledLogsOption @impl)) @@ -766,8 +766,8 @@ enabledLogsOption = parseLogLevel :: Opt.Parser (Maybe Logging.LogLevel) parseLogLevel = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "log-level" <> Opt.help (Config._helpMessage logLevelOption) @@ -783,8 +783,8 @@ logLevelOption = parsePlanCacheSize :: Opt.Parser (Maybe Bounded.CacheSize) parsePlanCacheSize = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "query-plan-cache-size" <> Opt.help @@ -812,8 +812,8 @@ graphqlDevModeOption = parseGraphqlAdminInternalErrors :: Opt.Parser (Maybe Config.AdminInternalErrorsStatus) parseGraphqlAdminInternalErrors = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "admin-internal-errors" <> Opt.help (Config._helpMessage graphqlAdminInternalErrorsOption) @@ -830,8 +830,8 @@ graphqlAdminInternalErrorsOption = parseGraphqlEventsHttpPoolSize :: Opt.Parser (Maybe (Refined Positive Int)) parseGraphqlEventsHttpPoolSize = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "events-http-pool-size" <> Opt.metavar (Config._envVar graphqlEventsHttpPoolSizeOption) @@ -848,8 +848,8 @@ graphqlEventsHttpPoolSizeOption = parseGraphqlEventsFetchInterval :: Opt.Parser (Maybe (Refined NonNegative Milliseconds)) parseGraphqlEventsFetchInterval = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "events-fetch-interval" <> Opt.metavar (Config._envVar graphqlEventsFetchIntervalOption) @@ -866,8 +866,8 @@ graphqlEventsFetchIntervalOption = parseGraphqlAsyncActionsFetchInterval :: Opt.Parser (Maybe Config.OptionalInterval) parseGraphqlAsyncActionsFetchInterval = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "async-actions-fetch-interval" <> Opt.metavar (Config._envVar asyncActionsFetchIntervalOption) @@ -887,8 +887,8 @@ asyncActionsFetchIntervalOption = parseEnableRemoteSchemaPerms :: Opt.Parser Options.RemoteSchemaPermissions parseEnableRemoteSchemaPerms = - fmap (bool Options.DisableRemoteSchemaPermissions Options.EnableRemoteSchemaPermissions) $ - Opt.switch + fmap (bool Options.DisableRemoteSchemaPermissions Options.EnableRemoteSchemaPermissions) + $ Opt.switch ( Opt.long "enable-remote-schema-permissions" <> Opt.help (Config._helpMessage enableRemoteSchemaPermsOption) ) @@ -919,8 +919,8 @@ webSocketCompressionOption = parseWebSocketKeepAlive :: Opt.Parser (Maybe Config.KeepAliveDelay) parseWebSocketKeepAlive = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "websocket-keepalive" <> Opt.help (Config._helpMessage webSocketKeepAliveOption) @@ -937,8 +937,8 @@ webSocketKeepAliveOption = parseInferFunctionPerms :: Opt.Parser (Maybe Options.InferFunctionPermissions) parseInferFunctionPerms = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "infer-function-permissions" <> Opt.help (Config._helpMessage inferFunctionPermsOption) @@ -954,8 +954,8 @@ inferFunctionPermsOption = parseEnableMaintenanceMode :: Opt.Parser (Types.MaintenanceMode ()) 
parseEnableMaintenanceMode = - fmap (bool Types.MaintenanceModeDisabled (Types.MaintenanceModeEnabled ())) $ - Opt.switch + fmap (bool Types.MaintenanceModeDisabled (Types.MaintenanceModeEnabled ())) + $ Opt.switch ( Opt.long "enable-maintenance-mode" <> Opt.help (Config._helpMessage enableMaintenanceModeOption) ) @@ -970,8 +970,8 @@ enableMaintenanceModeOption = parseSchemaPollInterval :: Opt.Parser (Maybe Config.OptionalInterval) parseSchemaPollInterval = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "schema-sync-poll-interval" <> Opt.metavar (Config._envVar schemaPollIntervalOption) @@ -989,8 +989,8 @@ schemaPollIntervalOption = parseExperimentalFeatures :: Opt.Parser (Maybe (HashSet Types.ExperimentalFeature)) parseExperimentalFeatures = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "experimental-features" <> Opt.help (Config._helpMessage experimentalFeaturesOption) @@ -1013,8 +1013,8 @@ experimentalFeaturesOption = parseEventsFetchBatchSize :: Opt.Parser (Maybe (Refined NonNegative Int)) parseEventsFetchBatchSize = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "events-fetch-batch-size" <> Opt.metavar (Config._envVar eventsFetchBatchSizeOption) @@ -1033,8 +1033,8 @@ eventsFetchBatchSizeOption = parseGracefulShutdownTimeout :: Opt.Parser (Maybe (Refined NonNegative Seconds)) parseGracefulShutdownTimeout = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "graceful-shutdown-timeout" <> Opt.metavar "" @@ -1053,8 +1053,8 @@ gracefulShutdownOption = parseWebSocketConnectionInitTimeout :: Opt.Parser (Maybe Config.WSConnectionInitTimeout) parseWebSocketConnectionInitTimeout = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "websocket-connection-init-timeout" <> Opt.help (Config._helpMessage webSocketConnectionInitTimeoutOption) @@ -1071,8 +1071,8 @@ webSocketConnectionInitTimeoutOption = parseEnableMetadataQueryLogging :: Opt.Parser Server.Logging.MetadataQueryLoggingMode parseEnableMetadataQueryLogging = - fmap (bool Server.Logging.MetadataQueryLoggingDisabled Server.Logging.MetadataQueryLoggingEnabled) $ - Opt.switch + fmap (bool Server.Logging.MetadataQueryLoggingDisabled Server.Logging.MetadataQueryLoggingEnabled) + $ Opt.switch ( Opt.long "enable-metadata-query-logging" <> Opt.help (Config._helpMessage enableMetadataQueryLoggingOption) ) @@ -1089,8 +1089,8 @@ enableMetadataQueryLoggingOption = -- an isolated PR we should move that defaulting in the parsing stage. 
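Nearly every parser touched in this file shares one shape: `Opt.optional` wrapping an `Opt.option` whose reader, `Opt.eitherReader Env.fromEnv`, can fail with a message. A self-contained sketch of that pattern in the leading-dollar layout this PR adopts, before the `parseDefaultNamingConvention` hunk below; the port parser and its bounds check are hypothetical stand-ins for `Env.fromEnv`:

import Options.Applicative

-- 'eitherReader' lifts a 'String -> Either String a' function into a
-- reader; 'optional' turns an absent option into 'Nothing' rather than
-- a parse failure.
parsePort :: Parser (Maybe Int)
parsePort =
  optional
    $ option
      (eitherReader readPort)
      ( long "server-port"
          <> metavar "PORT"
          <> help "Port to listen on"
      )
  where
    readPort s = case reads s of
      [(p, "")] | p > 0, p < 65536 -> Right p
      _ -> Left ("invalid port: " <> s)

main :: IO ()
main = print =<< execParser (info (parsePort <**> helper) fullDesc)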
parseDefaultNamingConvention :: Opt.Parser (Maybe NC.NamingCase) parseDefaultNamingConvention = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "default-naming-convention" <> Opt.help (Config._helpMessage defaultNamingConventionOption) @@ -1109,8 +1109,8 @@ defaultNamingConventionOption = parseExtensionsSchema :: Opt.Parser (Maybe MonadTx.ExtensionsSchema) parseExtensionsSchema = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "metadata-database-extensions-schema" <> Opt.help (Config._helpMessage metadataDBExtensionsSchemaOption) @@ -1126,8 +1126,8 @@ metadataDefaultsOption = parseMetadataDefaults :: Opt.Parser (Maybe MetadataDefaults) parseMetadataDefaults = - Opt.optional $ - Opt.option + Opt.optional + $ Opt.option (Opt.eitherReader Env.fromEnv) ( Opt.long "metadata-defaults" <> Opt.help (Config._helpMessage metadataDefaultsOption) diff --git a/server/src-lib/Hasura/Server/Init/Config.hs b/server/src-lib/Hasura/Server/Init/Config.hs index 250dfd092ed..a0e4e7c2a50 100644 --- a/server/src-lib/Hasura/Server/Init/Config.hs +++ b/server/src-lib/Hasura/Server/Init/Config.hs @@ -224,29 +224,29 @@ instance FromJSON PostgresConnDetailsRaw where instance ToJSON PostgresConnDetailsRaw where toJSON PostgresConnDetailsRaw {..} = - J.object $ - [ "host" .= connHost, - "port" .= connPort, - "user" .= connUser, - "password" .= connPassword, - "database" .= connDatabase - ] - <> catMaybes [fmap ("options" .=) connOptions] + J.object + $ [ "host" .= connHost, + "port" .= connPort, + "user" .= connUser, + "password" .= connPassword, + "database" .= connDatabase + ] + <> catMaybes [fmap ("options" .=) connOptions] rawConnDetailsToUrlText :: PostgresConnDetailsRaw -> Text rawConnDetailsToUrlText PostgresConnDetailsRaw {..} = - Text.pack $ - "postgresql://" - <> connUser - <> ":" - <> connPassword - <> "@" - <> connHost - <> ":" - <> show connPort - <> "/" - <> connDatabase - <> maybe "" ("?options=" <>) connOptions + Text.pack + $ "postgresql://" + <> connUser + <> ":" + <> connPassword + <> "@" + <> connHost + <> ":" + <> show connPort + <> "/" + <> connDatabase + <> maybe "" ("?options=" <>) connOptions -------------------------------------------------------------------------------- diff --git a/server/src-lib/Hasura/Server/Init/Env.hs b/server/src-lib/Hasura/Server/Init/Env.hs index dd4e9c87fb8..c9d40e1083d 100644 --- a/server/src-lib/Hasura/Server/Init/Env.hs +++ b/server/src-lib/Hasura/Server/Init/Env.hs @@ -63,8 +63,11 @@ considerEnv envVar = do Just val -> either throwErr (pure . Just) $ fromEnv val where throwErr s = - throwError $ - "Fatal Error:- Environment variable " ++ envVar ++ ": " ++ s + throwError + $ "Fatal Error:- Environment variable " + ++ envVar + ++ ": " + ++ s -- | Lookup a list of keys with 'considerEnv' and return the first -- value to parse successfully. @@ -286,9 +289,9 @@ instance FromEnv (HashSet Server.Types.ExperimentalFeature) where readAPI si = case Text.toLower $ Text.strip si of key | Just (_, ef) <- find ((== key) . fst) experimentalFeatures -> Right ef _ -> - Left $ - "Only expecting list of comma separated experimental features, options are:" - ++ intercalate ", " (map (Text.unpack . fst) experimentalFeatures) + Left + $ "Only expecting list of comma separated experimental features, options are:" + ++ intercalate ", " (map (Text.unpack . 
fst) experimentalFeatures) experimentalFeatures :: [(Text, Server.Types.ExperimentalFeature)] experimentalFeatures = [(Server.Types.experimentalFeatureKey ef, ef) | ef <- [minBound .. maxBound]] diff --git a/server/src-lib/Hasura/Server/Init/FeatureFlag.hs b/server/src-lib/Hasura/Server/Init/FeatureFlag.hs index e4f8b72a5eb..68c6fb4aabe 100644 --- a/server/src-lib/Hasura/Server/Init/FeatureFlag.hs +++ b/server/src-lib/Hasura/Server/Init/FeatureFlag.hs @@ -53,8 +53,8 @@ newtype FeatureFlags = FeatureFlags {getFeatureFlags :: HashMap Text FeatureFlag featureFlags :: FeatureFlags featureFlags = - FeatureFlags $ - HashMap.fromList + FeatureFlags + $ HashMap.fromList [ ("test-flag", testFlag), ("native-query-interface", nativeQueryInterface), ("stored-procedures", storedProceduresFlag) @@ -62,16 +62,16 @@ featureFlags = -------------------------------------------------------------------------------- -class Monad m => HasFeatureFlagChecker m where +class (Monad m) => HasFeatureFlagChecker m where checkFlag :: FeatureFlag -> m Bool -instance HasFeatureFlagChecker m => HasFeatureFlagChecker (ReaderT r m) where +instance (HasFeatureFlagChecker m) => HasFeatureFlagChecker (ReaderT r m) where checkFlag = lift . checkFlag -instance HasFeatureFlagChecker m => HasFeatureFlagChecker (ExceptT e m) where +instance (HasFeatureFlagChecker m) => HasFeatureFlagChecker (ExceptT e m) where checkFlag = lift . checkFlag -instance HasFeatureFlagChecker m => HasFeatureFlagChecker (StateT s m) where +instance (HasFeatureFlagChecker m) => HasFeatureFlagChecker (StateT s m) where checkFlag = lift . checkFlag -------------------------------------------------------------------------------- diff --git a/server/src-lib/Hasura/Server/Init/Logging.hs b/server/src-lib/Hasura/Server/Init/Logging.hs index 4398d03aaa7..b9bd877faf8 100644 --- a/server/src-lib/Hasura/Server/Init/Logging.hs +++ b/server/src-lib/Hasura/Server/Init/Logging.hs @@ -79,7 +79,7 @@ connInfoToLog connInfo = ] -- | Generate a 'StartupLog' from the final 'ServeOptions'. -serveOptsToLog :: ToJSON (Logging.EngineLogType impl) => Config.ServeOptions impl -> Server.Logging.StartupLog +serveOptsToLog :: (ToJSON (Logging.EngineLogType impl)) => Config.ServeOptions impl -> Server.Logging.StartupLog serveOptsToLog so = Server.Logging.StartupLog Logging.LevelInfo "server_configuration" infoVal where @@ -120,7 +120,7 @@ serveOptsToLog so = "enable_metadata_query_logging" .= Config.soEnableMetadataQueryLogging so ] -mkGenericLog :: ToJSON a => Logging.LogLevel -> Text -> a -> Server.Logging.StartupLog +mkGenericLog :: (ToJSON a) => Logging.LogLevel -> Text -> a -> Server.Logging.StartupLog mkGenericLog logLevel k msg = Server.Logging.StartupLog logLevel k $ J.toJSON msg diff --git a/server/src-lib/Hasura/Server/Limits.hs b/server/src-lib/Hasura/Server/Limits.hs index db141436928..02b59a38433 100644 --- a/server/src-lib/Hasura/Server/Limits.hs +++ b/server/src-lib/Hasura/Server/Limits.hs @@ -24,7 +24,7 @@ data ResourceLimits = ResourceLimits } -- | Monads which support resource (memory, CPU time, etc.) 
limiting -class Monad m => HasResourceLimits m where +class (Monad m) => HasResourceLimits m where askHTTPHandlerLimit :: m ResourceLimits askGraphqlOperationLimit :: HGE.RequestId -> UserInfo -> ApiLimit -> m ResourceLimits @@ -42,8 +42,8 @@ class Monad m => HasResourceLimits m where m ResourceLimits askGraphqlOperationLimit reqId userInfo apiLimit = lift $ askGraphqlOperationLimit reqId userInfo apiLimit -instance HasResourceLimits m => HasResourceLimits (ReaderT r m) +instance (HasResourceLimits m) => HasResourceLimits (ReaderT r m) -instance HasResourceLimits m => HasResourceLimits (ExceptT e m) +instance (HasResourceLimits m) => HasResourceLimits (ExceptT e m) -instance HasResourceLimits m => HasResourceLimits (Tracing.TraceT m) +instance (HasResourceLimits m) => HasResourceLimits (Tracing.TraceT m) diff --git a/server/src-lib/Hasura/Server/Logging.hs b/server/src-lib/Hasura/Server/Logging.hs index db1b9b0c1ca..0dbb88d54be 100644 --- a/server/src-lib/Hasura/Server/Logging.hs +++ b/server/src-lib/Hasura/Server/Logging.hs @@ -122,8 +122,8 @@ instance ToEngineLog MetadataLog Hasura where mkInconsMetadataLog :: [InconsistentMetadata] -> MetadataLog mkInconsMetadataLog objs = - MetadataLog LevelWarn "Inconsistent Metadata!" $ - object ["objects" .= objs] + MetadataLog LevelWarn "Inconsistent Metadata!" + $ object ["objects" .= objs] data WebHookLog = WebHookLog { whlLogLevel :: !LogLevel, @@ -224,7 +224,7 @@ type HttpLogMetadata m = (CommonHttpLogMetadata, ExtraHttpLogMetadata m) buildHttpLogMetadata :: forall m. - HttpLog m => + (HttpLog m) => HttpLogGraphQLInfo -> ExtraUserInfo -> HttpLogMetadata m @@ -232,7 +232,7 @@ buildHttpLogMetadata (commonHttpLogMetadata, paramQueryHashList) extraUserInfo = (commonHttpLogMetadata, buildExtraHttpLogMetadata @m paramQueryHashList extraUserInfo) -- | synonym for clarity, writing `emptyHttpLogMetadata @m` instead of `def @(HttpLogMetadata m)` -emptyHttpLogMetadata :: forall m. HttpLog m => HttpLogMetadata m +emptyHttpLogMetadata :: forall m. (HttpLog m) => HttpLogMetadata m emptyHttpLogMetadata = (CommonHttpLogMetadata RequestModeNonBatchable Nothing, emptyExtraHttpLogMetadata @m) -- See Note [Disable query printing for metadata queries] @@ -241,8 +241,9 @@ data MetadataQueryLoggingMode = MetadataQueryLoggingEnabled | MetadataQueryLoggi instance FromJSON MetadataQueryLoggingMode where parseJSON = - withBool "MetadataQueryLoggingMode" $ - pure . bool MetadataQueryLoggingDisabled MetadataQueryLoggingEnabled + withBool "MetadataQueryLoggingMode" + $ pure + . bool MetadataQueryLoggingDisabled MetadataQueryLoggingEnabled instance ToJSON MetadataQueryLoggingMode where toJSON = \case @@ -279,7 +280,7 @@ HASURA_GRAPHQL_ENABLE_METADATA_QUERY_LOGGING envirnoment variables is not set, t we disable the 'query' field in HTTP logs. -} -class Monad m => HttpLog m where +class (Monad m) => HttpLog m where -- | Extra http-log metadata that we attach when operating in 'm'. 
type ExtraHttpLogMetadata m @@ -334,7 +335,7 @@ class Monad m => HttpLog m where HttpLogMetadata m -> m () -instance HttpLog m => HttpLog (TraceT m) where +instance (HttpLog m) => HttpLog (TraceT m) where type ExtraHttpLogMetadata (TraceT m) = ExtraHttpLogMetadata m buildExtraHttpLogMetadata a = buildExtraHttpLogMetadata @m a @@ -344,7 +345,7 @@ instance HttpLog m => HttpLog (TraceT m) where logHttpSuccess a b c d e f g h i j k l = lift $ logHttpSuccess a b c d e f g h i j k l -instance HttpLog m => HttpLog (ReaderT r m) where +instance (HttpLog m) => HttpLog (ReaderT r m) where type ExtraHttpLogMetadata (ReaderT r m) = ExtraHttpLogMetadata m buildExtraHttpLogMetadata a = buildExtraHttpLogMetadata @m a @@ -354,7 +355,7 @@ instance HttpLog m => HttpLog (ReaderT r m) where logHttpSuccess a b c d e f g h i j k l = lift $ logHttpSuccess a b c d e f g h i j k l -instance HttpLog m => HttpLog (ExceptT e m) where +instance (HttpLog m) => HttpLog (ExceptT e m) where type ExtraHttpLogMetadata (ExceptT e m) = ExtraHttpLogMetadata m buildExtraHttpLogMetadata a = buildExtraHttpLogMetadata @m a @@ -515,18 +516,18 @@ mkHttpAccessLogContext userInfoM loggingSettings reqId req (_, parsedReq) uncomp >>= ( \case GH.GQLSingleRequest _ -> Nothing -- This case is aleady handled in the `OperationLog` GH.GQLBatchedReqs opLogs -> - NE.nonEmpty $ - map + NE.nonEmpty + $ map ( \case GQLQueryOperationSuccess (GQLQueryOperationSuccessLog {..}) -> - BatchOperationSuccess $ - BatchOperationSuccessLog + BatchOperationSuccess + $ BatchOperationSuccessLog (addQuery parsedReq (hlPath http) loggingSettings) gqolResponseSize (convertDuration gqolQueryExecutionTime) GQLQueryOperationError (GQLQueryOperationErrorLog {..}) -> - BatchOperationError $ - BatchOperationErrorLog + BatchOperationError + $ BatchOperationErrorLog (addQuery parsedReq (hlPath http) loggingSettings) gqelError ) @@ -612,21 +613,21 @@ logDeprecatedEnvVars logger env sources = do -- to the metadata for_ (HashMap.lookup SNDefault sources) $ \_defSource -> do let deprecated = checkDeprecatedEnvVars (unEnvVarsMovedToMetadata envVarsMovedToMetadata) - unless (null deprecated) $ - unLogger logger $ - UnstructuredLog LevelWarn $ - SB.fromText $ - "The following environment variables are deprecated and moved to metadata: " - <> toText deprecated + unless (null deprecated) + $ unLogger logger + $ UnstructuredLog LevelWarn + $ SB.fromText + $ "The following environment variables are deprecated and moved to metadata: " + <> toText deprecated -- Log when completely deprecated environment variables are present let deprecated = checkDeprecatedEnvVars (unDeprecatedEnvVars deprecatedEnvVars) - unless (null deprecated) $ - unLogger logger $ - UnstructuredLog LevelWarn $ - SB.fromText $ - "The following environment variables are deprecated: " - <> toText deprecated + unless (null deprecated) + $ unLogger logger + $ UnstructuredLog LevelWarn + $ SB.fromText + $ "The following environment variables are deprecated: " + <> toText deprecated data SchemaSyncThreadType = TTListener diff --git a/server/src-lib/Hasura/Server/MetadataOpenAPI.hs b/server/src-lib/Hasura/Server/MetadataOpenAPI.hs index 78e07b0943c..6388160a595 100644 --- a/server/src-lib/Hasura/Server/MetadataOpenAPI.hs +++ b/server/src-lib/Hasura/Server/MetadataOpenAPI.hs @@ -24,8 +24,9 @@ metadataOpenAPI :: OpenApi metadataOpenAPI = mempty {_openApiComponents = mempty {_componentsSchemas = definitions}} where - definitions = fst $ - flip runDeclare mempty $ do + definitions = fst + $ flip runDeclare mempty + $ do 
NamedSchema mName codecSchema <- declareNamedSchemaViaCodec (Proxy @MetadataDTO) declare $ InsOrdHashMap.fromList [(fromMaybe "MetadataDTO" mName, codecSchema)] pure codecSchema diff --git a/server/src-lib/Hasura/Server/Middleware.hs b/server/src-lib/Hasura/Server/Middleware.hs index f0d8acf33c8..107c3ab17f6 100644 --- a/server/src-lib/Hasura/Server/Middleware.hs +++ b/server/src-lib/Hasura/Server/Middleware.hs @@ -38,14 +38,14 @@ corsMiddleware getPolicy app req sendResp = do respondPreFlight :: B.ByteString -> CorsPolicy -> Response respondPreFlight origin policy = - setHeaders (mkPreFlightHeaders requestedHeaders) $ - injectCorsHeaders origin policy emptyResponse + setHeaders (mkPreFlightHeaders requestedHeaders) + $ injectCorsHeaders origin policy emptyResponse emptyResponse = responseLBS HTTP.status204 [] "" requestedHeaders = - fromMaybe "" $ - getRequestHeader "Access-Control-Request-Headers" $ - requestHeaders req + fromMaybe "" + $ getRequestHeader "Access-Control-Request-Headers" + $ requestHeaders req injectCorsHeaders :: B.ByteString -> CorsPolicy -> Response -> Response injectCorsHeaders origin policy = setHeaders (mkCorsHeaders origin policy) diff --git a/server/src-lib/Hasura/Server/Migrate.hs b/server/src-lib/Hasura/Server/Migrate.hs index 44a132fd5b0..7c4a766b864 100644 --- a/server/src-lib/Hasura/Server/Migrate.hs +++ b/server/src-lib/Hasura/Server/Migrate.hs @@ -74,8 +74,8 @@ data MigrationResult instance ToEngineLog MigrationResult Hasura where toEngineLog result = - toEngineLog $ - StartupLog + toEngineLog + $ StartupLog { slLogLevel = LevelInfo, slKind = "catalog_migrate", slInfo = J.toJSON $ case result of @@ -120,31 +120,31 @@ migrateCatalog maybeDefaultSourceConfig extensionsSchema maintenanceMode migrati metadataTableExists <- doesTableExist (SchemaName "hdb_catalog") (TableName "hdb_metadata") migrationResult <- if - | maintenanceMode == (MaintenanceModeEnabled ()) -> do - if - | not catalogSchemaExists -> - throw500 "unexpected: hdb_catalog schema not found in maintenance mode" - | not versionTableExists -> - throw500 "unexpected: hdb_catalog.hdb_version table not found in maintenance mode" - | not metadataTableExists -> - throw500 $ - "the \"hdb_catalog.hdb_metadata\" table is expected to exist and contain" - <> " the metadata of the graphql-engine" - | otherwise -> pure MRMaintanenceMode - | otherwise -> case catalogSchemaExists of - False -> initialize True - True -> case versionTableExists of - False -> initialize False - True -> migrateFrom =<< liftTx getCatalogVersion + | maintenanceMode == (MaintenanceModeEnabled ()) -> do + if + | not catalogSchemaExists -> + throw500 "unexpected: hdb_catalog schema not found in maintenance mode" + | not versionTableExists -> + throw500 "unexpected: hdb_catalog.hdb_version table not found in maintenance mode" + | not metadataTableExists -> + throw500 + $ "the \"hdb_catalog.hdb_metadata\" table is expected to exist and contain" + <> " the metadata of the graphql-engine" + | otherwise -> pure MRMaintanenceMode + | otherwise -> case catalogSchemaExists of + False -> initialize True + True -> case versionTableExists of + False -> initialize False + True -> migrateFrom =<< liftTx getCatalogVersion metadataWithVersion <- liftTx fetchMetadataAndResourceVersionFromCatalog pure (migrationResult, metadataWithVersion) where -- initializes the catalog, creating the schema if necessary initialize :: Bool -> m MigrationResult initialize createSchema = do - liftTx $ - when createSchema $ - PG.unitQE defaultTxErrorHandler "CREATE SCHEMA 
hdb_catalog" () False + liftTx + $ when createSchema + $ PG.unitQE defaultTxErrorHandler "CREATE SCHEMA hdb_catalog" () False enablePgcryptoExtension extensionsSchema multiQ $(makeRelativeToProject "src-rsr/initialise.sql" >>= PG.sqlFromFile) updateCatalogVersion @@ -154,8 +154,8 @@ migrateCatalog maybeDefaultSourceConfig extensionsSchema maintenanceMode migrati Just defaultSourceConfig -> -- insert metadata with default source let defaultSourceMetadata = - AB.mkAnyBackend $ - SourceMetadata + AB.mkAnyBackend + $ SourceMetadata @('Postgres 'Vanilla) defaultSource PostgresVanillaKind @@ -182,14 +182,14 @@ migrateCatalog maybeDefaultSourceConfig extensionsSchema maintenanceMode migrati let upMigrations = migrations maybeDefaultSourceConfig False maintenanceMode case neededMigrations previousVersion upMigrations of [] -> - throw400 NotSupported $ - "Cannot use database previously used with a newer version of graphql-engine (expected" - <> " a catalog version <=" - <> latestCatalogVersionString - <> ", but the current version" - <> " is " - <> tshow previousVersion - <> ")." + throw400 NotSupported + $ "Cannot use database previously used with a newer version of graphql-engine (expected" + <> " a catalog version <=" + <> latestCatalogVersionString + <> ", but the current version" + <> " is " + <> tshow previousVersion + <> ")." migrationsToBeApplied -> do traverse_ (mpMigrate . snd) migrationsToBeApplied updateCatalogVersion @@ -221,13 +221,13 @@ downgradeCatalog defaultSourceConfig opts time = do | otherwise = case neededDownMigrations targetVersion of Left reason -> - throw400 NotSupported $ - "This downgrade path (from " - <> tshow previousVersion - <> " to " - <> dgoTargetVersion opts - <> ") is not supported, because " - <> reason + throw400 NotSupported + $ "This downgrade path (from " + <> tshow previousVersion + <> " to " + <> dgoTargetVersion opts + <> ") is not supported, because " + <> reason Right path -> do sequence_ path unless (dgoDryRun opts) do @@ -299,18 +299,17 @@ migrations maybeDefaultSourceConfig dryRun maintenanceMode = ) |] in TH.listE - -- version 0.8 is the only non-integral catalog version - -- The 40_to_41 migration is consciously omitted from below because its contents - -- have been moved to the `0_to_1.sql` because the `40_to_41` migration only contained - -- source catalog changes and we'd like to keep source catalog migrations in a different - -- path than metadata catalog migrations. - $ - [|(MetadataCatalogVersion08, MigrationPair $(migrationFromFile "08" "1") Nothing)|] - : migrationsFromFile [MetadataCatalogVersion 2 .. MetadataCatalogVersion 3] + -- version 0.8 is the only non-integral catalog version + -- The 40_to_41 migration is consciously omitted from below because its contents + -- have been moved to the `0_to_1.sql` because the `40_to_41` migration only contained + -- source catalog changes and we'd like to keep source catalog migrations in a different + -- path than metadata catalog migrations. + $ [|(MetadataCatalogVersion08, MigrationPair $(migrationFromFile "08" "1") Nothing)|] + : migrationsFromFile [MetadataCatalogVersion 2 .. MetadataCatalogVersion 3] ++ [|(MetadataCatalogVersion 3, MigrationPair from3To4 Nothing)|] - : (migrationsFromFile [MetadataCatalogVersion 5 .. MetadataCatalogVersion 40] ++ migrationsFromFile [MetadataCatalogVersion 42]) + : (migrationsFromFile [MetadataCatalogVersion 5 .. 
MetadataCatalogVersion 40] ++ migrationsFromFile [MetadataCatalogVersion 42]) ++ [|(MetadataCatalogVersion 42, MigrationPair from42To43 (Just from43To42))|] - : migrationsFromFile [MetadataCatalogVersion 44 .. latestCatalogVersion] + : migrationsFromFile [MetadataCatalogVersion 44 .. latestCatalogVersion] ) where runTxOrPrint :: PG.Query -> m () @@ -320,8 +319,8 @@ migrations maybeDefaultSourceConfig dryRun maintenanceMode = | otherwise = multiQ from42To43 = do - when (maintenanceMode == MaintenanceModeEnabled ()) $ - throw500 "cannot migrate to catalog version 43 in maintenance mode" + when (maintenanceMode == MaintenanceModeEnabled ()) + $ throw500 "cannot migrate to catalog version 43 in maintenance mode" let query = $(makeRelativeToProject "src-rsr/migrations/42_to_43.sql" >>= PG.sqlFromFile) if dryRun then (liftIO . TIO.putStrLn . PG.getQueryText) query @@ -329,15 +328,16 @@ migrations maybeDefaultSourceConfig dryRun maintenanceMode = metadataV2 <- fetchMetadataFromHdbTables multiQ query defaultSourceConfig <- - onNothing maybeDefaultSourceConfig $ - throw400 NotSupported $ - "cannot migrate to catalog version 43 without --database-url or env var " <> tshow (_envVar databaseUrlOption) + onNothing maybeDefaultSourceConfig + $ throw400 NotSupported + $ "cannot migrate to catalog version 43 without --database-url or env var " + <> tshow (_envVar databaseUrlOption) let metadataV3 = let MetadataNoSources {..} = metadataV2 defaultSourceMetadata = - BackendSourceMetadata $ - AB.mkAnyBackend $ - SourceMetadata defaultSource PostgresVanillaKind _mnsTables _mnsFunctions mempty mempty mempty defaultSourceConfig Nothing emptySourceCustomization Nothing + BackendSourceMetadata + $ AB.mkAnyBackend + $ SourceMetadata defaultSource PostgresVanillaKind _mnsTables _mnsFunctions mempty mempty mempty defaultSourceConfig Nothing emptySourceCustomization Nothing in Metadata (InsOrdHashMap.singleton defaultSource defaultSourceMetadata) _mnsRemoteSchemas diff --git a/server/src-lib/Hasura/Server/Migrate/Internal.hs b/server/src-lib/Hasura/Server/Migrate/Internal.hs index 35869f16ff9..91e166d5074 100644 --- a/server/src-lib/Hasura/Server/Migrate/Internal.hs +++ b/server/src-lib/Hasura/Server/Migrate/Internal.hs @@ -25,14 +25,15 @@ import Hasura.Server.Migrate.Version getCatalogVersion :: PG.TxE QErr MetadataCatalogVersion getCatalogVersion = do versionText <- - runIdentity . PG.getRow + runIdentity + . PG.getRow <$> PG.withQE defaultTxErrorHandler [PG.sql| SELECT version FROM hdb_catalog.hdb_version |] () False - onLeft (readEither $ T.unpack versionText) $ - \err -> throw500 $ "Unexpected: couldn't convert read catalog version " <> versionText <> ", err:" <> tshow err + onLeft (readEither $ T.unpack versionText) + $ \err -> throw500 $ "Unexpected: couldn't convert read catalog version " <> versionText <> ", err:" <> tshow err from3To4 :: forall m. 
(Backend ('Postgres 'Vanilla), MonadTx m) => m () from3To4 = liftTx do @@ -90,10 +91,10 @@ from3To4 = liftTx do (PG.ViaJSON $ J.toJSON etc, name) True -setCatalogVersion :: MonadTx m => Text -> UTCTime -> m () +setCatalogVersion :: (MonadTx m) => Text -> UTCTime -> m () setCatalogVersion ver time = - liftTx $ - PG.unitQE + liftTx + $ PG.unitQE defaultTxErrorHandler [PG.sql| INSERT INTO hdb_catalog.hdb_version (version, upgraded_on) VALUES ($1, $2) diff --git a/server/src-lib/Hasura/Server/Migrate/Version.hs b/server/src-lib/Hasura/Server/Migrate/Version.hs index 92684fb9954..4a1363018b4 100644 --- a/server/src-lib/Hasura/Server/Migrate/Version.hs +++ b/server/src-lib/Hasura/Server/Migrate/Version.hs @@ -73,18 +73,19 @@ instance ToEngineLog (SourceName, SourceCatalogMigrationState) Hasura where SCMSNothingToDo catalogVersion -> "source " <> sourceName - <<> " is already at the latest catalog version (" + <<> " is already at the latest catalog version (" <> tshow catalogVersion <> ")." SCMSInitialized catalogVersion -> "source " <> sourceName - <<> " has the source catalog version successfully initialized (at version " + <<> " has the source catalog version successfully initialized (at version " <> tshow catalogVersion <> ")." SCMSMigratedTo oldCatalogVersion newCatalogVersion -> "source " - <> sourceName <<> " has been migrated successfully from catalog version " + <> sourceName + <<> " has been migrated successfully from catalog version " <> tshow oldCatalogVersion <> " to " <> tshow newCatalogVersion @@ -93,13 +94,13 @@ instance ToEngineLog (SourceName, SourceCatalogMigrationState) Hasura where "Source catalog migration for source: " <> sourceName <<> " is on hold due to " <> reason <> "." SCMSNotSupported -> "Source catalog migration is not supported for source " <>> sourceName - in toEngineLog $ - StartupLog + in toEngineLog + $ StartupLog { slLogLevel = LevelInfo, slKind = "source_catalog_migrate", slInfo = - J.toJSON $ - J.object + J.toJSON + $ J.object [ "source" J..= sourceName, "message" J..= migrationStatusMessage ] diff --git a/server/src-lib/Hasura/Server/OpenAPI.hs b/server/src-lib/Hasura/Server/OpenAPI.hs index 06f36a667de..f2685216094 100644 --- a/server/src-lib/Hasura/Server/OpenAPI.hs +++ b/server/src-lib/Hasura/Server/OpenAPI.hs @@ -34,12 +34,12 @@ buildOpenAPI :: (MonadError QErr m, MonadFix m) => SchemaCache -> m OpenApi buildOpenAPI schemaCache = do (defs, spec) <- flip runDeclareT mempty do endpoints <- buildAllEndpoints schemaCache (scAdminIntrospection schemaCache) - pure $ - mempty - & paths .~ fmap fst endpoints - & info . title .~ "Rest Endpoints" - & info . description - ?~ "This OpenAPI specification is automatically generated by Hasura." <> foldMap snd endpoints + pure + $ mempty + & paths .~ fmap fst endpoints + & info . title .~ "Rest Endpoints" + & info . description + ?~ "This OpenAPI specification is automatically generated by Hasura." <> foldMap snd endpoints pure $ spec & components . schemas .~ defs -------------------------------------------------------------------------------- @@ -144,20 +144,21 @@ collectParams (Structure _ vars) eURL = do -- TODO: document this -- NOTE: URL Variable name ':' prefix is removed for `elem` lookup. pathVars = map (T.drop 1) $ concat $ splitPath pure (const []) eURL - pure $ + pure + $ -- We always inline the schema, since we might need to add the default value. 
- Inline $ - mempty - & name .~ varName - & description .~ desc - & in_ .~ (if varName `elem` pathVars then ParamPath else ParamQuery) - & schema - ?~ Inline - ( mempty - & default_ .~ (gqlToJsonValue <$> _viDefaultValue) - & type_ ?~ refType - & pattern .~ typePattern - ) + Inline + $ mempty + & name .~ varName + & description .~ desc + & in_ .~ (if varName `elem` pathVars then ParamPath else ParamQuery) + & schema + ?~ Inline + ( mempty + & default_ .~ (gqlToJsonValue <$> _viDefaultValue) + & type_ ?~ refType + & pattern .~ typePattern + ) -------------------------------------------------------------------------------- -- Request body @@ -177,27 +178,28 @@ buildRequestBody Structure {..} = do then pure Nothing else do (varProperties, Any isBodyRequired) <- - runCircularT $ - mconcat <$> for vars \(varName, varInfo) -> do + runCircularT + $ mconcat + <$> for vars \(varName, varInfo) -> do (resolvedVarInfo, isVarRequired) <- buildVariableSchema varInfo pure (InsOrdHashMap.singleton (G.unName varName) resolvedVarInfo, Any isVarRequired) - pure $ - Just $ - Inline $ - mempty - & description ?~ "Query parameters can also be provided in the request body as a JSON object" - & required ?~ isBodyRequired - & content - .~ InsOrdHashMap.singleton - ("application" // "json") - ( mempty - & schema - ?~ Inline - ( mempty - & type_ ?~ OpenApiObject - & properties .~ varProperties - ) - ) + pure + $ Just + $ Inline + $ mempty + & description ?~ "Query parameters can also be provided in the request body as a JSON object" + & required ?~ isBodyRequired + & content + .~ InsOrdHashMap.singleton + ("application" // "json") + ( mempty + & schema + ?~ Inline + ( mempty + & type_ ?~ OpenApiObject + & properties .~ varProperties + ) + ) -- | Given the information about a variable, build the corresponding schema. -- @@ -245,7 +247,7 @@ buildVariableSchema VariableInfo {..} = do -- | Given the information about an input type, build the corresponding schema. buildInputFieldSchema :: - MonadFix m => + (MonadFix m) => G.GType -> InputFieldInfo -> CircularT (G.Name, G.Nullability) (Referenced Schema) (DeclareM m) (Referenced Schema) @@ -277,21 +279,21 @@ buildInputFieldSchema gType = \case -- | Given the 'Structure' of a query, generate the corresponding 'Response'. buildResponse :: - Monad m => + (Monad m) => Structure -> EndpointMethod -> Text -> DeclareM m Response buildResponse (Structure fields _) endpointMethod endpointURL = do fs <- buildSelectionSchema $ HashMap.toList fields - pure $ - mempty - & content .~ InsOrdHashMap.singleton ("application" // "json") (mempty & schema ?~ Inline fs) - & description .~ "Responses for " <> tshow endpointMethod <> " " <> endpointURL + pure + $ mempty + & content .~ InsOrdHashMap.singleton ("application" // "json") (mempty & schema ?~ Inline fs) + & description .~ "Responses for " <> tshow endpointMethod <> " " <> endpointURL -- | Given a list of fields and their types, build a corresponding schema. buildSelectionSchema :: - Monad m => + (Monad m) => [(G.Name, FieldInfo)] -> DeclareM m Schema buildSelectionSchema fields = do @@ -302,7 +304,7 @@ buildSelectionSchema fields = do -- | Build the schema for a given output type. 
buildFieldSchema :: - Monad m => + (Monad m) => FieldInfo -> DeclareM m (Referenced Schema) buildFieldSchema = \case @@ -313,13 +315,13 @@ buildFieldSchema = \case -- this output field is an object: we inline it FieldObjectInfo gType ObjectInfo {..} -> applyModifiers gType $ \typeName nullability -> do objectSchema <- buildSelectionSchema $ HashMap.toList _oiSelection - pure $ - Inline $ - objectSchema - & title ?~ G.unName typeName - & description .~ fmap G.unDescription (G._otdDescription _oiTypeDefinition) - & type_ ?~ OpenApiObject - & nullable ?~ G.unNullability nullability + pure + $ Inline + $ objectSchema + & title ?~ G.unName typeName + & description .~ fmap G.unDescription (G._otdDescription _oiTypeDefinition) + & type_ ?~ OpenApiObject + & nullable ?~ G.unNullability nullability -------------------------------------------------------------------------------- -- Scalars @@ -327,7 +329,7 @@ buildFieldSchema = \case -- | Craft the OpenAPI 'Schema' for a given scalar. Any non-standard scalar will -- instead be declared, and returned by reference. buildScalarSchema :: - Monad m => + (Monad m) => ScalarInfo -> G.Name -> G.Nullability -> @@ -345,9 +347,9 @@ buildScalarSchema ScalarInfo {..} scalarName nullability = do else declareType scalarName nullability resultSchema -- there isn't: we declare that type and return a reference to it Nothing -> - declareType scalarName nullability $ - baseSchema - & description .~ fmap G.unDescription (G._stdDescription _siTypeDefinition) + declareType scalarName nullability + $ baseSchema + & description .~ fmap G.unDescription (G._stdDescription _siTypeDefinition) where baseSchema = mempty @@ -376,18 +378,18 @@ getReferenceScalarInfo = -- | Craft the OpenAPI 'Schema' for a given enum. buildEnumSchema :: - Monad m => + (Monad m) => EnumInfo -> G.Name -> G.Nullability -> DeclareM m (Referenced Schema) buildEnumSchema EnumInfo {..} enumName nullability = - declareType enumName nullability $ - mempty - & title ?~ G.unName enumName - & enum_ ?~ enumValues - & nullable ?~ G.unNullability nullability - & description .~ fmap G.unDescription (G._etdDescription _eiTypeDefinition) + declareType enumName nullability + $ mempty + & title ?~ G.unName enumName + & enum_ ?~ enumValues + & nullable ?~ G.unNullability nullability + & description .~ fmap G.unDescription (G._etdDescription _eiTypeDefinition) where enumValues :: [J.Value] enumValues = @@ -401,7 +403,7 @@ buildEnumSchema EnumInfo {..} enumName nullability = -- function to be used on the actual underlying type, construct a 'Schema' by -- recursively applying modifiers. applyModifiers :: - Monad m => + (Monad m) => G.GType -> (G.Name -> G.Nullability -> m (Referenced Schema)) -> m (Referenced Schema) @@ -409,15 +411,15 @@ applyModifiers gtype fun = case gtype of G.TypeNamed nullability typeName -> fun typeName nullability G.TypeList nullability innerType -> do s <- applyModifiers innerType fun - pure $ - Inline $ - mempty - & nullable ?~ G.unNullability nullability - & type_ ?~ OpenApiArray - & items ?~ OpenApiItemsObject s + pure + $ Inline + $ mempty + & nullable ?~ G.unNullability nullability + & type_ ?~ OpenApiArray + & items ?~ OpenApiItemsObject s -- | Adds a declaration for the given type, returns a schema that references it. 
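The `Referenced Schema` values built throughout these hunks capture the OpenAPI choice between inlining a schema (`Inline`) and pointing at a declared component (`Ref`), with `declare` accumulating the component map. A sketch in the same openapi3 vocabulary as the code above, mirroring the shape of `declareType` below; the "User" schema is invented for illustration:

{-# LANGUAGE OverloadedStrings #-}

import Control.Lens ((&), (?~))
import qualified Data.HashMap.Strict.InsOrd as InsOrdHashMap
import Data.OpenApi
import Data.OpenApi.Declare (Declare, declare, runDeclare)

-- Declare a named schema component and hand back a reference to it.
declareUser :: Declare (Definitions Schema) (Referenced Schema)
declareUser = do
  let userSchema = mempty & type_ ?~ OpenApiObject & title ?~ "User"
  declare $ InsOrdHashMap.singleton "User" userSchema
  pure $ Ref (Reference "User")

main :: IO ()
main = print (fst (runDeclare declareUser mempty))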
-declareType :: Monad m => G.Name -> G.Nullability -> Schema -> DeclareM m (Referenced Schema) +declareType :: (Monad m) => G.Name -> G.Nullability -> Schema -> DeclareM m (Referenced Schema) declareType typeName nullability s = do let refName = mkReferenceName typeName nullability declare $ InsOrdHashMap.singleton refName s diff --git a/server/src-lib/Hasura/Server/Prometheus.hs b/server/src-lib/Hasura/Server/Prometheus.hs index 30535bfbc9b..15f09539311 100644 --- a/server/src-lib/Hasura/Server/Prometheus.hs +++ b/server/src-lib/Hasura/Server/Prometheus.hs @@ -314,9 +314,9 @@ data DynamicSubscriptionLabel = DynamicSubscriptionLabel instance ToLabels DynamicSubscriptionLabel where toLabels (DynamicSubscriptionLabel hash opName) = - Map.fromList $ - [("parameterized_query_hash", bsToTxt $ unParamQueryHash hash)] - <> maybe [] (\op -> [("operation_name", G.unName $ _unOperationName op)]) opName + Map.fromList + $ [("parameterized_query_hash", bsToTxt $ unParamQueryHash hash)] + <> maybe [] (\op -> [("operation_name", G.unName $ _unOperationName op)]) opName data SubscriptionLabel = SubscriptionLabel { _slKind :: SubscriptionKindLabel, @@ -346,9 +346,9 @@ recordMetricWithLabel getMetricState alwaysObserve metricActionWithLabel metricA -- Some metrics do not make sense without a dynamic label, hence only record the -- metric when alwaysObserve is set to true else do not record the metric GranularMetricsOff -> do - when alwaysObserve $ - liftIO $ - metricActionWithoutLabel + when alwaysObserve + $ liftIO + $ metricActionWithoutLabel -- | Observe a histogram metric with a label. -- diff --git a/server/src-lib/Hasura/Server/ResourceChecker.hs b/server/src-lib/Hasura/Server/ResourceChecker.hs index 4d711fb2f42..f8a0593a452 100644 --- a/server/src-lib/Hasura/Server/ResourceChecker.hs +++ b/server/src-lib/Hasura/Server/ResourceChecker.hs @@ -99,8 +99,8 @@ getServerResources_ mountPath = -- we need to find the root cgroup folder -- 1269 1263 0:31 /kubepods/burstable/pod37349393 /sys/fs/cgroup/blkio ro,nosuid,nodev,noexec,relatime master:14 - cgroup cgroup rw,blkio getCGroupMode = - liftIO $ - catchIOError + liftIO + $ catchIOError ( T.readFile mountPath >>= ( \contentLines -> case find (\ls -> ("cgroup" `elem` ls || "cgroup2" `elem` ls) && length ls >= 9) contentLines of @@ -112,8 +112,8 @@ getServerResources_ mountPath = <&> bool CGroupV1 CGroupV2 <&> (,cgroupRoot) ) - . map T.words - . T.lines + . map T.words + . T.lines ) (return . const (CGUnavailable, "")) @@ -204,15 +204,15 @@ getCGroupV2Resources cgroupRoot = do _ -> Left $ RCInternalError "INVALID_CPU_PERIOD_AND_QUOTA" getCpuAllocationCGroupV2 = - catchCpuAllocation $ - cpuLimits - `catchError` const cpuShares - `catchError` const (getCGroupV1CpuAllocation cgroupRoot) + catchCpuAllocation + $ cpuLimits + `catchError` const cpuShares + `catchError` const (getCGroupV1CpuAllocation cgroupRoot) getMemoryAllocationCGroupV2 = - catchMemoryAllocation $ - getMemoryAllocation (cgroupRoot "memory.max") - `catchError` const (getCGroupV1MemoryAllocation cgroupRoot) + catchMemoryAllocation + $ getMemoryAllocation (cgroupRoot "memory.max") + `catchError` const (getCGroupV1MemoryAllocation cgroupRoot) deduceCpuLimits :: Int -> Int -> Either ResourceCheckerError (Int, Maybe ResourceCheckerError) deduceCpuLimits quota period @@ -230,11 +230,11 @@ getMemoryAllocation path = bool (Just <$> liftEither (parseUint content)) getMaxPhysicalMemory (content == "max") <&> (,Nothing) ) - . T.strip + . 
T.strip -- catch cpu allocation error with default physical cpu resource catchCpuAllocation :: - MonadIO m => + (MonadIO m) => ExceptT e m (Int, Maybe ResourceCheckerError) -> m (Int, Maybe ResourceCheckerError) catchCpuAllocation m = @@ -243,7 +243,7 @@ catchCpuAllocation m = -- catch memory allocation error with default physical memory resource catchMemoryAllocation :: - MonadIO m => + (MonadIO m) => ExceptT ResourceCheckerError m (Maybe Int64, Maybe ResourceCheckerError) -> m (Maybe Int64, Maybe ResourceCheckerError) catchMemoryAllocation m = @@ -263,7 +263,7 @@ readFileT mapError path = do eContent <- liftIO $ catchIOError (Right <$> T.readFile path) (pure . Left . show) liftEither $ mapLeft mapError eContent -parseUint :: Integral a => T.Text -> Either ResourceCheckerError a +parseUint :: (Integral a) => T.Text -> Either ResourceCheckerError a parseUint = bimap RCInternalError fst . T.decimal readFileUint :: diff --git a/server/src-lib/Hasura/Server/SchemaUpdate.hs b/server/src-lib/Hasura/Server/SchemaUpdate.hs index b83ce30d4b6..a582904af70 100644 --- a/server/src-lib/Hasura/Server/SchemaUpdate.hs +++ b/server/src-lib/Hasura/Server/SchemaUpdate.hs @@ -66,13 +66,13 @@ logThreadStarted :: m () logThreadStarted logger instanceId threadType thread = let msg = tshow threadType <> " thread started" - in unLogger logger $ - StartupLog LevelInfo "schema-sync" $ - object - [ "instance_id" .= getInstanceId instanceId, - "thread_id" .= show (Immortal.threadId thread), - "message" .= msg - ] + in unLogger logger + $ StartupLog LevelInfo "schema-sync" + $ object + [ "instance_id" .= getInstanceId instanceId, + "thread_id" .= show (Immortal.threadId thread), + "message" .= msg + ] {- Note [Schema Cache Sync] ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -120,7 +120,7 @@ if listen started after schema cache init start time. 
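The `parseUint` hunk above relies on `Data.Text.Read.decimal` returning the parsed value together with the unconsumed remainder, which is why the definition projects with `fst`. A small self-contained sketch of the same shape, with a plain `String` error standing in for `RCInternalError`:

import Data.Bifunctor (bimap)
import qualified Data.Text as T
import qualified Data.Text.Read as TR

-- TR.decimal :: (Integral a) => T.Text -> Either String (a, T.Text)
-- On success it returns the parsed value plus the unparsed remainder;
-- 'fst' drops the remainder and 'bimap' rewraps the error side.
parseUintSketch :: (Integral a) => T.Text -> Either String a
parseUintSketch = bimap ("invalid uint: " <>) fst . TR.decimal

-- parseUintSketch (T.pack "100000\n") == Right 100000
-- parseUintSketch (T.pack "max")      == Left "invalid uint: ..."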
-- | An async thread which listens to Postgres notify to enable schema syncing -- See Note [Schema Cache Sync] startSchemaSyncListenerThread :: - C.ForkableMonadIO m => + (C.ForkableMonadIO m) => Logger Hasura -> PG.PGPool -> InstanceId -> @@ -130,8 +130,8 @@ startSchemaSyncListenerThread logger pool instanceId interval metaVersionRef = do -- Start listener thread listenerThread <- - C.forkManagedT "SchemeUpdate.listener" logger $ - listener logger pool metaVersionRef (unrefine interval) + C.forkManagedT "SchemeUpdate.listener" logger + $ listener logger pool metaVersionRef (unrefine interval) logThreadStarted logger instanceId TTListener listenerThread pure listenerThread @@ -153,8 +153,8 @@ startSchemaSyncProcessorThread appStateRef logTVar = do let logger = _lsLogger appEnvLoggers -- Start processor thread processorThread <- - C.forkManagedT "SchemeUpdate.processor" logger $ - processor appEnvMetadataVersionRef appStateRef logTVar + C.forkManagedT "SchemeUpdate.processor" logger + $ processor appEnvMetadataVersionRef appStateRef logTVar logThreadStarted logger appEnvInstanceId TTProcessor processorThread pure processorThread @@ -166,8 +166,8 @@ schemaVersionCheckHandler :: PG.PGPool -> STM.TMVar MetadataResourceVersion -> IO (Either QErr ()) schemaVersionCheckHandler pool metaVersionRef = runExceptT - ( PG.runTx pool (PG.RepeatableRead, Nothing) $ - fetchMetadataResourceVersionFromCatalog + ( PG.runTx pool (PG.RepeatableRead, Nothing) + $ fetchMetadataResourceVersionFromCatalog ) >>= \case Right version -> Right <$> forcePut metaVersionRef version @@ -212,7 +212,7 @@ toLogError es qerr mrv = not $ isQErrLastSeen || isMetadataResourceVersionLastSe -- | An IO action that listens to postgres for events and pushes them to a Queue, in a loop forever. listener :: - MonadIO m => + (MonadIO m) => Logger Hasura -> PG.PGPool -> STM.TMVar MetadataResourceVersion -> @@ -234,9 +234,9 @@ listener logger pool metaVersionRef interval = L.iterateM_ listenerLoop defaultE else do pure errorState Right _ -> do - when (isInErrorState errorState) $ - logInfo logger TTListener $ - object ["message" .= ("SchemaSync Restored..." :: Text)] + when (isInErrorState errorState) + $ logInfo logger TTListener + $ object ["message" .= ("SchemaSync Restored..." :: Text)] pure defaultErrorState liftIO $ C.sleep $ milliseconds interval pure nextErr @@ -283,8 +283,9 @@ refreshSchemaCache logTVar = do AppEnv {..} <- askAppEnv let logger = _lsLogger appEnvLoggers - respErr <- runExceptT $ - withSchemaCacheUpdate appStateRef logger (Just logTVar) $ do + respErr <- runExceptT + $ withSchemaCacheUpdate appStateRef logger (Just logTVar) + $ do rebuildableCache <- liftIO $ fst <$> getRebuildableSchemaCacheWithVersion appStateRef appContext <- liftIO $ getAppContext appStateRef let dynamicConfig = buildCacheDynamicConfig appContext @@ -293,41 +294,41 @@ refreshSchemaCache schemaCache <- askSchemaCache let engineResourceVersion = scMetadataResourceVersion schemaCache unless (engineResourceVersion == resourceVersion) $ do - logInfo logger threadType $ - String $ - T.unwords - [ "Received metadata resource version:", - showMetadataResourceVersion resourceVersion <> ",", - "different from the current engine resource version:", - showMetadataResourceVersion engineResourceVersion <> ".", - "Trying to update the schema cache."
- ] + logInfo logger threadType + $ String + $ T.unwords + [ "Received metadata resource version:", + showMetadataResourceVersion resourceVersion <> ",", + "different from the current engine resource version:", + showMetadataResourceVersion engineResourceVersion <> ".", + "Trying to update the schema cache." + ] MetadataWithResourceVersion metadata latestResourceVersion <- liftEitherM fetchMetadata - logInfo logger threadType $ - String $ - T.unwords - [ "Fetched metadata with resource version:", - showMetadataResourceVersion latestResourceVersion - ] + logInfo logger threadType + $ String + $ T.unwords + [ "Fetched metadata with resource version:", + showMetadataResourceVersion latestResourceVersion + ] notifications <- liftEitherM $ fetchMetadataNotifications engineResourceVersion appEnvInstanceId case notifications of [] -> do - logInfo logger threadType $ - String $ - T.unwords - [ "Fetched metadata notifications and received no notifications. Not updating the schema cache.", - "Only setting resource version:", - showMetadataResourceVersion latestResourceVersion, - "in schema cache" - ] + logInfo logger threadType + $ String + $ T.unwords + [ "Fetched metadata notifications and received no notifications. Not updating the schema cache.", + "Only setting resource version:", + showMetadataResourceVersion latestResourceVersion, + "in schema cache" + ] setMetadataResourceVersionInSchemaCache latestResourceVersion _ -> do - logInfo logger threadType $ - String "Fetched metadata notifications and received some notifications. Updating the schema cache." + logInfo logger threadType + $ String "Fetched metadata notifications and received some notifications. Updating the schema cache." let cacheInvalidations = if any ((== (engineResourceVersion + 1)) . fst) notifications then -- If (engineResourceVersion + 1) is in the list of notifications then @@ -341,25 +342,26 @@ refreshSchemaCache ciRemoteSchemas = HS.fromList $ getAllRemoteSchemas schemaCache, ciSources = HS.fromList $ HashMap.keys $ scSources schemaCache, ciDataConnectors = - maybe mempty (HS.fromList . HashMap.keys . unBackendInfoWrapper) $ - BackendMap.lookup @'DataConnector $ - scBackendCache schemaCache + maybe mempty (HS.fromList . HashMap.keys . unBackendInfoWrapper) + $ BackendMap.lookup @'DataConnector + $ scBackendCache schemaCache } buildSchemaCacheWithOptions CatalogSync cacheInvalidations metadata setMetadataResourceVersionInSchemaCache latestResourceVersion - logInfo logger threadType $ - String $ - "Schema cache updated with resource version: " <> showMetadataResourceVersion latestResourceVersion + logInfo logger threadType + $ String + $ "Schema cache updated with resource version: " + <> showMetadataResourceVersion latestResourceVersion pure (msg, cache) onLeft respErr (logError logger threadType . 
TEQueryError) logInfo :: (MonadIO m) => Logger Hasura -> SchemaSyncThreadType -> Value -> m () logInfo logger threadType val = - unLogger logger $ - SchemaSyncLog LevelInfo threadType val + unLogger logger + $ SchemaSyncLog LevelInfo threadType val logError :: (MonadIO m, ToJSON a) => Logger Hasura -> SchemaSyncThreadType -> a -> m () logError logger threadType err = - unLogger logger $ - SchemaSyncLog LevelError threadType $ - object ["error" .= toJSON err] + unLogger logger + $ SchemaSyncLog LevelError threadType + $ object ["error" .= toJSON err] diff --git a/server/src-lib/Hasura/Server/Telemetry.hs b/server/src-lib/Hasura/Server/Telemetry.hs index 1c278d838cb..555cdf873c0 100644 --- a/server/src-lib/Hasura/Server/Telemetry.hs +++ b/server/src-lib/Hasura/Server/Telemetry.hs @@ -191,12 +191,12 @@ runTelemetry (Logger logger) appStateRef metadataDbUid pgVersion computeResource payloads = J.encode <$> telemetries serverTelemetry = - J.encode $ - ServerTelemetryRow $ - ServerTelemetry - (_rcrCpu computeResources) - (_rcrMemory computeResources) - (_rcrErrorCode computeResources) + J.encode + $ ServerTelemetryRow + $ ServerTelemetry + (_rcrCpu computeResources) + (_rcrMemory computeResources) + (_rcrErrorCode computeResources) for_ (serverTelemetry : payloads) $ \payload -> do logger $ debugLBS $ "metrics_info: " <> payload @@ -289,9 +289,9 @@ computeMetrics sourceInfo _mtServiceTimings remoteSchemaMap actionCache = _mtPermissions = PermissionMetric {..} _mtEventTriggers = - HashMap.size $ - HashMap.filter (not . HashMap.null) $ - HashMap.map _tiEventTriggerInfoMap sourceTableCache + HashMap.size + $ HashMap.filter (not . HashMap.null) + $ HashMap.map _tiEventTriggerInfoMap sourceTableCache _mtRemoteSchemas = HashMap.size <$> remoteSchemaMap _mtFunctions = HashMap.size $ HashMap.filter (not . isSystemDefined . _fiSystemDefined) sourceFunctionCache _mtActions = computeActionsMetrics <$> actionCache diff --git a/server/src-lib/Hasura/Server/Telemetry/Counters.hs b/server/src-lib/Hasura/Server/Telemetry/Counters.hs index 27784f34d4d..04b26eea534 100644 --- a/server/src-lib/Hasura/Server/Telemetry/Counters.hs +++ b/server/src-lib/Hasura/Server/Telemetry/Counters.hs @@ -153,17 +153,17 @@ totalTimeBuckets = coerce [0.000, 0.001, 0.050, 1.000, 3600.000 :: Seconds] -- | Save a timing metric sample in our in-memory store. These will be -- accumulated and uploaded periodically in "Hasura.Server.Telemetry". -recordTimingMetric :: MonadIO m => RequestDimensions -> RequestTimings -> m () +recordTimingMetric :: (MonadIO m) => RequestDimensions -> RequestTimings -> m () recordTimingMetric reqDimensions RequestTimings {..} = liftIO $ do let ourBucket = - fromMaybe (RunningTimeBucket 0) $ -- although we expect 'head' would be safe here - listToMaybe $ - dropWhile (> coerce telemTimeTot) $ - reverse $ - sort totalTimeBuckets - atomicModifyIORef' requestCounters $ - (,()) - . HashMap.insertWith (<>) (reqDimensions, ourBucket) RequestTimingsCount {telemCount = 1, ..} + fromMaybe (RunningTimeBucket 0) + $ listToMaybe -- although we expect 'head' would be safe here + $ dropWhile (> coerce telemTimeTot) + $ reverse + $ sort totalTimeBuckets + atomicModifyIORef' requestCounters + $ (,()) + . HashMap.insertWith (<>) (reqDimensions, ourBucket) RequestTimingsCount {telemCount = 1, ..} -- | The final shape of this part of our metrics data JSON. 
This should allow -- reasonably efficient querying using GIN indexes and JSONB containment @@ -193,10 +193,10 @@ instance J.ToJSON ServiceTimingMetrics instance J.FromJSON ServiceTimingMetrics -dumpServiceTimingMetrics :: MonadIO m => m ServiceTimingMetrics +dumpServiceTimingMetrics :: (MonadIO m) => m ServiceTimingMetrics dumpServiceTimingMetrics = liftIO $ do cs <- readIORef requestCounters - let serviceTimingMetrics = flip map (HashMap.toList cs) $ - \((dimensions, bucket), metrics) -> ServiceTimingMetric {..} + let serviceTimingMetrics = flip map (HashMap.toList cs) + $ \((dimensions, bucket), metrics) -> ServiceTimingMetric {..} collectionTag = round approxStartTime return ServiceTimingMetrics {..} diff --git a/server/src-lib/Hasura/Server/Types.hs b/server/src-lib/Hasura/Server/Types.hs index ea6b245cc1f..2fcf71e78a4 100644 --- a/server/src-lib/Hasura/Server/Types.hs +++ b/server/src-lib/Hasura/Server/Types.hs @@ -104,9 +104,9 @@ instance FromJSON ExperimentalFeature where parseJSON = withText "ExperimentalFeature" $ \case k | Just (_, ef) <- find ((== k) . fst) experimentalFeatures -> return $ ef _ -> - fail $ - "ExperimentalFeature can only be one of these values: " - <> unpack (intercalate "," (map fst experimentalFeatures)) + fail + $ "ExperimentalFeature can only be one of these values: " + <> unpack (intercalate "," (map fst experimentalFeatures)) where experimentalFeatures :: [(Text, ExperimentalFeature)] experimentalFeatures = @@ -122,8 +122,9 @@ data MaintenanceMode a = MaintenanceModeEnabled a | MaintenanceModeDisabled instance FromJSON (MaintenanceMode ()) where parseJSON = - withBool "MaintenanceMode" $ - pure . bool MaintenanceModeDisabled (MaintenanceModeEnabled ()) + withBool "MaintenanceMode" + $ pure + . bool MaintenanceModeDisabled (MaintenanceModeEnabled ()) instance ToJSON (MaintenanceMode ()) where toJSON = Bool . 
(== MaintenanceModeEnabled ()) @@ -179,7 +180,7 @@ instance ToJSON GranularPrometheusMetricsState where GranularMetricsOff -> Bool False GranularMetricsOn -> Bool True -class Monad m => MonadGetPolicies m where +class (Monad m) => MonadGetPolicies m where runGetApiTimeLimit :: m (Maybe MaxTime) diff --git a/server/src-lib/Hasura/Server/Utils.hs b/server/src-lib/Hasura/Server/Utils.hs index bdd2109a8e2..0d0e3ff2814 100644 --- a/server/src-lib/Hasura/Server/Utils.hs +++ b/server/src-lib/Hasura/Server/Utils.hs @@ -79,25 +79,25 @@ sqlHeader = ("Content-Type", "application/sql; charset=utf-8") gzipHeader :: HTTP.Header gzipHeader = ("Content-Encoding", "gzip") -userRoleHeader :: IsString a => a +userRoleHeader :: (IsString a) => a userRoleHeader = "x-hasura-role" -deprecatedAccessKeyHeader :: IsString a => a +deprecatedAccessKeyHeader :: (IsString a) => a deprecatedAccessKeyHeader = "x-hasura-access-key" -adminSecretHeader :: IsString a => a +adminSecretHeader :: (IsString a) => a adminSecretHeader = "x-hasura-admin-secret" -userIdHeader :: IsString a => a +userIdHeader :: (IsString a) => a userIdHeader = "x-hasura-user-id" -requestIdHeader :: IsString a => a +requestIdHeader :: (IsString a) => a requestIdHeader = "x-request-id" -contentLengthHeader :: IsString a => a +contentLengthHeader :: (IsString a) => a contentLengthHeader = "Content-Length" -useBackendOnlyPermissionsHeader :: IsString a => a +useBackendOnlyPermissionsHeader :: (IsString a) => a useBackendOnlyPermissionsHeader = "x-hasura-use-backend-only-permissions" getRequestHeader :: HTTP.HeaderName -> [HTTP.Header] -> Maybe B.ByteString @@ -280,13 +280,14 @@ executeJSONPath jsonPath = iparse (valueParser jsonPath) parseWithPathElement = \case Key k -> withObject "Object" (.: k) Index i -> - withArray "Array" $ - maybe (fail "Array index out of range") pure . (V.!? i) + withArray "Array" + $ maybe (fail "Array index out of range") pure + . (V.!? i) sha1 :: BL.ByteString -> B.ByteString sha1 = convert @_ @B.ByteString . Crypto.hashlazy @Crypto.SHA1 -cryptoHash :: J.ToJSON a => a -> B.ByteString +cryptoHash :: (J.ToJSON a) => a -> B.ByteString cryptoHash = Base16.encode . sha1 . J.encode readIsoLevel :: String -> Either String PG.TxIsolation diff --git a/server/src-lib/Hasura/Server/Version.hs b/server/src-lib/Hasura/Server/Version.hs index 18da3b9f8b5..6e07c88023f 100644 --- a/server/src-lib/Hasura/Server/Version.hs +++ b/server/src-lib/Hasura/Server/Version.hs @@ -48,24 +48,25 @@ instance FromJSON Version where currentVersion :: Version currentVersion = - fromText $ - T.dropWhileEnd (== '\n') $ - T.pack $ - -- NOTE: This must work correctly in the presence of a caching! See - -- graphql-engine.cabal (search for “CURRENT_VERSION”) for details - -- about our approach here. 
We could use embedFile but want a nice - error message - $( do - versionFileName <- makeRelativeToProject "CURRENT_VERSION" - addDependentFile versionFileName - let noFileErr = - "\n===========================================================================" - <> "\n>>> DEAR HASURIAN: The way we bake versions into the server has " - <> "\n>>> changed; You'll need to run the following once in your repo to proceed: " - <> "\n>>> $ echo 12345 > \"$(git rev-parse --show-toplevel)/server/CURRENT_VERSION\"" - <> "\n===========================================================================\n" - runIO (readFile versionFileName `onException` error noFileErr) >>= stringE - ) + fromText + $ T.dropWhileEnd (== '\n') + $ T.pack + $ + -- NOTE: This must work correctly in the presence of caching! See + -- graphql-engine.cabal (search for “CURRENT_VERSION”) for details + -- about our approach here. We could use embedFile but want a nice + -- error message + $( do + versionFileName <- makeRelativeToProject "CURRENT_VERSION" + addDependentFile versionFileName + let noFileErr = + "\n===========================================================================" + <> "\n>>> DEAR HASURIAN: The way we bake versions into the server has " + <> "\n>>> changed; You'll need to run the following once in your repo to proceed: " + <> "\n>>> $ echo 12345 > \"$(git rev-parse --show-toplevel)/server/CURRENT_VERSION\"" + <> "\n===========================================================================\n" + runIO (readFile versionFileName `onException` error noFileErr) >>= stringE + ) versionToAssetsVersion :: Version -> Text versionToAssetsVersion = \case @@ -84,8 +85,8 @@ versionToAssetsVersion = \case Nothing -> Nothing Just r -> if - | T.null r -> Nothing - | otherwise -> T.pack <$> getChannelFromPreRelease (T.unpack r) + | T.null r -> Nothing + | otherwise -> T.pack <$> getChannelFromPreRelease (T.unpack r) getChannelFromPreRelease :: String -> Maybe String getChannelFromPreRelease sv = sv =~~ ("^([a-z]+)" :: String) diff --git a/server/src-lib/Hasura/Services/Network.hs b/server/src-lib/Hasura/Services/Network.hs index 3854c06cff6..0afe0c4bb3d 100644 --- a/server/src-lib/Hasura/Services/Network.hs +++ b/server/src-lib/Hasura/Services/Network.hs @@ -18,20 +18,20 @@ import Network.HTTP.Client qualified as HTTP -------------------------------------------------------------------------------- -class Monad m => ProvidesNetwork m where +class (Monad m) => ProvidesNetwork m where askHTTPManager :: m HTTP.Manager -instance ProvidesNetwork m => ProvidesNetwork (ReaderT r m) where +instance (ProvidesNetwork m) => ProvidesNetwork (ReaderT r m) where askHTTPManager = lift askHTTPManager instance (Monoid w, ProvidesNetwork m) => ProvidesNetwork (WriterT w m) where askHTTPManager = lift askHTTPManager -instance ProvidesNetwork m => ProvidesNetwork (StateT s m) where +instance (ProvidesNetwork m) => ProvidesNetwork (StateT s m) where askHTTPManager = lift askHTTPManager -instance ProvidesNetwork m => ProvidesNetwork (ExceptT e m) where +instance (ProvidesNetwork m) => ProvidesNetwork (ExceptT e m) where askHTTPManager = lift askHTTPManager -instance ProvidesNetwork m => ProvidesNetwork (TraceT m) where +instance (ProvidesNetwork m) => ProvidesNetwork (TraceT m) where askHTTPManager = lift askHTTPManager diff --git a/server/src-lib/Hasura/Session.hs b/server/src-lib/Hasura/Session.hs index 64beb3c6a9f..4b7fb03837c 100644 --- a/server/src-lib/Hasura/Session.hs +++ b/server/src-lib/Hasura/Session.hs @@ -95,9 +95,10 @@ mkUserInfo ::
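The `ProvidesNetwork` hunks above show the capability-class pattern these modules use: a one-method class for a shared resource, plus mechanical instances that `lift` through each transformer. A minimal sketch with a hypothetical `ProvidesThing` class and a plain `String` standing in for the `HTTP.Manager`:

import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Except (ExceptT)
import Control.Monad.Trans.Reader (ReaderT)
import Control.Monad.Trans.State (StateT)

-- A capability class in the style of ProvidesNetwork.
class (Monad m) => ProvidesThing m where
  askThing :: m String

-- Transformer instances are pure boilerplate: defer to the base monad.
instance (ProvidesThing m) => ProvidesThing (ReaderT r m) where
  askThing = lift askThing

instance (ProvidesThing m) => ProvidesThing (StateT s m) where
  askThing = lift askThing

instance (ProvidesThing m) => ProvidesThing (ExceptT e m) where
  askThing = lift askThing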
mkUserInfo roleBuild userAdminSecret sessionVariables = do roleName <- case roleBuild of URBFromSessionVariables -> - onNothing maybeSessionRole $ - throw400 InvalidParams $ - userRoleHeader <> " not found in session variables" + onNothing maybeSessionRole + $ throw400 InvalidParams + $ userRoleHeader + <> " not found in session variables" URBFromSessionVariablesFallback roleName' -> pure $ fromMaybe roleName' maybeSessionRole URBPreDetermined roleName' -> pure roleName' backendOnlyFieldAccess <- getBackendOnlyFieldAccess @@ -126,8 +127,10 @@ mkUserInfo roleBuild userAdminSecret sessionVariables = do Just varVal -> case parseStringAsBool (T.unpack varVal) of Left err -> - throw400 BadRequest $ - useBackendOnlyPermissionsHeader <> ": " <> T.pack err + throw400 BadRequest + $ useBackendOnlyPermissionsHeader + <> ": " + <> T.pack err Right privilege -> pure $ if privilege then BOFAAllowed else BOFADisallowed maybeRoleFromSessionVariables :: SessionVariables -> Maybe RoleName diff --git a/server/src-lib/Hasura/StoredProcedure/API.hs b/server/src-lib/Hasura/StoredProcedure/API.hs index cf18c7a7d4a..5bd10bdd76e 100644 --- a/server/src-lib/Hasura/StoredProcedure/API.hs +++ b/server/src-lib/Hasura/StoredProcedure/API.hs @@ -59,18 +59,18 @@ instance (Backend b) => HasCodec (TrackStoredProcedure b) where ("A request to track a stored procedure") $ AC.object (backendPrefix @b <> "TrackStoredProcedure") $ TrackStoredProcedure - <$> AC.requiredField "source" sourceDoc - AC..= tspSource + <$> AC.requiredField "source" sourceDoc + AC..= tspSource <*> AC.requiredField "stored_procedure" spDoc - AC..= tspStoredProcedure + AC..= tspStoredProcedure <*> AC.requiredField "configuration" configDoc - AC..= tspConfig + AC..= tspConfig <*> AC.optionalFieldWithDefault "arguments" mempty argumentsDoc - AC..= tspArguments + AC..= tspArguments <*> AC.optionalField "description" descriptionDoc - AC..= tspDescription + AC..= tspDescription <*> AC.requiredField "returns" returnsDoc - AC..= tspReturns + AC..= tspReturns where sourceDoc = "The source in which this stored procedure should be tracked" configDoc = "The configuration for the SQL stored procedure" @@ -128,16 +128,16 @@ data GetStoredProcedure (b :: BackendType) = GetStoredProcedure { gspSource :: SourceName } -deriving instance Backend b => Show (GetStoredProcedure b) +deriving instance (Backend b) => Show (GetStoredProcedure b) -deriving instance Backend b => Eq (GetStoredProcedure b) +deriving instance (Backend b) => Eq (GetStoredProcedure b) -instance Backend b => FromJSON (GetStoredProcedure b) where +instance (Backend b) => FromJSON (GetStoredProcedure b) where parseJSON = withObject "GetStoredProcedure" $ \o -> do gspSource <- o .: "source" pure GetStoredProcedure {..} -instance Backend b => ToJSON (GetStoredProcedure b) where +instance (Backend b) => ToJSON (GetStoredProcedure b) where toJSON GetStoredProcedure {..} = object [ "source" .= gspSource @@ -183,12 +183,12 @@ runTrackStoredProcedure env trackStoredProcedureRequest = do sourceMetadata <- maybe - ( throw400 NotFound $ - "Source '" - <> sourceNameToText source - <> "' of kind " - <> toTxt (reify (backendTag @b)) - <> " not found." + ( throw400 NotFound + $ "Source '" + <> sourceNameToText source + <> "' of kind " + <> toTxt (reify (backendTag @b)) + <> " not found." ) pure . preview (metaSources . ix source . 
toSourceMetadata @b) @@ -200,17 +200,17 @@ runTrackStoredProcedure env trackStoredProcedureRequest = do let storedProcedure = _spmStoredProcedure metadata metadataObj = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOStoredProcedure @b storedProcedure + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOStoredProcedure @b storedProcedure existingStoredProcedures = InsOrdHashMap.keys (_smStoredProcedures sourceMetadata) when (storedProcedure `elem` existingStoredProcedures) do throw400 AlreadyTracked $ "Stored procedure '" <> toTxt storedProcedure <> "' is already tracked." - buildSchemaCacheFor metadataObj $ - MetadataModifier $ - (metaSources . ix source . toSourceMetadata @b . smStoredProcedures) - %~ InsOrdHashMap.insert storedProcedure metadata + buildSchemaCacheFor metadataObj + $ MetadataModifier + $ (metaSources . ix source . toSourceMetadata @b . smStoredProcedures) + %~ InsOrdHashMap.insert storedProcedure metadata pure successMsg where @@ -222,17 +222,17 @@ data UntrackStoredProcedure (b :: BackendType) = UntrackStoredProcedure utspStoredProcedure :: FunctionName b } -deriving instance Backend b => Show (UntrackStoredProcedure b) +deriving instance (Backend b) => Show (UntrackStoredProcedure b) -deriving instance Backend b => Eq (UntrackStoredProcedure b) +deriving instance (Backend b) => Eq (UntrackStoredProcedure b) -instance Backend b => FromJSON (UntrackStoredProcedure b) where +instance (Backend b) => FromJSON (UntrackStoredProcedure b) where parseJSON = withObject "UntrackStoredProcedure" $ \o -> do utspSource <- o .: "source" utspStoredProcedure <- o .: "stored_procedure" pure UntrackStoredProcedure {..} -instance Backend b => ToJSON (UntrackStoredProcedure b) where +instance (Backend b) => ToJSON (UntrackStoredProcedure b) where toJSON UntrackStoredProcedure {..} = object [ "source" .= utspSource, @@ -255,12 +255,12 @@ runUntrackStoredProcedure q = do assertStoredProcedureExists @b source storedProcedure let metadataObj = - MOSourceObjId source $ - AB.mkAnyBackend $ - SMOStoredProcedure @b storedProcedure + MOSourceObjId source + $ AB.mkAnyBackend + $ SMOStoredProcedure @b storedProcedure - buildSchemaCacheFor metadataObj $ - dropStoredProcedureInMetadata @b source storedProcedure + buildSchemaCacheFor metadataObj + $ dropStoredProcedureInMetadata @b source storedProcedure pure successMsg where source = utspSource q @@ -268,14 +268,17 @@ runUntrackStoredProcedure q = do dropStoredProcedureInMetadata :: forall b. - BackendMetadata b => + (BackendMetadata b) => SourceName -> FunctionName b -> MetadataModifier dropStoredProcedureInMetadata source rootFieldName = do - MetadataModifier $ - metaSources . ix source . toSourceMetadata @b . smStoredProcedures - %~ InsOrdHashMap.delete rootFieldName + MetadataModifier + $ metaSources + . ix source + . toSourceMetadata @b + . 
smStoredProcedures + %~ InsOrdHashMap.delete rootFieldName -- | check feature flag is enabled before carrying out any actions throwIfFeatureDisabled :: (HasFeatureFlagChecker m, MonadError QErr m) => m () diff --git a/server/src-lib/Hasura/StoredProcedure/Metadata.hs b/server/src-lib/Hasura/StoredProcedure/Metadata.hs index 91ddfcb787b..48b8c108157 100644 --- a/server/src-lib/Hasura/StoredProcedure/Metadata.hs +++ b/server/src-lib/Hasura/StoredProcedure/Metadata.hs @@ -40,9 +40,9 @@ data StoredProcedureMetadata (b :: BackendType) = StoredProcedureMetadata } deriving (Generic) -deriving instance Backend b => Eq (StoredProcedureMetadata b) +deriving instance (Backend b) => Eq (StoredProcedureMetadata b) -deriving instance Backend b => Show (StoredProcedureMetadata b) +deriving instance (Backend b) => Show (StoredProcedureMetadata b) instance (Backend b) => HasCodec (StoredProcedureMetadata b) where codec = @@ -50,16 +50,16 @@ instance (Backend b) => HasCodec (StoredProcedureMetadata b) where ("A stored procedure as represented in metadata.") $ AC.object (backendPrefix @b <> "StoredProcedureMetadata") $ StoredProcedureMetadata - <$> AC.requiredField "stored_procedure" spDoc - AC..= _spmStoredProcedure + <$> AC.requiredField "stored_procedure" spDoc + AC..= _spmStoredProcedure <*> requiredField "configuration" configDoc - AC..= _spmConfig + AC..= _spmConfig <*> requiredField "returns" returnsDoc - AC..= _spmReturns + AC..= _spmReturns <*> optionalFieldWithDefault "arguments" mempty argumentDoc - AC..= _spmArguments + AC..= _spmArguments <*> optionalField "description" descriptionDoc - AC..= _spmDescription + AC..= _spmDescription where spDoc = "The name of the SQL stored procedure" configDoc = "The configuration for the SQL stored procedure" diff --git a/server/src-lib/Hasura/StoredProcedure/Schema.hs b/server/src-lib/Hasura/StoredProcedure/Schema.hs index 2866ea1e030..eeb91181f56 100644 --- a/server/src-lib/Hasura/StoredProcedure/Schema.hs +++ b/server/src-lib/Hasura/StoredProcedure/Schema.hs @@ -60,8 +60,9 @@ defaultBuildStoredProcedureRootFields StoredProcedureInfo {..} = runMaybeT $ do stringifyNumbers <- retrieve Options.soStringifyNumbers logicalModelPermissions <- - MaybeT . fmap Just $ - buildLogicalModelPermissions @b @r @m @n _spiReturns + MaybeT + . 
fmap Just + $ buildLogicalModelPermissions @b @r @m @n _spiReturns (selectionSetParser, logicalModelsArgsParser) <- MaybeT $ buildLogicalModelFields mempty _spiReturns @@ -83,37 +84,37 @@ defaultBuildStoredProcedureRootFields StoredProcedureInfo {..} = runMaybeT $ do sourceName (mkAnyBackend $ MO.SMOStoredProcedure @b _spiStoredProcedure) - pure $ - P.setFieldParserOrigin sourceObj $ - P.subselection - fieldName - description - ( (,) - <$> logicalModelsArgsParser - <*> storedProcedureArgsParser - ) - selectionSetParser - <&> \((lmArgs, spArgs), fields) -> - QDBMultipleRows $ - IR.AnnSelectG - { IR._asnFields = fields, - IR._asnFrom = - IR.FromStoredProcedure - StoredProcedure - { spStoredProcedure = _spiStoredProcedure, - spGraphqlName = _spiGraphqlName, - spArgs = arguments spArgs, - spLogicalModel = buildLogicalModelIR _spiReturns - }, - IR._asnPerm = logicalModelPermissions, - IR._asnArgs = lmArgs, - IR._asnStrfyNum = stringifyNumbers, - IR._asnNamingConvention = Just tCase - } + pure + $ P.setFieldParserOrigin sourceObj + $ P.subselection + fieldName + description + ( (,) + <$> logicalModelsArgsParser + <*> storedProcedureArgsParser + ) + selectionSetParser + <&> \((lmArgs, spArgs), fields) -> + QDBMultipleRows + $ IR.AnnSelectG + { IR._asnFields = fields, + IR._asnFrom = + IR.FromStoredProcedure + StoredProcedure + { spStoredProcedure = _spiStoredProcedure, + spGraphqlName = _spiGraphqlName, + spArgs = arguments spArgs, + spLogicalModel = buildLogicalModelIR _spiReturns + }, + IR._asnPerm = logicalModelPermissions, + IR._asnArgs = lmArgs, + IR._asnStrfyNum = stringifyNumbers, + IR._asnNamingConvention = Just tCase + } storedProcedureArgumentsSchema :: forall b r m n. - MonadBuildSchema b r m n => + (MonadBuildSchema b r m n) => G.Name -> HashMap ArgumentName (NullableScalarType b) -> MaybeT (SchemaT r m) (P.InputFieldsParser n (HashMap ArgumentName (Column.ColumnValue b))) diff --git a/server/src-lib/Hasura/StoredProcedure/Types.hs b/server/src-lib/Hasura/StoredProcedure/Types.hs index ca03b680e1d..d99493c2957 100644 --- a/server/src-lib/Hasura/StoredProcedure/Types.hs +++ b/server/src-lib/Hasura/StoredProcedure/Types.hs @@ -30,16 +30,20 @@ instance NFData StoredProcedureConfig instance HasCodec StoredProcedureConfig where codec = - AC.object "StoredProcedureConfig" $ - StoredProcedureConfig - <$> AC.requiredField' "exposed_as" AC..= _spcExposedAs - <*> AC.optionalFieldWith' "custom_name" graphQLFieldNameCodec AC..= _spcCustomName + AC.object "StoredProcedureConfig" + $ StoredProcedureConfig + <$> AC.requiredField' "exposed_as" + AC..= _spcExposedAs + <*> AC.optionalFieldWith' "custom_name" graphQLFieldNameCodec + AC..= _spcCustomName instance FromJSON StoredProcedureConfig where parseJSON = withObject "StoredProcedureConfig" $ \obj -> StoredProcedureConfig - <$> obj .: "exposed_as" - <*> obj .:? "custom_name" + <$> obj + .: "exposed_as" + <*> obj + .:? "custom_name" instance ToJSON StoredProcedureConfig where toJSON = genericToJSON hasuraJSON {omitNothingFields = True} diff --git a/server/src-lib/Hasura/Table/API.hs b/server/src-lib/Hasura/Table/API.hs index d2154a1aeb8..f725e8ab03b 100644 --- a/server/src-lib/Hasura/Table/API.hs +++ b/server/src-lib/Hasura/Table/API.hs @@ -92,15 +92,15 @@ instance (Backend b) => FromJSON (TrackTable b) where withOptions = withObject "TrackTable" \o -> TrackTable <$> o - .:? "source" - .!= defaultSource + .:? "source" + .!= defaultSource <*> o - .: "table" + .: "table" <*> o - .:? "is_enum" - .!= False + .:? "is_enum" + .!= False <*> o - .:? 
"apollo_federation_config" + .:? "apollo_federation_config" withoutOptions = TrackTable defaultSource <$> parseJSON v <*> pure False <*> pure Nothing data SetTableIsEnum b = SetTableIsEnum @@ -117,12 +117,12 @@ instance (Backend b) => FromJSON (SetTableIsEnum b) where parseJSON = withObject "SetTableIsEnum" $ \o -> SetTableIsEnum <$> o - .:? "source" - .!= defaultSource + .:? "source" + .!= defaultSource <*> o - .: "table" + .: "table" <*> o - .: "is_enum" + .: "is_enum" data UntrackTable b = UntrackTable { utSource :: SourceName, @@ -138,13 +138,13 @@ instance (Backend b) => FromJSON (UntrackTable b) where parseJSON = withObject "UntrackTable" $ \o -> UntrackTable <$> o - .:? "source" - .!= defaultSource + .:? "source" + .!= defaultSource <*> o - .: "table" + .: "table" <*> o - .:? "cascade" - .!= False + .:? "cascade" + .!= False isTableTracked :: forall b. (Backend b) => SourceInfo b -> TableName b -> Bool isTableTracked sourceInfo tableName = @@ -161,13 +161,16 @@ trackExistingTableOrViewPhase1 :: m () trackExistingTableOrViewPhase1 source tableName = do sourceInfo <- askSourceInfo source - when (isTableTracked @b sourceInfo tableName) $ - throw400 AlreadyTracked $ - "view/table already tracked: " <>> tableName + when (isTableTracked @b sourceInfo tableName) + $ throw400 AlreadyTracked + $ "view/table already tracked: " + <>> tableName let functionName = tableToFunction @b tableName - when (isJust $ HashMap.lookup functionName $ _siFunctions @b sourceInfo) $ - throw400 NotSupported $ - "function with name " <> tableName <<> " already exists" + when (isJust $ HashMap.lookup functionName $ _siFunctions @b sourceInfo) + $ throw400 NotSupported + $ "function with name " + <> tableName + <<> " already exists" queryForExistingFieldNames :: SchemaCache -> Vector Text queryForExistingFieldNames schemaCache = do @@ -182,26 +185,26 @@ queryForExistingFieldNames schemaCache = do -- } -- } introspectionQuery = - [ G.SelectionField $ - G.Field + [ G.SelectionField + $ G.Field Nothing GName.___schema mempty [] - [ G.SelectionField $ - G.Field + [ G.SelectionField + $ G.Field Nothing GName._queryType mempty [] - [ G.SelectionField $ - G.Field + [ G.SelectionField + $ G.Field Nothing GName._fields mempty [] - [ G.SelectionField $ - G.Field + [ G.SelectionField + $ G.Field Nothing GName._name mempty @@ -237,11 +240,11 @@ checkConflictingNode :: m () checkConflictingNode sc tnGQL = do let fieldNames = queryForExistingFieldNames sc - when (tnGQL `elem` fieldNames) $ - throw400 RemoteSchemaConflicts $ - "node " - <> tnGQL - <> " already exists in current graphql schema" + when (tnGQL `elem` fieldNames) + $ throw400 RemoteSchemaConflicts + $ "node " + <> tnGQL + <> " already exists in current graphql schema" findConflictingNodes :: SchemaCache -> @@ -253,10 +256,10 @@ findConflictingNodes sc extractName items = do flip foldMap items $ \item -> let name = extractName item err = - err400 RemoteSchemaConflicts $ - "node " - <> name - <> " already exists in current graphql schema" + err400 RemoteSchemaConflicts + $ "node " + <> name + <> " already exists in current graphql schema" in [(item, err) | name `elem` fieldNames] trackExistingTableOrViewPhase2 :: @@ -276,9 +279,9 @@ trackExistingTableOrViewPhase2 trackTable@TrackTableV2 {ttv2Table = TrackTable { -} checkConflictingNode sc $ snakeCaseTableName @b tName buildSchemaCacheFor - ( MOSourceObjId tSource $ - AB.mkAnyBackend $ - SMOTable @b tName + ( MOSourceObjId tSource + $ AB.mkAnyBackend + $ SMOTable @b tName ) $ mkTrackTableMetadataModifier trackTable 
pure successMsg @@ -326,8 +329,11 @@ data TrackTables b = TrackTables instance (Backend b) => FromJSON (TrackTables b) where parseJSON = withObject "TrackTables" $ \o -> do TrackTables - <$> o .: "tables" - <*> o .:? "allow_warnings" .!= AllowWarnings + <$> o + .: "tables" + <*> o + .:? "allow_warnings" + .!= AllowWarnings runTrackTablesQ :: forall b m. @@ -335,9 +341,9 @@ runTrackTablesQ :: TrackTables b -> m EncJSON runTrackTablesQ TrackTables {..} = do - unless (null duplicatedTables) $ - let tables = commaSeparated $ (\(source, tableName) -> toTxt source <> "." <> toTxt tableName) <$> duplicatedTables - in withPathK "tables" $ throw400 BadRequest ("The following tables occur more than once in the request: " <> tables) + unless (null duplicatedTables) + $ let tables = commaSeparated $ (\(source, tableName) -> toTxt source <> "." <> toTxt tableName) <$> duplicatedTables + in withPathK "tables" $ throw400 BadRequest ("The following tables occur more than once in the request: " <> tables) (successfulTables, metadataWarnings) <- runMetadataWarnings $ do phase1SuccessfulTables <- fmap mconcat . for _ttv2Tables $ \trackTable@TrackTableV2 {ttv2Table = TrackTable {..}} -> do @@ -350,14 +356,14 @@ runTrackTablesQ TrackTables {..} = do trackExistingTablesOrViewsPhase2 phase1SuccessfulTables - when (null successfulTables) $ - throw400WithDetail InvalidConfiguration "all tables failed to track" (toJSON metadataWarnings) + when (null successfulTables) + $ throw400WithDetail InvalidConfiguration "all tables failed to track" (toJSON metadataWarnings) case _ttv2AllowWarnings of AllowWarnings -> pure () DisallowWarnings -> - unless (null metadataWarnings) $ - throw400WithDetail (CustomCode "metadata-warnings") "failed due to metadata warnings" (toJSON metadataWarnings) + unless (null metadataWarnings) + $ throw400WithDetail (CustomCode "metadata-warnings") "failed due to metadata warnings" (toJSON metadataWarnings) pure $ mkSuccessResponseWithWarnings metadataWarnings where @@ -407,8 +413,11 @@ data UntrackTables b = UntrackTables instance (Backend b) => FromJSON (UntrackTables b) where parseJSON = withObject "UntrackTables" $ \o -> do UntrackTables - <$> o .: "tables" - <*> o .:? "allow_warnings" .!= AllowWarnings + <$> o + .: "tables" + <*> o + .:? "allow_warnings" + .!= AllowWarnings runUntrackTablesQ :: forall b m. @@ -416,9 +425,9 @@ runUntrackTablesQ :: UntrackTables b -> m EncJSON runUntrackTablesQ UntrackTables {..} = do - unless (null duplicatedTables) $ - let tables = commaSeparated $ (\(source, tableName) -> toTxt source <> "." <> toTxt tableName) <$> duplicatedTables - in withPathK "tables" $ throw400 BadRequest ("The following tables occur more than once in the request: " <> tables) + unless (null duplicatedTables) + $ let tables = commaSeparated $ (\(source, tableName) -> toTxt source <> "." <> toTxt tableName) <$> duplicatedTables + in withPathK "tables" $ throw400 BadRequest ("The following tables occur more than once in the request: " <> tables) (successfulTables, metadataWarnings) <- runMetadataWarnings $ do phase1SuccessfulTables <- fmap mconcat . 
for _utTables $ \untrackTable -> do @@ -431,14 +440,14 @@ runUntrackTablesQ UntrackTables {..} = do untrackExistingTablesOrViewsPhase2 phase1SuccessfulTables - when (null successfulTables) $ - throw400WithDetail InvalidConfiguration "all tables failed to untrack" (toJSON metadataWarnings) + when (null successfulTables) + $ throw400WithDetail InvalidConfiguration "all tables failed to untrack" (toJSON metadataWarnings) case _utAllowWarnings of AllowWarnings -> pure () DisallowWarnings -> - unless (null metadataWarnings) $ - throw400WithDetail (CustomCode "metadata-warnings") "failed due to metadata warnings" (toJSON metadataWarnings) + unless (null metadataWarnings) + $ throw400WithDetail (CustomCode "metadata-warnings") "failed due to metadata warnings" (toJSON metadataWarnings) pure $ mkSuccessResponseWithWarnings metadataWarnings where @@ -454,7 +463,7 @@ runUntrackTablesQ UntrackTables {..} = do _ -> Nothing ) -mkUntrackTableObjectId :: forall b. Backend b => UntrackTable b -> MetadataObjId +mkUntrackTableObjectId :: forall b. (Backend b) => UntrackTable b -> MetadataObjId mkUntrackTableObjectId UntrackTable {..} = MOSourceObjId utSource . AB.mkAnyBackend $ SMOTable @b utTable @@ -514,7 +523,9 @@ runSetExistingTableIsEnumQ (SetTableIsEnum source tableName isEnum) = do buildSchemaCacheFor (MOSourceObjId source $ AB.mkAnyBackend $ SMOTable @b tableName) $ MetadataModifier - $ tableMetadataSetter @b source tableName . tmIsEnum .~ isEnum + $ tableMetadataSetter @b source tableName + . tmIsEnum + .~ isEnum return successMsg data SetTableCustomization b = SetTableCustomization @@ -528,12 +539,12 @@ instance (Backend b) => FromJSON (SetTableCustomization b) where parseJSON = withObject "SetTableCustomization" $ \o -> SetTableCustomization <$> o - .:? "source" - .!= defaultSource + .:? "source" + .!= defaultSource <*> o - .: "table" + .: "table" <*> o - .: "configuration" + .: "configuration" data SetTableCustomFields = SetTableCustomFields { _stcfSource :: SourceName, @@ -547,16 +558,16 @@ instance FromJSON SetTableCustomFields where parseJSON = withObject "SetTableCustomFields" $ \o -> SetTableCustomFields <$> o - .:? "source" - .!= defaultSource + .:? "source" + .!= defaultSource <*> o - .: "table" + .: "table" <*> o - .:? "custom_root_fields" - .!= emptyCustomRootFields + .:? "custom_root_fields" + .!= emptyCustomRootFields <*> o - .:? "custom_column_names" - .!= HashMap.empty + .:? "custom_column_names" + .!= HashMap.empty runSetTableCustomFieldsQV2 :: (QErrM m, CacheRWM m, MetadataM m) => SetTableCustomFields -> m EncJSON @@ -567,7 +578,9 @@ runSetTableCustomFieldsQV2 (SetTableCustomFields source tableName rootFields col buildSchemaCacheFor (MOSourceObjId source $ AB.mkAnyBackend $ SMOTable @('Postgres 'Vanilla) tableName) $ MetadataModifier - $ tableMetadataSetter source tableName . tmConfiguration .~ tableConfig + $ tableMetadataSetter source tableName + . tmConfiguration + .~ tableConfig return successMsg runSetTableCustomization :: @@ -580,7 +593,9 @@ runSetTableCustomization (SetTableCustomization source table config) = do buildSchemaCacheFor (MOSourceObjId source $ AB.mkAnyBackend $ SMOTable @b table) $ MetadataModifier - $ tableMetadataSetter source table . tmConfiguration .~ config + $ tableMetadataSetter source table + . 
tmConfiguration + .~ config return successMsg untrackExistingTableOrViewPhase1 :: @@ -590,9 +605,9 @@ untrackExistingTableOrViewPhase1 :: m () untrackExistingTableOrViewPhase1 (UntrackTable source vn _) = do schemaCache <- askSchemaCache - void $ - unsafeTableInfo @b source vn (scSources schemaCache) - `onNothing` throw400 AlreadyUntracked ("view/table already untracked: " <>> vn) + void + $ unsafeTableInfo @b source vn (scSources schemaCache) + `onNothing` throw400 AlreadyUntracked ("view/table already untracked: " <>> vn) untrackExistingTableOrViewPhase2 :: forall b m. @@ -604,8 +619,8 @@ untrackExistingTableOrViewPhase2 untrackTable@(UntrackTable source tableName cas sourceConfig <- askSourceConfig @b source -- Report batch with an error if cascade is not set - unless (null indirectDeps || cascade) $ - reportDependentObjectsExist indirectDeps + unless (null indirectDeps || cascade) + $ reportDependentObjectsExist indirectDeps -- Purge all the dependents from state metadataModifier <- mkUntrackTableMetadataModifier untrackTable indirectDeps -- delete the table and its direct dependencies @@ -706,9 +721,11 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild Just metadataTable -> buildRawTableInfo -< (table, metadataTable, sourceConfig, reloadMetadataInvalidationKey) ) - |) (mkTableMetadataObject source tableName) + |) + (mkTableMetadataObject source tableName) ) - |) (HashMap.groupOnNE _tbiName tableBuildInputs) + |) + (HashMap.groupOnNE _tbiName tableBuildInputs) let rawTableCache = catMaybes rawTableInfos enumTables = flip mapMaybe rawTableCache \rawTableInfo -> (,,) <$> _tciPrimaryKey rawTableInfo <*> pure (_tciCustomConfig rawTableInfo) <*> _tciEnumValues rawTableInfo @@ -720,9 +737,9 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild where mkTableMetadataObject source name = MetadataObject - ( MOSourceObjId source $ - AB.mkAnyBackend $ - SMOTable @b name + ( MOSourceObjId source + $ AB.mkAnyBackend + $ SMOTable @b name ) (toJSON name) @@ -824,8 +841,10 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild This column -> pure (column, mempty) These column config -> pure (column, config) That _ -> - throw400 NotExists $ - "configuration was given for the column " <> fieldName <<> ", but no such column exists" + throw400 NotExists + $ "configuration was given for the column " + <> fieldName + <<> ", but no such column exists" extractColumnConfiguration :: (QErrM n) => @@ -854,8 +873,8 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild processRawColumnType isNullable = \case RawColumnTypeScalar scalarType -> do resolvedType <- resolveColumnType scalarType - pure $ - SCIScalarColumn + pure + $ SCIScalarColumn ColumnInfo { ciColumn = pgCol, ciName = applyFieldNameCaseIdentifier tCase name, @@ -866,8 +885,8 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild ciMutability = rciMutability rawInfo } RawColumnTypeObject supportsNestedObjects objectTypeName -> - pure $ - SCIObjectColumn @b + pure + $ SCIObjectColumn @b NestedObjectInfo { _noiSupportsNestedObjects = supportsNestedObjects, _noiColumn = pgCol, @@ -879,8 +898,8 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild } RawColumnTypeArray supportsNestedArrays rawColumnType isNullable' -> do nestedColumnInfo <- processRawColumnType isNullable' rawColumnType - pure $ - SCIArrayColumn @b + pure + $ SCIArrayColumn @b NestedArrayInfo { 
_naiSupportsNestedArrays = supportsNestedArrays, _naiIsNullable = isNullable, @@ -895,22 +914,24 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild Just (enumReference :| []) -> pure $ ColumnEnumReference enumReference -- multiple referenced enums? the schema is strange, so let’s reject it Just enumReferences -> - throw400 ConstraintViolation $ - "column " - <> rciName rawInfo <<> " in table " - <> tableName - <<> " references multiple enum tables (" - <> commaSeparated (map (dquote . erTable) $ toList enumReferences) - <> ")" + throw400 ConstraintViolation + $ "column " + <> rciName rawInfo + <<> " in table " + <> tableName + <<> " references multiple enum tables (" + <> commaSeparated (map (dquote . erTable) $ toList enumReferences) + <> ")" assertNoDuplicateFieldNames columns = void $ flip HashMap.traverseWithKey (HashMap.groupOn structuredColumnInfoName columns) \name columnsWithName -> case columnsWithName of one : two : more -> - throw400 AlreadyExists $ - "the definitions of columns " - <> englishList "and" (dquote . structuredColumnInfoColumn <$> (one :| two : more)) - <> " are in conflict: they are mapped to the same field name, " <>> name + throw400 AlreadyExists + $ "the definitions of columns " + <> englishList "and" (dquote . structuredColumnInfoColumn <$> (one :| two : more)) + <> " are in conflict: they are mapped to the same field name, " + <>> name _ -> pure () buildDescription :: TableName b -> TableConfig b -> DBTableMetadata b -> Maybe PGDescription @@ -934,12 +955,12 @@ instance (Backend b) => FromJSON (SetApolloFederationConfig b) where parseJSON = withObject "SetApolloFederationConfig" $ \o -> SetApolloFederationConfig <$> o - .:? "source" - .!= defaultSource + .:? "source" + .!= defaultSource <*> o - .: "table" + .: "table" <*> o - .:? "apollo_federation_config" + .:? "apollo_federation_config" runSetApolloFederationConfig :: forall b m. @@ -955,5 +976,7 @@ runSetApolloFederationConfig (SetApolloFederationConfig source table apolloFedCo -- this approach of replacing the configuration everytime the API is called -- and maybe throw some error if the configuration is already there. $ MetadataModifier - $ tableMetadataSetter @b source table . tmApolloFederationConfig .~ apolloFedConfig + $ tableMetadataSetter @b source table + . 
tmApolloFederationConfig + .~ apolloFedConfig return successMsg diff --git a/server/src-lib/Hasura/Table/Cache.hs b/server/src-lib/Hasura/Table/Cache.hs index 8f7958d4beb..d3be2e58c99 100644 --- a/server/src-lib/Hasura/Table/Cache.hs +++ b/server/src-lib/Hasura/Table/Cache.hs @@ -190,15 +190,17 @@ instance NFData CustomRootField instance HasCodec CustomRootField where codec = - dimapCodec dec enc $ - disjointEitherCodec nullCodec $ - disjointEitherCodec (codec @Text) nameAndComment + dimapCodec dec enc + $ disjointEitherCodec nullCodec + $ disjointEitherCodec (codec @Text) nameAndComment where nameAndComment = - AC.object "CustomRootField" $ - CustomRootField - <$> optionalFieldOrNullWith' "name" graphQLFieldNameCodec AC..= _crfName - <*> optionalFieldOrNullWithOmittedDefault' "comment" Automatic AC..= _crfComment + AC.object "CustomRootField" + $ CustomRootField + <$> optionalFieldOrNullWith' "name" graphQLFieldNameCodec + AC..= _crfName + <*> optionalFieldOrNullWithOmittedDefault' "comment" Automatic + AC..= _crfComment dec = \case Left _ -> CustomRootField Nothing Automatic @@ -251,26 +253,37 @@ instance NFData TableCustomRootFields instance HasCodec TableCustomRootFields where codec = - AC.object "TableCustomRootFields" $ - TableCustomRootFields - <$> field "select" AC..= _tcrfSelect - <*> field "select_by_pk" AC..= _tcrfSelectByPk - <*> field "select_aggregate" AC..= _tcrfSelectAggregate - <*> field "select_stream" AC..= _tcrfSelectStream - <*> field "insert" AC..= _tcrfInsert - <*> field "insert_one" AC..= _tcrfInsertOne - <*> field "update" AC..= _tcrfUpdate - <*> field "update_by_pk" AC..= _tcrfUpdateByPk - <*> field "update_many" AC..= _tcrfUpdateMany - <*> field "delete" AC..= _tcrfDelete - <*> field "delete_by_pk" AC..= _tcrfDeleteByPk + AC.object "TableCustomRootFields" + $ TableCustomRootFields + <$> field "select" + AC..= _tcrfSelect + <*> field "select_by_pk" + AC..= _tcrfSelectByPk + <*> field "select_aggregate" + AC..= _tcrfSelectAggregate + <*> field "select_stream" + AC..= _tcrfSelectStream + <*> field "insert" + AC..= _tcrfInsert + <*> field "insert_one" + AC..= _tcrfInsertOne + <*> field "update" + AC..= _tcrfUpdate + <*> field "update_by_pk" + AC..= _tcrfUpdateByPk + <*> field "update_many" + AC..= _tcrfUpdateMany + <*> field "delete" + AC..= _tcrfDelete + <*> field "delete_by_pk" + AC..= _tcrfDeleteByPk where field name = optionalFieldWithOmittedDefault' name defaultCustomRootField instance ToJSON TableCustomRootFields where toJSON TableCustomRootFields {..} = - object $ - filter + object + $ filter ((/= Null) . snd) [ "select" .= _tcrfSelect, "select_by_pk" .= _tcrfSelectByPk, @@ -303,9 +316,10 @@ instance FromJSON TableCustomRootFields where let duplicateRootFields = HS.toList . duplicates . mapMaybe _crfName $ getAllCustomRootFields tableCustomRootFields for_ (nonEmpty duplicateRootFields) \duplicatedFields -> - fail . T.unpack $ - "the following custom root field names are duplicated: " - <> englishList "and" (toTxt <$> duplicatedFields) + fail + . T.unpack + $ "the following custom root field names are duplicated: " + <> englishList "and" (toTxt <$> duplicatedFields) pure tableCustomRootFields where @@ -359,8 +373,8 @@ deriving instance (Backend b) => Eq (FieldInfo b) instance (Backend b) => ToJSON (FieldInfo b) where toJSON = - genericToJSON $ - defaultOptions + genericToJSON + $ defaultOptions { constructorTagModifier = snakeCase . 
drop 2, sumEncoding = TaggedObject "type" "detail" } @@ -753,15 +767,17 @@ instance NFData ColumnConfig instance HasCodec ColumnConfig where codec = - AC.object "ColumnConfig" $ - ColumnConfig - <$> optionalFieldOrNullWith' "custom_name" graphQLFieldNameCodec AC..= _ccfgCustomName - <*> optionalFieldWithOmittedDefault' "comment" Automatic AC..= _ccfgComment + AC.object "ColumnConfig" + $ ColumnConfig + <$> optionalFieldOrNullWith' "custom_name" graphQLFieldNameCodec + AC..= _ccfgCustomName + <*> optionalFieldWithOmittedDefault' "comment" Automatic + AC..= _ccfgComment instance ToJSON ColumnConfig where toJSON ColumnConfig {..} = - object $ - filter + object + $ filter ((/= Null) . snd) [ "custom_name" .= _ccfgCustomName, "comment" .= _ccfgComment @@ -770,8 +786,11 @@ instance ToJSON ColumnConfig where instance FromJSON ColumnConfig where parseJSON = withObject "ColumnConfig" $ \obj -> ColumnConfig - <$> obj .:? "custom_name" - <*> obj .:? "comment" .!= Automatic + <$> obj + .:? "custom_name" + <*> obj + .:? "comment" + .!= Automatic instance Semigroup ColumnConfig where a <> b = ColumnConfig customName comment @@ -806,12 +825,16 @@ emptyTableConfig = instance (Backend b) => HasCodec (TableConfig b) where codec = - AC.object (backendPrefix @b <> "TableConfig") $ - TableConfig - <$> optionalFieldWithDefault' "custom_root_fields" emptyCustomRootFields AC..= _tcCustomRootFields - <*> columnConfigCodec AC..= _tcColumnConfig - <*> optionalFieldOrNullWith' "custom_name" graphQLFieldNameCodec AC..= _tcCustomName - <*> optionalFieldWithOmittedDefault' "comment" Automatic AC..= _tcComment + AC.object (backendPrefix @b <> "TableConfig") + $ TableConfig + <$> optionalFieldWithDefault' "custom_root_fields" emptyCustomRootFields + AC..= _tcCustomRootFields + <*> columnConfigCodec + AC..= _tcColumnConfig + <*> optionalFieldOrNullWith' "custom_name" graphQLFieldNameCodec + AC..= _tcCustomName + <*> optionalFieldWithOmittedDefault' "comment" Automatic + AC..= _tcComment where -- custom_column_names is a deprecated property that has been replaced by column_config. -- We merge custom_column_names into column_config transparently to maintain backwards @@ -825,10 +848,12 @@ instance (Backend b) => HasCodec (TableConfig b) where -- values from @column_config@ and @custom_column_names@ are merged -- produce one value for @_tcColumnConfig@. columnConfigCodec = - dimapCodec dec enc $ - (,) - <$> optionalFieldWithDefault' "column_config" HashMap.empty AC..= fst - <*> optionalFieldWithDefaultWith' "custom_column_names" (hashMapCodec graphQLFieldNameCodec) HashMap.empty AC..= snd + dimapCodec dec enc + $ (,) + <$> optionalFieldWithDefault' "column_config" HashMap.empty + AC..= fst + <*> optionalFieldWithDefaultWith' "custom_column_names" (hashMapCodec graphQLFieldNameCodec) HashMap.empty + AC..= snd -- if @custom_column_names@ was given then merge its value during decoding -- to get a complete value for _tcColumnConfig @@ -846,10 +871,15 @@ instance (Backend b) => HasCodec (TableConfig b) where instance (Backend b) => FromJSON (TableConfig b) where parseJSON = withObject "TableConfig" $ \obj -> do TableConfig - <$> obj .:? "custom_root_fields" .!= emptyCustomRootFields + <$> obj + .:? "custom_root_fields" + .!= emptyCustomRootFields <*> parseColumnConfig obj - <*> obj .:? "custom_name" - <*> obj .:? "comment" .!= Automatic + <*> obj + .:? "custom_name" + <*> obj + .:? "comment" + .!= Automatic where -- custom_column_names is a deprecated property that has been replaced by column_config. 
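The `ColumnConfig` Semigroup above (its body falls outside the hunk) and the custom_column_names backwards-compatibility merge discussed in the surrounding comments both rely on a left-biased, first-`Just`-wins combine. A toy sketch with hypothetical field names and `Maybe` in place of the richer `Comment` type:

import Control.Applicative ((<|>))

data ColCfgSketch = ColCfgSketch
  { ccCustomName :: Maybe String,
    ccComment :: Maybe String
  }
  deriving (Show)

-- Prefer the left operand's settings, falling back to the right: this is
-- what lets an explicit column_config entry shadow a value coming from
-- the deprecated custom_column_names map.
instance Semigroup ColCfgSketch where
  a <> b =
    ColCfgSketch
      { ccCustomName = ccCustomName a <|> ccCustomName b,
        ccComment = ccComment a <|> ccComment b
      }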
-- We merge custom_column_names into column_config transparently to maintain backwards @@ -864,8 +894,8 @@ instance (Backend b) => FromJSON (TableConfig b) where instance (Backend b) => ToJSON (TableConfig b) where toJSON TableConfig {..} = - object $ - filter + object + $ filter ((/= Null) . snd) [ "custom_root_fields" .= _tcCustomRootFields, -- custom_column_names is a deprecated property that has been replaced by column_config. @@ -1061,9 +1091,9 @@ tciUniqueOrPrimaryKeyConstraints :: TableCoreInfoG b f (ColumnInfo b) -> Maybe (NonEmpty (UniqueConstraint b)) tciUniqueOrPrimaryKeyConstraints info = - NE.nonEmpty $ - maybeToList (primaryToUnique <$> _tciPrimaryKey info) - <> (toList (_tciUniqueConstraints info)) + NE.nonEmpty + $ maybeToList (primaryToUnique <$> _tciPrimaryKey info) + <> (toList (_tciUniqueConstraints info)) where primaryToUnique :: PrimaryKey b (ColumnInfo b) -> UniqueConstraint b primaryToUnique pk = UniqueConstraint (_pkConstraint pk) (HS.fromList . fmap ciColumn . toList $ _pkColumns pk) @@ -1138,14 +1168,14 @@ instance (Backend b) => FromJSON (ForeignKeyMetadata b) where unless (length columns == length foreignColumns) do fail "columns and foreign_columns differ in length" - pure $ - ForeignKeyMetadata + pure + $ ForeignKeyMetadata ForeignKey { _fkConstraint = constraint, _fkForeignTable = foreignTable, _fkColumnMapping = - NEHashMap.fromNonEmpty $ - NE.zip columns foreignColumns + NEHashMap.fromNonEmpty + $ NE.zip columns foreignColumns } -- | Metadata of any Backend table which is being extracted from source database @@ -1210,8 +1240,8 @@ askColInfo :: m (ColumnInfo backend) askColInfo m c msg = do fieldInfo <- - modifyErr ("column " <>) $ - askFieldInfo m (fromCol @backend c) + modifyErr ("column " <>) + $ askFieldInfo m (fromCol @backend c) case fieldInfo of (FIColumn (SCIScalarColumn colInfo)) -> pure colInfo (FIColumn (SCIObjectColumn _)) -> throwErr "object" @@ -1221,13 +1251,14 @@ askColInfo m c msg = do (FIRemoteRelationship _) -> throwErr "remote relationship" where throwErr fieldType = - throwError $ - err400 UnexpectedPayload $ - "expecting a database column; but, " - <> c <<> " is a " - <> fieldType - <> "; " - <> msg + throwError + $ err400 UnexpectedPayload + $ "expecting a database column; but, " + <> c + <<> " is a " + <> fieldType + <> "; " + <> msg askComputedFieldInfo :: (MonadError QErr m) => @@ -1236,9 +1267,9 @@ askComputedFieldInfo :: m (ComputedFieldInfo backend) askComputedFieldInfo fields computedField = do fieldInfo <- - modifyErr ("computed field " <>) $ - askFieldInfo fields $ - fromComputedField computedField + modifyErr ("computed field " <>) + $ askFieldInfo fields + $ fromComputedField computedField case fieldInfo of (FIColumn _) -> throwErr "column" (FIRelationship _) -> throwErr "relationship" @@ -1246,11 +1277,12 @@ askComputedFieldInfo fields computedField = do (FIComputedField cci) -> pure cci where throwErr fieldType = - throwError $ - err400 UnexpectedPayload $ - "expecting a computed field; but, " - <> computedField <<> " is a " - <> fieldType + throwError + $ err400 UnexpectedPayload + $ "expecting a computed field; but, " + <> computedField + <<> " is a " + <> fieldType assertColumnExists :: forall backend m. 
@@ -1270,16 +1302,17 @@ askRelType :: m (RelInfo backend) askRelType m r msg = do colInfo <- - modifyErr ("relationship " <>) $ - askFieldInfo m (fromRel r) + modifyErr ("relationship " <>) + $ askFieldInfo m (fromRel r) case colInfo of (FIRelationship relInfo) -> return relInfo _ -> - throwError $ - err400 UnexpectedPayload $ - "expecting a relationship; but, " - <> r <<> " is a postgres column; " - <> msg + throwError + $ err400 UnexpectedPayload + $ "expecting a relationship; but, " + <> r + <<> " is a postgres column; " + <> msg askRemoteRel :: (MonadError QErr m) => diff --git a/server/src-lib/Hasura/Table/Metadata.hs b/server/src-lib/Hasura/Table/Metadata.hs index 498088aa813..0d34a3c5323 100644 --- a/server/src-lib/Hasura/Table/Metadata.hs +++ b/server/src-lib/Hasura/Table/Metadata.hs @@ -61,13 +61,13 @@ parseListAsMap :: parseListAsMap things mapFn listP = do list <- listP let duplicates = toList $ L.duplicates $ map mapFn list - unless (null duplicates) $ - fail $ - T.unpack $ - "multiple declarations exist for the following " - <> things - <> ": " - <> T.commaSeparated duplicates + unless (null duplicates) + $ fail + $ T.unpack + $ "multiple declarations exist for the following " + <> things + <> ": " + <> T.commaSeparated duplicates pure $ oMapFromL mapFn list data ComputedFieldMetadata b = ComputedFieldMetadata @@ -83,30 +83,33 @@ deriving instance (Backend b) => Eq (ComputedFieldMetadata b) instance (Backend b) => HasCodec (ComputedFieldMetadata b) where codec = - AC.object (backendPrefix @b <> "ComputedFieldMetadata") $ - ComputedFieldMetadata - <$> requiredField' "name" AC..= _cfmName - <*> requiredField' "definition" AC..= _cfmDefinition - <*> optionalFieldWithOmittedDefault' "comment" Automatic AC..= _cfmComment + AC.object (backendPrefix @b <> "ComputedFieldMetadata") + $ ComputedFieldMetadata + <$> requiredField' "name" + AC..= _cfmName + <*> requiredField' "definition" + AC..= _cfmDefinition + <*> optionalFieldWithOmittedDefault' "comment" Automatic + AC..= _cfmComment instance (Backend b) => ToJSON (ComputedFieldMetadata b) where toJSON ComputedFieldMetadata {..} = - object $ - [ "name" .= _cfmName, - "definition" .= _cfmDefinition, - "comment" .= _cfmComment - ] + object + $ [ "name" .= _cfmName, + "definition" .= _cfmDefinition, + "comment" .= _cfmComment + ] instance (Backend b) => FromJSON (ComputedFieldMetadata b) where parseJSON = withObject "ComputedFieldMetadata" $ \obj -> ComputedFieldMetadata <$> obj - .: "name" + .: "name" <*> obj - .: "definition" + .: "definition" <*> obj - .:? "comment" - .!= Automatic + .:? 
"comment" + .!= Automatic type Relationships a = InsOrdHashMap RelName a @@ -144,34 +147,34 @@ instance (Backend b) => ToJSON (TableMetadata b) where instance (Backend b) => HasCodec (TableMetadata b) where codec = - CommentCodec "Representation of a table in metadata, 'tables.yaml' and 'metadata.json'" $ - AC.object (backendPrefix @b <> "TableMetadata") $ - TableMetadata - <$> requiredField' "table" - .== _tmTable - <*> optionalFieldWithOmittedDefault' "is_enum" False - .== _tmIsEnum - <*> optionalFieldWithOmittedDefault "configuration" emptyTableConfig configDoc - .== _tmConfiguration - <*> optSortedList "object_relationships" _rdName - .== _tmObjectRelationships - <*> optSortedList "array_relationships" _rdName - .== _tmArrayRelationships - <*> optSortedList "computed_fields" _cfmName - .== _tmComputedFields - <*> optSortedList "remote_relationships" _rrName - .== _tmRemoteRelationships - <*> optSortedList "insert_permissions" _pdRole - .== _tmInsertPermissions - <*> optSortedList "select_permissions" _pdRole - .== _tmSelectPermissions - <*> optSortedList "update_permissions" _pdRole - .== _tmUpdatePermissions - <*> optSortedList "delete_permissions" _pdRole - .== _tmDeletePermissions - <*> eventTriggers - <*> optionalFieldOrNull' "apollo_federation_config" - .== _tmApolloFederationConfig + CommentCodec "Representation of a table in metadata, 'tables.yaml' and 'metadata.json'" + $ AC.object (backendPrefix @b <> "TableMetadata") + $ TableMetadata + <$> requiredField' "table" + .== _tmTable + <*> optionalFieldWithOmittedDefault' "is_enum" False + .== _tmIsEnum + <*> optionalFieldWithOmittedDefault "configuration" emptyTableConfig configDoc + .== _tmConfiguration + <*> optSortedList "object_relationships" _rdName + .== _tmObjectRelationships + <*> optSortedList "array_relationships" _rdName + .== _tmArrayRelationships + <*> optSortedList "computed_fields" _cfmName + .== _tmComputedFields + <*> optSortedList "remote_relationships" _rrName + .== _tmRemoteRelationships + <*> optSortedList "insert_permissions" _pdRole + .== _tmInsertPermissions + <*> optSortedList "select_permissions" _pdRole + .== _tmSelectPermissions + <*> optSortedList "update_permissions" _pdRole + .== _tmUpdatePermissions + <*> optSortedList "delete_permissions" _pdRole + .== _tmDeletePermissions + <*> eventTriggers + <*> optionalFieldOrNull' "apollo_federation_config" + .== _tmApolloFederationConfig where -- Some backends do not implement event triggers. In those cases we tailor -- the codec to omit the @"event_triggers"@ field from the API. @@ -218,20 +221,20 @@ mkTableMeta qt isEnum config = instance (Backend b) => FromJSON (TableMetadata b) where parseJSON = withObject "Object" $ \o -> do let unexpectedKeys = getUnexpectedKeys o - unless (null unexpectedKeys) $ - fail $ - "unexpected keys when parsing TableMetadata: " - <> show (HS.toList unexpectedKeys) + unless (null unexpectedKeys) + $ fail + $ "unexpected keys when parsing TableMetadata: " + <> show (HS.toList unexpectedKeys) TableMetadata <$> o - .: tableKey + .: tableKey <*> o - .:? isEnumKey - .!= False + .:? isEnumKey + .!= False <*> o - .:? configKey - .!= emptyTableConfig + .:? configKey + .!= emptyTableConfig <*> parseListAsMap "object relationships" _rdName (o .:? orKey .!= []) <*> parseListAsMap "array relationships" _rdName (o .:? arKey .!= []) <*> parseListAsMap "computed fields" _cfmName (o .:? cfKey .!= []) @@ -242,7 +245,7 @@ instance (Backend b) => FromJSON (TableMetadata b) where <*> parseListAsMap "delete permissions" _pdRole (o .:? 
dpKey .!= []) <*> parseListAsMap "event triggers" etcName (o .:? etKey .!= []) <*> o - .:? enableAFKey + .:? enableAFKey where tableKey = "table" isEnumKey = "is_enum" diff --git a/server/src-lib/Hasura/Tracing/Class.hs b/server/src-lib/Hasura/Tracing/Class.hs index eb5118d9278..661899d5028 100644 --- a/server/src-lib/Hasura/Tracing/Class.hs +++ b/server/src-lib/Hasura/Tracing/Class.hs @@ -18,7 +18,7 @@ import Hasura.Tracing.TraceId -------------------------------------------------------------------------------- -- MonadTrace -class Monad m => MonadTrace m where +class (Monad m) => MonadTrace m where -- | Trace the execution of a block of code, attaching a human-readable -- name. This starts a new trace and its corresponding root span, to which -- subsequent spans will be attached. @@ -49,25 +49,25 @@ class Monad m => MonadTrace m where -- | Log some arbitrary metadata to be attached to the current span, if any. attachMetadata :: TraceMetadata -> m () -instance MonadTrace m => MonadTrace (ReaderT r m) where +instance (MonadTrace m) => MonadTrace (ReaderT r m) where newTraceWith c p n = mapReaderT (newTraceWith c p n) newSpanWith i n = mapReaderT (newSpanWith i n) currentContext = lift currentContext attachMetadata = lift . attachMetadata -instance MonadTrace m => MonadTrace (StateT e m) where +instance (MonadTrace m) => MonadTrace (StateT e m) where newTraceWith c p n = mapStateT (newTraceWith c p n) newSpanWith i n = mapStateT (newSpanWith i n) currentContext = lift currentContext attachMetadata = lift . attachMetadata -instance MonadTrace m => MonadTrace (ExceptT e m) where +instance (MonadTrace m) => MonadTrace (ExceptT e m) where newTraceWith c p n = mapExceptT (newTraceWith c p n) newSpanWith i n = mapExceptT (newSpanWith i n) currentContext = lift currentContext attachMetadata = lift . attachMetadata -instance MonadTrace m => MonadTrace (MaybeT m) where +instance (MonadTrace m) => MonadTrace (MaybeT m) where newTraceWith c p n = mapMaybeT (newTraceWith c p n) newSpanWith i n = mapMaybeT (newSpanWith i n) currentContext = lift currentContext diff --git a/server/src-lib/Hasura/Tracing/Monad.hs b/server/src-lib/Hasura/Tracing/Monad.hs index c8bf2b802f1..d532e5954b3 100644 --- a/server/src-lib/Hasura/Tracing/Monad.hs +++ b/server/src-lib/Hasura/Tracing/Monad.hs @@ -63,7 +63,7 @@ instance MonadTrans TraceT where lift = TraceT . lift -- | Hides the fact that TraceT is a reader to the rest of the stack. -instance MonadReader r m => MonadReader r (TraceT m) where +instance (MonadReader r m) => MonadReader r (TraceT m) where ask = lift ask local f (TraceT m) = TraceT $ mapReaderT (local f) m @@ -102,8 +102,8 @@ instance (MonadIO m, MonadBaseControl IO m) => MonadTrace (TraceT m) where { teTraceContext = subContext, teMetadataRef = metadataRef } - runReporter reporter subContext name (readIORef metadataRef) $ - local (_2 .~ Just subTraceEnv) body + runReporter reporter subContext name (readIORef metadataRef) + $ local (_2 .~ Just subTraceEnv) body currentContext = TraceT $ asks $ fmap teTraceContext . snd @@ -131,7 +131,7 @@ data TraceEnv = TraceEnv -- Helper for consistently deciding whether or not to sample a trace based on -- trace context and sampling policy. 
-decideSampling :: MonadIO m => SamplingState -> SamplingPolicy -> m SamplingDecision +decideSampling :: (MonadIO m) => SamplingState -> SamplingPolicy -> m SamplingDecision decideSampling samplingState samplingPolicy = case samplingState of SamplingDefer -> liftIO samplingPolicy diff --git a/server/src-lib/Hasura/Tracing/Sampling.hs b/server/src-lib/Hasura/Tracing/Sampling.hs index fa0665491e7..2111883ac12 100644 --- a/server/src-lib/Hasura/Tracing/Sampling.hs +++ b/server/src-lib/Hasura/Tracing/Sampling.hs @@ -30,7 +30,7 @@ data SamplingState = SamplingDefer | SamplingDeny | SamplingAccept -- | Convert a sampling state to a value for the X-B3-Sampled header. A return -- value of Nothing indicates that the header should not be set. -samplingStateToHeader :: IsString s => SamplingState -> Maybe s +samplingStateToHeader :: (IsString s) => SamplingState -> Maybe s samplingStateToHeader = \case SamplingDefer -> Nothing SamplingDeny -> Just "0" diff --git a/server/src-lib/Hasura/Tracing/TraceId.hs b/server/src-lib/Hasura/Tracing/TraceId.hs index ff621649dd7..4bd4b0dd4db 100644 --- a/server/src-lib/Hasura/Tracing/TraceId.hs +++ b/server/src-lib/Hasura/Tracing/TraceId.hs @@ -41,7 +41,7 @@ data TraceId traceIdBytes :: Int traceIdBytes = 16 -randomTraceId :: MonadIO m => m TraceId +randomTraceId :: (MonadIO m) => m TraceId randomTraceId = liftIO do (w1, w2) <- flip Random.applyAtomicGen Random.globalStdGen $ \gen0 -> @@ -60,9 +60,11 @@ traceIdFromBytes :: ByteString -> Maybe TraceId traceIdFromBytes bs = do guard $ ByteString.length bs == traceIdBytes (w1, w2) <- - eitherToMaybe $ - flip Serialize.runGet bs $ - (,) <$> Serialize.getWord64be <*> Serialize.getWord64be + eitherToMaybe + $ flip Serialize.runGet bs + $ (,) + <$> Serialize.getWord64be + <*> Serialize.getWord64be guard $ w1 .|. w2 /= 0 pure $ TraceId w1 w2 @@ -95,7 +97,7 @@ newtype SpanId = SpanId Word64 spanIdBytes :: Int spanIdBytes = 8 -randomSpanId :: MonadIO m => m SpanId +randomSpanId :: (MonadIO m) => m SpanId randomSpanId = liftIO do w <- Random.uniformM Random.globalStdGen if w == 0 diff --git a/server/src-test/Control/Monad/MemoizationSpecDefinition.hs b/server/src-test/Control/Monad/MemoizationSpecDefinition.hs index d0b962f9960..e06ef5acdd0 100644 --- a/server/src-test/Control/Monad/MemoizationSpecDefinition.hs +++ b/server/src-test/Control/Monad/MemoizationSpecDefinition.hs @@ -16,10 +16,10 @@ import Test.Hspec class ( forall k v. MonadTrans (m k v), - forall k v n. Monad n => Functor (m k v n), - forall k v n. Monad n => Applicative (m k v n), - forall k v n. Monad n => Monad (m k v n), - forall k v n s. MonadState s n => MonadState s (m k v n) + forall k v n. (Monad n) => Functor (m k v n), + forall k v n. (Monad n) => Applicative (m k v n), + forall k v n. (Monad n) => Monad (m k v n), + forall k v n s. (MonadState s n) => MonadState s (m k v n) ) => Memoizer (m :: Type -> Type -> (Type -> Type) -> Type -> Type) where @@ -36,7 +36,7 @@ class m k v n v -> m k v n v -memoizationSpec :: forall m. Memoizer m => Spec +memoizationSpec :: forall m. (Memoizer m) => Spec memoizationSpec = do describe "circular graphs" $ checkCircularGraphs @m describe "infinite lists" $ checkInfiniteLists @m @@ -57,7 +57,7 @@ instance Show Node where instance Eq Node where Node n1 s1 == Node n2 s2 = n1 == n2 && map nodeName s1 == map nodeName s2 -checkCircularGraphs :: forall m. Memoizer m => Spec +checkCircularGraphs :: forall m. 
(Memoizer m) => Spec checkCircularGraphs = do it "builds A -> B -> C -> A" do (a, b, c) <- succeedsWithinTimeLimit $ runMemoizer @m do @@ -88,7 +88,7 @@ checkCircularGraphs = do -------------------------------------------------------------------------------- -- Infinite lists -checkInfiniteLists :: forall m. Memoizer m => Spec +checkInfiniteLists :: forall m. (Memoizer m) => Spec checkInfiniteLists = do it "builds `x = 1 : x`" do l <- succeedsWithinTimeLimit $ runMemoizer @m do @@ -108,11 +108,12 @@ checkInfiniteLists = do -------------------------------------------------------------------------------- -- Memoization -checkMemoization :: forall m. Memoizer m => Spec +checkMemoization :: forall m. (Memoizer m) => Spec checkMemoization = do it "memoizes fibo" do - (fibos, count) <- succeedsWithinTimeLimit $ - flip runStateT (mempty :: HashMap Int Int) $ runMemoizer @m do + (fibos, count) <- succeedsWithinTimeLimit + $ flip runStateT (mempty :: HashMap Int Int) + $ runMemoizer @m do let fibo n = memoize 'checkMemoization n do modify $ HashMap.insertWith (+) n (1 :: Int) case n of @@ -126,7 +127,7 @@ checkMemoization = do -------------------------------------------------------------------------------- -- Failure -checkFailure :: forall m. Memoizer m => Spec +checkFailure :: forall m. (Memoizer m) => Spec checkFailure = do it "unsuccessfully attempts to memoize Maybe" do result <- runWithTimeLimit $ runMemoizer @m do diff --git a/server/src-test/Control/Monad/MemoizeSpec.hs b/server/src-test/Control/Monad/MemoizeSpec.hs index 0bc53b833c3..e034fe417c3 100644 --- a/server/src-test/Control/Monad/MemoizeSpec.hs +++ b/server/src-test/Control/Monad/MemoizeSpec.hs @@ -16,7 +16,7 @@ instance Memoizer MemoizeWithExtraParamsT where runMemoizer = runMemoizeT . unMemoizeWithExtraParamsT memoize name key = MemoizeWithExtraParamsT . memoizeOn name key . unMemoizeWithExtraParamsT -deriving newtype instance MonadState s m => MonadState s (MemoizeWithExtraParamsT k v m) +deriving newtype instance (MonadState s m) => MonadState s (MemoizeWithExtraParamsT k v m) spec :: Spec spec = memoizationSpec @MemoizeWithExtraParamsT diff --git a/server/src-test/Control/Monad/TimeLimit.hs b/server/src-test/Control/Monad/TimeLimit.hs index dac02a0f516..e8e1ec5f9d0 100644 --- a/server/src-test/Control/Monad/TimeLimit.hs +++ b/server/src-test/Control/Monad/TimeLimit.hs @@ -13,7 +13,7 @@ import Hasura.Prelude -- | Runs an action with a time limit of approximately 0.1s. -- If the time taken to perform the action exceeds this limit, -- it returns 'Nothing'. 
-runWithTimeLimit :: MonadIO m => IO a -> m (Maybe a) +runWithTimeLimit :: (MonadIO m) => IO a -> m (Maybe a) runWithTimeLimit action = liftIO do var <- newEmptyMVar thread <- async do diff --git a/server/src-test/Data/HashMap/Strict/ExtendedSpec.hs b/server/src-test/Data/HashMap/Strict/ExtendedSpec.hs index 1655762d9ab..54de5b26b07 100644 --- a/server/src-test/Data/HashMap/Strict/ExtendedSpec.hs +++ b/server/src-test/Data/HashMap/Strict/ExtendedSpec.hs @@ -10,28 +10,28 @@ import Test.QuickCheck spec :: Spec spec = describe "isInverseOf" $ do - it "is satisfied by maps with the same unique keys as values" $ - property $ - \(xs :: [Int]) -> do - let m = HashMap.fromList $ zip xs xs - m `HashMap.isInverseOf` m + it "is satisfied by maps with the same unique keys as values" + $ property + $ \(xs :: [Int]) -> do + let m = HashMap.fromList $ zip xs xs + m `HashMap.isInverseOf` m - it "is satisfied by maps with swapped unique keys and values" $ - property $ - \(vals :: [Int]) -> do - let keys = show <$> vals - let forward = HashMap.fromList $ zip keys vals - let backward = HashMap.fromList $ zip vals keys - forward `HashMap.isInverseOf` backward + it "is satisfied by maps with swapped unique keys and values" + $ property + $ \(vals :: [Int]) -> do + let keys = show <$> vals + let forward = HashMap.fromList $ zip keys vals + let backward = HashMap.fromList $ zip vals keys + forward `HashMap.isInverseOf` backward - it "fails when different keys map to one value" $ - property $ - \(NonNegative (x :: Int)) (Positive (n :: Int)) -> do - let keys = take (n + 2) [x ..] - let vals = even <$> keys - let forward = HashMap.fromList $ zip keys vals - let backward = HashMap.fromList $ zip vals keys - not $ forward `HashMap.isInverseOf` backward + it "fails when different keys map to one value" + $ property + $ \(NonNegative (x :: Int)) (Positive (n :: Int)) -> do + let keys = take (n + 2) [x ..] 
+ let vals = even <$> keys + let forward = HashMap.fromList $ zip keys vals + let backward = HashMap.fromList $ zip vals keys + not $ forward `HashMap.isInverseOf` backward it "passes some trivial examples as a smoke test" $ do let fwd = HashMap.fromList @Int @Bool diff --git a/server/src-test/Data/Parser/JSONPathSpec.hs b/server/src-test/Data/Parser/JSONPathSpec.hs index f3dc37e7efb..304d95d759b 100644 --- a/server/src-test/Data/Parser/JSONPathSpec.hs +++ b/server/src-test/Data/Parser/JSONPathSpec.hs @@ -12,34 +12,41 @@ import Test.QuickCheck spec :: Spec spec = do describe "encoding a JSON path" $ do - it "encodes a one-level path" $ - encodeJSONPath [Key "ABCD"] `shouldBe` "$.ABCD" + it "encodes a one-level path" + $ encodeJSONPath [Key "ABCD"] + `shouldBe` "$.ABCD" - it "encodes a multi-level path" $ - encodeJSONPath [Key "7seven", Index 0, Key "@!^@*#(!("] `shouldBe` "$[\"7seven\"][0][\"@!^@*#(!(\"]" + it "encodes a multi-level path" + $ encodeJSONPath [Key "7seven", Index 0, Key "@!^@*#(!("] + `shouldBe` "$[\"7seven\"][0][\"@!^@*#(!(\"]" - it "escapes control characters and quotes" $ - encodeJSONPath [Key "/\\ '\" \t\r\n \xfffd"] `shouldBe` "$[\"/\\\\ '\\\" \\t\\r\\n \xfffd\"]" + it "escapes control characters and quotes" + $ encodeJSONPath [Key "/\\ '\" \t\r\n \xfffd"] + `shouldBe` "$[\"/\\\\ '\\\" \\t\\r\\n \xfffd\"]" describe "parsing a JSON path" $ do - it "parses a single '$'" $ - parseJSONPath "$" `shouldBe` Right [] + it "parses a single '$'" + $ parseJSONPath "$" + `shouldBe` Right [] - it "parses bracketed single quotes" $ - parseJSONPath "$['foo \\' \" bar']" `shouldBe` Right [Key "foo ' \" bar"] + it "parses bracketed single quotes" + $ parseJSONPath "$['foo \\' \" bar']" + `shouldBe` Right [Key "foo ' \" bar"] - it "parses bracketed double quotes" $ - parseJSONPath "$[\"bar ' \\\" foo\"]" `shouldBe` Right [Key "bar ' \" foo"] + it "parses bracketed double quotes" + $ parseJSONPath "$[\"bar ' \\\" foo\"]" + `shouldBe` Right [Key "bar ' \" foo"] describe "the round trip" $ do - it "encodes and parses random JSON paths" $ - withMaxSuccess 1000 $ - forAll (resize 20 generateJSONPath) $ \jsonPath -> - let encPath = encodeJSONPath jsonPath - parsedJSONPathE = parseJSONPath encPath - in case parsedJSONPathE of - Left err -> counterexample (T.unpack (err <> ": " <> encPath)) False - Right parsedJSONPath -> property $ parsedJSONPath === jsonPath + it "encodes and parses random JSON paths" + $ withMaxSuccess 1000 + $ forAll (resize 20 generateJSONPath) + $ \jsonPath -> + let encPath = encodeJSONPath jsonPath + parsedJSONPathE = parseJSONPath encPath + in case parsedJSONPathE of + Left err -> counterexample (T.unpack (err <> ": " <> encPath)) False + Right parsedJSONPath -> property $ parsedJSONPath === jsonPath generateJSONPath :: Gen JSONPath generateJSONPath = map (either id id) <$> listOf1 genPathElementEither diff --git a/server/src-test/Data/Parser/URLTemplateSpec.hs b/server/src-test/Data/Parser/URLTemplateSpec.hs index 384fac0ab20..0b26d766e3e 100644 --- a/server/src-test/Data/Parser/URLTemplateSpec.hs +++ b/server/src-test/Data/Parser/URLTemplateSpec.hs @@ -6,11 +6,12 @@ import Test.Hspec import Test.QuickCheck spec :: Spec -spec = describe "parseURLTemplate" $ - it "URL template parser and printer" $ - withMaxSuccess 1000 $ - forAll (arbitrary :: Gen URLTemplate) $ \urlTemplate -> do - let templateString = printURLTemplate urlTemplate - case parseURLTemplate templateString of - Left e -> counterexample e False - Right r -> property $ printURLTemplate r == templateString 
+spec = describe "parseURLTemplate" + $ it "URL template parser and printer" + $ withMaxSuccess 1000 + $ forAll (arbitrary :: Gen URLTemplate) + $ \urlTemplate -> do + let templateString = printURLTemplate urlTemplate + case parseURLTemplate templateString of + Left e -> counterexample e False + Right r -> property $ printURLTemplate r == templateString diff --git a/server/src-test/Hasura/Backends/BigQuery/SourceSpec.hs b/server/src-test/Hasura/Backends/BigQuery/SourceSpec.hs index 180bf249cfb..878628c6c78 100644 --- a/server/src-test/Hasura/Backends/BigQuery/SourceSpec.hs +++ b/server/src-test/Hasura/Backends/BigQuery/SourceSpec.hs @@ -26,11 +26,14 @@ data MockServiceAccount = MockServiceAccount instance HasCodec MockServiceAccount where codec = - object "MockServiceAccount" $ - MockServiceAccount - <$> requiredField' "client_email" .= _msaClientEmail - <*> requiredField' "private_key" .= _msaPrivateKey - <*> requiredField' "project_id" .= _msaProjectId + object "MockServiceAccount" + $ MockServiceAccount + <$> requiredField' "client_email" + .= _msaClientEmail + <*> requiredField' "private_key" + .= _msaPrivateKey + <*> requiredField' "project_id" + .= _msaProjectId $(J.deriveJSON (J.aesonDrop 4 J.snakeCase) {J.omitNothingFields = False} ''MockServiceAccount) @@ -47,8 +50,8 @@ spec = do let privateKey = "-----BEGIN RSA PRIVATE KEY----- etc." let projectId = "2" let input = - encode $ - [aesonQQ| + encode + $ [aesonQQ| { client_email: #{clientEmail}, private_key: #{privateKey}, project_id: #{projectId} @@ -68,9 +71,9 @@ spec = do let privateKey = "-----BEGIN RSA PRIVATE KEY----- etc." let projectId = "2" let input = - decodeUtf8 $ - encode $ - [aesonQQ| + decodeUtf8 + $ encode + $ [aesonQQ| { client_email: #{clientEmail}, private_key: #{privateKey}, project_id: #{projectId} diff --git a/server/src-test/Hasura/Backends/DataConnector/API/V0/CapabilitiesSpec.hs b/server/src-test/Hasura/Backends/DataConnector/API/V0/CapabilitiesSpec.hs index 94a02977523..eca1e90040b 100644 --- a/server/src-test/Hasura/Backends/DataConnector/API/V0/CapabilitiesSpec.hs +++ b/server/src-test/Hasura/Backends/DataConnector/API/V0/CapabilitiesSpec.hs @@ -31,8 +31,8 @@ spec = do (CapabilitiesResponse (defaultCapabilities {_cRelationships = Just RelationshipCapabilities {}}) emptyConfigSchemaResponse Nothing Nothing) [aesonQQ|{"capabilities": {"relationships": {}}, "config_schemas": {"config_schema": {}, "other_schemas": {}}}|] describe "ScalarTypesCapabilities" $ do - describe "Minimal" $ - testToFromJSONToSchema (ScalarTypesCapabilities (HashMap.singleton (ScalarType "string") (ScalarTypeCapabilities mempty mempty mempty Nothing))) [aesonQQ|{"string": {}}|] + describe "Minimal" + $ testToFromJSONToSchema (ScalarTypesCapabilities (HashMap.singleton (ScalarType "string") (ScalarTypeCapabilities mempty mempty mempty Nothing))) [aesonQQ|{"string": {}}|] describe "Maximal" $ do let comparisonOperators = ComparisonOperators $ HashMap.fromList [([G.name|same_day_as|], ScalarType "DateTime")] let aggregateFunctions = AggregateFunctions $ HashMap.fromList [([G.name|max|], ScalarType "DateTime")] @@ -56,24 +56,24 @@ spec = do testToFromJSONToSchema (ScalarTypeCapabilities comparisonOperators aggregateFunctions updateColumnOperators graphQLType) json jsonOpenApiProperties genScalarTypesCapabilities -genDataSchemaCapabilities :: MonadGen m => m DataSchemaCapabilities +genDataSchemaCapabilities :: (MonadGen m) => m DataSchemaCapabilities genDataSchemaCapabilities = DataSchemaCapabilities <$> Gen.bool <*> Gen.bool <*> 
genColumnNullability -genColumnNullability :: MonadGen m => m ColumnNullability +genColumnNullability :: (MonadGen m) => m ColumnNullability genColumnNullability = Gen.element [NullableAndNonNullableColumns, OnlyNullableColumns] -genQueryCapabilities :: MonadGen m => m QueryCapabilities +genQueryCapabilities :: (MonadGen m) => m QueryCapabilities genQueryCapabilities = QueryCapabilities <$> Gen.maybe genForeachCapabilities -genForeachCapabilities :: MonadGen m => m ForeachCapabilities +genForeachCapabilities :: (MonadGen m) => m ForeachCapabilities genForeachCapabilities = pure ForeachCapabilities -genMutationCapabilities :: MonadGen m => m MutationCapabilities +genMutationCapabilities :: (MonadGen m) => m MutationCapabilities genMutationCapabilities = MutationCapabilities <$> Gen.maybe genInsertCapabilities @@ -82,22 +82,22 @@ genMutationCapabilities = <*> Gen.maybe genAtomicitySupportLevel <*> Gen.maybe genReturningCapabilities -genInsertCapabilities :: MonadGen m => m InsertCapabilities +genInsertCapabilities :: (MonadGen m) => m InsertCapabilities genInsertCapabilities = InsertCapabilities <$> Gen.bool -genUpdateCapabilities :: MonadGen m => m UpdateCapabilities +genUpdateCapabilities :: (MonadGen m) => m UpdateCapabilities genUpdateCapabilities = pure UpdateCapabilities -genDeleteCapabilities :: MonadGen m => m DeleteCapabilities +genDeleteCapabilities :: (MonadGen m) => m DeleteCapabilities genDeleteCapabilities = pure DeleteCapabilities -genAtomicitySupportLevel :: MonadGen m => m AtomicitySupportLevel +genAtomicitySupportLevel :: (MonadGen m) => m AtomicitySupportLevel genAtomicitySupportLevel = Gen.enumBounded -genReturningCapabilities :: MonadGen m => m ReturningCapabilities +genReturningCapabilities :: (MonadGen m) => m ReturningCapabilities genReturningCapabilities = pure ReturningCapabilities -genSubscriptionCapabilities :: MonadGen m => m SubscriptionCapabilities +genSubscriptionCapabilities :: (MonadGen m) => m SubscriptionCapabilities genSubscriptionCapabilities = pure SubscriptionCapabilities {} genComparisonOperators :: (MonadGen m, GenBase m ~ Identity) => m ComparisonOperators @@ -108,7 +108,7 @@ genAggregateFunctions :: (MonadGen m, GenBase m ~ Identity) => m AggregateFuncti genAggregateFunctions = AggregateFunctions <$> genHashMap (genGName defaultRange) genScalarType defaultRange -genGraphQLType :: MonadGen m => m GraphQLType +genGraphQLType :: (MonadGen m) => m GraphQLType genGraphQLType = Gen.enumBounded genScalarTypeCapabilities :: (MonadGen m, GenBase m ~ Identity) => m ScalarTypeCapabilities @@ -127,41 +127,41 @@ genUpdateColumnOperators :: (MonadGen m, GenBase m ~ Identity) => m UpdateColumn genUpdateColumnOperators = UpdateColumnOperators <$> genHashMap genUpdateColumnOperatorName genUpdateColumnOperatorDefinition defaultRange -genUpdateColumnOperatorName :: MonadGen m => m UpdateColumnOperatorName +genUpdateColumnOperatorName :: (MonadGen m) => m UpdateColumnOperatorName genUpdateColumnOperatorName = UpdateColumnOperatorName <$> genGName defaultRange genUpdateColumnOperatorDefinition :: (MonadGen m, GenBase m ~ Identity) => m UpdateColumnOperatorDefinition genUpdateColumnOperatorDefinition = UpdateColumnOperatorDefinition <$> genScalarType -genRelationshipCapabilities :: MonadGen m => m RelationshipCapabilities +genRelationshipCapabilities :: (MonadGen m) => m RelationshipCapabilities genRelationshipCapabilities = pure RelationshipCapabilities {} -genComparisonCapabilities :: MonadGen m => m ComparisonCapabilities +genComparisonCapabilities :: (MonadGen m) => 
m ComparisonCapabilities genComparisonCapabilities = ComparisonCapabilities <$> Gen.maybe genSubqueryComparisonCapabilities -genSubqueryComparisonCapabilities :: MonadGen m => m SubqueryComparisonCapabilities +genSubqueryComparisonCapabilities :: (MonadGen m) => m SubqueryComparisonCapabilities genSubqueryComparisonCapabilities = SubqueryComparisonCapabilities <$> Gen.bool -genMetricsCapabilities :: MonadGen m => m MetricsCapabilities +genMetricsCapabilities :: (MonadGen m) => m MetricsCapabilities genMetricsCapabilities = pure MetricsCapabilities {} -genExplainCapabilities :: MonadGen m => m ExplainCapabilities +genExplainCapabilities :: (MonadGen m) => m ExplainCapabilities genExplainCapabilities = pure ExplainCapabilities {} -genRawCapabilities :: MonadGen m => m RawCapabilities +genRawCapabilities :: (MonadGen m) => m RawCapabilities genRawCapabilities = pure RawCapabilities {} -genDatasetCapabilities :: MonadGen m => m DatasetCapabilities +genDatasetCapabilities :: (MonadGen m) => m DatasetCapabilities genDatasetCapabilities = pure DatasetCapabilities {} -genUserDefinedFunctionCapabilities :: MonadGen m => m UserDefinedFunctionCapabilities +genUserDefinedFunctionCapabilities :: (MonadGen m) => m UserDefinedFunctionCapabilities genUserDefinedFunctionCapabilities = pure UserDefinedFunctionCapabilities {} -genLicensing :: MonadGen m => m Licensing +genLicensing :: (MonadGen m) => m Licensing genLicensing = pure Licensing {} genCapabilities :: Gen Capabilities diff --git a/server/src-test/Hasura/Backends/DataConnector/API/V0/ColumnSpec.hs b/server/src-test/Hasura/Backends/DataConnector/API/V0/ColumnSpec.hs index 5af9cce54d0..b51878852c4 100644 --- a/server/src-test/Hasura/Backends/DataConnector/API/V0/ColumnSpec.hs +++ b/server/src-test/Hasura/Backends/DataConnector/API/V0/ColumnSpec.hs @@ -20,8 +20,8 @@ spec = do testToFromJSONToSchema (ColumnName "my_column_name") [aesonQQ|"my_column_name"|] jsonOpenApiProperties genColumnName describe "ColumnInfo" $ do - describe "minimal" $ - testFromJSON + describe "minimal" + $ testFromJSON (ColumnInfo (ColumnName "my_column_name") (ColumnTypeScalar $ ScalarType "string") False Nothing False False Nothing) [aesonQQ| { "name": "my_column_name", @@ -29,8 +29,8 @@ spec = do "nullable": false } |] - describe "non-minimal" $ - testToFromJSONToSchema + describe "non-minimal" + $ testToFromJSONToSchema (ColumnInfo (ColumnName "my_column_name") (ColumnTypeScalar $ ScalarType "number") True (Just "My column description") True True (Just AutoIncrement)) [aesonQQ| { "name": "my_column_name", @@ -44,16 +44,16 @@ spec = do |] jsonOpenApiProperties genColumnInfo describe "ColumnValueGenerationStrategy" $ do - describe "AutoIncrement" $ - testToFromJSONToSchema AutoIncrement [aesonQQ|{"type": "auto_increment"}|] + describe "AutoIncrement" + $ testToFromJSONToSchema AutoIncrement [aesonQQ|{"type": "auto_increment"}|] - describe "UniqueIdentifier" $ - testToFromJSONToSchema UniqueIdentifier [aesonQQ|{"type": "unique_identifier"}|] + describe "UniqueIdentifier" + $ testToFromJSONToSchema UniqueIdentifier [aesonQQ|{"type": "unique_identifier"}|] - describe "DefaultValue" $ - testToFromJSONToSchema DefaultValue [aesonQQ|{"type": "default_value"}|] + describe "DefaultValue" + $ testToFromJSONToSchema DefaultValue [aesonQQ|{"type": "default_value"}|] -genColumnName :: MonadGen m => m ColumnName +genColumnName :: (MonadGen m) => m ColumnName genColumnName = ColumnName <$> genArbitraryAlphaNumText defaultRange genColumnType :: Gen ColumnType @@ -75,6 +75,6 @@ genColumnInfo 
= <*> Gen.bool <*> Gen.maybe genColumnValueGenerationStrategy -genColumnValueGenerationStrategy :: MonadGen m => m ColumnValueGenerationStrategy +genColumnValueGenerationStrategy :: (MonadGen m) => m ColumnValueGenerationStrategy genColumnValueGenerationStrategy = Gen.element [AutoIncrement, UniqueIdentifier, DefaultValue] diff --git a/server/src-test/Hasura/Backends/DataConnector/API/V0/ConfigSchemaSpec.hs b/server/src-test/Hasura/Backends/DataConnector/API/V0/ConfigSchemaSpec.hs index beb64da966b..7fa3b376c3f 100644 --- a/server/src-test/Hasura/Backends/DataConnector/API/V0/ConfigSchemaSpec.hs +++ b/server/src-test/Hasura/Backends/DataConnector/API/V0/ConfigSchemaSpec.hs @@ -65,9 +65,9 @@ spec = do |] testToFromJSON val jsonVal - it "OpenAPI spec is as expected" $ - toJSON (toSchema (Proxy @ConfigSchemaResponse)) - `shouldBe` [aesonQQ| + it "OpenAPI spec is as expected" + $ toJSON (toSchema (Proxy @ConfigSchemaResponse)) + `shouldBe` [aesonQQ| { "required": [ "config_schema", diff --git a/server/src-test/Hasura/Backends/DataConnector/API/V0/ExpressionSpec.hs b/server/src-test/Hasura/Backends/DataConnector/API/V0/ExpressionSpec.hs index c7801b95215..5fa879f8c06 100644 --- a/server/src-test/Hasura/Backends/DataConnector/API/V0/ExpressionSpec.hs +++ b/server/src-test/Hasura/Backends/DataConnector/API/V0/ExpressionSpec.hs @@ -28,41 +28,41 @@ import Test.Hspec spec :: Spec spec = do describe "BinaryComparisonOperator" $ do - describe "LessThan" $ - testToFromJSONToSchema LessThan [aesonQQ|"less_than"|] + describe "LessThan" + $ testToFromJSONToSchema LessThan [aesonQQ|"less_than"|] - describe "LessThanOrEqual" $ - testToFromJSONToSchema LessThanOrEqual [aesonQQ|"less_than_or_equal"|] + describe "LessThanOrEqual" + $ testToFromJSONToSchema LessThanOrEqual [aesonQQ|"less_than_or_equal"|] - describe "GreaterThan" $ - testToFromJSONToSchema GreaterThan [aesonQQ|"greater_than"|] + describe "GreaterThan" + $ testToFromJSONToSchema GreaterThan [aesonQQ|"greater_than"|] - describe "GreaterThanOrEqual" $ - testToFromJSONToSchema GreaterThanOrEqual [aesonQQ|"greater_than_or_equal"|] + describe "GreaterThanOrEqual" + $ testToFromJSONToSchema GreaterThanOrEqual [aesonQQ|"greater_than_or_equal"|] - describe "Equal" $ - testToFromJSONToSchema Equal [aesonQQ|"equal"|] + describe "Equal" + $ testToFromJSONToSchema Equal [aesonQQ|"equal"|] - describe "CustomBinaryComparisonOperator" $ - testToFromJSONToSchema (CustomBinaryComparisonOperator "foo") [aesonQQ|"foo"|] + describe "CustomBinaryComparisonOperator" + $ testToFromJSONToSchema (CustomBinaryComparisonOperator "foo") [aesonQQ|"foo"|] jsonOpenApiProperties genBinaryComparisonOperator describe "BinaryArrayComparisonOperator" $ do - describe "In" $ - testToFromJSONToSchema In [aesonQQ|"in"|] + describe "In" + $ testToFromJSONToSchema In [aesonQQ|"in"|] - describe "CustomBinaryArrayComparisonOperator" $ - testToFromJSONToSchema (CustomBinaryArrayComparisonOperator "foo") [aesonQQ|"foo"|] + describe "CustomBinaryArrayComparisonOperator" + $ testToFromJSONToSchema (CustomBinaryArrayComparisonOperator "foo") [aesonQQ|"foo"|] jsonOpenApiProperties genBinaryArrayComparisonOperator describe "UnaryComparisonOperator" $ do - describe "IsNull" $ - testToFromJSONToSchema IsNull [aesonQQ|"is_null"|] + describe "IsNull" + $ testToFromJSONToSchema IsNull [aesonQQ|"is_null"|] - describe "CustomUnaryComparisonOperator" $ - testToFromJSONToSchema (CustomUnaryComparisonOperator "foo") [aesonQQ|"foo"|] + describe "CustomUnaryComparisonOperator" + $ testToFromJSONToSchema 
(CustomUnaryComparisonOperator "foo") [aesonQQ|"foo"|] jsonOpenApiProperties genUnaryComparisonOperator @@ -74,35 +74,35 @@ spec = do jsonOpenApiProperties genComparisonColumn describe "ColumnPath" $ do - describe "QueryTable" $ - testToFromJSONToSchema QueryTable [aesonQQ|["$"]|] - describe "CurrentTable" $ - testToFromJSONToSchema CurrentTable [aesonQQ|[]|] + describe "QueryTable" + $ testToFromJSONToSchema QueryTable [aesonQQ|["$"]|] + describe "CurrentTable" + $ testToFromJSONToSchema CurrentTable [aesonQQ|[]|] jsonOpenApiProperties genColumnPath describe "ComparisonValue" $ do - describe "AnotherColumnComparison" $ - testToFromJSONToSchema + describe "AnotherColumnComparison" + $ testToFromJSONToSchema (AnotherColumnComparison $ ComparisonColumn CurrentTable (ColumnName "my_column_name") (ScalarType "string")) [aesonQQ|{"type": "column", "column": {"name": "my_column_name", "column_type": "string"}}|] - describe "ScalarValueComparison" $ - testToFromJSONToSchema + describe "ScalarValueComparison" + $ testToFromJSONToSchema (ScalarValueComparison $ ScalarValue (String "scalar value") (ScalarType "string")) [aesonQQ|{"type": "scalar", "value": "scalar value", "value_type": "string"}|] jsonOpenApiProperties genComparisonValue describe "ExistsInTable" $ do - describe "RelatedTable" $ - testToFromJSONToSchema + describe "RelatedTable" + $ testToFromJSONToSchema (RelatedTable (RelationshipName "my_relation")) [aesonQQ| { "type": "related", "relationship": "my_relation" } |] - describe "UnrelatedTable" $ - testToFromJSONToSchema + describe "UnrelatedTable" + $ testToFromJSONToSchema (UnrelatedTable (TableName ["my_table_name"])) [aesonQQ| { "type": "unrelated", @@ -250,7 +250,7 @@ genComparisonColumn = <*> genColumnName <*> genScalarType -genColumnPath :: MonadGen m => m ColumnPath +genColumnPath :: (MonadGen m) => m ColumnPath genColumnPath = Gen.element [CurrentTable, QueryTable] @@ -261,7 +261,7 @@ genComparisonValue = ScalarValueComparison <$> genScalarValue ] -genExistsInTable :: MonadGen m => m ExistsInTable +genExistsInTable :: (MonadGen m) => m ExistsInTable genExistsInTable = Gen.choice [ RelatedTable <$> genRelationshipName, diff --git a/server/src-test/Hasura/Backends/DataConnector/API/V0/MutationsSpec.hs b/server/src-test/Hasura/Backends/DataConnector/API/V0/MutationsSpec.hs index 99272df36f5..4deb1229dc9 100644 --- a/server/src-test/Hasura/Backends/DataConnector/API/V0/MutationsSpec.hs +++ b/server/src-test/Hasura/Backends/DataConnector/API/V0/MutationsSpec.hs @@ -157,8 +157,8 @@ spec = do describe "InsertFieldValue" $ do describe "ColumnInsertFieldValue" $ do - describe "Object" $ - testToFromJSONToSchema + describe "Object" + $ testToFromJSONToSchema (mkColumnInsertFieldValue $ Object [("property", "Wow")]) [aesonQQ| { "property": "Wow" } |] describe "String" $ do @@ -188,15 +188,15 @@ spec = do jsonOpenApiProperties genInsertFieldValue describe "ObjectRelationInsertionOrder" $ do - describe "BeforeParent" $ - testToFromJSONToSchema BeforeParent [aesonQQ|"before_parent"|] - describe "AfterParent" $ - testToFromJSONToSchema AfterParent [aesonQQ|"after_parent"|] + describe "BeforeParent" + $ testToFromJSONToSchema BeforeParent [aesonQQ|"before_parent"|] + describe "AfterParent" + $ testToFromJSONToSchema AfterParent [aesonQQ|"after_parent"|] jsonOpenApiProperties genObjectRelationInsertionOrder describe "RowUpdate" $ do - describe "SetColumnRowUpdate" $ - testToFromJSONToSchema + describe "SetColumnRowUpdate" + $ testToFromJSONToSchema (SetColumn $ RowColumnOperatorValue 
(ColumnName "my_column") (Number 10) (ScalarType "number")) [aesonQQ| { "type": "set", @@ -204,8 +204,8 @@ spec = do "value": 10, "value_type": "number" } |] - describe "CustomUpdateColumnOperator" $ - testToFromJSONToSchema + describe "CustomUpdateColumnOperator" + $ testToFromJSONToSchema (CustomUpdateColumnOperator (UpdateColumnOperatorName [G.name|increment|]) (RowColumnOperatorValue (ColumnName "my_column") (Number 10) (ScalarType "number"))) [aesonQQ| { "type": "custom_operator", @@ -276,16 +276,16 @@ genColumnInsertSchema = <*> Gen.bool <*> Gen.maybe genColumnValueGenerationStrategy -genObjectRelationInsertSchema :: MonadGen m => m ObjectRelationInsertSchema +genObjectRelationInsertSchema :: (MonadGen m) => m ObjectRelationInsertSchema genObjectRelationInsertSchema = ObjectRelationInsertSchema <$> genRelationshipName <*> genObjectRelationInsertionOrder -genObjectRelationInsertionOrder :: MonadGen m => m ObjectRelationInsertionOrder +genObjectRelationInsertionOrder :: (MonadGen m) => m ObjectRelationInsertionOrder genObjectRelationInsertionOrder = Gen.enumBounded -genArrayRelationInsertSchema :: MonadGen m => m ArrayRelationInsertSchema +genArrayRelationInsertSchema :: (MonadGen m) => m ArrayRelationInsertSchema genArrayRelationInsertSchema = ArrayRelationInsertSchema <$> genRelationshipName genMutationOperation :: Gen MutationOperation diff --git a/server/src-test/Hasura/Backends/DataConnector/API/V0/OrderBySpec.hs b/server/src-test/Hasura/Backends/DataConnector/API/V0/OrderBySpec.hs index 9c64309bb2b..dc4c82647cb 100644 --- a/server/src-test/Hasura/Backends/DataConnector/API/V0/OrderBySpec.hs +++ b/server/src-test/Hasura/Backends/DataConnector/API/V0/OrderBySpec.hs @@ -26,22 +26,22 @@ import Test.Hspec spec :: Spec spec = do describe "OrderByTarget" $ do - describe "OrderByColumn" $ - testToFromJSONToSchema + describe "OrderByColumn" + $ testToFromJSONToSchema (OrderByColumn (ColumnName "test_column")) [aesonQQ| { "type": "column", "column": "test_column" } |] - describe "OrderByStarCountAggregate" $ - testToFromJSONToSchema + describe "OrderByStarCountAggregate" + $ testToFromJSONToSchema (OrderByStarCountAggregate) [aesonQQ| { "type": "star_count_aggregate" } |] - describe "OrderBySingleColumnAggregate" $ - testToFromJSONToSchema + describe "OrderBySingleColumnAggregate" + $ testToFromJSONToSchema (OrderBySingleColumnAggregate (SingleColumnAggregate (SingleColumnAggregateFunction [G.name|sum|]) (ColumnName "test_column") (ScalarType "number"))) [aesonQQ| { "type": "single_column_aggregate", @@ -116,10 +116,10 @@ spec = do jsonOpenApiProperties genOrderBy describe "OrderDirection" $ do - describe "Ascending" $ - testToFromJSONToSchema Ascending [aesonQQ|"asc"|] - describe "Descending" $ - testToFromJSONToSchema Descending [aesonQQ|"desc"|] + describe "Ascending" + $ testToFromJSONToSchema Ascending [aesonQQ|"asc"|] + describe "Descending" + $ testToFromJSONToSchema Descending [aesonQQ|"desc"|] jsonOpenApiProperties genOrderDirection genOrderBy :: Gen OrderBy diff --git a/server/src-test/Hasura/Backends/DataConnector/API/V0/QuerySpec.hs b/server/src-test/Hasura/Backends/DataConnector/API/V0/QuerySpec.hs index 8ec8ddb28d3..4c2fc6a6530 100644 --- a/server/src-test/Hasura/Backends/DataConnector/API/V0/QuerySpec.hs +++ b/server/src-test/Hasura/Backends/DataConnector/API/V0/QuerySpec.hs @@ -32,8 +32,8 @@ import Test.Hspec spec :: Spec spec = do describe "Field" $ do - describe "ColumnField" $ - testToFromJSONToSchema + describe "ColumnField" + $ testToFromJSONToSchema (ColumnField 
(ColumnName "my_column_name") (ScalarType "string")) [aesonQQ| { "type": "column", @@ -91,8 +91,8 @@ spec = do describe "TableRequest" $ do let queryRequest = - QRTable $ - TableRequest + QRTable + $ TableRequest { _trTable = TableName ["my_table"], _trRelationships = [], _trQuery = Query (Just mempty) Nothing Nothing Nothing Nothing Nothing Nothing, @@ -114,8 +114,8 @@ spec = do describe "FunctionRequest" $ do let queryRequest = - QRFunction $ - FunctionRequest + QRFunction + $ FunctionRequest { _frFunction = FunctionName ["my_function"], _frFunctionArguments = [], _frRelationships = [], @@ -222,7 +222,7 @@ genFunctionRequest = <*> Gen.set defaultRange genRelationships <*> genQuery -genFunctionName :: MonadGen m => m FunctionName +genFunctionName :: (MonadGen m) => m FunctionName genFunctionName = FunctionName <$> Gen.nonEmpty (linear 1 3) (genArbitraryAlphaNumText defaultRange) genFunctionArgument :: Gen FunctionArgument @@ -233,10 +233,10 @@ genFunctionArgument = genArgumentValue :: Gen ArgumentValue genArgumentValue = - fmap ScalarArgumentValue $ - ScalarValue - <$> genValue - <*> genScalarType + fmap ScalarArgumentValue + $ ScalarValue + <$> genValue + <*> genScalarType genTableRequest :: Gen QueryRequest genTableRequest = diff --git a/server/src-test/Hasura/Backends/DataConnector/API/V0/RelationshipsSpec.hs b/server/src-test/Hasura/Backends/DataConnector/API/V0/RelationshipsSpec.hs index aa82d070ad2..321b1acb7c2 100644 --- a/server/src-test/Hasura/Backends/DataConnector/API/V0/RelationshipsSpec.hs +++ b/server/src-test/Hasura/Backends/DataConnector/API/V0/RelationshipsSpec.hs @@ -28,10 +28,10 @@ spec = do testToFromJSONToSchema (RelationshipName "relationship_name") [aesonQQ|"relationship_name"|] jsonOpenApiProperties genRelationshipName describe "RelationshipType" $ do - describe "ObjectRelationship" $ - testToFromJSONToSchema ObjectRelationship [aesonQQ|"object"|] - describe "ArrayRelationship" $ - testToFromJSONToSchema ArrayRelationship [aesonQQ|"array"|] + describe "ObjectRelationship" + $ testToFromJSONToSchema ObjectRelationship [aesonQQ|"object"|] + describe "ArrayRelationship" + $ testToFromJSONToSchema ArrayRelationship [aesonQQ|"array"|] jsonOpenApiProperties genRelationshipType describe "Relationship" $ do let relationship = @@ -97,14 +97,14 @@ spec = do |] jsonOpenApiProperties genTableRelationships -genRelationshipName :: MonadGen m => m RelationshipName +genRelationshipName :: (MonadGen m) => m RelationshipName genRelationshipName = RelationshipName <$> genArbitraryAlphaNumText defaultRange -genRelationshipType :: MonadGen m => m RelationshipType +genRelationshipType :: (MonadGen m) => m RelationshipType genRelationshipType = Gen.enumBounded -genRelationship :: MonadGen m => m Relationship +genRelationship :: (MonadGen m) => m Relationship genRelationship = Relationship <$> genTableName @@ -114,17 +114,17 @@ genRelationship = genRelationships :: Gen Relationships genRelationships = (RTable <$> genTableRelationships) <|> (RFunction <$> genFunctionRelationships) -genTableRelationships :: MonadGen m => m TableRelationships +genTableRelationships :: (MonadGen m) => m TableRelationships genTableRelationships = TableRelationships <$> genTableName <*> fmap HashMap.fromList (Gen.list defaultRange ((,) <$> genRelationshipName <*> genRelationship)) -genFunctionRelationships :: MonadGen m => m FunctionRelationships +genFunctionRelationships :: (MonadGen m) => m FunctionRelationships genFunctionRelationships = FunctionRelationships <$> genFunctionName <*> fmap HashMap.fromList 
(Gen.list defaultRange ((,) <$> genRelationshipName <*> genRelationship)) -genFunctionName :: MonadGen m => m FunctionName +genFunctionName :: (MonadGen m) => m FunctionName genFunctionName = FunctionName <$> Gen.nonEmpty (linear 1 3) (genArbitraryAlphaNumText defaultRange) diff --git a/server/src-test/Hasura/Backends/DataConnector/API/V0/TableSpec.hs b/server/src-test/Hasura/Backends/DataConnector/API/V0/TableSpec.hs index 50c56ad8dc1..d07740e0097 100644 --- a/server/src-test/Hasura/Backends/DataConnector/API/V0/TableSpec.hs +++ b/server/src-test/Hasura/Backends/DataConnector/API/V0/TableSpec.hs @@ -21,16 +21,16 @@ spec = do testToFromJSONToSchema (TableName ["my_table_name"]) [aesonQQ|["my_table_name"]|] jsonOpenApiProperties genTableName describe "TableInfo" $ do - describe "minimal" $ - testFromJSON + describe "minimal" + $ testFromJSON (TableInfo (TableName ["my_table_name"]) Table [] Nothing (ForeignKeys mempty) Nothing False False False) [aesonQQ| { "name": ["my_table_name"], "columns": [] } |] - describe "non-minimal" $ - testToFromJSONToSchema + describe "non-minimal" + $ testToFromJSONToSchema ( TableInfo (TableName ["my_table_name"]) View @@ -53,8 +53,8 @@ spec = do "deletable": true } |] - describe "foreign-key" $ - testToFromJSONToSchema + describe "foreign-key" + $ testToFromJSONToSchema ( TableInfo (TableName ["my_table_name"]) Table @@ -87,21 +87,21 @@ spec = do |] jsonOpenApiProperties genTableInfo -genTableName :: MonadGen m => m TableName +genTableName :: (MonadGen m) => m TableName genTableName = TableName <$> Gen.nonEmpty (linear 1 3) (genArbitraryAlphaNumText defaultRange) -genForeignKeys :: MonadGen m => m ForeignKeys +genForeignKeys :: (MonadGen m) => m ForeignKeys genForeignKeys = ForeignKeys <$> genHashMap genConstraintName genConstraint defaultRange -genConstraintName :: MonadGen m => m ConstraintName +genConstraintName :: (MonadGen m) => m ConstraintName genConstraintName = ConstraintName <$> genArbitraryAlphaNumText defaultRange -genConstraint :: MonadGen m => m Constraint +genConstraint :: (MonadGen m) => m Constraint genConstraint = let mapping = genHashMap genColumnName genColumnName defaultRange in Constraint <$> genTableName <*> mapping -genTableType :: MonadGen m => m TableType +genTableType :: (MonadGen m) => m TableType genTableType = Gen.enumBounded -- | Note: this generator is intended for serialization tests only and does not ensure valid Foreign Key Constraints. 
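These DataConnector API spec hunks all lean on the same idiom: a Hedgehog generator per API type (genTableName, genForeignKeys, and so on) feeding a JSON round-trip property through the suite's testToFromJSONToSchema and jsonOpenApiProperties helpers. A minimal sketch of the underlying idiom, with a hypothetical Wrapper type; the real helpers additionally validate the generated OpenAPI schema, which plain tripping does not:

{-# LANGUAGE DeriveGeneric #-}

import Data.Aeson (FromJSON, ToJSON, eitherDecode, encode)
import GHC.Generics (Generic)
import Hedgehog
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range

-- Hypothetical payload standing in for the API types exercised above.
data Wrapper = Wrapper {wName :: String, wNullable :: Bool}
  deriving (Eq, Show, Generic)

instance ToJSON Wrapper

instance FromJSON Wrapper

genWrapper :: Gen Wrapper
genWrapper =
  Wrapper
    <$> Gen.string (Range.linear 1 8) Gen.alphaNum
    <*> Gen.bool

-- encode and eitherDecode must be mutually inverse for every generated value.
prop_roundtrip :: Property
prop_roundtrip = property $ do
  w <- forAll genWrapper
  tripping w encode eitherDecode

As the note above says, such generators only need to cover the serialization space; they make no attempt to produce semantically valid data (for example, consistent foreign-key constraints).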
diff --git a/server/src-test/Hasura/Backends/Postgres/NativeQueries/NativeQueriesSpec.hs b/server/src-test/Hasura/Backends/Postgres/NativeQueries/NativeQueriesSpec.hs index 1cbf475d444..1cc99c9b7bb 100644 --- a/server/src-test/Hasura/Backends/Postgres/NativeQueries/NativeQueriesSpec.hs +++ b/server/src-test/Hasura/Backends/Postgres/NativeQueries/NativeQueriesSpec.hs @@ -96,17 +96,17 @@ spec = do it "Handles multiple occurences of variables " do let Right code = parseInterpolatedQuery "SELECT {{hey}}, {{hey}}" let actual :: Either QErr Text = - fmap snd $ - runExcept $ - nativeQueryToPreparedStatement - lmm - nqm - { _nqmCode = code, - _nqmArguments = - HashMap.fromList - [ (ArgumentName "hey", NullableScalarType PGVarchar False Nothing) - ] - } + fmap snd + $ runExcept + $ nativeQueryToPreparedStatement + lmm + nqm + { _nqmCode = code, + _nqmArguments = + HashMap.fromList + [ (ArgumentName "hey", NullableScalarType PGVarchar False Nothing) + ] + } (first showQErr actual) `shouldSatisfy` isRight let Right rendered = actual @@ -116,18 +116,18 @@ spec = do it "Handles multiple variables " do let Right code = parseInterpolatedQuery "SELECT {{hey}}, {{ho}}" let actual :: Either QErr Text = - fmap snd $ - runExcept $ - nativeQueryToPreparedStatement - lmm - nqm - { _nqmCode = code, - _nqmArguments = - HashMap.fromList - [ (ArgumentName "hey", NullableScalarType PGVarchar False Nothing), - (ArgumentName "ho", NullableScalarType PGInteger False Nothing) - ] - } + fmap snd + $ runExcept + $ nativeQueryToPreparedStatement + lmm + nqm + { _nqmCode = code, + _nqmArguments = + HashMap.fromList + [ (ArgumentName "hey", NullableScalarType PGVarchar False Nothing), + (ArgumentName "ho", NullableScalarType PGInteger False Nothing) + ] + } (first showQErr actual) `shouldSatisfy` isRight let Right rendered = actual diff --git a/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenAnnSelectG.hs b/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenAnnSelectG.hs index 45724daf8e1..7926bd05990 100644 --- a/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenAnnSelectG.hs +++ b/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenAnnSelectG.hs @@ -17,7 +17,7 @@ import Hedgehog.Gen qualified as Gen -------------------------------------------------------------------------------- -genAnnSelectG :: forall m f a. MonadGen m => m a -> m (f a) -> m (AnnSelectG ('Postgres 'Vanilla) f a) +genAnnSelectG :: forall m f a. 
(MonadGen m) => m a -> m (f a) -> m (AnnSelectG ('Postgres 'Vanilla) f a) genAnnSelectG genA genFA = AnnSelectG <$> genFields genFA defaultRange defaultRange diff --git a/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenAssociatedTypes.hs b/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenAssociatedTypes.hs index 1964b662324..a6c627c19cc 100644 --- a/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenAssociatedTypes.hs +++ b/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenAssociatedTypes.hs @@ -34,14 +34,14 @@ import Hedgehog.Range qualified as Range -------------------------------------------------------------------------------- -- Exported -genColumn :: MonadGen m => m (Column ('Postgres 'Vanilla)) +genColumn :: (MonadGen m) => m (Column ('Postgres 'Vanilla)) genColumn = Postgres.unsafePGCol <$> genArbitraryUnicodeText defaultRange -- | Generator for a qualified Postgres 'TableName' -genTableName :: MonadGen m => m (TableName ('Postgres 'Vanilla)) +genTableName :: (MonadGen m) => m (TableName ('Postgres 'Vanilla)) genTableName = genQualifiedTable -genScalarType :: MonadGen m => m (ScalarType ('Postgres 'Vanilla)) +genScalarType :: (MonadGen m) => m (ScalarType ('Postgres 'Vanilla)) genScalarType = Gen.choice [ pure Postgres.PGSmallInt, @@ -75,14 +75,14 @@ genScalarType = Postgres.PGCompositeScalar <$> genArbitraryUnicodeText defaultRange ] -genFunctionName :: MonadGen m => m (FunctionName ('Postgres 'Vanilla)) +genFunctionName :: (MonadGen m) => m (FunctionName ('Postgres 'Vanilla)) genFunctionName = Postgres.QualifiedObject <$> genSchemaName defaultRange <*> genPgFunctionName defaultRange -genXComputedField :: MonadGen m => m (XComputedField ('Postgres 'Vanilla)) +genXComputedField :: (MonadGen m) => m (XComputedField ('Postgres 'Vanilla)) genXComputedField = pure () -genBooleanOperators :: MonadGen m => m a -> m (BooleanOperators ('Postgres 'Vanilla) a) +genBooleanOperators :: (MonadGen m) => m a -> m (BooleanOperators ('Postgres 'Vanilla) a) genBooleanOperators genA = Gen.choice [ B.AILIKE <$> genA, @@ -122,7 +122,7 @@ genBooleanOperators genA = B.AMatchesFulltext <$> genA ] -genFunctionArgumentExp :: MonadGen m => m a -> m (FunctionArgumentExp ('Postgres 'Vanilla) a) +genFunctionArgumentExp :: (MonadGen m) => m a -> m (FunctionArgumentExp ('Postgres 'Vanilla) a) genFunctionArgumentExp genA = Gen.choice [ pure PGTypes.AETableRow, @@ -135,7 +135,7 @@ genFunctionArgumentExp genA = -- Unexported Helpers -- | Generator for a qualified Postgres table. -genQualifiedTable :: MonadGen m => m PGTypes.QualifiedTable +genQualifiedTable :: (MonadGen m) => m PGTypes.QualifiedTable genQualifiedTable = do let schema = PGTypes.SchemaName <$> genIdentifier table = PGTypes.TableName <$> genIdentifier @@ -154,7 +154,7 @@ genQualifiedTableFixture = -- | Generator for an arbitrary Postgres identifier. -- -- cf. 
https://www.postgresql.org/docs/11/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS -genIdentifier :: MonadGen m => m Text +genIdentifier :: (MonadGen m) => m Text genIdentifier = do -- NOTE: 'Gen.alpha' is used out of convenience, but the Postgres -- specification states that identifiers may begin with "letters with @@ -178,21 +178,21 @@ genIdentifier = do -- Construct the arbitrarily generated identifier pure $ T.cons begin rest -genSchemaName :: MonadGen m => Range Int -> m Postgres.SchemaName +genSchemaName :: (MonadGen m) => Range Int -> m Postgres.SchemaName genSchemaName textRange = Gen.choice [pure Postgres.publicSchema, Postgres.SchemaName <$> genArbitraryUnicodeText textRange] -genPgFunctionName :: MonadGen m => Range Int -> m Postgres.FunctionName +genPgFunctionName :: (MonadGen m) => Range Int -> m Postgres.FunctionName genPgFunctionName textRange = Postgres.FunctionName <$> genArbitraryUnicodeText textRange -genDWithinGeomOp :: MonadGen m => m a -> m (DWithinGeomOp a) +genDWithinGeomOp :: (MonadGen m) => m a -> m (DWithinGeomOp a) genDWithinGeomOp genA = DWithinGeomOp <$> genA <*> genA -genDWithinGeogOp :: MonadGen m => m a -> m (DWithinGeogOp a) +genDWithinGeogOp :: (MonadGen m) => m a -> m (DWithinGeogOp a) genDWithinGeogOp genA = DWithinGeogOp <$> genA <*> genA <*> genA -genTIntersectsGeomminNband :: MonadGen m => m a -> m (STIntersectsGeomminNband a) +genTIntersectsGeomminNband :: (MonadGen m) => m a -> m (STIntersectsGeomminNband a) genTIntersectsGeomminNband genA = STIntersectsGeomminNband <$> genA <*> Gen.maybe genA -genSTIntersectsNbandGeommin :: MonadGen m => m a -> m (STIntersectsNbandGeommin a) +genSTIntersectsNbandGeommin :: (MonadGen m) => m a -> m (STIntersectsNbandGeommin a) genSTIntersectsNbandGeommin genA = STIntersectsNbandGeommin <$> genA <*> genA diff --git a/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenSelectArgsG.hs b/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenSelectArgsG.hs index 77404a5f5b1..b0fecde4555 100644 --- a/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenSelectArgsG.hs +++ b/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenSelectArgsG.hs @@ -32,7 +32,7 @@ import Hedgehog.Gen qualified as Gen -------------------------------------------------------------------------------- -- Exported -genSelectArgsG :: forall m a. MonadGen m => m a -> m (SelectArgsG ('Postgres 'Vanilla) a) +genSelectArgsG :: forall m a. (MonadGen m) => m a -> m (SelectArgsG ('Postgres 'Vanilla) a) genSelectArgsG genA = do _saWhere <- where' _saOrderBy <- orderBy @@ -43,8 +43,8 @@ genSelectArgsG genA = do where where' :: m (Maybe (AnnBoolExp ('Postgres 'Vanilla) a)) where' = - Gen.maybe $ - genAnnBoolExp + Gen.maybe + $ genAnnBoolExp ( genAnnBoolExpFld genColumn genTableName @@ -59,8 +59,9 @@ genSelectArgsG genA = do orderBy :: m (Maybe (NonEmpty (AnnotatedOrderByItemG ('Postgres 'Vanilla) a))) orderBy = - Gen.maybe . Gen.nonEmpty defaultRange $ - genAnnotatedOrderByItemG @_ @('Postgres 'Vanilla) + Gen.maybe + . 
Gen.nonEmpty defaultRange + $ genAnnotatedOrderByItemG @_ @('Postgres 'Vanilla) genBasicOrderType genNullsOrderType ( genAnnotatedOrderByElement @_ @('Postgres 'Vanilla) @@ -86,8 +87,8 @@ genSelectArgsG genA = do -------------------------------------------------------------------------------- -- Unexported Helpers -genBasicOrderType :: MonadGen m => m (BasicOrderType ('Postgres 'Vanilla)) +genBasicOrderType :: (MonadGen m) => m (BasicOrderType ('Postgres 'Vanilla)) genBasicOrderType = Gen.element [OTAsc, OTDesc] -genNullsOrderType :: MonadGen m => m (NullsOrderType ('Postgres 'Vanilla)) +genNullsOrderType :: (MonadGen m) => m (NullsOrderType ('Postgres 'Vanilla)) genNullsOrderType = Gen.element [NullsFirst, NullsLast] diff --git a/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenSelectFromG.hs b/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenSelectFromG.hs index 6b6f2ebcdf0..6c9ec51649b 100644 --- a/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenSelectFromG.hs +++ b/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenSelectFromG.hs @@ -17,7 +17,7 @@ import Hedgehog.Gen qualified as Gen -- | @genA@ is a generator for some type @a@ which is threaded through -- the 'SelectFromG' AST. At the leaf nodes this is in an 'ArgumentExp -- a' term. -genSelectFromG :: MonadGen m => m a -> m (SelectFromG ('Postgres 'Vanilla) a) +genSelectFromG :: (MonadGen m) => m a -> m (SelectFromG ('Postgres 'Vanilla) a) genSelectFromG genA = Gen.choice [fromTable, fromIdentifier, fromFunction] where fromTable = FromTable <$> genTableName diff --git a/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenTablePermG.hs b/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenTablePermG.hs index e699d30961c..4a60790e1c6 100644 --- a/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenTablePermG.hs +++ b/server/src-test/Hasura/Backends/Postgres/RQLGenerator/GenTablePermG.hs @@ -22,7 +22,7 @@ import Hedgehog.Gen qualified as Gen -------------------------------------------------------------------------------- -genTablePermG :: MonadGen m => m a -> m (TablePermG ('Postgres 'Vanilla) a) +genTablePermG :: (MonadGen m) => m a -> m (TablePermG ('Postgres 'Vanilla) a) genTablePermG genA = do let genV = genAnnBoolExpFld @_ @('Postgres 'Vanilla) genColumn genTableName genScalarType genFunctionName genXComputedField (genBooleanOperators genA) (genFunctionArgumentExp genA) genA gBoolExp <- genAnnBoolExp @_ @_ @('Postgres 'Vanilla) genV genTableName diff --git a/server/src-test/Hasura/Backends/Postgres/SQL/Select/RenameIdentifiersSpec.hs b/server/src-test/Hasura/Backends/Postgres/SQL/Select/RenameIdentifiersSpec.hs index 861140b64f9..6323d26a988 100644 --- a/server/src-test/Hasura/Backends/Postgres/SQL/Select/RenameIdentifiersSpec.hs +++ b/server/src-test/Hasura/Backends/Postgres/SQL/Select/RenameIdentifiersSpec.hs @@ -9,8 +9,8 @@ import Test.Hspec spec :: Spec spec = do - it "empty is empty" $ - shouldBe + it "empty is empty" + $ shouldBe (renameIdentifiers mkSelect) mkSelect literalQueries @@ -123,8 +123,8 @@ simpleQueries = [ mkSelect { selExtr = [row_to_json_ [selectIdentifiers_ e1' root_base' ["id", "author"]] `asC_` "root"], selFrom = - from_ $ - lateralLeftJoin_ + from_ + $ lateralLeftJoin_ (selectStar_ "public" "article" `as'_` root_base') ( mkSelect { selExtr = @@ -183,8 +183,8 @@ simpleQueries = [ mkSelect { selExtr = [row_to_json_ [selectIdentifiers_ e1' root_base' ["id", "author_with_a_very_long_name_that_is_almost_63_characters_long"]] `asC_` "root"], selFrom = - from_ $ - 
lateralLeftJoin_ + from_ + $ lateralLeftJoin_ (selectStar_ "public" "article" `as'_` root_base') ( mkSelect { selExtr = @@ -277,8 +277,8 @@ simpleQueries = [ mkSelect { selExtr = [row_to_json_ [selectIdentifiers_ e1' root_base' ["id", "author"]] `asC_` "root"], selFrom = - from_ $ - lateralLeftJoin_ + from_ + $ lateralLeftJoin_ (selectStar_ "public" "author" `as'_` root_base') ( mkSelect { selExtr = diff --git a/server/src-test/Hasura/Backends/Postgres/Translate/UpdateSpec.hs b/server/src-test/Hasura/Backends/Postgres/Translate/UpdateSpec.hs index 272b260a03b..170f72d70a8 100644 --- a/server/src-test/Hasura/Backends/Postgres/Translate/UpdateSpec.hs +++ b/server/src-test/Hasura/Backends/Postgres/Translate/UpdateSpec.hs @@ -25,8 +25,8 @@ spec = columns = [P.idColumn, P.nameColumn], mutationOutput = MOutMultirowFields [("affected_rows", MCount)], updateVariant = - Expect.SingleBatchUpdate $ - Expect.UpdateBatchBuilder + Expect.SingleBatchUpdate + $ Expect.UpdateBatchBuilder { ubbOperations = [(P.nameColumn, UpdateSet P.textNew)], ubbWhere = [(P.idColumn, [AEQ True P.integerOne])] }, @@ -47,8 +47,8 @@ spec = columns = [P.idColumn, P.nameColumn, P.descColumn], mutationOutput = MOutMultirowFields [("affected_rows", MCount)], updateVariant = - Expect.SingleBatchUpdate $ - Expect.UpdateBatchBuilder + Expect.SingleBatchUpdate + $ Expect.UpdateBatchBuilder { ubbOperations = [ (P.nameColumn, UpdateSet P.textNew), (P.descColumn, UpdateSet P.textOther) @@ -72,8 +72,8 @@ spec = columns = [P.idColumn, P.nameColumn, P.descColumn], mutationOutput = MOutMultirowFields [("affected_rows", MCount)], updateVariant = - Expect.SingleBatchUpdate $ - Expect.UpdateBatchBuilder + Expect.SingleBatchUpdate + $ Expect.UpdateBatchBuilder { ubbOperations = [(P.nameColumn, UpdateSet P.textNew)], ubbWhere = [ (P.idColumn, [AEQ True P.integerOne]), diff --git a/server/src-test/Hasura/Generator/Common.hs b/server/src-test/Hasura/Generator/Common.hs index bbde6f6ba26..f3b1e5532d2 100644 --- a/server/src-test/Hasura/Generator/Common.hs +++ b/server/src-test/Hasura/Generator/Common.hs @@ -28,8 +28,8 @@ import Test.Hspec import Test.Hspec.Hedgehog genHashMap :: - MonadGen m => - Hashable a => + (MonadGen m) => + (Hashable a) => m a -> m b -> Range Int -> @@ -42,30 +42,30 @@ genInt = fromIntegral <$> Gen.int32 (Range.linear 1 99999) genText :: Gen Text genText = Gen.text (Range.linear 0 11) Gen.unicode -genNonEmptyText :: MonadGen m => Range Int -> m NonEmptyText +genNonEmptyText :: (MonadGen m) => Range Int -> m NonEmptyText genNonEmptyText range = mkNonEmptyText `Gen.mapMaybeT` genArbitraryUnicodeText range -genArbitraryUnicodeText :: MonadGen m => Range Int -> m Text +genArbitraryUnicodeText :: (MonadGen m) => Range Int -> m Text genArbitraryUnicodeText range = Gen.text range Gen.unicode -genArbitraryAlphaNumText :: MonadGen m => Range Int -> m Text +genArbitraryAlphaNumText :: (MonadGen m) => Range Int -> m Text genArbitraryAlphaNumText range = Gen.text range Gen.alphaNum genArbitraryAlphaNumTextExcluding :: (MonadGen m, GenBase m ~ Identity) => [Text] -> Range Int -> m Text genArbitraryAlphaNumTextExcluding excluded = Gen.filter (`notElem` excluded) . 
genArbitraryAlphaNumText -genFieldName :: MonadGen m => Range Int -> m FieldName +genFieldName :: (MonadGen m) => Range Int -> m FieldName genFieldName range = FieldName <$> genArbitraryUnicodeText range -genGName :: MonadGen m => Range Int -> m G.Name +genGName :: (MonadGen m) => Range Int -> m G.Name genGName range = G.mkName `Gen.mapMaybeT` genArbitraryAlphaNumText range -genDescription :: MonadGen m => Range Int -> m G.Description +genDescription :: (MonadGen m) => Range Int -> m G.Description genDescription range = G.Description <$> genArbitraryUnicodeText range -- | A reasonable range size to generate data on dev machines without -- blowing up. -defaultRange :: Integral a => Range a +defaultRange :: (Integral a) => Range a defaultRange = Range.linear 0 8 -- | Given 'Gen' @a@, assert that @a@'s Aeson instances are isomorphic. diff --git a/server/src-test/Hasura/GraphQL/Schema/BoolExp/AggregationPredicatesSpec.hs b/server/src-test/Hasura/GraphQL/Schema/BoolExp/AggregationPredicatesSpec.hs index aef1d966c50..9ab8127f82b 100644 --- a/server/src-test/Hasura/GraphQL/Schema/BoolExp/AggregationPredicatesSpec.hs +++ b/server/src-test/Hasura/GraphQL/Schema/BoolExp/AggregationPredicatesSpec.hs @@ -84,16 +84,16 @@ spec = do describe "When no aggregation functions are given" do it "Yields no parsers" do let maybeParser = - runSchemaTest sourceInfo $ - defaultAggregationPredicatesParser @('Postgres 'Vanilla) @_ @_ @ParserTest + runSchemaTest sourceInfo + $ defaultAggregationPredicatesParser @('Postgres 'Vanilla) @_ @_ @ParserTest [] albumTableInfo (Unshowable maybeParser) `shouldSatisfy` (isNothing . unUnshowable) describe "When some aggregation functions are given" do let maybeParser = - runSchemaTest sourceInfo $ - defaultAggregationPredicatesParser @('Postgres 'Vanilla) @_ @_ @ParserTest + runSchemaTest sourceInfo + $ defaultAggregationPredicatesParser @('Postgres 'Vanilla) @_ @_ @ParserTest [ FunctionSignature { fnName = "count", fnGQLName = [G.name|count|], diff --git a/server/src-test/Hasura/GraphQL/Schema/Build/UpdateSpec.hs b/server/src-test/Hasura/GraphQL/Schema/Build/UpdateSpec.hs index cf9e163e547..81592040c84 100644 --- a/server/src-test/Hasura/GraphQL/Schema/Build/UpdateSpec.hs +++ b/server/src-test/Hasura/GraphQL/Schema/Build/UpdateSpec.hs @@ -34,8 +34,8 @@ spec = do UpdateExpectationBuilder { utbOutput = MOutMultirowFields [("affected_rows", MCount)], utbUpdate = - SingleBatchUpdate $ - UpdateBatchBuilder + SingleBatchUpdate + $ UpdateBatchBuilder { ubbOperations = [(P.nameColumnBuilder, UpdateSet P.textNew)], ubbWhere = [(P.nameColumnBuilder, [AEQ True P.textOld])] } @@ -60,8 +60,8 @@ spec = do UpdateExpectationBuilder { utbOutput = MOutMultirowFields [("affected_rows", MCount)], utbUpdate = - SingleBatchUpdate $ - UpdateBatchBuilder + SingleBatchUpdate + $ UpdateBatchBuilder { ubbOperations = [ (P.nameColumnBuilder, UpdateSet P.textNew), (P.descColumnBuilder, UpdateSet P.textOther) diff --git a/server/src-test/Hasura/GraphQL/Schema/RemoteSpec.hs b/server/src-test/Hasura/GraphQL/Schema/RemoteSpec.hs index 374283954cf..8c275ae5fae 100644 --- a/server/src-test/Hasura/GraphQL/Schema/RemoteSpec.hs +++ b/server/src-test/Hasura/GraphQL/Schema/RemoteSpec.hs @@ -51,65 +51,69 @@ instance P.MonadParse TestMonad where -- test tools -runError :: Monad m => ExceptT QErr m a -> m a +runError :: (Monad m) => ExceptT QErr m a -> m a runError = runExceptT >=> (`onLeft` (error . T.unpack . 
qeError)) mkTestRemoteSchema :: Text -> RemoteSchemaIntrospection -mkTestRemoteSchema schema = RemoteSchemaIntrospection $ - HashMap.fromListOn getTypeName $ - runIdentity $ - runError $ do - G.SchemaDocument types <- G.parseSchemaDocument schema `onLeft` throw500 - pure $ flip mapMaybe types \case - G.TypeSystemDefinitionSchema _ -> Nothing - G.TypeSystemDefinitionType td -> Just $ case fmap toRemoteInputValue td of - G.TypeDefinitionScalar std -> G.TypeDefinitionScalar std - G.TypeDefinitionObject otd -> G.TypeDefinitionObject otd - G.TypeDefinitionUnion utd -> G.TypeDefinitionUnion utd - G.TypeDefinitionEnum etd -> G.TypeDefinitionEnum etd - G.TypeDefinitionInputObject itd -> G.TypeDefinitionInputObject itd - G.TypeDefinitionInterface itd -> - G.TypeDefinitionInterface $ - G.InterfaceTypeDefinition - { G._itdDescription = G._itdDescription itd, - G._itdName = G._itdName itd, - G._itdDirectives = G._itdDirectives itd, - G._itdFieldsDefinition = G._itdFieldsDefinition itd, - G._itdPossibleTypes = [] - } +mkTestRemoteSchema schema = RemoteSchemaIntrospection + $ HashMap.fromListOn getTypeName + $ runIdentity + $ runError + $ do + G.SchemaDocument types <- G.parseSchemaDocument schema `onLeft` throw500 + pure $ flip mapMaybe types \case + G.TypeSystemDefinitionSchema _ -> Nothing + G.TypeSystemDefinitionType td -> Just $ case fmap toRemoteInputValue td of + G.TypeDefinitionScalar std -> G.TypeDefinitionScalar std + G.TypeDefinitionObject otd -> G.TypeDefinitionObject otd + G.TypeDefinitionUnion utd -> G.TypeDefinitionUnion utd + G.TypeDefinitionEnum etd -> G.TypeDefinitionEnum etd + G.TypeDefinitionInputObject itd -> G.TypeDefinitionInputObject itd + G.TypeDefinitionInterface itd -> + G.TypeDefinitionInterface + $ G.InterfaceTypeDefinition + { G._itdDescription = G._itdDescription itd, + G._itdName = G._itdName itd, + G._itdDirectives = G._itdDirectives itd, + G._itdFieldsDefinition = G._itdFieldsDefinition itd, + G._itdPossibleTypes = [] + } where toRemoteInputValue ivd = RemoteSchemaInputValueDefinition { _rsitdDefinition = ivd, _rsitdPresetArgument = - choice $ - G._ivdDirectives ivd <&> \dir -> do + choice + $ G._ivdDirectives ivd + <&> \dir -> do guard $ G._dName dir == Name._preset value <- HashMap.lookup Name._value $ G._dArguments dir Just $ case value of G.VString "x-hasura-test" -> - G.VVariable $ - SessionPresetVariable (mkSessionVariable "x-hasura-test") GName._String SessionArgumentPresetScalar + G.VVariable + $ SessionPresetVariable (mkSessionVariable "x-hasura-test") GName._String SessionArgumentPresetScalar _ -> absurd <$> value } mkTestExecutableDocument :: Text -> ([G.VariableDefinition], G.SelectionSet G.NoFragments G.Name) -mkTestExecutableDocument t = runIdentity $ - runError $ do +mkTestExecutableDocument t = runIdentity + $ runError + $ do G.ExecutableDocument execDoc <- G.parseExecutableDoc t `onLeft` throw500 case execDoc of [G.ExecutableDefinitionOperation op] -> case op of G.OperationDefinitionUnTyped selSet -> ([],) <$> inlineSelectionSet [] selSet G.OperationDefinitionTyped opDef -> do - unless (G._todType opDef == G.OperationTypeQuery) $ - throw500 "only queries for now" + unless (G._todType opDef == G.OperationTypeQuery) + $ throw500 "only queries for now" resSelSet <- inlineSelectionSet [] $ G._todSelectionSet opDef pure (G._todVariableDefinitions opDef, resSelSet) _ -> throw500 "must have only one query in the document" mkTestVariableValues :: LBS.ByteString -> HashMap.HashMap G.Name J.Value -mkTestVariableValues vars = runIdentity $ - runError $ do 
+mkTestVariableValues vars = runIdentity + $ runError + $ do value <- J.eitherDecode vars `onLeft` (throw500 . T.pack) case value of J.Object vs -> @@ -131,12 +135,13 @@ buildQueryParsers introspection = do ignoreRemoteRelationship adminRoleName RemoteSchemaParser query _ _ <- - runError $ - runMemoizeT $ - runRemoteSchema schemaContext $ - buildRemoteParser introResult remoteSchemaRels remoteSchemaInfo - pure $ - head query <&> \case + runError + $ runMemoizeT + $ runRemoteSchema schemaContext + $ buildRemoteParser introResult remoteSchemaRels remoteSchemaInfo + pure + $ head query + <&> \case NotNamespaced remoteFld -> _rfField remoteFld Namespaced _ -> -- Shouldn't happen if we're using identityCustomizer @@ -165,8 +170,8 @@ run :: IO (GraphQLField (RemoteRelationshipField UnpreparedValue) RemoteSchemaVariable) run schema query variables = do parser <- buildQueryParsers $ mkTestRemoteSchema schema - pure $ - runQueryParser + pure + $ runQueryParser parser (mkTestExecutableDocument query) (mkTestVariableValues variables) @@ -226,12 +231,12 @@ query($a: A!) { arg `shouldBe` ( _a, -- the parser did not create a new JSON variable, and forwarded the query variable unmodified - G.VVariable $ - QueryVariable $ - Variable - (VIRequired _a) - (G.TypeNamed (G.Nullability False) _A) - (JSONValue $ J.Object $ KM.fromList [("b", J.Object $ KM.fromList [("c", J.Object $ KM.fromList [("i", J.Number 0)])])]) + G.VVariable + $ QueryVariable + $ Variable + (VIRequired _a) + (G.TypeNamed (G.Nullability False) _A) + (JSONValue $ J.Object $ KM.fromList [("b", J.Object $ KM.fromList [("c", J.Object $ KM.fromList [("i", J.Number 0)])])]) ) testNoVarExpansionIfNoPresetUnlessTopLevelOptionalField :: Spec @@ -281,8 +286,8 @@ query($a: A) { `shouldBe` ( _a, -- fieldOptional has peeled the variable; all we see is a JSON blob, and in doubt -- we repackage it as a newly minted JSON variable - G.VVariable $ - RemoteJSONValue + G.VVariable + $ RemoteJSONValue (G.TypeNamed (G.Nullability True) _A) (J.Object $ KM.fromList [("b", J.Object $ KM.fromList [("c", J.Object $ KM.fromList [("i", J.Number 0)])])]) ) @@ -334,14 +339,14 @@ query($a: A!) 
{ arg `shouldBe` ( _a, -- the preset has caused partial variable expansion, only up to where it's needed - G.VObject $ - HashMap.fromList + G.VObject + $ HashMap.fromList [ ( _x, G.VInt 0 ), ( _b, - G.VVariable $ - RemoteJSONValue + G.VVariable + $ RemoteJSONValue (G.TypeNamed (G.Nullability True) _B) (J.Object $ KM.fromList [("c", J.Object $ KM.fromList [("i", J.Number 0)])]) ) diff --git a/server/src-test/Hasura/QuickCheck/Instances.hs b/server/src-test/Hasura/QuickCheck/Instances.hs index 7404dace661..dfab72491b5 100644 --- a/server/src-test/Hasura/QuickCheck/Instances.hs +++ b/server/src-test/Hasura/QuickCheck/Instances.hs @@ -264,32 +264,32 @@ instance Arbitrary IntrospectionResult where scalarTypeDefinitions <- for scalarTypeNames genScalarTypeDefinition objectTypeDefinitions <- - for objectTypeNames $ - genObjectTypeDefinition inputValues outputTypeNames interfaceTypeNames + for objectTypeNames + $ genObjectTypeDefinition inputValues outputTypeNames interfaceTypeNames interfaceTypeDefinitions <- - for interfaceTypeNames $ - genInterfaceTypeDefinition inputValues outputTypeNames + for interfaceTypeNames + $ genInterfaceTypeDefinition inputValues outputTypeNames unionTypeDefinitions <- - for unionTypeNames $ - genUnionTypeDefinition objectTypeNames + for unionTypeNames + $ genUnionTypeDefinition objectTypeNames enumTypeDefinitions <- for enumTypeNames genEnumTypeDefinition inputObjectTypeDefinitions <- - for inputObjectTypeNames $ - genInputObjectTypeDefinition inputValues + for inputObjectTypeNames + $ genInputObjectTypeDefinition inputValues -- finally, create an IntrospectionResult from the aggregated definitions let irDoc = - RemoteSchemaIntrospection $ - HashMap.fromListOn getTypeName $ - concat - [ GraphQL.TypeDefinitionScalar <$> scalarTypeDefinitions, - GraphQL.TypeDefinitionObject <$> objectTypeDefinitions, - GraphQL.TypeDefinitionInterface <$> interfaceTypeDefinitions, - GraphQL.TypeDefinitionUnion <$> unionTypeDefinitions, - GraphQL.TypeDefinitionEnum <$> enumTypeDefinitions, - GraphQL.TypeDefinitionInputObject <$> inputObjectTypeDefinitions - ] + RemoteSchemaIntrospection + $ HashMap.fromListOn getTypeName + $ concat + [ GraphQL.TypeDefinitionScalar <$> scalarTypeDefinitions, + GraphQL.TypeDefinitionObject <$> objectTypeDefinitions, + GraphQL.TypeDefinitionInterface <$> interfaceTypeDefinitions, + GraphQL.TypeDefinitionUnion <$> unionTypeDefinitions, + GraphQL.TypeDefinitionEnum <$> enumTypeDefinitions, + GraphQL.TypeDefinitionInputObject <$> inputObjectTypeDefinitions + ] irQueryRoot <- elements objectTypeNames let maybeObjectTypeName = elements $ Nothing : (Just <$> objectTypeNames) irMutationRoot <- maybeObjectTypeName diff --git a/server/src-test/Hasura/RQL/IR/Generator.hs b/server/src-test/Hasura/RQL/IR/Generator.hs index 9ae0b5852c2..d253a1bae8a 100644 --- a/server/src-test/Hasura/RQL/IR/Generator.hs +++ b/server/src-test/Hasura/RQL/IR/Generator.hs @@ -44,12 +44,12 @@ import Language.GraphQL.Draft.Syntax qualified as G -- | Generate a list of pairs of field names and 'a's. -- -- | See 'genFieldName' for details on generating field names. 
-genFields :: MonadGen m => m a -> Range Int -> Range Int -> m (Fields a) +genFields :: (MonadGen m) => m a -> Range Int -> Range Int -> m (Fields a) genFields genA fieldsRange fieldNameRange = list fieldsRange $ (,) <$> genFieldName fieldNameRange <*> genA genFunctionArgsExpG :: - MonadGen m => + (MonadGen m) => m a -> m (FunctionArgsExpG a) genFunctionArgsExpG genA = @@ -58,7 +58,7 @@ genFunctionArgsExpG genA = <*> genHashMap (genArbitraryUnicodeText defaultRange) genA defaultRange genGExists :: - MonadGen m => + (MonadGen m) => m a -> m (TableName b) -> m (GExists b a) @@ -66,7 +66,7 @@ genGExists aGen tableGen = GExists <$> tableGen <*> genAnnBoolExp aGen tableGen genAnnBoolExp :: - MonadGen m => + (MonadGen m) => m a -> m (TableName b) -> m (GBoolExp b a) @@ -85,9 +85,9 @@ genAnnBoolExp boolFld = BoolField <$> aGen genAnnBoolExpFld :: - MonadGen m => - Hashable (ScalarType b) => - Hashable (Column b) => + (MonadGen m) => + (Hashable (ScalarType b)) => + (Hashable (Column b)) => m (Column b) -> m (TableName b) -> m (ScalarType b) -> @@ -165,8 +165,8 @@ genAnnBoolExpFld genA genRelInfo :: - MonadGen m => - Hashable (Column b) => + (MonadGen m) => + (Hashable (Column b)) => m (TableName b) -> m (Column b) -> m (RelInfo b) @@ -179,29 +179,29 @@ genRelInfo genTableName genColumn = <*> bool_ <*> genInsertOrder -genRelTarget :: MonadGen m => m (TableName b) -> m (RelTarget b) +genRelTarget :: (MonadGen m) => m (TableName b) -> m (RelTarget b) genRelTarget genTableName = choice [ RelTargetTable <$> genTableName, RelTargetNativeQuery <$> genNativeQueryName ] -genNativeQueryName :: MonadGen m => m NativeQueryName +genNativeQueryName :: (MonadGen m) => m NativeQueryName genNativeQueryName = NativeQueryName . G.unsafeMkName <$> Assoc.genIdentifier -genRelName :: MonadGen m => m RelName +genRelName :: (MonadGen m) => m RelName genRelName = RelName <$> genNonEmptyText defaultRange -genRelType :: MonadGen m => m RelType +genRelType :: (MonadGen m) => m RelType genRelType = element [ObjRel, ArrRel] -genInsertOrder :: MonadGen m => m InsertOrder +genInsertOrder :: (MonadGen m) => m InsertOrder genInsertOrder = element [BeforeParent, AfterParent] genAnnComputedFieldBolExp :: - MonadGen m => - Hashable (ScalarType b) => - Hashable (Column b) => + (MonadGen m) => + (Hashable (ScalarType b)) => + (Hashable (Column b)) => m (TableName b) -> m (Column b) -> m (ScalarType b) -> @@ -236,9 +236,9 @@ genAnnComputedFieldBolExp genA genComputedFieldBoolExp :: - MonadGen m => - Hashable (ScalarType b) => - Hashable (Column b) => + (MonadGen m) => + (Hashable (ScalarType b)) => + (Hashable (Column b)) => m (TableName b) -> m (Column b) -> m (ScalarType b) -> @@ -284,12 +284,12 @@ genComputedFieldBoolExp genTableName ] -genComputedFieldName :: MonadGen m => m ComputedFieldName +genComputedFieldName :: (MonadGen m) => m ComputedFieldName genComputedFieldName = ComputedFieldName <$> genNonEmptyText defaultRange genOpExpG :: - MonadGen m => - Hashable (ScalarType b) => + (MonadGen m) => + (Hashable (ScalarType b)) => m (TableName b) -> m (Column b) -> m (ScalarType b) -> @@ -324,8 +324,8 @@ genOpExpG genTableName genColumn genScalarType genBooleanOperators genA = ACast <$> genHashMap genScalarType - ( list defaultRange $ - genOpExpG + ( list defaultRange + $ genOpExpG genTableName genColumn genScalarType @@ -356,7 +356,7 @@ genOpExpG genTableName genColumn genScalarType genBooleanOperators genA = genRootOrCurrent = element [IsRoot, IsCurrent] genColumnType :: - MonadGen m => + (MonadGen m) => m (TableName b) -> m 
(ScalarType b) -> m (ColumnType b) @@ -369,7 +369,7 @@ genColumnType genTableName genScalarType = <$> genEnumReference genTableName genEnumReference :: - MonadGen m => + (MonadGen m) => m (TableName b) -> m (EnumReference b) genEnumReference genTableName = @@ -384,14 +384,14 @@ genEnumReference genTableName = defaultRange ) -genEnumValue :: MonadGen m => m EnumValue +genEnumValue :: (MonadGen m) => m EnumValue genEnumValue = EnumValue <$> genGName defaultRange -genEnumValueInfo :: MonadGen m => m EnumValueInfo +genEnumValueInfo :: (MonadGen m) => m EnumValueInfo genEnumValueInfo = EnumValueInfo <$> maybe (genArbitraryUnicodeText defaultRange) genColumnInfo :: - MonadGen m => + (MonadGen m) => m (Column b) -> m (TableName b) -> m (ScalarType b) -> @@ -409,11 +409,11 @@ genColumnInfo <*> maybe (genDescription defaultRange) <*> genColumnMutability -genColumnMutability :: MonadGen m => m ColumnMutability +genColumnMutability :: (MonadGen m) => m ColumnMutability genColumnMutability = ColumnMutability <$> bool <*> bool genAnnotatedOrderByItemG :: - MonadGen m => + (MonadGen m) => m (BasicOrderType b) -> m (NullsOrderType b) -> m a -> @@ -425,9 +425,9 @@ genAnnotatedOrderByItemG genBasicOrderType genNullsOrderType genA = <*> maybe genNullsOrderType genAnnotatedOrderByElement :: - MonadGen m => - Hashable (ScalarType b) => - Hashable (Column b) => + (MonadGen m) => + (Hashable (ScalarType b)) => + (Hashable (Column b)) => m (Column b) -> m (TableName b) -> m (ScalarType b) -> @@ -515,7 +515,7 @@ genAnnotatedOrderByElement genA genAnnotatedAggregateOrderBy :: - MonadGen m => + (MonadGen m) => m (Column b) -> m (TableName b) -> m (ScalarType b) -> @@ -536,9 +536,9 @@ genAnnotatedAggregateOrderBy ] genComputedFieldOrderBy :: - MonadGen m => - Hashable (ScalarType b) => - Hashable (Column b) => + (MonadGen m) => + (Hashable (ScalarType b)) => + (Hashable (Column b)) => m (Column b) -> m (ScalarType b) -> m (TableName b) -> @@ -573,9 +573,9 @@ genComputedFieldOrderBy genA genComputedFieldOrderByElement :: - MonadGen m => - Hashable (ScalarType b) => - Hashable (Column b) => + (MonadGen m) => + (Hashable (ScalarType b)) => + (Hashable (Column b)) => m (Column b) -> m (ScalarType b) -> m (TableName b) -> @@ -616,5 +616,5 @@ genComputedFieldOrderByElement genScalarType ] -genIdentifier :: MonadGen m => m FIIdentifier +genIdentifier :: (MonadGen m) => m FIIdentifier genIdentifier = Hasura.RQL.IR.Select.FIIdentifier <$> genArbitraryUnicodeText defaultRange diff --git a/server/src-test/Hasura/RQL/IR/SelectSpec.hs b/server/src-test/Hasura/RQL/IR/SelectSpec.hs index fb4f06885fa..c95061343f7 100644 --- a/server/src-test/Hasura/RQL/IR/SelectSpec.hs +++ b/server/src-test/Hasura/RQL/IR/SelectSpec.hs @@ -15,7 +15,7 @@ newtype MyPair (b :: BackendType) r v = MyPair (r, v) deriving stock (Show) deriving newtype (Foldable, Bifoldable) -genMyPair :: MonadGen m => m r -> m v -> m (MyPair b r v) +genMyPair :: (MonadGen m) => m r -> m v -> m (MyPair b r v) genMyPair genR genV = do r <- genR v <- genV @@ -27,10 +27,11 @@ spec = do let singleton :: a -> [a] singleton x = [x] - it "bifoldMapAnnSelectG (const mempty) == foldMap" $ - hedgehog $ do + it "bifoldMapAnnSelectG (const mempty) == foldMap" + $ hedgehog + $ do annSelectG :: AnnSelectG ('Postgres 'Vanilla) (MyPair ('Postgres 'Vanilla) Int) Int <- - forAll $ - genAnnSelectG (int defaultRange) (genMyPair (int defaultRange) (int defaultRange)) + forAll + $ genAnnSelectG (int defaultRange) (genMyPair (int defaultRange) (int defaultRange)) bifoldMapAnnSelectG (const []) 
singleton annSelectG === foldMap singleton annSelectG bifoldMapAnnSelectG singleton (const []) annSelectG === foldMap (foldMap $ bifoldMap singleton (const [])) (_asnFields annSelectG) diff --git a/server/src-test/Hasura/RQL/MetadataSpec.hs b/server/src-test/Hasura/RQL/MetadataSpec.hs index 56df716d318..edfed9b7b4c 100644 --- a/server/src-test/Hasura/RQL/MetadataSpec.hs +++ b/server/src-test/Hasura/RQL/MetadataSpec.hs @@ -50,37 +50,41 @@ spec = describe "Remote Relationship Metadata" do spec_roundtrip :: Spec spec_roundtrip = describe "JSON Roundtrip" do describe "Metadata" do - it "example remote relationship fragment" $ - hedgehog do + it "example remote relationship fragment" + $ hedgehog do metadata :: Metadata <- - evalAesonResult $ - J.fromJSON remote_relationship_metadata_fragment + evalAesonResult + $ J.fromJSON remote_relationship_metadata_fragment trippingJSONValue metadata describe "CreateFromSourceRelationship" do - it "'pg_create_remote_relationship' query" $ - hedgehog $ do + it "'pg_create_remote_relationship' query" + $ hedgehog + $ do let argument = mk_pg_remote_relationship_argument "create" ^?! key "args" cfsr :: (CreateFromSourceRelationship ('Postgres 'Vanilla)) <- evalAesonResult $ J.fromJSON argument trippingJSON cfsr - it "'pg_create_remote_relationship' query with the 'old' schema" $ - hedgehog $ do + it "'pg_create_remote_relationship' query with the 'old' schema" + $ hedgehog + $ do let argument = mk_pg_remote_relationship_old_argument "create" ^?! key "args" cfsr :: (CreateFromSourceRelationship ('Postgres 'Vanilla)) <- evalAesonResult $ J.fromJSON argument trippingJSON cfsr - it "'mssql_create_remote_relationship' query" $ - hedgehog $ do + it "'mssql_create_remote_relationship' query" + $ hedgehog + $ do let argument = mk_mssql_remote_relationship_argument "create" ^?! key "args" cfsr :: (CreateFromSourceRelationship 'MSSQL) <- evalAesonResult $ J.fromJSON argument trippingJSON cfsr - it "'bigquery_create_remote_relationship' query" $ - hedgehog $ do + it "'bigquery_create_remote_relationship' query" + $ hedgehog + $ do let argument = mk_bigquery_remote_relationship_argument "create" ^?! key "args" cfsr :: (CreateFromSourceRelationship 'BigQuery) <- evalAesonResult $ J.fromJSON argument @@ -205,8 +209,8 @@ spec_query_tags_examples = ] \(mdisabled, mformat, momit_request_id) -> it ("decodes with (disabled, format, omit_request_id) set to " <> show (mdisabled, mformat, momit_request_id)) do - decodesJSON @RQLMetadataV1 $ - J.object + decodesJSON @RQLMetadataV1 + $ J.object [ "type" J..= J.String "set_query_tags", "args" J..= J.object @@ -279,9 +283,9 @@ mk_backend_remote_relationship_argument :: Text -> Text -> J.Value mk_backend_remote_relationship_argument backend action = backend_create_remote_relationship_fragment & _Object - %~ KM.insert - (Key.fromText "type") - (J.String $ backend <> "_" <> action <> "_remote_relationship") + %~ KM.insert + (Key.fromText "type") + (J.String $ backend <> "_" <> action <> "_remote_relationship") -- | Constructor for @v1/metadata@ @mssql_(create|update|delete)_remote_relationship@ -- arguments using the new, unified schema. @@ -309,13 +313,14 @@ mk_pg_remote_relationship_argument action = mk_bigquery_remote_relationship_argument :: Text -> J.Value mk_bigquery_remote_relationship_argument action = mk_backend_remote_relationship_argument "bigquery" action - & key "args" . key "table" - .~ J.Object - ( KM.fromList - [ ("name", "profiles"), - ("dataset", "test") - ] - ) + & key "args" + . 
key "table" + .~ J.Object + ( KM.fromList + [ ("name", "profiles"), + ("dataset", "test") + ] + ) -- | Constructor for @v1/metadata@ @pg_(create|update|delete)_remote_relationship@ -- arguments using the old, non-unified schema. @@ -323,9 +328,9 @@ mk_pg_remote_relationship_old_argument :: Text -> J.Value mk_pg_remote_relationship_old_argument action = fragment & _Object - %~ KM.insert - (Key.fromText "type") - (J.String $ "pg_" <> action <> "_remote_relationship") + %~ KM.insert + (Key.fromText "type") + (J.String $ "pg_" <> action <> "_remote_relationship") where fragment = [yamlQQ| @@ -348,9 +353,9 @@ mk_pg_remote_relationship_old_new_argument :: Text -> J.Value mk_pg_remote_relationship_old_new_argument action = fragment & _Object - %~ KM.insert - (Key.fromText "type") - (J.String $ "pg_" <> action <> "_remote_relationship") + %~ KM.insert + (Key.fromText "type") + (J.String $ "pg_" <> action <> "_remote_relationship") where fragment = [yamlQQ| @@ -434,8 +439,8 @@ rejectsJSON :: forall a. (HasCallStack, Typeable a, FromJSON a) => String -> J.V rejectsJSON message value = case J.fromJSON @a value of J.Error err -> err `shouldContain` message J.Success _ -> - expectationFailure $ - mconcat + expectationFailure + $ mconcat [ "expected parsing ", show $ typeRep $ Proxy @a, " to fail, but it succeeded" diff --git a/server/src-test/Hasura/RQL/PermissionSpec.hs b/server/src-test/Hasura/RQL/PermissionSpec.hs index 3e2ab6165db..23121581000 100644 --- a/server/src-test/Hasura/RQL/PermissionSpec.hs +++ b/server/src-test/Hasura/RQL/PermissionSpec.hs @@ -39,11 +39,12 @@ booleanPermissionSpec = do HashMap.fromList $ [(role3Name, ActionPermissionInfo role3Name), (inheritedRole1Name, ActionPermissionInfo inheritedRole1Name)] processedPermissions = mkBooleanPermissionMap ActionPermissionInfo metadataPermissions orderedRoles describe "Action Permissions" $ do - it "overrides the inherited permission for a role if permission already exists in the metadata" $ - HashMap.lookup inheritedRole1Name processedPermissions - `shouldBe` (Just (ActionPermissionInfo inheritedRole1Name)) - it "when a role doesn't have a metadata permission and at least one of its parents has, then the inherited role should inherit the permission" $ - HashMap.lookup inheritedRole2Name processedPermissions - `shouldBe` (Just (ActionPermissionInfo inheritedRole2Name)) - it "when a role doesn't have a metadata permission and none of the parents have permissions, then the inherited role should not inherit the permission" $ - HashMap.lookup inheritedRole3Name processedPermissions `shouldBe` Nothing + it "overrides the inherited permission for a role if permission already exists in the metadata" + $ HashMap.lookup inheritedRole1Name processedPermissions + `shouldBe` (Just (ActionPermissionInfo inheritedRole1Name)) + it "when a role doesn't have a metadata permission and at least one of its parents has, then the inherited role should inherit the permission" + $ HashMap.lookup inheritedRole2Name processedPermissions + `shouldBe` (Just (ActionPermissionInfo inheritedRole2Name)) + it "when a role doesn't have a metadata permission and none of the parents have permissions, then the inherited role should not inherit the permission" + $ HashMap.lookup inheritedRole3Name processedPermissions + `shouldBe` Nothing diff --git a/server/src-test/Hasura/RQL/Types/AllowlistSpec.hs b/server/src-test/Hasura/RQL/Types/AllowlistSpec.hs index fb6e73deca4..0ffa5416e77 100644 --- a/server/src-test/Hasura/RQL/Types/AllowlistSpec.hs +++ 
b/server/src-test/Hasura/RQL/Types/AllowlistSpec.hs @@ -116,13 +116,14 @@ spec = do it "round-trips roles when serializing via codecs" do let expected = - maybeToEither "nonempty" $ - AllowlistScopeRoles <$> traverse mkRoleName ["viewer", "admin"] + maybeToEither "nonempty" + $ AllowlistScopeRoles + <$> traverse mkRoleName ["viewer", "admin"] let json = toJSONViaCodec <$> expected let actual = parseEither parseJSONViaCodec =<< json actual `shouldBe` expected -mustJSON :: J.FromJSON a => J.Value -> a +mustJSON :: (J.FromJSON a) => J.Value -> a mustJSON v = case J.parseEither J.parseJSON v of Left err -> error err Right x -> x diff --git a/server/src-test/Hasura/RQL/Types/CommonSpec.hs b/server/src-test/Hasura/RQL/Types/CommonSpec.hs index 4e99fa6a368..7c8df62aaca 100644 --- a/server/src-test/Hasura/RQL/Types/CommonSpec.hs +++ b/server/src-test/Hasura/RQL/Types/CommonSpec.hs @@ -86,16 +86,16 @@ pgConnectionStringFromParamsSpec = commentSpec :: Spec commentSpec = describe "Comment" $ do - prop "should roundtrip between Comment and Maybe Text" $ - \str -> + prop "should roundtrip between Comment and Maybe Text" + $ \str -> let text = Text.pack <$> str in (commentToMaybeText . commentFromMaybeText) text `shouldBe` text withRecordInconsistencyEqualSpec :: Spec withRecordInconsistencyEqualSpec = describe "withRecordInconsistency" do - prop "Should equal withRecordInconsistencyM" $ - \inputMetadata (errOrUnit :: Either QErr ()) -> + prop "Should equal withRecordInconsistencyM" + $ \inputMetadata (errOrUnit :: Either QErr ()) -> let arrowInputArr = ErrorA (arr (const errOrUnit)) arrow = withRecordInconsistency @_ @InconsistentMetadata arrowInputArr arrowOutput = diff --git a/server/src-test/Hasura/RQL/Types/EndpointSpec.hs b/server/src-test/Hasura/RQL/Types/EndpointSpec.hs index 9364121b258..1a035ada19c 100644 --- a/server/src-test/Hasura/RQL/Types/EndpointSpec.hs +++ b/server/src-test/Hasura/RQL/Types/EndpointSpec.hs @@ -58,8 +58,9 @@ spec = describe "Endpoint" $ do describe "ambiguousPaths" $ do let amb = map fst . ambiguousPaths - it "empty trie" $ - amb emptyTrie `shouldBe` [] + it "empty trie" + $ amb emptyTrie + `shouldBe` [] prop "param/literal at start" $ \(t :: TestTrie) -> do let t' = inserts [PathParam] ["GET", "POST"] 42 $ inserts [PathLiteral 0] ["POST", "PUT"] 43 t diff --git a/server/src-test/Hasura/RQL/WebhookTransformsSpec.hs b/server/src-test/Hasura/RQL/WebhookTransformsSpec.hs index 57cd8f4a328..3857330310e 100644 --- a/server/src-test/Hasura/RQL/WebhookTransformsSpec.hs +++ b/server/src-test/Hasura/RQL/WebhookTransformsSpec.hs @@ -32,20 +32,30 @@ import Test.Hspec.Hedgehog (hedgehog) spec :: Spec spec = describe "WebhookTransform" do - it "Method RoundTrip" . hedgehog $ - forAll genMethod >>= trippingJSON + it "Method RoundTrip" + . hedgehog + $ forAll genMethod + >>= trippingJSON - it "StringTemplateText RoundTrip" . hedgehog $ - forAll genUnescapedTemplate >>= trippingJSON + it "StringTemplateText RoundTrip" + . hedgehog + $ forAll genUnescapedTemplate + >>= trippingJSON - it "Url RoundTrip" . hedgehog $ - forAll genUrl >>= trippingJSON + it "Url RoundTrip" + . hedgehog + $ forAll genUrl + >>= trippingJSON - it "Template RoundTrip" . hedgehog $ - forAll genTemplate >>= trippingJSON + it "Template RoundTrip" + . hedgehog + $ forAll genTemplate + >>= trippingJSON - it "TemplateEngine RoundTrip" . hedgehog $ - forAll genTemplatingEngine >>= trippingJSON + it "TemplateEngine RoundTrip" + . hedgehog + $ forAll genTemplatingEngine + >>= trippingJSON it "TransformHeaders" . 
hedgehog $ do headers <- forAll genTransformHeaders @@ -59,14 +69,20 @@ spec = describe "WebhookTransform" do let sortH (WithOptional Nothing) = WithOptional Nothing sortH (WithOptional (Just (HeadersTransformFn_ (Headers.AddReplaceOrRemove (Headers.AddReplaceOrRemoveFields {..}))))) = - WithOptional . Just . HeadersTransformFn_ . Headers.AddReplaceOrRemove $ - Headers.AddReplaceOrRemoveFields (sort addOrReplaceHeaders) (sort removeHeaders) + WithOptional + . Just + . HeadersTransformFn_ + . Headers.AddReplaceOrRemove + $ Headers.AddReplaceOrRemoveFields (sort addOrReplaceHeaders) (sort removeHeaders) let sortQ (WithOptional Nothing) = WithOptional Nothing sortQ (WithOptional (Just (QueryParamsTransformFn_ (QueryParams.AddOrReplace qs)))) = - WithOptional . Just . QueryParamsTransformFn_ . QueryParams.AddOrReplace $ - sortOn fst qs + WithOptional + . Just + . QueryParamsTransformFn_ + . QueryParams.AddOrReplace + $ sortOn fst qs sortQ (WithOptional (Just (QueryParamsTransformFn_ (QueryParams.ParamTemplate qs)))) = WithOptional . Just . QueryParamsTransformFn_ . QueryParams.ParamTemplate $ qs diff --git a/server/src-test/Hasura/SQL/BackendMapSpec.hs b/server/src-test/Hasura/SQL/BackendMapSpec.hs index 13eb82b45ee..eb6e0ed1c4f 100644 --- a/server/src-test/Hasura/SQL/BackendMapSpec.hs +++ b/server/src-test/Hasura/SQL/BackendMapSpec.hs @@ -25,8 +25,8 @@ spec = describe "BackendMap" do let mssqlConfig = BM.singleton @'MSSQL @BackendConfigWrapper (BackendConfigWrapper ()) let dataconnectorConfig = BM.singleton @'DataConnector @BackendConfigWrapper - ( BackendConfigWrapper $ - Map.singleton + ( BackendConfigWrapper + $ Map.singleton (fromRight' $ mkDataConnectorName $ fromJust $ GQL.mkName "MyConnector") ( DataConnectorOptions { _dcoUri = fromRight' $ S.parseBaseUrl "https://somehost.org/", diff --git a/server/src-test/Hasura/SQL/WKTSpec.hs b/server/src-test/Hasura/SQL/WKTSpec.hs index 1344b19b140..5e24ffa884c 100644 --- a/server/src-test/Hasura/SQL/WKTSpec.hs +++ b/server/src-test/Hasura/SQL/WKTSpec.hs @@ -29,13 +29,13 @@ negativeSpecs = mkPoint $ mkPosition 1 2 (Just (-1)) ] -runNegativeSpec :: forall a. ToWKT a => String -> [a] -> Spec +runNegativeSpec :: forall a. (ToWKT a) => String -> [a] -> Spec runNegativeSpec name = it name . traverse_ go where go wkt = getWKT <$> toWKT wkt `shouldSatisfy` isLeft -runSpec :: forall a. ToWKT a => String -> [(Text, a)] -> Spec +runSpec :: forall a. (ToWKT a) => String -> [(Text, a)] -> Spec runSpec name = it name . 
traverse_ go where go (t, wkt) = @@ -182,8 +182,8 @@ geometryCollectionSpec = [ ( "GEOMETRYCOLLECTION (POINT (1.0 2.0), LINESTRING (3.0 4.0, 5.0 6.0))", mkGeometryCollection [ G.GPoint $ mkPoint $ mkPosition 1 2 Nothing, - G.GLineString $ - mkLineString + G.GLineString + $ mkLineString [ mkPosition 3 4 Nothing, mkPosition 5 6 Nothing ] @@ -192,19 +192,19 @@ geometryCollectionSpec = ( "GEOMETRYCOLLECTION (POINT (1.1 -2.2), MULTIPOINT (1.1 2.2, 4.4 5.5), LINESTRING (1.1 2.2, 4.4 5.5, 7.7 8.8), MULTILINESTRING ((1.1 2.2, 4.4 5.5, 7.7 8.8), (1.1 2.2, 4.4 5.5)), POLYGON ((1.1 2.2, 4.4 5.5, 7.7 8.8, 1.1 2.2), (1.1 2.2, 4.4 5.5, 7.7 8.8, 1.0 1.0, 1.1 2.2)), MULTIPOLYGON (((1.1 2.2, 4.4 5.5, 7.7 8.8, 1.1 2.2)), ((1.1 2.2, 4.4 5.5, 7.7 8.8, 1.1 2.2), (1.1 2.2, 4.4 5.5, 7.7 8.8, 1.0 1.0, 1.1 2.2))))", mkGeometryCollection [ G.GPoint $ mkPoint $ mkPosition 1.1 (-2.2) Nothing, - G.GMultiPoint $ - mkMultiPoint + G.GMultiPoint + $ mkMultiPoint [ mkPosition 1.1 2.2 Nothing, mkPosition 4.4 5.5 Nothing ], - G.GLineString $ - mkLineString + G.GLineString + $ mkLineString [ mkPosition 1.1 2.2 Nothing, mkPosition 4.4 5.5 Nothing, mkPosition 7.7 8.8 Nothing ], - G.GMultiLineString $ - mkMultiLineString + G.GMultiLineString + $ mkMultiLineString [ mkLineString [ mkPosition 1.1 2.2 Nothing, mkPosition 4.4 5.5 Nothing, @@ -215,8 +215,8 @@ geometryCollectionSpec = mkPosition 4.4 5.5 Nothing ] ], - G.GPolygon $ - mkPolygon + G.GPolygon + $ mkPolygon [ mkLinearRing [ mkPosition 1.1 2.2 Nothing, mkPosition 4.4 5.5 Nothing, @@ -229,8 +229,8 @@ geometryCollectionSpec = mkPosition 1.0 1.0 Nothing ] ], - G.GMultiPolygon $ - mkMultiPolygon + G.GMultiPolygon + $ mkMultiPolygon [ mkPolygon [ mkLinearRing [ mkPosition 1.1 2.2 Nothing, diff --git a/server/src-test/Hasura/Server/Auth/JWTSpec.hs b/server/src-test/Hasura/Server/Auth/JWTSpec.hs index 22732209275..d19de16ae0b 100644 --- a/server/src-test/Hasura/Server/Auth/JWTSpec.hs +++ b/server/src-test/Hasura/Server/Auth/JWTSpec.hs @@ -81,7 +81,7 @@ determineJwkExpiryLifetimeTests = describe "determineJwkExpiryLifetime" $ do result <- determineJwkExpiryLifetime' [expires, cacheControl] result `shouldBe` (Left ()) -determineJwkExpiryLifetime' :: MonadIO m => ResponseHeaders -> m (Either () (Maybe UTCTime)) +determineJwkExpiryLifetime' :: (MonadIO m) => ResponseHeaders -> m (Either () (Maybe UTCTime)) determineJwkExpiryLifetime' headers = discardJwkFetchError <$> runExceptT (JWT.determineJwkExpiryLifetime (pure currentTimeForTest) voidLogger headers) diff --git a/server/src-test/Hasura/Server/AuthSpec.hs b/server/src-test/Hasura/Server/AuthSpec.hs index e2d11e15964..036ab1779a5 100644 --- a/server/src-test/Hasura/Server/AuthSpec.hs +++ b/server/src-test/Hasura/Server/AuthSpec.hs @@ -135,8 +135,8 @@ getUserInfoWithExpTimeTests = describe "getUserInfo" $ do describe "unauth role set" $ do mode <- - runIO $ - setupAuthMode'E (Just $ Set.singleton $ hashAdminSecret "secret") Nothing mempty (Just ourUnauthRole) + runIO + $ setupAuthMode'E (Just $ Set.singleton $ hashAdminSecret "secret") Nothing mempty (Just ourUnauthRole) it "accepts when admin secret matches" $ do getUserInfoWithExpTime mempty [(adminSecretHeader, "secret")] mode `shouldReturn` Right adminRoleName @@ -173,8 +173,8 @@ getUserInfoWithExpTimeTests = describe "getUserInfo" $ do -- Unauthorized role is not supported for webhook describe "webhook" $ do mode <- - runIO $ - setupAuthMode'E (Just $ Set.singleton $ hashAdminSecret "secret") (Just fakeAuthHook) mempty Nothing + runIO + $ setupAuthMode'E (Just $ Set.singleton $ 
hashAdminSecret "secret") (Just fakeAuthHook) mempty Nothing it "accepts when admin secret matches" $ do getUserInfoWithExpTime mempty [(adminSecretHeader, "secret")] mode @@ -221,8 +221,8 @@ getUserInfoWithExpTimeTests = describe "getUserInfo" $ do describe "JWT" $ do describe "unauth role NOT set" $ do mode <- - runIO $ - setupAuthMode'E (Just $ Set.singleton $ hashAdminSecret "secret") Nothing [fakeJWTConfig] Nothing + runIO + $ setupAuthMode'E (Just $ Set.singleton $ hashAdminSecret "secret") Nothing [fakeJWTConfig] Nothing it "accepts when admin secret matches" $ do getUserInfoWithExpTime mempty [(adminSecretHeader, "secret")] mode @@ -254,8 +254,8 @@ getUserInfoWithExpTimeTests = describe "getUserInfo" $ do describe "unauth role set" $ do mode <- - runIO $ - setupAuthMode'E + runIO + $ setupAuthMode'E (Just $ Set.singleton $ hashAdminSecret "secret") Nothing [fakeJWTConfig] @@ -293,23 +293,23 @@ getUserInfoWithExpTimeTests = describe "getUserInfo" $ do describe "when Authorization header sent, and no admin secret" $ do modeA <- - runIO $ - setupAuthMode'E + runIO + $ setupAuthMode'E (Just $ Set.singleton $ hashAdminSecret "secret") Nothing [fakeJWTConfig] (Just ourUnauthRole) modeB <- - runIO $ - setupAuthMode'E + runIO + $ setupAuthMode'E (Just $ Set.singleton $ hashAdminSecret "secret") Nothing [fakeJWTConfig] Nothing -- Here the unauth role does not come into play at all, so map same tests over both modes: - forM_ [(modeA, "with unauth role set"), (modeB, "with unauth role NOT set")] $ - \(mode, modeMsg) -> describe modeMsg $ do + forM_ [(modeA, "with unauth role set"), (modeB, "with unauth role NOT set")] + $ \(mode, modeMsg) -> describe modeMsg $ do it "authorizes successfully with JWT when requested role allowed" $ do let claim = unObject @@ -446,10 +446,10 @@ parseClaimsMapTests = describe "parseClaimMapTests" $ do describe "JWT configured with namespace key, the key is a text value which is expected to be at the root of the JWT token" $ do it "parses claims map from the JWT token with correct namespace " $ do let claimsObj = - unObject $ - [ "x-hasura-allowed-roles" .= (["user", "editor"] :: [Text]), - "x-hasura-default-role" .= ("user" :: Text) - ] + unObject + $ [ "x-hasura-allowed-roles" .= (["user", "editor"] :: [Text]), + "x-hasura-default-role" .= ("user" :: Text) + ] let obj = unObject $ ["claims_map" .= claimsObj] claimsSet = mkClaimsSetWithUnregisteredClaims obj parseClaimsMap_ claimsSet (JCNamespace (ClaimNs "claims_map") defaultClaimsFormat) @@ -457,10 +457,10 @@ parseClaimsMapTests = describe "parseClaimMapTests" $ do it "doesn't parse claims map from the JWT token with wrong namespace " $ do let claimsObj = - unObject $ - [ "x-hasura-allowed-roles" .= (["user", "editor"] :: [Text]), - "x-hasura-default-role" .= ("user" :: Text) - ] + unObject + $ [ "x-hasura-allowed-roles" .= (["user", "editor"] :: [Text]), + "x-hasura-default-role" .= ("user" :: Text) + ] let obj = unObject $ ["claims_map" .= claimsObj] claimsSet = mkClaimsSetWithUnregisteredClaims obj parseClaimsMap_ claimsSet (JCNamespace (ClaimNs "wrong_claims_map") defaultClaimsFormat) @@ -469,12 +469,12 @@ parseClaimsMapTests = describe "parseClaimMapTests" $ do describe "JWT configured with namespace JSON path, JSON path to the claims map" $ do it "parse claims map from the JWT token using claims namespace JSON Path" $ do let unregisteredClaims = - unObject $ - [ "x-hasura-allowed-roles" .= (["user", "editor"] :: [Text]), - "x-hasura-default-role" .= ("user" :: Text), - "sub" .= ("random" :: Text), - "exp" 
.= (1626420800 :: Int) -- we ignore these non session variables, in the response - ] + unObject + $ [ "x-hasura-allowed-roles" .= (["user", "editor"] :: [Text]), + "x-hasura-default-role" .= ("user" :: Text), + "sub" .= ("random" :: Text), + "exp" .= (1626420800 :: Int) -- we ignore these non session variables, in the response + ] claimsSetWithSub = (JWT.emptyClaimsSet & JWT.claimSub .~ Just "random") & JWT.unregisteredClaims .~ KM.toMapText unregisteredClaims parseClaimsMap_ claimsSetWithSub (JCNamespace (ClaimNsPath (mkJSONPathE "$")) defaultClaimsFormat) @@ -483,10 +483,10 @@ parseClaimsMapTests = describe "parseClaimMapTests" $ do it "throws error while attempting to parse claims map from the JWT token with a wrong namespace JSON Path" $ do let claimsObj = - unObject $ - [ "x-hasura-allowed-roles" .= (["user", "editor"] :: [Text]), - "x-hasura-default-role" .= ("user" :: Text) - ] + unObject + $ [ "x-hasura-allowed-roles" .= (["user", "editor"] :: [Text]), + "x-hasura-default-role" .= ("user" :: Text) + ] obj = unObject $ ["hasura_claims" .= claimsObj] claimsSet = mkClaimsSetWithUnregisteredClaims obj parseClaimsMap_ claimsSet (JCNamespace (ClaimNsPath (mkJSONPathE "$.claims")) defaultClaimsFormat) @@ -494,16 +494,16 @@ parseClaimsMapTests = describe "parseClaimMapTests" $ do describe "JWT configured with custom JWT claims" $ do let rolesObj = - unObject $ - [ "allowed" .= (["user", "editor"] :: [Text]), - "default" .= ("user" :: Text) - ] + unObject + $ [ "allowed" .= (["user", "editor"] :: [Text]), + "default" .= ("user" :: Text) + ] userId = unObject ["id" .= ("1" :: Text)] obj = - unObject $ - [ "roles" .= rolesObj, - "user" .= userId - ] + unObject + $ [ "roles" .= rolesObj, + "user" .= userId + ] claimsSet = mkClaimsSetWithUnregisteredClaims obj userIdClaim = mkSessionVariable "x-hasura-user-id" @@ -591,8 +591,8 @@ mkCustomAllowedRoleClaim claimPath defVal = case claimPath of Just path -> JWTCustomClaimsMapJSONPath (mkJSONPathE path) $ defAllowedRoles Nothing -> - JWTCustomClaimsMapStatic $ - fromMaybe (mkRoleNameE <$> ["user", "editor"]) defAllowedRoles + JWTCustomClaimsMapStatic + $ fromMaybe (mkRoleNameE <$> ["user", "editor"]) defAllowedRoles where defAllowedRoles = fmap mkRoleNameE <$> defVal @@ -634,15 +634,15 @@ setupAuthMode' :: m (Either () AuthMode) setupAuthMode' mAdminSecretHash mWebHook jwtSecrets mUnAuthRole = do httpManager <- liftIO $ HTTP.newManager HTTP.defaultManagerSettings - fmap (mapLeft $ const ()) $ - runExceptT $ - setupAuthMode - (fromMaybe Set.empty mAdminSecretHash) - mWebHook - jwtSecrets - mUnAuthRole - (Logger $ void . return) - httpManager + fmap (mapLeft $ const ()) + $ runExceptT + $ setupAuthMode + (fromMaybe Set.empty mAdminSecretHash) + mWebHook + jwtSecrets + mUnAuthRole + (Logger $ void . 
return) + httpManager mkClaimsSetWithUnregisteredClaims :: J.Object -> JWT.ClaimsSet mkClaimsSetWithUnregisteredClaims unregisteredClaims = diff --git a/server/src-test/Hasura/Server/Migrate/VersionSpec.hs b/server/src-test/Hasura/Server/Migrate/VersionSpec.hs index 6ef557a8267..9f015cce4f4 100644 --- a/server/src-test/Hasura/Server/Migrate/VersionSpec.hs +++ b/server/src-test/Hasura/Server/Migrate/VersionSpec.hs @@ -39,9 +39,9 @@ spec = do it "cannot read any other non-integral number" $ hedgehog do expected <- - forAll $ - Gen.filter (/= 0.8) $ - Gen.float (Range.constantFrom 0 (-1e6) 1e6) + forAll + $ Gen.filter (/= 0.8) + $ Gen.float (Range.constantFrom 0 (-1e6) 1e6) let input = show expected let version :: Either String MetadataCatalogVersion = readEither input assert $ isLeft version diff --git a/server/src-test/Hasura/Server/VersionSpec.hs b/server/src-test/Hasura/Server/VersionSpec.hs index ea088fa3676..234b16ecf9f 100644 --- a/server/src-test/Hasura/Server/VersionSpec.hs +++ b/server/src-test/Hasura/Server/VersionSpec.hs @@ -35,7 +35,8 @@ versions = spec :: Spec spec = describe "console assets version" do - parallel $ - for_ versions \(input, output) -> - it ("versionToAssetsVersion returns expected output for " <> T.unpack input) $ - versionToAssetsVersion (fromText input) `shouldBe` output + parallel + $ for_ versions \(input, output) -> + it ("versionToAssetsVersion returns expected output for " <> T.unpack input) + $ versionToAssetsVersion (fromText input) + `shouldBe` output diff --git a/server/src-test/Test/Aeson/Expectation.hs b/server/src-test/Test/Aeson/Expectation.hs index fedd5a87035..ba2cd20355c 100644 --- a/server/src-test/Test/Aeson/Expectation.hs +++ b/server/src-test/Test/Aeson/Expectation.hs @@ -22,11 +22,13 @@ import Test.Hspec shouldBeSubsetOf :: J.Value -> J.Value -> IO () shouldBeSubsetOf subset superset | subset `jsonSubsetOf` superset = return () shouldBeSubsetOf subset superset = - expectationFailure $ - T.unpack $ - decodeUtf8 $ - LBS.toStrict $ - AP.encodePretty subset <> " is not a subset of " <> AP.encodePretty superset + expectationFailure + $ T.unpack + $ decodeUtf8 + $ LBS.toStrict + $ AP.encodePretty subset + <> " is not a subset of " + <> AP.encodePretty superset -- | Compute whether one json value 'sub' is a subset of another value 'sup', in the sense that: -- @@ -44,8 +46,8 @@ jsonSubsetOf _sub _sup = False subobjectOf :: J.KeyMap J.Value -> J.KeyMap J.Value -> Bool subobjectOf sub sup = - J.foldr (&&) True $ - J.alignWith + J.foldr (&&) True + $ J.alignWith ( \case This _ -> False -- key is only in the sub That _ -> True -- key is only in sup diff --git a/server/src-test/Test/Aeson/Utils.hs b/server/src-test/Test/Aeson/Utils.hs index beae5c35ed1..39cc55566ee 100644 --- a/server/src-test/Test/Aeson/Utils.hs +++ b/server/src-test/Test/Aeson/Utils.hs @@ -33,19 +33,22 @@ showType = show $ typeRep (Proxy :: Proxy a) testFromJSON :: (HasCallStack, Eq a, Show a, FromJSON a) => a -> Value -> Spec testFromJSON a v = do - it "parses from JSON" $ - parseEither parseJSON v `shouldBe` Right a + it "parses from JSON" + $ parseEither parseJSON v + `shouldBe` Right a testToFromJSON :: (HasCallStack, Eq a, Show a, FromJSON a, ToJSON a) => a -> Value -> Spec testToFromJSON a v = do testFromJSON a v - it "encodes to JSON" $ - toJSON a `shouldBe` v + it "encodes to JSON" + $ toJSON a + `shouldBe` v validateToJSONOpenApi :: (HasCallStack, ToJSON a, ToSchema a) => a -> Spec validateToJSONOpenApi a = do - it "value validates against OpenAPI schema" $ - 
validatePrettyToJSON a `shouldBe` Nothing + it "value validates against OpenAPI schema" + $ validatePrettyToJSON a + `shouldBe` Nothing testToFromJSONToSchema :: (HasCallStack, Eq a, Show a, FromJSON a, ToJSON a, ToSchema a) => a -> Value -> Spec testToFromJSONToSchema a v = do @@ -54,15 +57,17 @@ testToFromJSONToSchema a v = do jsonRoundTrip :: forall a. (HasCallStack, Typeable a, Eq a, Show a, FromJSON a, ToJSON a) => Gen a -> Spec jsonRoundTrip gen = - it ("JSON roundtrips " <> showType @a) $ - hedgehog $ do + it ("JSON roundtrips " <> showType @a) + $ hedgehog + $ do a <- forAll gen tripping a toJSON (parseEither parseJSON) jsonEncodingEqualsValue :: (HasCallStack, Show a, ToJSON a) => Gen a -> Spec jsonEncodingEqualsValue gen = - it "JSON encoding equals value" $ - hedgehog $ do + it "JSON encoding equals value" + $ hedgehog + $ do a <- forAll gen let encoded = encode a decoded = decode encoded :: Maybe Value @@ -75,8 +80,9 @@ jsonProperties gen = do validateAgainstOpenApiSchema :: (HasCallStack, Show a, ToJSON a, ToSchema a) => Gen a -> Spec validateAgainstOpenApiSchema gen = do - it "ToJSON validates against OpenAPI schema" $ - hedgehog $ do + it "ToJSON validates against OpenAPI schema" + $ hedgehog + $ do a <- forAll gen validatePrettyToJSON a === Nothing @@ -85,14 +91,14 @@ jsonOpenApiProperties gen = do jsonProperties gen validateAgainstOpenApiSchema gen -genKeyMap :: MonadGen m => m value -> m (KM.KeyMap value) +genKeyMap :: (MonadGen m) => m value -> m (KM.KeyMap value) genKeyMap genKMValue = KM.fromList . map (first K.fromText) <$> Gen.list (linear 0 5) ((,) <$> Gen.text (linear 0 5) Gen.unicode <*> genKMValue) -genObject :: MonadGen m => m Object +genObject :: (MonadGen m) => m Object genObject = genKeyMap genValue -genValue :: MonadGen m => m Value +genValue :: (MonadGen m) => m Value genValue = Gen.recursive Gen.choice diff --git a/server/src-test/Test/Backend/Postgres/Misc.hs b/server/src-test/Test/Backend/Postgres/Misc.hs index 30f6c5c1c15..e2f4f4c2d11 100644 --- a/server/src-test/Test/Backend/Postgres/Misc.hs +++ b/server/src-test/Test/Backend/Postgres/Misc.hs @@ -75,40 +75,40 @@ descColumn = Expect.mkColumnInfo descColumnBuilder textOld :: UnpreparedValue PG textOld = - UVParameter Unknown $ - ColumnValue + UVParameter Unknown + $ ColumnValue { cvType = ColumnScalar PGText, cvValue = PGValText "old name" } textNew :: UnpreparedValue PG textNew = - UVParameter Unknown $ - ColumnValue + UVParameter Unknown + $ ColumnValue { cvType = ColumnScalar PGText, cvValue = PGValText "new name" } textOther :: UnpreparedValue PG textOther = - UVParameter Unknown $ - ColumnValue + UVParameter Unknown + $ ColumnValue { cvType = ColumnScalar PGText, cvValue = PGValText "other" } integerOne :: UnpreparedValue PG integerOne = - UVParameter Unknown $ - ColumnValue + UVParameter Unknown + $ ColumnValue { cvType = ColumnScalar PGInteger, cvValue = PGValInteger 1 } integerTwo :: UnpreparedValue PG integerTwo = - UVParameter Unknown $ - ColumnValue + UVParameter Unknown + $ ColumnValue { cvType = ColumnScalar PGInteger, cvValue = PGValInteger 2 } diff --git a/server/src-test/Test/Backend/Postgres/Update.hs b/server/src-test/Test/Backend/Postgres/Update.hs index ad192cd0e1b..08a6eb774e8 100644 --- a/server/src-test/Test/Backend/Postgres/Update.hs +++ b/server/src-test/Test/Backend/Postgres/Update.hs @@ -75,6 +75,9 @@ runMultipleUpdates TestBuilder {..} = } case Update.mkUpdateCTE @'Vanilla upd of (Update.MultiUpdate ctes) -> - SI.fromText . 
toSQLTxt <$> ctes - `shouldBe` SI.fromText <$> expectedSQL + SI.fromText + . toSQLTxt + <$> ctes + `shouldBe` SI.fromText + <$> expectedSQL _ -> assertFailure "expected update_many, got single update" diff --git a/server/src-test/Test/Parser/Field.hs b/server/src-test/Test/Parser/Field.hs index 7aef300e57f..81751a8d9b8 100644 --- a/server/src-test/Test/Parser/Field.hs +++ b/server/src-test/Test/Parser/Field.hs @@ -42,8 +42,9 @@ field = where fieldExp :: String -> ExpQ fieldExp input = do - either fail TH.lift $ - runExcept $ do + either fail TH.lift + $ runExcept + $ do parsed <- hoistEither $ Parser.parseOnly (Parser.skipSpace *> GraphQL.field @GraphQL.Name) . T.pack $ input fixField parsed diff --git a/server/src-test/Test/Parser/Monad.hs b/server/src-test/Test/Parser/Monad.hs index 37301c88aa6..09b7facb7e1 100644 --- a/server/src-test/Test/Parser/Monad.hs +++ b/server/src-test/Test/Parser/Monad.hs @@ -37,9 +37,9 @@ import Test.HUnit.Lang (assertFailure) -- | Placeholder value for test inputs that are not relevant yet. notImplementedYet :: (HasCallStack) => String -> a notImplementedYet thing = - withFrozenCallStack $ - error $ - ( unlines + withFrozenCallStack + $ error + $ ( unlines [ "\"" ++ thing ++ "\" is not yet defined, because it hasn't been touched by tests yet.", "If you see this message you likely need to provide/mock a value here" ] diff --git a/server/test-mssql/Database/MSSQL/TransactionSpec.hs b/server/test-mssql/Database/MSSQL/TransactionSpec.hs index 1a569c40eb8..4513a0e460d 100644 --- a/server/test-mssql/Database/MSSQL/TransactionSpec.hs +++ b/server/test-mssql/Database/MSSQL/TransactionSpec.hs @@ -196,7 +196,7 @@ transactionStateTests = -- -- Please also note that we are discarding 'Left's from "setup" transactions -- (all but the last transaction). See the 'runSetup' helper below. -run :: forall a. Eq a => Show a => TestCase a -> SpecWith ConnectionString +run :: forall a. (Eq a) => (Show a) => TestCase a -> SpecWith ConnectionString run TestCase {..} = it description \connString -> do case reverse transactions of @@ -206,9 +206,9 @@ run TestCase {..} = runSetup connString (reverse leadingTransactions) -- Get the result from the last transaction. result <- - runInConn connString $ - runQueries runWith $ - unTransaction mainTransaction + runInConn connString + $ runQueries runWith + $ unTransaction mainTransaction case (result, expectation) of -- Validate the error is the one we were expecting. (Left err, Left expected) -> @@ -219,13 +219,17 @@ run TestCase {..} = -- Expected success but got error. Needs special case because the expected -- Left is a validator (function). (Left err, Right expected) -> - expectationFailure $ - "Expected " <> show expected <> " but got error: " <> show err + expectationFailure + $ "Expected " + <> show expected + <> " but got error: " + <> show err -- Expected error but got success. Needs special case because the expected -- Left is a validator (function). 
           (Right res, Left _) ->
-            expectationFailure $
-              "Expected error but got success: " <> show res
+            expectationFailure
+              $ "Expected error but got success: "
+              <> show res
   where
     runSetup :: ConnectionString -> [Transaction] -> IO ()
     runSetup _ [] = pure ()
diff --git a/server/test-postgres/Main.hs b/server/test-postgres/Main.hs
index a63f08ab8d7..eda581a2a9e 100644
--- a/server/test-postgres/Main.hs
+++ b/server/test-postgres/Main.hs
@@ -59,13 +59,15 @@ main = do
   env <- getEnvironment
   let envMap = Env.mkEnvironment env

-  pgUrlText <- flip onLeft printErrExit $
-    runWithEnv env $ do
+  pgUrlText <- flip onLeft printErrExit
+    $ runWithEnv env
+    $ do
       let envVar = _envVar databaseUrlOption
       maybeV <- considerEnv envVar
-      onNothing maybeV $
-        throwError $
-          "Expected: " <> envVar
+      onNothing maybeV
+        $ throwError
+        $ "Expected: "
+        <> envVar

   let pgConnInfo = PG.ConnInfo 1 $ PG.CDDatabaseURI $ txtToBs pgUrlText
       urlConf = UrlValue $ InputWebhook $ mkPlainURLTemplate pgUrlText
@@ -126,8 +128,8 @@ main = do
       cacheBuildParams = CacheBuildParams httpManager (mkPgSourceResolver print) mkMSSQLSourceResolver staticConfig

   (_appInit, appEnv) <-
-    lowerManagedT $
-      initialiseAppEnv
+    lowerManagedT
+      $ initialiseAppEnv
         envMap
         globalCtx
         serveOptions
@@ -158,10 +160,10 @@ main = do
   eventTriggerLogCleanupSuite <- EventTriggerCleanupSuite.buildEventTriggerCleanupSuite

   hspec do
-    describe "Migrate suite" $
-      beforeAll setupCacheRef $
-        describe "Hasura.Server.Migrate" $
-          MigrateSuite.suite sourceConfig pgContext pgConnInfo
+    describe "Migrate suite"
+      $ beforeAll setupCacheRef
+      $ describe "Hasura.Server.Migrate"
+      $ MigrateSuite.suite sourceConfig pgContext pgConnInfo
     describe "Streaming subscription suite" $ streamingSubscriptionSuite
     describe "Event trigger log cleanup suite" $ eventTriggerLogCleanupSuite
diff --git a/server/test-postgres/Test/Hasura/EventTriggerCleanupSuite.hs b/server/test-postgres/Test/Hasura/EventTriggerCleanupSuite.hs
index 8aef30b90a3..56fd135d886 100644
--- a/server/test-postgres/Test/Hasura/EventTriggerCleanupSuite.hs
+++ b/server/test-postgres/Test/Hasura/EventTriggerCleanupSuite.hs
@@ -30,13 +30,15 @@ buildEventTriggerCleanupSuite :: IO Spec
 buildEventTriggerCleanupSuite = do
   env <- getEnvironment

-  pgUrlText :: Text <- flip onLeft (printErrExit . T.pack) $
-    runWithEnv env $ do
+  pgUrlText :: Text <- flip onLeft (printErrExit . T.pack)
+    $ runWithEnv env
+    $ do
       let envVar = _envVar databaseUrlOption
       maybeV <- considerEnv envVar
-      onNothing maybeV $
-        throwError $
-          "Expected: " <> envVar
+      onNothing maybeV
+        $ throwError
+        $ "Expected: "
+        <> envVar

   let pgConnInfo = PG.ConnInfo 1 $ PG.CDDatabaseURI $ txtToBs pgUrlText
@@ -281,7 +283,7 @@ triggerLogCleanupConfig shouldDelInv =
 -- * Utils

 -- | Stringifies QErrs and throws them.
-runExceptQErr :: MonadFail m => ExceptT QErr m a -> m a
+runExceptQErr :: (MonadFail m) => ExceptT QErr m a -> m a
 runExceptQErr ex = runExceptT ex >>= (`onLeft` (fail . T.unpack . showQErr))

 -- | Print QErr
@@ -291,7 +293,8 @@ printErrExit = (*> exitFailure) . T.putStrLn
 -- | Returns a count of cleanup schedules based on status
 getCleanupStatusCount :: TriggerName -> Text -> PG.TxE QErr Int
 getCleanupStatusCount triggername status =
-  runIdentity . PG.getRow
+  runIdentity
+    . PG.getRow
     <$> PG.withQE
       defaultTxErrorHandler
       [PG.sql|
diff --git a/server/test-postgres/Test/Hasura/StreamingSubscriptionSuite.hs b/server/test-postgres/Test/Hasura/StreamingSubscriptionSuite.hs
index 1df8fed4b82..17e2073e84d 100644
--- a/server/test-postgres/Test/Hasura/StreamingSubscriptionSuite.hs
+++ b/server/test-postgres/Test/Hasura/StreamingSubscriptionSuite.hs
@@ -55,13 +55,15 @@ buildStreamingSubscriptionSuite :: IO Spec
 buildStreamingSubscriptionSuite = do
   env <- getEnvironment

-  pgUrlText :: Text <- flip onLeft (printErrExit . T.pack) $
-    runWithEnv env $ do
+  pgUrlText :: Text <- flip onLeft (printErrExit . T.pack)
+    $ runWithEnv env
+    $ do
       let envVar = _envVar databaseUrlOption
       maybeV <- considerEnv envVar
-      onNothing maybeV $
-        throwError $
-          "Expected: " <> envVar
+      onNothing maybeV
+        $ throwError
+        $ "Expected: "
+        <> envVar

   let pgConnInfo = PG.ConnInfo 1 $ PG.CDDatabaseURI $ txtToBs pgUrlText
@@ -70,9 +72,9 @@ buildStreamingSubscriptionSuite = do
   let pgContext = mkPGExecCtx PG.ReadCommitted pgPool NeverResizePool
       dbSourceConfig = PGSourceConfig pgContext pgConnInfo Nothing (pure ()) defaultPostgresExtensionsSchema mempty ConnTemplate_NotApplicable

-  pure $
-    describe "Streaming subscriptions polling tests" $
-      streamingSubscriptionPollingSpec dbSourceConfig
+  pure
+    $ describe "Streaming subscriptions polling tests"
+    $ streamingSubscriptionPollingSpec dbSourceConfig

 mkRoleNameE :: Text -> RoleName
 mkRoleNameE = fromMaybe (error "Use a non empty string") . mkRoleName
@@ -197,30 +199,33 @@ streamingSubscriptionPollingSpec srcConfig = do
     let subscriber1 = mkSubscriber subscriberId1
         subscriber2 = mkSubscriber subscriberId2
     let initialCursorValue = HashMap.singleton Name._id (TELit "1")
-    cohort1 <- runIO $
-      liftIO $
-        STM.atomically $ do
-          cohort1' <- mkNewCohort cohortId1 initialCursorValue
-          -- adding a subscriber to the newly created cohort
-          addSubscriberToCohort subscriber1 cohort1'
-          pure cohort1'
+    cohort1 <- runIO
+      $ liftIO
+      $ STM.atomically
+      $ do
+        cohort1' <- mkNewCohort cohortId1 initialCursorValue
+        -- adding a subscriber to the newly created cohort
+        addSubscriberToCohort subscriber1 cohort1'
+        pure cohort1'
     cohortMap <- runIO $ liftIO $ STM.atomically $ TMap.new
     let cohortKey1 = mkCohortVariables' (mkUnsafeValidateVariables initialCursorValue)
     cohortId2 <- runIO newCohortId
-    cohort2 <- runIO $
-      liftIO $
-        STM.atomically $ do
-          cohort2' <- mkNewCohort cohortId2 initialCursorValue
-          addSubscriberToCohort subscriber2 cohort2'
-          pure cohort2'
+    cohort2 <- runIO
+      $ liftIO
+      $ STM.atomically
+      $ do
+        cohort2' <- mkNewCohort cohortId2 initialCursorValue
+        addSubscriberToCohort subscriber2 cohort2'
+        pure cohort2'
     let mkCohortKey n = cohortKey1 & cvCursorVariables . unValidatedVariables . ix [G.name|id|] .~ TELit n
         cohortKey2 = mkCohortKey "2"
         cohortKey3 = mkCohortKey "3"

     describe "after first poll, the key of the cohort should be updated to contain the next cursor value" $ do
-      runIO $
-        STM.atomically $ do
+      runIO
+        $ STM.atomically
+        $ do
           TMap.reset cohortMap
           TMap.insert cohort1 cohortKey1 cohortMap
@@ -256,8 +261,10 @@ streamingSubscriptionPollingSpec srcConfig = do
       let currentCohort2 = HashMap.lookup cohortKey3 currentCohortMap
       (originalCohort2StaticSnapshot, currentCohort2StaticSnapshot) <-
-        STM.atomically $
-          (,) <$> getStaticCohortSnapshot cohort2 <*> traverse getStaticCohortSnapshot currentCohort2
+        STM.atomically
+          $ (,)
+          <$> getStaticCohortSnapshot cohort2
+          <*> traverse getStaticCohortSnapshot currentCohort2
       Just originalCohort2StaticSnapshot `shouldBe` currentCohort2StaticSnapshot

     it "deleting a cohort concurrently should not retain the deleted cohort in the cohort map" $ do
@@ -306,8 +313,8 @@ streamingSubscriptionPollingSpec srcConfig = do
       cohortKey2CohortSnapshot <- STM.atomically $ traverse getStaticCohortSnapshot cohortKey2Cohort
       _cssNewSubscribers <$> cohortKey2CohortSnapshot `shouldSatisfy` all (notElem temporarySubscriberId)
       _cssExistingSubscribers <$> cohortKey2CohortSnapshot `shouldSatisfy` all (notElem temporarySubscriberId)
-      STM.atomically $
-        TMap.delete temporarySubscriberId (_cNewSubscribers cohort1)
+      STM.atomically
+        $ TMap.delete temporarySubscriberId (_cNewSubscribers cohort1)

     it "deleting a subscriber from a cohort should not retain the subscriber in any of the cohorts" $ do
       temporarySubscriberId <- newSubscriberId
@@ -333,16 +340,20 @@ streamingSubscriptionPollingSpec srcConfig = do
       cohortKey2CohortSnapshot <- STM.atomically $ traverse getStaticCohortSnapshot cohortKey2Cohort

       -- check the deleted subscriber in the older cohort
-      _cssExistingSubscribers <$> cohortKey1CohortSnapshot
+      _cssExistingSubscribers
+        <$> cohortKey1CohortSnapshot
         `shouldSatisfy` (\existingSubs -> temporarySubscriberId `notElem` concat (maybeToList existingSubs))
-      _cssNewSubscribers <$> cohortKey1CohortSnapshot
+      _cssNewSubscribers
+        <$> cohortKey1CohortSnapshot
         `shouldSatisfy` (\newSubs -> temporarySubscriberId `notElem` concat (maybeToList newSubs))
-      _cssExistingSubscribers <$> cohortKey2CohortSnapshot
+      _cssExistingSubscribers
+        <$> cohortKey2CohortSnapshot
         `shouldSatisfy` (\existingSubs -> temporarySubscriberId `notElem` concat (maybeToList existingSubs))
-      _cssNewSubscribers <$> cohortKey2CohortSnapshot
+      _cssNewSubscribers
+        <$> cohortKey2CohortSnapshot
         `shouldSatisfy` (\newSubs -> temporarySubscriberId `notElem` concat (maybeToList newSubs))
-      STM.atomically $
-        TMap.delete temporarySubscriberId (_cNewSubscribers cohort1)
+      STM.atomically
+        $ TMap.delete temporarySubscriberId (_cNewSubscribers cohort1)

   describe "Adding two subscribers concurrently" $ do
     subscriptionState <- do
@@ -397,8 +408,8 @@ streamingSubscriptionPollingSpec srcConfig = do
     it "concurrently adding two subscribers should retain both of them in the poller map" $ do
       -- Adding two subscribers that query identical queries should be adding them into the same
       -- cohort of the same poller
-      liftIO $
-        Async.concurrently_
+      liftIO
+        $ Async.concurrently_
           (addStreamSubQuery subscriber1Metadata requestId1)
           (addStreamSubQuery subscriber2Metadata requestId2)