Separate data_connector_type_mappings resolve stage (#469)

<!-- Thank you for submitting this PR! :) -->

## Description

Fairly mechanical change to split the data connector type mappings
resolve step into its own folder / stage. No functional changes.

V3_GIT_ORIGIN_REV_ID: 805c9d056a64a47afa9005674298e7417e58dad7
This commit is contained in:
Daniel Harvey 2024-04-18 10:23:07 +01:00 committed by hasura-bot
parent 2cd9714e7f
commit 4b34cddd8a
7 changed files with 464 additions and 287 deletions

View File

@ -24,8 +24,8 @@ use open_dds::types::{BaseType, CustomTypeName, Deprecated, TypeName, TypeRefere
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, HashMap};
use super::metadata::DataConnectorTypeMappings;
use super::permission::{resolve_value_expression, ValueExpression};
use super::stages::data_connector_type_mappings;
use super::typecheck;
use super::types::{
collect_type_mapping_for_source, TypeMappingCollectionError, TypeMappingToCollect,
@ -162,8 +162,7 @@ pub fn resolve_command_source(
data_connectors: &data_connectors::DataConnectors,
object_types: &HashMap<Qualified<CustomTypeName>, ObjectTypeRepresentation>,
scalar_types: &HashMap<Qualified<CustomTypeName>, ScalarTypeRepresentation>,
data_connector_type_mappings: &DataConnectorTypeMappings,
data_connector_type_mappings: &data_connector_type_mappings::DataConnectorTypeMappings,
) -> Result<(), Error> {
if command.source.is_some() {
return Err(Error::DuplicateCommandSourceDefinition {

View File

@ -6,7 +6,6 @@ use serde::{Deserialize, Serialize};
use open_dds::{
commands::CommandName,
data_connector::DataConnectorName,
models::ModelName,
types::{CustomTypeName, TypeName},
};
@ -21,15 +20,15 @@ use crate::metadata::resolved::model::{
use crate::metadata::resolved::relationship::resolve_relationship;
use crate::metadata::resolved::subgraph::Qualified;
use crate::metadata::resolved::types::{
mk_name, resolve_object_type, resolve_output_type_permission, store_new_graphql_type,
ObjectTypeRepresentation,
mk_name, resolve_output_type_permission, store_new_graphql_type, ObjectTypeRepresentation,
};
use super::types::{
resolve_data_connector_type_mapping, resolve_object_boolean_expression_type,
ObjectBooleanExpressionType, ScalarTypeRepresentation, TypeMapping,
resolve_object_boolean_expression_type, ObjectBooleanExpressionType, ScalarTypeRepresentation,
};
use crate::metadata::resolved::stages::{
data_connector_type_mappings, data_connectors, graphql_config,
};
use crate::metadata::resolved::stages::{data_connectors, graphql_config};
/// Resolved and validated metadata for a project. Used internally in the v3 server.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
@ -42,69 +41,6 @@ pub struct Metadata {
pub graphql_config: graphql_config::GlobalGraphqlConfig,
}
/// Per-object-type view: data connector name -> NDC object type name -> resolved mapping.
pub type DataConnectorTypeMappingsForObjectType =
    HashMap<Qualified<DataConnectorName>, HashMap<String, TypeMapping>>;

/// All resolved type mappings, keyed first by the OpenDD object type they describe.
#[derive(Debug)]
pub struct DataConnectorTypeMappings(
    HashMap<Qualified<CustomTypeName>, DataConnectorTypeMappingsForObjectType>,
);

impl Default for DataConnectorTypeMappings {
    fn default() -> Self {
        Self::new()
    }
}

impl DataConnectorTypeMappings {
    /// Create an empty collection of type mappings.
    pub fn new() -> Self {
        Self(HashMap::new())
    }

    /// Look up the mapping for a (object type, data connector, NDC object type)
    /// triple, returning `None` if any level of the nested lookup is missing.
    pub fn get(
        &self,
        object_type_name: &Qualified<CustomTypeName>,
        data_connector_name: &Qualified<DataConnectorName>,
        data_connector_object_type: &str,
    ) -> Option<&TypeMapping> {
        self.0
            .get(object_type_name)
            .and_then(|connectors| {
                connectors
                    .get(data_connector_name)
                    .map(|data_connector_object_types| {
                        data_connector_object_types.get(data_connector_object_type)
                    })
            })
            .flatten()
    }

    /// Record a new mapping, erroring if one already exists for the same
    /// (object type, data connector, NDC object type) triple.
    fn insert(
        &mut self,
        object_type_name: &Qualified<CustomTypeName>,
        data_connector_name: &Qualified<DataConnectorName>,
        data_connector_object_type: &str,
        type_mapping: TypeMapping,
    ) -> Result<(), Error> {
        // `HashMap::insert` returns the previous value; `Some` means a duplicate entry.
        if self
            .0
            .entry(object_type_name.clone())
            .or_default()
            .entry(data_connector_name.clone())
            .or_default()
            .insert(data_connector_object_type.to_string(), type_mapping)
            .is_some()
        {
            return Err(Error::DuplicateDataConnectorTypeMapping {
                type_name: object_type_name.clone(),
                data_connector: data_connector_name.clone(),
                data_connector_object_type: data_connector_object_type.to_string(),
            });
        }
        Ok(())
    }
}
/*******************
Functions to validate and resolve OpenDD spec to internal metadata
*******************/
@ -112,28 +48,15 @@ pub fn resolve_metadata(
metadata_accessor: &open_dds::accessor::MetadataAccessor,
graphql_config: graphql_config::GraphqlConfig,
mut data_connectors: data_connectors::DataConnectors,
data_connector_type_mappings_output: data_connector_type_mappings::DataConnectorTypeMappingsOutput,
) -> Result<Metadata, Error> {
let mut existing_graphql_types: HashSet<ast::TypeName> = HashSet::new();
// we collect all the types with global id fields, and models with global id source for that field. this is used
// later for validation, such that a type with global id field must have at least one model with global id source
let mut global_id_enabled_types: HashMap<Qualified<CustomTypeName>, Vec<Qualified<ModelName>>> =
HashMap::new();
let mut apollo_federation_entity_enabled_types: HashMap<
Qualified<CustomTypeName>,
Option<Qualified<ModelName>>,
> = HashMap::new();
// resolve object types
let (data_connector_type_mappings, object_types) =
resolve_data_connector_type_mappings_and_objects(
metadata_accessor,
&data_connectors,
&mut existing_graphql_types,
&mut global_id_enabled_types,
&mut apollo_federation_entity_enabled_types,
)?;
let data_connector_type_mappings::DataConnectorTypeMappingsOutput {
mut existing_graphql_types,
mut global_id_enabled_types,
mut apollo_federation_entity_enabled_types,
object_types,
data_connector_type_mappings,
} = data_connector_type_mappings_output;
// resolve scalar types
let scalar_types = resolve_scalar_types(metadata_accessor, &mut existing_graphql_types)?;
@ -239,7 +162,7 @@ pub fn resolve_metadata(
fn resolve_commands(
metadata_accessor: &open_dds::accessor::MetadataAccessor,
data_connectors: &data_connectors::DataConnectors,
data_connector_type_mappings: &DataConnectorTypeMappings,
data_connector_type_mappings: &data_connector_type_mappings::DataConnectorTypeMappings,
object_types: &HashMap<Qualified<CustomTypeName>, ObjectTypeRepresentation>,
scalar_types: &HashMap<Qualified<CustomTypeName>, ScalarTypeRepresentation>,
) -> Result<IndexMap<Qualified<CommandName>, command::Command>, Error> {
@ -275,83 +198,6 @@ fn resolve_commands(
Ok(commands)
}
/// resolve object types, matching them to that in the data connectors
fn resolve_data_connector_type_mappings_and_objects(
metadata_accessor: &open_dds::accessor::MetadataAccessor,
data_connectors: &data_connectors::DataConnectors,
existing_graphql_types: &mut HashSet<ast::TypeName>,
global_id_enabled_types: &mut HashMap<Qualified<CustomTypeName>, Vec<Qualified<ModelName>>>,
apollo_federation_entity_enabled_types: &mut HashMap<
Qualified<CustomTypeName>,
Option<Qualified<open_dds::models::ModelName>>,
>,
) -> Result<
(
DataConnectorTypeMappings,
HashMap<Qualified<CustomTypeName>, ObjectTypeRepresentation>,
),
Error,
> {
let mut data_connector_type_mappings = DataConnectorTypeMappings::new();
let mut object_types = HashMap::new();
for open_dds::accessor::QualifiedObject {
subgraph,
object: object_type_definition,
} in &metadata_accessor.object_types
{
let qualified_object_type_name =
Qualified::new(subgraph.to_string(), object_type_definition.name.clone());
let resolved_object_type = resolve_object_type(
object_type_definition,
existing_graphql_types,
&qualified_object_type_name,
subgraph,
global_id_enabled_types,
apollo_federation_entity_enabled_types,
)?;
// resolve object types' type mappings
for dc_type_mapping in &object_type_definition.data_connector_type_mapping {
let qualified_data_connector_name = Qualified::new(
subgraph.to_string(),
dc_type_mapping.data_connector_name.clone(),
);
let type_mapping = resolve_data_connector_type_mapping(
dc_type_mapping,
&qualified_object_type_name,
subgraph,
&resolved_object_type,
data_connectors,
)
.map_err(|type_validation_error| {
Error::DataConnectorTypeMappingValidationError {
type_name: qualified_object_type_name.clone(),
error: type_validation_error,
}
})?;
data_connector_type_mappings.insert(
&qualified_object_type_name,
&qualified_data_connector_name,
&dc_type_mapping.data_connector_object_type,
type_mapping,
)?;
}
if object_types
.insert(qualified_object_type_name.clone(), resolved_object_type)
.is_some()
{
return Err(Error::DuplicateTypeDefinition {
name: qualified_object_type_name,
});
}
}
Ok((data_connector_type_mappings, object_types))
}
/// resolve scalar types
/// this currently works by mutating `existing_graphql_types`, we should try
/// and change this to return new values here and make the caller combine them together
@ -428,7 +274,7 @@ fn resolve_type_permissions(
fn resolve_boolean_expression_types(
metadata_accessor: &open_dds::accessor::MetadataAccessor,
data_connectors: &data_connectors::DataConnectors,
data_connector_type_mappings: &DataConnectorTypeMappings,
data_connector_type_mappings: &data_connector_type_mappings::DataConnectorTypeMappings,
object_types: &HashMap<Qualified<CustomTypeName>, ObjectTypeRepresentation>,
existing_graphql_types: &mut HashSet<ast::TypeName>,
) -> Result<HashMap<Qualified<CustomTypeName>, ObjectBooleanExpressionType>, Error> {
@ -545,7 +391,7 @@ fn resolve_data_connector_scalar_representations(
fn resolve_models(
metadata_accessor: &open_dds::accessor::MetadataAccessor,
data_connectors: &data_connectors::DataConnectors,
data_connector_type_mappings: &DataConnectorTypeMappings,
data_connector_type_mappings: &data_connector_type_mappings::DataConnectorTypeMappings,
object_types: &HashMap<Qualified<CustomTypeName>, ObjectTypeRepresentation>,
scalar_types: &HashMap<Qualified<CustomTypeName>, ScalarTypeRepresentation>,
existing_graphql_types: &mut HashSet<ast::TypeName>,

View File

@ -1,7 +1,6 @@
use super::metadata::DataConnectorTypeMappings;
use super::permission::{resolve_value_expression, ValueExpression};
use super::relationship::RelationshipTarget;
use super::stages::data_connectors;
use super::stages::{data_connector_type_mappings, data_connectors};
use super::typecheck;
use super::types::{
collect_type_mapping_for_source, NdcColumnForComparison, ObjectBooleanExpressionType,
@ -1279,7 +1278,7 @@ pub fn resolve_model_source(
data_connectors: &data_connectors::DataConnectors,
object_types: &HashMap<Qualified<CustomTypeName>, ObjectTypeRepresentation>,
scalar_types: &HashMap<Qualified<CustomTypeName>, ScalarTypeRepresentation>,
data_connector_type_mappings: &DataConnectorTypeMappings,
data_connector_type_mappings: &data_connector_type_mappings::DataConnectorTypeMappings,
) -> Result<(), Error> {
if model.source.is_some() {
return Err(Error::DuplicateModelSourceDefinition {

View File

@ -0,0 +1,343 @@
use std::collections::{BTreeMap, HashMap, HashSet};
pub mod types;
use open_dds::types::CustomTypeName;
pub use types::{DataConnectorTypeMappings, DataConnectorTypeMappingsOutput};
use crate::metadata::resolved::types::ObjectTypeRepresentation;
use crate::metadata::resolved::stages::data_connectors;
use crate::metadata::resolved::types::{
mk_name, store_new_graphql_type, FieldDefinition, FieldMapping,
ResolvedApolloFederationObjectKey, ResolvedObjectApolloFederationConfig, TypeMapping,
};
use crate::metadata::resolved::error::{Error, TypeMappingValidationError};
use crate::metadata::resolved::subgraph::{mk_qualified_type_reference, Qualified};
use indexmap::IndexMap;
use lang_graphql::ast::common as ast;
use open_dds::identifier;
/// resolve object types, matching them to that in the data connectors
/// resolve object types, matching them to that in the data connectors
///
/// Entry point of the `data_connector_type_mappings` stage: walks every object
/// type in the metadata, resolves the type itself and any data connector type
/// mappings declared against it, and returns the accumulated state
/// (graphql type names, global-id / apollo-federation bookkeeping, resolved
/// object types, type mappings) for later resolve stages to consume.
pub(crate) fn resolve(
    metadata_accessor: &open_dds::accessor::MetadataAccessor,
    data_connectors: &data_connectors::DataConnectors,
) -> Result<DataConnectorTypeMappingsOutput, Error> {
    let mut data_connector_type_mappings = DataConnectorTypeMappings::new();
    let mut object_types = HashMap::new();
    let mut existing_graphql_types = HashSet::new();
    let mut global_id_enabled_types = HashMap::new();
    let mut apollo_federation_entity_enabled_types = HashMap::new();
    for open_dds::accessor::QualifiedObject {
        subgraph,
        object: object_type_definition,
    } in &metadata_accessor.object_types
    {
        let qualified_object_type_name =
            Qualified::new(subgraph.to_string(), object_type_definition.name.clone());
        let resolved_object_type = resolve_object_type(
            object_type_definition,
            &mut existing_graphql_types,
            &qualified_object_type_name,
            subgraph,
            &mut global_id_enabled_types,
            &mut apollo_federation_entity_enabled_types,
        )?;

        // resolve object types' type mappings
        for dc_type_mapping in &object_type_definition.data_connector_type_mapping {
            let qualified_data_connector_name = Qualified::new(
                subgraph.to_string(),
                dc_type_mapping.data_connector_name.clone(),
            );
            let type_mapping = resolve_data_connector_type_mapping(
                dc_type_mapping,
                &qualified_object_type_name,
                subgraph,
                &resolved_object_type,
                data_connectors,
            )
            // wrap the specific validation error in the stage-level `Error`
            .map_err(|type_validation_error| {
                Error::DataConnectorTypeMappingValidationError {
                    type_name: qualified_object_type_name.clone(),
                    error: type_validation_error,
                }
            })?;
            data_connector_type_mappings.insert(
                &qualified_object_type_name,
                &qualified_data_connector_name,
                &dc_type_mapping.data_connector_object_type,
                type_mapping,
            )?;
        }

        // each object type may only be defined once across the metadata
        if object_types
            .insert(qualified_object_type_name.clone(), resolved_object_type)
            .is_some()
        {
            return Err(Error::DuplicateTypeDefinition {
                name: qualified_object_type_name,
            });
        }
    }
    Ok(DataConnectorTypeMappingsOutput {
        data_connector_type_mappings,
        object_types,
        existing_graphql_types,
        global_id_enabled_types,
        apollo_federation_entity_enabled_types,
    })
}
/// Resolve a single field definition, qualifying its type reference with the
/// subgraph the field was defined in.
fn resolve_field(
    field: &open_dds::types::FieldDefinition,
    subgraph: &str,
) -> Result<FieldDefinition, Error> {
    Ok(FieldDefinition {
        field_type: mk_qualified_type_reference(&field.field_type, subgraph),
        description: field.description.clone(),
        deprecated: field.deprecated.clone(),
    })
}
/// Resolve an OpenDD `ObjectTypeV1` into an `ObjectTypeRepresentation`,
/// validating fields, global id configuration, graphql type names and apollo
/// federation keys along the way.
///
/// Mutates the three accumulator collections (`existing_graphql_types`,
/// `global_id_enabled_types`, `apollo_federation_entity_enabled_types`) so
/// later stages can cross-check models against the types resolved here.
pub fn resolve_object_type(
    object_type_definition: &open_dds::types::ObjectTypeV1,
    existing_graphql_types: &mut HashSet<ast::TypeName>,
    qualified_type_name: &Qualified<CustomTypeName>,
    subgraph: &str,
    global_id_enabled_types: &mut HashMap<
        Qualified<CustomTypeName>,
        Vec<Qualified<open_dds::models::ModelName>>,
    >,
    apollo_federation_entity_enabled_types: &mut HashMap<
        Qualified<CustomTypeName>,
        Option<Qualified<open_dds::models::ModelName>>,
    >,
) -> Result<ObjectTypeRepresentation, Error> {
    let mut resolved_fields = IndexMap::new();
    let mut resolved_global_id_fields = Vec::new();

    // resolve each field, rejecting duplicate field names
    for field in &object_type_definition.fields {
        if resolved_fields
            .insert(field.name.clone(), resolve_field(field, subgraph)?)
            .is_some()
        {
            return Err(Error::DuplicateFieldDefinition {
                type_name: qualified_type_name.clone(),
                field_name: field.name.clone(),
            });
        }
    }
    match &object_type_definition.global_id_fields {
        Some(global_id_fields) => {
            if !global_id_fields.is_empty() {
                // Throw an error if the object type has a field called "id" and has global id fields configured.
                // Because, when the global id fields are configured, the `id` field will be auto-generated.
                if resolved_fields.contains_key(&open_dds::types::FieldName(identifier!("id"))) {
                    return Err(Error::IdFieldConflictingGlobalId {
                        type_name: qualified_type_name.clone(),
                    });
                }
                // To check if global_id_fields are defined in object type but no model has global_id_source set to
                // true:
                // - If the object type has globalIdFields configured, add the object type to the
                //   global_id_enabled_types map.
                global_id_enabled_types.insert(qualified_type_name.clone(), Vec::new());
            };
            // every declared global id field must exist on the object type
            for global_id_field in global_id_fields {
                if !resolved_fields.contains_key(global_id_field) {
                    return Err(Error::UnknownFieldInGlobalId {
                        field_name: global_id_field.clone(),
                        type_name: qualified_type_name.clone(),
                    });
                } else {
                    resolved_global_id_fields.push(global_id_field.clone())
                }
            }
        }
        None => {}
    }
    let (graphql_type_name, graphql_input_type_name, apollo_federation_config) =
        match object_type_definition.graphql.as_ref() {
            None => Ok::<_, Error>((None, None, None)),
            Some(graphql) => {
                // validate the configured graphql output and input type names
                let graphql_type_name = graphql
                    .type_name
                    .as_ref()
                    .map(|type_name| mk_name(type_name.0.as_ref()).map(ast::TypeName))
                    .transpose()?;
                let graphql_input_type_name = graphql
                    .input_type_name
                    .as_ref()
                    .map(|input_type_name| mk_name(input_type_name.0.as_ref()).map(ast::TypeName))
                    .transpose()?;
                // To check if apolloFederation.keys are defined in object type but no model has
                // apollo_federation_entity_source set to true:
                // - If the object type has apolloFederation.keys configured, add the object type to the
                //   apollo_federation_entity_enabled_types map.
                let resolved_apollo_federation_config = match &graphql.apollo_federation {
                    None => Ok(None),
                    Some(apollo_federation) => {
                        // Validate that the fields in the apollo federation keys are defined in the object type
                        let mut resolved_keys: Vec<ResolvedApolloFederationObjectKey> = Vec::new();
                        for key in &apollo_federation.keys {
                            let mut resolved_key_fields = Vec::new();
                            for field in &key.fields {
                                if !resolved_fields.contains_key(field) {
                                    return Err(Error::UnknownFieldInApolloFederationKey {
                                        field_name: field.clone(),
                                        object_type: qualified_type_name.clone(),
                                    });
                                }
                                resolved_key_fields.push(field.clone());
                            }
                            // each key must name at least one field
                            let resolved_key =
                                match nonempty::NonEmpty::from_vec(resolved_key_fields) {
                                    None => {
                                        return Err(
                                            Error::EmptyFieldsInApolloFederationConfigForObject {
                                                object_type: qualified_type_name.clone(),
                                            },
                                        )
                                    }
                                    Some(fields) => ResolvedApolloFederationObjectKey { fields },
                                };
                            resolved_keys.push(resolved_key);
                        }
                        apollo_federation_entity_enabled_types
                            .insert(qualified_type_name.clone(), None);
                        // at least one key must be configured overall
                        match nonempty::NonEmpty::from_vec(resolved_keys) {
                            None => Err(Error::EmptyKeysInApolloFederationConfigForObject {
                                object_type: qualified_type_name.clone(),
                            }),
                            Some(keys) => Ok(Some(ResolvedObjectApolloFederationConfig { keys })),
                        }
                    }
                }?;
                Ok((
                    graphql_type_name,
                    graphql_input_type_name,
                    resolved_apollo_federation_config,
                ))
            }
        }?;
    // graphql type names must be globally unique; these record and check them
    store_new_graphql_type(existing_graphql_types, graphql_type_name.as_ref())?;
    store_new_graphql_type(existing_graphql_types, graphql_input_type_name.as_ref())?;
    Ok(ObjectTypeRepresentation {
        fields: resolved_fields,
        relationships: IndexMap::new(),
        global_id_fields: resolved_global_id_fields,
        type_permissions: HashMap::new(),
        graphql_output_type_name: graphql_type_name,
        graphql_input_type_name,
        description: object_type_definition.description.clone(),
        apollo_federation_config,
    })
}
/// Resolve a given data connector type mapping
/// Resolve a given data connector type mapping
///
/// Matches each field of the resolved object type against the NDC object type
/// exposed by the data connector's schema; fields without an explicit mapping
/// fall back to a column with the same name as the field.
pub fn resolve_data_connector_type_mapping(
    data_connector_type_mapping: &open_dds::types::DataConnectorTypeMapping,
    qualified_type_name: &Qualified<CustomTypeName>,
    subgraph: &str,
    type_representation: &ObjectTypeRepresentation,
    data_connectors: &data_connectors::DataConnectors,
) -> Result<TypeMapping, TypeMappingValidationError> {
    let qualified_data_connector_name = Qualified::new(
        subgraph.to_string(),
        data_connector_type_mapping.data_connector_name.clone(),
    );
    // the referenced data connector must exist
    let data_connector_context = data_connectors
        .data_connectors
        .get(&qualified_data_connector_name)
        .ok_or_else(|| TypeMappingValidationError::UnknownDataConnector {
            data_connector: qualified_data_connector_name.clone(),
            type_name: qualified_type_name.clone(),
        })?;
    // ...and its schema must expose the named NDC object type
    let ndc_object_type = data_connector_context
        .schema
        .object_types
        .get(&data_connector_type_mapping.data_connector_object_type)
        .ok_or_else(|| TypeMappingValidationError::UnknownNdcType {
            type_name: qualified_type_name.clone(),
            unknown_ndc_type: data_connector_type_mapping
                .data_connector_object_type
                .clone(),
        })?;

    // Walk all the fields in the ObjectType, if there's a mapping for the field
    // use it, otherwise assume the destination column is the same name as the field.
    // At the end, if there are any mappings left over, these are invalid as they do not
    // exist in the actual ObjectType.
    let mut unconsumed_field_mappings = data_connector_type_mapping
        .field_mapping
        .0
        .iter()
        .collect::<HashMap<_, _>>();
    let mut resolved_field_mappings = BTreeMap::new();
    for field_name in type_representation.fields.keys() {
        let resolved_field_mapping_column: &str =
            if let Some(field_mapping) = unconsumed_field_mappings.remove(field_name) {
                match field_mapping {
                    open_dds::types::FieldMapping::Column(column_mapping) => &column_mapping.name,
                }
            } else {
                // If no mapping is defined for a field, implicitly create a mapping
                // with the same column name as the field.
                &field_name.0 .0
            };
        // the target column must exist on the NDC object type
        let source_column = get_column(ndc_object_type, field_name, resolved_field_mapping_column)?;
        let resolved_field_mapping = FieldMapping {
            column: resolved_field_mapping_column.to_string(),
            column_type: source_column.r#type.clone(),
        };

        let existing_mapping =
            resolved_field_mappings.insert(field_name.clone(), resolved_field_mapping);
        if existing_mapping.is_some() {
            return Err(TypeMappingValidationError::DuplicateFieldMapping {
                type_name: qualified_type_name.clone(),
                field_name: field_name.clone(),
            });
        }
    }
    // If any unconsumed field mappings, these do not exist in the actual ObjectType
    if !unconsumed_field_mappings.is_empty() {
        let mut unconsumed_field_names = unconsumed_field_mappings
            .into_keys()
            .cloned()
            .collect::<Vec<_>>();
        // sort so the error output is deterministic
        unconsumed_field_names.sort();
        return Err(TypeMappingValidationError::UnknownSourceFields {
            type_name: qualified_type_name.clone(),
            field_names: unconsumed_field_names,
        });
    }

    let resolved_type_mapping = TypeMapping::Object {
        ndc_object_type_name: data_connector_type_mapping
            .data_connector_object_type
            .to_string(),
        field_mappings: resolved_field_mappings,
    };

    Ok(resolved_type_mapping)
}
/// Look up `column` on the NDC object type, erroring if the target column
/// does not exist on the data connector side.
fn get_column<'a>(
    ndc_type: &'a ndc_models::ObjectType,
    field_name: &open_dds::types::FieldName,
    column: &str,
) -> Result<&'a ndc_models::ObjectField, TypeMappingValidationError> {
    ndc_type
        .fields
        .get(column)
        .ok_or(TypeMappingValidationError::UnknownTargetColumn {
            field_name: field_name.clone(),
            column_name: column.to_string(),
        })
}

View File

@ -0,0 +1,86 @@
use std::collections::{HashMap, HashSet};
use open_dds::{models::ModelName, types::CustomTypeName};
use crate::metadata::resolved::types::ObjectTypeRepresentation;
use crate::metadata::resolved::types::TypeMapping;
use crate::metadata::resolved::error::Error;
use crate::metadata::resolved::subgraph::Qualified;
use lang_graphql::ast::common as ast;
use open_dds::data_connector::DataConnectorName;
/// Per-object-type view: data connector name -> NDC object type name -> resolved mapping.
pub type DataConnectorTypeMappingsForObjectType =
    HashMap<Qualified<DataConnectorName>, HashMap<String, TypeMapping>>;

/// All resolved type mappings, keyed first by the OpenDD object type they describe.
#[derive(Debug)]
pub struct DataConnectorTypeMappings(
    HashMap<Qualified<CustomTypeName>, DataConnectorTypeMappingsForObjectType>,
);

impl Default for DataConnectorTypeMappings {
    fn default() -> Self {
        Self::new()
    }
}

impl DataConnectorTypeMappings {
    /// Create an empty collection of type mappings.
    pub fn new() -> Self {
        Self(HashMap::new())
    }

    /// Look up the mapping for a (object type, data connector, NDC object type)
    /// triple, returning `None` if any level of the nested lookup is missing.
    pub fn get(
        &self,
        object_type_name: &Qualified<CustomTypeName>,
        data_connector_name: &Qualified<DataConnectorName>,
        data_connector_object_type: &str,
    ) -> Option<&TypeMapping> {
        self.0
            .get(object_type_name)
            .and_then(|connectors| {
                connectors
                    .get(data_connector_name)
                    .map(|data_connector_object_types| {
                        data_connector_object_types.get(data_connector_object_type)
                    })
            })
            .flatten()
    }

    /// Record a new mapping, erroring if one already exists for the same
    /// (object type, data connector, NDC object type) triple.
    pub fn insert(
        &mut self,
        object_type_name: &Qualified<CustomTypeName>,
        data_connector_name: &Qualified<DataConnectorName>,
        data_connector_object_type: &str,
        type_mapping: TypeMapping,
    ) -> Result<(), Error> {
        // `HashMap::insert` returns the previous value; `Some` means a duplicate entry.
        if self
            .0
            .entry(object_type_name.clone())
            .or_default()
            .entry(data_connector_name.clone())
            .or_default()
            .insert(data_connector_object_type.to_string(), type_mapping)
            .is_some()
        {
            return Err(Error::DuplicateDataConnectorTypeMapping {
                type_name: object_type_name.clone(),
                data_connector: data_connector_name.clone(),
                data_connector_object_type: data_connector_object_type.to_string(),
            });
        }
        Ok(())
    }
}
/// output of `data_connector_type_mappings` step
///
/// Everything accumulated while resolving object types and their data
/// connector type mappings, handed on to the later resolve stages.
pub struct DataConnectorTypeMappingsOutput {
    // graphql type names claimed so far; used to keep graphql names unique
    pub existing_graphql_types: HashSet<ast::TypeName>,
    // types with global id fields -> models with global id source
    // (populated with empty Vecs here; presumably filled in by later stages — verify against callers)
    pub global_id_enabled_types: HashMap<Qualified<CustomTypeName>, Vec<Qualified<ModelName>>>,
    // types with apollo federation keys -> the model acting as entity source, if any
    pub apollo_federation_entity_enabled_types:
        HashMap<Qualified<CustomTypeName>, Option<Qualified<open_dds::models::ModelName>>>,
    // resolved type mappings per object type / data connector
    pub data_connector_type_mappings: DataConnectorTypeMappings,
    // all resolved object type representations, keyed by qualified name
    pub object_types: HashMap<Qualified<CustomTypeName>, ObjectTypeRepresentation>,
}

View File

@ -1,3 +1,4 @@
pub mod data_connector_type_mappings;
pub mod data_connectors;
/// This is where we'll be moving explicit metadata resolve stages
pub mod graphql_config;
@ -17,5 +18,13 @@ pub fn resolve(metadata: open_dds::Metadata) -> Result<Metadata, Error> {
let data_connectors = data_connectors::resolve(&metadata_accessor)?;
resolve_metadata(&metadata_accessor, graphql_config, data_connectors)
let data_connector_type_mappings =
data_connector_type_mappings::resolve(&metadata_accessor, &data_connectors)?;
resolve_metadata(
&metadata_accessor,
graphql_config,
data_connectors,
data_connector_type_mappings,
)
}

View File

@ -1,4 +1,4 @@
use crate::metadata::resolved::error::{BooleanExpressionError, Error, TypeMappingValidationError};
use crate::metadata::resolved::error::{BooleanExpressionError, Error};
use crate::metadata::resolved::relationship::Relationship;
use crate::metadata::resolved::subgraph::{
mk_qualified_type_reference, Qualified, QualifiedBaseType, QualifiedTypeName,
@ -13,16 +13,14 @@ use open_dds::identifier;
use open_dds::models::EnableAllOrSpecific;
use open_dds::permissions::{Role, TypeOutputPermission, TypePermissionsV1};
use open_dds::types::{
self, CustomTypeName, DataConnectorTypeMapping, Deprecated, FieldName,
ObjectBooleanExpressionTypeV1, ObjectTypeV1,
self, CustomTypeName, Deprecated, FieldName, ObjectBooleanExpressionTypeV1, ObjectTypeV1,
};
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, HashMap, HashSet};
use std::str::FromStr;
use super::metadata::DataConnectorTypeMappings;
use super::ndc_validation::{get_underlying_named_type, NDCValidationError};
use super::stages::data_connectors;
use super::stages::{data_connector_type_mappings, data_connectors};
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, derive_more::Display)]
#[display(fmt = "Display")]
@ -262,109 +260,6 @@ pub fn resolve_object_type(
})
}
/// Look up `column` on the NDC object type, erroring if the target column
/// does not exist on the data connector side.
pub fn get_column<'a>(
    ndc_type: &'a ndc_models::ObjectType,
    field_name: &FieldName,
    column: &str,
) -> Result<&'a ndc_models::ObjectField, TypeMappingValidationError> {
    ndc_type
        .fields
        .get(column)
        .ok_or(TypeMappingValidationError::UnknownTargetColumn {
            field_name: field_name.clone(),
            column_name: column.to_string(),
        })
}
/// Resolve a given data connector type mapping
///
/// Matches each field of the resolved object type against the NDC object type
/// exposed by the data connector's schema; fields without an explicit mapping
/// fall back to a column with the same name as the field.
pub fn resolve_data_connector_type_mapping(
    data_connector_type_mapping: &DataConnectorTypeMapping,
    qualified_type_name: &Qualified<CustomTypeName>,
    subgraph: &str,
    type_representation: &ObjectTypeRepresentation,
    data_connectors: &data_connectors::DataConnectors,
) -> Result<TypeMapping, TypeMappingValidationError> {
    let qualified_data_connector_name = Qualified::new(
        subgraph.to_string(),
        data_connector_type_mapping.data_connector_name.clone(),
    );
    // the referenced data connector must exist
    let data_connector_context = data_connectors
        .data_connectors
        .get(&qualified_data_connector_name)
        .ok_or_else(|| TypeMappingValidationError::UnknownDataConnector {
            data_connector: qualified_data_connector_name.clone(),
            type_name: qualified_type_name.clone(),
        })?;
    // ...and its schema must expose the named NDC object type
    let ndc_object_type = data_connector_context
        .schema
        .object_types
        .get(&data_connector_type_mapping.data_connector_object_type)
        .ok_or_else(|| TypeMappingValidationError::UnknownNdcType {
            type_name: qualified_type_name.clone(),
            unknown_ndc_type: data_connector_type_mapping
                .data_connector_object_type
                .clone(),
        })?;

    // Walk all the fields in the ObjectType, if there's a mapping for the field
    // use it, otherwise assume the destination column is the same name as the field.
    // At the end, if there are any mappings left over, these are invalid as they do not
    // exist in the actual ObjectType.
    let mut unconsumed_field_mappings = data_connector_type_mapping
        .field_mapping
        .0
        .iter()
        .collect::<HashMap<_, _>>();
    let mut resolved_field_mappings = BTreeMap::new();
    for field_name in type_representation.fields.keys() {
        let resolved_field_mapping_column: &str =
            if let Some(field_mapping) = unconsumed_field_mappings.remove(field_name) {
                match field_mapping {
                    types::FieldMapping::Column(column_mapping) => &column_mapping.name,
                }
            } else {
                // If no mapping is defined for a field, implicitly create a mapping
                // with the same column name as the field.
                &field_name.0 .0
            };
        // the target column must exist on the NDC object type
        let source_column = get_column(ndc_object_type, field_name, resolved_field_mapping_column)?;
        let resolved_field_mapping = FieldMapping {
            column: resolved_field_mapping_column.to_string(),
            column_type: source_column.r#type.clone(),
        };

        let existing_mapping =
            resolved_field_mappings.insert(field_name.clone(), resolved_field_mapping);
        if existing_mapping.is_some() {
            return Err(TypeMappingValidationError::DuplicateFieldMapping {
                type_name: qualified_type_name.clone(),
                field_name: field_name.clone(),
            });
        }
    }
    // If any unconsumed field mappings, these do not exist in the actual ObjectType
    if !unconsumed_field_mappings.is_empty() {
        let mut unconsumed_field_names = unconsumed_field_mappings
            .into_keys()
            .cloned()
            .collect::<Vec<_>>();
        // sort so the error output is deterministic
        unconsumed_field_names.sort();
        return Err(TypeMappingValidationError::UnknownSourceFields {
            type_name: qualified_type_name.clone(),
            field_names: unconsumed_field_names,
        });
    }

    let resolved_type_mapping = TypeMapping::Object {
        ndc_object_type_name: data_connector_type_mapping
            .data_connector_object_type
            .to_string(),
        field_mappings: resolved_field_mappings,
    };

    Ok(resolved_type_mapping)
}
#[derive(Debug)]
/// we do not want to store our types like this, but occasionally it is useful
/// for pattern matching
@ -469,7 +364,7 @@ pub(crate) fn resolve_object_boolean_expression_type(
subgraph: &str,
data_connectors: &data_connectors::DataConnectors,
object_types: &HashMap<Qualified<CustomTypeName>, ObjectTypeRepresentation>,
data_connector_type_mappings: &DataConnectorTypeMappings,
data_connector_type_mappings: &data_connector_type_mappings::DataConnectorTypeMappings,
existing_graphql_types: &mut HashSet<ast::TypeName>,
) -> Result<ObjectBooleanExpressionType, Error> {
// name of the boolean expression
@ -646,7 +541,7 @@ pub enum TypeMappingCollectionError {
pub(crate) fn collect_type_mapping_for_source(
mapping_to_collect: &TypeMappingToCollect,
data_connector_type_mappings: &DataConnectorTypeMappings,
data_connector_type_mappings: &data_connector_type_mappings::DataConnectorTypeMappings,
data_connector_name: &Qualified<DataConnectorName>,
object_types: &HashMap<Qualified<CustomTypeName>, ObjectTypeRepresentation>,
scalar_types: &HashMap<Qualified<CustomTypeName>, ScalarTypeRepresentation>,