Mirror of https://github.com/hasura/graphql-engine.git (synced 2024-12-14 17:02:49 +03:00)
Add a DataConnectorColumnName newtype (#639)
## Description

Once again I found myself getting confused as to what a `String` meant, so I wrapped it up in a newtype and, as a result, found and fixed a bug. This is (nearly) a functional no-op.

## Changelog

- Add a changelog entry (in the "Changelog entry" section below) if the changes in this PR have any user-facing impact. See the [changelog guide](https://github.com/hasura/graphql-engine-mono/wiki/Changelog-Guide).
- If no changelog is required, ignore/remove this section and add a `no-changelog-required` label to the PR.

### Product
_(Select all products this will be available in)_

- [X] community-edition
- [X] cloud

### Type
_(Select only one. In case of multiple, choose the most appropriate)_

- [ ] highlight
- [ ] enhancement
- [X] bugfix
- [ ] behaviour-change
- [ ] performance-enhancement
- [ ] security-fix

### Changelog entry

Use field mappings more consistently when following remote joins.

V3_GIT_ORIGIN_REV_ID: dd4fff5c4c8f5d1e50742c046b62cb7ddec3f467
Parent: f04bdc59d7
Commit: c09a6ee6c6
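The diff below applies the standard Rust newtype pattern: a dedicated wrapper around `String` so that data connector column names cannot be silently mixed up with other strings, unwrapped via `.0` only at the NDC boundary. A minimal, self-contained sketch of that pattern (names and signatures simplified for illustration; this is not the engine's actual code):

```rust
/// Illustrative sketch only: a newtype for data connector column names.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct DataConnectorColumnName(pub String);

/// A caller that previously accepted a bare `String` now demands the newtype,
/// so passing an unrelated string (an alias, a field name, ...) no longer compiles.
fn comparison_target(column: &DataConnectorColumnName) -> String {
    // Unwrap with `.0` only where the untyped NDC representation is required.
    column.0.clone()
}

fn main() {
    let column = DataConnectorColumnName("artist_id".to_string());
    assert_eq!(comparison_target(&column), "artist_id");
}
```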
@@ -12,7 +12,10 @@ use metadata_resolve::{
 };
 use ndc_models;
 use nonempty::NonEmpty;
-use open_dds::types::{CustomTypeName, InbuiltType};
+use open_dds::{
+    data_connector::DataConnectorColumnName,
+    types::{CustomTypeName, InbuiltType},
+};
 use schema::GDS;
 use schema::{
     Annotation, ArgumentNameAndPath, ArgumentPresets, InputAnnotation, ModelInputAnnotation,
@@ -30,7 +33,7 @@ use super::permissions;
 /// it will modify the JSON object to -
 /// `{"name": "Queen Mary University of London", "location": {"city": "London", "country": "UK"}}`
 pub(crate) fn follow_field_path_and_insert_value(
-    field_path: &NonEmpty<String>,
+    field_path: &NonEmpty<DataConnectorColumnName>,
     object_slice: &mut serde_json::Map<String, serde_json::Value>,
     value: serde_json::Value,
 ) -> Result<(), error::Error> {
@@ -43,7 +46,7 @@ pub(crate) fn follow_field_path_and_insert_value(
         // if rest is *not* empty, pick the field from the current object, and
         // recursively process with the rest
         Some(tail) => {
-            match object_slice.get_mut(field_name) {
+            match object_slice.get_mut(&field_name.0) {
                 None => {
                     // object should have this field; if it doesn't then all the fields are preset
                     object_slice.insert(
@@ -8,6 +8,7 @@ use serde::Serialize;

 use crate::ir::error;
 use crate::model_tracking::{count_model, UsagesCounts};
+use open_dds::data_connector::DataConnectorColumnName;
 use schema::FilterRelationshipAnnotation;
 use schema::GDS;
 use schema::{self};
@@ -226,12 +227,12 @@ fn resolve_filter_object<'s>(
 /// Generate a binary comparison operator
 fn build_binary_comparison_expression(
     operator: &str,
-    column: String,
+    column: DataConnectorColumnName,
     value: &normalized_ast::Value<'_, GDS>,
 ) -> ndc_models::Expression {
     ndc_models::Expression::BinaryComparisonOperator {
         column: ndc_models::ComparisonTarget::Column {
-            name: column,
+            name: column.0,
             path: Vec::new(),
         },
         operator: operator.to_string(),
@@ -243,13 +244,13 @@ fn build_binary_comparison_expression(

 /// Resolve `_is_null` GraphQL boolean operator
 fn build_is_null_expression(
-    column: String,
+    column: DataConnectorColumnName,
     value: &normalized_ast::Value<'_, GDS>,
 ) -> Result<ndc_models::Expression, error::Error> {
     // Build an 'IsNull' unary comparison expression
     let unary_comparison_expression = ndc_models::Expression::UnaryComparisonOperator {
         column: ndc_models::ComparisonTarget::Column {
-            name: column,
+            name: column.0,
             path: Vec::new(),
         },
         operator: ndc_models::UnaryComparisonOperator::IsNull,
@@ -170,7 +170,7 @@ pub(crate) fn build_ndc_order_by_element<'s>(
         order_direction,
         // TODO(naveen): When aggregates are supported, extend this to support other ndc_models::OrderByTarget
         target: ndc_models::OrderByTarget::Column {
-            name: ndc_column.clone(),
+            name: ndc_column.0.clone(),
             path: order_by_element_path,
         },
     };
@@ -4,7 +4,7 @@ use hasura_authn_core::{SessionVariableValue, SessionVariables};
 use lang_graphql::normalized_ast;
 use ndc_models;

-use open_dds::types::InbuiltType;
+use open_dds::{data_connector::DataConnectorColumnName, types::InbuiltType};

 use crate::ir::error;
 use crate::model_tracking::{count_model, UsagesCounts};
@@ -165,7 +165,7 @@ pub(crate) fn process_model_predicate<'s>(
 }

 fn make_permission_binary_boolean_expression(
-    ndc_column: String,
+    ndc_column: DataConnectorColumnName,
     argument_type: &QualifiedTypeReference,
     operator: &str,
     value_expression: &metadata_resolve::ValueExpression,
@@ -180,7 +180,7 @@ fn make_permission_binary_boolean_expression(
     )?;
     Ok(ndc_models::Expression::BinaryComparisonOperator {
         column: ndc_models::ComparisonTarget::Column {
-            name: ndc_column,
+            name: ndc_column.0,
             path: Vec::new(),
         },
         operator: operator.to_owned(),
@@ -191,12 +191,12 @@ fn make_permission_binary_boolean_expression(
 }

 fn make_permission_unary_boolean_expression(
-    ndc_column: String,
+    ndc_column: DataConnectorColumnName,
     operator: ndc_models::UnaryComparisonOperator,
 ) -> Result<ndc_models::Expression, error::Error> {
     Ok(ndc_models::Expression::UnaryComparisonOperator {
         column: ndc_models::ComparisonTarget::Column {
-            name: ndc_column,
+            name: ndc_column.0,
             path: Vec::new(),
         },
         operator,
@@ -147,7 +147,7 @@ pub(crate) fn entities_ir<'n, 's>(
     )?;
     Ok(ndc_models::Expression::BinaryComparisonOperator {
         column: ndc_models::ComparisonTarget::Column {
-            name: field_mapping.column.clone(),
+            name: field_mapping.column.0.clone(),
             path: vec![], // We don't support nested fields in the key fields, so the path is empty
         },
         operator: field_mapping.equal_operator.clone(),
@@ -127,7 +127,7 @@ pub(crate) fn relay_node_ir<'n, 's>(
     })?;
     Ok(ndc_models::Expression::BinaryComparisonOperator {
         column: ndc_models::ComparisonTarget::Column {
-            name: field_mapping.column.clone(),
+            name: field_mapping.column.0.clone(),
             path: vec![],
         },
         operator: field_mapping.equal_operator.clone(),
@@ -64,7 +64,7 @@ pub(crate) fn select_one_generate_ir<'n, 's>(
         description: format!("Missing NDC column mapping for unique identifier argument {} on field {}", argument.name, field_call.name)})?;
     let ndc_expression = ndc_models::Expression::BinaryComparisonOperator {
         column: ndc_models::ComparisonTarget::Column {
-            name: ndc_column.column.clone(),
+            name: ndc_column.column.0.clone(),
             path: vec![],
         },
         operator: ndc_column.equal_operator.clone(),
@@ -394,7 +394,7 @@ pub(crate) fn build_remote_relationship<'n, 's>(
     let target_value_variable = format!("${}", &target_column.column);
     let comparison_exp = ndc_models::Expression::BinaryComparisonOperator {
         column: ndc_models::ComparisonTarget::Column {
-            name: target_column.column.clone(),
+            name: target_column.column.0.clone(),
             path: vec![],
         },
         operator: target_column.equal_operator.clone(),
@@ -103,7 +103,7 @@ impl<'s> ResultSelectionSet<'s> {
     pub(crate) fn contains(&self, other_field: &metadata_resolve::FieldMapping) -> Option<String> {
         self.fields.iter().find_map(|(alias, field)| match field {
             FieldSelection::Column { column, .. } => {
-                if *column == other_field.column {
+                if *column == other_field.column.0 {
                     Some(alias.clone())
                 } else {
                     None
@@ -135,7 +135,7 @@ fn build_global_id_fields(
         fields.insert(
             global_col_id_alias,
             FieldSelection::Column {
-                column: field_mapping.column.clone(),
+                column: field_mapping.column.0.clone(),
                 nested_selection: None,
             },
         );
@@ -242,7 +242,7 @@ pub(crate) fn generate_selection_set_ir<'s>(
                 fields.insert(
                     field.alias.to_string(),
                     FieldSelection::Column {
-                        column: field_mapping.column.clone(),
+                        column: field_mapping.column.0.clone(),
                         nested_selection,
                     },
                 );
@@ -121,7 +121,7 @@ pub(crate) fn process_model_relationship_definition(
         })?;

         if column_mapping
-            .insert(source_column.column, target_column.column.clone())
+            .insert(source_column.column.0, target_column.column.0.clone())
             .is_some()
         {
             Err(error::InternalError::MappingExistsInRelationship {
@@ -182,7 +182,7 @@ pub(crate) fn process_command_relationship_definition(
         })?;

         let relationship_argument = ndc_models::RelationshipArgument::Column {
-            name: source_column.column,
+            name: source_column.column.0,
         };

         if arguments
@@ -1,6 +1,3 @@
-use indexmap::IndexMap;
-use std::collections::{BTreeMap, HashMap};
-
 use super::commands;
 use super::error;
 use super::model_selection;
@@ -12,7 +9,10 @@ use crate::remote_joins::types::{
     JoinLocations, JoinNode, LocationKind, MonotonicCounter, RemoteJoinType, TargetField,
 };
 use crate::remote_joins::types::{Location, RemoteJoin};
+use indexmap::IndexMap;
 use metadata_resolve::FieldMapping;
+use open_dds::data_connector::DataConnectorColumnName;
+use std::collections::{BTreeMap, HashMap};

 pub(crate) fn process_nested_selection<'s, 'ir>(
     nested_selection: &'ir NestedSelection<'s>,
@@ -251,7 +251,7 @@ fn process_remote_relationship_field_mapping(
             ndc_fields.insert(
                 internal_alias.clone(),
                 ndc_models::Field::Column {
-                    column: field.column.clone(),
+                    column: field.column.0.clone(),
                     fields: None,
                 },
             );
@@ -261,8 +261,8 @@ fn process_remote_relationship_field_mapping(
         }
     }
 }

-fn make_hasura_phantom_field(field_name: &str) -> String {
-    format!("__hasura_phantom_field__{}", field_name)
+fn make_hasura_phantom_field(field_name: &DataConnectorColumnName) -> String {
+    format!("__hasura_phantom_field__{}", field_name.0)
 }

 pub(crate) fn collect_relationships_from_nested_selection(
@@ -2,7 +2,7 @@ use crate::stages::{commands, data_connectors, models, object_types};
 use ndc_models;
 use open_dds::{
     commands::{CommandName, DataConnectorCommand, FunctionName, ProcedureName},
-    data_connector::DataConnectorName,
+    data_connector::{DataConnectorColumnName, DataConnectorName},
     models::ModelName,
     types::{CustomTypeName, FieldName},
 };
@@ -44,7 +44,7 @@ pub enum NDCValidationError {
         model_name: Qualified<ModelName>,
         field_name: FieldName,
         collection_name: String,
-        column_name: String,
+        column_name: DataConnectorColumnName,
     },
     #[error("procedure {procedure_name} is not defined in data connector {db_name}")]
     NoSuchProcedure {
@@ -64,7 +64,7 @@ pub enum NDCValidationError {
         command_name: Qualified<CommandName>,
         field_name: FieldName,
         func_proc_name: String,
-        column_name: String,
+        column_name: DataConnectorColumnName,
     },
     #[error("column {column_name} has type {column_type} in collection {collection_name} in data connector {db_name}, not type {field_type}")]
     ColumnTypeDoesNotMatch {
@@ -72,7 +72,7 @@ pub enum NDCValidationError {
         model_name: ModelName,
         field_name: FieldName,
         collection_name: String,
-        column_name: String,
+        column_name: DataConnectorColumnName,
         field_type: String,
         column_type: String,
     },
@@ -178,7 +178,7 @@ pub fn validate_ndc(
             let _column =
                 collection_type
                     .fields
-                    .get(column_name)
+                    .get(&column_name.0)
                     .ok_or(NDCValidationError::NoSuchColumn {
                         db_name: db.name.clone(),
                         model_name: model_name.clone(),
@@ -334,7 +334,7 @@ pub fn validate_ndc_command(
     // Check if the field mappings for the output_type is valid
     for (field_name, field_mapping) in field_mappings {
         let column_name = &field_mapping.column;
-        if !command_source_ndc_type.fields.contains_key(column_name) {
+        if !command_source_ndc_type.fields.contains_key(&column_name.0) {
             return Err(NDCValidationError::NoSuchColumnForCommand {
                 db_name: db.name.clone(),
                 command_name: command_name.clone(),
@@ -6,14 +6,14 @@ use crate::types::subgraph::{
 };
 use lang_graphql::ast::common as ast;

-use open_dds::types::CustomTypeName;
+use open_dds::{data_connector::DataConnectorColumnName, types::CustomTypeName};
 use serde::{Deserialize, Serialize};
 use std::collections::{BTreeMap, BTreeSet};
 use std::str::FromStr;

-#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Hash, Default)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Hash)]
 pub struct NdcColumnForComparison {
-    pub column: String,
+    pub column: DataConnectorColumnName,
     pub equal_operator: String,
 }
@@ -3,6 +3,7 @@ use crate::types::error::{Error, RelationshipError};
 use crate::types::permission::ValueExpression;
 use crate::types::subgraph::{deserialize_qualified_btreemap, serialize_qualified_btreemap};
 use open_dds::{
+    data_connector::DataConnectorColumnName,
     models::ModelName,
     relationships::{RelationshipName, RelationshipType},
     types::CustomTypeName,
@@ -40,12 +41,12 @@ pub struct SelectPermission {
 pub enum ModelPredicate {
     UnaryFieldComparison {
         field: FieldName,
-        ndc_column: String,
+        ndc_column: DataConnectorColumnName,
         operator: ndc_models::UnaryComparisonOperator,
     },
     BinaryFieldComparison {
         field: FieldName,
-        ndc_column: String,
+        ndc_column: DataConnectorColumnName,
         operator: String,
         argument_type: QualifiedTypeReference,
         value: ValueExpression,
@@ -11,7 +11,7 @@ use lang_graphql::ast::common::{self as ast, Name};
 use open_dds::types::Deprecated;
 use open_dds::{
     arguments::ArgumentName,
-    data_connector::DataConnectorName,
+    data_connector::{DataConnectorColumnName, DataConnectorName},
     models::{ModelName, OrderableField},
     types::{CustomTypeName, FieldName},
 };
@@ -67,7 +67,7 @@ pub struct SelectManyGraphQlDefinition {
 // TODO: add support for aggregates
 #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
 pub struct OrderByExpressionInfo {
-    pub ndc_column: String,
+    pub ndc_column: DataConnectorColumnName,
 }

 #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
@@ -7,7 +7,7 @@ use lang_graphql::ast::common::{self as ast};
 use std::collections::BTreeSet;

 use open_dds::{
-    data_connector::{DataConnectorName, DataConnectorObjectType},
+    data_connector::{DataConnectorColumnName, DataConnectorName, DataConnectorObjectType},
     types::{CustomTypeName, FieldName, OperatorName},
 };
 use serde::{Deserialize, Serialize};
@@ -42,7 +42,7 @@ pub struct ObjectBooleanExpressionType {
 pub struct ComparisonExpressionInfo {
     pub scalar_type_name: String,
     pub type_name: ast::TypeName,
-    pub ndc_column: String,
+    pub ndc_column: DataConnectorColumnName,
     pub operators: BTreeMap<OperatorName, QualifiedTypeReference>,
     pub is_null_operator_name: ast::Name,
 }
@@ -1,6 +1,7 @@
 use std::collections::{BTreeMap, BTreeSet};
 pub mod types;
-use open_dds::types::CustomTypeName;
+use open_dds::{data_connector::DataConnectorColumnName, types::CustomTypeName};
+use ref_cast::RefCast;
 pub use types::{
     DataConnectorTypeMappingsForObject, DataConnectorTypeMappingsOutput, FieldDefinition,
     FieldMapping, ObjectTypeRepresentation, ObjectTypeWithTypeMappings,
@@ -287,7 +288,7 @@ pub fn resolve_data_connector_type_mapping(
         .collect::<BTreeMap<_, _>>();
     let mut resolved_field_mappings = BTreeMap::new();
     for field_name in type_representation.fields.keys() {
-        let resolved_field_mapping_column: &str =
+        let resolved_field_mapping_column: &DataConnectorColumnName =
             if let Some(field_mapping) = unconsumed_field_mappings.remove(field_name) {
                 match field_mapping {
                     open_dds::types::FieldMapping::Column(column_mapping) => &column_mapping.name,
@@ -295,11 +296,11 @@ pub fn resolve_data_connector_type_mapping(
             } else {
                 // If no mapping is defined for a field, implicitly create a mapping
                 // with the same column name as the field.
-                &field_name.0 .0
+                DataConnectorColumnName::ref_cast(&field_name.0 .0)
             };
         let source_column = get_column(ndc_object_type, field_name, resolved_field_mapping_column)?;
         let resolved_field_mapping = FieldMapping {
-            column: resolved_field_mapping_column.to_string(),
+            column: resolved_field_mapping_column.clone(),
            column_type: source_column.r#type.clone(),
         };

@@ -338,11 +339,11 @@ pub fn resolve_data_connector_type_mapping(
 fn get_column<'a>(
     ndc_type: &'a ndc_models::ObjectType,
     field_name: &open_dds::types::FieldName,
-    column: &str,
+    column: &DataConnectorColumnName,
 ) -> Result<&'a ndc_models::ObjectField, TypeMappingValidationError> {
     ndc_type
         .fields
-        .get(column)
+        .get(&column.0)
         .ok_or(TypeMappingValidationError::UnknownTargetColumn {
             field_name: field_name.clone(),
             column_name: column.to_string(),
@@ -11,7 +11,9 @@ use open_dds::models::ModelName;
 use crate::types::subgraph::Qualified;

 use lang_graphql::ast::common as ast;
-use open_dds::data_connector::{DataConnectorName, DataConnectorObjectType};
+use open_dds::data_connector::{
+    DataConnectorColumnName, DataConnectorName, DataConnectorObjectType,
+};

 /// A mapping from a data connector to their objects, which contain field types.
 #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
@@ -119,7 +121,7 @@ pub struct ResolvedApolloFederationObjectKey {
 /// Mapping from a column to its type.
 #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Hash)]
 pub struct FieldMapping {
-    pub column: String,
+    pub column: DataConnectorColumnName,
     pub column_type: ndc_models::Type,
 }
@@ -1178,11 +1178,19 @@
         "properties": {
           "name": {
             "description": "The name of the target column",
-            "type": "string"
+            "allOf": [
+              {
+                "$ref": "#/definitions/DataConnectorColumnName"
+              }
+            ]
           }
         },
         "additionalProperties": false
       },
+      "DataConnectorColumnName": {
+        "description": "The name of a column in a data connector.",
+        "type": "string"
+      },
       "ScalarTypeV1": {
         "$id": "https://hasura.io/jsonschemas/metadata/ScalarTypeV1",
         "title": "ScalarTypeV1",
@@ -82,6 +82,25 @@ pub struct DataConnectorScalarType(pub String);
 )]
 pub struct DataConnectorOperatorName(pub String);

+/// The name of a column in a data connector.
+#[repr(transparent)]
+#[derive(
+    Serialize,
+    Deserialize,
+    Clone,
+    Debug,
+    PartialEq,
+    Eq,
+    PartialOrd,
+    Ord,
+    Hash,
+    JsonSchema,
+    ref_cast::RefCast,
+    derive_more::Display,
+    opendds_derive::OpenDd,
+)]
+pub struct DataConnectorColumnName(pub String);
+
 #[derive(Serialize, Clone, Debug, PartialEq, opendds_derive::OpenDd)]
 #[serde(tag = "version", content = "definition")]
 #[serde(rename_all = "camelCase")]
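The `#[repr(transparent)]` attribute together with the `ref_cast::RefCast` derive in the hunk above is what lets the resolver reinterpret an existing `&String` as a `&DataConnectorColumnName` without allocating, as in `DataConnectorColumnName::ref_cast(&field_name.0 .0)` earlier in this diff. A standalone sketch of that mechanism (type name simplified; assumes the `ref_cast` crate as a dependency):

```rust
use ref_cast::RefCast;

/// Sketch only: a same-layout newtype, so a reference cast is sound.
#[repr(transparent)]
#[derive(Debug, PartialEq, RefCast)]
pub struct ColumnName(pub String);

fn main() {
    let raw = String::from("album_id");
    // Borrow the String as the newtype without copying it.
    let column: &ColumnName = ColumnName::ref_cast(&raw);
    assert_eq!(column.0, "album_id");
}
```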
@@ -9,7 +9,10 @@ use serde::{
 };

 use crate::{
-    data_connector::{DataConnectorName, DataConnectorObjectType, DataConnectorScalarType},
+    data_connector::{
+        DataConnectorColumnName, DataConnectorName, DataConnectorObjectType,
+        DataConnectorScalarType,
+    },
     identifier::Identifier,
     impl_JsonSchema_with_OpenDd_for, impl_OpenDd_default_for,
     models::EnableAllOrSpecific,
@@ -398,7 +401,7 @@ pub enum FieldMapping {
 /// The target column in a data connector object that a source field maps to.
 pub struct ColumnFieldMapping {
     /// The name of the target column
-    pub name: String, // TODO: Map field arguments
+    pub name: DataConnectorColumnName, // TODO: Map field arguments
 }

 /// The name of a field in a user-defined object type.
@@ -1,4 +1,7 @@
-use open_dds::types::{CustomTypeName, FieldName};
+use open_dds::{
+    data_connector::DataConnectorColumnName,
+    types::{CustomTypeName, FieldName},
+};
 use std::collections::{BTreeMap, HashMap};

 use crate::types;
@@ -224,7 +227,7 @@ pub(crate) fn get_command_namespace_annotations(
 }

 fn build_annotations_from_input_object_type_permissions(
-    field_path: &mut [String],
+    field_path: &mut [DataConnectorColumnName],
     type_reference: &QualifiedTypeReference,
     ndc_argument_name: &Option<ConnectorArgumentName>,
     object_types: &BTreeMap<
@@ -268,7 +271,18 @@ fn build_annotations_from_input_object_type_permissions(
             // recursively process all the fields of this input object type
             for (field_name, field_definition) in &object_type_repr.object_type.fields {
                 let mut field_path_ = field_path.to_owned();
-                field_path_.push(field_name.to_string());
+                let ndc_field = field_mappings
+                    .and_then(|mappings| {
+                        mappings
+                            .get(field_name)
+                            .map(|field_mapping| field_mapping.column.clone())
+                    })
+                    .ok_or_else(|| crate::Error::InternalMappingNotFound {
+                        type_name: object_type.clone(),
+                        field_name: field_name.clone(),
+                    })?;
+
+                field_path_.push(ndc_field.clone());
                 build_annotations_from_input_object_type_permissions(
                     &mut field_path_,
                     &field_definition.field_type,
@@ -303,7 +317,7 @@ fn build_preset_map_from_input_object_type_permission(
     permission: &metadata_resolve::TypeInputPermission,
     field_mappings: Option<&BTreeMap<FieldName, metadata_resolve::FieldMapping>>,
     type_reference: &QualifiedTypeReference,
-    field_path: &[String],
+    field_path: &[DataConnectorColumnName],
     ndc_argument_name: &Option<ConnectorArgumentName>,
     object_type: &Qualified<CustomTypeName>,
 ) -> Result<
@@ -11,7 +11,9 @@ use std::{

 use open_dds::{
     arguments::ArgumentName,
-    commands, models,
+    commands,
+    data_connector::DataConnectorColumnName,
+    models,
     types::{self},
 };

@@ -79,7 +81,7 @@ pub enum ModelFilterArgument {
     AndOp,
     OrOp,
     NotOp,
-    Field { ndc_column: String },
+    Field { ndc_column: DataConnectorColumnName },
     RelationshipField(FilterRelationshipAnnotation),
 }

@@ -197,7 +199,7 @@ pub enum ModelInputAnnotation {
     IsNullOperation,
     ModelOrderByExpression,
     ModelOrderByArgument {
-        ndc_column: String,
+        ndc_column: DataConnectorColumnName,
     },
     ModelOrderByRelationshipArgument(OrderByRelationshipAnnotation),

@@ -289,7 +291,7 @@ pub struct ArgumentNameAndPath {
     pub ndc_argument_name: Option<metadata_resolve::ConnectorArgumentName>,
     /// Optional path of field names to traverse to get to a field, in case of
     /// complex input object types
-    pub field_path: Vec<String>,
+    pub field_path: Vec<DataConnectorColumnName>,
 }

 #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Display)]