console/bigquery: fix showing empty dataset name when the dataset is from a non-US region

Fixes [issue](https://github.com/hasura/graphql-engine/issues/6937)

### Description
The problem lies with the `schemaListSql` for BigQuery. Without a region qualifier prefix, the SQL only returns datasets from the `us` multi-region, as described in the [docs](https://cloud.google.com/bigquery/docs/information-schema-datasets):
```
If you do not specify a regional qualifier, metadata is retrieved from the us multi-region.
```
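
For reference, the query the console previously generated (removed from `schemaListSql` in the diff below) is exactly such an unqualified query, so it can only ever list datasets in the `us` multi-region:
```
select schema_name from INFORMATION_SCHEMA.SCHEMATA
```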

An alternative is to run the query against each of the publicly available regions (`us` and `europe` at the moment), for example for Europe:
```
SELECT * FROM region-europe.INFORMATION_SCHEMA.SCHEMATA;
```
and then `UNION ALL` the results (sketched below). This approach was ditched: since BigQuery is read-only on Hasura and new datasets cannot be added from the console (other than by modifying the metadata), we can instead return the datasets listed in the BigQuery source's metadata as the possible dataset values the console needs to render the data nav tree, thereby avoiding an extra API call.
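
A minimal sketch of that ditched `UNION ALL` approach, using the region qualifiers as named above (backticks added around the hyphenated qualifiers; dataset filtering omitted):
```
SELECT schema_name FROM `region-us`.INFORMATION_SCHEMA.SCHEMATA
UNION ALL
SELECT schema_name FROM `region-europe`.INFORMATION_SCHEMA.SCHEMATA;
```
Every available region would have to be enumerated and queried this way, which is what makes the metadata-based approach simpler.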

GitOrigin-RevId: 8117d5698dcc4e7d932362b7cf2e7c8ef34d106a
Vijay Prasanna 2021-05-25 23:50:50 +05:30 committed by hasura-bot
parent 1674325eb2
commit 3acfd35d4e
2 changed files with 31 additions and 28 deletions


```
@@ -316,15 +316,39 @@ const setConsistentSchema = data => ({
   data,
 });

+const updateSchemaList = (dispatch, getState, schemaList) => {
+  dispatch({
+    type: FETCH_SCHEMA_LIST,
+    schemaList,
+  });
+  let newSchema = '';
+  const { locationBeforeTransitions } = getState().routing;
+  if (schemaList.length) {
+    newSchema =
+      dataSource.defaultRedirectSchema &&
+      schemaList.includes(dataSource.defaultRedirectSchema)
+        ? dataSource.defaultRedirectSchema
+        : schemaList.sort(Intl.Collator().compare)[0];
+  }
+  if (
+    locationBeforeTransitions &&
+    !locationBeforeTransitions.pathname.includes('tables')
+  )
+    dispatch({ type: UPDATE_CURRENT_SCHEMA, currentSchema: newSchema });
+  return dispatch(updateSchemaInfo());
+};
+
 const fetchDataInit = (source, driver) => (dispatch, getState) => {
   const url = Endpoints.query;
-  let { schemaFilter } = getState().tables;
+  const { schemaFilter } = getState().tables;

-  if (driver === 'bigquery')
-    schemaFilter = getState().metadata.metadataObject.sources.find(
+  if (driver === 'bigquery') {
+    const schemaList = getState().metadata.metadataObject.sources.find(
       x => x.name === source
     ).configuration.datasets;
+    return updateSchemaList(dispatch, getState, schemaList);
+  }

   const currentSource = source || getState().tables.currentDataSource;

   const query = getRunSqlQuery(
@@ -352,25 +376,8 @@ const fetchDataInit = (source, driver) => (dispatch, getState) => {
         }
         return [schema[0], ...acc];
       }, []);
-      dispatch({
-        type: FETCH_SCHEMA_LIST,
-        schemaList,
-      });
-      let newSchema = '';
-      const { locationBeforeTransitions } = getState().routing;
-      if (schemaList.length) {
-        newSchema =
-          dataSource.defaultRedirectSchema &&
-          schemaList.includes(dataSource.defaultRedirectSchema)
-            ? dataSource.defaultRedirectSchema
-            : schemaList.sort(Intl.Collator().compare)[0];
-      }
-      if (
-        locationBeforeTransitions &&
-        !locationBeforeTransitions.pathname.includes('tables')
-      )
-        dispatch({ type: UPDATE_CURRENT_SCHEMA, currentSchema: newSchema });
-      return dispatch(updateSchemaInfo()); // TODO
+      return updateSchemaList(dispatch, getState, schemaList);
     },
     error => {
       console.error('Failed to fetch schema ' + JSON.stringify(error));
```


```
@@ -198,12 +198,8 @@ export const bigquery: DataSourcesAPI = {
   arrayToPostgresArray: () => {
     return '';
   },
-  schemaListSql: (schemaFilter: string[]) => {
-    if (schemaFilter.length)
-      return `select schema_name from INFORMATION_SCHEMA.SCHEMATA where schema_name in (${schemaFilter
-        .map(s => `'${s}'`)
-        .join(',')})`;
-    return `select schema_name from INFORMATION_SCHEMA.SCHEMATA`;
+  schemaListSql: () => {
+    return '';
   },
   parseColumnsInfoResult: () => {
     return {};
```