Mirror of https://github.com/hasura/graphql-engine.git, synced 2024-12-15 01:12:56 +03:00
console: use _track_tables and _untrack_tables in the new and old tracking section

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/9185
GitOrigin-RevId: 45cb115dec7187ab68e066814028144863fbf24a
Parent: faedb37857
Commit: c405ba2500
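The change replaces the console's per-table `bulk_keep_going` calls with the bulk `<driver>_track_tables` / `<driver>_untrack_tables` metadata APIs, in both the new tracking UI (useTrackTables / TableList) and the legacy tracking section (addAllUntrackedTablesSql). For orientation, a minimal sketch of the two request shapes as they appear in the hunks below; the table and source names are placeholders, not values taken from the diff:

// Before: one bulk_keep_going envelope wrapping a per-table operation for each table.
const oldTrackRequest = {
  type: 'bulk_keep_going',
  source: 'default', // placeholder source name
  args: [
    {
      type: 'postgres_track_table',
      args: { table: { schema: 'public', name: 'users' }, source: 'default' },
    },
    // ...one entry per table
  ],
};

// After: a single driver-prefixed bulk call with allow_warnings.
const newTrackRequest = {
  type: 'postgres_track_tables',
  args: {
    allow_warnings: true,
    tables: [
      { table: { schema: 'public', name: 'users' }, source: 'default' },
      // ...one entry per table
    ],
  },
};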
@@ -185,9 +185,10 @@ const addAllUntrackedTablesSql = tableList => {
dispatch({ type: MAKING_REQUEST });
dispatch(showSuccessNotification('Adding...'));
const bulkQueryUp = [];
const bulkQueryDown = [];
const listOfUntrackedTables = [];
for (let i = 0; i < tableList.length; i++) {
if (tableList[i].table_name !== 'schema_migrations') {
const tableDef = getQualifiedTableDef(
@@ -199,14 +200,14 @@ const addAllUntrackedTablesSql = tableList => {
);
const table = findTable(getState().tables.allSchemas, tableDef);
bulkQueryUp.push(
getTrackTableQuery({
tableDef,
source: currentDataSource,
customColumnNames: escapeTableColumns(table),
customName: escapeTableName(tableList[i].table_name),
})
);
const trackableTablePayload = getTrackTableQuery({
tableDef,
source: currentDataSource,
customColumnNames: escapeTableColumns(table),
customName: escapeTableName(tableList[i].table_name),
});
listOfUntrackedTables.push(trackableTablePayload.args);
bulkQueryDown.push(
getUntrackTableQuery(
{
@@ -222,6 +223,14 @@ const addAllUntrackedTablesSql = tableList => {
}
}
const requestBody = {
type: `${currentDriver}_track_tables`,
args: {
allow_warnings: true,
tables: listOfUntrackedTables,
},
};
const migrationName = 'add_all_existing_table_or_view_' + currentSchema;
const requestMsg = 'Adding existing table/view...';
@@ -238,11 +247,10 @@ const addAllUntrackedTablesSql = tableList => {
const customOnError = err => {
dispatch({ type: REQUEST_ERROR, data: err });
};
makeMigrationCall(
dispatch,
getState,
bulkQueryUp,
[requestBody],
bulkQueryDown,
migrationName,
customOnSuccess,
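In the legacy action above, each table's getTrackTableQuery(...).args payload is now collected into listOfUntrackedTables and sent as one request, while the per-table up/down queries are still generated for the migration. A condensed sketch of how the pieces fit, using the same identifiers as the hunks above (the table name is illustrative):

// For each untracked table (illustrative name 'users'):
const trackableTablePayload = getTrackTableQuery({
  tableDef, // qualified table definition built by getQualifiedTableDef
  source: currentDataSource,
  customColumnNames: escapeTableColumns(table),
  customName: escapeTableName('users'),
});
listOfUntrackedTables.push(trackableTablePayload.args);

// After the loop, a single bulk request replaces the per-table calls:
const requestBody = {
  type: `${currentDriver}_track_tables`, // e.g. 'postgres_track_tables'
  args: {
    allow_warnings: true, // keep going past per-table conflicts
    tables: listOfUntrackedTables,
  },
};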
@@ -152,5 +152,5 @@ TrackedTables.play = async ({ canvasElement }) => {
export const MassiveTableAmount = UntrackedTables.bind({});
MassiveTableAmount.parameters = {
msw: handlers(1000000),
msw: handlers(100),
};
@@ -75,19 +75,22 @@ export const resetMetadata = () => {
metadata = initialMetadata();
};
function isTrackOrUntrackTable(body: any) {
return body.type === 'bulk_keep_going';
// function isTrackOrUntrackTable(body: any) {
// return (
// body.type === 'postgres_track_tables' ||
// body.type === 'postgres_track_tables'
// );
// }
function isTrackTable(type: string) {
return type === 'postgres_track_tables';
}
function isTrackTable(arg: any) {
return arg.type === 'postgres_track_table';
function isUntrackTable(type: any) {
return type === 'postgres_untrack_tables';
}
function isUntrackTable(arg: any) {
return arg.type === 'postgres_untrack_table';
}
const runSQLResponse = (size = 1700): RunSQLResponse => ({
const runSQLResponse = (size = 100): RunSQLResponse => ({
result_type: 'TuplesOk',
result: [
['table_name', 'table_schema', 'table_type'],
@@ -124,16 +127,12 @@ function createTables(count: number) {
export const handlers = (amountOfTables = 1700) => [
rest.post(`http://localhost:8080/v1/metadata`, async (req, res, ctx) => {
const body = (await req.json()) as TMigration['query'];
if (isTrackOrUntrackTable(body)) {
body.args.forEach((arg: any) => {
if (isTrackTable(arg)) {
trackTable(arg.args.table);
}
if (isUntrackTable(arg)) {
untrackTable(arg.args.table);
}
});
if (isTrackTable(body.type)) {
body.args.tables.forEach((table: any) => trackTable(table.table));
} else if (isUntrackTable(body.type)) {
body.args.tables.forEach((table: any) => untrackTable(table.table));
}
return res(ctx.json({ ...metadata }));
}),
rest.post(`http://localhost:8080/v2/query`, async (req, res, ctx) => {
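The Storybook mocks above now branch on the top-level request type (postgres_track_tables / postgres_untrack_tables) instead of unwrapping a bulk_keep_going envelope. A hypothetical story wiring, to show how handlers(amountOfTables) is consumed; only handlers and UntrackedTables come from the diff, the story name and count are made up:

// Hypothetical story reusing the mocked /v1/metadata and /v2/query endpoints.
export const SmallTableAmount = UntrackedTables.bind({});

SmallTableAmount.parameters = {
  // handlers(n) appears to seed the mocked run_sql response with n tables
  msw: handlers(25),
};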
@@ -17,9 +17,7 @@ import { TableRow } from './TableRow';
import { usePushRoute } from '../../../ConnectDBRedesign/hooks';
import { useTrackTables } from '../../hooks/useTrackTables';
import { hasuraToast } from '../../../../new-components/Toasts';
import { APIError } from '../../../../hooks/error';
import { useInvalidateMetadata } from '../../../hasura-metadata-api';
import { TableDisplayName } from '../components/TableDisplayName';
import { DisplayToastErrorMessage } from '../../components/DisplayErrorMessage';
interface TableListProps {
dataSourceName: string;
@@ -46,128 +44,52 @@ export const TableList = (props: TableListProps) => {
checkboxRef.current.indeterminate = inputStatus === 'indeterminate';
}, [inputStatus]);
const { untrackTablesInBatches, isLoading, trackTablesInBatches } =
useTrackTables({
dataSourceName,
});
const [progress, setProgress] = useState<undefined | number>();
const invalidateMetadata = useInvalidateMetadata();
const { trackTables, isLoading, untrackTables } = useTrackTables({
dataSourceName,
});
const onClick = async () => {
const tables = filteredTables.filter(({ name }) =>
checkedIds.includes(name)
);
if (mode === 'track') {
untrackTablesInBatches({
tablesToBeUntracked: tables,
onSuccess: (data, variables, ctx, batchInfo) => {
const { batchNumber, totalBatchSize, aggregatedResults } = batchInfo;
setProgress((batchNumber / totalBatchSize) * 100);
if (batchNumber === totalBatchSize) {
const failedResults = aggregatedResults.reduce<TrackableTable[]>(
(acc, result, index) => {
if ('error' in result)
return [...acc, { ...tables[index], status: result.error }];
return acc;
},
[]
);
if (failedResults.length) {
hasuraToast({
type: 'info',
title: `Complete (${
tables.length - failedResults.length
} untracked, ${failedResults.length} failed)`,
children: (
<div>
Some tables in the list could not be untracked due to
conflicts -
<div>
{failedResults.map(table => (
<TableDisplayName table={table.table} />
))}
</div>
</div>
),
});
} else {
hasuraToast({
type: 'success',
title: 'Successfully untracked',
message: `${tables.length} objects untracked`,
});
}
invalidateMetadata();
setProgress(undefined);
}
if (mode === 'untrack') {
trackTables({
tablesToBeTracked: tables,
onSuccess: () => {
hasuraToast({
type: 'success',
title: 'Successfully tracked',
message: `${tables.length} ${
tables.length <= 1 ? 'table' : 'tables'
} tracked!`,
});
},
onError: err => {
hasuraToast({
type: 'error',
title: 'Unable to perform operation',
message: (err as APIError).message,
title: err.name,
children: <DisplayToastErrorMessage message={err.message} />,
});
},
});
} else {
trackTablesInBatches({
tablesToBeTracked: tables,
onSuccess: (data, variables, ctx, batchInfo) => {
const { batchNumber, totalBatchSize, aggregatedResults } = batchInfo;
setProgress((batchNumber / totalBatchSize) * 100);
if (batchNumber === totalBatchSize) {
const failedResults = aggregatedResults.reduce<TrackableTable[]>(
(acc, result, index) => {
if ('error' in result)
return [...acc, { ...tables[index], status: result.error }];
return acc;
},
[]
);
if (failedResults.length) {
hasuraToast({
type: 'info',
title: `Complete (${
tables.length - failedResults.length
} tracked, ${failedResults.length} failed)`,
children: (
<div>
Some tables in the list could not be tracked due to
conflicts -
<div>
{failedResults.map(table => (
<TableDisplayName table={table.table} />
))}
</div>
</div>
),
});
} else {
hasuraToast({
type: 'success',
title: 'Successfully tracked',
message: `${tables.length} objects tracked`,
});
}
invalidateMetadata();
setProgress(undefined);
}
untrackTables({
tablesToBeUntracked: tables,
onSuccess: () => {
hasuraToast({
type: 'success',
title: 'Successfully untracked',
message: `${tables.length} ${
tables.length <= 1 ? 'table' : 'tables'
} untracked`,
});
},
onError: err => {
hasuraToast({
type: 'error',
title: 'Unable to perform operation',
message: (err as APIError).message,
title: err.name,
children: <DisplayToastErrorMessage message={err.message} />,
});
},
});
@@ -197,11 +119,7 @@ export const TableList = (props: TableListProps) => {
disabled={!checkedIds.length}
onClick={onClick}
isLoading={isLoading}
loadingText={
progress
? `Please Wait (${Math.floor(progress)}%)`
: 'Please Wait'
}
loadingText={'Please Wait'}
>
{`${mode === 'track' ? 'Untrack' : 'Track'} Selected (${
checkedIds.length
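With the client-side batching helpers (and the progress indicator they drove) removed, the component calls the hook's mutations directly. A trimmed usage sketch of the new path, lifted from the TableList hunk above; the toast titles are illustrative:

const { trackTables, untrackTables, isLoading } = useTrackTables({ dataSourceName });

// mode === 'untrack' view: track everything the user selected in one request
trackTables({
  tablesToBeTracked: tables,
  onSuccess: () =>
    hasuraToast({ type: 'success', title: 'Successfully tracked' }),
  onError: err =>
    hasuraToast({
      type: 'error',
      title: err.name,
      children: <DisplayToastErrorMessage message={err.message} />,
    }),
});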
@@ -1,4 +1,4 @@
import { TMigration, useMetadataMigration } from '../../MetadataAPI';
import { useMetadataMigration } from '../../MetadataAPI';
import { useCallback } from 'react';
import {
MetadataSelectors,
@@ -6,26 +6,9 @@ import {
useMetadata,
} from '../../hasura-metadata-api';
import type { TrackableTable } from '../TrackResources/types';
import {
MAX_METADATA_BATCH_SIZE,
MetadataMigrationOptions,
} from '../../MetadataAPI/hooks/useMetadataMigration';
import { MetadataMigrationOptions } from '../../MetadataAPI/hooks/useMetadataMigration';
import { transformErrorResponse } from '../../ConnectDBRedesign/utils';
import { BulkKeepGoingResponse } from '../../hasura-metadata-types';
import chunk from 'lodash/chunk';
type BatchMigrationOptions = Omit<MetadataMigrationOptions, 'onSuccess'> & {
onSuccess?: (
data: Record<string, any>,
variables: TMigration<Record<string, any>>,
context: unknown,
batchInfo: {
totalBatchSize: number;
batchNumber: number;
aggregatedResults: Record<string, any>[];
}
) => void;
};
export const useTrackTables = ({
dataSourceName,
@@ -38,15 +21,14 @@ export const useTrackTables = ({
const invalidateMetadata = useInvalidateMetadata();
const { mutate, mutateAsync, ...rest } =
useMetadataMigration<BulkKeepGoingResponse>({
...globalMutateOptions,
onSuccess: (data, variables, ctx) => {
invalidateMetadata();
globalMutateOptions?.onSuccess?.(data, variables, ctx);
},
errorTransform: transformErrorResponse,
});
const { mutate, ...rest } = useMetadataMigration<BulkKeepGoingResponse>({
...globalMutateOptions,
onSuccess: (data, variables, ctx) => {
invalidateMetadata();
globalMutateOptions?.onSuccess?.(data, variables, ctx);
},
errorTransform: transformErrorResponse,
});
const trackTables = useCallback(
({
@@ -58,17 +40,16 @@ export const useTrackTables = ({
mutate(
{
query: {
type: 'bulk_keep_going',
source: dataSourceName,
type: `${driver}_track_tables`,
resource_version,
args: tablesToBeTracked.map(trackableTable => ({
type: `${driver}_track_table`,
args: {
args: {
allow_warnings: true,
tables: tablesToBeTracked.map(trackableTable => ({
table: trackableTable.table,
source: dataSourceName,
configuration: trackableTable.configuration,
},
})),
})),
},
},
},
mutateOptions
@@ -77,46 +58,6 @@ export const useTrackTables = ({
[dataSourceName, driver, mutate, resource_version]
);
const trackTablesInBatches = useCallback(
async ({
tablesToBeTracked,
...mutateOptions
}: { tablesToBeTracked: TrackableTable[] } & BatchMigrationOptions) => {
const results: Record<string, any>[] = [];
const batches = chunk(tablesToBeTracked, MAX_METADATA_BATCH_SIZE);
for (const [index, batch] of batches.entries()) {
await mutateAsync(
{
query: {
type: 'bulk_keep_going',
source: dataSourceName,
args: batch.map(trackableTable => ({
type: `${driver}_track_table`,
args: {
table: trackableTable.table,
source: dataSourceName,
configuration: trackableTable.configuration,
},
})),
},
},
{
...mutateOptions,
onSuccess: (data, variables, ctx) => {
results.push(data);
return mutateOptions?.onSuccess?.(data, variables, ctx, {
totalBatchSize: batches.length,
batchNumber: index + 1,
aggregatedResults: results.flat(),
});
},
}
);
}
},
[dataSourceName, driver, mutateAsync]
);
const untrackTables = useCallback(
({
tablesToBeUntracked,
@@ -127,18 +68,16 @@ export const useTrackTables = ({
mutate(
{
query: {
type: 'bulk_keep_going',
source: dataSourceName,
type: `${driver}_untrack_tables`,
resource_version,
args: tablesToBeUntracked.map(trackedTable => ({
type: `${driver}_untrack_table`,
args: {
table: trackedTable.table,
args: {
allow_warnings: true,
tables: tablesToBeUntracked.map(untrackableTable => ({
table: untrackableTable.table,
source: dataSourceName,
// This will remove any relationships that are attached to the table
cascade: true,
},
})),
configuration: untrackableTable.configuration,
})),
},
},
},
mutateOptions
@@ -147,55 +86,9 @@ export const useTrackTables = ({
[dataSourceName, driver, mutate, resource_version]
);
const untrackTablesInBatches = useCallback(
async ({
tablesToBeUntracked,
...mutateOptions
}: { tablesToBeUntracked: TrackableTable[] } & BatchMigrationOptions) => {
// const { onFinalError, onFinalSuccess, ...options } = mutateOptions;
const results: Record<string, any>[] = [];
const batches = chunk(tablesToBeUntracked, MAX_METADATA_BATCH_SIZE);
for (const [index, batch] of batches.entries()) {
await mutateAsync(
{
query: {
type: 'bulk_keep_going',
source: dataSourceName,
args: batch.map(trackedTable => ({
type: `${driver}_untrack_table`,
args: {
table: trackedTable.table,
source: dataSourceName,
// This will remove any relationships that are attached to the table
cascade: true,
},
})),
},
},
{
...mutateOptions,
onSuccess: (data, variables, ctx) => {
return mutateOptions?.onSuccess?.(data, variables, ctx, {
totalBatchSize: batches.length,
batchNumber: index + 1,
aggregatedResults: [
...results,
...(data as Record<string, any>[]),
].flat(),
});
},
}
);
}
},
[dataSourceName, driver, mutateAsync]
);
return {
trackTables,
untrackTables,
trackTablesInBatches,
untrackTablesInBatches,
...rest,
};
};
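The untrack path in the hook mirrors the track path: one `<driver>_untrack_tables` call carrying every selected table, pinned to the current metadata resource_version. A sketch of the payload the hook assembles above; the concrete values are placeholders and the per-table field set follows the hunk above:

const untrackRequest = {
  type: 'postgres_untrack_tables', // `${driver}_untrack_tables`
  resource_version: 42, // placeholder; the hook supplies the current metadata resource_version
  args: {
    allow_warnings: true,
    tables: [
      {
        table: { schema: 'public', name: 'users' }, // placeholder table
        source: 'default', // placeholder source name
        // cascade removes any relationships attached to the table
        cascade: true,
      },
    ],
  },
};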
@@ -109,6 +109,8 @@ export const metadataQueryTypes = [
'untrack_logical_model',
'track_native_query',
'untrack_native_query',
'track_tables',
'untrack_tables',
'track_stored_procedure',
'untrack_stored_procedure',
] as const;
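Finally, track_tables and untrack_tables are registered as allowed metadata query types; at request time the console prefixes them with the driver, as seen in the hook (`${driver}_track_tables`). A minimal sketch of that relationship; the helper below is hypothetical and not part of the diff:

type MetadataQueryType = (typeof metadataQueryTypes)[number]; // now includes 'track_tables' | 'untrack_tables'

// Hypothetical helper illustrating the driver prefixing used elsewhere in the console.
const toDriverQueryType = (driver: string, queryType: MetadataQueryType) =>
  `${driver}_${queryType}`; // e.g. toDriverQueryType('postgres', 'track_tables') === 'postgres_track_tables'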