mirror of
https://github.com/hasura/graphql-engine.git
synced 2024-12-14 17:02:49 +03:00
chore: add useCreateRestEndpoints hook
## Description This PR adds a hook to create REST endpoints for a particular table. It uses a different approach than the one used in the "Try it" feature: it uses the introspection schema query results to identify which GraphQL operations correspond to the particular REST operation we want to create. The matching is performed by looking at the comment in the GraphQL schema. This lets us quickly identify the correct GraphQL operation even in the presence of schema customizations, but, on the other hand, it may not work if the user disabled or changed the default comment in the schema. We evaluated that, since this is a "quick start" feature, the pros outweigh the cons. ## How to test it - Run Storybook - Go to the `useCreateQueryCollection ` story - Click "Create Rest endpoint" - look at the network tab and the changes in the metadata displayed PR-URL: https://github.com/hasura/graphql-engine-mono/pull/9464 GitOrigin-RevId: fc0d5f26d656a5e7f2c83ecda10703b851a88626
This commit is contained in:
parent
59a967cb8e
commit
6f5cf01f58
@ -21,7 +21,12 @@ export function useIntrospectionSchema(
|
||||
select = (d: IntrospectionQueryResp) => d,
|
||||
transformFn = (d: unknown) => d,
|
||||
queryOptions?: Omit<
|
||||
UseQueryOptions<IntrospectionQueryResp, Error, unknown, 'metadata'>,
|
||||
UseQueryOptions<
|
||||
IntrospectionQueryResp,
|
||||
Error,
|
||||
unknown,
|
||||
'introspectionSchema'
|
||||
>,
|
||||
'queryKey' | 'queryFn'
|
||||
>
|
||||
) {
|
||||
@ -35,7 +40,7 @@ export function useIntrospectionSchema(
|
||||
};
|
||||
|
||||
return useQuery({
|
||||
queryKey: 'metadata',
|
||||
queryKey: 'introspectionSchema',
|
||||
queryFn,
|
||||
...queryOptions,
|
||||
select: d => transformFn(select(d)),
|
||||
|
@ -1,4 +1,7 @@
|
||||
export { useAddOperationsToQueryCollection } from './useAddOperationsToQueryCollection';
|
||||
export {
|
||||
useAddOperationsToQueryCollection,
|
||||
createAddOperationToQueryCollectionMetadataArgs,
|
||||
} from './useAddOperationsToQueryCollection';
|
||||
export { useEditOperationInQueryCollection } from './useEditOperationInQueryCollection';
|
||||
export { useMoveOperationsToQueryCollection } from './useMoveOperationsToQueryCollection';
|
||||
export { useOperationsFromQueryCollection } from './useOperationsFromQueryCollection';
|
||||
|
@ -1,9 +1,28 @@
|
||||
import { useCallback } from 'react';
|
||||
|
||||
import { useMetadata, useMetadataMigration } from '../../../MetadataAPI';
|
||||
import {
|
||||
MetadataResponse,
|
||||
useMetadata,
|
||||
useMetadataMigration,
|
||||
} from '../../../MetadataAPI';
|
||||
import { QueryCollection } from '../../../../metadata/types';
|
||||
import { createAllowedQueriesIfNeeded } from '../useCreateQueryCollection';
|
||||
|
||||
export const createAddOperationToQueryCollectionMetadataArgs = (
|
||||
queryCollection: string,
|
||||
queries: QueryCollection[],
|
||||
metadata?: MetadataResponse
|
||||
) => [
|
||||
...createAllowedQueriesIfNeeded(queryCollection, metadata),
|
||||
...queries.map(query => ({
|
||||
type: 'add_query_to_collection',
|
||||
args: {
|
||||
collection_name: queryCollection,
|
||||
query_name: query.name,
|
||||
query: query.query,
|
||||
},
|
||||
})),
|
||||
];
|
||||
export const useAddOperationsToQueryCollection = () => {
|
||||
const { mutate, ...rest } = useMetadataMigration();
|
||||
|
||||
@ -28,17 +47,11 @@ export const useAddOperationsToQueryCollection = () => {
|
||||
...(metadata?.resource_version && {
|
||||
resource_version: metadata.resource_version,
|
||||
}),
|
||||
args: [
|
||||
...createAllowedQueriesIfNeeded(queryCollection, metadata),
|
||||
...queries.map(query => ({
|
||||
type: 'add_query_to_collection',
|
||||
args: {
|
||||
collection_name: queryCollection,
|
||||
query_name: query.name,
|
||||
query: query.query,
|
||||
},
|
||||
})),
|
||||
],
|
||||
args: createAddOperationToQueryCollectionMetadataArgs(
|
||||
queryCollection,
|
||||
queries,
|
||||
metadata
|
||||
),
|
||||
},
|
||||
},
|
||||
{
|
||||
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,103 @@
|
||||
import React from 'react';
|
||||
import { handlers } from '../../../mocks/metadata.mock';
|
||||
import { ReactQueryDecorator } from '../../../storybook/decorators/react-query';
|
||||
import { ReduxDecorator } from '../../../storybook/decorators/redux-decorator';
|
||||
import { StoryObj, Meta } from '@storybook/react';
|
||||
|
||||
import { useCreateRestEndpoints } from './useCreateRestEndpoints';
|
||||
import { Button } from '../../../new-components/Button';
|
||||
import ReactJson from 'react-json-view';
|
||||
import { rest } from 'msw';
|
||||
import introspectionSchema from './mocks/introspectionWithoutCustomizations.json';
|
||||
import { useMetadata } from '../../MetadataAPI';
|
||||
import { userEvent, waitFor, within } from '@storybook/testing-library';
|
||||
import { expect } from '@storybook/jest';
|
||||
|
||||
const UseCreateRestEndpoints: React.FC = () => {
|
||||
const { createRestEndpoints, isLoading, isError, isSuccess, isReady } =
|
||||
useCreateRestEndpoints();
|
||||
|
||||
const { data: metadata } = useMetadata();
|
||||
|
||||
return (
|
||||
<div>
|
||||
<Button
|
||||
onClick={() =>
|
||||
createRestEndpoints('user', [
|
||||
'VIEW',
|
||||
'VIEW_ALL',
|
||||
'DELETE',
|
||||
'CREATE',
|
||||
'UPDATE',
|
||||
])
|
||||
}
|
||||
>
|
||||
Create REST Endpoint
|
||||
</Button>
|
||||
<div data-testid="ready-state">Is Ready: {JSON.stringify(isReady)}</div>
|
||||
<div data-testid="loading-state">
|
||||
Is Error: {JSON.stringify(isLoading)}
|
||||
</div>
|
||||
<div data-testid="success-state">
|
||||
Is Success: {JSON.stringify(isSuccess)}
|
||||
</div>
|
||||
<div data-testid="error-state">Is Error: {JSON.stringify(isError)}</div>
|
||||
<div>
|
||||
<ReactJson src={{ metadata }} name="metadata" collapsed={1} />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export const Primary: StoryObj = {
|
||||
render: args => {
|
||||
return <UseCreateRestEndpoints {...args} />;
|
||||
},
|
||||
|
||||
args: {
|
||||
collectionName: 'rest-endpoint',
|
||||
},
|
||||
};
|
||||
|
||||
Primary.play = async ({ canvasElement }: any) => {
|
||||
const canvas = within(canvasElement);
|
||||
|
||||
await waitFor(
|
||||
() =>
|
||||
expect(canvas.getByTestId('ready-state')).toHaveTextContent(
|
||||
'Is Ready: true'
|
||||
),
|
||||
|
||||
{
|
||||
timeout: 5000,
|
||||
}
|
||||
);
|
||||
await userEvent.click(await canvas.findByText('Create REST Endpoint'));
|
||||
|
||||
await canvas.findByTestId('success-state');
|
||||
await waitFor(
|
||||
() =>
|
||||
expect(canvas.getByTestId('success-state')).toHaveTextContent(
|
||||
'Is Success: true'
|
||||
),
|
||||
{
|
||||
timeout: 5000,
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
export default {
|
||||
title: 'features/Rest endpoints/hooks/useCreateRestEndpoint',
|
||||
decorators: [
|
||||
ReduxDecorator({ tables: { currentDataSource: 'default' } }),
|
||||
ReactQueryDecorator(),
|
||||
],
|
||||
parameters: {
|
||||
msw: [
|
||||
...handlers({ delay: 500 }),
|
||||
rest.post(`http://localhost:8080/v1/graphql`, async (req, res, ctx) => {
|
||||
return res(ctx.json(introspectionSchema));
|
||||
}),
|
||||
],
|
||||
},
|
||||
} as Meta;
|
@ -0,0 +1,56 @@
|
||||
import { useCallback } from 'react';
|
||||
import { useMetadata, useMetadataMigration } from '../../MetadataAPI';
|
||||
import { createAddOperationToQueryCollectionMetadataArgs } from '../../QueryCollections/hooks';
|
||||
import {
|
||||
type EndpointType,
|
||||
useRestEndpointDefinitions,
|
||||
EndpointDefinition,
|
||||
} from './useRestEndpointDefinitions';
|
||||
|
||||
export const useCreateRestEndpoints = () => {
|
||||
const { data: endpointDefinitions } = useRestEndpointDefinitions();
|
||||
|
||||
const { mutate, ...rest } = useMetadataMigration();
|
||||
|
||||
const { data: metadata, isSuccess: isReady } = useMetadata();
|
||||
|
||||
const createRestEndpoints = useCallback(
|
||||
(
|
||||
table: string,
|
||||
types: EndpointType[],
|
||||
options?: Parameters<typeof mutate>[1]
|
||||
) => {
|
||||
const endpoints = types
|
||||
.map(type => endpointDefinitions?.[table]?.[type])
|
||||
.filter(a => a) as EndpointDefinition[];
|
||||
|
||||
return mutate(
|
||||
{
|
||||
query: {
|
||||
type: 'bulk',
|
||||
...(metadata?.resource_version && {
|
||||
resource_version: metadata.resource_version,
|
||||
}),
|
||||
args: [
|
||||
...createAddOperationToQueryCollectionMetadataArgs(
|
||||
'allowed-queries',
|
||||
endpoints?.map(endpoint => endpoint.query),
|
||||
metadata
|
||||
),
|
||||
...endpoints.map(endpoint => ({
|
||||
type: 'create_rest_endpoint',
|
||||
args: endpoint.restEndpoint,
|
||||
})),
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
...options,
|
||||
}
|
||||
);
|
||||
},
|
||||
[endpointDefinitions, mutate, metadata]
|
||||
);
|
||||
|
||||
return { createRestEndpoints, isReady, ...rest };
|
||||
};
|
@ -0,0 +1,164 @@
|
||||
import { Microfiber } from 'microfiber';
|
||||
import { useIntrospectionSchema } from '../../../components/Services/Actions/Common/components/ImportTypesModal/useIntrospectionSchema';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { Query, RestEndpoint } from '../../hasura-metadata-types';
|
||||
import {
|
||||
generateDeleteEndpoint,
|
||||
generateInsertEndpoint,
|
||||
generateUpdateEndpoint,
|
||||
generateViewAllEndpoint,
|
||||
generateViewEndpoint,
|
||||
} from './utils';
|
||||
import { formatSdl } from 'format-graphql';
|
||||
|
||||
export type EndpointType = 'VIEW' | 'VIEW_ALL' | 'CREATE' | 'UPDATE' | 'DELETE';
|
||||
|
||||
export type EndpointDefinition = {
|
||||
restEndpoint: RestEndpoint;
|
||||
query: Query;
|
||||
};
|
||||
|
||||
type EndpointDefinitions = {
|
||||
[key: string]: Partial<Record<EndpointType, EndpointDefinition>>;
|
||||
};
|
||||
|
||||
export type Generator = {
|
||||
regExp: RegExp;
|
||||
generator: (
|
||||
root: string,
|
||||
table: string,
|
||||
operation: any,
|
||||
microfiber: any
|
||||
) => EndpointDefinition;
|
||||
};
|
||||
|
||||
export const getOperations = (microfiber: any) => {
|
||||
const queryType = microfiber.getQueryType();
|
||||
const mutationType = microfiber.getMutationType();
|
||||
|
||||
// if there are customizations, there is an additional level in the types.
|
||||
// using an heuristic to find the correct type
|
||||
// if query and mutations have only 1 fields which name contains 'query' and 'mutation' respectively
|
||||
// then we assume that the query and mutation types are one level deeper
|
||||
|
||||
let queryTypeName: string = queryType.name;
|
||||
let mutationTypeName: string = mutationType.name;
|
||||
|
||||
let root = '';
|
||||
|
||||
if (queryType.fields[0].name === 'no_queries_available') {
|
||||
return {
|
||||
root: '',
|
||||
operations: [],
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
queryType.fields.length === 1 &&
|
||||
queryType.fields[0].type.name.includes('query')
|
||||
) {
|
||||
queryTypeName = queryType.fields[0].type.name;
|
||||
root = queryType.fields[0].name;
|
||||
}
|
||||
|
||||
if (
|
||||
mutationType.fields.length === 1 &&
|
||||
mutationType.fields[0].type.name.includes('mutation')
|
||||
) {
|
||||
mutationTypeName = mutationType.fields[0].type.name;
|
||||
root = mutationType.fields[0].name;
|
||||
}
|
||||
|
||||
const queries = microfiber.getType({
|
||||
kind: 'OBJECT',
|
||||
name: queryTypeName,
|
||||
}).fields;
|
||||
const mutations = microfiber.getType({
|
||||
kind: 'OBJECT',
|
||||
name: mutationTypeName,
|
||||
}).fields;
|
||||
|
||||
return {
|
||||
root,
|
||||
operations: [...queries, ...mutations],
|
||||
};
|
||||
};
|
||||
|
||||
const generators: Record<EndpointType, Generator> = {
|
||||
VIEW: {
|
||||
regExp: /fetch data from the table: "(.+)" using primary key columns$/,
|
||||
generator: generateViewEndpoint,
|
||||
},
|
||||
VIEW_ALL: {
|
||||
regExp: /fetch data from the table: "(.+)"$/,
|
||||
generator: generateViewAllEndpoint,
|
||||
},
|
||||
CREATE: {
|
||||
regExp: /insert a single row into the table: "(.+)"$/,
|
||||
generator: generateInsertEndpoint,
|
||||
},
|
||||
UPDATE: {
|
||||
regExp: /update single row of the table: "(.+)"$/,
|
||||
generator: generateUpdateEndpoint,
|
||||
},
|
||||
|
||||
DELETE: {
|
||||
regExp: /delete single row from the table: "(.+)"$/,
|
||||
generator: generateDeleteEndpoint,
|
||||
},
|
||||
};
|
||||
|
||||
export const useRestEndpointDefinitions = () => {
|
||||
const {
|
||||
data: introspectionSchema,
|
||||
isLoading,
|
||||
error,
|
||||
} = useIntrospectionSchema();
|
||||
|
||||
const [data, setData] = useState<EndpointDefinitions>();
|
||||
|
||||
useEffect(() => {
|
||||
if (introspectionSchema) {
|
||||
const response: EndpointDefinitions = {};
|
||||
const microfiber = new Microfiber(introspectionSchema);
|
||||
|
||||
const operations = getOperations(microfiber);
|
||||
|
||||
if (!operations) {
|
||||
setData({});
|
||||
return;
|
||||
}
|
||||
|
||||
for (const operation of operations.operations) {
|
||||
for (const endpointType in generators) {
|
||||
const match = operation.description.match(
|
||||
generators[endpointType as EndpointType].regExp
|
||||
);
|
||||
const table = match?.[1];
|
||||
|
||||
if (match) {
|
||||
const definition = generators[
|
||||
endpointType as EndpointType
|
||||
].generator(operations.root, table, operation, microfiber);
|
||||
|
||||
if (definition.query.query) {
|
||||
definition.query.query = formatSdl(definition.query.query);
|
||||
}
|
||||
|
||||
response[table] = {
|
||||
...(response[table] || {}),
|
||||
[endpointType]: definition,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
setData(response);
|
||||
}
|
||||
}, [introspectionSchema]);
|
||||
|
||||
return {
|
||||
data,
|
||||
isLoading,
|
||||
isError: error,
|
||||
};
|
||||
};
|
@ -0,0 +1,437 @@
|
||||
import { formatSdl } from 'format-graphql';
|
||||
import { Microfiber } from 'microfiber';
|
||||
import introspectionNoCustom from './mocks/introspectionWithoutCustomizations.json';
|
||||
import introspectionCustom from './mocks/introspectionWithCustomizations.json';
|
||||
|
||||
import {
|
||||
generateDeleteEndpoint,
|
||||
generateInsertEndpoint,
|
||||
generateUpdateEndpoint,
|
||||
generateViewAllEndpoint,
|
||||
generateViewEndpoint,
|
||||
} from './utils';
|
||||
import { getOperations } from './useRestEndpointDefinitions';
|
||||
|
||||
const microfiberNoCustom = new Microfiber(introspectionNoCustom);
|
||||
const operationsWithoutCustom = getOperations(microfiberNoCustom);
|
||||
const microfiberCustom = new Microfiber(introspectionCustom);
|
||||
const operationsCustom = getOperations(microfiberCustom);
|
||||
|
||||
describe('generateViewEndpoint', () => {
|
||||
it('should generate a query and a rest endpoint for a view operation without customizations', () => {
|
||||
const table = 'user';
|
||||
|
||||
const { query, restEndpoint } = generateViewEndpoint(
|
||||
operationsWithoutCustom?.root,
|
||||
table,
|
||||
operationsWithoutCustom?.operations?.find(
|
||||
({ name }) => name === 'user_by_pk'
|
||||
),
|
||||
microfiberNoCustom
|
||||
);
|
||||
|
||||
expect(query).toEqual({
|
||||
name: 'user_by_pk',
|
||||
query: formatSdl(`query user_by_pk($id: Int!) {
|
||||
user_by_pk(id: $id) {
|
||||
address
|
||||
bool
|
||||
count
|
||||
date
|
||||
id
|
||||
name
|
||||
uuid
|
||||
}
|
||||
}`),
|
||||
});
|
||||
expect(restEndpoint).toEqual({
|
||||
name: 'user_by_pk',
|
||||
url: `${table}/:id`,
|
||||
methods: ['GET'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: 'user_by_pk',
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
});
|
||||
});
|
||||
|
||||
it('should generate a query and a rest endpoint for a view operation with customizations', () => {
|
||||
const table = 'user';
|
||||
|
||||
const { query, restEndpoint } = generateViewEndpoint(
|
||||
operationsCustom?.root,
|
||||
table,
|
||||
operationsCustom?.operations?.find(
|
||||
({ name }) => name === 'a_user_by_pk_b'
|
||||
),
|
||||
microfiberCustom
|
||||
);
|
||||
|
||||
expect(query).toEqual({
|
||||
name: 'a_user_by_pk_b',
|
||||
query: formatSdl(`query a_user_by_pk_b($id: Int!) {
|
||||
root_ {
|
||||
a_user_by_pk_b(id: $id) {
|
||||
address
|
||||
bool
|
||||
count
|
||||
date
|
||||
id
|
||||
name
|
||||
uuid
|
||||
}
|
||||
}
|
||||
}`),
|
||||
});
|
||||
expect(restEndpoint).toEqual({
|
||||
name: 'a_user_by_pk_b',
|
||||
url: `${table}/:id`,
|
||||
methods: ['GET'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: 'a_user_by_pk_b',
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateViewAllEndpoint', () => {
|
||||
it('should generate a query and a rest endpoint for a view all operation without customizations', () => {
|
||||
const table = 'user';
|
||||
|
||||
const { query, restEndpoint } = generateViewAllEndpoint(
|
||||
operationsWithoutCustom?.root,
|
||||
table,
|
||||
operationsWithoutCustom?.operations?.find(({ name }) => name === 'user'),
|
||||
microfiberNoCustom
|
||||
);
|
||||
|
||||
expect(query).toEqual({
|
||||
name: 'user',
|
||||
query: formatSdl(`query user {
|
||||
user {
|
||||
address
|
||||
bool
|
||||
count
|
||||
date
|
||||
id
|
||||
name
|
||||
uuid
|
||||
}
|
||||
}`),
|
||||
});
|
||||
expect(restEndpoint).toEqual({
|
||||
name: 'user',
|
||||
url: `${table}`,
|
||||
methods: ['GET'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: 'user',
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
});
|
||||
});
|
||||
|
||||
it('should generate a query and a rest endpoint for a view all operation with customizations', () => {
|
||||
const table = 'user';
|
||||
|
||||
const { query, restEndpoint } = generateViewAllEndpoint(
|
||||
operationsCustom?.root,
|
||||
table,
|
||||
operationsCustom?.operations?.find(({ name }) => name === 'a_user_b'),
|
||||
microfiberCustom
|
||||
);
|
||||
|
||||
expect(query).toEqual({
|
||||
name: 'a_user_b',
|
||||
query: formatSdl(`query a_user_b {
|
||||
root_ {
|
||||
a_user_b {
|
||||
address
|
||||
bool
|
||||
count
|
||||
date
|
||||
id
|
||||
name
|
||||
uuid
|
||||
}
|
||||
}
|
||||
}`),
|
||||
});
|
||||
expect(restEndpoint).toEqual({
|
||||
name: 'a_user_b',
|
||||
url: `${table}`,
|
||||
methods: ['GET'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: 'a_user_b',
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateDeleteEndpoint', () => {
|
||||
it('should generate a query and a rest endpoint for a delete operation without customizations', () => {
|
||||
const table = 'user';
|
||||
|
||||
const { query, restEndpoint } = generateDeleteEndpoint(
|
||||
operationsWithoutCustom?.root,
|
||||
table,
|
||||
operationsWithoutCustom?.operations?.find(
|
||||
({ name }) => name === 'delete_user_by_pk'
|
||||
),
|
||||
microfiberNoCustom
|
||||
);
|
||||
|
||||
expect(query).toEqual({
|
||||
name: 'delete_user_by_pk',
|
||||
query: formatSdl(`mutation delete_user_by_pk($id: Int!) {
|
||||
delete_user_by_pk(id: $id) {
|
||||
address
|
||||
bool
|
||||
count
|
||||
date
|
||||
id
|
||||
name
|
||||
uuid
|
||||
}
|
||||
}`),
|
||||
});
|
||||
expect(restEndpoint).toEqual({
|
||||
name: 'delete_user_by_pk',
|
||||
url: `${table}/:id`,
|
||||
methods: ['DELETE'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: 'delete_user_by_pk',
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
});
|
||||
});
|
||||
|
||||
it('should generate a query and a rest endpoint for a delete operation with customizations', () => {
|
||||
const table = 'user';
|
||||
|
||||
const { query, restEndpoint } = generateDeleteEndpoint(
|
||||
operationsCustom?.root,
|
||||
table,
|
||||
operationsCustom?.operations?.find(
|
||||
({ name }) => name === 'a_delete_user_by_pk_b'
|
||||
),
|
||||
microfiberCustom
|
||||
);
|
||||
|
||||
expect(query).toEqual({
|
||||
name: 'a_delete_user_by_pk_b',
|
||||
query: formatSdl(`mutation a_delete_user_by_pk_b($id: Int!) {
|
||||
root_ {
|
||||
a_delete_user_by_pk_b(id: $id) {
|
||||
address
|
||||
bool
|
||||
count
|
||||
date
|
||||
id
|
||||
name
|
||||
uuid
|
||||
}
|
||||
}
|
||||
}`),
|
||||
});
|
||||
expect(restEndpoint).toEqual({
|
||||
name: 'a_delete_user_by_pk_b',
|
||||
url: `${table}/:id`,
|
||||
methods: ['DELETE'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: 'a_delete_user_by_pk_b',
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateUpdateEndpoint', () => {
|
||||
it('should generate a query and a rest endpoint for an update operation without customizations', () => {
|
||||
const table = 'user';
|
||||
|
||||
const { query, restEndpoint } = generateUpdateEndpoint(
|
||||
operationsWithoutCustom?.root,
|
||||
table,
|
||||
operationsWithoutCustom?.operations?.find(
|
||||
({ name }) => name === 'update_user_by_pk'
|
||||
),
|
||||
microfiberNoCustom
|
||||
);
|
||||
|
||||
expect(query).toEqual({
|
||||
name: 'update_user_by_pk',
|
||||
query:
|
||||
formatSdl(`mutation update_user_by_pk($id: user_pk_columns_input!, $object: user_set_input!) {
|
||||
update_user_by_pk(pk_columns: $id, _set: $object) {
|
||||
address
|
||||
bool
|
||||
count
|
||||
date
|
||||
id
|
||||
name
|
||||
uuid
|
||||
}
|
||||
}`),
|
||||
});
|
||||
expect(restEndpoint).toEqual({
|
||||
name: 'update_user_by_pk',
|
||||
url: `${table}/:id`,
|
||||
methods: ['POST'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: 'update_user_by_pk',
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
});
|
||||
});
|
||||
|
||||
it('should generate a query and a rest endpoint for an update operation with customizations', () => {
|
||||
const table = 'user';
|
||||
|
||||
const { query, restEndpoint } = generateUpdateEndpoint(
|
||||
operationsCustom?.root,
|
||||
table,
|
||||
operationsCustom?.operations?.find(
|
||||
({ name }) => name === 'a_update_user_by_pk_b'
|
||||
),
|
||||
microfiberCustom
|
||||
);
|
||||
|
||||
expect(query).toEqual({
|
||||
name: 'a_update_user_by_pk_b',
|
||||
query:
|
||||
formatSdl(`mutation a_update_user_by_pk_b($id: c_user_pk_columns_input_d!, $object: c_user_set_input_d!) {
|
||||
root_ {
|
||||
a_update_user_by_pk_b(pk_columns: $id, _set: $object) {
|
||||
address
|
||||
bool
|
||||
count
|
||||
date
|
||||
id
|
||||
name
|
||||
uuid
|
||||
}
|
||||
}
|
||||
}`),
|
||||
});
|
||||
expect(restEndpoint).toEqual({
|
||||
name: 'a_update_user_by_pk_b',
|
||||
url: `${table}/:id`,
|
||||
methods: ['POST'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: 'a_update_user_by_pk_b',
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateInsertEndpoint', () => {
|
||||
it('should generate a query and a rest endpoint for an insert operation without customizations', () => {
|
||||
const table = 'user';
|
||||
|
||||
const { query, restEndpoint } = generateInsertEndpoint(
|
||||
operationsWithoutCustom?.root,
|
||||
table,
|
||||
operationsWithoutCustom?.operations?.find(
|
||||
({ name }) => name === 'insert_user_one'
|
||||
),
|
||||
microfiberNoCustom
|
||||
);
|
||||
|
||||
expect(query).toEqual({
|
||||
name: 'insert_user_one',
|
||||
query: formatSdl(`mutation insert_user_one($object: user_insert_input!) {
|
||||
insert_user_one(object: $object) {
|
||||
address
|
||||
bool
|
||||
count
|
||||
date
|
||||
id
|
||||
name
|
||||
uuid
|
||||
}
|
||||
}`),
|
||||
});
|
||||
expect(restEndpoint).toEqual({
|
||||
name: 'insert_user_one',
|
||||
url: `${table}`,
|
||||
methods: ['POST'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: 'insert_user_one',
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
});
|
||||
});
|
||||
|
||||
it('should generate a query and a rest endpoint for an insert operation with customizations', () => {
|
||||
const table = 'user';
|
||||
|
||||
const { query, restEndpoint } = generateInsertEndpoint(
|
||||
operationsCustom?.root,
|
||||
table,
|
||||
operationsCustom?.operations?.find(
|
||||
({ name }) => name === 'a_insert_user_one_b'
|
||||
),
|
||||
microfiberCustom
|
||||
);
|
||||
|
||||
expect(query).toEqual({
|
||||
name: 'a_insert_user_one_b',
|
||||
query:
|
||||
formatSdl(`mutation a_insert_user_one_b($object: c_user_insert_input_d!) {
|
||||
root_ {
|
||||
a_insert_user_one_b(object: $object) {
|
||||
address
|
||||
bool
|
||||
count
|
||||
date
|
||||
id
|
||||
name
|
||||
uuid
|
||||
}
|
||||
}
|
||||
}`),
|
||||
});
|
||||
expect(restEndpoint).toEqual({
|
||||
name: 'a_insert_user_one_b',
|
||||
url: `${table}`,
|
||||
methods: ['POST'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: 'a_insert_user_one_b',
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
});
|
||||
});
|
||||
});
|
@ -0,0 +1,238 @@
|
||||
import { formatSdl } from 'format-graphql';
|
||||
import { Query, RestEndpoint } from '../../hasura-metadata-types';
|
||||
import { Generator } from './useRestEndpointDefinitions';
|
||||
|
||||
type GraphQLType = {
|
||||
kind: string;
|
||||
name?: string;
|
||||
ofType?: GraphQLType;
|
||||
};
|
||||
|
||||
const wrapRoot = (root: string, operation: string) => {
|
||||
return root ? `${root} { ${operation} }` : operation;
|
||||
};
|
||||
|
||||
const extractFields = (operation: any, microfiber: any) => {
|
||||
const type = microfiber.getType(recursiveType(operation.type));
|
||||
const fields = type?.fields
|
||||
?.filter((field: any) => recursiveType(field.type)?.kind === 'SCALAR')
|
||||
?.map((f: { name: string }) => f.name);
|
||||
return { fields };
|
||||
};
|
||||
|
||||
export const recursiveType = (type?: GraphQLType): GraphQLType | undefined => {
|
||||
if (!type) {
|
||||
return undefined;
|
||||
}
|
||||
if (['OBJECT', 'SCALAR', 'INPUT_OBJECT'].includes(type.kind)) {
|
||||
return type;
|
||||
} else {
|
||||
return recursiveType(type.ofType);
|
||||
}
|
||||
};
|
||||
export const generateViewEndpoint: Generator['generator'] = (
|
||||
root,
|
||||
table,
|
||||
operation,
|
||||
microfiber
|
||||
) => {
|
||||
const { fields } = extractFields(operation, microfiber);
|
||||
|
||||
const idType = recursiveType(
|
||||
operation.args?.find((arg: any) => arg.name === 'id')?.type
|
||||
)?.name;
|
||||
|
||||
const grapqhlOperation = `
|
||||
${operation.name}(id: $id) {
|
||||
${fields?.join('\n')}
|
||||
}
|
||||
`;
|
||||
|
||||
const query: Query = {
|
||||
name: operation.name,
|
||||
query: formatSdl(`
|
||||
query ${operation.name}($id: ${idType}!) {
|
||||
${wrapRoot(root, grapqhlOperation)}
|
||||
}`),
|
||||
};
|
||||
|
||||
const restEndpoint: RestEndpoint = {
|
||||
name: operation.name,
|
||||
url: `${table}/:id`,
|
||||
methods: ['GET'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: operation.name,
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
};
|
||||
|
||||
return { query, restEndpoint };
|
||||
};
|
||||
|
||||
export const generateViewAllEndpoint: Generator['generator'] = (
|
||||
root,
|
||||
table,
|
||||
operation,
|
||||
microfiber
|
||||
) => {
|
||||
const { fields } = extractFields(operation, microfiber);
|
||||
|
||||
const grapqhlOperation = `
|
||||
${operation.name} {
|
||||
${fields?.join('\n')}
|
||||
}
|
||||
`;
|
||||
|
||||
const query: Query = {
|
||||
name: operation.name,
|
||||
query: formatSdl(`
|
||||
query ${operation.name} {
|
||||
${wrapRoot(root, grapqhlOperation)}
|
||||
}`),
|
||||
};
|
||||
|
||||
const restEndpoint: RestEndpoint = {
|
||||
name: operation.name,
|
||||
url: `${table}`,
|
||||
methods: ['GET'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: operation.name,
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
};
|
||||
|
||||
return { query, restEndpoint };
|
||||
};
|
||||
|
||||
export const generateDeleteEndpoint: Generator['generator'] = (
|
||||
root,
|
||||
table,
|
||||
operation,
|
||||
microfiber
|
||||
) => {
|
||||
const { fields } = extractFields(operation, microfiber);
|
||||
const idType = recursiveType(
|
||||
operation.args?.find((arg: any) => arg.name === 'id')?.type
|
||||
)?.name;
|
||||
|
||||
const grapqhlOperation = `
|
||||
${operation.name}(id: $id) {
|
||||
${fields?.join('\n')}
|
||||
}
|
||||
`;
|
||||
|
||||
const query: Query = {
|
||||
name: operation.name,
|
||||
query: formatSdl(`
|
||||
mutation ${operation.name}($id: ${idType}!) {
|
||||
${wrapRoot(root, grapqhlOperation)}
|
||||
}`),
|
||||
};
|
||||
|
||||
const restEndpoint: RestEndpoint = {
|
||||
name: operation.name,
|
||||
url: `${table}/:id`,
|
||||
methods: ['DELETE'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: operation.name,
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
};
|
||||
|
||||
return { query, restEndpoint };
|
||||
};
|
||||
|
||||
export const generateInsertEndpoint: Generator['generator'] = (
|
||||
root,
|
||||
table,
|
||||
operation,
|
||||
microfiber
|
||||
) => {
|
||||
const { fields } = extractFields(operation, microfiber);
|
||||
const inputType = recursiveType(
|
||||
operation.args?.find((arg: any) => arg.name === 'object')?.type
|
||||
)?.name;
|
||||
|
||||
const grapqhlOperation = `
|
||||
${operation.name}(object: $object) {
|
||||
${fields?.join('\n')}
|
||||
}
|
||||
`;
|
||||
|
||||
const query: Query = {
|
||||
name: operation.name,
|
||||
query: formatSdl(`
|
||||
mutation ${operation.name}($object: ${inputType}!) {
|
||||
${wrapRoot(root, grapqhlOperation)}
|
||||
}`),
|
||||
};
|
||||
|
||||
const restEndpoint: RestEndpoint = {
|
||||
name: operation.name,
|
||||
url: `${table}`,
|
||||
methods: ['POST'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: operation.name,
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
};
|
||||
|
||||
return { query, restEndpoint };
|
||||
};
|
||||
|
||||
export const generateUpdateEndpoint: Generator['generator'] = (
|
||||
root,
|
||||
table,
|
||||
operation,
|
||||
microfiber
|
||||
) => {
|
||||
const { fields } = extractFields(operation, microfiber);
|
||||
const idType = recursiveType(
|
||||
operation.args?.find((arg: any) => arg.name === 'pk_columns')?.type
|
||||
)?.name;
|
||||
|
||||
const inputType = recursiveType(
|
||||
operation.args?.find((arg: any) => arg.name === '_set')?.type
|
||||
)?.name;
|
||||
|
||||
const grapqhlOperation = `
|
||||
${operation.name}(pk_columns: $id, _set: $object) {
|
||||
${fields?.join('\n')}
|
||||
}
|
||||
`;
|
||||
|
||||
const query: Query = {
|
||||
name: operation.name,
|
||||
query: formatSdl(`
|
||||
mutation ${operation.name}($id: ${idType}!, $object: ${inputType}!) {
|
||||
${wrapRoot(root, grapqhlOperation)}
|
||||
}`),
|
||||
};
|
||||
|
||||
const restEndpoint: RestEndpoint = {
|
||||
name: operation.name,
|
||||
url: `${table}/:id`,
|
||||
methods: ['POST'],
|
||||
definition: {
|
||||
query: {
|
||||
query_name: operation.name,
|
||||
collection_name: 'allowed-queries',
|
||||
},
|
||||
},
|
||||
comment: '',
|
||||
};
|
||||
|
||||
return { query, restEndpoint };
|
||||
};
|
@ -0,0 +1,4 @@
|
||||
export {
|
||||
metadataHandlers,
|
||||
restEndpointsInitialData,
|
||||
} from './mocks/metadata.mock';
|
@ -0,0 +1,75 @@
|
||||
import produce from 'immer';
|
||||
|
||||
import { allowedMetadataTypes } from '../../MetadataAPI';
|
||||
import { Metadata, RestEndpoint } from '../../hasura-metadata-types';
|
||||
import { MetadataReducer } from '../../../mocks/actions';
|
||||
|
||||
export const restEndpointsInitialData: Partial<Metadata['metadata']> = {
|
||||
rest_endpoints: [
|
||||
{
|
||||
comment: '',
|
||||
definition: {
|
||||
query: {
|
||||
collection_name: 'allowed-queries',
|
||||
query_name: 'test',
|
||||
},
|
||||
},
|
||||
methods: ['GET'],
|
||||
name: 'test',
|
||||
url: 'test',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
export const metadataHandlers: Partial<
|
||||
Record<allowedMetadataTypes, MetadataReducer>
|
||||
> = {
|
||||
create_rest_endpoint: (state, action) => {
|
||||
const endpoint = action.args as RestEndpoint;
|
||||
const existingEndpoint = (state.metadata.rest_endpoints || []).find(
|
||||
c => c.name === endpoint.name
|
||||
);
|
||||
if (existingEndpoint) {
|
||||
return {
|
||||
status: 400,
|
||||
error: {
|
||||
path: '$.args.name',
|
||||
error: `rest endpoint with name "${endpoint.name}" already exists`,
|
||||
code: 'already-exists',
|
||||
},
|
||||
};
|
||||
}
|
||||
const existingQueryCollection = (
|
||||
state.metadata.query_collections || []
|
||||
).find(c => c.name === endpoint.definition.query.collection_name);
|
||||
if (!existingQueryCollection) {
|
||||
return {
|
||||
status: 400,
|
||||
error: {
|
||||
path: '$.args.query.collection_name',
|
||||
error: `query collection with name "${endpoint.definition.query.collection_name}" does not exist`,
|
||||
code: 'not-exists',
|
||||
},
|
||||
};
|
||||
}
|
||||
const existingQuery = (
|
||||
existingQueryCollection.definition?.queries || []
|
||||
).find(q => q.name === endpoint.definition.query.query_name);
|
||||
if (!existingQuery) {
|
||||
return {
|
||||
status: 400,
|
||||
error: {
|
||||
path: '$.args.query.query_name',
|
||||
error: `query with name "${endpoint.definition.query.query_name}" does not exist in query collection "${endpoint.definition.query.collection_name}"`,
|
||||
code: 'not-exists',
|
||||
},
|
||||
};
|
||||
}
|
||||
return produce(state, draft => {
|
||||
draft.metadata.rest_endpoints = [
|
||||
...(draft.metadata.rest_endpoints || []),
|
||||
endpoint,
|
||||
];
|
||||
});
|
||||
},
|
||||
};
|
@ -6,6 +6,7 @@ import { metadataHandlers as adhocEventMetadataHandlers } from '../features/Adho
|
||||
import { metadataHandlers as queryCollectionMetadataHandlers } from '../features/QueryCollections';
|
||||
import { metadataHandlers as openTelemetryMetadataHandlers } from '../features/OpenTelemetry';
|
||||
import { metadataHandlers as dataMetadataHandlers } from '../features/Data';
|
||||
import { metadataHandlers as restEndpointsMetadataHandlers } from '../features/RestEndpoints';
|
||||
|
||||
import { TMigration } from '../features/MetadataAPI/hooks/useMetadataMigration';
|
||||
|
||||
@ -34,6 +35,7 @@ const metadataHandlers: Partial<Record<allowedMetadataTypes, MetadataReducer>> =
|
||||
...adhocEventMetadataHandlers,
|
||||
...openTelemetryMetadataHandlers,
|
||||
...dataMetadataHandlers,
|
||||
...restEndpointsMetadataHandlers,
|
||||
};
|
||||
|
||||
export const metadataReducer: MetadataReducer = (state, action) => {
|
||||
|
@ -5,6 +5,7 @@ import type { Metadata } from '../features/hasura-metadata-types';
|
||||
import { allowListInitialData } from '../features/AllowLists';
|
||||
import { queryCollectionInitialData } from '../features/QueryCollections';
|
||||
import { openTelemetryInitialData } from '../features/OpenTelemetry';
|
||||
import { restEndpointsInitialData } from '../features/RestEndpoints';
|
||||
import { dataInitialData } from '../features/Data';
|
||||
|
||||
import { rest } from 'msw';
|
||||
@ -20,6 +21,7 @@ export const createDefaultInitialData = (): Metadata => ({
|
||||
...queryCollectionInitialData,
|
||||
...openTelemetryInitialData,
|
||||
...dataInitialData,
|
||||
...restEndpointsInitialData,
|
||||
},
|
||||
});
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user