Mirror of https://github.com/hasura/graphql-engine.git (synced 2024-12-14 08:02:15 +03:00)
console: add tests for install metadata and migration template hooks
PR-URL: https://github.com/hasura/graphql-engine-mono/pull/6110
GitOrigin-RevId: 4ea89f60ba719ae8210d700893fcf30a4c4aab0a
parent: e2dc37ab60
commit: 9834acdb8a
@@ -0,0 +1,142 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`Check useInstallMetadata installs the correct metadata should install the correct metadata and call success callback 1`] = `
Object {
  "args": Object {
    "sources": Array [
      Object {
        "configuration": Object {
          "connection_info": Object {
            "database_url": "postgres://postgres:postgrespassword@postgres:5432/postgres",
            "isolation_level": "read-committed",
            "use_prepared_statements": false,
          },
        },
        "functions": Array [],
        "kind": "postgres",
        "name": "default",
        "tables": Array [
          Object {
            "array_relationships": Array [
              Object {
                "name": "orders",
                "using": Object {
                  "foreign_key_constraint_on": Object {
                    "column": "customer_id",
                    "table": Object {
                      "name": "order",
                      "schema": "public",
                    },
                  },
                },
              },
            ],
            "table": Object {
              "name": "customer",
              "schema": "public",
            },
          },
          Object {
            "object_relationships": Array [
              Object {
                "name": "customer",
                "using": Object {
                  "foreign_key_constraint_on": "customer_id",
                },
              },
            ],
            "table": Object {
              "name": "order",
              "schema": "public",
            },
          },
        ],
      },
      Object {
        "configuration": Object {
          "connection_info": Object {
            "database_url": "postgres://abhijeet:dxefFu5yvVJ2@fragrant-firefly-499238.cloud.neon.tech/main",
            "isolation_level": "read-committed",
            "use_prepared_statements": false,
          },
        },
        "customization": Object {
          "naming_convention": "hasura-default",
        },
        "kind": "postgres",
        "name": "fragrant-firefly",
        "tables": Array [
          Object {
            "array_relationships": Array [
              Object {
                "name": "article_tags",
                "using": Object {
                  "foreign_key_constraint_on": Object {
                    "column": "article_id",
                    "table": Object {
                      "name": "article_tag",
                      "schema": "_manytomany",
                    },
                  },
                },
              },
            ],
            "table": Object {
              "name": "article",
              "schema": "_manytomany",
            },
          },
          Object {
            "object_relationships": Array [
              Object {
                "name": "article",
                "using": Object {
                  "foreign_key_constraint_on": "article_id",
                },
              },
              Object {
                "name": "tag",
                "using": Object {
                  "foreign_key_constraint_on": "tag_id",
                },
              },
            ],
            "table": Object {
              "name": "article_tag",
              "schema": "_manytomany",
            },
          },
          Object {
            "array_relationships": Array [
              Object {
                "name": "article_tags",
                "using": Object {
                  "foreign_key_constraint_on": Object {
                    "column": "tag_id",
                    "table": Object {
                      "name": "article_tag",
                      "schema": "_manytomany",
                    },
                  },
                },
              },
            ],
            "table": Object {
              "name": "tag",
              "schema": "_manytomany",
            },
          },
          Object {
            "table": Object {
              "name": "sample_table",
              "schema": "public",
            },
          },
        ],
      },
    ],
    "version": 3,
  },
  "type": "replace_metadata",
}
`;
@@ -0,0 +1,170 @@
import React, { ReactNode } from 'react';
import { matchRequestUrl, MockedRequest } from 'msw';
import { waitFor, screen, render } from '@testing-library/react';
import { QueryClient, QueryClientProvider } from 'react-query';
import { setupServer } from 'msw/node';
import { Provider as ReduxProvider } from 'react-redux';
import { configureStore } from '@reduxjs/toolkit';
import {
  fetchGithubMetadataHandler,
  metadataFailureHandler,
  metadataSuccessHandler,
  mockGithubServerDownHandler,
} from '../mocks/handlers.mock';
import { useInstallMetadata } from './useInstallMetadata';
import Endpoints from '../../../Endpoints';
import {
  mockMetadataUrl,
  MOCK_INITIAL_METADATA,
  serverDownErrorMessage,
} from '../mocks/constants';

const server = setupServer();

function waitForRequest(method: string, url: string) {
  let requestId = '';
  return new Promise<MockedRequest>((resolve, reject) => {
    server.events.on('request:start', async req => {
      const matchesMethod = req.method.toLowerCase() === method.toLowerCase();
      const matchesUrl = matchRequestUrl(req.url, url).matches;
      let matchesType = false;

      try {
        const reqbody = await req.json();
        matchesType = reqbody?.type === 'replace_metadata';
      } catch (err) {
        // not a metadata request, can be ignored
      }

      if (matchesMethod && matchesUrl && matchesType) {
        requestId = req.id;
      }
    });
    server.events.on('request:match', req => {
      if (req.id === requestId) {
        resolve(req);
      }
    });
    server.events.on('request:unhandled', req => {
      if (req.id === requestId) {
        reject(
          new Error(`The ${req.method} ${req.url.href} request was unhandled.`)
        );
      }
    });
  });
}

let reactQueryClient = new QueryClient();

beforeAll(() => server.listen({ onUnhandledRequest: 'warn' }));
beforeEach(() => {
  // provide a fresh reactQueryClient for each test to prevent state caching among tests
  reactQueryClient = new QueryClient();

  // don't retry failed queries, overrides the default behaviour. This is done as otherwise we'll
  // need to add a significant wait time (~10000 ms) to the test to wait for all the 3 retries (react-query default)
  // to fail, for the error callback to be called. Till then the state is loading.
  reactQueryClient.setDefaultOptions({
    queries: {
      retry: false,
    },
  });
});
afterEach(() => server.resetHandlers());
afterAll(() => server.close());

const onSuccessCb = jest.fn(() => {});

const onErrorCb = jest.fn(() => {});

const Component = () => {
  const { updateMetadata } = useInstallMetadata(
    'default',
    mockMetadataUrl,
    onSuccessCb,
    onErrorCb
  );

  React.useEffect(() => {
    if (updateMetadata) {
      updateMetadata();
    }
  }, [updateMetadata]);

  return <div>Welcome</div>;
};

type Props = {
  children?: ReactNode;
};

const store = configureStore({
  reducer: {
    tables: () => ({ currentDataSource: 'postgres', dataHeaders: {} }),
    metadata: () => ({
      metadataObject: MOCK_INITIAL_METADATA,
    }),
  },
});

const wrapper = ({ children }: Props) => (
  <ReduxProvider store={store} key="provider">
    <QueryClientProvider client={reactQueryClient}>
      {children}
    </QueryClientProvider>
  </ReduxProvider>
);

describe('Check useInstallMetadata installs the correct metadata', () => {
  it('should install the correct metadata and call success callback', async () => {
    server.use(fetchGithubMetadataHandler, metadataSuccessHandler);
    const pendingRequest = waitForRequest('POST', Endpoints.metadata);

    render(<Component />, { wrapper });

    // STEP 1: expect our mock component renders successfully
    expect(screen.queryByText('Welcome')).toBeInTheDocument();

    // STEP 2: expect success callback to be called, after successful `replace_metadata` request
    await waitFor(() => expect(onSuccessCb).toHaveBeenCalledTimes(1));

    // STEP 3: expect the correct metadata being sent to the server
    const replaceMetadataRequest = await pendingRequest;
    expect(JSON.parse(replaceMetadataRequest.body as string)).toMatchSnapshot();
  });

  it('fails to fetch metadata file from github, should call the error callback', async () => {
    server.use(
      mockGithubServerDownHandler(mockMetadataUrl),
      metadataSuccessHandler
    );
    render(<Component />, { wrapper });

    // STEP 1: expect our mock component renders successfully
    expect(screen.queryByText('Welcome')).toBeInTheDocument();

    // STEP 2: expect error callback to be called, after fetching metadata file from github fails
    await waitFor(() => expect(onErrorCb).toHaveBeenCalledTimes(1));

    // STEP 3: expect error callback to be called with correct arguments
    const errorMessage = `Failed to fetch metadata from the provided Url: ${mockMetadataUrl}`;
    await waitFor(() => expect(onErrorCb).toHaveBeenCalledWith(errorMessage));
  });

  it('fails to apply metadata to server, should call the error callback', async () => {
    server.use(fetchGithubMetadataHandler, metadataFailureHandler);

    render(<Component />, { wrapper });

    // STEP 1: expect our mock component renders successfully
    expect(screen.queryByText('Welcome')).toBeInTheDocument();

    // STEP 2: expect error callback to be called, after applying metadata to server fails
    await waitFor(() => expect(onErrorCb).toHaveBeenCalledTimes(1));

    // STEP 3: expect error callback to be called with correct arguments
    const errorMessage = JSON.stringify(serverDownErrorMessage);
    await waitFor(() => expect(onErrorCb).toHaveBeenCalledWith(errorMessage));
  });
});
@@ -0,0 +1,130 @@
import React, { ReactNode } from 'react';
import { waitFor, screen, render } from '@testing-library/react';
import { QueryClient, QueryClientProvider } from 'react-query';
import { setupServer } from 'msw/node';
import { Provider as ReduxProvider } from 'react-redux';
import { configureStore } from '@reduxjs/toolkit';
import {
  fetchGithubMigrationHandler,
  mockGithubServerDownHandler,
  querySuccessHandler,
  queryFailureHandler,
} from '../mocks/handlers.mock';
import {
  mockMigrationUrl,
  MOCK_INITIAL_METADATA,
  serverDownErrorMessage,
} from '../mocks/constants';
import { useInstallMigration } from './useInstallMigration';

const server = setupServer();

let reactQueryClient = new QueryClient();

beforeAll(() => server.listen({ onUnhandledRequest: 'warn' }));
beforeEach(() => {
  // provide a fresh reactQueryClient for each test to prevent state caching among tests
  reactQueryClient = new QueryClient();

  // don't retry failed queries, overrides the default behaviour. This is done as otherwise we'll
  // need to add a significant wait time (~10000 ms) to the test to wait for all the 3 retries (react-query default)
  // to fail, for the error callback to be called. Till then the state is loading.
  reactQueryClient.setDefaultOptions({
    queries: {
      retry: false,
    },
  });
});
afterEach(() => server.resetHandlers());
afterAll(() => server.close());

const onSuccessCb = jest.fn(() => {});

const onErrorCb = jest.fn(() => {});

const Component = () => {
  // fetch the function to apply migration
  const { performMigration } = useInstallMigration(
    'default',
    mockMigrationUrl,
    onSuccessCb,
    onErrorCb
  );

  React.useEffect(() => {
    if (performMigration) {
      performMigration();
    }
  }, [performMigration]);

  return <div>Welcome</div>;
};

type Props = {
  children?: ReactNode;
};

const store = configureStore({
  reducer: {
    tables: () => ({ currentDataSource: 'postgres', dataHeaders: {} }),
    metadata: () => ({
      metadataObject: MOCK_INITIAL_METADATA,
    }),
  },
});

const wrapper = ({ children }: Props) => (
  <ReduxProvider store={store} key="provider">
    <QueryClientProvider client={reactQueryClient}>
      {children}
    </QueryClientProvider>
  </ReduxProvider>
);

describe('Check useInstallMigration installs the correct migrations', () => {
  it('should install the correct migration and call success callback', async () => {
    server.use(fetchGithubMigrationHandler, querySuccessHandler);

    render(<Component />, { wrapper });

    // STEP 1: expect our mock component renders successfully
    expect(screen.queryByText('Welcome')).toBeInTheDocument();

    // STEP 2: expect success callback to be called
    await waitFor(() => expect(onSuccessCb).toHaveBeenCalledTimes(1));
  });

  it('fails to fetch migration file from github, should call the error callback', async () => {
    server.use(
      mockGithubServerDownHandler(mockMigrationUrl),
      querySuccessHandler
    );
    render(<Component />, { wrapper });

    // STEP 1: expect our mock component renders successfully
    expect(screen.queryByText('Welcome')).toBeInTheDocument();

    // STEP 2: expect error callback to be called, after fetching migration file from github fails
    await waitFor(() => expect(onErrorCb).toHaveBeenCalledTimes(1));

    // STEP 3: expect error callback to be called with correct arguments
    const errorMessage = `Failed to fetch migration data from the provided Url: ${mockMigrationUrl}`;
    await waitFor(() => expect(onErrorCb).toHaveBeenCalledWith(errorMessage));
  });

  it('fails to apply migration to server, should call the error callback', async () => {
    server.use(fetchGithubMigrationHandler, queryFailureHandler);

    render(<Component />, { wrapper });

    // STEP 1: expect our mock component renders successfully
    expect(screen.queryByText('Welcome')).toBeInTheDocument();

    // STEP 2: expect error callback to be called, after applying migration to server fails
    await waitFor(() => expect(onErrorCb).toHaveBeenCalledTimes(1));

    // STEP 3: expect error callback to be called with correct arguments
    const errorMessage = JSON.stringify(serverDownErrorMessage);
    await waitFor(() => expect(onErrorCb).toHaveBeenCalledWith(errorMessage));
  });
});
@@ -0,0 +1,188 @@
import React, { ReactNode } from 'react';
import { waitFor, screen, render } from '@testing-library/react';
import { QueryClient, QueryClientProvider } from 'react-query';
import { setupServer } from 'msw/node';
import { Provider as ReduxProvider } from 'react-redux';
import { configureStore } from '@reduxjs/toolkit';
import {
  mockGithubServerDownHandler,
  fetchGithubMetadataHandler,
  fetchGithubMigrationHandler,
  metadataFailureHandler,
  metadataSuccessHandler,
  queryFailureHandler,
  querySuccessHandler,
} from '../mocks/handlers.mock';
import {
  mockMetadataUrl,
  mockMigrationUrl,
  MOCK_INITIAL_METADATA,
  serverDownErrorMessage,
} from '../mocks/constants';
import { useInstallTemplate } from './useInstallTemplate';
import { NEON_TEMPLATE_BASE_PATH } from '../constants';

const server = setupServer();

let reactQueryClient = new QueryClient();

beforeAll(() => server.listen({ onUnhandledRequest: 'warn' }));
beforeEach(() => {
  // provide a fresh reactQueryClient for each test to prevent state caching among tests
  reactQueryClient = new QueryClient();

  // don't retry failed queries, overrides the default behaviour. This is done as otherwise we'll
  // need to add a significant wait time (~10000 ms) to the test to wait for all the 3 retries (react-query default)
  // to fail, for the error callback to be called. Till then the state is loading.
  reactQueryClient.setDefaultOptions({
    queries: {
      retry: false,
    },
  });
});
afterEach(() => server.resetHandlers());
afterAll(() => server.close());

const onSuccessCb = jest.fn(() => {});

const onErrorCb = jest.fn(() => {});

const Component = () => {
  // fetch the function that installs the template
  const { install } = useInstallTemplate(
    'default',
    NEON_TEMPLATE_BASE_PATH,
    onSuccessCb,
    onErrorCb
  );

  React.useEffect(() => {
    if (install) {
      install();
    }
  }, [install]);

  return <div>Welcome</div>;
};

type Props = {
  children?: ReactNode;
};

const store = configureStore({
  reducer: {
    tables: () => ({ currentDataSource: 'postgres', dataHeaders: {} }),
    metadata: () => ({
      metadataObject: MOCK_INITIAL_METADATA,
    }),
  },
});

const wrapper = ({ children }: Props) => (
  <ReduxProvider store={store} key="provider">
    <QueryClientProvider client={reactQueryClient}>
      {children}
    </QueryClientProvider>
  </ReduxProvider>
);

describe('Check useInstallTemplate installs the correct template', () => {
  it('should install the correct template and call success callback', async () => {
    server.use(
      fetchGithubMetadataHandler,
      fetchGithubMigrationHandler,
      querySuccessHandler,
      metadataSuccessHandler
    );

    render(<Component />, { wrapper });

    // STEP 1: expect our mock component renders successfully
    expect(screen.queryByText('Welcome')).toBeInTheDocument();

    // STEP 2: expect success callback to be called
    await waitFor(() => expect(onSuccessCb).toHaveBeenCalled());
  });

  it('fails to fetch metadata file from github, should call the error callback', async () => {
    server.use(
      mockGithubServerDownHandler(mockMetadataUrl),
      fetchGithubMigrationHandler,
      metadataSuccessHandler,
      querySuccessHandler
    );
    render(<Component />, { wrapper });

    // STEP 1: expect our mock component renders successfully
    expect(screen.queryByText('Welcome')).toBeInTheDocument();

    // STEP 2: expect error callback to be called, after fetching metadata file from github fails
    await waitFor(() => expect(onErrorCb).toHaveBeenCalled());

    // STEP 3: expect error callback to be called with correct arguments
    const errorMessage = `Failed to fetch metadata from the provided Url: ${mockMetadataUrl}`;
    await waitFor(() => expect(onErrorCb).toHaveBeenCalledWith(errorMessage));
  });

  it('fails to fetch migration file from github, should call the error callback', async () => {
    server.use(
      mockGithubServerDownHandler(mockMigrationUrl),
      fetchGithubMetadataHandler,
      metadataSuccessHandler,
      querySuccessHandler
    );
    render(<Component />, { wrapper });

    // STEP 1: expect our mock component renders successfully
    expect(screen.queryByText('Welcome')).toBeInTheDocument();

    // STEP 2: expect error callback to be called, after fetching migration file from github fails
    await waitFor(() => expect(onErrorCb).toHaveBeenCalled());

    // STEP 3: expect error callback to be called with correct arguments
    const errorMessage = `Failed to fetch migration data from the provided Url: ${mockMigrationUrl}`;
    await waitFor(() => expect(onErrorCb).toHaveBeenCalledWith(errorMessage));
  });

  it('fails to apply metadata to server, should call the error callback', async () => {
    server.use(
      fetchGithubMetadataHandler,
      fetchGithubMigrationHandler,
      querySuccessHandler,
      metadataFailureHandler
    );

    render(<Component />, { wrapper });

    // STEP 1: expect our mock component renders successfully
    expect(screen.queryByText('Welcome')).toBeInTheDocument();

    // STEP 2: expect error callback to be called, after applying metadata to server fails
    await waitFor(() => expect(onErrorCb).toHaveBeenCalled());

    // STEP 3: expect error callback to be called with correct arguments
    const errorMessage = JSON.stringify(serverDownErrorMessage);
    await waitFor(() => expect(onErrorCb).toHaveBeenCalledWith(errorMessage));
  });

  it('fails to apply migration to server, should call the error callback', async () => {
    server.use(
      fetchGithubMetadataHandler,
      fetchGithubMigrationHandler,
      metadataSuccessHandler,
      queryFailureHandler
    );

    render(<Component />, { wrapper });

    // STEP 1: expect our mock component renders successfully
    expect(screen.queryByText('Welcome')).toBeInTheDocument();

    // STEP 2: expect error callback to be called, after applying migration to server fails
    await waitFor(() => expect(onErrorCb).toHaveBeenCalled());

    // STEP 3: expect error callback to be called with correct arguments
    const errorMessage = JSON.stringify(serverDownErrorMessage);
    await waitFor(() => expect(onErrorCb).toHaveBeenCalledWith(errorMessage));
  });
});
@@ -1,5 +1,10 @@
import { GrowthExperimentsClient } from '@/features/GrowthExperiments';
import { SurveysResponseData } from '@/features/Surveys';
import {
  getMetadataUrl,
  getMigrationUrl,
  NEON_TEMPLATE_BASE_PATH,
} from '../constants';

export const mockGrowthClient: Record<string, GrowthExperimentsClient> = {
  /**
@@ -175,3 +180,251 @@ export const fetchSurveysDataResponse: Record<
    ],
  },
};

export const mockMetadataUrl = getMetadataUrl(NEON_TEMPLATE_BASE_PATH);
export const mockMigrationUrl = getMigrationUrl(NEON_TEMPLATE_BASE_PATH);

export const MOCK_INITIAL_METADATA = {
  version: 3,
  sources: [
    {
      name: 'default',
      kind: 'postgres',
      tables: [],
      configuration: {
        connection_info: {
          database_url:
            'postgres://postgres:postgrespassword@postgres:5432/postgres',
          isolation_level: 'read-committed',
          use_prepared_statements: false,
        },
      },
    },
    {
      name: 'fragrant-firefly',
      kind: 'postgres',
      tables: [
        {
          table: {
            name: 'article',
            schema: '_manytomany',
          },
          array_relationships: [
            {
              name: 'article_tags',
              using: {
                foreign_key_constraint_on: {
                  column: 'article_id',
                  table: {
                    name: 'article_tag',
                    schema: '_manytomany',
                  },
                },
              },
            },
          ],
        },
        {
          table: {
            name: 'article_tag',
            schema: '_manytomany',
          },
          object_relationships: [
            {
              name: 'article',
              using: {
                foreign_key_constraint_on: 'article_id',
              },
            },
            {
              name: 'tag',
              using: {
                foreign_key_constraint_on: 'tag_id',
              },
            },
          ],
        },
        {
          table: {
            name: 'tag',
            schema: '_manytomany',
          },
          array_relationships: [
            {
              name: 'article_tags',
              using: {
                foreign_key_constraint_on: {
                  column: 'tag_id',
                  table: {
                    name: 'article_tag',
                    schema: '_manytomany',
                  },
                },
              },
            },
          ],
        },
        {
          table: {
            name: 'sample_table',
            schema: 'public',
          },
        },
      ],
      configuration: {
        connection_info: {
          database_url:
            'postgres://abhijeet:dxefFu5yvVJ2@fragrant-firefly-499238.cloud.neon.tech/main',
          isolation_level: 'read-committed',
          use_prepared_statements: false,
        },
      },
      customization: {
        naming_convention: 'hasura-default',
      },
    },
  ],
};

export const MOCK_METADATA_FILE_CONTENTS = {
  resource_version: 12,
  metadata: {
    version: 3,
    sources: [
      {
        name: 'default',
        kind: 'postgres',
        tables: [
          {
            table: {
              name: 'customer',
              schema: 'public',
            },
            array_relationships: [
              {
                name: 'orders',
                using: {
                  foreign_key_constraint_on: {
                    column: 'customer_id',
                    table: {
                      name: 'order',
                      schema: 'public',
                    },
                  },
                },
              },
            ],
          },
          {
            table: {
              name: 'order',
              schema: 'public',
            },
            object_relationships: [
              {
                name: 'customer',
                using: {
                  foreign_key_constraint_on: 'customer_id',
                },
              },
            ],
          },
        ],
      },
    ],
  },
};

export const serverDownErrorMessage = {
  code: 'Service Unavailable',
  error: `The resource cannot be delivered`,
};

export const MOCK_MIGRATION_FILE_CONTENTS = `
CREATE TABLE "public"."customer" (
"id" int4,
"first_name" text,
"last_name" text,
"email" text,
"phone" text,
"username" text,
"ip_address" text,
PRIMARY KEY ("id")
);

CREATE TABLE "public"."order" (
"id" int4,
"transaction_id" text,
"product" text,
"purchase_price" text,
"discount_price" text,
"order_date" text,
"customer_id" int4,
PRIMARY KEY ("id")
);

ALTER TABLE "public"."order" ADD FOREIGN KEY ("customer_id") REFERENCES "public"."customer"("id");

INSERT INTO "public"."customer" ("id", "first_name", "last_name", "email", "phone", "username", "ip_address") VALUES
(1, 'Daisy', 'Syme', 'dsyme0@hp.com', '+20 (915) 874-5336', 'dsyme0', '42.14.173.181'),
(2, 'Berny', 'Linford', 'blinford1@odnoklassniki.ru', '+55 (659) 852-4292', 'blinford1', '84.112.166.217'),
(3, 'Krystal', 'Fretwell', 'kfretwell2@fda.gov', '+358 (577) 234-5107', 'kfretwell2', '16.230.140.234'),
(4, 'Donnell', 'Yve', 'dyve3@aboutads.info', '+81 (167) 244-5980', 'dyve3', '50.23.181.152'),
(5, 'Ola', 'Fretter', 'ofretter4@sitemeter.com', '+86 (690) 881-4182', 'ofretter4', '73.164.185.62'),
(6, 'Ximenes', 'Mote', 'xmote5@barnesandnoble.com', '+502 (536) 300-9224', 'xmote5', '175.219.84.213'),
(7, 'Ainslie', 'Davidzon', 'adavidzon6@cbslocal.com', '+254 (755) 803-4401', 'adavidzon6', '186.172.158.188'),
(8, 'Diego', 'Ellit', 'dellit7@walmart.com', '+62 (310) 893-0690', 'dellit7', '86.123.251.103'),
(9, 'Maximilien', 'Longbothom', 'mlongbothom8@yahoo.co.jp', '+33 (279) 317-1163', 'mlongbothom8', '5.68.95.19'),
(10, 'Garold', 'Pendock', 'gpendock9@foxnews.com', '+27 (948) 957-1398', 'gpendock9', '153.194.87.243');

INSERT INTO "public"."order" ("id", "transaction_id", "product", "purchase_price", "discount_price", "order_date", "customer_id") VALUES
(1, '1F3ZLKrcXyisvMa79GdQ3UCKcUpHprzkjX', 'Juice - Cranberry, 341 Ml', '$6.26 ', '$1.46 ', '10/19/2021', 7),
(2, '16WKVa7pV9xAUPC12ymbdFSA1TqLtfitSA', 'Pasta - Canelloni', '$4.74 ', '$1.82 ', '03/25/2022', 8),
(3, '19TbQiD2ijcDZCY9d3V8pxfj8U4punJxCo', 'Bread - Calabrese Baguette', '$9.83 ', '$0.45 ', '07/07/2022', 1),
(4, '1A2nhANZWqBcdCuz6BgTXNpiwCSrAMjbVQ', 'Ice Cream Bar - Rolo Cone', '$7.64 ', '$0.73 ', '12/26/2021', 2),
(5, '12VwQBESupdem6xHS1h4eqrvsffsyL3skU', 'Cookie - Oreo 100x2', '$9.03 ', '$1.83 ', '06/29/2022', 9),
(6, '1Co69PachjWpM2TfdnkrGuwaKpDDMRMBks', 'Bacardi Breezer - Tropical', '$2.43 ', '$0.10 ', '09/21/2022', 10),
(7, '16EXSRGT8iEgPiReq2rQfdAPDUm3wraM83', 'Lamb - Sausage Casings', '$8.06 ', '$1.26 ', '03/16/2022', 10),
(8, '1C64titz7BGGjt76nG9Vfzh3D4v8JjwyH5', 'Mini - Vol Au Vents', '$3.27 ', '$0.54 ', '01/15/2022', 5),
(9, '1H5Cujy9X9NgHfZf6rqeFXsczeUderCJUf', 'Table Cloth 54x54 Colour', '$9.26 ', '$1.08 ', '08/02/2022', 6),
(10, '14pLQTdLWX2rWZqqKqHosTdwwUX6iikgHD', 'Tomatoes - Hot House', '$2.61 ', '$1.27 ', '02/28/2022', 7),
(11, '1524M52setuYRjfdDF8xzqVg5TjL4bFrzo', 'Sauce - White, Mix', '$5.41 ', '$0.96 ', '01/06/2022', 2),
(12, '1GXisxNigWCmB1xKJjz5PY53wXVnhcysg', 'Sausage - Blood Pudding', '$3.22 ', '$1.99 ', '02/17/2022', 1),
(13, '1PSDKmzMg5BMXABNqRZxEVg59kaTsu169y', 'Oats Large Flake', '$9.17 ', '$1.29 ', '06/15/2022', 10),
(14, '1LjtJTJt3NPg9Sq8QHFUoYFNJZafLD751q', 'Cheese - Parmesan Cubes', '$1.89 ', '$1.42 ', '04/13/2022', 9),
(15, '1KfeywEXGGHvUPALHPDVQFnX2jXdu4J4fC', 'Split Peas - Yellow, Dry', '$7.32 ', '$0.93 ', '04/18/2022', 7),
(16, '158DLeiSkfDzR7JyEwRSfagmYkhzD1ZkYA', 'Flower - Potmums', '$1.95 ', '$0.18 ', '05/30/2022', 8),
(17, '14WcFhhLCF65XJ47LqUDyYkKtqp77tT29L', 'Lobster - Baby, Boiled', '$7.29 ', '$0.96 ', '11/17/2021', 9),
(18, '15B9rtow88Jzqf3t2LJepHd6hCEVd3M7Rd', 'Tuna - Loin', '$2.12 ', '$0.07 ', '02/21/2022', 2),
(19, '161qmk9yP269JvqMWFdWipbeBkGjAa5RLe', 'Soup - Campbells, Creamy', '$6.84 ', '$1.26 ', '08/28/2022', 4),
(20, '17xt1nVNKgtcfesgUWW2D1MB4fJMnqMcFk', 'Wine - Pinot Noir Pond Haddock', '$1.78 ', '$0.35 ', '03/11/2022', 3),
(21, '1JXmjRLRXKGfkmGt8E2ZTHTZrKVAa7sJ4J', 'Cake - Bande Of Fruit', '$8.68 ', '$1.24 ', '11/14/2021', 5),
(22, '16ALno1YEmGg4ZQcdt9NYwFwFeknMfHzrK', 'Pastry - Baked Cinnamon Stick', '$3.99 ', '$0.89 ', '02/15/2022', 1),
(23, '1EHcpDDkL3SCRNu3wJkH48QZVTtw3c1Hk8', 'Bread Crumbs - Panko', '$1.82 ', '$0.94 ', '09/24/2022', 3),
(24, '1Ew3i2S8ZrSumCiiDgVV7gvY3sN43R5mTn', 'Wine - White, Cooking', '$9.03 ', '$1.40 ', '09/25/2022', 5),
(25, '1PH7f4Y7oUH1c5xGuinmTPng3poHSiCCTm', 'Potatoes - Idaho 80 Count', '$7.64 ', '$0.10 ', '10/15/2021', 6),
(26, '1QE4zGaLEL49fRM5jspBgFQVsmP5AboAgR', 'Red Snapper - Fillet, Skin On', '$5.16 ', '$0.08 ', '04/16/2022', 5),
(27, '16ecF9SH1ySFAUn4dLxerAeyinccrDASdZ', 'Carbonated Water - White Grape', '$4.01 ', '$1.41 ', '11/02/2021', 4),
(28, '1Mg24XDM5zAXBxCyTU6Xe2SQCLZuRh9tMm', 'Energy Drink', '$1.99 ', '$0.01 ', '08/29/2022', 8),
(29, '19D5mnYpFGaWwPSN657ovWPzh1PHkjp4Wg', 'Table Cloth 54x54 White', '$7.23 ', '$0.76 ', '06/26/2022', 2),
(30, '1MYY8JqZk3BcwPsxPH2G8nEYqFNkMKFMSM', 'Muffin Hinge - 211n', '$4.25 ', '$1.25 ', '08/24/2022', 10),
(31, '1JvZyii5fzLn8mmrCWarjHxd8JE7rGBnXw', 'Soup - Campbells Beef Noodle', '$7.15 ', '$1.40 ', '01/08/2022', 6),
(32, '18HGUxeEAtYUkiWxN2QeTmR85ygoeamivb', 'Tea - English Breakfast', '$6.74 ', '$1.02 ', '11/20/2021', 1),
(33, '1ESV3zwy1V5Ff5z5T3TiqJuftbEg8Kc6nd', 'Lamb - Sausage Casings', '$3.31 ', '$0.84 ', '09/16/2022', 9),
(34, '17x7uCHL99C9FjyKZMvxoKM36jzKQTD8X2', 'Coconut - Shredded, Unsweet', '$7.71 ', '$0.15 ', '11/20/2021', 5),
(35, '1MeeiJYSkHnAVw8tNbnNwaw8sNn9DUjgr4', 'Chicken - Leg, Fresh', '$6.35 ', '$1.43 ', '04/10/2022', 3),
(36, '1Ka3cptwfsRyKYA33cNP45wYWA3po15E1A', 'Wine - Lamancha Do Crianza', '$7.37 ', '$1.69 ', '06/02/2022', 7),
(37, '13FcF6f4mmzXBPBum8gkETSj9jNiFkS5QX', 'Creme De Menthe Green', '$2.99 ', '$1.09 ', '01/22/2022', 4),
(38, '13PmWYQDUe2ewJF7MoHQLus17XQhyeYruf', 'Quail - Jumbo', '$5.21 ', '$0.53 ', '10/17/2021', 4),
(39, '1NbocYefavPmQ6wDRWVsnNZNwaqqVbqX5z', 'Chocolate - Pistoles, Lactee, Milk', '$6.80 ', '$0.95 ', '05/03/2022', 1),
(40, '1E9KeV7T6TNd1gkb1e7ePGYve7qTtbA6RC', 'Coconut - Creamed, Pure', '$6.19 ', '$0.41 ', '11/26/2021', 10),
(41, '1LRNuJ2HHkcaQ5pVpSU349MVM6yw1Wcs8T', 'Cattail Hearts', '$9.72 ', '$0.55 ', '09/10/2022', 7),
(42, '13B8jKKCvkk52or91b1tC8ipBF6QhaWi98', 'Soup - Beef Conomme, Dry', '$4.56 ', '$0.33 ', '05/23/2022', 2),
(43, '1B2q9pQVBYsv9iYXHLHVeijf6dwypti4Sp', 'Egg Patty Fried', '$6.31 ', '$0.98 ', '10/04/2021', 2),
(44, '1PnXRW7DtFbAQ922FY4rmFXjeXcYsnuv2x', 'Venison - Denver Leg Boneless', '$2.60 ', '$0.74 ', '09/21/2022', 9),
(45, '136p12cywvTMJUvmPDwmDf95fij2ZbRcPr', 'Island Oasis - Ice Cream Mix', '$2.95 ', '$1.53 ', '03/22/2022', 2),
(46, '1JYmm4ZmdPrCPBYCuWgAHYX8iG6WaWk8Fs', 'Ostrich - Fan Fillet', '$4.32 ', '$0.42 ', '04/10/2022', 9),
(47, '13o7QeoHwnwD91FcRXkVLg1Qg5v3Qye4BK', 'Plate - Foam, Bread And Butter', '$1.81 ', '$1.64 ', '06/19/2022', 4),
(48, '15bLFHs6uWNt7fWBtKM8gphnh7GrfJPugG', 'Foil Wrap', '$1.34 ', '$0.62 ', '08/29/2022', 3),
(49, '14PaKTHoj7y2gAc6YsE9jtxHZzjjFRwzb7', 'Bread - Assorted Rolls', '$3.87 ', '$1.49 ', '09/26/2022', 1);
`;
@@ -1,7 +1,14 @@
import { graphql } from 'msw';
import { graphql, rest } from 'msw';
import Endpoints from '@/Endpoints';
import { SurveysResponseData } from '@/features/Surveys';
import { fetchSurveysDataResponse } from './constants';
import {
  fetchSurveysDataResponse,
  mockMetadataUrl,
  mockMigrationUrl,
  MOCK_METADATA_FILE_CONTENTS,
  MOCK_MIGRATION_FILE_CONTENTS,
  serverDownErrorMessage,
} from './constants';

type ResponseBodyOnSuccess = {
  status: 'success';
@@ -56,3 +63,76 @@ export const fetchAnsweredSurveysHandler = controlPlaneApi.query<
>('fetchAllSurveysData', (req, res, ctx) => {
  return res(ctx.status(200), ctx.data(fetchSurveysDataResponse.answered));
});

export const fetchGithubMetadataHandler = rest.get(
  mockMetadataUrl,
  (req, res, ctx) => {
    return res(ctx.text(JSON.stringify(MOCK_METADATA_FILE_CONTENTS)));
  }
);

export const fetchGithubMigrationHandler = rest.get(
  mockMigrationUrl,
  (req, res, ctx) => {
    return res(ctx.text(MOCK_MIGRATION_FILE_CONTENTS));
  }
);

export const mockGithubServerDownHandler = (url: string) =>
  rest.get(url, (req, res, ctx) => {
    return res(ctx.status(503), ctx.json(serverDownErrorMessage));
  });

export const metadataSuccessHandler = rest.post(
  Endpoints.metadata,
  async (req, res, ctx) => {
    const body = (await req.json()) as Record<string, unknown>;

    if (body.type === 'replace_metadata' || body.type === 'reload_metadata') {
      return res(ctx.json({ message: 'success' }));
    }

    return res(
      ctx.status(400),
      ctx.json({
        code: 'parse-failed',
        error: `unknown metadata command ${body.type}`,
        path: '$',
      })
    );
  }
);

export const metadataFailureHandler = rest.post(
  Endpoints.metadata,
  async (req, res, ctx) => {
    return res(ctx.status(503), ctx.json(serverDownErrorMessage));
  }
);

export const querySuccessHandler = rest.post(
  Endpoints.queryV2,
  async (req, res, ctx) => {
    const body = (await req.json()) as Record<string, unknown>;

    if (body.type === 'run_sql') {
      return res(ctx.json({ message: 'success' }));
    }

    return res(
      ctx.status(400),
      ctx.json({
        code: 'parse-failed',
        error: `unknown metadata command ${body.type}`,
        path: '$',
      })
    );
  }
);

export const queryFailureHandler = rest.post(
  Endpoints.queryV2,
  async (req, res, ctx) => {
    return res(ctx.status(503), ctx.json(serverDownErrorMessage));
  }
);