cli: allow managing actions (#3859)

Co-authored-by: Rishichandra Wawhal <rishichandra.wawhal@gmail.com>
Co-authored-by: Rikin Kachhia <54616969+rikinsk@users.noreply.github.com>
Co-authored-by: Aravind <aravindkp@outlook.in>
Co-authored-by: Anon Ray <ecthiender@users.noreply.github.com>
Co-authored-by: Shahidh K Muhammed <muhammedshahid.k@gmail.com>
Aravind Shankar 2020-02-24 21:44:46 +05:30 committed by GitHub
parent 8df0151f99
commit bb63d7e60e
199 changed files with 17246 additions and 2507 deletions


@ -353,6 +353,45 @@ jobs:
root: /build
paths:
- _cli_output
test_and_build_cli_ext:
docker:
- image: hasura/graphql-engine-extension-cli-builder:20200220
working_directory: ~/graphql-engine
steps:
- attach_workspace:
at: /build
- *skip_job_on_ciignore
- checkout
- restore_cache:
key:
cli-ext-npm-cache-v1-{{ checksum "cli-ext/package.json" }}-{{ checksum "cli-ext/package-lock.json" }}
- run:
name: install dependencies
working_directory: cli-ext
command: make ci-deps
- save_cache:
key:
cli-ext-npm-cache-v1-{{ checksum "cli-ext/package.json" }}-{{ checksum "cli-ext/package-lock.json" }}
paths:
- cli-ext/node_modules
- ~/.npm
- ~/.cache
- run:
name: build extension cli
working_directory: cli-ext
command: |
npm install -g pkg
npm run build
make deploy
make ci-copy-assets
- store_artifacts:
path: /build/_cli_ext_output
destination: cli_ext
- persist_to_workspace:
root: /build
paths:
- _cli_ext_output
# build console assets
build_console:
@ -493,6 +532,10 @@ workflows:
workflow_v20200120:
jobs:
- check_build_worthiness: *filter_only_vtags
- test_and_build_cli_ext:
<<: *filter_only_vtags
requires:
- check_build_worthiness
- build_console:
<<: *filter_only_vtags
requires:
@ -501,6 +544,11 @@ workflows:
<<: *filter_only_vtags
requires:
- check_build_worthiness
- test_cli_with_last_release:
<<: *filter_only_vtags
requires:
- test_and_build_cli_ext
- check_build_worthiness
- build_image:
<<: *filter_only_vtags
requires:
@ -544,13 +592,10 @@ workflows:
- test_server_pg_9.5
- server_unit_tests
- test_server_upgrade
- test_cli_with_last_release:
<<: *filter_only_vtags
requires:
- check_build_worthiness
- test_and_build_cli:
<<: *filter_only_vtags
requires:
- test_and_build_cli_ext
- build_server
- test_console:
<<: *filter_only_vtags


@ -26,4 +26,4 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /usr/share/man/ \
&& rm -rf /usr/share/locale/
ENV PATH "/usr/local/google-cloud-sdk/bin:$PATH"
ENV PATH "/usr/local/google-cloud-sdk/bin:$PATH"


@ -58,7 +58,7 @@ draft_github_release() {
-r "$CIRCLE_PROJECT_REPONAME" \
-b "${RELEASE_BODY}" \
-draft \
"$CIRCLE_TAG" /build/_cli_output/binaries/
"$CIRCLE_TAG" /build/_cli_output/binaries/ /build/_cli_ext_output/*.tar.gz /build/_cli_ext_output/*.zip
}
configure_git() {
@ -95,6 +95,27 @@ deploy_console() {
unset DIST_PATH
}
deploy_cli_ext() {
echo "deploying extension cli"
cd "$ROOT/cli-ext"
export VERSION=$(../scripts/get-version.sh)
export DIST_PATH="/build/_cli_ext_output"
configure_git
git clone https://github.com/hasura/cli-plugins-index.git ~/plugins-index
cd ~/plugins-index
git checkout -b cli-ext-${LATEST_TAG}
cp ${DIST_PATH}/manifest.yaml ./plugins/cli-ext.yaml
git add .
git commit -m "update cli-ext manifest to ${LATEST_TAG}"
git push -q https://${GITHUB_TOKEN}@github.com/hasura/plugins-index.git cli-ext-${LATEST_TAG}
hub pull-request -F- <<<"Update cli-ext manifest to ${LATEST_TAG}" -r ${REVIEWERS} -a ${REVIEWERS}
unset VERSION
unset DIST_PATH
}
# build and push container for auto-migrations
build_and_push_cli_migrations_image() {
IMAGE_TAG="hasura/graphql-engine:${CIRCLE_TAG}.cli-migrations"
@ -167,6 +188,7 @@ deploy_console
deploy_server
if [[ ! -z "$CIRCLE_TAG" ]]; then
build_and_push_cli_migrations_image
deploy_cli_ext
# if this is a stable release, update all latest assets
if [ $IS_STABLE_RELEASE = true ]; then


@ -0,0 +1,27 @@
FROM node:12
ARG gcloud_version="207.0.0"
# install dependencies
RUN apt-get update && apt-get install -y \
netcat \
libpq5 \
libgtk2.0-0 \
libnotify-dev \
libgconf-2-4 \
libnss3 \
libxss1 \
libasound2 \
zip \
xvfb \
&& curl -Lo /tmp/gcloud-${gcloud_version}.tar.gz https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-${gcloud_version}-linux-x86_64.tar.gz \
&& tar -xzf /tmp/gcloud-${gcloud_version}.tar.gz -C /usr/local \
&& /usr/local/google-cloud-sdk/install.sh \
&& apt-get -y auto-remove \
&& apt-get -y clean \
&& rm -rf /var/lib/apt/lists/* \
&& rm -rf /usr/share/doc/ \
&& rm -rf /usr/share/man/ \
&& rm -rf /usr/share/locale/
ENV PATH "/usr/local/google-cloud-sdk/bin:$PATH"


@ -33,7 +33,7 @@ PID=$!
wait_for_port 8080
# test cli
HASURA_GRAPHQL_TEST_ENDPOINT="http://localhost:8080" make test
HASURA_GRAPHQL_TEST_ENDPOINT="http://localhost:8080" TEST_TAGS="latest_release" make test
# kill the running server
kill -s INT $PID
@ -47,5 +47,5 @@ PID=$!
wait_for_port 8080
# test cli
HASURA_GRAPHQL_TEST_ENDPOINT="http://localhost:8080" HASURA_GRAPHQL_TEST_ADMIN_SECRET="abcd" make test
HASURA_GRAPHQL_TEST_ENDPOINT="http://localhost:8080" HASURA_GRAPHQL_TEST_ADMIN_SECRET="abcd" TEST_TAGS="latest_release" make test
kill -s INT $PID


@ -29,7 +29,7 @@ PID=$!
wait_for_port 8080
# test cli
HASURA_GRAPHQL_TEST_ENDPOINT="http://localhost:8080" make test
HASURA_GRAPHQL_TEST_ENDPOINT="http://localhost:8080" TEST_TAGS="test_plugins" make test
kill -s INT $PID
# start graphql-engine with admin secret

4
cli-ext/.babelrc Normal file

@ -0,0 +1,4 @@
{
presets: ["@babel/preset-env"],
plugins: ["@babel/plugin-transform-async-to-generator"]
}

6
cli-ext/.gitignore vendored Normal file

@ -0,0 +1,6 @@
bin
build
src/shared
_tmptests
node_modules
version.json

37
cli-ext/Makefile Normal file

@ -0,0 +1,37 @@
BUILDDIR := bin
ASSETS := $(BUILDDIR)/command-macos.tar.gz $(BUILDDIR)/command-linux.tar.gz $(BUILDDIR)/command-win.zip
CHECKSUMS := $(patsubst %,%.sha256,$(ASSETS))
COMPRESS := gzip --best -k -c
ci-deps:
if [ ! -d "node_modules" ]; then npm ci; fi
ci-copy-assets:
mkdir -p /build/_cli_ext_output
cp $(BUILDDIR)/* /build/_cli_ext_output/
.PRECIOUS: %.zip
%.zip: %.exe
cd $(BUILDDIR) && \
zip $(patsubst $(BUILDDIR)/%, %, $@) $(patsubst $(BUILDDIR)/%, %, $<)
.PRECIOUS: %.gz
%.gz: %
$(COMPRESS) "$<" > "$@"
%.tar: %
tar cf "$@" -C $(BUILDDIR) $(patsubst $(BUILDDIR)/%,%,$^)
%.sha256: %
shasum -a 256 $< > $@
.PHONY: deploy
deploy: $(CHECKSUMS)
./scripts/generate-manifest.sh && \
$(RM) $(BUILDDIR)/tmp.yaml
.PHONY: clean
clean:
$(RM) -r $(BUILDDIR)

45
cli-ext/README.md Normal file

File diff suppressed because one or more lines are too long

4395
cli-ext/package-lock.json generated Normal file

File diff suppressed because it is too large

32
cli-ext/package.json Normal file

@ -0,0 +1,32 @@
{
"name": "scaffolder",
"version": "1.0.0",
"description": "A service to generate Hasura action scaffolds",
"main": "src/server.js",
"scripts": {
"get-shared-modules": "rm -rf src/shared && cp ../console/src/shared ./src/shared -r",
"pretranspile": "npm run get-shared-modules",
"transpile": "rm -rf build/* && babel ./src ./tests --out-dir build",
"prebuild": "npm run transpile",
"build": "rm -rf ./bin/* && pkg ./build/command.js --out-path ./bin",
"pretest": "npm run transpile && babel ./tests --out-dir _tmptests",
"posttest": "rm -rf _tmptests",
"test": "node ./_tmptests/index.js"
},
"author": "wawhal",
"license": "ISC",
"dependencies": {
"@graphql-codegen/core": "^1.9.0",
"@graphql-codegen/typescript": "^1.9.0",
"graphql": "^14.5.8",
"inflection": "^1.12.0",
"node-fetch": "^2.6.0",
"regenerator-runtime": "^0.13.3"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/plugin-transform-async-to-generator": "^7.7.4",
"@babel/preset-env": "^7.7.6"
}
}


@ -0,0 +1,23 @@
#!/usr/bin/env bash
set -evo pipefail
IFS=$'\n\t'
ROOT="$(readlink -f ${BASH_SOURCE[0]%/*}/../../)"
export VERSION=$(${ROOT}/scripts/get-version.sh)
export BUCKET_URL=https://github.com/hasura/graphql-engine/releases/download/${VERSION}
export LINUX_SHA256=$(cat ${ROOT}/cli-ext/bin/command-linux.tar.gz.sha256 | cut -f1 -d' ')
export MACOS_SHA256=$(cat ${ROOT}/cli-ext/bin/command-macos.tar.gz.sha256 | cut -f1 -d' ')
export WINDOWS_SHA256=$(cat ${ROOT}/cli-ext/bin/command-win.zip.sha256 | cut -f1 -d' ')
( echo "cat <<EOF >${ROOT}/cli-ext/bin/manifest.yaml";
cat ${ROOT}/cli-ext/scripts/manifest.yaml;
) >${ROOT}/cli-ext/bin/tmp.yaml
. ${ROOT}/cli-ext/bin/tmp.yaml
export BUCKET_URL=https://${CIRCLE_BUILD_NUM}-137724480-gh.circle-artifacts.com/0/cli_ext
( echo "cat <<EOF >${ROOT}/cli-ext/bin/manifest-dev.yaml";
cat ${ROOT}/cli-ext/scripts/manifest.yaml;
) >${ROOT}/cli-ext/bin/tmp.yaml
. ${ROOT}/cli-ext/bin/tmp.yaml


@ -0,0 +1,27 @@
name: cli-ext
version: "${VERSION}"
shortDescription: "Hasura CLI extension"
homepage: https://github.com/hasura/graphql-engine
hidden: true
platforms:
- uri: "${BUCKET_URL}/command-linux.tar.gz"
sha256: "${LINUX_SHA256}"
bin: command-linux
files:
- from: ./command-linux
to: command-linux
selector: linux-amd64
- uri: "${BUCKET_URL}/command-macos.tar.gz"
sha256: "${MACOS_SHA256}"
bin: command-macos
files:
- from: ./command-macos
to: command-macos
selector: darwin-amd64
- uri: "${BUCKET_URL}/command-win.zip"
sha256: "${WINDOWS_SHA256}"
bin: command-win.exe
files:
- from: ./command-win.exe
to: command-win.exe
selector: windows-amd64

67
cli-ext/src/command.js Normal file

@ -0,0 +1,67 @@
import "regenerator-runtime/runtime";
const {
sdl,
actionsCodegen
} = require('./services');
const fs = require('fs');
const { getFlagValue, OUTPUT_FILE_FLAG } = require('./utils/commandUtils');
const commandArgs = process.argv;
const outputFilePath = getFlagValue(commandArgs, OUTPUT_FILE_FLAG);
const logOutput = (log) => {
try {
fs.writeFile(outputFilePath, log, 'utf8', () => {
console.log(JSON.stringify({
success: true,
output_file_path: outputFilePath
}));
});
} catch (e) {
console.error(`could not write output to "${outputFilePath}"`);
process.exit(1);
}
};
const handleArgs = () => {
const rootArg = commandArgs[2];
switch(rootArg) {
case 'sdl':
const sdlSubCommands = commandArgs.slice(3);
return sdl(sdlSubCommands);
case 'actions-codegen':
const actionCodegenSubCommands = commandArgs.slice(3);
return actionsCodegen(actionCodegenSubCommands);
default:
return;
}
}
try {
let cliResponse = handleArgs();
if (cliResponse.error) {
throw Error(cliResponse.error)
}
if (cliResponse.constructor.name === 'Promise') {
cliResponse.then(r => {
if (r.error) {
throw Error(r.error)
}
logOutput(JSON.stringify(r));
}).catch(e => {
console.error(e);
process.exit(1);
})
} else {
logOutput(JSON.stringify(cliResponse));
}
} catch (e) {
if (e) {
if (e.error) {
console.error(e.error);
} else {
console.error(e.message ? e.message : e);
}
}
process.exit(1);
}
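
command.js pins down the extension's whole I/O contract: the payload is read from the file passed via --input-file, the handler's result is written as JSON to the file passed via --output-file, and a small {"success": true, "output_file_path": ...} status object is printed on stdout. A minimal Go sketch of a caller that follows this contract is below; the binary name "cli-ext", the helper name, and the temp-file handling are illustrative assumptions, not the CLI's actual plugin wiring.

// Hypothetical caller for the extension's file-based contract.
// Assumes the extension binary is available on PATH as "cli-ext".
package cliext

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"os"
	"os/exec"
)

func runCliExt(subcommand []string, payload interface{}) ([]byte, error) {
	inFile, err := ioutil.TempFile("", "cli-ext-input")
	if err != nil {
		return nil, err
	}
	defer os.Remove(inFile.Name())
	outFile, err := ioutil.TempFile("", "cli-ext-output")
	if err != nil {
		return nil, err
	}
	defer os.Remove(outFile.Name())
	outFile.Close()

	body, err := json.Marshal(payload)
	if err != nil {
		return nil, err
	}
	if _, err := inFile.Write(body); err != nil {
		return nil, err
	}
	inFile.Close()

	// e.g. subcommand = []string{"sdl", "from"} or []string{"actions-codegen"}
	args := append(subcommand, "--input-file", inFile.Name(), "--output-file", outFile.Name())
	if out, err := exec.Command("cli-ext", args...).CombinedOutput(); err != nil {
		return nil, fmt.Errorf("cli-ext failed: %v: %s", err, out)
	}
	// the handler's JSON response (or {"error": ...}) is in the output file
	return ioutil.ReadFile(outFile.Name())
}
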

10
cli-ext/src/constants.js Normal file

@ -0,0 +1,10 @@
const PORT = process.env.PORT || 4000;
const GRAPHQL_ENGINE_REPO_OWNER = process.env.GRAPHQL_ENGINE_REPO_OWNER || 'hasura'
const GRAPHQL_ENGINE_REPO_BRANCH = process.env.GRAPHQL_ENGINE_REPO_BRANCH || 'master'
module.exports = {
PORT,
GRAPHQL_ENGINE_REPO_OWNER,
GRAPHQL_ENGINE_REPO_BRANCH
};


@ -0,0 +1,34 @@
const fetch = require('node-fetch');
const { getTemplatePath } = require('../../utils/utils');
const { parseCustomTypes, getActionTypes } = require('../../shared/utils/hasuraCustomTypeUtils')
const { getFrameworkCodegen } = require('./template');
const { getActionDefinitionSdl, getTypesSdl } = require('../../shared/utils/sdlUtils');
const { parse: sdlParse } = require('graphql/language/parser');
const getActionsCodegen = async (payload) => {
const {
action_name: actionName,
sdl: {
complete: sdlComplete
},
derive,
codegen_config: codegenConfig
} = payload;
try {
const codegenResp = await getFrameworkCodegen(actionName, sdlComplete, derive, codegenConfig)
if (codegenResp.error) {
throw Error(codegenResp.error)
} else {
return codegenResp.files
}
} catch (e) {
throw e;
}
};
module.exports = {
getActionsCodegen
};


@ -0,0 +1,10 @@
const { getInputPayload } = require('../../utils/commandUtils')
const handler = require('./handler');
const command = (subCommands) => {
const rootInput = subCommands[0];
const payload = getInputPayload(subCommands);
return handler(payload);
};
module.exports = command;


@ -0,0 +1,14 @@
const { getActionsCodegen } = require('./codegen');
const handler = async (payload) => {
try {
const codegen = await getActionsCodegen(payload);
return { codegen };
} catch (e) {
return { error: e.message };
}
}
module.exports = handler;


@ -0,0 +1 @@
module.exports = require('./command');


@ -0,0 +1,114 @@
const { buildSchema, printSchema, parse } = require('graphql');
const { codegen } = require('@graphql-codegen/core');
const typescriptPlugin = require('@graphql-codegen/typescript');
const { camelize } = require('inflection');
const fetch = require('node-fetch');
const path = require('path')
const fs = require('fs');
const { getTemplatePath } = require('../../utils/utils')
const CODEGENERATOR_NOT_FOUND = 'given codegen framework not found';
const FILE_SYSTEM_PATH = 'fs_path';
const URL_PATH = 'url path';
const resolveCodegeneratorPath = (codegenConfig) => {
let { framework } = codegenConfig;
let codegeneratorPath = codegenConfig.uri;
if (!codegeneratorPath) {
codegeneratorPath = getTemplatePath(framework)
}
return codegeneratorPath;
};
const resolveCodegeneratorFromUrl = async (url) => {
let codegenerator;
try {
const fetchResp = await fetch(url);
if (fetchResp.status >= 300) {
throw Error(CODEGENERATOR_NOT_FOUND);
}
const codegeneratorText = await fetchResp.text()
eval(`${codegeneratorText} codegenerator = templater`);
return codegenerator;
} catch (e) {
throw e;
}
};
const resolveCodegeneratorFromFs = async (fsPath) => {
let codegenerator;
try {
const codegeneratorText = fs.readFileSync(path.resolve(fsPath), { encoding: 'utf8'});
eval(`${codegeneratorText}\n codegenerator = templater`);
return codegenerator;
} catch (e) {
throw e;
}
};
const resolveCodegenerator = async (codegenConfig) => {
const codegeneratorPath = resolveCodegeneratorPath(codegenConfig);
if (!codegeneratorPath) {
throw Error(CODEGENERATOR_NOT_FOUND)
}
let codegenerator
let pathType = URL_PATH;
try {
new URL(codegeneratorPath)
} catch (_) {
pathType = FILE_SYSTEM_PATH;
}
try {
if (pathType === FILE_SYSTEM_PATH) {
codegenerator = await resolveCodegeneratorFromFs(codegeneratorPath)
} else {
codegenerator = await resolveCodegeneratorFromUrl(codegeneratorPath);
}
} catch (e) {
throw e;
}
return codegenerator;
}
const getCodegenFiles = async (actionName, actionsSdl, derive, codegenConfig) => {
let codegenerator;
try {
codegenerator = await resolveCodegenerator(codegenConfig)
} catch (e) {
throw e;
}
let codegenFiles = codegenerator(actionName, actionsSdl, derive);
if (codegenFiles && codegenFiles.constructor.name === 'Promise') {
codegenFiles = await codegenFiles;
}
return codegenFiles;
}
const getFrameworkCodegen = async (actionName, actionsSdl, derive, codegenConfig) => {
try {
const codegenFiles = await getCodegenFiles(actionName, actionsSdl, derive, codegenConfig);
return {
files: codegenFiles
}
} catch (e) {
return {
error: e.message
}
}
};
module.exports = {
getFrameworkCodegen
};


@ -0,0 +1,7 @@
const actionsCodegenHandler = require('./actions-codegen');
const sdlHandler = require('./sdl');
module.exports = {
actionsCodegen: actionsCodegenHandler,
sdl: sdlHandler
};


@ -0,0 +1,18 @@
const toCommand = require('./to/command');
const fromCommand = require('./from/command');
const command = (subCommands) => {
const rootSubCommand = subCommands[0];
switch(rootSubCommand) {
case 'to':
const toSubCommands = subCommands.slice(1);
return toCommand(toSubCommands)
case 'from':
const fromSubCommands = subCommands.slice(1);
return fromCommand(fromSubCommands);
default:
return;
}
};
module.exports = command;


@ -0,0 +1,10 @@
const handler = require('./handler');
const { getInputPayload } = require('../../../utils/commandUtils')
const command = (subCommands) => {
const rootInput = subCommands[0];
const payload = getInputPayload(subCommands);
return handler(payload);
};
module.exports = command;


@ -0,0 +1,81 @@
const { getAllActionsFromSdl, getAllTypesFromSdl } = require('../../../shared/utils/sdlUtils');
const { reformCustomTypes } = require('../../../shared/utils/hasuraCustomTypeUtils')
const handlePayload = (payload) => {
const response = {
body: null,
status: 200
};
const { sdl } = payload;
let customTypes, typesParseError;
if (!sdl.complete.trim()) {
response.body = {
actions: [],
types: reformCustomTypes([])
}
return response
}
try {
customTypes = getAllTypesFromSdl(sdl.complete);
} catch (e) {
typesParseError = e;
}
if (typesParseError) {
response.body = {
error: typesParseError.message
};
response.status = 400;
return response;
}
let allActions, actionsParseError;
try {
allActions = getAllActionsFromSdl(sdl.complete);
} catch (e) {
actionsParseError = e;
}
if (actionsParseError) {
response.body = {
error: actionsParseError.message
};
response.status = 400;
}
response.body = {
actions: allActions,
types: customTypes
};
return response;
}
const requestHandler = (payload) => {
const {
body, status
} = handlePayload(payload)
return body;
};
module.exports = requestHandler;
module.exports.handlePayload = handlePayload;


@ -0,0 +1 @@
module.exports = require('./command')


@ -0,0 +1,11 @@
const handler = require('./handler');
const { getInputPayload } = require('../../../utils/commandUtils');
const command = (subCommands) => {
const rootInput = subCommands[0];
const payload = getInputPayload(subCommands);
const response = handler(payload);
return response;
};
module.exports = command;


@ -0,0 +1,98 @@
const { getTypesSdl, getActionDefinitionSdl } = require('../../../shared/utils/sdlUtils');
const deriveMutation = require('../../../shared/utils/deriveMutation').default;
const handlePayload = (payload) => {
const { actions, types, derive } = payload;
const {
operation: toDeriveOperation,
introspection_schema: introspectionSchema,
action_name: actionName
} = derive || {};
const response = {
body: null,
status: 200
};
let actionSdl = '';
let typesSdl = '';
let actionSdlError, typesSdlError, deriveMutationError;
if (actions) {
try {
actions.forEach(a => {
actionSdl += getActionDefinitionSdl(a.name, a.definition.arguments, a.definition.output_type) + '\n';
})
} catch (e) {
actionSdlError = e;
}
}
if (types) {
try {
typesSdl = getTypesSdl(types);
} catch (e) {
typesSdlError = e;
}
}
let sdl = `${actionSdl}\n\n${typesSdl}`;
if (toDeriveOperation) {
try {
const derivation = deriveMutation(toDeriveOperation, introspectionSchema, actionName);
const derivedActionSdl = getActionDefinitionSdl(derivation.action.name, derivation.action.arguments, derivation.action.output_type);
const derivedTypesSdl = getTypesSdl(derivation.types);
sdl = `${derivedActionSdl}\n\n${derivedTypesSdl}\n\n${sdl}`
} catch (e) {
deriveMutationError = e;
}
}
if (actionSdlError) {
response.body = {
error: 'invalid actions definition'
};
response.status = 400;
return response;
}
if (deriveMutationError) {
response.body = {
error: `could not derive mutation: ${deriveMutationError.message}`
};
response.status = 400;
return response;
}
if (typesSdlError) {
response.body = {
error: 'invalid types'
};
response.status = 400;
return response;
}
response.body = {
sdl: {
complete: sdl
}
};
return response;
}
const requestHandler = (payload) => {
const {
body, status
} = handlePayload(payload)
return body;
}
module.exports = requestHandler;
module.exports.handlePayload = handlePayload;

View File

@ -0,0 +1,30 @@
const fs = require('fs');
const OUTPUT_FILE_FLAG = 'output-file';
const INPUT_FILE_FLAG = 'input-file';
const getFlagValue = (args, flagname) => {
for (var i = args.length - 1; i >= 0; i--) {
if (args[i] === `--${flagname}`) {
const flagValue = args[i + 1];
if (!flagValue) {
throw Error(`unexpected value for flag ${flagname}`);
} else {
return flagValue;
}
}
}
};
const getInputPayload = (args) => {
const inputFilePath = getFlagValue(args, INPUT_FILE_FLAG);
const payloadString = fs.readFileSync(inputFilePath, 'utf8');
return JSON.parse(payloadString);
};
module.exports = {
getInputPayload,
getFlagValue,
OUTPUT_FILE_FLAG,
INPUT_FILE_FLAG
}


@ -0,0 +1,7 @@
const { GRAPHQL_ENGINE_REPO_BRANCH, GRAPHQL_ENGINE_REPO_OWNER } = require("../constants");
const getTemplatePath = (framework) => {
return `https://raw.githubusercontent.com/hasura/codegen-assets/master/${framework}/actions-codegen.js`
}
module.exports.getTemplatePath = getTemplatePath;


@ -0,0 +1 @@
module.exports = require('./test.js');


@ -0,0 +1,13 @@
const samplePayload = {
"action_name": "actionName1",
"sdl": {
"complete": "type Mutation { actionName1 (arg1: SampleInput!): SampleOutput } type SampleOutput { accessToken: String! } input SampleInput { username: String! password: String! } type Mutation { actionName2 (arg1: SampleInput!): SampleOutput }"
},
"codegen_config": {
"framework": "typescript-zeit"
}
};
module.exports = {
samplePayload
};


@ -0,0 +1,23 @@
const { getActionsCodegen } = require('../../build/services/actions-codegen/codegen');
const { samplePayload } = require('./payload');
const test = async () => {
try {
const codegenFiles = await getActionsCodegen(samplePayload);
if (codegenFiles) {
console.log('✓ Actions codegen test passed');
} else {
console.log('✘ Actions codegen test failed');
console.log('Received empty codegen');
process.exit(1);
}
} catch (e) {
console.log('✘ Actions codegen test failed');
console.error(e);
process.exit(1);
}
};
module.exports = test;

10
cli-ext/tests/index.js Normal file

@ -0,0 +1,10 @@
import "regenerator-runtime/runtime";
const actionsCodegen = require('./actions-codegen');
const sdlTest = require('./sdl');
const runTests = async () => {
await actionsCodegen();
await sdlTest();
};
runTests();


@ -0,0 +1 @@
module.exports = require('./test');

3038
cli-ext/tests/sdl/payload.js Normal file

File diff suppressed because it is too large

35
cli-ext/tests/sdl/test.js Normal file

@ -0,0 +1,35 @@
const { toPayload, fromPayload } = require('./payload');
const { handlePayload: toHandler } = require('../../build/services/sdl/to/handler');
const { handlePayload: fromHandler } = require('../../build/services/sdl/from/handler');
const test = async () => {
const toResponse = toHandler(toPayload);
if (
toResponse.status === 200
) {
console.log('✓ Conversion from metadata to SDL passed');
} else {
console.log('✘ Conversion from metadata to SDL failed');
console.log(toResponse);
process.exit(1);
}
const fromResponse = fromHandler(fromPayload);
if (
fromResponse.status === 200
) {
console.log('✓ Conversion from SDL to metadata passed');
} else {
console.log('✘ Conversion from SDL to metadata failed');
console.log(fromResponse);
process.exit(1);
}
return Promise.resolve()
}
module.exports = test;


@ -48,7 +48,9 @@ ci-copy-binary:
# run tests
.PHONY: test
test:
go test -ldflags "-X github.com/hasura/graphql-engine/cli/version.BuildVersion=$(VERSION)" ./...
go test -ldflags "-X github.com/hasura/graphql-engine/cli/version.BuildVersion=$(VERSION)" -v -tags="${TEST_TAGS}" `go list ./... | grep -v integration_test`
go test -ldflags "-X github.com/hasura/graphql-engine/cli/version.BuildVersion=$(VERSION)" -v -tags="${TEST_TAGS}" -run Commands/config=v1 ./integration_test
go test -ldflags "-X github.com/hasura/graphql-engine/cli/version.BuildVersion=$(VERSION)" -v -tags="${TEST_TAGS}" -run Commands/config=v2 ./integration_test
# clean the output directory
.PHONY: clean
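
The -tags="${TEST_TAGS}" flag lets individual test files opt in to a particular CI run through Go build constraints; TEST_TAGS is set to "latest_release" or "test_plugins" by the CI scripts above. A minimal sketch of such a gated test file (the file location and test body are illustrative; only the build-tag mechanism is the point):

// +build latest_release

// Hypothetical file, e.g. cli/commands/latest_release_test.go.
package commands

import "testing"

// Compiled into the test binary only when `go test -tags="latest_release" ...`
// is used, which is what TEST_TAGS="latest_release" make test expands to.
func TestAgainstLatestRelease(t *testing.T) {
	t.Log("exercised only in the latest_release CI run")
}
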


@ -8,19 +8,22 @@
package cli
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/url"
"os"
"path/filepath"
"strings"
"time"
"gopkg.in/yaml.v2"
"github.com/briandowns/spinner"
"github.com/gofrs/uuid"
"github.com/hasura/graphql-engine/cli/metadata/actions/types"
"github.com/hasura/graphql-engine/cli/plugins"
"github.com/hasura/graphql-engine/cli/telemetry"
"github.com/hasura/graphql-engine/cli/util"
"github.com/hasura/graphql-engine/cli/version"
"github.com/mattn/go-colorable"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"github.com/spf13/viper"
@ -46,65 +49,26 @@ visit https://docs.hasura.io/1.0/graphql/manual/guides/telemetry.html
`
)
// ServerConfig has the config values required to contact the server.
// ConfigVersion defines the version of the Config.
type ConfigVersion int
const (
// V1 represents config version 1
V1 ConfigVersion = iota + 1
// V2 represents config version 2
V2
)
// ServerConfig has the config values required to contact the server
type ServerConfig struct {
// Endpoint for the GraphQL Engine
Endpoint string
// AdminSecret (optional) required to query the endpoint
AdminSecret string
ParsedEndpoint *url.URL
}
type rawServerConfig struct {
// Endpoint for the GraphQL Engine
Endpoint string `json:"endpoint"`
Endpoint string `yaml:"endpoint"`
// AccessKey (deprecated) (optional) Admin secret key required to query the endpoint
AccessKey string `json:"access_key,omitempty"`
AccessKey string `yaml:"access_key,omitempty"`
// AdminSecret (optional) Admin secret required to query the endpoint
AdminSecret string `json:"admin_secret,omitempty"`
AdminSecret string `yaml:"admin_secret,omitempty"`
ParsedEndpoint *url.URL `json:"-"`
}
func (r rawServerConfig) toServerConfig() ServerConfig {
s := r.AdminSecret
if s == "" {
s = r.AccessKey
}
return ServerConfig{
Endpoint: r.Endpoint,
AdminSecret: s,
ParsedEndpoint: r.ParsedEndpoint,
}
}
func (s ServerConfig) toRawServerConfig() rawServerConfig {
return rawServerConfig{
Endpoint: s.Endpoint,
AccessKey: "",
AdminSecret: s.AdminSecret,
ParsedEndpoint: s.ParsedEndpoint,
}
}
// MarshalJSON converts s to JSON
func (s ServerConfig) MarshalJSON() ([]byte, error) {
return json.Marshal(s.toRawServerConfig())
}
// UnmarshalJSON converts b to struct s
func (s ServerConfig) UnmarshalJSON(b []byte) error {
var r rawServerConfig
err := json.Unmarshal(b, &r)
if err != nil {
return errors.Wrap(err, "unmarshal error")
}
sc := r.toServerConfig()
s.Endpoint = sc.Endpoint
s.AdminSecret = sc.AdminSecret
s.ParsedEndpoint = sc.ParsedEndpoint
return nil
ParsedEndpoint *url.URL `yaml:"-"`
}
// ParseEndpoint ensures the endpoint is valid.
@ -117,6 +81,22 @@ func (s *ServerConfig) ParseEndpoint() error {
return nil
}
// Config represents configuration required for the CLI to function
type Config struct {
// Version of the config.
Version ConfigVersion `yaml:"version"`
// ServerConfig to be used by CLI to contact server.
ServerConfig `yaml:",inline"`
// MetadataDirectory defines the directory where the metadata files are stored.
MetadataDirectory string `yaml:"metadata_directory"`
// MigrationsDirectory defines the directory where the migration files are stored.
MigrationsDirectory string `yaml:"migrations_directory,omitempty"`
// ActionConfig defines the config required to create or generate codegen for an action.
ActionConfig types.ActionExecutionConfig `yaml:"actions"`
}
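
For reference, a config v2 file that fills the new Config struct could look like the sketch below. The top-level keys come from the yaml tags above; the nested keys under actions are an assumption based on the viper defaults set in readConfig further down, and the structs here are trimmed local copies so the snippet stays self-contained.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// Trimmed copies of the structs above, just enough to unmarshal a sample file.
type ServerConfig struct {
	Endpoint    string `yaml:"endpoint"`
	AdminSecret string `yaml:"admin_secret,omitempty"`
}

type CodegenExecutionConfig struct {
	Framework string `yaml:"framework,omitempty"`
	OutputDir string `yaml:"output_dir,omitempty"`
}

type ActionExecutionConfig struct {
	Kind                  string                 `yaml:"kind"`
	HandlerWebhookBaseURL string                 `yaml:"handler_webhook_baseurl"`
	Codegen               CodegenExecutionConfig `yaml:"codegen,omitempty"`
}

type Config struct {
	Version             int `yaml:"version"`
	ServerConfig        `yaml:",inline"`
	MetadataDirectory   string                `yaml:"metadata_directory"`
	MigrationsDirectory string                `yaml:"migrations_directory,omitempty"`
	ActionConfig        ActionExecutionConfig `yaml:"actions"`
}

func main() {
	raw := []byte(`
version: 2
endpoint: http://localhost:8080
metadata_directory: metadata
actions:
  kind: synchronous
  handler_webhook_baseurl: http://localhost:3000
`)
	var c Config
	if err := yaml.Unmarshal(raw, &c); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", c)
}
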
// ExecutionContext contains various contextual information required by the cli
// at various points of its execution. Values are filled in by the
// initializers and passed on to each command. Commands can also fill in values
@ -141,14 +121,14 @@ type ExecutionContext struct {
ExecutionDirectory string
// MigrationDir is the name of directory where migrations are stored.
MigrationDir string
// MetadataDir is the name of directory where metadata files are stored.
MetadataDir string
// ConfigFile is the file where endpoint etc. are stored.
ConfigFile string
// MetadataFile (optional) is a yaml|json file where Hasura metadata is stored.
MetadataFile []string
// ServerConfig is the configuration object storing the endpoint and admin secret
// Config is the configuration object storing the endpoint and admin secret
// information after reading from config file or env var.
ServerConfig *ServerConfig
Config *Config
// GlobalConfigDir is the ~/.hasura-graphql directory to store configuration
// globally.
@ -183,6 +163,15 @@ type ExecutionContext struct {
// SkipUpdateCheck will skip the auto update check if set to true
SkipUpdateCheck bool
// PluginsConfig defines the config for plugins
PluginsConfig *plugins.Config
// CodegenAssetsRepo defines the config to handle codegen-assets repo
CodegenAssetsRepo *util.GitUtil
// InitTemplatesRepo defines the config to handle init-templates repo
InitTemplatesRepo *util.GitUtil
// IsTerminal indicates whether the current session is a terminal or not
IsTerminal bool
}
@ -223,11 +212,27 @@ func (ec *ExecutionContext) Prepare() error {
return errors.Wrap(err, "setting up global config failed")
}
// setup plugins path
err = ec.setupPlugins()
if err != nil {
return errors.Wrap(err, "setting up plugins path failed")
}
err = ec.setupCodegenAssetsRepo()
if err != nil {
return errors.Wrap(err, "setting up codegen-assets repo failed")
}
err = ec.setupInitTemplatesRepo()
if err != nil {
return errors.Wrap(err, "setting up init-templates repo failed")
}
ec.LastUpdateCheckFile = filepath.Join(ec.GlobalConfigDir, LastUpdateCheckFileName)
// initialize a blank server config
if ec.ServerConfig == nil {
ec.ServerConfig = &ServerConfig{}
if ec.Config == nil {
ec.Config = &Config{}
}
// generate an execution id
@ -247,22 +252,63 @@ func (ec *ExecutionContext) Prepare() error {
return nil
}
// setupPlugins creates and returns the inferred paths for hasura. By default, it assumes
// $HOME/.hasura as the base path
func (ec *ExecutionContext) setupPlugins() error {
base := filepath.Join(ec.GlobalConfigDir, "plugins")
base, err := filepath.Abs(base)
if err != nil {
return errors.Wrap(err, "cannot get absolute path")
}
ec.PluginsConfig = plugins.New(base)
ec.PluginsConfig.Logger = ec.Logger
return ec.PluginsConfig.Prepare()
}
func (ec *ExecutionContext) setupCodegenAssetsRepo() error {
base := filepath.Join(ec.GlobalConfigDir, util.ActionsCodegenDirName)
base, err := filepath.Abs(base)
if err != nil {
return errors.Wrap(err, "cannot get absolute path")
}
ec.CodegenAssetsRepo = util.NewGitUtil(util.ActionsCodegenRepoURI, base, "")
return nil
}
func (ec *ExecutionContext) setupInitTemplatesRepo() error {
base := filepath.Join(ec.GlobalConfigDir, util.InitTemplatesDirName)
base, err := filepath.Abs(base)
if err != nil {
return errors.Wrap(err, "cannot get absolute path")
}
ec.InitTemplatesRepo = util.NewGitUtil(util.InitTemplatesRepoURI, base, "")
return nil
}
// Validate prepares the ExecutionContext ec and then validates the
// ExecutionDirectory to see if all the required files and directories are in
// place.
func (ec *ExecutionContext) Validate() error {
// ensure plugins index exists
err := ec.PluginsConfig.Repo.EnsureCloned()
if err != nil {
return errors.Wrap(err, "ensuring plugins index failed")
}
// ensure codegen-assets repo exists
err = ec.CodegenAssetsRepo.EnsureCloned()
if err != nil {
return errors.Wrap(err, "ensuring codegen-assets repo failed")
}
// validate execution directory
err := ec.validateDirectory()
err = ec.validateDirectory()
if err != nil {
return errors.Wrap(err, "validating current directory failed")
}
// set names of files and directories
ec.MigrationDir = filepath.Join(ec.ExecutionDirectory, "migrations")
// set names of config file
ec.ConfigFile = filepath.Join(ec.ExecutionDirectory, "config.yaml")
ec.MetadataFile = append(ec.MetadataFile, filepath.Join(ec.MigrationDir, "metadata.yaml"))
ec.MetadataFile = append(ec.MetadataFile, filepath.Join(ec.MigrationDir, "metadata.json"))
// read config and parse the values into Config
err = ec.readConfig()
@ -270,8 +316,28 @@ func (ec *ExecutionContext) Validate() error {
return errors.Wrap(err, "cannot read config")
}
ec.Logger.Debug("graphql engine endpoint: ", ec.ServerConfig.Endpoint)
ec.Logger.Debug("graphql engine admin_secret: ", ec.ServerConfig.AdminSecret)
// set name of migration directory
ec.MigrationDir = filepath.Join(ec.ExecutionDirectory, ec.Config.MigrationsDirectory)
if _, err := os.Stat(ec.MigrationDir); os.IsNotExist(err) {
err = os.MkdirAll(ec.MigrationDir, os.ModePerm)
if err != nil {
return errors.Wrap(err, "cannot create migrations directory")
}
}
if ec.Config.Version == V2 && ec.Config.MetadataDirectory != "" {
// set name of metadata directory
ec.MetadataDir = filepath.Join(ec.ExecutionDirectory, ec.Config.MetadataDirectory)
if _, err := os.Stat(ec.MetadataDir); os.IsNotExist(err) {
err = os.MkdirAll(ec.MetadataDir, os.ModePerm)
if err != nil {
return errors.Wrap(err, "cannot create metadata directory")
}
}
}
ec.Logger.Debug("graphql engine endpoint: ", ec.Config.ServerConfig.Endpoint)
ec.Logger.Debug("graphql engine admin_secret: ", ec.Config.ServerConfig.AdminSecret)
// get version from the server and match with the cli version
err = ec.checkServerVersion()
@ -279,7 +345,13 @@ func (ec *ExecutionContext) Validate() error {
return errors.Wrap(err, "version check")
}
state := util.GetServerState(ec.ServerConfig.Endpoint, ec.ServerConfig.AdminSecret, ec.Version.ServerSemver, ec.Logger)
// get the server feature flags
err = ec.Version.GetServerFeatureFlags()
if err != nil {
return errors.Wrap(err, "error in getting server feature flags")
}
state := util.GetServerState(ec.Config.ServerConfig.Endpoint, ec.Config.ServerConfig.AdminSecret, ec.Version.ServerSemver, ec.Logger)
ec.ServerUUID = state.UUID
ec.Telemetry.ServerUUID = ec.ServerUUID
ec.Logger.Debugf("server: uuid: %s", ec.ServerUUID)
@ -288,7 +360,7 @@ func (ec *ExecutionContext) Validate() error {
}
func (ec *ExecutionContext) checkServerVersion() error {
v, err := version.FetchServerVersion(ec.ServerConfig.Endpoint)
v, err := version.FetchServerVersion(ec.Config.ServerConfig.Endpoint)
if err != nil {
return errors.Wrap(err, "failed to get version from server")
}
@ -303,17 +375,41 @@ func (ec *ExecutionContext) checkServerVersion() error {
return nil
}
// WriteConfig writes the configuration from ec.Config or input config
func (ec *ExecutionContext) WriteConfig(config *Config) error {
var cfg *Config
if config != nil {
cfg = config
} else {
cfg = ec.Config
}
y, err := yaml.Marshal(cfg)
if err != nil {
return err
}
return ioutil.WriteFile(ec.ConfigFile, y, 0644)
}
// readConfig reads the configuration from config file, flags and env vars,
// through viper.
func (ec *ExecutionContext) readConfig() error {
// need to get existing viper because https://github.com/spf13/viper/issues/233
v := ec.Viper
v.SetEnvPrefix("HASURA_GRAPHQL")
v.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
v.AutomaticEnv()
v.SetConfigName("config")
v.SetDefault("version", "1")
v.SetDefault("endpoint", "http://localhost:8080")
v.SetDefault("admin_secret", "")
v.SetDefault("access_key", "")
v.SetDefault("metadata_directory", "")
v.SetDefault("migrations_directory", "migrations")
v.SetDefault("actions.kind", "synchronous")
v.SetDefault("actions.handler_webhook_baseurl", "http://localhost:3000")
v.SetDefault("actions.codegen.framework", "")
v.SetDefault("actions.codegen.output_dir", "")
v.SetDefault("actions.codegen.uri", "")
v.AddConfigPath(ec.ExecutionDirectory)
err := v.ReadInConfig()
if err != nil {
@ -323,11 +419,25 @@ func (ec *ExecutionContext) readConfig() error {
if adminSecret == "" {
adminSecret = v.GetString("access_key")
}
ec.ServerConfig = &ServerConfig{
Endpoint: v.GetString("endpoint"),
AdminSecret: adminSecret,
ec.Config = &Config{
Version: ConfigVersion(v.GetInt("version")),
ServerConfig: ServerConfig{
Endpoint: v.GetString("endpoint"),
AdminSecret: adminSecret,
},
MetadataDirectory: v.GetString("metadata_directory"),
MigrationsDirectory: v.GetString("migrations_directory"),
ActionConfig: types.ActionExecutionConfig{
Kind: v.GetString("actions.kind"),
HandlerWebhookBaseURL: v.GetString("actions.handler_webhook_baseurl"),
Codegen: &types.CodegenExecutionConfig{
Framework: v.GetString("actions.codegen.framework"),
OutputDir: v.GetString("actions.codegen.output_dir"),
URI: v.GetString("actions.codegen.uri"),
},
},
}
return ec.ServerConfig.ParseEndpoint()
return ec.Config.ServerConfig.ParseEndpoint()
}
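
readConfig wires viper so that HASURA_GRAPHQL_* environment variables override config.yaml, including the new nested actions.* keys through the "." to "_" replacer. A small self-contained sketch of that behaviour (values are illustrative):

package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/spf13/viper"
)

func main() {
	os.Setenv("HASURA_GRAPHQL_ENDPOINT", "https://my-app.example.com")
	os.Setenv("HASURA_GRAPHQL_ACTIONS_KIND", "asynchronous")

	v := viper.New()
	v.SetEnvPrefix("HASURA_GRAPHQL")
	v.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
	v.AutomaticEnv()
	v.SetDefault("endpoint", "http://localhost:8080")
	v.SetDefault("actions.kind", "synchronous")

	// env vars win over the defaults (and over values read from config.yaml)
	fmt.Println(v.GetString("endpoint"))     // https://my-app.example.com
	fmt.Println(v.GetString("actions.kind")) // asynchronous
}
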
// setupSpinner creates a default spinner if the context does not already have
@ -355,25 +465,6 @@ func (ec *ExecutionContext) Spin(message string) {
func (ec *ExecutionContext) setupLogger() {
if ec.Logger == nil {
logger := logrus.New()
if ec.IsTerminal {
if ec.NoColor {
logger.Formatter = &logrus.TextFormatter{
DisableColors: true,
DisableTimestamp: true,
}
} else {
logger.Formatter = &logrus.TextFormatter{
ForceColors: true,
DisableTimestamp: true,
}
}
} else {
logger.Formatter = &logrus.JSONFormatter{
PrettyPrint: false,
}
}
logger.Out = colorable.NewColorableStdout()
ec.Logger = logger
}
@ -386,6 +477,9 @@ func (ec *ExecutionContext) setupLogger() {
ec.Logger.SetLevel(level)
}
ec.Logger.Hooks = make(logrus.LevelHooks)
ec.Logger.AddHook(newSpinnerHandlerHook(ec.Logger, ec.Spinner, ec.IsTerminal, ec.NoColor))
// set the logger for telemetry
if ec.Telemetry.Logger == nil {
ec.Telemetry.Logger = ec.Logger
@ -399,35 +493,3 @@ func (ec *ExecutionContext) setVersion() {
ec.Version = version.New()
}
}
// GetMetadataFilePath returns the file path based on the format.
func (ec *ExecutionContext) GetMetadataFilePath(format string) (string, error) {
ext := fmt.Sprintf(".%s", format)
for _, filePath := range ec.MetadataFile {
switch p := filepath.Ext(filePath); p {
case ext:
return filePath, nil
}
}
return "", errors.New("unsupported file type")
}
// GetExistingMetadataFile returns the path to the default metadata file that
// also exists, json or yaml
func (ec *ExecutionContext) GetExistingMetadataFile() (string, error) {
filename := ""
for _, format := range []string{"yaml", "json"} {
f, err := ec.GetMetadataFilePath(format)
if err != nil {
return "", errors.Wrap(err, "cannot get metadata file")
}
filename = f
if _, err := os.Stat(filename); os.IsNotExist(err) {
continue
}
break
}
return filename, nil
}


@ -1,89 +0,0 @@
package cli_test
import (
"math/rand"
"os"
"path/filepath"
"strconv"
"testing"
"time"
"github.com/briandowns/spinner"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/commands"
"github.com/hasura/graphql-engine/cli/util/fake"
"github.com/sirupsen/logrus/hooks/test"
"github.com/spf13/viper"
)
func init() {
rand.Seed(time.Now().UTC().UnixNano())
}
func TestPrepare(t *testing.T) {
logger, _ := test.NewNullLogger()
ec := cli.NewExecutionContext()
ec.Logger = logger
ec.Spinner = spinner.New(spinner.CharSets[7], 100*time.Millisecond)
ec.Spinner.Writer = &fake.FakeWriter{}
err := ec.Prepare()
if err != nil {
t.Fatalf("prepare failed: %v", err)
}
if ec.CMDName == "" {
t.Fatalf("expected CMDName, got: %v", ec.CMDName)
}
if ec.Spinner == nil {
t.Fatal("got spinner empty")
}
if ec.Logger == nil {
t.Fatal("got empty logger")
}
if ec.GlobalConfigDir == "" {
t.Fatalf("global config dir: expected $HOME/%s, got %s", cli.GlobalConfigDirName, ec.GlobalConfigDir)
}
if ec.GlobalConfigFile == "" {
t.Fatalf("global config file: expected $HOME/%s/%s, got %s", cli.GlobalConfigDirName, cli.GlobalConfigFileName, ec.GlobalConfigFile)
}
if ec.ServerConfig == nil {
t.Fatal("got empty Config")
}
}
func TestValidate(t *testing.T) {
logger, _ := test.NewNullLogger()
ec := cli.NewExecutionContext()
ec.Logger = logger
ec.Spinner = spinner.New(spinner.CharSets[7], 100*time.Millisecond)
ec.Spinner.Writer = &fake.FakeWriter{}
ec.ExecutionDirectory = filepath.Join(os.TempDir(), "hasura-gql-tests-"+strconv.Itoa(rand.Intn(1000)))
ec.Viper = viper.New()
// validate a directory created by init
initCmd := commands.NewInitCmd(ec)
initCmd.Flags().Set("directory", ec.ExecutionDirectory)
err := initCmd.Execute()
if err != nil {
t.Fatalf("execution failed: %v", err)
}
err = ec.Prepare()
if err != nil {
t.Fatalf("prepare failed: %v", err)
}
err = ec.Validate()
if err != nil {
t.Fatalf("validate failed: %v", err)
}
// remove config.yaml and validate, should result in an error
err = os.Remove(filepath.Join(ec.ExecutionDirectory, "config.yaml"))
if err != nil {
t.Fatalf("remove failed: %v", err)
}
err = ec.Validate()
if err == nil {
t.Fatal("validate succeeded with no config.yaml")
}
os.RemoveAll(ec.ExecutionDirectory)
}

70
cli/commands/actions.go Normal file

@ -0,0 +1,70 @@
package commands
import (
"encoding/json"
"fmt"
"io/ioutil"
"path/filepath"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/util"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
// NewActionsCmd returns the actions command
func NewActionsCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
ec.Viper = v
actionsCmd := &cobra.Command{
Use: "actions",
Short: "Manage actions on hasura",
SilenceUsage: true,
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
err := ec.Prepare()
if err != nil {
return err
}
err = ec.Validate()
if err != nil {
return err
}
if ec.Config.Version < cli.V2 {
return fmt.Errorf("actions commands can be executed only when config version is greater than 1")
}
if ec.MetadataDir == "" {
return fmt.Errorf("actions commands can be executed only when metadata_directory is set in config")
}
return nil
},
}
actionsCmd.AddCommand(
newActionsCreateCmd(ec, v),
newActionsCodegenCmd(ec),
newActionsUseCodegenCmd(ec),
)
actionsCmd.PersistentFlags().String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
actionsCmd.PersistentFlags().String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
actionsCmd.PersistentFlags().String("access-key", "", "access key for Hasura GraphQL Engine")
actionsCmd.PersistentFlags().MarkDeprecated("access-key", "use --admin-secret instead")
v.BindPFlag("endpoint", actionsCmd.PersistentFlags().Lookup("endpoint"))
v.BindPFlag("admin_secret", actionsCmd.PersistentFlags().Lookup("admin-secret"))
v.BindPFlag("access_key", actionsCmd.PersistentFlags().Lookup("access-key"))
return actionsCmd
}
func getCodegenFrameworks() (allFrameworks []codegenFramework, err error) {
frameworkFileBytes, err := ioutil.ReadFile(filepath.Join(ec.GlobalConfigDir, util.ActionsCodegenDirName, "frameworks.json"))
if err != nil {
return
}
err = json.Unmarshal(frameworkFileBytes, &allFrameworks)
if err != nil {
return
}
return
}
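
getCodegenFrameworks expects a frameworks.json file inside the cloned codegen-assets directory whose entries decode into the codegenFramework struct used by use-codegen (defined in actions_use_codegen.go below). A sketch of the expected shape; the concrete entries and their hasStarterKit values are illustrative, not taken from the real repo:

// Illustrative decode of a frameworks.json-shaped document.
package main

import (
	"encoding/json"
	"fmt"
)

type codegenFramework struct {
	Name          string `json:"name"`
	HasStarterKit bool   `json:"hasStarterKit"`
}

func main() {
	sample := []byte(`[
	  {"name": "nodejs-express", "hasStarterKit": true},
	  {"name": "typescript-zeit", "hasStarterKit": false}
	]`)
	var frameworks []codegenFramework
	if err := json.Unmarshal(sample, &frameworks); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", frameworks)
}
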


@ -0,0 +1,99 @@
package commands
import (
"fmt"
"strings"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/metadata/actions"
"github.com/hasura/graphql-engine/cli/metadata/actions/types"
"github.com/pkg/errors"
"github.com/spf13/cobra"
)
func newActionsCodegenCmd(ec *cli.ExecutionContext) *cobra.Command {
opts := &actionsCodegenOptions{
EC: ec,
}
actionsCodegenCmd := &cobra.Command{
Use: "codegen [action-name]",
Short: "Generate code for actions",
Example: ` # Generate code for all actions
hasura actions codegen
# Generate code for an action
hasura actions codegen [action-name]
# Generate code for two or more actions
hasura actions codegen [action-name] [action-name...]
# Derive an action from a hasura operation
hasura actions codegen [action-name] --derive-from ""`,
SilenceUsage: true,
RunE: func(cmd *cobra.Command, args []string) error {
opts.actions = args
return opts.run()
},
}
f := actionsCodegenCmd.Flags()
f.StringVar(&opts.deriveFrom, "derive-from", "", "derive action from a hasura operation")
return actionsCodegenCmd
}
type actionsCodegenOptions struct {
EC *cli.ExecutionContext
actions []string
deriveFrom string
}
func (o *actionsCodegenOptions) run() (err error) {
migrateDrv, err := newMigrate(o.EC, true)
if err != nil {
return err
}
var derivePayload types.DerivePayload
if o.deriveFrom != "" {
derivePayload.Operation = strings.TrimSpace(o.deriveFrom)
o.EC.Spin("Deriving a Hasura operation...")
introSchema, err := migrateDrv.GetIntroSpectionSchema()
if err != nil {
return errors.Wrap(err, "unable to fetch introspection schema")
}
derivePayload.IntrospectionSchema = introSchema
o.EC.Spinner.Stop()
}
if o.EC.Config.ActionConfig.Codegen.Framework == "" {
return fmt.Errorf(`Could not find codegen config. For adding codegen config, run:
hasura actions use-codegen`)
}
// if no actions are passed, perform codegen for all actions
o.EC.Spin("Generating code...")
var codegenActions []string
actionCfg := actions.New(o.EC, o.EC.MetadataDir)
if len(o.actions) == 0 {
actionsFileContent, err := actionCfg.GetActionsFileContent()
if err != nil {
return errors.Wrap(err, "error getting actions file content")
}
for _, action := range actionsFileContent.Actions {
codegenActions = append(codegenActions, action.Name)
}
} else {
codegenActions = o.actions
}
for _, actionName := range codegenActions {
err = actionCfg.Codegen(actionName, derivePayload)
if err != nil {
return errors.Wrapf(err, "error generating codegen for action %s", actionName)
}
}
o.EC.Spinner.Stop()
o.EC.Logger.Info("Codegen files generated at " + o.EC.Config.ActionConfig.Codegen.OutputDir)
return nil
}


@ -0,0 +1,143 @@
package commands
import (
"fmt"
"strings"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/metadata/actions"
"github.com/hasura/graphql-engine/cli/metadata/actions/types"
"github.com/hasura/graphql-engine/cli/util"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func newActionsCreateCmd(ec *cli.ExecutionContext, v *viper.Viper) *cobra.Command {
opts := &actionsCreateOptions{
EC: ec,
}
actionsCreateCmd := &cobra.Command{
Use: "create [action-name]",
Short: "Create an action",
Example: ` # Create an Action
hasura actions create [action-name]
# Create an action with codegen
hasura actions create [action-name] --with-codegen true
# Create an action by deriving from a hasura operation
hasura actions create [action-name] --derive-from ''
# Create an action with a different kind or webhook
hasura actions create [action-name] --kind [synchronous|asynchronous] --webhook [http://localhost:3000]`,
SilenceUsage: true,
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
opts.name = args[0]
return opts.run()
},
}
f := actionsCreateCmd.Flags()
f.StringVar(&opts.deriveFrom, "derive-from", "", "derive action from a Hasura operation")
f.BoolVar(&opts.withCodegen, "with-codegen", false, "create action along with codegen")
f.String("kind", "", "kind to use in action")
f.String("webhook", "", "webhook to use in action")
// bind to viper
v.BindPFlag("actions.kind", f.Lookup("kind"))
v.BindPFlag("actions.handler_webhook_baseurl", f.Lookup("webhook"))
return actionsCreateCmd
}
type actionsCreateOptions struct {
EC *cli.ExecutionContext
name string
deriveFrom string
withCodegen bool
}
func (o *actionsCreateOptions) run() error {
migrateDrv, err := newMigrate(o.EC, true)
if err != nil {
return err
}
// introspect Hasura schema if a mutation is being derived
var introSchema interface{}
if o.deriveFrom != "" {
o.deriveFrom = strings.TrimSpace(o.deriveFrom)
o.EC.Spin("Deriving a Hasura operation...")
introSchema, err = migrateDrv.GetIntroSpectionSchema()
if err != nil {
return errors.Wrap(err, "error in fetching introspection schema")
}
o.EC.Spinner.Stop()
}
// create new action
o.EC.Spin("Creating the action...")
o.EC.Spinner.Stop()
actionCfg := actions.New(o.EC, o.EC.MetadataDir)
err = actionCfg.Create(o.name, introSchema, o.deriveFrom)
if err != nil {
return errors.Wrap(err, "error in creating action")
}
err = migrateDrv.ApplyMetadata()
if err != nil {
return errors.Wrap(err, "error in applying metadata")
}
o.EC.Spinner.Stop()
o.EC.Logger.WithField("name", o.name).Infoln("action created")
// if codegen config not present, skip codegen
if o.EC.Config.ActionConfig.Codegen.Framework == "" {
if o.withCodegen {
return fmt.Errorf(`Could not find codegen config. For adding codegen config, run:
hasura actions use-codegen`)
}
return nil
}
// if with-codegen flag not present, ask them if they want to codegen
var confirmation string
if !o.withCodegen {
confirmation, err = util.GetYesNoPrompt("Do you want to generate " + o.EC.Config.ActionConfig.Codegen.Framework + " code for this action and the custom types?")
if err != nil {
return errors.Wrap(err, "error in getting user input")
}
}
if confirmation == "n" {
infoMsg := fmt.Sprintf(`You skipped codegen. For getting codegen for this action, run:
hasura actions codegen %s
`, o.name)
o.EC.Logger.Info(infoMsg)
return nil
}
// construct derive payload to send to codegenerator
derivePayload := types.DerivePayload{
IntrospectionSchema: introSchema,
Operation: o.deriveFrom,
ActionName: o.name,
}
// Run codegen
o.EC.Spin(fmt.Sprintf(`Running "hasura actions codegen %s"...`, o.name))
err = actionCfg.Codegen(o.name, derivePayload)
if err != nil {
return errors.Wrap(err, "error in generating codegen")
}
o.EC.Spinner.Stop()
o.EC.Logger.Info("Codegen files generated at " + o.EC.Config.ActionConfig.Codegen.OutputDir)
return nil
}


@ -0,0 +1,171 @@
package commands
import (
"fmt"
"path/filepath"
"sort"
"strconv"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/util"
"github.com/pkg/errors"
"github.com/spf13/cobra"
)
type codegenFramework struct {
Name string `json:"name"`
HasStarterKit bool `json:"hasStarterKit"`
}
func newActionsUseCodegenCmd(ec *cli.ExecutionContext) *cobra.Command {
opts := &actionsUseCodegenOptions{
EC: ec,
}
actionsUseCodegenCmd := &cobra.Command{
Use: "use-codegen",
Short: "Configure the codegen module",
Example: ` # Use codegen by providing framework
hasura actions use-codegen --framework nodejs-express
# Use codegen from framework list
hasura actions use-codegen
# Set output directory
hasura actions use-codegen --output-dir codegen
# Use a codegen with a starter kit
hasura actions use-codegen --with-starter-kit true`,
SilenceUsage: true,
RunE: func(cmd *cobra.Command, args []string) error {
return opts.run()
},
}
f := actionsUseCodegenCmd.Flags()
f.StringVar(&opts.framework, "framework", "", "framework to be used by codegen")
f.StringVar(&opts.outputDir, "output-dir", "", "directory to create the codegen files")
f.BoolVar(&opts.withStarterKit, "with-starter-kit", false, "clone starter kit for a framework")
return actionsUseCodegenCmd
}
type actionsUseCodegenOptions struct {
EC *cli.ExecutionContext
framework string
outputDir string
withStarterKit bool
}
func (o *actionsUseCodegenOptions) run() error {
o.EC.Spin("Ensuring codegen-assets repo is updated...")
defer o.EC.Spinner.Stop()
// ensure the actions-codegen repo is updated
err := o.EC.CodegenAssetsRepo.EnsureUpdated()
if err != nil {
o.EC.Logger.Warnf("unable to update codegen-assets repo, got %v", err)
}
newCodegenExecutionConfig := o.EC.Config.ActionConfig.Codegen
newCodegenExecutionConfig.Framework = ""
o.EC.Spin("Fetching frameworks...")
allFrameworks, err := getCodegenFrameworks()
if err != nil {
return errors.Wrap(err, "error in fetching codegen frameworks")
}
o.EC.Spinner.Stop()
if o.framework == "" {
// if framework flag is not provided, display a list and allow them to choose
var frameworkList []string
for _, f := range allFrameworks {
frameworkList = append(frameworkList, f.Name)
}
sort.Strings(frameworkList)
newCodegenExecutionConfig.Framework, err = util.GetSelectPrompt("Choose a codegen framework to use", frameworkList)
if err != nil {
return errors.Wrap(err, "error in selecting framework")
}
} else {
for _, f := range allFrameworks {
if o.framework == f.Name {
newCodegenExecutionConfig.Framework = o.framework
}
}
if newCodegenExecutionConfig.Framework == "" {
return fmt.Errorf("framework %s is not found", o.framework)
}
}
hasStarterKit := false
for _, f := range allFrameworks {
if f.Name == newCodegenExecutionConfig.Framework && f.HasStarterKit {
hasStarterKit = true
}
}
// if with-starter-kit flag is set and the same is not available for the framework, return error
if o.withStarterKit && !hasStarterKit {
return fmt.Errorf("starter kit is not available for framework %s", newCodegenExecutionConfig.Framework)
}
// if with-starter-kit flag is not provided, give an option to clone a starterkit
if !o.withStarterKit && hasStarterKit {
shouldCloneStarterKit, err := util.GetYesNoPrompt("Do you also want to clone a starter kit for " + newCodegenExecutionConfig.Framework + "?")
if err != nil {
return errors.Wrap(err, "error in getting input from user")
}
o.withStarterKit = shouldCloneStarterKit == "y"
}
// if output directory is not provided, make them enter it
if o.outputDir == "" {
outputDir, err := util.GetFSPathPrompt("Where do you want to place the codegen files?", o.EC.Config.ActionConfig.Codegen.OutputDir)
if err != nil {
return errors.Wrap(err, "error in getting output directory input")
}
newCodegenExecutionConfig.OutputDir = outputDir
} else {
newCodegenExecutionConfig.OutputDir = o.outputDir
}
// clone the starter kit
o.EC.Spin("Cloning the starter kit...")
if o.withStarterKit && hasStarterKit {
// get a directory name to clone the starter kit in
starterKitDirname := newCodegenExecutionConfig.Framework
err = util.FSCheckIfDirPathExists(
filepath.Join(o.EC.ExecutionDirectory, starterKitDirname),
)
suffix := 2
for err == nil {
starterKitDirname = newCodegenExecutionConfig.Framework + "-" + strconv.Itoa(suffix)
suffix++
err = util.FSCheckIfDirPathExists(starterKitDirname)
}
err = nil
// copy the starter kit
destinationDir := filepath.Join(o.EC.ExecutionDirectory, starterKitDirname)
err = util.FSCopyDir(
filepath.Join(o.EC.GlobalConfigDir, util.ActionsCodegenDirName, newCodegenExecutionConfig.Framework, "starter-kit"),
destinationDir,
)
if err != nil {
return errors.Wrap(err, "error in copying starter kit")
}
o.EC.Logger.Info("Starter kit cloned at " + destinationDir)
}
newConfig := o.EC.Config
newConfig.ActionConfig.Codegen = newCodegenExecutionConfig
err = o.EC.WriteConfig(newConfig)
if err != nil {
return errors.Wrap(err, "error in writing config")
}
o.EC.Spinner.Stop()
o.EC.Logger.Info("Codegen configuration updated in config.yaml")
return nil
}


@ -3,6 +3,7 @@ package commands
import (
"fmt"
"net/http"
"os"
"sync"
"github.com/fatih/color"
@ -23,7 +24,7 @@ import (
// NewConsoleCmd returns the console command
func NewConsoleCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
opts := &consoleOptions{
opts := &ConsoleOptions{
EC: ec,
}
consoleCmd := &cobra.Command{
@ -47,10 +48,14 @@ func NewConsoleCmd(ec *cli.ExecutionContext) *cobra.Command {
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
err := ec.Prepare()
if err != nil {
return err
}
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
return opts.run()
return opts.Run()
},
}
f := consoleCmd.Flags()
@ -74,7 +79,7 @@ func NewConsoleCmd(ec *cli.ExecutionContext) *cobra.Command {
return consoleCmd
}
type consoleOptions struct {
type ConsoleOptions struct {
EC *cli.ExecutionContext
APIPort string
@ -87,9 +92,12 @@ type consoleOptions struct {
StaticDir string
Browser string
APIServerInterruptSignal chan os.Signal
ConsoleServerInterruptSignal chan os.Signal
}
func (o *consoleOptions) run() error {
func (o *ConsoleOptions) Run() error {
log := o.EC.Logger
// Switch to "release" mode in production.
gin.SetMode(gin.ReleaseMode)
@ -103,12 +111,7 @@ func (o *consoleOptions) run() error {
return errors.New("cannot validate version, object is nil")
}
metadataPath, err := o.EC.GetMetadataFilePath("yaml")
if err != nil {
return err
}
t, err := newMigrate(o.EC.MigrationDir, o.EC.ServerConfig.ParsedEndpoint, o.EC.ServerConfig.AdminSecret, o.EC.Logger, o.EC.Version, false)
t, err := newMigrate(o.EC, false)
if err != nil {
return err
}
@ -119,7 +122,7 @@ func (o *consoleOptions) run() error {
t,
}
r.setRoutes(o.EC.MigrationDir, metadataPath, o.EC.Logger)
r.setRoutes(o.EC.MigrationDir, o.EC.Logger)
consoleTemplateVersion := o.EC.Version.GetConsoleTemplateVersion()
consoleAssetsVersion := o.EC.Version.GetConsoleAssetsVersion()
@ -133,10 +136,10 @@ func (o *consoleOptions) run() error {
"apiPort": o.APIPort,
"cliVersion": o.EC.Version.GetCLIVersion(),
"serverVersion": o.EC.Version.GetServerVersion(),
"dataApiUrl": o.EC.ServerConfig.ParsedEndpoint.String(),
"dataApiUrl": o.EC.Config.ServerConfig.ParsedEndpoint.String(),
"dataApiVersion": "",
"hasAccessKey": adminSecretHeader == XHasuraAccessKey,
"adminSecret": o.EC.ServerConfig.AdminSecret,
"adminSecret": o.EC.Config.ServerConfig.AdminSecret,
"assetsVersion": consoleAssetsVersion,
"enableTelemetry": o.EC.GlobalConfig.EnableTelemetry,
"cliUUID": o.EC.GlobalConfig.UUID,
@ -145,22 +148,52 @@ func (o *consoleOptions) run() error {
return errors.Wrap(err, "error serving console")
}
// Create WaitGroup for running 3 servers
// create servers
apiServer := &http.Server{
Addr: fmt.Sprintf("%s:%s", o.Address, o.APIPort),
Handler: r.router,
}
consoleServer := &http.Server{
Addr: fmt.Sprintf("%s:%s", o.Address, o.ConsolePort),
Handler: consoleRouter,
}
go func() {
<-o.APIServerInterruptSignal
if err := apiServer.Close(); err != nil {
o.EC.Logger.Debugf("unable to close server running on port %s", o.APIPort)
}
}()
go func() {
<-o.ConsoleServerInterruptSignal
if err := consoleServer.Close(); err != nil {
o.EC.Logger.Debugf("unable to close server running on port %s", o.ConsolePort)
}
}()
// Create WaitGroup for running 2 servers
wg := &sync.WaitGroup{}
o.WG = wg
wg.Add(1)
go func() {
err = r.router.Run(o.Address + ":" + o.APIPort)
if err != nil {
o.EC.Logger.WithError(err).Errorf("error listening on port %s", o.APIPort)
if err := apiServer.ListenAndServe(); err != nil {
if err == http.ErrServerClosed {
o.EC.Logger.Infof("server closed on port %s under signal", o.APIPort)
} else {
o.EC.Logger.WithError(err).Errorf("error listening on port %s", o.APIPort)
}
}
wg.Done()
}()
wg.Add(1)
go func() {
err = consoleRouter.Run(o.Address + ":" + o.ConsolePort)
if err != nil {
o.EC.Logger.WithError(err).Errorf("error listening on port %s", o.ConsolePort)
if err := consoleServer.ListenAndServe(); err != nil {
if err == http.ErrServerClosed {
o.EC.Logger.Infof("server closed on port %s under signal", o.ConsolePort)
} else {
o.EC.Logger.WithError(err).Errorf("error listening on port %s", o.ConsolePort)
}
}
wg.Done()
}()
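
The hunk above swaps gin's blocking Run calls for explicit http.Server values so each server can be closed from its own interrupt channel. Below is a minimal, standalone sketch of that shutdown pattern; the ports, handlers and log messages are illustrative stand-ins rather than the CLI's actual wiring, but it shows why a ListenAndServe that returns http.ErrServerClosed is treated as a clean exit.

package main

import (
	"log"
	"net/http"
	"os"
	"os/signal"
	"sync"
)

func main() {
	apiServer := &http.Server{Addr: "localhost:9693", Handler: http.NewServeMux()}
	consoleServer := &http.Server{Addr: "localhost:9695", Handler: http.NewServeMux()}

	// one interrupt channel per server, mirroring the APIServerInterruptSignal /
	// ConsoleServerInterruptSignal fields added in the diff
	apiInterrupt := make(chan os.Signal, 1)
	consoleInterrupt := make(chan os.Signal, 1)
	signal.Notify(apiInterrupt, os.Interrupt)
	signal.Notify(consoleInterrupt, os.Interrupt)

	servers := map[*http.Server]chan os.Signal{
		apiServer:     apiInterrupt,
		consoleServer: consoleInterrupt,
	}

	wg := &sync.WaitGroup{}
	for srv, interrupt := range servers {
		srv, interrupt := srv, interrupt

		// close the server once its interrupt channel fires
		go func() {
			<-interrupt
			if err := srv.Close(); err != nil {
				log.Printf("unable to close server on %s: %v", srv.Addr, err)
			}
		}()

		// serve until closed; http.ErrServerClosed signals a clean shutdown
		wg.Add(1)
		go func() {
			defer wg.Done()
			if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {
				log.Printf("error listening on %s: %v", srv.Addr, err)
			}
		}()
	}
	wg.Wait()
}
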
@ -200,12 +233,13 @@ type cRouter struct {
migrate *migrate.Migrate
}
func (r *cRouter) setRoutes(migrationDir, metadataFile string, logger *logrus.Logger) {
func (r *cRouter) setRoutes(migrationDir string, logger *logrus.Logger) {
apis := r.router.Group("/apis")
{
apis.Use(setLogger(logger))
apis.Use(setFilePath(migrationDir))
apis.Use(setMigrate(r.migrate))
apis.Use(setConfigVersion())
// Migrate api endpoints and middleware
migrateAPIs := apis.Group("/migrate")
{
@ -223,7 +257,6 @@ func (r *cRouter) setRoutes(migrationDir, metadataFile string, logger *logrus.Lo
// Migrate api endpoints and middleware
metadataAPIs := apis.Group("/metadata")
{
metadataAPIs.Use(setMetadataFile(metadataFile))
metadataAPIs.Any("", api.MetadataAPI)
}
}
@ -244,6 +277,13 @@ func setFilePath(dir string) gin.HandlerFunc {
}
}
func setConfigVersion() gin.HandlerFunc {
return func(c *gin.Context) {
c.Set("version", int(ec.Config.Version))
c.Next()
}
}
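
A hedged sketch of the middleware pattern that setConfigVersion and setFilePath follow: group-level gin middleware stashes a value on the request context with c.Set, and handlers mounted under that group read it back. The /apis/version route and the hard-coded config version below are invented for illustration; only the c.Set / c.Next flow mirrors the code above.

package main

import "github.com/gin-gonic/gin"

func setConfigVersion(version int) gin.HandlerFunc {
	return func(c *gin.Context) {
		c.Set("version", version)
		c.Next()
	}
}

func main() {
	gin.SetMode(gin.ReleaseMode)
	r := gin.New()

	apis := r.Group("/apis")
	apis.Use(setConfigVersion(2))
	apis.GET("/version", func(c *gin.Context) {
		// GetInt returns the zero value if the middleware never set the key
		c.JSON(200, gin.H{"config_version": c.GetInt("version")})
	})

	// listens on :9693; GET /apis/version returns {"config_version":2}
	_ = r.Run(":9693")
}
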
func setMetadataFile(file string) gin.HandlerFunc {
return func(c *gin.Context) {
c.Set("metadataFile", file)

View File

@ -1,63 +0,0 @@
package commands
import (
"os"
"testing"
"time"
"github.com/briandowns/spinner"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/util/fake"
"github.com/hasura/graphql-engine/cli/version"
"github.com/sirupsen/logrus/hooks/test"
)
func TestConsoleCmd(t *testing.T) {
logger, _ := test.NewNullLogger()
ec := cli.NewExecutionContext()
ec.Telemetry.Command = "TEST"
ec.Logger = logger
ec.Spinner = spinner.New(spinner.CharSets[7], 100*time.Millisecond)
ec.ServerConfig = &cli.ServerConfig{
Endpoint: "http://localhost:8080",
AdminSecret: os.Getenv("HASURA_GRAPHQL_TEST_ADMIN_SECRET"),
}
ec.MetadataFile = []string{"metadata.yaml"}
ec.Version = version.New()
v, err := version.FetchServerVersion(ec.ServerConfig.Endpoint)
if err != nil {
t.Fatalf("getting server version failed: %v", err)
}
ec.Version.SetServerVersion(v)
err = ec.Prepare()
if err != nil {
t.Fatalf("prepare failed: %v", err)
}
opts := &consoleOptions{
EC: ec,
APIPort: "9693",
ConsolePort: "9695",
Address: "localhost",
DontOpenBrowser: true,
}
opts.EC.Spinner.Writer = &fake.FakeWriter{}
err = opts.EC.ServerConfig.ParseEndpoint()
if err != nil {
t.Fatal(err)
}
go func() {
t.Log("waiting for console to start")
for opts.WG == nil {
time.Sleep(1 * time.Second)
}
opts.WG.Done()
opts.WG.Done()
}()
err = opts.run()
if err != nil {
t.Fatalf("failed running console: %v", err)
}
// TODO: (shahidhk) curl the console endpoint for 200 response
}

View File

@ -25,8 +25,9 @@ func NewDocsCmd(ec *cli.ExecutionContext) *cobra.Command {
Short: "Generate CLI docs in various formats",
Hidden: true,
SilenceUsage: true,
PreRun: func(cmd *cobra.Command, args []string) {
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = viper.New()
return nil
},
RunE: func(cmd *cobra.Command, args []string) (err error) {
err = os.MkdirAll(docDirectory, os.ModePerm)

View File

@ -2,12 +2,21 @@ package commands
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strings"
"github.com/ghodss/yaml"
"github.com/hasura/graphql-engine/cli/metadata/actions"
"github.com/hasura/graphql-engine/cli/metadata/actions/types"
"github.com/hasura/graphql-engine/cli/metadata/allowlist"
"github.com/hasura/graphql-engine/cli/metadata/functions"
"github.com/hasura/graphql-engine/cli/metadata/querycollections"
"github.com/hasura/graphql-engine/cli/metadata/remoteschemas"
"github.com/hasura/graphql-engine/cli/metadata/tables"
metadataTypes "github.com/hasura/graphql-engine/cli/metadata/types"
metadataVersion "github.com/hasura/graphql-engine/cli/metadata/version"
"github.com/hasura/graphql-engine/cli/util"
"github.com/hasura/graphql-engine/cli"
"github.com/manifoldco/promptui"
"github.com/pkg/errors"
@ -16,12 +25,12 @@ import (
)
const (
defaultDirectory = "hasura"
defaultDirectory string = "hasura"
)
// NewInitCmd is the definition for init command
func NewInitCmd(ec *cli.ExecutionContext) *cobra.Command {
opts := &initOptions{
opts := &InitOptions{
EC: ec,
}
initCmd := &cobra.Command{
@ -39,37 +48,54 @@ func NewInitCmd(ec *cli.ExecutionContext) *cobra.Command {
# See https://docs.hasura.io/1.0/graphql/manual/migrations/index.html for more details`,
SilenceUsage: true,
Args: cobra.MaximumNArgs(1),
PreRun: func(cmd *cobra.Command, args []string) {
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = viper.New()
err := ec.Prepare()
if err != nil {
return err
}
return ec.PluginsConfig.Repo.EnsureCloned()
},
RunE: func(cmd *cobra.Command, args []string) error {
if len(args) == 1 {
opts.InitDir = args[0]
}
return opts.run()
return opts.Run()
},
}
f := initCmd.Flags()
f.StringVar(&opts.Version, "version", "2", "config version to be used")
f.StringVar(&opts.InitDir, "directory", "", "name of directory where files will be created")
f.StringVar(&opts.MetadataDir, "metadata-directory", "metadata", "name of directory where metadata files will be created")
f.StringVar(&opts.Endpoint, "endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.StringVar(&opts.AdminSecret, "admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.StringVar(&opts.AdminSecret, "access-key", "", "access key for Hasura GraphQL Engine")
f.StringVar(&opts.ActionKind, "action-kind", "synchronous", "kind to be used for an action")
f.StringVar(&opts.ActionHandler, "action-handler-webhook-baseurl", "http://localhost:3000", "webhook baseurl to be used for an action")
f.StringVar(&opts.Template, "install-manifest", "", "install manifest to be cloned")
f.MarkDeprecated("access-key", "use --admin-secret instead")
f.MarkDeprecated("directory", "use directory-name argument instead")
return initCmd
}
type initOptions struct {
type InitOptions struct {
EC *cli.ExecutionContext
Version string
Endpoint string
AdminSecret string
InitDir string
MetadataDir string
ActionKind string
ActionHandler string
Template string
}
func (o *initOptions) run() error {
func (o *InitOptions) Run() error {
var dir string
// prompt for init directory if it's not set already
if o.InitDir == "" {
@ -112,6 +138,12 @@ func (o *initOptions) run() error {
hasura console
`, o.EC.ExecutionDirectory)
// create template files
err = o.createTemplateFiles()
if err != nil {
return err
}
// create other required files, like config.yaml, migrations directory
err = o.createFiles()
if err != nil {
@ -123,30 +155,44 @@ func (o *initOptions) run() error {
}
// createFiles creates files required by the CLI in the ExecutionDirectory
func (o *initOptions) createFiles() error {
func (o *InitOptions) createFiles() error {
// create the directory
err := os.MkdirAll(filepath.Dir(o.EC.ExecutionDirectory), os.ModePerm)
if err != nil {
return errors.Wrap(err, "error creating setup directories")
}
// set config object
config := &cli.ServerConfig{
Endpoint: "http://localhost:8080",
var config *cli.Config
if o.Version == "1" {
config = &cli.Config{
ServerConfig: cli.ServerConfig{
Endpoint: "http://localhost:8080",
},
}
} else {
config = &cli.Config{
Version: cli.V2,
ServerConfig: cli.ServerConfig{
Endpoint: "http://localhost:8080",
},
MetadataDirectory: o.MetadataDir,
ActionConfig: types.ActionExecutionConfig{
Kind: o.ActionKind,
HandlerWebhookBaseURL: o.ActionHandler,
},
}
}
if o.Endpoint != "" {
config.Endpoint = o.Endpoint
config.ServerConfig.Endpoint = o.Endpoint
}
if o.AdminSecret != "" {
config.AdminSecret = o.AdminSecret
config.ServerConfig.AdminSecret = o.AdminSecret
}
// write the config file
data, err := yaml.Marshal(config)
if err != nil {
return errors.Wrap(err, "cannot convert to yaml")
}
o.EC.Config = config
o.EC.ConfigFile = filepath.Join(o.EC.ExecutionDirectory, "config.yaml")
err = ioutil.WriteFile(o.EC.ConfigFile, data, 0644)
err = o.EC.WriteConfig(nil)
if err != nil {
return errors.Wrap(err, "cannot write config file")
}
@ -158,6 +204,53 @@ func (o *initOptions) createFiles() error {
return errors.Wrap(err, "cannot write migration directory")
}
if config.Version == cli.V2 {
// create metadata directory
o.EC.MetadataDir = filepath.Join(o.EC.ExecutionDirectory, "metadata")
err = os.MkdirAll(o.EC.MetadataDir, os.ModePerm)
if err != nil {
return errors.Wrap(err, "cannot write migration directory")
}
// create metadata files
plugins := make(metadataTypes.MetadataPlugins, 0)
plugins = append(plugins, metadataVersion.New(o.EC, o.EC.MetadataDir))
plugins = append(plugins, tables.New(o.EC, o.EC.MetadataDir))
plugins = append(plugins, functions.New(o.EC, o.EC.MetadataDir))
plugins = append(plugins, querycollections.New(o.EC, o.EC.MetadataDir))
plugins = append(plugins, allowlist.New(o.EC, o.EC.MetadataDir))
plugins = append(plugins, remoteschemas.New(o.EC, o.EC.MetadataDir))
plugins = append(plugins, actions.New(o.EC, o.EC.MetadataDir))
for _, plg := range plugins {
err := plg.CreateFiles()
if err != nil {
return errors.Wrap(err, "cannot create metadata files")
}
}
}
return nil
}
func (o *InitOptions) createTemplateFiles() error {
if o.Template == "" {
return nil
}
err := o.EC.InitTemplatesRepo.EnsureUpdated()
if err != nil {
return errors.Wrap(err, "error in updating init-templates repo")
}
templatePath := filepath.Join(o.EC.InitTemplatesRepo.Path, o.Template)
info, err := os.Stat(templatePath)
if err != nil {
return errors.Wrap(err, "template doesn't exists")
}
if !info.IsDir() {
return errors.Errorf("template should be a directory")
}
err = util.CopyDir(templatePath, filepath.Join(o.EC.ExecutionDirectory, "install-manifest"))
if err != nil {
return err
}
return nil
}
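
For reference, a rough sketch of the version-dependent config that createFiles writes: config v1 carries only the server endpoint, while v2 adds the version number, a metadata directory and the action defaults wired up from the new --action-kind and --action-handler-webhook-baseurl flags. The key names below approximate the generated config.yaml and are assumptions, not copied from the CLI.

package main

import (
	"fmt"

	yaml "gopkg.in/yaml.v2"
)

// buildConfig returns a config-v1-shaped map for version "1" and a
// config-v2-shaped map otherwise.
func buildConfig(version, endpoint, actionKind, handlerBaseURL string) map[string]interface{} {
	cfg := map[string]interface{}{"endpoint": endpoint}
	if version == "1" {
		return cfg // config v1: server settings only
	}
	cfg["version"] = 2
	cfg["metadata_directory"] = "metadata" // key name assumed
	cfg["actions"] = map[string]string{    // key names assumed
		"kind":                    actionKind,
		"handler_webhook_baseurl": handlerBaseURL,
	}
	return cfg
}

func main() {
	out, err := yaml.Marshal(buildConfig("2", "http://localhost:8080", "synchronous", "http://localhost:3000"))
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}
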

View File

@ -1,55 +0,0 @@
package commands
import (
"math/rand"
"os"
"path/filepath"
"strconv"
"testing"
"time"
"github.com/briandowns/spinner"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/util/fake"
"github.com/sirupsen/logrus/hooks/test"
)
func init() {
rand.Seed(time.Now().UTC().UnixNano())
}
func TestInitCmd(t *testing.T) {
logger, _ := test.NewNullLogger()
ec := cli.NewExecutionContext()
ec.Logger = logger
ec.Spinner = spinner.New(spinner.CharSets[7], 100*time.Millisecond)
tt := []struct {
name string
opts *initOptions
err error
}{
{"only-init-dir", &initOptions{
EC: ec,
Endpoint: "",
AdminSecret: "",
InitDir: filepath.Join(os.TempDir(), "hasura-cli-test-"+strconv.Itoa(rand.Intn(1000))),
}, nil},
}
for _, tc := range tt {
t.Run(tc.name, func(t *testing.T) {
tc.opts.EC.Spinner.Writer = &fake.FakeWriter{}
err := tc.opts.EC.Prepare()
if err != nil {
t.Fatalf("%s: prep failed: %v", tc.name, err)
}
err = tc.opts.run()
if err != tc.err {
t.Fatalf("%s: expected %v, got %v", tc.name, tc.err, err)
} else {
// TODO: (shahidhk) need to verify the contents of the spec generated
os.RemoveAll(tc.opts.InitDir)
}
})
}
}

View File

@ -1,23 +1,28 @@
package commands
import (
"io/ioutil"
"os"
"github.com/ghodss/yaml"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/migrate"
"github.com/pkg/errors"
"github.com/spf13/cobra"
v2yaml "gopkg.in/yaml.v2"
"github.com/spf13/viper"
)
// NewMetadataCmd returns the metadata command
func NewMetadataCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
ec.Viper = v
metadataCmd := &cobra.Command{
Use: "metadata",
Aliases: []string{"md"},
Short: "Manage Hasura GraphQL Engine metadata saved in the database",
Use: "metadata",
Aliases: []string{"md"},
Short: "Manage Hasura GraphQL Engine metadata saved in the database",
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
err := ec.Prepare()
if err != nil {
return err
}
return ec.Validate()
},
SilenceUsage: true,
}
metadataCmd.AddCommand(
@ -28,30 +33,29 @@ func NewMetadataCmd(ec *cli.ExecutionContext) *cobra.Command {
newMetadataApplyCmd(ec),
newMetadataInconsistencyCmd(ec),
)
metadataCmd.PersistentFlags().String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
metadataCmd.PersistentFlags().String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
metadataCmd.PersistentFlags().String("access-key", "", "access key for Hasura GraphQL Engine")
metadataCmd.PersistentFlags().MarkDeprecated("access-key", "use --admin-secret instead")
v.BindPFlag("endpoint", metadataCmd.PersistentFlags().Lookup("endpoint"))
v.BindPFlag("admin_secret", metadataCmd.PersistentFlags().Lookup("admin-secret"))
v.BindPFlag("access_key", metadataCmd.PersistentFlags().Lookup("access-key"))
return metadataCmd
}
func executeMetadata(cmd string, t *migrate.Migrate, ec *cli.ExecutionContext) error {
switch cmd {
case "export":
metaData, err := t.ExportMetadata()
files, err := t.ExportMetadata()
if err != nil {
return errors.Wrap(err, "cannot export metadata")
return errors.Wrap(err, "cannot export metadata from server")
}
databyt, err := v2yaml.Marshal(metaData)
err = t.WriteMetadata(files)
if err != nil {
return err
}
metadataPath, err := ec.GetMetadataFilePath("yaml")
if err != nil {
return errors.Wrap(err, "cannot save metadata")
}
err = ioutil.WriteFile(metadataPath, databyt, 0644)
if err != nil {
return errors.Wrap(err, "cannot save metadata")
return errors.Wrap(err, "cannot write metadata")
}
case "clear":
err := t.ResetMetadata()
@ -64,34 +68,7 @@ func executeMetadata(cmd string, t *migrate.Migrate, ec *cli.ExecutionContext) e
return errors.Wrap(err, "cannot reload Metadata")
}
case "apply":
var data interface{}
var metadataContent []byte
for _, format := range []string{"yaml", "json"} {
metadataPath, err := ec.GetMetadataFilePath(format)
if err != nil {
return errors.Wrap(err, "cannot apply metadata")
}
metadataContent, err = ioutil.ReadFile(metadataPath)
if err != nil {
if os.IsNotExist(err) {
continue
}
return err
}
break
}
if metadataContent == nil {
return errors.New("Unable to locate metadata.[yaml|json] file under migrations directory")
}
err := yaml.Unmarshal(metadataContent, &data)
if err != nil {
return errors.Wrap(err, "cannot parse metadata file")
}
err = t.ApplyMetadata(data)
err := t.ApplyMetadata()
if err != nil {
return errors.Wrap(err, "cannot apply metadata on the database")
}

View File

@ -6,14 +6,12 @@ import (
"github.com/hasura/graphql-engine/cli"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func newMetadataApplyCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
opts := &metadataApplyOptions{
opts := &MetadataApplyOptions{
EC: ec,
actionType: "apply",
ActionType: "apply",
}
metadataApplyCmd := &cobra.Command{
@ -28,25 +26,17 @@ func newMetadataApplyCmd(ec *cli.ExecutionContext) *cobra.Command {
# Apply metadata to an instance specified by the flag:
hasura metadata apply --endpoint "<endpoint>"`,
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
if opts.dryRun {
o := &metadataDiffOptions{
o := &MetadataDiffOptions{
EC: ec,
output: os.Stdout,
Output: os.Stdout,
Args: []string{},
}
filename, err := ec.GetExistingMetadataFile()
if err != nil {
return errors.Wrap(err, "failed getting metadata file")
}
o.metadata[0] = filename
return o.run()
return o.Run()
}
opts.EC.Spin("Applying metadata...")
err := opts.run()
err := opts.Run()
opts.EC.Spinner.Stop()
if err != nil {
return errors.Wrap(err, "failed to apply metadata")
@ -57,33 +47,34 @@ func newMetadataApplyCmd(ec *cli.ExecutionContext) *cobra.Command {
}
f := metadataApplyCmd.Flags()
f.String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.String("access-key", "", "access key for Hasura GraphQL Engine")
f.MarkDeprecated("access-key", "use --admin-secret instead")
f.BoolVar(&opts.FromFile, "from-file", false, "apply metadata from migrations/metadata.[yaml|json]")
f.BoolVar(&opts.dryRun, "dry-run", false, "show a diff instead of applying the metadata")
// need to create a new viper because https://github.com/spf13/viper/issues/233
v.BindPFlag("endpoint", f.Lookup("endpoint"))
v.BindPFlag("admin_secret", f.Lookup("admin-secret"))
v.BindPFlag("access_key", f.Lookup("access-key"))
return metadataApplyCmd
}
type metadataApplyOptions struct {
type MetadataApplyOptions struct {
EC *cli.ExecutionContext
actionType string
ActionType string
dryRun bool
FromFile bool
dryRun bool
}
func (o *metadataApplyOptions) run() error {
migrateDrv, err := newMigrate(o.EC.MigrationDir, o.EC.ServerConfig.ParsedEndpoint, o.EC.ServerConfig.AdminSecret, o.EC.Logger, o.EC.Version, true)
func (o *MetadataApplyOptions) Run() error {
if o.FromFile {
actualMetadataDir := o.EC.MetadataDir
o.EC.MetadataDir = ""
defer func() {
o.EC.MetadataDir = actualMetadataDir
}()
}
migrateDrv, err := newMigrate(o.EC, true)
if err != nil {
return err
}
return executeMetadata(o.actionType, migrateDrv, o.EC)
return executeMetadata(o.ActionType, migrateDrv, o.EC)
}
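
The --from-file branch above relies on a small save/clear/restore idiom: blanking EC.MetadataDir forces the older migrations/metadata.[yaml|json] code path, and the deferred assignment restores the original value when Run returns. A self-contained sketch of that idiom, with illustrative types rather than the CLI's own:

package main

import "fmt"

type execContext struct {
	MetadataDir string
}

func applyFromFile(ec *execContext) {
	// temporarily pretend there is no metadata directory so the
	// file-based code path is taken
	saved := ec.MetadataDir
	ec.MetadataDir = ""
	defer func() { ec.MetadataDir = saved }()

	fmt.Printf("applying with MetadataDir=%q\n", ec.MetadataDir)
}

func main() {
	ec := &execContext{MetadataDir: "metadata"}
	applyFromFile(ec)
	fmt.Printf("restored MetadataDir=%q\n", ec.MetadataDir)
}
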

View File

@ -1,42 +0,0 @@
package commands
import (
"net/url"
"os"
"testing"
"time"
"github.com/briandowns/spinner"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/version"
"github.com/sirupsen/logrus/hooks/test"
)
func testMetadataApply(t *testing.T, metadataFile string, endpoint *url.URL) {
logger, _ := test.NewNullLogger()
opts := &metadataApplyOptions{
EC: &cli.ExecutionContext{
Logger: logger,
Spinner: spinner.New(spinner.CharSets[7], 100*time.Millisecond),
MetadataFile: []string{metadataFile},
ServerConfig: &cli.ServerConfig{
Endpoint: endpoint.String(),
AdminSecret: os.Getenv("HASURA_GRAPHQL_TEST_ADMIN_SECRET"),
ParsedEndpoint: endpoint,
},
},
actionType: "apply",
}
opts.EC.Version = version.New()
v, err := version.FetchServerVersion(opts.EC.ServerConfig.Endpoint)
if err != nil {
t.Fatalf("getting server version failed: %v", err)
}
opts.EC.Version.SetServerVersion(v)
err = opts.run()
if err != nil {
t.Fatalf("failed applying metadata: %v", err)
}
}

View File

@ -4,14 +4,12 @@ import (
"github.com/hasura/graphql-engine/cli"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func newMetadataClearCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
opts := &metadataClearOptions{
opts := &MetadataClearOptions{
EC: ec,
actionType: "clear",
ActionType: "clear",
}
metadataResetCmd := &cobra.Command{
@ -27,16 +25,12 @@ func newMetadataClearCmd(ec *cli.ExecutionContext) *cobra.Command {
# Clear metadata on a different Hasura instance:
hasura metadata clear --endpoint "<endpoint>"`,
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
if cmd.CalledAs() == "reset" {
opts.EC.Logger.Warn("metadata reset command is deprecated, use metadata clear instead")
}
opts.EC.Spin("Clearing metadata...")
err := opts.run()
err := opts.Run()
opts.EC.Spinner.Stop()
if err != nil {
return errors.Wrap(err, "failed to clear metadata")
@ -46,32 +40,21 @@ func newMetadataClearCmd(ec *cli.ExecutionContext) *cobra.Command {
},
}
f := metadataResetCmd.Flags()
f.String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.String("access-key", "", "access key for Hasura GraphQL Engine")
f.MarkDeprecated("access-key", "use --admin-secret instead")
// need to create a new viper because https://github.com/spf13/viper/issues/233
v.BindPFlag("endpoint", f.Lookup("endpoint"))
v.BindPFlag("admin_secret", f.Lookup("admin-secret"))
v.BindPFlag("access_key", f.Lookup("access-key"))
return metadataResetCmd
}
type metadataClearOptions struct {
type MetadataClearOptions struct {
EC *cli.ExecutionContext
actionType string
ActionType string
}
func (o *metadataClearOptions) run() error {
migrateDrv, err := newMigrate(o.EC.MigrationDir, o.EC.ServerConfig.ParsedEndpoint, o.EC.ServerConfig.AdminSecret, o.EC.Logger, o.EC.Version, true)
func (o *MetadataClearOptions) Run() error {
migrateDrv, err := newMigrate(o.EC, true)
if err != nil {
return err
}
err = executeMetadata(o.actionType, migrateDrv, o.EC)
err = executeMetadata(o.ActionType, migrateDrv, o.EC)
if err != nil {
return errors.Wrap(err, "Cannot clear metadata")
}

View File

@ -1,42 +0,0 @@
package commands
import (
"net/url"
"os"
"testing"
"time"
"github.com/briandowns/spinner"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/version"
"github.com/sirupsen/logrus/hooks/test"
)
func testMetadataReset(t *testing.T, metadataFile string, endpoint *url.URL) {
logger, _ := test.NewNullLogger()
opts := &metadataClearOptions{
EC: &cli.ExecutionContext{
Logger: logger,
Spinner: spinner.New(spinner.CharSets[7], 100*time.Millisecond),
MetadataFile: []string{metadataFile},
ServerConfig: &cli.ServerConfig{
Endpoint: endpoint.String(),
AdminSecret: os.Getenv("HASURA_GRAPHQL_TEST_ADMIN_SECRET"),
ParsedEndpoint: endpoint,
},
},
actionType: "clear",
}
opts.EC.Version = version.New()
v, err := version.FetchServerVersion(opts.EC.ServerConfig.Endpoint)
if err != nil {
t.Fatalf("getting server version failed: %v", err)
}
opts.EC.Version.SetServerVersion(v)
err = opts.run()
if err != nil {
t.Fatalf("failed exporting metadata: %v", err)
}
}

View File

@ -9,26 +9,26 @@ import (
"github.com/aryann/difflib"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/metadata"
"github.com/mgutz/ansi"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
v2yaml "gopkg.in/yaml.v2"
"gopkg.in/yaml.v2"
)
type metadataDiffOptions struct {
type MetadataDiffOptions struct {
EC *cli.ExecutionContext
output io.Writer
Output io.Writer
Args []string
// two metadata to diff, 2nd is server if it's empty
metadata [2]string
// two Metadata to diff, 2nd is server if it's empty
Metadata [2]string
}
func newMetadataDiffCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
opts := &metadataDiffOptions{
opts := &MetadataDiffOptions{
EC: ec,
output: os.Stdout,
Output: os.Stdout,
}
metadataDiffCmd := &cobra.Command{
@ -53,92 +53,160 @@ By default, shows changes between exported metadata file and server metadata.`,
# Diff metadata on a different Hasura instance:
hasura metadata diff --endpoint "<endpoint>"`,
Args: cobra.MaximumNArgs(2),
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
messageFormat := "Showing diff between %s and %s..."
message := ""
switch len(args) {
case 0:
// no args, diff exported metadata and metadata on server
filename, err := ec.GetExistingMetadataFile()
if err != nil {
return errors.Wrap(err, "failed getting metadata file")
}
opts.metadata[0] = filename
message = fmt.Sprintf(messageFormat, filename, "the server")
case 1:
// 1 arg, diff given filename and the metadata on server
opts.metadata[0] = args[0]
message = fmt.Sprintf(messageFormat, args[0], "the server")
case 2:
// 2 args, diff given filenames
opts.metadata[0] = args[0]
opts.metadata[1] = args[1]
message = fmt.Sprintf(messageFormat, args[0], args[1])
}
opts.EC.Logger.Info(message)
err := opts.run()
if err != nil {
return errors.Wrap(err, "failed to show metadata diff")
}
return nil
opts.Args = args
return opts.Run()
},
}
f := metadataDiffCmd.Flags()
f.String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.String("access-key", "", "access key for Hasura GraphQL Engine")
f.MarkDeprecated("access-key", "use --admin-secret instead")
// need to create a new viper because https://github.com/spf13/viper/issues/233
v.BindPFlag("endpoint", f.Lookup("endpoint"))
v.BindPFlag("admin_secret", f.Lookup("admin-secret"))
v.BindPFlag("access_key", f.Lookup("access-key"))
return metadataDiffCmd
}
func (o *metadataDiffOptions) run() error {
func (o *MetadataDiffOptions) runv2(args []string) error {
messageFormat := "Showing diff between %s and %s..."
message := ""
switch len(args) {
case 0:
o.Metadata[0] = o.EC.MetadataDir
message = fmt.Sprintf(messageFormat, o.Metadata[0], "the server")
case 1:
// 1 arg, diff given directory and the metadata on server
err := checkDir(args[0])
if err != nil {
return err
}
o.Metadata[0] = args[0]
message = fmt.Sprintf(messageFormat, o.Metadata[0], "the server")
case 2:
err := checkDir(args[0])
if err != nil {
return err
}
o.Metadata[0] = args[0]
err = checkDir(args[1])
if err != nil {
return err
}
o.Metadata[1] = args[1]
message = fmt.Sprintf(messageFormat, o.Metadata[0], o.Metadata[1])
}
o.EC.Logger.Info(message)
var oldYaml, newYaml []byte
var err error
migrateDrv, err := newMigrate(o.EC.MigrationDir, o.EC.ServerConfig.ParsedEndpoint, o.EC.ServerConfig.AdminSecret, o.EC.Logger, o.EC.Version, true)
migrateDrv, err := newMigrate(o.EC, true)
if err != nil {
return err
}
if o.Metadata[1] == "" {
tmpDir, err := ioutil.TempDir("", "*")
if err != nil {
return err
}
defer os.RemoveAll(tmpDir)
setMetadataPluginsWithDir(o.EC, migrateDrv, tmpDir)
files, err := migrateDrv.ExportMetadata()
if err != nil {
return err
}
err = migrateDrv.WriteMetadata(files)
if err != nil {
return err
}
} else {
setMetadataPluginsWithDir(o.EC, migrateDrv, o.Metadata[1])
}
// build server metadata
serverMeta, err := migrateDrv.BuildMetadata()
if err != nil {
return err
}
newYaml, err = yaml.Marshal(serverMeta)
if err != nil {
return errors.Wrap(err, "cannot unmarshall server metadata")
}
// build local metadata
setMetadataPluginsWithDir(o.EC, migrateDrv, o.Metadata[0])
localMeta, err := migrateDrv.BuildMetadata()
if err != nil {
return err
}
oldYaml, err = yaml.Marshal(localMeta)
if err != nil {
return errors.Wrap(err, "cannot unmarshal local metadata")
}
printDiff(string(oldYaml), string(newYaml), o.Output)
return nil
}
func (o *MetadataDiffOptions) runv1(args []string) error {
messageFormat := "Showing diff between %s and %s..."
message := ""
switch len(args) {
case 0:
// no args, diff exported metadata and metadata on server
m := metadata.New(o.EC, o.EC.MigrationDir)
filename, err := m.GetExistingMetadataFile()
if err != nil {
return errors.Wrap(err, "failed getting metadata file")
}
o.Metadata[0] = filename
message = fmt.Sprintf(messageFormat, filename, "the server")
case 1:
// 1 arg, diff given filename and the metadata on server
o.Metadata[0] = args[0]
message = fmt.Sprintf(messageFormat, args[0], "the server")
case 2:
// 2 args, diff given filenames
o.Metadata[0] = args[0]
o.Metadata[1] = args[1]
message = fmt.Sprintf(messageFormat, args[0], args[1])
}
o.EC.Logger.Info(message)
var oldYaml, newYaml []byte
migrateDrv, err := newMigrate(o.EC, true)
if err != nil {
return err
}
if o.metadata[1] == "" {
if o.Metadata[1] == "" {
// get metadata from server
m, err := migrateDrv.ExportMetadata()
files, err := migrateDrv.ExportMetadata()
if err != nil {
return errors.Wrap(err, "cannot fetch metadata from server")
}
newYaml, err = v2yaml.Marshal(m)
if err != nil {
return errors.Wrap(err, "cannot convert metadata from server to yaml")
// for config v1, ExportMetadata always returns a single metadata.yaml file
for _, content := range files {
newYaml = content
}
} else {
newYaml, err = ioutil.ReadFile(o.metadata[1])
newYaml, err = ioutil.ReadFile(o.Metadata[1])
if err != nil {
return errors.Wrap(err, "cannot read file")
}
}
oldYaml, err = ioutil.ReadFile(o.metadata[0])
oldYaml, err = ioutil.ReadFile(o.Metadata[0])
if err != nil {
return errors.Wrap(err, "cannot read file")
}
printDiff(string(oldYaml), string(newYaml), o.output)
printDiff(string(oldYaml), string(newYaml), o.Output)
return nil
}
func (o *MetadataDiffOptions) Run() error {
if o.EC.Config.Version == cli.V2 && o.EC.MetadataDir != "" {
return o.runv2(o.Args)
}
return o.runv1(o.Args)
}
func printDiff(before, after string, to io.Writer) {
diffs := difflib.Diff(strings.Split(before, "\n"), strings.Split(after, "\n"))
@ -155,3 +223,14 @@ func printDiff(before, after string, to io.Writer) {
}
}
}
func checkDir(path string) error {
file, err := os.Stat(path)
if err != nil {
return err
}
if !file.IsDir() {
return fmt.Errorf("metadata diff only works with folder but got file %s", path)
}
return nil
}
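
printDiff above splits both documents into lines and feeds them to github.com/aryann/difflib, the same dependency this command imports. The sketch below assumes difflib.Diff returns a slice of DiffRecord values carrying a Payload string and a Delta of LeftOnly/RightOnly/Common; that record shape is an assumption about the library, so verify it against the package before reuse.

package main

import (
	"fmt"
	"strings"

	"github.com/aryann/difflib"
)

func printDiff(before, after string) {
	// Diff(seq1, seq2 []string) is used exactly as in the command above
	diffs := difflib.Diff(strings.Split(before, "\n"), strings.Split(after, "\n"))
	for _, d := range diffs {
		// DiffRecord fields assumed: Payload (line text) and Delta (which side it came from)
		switch d.Delta {
		case difflib.LeftOnly:
			fmt.Println("- " + d.Payload) // only in the first document
		case difflib.RightOnly:
			fmt.Println("+ " + d.Payload) // only in the second document
		default:
			fmt.Println("  " + d.Payload)
		}
	}
}

func main() {
	printDiff("tables:\n- table: test", "tables:\n- table: test\n- table: users")
}
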

View File

@ -1,125 +0,0 @@
package commands
import (
"bytes"
"io/ioutil"
"net/url"
"os"
"path/filepath"
"testing"
"time"
"github.com/briandowns/spinner"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/version"
"github.com/sirupsen/logrus/hooks/test"
)
var testMetadata1 = `allowlist: []
functions: []
query_collections: []
remote_schemas: []
tables:
- array_relationships: []
delete_permissions: []
event_triggers: []
insert_permissions: []
is_enum: false
object_relationships: []
select_permissions: []
table: test
update_permissions: []
`
var testMetadata2 = `allowlist: []
functions: []
query_collections: []
remote_schemas: []
tables:
- array_relationships: []
configuration:
custom_column_names: {}
custom_root_fields:
delete: null
insert: null
select: null
select_aggregate: null
select_by_pk: null
update: null
delete_permissions: []
event_triggers: []
insert_permissions: []
is_enum: false
object_relationships: []
select_permissions: []
table: test
update_permissions: []
`
func TestMetadataDiffCmd(t *testing.T) {
endpointURL, err := url.Parse(os.Getenv("HASURA_GRAPHQL_TEST_ENDPOINT"))
if err != nil {
t.Fatal(err)
}
// Create migration Dir
migrationsDir, err := ioutil.TempDir("", "")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll(migrationsDir)
metadataFile := filepath.Join(migrationsDir, "metadata.yaml")
testMetadataFile1 := filepath.Join(migrationsDir, "testmetadata1.yaml")
testMetadataFile2 := filepath.Join(migrationsDir, "testmetadata2.yaml")
mustWriteFile(t, "", metadataFile, testMetadata1)
mustWriteFile(t, "", testMetadataFile1, testMetadata1)
mustWriteFile(t, "", testMetadataFile2, testMetadata2)
logger, _ := test.NewNullLogger()
outputFile := new(bytes.Buffer)
opts := &metadataDiffOptions{
EC: &cli.ExecutionContext{
Logger: logger,
Spinner: spinner.New(spinner.CharSets[7], 100*time.Millisecond),
MetadataFile: []string{metadataFile},
ServerConfig: &cli.ServerConfig{
Endpoint: endpointURL.String(),
AdminSecret: os.Getenv("HASURA_GRAPHQL_TEST_ADMIN_SECRET"),
ParsedEndpoint: endpointURL,
},
},
output: outputFile,
}
opts.EC.Version = version.New()
v, err := version.FetchServerVersion(opts.EC.ServerConfig.Endpoint)
if err != nil {
t.Fatalf("getting server version failed: %v", err)
}
opts.EC.Version.SetServerVersion(v)
// Run without args
opts.metadata[0] = metadataFile
err = opts.run()
if err != nil {
t.Fatalf("failed diffing metadata: %v", err)
}
// Run with one arg
opts.metadata = [2]string{testMetadataFile1, ""}
err = opts.run()
if err != nil {
t.Fatalf("failed diffing metadata: %v", err)
}
// Run with two args
opts.metadata = [2]string{testMetadataFile1, testMetadataFile2}
err = opts.run()
if err != nil {
t.Fatalf("failed diffing metadata: %v", err)
}
}

View File

@ -4,7 +4,6 @@ import (
"github.com/hasura/graphql-engine/cli"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
const longHelpMetadataExportCmd = `Export Hasura metadata and save it in migrations/metadata.yaml file.
@ -14,10 +13,9 @@ permission rules, relationships and event triggers that are defined
on those tables.`
func newMetadataExportCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
opts := &metadataExportOptions{
opts := &MetadataExportOptions{
EC: ec,
actionType: "export",
ActionType: "export",
}
metadataExportCmd := &cobra.Command{
@ -32,13 +30,9 @@ func newMetadataExportCmd(ec *cli.ExecutionContext) *cobra.Command {
# Export metadata from another instance specified by the flag:
hasura metadata export --endpoint "<endpoint>"`,
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
opts.EC.Spin("Exporting metadata...")
err := opts.run()
err := opts.Run()
opts.EC.Spinner.Stop()
if err != nil {
return errors.Wrap(err, "failed to export metadata")
@ -49,30 +43,19 @@ func newMetadataExportCmd(ec *cli.ExecutionContext) *cobra.Command {
Long: longHelpMetadataExportCmd,
}
f := metadataExportCmd.Flags()
f.String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.String("access-key", "", "access key for Hasura GraphQL Engine")
f.MarkDeprecated("access-key", "use --admin-secret instead")
// need to create a new viper because https://github.com/spf13/viper/issues/233
v.BindPFlag("endpoint", f.Lookup("endpoint"))
v.BindPFlag("admin_secret", f.Lookup("admin-secret"))
v.BindPFlag("access_key", f.Lookup("access-key"))
return metadataExportCmd
}
type metadataExportOptions struct {
type MetadataExportOptions struct {
EC *cli.ExecutionContext
actionType string
ActionType string
}
func (o *metadataExportOptions) run() error {
migrateDrv, err := newMigrate(o.EC.MigrationDir, o.EC.ServerConfig.ParsedEndpoint, o.EC.ServerConfig.AdminSecret, o.EC.Logger, o.EC.Version, true)
func (o *MetadataExportOptions) Run() error {
migrateDrv, err := newMigrate(o.EC, true)
if err != nil {
return err
}
return executeMetadata(o.actionType, migrateDrv, o.EC)
return executeMetadata(o.ActionType, migrateDrv, o.EC)
}

View File

@ -1,42 +0,0 @@
package commands
import (
"net/url"
"os"
"testing"
"time"
"github.com/briandowns/spinner"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/version"
"github.com/sirupsen/logrus/hooks/test"
)
func testMetadataExport(t *testing.T, metadataFile string, endpoint *url.URL) {
logger, _ := test.NewNullLogger()
opts := &metadataExportOptions{
EC: &cli.ExecutionContext{
Logger: logger,
Spinner: spinner.New(spinner.CharSets[7], 100*time.Millisecond),
MetadataFile: []string{metadataFile},
ServerConfig: &cli.ServerConfig{
Endpoint: endpoint.String(),
AdminSecret: os.Getenv("HASURA_GRAPHQL_TEST_ADMIN_SECRET"),
ParsedEndpoint: endpoint,
},
},
actionType: "export",
}
opts.EC.Version = version.New()
v, err := version.FetchServerVersion(opts.EC.ServerConfig.Endpoint)
if err != nil {
t.Fatalf("getting server version failed: %v", err)
}
opts.EC.Version.SetServerVersion(v)
err = opts.run()
if err != nil {
t.Fatalf("failed exporting metadata: %v", err)
}
}

View File

@ -4,23 +4,16 @@ import (
"github.com/hasura/graphql-engine/cli"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func newMetadataInconsistencyDropCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
opts := &metadataInconsistencyDropOptions{
EC: ec,
}
metadataInconsistencyDropCmd := &cobra.Command{
Use: "drop",
Short: "Drop inconsistent objects from the metadata",
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
opts.EC.Spin("Dropping inconsistent metadata...")
err := opts.run()
@ -33,17 +26,6 @@ func newMetadataInconsistencyDropCmd(ec *cli.ExecutionContext) *cobra.Command {
},
}
f := metadataInconsistencyDropCmd.Flags()
f.String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.String("access-key", "", "access key for Hasura GraphQL Engine")
f.MarkDeprecated("access-key", "use --admin-secret instead")
// need to create a new viper because https://github.com/spf13/viper/issues/233
v.BindPFlag("endpoint", f.Lookup("endpoint"))
v.BindPFlag("admin_secret", f.Lookup("admin-secret"))
v.BindPFlag("access_key", f.Lookup("access-key"))
return metadataInconsistencyDropCmd
}
@ -52,7 +34,7 @@ type metadataInconsistencyDropOptions struct {
}
func (o *metadataInconsistencyDropOptions) run() error {
d, err := newMigrate(o.EC.MigrationDir, o.EC.ServerConfig.ParsedEndpoint, o.EC.ServerConfig.AdminSecret, o.EC.Logger, o.EC.Version, true)
d, err := newMigrate(o.EC, true)
if err != nil {
return err
}

View File

@ -1,45 +0,0 @@
package commands
import (
"net/url"
"os"
"testing"
"time"
"github.com/briandowns/spinner"
"github.com/sirupsen/logrus/hooks/test"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/version"
)
func testMetadataInconsistencyDropCmd(t *testing.T, migrationsDir string, metadataFile string, endpoint *url.URL) {
logger, _ := test.NewNullLogger()
opts := &metadataInconsistencyDropOptions{
EC: &cli.ExecutionContext{
Logger: logger,
Spinner: spinner.New(spinner.CharSets[7], 100*time.Millisecond),
MetadataFile: []string{metadataFile},
ServerConfig: &cli.ServerConfig{
Endpoint: endpoint.String(),
AdminSecret: os.Getenv("HASURA_GRAPHQL_TEST_ADMIN_SECRET"),
ParsedEndpoint: endpoint,
},
MigrationDir: migrationsDir,
},
}
opts.EC.Version = version.New()
v, err := version.FetchServerVersion(opts.EC.ServerConfig.Endpoint)
if err != nil {
t.Fatalf("getting server version failed: %v", err)
}
opts.EC.Version.SetServerVersion(v)
err = opts.run()
if err != nil {
t.Fatalf("failed dropping the inconsistency: %v", err)
}
os.RemoveAll(opts.EC.MigrationDir)
}

View File

@ -7,7 +7,6 @@ import (
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/migrate/database"
@ -15,7 +14,6 @@ import (
)
func newMetadataInconsistencyListCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
opts := &metadataInconsistencyListOptions{
EC: ec,
}
@ -25,10 +23,6 @@ func newMetadataInconsistencyListCmd(ec *cli.ExecutionContext) *cobra.Command {
Aliases: []string{"ls"},
Short: "List all inconsistent objects from the metadata",
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
err := opts.run()
opts.EC.Spinner.Stop()
@ -42,17 +36,6 @@ func newMetadataInconsistencyListCmd(ec *cli.ExecutionContext) *cobra.Command {
},
}
f := metadataInconsistencyListCmd.Flags()
f.String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.String("access-key", "", "access key for Hasura GraphQL Engine")
f.MarkDeprecated("access-key", "use --admin-secret instead")
// need to create a new viper because https://github.com/spf13/viper/issues/233
v.BindPFlag("endpoint", f.Lookup("endpoint"))
v.BindPFlag("admin_secret", f.Lookup("admin-secret"))
v.BindPFlag("access_key", f.Lookup("access-key"))
return metadataInconsistencyListCmd
}
@ -64,7 +47,7 @@ type metadataInconsistencyListOptions struct {
}
func (o *metadataInconsistencyListOptions) read() error {
d, err := newMigrate(o.EC.MigrationDir, o.EC.ServerConfig.ParsedEndpoint, o.EC.ServerConfig.AdminSecret, o.EC.Logger, o.EC.Version, true)
d, err := newMigrate(o.EC, true)
if err != nil {
return err
}

View File

@ -4,11 +4,9 @@ import (
"github.com/hasura/graphql-engine/cli"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func newMetadataInconsistencyStatusCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
opts := &metadataInconsistencyListOptions{
EC: ec,
}
@ -17,10 +15,6 @@ func newMetadataInconsistencyStatusCmd(ec *cli.ExecutionContext) *cobra.Command
Use: "status",
Short: "Check if the metadata is inconsistent or not",
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
opts.EC.Spin("reading metadata status...")
err := opts.read()
@ -37,16 +31,5 @@ func newMetadataInconsistencyStatusCmd(ec *cli.ExecutionContext) *cobra.Command
},
}
f := metadataInconsistencyStatusCmd.Flags()
f.String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.String("access-key", "", "access key for Hasura GraphQL Engine")
f.MarkDeprecated("access-key", "use --admin-secret instead")
// need to create a new viper because https://github.com/spf13/viper/issues/233
v.BindPFlag("endpoint", f.Lookup("endpoint"))
v.BindPFlag("admin_secret", f.Lookup("admin-secret"))
v.BindPFlag("access_key", f.Lookup("access-key"))
return metadataInconsistencyStatusCmd
}

View File

@ -4,11 +4,9 @@ import (
"github.com/hasura/graphql-engine/cli"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func newMetadataReloadCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
opts := &metadataReloadOptions{
EC: ec,
actionType: "reload",
@ -26,10 +24,6 @@ func newMetadataReloadCmd(ec *cli.ExecutionContext) *cobra.Command {
# Reload metadata on a different instance:
hasura metadata reload --endpoint "<endpoint>"`,
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
opts.EC.Spin("Reloading metadata...")
err := opts.run()
@ -42,17 +36,6 @@ func newMetadataReloadCmd(ec *cli.ExecutionContext) *cobra.Command {
},
}
f := metadataReloadCmd.Flags()
f.String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.String("access-key", "", "access key for Hasura GraphQL Engine")
f.MarkDeprecated("access-key", "use --admin-secret instead")
// need to create a new viper because https://github.com/spf13/viper/issues/233
v.BindPFlag("endpoint", f.Lookup("endpoint"))
v.BindPFlag("admin_secret", f.Lookup("admin-secret"))
v.BindPFlag("access_key", f.Lookup("access-key"))
return metadataReloadCmd
}
@ -63,7 +46,7 @@ type metadataReloadOptions struct {
}
func (o *metadataReloadOptions) run() error {
migrateDrv, err := newMigrate(o.EC.MigrationDir, o.EC.ServerConfig.ParsedEndpoint, o.EC.ServerConfig.AdminSecret, o.EC.Logger, o.EC.Version, true)
migrateDrv, err := newMigrate(o.EC, true)
if err != nil {
return err
}

View File

@ -1,42 +0,0 @@
package commands
import (
"net/url"
"os"
"testing"
"time"
"github.com/briandowns/spinner"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/version"
"github.com/sirupsen/logrus/hooks/test"
)
func testMetadataReload(t *testing.T, metadataFile string, endpoint *url.URL) {
logger, _ := test.NewNullLogger()
opts := &metadataReloadOptions{
EC: &cli.ExecutionContext{
Logger: logger,
Spinner: spinner.New(spinner.CharSets[7], 100*time.Millisecond),
MetadataFile: []string{metadataFile},
ServerConfig: &cli.ServerConfig{
Endpoint: endpoint.String(),
AdminSecret: os.Getenv("HASURA_GRAPHQL_TEST_ADMIN_SECRET"),
ParsedEndpoint: endpoint,
},
},
actionType: "reload",
}
opts.EC.Version = version.New()
v, err := version.FetchServerVersion(opts.EC.ServerConfig.Endpoint)
if err != nil {
t.Fatalf("getting server version failed: %v", err)
}
opts.EC.Version.SetServerVersion(v)
err = opts.run()
if err != nil {
t.Fatalf("failed reloading metadata: %v", err)
}
}

View File

@ -7,12 +7,21 @@ import (
"strings"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/metadata"
"github.com/hasura/graphql-engine/cli/metadata/actions"
"github.com/hasura/graphql-engine/cli/metadata/allowlist"
"github.com/hasura/graphql-engine/cli/metadata/functions"
"github.com/hasura/graphql-engine/cli/metadata/querycollections"
"github.com/hasura/graphql-engine/cli/metadata/remoteschemas"
"github.com/hasura/graphql-engine/cli/metadata/tables"
metadataTypes "github.com/hasura/graphql-engine/cli/metadata/types"
metadataVersion "github.com/hasura/graphql-engine/cli/metadata/version"
"github.com/hasura/graphql-engine/cli/migrate"
mig "github.com/hasura/graphql-engine/cli/migrate/cmd"
"github.com/hasura/graphql-engine/cli/version"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/spf13/viper"
// Initialize migration drivers
_ "github.com/hasura/graphql-engine/cli/migrate/database/hasuradb"
@ -21,10 +30,19 @@ import (
// NewMigrateCmd returns the migrate command
func NewMigrateCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
ec.Viper = v
migrateCmd := &cobra.Command{
Use: "migrate",
Short: "Manage migrations on the database",
SilenceUsage: true,
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
err := ec.Prepare()
if err != nil {
return err
}
return ec.Validate()
},
}
migrateCmd.AddCommand(
newMigrateApplyCmd(ec),
@ -32,16 +50,26 @@ func NewMigrateCmd(ec *cli.ExecutionContext) *cobra.Command {
newMigrateCreateCmd(ec),
newMigrateSquashCmd(ec),
)
migrateCmd.PersistentFlags().String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
migrateCmd.PersistentFlags().String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
migrateCmd.PersistentFlags().String("access-key", "", "access key for Hasura GraphQL Engine")
migrateCmd.PersistentFlags().MarkDeprecated("access-key", "use --admin-secret instead")
v.BindPFlag("endpoint", migrateCmd.PersistentFlags().Lookup("endpoint"))
v.BindPFlag("admin_secret", migrateCmd.PersistentFlags().Lookup("admin-secret"))
v.BindPFlag("access_key", migrateCmd.PersistentFlags().Lookup("access-key"))
return migrateCmd
}
func newMigrate(dir string, db *url.URL, adminSecretValue string, logger *logrus.Logger, v *version.Version, isCmd bool) (*migrate.Migrate, error) {
dbURL := getDataPath(db, getAdminSecretHeaderName(v), adminSecretValue)
fileURL := getFilePath(dir)
t, err := migrate.New(fileURL.String(), dbURL.String(), isCmd, logger)
func newMigrate(ec *cli.ExecutionContext, isCmd bool) (*migrate.Migrate, error) {
dbURL := getDataPath(ec.Config.ServerConfig.ParsedEndpoint, getAdminSecretHeaderName(ec.Version), ec.Config.ServerConfig.AdminSecret)
fileURL := getFilePath(ec.MigrationDir)
t, err := migrate.New(fileURL.String(), dbURL.String(), isCmd, int(ec.Config.Version), ec.Logger)
if err != nil {
return nil, errors.Wrap(err, "cannot create migrate instance")
}
// Set Plugins
setMetadataPluginsWithDir(ec, t)
return t, nil
}
@ -120,15 +148,30 @@ const (
)
func getAdminSecretHeaderName(v *version.Version) string {
if v.ServerSemver == nil {
return XHasuraAdminSecret
}
flags, err := v.GetServerFeatureFlags()
if err != nil {
return XHasuraAdminSecret
}
if flags.HasAccessKey {
if v.ServerFeatureFlags.HasAccessKey {
return XHasuraAccessKey
}
return XHasuraAdminSecret
}
func setMetadataPluginsWithDir(ec *cli.ExecutionContext, drv *migrate.Migrate, dir ...string) {
var metadataDir string
if len(dir) == 0 {
metadataDir = ec.MetadataDir
} else {
metadataDir = dir[0]
}
plugins := make(metadataTypes.MetadataPlugins, 0)
if ec.Config.Version == cli.V2 && metadataDir != "" {
plugins = append(plugins, metadataVersion.New(ec, metadataDir))
plugins = append(plugins, tables.New(ec, metadataDir))
plugins = append(plugins, functions.New(ec, metadataDir))
plugins = append(plugins, querycollections.New(ec, metadataDir))
plugins = append(plugins, allowlist.New(ec, metadataDir))
plugins = append(plugins, remoteschemas.New(ec, metadataDir))
plugins = append(plugins, actions.New(ec, metadataDir))
} else {
plugins = append(plugins, metadata.New(ec, ec.MigrationDir))
}
drv.SetMetadataPlugins(plugins)
}
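
setMetadataPluginsWithDir takes an optional trailing directory via a variadic parameter so that metadata diff can point the plugins at a temporary or user-supplied folder, while every other caller falls back to ec.MetadataDir. A tiny standalone illustration of that variadic-default idiom, with names invented for the example:

package main

import "fmt"

type execContext struct {
	MetadataDir string
}

// metadataDir returns the override when one is supplied, else the value from the context.
func metadataDir(ec *execContext, dir ...string) string {
	if len(dir) == 0 {
		return ec.MetadataDir
	}
	return dir[0]
}

func main() {
	ec := &execContext{MetadataDir: "metadata"}
	fmt.Println(metadataDir(ec))            // "metadata" (default from context)
	fmt.Println(metadataDir(ec, "/tmp/md")) // "/tmp/md" (explicit override)
}
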

View File

@ -8,12 +8,10 @@ import (
migrate "github.com/hasura/graphql-engine/cli/migrate"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func newMigrateApplyCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
opts := &migrateApplyOptions{
opts := &MigrateApplyOptions{
EC: ec,
}
migrateApplyCmd := &cobra.Command{
@ -56,12 +54,15 @@ func newMigrateApplyCmd(ec *cli.ExecutionContext) *cobra.Command {
hasura migrate apply --down all`,
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
err := ec.Prepare()
if err != nil {
return err
}
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
opts.EC.Spin("Applying migrations...")
err := opts.run()
err := opts.Run()
opts.EC.Spinner.Stop()
if err != nil {
if err == migrate.ErrNoChange {
@ -84,49 +85,40 @@ func newMigrateApplyCmd(ec *cli.ExecutionContext) *cobra.Command {
f := migrateApplyCmd.Flags()
f.SortFlags = false
f.StringVar(&opts.upMigration, "up", "", "apply all or N up migration steps")
f.StringVar(&opts.downMigration, "down", "", "apply all or N down migration steps")
f.StringVar(&opts.gotoVersion, "goto", "", "apply migration chain up to to the version specified")
f.StringVar(&opts.UpMigration, "up", "", "apply all or N up migration steps")
f.StringVar(&opts.DownMigration, "down", "", "apply all or N down migration steps")
f.StringVar(&opts.GotoVersion, "goto", "", "apply migration chain up to to the version specified")
f.StringVar(&opts.versionMigration, "version", "", "only apply this particular migration")
f.BoolVar(&opts.skipExecution, "skip-execution", false, "skip executing the migration action, but mark them as applied")
f.StringVar(&opts.migrationType, "type", "up", "type of migration (up, down) to be used with version flag")
f.StringVar(&opts.VersionMigration, "version", "", "only apply this particular migration")
f.BoolVar(&opts.SkipExecution, "skip-execution", false, "skip executing the migration action, but mark them as applied")
f.StringVar(&opts.MigrationType, "type", "up", "type of migration (up, down) to be used with version flag")
f.String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.String("access-key", "", "access key for Hasura GraphQL Engine")
f.MarkDeprecated("access-key", "use --admin-secret instead")
// need to create a new viper because https://github.com/spf13/viper/issues/233
v.BindPFlag("endpoint", f.Lookup("endpoint"))
v.BindPFlag("admin_secret", f.Lookup("admin-secret"))
v.BindPFlag("access_key", f.Lookup("access-key"))
return migrateApplyCmd
}
type migrateApplyOptions struct {
type MigrateApplyOptions struct {
EC *cli.ExecutionContext
upMigration string
downMigration string
versionMigration string
migrationType string
UpMigration string
DownMigration string
VersionMigration string
MigrationType string
// version up to which migration chain has to be applied
gotoVersion string
skipExecution bool
GotoVersion string
SkipExecution bool
}
func (o *migrateApplyOptions) run() error {
migrationType, step, err := getMigrationTypeAndStep(o.upMigration, o.downMigration, o.versionMigration, o.migrationType, o.gotoVersion, o.skipExecution)
func (o *MigrateApplyOptions) Run() error {
migrationType, step, err := getMigrationTypeAndStep(o.UpMigration, o.DownMigration, o.VersionMigration, o.MigrationType, o.GotoVersion, o.SkipExecution)
if err != nil {
return errors.Wrap(err, "error validating flags")
}
migrateDrv, err := newMigrate(o.EC.MigrationDir, o.EC.ServerConfig.ParsedEndpoint, o.EC.ServerConfig.AdminSecret, o.EC.Logger, o.EC.Version, true)
migrateDrv, err := newMigrate(o.EC, true)
if err != nil {
return err
}
migrateDrv.SkipExecution = o.skipExecution
migrateDrv.SkipExecution = o.SkipExecution
return ExecuteMigration(migrationType, migrateDrv, step)
}
@ -161,7 +153,7 @@ func getMigrationTypeAndStep(upMigration, downMigration, versionMigration, migra
}
if flagCount > 1 {
return "", 0, errors.New("Only one migration type can be applied at a time (--up, --down or --goto)")
return "", 0, errors.New("only one migration type can be applied at a time (--up, --down or --goto)")
}
if migrationName != "version" && skipExecution {

View File

@ -1,105 +0,0 @@
package commands
import (
"math/rand"
"net/url"
"os"
"path/filepath"
"strconv"
"testing"
"time"
"github.com/briandowns/spinner"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/version"
"github.com/sirupsen/logrus/hooks/test"
)
func testMigrateApply(t *testing.T, endpoint *url.URL, migrationsDir string, up string, down string, v string, vType string) {
logger, _ := test.NewNullLogger()
opts := &migrateApplyOptions{
EC: &cli.ExecutionContext{
Logger: logger,
Spinner: spinner.New(spinner.CharSets[7], 100*time.Millisecond),
MigrationDir: migrationsDir,
ServerConfig: &cli.ServerConfig{
Endpoint: endpoint.String(),
AdminSecret: os.Getenv("HASURA_GRAPHQL_TEST_ADMIN_SECRET"),
ParsedEndpoint: endpoint,
},
},
upMigration: up,
downMigration: down,
versionMigration: v,
migrationType: vType,
}
opts.EC.Version = version.New()
v, err := version.FetchServerVersion(opts.EC.ServerConfig.Endpoint)
if err != nil {
t.Fatalf("getting server version failed: %v", err)
}
opts.EC.Version.SetServerVersion(v)
err = opts.run()
if err != nil {
t.Fatalf("failed applying migration: %v", err)
}
}
func TestMigrateApplyWithInvalidEndpoint(t *testing.T) {
logger, _ := test.NewNullLogger()
opts := &migrateApplyOptions{
EC: &cli.ExecutionContext{
Logger: logger,
Spinner: spinner.New(spinner.CharSets[7], 100*time.Millisecond),
MigrationDir: filepath.Join(os.TempDir(), "hasura-cli-test-"+strconv.Itoa(rand.Intn(1000))),
ServerConfig: &cli.ServerConfig{
Endpoint: ":",
AdminSecret: "",
ParsedEndpoint: &url.URL{},
},
},
}
opts.EC.Version = version.New()
v, err := version.FetchServerVersion(opts.EC.ServerConfig.Endpoint)
if err == nil {
t.Fatalf("expected error to be not nil")
}
opts.EC.Version.SetServerVersion(v)
err = opts.run()
if err == nil {
t.Fatalf("expected err not to be nil")
}
}
func TestMigrateApplyWithMultipleFlags(t *testing.T) {
logger, _ := test.NewNullLogger()
opts := &migrateApplyOptions{
EC: &cli.ExecutionContext{
Logger: logger,
Spinner: spinner.New(spinner.CharSets[7], 100*time.Millisecond),
MigrationDir: filepath.Join(os.TempDir(), "hasura-cli-test-"+strconv.Itoa(rand.Intn(1000))),
ServerConfig: &cli.ServerConfig{
Endpoint: ":",
AdminSecret: "",
},
},
upMigration: "1",
downMigration: "2",
}
opts.EC.Version = version.New()
opts.EC.Version.SetServerVersion("")
err := opts.EC.ServerConfig.ParseEndpoint()
if err == nil {
t.Fatalf("expected err not to be nil")
}
err = opts.run()
if err == nil {
t.Fatalf("expected err not to be nil")
}
}

View File

@ -6,12 +6,12 @@ import (
"time"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/metadata"
metadataTypes "github.com/hasura/graphql-engine/cli/metadata/types"
"github.com/hasura/graphql-engine/cli/migrate"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/spf13/viper"
v2yaml "gopkg.in/yaml.v2"
mig "github.com/hasura/graphql-engine/cli/migrate/cmd"
log "github.com/sirupsen/logrus"
@ -27,7 +27,6 @@ const migrateCreateCmdExamples = ` # Setup migration files for the first time b
hasura migrate create init --from-server --endpoint "<endpoint>"`
func newMigrateCreateCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
opts := &migrateCreateOptions{
EC: ec,
}
@ -39,10 +38,6 @@ func newMigrateCreateCmd(ec *cli.ExecutionContext) *cobra.Command {
Example: migrateCreateCmdExamples,
SilenceUsage: true,
Args: cobra.ExactArgs(1),
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
opts.name = args[0]
opts.EC.Spin("Creating migration files...")
@ -66,19 +61,10 @@ func newMigrateCreateCmd(ec *cli.ExecutionContext) *cobra.Command {
f.StringArrayVar(&opts.schemaNames, "schema", []string{"public"}, "name of Postgres schema to export as migration")
f.StringVar(&opts.metaDataFile, "metadata-from-file", "", "path to a hasura metadata file to be used for up actions")
f.BoolVar(&opts.metaDataServer, "metadata-from-server", false, "take metadata from the server and write it as an up migration file")
f.String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.String("access-key", "", "access key for Hasura GraphQL Engine")
f.MarkDeprecated("access-key", "use --admin-secret instead")
migrateCreateCmd.MarkFlagFilename("sql-from-file")
migrateCreateCmd.MarkFlagFilename("metadata-from-file")
// need to create a new viper because https://github.com/spf13/viper/issues/233
v.BindPFlag("endpoint", f.Lookup("endpoint"))
v.BindPFlag("admin_secret", f.Lookup("admin-secret"))
v.BindPFlag("access_key", f.Lookup("access-key"))
return migrateCreateCmd
}
@ -115,7 +101,7 @@ func (o *migrateCreateOptions) run() (version int64, err error) {
var migrateDrv *migrate.Migrate
if o.sqlServer || o.metaDataServer {
migrateDrv, err = newMigrate(o.EC.MigrationDir, o.EC.ServerConfig.ParsedEndpoint, o.EC.ServerConfig.AdminSecret, o.EC.Logger, o.EC.Version, true)
migrateDrv, err = newMigrate(o.EC, true)
if err != nil {
return 0, errors.Wrap(err, "cannot create migrate instance")
}
@ -144,33 +130,32 @@ func (o *migrateCreateOptions) run() (version int64, err error) {
}
}
if o.metaDataServer {
// Create metadata migrations only if config version is V1
if o.metaDataServer && ec.Config.Version == cli.V1 {
// To create metadata.yaml, set metadata plugin
tmpDirName, err := ioutil.TempDir("", "*")
if err != nil {
return 0, errors.Wrap(err, "cannot create temp directory to fetch metadata")
}
defer os.RemoveAll(tmpDirName)
plugins := make(metadataTypes.MetadataPlugins, 0)
plugins = append(plugins, metadata.New(o.EC, tmpDirName))
migrateDrv.SetMetadataPlugins(plugins)
// fetch metadata from server
metaData, err := migrateDrv.ExportMetadata()
files, err := migrateDrv.ExportMetadata()
if err != nil {
return 0, errors.Wrap(err, "cannot fetch metadata from server")
}
tmpfile, err := ioutil.TempFile("", "metadata")
err = migrateDrv.WriteMetadata(files)
if err != nil {
return 0, errors.Wrap(err, "cannot create tempfile")
}
defer os.Remove(tmpfile.Name())
t, err := v2yaml.Marshal(metaData)
if err != nil {
return 0, errors.Wrap(err, "cannot marshal metadata")
}
if _, err := tmpfile.Write(t); err != nil {
return 0, errors.Wrap(err, "cannot write to temp file")
}
if err := tmpfile.Close(); err != nil {
return 0, errors.Wrap(err, "cannot close tmp file")
return 0, errors.Wrap(err, "cannot write to tmp file")
}
err = createOptions.SetMetaUpFromFile(tmpfile.Name())
if err != nil {
return 0, errors.Wrap(err, "cannot parse metadata from the server")
for name := range files {
err = createOptions.SetMetaUpFromFile(name)
if err != nil {
return 0, errors.Wrap(err, "cannot parse metadata from the server")
}
}
}

View File

@ -1,35 +0,0 @@
package commands
import (
"math/rand"
"os"
"path/filepath"
"strconv"
"testing"
"time"
"github.com/briandowns/spinner"
"github.com/hasura/graphql-engine/cli"
"github.com/sirupsen/logrus/hooks/test"
"github.com/spf13/pflag"
)
func TestMigrateCreateCmd(t *testing.T) {
logger, _ := test.NewNullLogger()
opts := &migrateCreateOptions{
EC: &cli.ExecutionContext{
Logger: logger,
Spinner: spinner.New(spinner.CharSets[7], 100*time.Millisecond),
MigrationDir: filepath.Join(os.TempDir(), "hasura-cli-test-"+strconv.Itoa(rand.Intn(1000))),
},
name: "create_article",
flags: pflag.NewFlagSet("migrate-create-test", pflag.ContinueOnError),
}
_, err := opts.run()
if err != nil {
t.Fatalf("failed creating migration: %v", err)
}
os.RemoveAll(opts.EC.MigrationDir)
}

View File

@ -13,13 +13,10 @@ import (
"github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/spf13/viper"
mig "github.com/hasura/graphql-engine/cli/migrate/cmd"
)
func newMigrateSquashCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
opts := &migrateSquashOptions{
EC: ec,
}
@ -35,10 +32,6 @@ func newMigrateSquashCmd(ec *cli.ExecutionContext) *cobra.Command {
# Add a name for the new squashed migration
hasura migrate squash --name "<name>" --from 123`,
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
opts.newVersion = getTime()
return opts.run()
@ -50,19 +43,9 @@ func newMigrateSquashCmd(ec *cli.ExecutionContext) *cobra.Command {
f.StringVar(&opts.name, "name", "squashed", "name for the new squashed migration")
f.BoolVar(&opts.deleteSource, "delete-source", false, "delete the source files after squashing without any confirmation")
f.String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.String("access-key", "", "access key for Hasura GraphQL Engine")
f.MarkDeprecated("access-key", "use --admin-secret instead")
// mark flag as required
migrateSquashCmd.MarkFlagRequired("from")
// need to create a new viper because https://github.com/spf13/viper/issues/233
v.BindPFlag("endpoint", f.Lookup("endpoint"))
v.BindPFlag("admin_secret", f.Lookup("admin-secret"))
v.BindPFlag("access_key", f.Lookup("access-key"))
return migrateSquashCmd
}
@ -80,7 +63,7 @@ func (o *migrateSquashOptions) run() error {
o.EC.Logger.Warnln("This command is currently experimental and hence in preview, correctness of squashed migration is not guaranteed!")
o.EC.Spin(fmt.Sprintf("Squashing migrations from %d to latest...", o.from))
defer o.EC.Spinner.Stop()
migrateDrv, err := newMigrate(o.EC.MigrationDir, o.EC.ServerConfig.ParsedEndpoint, o.EC.ServerConfig.AdminSecret, o.EC.Logger, o.EC.Version, true)
migrateDrv, err := newMigrate(o.EC, true)
if err != nil {
return errors.Wrap(err, "unable to initialize migrations driver")
}

View File

@ -10,30 +10,24 @@ import (
"github.com/hasura/graphql-engine/cli/util"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func newMigrateStatusCmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
opts := &migrateStatusOptions{
opts := &MigrateStatusOptions{
EC: ec,
}
migrateStatusCmd := &cobra.Command{
Use: "status",
Short: "Display current status of migrations on a database",
Use: "status",
Short: "Display current status of migrations on a database",
Example: ` # Use with admin secret:
hasura migrate status --admin-secret "<your-admin-secret>"
# Check status on a different server:
hasura migrate status --endpoint "<endpoint>"`,
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
opts.EC.Spin("Fetching migration status...")
status, err := opts.run()
status, err := opts.Run()
opts.EC.Spinner.Stop()
if err != nil {
return err
@ -44,26 +38,15 @@ func newMigrateStatusCmd(ec *cli.ExecutionContext) *cobra.Command {
},
}
f := migrateStatusCmd.Flags()
f.String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.String("access-key", "", "access key for Hasura GraphQL Engine")
f.MarkDeprecated("access-key", "use --admin-secret instead")
// need to create a new viper because https://github.com/spf13/viper/issues/233
v.BindPFlag("endpoint", f.Lookup("endpoint"))
v.BindPFlag("admin_secret", f.Lookup("admin-secret"))
v.BindPFlag("access_key", f.Lookup("access-key"))
return migrateStatusCmd
}
type migrateStatusOptions struct {
type MigrateStatusOptions struct {
EC *cli.ExecutionContext
}
func (o *migrateStatusOptions) run() (*migrate.Status, error) {
migrateDrv, err := newMigrate(o.EC.MigrationDir, o.EC.ServerConfig.ParsedEndpoint, o.EC.ServerConfig.AdminSecret, o.EC.Logger, o.EC.Version, true)
func (o *MigrateStatusOptions) Run() (*migrate.Status, error) {
migrateDrv, err := newMigrate(o.EC, true)
if err != nil {
return nil, err
}

View File

@ -1,70 +0,0 @@
package commands
import (
"math/rand"
"net/url"
"os"
"path/filepath"
"strconv"
"testing"
"time"
"github.com/briandowns/spinner"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/migrate"
"github.com/hasura/graphql-engine/cli/version"
"github.com/sirupsen/logrus/hooks/test"
"github.com/stretchr/testify/assert"
)
func testMigrateStatus(t *testing.T, endpoint *url.URL, migrationsDir string, expectedStatus *migrate.Status) {
logger, _ := test.NewNullLogger()
opts := &migrateStatusOptions{
EC: &cli.ExecutionContext{
Logger: logger,
Spinner: spinner.New(spinner.CharSets[7], 100*time.Millisecond),
MigrationDir: migrationsDir,
ServerConfig: &cli.ServerConfig{
Endpoint: endpoint.String(),
AdminSecret: os.Getenv("HASURA_GRAPHQL_TEST_ADMIN_SECRET"),
ParsedEndpoint: endpoint,
},
},
}
opts.EC.Version = version.New()
v, err := version.FetchServerVersion(opts.EC.ServerConfig.Endpoint)
if err != nil {
t.Fatalf("getting server version failed: %v", err)
}
opts.EC.Version.SetServerVersion(v)
status, err := opts.run()
if err != nil {
t.Fatalf("failed fetching migration status: %v", err)
}
assert.Equal(t, expectedStatus, status)
}
func TestMigrateStatusWithInvalidEndpoint(t *testing.T) {
logger, _ := test.NewNullLogger()
opts := &migrateStatusOptions{
EC: &cli.ExecutionContext{
Logger: logger,
Spinner: spinner.New(spinner.CharSets[7], 100*time.Millisecond),
MigrationDir: filepath.Join(os.TempDir(), "hasura-cli-test-"+strconv.Itoa(rand.Intn(1000))),
ServerConfig: &cli.ServerConfig{
Endpoint: ":",
AdminSecret: "",
ParsedEndpoint: &url.URL{},
},
},
}
opts.EC.Version = version.New()
opts.EC.Version.SetServerVersion("")
_, err := opts.run()
if err == nil {
t.Fatalf("expected err not to be nil")
}
}

View File

@ -1,290 +0,0 @@
package commands
import (
"database/sql"
sqldriver "database/sql/driver"
"fmt"
"github.com/hasura/graphql-engine/cli/migrate"
"io"
"io/ioutil"
"net/url"
"os"
"path"
"path/filepath"
"testing"
"github.com/Masterminds/semver"
mt "github.com/hasura/graphql-engine/cli/migrate/testing"
"github.com/hasura/graphql-engine/cli/version"
_ "github.com/lib/pq"
"github.com/parnurzeal/gorequest"
"github.com/stretchr/testify/assert"
)
var postgresVersions = []mt.Version{
{Image: "postgres:9.6"},
}
var ravenVersions = []mt.Version{
{Image: "hasura/graphql-engine:190d78e", Cmd: []string{"raven", "serve", "--database-url"}, ExposedPort: 8080},
}
var testMetadataPrev = map[string][]byte{
"metadata": []byte(`functions: []
remote_schemas: []
query_collections: []
allowlist: []
version: 2
tables:
- table: test
is_enum: false
configuration:
custom_root_fields:
select: null
select_by_pk: null
select_aggregate: null
insert: null
update: null
delete: null
custom_column_names: {}
object_relationships: []
array_relationships: []
insert_permissions: []
select_permissions: []
update_permissions: []
delete_permissions: []
event_triggers: []
computed_fields: []
`),
"empty-metadata": []byte(`functions: []
remote_schemas: []
query_collections: []
allowlist: []
version: 2
tables: []
`),
}
var testMetadataCurrent = map[string][]byte{
"metadata": []byte(`version: 2
tables:
- table:
schema: public
name: test
`),
"empty-metadata": []byte(`version: 2
tables: []
`),
}
func isReadyPostgres(i mt.Instance) bool {
db, err := sql.Open("postgres", fmt.Sprintf("postgres://postgres@%v:%v/postgres?sslmode=disable", i.Host(), i.Port()))
if err != nil {
return false
}
defer db.Close()
if err = db.Ping(); err != nil {
switch err {
case sqldriver.ErrBadConn, io.EOF:
return false
default:
fmt.Println(err)
}
return false
}
return true
}
func isReadyRaven(i mt.Instance) bool {
request := gorequest.New()
_, _, errs := request.Post(fmt.Sprintf("http://%s:%d", i.Host(), i.Port())).End()
if len(errs) == 0 {
return true
}
return false
}
func testMigrateWithDocker(t *testing.T, migrationsDir, executionDir string) {
mt.ParallelTest(t, postgresVersions, isReadyPostgres,
func(t *testing.T, pi mt.Instance) {
for i, v := range ravenVersions {
ravenVersions[i].Cmd = append(v.Cmd, fmt.Sprintf("postgres://postgres@%v:%v/postgres?sslmode=disable", pi.NetworkSettings().Gateway, pi.Port()))
}
mt.ParallelTest(t, ravenVersions, isReadyRaven,
func(t *testing.T, ri mt.Instance) {
defer pi.Remove()
defer ri.Remove()
endpointURL, err := url.Parse(fmt.Sprintf("http://%s:%d", ri.Host(), ri.Port()))
if err != nil {
t.Fatal(err)
}
// Create migration Dir
migrationsDir, err := ioutil.TempDir("", "")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll(migrationsDir)
testMigrate(t, endpointURL, migrationsDir)
})
})
}
func TestMigrateCmd(t *testing.T) {
endpointURL, err := url.Parse(os.Getenv("HASURA_GRAPHQL_TEST_ENDPOINT"))
if err != nil {
t.Fatal(err)
}
// Create migration Dir
migrationsDir, err := ioutil.TempDir("", "")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll(migrationsDir)
testMigrate(t, endpointURL, migrationsDir)
}
func testMigrate(t *testing.T, endpoint *url.URL, migrationsDir string) {
versionCtx := version.New()
v, err := version.FetchServerVersion(endpoint.String())
if err != nil {
t.Fatal(err)
}
versionCtx.SetServerVersion(v)
metadataFile := filepath.Join(migrationsDir, "metadata.yaml")
// Create 1_create_table_test.up.sql which creates table test
mustWriteFile(t, migrationsDir, "1_create_table_test.up.sql", `CREATE TABLE "test"("id" serial NOT NULL, PRIMARY KEY ("id") )`)
// Create 1_create_table_test.down.sql which creates table test
mustWriteFile(t, migrationsDir, "1_create_table_test.down.sql", `DROP TABLE "test";`)
// Create 2_add_table_test.up.yaml which adds table test to metadata
mustWriteFile(t, migrationsDir, "2_add_table_test.up.yaml", `- args:
name: test
type: add_existing_table_or_view
`)
mustWriteFile(t, migrationsDir, "2_add_table_test.down.yaml", `- args:
table: test
type: untrack_table
`)
mustWriteFile(t, migrationsDir, "2_add_table_test.up.sql", `CREATE TABLE "author"("id" serial NOT NULL, PRIMARY KEY ("id") )`)
mustWriteFile(t, migrationsDir, "2_add_table_test.down.sql", `DROP TABLE "author";`)
// Apply 1_create_table_test.up.sql
testMigrateApply(t, endpoint, migrationsDir, "1", "", "", "")
// Check Migration status
expectedStatus := migrate.NewStatus()
expectedStatus.Append(&migrate.MigrationStatus{
Version: 1,
Name: "create_table_test",
IsApplied: true,
IsPresent: true,
})
expectedStatus.Append(&migrate.MigrationStatus{
Version: 2,
Name: "add_table_test",
IsApplied: false,
IsPresent: true,
})
testMigrateStatus(t, endpoint, migrationsDir, expectedStatus)
// Apply 2_add_table_test.up.yaml
testMigrateApply(t, endpoint, migrationsDir, "", "", "2", "")
// Check Migration status
expectedStatus = migrate.NewStatus()
expectedStatus.Append(&migrate.MigrationStatus{
Version: 1,
Name: "create_table_test",
IsApplied: true,
IsPresent: true,
})
expectedStatus.Append(&migrate.MigrationStatus{
Version: 2,
Name: "add_table_test",
IsApplied: true,
IsPresent: true,
})
testMigrateStatus(t, endpoint, migrationsDir, expectedStatus)
// Apply 2_add_table_test.down.yaml
testMigrateApply(t, endpoint, migrationsDir, "", "1", "", "")
// Check Migration status
expectedStatus = migrate.NewStatus()
expectedStatus.Append(&migrate.MigrationStatus{
Version: 1,
Name: "create_table_test",
IsApplied: true,
IsPresent: true,
})
expectedStatus.Append(&migrate.MigrationStatus{
Version: 2,
Name: "add_table_test",
IsApplied: false,
IsPresent: true,
})
testMigrateStatus(t, endpoint, migrationsDir, expectedStatus)
// Apply 1_create_table_test.down.sql
testMigrateApply(t, endpoint, migrationsDir, "", "", "1", "down")
// Check Migration status
expectedStatus = migrate.NewStatus()
expectedStatus.Append(&migrate.MigrationStatus{
Version: 1,
Name: "create_table_test",
IsApplied: false,
IsPresent: true,
})
expectedStatus.Append(&migrate.MigrationStatus{
Version: 2,
Name: "add_table_test",
IsApplied: false,
IsPresent: true,
})
testMigrateStatus(t, endpoint, migrationsDir, expectedStatus)
// Apply both 1 and 2
testMigrateApply(t, endpoint, migrationsDir, "", "", "", "")
testMetadataExport(t, metadataFile, endpoint)
compareMetadata(t, metadataFile, "metadata", versionCtx.ServerSemver)
testMetadataApply(t, metadataFile, endpoint)
testMetadataExport(t, metadataFile, endpoint)
compareMetadata(t, metadataFile, "metadata", versionCtx.ServerSemver)
testMetadataReset(t, metadataFile, endpoint)
testMetadataExport(t, metadataFile, endpoint)
compareMetadata(t, metadataFile, "empty-metadata", versionCtx.ServerSemver)
testMetadataInconsistencyDropCmd(t, migrationsDir, metadataFile, endpoint)
}
func mustWriteFile(t testing.TB, dir, file string, body string) {
if err := ioutil.WriteFile(path.Join(dir, file), []byte(body), 06444); err != nil {
t.Fatal(err)
}
}
func compareMetadata(t testing.TB, metadataFile string, actualType string, serverVersion *semver.Version) {
var actualData []byte
c, err := semver.NewConstraint("<= v1.0.0")
if err != nil {
t.Fatal(err)
}
if serverVersion == nil || !c.Check(serverVersion) {
actualData = testMetadataCurrent[actualType]
} else {
actualData = testMetadataPrev[actualType]
}
data, err := ioutil.ReadFile(metadataFile)
if err != nil {
t.Fatalf("error reading metadata %s", err)
}
assert.Equal(t, string(actualData), string(data))
}
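
For reference, compareMetadata above picks the fixture format with a Masterminds/semver constraint against the server version. A minimal standalone sketch of that check, using made-up version strings:

package main

import (
	"fmt"

	"github.com/Masterminds/semver"
)

func main() {
	// Same constraint as compareMetadata: anything <= v1.0.0 gets the "previous" metadata fixture.
	c, err := semver.NewConstraint("<= v1.0.0")
	if err != nil {
		panic(err)
	}
	for _, raw := range []string{"1.0.0", "1.1.0"} {
		v, err := semver.NewVersion(raw)
		if err != nil {
			panic(err)
		}
		// Prints whether the previous-format fixture would be used for this server version.
		fmt.Printf("%s -> previous format: %v\n", raw, c.Check(v))
	}
}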

189
cli/commands/plugins.go Normal file
View File

@ -0,0 +1,189 @@
package commands
/*
Most of the plugin handler code is borrowed from the kubectl codebase.
Wherever "courtesy: kubectl" is indicated, the copyright belongs to the
respective authors with the following notice:
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import (
"fmt"
"os"
"os/exec"
"path/filepath"
"regexp"
"runtime"
"strings"
"syscall"
"unicode"
"github.com/hasura/graphql-engine/cli"
"github.com/pkg/errors"
"github.com/spf13/cobra"
)
var validPluginFilenamePrefixes = []string{"hasura"}
// NewPluginsCmd returns the plugins command
func NewPluginsCmd(ec *cli.ExecutionContext) *cobra.Command {
pluginsCmd := &cobra.Command{
Use: "plugins",
Aliases: []string{"plugin"},
Short: "Manage plugins for the cli",
Long: `Plugins can be installed to extend the functionality of Hasura CLI
An index for all available plugins can be found at
https://github.com/hasura/cli-plugins-index
Please open pull requests against this repo to add new plugins`,
SilenceUsage: true,
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
return ec.PluginsConfig.Repo.EnsureCloned()
},
}
pluginsCmd.AddCommand(
newPluginsListCmd(ec),
newPluginsInstallCmd(ec),
newPluginsUnInstallCmd(ec),
newPluginsUpgradeCmd(ec),
)
return pluginsCmd
}
// PluginHandler is capable of parsing command line arguments
// and performing executable filename lookups to search
// for valid plugin files, and execute found plugins.
// courtesy: kubectl
// https://github.com/kubernetes/kubernetes/blob/master/pkg/kubectl/cmd/cmd.go
type PluginHandler interface {
// Lookup receives a potential filename and returns a full or relative path
// to an executable, if one exists at the given filename, or a boolean false.
// Lookup will iterate over a list of given prefixes
// in order to recognize valid plugin filenames.
// The first filepath to match a prefix is returned.
Lookup(filename string) (string, bool)
// Execute receives an executable's filepath, a slice
// of arguments, and a slice of environment variables
// to relay to the executable.
Execute(executablePath string, cmdArgs, environment []string) error
}
// DefaultPluginHandler implements PluginHandler
type DefaultPluginHandler struct {
ValidPrefixes []string
}
// NewDefaultPluginHandler instantiates the DefaultPluginHandler with a list of
// given filename prefixes used to identify valid plugin filenames.
func NewDefaultPluginHandler(validPrefixes []string) *DefaultPluginHandler {
return &DefaultPluginHandler{
ValidPrefixes: validPrefixes,
}
}
// Lookup implements PluginHandler
func (h *DefaultPluginHandler) Lookup(filename string) (string, bool) {
for _, prefix := range h.ValidPrefixes {
filename := filepath.Join(ec.PluginsConfig.Paths.BinPath(), fmt.Sprintf("%s-%s", prefix, filename))
path, err := exec.LookPath(filename)
if err != nil || len(path) == 0 {
continue
}
return path, true
}
return "", false
}
// Execute implements PluginHandler
func (h *DefaultPluginHandler) Execute(executablePath string, cmdArgs, environment []string) error {
// Windows does not support exec syscall.
if runtime.GOOS == "windows" {
cmd := exec.Command(executablePath, cmdArgs...)
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
cmd.Stdin = os.Stdin
cmd.Env = environment
err := cmd.Run()
if err == nil {
os.Exit(0)
}
return err
}
// invoke cmd binary relaying the environment and args given
// append executablePath to cmdArgs, as execve will make first argument the "binary name".
return syscall.Exec(executablePath, append([]string{executablePath}, cmdArgs...), environment)
}
// HandlePluginCommand receives a pluginHandler and command-line arguments and attempts to find
// a plugin executable on the PATH that satisfies the given arguments.
func HandlePluginCommand(pluginHandler PluginHandler, cmdArgs []string) error {
remainingArgs := []string{} // all "non-flag" arguments
for idx := range cmdArgs {
if strings.HasPrefix(cmdArgs[idx], "-") {
break
}
remainingArgs = append(remainingArgs, strings.Replace(cmdArgs[idx], "-", "_", -1))
}
foundBinaryPath := ""
// attempt to find binary, starting at longest possible name with given cmdArgs
for len(remainingArgs) > 0 {
path, found := pluginHandler.Lookup(strings.Join(remainingArgs, "-"))
if !found {
remainingArgs = remainingArgs[:len(remainingArgs)-1]
continue
}
foundBinaryPath = path
break
}
if len(foundBinaryPath) == 0 {
return nil
}
// invoke cmd binary relaying the current environment and args given
if err := pluginHandler.Execute(foundBinaryPath, cmdArgs[len(remainingArgs):], os.Environ()); err != nil {
return err
}
return nil
}
func ensureDirs(paths ...string) error {
for _, p := range paths {
if err := os.MkdirAll(p, 0755); err != nil {
return errors.Wrapf(err, "failed to ensure create directory %q", p)
}
}
return nil
}
func indent(s string) string {
out := "\\\n"
s = strings.TrimRightFunc(s, unicode.IsSpace)
out += regexp.MustCompile("(?m)^").ReplaceAllString(s, " | ")
out += "\n/"
return out
}
func limitString(s string, length int) string {
if len(s) > length && length > 3 {
s = s[:length-3] + "..."
}
return s
}
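
To make the lookup order in HandlePluginCommand concrete, here is a small standalone sketch; stubHandler and the sample arguments are illustrative only and not part of the CLI:

package main

import (
	"fmt"
	"strings"
)

// stubHandler records the binary names that would be probed; it never finds one.
type stubHandler struct{ seen []string }

func (s *stubHandler) Lookup(name string) (string, bool) {
	s.seen = append(s.seen, "hasura-"+name)
	return "", false
}

func main() {
	// Simulate `hasura foo bar-baz --flag`: the first flag stops the scan,
	// and dashes inside command words become underscores.
	args := []string{"foo", "bar-baz", "--flag"}
	words := []string{}
	for _, a := range args {
		if strings.HasPrefix(a, "-") {
			break
		}
		words = append(words, strings.Replace(a, "-", "_", -1))
	}
	h := &stubHandler{}
	// Try the longest candidate first, then drop trailing words.
	for len(words) > 0 {
		if _, ok := h.Lookup(strings.Join(words, "-")); ok {
			break
		}
		words = words[:len(words)-1]
	}
	fmt.Println(h.seen) // [hasura-foo-bar_baz hasura-foo]
}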

View File

@ -0,0 +1,77 @@
package commands
/*
some of the code here is borrowed from the krew codebase (kubernetes)
and the copyright belongs to the respective authors.
source: https://github.com/kubernetes-sigs/krew/blob/master/cmd/krew/cmd/install.go
*/
import (
"fmt"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/plugins"
"github.com/pkg/errors"
"github.com/spf13/cobra"
)
func newPluginsInstallCmd(ec *cli.ExecutionContext) *cobra.Command {
opts := &PluginInstallOptions{
EC: ec,
}
pluginsInstallCmd := &cobra.Command{
Use: "install [plugin-name]",
Short: "Install a plugin from the index",
Example: ` # Install a plugin:
hasura plugins install [plugin-name]`,
SilenceUsage: true,
Args: cobra.ExactArgs(1),
PreRunE: func(cmd *cobra.Command, args []string) error {
err := ec.Prepare()
if err != nil {
return err
}
err = ec.PluginsConfig.Repo.EnsureUpdated()
if err != nil {
ec.Logger.Debugf("unable to update plugins index: got %v", err)
}
return nil
},
RunE: func(cmd *cobra.Command, args []string) error {
opts.Name = args[0]
ec.Spin(fmt.Sprintf("Installing plugin %q...", opts.Name))
defer ec.Spinner.Stop()
err := opts.Run()
if err == plugins.ErrIsAlreadyInstalled {
ec.Spinner.Stop()
ec.Logger.WithField("name", opts.Name).Infof("%q", err)
return nil
}
if err != nil && err != plugins.ErrIsAlreadyInstalled {
return errors.Wrapf(err, "failed to install plugin %q", opts.Name)
}
ec.Spinner.Stop()
ec.Logger.WithField("name", opts.Name).Infoln("plugin installed")
return nil
},
}
f := pluginsInstallCmd.Flags()
f.StringVar(&opts.ManifestFile, "manifest-file", "", "(dev) specify local manifest file")
f.MarkHidden("manifest-file")
return pluginsInstallCmd
}
type PluginInstallOptions struct {
EC *cli.ExecutionContext
Name string
ManifestFile string
}
func (o *PluginInstallOptions) Run() error {
return o.EC.PluginsConfig.Install(o.Name, o.ManifestFile)
}

View File

@ -0,0 +1,133 @@
package commands
/*
some of the code here is borrowed from the krew codebase (kubernetes)
and the copyright belongs to the respective authors.
source: https://github.com/kubernetes-sigs/krew/blob/master/cmd/krew/cmd/list.go
*/
import (
"fmt"
"io"
"os"
"runtime"
"sort"
"strings"
"text/tabwriter"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/plugins"
)
func newPluginsListCmd(ec *cli.ExecutionContext) *cobra.Command {
opts := &pluginListOptions{
EC: ec,
}
pluginsListCmd := &cobra.Command{
Use: "list",
Aliases: []string{"ls"},
Short: "List all available plugins from the index, with their versions and installation status",
Example: ` # List all plugins
hasura plugins list
# The command also updates the plugin index that is cached locally
# To avoid updating the index, use the following flag:
hasura plugins list --dont-update-index`,
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
return ec.Prepare()
},
RunE: func(cmd *cobra.Command, args []string) error {
return opts.run()
},
}
f := pluginsListCmd.Flags()
f.BoolVar(&opts.dontUpdateIndex, "dont-update-index", false, "don't update the plugin index local cache, only show the list")
return pluginsListCmd
}
type pluginListOptions struct {
EC *cli.ExecutionContext
dontUpdateIndex bool
}
func (p *pluginListOptions) run() error {
if !p.dontUpdateIndex {
ec.Spin("Updating plugin index...")
err := p.EC.PluginsConfig.Repo.EnsureUpdated()
if err != nil {
p.EC.Logger.Warnf("unable to update plugin index %q", err)
}
}
ec.Spin("Fetching plugins list...")
defer ec.Spinner.Stop()
availablePlugins, err := ec.PluginsConfig.ListPlugins()
if err != nil {
return errors.Wrap(err, "failed to load the list of plugins from the index")
}
names := make([]string, len(availablePlugins))
pluginMap := make(map[string]plugins.Plugin, len(availablePlugins))
for i, p := range availablePlugins {
names[i] = p.Name
pluginMap[p.Name] = p
}
installed, err := ec.PluginsConfig.ListInstalledPlugins()
if err != nil {
return errors.Wrap(err, "failed to load installed plugins")
}
// No plugins found
if len(names) == 0 {
return nil
}
var rows [][]string
cols := []string{"NAME", "DESCRIPTION", "VERSION", "INSTALLED"}
for _, name := range names {
plugin := pluginMap[name]
var status string
var version string
if _, ok := installed[name]; ok {
status = "yes"
version = installed[name]
} else if _, ok, err := plugins.MatchPlatform(plugin.Platforms); err != nil {
return errors.Wrapf(err, "failed to get the matching platform for plugin %s", name)
} else if ok {
status = "no"
} else {
status = "unavailable on " + runtime.GOOS
}
if status == "yes" {
version = installed[name]
} else {
version = plugin.Version
}
rows = append(rows, []string{name, limitString(plugin.ShortDescription, 50), version, status})
}
rows = sortByFirstColumn(rows)
ec.Spinner.Stop()
return printTable(os.Stdout, cols, rows)
}
func printTable(out io.Writer, columns []string, rows [][]string) error {
w := tabwriter.NewWriter(out, 0, 0, 2, ' ', 0)
fmt.Fprint(w, strings.Join(columns, "\t"))
fmt.Fprintln(w)
for _, values := range rows {
fmt.Fprint(w, strings.Join(values, "\t"))
fmt.Fprintln(w)
}
return w.Flush()
}
func sortByFirstColumn(rows [][]string) [][]string {
sort.Slice(rows, func(a, b int) bool {
return rows[a][0] < rows[b][0]
})
return rows
}
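
A minimal standalone sketch of the aligned output printTable produces with tabwriter; the plugin names and descriptions below are made up:

package main

import (
	"fmt"
	"os"
	"strings"
	"text/tabwriter"
)

func main() {
	// Same tabwriter settings as printTable above.
	w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0)
	cols := []string{"NAME", "DESCRIPTION", "VERSION", "INSTALLED"}
	rows := [][]string{
		{"cli-ext", "Extension for the hasura CLI", "v1.0.0", "yes"},
		{"example", "An example plugin", "v0.1.0", "no"},
	}
	fmt.Fprintln(w, strings.Join(cols, "\t"))
	for _, r := range rows {
		fmt.Fprintln(w, strings.Join(r, "\t"))
	}
	w.Flush()
}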

View File

@ -0,0 +1,42 @@
package commands
/*
some of the code here is borrowed from the krew codebase (kubernetes)
and the copyright belongs to the respective authors.
source: https://github.com/kubernetes-sigs/krew/blob/master/cmd/krew/cmd/uninstall.go
*/
import (
"fmt"
"github.com/hasura/graphql-engine/cli"
"github.com/pkg/errors"
"github.com/spf13/cobra"
)
func newPluginsUnInstallCmd(ec *cli.ExecutionContext) *cobra.Command {
pluginsUnInstallCmd := &cobra.Command{
Use: "uninstall [plugin-name]",
Short: "Uninstall a plugin",
Example: ` # Uninstall a plugin
hasura plugins uninstall [plugin-name]`,
SilenceUsage: true,
Args: cobra.ExactArgs(1),
PreRunE: func(cmd *cobra.Command, args []string) error {
return ec.Prepare()
},
RunE: func(cmd *cobra.Command, args []string) error {
pluginName := args[0]
ec.Spin(fmt.Sprintf("Uninstalling plugin %q", pluginName))
defer ec.Spinner.Stop()
if err := ec.PluginsConfig.Uninstall(pluginName); err != nil {
return errors.Wrapf(err, "failed to uninstall plugin %s", pluginName)
}
ec.Spinner.Stop()
ec.Logger.WithField("name", pluginName).Infoln("plugin uninstalled")
return nil
},
}
return pluginsUnInstallCmd
}

View File

@ -0,0 +1,49 @@
package commands
/*
some of the code here is borrowed from the krew codebase (kubernetes)
and the copyright belongs to the respective authors.
source: https://github.com/kubernetes-sigs/krew/blob/master/cmd/krew/cmd/upgrade.go
*/
import (
"fmt"
"github.com/sirupsen/logrus"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/plugins"
"github.com/pkg/errors"
"github.com/spf13/cobra"
)
func newPluginsUpgradeCmd(ec *cli.ExecutionContext) *cobra.Command {
pluginsUpgradeCmd := &cobra.Command{
Use: "upgrade",
Short: "Upgrade a plugin to a newer version",
Example: ` # Upgrade a plugin to a newer version
hasura plugins upgrade [plugin-name]`,
SilenceUsage: true,
Args: cobra.ExactArgs(1),
PreRunE: func(cmd *cobra.Command, args []string) error {
return ec.Prepare()
},
RunE: func(cmd *cobra.Command, args []string) error {
pluginName := args[0]
ec.Spin(fmt.Sprintf("Upgrading plugin %q...", pluginName))
defer ec.Spinner.Stop()
plugin, err := ec.PluginsConfig.Upgrade(pluginName)
if err != nil && err != plugins.ErrIsAlreadyUpgraded {
return errors.Wrapf(err, "failed to upgrade plugin %q", plugin.Name)
}
ec.Spinner.Stop()
ec.Logger.WithFields(logrus.Fields{
"name": pluginName,
"version": plugin.Version,
}).Infoln("Plugin upgraded")
return nil
},
}
return pluginsUpgradeCmd
}

View File

@ -3,6 +3,10 @@
package commands
import (
"fmt"
"io"
"os"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/update"
"github.com/hasura/graphql-engine/cli/version"
@ -61,7 +65,10 @@ func init() {
NewConsoleCmd(ec),
NewMetadataCmd(ec),
NewMigrateCmd(ec),
NewActionsCmd(ec),
NewPluginsCmd(ec),
NewVersionCmd(ec),
NewScriptsCmd(ec),
NewDocsCmd(ec),
NewCompletionCmd(ec),
NewUpdateCLICmd(ec),
@ -74,13 +81,42 @@ func init() {
f.BoolVar(&ec.NoColor, "no-color", false, "do not colorize output (default: false)")
}
// NewDefaultHasuraCommand creates the `hasura` command with default arguments
func NewDefaultHasuraCommand() *cobra.Command {
return NewDefaultHasuraCommandWithArgs(NewDefaultPluginHandler(validPluginFilenamePrefixes), os.Args, os.Stdin, os.Stdout, os.Stderr)
}
// NewDefaultHasuraCommandWithArgs creates the `hasura` command with arguments
func NewDefaultHasuraCommandWithArgs(pluginHandler PluginHandler, args []string, in io.Reader, out, errout io.Writer) *cobra.Command {
cmd := rootCmd
if pluginHandler == nil {
return cmd
}
if len(args) > 1 {
cmdPathPieces := args[1:]
// only look for suitable extension executables if
// the specified command does not already exist
if _, _, err := cmd.Find(cmdPathPieces); err != nil {
if err := HandlePluginCommand(pluginHandler, cmdPathPieces); err != nil {
fmt.Fprintf(errout, "%v\n", err)
os.Exit(1)
}
}
}
return cmd
}
// Execute executes the command and returns the error
func Execute() error {
err := ec.Prepare()
if err != nil {
return errors.Wrap(err, "preparing execution context failed")
}
err = rootCmd.Execute()
err = NewDefaultHasuraCommand().Execute()
if err != nil {
ec.Telemetry.IsError = true
}

19
cli/commands/scripts.go Normal file
View File

@ -0,0 +1,19 @@
package commands
import (
"github.com/hasura/graphql-engine/cli"
"github.com/spf13/cobra"
)
// NewScriptsCmd returns the scripts command
func NewScriptsCmd(ec *cli.ExecutionContext) *cobra.Command {
scriptsCmd := &cobra.Command{
Use: "scripts",
Short: "Execute helper scripts to manage Hasura Projects",
SilenceUsage: true,
}
scriptsCmd.AddCommand(
newScriptsUpdateConfigV2Cmd(ec),
)
return scriptsCmd
}

View File

@ -0,0 +1,344 @@
package commands
import (
"bytes"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"gopkg.in/yaml.v2"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/migrate/database/hasuradb"
"github.com/hasura/graphql-engine/cli/migrate/source"
"github.com/hasura/graphql-engine/cli/migrate/source/file"
"github.com/hasura/graphql-engine/cli/plugins"
"github.com/hasura/graphql-engine/cli/util"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func newScriptsUpdateConfigV2Cmd(ec *cli.ExecutionContext) *cobra.Command {
v := viper.New()
var metadataDir string
scriptsUpdateConfigV2Cmd := &cobra.Command{
Use: "update-project-v2",
Aliases: []string{"update-config-v2"},
Short: "Update the Hasura Project from v1 to v2",
Long: `Update the Hasura Project from v1 to v2 by executing the following actions:
1. Installs a plugin system for CLI
2. Installs CLI Extensions plugins (primarily for actions)
3. Takes a back up of migrations directory
4. Removes all metadata yaml migrations and converts everything to SQL
5. Exports the metadata from server in the new format (multiple files in a directory)
6. Re-writes the config.yaml file to the new format
`,
Example: ` # Read more about v2 configuration for CLI at https://docs.hasura.io
# Update the Hasura Project from v1 to v2
hasura scripts update-project-v2
# Update the Hasura Project from v1 to v2 with a different metadata directory:
hasura scripts update-project-v2 --metadata-dir "metadata"`,
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
ec.Viper = v
err := ec.Prepare()
if err != nil {
return err
}
return ec.Validate()
},
RunE: func(cmd *cobra.Command, args []string) error {
if ec.Config.Version != cli.V1 {
return fmt.Errorf("this script can be executed only when the current config version is 1")
}
// update the plugin index
ec.Spin("Updating the plugin index...")
defer ec.Spinner.Stop()
err := ec.PluginsConfig.Repo.EnsureUpdated()
if err != nil {
return errors.Wrap(err, "cannot update plugin index")
}
// install the plugin
ec.Spin("Installing cli-ext plugin...")
err = ec.PluginsConfig.Install("cli-ext", "")
if err != nil && err != plugins.ErrIsAlreadyInstalled {
return errors.Wrap(err, "cannot install plugin")
}
// Move copy migrations directory to migrations_backup
ec.Spin("Backing up migrations...")
err = util.CopyDir(ec.MigrationDir, filepath.Join(ec.ExecutionDirectory, "migrations_backup"))
if err != nil {
return errors.Wrap(err, "error in copying migrations to migrations_backup")
}
defer func() {
if err != nil {
ec.Logger.Infof("migrations are backed up to migrations_backup directory.")
}
}()
// Open the file driver to list of source migrations and remove unwanted yaml
ec.Spin("Cleaning up migrations...")
fileCfg, err := file.New(getFilePath(ec.MigrationDir).String(), ec.Logger)
if err != nil {
return errors.Wrap(err, "error in opening migrate file driver")
}
err = fileCfg.Scan()
if err != nil {
return errors.Wrap(err, "error in scanning migrate file driver")
}
// Remove yaml from up migrations
upVersions := make([]uint64, 0)
for _, version := range fileCfg.Migrations.Index {
sqlUp := &bytes.Buffer{}
// check if up.yaml exists
upMetaMigration, ok := fileCfg.Migrations.Migrations[version][source.MetaUp]
if !ok {
continue
}
// Read the up.yaml file
bodyReader, _, _, err := fileCfg.ReadMetaUp(version)
if err != nil {
return errors.Wrapf(err, "error in reading %s file", upMetaMigration.Raw)
}
buf := new(bytes.Buffer)
_, err = buf.ReadFrom(bodyReader)
if err != nil {
return errors.Wrapf(err, "unable to read bytes")
}
var queries []hasuradb.HasuraInterfaceQuery
err = yaml.Unmarshal(buf.Bytes(), &queries)
if err != nil {
return errors.Wrapf(err, "unable to unmarhsal %s", upMetaMigration.Raw)
}
// for each query check if type is run_sql
// if yes, append to bytes buffer
for _, query := range queries {
if query.Type == "run_sql" {
argByt, err := yaml.Marshal(query.Args)
if err != nil {
return errors.Wrapf(err, "unable to marshal run_sql args in %s", upMetaMigration.Raw)
}
var to hasuradb.RunSQLInput
err = yaml.Unmarshal(argByt, &to)
if err != nil {
return errors.Wrapf(err, "unable to unmarshal run_sql args in %s", upMetaMigration.Raw)
}
sqlUp.WriteString("\n")
sqlUp.WriteString(to.SQL)
}
}
// check if up.sql file exists
if string(sqlUp.Bytes()) != "" {
upMigration, ok := fileCfg.Migrations.Migrations[version][source.Up]
if !ok {
// if up.sql doesn't exist, create an up.sql file and upMigration
var filePath string
if upMetaMigration.IsDir {
dir := filepath.Dir(upMetaMigration.Raw)
filePath = filepath.Join(ec.MigrationDir, dir, "up.sql")
} else {
fileName := fmt.Sprintf("%d_%s.up.sql", version, upMetaMigration.Identifier)
filePath = filepath.Join(ec.MigrationDir, fileName)
}
err = ioutil.WriteFile(filePath, sqlUp.Bytes(), os.ModePerm)
if err != nil {
return errors.Wrap(err, "unable to create up migration")
}
fileCfg.Migrations.Migrations[version][source.Up] = &source.Migration{}
} else {
upByt, err := ioutil.ReadFile(upMigration.Raw)
if err != nil {
return errors.Wrap(err, "error in reading up.sql")
}
upByt = append(upByt, sqlUp.Bytes()...)
err = ioutil.WriteFile(upMigration.Raw, upByt, os.ModePerm)
if err != nil {
return errors.Wrap(err, "error in writing up.sql")
}
}
}
// delete the yaml file
err = os.Remove(filepath.Join(ec.MigrationDir, upMetaMigration.Raw))
if err != nil {
return errors.Wrap(err, "error in removing up.yaml")
}
delete(fileCfg.Migrations.Migrations[version], source.MetaUp)
}
// Remove yaml from down migrations
for _, version := range fileCfg.Migrations.Index {
sqlDown := &bytes.Buffer{}
downMetaMigration, ok := fileCfg.Migrations.Migrations[version][source.MetaDown]
if !ok {
continue
}
bodyReader, _, _, err := fileCfg.ReadMetaDown(version)
if err != nil {
return errors.Wrapf(err, "error in reading %s file", downMetaMigration.Raw)
}
buf := new(bytes.Buffer)
_, err = buf.ReadFrom(bodyReader)
if err != nil {
return errors.Wrap(err, "unable to read bytes")
}
var queries []hasuradb.HasuraInterfaceQuery
err = yaml.Unmarshal(buf.Bytes(), &queries)
if err != nil {
return errors.Wrapf(err, "unable to unmarshal %s", downMetaMigration.Raw)
}
for _, query := range queries {
if query.Type == "run_sql" {
argByt, err := yaml.Marshal(query.Args)
if err != nil {
return errors.Wrapf(err, "unable to marshal run_sql args in %s", downMetaMigration.Raw)
}
var to hasuradb.RunSQLInput
err = yaml.Unmarshal(argByt, &to)
if err != nil {
return errors.Wrapf(err, "unable to unmarshal run_sql args in %s", downMetaMigration.Raw)
}
sqlDown.WriteString("\n")
sqlDown.WriteString(to.SQL)
}
}
// check if down.sql file exists
if string(sqlDown.Bytes()) != "" {
downMigration, ok := fileCfg.Migrations.Migrations[version][source.Down]
if !ok {
// if down.sql doesn't exist, create a down.sql file and downMigration
var filePath string
if downMetaMigration.IsDir {
dir := filepath.Dir(downMetaMigration.Raw)
filePath = filepath.Join(ec.MigrationDir, dir, "down.sql")
} else {
fileName := fmt.Sprintf("%d_%s.down.sql", version, downMetaMigration.Identifier)
filePath = filepath.Join(ec.MigrationDir, fileName)
}
err = ioutil.WriteFile(filePath, sqlDown.Bytes(), os.ModePerm)
if err != nil {
return errors.Wrap(err, "unable to create up migration")
}
fileCfg.Migrations.Migrations[version][source.Down] = &source.Migration{}
} else {
downByt, err := ioutil.ReadFile(downMigration.Raw)
if err != nil {
return errors.Wrap(err, "error in reading down.sql")
}
downByt = append(sqlDown.Bytes(), downByt...)
err = ioutil.WriteFile(downMigration.Raw, downByt, os.ModePerm)
if err != nil {
return errors.Wrap(err, "error in writing down.sql")
}
}
}
// delete the yaml file
err = os.Remove(filepath.Join(ec.MigrationDir, downMetaMigration.Raw))
if err != nil {
return errors.Wrap(err, "error in removing down.yaml")
}
delete(fileCfg.Migrations.Migrations[version], source.MetaDown)
}
for version := range fileCfg.Migrations.Migrations {
directions := fileCfg.GetDirections(version)
// if no migration files remain in any direction for this version, delete the leftovers and mark it for removal
if !directions[source.Up] && !directions[source.MetaUp] && !directions[source.Down] && !directions[source.MetaDown] {
files, err := filepath.Glob(filepath.Join(ec.MigrationDir, fmt.Sprintf("%d_*", version)))
if err != nil {
return errors.Wrapf(err, "unable to filter files for %d", version)
}
for _, file := range files {
info, err := os.Stat(file)
if err != nil {
return errors.Wrap(err, "error in stating file")
}
if info.IsDir() {
err = os.RemoveAll(file)
if err != nil {
return errors.Wrap(err, "error in removing dir")
}
} else {
if err := os.Remove(file); err != nil {
return errors.Wrap(err, "error in removing file")
}
}
}
upVersions = append(upVersions, version)
}
}
ec.Spin("Removing versions from database...")
migrateDrv, err := newMigrate(ec, true)
if err != nil {
return errors.Wrap(err, "unable to initialize migrations driver")
}
err = migrateDrv.RemoveVersions(upVersions)
if err != nil {
return errors.Wrap(err, "unable to remove versions from database")
}
// update current config to v2
ec.Spin("Updating current config to 2")
os.Setenv("HASURA_GRAPHQL_VERSION", "2")
os.Setenv("HASURA_GRAPHQL_METADATA_DIRECTORY", metadataDir)
os.Setenv("HASURA_GRAPHQL_ACTION_KIND", ec.Viper.GetString("actions.kind"))
os.Setenv("HASURA_GRAPHQL_ACTION_HANDLER_WEBHOOK_BASEURL", ec.Viper.GetString("actions.handler_webhook_baseurl"))
defer func() {
// unset env
os.Unsetenv("HASURA_GRAPHQL_VERSION")
os.Unsetenv("HASURA_GRAPHQL_METADATA_DIRECTORY")
os.Unsetenv("HASURA_GRAPHQL_ACTION_KIND")
os.Unsetenv("HASURA_GRAPHQL_ACTION_HANDLER_WEBHOOK_BASEURL")
}()
ec.Spin("Reloading config file...")
err = ec.Validate()
if err != nil {
return errors.Wrap(err, "cannot validate new config")
}
defer func() {
if err != nil {
os.RemoveAll(ec.MetadataDir)
}
}()
// set codegen to nil, so that it is not exported in yaml
ec.Config.ActionConfig.Codegen = nil
// run metadata export
ec.Spin("Exporting metadata...")
migrateDrv, err = newMigrate(ec, true)
if err != nil {
return errors.Wrap(err, "unable to initialize migrations driver")
}
files, err := migrateDrv.ExportMetadata()
if err != nil {
return errors.Wrap(err, "cannot export metadata from server")
}
ec.Spin("Writing metadata...")
err = migrateDrv.WriteMetadata(files)
if err != nil {
return errors.Wrap(err, "cannot write metadata")
}
ec.Spin("Writing new config file...")
err = ec.WriteConfig(nil)
if err != nil {
return errors.Wrap(err, "cannot write config file")
}
ec.Spinner.Stop()
ec.Logger.Infoln("Updated config to version 2")
return nil
},
}
f := scriptsUpdateConfigV2Cmd.Flags()
f.StringVar(&metadataDir, "metadata-dir", "metadata", "")
f.String("endpoint", "", "http(s) endpoint for Hasura GraphQL Engine")
f.String("admin-secret", "", "admin secret for Hasura GraphQL Engine")
f.String("access-key", "", "access key for Hasura GraphQL Engine")
f.MarkDeprecated("access-key", "use --admin-secret instead")
// need to create a new viper because https://github.com/spf13/viper/issues/233
v.BindPFlag("endpoint", f.Lookup("endpoint"))
v.BindPFlag("admin_secret", f.Lookup("admin-secret"))
v.BindPFlag("access_key", f.Lookup("access-key"))
return scriptsUpdateConfigV2Cmd
}
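
The core of step 4 above is pulling run_sql statements out of the old metadata migration yaml. A self-contained sketch of that conversion, using local stand-in types instead of the real hasuradb types and a made-up up.yaml:

package main

import (
	"fmt"

	yaml "gopkg.in/yaml.v2"
)

// Trimmed stand-ins for hasuradb.HasuraInterfaceQuery / RunSQLInput.
type query struct {
	Type string      `yaml:"type"`
	Args interface{} `yaml:"args"`
}

type runSQLArgs struct {
	SQL string `yaml:"sql"`
}

func main() {
	// A v1-style up.yaml mixing a metadata-only step with a run_sql step.
	src := []byte(`
- type: add_existing_table_or_view
  args:
    name: test
- type: run_sql
  args:
    sql: CREATE TABLE "author"("id" serial NOT NULL, PRIMARY KEY ("id"))
`)
	var queries []query
	if err := yaml.Unmarshal(src, &queries); err != nil {
		panic(err)
	}
	var sqlUp string
	for _, q := range queries {
		if q.Type != "run_sql" {
			continue // metadata-only steps do not end up in the SQL migration
		}
		raw, err := yaml.Marshal(q.Args)
		if err != nil {
			panic(err)
		}
		var args runSQLArgs
		if err := yaml.Unmarshal(raw, &args); err != nil {
			panic(err)
		}
		sqlUp += "\n" + args.SQL
	}
	fmt.Println(sqlUp) // only the run_sql statement survives as up.sql content
}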

View File

@ -21,7 +21,7 @@ func NewVersionCmd(ec *cli.ExecutionContext) *cobra.Command {
err := ec.Validate()
if err == nil {
ec.Logger.
WithField("endpoint", ec.ServerConfig.Endpoint).
WithField("endpoint", ec.Config.ServerConfig.Endpoint).
WithField("version", ec.Version.GetServerVersion()).
Info("hasura graphql engine")
}

View File

@ -52,7 +52,6 @@ func (ec *ExecutionContext) validateDirectory() error {
// directory.
var filesRequired = []string{
"config.yaml",
"migrations",
}
// recursivelyValidateDirectory tries to parse 'startFrom' as a project

View File

@ -4,7 +4,6 @@ go 1.13
require (
github.com/Masterminds/semver v1.5.0
github.com/Microsoft/go-winio v0.4.14 // indirect
github.com/ahmetb/go-linq v3.0.0+incompatible
github.com/andybalholm/cascadia v1.1.0 // indirect
github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a
@ -12,10 +11,6 @@ require (
github.com/aws/aws-sdk-go v1.25.50 // indirect
github.com/briandowns/spinner v1.8.0
github.com/disintegration/imaging v1.6.2 // indirect
github.com/docker/distribution v2.7.1+incompatible // indirect
github.com/docker/docker v1.13.1
github.com/docker/go-connections v0.4.0
github.com/docker/go-units v0.4.0 // indirect
github.com/elazarl/go-bindata-assetfs v1.0.0
github.com/elazarl/goproxy v0.0.0-20191011121108-aa519ddbe484 // indirect
github.com/fatih/color v1.7.0
@ -27,17 +22,16 @@ require (
github.com/gofrs/uuid v3.2.0+incompatible
github.com/gorilla/sessions v1.2.0 // indirect
github.com/gosimple/slug v1.9.0 // indirect
github.com/graphql-go/graphql v0.7.8
github.com/jinzhu/configor v1.1.1 // indirect
github.com/jinzhu/gorm v1.9.11 // indirect
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0
github.com/lib/pq v1.1.1
github.com/manifoldco/promptui v0.6.0
github.com/mattn/go-colorable v0.1.4
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b
github.com/microcosm-cc/bluemonday v1.0.2 // indirect
github.com/mitchellh/go-homedir v1.1.0
github.com/oliveagle/jsonpath v0.0.0-20180606110733-2e52cf6e6852
github.com/opencontainers/go-digest v1.0.0-rc1 // indirect
github.com/parnurzeal/gorequest v0.2.16
github.com/pkg/errors v0.8.1
github.com/qor/admin v0.0.0-20191205023516-9032e7fec172 // indirect
@ -63,7 +57,8 @@ require (
github.com/theplant/htmltestingutils v0.0.0-20190423050759-0e06de7b6967 // indirect
github.com/theplant/testingutils v0.0.0-20190603093022-26d8b4d95c61 // indirect
github.com/yosssi/gohtml v0.0.0-20190915184251-7ff6f235ecaf // indirect
golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4
gopkg.in/src-d/go-git.v4 v4.13.1
gopkg.in/yaml.v2 v2.2.7
moul.io/http2curl v1.0.0 // indirect
)

View File

@ -6,13 +6,13 @@ github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=
github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/Microsoft/go-winio v0.4.14 h1:+hMXMk01us9KgxGb7ftKQt2Xpf5hH/yky+TDA+qxleU=
github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
github.com/ahmetb/go-linq v3.0.0+incompatible h1:qQkjjOXKrKOTy83X8OpRmnKflXKQIL/mC/gMVVDMhOA=
github.com/ahmetb/go-linq v3.0.0+incompatible/go.mod h1:PFffvbdbtw+QTB0WKRP0cNht7vnCfnGlEpak/DVg5cY=
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs=
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs=
github.com/alecthomas/gometalinter v3.0.0+incompatible h1:e9Zfvfytsw/e6Kd/PYd75wggK+/kX5Xn8IYDUKyc5fU=
github.com/alecthomas/gometalinter v3.0.0+incompatible/go.mod h1:qfIpQGGz3d+NmgyPBqv+LSh50emm1pt72EtcX2vKYQk=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
@ -21,8 +21,12 @@ github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d h1:UQZhZ2O0vMHr2c
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/andybalholm/cascadia v1.1.0 h1:BuuO6sSfQNFRu1LppgbD25Hr2vLYW25JvxHs5zzsLTo=
github.com/andybalholm/cascadia v1.1.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a h1:pv34s756C4pEXnjgPfGYgdhg/ZdajGhyOvzx8k+23nw=
github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A=
github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a h1:idn718Q4B6AGu/h5Sxe66HYVdqdGu2l9Iebqhi/AEoA=
@ -50,6 +54,7 @@ github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@ -59,14 +64,6 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZm
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/docker/distribution v2.7.1+incompatible h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug=
github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/docker v1.13.1 h1:IkZjBSIc8hBjLpqeAbeE5mca5mNgeatLHBy3GO78BWo=
github.com/docker/docker v1.13.1/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=
github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
@ -76,10 +73,14 @@ github.com/elazarl/goproxy v0.0.0-20191011121108-aa519ddbe484 h1:pEtiCjIXx3RvGjl
github.com/elazarl/goproxy v0.0.0-20191011121108-aa519ddbe484/go.mod h1:Ro8st/ElPeALwNFlcTpWmkr6IoMFfkjXAvTHpevnDsM=
github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2 h1:dWB6v3RcOy03t/bUadywsbyrQwCqZeNIEX6M1OtSZOM=
github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2/go.mod h1:gNh8nYJoAm43RfaxurUnxr+N1PwuFV3ZMl/efxlIlY8=
github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5 h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y=
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a2zkGnVExMxdzMo3M0Hi/3sEU+cWnZpSni0O6/Yb/P0=
github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
@ -96,6 +97,8 @@ github.com/gin-gonic/contrib v0.0.0-20191209060500-d6e26eeaa607/go.mod h1:iqneQ2
github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM=
github.com/gin-gonic/gin v1.5.0 h1:fi+bqFAx/oLK54somfCtEZs9HeH1LHVoEPUgARpTqyc=
github.com/gin-gonic/gin v1.5.0/go.mod h1:Nd6IXA8m5kNZdNEHMBd93KT+mdY3+bewLgRvmCsR2Do=
github.com/gliderlabs/ssh v0.2.2 h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0=
github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
@ -124,6 +127,8 @@ github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Z
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
@ -144,6 +149,8 @@ github.com/gorilla/sessions v1.2.0/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/z
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/gosimple/slug v1.9.0 h1:r5vDcYrFz9BmfIAMC829un9hq7hKM4cHUrsv36LbEqs=
github.com/gosimple/slug v1.9.0/go.mod h1:AMZ+sOVe65uByN3kgEyf9WEBKBCSS+dJjMX9x4vDJbg=
github.com/graphql-go/graphql v0.7.8 h1:769CR/2JNAhLG9+aa8pfLkKdR0H+r5lsQqling5WwpU=
github.com/graphql-go/graphql v0.7.8/go.mod h1:k6yrAYQaSP59DC5UVxbgxESlmVyojThKdORUqGDGmrI=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
@ -153,6 +160,9 @@ github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jinzhu/configor v1.1.1 h1:gntDP+ffGhs7aJ0u8JvjCDts2OsxsI7bnz3q+jC+hSY=
github.com/jinzhu/configor v1.1.1/go.mod h1:nX89/MOmDba7ZX7GCyU/VIaQ2Ar2aizBl2d3JLF/rDc=
github.com/jinzhu/gorm v1.9.11 h1:gaHGvE+UnWGlbWG4Y3FUwY1EcZ5n6S9WtqBA/uySMLE=
@ -175,6 +185,8 @@ github.com/juju/ansiterm v0.0.0-20180109212912-720a0952cc2a/go.mod h1:UJSiEoRfvx
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0 h1:iQTw/8FWTuc7uiaSepXwyf3o52HaUYcV+Tu66S3F5GA=
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8=
github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd h1:Coekwdh0v2wtGp9Gmz1Ze3eVRAWJMLokvN3QjdzCHLY=
github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk=
@ -183,6 +195,7 @@ github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFB
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/leodido/go-urn v1.1.0 h1:Sm1gr51B1kKyfD2BlRcLSiEkffoG96g6TPv6eRoEiB8=
@ -231,11 +244,10 @@ github.com/oliveagle/jsonpath v0.0.0-20180606110733-2e52cf6e6852/go.mod h1:eqOVx
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/opencontainers/go-digest v1.0.0-rc1 h1:WzifXhOVOEOuFYOJAW6aQqW0TooG2iki3E3Ii+WN7gQ=
github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
github.com/parnurzeal/gorequest v0.2.16 h1:T/5x+/4BT+nj+3eSknXmCTnEVGSzFzPGdpqmUVVZXHQ=
github.com/parnurzeal/gorequest v0.2.16/go.mod h1:3Kh2QUMJoqw3icWAecsyzkpY7UzRfDhbRdTjtNwNiUE=
github.com/pelletier/go-buffruneio v0.2.0/go.mod h1:JkE26KsDizTr40EUHkXVtNPvgGtbSNq5BcowyYOWdKo=
github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
@ -290,8 +302,9 @@ github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6So
github.com/rogpeppe/go-charset v0.0.0-20180617210344-2471d30d28b4/go.mod h1:qgYeAmZ5ZIpBWTGllZSQnw97Dj+woV0toclVaRGI8pc=
github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/skratchdot/open-golang v0.0.0-20190402232053-79abb63cd66e h1:VAzdS5Nw68fbf5RZ8RDVlUvPXNU6Z3jtPCK/qvm4FoQ=
@ -316,8 +329,11 @@ github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
github.com/spf13/viper v1.6.1 h1:VPZzIkznI1YhVMRi6vNFLHSwhnhReBfgTxIPccpfdZk=
github.com/spf13/viper v1.6.1/go.mod h1:t3iDnF5Jlj76alVNuyFBk5oUMCvsrkbvZK0WQdfDi5k=
github.com/src-d/gcfg v1.4.0 h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4=
github.com/src-d/gcfg v1.4.0/go.mod h1:p/UMsR43ujA89BJY9duynAwIpvqEujIH/jFlfL7jWoI=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
@ -339,6 +355,8 @@ github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVM
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
github.com/ugorji/go/codec v1.1.7 h1:2SvQaVZ1ouYrrKKwoSk2pzd4A9evlKJb9oTL+OaLUSs=
github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY=
github.com/xanzy/ssh-agent v0.2.1 h1:TCbipTQL2JiiCprBWx9frJ2eJlCYT00NmctrHxVAr70=
github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/yosssi/gohtml v0.0.0-20190915184251-7ff6f235ecaf h1:VA200mPTYh9FWY8zKX5ctXCtNk78HUez8ecTdsQGhoo=
@ -350,9 +368,12 @@ go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/
go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c h1:Vj5n4GlwjmQteupaxJ9+0FNOmBrHfq7vN4btdGoDZgI=
golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4 h1:HuIa8hRrWRSrqYzx1qI49NNxhdi2PrY7gxVSq1JjLDc=
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
@ -371,15 +392,20 @@ golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73r
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092 h1:4QSRKanuywn15aTZvI/mIDEgPQpswuFndXpOj3rKEco=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80 h1:Ao/3l156eZf2AW5wK8a7/smtodRU+gha3+BeqJ69lRk=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -388,19 +414,25 @@ golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5h
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190221075227-b4e8571b14e0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a h1:aYOabOQFp6Vj6W1F80affTUvO9UxmJRx8K0gsfABByQ=
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2 h1:z99zHgr7hKfrUcX/KsoJk5FJfjTceCKIp96+biqP4To=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181122213734-04b5d21e00f1/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
@ -408,6 +440,8 @@ golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384 h1:TFlARGu6Czu1z7q93HTxcP1P+/ZFC/IKythI5RzrnRg=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190729092621-ff9f1409240a h1:mEQZbbaBjWyLNy0tmZmgEuQAR8XOQ3hL8GYi3J/NG64=
golang.org/x/tools v0.0.0-20190729092621-ff9f1409240a/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508=
@ -433,7 +467,15 @@ gopkg.in/go-playground/validator.v9 v9.29.1/go.mod h1:+c9/zcJMFNgbLvly1L1V+PpxWd
gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno=
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/src-d/go-billy.v4 v4.3.2 h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg=
gopkg.in/src-d/go-billy.v4 v4.3.2/go.mod h1:nDjArDMp+XMs1aFAESLRjfGSgfvoYN0hDfzEk0GjC98=
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0 h1:ivZFOIltbce2Mo8IjzUHAFoq/IylO9WHhNOAJK+LsJg=
gopkg.in/src-d/go-git-fixtures.v3 v3.5.0/go.mod h1:dLBcvytrw/TYZsNTWCnkNF2DSIlzWYqTe3rJR56Ac7g=
gopkg.in/src-d/go-git.v4 v4.13.1 h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE=
gopkg.in/src-d/go-git.v4 v4.13.1/go.mod h1:nx5NYcxdKxq5fpltdHnPa2Exj4Sx0EclMWZQbYDu2z8=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

View File

@ -0,0 +1,45 @@
package integrationtest
import (
"math/rand"
"testing"
"time"
"github.com/hasura/graphql-engine/cli"
)
func init() {
rand.Seed(time.Now().UTC().UnixNano())
}
func TestPrepare(t *testing.T, ec *cli.ExecutionContext) {
err := ec.Prepare()
if err != nil {
t.Fatalf("prepare failed: %v", err)
}
if ec.CMDName == "" {
t.Fatalf("expected CMDName, got: %v", ec.CMDName)
}
if ec.Spinner == nil {
t.Fatal("got spinner empty")
}
if ec.Logger == nil {
t.Fatal("got empty logger")
}
if ec.GlobalConfigDir == "" {
t.Fatalf("global config dir: expected $HOME/%s, got %s", cli.GlobalConfigDirName, ec.GlobalConfigDir)
}
if ec.GlobalConfigFile == "" {
t.Fatalf("global config file: expected $HOME/%s/%s, got %s", cli.GlobalConfigDirName, cli.GlobalConfigFileName, ec.GlobalConfigFile)
}
if ec.Config == nil {
t.Fatal("got empty Config")
}
}
func TestValidate(t *testing.T, ec *cli.ExecutionContext) {
err := ec.Validate()
if err != nil {
t.Fatalf("validate failed: %v", err)
}
}

View File

@ -0,0 +1,139 @@
package integrationtest_test
import (
"io/ioutil"
"math/rand"
"os"
"path/filepath"
"strconv"
"testing"
"time"
"github.com/briandowns/spinner"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/commands"
integrationtest "github.com/hasura/graphql-engine/cli/integration_test"
"github.com/spf13/viper"
v1 "github.com/hasura/graphql-engine/cli/integration_test/v1"
v2 "github.com/hasura/graphql-engine/cli/integration_test/v2"
"github.com/sirupsen/logrus/hooks/test"
)
func init() {
rand.Seed(time.Now().UTC().UnixNano())
}
func TestCommands(t *testing.T) {
// Run tests only for config version v1
t.Run("config=v1", func(t *testing.T) {
// Initialize ec
ec := cli.NewExecutionContext()
ec.Config = &cli.Config{}
logger, _ := test.NewNullLogger()
ec.Logger = logger
ec.Spinner = spinner.New(spinner.CharSets[7], 100*time.Millisecond)
ec.Spinner.Writer = ioutil.Discard
ec.Viper = viper.New()
initDir := filepath.Join(os.TempDir(), "hasura-cli-test-"+strconv.Itoa(rand.Intn(1000)))
defer os.RemoveAll(initDir)
// This will prepare the execution context, so no need to run ec.Prepare() on all the other tests
t.Run("prepare", func(t *testing.T) {
integrationtest.TestPrepare(t, ec)
})
skip(t)
// This will init the project dir
t.Run("init command", func(t *testing.T) {
v1.TestInitCmd(t, ec, initDir)
})
skip(t)
// This will validate the project dir
t.Run("validate", func(t *testing.T) {
integrationtest.TestValidate(t, ec)
})
skip(t)
t.Run("console command", func(t *testing.T) {
v1.TestConsoleCmd(t, ec)
})
skip(t)
t.Run("migrate commands", func(t *testing.T) {
v1.TestMigrateCmd(t, ec)
})
skip(t)
t.Run("metadata commands", func(t *testing.T) {
v1.TestMetadataCmd(t, ec)
})
})
// Run tests only for config version v2
t.Run("config=v2", func(t *testing.T) {
ec := cli.NewExecutionContext()
ec.Config = &cli.Config{}
logger, _ := test.NewNullLogger()
ec.Logger = logger
ec.Spinner = spinner.New(spinner.CharSets[7], 100*time.Millisecond)
ec.Spinner.Writer = ioutil.Discard
ec.Viper = viper.New()
initDir := filepath.Join(os.TempDir(), "hasura-cli-test-"+strconv.Itoa(rand.Intn(1000)))
defer os.RemoveAll(initDir)
// This will prepare the execution context, so no need to run ec.Prepare() on all the other tests
t.Run("prepare", func(t *testing.T) {
integrationtest.TestPrepare(t, ec)
})
skip(t)
t.Run("cli-ext-plugin-install", func(t *testing.T) {
installOpts := &commands.PluginInstallOptions{
EC: ec,
Name: "cli-ext",
ManifestFile: "/build/_cli_ext_output/manifest-dev.yaml",
}
err := installOpts.Run()
if err != nil {
t.Fatalf("unable to install cli-ext plugin, got %v", err)
}
})
skip(t)
// This will init the project dir
t.Run("init command", func(t *testing.T) {
v2.TestInitCmd(t, ec, initDir)
})
skip(t)
// This will validate the project dir
t.Run("validate", func(t *testing.T) {
integrationtest.TestValidate(t, ec)
})
skip(t)
t.Run("console command", func(t *testing.T) {
v2.TestConsoleCmd(t, ec)
})
skip(t)
t.Run("migrate commands", func(t *testing.T) {
v2.TestMigrateCmd(t, ec)
})
skip(t)
t.Run("metadata commands", func(t *testing.T) {
v2.TestMetadataCmd(t, ec)
})
})
}
func skip(t *testing.T) {
if t.Failed() {
t.SkipNow()
}
}
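
The subtests above are deliberately sequential: each step reuses state set up by the previous one (the prepared execution context, the initialized project directory), and the skip helper aborts the rest of the chain as soon as any step fails. A minimal standalone sketch of that fail-fast pattern, with hypothetical test names that are not part of this diff:

package pipeline_test

import "testing"

// skip aborts the remaining steps once an earlier subtest has failed.
// t.Run blocks until its subtest finishes, and a subtest failure marks
// the parent test as failed, so t.Failed() is accurate at this point.
func skip(t *testing.T) {
	if t.Failed() {
		t.SkipNow()
	}
}

func TestPipeline(t *testing.T) {
	t.Run("prepare", func(t *testing.T) {
		// set up state that later steps depend on
	})
	skip(t) // if "prepare" failed, do not run the dependent steps
	t.Run("use-prepared-state", func(t *testing.T) {
		// runs only when "prepare" succeeded
	})
}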

View File

@ -0,0 +1,38 @@
package v1
import (
"os"
"testing"
"time"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/commands"
)
func TestConsoleCmd(t *testing.T, ec *cli.ExecutionContext) {
opts := &commands.ConsoleOptions{
EC: ec,
APIPort: "9693",
ConsolePort: "9695",
Address: "localhost",
DontOpenBrowser: true,
APIServerInterruptSignal: make(chan os.Signal),
ConsoleServerInterruptSignal: make(chan os.Signal),
}
go func() {
t.Log("waiting for console to start")
for opts.WG == nil {
time.Sleep(1 * time.Second)
}
opts.APIServerInterruptSignal <- os.Interrupt
opts.ConsoleServerInterruptSignal <- os.Interrupt
close(opts.APIServerInterruptSignal)
close(opts.ConsoleServerInterruptSignal)
}()
err := opts.Run()
if err != nil {
t.Fatalf("failed running console: %v", err)
}
// TODO: (shahidhk) curl the console endpoint for 200 response
}
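
The goroutine above encodes the shutdown protocol for the console test: it polls until the command has started its servers (opts.WG becomes non-nil), then sends os.Interrupt on both interrupt channels so that opts.Run() returns. A minimal standalone sketch of that pattern, with hypothetical names and a simplified stand-in for Run:

package shutdown_test

import (
	"os"
	"testing"
	"time"
)

// runServer stands in for a blocking Run(): it waits until it is told to stop.
func runServer(interrupt chan os.Signal) error {
	<-interrupt
	return nil
}

func TestShutdown(t *testing.T) {
	interrupt := make(chan os.Signal)
	go func() {
		// give runServer a moment to start before asking it to stop
		time.Sleep(100 * time.Millisecond)
		interrupt <- os.Interrupt
		close(interrupt)
	}()
	if err := runServer(interrupt); err != nil {
		t.Fatalf("server did not shut down cleanly: %v", err)
	}
}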

View File

@ -0,0 +1,43 @@
package v1
import (
"math/rand"
"os"
"path/filepath"
"strconv"
"testing"
"time"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/commands"
)
func init() {
rand.Seed(time.Now().UTC().UnixNano())
}
func TestInitCmd(t *testing.T, ec *cli.ExecutionContext, initDir string) {
tt := []struct {
name string
opts *commands.InitOptions
err error
}{
{"only-init-dir", &commands.InitOptions{
EC: ec,
Version: "1",
Endpoint: os.Getenv("HASURA_GRAPHQL_TEST_ENDPOINT"),
AdminSecret: os.Getenv("HASURA_GRAPHQL_TEST_ADMIN_SECRET"),
InitDir: filepath.Join(os.TempDir(), "hasura-cli-test-"+strconv.Itoa(rand.Intn(1000))),
}, nil},
}
for _, tc := range tt {
t.Run(tc.name, func(t *testing.T) {
err := tc.opts.Run()
if err != tc.err {
t.Fatalf("%s: expected %v, got %v", tc.name, tc.err, err)
}
// TODO: (shahidhk) need to verify the contents of the spec generated
})
}
}

View File

@ -0,0 +1,129 @@
package v1
import (
"bytes"
"io/ioutil"
"os"
"path/filepath"
"testing"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/commands"
"github.com/hasura/graphql-engine/cli/util"
"github.com/stretchr/testify/assert"
)
type metadataInterface interface {
Run() error
}
func TestMetadataCmd(t *testing.T, ec *cli.ExecutionContext) {
currDir, _ := os.Getwd()
actualMetadataFile := filepath.Join(ec.MigrationDir, "metadata.yaml")
tt := []struct {
name string
opts metadataInterface
err error
copyMetadataFile string
expectedMetadataFile string
}{
{
"metadata-export",
&commands.MetadataExportOptions{
EC: ec,
ActionType: "export",
},
nil,
"",
filepath.Join(currDir, getMetadataDir(), "1_metadata.yaml"),
},
{
"metadata-apply",
&commands.MetadataApplyOptions{
EC: ec,
ActionType: "apply",
},
nil,
filepath.Join(currDir, getMetadataDir(), "2_metadata.yaml"),
"",
},
{
"metadata-export",
&commands.MetadataExportOptions{
EC: ec,
ActionType: "export",
},
nil,
"",
filepath.Join(currDir, getMetadataDir(), "2_metadata.yaml"),
},
{
"metadata-clear",
&commands.MetadataClearOptions{
EC: ec,
ActionType: "clear",
},
nil,
"",
"",
},
{
"metadata-export",
&commands.MetadataExportOptions{
EC: ec,
ActionType: "export",
},
nil,
"",
filepath.Join(currDir, getMetadataDir(), "1_metadata.yaml"),
},
{
"metadata-diff",
&commands.MetadataDiffOptions{
EC: ec,
Args: []string{filepath.Join(currDir, getMetadataDir(), "2_metadata.yaml")},
Output: new(bytes.Buffer),
},
nil,
"",
"",
},
{
"down-all-migrations",
&commands.MigrateApplyOptions{
EC: ec,
DownMigration: "all",
},
nil,
"",
"",
},
}
for _, tc := range tt {
t.Run(tc.name, func(t *testing.T) {
if tc.copyMetadataFile != "" {
err := util.CopyFile(tc.copyMetadataFile, actualMetadataFile)
if err != nil {
t.Fatalf("%s: unable to copy metadata file, got %v", tc.name, err)
}
}
err := tc.opts.Run()
if err != tc.err {
t.Fatalf("%s: expected %v, got %v", tc.name, tc.err, err)
}
if tc.expectedMetadataFile != "" {
assert.FileExists(t, actualMetadataFile)
expectedByt, err := ioutil.ReadFile(tc.expectedMetadataFile)
if err != nil {
t.Fatalf("%s: unable to read expected metadata file, got %v", tc.name, err)
}
actualByt, err := ioutil.ReadFile(actualMetadataFile)
if err != nil {
t.Fatalf("%s: unable to read actual metadata file, got %v", tc.name, err)
}
assert.Equal(t, string(expectedByt), string(actualByt))
}
})
}
}

View File

@ -0,0 +1,2 @@
version: 2
tables: []

View File

@ -0,0 +1,5 @@
version: 2
tables:
- table:
schema: public
name: test

View File

@ -0,0 +1,7 @@
// +build !latest_release

package v1

func getMetadataDir() string {
	return "v1/metadata"
}

View File

@ -0,0 +1,7 @@
// +build latest_release

package v1

func getMetadataDir() string {
	return "v1/metadata_latest"
}
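
These two one-function files use Go build constraints to pick the fixture directory at compile time: the default build compares against v1/metadata, while a build tagged latest_release (presumably used when the suite is run against the most recent released version) compares against v1/metadata_latest. A minimal sketch of the same mechanism with hypothetical names; the alternate file is selected simply by passing the tag to go test:

// fixtures_default.go -- compiled unless the latest_release tag is set
// +build !latest_release

package fixtures

// Dir returns the fixture directory for the default build.
// Running `go test -tags latest_release ./...` would instead compile a
// sibling file carrying the opposite constraint and a different path.
func Dir() string {
	return "testdata/default"
}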

View File

@ -0,0 +1,2 @@
version: 2
tables: []

View File

@ -0,0 +1,5 @@
version: 2
tables:
- table:
schema: public
name: test

View File

@ -0,0 +1,190 @@
package v1
import (
"os"
"path/filepath"
"testing"
"github.com/ghodss/yaml"
"github.com/hasura/graphql-engine/cli"
"github.com/hasura/graphql-engine/cli/commands"
"github.com/hasura/graphql-engine/cli/migrate"
"github.com/hasura/graphql-engine/cli/util"
"github.com/stretchr/testify/assert"
)
type migrateInterface interface {
Run() error
}
func TestMigrateCmd(t *testing.T, ec *cli.ExecutionContext) {
// copy migrations to ec.Execution.Directory/migrations
os.RemoveAll(ec.MigrationDir)
currDir, _ := os.Getwd()
err := util.CopyDir(filepath.Join(currDir, "v1/migrations"), ec.MigrationDir)
if err != nil {
t.Fatalf("unable to copy migrations directory %v", err)
}
tt := []struct {
name string
opts migrateInterface
err error
status migrate.Status
}{
{"apply-up-all-migrations", &commands.MigrateApplyOptions{
EC: ec,
}, nil, migrate.Status{
Index: []uint64{1, 2},
Migrations: map[uint64]*migrate.MigrationStatus{
1: &migrate.MigrationStatus{
IsApplied: true,
IsPresent: true,
},
2: &migrate.MigrationStatus{
IsApplied: true,
IsPresent: true,
},
},
}},
{"apply-down-1-migration", &commands.MigrateApplyOptions{
EC: ec,
DownMigration: "1",
}, nil, migrate.Status{
Index: []uint64{1, 2},
Migrations: map[uint64]*migrate.MigrationStatus{
1: &migrate.MigrationStatus{
IsApplied: true,
IsPresent: true,
},
2: &migrate.MigrationStatus{
IsApplied: false,
IsPresent: true,
},
},
}},
{"apply-down-all-migration", &commands.MigrateApplyOptions{
EC: ec,
DownMigration: "all",
}, nil, migrate.Status{
Index: []uint64{1, 2},
Migrations: map[uint64]*migrate.MigrationStatus{
1: &migrate.MigrationStatus{
IsApplied: false,
IsPresent: true,
},
2: &migrate.MigrationStatus{
IsApplied: false,
IsPresent: true,
},
},
}},
{"apply-goto-2-migration", &commands.MigrateApplyOptions{
EC: ec,
GotoVersion: "2",
}, nil, migrate.Status{
Index: []uint64{1, 2},
Migrations: map[uint64]*migrate.MigrationStatus{
1: &migrate.MigrationStatus{
IsApplied: true,
IsPresent: true,
},
2: &migrate.MigrationStatus{
IsApplied: true,
IsPresent: true,
},
},
}},
{"apply-goto-nil-migration", &commands.MigrateApplyOptions{
EC: ec,
GotoVersion: "-1",
}, nil, migrate.Status{
Index: []uint64{1, 2},
Migrations: map[uint64]*migrate.MigrationStatus{
1: &migrate.MigrationStatus{
IsApplied: false,
IsPresent: true,
},
2: &migrate.MigrationStatus{
IsApplied: false,
IsPresent: true,
},
},
}},
{"apply-up-1-migration", &commands.MigrateApplyOptions{
EC: ec,
UpMigration: "1",
}, nil, migrate.Status{
Index: []uint64{1, 2},
Migrations: map[uint64]*migrate.MigrationStatus{
1: &migrate.MigrationStatus{
IsApplied: true,
IsPresent: true,
},
2: &migrate.MigrationStatus{
IsApplied: false,
IsPresent: true,
},
},
}},
{"apply-version-2-up-migration", &commands.MigrateApplyOptions{
EC: ec,
VersionMigration: "2",
}, nil, migrate.Status{
Index: []uint64{1, 2},
Migrations: map[uint64]*migrate.MigrationStatus{
1: &migrate.MigrationStatus{
IsApplied: true,
IsPresent: true,
},
2: &migrate.MigrationStatus{
IsApplied: true,
IsPresent: true,
},
},
}},
{"apply-version-2-down-migration", &commands.MigrateApplyOptions{
EC: ec,
VersionMigration: "2",
MigrationType: "down",
}, nil, migrate.Status{
Index: []uint64{1, 2},
Migrations: map[uint64]*migrate.MigrationStatus{
1: &migrate.MigrationStatus{
IsApplied: true,
IsPresent: true,
},
2: &migrate.MigrationStatus{
IsApplied: false,
IsPresent: true,
},
},
}},
}
for _, tc := range tt {
t.Run(tc.name, func(t *testing.T) {
err := tc.opts.Run()
if err != tc.err {
t.Fatalf("%s: expected %v, got %v", tc.name, tc.err, err)
}
expectedStatusByt, err := yaml.Marshal(tc.status)
if err != nil {
t.Fatal(err)
}
statusOpts := &commands.MigrateStatusOptions{
EC: ec,
}
actualStatus, err := statusOpts.Run()
if err != nil {
t.Fatalf("%s: unable to fetch migrate status, got %v", tc.name, err)
}
actualStatusByt, err := yaml.Marshal(actualStatus)
if err != nil {
t.Fatal(err)
}
assert.Equal(t, string(expectedStatusByt), string(actualStatusByt))
})
}
}

View File

@ -0,0 +1 @@
DROP TABLE "test";

View File

@ -0,0 +1 @@
CREATE TABLE "test" ("id" serial NOT NULL, PRIMARY KEY ("id") );

View File

@ -0,0 +1 @@
DROP TABLE "author";

Some files were not shown because too many files have changed in this diff.