Merge branch 'main' into stable
GitOrigin-RevId: 22386edc40cfed87b222c950dec915b18764fe2f
@ -19,7 +19,7 @@
|
||||
|
||||
You can also install a specific version of the CLI by providing the `VERSION` variable:
|
||||
```bash
|
||||
curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.35.0 bash
|
||||
curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.36.0 bash
|
||||
```
|
||||
|
||||
- Windows
|
||||
|
@ -44,7 +44,7 @@ log "Selecting version..."
|
||||
# version=${VERSION:-`echo $(curl -s -f -H 'Content-Type: application/json' \
|
||||
# https://releases.hasura.io/graphql-engine?agent=cli-get.sh) | sed -n -e "s/^.*\"$release\":\"\([^\",}]*\)\".*$/\1/p"`}
|
||||
|
||||
version=${VERSION:-v2.35.0}
|
||||
version=${VERSION:-v2.36.0}
|
||||
|
||||
if [ ! $version ]; then
|
||||
log "${YELLOW}"
|
||||
@ -62,7 +62,7 @@ log "Selected version: $version"
|
||||
|
||||
log "${YELLOW}"
|
||||
log NOTE: Install a specific version of the CLI by using VERSION variable
|
||||
log 'curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.35.0 bash'
|
||||
log 'curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.36.0 bash'
|
||||
log "${NC}"
|
||||
|
||||
# check for existing hasura installation
|
||||
|
1
docs/.gitignore
vendored
@ -10,6 +10,7 @@
|
||||
|
||||
# Misc
|
||||
.DS_Store
|
||||
.env
|
||||
.env.local
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
|
@ -499,7 +499,7 @@ Supported in `v2.0.0-alpha.3` and above.
|
||||
| --------------- | -------- | ------------------------------------------------------ | --------------------------------------------------------- |
|
||||
| remote_table | true | [TableName](#tablename) | The table to which the relationship has to be established |
|
||||
| column_mapping | true | Object ([PGColumn](#pgcolumn) : [PGColumn](#pgcolumn)) | Mapping of columns from current table to remote table |
|
||||
| insertion_order | false | [InsertOrder](#insertorder) | insertion order: before or after parent (default: before) |
|
||||
| insertion_order | false | [InsertOrder](#insertorder) | insertion order: before or after parent (default: "before_parent") |
|
||||
|
||||
## InsertOrder {#insertorder}
|
||||
|
||||
|
@ -55,6 +55,22 @@ Currently, Hasura supports read-only queries, relationships, and permissions on
|
||||
|
||||
:::
|
||||
|
||||
## Managing data with the Hasura Console
|
||||
|
||||
The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at
|
||||
`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use
|
||||
[Hasura Cloud](https://cloud.hasura.io).
|
||||
|
||||
The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API
|
||||
in the `API` tab and interact with it using the GraphiQL interface.
|
||||
|
||||
:::info Console support
|
||||
|
||||
We recommend using your preferred Amazon Athena client instead. The Hasura Console is designed to be a tool for managing
|
||||
your GraphQL API, and not a full-fledged database management tool.
|
||||
|
||||
:::
|
||||
|
||||
## Keep up to date
|
||||
|
||||
If you'd like to stay informed about the status of Amazon Athena support, subscribe to our newsletter and join our
|
||||
|
@ -39,14 +39,35 @@ Here is how you can get started with Hasura and BigQuery:
|
||||
|
||||
-->
|
||||
|
||||
## Managing data with the Hasura Console
|
||||
|
||||
The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at
|
||||
`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use
|
||||
[Hasura Cloud](https://cloud.hasura.io).
|
||||
|
||||
The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API
|
||||
in the `API` tab and interact with it using the GraphiQL interface.
|
||||
|
||||
:::info Console support
|
||||
|
||||
We recommend using your preferred BigQuery client instead. The Hasura Console is designed to be a tool for managing your
|
||||
GraphQL API, and not a full-fledged database management tool.
|
||||
|
||||
:::
|
||||
|
||||
## Minimum required IAM permissions
|
||||
|
||||
* BigQuery queries through Hasura require the `bigquery.jobs.create` and `bigquery.jobs.get` permissions to send a job to the BigQuery servers.
|
||||
* The `bigquery.tables.getData` permission allows Hasura to query your BigQuery data source. Note that mutations are not currently supported for BigQuery, and so no corresponding `updateData` permission is required.
|
||||
* To use the Hasura Console to edit your data source, several different permissions may be required depending on your actions:
|
||||
* `bigquery.datasets.create` and `bigquery.datasets.delete` for creating and deleting datasets.
|
||||
* `bigquery.routines.create`, `bigquery.routines.update`, and `bigquery.routines.delete` for managing user-defined functions and stored procedures.
|
||||
* `bigquery.table.create`, `bigquery.tables.list`, `bigquery.tables.get`, `bigquery.tables.delete`, and `bigquery.tables.update` to manage the dataset definition.
|
||||
- BigQuery queries through Hasura require the `bigquery.jobs.create` and `bigquery.jobs.get` permissions to send a job
|
||||
to the BigQuery servers.
|
||||
- The `bigquery.tables.getData` permission allows Hasura to query your BigQuery data source. Note that mutations are not
|
||||
currently supported for BigQuery, and so no corresponding `updateData` permission is required.
|
||||
- To use the Hasura Console to edit your data source, several different permissions may be required depending on your
|
||||
actions:
|
||||
- `bigquery.datasets.create` and `bigquery.datasets.delete` for creating and deleting datasets.
|
||||
- `bigquery.routines.create`, `bigquery.routines.update`, and `bigquery.routines.delete` for managing user-defined
|
||||
functions and stored procedures.
|
||||
- `bigquery.table.create`, `bigquery.tables.list`, `bigquery.tables.get`, `bigquery.tables.delete`, and
|
||||
`bigquery.tables.update` to manage the dataset definition.
|
||||
|
||||
## Supported features
|
||||
|
||||
|
@ -34,6 +34,22 @@ Here are 2 ways you can get started with Hasura and ClickHouse:
|
||||
2. [Docker](/databases/clickhouse/getting-started/docker.mdx): Run Hasura with Docker and then connect your ClickHouse
|
||||
instance to Hasura.
|
||||
|
||||
## Managing data with the Hasura Console
|
||||
|
||||
The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at
|
||||
`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use
|
||||
[Hasura Cloud](https://cloud.hasura.io).
|
||||
|
||||
The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API
|
||||
in the `API` tab and interact with it using the GraphiQL interface.
|
||||
|
||||
:::info Console support
|
||||
|
||||
We recommend using your preferred ClickHouse client instead. The Hasura Console is designed to be a tool for managing
|
||||
your GraphQL API, and not a full-fledged database management tool.
|
||||
|
||||
:::
|
||||
|
||||
## Keep up to date
|
||||
|
||||
:::info Note
|
||||
|
@ -20,7 +20,7 @@ that data.
|
||||
|
||||
Currently, Hasura natively supports Postgres, MS SQL Server, and BigQuery databases. Data Connectors allow you to
|
||||
connect Hasura to **_any_** other data source. Hasura has built Data Connectors for MySQL, Oracle, Snowflake, Amazon
|
||||
Athena, MariaDB, MongoDB (coming soon), with more sources in the pipeline, but you can also use them to connect to
|
||||
Athena, MariaDB, and MongoDB, with more sources in the pipeline, but you can also use them to connect to
|
||||
your data sources. Think Microsoft Excel, SQLite, CSV, AirTable and more.
|
||||
|
||||
For more information on databases, check out the [Hasura Databases documentation](/docs/databases/overview.mdx) or to
|
||||
|
@ -205,6 +205,22 @@ schema.
|
||||
- [Subscriptions](/subscriptions/overview.mdx)
|
||||
- [Event triggers](/event-triggers/overview.mdx)
|
||||
|
||||
## Managing data with the Hasura Console
|
||||
|
||||
The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at
|
||||
`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use
|
||||
[Hasura Cloud](https://cloud.hasura.io).
|
||||
|
||||
The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API
|
||||
in the `API` tab and interact with it using the GraphiQL interface.
|
||||
|
||||
:::info Console support
|
||||
|
||||
We recommend using your preferred MariaDB client instead. The Hasura Console is designed to be a tool for managing
|
||||
your GraphQL API, and not a full-fledged database management tool.
|
||||
|
||||
:::
|
||||
|
||||
## Resources
|
||||
|
||||
- Check out the [Getting Started with Docker](/databases/mariadb/docker.mdx) guide.
|
||||
|
@ -43,6 +43,22 @@ To get started with MongoDB:
|
||||
- In Hasura Cloud, check out our [Getting Started with MongoDB in Hasura Cloud](/databases/mongodb/cloud.mdx) guide
|
||||
- In a Docker environment, check out our [Getting Started with Docker](/databases/mongodb/docker.mdx) guide
|
||||
|
||||
## Managing data with the Hasura Console
|
||||
|
||||
The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at
|
||||
`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use
|
||||
[Hasura Cloud](https://cloud.hasura.io).
|
||||
|
||||
The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API
|
||||
in the `API` tab and interact with it using the GraphiQL interface.
|
||||
|
||||
:::info Console support
|
||||
|
||||
We recommend using your preferred MongoDB client instead. The Hasura Console is designed to be a tool for managing
|
||||
your GraphQL API, and not a full-fledged database management tool.
|
||||
|
||||
:::
|
||||
|
||||
## Keep up to date
|
||||
|
||||
If you'd like to stay informed about the status of MongoDB support, subscribe to our newsletter and join our Discord!
|
||||
|
@ -38,13 +38,31 @@ Here are 2 ways you can get started with Hasura and SQL Server:
|
||||
Hasura currently supports queries, subscriptions, mutations, relationships, permissions, and Event Triggers on MS SQL
|
||||
Server.
|
||||
|
||||
## Managing data with the Hasura Console
|
||||
|
||||
The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at
|
||||
`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use
|
||||
[Hasura Cloud](https://cloud.hasura.io).
|
||||
|
||||
The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API
|
||||
in the `API` tab and interact with it using the GraphiQL interface.
|
||||
|
||||
:::info Console support
|
||||
|
||||
We recommend using your preferred MS SQL Server client instead. The Hasura Console is designed to be a tool for managing
|
||||
your GraphQL API, and not a full-fledged database management tool.
|
||||
|
||||
:::
|
||||
|
||||
## Required permissions
|
||||
|
||||
Assuming a `CONNECT` permission already exists, the following permissions are required for Hasura to function completely. Note that missing permissions may cause the corresponding features to work incorrectly:
|
||||
Assuming a `CONNECT` permission already exists, the following permissions are required for Hasura to function
|
||||
completely. Note that missing permissions may cause the corresponding features to work incorrectly:
|
||||
|
||||
* To use the Hasura Console to alter your schema, you will need appropriate schema permissions, such as `CREATE TABLE`, `CREATE VIEW`, `CREATE FUNCTION`, and `CREATE PROCEDURE`, depending on what you want to do.
|
||||
* To perform queries and mutations, Hasura will need permission to `DELETE`, `INSERT`, `SELECT`, and `UPDATE`.
|
||||
* To call MSSQL stored procedures via Hasura, the `EXECUTE` permission is also required.
|
||||
- To use the Hasura Console to alter your schema, you will need appropriate schema permissions, such as `CREATE TABLE`,
|
||||
`CREATE VIEW`, `CREATE FUNCTION`, and `CREATE PROCEDURE`, depending on what you want to do.
|
||||
- To perform queries and mutations, Hasura will need permission to `DELETE`, `INSERT`, `SELECT`, and `UPDATE`.
|
||||
- To call MSSQL stored procedures via Hasura, the `EXECUTE` permission is also required.
|
||||
|
||||
## Keep up to date
|
||||
|
||||
|
167
docs/docs/databases/mysql/aiven.mdx
Normal file
@ -0,0 +1,167 @@
|
||||
---
|
||||
description: "Learn how to connect Hasura Cloud to Aiven MySQL databases, including set up instructions, service configuration, and secure connection details."
|
||||
title: 'Cloud: Using Hasura Cloud with an Aiven MySQL database'
|
||||
keywords:
|
||||
- hasura cloud
|
||||
- aiven mysql integration
|
||||
- graphql database connection
|
||||
- mysql jdbc string
|
||||
- secure database setup
|
||||
- hasura console guide
|
||||
- cloud database management
|
||||
- mysql service configuration
|
||||
- aiven service creation
|
||||
- connect hasura to mysql
|
||||
sidebar_label: Aiven MySQL
|
||||
sidebar_position: 4
|
||||
seoFrontMatterUpdated: true
|
||||
---
|
||||
|
||||
import Thumbnail from '@site/src/components/Thumbnail';
|
||||
import HeadingIcon from '@site/src/components/HeadingIcon';
|
||||
|
||||
# Connecting Hasura to an Aiven MySQL Database
|
||||
|
||||
## Introduction
|
||||
|
||||
This guide explains how to connect a new or existing [Aiven MySQL](https://aiven.io/mysql?utm_source=website&utm_medium=referral&utm_campaign=hasura) database to a Hasura
|
||||
instance, either on [Hasura Cloud](https://cloud.hasura.io?skip_onboarding=true) or via one of our
|
||||
[self-hosted](/deployment/deployment-guides/index.mdx) solutions.
|
||||
|
||||
:::info Note
|
||||
|
||||
If you plan on using Hasura Cloud, which we recommend, follow steps 1 and 2 below. If you're self-hosting a Hasura
|
||||
instance and already have a project running, skip to [step 3](#create-mysql-db-aiven).
|
||||
|
||||
:::
|
||||
|
||||
:::tip Supported From
|
||||
|
||||
Aiven-hosted MySQL databases are supported from Hasura `v2.35.0` onwards.
|
||||
|
||||
:::
|
||||
|
||||
## Step 1: Sign up or log in to Hasura Cloud
|
||||
|
||||
Navigate to [Hasura Cloud](https://cloud.hasura.io/signup/?pg=docs&plcmt=body&cta=navigate-to-hasura-cloud&tech=default)
|
||||
and sign up or log in.
|
||||
|
||||
## Step 2: Create a Hasura Cloud project {#create-hasura-project-aiven}
|
||||
|
||||
On the Hasura Cloud dashboard, create a new project:
|
||||
|
||||
<Thumbnail src="/img/cloud-dbs/create-hasura-cloud-project.png" alt="Create Hasura Cloud project" width="1000px" />
|
||||
|
||||
After the project is initialized successfully, click on `Launch Console` to open the Hasura Console in your browser.
|
||||
|
||||
On the Hasura Console, navigate to the `Data` tab and choose `Connect Existing Database`. Choose the MySQL driver and
|
||||
then click `Connect Existing Database`:
|
||||
|
||||
<Thumbnail src="/img/databases/mysql/aiven/aiven-mysql-choose-driver.png" alt="Choose MySQL driver" width="1000px" />
|
||||
|
||||
We'll provision the database on Aiven in the next step and then return to this page to complete the connection.
|
||||
|
||||
## Step 3: Create a MySQL DB on Aiven {#create-mysql-db-aiven}
|
||||
|
||||
:::info Note
|
||||
|
||||
If you have an existing Aiven MySQL database, you can skip this step and move on to [step 4](#connect-hasura-aiven).
|
||||
|
||||
:::
|
||||
|
||||
Log into the [Aiven console](https://console.aiven.io/signup?utm_source=website&utm_medium=referral&utm_campaign=hasura).
|
||||
|
||||
On the Aiven console, click `+ Create a new service` and choose `MySQL`:
|
||||
|
||||
<Thumbnail
|
||||
src="/img/databases/mysql/aiven/aiven-create-mysql.png"
|
||||
alt="Create MySQL instance on Aiven"
|
||||
width="1000px"
|
||||
/>
|
||||
|
||||
Scroll down and select the `Cloud Provider`, `Region` and `Service Plan` based on your requirements. Then click
|
||||
`Create free service`:
|
||||
|
||||
<Thumbnail
|
||||
src="/img/databases/mysql/aiven/aiven-mysql-configuration.png"
|
||||
alt="Configure MySQL service"
|
||||
width="1000px"
|
||||
/>
|
||||
|
||||
## Step 4: Allow connections to your DB from Hasura {#connect-hasura-aiven}
|
||||
|
||||
On the `Services` dashboard, click on your DB and scroll down to `Allowed IP Addresses` and click on `Change`:
|
||||
|
||||
<Thumbnail
|
||||
src="/img/databases/mysql/aiven/aiven-mysql-change-ip.png"
|
||||
alt="Change allowed IP addresses on Aiven"
|
||||
width="1000px"
|
||||
/>
|
||||
|
||||
If you're using Hasura Cloud, you can quickly find your IP address from the `Hasura Cloud IP` field on the project's
|
||||
details view:
|
||||
|
||||
<Thumbnail src="/img/databases/mysql/aiven/aiven-mysql-hasura-cloud-ip.png" alt="Hasura Cloud IP" width="1000px" />
|
||||
|
||||
:::info Note
|
||||
|
||||
If you're using a self-hosted solution, you'll need to determine the IP address manually depending on your hosting
|
||||
service.
|
||||
|
||||
:::
|
||||
|
||||
Add the Hasura IP address that you copied, click on the `+`:
|
||||
|
||||
<Thumbnail src="/img/databases/mysql/aiven/aiven-mysql-hasura-cloud-ip-set.png" alt="Hasura Cloud IP" width="1000px" />
|
||||
|
||||
Then click on `Close`.
|
||||
|
||||
## Step 5: Get the database connection URL {#get-db-url-aiven}
|
||||
|
||||
The MySQL connector utilizes JDBC connection strings to connect to the database. The format of the connection string is
|
||||
as follows:
|
||||
|
||||
```bash
|
||||
jdbc:mysql://<hostname>:<port>/<database name>?user=<username>&password=<password>
|
||||
```
|
||||
|
||||
You'll have to transform the connection string provided by Aiven into the format above. Navigate to the `Overview` tab
|
||||
of your database dashboard and use the `Service URI` to construct the connection string:
|
||||
|
||||
<Thumbnail src="/img/databases/mysql/aiven/aiven-mysql-connection-uri.png" alt="Connection URI" width="1000px" />
|
||||
|
||||
## Step 6: Finish connecting the database
|
||||
|
||||
Back on the Hasura Console, enter the database URL that we retrieved in [step 5](#get-db-url-aiven):
|
||||
|
||||
<Thumbnail src="/img/databases/mysql/aiven/aiven-mysql-finish-connecting.png" alt="Finish connecting" width="1000px" />
|
||||
|
||||
Then click `Connect Database`.
|
||||
|
||||
:::info Note
|
||||
|
||||
For security reasons, it is recommended to set database URLs as [env vars](/hasura-cloud/projects/env-vars.mdx) and
|
||||
using the env vars to connect to the databases in place of the raw database URLs.
|
||||
|
||||
:::
|
||||
|
||||
Voilà. You are ready to start developing.
|
||||
|
||||
<Thumbnail src="/img/cloud-dbs/hasura-console.png" alt="Hasura Console" width="1100px" />
|
||||
|
||||
## Next steps
|
||||
|
||||
- You can check out our [30-Minute Hasura Basics Course](https://hasura.io/learn/graphql/hasura/introduction/) and other
|
||||
[GraphQL & Hasura Courses](https://hasura.io/learn/) for a more detailed introduction to Hasura.
|
||||
|
||||
- If using Hasura Cloud, you can also click the gear icon to manage your Hasura Cloud project. (e.g. add
|
||||
[collaborators](/hasura-cloud/projects/collaborators.mdx), [env vars](/hasura-cloud/projects/env-vars.mdx) or
|
||||
[custom domains](/hasura-cloud/domains.mdx)).
|
||||
|
||||
<Thumbnail src="/img/getting-started/project-manage.png" alt="Project actions" width="860px" />
|
||||
|
||||
:::info Note
|
||||
|
||||
For more information on which MySQL features we support, check out [this page](/databases/feature-support.mdx).
|
||||
|
||||
:::
|
@ -208,6 +208,22 @@ schema.
|
||||
- [Subscriptions](/subscriptions/overview.mdx)
|
||||
- [Event triggers](/event-triggers/overview.mdx)
|
||||
|
||||
## Managing data with the Hasura Console
|
||||
|
||||
The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at
|
||||
`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use
|
||||
[Hasura Cloud](https://cloud.hasura.io).
|
||||
|
||||
The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API
|
||||
in the `API` tab and interact with it using the GraphiQL interface.
|
||||
|
||||
:::info Console support
|
||||
|
||||
We recommend using your preferred MySQL client instead. The Hasura Console is designed to be a tool for managing
|
||||
your GraphQL API, and not a full-fledged database management tool.
|
||||
|
||||
:::
|
||||
|
||||
## Resources
|
||||
|
||||
- Check out the [Getting Started with Docker](/databases/mysql/docker.mdx) guide.
|
||||
|
@ -204,3 +204,19 @@ schema.
|
||||
|
||||
- [Subscriptions](/subscriptions/overview.mdx)
|
||||
- [Event triggers](/event-triggers/overview.mdx)
|
||||
|
||||
## Managing data with the Hasura Console
|
||||
|
||||
The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at
|
||||
`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use
|
||||
[Hasura Cloud](https://cloud.hasura.io).
|
||||
|
||||
The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API
|
||||
in the `API` tab and interact with it using the GraphiQL interface.
|
||||
|
||||
:::info Console support
|
||||
|
||||
We recommend using your preferred Oracle client instead. The Hasura Console is designed to be a tool for managing
|
||||
your GraphQL API, and not a full-fledged database management tool.
|
||||
|
||||
:::
|
@ -127,7 +127,7 @@ import Clickhouse from '@site/static/img/databases/logos/clickhouse.png';
|
||||
<div className="card">
|
||||
<img src={MongoDB} style={{ width: '172px' }} title="MongoDB" alt="Connect MongoDB to Hasura" />
|
||||
</div>
|
||||
<h5>MongoDB (Beta)</h5>
|
||||
<h5>MongoDB</h5>
|
||||
</div>
|
||||
</VersionedLink>
|
||||
<VersionedLink to="/databases/clickhouse/index/">
|
||||
|
@ -204,18 +204,36 @@ Curious about any other Postgres flavors? Any other questions? Ask us on
|
||||
|
||||
:::
|
||||
|
||||
## Managing data with the Hasura Console
|
||||
|
||||
The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at
|
||||
`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use
|
||||
[Hasura Cloud](https://cloud.hasura.io).
|
||||
|
||||
The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API
|
||||
in the `API` tab and interact with it using the GraphiQL interface.
|
||||
|
||||
:::info Console support
|
||||
|
||||
You can use these tools to manage your PostgreSQL database, but we recommend using your preferred
|
||||
[PostgreSQL client instead](https://wiki.postgresql.org/wiki/PostgreSQL_Clients). The Hasura Console is designed to be a
|
||||
tool for managing your GraphQL API, and not a full-fledged database management tool.
|
||||
|
||||
:::
|
||||
|
||||
## Required user role permissions
|
||||
|
||||
Below are the role permissions required for Hasura to perform all its functionality. Note that, with the exception of `CONNECT` and `GRANT USAGE`, the other features are opt-in, and not enabling them will simply mean that only the corresponding Hasura features will not work.
|
||||
|
||||
* `CONNECT` is required in order for Hasura to connect to your Postgres data source.
|
||||
* You must `GRANT USAGE` to the Hasura user role for any schema you want to access via Hasura.
|
||||
* To allow queries and subscriptions via the GraphQL API, `SELECT` permissions are required.
|
||||
* Similarly, `INSERT`, `UPDATE`, and `DELETE` permissions are required for mutations.
|
||||
* The Hasura Console requires permissions such as `REFERENCES` and `CREATE` to make changes to your schema.
|
||||
* `TRIGGER` is required to use [Event Triggers](/event-triggers/overview.mdx)
|
||||
* If you want to use computed fields or user-defined Postgres functions, the `EXECUTE` permission is required.
|
||||
Below are the role permissions required for Hasura to perform all its functionality. Note that, with the exception of
|
||||
`CONNECT` and `GRANT USAGE`, the other features are opt-in, and not enabling them will simply mean that only the
|
||||
corresponding Hasura features will not work.
|
||||
|
||||
- `CONNECT` is required in order for Hasura to connect to your Postgres data source.
|
||||
- You must `GRANT USAGE` to the Hasura user role for any schema you want to access via Hasura.
|
||||
- To allow queries and subscriptions via the GraphQL API, `SELECT` permissions are required.
|
||||
- Similarly, `INSERT`, `UPDATE`, and `DELETE` permissions are required for mutations.
|
||||
- The Hasura Console requires permissions such as `REFERENCES` and `CREATE` to make changes to your schema.
|
||||
- `TRIGGER` is required to use [Event Triggers](/event-triggers/overview.mdx)
|
||||
- If you want to use computed fields or user-defined Postgres functions, the `EXECUTE` permission is required.
|
||||
|
||||
## Know more
|
||||
|
||||
|
5
docs/docs/databases/redshift/_category_.json
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"label": "Amazon Redshift",
|
||||
"position": 10,
|
||||
"className": "beta-cat"
|
||||
}
|
@ -0,0 +1,4 @@
|
||||
{
|
||||
"label": "Getting Started",
|
||||
"position": 1
|
||||
}
|
106
docs/docs/databases/redshift/getting-started/cloud.mdx
Normal file
@ -0,0 +1,106 @@
|
||||
---
|
||||
sidebar_label: Hasura Cloud
|
||||
sidebar_position: 1
|
||||
description: Hasura Cloud for Amazon Redshift
|
||||
keywords:
|
||||
- hasura
|
||||
- docs
|
||||
- databases
|
||||
- redshift
|
||||
- amazon redshift
|
||||
- hasura cloud
|
||||
---
|
||||
|
||||
import Thumbnail from '@site/src/components/Thumbnail';
|
||||
|
||||
# Get Started with Hasura Cloud and Amazon Redshift
|
||||
|
||||
## Introduction
|
||||
|
||||
### Step 1: Create an account on Hasura Cloud and create a new Hasura Project
|
||||
|
||||
Navigate to
|
||||
[cloud.hasura.io](https://cloud.hasura.io/signup/?pg=docs&plcmt=body&cta=navigate-to-cloud-hasura-io&tech=default&skip_onboarding=true),
|
||||
and create a new Hasura Cloud account.
|
||||
|
||||
Once you create a project on Hasura Cloud, hit the "Launch Console" button to open the Hasura Console for your project.
|
||||
|
||||
<Thumbnail
|
||||
src="/img/databases/data-connector/create-project.png"
|
||||
alt="Connect new or existing database"
|
||||
width="1000px"
|
||||
/>
|
||||
|
||||
### Step 2: Add your Amazon Redshift database as a source to Hasura
|
||||
|
||||
Head to the `Data > Manage databases` section on the Console to add your Amazon Redshift database as a source to Hasura.
|
||||
|
||||
:::info Make sure your Amazon Redshift service is reachable by Hasura Cloud:
|
||||
|
||||
1. **Allow public connections or
|
||||
[whitelist the Hasura Cloud IP](/hasura-cloud/projects/create.mdx#cloud-projects-create-allow-nat-ip) on your Amazon
|
||||
Redshift firewall:** This is good for testing and will allow you to quickly try Hasura out with your database!
|
||||
2. **VPC peering:** VPC peering and private network access is available on Hasura Cloud paid tiers: Recommended for
|
||||
production. Get in touch with us if you'd like to try this out against your existing databases!
|
||||
|
||||
:::
|
||||
|
||||
First, we need to add the redshift agent:
|
||||
|
||||
<Thumbnail src="/img/databases/redshift/redshift-add-agent.png" alt="Adding the Amazon Redshift agent" width="1000px" />
|
||||
|
||||
Now we need to connect to Redshift by clicking `Connect Database`:
|
||||
|
||||
<Thumbnail src="/img/databases/data-connector/manage-databases.png" alt="Manage databases" width="1000px" />
|
||||
|
||||
Next, choose the `redshift (Beta)` driver:
|
||||
|
||||
<Thumbnail
|
||||
src="/img/databases/redshift/redshift-add-service1.png"
|
||||
alt="Adding the Amazon Redshift service"
|
||||
width="1000px"
|
||||
/>
|
||||
|
||||
Finally, enter your Amazon Redshift database URL and
|
||||
[database schema](https://docs.aws.amazon.com/athena/latest/ug/creating-tables.html) and click `Connect Database`:
|
||||
|
||||
<Thumbnail
|
||||
src="/img/databases/redshift/redshift-add-service2.png"
|
||||
alt="Setting the Amazon Redshift connection details."
|
||||
width="1000px"
|
||||
/>
|
||||
|
||||
Once you add the Amazon Redshift service, you'll see it listed as an available database on the sidebar.
|
||||
|
||||
### Step 3: Track existing tables
|
||||
|
||||
To query against your Amazon Redshift service using Hasura, you'll need to have existing tables to select. Those tables
|
||||
will appear under the database as shown.
|
||||
|
||||
<Thumbnail src="/img/databases/redshift/redshift-tracking-tables1.png" alt="Tracking tables." width="1000px" />
|
||||
|
||||
Track tables selectively or all so that Hasura can introspect the tables and create the corresponding GraphQL schema.
|
||||
Once you've selected the tables you'd like to track, click `Track Selected` to finish setup:
|
||||
|
||||
<Thumbnail src="/img/databases/redshift/redshift-tracking-tables2.png" alt="Tracking tables selected." width="1000px" />
|
||||
|
||||
### Step 4: Try out a GraphQL query
|
||||
|
||||
Head to the `API` tab in the Console and try running a GraphQL query! Use the explorer sidebar on GraphQL to get help in
|
||||
creating a GraphQL query.
|
||||
|
||||
<Thumbnail src="/img/databases/redshift/make-graphql-query.png" alt="Try a GraphQL query" width="1000px" />
|
||||
|
||||
## Keep up to date
|
||||
|
||||
:::info Note
|
||||
|
||||
Currently, Hasura supports read-only queries, subscriptions, relationships, and permissions on Amazon Redshift.
|
||||
|
||||
:::
|
||||
|
||||
If you'd like to stay informed about the status of Amazon Redshift support, subscribe to our newsletter and join our
|
||||
discord!
|
||||
|
||||
- [https://hasura.io/newsletter/](https://hasura.io/newsletter/)
|
||||
- [https://discord.com/invite/hasura](https://discord.com/invite/hasura)
|
52
docs/docs/databases/redshift/getting-started/docker.mdx
Normal file
@ -0,0 +1,52 @@
|
||||
---
|
||||
sidebar_label: Docker
|
||||
sidebar_position: 2
|
||||
description: Hasura with Docker for Amazon Redshift
|
||||
keywords:
|
||||
- hasura
|
||||
- docs
|
||||
- databases
|
||||
- redshift
|
||||
- amazon redshift
|
||||
- docker
|
||||
---
|
||||
|
||||
# Get Started with Docker and Amazon Redshift
|
||||
|
||||
## Introduction
|
||||
|
||||
Currently, testing continues on the Amazon Redshift connector for use in self-hosted environments. Our suggested
|
||||
installation method is to use Docker Compose to deploy a working deployment of Hasura with the Amazon Redshift connector
|
||||
enabled.
|
||||
|
||||
In order to do this, follow the instructions for
|
||||
[Hasura Enterprise Edition](/enterprise/getting-started/quickstart-docker.mdx), but change out the Docker Compose files
|
||||
listed in that documentation with these values:
|
||||
|
||||
```bash
|
||||
# in a new directory run
|
||||
wget https://raw.githubusercontent.com/hasura/graphql-engine/master/install-manifests/enterprise/redshift/docker-compose.yaml
|
||||
# or run
|
||||
curl https://raw.githubusercontent.com/hasura/graphql-engine/master/install-manifests/enterprise/redshift/docker-compose.yaml -o docker-compose.yml
|
||||
```
|
||||
|
||||
When you use these to launch the services, you'll see three containers running instead of two. The third container is
|
||||
the Amazon Redshift GraphQL Connector agent. By navigating to the Hasura Console after execution, you'll find the Amazon
|
||||
Redshift data source as a type that can now be added to your Hasura GraphQL Service instance.
|
||||
|
||||
## Keep up to date
|
||||
|
||||
:::info Note
|
||||
|
||||
Currently, Hasura supports read-only queries, subscriptions, relationships, and permissions on Amazon Redshift.
|
||||
|
||||
:::
|
||||
|
||||
Please watch this space to get the latest docs on how you can try these features out via the Console or by manipulating
|
||||
Metadata in JSON/YAML directly.
|
||||
|
||||
If you'd like to stay informed about the status of Amazon Redshift support, subscribe to our newsletter and join our
|
||||
discord!
|
||||
|
||||
- [https://hasura.io/newsletter/](https://hasura.io/newsletter/)
|
||||
- [https://discord.com/invite/hasura](https://discord.com/invite/hasura)
|
33
docs/docs/databases/redshift/getting-started/index.mdx
Normal file
@ -0,0 +1,33 @@
|
||||
---
|
||||
slug: index
|
||||
keywords:
  - hasura
|
||||
- docs
|
||||
- databases
|
||||
- redshift
|
||||
- amazon redshift
|
||||
---
|
||||
|
||||
# Get Started with Amazon Redshift
|
||||
|
||||
:::caution Redshift Beta Availability
|
||||
|
||||
The Hasura Amazon Redshift connector is currently available in beta for all Hasura Cloud offerings and for Hasura
|
||||
Enterprise Edition customers. Once the Redshift connector is generally available (GA), it will be available only for
|
||||
Hasura Cloud Enterprise and Enterprise Edition customers.
|
||||
|
||||
:::
|
||||
|
||||
:::caution Redshift Beta Regions
|
||||
|
||||
At this time, Redshift access is only available on AWS Regions.
|
||||
|
||||
:::
|
||||
|
||||
To try Hasura with Amazon Redshift, you'll need your own new or existing Amazon Redshift database.
|
||||
|
||||
Here are 2 ways you can get started with Hasura:
|
||||
|
||||
1. [Hasura Cloud](/databases/redshift/getting-started/cloud.mdx): You'll need to be able to access your Amazon Redshift
|
||||
service from Hasura Cloud.
|
||||
2. [Docker](/databases/redshift/getting-started/docker.mdx): Run Hasura with Docker and then connect your Amazon Redshift
|
||||
service to Hasura.
|
70
docs/docs/databases/redshift/index.mdx
Normal file
@ -0,0 +1,70 @@
|
||||
---
|
||||
slug: index
|
||||
description: Hasura Amazon Redshift database support
|
||||
keywords:
|
||||
- hasura
|
||||
- docs
|
||||
- databases
|
||||
- redshift
|
||||
- amazon redshift
|
||||
---
|
||||
|
||||
# Amazon Redshift
|
||||
|
||||
## Introduction
|
||||
|
||||
Hasura allows connecting to an Amazon Redshift service to build a GraphQL API based on the schema of the service.
|
||||
|
||||
:::tip Supported versions:
|
||||
|
||||
1. Hasura GraphQL Engine `v2.35.0` onwards
|
||||
2. [Amazon Redshift](https://aws.amazon.com/redshift/)
|
||||
|
||||
:::
|
||||
|
||||
## Get Started
|
||||
|
||||
To try Hasura with Amazon Redshift, you'll need your own new or existing Amazon Redshift instance.
|
||||
|
||||
Here are 2 ways you can get started with Hasura and Amazon Redshift:
|
||||
|
||||
1. [Hasura Cloud](/databases/redshift/getting-started/cloud.mdx): You'll need to be able to access your Amazon Redshift
|
||||
instance service from Hasura Cloud.
|
||||
2. [Docker](/databases/redshift/getting-started/docker.mdx): Run Hasura with Docker and then connect your Amazon Redshift
|
||||
instance to Hasura.
|
||||
|
||||
## Supported features
|
||||
|
||||
:::info Note
|
||||
|
||||
Currently, Hasura supports read-only queries, relationships, and permissions on Amazon Redshift.
|
||||
|
||||
:::
|
||||
|
||||
## Managing data with the Hasura Console
|
||||
|
||||
The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at
|
||||
`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use
|
||||
[Hasura Cloud](https://cloud.hasura.io).
|
||||
|
||||
The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API
|
||||
in the `API` tab and interact with it using the GraphiQL interface.
|
||||
|
||||
:::info Console support
|
||||
|
||||
We recommend using your preferred Amazon Redshift client instead. The Hasura Console is designed to be a tool for managing
|
||||
your GraphQL API, and not a full-fledged database management tool.
|
||||
|
||||
:::
|
||||
|
||||
## Keep up to date
|
||||
|
||||
If you'd like to stay informed about the status of Amazon Redshift support, subscribe to our newsletter and join our
|
||||
discord!
|
||||
|
||||
- [https://hasura.io/newsletter/](https://hasura.io/newsletter/)
|
||||
- [https://discord.com/invite/hasura](https://discord.com/invite/hasura)
|
||||
|
||||
## Know more
|
||||
|
||||
- [Get started](/databases/redshift/getting-started/index.mdx)
|
@ -16,9 +16,8 @@ import Thumbnail from '@site/src/components/Thumbnail';
|
||||
|
||||
## Introduction
|
||||
|
||||
Testing is currently underway on the Snowflake connector for use in self-hosted environments. Our suggested
|
||||
installation method is to use Docker Compose to deploy a working deployment of Hasura with the Snowflake Connector
|
||||
enabled.
|
||||
Testing is currently underway on the Snowflake connector for use in self-hosted environments. Our suggested installation
|
||||
method is to use Docker Compose to deploy a working deployment of Hasura with the Snowflake Connector enabled.
|
||||
|
||||
In order to do this, follow the instructions for
|
||||
[Hasura Enterprise Edition](/enterprise/getting-started/quickstart-docker.mdx), but change out the Docker Compose files
|
||||
@ -35,6 +34,41 @@ When you use these to launch the services, you'll see three containers running i
|
||||
the Snowflake GraphQL Connector agent. By navigating to the Hasura Console after execution, you'll find the Snowflake
|
||||
data source as a type that can now be added to your Hasura GraphQL Service instance.
|
||||
|
||||
### Snowflake Connector Configuration
|
||||
|
||||
You can directly add your JDBC connection string to the Snowflake Connector agent in the Hasura Console, or you can add
|
||||
it as an environment variable to your project.
|
||||
|
||||
:::info Setting the connection string as an environment variable
|
||||
|
||||
It's generally accepted that setting the connection string as an environment variable is a better practice as it's more
|
||||
secure and prevents any secrets from being exposed in your instance's metadata.
|
||||
|
||||
An example would be to create a new
|
||||
[environment variable](/deployment/graphql-engine-flags/index.mdx#using-environment-variables) called
|
||||
`SNOWFLAKE_JDBC_URL` and set it equal to your JDBC connection string.
|
||||
|
||||
Then, export the metadata - in JSON form - using the Console's `Settings` page or by making a call using the
|
||||
[metadata API](api-reference/metadata-api/manage-metadata.mdx#metadata-export-metadata) and add the following key-value
|
||||
pair to the `metadata.json`'s `configuration` object:
|
||||
|
||||
```json
|
||||
"template": "{\"fully_qualify_all_names\": false, \"jdbc_url\": \"{{getEnvironmentVariable(\"SNOWFLAKE_JDBC_URL\")}}\"}"
|
||||
```
|
||||
|
||||
You can then apply the metadata to your instance by either using the Console's `Settings` page or by making a call using
|
||||
the [metadata API](api-reference/metadata-api/manage-metadata.mdx#metadata-apply-metadata).
|
||||
|
||||
:::
|
||||
|
||||
:::info Ensure your password escapes special characters
|
||||
|
||||
Due to the potential variations in drivers, it's crucial to escape special characters used in the password of the
|
||||
connection string. These include `{ } % & #`. To escape a character, use the appropriate escape sequence based on your
|
||||
database's driver's documentation.
|
||||
|
||||
:::
|
||||
|
||||
## Keep up to date
|
||||
|
||||
:::info Note
|
||||
@ -46,8 +80,7 @@ Currently, Hasura supports read-only queries, relationships, and permissions on
|
||||
Please watch this space to get the latest docs on how you can try these features out via the Console or by manipulating
|
||||
Metadata in JSON/YAML directly.
|
||||
|
||||
If you'd like to stay informed about the status of Snowflake support, subscribe to our newsletter and join our
|
||||
discord!
|
||||
If you'd like to stay informed about the status of Snowflake support, subscribe to our newsletter and join our discord!
|
||||
|
||||
- [https://hasura.io/newsletter/](https://hasura.io/newsletter/)
|
||||
- [https://discord.com/invite/hasura](https://discord.com/invite/hasura)
|
||||
|
@ -198,6 +198,22 @@ Here are 2 ways you can get started with Hasura and Snowflake:
|
||||
|
||||
</div>
|
||||
|
||||
## Managing data with the Hasura Console
|
||||
|
||||
The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at
|
||||
`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use
|
||||
[Hasura Cloud](https://cloud.hasura.io).
|
||||
|
||||
The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API
|
||||
in the `API` tab and interact with it using the GraphiQL interface.
|
||||
|
||||
:::info Console support
|
||||
|
||||
We recommend using your preferred Snowflake client instead. The Hasura Console is designed to be a tool for managing
|
||||
your GraphQL API, and not a full-fledged database management tool.
|
||||
|
||||
:::
|
||||
|
||||
## Keep up to date
|
||||
|
||||
:::info Note
|
||||
|
@ -52,11 +52,10 @@ above, databases can be connected using any custom environment variables of your
|
||||
### Metadata Database URL
|
||||
|
||||
This Postgres database URL is used to store Hasura's Metadata. By default, the database configured using
|
||||
`HASURA_GRAPHQL_DATABASE_URL` / `--database_url` will be used to store the
|
||||
Metadata. This can also be a URI of the form `dynamic-from-file:///path/to/file`, where the referenced
|
||||
file contains a postgres connection string, which will be read dynamically every time a new
|
||||
connection is established. This allows the server to be used in an environment where secrets are
|
||||
rotated frequently.
|
||||
`HASURA_GRAPHQL_DATABASE_URL` / `--database_url` will be used to store the Metadata. This can also be a URI of the form
|
||||
`dynamic-from-file:///path/to/file`, where the referenced file contains a postgres connection string, which will be read
|
||||
dynamically every time a new connection is established. This allows the server to be used in an environment where
|
||||
secrets are rotated frequently.
|
||||
|
||||
| | |
|
||||
| ------------------- | --------------------------------------------------------------------------- |
|
||||
@ -210,8 +209,8 @@ Whether or not to send the request body (graphql request/variables) to the auth
|
||||
|
||||
Stringify certain
|
||||
[BigQuery numeric types](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#numeric_types),
|
||||
specifically `bignumeric`, `float64`, `int64`, `numeric` and aliases thereof, as they don't fit into the
|
||||
`IEnterprise EditionE 754` spec for JSON encoding-decoding.
|
||||
specifically `bignumeric`, `float64`, `int64`, `numeric` and aliases thereof, as they don't fit into the `IEEE 754` spec
|
||||
for JSON encoding-decoding.
|
||||
|
||||
| | |
|
||||
| ------------------- | ---------------------------------------------- |
|
||||
@ -332,17 +331,17 @@ headers on any request.
|
||||
|
||||
Enables the ability to source Postgres connection strings from files on disk using
|
||||
[DynamicFromFile](/api-reference/syntax-defs.mdx#pgsourceconnectioninfo), and the ability for
|
||||
[Template Variables](/databases/database-config/data-connector-config.mdx#template-variables) to
|
||||
use the contents of files in Data Connector configuration transforms. File paths used with these
|
||||
features must start with the prefix set in this environment variable.
|
||||
[Template Variables](/databases/database-config/data-connector-config.mdx#template-variables) to use the contents of
|
||||
files in Data Connector configuration transforms. File paths used with these features must start with the prefix set in
|
||||
this environment variable.
|
||||
|
||||
| | |
|
||||
| ------------------- | ------------------------------------------------------------------------ |
|
||||
| **Flag** | N/A |
|
||||
| **Env var** | `HASURA_GRAPHQL_DYNAMIC_SECRETS_ALLOWED_PATH_PREFIX` |
|
||||
| **Accepted values** | String (representing a filesystem path prefix, such as `/var/secrets/`) |
|
||||
| **Default** | `null` |
|
||||
| **Supported in** | CE, Enterprise Edition |
|
||||
| | |
|
||||
| ------------------- | ----------------------------------------------------------------------- |
|
||||
| **Flag** | N/A |
|
||||
| **Env var** | `HASURA_GRAPHQL_DYNAMIC_SECRETS_ALLOWED_PATH_PREFIX` |
|
||||
| **Accepted values** | String (representing a filesystem path prefix, such as `/var/secrets/`) |
|
||||
| **Default** | `null` |
|
||||
| **Supported in** | CE, Enterprise Edition |
|
||||
|
||||
### Enable Allow List
|
||||
|
||||
@ -705,6 +704,41 @@ Multiplexed live queries are split into
|
||||
| **Default** | `100` |
|
||||
| **Supported in** | CE, Enterprise Edition, Cloud |
|
||||
|
||||
### Null in Non-nullable Variables
|
||||
|
||||
A GraphQL query validation issue is fixed in [`v2.36.0-beta.1`](https://hasura.io/changelog/community-edition/v2.36.0-beta.1):
|
||||
previously, queries that assigned a `null` value to variables with a non-nullable type were allowed.
|
||||
|
||||
Example:
|
||||
|
||||
```graphql
|
||||
query ($user_id: Int!) {
|
||||
users(where: { id: { _eq: $user_id } }) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
with the following variables:
|
||||
|
||||
```json
|
||||
{
|
||||
"user_id": null
|
||||
}
|
||||
```
|
||||
|
||||
To roll back to the old behavior, i.e., allow a `null` value for non-nullable variables, use this option.
|
||||
|
||||
| | |
|
||||
| ------------------- | --------------------------------------------------------------- |
|
||||
| **Flag** | `--null-in-nonnullable-variables <true-or-false>` |
|
||||
| **Env var** | `HASURA_GRAPHQL_BACKWARDS_COMPAT_NULL_IN_NONNULLABLE_VARIABLES` |
|
||||
| **Accepted values** | Boolean |
|
||||
| **Options** | `true` or `false` |
|
||||
| **Default** | `false` |
|
||||
| **Supported in** | CE, Enterprise Edition, Cloud - from `v2.36.0-beta.1` |
|
||||
|
||||
### Number of Retries
|
||||
|
||||
:::warning Notice
|
||||
@ -1055,7 +1089,7 @@ any - will be sent, at most, once during this interval.
|
||||
### Stringify Numeric Types
|
||||
|
||||
Stringify certain [Postgres numeric types](/schema/postgres/postgresql-types.mdx), specifically `bigint` ,`numeric`
|
||||
,`decimal` and `double precision` as they don't fit into the `IEnterprise EditionE-754` spec for JSON encoding-decoding.
|
||||
,`decimal` and `double precision` as they don't fit into the `IEEE-754` spec for JSON encoding-decoding.
|
||||
|
||||
| | |
|
||||
| ------------------- | ---------------------------------------- |
|
||||
@ -1181,13 +1215,13 @@ Used to set the connection initialization timeout for `graphql-ws` clients. This
|
||||
Used to set the `Keep Alive` delay for clients that use the `subscription-transport-ws` (Apollo) protocol. For
|
||||
`graphql-ws` clients, the `graphql-engine` sends `PING` messages instead.
|
||||
|
||||
| | |
|
||||
| ------------------- | ---------------------------------------------------- |
|
||||
| **Flag** | `--websocket-keepalive <TIME_IN_SECONDS>` |
|
||||
| **Env var** | `HASURA_GRAPHQL_WEBSOCKET_KEnterprise EditionPALIVE` |
|
||||
| **Accepted values** | Integer (Representing a delay in seconds) |
|
||||
| **Default** | `5` |
|
||||
| **Supported in** | CE, Enterprise Edition |
|
||||
| | |
|
||||
| ------------------- | ----------------------------------------- |
|
||||
| **Flag** | `--websocket-keepalive <TIME_IN_SECONDS>` |
|
||||
| **Env var** | `HASURA_GRAPHQL_WEBSOCKET_KEEPALIVE` |
|
||||
| **Accepted values** | Integer (Representing a delay in seconds) |
|
||||
| **Default** | `5` |
|
||||
| **Supported in** | CE, Enterprise Edition |
|
||||
|
||||
### WS Read Cookie
|
||||
|
||||
|
@ -75,34 +75,28 @@ hasura metadata apply
|
||||
</TabItem>
|
||||
<TabItem value="api" label="API">
|
||||
|
||||
You can add _health check_ for a database using the
|
||||
[pg_add_source](/api-reference/metadata-api/source.mdx#metadata-pg-add-source) Metadata API.
|
||||
You can add/update _health check_ for a database using the
|
||||
[pg_update_source](/api-reference/metadata-api/source.mdx#metadata-pg-update-source) Metadata API.
|
||||
|
||||
```http {17-24}
|
||||
```http {7-18}
|
||||
POST /v1/metadata HTTP/1.1
|
||||
Content-Type: application/json
|
||||
X-Hasura-Role: admin
|
||||
|
||||
{
|
||||
"type":"pg_add_source",
|
||||
"args":{
|
||||
"name":"<db_name>",
|
||||
"replace_configuration":true,
|
||||
"configuration":{
|
||||
"connection_info":{
|
||||
"database_url":{
|
||||
"from_env":"<DATABASE_URL_ENV>"
|
||||
}
|
||||
{
|
||||
"type": "pg_update_source",
|
||||
"args": {
|
||||
"name": "default",
|
||||
"health_check": {
|
||||
"test": {
|
||||
"sql": "SELECT 1"
|
||||
},
|
||||
"interval": 100,
|
||||
"timeout": 2,
|
||||
"retries": 3,
|
||||
"retry_interval": 2
|
||||
}
|
||||
},
|
||||
"health_check": {
|
||||
"test": {
|
||||
"sql": "SELECT 1"
|
||||
},
|
||||
"interval": 300,
|
||||
"timeout": 5,
|
||||
"retries": 3,
|
||||
"retry_interval": 5
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -42,7 +42,7 @@ the websocket layer are called `websocket-log`, logs from the Event Trigger syst
|
||||
|
||||
You can configure the GraphQL Engine to enable/disable certain log-types using the `--enabled-log-types` flag or the
|
||||
`HASURA_GRAPHQL_ENABLED_LOG_TYPES` env var. See
|
||||
[GraphQL Engine server config reference](/deployment/graphql-engine-flags/reference.mdx)
|
||||
[GraphQL Engine server config reference](/deployment/graphql-engine-flags/reference.mdx#log-level)
|
||||
|
||||
The default enabled **Community Edition** log-types are:
|
||||
`startup, http-log, webhook-log, websocket-log, jwk-refresh-log`
|
||||
@ -108,7 +108,7 @@ Apart from the above, there are other internal log-types which cannot be configu
|
||||
## Logging levels
|
||||
|
||||
You can set the desired logging level on the server using the `log-level` flag or the `HASURA_GRAPHQL_LOG_LEVEL` env
|
||||
var. See [GraphQL Engine server config reference](/deployment/graphql-engine-flags/reference.mdx).
|
||||
var. See [GraphQL Engine server config reference](/deployment/graphql-engine-flags/reference.mdx#log-level).
|
||||
|
||||
The default log-level is `info`.
|
||||
|
||||
|
@ -58,11 +58,11 @@ different data sources that need to be accessible to different teams.
|
||||
|
||||
To save you time, we've generated three **read-only** databases for you to use in this guide:
|
||||
|
||||
| Database | Description | Connection String |
|
||||
| ----------- | ---------------------------------------------------------------- | -------------------------------------------------------------------------------------------- |
|
||||
| Banking | A database containing banking information of the account holders | `postgresql://read_only:kd4555jkfjfkdj39f8f8d9d@35.236.11.122:5432/growth-docs-data-layer-1` |
|
||||
| Stocks | A database containing historic, proprietary market information | `postgresql://read_only:kd4555jkfjfkdj39f8f8d9d@35.236.11.122:5432/growth-docs-data-layer-2` |
|
||||
| Real Estate | A database containing real estate information | `postgresql://read_only:kd4555jkfjfkdj39f8f8d9d@35.236.11.122:5432/growth-docs-data-layer-3` |
|
||||
| Database | Description | Connection String |
|
||||
| ----------- | ---------------------------------------------------------------- | -------------------------------------------------------------------------------------- |
|
||||
| Banking | A database containing banking information of the account holders | `postgresql://read_only_user:readonlyuser@35.236.11.122:5432/growth-docs-data-layer-1` |
|
||||
| Stocks | A database containing historic, proprietary market information | `postgresql://read_only_user:readonlyuser@35.236.11.122:5432/growth-docs-data-layer-2` |
|
||||
| Real Estate | A database containing real estate information | `postgresql://read_only_user:readonlyuser@35.236.11.122:5432/growth-docs-data-layer-3` |
|
||||
|
||||
:::info Only available from Hasura Cloud
|
||||
|
||||
|
@ -46,7 +46,7 @@ curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | INSTALL
|
||||
You can also install a specific version of the CLI by providing the `VERSION` variable:
|
||||
|
||||
```bash
|
||||
curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.35.0 bash
|
||||
curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.36.0 bash
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
@ -71,7 +71,7 @@ curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | INSTALL
|
||||
You can also install a specific version of the CLI by providing the `VERSION` variable:
|
||||
|
||||
```bash
|
||||
curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.35.0 bash
|
||||
curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.36.0 bash
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
|
@ -66,6 +66,15 @@ hasura migrate create init --from-server --endpoint <hasura-cloud-project-url> -
|
||||
hasura migrate apply --endpoint <hasura-cloud-project-url> --admin-secret <admin-secret> --version <version-number> --skip-execution
|
||||
```
|
||||
|
||||
:::info When using a non-Postgres database
|
||||
|
||||
Please note that when using the `migrate create init --from-server` command, Hasura only supports Postgres databases.
|
||||
Should you wish to use a different database, you will need to manually create the migration files. For more information,
|
||||
please see this section of the [Migrations](/migrations-metadata-seeds/manage-migrations.mdx#create-manual-migrations)
|
||||
documentation.
|
||||
|
||||
:::
|
||||
|
||||
It is important to mark the migration as applied on the Cloud project to ensure that the schema that is already created
|
||||
on Hasura Cloud project is not attempted to be recreated again, which would end in an error state.
|
||||
|
||||
@ -142,6 +151,15 @@ hasura migrate create init --from-server --admin-secret <admin-secret> --databas
|
||||
hasura migrate apply --admin-secret <admin-secret> --version <version-number> --skip-execution
|
||||
```
|
||||
|
||||
:::info When using a non-Postgres database
|
||||
|
||||
Please note that when using the `migrate create init --from-server` command, Hasura only supports Postgres databases.
|
||||
Should you wish to use a different database, you will need to manually create the migration files. For more information,
|
||||
please see this section of the [Migrations](/migrations-metadata-seeds/manage-migrations.mdx#create-manual-migrations)
|
||||
documentation.
|
||||
|
||||
:::
|
||||
|
||||
### Step 4: Initialize Hasura Metadata
|
||||
|
||||
```bash
|
||||
|
@ -17,6 +17,14 @@ keywords:
|
||||
Hasura ships a special `cli-migrations` Docker image which can be used to automatically apply Migrations and Metadata
|
||||
when the server starts.
|
||||
|
||||
### What does it mean to "auto-apply" Migrations and Metadata?
|
||||
|
||||
Auto-applying migrations means that Hasura can automatically apply database schema changes or migrations to your
|
||||
underlying database without requiring manual intervention. This feature simplifies the process of keeping your database
|
||||
schema in sync with your GraphQL schema and makes it easier to evolve your application over time.
|
||||
|
||||
### How does it work?
|
||||
|
||||
This image is a drop-in place replacement for the standard Hasura GraphQL Engine
|
||||
[images](https://hub.docker.com/r/hasura/graphql-engine). This container provides a method to apply
|
||||
[Migrations and Metadata](/migrations-metadata-seeds/overview.mdx) automatically when the container starts up. It works
|
||||
@ -65,9 +73,10 @@ For `config v2`, see
|
||||
## Applying Migrations
|
||||
|
||||
The `migrations` and `metadata` directories created by the Hasura CLI in a Hasura Project can be mounted at the
|
||||
`/hasura-migrations` and `/hasura-metadata` paths of this Docker container and the container's entrypoint script will
|
||||
automatically apply the Migrations and Metadata before starting the server. If no directory is mounted at the designated
|
||||
paths, the server will start and ignore the Migrations and/or Metadata.
|
||||
`/hasura-migrations` and `/hasura-metadata`
|
||||
[paths of this Docker container and the container's entrypoint script](https://github.com/hasura/graphql-engine/blob/master/packaging/cli-migrations/v3/docker-entrypoint.sh#L12C1-L13)
|
||||
will automatically apply the Migrations and Metadata before starting the server. If no directory is mounted at the
|
||||
designated paths, the server will start and ignore the Migrations and/or Metadata.
|
||||
|
||||
You can also mount the Migrations/Metadata directories at some location other than the above by setting the following
|
||||
environment variables:
|
||||
@ -93,5 +102,6 @@ docker run -p 8080:8080 \
|
||||
## Applying only Metadata {#auto-apply-metadata}
|
||||
|
||||
If you're managing Migrations with a different tool and want to use this image to apply only the metadata, mount the
|
||||
`metadata` directory of your Hasura Project at the `/hasura-metadata` path of this Docker container the container’s
|
||||
entry point script will apply the Metadata before starting the server.
|
||||
`metadata` directory of your Hasura Project at the `/hasura-metadata`
|
||||
[path of this Docker container the container's entry point script](https://github.com/hasura/graphql-engine/blob/master/packaging/cli-migrations/v3/docker-entrypoint.sh#L13)
|
||||
will apply the Metadata before starting the server.
|
||||
|
@ -69,6 +69,14 @@ This will create a new folder named in the format `<timestamp>_init` within anot
|
||||
in Hasura which is being referenced. It will contain only an `up.sql` file which describes in SQL how to create the
|
||||
schema in full for that database.
|
||||
|
||||
:::info When using a non-Postgres database
|
||||
|
||||
Please note that when using the `migrate create init --from-server` command, Hasura only supports Postgres databases.
|
||||
Should you wish to use a different database, you will need to manually create the migration files. For more information,
|
||||
please see the section below.
|
||||
|
||||
:::
|
||||
|
||||
### Create a Migration manually {#create-manual-migrations}
|
||||
|
||||
While the Hasura Console can auto generate Migrations for every action, sometimes you might want to write the migrations
|
||||
|
@ -67,6 +67,15 @@ server using the following commands:
|
||||
hasura migrate create "init" --from-server --database-name <database-name>
|
||||
```
|
||||
|
||||
:::info When using a non-Postgres database
|
||||
|
||||
Please note that when using the `migrate create init --from-server` command, Hasura only supports Postgres databases.
|
||||
Should you wish to use a different database, you will need to manually create the migration files. For more information,
|
||||
please see this section of the [Migrations](/migrations-metadata-seeds/manage-migrations.mdx#create-manual-migrations)
|
||||
documentation.
|
||||
|
||||
:::
|
||||
|
||||
```bash
|
||||
## note down the version
|
||||
## mark the migration as applied on this server
|
||||
|
@ -33,6 +33,7 @@ To avoid this, you can disable GraphQL introspection on a per-role basis.
|
||||
|
||||
## Disabling GraphQL introspection for a role
|
||||
|
||||
Introspection can be disabled for a role as shown below:
|
||||
Introspection can be disabled by navigating to the `Security` tab, selecting `Schema Introspection` and modifying or
|
||||
adding the role you wish to disable introspection for.
|
||||
|
||||
<Thumbnail src="/img/security/disable-introspection.png" alt="Hasura Cloud Console Schema Introspection tab" />
|
||||
|
@ -36,7 +36,7 @@ have multiple subscriptions running at the same time they must be in separate qu
|
||||
|
||||
### Live queries
|
||||
|
||||
A live query subscription will return the latest result of the query being made and not necessarily all the individual
|
||||
A live query subscription will return the **latest result** of the query being made and not necessarily all the individual
|
||||
events leading up to the result. By default, updates are delivered to clients every **1 sec**.
|
||||
|
||||
See more details [here](/subscriptions/postgres/livequery/index.mdx).
|
||||
|
@ -14,13 +14,15 @@ slug: index
|
||||
|
||||
## Introduction
|
||||
|
||||
A Live query subscription will return the latest result of the query being made and not necessarily all the individual
|
||||
events leading up to the result.
|
||||
A Live query subscription will return the **latest result** of the query being made and not necessarily all the
|
||||
individual events leading up to the result, such as with a
|
||||
[streaming subscription](/subscriptions/postgres/streaming/index.mdx).
|
||||
|
||||
By default, updates are delivered to clients every **1 sec**.
|
||||
|
||||
See more details on
|
||||
[subscriptions execution](/subscriptions/postgres/livequery/execution.mdx).
|
||||
A live query is a query that is continuously monitored for changes in the database and automatically updates the query
|
||||
result whenever the underlying data changes. Live queries are a PostgreSQL-specific implementation that allow real-time
|
||||
updates to clients without the need for manual polling or refreshing.
|
||||
|
||||
## Convert a query to a subscription
|
||||
|
||||
@ -29,8 +31,8 @@ You can turn any query into a subscription by simply replacing `query` with `sub
|
||||
:::info Single subscription in each query caveat
|
||||
|
||||
Hasura follows the [GraphQL spec](https://graphql.github.io/graphql-spec/June2018/#sec-Single-root-field) which allows
|
||||
for only one root field in a subscription. You also cannot execute multiple separate subscriptions in one query. To
|
||||
have multiple subscriptions running at the same time they must be in separate queries.
|
||||
for only one root field in a subscription. You also cannot execute multiple separate subscriptions in one query. To have
|
||||
multiple subscriptions running at the same time they must be in separate queries.
|
||||
|
||||
:::
|
||||
|
||||
|
@ -15,8 +15,11 @@ slug: index
|
||||
## Introduction
|
||||
|
||||
A streaming subscription streams the response according to the cursor provided by the user while making the
|
||||
subscription. Streaming subscriptions can be used to subscribe only to the data which has been newly added to the result
|
||||
set.
|
||||
subscription. Streaming subscriptions can be used to subscribe only to the data which has been **newly added to the
|
||||
result set.**
|
||||
|
||||
This is different from a [live query](/subscriptions/postgres/livequery/index.mdx) subscription where only the latest
|
||||
value is returned to the client.
|
||||
|
||||
:::tip Supported from
|
||||
|
||||
@ -35,7 +38,7 @@ Streaming subscriptions work well with other Hasura features like
|
||||
[relationships](/schema/postgres/table-relationships/index.mdx#table-relationships) and also leverage the power of
|
||||
[subscriptions multiplexing](/subscriptions/postgres/livequery/execution.mdx#subscription-multiplexing).
|
||||
|
||||
:::info Confguration details
|
||||
:::info Configuration details
|
||||
|
||||
In the case of streaming subscriptions, the multiplexed batch size can be configured via
|
||||
`HASURA_GRAPHQL_STREAMING_QUERIES_MULTIPLEXED_BATCH_SIZE` and the refetch interval can be configured via
|
||||
|
@ -18,13 +18,11 @@ const config = {
|
||||
organizationName: 'hasura',
|
||||
projectName: 'graphql-engine',
|
||||
staticDirectories: ['static', 'public'],
|
||||
scripts: [
|
||||
{
|
||||
src: "https://www.chatbase.co/embed.min.js",
|
||||
id: "iiL6XJbYo6tRR_M4rUB9F",
|
||||
defer: true,
|
||||
}
|
||||
],
|
||||
customFields: {
|
||||
docsBotEndpointURL: process.env.NODE_ENV === "development" ? "ws://localhost:8000/hasura-docs-ai" : "wss://hasura-docs-bot.deno.dev/hasura-docs-ai",
|
||||
hasuraVersion: 2,
|
||||
},
|
||||
scripts: [],
|
||||
webpack: {
|
||||
jsLoader: isServer => ({
|
||||
loader: require.resolve('swc-loader'),
|
||||
|
@ -35,6 +35,7 @@
|
||||
"graphiql": "^1.5.1",
|
||||
"graphql": "^15.7.2",
|
||||
"graphql-ws": "^5.11.2",
|
||||
"markdown-to-jsx": "^7.3.2",
|
||||
"prism-react-renderer": "^1.3.5",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
|
271
docs/src/components/AiChatBot/AiChatBot.tsx
Normal file
@ -0,0 +1,271 @@
|
||||
import React, { useEffect, useRef, useState } from 'react';
import Markdown from 'markdown-to-jsx';
import './styles.css';
import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
import { CloseIcon, RespondingIconGray, SparklesIcon } from '@site/src/components/AiChatBot/icons';
import useLocalStorage from "@site/src/components/AiChatBot/useLocalStorage";

// One exchange in the conversation. Either side may be empty: the seeded
// greeting entries have no userMessage, and a streaming reply starts with
// an empty botResponse that grows as "responsePart" events arrive.
interface Message {
  userMessage: string;
  botResponse: string;
}

// Shape of the JSON payload sent to the websocket endpoint on submit.
interface Query {
  previousMessages: Message[];
  currentUserInput: string;
}

// Websocket Event data types (stringified)
// { type: "loading", message: "Processing your request..." }
// { type: "responsePart", message: "...part of response..." }
// { type: "error", message: "error description" }
// { type: "endOfStream", message: "End of stream..." }

// Greeting messages shown before the user has asked anything; also restored
// when the user presses "Clear".
const initialMessages: Message[] = [
  {
    userMessage: '',
    botResponse: "Hi! I'm HasuraAI, the docs chatbot.",
  },
  {
    userMessage: '',
    botResponse: 'You can ask me anything about Hasura and I will try to answer.',
  },
  {
    userMessage: '',
    botResponse: 'Always check the docs for official information.',
  },
];


// Floating docs-AI chat widget. Keeps a websocket open to the docs bot
// (auto-reconnecting on close/error), streams the in-flight reply into
// `currentMessage`, and appends finished exchanges to `messages`, which is
// persisted in localStorage via useLocalStorage. Must only be rendered in
// the browser (it touches localStorage/WebSocket at render time) — the
// caller wraps it in <BrowserOnly>.
function AiChatBot() {
  // Get the docsBotEndpointURL and hasuraVersion from the siteConfig
  const {
    siteConfig: { customFields },
  } = useDocusaurusContext();
  // Manage the open state of the popup
  const [isOpen, setIsOpen] = useState<boolean>(false);
  // Manage the bot responding state
  const [isResponding, setIsResponding] = useState<boolean>(false)
  // Manage the text input
  const [input, setInput] = useState<string>('');
  // Manage the historical messages (persisted per Hasura docs version)
  const [messages, setMessages] = useLocalStorage<Message[]>(`hasuraV${customFields.hasuraVersion}BotMessages`, initialMessages);
  // Manage the current message
  const [currentMessage, setCurrentMessage] = useState<Message>({ userMessage: '', botResponse: '' });
  // Manage scrolling to the end
  const [isAutoScroll, setIsAutoScroll] = useState<boolean>(true);
  // Manage the websocket
  const [ws, setWs] = useState<WebSocket | null>(null);
  // Set is Websocket connecting
  const [isConnecting, setIsConnecting] = useState<boolean>(true);

  // Use a ref because of the useEffect closure issue: the websocket
  // onmessage handler is created once (deps []), so it reads the latest
  // in-flight message through this ref rather than stale state.
  const currentMessageRef = useRef<Message>({ userMessage: '', botResponse: '' });

  // Enables scrolling to the end
  const scrollDiv = useRef<HTMLDivElement>(null);

  const { docsBotEndpointURL, hasuraVersion } = customFields as { docsBotEndpointURL: string; hasuraVersion: number };

  // NOTE(review): localStorage.getItem returns string | null at runtime; the
  // `"null"` literal in this cast looks unintended (it is subsumed by string)
  // — a missing ID is sent to the endpoint as the text "null". Confirm the
  // backend expects that.
  const storedUserID = localStorage.getItem('hasuraDocsUserID') as string | "null";

  // Effect to auto-scroll to the bottom if autoScroll is true
  useEffect(() => {
    if (isAutoScroll) {
      scrollDiv.current?.scrollTo({
        top: scrollDiv.current.scrollHeight,
        behavior: 'smooth'
      });
    }
  }, [currentMessage.botResponse]);

  // Detect if user scrolls up and disable auto-scrolling; re-enable it when
  // the user scrolls back to (within 2px of) the bottom.
  const handleScroll = (e) => {
    const atBottom = Math.abs(scrollDiv.current?.scrollHeight - Math.floor(e.target.scrollTop + e.target.clientHeight)) < 2;
    setIsAutoScroll(atBottom);
  };


  // Update the ref when the currentMessage changes ie: when the endpoint is responding
  useEffect(() => {
    currentMessageRef.current = currentMessage;
  }, [currentMessage]);

  // Manage the websocket and set event listener for messages.
  // Runs once on mount; reconnects with a 3s backoff on close/error and
  // tears the socket down on unmount.
  useEffect(() => {
    let websocket;
    let reconnectInterval;

    const connectWebSocket = () => {
      websocket = new WebSocket(encodeURI(`${docsBotEndpointURL}?version=${hasuraVersion}&userId=${storedUserID}`));

      websocket.onopen = () => {
        console.log('Connected to the websocket');
        setIsConnecting(false);
        clearTimeout(reconnectInterval);
      };

      websocket.onmessage = (event) => {

        // Default keeps `response` well-formed if parsing fails below.
        let response = { type: "", message: "" };

        try {
          response = JSON.parse(event.data) as {"type": string, "message": string}
        } catch (e) {
          console.error("error parsing websocket message", e);
        }

        switch (response.type) {
          case "endOfStream": {
            // Reply finished: move the in-flight exchange into history
            // (read via the ref — see currentMessageRef above).
            console.log('end of stream');
            setMessages((prevMessages: Message[]) => [...prevMessages, currentMessageRef.current]);
            setCurrentMessage({ userMessage: '', botResponse: '' });
            setIsResponding(false);
            break;
          }
          case "responsePart": {
            // Append the streamed chunk to the in-flight bot response.
            setIsResponding(true);
            setCurrentMessage(prevState => {
              return { ...prevState, botResponse: prevState?.botResponse + response.message };
            });
            break;
          }
          case "error": {
            console.error("error", response.message);
            break;
          }
          case "loading": {
            console.log("loading", response.message);
            break;
          }
          default: {
            console.error("unknown response type", response.type);
            break;
          }
        }
      };

      websocket.onclose = () => {
        console.log('WebSocket closed. Attempting to reconnect...');
        setIsConnecting(true);
        setIsResponding(false);
        reconnectInterval = setTimeout(connectWebSocket, 3000); // attempt to reconnect every 3 seconds
      };

      websocket.onerror = error => {
        console.error('WebSocket error:', error);
        setIsConnecting(true);
        setIsResponding(false);
        // Closing triggers onclose above, which schedules the reconnect.
        websocket.close();
      };

      setWs(websocket);
    };

    connectWebSocket();
    return () => {
      clearTimeout(reconnectInterval);
      if (websocket) {
        websocket.close();
      }
    };
  }, []);

  // Send the query to the websocket when the user submits the form
  const handleSubmit = async () => {
    // if the input is empty, do nothing
    if (!input) {
      return;
    }

    if (ws) {
      // Payload matches the Query interface above.
      const toSend = JSON.stringify({ previousMessages: messages, currentUserInput: input });
      setCurrentMessage({ userMessage: input, botResponse: '' });
      setInput('');
      ws.send(toSend);
      setIsResponding(true);
    }

  };

  return (
    <div className="chat-popup">
      {isOpen ? (
        <button className="close-chat-button" onClick={() => setIsOpen(!isOpen)}>
          {CloseIcon} Close Chat
        </button>
      ) : (
        <button className="open-chat-button" onClick={() => setIsOpen(!isOpen)}>
          {SparklesIcon} Hasura Docs AI Chat
        </button>
      )}
      {isOpen && (
        <div className="chat-window">
          <div className="info-bar">
            <div className={"bot-name-pic-container"}>
              <div className="bot-name">HasuraAI</div>
              <img src={"/docs/img/hasura-ai-profile-pic.png"} height={30} width={30} className="bot-pic"/>
            </div>
            <button className="clear-button" onClick={() => setMessages(initialMessages)}>Clear</button>
          </div>
          <div className="messages-container" onScroll={handleScroll} ref={scrollDiv}>
            {/* Finished exchanges from history */}
            {messages.map((msg, index) => (
              <div key={index}>
                {msg.userMessage && (
                  <div className="user-message-container">
                    <div className="formatted-text message user-message">
                      <Markdown>{msg.userMessage}</Markdown>
                    </div>
                  </div>
                )}
                {msg.botResponse && (
                  <div className="bot-message-container">
                    <div className="formatted-text message bot-message">
                      <Markdown>{msg.botResponse}</Markdown>
                    </div>
                  </div>
                )}
              </div>
            ))}
            {/* In-flight exchange (still streaming) */}
            <div className="user-message-container">
              {currentMessage.userMessage && (
                <div className="formatted-text message user-message">
                  <Markdown>{currentMessage.userMessage}</Markdown>
                </div>
              )}
            </div>
            <div>
              <div className="bot-message-container">
                {currentMessage.botResponse && (
                  <div className="formatted-text message bot-message">
                    <Markdown>{currentMessage.botResponse}</Markdown>
                  </div>
                )}
              </div>
              <div className="responding-div">
                {isResponding ?
                  RespondingIconGray : null}
              </div>
            </div>
          </div>
          {/* Handles scrolling to the end */}
          {/*<div ref={messagesEndRef} />*/}
          <form
            className="input-container"
            onSubmit={e => {
              e.preventDefault();
              handleSubmit();
            }}
          >
            <input disabled={isResponding || isConnecting} className="input-text" value={input} onChange={e => setInput(e.target.value)} />
            <button disabled={isResponding || isConnecting} className="input-button" type="submit">
              {isConnecting ? "Connecting..." : isResponding ? "Responding..." : "Send"}
            </button>
          </form>
        </div>
      )}
    </div>
  );
}

export default AiChatBot;
|
13
docs/src/components/AiChatBot/icons.tsx
Normal file
@ -0,0 +1,13 @@
|
||||
import React from "react";

// Static SVG icon elements for the AI chatbot UI.
//
// Fix: JSX requires camelCase SVG presentation attributes (fillRule,
// clipRule, fontSize, fontWeight, fontFamily). The previous hyphenated
// HTML forms (fill-rule, font-size, …) are invalid JSX props — React logs
// "invalid DOM property" warnings and the attributes are not applied.

// "Sparkles" glyph shown on the open-chat button. The <text> attribution
// (required by the Noun Project license) sits below the 24x30 viewBox, so
// it is kept in the markup but never rendered visibly.
export const SparklesIcon = <svg className="sparkles-icon" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 30" x="0px" y="0px"><path d="m15.6968,12.4905l-2.8295,1.4148c-.8491.4245-1.5375,1.113-1.962,1.962l-1.4148,2.8295c-.2021.4042-.7789.4042-.981,0l-1.4148-2.8295c-.4245-.8491-1.113-1.5375-1.962-1.962l-2.8295-1.4148c-.4042-.2021-.4042-.7789,0-.981l2.8295-1.4148c.8491-.4246,1.5375-1.113,1.962-1.9621l1.4148-2.8295c.2021-.4042.7789-.4042.981,0l1.4148,2.8295c.4245.8491,1.113,1.5375,1.962,1.9621l2.8295,1.4148c.4042.2021.4042.7789,0,.981Zm6.1732,6.2993l-1.2127-.6063c-.3639-.182-.6589-.477-.8409-.8409l-.6063-1.2126c-.0866-.1732-.3338-.1732-.4204,0l-.6063,1.2126c-.1819.3639-.477.6589-.8409.8409l-1.2127.6063c-.1732.0866-.1732.3338,0,.4204l1.2127.6063c.3639.1819.6589.477.8409.8409l.6063,1.2126c.0866.1732.3338.1732.4204,0l.6063-1.2126c.1819-.3639.477-.6589.8409-.8409l1.2127-.6063c.1732-.0866.1732-.3338,0-.4204Zm0-14l-1.2127-.6063c-.3639-.182-.6589-.477-.8409-.8409l-.6063-1.2126c-.0866-.1732-.3338-.1732-.4204,0l-.6063,1.2126c-.1819.3639-.477.6589-.8409.8409l-1.2127.6063c-.1732.0866-.1732.3338,0,.4204l1.2127.6063c.3639.1819.6589.477.8409.8409l.6063,1.2126c.0866.1732.3338.1732.4204,0l.6063-1.2126c.1819-.3639.477-.6589.8409-.8409l1.2127-.6063c.1732-.0866.1732-.3338,0-.4204Z"/><text x="0" y="39" fill="#111111" fontSize="5px" fontWeight="bold" fontFamily="'Helvetica Neue', Helvetica, Arial-Unicode, Arial, Sans-serif">Created by Royyan Wijaya</text><text x="0" y="44" fill="#000000" fontSize="5px" fontWeight="bold" fontFamily="'Helvetica Neue', Helvetica, Arial-Unicode, Arial, Sans-serif">from the Noun Project</text></svg>

// "X" glyph shown on the close-chat button.
export const CloseIcon = <svg className="close-icon" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 30" fill="none" x="0px" y="0px"><path fillRule="evenodd" clipRule="evenodd" d="M5.69292 18.3071C5.30239 17.9166 5.30239 17.2834 5.69292 16.8929L16.8929 5.69292C17.2834 5.30239 17.9166 5.30239 18.3071 5.69292C18.6977 6.08344 18.6977 6.71661 18.3071 7.10713L7.10713 18.3071C6.71661 18.6977 6.08344 18.6977 5.69292 18.3071Z" fill="white"/><path fillRule="evenodd" clipRule="evenodd" d="M5.69292 5.69292C6.08344 5.30239 6.71661 5.30239 7.10713 5.69292L18.3071 16.8929C18.6977 17.2834 18.6977 17.9166 18.3071 18.3071C17.9166 18.6977 17.2834 18.6977 16.8929 18.3071L5.69292 7.10713C5.30239 6.71661 5.30239 6.08344 5.69292 5.69292Z" fill="white"/></svg>

// Animated pulsing-circle indicator shown while the bot is streaming a reply.
export const RespondingIconGray = <svg width="30" height="30" viewBox="0 0 40 40" xmlns="http://www.w3.org/2000/svg">
  <circle cx="20" cy="20" fill="none" r="10" stroke="#BABABA" strokeWidth="2">
    <animate attributeName="r" from="8" to="20" dur="1.5s" begin="0s" repeatCount="indefinite"/>
    <animate attributeName="opacity" from="1" to="0" dur="1.5s" begin="0s" repeatCount="indefinite"/>
  </circle>
  <circle cx="20" cy="20" fill="#BABABA" r="10"/>
</svg>
|
213
docs/src/components/AiChatBot/styles.css
Normal file
@ -0,0 +1,213 @@
|
||||
.chat-popup {
|
||||
position: fixed;
|
||||
bottom: 10px;
|
||||
right: 10px;
|
||||
z-index: 1000;
|
||||
}
|
||||
|
||||
.open-chat-button {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 40px;
|
||||
background-color: #1699e2;
|
||||
color: white;
|
||||
border-radius: 8px;
|
||||
padding-right: 15px;
|
||||
padding-left: 15px;
|
||||
font-weight: bold;
|
||||
border: none;
|
||||
margin-bottom: 1.5rem;
|
||||
margin-right: 1.5rem;
|
||||
box-shadow: 0px 0px 10px rgba(0, 0, 0, 0.2);
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.close-chat-button {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 40px;
|
||||
background-color: #1699e2;
|
||||
color: white;
|
||||
border-radius: 8px;
|
||||
padding-right: 15px;
|
||||
padding-left: 15px;
|
||||
border: none;
|
||||
box-shadow: 0px 0px 10px rgba(0, 0, 0, 0.2);
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.sparkles-icon {
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
fill: white;
|
||||
margin-top: 3px;
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
.chat-window {
|
||||
width: 100%;
|
||||
max-width: 500px;
|
||||
border: 1px solid #ccc;
|
||||
background-color: #fff;
|
||||
padding: 15px;
|
||||
box-shadow: 0px 0px 10px rgba(0, 0, 0, 0.2);
|
||||
margin-top: 10px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
max-height: 90vh;
|
||||
min-height: 50vh;
|
||||
height: auto;
|
||||
overflow: auto;
|
||||
border-radius: 10px;
|
||||
}
|
||||
|
||||
.close-icon {
|
||||
width: 23px;
|
||||
height: 23px;
|
||||
margin-top: 5px;
|
||||
fill: white;
|
||||
margin-right: 3px;
|
||||
}
|
||||
|
||||
.formatted-text {
|
||||
white-space: pre-line;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
.formatted-text a {
|
||||
color: blue;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.messages-container {
|
||||
overflow-y: auto;
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.message {
|
||||
border-radius: 8px;
|
||||
padding: 10px 15px;
|
||||
margin: 5px 0;
|
||||
max-width: 80%;
|
||||
}
|
||||
|
||||
.user-message-container,
|
||||
.bot-message-container {
|
||||
width: 100%;
|
||||
margin: 3px 0;
|
||||
}
|
||||
|
||||
.user-message-container {
|
||||
display: flex;
|
||||
justify-content: flex-end;
|
||||
}
|
||||
|
||||
.bot-message-container {
|
||||
display: flex;
|
||||
justify-content: flex-start;
|
||||
}
|
||||
|
||||
.user-message,
|
||||
.bot-message {
|
||||
display: block;
|
||||
|
||||
}
|
||||
|
||||
.user-message {
|
||||
text-align: right;
|
||||
background-color: #1699e2;
|
||||
color: white;
|
||||
border-top-right-radius: 0;
|
||||
}
|
||||
|
||||
.bot-message {
|
||||
text-align: left;
|
||||
background-color: #e9e9e9;
|
||||
border-top-left-radius: 0;
|
||||
}
|
||||
|
||||
.responding-message {
|
||||
}
|
||||
|
||||
input {
|
||||
width: 80%;
|
||||
padding: 10px;
|
||||
border-radius: 5px 0 0 5px;
|
||||
border: 1px solid #ccc;
|
||||
outline: none;
|
||||
height: 40px;
|
||||
box-sizing: border-box;
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.input-container {
|
||||
display: flex;
|
||||
margin-top: auto;
|
||||
width: 100%;
|
||||
font-size: 16px;
|
||||
background-color: #fff;
|
||||
}
|
||||
|
||||
.input-text {
|
||||
font-size: 16px;
|
||||
color: #333;
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
.input-text:disabled {
|
||||
background-color: #eeeeee !important;
|
||||
}
|
||||
|
||||
.input-button {
|
||||
background-color: #1699e2;
|
||||
color: white;
|
||||
padding-left: 15px;
|
||||
padding-right: 15px;
|
||||
border: none;
|
||||
height: 40px;
|
||||
border-radius: 0 5px 5px 0;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.input-button:disabled {
|
||||
background-color: #ababab;
|
||||
}
|
||||
|
||||
.info-bar {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
margin-bottom: 10px;
|
||||
background-color: #1699e2;
|
||||
border-radius: 8px 8px 0 0;
|
||||
padding: 10px 15px;
|
||||
}
|
||||
|
||||
.bot-name-pic-container {
|
||||
display: flex;
|
||||
color: white;
|
||||
}
|
||||
|
||||
.bot-name {
|
||||
margin-right: 10px;
|
||||
font-weight: bold;
|
||||
font-size: 1.2rem;
|
||||
}
|
||||
|
||||
.bot-pic {
|
||||
border-radius: 50%;
|
||||
}
|
||||
|
||||
.clear-button {
|
||||
background-color: transparent;
|
||||
border: none;
|
||||
color: white;
|
||||
cursor: pointer;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
html[data-theme=dark] code {
|
||||
background-color: #e0e0e0;
|
||||
}
|
56
docs/src/components/AiChatBot/useLocalStorage.ts
Normal file
@ -0,0 +1,56 @@
|
||||
import { useState } from 'react';
|
||||
export const useLocalStorage = <T>(key: string, defaultValue: T) => {
|
||||
// Create state variable to store localStorage value in state
|
||||
const [localStorageValue, setLocalStorageValue] = useState(() => {
|
||||
try {
|
||||
const value = localStorage.getItem(key);
|
||||
// If value is already present in localStorage then return it
|
||||
|
||||
// Else set default value in localStorage and then return it
|
||||
if (value) {
|
||||
let parsedValue = JSON.parse(value);
|
||||
|
||||
if (Array.isArray(parsedValue)) {
|
||||
const filteredValue = parsedValue.filter(item => !!item);
|
||||
|
||||
// Update localStorage if non-truthy values were filtered out
|
||||
if (filteredValue.length !== parsedValue.length) {
|
||||
parsedValue = filteredValue;
|
||||
localStorage.setItem(key, JSON.stringify(filteredValue));
|
||||
}
|
||||
}
|
||||
|
||||
return parsedValue as T;
|
||||
} else {
|
||||
localStorage.setItem(key, JSON.stringify(defaultValue));
|
||||
return defaultValue;
|
||||
}
|
||||
} catch (error) {
|
||||
localStorage.setItem(key, JSON.stringify(defaultValue));
|
||||
return defaultValue;
|
||||
}
|
||||
});
|
||||
|
||||
// this method update our localStorage and our state
|
||||
const setLocalStorageStateValue = valueOrFn => {
|
||||
let newValue: T;
|
||||
if (typeof valueOrFn === 'function') {
|
||||
const fn = valueOrFn as (value: T) => T;
|
||||
newValue = fn(localStorageValue);
|
||||
} else {
|
||||
newValue = valueOrFn;
|
||||
}
|
||||
|
||||
// Filter out non-truthy values if newValue is an array
|
||||
if (Array.isArray(newValue)) {
|
||||
newValue = newValue.filter(item => !!item) as T;
|
||||
}
|
||||
|
||||
localStorage.setItem(key, JSON.stringify(newValue));
|
||||
setLocalStorageValue(newValue);
|
||||
};
|
||||
|
||||
return [localStorageValue, setLocalStorageStateValue] as const;
|
||||
};
|
||||
|
||||
export default useLocalStorage;
|
30
docs/src/components/BannerDismissable/DDNBanner.css
Normal file
@ -0,0 +1,30 @@
|
||||
.banner {
|
||||
background-color: #1699e2;
|
||||
color: white;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
position: relative;
|
||||
padding: 15px;
|
||||
font-size: 16px;
|
||||
}
|
||||
|
||||
.close-btn {
|
||||
background: none;
|
||||
color: white;
|
||||
border: none;
|
||||
position: absolute;
|
||||
right: 10px;
|
||||
top: 10px;
|
||||
font-size: 18px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.close-btn:focus {
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.banner a {
|
||||
color: white;
|
||||
cursor: pointer;
|
||||
}
|
21
docs/src/components/BannerDismissable/DDNBanner.tsx
Normal file
@ -0,0 +1,21 @@
|
||||
import React, { useState } from 'react';
import './DDNBanner.css';

/**
 * Dismissable top-of-page banner pointing readers at the Hasura DDN docs.
 * Renders nothing once closed; the dismissal is per-mount (not persisted),
 * so the banner reappears on a full page reload.
 */
export const DDNBanner = () => {
  const [dismissed, setDismissed] = useState(false);

  if (dismissed) {
    return null;
  }

  return (
    <div className="banner">
      <div>
        Hasura DDN is the future of data delivery. <a href="https://hasura.io/docs/3.0/index">Click here for the Hasura DDN docs</a>.
      </div>
      {/* "X" close control — hides the banner for the rest of the session */}
      <button className="close-btn" onClick={() => setDismissed(true)}>
        <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24">
          <path d="M6 18L18 6M6 6l12 12" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round"/>
        </svg>
      </button>
    </div>
  );
};
|
@ -5,7 +5,8 @@ import GraphQLWithHasuraBanner from '@site/src/components/GraphQLWithHasuraBanne
|
||||
import CustomFooter from '@site/src/components/CustomFooter';
|
||||
import styles from './styles.module.scss';
|
||||
import { Redirect } from '@docusaurus/router';
|
||||
|
||||
import AiChatBot from "@site/src/components/AiChatBot/AiChatBot";
|
||||
import BrowserOnly from '@docusaurus/BrowserOnly';
|
||||
const CustomDocItem = props => {
|
||||
useEffect(() => {
|
||||
// This function is adds <wbr> tags to code blocks within a table
|
||||
@ -77,6 +78,9 @@ const CustomDocItem = props => {
|
||||
{/*<PageHelpful />*/}
|
||||
<HasuraConBanner {...props} />
|
||||
<GraphQLWithHasuraBanner />
|
||||
<BrowserOnly fallback={<div>Loading...</div>}>
|
||||
{() => <AiChatBot/>}
|
||||
</BrowserOnly>
|
||||
<CustomFooter />
|
||||
</div>
|
||||
</div>
|
||||
|
11
docs/src/theme/Navbar/index.js
Normal file
@ -0,0 +1,11 @@
|
||||
import React from 'react';
import Navbar from '@theme-original/Navbar';
import {DDNBanner} from "@site/src/components/BannerDismissable/DDNBanner";

// Docusaurus theme "swizzle" wrapper for the navbar: renders the dismissable
// DDN announcement banner above the stock navbar on every page, forwarding
// all props to the original component unchanged.
export default function NavbarWrapper(props) {
  return (
    <>
      <DDNBanner/>
      <Navbar {...props} />
    </>
  );
}
|
BIN
docs/static/img/databases/mysql/aiven/aiven-create-mysql.png
vendored
Normal file
After Width: | Height: | Size: 130 KiB |
BIN
docs/static/img/databases/mysql/aiven/aiven-mysql-change-ip.png
vendored
Normal file
After Width: | Height: | Size: 63 KiB |
BIN
docs/static/img/databases/mysql/aiven/aiven-mysql-choose-driver.png
vendored
Normal file
After Width: | Height: | Size: 56 KiB |
BIN
docs/static/img/databases/mysql/aiven/aiven-mysql-configuration.png
vendored
Normal file
After Width: | Height: | Size: 88 KiB |
BIN
docs/static/img/databases/mysql/aiven/aiven-mysql-connection-uri.png
vendored
Normal file
After Width: | Height: | Size: 79 KiB |
BIN
docs/static/img/databases/mysql/aiven/aiven-mysql-finish-connecting.png
vendored
Normal file
After Width: | Height: | Size: 50 KiB |
BIN
docs/static/img/databases/mysql/aiven/aiven-mysql-hasura-cloud-ip-set.png
vendored
Normal file
After Width: | Height: | Size: 68 KiB |
BIN
docs/static/img/databases/mysql/aiven/aiven-mysql-hasura-cloud-ip.png
vendored
Normal file
After Width: | Height: | Size: 75 KiB |
BIN
docs/static/img/databases/redshift/make-graphql-query.png
vendored
Normal file
After Width: | Height: | Size: 58 KiB |
BIN
docs/static/img/databases/redshift/redshift-add-agent.png
vendored
Normal file
After Width: | Height: | Size: 131 KiB |
BIN
docs/static/img/databases/redshift/redshift-add-service1.png
vendored
Normal file
After Width: | Height: | Size: 189 KiB |
BIN
docs/static/img/databases/redshift/redshift-add-service2.png
vendored
Normal file
After Width: | Height: | Size: 121 KiB |
BIN
docs/static/img/databases/redshift/redshift-tracking-tables1.png
vendored
Normal file
After Width: | Height: | Size: 240 KiB |
BIN
docs/static/img/databases/redshift/redshift-tracking-tables2.png
vendored
Normal file
After Width: | Height: | Size: 243 KiB |
BIN
docs/static/img/hasura-ai-profile-pic.png
vendored
Normal file
After Width: | Height: | Size: 250 KiB |
BIN
docs/static/img/security/disable-introspection.png
vendored
Before Width: | Height: | Size: 36 KiB After Width: | Height: | Size: 56 KiB |
@ -5513,6 +5513,7 @@ __metadata:
|
||||
graphiql: ^1.5.1
|
||||
graphql: ^15.7.2
|
||||
graphql-ws: ^5.11.2
|
||||
markdown-to-jsx: ^7.3.2
|
||||
prettier: ^3.0.0
|
||||
prism-react-renderer: ^1.3.5
|
||||
react: ^17.0.2
|
||||
@ -7931,6 +7932,15 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"markdown-to-jsx@npm:^7.3.2":
|
||||
version: 7.3.2
|
||||
resolution: "markdown-to-jsx@npm:7.3.2"
|
||||
peerDependencies:
|
||||
react: ">= 0.14.0"
|
||||
checksum: 8885c6343b71570b0a7ec16cd85a49b853a830234790ee7430e2517ea5d8d361ff138bd52147f650790f3e7b3a28a15c755fc16f8856dd01ddf09a6161782e06
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"mdast-squeeze-paragraphs@npm:^4.0.0":
|
||||
version: 4.0.0
|
||||
resolution: "mdast-squeeze-paragraphs@npm:4.0.0"
|
||||
|
@ -20,11 +20,11 @@
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1696330627,
|
||||
"narHash": "sha256-o0WGId7Vuhl+5KeNgh343Orex0YUMVCopf1LWr+kI/k=",
|
||||
"lastModified": 1699914561,
|
||||
"narHash": "sha256-b296O45c3Jgj8GEFg/NN7ZOJjBBCHr1o2iA4yoJ3OKE=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "7b6b919f3a707c566b8592106bb7ce070721b137",
|
||||
"rev": "2f8742189e9ef86961ab90a30c68eb844565578a",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
@ -4,13 +4,16 @@ import { Button } from '../../../new-components/Button';
|
||||
import { FaLink } from 'react-icons/fa';
|
||||
import { Badge } from '../../../new-components/Badge';
|
||||
import { Analytics } from '../../../features/Analytics';
|
||||
import { Table } from '../../../features/hasura-metadata-types';
|
||||
|
||||
interface CreateRestEndpointProps {
|
||||
tableName: string;
|
||||
dataSourceName: string;
|
||||
table: Table;
|
||||
}
|
||||
|
||||
export const CreateRestEndpoint = (props: CreateRestEndpointProps) => {
|
||||
const { tableName } = props;
|
||||
const { tableName, dataSourceName, table } = props;
|
||||
const [isModalOpen, setIsModalOpen] = React.useState(false);
|
||||
|
||||
const toggleModal = () => {
|
||||
@ -31,7 +34,12 @@ export const CreateRestEndpoint = (props: CreateRestEndpointProps) => {
|
||||
</Button>
|
||||
</Analytics>
|
||||
{isModalOpen && (
|
||||
<RestEndpointModal onClose={toggleModal} tableName={tableName} />
|
||||
<RestEndpointModal
|
||||
onClose={toggleModal}
|
||||
tableName={tableName}
|
||||
dataSourceName={dataSourceName}
|
||||
table={table}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
|
@ -54,7 +54,11 @@ class Heading extends React.Component {
|
||||
<h2>{currentValue}</h2>
|
||||
<div className="text-base font-normal flex gap-2">
|
||||
<TryOperation table={table} dispatch={dispatch} source={source} />
|
||||
<CreateRestEndpoint tableName={table.table_name} />
|
||||
<CreateRestEndpoint
|
||||
tableName={table.table_name}
|
||||
dataSourceName={source}
|
||||
table={{ name: table.table_name, schema: table.table_schema }}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="pb-5">
|
||||
@ -75,7 +79,11 @@ class Heading extends React.Component {
|
||||
<h2>{currentValue}</h2>
|
||||
<div className="text-base font-normal flex gap-2">
|
||||
<TryOperation table={table} dispatch={dispatch} source={source} />
|
||||
<CreateRestEndpoint tableName={table.table_name} />
|
||||
<CreateRestEndpoint
|
||||
tableName={table.table_name}
|
||||
dataSourceName={source}
|
||||
table={{ name: table.table_name, schema: table.table_schema }}
|
||||
/>
|
||||
</div>
|
||||
<div
|
||||
onClick={this.toggleEditting}
|
||||
|
@ -10,6 +10,7 @@ import { CardedTable } from '../../../../../new-components/CardedTable';
|
||||
import { Badge, BadgeColor } from '../../../../../new-components/Badge';
|
||||
import { Link, browserHistory } from 'react-router';
|
||||
import { FaExclamation, FaExternalLinkAlt } from 'react-icons/fa';
|
||||
import { Table } from '../../../../../features/hasura-metadata-types';
|
||||
|
||||
const ENDPOINTS: {
|
||||
value: EndpointType;
|
||||
@ -25,12 +26,17 @@ const ENDPOINTS: {
|
||||
export interface RestEndpointModalProps {
|
||||
onClose: () => void;
|
||||
tableName: string;
|
||||
dataSourceName: string;
|
||||
table: Table;
|
||||
}
|
||||
|
||||
export const RestEndpointModal = (props: RestEndpointModalProps) => {
|
||||
const { onClose, tableName } = props;
|
||||
const { onClose, tableName, dataSourceName } = props;
|
||||
const { createRestEndpoints, endpointDefinitions, isLoading } =
|
||||
useCreateRestEndpoints();
|
||||
useCreateRestEndpoints({
|
||||
dataSourceName: props.dataSourceName,
|
||||
table: props.table,
|
||||
});
|
||||
|
||||
const tableEndpointDefinitions = endpointDefinitions?.[tableName] ?? {};
|
||||
|
||||
@ -62,7 +68,7 @@ export const RestEndpointModal = (props: RestEndpointModalProps) => {
|
||||
onClose={onClose}
|
||||
disabled={selectedMethods.length === 0}
|
||||
onSubmit={() => {
|
||||
createRestEndpoints(tableName, selectedMethods, {
|
||||
createRestEndpoints(dataSourceName, tableName, selectedMethods, {
|
||||
onSuccess: () => {
|
||||
hasuraToast({
|
||||
type: 'success',
|
||||
|
@ -84,6 +84,7 @@ const TopNav: React.FC<TopNavProps> = ({ location }) => {
|
||||
?.schema_registry_dumps_v2[0] || [];
|
||||
if (
|
||||
data &&
|
||||
data.diff_with_previous_schema &&
|
||||
data.diff_with_previous_schema[0] &&
|
||||
data.diff_with_previous_schema[0].schema_diff_data &&
|
||||
data.change_recorded_at
|
||||
|
@ -110,6 +110,7 @@ export const ReactTableWrapper: React.VFC<ReactTableWrapperProps> = ({
|
||||
<FaExternalLinkAlt />
|
||||
</span>
|
||||
) : (
|
||||
// eslint-disable-next-line jsx-a11y/anchor-is-valid
|
||||
<a
|
||||
onClick={() => {
|
||||
relationships?.onClick({
|
||||
@ -269,6 +270,7 @@ export const ReactTableWrapper: React.VFC<ReactTableWrapperProps> = ({
|
||||
key={`${row.id}-${i}`}
|
||||
data-testid={`@table-cell-${row.id}-${i}`}
|
||||
style={{ maxWidth: '20ch' }}
|
||||
className="px-sm py-xs whitespace-nowrap text-muted overflow-hidden text-ellipsis"
|
||||
>
|
||||
{flexRender(cell.column.columnDef.cell, cell.getContext())}
|
||||
</CardedTable.TableBodyCell>
|
||||
|
@ -6,9 +6,9 @@ import {
|
||||
Textarea,
|
||||
InputField,
|
||||
SimpleForm,
|
||||
CodeEditorField,
|
||||
} from '../../../../../new-components/Form';
|
||||
import { TableColumn } from '../../../../DataSource';
|
||||
import ReactJson from 'react-json-view';
|
||||
|
||||
interface RowDialogProps {
|
||||
row: Record<string, any>;
|
||||
@ -28,7 +28,19 @@ export const RowDialog = ({ onClose, row, columns }: RowDialogProps) => {
|
||||
)?.consoleDataType;
|
||||
|
||||
if (columnDataType === 'json')
|
||||
return <CodeEditorField name={key} label={key} disabled />;
|
||||
return (
|
||||
<div>
|
||||
<div className="font-semibold">{key}</div>
|
||||
<div className="border p-4 rounded">
|
||||
<ReactJson
|
||||
src={
|
||||
typeof row[key] === 'string' ? JSON.parse(row[key]) : row[key]
|
||||
}
|
||||
collapsed
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
if (columnDataType === 'string')
|
||||
return <InputField disabled type="text" name={key} label={key} />;
|
||||
|
@ -13,8 +13,10 @@ import { hasuraToast } from '../../../../new-components/Toasts';
|
||||
import { useAvailableDrivers } from '../../../ConnectDB/hooks';
|
||||
import { OpenApi3Form } from '../../../OpenApi3Form';
|
||||
import { useMetadata } from '../../../hasura-metadata-api';
|
||||
import { Source } from '../../../hasura-metadata-types';
|
||||
import { useManageDatabaseConnection } from '../../hooks/useManageDatabaseConnection';
|
||||
import { DisplayToastErrorMessage } from '../Common/DisplayToastErrorMessage';
|
||||
import { cleanEmpty } from '../ConnectPostgresWidget/utils/helpers';
|
||||
import { GraphQLCustomization } from '../GraphQLCustomization/GraphQLCustomization';
|
||||
import { adaptGraphQLCustomization } from '../GraphQLCustomization/utils/adaptResponse';
|
||||
import { Template } from './components/Template';
|
||||
@ -25,8 +27,6 @@ import {
|
||||
useFormValidationSchema,
|
||||
} from './useFormValidationSchema';
|
||||
import { generateGDCRequestPayload } from './utils/generateRequest';
|
||||
import { Source } from '../../../hasura-metadata-types';
|
||||
import { cleanEmpty } from '../ConnectPostgresWidget/utils/helpers';
|
||||
|
||||
interface ConnectGDCSourceWidgetProps {
|
||||
driver: string;
|
||||
@ -129,6 +129,11 @@ export const ConnectGDCSourceWidget = (props: ConnectGDCSourceWidgetProps) => {
|
||||
methods: { formState, reset },
|
||||
} = useConsoleForm({
|
||||
schema,
|
||||
options: {
|
||||
defaultValues: {
|
||||
template_variables: [],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
|
@ -108,7 +108,7 @@ export const TemplateVariables = () => {
|
||||
),
|
||||
}),
|
||||
],
|
||||
[]
|
||||
[remove]
|
||||
);
|
||||
|
||||
const table = useReactTable({
|
||||
|
@ -26,6 +26,10 @@ const createValidationSchema = (configSchemas: GDCConfigSchemas) =>
|
||||
.gte(0, { message: 'Timeout must be a postive number' })
|
||||
.optional(),
|
||||
template: z.string().optional(),
|
||||
|
||||
// template variables is not marked as optional b/c it makes some pretty annoying TS issues with react-hook-form
|
||||
// the field is initialized with a default value of `[]`
|
||||
// with clean up empty fields, including arrays before submission, so it won't be sent to the server if the array is empty
|
||||
template_variables: z
|
||||
.object({
|
||||
name: reqString('variable name'),
|
||||
|
@ -108,7 +108,7 @@ export const Test: StoryObj<typeof ConnectPostgresWidget> = {
|
||||
// Find and click on advanced settings
|
||||
await userEvent.click(await canvas.findByText('Advanced Settings'));
|
||||
await expect(
|
||||
await canvas.findByText('Total Max Connections')
|
||||
await canvas.findByText('Max Connections')
|
||||
).toBeInTheDocument();
|
||||
await expect(await canvas.findByText('Idle Timeout')).toBeInTheDocument();
|
||||
await expect(await canvas.findByText('Retries')).toBeInTheDocument();
|
||||
@ -174,7 +174,7 @@ export const PostgresEditConnection: StoryObj<typeof ConnectPostgresWidget> = {
|
||||
await userEvent.click(await canvas.findByText('Advanced Settings'));
|
||||
await expect(
|
||||
await canvas.findByTestId(
|
||||
'configuration.connectionInfo.poolSettings.totalMaxConnections'
|
||||
'configuration.connectionInfo.poolSettings.maxConnections'
|
||||
)
|
||||
).toHaveValue(500);
|
||||
await expect(
|
||||
|
@ -8,10 +8,14 @@ import { ReactQueryDecorator } from '../../../../../storybook/decorators/react-q
|
||||
import { useState } from 'react';
|
||||
import { userEvent, waitFor, within } from '@storybook/testing-library';
|
||||
import { expect } from '@storybook/jest';
|
||||
import { ConsoleTypeDecorator } from '../../../../../storybook/decorators';
|
||||
|
||||
export default {
|
||||
component: ConnectPostgresForm,
|
||||
decorators: [ReactQueryDecorator()],
|
||||
decorators: [
|
||||
ReactQueryDecorator(),
|
||||
ConsoleTypeDecorator({ consoleType: 'cloud' }),
|
||||
],
|
||||
} as Meta<typeof ConnectPostgresForm>;
|
||||
|
||||
export const TestPostgresForm: StoryObj<typeof ConnectPostgresForm> = {
|
||||
@ -166,7 +170,7 @@ export const TestPostgresForm: StoryObj<typeof ConnectPostgresForm> = {
|
||||
);
|
||||
|
||||
await userEvent.click(await canvas.findByText('Advanced Settings'));
|
||||
await userEvent.type(await canvas.findByPlaceholderText('1000'), '100');
|
||||
await userEvent.type(await canvas.findByPlaceholderText('50'), '100');
|
||||
await userEvent.type(await canvas.findByPlaceholderText('180'), '100');
|
||||
await userEvent.type(await canvas.findByPlaceholderText('1'), '100');
|
||||
await userEvent.type(await canvas.findByPlaceholderText('360'), '100');
|
||||
@ -194,7 +198,7 @@ export const TestPostgresForm: StoryObj<typeof ConnectPostgresForm> = {
|
||||
port: 5432,
|
||||
},
|
||||
poolSettings: {
|
||||
totalMaxConnections: 100,
|
||||
maxConnections: 100,
|
||||
idleTimeout: 100,
|
||||
retries: 100,
|
||||
poolTimeout: 100,
|
||||
@ -252,7 +256,7 @@ export const TestPostgresForm: StoryObj<typeof ConnectPostgresForm> = {
|
||||
port: 5432,
|
||||
},
|
||||
poolSettings: {
|
||||
totalMaxConnections: 100,
|
||||
maxConnections: 100,
|
||||
idleTimeout: 100,
|
||||
retries: 100,
|
||||
poolTimeout: 100,
|
||||
|
@ -1,4 +1,7 @@
|
||||
import { isCloudConsole } from '../../../../../utils';
|
||||
import globals from '../../../../../Globals';
|
||||
import { InputField } from '../../../../../new-components/Form';
|
||||
// import { isCloudConsole } from '../../../../../utils';
|
||||
|
||||
const commonFieldProps: Partial<React.InputHTMLAttributes<HTMLInputElement>> = {
|
||||
onWheelCapture: e => e.currentTarget.blur(),
|
||||
@ -7,12 +10,23 @@ const commonFieldProps: Partial<React.InputHTMLAttributes<HTMLInputElement>> = {
|
||||
export const PoolSettings = ({ name }: { name: string }) => {
|
||||
return (
|
||||
<>
|
||||
{isCloudConsole(globals) && (
|
||||
<InputField
|
||||
type="number"
|
||||
name={`${name}.totalMaxConnections`}
|
||||
label="Total Max Connections"
|
||||
placeholder="1000"
|
||||
tooltip="Maximum number of total connections to be maintained across any number of Hasura Cloud instances (default: 1000). Takes precedence over max_connections in Cloud projects."
|
||||
fieldProps={commonFieldProps}
|
||||
/>
|
||||
)}
|
||||
|
||||
<InputField
|
||||
type="number"
|
||||
name={`${name}.totalMaxConnections`}
|
||||
label="Total Max Connections"
|
||||
placeholder="1000"
|
||||
tooltip="Maximum number of database connections"
|
||||
name={`${name}.maxConnections`}
|
||||
label="Max Connections"
|
||||
placeholder="50"
|
||||
tooltip="Maximum number of connections to be kept in the pool (default: 50)"
|
||||
fieldProps={commonFieldProps}
|
||||
/>
|
||||
<InputField
|
||||
|
@ -11,6 +11,7 @@ const numberSchema = z.preprocess(
|
||||
export const poolSettingsSchema = z
|
||||
.object({
|
||||
totalMaxConnections: numberSchema.optional(),
|
||||
maxConnections: numberSchema.optional(),
|
||||
idleTimeout: numberSchema.optional(),
|
||||
retries: numberSchema.optional(),
|
||||
poolTimeout: numberSchema.optional(),
|
||||
|
@ -27,6 +27,7 @@ describe('adaptPostgresConnection works for ', () => {
|
||||
"poolSettings": {
|
||||
"connectionLifetime": undefined,
|
||||
"idleTimeout": undefined,
|
||||
"maxConnections": undefined,
|
||||
"poolTimeout": undefined,
|
||||
"retries": undefined,
|
||||
"totalMaxConnections": undefined,
|
||||
@ -98,6 +99,7 @@ describe('adaptPostgresConnection works for ', () => {
|
||||
"poolSettings": {
|
||||
"connectionLifetime": undefined,
|
||||
"idleTimeout": undefined,
|
||||
"maxConnections": undefined,
|
||||
"poolTimeout": undefined,
|
||||
"retries": undefined,
|
||||
"totalMaxConnections": undefined,
|
||||
|
@ -43,6 +43,7 @@ export const adaptPostgresConnectionInfo = (
|
||||
databaseUrl: adaptDatabaseUrl(connectionInfo.database_url),
|
||||
poolSettings: {
|
||||
totalMaxConnections: connectionInfo.pool_settings?.total_max_connections,
|
||||
maxConnections: connectionInfo.pool_settings?.max_connections,
|
||||
idleTimeout: connectionInfo.pool_settings?.idle_timeout,
|
||||
retries: connectionInfo.pool_settings?.retries,
|
||||
poolTimeout: connectionInfo.pool_settings?.pool_timeout,
|
||||
|
@ -24,6 +24,7 @@ export const generateConnectionInfo = (
|
||||
},
|
||||
pool_settings: {
|
||||
total_max_connections: values.poolSettings?.totalMaxConnections,
|
||||
max_connections: values.poolSettings?.maxConnections,
|
||||
idle_timeout: values.poolSettings?.idleTimeout,
|
||||
retries: values.poolSettings?.retries,
|
||||
pool_timeout: values.poolSettings?.poolTimeout,
|
||||
|
@ -35,6 +35,7 @@ export const mockMetadata: Metadata = {
|
||||
pool_timeout: 300,
|
||||
retries: 400,
|
||||
total_max_connections: 500,
|
||||
max_connections: 500,
|
||||
},
|
||||
use_prepared_statements: true,
|
||||
},
|
||||
|
@ -10,6 +10,8 @@ import { useAppDispatch } from '../../../../storeHooks';
|
||||
import { getRoute } from '../../../../utils/getDataRoute';
|
||||
import _push from '../../../../components/Services/Data/push';
|
||||
import AceEditor from 'react-ace';
|
||||
import { CreateRestEndpoint } from '../../../../components/Common/EditableHeading/CreateRestEndpoints';
|
||||
import { MetadataSelectors, useMetadata } from '../../../hasura-metadata-api';
|
||||
|
||||
export const TableName: React.VFC<{
|
||||
dataSourceName: string;
|
||||
@ -17,6 +19,9 @@ export const TableName: React.VFC<{
|
||||
tableName: string;
|
||||
}> = ({ tableName, dataSourceName, table }) => {
|
||||
const dispatch = useAppDispatch();
|
||||
const { data: driver = '' } = useMetadata(
|
||||
m => MetadataSelectors.findSource(dataSourceName)(m)?.kind
|
||||
);
|
||||
const { untrackTable } = useUntrackTable({
|
||||
onSuccess: () => {
|
||||
hasuraToast({
|
||||
@ -87,6 +92,13 @@ export const TableName: React.VFC<{
|
||||
<div>
|
||||
<Badge color="green">Tracked</Badge>
|
||||
</div>
|
||||
{['postgres', 'mssql', 'bigquery'].includes(driver) && (
|
||||
<CreateRestEndpoint
|
||||
tableName={tableName.split('.')[tableName.split('.').length - 1]}
|
||||
dataSourceName={dataSourceName}
|
||||
table={table}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
@ -11,7 +11,7 @@ export const manageTableUrl = ({
|
||||
const tab =
|
||||
sessionStore.getItem('manageTable.lastTab') ?? DEFAULT_MANAGE_TABLE_TAB;
|
||||
|
||||
return `data/v2/manage/table/${tab}?database=${dataSourceName}&table=${encodeURIComponent(
|
||||
return `/data/v2/manage/table/${tab}?database=${dataSourceName}&table=${encodeURIComponent(
|
||||
JSON.stringify(table)
|
||||
)}`;
|
||||
};
|
||||
@ -26,6 +26,6 @@ export const manageFunctionUrl = ({
|
||||
dataSourceName: string;
|
||||
fn: unknown;
|
||||
}) =>
|
||||
`data/v2/manage/function?database=${dataSourceName}&function=${encodeURIComponent(
|
||||
`/data/v2/manage/function?database=${dataSourceName}&function=${encodeURIComponent(
|
||||
JSON.stringify(fn)
|
||||
)}`;
|
||||
|
@ -4,7 +4,7 @@ import { Button } from '../../../new-components/Button';
|
||||
import { IndicatorCard } from '../../../new-components/IndicatorCard';
|
||||
import {
|
||||
MetadataSelector,
|
||||
useMetadata,
|
||||
useMetadata as useLegacyMetadata,
|
||||
useRoles,
|
||||
useSupportedQueryTypes,
|
||||
} from '../../MetadataAPI';
|
||||
@ -41,6 +41,7 @@ import {
|
||||
inputValidationEnabledSchema,
|
||||
} from '../../../components/Services/Data/TablePermissions/InputValidation/InputValidation';
|
||||
import { z } from 'zod';
|
||||
import { MetadataSelectors, useMetadata } from '../../hasura-metadata-api';
|
||||
|
||||
export interface ComponentProps {
|
||||
dataSourceName: string;
|
||||
@ -70,7 +71,7 @@ const Component = (props: ComponentProps) => {
|
||||
|
||||
useScrollIntoView(permissionSectionRef, [roleName], { behavior: 'smooth' });
|
||||
|
||||
const { data: metadataTables } = useMetadata(
|
||||
const { data: metadataTables } = useLegacyMetadata(
|
||||
MetadataSelector.getTables(dataSourceName)
|
||||
);
|
||||
const tables = metadataTables?.map(t => t.table) ?? [];
|
||||
@ -197,6 +198,7 @@ const Component = (props: ComponentProps) => {
|
||||
roleName={roleName}
|
||||
queryType={queryType}
|
||||
columns={formData?.columns}
|
||||
computedFields={formData?.computed_fields}
|
||||
table={table}
|
||||
dataSourceName={dataSourceName}
|
||||
/>
|
||||
@ -281,6 +283,11 @@ export const PermissionsForm = (props: PermissionsFormProps) => {
|
||||
const { columns: tableColumns, isLoading: isLoadingTables } =
|
||||
useListAllTableColumns(dataSourceName, table);
|
||||
|
||||
const metadataTableResult = useMetadata(
|
||||
MetadataSelectors.findTable(dataSourceName, table)
|
||||
);
|
||||
const computedFields = metadataTableResult.data?.computed_fields ?? [];
|
||||
|
||||
const { data: metadataSource } = useMetadataSource(dataSourceName);
|
||||
|
||||
const { data, isError, isLoading } = useFormData({
|
||||
@ -328,6 +335,7 @@ export const PermissionsForm = (props: PermissionsFormProps) => {
|
||||
metadata: data?.metadata,
|
||||
table,
|
||||
tableColumns,
|
||||
tableComputedFields: computedFields,
|
||||
defaultQueryRoot: data.defaultQueryRoot,
|
||||
metadataSource,
|
||||
supportedOperators: data.supportedOperators,
|
||||
@ -357,6 +365,7 @@ export const PermissionsForm = (props: PermissionsFormProps) => {
|
||||
table,
|
||||
metadata: data.metadata,
|
||||
tableColumns,
|
||||
computedFields,
|
||||
trackedTables: metadataSource.tables,
|
||||
metadataSource,
|
||||
validateInput: {
|
||||
|
@ -17,11 +17,13 @@ test('create select args object from form data', () => {
|
||||
args: {
|
||||
table: ['Album'],
|
||||
role: 'user',
|
||||
comment: '',
|
||||
permission: {
|
||||
columns: ['AlbumId', 'Title', 'ArtistId'],
|
||||
filter: { _not: { AlbumId: { _eq: 'X-Hasura-User-Id' } } },
|
||||
set: {},
|
||||
allow_aggregations: false,
|
||||
computed_fields: [],
|
||||
},
|
||||
source: 'Chinook',
|
||||
},
|
||||
@ -42,6 +44,7 @@ test('create delete args object from form data', () => {
|
||||
args: {
|
||||
table: ['Album'],
|
||||
role: 'user',
|
||||
comment: '',
|
||||
permission: { backend_only: false, filter: { Title: { _eq: 'Test' } } },
|
||||
source: 'Chinook',
|
||||
},
|
||||
@ -58,6 +61,7 @@ test('create insert args object from form data', () => {
|
||||
args: {
|
||||
table: ['Album'],
|
||||
role: 'user',
|
||||
comment: '',
|
||||
permission: {
|
||||
columns: [],
|
||||
check: {
|
||||
@ -69,6 +73,7 @@ test('create insert args object from form data', () => {
|
||||
},
|
||||
allow_upsert: true,
|
||||
set: {},
|
||||
validate_input: undefined,
|
||||
backend_only: false,
|
||||
},
|
||||
source: 'Chinook',
|
||||
|
@ -30,6 +30,7 @@ const formatFilterValues = (formFilter: Record<string, any>[] = []) => {
|
||||
|
||||
type SelectPermissionMetadata = {
|
||||
columns: string[];
|
||||
computed_fields: string[];
|
||||
set: Record<string, any>;
|
||||
filter: Record<string, any>;
|
||||
allow_aggregations?: boolean;
|
||||
@ -43,12 +44,16 @@ const createSelectObject = (input: PermissionsSchema) => {
|
||||
const columns = Object.entries(input.columns)
|
||||
.filter(({ 1: value }) => value)
|
||||
.map(([key]) => key);
|
||||
const computed_fields = Object.entries(input.computed_fields)
|
||||
.filter(({ 1: value }) => value)
|
||||
.map(([key]) => key);
|
||||
|
||||
// Input may be undefined
|
||||
const filter = formatFilterValues(input.filter);
|
||||
|
||||
const permissionObject: SelectPermissionMetadata = {
|
||||
columns,
|
||||
computed_fields,
|
||||
filter,
|
||||
set: {},
|
||||
allow_aggregations: input.aggregationEnabled,
|
||||
|
@ -18,6 +18,7 @@ import {
|
||||
SubscriptionRootPermissionType,
|
||||
QueryRootPermissionType,
|
||||
} from './RootFieldPermissions/types';
|
||||
import { MetadataSelectors, useMetadata } from '../../../hasura-metadata-api';
|
||||
|
||||
const getAccessText = (queryType: string) => {
|
||||
if (queryType === 'insert') {
|
||||
@ -35,6 +36,7 @@ export interface ColumnPermissionsSectionProps {
|
||||
queryType: QueryType;
|
||||
roleName: string;
|
||||
columns?: string[];
|
||||
computedFields?: string[];
|
||||
table: unknown;
|
||||
dataSourceName: string;
|
||||
}
|
||||
@ -85,19 +87,30 @@ const checkIfConfirmationIsNeeded = (
|
||||
);
|
||||
};
|
||||
|
||||
// @todo
|
||||
// this hasn't been fully implemented, it still needs computed columns adding
|
||||
export const ColumnPermissionsSection: React.FC<
|
||||
ColumnPermissionsSectionProps
|
||||
> = ({ roleName, queryType, columns, table, dataSourceName }) => {
|
||||
> = ({
|
||||
roleName,
|
||||
queryType,
|
||||
columns,
|
||||
table,
|
||||
computedFields,
|
||||
dataSourceName,
|
||||
}) => {
|
||||
const { setValue, watch } = useFormContext();
|
||||
const [showConfirmation, setShowConfirmationModal] = useState<string | null>(
|
||||
null
|
||||
);
|
||||
watch();
|
||||
|
||||
const [selectedColumns, queryRootFields, subscriptionRootFields] = watch([
|
||||
const [
|
||||
selectedColumns,
|
||||
selectedComputedFields,
|
||||
queryRootFields,
|
||||
subscriptionRootFields,
|
||||
] = watch([
|
||||
'columns',
|
||||
'computed_fields',
|
||||
'query_root_fields',
|
||||
'subscription_root_fields',
|
||||
]);
|
||||
@ -112,6 +125,13 @@ export const ColumnPermissionsSection: React.FC<
|
||||
table
|
||||
);
|
||||
|
||||
const metadataTableResult = useMetadata(
|
||||
MetadataSelectors.findTable(dataSourceName, table)
|
||||
);
|
||||
const tableComputedFields = metadataTableResult.data?.computed_fields?.map(
|
||||
({ name }) => name
|
||||
);
|
||||
|
||||
const onClick = () => {
|
||||
columns?.forEach(column => {
|
||||
const toggleAllOn = status !== 'All columns';
|
||||
@ -119,6 +139,12 @@ export const ColumnPermissionsSection: React.FC<
|
||||
// otherwise toggle all off
|
||||
setValue(`columns.${column}`, toggleAllOn);
|
||||
});
|
||||
computedFields?.forEach(field => {
|
||||
const toggleAllOn = status !== 'All columns';
|
||||
// if status is not all columns: toggle all on
|
||||
// otherwise toggle all off
|
||||
setValue(`computed_fields.${field}`, toggleAllOn);
|
||||
});
|
||||
};
|
||||
|
||||
if (isError) {
|
||||
@ -206,6 +232,26 @@ export const ColumnPermissionsSection: React.FC<
|
||||
<i>{fieldName}</i>
|
||||
</label>
|
||||
))}
|
||||
{queryType === 'select' &&
|
||||
tableComputedFields?.map(fieldName => (
|
||||
<label key={fieldName} className="flex gap-2 items-center">
|
||||
<input
|
||||
type="checkbox"
|
||||
title={disabled ? 'Set a row permission first' : ''}
|
||||
disabled={disabled}
|
||||
style={{ marginTop: '0px !important' }}
|
||||
className="rounded shadow-sm border border-gray-300 hover:border-gray-400 focus:ring-yellow-400"
|
||||
checked={selectedComputedFields[fieldName]}
|
||||
onChange={() => {
|
||||
setValue(
|
||||
`computed_fields.${fieldName}`,
|
||||
!selectedComputedFields[fieldName]
|
||||
);
|
||||
}}
|
||||
/>
|
||||
<i>{fieldName}</i>
|
||||
</label>
|
||||
))}
|
||||
<Button
|
||||
type="button"
|
||||
size="sm"
|
||||
|
@ -21,7 +21,8 @@ export const Operator = ({
|
||||
rowPermissionsContext
|
||||
);
|
||||
const { tables } = useContext(rootTableContext);
|
||||
const { columns, table, relationships } = useContext(tableContext);
|
||||
const { columns, table, relationships, computedFields } =
|
||||
useContext(tableContext);
|
||||
const { rootLogicalModel } = useContext(logicalModelContext);
|
||||
const parent = path[path.length - 1];
|
||||
const operatorLevelId =
|
||||
@ -73,6 +74,19 @@ export const Operator = ({
|
||||
))}
|
||||
</optgroup>
|
||||
) : null}
|
||||
{computedFields.length ? (
|
||||
<optgroup label="Computed fields">
|
||||
{computedFields.map((field, index) => (
|
||||
<option
|
||||
data-type="computedField"
|
||||
key={'computedField' + index}
|
||||
value={field.name}
|
||||
>
|
||||
{field.name}
|
||||
</option>
|
||||
))}
|
||||
</optgroup>
|
||||
) : null}
|
||||
{rootLogicalModel?.fields.length ? (
|
||||
<optgroup label="Columns">
|
||||
{rootLogicalModel?.fields.map((field, index) => (
|
||||
|
@ -5,6 +5,7 @@ import { rootTableContext } from './RootTableProvider';
|
||||
import { areTablesEqual } from '../../../../../hasura-metadata-api';
|
||||
import { fieldsToColumns } from './utils/nestedObjects';
|
||||
import { rowPermissionsContext } from './RowPermissionsProvider';
|
||||
import { ComputedField } from '../../../../../../metadata/types';
|
||||
|
||||
export const tableContext = createContext<TableContext>({
|
||||
table: {},
|
||||
@ -13,6 +14,8 @@ export const tableContext = createContext<TableContext>({
|
||||
setComparator: () => {},
|
||||
columns: [],
|
||||
setColumns: () => {},
|
||||
computedFields: [],
|
||||
setComputedFields: () => {},
|
||||
relationships: [],
|
||||
setRelationships: () => {},
|
||||
});
|
||||
@ -29,6 +32,7 @@ export const TableProvider = ({
|
||||
const [table, setTableName] = useState<Table>(defaultTable || {});
|
||||
const [comparator, setComparator] = useState<string | undefined>();
|
||||
const [columns, setColumns] = useState<Columns>([]);
|
||||
const [computedFields, setComputedFields] = useState<ComputedField[]>([]);
|
||||
const [relationships, setRelationships] = useState<Relationships>([]);
|
||||
const { tables, rootTable } = useContext(rootTableContext);
|
||||
const { loadRelationships } = useContext(rowPermissionsContext);
|
||||
@ -50,6 +54,7 @@ export const TableProvider = ({
|
||||
const foundTable = tables.find(t => areTablesEqual(t.table, table));
|
||||
if (foundTable) {
|
||||
setColumns(foundTable.columns);
|
||||
setComputedFields(foundTable.computedFields);
|
||||
if (foundTable?.dataSource?.name !== rootTable?.dataSource?.name) return;
|
||||
setRelationships(
|
||||
foundTable.relationships.filter(rel => {
|
||||
@ -82,6 +87,8 @@ export const TableProvider = ({
|
||||
setRelationships,
|
||||
objectPath,
|
||||
loadRelationships,
|
||||
computedFields,
|
||||
setComputedFields,
|
||||
]);
|
||||
|
||||
return (
|
||||
@ -89,6 +96,8 @@ export const TableProvider = ({
|
||||
value={{
|
||||
columns,
|
||||
setColumns,
|
||||
computedFields,
|
||||
setComputedFields,
|
||||
table,
|
||||
setTable: setTableName,
|
||||
relationships,
|
||||
|
@ -19,6 +19,7 @@ export const tables: Tables = [
|
||||
},
|
||||
],
|
||||
relationships: [],
|
||||
computedFields: [],
|
||||
},
|
||||
{
|
||||
table: ['Artist'],
|
||||
@ -38,6 +39,7 @@ export const tables: Tables = [
|
||||
},
|
||||
],
|
||||
relationships: [],
|
||||
computedFields: [],
|
||||
},
|
||||
{
|
||||
table: ['Album'],
|
||||
@ -80,12 +82,14 @@ export const tables: Tables = [
|
||||
},
|
||||
},
|
||||
],
|
||||
computedFields: [],
|
||||
},
|
||||
{
|
||||
table: ['Customer'],
|
||||
dataSource: { name: 'SQLite', kind: 'SQLite' },
|
||||
columns: [],
|
||||
relationships: [],
|
||||
computedFields: [],
|
||||
},
|
||||
{
|
||||
table: { dataset: 'bigquery_sample', name: 'sample_table' },
|
||||
@ -149,6 +153,7 @@ export const tables: Tables = [
|
||||
},
|
||||
],
|
||||
relationships: [],
|
||||
computedFields: [],
|
||||
},
|
||||
];
|
||||
|
||||
@ -189,6 +194,7 @@ export const tableWithGeolocationSupport = [
|
||||
},
|
||||
},
|
||||
relationships: [],
|
||||
computedFields: [],
|
||||
columns: [
|
||||
{
|
||||
name: 'user_id',
|
||||
|
@ -2,6 +2,7 @@ import { Source, Table } from '../../../../../hasura-metadata-types';
|
||||
import { GraphQLType } from 'graphql';
|
||||
import { Relationship } from '../../../../../DatabaseRelationships';
|
||||
import { TableColumn } from '../../../../../DataSource';
|
||||
import { ComputedField } from '../../../../../../metadata/types';
|
||||
|
||||
export type Operators = Record<
|
||||
string,
|
||||
@ -22,6 +23,7 @@ export type Tables = Array<{
|
||||
columns: Columns;
|
||||
relationships: Relationships;
|
||||
dataSource: Pick<Source, 'kind' | 'name'> | undefined;
|
||||
computedFields: ComputedField[];
|
||||
}>;
|
||||
|
||||
export type Operator = {
|
||||
@ -40,6 +42,7 @@ export type Comparators = Record<string, Comparator>;
|
||||
|
||||
export type PermissionType =
|
||||
| 'column'
|
||||
| 'computedField'
|
||||
| 'exist'
|
||||
| 'relationship'
|
||||
| 'object'
|
||||
@ -77,6 +80,8 @@ export type TableContext = {
|
||||
setComparator: (comparator: string | undefined) => void;
|
||||
columns: Columns;
|
||||
setColumns: (columns: Columns) => void;
|
||||
computedFields: ComputedField[];
|
||||
setComputedFields: (computedFields: ComputedField[]) => void;
|
||||
relationships: Relationships;
|
||||
setRelationships: (relationships: Relationships) => void;
|
||||
};
|
||||
|
@ -10,6 +10,7 @@ import { rowPermissionsContext } from '../RowPermissionsProvider';
|
||||
import { sourceDataTypes, SourceDataTypes } from './sourceDataTypes';
|
||||
import { rootTableContext } from '../RootTableProvider';
|
||||
import { columnDataType } from '../../../../../../DataSource/utils';
|
||||
import { ComputedField } from '../../../../../../../metadata/types';
|
||||
|
||||
function columnOperators(): Array<Operator> {
|
||||
return Object.keys(columnOperatorsInfo).reduce((acc, key) => {
|
||||
@ -152,7 +153,7 @@ export const mapScalarDataType = (
|
||||
export function useOperators({ path }: { path: string[] }) {
|
||||
const { comparators } = useContext(rowPermissionsContext);
|
||||
const { tables } = useContext(rootTableContext);
|
||||
const { columns, table } = useContext(tableContext);
|
||||
const { columns, table, computedFields } = useContext(tableContext);
|
||||
|
||||
const columnName = path[path.length - 2];
|
||||
const column = columns.find(c => c.name === columnName);
|
||||
@ -166,6 +167,7 @@ export function useOperators({ path }: { path: string[] }) {
|
||||
comparators,
|
||||
path,
|
||||
columns,
|
||||
computedFields,
|
||||
tables,
|
||||
table,
|
||||
});
|
||||
@ -181,6 +183,7 @@ export type GetDataTypeOperatorsProps = {
|
||||
comparators: Comparators;
|
||||
path: string[];
|
||||
columns: Columns;
|
||||
computedFields: ComputedField[];
|
||||
tables: Tables;
|
||||
table: Table;
|
||||
};
|
||||
|
@ -92,6 +92,7 @@ const getInitialValue = (key: string, type?: PermissionType) => {
|
||||
|
||||
switch (type) {
|
||||
case 'column':
|
||||
case 'computedField':
|
||||
// Depends on column type
|
||||
return { _eq: '' };
|
||||
case 'comparator':
|
||||
|
@ -39,6 +39,7 @@ export const usePermissionTables = ({
|
||||
suggestedRelationships
|
||||
),
|
||||
columns,
|
||||
computedFields: metadataTable.computed_fields ?? [],
|
||||
};
|
||||
}) ?? [],
|
||||
};
|
||||
|
@ -12,6 +12,7 @@ import { SourceCustomization } from '../../../../../../hasura-metadata-types/sou
|
||||
import { Operator } from '../../../../../../DataSource/types';
|
||||
|
||||
import {
|
||||
ComputedField,
|
||||
MetadataDataSource,
|
||||
TableEntry,
|
||||
} from '../../../../../../../metadata/types';
|
||||
@ -41,6 +42,7 @@ export interface CreateDefaultValuesArgs {
|
||||
dataSourceName: string;
|
||||
metadata: Metadata | undefined;
|
||||
tableColumns: TableColumn[];
|
||||
tableComputedFields: ComputedField[];
|
||||
defaultQueryRoot: string | never[];
|
||||
metadataSource: MetadataDataSource | undefined;
|
||||
supportedOperators: Operator[];
|
||||
@ -52,6 +54,7 @@ export const createDefaultValues = ({
|
||||
roleName,
|
||||
table,
|
||||
tableColumns,
|
||||
tableComputedFields,
|
||||
defaultQueryRoot,
|
||||
metadataSource,
|
||||
supportedOperators,
|
||||
@ -74,6 +77,7 @@ export const createDefaultValues = ({
|
||||
comment: '',
|
||||
filterType: 'none',
|
||||
columns: {},
|
||||
computed_fields: {},
|
||||
supportedOperators,
|
||||
validateInput,
|
||||
};
|
||||
@ -84,6 +88,7 @@ export const createDefaultValues = ({
|
||||
selectedTable,
|
||||
roleName,
|
||||
tableColumns,
|
||||
tableComputedFields,
|
||||
tableName,
|
||||
metadataSource,
|
||||
});
|
||||
|
@ -14,6 +14,7 @@ import { createDefaultValues } from '../../../../components/RowPermissionsBuilde
|
||||
|
||||
import type { QueryType } from '../../../../../types';
|
||||
import {
|
||||
ComputedField,
|
||||
MetadataDataSource,
|
||||
TableEntry,
|
||||
} from '../../../../../../../metadata/types';
|
||||
@ -83,6 +84,17 @@ const getColumns = (
|
||||
}, {});
|
||||
};
|
||||
|
||||
const getComputedFields = (
|
||||
permissionComputedFields: string[],
|
||||
tableComputedFields: ComputedField[]
|
||||
) => {
|
||||
return tableComputedFields.reduce<Record<string, boolean>>((acc, each) => {
|
||||
const computedFieldIncluded = permissionComputedFields?.includes(each.name);
|
||||
acc[each.name] = !!computedFieldIncluded;
|
||||
return acc;
|
||||
}, {});
|
||||
};
|
||||
|
||||
export const createPermission = {
|
||||
insert: (
|
||||
permission: InsertPermissionDefinition,
|
||||
@ -110,6 +122,7 @@ export const createPermission = {
|
||||
select: (
|
||||
permission: SelectPermissionDefinition,
|
||||
tableColumns: TableColumn[],
|
||||
tableComputedFields: ComputedField[],
|
||||
tableName: string,
|
||||
metadataSource: MetadataDataSource | undefined
|
||||
) => {
|
||||
@ -123,6 +136,10 @@ export const createPermission = {
|
||||
const filterType = getCheckType(permission?.filter);
|
||||
|
||||
const columns = getColumns(permission?.columns || [], tableColumns);
|
||||
const computed_fields = getComputedFields(
|
||||
permission?.computed_fields || [],
|
||||
tableComputedFields
|
||||
);
|
||||
|
||||
const rowCount = getRowCount({
|
||||
currentQueryPermissions: permission,
|
||||
@ -135,6 +152,7 @@ export const createPermission = {
|
||||
filter,
|
||||
filterType,
|
||||
columns,
|
||||
computed_fields,
|
||||
rowCount,
|
||||
aggregationEnabled,
|
||||
operators: ops,
|
||||
@ -238,6 +256,7 @@ interface ObjArgs {
|
||||
queryType: QueryType;
|
||||
selectedTable: TableEntry;
|
||||
tableColumns: TableColumn[];
|
||||
tableComputedFields: ComputedField[];
|
||||
roleName: string;
|
||||
tableName: string;
|
||||
metadataSource: MetadataDataSource | undefined;
|
||||
@ -247,6 +266,7 @@ export const createPermissionsObject = ({
|
||||
queryType,
|
||||
selectedTable,
|
||||
tableColumns,
|
||||
tableComputedFields,
|
||||
roleName,
|
||||
tableName,
|
||||
metadataSource,
|
||||
@ -267,6 +287,7 @@ export const createPermissionsObject = ({
|
||||
return createPermission.select(
|
||||
selectedPermission.permission as SelectPermissionDefinition,
|
||||
tableColumns,
|
||||
tableComputedFields,
|
||||
tableName,
|
||||
// selectedTable.configuration,
|
||||
metadataSource
|
||||
|
@ -2,6 +2,7 @@ import { TableColumn } from '../../../../../../DataSource';
|
||||
import { Metadata } from '../../../../../../hasura-metadata-types';
|
||||
import { isPermission } from '../../../../../utils';
|
||||
import {
|
||||
ComputedField,
|
||||
MetadataDataSource,
|
||||
TableEntry,
|
||||
} from '../../../../../../../metadata/types';
|
||||
@ -75,10 +76,12 @@ export interface CreateFormDataArgs {
|
||||
metadataSource: MetadataDataSource;
|
||||
trackedTables: TableEntry[];
|
||||
validateInput: z.infer<typeof inputValidationSchema>;
|
||||
computedFields: ComputedField[];
|
||||
}
|
||||
|
||||
export const createFormData = (props: CreateFormDataArgs) => {
|
||||
const { dataSourceName, table, tableColumns, trackedTables } = props;
|
||||
const { dataSourceName, table, tableColumns, trackedTables, computedFields } =
|
||||
props;
|
||||
// find the specific metadata table
|
||||
const metadataTable = getMetadataTable({
|
||||
dataSourceName,
|
||||
@ -93,5 +96,6 @@ export const createFormData = (props: CreateFormDataArgs) => {
|
||||
supportedQueries,
|
||||
tableNames: metadataTable.tableNames,
|
||||
columns: tableColumns?.map(({ name }) => name),
|
||||
computed_fields: computedFields.map(({ name }) => name),
|
||||
};
|
||||
};
|
||||
|
@ -75,6 +75,7 @@ export const useFormDataCreateDefaultValuesMock = {
|
||||
role: 'asdf',
|
||||
permission: {
|
||||
columns: ['id', 'teacher'],
|
||||
computed_fields: [],
|
||||
filter: {
|
||||
_exists: {
|
||||
_table: { name: 'testing', schema: 'public' },
|
||||
@ -165,6 +166,7 @@ export const useFormDataCreateDefaultValuesMock = {
|
||||
role: 'new',
|
||||
permission: {
|
||||
columns: ['class', 'id'],
|
||||
computed_fields: [],
|
||||
filter: {},
|
||||
allow_aggregations: true,
|
||||
query_root_fields: [
|
||||
@ -213,6 +215,7 @@ export const useFormDataCreateDefaultValuesMock = {
|
||||
role: 'user',
|
||||
permission: {
|
||||
columns: ['deleted_at', 'id', 'metadata'],
|
||||
computed_fields: [],
|
||||
filter: { deleted_at: { _is_null: true } },
|
||||
allow_aggregations: true,
|
||||
},
|
||||
@ -738,6 +741,7 @@ export const useFormDataCreateDefaultValuesMock = {
|
||||
{ name: 'like', value: '_like', defaultValue: '%%' },
|
||||
{ name: 'not like', value: '_nlike', defaultValue: '%%' },
|
||||
],
|
||||
tableComputedFields: [],
|
||||
} as any;
|
||||
|
||||
export const createFormDataMock = {
|
||||
@ -816,6 +820,7 @@ export const createFormDataMock = {
|
||||
role: 'asdf',
|
||||
permission: {
|
||||
columns: ['id', 'teacher'],
|
||||
computed_fields: [],
|
||||
filter: {
|
||||
_exists: {
|
||||
_table: { name: 'testing', schema: 'public' },
|
||||
@ -833,6 +838,7 @@ export const createFormDataMock = {
|
||||
role: 'new',
|
||||
permission: {
|
||||
columns: ['id', 'teacher'],
|
||||
computed_fields: [],
|
||||
filter: {
|
||||
_exists: {
|
||||
_table: { name: 'testing', schema: 'public' },
|
||||
@ -845,6 +851,7 @@ export const createFormDataMock = {
|
||||
role: 'sdfsf',
|
||||
permission: {
|
||||
columns: ['id'],
|
||||
computed_fields: [],
|
||||
filter: {
|
||||
_exists: {
|
||||
_table: { name: 'class_student', schema: 'public' },
|
||||
@ -859,6 +866,7 @@ export const createFormDataMock = {
|
||||
role: 'testrole',
|
||||
permission: {
|
||||
columns: ['id', 'teacher'],
|
||||
computed_fields: [],
|
||||
filter: {
|
||||
class_students: { class: { _eq: 'X-Hasura-User-Id' } },
|
||||
},
|
||||
@ -868,6 +876,7 @@ export const createFormDataMock = {
|
||||
role: 'user',
|
||||
permission: {
|
||||
columns: ['id'],
|
||||
computed_fields: [],
|
||||
filter: {
|
||||
_exists: {
|
||||
_table: { name: 'class', schema: 'public' },
|
||||
@ -906,6 +915,7 @@ export const createFormDataMock = {
|
||||
role: 'new',
|
||||
permission: {
|
||||
columns: ['class', 'id'],
|
||||
computed_fields: [],
|
||||
filter: {},
|
||||
allow_aggregations: true,
|
||||
query_root_fields: [
|
||||
@ -924,6 +934,7 @@ export const createFormDataMock = {
|
||||
role: 'user',
|
||||
permission: {
|
||||
columns: ['class', 'id', 'student_id'],
|
||||
computed_fields: [],
|
||||
filter: {},
|
||||
allow_aggregations: true,
|
||||
},
|
||||
@ -939,6 +950,7 @@ export const createFormDataMock = {
|
||||
permission: {
|
||||
check: {},
|
||||
columns: ['id', 'metadata', 'deleted_at'],
|
||||
computed_fields: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
@ -947,6 +959,7 @@ export const createFormDataMock = {
|
||||
role: 'asdf',
|
||||
permission: {
|
||||
columns: ['id', 'metadata', 'deleted_at'],
|
||||
computed_fields: [],
|
||||
filter: {},
|
||||
},
|
||||
},
|
||||
@ -954,6 +967,7 @@ export const createFormDataMock = {
|
||||
role: 'user',
|
||||
permission: {
|
||||
columns: ['deleted_at', 'id', 'metadata'],
|
||||
computed_fields: [],
|
||||
filter: { deleted_at: { _is_null: true } },
|
||||
allow_aggregations: true,
|
||||
},
|
||||
@ -964,6 +978,7 @@ export const createFormDataMock = {
|
||||
role: 'user',
|
||||
permission: {
|
||||
columns: ['id', 'metadata'],
|
||||
computed_fields: [],
|
||||
filter: {},
|
||||
check: {},
|
||||
},
|
||||
@ -989,6 +1004,7 @@ export const createFormDataMock = {
|
||||
role: 'sdfsf',
|
||||
permission: {
|
||||
columns: ['id', 'name', 'deleted_at'],
|
||||
computed_fields: [],
|
||||
filter: { _or: [] },
|
||||
query_root_fields: ['select', 'select_by_pk'],
|
||||
subscription_root_fields: ['select', 'select_by_pk'],
|
||||
@ -998,6 +1014,7 @@ export const createFormDataMock = {
|
||||
role: 'user',
|
||||
permission: {
|
||||
columns: ['deleted_at', 'id', 'name'],
|
||||
computed_fields: [],
|
||||
filter: { deleted_at: { _is_null: true } },
|
||||
query_root_fields: ['select', 'select_by_pk'],
|
||||
subscription_root_fields: ['select', 'select_by_pk'],
|
||||
@ -1049,6 +1066,7 @@ export const createFormDataMock = {
|
||||
role: 'asdf',
|
||||
permission: {
|
||||
columns: ['AlbumId'],
|
||||
computed_fields: [],
|
||||
filter: {
|
||||
_or: [{ AlbumId: { _eq: 'X-Hasura-User-Id' } }],
|
||||
},
|
||||
@ -1058,6 +1076,7 @@ export const createFormDataMock = {
|
||||
role: 'new',
|
||||
permission: {
|
||||
columns: ['AlbumId'],
|
||||
computed_fields: [],
|
||||
filter: {
|
||||
_or: [{ AlbumId: { _eq: 'X-Hasura-User-Id' } }],
|
||||
},
|
||||
@ -1067,6 +1086,7 @@ export const createFormDataMock = {
|
||||
role: 'sdfsf',
|
||||
permission: {
|
||||
columns: ['AlbumId', 'Title', 'ArtistId'],
|
||||
computed_fields: [],
|
||||
filter: {
|
||||
_and: [{ AlbumId: { _eq: 'X-Hasura-User-Id' } }],
|
||||
},
|
||||
@ -1076,6 +1096,7 @@ export const createFormDataMock = {
|
||||
role: 'testrole',
|
||||
permission: {
|
||||
columns: ['AlbumId', 'Title'],
|
||||
computed_fields: [],
|
||||
filter: { _and: [{ AlbumId: { _eq: 'X-Hasura-User' } }] },
|
||||
},
|
||||
},
|
||||
@ -1094,6 +1115,7 @@ export const createFormDataMock = {
|
||||
role: 'testrole',
|
||||
permission: {
|
||||
columns: [],
|
||||
computed_fields: [],
|
||||
filter: {
|
||||
_exists: {
|
||||
_table: ['Album'],
|
||||
@ -1155,6 +1177,7 @@ export const createFormDataMock = {
|
||||
role: 'asdf',
|
||||
permission: {
|
||||
columns: [],
|
||||
computed_fields: [],
|
||||
filter: { _not: { Data_value: { _eq: 1337 } } },
|
||||
},
|
||||
},
|
||||
@ -1162,6 +1185,7 @@ export const createFormDataMock = {
|
||||
role: 'new',
|
||||
permission: {
|
||||
columns: ['Series_reference', 'Period'],
|
||||
computed_fields: [],
|
||||
filter: {
|
||||
_and: [
|
||||
{ Data_value: { _eq: 'X-Hasura-User-Id' } },
|
||||
@ -1176,6 +1200,7 @@ export const createFormDataMock = {
|
||||
{
|
||||
role: 'sdfsf',
|
||||
permission: {
|
||||
computed_fields: [],
|
||||
columns: [
|
||||
'Series_reference',
|
||||
'Period',
|
||||
@ -1201,6 +1226,7 @@ export const createFormDataMock = {
|
||||
role: 'testrole',
|
||||
permission: {
|
||||
columns: [],
|
||||
computed_fields: [],
|
||||
filter: { Magnitude: { _eq: '123' } },
|
||||
},
|
||||
},
|
||||
@ -1223,6 +1249,7 @@ export const createFormDataMock = {
|
||||
'Series_title_4',
|
||||
'Series_title_5',
|
||||
],
|
||||
computed_fields: [],
|
||||
filter: {},
|
||||
},
|
||||
},
|
||||
@ -1382,6 +1409,7 @@ export const createFormDataMock = {
|
||||
nullable: false,
|
||||
},
|
||||
],
|
||||
computedFields: [],
|
||||
trackedTables: [
|
||||
{
|
||||
table: { dataset: 'bigquery_sample', name: 'sample_table' },
|
||||
@ -1425,6 +1453,7 @@ export const createFormDataMock = {
|
||||
role: 'new',
|
||||
permission: {
|
||||
columns: ['Series_reference', 'Period'],
|
||||
computed_fields: [],
|
||||
filter: {
|
||||
_and: [
|
||||
{ Data_value: { _eq: 'X-Hasura-User-Id' } },
|
||||
@ -1439,6 +1468,7 @@ export const createFormDataMock = {
|
||||
{
|
||||
role: 'sdfsf',
|
||||
permission: {
|
||||
computed_fields: [],
|
||||
columns: [
|
||||
'Series_reference',
|
||||
'Period',
|
||||
@ -1462,11 +1492,16 @@ export const createFormDataMock = {
|
||||
},
|
||||
{
|
||||
role: 'testrole',
|
||||
permission: { columns: [], filter: { Magnitude: { _eq: '123' } } },
|
||||
permission: {
|
||||
columns: [],
|
||||
computed_fields: [],
|
||||
filter: { Magnitude: { _eq: '123' } },
|
||||
},
|
||||
},
|
||||
{
|
||||
role: 'user',
|
||||
permission: {
|
||||
computed_fields: [],
|
||||
columns: [
|
||||
'Series_reference',
|
||||
'Period',
|
||||
@ -1528,6 +1563,7 @@ export const createFormDataMock = {
|
||||
role: 'asdf',
|
||||
permission: {
|
||||
columns: [],
|
||||
computed_fields: [],
|
||||
filter: { _not: { Data_value: { _eq: 1337 } } },
|
||||
},
|
||||
},
|
||||
@ -1535,6 +1571,7 @@ export const createFormDataMock = {
|
||||
role: 'new',
|
||||
permission: {
|
||||
columns: ['Series_reference', 'Period'],
|
||||
computed_fields: [],
|
||||
filter: {
|
||||
_and: [
|
||||
{ Data_value: { _eq: 'X-Hasura-User-Id' } },
|
||||
@ -1549,6 +1586,7 @@ export const createFormDataMock = {
|
||||
{
|
||||
role: 'sdfsf',
|
||||
permission: {
|
||||
computed_fields: [],
|
||||
columns: [
|
||||
'Series_reference',
|
||||
'Period',
|
||||
@ -1572,11 +1610,16 @@ export const createFormDataMock = {
|
||||
},
|
||||
{
|
||||
role: 'testrole',
|
||||
permission: { columns: [], filter: { Magnitude: { _eq: '123' } } },
|
||||
permission: {
|
||||
columns: [],
|
||||
computed_fields: [],
|
||||
filter: { Magnitude: { _eq: '123' } },
|
||||
},
|
||||
},
|
||||
{
|
||||
role: 'user',
|
||||
permission: {
|
||||
computed_fields: [],
|
||||
columns: [
|
||||
'Series_reference',
|
||||
'Period',
|
||||
|
@ -22,6 +22,7 @@ const formDataMockResult = {
|
||||
'Series_title_4',
|
||||
'Series_title_5',
|
||||
],
|
||||
computed_fields: [],
|
||||
};
|
||||
|
||||
test('returns correctly formatted formData', () => {
|
||||
@ -79,6 +80,7 @@ const defaultValuesMockResult: ReturnType<typeof createDefaultValues> = {
|
||||
},
|
||||
query_root_fields: null,
|
||||
subscription_root_fields: null,
|
||||
computed_fields: {},
|
||||
};
|
||||
|
||||
test('use default values returns values correctly', () => {
|
||||
|