diff --git a/cli/README.md b/cli/README.md index 555357971c7..40f1ed6a32d 100644 --- a/cli/README.md +++ b/cli/README.md @@ -19,7 +19,7 @@ You can also install a specific version of the CLI by providing the `VERSION` variable: ```bash - curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.35.0 bash + curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.36.0 bash ``` - Windows diff --git a/cli/get.sh b/cli/get.sh index 6f1bfd524e5..fd4a0a2863f 100755 --- a/cli/get.sh +++ b/cli/get.sh @@ -44,7 +44,7 @@ log "Selecting version..." # version=${VERSION:-`echo $(curl -s -f -H 'Content-Type: application/json' \ # https://releases.hasura.io/graphql-engine?agent=cli-get.sh) | sed -n -e "s/^.*\"$release\":\"\([^\",}]*\)\".*$/\1/p"`} -version=${VERSION:-v2.35.0} +version=${VERSION:-v2.36.0} if [ ! $version ]; then log "${YELLOW}" @@ -62,7 +62,7 @@ log "Selected version: $version" log "${YELLOW}" log NOTE: Install a specific version of the CLI by using VERSION variable -log 'curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.35.0 bash' +log 'curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.36.0 bash' log "${NC}" # check for existing hasura installation diff --git a/docs/.gitignore b/docs/.gitignore index d8ab9d65d1c..5616b5ce501 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -10,6 +10,7 @@ # Misc .DS_Store +.env .env.local .env.development.local .env.test.local diff --git a/docs/docs/api-reference/syntax-defs.mdx b/docs/docs/api-reference/syntax-defs.mdx index 5aae865c75c..7844b608ee9 100644 --- a/docs/docs/api-reference/syntax-defs.mdx +++ b/docs/docs/api-reference/syntax-defs.mdx @@ -499,7 +499,7 @@ Supported in `v2.0.0-alpha.3` and above. | --------------- | -------- | ------------------------------------------------------ | --------------------------------------------------------- | | remote_table | true | [TableName](#tablename) | The table to which the relationship has to be established | | column_mapping | true | Object ([PGColumn](#pgcolumn) : [PGColumn](#pgcolumn)) | Mapping of columns from current table to remote table | -| insertion_order | false | [InsertOrder](#insertorder) | insertion order: before or after parent (default: before) | +| insertion_order | false | [InsertOrder](#insertorder) | insertion order: before or after parent (default: "before_parent") | ## InsertOrder {#insertorder} diff --git a/docs/docs/databases/athena/index.mdx b/docs/docs/databases/athena/index.mdx index d897df7d04d..593057dc40a 100644 --- a/docs/docs/databases/athena/index.mdx +++ b/docs/docs/databases/athena/index.mdx @@ -55,6 +55,22 @@ Currently, Hasura supports read-only queries, relationships, and permissions on ::: +## Managing data with the Hasura Console + +The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at +`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use +[Hasura Cloud](https://cloud.hasura.io). + +The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API +in the `API` tab and interact with it using the GraphiQL interface. + +:::info Console support + +We recommend using your preferred Amazon Athena client instead. The Hasura Console is designed to be a tool for managing +your GraphQL API, and not a full-fledged database management tool. 
+ +::: + ## Keep up to date If you'd like to stay informed about the status of Amazon Athena support, subscribe to our newsletter and join our diff --git a/docs/docs/databases/bigquery/index.mdx b/docs/docs/databases/bigquery/index.mdx index 75a2c7eb4b0..145e80465a2 100644 --- a/docs/docs/databases/bigquery/index.mdx +++ b/docs/docs/databases/bigquery/index.mdx @@ -39,14 +39,35 @@ Here is how you can get started with Hasura and BigQuery: --> +## Managing data with the Hasura Console + +The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at +`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use +[Hasura Cloud](https://cloud.hasura.io). + +The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API +in the `API` tab and interact with it using the GraphiQL interface. + +:::info Console support + +We recommend using your preferred BigQuery client instead. The Hasura Console is designed to be a tool for managing your +GraphQL API, and not a full-fledged database management tool. + +::: + ## Minimum required IAM permissions -* BigQuery queries through Hasura require the `bigquery.jobs.create` and `bigquery.jobs.get` permissions to send a job to the BigQuery servers. -* The `bigquery.tables.getData` permission allows Hasura to query your BigQuery data source. Note that mutations are not currently supported for BigQuery, and so no corresponding `updateData` permission is required. -* To use the Hasura Console to edit your data source, several different permissions may be required depending on your actions: - * `bigquery.datasets.create` and `bigquery.datasets.delete` for creating and deleting datasets. - * `bigquery.routines.create`, `bigquery.routines.update`, and `bigquery.routines.delete` for managing user-defined functions and stored procedures. - * `bigquery.table.create`, `bigquery.tables.list`, `bigquery.tables.get`, `bigquery.tables.delete`, and `bigquery.tables.update` to manage the dataset definition. +- BigQuery queries through Hasura require the `bigquery.jobs.create` and `bigquery.jobs.get` permissions to send a job + to the BigQuery servers. +- The `bigquery.tables.getData` permission allows Hasura to query your BigQuery data source. Note that mutations are not + currently supported for BigQuery, and so no corresponding `updateData` permission is required. +- To use the Hasura Console to edit your data source, several different permissions may be required depending on your + actions: + - `bigquery.datasets.create` and `bigquery.datasets.delete` for creating and deleting datasets. + - `bigquery.routines.create`, `bigquery.routines.update`, and `bigquery.routines.delete` for managing user-defined + functions and stored procedures. + - `bigquery.table.create`, `bigquery.tables.list`, `bigquery.tables.get`, `bigquery.tables.delete`, and + `bigquery.tables.update` to manage the dataset definition. ## Supported features diff --git a/docs/docs/databases/clickhouse/index.mdx b/docs/docs/databases/clickhouse/index.mdx index 5ded5b6f5be..2757e7478f6 100644 --- a/docs/docs/databases/clickhouse/index.mdx +++ b/docs/docs/databases/clickhouse/index.mdx @@ -34,6 +34,22 @@ Here are 2 ways you can get started with Hasura and ClickHouse: 2. [Docker](/databases/clickhouse/getting-started/docker.mdx): Run Hasura with Docker and then connect your ClickHouse instance to Hasura. 
+## Managing data with the Hasura Console + +The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at +`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use +[Hasura Cloud](https://cloud.hasura.io). + +The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API +in the `API` tab and interact with it using the GraphiQL interface. + +:::info Console support + +We recommend using your preferred ClickHouse client instead. The Hasura Console is designed to be a tool for managing +your GraphQL API, and not a full-fledged database management tool. + +::: + ## Keep up to date :::info Note diff --git a/docs/docs/databases/data-connectors/index.mdx b/docs/docs/databases/data-connectors/index.mdx index e51c85cb9e0..1ab5618f74c 100644 --- a/docs/docs/databases/data-connectors/index.mdx +++ b/docs/docs/databases/data-connectors/index.mdx @@ -20,7 +20,7 @@ that data. Currently, Hasura natively supports Postgres, MS SQL Server, and BigQuery databases. Data Connectors allow you to connect Hasura to **_any_** other data source. Hasura has built Data Connectors for MySQL, Oracle, Snowflake, Amazon -Athena, MariaDB, MongoDB (coming soon), with more sources in the pipeline, but you can also use them to connect to +Athena, MariaDB, and MongoDB, with more sources in the pipeline, but you can also use them to connect to your data sources. Think Microsoft Excel, SQLite, CSV, AirTable and more. For more information on databases, check out the [Hasura Databases documentation](/docs/databases/overview.mdx) or to diff --git a/docs/docs/databases/mariadb/index.mdx b/docs/docs/databases/mariadb/index.mdx index e4377e11ae0..855c5975d7a 100644 --- a/docs/docs/databases/mariadb/index.mdx +++ b/docs/docs/databases/mariadb/index.mdx @@ -205,6 +205,22 @@ schema. - [Subscriptions](/subscriptions/overview.mdx) - [Event triggers](/event-triggers/overview.mdx) +## Managing data with the Hasura Console + +The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at +`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use +[Hasura Cloud](https://cloud.hasura.io). + +The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API +in the `API` tab and interact with it using the GraphiQL interface. + +:::info Console support + +We recommend using your preferred MariaDB client instead. The Hasura Console is designed to be a tool for managing +your GraphQL API, and not a full-fledged database management tool. + +::: + ## Resources - Check out the [Getting Started with Docker](/databases/mariadb/docker.mdx) guide. diff --git a/docs/docs/databases/mongodb/index.mdx b/docs/docs/databases/mongodb/index.mdx index 81dbe308551..e23f8afed02 100644 --- a/docs/docs/databases/mongodb/index.mdx +++ b/docs/docs/databases/mongodb/index.mdx @@ -43,6 +43,22 @@ To get started with MongoDB: - In Hasura Cloud, check out our [Getting Started with MongoDB in Hasura Cloud](/databases/mongodb/cloud.mdx) guide - In a Docker environment, check out our [Getting Started with Docker](/databases/mongodb/docker.mdx) guide +## Managing data with the Hasura Console + +The Hasura Console is a web UI that allows you to manage your data and metadata. 
It is available at +`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use +[Hasura Cloud](https://cloud.hasura.io). + +The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API +in the `API` tab and interact with it using the GraphiQL interface. + +:::info Console support + +We recommend using your preferred MongoDB client instead. The Hasura Console is designed to be a tool for managing +your GraphQL API, and not a full-fledged database management tool. + +::: + ## Keep up to date If you'd like to stay informed about the status of MongoDB support, subscribe to our newsletter and join our Discord! diff --git a/docs/docs/databases/ms-sql-server/index.mdx b/docs/docs/databases/ms-sql-server/index.mdx index e26902ac5c6..a6627ef67c2 100644 --- a/docs/docs/databases/ms-sql-server/index.mdx +++ b/docs/docs/databases/ms-sql-server/index.mdx @@ -38,13 +38,31 @@ Here are 2 ways you can get started with Hasura and SQL Server: Hasura currently supports queries, subscriptions, mutations, relationships, permissions, and Event Triggers on MS SQL Server. +## Managing data with the Hasura Console + +The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at +`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use +[Hasura Cloud](https://cloud.hasura.io). + +The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API +in the `API` tab and interact with it using the GraphiQL interface. + +:::info Console support + +We recommend using your preferred MS SQL Server client instead. The Hasura Console is designed to be a tool for managing +your GraphQL API, and not a full-fledged database management tool. + +::: + ## Required permissions -Assuming a `CONNECT` permission already exists, the following permissions are required for Hasura to function completely. Note that missing permissions may cause the corresponding features to work incorrectly: +Assuming a `CONNECT` permission already exists, the following permissions are required for Hasura to function +completely. Note that missing permissions may cause the corresponding features to work incorrectly: -* To use the Hasura Console to alter your schema, you will need appropriate schema permissions, such as `CREATE TABLE`, `CREATE VIEW`, `CREATE FUNCTION`, and `CREATE PROCEDURE`, depending on what you want to do. -* To perform queries and mutations, Hasura will need permission to `DELETE`, `INSERT`, `SELECT`, and `UPDATE`. -* To call MSSQL stored procedures via Hasura, the `EXECUTE` permission is also required. +- To use the Hasura Console to alter your schema, you will need appropriate schema permissions, such as `CREATE TABLE`, + `CREATE VIEW`, `CREATE FUNCTION`, and `CREATE PROCEDURE`, depending on what you want to do. +- To perform queries and mutations, Hasura will need permission to `DELETE`, `INSERT`, `SELECT`, and `UPDATE`. +- To call MSSQL stored procedures via Hasura, the `EXECUTE` permission is also required. 
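For illustration only, the grants above might be issued with `sqlcmd` roughly as follows — the `hasura_user` login, the `mydb` database, and the `dbo` schema are assumed names, and you may want to scope the grants more narrowly than a whole schema:

```bash
# Illustrative sketch only: grant the permissions listed above to an assumed "hasura_user"
# on an assumed "mydb" database and "dbo" schema. Adjust names and scope to your setup.
sqlcmd -S <server-host> -U <admin-user> -P '<admin-password>' -d mydb -Q "
GRANT SELECT, INSERT, UPDATE, DELETE ON SCHEMA::dbo TO hasura_user;                 -- queries and mutations
GRANT EXECUTE ON SCHEMA::dbo TO hasura_user;                                        -- stored procedures called via Hasura
GRANT CREATE TABLE, CREATE VIEW, CREATE FUNCTION, CREATE PROCEDURE TO hasura_user;  -- schema changes from the Console
"
```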
## Keep up to date diff --git a/docs/docs/databases/mysql/aiven.mdx b/docs/docs/databases/mysql/aiven.mdx new file mode 100644 index 00000000000..3ce68618549 --- /dev/null +++ b/docs/docs/databases/mysql/aiven.mdx @@ -0,0 +1,167 @@ +--- +description: "Learn how to connect Hasura Cloud to Aiven MySQL databases, including set up instructions, service configuration, and secure connection details." +title: 'Cloud: Using Hasura Cloud with an Aiven MySQL database' +keywords: + - hasura cloud + - aiven mysql integration + - graphql database connection + - mysql jdbc string + - secure database setup + - hasura console guide + - cloud database management + - mysql service configuration + - aiven service creation + - connect hasura to mysql +sidebar_label: Aiven MySQL +sidebar_position: 4 +seoFrontMatterUpdated: true +--- + +import Thumbnail from '@site/src/components/Thumbnail'; +import HeadingIcon from '@site/src/components/HeadingIcon'; + +# Connecting Hasura to an Aiven MySQL Database + +## Introduction + +This guide explains how to connect a new or existing [Aiven MySQL](https://aiven.io/mysql?utm_source=website&utm_medium=referral&utm_campaign=hasura) database to a Hasura +instance, either on [Hasura Cloud](https://cloud.hasura.io?skip_onboarding=true) or via one of our +[self-hosted](/deployment/deployment-guides/index.mdx) solutions. + +:::info Note + +If you plan on using Hasura Cloud, which we recommend, follow steps 1 and 2 below. If you're self-hosting a Hasura +instance and already have a project running, skip to [step 3](#create-mysql-db-aiven). + +::: + +:::tip Supported From + +Aiven-hosted MySQL databases are supported from Hasura `v2.35.0` onwards. + +::: + +## Step 1: Sign up or log in to Hasura Cloud + +Navigate to [Hasura Cloud](https://cloud.hasura.io/signup/?pg=docs&plcmt=body&cta=navigate-to-hasura-cloud&tech=default) +and sign up or log in. + +## Step 2: Create a Hasura Cloud project {#create-hasura-project-aiven} + +On the Hasura Cloud dashboard, create a new project: + + + +After the project is initialized successfully, click on `Launch Console` to open the Hasura Console in your browser. + +On the Hasura Console, navigate to the `Data` tab and choose `Connect Existing Database`. Choose the MySQL driver and +then click `Connect Existing Database`: + + + +We'll provision the database on Aiven in the next step and then return to this page to complete the connection. + +## Step 3: Create a MySQL DB on Aiven {#create-mysql-db-aiven} + +:::info Note + +If you have an existing Aiven MySQL database, you can skip this step and move on to [step 4](#connect-hasura-aiven). + +::: + +Log into the [Aiven console](https://console.aiven.io/signup?utm_source=website&utm_medium=referral&utm_campaign=hasura). + +On the Aiven console, click `+ Create a new service` and choose `MySQL`: + + + +Scroll down and select the `Cloud Provider`, `Region` and `Service Plan` based on your requirements. Then click +`Create free service`: + + + +## Step 4: Allow connections to your DB from Hasura {#connect-hasura-aiven} + +On the `Services` dashboard, click on your DB and scroll down to `Allowed IP Addresses` and click on `Change`: + + + +If you're using Hasura Cloud, you can quickly find your IP address from the `Hasura Cloud IP` field on the project's +details view: + + + +:::info Note + +If you're using a self-hosted solution, you'll need to determine the IP address manually depending on your hosting +service. 
+ +::: + +Add the Hasura IP address that you copied, click on the `+`: + + + +Then click on `Close`. + +## Step 5: Get the database connection URL {#get-db-url-aiven} + +The MySQL connector utilizes JDBC connection strings to connect to the database. The format of the connection string is +as follows: + +```bash +jdbc:mysql://:/?user=&password= +``` + +You'll have to transform the connection string provided by Aiven into the format above. Navigate to the `Overview` tab +of your database dashboard and use the `Service URI` to construct the connection string: + + + +## Step 6: Finish connecting the database + +Back on the Hasura Console, enter the database URL that we retrieved in [step 5](#get-db-url-aiven): + + + +Then click `Connect Database`. + +:::info Note + +For security reasons, it is recommended to set database URLs as [env vars](/hasura-cloud/projects/env-vars.mdx) and +using the env vars to connect to the databases in place of the raw database URLs. + +::: + +Voilà. You are ready to start developing. + + + +## Next steps + +- You can check out our [30-Minute Hasura Basics Course](https://hasura.io/learn/graphql/hasura/introduction/) and other + [GraphQL & Hasura Courses](https://hasura.io/learn/) for a more detailed introduction to Hasura. + +- If using Hasura Cloud, you can also click the gear icon to manage your Hasura Cloud project. (e.g. add + [collaborators](/hasura-cloud/projects/collaborators.mdx), [env vars](/hasura-cloud/projects/env-vars.mdx) or + [custom domains](/hasura-cloud/domains.mdx)). + + + +:::info Note + +For more information on which MySQL features we support, check out [this page](/databases/feature-support.mdx). + +::: diff --git a/docs/docs/databases/mysql/index.mdx b/docs/docs/databases/mysql/index.mdx index 3821c3faef6..93697c594d7 100644 --- a/docs/docs/databases/mysql/index.mdx +++ b/docs/docs/databases/mysql/index.mdx @@ -208,6 +208,22 @@ schema. - [Subscriptions](/subscriptions/overview.mdx) - [Event triggers](/event-triggers/overview.mdx) +## Managing data with the Hasura Console + +The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at +`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use +[Hasura Cloud](https://cloud.hasura.io). + +The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API +in the `API` tab and interact with it using the GraphiQL interface. + +:::info Console support + +We recommend using your preferred MySQL client instead. The Hasura Console is designed to be a tool for managing +your GraphQL API, and not a full-fledged database management tool. + +::: + ## Resources - Check out the [Getting Started with Docker](/databases/mysql/docker.mdx) guide. diff --git a/docs/docs/databases/oracle/index.mdx b/docs/docs/databases/oracle/index.mdx index 5599e120d73..ec1f2f99f13 100644 --- a/docs/docs/databases/oracle/index.mdx +++ b/docs/docs/databases/oracle/index.mdx @@ -204,3 +204,19 @@ schema. - [Subscriptions](/subscriptions/overview.mdx) - [Event triggers](/event-triggers/overview.mdx) + +## Managing data with the Hasura Console + +The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at +`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use +[Hasura Cloud](https://cloud.hasura.io). + +The data-management features (such as creating tables) are available in the `Data` tab. 
You can access your GraphQL API +in the `API` tab and interact with it using the GraphiQL interface. + +:::info Console support + +We recommend using your preferred Oracle client instead. The Hasura Console is designed to be a tool for managing +your GraphQL API, and not a full-fledged database management tool. + +::: \ No newline at end of file diff --git a/docs/docs/databases/overview.mdx b/docs/docs/databases/overview.mdx index f7a06aef18a..629d662c7ab 100644 --- a/docs/docs/databases/overview.mdx +++ b/docs/docs/databases/overview.mdx @@ -127,7 +127,7 @@ import Clickhouse from '@site/static/img/databases/logos/clickhouse.png';
Connect MongoDB to Hasura
-MongoDB (Beta)
+MongoDB
diff --git a/docs/docs/databases/postgres/index.mdx b/docs/docs/databases/postgres/index.mdx index ca76597ea3d..b64dc5e5af4 100644 --- a/docs/docs/databases/postgres/index.mdx +++ b/docs/docs/databases/postgres/index.mdx @@ -204,18 +204,36 @@ Curious about any other Postgres flavors? Any other questions? Ask us on ::: +## Managing data with the Hasura Console + +The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at +`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use +[Hasura Cloud](https://cloud.hasura.io). + +The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API +in the `API` tab and interact with it using the GraphiQL interface. + +:::info Console support + +You can use these tools to manage your PostgreSQL database, but we recommend using your preferred +[PostgreSQL client instead](https://wiki.postgresql.org/wiki/PostgreSQL_Clients). The Hasura Console is designed to be a +tool for managing your GraphQL API, and not a full-fledged database management tool. + +::: + ## Required user role permissions -Below are the role permissions required for Hasura to perform all its functionality. Note that, with the exception of `CONNECT` and `GRANT USAGE`, the other features are opt-in, and not enabling them will simply mean that only the corresponding Hasura features will not work. - -* `CONNECT` is required in order for Hasura to connect to your Postgres data source. -* You must `GRANT USAGE` to the Hasura user role for any schema you want to access via Hasura. -* To allow queries and subscriptions via the GraphQL API, `SELECT` permissions are required. -* Similarly, `INSERT`, `UPDATE`, and `DELETE` permissions are required for mutations. -* The Hasura Console requires permissions such as `REFERENCES` and `CREATE` to make changes to your schema. -* `TRIGGER` is required to use [Event Triggers](/event-triggers/overview.mdx) -* If you want to use computed fields or user-defined Postgres functions, the `EXECUTE` permission is required. +Below are the role permissions required for Hasura to perform all its functionality. Note that, with the exception of +`CONNECT` and `GRANT USAGE`, the other features are opt-in, and not enabling them will simply mean that only the +corresponding Hasura features will not work. +- `CONNECT` is required in order for Hasura to connect to your Postgres data source. +- You must `GRANT USAGE` to the Hasura user role for any schema you want to access via Hasura. +- To allow queries and subscriptions via the GraphQL API, `SELECT` permissions are required. +- Similarly, `INSERT`, `UPDATE`, and `DELETE` permissions are required for mutations. +- The Hasura Console requires permissions such as `REFERENCES` and `CREATE` to make changes to your schema. +- `TRIGGER` is required to use [Event Triggers](/event-triggers/overview.mdx) +- If you want to use computed fields or user-defined Postgres functions, the `EXECUTE` permission is required. 
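As a rough sketch of what those grants can look like in practice — the `hasura_user` role, `mydb` database, and `public` schema below are assumptions, and you should trim the list to the features you actually use — they could be applied with `psql` like so:

```bash
# Illustrative sketch only: grant the role permissions listed above to an assumed "hasura_user"
# role for an assumed "mydb" database and "public" schema. Keep only the grants you need.
psql "postgresql://<admin-user>:<admin-password>@<host>:5432/mydb" <<'SQL'
GRANT CONNECT ON DATABASE mydb TO hasura_user;
GRANT USAGE ON SCHEMA public TO hasura_user;
GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO hasura_user; -- queries, subscriptions, mutations
GRANT CREATE ON SCHEMA public TO hasura_user;                                       -- Console schema changes
GRANT REFERENCES, TRIGGER ON ALL TABLES IN SCHEMA public TO hasura_user;            -- foreign keys, Event Triggers
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO hasura_user;                     -- computed fields / functions
SQL
```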
## Know more diff --git a/docs/docs/databases/redshift/_category_.json b/docs/docs/databases/redshift/_category_.json new file mode 100644 index 00000000000..c26c9efadff --- /dev/null +++ b/docs/docs/databases/redshift/_category_.json @@ -0,0 +1,5 @@ +{ + "label": "Amazon Redshift", + "position": 10, + "className": "beta-cat" +} diff --git a/docs/docs/databases/redshift/getting-started/_category_.json b/docs/docs/databases/redshift/getting-started/_category_.json new file mode 100644 index 00000000000..3562d433d76 --- /dev/null +++ b/docs/docs/databases/redshift/getting-started/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Getting Started", + "position": 1 +} diff --git a/docs/docs/databases/redshift/getting-started/cloud.mdx b/docs/docs/databases/redshift/getting-started/cloud.mdx new file mode 100644 index 00000000000..2fd33ed2a54 --- /dev/null +++ b/docs/docs/databases/redshift/getting-started/cloud.mdx @@ -0,0 +1,106 @@ +--- +sidebar_label: Hasura Cloud +sidebar_position: 1 +description: Hasura Cloud for Amazon Redshift +keywords: + - hasura + - docs + - databases + - redshift + - amazon redshift + - hasura cloud +--- + +import Thumbnail from '@site/src/components/Thumbnail'; + +# Get Started with Hasura Cloud and Amazon Redshift + +## Introduction + +### Step 1: Create an account on Hasura Cloud and create a new Hasura Project + +Navigate to +[cloud.hasura.io](https://cloud.hasura.io/signup/?pg=docs&plcmt=body&cta=navigate-to-cloud-hasura-io&tech=default&skip_onboarding=true), +and create a new Hasura Cloud account. + +Once you create a project on Hasura Cloud, hit the "Launch Console" button to open the Hasura Console for your project. + + + +### Step 2: Add your Amazon Redshift database as a source to Hasura + +Head to the `Data > Manage databases` section on the Console to add your Amazon Redshift database as a source to Hasura. + +:::info Make sure your Amazon Redshift service is reachable by Hasura Cloud: + +1. **Allow public connections or + [whitelist the Hasura Cloud IP](/hasura-cloud/projects/create.mdx#cloud-projects-create-allow-nat-ip) on your Amazon + Redshift firewall:** This is good for testing and will allow you to quickly try Hasura out with your database! +2. **VPC peering:** VPC peering and private network access is available on Hasura Cloud paid tiers: Recommended for + production. Get in touch with us if you'd like to try this out against your existing databases! + +::: + +First, we need to add the redshift agent: + + + +Now we need to connect to Redshift by clicking `Connect Database`: + + + +Next, choose the `redshift (Beta)` driver: + + + +Finally, enter your Amazon Redshift database URL and +[database schema](https://docs.aws.amazon.com/athena/latest/ug/creating-tables.html) and click `Connect Database`: + + + +Once you add the Amazon Redshift service, you'll see it listed as an available database on the sidebar. + +### Step 3: Track existing tables + +To query against your Amazon Redshift service using Hasura, you'll need to have existing tables to select. Those tables +will appear under the database as shown. + + + +Track tables selectively or all so that Hasura can introspect the tables and create the corresponding GraphQL schema. +Once you've selected the tables you'd like to track, click `Track Selected` to finish setup: + + + +### Step 4: Try out a GraphQL query + +Head to the `API` tab in the Console and try running a GraphQL query! Use the explorer sidebar on GraphQL to get help in +creating a GraphQL query. 
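If you'd rather use the command line than GraphiQL, the same kind of query can be sent directly to your project's GraphQL endpoint — the endpoint, admin secret, and the `users` table in this sketch are placeholders for your own values:

```bash
# Illustrative sketch only: the endpoint, admin secret, and the "users" table are placeholders.
# Substitute a table you tracked in the previous step.
curl -s https://<your-project>.hasura.app/v1/graphql \
  -H 'Content-Type: application/json' \
  -H 'X-Hasura-Admin-Secret: <your-admin-secret>' \
  -d '{"query": "query { users { id name } }"}'
```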
+ + + +## Keep up to date + +:::info Note + +Currently, Hasura supports read-only queries, subscriptions, relationships, and permissions on Amazon Redshift. + +::: + +If you'd like to stay informed about the status of Amazon Redshift support, subscribe to our newsletter and join our +discord! + +- [https://hasura.io/newsletter/](https://hasura.io/newsletter/) +- [https://discord.com/invite/hasura](https://discord.com/invite/hasura) diff --git a/docs/docs/databases/redshift/getting-started/docker.mdx b/docs/docs/databases/redshift/getting-started/docker.mdx new file mode 100644 index 00000000000..20705ec35ca --- /dev/null +++ b/docs/docs/databases/redshift/getting-started/docker.mdx @@ -0,0 +1,52 @@ +--- +sidebar_label: Docker +sidebar_position: 2 +description: Hasura with Docker for Amazon Redshift +keywords: + - hasura + - docs + - databases + - redshift + - amazon redshift + - docker +--- + +# Get Started with Docker and Amazon Redshift + +## Introduction + +Currently, testing continues on the Amazon Redshift connector for use in self-hosted environments. Our suggested +installation method is to use Docker Compose to deploy a working deployment of Hasura with the Amazon Redshift connector +enabled. + +In order to do this, follow the instructions for +[Hasura Enterprise Edition](/enterprise/getting-started/quickstart-docker.mdx), but change out the Docker Compose files +listed in that documentation with these values: + +```bash +# in a new directory run +wget https://raw.githubusercontent.com/hasura/graphql-engine/master/install-manifests/enterprise/redshift/docker-compose.yaml +# or run +curl https://raw.githubusercontent.com/hasura/graphql-engine/master/install-manifests/enterprise/redshift/docker-compose.yaml -o docker-compose.yml +``` + +When you use these to launch the services, you'll see three containers running instead of two. The third container is +the Amazon Redshift GraphQL Connector agent. By navigating to the Hasura Console after execution, you'll find the Amazon +Redshift data source as a type that can now be added to your Hasura GraphQL Service instance. + +## Keep up to date + +:::info Note + +Currently, Hasura supports read-only queries, subscriptions, relationships, and permissions on Amazon Redshift. + +::: + +Please watch this space to get the latest docs on how you can try these features out via the Console or by manipulating +Metadata in JSON/YAML directly. + +If you'd like to stay informed about the status of Amazon Redshift support, subscribe to our newsletter and join our +discord! + +- [https://hasura.io/newsletter/](https://hasura.io/newsletter/) +- [https://discord.com/invite/hasura](https://discord.com/invite/hasura) diff --git a/docs/docs/databases/redshift/getting-started/index.mdx b/docs/docs/databases/redshift/getting-started/index.mdx new file mode 100644 index 00000000000..848cbfbb104 --- /dev/null +++ b/docs/docs/databases/redshift/getting-started/index.mdx @@ -0,0 +1,33 @@ +--- +slug: index + - hasura + - docs + - databases + - redshift + - amazon redshift +--- + +# Get Started with Amazon Redshift + +:::caution Redshift Beta Availability + +The Hasura Amazon Redshift connector is currently available in beta for all Hasura Cloud offerings and for Hasura +Enterprise Edition customers. Once the Redshift connector is generally available (GA), it will be available only for +Hasura Cloud Enterprise and Enterprise Edition customers. + +::: + +:::caution Redshift Beta Regions + +At this time, Redshift access is only available on AWS Regions. 
+ +::: + +To try Hasura with Amazon Redshift, you'll need your own new or existing Amazon Redshift database. + +Here are 2 ways you can get started with Hasura: + +1. [Hasura Cloud](/databases/redshift/getting-started/cloud.mdx) : You'll need to be able to access your Amazon Redshift + service from Hasura Cloud. +2. [Docker](/databases/redshift/getting-started/docker.mdx): Run Hasura with Docker and then connect your Amazon Redshift + service to Hasura. diff --git a/docs/docs/databases/redshift/index.mdx b/docs/docs/databases/redshift/index.mdx new file mode 100644 index 00000000000..1820b9e6a14 --- /dev/null +++ b/docs/docs/databases/redshift/index.mdx @@ -0,0 +1,70 @@ +--- +slug: index +description: Hasura Amazon Redshift database support +keywords: + - hasura + - docs + - databases + - redshift + - amazon redshift +--- + +# Amazon Redshift + +## Introduction + +Hasura allows connecting to an Amazon Redshift service to build a GraphQL API based on the schema of the service. + +:::tip Supported versions: + +1. Hasura GraphQL Engine `v2.35.0` onwards +2. [Amazon Redshift](https://aws.amazon.com/redshift/) + +::: + +## Get Started + +To try Hasura with Amazon Redshift, you'll need your own new or existing Amazon Redshift instance. + +Here are 2 ways you can get started with Hasura and Amazon Redshift: + +1. [Hasura Cloud](/databases/redshift/getting-started/cloud.mdx): You'll need to be able to access your Amazon Redshift + instance service from Hasura Cloud. +2. [Docker](/databases/redshift/getting-started/docker.mdx): Run Hasura with Docker and then connect your Amazon Redshift + instance to Hasura. + +## Supported features + +:::info Note + +Currently, Hasura supports read-only queries, relationships, and permissions on Amazon Redshift. + +::: + +## Managing data with the Hasura Console + +The Hasura Console is a web UI that allows you to manage your data and metadata. It is available at +`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use +[Hasura Cloud](https://cloud.hasura.io). + +The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API +in the `API` tab and interact with it using the GraphiQL interface. + +:::info Console support + +We recommend using your preferred Amazon Redshift client instead. The Hasura Console is designed to be a tool for managing +your GraphQL API, and not a full-fledged database management tool. + +::: + +## Keep up to date + +If you'd like to stay informed about the status of Amazon Redshift support, subscribe to our newsletter and join our +discord! + +- [https://hasura.io/newsletter/](https://hasura.io/newsletter/) +- [https://discord.com/invite/hasura](https://discord.com/invite/hasura) + +## Know more + +- [Get started](/databases/redshift/getting-started/index.mdx) diff --git a/docs/docs/databases/snowflake/getting-started/docker.mdx b/docs/docs/databases/snowflake/getting-started/docker.mdx index 29b11924a9f..7330740c783 100644 --- a/docs/docs/databases/snowflake/getting-started/docker.mdx +++ b/docs/docs/databases/snowflake/getting-started/docker.mdx @@ -16,9 +16,8 @@ import Thumbnail from '@site/src/components/Thumbnail'; ## Introduction -Testing is currently underway on the Snowflake connector for use in self-hosted environments. Our suggested -installation method is to use Docker Compose to deploy a working deployment of Hasura with the Snowflake Connector -enabled. 
+Testing is currently underway on the Snowflake connector for use in self-hosted environments. Our suggested installation +method is to use Docker Compose to deploy a working deployment of Hasura with the Snowflake Connector enabled. In order to do this, follow the instructions for [Hasura Enterprise Edition](/enterprise/getting-started/quickstart-docker.mdx), but change out the Docker Compose files @@ -35,6 +34,41 @@ When you use these to launch the services, you'll see three containers running i the Snowflake GraphQL Connector agent. By navigating to the Hasura Console after execution, you'll find the Snowflake data source as a type that can now be added to your Hasura GraphQL Service instance. +### Snowflake Connector Configuration + +You can directly add your JDBC connection string to the Snowflake Connector agent in the Hasura Console, or you can add +it as an environment variable to your project. + +:::info Setting the connection string as an environment variable + +It's generally accepted that setting the connection string as an environment variable is a better practice as it's more +secure and prevents any secrets from being exposed in your instance's metadata. + +An example would be to create a new +[environment variable](/deployment/graphql-engine-flags/index.mdx#using-environment-variables) called +`SNOWFLAKE_JDBC_URL` and set it equal to your JDBC connection string. + +Then, export the metadata - in JSON form - using the Console's `Settings` page or by making a call using the +[metadata API](api-reference/metadata-api/manage-metadata.mdx#metadata-export-metadata) and add the following key-value +pair to the `metadata.json`'s `configuration` object: + +```json +"template": "{\"fully_qualify_all_names\": false, \"jdbc_url\": \"{{getEnvironmentVariable(\"SNOWFLAKE_JDBC_URL\")}}\"}" +``` + +You can then apply the metadata to your instance by either using the Console's `Settings` page or by making a call using +the [metadata API](api-reference/metadata-api/manage-metadata.mdx#metadata-apply-metadata). + +::: + +:::info Ensure your password escapes special characters + +Due to the potential variations in drivers, it's crucial to escape special characters used in the password of the +connection string. These include `{ } % & #`. To escape a character, use the appropriate escape sequence based on your +database's driver's documentation. + +::: + ## Keep up to date :::info Note @@ -46,8 +80,7 @@ Currently, Hasura supports read-only queries, relationships, and permissions on Please watch this space to get the latest docs on how you can try these features out via the Console or by manipulating Metadata in JSON/YAML directly. -If you'd like to stay informed about the status of Snowflake support, subscribe to our newsletter and join our -discord! +If you'd like to stay informed about the status of Snowflake support, subscribe to our newsletter and join our discord! - [https://hasura.io/newsletter/](https://hasura.io/newsletter/) - [https://discord.com/invite/hasura](https://discord.com/invite/hasura) diff --git a/docs/docs/databases/snowflake/index.mdx b/docs/docs/databases/snowflake/index.mdx index 7e673fda340..93eef1e6d81 100644 --- a/docs/docs/databases/snowflake/index.mdx +++ b/docs/docs/databases/snowflake/index.mdx @@ -198,6 +198,22 @@ Here are 2 ways you can get started with Hasura and Snowflake: +## Managing data with the Hasura Console + +The Hasura Console is a web UI that allows you to manage your data and metadata. 
It is available at +`http://localhost:8080/console` when you run Hasura locally, or from your project's Console endpoint when you use +[Hasura Cloud](https://cloud.hasura.io). + +The data-management features (such as creating tables) are available in the `Data` tab. You can access your GraphQL API +in the `API` tab and interact with it using the GraphiQL interface. + +:::info Console support + +We recommend using your preferred Snowflake client instead. The Hasura Console is designed to be a tool for managing +your GraphQL API, and not a full-fledged database management tool. + +::: + ## Keep up to date :::info Note diff --git a/docs/docs/deployment/graphql-engine-flags/reference.mdx b/docs/docs/deployment/graphql-engine-flags/reference.mdx index 4d876e15512..ef7e20cc400 100644 --- a/docs/docs/deployment/graphql-engine-flags/reference.mdx +++ b/docs/docs/deployment/graphql-engine-flags/reference.mdx @@ -52,11 +52,10 @@ above, databases can be connected using any custom environment variables of your ### Metadata Database URL This Postgres database URL is used to store Hasura's Metadata. By default, the database configured using -`HASURA_GRAPHQL_DATABASE_URL` / `--database_url` will be used to store the -Metadata. This can also be a URI of the form `dynamic-from-file:///path/to/file`, where the referenced -file contains a postgres connection string, which will be read dynamically every time a new -connection is established. This allows the server to be used in an environment where secrets are -rotated frequently. +`HASURA_GRAPHQL_DATABASE_URL` / `--database_url` will be used to store the Metadata. This can also be a URI of the form +`dynamic-from-file:///path/to/file`, where the referenced file contains a postgres connection string, which will be read +dynamically every time a new connection is established. This allows the server to be used in an environment where +secrets are rotated frequently. | | | | ------------------- | --------------------------------------------------------------------------- | @@ -210,8 +209,8 @@ Whether or not to send the request body (graphql request/variables) to the auth Stringify certain [BigQuery numeric types](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#numeric_types), -specifically `bignumeric`, `float64`, `int64`, `numeric` and aliases thereof, as they don't fit into the -`IEnterprise EditionE 754` spec for JSON encoding-decoding. +specifically `bignumeric`, `float64`, `int64`, `numeric` and aliases thereof, as they don't fit into the `IEEE 754` spec +for JSON encoding-decoding. | | | | ------------------- | ---------------------------------------------- | @@ -332,17 +331,17 @@ headers on any request. Enables the ability to source Postgres connection strings from files on disk using [DynamicFromFile](/api-reference/syntax-defs.mdx#pgsourceconnectioninfo), and the ability for -[Template Variables](/databases/database-config/data-connector-config.mdx#template-variables) to -use the contents of files in Data Connector configuration transforms. File paths used with these -features must start with the prefix set in this environment variable. +[Template Variables](/databases/database-config/data-connector-config.mdx#template-variables) to use the contents of +files in Data Connector configuration transforms. File paths used with these features must start with the prefix set in +this environment variable. 
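As a rough, illustrative sketch — the `/var/secrets/` directory and the `pg-connection-string` file name are made-up examples — the prefix and a `dynamic-from-file` URL could be combined like this:

```bash
# Illustrative sketch only: the /var/secrets/ directory and file name are assumed values.
export HASURA_GRAPHQL_DYNAMIC_SECRETS_ALLOWED_PATH_PREFIX=/var/secrets/
# A connection string can then be read dynamically from a file under that prefix, for example:
export HASURA_GRAPHQL_METADATA_DATABASE_URL=dynamic-from-file:///var/secrets/pg-connection-string
```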
-| | | -| ------------------- | ------------------------------------------------------------------------ | -| **Flag** | N/A | -| **Env var** | `HASURA_GRAPHQL_DYNAMIC_SECRETS_ALLOWED_PATH_PREFIX` | -| **Accepted values** | String (representing a filesystem path prefix, such as `/var/secrets/`) | -| **Default** | `null` | -| **Supported in** | CE, Enterprise Edition | +| | | +| ------------------- | ----------------------------------------------------------------------- | +| **Flag** | N/A | +| **Env var** | `HASURA_GRAPHQL_DYNAMIC_SECRETS_ALLOWED_PATH_PREFIX` | +| **Accepted values** | String (representing a filesystem path prefix, such as `/var/secrets/`) | +| **Default** | `null` | +| **Supported in** | CE, Enterprise Edition | ### Enable Allow List @@ -705,6 +704,41 @@ Multiplexed live queries are split into | **Default** | `100` | | **Supported in** | CE, Enterprise Edition, Cloud | +### Null in Non-nullable Variables + +GraphQL query validation is fixed in [`v2.36.0-beta.1`](https://hasura.io/changelog/community-edition/v2.36.0-beta.1), +where queries that assigned a `null` value to variables with non-nullable type were allowed. + +Example: + +```graphql +query ($user_id: Int!) { + users(where: { id: { _eq: $user_id } }) { + id + name + } +} +``` + +variables + +```json +{ + "user_id": null +} +``` + +To rollback to the old behavior, i.e., allow `null` value for non-nullable variables, use this option. + +| | | +| ------------------- | --------------------------------------------------------------- | +| **Flag** | `--null-in-nonnullable-variables ` | +| **Env var** | `HASURA_GRAPHQL_BACKWARDS_COMPAT_NULL_IN_NONNULLABLE_VARIABLES` | +| **Accepted values** | Boolean | +| **Options** | `true` or `false` | +| **Default** | `false` | +| **Supported in** | CE, Enterprise Edition, Cloud - from `v2.36.0-beta.1` | + ### Number of Retries :::warning Notice @@ -1055,7 +1089,7 @@ any - will be sent, at most, once during this interval. ### Stringify Numeric Types Stringify certain [Postgres numeric types](/schema/postgres/postgresql-types.mdx), specifically `bigint` ,`numeric` -,`decimal` and `double precision` as they don't fit into the `IEnterprise EditionE-754` spec for JSON encoding-decoding. +,`decimal` and `double precision` as they don't fit into the `IEEE-754` spec for JSON encoding-decoding. | | | | ------------------- | ---------------------------------------- | @@ -1181,13 +1215,13 @@ Used to set the connection initialization timeout for `graphql-ws` clients. This Used to set the `Keep Alive` delay for clients that use the `subscription-transport-ws` (Apollo) protocol. For `graphql-ws` clients, the `graphql-engine` sends `PING` messages instead. 
-| | | -| ------------------- | ---------------------------------------------------- | -| **Flag** | `--websocket-keepalive ` | -| **Env var** | `HASURA_GRAPHQL_WEBSOCKET_KEnterprise EditionPALIVE` | -| **Accepted values** | Integer (Representing a delay in seconds) | -| **Default** | `5` | -| **Supported in** | CE, Enterprise Edition | +| | | +| ------------------- | ----------------------------------------- | +| **Flag** | `--websocket-keepalive ` | +| **Env var** | `HASURA_GRAPHQL_WEBSOCKET_KEEPALIVE` | +| **Accepted values** | Integer (Representing a delay in seconds) | +| **Default** | `5` | +| **Supported in** | CE, Enterprise Edition | ### WS Read Cookie diff --git a/docs/docs/deployment/health-checks/source-health-check.mdx b/docs/docs/deployment/health-checks/source-health-check.mdx index 97d751fe9bf..c5fa3ed7e6a 100644 --- a/docs/docs/deployment/health-checks/source-health-check.mdx +++ b/docs/docs/deployment/health-checks/source-health-check.mdx @@ -75,34 +75,28 @@ hasura metadata apply -You can add _health check_ for a database using the -[pg_add_source](/api-reference/metadata-api/source.mdx#metadata-pg-add-source) Metadata API. +You can add/update _health check_ for a database using the +[pg_update_source](/api-reference/metadata-api/source.mdx#metadata-pg-update-source) Metadata API. -```http {17-24} +```http {7-18} POST /v1/metadata HTTP/1.1 Content-Type: application/json X-Hasura-Role: admin { - "type":"pg_add_source", - "args":{ - "name":"", - "replace_configuration":true, - "configuration":{ - "connection_info":{ - "database_url":{ - "from_env":"" - } + { + "type": "pg_update_source", + "args": { + "name": "default", + "health_check": { + "test": { + "sql": "SELECT 1" + }, + "interval": 100, + "timeout": 2, + "retries": 3, + "retry_interval": 2 } - }, - "health_check": { - "test": { - "sql": "SELECT 1" - }, - "interval": 300, - "timeout": 5, - "retries": 3, - "retry_interval": 5 } } } diff --git a/docs/docs/deployment/logging.mdx b/docs/docs/deployment/logging.mdx index 8a73f53d2fe..a4b949b4d85 100644 --- a/docs/docs/deployment/logging.mdx +++ b/docs/docs/deployment/logging.mdx @@ -42,7 +42,7 @@ the websocket layer are called `websocket-log`, logs from the Event Trigger syst You can configure the GraphQL Engine to enable/disable certain log-types using the `--enabled-log-types` flag or the `HASURA_GRAPHQL_ENABLED_LOG_TYPES` env var. See -[GraphQL Engine server config reference](/deployment/graphql-engine-flags/reference.mdx) +[GraphQL Engine server config reference](/deployment/graphql-engine-flags/reference.mdx#log-level) The default enabled **Community Edition** log-types are: `startup, http-log, webhook-log, websocket-log, jwk-refresh-log` @@ -108,7 +108,7 @@ Apart from the above, there are other internal log-types which cannot be configu ## Logging levels You can set the desired logging level on the server using the `log-level` flag or the `HASURA_GRAPHQL_LOG_LEVEL` env -var. See [GraphQL Engine server config reference](/deployment/graphql-engine-flags/reference.mdx). +var. See [GraphQL Engine server config reference](/deployment/graphql-engine-flags/reference.mdx#log-level). The default log-level is `info`. diff --git a/docs/docs/getting-started/use-case/data-api.mdx b/docs/docs/getting-started/use-case/data-api.mdx index 6625486090d..9ef0ede76db 100644 --- a/docs/docs/getting-started/use-case/data-api.mdx +++ b/docs/docs/getting-started/use-case/data-api.mdx @@ -58,11 +58,11 @@ different data sources that need to be accessible to different teams. 
To save you time, we've generated three **read-only** databases for you to use in this guide: -| Database | Description | Connection String | -| ----------- | ---------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -| Banking | A database containing banking information of the account holders | `postgresql://read_only:kd4555jkfjfkdj39f8f8d9d@35.236.11.122:5432/growth-docs-data-layer-1` | -| Stocks | A database containing historic, proprietary market information | `postgresql://read_only:kd4555jkfjfkdj39f8f8d9d@35.236.11.122:5432/growth-docs-data-layer-2` | -| Real Estate | A database containing real estate information | `postgresql://read_only:kd4555jkfjfkdj39f8f8d9d@35.236.11.122:5432/growth-docs-data-layer-3` | +| Database | Description | Connection String | +| ----------- | ---------------------------------------------------------------- | -------------------------------------------------------------------------------------- | +| Banking | A database containing banking information of the account holders | `postgresql://read_only_user:readonlyuser@35.236.11.122:5432/growth-docs-data-layer-1` | +| Stocks | A database containing historic, proprietary market information | `postgresql://read_only_user:readonlyuser@35.236.11.122:5432/growth-docs-data-layer-2` | +| Real Estate | A database containing real estate information | `postgresql://read_only_user:readonlyuser@35.236.11.122:5432/growth-docs-data-layer-3` | :::info Only available from Hasura Cloud diff --git a/docs/docs/hasura-cli/install-hasura-cli.mdx b/docs/docs/hasura-cli/install-hasura-cli.mdx index 568aacb3b49..ed93645bbcd 100644 --- a/docs/docs/hasura-cli/install-hasura-cli.mdx +++ b/docs/docs/hasura-cli/install-hasura-cli.mdx @@ -46,7 +46,7 @@ curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | INSTALL You can also install a specific version of the CLI by providing the `VERSION` variable: ```bash -curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.35.0 bash +curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.36.0 bash ``` @@ -71,7 +71,7 @@ curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | INSTALL You can also install a specific version of the CLI by providing the `VERSION` variable: ```bash -curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.35.0 bash +curl -L https://github.com/hasura/graphql-engine/raw/stable/cli/get.sh | VERSION=v2.36.0 bash ``` diff --git a/docs/docs/hasura-cloud/environments.mdx b/docs/docs/hasura-cloud/environments.mdx index ffcc3937885..560839efcf1 100644 --- a/docs/docs/hasura-cloud/environments.mdx +++ b/docs/docs/hasura-cloud/environments.mdx @@ -66,6 +66,15 @@ hasura migrate create init --from-server --endpoint - hasura migrate apply --endpoint --admin-secret --version --skip-execution ``` +:::info When using a non-Postgres database + +Please note that when using the `migrate create init --from-server` command, Hasura only supports Postgres databases. +Should you wish to use a different database, you will need to manually create the migration files. For more information, +please see this section of the [Migrations](/migrations-metadata-seeds/manage-migrations.mdx#create-manual-migrations) +documentation. 
+ +::: + It is important to mark the migration as applied on the Cloud project to ensure that the schema that is already created on Hasura Cloud project is not attempted to be recreated again, which would end in an error state. @@ -142,6 +151,15 @@ hasura migrate create init --from-server --admin-secret --databas hasura migrate apply --admin-secret --version --skip-execution ``` +:::info When using a non-Postgres database + +Please note that when using the `migrate create init --from-server` command, Hasura only supports Postgres databases. +Should you wish to use a different database, you will need to manually create the migration files. For more information, +please see this section of the [Migrations](/migrations-metadata-seeds/manage-migrations.mdx#create-manual-migrations) +documentation. + +::: + ### Step 4: Initialize Hasura Metadata ```bash diff --git a/docs/docs/migrations-metadata-seeds/auto-apply-migrations.mdx b/docs/docs/migrations-metadata-seeds/auto-apply-migrations.mdx index 7e2cd5e24ca..ab2d41687b4 100644 --- a/docs/docs/migrations-metadata-seeds/auto-apply-migrations.mdx +++ b/docs/docs/migrations-metadata-seeds/auto-apply-migrations.mdx @@ -17,6 +17,14 @@ keywords: Hasura ships a special `cli-migrations` Docker image which can be used to automatically apply Migrations and Metadata when the server starts. +### What does it mean to "auto-apply" Migrations and Metadata? + +Auto-applying migrations means that Hasura can automatically apply database schema changes or migrations to your +underlying database without requiring manual intervention. This feature simplifies the process of keeping your database +schema in sync with your GraphQL schema and makes it easier to evolve your application over time. + +### How does it work? + This image is a drop-in place replacement for the standard Hasura GraphQL Engine [images](https://hub.docker.com/r/hasura/graphql-engine). This container provides a method to apply [Migrations and Metadata](/migrations-metadata-seeds/overview.mdx) automatically when the container starts up. It works @@ -65,9 +73,10 @@ For `config v2`, see ## Applying Migrations The `migrations` and `metadata` directories created by the Hasura CLI in a Hasura Project can be mounted at the -`/hasura-migrations` and `/hasura-metadata` paths of this Docker container and the container's entrypoint script will -automatically apply the Migrations and Metadata before starting the server. If no directory is mounted at the designated -paths, the server will start and ignore the Migrations and/or Metadata. +`/hasura-migrations` and `/hasura-metadata` +[paths of this Docker container and the container's entrypoint script](https://github.com/hasura/graphql-engine/blob/master/packaging/cli-migrations/v3/docker-entrypoint.sh#L12C1-L13) +will automatically apply the Migrations and Metadata before starting the server. If no directory is mounted at the +designated paths, the server will start and ignore the Migrations and/or Metadata. You can also mount the Migrations/Metadata directories at some location other than the above by setting the following environment variables: @@ -93,5 +102,6 @@ docker run -p 8080:8080 \ ## Applying only Metadata {#auto-apply-metadata} If you're managing Migrations with a different tool and want to use this image to apply only the metadata, mount the -`metadata` directory of your Hasura Project at the `/hasura-metadata` path of this Docker container the container’s -entry point script will apply the Metadata before starting the server. 
+`metadata` directory of your Hasura Project at the `/hasura-metadata` +[path of this Docker container the container's entry point script](https://github.com/hasura/graphql-engine/blob/master/packaging/cli-migrations/v3/docker-entrypoint.sh#L13) +will apply the Metadata before starting the server. diff --git a/docs/docs/migrations-metadata-seeds/manage-migrations.mdx b/docs/docs/migrations-metadata-seeds/manage-migrations.mdx index 860f09ce30a..a012680b53a 100644 --- a/docs/docs/migrations-metadata-seeds/manage-migrations.mdx +++ b/docs/docs/migrations-metadata-seeds/manage-migrations.mdx @@ -69,6 +69,14 @@ This will create a new folder named in the format `_init` within anot in Hasura which is being referenced. It will contain only an `up.sql` file which describes in SQL how to create the schema in full for that database. +:::info When using a non-Postgres database + +Please note that when using the `migrate create init --from-server` command, Hasura only supports Postgres databases. +Should you wish to use a different database, you will need to manually create the migration files. For more information, +please see the section below. + +::: + ### Create a Migration manually {#create-manual-migrations} While the Hasura Console can auto generate Migrations for every action, sometimes you might want to write the migrations diff --git a/docs/docs/migrations-metadata-seeds/resetting-migrations-metadata.mdx b/docs/docs/migrations-metadata-seeds/resetting-migrations-metadata.mdx index 8a5521f77eb..71ab54148a6 100644 --- a/docs/docs/migrations-metadata-seeds/resetting-migrations-metadata.mdx +++ b/docs/docs/migrations-metadata-seeds/resetting-migrations-metadata.mdx @@ -67,6 +67,15 @@ server using the following commands: hasura migrate create "init" --from-server --database-name ``` +:::info When using a non-Postgres database + +Please note that when using the `migrate create init --from-server` command, Hasura only supports Postgres databases. +Should you wish to use a different database, you will need to manually create the migration files. For more information, +please see this section of the [Migrations](/migrations-metadata-seeds/manage-migrations.mdx#create-manual-migrations) +documentation. + +::: + ```bash ## note down the version ## mark the migration as applied on this server diff --git a/docs/docs/security/disable-graphql-introspection.mdx b/docs/docs/security/disable-graphql-introspection.mdx index 5c783137084..566aaabf53f 100644 --- a/docs/docs/security/disable-graphql-introspection.mdx +++ b/docs/docs/security/disable-graphql-introspection.mdx @@ -33,6 +33,7 @@ To avoid this, you can disable GraphQL introspection on a per-role basis. ## Disabling GraphQL introspection for a role -Introspection can be disabled for a role as shown below: +Introspection can be disabled by navigating to the `Security` tab, selecting `Schema Introspection` and modifying or +adding the role you wish to disable introspection for. 
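If you manage Metadata programmatically rather than through the Console, the same result can be achieved with a Metadata API call — this sketch assumes your version exposes the `set_graphql_schema_introspection_options` Metadata API, and the endpoint, admin secret, and `guest` role are placeholders:

```bash
# Illustrative sketch only: endpoint, admin secret, and role name are placeholders.
curl -s https://<your-project>.hasura.app/v1/metadata \
  -H 'Content-Type: application/json' \
  -H 'X-Hasura-Admin-Secret: <your-admin-secret>' \
  -d '{
    "type": "set_graphql_schema_introspection_options",
    "args": { "disabled_for_roles": ["guest"] }
  }'
```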
diff --git a/docs/docs/subscriptions/postgres/index.mdx b/docs/docs/subscriptions/postgres/index.mdx
index 8ff38b585b6..9de02316e86 100644
--- a/docs/docs/subscriptions/postgres/index.mdx
+++ b/docs/docs/subscriptions/postgres/index.mdx
@@ -36,7 +36,7 @@ have multiple subscriptions running at the same time they must be in separate qu
 
 ### Live queries
 
-A live query subscription will return the latest result of the query being made and not necessarily all the individual
+A live query subscription will return the **latest result** of the query being made and not necessarily all the individual
 events leading up to the result. By default, updates are delivered to clients every **1 sec**. See more details
 [here](/subscriptions/postgres/livequery/index.mdx).
 
diff --git a/docs/docs/subscriptions/postgres/livequery/index.mdx b/docs/docs/subscriptions/postgres/livequery/index.mdx
index cce00c37c21..286833d1a39 100644
--- a/docs/docs/subscriptions/postgres/livequery/index.mdx
+++ b/docs/docs/subscriptions/postgres/livequery/index.mdx
@@ -14,13 +14,15 @@ slug: index
 
 ## Introduction
 
-A Live query subscription will return the latest result of the query being made and not necessarily all the individual
-events leading up to the result.
+A Live query subscription will return the **latest result** of the query being made and not necessarily all the
+individual events leading up to the result, unlike a
+[streaming subscription](/subscriptions/postgres/streaming/index.mdx), which streams the individual rows as they are added.
 
 By default, updates are delivered to clients every **1 sec**.
 
-See more details on
-[subscriptions execution](/subscriptions/postgres/livequery/execution.mdx).
+A live query is a query that is continuously monitored for changes in the database and automatically updates the query
+result whenever the underlying data changes. Live queries are a PostgreSQL-specific implementation that allows real-time
+updates to clients without the need for manual polling or refreshing.
 
 ## Convert a query to a subscription
 
@@ -29,8 +31,8 @@ You can turn any query into a subscription by simply replacing `query` with `sub
 
 :::info Single subscription in each query caveat
 
 Hasura follows the [GraphQL spec](https://graphql.github.io/graphql-spec/June2018/#sec-Single-root-field) which allows
-for only one root field in a subscription. You also cannot execute multiple separate subscriptions in one query. To
-have multiple subscriptions running at the same time they must be in separate queries.
+for only one root field in a subscription. You also cannot execute multiple separate subscriptions in one query. To have
+multiple subscriptions running at the same time they must be in separate queries.
 
 :::
 
diff --git a/docs/docs/subscriptions/postgres/streaming/index.mdx b/docs/docs/subscriptions/postgres/streaming/index.mdx
index 470b398bd9b..bd1df5790d1 100644
--- a/docs/docs/subscriptions/postgres/streaming/index.mdx
+++ b/docs/docs/subscriptions/postgres/streaming/index.mdx
@@ -15,8 +15,11 @@ slug: index
 
 ## Introduction
 
 A streaming subscription streams the response according to the cursor provided by the user while making the
-subscription. Streaming subscriptions can be used to subscribe only to the data which has been newly added to the result
-set.
+subscription. Streaming subscriptions can be used to subscribe only to the data which has been **newly added to the
+result set**.
+
+This is different from a [live query](/subscriptions/postgres/livequery/index.mdx) subscription, where only the latest
+result is returned to the client.
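To make the difference concrete, here is a minimal client-side sketch using the `graphql-ws` library (already listed in the docs' `package.json`). The endpoint URL and the `messages` table with its `id` and `content` columns are assumptions for illustration; the `messages_stream` root field follows Hasura's `<table>_stream` naming for streaming subscriptions.

```typescript
import { createClient } from 'graphql-ws';

// Assumed local endpoint; replace with your project's GraphQL endpoint.
const client = createClient({ url: 'ws://localhost:8080/v1/graphql' });

// Live query: Hasura re-sends the *latest* full result whenever it changes
// (checked every 1 sec by default).
const stopLiveQuery = client.subscribe(
  {
    query: /* GraphQL */ `
      subscription LatestMessages {
        messages(order_by: { id: desc }, limit: 10) {
          id
          content
        }
      }
    `,
  },
  {
    next: result => console.log('latest result:', result.data),
    error: err => console.error('live query error:', err),
    complete: () => console.log('live query completed'),
  }
);

// Streaming subscription: Hasura sends only the rows added after the cursor's
// initial value, in batches, instead of repeating the full result.
const stopStream = client.subscribe(
  {
    query: /* GraphQL */ `
      subscription NewMessages {
        messages_stream(cursor: { initial_value: { id: 0 }, ordering: ASC }, batch_size: 10) {
          id
          content
        }
      }
    `,
  },
  {
    next: result => console.log('newly added rows:', result.data),
    error: err => console.error('streaming error:', err),
    complete: () => console.log('stream completed'),
  }
);
```

Both `subscribe` calls return an unsubscribe function (`stopLiveQuery`, `stopStream`), so each subscription can be stopped independently.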
:::tip Supported from @@ -35,7 +38,7 @@ Streaming subscriptions work well with other Hasura features like [relationships](/schema/postgres/table-relationships/index.mdx#table-relationships) and also leverage the power of [subscriptions multiplexing](/subscriptions/postgres/livequery/execution.mdx#subscription-multiplexing). -:::info Confguration details +:::info Configuration details In the case of streaming subscriptions, the multiplexed batch size can be configured via `HASURA_GRAPHQL_STREAMING_QUERIES_MULTIPLEXED_BATCH_SIZE` and the refetch interval can be configured via diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 6aee8213ff6..be2f5de6024 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -18,13 +18,11 @@ const config = { organizationName: 'hasura', projectName: 'graphql-engine', staticDirectories: ['static', 'public'], - scripts: [ - { - src: "https://www.chatbase.co/embed.min.js", - id: "iiL6XJbYo6tRR_M4rUB9F", - defer: true, - } - ], + customFields: { + docsBotEndpointURL: process.env.NODE_ENV === "development" ? "ws://localhost:8000/hasura-docs-ai" : "wss://hasura-docs-bot.deno.dev/hasura-docs-ai", + hasuraVersion: 2, + }, + scripts: [], webpack: { jsLoader: isServer => ({ loader: require.resolve('swc-loader'), diff --git a/docs/package.json b/docs/package.json index 6d0266ca11d..6f1b590b32c 100644 --- a/docs/package.json +++ b/docs/package.json @@ -35,6 +35,7 @@ "graphiql": "^1.5.1", "graphql": "^15.7.2", "graphql-ws": "^5.11.2", + "markdown-to-jsx": "^7.3.2", "prism-react-renderer": "^1.3.5", "react": "^17.0.2", "react-dom": "^17.0.2", diff --git a/docs/src/components/AiChatBot/AiChatBot.tsx b/docs/src/components/AiChatBot/AiChatBot.tsx new file mode 100644 index 00000000000..b258f6d6871 --- /dev/null +++ b/docs/src/components/AiChatBot/AiChatBot.tsx @@ -0,0 +1,271 @@ +import React, { useEffect, useRef, useState } from 'react'; +import Markdown from 'markdown-to-jsx'; +import './styles.css'; +import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; +import { CloseIcon, RespondingIconGray, SparklesIcon } from '@site/src/components/AiChatBot/icons'; +import useLocalStorage from "@site/src/components/AiChatBot/useLocalStorage"; + +interface Message { + userMessage: string; + botResponse: string; +} + +interface Query { + previousMessages: Message[]; + currentUserInput: string; +} + +// Websocket Event data types (stringified) +// { type: "loading", message: "Processing your request..." } +// { type: "responsePart", message: "...part of response..." } +// { type: "error", message: "error description" } +// { type: "endOfStream", message: "End of stream..." } + +const initialMessages: Message[] = [ + { + userMessage: '', + botResponse: "Hi! 
I'm HasuraAI, the docs chatbot.", + }, + { + userMessage: '', + botResponse: 'You can ask me anything about Hasura and I will try to answer.', + }, + { + userMessage: '', + botResponse: 'Always check the docs for official information.', + }, +]; + + +function AiChatBot() { + // Get the docsBotEndpointURL and hasuraVersion from the siteConfig + const { + siteConfig: { customFields }, + } = useDocusaurusContext(); + // Manage the open state of the popup + const [isOpen, setIsOpen] = useState(false); + // Manage the bot responding state + const [isResponding, setIsResponding] = useState(false) + // Manage the text input + const [input, setInput] = useState(''); + // Manage the historical messages + const [messages, setMessages] = useLocalStorage(`hasuraV${customFields.hasuraVersion}BotMessages`, initialMessages); + // Manage the current message + const [currentMessage, setCurrentMessage] = useState({ userMessage: '', botResponse: '' }); + // Manage scrolling to the end + const [isAutoScroll, setIsAutoScroll] = useState(true); + // Manage the websocket + const [ws, setWs] = useState(null); + // Set is Websocket connecting + const [isConnecting, setIsConnecting] = useState(true); + + // Use a ref because of the useEffect closure issue + const currentMessageRef = useRef({ userMessage: '', botResponse: '' }); + + // Enables scrolling to the end + const scrollDiv = useRef(null); + + const { docsBotEndpointURL, hasuraVersion } = customFields as { docsBotEndpointURL: string; hasuraVersion: number }; + + const storedUserID = localStorage.getItem('hasuraDocsUserID') as string | "null"; + + // Effect to auto-scroll to the bottom if autoScroll is true + useEffect(() => { + if (isAutoScroll) { + scrollDiv.current?.scrollTo({ + top: scrollDiv.current.scrollHeight, + behavior: 'smooth' + }); + } + }, [currentMessage.botResponse]); + + // Detect if user scrolls up and disable auto-scrolling + const handleScroll = (e) => { + const atBottom = Math.abs(scrollDiv.current?.scrollHeight - Math.floor(e.target.scrollTop + e.target.clientHeight)) < 2; + setIsAutoScroll(atBottom); + }; + + + // Update the ref when the currentMessage changes ie: when the endpoint is responding + useEffect(() => { + currentMessageRef.current = currentMessage; + }, [currentMessage]); + + // Manage the websocket and set event listener for messages + useEffect(() => { + let websocket; + let reconnectInterval; + + const connectWebSocket = () => { + websocket = new WebSocket(encodeURI(`${docsBotEndpointURL}?version=${hasuraVersion}&userId=${storedUserID}`)); + + websocket.onopen = () => { + console.log('Connected to the websocket'); + setIsConnecting(false); + clearTimeout(reconnectInterval); + }; + + websocket.onmessage = (event) => { + + let response = { type: "", message: "" }; + + try { + response = JSON.parse(event.data) as {"type": string, "message": string} + } catch (e) { + console.error("error parsing websocket message", e); + } + + switch (response.type) { + case "endOfStream": { + console.log('end of stream'); + setMessages((prevMessages: Message[]) => [...prevMessages, currentMessageRef.current]); + setCurrentMessage({ userMessage: '', botResponse: '' }); + setIsResponding(false); + break; + } + case "responsePart": { + setIsResponding(true); + setCurrentMessage(prevState => { + return { ...prevState, botResponse: prevState?.botResponse + response.message }; + }); + break; + } + case "error": { + console.error("error", response.message); + break; + } + case "loading": { + console.log("loading", response.message); + break; + } + 
default: { + console.error("unknown response type", response.type); + break; + } + } + }; + + websocket.onclose = () => { + console.log('WebSocket closed. Attempting to reconnect...'); + setIsConnecting(true); + setIsResponding(false); + reconnectInterval = setTimeout(connectWebSocket, 3000); // attempt to reconnect every 3 seconds + }; + + websocket.onerror = error => { + console.error('WebSocket error:', error); + setIsConnecting(true); + setIsResponding(false); + websocket.close(); + }; + + setWs(websocket); + }; + + connectWebSocket(); + return () => { + clearTimeout(reconnectInterval); + if (websocket) { + websocket.close(); + } + }; + }, []); + + // Send the query to the websocket when the user submits the form + const handleSubmit = async () => { + // if the input is empty, do nothing + if (!input) { + return; + } + + if (ws) { + const toSend = JSON.stringify({ previousMessages: messages, currentUserInput: input }); + setCurrentMessage({ userMessage: input, botResponse: '' }); + setInput(''); + ws.send(toSend); + setIsResponding(true); + } + + }; + + return ( +
+ {isOpen ? ( + + ) : ( + + )} + {isOpen && ( +
+
+
+
HasuraAI
+ +
+ +
+
+ {messages.map((msg, index) => ( +
+ {msg.userMessage && ( +
+
+ {msg.userMessage} +
+
+ )} + {msg.botResponse && ( +
+
+ {msg.botResponse} +
+
+ )} +
+ ))} +
+ {currentMessage.userMessage && ( +
+ {currentMessage.userMessage} +
+ )} +
+
+
+ {currentMessage.botResponse && ( +
+ {currentMessage.botResponse} +
+ )} +
+
+ {isResponding ? + RespondingIconGray : null} +
+
+
+ {/* Handles scrolling to the end */} + {/*
*/} +
{ + e.preventDefault(); + handleSubmit(); + }} + > + setInput(e.target.value)} /> + +
+
+ )} +
+ ); +} + +export default AiChatBot; diff --git a/docs/src/components/AiChatBot/icons.tsx b/docs/src/components/AiChatBot/icons.tsx new file mode 100644 index 00000000000..45f73e58c07 --- /dev/null +++ b/docs/src/components/AiChatBot/icons.tsx @@ -0,0 +1,13 @@ +import React from "react"; + +export const SparklesIcon = Created by Royyan Wijayafrom the Noun Project + +export const CloseIcon = + +export const RespondingIconGray = + + + + + + \ No newline at end of file diff --git a/docs/src/components/AiChatBot/styles.css b/docs/src/components/AiChatBot/styles.css new file mode 100644 index 00000000000..99a9951f1a1 --- /dev/null +++ b/docs/src/components/AiChatBot/styles.css @@ -0,0 +1,213 @@ +.chat-popup { + position: fixed; + bottom: 10px; + right: 10px; + z-index: 1000; +} + +.open-chat-button { + display: flex; + align-items: center; + justify-content: center; + height: 40px; + background-color: #1699e2; + color: white; + border-radius: 8px; + padding-right: 15px; + padding-left: 15px; + font-weight: bold; + border: none; + margin-bottom: 1.5rem; + margin-right: 1.5rem; + box-shadow: 0px 0px 10px rgba(0, 0, 0, 0.2); + cursor: pointer; +} + +.close-chat-button { + display: flex; + align-items: center; + justify-content: center; + height: 40px; + background-color: #1699e2; + color: white; + border-radius: 8px; + padding-right: 15px; + padding-left: 15px; + border: none; + box-shadow: 0px 0px 10px rgba(0, 0, 0, 0.2); + cursor: pointer; +} + +.sparkles-icon { + width: 20px; + height: 20px; + fill: white; + margin-top: 3px; + margin-right: 5px; +} + +.chat-window { + width: 100%; + max-width: 500px; + border: 1px solid #ccc; + background-color: #fff; + padding: 15px; + box-shadow: 0px 0px 10px rgba(0, 0, 0, 0.2); + margin-top: 10px; + display: flex; + flex-direction: column; + max-height: 90vh; + min-height: 50vh; + height: auto; + overflow: auto; + border-radius: 10px; +} + +.close-icon { + width: 23px; + height: 23px; + margin-top: 5px; + fill: white; + margin-right: 3px; +} + +.formatted-text { + white-space: pre-line; + color: #333; +} + +.formatted-text a { + color: blue; + text-decoration: underline; +} + +.messages-container { + overflow-y: auto; + flex: 1; +} + +.message { + border-radius: 8px; + padding: 10px 15px; + margin: 5px 0; + max-width: 80%; +} + +.user-message-container, +.bot-message-container { + width: 100%; + margin: 3px 0; +} + +.user-message-container { + display: flex; + justify-content: flex-end; +} + +.bot-message-container { + display: flex; + justify-content: flex-start; +} + +.user-message, +.bot-message { + display: block; + +} + +.user-message { + text-align: right; + background-color: #1699e2; + color: white; + border-top-right-radius: 0; +} + +.bot-message { + text-align: left; + background-color: #e9e9e9; + border-top-left-radius: 0; +} + +.responding-message { +} + +input { + width: 80%; + padding: 10px; + border-radius: 5px 0 0 5px; + border: 1px solid #ccc; + outline: none; + height: 40px; + box-sizing: border-box; + flex: 1; +} + +.input-container { + display: flex; + margin-top: auto; + width: 100%; + font-size: 16px; + background-color: #fff; +} + +.input-text { + font-size: 16px; + color: #333; + background-color: white; +} + +.input-text:disabled { + background-color: #eeeeee !important; +} + +.input-button { + background-color: #1699e2; + color: white; + padding-left: 15px; + padding-right: 15px; + border: none; + height: 40px; + border-radius: 0 5px 5px 0; + cursor: pointer; +} + +.input-button:disabled { + background-color: #ababab; +} + +.info-bar 
{ + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 10px; + background-color: #1699e2; + border-radius: 8px 8px 0 0; + padding: 10px 15px; +} + +.bot-name-pic-container { + display: flex; + color: white; +} + +.bot-name { + margin-right: 10px; + font-weight: bold; + font-size: 1.2rem; +} + +.bot-pic { + border-radius: 50%; +} + +.clear-button { + background-color: transparent; + border: none; + color: white; + cursor: pointer; + font-size: 0.9rem; +} + +html[data-theme=dark] code { + background-color: #e0e0e0; +} \ No newline at end of file diff --git a/docs/src/components/AiChatBot/useLocalStorage.ts b/docs/src/components/AiChatBot/useLocalStorage.ts new file mode 100644 index 00000000000..60352f648d7 --- /dev/null +++ b/docs/src/components/AiChatBot/useLocalStorage.ts @@ -0,0 +1,56 @@ +import { useState } from 'react'; +export const useLocalStorage = (key: string, defaultValue: T) => { + // Create state variable to store localStorage value in state + const [localStorageValue, setLocalStorageValue] = useState(() => { + try { + const value = localStorage.getItem(key); + // If value is already present in localStorage then return it + + // Else set default value in localStorage and then return it + if (value) { + let parsedValue = JSON.parse(value); + + if (Array.isArray(parsedValue)) { + const filteredValue = parsedValue.filter(item => !!item); + + // Update localStorage if non-truthy values were filtered out + if (filteredValue.length !== parsedValue.length) { + parsedValue = filteredValue; + localStorage.setItem(key, JSON.stringify(filteredValue)); + } + } + + return parsedValue as T; + } else { + localStorage.setItem(key, JSON.stringify(defaultValue)); + return defaultValue; + } + } catch (error) { + localStorage.setItem(key, JSON.stringify(defaultValue)); + return defaultValue; + } + }); + + // this method update our localStorage and our state + const setLocalStorageStateValue = valueOrFn => { + let newValue: T; + if (typeof valueOrFn === 'function') { + const fn = valueOrFn as (value: T) => T; + newValue = fn(localStorageValue); + } else { + newValue = valueOrFn; + } + + // Filter out non-truthy values if newValue is an array + if (Array.isArray(newValue)) { + newValue = newValue.filter(item => !!item) as T; + } + + localStorage.setItem(key, JSON.stringify(newValue)); + setLocalStorageValue(newValue); + }; + + return [localStorageValue, setLocalStorageStateValue] as const; +}; + +export default useLocalStorage; diff --git a/docs/src/components/BannerDismissable/DDNBanner.css b/docs/src/components/BannerDismissable/DDNBanner.css new file mode 100644 index 00000000000..5d6a6a3206f --- /dev/null +++ b/docs/src/components/BannerDismissable/DDNBanner.css @@ -0,0 +1,30 @@ +.banner { + background-color: #1699e2; + color: white; + display: flex; + justify-content: center; + align-items: center; + position: relative; + padding: 15px; + font-size: 16px; +} + +.close-btn { + background: none; + color: white; + border: none; + position: absolute; + right: 10px; + top: 10px; + font-size: 18px; + cursor: pointer; +} + +.close-btn:focus { + outline: none; +} + +.banner a { + color: white; + cursor: pointer; +} \ No newline at end of file diff --git a/docs/src/components/BannerDismissable/DDNBanner.tsx b/docs/src/components/BannerDismissable/DDNBanner.tsx new file mode 100644 index 00000000000..83eda711a06 --- /dev/null +++ b/docs/src/components/BannerDismissable/DDNBanner.tsx @@ -0,0 +1,21 @@ +import React, { useState } from 'react'; +import './DDNBanner.css'; 
+ +export const DDNBanner = () => { + const [isVisible, setIsVisible] = useState(true); + + if (!isVisible) return null; + + return ( +
+
+ Hasura DDN is the future of data delivery. Click here for the Hasura DDN docs. +
+ +
+ ); +} \ No newline at end of file diff --git a/docs/src/components/CustomDocItem/index.tsx b/docs/src/components/CustomDocItem/index.tsx index 6e0eea7f72f..9d7d1c91991 100644 --- a/docs/src/components/CustomDocItem/index.tsx +++ b/docs/src/components/CustomDocItem/index.tsx @@ -5,7 +5,8 @@ import GraphQLWithHasuraBanner from '@site/src/components/GraphQLWithHasuraBanne import CustomFooter from '@site/src/components/CustomFooter'; import styles from './styles.module.scss'; import { Redirect } from '@docusaurus/router'; - +import AiChatBot from "@site/src/components/AiChatBot/AiChatBot"; +import BrowserOnly from '@docusaurus/BrowserOnly'; const CustomDocItem = props => { useEffect(() => { // This function is adds tags to code blocks within a table @@ -77,6 +78,9 @@ const CustomDocItem = props => { {/**/} + Loading...
}> + {() => } + diff --git a/docs/src/theme/Navbar/index.js b/docs/src/theme/Navbar/index.js new file mode 100644 index 00000000000..53e58f83bdb --- /dev/null +++ b/docs/src/theme/Navbar/index.js @@ -0,0 +1,11 @@ +import React from 'react'; +import Navbar from '@theme-original/Navbar'; +import {DDNBanner} from "@site/src/components/BannerDismissable/DDNBanner"; +export default function NavbarWrapper(props) { + return ( + <> + + + + ); +} diff --git a/docs/static/img/databases/mysql/aiven/aiven-create-mysql.png b/docs/static/img/databases/mysql/aiven/aiven-create-mysql.png new file mode 100644 index 00000000000..213187d2b96 Binary files /dev/null and b/docs/static/img/databases/mysql/aiven/aiven-create-mysql.png differ diff --git a/docs/static/img/databases/mysql/aiven/aiven-mysql-change-ip.png b/docs/static/img/databases/mysql/aiven/aiven-mysql-change-ip.png new file mode 100644 index 00000000000..8b451991171 Binary files /dev/null and b/docs/static/img/databases/mysql/aiven/aiven-mysql-change-ip.png differ diff --git a/docs/static/img/databases/mysql/aiven/aiven-mysql-choose-driver.png b/docs/static/img/databases/mysql/aiven/aiven-mysql-choose-driver.png new file mode 100644 index 00000000000..a787f6e04e2 Binary files /dev/null and b/docs/static/img/databases/mysql/aiven/aiven-mysql-choose-driver.png differ diff --git a/docs/static/img/databases/mysql/aiven/aiven-mysql-configuration.png b/docs/static/img/databases/mysql/aiven/aiven-mysql-configuration.png new file mode 100644 index 00000000000..a1ded66b6fc Binary files /dev/null and b/docs/static/img/databases/mysql/aiven/aiven-mysql-configuration.png differ diff --git a/docs/static/img/databases/mysql/aiven/aiven-mysql-connection-uri.png b/docs/static/img/databases/mysql/aiven/aiven-mysql-connection-uri.png new file mode 100644 index 00000000000..b5b2e60c50a Binary files /dev/null and b/docs/static/img/databases/mysql/aiven/aiven-mysql-connection-uri.png differ diff --git a/docs/static/img/databases/mysql/aiven/aiven-mysql-finish-connecting.png b/docs/static/img/databases/mysql/aiven/aiven-mysql-finish-connecting.png new file mode 100644 index 00000000000..5cecf99fdf2 Binary files /dev/null and b/docs/static/img/databases/mysql/aiven/aiven-mysql-finish-connecting.png differ diff --git a/docs/static/img/databases/mysql/aiven/aiven-mysql-hasura-cloud-ip-set.png b/docs/static/img/databases/mysql/aiven/aiven-mysql-hasura-cloud-ip-set.png new file mode 100644 index 00000000000..68a527b0f2f Binary files /dev/null and b/docs/static/img/databases/mysql/aiven/aiven-mysql-hasura-cloud-ip-set.png differ diff --git a/docs/static/img/databases/mysql/aiven/aiven-mysql-hasura-cloud-ip.png b/docs/static/img/databases/mysql/aiven/aiven-mysql-hasura-cloud-ip.png new file mode 100644 index 00000000000..9345f45c317 Binary files /dev/null and b/docs/static/img/databases/mysql/aiven/aiven-mysql-hasura-cloud-ip.png differ diff --git a/docs/static/img/databases/redshift/make-graphql-query.png b/docs/static/img/databases/redshift/make-graphql-query.png new file mode 100644 index 00000000000..00dd504ab8e Binary files /dev/null and b/docs/static/img/databases/redshift/make-graphql-query.png differ diff --git a/docs/static/img/databases/redshift/redshift-add-agent.png b/docs/static/img/databases/redshift/redshift-add-agent.png new file mode 100644 index 00000000000..6286aa496ca Binary files /dev/null and b/docs/static/img/databases/redshift/redshift-add-agent.png differ diff --git a/docs/static/img/databases/redshift/redshift-add-service1.png 
b/docs/static/img/databases/redshift/redshift-add-service1.png new file mode 100644 index 00000000000..6eb910e1213 Binary files /dev/null and b/docs/static/img/databases/redshift/redshift-add-service1.png differ diff --git a/docs/static/img/databases/redshift/redshift-add-service2.png b/docs/static/img/databases/redshift/redshift-add-service2.png new file mode 100644 index 00000000000..dd0b7f353a5 Binary files /dev/null and b/docs/static/img/databases/redshift/redshift-add-service2.png differ diff --git a/docs/static/img/databases/redshift/redshift-tracking-tables1.png b/docs/static/img/databases/redshift/redshift-tracking-tables1.png new file mode 100644 index 00000000000..725808f7313 Binary files /dev/null and b/docs/static/img/databases/redshift/redshift-tracking-tables1.png differ diff --git a/docs/static/img/databases/redshift/redshift-tracking-tables2.png b/docs/static/img/databases/redshift/redshift-tracking-tables2.png new file mode 100644 index 00000000000..04f5d00f10f Binary files /dev/null and b/docs/static/img/databases/redshift/redshift-tracking-tables2.png differ diff --git a/docs/static/img/hasura-ai-profile-pic.png b/docs/static/img/hasura-ai-profile-pic.png new file mode 100644 index 00000000000..84cce70fb6e Binary files /dev/null and b/docs/static/img/hasura-ai-profile-pic.png differ diff --git a/docs/static/img/security/disable-introspection.png b/docs/static/img/security/disable-introspection.png index 7283ec42f87..c4960216a84 100644 Binary files a/docs/static/img/security/disable-introspection.png and b/docs/static/img/security/disable-introspection.png differ diff --git a/docs/yarn.lock b/docs/yarn.lock index 4ad6600ebef..b85c22c412a 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -5513,6 +5513,7 @@ __metadata: graphiql: ^1.5.1 graphql: ^15.7.2 graphql-ws: ^5.11.2 + markdown-to-jsx: ^7.3.2 prettier: ^3.0.0 prism-react-renderer: ^1.3.5 react: ^17.0.2 @@ -7931,6 +7932,15 @@ __metadata: languageName: node linkType: hard +"markdown-to-jsx@npm:^7.3.2": + version: 7.3.2 + resolution: "markdown-to-jsx@npm:7.3.2" + peerDependencies: + react: ">= 0.14.0" + checksum: 8885c6343b71570b0a7ec16cd85a49b853a830234790ee7430e2517ea5d8d361ff138bd52147f650790f3e7b3a28a15c755fc16f8856dd01ddf09a6161782e06 + languageName: node + linkType: hard + "mdast-squeeze-paragraphs@npm:^4.0.0": version: 4.0.0 resolution: "mdast-squeeze-paragraphs@npm:4.0.0" diff --git a/flake.lock b/flake.lock index 66b83876c43..20439309d37 100644 --- a/flake.lock +++ b/flake.lock @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1696330627, - "narHash": "sha256-o0WGId7Vuhl+5KeNgh343Orex0YUMVCopf1LWr+kI/k=", + "lastModified": 1699914561, + "narHash": "sha256-b296O45c3Jgj8GEFg/NN7ZOJjBBCHr1o2iA4yoJ3OKE=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "7b6b919f3a707c566b8592106bb7ce070721b137", + "rev": "2f8742189e9ef86961ab90a30c68eb844565578a", "type": "github" }, "original": { diff --git a/frontend/libs/console/legacy-ce/src/lib/components/Common/EditableHeading/CreateRestEndpoints.tsx b/frontend/libs/console/legacy-ce/src/lib/components/Common/EditableHeading/CreateRestEndpoints.tsx index 56ccafec521..86a328e3a25 100644 --- a/frontend/libs/console/legacy-ce/src/lib/components/Common/EditableHeading/CreateRestEndpoints.tsx +++ b/frontend/libs/console/legacy-ce/src/lib/components/Common/EditableHeading/CreateRestEndpoints.tsx @@ -4,13 +4,16 @@ import { Button } from '../../../new-components/Button'; import { FaLink } from 'react-icons/fa'; import { Badge } from '../../../new-components/Badge'; 
import { Analytics } from '../../../features/Analytics'; +import { Table } from '../../../features/hasura-metadata-types'; interface CreateRestEndpointProps { tableName: string; + dataSourceName: string; + table: Table; } export const CreateRestEndpoint = (props: CreateRestEndpointProps) => { - const { tableName } = props; + const { tableName, dataSourceName, table } = props; const [isModalOpen, setIsModalOpen] = React.useState(false); const toggleModal = () => { @@ -31,7 +34,12 @@ export const CreateRestEndpoint = (props: CreateRestEndpointProps) => { {isModalOpen && ( - + )} ); diff --git a/frontend/libs/console/legacy-ce/src/lib/components/Common/EditableHeading/EditableHeading.js b/frontend/libs/console/legacy-ce/src/lib/components/Common/EditableHeading/EditableHeading.js index ea5361b99b6..26ee0f44b01 100644 --- a/frontend/libs/console/legacy-ce/src/lib/components/Common/EditableHeading/EditableHeading.js +++ b/frontend/libs/console/legacy-ce/src/lib/components/Common/EditableHeading/EditableHeading.js @@ -54,7 +54,11 @@ class Heading extends React.Component {

{currentValue}

- +
@@ -75,7 +79,11 @@ class Heading extends React.Component {

{currentValue}

- +
void; tableName: string; + dataSourceName: string; + table: Table; } export const RestEndpointModal = (props: RestEndpointModalProps) => { - const { onClose, tableName } = props; + const { onClose, tableName, dataSourceName } = props; const { createRestEndpoints, endpointDefinitions, isLoading } = - useCreateRestEndpoints(); + useCreateRestEndpoints({ + dataSourceName: props.dataSourceName, + table: props.table, + }); const tableEndpointDefinitions = endpointDefinitions?.[tableName] ?? {}; @@ -62,7 +68,7 @@ export const RestEndpointModal = (props: RestEndpointModalProps) => { onClose={onClose} disabled={selectedMethods.length === 0} onSubmit={() => { - createRestEndpoints(tableName, selectedMethods, { + createRestEndpoints(dataSourceName, tableName, selectedMethods, { onSuccess: () => { hasuraToast({ type: 'success', diff --git a/frontend/libs/console/legacy-ce/src/lib/components/Services/ApiExplorer/TopNav.tsx b/frontend/libs/console/legacy-ce/src/lib/components/Services/ApiExplorer/TopNav.tsx index 0cd7d8ee445..fc29444b798 100644 --- a/frontend/libs/console/legacy-ce/src/lib/components/Services/ApiExplorer/TopNav.tsx +++ b/frontend/libs/console/legacy-ce/src/lib/components/Services/ApiExplorer/TopNav.tsx @@ -84,6 +84,7 @@ const TopNav: React.FC = ({ location }) => { ?.schema_registry_dumps_v2[0] || []; if ( data && + data.diff_with_previous_schema && data.diff_with_previous_schema[0] && data.diff_with_previous_schema[0].schema_diff_data && data.change_recorded_at diff --git a/frontend/libs/console/legacy-ce/src/lib/features/BrowseRows/components/DataGrid/parts/ReactTableWrapper.tsx b/frontend/libs/console/legacy-ce/src/lib/features/BrowseRows/components/DataGrid/parts/ReactTableWrapper.tsx index 9e8de2a525b..01a1e0b8ff9 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/BrowseRows/components/DataGrid/parts/ReactTableWrapper.tsx +++ b/frontend/libs/console/legacy-ce/src/lib/features/BrowseRows/components/DataGrid/parts/ReactTableWrapper.tsx @@ -110,6 +110,7 @@ export const ReactTableWrapper: React.VFC = ({ ) : ( + // eslint-disable-next-line jsx-a11y/anchor-is-valid { relationships?.onClick({ @@ -269,6 +270,7 @@ export const ReactTableWrapper: React.VFC = ({ key={`${row.id}-${i}`} data-testid={`@table-cell-${row.id}-${i}`} style={{ maxWidth: '20ch' }} + className="px-sm py-xs whitespace-nowrap text-muted overflow-hidden text-ellipsis" > {flexRender(cell.column.columnDef.cell, cell.getContext())} diff --git a/frontend/libs/console/legacy-ce/src/lib/features/BrowseRows/components/DataGrid/parts/RowDialog.tsx b/frontend/libs/console/legacy-ce/src/lib/features/BrowseRows/components/DataGrid/parts/RowDialog.tsx index b29d0ed5fdb..08452527697 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/BrowseRows/components/DataGrid/parts/RowDialog.tsx +++ b/frontend/libs/console/legacy-ce/src/lib/features/BrowseRows/components/DataGrid/parts/RowDialog.tsx @@ -6,9 +6,9 @@ import { Textarea, InputField, SimpleForm, - CodeEditorField, } from '../../../../../new-components/Form'; import { TableColumn } from '../../../../DataSource'; +import ReactJson from 'react-json-view'; interface RowDialogProps { row: Record; @@ -28,7 +28,19 @@ export const RowDialog = ({ onClose, row, columns }: RowDialogProps) => { )?.consoleDataType; if (columnDataType === 'json') - return ; + return ( +
+
{key}
+
+ +
+
+ ); if (columnDataType === 'string') return ; diff --git a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectGDCSourceWidget/ConnectGDCSourceWidget.tsx b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectGDCSourceWidget/ConnectGDCSourceWidget.tsx index 73f8dcef65a..44eba037b32 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectGDCSourceWidget/ConnectGDCSourceWidget.tsx +++ b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectGDCSourceWidget/ConnectGDCSourceWidget.tsx @@ -13,8 +13,10 @@ import { hasuraToast } from '../../../../new-components/Toasts'; import { useAvailableDrivers } from '../../../ConnectDB/hooks'; import { OpenApi3Form } from '../../../OpenApi3Form'; import { useMetadata } from '../../../hasura-metadata-api'; +import { Source } from '../../../hasura-metadata-types'; import { useManageDatabaseConnection } from '../../hooks/useManageDatabaseConnection'; import { DisplayToastErrorMessage } from '../Common/DisplayToastErrorMessage'; +import { cleanEmpty } from '../ConnectPostgresWidget/utils/helpers'; import { GraphQLCustomization } from '../GraphQLCustomization/GraphQLCustomization'; import { adaptGraphQLCustomization } from '../GraphQLCustomization/utils/adaptResponse'; import { Template } from './components/Template'; @@ -25,8 +27,6 @@ import { useFormValidationSchema, } from './useFormValidationSchema'; import { generateGDCRequestPayload } from './utils/generateRequest'; -import { Source } from '../../../hasura-metadata-types'; -import { cleanEmpty } from '../ConnectPostgresWidget/utils/helpers'; interface ConnectGDCSourceWidgetProps { driver: string; @@ -129,6 +129,11 @@ export const ConnectGDCSourceWidget = (props: ConnectGDCSourceWidgetProps) => { methods: { formState, reset }, } = useConsoleForm({ schema, + options: { + defaultValues: { + template_variables: [], + }, + }, }); useEffect(() => { diff --git a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectGDCSourceWidget/components/TemplateVariables.tsx b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectGDCSourceWidget/components/TemplateVariables.tsx index 23532ccf5e6..dc158b3b245 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectGDCSourceWidget/components/TemplateVariables.tsx +++ b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectGDCSourceWidget/components/TemplateVariables.tsx @@ -108,7 +108,7 @@ export const TemplateVariables = () => { ), }), ], - [] + [remove] ); const table = useReactTable({ diff --git a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectGDCSourceWidget/useFormValidationSchema.tsx b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectGDCSourceWidget/useFormValidationSchema.tsx index 391d9d3f588..f1807b9fa33 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectGDCSourceWidget/useFormValidationSchema.tsx +++ b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectGDCSourceWidget/useFormValidationSchema.tsx @@ -26,6 +26,10 @@ const createValidationSchema = (configSchemas: GDCConfigSchemas) => .gte(0, { message: 'Timeout must be a postive number' }) .optional(), template: z.string().optional(), + + // template variables is not marked as optional b/c it makes 
some pretty annoying TS issues with react-hook-form + // the field is initialized with a default value of `[]` + // with clean up empty fields, including arrays before submission, so it won't be sent to the server if the array is empty template_variables: z .object({ name: reqString('variable name'), diff --git a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/ConnectPostgresWidget.stories.tsx b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/ConnectPostgresWidget.stories.tsx index 3b36f0b4f72..4fbde1bc60b 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/ConnectPostgresWidget.stories.tsx +++ b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/ConnectPostgresWidget.stories.tsx @@ -108,7 +108,7 @@ export const Test: StoryObj = { // Find and click on advanced settings await userEvent.click(await canvas.findByText('Advanced Settings')); await expect( - await canvas.findByText('Total Max Connections') + await canvas.findByText('Max Connections') ).toBeInTheDocument(); await expect(await canvas.findByText('Idle Timeout')).toBeInTheDocument(); await expect(await canvas.findByText('Retries')).toBeInTheDocument(); @@ -174,7 +174,7 @@ export const PostgresEditConnection: StoryObj = { await userEvent.click(await canvas.findByText('Advanced Settings')); await expect( await canvas.findByTestId( - 'configuration.connectionInfo.poolSettings.totalMaxConnections' + 'configuration.connectionInfo.poolSettings.maxConnections' ) ).toHaveValue(500); await expect( diff --git a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/parts/ConnectPostgresForm.stories.tsx b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/parts/ConnectPostgresForm.stories.tsx index c249fcc6dae..534b107b72c 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/parts/ConnectPostgresForm.stories.tsx +++ b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/parts/ConnectPostgresForm.stories.tsx @@ -8,10 +8,14 @@ import { ReactQueryDecorator } from '../../../../../storybook/decorators/react-q import { useState } from 'react'; import { userEvent, waitFor, within } from '@storybook/testing-library'; import { expect } from '@storybook/jest'; +import { ConsoleTypeDecorator } from '../../../../../storybook/decorators'; export default { component: ConnectPostgresForm, - decorators: [ReactQueryDecorator()], + decorators: [ + ReactQueryDecorator(), + ConsoleTypeDecorator({ consoleType: 'cloud' }), + ], } as Meta; export const TestPostgresForm: StoryObj = { @@ -166,7 +170,7 @@ export const TestPostgresForm: StoryObj = { ); await userEvent.click(await canvas.findByText('Advanced Settings')); - await userEvent.type(await canvas.findByPlaceholderText('1000'), '100'); + await userEvent.type(await canvas.findByPlaceholderText('50'), '100'); await userEvent.type(await canvas.findByPlaceholderText('180'), '100'); await userEvent.type(await canvas.findByPlaceholderText('1'), '100'); await userEvent.type(await canvas.findByPlaceholderText('360'), '100'); @@ -194,7 +198,7 @@ export const TestPostgresForm: StoryObj = { port: 5432, }, poolSettings: { - totalMaxConnections: 100, + maxConnections: 100, idleTimeout: 100, retries: 100, poolTimeout: 
100, @@ -252,7 +256,7 @@ export const TestPostgresForm: StoryObj = { port: 5432, }, poolSettings: { - totalMaxConnections: 100, + maxConnections: 100, idleTimeout: 100, retries: 100, poolTimeout: 100, diff --git a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/parts/PoolSettings.tsx b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/parts/PoolSettings.tsx index 7833247f92c..ebcd105bb32 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/parts/PoolSettings.tsx +++ b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/parts/PoolSettings.tsx @@ -1,4 +1,7 @@ +import { isCloudConsole } from '../../../../../utils'; +import globals from '../../../../../Globals'; import { InputField } from '../../../../../new-components/Form'; +// import { isCloudConsole } from '../../../../../utils'; const commonFieldProps: Partial> = { onWheelCapture: e => e.currentTarget.blur(), @@ -7,12 +10,23 @@ const commonFieldProps: Partial> = { export const PoolSettings = ({ name }: { name: string }) => { return ( <> + {isCloudConsole(globals) && ( + + )} + { "poolSettings": { "connectionLifetime": undefined, "idleTimeout": undefined, + "maxConnections": undefined, "poolTimeout": undefined, "retries": undefined, "totalMaxConnections": undefined, @@ -98,6 +99,7 @@ describe('adaptPostgresConnection works for ', () => { "poolSettings": { "connectionLifetime": undefined, "idleTimeout": undefined, + "maxConnections": undefined, "poolTimeout": undefined, "retries": undefined, "totalMaxConnections": undefined, diff --git a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/utils/adaptResponse.ts b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/utils/adaptResponse.ts index 40a35aa0c73..494ec557a88 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/utils/adaptResponse.ts +++ b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/utils/adaptResponse.ts @@ -43,6 +43,7 @@ export const adaptPostgresConnectionInfo = ( databaseUrl: adaptDatabaseUrl(connectionInfo.database_url), poolSettings: { totalMaxConnections: connectionInfo.pool_settings?.total_max_connections, + maxConnections: connectionInfo.pool_settings?.max_connections, idleTimeout: connectionInfo.pool_settings?.idle_timeout, retries: connectionInfo.pool_settings?.retries, poolTimeout: connectionInfo.pool_settings?.pool_timeout, diff --git a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/utils/generateRequests.ts b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/utils/generateRequests.ts index 4a2156477e2..459aab776c6 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/utils/generateRequests.ts +++ b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/components/ConnectPostgresWidget/utils/generateRequests.ts @@ -24,6 +24,7 @@ export const generateConnectionInfo = ( }, pool_settings: { total_max_connections: values.poolSettings?.totalMaxConnections, + max_connections: values.poolSettings?.maxConnections, idle_timeout: values.poolSettings?.idleTimeout, retries: 
values.poolSettings?.retries, pool_timeout: values.poolSettings?.poolTimeout, diff --git a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/mocks/data.mock.ts b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/mocks/data.mock.ts index 07b0b39ef26..0e1c9c2d07f 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/mocks/data.mock.ts +++ b/frontend/libs/console/legacy-ce/src/lib/features/ConnectDBRedesign/mocks/data.mock.ts @@ -35,6 +35,7 @@ export const mockMetadata: Metadata = { pool_timeout: 300, retries: 400, total_max_connections: 500, + max_connections: 500, }, use_prepared_statements: true, }, diff --git a/frontend/libs/console/legacy-ce/src/lib/features/Data/ManageTable/parts/TableName.tsx b/frontend/libs/console/legacy-ce/src/lib/features/Data/ManageTable/parts/TableName.tsx index 4230bf89348..27e444b9d47 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/Data/ManageTable/parts/TableName.tsx +++ b/frontend/libs/console/legacy-ce/src/lib/features/Data/ManageTable/parts/TableName.tsx @@ -10,6 +10,8 @@ import { useAppDispatch } from '../../../../storeHooks'; import { getRoute } from '../../../../utils/getDataRoute'; import _push from '../../../../components/Services/Data/push'; import AceEditor from 'react-ace'; +import { CreateRestEndpoint } from '../../../../components/Common/EditableHeading/CreateRestEndpoints'; +import { MetadataSelectors, useMetadata } from '../../../hasura-metadata-api'; export const TableName: React.VFC<{ dataSourceName: string; @@ -17,6 +19,9 @@ export const TableName: React.VFC<{ tableName: string; }> = ({ tableName, dataSourceName, table }) => { const dispatch = useAppDispatch(); + const { data: driver = '' } = useMetadata( + m => MetadataSelectors.findSource(dataSourceName)(m)?.kind + ); const { untrackTable } = useUntrackTable({ onSuccess: () => { hasuraToast({ @@ -87,6 +92,13 @@ export const TableName: React.VFC<{
Tracked
+ {['postgres', 'mssql', 'bigquery'].includes(driver) && ( + + )}
); }; diff --git a/frontend/libs/console/legacy-ce/src/lib/features/DataSidebar/navigation-utils.ts b/frontend/libs/console/legacy-ce/src/lib/features/DataSidebar/navigation-utils.ts index 5f28f0f5c89..b8de20e9450 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/DataSidebar/navigation-utils.ts +++ b/frontend/libs/console/legacy-ce/src/lib/features/DataSidebar/navigation-utils.ts @@ -11,7 +11,7 @@ export const manageTableUrl = ({ const tab = sessionStore.getItem('manageTable.lastTab') ?? DEFAULT_MANAGE_TABLE_TAB; - return `data/v2/manage/table/${tab}?database=${dataSourceName}&table=${encodeURIComponent( + return `/data/v2/manage/table/${tab}?database=${dataSourceName}&table=${encodeURIComponent( JSON.stringify(table) )}`; }; @@ -26,6 +26,6 @@ export const manageFunctionUrl = ({ dataSourceName: string; fn: unknown; }) => - `data/v2/manage/function?database=${dataSourceName}&function=${encodeURIComponent( + `/data/v2/manage/function?database=${dataSourceName}&function=${encodeURIComponent( JSON.stringify(fn) )}`; diff --git a/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/PermissionsForm.tsx b/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/PermissionsForm.tsx index 397eb0e9709..8939410393d 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/PermissionsForm.tsx +++ b/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/PermissionsForm.tsx @@ -4,7 +4,7 @@ import { Button } from '../../../new-components/Button'; import { IndicatorCard } from '../../../new-components/IndicatorCard'; import { MetadataSelector, - useMetadata, + useMetadata as useLegacyMetadata, useRoles, useSupportedQueryTypes, } from '../../MetadataAPI'; @@ -41,6 +41,7 @@ import { inputValidationEnabledSchema, } from '../../../components/Services/Data/TablePermissions/InputValidation/InputValidation'; import { z } from 'zod'; +import { MetadataSelectors, useMetadata } from '../../hasura-metadata-api'; export interface ComponentProps { dataSourceName: string; @@ -70,7 +71,7 @@ const Component = (props: ComponentProps) => { useScrollIntoView(permissionSectionRef, [roleName], { behavior: 'smooth' }); - const { data: metadataTables } = useMetadata( + const { data: metadataTables } = useLegacyMetadata( MetadataSelector.getTables(dataSourceName) ); const tables = metadataTables?.map(t => t.table) ?? []; @@ -197,6 +198,7 @@ const Component = (props: ComponentProps) => { roleName={roleName} queryType={queryType} columns={formData?.columns} + computedFields={formData?.computed_fields} table={table} dataSourceName={dataSourceName} /> @@ -281,6 +283,11 @@ export const PermissionsForm = (props: PermissionsFormProps) => { const { columns: tableColumns, isLoading: isLoadingTables } = useListAllTableColumns(dataSourceName, table); + const metadataTableResult = useMetadata( + MetadataSelectors.findTable(dataSourceName, table) + ); + const computedFields = metadataTableResult.data?.computed_fields ?? 
[]; + const { data: metadataSource } = useMetadataSource(dataSourceName); const { data, isError, isLoading } = useFormData({ @@ -328,6 +335,7 @@ export const PermissionsForm = (props: PermissionsFormProps) => { metadata: data?.metadata, table, tableColumns, + tableComputedFields: computedFields, defaultQueryRoot: data.defaultQueryRoot, metadataSource, supportedOperators: data.supportedOperators, @@ -357,6 +365,7 @@ export const PermissionsForm = (props: PermissionsFormProps) => { table, metadata: data.metadata, tableColumns, + computedFields, trackedTables: metadataSource.tables, metadataSource, validateInput: { diff --git a/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/api/createSelectArgs.test.ts b/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/api/createSelectArgs.test.ts index ae872cf5c21..9ba82683655 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/api/createSelectArgs.test.ts +++ b/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/api/createSelectArgs.test.ts @@ -17,11 +17,13 @@ test('create select args object from form data', () => { args: { table: ['Album'], role: 'user', + comment: '', permission: { columns: ['AlbumId', 'Title', 'ArtistId'], filter: { _not: { AlbumId: { _eq: 'X-Hasura-User-Id' } } }, set: {}, allow_aggregations: false, + computed_fields: [], }, source: 'Chinook', }, @@ -42,6 +44,7 @@ test('create delete args object from form data', () => { args: { table: ['Album'], role: 'user', + comment: '', permission: { backend_only: false, filter: { Title: { _eq: 'Test' } } }, source: 'Chinook', }, @@ -58,6 +61,7 @@ test('create insert args object from form data', () => { args: { table: ['Album'], role: 'user', + comment: '', permission: { columns: [], check: { @@ -69,6 +73,7 @@ test('create insert args object from form data', () => { }, allow_upsert: true, set: {}, + validate_input: undefined, backend_only: false, }, source: 'Chinook', diff --git a/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/api/utils.ts b/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/api/utils.ts index 2f8f95a11cd..1e9d43f2129 100644 --- a/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/api/utils.ts +++ b/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/api/utils.ts @@ -30,6 +30,7 @@ const formatFilterValues = (formFilter: Record[] = []) => { type SelectPermissionMetadata = { columns: string[]; + computed_fields: string[]; set: Record; filter: Record; allow_aggregations?: boolean; @@ -43,12 +44,16 @@ const createSelectObject = (input: PermissionsSchema) => { const columns = Object.entries(input.columns) .filter(({ 1: value }) => value) .map(([key]) => key); + const computed_fields = Object.entries(input.computed_fields) + .filter(({ 1: value }) => value) + .map(([key]) => key); // Input may be undefined const filter = formatFilterValues(input.filter); const permissionObject: SelectPermissionMetadata = { columns, + computed_fields, filter, set: {}, allow_aggregations: input.aggregationEnabled, diff --git a/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/components/ColumnPermissions.tsx b/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/components/ColumnPermissions.tsx index 17c99e789cd..769f19f1e70 100644 --- 
a/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/components/ColumnPermissions.tsx +++ b/frontend/libs/console/legacy-ce/src/lib/features/Permissions/PermissionsForm/components/ColumnPermissions.tsx @@ -18,6 +18,7 @@ import { SubscriptionRootPermissionType, QueryRootPermissionType, } from './RootFieldPermissions/types'; +import { MetadataSelectors, useMetadata } from '../../../hasura-metadata-api'; const getAccessText = (queryType: string) => { if (queryType === 'insert') { @@ -35,6 +36,7 @@ export interface ColumnPermissionsSectionProps { queryType: QueryType; roleName: string; columns?: string[]; + computedFields?: string[]; table: unknown; dataSourceName: string; } @@ -85,19 +87,30 @@ const checkIfConfirmationIsNeeded = ( ); }; -// @todo -// this hasn't been fully implemented, it still needs computed columns adding export const ColumnPermissionsSection: React.FC< ColumnPermissionsSectionProps -> = ({ roleName, queryType, columns, table, dataSourceName }) => { +> = ({ + roleName, + queryType, + columns, + table, + computedFields, + dataSourceName, +}) => { const { setValue, watch } = useFormContext(); const [showConfirmation, setShowConfirmationModal] = useState( null ); watch(); - const [selectedColumns, queryRootFields, subscriptionRootFields] = watch([ + const [ + selectedColumns, + selectedComputedFields, + queryRootFields, + subscriptionRootFields, + ] = watch([ 'columns', + 'computed_fields', 'query_root_fields', 'subscription_root_fields', ]); @@ -112,6 +125,13 @@ export const ColumnPermissionsSection: React.FC< table ); + const metadataTableResult = useMetadata( + MetadataSelectors.findTable(dataSourceName, table) + ); + const tableComputedFields = metadataTableResult.data?.computed_fields?.map( + ({ name }) => name + ); + const onClick = () => { columns?.forEach(column => { const toggleAllOn = status !== 'All columns'; @@ -119,6 +139,12 @@ export const ColumnPermissionsSection: React.FC< // otherwise toggle all off setValue(`columns.${column}`, toggleAllOn); }); + computedFields?.forEach(field => { + const toggleAllOn = status !== 'All columns'; + // if status is not all columns: toggle all on + // otherwise toggle all off + setValue(`computed_fields.${field}`, toggleAllOn); + }); }; if (isError) { @@ -206,6 +232,26 @@ export const ColumnPermissionsSection: React.FC< {fieldName} ))} + {queryType === 'select' && + tableComputedFields?.map(fieldName => ( + + ))}